language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/loader/ast/spi/SqlArrayMultiKeyLoader.java
|
{
"start": 288,
"end": 348
}
|
interface ____ extends MultiKeyLoader {
}
|
SqlArrayMultiKeyLoader
|
java
|
quarkusio__quarkus
|
extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/ResourceMetadataHandler.java
|
{
"start": 5346,
"end": 11458
}
|
class ____ implements Handler<RoutingContext> {
private final OidcTenantConfig oidcConfig;
private final DefaultTenantConfigResolver resolver;
RouteHandler(OidcTenantConfig oidcTenantConfig, DefaultTenantConfigResolver resolver) {
this.oidcConfig = oidcTenantConfig;
this.resolver = resolver;
}
@Override
public void handle(RoutingContext context) {
LOG.debugf("Resource metadata request for the tenant %s received", oidcConfig.tenantId().get());
context.response().setStatusCode(200);
context.response().end(prepareMetadata(context));
}
private String prepareMetadata(RoutingContext context) {
JsonObject metadata = new JsonObject();
String resourceIdentifier = buildResourceIdentifierUrl(context, resolver, oidcConfig);
metadata.put(OidcConstants.RESOURCE_METADATA_RESOURCE, resourceIdentifier);
if (oidcConfig.resourceMetadata().scopes().isPresent()) {
JsonArray scopes = new JsonArray();
for (String scope : oidcConfig.resourceMetadata().scopes().get()) {
scopes.add(scope);
}
metadata.put(OidcConstants.RESOURCE_METADATA_SCOPES, scopes);
}
JsonArray authorizationServers = new JsonArray();
authorizationServers.add(0, oidcConfig.resourceMetadata().authorizationServer()
.orElse(oidcConfig.authServerUrl().get()));
metadata.put(OidcConstants.RESOURCE_METADATA_AUTHORIZATION_SERVERS, authorizationServers);
return metadata.toString();
}
}
static void fireResourceMetadataChangedEvent(OidcTenantConfig oidcConfig, TenantConfigContext tenant) {
if (oidcConfig.resourceMetadata().enabled() ||
(tenant.oidcConfig() != null && tenant.oidcConfig().resourceMetadata().enabled())) {
boolean resourceChanged = tenant.oidcConfig() == null
|| !oidcConfig.resourceMetadata().resource().orElse("")
.equals(tenant.oidcConfig().resourceMetadata().resource().orElse(""))
|| oidcConfig.resourceMetadata().enabled() != tenant.oidcConfig().resourceMetadata().enabled()
|| oidcConfig.resourceMetadata().forceHttpsScheme() != tenant.oidcConfig().resourceMetadata()
.forceHttpsScheme();
if (resourceChanged) {
fireResourceMetadataEvent();
}
}
}
static void fireResourceMetadataReadyEvent(OidcTenantConfig oidcConfig) {
if (oidcConfig.resourceMetadata().enabled()) {
fireResourceMetadataEvent();
}
}
private static void fireResourceMetadataEvent() {
Event<NewResourceMetadata> event = Arc.container().beanManager().getEvent()
.select(NewResourceMetadata.class);
event.fire(new NewResourceMetadata());
}
static String resourceMetadataAuthenticateParameter(RoutingContext context, DefaultTenantConfigResolver resolver,
OidcTenantConfig oidcConfig) {
return " " + RESOURCE_METADATA_AUTHENTICATE_PARAM + "=\""
+ buildAbsoluteResourceIdentifierUrl(context, resolver, oidcConfig)
+ "\"";
}
static String buildResourceIdentifierUrl(RoutingContext context, DefaultTenantConfigResolver resolver,
OidcTenantConfig oidcConfig) {
String configuredResource = oidcConfig.resourceMetadata().resource().orElse("");
if (configuredResource.startsWith(HTTP_SCHEME)) {
return configuredResource;
} else {
if (!configuredResource.isEmpty()) {
if (!SLASH.equals(configuredResource)) {
configuredResource = OidcCommonUtils.prependSlash(configuredResource);
}
} else if (!OidcUtils.DEFAULT_TENANT_ID.equals(oidcConfig.tenantId().get())) {
configuredResource += OidcCommonUtils.prependSlash(oidcConfig.tenantId().get().toLowerCase());
}
String authority = URI.create(context.request().absoluteURI()).getAuthority();
return buildUri(context, resolver.isEnableHttpForwardedPrefix(),
oidcConfig.resourceMetadata().forceHttpsScheme(), authority, configuredResource);
}
}
static String buildAbsoluteResourceIdentifierUrl(RoutingContext context, DefaultTenantConfigResolver resolver,
OidcTenantConfig oidcConfig) {
String configuredResource = getResourceMetadataPath(oidcConfig, resolver.getRootPath());
if (configuredResource.startsWith(HTTP_SCHEME)) {
return configuredResource;
} else {
String authority = URI.create(context.request().absoluteURI()).getAuthority();
return buildUri(context, resolver.isEnableHttpForwardedPrefix(),
oidcConfig.resourceMetadata().forceHttpsScheme(), authority, configuredResource);
}
}
private static String buildUri(RoutingContext context,
boolean enableHttpForwardedPrefix, boolean forceHttps, String authority, String path) {
final String scheme = forceHttps ? "https" : context.request().scheme();
String forwardedPrefix = "";
if (enableHttpForwardedPrefix) {
String forwardedPrefixHeader = context.request().getHeader("X-Forwarded-Prefix");
if (forwardedPrefixHeader != null && !forwardedPrefixHeader.equals("/")
&& !forwardedPrefixHeader.equals("//")) {
forwardedPrefix = forwardedPrefixHeader;
if (forwardedPrefix.endsWith("/")) {
forwardedPrefix = forwardedPrefix.substring(0, forwardedPrefix.length() - 1);
}
}
}
return new StringBuilder(scheme).append("://")
.append(authority)
.append(forwardedPrefix)
.append(path)
.toString();
}
}
|
RouteHandler
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-csi/src/main/java/org/apache/hadoop/yarn/csi/adaptor/CsiAdaptorFactory.java
|
{
"start": 1347,
"end": 1390
}
|
class ____ the csi-driver.
*/
public final
|
for
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockProvider.java
|
{
"start": 354,
"end": 767
}
|
enum ____ {
AMAZONTITAN,
ANTHROPIC,
AI21LABS,
COHERE,
META,
MISTRAL;
public static final String NAME = "amazon_bedrock_provider";
public static AmazonBedrockProvider fromString(String name) {
return valueOf(name.trim().toUpperCase(Locale.ROOT));
}
@Override
public String toString() {
return name().toLowerCase(Locale.ROOT);
}
}
|
AmazonBedrockProvider
|
java
|
apache__flink
|
flink-python/src/main/java/org/apache/flink/table/runtime/arrow/writers/FloatWriter.java
|
{
"start": 1113,
"end": 2191
}
|
class ____<T> extends ArrowFieldWriter<T> {
public static FloatWriter<RowData> forRow(Float4Vector floatVector) {
return new FloatWriterForRow(floatVector);
}
public static FloatWriter<ArrayData> forArray(Float4Vector floatVector) {
return new FloatWriterForArray(floatVector);
}
// ------------------------------------------------------------------------------------------
private FloatWriter(Float4Vector floatVector) {
super(floatVector);
}
abstract boolean isNullAt(T in, int ordinal);
abstract float readFloat(T in, int ordinal);
@Override
public void doWrite(T in, int ordinal) {
if (isNullAt(in, ordinal)) {
((Float4Vector) getValueVector()).setNull(getCount());
} else {
((Float4Vector) getValueVector()).setSafe(getCount(), readFloat(in, ordinal));
}
}
// ------------------------------------------------------------------------------------------
/** {@link FloatWriter} for {@link RowData} input. */
public static final
|
FloatWriter
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_3849/DeduplicateGenericMapper.java
|
{
"start": 433,
"end": 1515
}
|
interface ____ {
DeduplicateGenericMapper INSTANCE = Mappers.getMapper( DeduplicateGenericMapper.class );
List<String> INVOKED_METHODS = new ArrayList<>();
ParentDto mapParent(Parent source);
ChildDto mapChild(Parent source);
@BeforeMapping
default <T extends ParentDto> void deduplicateBefore(Parent source, @MappingTarget T target) {
INVOKED_METHODS.add( "beforeMappingParentGeneric" );
}
@BeforeMapping
default void deduplicateBefore(Parent source, @MappingTarget ChildDto target) {
INVOKED_METHODS.add( "beforeMappingChild" );
}
@AfterMapping
default <T> void deduplicate(Parent source, @MappingTarget T target) {
INVOKED_METHODS.add( "afterMappingGeneric" );
}
@AfterMapping
default void deduplicate(Parent source, @MappingTarget ParentDto target) {
INVOKED_METHODS.add( "afterMappingParent" );
}
@AfterMapping
default void deduplicate(Parent source, @MappingTarget ChildDto target) {
INVOKED_METHODS.add( "afterMappingChild" );
}
}
|
DeduplicateGenericMapper
|
java
|
apache__avro
|
lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroOutputFormat.java
|
{
"start": 2408,
"end": 7421
}
|
class ____<T> extends FileOutputFormat<AvroWrapper<T>, NullWritable> {
/** The file name extension for avro data files. */
public final static String EXT = ".avro";
/** The configuration key for Avro deflate level. */
public static final String DEFLATE_LEVEL_KEY = "avro.mapred.deflate.level";
/** The configuration key for Avro XZ level. */
public static final String XZ_LEVEL_KEY = "avro.mapred.xz.level";
/** The configuration key for Avro ZSTD level. */
public static final String ZSTD_LEVEL_KEY = "avro.mapred.zstd.level";
/** The configuration key for Avro ZSTD buffer pool. */
public static final String ZSTD_BUFFERPOOL_KEY = "avro.mapred.zstd.bufferpool";
/** The configuration key for Avro sync interval. */
public static final String SYNC_INTERVAL_KEY = "avro.mapred.sync.interval";
/** Enable output compression using the deflate codec and specify its level. */
public static void setDeflateLevel(JobConf job, int level) {
FileOutputFormat.setCompressOutput(job, true);
job.setInt(DEFLATE_LEVEL_KEY, level);
}
/**
* Set the sync interval to be used by the underlying {@link DataFileWriter}.
*/
public static void setSyncInterval(JobConf job, int syncIntervalInBytes) {
job.setInt(SYNC_INTERVAL_KEY, syncIntervalInBytes);
}
static <T> void configureDataFileWriter(DataFileWriter<T> writer, JobConf job) throws UnsupportedEncodingException {
CodecFactory factory = getCodecFactory(job);
if (factory != null) {
writer.setCodec(factory);
}
writer.setSyncInterval(job.getInt(SYNC_INTERVAL_KEY, DEFAULT_SYNC_INTERVAL));
// copy metadata from job
for (Map.Entry<String, String> e : job) {
if (e.getKey().startsWith(AvroJob.TEXT_PREFIX))
writer.setMeta(e.getKey().substring(AvroJob.TEXT_PREFIX.length()), e.getValue());
if (e.getKey().startsWith(AvroJob.BINARY_PREFIX))
writer.setMeta(e.getKey().substring(AvroJob.BINARY_PREFIX.length()),
URLDecoder.decode(e.getValue(), StandardCharsets.ISO_8859_1.name()).getBytes(StandardCharsets.ISO_8859_1));
}
}
/**
* This will select the correct compression codec from the JobConf. The order of
* selection is as follows:
* <ul>
* <li>If mapred.output.compress is true then look for codec otherwise no
* compression</li>
* <li>Use avro.output.codec if populated</li>
* <li>Next use mapred.output.compression.codec if populated</li>
* <li>If not default to Deflate Codec</li>
* </ul>
*/
static CodecFactory getCodecFactory(JobConf job) {
CodecFactory factory = null;
if (FileOutputFormat.getCompressOutput(job)) {
int deflateLevel = job.getInt(DEFLATE_LEVEL_KEY, DEFAULT_DEFLATE_LEVEL);
int xzLevel = job.getInt(XZ_LEVEL_KEY, DEFAULT_XZ_LEVEL);
int zstdLevel = job.getInt(ZSTD_LEVEL_KEY, DEFAULT_ZSTANDARD_LEVEL);
boolean zstdBufferPool = job.getBoolean(ZSTD_BUFFERPOOL_KEY, DEFAULT_ZSTANDARD_BUFFERPOOL);
String codecName = job.get(AvroJob.OUTPUT_CODEC);
if (codecName == null) {
String codecClassName = job.get("mapred.output.compression.codec", null);
String avroCodecName = HadoopCodecFactory.getAvroCodecName(codecClassName);
if (codecClassName != null && avroCodecName != null) {
factory = HadoopCodecFactory.fromHadoopString(codecClassName);
job.set(AvroJob.OUTPUT_CODEC, avroCodecName);
return factory;
} else {
return CodecFactory.deflateCodec(deflateLevel);
}
} else {
if (codecName.equals(DEFLATE_CODEC)) {
factory = CodecFactory.deflateCodec(deflateLevel);
} else if (codecName.equals(XZ_CODEC)) {
factory = CodecFactory.xzCodec(xzLevel);
} else if (codecName.equals(ZSTANDARD_CODEC)) {
factory = CodecFactory.zstandardCodec(zstdLevel, false, zstdBufferPool);
} else {
factory = CodecFactory.fromString(codecName);
}
}
}
return factory;
}
@Override
public RecordWriter<AvroWrapper<T>, NullWritable> getRecordWriter(FileSystem ignore, JobConf job, String name,
Progressable prog) throws IOException {
boolean isMapOnly = job.getNumReduceTasks() == 0;
Schema schema = isMapOnly ? AvroJob.getMapOutputSchema(job) : AvroJob.getOutputSchema(job);
GenericData dataModel = AvroJob.createDataModel(job);
final DataFileWriter<T> writer = new DataFileWriter<T>(dataModel.createDatumWriter(null));
configureDataFileWriter(writer, job);
Path path = FileOutputFormat.getTaskOutputPath(job, name + EXT);
writer.create(schema, path.getFileSystem(job).create(path));
return new RecordWriter<AvroWrapper<T>, NullWritable>() {
@Override
public void write(AvroWrapper<T> wrapper, NullWritable ignore) throws IOException {
writer.append(wrapper.datum());
}
@Override
public void close(Reporter reporter) throws IOException {
writer.close();
}
};
}
}
|
AvroOutputFormat
|
java
|
apache__spark
|
sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/KerberosSaslHelper.java
|
{
"start": 3950,
"end": 4484
}
|
class ____ extends TProcessorFactory {
private final ThriftCLIService service;
private final Server saslServer;
CLIServiceProcessorFactory(Server saslServer, ThriftCLIService service) {
super(null);
this.service = service;
this.saslServer = saslServer;
}
@Override
public TProcessor getProcessor(TTransport trans) {
TProcessor sqlProcessor = new TCLIService.Processor<Iface>(service);
return saslServer.wrapNonAssumingProcessor(sqlProcessor);
}
}
}
|
CLIServiceProcessorFactory
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/exceptions/IbatisException.java
|
{
"start": 741,
"end": 1124
}
|
class ____ extends RuntimeException {
private static final long serialVersionUID = 3880206998166270511L;
public IbatisException() {
}
public IbatisException(String message) {
super(message);
}
public IbatisException(String message, Throwable cause) {
super(message, cause);
}
public IbatisException(Throwable cause) {
super(cause);
}
}
|
IbatisException
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/MpClassNameScopeOverrideTest.java
|
{
"start": 2146,
"end": 2326
}
|
interface ____ {
@POST
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
@Path("/")
String echo(String name);
}
}
|
MPRestClient
|
java
|
processing__processing4
|
app/src/processing/app/ui/EditorConsole.java
|
{
"start": 10440,
"end": 11290
}
|
class ____ extends OutputStream {
boolean err;
public EditorConsoleStream(boolean err) {
this.err = err;
}
public void write(byte[] b, int offset, int length) {
message(new String(b, offset, length), err);
}
// doesn't appear to be called (but must be implemented)
public void write(int b) {
write(new byte[] { (byte) b }, 0, 1);
}
}
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
/**
* Buffer updates to the console and output them in batches. For info, see:
* http://java.sun.com/products/jfc/tsc/articles/text/element_buffer and
* http://javatechniques.com/public/java/docs/gui/jtextpane-speed-part2.html
* appendString() is called from multiple threads, and insertAll from the
* swing event thread, so they need to be synchronized
*/
|
EditorConsoleStream
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/PostOptimizationPlanVerificationAware.java
|
{
"start": 1072,
"end": 3481
}
|
interface ____ {
/**
* Validates the implementing expression - discovered failures are reported to the given {@link Failures} class.
*
* <p>
* Example: the SORT command, {@code OrderBy}, can only be executed currently if it can be associated with a LIMIT {@code Limit}
* and together transformed into a {@code TopN} (which is executable). The replacement of the LIMIT+SORT into a TopN is done at
* the end of the optimization phase. This means that any SORT still existing in the plan post optimization is an error.
* However, there can be a LIMIT in the plan, but separated from the SORT by an INLINE STATS; in this case, the LIMIT cannot be
* pushed down near the SORT. To inform the user how they need to modify the query so it can be run, we implement this:
* <pre>
* {@code
*
* @Override
* public BiConsumer<LogicalPlan, Failures> postOptimizationPlanVerification() {
* return (p, failures) -> {
* if (p instanceof InlineJoin inlineJoin) {
* inlineJoin.forEachUp(OrderBy.class, orderBy -> {
* failures.add(
* fail(
* inlineJoin,
* "unbounded SORT [{}] not supported before INLINE STATS [{}], move the sort after the INLINE STATS",
* orderBy.sourceText(),
* inlineJoin.sourceText()
* )
* );
* });
* } else if (p instanceof OrderBy) {
* failures.add(fail(p, "Unbounded SORT not supported yet [{}] please add a LIMIT", p.sourceText()));
* }
* };
* }
* }
* </pre>
* <p>
* If we didn't need to check the structure of the plan, it would have sufficed to implement the
* {@link PostOptimizationVerificationAware} interface, which would simply check if there is an instance of {@code OrderBy} in the
* plan.
*
* @return a consumer that will receive a tree to check and an accumulator of failures found during inspection.
*/
BiConsumer<LogicalPlan, Failures> postOptimizationPlanVerification();
}
|
PostOptimizationPlanVerificationAware
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java
|
{
"start": 1845,
"end": 5240
}
|
class ____ extends HandledTransportAction<DeleteFilterAction.Request, AcknowledgedResponse> {
private final Client client;
private final JobConfigProvider jobConfigProvider;
@Inject
public TransportDeleteFilterAction(
TransportService transportService,
ActionFilters actionFilters,
Client client,
JobConfigProvider jobConfigProvider
) {
super(
DeleteFilterAction.NAME,
transportService,
actionFilters,
DeleteFilterAction.Request::new,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
this.client = client;
this.jobConfigProvider = jobConfigProvider;
}
@Override
protected void doExecute(Task task, DeleteFilterAction.Request request, ActionListener<AcknowledgedResponse> listener) {
final String filterId = request.getFilterId();
jobConfigProvider.findJobsWithCustomRules(listener.delegateFailureAndWrap((delegate, jobs) -> {
List<String> currentlyUsedBy = findJobsUsingFilter(jobs, filterId);
if (currentlyUsedBy.isEmpty() == false) {
delegate.onFailure(
ExceptionsHelper.conflictStatusException(Messages.getMessage(Messages.FILTER_CANNOT_DELETE, filterId, currentlyUsedBy))
);
} else {
deleteFilter(filterId, delegate);
}
}));
}
private static List<String> findJobsUsingFilter(List<Job> jobs, String filterId) {
List<String> currentlyUsedBy = new ArrayList<>();
for (Job job : jobs) {
List<Detector> detectors = job.getAnalysisConfig().getDetectors();
for (Detector detector : detectors) {
if (detector.extractReferencedFilters().contains(filterId)) {
currentlyUsedBy.add(job.getId());
break;
}
}
}
return currentlyUsedBy;
}
private void deleteFilter(String filterId, ActionListener<AcknowledgedResponse> listener) {
DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.indexName(), MlFilter.documentId(filterId));
BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
bulkRequestBuilder.add(deleteRequest);
bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
executeAsyncWithOrigin(
client,
ML_ORIGIN,
TransportBulkAction.TYPE,
bulkRequestBuilder.request(),
new ActionListener<BulkResponse>() {
@Override
public void onResponse(BulkResponse bulkResponse) {
if (bulkResponse.getItems()[0].status() == RestStatus.NOT_FOUND) {
listener.onFailure(
new ResourceNotFoundException("Could not delete filter with ID [" + filterId + "] because it does not exist")
);
} else {
listener.onResponse(AcknowledgedResponse.TRUE);
}
}
@Override
public void onFailure(Exception e) {
listener.onFailure(ExceptionsHelper.serverError("Could not delete filter with ID [" + filterId + "]", e));
}
}
);
}
}
|
TransportDeleteFilterAction
|
java
|
apache__camel
|
core/camel-management/src/test/java/org/apache/camel/management/JmxInstrumentationCustomMBeanTest.java
|
{
"start": 4517,
"end": 4773
}
|
class ____ extends MockComponent {
@Override
protected Endpoint createEndpoint(final String uri, final String remaining, final Map<String, Object> parameters) {
return new CustomEndpoint(uri, this);
}
}
}
|
CustomComponent
|
java
|
apache__rocketmq
|
remoting/src/test/java/org/apache/rocketmq/remoting/protocol/RemotingCommandTest.java
|
{
"start": 1293,
"end": 11246
}
|
class ____ {
@Test
public void testMarkProtocolType_JSONProtocolType() {
int source = 261;
SerializeType type = SerializeType.JSON;
byte[] result = new byte[4];
int x = RemotingCommand.markProtocolType(source, type);
result[0] = (byte) (x >> 24);
result[1] = (byte) (x >> 16);
result[2] = (byte) (x >> 8);
result[3] = (byte) x;
assertThat(result).isEqualTo(new byte[] {0, 0, 1, 5});
}
@Test
public void testMarkProtocolType_ROCKETMQProtocolType() {
int source = 16777215;
SerializeType type = SerializeType.ROCKETMQ;
byte[] result = new byte[4];
int x = RemotingCommand.markProtocolType(source, type);
result[0] = (byte) (x >> 24);
result[1] = (byte) (x >> 16);
result[2] = (byte) (x >> 8);
result[3] = (byte) x;
assertThat(result).isEqualTo(new byte[] {1, -1, -1, -1});
}
@Test
public void testCreateRequestCommand_RegisterBroker() {
System.setProperty(RemotingCommand.REMOTING_VERSION_KEY, "2333");
int code = 103; //org.apache.rocketmq.remoting.protocol.RequestCode.REGISTER_BROKER
CommandCustomHeader header = new SampleCommandCustomHeader();
RemotingCommand cmd = RemotingCommand.createRequestCommand(code, header);
assertThat(cmd.getCode()).isEqualTo(code);
assertThat(cmd.getVersion()).isEqualTo(2333);
assertThat(cmd.getFlag() & 0x01).isEqualTo(0); //flag bit 0: 0 presents request
}
@Test
public void testCreateResponseCommand_SuccessWithHeader() {
System.setProperty(RemotingCommand.REMOTING_VERSION_KEY, "2333");
int code = RemotingSysResponseCode.SUCCESS;
String remark = "Sample remark";
RemotingCommand cmd = RemotingCommand.createResponseCommand(code, remark, SampleCommandCustomHeader.class);
assertThat(cmd.getCode()).isEqualTo(code);
assertThat(cmd.getVersion()).isEqualTo(2333);
assertThat(cmd.getRemark()).isEqualTo(remark);
assertThat(cmd.getFlag() & 0x01).isEqualTo(1); //flag bit 0: 1 presents response
}
@Test
public void testCreateResponseCommand_SuccessWithoutHeader() {
System.setProperty(RemotingCommand.REMOTING_VERSION_KEY, "2333");
int code = RemotingSysResponseCode.SUCCESS;
String remark = "Sample remark";
RemotingCommand cmd = RemotingCommand.createResponseCommand(code, remark);
assertThat(cmd.getCode()).isEqualTo(code);
assertThat(cmd.getVersion()).isEqualTo(2333);
assertThat(cmd.getRemark()).isEqualTo(remark);
assertThat(cmd.getFlag() & 0x01).isEqualTo(1); //flag bit 0: 1 presents response
}
@Test
public void testCreateResponseCommand_FailToCreateCommand() {
System.setProperty(RemotingCommand.REMOTING_VERSION_KEY, "2333");
int code = RemotingSysResponseCode.SUCCESS;
String remark = "Sample remark";
RemotingCommand cmd = RemotingCommand.createResponseCommand(code, remark, CommandCustomHeader.class);
assertThat(cmd).isNull();
}
@Test
public void testCreateResponseCommand_SystemError() {
System.setProperty(RemotingCommand.REMOTING_VERSION_KEY, "2333");
RemotingCommand cmd = RemotingCommand.createResponseCommand(SampleCommandCustomHeader.class);
assertThat(cmd.getCode()).isEqualTo(RemotingSysResponseCode.SYSTEM_ERROR);
assertThat(cmd.getVersion()).isEqualTo(2333);
assertThat(cmd.getRemark()).contains("not set any response code");
assertThat(cmd.getFlag() & 0x01).isEqualTo(1); //flag bit 0: 1 presents response
}
@Test
public void testEncodeAndDecode_EmptyBody() {
System.setProperty(RemotingCommand.REMOTING_VERSION_KEY, "2333");
int code = 103; //org.apache.rocketmq.remoting.protocol.RequestCode.REGISTER_BROKER
CommandCustomHeader header = new SampleCommandCustomHeader();
RemotingCommand cmd = RemotingCommand.createRequestCommand(code, header);
ByteBuffer buffer = cmd.encode();
//Simulate buffer being read in NettyDecoder
buffer.getInt();
byte[] bytes = new byte[buffer.limit() - 4];
buffer.get(bytes, 0, buffer.limit() - 4);
buffer = ByteBuffer.wrap(bytes);
RemotingCommand decodedCommand = null;
try {
decodedCommand = RemotingCommand.decode(buffer);
assertThat(decodedCommand.getSerializeTypeCurrentRPC()).isEqualTo(SerializeType.JSON);
assertThat(decodedCommand.getBody()).isNull();
} catch (RemotingCommandException e) {
e.printStackTrace();
Assert.fail("Should not throw IOException");
}
}
@Test
public void testEncodeAndDecode_FilledBody() {
System.setProperty(RemotingCommand.REMOTING_VERSION_KEY, "2333");
int code = 103; //org.apache.rocketmq.remoting.protocol.RequestCode.REGISTER_BROKER
CommandCustomHeader header = new SampleCommandCustomHeader();
RemotingCommand cmd = RemotingCommand.createRequestCommand(code, header);
cmd.setBody(new byte[] {0, 1, 2, 3, 4});
ByteBuffer buffer = cmd.encode();
//Simulate buffer being read in NettyDecoder
buffer.getInt();
byte[] bytes = new byte[buffer.limit() - 4];
buffer.get(bytes, 0, buffer.limit() - 4);
buffer = ByteBuffer.wrap(bytes);
RemotingCommand decodedCommand = null;
try {
decodedCommand = RemotingCommand.decode(buffer);
assertThat(decodedCommand.getSerializeTypeCurrentRPC()).isEqualTo(SerializeType.JSON);
assertThat(decodedCommand.getBody()).isEqualTo(new byte[] {0, 1, 2, 3, 4});
} catch (RemotingCommandException e) {
e.printStackTrace();
Assert.fail("Should not throw IOException");
}
}
@Test
public void testEncodeAndDecode_FilledBodyWithExtFields() throws RemotingCommandException {
System.setProperty(RemotingCommand.REMOTING_VERSION_KEY, "2333");
int code = 103; //org.apache.rocketmq.remoting.protocol.RequestCode.REGISTER_BROKER
CommandCustomHeader header = new ExtFieldsHeader();
RemotingCommand cmd = RemotingCommand.createRequestCommand(code, header);
cmd.addExtField("key", "value");
ByteBuffer buffer = cmd.encode();
//Simulate buffer being read in NettyDecoder
buffer.getInt();
byte[] bytes = new byte[buffer.limit() - 4];
buffer.get(bytes, 0, buffer.limit() - 4);
buffer = ByteBuffer.wrap(bytes);
RemotingCommand decodedCommand = null;
try {
decodedCommand = RemotingCommand.decode(buffer);
assertThat(decodedCommand.getExtFields().get("stringValue")).isEqualTo("bilibili");
assertThat(decodedCommand.getExtFields().get("intValue")).isEqualTo("2333");
assertThat(decodedCommand.getExtFields().get("longValue")).isEqualTo("23333333");
assertThat(decodedCommand.getExtFields().get("booleanValue")).isEqualTo("true");
assertThat(decodedCommand.getExtFields().get("doubleValue")).isEqualTo("0.618");
assertThat(decodedCommand.getExtFields().get("key")).isEqualTo("value");
CommandCustomHeader decodedHeader = decodedCommand.decodeCommandCustomHeader(ExtFieldsHeader.class);
assertThat(((ExtFieldsHeader) decodedHeader).getStringValue()).isEqualTo("bilibili");
assertThat(((ExtFieldsHeader) decodedHeader).getIntValue()).isEqualTo(2333);
assertThat(((ExtFieldsHeader) decodedHeader).getLongValue()).isEqualTo(23333333L);
assertThat(((ExtFieldsHeader) decodedHeader).isBooleanValue()).isEqualTo(true);
assertThat(((ExtFieldsHeader) decodedHeader).getDoubleValue()).isBetween(0.617, 0.619);
} catch (RemotingCommandException e) {
e.printStackTrace();
Assert.fail("Should not throw IOException");
}
}
@Test
public void testNotNullField() throws Exception {
RemotingCommand remotingCommand = new RemotingCommand();
Method method = RemotingCommand.class.getDeclaredMethod("isFieldNullable", Field.class);
method.setAccessible(true);
Field nullString = FieldTestClass.class.getDeclaredField("nullString");
assertThat(method.invoke(remotingCommand, nullString)).isEqualTo(false);
Field nullableString = FieldTestClass.class.getDeclaredField("nullable");
assertThat(method.invoke(remotingCommand, nullableString)).isEqualTo(true);
Field value = FieldTestClass.class.getDeclaredField("value");
assertThat(method.invoke(remotingCommand, value)).isEqualTo(false);
}
@Test
public void testParentField() throws Exception {
SubExtFieldsHeader subExtFieldsHeader = new SubExtFieldsHeader();
RemotingCommand remotingCommand = RemotingCommand.createRequestCommand(1, subExtFieldsHeader);
Field[] fields = remotingCommand.getClazzFields(subExtFieldsHeader.getClass());
Set<String> fieldNames = new HashSet<>();
for (Field field: fields) {
fieldNames.add(field.getName());
}
Assert.assertTrue(fields.length >= 7);
Set<String> names = new HashSet<>();
names.add("stringValue");
names.add("intValue");
names.add("longValue");
names.add("booleanValue");
names.add("doubleValue");
names.add("name");
names.add("value");
for (String name: names) {
Assert.assertTrue(fieldNames.contains(name));
}
remotingCommand.makeCustomHeaderToNet();
SubExtFieldsHeader other = (SubExtFieldsHeader) remotingCommand.decodeCommandCustomHeader(subExtFieldsHeader.getClass());
Assert.assertEquals(other, subExtFieldsHeader);
}
}
|
RemotingCommandTest
|
java
|
greenrobot__EventBus
|
EventBusTestJava/src/main/java/org/greenrobot/eventbus/EventBusOrderedSubscriptionsTest.java
|
{
"start": 2134,
"end": 4152
}
|
class ____ {
@Subscribe(priority = 1)
public void onEventP1(String event) {
handleEvent(1, event);
}
@Subscribe(priority = -1)
public void onEventM1(String event) {
handleEvent(-1, event);
}
@Subscribe(priority = 0)
public void onEventP0(String event) {
handleEvent(0, event);
}
@Subscribe(priority = 10)
public void onEventP10(String event) {
handleEvent(10, event);
}
@Subscribe(priority = -100)
public void onEventM100(String event) {
handleEvent(-100, event);
}
@Subscribe(threadMode = ThreadMode.MAIN, priority = -1)
public void onEventMainThreadM1(IntTestEvent event) {
handleEvent(-1, event);
}
@Subscribe(threadMode = ThreadMode.MAIN)
public void onEventMainThreadP0(IntTestEvent event) {
handleEvent(0, event);
}
@Subscribe(threadMode = ThreadMode.MAIN, priority = 1)
public void onEventMainThreadP1(IntTestEvent event) {
handleEvent(1, event);
}
@Subscribe(threadMode = ThreadMode.BACKGROUND, priority = 1)
public void onEventBackgroundThreadP1(Integer event) {
handleEvent(1, event);
}
@Subscribe(threadMode = ThreadMode.BACKGROUND)
public void onEventBackgroundThreadP0(Integer event) {
handleEvent(0, event);
}
@Subscribe(threadMode = ThreadMode.BACKGROUND, priority = -1)
public void onEventBackgroundThreadM1(Integer event) {
handleEvent(-1, event);
}
protected void handleEvent(int prio, Object event) {
if (prio > lastPrio) {
fail = "Called prio " + prio + " after " + lastPrio;
}
lastPrio = prio;
log("Subscriber " + prio + " got: " + event);
trackEvent(event);
}
}
public final
|
PrioSubscriber
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/clients/admin/ConfigEntry.java
|
{
"start": 930,
"end": 1030
}
|
class ____ a configuration entry containing name, value and additional metadata.
*/
public
|
representing
|
java
|
apache__maven
|
impl/maven-impl/src/main/java/org/apache/maven/impl/model/DefaultPluginManagementInjector.java
|
{
"start": 1667,
"end": 2069
}
|
class ____ implements PluginManagementInjector {
private ManagementModelMerger merger = new ManagementModelMerger();
@Override
public Model injectManagement(Model model, ModelBuilderRequest request, ModelProblemCollector problems) {
return merger.mergeManagedBuildPlugins(model);
}
/**
* ManagementModelMerger
*/
protected static
|
DefaultPluginManagementInjector
|
java
|
spring-projects__spring-security
|
web/src/main/java/org/springframework/security/web/authentication/RememberMeServices.java
|
{
"start": 1918,
"end": 2157
}
|
interface ____ not define how remember-me services should offer a "cancel all
* remember-me tokens" type capability, as this will be implementation specific and
* requires no hooks into Spring Security.
*
* @author Ben Alex
*/
public
|
does
|
java
|
quarkusio__quarkus
|
core/runtime/src/main/java/io/quarkus/runtime/PreventFurtherStepsException.java
|
{
"start": 502,
"end": 960
}
|
class ____ extends RuntimeException {
private final int exitCode;
public PreventFurtherStepsException() {
this(1);
}
public PreventFurtherStepsException(int exitCode) {
this(null, exitCode);
}
public PreventFurtherStepsException(String message, int exitCode) {
super(message);
this.exitCode = exitCode;
}
public int getExitCode() {
return exitCode;
}
}
|
PreventFurtherStepsException
|
java
|
spring-projects__spring-boot
|
module/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/endpoint/web/EndpointMapping.java
|
{
"start": 842,
"end": 1689
}
|
class ____ {
private final String path;
/**
* Creates a new {@code EndpointMapping} using the given {@code path}.
* @param path the path
*/
public EndpointMapping(String path) {
this.path = normalizePath(path);
}
/**
* Returns the path to which endpoints should be mapped.
* @return the path
*/
public String getPath() {
return this.path;
}
public String createSubPath(String path) {
return this.path + normalizePath(path);
}
private static String normalizePath(String path) {
if (!StringUtils.hasText(path)) {
return path;
}
String normalizedPath = path;
if (!normalizedPath.startsWith("/")) {
normalizedPath = "/" + normalizedPath;
}
if (normalizedPath.endsWith("/")) {
normalizedPath = normalizedPath.substring(0, normalizedPath.length() - 1);
}
return normalizedPath;
}
}
|
EndpointMapping
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/jdk/JDKScalarsDeserTest.java
|
{
"start": 1584,
"end": 1720
}
|
class ____ {
char _v;
void setV(char v) { _v = v; }
char getV() { return _v; }
}
final static
|
CharacterBean
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/StringTemplateEndpointBuilderFactory.java
|
{
"start": 1439,
"end": 1579
}
|
interface ____ {
/**
* Builder for endpoint for the String Template component.
*/
public
|
StringTemplateEndpointBuilderFactory
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/env/NodeEnvironmentIT.java
|
{
"start": 1989,
"end": 14319
}
|
class ____ extends ESIntegTestCase {
public void testStartFailureOnDataForNonDataNode() throws Exception {
final String indexName = "test-fail-on-data";
logger.info("--> starting one node");
final boolean writeDanglingIndices = randomBoolean();
String node = internalCluster().startNode(
Settings.builder().put(IndicesService.WRITE_DANGLING_INDICES_INFO_SETTING.getKey(), writeDanglingIndices).build()
);
Settings dataPathSettings = internalCluster().dataPathSettings(node);
logger.info("--> creating index");
prepareCreate(indexName, indexSettings(1, 0)).get();
final String indexUUID = resolveIndex(indexName).getUUID();
if (writeDanglingIndices) {
assertBusy(
() -> internalCluster().getInstances(IndicesService.class)
.forEach(indicesService -> assertTrue(indicesService.allPendingDanglingIndicesWritten()))
);
}
logger.info("--> restarting the node without the data and master roles");
IllegalStateException ex = expectThrows(
IllegalStateException.class,
"node not having the data and master roles while having existing index metadata must fail",
() -> internalCluster().restartRandomDataNode(new InternalTestCluster.RestartCallback() {
@Override
public Settings onNodeStopped(String nodeName) {
return NodeRoles.removeRoles(nonDataNode(), Set.of(DiscoveryNodeRole.MASTER_ROLE));
}
})
);
if (writeDanglingIndices) {
assertThat(ex.getMessage(), startsWith("node does not have the data and master roles but has index metadata"));
} else {
assertThat(ex.getMessage(), startsWith("node does not have the data role but has shard data"));
}
logger.info("--> start the node again with data and master roles");
internalCluster().startNode(dataPathSettings);
logger.info("--> indexing a simple document");
prepareIndex(indexName).setId("1").setSource("field1", "value1").get();
logger.info("--> restarting the node without the data role");
ex = expectThrows(
IllegalStateException.class,
"node not having the data role while having existing shard data must fail",
() -> internalCluster().restartRandomDataNode(new InternalTestCluster.RestartCallback() {
@Override
public Settings onNodeStopped(String nodeName) {
return nonDataNode();
}
})
);
assertThat(ex.getMessage(), containsString(indexUUID));
assertThat(ex.getMessage(), startsWith("node does not have the data role but has shard data"));
}
private IllegalStateException expectThrowsOnRestart(CheckedConsumer<Path[], Exception> onNodeStopped) {
internalCluster().startNode();
final Path[] dataPaths = internalCluster().getInstance(NodeEnvironment.class).nodeDataPaths();
return expectThrows(
IllegalStateException.class,
() -> internalCluster().restartRandomDataNode(new InternalTestCluster.RestartCallback() {
@Override
public Settings onNodeStopped(String nodeName) {
try {
onNodeStopped.accept(dataPaths);
} catch (Exception e) {
throw new AssertionError(e);
}
return Settings.EMPTY;
}
})
);
}
public void testFailsToStartIfDowngraded() {
final IllegalStateException illegalStateException = expectThrowsOnRestart(
dataPaths -> PersistedClusterStateService.overrideVersion(NodeMetadataTests.tooNewBuildVersion(), dataPaths)
);
assertThat(
illegalStateException.getMessage(),
allOf(startsWith("cannot downgrade a node from version ["), endsWith("] to version [" + Build.current().version() + "]"))
);
}
public void testFailsToStartIfUpgradedTooFar() {
final IllegalStateException illegalStateException = expectThrowsOnRestart(
dataPaths -> PersistedClusterStateService.overrideVersion(NodeMetadataTests.tooOldBuildVersion(), dataPaths)
);
assertThat(
illegalStateException.getMessage(),
allOf(
startsWith("cannot upgrade a node from version ["),
endsWith(
"] directly to version ["
+ Build.current().version()
+ "], upgrade to version ["
+ Build.current().minWireCompatVersion()
+ "] first."
)
)
);
}
public void testUpgradeDataFolder() throws IOException, InterruptedException {
String node = internalCluster().startNode();
prepareCreate("test").get();
indexRandom(true, prepareIndex("test").setId("1").setSource("{}", XContentType.JSON));
String nodeId = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState().nodes().getMasterNodeId();
final Settings dataPathSettings = internalCluster().dataPathSettings(node);
internalCluster().stopRandomDataNode();
// simulate older data path layout by moving data under "nodes/0" folder
final List<Path> dataPaths = Environment.PATH_DATA_SETTING.get(dataPathSettings).stream().map(PathUtils::get).toList();
dataPaths.forEach(path -> {
final Path nodesPath = path.resolve("nodes");
final Path targetPath = nodesPath.resolve("0");
try {
assertTrue(Files.isRegularFile(nodesPath));
Files.delete(nodesPath);
Files.createDirectories(targetPath);
try (DirectoryStream<Path> stream = Files.newDirectoryStream(path)) {
for (Path subPath : stream) {
String fileName = subPath.getFileName().toString();
Path targetSubPath = targetPath.resolve(fileName);
if (fileName.equals("nodes") == false) {
Files.move(subPath, targetSubPath, StandardCopyOption.ATOMIC_MOVE);
}
}
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
});
dataPaths.forEach(path -> assertTrue(Files.exists(path.resolve("nodes"))));
// create extra file/folder, and check that upgrade fails
if (dataPaths.isEmpty() == false) {
final Path badFileInNodesDir = Files.createTempFile(randomFrom(dataPaths).resolve("nodes"), "bad", "file");
IllegalStateException ise = expectThrows(IllegalStateException.class, () -> internalCluster().startNode(dataPathSettings));
assertThat(ise.getMessage(), containsString("unexpected file/folder encountered during data folder upgrade"));
Files.delete(badFileInNodesDir);
final Path badFolderInNodesDir = Files.createDirectories(randomFrom(dataPaths).resolve("nodes").resolve("bad-folder"));
ise = expectThrows(IllegalStateException.class, () -> internalCluster().startNode(dataPathSettings));
assertThat(ise.getMessage(), containsString("unexpected file/folder encountered during data folder upgrade"));
Files.delete(badFolderInNodesDir);
final Path badFile = Files.createTempFile(randomFrom(dataPaths).resolve("nodes").resolve("0"), "bad", "file");
ise = expectThrows(IllegalStateException.class, () -> internalCluster().startNode(dataPathSettings));
assertThat(ise.getMessage(), containsString("unexpected file/folder encountered during data folder upgrade"));
Files.delete(badFile);
final Path badFolder = Files.createDirectories(randomFrom(dataPaths).resolve("nodes").resolve("0").resolve("bad-folder"));
ise = expectThrows(IllegalStateException.class, () -> internalCluster().startNode(dataPathSettings));
assertThat(ise.getMessage(), containsString("unexpected folder encountered during data folder upgrade"));
Files.delete(badFolder);
final Path randomDataPath = randomFrom(dataPaths);
final Path conflictingFolder = randomDataPath.resolve("indices");
final Path sourceFolder = randomDataPath.resolve("nodes").resolve("0").resolve("indices");
if (Files.exists(sourceFolder) && Files.exists(conflictingFolder) == false) {
Files.createDirectories(conflictingFolder);
ise = expectThrows(IllegalStateException.class, () -> internalCluster().startNode(dataPathSettings));
assertThat(ise.getMessage(), containsString("target folder already exists during data folder upgrade"));
Files.delete(conflictingFolder);
}
}
// simulate a frozen node with a shared cache file
if (rarely()) {
final Path randomDataPath = randomFrom(dataPaths);
final Path sharedCache = randomDataPath.resolve("nodes").resolve("0").resolve(SEARCHABLE_SHARED_CACHE_FILE);
Files.createFile(sharedCache);
}
// check that settings are validated prior to moving folders
dataPaths.forEach(path -> assertTrue(Files.isDirectory(path.resolve("nodes"))));
expectThrows(
IllegalArgumentException.class,
() -> internalCluster().startNode(Settings.builder().put(dataPathSettings).put("bad", "setting"))
);
// check that upgrade works
dataPaths.forEach(path -> assertTrue(Files.isDirectory(path.resolve("nodes"))));
internalCluster().startNode(dataPathSettings);
dataPaths.forEach(path -> assertTrue(Files.isRegularFile(path.resolve("nodes"))));
assertEquals(nodeId, clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState().nodes().getMasterNodeId());
assertTrue(indexExists("test"));
ensureYellow("test");
assertHitCount(prepareSearch().setQuery(matchAllQuery()), 1L);
}
public void testFailsToStartOnDataPathsFromMultipleNodes() throws IOException {
final List<String> nodes = internalCluster().startNodes(2);
ensureStableCluster(2);
final List<String> node0DataPaths = Environment.PATH_DATA_SETTING.get(internalCluster().dataPathSettings(nodes.get(0)));
final List<String> node1DataPaths = Environment.PATH_DATA_SETTING.get(internalCluster().dataPathSettings(nodes.get(1)));
final List<String> allDataPaths = new ArrayList<>(node0DataPaths);
allDataPaths.addAll(node1DataPaths);
internalCluster().stopNode(nodes.get(1));
internalCluster().stopNode(nodes.get(0));
CorruptStateException corruptStateException = expectThrows(
CorruptStateException.class,
() -> PersistedClusterStateService.nodeMetadata(allDataPaths.stream().map(PathUtils::get).toArray(Path[]::new))
);
assertThat(corruptStateException.getMessage(), containsString("unexpected node ID in metadata"));
corruptStateException = expectThrows(
ElasticsearchException.class,
CorruptStateException.class,
() -> internalCluster().startNode(Settings.builder().putList(Environment.PATH_DATA_SETTING.getKey(), allDataPaths))
);
assertThat(corruptStateException.getMessage(), containsString("unexpected node ID in metadata"));
final List<String> node0DataPathsPlusOne = new ArrayList<>(node0DataPaths);
node0DataPathsPlusOne.add(createTempDir().toString());
internalCluster().startNode(Settings.builder().putList(Environment.PATH_DATA_SETTING.getKey(), node0DataPathsPlusOne));
final List<String> node1DataPathsPlusOne = new ArrayList<>(node1DataPaths);
node1DataPathsPlusOne.add(createTempDir().toString());
internalCluster().startNode(Settings.builder().putList(Environment.PATH_DATA_SETTING.getKey(), node1DataPathsPlusOne));
ensureStableCluster(2);
}
}
|
NodeEnvironmentIT
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/ReadFromImpl.java
|
{
"start": 5997,
"end": 6835
}
|
interface ____ {
boolean isInSubnet(String ipAddress);
}
SubnetRule createSubnetRule(String cidrNotation) {
String[] parts = cidrNotation.split("/");
LettuceAssert.isTrue(parts.length == 2, "CIDR notation must have exact one '/'");
String ipAddress = parts[0];
int cidrPrefix = Integer.parseInt(parts[1]);
if (NetUtil.isValidIpV4Address(ipAddress)) {
return new Ipv4SubnetRule(ipAddress, cidrPrefix, ipv4AddressCache);
} else if (NetUtil.isValidIpV6Address(ipAddress)) {
return new Ipv6SubnetRule(ipAddress, cidrPrefix, ipv6AddressCache);
} else {
throw new IllegalArgumentException("Invalid CIDR notation " + cidrNotation);
}
}
static
|
SubnetRule
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/main/java/org/springframework/messaging/handler/annotation/support/MethodArgumentTypeMismatchException.java
|
{
"start": 1047,
"end": 1295
}
|
class ____ extends MethodArgumentResolutionException {
public MethodArgumentTypeMismatchException(Message<?> message, MethodParameter parameter, String description) {
super(message, parameter, description);
}
}
|
MethodArgumentTypeMismatchException
|
java
|
apache__camel
|
components/camel-as2/camel-as2-api/src/main/java/org/apache/camel/component/as2/api/entity/ApplicationPkcs7MimeEnvelopedDataEntity.java
|
{
"start": 1735,
"end": 5656
}
|
class ____ extends MimeEntity {
private static final String CONTENT_DISPOSITION = "attachment; filename=\"smime.p7m\"";
private byte[] encryptedData;
public ApplicationPkcs7MimeEnvelopedDataEntity(MimeEntity entity2Encrypt,
CMSEnvelopedDataGenerator dataGenerator,
OutputEncryptor encryptor,
String encryptedContentTransferEncoding,
boolean isMainBody)
throws HttpException {
super(ContentType.create("application/pkcs7-mime", new BasicNameValuePair("smime-type", "enveloped-data"),
new BasicNameValuePair("name", "smime.p7m")),
encryptedContentTransferEncoding);
addHeader(AS2Header.CONTENT_DISPOSITION, CONTENT_DISPOSITION);
setMainBody(isMainBody);
try {
this.encryptedData = createEncryptedData(entity2Encrypt, dataGenerator, encryptor);
} catch (Exception e) {
throw new HttpException("Failed to create encrypted data");
}
}
public ApplicationPkcs7MimeEnvelopedDataEntity(byte[] encryptedData, String encryptedContentTransferEncoding,
boolean isMainBody) {
super(ContentType.create("application/pkcs7-mime", new BasicNameValuePair("smime-type", "enveloped-data"),
new BasicNameValuePair("name", "smime.p7m")),
encryptedContentTransferEncoding);
this.encryptedData = ObjectHelper.notNull(encryptedData, "encryptedData");
addHeader(AS2Header.CONTENT_DISPOSITION, CONTENT_DISPOSITION);
setMainBody(isMainBody);
}
@Override
public void writeTo(OutputStream outstream) throws IOException {
NoCloseOutputStream ncos = new NoCloseOutputStream(outstream);
// Write out mime part headers if this is not the main body of message.
if (!isMainBody()) {
try (CanonicalOutputStream canonicalOutstream = new CanonicalOutputStream(ncos, StandardCharsets.US_ASCII.name())) {
for (Header header : getAllHeaders()) {
canonicalOutstream.writeln(header.toString());
}
canonicalOutstream.writeln(); // ensure empty line between
// headers and body; RFC2046 -
// 5.1.1
}
}
// Write out signed data.
String transferEncoding = getContentTransferEncoding() == null ? null : getContentTransferEncoding().getValue();
try (OutputStream transferEncodedStream = EntityUtils.encode(ncos, transferEncoding)) {
transferEncodedStream.write(encryptedData);
} catch (Exception e) {
throw new IOException("Failed to write to output stream", e);
}
}
public MimeEntity getEncryptedEntity(PrivateKey privateKey) throws HttpException {
return EntityParser.parseEnvelopedEntity(encryptedData, privateKey);
}
private byte[] createEncryptedData(
MimeEntity entity2Encrypt, CMSEnvelopedDataGenerator envelopedDataGenerator, OutputEncryptor encryptor)
throws IOException, CMSException {
try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
entity2Encrypt.writeTo(bos);
bos.flush();
CMSTypedData contentData = new CMSProcessableByteArray(bos.toByteArray());
CMSEnvelopedData envelopedData = envelopedDataGenerator.generate(contentData, encryptor);
return envelopedData.getEncoded();
}
}
@Override
public void close() throws IOException {
// do nothing
}
}
|
ApplicationPkcs7MimeEnvelopedDataEntity
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/annotation/AnnotatedElementUtilsTests.java
|
{
"start": 60044,
"end": 60186
}
|
class ____ {
}
@AliasedComposedContextConfig(xmlConfigFiles = "test.xml")
static
|
HalfConventionBasedAndHalfAliasedComposedContextConfigClassV2
|
java
|
quarkusio__quarkus
|
extensions/resteasy-classic/resteasy-multipart/deployment/src/main/java/io/quarkus/resteasy/multipart/deployment/ResteasyMultipartProcessor.java
|
{
"start": 455,
"end": 960
}
|
class ____ {
@BuildStep
void feature(BuildProducer<FeatureBuildItem> feature) {
feature.produce(new FeatureBuildItem(Feature.RESTEASY_MULTIPART));
}
@BuildStep
AdditionalBeanBuildItem filter() {
return new AdditionalBeanBuildItem.Builder()
.addBeanClass(MultipartInputPartConfigContainerRequestFilter.class)
.setUnremovable()
.setDefaultScope(DotNames.SINGLETON)
.build();
}
}
|
ResteasyMultipartProcessor
|
java
|
elastic__elasticsearch
|
x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/ParserUtils.java
|
{
"start": 833,
"end": 10177
}
|
class ____ {
private static final String UNQUOTED_LITERAL_TERM_DELIMITER = " ";
private static final char ESCAPE_CHAR = '\\';
private static final char QUOTE_CHAR = '"';
private static final char WILDCARD_CHAR = '*';
private ParserUtils() {
throw new UnsupportedOperationException("No need to instantiate this class");
}
/**
* Performs type-safe parsing using the provided visitor.
*
* @param visitor The visitor to use to do the parsing
* @param ctx The parser tree context to visit
* @param type The expected return type class
* @return The parsed result, casted to the expected type
*/
@SuppressWarnings("unchecked")
public static <T> T typedParsing(ParseTreeVisitor<?> visitor, ParserRuleContext ctx, Class<T> type) {
Object result = ctx.accept(visitor);
if (type.isInstance(result)) {
return (T) result;
}
throw new KqlParsingException(
"Invalid query '{}'[{}] given; expected {} but found {}",
ctx.start.getLine(),
ctx.start.getCharPositionInLine(),
ctx.getText(),
ctx.getClass().getSimpleName(),
type.getSimpleName(),
(result != null ? result.getClass().getSimpleName() : "null")
);
}
/**
* Extracts text from a parser tree context by joining all terminal nodes with a space delimiter.
*
* @param ctx The parser tree context
*
* @return The extracted text
*/
public static String extractText(ParserRuleContext ctx) {
return String.join(UNQUOTED_LITERAL_TERM_DELIMITER, extractTextTokens(ctx));
}
/**
* Checks if the given context contains any unescaped wildcard characters.
*
* @param ctx The tree context to check
* @return true if wildcards are present, false otherwise
*/
public static boolean hasWildcard(ParserRuleContext ctx) {
return ctx.children.stream().anyMatch(childNode -> {
if (childNode instanceof TerminalNode terminalNode) {
Token token = terminalNode.getSymbol();
return switch (token.getType()) {
case KqlBaseParser.WILDCARD -> true;
case KqlBaseParser.UNQUOTED_LITERAL -> token.getText().matches("[^\\\\]*[*].*");
default -> false;
};
} else if (childNode instanceof ParserRuleContext childCtx && hasWildcard(childCtx)) {
return true;
}
return false;
});
}
/**
* Escapes special characters in a query string for use in Lucene queries.
*
* @param queryText The query text to escape
* @param preserveWildcards If true, does not escape wildcard characters (*)
* @return The escaped query string
*/
public static String escapeLuceneQueryString(String queryText, boolean preserveWildcards) {
if (preserveWildcards) {
StringBuilder escapedQuery = new StringBuilder(queryText.length());
StringBuilder subpart = new StringBuilder(queryText.length());
for (char currentChar : queryText.toCharArray()) {
if (currentChar == WILDCARD_CHAR) {
escapedQuery.append(QueryParser.escape(subpart.toString())).append(currentChar);
subpart.setLength(0);
} else {
subpart.append(currentChar);
}
}
return escapedQuery.append(QueryParser.escape(subpart.toString())).toString();
}
return QueryParser.escape(queryText);
}
private static List<String> extractTextTokens(ParserRuleContext ctx) {
assert ctx.children != null;
List<String> textTokens = new ArrayList<>(ctx.children.size());
for (ParseTree currentNode : ctx.children) {
if (currentNode instanceof TerminalNode terminalNode) {
textTokens.add(extractText(terminalNode));
} else if (currentNode instanceof ParserRuleContext childCtx) {
textTokens.addAll(extractTextTokens(childCtx));
} else {
throw new KqlParsingException("Unable to extract text from ctx", ctx.start.getLine(), ctx.start.getCharPositionInLine());
}
}
return textTokens;
}
private static String extractText(TerminalNode node) {
if (node.getSymbol().getType() == KqlBaseParser.QUOTED_STRING) {
return unescapeQuotedString(node);
} else if (node.getSymbol().getType() == KqlBaseParser.UNQUOTED_LITERAL) {
return unescapeUnquotedLiteral(node);
}
return node.getText();
}
private static String unescapeQuotedString(TerminalNode ctx) {
String inputText = ctx.getText();
assert inputText.length() >= 2 && inputText.charAt(0) == QUOTE_CHAR && inputText.charAt(inputText.length() - 1) == QUOTE_CHAR;
StringBuilder sb = new StringBuilder();
for (int i = 1; i < inputText.length() - 1;) {
char currentChar = inputText.charAt(i++);
if (currentChar == ESCAPE_CHAR && i + 1 < inputText.length()) {
currentChar = inputText.charAt(i++);
switch (currentChar) {
case 't' -> sb.append('\t');
case 'n' -> sb.append('\n');
case 'r' -> sb.append('\r');
case 'u' -> i = handleUnicodeSequemce(ctx, sb, inputText, i);
case QUOTE_CHAR -> sb.append('\"');
case ESCAPE_CHAR -> sb.append(ESCAPE_CHAR);
default -> sb.append(ESCAPE_CHAR).append(currentChar);
}
} else {
sb.append(currentChar);
}
}
return sb.toString();
}
private static String unescapeUnquotedLiteral(TerminalNode ctx) {
String inputText = ctx.getText();
if (inputText == null || inputText.isEmpty()) {
return inputText;
}
StringBuilder sb = new StringBuilder(inputText.length());
for (int i = 0; i < inputText.length();) {
char currentChar = inputText.charAt(i++);
if (currentChar == ESCAPE_CHAR && i < inputText.length()) {
if (isEscapedKeywordSequence(inputText, i)) {
String sequence = handleKeywordSequence(inputText, i);
sb.append(sequence);
i += sequence.length();
} else {
currentChar = inputText.charAt(i++);
switch (currentChar) {
case 't' -> sb.append('\t');
case 'n' -> sb.append('\n');
case 'r' -> sb.append('\r');
case 'u' -> i = handleUnicodeSequemce(ctx, sb, inputText, i);
case QUOTE_CHAR -> sb.append('\"');
case ESCAPE_CHAR -> sb.append(ESCAPE_CHAR);
case '(', ')', ':', '<', '>', '*', '{', '}' -> sb.append(currentChar);
default -> sb.append(ESCAPE_CHAR).append(currentChar);
}
}
} else {
sb.append(currentChar);
}
}
return sb.toString();
}
private static boolean isEscapedKeywordSequence(String input, int startIndex) {
if (startIndex + 1 >= input.length()) {
return false;
}
String remaining = input.substring(startIndex).toLowerCase(Locale.ROOT);
return remaining.startsWith("and") || remaining.startsWith("or") || remaining.startsWith("not");
}
private static String handleKeywordSequence(String input, int startIndex) {
String remaining = input.substring(startIndex);
if (remaining.toLowerCase(Locale.ROOT).startsWith("and")) return remaining.substring(0, 3);
if (remaining.toLowerCase(Locale.ROOT).startsWith("or")) return remaining.substring(0, 2);
if (remaining.toLowerCase(Locale.ROOT).startsWith("not")) return remaining.substring(0, 3);
return "";
}
private static int handleUnicodeSequemce(TerminalNode ctx, StringBuilder sb, String text, int startIdx) {
int endIdx = startIdx + 4;
String hex = text.substring(startIdx, endIdx);
try {
int code = Integer.parseInt(hex, 16);
if (code >= 0xD800 && code <= 0xDFFF) {
// U+D800—U+DFFF can only be used as surrogate pairs and are not valid character codes.
throw new KqlParsingException(
"Invalid unicode character code, [{}] is a surrogate code",
ctx.getSymbol().getLine(),
ctx.getSymbol().getCharPositionInLine() + startIdx,
hex
);
}
sb.append(String.valueOf(Character.toChars(code)));
} catch (IllegalArgumentException e) {
throw new KqlParsingException(
"Invalid unicode character code [{}]",
ctx.getSymbol().getLine(),
ctx.getSymbol().getCharPositionInLine() + startIdx,
hex
);
}
return endIdx;
}
}
|
ParserUtils
|
java
|
alibaba__nacos
|
persistence/src/main/java/com/alibaba/nacos/persistence/utils/DatasourcePlatformUtil.java
|
{
"start": 900,
"end": 1444
}
|
class ____ {
/**
* get datasource platform.
*
* @param defaultPlatform default platform.
* @return
*/
public static String getDatasourcePlatform(String defaultPlatform) {
String platform = EnvUtil.getProperty(PersistenceConstant.DATASOURCE_PLATFORM_PROPERTY, defaultPlatform);
if (StringUtils.isBlank(platform)) {
platform = EnvUtil.getProperty(PersistenceConstant.DATASOURCE_PLATFORM_PROPERTY_OLD, defaultPlatform);
}
return platform;
}
}
|
DatasourcePlatformUtil
|
java
|
quarkusio__quarkus
|
extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/vertx/OpenTelemetryVertxHttpMetricsFactory.java
|
{
"start": 1410,
"end": 2538
}
|
class ____
implements HttpServerMetrics<MetricRequest, Object, Object>,
VertxMetrics, ExtendedQuarkusVertxHttpMetrics {
@Override
public HttpServerMetrics<?, ?, ?> createHttpServerMetrics(final HttpServerOptions options,
final SocketAddress localAddress) {
return this;
}
@Override
public MetricRequest requestBegin(final Object socketMetric, final HttpRequest request) {
return MetricRequest.request(request);
}
@Override
public void requestRouted(final MetricRequest requestMetric, final String route) {
if (route != null) {
requestMetric.getContext().ifPresent(context -> context.putLocal("VertxRoute", route));
}
}
@Override
public ConnectionTracker getHttpConnectionTracker() {
// To be implemented if we decide to instrument with OpenTelemetry. See VertxMeterBinderAdapter for an example.
return ExtendedQuarkusVertxHttpMetrics.NOOP_CONNECTION_TRACKER;
}
}
}
|
OpenTelemetryVertxHttpServerMetrics
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexTemplateAction.java
|
{
"start": 1015,
"end": 1774
}
|
class ____ extends BaseRestHandler {
@Override
public List<Route> routes() {
return List.of(new Route(DELETE, "/_template/{name}"));
}
@Override
public String getName() {
return "delete_index_template_action";
}
@Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
DeleteIndexTemplateRequest deleteIndexTemplateRequest = new DeleteIndexTemplateRequest(request.param("name"));
deleteIndexTemplateRequest.masterNodeTimeout(getMasterNodeTimeout(request));
return channel -> client.admin().indices().deleteTemplate(deleteIndexTemplateRequest, new RestToXContentListener<>(channel));
}
}
|
RestDeleteIndexTemplateAction
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/MiscFunctionsITCase.java
|
{
"start": 15965,
"end": 16422
}
|
class ____ extends ScalarFunction {
public String eval(
int i,
@ArgumentHint(isOptional = true) String optional,
Long l,
Boolean b,
String s) {
return String.format("i=%s,optional=%s,l=%s,b=%s,s=%s", i, optional, l, b, s);
}
}
/** Function that uses a static signature with optionals at the end. */
public static
|
OptionalArgInMiddleFunction
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java
|
{
"start": 28575,
"end": 29062
}
|
class ____, make sure we can find the appropriate
// plugin to use in the constructor in that case too
public void testLoadServiceProvidersInSameClassLoader() {
PluginsService service = newMockPluginsService(List.of(BarPlugin.class, PluginOther.class));
// There's only one TestService implementation, FooTestService which uses FooPlugin in the constructor.
// We should find only one instance of this service when we load with two plugins in the same
|
loader
|
java
|
alibaba__nacos
|
config/src/main/java/com/alibaba/nacos/config/server/model/ConfigCacheGray.java
|
{
"start": 907,
"end": 2811
}
|
class ____ extends ConfigCache implements Serializable {
private String grayName;
private GrayRule grayRule;
/**
* clear cache.
*/
@Override
public void clear() {
super.clear();
}
public ConfigCacheGray() {}
public ConfigCacheGray(String grayName) {
this.grayName = grayName;
}
public GrayRule getGrayRule() {
return grayRule;
}
public String getGrayName() {
return grayName;
}
public void setGrayName(String grayName) {
this.grayName = grayName;
}
/**
* get raw gray rule from db.
*
* @return raw gray rule from db.
* @date 2024/3/14
*/
public String getRawGrayRule() {
return grayRule.getRawGrayRuleExp();
}
/**
* reset gray rule.
*
* @param grayRule raw gray rule from db.
* @throws RuntimeException if gray rule is invalid.
* @date 2024/3/14
*/
public void resetGrayRule(String grayRule) throws RuntimeException {
this.grayRule = GrayRuleManager.constructGrayRule(GrayRuleManager.deserializeConfigGrayPersistInfo(grayRule));
if (this.grayRule == null || !this.grayRule.isValid()) {
throw new RuntimeException("raw gray rule is invalid");
}
}
/**
* judge whether match gray rule.
*
* @param tags conn tags.
* @return true if match, false otherwise.
* @date 2024/3/14
*/
public boolean match(Map<String, String> tags) {
return grayRule.match(tags);
}
public int getPriority() {
return grayRule.getPriority();
}
/**
* if gray rule is valid.
*
* @return true if valid, false otherwise.
* @date 2024/3/14
*/
public boolean isValid() {
return grayRule != null && grayRule.isValid();
}
}
|
ConfigCacheGray
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsView.java
|
{
"start": 3318,
"end": 4954
}
|
class ____ from {@link #content()}
*/
private String jobsTableInit() {
return tableInit().
append(", 'aaData': jobsTableData").
append(", bDeferRender: true").
append(", bProcessing: true").
// Sort by id upon page load
append(", aaSorting: [[3, 'desc']]").
append(", aoColumnDefs:[").
// Maps Total, Maps Completed, Reduces Total and Reduces Completed
append("{'sType':'numeric', 'bSearchable': false" +
", 'aTargets': [ 8, 9, 10, 11 ] }").
append("]}").
toString();
}
/**
* @return javascript to add into the jquery block after the table has
* been initialized. This code adds in per field filtering.
*/
private String jobsPostTableInit() {
return "var asInitVals = new Array();\n" +
"$('tfoot input').keyup( function () \n{"+
" jobsDataTable.fnFilter( this.value, $('tfoot input').index(this) );\n"+
"} );\n"+
"$('tfoot input').each( function (i) {\n"+
" asInitVals[i] = this.value;\n"+
"} );\n"+
"$('tfoot input').focus( function () {\n"+
" if ( this.className == 'search_init' )\n"+
" {\n"+
" this.className = '';\n"+
" this.value = '';\n"+
" }\n"+
"} );\n"+
"$('tfoot input').blur( function (i) {\n"+
" if ( this.value == '' )\n"+
" {\n"+
" this.className = 'search_init';\n"+
" this.value = asInitVals[$('tfoot input').index(this)];\n"+
" }\n"+
"} );\n";
}
}
|
returned
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/http/InterceptUrlConfigTests.java
|
{
"start": 14536,
"end": 14647
}
|
class ____ {
@GetMapping("/error")
String error() {
return "error";
}
}
public static
|
ErrorController
|
java
|
dropwizard__dropwizard
|
dropwizard-e2e/src/main/java/com/example/health/HealthApp.java
|
{
"start": 360,
"end": 3218
}
|
class ____ extends Application<Configuration> {
static final String CRITICAL_HEALTH_CHECK_NAME_1 = "critical1";
static final String CRITICAL_HEALTH_CHECK_NAME_2 = "critical2";
static final String NON_CRITICAL_HEALTH_CHECK_NAME = "nonCritical";
private final AtomicBoolean criticalCheckHealthy1 = new AtomicBoolean();
private final AtomicBoolean criticalCheckHealthy2 = new AtomicBoolean();
private final AtomicBoolean nonCriticalCheckHealthy = new AtomicBoolean();
private final AtomicInteger healthyCheckCounter = new AtomicInteger();
private final AtomicInteger unhealthyCheckCounter = new AtomicInteger();
private final AtomicInteger stateChangeCounter = new AtomicInteger();
@Override
public void run(final Configuration configuration, final Environment environment) throws Exception {
environment.healthChecks().register(CRITICAL_HEALTH_CHECK_NAME_1, new HealthCheck() {
@Override
protected Result check() {
return criticalCheckHealthy1.get() ? Result.healthy() : Result.builder().unhealthy().build();
}
});
environment.healthChecks().register(CRITICAL_HEALTH_CHECK_NAME_2, new HealthCheck() {
@Override
protected Result check() {
return criticalCheckHealthy2.get() ? Result.healthy() : Result.builder().unhealthy().build();
}
});
environment.healthChecks().register(NON_CRITICAL_HEALTH_CHECK_NAME, new HealthCheck() {
@Override
protected Result check() {
return nonCriticalCheckHealthy.get() ? Result.healthy() : Result.builder().unhealthy().build();
}
});
environment.health().addHealthStateListener(new HealthStateListener() {
@Override
public void onHealthyCheck(String healthCheckName) {
healthyCheckCounter.incrementAndGet();
}
@Override
public void onUnhealthyCheck(String healthCheckName) {
unhealthyCheckCounter.incrementAndGet();
}
@Override
public void onStateChanged(String healthCheckName, boolean healthy) {
stateChangeCounter.incrementAndGet();
}
});
}
AtomicBoolean getCriticalCheckHealthy1() {
return criticalCheckHealthy1;
}
AtomicBoolean getCriticalCheckHealthy2() {
return criticalCheckHealthy2;
}
AtomicBoolean getNonCriticalCheckHealthy() {
return nonCriticalCheckHealthy;
}
AtomicInteger getHealthyCheckCounter() {
return healthyCheckCounter;
}
AtomicInteger getUnhealthyCheckCounter() {
return unhealthyCheckCounter;
}
AtomicInteger getStateChangeCounter() {
return stateChangeCounter;
}
}
|
HealthApp
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/registry/classloading/spi/ClassLoaderService.java
|
{
"start": 811,
"end": 879
}
|
class ____
*
* @throws ClassLoadingException Indicates the
|
reference
|
java
|
google__truth
|
core/src/test/java/com/google/common/truth/extension/FakeHrDatabase.java
|
{
"start": 1015,
"end": 1934
}
|
class ____ implements HrDatabase {
private final Map<Long, Employee> employees = new HashMap<>();
public void put(Employee employee) {
employees.put(employee.id(), employee);
}
@Override
public Employee get(long id) {
return employees.get(id);
}
@Override
public void relocate(long id, Location location) {
checkNotNull(location);
Employee old = get(id);
checkState(old != null, "No employee found with ID %s", id);
employees.put(id, Employee.create(old.username(), old.id(), old.name(), location, old.isCeo()));
}
@Override
public ImmutableSet<Employee> getByLocation(Location location) {
checkNotNull(location);
ImmutableSet.Builder<Employee> result = ImmutableSet.builder();
for (Employee employee : employees.values()) {
if (employee.location() == location) {
result.add(employee);
}
}
return result.build();
}
}
|
FakeHrDatabase
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/InjectConstructorFactoryGeneratorTest.java
|
{
"start": 29298,
"end": 30025
}
|
class ____ {",
" @Inject CheckedExceptionClass() throws Exception {}",
"}");
daggerCompiler(file)
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining(
"Dagger does not support checked exceptions on @Inject constructors")
.onSource(file)
.onLine(6);
});
}
@Test public void injectConstructorWithCheckedExceptionsWarning() {
Source file =
CompilerTests.javaSource(
"test.CheckedExceptionClass",
"package test;",
"",
"import javax.inject.Inject;",
"",
"
|
CheckedExceptionClass
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/matching/InvalidHeaderTest.java
|
{
"start": 646,
"end": 1685
}
|
class ____ {
@RegisterExtension
static ResteasyReactiveUnitTest test = new ResteasyReactiveUnitTest()
.setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
return ShrinkWrap.create(JavaArchive.class)
.addClasses(TestResource.class);
}
});
@Test
void test() {
given()
.header("Accept", "application/json")
.when().get("/test/1")
.then()
.statusCode(200)
.body(is("{\"id\": \"1\"}"));
given()
.header("Accept", "text/plain")
.when().get("/test/1")
.then()
.statusCode(200)
.body(is("1"));
given()
.header("Accept", "foobar")
.when().get("/test/1")
.then()
.statusCode(400);
}
@Path("/test")
public static
|
InvalidHeaderTest
|
java
|
quarkusio__quarkus
|
integration-tests/logging-panache/src/test/java/io/quarkus/logging/LoggingWithPanacheTest.java
|
{
"start": 414,
"end": 3086
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar.addClasses(LoggingBean.class, LoggingInterface.class, LoggingEntity.class,
NoStackTraceTestException.class))
.overrideConfigKey("quarkus.log.category.\"io.quarkus.logging\".min-level", "TRACE")
.overrideConfigKey("quarkus.log.category.\"io.quarkus.logging\".level", "TRACE")
.setLogRecordPredicate(record -> record.getLoggerName().startsWith("io.quarkus.logging.Logging"))
.assertLogRecords(records -> {
Formatter formatter = new PatternFormatter("[%p] %m");
List<String> lines = records.stream().map(formatter::format).map(String::trim).collect(Collectors.toList());
assertThat(lines).containsExactly(
"[INFO] Heya!",
"[TRACE] LoggingBean created",
"[INFO] Default method from interface: abc",
"[DEBUG] starting massive computation",
"[DEBUG] one: 42",
"[TRACE] two: 42 | 13",
"[DEBUG] three: 42 | 13 | 1",
"[DEBUG] one: foo",
"[INFO] two: foo | bar",
"[WARN] three: foo | bar | baz",
"[ERROR] four: foo | bar | baz | quux",
"[WARN] foo | bar | baz | quux: io.quarkus.logging.NoStackTraceTestException",
"[ERROR] Hello Error: io.quarkus.logging.NoStackTraceTestException",
"[INFO] Hi!",
"[INFO] number 42",
"[INFO] string now",
"[INFO] foo",
"[INFO] bar",
"[INFO] baz",
"[INFO] quux",
"[DEBUG] foo: io.quarkus.logging.NoStackTraceTestException",
"[DEBUG] bar: io.quarkus.logging.NoStackTraceTestException",
"[WARN] foo bar",
"[WARN] baz quux",
"[INFO] foo bar baz",
"[ERROR] foo bar baz quux",
"[INFO] foo bar baz qux quux",
"[INFO] foo bar baz qux quux quuux");
});
@Inject
LoggingBean bean;
@Test
public void test() {
bean.doSomething();
new LoggingEntity().something();
bean.reproduceStackDisciplineIssue();
bean.reproduceMethodReferenceIssue();
}
}
|
LoggingWithPanacheTest
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/serializer/UnicodeTest.java
|
{
"start": 149,
"end": 360
}
|
class ____ extends TestCase {
public void test_unicode() throws Exception {
String text = JSON.toJSONString(Collections.singletonMap("v", "\u0018"));
System.out.println(text);
}
}
|
UnicodeTest
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/value/enum2enum/ErroneousOrderMapperThrowExceptionAsSourceType.java
|
{
"start": 483,
"end": 1148
}
|
interface ____ {
ErroneousOrderMapperThrowExceptionAsSourceType INSTANCE = Mappers.getMapper(
ErroneousOrderMapperThrowExceptionAsSourceType.class );
@ValueMappings({
@ValueMapping(source = "EXTRA", target = "SPECIAL"),
@ValueMapping(source = "STANDARD", target = "DEFAULT"),
@ValueMapping(source = "NORMAL", target = "DEFAULT"),
@ValueMapping(source = "<ANY_REMAINING>", target = "DEFAULT"),
@ValueMapping(source = "<THROW_EXCEPTION>", target = "DEFAULT")
})
ExternalOrderType orderTypeToExternalOrderTypeWithErroneousSourceMapping(OrderType orderType);
}
|
ErroneousOrderMapperThrowExceptionAsSourceType
|
java
|
quarkusio__quarkus
|
integration-tests/smallrye-metrics/src/test/java/io/quarkus/it/metrics/MetricsInheritanceTestCase.java
|
{
"start": 196,
"end": 948
}
|
class ____ {
@Test
public void verifyRegistrations() {
RestAssured.when().get("/metricsinheritanceresource/registration")
.then().body("", Matchers.containsInAnyOrder(
"io.quarkus.it.metrics.inheritance.InheritanceMetricsBase.InheritanceMetricsBase",
"io.quarkus.it.metrics.inheritance.InheritanceMetricsBase.baseMethod",
"io.quarkus.it.metrics.inheritance.InheritanceMetricsExtended.InheritanceMetricsExtended",
"io.quarkus.it.metrics.inheritance.InheritanceMetricsExtended.anotherMethod",
"io.quarkus.it.metrics.inheritance.InheritanceMetricsExtended.baseMethod"));
}
}
|
MetricsInheritanceTestCase
|
java
|
spring-projects__spring-security
|
docs/src/test/java/org/springframework/security/docs/reactive/configuration/serverhttpsecuritycustomizerbean/ServerHttpSecurityCustomizerBeanTests.java
|
{
"start": 1210,
"end": 1809
}
|
class ____ {
public final SpringTestContext spring = new SpringTestContext(this);
@Autowired
private WebTestClient webTest;
@Test
void httpSecurityCustomizer() throws Exception {
this.spring.register(
ServerHttpSecurityCustomizerBeanConfiguration.class).autowire();
// @formatter:off
this.webTest
.get()
.uri("http://localhost/")
.exchange()
.expectHeader().location("https://localhost/")
.expectHeader()
.value("Content-Security-Policy", csp ->
assertThat(csp).isEqualTo("object-src 'none'")
);
// @formatter:on
}
}
|
ServerHttpSecurityCustomizerBeanTests
|
java
|
apache__camel
|
components/camel-observation/src/test/java/org/apache/camel/observation/TestSEDASpanDecorator.java
|
{
"start": 1018,
"end": 1440
}
|
class ____ extends SedaSpanDecorator {
@Override
public void pre(SpanAdapter span, Exchange exchange, Endpoint endpoint) {
super.pre(span, exchange, endpoint);
span.setTag("pre", "test");
}
@Override
public void post(SpanAdapter span, Exchange exchange, Endpoint endpoint) {
super.post(span, exchange, endpoint);
span.setTag("post", "test");
}
}
|
TestSEDASpanDecorator
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/dataflow/nullnesspropagation/NullnessPropagationTest.java
|
{
"start": 52839,
"end": 52886
}
|
enum ____ {
ENUM_INSTANCE;
}
static
|
MyEnum
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CryptoAdmin.java
|
{
"start": 6772,
"end": 8199
}
|
class ____
implements AdminHelper.Command {
@Override
public String getName() {
return "-getFileEncryptionInfo";
}
@Override
public String getShortUsage() {
return "[" + getName() + " -path <path>]\n";
}
@Override
public String getLongUsage() {
final TableListing listing = AdminHelper.getOptionDescriptionListing();
listing.addRow("<path>", "The path to the file to show encryption info.");
return getShortUsage() + "\n" + "Get encryption info of a file.\n\n" +
listing.toString();
}
@Override
public int run(Configuration conf, List<String> args) throws IOException {
final String path = StringUtils.popOptionWithArgument("-path", args);
if (!args.isEmpty()) {
System.err.println("Can't understand argument: " + args.get(0));
return 1;
}
Path p = new Path(path);
final HdfsAdmin admin =
new HdfsAdmin(p.toUri(), conf);
try {
final FileEncryptionInfo fei =
admin.getFileEncryptionInfo(p);
if (fei == null) {
System.err.println("No FileEncryptionInfo found for path " + path);
return 2;
}
System.out.println(fei.toStringStable());
} catch (IOException e) {
System.err.println(prettifyException(e));
return 3;
}
return 0;
}
}
private static
|
GetFileEncryptionInfoCommand
|
java
|
apache__spark
|
sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
|
{
"start": 1453,
"end": 5480
}
|
class ____ extends HiveSessionImpl {
public static final String HS2TOKEN = "HiveServer2ImpersonationToken";
private UserGroupInformation sessionUgi = null;
private String delegationTokenStr = null;
private HiveSession proxySession = null;
public HiveSessionImplwithUGI(TProtocolVersion protocol, String username, String password,
HiveConf hiveConf, String ipAddress, String delegationToken) throws HiveSQLException {
super(protocol, username, password, hiveConf, ipAddress);
setSessionUGI(username);
setDelegationToken(delegationToken);
}
// setup appropriate UGI for the session
public void setSessionUGI(String owner) throws HiveSQLException {
if (owner == null) {
throw new HiveSQLException("No username provided for impersonation");
}
if (UserGroupInformation.isSecurityEnabled()) {
try {
sessionUgi = UserGroupInformation.createProxyUser(
owner, UserGroupInformation.getLoginUser());
} catch (IOException e) {
throw new HiveSQLException("Couldn't setup proxy user", e);
}
} else {
sessionUgi = UserGroupInformation.createRemoteUser(owner);
}
}
public UserGroupInformation getSessionUgi() {
return this.sessionUgi;
}
public String getDelegationToken() {
return this.delegationTokenStr;
}
/**
* Close the file systems for the session and remove it from the FileSystem cache.
* Cancel the session's delegation token and close the metastore connection
*/
@Override
public void close() throws HiveSQLException {
try {
acquire(true);
cancelDelegationToken();
} finally {
try {
super.close();
} finally {
try {
FileSystem.closeAllForUGI(sessionUgi);
} catch (IOException ioe) {
throw new HiveSQLException("Could not clean up file-system handles for UGI: "
+ sessionUgi, ioe);
}
}
}
}
/**
* Enable delegation token for the session
* save the token string and set the token.signature in hive conf. The metastore client uses
* this token.signature to determine where to use kerberos or delegation token
* @throws HiveException
* @throws IOException
*/
private void setDelegationToken(String delegationTokenStr) throws HiveSQLException {
this.delegationTokenStr = delegationTokenStr;
if (delegationTokenStr != null) {
getHiveConf().set("hive.metastore.token.signature", HS2TOKEN);
try {
Utils.setTokenStr(sessionUgi, delegationTokenStr, HS2TOKEN);
} catch (IOException e) {
throw new HiveSQLException("Couldn't setup delegation token in the ugi", e);
}
}
}
// If the session has a delegation token obtained from the metastore, then cancel it
private void cancelDelegationToken() throws HiveSQLException {
if (delegationTokenStr != null) {
try {
Hive.getWithoutRegisterFns(getHiveConf()).cancelDelegationToken(delegationTokenStr);
} catch (HiveException e) {
throw new HiveSQLException("Couldn't cancel delegation token", e);
}
// close the metastore connection created with this delegation token
Hive.closeCurrent();
}
}
@Override
protected HiveSession getSession() {
assert proxySession != null;
return proxySession;
}
public void setProxySession(HiveSession proxySession) {
this.proxySession = proxySession;
}
@Override
public String getDelegationToken(HiveAuthFactory authFactory, String owner,
String renewer) throws HiveSQLException {
return authFactory.getDelegationToken(owner, renewer, getIpAddress());
}
@Override
public void cancelDelegationToken(HiveAuthFactory authFactory, String tokenStr)
throws HiveSQLException {
authFactory.cancelDelegationToken(tokenStr);
}
@Override
public void renewDelegationToken(HiveAuthFactory authFactory, String tokenStr)
throws HiveSQLException {
authFactory.renewDelegationToken(tokenStr);
}
}
|
HiveSessionImplwithUGI
|
java
|
mockito__mockito
|
mockito-extensions/mockito-junit-jupiter/src/test/java/org/mockitousage/InjectMocksTest.java
|
{
"start": 401,
"end": 560
}
|
class ____ {
int identifier;
public ToBeMockedInTestSuperClass(int identifier) {
this.identifier = identifier;
}
}
|
ToBeMockedInTestSuperClass
|
java
|
dropwizard__dropwizard
|
dropwizard-views-mustache/src/test/java/io/dropwizard/views/mustache/MustacheViewRendererFileSystemTest.java
|
{
"start": 1309,
"end": 4104
}
|
class ____ {
@GET
@Path("/absolute")
public AbsoluteView showAbsolute() {
return new AbsoluteView("yay");
}
@GET
@Path("/relative")
public RelativeView showRelative() {
return new RelativeView();
}
@GET
@Path("/bad")
public BadView showBad() {
return new BadView();
}
@GET
@Path("/error")
public ErrorView showError() {
return new ErrorView();
}
}
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
}
@Override
@AfterEach
public void tearDown() throws Exception {
super.tearDown();
}
@Override
protected Application configure() {
ResourceConfig config = DropwizardResourceConfig.forTesting();
final ViewRenderer renderer = new MustacheViewRenderer();
renderer.configure(Collections.singletonMap("fileRoot", "src/test/resources"));
config.register(new ViewMessageBodyWriter(new MetricRegistry(), Collections.singletonList(renderer)));
config.register(new ViewRenderExceptionMapper());
config.register(new ExampleResource());
return config;
}
@Test
void rendersViewsWithAbsoluteTemplatePaths() {
assertThat(target("/test/absolute").request().get(String.class))
.isEqualTo("Woop woop. yay\n");
}
@Test
void rendersViewsWithRelativeTemplatePaths() {
assertThat(target("/test/relative").request().get(String.class))
.isEqualTo("Ok.\n");
}
@Test
void returnsA500ForViewsWithBadTemplatePaths() {
Invocation.Builder request = target("/test/bad").request();
assertThatExceptionOfType(WebApplicationException.class)
.isThrownBy(() -> request.get(String.class))
.extracting(WebApplicationException::getResponse)
.satisfies(response -> assertThat(response.getStatus())
.isEqualTo(500))
.satisfies(response -> assertThat(response.readEntity(String.class))
.isEqualTo(ViewRenderExceptionMapper.TEMPLATE_ERROR_MSG));
}
@Test
void returnsA500ForViewsThatCantCompile() {
Invocation.Builder request = target("/test/error").request();
assertThatExceptionOfType(WebApplicationException.class)
.isThrownBy(() -> request.get(String.class))
.extracting(WebApplicationException::getResponse)
.satisfies(response -> assertThat(response.getStatus())
.isEqualTo(500))
.satisfies(response -> assertThat(response.readEntity(String.class))
.isEqualTo(ViewRenderExceptionMapper.TEMPLATE_ERROR_MSG));
}
}
|
ExampleResource
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/javadoc/EscapedEntity.java
|
{
"start": 2892,
"end": 3399
}
|
class ____ extends DocTreePathScanner<Void, Void> {
private final VisitorState state;
private Scanner(VisitorState state) {
this.state = state;
}
@Override
public Void visitLiteral(LiteralTree node, Void unused) {
Matcher matcher = HTML_ENTITY.matcher(node.getBody().getBody());
if (matcher.find()) {
state.reportMatch(buildDescription(diagnosticPosition(getCurrentPath(), state)).build());
}
return super.visitLiteral(node, null);
}
}
}
|
Scanner
|
java
|
elastic__elasticsearch
|
x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/ShutdownTests.java
|
{
"start": 347,
"end": 456
}
|
class ____ extends ESTestCase {
public void testIt() {
// TODO: implement tests
}
}
|
ShutdownTests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene40/blocktree/Lucene40BlockTreeTermsReader.java
|
{
"start": 3119,
"end": 17173
}
|
class ____ extends FieldsProducer {
static final Outputs<BytesRef> FST_OUTPUTS = ByteSequenceOutputs.getSingleton();
static final BytesRef NO_OUTPUT = FST_OUTPUTS.getNoOutput();
static final int OUTPUT_FLAGS_NUM_BITS = 2;
static final int OUTPUT_FLAGS_MASK = 0x3;
static final int OUTPUT_FLAG_IS_FLOOR = 0x1;
static final int OUTPUT_FLAG_HAS_TERMS = 0x2;
/** Extension of terms file */
static final String TERMS_EXTENSION = "tim";
static final String TERMS_CODEC_NAME = "BlockTreeTermsDict";
/** Initial terms format. */
public static final int VERSION_START = 2;
/** Auto-prefix terms have been superseded by points. */
public static final int VERSION_AUTO_PREFIX_TERMS_REMOVED = 3;
/** The long[] + byte[] metadata has been replaced with a single byte[]. */
public static final int VERSION_META_LONGS_REMOVED = 4;
/** Suffixes are compressed to save space. */
public static final int VERSION_COMPRESSED_SUFFIXES = 5;
/** Metadata is written to its own file. */
public static final int VERSION_META_FILE = 6;
/** Current terms format. */
public static final int VERSION_CURRENT = VERSION_META_FILE;
/** Extension of terms index file */
static final String TERMS_INDEX_EXTENSION = "tip";
static final String TERMS_INDEX_CODEC_NAME = "BlockTreeTermsIndex";
/** Extension of terms meta file */
static final String TERMS_META_EXTENSION = "tmd";
static final String TERMS_META_CODEC_NAME = "BlockTreeTermsMeta";
// Open input to the main terms dict file (_X.tib)
final IndexInput termsIn;
// Open input to the terms index file (_X.tip)
final IndexInput indexIn;
// private static final boolean DEBUG = BlockTreeTermsWriter.DEBUG;
// Reads the terms dict entries, to gather state to
// produce DocsEnum on demand
final PostingsReaderBase postingsReader;
private final Map<String, FieldReader> fieldMap;
private final List<String> fieldList;
final String segment;
final int version;
/** Sole constructor. */
public Lucene40BlockTreeTermsReader(PostingsReaderBase postingsReader, SegmentReadState state) throws IOException {
boolean success = false;
this.postingsReader = postingsReader;
this.segment = state.segmentInfo.name;
try {
String termsName = IndexFileNames.segmentFileName(segment, state.segmentSuffix, TERMS_EXTENSION);
termsIn = EndiannessReverserUtil.openInput(state.directory, termsName, state.context);
version = CodecUtil.checkIndexHeader(
termsIn,
TERMS_CODEC_NAME,
VERSION_START,
VERSION_CURRENT,
state.segmentInfo.getId(),
state.segmentSuffix
);
if (version < VERSION_AUTO_PREFIX_TERMS_REMOVED) {
// pre-6.2 index, records whether auto-prefix terms are enabled in the header
byte b = termsIn.readByte();
if (b != 0) {
throw new CorruptIndexException("Index header pretends the index has auto-prefix terms: " + b, termsIn);
}
}
String indexName = IndexFileNames.segmentFileName(segment, state.segmentSuffix, TERMS_INDEX_EXTENSION);
indexIn = EndiannessReverserUtil.openInput(state.directory, indexName, state.context);
CodecUtil.checkIndexHeader(indexIn, TERMS_INDEX_CODEC_NAME, version, version, state.segmentInfo.getId(), state.segmentSuffix);
if (version < VERSION_META_FILE) {
// Have PostingsReader init itself
postingsReader.init(termsIn, state);
// Verifying the checksum against all bytes would be too costly, but for now we at least
// verify proper structure of the checksum footer. This is cheap and can detect some forms
// of corruption such as file truncation.
CodecUtil.retrieveChecksum(indexIn);
CodecUtil.retrieveChecksum(termsIn);
}
// Read per-field details
String metaName = IndexFileNames.segmentFileName(segment, state.segmentSuffix, TERMS_META_EXTENSION);
Map<String, FieldReader> fieldMap = null;
Throwable priorE = null;
long indexLength = -1, termsLength = -1;
try (
ChecksumIndexInput metaIn = version >= VERSION_META_FILE
? EndiannessReverserUtil.openChecksumInput(state.directory, metaName, state.context)
: null
) {
try {
final IndexInput indexMetaIn, termsMetaIn;
if (version >= VERSION_META_FILE) {
CodecUtil.checkIndexHeader(
metaIn,
TERMS_META_CODEC_NAME,
version,
version,
state.segmentInfo.getId(),
state.segmentSuffix
);
indexMetaIn = termsMetaIn = metaIn;
postingsReader.init(metaIn, state);
} else {
seekDir(termsIn);
seekDir(termsIn);
seekDir(indexIn);
indexMetaIn = indexIn;
termsMetaIn = termsIn;
}
final int numFields = termsMetaIn.readVInt();
if (numFields < 0) {
throw new CorruptIndexException("invalid numFields: " + numFields, termsMetaIn);
}
fieldMap = new HashMap<>((int) (numFields / 0.75f) + 1);
for (int i = 0; i < numFields; ++i) {
final int field = termsMetaIn.readVInt();
final long numTerms = termsMetaIn.readVLong();
if (numTerms <= 0) {
throw new CorruptIndexException("Illegal numTerms for field number: " + field, termsMetaIn);
}
final BytesRef rootCode = readBytesRef(termsMetaIn);
final FieldInfo fieldInfo = state.fieldInfos.fieldInfo(field);
if (fieldInfo == null) {
throw new CorruptIndexException("invalid field number: " + field, termsMetaIn);
}
final long sumTotalTermFreq = termsMetaIn.readVLong();
// when frequencies are omitted, sumDocFreq=sumTotalTermFreq and only one value is
// written.
final long sumDocFreq = fieldInfo.getIndexOptions() == IndexOptions.DOCS
? sumTotalTermFreq
: termsMetaIn.readVLong();
final int docCount = termsMetaIn.readVInt();
if (version < VERSION_META_LONGS_REMOVED) {
final int longsSize = termsMetaIn.readVInt();
if (longsSize < 0) {
throw new CorruptIndexException(
"invalid longsSize for field: " + fieldInfo.name + ", longsSize=" + longsSize,
termsMetaIn
);
}
}
BytesRef minTerm = readBytesRef(termsMetaIn);
BytesRef maxTerm = readBytesRef(termsMetaIn);
if (docCount < 0 || docCount > state.segmentInfo.maxDoc()) { // #docs with field must be <= #docs
throw new CorruptIndexException(
"invalid docCount: " + docCount + " maxDoc: " + state.segmentInfo.maxDoc(),
termsMetaIn
);
}
if (sumDocFreq < docCount) { // #postings must be >= #docs with field
throw new CorruptIndexException("invalid sumDocFreq: " + sumDocFreq + " docCount: " + docCount, termsMetaIn);
}
if (sumTotalTermFreq < sumDocFreq) { // #positions must be >= #postings
throw new CorruptIndexException(
"invalid sumTotalTermFreq: " + sumTotalTermFreq + " sumDocFreq: " + sumDocFreq,
termsMetaIn
);
}
final long indexStartFP = indexMetaIn.readVLong();
FieldReader previous = fieldMap.put(
fieldInfo.name,
/*
The FieldReader used differs from the original Lucene variant, in that it is more flexible
around the versions it can read from.
*/
new FieldReader(
this,
fieldInfo,
numTerms,
rootCode,
sumTotalTermFreq,
sumDocFreq,
docCount,
indexStartFP,
indexMetaIn,
indexIn,
minTerm,
maxTerm
)
);
if (previous != null) {
throw new CorruptIndexException("duplicate field: " + fieldInfo.name, termsMetaIn);
}
}
if (version >= VERSION_META_FILE) {
indexLength = metaIn.readLong();
termsLength = metaIn.readLong();
}
} catch (Throwable exception) {
priorE = exception;
} finally {
if (metaIn != null) {
CodecUtil.checkFooter(metaIn, priorE);
} else if (priorE != null) {
rethrowAlways(priorE);
}
}
}
if (version >= VERSION_META_FILE) {
// At this point the checksum of the meta file has been verified so the lengths are likely
// correct
CodecUtil.retrieveChecksum(indexIn, indexLength);
CodecUtil.retrieveChecksum(termsIn, termsLength);
} else {
assert indexLength == -1 : indexLength;
assert termsLength == -1 : termsLength;
}
List<String> fieldList = new ArrayList<>(fieldMap.keySet());
fieldList.sort(null);
this.fieldMap = fieldMap;
this.fieldList = Collections.unmodifiableList(fieldList);
success = true;
} finally {
if (success == false) {
// this.close() will close in:
IOUtils.closeWhileHandlingException(this);
}
}
}
private static BytesRef readBytesRef(IndexInput in) throws IOException {
int numBytes = in.readVInt();
if (numBytes < 0) {
throw new CorruptIndexException("invalid bytes length: " + numBytes, in);
}
BytesRef bytes = new BytesRef();
bytes.length = numBytes;
bytes.bytes = new byte[numBytes];
in.readBytes(bytes.bytes, 0, numBytes);
return bytes;
}
/** Seek {@code input} to the directory offset. */
private static void seekDir(IndexInput input) throws IOException {
input.seek(input.length() - CodecUtil.footerLength() - 8);
long offset = input.readLong();
input.seek(offset);
}
@SuppressForbidden(reason = "Lucene class")
private static Error rethrowAlways(Throwable th) throws IOException, RuntimeException {
return org.apache.lucene.util.IOUtils.rethrowAlways(th);
}
// for debugging
// private static String toHex(int v) {
// return "0x" + Integer.toHexString(v);
// }
@Override
public void close() throws IOException {
try {
IOUtils.close(indexIn, termsIn, postingsReader);
} finally {
// Clear so refs to terms index is GCable even if
// app hangs onto us:
fieldMap.clear();
}
}
@Override
public Iterator<String> iterator() {
return fieldList.iterator();
}
@Override
public Terms terms(String field) throws IOException {
assert field != null;
return fieldMap.get(field);
}
@Override
public int size() {
return fieldMap.size();
}
// for debugging
String brToString(BytesRef b) {
if (b == null) {
return "null";
} else {
try {
return b.utf8ToString() + " " + b;
} catch (@SuppressWarnings("unused") Throwable t) {
// If BytesRef isn't actually UTF8, or it's eg a
// prefix of UTF8 that ends mid-unicode-char, we
// fallback to hex:
return b.toString();
}
}
}
@Override
public void checkIntegrity() throws IOException {
// terms index
CodecUtil.checksumEntireFile(indexIn);
// term dictionary
CodecUtil.checksumEntireFile(termsIn);
// postings
postingsReader.checkIntegrity();
}
@Override
public String toString() {
return getClass().getSimpleName() + "(fields=" + fieldMap.size() + ",delegate=" + postingsReader + ")";
}
}
|
Lucene40BlockTreeTermsReader
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/streaming/runtime/TimestampITCase.java
|
{
"start": 36478,
"end": 37356
}
|
class ____ implements SourceFunction<Integer> {
int numWatermarks;
public MyNonWatermarkingSource(int numWatermarks) {
this.numWatermarks = numWatermarks;
}
@Override
public void run(SourceContext<Integer> ctx) throws Exception {
for (int i = 0; i < numWatermarks; i++) {
ctx.collect(i);
}
}
@Override
public void cancel() {}
}
private static List<JobID> getRunningJobs(ClusterClient<?> client) throws Exception {
Collection<JobStatusMessage> statusMessages = client.listJobs().get();
return statusMessages.stream()
.filter(status -> status.getJobState() == JobStatus.RUNNING)
.map(JobStatusMessage::getJobId)
.collect(Collectors.toList());
}
public static
|
MyNonWatermarkingSource
|
java
|
spring-projects__spring-boot
|
module/spring-boot-devtools/src/main/java/org/springframework/boot/devtools/autoconfigure/DevToolsR2dbcAutoConfiguration.java
|
{
"start": 4340,
"end": 5912
}
|
class ____ extends SpringBootCondition implements ConfigurationCondition {
@Override
public ConfigurationPhase getConfigurationPhase() {
return ConfigurationPhase.REGISTER_BEAN;
}
@Override
public ConditionOutcome getMatchOutcome(ConditionContext context, AnnotatedTypeMetadata metadata) {
ConditionMessage.Builder message = ConditionMessage.forCondition("DevTools ConnectionFactory Condition");
ConfigurableListableBeanFactory beanFactory = context.getBeanFactory();
Assert.state(beanFactory != null, "'beanFactory' must not be null");
String[] beanNames = beanFactory.getBeanNamesForType(ConnectionFactory.class, true, false);
if (beanNames.length != 1) {
return ConditionOutcome.noMatch(message.didNotFind("a single ConnectionFactory bean").atAll());
}
BeanDefinition beanDefinition = context.getRegistry().getBeanDefinition(beanNames[0]);
if (beanDefinition instanceof AnnotatedBeanDefinition annotatedBeanDefinition
&& isAutoConfigured(annotatedBeanDefinition)) {
return ConditionOutcome.match(message.foundExactly("auto-configured ConnectionFactory"));
}
return ConditionOutcome.noMatch(message.didNotFind("an auto-configured ConnectionFactory").atAll());
}
private boolean isAutoConfigured(AnnotatedBeanDefinition beanDefinition) {
MethodMetadata methodMetadata = beanDefinition.getFactoryMethodMetadata();
return methodMetadata != null && methodMetadata.getDeclaringClassName()
.startsWith(R2dbcAutoConfiguration.class.getPackage().getName());
}
}
static
|
DevToolsConnectionFactoryCondition
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/language/XPathFromFileExceptionTest.java
|
{
"start": 1017,
"end": 2580
}
|
class ____ extends ContextTestSupport {
@Test
public void testXPathFromFileExceptionOk() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(1);
getMockEndpoint("mock:error").expectedMessageCount(0);
template.sendBodyAndHeader(fileUri(), "<hello>world!</hello>", Exchange.FILE_NAME, "hello.xml");
assertMockEndpointsSatisfied();
oneExchangeDone.matchesWaitTime();
assertFileNotExists(testFile("hello.xml"));
assertFileExists(testFile("ok/hello.xml"));
}
@Test
public void testXPathFromFileExceptionFail() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(0);
getMockEndpoint("mock:error").expectedMessageCount(1);
// the last tag is not ended properly
template.sendBodyAndHeader(fileUri(), "<hello>world!</hello", Exchange.FILE_NAME, "hello2.xml");
assertMockEndpointsSatisfied();
oneExchangeDone.matchesWaitTime();
assertFileNotExists(testFile("hello2.xml"));
assertFileExists(testFile("error/hello2.xml"));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from(fileUri("?initialDelay=0&delay=10&moveFailed=error&move=ok")).onException(Exception.class)
.to("mock:error").end().choice().when().xpath("/hello")
.to("mock:result").end();
}
};
}
}
|
XPathFromFileExceptionTest
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManager.java
|
{
"start": 5367,
"end": 5940
}
|
class ____ configured");
// With invalid className
conf.set(YarnConfiguration.NM_NODE_LABELS_PROVIDER_CONFIG,
"org.apache.hadoop.yarn.server.nodemanager.NodeManager");
try {
labelsProviderService = nodeManager.createNodeLabelsProvider(conf);
fail("Expected to throw IOException on Invalid configuration");
} catch (IOException e) {
// exception expected on invalid configuration
}
assertNotNull(labelsProviderService, "LabelsProviderService should be initialized When "
+ "node labels provider
|
is
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorIndexNameAction.java
|
{
"start": 874,
"end": 1961
}
|
class ____ extends BaseRestHandler {
private static final String CONNECTOR_ID_PARAM = "connector_id";
@Override
public String getName() {
return "connector_update_index_name_action";
}
@Override
public List<Route> routes() {
return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{" + CONNECTOR_ID_PARAM + "}/_index_name"));
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
try (XContentParser parser = restRequest.contentParser()) {
UpdateConnectorIndexNameAction.Request request = UpdateConnectorIndexNameAction.Request.fromXContent(
parser,
restRequest.param(CONNECTOR_ID_PARAM)
);
return channel -> client.execute(
UpdateConnectorIndexNameAction.INSTANCE,
request,
new RestToXContentListener<>(channel, ConnectorUpdateActionResponse::status)
);
}
}
}
|
RestUpdateConnectorIndexNameAction
|
java
|
micronaut-projects__micronaut-core
|
http/src/main/java/io/micronaut/http/ssl/FileCertificateProvider.java
|
{
"start": 15229,
"end": 15431
}
|
enum ____ {
NONE,
FILE_WATCHER,
SCHEDULER,
FILE_WATCHER_OR_SCHEDULER,
}
/**
* Supported on-disk formats for certificate material.
*/
public
|
RefreshMode
|
java
|
spring-projects__spring-security
|
core/src/main/java/org/springframework/security/authorization/RequiredFactor.java
|
{
"start": 1527,
"end": 3153
}
|
class ____ {
private final String authority;
private final @Nullable Duration validDuration;
private RequiredFactor(String authority, @Nullable Duration validDuration) {
Assert.notNull(authority, "authority cannot be null");
this.authority = authority;
this.validDuration = validDuration;
}
/**
* The expected {@link GrantedAuthority#getAuthority()}.
* @return the authority.
*/
public String getAuthority() {
return this.authority;
}
/**
* How long the
* {@link org.springframework.security.core.authority.FactorGrantedAuthority} is valid
* for.
* @return
*/
public @Nullable Duration getValidDuration() {
return this.validDuration;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof RequiredFactor that)) {
return false;
}
return Objects.equals(this.authority, that.authority) && Objects.equals(this.validDuration, that.validDuration);
}
@Override
public int hashCode() {
return Objects.hash(this.authority, this.validDuration);
}
@Override
public String toString() {
return "RequiredFactor [authority=" + this.authority + ", validDuration=" + this.validDuration + "]";
}
/**
* Creates a {@link Builder} with the specified authority.
* @param authority the authority.
* @return the builder.
*/
public static Builder withAuthority(String authority) {
return builder().authority(authority);
}
/**
* Creates a new {@link Builder}.
* @return
*/
public static Builder builder() {
return new Builder();
}
/**
* A builder for {@link RequiredFactor}.
*
* @author Rob Winch
* @since 7.0
*/
public static
|
RequiredFactor
|
java
|
spring-projects__spring-boot
|
module/spring-boot-data-jpa/src/test/java/org/springframework/boot/data/jpa/autoconfigure/AbstractDataJpaRepositoriesAutoConfigurationTests.java
|
{
"start": 8089,
"end": 8231
}
|
class ____ {
}
@Configuration(proxyBeanMethods = false)
@TestAutoConfigurationPackage(Country.class)
static
|
SortOfInvalidCustomConfiguration
|
java
|
apache__maven
|
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng6957BuildConsumer.java
|
{
"start": 1574,
"end": 6129
}
|
class ____ extends AbstractMavenIntegrationTestCase {
/**
* Verifies:
* <ul>
* <li>preserve license</li>
* <li>consistent line separators</li>
* <li>resolved project versions (at least 2 levels deep) in parent and dependencies</li>
* <li>removal of modules in aggregators</li>
* </ul>
*
* @throws Exception in case of failure
*/
@Test
public void testPublishedPoms() throws Exception {
File testDir = extractResources("/mng-6957-buildconsumer");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.addCliArguments("-Dchangelist=MNG6957", "-Dmaven.consumer.pom.flatten=true");
verifier.addCliArgument("install");
verifier.execute();
verifier.verifyErrorFreeLog();
assertTextEquals(
new File(testDir, "expected/parent.pom"),
new File(verifier.getArtifactPath(
"org.sonatype.mavenbook.multi", "parent", "0.9-MNG6957-SNAPSHOT", "pom")));
assertTextEquals(
new File(testDir, "expected/parent-build.pom"),
new File(verifier.getArtifactPath(
"org.sonatype.mavenbook.multi", "parent", "0.9-MNG6957-SNAPSHOT", "pom", "build")));
assertTextEquals(
new File(testDir, "expected/simple-parent.pom"),
new File(verifier.getArtifactPath(
"org.sonatype.mavenbook.multi", "simple-parent", "0.9-MNG6957-SNAPSHOT", "pom")));
assertTextEquals(
new File(testDir, "expected/simple-parent-build.pom"),
new File(verifier.getArtifactPath(
"org.sonatype.mavenbook.multi", "simple-parent", "0.9-MNG6957-SNAPSHOT", "pom", "build")));
assertTextEquals(
new File(testDir, "expected/simple-weather.pom"),
new File(verifier.getArtifactPath(
"org.sonatype.mavenbook.multi", "simple-weather", "0.9-MNG6957-SNAPSHOT", "pom")));
assertTextEquals(
new File(testDir, "expected/simple-weather-build.pom"),
new File(verifier.getArtifactPath(
"org.sonatype.mavenbook.multi", "simple-weather", "0.9-MNG6957-SNAPSHOT", "pom", "build")));
assertTextEquals(
new File(testDir, "expected/simple-webapp.pom"),
new File(verifier.getArtifactPath(
"org.sonatype.mavenbook.multi", "simple-webapp", "0.9-MNG6957-SNAPSHOT", "pom")));
assertTextEquals(
new File(testDir, "expected/simple-webapp-build.pom"),
new File(verifier.getArtifactPath(
"org.sonatype.mavenbook.multi", "simple-webapp", "0.9-MNG6957-SNAPSHOT", "pom", "build")));
assertTextEquals(
new File(testDir, "expected/simple-testutils.pom"),
new File(verifier.getArtifactPath(
"org.sonatype.mavenbook.multi", "simple-testutils", "0.9-MNG6957-SNAPSHOT", "pom")));
assertTextEquals(
new File(testDir, "expected/simple-testutils-build.pom"),
new File(verifier.getArtifactPath(
"org.sonatype.mavenbook.multi", "simple-testutils", "0.9-MNG6957-SNAPSHOT", "pom", "build")));
assertTextEquals(
new File(testDir, "expected/utils-parent.pom"),
new File(verifier.getArtifactPath(
"org.sonatype.mavenbook.multi", "utils-parent", "0.9-MNG6957-SNAPSHOT", "pom")));
assertTextEquals(
new File(testDir, "expected/utils-parent-build.pom"),
new File(verifier.getArtifactPath(
"org.sonatype.mavenbook.multi", "utils-parent", "0.9-MNG6957-SNAPSHOT", "pom", "build")));
}
static void assertTextEquals(File file1, File file2) throws IOException {
assertEquals(
String.join(
"\n",
Files.readAllLines(file1.toPath()).stream()
.map(String::trim)
.toList()),
String.join(
"\n",
Files.readAllLines(file2.toPath()).stream()
.map(String::trim)
.toList()),
"pom files differ " + file1 + " " + file2);
}
}
|
MavenITmng6957BuildConsumer
|
java
|
hibernate__hibernate-orm
|
local-build-plugins/src/main/java/org/hibernate/orm/post/InternalsReportTask.java
|
{
"start": 494,
"end": 1733
}
|
class ____ extends AbstractJandexAwareTask {
public static final String INTERNAL_ANN_NAME = "org.hibernate.Internal";
private final Property<RegularFile> reportFile;
public InternalsReportTask() {
setDescription( "Generates a report of things consider internal" );
reportFile = getProject().getObjects().fileProperty();
reportFile.convention( getProject().getLayout().getBuildDirectory().file( "orm/reports/internal.txt" ) );
}
@Override
protected Provider<RegularFile> getTaskReportFileReference() {
return reportFile;
}
@TaskAction
public void generateInternalsReport() {
final TreeSet<Inclusion> internals = new TreeSet<>( Comparator.comparing( Inclusion::getPath ) );
internals.addAll( getIndexManager().getInternalPackageNames() );
processAnnotations( DotName.createSimple( INTERNAL_ANN_NAME ), internals );
writeReport( internals );
}
@Override
protected void writeReportHeader(OutputStreamWriter fileWriter) {
super.writeReportHeader( fileWriter );
try {
fileWriter.write( "# All API elements considered internal for Hibernate's own use" );
fileWriter.write( '\n' );
fileWriter.write( '\n' );
}
catch (IOException e) {
throw new RuntimeException( e );
}
}
}
|
InternalsReportTask
|
java
|
apache__rocketmq
|
remoting/src/main/java/org/apache/rocketmq/remoting/protocol/subscription/SubscriptionGroupConfig.java
|
{
"start": 1082,
"end": 8402
}
|
class ____ {
private String groupName;
private boolean consumeEnable = true;
private boolean consumeFromMinEnable = true;
private boolean consumeBroadcastEnable = true;
private boolean consumeMessageOrderly = false;
private int retryQueueNums = 1;
private int retryMaxTimes = 16;
private GroupRetryPolicy groupRetryPolicy = new GroupRetryPolicy();
private long brokerId = MixAll.MASTER_ID;
private long whichBrokerWhenConsumeSlowly = 1;
private boolean notifyConsumerIdsChangedEnable = true;
private int groupSysFlag = 0;
// Only valid for push consumer
private int consumeTimeoutMinute = 15;
private Set<SimpleSubscriptionData> subscriptionDataSet;
private Map<String, String> attributes = new HashMap<>();
public String getGroupName() {
return groupName;
}
public void setGroupName(String groupName) {
this.groupName = groupName;
}
public boolean isConsumeEnable() {
return consumeEnable;
}
public void setConsumeEnable(boolean consumeEnable) {
this.consumeEnable = consumeEnable;
}
public boolean isConsumeFromMinEnable() {
return consumeFromMinEnable;
}
public void setConsumeFromMinEnable(boolean consumeFromMinEnable) {
this.consumeFromMinEnable = consumeFromMinEnable;
}
public boolean isConsumeBroadcastEnable() {
return consumeBroadcastEnable;
}
public void setConsumeBroadcastEnable(boolean consumeBroadcastEnable) {
this.consumeBroadcastEnable = consumeBroadcastEnable;
}
public boolean isConsumeMessageOrderly() {
return consumeMessageOrderly;
}
public void setConsumeMessageOrderly(boolean consumeMessageOrderly) {
this.consumeMessageOrderly = consumeMessageOrderly;
}
public int getRetryQueueNums() {
return retryQueueNums;
}
public void setRetryQueueNums(int retryQueueNums) {
this.retryQueueNums = retryQueueNums;
}
public int getRetryMaxTimes() {
return retryMaxTimes;
}
public void setRetryMaxTimes(int retryMaxTimes) {
this.retryMaxTimes = retryMaxTimes;
}
public GroupRetryPolicy getGroupRetryPolicy() {
return groupRetryPolicy;
}
public void setGroupRetryPolicy(GroupRetryPolicy groupRetryPolicy) {
this.groupRetryPolicy = groupRetryPolicy;
}
public long getBrokerId() {
return brokerId;
}
public void setBrokerId(long brokerId) {
this.brokerId = brokerId;
}
public long getWhichBrokerWhenConsumeSlowly() {
return whichBrokerWhenConsumeSlowly;
}
public void setWhichBrokerWhenConsumeSlowly(long whichBrokerWhenConsumeSlowly) {
this.whichBrokerWhenConsumeSlowly = whichBrokerWhenConsumeSlowly;
}
public boolean isNotifyConsumerIdsChangedEnable() {
return notifyConsumerIdsChangedEnable;
}
public void setNotifyConsumerIdsChangedEnable(final boolean notifyConsumerIdsChangedEnable) {
this.notifyConsumerIdsChangedEnable = notifyConsumerIdsChangedEnable;
}
public int getGroupSysFlag() {
return groupSysFlag;
}
public void setGroupSysFlag(int groupSysFlag) {
this.groupSysFlag = groupSysFlag;
}
public int getConsumeTimeoutMinute() {
return consumeTimeoutMinute;
}
public void setConsumeTimeoutMinute(int consumeTimeoutMinute) {
this.consumeTimeoutMinute = consumeTimeoutMinute;
}
public Set<SimpleSubscriptionData> getSubscriptionDataSet() {
return subscriptionDataSet;
}
public void setSubscriptionDataSet(Set<SimpleSubscriptionData> subscriptionDataSet) {
this.subscriptionDataSet = subscriptionDataSet;
}
public Map<String, String> getAttributes() {
return attributes;
}
public void setAttributes(Map<String, String> attributes) {
this.attributes = attributes;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (int) (brokerId ^ (brokerId >>> 32));
result = prime * result + (consumeBroadcastEnable ? 1231 : 1237);
result = prime * result + (consumeEnable ? 1231 : 1237);
result = prime * result + (consumeFromMinEnable ? 1231 : 1237);
result = prime * result + (notifyConsumerIdsChangedEnable ? 1231 : 1237);
result = prime * result + (consumeMessageOrderly ? 1231 : 1237);
result = prime * result + ((groupName == null) ? 0 : groupName.hashCode());
result = prime * result + retryMaxTimes;
result = prime * result + retryQueueNums;
result =
prime * result + (int) (whichBrokerWhenConsumeSlowly ^ (whichBrokerWhenConsumeSlowly >>> 32));
result = prime * result + groupSysFlag;
result = prime * result + consumeTimeoutMinute;
result = prime * result + ((subscriptionDataSet == null) ? 0 : subscriptionDataSet.hashCode());
result = prime * result + attributes.hashCode();
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
SubscriptionGroupConfig other = (SubscriptionGroupConfig) obj;
return new EqualsBuilder()
.append(groupName, other.groupName)
.append(consumeEnable, other.consumeEnable)
.append(consumeFromMinEnable, other.consumeFromMinEnable)
.append(consumeBroadcastEnable, other.consumeBroadcastEnable)
.append(consumeMessageOrderly, other.consumeMessageOrderly)
.append(retryQueueNums, other.retryQueueNums)
.append(retryMaxTimes, other.retryMaxTimes)
.append(whichBrokerWhenConsumeSlowly, other.whichBrokerWhenConsumeSlowly)
.append(notifyConsumerIdsChangedEnable, other.notifyConsumerIdsChangedEnable)
.append(groupSysFlag, other.groupSysFlag)
.append(consumeTimeoutMinute, other.consumeTimeoutMinute)
.append(subscriptionDataSet, other.subscriptionDataSet)
.append(attributes, other.attributes)
.isEquals();
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("groupName", groupName)
.add("consumeEnable", consumeEnable)
.add("consumeFromMinEnable", consumeFromMinEnable)
.add("consumeBroadcastEnable", consumeBroadcastEnable)
.add("consumeMessageOrderly", consumeMessageOrderly)
.add("retryQueueNums", retryQueueNums)
.add("retryMaxTimes", retryMaxTimes)
.add("groupRetryPolicy", groupRetryPolicy)
.add("brokerId", brokerId)
.add("whichBrokerWhenConsumeSlowly", whichBrokerWhenConsumeSlowly)
.add("notifyConsumerIdsChangedEnable", notifyConsumerIdsChangedEnable)
.add("groupSysFlag", groupSysFlag)
.add("consumeTimeoutMinute", consumeTimeoutMinute)
.add("subscriptionDataSet", subscriptionDataSet)
.add("attributes", attributes)
.toString();
}
}
|
SubscriptionGroupConfig
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/client/runtime/src/main/java/org/jboss/resteasy/reactive/client/spi/MissingMessageBodyReaderErrorMessageContextualizer.java
|
{
"start": 596,
"end": 746
}
|
interface ____ {
Class<?> type();
Type genericType();
Annotation[] annotations();
MediaType mediaType();
}
}
|
Input
|
java
|
apache__camel
|
components/camel-joor/src/main/java/org/apache/camel/language/joor/JoorExpression.java
|
{
"start": 1058,
"end": 3446
}
|
class ____ extends ExpressionAdapter {
private final String text;
private JoorCompiler compiler;
private JoorMethod method;
private Class<?> resultType;
private boolean preCompile = true;
private boolean singleQuotes = true;
public JoorExpression(String text) {
this.text = text;
}
@Override
public String toString() {
return "joor:" + text;
}
public JoorCompiler getCompiler() {
return compiler;
}
public void setCompiler(JoorCompiler compiler) {
this.compiler = compiler;
}
public boolean isPreCompile() {
return preCompile;
}
public void setPreCompile(boolean preCompile) {
this.preCompile = preCompile;
}
public Class<?> getResultType() {
return resultType;
}
public void setResultType(Class<?> resultType) {
this.resultType = resultType;
}
public boolean isSingleQuotes() {
return singleQuotes;
}
public void setSingleQuotes(boolean singleQuotes) {
this.singleQuotes = singleQuotes;
}
@Override
public Object evaluate(Exchange exchange) {
JoorMethod target = this.method;
if (target == null) {
target = compiler.compile(exchange.getContext(), text, singleQuotes);
}
// optimize as we call the same method all the time so we dont want to find the method every time as joor would do
// if you use its call method
Object body = exchange.getIn().getBody();
// in the rare case the body is already an optional
Optional<?> optional = body instanceof Optional ? (Optional<?>) body : Optional.ofNullable(body);
Object out;
try {
out = target.evaluate(exchange.getContext(), exchange, exchange.getIn(), body, optional);
} catch (Exception e) {
throw new ExpressionEvaluationException(this, exchange, e);
}
if (out != null && resultType != null) {
return exchange.getContext().getTypeConverter().convertTo(resultType, exchange, out);
} else {
return out;
}
}
@Override
public void init(CamelContext context) {
super.init(context);
if (preCompile && this.method == null) {
this.method = compiler.compile(context, text, singleQuotes);
}
}
}
|
JoorExpression
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/mysql/ast/statement/MySqlManageInstanceGroupStatement.java
|
{
"start": 942,
"end": 2115
}
|
class ____ extends MySqlStatementImpl {
private List<SQLExpr> groupNames = new ArrayList<SQLExpr>();
private SQLIntegerExpr replication;
private SQLName operation;
@Override
public void accept0(MySqlASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, groupNames);
acceptChild(visitor, replication);
acceptChild(visitor, operation);
}
visitor.endVisit(this);
}
public List<String> getGroupNamesToString() {
List<String> names = new ArrayList<String>(groupNames.size());
for (SQLExpr groupName : groupNames) {
names.add(groupName.toString());
}
return names;
}
public List<SQLExpr> getGroupNames() {
return groupNames;
}
public SQLIntegerExpr getReplication() {
return replication;
}
public void setReplication(SQLIntegerExpr replication) {
this.replication = replication;
}
public SQLName getOperation() {
return operation;
}
public void setOperation(SQLName operation) {
this.operation = operation;
}
}
|
MySqlManageInstanceGroupStatement
|
java
|
apache__flink
|
flink-clients/src/test/java/org/apache/flink/client/cli/CliFrontendDynamicPropertiesTest.java
|
{
"start": 1831,
"end": 8480
}
|
class ____ {
private GenericCLI cliUnderTest;
private Configuration configuration;
@BeforeAll
static void init() {
CliFrontendTestUtils.pipeSystemOutToNull();
}
@AfterAll
static void shutdown() {
CliFrontendTestUtils.restoreSystemOut();
}
@BeforeEach
void setup(@TempDir java.nio.file.Path tmp) {
Options testOptions = new Options();
configuration = new Configuration();
configuration.set(CoreOptions.CHECK_LEAKED_CLASSLOADER, false);
cliUnderTest = new GenericCLI(configuration, tmp.toAbsolutePath().toString());
cliUnderTest.addGeneralOptions(testOptions);
}
@Test
void testDynamicPropertiesWithParentFirstClassloader() throws Exception {
String[] args = {
"-e",
"test-executor",
"-D" + CoreOptions.DEFAULT_PARALLELISM.key() + "=5",
"-D" + "classloader.resolve-order=parent-first",
getTestJarPath(),
"-a",
"--debug",
"true",
"arg1",
"arg2"
};
Map<String, String> expectedConfigValues = new HashMap<>();
expectedConfigValues.put("parallelism.default", "5");
expectedConfigValues.put("classloader.resolve-order", "parent-first");
verifyCliFrontendWithDynamicProperties(
configuration,
args,
cliUnderTest,
expectedConfigValues,
(configuration, program) ->
assertThat(ParentFirstClassLoader.class.getName())
.isEqualTo(program.getUserCodeClassLoader().getClass().getName()));
}
@Test
void testDynamicPropertiesWithDefaultChildFirstClassloader() throws Exception {
String[] args = {
"-e",
"test-executor",
"-D" + CoreOptions.DEFAULT_PARALLELISM.key() + "=5",
getTestJarPath(),
"-a",
"--debug",
"true",
"arg1",
"arg2"
};
Map<String, String> expectedConfigValues = new HashMap<>();
expectedConfigValues.put("parallelism.default", "5");
verifyCliFrontendWithDynamicProperties(
configuration,
args,
cliUnderTest,
expectedConfigValues,
(configuration, program) ->
assertThat(ChildFirstClassLoader.class.getName())
.isEqualTo(program.getUserCodeClassLoader().getClass().getName()));
}
@Test
void testDynamicPropertiesWithChildFirstClassloader() throws Exception {
String[] args = {
"-e",
"test-executor",
"-D" + CoreOptions.DEFAULT_PARALLELISM.key() + "=5",
"-D" + "classloader.resolve-order=child-first",
getTestJarPath(),
"-a",
"--debug",
"true",
"arg1",
"arg2"
};
Map<String, String> expectedConfigValues = new HashMap<>();
expectedConfigValues.put("parallelism.default", "5");
expectedConfigValues.put("classloader.resolve-order", "child-first");
verifyCliFrontendWithDynamicProperties(
configuration,
args,
cliUnderTest,
expectedConfigValues,
(configuration, program) ->
assertThat(ChildFirstClassLoader.class.getName())
.isEqualTo(program.getUserCodeClassLoader().getClass().getName()));
}
@Test
public void testDynamicPropertiesWithClientTimeoutAndDefaultParallelism() throws Exception {
String[] args = {
"-e",
"test-executor",
"-Dclient.timeout=10min",
"-Dparallelism.default=12",
getTestJarPath(),
};
Map<String, String> expectedConfigValues = new HashMap<>();
expectedConfigValues.put("client.timeout", "10min");
expectedConfigValues.put("parallelism.default", "12");
verifyCliFrontendWithDynamicProperties(
configuration, args, cliUnderTest, expectedConfigValues);
}
@Test
public void testSecurityConfigWithDynamicProperties(@TempDir File tempDir) throws Exception {
File keytabFile = new File(tempDir, "keytab.file");
keytabFile.createNewFile();
String[] args = {
"-e",
"test-executor",
"-D" + SecurityOptions.KERBEROS_LOGIN_KEYTAB.key() + "=" + keytabFile.getPath(),
"-D" + SecurityOptions.KERBEROS_LOGIN_PRINCIPAL.key() + "=principal",
getTestJarPath(),
};
TestingCliFrontendWithDynamicProperties testFrontend =
new TestingCliFrontendWithDynamicProperties(
configuration, cliUnderTest, null, null);
CommandLine commandLine = testFrontend.getCommandLine(new Options(), args, true);
Configuration securityConfig = new Configuration(configuration);
DynamicPropertiesUtil.encodeDynamicProperties(commandLine, securityConfig);
SecurityConfiguration securityConfiguration = new SecurityConfiguration(securityConfig);
assertThat(securityConfiguration.getKeytab()).isEqualTo(keytabFile.getPath());
assertThat(securityConfiguration.getPrincipal()).isEqualTo("principal");
}
// --------------------------------------------------------------------------------------------
public static void verifyCliFrontendWithDynamicProperties(
Configuration configuration,
String[] parameters,
GenericCLI cliUnderTest,
Map<String, String> expectedConfigValues)
throws Exception {
verifyCliFrontendWithDynamicProperties(
configuration, parameters, cliUnderTest, expectedConfigValues, null);
}
public static void verifyCliFrontendWithDynamicProperties(
Configuration configuration,
String[] parameters,
GenericCLI cliUnderTest,
Map<String, String> expectedConfigValues,
TestingCliFrontendWithDynamicProperties.CustomTester customTester)
throws Exception {
TestingCliFrontendWithDynamicProperties testFrontend =
new TestingCliFrontendWithDynamicProperties(
configuration, cliUnderTest, expectedConfigValues, customTester);
testFrontend.run(parameters); // verifies the expected values (see below)
}
private static final
|
CliFrontendDynamicPropertiesTest
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeResourceChecker.java
|
{
"start": 2195,
"end": 7189
}
|
class ____ implements CheckableNameNodeResource {
private DF df;
private boolean required;
private String volume;
public CheckedVolume(File dirToCheck, boolean required)
throws IOException {
df = new DF(dirToCheck, conf);
this.required = required;
volume = df.getFilesystem();
}
public String getVolume() {
return volume;
}
@Override
public boolean isRequired() {
return required;
}
@Override
public boolean isResourceAvailable() {
long availableSpace = df.getAvailable();
if (LOG.isDebugEnabled()) {
LOG.debug("Space available on volume '" + volume + "' is "
+ availableSpace);
}
if (availableSpace < duReserved) {
LOG.warn("Space available on volume '" + volume + "' is "
+ availableSpace +
", which is below the configured reserved amount " + duReserved);
return false;
} else {
return true;
}
}
@Override
public String toString() {
return "volume: " + volume + " required: " + required +
" resource available: " + isResourceAvailable();
}
}
/**
* Create a NameNodeResourceChecker, which will check the edits dirs and any
* additional dirs to check set in <code>conf</code>.
*/
public NameNodeResourceChecker(Configuration conf) throws IOException {
this.conf = conf;
volumes = new HashMap<String, CheckedVolume>();
duReserved = conf.getLongBytes(DFSConfigKeys.DFS_NAMENODE_DU_RESERVED_KEY,
DFSConfigKeys.DFS_NAMENODE_DU_RESERVED_DEFAULT);
Collection<URI> extraCheckedVolumes = Util.stringCollectionAsURIs(conf
.getTrimmedStringCollection(DFSConfigKeys.DFS_NAMENODE_CHECKED_VOLUMES_KEY));
Collection<URI> localEditDirs =
FSNamesystem.getNamespaceEditsDirs(conf).stream().filter(
input -> {
if (input.getScheme().equals(NNStorage.LOCAL_URI_SCHEME)) {
return true;
}
return false;
}).collect(Collectors.toList());
// Add all the local edits dirs, marking some as required if they are
// configured as such.
for (URI editsDirToCheck : localEditDirs) {
addDirToCheck(editsDirToCheck,
FSNamesystem.getRequiredNamespaceEditsDirs(conf).contains(
editsDirToCheck));
}
// All extra checked volumes are marked "required"
for (URI extraDirToCheck : extraCheckedVolumes) {
addDirToCheck(extraDirToCheck, true);
}
minimumRedundantVolumes = conf.getInt(
DFSConfigKeys.DFS_NAMENODE_CHECKED_VOLUMES_MINIMUM_KEY,
DFSConfigKeys.DFS_NAMENODE_CHECKED_VOLUMES_MINIMUM_DEFAULT);
}
/**
* Add the volume of the passed-in directory to the list of volumes to check.
* If <code>required</code> is true, and this volume is already present, but
* is marked redundant, it will be marked required. If the volume is already
* present but marked required then this method is a no-op.
*
* @param directoryToCheck
* The directory whose volume will be checked for available space.
*/
private void addDirToCheck(URI directoryToCheck, boolean required)
throws IOException {
File dir = new File(directoryToCheck.getPath());
if (!dir.exists()) {
throw new IOException("Missing directory "+dir.getAbsolutePath());
}
CheckedVolume newVolume = new CheckedVolume(dir, required);
CheckedVolume volume = volumes.get(newVolume.getVolume());
if (volume == null || !volume.isRequired()) {
volumes.put(newVolume.getVolume(), newVolume);
}
}
/**
* Return true if disk space is available on at least one of the configured
* redundant volumes, and all of the configured required volumes.
*
* @return True if the configured amount of disk space is available on at
* least one redundant volume and all of the required volumes, false
* otherwise.
*/
public boolean hasAvailableDiskSpace() {
return NameNodeResourcePolicy.areResourcesAvailable(volumes.values(),
minimumRedundantVolumes);
}
/**
* Return the set of directories which are low on space.
*
* @return the set of directories whose free space is below the threshold.
*/
@VisibleForTesting
Collection<String> getVolumesLowOnSpace() throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("Going to check the following volumes disk space: " + volumes);
}
Collection<String> lowVolumes = new ArrayList<String>();
for (CheckedVolume volume : volumes.values()) {
lowVolumes.add(volume.getVolume());
}
return lowVolumes;
}
@VisibleForTesting
void setVolumes(Map<String, CheckedVolume> volumes) {
this.volumes = volumes;
}
@VisibleForTesting
void setMinimumReduntdantVolumes(int minimumRedundantVolumes) {
this.minimumRedundantVolumes = minimumRedundantVolumes;
}
}
|
CheckedVolume
|
java
|
apache__camel
|
components/camel-oauth/src/main/java/org/apache/camel/oauth/OAuthCodeFlowCallback.java
|
{
"start": 1079,
"end": 2914
}
|
class ____ extends AbstractOAuthProcessor {
private final Logger log = LoggerFactory.getLogger(getClass());
@Override
public void process(Exchange exchange) {
var context = exchange.getContext();
var msg = exchange.getMessage();
logRequestHeaders(procName, msg);
// Validate auth callback request headers/parameters
//
var authCode = msg.getHeader("code", String.class);
if (authCode == null) {
log.error("Authorization code is missing in the request");
msg.setHeader("CamelHttpResponseCode", 400);
msg.setBody("Authorization code missing");
return;
}
// Require an active OAuthSession
//
var oauth = findOAuthOrThrow(context);
var session = oauth.getOrCreateSession(exchange);
// Exchange the authorization code for access/refresh/id tokens
//
String redirectUri = getRequiredProperty(exchange.getContext(), CAMEL_OAUTH_REDIRECT_URI);
var userProfile = oauth.authenticate(new AuthCodeCredentials()
.setRedirectUri(redirectUri)
.setCode(authCode));
session.putUserProfile(userProfile);
log.info("Authenticated {}", userProfile.subject());
userProfile.logDetails();
var postLoginUrl = (String) session.removeValue("OAuthPostLoginUrl").orElse(null);
if (postLoginUrl == null) {
postLoginUrl = getRequiredProperty(exchange.getContext(), CAMEL_OAUTH_REDIRECT_URI);
var lastSlashIdx = postLoginUrl.lastIndexOf('/');
postLoginUrl = postLoginUrl.substring(0, lastSlashIdx + 1);
log.warn("Cannot find OAuthPostLoginUrl, now using {}", postLoginUrl);
}
sendRedirect(msg, postLoginUrl);
}
}
|
OAuthCodeFlowCallback
|
java
|
apache__camel
|
catalog/camel-route-parser/src/test/java/org/apache/camel/parser/java/RoasterJavaDslTest.java
|
{
"start": 1422,
"end": 3403
}
|
class ____ extends CamelTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(RoasterJavaDslTest.class);
@Override
public boolean isDumpRouteCoverage() {
return true;
}
@Test
void parseTree() throws Exception {
JavaClassSource clazz = (JavaClassSource) Roaster
.parse(new File("src/test/java/org/apache/camel/parser/java/MyJavaDslRouteBuilder.java"));
List<CamelNodeDetails> list = RouteBuilderParser.parseRouteBuilderTree(clazz,
"src/test/java/org/apache/camel/parser/java/MyJavaDslRouteBuilder.java", true);
assertEquals(1, list.size());
CamelNodeDetails details = list.get(0);
assertEquals("src/test/java/org/apache/camel/parser/java/MyJavaDslRouteBuilder.java", details.getFileName());
assertEquals("bar", details.getRouteId());
assertEquals("configure", details.getMethodName());
assertEquals("org.apache.camel.parser.java.MyJavaDslRouteBuilder", details.getClassName());
assertEquals("28", list.get(0).getLineNumber());
assertEquals("28", list.get(0).getLineNumberEnd());
String tree = details.dump(0);
LOG.info("\n{}", tree);
assertTrue(tree.contains("28\tfrom"));
assertTrue(tree.contains("29\t log"));
assertTrue(tree.contains("30\t setHeader"));
assertTrue(tree.contains("31\t choice"));
assertTrue(tree.contains("33\t to"));
assertTrue(tree.contains("34\t toD"));
assertTrue(tree.contains("36\t toD"));
assertTrue(tree.contains("38\t log"));
assertTrue(tree.contains("40\t to"));
}
@Test
void testRouteCoverage() throws Exception {
context.addRoutes(new MyJavaDslRouteBuilder());
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
MockEndpoint.assertIsSatisfied(context);
}
}
|
RoasterJavaDslTest
|
java
|
spring-projects__spring-framework
|
spring-context-support/src/main/java/org/springframework/cache/jcache/config/JCacheConfigurer.java
|
{
"start": 1853,
"end": 2293
}
|
class ____ implements JCacheConfigurer {
* @Bean // important!
* @Override
* public CacheResolver exceptionCacheResolver() {
* // configure and return CacheResolver instance
* }
* // ...
* }
* </pre>
* See {@link org.springframework.cache.annotation.EnableCaching} for more complete examples.
*/
default @Nullable CacheResolver exceptionCacheResolver() {
return null;
}
}
|
AppConfig
|
java
|
alibaba__nacos
|
config/src/main/java/com/alibaba/nacos/config/server/service/capacity/TenantCapacityPersistService.java
|
{
"start": 2181,
"end": 2955
}
|
class ____ {
private static final TenantCapacityRowMapper TENANT_CAPACITY_ROW_MAPPER = new TenantCapacityRowMapper();
private JdbcTemplate jdbcTemplate;
private DataSourceService dataSourceService;
private MapperManager mapperManager;
/**
* init method.
*/
@PostConstruct
public void init() {
this.dataSourceService = DynamicDataSource.getInstance().getDataSource();
this.jdbcTemplate = dataSourceService.getJdbcTemplate();
Boolean isDataSourceLogEnable = EnvUtil.getProperty(CommonConstant.NACOS_PLUGIN_DATASOURCE_LOG, Boolean.class,
false);
this.mapperManager = MapperManager.instance(isDataSourceLogEnable);
}
static final
|
TenantCapacityPersistService
|
java
|
apache__kafka
|
clients/src/test/java/org/apache/kafka/clients/consumer/KafkaConsumerTest.java
|
{
"start": 196707,
"end": 197053
}
|
class ____ extends MockDeserializer implements Monitorable {
@Override
public void withPluginMetrics(PluginMetrics metrics) {
MetricName name = metrics.metricName(NAME, DESCRIPTION, TAGS);
metrics.addMetric(name, (Measurable) (config, now) -> VALUE);
}
}
public static
|
MonitorableDeserializer
|
java
|
apache__spark
|
core/src/main/java/org/apache/spark/shuffle/sort/io/LocalDiskSingleSpillMapOutputWriter.java
|
{
"start": 1097,
"end": 2161
}
|
class ____
implements SingleSpillShuffleMapOutputWriter {
private final int shuffleId;
private final long mapId;
private final IndexShuffleBlockResolver blockResolver;
public LocalDiskSingleSpillMapOutputWriter(
int shuffleId,
long mapId,
IndexShuffleBlockResolver blockResolver) {
this.shuffleId = shuffleId;
this.mapId = mapId;
this.blockResolver = blockResolver;
}
@Override
public void transferMapSpillFile(
File mapSpillFile,
long[] partitionLengths,
long[] checksums) throws IOException {
// The map spill file already has the proper format, and it contains all of the partition data.
// So just transfer it directly to the destination without any merging.
File outputFile = blockResolver.getDataFile(shuffleId, mapId);
File tempFile = Utils.tempFileWith(outputFile);
Files.move(mapSpillFile.toPath(), tempFile.toPath());
blockResolver
.writeMetadataFileAndCommit(shuffleId, mapId, partitionLengths, checksums, tempFile);
}
}
|
LocalDiskSingleSpillMapOutputWriter
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/SystemUtils.java
|
{
"start": 67385,
"end": 67885
}
|
class ____ loaded.
* </p>
*
* @since 3.1
*/
public static final boolean IS_OS_WINDOWS_2008 = getOsNameMatches(OS_NAME_WINDOWS_PREFIX + " Server 2008");
/**
* The constant {@code true} if this is Windows Server 2012.
* <p>
* The result depends on the value of the {@link #OS_NAME} constant.
* </p>
* <p>
* The field will return {@code false} if {@link #OS_NAME} is {@code null}.
* </p>
* <p>
* This value is initialized when the
|
is
|
java
|
spring-projects__spring-boot
|
module/spring-boot-webflux/src/main/java/org/springframework/boot/webflux/autoconfigure/ProblemDetailsExceptionHandler.java
|
{
"start": 1033,
"end": 1114
}
|
class ____ extends ResponseEntityExceptionHandler {
}
|
ProblemDetailsExceptionHandler
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/EqualsGetClassTest.java
|
{
"start": 1436,
"end": 1766
}
|
class ____ {
private int a;
@Override
public boolean equals(Object o) {
return o != null && o.getClass().equals(getClass()) && a == ((Test) o).a;
}
}
""")
.addOutputLines(
"Test.java",
"""
|
Test
|
java
|
apache__kafka
|
clients/src/test/java/org/apache/kafka/clients/consumer/internals/StreamsRebalanceListenerInvokerTest.java
|
{
"start": 1873,
"end": 11036
}
|
class ____ {
@Mock
private StreamsRebalanceListener mockListener;
@Mock
private StreamsRebalanceData streamsRebalanceData;
private StreamsRebalanceListenerInvoker invoker;
private final LogContext logContext = new LogContext();
@BeforeEach
public void setup() {
invoker = new StreamsRebalanceListenerInvoker(logContext, streamsRebalanceData);
}
@Test
public void testSetRebalanceListenerWithNull() {
NullPointerException exception = assertThrows(NullPointerException.class,
() -> invoker.setRebalanceListener(null));
assertEquals("StreamsRebalanceListener cannot be null", exception.getMessage());
}
@Test
public void testSetRebalanceListenerOverwritesExisting() {
StreamsRebalanceListener firstListener = org.mockito.Mockito.mock(StreamsRebalanceListener.class);
StreamsRebalanceListener secondListener = org.mockito.Mockito.mock(StreamsRebalanceListener.class);
StreamsRebalanceData.Assignment mockAssignment = createMockAssignment();
when(streamsRebalanceData.reconciledAssignment()).thenReturn(mockAssignment);
// Set first listener
invoker.setRebalanceListener(firstListener);
// Overwrite with second listener
invoker.setRebalanceListener(secondListener);
// Should use second listener
invoker.invokeAllTasksRevoked();
verify(firstListener, never()).onTasksRevoked(any());
verify(secondListener).onTasksRevoked(eq(mockAssignment.activeTasks()));
}
@Test
public void testInvokeMethodsWithNoListener() {
assertNull(invoker.invokeAllTasksRevoked());
assertNull(invoker.invokeTasksAssigned(createMockAssignment()));
assertNull(invoker.invokeTasksRevoked(createMockTasks()));
assertNull(invoker.invokeAllTasksLost());
}
@Test
public void testInvokeAllTasksRevokedWithListener() {
invoker.setRebalanceListener(mockListener);
StreamsRebalanceData.Assignment mockAssignment = createMockAssignment();
when(streamsRebalanceData.reconciledAssignment()).thenReturn(mockAssignment);
Exception result = invoker.invokeAllTasksRevoked();
assertNull(result);
verify(mockListener).onTasksRevoked(eq(mockAssignment.activeTasks()));
}
@Test
public void testInvokeTasksAssignedWithListener() {
invoker.setRebalanceListener(mockListener);
StreamsRebalanceData.Assignment assignment = createMockAssignment();
Exception result = invoker.invokeTasksAssigned(assignment);
assertNull(result);
verify(mockListener).onTasksAssigned(eq(assignment));
}
@Test
public void testInvokeTasksAssignedWithWakeupException() {
invoker.setRebalanceListener(mockListener);
StreamsRebalanceData.Assignment assignment = createMockAssignment();
WakeupException wakeupException = new WakeupException();
doThrow(wakeupException).when(mockListener).onTasksAssigned(assignment);
WakeupException thrownException = assertThrows(WakeupException.class,
() -> invoker.invokeTasksAssigned(assignment));
assertEquals(wakeupException, thrownException);
verify(mockListener).onTasksAssigned(eq(assignment));
}
@Test
public void testInvokeTasksAssignedWithInterruptException() {
invoker.setRebalanceListener(mockListener);
StreamsRebalanceData.Assignment assignment = createMockAssignment();
InterruptException interruptException = new InterruptException("Test interrupt");
doThrow(interruptException).when(mockListener).onTasksAssigned(assignment);
InterruptException thrownException = assertThrows(InterruptException.class,
() -> invoker.invokeTasksAssigned(assignment));
assertEquals(interruptException, thrownException);
verify(mockListener).onTasksAssigned(eq(assignment));
}
@Test
public void testInvokeTasksAssignedWithOtherException() {
invoker.setRebalanceListener(mockListener);
StreamsRebalanceData.Assignment assignment = createMockAssignment();
RuntimeException runtimeException = new RuntimeException("Test exception");
doThrow(runtimeException).when(mockListener).onTasksAssigned(assignment);
Exception result = invoker.invokeTasksAssigned(assignment);
assertEquals(runtimeException, result);
verify(mockListener).onTasksAssigned(eq(assignment));
}
@Test
public void testInvokeTasksRevokedWithListener() {
invoker.setRebalanceListener(mockListener);
Set<StreamsRebalanceData.TaskId> tasks = createMockTasks();
Exception result = invoker.invokeTasksRevoked(tasks);
assertNull(result);
verify(mockListener).onTasksRevoked(eq(tasks));
}
@Test
public void testInvokeTasksRevokedWithWakeupException() {
invoker.setRebalanceListener(mockListener);
Set<StreamsRebalanceData.TaskId> tasks = createMockTasks();
WakeupException wakeupException = new WakeupException();
doThrow(wakeupException).when(mockListener).onTasksRevoked(tasks);
WakeupException thrownException = assertThrows(WakeupException.class,
() -> invoker.invokeTasksRevoked(tasks));
assertEquals(wakeupException, thrownException);
verify(mockListener).onTasksRevoked(eq(tasks));
}
@Test
public void testInvokeTasksRevokedWithInterruptException() {
invoker.setRebalanceListener(mockListener);
Set<StreamsRebalanceData.TaskId> tasks = createMockTasks();
InterruptException interruptException = new InterruptException("Test interrupt");
doThrow(interruptException).when(mockListener).onTasksRevoked(tasks);
InterruptException thrownException = assertThrows(InterruptException.class,
() -> invoker.invokeTasksRevoked(tasks));
assertEquals(interruptException, thrownException);
verify(mockListener).onTasksRevoked(eq(tasks));
}
@Test
public void testInvokeTasksRevokedWithOtherException() {
invoker.setRebalanceListener(mockListener);
Set<StreamsRebalanceData.TaskId> tasks = createMockTasks();
RuntimeException runtimeException = new RuntimeException("Test exception");
doThrow(runtimeException).when(mockListener).onTasksRevoked(tasks);
Exception result = invoker.invokeTasksRevoked(tasks);
assertEquals(runtimeException, result);
verify(mockListener).onTasksRevoked(eq(tasks));
}
@Test
public void testInvokeAllTasksLostWithListener() {
invoker.setRebalanceListener(mockListener);
Exception result = invoker.invokeAllTasksLost();
assertNull(result);
verify(mockListener).onAllTasksLost();
}
@Test
public void testInvokeAllTasksLostWithWakeupException() {
invoker.setRebalanceListener(mockListener);
WakeupException wakeupException = new WakeupException();
doThrow(wakeupException).when(mockListener).onAllTasksLost();
WakeupException thrownException = assertThrows(WakeupException.class,
() -> invoker.invokeAllTasksLost());
assertEquals(wakeupException, thrownException);
verify(mockListener).onAllTasksLost();
}
@Test
public void testInvokeAllTasksLostWithInterruptException() {
invoker.setRebalanceListener(mockListener);
InterruptException interruptException = new InterruptException("Test interrupt");
doThrow(interruptException).when(mockListener).onAllTasksLost();
InterruptException thrownException = assertThrows(InterruptException.class,
() -> invoker.invokeAllTasksLost());
assertEquals(interruptException, thrownException);
verify(mockListener).onAllTasksLost();
}
@Test
public void testInvokeAllTasksLostWithOtherException() {
invoker.setRebalanceListener(mockListener);
RuntimeException runtimeException = new RuntimeException("Test exception");
doThrow(runtimeException).when(mockListener).onAllTasksLost();
Exception result = invoker.invokeAllTasksLost();
assertEquals(runtimeException, result);
verify(mockListener).onAllTasksLost();
}
private StreamsRebalanceData.Assignment createMockAssignment() {
Set<StreamsRebalanceData.TaskId> activeTasks = createMockTasks();
Set<StreamsRebalanceData.TaskId> standbyTasks = Set.of();
Set<StreamsRebalanceData.TaskId> warmupTasks = Set.of();
return new StreamsRebalanceData.Assignment(activeTasks, standbyTasks, warmupTasks);
}
private Set<StreamsRebalanceData.TaskId> createMockTasks() {
return Set.of(
new StreamsRebalanceData.TaskId("subtopology1", 0),
new StreamsRebalanceData.TaskId("subtopology1", 1)
);
}
}
|
StreamsRebalanceListenerInvokerTest
|
java
|
apache__spark
|
common/network-common/src/main/java/org/apache/spark/network/protocol/AbstractResponseMessage.java
|
{
"start": 917,
"end": 966
}
|
class ____ response messages.
*/
public abstract
|
for
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/alterTable/MySqlAlterTableTest37.java
|
{
"start": 865,
"end": 1412
}
|
class ____ extends TestCase {
public void test_alter_modify_clustered_by() throws Exception {
String sql = "alter table new_ddl.ddl_test_1 clustered by (col1, col2)";
List<SQLStatement> stmtList = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
assertEquals(1, stmtList.size());
SQLStatement stmt = stmtList.get(0);
String output = SQLUtils.toMySqlString(stmt);
assertEquals("ALTER TABLE new_ddl.ddl_test_1\n"
+ "\tCLUSTERED BY col1, col2", output);
}
}
|
MySqlAlterTableTest37
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/configuration/RecursiveComparisonConfiguration_ignoreFields_Test.java
|
{
"start": 887,
"end": 1409
}
|
class ____ {
@Test
void should_register_fields_path_to_ignore_without_duplicates() {
// GIVEN
RecursiveComparisonConfiguration recursiveComparisonConfiguration = new RecursiveComparisonConfiguration();
recursiveComparisonConfiguration.ignoreFields("foo", "bar", "foo.bar", "bar");
// WHEN
Set<String> fields = recursiveComparisonConfiguration.getIgnoredFields();
// THEN
then(fields).containsExactlyInAnyOrder("foo", "bar", "foo.bar");
}
}
|
RecursiveComparisonConfiguration_ignoreFields_Test
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/event/internal/OnUpdateVisitor.java
|
{
"start": 723,
"end": 2245
}
|
class ____ extends ReattachVisitor {
public OnUpdateVisitor(EventSource session, Object key, Object owner) {
super( session, key, owner );
}
@Override
Object processCollection(Object collection, CollectionType type) throws HibernateException {
if ( collection == CollectionType.UNFETCHED_COLLECTION ) {
return null;
}
final var session = getSession();
final var persister =
session.getFactory().getMappingMetamodel()
.getCollectionDescriptor( type.getRole() );
final Object collectionKey = extractCollectionKeyFromOwner( persister );
if ( collection instanceof PersistentCollection<?> persistentCollection ) {
if ( persistentCollection.setCurrentSession( session ) ) {
//a "detached" collection!
if ( !isOwnerUnchanged( persister, collectionKey, persistentCollection ) ) {
// if the collection belonged to a different entity,
// clean up the existing state of the collection
removeCollection( persister, collectionKey, session );
}
reattachCollection( persistentCollection, type );
}
else {
// a collection loaded in the current session
// can not possibly be the collection belonging
// to the entity passed to update()
removeCollection( persister, collectionKey, session );
}
}
else {
// null or brand-new collection
// this will also (inefficiently) handle arrays, which have
// no snapshot, so we can't do any better
removeCollection( persister, collectionKey, session );
}
return null;
}
}
|
OnUpdateVisitor
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/validation/AbstractBindingResultAssert.java
|
{
"start": 3945,
"end": 4219
}
|
class ____ extends BasicErrorMessageFactory {
private UnexpectedBindingResult(String reason, Object... arguments) {
super("%nExpecting binding result:%n %s%n%s", AbstractBindingResultAssert.this.actual,
reason.formatted(arguments));
}
}
}
|
UnexpectedBindingResult
|
java
|
apache__spark
|
common/network-common/src/main/java/org/apache/spark/network/TransportContext.java
|
{
"start": 4806,
"end": 14311
}
|
class ____. JVM will report
* `ClassCircularityError` to prevent such infinite recursion. (See SPARK-17714)
*/
private static final MessageToMessageEncoder<Message> ENCODER = MessageEncoder.INSTANCE;
private static final MessageToMessageEncoder<Message> SSL_ENCODER = SslMessageEncoder.INSTANCE;
private static final MessageDecoder DECODER = MessageDecoder.INSTANCE;
// Separate thread pool for handling ChunkFetchRequest. This helps to enable throttling
// max number of TransportServer worker threads that are blocked on writing response
// of ChunkFetchRequest message back to the client via the underlying channel.
private final EventLoopGroup chunkFetchWorkers;
public TransportContext(TransportConf conf, RpcHandler rpcHandler) {
this(conf, rpcHandler, false, false);
}
public TransportContext(
TransportConf conf,
RpcHandler rpcHandler,
boolean closeIdleConnections) {
this(conf, rpcHandler, closeIdleConnections, false);
}
/**
* Enables TransportContext initialization for underlying client and server.
*
* @param conf TransportConf
* @param rpcHandler RpcHandler responsible for handling requests and responses.
* @param closeIdleConnections Close idle connections if it is set to true.
* @param isClientOnly This config indicates the TransportContext is only used by a client.
* This config is more important when external shuffle is enabled.
* It stops creating extra event loop and subsequent thread pool
* for shuffle clients to handle chunked fetch requests.
*/
public TransportContext(
TransportConf conf,
RpcHandler rpcHandler,
boolean closeIdleConnections,
boolean isClientOnly) {
this.conf = conf;
this.rpcHandler = rpcHandler;
this.closeIdleConnections = closeIdleConnections;
this.sslFactory = createSslFactory();
if (conf.getModuleName() != null &&
conf.getModuleName().equalsIgnoreCase("shuffle") &&
!isClientOnly && conf.separateChunkFetchRequest()) {
chunkFetchWorkers = NettyUtils.createEventLoop(
IOMode.valueOf(conf.ioMode()),
conf.chunkFetchHandlerThreads(),
"shuffle-chunk-fetch-handler");
} else {
chunkFetchWorkers = null;
}
}
/**
* Initializes a ClientFactory which runs the given TransportClientBootstraps prior to returning
* a new Client. Bootstraps will be executed synchronously, and must run successfully in order
* to create a Client.
*/
public TransportClientFactory createClientFactory(List<TransportClientBootstrap> bootstraps) {
return new TransportClientFactory(this, bootstraps);
}
public TransportClientFactory createClientFactory() {
return createClientFactory(new ArrayList<>());
}
/** Create a server which will attempt to bind to a specific port. */
public TransportServer createServer(int port, List<TransportServerBootstrap> bootstraps) {
return new TransportServer(this, null, port, rpcHandler, bootstraps);
}
/** Create a server which will attempt to bind to a specific host and port. */
public TransportServer createServer(
String host, int port, List<TransportServerBootstrap> bootstraps) {
return new TransportServer(this, host, port, rpcHandler, bootstraps);
}
/** Creates a new server, binding to any available ephemeral port. */
public TransportServer createServer(List<TransportServerBootstrap> bootstraps) {
return createServer(0, bootstraps);
}
public TransportServer createServer() {
return createServer(0, new ArrayList<>());
}
public TransportChannelHandler initializePipeline(SocketChannel channel, boolean isClient) {
return initializePipeline(channel, rpcHandler, isClient);
}
public boolean sslEncryptionEnabled() {
return this.sslFactory != null;
}
/**
* Initializes a client or server Netty Channel Pipeline which encodes/decodes messages and
* has a {@link org.apache.spark.network.server.TransportChannelHandler} to handle request or
* response messages.
*
* @param channel The channel to initialize.
* @param channelRpcHandler The RPC handler to use for the channel.
*
* @return Returns the created TransportChannelHandler, which includes a TransportClient that can
* be used to communicate on this channel. The TransportClient is directly associated with a
* ChannelHandler to ensure all users of the same channel get the same TransportClient object.
*/
public TransportChannelHandler initializePipeline(
SocketChannel channel,
RpcHandler channelRpcHandler,
boolean isClient) {
try {
TransportChannelHandler channelHandler = createChannelHandler(channel, channelRpcHandler);
ChannelPipeline pipeline = channel.pipeline();
if (nettyLogger.getLoggingHandler() != null) {
pipeline.addLast("loggingHandler", nettyLogger.getLoggingHandler());
}
if (sslEncryptionEnabled()) {
SslHandler sslHandler;
try {
sslHandler = new SslHandler(sslFactory.createSSLEngine(isClient, channel.alloc()));
} catch (Exception e) {
throw new IllegalStateException("Error creating Netty SslHandler", e);
}
pipeline.addFirst("NettySslEncryptionHandler", sslHandler);
// Cannot use zero-copy with HTTPS, so we add in our ChunkedWriteHandler just before the
// MessageEncoder
pipeline.addLast("chunkedWriter", new ChunkedWriteHandler());
}
pipeline
.addLast("encoder", sslEncryptionEnabled()? SSL_ENCODER : ENCODER)
.addLast(TransportFrameDecoder.HANDLER_NAME, NettyUtils.createFrameDecoder())
.addLast("decoder", getDecoder())
.addLast("idleStateHandler",
new IdleStateHandler(0, 0, conf.connectionTimeoutMs() / 1000))
// NOTE: Chunks are currently guaranteed to be returned in the order of request, but this
// would require more logic to guarantee if this were not part of the same event loop.
.addLast("handler", channelHandler);
// Use a separate EventLoopGroup to handle ChunkFetchRequest messages for shuffle rpcs.
if (chunkFetchWorkers != null) {
ChunkFetchRequestHandler chunkFetchHandler = new ChunkFetchRequestHandler(
channelHandler.getClient(), rpcHandler.getStreamManager(),
conf.maxChunksBeingTransferred(), true /* syncModeEnabled */);
pipeline.addLast(chunkFetchWorkers, "chunkFetchHandler", chunkFetchHandler);
}
return channelHandler;
} catch (RuntimeException e) {
logger.error("Error while initializing Netty pipeline", e);
throw e;
}
}
protected MessageToMessageDecoder<ByteBuf> getDecoder() {
return DECODER;
}
private SSLFactory createSslFactory() {
if (conf.sslRpcEnabled()) {
if (conf.sslRpcEnabledAndKeysAreValid()) {
return new SSLFactory.Builder()
.openSslEnabled(conf.sslRpcOpenSslEnabled())
.requestedProtocol(conf.sslRpcProtocol())
.requestedCiphers(conf.sslRpcRequestedCiphers())
.keyStore(conf.sslRpcKeyStore(), conf.sslRpcKeyStorePassword())
.privateKey(conf.sslRpcPrivateKey())
.privateKeyPassword(conf.sslRpcPrivateKeyPassword())
.keyPassword(conf.sslRpcKeyPassword())
.certChain(conf.sslRpcCertChain())
.trustStore(
conf.sslRpcTrustStore(),
conf.sslRpcTrustStorePassword(),
conf.sslRpcTrustStoreReloadingEnabled(),
conf.sslRpctrustStoreReloadIntervalMs())
.build();
} else {
logger.error("RPC SSL encryption enabled but keys not found!" +
"Please ensure the configured keys are present.");
throw new IllegalArgumentException("RPC SSL encryption enabled but keys not found!");
}
} else {
return null;
}
}
/**
* Creates the server- and client-side handler which is used to handle both RequestMessages and
* ResponseMessages. The channel is expected to have been successfully created, though certain
* properties (such as the remoteAddress()) may not be available yet.
*/
private TransportChannelHandler createChannelHandler(Channel channel, RpcHandler rpcHandler) {
TransportResponseHandler responseHandler = new TransportResponseHandler(channel);
TransportClient client = new TransportClient(channel, responseHandler);
boolean separateChunkFetchRequest = conf.separateChunkFetchRequest();
ChunkFetchRequestHandler chunkFetchRequestHandler = null;
if (!separateChunkFetchRequest) {
chunkFetchRequestHandler = new ChunkFetchRequestHandler(
client, rpcHandler.getStreamManager(),
conf.maxChunksBeingTransferred(), false /* syncModeEnabled */);
}
TransportRequestHandler requestHandler = new TransportRequestHandler(channel, client,
rpcHandler, conf.maxChunksBeingTransferred(), chunkFetchRequestHandler);
return new TransportChannelHandler(client, responseHandler, requestHandler,
conf.connectionTimeoutMs(), separateChunkFetchRequest, closeIdleConnections, this);
}
public TransportConf getConf() { return conf; }
public Counter getRegisteredConnections() {
return registeredConnections;
}
@Override
public void close() {
if (chunkFetchWorkers != null) {
chunkFetchWorkers.shutdownGracefully();
}
if (sslFactory != null) {
sslFactory.destroy();
}
}
}
|
again
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/processor/internals/StreamTaskTest.java
|
{
"start": 134152,
"end": 149637
}
|
class ____ implements ProcessingExceptionHandler {
@Override
public Response handleError(final ErrorHandlerContext context, final Record<?, ?> record, final Exception exception) {
return null;
}
@Override
public void configure(final Map<String, ?> configs) {
// No-op
}
}
private ProcessorStateManager mockStateManager() {
final ProcessorStateManager manager = mock(ProcessorStateManager.class);
doReturn(TaskType.ACTIVE).when(manager).taskType();
doReturn(taskId).when(manager).taskId();
return manager;
}
private List<MetricName> getTaskMetrics() {
return metrics.metrics().keySet().stream().filter(m -> m.tags().containsKey("task-id")).collect(Collectors.toList());
}
private StreamTask createOptimizedStatefulTask(final StreamsConfig config, final Consumer<byte[], byte[]> consumer) {
final StateStore stateStore = new MockKeyValueStore(storeName, true);
final ProcessorTopology topology = ProcessorTopologyFactories.with(
singletonList(source1),
mkMap(mkEntry(topic1, source1)),
singletonList(stateStore),
Collections.singletonMap(storeName, topic1));
final InternalProcessorContext<?, ?> context = new ProcessorContextImpl(
taskId,
config,
stateManager,
streamsMetrics,
null
);
return new StreamTask(
taskId,
Set.of(partition1),
topology,
consumer,
new TopologyConfig(null, config, new Properties()).getTaskConfig(),
streamsMetrics,
stateDirectory,
cache,
time,
stateManager,
recordCollector,
context,
logContext,
false
);
}
private StreamTask createDisconnectedTask(final StreamsConfig config) {
final MockKeyValueStore stateStore = new MockKeyValueStore(storeName, false);
final ProcessorTopology topology = ProcessorTopologyFactories.with(
asList(source1, source2),
mkMap(mkEntry(topic1, source1), mkEntry(topic2, source2)),
singletonList(stateStore),
emptyMap());
final MockConsumer<byte[], byte[]> consumer = new MockConsumer<>(AutoOffsetResetStrategy.EARLIEST.name()) {
@Override
public Map<TopicPartition, OffsetAndMetadata> committed(final Set<TopicPartition> partitions) {
throw new TimeoutException("KABOOM!");
}
};
final InternalProcessorContext<?, ?> context = new ProcessorContextImpl(
taskId,
config,
stateManager,
streamsMetrics,
null
);
return new StreamTask(
taskId,
partitions,
topology,
consumer,
new TopologyConfig(null, config, new Properties()).getTaskConfig(),
streamsMetrics,
stateDirectory,
cache,
time,
stateManager,
recordCollector,
context,
logContext,
false
);
}
private StreamTask createFaultyStatefulTask(final StreamsConfig config) {
final ProcessorTopology topology = ProcessorTopologyFactories.with(
asList(source1, source3),
mkMap(mkEntry(topic1, source1), mkEntry(topic2, source3)),
singletonList(stateStore),
emptyMap()
);
final InternalProcessorContext<?, ?> context = new ProcessorContextImpl(
taskId,
config,
stateManager,
streamsMetrics,
null
);
return new StreamTask(
taskId,
partitions,
topology,
consumer,
new TopologyConfig(null, config, new Properties()).getTaskConfig(),
streamsMetrics,
stateDirectory,
cache,
time,
stateManager,
recordCollector,
context,
logContext,
false
);
}
private StreamTask createStatefulTask(final StreamsConfig config, final boolean logged) {
return createStatefulTask(config, logged, stateManager);
}
private StreamTask createStatefulTask(final StreamsConfig config, final boolean logged, final ProcessorStateManager stateManager) {
final MockKeyValueStore stateStore = new MockKeyValueStore(storeName, logged);
final ProcessorTopology topology = ProcessorTopologyFactories.with(
asList(source1, source2),
mkMap(mkEntry(topic1, source1), mkEntry(topic2, source2)),
singletonList(stateStore),
logged ? Collections.singletonMap(storeName, storeName + "-changelog") : Collections.emptyMap());
final InternalProcessorContext<?, ?> context = new ProcessorContextImpl(
taskId,
config,
stateManager,
streamsMetrics,
null
);
return new StreamTask(
taskId,
partitions,
topology,
consumer,
new TopologyConfig(null, config, new Properties()).getTaskConfig(),
streamsMetrics,
stateDirectory,
cache,
time,
stateManager,
recordCollector,
context,
logContext,
false
);
}
private StreamTask createSingleSourceStateless(final StreamsConfig config) {
final ProcessorTopology topology = withSources(
asList(source1, processorStreamTime, processorSystemTime),
mkMap(mkEntry(topic1, source1))
);
source1.addChild(processorStreamTime);
source1.addChild(processorSystemTime);
final InternalProcessorContext<?, ?> context = new ProcessorContextImpl(
taskId,
config,
stateManager,
streamsMetrics,
null
);
return new StreamTask(
taskId,
Set.of(partition1),
topology,
consumer,
new TopologyConfig(null, config, new Properties()).getTaskConfig(),
new StreamsMetricsImpl(metrics, "test", time),
stateDirectory,
cache,
time,
stateManager,
recordCollector,
context,
logContext,
false
);
}
private StreamTask createStatelessTask(final StreamsConfig config) {
final ProcessorTopology topology = withSources(
asList(source1, source2, processorStreamTime, processorSystemTime),
mkMap(mkEntry(topic1, source1), mkEntry(topic2, source2))
);
source1.addChild(processorStreamTime);
source2.addChild(processorStreamTime);
source1.addChild(processorSystemTime);
source2.addChild(processorSystemTime);
final InternalProcessorContext<?, ?> context = new ProcessorContextImpl(
taskId,
config,
stateManager,
streamsMetrics,
null
);
return new StreamTask(
taskId,
partitions,
topology,
consumer,
new TopologyConfig(null, config, new Properties()).getTaskConfig(),
new StreamsMetricsImpl(metrics, "test", time),
stateDirectory,
cache,
time,
stateManager,
recordCollector,
context,
logContext,
false
);
}
/**
 * Builds a stateless {@link StreamTask} over both sources where records are
 * routed through the "anchored" punctuation processor nodes.
 *
 * @param config                      streams configuration used for the task
 * @param anchoredProcessorSystemTime system-time punctuation node supplied by the
 *                                    caller; NOTE(review): this parameter looks like
 *                                    it shadows a similarly named field — confirm the
 *                                    caller-supplied node is intended to replace only
 *                                    the system-time side
 */
private StreamTask createStatelessTaskWithAnchoredPunctuation(
    final StreamsConfig config,
    final MockProcessorNode<Integer, Integer, ?, ?> anchoredProcessorSystemTime
) {
    final ProcessorTopology topology = withSources(
        asList(source1, source2, anchoredProcessorStreamTime, anchoredProcessorSystemTime),
        mkMap(mkEntry(topic1, source1), mkEntry(topic2, source2))
    );
    // Fan both sources out to both anchored punctuating processors.
    source1.addChild(anchoredProcessorStreamTime);
    source2.addChild(anchoredProcessorStreamTime);
    source1.addChild(anchoredProcessorSystemTime);
    source2.addChild(anchoredProcessorSystemTime);
    final InternalProcessorContext<?, ?> context = new ProcessorContextImpl(
        taskId,
        config,
        stateManager,
        streamsMetrics,
        null
    );
    return new StreamTask(
        taskId,
        partitions,
        topology,
        consumer,
        new TopologyConfig(null, config, new Properties()).getTaskConfig(),
        new StreamsMetricsImpl(metrics, "test", time),
        stateDirectory,
        cache,
        time,
        stateManager,
        recordCollector,
        context,
        logContext,
        false
    );
}
/**
 * Builds a stateless {@link StreamTask} around a caller-supplied source node that
 * forwards into the stream-time punctuation processor, using the default config.
 */
private StreamTask createStatelessTaskWithForwardingTopology(final SourceNode<Integer, Integer> sourceNode) {
    final ProcessorTopology forwardingTopology = withSources(
        asList(sourceNode, processorStreamTime),
        singletonMap(topic1, sourceNode)
    );
    sourceNode.addChild(processorStreamTime);
    final StreamsConfig defaultConfig = createConfig();
    final InternalProcessorContext<?, ?> processorContext = new ProcessorContextImpl(
        taskId,
        defaultConfig,
        stateManager,
        streamsMetrics,
        null
    );
    return new StreamTask(
        taskId,
        singleton(partition1),
        forwardingTopology,
        consumer,
        new TopologyConfig(null, defaultConfig, new Properties()).getTaskConfig(),
        new StreamsMetricsImpl(metrics, "test", time),
        stateDirectory,
        cache,
        time,
        stateManager,
        recordCollector,
        processorContext,
        logContext,
        false
    );
}
/**
 * Initializes the {@code task} field (rather than returning) with a task whose
 * single source is the timeout-throwing source, under the given EOS config.
 */
private void createTimeoutTask(final String eosConfig) {
    final ProcessorTopology timeoutTopology = withSources(
        singletonList(timeoutSource),
        mkMap(mkEntry(topic1, timeoutSource))
    );
    // Second argument "0" — presumably a zero timeout so the timeout path
    // triggers immediately; see createConfig for the parameter's meaning.
    final StreamsConfig timeoutConfig = createConfig(eosConfig, "0");
    final InternalProcessorContext<?, ?> processorContext = new ProcessorContextImpl(
        taskId,
        timeoutConfig,
        stateManager,
        streamsMetrics,
        null
    );
    task = new StreamTask(
        taskId,
        Set.of(partition1),
        timeoutTopology,
        consumer,
        new TopologyConfig(null, timeoutConfig, new Properties()).getTaskConfig(),
        streamsMetrics,
        stateDirectory,
        cache,
        time,
        stateManager,
        recordCollector,
        processorContext,
        logContext,
        false
    );
}
/**
 * Creates a consumer record for the given partition whose timestamp equals its
 * offset, carrying the default key and an integer serialized as the value.
 */
private ConsumerRecord<byte[], byte[]> getConsumerRecordWithOffsetAsTimestamp(final TopicPartition topicPartition,
                                                                              final long offset,
                                                                              final int value) {
    final byte[] serializedValue = intSerializer.serialize(null, value);
    return new ConsumerRecord<>(
        topicPartition.topic(),
        topicPartition.partition(),
        offset,
        offset, // timestamp mirrors the offset for deterministic stream time
        TimestampType.CREATE_TIME,
        0,
        0,
        recordKey,
        serializedValue,
        new RecordHeaders(),
        Optional.empty()
    );
}
/**
 * Creates a consumer record for the given partition whose timestamp equals its
 * offset, carrying the default key and value payloads.
 */
private ConsumerRecord<byte[], byte[]> getConsumerRecordWithOffsetAsTimestamp(final TopicPartition topicPartition,
                                                                              final long offset) {
    final String topic = topicPartition.topic();
    final int partition = topicPartition.partition();
    return new ConsumerRecord<>(
        topic,
        partition,
        offset,
        offset, // timestamp mirrors the offset for deterministic stream time
        TimestampType.CREATE_TIME,
        0,
        0,
        recordKey,
        recordValue,
        new RecordHeaders(),
        Optional.empty()
    );
}
/**
 * Creates a consumer record on {@code topic1}/partition 0 whose timestamp equals
 * its offset, with the given integer key serialized and the default value payload.
 *
 * @param key    record key to serialize
 * @param offset offset, also used as the record timestamp
 */
private ConsumerRecord<byte[], byte[]> getConsumerRecordWithOffsetAsTimestamp(final Integer key, final long offset) {
    return new ConsumerRecord<>(
        topic1,
        0,
        offset,
        offset, // use the offset as the timestamp
        TimestampType.CREATE_TIME,
        0,
        0,
        // Reuse the shared stateless serializer instead of allocating a new
        // IntegerSerializer per call, consistent with the sibling factories.
        intSerializer.serialize(topic1, key),
        recordValue,
        new RecordHeaders(),
        Optional.empty()
    );
}
/**
 * Creates a consumer record on {@code topic1} carrying a negative (invalid)
 * timestamp, used to exercise timestamp-validation error paths.
 */
private ConsumerRecord<byte[], byte[]> getConsumerRecordWithInvalidTimestamp(final long offset) {
    // Negative timestamp — presumably rejected by the timestamp extractor; TODO confirm.
    final long invalidTimestamp = -1L;
    return new ConsumerRecord<>(
        topic1,
        0,
        offset,
        invalidTimestamp,
        TimestampType.CREATE_TIME,
        0,
        0,
        recordKey,
        recordValue,
        new RecordHeaders(),
        Optional.empty()
    );
}
/**
 * Creates a consumer record whose timestamp equals its offset and which carries
 * an explicit leader epoch.
 */
private ConsumerRecord<byte[], byte[]> getConsumerRecordWithOffsetAsTimestampWithLeaderEpoch(final TopicPartition topicPartition,
                                                                                             final long offset,
                                                                                             final int leaderEpoch) {
    final Optional<Integer> epoch = Optional.of(leaderEpoch);
    return new ConsumerRecord<>(
        topicPartition.topic(),
        topicPartition.partition(),
        offset,
        offset, // timestamp mirrors the offset
        TimestampType.CREATE_TIME,
        0,
        0,
        recordKey,
        recordValue,
        new RecordHeaders(),
        epoch
    );
}
/**
 * Creates a record whose value bytes are not a parseable integer, used to
 * exercise deserialization-error handling; key/value sizes are -1 (unknown).
 */
private ConsumerRecord<byte[], byte[]> getCorruptedConsumerRecordWithOffsetAsTimestamp(final long offset) {
    final byte[] emptyKey = new byte[0];
    // NOTE(review): getBytes() uses the platform charset; harmless for this
    // ASCII literal but an explicit charset would be more robust.
    final byte[] nonIntegerValue = "I am not an integer.".getBytes();
    return new ConsumerRecord<>(
        topic1,
        0,
        offset,
        offset, // timestamp mirrors the offset
        TimestampType.CREATE_TIME,
        -1,
        -1,
        emptyKey,
        nonIntegerValue,
        new RecordHeaders(),
        Optional.empty()
    );
}
/**
 * Registers a cumulative-sum metric on the thread-level "task-closed" sensor and
 * returns its {@link MetricName} so tests can look the metric up afterwards.
 */
private MetricName setupCloseTaskMetric() {
    final MetricName closeTaskMetricName = new MetricName("name", "group", "description", Collections.emptyMap());
    streamsMetrics
        .threadLevelSensor(threadId, "task-closed", Sensor.RecordingLevel.INFO)
        .add(closeTaskMetricName, new CumulativeSum());
    return closeTaskMetricName;
}
/**
 * Asserts that the metric identified by {@code metricName} currently measures
 * {@code expected} when sampled at the present wall-clock time.
 */
private void verifyCloseTaskMetric(final double expected, final StreamsMetricsImpl streamsMetrics, final MetricName metricName) {
    final KafkaMetric closeMetric = (KafkaMetric) streamsMetrics.metrics().get(metricName);
    final double measured = closeMetric.measurable().measure(closeMetric.config(), System.currentTimeMillis());
    assertThat(measured, equalTo(expected));
}
}
|
NullProcessingExceptionHandler
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/main/java/org/springframework/messaging/handler/annotation/reactive/DestinationVariableMethodArgumentResolver.java
|
{
"start": 2934,
"end": 3153
}
|
class ____ extends NamedValueInfo {
private DestinationVariableNamedValueInfo(DestinationVariable annotation) {
super(annotation.value(), true, ValueConstants.DEFAULT_NONE);
}
}
}
|
DestinationVariableNamedValueInfo
|
java
|
google__guava
|
android/guava/src/com/google/common/cache/LocalCache.java
|
{
"start": 33193,
"end": 34324
}
|
class ____<K, V> extends StrongEntry<K, V> {
StrongAccessEntry(K key, int hash, @Nullable ReferenceEntry<K, V> next) {
super(key, hash, next);
}
// The code below is exactly the same for each access entry type.
volatile long accessTime = Long.MAX_VALUE;
@Override
public long getAccessTime() {
return accessTime;
}
@Override
public void setAccessTime(long time) {
this.accessTime = time;
}
// Guarded By Segment.this
@Weak ReferenceEntry<K, V> nextAccess = nullEntry();
@Override
public ReferenceEntry<K, V> getNextInAccessQueue() {
return nextAccess;
}
@Override
public void setNextInAccessQueue(ReferenceEntry<K, V> next) {
this.nextAccess = next;
}
// Guarded By Segment.this
@Weak ReferenceEntry<K, V> previousAccess = nullEntry();
@Override
public ReferenceEntry<K, V> getPreviousInAccessQueue() {
return previousAccess;
}
@Override
public void setPreviousInAccessQueue(ReferenceEntry<K, V> previous) {
this.previousAccess = previous;
}
}
static final
|
StrongAccessEntry
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/SqlArithmeticOperation.java
|
{
"start": 763,
"end": 1249
}
|
class ____ extends ArithmeticOperation {
private DataType dataType;
public SqlArithmeticOperation(Source source, Expression left, Expression right, BinaryArithmeticOperation operation) {
super(source, left, right, operation);
}
@Override
public DataType dataType() {
if (dataType == null) {
dataType = SqlDataTypeConverter.commonType(left().dataType(), right().dataType());
}
return dataType;
}
}
|
SqlArithmeticOperation
|
java
|
apache__camel
|
components/camel-jetty/src/test/java/org/apache/camel/component/jetty/rest/RestJettyBindingModeJsonTest.java
|
{
"start": 1277,
"end": 3036
}
|
class ____ extends BaseJettyTest {
@Test
public void testBindingMode() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:input");
mock.expectedMessageCount(1);
mock.message(0).body().isInstanceOf(UserJaxbPojo.class);
String body = "{\"id\": 123, \"name\": \"Donald Duck\"}";
template.sendBody("http://localhost:" + getPort() + "/users/new", body);
MockEndpoint.assertIsSatisfied(context);
UserJaxbPojo user = mock.getReceivedExchanges().get(0).getIn().getBody(UserJaxbPojo.class);
assertNotNull(user);
assertEquals(123, user.getId());
assertEquals("Donald Duck", user.getName());
}
@Test
public void testBindingModeWrong() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:input");
mock.expectedMessageCount(0);
// we bind to json, but send in xml, which is not possible
String body = "<user name=\"Donald Duck\" id=\"123\"></user>";
try {
template.sendBody("http://localhost:" + getPort() + "/users/new", body);
fail("Should have thrown exception");
} catch (Exception e) {
// expected
}
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
restConfiguration().component("jetty").host("localhost").port(getPort()).bindingMode(RestBindingMode.json);
// use the rest DSL to define the rest services
rest("/users/").post("new").type(UserJaxbPojo.class).to("mock:input");
}
};
}
}
|
RestJettyBindingModeJsonTest
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/resource/basic/resource/SubResourceLocatorPlatformServiceResource.java
|
{
"start": 195,
"end": 477
}
|
interface ____ {
@Path("/users/{user}")
SubResourceLocatorUserResource getUserService(
@HeaderParam("entity") String entity,
@HeaderParam("ticket") String ticket,
@PathParam("user") String userId);
}
|
SubResourceLocatorPlatformServiceResource
|
java
|
apache__camel
|
components/camel-webhook/src/test/java/org/apache/camel/component/webhook/WebhookMultiRouteTest.java
|
{
"start": 1141,
"end": 2636
}
|
class ____ extends WebhookTestBase {
@Test
public void testMultiRoute() {
String result = template.requestBody("netty-http:http://localhost:" + port
+ WebhookConfiguration.computeDefaultPath("wb-delegate://yy"),
"", String.class);
assertEquals("uri: webhook", result);
result = template.requestBody("netty-http:http://localhost:" + port
+ WebhookConfiguration.computeDefaultPath("wb-delegate://xx"),
"", String.class);
assertEquals("msg: webhook", result);
}
@Override
protected void bindToRegistry(Registry registry) {
registry.bind("wb-delegate-component", new TestComponent(endpoint -> {
endpoint.setWebhookHandler(proc -> ex -> {
ex.getMessage().setBody("webhook");
proc.process(ex);
});
}));
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
restConfiguration()
.host("0.0.0.0")
.port(port);
from("webhook:wb-delegate://yy")
.transform(body().prepend("uri: "));
from("webhook:wb-delegate://xx")
.transform(body().prepend("msg: "));
}
};
}
}
|
WebhookMultiRouteTest
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/XQueryEndpointBuilderFactory.java
|
{
"start": 46209,
"end": 55214
}
|
interface ____ extends EndpointProducerBuilder {
default XQueryEndpointProducerBuilder basic() {
return (XQueryEndpointProducerBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedXQueryEndpointProducerBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedXQueryEndpointProducerBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* To use a custom Saxon configuration.
*
* The option is a: <code>net.sf.saxon.Configuration</code> type.
*
* Group: advanced
*
* @param configuration the value to set
* @return the dsl builder
*/
default AdvancedXQueryEndpointProducerBuilder configuration(net.sf.saxon.Configuration configuration) {
doSetProperty("configuration", configuration);
return this;
}
/**
* To use a custom Saxon configuration.
*
* The option will be converted to a
* <code>net.sf.saxon.Configuration</code> type.
*
* Group: advanced
*
* @param configuration the value to set
* @return the dsl builder
*/
default AdvancedXQueryEndpointProducerBuilder configuration(String configuration) {
doSetProperty("configuration", configuration);
return this;
}
/**
* To set custom Saxon configuration properties.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
*
* Group: advanced
*
* @param configurationProperties the value to set
* @return the dsl builder
*/
default AdvancedXQueryEndpointProducerBuilder configurationProperties(Map<java.lang.String, java.lang.Object> configurationProperties) {
doSetProperty("configurationProperties", configurationProperties);
return this;
}
/**
* To set custom Saxon configuration properties.
*
* The option will be converted to a
* <code>java.util.Map<java.lang.String, java.lang.Object></code>
* type.
*
* Group: advanced
*
* @param configurationProperties the value to set
* @return the dsl builder
*/
default AdvancedXQueryEndpointProducerBuilder configurationProperties(String configurationProperties) {
doSetProperty("configurationProperties", configurationProperties);
return this;
}
/**
* To use the custom ModuleURIResolver.
*
* The option is a: <code>net.sf.saxon.lib.ModuleURIResolver</code>
* type.
*
* Group: advanced
*
* @param moduleURIResolver the value to set
* @return the dsl builder
*/
default AdvancedXQueryEndpointProducerBuilder moduleURIResolver(net.sf.saxon.lib.ModuleURIResolver moduleURIResolver) {
doSetProperty("moduleURIResolver", moduleURIResolver);
return this;
}
/**
* To use the custom ModuleURIResolver.
*
* The option will be converted to a
* <code>net.sf.saxon.lib.ModuleURIResolver</code> type.
*
* Group: advanced
*
* @param moduleURIResolver the value to set
* @return the dsl builder
*/
default AdvancedXQueryEndpointProducerBuilder moduleURIResolver(String moduleURIResolver) {
doSetProperty("moduleURIResolver", moduleURIResolver);
return this;
}
/**
* Additional parameters.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
*
* Group: advanced
*
* @param parameters the value to set
* @return the dsl builder
*/
default AdvancedXQueryEndpointProducerBuilder parameters(Map<java.lang.String, java.lang.Object> parameters) {
doSetProperty("parameters", parameters);
return this;
}
/**
* Additional parameters.
*
* The option will be converted to a
* <code>java.util.Map<java.lang.String, java.lang.Object></code>
* type.
*
* Group: advanced
*
* @param parameters the value to set
* @return the dsl builder
*/
default AdvancedXQueryEndpointProducerBuilder parameters(String parameters) {
doSetProperty("parameters", parameters);
return this;
}
/**
* Properties to configure the serialization parameters.
*
* The option is a: <code>java.util.Properties</code> type.
*
* Group: advanced
*
* @param properties the value to set
* @return the dsl builder
*/
default AdvancedXQueryEndpointProducerBuilder properties(Properties properties) {
doSetProperty("properties", properties);
return this;
}
/**
* Properties to configure the serialization parameters.
*
* The option will be converted to a <code>java.util.Properties</code>
* type.
*
* Group: advanced
*
* @param properties the value to set
* @return the dsl builder
*/
default AdvancedXQueryEndpointProducerBuilder properties(String properties) {
doSetProperty("properties", properties);
return this;
}
/**
* To use a custom Saxon StaticQueryContext.
*
* The option is a: <code>net.sf.saxon.query.StaticQueryContext</code>
* type.
*
* Group: advanced
*
* @param staticQueryContext the value to set
* @return the dsl builder
*/
default AdvancedXQueryEndpointProducerBuilder staticQueryContext(net.sf.saxon.query.StaticQueryContext staticQueryContext) {
doSetProperty("staticQueryContext", staticQueryContext);
return this;
}
/**
* To use a custom Saxon StaticQueryContext.
*
* The option will be converted to a
* <code>net.sf.saxon.query.StaticQueryContext</code> type.
*
* Group: advanced
*
* @param staticQueryContext the value to set
* @return the dsl builder
*/
default AdvancedXQueryEndpointProducerBuilder staticQueryContext(String staticQueryContext) {
doSetProperty("staticQueryContext", staticQueryContext);
return this;
}
}
/**
* Builder for endpoint for the XQuery component.
*/
public
|
AdvancedXQueryEndpointProducerBuilder
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/internal/entities/mapper/relation/CommonCollectionMapperData.java
|
{
"start": 482,
"end": 1677
}
|
class ____ {
private final String versionsMiddleEntityName;
private final PropertyData collectionReferencingPropertyData;
private final MiddleIdData referencingIdData;
private final RelationQueryGenerator queryGenerator;
private final String collectionRole;
public CommonCollectionMapperData(
String versionsMiddleEntityName,
PropertyData collectionReferencingPropertyData,
MiddleIdData referencingIdData,
RelationQueryGenerator queryGenerator,
String collectionRole) {
this.versionsMiddleEntityName = versionsMiddleEntityName;
this.collectionReferencingPropertyData = collectionReferencingPropertyData;
this.referencingIdData = referencingIdData;
this.queryGenerator = queryGenerator;
this.collectionRole = collectionRole;
}
public String getVersionsMiddleEntityName() {
return versionsMiddleEntityName;
}
public PropertyData getCollectionReferencingPropertyData() {
return collectionReferencingPropertyData;
}
public MiddleIdData getReferencingIdData() {
return referencingIdData;
}
public RelationQueryGenerator getQueryGenerator() {
return queryGenerator;
}
public String getRole() {
return collectionRole;
}
}
|
CommonCollectionMapperData
|
java
|
netty__netty
|
codec-http3/src/test/java/io/netty/handler/codec/http3/Http3FrameTypeValidationHandlerTest.java
|
{
"start": 855,
"end": 1673
}
|
class ____ extends
AbstractHttp3FrameTypeValidationHandlerTest<Http3RequestStreamFrame> {
Http3FrameTypeValidationHandlerTest(boolean isInbound, boolean isOutbound) {
super(QuicStreamType.BIDIRECTIONAL, isInbound, isOutbound);
}
@Override
protected ChannelHandler newHandler(boolean server) {
return new Http3FrameTypeDuplexValidationHandler<>(Http3RequestStreamFrame.class);
}
@Override
protected List<Http3RequestStreamFrame> newValidFrames() {
return Collections.singletonList(Http3TestUtils.newHttp3RequestStreamFrame());
}
@Override
protected List<Http3Frame> newInvalidFrames() {
return Arrays.asList(Http3TestUtils.newHttp3ControlStreamFrame(), Http3TestUtils.newHttp3PushStreamFrame());
}
}
|
Http3FrameTypeValidationHandlerTest
|
java
|
apache__dubbo
|
dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/mapping/RestRequestHandlerMapping.java
|
{
"start": 2578,
"end": 7128
}
|
class ____ implements RequestHandlerMapping {
private static final Logger LOGGER = LoggerFactory.getLogger(RestRequestHandlerMapping.class);
private final RequestMappingRegistry requestMappingRegistry;
private final ArgumentResolver argumentResolver;
private final TypeConverter typeConverter;
private final ContentNegotiator contentNegotiator;
private final CodecUtils codecUtils;
public RestRequestHandlerMapping(FrameworkModel frameworkModel) {
ScopeBeanFactory beanFactory = frameworkModel.getBeanFactory();
requestMappingRegistry = beanFactory.getOrRegisterBean(DefaultRequestMappingRegistry.class);
argumentResolver = beanFactory.getOrRegisterBean(CompositeArgumentResolver.class);
typeConverter = beanFactory.getOrRegisterBean(GeneralTypeConverter.class);
contentNegotiator = beanFactory.getOrRegisterBean(ContentNegotiator.class);
codecUtils = beanFactory.getOrRegisterBean(CodecUtils.class);
}
@Override
public RequestHandler getRequestHandler(URL url, HttpRequest request, HttpResponse response) {
LOGGER.debug("Received http request: {}", request);
HandlerMeta meta = requestMappingRegistry.lookup(request);
if (meta == null) {
String path = request.attribute(RestConstants.PATH_ATTRIBUTE);
if (RestConstants.SLASH.equals(path) && HttpMethods.OPTIONS.name().equals(request.method())) {
handleOptionsRequest(request);
}
LOGGER.debug("No handler found for http request: {}", request);
return null;
}
String method = request.method();
if (HttpMethods.OPTIONS.name().equals(method)) {
handleOptionsRequest(request);
}
String requestMediaType = request.mediaType();
String responseMediaType = contentNegotiator.negotiate(request, meta);
if (responseMediaType != null) {
response.setContentType(responseMediaType);
} else {
if (requestMediaType != null && !RequestUtils.isFormOrMultiPart(request)) {
responseMediaType = requestMediaType;
} else {
responseMediaType = MediaType.APPLICATION_JSON.getName();
}
}
RestHttpMessageCodec codec = new RestHttpMessageCodec(
request,
response,
meta.getParameters(),
argumentResolver,
typeConverter,
codecUtils.determineHttpMessageEncoder(url, responseMediaType));
if (HttpMethods.supportBody(method) && !RequestUtils.isFormOrMultiPart(request)) {
if (StringUtils.isEmpty(requestMediaType)) {
requestMediaType = responseMediaType;
}
request.setAttribute(
RestConstants.BODY_DECODER_ATTRIBUTE,
codecUtils.determineHttpMessageDecoder(url, requestMediaType));
}
LOGGER.debug("Content-type negotiate result: request='{}', response='{}'", requestMediaType, responseMediaType);
RequestHandler handler = new RequestHandler(meta.getInvoker());
handler.setHasStub(false);
handler.setMethodDescriptor(meta.getMethodDescriptor());
handler.setMethodMetadata(meta.getMethodMetadata());
handler.setServiceDescriptor(meta.getServiceDescriptor());
handler.setHttpMessageDecoder(codec);
handler.setHttpMessageEncoder(codec);
return handler;
}
private static void handleOptionsRequest(HttpRequest request) {
RequestMapping mapping = request.attribute(RestConstants.MAPPING_ATTRIBUTE);
MethodsCondition condition = mapping == null ? null : mapping.getMethodsCondition();
if (condition == null) {
throw new HttpResultPayloadException(HttpResult.builder()
.status(HttpStatus.NO_CONTENT)
.header("allow", "GET,HEAD,POST,PUT,PATCH,DELETE,OPTIONS")
.build());
}
Set<String> methods = condition.getMethods();
if (methods.size() == 1 && methods.contains(HttpMethods.OPTIONS.name())) {
return;
}
throw new HttpResultPayloadException(HttpResult.builder()
.status(HttpStatus.NO_CONTENT)
.header("allow", StringUtils.join(methods, ","))
.build());
}
@Override
public String getType() {
return TripleConstants.TRIPLE_HANDLER_TYPE_REST;
}
}
|
RestRequestHandlerMapping
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.