language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__camel | components/camel-aws/camel-aws2-sns/src/main/java/org/apache/camel/component/aws2/sns/Sns2Constants.java | {
"start": 945,
"end": 1562
} | interface ____ {
@Metadata(description = "The Amazon SNS message ID.", javaType = "String")
String MESSAGE_ID = "CamelAwsSnsMessageId";
@Metadata(description = "The Amazon SNS message subject. If not set, the subject from the\n" +
"`SnsConfiguration` is used.",
javaType = "String")
String SUBJECT = "CamelAwsSnsSubject";
@Metadata(description = "The message structure to use such as json.", javaType = "String")
String MESSAGE_STRUCTURE = "CamelAwsSnsMessageStructure";
String MESSAGE_GROUP_ID_PROPERTY = "CamelAwsSnsMessageGroupId";
}
| Sns2Constants |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-http12/src/main/java/org/apache/dubbo/remoting/http12/HttpHeaderNames.java | {
"start": 954,
"end": 2486
} | enum ____ {
STATUS(PseudoHeaderName.STATUS.value()),
PATH(PseudoHeaderName.PATH.value()),
METHOD(PseudoHeaderName.METHOD.value()),
ACCEPT(io.netty.handler.codec.http.HttpHeaderNames.ACCEPT),
CONTENT_TYPE(io.netty.handler.codec.http.HttpHeaderNames.CONTENT_TYPE),
CONTENT_LENGTH(io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LENGTH),
CONTENT_LANGUAGE(io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LANGUAGE),
TRANSFER_ENCODING(io.netty.handler.codec.http.HttpHeaderNames.TRANSFER_ENCODING),
CACHE_CONTROL(io.netty.handler.codec.http.HttpHeaderNames.CACHE_CONTROL),
LOCATION(io.netty.handler.codec.http.HttpHeaderNames.LOCATION),
HOST(io.netty.handler.codec.http.HttpHeaderNames.HOST),
COOKIE(io.netty.handler.codec.http.HttpHeaderNames.COOKIE),
SET_COOKIE(io.netty.handler.codec.http.HttpHeaderNames.SET_COOKIE),
LAST_MODIFIED(io.netty.handler.codec.http.HttpHeaderNames.LAST_MODIFIED),
TE(io.netty.handler.codec.http.HttpHeaderNames.TE),
CONNECTION(io.netty.handler.codec.http.HttpHeaderNames.CONNECTION),
ALT_SVC("alt-svc");
private final String name;
private final CharSequence key;
HttpHeaderNames(String name) {
this.name = name;
key = AsciiString.cached(name);
}
HttpHeaderNames(CharSequence key) {
name = key.toString();
this.key = key;
}
public String getName() {
return name;
}
public CharSequence getKey() {
return key;
}
}
| HttpHeaderNames |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/IndexedFileAggregatedLogsBlock.java | {
"start": 2577,
"end": 9479
} | class ____ extends LogAggregationHtmlBlock {
private final LogAggregationIndexedFileController fileController;
private final Configuration conf;
@Inject
public IndexedFileAggregatedLogsBlock(ViewContext ctx,
Configuration conf,
LogAggregationIndexedFileController fileController) {
super(ctx);
this.conf = conf;
this.fileController = fileController;
}
@Override
protected void render(Block html) {
BlockParameters params = verifyAndParseParameters(html);
if (params == null) {
return;
}
ApplicationId appId = params.getAppId();
ContainerId containerId = params.getContainerId();
NodeId nodeId = params.getNodeId();
String appOwner = params.getAppOwner();
String logEntity = params.getLogEntity();
long start = params.getStartIndex();
long end = params.getEndIndex();
long startTime = params.getStartTime();
long endTime = params.getEndTime();
List<FileStatus> nodeFiles = null;
try {
nodeFiles = LogAggregationUtils
.getRemoteNodeFileList(conf, appId, appOwner,
this.fileController.getRemoteRootLogDir(),
this.fileController.getRemoteRootLogDirSuffix());
} catch(Exception ex) {
html.h1("Unable to locate any logs for container "
+ containerId.toString());
LOG.error(ex.getMessage());
return;
}
Map<String, Long> checkSumFiles;
try {
checkSumFiles = fileController.parseCheckSumFiles(nodeFiles);
} catch (IOException ex) {
LOG.error("Error getting logs for " + logEntity, ex);
html.h1("Error getting logs for " + logEntity);
return;
}
List<FileStatus> fileToRead;
try {
fileToRead = fileController.getNodeLogFileToRead(nodeFiles,
nodeId.toString(), appId);
} catch (IOException ex) {
LOG.error("Error getting logs for " + logEntity, ex);
html.h1("Error getting logs for " + logEntity);
return;
}
boolean foundLog = false;
String desiredLogType = $(CONTAINER_LOG_TYPE);
try {
for (FileStatus thisNodeFile : fileToRead) {
Long checkSumIndex = checkSumFiles.get(
thisNodeFile.getPath().getName());
long endIndex = -1;
if (checkSumIndex != null) {
endIndex = checkSumIndex.longValue();
}
IndexedLogsMeta indexedLogsMeta = null;
try {
indexedLogsMeta = fileController.loadIndexedLogsMeta(
thisNodeFile.getPath(), endIndex, appId);
} catch (Exception ex) {
// DO NOTHING
LOG.warn("Can not load log meta from the log file:"
+ thisNodeFile.getPath());
continue;
}
if (indexedLogsMeta == null) {
continue;
}
Map<ApplicationAccessType, String> appAcls = indexedLogsMeta.getAcls();
String user = indexedLogsMeta.getUser();
String remoteUser = request().getRemoteUser();
if (!checkAcls(conf, appId, user, appAcls, remoteUser)) {
html.h1().__("User [" + remoteUser
+ "] is not authorized to view the logs for " + logEntity
+ " in log file [" + thisNodeFile.getPath().getName() + "]")
.__();
LOG.error("User [" + remoteUser
+ "] is not authorized to view the logs for " + logEntity);
continue;
}
String compressAlgo = indexedLogsMeta.getCompressName();
List<IndexedFileLogMeta> candidates = new ArrayList<>();
for (IndexedPerAggregationLogMeta logMeta
: indexedLogsMeta.getLogMetas()) {
for (Entry<String, List<IndexedFileLogMeta>> meta
: logMeta.getLogMetas().entrySet()) {
for (IndexedFileLogMeta log : meta.getValue()) {
if (!log.getContainerId().equals(containerId.toString())) {
continue;
}
if (desiredLogType != null && !desiredLogType.isEmpty()
&& !desiredLogType.equals(log.getFileName())) {
continue;
}
candidates.add(log);
}
}
}
if (candidates.isEmpty()) {
continue;
}
foundLog = readContainerLog(compressAlgo, html, thisNodeFile, start,
end, candidates, startTime, endTime, foundLog, logEntity);
}
if (!foundLog) {
if (desiredLogType.isEmpty()) {
html.h1("No logs available for container " + containerId.toString());
} else {
html.h1("Unable to locate '" + desiredLogType
+ "' log for container " + containerId.toString());
}
}
} catch (RuntimeException e) {
throw e;
} catch (Exception ex) {
html.h1().__("Error getting logs for " + logEntity).__();
LOG.error("Error getting logs for " + logEntity, ex);
}
}
private boolean readContainerLog(String compressAlgo, Block html,
FileStatus thisNodeFile, long start, long end,
List<IndexedFileLogMeta> candidates, long startTime, long endTime,
boolean foundLog, String logEntity) throws IOException {
Algorithm compressName = Compression.getCompressionAlgorithmByName(
compressAlgo);
Decompressor decompressor = compressName.getDecompressor();
FileContext fileContext = FileContext.getFileContext(
thisNodeFile.getPath().toUri(), conf);
try (FSDataInputStream fsin = fileContext.open(thisNodeFile.getPath())) {
int bufferSize = 65536;
for (IndexedFileLogMeta candidate : candidates) {
if (candidate.getLastModifiedTime() < startTime
|| candidate.getLastModifiedTime() > endTime) {
continue;
}
byte[] cbuf = new byte[bufferSize];
InputStream in = null;
try {
in = compressName.createDecompressionStream(
new BoundedRangeFileInputStream(fsin, candidate.getStartIndex(),
candidate.getFileCompressedSize()), decompressor,
LogAggregationIndexedFileController.getFSInputBufferSize(conf));
long logLength = candidate.getFileSize();
html.pre().__("\n\n").__();
html.p().__("Log Type: " + candidate.getFileName()).__();
html.p().__("Log Upload Time: " +
Times.format(candidate.getLastModifiedTime())).__();
html.p().__("Log Length: " + Long.toString(logLength)).__();
long[] range = checkParseRange(html, start, end, startTime, endTime,
logLength, candidate.getFileName());
processContainerLog(html, range, in, bufferSize, cbuf);
foundLog = true;
} catch (Exception ex) {
LOG.error("Error getting logs for " + logEntity, ex);
continue;
} finally {
IOUtils.closeStream(in);
}
}
}
return foundLog;
}
}
| IndexedFileAggregatedLogsBlock |
java | apache__camel | components/camel-reactive-streams/src/test/java/org/apache/camel/component/reactive/streams/BackpressurePublisherRoutePolicyTest.java | {
"start": 1551,
"end": 6632
} | class ____ extends BaseReactiveTest {
@Test
public void testThatBackpressureCausesTemporaryRouteStop() throws Exception {
CountDownLatch generationLatch = new CountDownLatch(25);
new RouteBuilder() {
@Override
public void configure() {
ThrottlingInflightRoutePolicy policy = new ThrottlingInflightRoutePolicy();
policy.setMaxInflightExchanges(10);
policy.setScope(ThrottlingInflightRoutePolicy.ThrottlingScope.Route);
policy.setResumePercentOfMax(70);
policy.setLoggingLevel(LoggingLevel.INFO);
from("timer:tick?period=50&repeatCount=35")
.id("policy-route")
.routePolicy(policy)
.process(x -> generationLatch.countDown())
.to("reactive-streams:pub");
}
}.addRoutesToCamelContext(context);
CountDownLatch receptionLatch = new CountDownLatch(35);
Publisher<Exchange> pub = CamelReactiveStreams.get(context()).fromStream("pub", Exchange.class);
TestSubscriber<Exchange> subscriber = new TestSubscriber<Exchange>() {
@Override
public void onNext(Exchange o) {
super.onNext(o);
receptionLatch.countDown();
}
};
subscriber.setInitiallyRequested(10);
pub.subscribe(subscriber);
// Add another (fast) subscription that should not affect the backpressure on the route
Observable.fromPublisher(pub)
.subscribe();
context.start();
generationLatch.await(5, TimeUnit.SECONDS); // after 25 messages are generated
// The number of exchanges should be 10 (requested by the subscriber), so 35-10=25
assertEquals(25, receptionLatch.getCount());
// fire a delayed request from the subscriber (required by camel core)
subscriber.request(1);
Thread.sleep(250);
StatefulService service = (StatefulService) context().getRoute("policy-route").getConsumer();
// ensure the route is stopped or suspended
assertTrue(service.isStopped() || service.isSuspended());
// request all the remaining exchanges
subscriber.request(24);
assertTrue(receptionLatch.await(5, TimeUnit.SECONDS));
// The reception latch has gone to 0
}
@Test
public void testThatRouteRestartsOnUnsubscription() throws Exception {
CountDownLatch generationLatch = new CountDownLatch(25);
new RouteBuilder() {
@Override
public void configure() {
ThrottlingInflightRoutePolicy policy = new ThrottlingInflightRoutePolicy();
policy.setMaxInflightExchanges(10);
policy.setScope(ThrottlingInflightRoutePolicy.ThrottlingScope.Route);
policy.setResumePercentOfMax(70);
policy.setLoggingLevel(LoggingLevel.INFO);
from("timer:tick?period=50") // unbounded
.id("policy-route")
.routePolicy(policy)
.process(x -> generationLatch.countDown())
.to("reactive-streams:pub");
}
}.addRoutesToCamelContext(context);
CountDownLatch receptionLatch = new CountDownLatch(35);
Publisher<Exchange> pub = CamelReactiveStreams.get(context()).fromStream("pub", Exchange.class);
TestSubscriber<Exchange> subscriber = new TestSubscriber<Exchange>() {
@Override
public void onNext(Exchange o) {
super.onNext(o);
receptionLatch.countDown();
}
};
subscriber.setInitiallyRequested(10);
pub.subscribe(subscriber);
// Add another (fast) subscription that should not affect the backpressure on the route
Observable.fromPublisher(pub)
.subscribe();
context.start();
generationLatch.await(5, TimeUnit.SECONDS); // after 25 messages are generated
// The number of exchanges should be 10 (requested by the subscriber), so 35-10=25
assertEquals(25, receptionLatch.getCount());
// fire a delayed request from the subscriber (required by camel core)
subscriber.request(1);
Thread.sleep(250);
StatefulService service = (StatefulService) context().getRoute("policy-route").getConsumer();
// ensure the route is stopped or suspended
assertTrue(service.isStopped() || service.isSuspended());
subscriber.cancel();
// request other exchanges to ensure that the route works
CountDownLatch latch = new CountDownLatch(20);
Observable.fromPublisher(pub)
.subscribe(n -> {
latch.countDown();
});
assertTrue(latch.await(5, TimeUnit.SECONDS));
}
@Override
public boolean isUseRouteBuilder() {
return false;
}
}
| BackpressurePublisherRoutePolicyTest |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RedissonClient.java | {
"start": 969,
"end": 1149
} | interface ____ access
* to all redisson objects with sync/async interface.
*
* @see RedissonReactiveClient
* @see RedissonRxClient
*
* @author Nikita Koksharov
*
*/
public | for |
java | apache__camel | components/camel-activemq/src/main/java/org/apache/camel/component/activemq/converter/ActiveMQMessageConverter.java | {
"start": 1349,
"end": 3585
} | class ____ {
private JmsBinding binding = new JmsBinding();
/**
* Converts the inbound message exchange to an ActiveMQ JMS message
*
* @return the ActiveMQ message
*/
@Converter
public ActiveMQMessage toMessage(Exchange exchange) throws JMSException {
ActiveMQMessage message = createActiveMQMessage(exchange);
getBinding().appendJmsProperties(message, exchange);
return message;
}
/**
* Allows a JMS {@link MessageListener} to be converted to a Camel {@link Processor} so that we can provide better
* <a href="">Bean Integration</a> so that we can use any JMS MessageListener in in Camel as a bean
*
* @param listener the JMS message listener
* @return a newly created Camel Processor which when invoked will invoke
* {@link MessageListener#onMessage(Message)}
*/
@Converter
public Processor toProcessor(final MessageListener listener) {
return new Processor() {
public void process(Exchange exchange) throws Exception {
Message message = toMessage(exchange);
listener.onMessage(message);
}
@Override
public String toString() {
return "Processor of MessageListener: " + listener;
}
};
}
private static ActiveMQMessage createActiveMQMessage(Exchange exchange) throws JMSException {
Object body = exchange.getIn().getBody();
if (body instanceof String) {
ActiveMQTextMessage answer = new ActiveMQTextMessage();
answer.setText((String) body);
return answer;
} else if (body instanceof Serializable) {
ActiveMQObjectMessage answer = new ActiveMQObjectMessage();
answer.setObject((Serializable) body);
return answer;
} else {
return new ActiveMQMessage();
}
}
// Properties
// -------------------------------------------------------------------------
public JmsBinding getBinding() {
return binding;
}
public void setBinding(JmsBinding binding) {
this.binding = binding;
}
}
| ActiveMQMessageConverter |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/descriptor/LifecycleMethodUtilsTests.java | {
"start": 9645,
"end": 9873
} | class ____ {
@BeforeAll
void three() {
}
@BeforeAll
void four() {
}
@AfterAll
void seven() {
}
@AfterAll
void eight() {
}
}
@SuppressWarnings("JUnitMalformedDeclaration")
@NullUnmarked
| TestCaseWithLifecyclePerClass |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/factories/TestValuesRuntimeFunctions.java | {
"start": 10199,
"end": 15159
} | class ____
implements SourceFunction<RowData>, LineageVertexProvider {
private static final String LINEAGE_NAMESPACE =
"values://FromElementSourceFunctionWithWatermark";
/** The (de)serializer to be used for the data elements. */
private final TypeSerializer<RowData> serializer;
/** The actual data elements, in serialized form. */
private final byte[] elementsSerialized;
/** The number of serialized elements. */
private final int numElements;
/** The number of elements emitted already. */
private volatile int numElementsEmitted;
/** WatermarkStrategy to generate watermark generator. */
private final WatermarkStrategy<RowData> watermarkStrategy;
private volatile boolean isRunning = true;
private final String tableName;
private final TerminatingLogic terminating;
public FromElementSourceFunctionWithWatermark(
String tableName,
TypeSerializer<RowData> serializer,
Iterable<RowData> elements,
WatermarkStrategy<RowData> watermarkStrategy,
TerminatingLogic terminating)
throws IOException {
this.tableName = tableName;
this.terminating = terminating;
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputViewStreamWrapper wrapper = new DataOutputViewStreamWrapper(baos);
int count = 0;
try {
for (RowData element : elements) {
serializer.serialize(element, wrapper);
count++;
}
} catch (Exception e) {
throw new IOException(
"Serializing the source elements failed: " + e.getMessage(), e);
}
this.numElements = count;
this.elementsSerialized = baos.toByteArray();
this.watermarkStrategy = watermarkStrategy;
this.serializer = serializer;
}
@Override
public void run(SourceContext<RowData> ctx) throws Exception {
ByteArrayInputStream bais = new ByteArrayInputStream(elementsSerialized);
final DataInputView input = new DataInputViewStreamWrapper(bais);
WatermarkGenerator<RowData> generator =
watermarkStrategy.createWatermarkGenerator(
new WatermarkGeneratorSupplier.Context() {
@Override
public MetricGroup getMetricGroup() {
return null;
}
@Override
public RelativeClock getInputActivityClock() {
return SystemClock.getInstance();
}
});
WatermarkOutput output = new TestValuesWatermarkOutput(ctx);
final Object lock = ctx.getCheckpointLock();
while (isRunning && numElementsEmitted < numElements) {
RowData next;
try {
next = serializer.deserialize(input);
} catch (Exception e) {
throw new IOException(
"Failed to deserialize an element from the source. "
+ "If you are using user-defined serialization (Value and Writable types), check the "
+ "serialization functions.\nSerializer is "
+ serializer,
e);
}
synchronized (lock) {
ctx.collect(next);
numElementsEmitted++;
generator.onEvent(next, Long.MIN_VALUE, output);
generator.onPeriodicEmit(output);
}
}
if (terminating == TerminatingLogic.INFINITE) {
// wait until being canceled
while (isRunning) {
Thread.sleep(100);
}
}
}
@Override
public void cancel() {
isRunning = false;
}
@Override
public LineageVertex getLineageVertex() {
return new SourceLineageVertex() {
@Override
public Boundedness boundedness() {
return Boundedness.BOUNDED;
}
@Override
public List<LineageDataset> datasets() {
return Arrays.asList(
new DefaultLineageDataset(
tableName, LINEAGE_NAMESPACE, new HashMap<>()));
}
};
}
private | FromElementSourceFunctionWithWatermark |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java | {
"start": 9396,
"end": 9532
} | class ____ so the parent node can defer some analysis and writing to
* the sub node.
*/
package org.elasticsearch.painless.node;
| hierarchy |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/constructor/mixed/ConstructorMixedWithSettersMapper.java | {
"start": 367,
"end": 564
} | interface ____ {
ConstructorMixedWithSettersMapper INSTANCE = Mappers.getMapper( ConstructorMixedWithSettersMapper.class );
PersonMixed map(PersonDto dto);
}
| ConstructorMixedWithSettersMapper |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java | {
"start": 7681,
"end": 9139
} | class ____ implements Runnable {
int datasize;
TestRpcService proxy;
Transactions(TestRpcService proxy, int datasize) {
this.proxy = proxy;
this.datasize = datasize;
}
// do two RPC that transfers data.
@Override
public void run() {
Integer[] indata = new Integer[datasize];
Arrays.fill(indata, 123);
TestProtos.ExchangeRequestProto exchangeRequest =
TestProtos.ExchangeRequestProto.newBuilder().addAllValues(
Arrays.asList(indata)).build();
Integer[] outdata = null;
TestProtos.ExchangeResponseProto exchangeResponse;
TestProtos.AddRequestProto addRequest =
TestProtos.AddRequestProto.newBuilder().setParam1(1)
.setParam2(2).build();
TestProtos.AddResponseProto addResponse;
int val = 0;
try {
exchangeResponse = proxy.exchange(null, exchangeRequest);
outdata = new Integer[exchangeResponse.getValuesCount()];
outdata = exchangeResponse.getValuesList().toArray(outdata);
addResponse = proxy.add(null, addRequest);
val = addResponse.getResult();
} catch (ServiceException e) {
assertTrue(false, "Exception from RPC exchange() " + e);
}
assertEquals(indata.length, outdata.length);
assertEquals(3, val);
for (int i = 0; i < outdata.length; i++) {
assertEquals(outdata[i].intValue(), i);
}
}
}
//
// A | Transactions |
java | elastic__elasticsearch | libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileStoreActions.java | {
"start": 884,
"end": 2893
} | class ____ {
@EntitlementTest(expectedAccess = ALWAYS_DENIED)
static void checkGetFileStoreAttributeView() throws IOException {
Files.getFileStore(FileCheckActions.readWriteFile()).getFileStoreAttributeView(FileStoreAttributeView.class);
}
@EntitlementTest(expectedAccess = SERVER_ONLY)
static void checkGetAttribute() throws IOException {
try {
Files.getFileStore(FileCheckActions.readFile()).getAttribute("zfs:compression");
} catch (UnsupportedOperationException e) {
// It's OK if the attribute view is not available or it does not support reading the attribute
}
}
@EntitlementTest(expectedAccess = SERVER_ONLY)
static void checkGetBlockSize() throws IOException {
Files.getFileStore(FileCheckActions.readWriteFile()).getBlockSize();
}
@EntitlementTest(expectedAccess = SERVER_ONLY)
static void checkGetTotalSpace() throws IOException {
Files.getFileStore(FileCheckActions.readWriteFile()).getTotalSpace();
}
@EntitlementTest(expectedAccess = SERVER_ONLY)
static void checkGetUnallocatedSpace() throws IOException {
Files.getFileStore(FileCheckActions.readWriteFile()).getUnallocatedSpace();
}
@EntitlementTest(expectedAccess = SERVER_ONLY)
static void checkGetUsableSpace() throws IOException {
Files.getFileStore(FileCheckActions.readFile()).getUsableSpace();
}
@EntitlementTest(expectedAccess = SERVER_ONLY)
static void checkIsReadOnly() throws IOException {
Files.getFileStore(FileCheckActions.readFile()).isReadOnly();
}
@EntitlementTest(expectedAccess = SERVER_ONLY)
static void checkName() throws IOException {
Files.getFileStore(FileCheckActions.readFile()).name();
}
@EntitlementTest(expectedAccess = SERVER_ONLY)
static void checkType() throws IOException {
Files.getFileStore(FileCheckActions.readFile()).type();
}
private FileStoreActions() {}
}
| FileStoreActions |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/inheritance/version/InheritanceImplicitVersionUpdateTest.java | {
"start": 4657,
"end": 4973
} | class ____ extends ObjectWithUnid {
private String name;
public Employee() {
}
public Employee(String name) {
this.name = name;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
@Entity( name = "CustomEmployee" )
public static | Employee |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/ResteasyReactiveProcessorTest.java | {
"start": 1168,
"end": 1520
} | class ____ {
@GET
public String hello() {
return "test/subresource";
}
}
@Test
public void testSimpleSubresource() {
given().when().get("/test/subresource")
.then()
.statusCode(200)
.body(Matchers.equalTo("test/subresource"));
}
}
| TestSubResource |
java | apache__flink | flink-end-to-end-tests/flink-end-to-end-tests-sql/src/test/java/org/apache/flink/table/sql/PlannerScalaFreeITCase.java | {
"start": 1850,
"end": 3907
} | class ____ extends SqlITCaseBase {
private static final ResolvedSchema SINK_TABLE_SCHEMA =
new ResolvedSchema(
Arrays.asList(
Column.physical("user_name", DataTypes.STRING()),
Column.physical("order_cnt", DataTypes.BIGINT())),
Collections.emptyList(),
UniqueConstraint.primaryKey("pk", Collections.singletonList("user_name")),
Collections.singletonList(
DefaultIndex.newIndex("idx", Collections.singletonList("user_name"))));
private static final DebeziumJsonDeserializationSchema DESERIALIZATION_SCHEMA =
createDebeziumDeserializationSchema(SINK_TABLE_SCHEMA);
public PlannerScalaFreeITCase(String executionMode) {
super(executionMode);
}
@Test
public void testImperativeUdaf() throws Exception {
runAndCheckSQL("scala_free_e2e.sql", Arrays.asList("+I[Bob, 2]", "+I[Alice, 1]"));
}
/** The test data is from {@link org.apache.flink.table.toolbox.TestSourceFunction#DATA}. */
@Test
public void testWatermarkPushDown() throws Exception {
assumeTrue(executionMode.equalsIgnoreCase("streaming"));
runAndCheckSQL("watermark_push_down_e2e.sql", Arrays.asList("+I[Bob, 1]", "+I[Alice, 2]"));
}
@Override
protected List<String> formatRawResult(List<String> rawResult) {
return convertToMaterializedResult(rawResult, SINK_TABLE_SCHEMA, DESERIALIZATION_SCHEMA);
}
@Override
protected void executeSqlStatements(
ClusterController clusterController, List<String> sqlLines, List<URI> dependencies)
throws Exception {
clusterController.submitSQLJob(
new SQLJobSubmission.SQLJobSubmissionBuilder(sqlLines)
.addJar(SQL_TOOL_BOX_JAR)
.addJars(dependencies.toArray(new URI[0]))
.build(),
Duration.ofMinutes(2L));
}
}
| PlannerScalaFreeITCase |
java | quarkusio__quarkus | extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/DisabledCacheTest.java | {
"start": 414,
"end": 1005
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addAsResource(new StringAsset("quarkus.cache.enabled=false"), "application.properties")
.addClass(CachedService.class));
private static final String KEY = "key";
@Inject
CachedService cachedService;
@Test
public void testEnabledFlagProperty() {
assertNotEquals(cachedService.cachedMethod(KEY), cachedService.cachedMethod(KEY));
}
@Dependent
static | DisabledCacheTest |
java | spring-projects__spring-framework | spring-tx/src/main/java/org/springframework/transaction/interceptor/AbstractFallbackTransactionAttributeSource.java | {
"start": 7322,
"end": 7665
} | class ____ the original method.
txAttr = findTransactionAttribute(method.getDeclaringClass());
if (txAttr != null && ClassUtils.isUserLevelMethod(method)) {
return txAttr;
}
}
return null;
}
/**
* Subclasses need to implement this to return the transaction attribute for the
* given class, if any.
* @param clazz the | of |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/MRClientProtocolPB.java | {
"start": 1097,
"end": 1183
} | interface ____ extends MRClientProtocolService.BlockingInterface {
}
| MRClientProtocolPB |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/embeddable/strategy/instantiator/intf/Person.java | {
"start": 578,
"end": 1457
} | class ____ {
@Id
public Integer id;
@Embedded
@EmbeddableInstantiator( NameInstantiator.class )
@Access( AccessType.PROPERTY )
public Name name;
@ElementCollection
@Embedded
@EmbeddableInstantiator( NameInstantiator.class )
@Access( AccessType.PROPERTY )
public Set<Name> aliases;
private Person() {
// for Hibernate use
}
public Person(Integer id, Name name) {
this.id = id;
this.name = name;
}
public Integer getId() {
return id;
}
public Name getName() {
return name;
}
public void setName(Name name) {
this.name = name;
}
public void setId(Integer id) {
this.id = id;
}
public Set<Name> getAliases() {
return aliases;
}
public void setAliases(Set<Name> aliases) {
this.aliases = aliases;
}
public void addAlias(Name alias) {
if ( aliases == null ) {
aliases = new HashSet<>();
}
aliases.add( alias );
}
}
| Person |
java | apache__flink | flink-core/src/test/java/org/apache/flink/util/FlinkUserCodeClassLoadersTest.java | {
"start": 2290,
"end": 3420
} | class ____ with RocksDB related code: the state backend and
// RocksDB itself
final URL childCodePath = getClass().getProtectionDomain().getCodeSource().getLocation();
final URLClassLoader childClassLoader1 =
createParentFirstClassLoader(childCodePath, parentClassLoader);
final URLClassLoader childClassLoader2 =
createParentFirstClassLoader(childCodePath, parentClassLoader);
final String className = FlinkUserCodeClassLoadersTest.class.getName();
final Class<?> clazz1 = Class.forName(className, false, parentClassLoader);
final Class<?> clazz2 = Class.forName(className, false, childClassLoader1);
final Class<?> clazz3 = Class.forName(className, false, childClassLoader2);
assertThat(clazz2).isEqualTo(clazz1);
assertThat(clazz3).isEqualTo(clazz1);
childClassLoader1.close();
childClassLoader2.close();
}
@Test
void testChildFirstClassLoading() throws Exception {
final ClassLoader parentClassLoader = getClass().getClassLoader();
// collect the libraries / | folders |
java | apache__camel | components/camel-dataset/src/generated/java/org/apache/camel/component/dataset/DataSetTestComponentConfigurer.java | {
"start": 729,
"end": 868
} | class ____ extends MockComponentConfigurer implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
}
| DataSetTestComponentConfigurer |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/filter/headers/observation/ObservationClosingWebExceptionHandler.java | {
"start": 1232,
"end": 2059
} | class ____ implements WebExceptionHandler {
private static final LogAccessor log = new LogAccessor(ObservationClosingWebExceptionHandler.class);
@Override
public Mono<Void> handle(ServerWebExchange exchange, Throwable ex) {
Object attribute = exchange.getAttribute(ObservedResponseHttpHeadersFilter.OBSERVATION_STOPPED);
if (attribute == null) {
Observation observation = exchange.getAttribute(ServerWebExchangeUtils.GATEWAY_OBSERVATION_ATTR);
if (observation != null) {
if (log.isDebugEnabled()) {
observation.scoped(() -> log.debug(
() -> "An exception occurred and observation was not previously stopped, will stop it. The exception was ["
+ ex + "]"));
}
observation.error(ex);
observation.stop();
}
}
return Mono.error(ex);
}
}
| ObservationClosingWebExceptionHandler |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/request/QualityValue.java | {
"start": 188,
"end": 3176
} | class ____ extends Number implements Comparable<QualityValue> {
public static final QualityValue NOT_ACCEPTABLE = new QualityValue(0);
public static final QualityValue LOWEST = new QualityValue(1);
public static final QualityValue HIGHEST = new QualityValue(1000);
public static final QualityValue DEFAULT = HIGHEST;
private static final long serialVersionUID = 1L;
private static final String MALFORMED_VALUE_MESSAGE = "Malformed quality value.";
private final int WEIGHT;
private QualityValue(final int weight) {
assert weight >= 0;
assert weight <= 1000;
WEIGHT = weight;
}
/**
* @param qvalue the quality value or {@code null} if undefined.
* @return {@link QualityValue}
*/
public static QualityValue valueOf(String qvalue) {
if (qvalue == null)
return DEFAULT;
return new QualityValue(parseAsInteger(qvalue));
}
public boolean isPrefered() {
return WEIGHT == HIGHEST.WEIGHT;
}
public boolean isAcceptable() {
return WEIGHT != NOT_ACCEPTABLE.WEIGHT;
}
public int compareTo(QualityValue o) {
return WEIGHT - o.WEIGHT;
}
@Override
public boolean equals(Object o) {
if (o == null || o.getClass() != QualityValue.class)
return false;
QualityValue other = (QualityValue) o;
return WEIGHT == other.WEIGHT;
}
@Override
public int hashCode() {
return WEIGHT;
}
@Override
public double doubleValue() {
return (double) WEIGHT / 1000d;
}
@Override
public float floatValue() {
return (float) WEIGHT / 1000f;
}
@Override
public int intValue() {
return WEIGHT;
}
@Override
public long longValue() {
return WEIGHT;
}
private static int parseAsInteger(String value) {
int length = value.length();
if (length == 0 || length > 5)
throw new BadRequestException(MALFORMED_VALUE_MESSAGE);
if (length > 1 && value.charAt(1) != '.')
throw new BadRequestException(MALFORMED_VALUE_MESSAGE);
int firstCharacter = value.codePointAt(0);
if (firstCharacter == '1') {
for (int i = 2; i < length; ++i)
if (value.charAt(i) != '0')
throw new BadRequestException(MALFORMED_VALUE_MESSAGE);
return 1000;
} else if (firstCharacter == '0') {
int weight = 0;
for (int i = 2; i < 5; ++i) {
weight *= 10;
if (i < length) {
int digit = value.codePointAt(i) - '0';
if (digit < 0 || digit > 9)
throw new BadRequestException(MALFORMED_VALUE_MESSAGE);
weight += digit;
}
}
return weight;
} else
throw new BadRequestException(MALFORMED_VALUE_MESSAGE);
}
}
| QualityValue |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/flush/AlwaysFlushTest.java | {
"start": 1881,
"end": 2443
} | class ____ {
@Id
@GeneratedValue
private Long id;
private String name;
@OneToMany(mappedBy = "person", cascade = CascadeType.ALL)
private List<Phone> phones = new ArrayList<>();
public Person() {
}
public Person(String name) {
this.name = name;
}
public Long getId() {
return id;
}
public String getName() {
return name;
}
public List<Phone> getPhones() {
return phones;
}
public void addPhone(Phone phone) {
phones.add(phone);
phone.setPerson(this);
}
}
@Entity(name = "Phone")
public static | Person |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLExportTableStatement.java | {
"start": 284,
"end": 1576
} | class ____ extends SQLStatementImpl {
private SQLExprTableSource table;
private List<SQLAssignItem> partition = new ArrayList<SQLAssignItem>();
private SQLExpr to;
private SQLExpr forReplication;
public SQLExportTableStatement() {
dbType = DbType.hive;
}
public SQLExprTableSource getTable() {
return table;
}
public void setTable(SQLExprTableSource x) {
if (x != null) {
x.setParent(this);
}
this.table = x;
}
public List<SQLAssignItem> getPartition() {
return partition;
}
public SQLExpr getTo() {
return to;
}
public void setTo(SQLExpr x) {
if (x != null) {
x.setParent(this);
}
this.to = x;
}
public SQLExpr getForReplication() {
return forReplication;
}
public void setForReplication(SQLExpr x) {
if (x != null) {
x.setParent(this);
}
this.forReplication = x;
}
protected void accept0(SQLASTVisitor v) {
if (v.visit(this)) {
acceptChild(v, table);
acceptChild(v, partition);
acceptChild(v, to);
acceptChild(v, forReplication);
}
v.endVisit(this);
}
}
| SQLExportTableStatement |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogger.java | {
"start": 24178,
"end": 25103
} | class ____ implements AuditLogger {
static boolean initialized;
static int logCount;
static int unsuccessfulCount;
static short foundPermission;
static String remoteAddr;
private static String lastCommand;
public void initialize(Configuration conf) {
initialized = true;
}
public static void resetLogCount() {
logCount = 0;
unsuccessfulCount = 0;
}
public void logAuditEvent(boolean succeeded, String userName,
InetAddress addr, String cmd, String src, String dst,
FileStatus stat) {
remoteAddr = addr.getHostAddress();
logCount++;
if (!succeeded) {
unsuccessfulCount++;
}
lastCommand = cmd;
if (stat != null) {
foundPermission = stat.getPermission().toShort();
}
}
public static String getLastCommand() {
return lastCommand;
}
}
public static | DummyAuditLogger |
java | grpc__grpc-java | core/src/main/java/io/grpc/internal/PickFirstLoadBalancer.java | {
"start": 7015,
"end": 7471
} | class ____ extends SubchannelPicker {
private final AtomicBoolean connectionRequested = new AtomicBoolean(false);
@Override
public PickResult pickSubchannel(PickSubchannelArgs args) {
if (connectionRequested.compareAndSet(false, true)) {
helper.getSynchronizationContext().execute(PickFirstLoadBalancer.this::requestConnection);
}
return PickResult.withNoResult();
}
}
public static final | RequestConnectionPicker |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/bean/override/mockito/MockitoBeanDuplicateTypeReplacementIntegrationTests.java | {
"start": 2001,
"end": 3287
} | class ____ {
@MockitoBean(reset = AFTER)
ExampleService mock1;
@MockitoBean(reset = BEFORE)
ExampleService mock2;
@Autowired
List<ExampleService> services;
/**
* One could argue that we would ideally expect an exception to be thrown when
* two competing mocks are created to replace the same existing bean; however,
* we currently only log a warning in such cases.
* <p>This method therefore asserts the status quo in terms of behavior.
* <p>And the log can be manually checked to verify that an appropriate
* warning was logged.
*/
@Test
void onlyOneMockShouldHaveBeenCreated() {
// Currently logs something similar to the following.
//
// WARN - Bean with name 'exampleService' was overridden by multiple handlers:
// [MockitoBeanOverrideHandler@5478ce1e ..., MockitoBeanOverrideHandler@5edc70ed ...]
// Last one wins: there's only one physical mock
assertThat(services).containsExactly(mock2);
assertThat(mock1).isSameAs(mock2);
assertIsMock(mock2);
assertThat(MockReset.get(mock2)).as("MockReset").isEqualTo(BEFORE);
assertThat(mock2.greeting()).isNull();
given(mock2.greeting()).willReturn("mocked");
assertThat(mock2.greeting()).isEqualTo("mocked");
}
@Configuration
static | MockitoBeanDuplicateTypeReplacementIntegrationTests |
java | apache__maven | its/core-it-support/core-it-plugins/maven-it-plugin-class-loader/maven-it-plugin-class-loader/src/main/java/org/apache/maven/plugin/coreit/AbstractLoadMojo.java | {
"start": 1580,
"end": 2824
} | class ____ <code>QCN</code> that was
* successfully loaded, the generated properties files will contain a key named <code>QCN</code>. The value of this
* key will be the hash code of the requested class. In addition, a key named <code>QCN.methods</code> holds the
* comma separated list of all public methods declared directly in that class, in alphabetic order and possibly with
* duplicates to account for overloaded methods.
*/
@Parameter(property = "clsldr.classNames")
protected String classNames;
/**
* The comma separated set of resources to load. For each specified absolute resource path <code>ARP</code> that was
* successfully loaded, the generated properties files will contain a key named <code>ARP</code> whose value gives
* the URL to the resource. In addition, the keys <code>ARP.count</code>, <code>ARP.0</code>, <code>ARP.1</code>
* etc. will enumerate all URLs matching the resource name.
*/
@Parameter(property = "clsldr.resourcePaths")
protected String resourcePaths;
/**
* Loads the classes/resources.
*
* @param outputFile The path to the properties file to generate, must not be <code>null</code>.
* @param classLoader The | name |
java | spring-projects__spring-framework | spring-webflux/src/main/java/org/springframework/web/reactive/result/condition/PatternsRequestCondition.java | {
"start": 1448,
"end": 6553
} | class ____ extends AbstractRequestCondition<PatternsRequestCondition> {
private static final SortedSet<PathPattern> EMPTY_PATH_PATTERN =
new TreeSet<>(Collections.singleton(PathPatternParser.defaultInstance.parse("")));
private static final Set<String> EMPTY_PATH = Collections.singleton("");
private static final SortedSet<PathPattern> ROOT_PATH_PATTERNS =
new TreeSet<>(List.of(new PathPatternParser().parse(""), new PathPatternParser().parse("/")));
private final SortedSet<PathPattern> patterns;
/**
* Creates a new instance with the given URL patterns.
* @param patterns 0 or more URL patterns; if 0 the condition will match to every request.
*/
public PatternsRequestCondition(PathPattern... patterns) {
this(ObjectUtils.isEmpty(patterns) ? Collections.emptyList() : Arrays.asList(patterns));
}
/**
* Creates a new instance with the given URL patterns.
*/
public PatternsRequestCondition(List<PathPattern> patterns) {
this.patterns = (patterns.isEmpty() ? EMPTY_PATH_PATTERN : new TreeSet<>(patterns));
}
private PatternsRequestCondition(SortedSet<PathPattern> patterns) {
this.patterns = patterns;
}
public Set<PathPattern> getPatterns() {
return this.patterns;
}
@Override
protected Collection<PathPattern> getContent() {
return this.patterns;
}
@Override
protected String getToStringInfix() {
return " || ";
}
/**
* Whether the condition is the "" (empty path) mapping.
* @since 6.0.10
*/
public boolean isEmptyPathMapping() {
return (this.patterns == EMPTY_PATH_PATTERN);
}
/**
* Return the mapping paths that are not patterns.
* @since 5.3
*/
public Set<String> getDirectPaths() {
if (isEmptyPathMapping()) {
return EMPTY_PATH;
}
Set<String> result = Collections.emptySet();
for (PathPattern pattern : this.patterns) {
if (!pattern.hasPatternSyntax()) {
result = (result.isEmpty() ? new HashSet<>(1) : result);
result.add(pattern.getPatternString());
}
}
return result;
}
/**
* Combine the patterns of the current and of the other instances as follows:
* <ul>
* <li>If only one instance has patterns, use those.
* <li>If both have patterns, combine patterns from "this" instance with
* patterns from the other instance via {@link PathPattern#combine(PathPattern)}.
* <li>If neither has patterns, use {@code ""} and {@code "/"} as root path patterns.
* </ul>
*/
@Override
public PatternsRequestCondition combine(PatternsRequestCondition other) {
if (isEmptyPathMapping() && other.isEmptyPathMapping()) {
return new PatternsRequestCondition(ROOT_PATH_PATTERNS);
}
else if (other.isEmptyPathMapping()) {
return this;
}
else if (isEmptyPathMapping()) {
return other;
}
else {
SortedSet<PathPattern> combined = new TreeSet<>();
for (PathPattern pattern1 : this.patterns) {
for (PathPattern pattern2 : other.patterns) {
combined.add(pattern1.combine(pattern2));
}
}
return new PatternsRequestCondition(combined);
}
}
/**
* Checks if any of the patterns match the given request and returns an instance
* that is guaranteed to contain matching patterns, sorted.
* @param exchange the current exchange
* @return the same instance if the condition contains no patterns;
* or a new condition with sorted matching patterns;
* or {@code null} if no patterns match.
*/
@Override
public @Nullable PatternsRequestCondition getMatchingCondition(ServerWebExchange exchange) {
SortedSet<PathPattern> matches = getMatchingPatterns(exchange);
return (matches != null ? new PatternsRequestCondition(matches) : null);
}
private @Nullable SortedSet<PathPattern> getMatchingPatterns(ServerWebExchange exchange) {
PathContainer lookupPath = exchange.getRequest().getPath().pathWithinApplication();
TreeSet<PathPattern> result = null;
for (PathPattern pattern : this.patterns) {
if (pattern.matches(lookupPath)) {
result = (result != null ? result : new TreeSet<>());
result.add(pattern);
}
}
return result;
}
/**
* Compare the two conditions based on the URL patterns they contain.
* Patterns are compared one at a time, from top to bottom. If all compared
* patterns match equally, but one instance has more patterns, it is
* considered a closer match.
* <p>It is assumed that both instances have been obtained via
* {@link #getMatchingCondition(ServerWebExchange)} to ensure they
* contain only patterns that match the request and are sorted with
* the best matches on top.
*/
@Override
public int compareTo(PatternsRequestCondition other, ServerWebExchange exchange) {
Iterator<PathPattern> iterator = this.patterns.iterator();
Iterator<PathPattern> iteratorOther = other.getPatterns().iterator();
while (iterator.hasNext() && iteratorOther.hasNext()) {
int result = PathPattern.SPECIFICITY_COMPARATOR.compare(iterator.next(), iteratorOther.next());
if (result != 0) {
return result;
}
}
if (iterator.hasNext()) {
return -1;
}
else if (iteratorOther.hasNext()) {
return 1;
}
else {
return 0;
}
}
}
| PatternsRequestCondition |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CombinedResourceCalculator.java | {
"start": 1291,
"end": 3319
} | class ____ extends ResourceCalculatorProcessTree {
private final List<ResourceCalculatorProcessTree> resourceCalculators;
private final ProcfsBasedProcessTree procfsBasedProcessTree;
public CombinedResourceCalculator(String pid) {
super(pid);
this.procfsBasedProcessTree = new ProcfsBasedProcessTree(pid);
this.resourceCalculators = Arrays.asList(
new CGroupsV2ResourceCalculator(pid),
new CGroupsResourceCalculator(pid),
procfsBasedProcessTree
);
}
@Override
public void initialize() throws YarnException {
for (ResourceCalculatorProcessTree calculator : resourceCalculators) {
calculator.initialize();
}
}
@Override
public void updateProcessTree() {
resourceCalculators.stream().parallel()
.forEach(ResourceCalculatorProcessTree::updateProcessTree);
}
@Override
public String getProcessTreeDump() {
return procfsBasedProcessTree.getProcessTreeDump();
}
@Override
public boolean checkPidPgrpidForMatch() {
return procfsBasedProcessTree.checkPidPgrpidForMatch();
}
@Override
public long getVirtualMemorySize(int olderThanAge) {
return procfsBasedProcessTree.getVirtualMemorySize(olderThanAge);
}
@Override
public long getRssMemorySize(int olderThanAge) {
return resourceCalculators.stream()
.map(calculator -> calculator.getRssMemorySize(olderThanAge))
.filter(result -> UNAVAILABLE < result)
.findAny().orElse((long) UNAVAILABLE);
}
@Override
public long getCumulativeCpuTime() {
return resourceCalculators.stream()
.map(ResourceCalculatorProcessTree::getCumulativeCpuTime)
.filter(result -> UNAVAILABLE < result)
.findAny().orElse((long) UNAVAILABLE);
}
@Override
public float getCpuUsagePercent() {
return resourceCalculators.stream()
.map(ResourceCalculatorProcessTree::getCpuUsagePercent)
.filter(result -> UNAVAILABLE < result)
.findAny().orElse((float) UNAVAILABLE);
}
}
| CombinedResourceCalculator |
java | quarkusio__quarkus | core/runtime/src/main/java/io/quarkus/runtime/ShutdownEvent.java | {
"start": 1383,
"end": 1460
} | enum ____ values reflecting the reason for application shutdown.
*/
| with |
java | apache__maven | compat/maven-model-builder/src/main/java/org/apache/maven/model/interpolation/PathTranslatingPostProcessor.java | {
"start": 1232,
"end": 2309
} | class ____ implements InterpolationPostProcessor {
private final Collection<String> unprefixedPathKeys;
private final File projectDir;
private final PathTranslator pathTranslator;
private final List<String> expressionPrefixes;
PathTranslatingPostProcessor(
List<String> expressionPrefixes,
Collection<String> unprefixedPathKeys,
File projectDir,
PathTranslator pathTranslator) {
this.expressionPrefixes = expressionPrefixes;
this.unprefixedPathKeys = unprefixedPathKeys;
this.projectDir = projectDir;
this.pathTranslator = pathTranslator;
}
@Override
public Object execute(String expression, Object value) {
if (value != null) {
expression = ValueSourceUtils.trimPrefix(expression, expressionPrefixes, true);
if (unprefixedPathKeys.contains(expression)) {
return pathTranslator.alignToBaseDirectory(String.valueOf(value), projectDir);
}
}
return null;
}
}
| PathTranslatingPostProcessor |
java | apache__camel | components/camel-google/camel-google-mail/src/main/java/org/apache/camel/component/google/mail/stream/GoogleMailStreamConfiguration.java | {
"start": 1198,
"end": 6512
} | class ____ implements Cloneable {
@UriPath
@Metadata(required = true)
private String index;
@UriParam
private String clientId;
@UriParam(label = "security", secret = true)
private String clientSecret;
@UriParam(label = "security", secret = true)
private String accessToken;
@UriParam(label = "security", secret = true)
private String refreshToken;
@UriParam
private boolean raw;
@UriParam
private String applicationName;
@UriParam(defaultValue = "is:unread")
private String query = "is:unread";
@UriParam(defaultValue = "10")
private long maxResults = 10L;
@UriParam
private String labels;
@UriParam(defaultValue = "true")
private boolean markAsRead = true;
/* Service account */
@UriParam(label = "security")
private String serviceAccountKey;
@UriParam
private String delegate;
@UriParam
private String scopes;
public String getClientId() {
return clientId;
}
/**
* Client ID of the mail application
*/
public void setClientId(String clientId) {
this.clientId = clientId;
}
public String getClientSecret() {
return clientSecret;
}
/**
* Client secret of the mail application
*/
public void setClientSecret(String clientSecret) {
this.clientSecret = clientSecret;
}
public String getAccessToken() {
return accessToken;
}
/**
* OAuth 2 access token. This typically expires after an hour so refreshToken is recommended for long term usage.
*/
public void setAccessToken(String accessToken) {
this.accessToken = accessToken;
}
public String getRefreshToken() {
return refreshToken;
}
/**
* OAuth 2 refresh token. Using this, the Google Mail component can obtain a new accessToken whenever the current
* one expires - a necessity if the application is long-lived.
*/
public void setRefreshToken(String refreshToken) {
this.refreshToken = refreshToken;
}
public boolean isRaw() {
return raw;
}
/**
* Whether to store the entire email message in an RFC 2822 formatted and base64url encoded string (in JSon format),
* in the Camel message body.
*/
public void setRaw(boolean raw) {
this.raw = raw;
}
public String getApplicationName() {
return applicationName;
}
/**
* Google mail application name. Example would be "camel-google-mail/1.0"
*/
public void setApplicationName(String applicationName) {
this.applicationName = applicationName;
}
public String getIndex() {
return index;
}
/**
* Currently not in use
*/
public void setIndex(String index) {
this.index = index;
}
public String getQuery() {
return query;
}
/**
* The query to execute on gmail box
*/
public void setQuery(String query) {
this.query = query;
}
public long getMaxResults() {
return maxResults;
}
/**
* Max results to be returned
*/
public void setMaxResults(long maxResults) {
this.maxResults = maxResults;
}
public String getLabels() {
return labels;
}
/**
* Comma separated list of labels to take into account
*/
public void setLabels(String labels) {
this.labels = labels;
}
public boolean isMarkAsRead() {
return markAsRead;
}
/**
* Mark the message as read once it has been consumed
*/
public void setMarkAsRead(boolean markAsRead) {
this.markAsRead = markAsRead;
}
public String getServiceAccountKey() {
return serviceAccountKey;
}
/**
* Sets "*.json" file with credentials for Service account
*
* @param serviceAccountKey String file, classpath, or http url
*/
public void setServiceAccountKey(String serviceAccountKey) {
this.serviceAccountKey = serviceAccountKey;
}
public String getDelegate() {
return delegate;
}
/**
* Delegate for wide-domain service account
*/
public void setDelegate(String delegate) {
this.delegate = delegate;
}
public String getScopes() {
return scopes;
}
public Collection<String> getScopesAsList() {
if (scopes != null) {
return List.of(scopes.split(","));
} else {
return null;
}
}
/**
* Specifies the level of permissions you want a calendar application to have to a user account. See
* https://developers.google.com/identity/protocols/googlescopes for more info. Multiple scopes can be separated by
* comma.
*
* @see com.google.api.services.gmail.GmailScopes
*/
public void setScopes(String scopes) {
this.scopes = scopes;
}
// *************************************************
//
// *************************************************
public GoogleMailStreamConfiguration copy() {
try {
return (GoogleMailStreamConfiguration) super.clone();
} catch (CloneNotSupportedException e) {
throw new RuntimeCamelException(e);
}
}
}
| GoogleMailStreamConfiguration |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/testutil/WithServiceImplementation.java | {
"start": 739,
"end": 998
} | class ____ is to be made available during the annotation processing.
*/
Class<?> value();
/**
* @return The SPI that the service implementation provides. If omitted, then {@link #value()} is expected to
* implement exactly one | that |
java | apache__spark | core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillMerger.java | {
"start": 953,
"end": 3711
} | class ____ {
private int numRecords = 0;
private final PriorityQueue<UnsafeSorterIterator> priorityQueue;
UnsafeSorterSpillMerger(
RecordComparator recordComparator,
PrefixComparator prefixComparator,
int numSpills) {
Comparator<UnsafeSorterIterator> comparator = (left, right) -> {
int prefixComparisonResult =
prefixComparator.compare(left.getKeyPrefix(), right.getKeyPrefix());
if (prefixComparisonResult == 0) {
return recordComparator.compare(
left.getBaseObject(), left.getBaseOffset(), left.getRecordLength(),
right.getBaseObject(), right.getBaseOffset(), right.getRecordLength());
} else {
return prefixComparisonResult;
}
};
priorityQueue = new PriorityQueue<>(numSpills, comparator);
}
/**
* Add an UnsafeSorterIterator to this merger
*/
public void addSpillIfNotEmpty(UnsafeSorterIterator spillReader) throws IOException {
if (spillReader.hasNext()) {
// We only add the spillReader to the priorityQueue if it is not empty. We do this to
// make sure the hasNext method of UnsafeSorterIterator returned by getSortedIterator
// does not return wrong result because hasNext will return true
// at least priorityQueue.size() times. If we allow n spillReaders in the
// priorityQueue, we will have n extra empty records in the result of UnsafeSorterIterator.
spillReader.loadNext();
priorityQueue.add(spillReader);
numRecords += spillReader.getNumRecords();
}
}
public UnsafeSorterIterator getSortedIterator() throws IOException {
return new UnsafeSorterIterator() {
private UnsafeSorterIterator spillReader;
@Override
public int getNumRecords() {
return numRecords;
}
@Override
public long getCurrentPageNumber() {
throw new UnsupportedOperationException();
}
@Override
public boolean hasNext() {
return !priorityQueue.isEmpty() || (spillReader != null && spillReader.hasNext());
}
@Override
public void loadNext() throws IOException {
if (spillReader != null) {
if (spillReader.hasNext()) {
spillReader.loadNext();
priorityQueue.add(spillReader);
}
}
spillReader = priorityQueue.remove();
}
@Override
public Object getBaseObject() { return spillReader.getBaseObject(); }
@Override
public long getBaseOffset() { return spillReader.getBaseOffset(); }
@Override
public int getRecordLength() { return spillReader.getRecordLength(); }
@Override
public long getKeyPrefix() { return spillReader.getKeyPrefix(); }
};
}
}
| UnsafeSorterSpillMerger |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/CloudtrailEndpointBuilderFactory.java | {
"start": 46167,
"end": 46504
} | class ____ extends AbstractEndpointBuilder implements CloudtrailEndpointBuilder, AdvancedCloudtrailEndpointBuilder {
public CloudtrailEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new CloudtrailEndpointBuilderImpl(path);
}
} | CloudtrailEndpointBuilderImpl |
java | netty__netty | codec-http/src/main/java/io/netty/handler/codec/http/websocketx/extensions/WebSocketExtension.java | {
"start": 756,
"end": 1206
} | interface ____ {
int RSV1 = 0x04;
int RSV2 = 0x02;
int RSV3 = 0x01;
/**
* @return the reserved bit value to ensure that no other extension should interfere.
*/
int rsv();
/**
* @return create the extension encoder.
*/
WebSocketExtensionEncoder newExtensionEncoder();
/**
* @return create the extension decoder.
*/
WebSocketExtensionDecoder newExtensionDecoder();
}
| WebSocketExtension |
java | elastic__elasticsearch | x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java | {
"start": 52439,
"end": 53602
} | class ____ {
final String autoFollowPatternName;
final Exception clusterStateFetchException;
final Map<Index, Exception> autoFollowExecutionResults;
AutoFollowResult(String autoFollowPatternName, List<Tuple<Index, Exception>> results) {
this.autoFollowPatternName = autoFollowPatternName;
Map<Index, Exception> mutableAutoFollowExecutionResults = new HashMap<>();
for (Tuple<Index, Exception> result : results) {
mutableAutoFollowExecutionResults.put(result.v1(), result.v2());
}
this.clusterStateFetchException = null;
this.autoFollowExecutionResults = Collections.unmodifiableMap(mutableAutoFollowExecutionResults);
}
AutoFollowResult(String autoFollowPatternName, Exception e) {
this.autoFollowPatternName = autoFollowPatternName;
this.clusterStateFetchException = e;
this.autoFollowExecutionResults = Collections.emptyMap();
}
AutoFollowResult(String autoFollowPatternName) {
this(autoFollowPatternName, (Exception) null);
}
}
}
| AutoFollowResult |
java | quarkusio__quarkus | extensions/panache/hibernate-orm-panache/deployment/src/test/java/io/quarkus/hibernate/orm/panache/deployment/test/multiple_pu/ErroneousPersistenceUnitConfigTest.java | {
"start": 430,
"end": 973
} | class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.setExpectedException(IllegalStateException.class)
.withApplicationRoot((jar) -> jar
.addClasses(FirstEntity.class, SecondEntity.class, PanacheTestResource.class)
.addAsResource("application-erroneous-multiple-persistence-units.properties", "application.properties"));
@Test
public void shouldNotReachHere() {
Assertions.fail();
}
}
| ErroneousPersistenceUnitConfigTest |
java | quarkusio__quarkus | integration-tests/spring-data-jpa/src/main/java/io/quarkus/it/spring/data/jpa/PersonFragment3Impl.java | {
"start": 47,
"end": 201
} | class ____ implements PersonFragment3 {
@Override
public String getName(Person person) {
return person.getName();
}
}
| PersonFragment3Impl |
java | apache__dubbo | dubbo-metrics/dubbo-metrics-event/src/main/java/org/apache/dubbo/metrics/model/TimePair.java | {
"start": 850,
"end": 1380
} | class ____ {
private final long begin;
private long end;
private static final TimePair empty = new TimePair(0L);
public TimePair(long currentTimeMillis) {
this.begin = currentTimeMillis;
}
public static TimePair start() {
return new TimePair(System.currentTimeMillis());
}
public void end() {
this.end = System.currentTimeMillis();
}
public long calc() {
return end - begin;
}
public static TimePair empty() {
return empty;
}
}
| TimePair |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/ManagementInterfaceSecurityProcessor.java | {
"start": 4877,
"end": 5177
} | class ____ extends SimpleBuildItem {
private final RuntimeValue<AuthenticationHandler> handler;
private ManagementAuthenticationHandlerBuildItem(RuntimeValue<AuthenticationHandler> handler) {
this.handler = handler;
}
}
}
| ManagementAuthenticationHandlerBuildItem |
java | apache__logging-log4j2 | log4j-mongodb4/src/main/java/org/apache/logging/log4j/mongodb4/MongoDb4Connection.java | {
"start": 1631,
"end": 6121
} | class ____ extends AbstractNoSqlConnection<Document, MongoDb4DocumentObject> {
private static final Logger LOGGER = StatusLogger.getLogger();
private static MongoCollection<Document> getOrCreateMongoCollection(
final MongoDatabase database, final String collectionName, final boolean isCapped, final Long sizeInBytes) {
try {
LOGGER.debug("Getting collection '{}'...", collectionName);
// throws IllegalArgumentException if collectionName is invalid
final MongoCollection<Document> found = database.getCollection(collectionName);
LOGGER.debug("Got collection {}", found);
return found;
} catch (final IllegalStateException e) {
LOGGER.debug("Collection '{}' does not exist.", collectionName);
final CreateCollectionOptions options =
new CreateCollectionOptions().capped(isCapped).sizeInBytes(sizeInBytes);
LOGGER.debug("Creating collection '{}' with options {}...", collectionName, options);
database.createCollection(collectionName, options);
LOGGER.debug("Created collection.");
final MongoCollection<Document> created = database.getCollection(collectionName);
LOGGER.debug("Got created collection {}", created);
return created;
}
}
private final ConnectionString connectionString;
private final MongoCollection<Document> collection;
private final MongoClient mongoClient;
/**
* @deprecated Use {@link #MongoDb4Connection(ConnectionString, MongoClient, MongoDatabase, String, boolean, Long)} instead
*/
@Deprecated
public MongoDb4Connection(
final ConnectionString connectionString,
final MongoClient mongoClient,
final MongoDatabase mongoDatabase,
final boolean isCapped,
final Integer sizeInBytes) {
this(
connectionString,
mongoClient,
mongoDatabase,
connectionString.getCollection(),
isCapped,
Long.valueOf(sizeInBytes));
}
/**
* @deprecated Use {@link #MongoDb4Connection(ConnectionString, MongoClient, MongoDatabase, String, boolean, Long)} instead
*/
@Deprecated
public MongoDb4Connection(
final ConnectionString connectionString,
final MongoClient mongoClient,
final MongoDatabase mongoDatabase,
final boolean isCapped,
final Long sizeInBytes) {
this.connectionString = connectionString;
this.mongoClient = mongoClient;
this.collection =
getOrCreateMongoCollection(mongoDatabase, connectionString.getCollection(), isCapped, sizeInBytes);
}
public MongoDb4Connection(
final ConnectionString connectionString,
final MongoClient mongoClient,
final MongoDatabase mongoDatabase,
final String collectionName,
final boolean isCapped,
final Long sizeInBytes) {
this.connectionString = connectionString;
this.mongoClient = mongoClient;
this.collection = getOrCreateMongoCollection(mongoDatabase, collectionName, isCapped, sizeInBytes);
}
@Override
public void closeImpl() {
// LOG4J2-1196
mongoClient.close();
}
@Override
public MongoDb4DocumentObject[] createList(final int length) {
return new MongoDb4DocumentObject[length];
}
@Override
public MongoDb4DocumentObject createObject() {
return new MongoDb4DocumentObject();
}
@Override
public void insertObject(final NoSqlObject<Document> object) {
try {
final Document unwrapped = object.unwrap();
LOGGER.debug("Inserting BSON Document {}", unwrapped);
final InsertOneResult insertOneResult = this.collection.insertOne(unwrapped);
LOGGER.debug("Insert MongoDb result {}", insertOneResult);
} catch (final MongoException e) {
throw new AppenderLoggingException(
"Failed to write log event to MongoDB due to error: " + e.getMessage(), e);
}
}
@Override
public String toString() {
return String.format(
"Mongo4Connection [connectionString=%s, collection=%s, mongoClient=%s]",
connectionString, collection, mongoClient);
}
}
| MongoDb4Connection |
java | spring-projects__spring-framework | framework-docs/src/main/java/org/springframework/docs/web/websocket/stomp/websocketstomporderedmessages/PublishOrderWebSocketConfiguration.java | {
"start": 1091,
"end": 1335
} | class ____ implements WebSocketMessageBrokerConfigurer {
@Override
public void configureMessageBroker(MessageBrokerRegistry registry) {
// ...
registry.setPreservePublishOrder(true);
}
}
// end::snippet[]
| PublishOrderWebSocketConfiguration |
java | jhy__jsoup | src/main/java/org/jsoup/nodes/Document.java | {
"start": 621,
"end": 11483
} | class ____ extends Element {
private @Nullable Connection connection; // the connection this doc was fetched from, if any
private OutputSettings outputSettings = new OutputSettings();
private Parser parser; // the parser used to parse this document
private QuirksMode quirksMode = QuirksMode.noQuirks;
private final String location;
/**
Create a new, empty Document, in the specified namespace.
@param namespace the namespace of this Document's root node.
@param baseUri base URI of document
@see org.jsoup.Jsoup#parse
@see #createShell
*/
public Document(String namespace, String baseUri) {
this(namespace, baseUri, Parser.htmlParser()); // default HTML parser, but overridable
}
private Document(String namespace, String baseUri, Parser parser) {
super(new Tag("#root", namespace), baseUri);
this.location = baseUri;
this.parser = parser;
}
/**
Create a new, empty Document, in the HTML namespace.
@param baseUri base URI of document
@see org.jsoup.Jsoup#parse
@see #Document(String namespace, String baseUri)
*/
public Document(String baseUri) {
this(NamespaceHtml, baseUri);
}
/**
Create a valid, empty shell of an HTML document, suitable for adding more elements to.
@param baseUri baseUri of document
@return document with html, head, and body elements.
*/
public static Document createShell(String baseUri) {
Validate.notNull(baseUri);
Document doc = new Document(baseUri);
Element html = doc.appendElement("html");
html.appendElement("head");
html.appendElement("body");
return doc;
}
/**
* Get the URL this Document was parsed from. If the starting URL is a redirect,
* this will return the final URL from which the document was served from.
* <p>Will return an empty string if the location is unknown (e.g. if parsed from a String).
* @return location
*/
public String location() {
return location;
}
/**
Returns the Connection (Request/Response) object that was used to fetch this document, if any; otherwise, a new
default Connection object. This can be used to continue a session, preserving settings and cookies, etc.
@return the Connection (session) associated with this Document, or an empty one otherwise.
@see Connection#newRequest()
*/
public Connection connection() {
if (connection == null)
return Jsoup.newSession();
else
return connection;
}
/**
* Returns this Document's doctype.
* @return document type, or null if not set
*/
public @Nullable DocumentType documentType() {
for (Node node : childNodes) {
if (node instanceof DocumentType)
return (DocumentType) node;
else if (!(node instanceof LeafNode)) // scans forward across comments, text, processing instructions etc
break;
}
return null;
}
/**
Find the root HTML element, or create it if it doesn't exist.
@return the root HTML element.
*/
private Element htmlEl() {
Element el = firstElementChild();
while (el != null) {
if (el.nameIs("html"))
return el;
el = el.nextElementSibling();
}
return appendElement("html");
}
/**
Get this document's {@code head} element.
<p>
As a side effect, if this Document does not already have an HTML structure, it will be created. If you do not want
that, use {@code #selectFirst("head")} instead.
@return {@code head} element.
*/
public Element head() {
final Element html = htmlEl();
Element el = html.firstElementChild();
while (el != null) {
if (el.nameIs("head"))
return el;
el = el.nextElementSibling();
}
return html.prependElement("head");
}
/**
Get this document's {@code <body>} or {@code <frameset>} element.
<p>
As a <b>side-effect</b>, if this Document does not already have an HTML structure, it will be created with a {@code
<body>} element. If you do not want that, use {@code #selectFirst("body")} instead.
@return {@code body} element for documents with a {@code <body>}, a new {@code <body>} element if the document
had no contents, or the outermost {@code <frameset> element} for frameset documents.
*/
public Element body() {
final Element html = htmlEl();
Element el = html.firstElementChild();
while (el != null) {
if (el.nameIs("body") || el.nameIs("frameset"))
return el;
el = el.nextElementSibling();
}
return html.appendElement("body");
}
/**
Get each of the {@code <form>} elements contained in this document.
@return a List of FormElement objects, which will be empty if there are none.
@see Elements#forms()
@see FormElement#elements()
@since 1.15.4
*/
public List<FormElement> forms() {
return select("form").forms();
}
/**
Selects the first {@link FormElement} in this document that matches the query. If none match, throws an
{@link IllegalArgumentException}.
@param cssQuery a {@link Selector} CSS query
@return the first matching {@code <form>} element
@throws IllegalArgumentException if no match is found
@since 1.15.4
*/
public FormElement expectForm(String cssQuery) {
Elements els = select(cssQuery);
for (Element el : els) {
if (el instanceof FormElement) return (FormElement) el;
}
Validate.fail("No form elements matched the query '%s' in the document.", cssQuery);
return null; // (not really)
}
/**
Get the string contents of the document's {@code title} element.
@return Trimmed title, or empty string if none set.
*/
public String title() {
// title is a preserve whitespace tag (for document output), but normalised here
Element titleEl = head().selectFirst(titleEval);
return titleEl != null ? StringUtil.normaliseWhitespace(titleEl.text()).trim() : "";
}
private static final Evaluator titleEval = new Evaluator.Tag("title");
/**
Set the document's {@code title} element. Updates the existing element, or adds {@code title} to {@code head} if
not present
@param title string to set as title
*/
public void title(String title) {
Validate.notNull(title);
Element titleEl = head().selectFirst(titleEval);
if (titleEl == null) // add to head
titleEl = head().appendElement("title");
titleEl.text(title);
}
/**
Create a new Element, with this document's base uri. Does not make the new element a child of this document.
@param tagName element tag name (e.g. {@code a})
@return new element
*/
public Element createElement(String tagName) {
return new Element(
parser.tagSet().valueOf(tagName, parser.defaultNamespace(), ParseSettings.preserveCase),
searchUpForAttribute(this, BaseUriKey)
);
}
@Override
public String outerHtml() {
return super.html(); // no outer wrapper tag
}
/**
Set the text of the {@code body} of this document. Any existing nodes within the body will be cleared.
@param text un-encoded text
@return this document
*/
@Override
public Element text(String text) {
body().text(text); // overridden to not nuke doc structure
return this;
}
@Override
public String nodeName() {
return "#document";
}
/**
Set the output character set of this Document. This method is equivalent to
{@link OutputSettings#charset(java.nio.charset.Charset) OutputSettings.charset(Charset)}, but additionally adds or
updates the charset / encoding element within the Document.
<p>If there's no existing element with charset / encoding information yet, one will
be created. Obsolete charset / encoding definitions are removed.</p>
<p><b>Elements used:</b></p>
<ul>
<li><b>HTML:</b> <i><meta charset="CHARSET"></i></li>
<li><b>XML:</b> <i><?xml version="1.0" encoding="CHARSET"></i></li>
</ul>
@param charset Charset
@see OutputSettings#charset(java.nio.charset.Charset)
*/
public void charset(Charset charset) {
outputSettings.charset(charset);
ensureMetaCharsetElement();
}
/**
Get the output character set of this Document. This method is equivalent to {@link OutputSettings#charset()}.
@return the current Charset
@see OutputSettings#charset()
*/
public Charset charset() {
return outputSettings.charset();
}
@Override
public Document clone() {
Document clone = (Document) super.clone();
if (attributes != null) clone.attributes = attributes.clone();
clone.outputSettings = this.outputSettings.clone();
// parser is pointer copy
return clone;
}
@Override
public Document shallowClone() {
Document clone = new Document(this.tag().namespace(), baseUri(), parser); // preserves parser pointer
if (attributes != null) clone.attributes = attributes.clone();
clone.outputSettings = this.outputSettings.clone();
return clone;
}
private void ensureMetaCharsetElement() {
OutputSettings.Syntax syntax = outputSettings().syntax();
if (syntax == OutputSettings.Syntax.html) {
Element metaCharset = selectFirst("meta[charset]");
if (metaCharset != null) {
metaCharset.attr("charset", charset().displayName());
} else {
head().appendElement("meta").attr("charset", charset().displayName());
}
select("meta[name=charset]").remove(); // Remove obsolete elements
} else if (syntax == OutputSettings.Syntax.xml) {
XmlDeclaration decl = ensureXmlDecl();
decl.attr("version", "1.0");
decl.attr("encoding", charset().displayName());
}
}
private XmlDeclaration ensureXmlDecl() {
Node node = firstChild();
if (node instanceof XmlDeclaration) {
XmlDeclaration decl = (XmlDeclaration) node;
if (decl.name().equals("xml")) return decl;
}
XmlDeclaration decl = new XmlDeclaration("xml", false);
prependChild(decl);
return decl;
}
/**
* A Document's output settings control the form of the text() and html() methods.
*/
public static | Document |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/parsing/PropertyEntry.java | {
"start": 840,
"end": 1326
} | class ____ implements ParseState.Entry {
private final String name;
/**
* Create a new {@code PropertyEntry} instance.
* @param name the name of the JavaBean property represented by this instance
*/
public PropertyEntry(String name) {
if (!StringUtils.hasText(name)) {
throw new IllegalArgumentException("Invalid property name '" + name + "'");
}
this.name = name;
}
@Override
public String toString() {
return "Property '" + this.name + "'";
}
}
| PropertyEntry |
java | google__guice | core/src/com/google/inject/internal/ProviderMethod.java | {
"start": 4352,
"end": 12312
} | class ____.
}
if (InternalFlags.isBytecodeGenEnabled() && !skipFastClassGeneration) {
try {
BiFunction<Object, Object[], Object> fastMethod = BytecodeGen.fastMethod(method);
if (fastMethod != null) {
return new FastClassProviderMethod<T>(
key, method, instance, dependencies, scopeAnnotation, annotation, fastMethod);
}
} catch (Exception | LinkageError e) {
/* fall-through */
}
}
if (!Modifier.isPublic(modifiers)
|| !Modifier.isPublic(method.getDeclaringClass().getModifiers())) {
method.setAccessible(true);
}
return new ReflectionProviderMethod<T>(
key, method, instance, dependencies, scopeAnnotation, annotation);
}
protected final Object instance;
protected final Method method;
private final Key<T> key;
private final Class<? extends Annotation> scopeAnnotation;
private final ImmutableSet<Dependency<?>> dependencies;
private final boolean exposed;
private final Annotation annotation;
private int circularFactoryId;
/**
* Set by {@link #initialize(InjectorImpl, Errors)} so it is always available prior to injection.
*/
private SingleParameterInjector<?>[] parameterInjectors;
/**
* @param method the method to invoke. Its return type must be the same type as {@code key}.
*/
ProviderMethod(
Key<T> key,
Method method,
Object instance,
ImmutableSet<Dependency<?>> dependencies,
Class<? extends Annotation> scopeAnnotation,
Annotation annotation) {
// We can be safely initialized eagerly since our bindings must exist statically and it is an
// error for them not to.
super(InitializationTiming.EAGER);
this.key = key;
this.scopeAnnotation = scopeAnnotation;
this.instance = instance;
this.dependencies = dependencies;
this.method = method;
this.exposed = method.isAnnotationPresent(Exposed.class);
this.annotation = annotation;
}
@Override
public Key<T> getKey() {
return key;
}
@Override
public Method getMethod() {
return method;
}
// exposed for GIN
public Object getInstance() {
return instance;
}
@Override
public Object getEnclosingInstance() {
return instance;
}
@Override
public Annotation getAnnotation() {
return annotation;
}
public void configure(Binder binder) {
binder = binder.withSource(method);
if (scopeAnnotation != null) {
binder.bind(key).toProvider(this).in(scopeAnnotation);
} else {
binder.bind(key).toProvider(this);
}
if (exposed) {
// the cast is safe 'cause the only binder we have implements PrivateBinder. If there's a
// misplaced @Exposed, calling this will add an error to the binder's error queue
((PrivateBinder) binder).expose(key);
}
}
@Override
void initialize(InjectorImpl injector, Errors errors) throws ErrorsException {
parameterInjectors = injector.getParametersInjectors(dependencies.asList(), errors);
circularFactoryId = injector.circularFactoryIdFactory.next();
}
@Override
public final T get(final InternalContext context, final Dependency<?> dependency, boolean linked)
throws InternalProvisionException {
@SuppressWarnings("unchecked")
T result = (T) context.tryStartConstruction(circularFactoryId, dependency);
if (result != null) {
// We have a circular reference between bindings. Return a proxy.
return result;
}
return super.get(context, dependency, linked);
}
@Override
protected T doProvision(InternalContext context, Dependency<?> dependency)
throws InternalProvisionException {
T t = null;
try {
t = doProvision(SingleParameterInjector.getAll(context, parameterInjectors));
if (t == null && !dependency.isNullable()) {
InternalProvisionException.onNullInjectedIntoNonNullableDependency(getMethod(), dependency);
}
return t;
} catch (IllegalAccessException e) {
throw new AssertionError(e);
} catch (InternalProvisionException e) {
throw e.addSource(getSource());
} catch (InvocationTargetException userException) {
Throwable cause = userException.getCause() != null ? userException.getCause() : userException;
throw InternalProvisionException.errorInProvider(cause).addSource(getSource());
} catch (Throwable unexpected) {
throw InternalProvisionException.errorInProvider(unexpected).addSource(getSource());
} finally {
context.finishConstruction(circularFactoryId, t);
}
}
/** Extension point for our subclasses to implement the provisioning strategy. */
abstract T doProvision(Object[] parameters)
throws IllegalAccessException, InvocationTargetException;
@Override
MethodHandleResult makeHandle(LinkageContext context, boolean linked) {
MethodHandleResult result = super.makeHandle(context, linked);
checkState(result.cachability == MethodHandleResult.Cachability.ALWAYS);
// Handle circular proxies.
return makeCachable(
InternalMethodHandles.tryStartConstruction(result.methodHandle, circularFactoryId));
}
/** Creates a method handle that constructs the object to be injected. */
@Override
protected final MethodHandle doGetHandle(LinkageContext context) {
MethodHandle handle =
doProvisionHandle(SingleParameterInjector.getAllHandles(context, parameterInjectors));
InternalMethodHandles.checkHasElementFactoryType(handle);
// add a dependency parameter so `nullCheckResult` can use it.
handle = MethodHandles.dropArguments(handle, 1, Dependency.class);
handle = InternalMethodHandles.nullCheckResult(handle, getMethod());
// catch everything else and rethrow as an error in provider.
handle =
InternalMethodHandles.catchThrowableInProviderAndRethrowWithSource(handle, getSource());
handle = InternalMethodHandles.finishConstruction(handle, circularFactoryId);
return handle;
}
/**
* Extension point for our subclasses to implement the provisioning strategy.
*
* <p>Should return a handle with the signature {@code (InternalContext) -> Object}
*/
abstract MethodHandle doProvisionHandle(MethodHandle[] parameters);
@Override
public Set<Dependency<?>> getDependencies() {
return dependencies;
}
@Override
@SuppressWarnings("unchecked")
public <B, V> V acceptExtensionVisitor(
BindingTargetVisitor<B, V> visitor, ProviderInstanceBinding<? extends B> binding) {
if (visitor instanceof ProvidesMethodTargetVisitor) {
return ((ProvidesMethodTargetVisitor<T, V>) visitor).visit(this);
}
return visitor.visit(binding);
}
@Override
public String toString() {
String annotationString = annotation.toString();
// Show @Provides w/o the com.google.inject prefix.
if (annotation.annotationType() == Provides.class) {
annotationString = "@Provides";
} else if (annotationString.endsWith("()")) {
// Remove the common "()" suffix if there are no values.
annotationString = annotationString.substring(0, annotationString.length() - 2);
}
return annotationString + " " + StackTraceElements.forMember(method);
}
@Override
public boolean equals(Object obj) {
if (obj instanceof ProviderMethod) {
ProviderMethod<?> o = (ProviderMethod<?>) obj;
return method.equals(o.method)
&& Objects.equal(instance, o.instance)
&& annotation.equals(o.annotation);
} else {
return false;
}
}
@Override
public int hashCode() {
// Avoid calling hashCode on 'instance', which is a user-object
// that might not be expecting it.
// (We need to call equals, so we do. But we can avoid hashCode.)
return Objects.hashCode(method, annotation);
}
/**
* A {@link ProviderMethod} implementation that uses bytecode generation to invoke the provider
* method.
*/
private static final | generation |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/hierarchies/standard/TestHierarchyLevelTwoWithBareContextConfigurationInSuperclassTests.java | {
"start": 1509,
"end": 1686
} | class ____ extends
TestHierarchyLevelOneWithBareContextConfigurationInSuperclassTests {
@Configuration
static | TestHierarchyLevelTwoWithBareContextConfigurationInSuperclassTests |
java | apache__camel | components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/fixed/unmarshall/simple/trimfield/BindySimpleFixedLengthUnmarshallTrimAlignedBFieldTest.java | {
"start": 1682,
"end": 2957
} | class ____ {
private static final String URI_MOCK_RESULT = "mock:result";
private static final String URI_DIRECT_START = "direct:start";
@Produce(URI_DIRECT_START)
private ProducerTemplate template;
@EndpointInject(URI_MOCK_RESULT)
private MockEndpoint result;
private String expected;
@Test
@DirtiesContext
public void testUnMarshallMessageWithTrimBoth() throws Exception {
expected = "10A9 Pauline M ISINXD12345678BUYShare000002500.45USD01-08-2009 Hello###XXTEST123X";
template.sendBody(expected);
result.expectedMessageCount(1);
result.assertIsSatisfied();
// check the model
BindySimpleFixedLengthUnmarshallTrimAlignedBFieldTest.Order order = result.getReceivedExchanges().get(0).getIn()
.getBody(BindySimpleFixedLengthUnmarshallTrimAlignedBFieldTest.Order.class);
assertEquals(10, order.getOrderNr());
// the field is not trimmed
assertEquals("Pauline", order.getFirstName());
assertEquals("M ", order.getLastName()); // no trim
assertEquals(" Hello", order.getComment());
assertEquals("TEST123", order.getCommentBAligned());
}
public static | BindySimpleFixedLengthUnmarshallTrimAlignedBFieldTest |
java | mockito__mockito | mockito-core/src/test/java/org/mockito/internal/util/reflection/ParameterizedConstructorInstantiatorTest.java | {
"start": 5469,
"end": 5862
} | class ____ extends OneConstructor {
Observer observer;
Map map;
public MultipleConstructor(Observer observer) {
this(observer, null);
}
public MultipleConstructor(Observer observer, Map map) {
super(observer);
this.observer = observer;
this.map = map;
}
}
private static | MultipleConstructor |
java | apache__camel | components/camel-mllp/src/test/java/org/apache/camel/component/mllp/MllpSSLContextParametersTest.java | {
"start": 1529,
"end": 4285
} | class ____ extends CamelTestSupport {
@RegisterExtension
public MllpClientResource mllpClient = new MllpClientResource();
@EndpointInject("mock://result")
MockEndpoint result;
public SSLContextParameters createSslContextParameters() {
KeyStoreParameters ksp = new KeyStoreParameters();
ksp.setResource(this.getClass().getClassLoader().getResource("keystore.jks").toString());
ksp.setPassword("password");
KeyManagersParameters kmp = new KeyManagersParameters();
kmp.setKeyPassword("password");
kmp.setKeyStore(ksp);
TrustManagersParameters tmp = new TrustManagersParameters();
tmp.setKeyStore(ksp);
SSLContextParameters sslContextParameters = new SSLContextParameters();
sslContextParameters.setKeyManagers(kmp);
sslContextParameters.setTrustManagers(tmp);
return sslContextParameters;
}
@Override
protected CamelContext createCamelContext() throws Exception {
mllpClient.setMllpHost("localhost");
mllpClient.setMllpPort(AvailablePortFinder.getNextAvailable());
DefaultCamelContext context = (DefaultCamelContext) super.createCamelContext();
context.setUseMDCLogging(true);
context.getCamelContextExtension().setName(this.getClass().getSimpleName());
SSLContextParameters sslContextParameters = createSslContextParameters();
context.getRegistry().bind("sslContextParameters", sslContextParameters);
return context;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
String routeId = "mllp-ssl-sender";
public void configure() {
fromF("mllp://%d?sslContextParameters=#sslContextParameters", mllpClient.getMllpPort())
.log(LoggingLevel.INFO, routeId, "Received Message: ${body}")
.to(result);
}
};
}
@Test
public void testSSLInOutWithMllpConsumer() throws Exception {
String hl7Message = "MSH|^~\\&|CLIENT|TEST|SERVER|ACK|20231118120000||ADT^A01|123456|T|2.6\r" +
"EVN|A01|20231118120000\r" +
"PID|1|12345|67890||DOE^JOHN||19800101|M|||123 Main St^^Springfield^IL^62704||(555)555-5555|||||S\r"
+
"PV1|1|O\r";
result.expectedBodiesReceived(hl7Message);
String endpointUri = String.format("mllp://%s:%d?sslContextParameters=#sslContextParameters",
mllpClient.getMllpHost(), mllpClient.getMllpPort());
template.sendBody(endpointUri, hl7Message);
result.assertIsSatisfied();
}
}
| MllpSSLContextParametersTest |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/util/BackpressureHelper.java | {
"start": 814,
"end": 911
} | class ____ help with backpressure-related operations such as request aggregation.
*/
public final | to |
java | apache__spark | common/sketch/src/main/java/org/apache/spark/util/sketch/CountMinSketchImpl.java | {
"start": 910,
"end": 10114
} | class ____ extends CountMinSketch implements Serializable {
private static final long PRIME_MODULUS = (1L << 31) - 1;
private int depth;
private int width;
private long[][] table;
private long[] hashA;
private long totalCount;
private double eps;
private double confidence;
private CountMinSketchImpl() {}
CountMinSketchImpl(int depth, int width, int seed) {
if (depth <= 0 || width <= 0) {
throw new IllegalArgumentException("Depth and width must be both positive");
}
this.depth = depth;
this.width = width;
this.eps = 2.0 / width;
this.confidence = 1 - 1 / Math.pow(2, depth);
initTablesWith(depth, width, seed);
}
CountMinSketchImpl(double eps, double confidence, int seed) {
if (eps <= 0D) {
throw new IllegalArgumentException("Relative error must be positive");
}
if (confidence <= 0D || confidence >= 1D) {
throw new IllegalArgumentException("Confidence must be within range (0.0, 1.0)");
}
// 2/w = eps ; w = 2/eps
// 1/2^depth <= 1-confidence ; depth >= -log2 (1-confidence)
this.eps = eps;
this.confidence = confidence;
this.width = (int) Math.ceil(2 / eps);
this.depth = (int) Math.ceil(-Math.log1p(-confidence) / Math.log(2));
initTablesWith(depth, width, seed);
}
@Override
public boolean equals(Object other) {
if (other == this) {
return true;
}
if (!(other instanceof CountMinSketchImpl that)) {
return false;
}
return
this.depth == that.depth &&
this.width == that.width &&
this.totalCount == that.totalCount &&
Arrays.equals(this.hashA, that.hashA) &&
Arrays.deepEquals(this.table, that.table);
}
@Override
public int hashCode() {
int hash = depth;
hash = hash * 31 + width;
hash = hash * 31 + (int) (totalCount ^ (totalCount >>> 32));
hash = hash * 31 + Arrays.hashCode(hashA);
hash = hash * 31 + Arrays.deepHashCode(table);
return hash;
}
private void initTablesWith(int depth, int width, int seed) {
this.table = new long[depth][width];
this.hashA = new long[depth];
Random r = new Random(seed);
// We're using a linear hash functions
// of the form (a*x+b) mod p.
// a,b are chosen independently for each hash function.
// However we can set b = 0 as all it does is shift the results
// without compromising their uniformity or independence with
// the other hashes.
for (int i = 0; i < depth; ++i) {
hashA[i] = r.nextInt(Integer.MAX_VALUE);
}
}
@Override
public double relativeError() {
return eps;
}
@Override
public double confidence() {
return confidence;
}
@Override
public int depth() {
return depth;
}
@Override
public int width() {
return width;
}
@Override
public long totalCount() {
return totalCount;
}
@Override
public void add(Object item) {
add(item, 1);
}
@Override
public void add(Object item, long count) {
if (item instanceof String str) {
addString(str, count);
} else if (item instanceof byte[] bytes) {
addBinary(bytes, count);
} else {
addLong(Utils.integralToLong(item), count);
}
}
@Override
public void addString(String item) {
addString(item, 1);
}
@Override
public void addString(String item, long count) {
addBinary(Utils.getBytesFromUTF8String(item), count);
}
@Override
public void addLong(long item) {
addLong(item, 1);
}
@Override
public void addLong(long item, long count) {
if (count < 0) {
throw new IllegalArgumentException("Negative increments not implemented");
}
for (int i = 0; i < depth; ++i) {
table[i][hash(item, i)] += count;
}
totalCount += count;
}
@Override
public void addBinary(byte[] item) {
addBinary(item, 1);
}
@Override
public void addBinary(byte[] item, long count) {
if (count < 0) {
throw new IllegalArgumentException("Negative increments not implemented");
}
int[] buckets = getHashBuckets(item, depth, width);
for (int i = 0; i < depth; ++i) {
table[i][buckets[i]] += count;
}
totalCount += count;
}
private int hash(long item, int count) {
long hash = hashA[count] * item;
// A super fast way of computing x mod 2^p-1
// See http://www.cs.princeton.edu/courses/archive/fall09/cos521/Handouts/universalclasses.pdf
// page 149, right after Proposition 7.
hash += hash >> 32;
hash &= PRIME_MODULUS;
// Doing "%" after (int) conversion is ~2x faster than %'ing longs.
return ((int) hash) % width;
}
private static int[] getHashBuckets(String key, int hashCount, int max) {
return getHashBuckets(Utils.getBytesFromUTF8String(key), hashCount, max);
}
private static int[] getHashBuckets(byte[] b, int hashCount, int max) {
int[] result = new int[hashCount];
int hash1 = Murmur3_x86_32.hashUnsafeBytes(b, Platform.BYTE_ARRAY_OFFSET, b.length, 0);
int hash2 = Murmur3_x86_32.hashUnsafeBytes(b, Platform.BYTE_ARRAY_OFFSET, b.length, hash1);
for (int i = 0; i < hashCount; i++) {
result[i] = Math.abs((hash1 + i * hash2) % max);
}
return result;
}
@Override
public long estimateCount(Object item) {
if (item instanceof String str) {
return estimateCountForStringItem(str);
} else if (item instanceof byte[] bytes) {
return estimateCountForBinaryItem(bytes);
} else {
return estimateCountForLongItem(Utils.integralToLong(item));
}
}
private long estimateCountForLongItem(long item) {
long res = Long.MAX_VALUE;
for (int i = 0; i < depth; ++i) {
res = Math.min(res, table[i][hash(item, i)]);
}
return res;
}
private long estimateCountForStringItem(String item) {
long res = Long.MAX_VALUE;
int[] buckets = getHashBuckets(item, depth, width);
for (int i = 0; i < depth; ++i) {
res = Math.min(res, table[i][buckets[i]]);
}
return res;
}
private long estimateCountForBinaryItem(byte[] item) {
long res = Long.MAX_VALUE;
int[] buckets = getHashBuckets(item, depth, width);
for (int i = 0; i < depth; ++i) {
res = Math.min(res, table[i][buckets[i]]);
}
return res;
}
@Override
public CountMinSketch mergeInPlace(CountMinSketch other) throws IncompatibleMergeException {
if (other == null) {
throw new IncompatibleMergeException("Cannot merge null estimator");
}
if (!(other instanceof CountMinSketchImpl that)) {
throw new IncompatibleMergeException(
"Cannot merge estimator of class " + other.getClass().getName()
);
}
if (this.depth != that.depth) {
throw new IncompatibleMergeException("Cannot merge estimators of different depth");
}
if (this.width != that.width) {
throw new IncompatibleMergeException("Cannot merge estimators of different width");
}
if (!Arrays.equals(this.hashA, that.hashA)) {
throw new IncompatibleMergeException("Cannot merge estimators of different seed");
}
for (int i = 0; i < this.table.length; ++i) {
for (int j = 0; j < this.table[i].length; ++j) {
this.table[i][j] = this.table[i][j] + that.table[i][j];
}
}
this.totalCount += that.totalCount;
return this;
}
@Override
public void writeTo(OutputStream out) throws IOException {
DataOutputStream dos = new DataOutputStream(out);
dos.writeInt(Version.V1.getVersionNumber());
dos.writeLong(this.totalCount);
dos.writeInt(this.depth);
dos.writeInt(this.width);
for (int i = 0; i < this.depth; ++i) {
dos.writeLong(this.hashA[i]);
}
for (int i = 0; i < this.depth; ++i) {
for (int j = 0; j < this.width; ++j) {
dos.writeLong(table[i][j]);
}
}
}
@Override
public byte[] toByteArray() throws IOException {
try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
writeTo(out);
return out.toByteArray();
}
}
public static CountMinSketchImpl readFrom(InputStream in) throws IOException {
CountMinSketchImpl sketch = new CountMinSketchImpl();
sketch.readFrom0(in);
return sketch;
}
private void readFrom0(InputStream in) throws IOException {
DataInputStream dis = new DataInputStream(in);
int version = dis.readInt();
if (version != Version.V1.getVersionNumber()) {
throw new IOException("Unexpected Count-Min Sketch version number (" + version + ")");
}
this.totalCount = dis.readLong();
this.depth = dis.readInt();
this.width = dis.readInt();
this.eps = 2.0 / width;
this.confidence = 1 - 1 / Math.pow(2, depth);
this.hashA = new long[depth];
for (int i = 0; i < depth; ++i) {
this.hashA[i] = dis.readLong();
}
this.table = new long[depth][width];
for (int i = 0; i < depth; ++i) {
for (int j = 0; j < width; ++j) {
this.table[i][j] = dis.readLong();
}
}
}
private void writeObject(ObjectOutputStream out) throws IOException {
this.writeTo(out);
}
private void readObject(ObjectInputStream in) throws IOException {
this.readFrom0(in);
}
}
| CountMinSketchImpl |
java | resilience4j__resilience4j | resilience4j-rxjava2/src/main/java/io/github/resilience4j/adapter/RxJava2Adapter.java | {
"start": 344,
"end": 1440
} | class ____ {
/**
* Converts the EventPublisher into a Flowable.
*
* @param eventPublisher the event publisher
* @param <T> the type of the event
* @return the Flowable
*/
public static <T> Flowable<T> toFlowable(EventPublisher<T> eventPublisher) {
PublishProcessor<T> publishProcessor = PublishProcessor.create();
FlowableProcessor<T> flowableProcessor = publishProcessor.toSerialized();
eventPublisher.onEvent(flowableProcessor::onNext);
return flowableProcessor;
}
/**
* Converts the EventPublisher into an Observable.
*
* @param eventPublisher the event publisher
* @param <T> the type of the event
* @return the Observable
*/
public static <T> Observable<T> toObservable(EventPublisher<T> eventPublisher) {
PublishSubject<T> publishSubject = PublishSubject.create();
Subject<T> serializedSubject = publishSubject.toSerialized();
eventPublisher.onEvent(serializedSubject::onNext);
return serializedSubject;
}
}
| RxJava2Adapter |
java | spring-projects__spring-boot | module/spring-boot-jackson/src/test/java/org/springframework/boot/jackson/autoconfigure/JacksonAutoConfigurationTests.java | {
"start": 40808,
"end": 41415
} | enum ____ {
CBOR(CBORMapper.class, CBORMapper.Builder.class), JSON(JsonMapper.class, JsonMapper.Builder.class),
XML(XmlMapper.class, XmlMapper.Builder.class);
private final Class<? extends ObjectMapper> mapperClass;
private final Class<? extends MapperBuilder<?, ?>> builderClass;
<M extends ObjectMapper, B extends MapperBuilder<M, B>> MapperType(Class<M> mapperClass,
Class<B> builderClass) {
this.mapperClass = mapperClass;
this.builderClass = builderClass;
}
ObjectMapper getMapper(ApplicationContext context) {
return context.getBean(this.mapperClass);
}
}
}
| MapperType |
java | apache__maven | compat/maven-artifact/src/main/java/org/apache/maven/artifact/resolver/filter/ArtifactFilter.java | {
"start": 919,
"end": 988
} | interface ____ {
boolean include(Artifact artifact);
}
| ArtifactFilter |
java | apache__avro | lang/java/protobuf/src/test/java/org/apache/avro/protobuf/noopt/Test.java | {
"start": 12311,
"end": 22672
} | class ____ extends com.google.protobuf.GeneratedMessage implements
// @@protoc_insertion_point(message_implements:org.apache.avro.protobuf.noopt.Foo)
FooOrBuilder {
private static final long serialVersionUID = 0L;
static {
com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion(
com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, /* major= */ 4, /* minor= */ 26, /* patch= */ 1,
/* suffix= */ "", Foo.class.getName());
}
// Use Foo.newBuilder() to construct.
private Foo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
}
private Foo() {
string_ = "";
bytes_ = com.google.protobuf.ByteString.EMPTY;
enum_ = 3;
intArray_ = emptyIntList();
fooArray_ = java.util.Collections.emptyList();
syms_ = java.util.Collections.emptyList();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return org.apache.avro.protobuf.noopt.Test.internal_static_org_apache_avro_protobuf_noopt_Foo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() {
return org.apache.avro.protobuf.noopt.Test.internal_static_org_apache_avro_protobuf_noopt_Foo_fieldAccessorTable
.ensureFieldAccessorsInitialized(org.apache.avro.protobuf.noopt.Test.Foo.class,
org.apache.avro.protobuf.noopt.Test.Foo.Builder.class);
}
private int bitField0_;
public static final int INT32_FIELD_NUMBER = 1;
private int int32_ = 0;
/**
* <pre>
* all the primitive types
* </pre>
*
* <code>required int32 int32 = 1;</code>
*
* @return Whether the int32 field is set.
*/
@java.lang.Override
public boolean hasInt32() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* all the primitive types
* </pre>
*
* <code>required int32 int32 = 1;</code>
*
* @return The int32.
*/
@java.lang.Override
public int getInt32() {
return int32_;
}
public static final int INT64_FIELD_NUMBER = 2;
private long int64_ = 0L;
/**
* <code>optional int64 int64 = 2;</code>
*
* @return Whether the int64 field is set.
*/
@java.lang.Override
public boolean hasInt64() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <code>optional int64 int64 = 2;</code>
*
* @return The int64.
*/
@java.lang.Override
public long getInt64() {
return int64_;
}
public static final int UINT32_FIELD_NUMBER = 3;
private int uint32_ = 0;
/**
* <code>optional uint32 uint32 = 3;</code>
*
* @return Whether the uint32 field is set.
*/
@java.lang.Override
public boolean hasUint32() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* <code>optional uint32 uint32 = 3;</code>
*
* @return The uint32.
*/
@java.lang.Override
public int getUint32() {
return uint32_;
}
public static final int UINT64_FIELD_NUMBER = 4;
private long uint64_ = 0L;
/**
* <code>optional uint64 uint64 = 4;</code>
*
* @return Whether the uint64 field is set.
*/
@java.lang.Override
public boolean hasUint64() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* <code>optional uint64 uint64 = 4;</code>
*
* @return The uint64.
*/
@java.lang.Override
public long getUint64() {
return uint64_;
}
public static final int SINT32_FIELD_NUMBER = 5;
private int sint32_ = 0;
/**
* <code>optional sint32 sint32 = 5;</code>
*
* @return Whether the sint32 field is set.
*/
@java.lang.Override
public boolean hasSint32() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* <code>optional sint32 sint32 = 5;</code>
*
* @return The sint32.
*/
@java.lang.Override
public int getSint32() {
return sint32_;
}
public static final int SINT64_FIELD_NUMBER = 6;
private long sint64_ = 0L;
/**
* <code>optional sint64 sint64 = 6;</code>
*
* @return Whether the sint64 field is set.
*/
@java.lang.Override
public boolean hasSint64() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* <code>optional sint64 sint64 = 6;</code>
*
* @return The sint64.
*/
@java.lang.Override
public long getSint64() {
return sint64_;
}
public static final int FIXED32_FIELD_NUMBER = 7;
private int fixed32_ = 0;
/**
* <code>optional fixed32 fixed32 = 7;</code>
*
* @return Whether the fixed32 field is set.
*/
@java.lang.Override
public boolean hasFixed32() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
* <code>optional fixed32 fixed32 = 7;</code>
*
* @return The fixed32.
*/
@java.lang.Override
public int getFixed32() {
return fixed32_;
}
public static final int FIXED64_FIELD_NUMBER = 8;
private long fixed64_ = 0L;
/**
* <code>optional fixed64 fixed64 = 8;</code>
*
* @return Whether the fixed64 field is set.
*/
@java.lang.Override
public boolean hasFixed64() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
* <code>optional fixed64 fixed64 = 8;</code>
*
* @return The fixed64.
*/
@java.lang.Override
public long getFixed64() {
return fixed64_;
}
public static final int SFIXED32_FIELD_NUMBER = 9;
private int sfixed32_ = 0;
/**
* <code>optional sfixed32 sfixed32 = 9;</code>
*
* @return Whether the sfixed32 field is set.
*/
@java.lang.Override
public boolean hasSfixed32() {
return ((bitField0_ & 0x00000100) != 0);
}
/**
* <code>optional sfixed32 sfixed32 = 9;</code>
*
* @return The sfixed32.
*/
@java.lang.Override
public int getSfixed32() {
return sfixed32_;
}
public static final int SFIXED64_FIELD_NUMBER = 10;
private long sfixed64_ = 0L;
/**
* <code>optional sfixed64 sfixed64 = 10;</code>
*
* @return Whether the sfixed64 field is set.
*/
@java.lang.Override
public boolean hasSfixed64() {
return ((bitField0_ & 0x00000200) != 0);
}
/**
* <code>optional sfixed64 sfixed64 = 10;</code>
*
* @return The sfixed64.
*/
@java.lang.Override
public long getSfixed64() {
return sfixed64_;
}
public static final int FLOAT_FIELD_NUMBER = 11;
private float float_ = 0F;
/**
* <code>optional float float = 11;</code>
*
* @return Whether the float field is set.
*/
@java.lang.Override
public boolean hasFloat() {
return ((bitField0_ & 0x00000400) != 0);
}
/**
* <code>optional float float = 11;</code>
*
* @return The float.
*/
@java.lang.Override
public float getFloat() {
return float_;
}
public static final int DOUBLE_FIELD_NUMBER = 12;
private double double_ = 0D;
/**
* <code>optional double double = 12;</code>
*
* @return Whether the double field is set.
*/
@java.lang.Override
public boolean hasDouble() {
return ((bitField0_ & 0x00000800) != 0);
}
/**
* <code>optional double double = 12;</code>
*
* @return The double.
*/
@java.lang.Override
public double getDouble() {
return double_;
}
public static final int BOOL_FIELD_NUMBER = 13;
private boolean bool_ = false;
/**
* <code>optional bool bool = 13;</code>
*
* @return Whether the bool field is set.
*/
@java.lang.Override
public boolean hasBool() {
return ((bitField0_ & 0x00001000) != 0);
}
/**
* <code>optional bool bool = 13;</code>
*
* @return The bool.
*/
@java.lang.Override
public boolean getBool() {
return bool_;
}
public static final int STRING_FIELD_NUMBER = 14;
@SuppressWarnings("serial")
private volatile java.lang.Object string_ = "";
/**
* <code>optional string string = 14;</code>
*
* @return Whether the string field is set.
*/
@java.lang.Override
public boolean hasString() {
return ((bitField0_ & 0x00002000) != 0);
}
/**
* <code>optional string string = 14;</code>
*
* @return The string.
*/
@java.lang.Override
public java.lang.String getString() {
java.lang.Object ref = string_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
string_ = s;
}
return s;
}
}
/**
* <code>optional string string = 14;</code>
*
* @return The bytes for string.
*/
@java.lang.Override
public com.google.protobuf.ByteString getStringBytes() {
java.lang.Object ref = string_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
string_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int BYTES_FIELD_NUMBER = 15;
private com.google.protobuf.ByteString bytes_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes bytes = 15;</code>
*
* @return Whether the bytes field is set.
*/
@java.lang.Override
public boolean hasBytes() {
return ((bitField0_ & 0x00004000) != 0);
}
/**
* <code>optional bytes bytes = 15;</code>
*
* @return The bytes.
*/
@java.lang.Override
public com.google.protobuf.ByteString getBytes() {
return bytes_;
}
public static final int ENUM_FIELD_NUMBER = 16;
private int enum_ = 3;
/**
* <code>optional .org.apache.avro.protobuf.noopt.A enum = 16 [default = Z];</code>
*
* @return Whether the | Foo |
java | grpc__grpc-java | okhttp/src/test/java/io/grpc/okhttp/ExceptionHandlingFrameWriterTest.java | {
"start": 1635,
"end": 3054
} | class ____ {
private static final Logger logger = Logger.getLogger(OkHttpClientTransport.class.getName());
private final FrameWriter mockedFrameWriter = mock(FrameWriter.class);
private final TransportExceptionHandler transportExceptionHandler =
mock(TransportExceptionHandler.class);
private final ExceptionHandlingFrameWriter exceptionHandlingFrameWriter =
new ExceptionHandlingFrameWriter(transportExceptionHandler, mockedFrameWriter);
@Test
public void exception() throws IOException {
IOException exception = new IOException("some exception");
doThrow(exception).when(mockedFrameWriter)
.synReply(false, 100, new ArrayList<Header>());
exceptionHandlingFrameWriter.synReply(false, 100, new ArrayList<Header>());
verify(transportExceptionHandler).onException(exception);
verify(mockedFrameWriter).synReply(false, 100, new ArrayList<Header>());
}
@Test
public void unknownException() {
assertThat(getLogLevel(new Exception())).isEqualTo(Level.INFO);
}
@Test
public void ioException() {
assertThat(getLogLevel(new IOException("Socket closed"))).isEqualTo(Level.FINE);
}
@Test
public void ioException_nullMessage() {
IOException e = new IOException();
assertThat(e.getMessage()).isNull();
assertThat(getLogLevel(e)).isEqualTo(Level.FINE);
}
@Test
public void extendedIoException() {
| ExceptionHandlingFrameWriterTest |
java | google__guava | android/guava-tests/test/com/google/common/collect/LinkedHashMultisetTest.java | {
"start": 1557,
"end": 4720
} | class ____ extends TestCase {
@J2ktIncompatible
@GwtIncompatible // suite
@AndroidIncompatible // test-suite builders
public static Test suite() {
TestSuite suite = new TestSuite();
suite.addTest(
MultisetTestSuiteBuilder.using(linkedHashMultisetGenerator())
.named("LinkedHashMultiset")
.withFeatures(
CollectionSize.ANY,
CollectionFeature.KNOWN_ORDER,
CollectionFeature.FAILS_FAST_ON_CONCURRENT_MODIFICATION,
CollectionFeature.ALLOWS_NULL_VALUES,
CollectionFeature.SERIALIZABLE,
CollectionFeature.GENERAL_PURPOSE,
MultisetFeature.ENTRIES_ARE_VIEWS)
.createTestSuite());
suite.addTestSuite(LinkedHashMultisetTest.class);
return suite;
}
@J2ktIncompatible
@AndroidIncompatible // test-suite builders
private static TestStringMultisetGenerator linkedHashMultisetGenerator() {
return new TestStringMultisetGenerator() {
@Override
protected Multiset<String> create(String[] elements) {
return LinkedHashMultiset.create(asList(elements));
}
@Override
public List<String> order(List<String> insertionOrder) {
List<String> order = new ArrayList<>();
for (String s : insertionOrder) {
int index = order.indexOf(s);
if (index == -1) {
order.add(s);
} else {
order.add(index, s);
}
}
return order;
}
};
}
public void testCreate() {
Multiset<String> multiset = LinkedHashMultiset.create();
multiset.add("foo", 2);
multiset.add("bar");
assertEquals(3, multiset.size());
assertEquals(2, multiset.count("foo"));
assertEquals("[foo x 2, bar]", multiset.toString());
}
public void testCreateWithSize() {
Multiset<String> multiset = LinkedHashMultiset.create(50);
multiset.add("foo", 2);
multiset.add("bar");
assertEquals(3, multiset.size());
assertEquals(2, multiset.count("foo"));
assertEquals("[foo x 2, bar]", multiset.toString());
}
public void testCreateFromIterable() {
Multiset<String> multiset = LinkedHashMultiset.create(asList("foo", "bar", "foo"));
assertEquals(3, multiset.size());
assertEquals(2, multiset.count("foo"));
assertEquals("[foo x 2, bar]", multiset.toString());
}
public void testToString() {
Multiset<String> ms = LinkedHashMultiset.create();
ms.add("a", 3);
ms.add("c", 1);
ms.add("b", 2);
assertEquals("[a x 3, c, b x 2]", ms.toString());
}
public void testLosesPlaceInLine() throws Exception {
Multiset<String> ms = LinkedHashMultiset.create();
ms.add("a");
ms.add("b", 2);
ms.add("c");
assertThat(ms.elementSet()).containsExactly("a", "b", "c").inOrder();
ms.remove("b");
assertThat(ms.elementSet()).containsExactly("a", "b", "c").inOrder();
ms.add("b");
assertThat(ms.elementSet()).containsExactly("a", "b", "c").inOrder();
ms.remove("b", 2);
ms.add("b");
assertThat(ms.elementSet()).containsExactly("a", "c", "b").inOrder();
}
}
| LinkedHashMultisetTest |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-dubbo/src/test/java/org/apache/dubbo/rpc/protocol/dubbo/DubboInvokerAvailableTest.java | {
"start": 1752,
"end": 7915
} | class ____ {
private static DubboProtocol protocol;
private static ProxyFactory proxy =
ExtensionLoader.getExtensionLoader(ProxyFactory.class).getAdaptiveExtension();
@BeforeAll
public static void setUpBeforeClass() {}
@BeforeEach
public void setUp() throws Exception {
protocol = new DubboProtocol(FrameworkModel.defaultModel());
}
@AfterAll
public static void tearDownAfterClass() {
ProtocolUtils.closeAll();
}
@Test
void test_Normal_available() {
int port = NetUtils.getAvailablePort();
URL url = URL.valueOf("dubbo://127.0.0.1:" + port + "/org.apache.dubbo.rpc.protocol.dubbo.IDemoService");
ProtocolUtils.export(new DemoServiceImpl(), IDemoService.class, url);
DubboInvoker<?> invoker = (DubboInvoker<?>) protocol.protocolBindingRefer(IDemoService.class, url);
Assertions.assertTrue(invoker.isAvailable());
invoker.destroy();
Assertions.assertFalse(invoker.isAvailable());
}
@Test
void test_Normal_ChannelReadOnly() throws Exception {
int port = NetUtils.getAvailablePort();
URL url = URL.valueOf("dubbo://127.0.0.1:" + port + "/org.apache.dubbo.rpc.protocol.dubbo.IDemoService");
ProtocolUtils.export(new DemoServiceImpl(), IDemoService.class, url);
DubboInvoker<?> invoker = (DubboInvoker<?>) protocol.protocolBindingRefer(IDemoService.class, url);
Assertions.assertTrue(invoker.isAvailable());
getClients(invoker)[0].setAttribute(Constants.CHANNEL_ATTRIBUTE_READONLY_KEY, Boolean.TRUE);
Assertions.assertFalse(invoker.isAvailable());
// reset status since connection is shared among invokers
getClients(invoker)[0].removeAttribute(Constants.CHANNEL_ATTRIBUTE_READONLY_KEY);
}
@Disabled
@Test
void test_normal_channel_close_wait_gracefully() {
int testPort = NetUtils.getAvailablePort();
URL url = URL.valueOf("dubbo://127.0.0.1:" + testPort
+ "/org.apache.dubbo.rpc.protocol.dubbo.IDemoService?scope=true&lazy=false");
Exporter<IDemoService> exporter = ProtocolUtils.export(new DemoServiceImpl(), IDemoService.class, url);
Exporter<IDemoService> exporter0 = ProtocolUtils.export(new DemoServiceImpl0(), IDemoService.class, url);
DubboInvoker<?> invoker = (DubboInvoker<?>) protocol.protocolBindingRefer(IDemoService.class, url);
long start = System.currentTimeMillis();
try {
System.setProperty(SHUTDOWN_WAIT_KEY, "2000");
protocol.destroy();
} finally {
System.getProperties().remove(SHUTDOWN_WAIT_KEY);
}
long waitTime = System.currentTimeMillis() - start;
Assertions.assertTrue(waitTime >= 2000);
Assertions.assertFalse(invoker.isAvailable());
}
@Test
void test_NoInvokers() throws Exception {
int port = NetUtils.getAvailablePort();
URL url = URL.valueOf(
"dubbo://127.0.0.1:" + port + "/org.apache.dubbo.rpc.protocol.dubbo.IDemoService?connections=1");
ProtocolUtils.export(new DemoServiceImpl(), IDemoService.class, url);
DubboInvoker<?> invoker = (DubboInvoker<?>) protocol.protocolBindingRefer(IDemoService.class, url);
ExchangeClient[] clients = getClients(invoker);
clients[0].close();
Assertions.assertFalse(invoker.isAvailable());
}
@Test
void test_Lazy_ChannelReadOnly() throws Exception {
int port = NetUtils.getAvailablePort();
URL url = URL.valueOf("dubbo://127.0.0.1:" + port
+ "/org.apache.dubbo.rpc.protocol.dubbo.IDemoService?lazy=true&connections=1&timeout=10000");
ProtocolUtils.export(new DemoServiceImpl(), IDemoService.class, url);
Invoker<?> invoker = protocol.refer(IDemoService.class, url);
Assertions.assertTrue(invoker.isAvailable());
ExchangeClient exchangeClient = getClients((DubboInvoker<?>) invoker)[0];
Assertions.assertFalse(exchangeClient.isClosed());
exchangeClient.setAttribute(Constants.CHANNEL_ATTRIBUTE_READONLY_KEY, Boolean.TRUE);
// invoke method --> init client
IDemoService service = (IDemoService) proxy.getProxy(invoker);
Assertions.assertEquals("ok", service.get());
Assertions.assertFalse(invoker.isAvailable());
exchangeClient.removeAttribute(Constants.CHANNEL_ATTRIBUTE_READONLY_KEY);
Assertions.assertTrue(invoker.isAvailable());
exchangeClient.setAttribute(Constants.CHANNEL_ATTRIBUTE_READONLY_KEY, Boolean.TRUE);
Assertions.assertFalse(invoker.isAvailable());
}
/**
* The test prefer serialization
*
* @throws Exception Exception
*/
@Test
public void testPreferSerialization() throws Exception {
int port = NetUtils.getAvailablePort();
URL url = URL.valueOf(
"dubbo://127.0.0.1:" + port
+ "/org.apache.dubbo.rpc.protocol.dubbo.IDemoService?lazy=true&connections=1&timeout=10000&serialization=fastjson&prefer_serialization=fastjson2,hessian2");
ProtocolUtils.export(new DemoServiceImpl(), IDemoService.class, url);
Invoker<?> invoker = protocol.refer(IDemoService.class, url);
Assertions.assertTrue(invoker.isAvailable());
ExchangeClient exchangeClient = getClients((DubboInvoker<?>) invoker)[0];
Assertions.assertFalse(exchangeClient.isClosed());
// invoke method --> init client
IDemoService service = (IDemoService) proxy.getProxy(invoker);
Assertions.assertEquals("ok", service.get());
}
private ExchangeClient[] getClients(DubboInvoker<?> invoker) throws Exception {
Field field = DubboInvoker.class.getDeclaredField("clientsProvider");
field.setAccessible(true);
ClientsProvider clientsProvider = (ClientsProvider) field.get(invoker);
List<? extends ExchangeClient> clients = clientsProvider.getClients();
Assertions.assertEquals(1, clients.size());
return clients.toArray(new ExchangeClient[0]);
}
public | DubboInvokerAvailableTest |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/SettingProvider.java | {
"start": 513,
"end": 599
} | interface ____<S> {
/**
* Get the setting value
*/
S getSetting();
}
}
| Provider |
java | apache__camel | components/camel-kafka/src/test/java/org/apache/camel/component/kafka/integration/KafkaConsumerIdempotentWithProcessorIT.java | {
"start": 1435,
"end": 3995
} | class ____ extends KafkaConsumerIdempotentTestSupport {
private static final String TOPIC;
private static final String REPOSITORY_TOPIC;
private final int size = 200;
static {
UUID topicId = UUID.randomUUID();
TOPIC = "idempt_" + topicId;
REPOSITORY_TOPIC = "TEST_IDEMPOTENT_" + topicId;
}
@BeforeAll
public static void createRepositoryTopic() {
KafkaTestUtil.createTopic(service, REPOSITORY_TOPIC, 1);
}
@AfterAll
public static void removeRepositoryTopic() {
kafkaAdminClient.deleteTopics(Collections.singleton(REPOSITORY_TOPIC)).all();
}
@BindToRegistry("kafkaIdempotentRepository")
private final KafkaIdempotentRepository kafkaIdempotentRepository
= new KafkaIdempotentRepository(REPOSITORY_TOPIC, getBootstrapServers());
@BeforeEach
public void before() {
doSend(size, TOPIC);
}
@AfterEach
public void after() {
// clean test topic
kafkaAdminClient.deleteTopics(Collections.singleton(TOPIC)).all();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("kafka:" + TOPIC
+ "?groupId=KafkaConsumerIdempotentWithProcessorIT&autoOffsetReset=earliest"
+ "&keyDeserializer=org.apache.kafka.common.serialization.StringDeserializer"
+ "&valueDeserializer=org.apache.kafka.common.serialization.StringDeserializer"
+ "&autoCommitIntervalMs=1000&pollTimeoutMs=1000&autoCommitEnable=true"
+ "&interceptorClasses=org.apache.camel.component.kafka.MockConsumerInterceptor")
.routeId("idemp-with-prop")
.process(exchange -> {
byte[] id = exchange.getIn().getHeader("id", byte[].class);
BigInteger bi = new BigInteger(id);
exchange.getIn().setHeader("id", String.valueOf(bi.longValue()));
})
.idempotentConsumer(header("id"))
.idempotentRepository("kafkaIdempotentRepository")
.to(KafkaTestUtil.MOCK_RESULT);
}
};
}
@Test
void kafkaMessageIsConsumedByCamel() {
MockEndpoint to = contextExtension.getMockEndpoint(KafkaTestUtil.MOCK_RESULT);
doRun(to, size);
}
}
| KafkaConsumerIdempotentWithProcessorIT |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/decorator/Person2.java | {
"start": 257,
"end": 848
} | class ____ extends Person {
private SportsClub sportsClub;
private Employer employer;
public Person2(String firstName, String lastName, Date dateOfBirth, Address address) {
super( firstName, lastName, dateOfBirth, address );
}
public SportsClub getSportsClub() {
return sportsClub;
}
public void setSportsClub(SportsClub sportsClub) {
this.sportsClub = sportsClub;
}
public Employer getEmployer() {
return employer;
}
public void setEmployer(Employer employer) {
this.employer = employer;
}
}
| Person2 |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobVertexBackPressureInfo.java | {
"start": 1800,
"end": 5050
} | class ____ implements ResponseBody {
public static final String FIELD_NAME_STATUS = "status";
public static final String DEPRECATED_FIELD_NAME_BACKPRESSURE_LEVEL = "backpressure-level";
public static final String FIELD_NAME_BACKPRESSURE_LEVEL = "backpressureLevel";
public static final String FIELD_NAME_END_TIMESTAMP = "end-timestamp";
public static final String FIELD_NAME_SUBTASKS = "subtasks";
/** Immutable singleton instance denoting that the back pressure stats are not available. */
private static final JobVertexBackPressureInfo DEPRECATED_JOB_VERTEX_BACK_PRESSURE_INFO =
new JobVertexBackPressureInfo(VertexBackPressureStatus.DEPRECATED, null, null, null);
@JsonProperty(FIELD_NAME_STATUS)
private final VertexBackPressureStatus status;
@JsonProperty(FIELD_NAME_BACKPRESSURE_LEVEL)
@JsonAlias(DEPRECATED_FIELD_NAME_BACKPRESSURE_LEVEL)
private final VertexBackPressureLevel backpressureLevel;
@JsonProperty(FIELD_NAME_END_TIMESTAMP)
private final Long endTimestamp;
@JsonProperty(FIELD_NAME_SUBTASKS)
private final List<SubtaskBackPressureInfo> subtasks;
@JsonCreator
public JobVertexBackPressureInfo(
@JsonProperty(FIELD_NAME_STATUS) VertexBackPressureStatus status,
@JsonProperty(FIELD_NAME_BACKPRESSURE_LEVEL) VertexBackPressureLevel backpressureLevel,
@JsonProperty(FIELD_NAME_END_TIMESTAMP) Long endTimestamp,
@JsonProperty(FIELD_NAME_SUBTASKS) List<SubtaskBackPressureInfo> subtasks) {
this.status = status;
this.backpressureLevel = backpressureLevel;
this.endTimestamp = endTimestamp;
this.subtasks = subtasks;
}
public static JobVertexBackPressureInfo deprecated() {
return DEPRECATED_JOB_VERTEX_BACK_PRESSURE_INFO;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
JobVertexBackPressureInfo that = (JobVertexBackPressureInfo) o;
return Objects.equals(status, that.status)
&& Objects.equals(backpressureLevel, that.backpressureLevel)
&& Objects.equals(endTimestamp, that.endTimestamp)
&& Objects.equals(subtasks, that.subtasks);
}
@Override
public int hashCode() {
return Objects.hash(status, backpressureLevel, endTimestamp, subtasks);
}
public VertexBackPressureStatus getStatus() {
return status;
}
@Nullable
@Hidden
@JsonProperty(DEPRECATED_FIELD_NAME_BACKPRESSURE_LEVEL)
public VertexBackPressureLevel getBackpressureLevel() {
return backpressureLevel;
}
@Nullable
public Long getEndTimestamp() {
return endTimestamp;
}
@Nullable
public List<SubtaskBackPressureInfo> getSubtasks() {
return subtasks == null ? null : Collections.unmodifiableList(subtasks);
}
// ---------------------------------------------------------------------------------
// Static helper classes
// ---------------------------------------------------------------------------------
/** Nested | JobVertexBackPressureInfo |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/bind/annotation/ResponseStatus.java | {
"start": 2180,
"end": 2620
} | class ____ its subclasses unless
* overridden by a local {@code @ResponseStatus} declaration on the method.
*
* @author Arjen Poutsma
* @author Sam Brannen
* @since 3.0
* @see org.springframework.web.servlet.mvc.annotation.ResponseStatusExceptionResolver
* @see jakarta.servlet.http.HttpServletResponse#sendError(int, String)
*/
@Target({ElementType.TYPE, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @ | and |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/params/aggregator/AggregatorIntegrationTests.java | {
"start": 9765,
"end": 9905
} | interface ____ {
}
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.PARAMETER)
@AggregateWith(AddressAggregator.class)
@ | CsvToPerson |
java | dropwizard__dropwizard | dropwizard-jersey/src/main/java/io/dropwizard/jersey/errors/IllegalStateExceptionMapper.java | {
"start": 486,
"end": 2090
} | class ____ extends LoggingExceptionMapper<IllegalStateException> {
public IllegalStateExceptionMapper() {
super(LoggerFactory.getLogger(IllegalStateExceptionMapper.class));
}
@Override
public Response toResponse(final IllegalStateException exception) {
final String message = exception.getMessage();
if (LocalizationMessages.FORM_PARAM_CONTENT_TYPE_ERROR().equals(message)) {
/*
* If a POST request contains a Content-Type that is not application/x-www-form-urlencoded, Jersey throws
* IllegalStateException with or without @Consumes. See: https://java.net/jira/browse/JERSEY-2636
*/
// Logs exception with additional information for developers.
logger.debug("If the HTTP method is POST and using @FormParam in a resource method"
+ ", Content-Type should be application/x-www-form-urlencoded.", exception);
// Returns the same response as if NotSupportedException was thrown.
return createResponse(new NotSupportedException());
}
// LoggingExceptionMapper will log exception
return super.toResponse(exception);
}
private Response createResponse(final WebApplicationException exception) {
final ErrorMessage errorMessage = new ErrorMessage(exception.getResponse().getStatus(),
exception.getLocalizedMessage());
return Response.status(errorMessage.getCode())
.type(APPLICATION_JSON_TYPE)
.entity(errorMessage)
.build();
}
}
| IllegalStateExceptionMapper |
java | google__guava | android/guava-tests/test/com/google/common/base/FunctionsTest.java | {
"start": 13498,
"end": 16413
} | class ____ implements Supplier<Integer>, Serializable {
@GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0;
private int value;
@Override
public Integer get() {
return ++value;
}
@Override
public boolean equals(@Nullable Object obj) {
if (obj instanceof CountingSupplier) {
return this.value == ((CountingSupplier) obj).value;
}
return false;
}
@Override
public int hashCode() {
return value;
}
}
public void testForSupplier() {
Supplier<Integer> supplier = new CountingSupplier();
Function<@Nullable Object, Integer> function = Functions.forSupplier(supplier);
assertEquals(1, (int) function.apply(null));
assertEquals(2, (int) function.apply("foo"));
new EqualsTester()
.addEqualityGroup(function, Functions.forSupplier(supplier))
.addEqualityGroup(Functions.forSupplier(new CountingSupplier()))
.addEqualityGroup(Functions.forSupplier(Suppliers.ofInstance(12)))
.addEqualityGroup(Functions.toStringFunction())
.testEquals();
}
@J2ktIncompatible
@GwtIncompatible // SerializableTester
public void testForSupplierSerializable() {
checkCanReserialize(Functions.forSupplier(new CountingSupplier()));
}
@J2ktIncompatible
@GwtIncompatible // reflection
public void testNulls() throws Exception {
new ClassSanityTester().forAllPublicStaticMethods(Functions.class).testNulls();
}
@J2ktIncompatible
@GwtIncompatible // reflection
@AndroidIncompatible // TODO(cpovirk): ClassNotFoundException: com.google.common.base.Function
// (I suspect that this and the other similar failures happen with ArbitraryInstances proxies.)
public void testEqualsAndSerializable() throws Exception {
new ClassSanityTester().forAllPublicStaticMethods(Functions.class).testEqualsAndSerializable();
}
@J2ktIncompatible
@GwtIncompatible // SerializableTester
private static <Y> void checkCanReserialize(Function<? super Integer, Y> f) {
Function<? super Integer, Y> g = SerializableTester.reserializeAndAssert(f);
for (int i = 1; i < 5; i++) {
// convoluted way to check that the same result happens from each
Y expected = null;
try {
expected = f.apply(i);
} catch (IllegalArgumentException e) {
try {
g.apply(i);
fail();
} catch (IllegalArgumentException ok) {
continue;
}
}
assertEquals(expected, g.apply(i));
}
}
@J2ktIncompatible
@GwtIncompatible // SerializableTester
private static <Y> void checkCanReserializeSingleton(Function<? super String, Y> f) {
Function<? super String, Y> g = SerializableTester.reserializeAndAssert(f);
assertSame(f, g);
for (Integer i = 1; i < 5; i++) {
assertEquals(f.apply(i.toString()), g.apply(i.toString()));
}
}
}
| CountingSupplier |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/bugs/varargs/VarargsErrorWhenCallingRealMethodTest.java | {
"start": 375,
"end": 717
} | class ____ {
int blah(String a, String b, Object... c) {
return 1;
}
}
@Test
public void shouldNotThrowAnyException() throws Exception {
Foo foo = mock(Foo.class);
when(foo.blah(anyString(), anyString())).thenCallRealMethod();
assertEquals(1, foo.blah("foo", "bar"));
}
}
| Foo |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/builder/EqualsBuilder.java | {
"start": 3491,
"end": 31799
} | class ____ implements Builder<Boolean> {
/**
* A registry of objects used by reflection methods to detect cyclical object references and avoid infinite loops.
*
* @since 3.0
*/
private static final ThreadLocal<Set<Pair<IDKey, IDKey>>> REGISTRY = ThreadLocal.withInitial(HashSet::new);
/*
* NOTE: we cannot store the actual objects in a HashSet, as that would use the very hashCode()
* we are in the process of calculating.
*
* So we generate a one-to-one mapping from the original object to a new object.
*
* Now HashSet uses equals() to determine if two elements with the same hash code really
* are equal, so we also need to ensure that the replacement objects are only equal
* if the original objects are identical.
*
* The original implementation (2.4 and before) used the System.identityHashCode()
* method - however this is not guaranteed to generate unique ids (e.g. LANG-459)
*
* We now use the IDKey helper class (adapted from org.apache.axis.utils.IDKey)
* to disambiguate the duplicate ids.
*/
/**
* Converters value pair into a register pair.
*
* @param lhs {@code this} object
* @param rhs the other object
* @return the pair
*/
static Pair<IDKey, IDKey> getRegisterPair(final Object lhs, final Object rhs) {
return Pair.of(new IDKey(lhs), new IDKey(rhs));
}
/**
* Gets the registry of object pairs being traversed by the reflection
* methods in the current thread.
*
* @return Set the registry of objects being traversed
* @since 3.0
*/
static Set<Pair<IDKey, IDKey>> getRegistry() {
return REGISTRY.get();
}
/**
* Tests whether the registry contains the given object pair.
* <p>
* Used by the reflection methods to avoid infinite loops.
* Objects might be swapped therefore a check is needed if the object pair
* is registered in given or swapped order.
* </p>
*
* @param lhs {@code this} object to lookup in registry
* @param rhs the other object to lookup on registry
* @return boolean {@code true} if the registry contains the given object.
* @since 3.0
*/
static boolean isRegistered(final Object lhs, final Object rhs) {
final Set<Pair<IDKey, IDKey>> registry = getRegistry();
final Pair<IDKey, IDKey> pair = getRegisterPair(lhs, rhs);
final Pair<IDKey, IDKey> swappedPair = Pair.of(pair.getRight(), pair.getLeft());
return registry != null && (registry.contains(pair) || registry.contains(swappedPair));
}
    /**
     * This method uses reflection to determine if the two {@link Object}s
     * are equal.
     *
     * <p>It uses {@code AccessibleObject.setAccessible} to gain access to private
     * fields. This means that it will throw a security exception if run under
     * a security manager, if the permissions are not set up correctly. It is also
     * not as efficient as testing explicitly. Non-primitive fields are compared using
     * {@code equals()}.</p>
     *
     * <p>If the testTransients parameter is set to {@code true}, transient
     * members will be tested, otherwise they are ignored, as they are likely
     * derived fields, and not part of the value of the {@link Object}.</p>
     *
     * <p>Static fields will not be tested. Superclass fields will be included.</p>
     *
     * @param lhs {@code this} object
     * @param rhs the other object
     * @param testTransients whether to include transient fields
     * @return {@code true} if the two Objects have tested equals.
     * @see EqualsExclude
     */
    public static boolean reflectionEquals(final Object lhs, final Object rhs, final boolean testTransients) {
        // Delegates with a null reflect-up-to bound, which is treated as java.lang.Object.
        return reflectionEquals(lhs, rhs, testTransients, null);
    }
/**
* This method uses reflection to determine if the two {@link Object}s
* are equal.
*
* <p>It uses {@code AccessibleObject.setAccessible} to gain access to private
* fields. This means that it will throw a security exception if run under
* a security manager, if the permissions are not set up correctly. It is also
* not as efficient as testing explicitly. Non-primitive fields are compared using
* {@code equals()}.</p>
*
* <p>If the testTransients parameter is set to {@code true}, transient
* members will be tested, otherwise they are ignored, as they are likely
* derived fields, and not part of the value of the {@link Object}.</p>
*
* <p>Static fields will not be included. Superclass fields will be appended
* up to and including the specified superclass. A null superclass is treated
* as java.lang.Object.</p>
*
* <p>If the testRecursive parameter is set to {@code true}, non primitive
* (and non primitive wrapper) field types will be compared by
* {@link EqualsBuilder} recursively instead of invoking their
* {@code equals()} method. Leading to a deep reflection equals test.
*
* @param lhs {@code this} object
* @param rhs the other object
* @param testTransients whether to include transient fields
* @param reflectUpToClass the superclass to reflect up to (inclusive),
* may be {@code null}
* @param testRecursive whether to call reflection equals on non-primitive
* fields recursively.
* @param excludeFields array of field names to exclude from testing
* @return {@code true} if the two Objects have tested equals.
* @see EqualsExclude
* @since 3.6
*/
public static boolean reflectionEquals(final Object lhs, final Object rhs, final boolean testTransients, final Class<?> reflectUpToClass,
final boolean testRecursive, final String... excludeFields) {
if (lhs == rhs) {
return true;
}
if (lhs == null || rhs == null) {
return false;
}
// @formatter:off
return new EqualsBuilder()
.setExcludeFields(excludeFields)
.setReflectUpToClass(reflectUpToClass)
.setTestTransients(testTransients)
.setTestRecursive(testRecursive)
.reflectionAppend(lhs, rhs)
.isEquals();
// @formatter:on
}
    /**
     * This method uses reflection to determine if the two {@link Object}s
     * are equal.
     *
     * <p>It uses {@code AccessibleObject.setAccessible} to gain access to private
     * fields. This means that it will throw a security exception if run under
     * a security manager, if the permissions are not set up correctly. It is also
     * not as efficient as testing explicitly. Non-primitive fields are compared using
     * {@code equals()}.</p>
     *
     * <p>If the testTransients parameter is set to {@code true}, transient
     * members will be tested, otherwise they are ignored, as they are likely
     * derived fields, and not part of the value of the {@link Object}.</p>
     *
     * <p>Static fields will not be included. Superclass fields will be appended
     * up to and including the specified superclass. A null superclass is treated
     * as java.lang.Object.</p>
     *
     * @param lhs {@code this} object
     * @param rhs the other object
     * @param testTransients whether to include transient fields
     * @param reflectUpToClass the superclass to reflect up to (inclusive),
     *  may be {@code null}
     * @param excludeFields array of field names to exclude from testing
     * @return {@code true} if the two Objects have tested equals.
     * @see EqualsExclude
     * @since 2.0
     */
    public static boolean reflectionEquals(final Object lhs, final Object rhs, final boolean testTransients, final Class<?> reflectUpToClass,
            final String... excludeFields) {
        // Non-recursive variant: delegates with testRecursive = false.
        return reflectionEquals(lhs, rhs, testTransients, reflectUpToClass, false, excludeFields);
    }
    /**
     * This method uses reflection to determine if the two {@link Object}s
     * are equal.
     *
     * <p>It uses {@code AccessibleObject.setAccessible} to gain access to private
     * fields. This means that it will throw a security exception if run under
     * a security manager, if the permissions are not set up correctly. It is also
     * not as efficient as testing explicitly. Non-primitive fields are compared using
     * {@code equals()}.</p>
     *
     * <p>Transient members will not be tested, as they are likely derived
     * fields, and not part of the value of the Object.</p>
     *
     * <p>Static fields will not be tested. Superclass fields will be included.</p>
     *
     * @param lhs {@code this} object
     * @param rhs the other object
     * @param excludeFields Collection of String field names to exclude from testing
     * @return {@code true} if the two Objects have tested equals.
     * @see EqualsExclude
     */
    public static boolean reflectionEquals(final Object lhs, final Object rhs, final Collection<String> excludeFields) {
        // Convert the Collection to a null-free String[] and delegate to the varargs overload.
        return reflectionEquals(lhs, rhs, ReflectionToStringBuilder.toNoNullStringArray(excludeFields));
    }
    /**
     * This method uses reflection to determine if the two {@link Object}s
     * are equal.
     *
     * <p>It uses {@code AccessibleObject.setAccessible} to gain access to private
     * fields. This means that it will throw a security exception if run under
     * a security manager, if the permissions are not set up correctly. It is also
     * not as efficient as testing explicitly. Non-primitive fields are compared using
     * {@code equals()}.</p>
     *
     * <p>Transient members will not be tested, as they are likely derived
     * fields, and not part of the value of the Object.</p>
     *
     * <p>Static fields will not be tested. Superclass fields will be included.</p>
     *
     * @param lhs {@code this} object
     * @param rhs the other object
     * @param excludeFields array of field names to exclude from testing
     * @return {@code true} if the two Objects have tested equals.
     * @see EqualsExclude
     */
    public static boolean reflectionEquals(final Object lhs, final Object rhs, final String... excludeFields) {
        // Defaults: transients excluded (false), reflect all the way up (null bound).
        return reflectionEquals(lhs, rhs, false, null, excludeFields);
    }
    /**
     * Registers the given object pair.
     * Used by the reflection methods to avoid infinite loops.
     *
     * <p>Each call should be balanced by a matching
     * {@link #unregister(Object, Object)} so the per-thread registry is
     * eventually cleaned up (see {@code unregister}'s REGISTRY.remove()).</p>
     *
     * @param lhs {@code this} object to register
     * @param rhs the other object to register
     */
    private static void register(final Object lhs, final Object rhs) {
        getRegistry().add(getRegisterPair(lhs, rhs));
    }
/**
* Unregisters the given object pair.
*
* <p>
* Used by the reflection methods to avoid infinite loops.
* </p>
*
* @param lhs {@code this} object to unregister
* @param rhs the other object to unregister
* @since 3.0
*/
private static void unregister(final Object lhs, final Object rhs) {
final Set<Pair<IDKey, IDKey>> registry = getRegistry();
registry.remove(getRegisterPair(lhs, rhs));
if (registry.isEmpty()) {
REGISTRY.remove();
}
}
    /**
     * If the fields tested are equals.
     * The default value is {@code true}.
     */
    private boolean isEquals = true;
    // Whether transient fields are included in reflective comparison.
    private boolean testTransients;
    // Whether non-primitive fields are compared by recursive reflection
    // instead of invoking their equals() method (see append(Object, Object)).
    private boolean testRecursive;
    // Classes whose equals() is trusted even in recursive mode; seeded with
    // String in the constructor.
    private List<Class<?>> bypassReflectionClasses;
    // Superclass (inclusive) up to which reflective comparison walks;
    // null is treated as java.lang.Object per the reflectionEquals javadoc.
    private Class<?> reflectUpToClass;
    // Field names skipped by reflective comparison.
    private String[] excludeFields;
    /**
     * Constructor for EqualsBuilder.
     *
     * <p>Starts off assuming that equals is {@code true}.</p>
     * @see Object#equals(Object)
     */
    public EqualsBuilder() {
        // set up default classes to bypass reflection for
        bypassReflectionClasses = new ArrayList<>(1);
        // String caches its hash in a lazily-written, non-transient field, so
        // reflective field comparison could disagree; use String.equals() instead.
        bypassReflectionClasses.add(String.class); //hashCode field being lazy but not transient
    }
/**
* Test if two {@code booleans}s are equal.
*
* @param lhs the left-hand side {@code boolean}
* @param rhs the right-hand side {@code boolean}
* @return {@code this} instance.
*/
public EqualsBuilder append(final boolean lhs, final boolean rhs) {
if (!isEquals) {
return this;
}
isEquals = lhs == rhs;
return this;
}
/**
* Deep comparison of array of {@code boolean}. Length and all
* values are compared.
*
* <p>The method {@link #append(boolean, boolean)} is used.</p>
*
* @param lhs the left-hand side {@code boolean[]}
* @param rhs the right-hand side {@code boolean[]}
* @return {@code this} instance.
*/
public EqualsBuilder append(final boolean[] lhs, final boolean[] rhs) {
if (!isEquals) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
setEquals(false);
return this;
}
if (lhs.length != rhs.length) {
setEquals(false);
return this;
}
for (int i = 0; i < lhs.length && isEquals; ++i) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* Test if two {@code byte}s are equal.
*
* @param lhs the left-hand side {@code byte}
* @param rhs the right-hand side {@code byte}
* @return {@code this} instance.
*/
public EqualsBuilder append(final byte lhs, final byte rhs) {
if (isEquals) {
isEquals = lhs == rhs;
}
return this;
}
/**
* Deep comparison of array of {@code byte}. Length and all
* values are compared.
*
* <p>The method {@link #append(byte, byte)} is used.</p>
*
* @param lhs the left-hand side {@code byte[]}
* @param rhs the right-hand side {@code byte[]}
* @return {@code this} instance.
*/
public EqualsBuilder append(final byte[] lhs, final byte[] rhs) {
if (!isEquals) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
setEquals(false);
return this;
}
if (lhs.length != rhs.length) {
setEquals(false);
return this;
}
for (int i = 0; i < lhs.length && isEquals; ++i) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* Test if two {@code char}s are equal.
*
* @param lhs the left-hand side {@code char}
* @param rhs the right-hand side {@code char}
* @return {@code this} instance.
*/
public EqualsBuilder append(final char lhs, final char rhs) {
if (isEquals) {
isEquals = lhs == rhs;
}
return this;
}
/**
* Deep comparison of array of {@code char}. Length and all
* values are compared.
*
* <p>The method {@link #append(char, char)} is used.</p>
*
* @param lhs the left-hand side {@code char[]}
* @param rhs the right-hand side {@code char[]}
* @return {@code this} instance.
*/
public EqualsBuilder append(final char[] lhs, final char[] rhs) {
if (!isEquals) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
setEquals(false);
return this;
}
if (lhs.length != rhs.length) {
setEquals(false);
return this;
}
for (int i = 0; i < lhs.length && isEquals; ++i) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* Test if two {@code double}s are equal by testing that the
* pattern of bits returned by {@code doubleToLong} are equal.
*
* <p>This handles NaNs, Infinities, and {@code -0.0}.</p>
*
* <p>It is compatible with the hash code generated by
* {@link HashCodeBuilder}.</p>
*
* @param lhs the left-hand side {@code double}
* @param rhs the right-hand side {@code double}
* @return {@code this} instance.
*/
public EqualsBuilder append(final double lhs, final double rhs) {
if (isEquals) {
return append(Double.doubleToLongBits(lhs), Double.doubleToLongBits(rhs));
}
return this;
}
/**
* Deep comparison of array of {@code double}. Length and all
* values are compared.
*
* <p>The method {@link #append(double, double)} is used.</p>
*
* @param lhs the left-hand side {@code double[]}
* @param rhs the right-hand side {@code double[]}
* @return {@code this} instance.
*/
public EqualsBuilder append(final double[] lhs, final double[] rhs) {
if (!isEquals) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
setEquals(false);
return this;
}
if (lhs.length != rhs.length) {
setEquals(false);
return this;
}
for (int i = 0; i < lhs.length && isEquals; ++i) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* Test if two {@code float}s are equal by testing that the
* pattern of bits returned by doubleToLong are equal.
*
* <p>This handles NaNs, Infinities, and {@code -0.0}.</p>
*
* <p>It is compatible with the hash code generated by
* {@link HashCodeBuilder}.</p>
*
* @param lhs the left-hand side {@code float}
* @param rhs the right-hand side {@code float}
* @return {@code this} instance.
*/
public EqualsBuilder append(final float lhs, final float rhs) {
if (isEquals) {
return append(Float.floatToIntBits(lhs), Float.floatToIntBits(rhs));
}
return this;
}
/**
* Deep comparison of array of {@code float}. Length and all
* values are compared.
*
* <p>The method {@link #append(float, float)} is used.</p>
*
* @param lhs the left-hand side {@code float[]}
* @param rhs the right-hand side {@code float[]}
* @return {@code this} instance.
*/
public EqualsBuilder append(final float[] lhs, final float[] rhs) {
if (!isEquals) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
setEquals(false);
return this;
}
if (lhs.length != rhs.length) {
setEquals(false);
return this;
}
for (int i = 0; i < lhs.length && isEquals; ++i) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* Test if two {@code int}s are equal.
*
* @param lhs the left-hand side {@code int}
* @param rhs the right-hand side {@code int}
* @return {@code this} instance.
*/
public EqualsBuilder append(final int lhs, final int rhs) {
if (isEquals) {
isEquals = lhs == rhs;
}
return this;
}
/**
* Deep comparison of array of {@code int}. Length and all
* values are compared.
*
* <p>The method {@link #append(int, int)} is used.</p>
*
* @param lhs the left-hand side {@code int[]}
* @param rhs the right-hand side {@code int[]}
* @return {@code this} instance.
*/
public EqualsBuilder append(final int[] lhs, final int[] rhs) {
if (!isEquals) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
setEquals(false);
return this;
}
if (lhs.length != rhs.length) {
setEquals(false);
return this;
}
for (int i = 0; i < lhs.length && isEquals; ++i) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* Test if two {@code long}s are equal.
*
* @param lhs
* the left-hand side {@code long}
* @param rhs
* the right-hand side {@code long}
* @return {@code this} instance.
*/
public EqualsBuilder append(final long lhs, final long rhs) {
if (isEquals) {
isEquals = lhs == rhs;
}
return this;
}
/**
* Deep comparison of array of {@code long}. Length and all
* values are compared.
*
* <p>The method {@link #append(long, long)} is used.</p>
*
* @param lhs the left-hand side {@code long[]}
* @param rhs the right-hand side {@code long[]}
* @return {@code this} instance.
*/
public EqualsBuilder append(final long[] lhs, final long[] rhs) {
if (!isEquals) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
setEquals(false);
return this;
}
if (lhs.length != rhs.length) {
setEquals(false);
return this;
}
for (int i = 0; i < lhs.length && isEquals; ++i) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* Test if two {@link Object}s are equal using either
* #{@link #reflectionAppend(Object, Object)}, if object are non
* primitives (or wrapper of primitives) or if field {@code testRecursive}
* is set to {@code false}. Otherwise, using their
* {@code equals} method.
*
* @param lhs the left-hand side object
* @param rhs the right-hand side object
* @return {@code this} instance.
*/
public EqualsBuilder append(final Object lhs, final Object rhs) {
if (!isEquals) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
setEquals(false);
return this;
}
final Class<?> lhsClass = lhs.getClass();
if (lhsClass.isArray()) {
// factor out array case in order to keep method small enough
// to be inlined
appendArray(lhs, rhs);
} else // The simple case, not an array, just test the element
if (testRecursive && !ClassUtils.isPrimitiveOrWrapper(lhsClass)) {
reflectionAppend(lhs, rhs);
} else {
isEquals = lhs.equals(rhs);
}
return this;
}
/**
* Performs a deep comparison of two {@link Object} arrays.
*
* <p>This also will be called for the top level of
* multi-dimensional, ragged, and multi-typed arrays.</p>
*
* <p>Note that this method does not compare the type of the arrays; it only
* compares the contents.</p>
*
* @param lhs the left-hand side {@code Object[]}
* @param rhs the right-hand side {@code Object[]}
* @return {@code this} instance.
*/
public EqualsBuilder append(final Object[] lhs, final Object[] rhs) {
if (!isEquals) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
setEquals(false);
return this;
}
if (lhs.length != rhs.length) {
setEquals(false);
return this;
}
for (int i = 0; i < lhs.length && isEquals; ++i) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* Test if two {@code short}s are equal.
*
* @param lhs the left-hand side {@code short}
* @param rhs the right-hand side {@code short}
* @return {@code this} instance.
*/
public EqualsBuilder append(final short lhs, final short rhs) {
if (isEquals) {
isEquals = lhs == rhs;
}
return this;
}
/**
* Deep comparison of array of {@code short}. Length and all
* values are compared.
*
* <p>The method {@link #append(short, short)} is used.</p>
*
* @param lhs the left-hand side {@code short[]}
* @param rhs the right-hand side {@code short[]}
* @return {@code this} instance.
*/
public EqualsBuilder append(final short[] lhs, final short[] rhs) {
if (!isEquals) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
setEquals(false);
return this;
}
if (lhs.length != rhs.length) {
setEquals(false);
return this;
}
for (int i = 0; i < lhs.length && isEquals; ++i) {
append(lhs[i], rhs[i]);
}
return this;
}
    /**
     * Tests if an {@link Object} is equal to an array.
     *
     * <p>Dispatches to the matching type-specific array {@code append}
     * overload. Used by {@link #append(Object, Object)} once {@code lhs}
     * is known to be an array.</p>
     *
     * @param lhs the left-hand side object, an array
     * @param rhs the right-hand side object
     */
    private void appendArray(final Object lhs, final Object rhs) {
        // First we compare different dimensions, for example: a boolean[][] to a boolean[];
        // the class check also rejects arrays of different component types.
        // Then we 'switch' on the array type to dispatch to the correct handler.
        // This handles multidimensional arrays of the same depth.
        if (lhs.getClass() != rhs.getClass()) {
            setEquals(false);
        } else if (lhs instanceof long[]) {
            append((long[]) lhs, (long[]) rhs);
        } else if (lhs instanceof int[]) {
            append((int[]) lhs, (int[]) rhs);
        } else if (lhs instanceof short[]) {
            append((short[]) lhs, (short[]) rhs);
        } else if (lhs instanceof char[]) {
            append((char[]) lhs, (char[]) rhs);
        } else if (lhs instanceof byte[]) {
            append((byte[]) lhs, (byte[]) rhs);
        } else if (lhs instanceof double[]) {
            append((double[]) lhs, (double[]) rhs);
        } else if (lhs instanceof float[]) {
            append((float[]) lhs, (float[]) rhs);
        } else if (lhs instanceof boolean[]) {
            append((boolean[]) lhs, (boolean[]) rhs);
        } else {
            // Not an array of primitives
            append((Object[]) lhs, (Object[]) rhs);
        }
    }
/**
* Adds the result of {@code super.equals()} to this builder.
*
* @param superEquals the result of calling {@code super.equals()}
* @return {@code this} instance.
* @since 2.0
*/
public EqualsBuilder appendSuper(final boolean superEquals) {
if (!isEquals) {
return this;
}
isEquals = superEquals;
return this;
}
/**
* Returns {@code true} if the fields that have been checked
* are all equal.
*
* @return {@code true} if all of the fields that have been checked
* are equal, {@code false} otherwise.
*
* @since 3.0
*/
@Override
public Boolean build() {
return Boolean.valueOf(isEquals());
}
    /**
     * Returns {@code true} if the fields that have been checked
     * are all equal.
     *
     * @return boolean {@code true} if every appended comparison so far
     *         succeeded (the initial value before any append is {@code true})
     */
    public boolean isEquals() {
        return isEquals;
    }
/**
* Tests if two {@code objects} by using reflection.
*
* <p>It uses {@code AccessibleObject.setAccessible} to gain access to private
* fields. This means that it will throw a security exception if run under
* a security manager, if the permissions are not set up correctly. It is also
* not as efficient as testing explicitly. Non-primitive fields are compared using
* {@code equals()}.</p>
*
* <p>If the testTransients field is set to {@code true}, transient
* members will be tested, otherwise they are ignored, as they are likely
* derived fields, and not part of the value of the {@link Object}.</p>
*
* <p>Static fields will not be included. Superclass fields will be appended
* up to and including the specified superclass in field {@code reflectUpToClass}.
* A null superclass is treated as java.lang.Object.</p>
*
* <p>Field names listed in field {@code excludeFields} will be ignored.</p>
*
* <p>If either | EqualsBuilder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/querycache/QueryCacheExistingEntityInstanceTest.java | {
"start": 5534,
"end": 5815
} | class ____ {
@Id
@GeneratedValue
private Long id;
private String name;
public ChildEntity() {
}
public ChildEntity(String name) {
this.name = name;
}
public String getName() {
return name;
}
}
@Entity( name = "ParentEntity" )
public static | ChildEntity |
java | apache__camel | components/camel-test/camel-test-junit5/src/test/java/org/apache/camel/test/junit5/patterns/IsMockEndpointsAndSkipJUnit5Test.java | {
"start": 1196,
"end": 2591
} | class ____ extends CamelTestSupport {
@Override
public String isMockEndpointsAndSkip() {
// override this method and return the pattern for which endpoints to
// mock,
// and skip sending to the original endpoint.
return "direct:foo";
}
@Test
public void testMockEndpointAndSkip() throws Exception {
// notice we have automatic mocked the direct:foo endpoints and the name
// of the endpoints is "mock:uri"
getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:direct:foo").expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
MockEndpoint.assertIsSatisfied(context);
// the message was not send to the direct:foo route and thus not sent to
// the seda endpoint
SedaEndpoint seda = context.getEndpoint("seda:foo", SedaEndpoint.class);
assertEquals(0, seda.getCurrentQueueSize());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("direct:foo").to("mock:result");
from("direct:foo").transform(constant("Bye World")).to("seda:foo");
}
};
}
}
// end::e1[]
// END SNIPPET: e1
| IsMockEndpointsAndSkipJUnit5Test |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineKeyValuesFilter.java | {
"start": 1052,
"end": 1238
} | class ____ represents filter to be applied based on multiple values
* for a key and these values being equal or not equal to values in back-end
* store.
*/
@Private
@Unstable
public | which |
java | hibernate__hibernate-orm | local-build-plugins/src/main/java/org/hibernate/orm/db/DatabaseServicePlugin.java | {
"start": 435,
"end": 1360
} | class ____ implements Plugin<Project> {
@Override
@SuppressWarnings("UnstableApiUsage")
public void apply(Project project) {
// register the service used to restrict parallel execution
// of tests - used to avoid database schema/catalog collisions
final BuildServiceRegistry sharedServices = project.getGradle().getSharedServices();
final Provider<DatabaseService> databaseServiceProvider = sharedServices.registerIfAbsent(
REGISTRATION_NAME,
DatabaseService.class,
spec -> spec.getMaxParallelUsages().set( 1 )
);
final String database = (String) project.getProperties().get( "db" );
// H2 and HSQLDB are in-memory, so there is no sharing that needs to be avoided
if ( database != null && !"h2".equals( database ) && !"hsqldb".equals( database ) ) {
project.getTasks().withType( Test.class ).forEach(
test -> test.usesService( databaseServiceProvider )
);
}
}
}
| DatabaseServicePlugin |
java | apache__flink | flink-formats/flink-orc/src/test/java/org/apache/flink/orc/OrcFileSystemFilterTest.java | {
"start": 1503,
"end": 6065
} | class ____ {
@Test
@SuppressWarnings("unchecked")
void testApplyPredicate() {
List<ResolvedExpression> args = new ArrayList<>();
// equal
FieldReferenceExpression fieldReferenceExpression =
new FieldReferenceExpression("long1", DataTypes.BIGINT(), 0, 0);
ValueLiteralExpression valueLiteralExpression = new ValueLiteralExpression(10);
args.add(fieldReferenceExpression);
args.add(valueLiteralExpression);
CallExpression equalExpression =
CallExpression.permanent(
BuiltInFunctionDefinitions.EQUALS, args, DataTypes.BOOLEAN());
OrcFilters.Predicate predicate1 = OrcFilters.toOrcPredicate(equalExpression);
OrcFilters.Predicate predicate2 = new OrcFilters.Equals("long1", LONG, 10);
assertThat(predicate1).hasToString(predicate2.toString());
// greater than
CallExpression greaterExpression =
CallExpression.permanent(
BuiltInFunctionDefinitions.GREATER_THAN, args, DataTypes.BOOLEAN());
OrcFilters.Predicate predicate3 = OrcFilters.toOrcPredicate(greaterExpression);
OrcFilters.Predicate predicate4 =
new OrcFilters.Not(new OrcFilters.LessThanEquals("long1", LONG, 10));
assertThat(predicate3).hasToString(predicate4.toString());
// less than
CallExpression lessExpression =
CallExpression.permanent(
BuiltInFunctionDefinitions.LESS_THAN, args, DataTypes.BOOLEAN());
OrcFilters.Predicate predicate5 = OrcFilters.toOrcPredicate(lessExpression);
OrcFilters.Predicate predicate6 = new OrcFilters.LessThan("long1", LONG, 10);
assertThat(predicate5).hasToString(predicate6.toString());
// and
CallExpression andExpression =
CallExpression.permanent(
BuiltInFunctionDefinitions.AND,
Arrays.asList(greaterExpression, lessExpression),
DataTypes.BOOLEAN());
OrcFilters.Predicate predicate7 = OrcFilters.toOrcPredicate(andExpression);
OrcFilters.Predicate predicate8 = new OrcFilters.And(predicate4, predicate6);
assertThat(predicate7).hasToString(predicate8.toString());
}
@Test
@SuppressWarnings("unchecked")
void testApplyPredicateReverse() {
List<ResolvedExpression> args = new ArrayList<>();
FieldReferenceExpression fieldReferenceExpression =
new FieldReferenceExpression("x", DataTypes.BIGINT(), 0, 0);
ValueLiteralExpression valueLiteralExpression = new ValueLiteralExpression(10);
args.add(valueLiteralExpression);
args.add(fieldReferenceExpression);
CallExpression expression;
OrcFilters.Predicate predicate;
// assert that 10 >= x expression is converted to x <= 10 ORC predicate
expression =
CallExpression.permanent(
BuiltInFunctionDefinitions.GREATER_THAN_OR_EQUAL,
args,
DataTypes.BOOLEAN());
predicate = new OrcFilters.LessThanEquals("x", LONG, 10);
assertThat(OrcFilters.toOrcPredicate(expression)).hasToString(predicate.toString());
// assert that 10 > x expression is converted to x < 10 ORC predicate
expression =
CallExpression.permanent(
BuiltInFunctionDefinitions.GREATER_THAN, args, DataTypes.BOOLEAN());
predicate = new OrcFilters.LessThan("x", LONG, 10);
assertThat(OrcFilters.toOrcPredicate(expression)).hasToString(predicate.toString());
// assert that 10 <= x expression is converted to NOT(x < 10) ORC predicate
expression =
CallExpression.permanent(
BuiltInFunctionDefinitions.LESS_THAN_OR_EQUAL, args, DataTypes.BOOLEAN());
predicate = new OrcFilters.Not(new OrcFilters.LessThan("x", LONG, 10));
assertThat(OrcFilters.toOrcPredicate(expression)).hasToString(predicate.toString());
// assert that 10 < x expression is converted to NOT(x <= 10) ORC predicate
expression =
CallExpression.permanent(
BuiltInFunctionDefinitions.LESS_THAN, args, DataTypes.BOOLEAN());
predicate = new OrcFilters.Not(new OrcFilters.LessThanEquals("x", LONG, 10));
assertThat(OrcFilters.toOrcPredicate(expression)).hasToString(predicate.toString());
}
}
| OrcFileSystemFilterTest |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java | {
"start": 934,
"end": 1022
} | class ____ extends HttpOpParam<GetOpParam.Op> {
/** Get operations. */
public | GetOpParam |
java | apache__camel | components/camel-netty-http/src/main/java/org/apache/camel/component/netty/http/NettySharedHttpServerBootstrapConfiguration.java | {
"start": 934,
"end": 2405
} | class ____ extends NettyServerBootstrapConfiguration {
private int chunkedMaxContentLength = 1024 * 1024;
private boolean chunked = true;
private boolean compression;
private int maxHeaderSize = 8192;
private int maxInitialLineLength = 4096;
private int maxChunkSize = 8192;
public boolean isChunked() {
return chunked;
}
public void setChunked(boolean chunked) {
this.chunked = chunked;
}
public int getChunkedMaxContentLength() {
return chunkedMaxContentLength;
}
public void setChunkedMaxContentLength(int chunkedMaxContentLength) {
this.chunkedMaxContentLength = chunkedMaxContentLength;
}
public boolean isCompression() {
return compression;
}
public void setCompression(boolean compression) {
this.compression = compression;
}
public int getMaxHeaderSize() {
return maxHeaderSize;
}
public void setMaxHeaderSize(int maxHeaderSize) {
this.maxHeaderSize = maxHeaderSize;
}
public void setMaxInitialLineLength(int maxInitialLineLength) {
this.maxInitialLineLength = maxInitialLineLength;
}
public int getMaxInitialLineLength() {
return maxInitialLineLength;
}
public void setMaxChunkSize(int maxChunkSize) {
this.maxChunkSize = maxChunkSize;
}
public int getMaxChunkSize() {
return maxChunkSize;
}
}
| NettySharedHttpServerBootstrapConfiguration |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/task/TaskSchedulingAutoConfigurationTests.java | {
"start": 14285,
"end": 14597
} | class ____ {
private final Set<String> threadNames = ConcurrentHashMap.newKeySet();
private final CountDownLatch latch = new CountDownLatch(1);
@Scheduled(fixedRate = 60000)
void accumulate() {
this.threadNames.add(Thread.currentThread().getName());
this.latch.countDown();
}
}
static | TestBean |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/NonApplicationEscapeTest.java | {
"start": 2710,
"end": 4800
} | class ____ implements Handler<RoutingContext> {
@Override
public void handle(RoutingContext routingContext) {
routingContext.response()
.setStatusCode(200)
.end(routingContext.request().query() != null
? routingContext.request().path() + "?" + routingContext.request().query()
: routingContext.request().path());
}
}
@Test
public void testNonApplicationEndpointEscaped() {
AtomicReference<String> result = new AtomicReference<>();
WebClient.create(vertx)
.get(uri.getPort(), uri.getHost(), "/non-app-absolute")
.expect(ResponsePredicate.SC_OK)
.send(ar -> {
if (ar.succeeded()) {
HttpResponse<Buffer> response = ar.result();
result.set(response.bodyAsString());
} else {
result.set(ar.cause().getMessage());
}
});
Awaitility.await().atMost(Duration.ofMinutes(2)).until(() -> result.get() != null);
Assertions.assertEquals("/non-app-absolute", result.get());
}
@Test
public void testNonApplicationEndpointWithQueryEscaped() {
AtomicReference<String> result = new AtomicReference<>();
WebClient.create(vertx)
.get(uri.getPort(), uri.getHost(), "/non-app-absolute?query=true")
.expect(ResponsePredicate.SC_OK)
.send(ar -> {
if (ar.succeeded()) {
HttpResponse<Buffer> response = ar.result();
result.set(response.bodyAsString());
} else {
result.set(ar.cause().getMessage());
}
});
Awaitility.await().atMost(Duration.ofMinutes(2)).until(() -> result.get() != null);
Assertions.assertEquals("/non-app-absolute?query=true", result.get());
}
@Singleton
static | MyHandler |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/time/JavaDurationGetSecondsGetNanoTest.java | {
"start": 2422,
"end": 2793
} | class ____ {
public static ImmutableMap<String, Object> foo(Duration duration) {
return ImmutableMap.of("seconds", duration.getSeconds(), "nanos", duration.getNano());
}
}
""")
.doTest();
}
@Test
public void getSecondsWithGetNanosDifferentScope() {
// Ideally we would also catch cases like this, but it requires scanning "too much" of the | TestCase |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/transformations/SourceTransformation.java | {
"start": 1576,
"end": 4622
} | class ____<OUT, SplitT extends SourceSplit, EnumChkT>
extends TransformationWithLineage<OUT> implements WithBoundedness {
private final Source<OUT, SplitT, EnumChkT> source;
private final WatermarkStrategy<OUT> watermarkStrategy;
private ChainingStrategy chainingStrategy = ChainingStrategy.DEFAULT_CHAINING_STRATEGY;
private @Nullable String coordinatorListeningID;
/**
* Creates a new {@code Transformation} with the given name, output type and parallelism.
*
* @param name The name of the {@code Transformation}, this will be shown in Visualizations and
* the Log
* @param source The {@link Source} itself
* @param watermarkStrategy The {@link WatermarkStrategy} to use
* @param outputType The output type of this {@code Transformation}
* @param parallelism The parallelism of this {@code Transformation}
*/
public SourceTransformation(
String name,
Source<OUT, SplitT, EnumChkT> source,
WatermarkStrategy<OUT> watermarkStrategy,
TypeInformation<OUT> outputType,
int parallelism) {
super(name, outputType, parallelism);
this.source = source;
this.watermarkStrategy = watermarkStrategy;
this.extractLineageVertex();
}
public SourceTransformation(
String name,
Source<OUT, SplitT, EnumChkT> source,
WatermarkStrategy<OUT> watermarkStrategy,
TypeInformation<OUT> outputType,
int parallelism,
boolean parallelismConfigured) {
super(name, outputType, parallelism, parallelismConfigured);
this.source = source;
this.watermarkStrategy = watermarkStrategy;
this.extractLineageVertex();
}
public Source<OUT, SplitT, EnumChkT> getSource() {
return source;
}
public WatermarkStrategy<OUT> getWatermarkStrategy() {
return watermarkStrategy;
}
@Override
public Boundedness getBoundedness() {
return source.getBoundedness();
}
@Override
protected List<Transformation<?>> getTransitivePredecessorsInternal() {
return Collections.singletonList(this);
}
@Override
public List<Transformation<?>> getInputs() {
return Collections.emptyList();
}
@Override
public void setChainingStrategy(ChainingStrategy chainingStrategy) {
this.chainingStrategy = checkNotNull(chainingStrategy);
}
public ChainingStrategy getChainingStrategy() {
return chainingStrategy;
}
public void setCoordinatorListeningID(@Nullable String coordinatorListeningID) {
this.coordinatorListeningID = coordinatorListeningID;
}
@Nullable
public String getCoordinatorListeningID() {
return coordinatorListeningID;
}
private void extractLineageVertex() {
if (source instanceof LineageVertexProvider) {
setLineageVertex(((LineageVertexProvider) source).getLineageVertex());
}
}
}
| SourceTransformation |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestRenderSearchApplicationQueryAction.java | {
"start": 851,
"end": 2224
} | class ____ extends EnterpriseSearchBaseRestHandler {
public RestRenderSearchApplicationQueryAction(XPackLicenseState licenseState) {
super(licenseState, LicenseUtils.Product.SEARCH_APPLICATION);
}
public static final String ENDPOINT_PATH = "/" + EnterpriseSearch.SEARCH_APPLICATION_API_ENDPOINT + "/{name}" + "/_render_query";
@Override
public String getName() {
return "search_application_render_query_action";
}
@Override
public List<Route> routes() {
return List.of(new Route(POST, ENDPOINT_PATH));
}
@Override
protected RestChannelConsumer innerPrepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
final String searchAppName = restRequest.param("name");
SearchApplicationSearchRequest request;
if (restRequest.hasContent()) {
try (var parser = restRequest.contentParser()) {
request = SearchApplicationSearchRequest.fromXContent(searchAppName, parser);
}
} else {
request = new SearchApplicationSearchRequest(searchAppName);
}
final SearchApplicationSearchRequest finalRequest = request;
return channel -> client.execute(RenderSearchApplicationQueryAction.INSTANCE, finalRequest, new RestToXContentListener<>(channel));
}
}
| RestRenderSearchApplicationQueryAction |
java | grpc__grpc-java | api/src/test/java/io/grpc/ClientInterceptorsTest.java | {
"start": 16226,
"end": 16519
} | class ____ implements ClientInterceptor {
@Override
public <ReqT, RespT> ClientCall<ReqT, RespT> interceptCall(MethodDescriptor<ReqT, RespT> method,
CallOptions callOptions, Channel next) {
return next.newCall(method, callOptions);
}
}
private static | NoopInterceptor |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/QueueCapacities.java | {
"start": 1988,
"end": 2291
} | enum ____ {
USED_CAP(0), ABS_USED_CAP(1), MAX_CAP(2), ABS_MAX_CAP(3), CAP(4), ABS_CAP(5),
MAX_AM_PERC(6), RESERVED_CAP(7), ABS_RESERVED_CAP(8), WEIGHT(9), NORMALIZED_WEIGHT(10);
private final int idx;
CapacityType(int idx) {
this.idx = idx;
}
}
private static | CapacityType |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/net/NetClient.java | {
"start": 934,
"end": 5625
} | interface ____ extends Measured {
/**
* Open a connection to a server at the specific {@code port} and {@code host}.
* <p>
* {@code host} can be a valid host name or IP address. The connect is done asynchronously and on success, a
* {@link NetSocket} instance is supplied via the {@code connectHandler} instance
*
* @param port the port
* @param host the host
* @return a future notified when the socket is connected
*/
Future<NetSocket> connect(int port, String host);
/**
* Open a connection to a server at the specific {@code port} and {@code host}.
* <p>
* {@code host} can be a valid host name or IP address. The connect is done asynchronously and on success, a
* {@link NetSocket} instance is supplied via the {@code connectHandler} instance
*
* @param port the port
* @param host the host
* @param serverName the SNI server name
* @return a future notified when the socket is connected
*/
Future<NetSocket> connect(int port, String host, String serverName);
/**
* Open a connection to a server at the specific {@code remoteAddress}.
* <p>
* The connect is done asynchronously and on success, a {@link NetSocket} instance is supplied via the {@code connectHandler} instance
*
* @param remoteAddress the remote address
* @return a future notified when the socket is connected
*/
Future<NetSocket> connect(SocketAddress remoteAddress);
/**
* Open a connection to a server at the specific {@code remoteAddress}.
* <p>
* The connect is done asynchronously and on success, a {@link NetSocket} instance is supplied via the {@code connectHandler} instance
*
* @param remoteAddress the remote address
* @param serverName the SNI server name
* @return a future notified when the socket is connected
*/
Future<NetSocket> connect(SocketAddress remoteAddress, String serverName);
/**
* Open a connection to a server at the specific {@code connectOptions}.
* <p>
* The connect is done asynchronously and on success, a {@link NetSocket} instance is supplied via the {@code connectHandler} instance
*
* @param connectOptions the options describing how to connect to the remote server
* @return a future notified when the socket is connected
*/
Future<NetSocket> connect(ConnectOptions connectOptions);
/**
* Close immediately ({@code shutdown(0, TimeUnit.SECONDS}).
*
* @return a future notified when the client is closed
*/
default Future<Void> close() {
return shutdown(0L, TimeUnit.SECONDS);
}
/**
* Shutdown with a 30 seconds timeout ({@code shutdown(30, TimeUnit.SECONDS)}).
*
* @return a future completed when shutdown has completed
*/
default Future<Void> shutdown() {
return shutdown(30, TimeUnit.SECONDS);
}
/**
* Initiate the client shutdown sequence.
* <p>
* Connections are taken out of service and notified the close sequence has started through {@link NetSocket#shutdownHandler(Handler)}.
* When all connections are closed the client is closed. When the {@code timeout} expires, all unclosed connections are immediately closed.
*
* @return a future notified when the client is closed
* @param timeout the amount of time after which all resources are forcibly closed
* @param unit the of the timeout
*/
Future<Void> shutdown(long timeout, TimeUnit unit);
/**
* <p>Update the client with new SSL {@code options}, the update happens if the options object is valid and different
* from the existing options object.
*
* <p>The boolean succeeded future result indicates whether the update occurred.
*
* @param options the new SSL options
* @return a future signaling the update success
*/
default Future<Boolean> updateSSLOptions(ClientSSLOptions options) {
return updateSSLOptions(options, false);
}
/**
* <p>Update the client with new SSL {@code options}, the update happens if the options object is valid and different
* from the existing options object.
*
* <p>The {@code options} object is compared using its {@code equals} method against the existing options to prevent
* an update when the objects are equals since loading options can be costly, this can happen for share TCP servers.
* When object are equals, setting {@code force} to {@code true} forces the update.
*
* <p>The boolean succeeded future result indicates whether the update occurred.
*
* @param options the new SSL options
* @param force force the update when options are equals
* @return a future signaling the update success
*/
Future<Boolean> updateSSLOptions(ClientSSLOptions options, boolean force);
}
| NetClient |
java | apache__kafka | tools/src/main/java/org/apache/kafka/tools/EndToEndLatency.java | {
"start": 2781,
"end": 17526
} | class ____ {
private static final long POLL_TIMEOUT_MS = 60000;
private static final short DEFAULT_REPLICATION_FACTOR = 1;
private static final int DEFAULT_NUM_PARTITIONS = 1;
public static void main(String... args) {
Exit.exit(mainNoExit(args));
}
static int mainNoExit(String... args) {
try {
execute(args);
return 0;
} catch (TerseException e) {
System.err.println(e.getMessage());
return 1;
} catch (Throwable e) {
System.err.println(e.getMessage());
System.err.println(Utils.stackTrace(e));
return 1;
}
}
// Visible for testing
static void execute(String[] args) throws Exception {
String[] processedArgs = convertLegacyArgsIfNeeded(args);
EndToEndLatencyCommandOptions opts = new EndToEndLatencyCommandOptions(processedArgs);
// required
String brokers = opts.options.valueOf(opts.bootstrapServerOpt);
String topic = opts.options.valueOf(opts.topicOpt);
int numRecords = opts.options.valueOf(opts.numRecordsOpt);
String acks = opts.options.valueOf(opts.acksOpt);
int recordValueSize = opts.options.valueOf(opts.recordSizeOpt);
// optional
Optional<String> propertiesFile = Optional.ofNullable(opts.options.valueOf(opts.commandConfigOpt));
int recordKeySize = opts.options.valueOf(opts.recordKeySizeOpt);
int numHeaders = opts.options.valueOf(opts.numHeadersOpt);
int headerKeySize = opts.options.valueOf(opts.recordHeaderKeySizeOpt);
int headerValueSize = opts.options.valueOf(opts.recordHeaderValueSizeOpt);
try (KafkaConsumer<byte[], byte[]> consumer = createKafkaConsumer(propertiesFile, brokers);
KafkaProducer<byte[], byte[]> producer = createKafkaProducer(propertiesFile, brokers, acks)) {
if (!consumer.listTopics().containsKey(topic)) {
createTopic(propertiesFile, brokers, topic);
}
setupConsumer(topic, consumer);
double totalTime = 0.0;
long[] latencies = new long[numRecords];
Random random = new Random(0);
for (int i = 0; i < numRecords; i++) {
byte[] recordKey = randomBytesOfLen(random, recordKeySize);
byte[] recordValue = randomBytesOfLen(random, recordValueSize);
List<Header> headers = generateHeadersWithSeparateSizes(random, numHeaders, headerKeySize, headerValueSize);
long begin = System.nanoTime();
//Send message (of random bytes) synchronously then immediately poll for it
producer.send(new ProducerRecord<>(topic, null, recordKey, recordValue, headers)).get();
ConsumerRecords<byte[], byte[]> records = consumer.poll(Duration.ofMillis(POLL_TIMEOUT_MS));
long elapsed = System.nanoTime() - begin;
validate(consumer, recordValue, records, recordKey, headers);
//Report progress
if (i % 1000 == 0)
System.out.println(i + "\t" + elapsed / 1000.0 / 1000.0);
totalTime += elapsed;
latencies[i] = elapsed / 1000 / 1000;
}
printResults(numRecords, totalTime, latencies);
consumer.commitSync();
}
}
// Visible for testing
static void validate(KafkaConsumer<byte[], byte[]> consumer, byte[] sentRecordValue, ConsumerRecords<byte[], byte[]> records, byte[] sentRecordKey, Iterable<Header> sentHeaders) {
if (records.isEmpty()) {
commitAndThrow(consumer, "poll() timed out before finding a result (timeout:[" + POLL_TIMEOUT_MS + "ms])");
}
ConsumerRecord<byte[], byte[]> record = records.iterator().next();
String sent = new String(sentRecordValue, StandardCharsets.UTF_8);
String read = new String(record.value(), StandardCharsets.UTF_8);
if (!read.equals(sent)) {
commitAndThrow(consumer, "The message value read [" + read + "] did not match the message value sent [" + sent + "]");
}
if (sentRecordKey != null) {
if (record.key() == null) {
commitAndThrow(consumer, "Expected message key but received null");
}
String sentKey = new String(sentRecordKey, StandardCharsets.UTF_8);
String readKey = new String(record.key(), StandardCharsets.UTF_8);
if (!readKey.equals(sentKey)) {
commitAndThrow(consumer, "The message key read [" + readKey + "] did not match the message key sent [" + sentKey + "]");
}
} else if (record.key() != null) {
commitAndThrow(consumer, "Expected null message key but received [" + new String(record.key(), StandardCharsets.UTF_8) + "]");
}
validateHeaders(consumer, sentHeaders, record);
//Check we only got the one message
if (records.count() != 1) {
int count = records.count();
commitAndThrow(consumer, "Only one result was expected during this test. We found [" + count + "]");
}
}
private static void commitAndThrow(KafkaConsumer<byte[], byte[]> consumer, String message) {
consumer.commitSync();
throw new RuntimeException(message);
}
private static void validateHeaders(KafkaConsumer<byte[], byte[]> consumer, Iterable<Header> sentHeaders, ConsumerRecord<byte[], byte[]> record) {
if (sentHeaders != null && sentHeaders.iterator().hasNext()) {
if (!record.headers().iterator().hasNext()) {
commitAndThrow(consumer, "Expected message headers but received none");
}
Iterator<Header> sentIterator = sentHeaders.iterator();
Iterator<Header> receivedIterator = record.headers().iterator();
while (sentIterator.hasNext() && receivedIterator.hasNext()) {
Header sentHeader = sentIterator.next();
Header receivedHeader = receivedIterator.next();
if (!receivedHeader.key().equals(sentHeader.key()) || !Arrays.equals(receivedHeader.value(), sentHeader.value())) {
String receivedValueStr = receivedHeader.value() == null ? "null" : Arrays.toString(receivedHeader.value());
String sentValueStr = sentHeader.value() == null ? "null" : Arrays.toString(sentHeader.value());
commitAndThrow(consumer, "The message header read [" + receivedHeader.key() + ":" + receivedValueStr +
"] did not match the message header sent [" + sentHeader.key() + ":" + sentValueStr + "]");
}
}
if (sentIterator.hasNext() || receivedIterator.hasNext()) {
commitAndThrow(consumer, "Header count mismatch between sent and received messages");
}
}
}
private static List<Header> generateHeadersWithSeparateSizes(Random random, int numHeaders, int keySize, int valueSize) {
List<Header> headers = new ArrayList<>();
for (int i = 0; i < numHeaders; i++) {
String headerKey = new String(randomBytesOfLen(random, keySize), StandardCharsets.UTF_8);
byte[] headerValue = valueSize == -1 ? null : randomBytesOfLen(random, valueSize);
headers.add(new Header() {
@Override
public String key() {
return headerKey;
}
@Override
public byte[] value() {
return headerValue;
}
});
}
return headers;
}
private static void setupConsumer(String topic, KafkaConsumer<byte[], byte[]> consumer) {
List<TopicPartition> topicPartitions = consumer
.partitionsFor(topic)
.stream()
.map(p -> new TopicPartition(p.topic(), p.partition()))
.collect(Collectors.toList());
consumer.assign(topicPartitions);
consumer.seekToEnd(topicPartitions);
consumer.assignment().forEach(consumer::position);
}
private static void printResults(int numRecords, double totalTime, long[] latencies) {
System.out.printf("Avg latency: %.4f ms%n", totalTime / numRecords / 1000.0 / 1000.0);
Arrays.sort(latencies);
int p50 = (int) latencies[(int) (latencies.length * 0.5)];
int p99 = (int) latencies[(int) (latencies.length * 0.99)];
int p999 = (int) latencies[(int) (latencies.length * 0.999)];
System.out.printf("Percentiles: 50th = %d, 99th = %d, 99.9th = %d%n", p50, p99, p999);
}
private static byte[] randomBytesOfLen(Random random, int length) {
byte[] randomBytes = new byte[length];
Arrays.fill(randomBytes, Integer.valueOf(random.nextInt(26) + 65).byteValue());
return randomBytes;
}
private static void createTopic(Optional<String> propertiesFile, String brokers, String topic) throws IOException {
System.out.printf("Topic \"%s\" does not exist. "
+ "Will create topic with %d partition(s) and replication factor = %d%n",
topic, DEFAULT_NUM_PARTITIONS, DEFAULT_REPLICATION_FACTOR);
Properties adminProps = loadPropsWithBootstrapServers(propertiesFile, brokers);
Admin adminClient = Admin.create(adminProps);
NewTopic newTopic = new NewTopic(topic, DEFAULT_NUM_PARTITIONS, DEFAULT_REPLICATION_FACTOR);
try {
adminClient.createTopics(Set.of(newTopic)).all().get();
} catch (ExecutionException | InterruptedException e) {
System.out.printf("Creation of topic %s failed%n", topic);
throw new RuntimeException(e);
} finally {
Utils.closeQuietly(adminClient, "AdminClient");
}
}
private static Properties loadPropsWithBootstrapServers(Optional<String> propertiesFile, String brokers) throws IOException {
Properties properties = propertiesFile.isPresent() ? Utils.loadProps(propertiesFile.get()) : new Properties();
properties.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, brokers);
return properties;
}
private static KafkaConsumer<byte[], byte[]> createKafkaConsumer(Optional<String> propsFile, String brokers) throws IOException {
Properties consumerProps = loadPropsWithBootstrapServers(propsFile, brokers);
consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group-" + System.currentTimeMillis());
consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArrayDeserializer");
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArrayDeserializer");
consumerProps.put(ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG, "0"); //ensure we have no temporal batching
return new KafkaConsumer<>(consumerProps);
}
private static KafkaProducer<byte[], byte[]> createKafkaProducer(Optional<String> propsFile, String brokers, String acks) throws IOException {
Properties producerProps = loadPropsWithBootstrapServers(propsFile, brokers);
producerProps.put(ProducerConfig.LINGER_MS_CONFIG, "0"); //ensure writes are synchronous
producerProps.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, Long.MAX_VALUE);
producerProps.put(ProducerConfig.ACKS_CONFIG, acks);
producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArraySerializer");
producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArraySerializer");
return new KafkaProducer<>(producerProps);
}
/**
* Converts legacy positional arguments to named arguments for backward compatibility.
*
* @param args the command line arguments to convert
* @return converted named arguments
* @throws Exception if the legacy arguments are invalid
* @deprecated Positional argument usage is deprecated and will be removed in Apache Kafka 5.0.
* Use named arguments instead: --bootstrap-server, --topic, --num-records, --producer-acks, --record-size, --command-config
*/
@Deprecated(since = "4.2", forRemoval = true)
static String[] convertLegacyArgsIfNeeded(String[] args) throws Exception {
if (args.length == 0) {
return args;
}
boolean hasRequiredNamedArgs = Arrays.stream(args).anyMatch(arg ->
arg.equals("--bootstrap-server") ||
arg.equals("--topic") ||
arg.equals("--num-records") ||
arg.equals("--producer-acks") ||
arg.equals("--record-size"));
if (hasRequiredNamedArgs) {
return args;
}
if (args.length != 5 && args.length != 6) {
throw new TerseException("Invalid number of arguments. Expected 5 or 6 positional arguments, but got " + args.length + ". " +
"Usage: bootstrap-server topic num-records producer-acks record-size [optional] command-config");
}
return convertLegacyArgs(args);
}
private static String[] convertLegacyArgs(String[] legacyArgs) {
List<String> newArgs = new ArrayList<>();
// broker_list -> --bootstrap-server
newArgs.add("--bootstrap-server");
newArgs.add(legacyArgs[0]);
// topic -> --topic
newArgs.add("--topic");
newArgs.add(legacyArgs[1]);
// num_messages -> --num-records
newArgs.add("--num-records");
newArgs.add(legacyArgs[2]);
// producer_acks -> --producer-acks
newArgs.add("--producer-acks");
newArgs.add(legacyArgs[3]);
// message_size_bytes -> --record-size
newArgs.add("--record-size");
newArgs.add(legacyArgs[4]);
// properties_file -> --command-config
if (legacyArgs.length == 6) {
newArgs.add("--command-config");
newArgs.add(legacyArgs[5]);
}
System.out.println("WARNING: Positional argument usage is deprecated and will be removed in Apache Kafka 5.0. " +
"Please use named arguments instead: --bootstrap-server, --topic, --num-records, --producer-acks, --record-size, --command-config");
return newArgs.toArray(new String[0]);
}
public static final | EndToEndLatency |
java | apache__flink | flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/state/rocksdb/snapshot/RocksNativeFullSnapshotStrategy.java | {
"start": 5590,
"end": 10400
} | class ____ extends RocksDBSnapshotOperation {
private RocksDBNativeFullSnapshotOperation(
long checkpointId,
@Nonnull CheckpointStreamFactory checkpointStreamFactory,
@Nonnull SnapshotDirectory localBackupDirectory,
@Nonnull List<StateMetaInfoSnapshot> stateMetaInfoSnapshots) {
super(
checkpointId,
checkpointStreamFactory,
localBackupDirectory,
stateMetaInfoSnapshots);
}
@Override
public SnapshotResult<KeyedStateHandle> get(CloseableRegistry snapshotCloseableRegistry)
throws Exception {
boolean completed = false;
// Handle to the meta data file
SnapshotResult<StreamStateHandle> metaStateHandle = null;
// Handles to all the files in the current snapshot will go here
final List<HandleAndLocalPath> privateFiles = new ArrayList<>();
try {
metaStateHandle =
materializeMetaData(
snapshotCloseableRegistry,
tmpResourcesRegistry,
stateMetaInfoSnapshots,
checkpointId,
checkpointStreamFactory);
// Sanity checks - they should never fail
Preconditions.checkNotNull(metaStateHandle, "Metadata was not properly created.");
Preconditions.checkNotNull(
metaStateHandle.getJobManagerOwnedSnapshot(),
"Metadata for job manager was not properly created.");
long checkpointedSize = metaStateHandle.getStateSize();
checkpointedSize +=
uploadSnapshotFiles(
privateFiles, snapshotCloseableRegistry, tmpResourcesRegistry);
final IncrementalRemoteKeyedStateHandle jmIncrementalKeyedStateHandle =
new IncrementalRemoteKeyedStateHandle(
backendUID,
keyGroupRange,
checkpointId,
Collections.emptyList(),
privateFiles,
metaStateHandle.getJobManagerOwnedSnapshot(),
checkpointedSize);
Optional<KeyedStateHandle> localSnapshot =
getLocalSnapshot(
metaStateHandle.getTaskLocalSnapshot(), Collections.emptyList());
final SnapshotResult<KeyedStateHandle> snapshotResult =
localSnapshot
.map(
keyedStateHandle ->
SnapshotResult.withLocalState(
jmIncrementalKeyedStateHandle,
keyedStateHandle))
.orElseGet(() -> SnapshotResult.of(jmIncrementalKeyedStateHandle));
completed = true;
return snapshotResult;
} finally {
if (!completed) {
cleanupIncompleteSnapshot(tmpResourcesRegistry, localBackupDirectory);
}
}
}
/** upload files and return total uploaded size. */
private long uploadSnapshotFiles(
@Nonnull List<HandleAndLocalPath> privateFiles,
@Nonnull CloseableRegistry snapshotCloseableRegistry,
@Nonnull CloseableRegistry tmpResourcesRegistry)
throws Exception {
// write state data
Preconditions.checkState(localBackupDirectory.exists());
Path[] files = localBackupDirectory.listDirectory();
long uploadedSize = 0;
if (files != null) {
// all sst files are private in full snapshot
List<HandleAndLocalPath> uploadedFiles =
stateUploader.uploadFilesToCheckpointFs(
Arrays.asList(files),
checkpointStreamFactory,
CheckpointedStateScope.EXCLUSIVE,
snapshotCloseableRegistry,
tmpResourcesRegistry);
uploadedSize += uploadedFiles.stream().mapToLong(e -> e.getStateSize()).sum();
privateFiles.addAll(uploadedFiles);
}
return uploadedSize;
}
}
}
| RocksDBNativeFullSnapshotOperation |
java | playframework__playframework | documentation/manual/working/javaGuide/main/forms/code/javaguide/forms/JavaForms.java | {
"start": 10138,
"end": 10806
} | class ____ {
String render(Form<?> form) {
return "rendered";
}
}
Controller2(JavaHandlerComponents javaHandlerComponents) {
super(javaHandlerComponents);
}
public Result index(Http.Request request) {
Form<User> userForm = formFactory().form(User.class).bindFromRequest(request);
// #handle-errors
if (userForm.hasErrors()) {
return badRequest(views.html.form.render(userForm));
} else {
User user = userForm.get();
return ok("Got user " + user);
}
// #handle-errors
}
}
@Test
public void fillForm() {
// User needs a constructor. Give it one.
| Pform |
java | netty__netty | codec-marshalling/src/main/java/io/netty/handler/codec/marshalling/ChannelBufferByteInput.java | {
"start": 871,
"end": 2137
} | class ____ implements ByteInput {
private final ByteBuf buffer;
ChannelBufferByteInput(ByteBuf buffer) {
this.buffer = buffer;
}
@Override
public void close() throws IOException {
// nothing to do
}
@Override
public int available() throws IOException {
return buffer.readableBytes();
}
@Override
public int read() throws IOException {
if (buffer.isReadable()) {
return buffer.readByte() & 0xff;
}
return -1;
}
@Override
public int read(byte[] array) throws IOException {
return read(array, 0, array.length);
}
@Override
public int read(byte[] dst, int dstIndex, int length) throws IOException {
int available = available();
if (available == 0) {
return -1;
}
length = Math.min(available, length);
buffer.readBytes(dst, dstIndex, length);
return length;
}
@Override
public long skip(long bytes) throws IOException {
int readable = buffer.readableBytes();
if (readable < bytes) {
bytes = readable;
}
buffer.readerIndex((int) (buffer.readerIndex() + bytes));
return bytes;
}
}
| ChannelBufferByteInput |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/CodeTemplates.java | {
"start": 20091,
"end": 21331
} | class ____ {
@Advice.OnMethodEnter
static void enter(@Advice.This Object self, @FieldValue Collection<?> field, @Advice.Argument(0) Collection<?> argument, @InverseSide boolean inverseSide, @BidirectionalAttribute String bidirectionalAttribute) {
if ( getterSelf() != null ) {
Object[] array = field.toArray();
for ( int i = 0; i < array.length; i++ ) {
if ( argument == null || !argument.contains( array[i] ) ) {
getter( array[i] ).remove( self );
}
}
}
}
@Advice.OnMethodExit
static void exit(@Advice.This Object self, @Advice.Argument(0) Collection<?> argument, @InverseSide boolean inverseSide, @BidirectionalAttribute String bidirectionalAttribute) {
if ( argument != null ) {
Object[] array = argument.toArray();
for ( Object array1 : array ) {
Collection<Object> c = getter( array1 );
if ( c != null && !c.contains( self ) ) {
c.add( self );
}
}
}
}
static Collection<Object> getter(Object self) {
// is replaced by the actual method call
throw new AssertionError();
}
static Object getterSelf() {
// is replaced by the actual method call
throw new AssertionError();
}
}
@Retention(RetentionPolicy.RUNTIME)
@ | ManyToManyHandler |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/config/ConfigDataActivationContextTests.java | {
"start": 1440,
"end": 3885
} | class ____ {
@Test
void getCloudPlatformWhenCloudPropertyNotPresentDeducesCloudPlatform() {
Environment environment = new MockEnvironment();
Binder binder = Binder.get(environment);
ConfigDataActivationContext context = new ConfigDataActivationContext(environment, binder);
assertThat(context.getCloudPlatform()).isNull();
}
@Test
void getCloudPlatformWhenCloudPropertyInEnvironmentDeducesCloudPlatform() {
MockEnvironment environment = createKubernetesEnvironment();
Binder binder = Binder.get(environment);
ConfigDataActivationContext context = new ConfigDataActivationContext(environment, binder);
assertThat(context.getCloudPlatform()).isEqualTo(CloudPlatform.KUBERNETES);
}
@Test
void getCloudPlatformWhenCloudPropertyHasBeenContributedDuringInitialLoadDeducesCloudPlatform() {
Environment environment = createKubernetesEnvironment();
Binder binder = new Binder(
new MapConfigurationPropertySource(Collections.singletonMap("spring.main.cloud-platform", "HEROKU")));
ConfigDataActivationContext context = new ConfigDataActivationContext(environment, binder);
assertThat(context.getCloudPlatform()).isEqualTo(CloudPlatform.HEROKU);
}
@Test
void getProfilesWhenWithoutProfilesReturnsNull() {
Environment environment = new MockEnvironment();
Binder binder = Binder.get(environment);
ConfigDataActivationContext context = new ConfigDataActivationContext(environment, binder);
assertThat(context.getProfiles()).isNull();
}
@Test
void getProfilesWhenWithProfilesReturnsProfiles() {
MockEnvironment environment = new MockEnvironment();
environment.setActiveProfiles("a", "b", "c");
Binder binder = Binder.get(environment);
ConfigDataActivationContext context = new ConfigDataActivationContext(environment, binder);
Profiles profiles = new Profiles(environment, binder, null);
context = context.withProfiles(profiles);
assertThat(context.getProfiles()).isEqualTo(profiles);
}
private MockEnvironment createKubernetesEnvironment() {
MockEnvironment environment = new MockEnvironment();
Map<String, Object> map = new LinkedHashMap<>();
map.put("KUBERNETES_SERVICE_HOST", "host");
map.put("KUBERNETES_SERVICE_PORT", "port");
PropertySource<?> propertySource = new MapPropertySource(
StandardEnvironment.SYSTEM_ENVIRONMENT_PROPERTY_SOURCE_NAME, map);
environment.getPropertySources().addLast(propertySource);
return environment;
}
}
| ConfigDataActivationContextTests |
java | spring-projects__spring-framework | spring-aspects/src/test/java/org/springframework/transaction/aspectj/ClassWithPrivateAnnotatedMember.java | {
"start": 796,
"end": 945
} | class ____ {
public void doSomething() {
doInTransaction();
}
@Transactional
private void doInTransaction() {}
}
| ClassWithPrivateAnnotatedMember |
java | elastic__elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/EqlBaseParser.java | {
"start": 86381,
"end": 87408
} | class ____ extends PrimaryExpressionContext {
public ConstantContext constant() {
return getRuleContext(ConstantContext.class, 0);
}
public ConstantDefaultContext(PrimaryExpressionContext ctx) {
copyFrom(ctx);
}
@Override
public void enterRule(ParseTreeListener listener) {
if (listener instanceof EqlBaseListener) ((EqlBaseListener) listener).enterConstantDefault(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if (listener instanceof EqlBaseListener) ((EqlBaseListener) listener).exitConstantDefault(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if (visitor instanceof EqlBaseVisitor) return ((EqlBaseVisitor<? extends T>) visitor).visitConstantDefault(this);
else return visitor.visitChildren(this);
}
}
@SuppressWarnings("CheckReturnValue")
public static | ConstantDefaultContext |
java | quarkusio__quarkus | extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/typesafe/ItemWithName.java | {
"start": 372,
"end": 610
} | class ____ {
public String toUpperCase() {
return OrOperatorTemplateExtensionTest.ITEM_NAME.toUpperCase();
}
public String pleaseMakeMyCaseUpper() {
return "UPPER CASE";
}
}
}
| Name |
java | apache__camel | components/camel-quartz/src/test/java/org/apache/camel/component/quartz/QuartzCronRouteWithEmptyStartDateExpiredEndDateTest.java | {
"start": 1209,
"end": 2266
} | class ____ extends BaseQuartzTest {
@Test
public void testQuartzCronRouteWithStartDateEndDateTest() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(0);
mock.await(2, TimeUnit.SECONDS);
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz");
Calendar calendar = Calendar.getInstance();
calendar.setTimeZone(TimeZone.getTimeZone("UTC"));
calendar.add(Calendar.YEAR, -2);
Date endDate = calendar.getTime();
fromF("quartz://myGroup/myTimerName?cron=0/1 * * * * ?&ignoreExpiredNextFireTime=true&trigger.endAt=%s",
dateFormat.format(endDate)).to("mock:result");
}
};
}
}
| QuartzCronRouteWithEmptyStartDateExpiredEndDateTest |
java | square__moshi | moshi/src/test/java/com/squareup/moshi/CircularAdaptersTest.java | {
"start": 1022,
"end": 1217
} | class ____ {
final String lead;
final Project[] projects;
public Team(String lead, Project... projects) {
this.lead = lead;
this.projects = projects;
}
}
static | Team |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/maybe/MaybeFlatMapNotification.java | {
"start": 1133,
"end": 2179
} | class ____<T, R> extends AbstractMaybeWithUpstream<T, R> {
final Function<? super T, ? extends MaybeSource<? extends R>> onSuccessMapper;
final Function<? super Throwable, ? extends MaybeSource<? extends R>> onErrorMapper;
final Supplier<? extends MaybeSource<? extends R>> onCompleteSupplier;
public MaybeFlatMapNotification(MaybeSource<T> source,
Function<? super T, ? extends MaybeSource<? extends R>> onSuccessMapper,
Function<? super Throwable, ? extends MaybeSource<? extends R>> onErrorMapper,
Supplier<? extends MaybeSource<? extends R>> onCompleteSupplier) {
super(source);
this.onSuccessMapper = onSuccessMapper;
this.onErrorMapper = onErrorMapper;
this.onCompleteSupplier = onCompleteSupplier;
}
@Override
protected void subscribeActual(MaybeObserver<? super R> observer) {
source.subscribe(new FlatMapMaybeObserver<>(observer, onSuccessMapper, onErrorMapper, onCompleteSupplier));
}
static final | MaybeFlatMapNotification |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/AnnotationIntrospector.java | {
"start": 12001,
"end": 13599
} | class ____ introspect
*
* @return Boolean.TRUE if properties of type should be ignored;
* Boolean.FALSE if they are not to be ignored, null for default
* handling (which is 'do not ignore')
*/
public Boolean isIgnorableType(MapperConfig<?> config, AnnotatedClass ac) { return null; }
/**
* Method for finding information about properties to ignore either by
* name, or by more general specification ("ignore all unknown").
* This method combines multiple aspects of name-based (as opposed to value-based)
* ignorals.
*
* @param config Configuration settings in effect (for serialization or deserialization)
* @param ann Annotated entity (Class, Accessor) to introspect
*/
public JsonIgnoreProperties.Value findPropertyIgnoralByName(MapperConfig<?> config, Annotated ann) {
return JsonIgnoreProperties.Value.empty();
}
/**
* Method for finding information about names of properties to included.
* This is typically used to strictly limit properties to include based
* on fully defined set of names ("allow-listing"), as opposed to excluding
* potential properties by exclusion ("deny-listing").
*
* @param config Configuration settings in effect (for serialization or deserialization)
* @param ann Annotated entity (Class, Accessor) to introspect
*/
public JsonIncludeProperties.Value findPropertyInclusionByName(MapperConfig<?> config, Annotated ann) {
return JsonIncludeProperties.Value.all();
}
/**
* Method for finding if annotated | to |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java | {
"start": 2077,
"end": 17773
} | class ____ {
private static final String WITH_QUOTAS = "Content summary with quotas";
private static final String NO_QUOTAS = "Content summary without quotas";
private static final String HUMAN = "human: ";
private static final String BYTES = "bytes: ";
private static final String QUOTAS_AND_USAGE = "quotas and usage";
private static Configuration conf;
private static FileSystem mockFs;
private static FileStatus fileStat;
@BeforeAll
public static void setup() {
conf = new Configuration();
conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
mockFs = mock(FileSystem.class);
fileStat = mock(FileStatus.class);
when(fileStat.isFile()).thenReturn(true);
}
@BeforeEach
public void resetMock() {
reset(mockFs);
}
@Test
public void processOptionsHumanReadable() {
LinkedList<String> options = new LinkedList<String>();
options.add("-h");
options.add("dummy");
Count count = new Count();
count.processOptions(options);
assertFalse(count.isShowQuotas());
assertTrue(count.isHumanReadable());
}
@Test
public void processOptionsAll() {
LinkedList<String> options = new LinkedList<String>();
options.add("-q");
options.add("-h");
options.add("-t");
options.add("SSD");
options.add("dummy");
Count count = new Count();
count.processOptions(options);
assertTrue(count.isShowQuotas());
assertTrue(count.isHumanReadable());
assertTrue(count.isShowQuotabyType());
assertEquals(1, count.getStorageTypes().size());
assertEquals(StorageType.SSD, count.getStorageTypes().get(0));
}
// check no options is handled correctly
@Test
public void processOptionsNoOptions() {
LinkedList<String> options = new LinkedList<String>();
options.add("dummy");
Count count = new Count();
count.processOptions(options);
assertFalse(count.isShowQuotas());
}
// check -q is handled correctly
@Test
public void processOptionsShowQuotas() {
LinkedList<String> options = new LinkedList<String>();
options.add("-q");
options.add("dummy");
Count count = new Count();
count.processOptions(options);
assertTrue(count.isShowQuotas());
}
// check missing arguments is handled correctly
@Test
public void processOptionsMissingArgs() {
LinkedList<String> options = new LinkedList<String>();
Count count = new Count();
try {
count.processOptions(options);
fail("Count.processOptions - NotEnoughArgumentsException not thrown");
} catch (NotEnoughArgumentsException e) {
}
assertFalse(count.isShowQuotas());
}
// check the correct header is produced with no quotas (-v)
@Test
public void processOptionsHeaderNoQuotas() {
LinkedList<String> options = new LinkedList<String>();
options.add("-v");
options.add("dummy");
PrintStream out = mock(PrintStream.class);
Count count = new Count();
count.out = out;
count.processOptions(options);
String noQuotasHeader =
// <----12----> <----12----> <-------18------->
" DIR_COUNT FILE_COUNT CONTENT_SIZE PATHNAME";
verify(out).println(noQuotasHeader);
verifyNoMoreInteractions(out);
}
// check the correct header is produced with quotas (-q -v)
@Test
public void processOptionsHeaderWithQuotas() {
LinkedList<String> options = new LinkedList<String>();
options.add("-q");
options.add("-v");
options.add("dummy");
PrintStream out = mock(PrintStream.class);
Count count = new Count();
count.out = out;
count.processOptions(options);
String withQuotasHeader =
// <----12----> <-----15------> <-----15------> <-----15------>
" QUOTA REM_QUOTA SPACE_QUOTA REM_SPACE_QUOTA " +
// <----12----> <----12----> <-------18------->
" DIR_COUNT FILE_COUNT CONTENT_SIZE PATHNAME";
verify(out).println(withQuotasHeader);
verifyNoMoreInteractions(out);
}
// check quotas are reported correctly
@Test
public void processPathShowQuotas() throws Exception {
Path path = new Path("mockfs:/test");
when(mockFs.getFileStatus(eq(path))).thenReturn(fileStat);
PathData pathData = new PathData(path.toString(), conf);
PrintStream out = mock(PrintStream.class);
Count count = new Count();
count.out = out;
LinkedList<String> options = new LinkedList<String>();
options.add("-q");
options.add("dummy");
count.processOptions(options);
count.processPath(pathData);
verify(out).println(BYTES + WITH_QUOTAS + path.toString());
verifyNoMoreInteractions(out);
}
// check counts without quotas are reported correctly
@Test
public void processPathNoQuotas() throws Exception {
Path path = new Path("mockfs:/test");
when(mockFs.getFileStatus(eq(path))).thenReturn(fileStat);
PathData pathData = new PathData(path.toString(), conf);
PrintStream out = mock(PrintStream.class);
Count count = new Count();
count.out = out;
LinkedList<String> options = new LinkedList<String>();
options.add("dummy");
count.processOptions(options);
count.processPath(pathData);
verify(out).println(BYTES + NO_QUOTAS + path.toString());
verifyNoMoreInteractions(out);
}
@Test
public void processPathShowQuotasHuman() throws Exception {
Path path = new Path("mockfs:/test");
when(mockFs.getFileStatus(eq(path))).thenReturn(fileStat);
PathData pathData = new PathData(path.toString(), conf);
PrintStream out = mock(PrintStream.class);
Count count = new Count();
count.out = out;
LinkedList<String> options = new LinkedList<String>();
options.add("-q");
options.add("-h");
options.add("dummy");
count.processOptions(options);
count.processPath(pathData);
verify(out).println(HUMAN + WITH_QUOTAS + path.toString());
}
@Test
public void processPathNoQuotasHuman() throws Exception {
Path path = new Path("mockfs:/test");
when(mockFs.getFileStatus(eq(path))).thenReturn(fileStat);
PathData pathData = new PathData(path.toString(), conf);
PrintStream out = mock(PrintStream.class);
Count count = new Count();
count.out = out;
LinkedList<String> options = new LinkedList<String>();
options.add("-h");
options.add("dummy");
count.processOptions(options);
count.processPath(pathData);
verify(out).println(HUMAN + NO_QUOTAS + path.toString());
}
@Test
public void processPathWithQuotasByStorageTypesHeader() throws Exception {
Path path = new Path("mockfs:/test");
when(mockFs.getFileStatus(eq(path))).thenReturn(fileStat);
PrintStream out = mock(PrintStream.class);
Count count = new Count();
count.out = out;
LinkedList<String> options = new LinkedList<String>();
options.add("-q");
options.add("-v");
options.add("-t");
options.add("all");
options.add("dummy");
count.processOptions(options);
String withStorageTypeHeader =
// <----14----> <-------18-------> <-----14-----> <-------18------->
" SSD_QUOTA REM_SSD_QUOTA DISK_QUOTA REM_DISK_QUOTA " +
" ARCHIVE_QUOTA REM_ARCHIVE_QUOTA PROVIDED_QUOTA REM_PROVIDED_QUOTA " +
" NVDIMM_QUOTA REM_NVDIMM_QUOTA " +
"PATHNAME";
verify(out).println(withStorageTypeHeader);
verifyNoMoreInteractions(out);
}
@Test
public void processPathWithQuotasBySSDStorageTypesHeader() throws Exception {
Path path = new Path("mockfs:/test");
when(mockFs.getFileStatus(eq(path))).thenReturn(fileStat);
PrintStream out = mock(PrintStream.class);
Count count = new Count();
count.out = out;
LinkedList<String> options = new LinkedList<String>();
options.add("-q");
options.add("-v");
options.add("-t");
options.add("SSD");
options.add("dummy");
count.processOptions(options);
String withStorageTypeHeader =
// <----14----> <-------18------->
" SSD_QUOTA REM_SSD_QUOTA " +
"PATHNAME";
verify(out).println(withStorageTypeHeader);
verifyNoMoreInteractions(out);
}
@Test
public void processPathWithQuotasByQTVH() throws Exception {
Path path = new Path("mockfs:/test");
when(mockFs.getFileStatus(eq(path))).thenReturn(fileStat);
PrintStream out = mock(PrintStream.class);
Count count = new Count();
count.out = out;
LinkedList<String> options = new LinkedList<String>();
options.add("-q");
options.add("-t");
options.add("-v");
options.add("-h");
options.add("dummy");
count.processOptions(options);
String withStorageTypeHeader =
// <----14----> <-------18------->
" SSD_QUOTA REM_SSD_QUOTA " +
" DISK_QUOTA REM_DISK_QUOTA " +
" ARCHIVE_QUOTA REM_ARCHIVE_QUOTA " +
"PROVIDED_QUOTA REM_PROVIDED_QUOTA " +
" NVDIMM_QUOTA REM_NVDIMM_QUOTA " +
"PATHNAME";
verify(out).println(withStorageTypeHeader);
verifyNoMoreInteractions(out);
}
@Test
public void processPathWithQuotasByMultipleStorageTypesContent()
throws Exception {
processMultipleStorageTypesContent(false);
}
@Test
public void processPathWithQuotaUsageByMultipleStorageTypesContent()
throws Exception {
processMultipleStorageTypesContent(true);
}
// "-q -t" is the same as "-u -t"; only return the storage quota and usage.
private void processMultipleStorageTypesContent(boolean quotaUsageOnly)
throws Exception {
Path path = new Path("mockfs:/test");
when(mockFs.getFileStatus(eq(path))).thenReturn(fileStat);
PathData pathData = new PathData(path.toString(), conf);
PrintStream out = mock(PrintStream.class);
Count count = new Count();
count.out = out;
LinkedList<String> options = new LinkedList<String>();
options.add(quotaUsageOnly ? "-u" : "-q");
options.add("-t");
options.add("SSD,DISK");
options.add("dummy");
count.processOptions(options);
count.processPath(pathData);
String withStorageType = BYTES + StorageType.SSD.toString()
+ " " + StorageType.DISK.toString() + " " + pathData.toString();
verify(out).println(withStorageType);
verifyNoMoreInteractions(out);
}
@Test
public void processPathWithQuotasByMultipleStorageTypes() throws Exception {
Path path = new Path("mockfs:/test");
when(mockFs.getFileStatus(eq(path))).thenReturn(fileStat);
PrintStream out = mock(PrintStream.class);
Count count = new Count();
count.out = out;
LinkedList<String> options = new LinkedList<String>();
options.add("-q");
options.add("-v");
options.add("-t");
options.add("SSD,DISK");
options.add("dummy");
count.processOptions(options);
String withStorageTypeHeader =
// <----14----> <------18-------->
" SSD_QUOTA REM_SSD_QUOTA " +
" DISK_QUOTA REM_DISK_QUOTA " +
"PATHNAME";
verify(out).println(withStorageTypeHeader);
verifyNoMoreInteractions(out);
}
@Test
public void processPathWithSnapshotHeader() throws Exception {
Path path = new Path("mockfs:/test");
when(mockFs.getFileStatus(eq(path))).thenReturn(fileStat);
PrintStream out = mock(PrintStream.class);
Count count = new Count();
count.out = out;
LinkedList<String> options = new LinkedList<String>();
options.add("-s");
options.add("-v");
options.add("dummy");
count.processOptions(options);
String withSnapshotHeader = " DIR_COUNT FILE_COUNT CONTENT_SIZE "
+ " SNAPSHOT_LENGTH SNAPSHOT_FILE_COUNT "
+ " SNAPSHOT_DIR_COUNT SNAPSHOT_SPACE_CONSUMED PATHNAME";
verify(out).println(withSnapshotHeader);
verifyNoMoreInteractions(out);
}
@Test
public void getCommandName() {
Count count = new Count();
String actual = count.getCommandName();
String expected = "count";
assertEquals(expected, actual, "Count.getCommandName");
}
@Test
public void isDeprecated() {
Count count = new Count();
boolean actual = count.isDeprecated();
boolean expected = false;
assertEquals(expected, actual, "Count.isDeprecated");
}
@Test
public void getReplacementCommand() {
Count count = new Count();
String actual = count.getReplacementCommand();
String expected = null;
assertEquals(expected, actual, "Count.getReplacementCommand");
}
@Test
public void getName() {
Count count = new Count();
String actual = count.getName();
String expected = "count";
assertEquals(expected, actual, "Count.getName");
}
@Test
public void getUsage() {
Count count = new Count();
String actual = count.getUsage();
String expected =
"-count [-q] [-h] [-v] [-t [<storage type>]]"
+ " [-u] [-x] [-e] [-s] <path> ...";
assertEquals(expected, actual, "Count.getUsage");
}
// check the correct description is returned
@Test
public void getDescription() {
Count count = new Count();
String actual = count.getDescription();
String expected =
"Count the number of directories, files and bytes under the paths\n"
+ "that match the specified file pattern. The output columns are:\n"
+ "DIR_COUNT FILE_COUNT CONTENT_SIZE PATHNAME\n"
+ "or, with the -q option:\n"
+ "QUOTA REM_QUOTA SPACE_QUOTA REM_SPACE_QUOTA\n"
+ " DIR_COUNT FILE_COUNT CONTENT_SIZE PATHNAME\n"
+ "The -h option shows file sizes in human readable format.\n"
+ "The -v option displays a header line.\n"
+ "The -x option excludes snapshots from being calculated. \n"
+ "The -t option displays quota by storage types.\n"
+ "It should be used with -q or -u option, "
+ "otherwise it will be ignored.\n"
+ "If a comma-separated list of storage types is given after the -t option, \n"
+ "it displays the quota and usage for the specified types. \n"
+ "Otherwise, it displays the quota and usage for all the storage \n"
+ "types that support quota. The list of possible storage "
+ "types(case insensitive):\n"
+ "ram_disk, ssd, disk, archive and nvdimm.\n"
+ "It can also pass the value '', 'all' or 'ALL' to specify all the "
+ "storage types.\n"
+ "The -u option shows the quota and \n"
+ "the usage against the quota without the detailed content summary."
+ "The -e option shows the erasure coding policy."
+ "The -s option shows snapshot counts.";
assertEquals(expected, actual, "Count.getDescription");
}
@Test
public void processPathWithQuotaUsageHuman() throws Exception {
processPathWithQuotaUsage(false);
}
@Test
public void processPathWithQuotaUsageRawBytes() throws Exception {
processPathWithQuotaUsage(true);
}
private void processPathWithQuotaUsage(boolean rawBytes) throws Exception {
Path path = new Path("mockfs:/test");
when(mockFs.getFileStatus(eq(path))).thenReturn(fileStat);
PathData pathData = new PathData(path.toString(), conf);
PrintStream out = mock(PrintStream.class);
Count count = new Count();
count.out = out;
LinkedList<String> options = new LinkedList<String>();
if (!rawBytes) {
options.add("-h");
}
options.add("-u");
options.add("dummy");
count.processOptions(options);
count.processPath(pathData);
String withStorageType = (rawBytes ? BYTES : HUMAN) + QUOTAS_AND_USAGE +
pathData.toString();
verify(out).println(withStorageType);
verifyNoMoreInteractions(out);
}
// mock content system
static | TestCount |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.