language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__camel | core/camel-core-languages/src/main/java/org/apache/camel/language/simple/SimpleExpressionParser.java | {
"start": 1903,
"end": 12136
} | class ____ extends BaseSimpleParser {
// use caches to avoid re-parsing the same expressions over and over again
private final Map<String, Expression> cacheExpression;
private boolean skipFileFunctions;
public SimpleExpressionParser(CamelContext camelContext, String expression,
boolean allowEscape,
Map<String, Expression> cacheExpression) {
this(camelContext, expression, allowEscape, false, cacheExpression);
}
public SimpleExpressionParser(CamelContext camelContext, String expression,
boolean allowEscape, boolean skipFileFunctions,
Map<String, Expression> cacheExpression) {
super(camelContext, expression, allowEscape);
this.cacheExpression = cacheExpression;
this.skipFileFunctions = skipFileFunctions;
}
public Expression parseExpression() {
try {
parseTokens();
return doParseExpression();
} catch (SimpleParserException e) {
// catch parser exception and turn that into a syntax exceptions
throw new SimpleIllegalSyntaxException(expression, e.getIndex(), e.getMessage(), e);
} catch (Exception e) {
// include exception in rethrown exception
throw new SimpleIllegalSyntaxException(expression, -1, e.getMessage(), e);
}
}
public String parseCode() {
try {
parseTokens();
return doParseCode();
} catch (SimpleParserException e) {
// catch parser exception and turn that into a syntax exceptions
throw new SimpleIllegalSyntaxException(expression, e.getIndex(), e.getMessage(), e);
} catch (Exception e) {
// include exception in rethrown exception
throw new SimpleIllegalSyntaxException(expression, -1, e.getMessage(), e);
}
}
/**
* First step parsing into a list of nodes.
*
* This is used as SPI for camel-csimple to do AST transformation and parse into java source code.
*/
protected List<SimpleNode> parseTokens() {
clear();
// parse the expression using the following grammar
nextToken();
while (!token.getType().isEol()) {
// an expression supports just template (eg text), functions, or unary operator
templateText();
functionText();
unaryOperator();
nextToken();
}
// now after parsing, we need a bit of work to do, to make it easier to turn the tokens
// into an ast, and then from the ast, to Camel expression(s).
// hence why there are a number of tasks going on below to accomplish this
// turn the tokens into the ast model
parseAndCreateAstModel();
// compact and stack blocks (eg function start/end)
prepareBlocks();
// compact and stack unary operators
prepareUnaryExpressions();
return nodes;
}
/**
* Second step parsing into an expression
*/
protected Expression doParseExpression() {
// create and return as a Camel expression
List<Expression> expressions = createExpressions();
if (expressions.isEmpty()) {
// return an empty string as response as there was nothing to parse
return ExpressionBuilder.constantExpression("");
} else if (expressions.size() == 1) {
return expressions.get(0);
} else {
// concat expressions as evaluating an expression is like a template language
return ExpressionBuilder.concatExpression(expressions, expression);
}
}
protected void parseAndCreateAstModel() {
// we loop the tokens and create a sequence of ast nodes
// counter to keep track of number of functions in the tokens
AtomicInteger functions = new AtomicInteger();
LiteralNode imageToken = null;
for (SimpleToken token : tokens) {
// break if eol
if (token.getType().isEol()) {
break;
}
// create a node from the token
SimpleNode node = createNode(token, functions);
if (node != null) {
// a new token was created so the current image token need to be added first
if (imageToken != null) {
nodes.add(imageToken);
imageToken = null;
}
// and then add the created node
nodes.add(node);
// continue to next
continue;
}
// if no token was created, then it's a character/whitespace/escaped symbol
// which we need to add together in the same image
if (imageToken == null) {
imageToken = new LiteralExpression(token);
}
imageToken.addText(token.getText());
}
// append any leftover image tokens (when we reached eol)
if (imageToken != null) {
nodes.add(imageToken);
}
}
private SimpleNode createNode(SimpleToken token, AtomicInteger functions) {
// expression only support functions and unary operators
if (token.getType().isFunctionStart()) {
// starting a new function
functions.incrementAndGet();
return new SimpleFunctionStart(token, cacheExpression, skipFileFunctions);
} else if (functions.get() > 0 && token.getType().isFunctionEnd()) {
// there must be a start function already, to let this be a end function
functions.decrementAndGet();
return new SimpleFunctionEnd(token);
} else if (token.getType().isUnary()) {
// there must be a end function as previous, to let this be a unary function
if (!nodes.isEmpty() && nodes.get(nodes.size() - 1) instanceof SimpleFunctionEnd) {
return new UnaryExpression(token);
}
}
// by returning null, we will let the parser determine what to do
return null;
}
private List<Expression> createExpressions() {
List<Expression> answer = new ArrayList<>();
for (SimpleNode token : nodes) {
Expression exp = token.createExpression(camelContext, expression);
if (exp != null) {
answer.add(exp);
}
}
return answer;
}
/**
* Second step parsing into code
*/
protected String doParseCode() {
StringBuilder sb = new StringBuilder(256);
boolean firstIsLiteral = false;
for (SimpleNode node : nodes) {
String exp = node.createCode(camelContext, expression);
if (exp != null) {
if (sb.isEmpty() && node instanceof LiteralNode) {
firstIsLiteral = true;
}
if (!sb.isEmpty()) {
// okay we append together and this requires that the first node to be literal
if (!firstIsLiteral) {
// then insert an empty string + to force type into string so the compiler
// can compile with the + function
sb.insert(0, "\"\" + ");
}
sb.append(" + ");
}
parseLiteralNode(sb, node, exp);
}
}
String code = sb.toString();
code = code.replace(BaseSimpleParser.CODE_START, "");
code = code.replace(BaseSimpleParser.CODE_END, "");
return code;
}
static void parseLiteralNode(StringBuilder sb, SimpleNode node, String exp) {
if (node instanceof LiteralNode) {
exp = StringHelper.removeLeadingAndEndingQuotes(exp);
sb.append("\"");
// " should be escaped to \"
exp = LanguageHelper.escapeQuotes(exp);
// \n \t \r should be escaped
exp = exp.replaceAll("\n", "\\\\n");
exp = exp.replaceAll("\t", "\\\\t");
exp = exp.replaceAll("\r", "\\\\r");
if (exp.endsWith("\\") && !exp.endsWith("\\\\")) {
// there is a single trailing slash which we need to escape
exp += "\\";
}
sb.append(exp);
sb.append("\"");
} else {
sb.append(exp);
}
}
// --------------------------------------------------------------
// grammar
// --------------------------------------------------------------
// the expression parser only understands
// - template = literal texts with can contain embedded functions
// - function = simple functions such as ${body} etc.
// - unary operator = operator attached to the left-hand side node
protected void templateText() {
// for template, we accept anything but functions
while (!token.getType().isFunctionStart() && !token.getType().isFunctionEnd() && !token.getType().isEol()) {
nextToken();
}
}
protected boolean functionText() {
if (accept(TokenType.functionStart)) {
nextToken();
while (!token.getType().isFunctionEnd() && !token.getType().isEol()) {
if (token.getType().isFunctionStart()) {
// embedded function
functionText();
}
// we need to loop until we find the ending function quote, an embedded function, or the eol
nextToken();
}
// if its not an embedded function then we expect the end token
if (!token.getType().isFunctionStart()) {
expect(TokenType.functionEnd);
}
return true;
}
return false;
}
protected boolean unaryOperator() {
if (accept(TokenType.unaryOperator)) {
nextToken();
// there should be a whitespace after the operator
expect(TokenType.whiteSpace);
return true;
}
return false;
}
}
| SimpleExpressionParser |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/postgresql/expr/PolygonTest.java | {
"start": 274,
"end": 605
} | class ____ extends PGTest {
public void test_timestamp() throws Exception {
String sql = "# '((1,0),(0,1),(-1,0))'";
PGExprParser parser = new PGExprParser(sql);
SQLUnaryExpr unaryExpr = (SQLUnaryExpr) parser.expr();
assertEquals(SQLUnaryOperator.Pound, unaryExpr.getOperator());
}
}
| PolygonTest |
java | apache__rocketmq | client/src/test/java/org/apache/rocketmq/client/impl/consumer/DefaultMQPushConsumerImplTest.java | {
"start": 4866,
"end": 38461
} | class ____ {
@Mock
private DefaultMQPushConsumer defaultMQPushConsumer;
@Mock
private MQClientInstance mQClientFactory;
@Mock
private RebalanceImpl rebalanceImpl;
@Mock
private PullAPIWrapper pullAPIWrapper;
@Mock
private PullRequest pullRequest;
@Mock
private PopRequest popRequest;
@Mock
private ProcessQueue processQueue;
@Mock
private PopProcessQueue popProcessQueue;
@Mock
private MQClientAPIImpl mqClientAPIImpl;
@Mock
private OffsetStore offsetStore;
private DefaultMQPushConsumerImpl defaultMQPushConsumerImpl;
@Rule
public ExpectedException thrown = ExpectedException.none();
private final String defaultKey = "defaultKey";
private final String defaultTopic = "defaultTopic";
private final String defaultBroker = "defaultBroker";
private final String defaultBrokerAddr = "127.0.0.1:10911";
private final String defaultGroup = "defaultGroup";
private final long defaultTimeout = 3000L;
@Test
public void checkConfigTest() throws MQClientException {
//test type
thrown.expect(MQClientException.class);
//test message
thrown.expectMessage("consumeThreadMin (10) is larger than consumeThreadMax (9)");
DefaultMQPushConsumer consumer = new DefaultMQPushConsumer("test_consumer_group");
consumer.setConsumeThreadMin(10);
consumer.setConsumeThreadMax(9);
consumer.registerMessageListener((MessageListenerConcurrently) (msgs, context) -> ConsumeConcurrentlyStatus.CONSUME_SUCCESS);
DefaultMQPushConsumerImpl defaultMQPushConsumerImpl = new DefaultMQPushConsumerImpl(consumer, null);
defaultMQPushConsumerImpl.start();
}
@Test
public void testHook() {
DefaultMQPushConsumerImpl defaultMQPushConsumerImpl = new DefaultMQPushConsumerImpl(defaultMQPushConsumer, null);
defaultMQPushConsumerImpl.registerConsumeMessageHook(new ConsumeMessageHook() {
@Override
public String hookName() {
return "consumerHook";
}
@Override
public void consumeMessageBefore(ConsumeMessageContext context) {
assertThat(context).isNotNull();
}
@Override
public void consumeMessageAfter(ConsumeMessageContext context) {
assertThat(context).isNotNull();
}
});
defaultMQPushConsumerImpl.registerFilterMessageHook(new FilterMessageHook() {
@Override
public String hookName() {
return "filterHook";
}
@Override
public void filterMessage(FilterMessageContext context) {
assertThat(context).isNotNull();
}
});
defaultMQPushConsumerImpl.executeHookBefore(new ConsumeMessageContext());
defaultMQPushConsumerImpl.executeHookAfter(new ConsumeMessageContext());
}
@Ignore
@Test
public void testPush() throws Exception {
when(defaultMQPushConsumer.getMessageListener()).thenReturn((MessageListenerConcurrently) (msgs, context) -> {
assertThat(msgs).size().isGreaterThan(0);
assertThat(context).isNotNull();
return ConsumeConcurrentlyStatus.CONSUME_SUCCESS;
});
DefaultMQPushConsumerImpl defaultMQPushConsumerImpl = new DefaultMQPushConsumerImpl(defaultMQPushConsumer, null);
try {
defaultMQPushConsumerImpl.start();
} finally {
defaultMQPushConsumerImpl.shutdown();
}
}
@Before
public void init() throws NoSuchFieldException, IllegalAccessException {
MQAdminImpl mqAdminImpl = mock(MQAdminImpl.class);
when(mQClientFactory.getMQAdminImpl()).thenReturn(mqAdminImpl);
ConsumerStatsManager consumerStatsManager = mock(ConsumerStatsManager.class);
ConsumeStatus consumeStatus = mock(ConsumeStatus.class);
when(consumerStatsManager.consumeStatus(any(), any())).thenReturn(consumeStatus);
when(mQClientFactory.getConsumerStatsManager()).thenReturn(consumerStatsManager);
when(mQClientFactory.getPullMessageService()).thenReturn(mock(PullMessageService.class));
when(mQClientFactory.getMQClientAPIImpl()).thenReturn(mqClientAPIImpl);
FindBrokerResult findBrokerResult = mock(FindBrokerResult.class);
when(findBrokerResult.getBrokerAddr()).thenReturn(defaultBrokerAddr);
when(mQClientFactory.findBrokerAddressInSubscribe(anyString(), anyLong(), anyBoolean())).thenReturn(findBrokerResult);
Set<MessageQueue> messageQueueSet = Collections.singleton(createMessageQueue());
ConcurrentMap<String, Set<MessageQueue>> topicMessageQueueMap = new ConcurrentHashMap<>();
topicMessageQueueMap.put(defaultTopic, messageQueueSet);
when(rebalanceImpl.getTopicSubscribeInfoTable()).thenReturn(topicMessageQueueMap);
ConcurrentMap<MessageQueue, ProcessQueue> processQueueTable = new ConcurrentHashMap<>();
when(rebalanceImpl.getProcessQueueTable()).thenReturn(processQueueTable);
RPCHook rpcHook = mock(RPCHook.class);
defaultMQPushConsumerImpl = new DefaultMQPushConsumerImpl(defaultMQPushConsumer, rpcHook);
defaultMQPushConsumerImpl.setOffsetStore(offsetStore);
FieldUtils.writeDeclaredField(defaultMQPushConsumerImpl, "mQClientFactory", mQClientFactory, true);
FieldUtils.writeDeclaredField(defaultMQPushConsumerImpl, "rebalanceImpl", rebalanceImpl, true);
FieldUtils.writeDeclaredField(defaultMQPushConsumerImpl, "pullAPIWrapper", pullAPIWrapper, true);
FilterMessageHook filterMessageHook = mock(FilterMessageHook.class);
ArrayList<FilterMessageHook> filterMessageHookList = new ArrayList<>();
filterMessageHookList.add(filterMessageHook);
ConsumeMessageService consumeMessagePopService = mock(ConsumeMessageService.class);
ConsumeMessageService consumeMessageService = mock(ConsumeMessageService.class);
FieldUtils.writeDeclaredField(defaultMQPushConsumerImpl, "filterMessageHookList", filterMessageHookList, true);
FieldUtils.writeDeclaredField(defaultMQPushConsumerImpl, "consumeMessageService", consumeMessageService, true);
FieldUtils.writeDeclaredField(defaultMQPushConsumerImpl, "consumeMessagePopService", consumeMessagePopService, true);
ConcurrentMap<String, SubscriptionData> subscriptionDataMap = new ConcurrentHashMap<>();
SubscriptionData subscriptionData = new SubscriptionData();
subscriptionData.setTopic(defaultTopic);
subscriptionDataMap.put(defaultTopic, subscriptionData);
when(rebalanceImpl.getSubscriptionInner()).thenReturn(subscriptionDataMap);
}
@Test
public void testFetchSubscribeMessageQueues() throws MQClientException {
Set<MessageQueue> actual = defaultMQPushConsumerImpl.fetchSubscribeMessageQueues(defaultTopic);
assertNotNull(actual);
Assert.assertEquals(1, actual.size());
MessageQueue next = actual.iterator().next();
assertEquals(defaultTopic, next.getTopic());
assertEquals(defaultBroker, next.getBrokerName());
assertEquals(0, next.getQueueId());
}
@Test
public void testEarliestMsgStoreTime() throws MQClientException {
assertEquals(0, defaultMQPushConsumerImpl.earliestMsgStoreTime(createMessageQueue()));
}
@Test
public void testMaxOffset() throws MQClientException {
assertEquals(0, defaultMQPushConsumerImpl.maxOffset(createMessageQueue()));
}
@Test
public void testMinOffset() throws MQClientException {
assertEquals(0, defaultMQPushConsumerImpl.minOffset(createMessageQueue()));
}
@Test
public void testGetOffsetStore() {
assertEquals(offsetStore, defaultMQPushConsumerImpl.getOffsetStore());
}
@Test
public void testPullMessageWithStateNotOk() {
when(pullRequest.getProcessQueue()).thenReturn(processQueue);
defaultMQPushConsumerImpl.pullMessage(pullRequest);
}
@Test
public void testPullMessageWithIsPause() {
when(pullRequest.getProcessQueue()).thenReturn(processQueue);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
defaultMQPushConsumerImpl.setPause(true);
defaultMQPushConsumerImpl.pullMessage(pullRequest);
}
@Test
public void testPullMessageWithMsgCountFlowControl() {
when(processQueue.getMsgCount()).thenReturn(new AtomicLong(2));
when(processQueue.getMsgSize()).thenReturn(new AtomicLong(3 * 1024 * 1024));
TreeMap<Long, MessageExt> treeMap = new TreeMap<>();
treeMap.put(1L, new MessageExt());
when(processQueue.getMsgTreeMap()).thenReturn(treeMap);
when(pullRequest.getProcessQueue()).thenReturn(processQueue);
when(defaultMQPushConsumer.getPullThresholdForQueue()).thenReturn(1);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
defaultMQPushConsumerImpl.pullMessage(pullRequest);
}
@Test
public void testPullMessageWithMsgSizeFlowControl() {
when(processQueue.getMsgCount()).thenReturn(new AtomicLong(2));
when(processQueue.getMsgSize()).thenReturn(new AtomicLong(3 * 1024 * 1024));
TreeMap<Long, MessageExt> treeMap = new TreeMap<>();
treeMap.put(1L, new MessageExt());
when(processQueue.getMsgTreeMap()).thenReturn(treeMap);
when(pullRequest.getProcessQueue()).thenReturn(processQueue);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
when(defaultMQPushConsumer.getPullThresholdForQueue()).thenReturn(3);
when(defaultMQPushConsumer.getPullThresholdSizeForQueue()).thenReturn(1);
defaultMQPushConsumerImpl.pullMessage(pullRequest);
}
@Test
public void testPullMessageWithMaxSpanFlowControl() {
when(processQueue.getMsgCount()).thenReturn(new AtomicLong(2));
when(processQueue.getMaxSpan()).thenReturn(2L);
when(processQueue.getMsgSize()).thenReturn(new AtomicLong(3 * 1024 * 1024));
TreeMap<Long, MessageExt> treeMap = new TreeMap<>();
treeMap.put(1L, new MessageExt());
when(processQueue.getMsgTreeMap()).thenReturn(treeMap);
when(pullRequest.getProcessQueue()).thenReturn(processQueue);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
when(defaultMQPushConsumer.getPullThresholdForQueue()).thenReturn(3);
when(defaultMQPushConsumer.getPullThresholdSizeForQueue()).thenReturn(10);
defaultMQPushConsumerImpl.pullMessage(pullRequest);
}
@Test
public void testPullMessageWithNotLocked() {
when(processQueue.getMsgCount()).thenReturn(new AtomicLong(2));
when(processQueue.getMsgSize()).thenReturn(new AtomicLong(3 * 1024 * 1024));
when(pullRequest.getProcessQueue()).thenReturn(processQueue);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
defaultMQPushConsumerImpl.setConsumeOrderly(true);
when(defaultMQPushConsumer.getPullThresholdForQueue()).thenReturn(3);
when(defaultMQPushConsumer.getPullThresholdSizeForQueue()).thenReturn(10);
defaultMQPushConsumerImpl.pullMessage(pullRequest);
}
@Test
public void testPullMessageWithSubscriptionDataIsNull() {
when(processQueue.getMsgCount()).thenReturn(new AtomicLong(2));
when(processQueue.getMsgSize()).thenReturn(new AtomicLong(3 * 1024 * 1024));
when(pullRequest.getMessageQueue()).thenReturn(createMessageQueue());
when(pullRequest.getProcessQueue()).thenReturn(processQueue);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
when(defaultMQPushConsumer.getPullThresholdForQueue()).thenReturn(3);
when(defaultMQPushConsumer.getPullThresholdSizeForQueue()).thenReturn(10);
defaultMQPushConsumerImpl.pullMessage(pullRequest);
}
@Test
public void testPullMessageWithNoMatchedMsg() throws MQBrokerException, RemotingException, InterruptedException, MQClientException {
when(processQueue.getMsgCount()).thenReturn(new AtomicLong(2));
when(processQueue.getMsgSize()).thenReturn(new AtomicLong(3 * 1024 * 1024));
when(pullRequest.getMessageQueue()).thenReturn(createMessageQueue());
when(pullRequest.getProcessQueue()).thenReturn(processQueue);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
when(defaultMQPushConsumer.getPullThresholdForQueue()).thenReturn(3);
when(defaultMQPushConsumer.getPullThresholdSizeForQueue()).thenReturn(10);
PullResult pullResultMock = mock(PullResult.class);
when(pullAPIWrapper.processPullResult(any(MessageQueue.class), any(PullResult.class), any(SubscriptionData.class))).thenReturn(pullResultMock);
when(pullResultMock.getPullStatus()).thenReturn(PullStatus.NO_MATCHED_MSG);
doAnswer(invocation -> {
PullCallback callback = invocation.getArgument(12);
PullResult pullResult = mock(PullResult.class);
callback.onSuccess(pullResult);
return null;
}).when(pullAPIWrapper).pullKernelImpl(
any(MessageQueue.class),
any(),
any(),
anyLong(),
anyLong(),
anyInt(),
anyInt(),
anyInt(),
anyLong(),
anyLong(),
anyLong(),
any(CommunicationMode.class),
any(PullCallback.class));
defaultMQPushConsumerImpl.pullMessage(pullRequest);
}
@Test
public void testPullMessageWithOffsetIllegal() throws MQBrokerException, RemotingException, InterruptedException, MQClientException {
when(processQueue.getMsgCount()).thenReturn(new AtomicLong(2));
when(processQueue.getMsgSize()).thenReturn(new AtomicLong(3 * 1024 * 1024));
when(pullRequest.getMessageQueue()).thenReturn(createMessageQueue());
when(pullRequest.getProcessQueue()).thenReturn(processQueue);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
when(defaultMQPushConsumer.getPullThresholdForQueue()).thenReturn(3);
when(defaultMQPushConsumer.getPullThresholdSizeForQueue()).thenReturn(10);
PullResult pullResultMock = mock(PullResult.class);
when(pullAPIWrapper.processPullResult(any(MessageQueue.class), any(PullResult.class), any(SubscriptionData.class))).thenReturn(pullResultMock);
when(pullResultMock.getPullStatus()).thenReturn(PullStatus.OFFSET_ILLEGAL);
doAnswer(invocation -> {
PullCallback callback = invocation.getArgument(12);
PullResult pullResult = mock(PullResult.class);
callback.onSuccess(pullResult);
return null;
}).when(pullAPIWrapper).pullKernelImpl(
any(MessageQueue.class),
any(),
any(),
anyLong(),
anyLong(),
anyInt(),
anyInt(),
anyInt(),
anyLong(),
anyLong(),
anyLong(),
any(CommunicationMode.class),
any(PullCallback.class));
defaultMQPushConsumerImpl.pullMessage(pullRequest);
}
@Test
public void testPullMessageWithException() throws MQBrokerException, RemotingException, InterruptedException, MQClientException {
when(processQueue.getMsgCount()).thenReturn(new AtomicLong(2));
when(processQueue.getMsgSize()).thenReturn(new AtomicLong(3 * 1024 * 1024));
when(pullRequest.getMessageQueue()).thenReturn(createMessageQueue());
when(pullRequest.getProcessQueue()).thenReturn(processQueue);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
when(defaultMQPushConsumer.getPullThresholdForQueue()).thenReturn(3);
when(defaultMQPushConsumer.getPullThresholdSizeForQueue()).thenReturn(10);
doAnswer(invocation -> {
PullCallback callback = invocation.getArgument(12);
callback.onException(new RuntimeException("exception"));
return null;
}).when(pullAPIWrapper).pullKernelImpl(
any(MessageQueue.class),
any(),
any(),
anyLong(),
anyLong(),
anyInt(),
anyInt(),
anyInt(),
anyLong(),
anyLong(),
anyLong(),
any(CommunicationMode.class),
any(PullCallback.class));
defaultMQPushConsumerImpl.pullMessage(pullRequest);
}
@Test
public void testPopMessageWithFound() throws RemotingException, InterruptedException, MQClientException {
when(popRequest.getPopProcessQueue()).thenReturn(popProcessQueue);
when(popRequest.getMessageQueue()).thenReturn(createMessageQueue());
when(popRequest.getConsumerGroup()).thenReturn(defaultGroup);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
ConcurrentMap<String, SubscriptionData> subscriptionDataMap = new ConcurrentHashMap<>();
SubscriptionData subscriptionData = new SubscriptionData();
subscriptionData.setTagsSet(Collections.singleton("*"));
subscriptionDataMap.put(defaultTopic, subscriptionData);
when(rebalanceImpl.getSubscriptionInner()).thenReturn(subscriptionDataMap);
doAnswer(invocation -> {
PopCallback callback = invocation.getArgument(5);
PopResult popResult = mock(PopResult.class);
when(popResult.getPopStatus()).thenReturn(PopStatus.FOUND);
when(popResult.getMsgFoundList()).thenReturn(Collections.singletonList(createMessageExt()));
callback.onSuccess(popResult);
return null;
}).when(pullAPIWrapper).popAsync(
any(MessageQueue.class),
anyLong(),
anyInt(),
any(),
anyLong(),
any(PopCallback.class),
anyBoolean(),
anyInt(),
anyBoolean(),
any(),
any());
defaultMQPushConsumerImpl.popMessage(popRequest);
}
@Test
public void testPopMessageWithException() throws RemotingException, InterruptedException, MQClientException {
when(popRequest.getPopProcessQueue()).thenReturn(popProcessQueue);
when(popRequest.getMessageQueue()).thenReturn(createMessageQueue());
when(popRequest.getConsumerGroup()).thenReturn(defaultGroup);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
ConcurrentMap<String, SubscriptionData> subscriptionDataMap = new ConcurrentHashMap<>();
SubscriptionData subscriptionData = new SubscriptionData();
subscriptionData.setTagsSet(Collections.singleton("*"));
subscriptionDataMap.put(defaultTopic, subscriptionData);
when(rebalanceImpl.getSubscriptionInner()).thenReturn(subscriptionDataMap);
doAnswer(invocation -> {
PopCallback callback = invocation.getArgument(5);
callback.onException(new RuntimeException("exception"));
return null;
}).when(pullAPIWrapper).popAsync(
any(MessageQueue.class),
anyLong(),
anyInt(),
any(),
anyLong(),
any(PopCallback.class),
anyBoolean(),
anyInt(),
anyBoolean(),
any(),
any());
defaultMQPushConsumerImpl.popMessage(popRequest);
}
@Test
public void testPopMessageWithNoNewMsg() throws RemotingException, InterruptedException, MQClientException {
when(popRequest.getPopProcessQueue()).thenReturn(popProcessQueue);
when(popRequest.getMessageQueue()).thenReturn(createMessageQueue());
when(popRequest.getConsumerGroup()).thenReturn(defaultGroup);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
ConcurrentMap<String, SubscriptionData> subscriptionDataMap = new ConcurrentHashMap<>();
SubscriptionData subscriptionData = new SubscriptionData();
subscriptionData.setTagsSet(Collections.singleton("*"));
subscriptionDataMap.put(defaultTopic, subscriptionData);
when(rebalanceImpl.getSubscriptionInner()).thenReturn(subscriptionDataMap);
doAnswer(invocation -> {
PopCallback callback = invocation.getArgument(5);
PopResult popResult = mock(PopResult.class);
when(popResult.getPopStatus()).thenReturn(PopStatus.NO_NEW_MSG);
callback.onSuccess(popResult);
return null;
}).when(pullAPIWrapper).popAsync(
any(MessageQueue.class),
anyLong(),
anyInt(),
any(),
anyLong(),
any(PopCallback.class),
anyBoolean(),
anyInt(),
anyBoolean(),
any(),
any());
defaultMQPushConsumerImpl.popMessage(popRequest);
}
@Test
public void testPopMessageWithPollingFull() throws RemotingException, InterruptedException, MQClientException {
when(popRequest.getPopProcessQueue()).thenReturn(popProcessQueue);
when(popRequest.getMessageQueue()).thenReturn(createMessageQueue());
when(popRequest.getConsumerGroup()).thenReturn(defaultGroup);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
ConcurrentMap<String, SubscriptionData> subscriptionDataMap = new ConcurrentHashMap<>();
SubscriptionData subscriptionData = new SubscriptionData();
subscriptionData.setTagsSet(Collections.singleton("*"));
subscriptionDataMap.put(defaultTopic, subscriptionData);
when(rebalanceImpl.getSubscriptionInner()).thenReturn(subscriptionDataMap);
doAnswer(invocation -> {
PopCallback callback = invocation.getArgument(5);
PopResult popResult = mock(PopResult.class);
when(popResult.getPopStatus()).thenReturn(PopStatus.POLLING_FULL);
callback.onSuccess(popResult);
return null;
}).when(pullAPIWrapper).popAsync(any(
MessageQueue.class),
anyLong(),
anyInt(),
any(),
anyLong(),
any(PopCallback.class),
anyBoolean(),
anyInt(),
anyBoolean(),
any(),
any());
defaultMQPushConsumerImpl.popMessage(popRequest);
}
@Test
public void testPopMessageWithStateNotOk() {
when(popRequest.getPopProcessQueue()).thenReturn(popProcessQueue);
defaultMQPushConsumerImpl.popMessage(popRequest);
}
@Test
public void testPopMessageWithIsPause() {
when(popRequest.getPopProcessQueue()).thenReturn(popProcessQueue);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
defaultMQPushConsumerImpl.setPause(true);
defaultMQPushConsumerImpl.popMessage(popRequest);
}
@Test
public void testPopMessageWithWaiAckMsgCountFlowControl() {
when(popProcessQueue.getWaiAckMsgCount()).thenReturn(2);
when(popRequest.getPopProcessQueue()).thenReturn(popProcessQueue);
when(defaultMQPushConsumer.getPopThresholdForQueue()).thenReturn(1);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
defaultMQPushConsumerImpl.popMessage(popRequest);
}
@Test
public void testPopMessageWithSubscriptionDataIsNull() throws RemotingException, InterruptedException, MQClientException {
when(popProcessQueue.getWaiAckMsgCount()).thenReturn(2);
when(popRequest.getPopProcessQueue()).thenReturn(popProcessQueue);
when(popRequest.getMessageQueue()).thenReturn(createMessageQueue());
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
when(defaultMQPushConsumer.getPopThresholdForQueue()).thenReturn(3);
defaultMQPushConsumerImpl.popMessage(popRequest);
verify(pullAPIWrapper).popAsync(any(MessageQueue.class),
eq(60000L),
eq(0),
any(),
eq(15000L),
any(PopCallback.class),
eq(true),
eq(0),
eq(false),
any(),
any());
}
@Test
public void testQueryMessage() throws InterruptedException, MQClientException {
assertNull(defaultMQPushConsumerImpl.queryMessage(defaultTopic, defaultKey, 1, 0, 1));
}
@Test
public void testQueryMessageByUniqKey() throws InterruptedException, MQClientException {
assertNull(defaultMQPushConsumerImpl.queryMessageByUniqKey(defaultTopic, defaultKey));
}
@Test
public void testSendMessageBack() throws InterruptedException, MQClientException, MQBrokerException, RemotingException {
when(mQClientFactory.findBrokerAddressInPublish(anyString())).thenReturn(defaultBrokerAddr);
defaultMQPushConsumerImpl.sendMessageBack(createMessageExt(), 1, createMessageQueue());
verify(mqClientAPIImpl).consumerSendMessageBack(
eq(defaultBrokerAddr),
eq(defaultBroker),
any(MessageExt.class),
any(),
eq(1),
eq(5000L),
eq(0));
}
@Test
public void testAckAsync() throws MQBrokerException, RemotingException, InterruptedException {
doAnswer(invocation -> {
AckCallback callback = invocation.getArgument(2);
AckResult result = mock(AckResult.class);
when(result.getStatus()).thenReturn(AckStatus.OK);
callback.onSuccess(result);
return null;
}).when(mqClientAPIImpl).ackMessageAsync(any(),
anyLong(),
any(AckCallback.class),
any(AckMessageRequestHeader.class));
defaultMQPushConsumerImpl.ackAsync(createMessageExt(), defaultGroup);
verify(mqClientAPIImpl).ackMessageAsync(eq(defaultBrokerAddr),
eq(3000L),
any(AckCallback.class),
any(AckMessageRequestHeader.class));
}
@Test
public void testChangePopInvisibleTimeAsync() throws MQBrokerException, RemotingException, InterruptedException, MQClientException {
AckCallback callback = mock(AckCallback.class);
String extraInfo = createMessageExt().getProperty(MessageConst.PROPERTY_POP_CK);
defaultMQPushConsumerImpl.changePopInvisibleTimeAsync(defaultTopic, defaultGroup, extraInfo, defaultTimeout, callback);
verify(mqClientAPIImpl).changeInvisibleTimeAsync(eq(defaultBroker),
eq(defaultBrokerAddr),
any(ChangeInvisibleTimeRequestHeader.class),
eq(defaultTimeout),
any(AckCallback.class));
}
@Test
public void testShutdown() {
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
defaultMQPushConsumerImpl.shutdown();
assertEquals(ServiceState.SHUTDOWN_ALREADY, defaultMQPushConsumerImpl.getServiceState());
}
@Test
public void testSubscribe() throws MQClientException {
defaultMQPushConsumerImpl.subscribe(defaultTopic, "fullClassname", "filterClassSource");
RebalanceImpl actual = defaultMQPushConsumerImpl.getRebalanceImpl();
assertEquals(1, actual.getSubscriptionInner().size());
}
@Test
public void testSubscribeByMessageSelector() throws MQClientException {
MessageSelector messageSelector = mock(MessageSelector.class);
defaultMQPushConsumerImpl.subscribe(defaultTopic, messageSelector);
RebalanceImpl actual = defaultMQPushConsumerImpl.getRebalanceImpl();
assertEquals(1, actual.getSubscriptionInner().size());
}
@Test
public void testSuspend() {
defaultMQPushConsumerImpl.suspend();
assertTrue(defaultMQPushConsumerImpl.isPause());
}
@Test
public void testViewMessage() throws InterruptedException, MQClientException, MQBrokerException, RemotingException {
assertNull(defaultMQPushConsumerImpl.viewMessage(defaultTopic, createMessageExt().getMsgId()));
}
@Test
public void testResetOffsetByTimeStamp() throws MQClientException {
ConcurrentMap<String, SubscriptionData> subscriptionDataMap = new ConcurrentHashMap<>();
subscriptionDataMap.put(defaultTopic, new SubscriptionData());
when(rebalanceImpl.getSubscriptionInner()).thenReturn(subscriptionDataMap);
defaultMQPushConsumerImpl.resetOffsetByTimeStamp(System.currentTimeMillis());
verify(mQClientFactory).resetOffset(eq(defaultTopic), any(), any());
}
@Test
public void testSearchOffset() throws MQClientException {
assertEquals(0, defaultMQPushConsumerImpl.searchOffset(createMessageQueue(), System.currentTimeMillis()));
}
@Test
public void testQueryConsumeTimeSpan() throws InterruptedException, MQClientException, MQBrokerException, RemotingException {
TopicRouteData topicRouteData = new TopicRouteData();
topicRouteData.getBrokerDatas().add(createBrokerData());
when(mqClientAPIImpl.getTopicRouteInfoFromNameServer(any(), anyLong())).thenReturn(topicRouteData);
List<QueueTimeSpan> actual = defaultMQPushConsumerImpl.queryConsumeTimeSpan(defaultTopic);
assertNotNull(actual);
assertEquals(0, actual.size());
}
@Test
public void testTryResetPopRetryTopic() {
TopicRouteData topicRouteData = new TopicRouteData();
topicRouteData.getBrokerDatas().add(createBrokerData());
MessageExt messageExt = createMessageExt();
List<MessageExt> msgs = new ArrayList<>();
messageExt.setTopic(MixAll.RETRY_GROUP_TOPIC_PREFIX + defaultGroup + "_" + defaultTopic);
msgs.add(messageExt);
defaultMQPushConsumerImpl.tryResetPopRetryTopic(msgs, defaultGroup);
assertEquals(defaultTopic, msgs.get(0).getTopic());
}
@Test
public void testGetPopDelayLevel() {
int[] actual = defaultMQPushConsumerImpl.getPopDelayLevel();
int[] expected = new int[]{10, 30, 60, 120, 180, 240, 300, 360, 420, 480, 540, 600, 1200, 1800, 3600, 7200};
assertArrayEquals(expected, actual);
}
@Test
public void testGetMessageQueueListener() {
assertNull(defaultMQPushConsumerImpl.getMessageQueueListener());
}
@Test
public void testConsumerRunningInfo() {
ConcurrentMap<MessageQueue, ProcessQueue> processQueueMap = new ConcurrentHashMap<>();
ConcurrentMap<MessageQueue, PopProcessQueue> popProcessQueueMap = new ConcurrentHashMap<>();
processQueueMap.put(createMessageQueue(), new ProcessQueue());
popProcessQueueMap.put(createMessageQueue(), new PopProcessQueue());
when(rebalanceImpl.getProcessQueueTable()).thenReturn(processQueueMap);
when(rebalanceImpl.getPopProcessQueueTable()).thenReturn(popProcessQueueMap);
ConsumerRunningInfo actual = defaultMQPushConsumerImpl.consumerRunningInfo();
assertNotNull(actual);
assertEquals(1, actual.getSubscriptionSet().size());
assertEquals(defaultTopic, actual.getSubscriptionSet().iterator().next().getTopic());
assertEquals(1, actual.getMqTable().size());
assertEquals(1, actual.getMqPopTable().size());
assertEquals(1, actual.getStatusTable().size());
}
private BrokerData createBrokerData() {
BrokerData result = new BrokerData();
HashMap<Long, String> brokerAddrMap = new HashMap<>();
brokerAddrMap.put(MixAll.MASTER_ID, defaultBrokerAddr);
result.setBrokerAddrs(brokerAddrMap);
result.setBrokerName(defaultBroker);
return result;
}
private MessageQueue createMessageQueue() {
MessageQueue result = new MessageQueue();
result.setQueueId(0);
result.setBrokerName(defaultBroker);
result.setTopic(defaultTopic);
return result;
}
private MessageExt createMessageExt() {
MessageExt result = new MessageExt();
result.setBody("body".getBytes(StandardCharsets.UTF_8));
result.setTopic(defaultTopic);
result.setBrokerName(defaultBroker);
result.putUserProperty("key", "value");
result.getProperties().put(MessageConst.PROPERTY_PRODUCER_GROUP, defaultGroup);
result.getProperties().put(MessageConst.PROPERTY_UNIQ_CLIENT_MESSAGE_ID_KEYIDX, "TX1");
long curTime = System.currentTimeMillis();
result.setBornTimestamp(curTime - 1000);
String popProps = String.format("%d %d %d %d %d %s %d %d %d", curTime, curTime, curTime, curTime, curTime, defaultBroker, 1, 0L, 1L);
result.getProperties().put(MessageConst.PROPERTY_POP_CK, popProps);
result.setKeys("keys");
result.setTags("*");
SocketAddress bornHost = new InetSocketAddress("127.0.0.1", 12911);
SocketAddress storeHost = new InetSocketAddress("127.0.0.1", 10911);
result.setBornHost(bornHost);
result.setStoreHost(storeHost);
return result;
}
}
| DefaultMQPushConsumerImplTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/refaster/UTypeVarTest.java | {
"start": 1046,
"end": 2236
} | class ____ {
@Test
public void equality() {
UType nullType = UPrimitiveType.create(TypeKind.NULL);
UType objectType = UClassType.create("java.lang.Object", ImmutableList.<UType>of());
UType charSequenceType = UClassType.create("java.lang.CharSequence", ImmutableList.<UType>of());
UType stringType = UClassType.create("java.lang.String", ImmutableList.<UType>of());
new EqualsTester()
.addEqualityGroup(UTypeVar.create("T", nullType, charSequenceType))
// T extends CharSequence
.addEqualityGroup(UTypeVar.create("T", stringType, charSequenceType))
// T extends CharSequence super String
.addEqualityGroup(UTypeVar.create("T", nullType, objectType))
// T extends Object
.addEqualityGroup(UTypeVar.create("E", nullType, charSequenceType))
// E extends CharSequence
.testEquals();
}
@Test
public void serialization() {
UType nullType = UPrimitiveType.create(TypeKind.NULL);
UType charSequenceType = UClassType.create("java.lang.CharSequence", ImmutableList.<UType>of());
SerializableTester.reserializeAndAssert(UTypeVar.create("T", nullType, charSequenceType));
}
}
| UTypeVarTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/deletion/recovery/DeletionTaskRecoveryInfo.java | {
"start": 1118,
"end": 2231
} | class ____ {
private DeletionTask task;
private List<Integer> successorTaskIds;
private long deletionTimestamp;
/**
* Information needed for recovering the DeletionTask.
*
* @param task the DeletionTask
* @param successorTaskIds the dependent DeletionTasks.
* @param deletionTimestamp the scheduled times of deletion.
*/
public DeletionTaskRecoveryInfo(DeletionTask task,
List<Integer> successorTaskIds, long deletionTimestamp) {
this.task = task;
this.successorTaskIds = successorTaskIds;
this.deletionTimestamp = deletionTimestamp;
}
/**
* Return the recovered DeletionTask.
*
* @return the recovered DeletionTask.
*/
public DeletionTask getTask() {
return task;
}
/**
* Return all of the dependent DeletionTasks.
*
* @return the dependent DeletionTasks.
*/
public List<Integer> getSuccessorTaskIds() {
return successorTaskIds;
}
/**
* Return the deletion timestamp.
*
* @return the deletion timestamp.
*/
public long getDeletionTimestamp() {
return deletionTimestamp;
}
} | DeletionTaskRecoveryInfo |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java | {
"start": 29007,
"end": 29773
} | class ____ implements FileSystemAccess.FileSystemExecutor<JSONObject> {
/**
* Executes the filesystem operation.
*
* @param fs filesystem instance to use.
*
* @return a JSON object with the user home directory.
*
* @throws IOException thrown if an IO error occurred.
*/
@Override
@SuppressWarnings("unchecked")
public JSONObject execute(FileSystem fs) throws IOException {
Path homeDir = fs.getHomeDirectory();
JSONObject json = new JSONObject();
json.put(HttpFSFileSystem.HOME_DIR_JSON, homeDir.toUri().getPath());
return json;
}
}
/**
* Executor that performs a list-status FileSystemAccess files system operation.
*/
@InterfaceAudience.Private
public static | FSHomeDir |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java | {
"start": 991,
"end": 1405
} | class ____ extends TestSymlinkLocalFS {
@BeforeAll
public static void testSetup() throws Exception {
FileContext context = FileContext.getLocalFSFileContext();
wrapper = new FileContextTestWrapper(context);
}
@Override
public void testRenameFileWithDestParentSymlink() throws IOException {
assumeNotWindows();
super.testRenameFileWithDestParentSymlink();
}
}
| TestSymlinkLocalFSFileContext |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/create/OracleCreateTypeTest11.java | {
"start": 1021,
"end": 4606
} | class ____ extends OracleTest {
public void test_types() throws Exception {
String sql = "CREATE TYPE BODY rational\n" +
" IS\n" +
" MAP MEMBER FUNCTION rat_to_real RETURN REAL IS\n" +
" BEGIN\n" +
" RETURN numerator/denominator;\n" +
" END;\n" +
"\n" +
" MEMBER PROCEDURE normalize IS\n" +
" gcd NUMBER := integer_operations.greatest_common_divisor\n" +
" (numerator, denominator);\n" +
" BEGIN\n" +
" numerator := numerator/gcd;\n" +
" denominator := denominator/gcd;\n" +
" END;\n" +
"\n" +
" MEMBER FUNCTION plus(x rational) RETURN rational IS\n" +
" r rational := rational_operations.make_rational\n" +
" (numerator*x.denominator +\n" +
" x.numerator*denominator,\n" +
" denominator*x.denominator);\n" +
" BEGIN\n" +
" RETURN r;\n" +
" END;\n" +
"\n" +
" END;";
System.out.println(sql);
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
assertEquals("CREATE TYPE BODY rational IS\n" +
"\tMAP MEMBER FUNCTION rat_to_real () RETURN REAL\n" +
"\tIS\n" +
"\tBEGIN\n" +
"\t\tRETURN numerator / denominator;\n" +
"\tEND;\n" +
"\tMEMBER PROCEDURE normalize\n" +
"\tIS\n" +
"\tgcd NUMBER := integer_operations.greatest_common_divisor(numerator, denominator);\n" +
"\tBEGIN\n" +
"\t\tnumerator := numerator / gcd;\n" +
"\t\tdenominator := denominator / gcd;\n" +
"\tEND;\n" +
"\tMEMBER FUNCTION plus (x rational) RETURN rational\n" +
"\tIS\n" +
"\tr rational := rational_operations.make_rational(numerator * x.denominator + x.numerator * denominator, denominator * x.denominator);\n" +
"\tBEGIN\n" +
"\t\tRETURN r;\n" +
"\tEND;\n" +
"END;",
SQLUtils.toSQLString(stmt, JdbcConstants.ORACLE));
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(0, visitor.getTables().size());
assertEquals(0, visitor.getColumns().size());
SQLUtils.toPGString(stmt);
// assertTrue(visitor.getColumns().contains(new TableStat.Column("orders", "order_total")));
}
}
| OracleCreateTypeTest11 |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/json/JsonComparator.java | {
"start": 918,
"end": 1742
} | interface ____ {
/**
* Compare the given JSON strings.
* @param expectedJson the expected JSON
* @param actualJson the actual JSON
* @return the JSON comparison
*/
JsonComparison compare(@Nullable String expectedJson, @Nullable String actualJson);
/**
* Assert that the {@code expectedJson} matches the comparison rules of ths
* instance against the {@code actualJson}. Throw an {@link AssertionError}
* if the comparison does not match.
* @param expectedJson the expected JSON
* @param actualJson the actual JSON
*/
default void assertIsMatch(@Nullable String expectedJson, @Nullable String actualJson) {
JsonComparison comparison = compare(expectedJson, actualJson);
if (comparison.getResult() == Result.MISMATCH) {
throw new AssertionError(comparison.getMessage());
}
}
}
| JsonComparator |
java | elastic__elasticsearch | build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java | {
"start": 3259,
"end": 25047
} | class ____ implements Plugin<Project> {
private static final String TESTS_MAX_PARALLEL_FORKS_SYSPROP = "tests.max.parallel.forks";
private static final String DEFAULT_DISTRIBUTION_SYSPROP = "tests.default.distribution";
private static final String INTEG_TEST_DISTRIBUTION_SYSPROP = "tests.integ-test.distribution";
private static final String BWC_SNAPSHOT_DISTRIBUTION_SYSPROP_PREFIX = "tests.snapshot.distribution.";
private static final String BWC_RELEASED_DISTRIBUTION_SYSPROP_PREFIX = "tests.release.distribution.";
private static final String TESTS_CLUSTER_MODULES_PATH_SYSPROP = "tests.cluster.modules.path";
private static final String TESTS_CLUSTER_PLUGINS_PATH_SYSPROP = "tests.cluster.plugins.path";
private static final String DEFAULT_REST_INTEG_TEST_DISTRO = "default_distro";
private static final String INTEG_TEST_REST_INTEG_TEST_DISTRO = "integ_test_distro";
private static final String MODULES_CONFIGURATION = "clusterModules";
private static final String PLUGINS_CONFIGURATION = "clusterPlugins";
private static final String EXTRACTED_PLUGINS_CONFIGURATION = "extractedPlugins";
private static final Attribute<String> CONFIGURATION_ATTRIBUTE = Attribute.of("test-cluster-artifacts", String.class);
private static final String FEATURES_METADATA_CONFIGURATION = "featuresMetadataDeps";
private static final String DEFAULT_DISTRO_FEATURES_METADATA_CONFIGURATION = "defaultDistrofeaturesMetadataDeps";
private static final String TESTS_FEATURES_METADATA_PATH = "tests.features.metadata.path";
private static final String MINIMUM_WIRE_COMPATIBLE_VERSION_SYSPROP = "tests.minimum.wire.compatible";
private final ProviderFactory providerFactory;
@Inject
public RestTestBasePlugin(ProviderFactory providerFactory) {
this.providerFactory = providerFactory;
}
@Override
public void apply(Project project) {
project.getPluginManager().apply(ElasticsearchJavaBasePlugin.class);
project.getPluginManager().apply(InternalDistributionDownloadPlugin.class);
project.getPluginManager().apply(JvmToolchainsPlugin.class);
var bwcVersions = loadBuildParams(project).get().getBwcVersions();
// Register integ-test and default distributions
ElasticsearchDistribution defaultDistro = createDistribution(
project,
DEFAULT_REST_INTEG_TEST_DISTRO,
VersionProperties.getElasticsearch(),
false
);
ElasticsearchDistribution integTestDistro = createDistribution(
project,
INTEG_TEST_REST_INTEG_TEST_DISTRO,
VersionProperties.getElasticsearch(),
ElasticsearchDistributionTypes.INTEG_TEST_ZIP,
false
);
// Create configures for module and plugin dependencies
Configuration modulesConfiguration = createPluginConfiguration(project, MODULES_CONFIGURATION, true, false);
Configuration pluginsConfiguration = createPluginConfiguration(project, PLUGINS_CONFIGURATION, false, false);
Configuration extractedPluginsConfiguration = createPluginConfiguration(project, EXTRACTED_PLUGINS_CONFIGURATION, true, true);
extractedPluginsConfiguration.extendsFrom(pluginsConfiguration);
configureArtifactTransforms(project);
// Create configuration for aggregating feature metadata
FileCollection featureMetadataConfig = project.getConfigurations().create(FEATURES_METADATA_CONFIGURATION, c -> {
c.setCanBeConsumed(false);
c.setCanBeResolved(true);
c.attributes(
a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ClusterFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
);
c.defaultDependencies(d -> d.add(project.getDependencies().project(Map.of("path", ":server"))));
c.withDependencies(dependencies -> {
// We can't just use Configuration#extendsFrom() here as we'd inherit the wrong project configuration
copyDependencies(project, dependencies, modulesConfiguration);
copyDependencies(project, dependencies, pluginsConfiguration);
});
});
FileCollection defaultDistroFeatureMetadataConfig = project.getConfigurations()
.create(DEFAULT_DISTRO_FEATURES_METADATA_CONFIGURATION, c -> {
c.setCanBeConsumed(false);
c.setCanBeResolved(true);
c.attributes(
a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ClusterFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
);
c.defaultDependencies(
d -> d.add(project.getDependencies().project(Map.of("path", ":distribution", "configuration", "featuresMetadata")))
);
});
// For plugin and module projects, register the current project plugin bundle as a dependency
project.getPluginManager().withPlugin("elasticsearch.esplugin", plugin -> {
if (GradleUtils.isModuleProject(project.getPath())) {
project.getDependencies().add(MODULES_CONFIGURATION, getExplodedBundleDependency(project, project.getPath()));
} else {
project.getDependencies().add(PLUGINS_CONFIGURATION, getBundleZipTaskDependency(project, project.getPath()));
}
});
project.getTasks().withType(StandaloneRestIntegTestTask.class).configureEach(task -> {
SystemPropertyCommandLineArgumentProvider nonInputSystemProperties = task.getExtensions()
.getByType(SystemPropertyCommandLineArgumentProvider.class);
task.dependsOn(integTestDistro, modulesConfiguration);
registerDistributionInputs(task, integTestDistro);
// Pass feature metadata on to tests
task.getInputs().files(featureMetadataConfig).withPathSensitivity(PathSensitivity.NONE);
nonInputSystemProperties.systemProperty(TESTS_FEATURES_METADATA_PATH, () -> featureMetadataConfig.getAsPath());
// Enable parallel execution for these tests since each test gets its own cluster
task.setMaxParallelForks(Math.max(1, task.getProject().getGradle().getStartParameter().getMaxWorkerCount() / 2));
nonInputSystemProperties.systemProperty(TESTS_MAX_PARALLEL_FORKS_SYSPROP, () -> String.valueOf(task.getMaxParallelForks()));
// Disable test failure reporting since this stuff is now captured in build scans
task.getExtensions().getByType(ErrorReportingTestListener.class).setDumpOutputOnFailure(false);
// Disable the security manager and syscall filter since the test framework needs to fork processes
task.systemProperty("tests.security.manager", "false");
task.systemProperty("tests.system_call_filter", "false");
// Pass minimum wire compatible version which is used by upgrade tests
task.systemProperty(MINIMUM_WIRE_COMPATIBLE_VERSION_SYSPROP, bwcVersions.getMinimumWireCompatibleVersion());
// Register plugins and modules as task inputs and pass paths as system properties to tests
var modulePath = project.getObjects().fileCollection().from(modulesConfiguration);
nonInputSystemProperties.systemProperty(TESTS_CLUSTER_MODULES_PATH_SYSPROP, modulePath::getAsPath);
registerConfigurationInputs(task, modulesConfiguration.getName(), modulePath);
var pluginPath = project.getObjects().fileCollection().from(pluginsConfiguration);
nonInputSystemProperties.systemProperty(TESTS_CLUSTER_PLUGINS_PATH_SYSPROP, pluginPath::getAsPath);
registerConfigurationInputs(
task,
extractedPluginsConfiguration.getName(),
project.getObjects().fileCollection().from(extractedPluginsConfiguration)
);
// Wire up integ-test distribution by default for all test tasks
FileCollection extracted = integTestDistro.getExtracted();
nonInputSystemProperties.systemProperty(INTEG_TEST_DISTRIBUTION_SYSPROP, () -> extracted.getSingleFile().getPath());
// Add `usesDefaultDistribution()` extension method to test tasks to indicate they require the default distro
task.getExtensions().getExtraProperties().set("usesDefaultDistribution", new Closure<Void>(task) {
@Override
public Void call(Object... args) {
if (reasonForUsageProvided(args) == false) {
throw new IllegalArgumentException(
"Reason for using `usesDefaultDistribution` required.\nUse usesDefaultDistribution(\"reason why default distro is required here\")."
);
}
task.dependsOn(defaultDistro);
registerDistributionInputs(task, defaultDistro);
nonInputSystemProperties.systemProperty(
DEFAULT_DISTRIBUTION_SYSPROP,
providerFactory.provider(() -> defaultDistro.getExtracted().getSingleFile().getPath())
);
// If we are using the default distribution we need to register all module feature metadata
task.getInputs().files(defaultDistroFeatureMetadataConfig).withPathSensitivity(PathSensitivity.NONE);
nonInputSystemProperties.systemProperty(TESTS_FEATURES_METADATA_PATH, defaultDistroFeatureMetadataConfig::getAsPath);
return null;
}
private static boolean reasonForUsageProvided(Object[] args) {
return args.length == 1 && args[0] instanceof String && ((String) args[0]).isBlank() == false;
}
});
// Add `usesBwcDistribution(version)` extension method to test tasks to indicate they require a BWC distribution
task.getExtensions().getExtraProperties().set("usesBwcDistribution", new Closure<Void>(task) {
@Override
public Void call(Object... args) {
if (args.length != 1 || args[0] instanceof Version == false) {
throw new IllegalArgumentException("Expected exactly one argument of type org.elasticsearch.gradle.Version");
}
Version version = (Version) args[0];
boolean isReleased = bwcVersions.unreleasedInfo(version) == null;
String versionString = version.toString();
ElasticsearchDistribution bwcDistro = createDistribution(project, "bwc_" + versionString, versionString, false);
handleJdkIncompatibleWithOS(version, project, task);
task.dependsOn(bwcDistro);
registerDistributionInputs(task, bwcDistro);
nonInputSystemProperties.systemProperty(
(isReleased ? BWC_RELEASED_DISTRIBUTION_SYSPROP_PREFIX : BWC_SNAPSHOT_DISTRIBUTION_SYSPROP_PREFIX) + versionString,
providerFactory.provider(() -> bwcDistro.getExtracted().getSingleFile().getPath())
);
if (version.before(bwcVersions.getMinimumWireCompatibleVersion())) {
// If we are upgrade testing older versions we also need to upgrade to 7.last
this.call(bwcVersions.getMinimumWireCompatibleVersion());
}
return null;
}
});
task.getExtensions().getExtraProperties().set("usesBwcDistributionFromRef", new Closure<Void>(task) {
@Override
public Void call(Object... args) {
if (args.length != 2 || args[0] instanceof String == false || args[1] instanceof Version == false) {
throw new IllegalArgumentException("Expected arguments (String refSpec, org.elasticsearch.gradle.Version version)");
}
String refSpec = (String) args[0];
Version version = (Version) args[1];
boolean isDetachedVersion = true;
String versionString = version.toString();
ElasticsearchDistribution bwcDistro = createDistribution(project, "bwc_" + refSpec, versionString, isDetachedVersion);
handleJdkIncompatibleWithOS(version, project, task);
task.dependsOn(bwcDistro);
registerDistributionInputs(task, bwcDistro);
nonInputSystemProperties.systemProperty(
BWC_SNAPSHOT_DISTRIBUTION_SYSPROP_PREFIX + versionString,
providerFactory.provider(() -> bwcDistro.getExtracted().getSingleFile().getPath())
);
return null;
}
});
});
}
/**
* Older distributions ship with openjdk versions that are not compatible with newer kernels of ubuntu 24.04 and later
* Therefore we pass explicitly the runtime java to use the adoptium jdk that is maintained longer and compatible
* with newer kernels.
* 8.10.4 is the last version shipped with jdk < 21. We configure these cluster to run with jdk 17 adoptium as 17 was
* the last LTS release before 21
*/
private static void handleJdkIncompatibleWithOS(Version version, Project project, StandaloneRestIntegTestTask task) {
if (jdkIsIncompatibleWithOS(version)) {
var toolChainService = project.getExtensions().getByType(JavaToolchainService.class);
var fallbackJdk17Launcher = toolChainService.launcherFor(spec -> {
spec.getVendor().set(JvmVendorSpec.ADOPTIUM);
spec.getLanguageVersion().set(JavaLanguageVersion.of(17));
});
task.environment(
"ES_FALLBACK_JAVA_HOME",
fallbackJdk17Launcher.get().getMetadata().getInstallationPath().getAsFile().getPath()
);
}
}
private void copyDependencies(Project project, DependencySet dependencies, Configuration configuration) {
configuration.getDependencies()
.stream()
.filter(d -> d instanceof ProjectDependency)
.map(d -> project.getDependencies().project(Map.of("path", ((ProjectDependencyInternal) d).getPath())))
.forEach(dependencies::add);
}
private ElasticsearchDistribution createDistribution(Project project, String name, String version, boolean detachedVersion) {
return createDistribution(project, name, version, null, detachedVersion);
}
private ElasticsearchDistribution createDistribution(
Project project,
String name,
String version,
ElasticsearchDistributionType type,
boolean detachedVersion
) {
NamedDomainObjectContainer<ElasticsearchDistribution> distributions = DistributionDownloadPlugin.getContainer(project);
ElasticsearchDistribution maybeDistro = distributions.findByName(name);
if (maybeDistro == null) {
return distributions.create(name, distro -> {
distro.setVersion(version);
distro.setDetachedVersion(detachedVersion);
distro.setArchitecture(Architecture.current());
if (type != null) {
distro.setType(type);
}
});
} else {
return maybeDistro;
}
}
private FileTree getDistributionFiles(ElasticsearchDistribution distribution, Action<PatternFilterable> patternFilter) {
return distribution.getExtracted().getAsFileTree().matching(patternFilter);
}
private void registerConfigurationInputs(Task task, String configurationName, ConfigurableFileCollection configuration) {
task.getInputs()
.files(providerFactory.provider(() -> configuration.getAsFileTree().filter(f -> f.getName().endsWith(".jar") == false)))
.withPropertyName(configurationName + "-files")
.withPathSensitivity(PathSensitivity.RELATIVE);
task.getInputs()
.files(providerFactory.provider(() -> configuration.getAsFileTree().filter(f -> f.getName().endsWith(".jar"))))
.withPropertyName(configurationName + "-classpath")
.withNormalizer(ClasspathNormalizer.class);
}
private void registerDistributionInputs(Task task, ElasticsearchDistribution distribution) {
task.getInputs()
.files(providerFactory.provider(() -> getDistributionFiles(distribution, filter -> filter.exclude("**/*.jar"))))
.withPropertyName(distribution.getName() + "-files")
.withPathSensitivity(PathSensitivity.RELATIVE);
task.getInputs()
.files(providerFactory.provider(() -> getDistributionFiles(distribution, filter -> filter.include("**/*.jar"))))
.withPropertyName(distribution.getName() + "-classpath")
.withNormalizer(ClasspathNormalizer.class);
}
private Optional<String> findModulePath(Project project, String pluginName) {
return project.getRootProject()
.getAllprojects()
.stream()
.filter(p -> GradleUtils.isModuleProject(p.getPath()))
.filter(p -> p.getPlugins().hasPlugin(PluginBuildPlugin.class))
.filter(p -> p.getExtensions().getByType(PluginPropertiesExtension.class).getName().equals(pluginName))
.findFirst()
.map(Project::getPath);
}
private Configuration createPluginConfiguration(Project project, String name, boolean useExploded, boolean isExtended) {
return project.getConfigurations().create(name, c -> {
c.attributes(a -> a.attribute(CONFIGURATION_ATTRIBUTE, name));
if (useExploded) {
c.attributes(a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE));
} else {
c.attributes(a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE));
}
if (isExtended == false) {
c.withDependencies(dependencies -> {
// Add dependencies of any modules
Collection<Dependency> additionalDependencies = new LinkedHashSet<>();
for (Iterator<Dependency> iterator = dependencies.iterator(); iterator.hasNext();) {
Dependency dependency = iterator.next();
// this logic of relying on other projects metadata should probably live in a build service
if (dependency instanceof ProjectDependency projectDependency) {
Project dependencyProject = project.project(projectDependency.getPath());
List<String> extendedPlugins = dependencyProject.getExtensions()
.getByType(PluginPropertiesExtension.class)
.getExtendedPlugins();
// Replace project dependency with explicit dependency on exploded configuration to workaround variant bug
if (projectDependency.getTargetConfiguration() == null) {
iterator.remove();
additionalDependencies.add(
useExploded
? getExplodedBundleDependency(project, projectDependency.getPath())
: getBundleZipTaskDependency(project, projectDependency.getPath())
);
}
for (String extendedPlugin : extendedPlugins) {
findModulePath(project, extendedPlugin).ifPresent(
modulePath -> additionalDependencies.add(
useExploded
? getExplodedBundleDependency(project, modulePath)
: getBundleZipTaskDependency(project, modulePath)
)
);
}
}
}
dependencies.addAll(additionalDependencies);
});
}
});
}
private Dependency getExplodedBundleDependency(Project project, String projectPath) {
return project.getDependencies()
.project(Map.of("path", projectPath, "configuration", BasePluginBuildPlugin.EXPLODED_BUNDLE_CONFIG));
}
private Dependency getBundleZipTaskDependency(Project project, String projectPath) {
Project dependencyProject = project.findProject(projectPath);
return project.getDependencies()
.create(project.files(dependencyProject.getTasks().named(BasePluginBuildPlugin.BUNDLE_PLUGIN_TASK_NAME)));
}
private void configureArtifactTransforms(Project project) {
project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> {
transformSpec.getFrom().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE);
transformSpec.getTo().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE);
transformSpec.getParameters().setAsFiletreeOutput(false);
});
}
}
| RestTestBasePlugin |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/stubbing/answers/ClonesArguments.java | {
"start": 693,
"end": 1886
} | class ____ implements Answer<Object> {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
Object[] arguments = invocation.getArguments();
for (int i = 0; i < arguments.length; i++) {
Object from = arguments[i];
if (from != null) {
if (from.getClass().isArray()) {
int len = Array.getLength(from);
Object newInstance = Array.newInstance(from.getClass().getComponentType(), len);
for (int j = 0; j < len; ++j) {
Array.set(newInstance, j, Array.get(from, j));
}
arguments[i] = newInstance;
} else {
Instantiator instantiator =
Plugins.getInstantiatorProvider().getInstantiator(null);
Object newInstance = instantiator.newInstance(from.getClass());
new LenientCopyTool().copyToRealObject(from, newInstance);
arguments[i] = newInstance;
}
}
}
return new ReturnsEmptyValues().answer(invocation);
}
}
| ClonesArguments |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit/jupiter/TestConstructorIntegrationTests.java | {
"start": 3623,
"end": 3699
} | class ____ {
@Bean
String foo() {
return "bar";
}
}
}
}
| Config |
java | resilience4j__resilience4j | resilience4j-core/src/main/java/io/github/resilience4j/core/registry/AbstractRegistry.java | {
"start": 1108,
"end": 6994
} | class ____<E, C> implements Registry<E, C> {
protected static final String DEFAULT_CONFIG = "default";
protected static final String CONFIG_MUST_NOT_BE_NULL = "Config must not be null";
protected static final String CONSUMER_MUST_NOT_BE_NULL = "EventConsumers must not be null";
protected static final String SUPPLIER_MUST_NOT_BE_NULL = "Supplier must not be null";
protected static final String TAGS_MUST_NOT_BE_NULL = "Tags must not be null";
private static final String NAME_MUST_NOT_BE_NULL = "Name must not be null";
private static final String REGISTRY_STORE_MUST_NOT_BE_NULL = "Registry Store must not be null";
protected final RegistryStore<E> entryMap;
protected final ConcurrentMap<String, C> configurations;
/**
* Global tags which must be added to each instance created by this registry.
*/
protected final Map<String, String> registryTags;
private final RegistryEventProcessor eventProcessor;
public AbstractRegistry(C defaultConfig) {
this(defaultConfig, Collections.emptyMap());
}
public AbstractRegistry(C defaultConfig, Map<String, String> registryTags) {
this(defaultConfig, new ArrayList<>(), registryTags);
}
public AbstractRegistry(C defaultConfig, RegistryEventConsumer<E> registryEventConsumer) {
this(defaultConfig, registryEventConsumer, Collections.emptyMap());
}
public AbstractRegistry(C defaultConfig, RegistryEventConsumer<E> registryEventConsumer,
Map<String, String> tags) {
this(defaultConfig, Collections.singletonList(
Objects.requireNonNull(registryEventConsumer, CONSUMER_MUST_NOT_BE_NULL)), tags);
}
public AbstractRegistry(C defaultConfig,
List<RegistryEventConsumer<E>> registryEventConsumers) {
this(defaultConfig, registryEventConsumers, Collections.emptyMap());
}
public AbstractRegistry(C defaultConfig, List<RegistryEventConsumer<E>> registryEventConsumers,
Map<String, String> tags) {
this.configurations = new ConcurrentHashMap<>();
this.entryMap = new InMemoryRegistryStore<E>();
this.eventProcessor = new RegistryEventProcessor(
Objects.requireNonNull(registryEventConsumers, CONSUMER_MUST_NOT_BE_NULL));
this.registryTags = Objects.requireNonNull(tags, TAGS_MUST_NOT_BE_NULL);
this.configurations
.put(DEFAULT_CONFIG, Objects.requireNonNull(defaultConfig, CONFIG_MUST_NOT_BE_NULL));
}
public AbstractRegistry(C defaultConfig, List<RegistryEventConsumer<E>> registryEventConsumers,
Map<String, String> tags, RegistryStore<E> registryStore) {
this.configurations = new ConcurrentHashMap<>();
this.entryMap = Objects.requireNonNull(registryStore, REGISTRY_STORE_MUST_NOT_BE_NULL);
this.eventProcessor = new RegistryEventProcessor(
Objects.requireNonNull(registryEventConsumers, CONSUMER_MUST_NOT_BE_NULL));
this.registryTags = Objects.requireNonNull(tags, TAGS_MUST_NOT_BE_NULL);
this.configurations
.put(DEFAULT_CONFIG, Objects.requireNonNull(defaultConfig, CONFIG_MUST_NOT_BE_NULL));
}
protected E computeIfAbsent(String name, Supplier<E> supplier) {
return entryMap.computeIfAbsent(Objects.requireNonNull(name, NAME_MUST_NOT_BE_NULL), k -> {
E entry = supplier.get();
eventProcessor.processEvent(new EntryAddedEvent<>(entry));
return entry;
});
}
@Override
public Optional<E> find(String name) {
return entryMap.find(name);
}
@Override
public Optional<E> remove(String name) {
Optional<E> removedEntry = entryMap.remove(name);
removedEntry
.ifPresent(entry -> eventProcessor.processEvent(new EntryRemovedEvent<>(entry)));
return removedEntry;
}
@Override
public Optional<E> replace(String name, E newEntry) {
Optional<E> replacedEntry = entryMap.replace(name, newEntry);
replacedEntry.ifPresent(
oldEntry -> eventProcessor.processEvent(new EntryReplacedEvent<>(oldEntry, newEntry)));
return replacedEntry;
}
@Override
public void addConfiguration(String configName, C configuration) {
if (configName.equals(DEFAULT_CONFIG)) {
throw new IllegalArgumentException(
"You cannot use 'default' as a configuration name as it is preserved for default configuration");
}
this.configurations.put(configName, configuration);
}
@Override
public Optional<C> getConfiguration(String configName) {
return Optional.ofNullable(this.configurations.get(configName));
}
@Override
public C removeConfiguration(String configName) {
if (configName.equals(DEFAULT_CONFIG)) {
throw new IllegalArgumentException(
"You cannot remove the default configuration");
}
return this.configurations.remove(configName);
}
@Override
public C getDefaultConfig() {
return configurations.get(DEFAULT_CONFIG);
}
@Override
public Map<String, String> getTags() {
return registryTags;
}
@Override
public EventPublisher<E> getEventPublisher() {
return eventProcessor;
}
/**
* Creates map with all tags (registry and instance). When tags (keys) of the two collide the
* tags passed with this method will override the tags of the registry.
*
* @param tags Tags of the instance.
* @return Map containing all tags
*/
protected Map<String, String> getAllTags(Map<String, String> tags) {
final HashMap<String, String> allTags = new HashMap<>(Objects.requireNonNull(registryTags, TAGS_MUST_NOT_BE_NULL));
allTags.putAll(tags);
return allTags;
}
private | AbstractRegistry |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/commands/CommandInterfacesIntegrationTests.java | {
"start": 2818,
"end": 3359
} | interface ____ extends Commands {
@Command("BF.INSERT :filter CAPACITY :capacity ERROR :error ITEMS :items ")
Flux<Boolean> insert(@Param("filter") String filter, @Param("capacity") long capacity, @Param("error") double error,
@Param("items") String[] items);
static CustomCommands instance(StatefulConnection<String, String> conn) {
RedisCommandFactory factory = new RedisCommandFactory(conn);
return factory.getCommands(CustomCommands.class);
}
}
}
| CustomCommands |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/config/ConfigMappingTest.java | {
"start": 9827,
"end": 10101
} | interface ____ extends Base {
}
@Inject
Base base;
@Inject
ExtendsBase extendsBase;
@Test
void hierarchy() {
assertSame(base, extendsBase);
assertEquals("bar", extendsBase.foo());
}
@Dependent
public static | ExtendsBase |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/devmode/StaticResourcesDevModeTest.java | {
"start": 359,
"end": 2259
} | class ____ extends AbstractStaticResourcesTest {
@RegisterExtension
final static QuarkusDevModeTest test = new QuarkusDevModeTest()
.withApplicationRoot((jar) -> jar
.add(new StringAsset("quarkus.http.enable-compression=true\n"),
"application.properties")
.addAsResource("static-file.html", "META-INF/resources/dir/file.txt")
.addAsResource("static-file.html", "META-INF/resources/l'équipe.pdf")
.addAsResource("static-file.html", "META-INF/resources/static file.txt")
.addAsResource("static-file.html", "META-INF/resources/static-file.html")
.addAsResource("static-file.html", "META-INF/resources/.hidden-file.html")
.addAsResource("static-file.html", "META-INF/resources/index.html")
.addAsResource("static-file.html", "META-INF/resources/image.svg"));
@Test
void shouldChangeContentOnModification() {
RestAssured.when().get("/static-file.html")
.then()
.body(Matchers.containsString("This is the title of the webpage!"))
.statusCode(200);
RestAssured.when().get("/")
.then()
.body(Matchers.containsString("This is the title of the webpage!"))
.statusCode(200);
test.modifyResourceFile("META-INF/resources/static-file.html", s -> s.replace("webpage", "Andy"));
RestAssured.when().get("/static-file.html")
.then()
.body(Matchers.containsString("This is the title of the Andy!"))
.statusCode(200);
RestAssured.when().get("/")
.then()
.body(Matchers.containsString("This is the title of the webpage!"))
.statusCode(200);
}
}
| StaticResourcesDevModeTest |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/bindinggraphvalidation/InvalidProductionBindingScopeValidator.java | {
"start": 1055,
"end": 2062
} | class ____ extends ValidationBindingGraphPlugin {
@Inject
InvalidProductionBindingScopeValidator() {}
@Override
public String pluginName() {
return "Dagger/InvalidProductionBindingScope";
}
@Override
public void visitGraph(BindingGraph bindingGraph, DiagnosticReporter reporter) {
// Note: ProducesMethodValidator validates that @Produces methods aren't scoped, but here we
// take that a step further and validate that anything that transitively depends on a @Produces
// method is also not scoped (i.e. all production binding types).
bindingGraph.bindings().stream()
.filter(Binding::isProduction)
.filter(binding -> binding.scope().isPresent())
.forEach(binding -> reporter.reportBinding(ERROR, binding, errorMessage(binding)));
}
private String errorMessage(Binding binding) {
return String.format(
"%s cannot be scoped because it delegates to an @Produces method.",
binding);
}
}
| InvalidProductionBindingScopeValidator |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/annotations/SchedulerSupport.java | {
"start": 1686,
"end": 1902
} | class ____ on RxJava's {@linkplain Schedulers#computation() computation
* scheduler} or takes timing information from it.
*/
String COMPUTATION = "io.reactivex:computation";
/**
* The operator/ | runs |
java | apache__camel | components/camel-jdbc/src/test/java/org/apache/camel/component/jdbc/JdbcProducerOutputTypeSelectOneOutputClassTest.java | {
"start": 1164,
"end": 2218
} | class ____ extends AbstractJdbcTestSupport {
@EndpointInject("mock:result")
private MockEndpoint mock;
@Test
public void testOutputTypeSelectOneOutputClass() throws Exception {
mock.expectedMessageCount(1);
template.sendBody("direct:start", "select * from customer where ID = 'cust1'");
MockEndpoint.assertIsSatisfied(context);
CustomerModel model = assertIsInstanceOf(CustomerModel.class,
mock.getReceivedExchanges().get(0).getIn().getBody(CustomerModel.class));
assertEquals("cust1", model.getId());
assertEquals("jstrachan", model.getName());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:start")
.to("jdbc:testdb?outputType=SelectOne&outputClass=org.apache.camel.component.jdbc.CustomerModel")
.to("mock:result");
}
};
}
}
| JdbcProducerOutputTypeSelectOneOutputClassTest |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/cluster/models/partitions/ClusterPartitionParser.java | {
"start": 497,
"end": 11518
} | class ____ {
public static final String CONNECTED = "connected";
private static final String TOKEN_SLOT_IN_TRANSITION = "[";
private static final char TOKEN_NODE_SEPARATOR = '\n';
private static final Map<String, RedisClusterNode.NodeFlag> FLAG_MAPPING;
static {
Map<String, RedisClusterNode.NodeFlag> map = new HashMap<>();
map.put("noflags", RedisClusterNode.NodeFlag.NOFLAGS);
map.put("myself", RedisClusterNode.NodeFlag.MYSELF);
map.put("master", RedisClusterNode.NodeFlag.MASTER);
map.put("slave", RedisClusterNode.NodeFlag.SLAVE);
map.put("replica", RedisClusterNode.NodeFlag.REPLICA);
map.put("fail?", RedisClusterNode.NodeFlag.EVENTUAL_FAIL);
map.put("fail", RedisClusterNode.NodeFlag.FAIL);
map.put("handshake", RedisClusterNode.NodeFlag.HANDSHAKE);
map.put("noaddr", RedisClusterNode.NodeFlag.NOADDR);
map.put("loading", RedisClusterNode.NodeFlag.LOADING);
map.put("online", RedisClusterNode.NodeFlag.ONLINE);
FLAG_MAPPING = Collections.unmodifiableMap(map);
}
/**
* Utility constructor.
*/
private ClusterPartitionParser() {
}
/**
* Parse partition lines into Partitions object.
*
* @param clusterShards output of CLUSTER SHARDS
* @return the partitions object.
* @since 6.2
*/
public static Partitions parse(List<Object> clusterShards) {
Partitions partitions = new Partitions();
try {
Map<String, RedisClusterNode> nodeMap = new LinkedHashMap<>();
for (Object s : clusterShards) {
List<Object> shard = (List<Object>) s;
if (shard.size() < 4) {
continue;
}
KeyValueMap shardMap = toMap(shard);
List<Integer> slotRanges = shardMap.get("slots");
List<List<Object>> nodes = shardMap.get("nodes");
BitSet bitSet = readSlotRanges(slotRanges);
List<RedisClusterNode> parsedNodes = new ArrayList<>(nodes.size());
for (List<Object> node : nodes) {
RedisClusterNode clusterNode = parseNode(node, (BitSet) bitSet.clone());
nodeMap.putIfAbsent(clusterNode.getNodeId(), clusterNode);
parsedNodes.add(clusterNode);
}
RedisClusterNode master = findMaster(parsedNodes);
if (master != null) {
associateMasterWithReplicas(master, parsedNodes);
}
}
partitions.addAll(nodeMap.values());
} catch (Exception e) {
throw new RedisException("Cannot parse " + clusterShards, e);
}
return partitions;
}
private static RedisClusterNode findMaster(List<RedisClusterNode> nodes) {
for (RedisClusterNode parsedNode : nodes) {
if (parsedNode.is(RedisClusterNode.NodeFlag.UPSTREAM) || parsedNode.is(RedisClusterNode.NodeFlag.MASTER)) {
return parsedNode;
}
}
return null;
}
private static void associateMasterWithReplicas(RedisClusterNode master, List<RedisClusterNode> nodes) {
for (RedisClusterNode parsedNode : nodes) {
if (parsedNode.is(RedisClusterNode.NodeFlag.REPLICA) || parsedNode.is(RedisClusterNode.NodeFlag.SLAVE)) {
parsedNode.setSlaveOf(master.getNodeId());
}
}
}
private static RedisClusterNode parseNode(List<Object> kvlist, BitSet slots) {
KeyValueMap nodeMap = toMap(kvlist);
RedisClusterNode node = new RedisClusterNode();
node.setNodeId(nodeMap.get("id"));
RedisURI uri;
int port = ((Long) nodeMap.get("port")).intValue();
if (LettuceStrings.isNotEmpty(nodeMap.get("hostname"))) {
uri = RedisURI.create(nodeMap.get("hostname"), port);
} else {
uri = RedisURI.create(nodeMap.get("endpoint"), port);
}
node.setUri(uri);
Set<RedisClusterNode.NodeFlag> flags = new HashSet<>();
flags.add(FLAG_MAPPING.get(nodeMap.<String> get("role")));
flags.add(FLAG_MAPPING.get(nodeMap.<String> get("health")));
if (flags.contains(RedisClusterNode.NodeFlag.SLAVE)) {
flags.add(RedisClusterNode.NodeFlag.REPLICA);
} else if (flags.contains(RedisClusterNode.NodeFlag.REPLICA)) {
flags.add(RedisClusterNode.NodeFlag.SLAVE);
}
if (flags.contains(RedisClusterNode.NodeFlag.MASTER)) {
flags.add(RedisClusterNode.NodeFlag.UPSTREAM);
} else if (flags.contains(RedisClusterNode.NodeFlag.UPSTREAM)) {
flags.add(RedisClusterNode.NodeFlag.MASTER);
}
node.setFlags(flags);
node.setReplOffset(nodeMap.get("replication-offset"));
node.setSlots(slots);
return node;
}
/**
* Parse partition lines into Partitions object.
*
* @param nodes output of CLUSTER NODES
* @return the partitions object.
*/
public static Partitions parse(String nodes) {
Partitions partitions = new Partitions();
try {
String[] lines = nodes.split(Character.toString(TOKEN_NODE_SEPARATOR));
List<RedisClusterNode> mappedNodes = new ArrayList<>(lines.length);
for (String line : lines) {
if (line.isEmpty()) {
continue;
}
mappedNodes.add(ClusterPartitionParser.parseNode(line));
}
partitions.addAll(mappedNodes);
} catch (Exception e) {
throw new RedisException("Cannot parse " + nodes, e);
}
return partitions;
}
private static RedisClusterNode parseNode(String nodeInformation) {
Iterator<String> iterator = Arrays.asList(nodeInformation.split(" ")).iterator();
String nodeId = iterator.next();
boolean connected = false;
RedisURI uri = null;
String hostAndPortPart = iterator.next();
String announcedHostName = null;
int atIndex = hostAndPortPart.indexOf('@');
if (atIndex != -1) {
String busPart = hostAndPortPart.substring(atIndex + 1);
hostAndPortPart = hostAndPortPart.substring(0, atIndex);
int comma = busPart.indexOf(',');
if (comma != -1) {
announcedHostName = busPart.substring(comma + 1);
}
}
HostAndPort hostAndPort = HostAndPort.parseCompat(hostAndPortPart);
if (LettuceStrings.isNotEmpty(announcedHostName)) {
hostAndPort = HostAndPort.of(announcedHostName, hostAndPort.getPort());
}
if (LettuceStrings.isNotEmpty(hostAndPort.getHostText())) {
uri = RedisURI.Builder.redis(hostAndPort.getHostText(), hostAndPort.getPort()).build();
}
String flags = iterator.next();
List<String> flagStrings = LettuceLists.newList(flags.split("\\,"));
Set<RedisClusterNode.NodeFlag> nodeFlags = readFlags(flagStrings);
String replicaOfString = iterator.next(); // (nodeId or -)
String replicaOf = "-".equals(replicaOfString) ? null : replicaOfString;
long pingSentTs = getLongFromIterator(iterator, 0);
long pongReceivedTs = getLongFromIterator(iterator, 0);
long configEpoch = getLongFromIterator(iterator, 0);
String connectedFlags = iterator.next(); // "connected" : "disconnected"
if (CONNECTED.equals(connectedFlags)) {
connected = true;
}
List<String> slotStrings = LettuceLists.newList(iterator); // slot, from-to [slot->-nodeID] [slot-<-nodeID]
BitSet slots = readSlots(slotStrings);
RedisClusterNode partition = new RedisClusterNode(uri, nodeId, connected, replicaOf, pingSentTs, pongReceivedTs,
configEpoch, slots, nodeFlags);
return partition;
}
private static Set<RedisClusterNode.NodeFlag> readFlags(List<String> flagStrings) {
Set<RedisClusterNode.NodeFlag> flags = new HashSet<>();
for (String flagString : flagStrings) {
if (FLAG_MAPPING.containsKey(flagString)) {
flags.add(FLAG_MAPPING.get(flagString));
}
}
if (flags.contains(RedisClusterNode.NodeFlag.SLAVE)) {
flags.add(RedisClusterNode.NodeFlag.REPLICA);
}
return Collections.unmodifiableSet(flags);
}
private static BitSet readSlots(List<String> slotStrings) {
BitSet slots = new BitSet(SlotHash.SLOT_COUNT);
for (String slotString : slotStrings) {
if (slotString.startsWith(TOKEN_SLOT_IN_TRANSITION)) {
// not interesting
continue;
}
if (slotString.contains("-")) {
// slot range
Iterator<String> it = Arrays.asList(slotString.split("\\-")).iterator();
int from = Integer.parseInt(it.next());
int to = Integer.parseInt(it.next());
addSlots(slots, from, to);
continue;
}
slots.set(Integer.parseInt(slotString));
}
return slots;
}
private static BitSet readSlotRanges(List<?> slotRanges) {
BitSet slots = new BitSet(SlotHash.SLOT_COUNT);
for (int i = 0; i < slotRanges.size(); i += 2) {
Number from = getAsNumber(slotRanges.get(i));
Number to = getAsNumber(slotRanges.get(i + 1));
addSlots(slots, from.intValue(), to.intValue());
}
return slots;
}
private static Number getAsNumber(Object stringOrNumber) {
if (stringOrNumber instanceof Number) {
return (Number) stringOrNumber;
}
return Integer.parseInt(stringOrNumber.toString());
}
private static void addSlots(BitSet slots, int from, int to) {
for (int slot = from; slot <= to; slot++) {
slots.set(slot);
}
}
private static long getLongFromIterator(Iterator<?> iterator, long defaultValue) {
if (iterator.hasNext()) {
Object object = iterator.next();
if (object instanceof String) {
return Long.parseLong((String) object);
}
}
return defaultValue;
}
private static KeyValueMap toMap(List<Object> kvlist) {
if (kvlist.size() % 2 != 0) {
throw new IllegalArgumentException("Key-Value list must contain an even number of key-value tuples");
}
Map<String, Object> map = new LinkedHashMap<>(kvlist.size() / 2);
for (int i = 0; i < kvlist.size(); i += 2) {
String key = (String) kvlist.get(i);
Object value = kvlist.get(i + 1);
map.put(key, value);
}
return new KeyValueMap(map);
}
static | ClusterPartitionParser |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/RuncContainerExecutorConfig.java | {
"start": 15869,
"end": 16662
} | class ____ {
final private String path;
final private List<String> args;
final private List<String> env;
final private int timeout;
public String getPath() {
return path;
}
public List<String> getArgs() {
return args;
}
public List<String> getEnv() {
return env;
}
public int getTimeout() {
return timeout;
}
public HookType(String path, List<String> args, List<String> env,
int timeout) {
this.path = path;
this.args = args;
this.env = env;
this.timeout = timeout;
}
public HookType() {
this(null, null, null, 0);
}
}
}
/**
* This | HookType |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/hamcrest/CollectionSizeMatcher.java | {
"start": 273,
"end": 630
} | class ____ extends BaseMatcher<Collection> {
private final int size;
public CollectionSizeMatcher(int size) {
this.size = size;
}
@Override
public boolean matches(Object item) {
return ( (Collection) item ).size() == size;
}
@Override
public void describeTo(Description description) {
description.appendValue( size );
}
}
| CollectionSizeMatcher |
java | alibaba__druid | druid-wrapper/src/main/java/org/apache/commons/dbcp/ManagedBasicDataSourceFactory.java | {
"start": 101,
"end": 647
} | class ____ extends BasicDataSourceFactory {
public static DataSource createDataSource(Properties properties) throws Exception {
ManagedBasicDataSource dataSource = new ManagedBasicDataSource();
config(dataSource, properties);
return dataSource;
}
protected DataSource createDataSourceInternal(Properties properties) throws Exception {
ManagedBasicDataSource dataSource = new ManagedBasicDataSource();
config(dataSource, properties);
return dataSource;
}
}
| ManagedBasicDataSourceFactory |
java | elastic__elasticsearch | x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderDocument.java | {
"start": 25129,
"end": 25557
} | interface ____ {
ParseField SP_SIGNING = new ParseField("sp_signing");
ParseField IDP_SIGNING = new ParseField("idp_signing");
ParseField IDP_METADATA = new ParseField("idp_metadata");
}
}
@Override
public String toString() {
return getClass().getSimpleName() + "{docId=" + docId + ", name=" + name + ", entityId=" + entityId + "}@" + hashCode();
}
}
| Certificates |
java | spring-projects__spring-boot | module/spring-boot-http-converter/src/main/java/org/springframework/boot/http/converter/autoconfigure/JacksonHttpMessageConvertersConfiguration.java | {
"start": 2419,
"end": 2670
} | class ____ {
@Bean
@ConditionalOnMissingBean
public JacksonXmlHttpMessageConverter jacksonXmlHttpMessageConverter(XmlMapper xmlMapper) {
return new JacksonXmlHttpMessageConverter(xmlMapper);
}
}
}
| JacksonXmlHttpMessageConverterConfiguration |
java | apache__camel | components/camel-vertx/camel-vertx-http/src/generated/java/org/apache/camel/component/vertx/http/VertxHttpComponentConfigurer.java | {
"start": 737,
"end": 8601
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
VertxHttpComponent target = (VertxHttpComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "allowjavaserializedobject":
case "allowJavaSerializedObject": target.setAllowJavaSerializedObject(property(camelContext, boolean.class, value)); return true;
case "autowiredenabled":
case "autowiredEnabled": target.setAutowiredEnabled(property(camelContext, boolean.class, value)); return true;
case "basicauthpassword":
case "basicAuthPassword": target.setBasicAuthPassword(property(camelContext, java.lang.String.class, value)); return true;
case "basicauthusername":
case "basicAuthUsername": target.setBasicAuthUsername(property(camelContext, java.lang.String.class, value)); return true;
case "bearertoken":
case "bearerToken": target.setBearerToken(property(camelContext, java.lang.String.class, value)); return true;
case "headerfilterstrategy":
case "headerFilterStrategy": target.setHeaderFilterStrategy(property(camelContext, org.apache.camel.spi.HeaderFilterStrategy.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "proxyhost":
case "proxyHost": target.setProxyHost(property(camelContext, java.lang.String.class, value)); return true;
case "proxypassword":
case "proxyPassword": target.setProxyPassword(property(camelContext, java.lang.String.class, value)); return true;
case "proxyport":
case "proxyPort": target.setProxyPort(property(camelContext, java.lang.Integer.class, value)); return true;
case "proxytype":
case "proxyType": target.setProxyType(property(camelContext, io.vertx.core.net.ProxyType.class, value)); return true;
case "proxyusername":
case "proxyUsername": target.setProxyUsername(property(camelContext, java.lang.String.class, value)); return true;
case "responsepayloadasbytearray":
case "responsePayloadAsByteArray": target.setResponsePayloadAsByteArray(property(camelContext, boolean.class, value)); return true;
case "sslcontextparameters":
case "sslContextParameters": target.setSslContextParameters(property(camelContext, org.apache.camel.support.jsse.SSLContextParameters.class, value)); return true;
case "useglobalsslcontextparameters":
case "useGlobalSslContextParameters": target.setUseGlobalSslContextParameters(property(camelContext, boolean.class, value)); return true;
case "vertx": target.setVertx(property(camelContext, io.vertx.core.Vertx.class, value)); return true;
case "vertxhttpbinding":
case "vertxHttpBinding": target.setVertxHttpBinding(property(camelContext, org.apache.camel.component.vertx.http.VertxHttpBinding.class, value)); return true;
case "vertxoptions":
case "vertxOptions": target.setVertxOptions(property(camelContext, io.vertx.core.VertxOptions.class, value)); return true;
case "webclientoptions":
case "webClientOptions": target.setWebClientOptions(property(camelContext, io.vertx.ext.web.client.WebClientOptions.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "allowjavaserializedobject":
case "allowJavaSerializedObject": return boolean.class;
case "autowiredenabled":
case "autowiredEnabled": return boolean.class;
case "basicauthpassword":
case "basicAuthPassword": return java.lang.String.class;
case "basicauthusername":
case "basicAuthUsername": return java.lang.String.class;
case "bearertoken":
case "bearerToken": return java.lang.String.class;
case "headerfilterstrategy":
case "headerFilterStrategy": return org.apache.camel.spi.HeaderFilterStrategy.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "proxyhost":
case "proxyHost": return java.lang.String.class;
case "proxypassword":
case "proxyPassword": return java.lang.String.class;
case "proxyport":
case "proxyPort": return java.lang.Integer.class;
case "proxytype":
case "proxyType": return io.vertx.core.net.ProxyType.class;
case "proxyusername":
case "proxyUsername": return java.lang.String.class;
case "responsepayloadasbytearray":
case "responsePayloadAsByteArray": return boolean.class;
case "sslcontextparameters":
case "sslContextParameters": return org.apache.camel.support.jsse.SSLContextParameters.class;
case "useglobalsslcontextparameters":
case "useGlobalSslContextParameters": return boolean.class;
case "vertx": return io.vertx.core.Vertx.class;
case "vertxhttpbinding":
case "vertxHttpBinding": return org.apache.camel.component.vertx.http.VertxHttpBinding.class;
case "vertxoptions":
case "vertxOptions": return io.vertx.core.VertxOptions.class;
case "webclientoptions":
case "webClientOptions": return io.vertx.ext.web.client.WebClientOptions.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
VertxHttpComponent target = (VertxHttpComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "allowjavaserializedobject":
case "allowJavaSerializedObject": return target.isAllowJavaSerializedObject();
case "autowiredenabled":
case "autowiredEnabled": return target.isAutowiredEnabled();
case "basicauthpassword":
case "basicAuthPassword": return target.getBasicAuthPassword();
case "basicauthusername":
case "basicAuthUsername": return target.getBasicAuthUsername();
case "bearertoken":
case "bearerToken": return target.getBearerToken();
case "headerfilterstrategy":
case "headerFilterStrategy": return target.getHeaderFilterStrategy();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "proxyhost":
case "proxyHost": return target.getProxyHost();
case "proxypassword":
case "proxyPassword": return target.getProxyPassword();
case "proxyport":
case "proxyPort": return target.getProxyPort();
case "proxytype":
case "proxyType": return target.getProxyType();
case "proxyusername":
case "proxyUsername": return target.getProxyUsername();
case "responsepayloadasbytearray":
case "responsePayloadAsByteArray": return target.isResponsePayloadAsByteArray();
case "sslcontextparameters":
case "sslContextParameters": return target.getSslContextParameters();
case "useglobalsslcontextparameters":
case "useGlobalSslContextParameters": return target.isUseGlobalSslContextParameters();
case "vertx": return target.getVertx();
case "vertxhttpbinding":
case "vertxHttpBinding": return target.getVertxHttpBinding();
case "vertxoptions":
case "vertxOptions": return target.getVertxOptions();
case "webclientoptions":
case "webClientOptions": return target.getWebClientOptions();
default: return null;
}
}
}
| VertxHttpComponentConfigurer |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/properties/SimpleUnlessBuildPropertyTest.java | {
"start": 1027,
"end": 1257
} | class ____ implements PingService {
@Override
public int ping() {
return 10;
}
}
@UnlessBuildProperty(name = "foo.bar", stringValue = "qux")
@ApplicationScoped
static | AlphaService |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/binding/AssistedInjectionAnnotations.java | {
"start": 10825,
"end": 13930
} | class ____ {
public static AssistedParameter create(
XExecutableParameterElement parameter, XType parameterType) {
AssistedParameter assistedParameter =
new AutoValue_AssistedInjectionAnnotations_AssistedParameter(
Optional.ofNullable(parameter.getAnnotation(XTypeNames.ASSISTED))
.map(assisted -> assisted.getAsString("value"))
.orElse(""),
XTypes.equivalence().wrap(parameterType));
assistedParameter.parameterElement = parameter;
return assistedParameter;
}
private XExecutableParameterElement parameterElement;
/** Returns the string qualifier from the {@link Assisted#value()}. */
public abstract String qualifier();
/** Returns the equivalence wrapper for the type annotated with {@link Assisted}. */
abstract Equivalence.Wrapper<XType> wrappedType();
/** Returns the type annotated with {@link Assisted}. */
public final XType type() {
return wrappedType().get();
}
public final XExecutableParameterElement element() {
return parameterElement;
}
@Override
public final String toString() {
return qualifier().isEmpty()
? String.format("@Assisted %s", XTypes.toStableString(type()))
: String.format("@Assisted(\"%s\") %s", qualifier(), XTypes.toStableString(type()));
}
}
public static ImmutableList<AssistedParameter> assistedInjectAssistedParameters(
XType assistedInjectType) {
// We keep track of the constructor both as an ExecutableElement to access @Assisted
// parameters and as an ExecutableType to access the resolved parameter types.
XConstructorElement assistedInjectConstructor =
getOnlyElement(assistedInjectedConstructors(assistedInjectType.getTypeElement()));
XConstructorType assistedInjectConstructorType =
assistedInjectConstructor.asMemberOf(assistedInjectType.makeNonNullable());
ImmutableList.Builder<AssistedParameter> builder = ImmutableList.builder();
for (int i = 0; i < assistedInjectConstructor.getParameters().size(); i++) {
XExecutableParameterElement parameter = assistedInjectConstructor.getParameters().get(i);
XType parameterType = assistedInjectConstructorType.getParameterTypes().get(i);
if (parameter.hasAnnotation(XTypeNames.ASSISTED)) {
builder.add(AssistedParameter.create(parameter, parameterType));
}
}
return builder.build();
}
private static ImmutableList<AssistedParameter> assistedFactoryAssistedParameters(
XMethodElement factoryMethod, XMethodType factoryMethodType) {
ImmutableList.Builder<AssistedParameter> builder = ImmutableList.builder();
for (int i = 0; i < factoryMethod.getParameters().size(); i++) {
XExecutableParameterElement parameter = factoryMethod.getParameters().get(i);
XType parameterType = factoryMethodType.getParameterTypes().get(i);
builder.add(AssistedParameter.create(parameter, parameterType));
}
return builder.build();
}
private AssistedInjectionAnnotations() {}
}
| AssistedParameter |
java | elastic__elasticsearch | libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/AccessibleJdkMethods.java | {
"start": 9939,
"end": 10764
} | class
____ (isPublic || isFinalClass == false) {
if (modulePredicate.test(moduleClass.module)) {
accessibleImplementations.add(method);
}
// if public and not static, the method can be accessible on non-public and non-exported subclasses,
// but skip constructors
if (isPublic && isStatic == false && name.equals("<init>") == false) {
inheritableAccess.add(method);
}
}
} else if (inheritableAccess.contains(method)) {
if (modulePredicate.test(moduleClass.module)) {
accessibleImplementations.add(method);
}
}
return mv;
}
}
}
| if |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java | {
"start": 3586,
"end": 48176
} | class ____ extends ESTestCase {
public void testRolloverAliasActions() {
String sourceAlias = randomAlphaOfLength(10);
String sourceIndex = randomAlphaOfLength(10);
String targetIndex = randomAlphaOfLength(10);
List<AliasAction> actions = MetadataRolloverService.rolloverAliasToNewIndex(sourceIndex, targetIndex, false, null, sourceAlias);
assertThat(actions, hasSize(2));
boolean foundAdd = false;
boolean foundRemove = false;
for (AliasAction action : actions) {
if (action.getIndex().equals(targetIndex)) {
assertEquals(sourceAlias, ((AliasAction.Add) action).getAlias());
foundAdd = true;
} else if (action.getIndex().equals(sourceIndex)) {
assertEquals(sourceAlias, ((AliasAction.Remove) action).getAlias());
foundRemove = true;
} else {
throw new AssertionError("Unknown index [" + action.getIndex() + "]");
}
}
assertTrue(foundAdd);
assertTrue(foundRemove);
}
public void testRolloverAliasActionsWithExplicitWriteIndex() {
String sourceAlias = randomAlphaOfLength(10);
String sourceIndex = randomAlphaOfLength(10);
String targetIndex = randomAlphaOfLength(10);
List<AliasAction> actions = MetadataRolloverService.rolloverAliasToNewIndex(sourceIndex, targetIndex, true, null, sourceAlias);
assertThat(actions, hasSize(2));
boolean foundAddWrite = false;
boolean foundRemoveWrite = false;
for (AliasAction action : actions) {
AliasAction.Add addAction = (AliasAction.Add) action;
if (action.getIndex().equals(targetIndex)) {
assertEquals(sourceAlias, addAction.getAlias());
assertTrue(addAction.writeIndex());
foundAddWrite = true;
} else if (action.getIndex().equals(sourceIndex)) {
assertEquals(sourceAlias, addAction.getAlias());
assertFalse(addAction.writeIndex());
foundRemoveWrite = true;
} else {
throw new AssertionError("Unknown index [" + action.getIndex() + "]");
}
}
assertTrue(foundAddWrite);
assertTrue(foundRemoveWrite);
}
public void testRolloverAliasActionsWithHiddenAliasAndExplicitWriteIndex() {
String sourceAlias = randomAlphaOfLength(10);
String sourceIndex = randomAlphaOfLength(10);
String targetIndex = randomAlphaOfLength(10);
List<AliasAction> actions = MetadataRolloverService.rolloverAliasToNewIndex(sourceIndex, targetIndex, true, true, sourceAlias);
assertThat(actions, hasSize(2));
boolean foundAddWrite = false;
boolean foundRemoveWrite = false;
for (AliasAction action : actions) {
assertThat(action, instanceOf(AliasAction.Add.class));
AliasAction.Add addAction = (AliasAction.Add) action;
if (action.getIndex().equals(targetIndex)) {
assertEquals(sourceAlias, addAction.getAlias());
assertTrue(addAction.writeIndex());
assertTrue(addAction.isHidden());
foundAddWrite = true;
} else if (action.getIndex().equals(sourceIndex)) {
assertEquals(sourceAlias, addAction.getAlias());
assertFalse(addAction.writeIndex());
assertTrue(addAction.isHidden());
foundRemoveWrite = true;
} else {
throw new AssertionError("Unknown index [" + action.getIndex() + "]");
}
}
assertTrue(foundAddWrite);
assertTrue(foundRemoveWrite);
}
public void testRolloverAliasActionsWithHiddenAliasAndImplicitWriteIndex() {
String sourceAlias = randomAlphaOfLength(10);
String sourceIndex = randomAlphaOfLength(10);
String targetIndex = randomAlphaOfLength(10);
List<AliasAction> actions = MetadataRolloverService.rolloverAliasToNewIndex(sourceIndex, targetIndex, false, true, sourceAlias);
assertThat(actions, hasSize(2));
boolean foundAddWrite = false;
boolean foundRemoveWrite = false;
for (AliasAction action : actions) {
if (action.getIndex().equals(targetIndex)) {
assertThat(action, instanceOf(AliasAction.Add.class));
AliasAction.Add addAction = (AliasAction.Add) action;
assertEquals(sourceAlias, addAction.getAlias());
assertThat(addAction.writeIndex(), nullValue());
assertTrue(addAction.isHidden());
foundAddWrite = true;
} else if (action.getIndex().equals(sourceIndex)) {
assertThat(action, instanceOf(AliasAction.Remove.class));
AliasAction.Remove removeAction = (AliasAction.Remove) action;
assertEquals(sourceAlias, removeAction.getAlias());
foundRemoveWrite = true;
} else {
throw new AssertionError("Unknown index [" + action.getIndex() + "]");
}
}
assertTrue(foundAddWrite);
assertTrue(foundRemoveWrite);
}
public void testAliasValidation() {
String index1 = randomAlphaOfLength(10);
String aliasWithWriteIndex = randomAlphaOfLength(10);
String index2 = randomAlphaOfLength(10);
String aliasWithNoWriteIndex = randomAlphaOfLength(10);
Boolean firstIsWriteIndex = randomFrom(false, null);
final Settings settings = indexSettings(IndexVersion.current(), 1, 0).put(
IndexMetadata.SETTING_INDEX_UUID,
UUIDs.randomBase64UUID()
).build();
ProjectMetadata.Builder metadataBuilder = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(
IndexMetadata.builder(index1)
.settings(settings)
.putAlias(AliasMetadata.builder(aliasWithWriteIndex))
.putAlias(AliasMetadata.builder(aliasWithNoWriteIndex).writeIndex(firstIsWriteIndex))
);
IndexMetadata.Builder indexTwoBuilder = IndexMetadata.builder(index2).settings(settings);
if (firstIsWriteIndex == null) {
indexTwoBuilder.putAlias(AliasMetadata.builder(aliasWithNoWriteIndex).writeIndex(randomFrom(false, null)));
}
metadataBuilder.put(indexTwoBuilder);
ProjectMetadata metadata = metadataBuilder.build();
CreateIndexRequest req = new CreateIndexRequest();
Exception exception = expectThrows(
IllegalArgumentException.class,
() -> MetadataRolloverService.validate(metadata, aliasWithNoWriteIndex, randomAlphaOfLength(5), req)
);
assertThat(exception.getMessage(), equalTo("rollover target [" + aliasWithNoWriteIndex + "] does not point to a write index"));
exception = expectThrows(
IllegalArgumentException.class,
() -> MetadataRolloverService.validate(metadata, randomFrom(index1, index2), randomAlphaOfLength(5), req)
);
assertThat(exception.getMessage(), equalTo("rollover target is a [concrete index] but one of [alias,data_stream] was expected"));
final String aliasName = randomAlphaOfLength(5);
exception = expectThrows(
ResourceNotFoundException.class,
() -> MetadataRolloverService.validate(metadata, aliasName, randomAlphaOfLength(5), req)
);
assertThat(exception.getMessage(), equalTo("rollover target [" + aliasName + "] does not exist"));
MetadataRolloverService.validate(metadata, aliasWithWriteIndex, randomAlphaOfLength(5), req);
}
public void testDataStreamValidation() throws IOException {
ProjectMetadata.Builder md = ProjectMetadata.builder(randomProjectIdOrDefault());
DataStream randomDataStream = DataStreamTestHelper.randomInstance(false);
for (Index index : randomDataStream.getIndices()) {
md.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index));
}
md.put(randomDataStream);
ProjectMetadata metadata = md.build();
CreateIndexRequest req = new CreateIndexRequest();
MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, req);
IllegalArgumentException exception = expectThrows(
IllegalArgumentException.class,
() -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), randomAlphaOfLength(5), req)
);
assertThat(exception.getMessage(), equalTo("new index name may not be specified when rolling over a data stream"));
CreateIndexRequest aliasReq = new CreateIndexRequest().alias(new Alias("no_aliases_permitted"));
exception = expectThrows(
IllegalArgumentException.class,
() -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, aliasReq)
);
assertThat(
exception.getMessage(),
equalTo("aliases, mappings, and index settings may not be specified when rolling over a data stream")
);
String mapping = Strings.toString(JsonXContent.contentBuilder().startObject().startObject("_doc").endObject().endObject());
CreateIndexRequest mappingReq = new CreateIndexRequest().mapping(mapping);
exception = expectThrows(
IllegalArgumentException.class,
() -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, mappingReq)
);
assertThat(
exception.getMessage(),
equalTo("aliases, mappings, and index settings may not be specified when rolling over a data stream")
);
CreateIndexRequest settingReq = new CreateIndexRequest().settings(Settings.builder().put("foo", "bar"));
exception = expectThrows(
IllegalArgumentException.class,
() -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, settingReq)
);
assertThat(
exception.getMessage(),
equalTo("aliases, mappings, and index settings may not be specified when rolling over a data stream")
);
}
public void testGenerateRolloverIndexName() {
String invalidIndexName = randomAlphaOfLength(10) + "A";
expectThrows(IllegalArgumentException.class, () -> MetadataRolloverService.generateRolloverIndexName(invalidIndexName));
int num = randomIntBetween(0, 100);
final String indexPrefix = randomAlphaOfLength(10);
String indexEndingInNumbers = indexPrefix + "-" + num;
assertThat(
MetadataRolloverService.generateRolloverIndexName(indexEndingInNumbers),
equalTo(indexPrefix + "-" + Strings.format("%06d", num + 1))
);
assertThat(MetadataRolloverService.generateRolloverIndexName("index-name-1"), equalTo("index-name-000002"));
assertThat(MetadataRolloverService.generateRolloverIndexName("index-name-2"), equalTo("index-name-000003"));
assertEquals("<index-name-{now/d}-000002>", MetadataRolloverService.generateRolloverIndexName("<index-name-{now/d}-1>"));
}
public void testCreateIndexRequest() {
String alias = randomAlphaOfLength(10);
String rolloverIndex = randomAlphaOfLength(10);
final RolloverRequest rolloverRequest = new RolloverRequest(alias, randomAlphaOfLength(10));
final ActiveShardCount activeShardCount = randomBoolean() ? ActiveShardCount.ALL : ActiveShardCount.ONE;
rolloverRequest.getCreateIndexRequest().waitForActiveShards(activeShardCount);
final Settings settings = indexSettings(IndexVersion.current(), 1, 0).put(
IndexMetadata.SETTING_INDEX_UUID,
UUIDs.randomBase64UUID()
).build();
rolloverRequest.getCreateIndexRequest().settings(settings);
final CreateIndexClusterStateUpdateRequest createIndexRequest = MetadataRolloverService.prepareCreateIndexRequest(
randomProjectIdOrDefault(),
rolloverIndex,
rolloverIndex,
rolloverRequest.getCreateIndexRequest()
);
assertThat(createIndexRequest.settings(), equalTo(settings));
assertThat(createIndexRequest.index(), equalTo(rolloverIndex));
assertThat(createIndexRequest.cause(), equalTo("rollover_index"));
}
public void testCreateIndexRequestForDataStream() {
DataStream dataStream = DataStreamTestHelper.randomInstance();
final String newWriteIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration() + 1);
final RolloverRequest rolloverRequest = new RolloverRequest(dataStream.getName(), randomAlphaOfLength(10));
final ActiveShardCount activeShardCount = randomBoolean() ? ActiveShardCount.ALL : ActiveShardCount.ONE;
rolloverRequest.getCreateIndexRequest().waitForActiveShards(activeShardCount);
final Settings settings = indexSettings(IndexVersion.current(), 1, 0).put(
IndexMetadata.SETTING_INDEX_UUID,
UUIDs.randomBase64UUID()
).build();
rolloverRequest.getCreateIndexRequest().settings(settings);
final CreateIndexClusterStateUpdateRequest createIndexRequest = MetadataRolloverService.prepareDataStreamCreateIndexRequest(
randomProjectIdOrDefault(),
dataStream.getName(),
newWriteIndexName,
rolloverRequest.getCreateIndexRequest(),
null,
Instant.now()
);
for (String settingKey : settings.keySet()) {
assertThat(settings.get(settingKey), equalTo(createIndexRequest.settings().get(settingKey)));
}
assertThat(createIndexRequest.settings().get("index.hidden"), equalTo("true"));
assertThat(createIndexRequest.index(), equalTo(newWriteIndexName));
assertThat(createIndexRequest.cause(), equalTo("rollover_data_stream"));
}
public void testRejectDuplicateAlias() {
final IndexTemplateMetadata template = IndexTemplateMetadata.builder("test-template")
.patterns(Arrays.asList("foo-*", "bar-*"))
.putAlias(AliasMetadata.builder("foo-write"))
.putAlias(AliasMetadata.builder("bar-write").writeIndex(randomBoolean()))
.build();
final ProjectMetadata projectMetadata = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(createMetadata(randomAlphaOfLengthBetween(5, 7)), false)
.put(template)
.build();
String indexName = randomFrom("foo-123", "bar-xyz");
String aliasName = randomFrom("foo-write", "bar-write");
final IllegalArgumentException ex = expectThrows(
IllegalArgumentException.class,
() -> MetadataRolloverService.checkNoDuplicatedAliasInIndexTemplate(projectMetadata, indexName, aliasName, randomBoolean())
);
assertThat(ex.getMessage(), containsString("index template [test-template]"));
}
public void testRejectDuplicateAliasV2() {
Map<String, AliasMetadata> aliases = new HashMap<>();
aliases.put("foo-write", AliasMetadata.builder("foo-write").build());
aliases.put("bar-write", AliasMetadata.builder("bar-write").writeIndex(randomBoolean()).build());
final ComposableIndexTemplate template = ComposableIndexTemplate.builder()
.indexPatterns(Arrays.asList("foo-*", "bar-*"))
.template(new Template(null, null, aliases))
.build();
final ProjectMetadata projectMetadata = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(createMetadata(randomAlphaOfLengthBetween(5, 7)), false)
.put("test-template", template)
.build();
String indexName = randomFrom("foo-123", "bar-xyz");
String aliasName = randomFrom("foo-write", "bar-write");
final IllegalArgumentException ex = expectThrows(
IllegalArgumentException.class,
() -> MetadataRolloverService.checkNoDuplicatedAliasInIndexTemplate(projectMetadata, indexName, aliasName, randomBoolean())
);
assertThat(ex.getMessage(), containsString("index template [test-template]"));
}
public void testRejectDuplicateAliasV2UsingComponentTemplates() {
Map<String, AliasMetadata> aliases = new HashMap<>();
aliases.put("foo-write", AliasMetadata.builder("foo-write").build());
aliases.put("bar-write", AliasMetadata.builder("bar-write").writeIndex(randomBoolean()).build());
final ComponentTemplate ct = new ComponentTemplate(new Template(null, null, aliases), null, null);
final ComposableIndexTemplate template = ComposableIndexTemplate.builder()
.indexPatterns(Arrays.asList("foo-*", "bar-*"))
.componentTemplates(Collections.singletonList("ct"))
.build();
final ProjectMetadata projectMetadata = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(createMetadata(randomAlphaOfLengthBetween(5, 7)), false)
.put("ct", ct)
.put("test-template", template)
.build();
String indexName = randomFrom("foo-123", "bar-xyz");
String aliasName = randomFrom("foo-write", "bar-write");
final IllegalArgumentException ex = expectThrows(
IllegalArgumentException.class,
() -> MetadataRolloverService.checkNoDuplicatedAliasInIndexTemplate(projectMetadata, indexName, aliasName, randomBoolean())
);
assertThat(ex.getMessage(), containsString("index template [test-template]"));
}
public void testRolloverDoesntRejectOperationIfValidComposableTemplateOverridesLegacyTemplate() {
final IndexTemplateMetadata legacyTemplate = IndexTemplateMetadata.builder("legacy-template")
.patterns(Arrays.asList("foo-*", "bar-*"))
.putAlias(AliasMetadata.builder("foo-write"))
.putAlias(AliasMetadata.builder("bar-write").writeIndex(randomBoolean()))
.build();
// v2 template overrides the v1 template and does not define the rollover aliases
final ComposableIndexTemplate composableTemplate = ComposableIndexTemplate.builder()
.indexPatterns(Arrays.asList("foo-*", "bar-*"))
.template(new Template(null, null, null))
.build();
final ProjectMetadata projectMetadata = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(createMetadata(randomAlphaOfLengthBetween(5, 7)), false)
.put(legacyTemplate)
.put("composable-template", composableTemplate)
.build();
String indexName = randomFrom("foo-123", "bar-xyz");
String aliasName = randomFrom("foo-write", "bar-write");
// the valid v2 template takes priority over the v1 template so the validation should not throw any exception
MetadataRolloverService.checkNoDuplicatedAliasInIndexTemplate(projectMetadata, indexName, aliasName, randomBoolean());
}
public void testHiddenAffectsResolvedTemplates() {
final IndexTemplateMetadata template = IndexTemplateMetadata.builder("test-template")
.patterns(Collections.singletonList("*"))
.putAlias(AliasMetadata.builder("foo-write"))
.putAlias(AliasMetadata.builder("bar-write").writeIndex(randomBoolean()))
.build();
final ProjectMetadata projectMetadata = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(createMetadata(randomAlphaOfLengthBetween(5, 7)), false)
.put(template)
.build();
String indexName = randomFrom("foo-123", "bar-xyz");
String aliasName = randomFrom("foo-write", "bar-write");
// hidden shouldn't throw
MetadataRolloverService.checkNoDuplicatedAliasInIndexTemplate(projectMetadata, indexName, aliasName, Boolean.TRUE);
// not hidden will throw
final IllegalArgumentException ex = expectThrows(
IllegalArgumentException.class,
() -> MetadataRolloverService.checkNoDuplicatedAliasInIndexTemplate(
projectMetadata,
indexName,
aliasName,
randomFrom(Boolean.FALSE, null)
)
);
assertThat(ex.getMessage(), containsString("index template [test-template]"));
}
public void testHiddenAffectsResolvedV2Templates() {
Map<String, AliasMetadata> aliases = new HashMap<>();
aliases.put("foo-write", AliasMetadata.builder("foo-write").build());
aliases.put("bar-write", AliasMetadata.builder("bar-write").writeIndex(randomBoolean()).build());
final ComposableIndexTemplate template = ComposableIndexTemplate.builder()
.indexPatterns(Collections.singletonList("*"))
.template(new Template(null, null, aliases))
.build();
final ProjectMetadata projectMetadata = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(createMetadata(randomAlphaOfLengthBetween(5, 7)), false)
.put("test-template", template)
.build();
String indexName = randomFrom("foo-123", "bar-xyz");
String aliasName = randomFrom("foo-write", "bar-write");
// hidden shouldn't throw
MetadataRolloverService.checkNoDuplicatedAliasInIndexTemplate(projectMetadata, indexName, aliasName, Boolean.TRUE);
// not hidden will throw
final IllegalArgumentException ex = expectThrows(
IllegalArgumentException.class,
() -> MetadataRolloverService.checkNoDuplicatedAliasInIndexTemplate(
projectMetadata,
indexName,
aliasName,
randomFrom(Boolean.FALSE, null)
)
);
assertThat(ex.getMessage(), containsString("index template [test-template]"));
}
public void testHiddenAffectsResolvedV2ComponentTemplates() {
Map<String, AliasMetadata> aliases = new HashMap<>();
aliases.put("foo-write", AliasMetadata.builder("foo-write").build());
aliases.put("bar-write", AliasMetadata.builder("bar-write").writeIndex(randomBoolean()).build());
final ComponentTemplate ct = new ComponentTemplate(new Template(null, null, aliases), null, null);
final ComposableIndexTemplate template = ComposableIndexTemplate.builder()
.indexPatterns(Collections.singletonList("*"))
.componentTemplates(Collections.singletonList("ct"))
.build();
final ProjectMetadata projectMetadata = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(createMetadata(randomAlphaOfLengthBetween(5, 7)), false)
.put("ct", ct)
.put("test-template", template)
.build();
String indexName = randomFrom("foo-123", "bar-xyz");
String aliasName = randomFrom("foo-write", "bar-write");
// hidden shouldn't throw
MetadataRolloverService.checkNoDuplicatedAliasInIndexTemplate(projectMetadata, indexName, aliasName, Boolean.TRUE);
// not hidden will throw
final IllegalArgumentException ex = expectThrows(
IllegalArgumentException.class,
() -> MetadataRolloverService.checkNoDuplicatedAliasInIndexTemplate(
projectMetadata,
indexName,
aliasName,
randomFrom(Boolean.FALSE, null)
)
);
assertThat(ex.getMessage(), containsString("index template [test-template]"));
}
/**
* Test the main rolloverClusterState method. This does not validate every detail to depth, rather focuses on observing that each
* parameter is used for the purpose intended.
*/
public void testRolloverClusterState() throws Exception {
final String aliasName = "logs-alias";
final String indexPrefix = "logs-index-00000";
String sourceIndexName = indexPrefix + "1";
final IndexMetadata.Builder indexMetadata = IndexMetadata.builder(sourceIndexName)
.putAlias(AliasMetadata.builder(aliasName).writeIndex(true).build())
.settings(settings(IndexVersion.current()))
.numberOfShards(1)
.numberOfReplicas(1);
final var projectId = randomProjectIdOrDefault();
final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)
.putProjectMetadata(ProjectMetadata.builder(projectId).put(indexMetadata))
.build();
final TestTelemetryPlugin telemetryPlugin = new TestTelemetryPlugin();
ThreadPool testThreadPool = new TestThreadPool(getTestName());
try {
MetadataRolloverService rolloverService = DataStreamTestHelper.getMetadataRolloverService(
null,
testThreadPool,
Set.of(),
xContentRegistry(),
telemetryPlugin.getTelemetryProvider(Settings.EMPTY)
);
MaxDocsCondition condition = new MaxDocsCondition(randomNonNegativeLong());
List<Condition<?>> metConditions = Collections.singletonList(condition);
String newIndexName = randomBoolean() ? "logs-index-9" : null;
int numberOfShards = randomIntBetween(1, 5);
CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_");
createIndexRequest.settings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfShards));
long before = testThreadPool.absoluteTimeInMillis();
MetadataRolloverService.RolloverResult rolloverResult = rolloverService.rolloverClusterState(
clusterState.projectState(projectId),
aliasName,
newIndexName,
createIndexRequest,
metConditions,
Instant.now(),
randomBoolean(),
false,
null,
null,
false
);
long after = testThreadPool.absoluteTimeInMillis();
newIndexName = newIndexName == null ? indexPrefix + "2" : newIndexName;
assertEquals(sourceIndexName, rolloverResult.sourceIndexName());
assertEquals(newIndexName, rolloverResult.rolloverIndexName());
ProjectMetadata rolloverMetadata = rolloverResult.clusterState().metadata().getProject(projectId);
assertEquals(2, rolloverMetadata.indices().size());
IndexMetadata rolloverIndexMetadata = rolloverMetadata.index(newIndexName);
assertThat(rolloverIndexMetadata.getNumberOfShards(), equalTo(numberOfShards));
IndexAbstraction alias = rolloverMetadata.getIndicesLookup().get(aliasName);
assertThat(alias.getType(), equalTo(IndexAbstraction.Type.ALIAS));
assertThat(alias.getIndices(), hasSize(2));
assertThat(alias.getIndices(), hasItem(rolloverMetadata.index(sourceIndexName).getIndex()));
assertThat(alias.getIndices(), hasItem(rolloverIndexMetadata.getIndex()));
assertThat(alias.getWriteIndex(), equalTo(rolloverIndexMetadata.getIndex()));
RolloverInfo info = rolloverMetadata.index(sourceIndexName).getRolloverInfos().get(aliasName);
assertThat(info.getTime(), lessThanOrEqualTo(after));
assertThat(info.getTime(), greaterThanOrEqualTo(before));
assertThat(info.getMetConditions(), hasSize(1));
assertThat(info.getMetConditions().get(0).value(), equalTo(condition.value()));
for (String metric : MetadataRolloverService.AUTO_SHARDING_METRIC_NAMES.values()) {
assertThat(telemetryPlugin.getLongCounterMeasurement(metric), empty());
}
} finally {
testThreadPool.shutdown();
}
}
public void testRolloverClusterStateForDataStream() throws Exception {
final DataStream dataStream = DataStreamTestHelper.randomInstance()
// ensure no replicate data stream
.promoteDataStream();
ComposableIndexTemplate template = ComposableIndexTemplate.builder()
.indexPatterns(List.of(dataStream.getName() + "*"))
.dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
.build();
final var projectId = randomProjectIdOrDefault();
ProjectMetadata.Builder builder = ProjectMetadata.builder(projectId);
builder.put("template", template);
for (Index index : dataStream.getIndices()) {
builder.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index));
}
builder.put(dataStream);
final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).putProjectMetadata(builder).build();
final TestTelemetryPlugin telemetryPlugin = new TestTelemetryPlugin();
ThreadPool testThreadPool = new TestThreadPool(getTestName());
try {
MetadataRolloverService rolloverService = DataStreamTestHelper.getMetadataRolloverService(
dataStream,
testThreadPool,
Set.of(),
xContentRegistry(),
telemetryPlugin.getTelemetryProvider(Settings.EMPTY)
);
MaxDocsCondition condition = new MaxDocsCondition(randomNonNegativeLong());
List<Condition<?>> metConditions = Collections.singletonList(condition);
CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_");
String sourceIndexName = dataStream.getWriteIndex().getName();
long before = testThreadPool.absoluteTimeInMillis();
MetadataRolloverService.RolloverResult rolloverResult = rolloverService.rolloverClusterState(
clusterState.projectState(projectId),
dataStream.getName(),
null,
createIndexRequest,
metConditions,
Instant.now(),
randomBoolean(),
false,
null,
null,
false
);
long after = testThreadPool.absoluteTimeInMillis();
Settings rolledOverIndexSettings = rolloverResult.clusterState()
.metadata()
.getProject(projectId)
.index(rolloverResult.rolloverIndexName())
.getSettings();
Set<String> rolledOverIndexSettingNames = rolledOverIndexSettings.keySet();
ClusterService clusterService = ClusterServiceUtils.createClusterService(testThreadPool);
IndicesService indicesService = mock(IndicesService.class);
MetadataDataStreamsService metadataDataStreamsService = new MetadataDataStreamsService(
clusterService,
indicesService,
DataStreamGlobalRetentionSettings.create(ClusterSettings.createBuiltInClusterSettings()),
IndexSettingProviders.EMPTY
);
for (String settingName : metadataDataStreamsService.getEffectiveSettings(
clusterState.metadata().getProject(projectId),
dataStream
).keySet()) {
assertTrue(rolledOverIndexSettingNames.contains(settingName));
}
String newIndexName = rolloverResult.rolloverIndexName();
assertEquals(sourceIndexName, rolloverResult.sourceIndexName());
assertThat(newIndexName, DataStreamTestHelper.backingIndexEqualTo(dataStream.getName(), (int) dataStream.getGeneration() + 1));
ProjectMetadata rolloverMetadata = rolloverResult.clusterState().metadata().getProject(projectId);
assertEquals(dataStream.getIndices().size() + 1, rolloverMetadata.indices().size());
IndexMetadata rolloverIndexMetadata = rolloverMetadata.index(newIndexName);
IndexAbstraction ds = rolloverMetadata.getIndicesLookup().get(dataStream.getName());
assertThat(ds.getType(), equalTo(IndexAbstraction.Type.DATA_STREAM));
assertThat(ds.getIndices(), hasSize(dataStream.getIndices().size() + 1));
assertThat(ds.getIndices(), hasItem(rolloverMetadata.index(sourceIndexName).getIndex()));
assertThat(ds.getIndices(), hasItem(rolloverIndexMetadata.getIndex()));
assertThat(ds.getWriteIndex(), equalTo(rolloverIndexMetadata.getIndex()));
RolloverInfo info = rolloverMetadata.index(sourceIndexName).getRolloverInfos().get(dataStream.getName());
assertThat(info.getTime(), lessThanOrEqualTo(after));
assertThat(info.getTime(), greaterThanOrEqualTo(before));
assertThat(info.getMetConditions(), hasSize(1));
assertThat(info.getMetConditions().get(0).value(), equalTo(condition.value()));
} finally {
testThreadPool.shutdown();
}
}
public void testRolloverClusterStateForDataStreamFailureStore() throws Exception {
final DataStream dataStream = DataStreamTestHelper.randomInstance(true)
// ensure no replicate data stream
.promoteDataStream();
ComposableIndexTemplate template = ComposableIndexTemplate.builder()
.indexPatterns(List.of(dataStream.getName() + "*"))
.dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
.build();
final var projectId = randomProjectIdOrDefault();
ProjectMetadata.Builder builder = ProjectMetadata.builder(projectId);
builder.put("template", template);
dataStream.getIndices().forEach(index -> builder.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index)));
dataStream.getFailureIndices().forEach(index -> builder.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index)));
builder.put(dataStream);
final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).putProjectMetadata(builder).build();
final TestTelemetryPlugin telemetryPlugin = new TestTelemetryPlugin();
ThreadPool testThreadPool = new TestThreadPool(getTestName());
try {
MetadataRolloverService rolloverService = DataStreamTestHelper.getMetadataRolloverService(
dataStream,
testThreadPool,
Set.of(),
xContentRegistry(),
telemetryPlugin.getTelemetryProvider(Settings.EMPTY)
);
MaxDocsCondition condition = new MaxDocsCondition(randomNonNegativeLong());
List<Condition<?>> metConditions = Collections.singletonList(condition);
CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_");
long before = testThreadPool.absoluteTimeInMillis();
MetadataRolloverService.RolloverResult rolloverResult = rolloverService.rolloverClusterState(
clusterState.projectState(projectId),
dataStream.getName(),
null,
createIndexRequest,
metConditions,
Instant.now(),
randomBoolean(),
false,
null,
null,
true
);
long after = testThreadPool.absoluteTimeInMillis();
Settings rolledOverIndexSettings = rolloverResult.clusterState()
.projectState(projectId)
.metadata()
.index(rolloverResult.rolloverIndexName())
.getSettings();
Set<String> rolledOverIndexSettingNames = rolledOverIndexSettings.keySet();
for (String settingName : dataStream.getSettings().keySet()) {
assertFalse(rolledOverIndexSettingNames.contains(settingName));
}
var epochMillis = System.currentTimeMillis();
String sourceIndexName = DataStream.getDefaultFailureStoreName(dataStream.getName(), dataStream.getGeneration(), epochMillis);
String newIndexName = DataStream.getDefaultFailureStoreName(dataStream.getName(), dataStream.getGeneration() + 1, epochMillis);
assertEquals(sourceIndexName, rolloverResult.sourceIndexName());
assertEquals(newIndexName, rolloverResult.rolloverIndexName());
ProjectMetadata rolloverMetadata = rolloverResult.clusterState().metadata().getProject(projectId);
assertEquals(dataStream.getIndices().size() + dataStream.getFailureIndices().size() + 1, rolloverMetadata.indices().size());
IndexMetadata rolloverIndexMetadata = rolloverMetadata.index(newIndexName);
var ds = (DataStream) rolloverMetadata.getIndicesLookup().get(dataStream.getName());
assertThat(ds.getType(), equalTo(IndexAbstraction.Type.DATA_STREAM));
assertThat(ds.getIndices(), hasSize(dataStream.getIndices().size()));
assertThat(ds.getFailureIndices(), hasSize(dataStream.getFailureIndices().size() + 1));
assertThat(ds.getFailureIndices(), hasItem(rolloverMetadata.index(sourceIndexName).getIndex()));
assertThat(ds.getFailureIndices(), hasItem(rolloverIndexMetadata.getIndex()));
assertThat(ds.getWriteFailureIndex(), equalTo(rolloverIndexMetadata.getIndex()));
RolloverInfo info = rolloverMetadata.index(sourceIndexName).getRolloverInfos().get(dataStream.getName());
assertThat(info.getTime(), lessThanOrEqualTo(after));
assertThat(info.getTime(), greaterThanOrEqualTo(before));
assertThat(info.getMetConditions(), hasSize(1));
assertThat(info.getMetConditions().get(0).value(), equalTo(condition.value()));
} finally {
testThreadPool.shutdown();
}
}
    /**
     * Verifies that a dry-run/validation rollover resolves the correct source and target index
     * names for both an alias-backed index and a data stream (optionally its failure store),
     * and — as proven by the final {@code assertSame} — returns the input cluster state
     * unchanged, i.e. without actually performing the rollover.
     */
    public void testValidation() throws Exception {
        final String rolloverTarget;
        final String sourceIndexName;
        final String defaultRolloverIndexName;
        // Randomly exercise both rollover paths: data-stream-based and alias-based.
        final boolean useDataStream = randomBoolean();
        final var now = Instant.now();
        final var projectId = randomProjectIdOrDefault();
        ProjectMetadata.Builder builder = ProjectMetadata.builder(projectId);
        boolean isFailureStoreRollover = false;
        if (useDataStream) {
            // ensure no replicate data stream
            DataStream dataStream = DataStreamTestHelper.randomInstance(now::toEpochMilli).promoteDataStream();
            rolloverTarget = dataStream.getName();
            if (dataStream.isFailureStoreExplicitlyEnabled() && randomBoolean()) {
                // Failure-store variant: the source is the failure store's write index and the
                // expected target follows the failure-store naming scheme for the next generation.
                sourceIndexName = dataStream.getWriteFailureIndex().getName();
                isFailureStoreRollover = true;
                defaultRolloverIndexName = DataStream.getDefaultFailureStoreName(
                    dataStream.getName(),
                    dataStream.getGeneration() + 1,
                    now.toEpochMilli()
                );
            } else {
                // Regular variant: the source is the last (write) backing index and the expected
                // target follows the backing-index naming scheme for the next generation.
                sourceIndexName = dataStream.getIndices().get(dataStream.getIndices().size() - 1).getName();
                defaultRolloverIndexName = DataStream.getDefaultBackingIndexName(
                    dataStream.getName(),
                    dataStream.getGeneration() + 1,
                    now.toEpochMilli()
                );
            }
            // A composable template matching the data stream name is registered so that
            // template resolution during rollover validation can succeed.
            ComposableIndexTemplate template = ComposableIndexTemplate.builder()
                .indexPatterns(List.of(dataStream.getName() + "*"))
                .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
                .build();
            builder.put("template", template);
            for (Index index : dataStream.getIndices()) {
                builder.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index));
            }
            builder.put(dataStream);
        } else {
            // Alias variant: a single index "logs-index-000001" that is the write index of "logs-alias".
            String indexPrefix = "logs-index-00000";
            rolloverTarget = "logs-alias";
            sourceIndexName = indexPrefix + "1";
            defaultRolloverIndexName = indexPrefix + "2";
            final IndexMetadata.Builder indexMetadata = IndexMetadata.builder(sourceIndexName)
                .putAlias(AliasMetadata.builder(rolloverTarget).writeIndex(true).build())
                .settings(settings(IndexVersion.current()))
                .numberOfShards(1)
                .numberOfReplicas(1);
            builder.put(indexMetadata);
        }
        final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).putProjectMetadata(builder).build();
        // Collaborator services are plain mocks: a validation-only rollover must never
        // reach index creation or alias manipulation, so no stubbing is needed.
        MetadataCreateIndexService createIndexService = mock(MetadataCreateIndexService.class);
        MetadataIndexAliasesService metadataIndexAliasesService = mock(MetadataIndexAliasesService.class);
        ClusterService clusterService = mock(ClusterService.class);
        final TestTelemetryPlugin telemetryPlugin = new TestTelemetryPlugin();
        MetadataRolloverService rolloverService = new MetadataRolloverService(
            null,
            createIndexService,
            metadataIndexAliasesService,
            EmptySystemIndices.INSTANCE,
            WriteLoadForecaster.DEFAULT,
            clusterService,
            telemetryPlugin.getTelemetryProvider(Settings.EMPTY)
        );
        // Sometimes request an explicit target index name (only exercised in the alias case).
        String newIndexName = useDataStream == false && randomBoolean() ? "logs-index-9" : null;
        // NOTE(review): the literal `true` argument appears to be the dry-run/only-validate
        // flag — confirm against the MetadataRolloverService#rolloverClusterState signature.
        MetadataRolloverService.RolloverResult rolloverResult = rolloverService.rolloverClusterState(
            clusterState.projectState(projectId),
            rolloverTarget,
            newIndexName,
            new CreateIndexRequest("_na_"),
            null,
            now,
            randomBoolean(),
            true,
            null,
            null,
            isFailureStoreRollover
        );
        // When no explicit name was requested, the default generated name must have been chosen.
        newIndexName = newIndexName == null ? defaultRolloverIndexName : newIndexName;
        assertEquals(sourceIndexName, rolloverResult.sourceIndexName());
        assertEquals(newIndexName, rolloverResult.rolloverIndexName());
        // Same instance, not merely equal: validation must not have produced a new cluster state.
        assertSame(rolloverResult.clusterState(), clusterState);
    }
public void testRolloverClusterStateForDataStreamNoTemplate() throws Exception {
final DataStream dataStream = DataStreamTestHelper.randomInstance();
final var projectId = randomProjectIdOrDefault();
ProjectMetadata.Builder builder = ProjectMetadata.builder(projectId);
for (Index index : dataStream.getIndices()) {
builder.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index));
}
builder.put(dataStream);
final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).putProjectMetadata(builder).build();
final TestTelemetryPlugin telemetryPlugin = new TestTelemetryPlugin();
ThreadPool testThreadPool = mock(ThreadPool.class);
when(testThreadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY));
MetadataRolloverService rolloverService = DataStreamTestHelper.getMetadataRolloverService(
dataStream,
testThreadPool,
Set.of(),
xContentRegistry(),
telemetryPlugin.getTelemetryProvider(Settings.EMPTY)
);
MaxDocsCondition condition = new MaxDocsCondition(randomNonNegativeLong());
List<Condition<?>> metConditions = Collections.singletonList(condition);
CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_");
Exception e = expectThrows(
IllegalArgumentException.class,
() -> rolloverService.rolloverClusterState(
clusterState.projectState(projectId),
dataStream.getName(),
null,
createIndexRequest,
metConditions,
Instant.now(),
false,
randomBoolean(),
null,
null,
false
)
);
assertThat(e.getMessage(), equalTo("no matching index template found for data stream [" + dataStream.getName() + "]"));
}
private static IndexMetadata createMetadata(String indexName) {
return IndexMetadata.builder(indexName)
.creationDate(System.currentTimeMillis() - TimeValue.timeValueHours(3).getMillis())
.settings(indexSettings(IndexVersion.current(), 1, 0).put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()))
.build();
}
}
| MetadataRolloverServiceTests |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/lock/internal/SqlAstBasedLockingStrategy.java | {
"start": 2242,
"end": 8730
} | class ____ implements LockingStrategy {
private final EntityMappingType entityToLock;
private final LockMode lockMode;
private final Locking.Scope lockScope;
public SqlAstBasedLockingStrategy(EntityPersister lockable, LockMode lockMode, Locking.Scope lockScope) {
this.entityToLock = lockable.getRootEntityDescriptor();
this.lockMode = lockMode;
this.lockScope = lockScope;
}
@Override
public void lock(
Object id,
Object version,
Object object,
int timeout,
SharedSessionContractImplementor session)
throws StaleObjectStateException, LockingStrategyException {
final var factory = session.getFactory();
final var lockOptions = new LockOptions( lockMode );
lockOptions.setScope( lockScope );
lockOptions.setTimeOut( timeout );
final var entityPath = new NavigablePath( entityToLock.getRootPathName() );
final var rootQuerySpec = new QuerySpec( true );
rootQuerySpec.applyRootPathForLocking( entityPath );
final var idMapping = entityToLock.getIdentifierMapping();
// NOTE: there are 2 possible ways to handle the select list for the query...
// 1) use normal `idMapping.createDomainResult`. for simple ids, this is fine; however,
// for composite ids, this would require a proper implementation of `FetchProcessor`
// (the parts of the composition are considered `Fetch`es). `FetchProcessor` is not
// a trivial thing to implement though. this would be the "best" approach though.
// look at simplifying LoaderSelectBuilder.visitFetches for reusability
// 2) for now, we'll just manually build the selection list using "one of" the id columns
// and manually build a simple `BasicResult`
final var sqlAstCreationState = new LoaderSqlAstCreationState(
rootQuerySpec,
new SqlAliasBaseManager(),
new SimpleFromClauseAccessImpl(),
lockOptions,
// todo (db-locking) : look to simplify LoaderSelectBuilder.visitFetches for reusability
(fetchParent, creationState) -> ImmutableFetchList.EMPTY,
true,
new LoadQueryInfluencers( factory ),
factory.getSqlTranslationEngine()
);
final var rootTableGroup = entityToLock.createRootTableGroup(
true,
entityPath,
null,
null,
() -> p -> {
},
sqlAstCreationState
);
rootQuerySpec.getFromClause().addRoot( rootTableGroup );
sqlAstCreationState.getFromClauseAccess().registerTableGroup( entityPath, rootTableGroup );
final var sqlExpressionResolver = sqlAstCreationState.getSqlExpressionResolver();
final var firstIdColumn = idMapping.getSelectable( 0 );
sqlExpressionResolver.resolveSqlSelection(
sqlExpressionResolver.resolveSqlExpression( rootTableGroup.getPrimaryTableReference(), firstIdColumn ),
firstIdColumn.getJdbcMapping().getJdbcJavaType(),
null,
session.getTypeConfiguration()
);
final BasicResult<Object> idResult =
new BasicResult<>( 0, null, idMapping.getJdbcMapping( 0 ) );
final var versionMapping = entityToLock.getVersionMapping();
final int jdbcTypeCount = idMapping.getJdbcTypeCount();
final int jdbcParamCount = versionMapping == null ? jdbcTypeCount : jdbcTypeCount + 1;
final var jdbcParameterBindings = new JdbcParameterBindingsImpl( jdbcParamCount );
idMapping.breakDownJdbcValues(
id,
(valueIndex, value, jdbcValueMapping) -> handleRestriction(
value,
jdbcValueMapping,
rootQuerySpec,
sqlAstCreationState,
rootTableGroup,
jdbcParameterBindings
),
session
);
if ( versionMapping != null ) {
versionMapping.breakDownJdbcValues(
version,
(valueIndex, value, jdbcValueMapping) -> handleRestriction(
value,
jdbcValueMapping,
rootQuerySpec,
sqlAstCreationState,
rootTableGroup,
jdbcParameterBindings
),
session
);
}
final var selectStatement = new SelectStatement( rootQuerySpec, List.of( idResult ) );
final JdbcSelect selectOperation =
session.getDialect().getSqlAstTranslatorFactory()
.buildSelectTranslator( factory, selectStatement )
.translate( jdbcParameterBindings, sqlAstCreationState );
final var lockingExecutionContext = new LockingExecutionContext( session );
try {
factory.getJdbcServices().getJdbcSelectExecutor()
.executeQuery(
selectOperation,
jdbcParameterBindings,
lockingExecutionContext,
null,
idResult.getResultJavaType().getJavaTypeClass(),
1,
SingleResultConsumer.instance()
);
if ( lockOptions.getScope() == Locking.Scope.INCLUDE_COLLECTIONS ) {
SqmMutationStrategyHelper.visitCollectionTables( entityToLock, (attribute) -> {
final var collectionToLock = (PersistentCollection<?>) attribute.getValue( object );
LockingHelper.lockCollectionTable(
attribute,
lockMode,
lockOptions.getTimeout(),
collectionToLock,
lockingExecutionContext
);
} );
}
}
catch (LockTimeoutException lockTimeout) {
throw new PessimisticEntityLockException(
object,
String.format( Locale.ROOT, "Lock timeout exceeded attempting to lock row(s) for %s", object ),
lockTimeout
);
}
catch (NoRowException noRow) {
if ( !entityToLock.optimisticLockStyle().isNone() ) {
final String entityName = entityToLock.getEntityName();
final var statistics = session.getFactory().getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.optimisticFailure( entityName );
}
throw new StaleObjectStateException( entityName, id,
"No rows were returned from JDBC query for versioned entity" );
}
else {
throw noRow;
}
}
}
private static void handleRestriction(
Object value,
SelectableMapping jdbcValueMapping,
QuerySpec rootQuerySpec,
LoaderSqlAstCreationState sqlAstCreationState,
TableGroup rootTableGroup,
JdbcParameterBindings jdbcParameterBindings) {
final var jdbcParameter = new SqlTypedMappingJdbcParameter( jdbcValueMapping );
rootQuerySpec.applyPredicate(
new ComparisonPredicate(
sqlAstCreationState.getSqlExpressionResolver().resolveSqlExpression(
rootTableGroup.getTableReference( jdbcValueMapping.getContainingTableExpression() ),
jdbcValueMapping
),
ComparisonOperator.EQUAL,
jdbcParameter
)
);
final var jdbcMapping = jdbcValueMapping.getJdbcMapping();
jdbcParameterBindings.addBinding( jdbcParameter, new JdbcParameterBindingImpl( jdbcMapping, value ) );
}
}
| SqlAstBasedLockingStrategy |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/checkpointing/TimestampedFileInputSplitTest.java | {
"start": 1260,
"end": 5737
} | class ____ extends TestLogger {
@Test
public void testSplitEquality() {
TimestampedFileInputSplit richFirstSplit =
new TimestampedFileInputSplit(10, 2, new Path("test"), 0, 100, null);
TimestampedFileInputSplit richSecondSplit =
new TimestampedFileInputSplit(10, 2, new Path("test"), 0, 100, null);
Assert.assertEquals(richFirstSplit, richSecondSplit);
TimestampedFileInputSplit richModSecondSplit =
new TimestampedFileInputSplit(11, 2, new Path("test"), 0, 100, null);
Assert.assertNotEquals(richSecondSplit, richModSecondSplit);
TimestampedFileInputSplit richThirdSplit =
new TimestampedFileInputSplit(10, 2, new Path("test/test1"), 0, 100, null);
Assert.assertEquals(richThirdSplit.getModificationTime(), 10);
Assert.assertNotEquals(richFirstSplit, richThirdSplit);
TimestampedFileInputSplit richThirdSplitCopy =
new TimestampedFileInputSplit(10, 2, new Path("test/test1"), 0, 100, null);
Assert.assertEquals(richThirdSplitCopy, richThirdSplit);
}
@Test
public void testSplitComparison() {
TimestampedFileInputSplit richFirstSplit =
new TimestampedFileInputSplit(0, 3, new Path("test/test1"), 0, 100, null);
TimestampedFileInputSplit richSecondSplit =
new TimestampedFileInputSplit(10, 2, new Path("test/test2"), 0, 100, null);
TimestampedFileInputSplit richThirdSplit =
new TimestampedFileInputSplit(10, 1, new Path("test/test2"), 0, 100, null);
TimestampedFileInputSplit richForthSplit =
new TimestampedFileInputSplit(11, 0, new Path("test/test3"), 0, 100, null);
TimestampedFileInputSplit richFifthSplit =
new TimestampedFileInputSplit(11, 1, new Path("test/test3"), 0, 100, null);
// smaller mod time
Assert.assertTrue(richFirstSplit.compareTo(richSecondSplit) < 0);
// lexicographically on the path
Assert.assertTrue(richThirdSplit.compareTo(richFifthSplit) < 0);
// same mod time, same file so smaller split number first
Assert.assertTrue(richThirdSplit.compareTo(richSecondSplit) < 0);
// smaller modification time first
Assert.assertTrue(richThirdSplit.compareTo(richForthSplit) < 0);
}
@Test(expected = IllegalArgumentException.class)
public void testIllegalArgument() {
new TimestampedFileInputSplit(
-10, 2, new Path("test"), 0, 100, null); // invalid modification time
}
@Test
public void testPriorityQ() {
TimestampedFileInputSplit richFirstSplit =
new TimestampedFileInputSplit(0, 3, new Path("test/test1"), 0, 100, null);
TimestampedFileInputSplit richSecondSplit =
new TimestampedFileInputSplit(10, 2, new Path("test/test2"), 0, 100, null);
TimestampedFileInputSplit richThirdSplit =
new TimestampedFileInputSplit(10, 1, new Path("test/test2"), 0, 100, null);
TimestampedFileInputSplit richForthSplit =
new TimestampedFileInputSplit(11, 0, new Path("test/test3"), 0, 100, null);
TimestampedFileInputSplit richFifthSplit =
new TimestampedFileInputSplit(11, 1, new Path("test/test3"), 0, 100, null);
Queue<TimestampedFileInputSplit> pendingSplits = new PriorityQueue<>();
pendingSplits.add(richSecondSplit);
pendingSplits.add(richForthSplit);
pendingSplits.add(richFirstSplit);
pendingSplits.add(richFifthSplit);
pendingSplits.add(richFifthSplit);
pendingSplits.add(richThirdSplit);
List<TimestampedFileInputSplit> actualSortedSplits = new ArrayList<>();
while (true) {
actualSortedSplits.add(pendingSplits.poll());
if (pendingSplits.isEmpty()) {
break;
}
}
List<TimestampedFileInputSplit> expectedSortedSplits = new ArrayList<>();
expectedSortedSplits.add(richFirstSplit);
expectedSortedSplits.add(richThirdSplit);
expectedSortedSplits.add(richSecondSplit);
expectedSortedSplits.add(richForthSplit);
expectedSortedSplits.add(richFifthSplit);
expectedSortedSplits.add(richFifthSplit);
Assert.assertArrayEquals(expectedSortedSplits.toArray(), actualSortedSplits.toArray());
}
}
| TimestampedFileInputSplitTest |
java | playframework__playframework | core/play-guice/src/main/java/play/libs/pekko/BinderAccessor.java | {
"start": 334,
"end": 904
} | class ____ {
/** Get the binder from an AbstractModule. */
static Binder binder(Object module) {
if (module instanceof AbstractModule) {
try {
Method method = AbstractModule.class.getDeclaredMethod("binder");
if (!method.canAccess(module)) {
method.setAccessible(true);
}
return (Binder) method.invoke(module);
} catch (Exception e) {
throw new RuntimeException(e);
}
} else {
throw new IllegalArgumentException("Module must be an instance of AbstractModule");
}
}
}
| BinderAccessor |
java | google__auto | factory/src/test/resources/expected/Generics_FooImplFactory.java | {
"start": 841,
"end": 1069
} | class ____<M extends Generics.Bar> implements Generics.FooFactory<M> {
@Inject
Generics_FooImplFactory() {}
@Override
public Generics.FooImpl<M> create() {
return new Generics.FooImpl<M>();
}
}
| Generics_FooImplFactory |
java | quarkusio__quarkus | extensions/smallrye-fault-tolerance/deployment/src/test/java/io/quarkus/smallrye/faulttolerance/test/asynchronous/types/mutiny/resubscription/MutinyResubscriptionTest.java | {
"start": 354,
"end": 1265
} | class ____ {
// this test verifies resubscription, which is triggered via retry
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar.addClasses(MutinyHelloService.class));
@Inject
MutinyHelloService service;
@Test
public void test() {
Uni<String> hello = service.hello()
.onFailure().retry().atMost(2)
.onFailure().recoverWithItem("hello");
assertThat(MutinyHelloService.COUNTER).hasValue(0);
assertThat(hello.await().indefinitely()).isEqualTo("hello");
// the service.hello() method has @Retry with default settings, so 1 initial attempt + 3 retries = 4 total
// the onFailure().retry() handler does 1 initial attempt + 2 retries = 3 total
assertThat(MutinyHelloService.COUNTER).hasValue(4 * 3);
}
}
| MutinyResubscriptionTest |
java | apache__flink | flink-state-backends/flink-statebackend-rocksdb/src/test/java/org/apache/flink/state/rocksdb/RocksDBInitITCase.java | {
"start": 1203,
"end": 2207
} | class ____ {
@Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder();
/**
* This test checks that the RocksDB native code loader still responds to resetting the init
* flag.
*/
@Test
public void testResetInitFlag() throws Exception {
EmbeddedRocksDBStateBackend.resetRocksDBLoadedFlag();
}
@Test
public void testTempLibFolderDeletedOnFail() throws Exception {
File tempFolder = temporaryFolder.newFolder();
try {
EmbeddedRocksDBStateBackend.ensureRocksDBIsLoaded(
tempFolder.getAbsolutePath(),
() -> {
throw new ExpectedTestException();
});
fail("Not throwing expected exception.");
} catch (IOException ignored) {
// ignored
}
File[] files = tempFolder.listFiles();
Assert.assertNotNull(files);
Assert.assertEquals(0, files.length);
}
}
| RocksDBInitITCase |
java | mapstruct__mapstruct | integrationtest/src/test/resources/expressionTextBlocksTest/src/main/java/org/mapstruct/itest/textBlocks/CarAndWheelMapper.java | {
"start": 420,
"end": 1165
} | interface ____ {
CarAndWheelMapper INSTANCE = Mappers.getMapper( CarAndWheelMapper.class );
@Mapping(target = "wheelPosition",
expression =
"""
java(
source.getWheelPosition() == null ?
null :
source.getWheelPosition().getPosition()
)
""")
CarDto carDtoFromCar(Car source);
@Mapping(target = "wheelPosition",
expression = """
java(
source.wheelPosition() == null ?
null :
new WheelPosition(source.wheelPosition())
)
""")
Car carFromCarDto(CarDto source);
}
| CarAndWheelMapper |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/functions/sink/legacy/PrintSinkFunction.java | {
"start": 1795,
"end": 1835
} | interface ____.
*/
@Internal
public | instead |
java | quarkusio__quarkus | extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/metrics/MongoMetricsConnectionPoolListener.java | {
"start": 735,
"end": 3699
} | class ____ implements ConnectionPoolListener {
private final static String SIZE_NAME = "mongodb.connection-pool.size";
private final static String CHECKED_OUT_COUNT_NAME = "mongodb.connection-pool.checked-out-count";
@Override
public void connectionPoolCreated(ConnectionPoolCreatedEvent event) {
Tag[] tags = createTags(event.getServerId());
registerGauge(SIZE_NAME, "the current size of the pool, including idle and and in-use members", tags);
registerGauge(CHECKED_OUT_COUNT_NAME, "the current count of connections that are currently in use", tags);
}
@Override
public void connectionCheckedOut(ConnectionCheckedOutEvent event) {
MetricID metricID = createMetricID(CHECKED_OUT_COUNT_NAME, event.getConnectionId().getServerId());
Metric metric = getMetricRegistry().getMetrics().get(metricID);
if (metric != null) {
((ConnectionPoolGauge) metric).increment();
}
}
@Override
public void connectionCheckedIn(ConnectionCheckedInEvent event) {
MetricID metricID = createMetricID(CHECKED_OUT_COUNT_NAME, event.getConnectionId().getServerId());
Metric metric = getMetricRegistry().getMetrics().get(metricID);
if (metric != null) {
((ConnectionPoolGauge) metric).decrement();
}
}
@Override
public void connectionCreated(ConnectionCreatedEvent event) {
MetricID metricID = createMetricID(SIZE_NAME, event.getConnectionId().getServerId());
Metric metric = getMetricRegistry().getMetrics().get(metricID);
if (metric != null) {
((ConnectionPoolGauge) metric).increment();
}
}
@Override
public void connectionClosed(ConnectionClosedEvent event) {
MetricID metricID = createMetricID(SIZE_NAME, event.getConnectionId().getServerId());
Metric metric = getMetricRegistry().getMetrics().get(metricID);
if (metric != null) {
((ConnectionPoolGauge) metric).decrement();
}
}
private void registerGauge(String metricName, String description, Tag[] tags) {
getMetricRegistry().remove(new MetricID(metricName, tags));
Metadata metaData = Metadata.builder().withName(metricName).withType(MetricType.GAUGE)
.withDescription(description).build();
getMetricRegistry().register(metaData, new ConnectionPoolGauge(), tags);
}
private MetricRegistry getMetricRegistry() {
return MetricRegistries.get(MetricRegistry.Type.VENDOR);
}
private Tag[] createTags(ServerId server) {
return new Tag[] {
new Tag("host", server.getAddress().getHost()),
new Tag("port", String.valueOf(server.getAddress().getPort())),
};
}
private MetricID createMetricID(String metricName, ServerId server) {
return new MetricID(metricName, createTags(server));
}
}
| MongoMetricsConnectionPoolListener |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/engine/internal/BeforeTransactionCompletionProcessQueue.java | {
"start": 405,
"end": 1049
} | class ____
extends AbstractTransactionCompletionProcessQueue<BeforeCompletionCallback> {
BeforeTransactionCompletionProcessQueue(SharedSessionContractImplementor session) {
super( session );
}
void beforeTransactionCompletion() {
BeforeCompletionCallback process;
while ( (process = processes.poll()) != null ) {
try {
process.doBeforeTransactionCompletion( session );
}
catch (HibernateException he) {
throw he;
}
catch (Exception e) {
throw new HibernateException(
"Unable to perform beforeTransactionCompletion callback: " + e.getMessage(), e );
}
}
}
}
| BeforeTransactionCompletionProcessQueue |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java | {
"start": 4186,
"end": 7851
} | class ____ {
final Iterator<Entry.Option> optionsIterator;
Entry.Option current;
private ShardOptions(Iterator<Entry.Option> optionsIterator) {
assert optionsIterator.hasNext();
this.optionsIterator = optionsIterator;
this.current = optionsIterator.next();
assert this.current.getDoc().shardIndex != -1 : "shardIndex is not set";
}
boolean advanceToNextOption() {
if (optionsIterator.hasNext()) {
current = optionsIterator.next();
return true;
} else {
return false;
}
}
}
@Override
public CompletionSuggestion reduce(List<Suggest.Suggestion<Entry>> toReduce) {
if (toReduce.isEmpty()) {
return null;
} else {
final CompletionSuggestion leader = (CompletionSuggestion) toReduce.get(0);
final Entry leaderEntry = leader.getEntries().get(0);
final String name = leader.getName();
int size = leader.getSize();
if (toReduce.size() == 1) {
return leader;
} else {
// combine suggestion entries from participating shards on the coordinating node
// the global top <code>size</code> entries are collected from the shard results
// using a priority queue
OptionPriorityQueue pq = new OptionPriorityQueue(toReduce.size());
for (Suggest.Suggestion<Entry> suggestion : toReduce) {
assert suggestion.getName().equals(name) : "name should be identical across all suggestions";
Iterator<Entry.Option> it = ((CompletionSuggestion) suggestion).getOptions().iterator();
if (it.hasNext()) {
pq.add(new ShardOptions(it));
}
}
// Dedup duplicate suggestions (based on the surface form) if skip duplicates is activated
final CharArraySet seenSurfaceForms = leader.skipDuplicates ? new CharArraySet(leader.getSize(), false) : null;
final Entry entry = new Entry(leaderEntry.getText(), leaderEntry.getOffset(), leaderEntry.getLength());
final List<Entry.Option> options = entry.getOptions();
while (pq.size() > 0) {
ShardOptions top = pq.top();
Entry.Option current = top.current;
if (top.advanceToNextOption()) {
pq.updateTop();
} else {
// options exhausted for this shard
pq.pop();
}
if (leader.skipDuplicates == false || seenSurfaceForms.add(current.getText().toString())) {
options.add(current);
if (options.size() >= size) {
break;
}
}
}
final CompletionSuggestion suggestion = new CompletionSuggestion(leader.getName(), leader.getSize(), leader.skipDuplicates);
suggestion.addTerm(entry);
return suggestion;
}
}
}
public void setShardIndex(int shardIndex) {
if (entries.isEmpty() == false) {
for (Entry.Option option : getOptions()) {
option.setShardIndex(shardIndex);
}
}
}
@Override
protected Entry newEntry(StreamInput in) throws IOException {
return new Entry(in);
}
public static final | ShardOptions |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/tags/HtmlEscapingAwareTag.java | {
"start": 1546,
"end": 3671
} | class ____ extends RequestContextAwareTag {
private @Nullable Boolean htmlEscape;
/**
* Set HTML escaping for this tag, as boolean value.
* Overrides the default HTML escaping setting for the current page.
* @see HtmlEscapeTag#setDefaultHtmlEscape
*/
public void setHtmlEscape(boolean htmlEscape) throws JspException {
this.htmlEscape = htmlEscape;
}
/**
* Return the HTML escaping setting for this tag,
* or the default setting if not overridden.
* @see #isDefaultHtmlEscape()
*/
protected boolean isHtmlEscape() {
if (this.htmlEscape != null) {
return this.htmlEscape;
}
else {
return isDefaultHtmlEscape();
}
}
/**
* Return the applicable default HTML escape setting for this tag.
* <p>The default implementation checks the RequestContext's setting,
* falling back to {@code false} in case of no explicit default given.
* @see #getRequestContext()
*/
protected boolean isDefaultHtmlEscape() {
return getRequestContext().isDefaultHtmlEscape();
}
/**
* Return the applicable default for the use of response encoding with
* HTML escaping for this tag.
* <p>The default implementation checks the RequestContext's setting,
* falling back to {@code false} in case of no explicit default given.
* @since 4.1.2
* @see #getRequestContext()
*/
protected boolean isResponseEncodedHtmlEscape() {
return getRequestContext().isResponseEncodedHtmlEscape();
}
/**
* HTML-encodes the given String, only if the "htmlEscape" setting is enabled.
* <p>The response encoding will be taken into account if the
* "responseEncodedHtmlEscape" setting is enabled as well.
* @param content the String to escape
* @return the escaped String
* @since 4.1.2
* @see #isHtmlEscape()
* @see #isResponseEncodedHtmlEscape()
*/
protected String htmlEscape(String content) {
String out = content;
if (isHtmlEscape()) {
if (isResponseEncodedHtmlEscape()) {
out = HtmlUtils.htmlEscape(content, this.pageContext.getResponse().getCharacterEncoding());
}
else {
out = HtmlUtils.htmlEscape(content);
}
}
return out;
}
}
| HtmlEscapingAwareTag |
java | micronaut-projects__micronaut-core | management/src/main/java/io/micronaut/management/endpoint/threads/ThreadInfoMapper.java | {
"start": 1132,
"end": 1464
} | interface ____<T> {
/**
* Converts the given {@link ThreadInfo} objects into any other
* object to be used for serialization.
*
* @param threads The thread publisher
* @return A publisher of objects to be serialized.
*/
Publisher<T> mapThreadInfo(Publisher<ThreadInfo> threads);
}
| ThreadInfoMapper |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_1681/Target.java | {
"start": 573,
"end": 952
} | class ____ {
private String builderValue;
public String getBuilderValue() {
return builderValue;
}
public Builder builderValue(String builderValue) {
this.builderValue = builderValue;
return this;
}
public Target build() {
return new Target( builderValue );
}
}
}
| Builder |
java | apache__camel | components/camel-weather/src/main/java/org/apache/camel/component/weather/WeatherProducer.java | {
"start": 1251,
"end": 3715
} | class ____ extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(WeatherProducer.class);
private final String query;
public WeatherProducer(WeatherEndpoint endpoint, String query) {
super(endpoint);
this.query = query;
}
@Override
public WeatherEndpoint getEndpoint() {
return (WeatherEndpoint) super.getEndpoint();
}
@Override
public void process(Exchange exchange) throws Exception {
String q = query;
String location = exchange.getIn().getHeader(WeatherConstants.WEATHER_LOCATION, String.class);
if (location != null) {
q = getEndpoint().getWeatherQuery().getQuery(location);
}
HttpClient httpClient = getEndpoint().getConfiguration().getHttpClient();
String uri = q;
HttpGet method = new HttpGet(uri);
httpClient.execute(
method,
response -> {
try {
LOG.debug("Going to execute the Weather query {}", uri);
if (HttpStatus.SC_OK != response.getCode()) {
throw new IllegalStateException(
"Got the invalid http status value '" + response.getCode()
+ "' as the result of the query '" + query + "'");
}
String weather = EntityUtils.toString(response.getEntity(), "UTF-8");
LOG.debug("Got back the Weather information {}", weather);
if (ObjectHelper.isEmpty(weather)) {
throw new IllegalStateException(
"Got the unexpected value '" + weather + "' as the result of the query '" + uri + "'");
}
String header = getEndpoint().getConfiguration().getHeaderName();
if (header != null) {
exchange.getIn().setHeader(header, weather);
} else {
exchange.getIn().setBody(weather);
}
exchange.getIn().setHeader(WeatherConstants.WEATHER_QUERY, uri);
return null;
} finally {
method.reset();
}
});
}
}
| WeatherProducer |
java | quarkusio__quarkus | independent-projects/bootstrap/core/src/main/java/io/quarkus/bootstrap/jbang/JBangDevModeLauncherImpl.java | {
"start": 1499,
"end": 9285
} | class ____ implements Closeable {
public static Closeable main(String... args) {
System.clearProperty("quarkus.dev"); //avoid unknown config key warnings
System.setProperty("java.util.logging.manager", "org.jboss.logmanager.LogManager");
URL url = JBangDevModeLauncherImpl.class.getClassLoader().getResource("jbang-dev.dat");
String jarFilePath = url.getPath().substring(5, url.getPath().indexOf("!"));
try (DataInputStream contextStream = new DataInputStream(
JBangDevModeLauncherImpl.class.getClassLoader().getResourceAsStream("jbang-dev.dat"))) {
String pomContents = contextStream.readUTF();
Path sourceFile = Paths.get(contextStream.readUTF());
int depCount = contextStream.readInt();
Map<String, Path> deps = new HashMap<>();
for (int i = 0; i < depCount; ++i) {
String name = contextStream.readUTF();
Path path = Paths.get(contextStream.readUTF());
deps.put(name, path);
}
Path projectRoot = Files.createTempDirectory("quarkus-jbang");
try (OutputStream out = Files.newOutputStream(projectRoot.resolve("pom.xml"))) {
out.write(pomContents.getBytes(StandardCharsets.UTF_8));
}
Path targetClasses = projectRoot.resolve("target/classes");
Files.createDirectories(targetClasses);
try (ZipFile fz = new ZipFile(new File(jarFilePath))) {
Enumeration<? extends ZipEntry> entries = fz.entries();
while (entries.hasMoreElements()) {
ZipEntry entry = entries.nextElement();
Path path = targetClasses.resolve(entry.getName()).normalize();
if (!path.startsWith(targetClasses)) {
throw new IOException("Bad ZIP entry: " + path);
}
if (entry.isDirectory()) {
Files.createDirectories(path);
} else {
Files.createDirectories(path.getParent());
Files.copy(fz.getInputStream(entry), path);
Files.setLastModifiedTime(path, entry.getLastModifiedTime());
}
}
}
Path srcDir = projectRoot.resolve("src/main/java");
Files.createDirectories(srcDir);
Path source = Files.createSymbolicLink(srcDir.resolve(sourceFile.getFileName().toString()), sourceFile);
final LocalProject currentProject = LocalProject.loadWorkspace(projectRoot);
final ResolvedDependency appArtifact = ResolvedDependencyBuilder.newInstance()
.setCoords(currentProject.getAppArtifact(ArtifactCoords.TYPE_JAR))
.setResolvedPath(targetClasses)
.setWorkspaceModule(currentProject.toWorkspaceModule())
.build();
Properties configurationProperties = getConfigurationProperties(source);
//todo : proper support for everything
final QuarkusBootstrap.Builder builder = QuarkusBootstrap.builder()
.setBaseClassLoader(JBangDevModeLauncherImpl.class.getClassLoader())
.setIsolateDeployment(true)
.setMode(QuarkusBootstrap.Mode.DEV)
.setTargetDirectory(targetClasses)
.setAppArtifact(appArtifact)
.setManagingProject(ArtifactCoords.pom("io.quarkus", "quarkus-bom", getQuarkusVersion()))
.setForcedDependencies(deps.entrySet().stream().map(s -> {
String[] parts = s.getKey().split(":");
Dependency artifact;
if (parts.length == 3) {
artifact = new ArtifactDependency(parts[0], parts[1], null, ArtifactCoords.TYPE_JAR, parts[2]);
} else if (parts.length == 4) {
artifact = new ArtifactDependency(parts[0], parts[1], null, parts[2], parts[3]);
} else if (parts.length == 5) {
artifact = new ArtifactDependency(parts[0], parts[1], parts[3], parts[2], parts[4]);
} else {
throw new RuntimeException("Invalid artifact " + s);
}
//artifact.setPath(s.getValue());
return artifact;
}).collect(Collectors.toList()))
.setApplicationRoot(targetClasses)
.setProjectRoot(projectRoot)
.setBuildSystemProperties(configurationProperties)
.setRuntimeProperties(configurationProperties);
Map<String, Object> context = new HashMap<>();
context.put("app-project", currentProject);
context.put("args", args);
context.put("app-classes", targetClasses);
final BootstrapMavenContext mvnCtx = new BootstrapMavenContext(
BootstrapMavenContext.config().setCurrentProject(currentProject));
final MavenArtifactResolver mvnResolver = new MavenArtifactResolver(mvnCtx);
builder.setMavenArtifactResolver(mvnResolver);
((ResolvedArtifactDependency) currentProject.getAppArtifact(ArtifactCoords.TYPE_JAR))
.setResolvedPaths(PathList.of(targetClasses));
final CuratedApplication curatedApp = builder.build().bootstrap();
final Object appInstance = curatedApp.runInAugmentClassLoader("io.quarkus.deployment.dev.IDEDevModeMain", context);
return new JBangDevModeLauncherImpl(curatedApp,
appInstance == null ? null : appInstance instanceof Closeable ? (Closeable) appInstance : null);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private final CuratedApplication curatedApp;
private final Closeable runningApp;
private JBangDevModeLauncherImpl(CuratedApplication curatedApp, Closeable runningApp) {
this.curatedApp = curatedApp;
this.runningApp = runningApp;
}
@Override
public void close() throws IOException {
try {
if (runningApp != null) {
runningApp.close();
}
} finally {
if (curatedApp != null) {
curatedApp.close();
}
}
}
private static String getQuarkusVersion() {
try (InputStream in = JBangDevModeLauncherImpl.class.getClassLoader().getResourceAsStream("quarkus-version.txt")) {
ByteArrayOutputStream out = new ByteArrayOutputStream();
byte[] buf = new byte[10];
int r;
while ((r = in.read(buf)) > 0) {
out.write(buf, 0, r);
}
return new String(out.toByteArray(), StandardCharsets.UTF_8);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private static Properties getConfigurationProperties(final Path source) throws IOException {
Properties properties = new Properties();
for (String line : Files.readAllLines(source)) {
if (line.startsWith("//Q:CONFIG")) {
String conf = line.substring(10).trim();
int equals = conf.indexOf("=");
if (equals == -1) {
throw new RuntimeException("invalid config " + line);
}
properties.setProperty(conf.substring(0, equals), conf.substring(equals + 1));
}
}
return properties;
}
}
| JBangDevModeLauncherImpl |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBTimeOrderedSessionSchemaWithIndexSegmentedBytesStoreTest.java | {
"start": 857,
"end": 1089
} | class ____ extends AbstractDualSchemaRocksDBSegmentedBytesStoreTest {
@Override
SchemaType schemaType() {
return SchemaType.SessionSchemaWithIndex;
}
}
| RocksDBTimeOrderedSessionSchemaWithIndexSegmentedBytesStoreTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/time/ZoneIdOfZTest.java | {
"start": 1182,
"end": 1509
} | class ____ {
private static final ZoneId NYC = ZoneId.of("America/New_York");
}
""")
.doTest();
}
@Test
public void zoneIdOfZ() {
helper
.addSourceLines(
"TestClass.java",
"""
import java.time.ZoneId;
public | TestClass |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/type/ByteTypeHandler.java | {
"start": 851,
"end": 1758
} | class ____ extends BaseTypeHandler<Byte> {
public static final ByteTypeHandler INSTANCE = new ByteTypeHandler();
@Override
public void setNonNullParameter(PreparedStatement ps, int i, Byte parameter, JdbcType jdbcType) throws SQLException {
ps.setByte(i, parameter);
}
@Override
public Byte getNullableResult(ResultSet rs, String columnName) throws SQLException {
byte result = rs.getByte(columnName);
return result == 0 && rs.wasNull() ? null : result;
}
@Override
public Byte getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
byte result = rs.getByte(columnIndex);
return result == 0 && rs.wasNull() ? null : result;
}
@Override
public Byte getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {
byte result = cs.getByte(columnIndex);
return result == 0 && cs.wasNull() ? null : result;
}
}
| ByteTypeHandler |
java | quarkusio__quarkus | core/runtime/src/main/java/io/quarkus/runtime/init/InitRuntimeConfig.java | {
"start": 985,
"end": 1236
} | class ____ implements Converter<Boolean> {
@Override
public Boolean convert(final String value) throws IllegalArgumentException, NullPointerException {
return BOOLEAN_CONVERTER.convert(value);
}
}
}
| BooleanConverter |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/task/SimpleAsyncTaskSchedulerCustomizer.java | {
"start": 914,
"end": 1160
} | interface ____ {
/**
* Callback to customize a {@link SimpleAsyncTaskScheduler} instance.
* @param taskScheduler the task scheduler to customize
*/
void customize(SimpleAsyncTaskScheduler taskScheduler);
}
| SimpleAsyncTaskSchedulerCustomizer |
java | redisson__redisson | redisson/src/main/java/org/redisson/client/protocol/decoder/ListObjectDecoder.java | {
"start": 904,
"end": 1560
} | class ____<T> implements MultiDecoder<T> {
private final int index;
public ListObjectDecoder(int index) {
super();
this.index = index;
}
@Override
public Decoder<Object> getDecoder(Codec codec, int paramNum, State state, long size) {
if (paramNum == 0) {
return StringCodec.INSTANCE.getValueDecoder();
}
return MultiDecoder.super.getDecoder(codec, paramNum, state, size);
}
@Override
public T decode(List<Object> parts, State state) {
if (parts.isEmpty()) {
return null;
}
return (T) parts.get(index);
}
}
| ListObjectDecoder |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sql/spi/NativeSelectQueryPlan.java | {
"start": 343,
"end": 426
} | interface ____<T> extends SelectQueryPlan<T>, NativeQueryPlan {
}
| NativeSelectQueryPlan |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/persister/spi/PersisterClassResolver.java | {
"start": 650,
"end": 891
} | class ____ by the installed {@link PersisterClassResolver}</li>
* <li>the default provider as chosen by Hibernate Core (best choice most of the time)</li>
* </ol>
*
* @author Emmanuel Bernard
* @author Steve Ebersole
*/
public | returned |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/consumer/ConsumerGroupMetadata.java | {
"start": 1416,
"end": 2141
} | interface ____ Kafka 5.0.
*/
@Deprecated(since = "4.2", forRemoval = true)
public ConsumerGroupMetadata(String groupId,
int generationId,
String memberId,
Optional<String> groupInstanceId) {
this.groupId = Objects.requireNonNull(groupId, "group.id can't be null");
this.generationId = generationId;
this.memberId = Objects.requireNonNull(memberId, "member.id can't be null");
this.groupInstanceId = Objects.requireNonNull(groupInstanceId, "group.instance.id can't be null");
}
/**
* @deprecated Since 4.2, please use {@link KafkaConsumer#groupMetadata()} instead. This | in |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/validation/AbstractNormalScopedFinalTest.java | {
"start": 1196,
"end": 1403
} | class ____ {
@Produces
@ApplicationScoped
public FinalFoo createFoo() {
return new FinalFoo();
}
}
@ApplicationScoped
static | MethodProducerWithFinalClass |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/ext/javatime/util/DurationUnitConverter.java | {
"start": 807,
"end": 3388
} | class ____ {
final Function<Duration, Long> serializer;
final Function<Long, Duration> deserializer;
DurationSerialization(
Function<Duration, Long> serializer,
Function<Long, Duration> deserializer) {
this.serializer = serializer;
this.deserializer = deserializer;
}
static Function<Long, Duration> deserializer(TemporalUnit unit) {
return v -> Duration.of(v, unit);
}
}
private final static Map<String, DurationSerialization> UNITS;
static {
Map<String, DurationSerialization> units = new LinkedHashMap<>();
units.put(ChronoUnit.NANOS.name(), new DurationSerialization(Duration::toNanos, deserializer(ChronoUnit.NANOS)));
units.put(ChronoUnit.MICROS.name(), new DurationSerialization(d -> d.toNanos() / 1000, deserializer(ChronoUnit.MICROS)));
units.put(ChronoUnit.MILLIS.name(), new DurationSerialization(Duration::toMillis, deserializer(ChronoUnit.MILLIS)));
units.put(ChronoUnit.SECONDS.name(), new DurationSerialization(Duration::getSeconds, deserializer(ChronoUnit.SECONDS)));
units.put(ChronoUnit.MINUTES.name(), new DurationSerialization(Duration::toMinutes, deserializer(ChronoUnit.MINUTES)));
units.put(ChronoUnit.HOURS.name(), new DurationSerialization(Duration::toHours, deserializer(ChronoUnit.HOURS)));
units.put(ChronoUnit.HALF_DAYS.name(), new DurationSerialization(d -> d.toHours() / 12, deserializer(ChronoUnit.HALF_DAYS)));
units.put(ChronoUnit.DAYS.name(), new DurationSerialization(Duration::toDays, deserializer(ChronoUnit.DAYS)));
UNITS = units;
}
final DurationSerialization serialization;
DurationUnitConverter(DurationSerialization serialization) {
this.serialization = serialization;
}
public Duration convert(long value) {
return serialization.deserializer.apply(value);
}
public long convert(Duration duration) {
return serialization.serializer.apply(duration);
}
/**
* @return Description of all allowed valued as a sequence of
* double-quoted values separated by comma
*/
public static String descForAllowed() {
return "\"" + UNITS.keySet().stream()
.collect(Collectors.joining("\", \""))
+ "\"";
}
public static DurationUnitConverter from(String unit) {
DurationSerialization def = UNITS.get(unit);
return (def == null) ? null : new DurationUnitConverter(def);
}
}
| DurationSerialization |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/sqlprovider/BaseMapper.java | {
"start": 3233,
"end": 3349
} | interface ____ {
String tableName();
}
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
@ | Meta |
java | redisson__redisson | redisson-spring-data/redisson-spring-data-40/src/main/java/org/redisson/spring/data/connection/RedissonReactiveHashCommands.java | {
"start": 2873,
"end": 25568
} | class ____ extends RedissonBaseReactive implements ReactiveHashCommands {
RedissonReactiveHashCommands(CommandReactiveExecutor executorService) {
super(executorService);
}
private static final RedisCommand<String> HMSET = new RedisCommand<String>("HMSET");
@Override
public Flux<BooleanResponse<HSetCommand>> hSet(Publisher<HSetCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notNull(command.getFieldValueMap(), "FieldValueMap must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
if (command.getFieldValueMap().size() == 1) {
Entry<ByteBuffer, ByteBuffer> entry = command.getFieldValueMap().entrySet().iterator().next();
byte[] mapKeyBuf = toByteArray(entry.getKey());
byte[] mapValueBuf = toByteArray(entry.getValue());
RedisCommand<Boolean> cmd = RedisCommands.HSETNX;
if (command.isUpsert()) {
cmd = RedisCommands.HSET;
}
Mono<Boolean> m = write(keyBuf, StringCodec.INSTANCE, cmd, keyBuf, mapKeyBuf, mapValueBuf);
return m.map(v -> new BooleanResponse<>(command, v));
} else {
List<Object> params = new ArrayList<Object>(command.getFieldValueMap().size()*2 + 1);
params.add(keyBuf);
for (Entry<ByteBuffer, ByteBuffer> entry : command.getFieldValueMap().entrySet()) {
params.add(toByteArray(entry.getKey()));
params.add(toByteArray(entry.getValue()));
}
Mono<String> m = write(keyBuf, StringCodec.INSTANCE, HMSET, params.toArray());
return m.map(v -> new BooleanResponse<>(command, true));
}
});
}
private static final RedisCommand<List<Object>> HMGET = new RedisCommand<List<Object>>("HMGET", new MultiDecoder<List<Object>>() {
@Override
public List<Object> decode(List<Object> parts, State state) {
List<Object> list = parts.stream().filter(e -> e != null).collect(Collectors.toList());
if (list.isEmpty()) {
return null;
}
return parts;
}
});
@Override
public Flux<MultiValueResponse<HGetCommand, ByteBuffer>> hMGet(Publisher<HGetCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notNull(command.getFields(), "Fields must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
List<Object> args = new ArrayList<Object>(command.getFields().size() + 1);
args.add(keyBuf);
args.addAll(command.getFields().stream().map(buf -> toByteArray(buf)).collect(Collectors.toList()));
Mono<List<byte[]>> m = read(keyBuf, ByteArrayCodec.INSTANCE, HMGET, args.toArray());
return m.map(v -> {
List<ByteBuffer> values = v.stream().map(array -> {
if (array != null) {
return ByteBuffer.wrap(array);
}
return null;
}).collect(Collectors.toList());
return new MultiValueResponse<>(command, values);
});
});
}
@Override
public Flux<BooleanResponse<HExistsCommand>> hExists(Publisher<HExistsCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notNull(command.getField(), "Field must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
byte[] fieldBuf = toByteArray(command.getField());
Mono<Boolean> m =read(keyBuf, StringCodec.INSTANCE, RedisCommands.HEXISTS, keyBuf, fieldBuf);
return m.map(v -> new BooleanResponse<>(command, v));
});
}
@Override
public Flux<NumericResponse<HDelCommand, Long>> hDel(Publisher<HDelCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notNull(command.getFields(), "Fields must not be null!");
List<Object> args = new ArrayList<Object>(command.getFields().size() + 1);
args.add(toByteArray(command.getKey()));
args.addAll(command.getFields().stream().map(v -> toByteArray(v)).collect(Collectors.toList()));
Mono<Long> m = write((byte[])args.get(0), StringCodec.INSTANCE, RedisCommands.HDEL, args.toArray());
return m.map(v -> new NumericResponse<>(command, v));
});
}
@Override
public Flux<NumericResponse<KeyCommand, Long>> hLen(Publisher<KeyCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
Mono<Long> m = read(keyBuf, StringCodec.INSTANCE, RedisCommands.HLEN_LONG, keyBuf);
return m.map(v -> new NumericResponse<>(command, v));
});
}
@Override
public Flux<CommandResponse<KeyCommand, Flux<ByteBuffer>>> hKeys(Publisher<KeyCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
Mono<Set<byte[]>> m = read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.HKEYS, keyBuf);
return m.map(v -> new CommandResponse<>(command, Flux.fromIterable(v).map(e -> ByteBuffer.wrap(e))));
});
}
@Override
public Flux<CommandResponse<KeyCommand, Flux<ByteBuffer>>> hVals(Publisher<KeyCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
Mono<List<byte[]>> m = read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.HVALS, keyBuf);
return m.map(v -> new CommandResponse<>(command, Flux.fromIterable(v).map(e -> ByteBuffer.wrap(e))));
});
}
@Override
public Flux<CommandResponse<KeyCommand, Flux<Entry<ByteBuffer, ByteBuffer>>>> hGetAll(
Publisher<KeyCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
Mono<Map<byte[], byte[]>> m = read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.HGETALL, keyBuf);
Mono<Map<ByteBuffer, ByteBuffer>> f = m.map(v -> v.entrySet().stream().collect(Collectors.toMap(e -> ByteBuffer.wrap(e.getKey()), e -> ByteBuffer.wrap(e.getValue()))));
return f.map(v -> new CommandResponse<>(command, Flux.fromIterable(v.entrySet())));
});
}
@Override
public Flux<CommandResponse<KeyCommand, Flux<Entry<ByteBuffer, ByteBuffer>>>> hScan(
Publisher<KeyScanCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notNull(command.getOptions(), "ScanOptions must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
Flux<Entry<Object, Object>> flux = Flux.create(new MapReactiveIterator<Object, Object, Entry<Object, Object>>(null, null, 0) {
@Override
public RFuture<ScanResult<Object>> scanIterator(RedisClient client, String nextIterPos) {
if (command.getOptions().getPattern() == null) {
return executorService.readAsync(client, keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.HSCAN,
keyBuf, nextIterPos, "COUNT", Optional.ofNullable(command.getOptions().getCount()).orElse(10L));
}
return executorService.readAsync(client, keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.HSCAN,
keyBuf, nextIterPos, "MATCH", command.getOptions().getPattern(),
"COUNT", Optional.ofNullable(command.getOptions().getCount()).orElse(10L));
}
});
Flux<Entry<ByteBuffer, ByteBuffer>> f = flux.map(v -> Collections.singletonMap(ByteBuffer.wrap((byte[])v.getKey()), ByteBuffer.wrap((byte[])v.getValue())).entrySet().iterator().next());
return Mono.just(new CommandResponse<>(command, f));
});
}
private static final RedisCommand<Long> HSTRLEN = new RedisCommand<Long>("HSTRLEN");
@Override
public Flux<NumericResponse<HStrLenCommand, Long>> hStrLen(Publisher<HStrLenCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notNull(command.getField(), "Field must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
byte[] fieldBuf = toByteArray(command.getField());
Mono<Long> m = read(keyBuf, StringCodec.INSTANCE, HSTRLEN, keyBuf, fieldBuf);
return m.map(v -> new NumericResponse<>(command, v));
});
}
@Override
public Flux<CommandResponse<HRandFieldCommand, Flux<ByteBuffer>>> hRandField(Publisher<HRandFieldCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
Mono<Set<byte[]>> m;
if (command.getCount() > 0) {
m = read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.HRANDFIELD_KEYS, keyBuf, command.getCount());
} else {
m = read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.HRANDFIELD_KEYS, keyBuf);
}
return m.map(v -> new CommandResponse<>(command, Flux.fromIterable(v).map(e -> ByteBuffer.wrap(e))));
});
}
@Override
public Flux<CommandResponse<HRandFieldCommand, Flux<Entry<ByteBuffer, ByteBuffer>>>> hRandFieldWithValues(Publisher<HRandFieldCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
Mono<Map<byte[], byte[]>> m;
if (command.getCount() > 0) {
m = read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.HRANDFIELD, keyBuf, command.getCount());
} else {
m = read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.HRANDFIELD, keyBuf);
}
Mono<Map<ByteBuffer, ByteBuffer>> f = m.map(v -> v.entrySet().stream().collect(Collectors.toMap(e -> ByteBuffer.wrap(e.getKey()), e -> ByteBuffer.wrap(e.getValue()))));
return f.map(v -> new CommandResponse<>(command, Flux.fromIterable(v.entrySet())));
});
}
private static final RedisCommand<List<Long>> HEXPIRE = new RedisCommand<>("HEXPIRE", new ObjectListReplayDecoder<>());
@Override
public Flux<NumericResponse<HashExpireCommand, Long>> applyHashFieldExpiration(Publisher<HashExpireCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notNull(command.getExpiration(), "Expiration must not be null!");
Assert.notEmpty(command.getFields(), "Fields must not be empty!");
byte[] keyBuf = toByteArray(command.getKey());
List<Object> args = new ArrayList<>();
args.add(keyBuf);
args.add(command.getExpiration().getExpirationTimeInSeconds());
if (command.getOptions() != null
&& command.getOptions().getCondition() != ExpirationOptions.Condition.ALWAYS) {
args.add(command.getOptions().getCondition().name());
}
args.add("FIELDS");
args.add(command.getFields().size());
args.addAll(command.getFields().stream().map(buf -> toByteArray(buf)).collect(Collectors.toList()));
Mono<List<Long>> result = write(keyBuf, LongCodec.INSTANCE, HEXPIRE, args.toArray());
return result.flatMapMany(Flux::fromIterable)
.map(value -> new NumericResponse<>(command, value));
});
}
private static final RedisStrictCommand<List<Long>> HPERSIST = new RedisStrictCommand<>("HPERSIST", new ObjectListReplayDecoder<>());
@Override
public Flux<NumericResponse<HashFieldsCommand, Long>> hPersist(Publisher<HashFieldsCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notEmpty(command.getFields(), "Fields must not be empty!");
byte[] keyBuf = toByteArray(command.getKey());
List<Object> args = new ArrayList<>();
args.add(keyBuf);
args.add("FIELDS");
args.add(command.getFields().size());
args.addAll(command.getFields().stream().map(buf -> toByteArray(buf)).collect(Collectors.toList()));
Mono<List<Long>> result = write(keyBuf, LongCodec.INSTANCE, HPERSIST, args.toArray());
return result.flatMapMany(Flux::fromIterable)
.map(value -> new NumericResponse<>(command, value));
});
}
private static final RedisCommand<List<Long>> HTTL = new RedisCommand<>("HTTL", new ObjectListReplayDecoder<>());
@Override
public Flux<NumericResponse<HashFieldsCommand, Long>> hTtl(Publisher<HashFieldsCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notEmpty(command.getFields(), "Fields must not be empty!");
byte[] keyBuf = toByteArray(command.getKey());
List<Object> args = new ArrayList<>();
args.add(keyBuf);
args.add("FIELDS");
args.add(command.getFields().size());
args.addAll(command.getFields().stream().map(buf -> toByteArray(buf)).collect(Collectors.toList()));
Mono<List<Long>> result = read(keyBuf, LongCodec.INSTANCE, HTTL, args.toArray());
return result.flatMapMany(Flux::fromIterable)
.map(value -> new NumericResponse<>(command, value));
});
}
private static final RedisCommand<List<Long>> HPTTL = new RedisCommand<>("HPTTL", new ObjectListReplayDecoder<>());
@Override
public Flux<NumericResponse<HashFieldsCommand, Long>> hpTtl(Publisher<HashFieldsCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notEmpty(command.getFields(), "Fields must not be empty!");
byte[] keyBuf = toByteArray(command.getKey());
List<Object> args = new ArrayList<>();
args.add(keyBuf);
args.add("FIELDS");
args.add(command.getFields().size());
args.addAll(command.getFields().stream().map(buf -> toByteArray(buf)).collect(Collectors.toList()));
Mono<List<Long>> result = read(keyBuf, LongCodec.INSTANCE, HPTTL, args.toArray());
return result.flatMapMany(Flux::fromIterable)
.map(value -> new NumericResponse<>(command, value));
});
}
private static final RedisCommand<List<Object>> HGETDEL = new RedisCommand<>("HGETDEL", new MultiDecoder<List<Object>>() {
@Override
public List<Object> decode(List<Object> parts, State state) {
List<Object> list = parts.stream().filter(e -> e != null).collect(Collectors.toList());
if (list.isEmpty()) {
return null;
}
return parts;
}
});
@Override
public Flux<MultiValueResponse<HGetDelCommand, ByteBuffer>> hGetDel(Publisher<HGetDelCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notNull(command.getFields(), "Fields must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
List<Object> args = new ArrayList<Object>(command.getFields().size() + 1);
args.add(keyBuf);
args.add("FIELDS");
args.add(command.getFields().size());
args.addAll(command.getFields().stream().map(buf -> toByteArray(buf)).collect(Collectors.toList()));
Mono<List<byte[]>> m = write(keyBuf, ByteArrayCodec.INSTANCE, HGETDEL, args.toArray());
return m.map(v -> {
List<ByteBuffer> values = v.stream().map(array -> {
if (array != null) {
return ByteBuffer.wrap(array);
}
return null;
}).collect(Collectors.toList());
return new MultiValueResponse<>(command, values);
});
});
}
private static final RedisCommand<List<Object>> HGETEX = new RedisCommand<>("HGETEX", new MultiDecoder<List<Object>>() {
@Override
public List<Object> decode(List<Object> parts, State state) {
List<Object> list = parts.stream().filter(e -> e != null).collect(Collectors.toList());
if (list.isEmpty()) {
return null;
}
return parts;
}
});
@Override
public Flux<MultiValueResponse<HGetExCommand, ByteBuffer>> hGetEx(Publisher<HGetExCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notNull(command.getFields(), "Fields must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
List<Object> args = new ArrayList<Object>(command.getFields().size() + 1);
args.add(keyBuf);
if (command.getExpiration() != null) {
Expiration expiration = command.getExpiration();
if (expiration.isUnixTimestamp()) {
if (expiration.getTimeUnit() == TimeUnit.MILLISECONDS) {
args.add("PXAT");
args.add(expiration.getExpirationTimeInMilliseconds());
} else {
args.add("EXAT");
args.add(expiration.getExpirationTimeInSeconds());
}
} else if (!expiration.isPersistent()) {
if (expiration.getTimeUnit() == TimeUnit.MILLISECONDS) {
args.add("PX");
args.add(expiration.getExpirationTime());
} else {
args.add("EX");
args.add(expiration.getConverted(TimeUnit.SECONDS));
}
} else if (expiration.isPersistent()) {
args.add("PERSIST");
}
}
args.add("FIELDS");
args.add(command.getFields().size());
args.addAll(command.getFields().stream().map(buf -> toByteArray(buf)).collect(Collectors.toList()));
Mono<List<byte[]>> m = read(keyBuf, ByteArrayCodec.INSTANCE, HGETEX, args.toArray());
return m.map(v -> {
List<ByteBuffer> values = v.stream().map(array -> {
if (array != null) {
return ByteBuffer.wrap(array);
}
return null;
}).collect(Collectors.toList());
return new MultiValueResponse<>(command, values);
});
});
}
private static final RedisCommand<String> HSETEX = new RedisCommand<>("HSETEX");
@Override
public Flux<BooleanResponse<HSetExCommand>> hSetEx(Publisher<HSetExCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notNull(command.getFieldValueMap(), "FieldValueMap must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
List<Object> params = new ArrayList<Object>(command.getFieldValueMap().size()*2 + 1);
params.add(keyBuf);
if (command.getCondition() == RedisHashCommands.HashFieldSetOption.IF_NONE_EXIST) {
params.add("FNX");
}
if (command.getCondition() == RedisHashCommands.HashFieldSetOption.IF_ALL_EXIST) {
params.add("FXX");
}
if (command.getExpiration() != null) {
Expiration expiration = command.getExpiration();
if (expiration.isUnixTimestamp()) {
if (expiration.getTimeUnit() == TimeUnit.MILLISECONDS) {
params.add("PXAT");
params.add(expiration.getExpirationTimeInMilliseconds());
} else {
params.add("EXAT");
params.add(expiration.getExpirationTimeInSeconds());
}
} else if (!expiration.isPersistent()) {
if (expiration.isKeepTtl()) {
params.add("KEEPTTL");
} else if (expiration.getTimeUnit() == TimeUnit.MILLISECONDS) {
params.add("PX");
params.add(expiration.getExpirationTime());
} else {
params.add("EX");
params.add(expiration.getConverted(TimeUnit.SECONDS));
}
}
}
params.add("FIELDS");
params.add(command.getFieldValueMap().size());
for (Entry<ByteBuffer, ByteBuffer> entry : command.getFieldValueMap().entrySet()) {
params.add(toByteArray(entry.getKey()));
params.add(toByteArray(entry.getValue()));
}
Mono<String> m = write(keyBuf, StringCodec.INSTANCE, HSETEX, params.toArray());
return m.map(v -> new BooleanResponse<>(command, true));
});
}
}
| RedissonReactiveHashCommands |
java | apache__logging-log4j2 | log4j-perf-test/src/main/java/org/apache/logging/log4j/perf/jmh/JdbcAppenderBenchmark.java | {
"start": 2006,
"end": 7628
} | class ____ {
private Logger loggerH2;
private Logger loggerHSQLDB;
private Connection connectionHSQLDB;
private Connection connectionH2;
private final RuntimeException exception = new RuntimeException("Hello, world!");
@Setup
public void setup() throws Exception {
connectionHSQLDB = getConnectionHSQLDB();
connectionH2 = getConnectionH2();
createTable(connectionHSQLDB, toCreateTableSqlStringHQLDB("fmLogEntry"));
createTable(connectionH2, toCreateTableSqlStringH2("fmLogEntry"));
System.setProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY, "log4j2-jdbc-appender.xml");
final LoggerContext context = LoggerContext.getContext(false);
if (context.getConfiguration() instanceof DefaultConfiguration) {
context.reconfigure();
}
StatusLogger.getLogger().reset();
loggerH2 = LogManager.getLogger("H2Logger");
loggerHSQLDB = LogManager.getLogger("HSQLDBLogger");
}
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
@Benchmark
public void testThroughputH2Message(final Blackhole bh) {
loggerH2.info("Test message");
}
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
@Benchmark
public void testThroughputH2Exception(final Blackhole bh) {
loggerH2.warn("Test message", exception);
}
@BenchmarkMode(Mode.SampleTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@Benchmark
public void testResponseTimeH2Message(final Blackhole bh) {
loggerH2.info("Test message");
}
@BenchmarkMode(Mode.SampleTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@Benchmark
public void testResponseTimeH2Exception(final Blackhole bh) {
loggerH2.warn("Test message", exception);
}
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
@Benchmark
public void testThroughputHSQLDBMessage(final Blackhole bh) {
loggerHSQLDB.info("Test message");
}
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
@Benchmark
public void testThroughputHSQLDBException(final Blackhole bh) {
loggerHSQLDB.warn("Test message", exception);
}
@BenchmarkMode(Mode.SampleTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@Benchmark
public void testResponseTimeHSQLDBMessage(final Blackhole bh) {
loggerHSQLDB.info("Test message");
}
@BenchmarkMode(Mode.SampleTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@Benchmark
public void testResponseTimeHSQLDBException(final Blackhole bh) {
loggerHSQLDB.warn("Test message", exception);
}
@TearDown
public void tearDown() throws SQLException {
final LoggerContext context = LoggerContext.getContext(false);
try {
((JdbcAppender) context.getConfiguration().getAppender("H2Appender"))
.getManager()
.close();
((JdbcAppender) context.getConfiguration().getAppender("HSQLDBAppender"))
.getManager()
.close();
} finally {
System.clearProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY);
// context.reconfigure();
// StatusLogger.getLogger().reset();
Statement statement = null;
try {
statement = connectionHSQLDB.createStatement();
statement.execute("SHUTDOWN");
} catch (final SQLException ignore) {
// ignore
} finally {
Closer.closeSilently(statement);
Closer.closeSilently(connectionHSQLDB);
}
try {
statement = connectionH2.createStatement();
statement.execute("SHUTDOWN");
} catch (final SQLException ignore) {
// ignore
} finally {
Closer.closeSilently(statement);
Closer.closeSilently(connectionH2);
}
}
}
private void createTable(final Connection connection, final String createSQL) throws SQLException {
try (final Statement statement = connection.createStatement()) {
statement.executeUpdate(createSQL);
}
}
private String toCreateTableSqlStringH2(final String tableName) {
return "CREATE TABLE " + tableName + " ( "
+ "id INTEGER GENERATED ALWAYS AS IDENTITY, eventDate DATETIME, literalColumn VARCHAR(255), level NVARCHAR(10), "
+ "logger NVARCHAR(255), message VARCHAR(1024), exception NCLOB" + " )";
}
private String toCreateTableSqlStringHQLDB(final String tableName) {
return "CREATE TABLE " + tableName + " ( "
+ "id INTEGER IDENTITY, eventDate DATETIME, literalColumn VARCHAR(255), level VARCHAR(10), "
+ "logger VARCHAR(255), message VARCHAR(1024), exception CLOB" + " )";
}
/**
* Referred from log4j2-jdbc-appender.xml.
*/
@SuppressFBWarnings("DMI_EMPTY_DB_PASSWORD")
public static Connection getConnectionH2() throws Exception {
Class.forName("org.h2.Driver");
return DriverManager.getConnection("jdbc:h2:mem:Log4j", "sa", "");
}
/**
* Referred from log4j2-jdbc-appender.xml.
*/
@SuppressFBWarnings("DMI_EMPTY_DB_PASSWORD")
public static Connection getConnectionHSQLDB() throws Exception {
Class.forName("org.hsqldb.jdbcDriver");
return DriverManager.getConnection("jdbc:hsqldb:mem:Log4j", "sa", "");
}
}
| JdbcAppenderBenchmark |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/ClassTemplateInvocationTests.java | {
"start": 62627,
"end": 63144
} | class ____ implements ClassTemplateInvocationContextProvider {
@Override
public boolean supportsClassTemplate(ExtensionContext context) {
return false;
}
@Override
public Stream<ClassTemplateInvocationContext> provideClassTemplateInvocationContexts(
ExtensionContext context) {
throw new RuntimeException("should not be called");
}
}
}
@SuppressWarnings("JUnitMalformedDeclaration")
@ExtendWith(TwoInvocationsClassTemplateInvocationContextProvider.class)
@ClassTemplate
static | Ext |
java | apache__dubbo | dubbo-metadata/dubbo-metadata-definition-protobuf/src/test/java/org/apache/dubbo/metadata/definition/protobuf/model/GooglePB.java | {
"start": 136110,
"end": 150547
} | class ____ extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:org.apache.dubbo.metadata.definition.protobuf.model.PhoneNumber)
PhoneNumberOrBuilder {
private static final long serialVersionUID = 0L;
// Use PhoneNumber.newBuilder() to construct.
private PhoneNumber(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private PhoneNumber() {
number_ = "";
type_ = 1;
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private PhoneNumber(
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000001;
number_ = bs;
break;
}
case 16: {
int rawValue = input.readEnum();
org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneType value =
org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneType.valueOf(
rawValue);
if (value == null) {
unknownFields.mergeVarintField(2, rawValue);
} else {
bitField0_ |= 0x00000002;
type_ = rawValue;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return org.apache.dubbo.metadata.definition.protobuf.model.GooglePB
.internal_static_org_apache_dubbo_metadata_definition_protobuf_model_PhoneNumber_descriptor;
}
protected FieldAccessorTable internalGetFieldAccessorTable() {
return org.apache.dubbo.metadata.definition.protobuf.model.GooglePB
.internal_static_org_apache_dubbo_metadata_definition_protobuf_model_PhoneNumber_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneNumber.class,
org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneNumber.Builder.class);
}
private int bitField0_;
public static final int NUMBER_FIELD_NUMBER = 1;
private volatile java.lang.Object number_;
/**
* <code>required string number = 1;</code>
*/
public boolean hasNumber() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string number = 1;</code>
*/
public java.lang.String getNumber() {
java.lang.Object ref = number_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
number_ = s;
}
return s;
}
}
/**
* <code>required string number = 1;</code>
*/
public com.google.protobuf.ByteString getNumberBytes() {
java.lang.Object ref = number_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
number_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TYPE_FIELD_NUMBER = 2;
private int type_;
/**
* <code>optional .org.apache.dubbo.metadata.definition.protobuf.model.PhoneType type = 2 [default = HOME];</code>
*/
public boolean hasType() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .org.apache.dubbo.metadata.definition.protobuf.model.PhoneType type = 2 [default = HOME];</code>
*/
public org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneType getType() {
org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneType result =
org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneType.valueOf(type_);
return result == null
? org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneType.HOME
: result;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasNumber()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, number_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeEnum(2, type_);
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, number_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, type_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneNumber)) {
return super.equals(obj);
}
org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneNumber other =
(org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneNumber) obj;
boolean result = true;
result = result && (hasNumber() == other.hasNumber());
if (hasNumber()) {
result = result && getNumber().equals(other.getNumber());
}
result = result && (hasType() == other.hasType());
if (hasType()) {
result = result && type_ == other.type_;
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasNumber()) {
hash = (37 * hash) + NUMBER_FIELD_NUMBER;
hash = (53 * hash) + getNumber().hashCode();
}
if (hasType()) {
hash = (37 * hash) + TYPE_FIELD_NUMBER;
hash = (53 * hash) + type_;
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneNumber parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneNumber parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneNumber parseFrom(
com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneNumber parseFrom(
com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneNumber parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneNumber parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneNumber parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneNumber parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneNumber parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneNumber parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneNumber parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneNumber parseFrom(
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
org.apache.dubbo.metadata.definition.protobuf.model.GooglePB.PhoneNumber prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.dubbo.metadata.definition.protobuf.model.PhoneNumber}
*/
public static final | PhoneNumber |
java | apache__camel | dsl/camel-yaml-dsl/camel-yaml-dsl-common/src/main/java/org/apache/camel/dsl/yaml/common/exception/InvalidNodeTypeException.java | {
"start": 1051,
"end": 1809
} | class ____ extends MarkedYamlEngineException {
public InvalidNodeTypeException(Node node, NodeType expected) {
super(null, Optional.empty(), "Node type " + nodeTypeName(node.getNodeType())
+ " is invalid, expected " + nodeTypeName(expected),
node.getStartMark());
}
private static String nodeTypeName(NodeType nodeType) {
switch (nodeType) {
case ANCHOR:
return "anchor";
case MAPPING:
return "map";
case SCALAR:
return "key-value";
case SEQUENCE:
return "array";
default:
return nodeType.name();
}
}
}
| InvalidNodeTypeException |
java | quarkusio__quarkus | integration-tests/maven/src/test/resources-filtered/projects/multijar-module/beans/src/test/java/org/acme/testlib/mock/MockServiceProducer.java | {
"start": 199,
"end": 322
} | class ____ {
@Produces
public Service getMockService() {
return new MockService();
}
}
| MockServiceProducer |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SpillCallBackPathsFinder.java | {
"start": 1457,
"end": 6539
} | class ____ extends SpillCallBackInjector {
private static final Logger LOG =
LoggerFactory.getLogger(SpillCallBackPathsFinder.class);
/**
* Encrypted spilled files.
*/
private final Map<Path, Set<Long>> encryptedSpillFiles =
Collections.synchronizedMap(new ConcurrentHashMap<>());
/**
* Non-Encrypted spilled files.
*/
private final Map<Path, Set<Long>> spillFiles =
Collections.synchronizedMap(new ConcurrentHashMap<>());
/**
* Invalid position access.
*/
private final Map<Path, Set<Long>> invalidAccessMap =
Collections.synchronizedMap(new ConcurrentHashMap<>());
/**
* Index spill files.
*/
private final Set<Path> indexSpillFiles = ConcurrentHashMap.newKeySet();
/**
* Paths that were not found in the maps.
*/
private final Set<Path> negativeCache = ConcurrentHashMap.newKeySet();
protected Map<Path, Set<Long>> getFilesMap(Configuration config) {
if (CryptoUtils.isEncryptedSpillEnabled(config)) {
return encryptedSpillFiles;
}
return spillFiles;
}
@Override
public void writeSpillFileCB(Path path, FSDataOutputStream out,
Configuration conf) {
long outPos = out.getPos();
getFilesMap(conf)
.computeIfAbsent(path, p -> ConcurrentHashMap.newKeySet())
.add(outPos);
LOG.debug("writeSpillFileCB.. path:{}; pos:{}", path, outPos);
}
@Override
public void getSpillFileCB(Path path, InputStream is, Configuration conf) {
if (path == null) {
return;
}
Set<Long> pathEntries = getFilesMap(conf).get(path);
if (pathEntries != null) {
try {
long isPos = CryptoStreamUtils.getInputStreamOffset(is);
if (pathEntries.contains(isPos)) {
LOG.debug("getSpillFileCB... Path {}; Pos: {}", path, isPos);
return;
}
invalidAccessMap
.computeIfAbsent(path, p -> ConcurrentHashMap.newKeySet())
.add(isPos);
LOG.debug("getSpillFileCB... access incorrect position.. "
+ "Path {}; Pos: {}", path, isPos);
} catch (IOException e) {
LOG.error("Could not get inputStream position.. Path {}", path, e);
// do nothing
}
return;
}
negativeCache.add(path);
LOG.warn("getSpillFileCB.. Could not find spilled file .. Path: {}", path);
}
@Override
public String getSpilledFileReport() {
StringBuilder strBuilder =
new StringBuilder("\n++++++++ Spill Report ++++++++")
.append(dumpMapEntries("Encrypted Spilled Files",
encryptedSpillFiles))
.append(dumpMapEntries("Non-Encrypted Spilled Files",
spillFiles))
.append(dumpMapEntries("Invalid Spill Access",
invalidAccessMap))
.append("\n ----- Spilled Index Files ----- ")
.append(indexSpillFiles.size());
for (Path p : indexSpillFiles) {
strBuilder.append("\n\t index-path: ").append(p.toString());
}
strBuilder.append("\n ----- Negative Cache files ----- ")
.append(negativeCache.size());
for (Path p : negativeCache) {
strBuilder.append("\n\t path: ").append(p.toString());
}
return strBuilder.toString();
}
@Override
public void addSpillIndexFileCB(Path path, Configuration conf) {
if (path == null) {
return;
}
indexSpillFiles.add(path);
LOG.debug("addSpillIndexFileCB... Path: {}", path);
}
@Override
public void validateSpillIndexFileCB(Path path, Configuration conf) {
if (path == null) {
return;
}
if (indexSpillFiles.contains(path)) {
LOG.debug("validateSpillIndexFileCB.. Path: {}", path);
return;
}
LOG.warn("validateSpillIndexFileCB.. could not retrieve indexFile.. "
+ "Path: {}", path);
negativeCache.add(path);
}
public Set<Path> getEncryptedSpilledFiles() {
return Collections.unmodifiableSet(encryptedSpillFiles.keySet());
}
/**
* Gets the set of path:pos of the entries that were accessed incorrectly.
* @return a set of string in the format of {@literal Path[Pos]}
*/
public Set<String> getInvalidSpillEntries() {
Set<String> result = new LinkedHashSet<>();
for (Entry<Path, Set<Long>> spillMapEntry: invalidAccessMap.entrySet()) {
for (Long singleEntry : spillMapEntry.getValue()) {
result.add(String.format("%s[%d]",
spillMapEntry.getKey(), singleEntry));
}
}
return result;
}
private String dumpMapEntries(String label,
Map<Path, Set<Long>> entriesMap) {
StringBuilder strBuilder =
new StringBuilder(String.format("%n ----- %s ----- %d", label,
entriesMap.size()));
for (Entry<Path, Set<Long>> encryptedSpillEntry
: entriesMap.entrySet()) {
strBuilder.append(String.format("%n\t\tpath: %s",
encryptedSpillEntry.getKey()));
for (Long singlePos : encryptedSpillEntry.getValue()) {
strBuilder.append(String.format("%n\t\t\tentry: %d", singlePos));
}
}
return strBuilder.toString();
}
}
| SpillCallBackPathsFinder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/boot/database/qualfiedTableNaming/DefaultCatalogAndSchemaTest.java | {
"start": 49947,
"end": 50441
} | class ____
extends EntityWithTablePerClassInheritanceWithDefaultQualifiers {
public static final String NAME = "EntityWithTablePerClassInheritanceWithDefaultQualifiersSubclass";
@Basic
private String basic2;
}
@Entity(name = EntityWithTablePerClassInheritanceWithExplicitQualifiers.NAME)
@Inheritance(strategy = InheritanceType.TABLE_PER_CLASS)
@Table(catalog = EXPLICIT_CATALOG, schema = EXPLICIT_SCHEMA)
public static | EntityWithTablePerClassInheritanceWithDefaultQualifiersSubclass |
java | elastic__elasticsearch | qa/packaging/src/test/java/org/elasticsearch/packaging/test/RpmMetadataTests.java | {
"start": 886,
"end": 1773
} | class ____ extends PackagingTestCase {
@Before
public void filterDistros() {
assumeTrue("only rpm", distribution.packaging == Distribution.Packaging.RPM);
}
public void test11Dependencies() {
// TODO: rewrite this test to not use a real second distro to try and install
assumeTrue(Platforms.isRPM());
final Shell sh = new Shell();
final Shell.Result deps = sh.run("rpm -qpR " + getDistributionFile(distribution()));
TestCase.assertTrue(Pattern.compile("(?m)^/bin/bash\\s*$").matcher(deps.stdout()).find());
final Shell.Result conflicts = sh.run("rpm -qp --conflicts " + getDistributionFile(distribution()));
String oppositePackageName = "elasticsearch-oss";
TestCase.assertTrue(Pattern.compile("(?m)^" + oppositePackageName + "\\s*$").matcher(conflicts.stdout()).find());
}
}
| RpmMetadataTests |
java | grpc__grpc-java | netty/src/test/java/io/grpc/netty/NettyClientHandlerTest.java | {
"start": 4919,
"end": 46726
} | class ____ extends NettyHandlerTestBase<NettyClientHandler> {
private NettyClientStream.TransportState streamTransportState;
private Http2Headers grpcHeaders;
private long nanoTime; // backs a ticker, for testing ping round-trip time measurement
private int maxHeaderListSize = Integer.MAX_VALUE;
private int softLimitHeaderListSize = Integer.MAX_VALUE;
private int streamId = STREAM_ID;
private ClientTransportLifecycleManager lifecycleManager;
private KeepAliveManager mockKeepAliveManager = null;
private List<String> setKeepaliveManagerFor = ImmutableList.of("cancelShouldSucceed",
"sendFrameShouldSucceed", "channelShutdownShouldCancelBufferedStreams",
"createIncrementsIdsForActualAndBufferdStreams", "dataPingAckIsRecognized");
private Runnable tooManyPingsRunnable = new Runnable() {
@Override public void run() {}
};
@Rule
public TestName testNameRule = new TestName();
@Rule
public final MockitoRule mocks = MockitoJUnit.rule();
@Mock
private ManagedClientTransport.Listener listener;
@Mock
private ClientStreamListener streamListener;
private final Queue<InputStream> streamListenerMessageQueue = new LinkedList<>();
private NettyClientStream stream;
@Override
protected void manualSetUp() throws Exception {
setUp();
}
@Override
protected AbstractStream stream() throws Exception {
if (stream == null) {
stream = new NettyClientStream(streamTransportState,
TestMethodDescriptors.voidMethod(),
new Metadata(),
channel(),
AsciiString.of("localhost"),
AsciiString.of("http"),
AsciiString.of("agent"),
StatsTraceContext.NOOP,
transportTracer,
CallOptions.DEFAULT,
false);
}
return stream;
}
/**
* Set up for test.
*/
@Before
public void setUp() throws Exception {
doAnswer(
new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
StreamListener.MessageProducer producer =
(StreamListener.MessageProducer) invocation.getArguments()[0];
InputStream message;
while ((message = producer.next()) != null) {
streamListenerMessageQueue.add(message);
}
return null;
}
})
.when(streamListener)
.messagesAvailable(ArgumentMatchers.<StreamListener.MessageProducer>any());
doAnswer((attributes) -> Attributes.newBuilder().set(
GrpcAttributes.ATTR_AUTHORITY_VERIFIER,
(authority) -> Status.OK).build())
.when(listener)
.filterTransport(ArgumentMatchers.any(Attributes.class));
lifecycleManager = new ClientTransportLifecycleManager(listener);
// This mocks the keepalive manager only for there's in which we verify it. For other tests
// it'll be null which will be testing if we behave correctly when it's not present.
if (setKeepaliveManagerFor.contains(testNameRule.getMethodName())) {
mockKeepAliveManager = mock(KeepAliveManager.class);
}
initChannel(new GrpcHttp2ClientHeadersDecoder(GrpcUtil.DEFAULT_MAX_HEADER_LIST_SIZE));
streamTransportState = new TransportStateImpl(
handler(),
channel().eventLoop(),
DEFAULT_MAX_MESSAGE_SIZE,
transportTracer);
streamTransportState.setListener(streamListener);
grpcHeaders = new DefaultHttp2Headers()
.scheme(HTTPS)
.authority(as("www.fake.com"))
.path(as("/fakemethod"))
.method(HTTP_METHOD)
.add(as("auth"), as("sometoken"))
.add(CONTENT_TYPE_HEADER, CONTENT_TYPE_GRPC)
.add(TE_HEADER, TE_TRAILERS);
// Simulate receipt of initial remote settings.
ByteBuf serializedSettings = serializeSettings(new Http2Settings());
channelRead(serializedSettings);
channel().releaseOutbound();
}
@Test
@SuppressWarnings("InlineMeInliner")
public void sendLargerThanSoftLimitHeaderMayFail() throws Exception {
maxHeaderListSize = 8000;
softLimitHeaderListSize = 2000;
manualSetUp();
createStream();
// total head size of 7999, soft limit = 2000 and max = 8000.
// This header has 5999/6000 chance to be rejected.
Http2Headers headers = new DefaultHttp2Headers()
.scheme(HTTPS)
.authority(as("www.fake.com"))
.path(as("/fakemethod"))
.method(HTTP_METHOD)
.add(as("auth"), as("sometoken"))
.add(CONTENT_TYPE_HEADER, CONTENT_TYPE_GRPC)
.add(TE_HEADER, TE_TRAILERS)
.add("large-field", Strings.repeat("a", 7620)); // String.repeat() requires Java 11
ByteBuf headersFrame = headersFrame(STREAM_ID, headers);
channelRead(headersFrame);
ArgumentCaptor<Status> statusArgumentCaptor = ArgumentCaptor.forClass(Status.class);
verify(streamListener).closed(statusArgumentCaptor.capture(), eq(PROCESSED),
any(Metadata.class));
assertThat(statusArgumentCaptor.getValue().getCode()).isEqualTo(Status.Code.RESOURCE_EXHAUSTED);
assertThat(statusArgumentCaptor.getValue().getDescription()).contains(
"exceeded Metadata size soft limit");
}
@Test
public void cancelBufferedStreamShouldChangeClientStreamStatus() throws Exception {
// Force the stream to be buffered.
receiveMaxConcurrentStreams(0);
// Create a new stream with id 3.
ChannelFuture createFuture = enqueue(
newCreateStreamCommand(grpcHeaders, streamTransportState));
assertEquals(STREAM_ID, streamTransportState.id());
// Cancel the stream.
cancelStream(Status.CANCELLED);
assertFalse(createFuture.isSuccess());
verify(streamListener).closed(eq(Status.CANCELLED), same(PROCESSED), any(Metadata.class));
}
@Test
public void createStreamShouldSucceed() throws Exception {
createStream();
verifyWrite().writeHeaders(eq(ctx()), eq(STREAM_ID), eq(grpcHeaders), eq(0),
eq(false), any(ChannelPromise.class));
}
@Test
public void cancelShouldSucceed() throws Exception {
createStream();
cancelStream(Status.CANCELLED);
verifyWrite().writeRstStream(eq(ctx()), eq(STREAM_ID), eq(Http2Error.CANCEL.code()),
any(ChannelPromise.class));
verify(mockKeepAliveManager, times(1)).onTransportActive(); // onStreamActive
verify(mockKeepAliveManager, times(1)).onTransportIdle(); // onStreamClosed
verifyNoMoreInteractions(mockKeepAliveManager);
}
@Test
public void cancelDeadlineExceededShouldSucceed() throws Exception {
createStream();
cancelStream(Status.DEADLINE_EXCEEDED);
verifyWrite().writeRstStream(eq(ctx()), eq(STREAM_ID), eq(Http2Error.CANCEL.code()),
any(ChannelPromise.class));
}
@Test
public void cancelWhileBufferedShouldSucceed() throws Exception {
// Force the stream to be buffered.
receiveMaxConcurrentStreams(0);
ChannelFuture createFuture = createStream();
assertFalse(createFuture.isDone());
ChannelFuture cancelFuture = cancelStream(Status.CANCELLED);
assertTrue(cancelFuture.isSuccess());
assertTrue(createFuture.isDone());
assertFalse(createFuture.isSuccess());
}
/**
* Although nobody is listening to an exception should it occur during cancel(), we don't want an
* exception to be thrown because it would negatively impact performance, and we don't want our
* users working around around such performance issues.
*/
@Test
public void cancelTwiceShouldSucceed() throws Exception {
createStream();
cancelStream(Status.CANCELLED);
verifyWrite().writeRstStream(any(ChannelHandlerContext.class), eq(STREAM_ID),
eq(Http2Error.CANCEL.code()), any(ChannelPromise.class));
ChannelFuture future = cancelStream(Status.CANCELLED);
assertTrue(future.isSuccess());
}
@Test
public void cancelTwiceDifferentReasons() throws Exception {
createStream();
cancelStream(Status.DEADLINE_EXCEEDED);
verifyWrite().writeRstStream(eq(ctx()), eq(STREAM_ID), eq(Http2Error.CANCEL.code()),
any(ChannelPromise.class));
ChannelFuture future = cancelStream(Status.CANCELLED);
assertTrue(future.isSuccess());
}
@Test
public void sendFrameShouldSucceed() throws Exception {
createStream();
// Send a frame and verify that it was written.
ByteBuf content = content();
ChannelFuture future
= enqueue(new SendGrpcFrameCommand(streamTransportState, content, true));
assertTrue(future.isSuccess());
verifyWrite().writeData(eq(ctx()), eq(STREAM_ID), same(content), eq(0), eq(true),
any(ChannelPromise.class));
verify(mockKeepAliveManager, times(1)).onTransportActive(); // onStreamActive
verifyNoMoreInteractions(mockKeepAliveManager);
}
@Test
public void sendForUnknownStreamShouldFail() throws Exception {
ChannelFuture future
= enqueue(new SendGrpcFrameCommand(streamTransportState, content(), true));
assertTrue(future.isDone());
assertFalse(future.isSuccess());
}
@Test
public void inboundShouldForwardToStream() throws Exception {
createStream();
// Read a headers frame first.
Http2Headers headers = new DefaultHttp2Headers().status(STATUS_OK)
.set(CONTENT_TYPE_HEADER, CONTENT_TYPE_GRPC)
.set(as("magic"), as("value"));
ByteBuf headersFrame = headersFrame(STREAM_ID, headers);
channelRead(headersFrame);
ArgumentCaptor<Metadata> captor = ArgumentCaptor.forClass(Metadata.class);
verify(streamListener).headersRead(captor.capture());
assertEquals("value",
captor.getValue().get(Metadata.Key.of("magic", Metadata.ASCII_STRING_MARSHALLER)));
streamTransportState.requestMessagesFromDeframerForTesting(1);
// Create a data frame and then trigger the handler to read it.
ByteBuf frame = grpcDataFrame(STREAM_ID, false, contentAsArray());
channelRead(frame);
InputStream message = streamListenerMessageQueue.poll();
assertArrayEquals(ByteBufUtil.getBytes(content()), ByteStreams.toByteArray(message));
message.close();
assertNull("no additional message expected", streamListenerMessageQueue.poll());
}
@Test
public void receivedGoAwayNoErrorShouldRefuseLaterStreamId() throws Exception {
ChannelFuture future = enqueue(newCreateStreamCommand(grpcHeaders, streamTransportState));
channelRead(goAwayFrame(streamId - 1));
verify(streamListener).closed(any(Status.class), eq(REFUSED), any(Metadata.class));
assertTrue(future.isDone());
}
@Test
public void receivedGoAwayErrorShouldRefuseLaterStreamId() throws Exception {
ChannelFuture future = enqueue(newCreateStreamCommand(grpcHeaders, streamTransportState));
channelRead(
goAwayFrame(streamId - 1, (int) Http2Error.PROTOCOL_ERROR.code(), Unpooled.EMPTY_BUFFER));
// This _should_ be REFUSED, but we purposefully use PROCESSED. See comment for
// abruptGoAwayStatusConservative in NettyClientHandler
verify(streamListener).closed(any(Status.class), eq(PROCESSED), any(Metadata.class));
assertTrue(future.isDone());
}
@Test
public void receivedGoAwayShouldNotAffectEarlyStreamId() throws Exception {
ChannelFuture future = enqueue(newCreateStreamCommand(grpcHeaders, streamTransportState));
channelRead(goAwayFrame(streamId));
verify(streamListener, never())
.closed(any(Status.class), any(RpcProgress.class), any(Metadata.class));
assertTrue(future.isDone());
}
@Test
public void receivedGoAwayShouldNotAffectRacingQueuedStreamId() throws Exception {
// This command has not actually been executed yet
ChannelFuture future = writeQueue().enqueue(
newCreateStreamCommand(grpcHeaders, streamTransportState), true);
channelRead(goAwayFrame(streamId));
verify(streamListener, never())
.closed(any(Status.class), any(RpcProgress.class), any(Metadata.class));
assertTrue(future.isDone());
}
@Test
public void receivedAbruptGoAwayShouldFailRacingQueuedStreamid() throws Exception {
// This command has not actually been executed yet
ChannelFuture future = writeQueue().enqueue(
newCreateStreamCommand(grpcHeaders, streamTransportState), true);
// Read a GOAWAY that indicates our stream can't be sent
channelRead(goAwayFrame(0, 8 /* Cancel */, Unpooled.copiedBuffer("this is a test", UTF_8)));
ArgumentCaptor<Status> captor = ArgumentCaptor.forClass(Status.class);
verify(streamListener).closed(captor.capture(), same(MISCARRIED),
ArgumentMatchers.<Metadata>notNull());
assertEquals(Status.UNAVAILABLE.getCode(), captor.getValue().getCode());
assertEquals(
"Abrupt GOAWAY closed unsent stream. HTTP/2 error code: CANCEL, "
+ "debug data: this is a test\nstream id: 3, GOAWAY Last-Stream-ID:0",
captor.getValue().getDescription());
assertTrue(future.isDone());
}
  /**
   * A stream written (but not flushed) when an abrupt GOAWAY arrives is reported as a sent
   * stream: closed with REFUSED and an UNAVAILABLE status carrying the debug data.
   */
  @Test
  public void receivedAbruptGoAwayShouldFailRacingQueuedIoStreamid() throws Exception {
    // Purposefully avoid flush(), since we want the write to not actually complete.
    // EmbeddedChannel doesn't support flow control, so this is the next closest approximation.
    ChannelFuture future = channel().write(
        newCreateStreamCommand(grpcHeaders, streamTransportState));
    // Read a GOAWAY that indicates our stream can't be sent
    channelRead(goAwayFrame(0, 0 /* NO_ERROR */, Unpooled.copiedBuffer("this is a test", UTF_8)));
    ArgumentCaptor<Status> captor = ArgumentCaptor.forClass(Status.class);
    verify(streamListener).closed(captor.capture(), same(REFUSED),
        ArgumentMatchers.<Metadata>notNull());
    assertEquals(Status.UNAVAILABLE.getCode(), captor.getValue().getCode());
    assertEquals(
        "Abrupt GOAWAY closed sent stream. HTTP/2 error code: NO_ERROR, "
            + "debug data: this is a test",
        captor.getValue().getDescription());
    assertTrue(future.isDone());
  }
  /**
   * With MAX_CONCURRENT_STREAMS=1, only the first enqueued stream is written; a subsequent
   * GOAWAY must fail the second, still-buffered stream with UNAVAILABLE and a message that
   * names the MAX_CONCURRENT_STREAMS limit.
   */
  @Test
  public void receivedGoAway_shouldFailBufferedStreamsExceedingMaxConcurrentStreams()
      throws Exception {
    NettyClientStream.TransportState streamTransportState1 = new TransportStateImpl(
        handler(),
        channel().eventLoop(),
        DEFAULT_MAX_MESSAGE_SIZE,
        transportTracer);
    streamTransportState1.setListener(mock(ClientStreamListener.class));
    NettyClientStream.TransportState streamTransportState2 = new TransportStateImpl(
        handler(),
        channel().eventLoop(),
        DEFAULT_MAX_MESSAGE_SIZE,
        transportTracer);
    streamTransportState2.setListener(mock(ClientStreamListener.class));
    receiveMaxConcurrentStreams(1);
    ChannelFuture future1 = writeQueue().enqueue(
        newCreateStreamCommand(grpcHeaders, streamTransportState1), true);
    ChannelFuture future2 = writeQueue().enqueue(
        newCreateStreamCommand(grpcHeaders, streamTransportState2), true);
    // GOAWAY
    channelRead(goAwayFrame(Integer.MAX_VALUE));
    assertTrue(future1.isSuccess());
    assertTrue(future2.isDone());
    assertThat(Status.fromThrowable(future2.cause()).getCode()).isEqualTo(Status.Code.UNAVAILABLE);
    assertThat(future2.cause().getMessage()).contains(
        "Abrupt GOAWAY closed unsent stream. HTTP/2 error code: NO_ERROR");
    assertThat(future2.cause().getMessage()).contains(
        "At MAX_CONCURRENT_STREAMS limit");
  }
@Test
public void receivedResetWithRefuseCode() throws Exception {
ChannelFuture future = enqueue(newCreateStreamCommand(grpcHeaders, streamTransportState));
channelRead(rstStreamFrame(streamId, (int) Http2Error.REFUSED_STREAM.code() ));
verify(streamListener).closed(any(Status.class), eq(REFUSED), any(Metadata.class));
assertTrue(future.isDone());
}
  /**
   * RST_STREAM with CANCEL must be surfaced as RpcProgress.PROCESSED.
   * NOTE(review): "Cance" is a typo for "Cancel"; renaming is deferred because
   * setKeepaliveManagerFor matches tests by method name — verify before renaming.
   */
  @Test
  public void receivedResetWithCanceCode() throws Exception {
    ChannelFuture future = enqueue(newCreateStreamCommand(grpcHeaders, streamTransportState));
    channelRead(rstStreamFrame(streamId, (int) Http2Error.CANCEL.code()));
    verify(streamListener).closed(any(Status.class), eq(PROCESSED), any(Metadata.class));
    assertTrue(future.isDone());
  }
  /**
   * An abrupt GOAWAY (CANCEL, Last-Stream-ID 0) arriving after the stream was sent must close
   * the stream with CANCELLED; progress is PROCESSED rather than REFUSED (see comment below).
   */
  @Test
  public void receivedGoAwayShouldFailUnknownStreams() throws Exception {
    enqueue(newCreateStreamCommand(grpcHeaders, streamTransportState));
    // Read a GOAWAY that indicates our stream was never processed by the server.
    channelRead(goAwayFrame(0, 8 /* Cancel */, Unpooled.copiedBuffer("this is a test", UTF_8)));
    ArgumentCaptor<Status> captor = ArgumentCaptor.forClass(Status.class);
    // See comment for abruptGoAwayStatusConservative in NettyClientHandler
    verify(streamListener).closed(captor.capture(), same(PROCESSED),
        ArgumentMatchers.<Metadata>notNull());
    assertEquals(Status.CANCELLED.getCode(), captor.getValue().getCode());
    assertEquals(
        "Abrupt GOAWAY closed sent stream. HTTP/2 error code: CANCEL, "
            + "debug data: this is a test",
        captor.getValue().getDescription());
  }
  /**
   * A GOAWAY must fail a stream that is still buffered (MAX_CONCURRENT_STREAMS=0 prevents it
   * from ever being written) with UNAVAILABLE and the GOAWAY debug data in the description.
   */
  @Test
  public void receivedGoAwayShouldFailBufferedStreams() throws Exception {
    receiveMaxConcurrentStreams(0);
    ChannelFuture future = enqueue(newCreateStreamCommand(grpcHeaders, streamTransportState));
    // Read a GOAWAY that indicates our stream was never processed by the server.
    channelRead(goAwayFrame(0, 8 /* Cancel */, Unpooled.copiedBuffer("this is a test", UTF_8)));
    assertTrue(future.isDone());
    assertFalse(future.isSuccess());
    Status status = Status.fromThrowable(future.cause());
    assertEquals(Status.UNAVAILABLE.getCode(), status.getCode());
    assertEquals(
        "GOAWAY closed buffered stream. HTTP/2 error code: CANCEL, "
            + "debug data: this is a test",
        status.getDescription());
  }
  /**
   * channelInactive must fail a still-buffered stream: the future fails and the listener is
   * closed with MISCARRIED (the stream was never sent) and UNAVAILABLE.
   */
  @Test
  public void channelClosureShouldFailBufferedStreams() throws Exception {
    receiveMaxConcurrentStreams(0);
    ChannelFuture future = enqueue(newCreateStreamCommand(grpcHeaders, streamTransportState));
    channel().pipeline().fireChannelInactive();
    assertTrue(future.isDone());
    assertFalse(future.isSuccess());
    ArgumentCaptor<Status> captor = ArgumentCaptor.forClass(Status.class);
    verify(streamListener).closed(captor.capture(), same(MISCARRIED), ArgumentMatchers.notNull());
    assertEquals(Status.UNAVAILABLE.getCode(), captor.getValue().getCode());
  }
  /**
   * Streams created after a GOAWAY has already been read must fail immediately with
   * UNAVAILABLE, carrying the GOAWAY's error code and debug data.
   */
  @Test
  public void receivedGoAwayShouldFailNewStreams() throws Exception {
    // Read a GOAWAY that indicates our stream was never processed by the server.
    channelRead(goAwayFrame(0, 8 /* Cancel */, Unpooled.copiedBuffer("this is a test", UTF_8)));
    // Now try to create a stream.
    ChannelFuture future = enqueue(newCreateStreamCommand(grpcHeaders, streamTransportState));
    assertTrue(future.isDone());
    assertFalse(future.isSuccess());
    Status status = Status.fromThrowable(future.cause());
    assertEquals(Status.UNAVAILABLE.getCode(), status.getCode());
    assertEquals(
        "GOAWAY shut down transport. HTTP/2 error code: CANCEL, "
            + "debug data: this is a test",
        status.getDescription());
  }
  // This test is not as useful as it looks, because the HTTP/2 Netty code catches and doesn't
  // propagate exceptions during the onGoAwayReceived callback.
  /** Feeding invalid UTF-8 debug data into a GOAWAY must not blow up the handler. */
  @Test
  public void receivedGoAway_notUtf8() throws Exception {
    // 0xFF is never permitted in UTF-8. 0xF0 should have 3 continuations following, and 0x0a isn't
    // a continuation.
    channelRead(goAwayFrame(0, 11 /* ENHANCE_YOUR_CALM */,
        Unpooled.copiedBuffer(new byte[] {(byte) 0xFF, (byte) 0xF0, (byte) 0x0a})));
  }
  /**
   * ENHANCE_YOUR_CALM with debug data other than "too_many_pings" must NOT trigger the
   * tooManyPingsRunnable callback.
   */
  @Test
  public void receivedGoAway_enhanceYourCalmWithoutTooManyPings() throws Exception {
    final AtomicBoolean b = new AtomicBoolean();
    tooManyPingsRunnable = new Runnable() {
      @Override
      public void run() {
        b.set(true);
      }
    };
    // Re-run setup so the handler is built with the runnable above.
    setUp();
    channelRead(goAwayFrame(0, 11 /* ENHANCE_YOUR_CALM */,
        Unpooled.copiedBuffer("not_many_pings", UTF_8)));
    assertFalse(b.get());
  }
  /**
   * ENHANCE_YOUR_CALM with debug data exactly "too_many_pings" must trigger the
   * tooManyPingsRunnable callback.
   */
  @Test
  public void receivedGoAway_enhanceYourCalmWithTooManyPings() throws Exception {
    final AtomicBoolean b = new AtomicBoolean();
    tooManyPingsRunnable = new Runnable() {
      @Override
      public void run() {
        b.set(true);
      }
    };
    // Re-run setup so the handler is built with the runnable above.
    setUp();
    channelRead(goAwayFrame(0, 11 /* ENHANCE_YOUR_CALM */,
        Unpooled.copiedBuffer("too_many_pings", UTF_8)));
    assertTrue(b.get());
  }
  /**
   * An ENHANCE_YOUR_CALM GOAWAY must log its debug data; verified by attaching a JUL handler
   * to NettyClientHandler's logger and inspecting the captured record.
   */
  @Test
  public void receivedGoAway_enhanceYourCalmShouldLogDebugData() throws Exception {
    final AtomicReference<LogRecord> logRef = new AtomicReference<>();
    Handler handler = new Handler() {
      @Override
      public void publish(LogRecord record) {
        logRef.set(record);
      }
      @Override
      public void flush() {
      }
      @Override
      public void close() throws SecurityException {
      }
    };
    Logger logger = Logger.getLogger(NettyClientHandler.class.getName());
    try {
      logger.addHandler(handler);
      enqueue(newCreateStreamCommand(grpcHeaders, streamTransportState));
      channelRead(goAwayFrame(0, 11 /* Enhance your calm */,
          Unpooled.copiedBuffer("this is a test", UTF_8)));
      assertNotNull(logRef.get());
      // The record is parameterized; format it before matching the debug data.
      assertTrue(MessageFormat.format(logRef.get().getMessage(), logRef.get().getParameters())
          .contains("Debug data: this is a test"));
    } finally {
      // Always detach the handler so other tests don't capture into logRef.
      logger.removeHandler(handler);
    }
  }
  /**
   * Cancelling a buffered stream must still assign it an id and then close its listener with
   * CANCELLED / PROCESSED.
   */
  @Test
  public void cancelStreamShouldCreateAndThenFailBufferedStream() throws Exception {
    receiveMaxConcurrentStreams(0);
    enqueue(newCreateStreamCommand(grpcHeaders, streamTransportState));
    assertEquals(STREAM_ID, streamTransportState.id());
    cancelStream(Status.CANCELLED);
    verify(streamListener).closed(eq(Status.CANCELLED), same(PROCESSED), any(Metadata.class));
  }
  /**
   * channelInactive must fail pending (buffered) streams and notify the keepalive manager of
   * transport termination exactly once.
   */
  @Test
  public void channelShutdownShouldCancelBufferedStreams() throws Exception {
    // Force a stream to get added to the pending queue.
    receiveMaxConcurrentStreams(0);
    ChannelFuture future = enqueue(newCreateStreamCommand(grpcHeaders, streamTransportState));
    handler().channelInactive(ctx());
    assertTrue(future.isDone());
    assertFalse(future.isSuccess());
    verify(mockKeepAliveManager, times(1)).onTransportTermination(); // channelInactive
    verifyNoMoreInteractions(mockKeepAliveManager);
  }
  /**
   * channelInactive must close streams that were already created, reporting PROCESSED with an
   * UNAVAILABLE status.
   */
  @Test
  public void channelShutdownShouldFailInFlightStreams() throws Exception {
    createStream();
    handler().channelInactive(ctx());
    ArgumentCaptor<Status> captor = ArgumentCaptor.forClass(Status.class);
    verify(streamListener).closed(captor.capture(), same(PROCESSED),
        ArgumentMatchers.<Metadata>notNull());
    assertEquals(Status.UNAVAILABLE.getCode(), captor.getValue().getCode());
  }
  /**
   * The configured flowControlWindow must override both the initial and current window size of
   * the connection stream's local flow controller.
   */
  @Test
  public void connectionWindowShouldBeOverridden() throws Exception {
    flowControlWindow = 1048576; // 1MiB
    setUp();
    Http2Stream connectionStream = connection().connectionStream();
    Http2LocalFlowController localFlowController = connection().local().flowController();
    int actualInitialWindowSize = localFlowController.initialWindowSize(connectionStream);
    int actualWindowSize = localFlowController.windowSize(connectionStream);
    assertEquals(flowControlWindow, actualWindowSize);
    assertEquals(flowControlWindow, actualInitialWindowSize);
    // Re-assert against the literal so a stray mutation of flowControlWindow can't hide a bug.
    assertEquals(1048576, actualWindowSize);
  }
  /**
   * Client stream ids must advance by 2 (odd ids) for both written and buffered streams:
   * STREAM_ID, then 5, then 7 once MAX_CONCURRENT_STREAMS=2 buffers the third.
   * NOTE(review): "Bufferd" is a typo for "Buffered"; renaming is deferred because
   * setKeepaliveManagerFor matches tests by method name — verify before renaming.
   */
  @Test
  public void createIncrementsIdsForActualAndBufferdStreams() throws Exception {
    receiveMaxConcurrentStreams(2);
    enqueue(newCreateStreamCommand(grpcHeaders, streamTransportState));
    assertEquals(STREAM_ID, streamTransportState.id());
    streamTransportState = new TransportStateImpl(
        handler(),
        channel().eventLoop(),
        DEFAULT_MAX_MESSAGE_SIZE,
        transportTracer);
    streamTransportState.setListener(streamListener);
    enqueue(newCreateStreamCommand(grpcHeaders, streamTransportState));
    assertEquals(5, streamTransportState.id());
    streamTransportState = new TransportStateImpl(
        handler(),
        channel().eventLoop(),
        DEFAULT_MAX_MESSAGE_SIZE,
        transportTracer);
    streamTransportState.setListener(streamListener);
    enqueue(newCreateStreamCommand(grpcHeaders, streamTransportState));
    assertEquals(7, streamTransportState.id());
    verify(mockKeepAliveManager, times(1)).onTransportActive(); // onStreamActive
    verifyNoMoreInteractions(mockKeepAliveManager);
  }
  /**
   * Creating a stream after Integer.MAX_VALUE has been used must fail and shut the transport
   * down with a status mentioning "exhausted".
   */
  @Test
  public void exhaustedStreamsShouldFail() throws Exception {
    streamId = Integer.MAX_VALUE;
    setUp();
    assertNull(lifecycleManager.getShutdownStatus());
    // Create the MAX_INT stream.
    ChannelFuture future = createStream();
    assertTrue(future.isSuccess());
    TransportStateImpl newStreamTransportState = new TransportStateImpl(
        handler(),
        channel().eventLoop(),
        DEFAULT_MAX_MESSAGE_SIZE,
        transportTracer);
    // This should fail - out of stream IDs.
    future = enqueue(newCreateStreamCommand(grpcHeaders, newStreamTransportState));
    assertTrue(future.isDone());
    assertFalse(future.isSuccess());
    Status status = lifecycleManager.getShutdownStatus();
    assertNotNull(status);
    assertTrue("status does not reference 'exhausted': " + status,
        status.getDescription().contains("exhausted"));
  }
  /**
   * A create racing transport shutdown must fail with the shutdown status; a subsequent cancel
   * of the never-created stream must still succeed (future2.sync() would throw otherwise).
   */
  @Test
  public void nonExistentStream() throws Exception {
    Status status = Status.INTERNAL.withDescription("zz");
    lifecycleManager.notifyShutdown(status);
    // Stream creation can race with the transport shutting down, with the create command already
    // enqueued.
    ChannelFuture future1 = createStream();
    future1.await();
    assertNotNull(future1.cause());
    assertThat(Status.fromThrowable(future1.cause()).getCode()).isEqualTo(status.getCode());
    ChannelFuture future2 = enqueue(new CancelClientStreamCommand(streamTransportState, status));
    future2.sync();
  }
  /**
   * Exercises keep-alive ping plumbing: a second ping piggy-backs on the outstanding one, a
   * wrong ack payload is ignored, the correct ack completes both callbacks with the elapsed
   * nanoTime, and a later ping starts a fresh operation.
   */
  @Test
  public void ping() throws Exception {
    PingCallbackImpl callback1 = new PingCallbackImpl();
    assertEquals(0, transportTracer.getStats().keepAlivesSent);
    sendPing(callback1);
    assertEquals(1, transportTracer.getStats().keepAlivesSent);
    // add'l ping will be added as listener to outstanding operation
    PingCallbackImpl callback2 = new PingCallbackImpl();
    sendPing(callback2);
    assertEquals(1, transportTracer.getStats().keepAlivesSent);
    ArgumentCaptor<Long> captor = ArgumentCaptor.forClass(long.class);
    verifyWrite().writePing(eq(ctx()), eq(false), captor.capture(),
        any(ChannelPromise.class));
    // getting a bad ack won't cause the callback to be invoked
    long pingPayload = captor.getValue();
    // to compute bad payload, read the good payload and subtract one
    long badPingPayload = pingPayload - 1;
    channelRead(pingFrame(true, badPingPayload));
    // operation not complete because ack was wrong
    assertEquals(0, callback1.invocationCount);
    assertEquals(0, callback2.invocationCount);
    nanoTime += 10101;
    // reading the proper response should complete the future
    channelRead(pingFrame(true, pingPayload));
    assertEquals(1, callback1.invocationCount);
    assertEquals(10101, callback1.roundTripTime);
    assertNull(callback1.failureCause);
    // callback2 piggy-backed on same operation
    assertEquals(1, callback2.invocationCount);
    assertEquals(10101, callback2.roundTripTime);
    assertNull(callback2.failureCause);
    // now that previous ping is done, next request starts a new operation
    callback1 = new PingCallbackImpl();
    assertEquals(1, transportTracer.getStats().keepAlivesSent);
    sendPing(callback1);
    assertEquals(2, transportTracer.getStats().keepAlivesSent);
    assertEquals(0, callback1.invocationCount);
  }
  /** An outstanding ping must fail with UNAVAILABLE when the channel goes inactive. */
  @Test
  public void ping_failsWhenChannelCloses() throws Exception {
    PingCallbackImpl callback = new PingCallbackImpl();
    sendPing(callback);
    assertEquals(0, callback.invocationCount);
    handler().channelInactive(ctx());
    // ping failed on channel going inactive
    assertEquals(1, callback.invocationCount);
    assertEquals(Status.Code.UNAVAILABLE, callback.failureCause.getCode());
    // A failed ping is still counted
    assertEquals(1, transportTracer.getStats().keepAlivesSent);
  }
  /**
   * A user ping and the BDP flow-control ping use distinct payloads: acking the flow-control
   * ping must not complete the user callback, and vice versa.
   * NOTE(review): "oustanding" is a typo for "outstanding"; renaming is deferred because
   * setKeepaliveManagerFor matches tests by method name — verify before renaming.
   */
  @Test
  public void oustandingUserPingShouldNotInteractWithDataPing() throws Exception {
    createStream();
    handler().setAutoTuneFlowControl(true);
    PingCallbackImpl callback = new PingCallbackImpl();
    assertEquals(0, transportTracer.getStats().keepAlivesSent);
    sendPing(callback);
    assertEquals(1, transportTracer.getStats().keepAlivesSent);
    ArgumentCaptor<Long> captor = ArgumentCaptor.forClass(long.class);
    verifyWrite().writePing(eq(ctx()), eq(false), captor.capture(), any(ChannelPromise.class));
    long payload = captor.getValue();
    channelRead(grpcDataFrame(STREAM_ID, false, contentAsArray()));
    long pingData = handler().flowControlPing().payload();
    channelRead(pingFrame(true, pingData));
    // Only the flow-control ping completed; the user callback is untouched.
    assertEquals(1, handler().flowControlPing().getPingReturn());
    assertEquals(0, callback.invocationCount);
    channelRead(pingFrame(true, payload));
    assertEquals(1, handler().flowControlPing().getPingReturn());
    assertEquals(1, callback.invocationCount);
    assertEquals(1, transportTracer.getStats().keepAlivesSent);
  }
  /**
   * After the first BDP ping is acked without any intervening outbound data, further incoming
   * DATA frames must not trigger another BDP ping (writePing stays at one invocation).
   */
  @Test
  public void bdpPingAvoidsTooManyPingsOnSpecialServers() throws Exception {
    // gRPC servers limit PINGs based on what they _send_. Some servers limit PINGs based on what is
    // _received_.
    createStream();
    handler().setAutoTuneFlowControl(true);
    Http2Headers headers = new DefaultHttp2Headers().status(STATUS_OK)
        .set(CONTENT_TYPE_HEADER, CONTENT_TYPE_GRPC);
    channelRead(headersFrame(STREAM_ID, headers));
    channelRead(dataFrame(STREAM_ID, false, content()));
    verifyWrite().writePing(eq(ctx()), eq(false), eq(1234L), any(ChannelPromise.class));
    channelRead(pingFrame(true, 1234));
    channelRead(dataFrame(STREAM_ID, false, content()));
    verifyWrite(times(1)).writePing(eq(ctx()), eq(false), eq(1234L), any(ChannelPromise.class));
    channelRead(pingFrame(true, 1234));
    channelRead(dataFrame(STREAM_ID, false, content()));
    // No ping was sent
    verifyWrite(times(1)).writePing(eq(ctx()), eq(false), eq(1234L), any(ChannelPromise.class));
  }
  /**
   * Once outbound data is sent (a buffered frame completes after window updates), the handler
   * is allowed to send BDP pings again on incoming DATA frames.
   */
  @Test
  public void bdpPingAllowedAfterSendingData() throws Exception {
    // gRPC servers limit PINGs based on what they _send_. Some servers limit PINGs based on what is
    // _received_.
    flowControlWindow = 64 * 1024;
    manualSetUp();
    createStream();
    handler().setAutoTuneFlowControl(true);
    // A frame larger than the 64KiB window so the write stays pending on flow control.
    ByteBuf content = Unpooled.buffer(64 * 1024 + 1024);
    content.writerIndex(content.capacity());
    ChannelFuture future
        = enqueue(new SendGrpcFrameCommand(streamTransportState, content, false));
    assertFalse(future.isDone()); // flow control limits send
    Http2Headers headers = new DefaultHttp2Headers().status(STATUS_OK)
        .set(CONTENT_TYPE_HEADER, CONTENT_TYPE_GRPC);
    channelRead(headersFrame(STREAM_ID, headers));
    channelRead(dataFrame(STREAM_ID, false, content()));
    verifyWrite().writePing(eq(ctx()), eq(false), eq(1234L), any(ChannelPromise.class));
    channelRead(pingFrame(true, 1234));
    channelRead(dataFrame(STREAM_ID, false, content()));
    verifyWrite(times(1)).writePing(eq(ctx()), eq(false), eq(1234L), any(ChannelPromise.class));
    channelRead(pingFrame(true, 1234));
    channelRead(dataFrame(STREAM_ID, false, content()));
    // No ping was sent
    verifyWrite(times(1)).writePing(eq(ctx()), eq(false), eq(1234L), any(ChannelPromise.class));
    // Window updates let the buffered frame complete, i.e. data was sent.
    channelRead(windowUpdate(0, 2024));
    channelRead(windowUpdate(STREAM_ID, 2024));
    assertTrue(future.isDone());
    assertTrue(future.isSuccess());
    // But now one is sent
    channelRead(dataFrame(STREAM_ID, false, content()));
    verifyWrite(times(1)).writePing(eq(ctx()), eq(false), eq(1234L), any(ChannelPromise.class));
  }
  /**
   * Extends the inherited test (JUnit runs the superclass's @Test through this override) with
   * keepalive-manager interaction checks specific to the client handler.
   */
  @Override
  public void dataPingAckIsRecognized() throws Exception {
    super.dataPingAckIsRecognized();
    verify(mockKeepAliveManager, times(1)).onTransportActive(); // onStreamActive
    // onHeadersRead, onDataRead, onPingAckRead
    verify(mockKeepAliveManager, times(3)).onDataReceived();
    verifyNoMoreInteractions(mockKeepAliveManager);
  }
  /** exceptionCaught must close the underlying channel. */
  @Test
  public void exceptionCaughtShouldCloseConnection() throws Exception {
    handler().exceptionCaught(ctx(), new RuntimeException("fake exception"));
    // TODO(nmittler): EmbeddedChannel does not currently invoke the channelInactive processing,
    // so exceptionCaught() will not close streams properly in this test.
    // Once https://github.com/netty/netty/issues/4316 is resolved, we should also verify that
    // any open streams are closed properly.
    assertFalse(channel().isOpen());
  }
  /**
   * Creating a stream whose HTTP/2 headers lack :authority must fail with
   * "UNAVAILABLE: Missing authority header".
   */
  @Test
  public void missingAuthorityHeader_streamCreationShouldFail() throws Exception {
    // Same shape as grpcHeaders, but deliberately without .authority(...).
    Http2Headers grpcHeadersWithoutAuthority = new DefaultHttp2Headers()
        .scheme(HTTPS)
        .path(as("/fakemethod"))
        .method(HTTP_METHOD)
        .add(as("auth"), as("sometoken"))
        .add(CONTENT_TYPE_HEADER, CONTENT_TYPE_GRPC)
        .add(TE_HEADER, TE_TRAILERS);
    ChannelFuture channelFuture = enqueue(newCreateStreamCommand(
        grpcHeadersWithoutAuthority, streamTransportState));
    try {
      channelFuture.get();
      fail("Expected stream creation failure");
    } catch (ExecutionException e) {
      assertThat(e.getCause().getMessage()).isEqualTo("UNAVAILABLE: Missing authority header");
    }
  }
  /**
   * When the transport filter strips all attributes (so no ATTR_AUTHORITY_VERIFIER is present),
   * stream creation must fail with "Authority verifier not found to verify authority".
   * The body re-runs the channel/handler setup by hand so the doctored listener takes effect.
   */
  @Test
  public void missingAuthorityVerifierInAttributes_streamCreationShouldFail() throws Exception {
    doAnswer(new Answer<Void>() {
      @Override
      public Void answer(InvocationOnMock invocation) throws Throwable {
        StreamListener.MessageProducer producer =
            (StreamListener.MessageProducer) invocation.getArguments()[0];
        InputStream message;
        while ((message = producer.next()) != null) {
          streamListenerMessageQueue.add(message);
        }
        return null;
      }
    })
        .when(streamListener)
        .messagesAvailable(ArgumentMatchers.<StreamListener.MessageProducer>any());
    // Strip all transport attributes, so no authority verifier is available.
    doAnswer((attributes) -> Attributes.EMPTY)
        .when(listener)
        .filterTransport(ArgumentMatchers.any(Attributes.class));
    lifecycleManager = new ClientTransportLifecycleManager(listener);
    // This mocks the keepalive manager only for tests in which we verify it. For other tests
    // it'll be null, which tests that we behave correctly when it's not present.
    if (setKeepaliveManagerFor.contains(testNameRule.getMethodName())) {
      mockKeepAliveManager = mock(KeepAliveManager.class);
    }
    initChannel(new GrpcHttp2ClientHeadersDecoder(GrpcUtil.DEFAULT_MAX_HEADER_LIST_SIZE));
    streamTransportState = new TransportStateImpl(
        handler(),
        channel().eventLoop(),
        DEFAULT_MAX_MESSAGE_SIZE,
        transportTracer);
    streamTransportState.setListener(streamListener);
    grpcHeaders = new DefaultHttp2Headers()
        .scheme(HTTPS)
        .authority(as("www.fake.com"))
        .path(as("/fakemethod"))
        .method(HTTP_METHOD)
        .add(as("auth"), as("sometoken"))
        .add(CONTENT_TYPE_HEADER, CONTENT_TYPE_GRPC)
        .add(TE_HEADER, TE_TRAILERS);
    // Simulate receipt of initial remote settings.
    ByteBuf serializedSettings = serializeSettings(new Http2Settings());
    channelRead(serializedSettings);
    channel().releaseOutbound();
    ChannelFuture channelFuture = createStream();
    try {
      channelFuture.get();
      fail("Expected stream creation failure");
    } catch (ExecutionException e) {
      assertThat(e.getCause().getMessage()).isEqualTo(
          "UNAVAILABLE: Authority verifier not found to verify authority");
    }
  }
  /**
   * With per-RPC authority checking enabled and the default (passing) verifier, stream
   * creation must succeed. The static flag is restored in finally to avoid cross-test leakage.
   */
  @Test
  public void authorityVerificationSuccess_streamCreationSucceeds() throws Exception {
    NettyClientHandler.enablePerRpcAuthorityCheck = true;
    try {
      ChannelFuture channelFuture = createStream();
      channelFuture.get();
    } finally {
      NettyClientHandler.enablePerRpcAuthorityCheck = false;
    }
  }
  /**
   * With per-RPC authority checking enabled and a verifier that returns UNAVAILABLE (peer
   * verification failure), stream creation must fail. Like the missing-verifier test, the
   * channel/handler setup is re-run by hand so the doctored listener takes effect.
   */
  @Test
  public void authorityVerificationFailure_streamCreationFails() throws Exception {
    NettyClientHandler.enablePerRpcAuthorityCheck = true;
    try {
      doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
          StreamListener.MessageProducer producer =
              (StreamListener.MessageProducer) invocation.getArguments()[0];
          InputStream message;
          while ((message = producer.next()) != null) {
            streamListenerMessageQueue.add(message);
          }
          return null;
        }
      })
          .when(streamListener)
          .messagesAvailable(ArgumentMatchers.<StreamListener.MessageProducer>any());
      // Install a verifier that always fails peer verification.
      doAnswer((attributes) -> Attributes.newBuilder().set(
          GrpcAttributes.ATTR_AUTHORITY_VERIFIER,
          (authority) -> Status.UNAVAILABLE.withCause(
              new CertificateException("Peer verification failed"))).build())
          .when(listener)
          .filterTransport(ArgumentMatchers.any(Attributes.class));
      lifecycleManager = new ClientTransportLifecycleManager(listener);
      // This mocks the keepalive manager only for tests in which we verify it. For other tests
      // it'll be null, which tests that we behave correctly when it's not present.
      if (setKeepaliveManagerFor.contains(testNameRule.getMethodName())) {
        mockKeepAliveManager = mock(KeepAliveManager.class);
      }
      initChannel(new GrpcHttp2ClientHeadersDecoder(GrpcUtil.DEFAULT_MAX_HEADER_LIST_SIZE));
      streamTransportState = new TransportStateImpl(
          handler(),
          channel().eventLoop(),
          DEFAULT_MAX_MESSAGE_SIZE,
          transportTracer);
      streamTransportState.setListener(streamListener);
      grpcHeaders = new DefaultHttp2Headers()
          .scheme(HTTPS)
          .authority(as("www.fake.com"))
          .path(as("/fakemethod"))
          .method(HTTP_METHOD)
          .add(as("auth"), as("sometoken"))
          .add(CONTENT_TYPE_HEADER, CONTENT_TYPE_GRPC)
          .add(TE_HEADER, TE_TRAILERS);
      // Simulate receipt of initial remote settings.
      ByteBuf serializedSettings = serializeSettings(new Http2Settings());
      channelRead(serializedSettings);
      channel().releaseOutbound();
      ChannelFuture channelFuture = createStream();
      try {
        channelFuture.get();
        fail("Expected stream creation failure");
      } catch (ExecutionException e) {
        assertThat(e.getMessage()).isEqualTo("io.grpc.InternalStatusRuntimeException: UNAVAILABLE");
      }
    } finally {
      NettyClientHandler.enablePerRpcAuthorityCheck = false;
    }
  }
  /** Creates a stream and feeds it the response HEADERS so it is fully "open" for base tests. */
  @Override
  protected void makeStream() throws Exception {
    createStream();
    // The tests in NettyServerHandlerTest expect the header to already be read, since they work on
    // both client- and server-side.
    Http2Headers headers = new DefaultHttp2Headers().status(STATUS_OK)
        .set(CONTENT_TYPE_HEADER, CONTENT_TYPE_GRPC);
    ByteBuf headersFrame = headersFrame(STREAM_ID, headers);
    channelRead(headersFrame);
  }
  /** Enqueues a ping command whose callback runs on the calling thread. */
  @CanIgnoreReturnValue
  private ChannelFuture sendPing(PingCallback callback) {
    return enqueue(new SendPingCommand(callback, MoreExecutors.directExecutor()));
  }
  /** Simulates the server advertising SETTINGS_MAX_CONCURRENT_STREAMS = {@code max}. */
  private void receiveMaxConcurrentStreams(int max) throws Exception {
    ByteBuf serializedSettings = serializeSettings(new Http2Settings().maxConcurrentStreams(max));
    channelRead(serializedSettings);
  }
@CanIgnoreReturnValue
private ChannelFuture createStream() throws Exception {
ChannelFuture future = enqueue(newCreateStreamCommand(grpcHeaders, streamTransportState));
return future;
}
  /** Enqueues a cancel command for the shared {@code streamTransportState}. */
  @CanIgnoreReturnValue
  private ChannelFuture cancelStream(Status status) throws Exception {
    return enqueue(new CancelClientStreamCommand(streamTransportState, status));
  }
@Override
protected NettyClientHandler newHandler() throws Http2Exception {
Http2Connection connection = new DefaultHttp2Connection(false);
// Create and close a stream previous to the nextStreamId.
Http2Stream stream = connection.local().createStream(streamId - 2, true);
stream.close();
final Ticker ticker = new Ticker() {
@Override
public long read() {
return nanoTime;
}
};
Supplier<Stopwatch> stopwatchSupplier = new Supplier<Stopwatch>() {
@Override
public Stopwatch get() {
return Stopwatch.createUnstarted(ticker);
}
};
return NettyClientHandler.newHandler(
connection,
frameReader(),
frameWriter(),
lifecycleManager,
mockKeepAliveManager,
false,
flowControlWindow,
maxHeaderListSize,
softLimitHeaderListSize,
stopwatchSupplier,
tooManyPingsRunnable,
transportTracer,
Attributes.EMPTY,
"someauthority",
null,
fakeClock().getTicker());
}
  /** Starts the handler's write queue on the embedded channel and hands it to the base test. */
  @Override
  protected WriteQueue initWriteQueue() {
    handler().startWriteQueue(channel());
    return handler().getWriteQueue();
  }
private AsciiString as(String string) {
return new AsciiString(string);
}
  /** Builds a create-stream command with get=true and preface flag false, as all tests use. */
  private static CreateStreamCommand newCreateStreamCommand(
      Http2Headers headers, NettyClientStream.TransportState stream) {
    return new CreateStreamCommand(headers, stream, true, false);
  }
private static | NettyClientHandlerTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/unconstrained/Employee.java | {
"start": 179,
"end": 391
/**
 * Simple entity with a single string identifier, used by the unconstrained-association
 * mapping tests.
 */
public class Employee {
    // Natural identifier of the employee.
    private String id;

    /** No-arg constructor required by the persistence framework. */
    public Employee() {
    }

    public Employee(String id) {
        this.id = id;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }
}
| Employee |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java | {
"start": 1464,
"end": 5057
} | class ____ extends CommandShell {
final static private String USAGE_PREFIX = "Usage: hadoop credential " +
"[generic options]\n";
final static private String COMMANDS =
" [-help]\n" +
" [" + CreateCommand.USAGE + "]\n" +
" [" + DeleteCommand.USAGE + "]\n" +
" [" + ListCommand.USAGE + "]\n" +
" [" + CheckCommand.USAGE + "]\n";
@VisibleForTesting
public static final String NO_VALID_PROVIDERS =
"There are no valid (non-transient) providers configured.\n" +
"No action has been taken. Use the -provider option to specify\n" +
"a provider. If you want to use a transient provider then you\n" +
"MUST use the -provider argument.";
private boolean interactive = true;
/** If true, fail if the provider requires a password and none is given. */
private boolean strict = false;
private boolean userSuppliedProvider = false;
private String value = null;
private PasswordReader passwordReader;
/**
* Parse the command line arguments and initialize the data.
* <pre>
* % hadoop credential create alias [-provider providerPath]
* % hadoop credential list [-provider providerPath]
* % hadoop credential check alias [-provider providerPath]
* % hadoop credential delete alias [-provider providerPath] [-f]
* </pre>
* @param args args.
* @return 0 if the argument(s) were recognized, 1 otherwise
* @throws IOException raised on errors performing I/O.
*/
@Override
protected int init(String[] args) throws IOException {
// no args should print the help message
if (0 == args.length) {
ToolRunner.printGenericCommandUsage(getErr());
return 1;
}
for (int i = 0; i < args.length; i++) { // parse command line
if (args[i].equals("create")) {
if (i == args.length - 1) {
return 1;
}
setSubCommand(new CreateCommand(args[++i]));
} else if (args[i].equals("check")) {
if (i == args.length - 1) {
return 1;
}
setSubCommand(new CheckCommand(args[++i]));
} else if (args[i].equals("delete")) {
if (i == args.length - 1) {
return 1;
}
setSubCommand(new DeleteCommand(args[++i]));
} else if (args[i].equals("list")) {
setSubCommand(new ListCommand());
} else if (args[i].equals("-provider")) {
if (i == args.length - 1) {
return 1;
}
userSuppliedProvider = true;
getConf().set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
args[++i]);
} else if (args[i].equals("-f") || (args[i].equals("-force"))) {
interactive = false;
} else if (args[i].equals("-strict")) {
strict = true;
} else if (args[i].equals("-v") || (args[i].equals("-value"))) {
value = args[++i];
} else if (args[i].equals("-help")) {
printShellUsage();
return 0;
} else {
ToolRunner.printGenericCommandUsage(getErr());
return 1;
}
}
return 0;
}
@Override
public String getCommandUsage() {
StringBuilder sbuf = new StringBuilder(USAGE_PREFIX + COMMANDS);
String banner = StringUtils.repeat("=", 66);
sbuf.append(banner + "\n")
.append(CreateCommand.USAGE + ":\n\n" + CreateCommand.DESC + "\n")
.append(banner + "\n")
.append(DeleteCommand.USAGE + ":\n\n" + DeleteCommand.DESC + "\n")
.append(banner + "\n")
.append(ListCommand.USAGE + ":\n\n" + ListCommand.DESC + "\n");
return sbuf.toString();
}
private abstract | CredentialShell |
java | apache__rocketmq | proxy/src/main/java/org/apache/rocketmq/proxy/service/route/TopicRouteWrapper.java | {
"start": 1192,
"end": 2762
} | class ____ {
private final TopicRouteData topicRouteData;
private final String topicName;
private final Map<String/* brokerName */, BrokerData> brokerNameRouteData = new HashMap<>();
public TopicRouteWrapper(TopicRouteData topicRouteData, String topicName) {
this.topicRouteData = topicRouteData;
this.topicName = topicName;
if (this.topicRouteData.getBrokerDatas() != null) {
for (BrokerData brokerData : this.topicRouteData.getBrokerDatas()) {
this.brokerNameRouteData.put(brokerData.getBrokerName(), brokerData);
}
}
}
public String getMasterAddr(String brokerName) {
return this.brokerNameRouteData.get(brokerName).getBrokerAddrs().get(MixAll.MASTER_ID);
}
public String getMasterAddrPrefer(String brokerName) {
HashMap<Long, String> brokerAddr = brokerNameRouteData.get(brokerName).getBrokerAddrs();
String addr = brokerAddr.get(MixAll.MASTER_ID);
if (addr == null) {
Optional<Long> optional = brokerAddr.keySet().stream().findFirst();
return optional.map(brokerAddr::get).orElse(null);
}
return addr;
}
public String getTopicName() {
return topicName;
}
public TopicRouteData getTopicRouteData() {
return topicRouteData;
}
public List<QueueData> getQueueDatas() {
return this.topicRouteData.getQueueDatas();
}
public String getOrderTopicConf() {
return this.topicRouteData.getOrderTopicConf();
}
}
| TopicRouteWrapper |
java | google__dagger | dagger-android/main/java/dagger/android/AndroidInjectionModule.java | {
"start": 992,
"end": 1260
/**
 * Declares the multibound maps of {@code AndroidInjector.Factory} instances, keyed by class
 * and by string, plus a private constructor so the module is never instantiated.
 * NOTE(review): the declaration is masked here ("____") and lacks modifiers; the
 * {@code @Multibinds} abstract methods require the class itself to be abstract — confirm
 * against the upstream source before use.
 */
class ____ {
  @Multibinds
  abstract Map<Class<?>, AndroidInjector.Factory<?>> classKeyedInjectorFactories();
  @Multibinds
  abstract Map<String, AndroidInjector.Factory<?>> stringKeyedInjectorFactories();
  // Not instantiable: this module only carries declarations.
  private AndroidInjectionModule() {}
}
| AndroidInjectionModule |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/integration/AzureTestUtils.java | {
"start": 9036,
"end": 12538
} | class ____ an exception. If it is not as expected, rethrow it.
* Comparison is on the exact class, not subclass-of inference as
* offered by {@code instanceof}.
* @param clazz the expected exception class
* @param ex the exception caught
* @return the exception, if it is of the expected class
* @throws Exception the exception passed in.
*/
public static Exception verifyExceptionClass(Class clazz,
Exception ex)
throws Exception {
if (!(ex.getClass().equals(clazz))) {
throw ex;
}
return ex;
}
  /**
   * Turn off FS Caching: use if a filesystem with different options from
   * the default is required.
   * @param conf configuration to patch
   */
  public static void disableFilesystemCaching(Configuration conf) {
    // Disables the shared FileSystem.get() cache for the wasb scheme only.
    conf.setBoolean("fs.wasb.impl.disable.cache", true);
  }
/**
* Create a test path, using the value of
* {@link AzureTestUtils#TEST_UNIQUE_FORK_ID} if it is set.
* @param defVal default value
* @return a path
*/
public static Path createTestPath(Path defVal) {
String testUniqueForkId = System.getProperty(
AzureTestConstants.TEST_UNIQUE_FORK_ID);
return testUniqueForkId == null
? defVal
: new Path("/" + testUniqueForkId, "test");
}
/**
* Create a test page blob path using the value of
* {@link AzureTestConstants#TEST_UNIQUE_FORK_ID} if it is set.
* @param filename filename at the end of the path
* @return an absolute path
*/
public static Path blobPathForTests(FileSystem fs, String filename) {
String testUniqueForkId = System.getProperty(
AzureTestConstants.TEST_UNIQUE_FORK_ID);
return fs.makeQualified(new Path(PAGE_BLOB_DIR,
testUniqueForkId == null
? filename
: (testUniqueForkId + "/" + filename)));
}
/**
* Create a test path using the value of
* {@link AzureTestConstants#TEST_UNIQUE_FORK_ID} if it is set.
* @param filename filename at the end of the path
* @return an absolute path
*/
public static Path pathForTests(FileSystem fs, String filename) {
String testUniqueForkId = System.getProperty(
AzureTestConstants.TEST_UNIQUE_FORK_ID);
return fs.makeQualified(new Path(
testUniqueForkId == null
? ("/test/" + filename)
: (testUniqueForkId + "/" + filename)));
}
/**
* Get a unique fork ID.
* Returns a default value for non-parallel tests.
* @return a string unique for all test VMs running in this maven build.
*/
public static String getForkID() {
return System.getProperty(
AzureTestConstants.TEST_UNIQUE_FORK_ID, "fork-1");
}
/**
* Flag to indicate that this test is being executed in parallel.
* This is used by some of the scale tests to validate test time expectations.
* @return true if the build indicates this test is being run in parallel.
*/
public static boolean isParallelExecution() {
return Boolean.getBoolean(KEY_PARALLEL_TEST_EXECUTION);
}
/**
* Asserts that {@code obj} is an instance of {@code expectedClass} using a
* descriptive assertion message.
* @param expectedClass class
* @param obj object to check
*/
public static void assertInstanceOf2(Class<?> expectedClass, Object obj) {
Assertions.assertTrue(
expectedClass.isAssignableFrom(obj.getClass()), String.format("Expected instance of class %s, but is %s.",
expectedClass, obj.getClass()));
}
/**
* Builds a comma-separated list of | of |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_yuanmomo_Issue_504.java | {
"start": 1424,
"end": 1786
} | class ____ {
private List<String> models;
public List<String> getModels() {
return models;
}
public void setModels(List<String> models) {
this.models = models;
}
@Override
public String toString() {
return "UserExt{" + "models=" + models + '}';
}
}
}
| UserExt |
java | micronaut-projects__micronaut-core | test-suite/src/test/java/io/micronaut/docs/inject/generics/Vehicle.java | {
"start": 263,
"end": 851
} | class ____ {
private final Engine<V8> engine;
@Inject
List<Engine<V6>> v6Engines;
private Engine<V8> anotherV8;
// tag::constructor[]
@Inject
public Vehicle(Engine<V8> engine) {
this.engine = engine;
}
// end::constructor[]
public String start() {
return engine.start();
}
@Inject
public void setAnotherV8(Engine<V8> anotherV8) {
this.anotherV8 = anotherV8;
}
public Engine<V8> getAnotherV8() {
return anotherV8;
}
public Engine<V8> getEngine() {
return engine;
}
}
| Vehicle |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/visitor/MySqlSchemaStatVisitorTest_Subquery2.java | {
"start": 973,
"end": 2425
} | class ____ extends TestCase {
public void test_0() throws Exception {
String sql = "SELECT a.id, a.name, b.name groupName FROM (select id, name, groupId from users limit 10) a inner join groups b on a.groupId = b.id";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement statemen = statementList.get(0);
assertEquals(1, statementList.size());
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
statemen.accept(visitor);
System.out.println(sql);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
assertEquals(2, visitor.getTables().size());
assertEquals(true, visitor.containsTable("users"));
assertEquals(true, visitor.containsTable("groups"));
assertEquals(5, visitor.getColumns().size());
assertEquals(true, visitor.getColumns().contains(new Column("users", "id")));
assertEquals(true, visitor.getColumns().contains(new Column("users", "groupId")));
assertEquals(true, visitor.getColumns().contains(new Column("users", "name")));
assertEquals(true, visitor.getColumns().contains(new Column("groups", "id")));
assertEquals(true, visitor.getColumns().contains(new Column("groups", "name")));
}
}
| MySqlSchemaStatVisitorTest_Subquery2 |
java | apache__camel | components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/api/dto/composite/SObjectCollection.java | {
"start": 1041,
"end": 1620
} | class ____ implements Serializable {
private static final long serialVersionUID = 1L;
private boolean allOrNone;
private List<AbstractDescribedSObjectBase> records = new ArrayList<>();
public boolean isAllOrNone() {
return allOrNone;
}
public void setAllOrNone(boolean allOrNone) {
this.allOrNone = allOrNone;
}
public List<AbstractDescribedSObjectBase> getRecords() {
return records;
}
public void setRecords(List<AbstractDescribedSObjectBase> records) {
this.records = records;
}
}
| SObjectCollection |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-http12/src/main/java/org/apache/dubbo/remoting/http12/exception/UnsupportedMediaTypeException.java | {
"start": 862,
"end": 1259
} | class ____ extends HttpStatusException {
private static final long serialVersionUID = 1L;
private final String mediaType;
public UnsupportedMediaTypeException(String mediaType) {
super(415, "Unsupported Media Type '" + mediaType + "'");
this.mediaType = mediaType;
}
public String getMediaType() {
return mediaType;
}
}
| UnsupportedMediaTypeException |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/persister/entity/CustomSqlNamespaceInjectionFunctionalTests.java | {
"start": 3967,
"end": 4191
} | class ____ {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
public Integer id;
private String name;
public CustomEntity() {
}
public CustomEntity(String name) {
this.name = name;
}
}
}
| CustomEntity |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/query/entities/Address.java | {
"start": 389,
"end": 966
} | class ____ {
@Id
@GeneratedValue
private Long id;
private String street;
@Column(name = "`number`")
private int number;
public Address() {
}
public Address(String street, int number) {
this.street = street;
this.number = number;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getStreet() {
return street;
}
public void setStreet(String street) {
this.street = street;
}
public int getNumber() {
return number;
}
public void setNumber(int number) {
this.number = number;
}
}
| Address |
java | elastic__elasticsearch | modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java | {
"start": 3751,
"end": 4186
} | class ____, optionally followed by a 'no_import' attribute, an opening bracket, a newline,
* constructor/method/field specifications, a closing bracket, and a final newline. Within a complex
* type the following may be parsed:
* <ul>
* <li> A constructor may be specified starting with an opening parenthesis, followed by a
* comma-delimited list of Painless type names corresponding to the type/ | name |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit/jupiter/DisabledIfAndDirtiesContextTests.java | {
"start": 3024,
"end": 3135
} | class ____ {
@Bean
DisposableBean disposableBean() {
return () -> contextClosed.set(true);
}
}
}
| Config |
java | apache__camel | components/camel-sql/src/test/java/org/apache/camel/component/sql/stored/ProducerUseMessageBodyForTemplateTest.java | {
"start": 1413,
"end": 3100
} | class ____ extends CamelTestSupport {
private EmbeddedDatabase db;
@Override
public void doPreSetup() throws Exception {
db = new EmbeddedDatabaseBuilder()
.setName(getClass().getSimpleName())
.setType(EmbeddedDatabaseType.DERBY)
.addScript("sql/storedProcedureTest.sql").build();
}
@Override
public void doPostTearDown() throws Exception {
if (db != null) {
db.shutdown();
}
}
@Test
public void shouldUseMessageBodyAsQuery() throws InterruptedException {
MockEndpoint mock = getMockEndpoint("mock:query");
mock.expectedMessageCount(1);
Map<String, Object> batch1 = new HashMap<>();
batch1.put("num1", 3);
batch1.put("num2", 1);
template.requestBodyAndHeader("direct:query", "SUBNUMBERS(INTEGER :#num1,INTEGER :#num2,OUT INTEGER resultofsum)",
SqlStoredConstants.SQL_STORED_PARAMETERS, batch1);
MockEndpoint.assertIsSatisfied(context);
Exchange exchange = mock.getExchanges().get(0);
assertEquals(Integer.valueOf(2), exchange.getIn().getBody(Map.class).get("resultofsum"));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// required for the sql component
getContext().getComponent("sql-stored", SqlStoredComponent.class).setDataSource(db);
from("direct:query").to("sql-stored:query?useMessageBodyForTemplate=true").to("mock:query");
}
};
}
}
| ProducerUseMessageBodyForTemplateTest |
java | apache__flink | flink-end-to-end-tests/flink-end-to-end-tests-common/src/main/java/org/apache/flink/tests/util/activation/OperatingSystemRestriction.java | {
"start": 1229,
"end": 2592
} | enum ____ {
;
/**
* Restricts the execution to the given set of operating systems.
*
* @param reason reason for the restriction
* @param operatingSystems allowed operating systems
* @throws AssumptionViolatedException if this method is called on a forbidden operating system
*/
public static void restrictTo(final String reason, final OperatingSystem... operatingSystems)
throws AssumptionViolatedException {
final EnumSet<OperatingSystem> allowed = EnumSet.copyOf(Arrays.asList(operatingSystems));
Assume.assumeTrue(reason, allowed.contains(OperatingSystem.getCurrentOperatingSystem()));
}
/**
* Forbids the execution on the given set of operating systems.
*
* @param reason reason for the restriction
* @param forbiddenSystems forbidden operating systems
* @throws AssumptionViolatedException if this method is called on a forbidden operating system
*/
public static void forbid(final String reason, final OperatingSystem... forbiddenSystems)
throws AssumptionViolatedException {
final OperatingSystem os = OperatingSystem.getCurrentOperatingSystem();
for (final OperatingSystem forbiddenSystem : forbiddenSystems) {
Assume.assumeTrue(reason, os != forbiddenSystem);
}
}
}
| OperatingSystemRestriction |
java | spring-projects__spring-framework | spring-orm/src/main/java/org/springframework/orm/jpa/ExtendedEntityManagerCreator.java | {
"start": 17479,
"end": 19549
} | class ____
extends ResourceHolderSynchronization<EntityManagerHolder, EntityManager>
implements Ordered {
private final EntityManager entityManager;
private final @Nullable PersistenceExceptionTranslator exceptionTranslator;
public volatile boolean closeOnCompletion;
public ExtendedEntityManagerSynchronization(
EntityManager em, @Nullable PersistenceExceptionTranslator exceptionTranslator) {
super(new EntityManagerHolder(em), em);
this.entityManager = em;
this.exceptionTranslator = exceptionTranslator;
}
@Override
public int getOrder() {
return EntityManagerFactoryUtils.ENTITY_MANAGER_SYNCHRONIZATION_ORDER - 1;
}
@Override
protected void flushResource(EntityManagerHolder resourceHolder) {
try {
this.entityManager.flush();
}
catch (RuntimeException ex) {
throw convertException(ex);
}
}
@Override
protected boolean shouldReleaseBeforeCompletion() {
return false;
}
@Override
public void afterCommit() {
super.afterCommit();
// Trigger commit here to let exceptions propagate to the caller.
try {
this.entityManager.getTransaction().commit();
}
catch (RuntimeException ex) {
throw convertException(ex);
}
}
@Override
public void afterCompletion(int status) {
try {
super.afterCompletion(status);
if (status != STATUS_COMMITTED) {
// Haven't had an afterCommit call: trigger a rollback.
try {
this.entityManager.getTransaction().rollback();
}
catch (RuntimeException ex) {
throw convertException(ex);
}
}
}
finally {
if (this.closeOnCompletion) {
EntityManagerFactoryUtils.closeEntityManager(this.entityManager);
}
}
}
private RuntimeException convertException(RuntimeException ex) {
DataAccessException dae = (this.exceptionTranslator != null) ?
this.exceptionTranslator.translateExceptionIfPossible(ex) :
EntityManagerFactoryUtils.convertJpaAccessExceptionIfPossible(ex);
return (dae != null ? dae : ex);
}
}
}
| ExtendedEntityManagerSynchronization |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java | {
"start": 1628,
"end": 3068
} | class ____ extends AvroSerialization<Object>{
/**
* Key to configure packages that contain classes to be serialized and
* deserialized using this class. Multiple packages can be specified using
* comma-separated list.
*/
@InterfaceAudience.Private
public static final String AVRO_REFLECT_PACKAGES = "avro.reflect.pkgs";
private Set<String> packages;
@InterfaceAudience.Private
@Override
public synchronized boolean accept(Class<?> c) {
if (packages == null) {
getPackages();
}
return AvroReflectSerializable.class.isAssignableFrom(c) ||
(c.getPackage() != null && packages.contains(c.getPackage().getName()));
}
private void getPackages() {
String[] pkgList = getConf().getStrings(AVRO_REFLECT_PACKAGES);
packages = new HashSet<>();
if (pkgList != null) {
for (String pkg : pkgList) {
packages.add(pkg.trim());
}
}
}
@InterfaceAudience.Private
@Override
public DatumReader getReader(Class<Object> clazz) {
try {
return new ReflectDatumReader(clazz);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@InterfaceAudience.Private
@Override
public Schema getSchema(Object t) {
return ReflectData.get().getSchema(t.getClass());
}
@InterfaceAudience.Private
@Override
public DatumWriter getWriter(Class<Object> clazz) {
return new ReflectDatumWriter();
}
}
| AvroReflectSerialization |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictorTests.java | {
"start": 1183,
"end": 5697
} | class ____ extends ESTestCase {
private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS);
private DeterministicTaskQueue taskQueue;
@Before
public void init() {
taskQueue = new DeterministicTaskQueue();
}
public void testStart_CallsExecutorSubmit() throws IOReactorException {
var mockThreadPool = mock(ThreadPool.class);
when(mockThreadPool.scheduleWithFixedDelay(any(Runnable.class), any(), any())).thenReturn(mock(Scheduler.Cancellable.class));
try (
var evictor = new IdleConnectionEvictor(
mockThreadPool,
createConnectionManager(),
new TimeValue(1, TimeUnit.NANOSECONDS),
new TimeValue(1, TimeUnit.NANOSECONDS)
)
) {
evictor.start();
verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), any(), any());
}
}
public void testStart_OnlyCallsSubmitOnce() throws IOReactorException {
var mockThreadPool = mock(ThreadPool.class);
when(mockThreadPool.scheduleWithFixedDelay(any(Runnable.class), any(), any())).thenReturn(mock(Scheduler.Cancellable.class));
try (
var evictor = new IdleConnectionEvictor(
mockThreadPool,
createConnectionManager(),
new TimeValue(1, TimeUnit.NANOSECONDS),
new TimeValue(1, TimeUnit.NANOSECONDS)
)
) {
evictor.start();
evictor.start();
verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), any(), any());
}
}
public void testCloseExpiredConnections_IsCalled() throws InterruptedException {
var manager = mock(PoolingNHttpClientConnectionManager.class);
var evictor = new IdleConnectionEvictor(
taskQueue.getThreadPool(),
manager,
new TimeValue(1, TimeUnit.NANOSECONDS),
new TimeValue(1, TimeUnit.NANOSECONDS)
);
CountDownLatch runLatch = new CountDownLatch(1);
doAnswer(invocation -> {
evictor.close();
runLatch.countDown();
return Void.TYPE;
}).when(manager).closeExpiredConnections();
startEvictor(evictor);
runLatch.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS);
verify(manager, times(1)).closeExpiredConnections();
}
public void testCloseIdleConnections_IsCalled() throws InterruptedException {
var manager = mock(PoolingNHttpClientConnectionManager.class);
var evictor = new IdleConnectionEvictor(
taskQueue.getThreadPool(),
manager,
new TimeValue(1, TimeUnit.NANOSECONDS),
new TimeValue(1, TimeUnit.NANOSECONDS)
);
CountDownLatch runLatch = new CountDownLatch(1);
doAnswer(invocation -> {
evictor.close();
runLatch.countDown();
return Void.TYPE;
}).when(manager).closeIdleConnections(anyLong(), any());
startEvictor(evictor);
runLatch.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS);
verify(manager, times(1)).closeIdleConnections(anyLong(), any());
}
public void testIsRunning_ReturnsTrue() throws IOReactorException {
var evictor = new IdleConnectionEvictor(
taskQueue.getThreadPool(),
createConnectionManager(),
new TimeValue(1, TimeUnit.SECONDS),
new TimeValue(1, TimeUnit.SECONDS)
);
startEvictor(evictor);
assertTrue(evictor.isRunning());
evictor.close();
}
public void testIsRunning_ReturnsFalse() throws IOReactorException {
var evictor = new IdleConnectionEvictor(
taskQueue.getThreadPool(),
createConnectionManager(),
new TimeValue(1, TimeUnit.SECONDS),
new TimeValue(1, TimeUnit.SECONDS)
);
startEvictor(evictor);
assertTrue(evictor.isRunning());
evictor.close();
assertFalse(evictor.isRunning());
}
private void startEvictor(IdleConnectionEvictor evictor) {
taskQueue.scheduleNow(evictor::start);
taskQueue.runAllRunnableTasks();
}
private static PoolingNHttpClientConnectionManager createConnectionManager() throws IOReactorException {
return new PoolingNHttpClientConnectionManager(new DefaultConnectingIOReactor());
}
}
| IdleConnectionEvictorTests |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/uri/UriTemplateParser.java | {
"start": 11870,
"end": 12335
} | interface ____ {
/**
* Visits a literal.
*
* @param literal The literal value
*/
void visitLiteral(String literal);
/**
* Visits and expression.
*
* @param type The type
* @param variables The variables
*/
void visitExpression(ExpressionType type, List<Variable> variables);
}
/**
* The expression type.
*/
public | PartVisitor |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schemafilter/SchemaFilterProviderTest.java | {
"start": 6469,
"end": 6610
} | class ____ {
@Id
private Integer id;
}
@Entity( name = "Entity2" )
@jakarta.persistence.Table( name = "entity_2" )
public static | Entity1 |
java | spring-projects__spring-framework | spring-expression/src/main/java/org/springframework/expression/common/TemplateAwareExpressionParser.java | {
"start": 1097,
"end": 1249
} | class ____ {@linkplain ExpressionParser expression parsers} that
* support templates.
*
* <p>Can be subclassed by expression parsers that offer first | for |
java | FasterXML__jackson-core | src/test/java/tools/jackson/core/unittest/json/GeneratorFailFromReaderTest.java | {
"start": 488,
"end": 4573
} | class ____
extends tools.jackson.core.unittest.JacksonCoreTestBase
{
private final JsonFactory F = newStreamFactory();
// [core#177]
// Also: should not try writing JSON String if field name expected
// (in future maybe take one as alias... but not yet)
@Test
void failOnWritingStringNotFieldNameBytes() throws Exception {
_testFailOnWritingStringNotFieldName(F, false);
}
// [core#177]
@Test
void failOnWritingStringNotFieldNameChars() throws Exception {
_testFailOnWritingStringNotFieldName(F, true);
}
@Test
void failOnWritingStringFromReaderWithTooFewCharacters() throws Exception {
_testFailOnWritingStringFromReaderWithTooFewCharacters(F, true);
_testFailOnWritingStringFromReaderWithTooFewCharacters(F, false);
}
@Test
void failOnWritingStringFromNullReader() throws Exception {
_testFailOnWritingStringFromNullReader(F, true);
_testFailOnWritingStringFromNullReader(F, false);
}
/*
/**********************************************************
/* Internal methods
/**********************************************************
*/
private void _testFailOnWritingStringNotFieldName(JsonFactory f, boolean useReader)
throws IOException
{
JsonGenerator gen;
ByteArrayOutputStream bout = new ByteArrayOutputStream();
if (useReader) {
gen = f.createGenerator(ObjectWriteContext.empty(), new OutputStreamWriter(bout, "UTF-8"));
} else {
gen = f.createGenerator(ObjectWriteContext.empty(), bout, JsonEncoding.UTF8);
}
gen.writeStartObject();
try {
StringReader reader = new StringReader("a");
gen.writeString(reader, -1);
gen.flush();
String json = bout.toString("UTF-8");
fail("Should not have let "+gen.getClass().getName()+".writeString() be used in place of 'writeName()': output = "+json);
} catch (StreamWriteException e) {
verifyException(e, "cannot write a String");
}
gen.close();
}
private void _testFailOnWritingStringFromReaderWithTooFewCharacters(JsonFactory f, boolean useReader)
throws IOException
{
JsonGenerator gen;
ByteArrayOutputStream bout = new ByteArrayOutputStream();
if (useReader) {
gen = f.createGenerator(ObjectWriteContext.empty(), new OutputStreamWriter(bout, "UTF-8"));
} else {
gen = f.createGenerator(ObjectWriteContext.empty(), bout, JsonEncoding.UTF8);
}
gen.writeStartObject();
try {
String testStr = "aaaaaaaaa";
StringReader reader = new StringReader(testStr);
gen.writeName("a");
gen.writeString(reader, testStr.length() + 1);
gen.flush();
String json = bout.toString("UTF-8");
fail("Should not have let "+gen.getClass().getName()+".writeString() ': output = "+json);
} catch (StreamWriteException e) {
verifyException(e, "Didn't read enough from reader");
}
gen.close();
}
private void _testFailOnWritingStringFromNullReader(JsonFactory f, boolean useReader)
throws IOException
{
JsonGenerator gen;
ByteArrayOutputStream bout = new ByteArrayOutputStream();
if (useReader) {
gen = f.createGenerator(ObjectWriteContext.empty(), new OutputStreamWriter(bout, "UTF-8"));
} else {
gen = f.createGenerator(ObjectWriteContext.empty(), bout, JsonEncoding.UTF8);
}
gen.writeStartObject();
try {
gen.writeName("a");
gen.writeString(null, -1);
gen.flush();
String json = bout.toString("UTF-8");
fail("Should not have let "+gen.getClass().getName()+".writeString() ': output = "+json);
} catch (StreamWriteException e) {
verifyException(e, "null reader");
}
gen.close();
}
}
| GeneratorFailFromReaderTest |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/lucene/queries/MinDocQueryTests.java | {
"start": 957,
"end": 2575
} | class ____ extends ESTestCase {
public void testBasics() {
MinDocQuery query1 = new MinDocQuery(42);
MinDocQuery query2 = new MinDocQuery(42);
MinDocQuery query3 = new MinDocQuery(43);
QueryUtils.check(query1);
QueryUtils.checkEqual(query1, query2);
QueryUtils.checkUnequal(query1, query3);
MinDocQuery query4 = new MinDocQuery(42, new Object());
MinDocQuery query5 = new MinDocQuery(42, new Object());
QueryUtils.checkUnequal(query4, query5);
}
public void testRewrite() throws Exception {
IndexReader reader = new MultiReader();
IndexSearcher searcher = newSearcher(reader);
MinDocQuery query = new MinDocQuery(42);
Query rewritten = query.rewrite(searcher);
QueryUtils.checkUnequal(query, rewritten);
Query rewritten2 = rewritten.rewrite(searcher);
assertSame(rewritten, rewritten2);
}
public void testRandom() throws IOException {
final int numDocs = randomIntBetween(10, 200);
final Document doc = new Document();
final Directory dir = newDirectory();
final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
for (int i = 0; i < numDocs; ++i) {
w.addDocument(doc);
}
final IndexReader reader = w.getReader();
final IndexSearcher searcher = newSearcher(reader);
for (int i = 0; i <= numDocs; ++i) {
assertEquals(numDocs - i, searcher.count(new MinDocQuery(i)));
}
w.close();
reader.close();
dir.close();
}
}
| MinDocQueryTests |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/CheckedExceptionNotThrownTest.java | {
"start": 3233,
"end": 3560
} | class ____ {
int test(Optional<Integer> x) throws Exception {
return x.orElseThrow(() -> new IllegalStateException());
}
}
""")
.addOutputLines(
"Test.java",
"""
import java.util.Optional;
public final | Test |
java | spring-projects__spring-boot | smoke-test/spring-boot-smoke-test-web-secure-custom/src/main/java/smoketest/web/secure/custom/SampleWebSecureCustomApplication.java | {
"start": 1823,
"end": 2228
} | class ____ {
@Bean
SecurityFilterChain configure(HttpSecurity http) {
http.csrf(CsrfConfigurer::disable);
http.authorizeHttpRequests((requests) -> {
requests.dispatcherTypeMatchers(DispatcherType.FORWARD).permitAll();
requests.anyRequest().fullyAuthenticated();
});
http.formLogin((form) -> form.loginPage("/login").permitAll());
return http.build();
}
}
}
| ApplicationSecurity |
java | quarkusio__quarkus | extensions/smallrye-context-propagation/deployment/src/main/java/io/quarkus/smallrye/context/deployment/SmallRyeContextPropagationProcessor.java | {
"start": 20618,
"end": 21455
} | class ____ {
String[] cleared;
String[] propagated;
int maxAsync;
int maxQueued;
ExecutorConfig(AnnotationValue cleared, AnnotationValue propagated, AnnotationValue maxAsync,
AnnotationValue maxQueued) {
this.cleared = cleared == null ? ManagedExecutorConfig.Literal.DEFAULT_INSTANCE.cleared() : cleared.asStringArray();
this.propagated = propagated == null ? ManagedExecutorConfig.Literal.DEFAULT_INSTANCE.propagated()
: propagated.asStringArray();
this.maxAsync = maxAsync == null ? ManagedExecutorConfig.Literal.DEFAULT_INSTANCE.maxAsync() : maxAsync.asInt();
this.maxQueued = maxQueued == null ? ManagedExecutorConfig.Literal.DEFAULT_INSTANCE.maxQueued() : maxQueued.asInt();
}
}
| ExecutorConfig |
java | elastic__elasticsearch | x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/querydsl/query/GeoDistanceQuery.java | {
"start": 541,
"end": 2073
} | class ____ extends LeafQuery {
private final String field;
private final double lat;
private final double lon;
private final double distance;
public GeoDistanceQuery(Source source, String field, double distance, double lat, double lon) {
super(source);
this.field = field;
this.distance = distance;
this.lat = lat;
this.lon = lon;
}
public String field() {
return field;
}
public double lat() {
return lat;
}
public double lon() {
return lon;
}
public double distance() {
return distance;
}
@Override
public QueryBuilder asBuilder() {
return QueryBuilders.geoDistanceQuery(field).distance(distance, DistanceUnit.METERS).point(lat, lon);
}
@Override
public int hashCode() {
return Objects.hash(field, distance, lat, lon);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
GeoDistanceQuery other = (GeoDistanceQuery) obj;
return Objects.equals(field, other.field)
&& Objects.equals(distance, other.distance)
&& Objects.equals(lat, other.lat)
&& Objects.equals(lon, other.lon);
}
@Override
protected String innerToString() {
return field + ":" + "(" + distance + "," + "(" + lat + ", " + lon + "))";
}
}
| GeoDistanceQuery |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/derivedidentities/e3/a/EmployeeId.java | {
"start": 234,
"end": 315
} | class ____ implements Serializable{
String firstName;
String lastName;
}
| EmployeeId |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/mock/MockThroughtputTest.java | {
"start": 978,
"end": 1634
} | class ____ extends ContextTestSupport {
@Test
public void testMockThroughtputTest() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:foo?reportGroup=5");
mock.expectedMessageCount(10);
for (int i = 0; i < 10; i++) {
template.sendBody("direct:start", "Hello " + i);
}
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("mock:foo?reportGroup=5");
}
};
}
}
| MockThroughtputTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/mutation/internal/temptable/GlobalTemporaryTableStrategy.java | {
"start": 1133,
"end": 5684
} | class ____ {
private static final Logger LOG = Logger.getLogger( GlobalTemporaryTableStrategy.class );
public static final String SHORT_NAME = "global_temporary";
public static final String CREATE_ID_TABLES = "hibernate.query.mutation_strategy.global_temporary.create_tables";
public static final String DROP_ID_TABLES = "hibernate.query.mutation_strategy.global_temporary.drop_tables";
private final TemporaryTable temporaryTable;
private final SessionFactoryImplementor sessionFactory;
private boolean prepared;
private boolean dropIdTables;
public GlobalTemporaryTableStrategy(TemporaryTable temporaryTable, SessionFactoryImplementor sessionFactory) {
this.temporaryTable = temporaryTable;
this.sessionFactory = sessionFactory;
final TemporaryTableStrategy temporaryTableStrategy = requireGlobalTemporaryTableStrategy( sessionFactory.getJdbcServices().getDialect() );
if ( temporaryTableStrategy.getTemporaryTableAfterUseAction() == AfterUseAction.DROP ) {
throw new IllegalArgumentException( "Global-temp ID tables cannot use AfterUseAction.DROP : "
+ temporaryTable.getTableExpression() );
}
}
protected static TemporaryTableStrategy requireGlobalTemporaryTableStrategy(Dialect dialect) {
return Objects.requireNonNull( dialect.getGlobalTemporaryTableStrategy(),
"Dialect does not define a global temporary table strategy: " + dialect.getClass().getSimpleName() );
}
public TemporaryTableStrategy getTemporaryTableStrategy() {
return castNonNull( sessionFactory.getJdbcServices().getDialect().getGlobalTemporaryTableStrategy() );
}
public void prepare(MappingModelCreationProcess mappingModelCreationProcess, JdbcConnectionAccess connectionAccess) {
if ( prepared ) {
return;
}
prepared = true;
final ConfigurationService configService =
mappingModelCreationProcess.getCreationContext()
.getBootstrapContext().getServiceRegistry()
.requireService( ConfigurationService.class );
if ( configService.getSetting( CREATE_ID_TABLES, StandardConverters.BOOLEAN, true ) ) {
LOG.tracef( "Creating global-temp ID table: %s", getTemporaryTable().getTableExpression() );
final TemporaryTableHelper.TemporaryTableCreationWork temporaryTableCreationWork =
new TemporaryTableHelper.TemporaryTableCreationWork( getTemporaryTable(), sessionFactory );
final Connection connection;
try {
connection = connectionAccess.obtainConnection();
}
catch (UnsupportedOperationException e) {
// assume this comes from org.hibernate.engine.jdbc.connections.internal.UserSuppliedConnectionProviderImpl
LOG.debug( "Unable to obtain JDBC connection; assuming ID tables already exist or wont be needed" );
return;
}
catch (SQLException e) {
LOG.error( "Unable obtain JDBC Connection", e );
return;
}
try {
temporaryTableCreationWork.execute( connection );
dropIdTables = configService.getSetting( DROP_ID_TABLES, StandardConverters.BOOLEAN, false );
}
finally {
try {
connectionAccess.releaseConnection( connection );
}
catch (SQLException exception) {
JDBC_LOGGER.unableToReleaseConnection( exception );
}
}
}
}
public void release(SessionFactoryImplementor sessionFactory, JdbcConnectionAccess connectionAccess) {
if ( !dropIdTables ) {
return;
}
dropIdTables = false;
LOG.tracef( "Dropping global-temp ID table: %s", getTemporaryTable().getTableExpression() );
final TemporaryTableHelper.TemporaryTableDropWork temporaryTableDropWork =
new TemporaryTableHelper.TemporaryTableDropWork( getTemporaryTable(), sessionFactory );
Connection connection;
try {
connection = connectionAccess.obtainConnection();
}
catch (UnsupportedOperationException e) {
// assume this comes from org.hibernate.engine.jdbc.connections.internal.UserSuppliedConnectionProviderImpl
LOG.debugf(
"Unable to obtain JDBC connection; unable to drop global-temp ID table : %s",
getTemporaryTable().getTableExpression()
);
return;
}
catch (SQLException e) {
LOG.error( "Unable obtain JDBC Connection", e );
return;
}
try {
temporaryTableDropWork.execute( connection );
}
finally {
try {
connectionAccess.releaseConnection( connection );
}
catch (SQLException exception) {
JDBC_LOGGER.unableToReleaseConnection( exception );
}
}
}
public TemporaryTable getTemporaryTable() {
return temporaryTable;
}
public SessionFactoryImplementor getSessionFactory() {
return sessionFactory;
}
}
| GlobalTemporaryTableStrategy |
java | mockito__mockito | mockito-core/src/test/java/org/mockito/internal/creation/bytebuddy/SubclassByteBuddyMockMakerTest.java | {
"start": 8392,
"end": 8499
} | interface ____ {
@SampleAnnotation("bar")
void sampleMethod();
}
}
| InterfaceWithAnnotation |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.