| focal_method (string, length 13 – 60.9k chars) | test_case (string, length 25 – 109k chars) |
|---|---|
// Notifies every registered listener that the given projects were rekeyed.
// A null set is a programming error; an empty set is a no-op.
@Override
public void onProjectsRekeyed(Set<RekeyedProject> rekeyedProjects) {
    checkNotNull(rekeyedProjects, "rekeyedProjects can't be null");
    if (rekeyedProjects.isEmpty()) {
        return;
    }
    // safelyCallListener presumably swallows per-listener exceptions so that one
    // failing listener does not prevent the others from being notified -- the
    // companion test below relies on exactly that behavior.
    Arrays.stream(listeners)
        .forEach(safelyCallListener(listener -> listener.onProjectsRekeyed(rekeyedProjects)));
}
|
/**
 * A RuntimeException thrown by one listener must not stop the remaining
 * listeners from being notified; all three must receive the very same Set
 * instance, in registration order, with no extra interactions.
 */
@Test
@UseDataProvider("oneOrManyRekeyedProjects")
public void onProjectsRekeyed_calls_all_listeners_even_if_one_throws_an_Exception(Set<RekeyedProject> projects) {
    doThrow(new RuntimeException("Faking listener2 throwing an exception"))
        .when(listener2)
        .onProjectsRekeyed(any());

    InOrder orderedVerifier = Mockito.inOrder(listener1, listener2, listener3);
    underTestWithListeners.onProjectsRekeyed(projects);

    orderedVerifier.verify(listener1).onProjectsRekeyed(same(projects));
    orderedVerifier.verify(listener2).onProjectsRekeyed(same(projects));
    orderedVerifier.verify(listener3).onProjectsRekeyed(same(projects));
    orderedVerifier.verifyNoMoreInteractions();
}
|
/**
 * Generates the bytecode of a batch call-stub class for the given aggregate
 * function method: declares the stub class, emits the single-value batch
 * update routine, and finalizes generation.
 *
 * @param clazz  the aggregate function class the stub dispatches to
 * @param method the update method the stub will batch-invoke
 * @return the generated class file bytes
 */
public static byte[] generateCallStubV(Class<?> clazz, Method method) {
    AggBatchCallGenerator stubGenerator = new AggBatchCallGenerator(clazz, method);
    stubGenerator.declareCallStubClazz();
    stubGenerator.genBatchUpdateSingle();
    stubGenerator.finish();
    return stubGenerator.getByteCode();
}
|
/**
 * Generates the batch-call stub for IntSumfunc.update, loads it via a
 * throwaway class loader, and verifies that invoking the generated static
 * batchCallV accumulates the expected sum of 0..999 into the state object.
 */
@Test
public void testAggCallSingleStub()
        throws NoSuchMethodException, ClassNotFoundException, InvocationTargetException, IllegalAccessException {
    Class<?> clazz = IntSumfunc.class;
    // CLAZZ_NAME is an internal (slash-separated) name; convert to a binary name.
    final String genClassName = CallStubGenerator.CLAZZ_NAME.replace("/", ".");
    Method m = clazz.getMethod("update", IntSumfunc.State.class, Integer.class);
    final byte[] updates =
            CallStubGenerator.generateCallStubV(clazz, m);
    ClassLoader classLoader = new TestClassLoader(genClassName, updates);
    final Class<?> stubClazz = classLoader.loadClass(genClassName);
    Method batchCall = getFirstMethod(stubClazz, "batchCallV");
    IntSumfunc sum = new IntSumfunc();
    IntSumfunc.State state = new IntSumfunc.State();
    int testSize = 1000;
    Integer[] inputs = new Integer[testSize];
    // Expected result is computed in a long, matching state.val's accumulation.
    long expect = 0;
    for (int i = 0; i < testSize; i++) {
        inputs[i] = i;
        expect += inputs[i];
    }
    assert batchCall != null;
    // The generated stub is expected to be static, hence the null receiver.
    batchCall.invoke(null, testSize, sum, state, inputs);
    Assertions.assertEquals(expect, state.val);
}
|
/**
 * Returns the union of node labels across all active sub-clusters.
 *
 * On success the elapsed time is recorded in the router metrics and a
 * success audit entry is written. Failures from the federation lookup or the
 * concurrent invocation are converted to IOException after recording a
 * failure metric and audit entry.
 *
 * Fix: the previous implementation null-checked a NodeLabelsInfo that had
 * just been constructed on the preceding line, so the check was always true;
 * the dead branch has been removed and the result is returned directly.
 *
 * @param hsr the incoming request; cloned before fan-out to sub-clusters
 * @return the merged NodeLabelsInfo from all active sub-clusters
 * @throws IOException if the sub-cluster lookup or remote invocation fails
 */
@Override
public NodeLabelsInfo getClusterNodeLabels(HttpServletRequest hsr)
    throws IOException {
  try {
    long startTime = clock.getTime();
    Collection<SubClusterInfo> subClustersActive = federationFacade.getActiveSubClusters();
    // Clone the request so each concurrent sub-cluster call sees its own copy.
    final HttpServletRequest hsrCopy = clone(hsr);
    Class[] argsClasses = new Class[]{HttpServletRequest.class};
    Object[] args = new Object[]{hsrCopy};
    ClientMethod remoteMethod = new ClientMethod("getClusterNodeLabels", argsClasses, args);
    Map<SubClusterInfo, NodeLabelsInfo> nodeToLabelsInfoMap =
        invokeConcurrent(subClustersActive, remoteMethod, NodeLabelsInfo.class);
    // Union the labels: a Set de-duplicates labels reported by several sub-clusters.
    Set<NodeLabel> hashSets = Sets.newHashSet();
    nodeToLabelsInfoMap.values().forEach(item -> hashSets.addAll(item.getNodeLabels()));
    NodeLabelsInfo nodeLabelsInfo = new NodeLabelsInfo(hashSets);
    long stopTime = clock.getTime();
    routerMetrics.succeededGetClusterNodeLabelsRetrieved(stopTime - startTime);
    RouterAuditLogger.logSuccess(getUser().getShortUserName(), GET_CLUSTER_NODELABELS,
        TARGET_WEB_SERVICE);
    return nodeLabelsInfo;
  } catch (NotFoundException e) {
    routerMetrics.incrClusterNodeLabelsFailedRetrieved();
    RouterAuditLogger.logFailure(getUser().getShortUserName(), GET_CLUSTER_NODELABELS, UNKNOWN,
        TARGET_WEB_SERVICE, e.getLocalizedMessage());
    RouterServerUtil.logAndThrowIOException("get all active sub cluster(s) error.", e);
  } catch (YarnException e) {
    routerMetrics.incrClusterNodeLabelsFailedRetrieved();
    RouterAuditLogger.logFailure(getUser().getShortUserName(), GET_CLUSTER_NODELABELS, UNKNOWN,
        TARGET_WEB_SERVICE, e.getLocalizedMessage());
    RouterServerUtil.logAndThrowIOException("getClusterNodeLabels with yarn error.", e);
  }
  // Only reachable if a catch block completes without throwing; kept so the
  // method always terminates with a value or an exception.
  routerMetrics.incrClusterNodeLabelsFailedRetrieved();
  RouterAuditLogger.logFailure(getUser().getShortUserName(), GET_CLUSTER_NODELABELS, UNKNOWN,
      TARGET_WEB_SERVICE, "getClusterNodeLabels Failed.");
  throw new RuntimeException("getClusterNodeLabels Failed.");
}
|
/**
 * The federated interceptor must merge node labels from all active
 * sub-clusters: exactly two non-exclusive labels, "cpu" and "gpu",
 * are expected from the test fixture.
 */
@Test
public void testGetClusterNodeLabels() throws Exception {
    // A null HttpServletRequest is accepted here; the interceptor clones it
    // internally -- presumably clone(null) is a no-op, TODO confirm.
    NodeLabelsInfo nodeLabelsInfo = interceptor.getClusterNodeLabels(null);
    Assert.assertNotNull(nodeLabelsInfo);
    Assert.assertEquals(2, nodeLabelsInfo.getNodeLabelsName().size());
    List<String> nodeLabelsName = nodeLabelsInfo.getNodeLabelsName();
    Assert.assertNotNull(nodeLabelsName);
    Assert.assertTrue(nodeLabelsName.contains("cpu"));
    Assert.assertTrue(nodeLabelsName.contains("gpu"));
    ArrayList<NodeLabelInfo> nodeLabelInfos = nodeLabelsInfo.getNodeLabelsInfo();
    Assert.assertNotNull(nodeLabelInfos);
    Assert.assertEquals(2, nodeLabelInfos.size());
    // NodeLabelInfo equality must hold for (name, exclusivity) pairs.
    NodeLabelInfo cpuNodeLabelInfo = new NodeLabelInfo("cpu", false);
    Assert.assertTrue(nodeLabelInfos.contains(cpuNodeLabelInfo));
    NodeLabelInfo gpuNodeLabelInfo = new NodeLabelInfo("gpu", false);
    Assert.assertTrue(nodeLabelInfos.contains(gpuNodeLabelInfo));
}
|
/**
 * Converts an ActiveMQ (OpenWire) message into an AMQP 1.0 encoded message.
 *
 * AMQP sections (Header, DeliveryAnnotations, MessageAnnotations, Properties,
 * ApplicationProperties, body, Footer) are created lazily -- only when some
 * JMS attribute or "JMS_AMQP_*" property supplies content for them -- and are
 * encoded in the section order shown at the bottom of the method.
 *
 * @param message the OpenWire message to convert; null yields null
 * @return the encoded AMQP message with its negotiated message format
 * @throws Exception if the body conversion or property access fails
 */
@Override
public EncodedMessage transform(ActiveMQMessage message) throws Exception {
    if (message == null) {
        return null;
    }
    long messageFormat = 0;
    Header header = null;
    Properties properties = null;
    Map<Symbol, Object> daMap = null;
    Map<Symbol, Object> maMap = null;
    Map<String,Object> apMap = null;
    Map<Object, Object> footerMap = null;
    Section body = convertBody(message);
    // JMS persistent delivery maps to the AMQP durable header flag.
    if (message.isPersistent()) {
        if (header == null) {
            header = new Header();
        }
        header.setDurable(true);
    }
    // Only emit a priority when it differs from the JMS default.
    byte priority = message.getPriority();
    if (priority != Message.DEFAULT_PRIORITY) {
        if (header == null) {
            header = new Header();
        }
        header.setPriority(UnsignedByte.valueOf(priority));
    }
    // JMSType maps onto the AMQP subject property.
    String type = message.getType();
    if (type != null) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setSubject(type);
    }
    MessageId messageId = message.getMessageId();
    if (messageId != null) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setMessageId(getOriginalMessageId(message));
    }
    // Destination and reply-to carry both an address (properties section) and
    // a type discriminator (message-annotations section).
    ActiveMQDestination destination = message.getDestination();
    if (destination != null) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setTo(destination.getQualifiedName());
        if (maMap == null) {
            maMap = new HashMap<>();
        }
        maMap.put(JMS_DEST_TYPE_MSG_ANNOTATION, destinationType(destination));
    }
    ActiveMQDestination replyTo = message.getReplyTo();
    if (replyTo != null) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setReplyTo(replyTo.getQualifiedName());
        if (maMap == null) {
            maMap = new HashMap<>();
        }
        maMap.put(JMS_REPLY_TO_TYPE_MSG_ANNOTATION, destinationType(replyTo));
    }
    String correlationId = message.getCorrelationId();
    if (correlationId != null) {
        if (properties == null) {
            properties = new Properties();
        }
        try {
            // Attempt to restore a typed AMQP correlation id; fall back to the
            // raw string if the id does not use the AMQP id-prefix scheme.
            properties.setCorrelationId(AMQPMessageIdHelper.INSTANCE.toIdObject(correlationId));
        } catch (AmqpProtocolException e) {
            properties.setCorrelationId(correlationId);
        }
    }
    long expiration = message.getExpiration();
    if (expiration != 0) {
        // TTL is the remaining lifetime; clamp at 1ms so an already-expired
        // message still carries a positive TTL on the wire.
        long ttl = expiration - System.currentTimeMillis();
        if (ttl < 0) {
            ttl = 1;
        }
        if (header == null) {
            header = new Header();
        }
        header.setTtl(new UnsignedInteger((int) ttl));
        if (properties == null) {
            properties = new Properties();
        }
        properties.setAbsoluteExpiryTime(new Date(expiration));
    }
    long timeStamp = message.getTimestamp();
    if (timeStamp != 0) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setCreationTime(new Date(timeStamp));
    }
    // JMSX Message Properties
    int deliveryCount = message.getRedeliveryCounter();
    if (deliveryCount > 0) {
        if (header == null) {
            header = new Header();
        }
        header.setDeliveryCount(UnsignedInteger.valueOf(deliveryCount));
    }
    String userId = message.getUserID();
    if (userId != null) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setUserId(new Binary(userId.getBytes(StandardCharsets.UTF_8)));
    }
    String groupId = message.getGroupID();
    if (groupId != null) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setGroupId(groupId);
    }
    int groupSequence = message.getGroupSequence();
    if (groupSequence > 0) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setGroupSequence(UnsignedInteger.valueOf(groupSequence));
    }
    final Map<String, Object> entries;
    try {
        entries = message.getProperties();
    } catch (IOException e) {
        throw JMSExceptionSupport.create(e);
    }
    // Route each JMS property either into the AMQP section it encodes
    // (JMS_AMQP_* keys, matched by suffix at JMS_AMQP_PREFIX_LENGTH) or into
    // the application-properties section.
    for (Map.Entry<String, Object> entry : entries.entrySet()) {
        String key = entry.getKey();
        Object value = entry.getValue();
        if (key.startsWith(JMS_AMQP_PREFIX)) {
            if (key.startsWith(NATIVE, JMS_AMQP_PREFIX_LENGTH)) {
                // skip transformer appended properties
                continue;
            } else if (key.startsWith(ORIGINAL_ENCODING, JMS_AMQP_PREFIX_LENGTH)) {
                // skip transformer appended properties
                continue;
            } else if (key.startsWith(MESSAGE_FORMAT, JMS_AMQP_PREFIX_LENGTH)) {
                messageFormat = (long) TypeConversionSupport.convert(entry.getValue(), Long.class);
                continue;
            } else if (key.startsWith(HEADER, JMS_AMQP_PREFIX_LENGTH)) {
                // Marker only: forces a Header section to be emitted.
                if (header == null) {
                    header = new Header();
                }
                continue;
            } else if (key.startsWith(PROPERTIES, JMS_AMQP_PREFIX_LENGTH)) {
                // Marker only: forces a Properties section to be emitted.
                if (properties == null) {
                    properties = new Properties();
                }
                continue;
            } else if (key.startsWith(MESSAGE_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
                if (maMap == null) {
                    maMap = new HashMap<>();
                }
                String name = key.substring(JMS_AMQP_MESSAGE_ANNOTATION_PREFIX.length());
                maMap.put(Symbol.valueOf(name), value);
                continue;
            } else if (key.startsWith(FIRST_ACQUIRER, JMS_AMQP_PREFIX_LENGTH)) {
                if (header == null) {
                    header = new Header();
                }
                header.setFirstAcquirer((boolean) TypeConversionSupport.convert(value, Boolean.class));
                continue;
            } else if (key.startsWith(CONTENT_TYPE, JMS_AMQP_PREFIX_LENGTH)) {
                if (properties == null) {
                    properties = new Properties();
                }
                properties.setContentType(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class)));
                continue;
            } else if (key.startsWith(CONTENT_ENCODING, JMS_AMQP_PREFIX_LENGTH)) {
                if (properties == null) {
                    properties = new Properties();
                }
                properties.setContentEncoding(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class)));
                continue;
            } else if (key.startsWith(REPLYTO_GROUP_ID, JMS_AMQP_PREFIX_LENGTH)) {
                if (properties == null) {
                    properties = new Properties();
                }
                properties.setReplyToGroupId((String) TypeConversionSupport.convert(value, String.class));
                continue;
            } else if (key.startsWith(DELIVERY_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
                if (daMap == null) {
                    daMap = new HashMap<>();
                }
                String name = key.substring(JMS_AMQP_DELIVERY_ANNOTATION_PREFIX.length());
                daMap.put(Symbol.valueOf(name), value);
                continue;
            } else if (key.startsWith(FOOTER_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
                if (footerMap == null) {
                    footerMap = new HashMap<>();
                }
                String name = key.substring(JMS_AMQP_FOOTER_PREFIX.length());
                footerMap.put(Symbol.valueOf(name), value);
                continue;
            }
        } else if (key.startsWith(AMQ_SCHEDULED_MESSAGE_PREFIX )) {
            // strip off the scheduled message properties
            continue;
        }
        // The property didn't map into any other slot so we store it in the
        // Application Properties section of the message.
        if (apMap == null) {
            apMap = new HashMap<>();
        }
        apMap.put(key, value);
        // NOTE(review): this advisory-type annotation is (re)computed once per
        // application property rather than once per message -- presumably
        // harmless because the value is constant, but worth confirming.
        int messageType = message.getDataStructureType();
        if (messageType == CommandTypes.ACTIVEMQ_MESSAGE) {
            // Type of command to recognize advisory message
            Object data = message.getDataStructure();
            if(data != null) {
                apMap.put("ActiveMqDataStructureType", data.getClass().getSimpleName());
            }
        }
    }
    // Encode the populated sections in the AMQP 1.0 mandated order.
    final AmqpWritableBuffer buffer = new AmqpWritableBuffer();
    encoder.setByteBuffer(buffer);
    if (header != null) {
        encoder.writeObject(header);
    }
    if (daMap != null) {
        encoder.writeObject(new DeliveryAnnotations(daMap));
    }
    if (maMap != null) {
        encoder.writeObject(new MessageAnnotations(maMap));
    }
    if (properties != null) {
        encoder.writeObject(properties);
    }
    if (apMap != null) {
        encoder.writeObject(new ApplicationProperties(apMap));
    }
    if (body != null) {
        encoder.writeObject(body);
    }
    if (footerMap != null) {
        encoder.writeObject(new Footer(footerMap));
    }
    return new EncodedMessage(messageFormat, buffer.getArray(), 0, buffer.getArrayLength());
}
|
/**
 * An empty ObjectMessage flagged with an unknown AMQP original encoding must
 * still transform into an AMQP message whose body is an empty Data section.
 */
@Test
public void testConvertEmptyObjectMessageToAmqpMessageUnknownEncodingGetsDataSection() throws Exception {
    ActiveMQObjectMessage outbound = createObjectMessage();
    outbound.setShortProperty(JMS_AMQP_ORIGINAL_ENCODING, AMQP_UNKNOWN);
    // onSend/storeContent mirror the broker-side lifecycle before transforming.
    outbound.onSend();
    outbound.storeContent();
    JMSMappingOutboundTransformer transformer = new JMSMappingOutboundTransformer();
    EncodedMessage encoded = transformer.transform(outbound);
    assertNotNull(encoded);
    Message amqp = encoded.decode();
    assertNotNull(amqp.getBody());
    assertTrue(amqp.getBody() instanceof Data);
    assertEquals(0, ((Data) amqp.getBody()).getValue().getLength());
}
|
// Returns this analyzer's display name as reported in logs and reports.
@Override
public String getName() {
    return ANALYZER_NAME;
}
|
/**
 * Analyzing a Ruby gemspec must populate vendor/product/version evidence and
 * derive name, version, and display file name from the spec.
 *
 * NOTE(review): the method name says "PackageJson" but the fixture is a
 * rest-client gemspec -- the name looks like a copy/paste leftover.
 */
@Test
public void testAnalyzePackageJson() throws AnalysisException {
    final Dependency result = new Dependency(BaseTest.getResourceAsFile(this,
            "ruby/vulnerable/gems/specifications/rest-client-1.7.2.gemspec"));
    analyzer.analyze(result, null);
    final String vendorString = result.getEvidence(EvidenceType.VENDOR).toString();
    assertEquals(RubyGemspecAnalyzer.DEPENDENCY_ECOSYSTEM, result.getEcosystem());
    assertThat(vendorString, containsString("REST Client Team"));
    assertThat(vendorString, containsString("rest-client_project"));
    assertThat(vendorString, containsString("rest.client@librelist.com"));
    assertThat(vendorString, containsString("https://github.com/rest-client/rest-client"));
    assertThat(result.getEvidence(EvidenceType.PRODUCT).toString(), containsString("rest-client"));
    assertThat(result.getEvidence(EvidenceType.VERSION).toString(), containsString("1.7.2"));
    assertEquals("rest-client", result.getName());
    assertEquals("1.7.2", result.getVersion());
    assertEquals("rest-client:1.7.2", result.getDisplayFileName());
}
|
/**
 * Prepares fetch requests for all fetchable partitions and sends them,
 * routing the success/failure callbacks back through the Fetcher monitor.
 *
 * @return the number of fetch requests that were prepared
 */
public synchronized int sendFetches() {
    final Map<Node, FetchSessionHandler.FetchRequestData> fetchRequests = prepareFetchRequests();
    sendFetchesInternal(
            fetchRequests,
            (fetchTarget, data, clientResponse) -> {
                // Callbacks may run on another thread; re-acquire the Fetcher
                // monitor so session/handler state updates stay serialized with
                // this method -- NOTE(review): threading assumption, confirm.
                synchronized (Fetcher.this) {
                    handleFetchSuccess(fetchTarget, data, clientResponse);
                }
            },
            (fetchTarget, data, error) -> {
                synchronized (Fetcher.this) {
                    handleFetchFailure(fetchTarget, data, error);
                }
            });
    return fetchRequests.size();
}
|
/**
 * A seek performed after a fetch response arrives but before it is collected
 * must discard the fetched data (position moved from the fetched offset) and
 * must not mark the partition for offset reset.
 */
@Test
public void testFetchedRecordsAfterSeek() {
    buildFetcher(OffsetResetStrategy.NONE, new ByteArrayDeserializer(),
            new ByteArrayDeserializer(), 2, IsolationLevel.READ_UNCOMMITTED);
    assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    assertTrue(sendFetches() > 0);
    // Respond with OFFSET_OUT_OF_RANGE; the error is only handled at collect time.
    client.prepareResponse(fullFetchResponse(tidp0, records, Errors.OFFSET_OUT_OF_RANGE, 100L, 0));
    consumerClient.poll(time.timer(0));
    // The partition is not marked as needing its offset reset because that error handling logic is
    // performed during the fetch collection. When we call seek() before we collect the fetch, the
    // partition's position is updated (to offset 2) which is different from the offset from which
    // we fetched the data (from offset 0).
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    subscriptions.seek(tp0, 2);
    assertEmptyFetch("Should not return records or advance position after seeking to end of topic partition");
}
|
/**
 * Reads and parses a Maven POM file.
 *
 * If the first parse fails and the failure was caused by a SAXParseException
 * (typically a malformed DOCTYPE), a second parse that skips the doctype
 * cleanup is attempted before giving up.
 *
 * @param file the pom.xml to parse
 * @return the parsed Model, never null
 * @throws AnalysisException if the file cannot be parsed by either path
 */
public static Model readPom(File file) throws AnalysisException {
    Model model = null;
    final PomParser parser = new PomParser();
    try {
        model = parser.parse(file);
    } catch (PomParseException ex) {
        if (ex.getCause() instanceof SAXParseException) {
            // Retry without doctype cleanup; only this specific SAX failure
            // is considered recoverable.
            try {
                model = parser.parseWithoutDocTypeCleanup(file);
            } catch (PomParseException ex1) {
                LOGGER.warn("Unable to parse pom '{}'", file.getPath());
                LOGGER.debug("", ex1);
                throw new AnalysisException(ex1);
            }
        }
        // Non-SAX parse failure (or a retry that returned null): rethrow the
        // original exception.
        if (model == null) {
            LOGGER.warn("Unable to parse pom '{}'", file.getPath());
            LOGGER.debug("", ex);
            throw new AnalysisException(ex);
        }
    } catch (Throwable ex) {
        // Deliberately broad: the parser occasionally fails with Errors on
        // pathological input; all are wrapped as AnalysisException.
        LOGGER.warn("Unexpected error during parsing of the pom '{}'", file.getPath());
        LOGGER.debug("", ex);
        throw new AnalysisException(ex);
    }
    // Defensive: the parser contract does not guarantee a non-null result.
    if (model == null) {
        throw new AnalysisException(String.format("Unable to parse pom '%s'", file.getPath()));
    }
    return model;
}
|
/** readPom must strip the stray whitespace/newlines around the version element. */
@Test
public void testReadPom_should_trim_version() throws AnalysisException {
    final File pomWithNewLine = BaseTest.getResourceAsFile(this, "pom/pom-with-new-line.xml");
    final Model parsed = PomUtils.readPom(pomWithNewLine);
    assertEquals("2.2.0", parsed.getVersion());
}
|
// Ensures the shared executors are initialized (setup() is presumably
// idempotent -- TODO confirm) and returns the process-wide shared Timer.
public static Timer getTimer() {
    setup();
    return sharedTimer;
}
|
/** getTimer must hand back the same non-null shared Timer on every call. */
@Test
public void timer() {
    final java.util.Timer firstCall = SharedExecutors.getTimer();
    assertNotNull("Timer must not be null", firstCall);
    final java.util.Timer secondCall = SharedExecutors.getTimer();
    assertSame("factories should be same", firstCall, secondCall);
}
|
/**
 * Maps the contained value through {@code mapper} when a value is present;
 * otherwise runs {@code emptyAction} and yields an empty Opt.
 *
 * @param mapper      transformation applied to a present value
 * @param emptyAction side-effect executed when no value is present
 * @return the mapped Opt, or an empty Opt when absent
 */
public <U> Opt<U> mapOrElse(Function<? super T, ? extends U> mapper, VoidFunc0 emptyAction) {
    if (!isPresent()) {
        emptyAction.callWithRuntimeException();
        return empty();
    }
    return ofNullable(mapper.apply(value));
}
|
@Test
public void mapOrElseTest() {
    // When a value is present, map it to upper case; otherwise log a message.
    // Chained calls keep working and may change the element type.
    final Opt<String> upperCased = Opt.ofBlankAble("hutool")
            .mapOrElse(String::toUpperCase, () -> Console.log("yes"));
    final String hutool = upperCased
            .mapOrElse(String::intern, () -> Console.log("Value is not present~"))
            .get();
    assertEquals("HUTOOL", hutool);
}
|
// Publishes the (optionally pre-acknowledged) message asynchronously and
// routes the async outcome to tellSuccess/tellFailure on the rule-engine
// context. processException wraps the failure with the original message.
@Override
public void onMsg(TbContext ctx, TbMsg msg) {
    var tbMsg = ackIfNeeded(ctx, msg);
    withCallback(publishMessageAsync(ctx, tbMsg),
            m -> tellSuccess(ctx, m),
            t -> tellFailure(ctx, processException(tbMsg, t), t));
}
|
/**
 * With forceAck enabled, onMsg must (1) ack the incoming message up front,
 * (2) send the expected SQS SendMessageRequest, and (3) enqueue a SUCCESS
 * message whose metadata carries the SQS result fields.
 */
@Test
void givenForceAckIsTrueAndMsgResultContainsBodyAndAttributesAndNumber_whenOnMsg_thenEnqueueForTellNext() {
    ReflectionTestUtils.setField(node, "forceAck", true);
    String messageBodyMd5 = "msgBodyMd5-55fb8ba2-2b71-4673-a82a-969756764761";
    String messageAttributesMd5 = "msgAttrMd5-e3ba3eef-52ae-436a-bec1-0c2c2252d1f1";
    String sequenceNumber = "seqNum-bb5ddce0-cf4e-4295-b015-524bdb6a332f";
    mockSendingMsgRequest();
    given(sendMessageResultMock.getMD5OfMessageBody()).willReturn(messageBodyMd5);
    given(sendMessageResultMock.getMD5OfMessageAttributes()).willReturn(messageAttributesMd5);
    given(sendMessageResultMock.getSequenceNumber()).willReturn(sequenceNumber);
    TbMsg msg = TbMsg.newMsg(TbMsgType.POST_TELEMETRY_REQUEST, DEVICE_ID, TbMsgMetaData.EMPTY, TbMsg.EMPTY_JSON_OBJECT);
    node.onMsg(ctxMock, msg);
    // forceAck means the message is acknowledged before publishing.
    then(ctxMock).should().ack(msg);
    // The request the node is expected to have built from config + message.
    SendMessageRequest sendMsgRequest = new SendMessageRequest()
            .withQueueUrl(TbNodeUtils.processPattern(config.getQueueUrlPattern(), msg))
            .withMessageBody(msg.getData())
            .withDelaySeconds(config.getDelaySeconds());
    then(sqsClientMock).should().sendMessage(sendMsgRequest);
    ArgumentCaptor<TbMsg> actualMsgCaptor = ArgumentCaptor.forClass(TbMsg.class);
    then(ctxMock).should().enqueueForTellNext(actualMsgCaptor.capture(), eq(TbNodeConnectionType.SUCCESS));
    TbMsg actualMsg = actualMsgCaptor.getValue();
    // metaData/ctx differ by design (metadata is enriched with SQS results).
    assertThat(actualMsg)
            .usingRecursiveComparison()
            .ignoringFields("metaData", "ctx")
            .isEqualTo(msg);
    assertThat(actualMsg.getMetaData().getData())
            .hasFieldOrPropertyWithValue("messageId", messageId)
            .hasFieldOrPropertyWithValue("requestId", requestId)
            .hasFieldOrPropertyWithValue("messageBodyMd5", messageBodyMd5)
            .hasFieldOrPropertyWithValue("messageAttributesMd5", messageAttributesMd5)
            .hasFieldOrPropertyWithValue("sequenceNumber", sequenceNumber);
    verifyNoMoreInteractions(ctxMock, sqsClientMock, sendMessageResultMock, responseMetadataMock);
}
|
// Synchronously sends this request via the underlying web3j service and
// returns the response deserialized to the configured response type.
public T send() throws IOException {
    return web3jService.send(this, responseType);
}
|
// adminNodeInfo must serialize to the exact JSON-RPC 2.0 request payload.
@Test
public void testAdminNodeInfo() throws Exception {
    web3j.adminNodeInfo().send();
    verifyResult("{\"jsonrpc\":\"2.0\",\"method\":\"admin_nodeInfo\",\"params\":[],\"id\":1}");
}
|
/**
 * Equality is defined in terms of {@code compareTo}: two Bandwidth values
 * are equal when they compare as equivalent, keeping equals consistent with
 * the natural ordering. Non-Bandwidth arguments (including null) are unequal.
 */
@Override
public boolean equals(Object obj) {
    if (obj == this) {
        return true;
    }
    return obj instanceof Bandwidth && this.compareTo((Bandwidth) obj) == 0;
}
|
/**
 * equals must be reflexive, compareTo-consistent (equal magnitude => equal),
 * and reject both different magnitudes and non-Bandwidth arguments.
 */
@Test
public void testEquals() {
    Bandwidth expected = Bandwidth.kbps(one);
    assertFalse(small.equals(big));
    assertTrue(small.equals(expected));
    assertTrue(small.equals(small));
    // A raw number is not a Bandwidth, even if the magnitude matches.
    assertFalse(small.equals(1000));
}
|
// Exposes the executor's counter set so callers can extract counter updates.
@Override
public CounterSet getOutputCounters() {
    return counters;
}
|
/**
 * The executor's output counters must contain one ElementCount per operation
 * with the per-operation element counts, and nothing else.
 */
@Test
@SuppressWarnings("unchecked")
public void testGetOutputCounters() throws Exception {
    List<Operation> operations =
            Arrays.asList(
                    new Operation[] {
                        createOperation("o1", 1), createOperation("o2", 2), createOperation("o3", 3)
                    });
    ExecutionStateTracker stateTracker = ExecutionStateTracker.newForTest();
    try (MapTaskExecutor executor = new MapTaskExecutor(operations, counterSet, stateTracker)) {
        // NOTE(review): this local deliberately shadows the field `counterSet`
        // passed to the constructor; both refer to the same counters here.
        CounterSet counterSet = executor.getOutputCounters();
        CounterUpdateExtractor<?> updateExtractor = Mockito.mock(CounterUpdateExtractor.class);
        counterSet.extractUpdates(false, updateExtractor);
        verify(updateExtractor).longSum(eq(named("test-o1-ElementCount")), anyBoolean(), eq(1L));
        verify(updateExtractor).longSum(eq(named("test-o2-ElementCount")), anyBoolean(), eq(2L));
        verify(updateExtractor).longSum(eq(named("test-o3-ElementCount")), anyBoolean(), eq(3L));
        verifyNoMoreInteractions(updateExtractor);
    }
}
|
// The provider's display name; used by the factory for lookup by name.
@Override
public String getName() {
    return "Zip";
}
|
/** Looking up PROVIDER_NAME must yield a ZIP provider whose getName echoes it. */
@Test
public void testGetName() {
    final ZIPCompressionProvider zipProvider =
            (ZIPCompressionProvider) factory.getCompressionProviderByName( PROVIDER_NAME );
    assertNotNull( zipProvider );
    assertEquals( PROVIDER_NAME, zipProvider.getName() );
}
|
/**
 * Records an error observation: the first sighting of a distinct error
 * allocates a new record; repeats bump the existing record's count and
 * last-seen timestamp.
 *
 * @param observation the error to record
 * @return false when there is insufficient space to store a new observation
 */
public boolean record(final Throwable observation)
{
    final long timestampMs;
    DistinctObservation distinctObservation;
    // Timestamp is taken outside the lock; find/allocate is the only part
    // that must be mutually exclusive.
    timestampMs = clock.time();
    synchronized (this)
    {
        distinctObservation = find(distinctObservations, observation);
        if (null == distinctObservation)
        {
            distinctObservation = newObservation(timestampMs, observation);
            // Sentinel returned when the buffer cannot fit another record.
            if (INSUFFICIENT_SPACE == distinctObservation)
            {
                return false;
            }
        }
    }
    // Count/last-timestamp updates happen outside the lock via atomic add
    // and ordered put on the underlying buffer.
    final int offset = distinctObservation.offset;
    buffer.getAndAddInt(offset + OBSERVATION_COUNT_OFFSET, 1);
    buffer.putLongOrdered(offset + LAST_OBSERVATION_TIMESTAMP_OFFSET, timestampMs);
    return true;
}
|
/**
 * Two exceptions with the same message but different causes must be treated
 * as distinct observations: each gets its own record, written and updated in
 * order, with the second record aligned after the first record's length.
 */
@Test
void shouldRecordTwoDistinctObservationsOnCause()
{
    final long timestampOne = 7;
    final long timestampTwo = 10;
    final int offset = 0;
    when(clock.time()).thenReturn(timestampOne).thenReturn(timestampTwo);
    // i == 0 has a cause, i == 1 does not -- same message either way.
    for (int i = 0; i < 2; i++)
    {
        assertTrue(log.record(i == 1 ?
                new RuntimeException("One") :
                new RuntimeException("One", new Exception("Cause"))));
    }
    final ArgumentCaptor<Integer> lengthArg = ArgumentCaptor.forClass(Integer.class);
    final InOrder inOrder = inOrder(buffer);
    // First record at offset 0: encode, first timestamp, length commit, then counters.
    inOrder.verify(buffer).putBytes(eq(offset + ENCODED_ERROR_OFFSET), any(byte[].class));
    inOrder.verify(buffer).putLong(offset + FIRST_OBSERVATION_TIMESTAMP_OFFSET, timestampOne);
    inOrder.verify(buffer).putIntOrdered(eq(offset + LENGTH_OFFSET), lengthArg.capture());
    inOrder.verify(buffer).getAndAddInt(offset + OBSERVATION_COUNT_OFFSET, 1);
    inOrder.verify(buffer).putLongOrdered(offset + LAST_OBSERVATION_TIMESTAMP_OFFSET, timestampOne);
    // Second record starts at the first record's aligned length.
    final int recordTwoOffset = BitUtil.align(lengthArg.getValue(), RECORD_ALIGNMENT);
    inOrder.verify(buffer).putBytes(eq(recordTwoOffset + ENCODED_ERROR_OFFSET), any(byte[].class));
    inOrder.verify(buffer).putLong(recordTwoOffset + FIRST_OBSERVATION_TIMESTAMP_OFFSET, timestampTwo);
    inOrder.verify(buffer).putIntOrdered(eq(recordTwoOffset + LENGTH_OFFSET), anyInt());
    inOrder.verify(buffer).getAndAddInt(recordTwoOffset + OBSERVATION_COUNT_OFFSET, 1);
    inOrder.verify(buffer).putLongOrdered(recordTwoOffset + LAST_OBSERVATION_TIMESTAMP_OFFSET, timestampTwo);
}
|
// Convenience overload: aggregates with default (null) key/value serdes,
// delegating to the Materialized-accepting variant.
@Override
public KTable<Windowed<K>, V> aggregate(final Initializer<V> initializer,
                                        final Merger<? super K, V> sessionMerger) {
    return aggregate(initializer, sessionMerger, Materialized.with(null, null));
}
|
/**
 * Cogrouped session-window aggregation over two input topics: verifies the
 * emitted key/value/timestamp sequence, including the tombstones (null
 * values) produced when sessions are merged, and the new sessions opened by
 * the late (t=444) records.
 */
@Test
public void sessionWindowMixAggregatorsTest() {
    final KTable<Windowed<String>, String> customers = windowedCogroupedStream.aggregate(
            MockInitializer.STRING_INIT, sessionMerger, Materialized.with(Serdes.String(), Serdes.String()));
    customers.toStream().to(OUTPUT);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> testInputTopic = driver.createInputTopic(TOPIC, new StringSerializer(), new StringSerializer());
        final TestInputTopic<String, String> testInputTopic2 = driver.createInputTopic(TOPIC2, new StringSerializer(), new StringSerializer());
        final TestOutputTopic<Windowed<String>, String> testOutputTopic = driver.createOutputTopic(
                OUTPUT, new SessionWindowedDeserializer<>(new StringDeserializer()), new StringDeserializer());
        testInputTopic.pipeInput("k1", "A", 0);
        testInputTopic.pipeInput("k2", "A", 0);
        testInputTopic.pipeInput("k2", "A", 1);
        testInputTopic.pipeInput("k1", "A", 2);
        testInputTopic2.pipeInput("k1", "B", 3);
        testInputTopic2.pipeInput("k2", "B", 3);
        testInputTopic2.pipeInput("k2", "B", 444);
        testInputTopic2.pipeInput("k1", "B", 444);
        // Null-valued outputs are session tombstones emitted before merged
        // sessions are re-emitted with the combined aggregate.
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", "0+A", 0);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "0+A", 0);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", null, 0);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "0+0+A+A", 1);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", null, 0);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", "0+0+A+A", 2);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", null, 2);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", "0+0+0+A+A-B", 3);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", null, 1);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "0+0+0+A+A-B", 3);
        // Records at t=444 fall outside the session gap and open new sessions.
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "0-B", 444);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", "0-B", 444);
    }
}
|
// Convenience overload: builds the platform run command without a resource
// (priority) specification, delegating to the full variant with null.
protected String[] getRunCommand(String command, String groupId,
        String userName, Path pidFile, Configuration config) {
    return getRunCommand(command, groupId, userName, pidFile, config, null);
}
|
// Without a priority, the run command must start with the platform launcher
// (winutils on Windows, bash elsewhere).
@Test (timeout = 5000)
public void testRunCommandNoPriority() throws Exception {
    Configuration conf = new Configuration();
    String[] command = containerExecutor.getRunCommand("echo", "group1", "user", null, conf);
    assertTrue("first command should be the run command for the platform",
            command[0].equals(Shell.WINUTILS) || command[0].equals("bash"));
}
|
// Registers a service record at the given registry path by applying the
// shared add-record command.
@Override
public void register(String path, ServiceRecord record) throws IOException {
    op(path, record, addRecordCommand);
}
|
/**
 * Registering two container records under the same component name must make
 * the DNS query for that name return two A records.
 */
@Test
public void testMultiARecord() throws Exception {
    ServiceRecord record = getMarshal().fromBytes("somepath",
            CONTAINER_RECORD.getBytes());
    ServiceRecord record2 = getMarshal().fromBytes("somepath",
            CONTAINER_RECORD2.getBytes());
    getRegistryDNS().register(
            "/registry/users/root/services/org-apache-slider/test1/components/"
                    + "ctr-e50-1451931954322-0016-01-000002",
            record);
    getRegistryDNS().register(
            "/registry/users/root/services/org-apache-slider/test1/components/"
                    + "ctr-e50-1451931954322-0016-01-000003",
            record2);
    // start assessing whether correct records are available
    List<Record> recs =
            assertDNSQuery("httpd.test1.root.dev.test.", 2);
    assertTrue("not an ARecord", recs.get(0) instanceof ARecord);
    assertTrue("not an ARecord", recs.get(1) instanceof ARecord);
}
|
/**
 * Extracts the group name from the LDAP search result and adds it to
 * {@code groups}; when {@code doGetDNs} is set, the group's full DN is also
 * recorded in {@code groupDNs}.
 *
 * @throws NamingException if the result lacks the configured name attribute
 */
void getGroupNames(SearchResult groupResult, Collection<String> groups,
    Collection<String> groupDNs, boolean doGetDNs)
    throws NamingException {
  final Attribute nameAttribute = groupResult.getAttributes().get(groupNameAttr);
  if (nameAttribute == null) {
    throw new NamingException("The group object does not have " +
        "attribute '" + groupNameAttr + "'.");
  }
  groups.add(nameAttribute.get().toString());
  if (doGetDNs) {
    groupDNs.add(groupResult.getNameInNamespace());
  }
}
|
@Test
public void testGetGroupsWithHierarchy() throws NamingException {
    // The search functionality of the mock context is reused, so we will
    // return the user NamingEnumeration first, and then the group
    // The parent search is run once for each level, and is a different search
    // The parent group is returned once for each group, yet the final list
    // should be unique
    when(getContext().search(anyString(), anyString(), any(Object[].class),
            any(SearchControls.class)))
            .thenReturn(getUserNames(), getGroupNames());
    // Parent lookups use the filter-only search overload (no args array).
    when(getContext().search(anyString(), anyString(),
            any(SearchControls.class)))
            .thenReturn(getParentGroupNames());
    // Expect 2 searches total with 1 hierarchy level traversed.
    doTestGetGroupsWithParent(Arrays.asList(getTestParentGroups()), 2, 1);
}
|
/**
 * Sums the Grand Exchange and high-alchemy values of the given items.
 * Accumulation is done in long arithmetic (with per-item casts) so that
 * price * quantity cannot overflow an int.
 *
 * @param items the container's items, or null when unavailable
 * @return the totals, or null when {@code items} is null
 */
@Nullable
ContainerPrices calculate(@Nullable Item[] items)
{
    if (items == null)
    {
        return null;
    }
    long geTotal = 0;
    long alchTotal = 0;
    for (final Item item : items)
    {
        final int id = item.getId();
        final int qty = item.getQuantity();
        // Placeholder/invalid ids and zero quantities contribute nothing.
        if (id > 0 && qty != 0)
        {
            alchTotal += (long) getHaPrice(id) * qty;
            geTotal += (long) itemManager.getItemPrice(id) * qty;
        }
    }
    return new ContainerPrices(geTotal, alchTotal);
}
|
/**
 * Price totals must be accumulated in long arithmetic: both the high-alch
 * and GE products chosen here individually overflow an int.
 */
@Test
public void testCalculate()
{
    Item coins = new Item(ItemID.COINS_995, Integer.MAX_VALUE);
    Item whip = new Item(ItemID.ABYSSAL_WHIP, 1_000_000_000);
    Item[] items = ImmutableList.of(
            coins,
            whip
    ).toArray(new Item[0]);
    ItemComposition whipComp = mock(ItemComposition.class);
    when(whipComp.getHaPrice())
            .thenReturn(4); // 4 * 1m overflows
    when(itemManager.getItemComposition(ItemID.ABYSSAL_WHIP))
            .thenReturn(whipComp);
    when(itemManager.getItemPrice(ItemID.ABYSSAL_WHIP))
            .thenReturn(3); // 1b * 3 overflows
    final ContainerPrices prices = bankPlugin.calculate(items);
    assertNotNull(prices);
    // Totals exceeding Integer.MAX_VALUE prove no int truncation occurred.
    assertTrue(prices.getHighAlchPrice() > Integer.MAX_VALUE);
    assertTrue(prices.getGePrice() > Integer.MAX_VALUE);
}
|
/**
 * Idempotent, race-free close: the CAS on isRunning guarantees the shutdown
 * sequence runs exactly once even when a terminate command and a client
 * disconnect race each other.
 */
@Override
public void close() {
    // Push queries can be closed by both terminate commands and the client ending the request, so
    // we ensure that there's no race and that close is called just once.
    if (!this.isRunning.compareAndSet(true, false)) {
        return;
    }
    // To avoid deadlock, close the queue first to ensure producer side isn't blocked trying to
    // write to the blocking queue, otherwise super.close call can deadlock:
    rowQueue.close();
    // Now safe to close:
    super.close();
}
|
@Test
public void shouldCloseQueueBeforeTopologyToAvoidDeadLock() {
    // Given: a running push query
    query.start();

    // When: it is closed
    query.close();

    // Then: the row queue must be closed strictly before the streams topology,
    // otherwise a blocked producer could deadlock the topology shutdown.
    final InOrder closeOrder = inOrder(rowQueue, kafkaStreams);
    closeOrder.verify(rowQueue).close();
    closeOrder.verify(kafkaStreams).close(any(java.time.Duration.class));
}
|
// Convenience overload: delegates with the trailing boolean fixed to true
// -- NOTE(review): the flag's meaning is not visible here; confirm against
// the four-argument overload before relying on it.
public Object extract(Object target, String attributeName, Object metadata) {
    return extract(target, attributeName, metadata, true);
}
|
// Extracting from a null target must yield null rather than throwing.
@Test
public void when_extractWithNullTarget_then_nullValue() {
    // WHEN
    Object power = createExtractors(null).extract(null, "gimmePower", null);
    // THEN
    assertNull(power);
}
|
/**
 * Invokes the wrapped invoker and, when trace subscribers exist for the
 * called interface/method, pushes a one-line trace (arguments, result, and
 * elapsed time) to each connected channel. A channel is detached once it has
 * received its requested number of traces (TRACE_MAX, default 1), on send
 * failure, or when it disconnects.
 */
@Override
public Result invoke(Invoker<?> invoker, Invocation invocation) throws RpcException {
    long start = System.currentTimeMillis();
    Result result = invoker.invoke(invocation);
    long end = System.currentTimeMillis();
    if (TRACERS.size() > 0) {
        // Prefer method-level subscribers; fall back to interface-level ones.
        String key = invoker.getInterface().getName() + "." + RpcUtils.getMethodName(invocation);
        Set<Channel> channels = TRACERS.get(key);
        if (CollectionUtils.isEmpty(channels)) {
            key = invoker.getInterface().getName();
            channels = TRACERS.get(key);
        }
        if (CollectionUtils.isNotEmpty(channels)) {
            // Iterate a snapshot so channels can be removed from the live set.
            for (Channel channel : new ArrayList<>(channels)) {
                if (channel.isConnected()) {
                    try {
                        int max = 1;
                        Integer m = (Integer) channel.getAttribute(TRACE_MAX);
                        if (m != null) {
                            max = m;
                        }
                        int count;
                        // Lazily attach a per-channel trace counter.
                        AtomicInteger c = (AtomicInteger) channel.getAttribute(TRACE_COUNT);
                        if (c == null) {
                            c = new AtomicInteger();
                            channel.setAttribute(TRACE_COUNT, c);
                        }
                        count = c.getAndIncrement();
                        if (count < max) {
                            String prompt =
                                    channel.getUrl().getParameter(Constants.PROMPT_KEY, Constants.DEFAULT_PROMPT);
                            channel.send(
                                    "\r\n" + RpcContext.getServiceContext().getRemoteAddress() + " -> "
                                            + invoker.getInterface().getName()
                                            + "." + RpcUtils.getMethodName(invocation)
                                            + "(" + JsonUtils.toJson(invocation.getArguments()) + ")" + " -> "
                                            + JsonUtils.toJson(result.getValue())
                                            + "\r\nelapsed: " + (end - start) + " ms."
                                            + "\r\n\r\n" + prompt);
                        }
                        // Quota reached: unsubscribe the channel from tracing.
                        if (count >= max - 1) {
                            channels.remove(channel);
                        }
                    } catch (Throwable e) {
                        // Any send/serialization failure detaches the channel.
                        channels.remove(channel);
                        logger.warn(PROTOCOL_FAILED_PARSE, "", "", e.getMessage(), e);
                    }
                } else {
                    channels.remove(channel);
                }
            }
        }
    }
    return result;
}
|
/**
 * Verifies TraceFilter end to end: the first invocation pushes a formatted
 * trace message to the subscribed channel and bumps its trace counter; once
 * the per-channel quota (max = 2) is reached, the channel is removed from
 * the static TRACERS registry.
 *
 * Fix: the static TRACERS field was previously read via
 * {@code tracers.get(new ConcurrentHashMap<...>())} -- Field.get ignores its
 * argument for static fields, so that only worked by accident and allocated
 * a pointless map. Static fields are read with {@code get(null)}.
 */
@Test
void testInvoke() throws Exception {
    String method = "sayHello";
    Class<?> type = DemoService.class;
    String key = type.getName() + "." + method;
    // add tracer with a quota of 2 traces for this channel
    TraceFilter.addTracer(type, method, mockChannel, 2);
    Invoker<DemoService> mockInvoker = mock(Invoker.class);
    Invocation mockInvocation = mock(Invocation.class);
    Result mockResult = mock(Result.class);
    TraceFilter filter = new TraceFilter();
    given(mockInvoker.getInterface()).willReturn(DemoService.class);
    given(mockInvocation.getMethodName()).willReturn(method);
    given(mockInvocation.getArguments()).willReturn(new Object[0]);
    given(mockInvoker.invoke(mockInvocation)).willReturn(mockResult);
    given(mockResult.getValue()).willReturn("result");
    // test invoke: first call traces and increments the counter
    filter.invoke(mockInvoker, mockInvocation);
    String message = listToString(mockChannel.getReceivedObjects());
    String expectMessage = "org.apache.dubbo.rpc.protocol.dubbo.support.DemoService.sayHello([]) -> \"result\"";
    System.out.println("actual message: " + message);
    Assertions.assertTrue(message.contains(expectMessage));
    Assertions.assertTrue(message.contains("elapsed:"));
    AtomicInteger traceCount = (AtomicInteger) mockChannel.getAttribute(TRACE_COUNT);
    Assertions.assertEquals(1, traceCount.get());
    // test remove channel when count >= max - 1
    filter.invoke(mockInvoker, mockInvocation);
    Field tracers = TraceFilter.class.getDeclaredField(TRACERS_FIELD_NAME);
    tracers.setAccessible(true);
    // TRACERS is static: per Field#get the receiver is ignored, so pass null.
    @SuppressWarnings("unchecked")
    ConcurrentHashMap<String, Set<Channel>> o =
            (ConcurrentHashMap<String, Set<Channel>>) tracers.get(null);
    Assertions.assertTrue(o.containsKey(key));
    Set<Channel> channels = o.get(key);
    Assertions.assertNotNull(channels);
    Assertions.assertFalse(channels.contains(mockChannel));
}
|
/**
 * Evaluates a PMML input within the given runtime context.
 * Pure delegation to {@code executeEfestoInputPMML}; empty when evaluation
 * produces no output (see the delegate for details).
 */
@Override
public Optional<EfestoOutputPMML> evaluateInput(EfestoInputPMML toEvaluate, EfestoRuntimeContext context) {
    return executeEfestoInputPMML(toEvaluate, context);
}
|
// A well-formed model/input pair must evaluate to a present output whose content
// matches the input (checked by commonEvaluateEfestoOutputPMML).
@Test
void evaluateCorrectInput() {
    modelLocalUriId = getModelLocalUriIdFromPmmlIdFactory(FILE_NAME, MODEL_NAME);
    EfestoInputPMML inputPMML = new EfestoInputPMML(modelLocalUriId, getPMMLContext(FILE_NAME, MODEL_NAME,
            memoryCompilerClassLoader));
    Optional<EfestoOutputPMML> retrieved = kieRuntimeServicePMML.evaluateInput(inputPMML,
            getPMMLContext(FILE_NAME,
                    MODEL_NAME,
                    memoryCompilerClassLoader));
    assertThat(retrieved).isNotNull().isPresent();
    commonEvaluateEfestoOutputPMML(retrieved.get(), inputPMML);
}
|
/**
 * Runs {@code actionToMeasure}, recording its wall-clock latency on {@code sensor}
 * only when the sensor is active (recording enabled and metrics registered);
 * otherwise the action runs without any timing overhead.
 *
 * @param actionToMeasure action to execute (always executed exactly once)
 * @param time            clock used to take nanosecond timestamps
 * @param sensor          sensor that receives the measured latency
 */
public static void maybeMeasureLatency(final Runnable actionToMeasure,
                                       final Time time,
                                       final Sensor sensor) {
    final boolean measure = sensor.shouldRecord() && sensor.hasMetrics();
    if (!measure) {
        actionToMeasure.run();
        return;
    }
    final long startNs = time.nanoseconds();
    try {
        actionToMeasure.run();
    } finally {
        // Record even when the action throws, so partial work is still accounted for.
        sensor.record(time.nanoseconds() - startNs);
    }
}
|
/**
 * When the sensor's recording level disables recording, the action must still be
 * executed (previously this test asserted nothing at all).
 */
@Test
public void shouldNotMeasureLatencyDueToRecordingLevel() {
    final Sensor sensor = mock(Sensor.class);
    when(sensor.shouldRecord()).thenReturn(false);
    final Time time = mock(Time.class);
    final boolean[] actionRan = {false};
    StreamsMetricsImpl.maybeMeasureLatency(() -> actionRan[0] = true, time, sensor);
    if (!actionRan[0]) {
        throw new AssertionError("action must run even when latency is not measured");
    }
}
|
/**
 * Computes this broker's share of a resource-group quota from the configured limit,
 * this broker's usage, and the usage reported by all brokers.
 * Returns -1 when the limit is unconfigured (negative), the configured value when
 * total usage is zero, and otherwise the local usage adjusted by its proportional
 * share of the residual (configured - total) capacity, floored at 1.
 *
 * @throws PulsarAdminException if local or total usage is negative
 */
@Override
public long computeLocalQuota(long confUsage, long myUsage, long[] allUsages) throws PulsarAdminException {
    // ToDo: work out the initial conditions: we may allow a small number of "first few iterations" to go
    // unchecked as we get some history of usage, or follow some other "TBD" method.
    if (confUsage < 0) {
        // This can happen if the RG is not configured with this particular limit (message or byte count) yet.
        val retVal = -1;
        if (log.isDebugEnabled()) {
            log.debug("Configured usage ({}) is not set; returning a special value ({}) for calculated quota",
                    confUsage, retVal);
        }
        return retVal;
    }
    long totalUsage = 0;
    for (long usage : allUsages) {
        totalUsage += usage;
    }
    if (myUsage < 0 || totalUsage < 0) {
        String errMesg = String.format("Local usage (%d) or total usage (%d) is negative",
                myUsage, totalUsage);
        log.error(errMesg);
        throw new PulsarAdminException(errMesg);
    }
    // If the total usage is zero (which may happen during initial transients), just return the configured value.
    // The caller is expected to check the value returned, or not call here with a zero global usage.
    // [This avoids a division by zero when calculating the local share.]
    if (totalUsage == 0) {
        if (log.isDebugEnabled()) {
            log.debug("computeLocalQuota: totalUsage is zero; "
                            + "returning the configured usage ({}) as new local quota",
                    confUsage);
        }
        return confUsage;
    }
    if (myUsage > totalUsage) {
        String errMesg = String.format("Local usage (%d) is greater than total usage (%d)",
                myUsage, totalUsage);
        // Log as a warning [in case this can happen transiently (?)].
        log.warn(errMesg);
    }
    // How much unused capacity is left over? (Negative when over the configured limit.)
    float residual = confUsage - totalUsage;
    // New quota is the old usage incremented by any residual as a ratio of the local usage to the total usage.
    // This should result in the calculatedQuota increasing proportionately if total usage is less than the
    // configured usage, and reducing proportionately if the total usage is greater than the configured usage.
    // Capped to 1, to prevent negative or zero setting of quota.
    // the rate limiter code assumes that rate value of 0 or less to mean that no rate limit should be applied
    float myUsageFraction = (float) myUsage / totalUsage;
    float calculatedQuota = max(myUsage + residual * myUsageFraction, 1);
    // Truncate toward zero when converting back to the long-valued quota.
    val longCalculatedQuota = (long) calculatedQuota;
    if (log.isDebugEnabled()) {
        log.debug("computeLocalQuota: myUsage={}, totalUsage={}, myFraction={}; newQuota returned={} [long: {}]",
                myUsage, totalUsage, myUsageFraction, calculatedQuota, longCalculatedQuota);
    }
    return longCalculatedQuota;
}
|
@Test
public void testRQCalcGlobUsedEqualsToConfigTest() throws PulsarAdminException {
    // When global usage equals the configured limit there is no residual capacity,
    // so the recalculated quota must equal the current local usage.
    final long configuredLimit = 100;
    final long localUsage = 20;
    final long[] usageAcrossBrokers = new long[] { 100 };
    Assert.assertEquals(this.rqCalc.computeLocalQuota(configuredLimit, localUsage, usageAcrossBrokers),
            localUsage);
}
|
/**
 * Upserts a beta (gray-release) configuration: inserts it when no beta record
 * exists for the (dataId, group, tenant) triple, otherwise updates it in place.
 */
@Override
public ConfigOperateResult insertOrUpdateBeta(final ConfigInfo configInfo, final String betaIps, final String srcIp,
        final String srcUser) {
    final ConfigInfoStateWrapper existingBeta = this.findConfigInfo4BetaState(configInfo.getDataId(),
            configInfo.getGroup(), configInfo.getTenant());
    return existingBeta == null
            ? addConfigInfo4Beta(configInfo, betaIps, srcIp, srcUser)
            : updateConfigInfo4Beta(configInfo, betaIps, srcIp, srcUser);
}
|
// When a beta record already exists, insertOrUpdateBeta must take the update path:
// the returned id/lastModified come from the existing record and an UPDATE is issued.
@Test
void testInsertOrUpdateBetaOfUpdate() {
    String dataId = "betaDataId113";
    String group = "group";
    String tenant = "tenant";
    //mock exist beta
    ConfigInfoStateWrapper mockedConfigInfoStateWrapper = new ConfigInfoStateWrapper();
    mockedConfigInfoStateWrapper.setDataId(dataId);
    mockedConfigInfoStateWrapper.setGroup(group);
    mockedConfigInfoStateWrapper.setTenant(tenant);
    mockedConfigInfoStateWrapper.setId(123456L);
    mockedConfigInfoStateWrapper.setLastModified(System.currentTimeMillis());
    when(jdbcTemplate.queryForObject(anyString(), eq(new Object[] {dataId, group, tenant}),
            eq(CONFIG_INFO_STATE_WRAPPER_ROW_MAPPER))).thenReturn(mockedConfigInfoStateWrapper, mockedConfigInfoStateWrapper);
    //execute
    String betaIps = "betaips...";
    String srcIp = "srcUp...";
    String srcUser = "srcUser...";
    String appName = "appname";
    String content = "content111";
    ConfigInfo configInfo = new ConfigInfo(dataId, group, tenant, appName, content);
    configInfo.setEncryptedDataKey("key34567");
    ConfigOperateResult configOperateResult = externalConfigInfoBetaPersistService.insertOrUpdateBeta(configInfo, betaIps, srcIp,
            srcUser);
    //expect return obj mirrors the pre-existing record
    assertEquals(mockedConfigInfoStateWrapper.getId(), configOperateResult.getId());
    assertEquals(mockedConfigInfoStateWrapper.getLastModified(), configOperateResult.getLastModified());
    //verify update to be invoked exactly once with the full column set
    Mockito.verify(jdbcTemplate, times(1))
            .update(anyString(), eq(configInfo.getContent()), eq(configInfo.getMd5()), eq(betaIps), eq(srcIp), eq(srcUser),
                    eq(configInfo.getAppName()), eq(configInfo.getEncryptedDataKey()), eq(dataId), eq(group), eq(tenant));
}
|
/**
 * Returns the connection properties derived from the JDBC URL.
 * NOTE(review): returns the internal instance without a defensive copy.
 */
public Properties getProperties()
{
    return properties;
}
|
@Test
public void testUriWithCustomHeaders()
        throws SQLException, UnsupportedEncodingException
{
    // Custom headers must survive a round trip through URL encoding in the JDBC URL.
    final String customHeaders = "testHeaderKey:testHeaderValue";
    final String encoded = URLEncoder.encode(customHeaders, StandardCharsets.UTF_8.toString());
    final PrestoDriverUri parameters = createDriverUri("presto://localhost:8080?customHeaders=" + encoded);
    assertEquals(parameters.getProperties().getProperty(CUSTOM_HEADERS.getKey()), customHeaders);
}
|
/**
 * Rewrites a DROP ... DELETE TOPIC statement: deletes the backing Kafka topic and
 * any key/value schema registry subjects, then returns the statement with the
 * DELETE TOPIC clause stripped so downstream execution only drops the source.
 * Non-DROP statements and DROPs without DELETE TOPIC pass through unchanged.
 */
@SuppressWarnings({"unchecked", "UnstableApiUsage"})
@Override
public <T extends Statement> ConfiguredStatement<T> inject(
    final ConfiguredStatement<T> statement) {
  if (!(statement.getStatement() instanceof DropStatement)) {
    return statement;
  }
  final DropStatement dropStatement = (DropStatement) statement.getStatement();
  if (!dropStatement.isDeleteTopic()) {
    return statement;
  }
  final SourceName sourceName = dropStatement.getName();
  final DataSource source = metastore.getSource(sourceName);
  if (source != null) {
    if (source.isSource()) {
      throw new KsqlException("Cannot delete topic for read-only source: " + sourceName.text());
    }
    checkTopicRefs(source);
    deleteTopic(source);
    // Closer runs both subject deletions even if one fails, aggregating exceptions.
    final Closer closer = Closer.create();
    closer.register(() -> deleteKeySubject(source));
    closer.register(() -> deleteValueSubject(source));
    try {
      closer.close();
    } catch (final KsqlException e) {
      throw e;
    } catch (final Exception e) {
      throw new KsqlException(e);
    }
  } else if (!dropStatement.getIfExists()) {
    throw new KsqlException("Could not find source to delete topic for: " + statement);
  }
  // Re-emit the statement without the DELETE TOPIC clause, keeping text and AST in sync.
  final T withoutDelete = (T) dropStatement.withoutDeleteClause();
  final String withoutDeleteText = SqlFormatter.formatSql(withoutDelete) + ";";
  return statement.withStatement(withoutDeleteText, withoutDelete);
}
|
@Test
public void shouldDoNothingIfNoDeleteTopic() {
    // A DROP without DELETE TOPIC must pass through the injector completely untouched:
    // same statement instance, no topic or schema-registry interaction.
    final ConfiguredStatement<DropStream> result = deleteInjector.inject(DROP_WITHOUT_DELETE_TOPIC);
    assertThat(result, is(sameInstance(DROP_WITHOUT_DELETE_TOPIC)));
    verifyNoMoreInteractions(topicClient, registryClient);
}
|
/** Returns the Azure Files share name parsed from the endpoint URI. */
public String getShare() {
    return share;
}
|
@Test
void shareForValidURIShouldBeExtracted2() {
    // A trailing slash after the share segment must not change the extracted share name.
    final FilesEndpoint endpoint = context.getEndpoint("azure-files://account/share/", FilesEndpoint.class);
    assertEquals("share", endpoint.getConfiguration().getShare());
}
|
/**
 * Creates an image XObject from a file, detecting the image format from the file
 * content (not the extension). JPEGs and CCITT TIFFs keep their native encoding;
 * BMP/GIF/PNG (and TIFFs that are not CCITT) are re-encoded losslessly.
 *
 * @param file image file to read
 * @param doc  document the image is created for
 * @return the created image XObject
 * @throws IOException if the file type cannot be determined or the image cannot be read
 * @throws IllegalArgumentException if the content is not a supported image type
 */
public static PDImageXObject createFromFileByContent(File file, PDDocument doc) throws IOException
{
    FileType fileType = null;
    try (BufferedInputStream bufferedInputStream = new BufferedInputStream(new FileInputStream(file)))
    {
        fileType = FileTypeDetector.detectFileType(bufferedInputStream);
    }
    catch (IOException e)
    {
        throw new IOException("Could not determine file type: " + file.getName(), e);
    }
    if (fileType == null)
    {
        throw new IllegalArgumentException("Image type not supported: " + file.getName());
    }
    if (fileType == FileType.JPEG)
    {
        try (FileInputStream fis = new FileInputStream(file))
        {
            return JPEGFactory.createFromStream(doc, fis);
        }
    }
    if (fileType == FileType.TIFF)
    {
        try
        {
            return CCITTFactory.createFromFile(doc, file);
        }
        catch (IOException ex)
        {
            LOG.debug("Reading as TIFF failed, setting fileType to PNG", ex);
            // Plan B: try reading with ImageIO
            // common exception:
            // First image in tiff is not CCITT T4 or T6 compressed
            fileType = FileType.PNG;
        }
    }
    if (fileType == FileType.BMP || fileType == FileType.GIF || fileType == FileType.PNG)
    {
        BufferedImage bim = ImageIO.read(file);
        if (bim == null)
        {
            // ImageIO.read returns null (instead of throwing) when no registered
            // reader can decode the content; fail explicitly rather than NPE later.
            throw new IllegalArgumentException("Image type not supported: " + file.getName());
        }
        return LosslessFactory.createFromImage(doc, bim);
    }
    throw new IllegalArgumentException("Image type " + fileType + " not supported: " + file.getName());
}
|
// Content-based creation must match what the dedicated per-format factories produce,
// including the TIFF fall-back path (lzw.tif is not CCITT and goes through ImageIO).
@Test
void testCreateFromFileByContent() throws IOException, URISyntaxException
{
    testCompareCreateByContentWithCreatedByCCITTFactory("ccittg4.tif");
    testCompareCreatedByContentWithCreatedByJPEGFactory("jpeg.jpg");
    testCompareCreatedByContentWithCreatedByJPEGFactory("jpegcmyk.jpg");
    testCompareCreatedByContentWithCreatedByLosslessFactory("gif.gif");
    testCompareCreatedByContentWithCreatedByLosslessFactory("gif-1bit-transparent.gif");
    testCompareCreatedByContentWithCreatedByLosslessFactory("png_indexed_8bit_alpha.png");
    testCompareCreatedByContentWithCreatedByLosslessFactory("png.png");
    testCompareCreatedByContentWithCreatedByLosslessFactory("lzw.tif");
}
|
/**
 * Blocks until the response is available (via {@code process}) and returns it.
 * {@code process} signals failure by returning a Throwable rather than throwing;
 * that Throwable is re-wrapped here as an ExecutionException per the Future contract.
 */
@Override
public HttpResponse get() throws InterruptedException, ExecutionException {
    try {
        final Object result = process(0, null);
        if (result instanceof Throwable) {
            throw new ExecutionException((Throwable) result);
        }
        return (HttpResponse) result;
    } finally {
        // Mark the future done regardless of success or failure.
        isDone = true;
    }
}
|
// A Throwable produced by processing must surface from get() as an ExecutionException.
@Test(expected = ExecutionException.class)
public void errGetThrowable() throws ExecutionException, InterruptedException, TimeoutException {
    get(new Exception("wrong"), false);
}
|
/**
 * Parses a feature identifier (URI, classpath reference, or file path) into a URI.
 * The checks are order-sensitive: legacy bare "classpath:" first, then path-separator
 * normalization, then the Windows drive-letter case (which would otherwise be
 * mistaken for a URI scheme), then genuine URIs, and finally plain file paths.
 *
 * @throws IllegalArgumentException if the identifier is empty
 */
public static URI parse(String featureIdentifier) {
    requireNonNull(featureIdentifier, "featureIdentifier may not be null");
    if (featureIdentifier.isEmpty()) {
        throw new IllegalArgumentException("featureIdentifier may not be empty");
    }
    // Legacy from the Cucumber Eclipse plugin
    // Older versions of Cucumber allowed it.
    if (CLASSPATH_SCHEME_PREFIX.equals(featureIdentifier)) {
        return rootPackageUri();
    }
    if (nonStandardPathSeparatorInUse(featureIdentifier)) {
        String standardized = replaceNonStandardPathSeparator(featureIdentifier);
        return parseAssumeFileScheme(standardized);
    }
    // "C:\..." would otherwise parse as a URI with scheme "C".
    if (isWindowsOS() && pathContainsWindowsDrivePattern(featureIdentifier)) {
        return parseAssumeFileScheme(featureIdentifier);
    }
    if (probablyURI(featureIdentifier)) {
        return parseProbableURI(featureIdentifier);
    }
    return parseAssumeFileScheme(featureIdentifier);
}
|
// "classpath:/path/to" must parse with scheme "classpath" and keep the path intact.
@Test
void can_parse_classpath_directory_form() {
    URI uri = FeaturePath.parse("classpath:/path/to");
    assertAll(
        () -> assertThat(uri.getScheme(), is("classpath")),
        () -> assertThat(uri.getSchemeSpecificPart(), is("/path/to")));
}
|
/**
 * Async getChildren that transparently resolves symlink nodes: plain paths go
 * straight to ZooKeeper, symlinked paths are handled by a composite callback
 * that also serves as the watcher when one was requested.
 */
@Override
public void getChildren(final String path, final boolean watch, final AsyncCallback.ChildrenCallback cb, final Object ctx)
{
    if (SymlinkUtil.containsSymlink(path))
    {
        final SymlinkChildrenCallback symlinkCallback = new SymlinkChildrenCallback(path, _defaultWatcher, cb);
        getChildren0(path, watch ? symlinkCallback : null, symlinkCallback, ctx);
    }
    else
    {
        _zk.getChildren(path, watch, cb, ctx);
    }
}
|
// A children watch set through a symlink must fire on the resolved (target) path:
// the watch is placed via /foo/$link, a child is added under /foo/bar, and the
// watch event must deliver the updated child list (10 existing + 1 new = 11).
@Test
public void testSymlinkWithChildrenWatch() throws InterruptedException
{
    final CountDownLatch latch = new CountDownLatch(1);
    final CountDownLatch latch2 = new CountDownLatch(1);
    // Fired after the watch triggers; verifies the post-change child count.
    final AsyncCallback.ChildrenCallback childrenCallback = new AsyncCallback.ChildrenCallback()
    {
        @Override
        public void processResult(int rc, String path, Object ctx, List<String> children)
        {
            KeeperException.Code result = KeeperException.Code.get(rc);
            Assert.assertEquals(result, KeeperException.Code.OK);
            Assert.assertEquals(children.size(), 11);
            latch.countDown();
        }
    };
    Watcher childrenWatch = new Watcher()
    {
        @Override
        public void process(WatchedEvent event)
        {
            Assert.assertEquals(event.getType(), Event.EventType.NodeChildrenChanged);
            _zkClient.getZooKeeper().getChildren(event.getPath(), null, childrenCallback, null);
        }
    };
    // Completes once the initial getChildren (which installs the watch) succeeds.
    AsyncCallback.ChildrenCallback childrenCallback2 = new AsyncCallback.ChildrenCallback()
    {
        @Override
        public void processResult(int rc, String path, Object ctx, List<String> children)
        {
            KeeperException.Code result = KeeperException.Code.get(rc);
            Assert.assertEquals(result, KeeperException.Code.OK);
            latch2.countDown();
        }
    };
    // symlink: /foo/$link -> /foo/bar
    _zkClient.getZooKeeper().getChildren("/foo/$link", childrenWatch, childrenCallback2, null);
    latch2.await(30, TimeUnit.SECONDS);
    _zkClient.ensurePersistentNodeExists("/foo/bar/newNode", new FutureCallback<>());
    latch.await(30, TimeUnit.SECONDS);
    // Clean up so later tests see the original child count.
    _zkClient.removeNodeUnsafe("/foo/bar/newNode", new FutureCallback<>());
}
|
/**
 * Formats a Connect schema as SQL type text; with AS_COLUMN_LIST the enclosing
 * top-level STRUCT wrapper is stripped so only the column list remains.
 */
@Override
public String format(final Schema schema) {
    final String body = SchemaWalker.visit(schema, new Converter());
    final String converted = body + typePostFix(schema);
    if (options.contains(Option.AS_COLUMN_LIST)) {
        return stripTopLevelStruct(converted);
    }
    return converted;
}
|
// Optionality of the array itself must not change the formatted type text
// in either DEFAULT or STRICT mode.
@Test
public void shouldFormatOptionalArray() {
    // Given:
    final Schema schema = SchemaBuilder
        .array(Schema.OPTIONAL_FLOAT64_SCHEMA)
        .optional()
        .build();
    // Then:
    assertThat(DEFAULT.format(schema),
        is("ARRAY<DOUBLE>"));
    assertThat(STRICT.format(schema),
        is("ARRAY<DOUBLE>"));
}
|
/**
 * Adds a tenant under a per-tenant lock: the ZooKeeper path is written first,
 * then the in-memory tenant is created with the current time as creation instant.
 */
public Tenant addTenant(TenantName tenantName) {
    try (Lock lock = tenantLocks.lock(tenantName)) {
        writeTenantPath(tenantName);
        return createTenant(tenantName, clock.instant());
    }
}
|
// Adding a tenant must create its ZooKeeper path and extend (not replace)
// the existing set of tenant names.
@Test
public void testAddTenant() throws Exception {
    Set<TenantName> allTenants = tenantRepository.getAllTenantNames();
    assertTrue(allTenants.contains(tenant1));
    assertTrue(allTenants.contains(tenant2));
    tenantRepository.addTenant(tenant3);
    assertZooKeeperTenantPathExists(tenant3);
    allTenants = tenantRepository.getAllTenantNames();
    assertTrue(allTenants.contains(tenant1));
    assertTrue(allTenants.contains(tenant2));
    assertTrue(allTenants.contains(tenant3));
}
|
/**
 * Looks up city-level geolocation for an address, timed via the resolver timer.
 * Missing fields are reported as "N/A" (geoNameId is used as the presence signal
 * because name/isoCode getters can NPE on absent data). Returns empty on any
 * failure; only unexpected failures are logged and recorded in {@code lastError}.
 */
@Override
public Optional<GeoLocationInformation> doGetGeoIpData(InetAddress address) {
    try (Timer.Context ignored = getTimer()) {
        final CityResponse response = getCityResponse(address);
        final Location location = response.getLocation();
        final Country country = response.getCountry();
        final City city = response.getCity();
        GeoLocationInformation info = GeoLocationInformation.create(
                location.getLatitude(), location.getLongitude(),
                country.getGeoNameId() == null ? "N/A" : country.getIsoCode(),
                country.getGeoNameId() == null ? "N/A" : country.getName(),
                city.getGeoNameId() == null ? "N/A" : city.getName(),// calling to .getName() may throw a NPE
                "N/A",
                "N/A");
        return Optional.of(info);
    } catch (AddressNotFoundException e) {
        // Expected for unknown/private addresses — not an error worth recording.
        return Optional.empty();
    } catch (Exception e) {
        LOG.debug("Could not get location from IP {}", address.getHostAddress(), e);
        lastError = e.getMessage();
        return Optional.empty();
    }
}
|
// Every field of the returned GeoLocationInformation must be populated from the
// corresponding part of the CityResponse when a lookup succeeds.
@Test
void testDoGetGeoIpData() throws IOException, GeoIp2Exception {
    Country country = createCountry();
    City city = createCity();
    Location location = createLocation();
    CityResponse cityResponse = createCityResponse(country, city, location);
    when(resolver.getCityResponse(any(InetAddress.class))).thenReturn(cityResponse);
    InetAddress address = InetAddress.getByName("localhost");
    Optional<GeoLocationInformation> optInfo = resolver.doGetGeoIpData(address);
    assertTrue(optInfo.isPresent());
    GeoLocationInformation info = optInfo.get();
    assertEquals(country.getName(), info.countryName());
    assertEquals(country.getIsoCode(), info.countryIsoCode());
    assertEquals(city.getName(), info.cityName());
    assertEquals(location.getLatitude(), info.latitude());
    assertEquals(location.getLongitude(), info.longitude());
    assertEquals(location.getTimeZone(), info.timeZone());
}
|
/**
 * Returns the index of the highest set bit, scanning segments from the end,
 * or -1 when no bit is set. Operates on the current segment snapshot.
 */
public long maxSetBit() {
    ThreadSafeBitSetSegments segments = this.segments.get();
    int segmentIdx = segments.numSegments() - 1;
    for(;segmentIdx >= 0; segmentIdx--) {
        AtomicLongArray segment = segments.getSegment(segmentIdx);
        for(int longIdx=segment.length() - 1; longIdx >= 0; longIdx--) {
            long l = segment.get(longIdx);
            if(l != 0) {
                // Do the offset arithmetic in 64-bit space: the original int-typed
                // (segmentIdx << log2SegmentSize) + longIdx * 64 overflows once the
                // bit index exceeds Integer.MAX_VALUE, despite the long return type.
                return ((long) segmentIdx << log2SegmentSize) + (longIdx * 64L) + (63 - Long.numberOfLeadingZeros(l));
            }
        }
    }
    return -1;
}
|
// maxSetBit must track the highest set bit as bits are added across segment
// boundaries, and must recover correctly after clearAll().
@Test
public void testMaxSetBit() {
    ThreadSafeBitSet set1 = new ThreadSafeBitSet();
    set1.set(100);
    Assert.assertEquals(100, set1.maxSetBit());
    set1.set(100000);
    Assert.assertEquals(100000, set1.maxSetBit());
    set1.set(1000000);
    Assert.assertEquals(1000000, set1.maxSetBit());
    set1.clearAll();
    set1.set(555555);
    Assert.assertEquals(555555, set1.maxSetBit());
}
|
/** Delegates statement interpretation to the shared helper with this interpreter's session. */
@Override
public InterpreterResult interpret(String st, InterpreterContext context) {
    return helper.interpret(session, st, context);
}
|
// A script mixing DDL, a single-line logged batch, and a SELECT must execute
// fully; the result message reflects only the final SELECT's rows.
@Test
void should_interpret_multiple_statements_with_single_line_logged_batch() {
    // Given
    String statements = "CREATE TABLE IF NOT EXISTS zeppelin.albums(\n" +
            "    title text PRIMARY KEY,\n" +
            "    artist text,\n" +
            "    year int\n" +
            ");\n" +
            "BEGIN BATCH" +
            "   INSERT INTO zeppelin.albums(title,artist,year) " +
            "VALUES('The Impossible Dream EP','Carter the Unstoppable Sex Machine',1992);" +
            "   INSERT INTO zeppelin.albums(title,artist,year) " +
            "VALUES('The Way You Are','Tears for Fears',1983);" +
            "   INSERT INTO zeppelin.albums(title,artist,year) " +
            "VALUES('Primitive','Soulfly',2003);" +
            "APPLY BATCH;\n" +
            "SELECT * FROM zeppelin.albums;";
    // When
    final InterpreterResult actual = interpreter.interpret(statements, intrContext);
    // Then
    assertEquals(Code.SUCCESS, actual.code());
    assertEquals("title\tartist\tyear\n" +
            "The Impossible Dream EP\tCarter the Unstoppable Sex Machine\t1992\n" +
            "The Way You Are\tTears for Fears\t1983\n" +
            "Primitive\tSoulfly\t2003\n", actual.message().get(0).getData());
}
|
/** Resolves the state cell for the given namespace/tag from the per-work-item state table. */
@Override
public <T extends State> T state(StateNamespace namespace, StateTag<T> address) {
    return workItemState.get(namespace, address, StateContexts.nullContext());
}
|
// isEmpty().readLater() must prefetch via the reader's bag future, and read()
// must resolve to true once the future completes with an empty iterable.
@Test
public void testBagIsEmptyTrue() throws Exception {
    StateTag<BagState<String>> addr = StateTags.bag("bag", StringUtf8Coder.of());
    BagState<String> bag = underTest.state(NAMESPACE, addr);
    SettableFuture<Iterable<String>> future = SettableFuture.create();
    when(mockReader.bagFuture(key(NAMESPACE, "bag"), STATE_FAMILY, StringUtf8Coder.of()))
        .thenReturn(future);
    ReadableState<Boolean> result = bag.isEmpty().readLater();
    Mockito.verify(mockReader).bagFuture(key(NAMESPACE, "bag"), STATE_FAMILY, StringUtf8Coder.of());
    // Complete the future asynchronously (after 200 ms) so read() has to wait on it.
    waitAndSet(future, Collections.emptyList(), 200);
    assertThat(result.read(), Matchers.is(true));
}
|
/**
 * Receives an updated invoker list (null treated as empty), stores a defensive
 * clone, refreshes app-level mesh rule registrations, and recomputes subsets.
 */
@Override
public void notify(BitList<Invoker<T>> invokers) {
    final BitList<Invoker<T>> safeInvokers = (invokers == null) ? BitList.emptyList() : invokers;
    this.invokerList = safeInvokers.clone();
    registerAppRule(safeInvokers);
    computeSubset(this.meshRuleCache.getAppToVDGroup());
}
|
// notify() must tolerate null, register mesh rules only for invokers with a
// remote application name, and unregister apps that disappear between updates.
@Test
void testNotify() {
    StandardMeshRuleRouter<Object> meshRuleRouter = new StandardMeshRuleRouter<>(url);
    // null is treated as an empty list: no remote apps registered.
    meshRuleRouter.notify(null);
    assertEquals(0, meshRuleRouter.getRemoteAppName().size());
    BitList<Invoker<Object>> invokers =
            new BitList<>(Arrays.asList(createInvoker(""), createInvoker("unknown"), createInvoker("app1")));
    meshRuleRouter.notify(invokers);
    assertEquals(1, meshRuleRouter.getRemoteAppName().size());
    assertTrue(meshRuleRouter.getRemoteAppName().contains("app1"));
    assertEquals(invokers, meshRuleRouter.getInvokerList());
    verify(meshRuleManager, times(1)).register("app1", meshRuleRouter);
    // app1 disappears, app2 appears: expect one unregister and one register.
    invokers = new BitList<>(Arrays.asList(createInvoker("unknown"), createInvoker("app2")));
    meshRuleRouter.notify(invokers);
    verify(meshRuleManager, times(1)).register("app2", meshRuleRouter);
    verify(meshRuleManager, times(1)).unregister("app1", meshRuleRouter);
    assertEquals(invokers, meshRuleRouter.getInvokerList());
    // stop() must release the remaining registration.
    meshRuleRouter.stop();
    verify(meshRuleManager, times(1)).unregister("app2", meshRuleRouter);
}
|
/**
 * Pops the next URL from the task's Redis queue and rehydrates the full Request
 * (with extras) from the item hash when present; otherwise returns a bare Request
 * for the URL. Returns null when the queue is empty.
 */
@Override
public synchronized Request poll(Task task) {
    try (Jedis jedis = pool.getResource()) {
        String url = jedis.lpop(getQueueKey(task));
        if (url == null) {
            return null;
        }
        String key = ITEM_PREFIX + task.getUUID();
        String field = DigestUtils.sha1Hex(url);
        // Pin UTF-8 explicitly: the no-arg getBytes()/new String(byte[]) use the
        // platform default charset, which breaks on non-UTF-8 hosts.
        byte[] bytes = jedis.hget(key.getBytes(java.nio.charset.StandardCharsets.UTF_8),
                field.getBytes(java.nio.charset.StandardCharsets.UTF_8));
        if (bytes != null) {
            return JSON.parseObject(new String(bytes, java.nio.charset.StandardCharsets.UTF_8), Request.class);
        }
        return new Request(url);
    }
}
|
// Round-trip: a Request pushed with extras must come back equal from poll().
// Requires a reachable Redis instance, hence @Ignore.
@Ignore("environment depended")
@Test
public void test() {
    // Minimal Task stub: only the UUID (used as the queue/item key) matters here.
    Task task = new Task() {
        @Override
        public String getUUID() {
            return "1";
        }
        @Override
        public Site getSite() {
            return null;
        }
    };
    Request request = new Request("http://www.ibm.com/developerworks/cn/java/j-javadev2-22/");
    request.putExtra("1","2");
    redisScheduler.push(request, task);
    Request poll = redisScheduler.poll(task);
    assertThat(poll).isEqualTo(request);
}
|
/**
 * Deprecated key-less transformValues: null-checks the supplier, adapts it to the
 * with-key variant, and delegates with no explicit name.
 *
 * @throws NullPointerException if {@code valueTransformerSupplier} is null
 */
@Override
@Deprecated
public <VR> KStream<K, VR> transformValues(final org.apache.kafka.streams.kstream.ValueTransformerSupplier<? super V, ? extends VR> valueTransformerSupplier,
                                           final String... stateStoreNames) {
    Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null");
    return doTransformValues(
        toValueTransformerWithKeySupplier(valueTransformerSupplier),
        NamedInternal.empty(),
        stateStoreNames);
}
|
// A null state-store-name array (as opposed to an empty one) must be rejected
// with a descriptive NPE.
@Test
@SuppressWarnings("deprecation")
public void shouldNotAllowNullStoreNamesOnTransformValuesWithValueTransformerSupplierWithNamed() {
    final NullPointerException exception = assertThrows(
        NullPointerException.class,
        () -> testStream.transformValues(
            valueTransformerSupplier,
            Named.as("valueTransformer"),
            (String[]) null));
    assertThat(exception.getMessage(), equalTo("stateStoreNames can't be a null array"));
}
|
/** Delegates size resolution to the view's SizeDeterminer, which calls back when known. */
@Override
public final void getSize(@NonNull SizeReadyCallback cb) {
    sizeDeterminer.getSize(cb);
}
|
// With MATCH_PARENT dimensions the size is unknown until layout: the callback
// must not fire before the pre-draw pass, and must then report the parent's size.
@Test
public void testMatchParentWidthAndHeight() {
    LayoutParams params =
        new FrameLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
    view.setLayoutParams(params);
    target.getSize(cb);
    verify(cb, never()).onSizeReady(anyInt(), anyInt());
    activity.visible();
    view.getViewTreeObserver().dispatchOnPreDraw();
    verify(cb).onSizeReady(eq(parent.getWidth()), eq(parent.getHeight()));
}
|
/**
 * Commits all pending unit-of-work actions in a fixed order
 * (inserts, then modifications, then deletions); a null or empty
 * context means there is nothing to commit.
 */
@Override
public void commit() {
    final boolean nothingToCommit = context == null || context.isEmpty();
    if (nothingToCommit) {
        return;
    }
    LOGGER.info("Commit started");
    if (context.containsKey(UnitActions.INSERT.getActionValue())) {
        commitInsert();
    }
    if (context.containsKey(UnitActions.MODIFY.getActionValue())) {
        commitModify();
    }
    if (context.containsKey(UnitActions.DELETE.getActionValue())) {
        commitDelete();
    }
    LOGGER.info("Commit finished.");
}
|
// With only MODIFY and DELETE actions queued, commit() must never hit the
// database insert path.
@Test
void shouldNotInsertToDbIfNoRegisteredStudentsToBeCommitted() {
    context.put(UnitActions.MODIFY.getActionValue(), List.of(weapon1));
    context.put(UnitActions.DELETE.getActionValue(), List.of(weapon1));
    armsDealer.commit();
    verify(weaponDatabase, never()).insert(weapon1);
}
|
/**
 * Adds a message body for the given language, replacing any existing body
 * with the same (normalized) language tag.
 *
 * @param language language tag, normalized against this stanza's default
 * @param body     the body text
 * @return the Body extension that was attached
 */
@Deprecated
// TODO: Remove when stanza builder is ready.
public Body addBody(String language, String body) {
    language = Stanza.determineLanguage(this, language);
    // Only one body per language is allowed; drop the previous one first.
    removeBody(language);
    Body messageBody = new Body(language, body);
    addExtension(messageBody);
    return messageBody;
}
|
// The stanza builder must reject a null body with a NullPointerException.
@Test(expected = NullPointerException.class)
public void setNullMessageBodyTest() {
    StanzaBuilder.buildMessage()
            .addBody(null, null)
            .build();
}
|
/**
 * Applies the SQL limiter to a modify request's SQL. A null request or a
 * disabled limiter makes this a no-op.
 *
 * @throws SQLException if the SQL violates the configured limits
 */
@Override
public void doLimitForModifyRequest(ModifyRequest modifyRequest) throws SQLException {
    if (enabledLimit && modifyRequest != null) {
        doLimit(modifyRequest.getSql());
    }
}
|
// DDL statements (CREATE TABLE / CREATE INDEX / ALTER TABLE) must pass the
// limiter without raising SQLException.
@Test
void testDoLimitForModifyRequestForDdl() throws SQLException {
    ModifyRequest createTable = new ModifyRequest("create table test(id int,name varchar(255))");
    ModifyRequest createIndex = new ModifyRequest("create index test_index on test(id)");
    ModifyRequest alterTable = new ModifyRequest("alter table test add column age int");
    List<ModifyRequest> modifyRequests = new LinkedList<>();
    modifyRequests.add(createTable);
    modifyRequests.add(createIndex);
    modifyRequests.add(alterTable);
    sqlLimiter.doLimitForModifyRequest(modifyRequests);
}
|
/**
 * Best-row-identifier metadata is not supported by this adapter; always
 * returns null regardless of the arguments.
 */
@Override
public ResultSet getBestRowIdentifier(final String catalog, final String schema, final String table, final int scope, final boolean nullable) {
    return null;
}
|
// The unsupported metadata call must return null for any arguments.
@Test
void assertGetBestRowIdentifier() {
    assertNull(metaData.getBestRowIdentifier("", "", "", 0, false));
}
|
/**
 * Renders a step line: the keyword and plain text in {@code textFormat}, each
 * matched argument span in {@code argFormat}. Unmatched (null-valued) arguments
 * and nested arguments that start inside an already-rendered span are skipped.
 */
String formatStepText(
        String keyword, String stepText, Format textFormat, Format argFormat, List<Argument> arguments
) {
    StringBuilder result = new StringBuilder(textFormat.text(keyword));
    int beginIndex = 0;
    for (Argument argument : arguments) {
        // Value is null when an optional group did not match, e.g. @And("(it )?has something").
        if (argument.getValue() == null) {
            continue;
        }
        int argumentStart = argument.getStart();
        // A nested argument starting before the enclosing one ends is ignored.
        if (argumentStart < beginIndex) {
            continue;
        }
        result.append(textFormat.text(stepText.substring(beginIndex, argumentStart)));
        result.append(argFormat.text(stepText.substring(argumentStart, argument.getEnd())));
        beginIndex = argument.getEnd();
    }
    // Trailing plain text after the last argument.
    if (beginIndex != stepText.length()) {
        result.append(textFormat.text(stepText.substring(beginIndex)));
    }
    return result.toString();
}
|
// Both argument spans in a step with two placeholders must be wrapped in the
// bold "arg" format, with the surrounding text in the plain format.
@Test
void should_mark_subsequent_arguments_in_steps() {
    Formats formats = ansi();
    StepTypeRegistry registry = new StepTypeRegistry(Locale.ENGLISH);
    StepExpressionFactory stepExpressionFactory = new StepExpressionFactory(registry, bus);
    StepDefinition stepDefinition = new StubStepDefinition("text {string} text {string}", String.class);
    StepExpression expression = stepExpressionFactory.createExpression(stepDefinition);
    PrettyFormatter prettyFormatter = new PrettyFormatter(new ByteArrayOutputStream());
    String stepText = "text 'arg1' text 'arg2'";
    String formattedText = prettyFormatter.formatStepText("Given ", stepText, formats.get("passed"),
        formats.get("passed_arg"), createArguments(expression.match(stepText)));
    assertThat(formattedText, equalTo(AnsiEscapes.GREEN + "Given " + AnsiEscapes.RESET +
            AnsiEscapes.GREEN + "text " + AnsiEscapes.RESET +
            AnsiEscapes.GREEN + AnsiEscapes.INTENSITY_BOLD + "'arg1'" + AnsiEscapes.RESET +
            AnsiEscapes.GREEN + " text " + AnsiEscapes.RESET +
            AnsiEscapes.GREEN + AnsiEscapes.INTENSITY_BOLD + "'arg2'" + AnsiEscapes.RESET));
}
|
/** Returns the function body content (the text between the $$ delimiters). */
public String getContent() {
    return content;
}
|
// The $$-delimited UDF body must be captured verbatim, preserving newlines
// and the Python source inside it.
@Test
public void testCreateUDFWithContent() {
    String createFunctionSql = "CREATE FUNCTION echo(int) \n"
            + "RETURNS int \n"
            + "properties (\n"
            + "    \"symbol\" = \"echo\",\n"
            + "    \"type\" = \"Python\"\n"
            + ") AS $$ \n"
            + "def a(b):\n" +
            "   return b \n" +
            "$$;";
    CreateFunctionStmt stmt = (CreateFunctionStmt) com.starrocks.sql.parser.SqlParser.parse(
            createFunctionSql, 32).get(0);
    Assert.assertTrue(stmt.getContent().contains("\n"));
    Assert.assertTrue(stmt.getContent().contains("def a(b):"));
}
|
/**
 * Reads the named JSON array property as an immutable set of longs.
 *
 * @throws IllegalArgumentException if the property is missing or any element
 *         is not a long (raised by the backing iterator)
 */
public static Set<Long> getLongSet(String property, JsonNode node) {
    Preconditions.checkArgument(node.has(property), "Cannot parse missing set: %s", property);
    ImmutableSet.Builder<Long> builder = ImmutableSet.builder();
    builder.addAll(new JsonLongArrayIterator(property, node));
    return builder.build();
}
|
// getLongSet must reject a missing property, a non-array value, and non-long
// elements — each with a precise message — and parse a valid array fully.
@Test
public void getLongSet() throws JsonProcessingException {
    assertThatThrownBy(() -> JsonUtil.getLongSet("items", JsonUtil.mapper().readTree("{}")))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Cannot parse missing set: items");
    assertThatThrownBy(
            () -> JsonUtil.getLongSet("items", JsonUtil.mapper().readTree("{\"items\": null}")))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Cannot parse JSON array from non-array value: items: null");
    assertThatThrownBy(
            () ->
                JsonUtil.getLongSet(
                    "items", JsonUtil.mapper().readTree("{\"items\": [13, \"23\"]}")))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Cannot parse long from non-long value in items: \"23\"");
    assertThat(JsonUtil.getLongSet("items", JsonUtil.mapper().readTree("{\"items\": [23, 45]}")))
        .containsExactlyElementsOf(Arrays.asList(23L, 45L));
}
|
/**
 * Reads the ACL of a bucket or object. Incomplete multipart uploads have no ACL
 * (empty returned). When the bucket enforces owner-only access the ACL is marked
 * read-only. Not-found directories (common-prefix placeholders) and
 * interoperability failures (e.g. delete markers) also map to an empty ACL.
 *
 * @throws BackgroundException for any other service failure
 */
@Override
public Acl getPermission(final Path file) throws BackgroundException {
    try {
        if(file.getType().contains(Path.Type.upload)) {
            // Incomplete multipart upload has no ACL set
            return Acl.EMPTY;
        }
        final Path bucket = containerService.getContainer(file);
        final Acl acl;
        if(containerService.isContainer(file)) {
            // This method can be performed by anonymous services, but can only succeed if the
            // bucket's existing ACL already allows write access by the anonymous user.
            // In general, you can only access the ACL of a bucket if the ACL already in place
            // for that bucket (in S3) allows you to do so.
            acl = this.toAcl(session.getClient().getBucketAcl(bucket.isRoot() ? StringUtils.EMPTY : bucket.getName()));
        }
        else {
            acl = this.toAcl(session.getClient().getVersionedObjectAcl(file.attributes().getVersionId(),
                    bucket.isRoot() ? StringUtils.EMPTY : bucket.getName(), containerService.getKey(file)));
        }
        if(this.isBucketOwnerEnforced(bucket)) {
            // Owner-enforced buckets ignore ACL edits; surface that in the UI.
            acl.setEditable(false);
        }
        return acl;
    }
    catch(ServiceException e) {
        final BackgroundException failure = new S3ExceptionMappingService().map("Failure to read attributes of {0}", e, file);
        if(file.isDirectory()) {
            if(failure instanceof NotfoundException) {
                // No placeholder file may exist, but we just have a common prefix
                return Acl.EMPTY;
            }
        }
        if(failure instanceof InteroperabilityException) {
            // The specified method is not allowed against this resource. The case for delete markers in versioned buckets.
            return Acl.EMPTY;
        }
        throw failure;
    }
}
|
@Test(expected = NotfoundException.class)
public void testReadNotFound() throws Exception {
    final Path bucket = new Path("test-eu-central-1-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
    // A random object name that does not exist in the bucket must raise NotfoundException.
    final Path file = new Path(bucket, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    new S3AccessControlListFeature(session).getPermission(file);
}
|
/**
 * Dispatches varbit/varp changes to the matching timer or counter infobox.
 * Each branch below matches exactly one varbit (or varp) id and creates,
 * updates or removes the corresponding infobox. Note that the DIVINE_*
 * branches use {@code return} rather than falling through, so no branch
 * after them runs once a combined divine potion overrides an individual one.
 */
@Subscribe
public void onVarbitChanged(VarbitChanged event)
{
	// Entering/leaving a raid clears raid-scoped timers.
	if (event.getVarbitId() == Varbits.IN_RAID)
	{
		removeVarTimer(OVERLOAD_RAID);
		removeGameTimer(PRAYER_ENHANCE);
	}
	// Simple on/off varbits: value 1 starts the timer, any other value removes it.
	if (event.getVarbitId() == Varbits.VENGEANCE_COOLDOWN && config.showVengeance())
	{
		if (event.getValue() == 1)
		{
			createGameTimer(VENGEANCE);
		}
		else
		{
			removeGameTimer(VENGEANCE);
		}
	}
	if (event.getVarbitId() == Varbits.SPELLBOOK_SWAP && config.showSpellbookSwap())
	{
		if (event.getValue() == 1)
		{
			createGameTimer(SPELLBOOK_SWAP);
		}
		else
		{
			removeGameTimer(SPELLBOOK_SWAP);
		}
	}
	if (event.getVarbitId() == Varbits.HEAL_GROUP_COOLDOWN && config.showHealGroup())
	{
		if (event.getValue() == 1)
		{
			createGameTimer(HEAL_GROUP);
		}
		else
		{
			removeGameTimer(HEAL_GROUP);
		}
	}
	// Arceuus spell cooldowns share the same on/off pattern.
	if (event.getVarbitId() == Varbits.DEATH_CHARGE_COOLDOWN && config.showArceuusCooldown())
	{
		if (event.getValue() == 1)
		{
			createGameTimer(DEATH_CHARGE_COOLDOWN);
		}
		else
		{
			removeGameTimer(DEATH_CHARGE_COOLDOWN);
		}
	}
	if (event.getVarbitId() == Varbits.CORRUPTION_COOLDOWN && config.showArceuusCooldown())
	{
		if (event.getValue() == 1)
		{
			createGameTimer(CORRUPTION_COOLDOWN);
		}
		else
		{
			removeGameTimer(CORRUPTION_COOLDOWN);
		}
	}
	if (event.getVarbitId() == Varbits.RESURRECT_THRALL_COOLDOWN && config.showArceuusCooldown())
	{
		if (event.getValue() == 1)
		{
			createGameTimer(RESURRECT_THRALL_COOLDOWN);
		}
		else
		{
			removeGameTimer(RESURRECT_THRALL_COOLDOWN);
		}
	}
	if (event.getVarbitId() == Varbits.SHADOW_VEIL_COOLDOWN && config.showArceuusCooldown())
	{
		if (event.getValue() == 1)
		{
			createGameTimer(SHADOW_VEIL_COOLDOWN);
		}
		else
		{
			removeGameTimer(SHADOW_VEIL_COOLDOWN);
		}
	}
	if (event.getVarbitId() == Varbits.WARD_OF_ARCEUUS_COOLDOWN && config.showArceuusCooldown())
	{
		if (event.getValue() == 1)
		{
			createGameTimer(WARD_OF_ARCEUUS_COOLDOWN);
		}
		else
		{
			removeGameTimer(WARD_OF_ARCEUUS_COOLDOWN);
		}
	}
	if (event.getVarbitId() == Varbits.VENGEANCE_ACTIVE && config.showVengeanceActive())
	{
		updateVarCounter(VENGEANCE_ACTIVE, event.getValue());
	}
	// Death charge duration scales with the player's real Magic level, in game ticks.
	if (event.getVarbitId() == Varbits.DEATH_CHARGE && config.showArceuus())
	{
		if (event.getValue() == 1)
		{
			createGameTimer(DEATH_CHARGE, Duration.of(client.getRealSkillLevel(Skill.MAGIC), RSTimeUnit.GAME_TICKS))
		}
		else
		{
			removeGameTimer(DEATH_CHARGE);
		}
	}
	if (event.getVarbitId() == Varbits.RESURRECT_THRALL && event.getValue() == 0 && config.showArceuus())
	{
		removeGameTimer(RESURRECT_THRALL);
	}
	if (event.getVarbitId() == Varbits.SHADOW_VEIL && event.getValue() == 0 && config.showArceuus())
	{
		removeGameTimer(SHADOW_VEIL);
	}
	// Poison varp encodes both poison (positive range) and venom (below the
	// cutoff); the two updateVarTimer calls pick their respective ranges.
	if (event.getVarpId() == VarPlayer.POISON && config.showAntiPoison())
	{
		final int poisonVarp = event.getValue();
		final int tickCount = client.getTickCount();
		if (poisonVarp == 0)
		{
			nextPoisonTick = -1;
		}
		else if (nextPoisonTick - tickCount <= 0)
		{
			nextPoisonTick = tickCount + POISON_TICK_LENGTH;
		}
		updateVarTimer(ANTIPOISON, event.getValue(),
			i -> i >= 0 || i < VENOM_VALUE_CUTOFF,
			i -> nextPoisonTick - tickCount + Math.abs((i + 1) * POISON_TICK_LENGTH));
		updateVarTimer(ANTIVENOM, event.getValue(),
			i -> i >= VENOM_VALUE_CUTOFF,
			i -> nextPoisonTick - tickCount + Math.abs((i + 1 - VENOM_VALUE_CUTOFF) * POISON_TICK_LENGTH));
	}
	// Overload (NMZ or CoX variant, chosen by the IN_RAID varbit).
	if ((event.getVarbitId() == Varbits.NMZ_OVERLOAD_REFRESHES_REMAINING
		|| event.getVarbitId() == Varbits.COX_OVERLOAD_REFRESHES_REMAINING) && config.showOverload())
	{
		final int overloadVarb = event.getValue();
		final int tickCount = client.getTickCount();
		if (overloadVarb <= 0)
		{
			nextOverloadRefreshTick = -1;
		}
		else if (nextOverloadRefreshTick - tickCount <= 0)
		{
			nextOverloadRefreshTick = tickCount + OVERLOAD_TICK_LENGTH;
		}
		GameTimer overloadTimer = client.getVarbitValue(Varbits.IN_RAID) == 1 ? OVERLOAD_RAID : OVERLOAD;
		updateVarTimer(overloadTimer, overloadVarb, i -> nextOverloadRefreshTick - tickCount + (i - 1) * OVERLOAD_TICK_LENGTH);
	}
	if (event.getVarbitId() == Varbits.TELEBLOCK && config.showTeleblock())
	{
		// The teleblock varbit carries a +100 offset; non-positive means no teleblock.
		updateVarTimer(TELEBLOCK, event.getValue() - 100, i -> i <= 0, IntUnaryOperator.identity());
	}
	if (event.getVarpId() == VarPlayer.CHARGE_GOD_SPELL && config.showCharge())
	{
		updateVarTimer(CHARGE, event.getValue(), i -> i * 2);
	}
	if (event.getVarbitId() == Varbits.IMBUED_HEART_COOLDOWN && config.showImbuedHeart())
	{
		updateVarTimer(IMBUEDHEART, event.getValue(), i -> i * 10);
	}
	if (event.getVarbitId() == Varbits.DRAGONFIRE_SHIELD_COOLDOWN && config.showDFSSpecial())
	{
		updateVarTimer(DRAGON_FIRE_SHIELD, event.getValue(), i -> i * 8);
	}
	if (event.getVarpId() == LAST_HOME_TELEPORT && config.showHomeMinigameTeleports())
	{
		checkTeleport(LAST_HOME_TELEPORT);
	}
	if (event.getVarpId() == LAST_MINIGAME_TELEPORT && config.showHomeMinigameTeleports())
	{
		checkTeleport(LAST_MINIGAME_TELEPORT);
	}
	if (event.getVarbitId() == Varbits.RUN_SLOWED_DEPLETION_ACTIVE
		|| event.getVarbitId() == Varbits.STAMINA_EFFECT
		|| event.getVarbitId() == Varbits.RING_OF_ENDURANCE_EFFECT)
	{
		// staminaEffectActive is checked to match https://github.com/Joshua-F/cs2-scripts/blob/741271f0c3395048c1bad4af7881a13734516adf/scripts/%5Bproc%2Cbuff_bar_get_value%5D.cs2#L25
		int staminaEffectActive = client.getVarbitValue(Varbits.RUN_SLOWED_DEPLETION_ACTIVE);
		int staminaPotionEffectVarb = client.getVarbitValue(Varbits.STAMINA_EFFECT);
		int enduranceRingEffectVarb = client.getVarbitValue(Varbits.RING_OF_ENDURANCE_EFFECT);
		final int totalStaminaEffect = staminaPotionEffectVarb + enduranceRingEffectVarb;
		if (staminaEffectActive == 1 && config.showStamina())
		{
			updateVarTimer(STAMINA, totalStaminaEffect, i -> i * 10);
		}
	}
	if (event.getVarbitId() == Varbits.ANTIFIRE && config.showAntiFire())
	{
		final int antifireVarb = event.getValue();
		final int tickCount = client.getTickCount();
		if (antifireVarb == 0)
		{
			nextAntifireTick = -1;
		}
		else if (nextAntifireTick - tickCount <= 0)
		{
			nextAntifireTick = tickCount + ANTIFIRE_TICK_LENGTH;
		}
		updateVarTimer(ANTIFIRE, antifireVarb, i -> nextAntifireTick - tickCount + (i - 1) * ANTIFIRE_TICK_LENGTH);
	}
	if (event.getVarbitId() == Varbits.SUPER_ANTIFIRE && config.showAntiFire())
	{
		final int superAntifireVarb = event.getValue();
		final int tickCount = client.getTickCount();
		if (superAntifireVarb == 0)
		{
			nextSuperAntifireTick = -1;
		}
		else if (nextSuperAntifireTick - tickCount <= 0)
		{
			nextSuperAntifireTick = tickCount + SUPERANTIFIRE_TICK_LENGTH;
		}
		// NOTE(review): passes event.getValue() rather than superAntifireVarb —
		// same value, but inconsistent with the ANTIFIRE branch above.
		updateVarTimer(SUPERANTIFIRE, event.getValue(), i -> nextSuperAntifireTick - tickCount + (i - 1) * SUPERANTIFIRE_TICK_LENGTH);
	}
	if (event.getVarbitId() == Varbits.MAGIC_IMBUE && config.showMagicImbue())
	{
		updateVarTimer(MAGICIMBUE, event.getValue(), i -> i * 10);
	}
	// Divine potions: a combined potion (super combat / bastion / battlemage)
	// suppresses its individual components, hence the early returns below.
	if (event.getVarbitId() == Varbits.DIVINE_SUPER_ATTACK && config.showDivine())
	{
		if (client.getVarbitValue(Varbits.DIVINE_SUPER_COMBAT) > event.getValue())
		{
			return;
		}
		updateVarTimer(DIVINE_SUPER_ATTACK, event.getValue(), IntUnaryOperator.identity());
	}
	if (event.getVarbitId() == Varbits.DIVINE_SUPER_STRENGTH && config.showDivine())
	{
		if (client.getVarbitValue(Varbits.DIVINE_SUPER_COMBAT) > event.getValue())
		{
			return;
		}
		updateVarTimer(DIVINE_SUPER_STRENGTH, event.getValue(), IntUnaryOperator.identity());
	}
	if (event.getVarbitId() == Varbits.DIVINE_SUPER_DEFENCE && config.showDivine())
	{
		if (client.getVarbitValue(Varbits.DIVINE_SUPER_COMBAT) > event.getValue()
			|| client.getVarbitValue(Varbits.DIVINE_BASTION) > event.getValue()
			|| client.getVarbitValue(Varbits.DIVINE_BATTLEMAGE) > event.getValue()
			// When drinking a dose of moonlight potion while already under its effects, desync between
			// Varbits.MOONLIGHT_POTION and Varbits.DIVINE_SUPER_DEFENCE can occur, with the latter being 1 tick
			// greater
			|| client.getVarbitValue(Varbits.MOONLIGHT_POTION) >= event.getValue())
		{
			return;
		}
		// NOTE(review): given the >= guard above returned, this condition is
		// always true at this point.
		if (client.getVarbitValue(Varbits.MOONLIGHT_POTION) < event.getValue())
		{
			removeVarTimer(MOONLIGHT_POTION);
		}
		updateVarTimer(DIVINE_SUPER_DEFENCE, event.getValue(), IntUnaryOperator.identity());
	}
	if (event.getVarbitId() == Varbits.DIVINE_RANGING && config.showDivine())
	{
		if (client.getVarbitValue(Varbits.DIVINE_BASTION) > event.getValue())
		{
			return;
		}
		updateVarTimer(DIVINE_RANGING, event.getValue(), IntUnaryOperator.identity())
	}
	if (event.getVarbitId() == Varbits.DIVINE_MAGIC && config.showDivine())
	{
		if (client.getVarbitValue(Varbits.DIVINE_BATTLEMAGE) > event.getValue())
		{
			return;
		}
		updateVarTimer(DIVINE_MAGIC, event.getValue(), IntUnaryOperator.identity());
	}
	// Combined divine potions remove the matching individual timers they replace.
	if (event.getVarbitId() == Varbits.DIVINE_SUPER_COMBAT && config.showDivine())
	{
		if (client.getVarbitValue(Varbits.DIVINE_SUPER_ATTACK) == event.getValue())
		{
			removeVarTimer(DIVINE_SUPER_ATTACK);
		}
		if (client.getVarbitValue(Varbits.DIVINE_SUPER_STRENGTH) == event.getValue())
		{
			removeVarTimer(DIVINE_SUPER_STRENGTH);
		}
		if (client.getVarbitValue(Varbits.DIVINE_SUPER_DEFENCE) == event.getValue())
		{
			removeVarTimer(DIVINE_SUPER_DEFENCE);
		}
		updateVarTimer(DIVINE_SUPER_COMBAT, event.getValue(), IntUnaryOperator.identity());
	}
	if (event.getVarbitId() == Varbits.DIVINE_BASTION && config.showDivine())
	{
		if (client.getVarbitValue(Varbits.DIVINE_RANGING) == event.getValue())
		{
			removeVarTimer(DIVINE_RANGING);
		}
		if (client.getVarbitValue(Varbits.DIVINE_SUPER_DEFENCE) == event.getValue())
		{
			removeVarTimer(DIVINE_SUPER_DEFENCE);
		}
		updateVarTimer(DIVINE_BASTION, event.getValue(), IntUnaryOperator.identity());
	}
	if (event.getVarbitId() == Varbits.DIVINE_BATTLEMAGE && config.showDivine())
	{
		if (client.getVarbitValue(Varbits.DIVINE_MAGIC) == event.getValue())
		{
			removeVarTimer(DIVINE_MAGIC);
		}
		if (client.getVarbitValue(Varbits.DIVINE_SUPER_DEFENCE) == event.getValue())
		{
			removeVarTimer(DIVINE_SUPER_DEFENCE);
		}
		updateVarTimer(DIVINE_BATTLEMAGE, event.getValue(), IntUnaryOperator.identity());
	}
	if (event.getVarbitId() == Varbits.BUFF_STAT_BOOST && config.showOverload())
	{
		updateVarTimer(SMELLING_SALTS, event.getValue(), i -> i * 25);
	}
	if (event.getVarbitId() == Varbits.MENAPHITE_REMEDY && config.showMenaphiteRemedy())
	{
		updateVarTimer(MENAPHITE_REMEDY, event.getValue(), i -> i * 25);
	}
	if (event.getVarbitId() == Varbits.LIQUID_ADERNALINE_ACTIVE && event.getValue() == 0 && config.showLiquidAdrenaline())
	{
		removeGameTimer(LIQUID_ADRENALINE);
	}
	if (event.getVarbitId() == Varbits.FARMERS_AFFINITY && config.showFarmersAffinity())
	{
		updateVarTimer(FARMERS_AFFINITY, event.getValue(), i -> i * 20);
	}
	if (event.getVarbitId() == Varbits.GOD_WARS_ALTAR_COOLDOWN && config.showGodWarsAltar())
	{
		updateVarTimer(GOD_WARS_ALTAR, event.getValue(), i -> i * 100);
	}
	// The same varbit backs both moon curses; disambiguate via the player's region.
	if (event.getVarbitId() == Varbits.CURSE_OF_THE_MOONS && config.showCurseOfTheMoons())
	{
		final int regionID = WorldPoint.fromLocal(client, client.getLocalPlayer().getLocalLocation()).getRegionID();
		if (regionID == ECLIPSE_MOON_REGION_ID)
		{
			updateVarCounter(CURSE_OF_THE_MOONS_ECLIPSE, event.getValue());
		}
		else
		{
			updateVarCounter(CURSE_OF_THE_MOONS_BLUE, event.getValue());
		}
	}
	if (event.getVarbitId() == Varbits.COLOSSEUM_DOOM && config.showColosseumDoom())
	{
		updateVarCounter(COLOSSEUM_DOOM, event.getValue());
	}
	if (event.getVarbitId() == Varbits.MOONLIGHT_POTION && config.showMoonlightPotion())
	{
		int moonlightValue = event.getValue();
		// Increase the timer by 1 tick in case of desync due to drinking a dose of moonlight potion while already
		// under its effects. Otherwise, the timer would be 1 tick shorter than it is meant to be.
		if (client.getVarbitValue(Varbits.DIVINE_SUPER_DEFENCE) == moonlightValue + 1)
		{
			moonlightValue++;
		}
		updateVarTimer(MOONLIGHT_POTION, moonlightValue, IntUnaryOperator.identity());
	}
}
|
@Test
public void testImbuedHeartStart()
{
	when(timersAndBuffsConfig.showImbuedHeart()).thenReturn(true);

	// Imbued heart cooldown varbit 70 corresponds to 7 minutes (value * 10 ticks).
	final VarbitChanged event = new VarbitChanged();
	event.setVarbitId(Varbits.IMBUED_HEART_COOLDOWN);
	event.setValue(70);
	timersAndBuffsPlugin.onVarbitChanged(event);

	final ArgumentCaptor<InfoBox> captor = ArgumentCaptor.forClass(InfoBox.class);
	verify(infoBoxManager).addInfoBox(captor.capture());
	final TimerTimer timer = (TimerTimer) captor.getValue();
	assertEquals(GameTimer.IMBUEDHEART, timer.getTimer());
	assertEquals(Duration.ofMinutes(7), timer.getDuration());
}
|
/**
 * Binds all properties under the given prefix from the Spring {@link Environment}
 * into a map via the Spring Boot binder.
 *
 * @param environment the Spring environment to read from
 * @param prefix      the property prefix to bind, e.g. {@code "nacos.prefix"}
 * @return the bound map; when no properties match, the result is whatever
 *         {@code handleSpringBinder} returns (presumably {@code null} — confirm with callers)
 */
public static Map<String, Object> getPropertiesWithPrefixForMap(Environment environment, String prefix) {
    return handleSpringBinder(environment, prefix, Map.class);
}
|
@Test
@SuppressWarnings("unchecked")
void testGetPropertiesWithPrefixForMap() {
    Map<String, Object> actual = PropertiesUtil.getPropertiesWithPrefixForMap(environment, "nacos.prefix");
    assertEquals(3, actual.size());
    // Each top-level key maps to a nested map holding a single "value" entry.
    for (Map.Entry<String, Object> entry : actual.entrySet()) {
        String key = entry.getKey();
        String expectedValue;
        if ("one".equals(key)) {
            expectedValue = "1";
        } else if ("two".equals(key)) {
            expectedValue = "2";
        } else if ("three".equals(key)) {
            expectedValue = "3";
        } else {
            throw new RuntimeException();
        }
        Map<String, Object> subMap = (Map<String, Object>) entry.getValue();
        assertEquals(expectedValue, subMap.get("value"));
    }
}
|
@Override
public void startIt() {
    if (!semaphore.tryAcquire()) {
        // Another caller holds the permit: a migration is already in flight.
        LOGGER.trace("{}: lock is already taken or process is already running", Thread.currentThread().getName());
        return;
    }
    try {
        executorService.execute(this::doDatabaseMigration);
    } catch (RuntimeException e) {
        // The task was never submitted; give the permit back before propagating.
        semaphore.release();
        throw e;
    }
}
|
@Test
public void startit_calls_MigrationEngine_execute() {
    underTest.startIt();
    // The migration must complete before the platform is (re)started.
    inOrder.verify(migrationEngine).execute(any());
    inOrder.verify(platform).doStart();
    inOrder.verifyNoMoreInteractions();
}
|
/**
 * Records a latency sample.
 *
 * @param latencyNano       observed latency in nanoseconds; narrowed via {@code narrow} before recording
 * @param overflownConsumer invoked with the histogram when recording overflows it
 */
public void record(long latencyNano, Consumer<AbstractHistogram> overflownConsumer) {
    recordSafeValue(narrow(latencyNano), overflownConsumer);
}
|
@Test
public void testOverflowRecording()
{
    final LatencyMetric metric = new LatencyMetric();
    // Means of 0ms, 100ms and 200ms.
    for (int meanMillis = 0; meanMillis <= 200; meanMillis += 100)
    {
        final GaussianResponseTimeDistribution distribution =
            new GaussianResponseTimeDistribution(0, meanMillis, 10, TimeUnit.MILLISECONDS);
        final AtomicReference<AbstractHistogram> overflown = new AtomicReference<>();
        // Keep recording until the overflow consumer fires.
        while (overflown.get() == null)
        {
            metric.record(distribution.responseTimeNanos(), h -> overflown.set(h.copy()));
        }
        assertTrue(overflown.get().getTotalCount() > Short.MAX_VALUE);
        // Mean in nanoseconds, with a 10ms tolerance.
        assertEquals(overflown.get().getMean(), meanMillis * 1000000d, 10000000d);
    }
}
|
/**
 * Replaces the {@link GooglePlayServicesUtilImpl} used by this shadow.
 * Synchronized so concurrent test setup observes a consistent value.
 *
 * @throws NullPointerException if {@code impl} is null
 */
public static synchronized void provideImpl(GooglePlayServicesUtilImpl impl) {
    googlePlayServicesUtilImpl = Preconditions.checkNotNull(impl);
}
|
@Test
public void provideImplementation_nullValueNotAllowed() {
    // provideImpl delegates to Preconditions.checkNotNull, so null must be rejected.
    thrown.expect(NullPointerException.class);
    ShadowGooglePlayServicesUtil.provideImpl(null);
}
|
/**
 * Resolves the property's effective value: converts the stored String property,
 * then falls back to the default when the configured value is absent, an empty
 * collection, or outside the configured [minValue, maxValue] bounds.
 *
 * @return the effective value of this system property
 */
@SuppressWarnings("unchecked")
public T getValue() {
    // Convert the raw String property via the registered type converter.
    final T value = (T) FROM_STRING.get(getConverterClass()).apply(JiveGlobals.getProperty(key), this);
    // A missing value or an empty collection both fall back to the default.
    if (value == null || (Collection.class.isAssignableFrom(value.getClass()) && ((Collection) value).isEmpty())) {
        return defaultValue;
    }
    // Raw Comparable casts: bounds are only set for comparable value types.
    if (minValue != null && ((Comparable) minValue).compareTo(value) > 0) {
        LOGGER.warn("Configured value of {} is less than the minimum value of {} for the SystemProperty {} - will use default value of {} instead",
            value, minValue, key, defaultValue);
        return defaultValue;
    }
    if (maxValue != null && ((Comparable) maxValue).compareTo(value) < 0) {
        LOGGER.warn("Configured value of {} is more than the maximum value of {} for the SystemProperty {} - will use default value of {} instead",
            value, maxValue, key, defaultValue);
        return defaultValue;
    }
    return value;
}
|
@Test
public void willNotReturnAnotherClass() {
    // A Class-valued property constrained to subclasses of AuthProvider.
    final SystemProperty<Class> classProperty = SystemProperty.Builder.ofType(Class.class)
        .setKey("another-subclass-property")
        .setBaseClass(AuthProvider.class)
        .setDefaultValue(DefaultAuthProvider.class)
        .setDynamic(false)
        .build();

    // java.lang.Object is not an AuthProvider, so the default must win.
    JiveGlobals.setProperty("another-subclass-property", "java.lang.Object");

    assertThat(classProperty.getValue(), is(equalTo(DefaultAuthProvider.class)));
}
|
/**
 * Finds a single page of DTOs matching the given query, in the given sort order.
 *
 * @param query   the Mongo query to filter by
 * @param sort    the sort order applied before paging
 * @param page    1-based page index; values below 1 are treated as the first page
 * @param perPage number of documents per page
 * @return the requested page together with paging metadata
 */
protected PaginatedList<DTO> findPaginatedWithQueryAndSort(Bson query, Bson sort, int page, int perPage) {
    try (final DBCursor<DTO> cursor = db.find(query)
        .sort(sort)
        .limit(perPage)
        // skip is clamped so page 0 and page 1 both start at the first document
        .skip(perPage * Math.max(0, page - 1))) {
        // NOTE(review): grandTotal counts the whole collection, ignoring the
        // query — confirm this is intended for non-empty queries.
        final long grandTotal = db.count();
        return new PaginatedList<>(asImmutableList(cursor), cursor.count(), page, perPage, grandTotal);
    }
}
|
@Test
public void findPaginatedWithQueryAndSort() {
    // Seed five documents with sortable titles hello1..hello5.
    for (int i = 1; i <= 5; i++) {
        dbService.save(newDto("hello" + i));
    }

    // Ascending, two per page: pages 1 and 2 are full, page 3 holds the remainder.
    final PaginatedList<TestDTO> firstPage = dbService.findPaginatedWithQueryAndSort(DBQuery.empty(), DBSort.asc("title"), 1, 2);
    assertThat(firstPage.pagination().count()).isEqualTo(2);
    assertThat(firstPage.pagination().total()).isEqualTo(5);
    assertThat(firstPage.delegate())
            .extracting("title")
            .containsExactly("hello1", "hello2");

    final PaginatedList<TestDTO> secondPage = dbService.findPaginatedWithQueryAndSort(DBQuery.empty(), DBSort.asc("title"), 2, 2);
    assertThat(secondPage.pagination().count()).isEqualTo(2);
    assertThat(secondPage.pagination().total()).isEqualTo(5);
    assertThat(secondPage.delegate())
            .extracting("title")
            .containsExactly("hello3", "hello4");

    final PaginatedList<TestDTO> lastPage = dbService.findPaginatedWithQueryAndSort(DBQuery.empty(), DBSort.asc("title"), 3, 2);
    assertThat(lastPage.pagination().count()).isEqualTo(1);
    assertThat(lastPage.pagination().total()).isEqualTo(5);
    assertThat(lastPage.delegate())
            .extracting("title")
            .containsExactly("hello5");

    // Descending order reverses which titles land on the first page.
    final PaginatedList<TestDTO> firstPageDesc = dbService.findPaginatedWithQueryAndSort(DBQuery.empty(), DBSort.desc("title"), 1, 2);
    assertThat(firstPageDesc.pagination().count()).isEqualTo(2);
    assertThat(firstPageDesc.pagination().total()).isEqualTo(5);
    assertThat(firstPageDesc.delegate())
            .extracting("title")
            .containsExactly("hello5", "hello4");
}
|
@Override
public void report(final SortedMap<MetricName, Gauge> gauges, final SortedMap<MetricName, Counter> counters,
    final SortedMap<MetricName, Histogram> histograms, final SortedMap<MetricName, Meter> meters, final SortedMap<MetricName, Timer> timers) {
    final long timestamp = System.currentTimeMillis();
    if (logger.isDebugEnabled()) {
        logger.debug("InfluxDbReporter report is called with counter size " + counters.size());
    }
    try {
        influxDb.flush();

        for (final Map.Entry<MetricName, Gauge> gauge : gauges.entrySet()) {
            reportGauge(gauge.getKey(), gauge.getValue(), timestamp);
        }
        for (final Map.Entry<MetricName, Counter> counter : counters.entrySet()) {
            reportCounter(counter.getKey(), counter.getValue(), timestamp);
        }
        for (final Map.Entry<MetricName, Histogram> histogram : histograms.entrySet()) {
            reportHistogram(histogram.getKey(), histogram.getValue(), timestamp);
        }
        for (final Map.Entry<MetricName, Meter> meter : meters.entrySet()) {
            reportMeter(meter.getKey(), meter.getValue(), timestamp);
        }
        for (final Map.Entry<MetricName, Timer> timer : timers.entrySet()) {
            reportTimer(timer.getKey(), timer.getValue(), timestamp);
        }

        // Only issue a write when at least one series was appended.
        if (influxDb.hasSeriesData()) {
            influxDb.writeData();
        }

        // Counters are reported as deltas: subtract what was just reported.
        for (final Map.Entry<MetricName, Counter> entry : counters.entrySet()) {
            final Counter counter = entry.getValue();
            counter.dec(counter.getCount());
        }
    } catch (Exception e) {
        logger.error("Unable to report to InfluxDB. Discarding data.", e);
    }
}
|
@Test
public void reportsLongGaugeValues() throws Exception {
    reporter.report(map("gauge", gauge(1L)), this.map(), this.map(), this.map(), this.map());

    // A long-valued gauge must end up as at least one appended point.
    final ArgumentCaptor<InfluxDbPoint> pointCaptor = ArgumentCaptor.forClass(InfluxDbPoint.class);
    Mockito.verify(influxDb, atLeastOnce()).appendPoints(pointCaptor.capture());
    InfluxDbPoint point = pointCaptor.getValue();
    // NOTE(review): the assertions below are disabled — confirm whether
    // InfluxDbPoint still exposes these accessors before re-enabling.
    /*
    assertThat(point.getMeasurement()).isEqualTo("gauge");
    assertThat(point.getFields()).isNotEmpty();
    assertThat(point.getFields()).hasSize(1);
    assertThat(point.getFields()).contains(entry("value", 1L));
    */
}
|
/**
 * Converts scalar predicates into per-column partition filters without table
 * context (no partition-expression awareness).
 *
 * @param predicates conjuncts to convert
 * @return map from column name to its derived partition filter
 */
public static Map<String, PartitionColumnFilter> convertColumnFilter(List<ScalarOperator> predicates) {
    return convertColumnFilter(predicates, null);
}
|
@Test
public void convertColumnFilterExprDateTruncContains() {
    // Table partitioned by date_trunc('month', ...): only 'day' and 'month'
    // equality predicates can be converted into partition filters.
    OlapTable olapTable = buildOlapTable("month");

    String[] granularities = {"day", "week", "month", "year"};
    int[] expectedSizes = {1, 0, 1, 0};
    for (int i = 0; i < granularities.length; i++) {
        List<ScalarOperator> operators = buildOperator(granularities[i], BinaryType.EQ);
        Map<String, PartitionColumnFilter> result =
                ColumnFilterConverter.convertColumnFilter(operators, olapTable);
        assertEquals(expectedSizes[i], result.size());
    }
}
|
@Override
public List<Node> sniff(List<Node> nodes) {
    final boolean filterConfigured = attribute != null && value != null;
    if (!filterConfigured) {
        // No filter set: pass the node list through unchanged.
        return nodes;
    }
    return nodes.stream()
            .filter(candidate -> nodeMatchesFilter(candidate, attribute, value))
            .collect(Collectors.toList());
}
|
@Test
void doesNotFilterNodesIfNoFilterIsSet() throws Exception {
    final List<Node> nodes = mockNodes();
    // A sniffer built with a null attribute/value pair must pass nodes through unchanged.
    final NodesSniffer nodesSniffer = new FilteredElasticsearchNodesSniffer(null, null);
    assertThat(nodesSniffer.sniff(nodes)).isEqualTo(nodes);
}
|
/**
 * Normalises free text for comparison: trims surrounding whitespace,
 * lower-cases, strips punctuation and collapses internal whitespace runs
 * into single spaces.
 *
 * @param text the text to normalise; must be non-null and non-empty
 * @return the normalised text
 * @throws IllegalArgumentException if {@code text} is null or empty
 */
@Override
public String normalise(String text) {
    if (text == null || text.isEmpty()) {
        throw new IllegalArgumentException("Text cannot be null or empty");
    }
    return text.trim()
            // Locale.ROOT keeps lower-casing deterministic regardless of the
            // JVM default locale (avoids the Turkish dotless-i problem).
            .toLowerCase(java.util.Locale.ROOT)
            .replaceAll("\\p{Punct}", "")
            .replaceAll("\\s+", " ");
}
|
@Description("Normalise, when text is mixed case, then return lowercased text")
@Test
void normalise_WhenTextIsMixedCase_ThenReturnLowercasedText() {
    // When
    var result = textNormaliser.normalise("HeLLo WoRLD");

    // Then: case is folded; the single internal space is preserved.
    assertThat(result).isEqualTo("hello world");
}
|
/**
 * Parses a bracket-enclosed, comma-separated list (e.g. {@code "[a,b]"}) into
 * queue-typed destinations. Returns null for null input, input without the
 * enclosing brackets, or an empty list body.
 */
public static List<ActiveMQDestination> convertToActiveMQDestination(Object value) {
    if (value == null) {
        return null;
    }
    // text must be enclosed with []
    String text = value.toString();
    if (!text.startsWith("[") || !text.endsWith("]")) {
        return null;
    }
    String body = text.substring(1, text.length() - 1).trim();
    if (body.isEmpty()) {
        return null;
    }
    List<ActiveMQDestination> destinations = new ArrayList<ActiveMQDestination>();
    for (String name : body.split(",")) {
        destinations.add(ActiveMQDestination.createDestination(name.trim(), ActiveMQDestination.QUEUE_TYPE));
    }
    return destinations;
}
|
@Test
public void testConvertToActiveMQDestination() {
    // Inputs that are not a non-empty bracketed list yield null.
    assertNull(StringToListOfActiveMQDestinationConverter.convertToActiveMQDestination(""));
    assertNull(StringToListOfActiveMQDestinationConverter.convertToActiveMQDestination("[]"));
    assertNull(StringToListOfActiveMQDestinationConverter.convertToActiveMQDestination("[ ]"));

    // Entries are split on commas; surrounding whitespace is tolerated.
    List<ActiveMQDestination> result = StringToListOfActiveMQDestinationConverter.convertToActiveMQDestination("[one,two,three]");
    assertNotNull(result);
    assertEquals(3, result.size());

    result = StringToListOfActiveMQDestinationConverter.convertToActiveMQDestination("[one, two, three ]");
    assertNotNull(result);
    assertEquals(3, result.size());
}
|
@Override
public void discardState() throws Exception {
    final FileSystem fs = getFileSystem();

    IOException deletionFailure = null;
    boolean deleted = true;
    try {
        deleted = fs.delete(filePath, false);
    } catch (IOException e) {
        deletionFailure = e;
    }

    if (deleted && deletionFailure == null) {
        return;
    }
    // The delete reported failure; only escalate when the file actually survived.
    if (fs.exists(filePath)) {
        if (deletionFailure != null) {
            throw deletionFailure;
        }
        throw new IOException(
                "Unknown error caused the file '" + filePath + "' to not be deleted.");
    }
}
|
@Test
void testDisposeDoesNotDeleteParentDirectory() throws Exception {
    final Path handlePath = resolve("path", "with", "parent");

    // Record every path the mocked file system is asked to delete.
    final List<Path> deletedPaths = new ArrayList<>();
    initializeFileSystem(
            MockedLocalFileSystem.newBuilder()
                    .setDeleteFunction(
                            (path, ignoredRecursionMarker) -> {
                                deletedPaths.add(path);
                                return true;
                            })
                    .build());

    new FileStateHandle(handlePath, 42).discardState();

    assertThat(deletedPaths)
            .as(
                    "Only one delete call should have happened on the actual path but not the parent.")
            .singleElement()
            .isEqualTo(handlePath);
}
|
/**
 * Loads the named connection details from the meta store, using the provider
 * registered for the given key to determine the concrete details type.
 */
public ConnectionDetails getConnectionDetails( IMetaStore metaStore, String key, String name ) {
  final ConnectionProvider<? extends ConnectionDetails> provider = getConnectionProvider( key );
  if ( provider == null ) {
    // Unknown provider key: nothing to load.
    return null;
  }
  Class<? extends ConnectionDetails> detailsType = provider.getClassType();
  return loadElement( getMetaStoreFactory( metaStore, detailsType ), name );
}
|
@Test
public void testGetConnectionDetailsNull() {
    // An unknown connection name must resolve to null, not throw.
    Assert.assertNull( connectionManager.getConnectionDetails( DOES_NOT_EXIST ) );
}
|
/**
 * Starts a new workflow instance for the given workflow id/version using the
 * definition's run strategy, after validating the request.
 */
public RunResponse start(
    @NotNull String workflowId, @NotNull String version, @NotNull RunRequest runRequest) {
  final WorkflowDefinition definition = workflowDao.getWorkflowDefinition(workflowId, version);
  validateRequest(version, definition, runRequest);

  final RunProperties runProperties =
      RunProperties.from(
          Checks.notNull(
              definition.getPropertiesSnapshot(),
              "property snapshot cannot be null for workflow: " + workflowId));

  // Build a fresh instance applying request overrides and parameter evaluation.
  final WorkflowInstance instance =
      workflowHelper.createWorkflowInstance(
          definition.getWorkflow(),
          definition.getInternalId(),
          definition.getMetadata().getWorkflowVersionId(),
          runProperties,
          runRequest);

  final int startResult =
      runStrategyDao.startWithRunStrategy(instance, definition.getRunStrategyOrDefault());
  final RunResponse response = RunResponse.from(instance, startResult);
  LOG.info("Created a workflow instance with response {}", response);
  return response;
}
|
@Test
public void testStartWithInvalidTriggers() {
  // Both trigger-style initiators must be rejected the same way.
  Stream.of(new SignalInitiator(), new TimeInitiator())
      .forEach(
          initiator -> {
            RunRequest request =
                RunRequest.builder()
                    .initiator(initiator)
                    .currentPolicy(RunPolicy.START_FRESH_NEW_RUN)
                    .build();
            // An inactive workflow rejects trigger-initiated starts outright.
            definition.setIsActive(false);
            AssertHelper.assertThrows(
                "cannot trigger an inactive workflow",
                IllegalArgumentException.class,
                "Triggered workflow definition for workflow",
                () -> actionHandler.start("sample-minimal-wf", "active", request));
            // Even when active, a trigger without matching uuids conflicts.
            definition.setIsActive(true);
            AssertHelper.assertThrows(
                "cannot trigger a workflow without trigger uuids",
                MaestroResourceConflictException.class,
                "Invalid trigger initiator due to mismatch trigger uuid",
                () -> actionHandler.start("sample-minimal-wf", "active", request));
          });
}
|
/**
 * Renders this Delta Lake scan node for EXPLAIN output: table name, optional
 * sort column, the various predicate groups, cardinality and average row size.
 * Verbose level additionally prints data-cache options, pruned complex slot
 * types and the selected/total partition ratio.
 *
 * @param prefix      indentation prefix for each output line
 * @param detailLevel explain verbosity; VERBOSE adds partition and type details
 * @return the formatted explain fragment
 */
@Override
protected String getNodeExplainString(String prefix, TExplainLevel detailLevel) {
    StringBuilder output = new StringBuilder();
    output.append(prefix).append("TABLE: ").append(deltaLakeTable.getName()).append("\n");
    if (null != sortColumn) {
        output.append(prefix).append("SORT COLUMN: ").append(sortColumn).append("\n");
    }
    if (!scanNodePredicates.getPartitionConjuncts().isEmpty()) {
        output.append(prefix).append("PARTITION PREDICATES: ").append(
                getExplainString(scanNodePredicates.getPartitionConjuncts())).append("\n");
    }
    if (!scanNodePredicates.getNonPartitionConjuncts().isEmpty()) {
        output.append(prefix).append("NON-PARTITION PREDICATES: ").append(
                getExplainString(scanNodePredicates.getNonPartitionConjuncts())).append("\n");
    }
    if (!scanNodePredicates.getNoEvalPartitionConjuncts().isEmpty()) {
        output.append(prefix).append("NO EVAL-PARTITION PREDICATES: ").append(
                getExplainString(scanNodePredicates.getNoEvalPartitionConjuncts())).append("\n");
    }
    if (!scanNodePredicates.getMinMaxConjuncts().isEmpty()) {
        output.append(prefix).append("MIN/MAX PREDICATES: ").append(
                getExplainString(scanNodePredicates.getMinMaxConjuncts())).append("\n");
    }
    output.append(prefix).append(String.format("cardinality=%s", cardinality));
    output.append("\n");
    output.append(prefix).append(String.format("avgRowSize=%s", avgRowSize));
    output.append("\n");
    if (detailLevel == TExplainLevel.VERBOSE) {
        HdfsScanNode.appendDataCacheOptionsInExplain(output, prefix, dataCacheOptions);
        for (SlotDescriptor slotDescriptor : desc.getSlots()) {
            Type type = slotDescriptor.getOriginType();
            // Only pruned complex types are worth surfacing in verbose output.
            if (type.isComplexType()) {
                output.append(prefix)
                        .append(String.format("Pruned type: %d <-> [%s]\n", slotDescriptor.getId().asInt(), type));
            }
        }
        List<String> partitionNames = GlobalStateMgr.getCurrentState().getMetadataMgr().listPartitionNames(
                deltaLakeTable.getCatalogName(), deltaLakeTable.getDbName(), deltaLakeTable.getTableName());
        // Unpartitioned tables are displayed as 1 total partition to avoid "n/0".
        int totalPartitions = partitionNames.isEmpty() ? 1 : partitionNames.size();
        output.append(prefix).append(
                String.format("partitions=%s/%s", scanNodePredicates.getSelectedPartitionIds().size(),
                        totalPartitions));
        output.append("\n");
    }
    return output.toString();
}
|
@Test
public void testNodeExplain(@Mocked GlobalStateMgr globalStateMgr, @Mocked CatalogConnector connector,
                            @Mocked DeltaLakeTable table) {
    String catalogName = "delta0";
    CloudConfiguration cloudConfiguration = CloudConfigurationFactory.
            buildCloudConfigurationForStorage(new HashMap<>());
    // JMockit expectations: stub out the connector lookup and the table identity.
    new Expectations() {
        {
            GlobalStateMgr.getCurrentState().getConnectorMgr().getConnector(catalogName);
            result = connector;
            minTimes = 0;

            connector.getMetadata().getCloudConfiguration();
            result = cloudConfiguration;
            minTimes = 0;

            table.getCatalogName();
            result = catalogName;
            minTimes = 0;

            table.getName();
            result = "table0";
            minTimes = 0;
        }
    };

    TupleDescriptor desc = new TupleDescriptor(new TupleId(0));
    desc.setTable(table);
    DeltaLakeScanNode scanNode = new DeltaLakeScanNode(new PlanNodeId(0), desc, "Delta Scan Node");
    // Partition counts are printed only at VERBOSE explain level.
    Assert.assertFalse(scanNode.getNodeExplainString("", TExplainLevel.NORMAL).contains("partitions"));
    Assert.assertTrue(scanNode.getNodeExplainString("", TExplainLevel.VERBOSE).contains("partitions"));
}
|
/**
 * Looks up scheduler nodes by resource name: {@code ResourceRequest.ANY}
 * returns every tracked node, a node name returns that single node, and a
 * rack name returns all nodes on the rack.
 *
 * @param resourceName ANY, a node name, or a rack name; must be non-empty
 * @return matching nodes, or an empty list when nothing matches
 */
public List<N> getNodesByResourceName(final String resourceName) {
  Preconditions.checkArgument(
      resourceName != null && !resourceName.isEmpty(),
      "resourceName must be non-empty");
  List<N> retNodes = new ArrayList<>();
  if (ResourceRequest.ANY.equals(resourceName)) {
    retNodes.addAll(getAllNodes());
  } else if (nodeNameToNodeMap.containsKey(resourceName)) {
    retNodes.add(nodeNameToNodeMap.get(resourceName));
  } else if (nodesPerRack.containsKey(resourceName)) {
    retNodes.addAll(nodesPerRack.get(resourceName));
  } else {
    LOG.info(
        "Could not find a node matching given resourceName " + resourceName);
  }
  return retNodes;
}
|
@Test
public void testGetNodesForResourceName() throws Exception {
    addEight4x4Nodes();
    // ANY matches the whole cluster; rack and node names narrow the result.
    assertEquals("Incorrect number of nodes matching ANY",
        8, nodeTracker.getNodesByResourceName(ResourceRequest.ANY).size());
    assertEquals("Incorrect number of nodes matching rack",
        4, nodeTracker.getNodesByResourceName("rack0").size());
    assertEquals("Incorrect number of nodes matching node",
        1, nodeTracker.getNodesByResourceName("host0").size());
}
|
/**
 * Starts building a batched operation over the given items.
 *
 * @param items items to iterate over; must not be null
 * @param <I>   item type
 * @return a builder for the per-item operations
 */
public static <I> Builder<I> foreach(Iterable<I> items) {
    return new Builder<>(requireNonNull(items, "items"));
}
|
@Test
public void testFailSlowExceptions() throws Throwable {
    // Fail-slow mode: the IOException surfaces only after every item was attempted.
    intercept(IOException.class,
        () -> builder()
            .run(failingTask));
    failingTask.assertInvoked("continued through operations", ITEM_COUNT);
    items.forEach(Item::assertCommittedOrFailed);
}
|
/**
 * For a link-local or site-local IPv6 address lacking both a scope id and a
 * scoped interface, tries to resolve the real scoped address; otherwise the
 * address is returned as-is.
 */
public static InetAddress fixScopeIdAndGetInetAddress(final InetAddress inetAddress) throws SocketException {
    if (!(inetAddress instanceof Inet6Address inet6Address)) {
        // IPv4 addresses carry no scope id.
        return inetAddress;
    }
    final boolean missingScope = (inet6Address.isLinkLocalAddress() || inet6Address.isSiteLocalAddress())
            && inet6Address.getScopeId() <= 0
            && inet6Address.getScopedInterface() == null;
    if (!missingScope) {
        return inetAddress;
    }
    final Inet6Address fixedAddress = findRealInet6Address(inet6Address);
    return fixedAddress != null ? fixedAddress : inetAddress;
}
|
@Test
public void testFixScopeIdAndGetInetAddress_whenLinkLocalAddress_withNoInterfaceBind() throws SocketException, UnknownHostException {
    // Scope 0 on a link-local address: nothing to fix up without a bound interface.
    Inet6Address inet6Address = createInet6AddressWithScope(SOME_LINK_LOCAL_ADDRESS, 0);
    assertThat(inet6Address.isLinkLocalAddress()).isTrue();
    InetAddress actual = AddressUtil.fixScopeIdAndGetInetAddress(inet6Address);
    assertEquals(inet6Address, actual);
}
|
/**
 * Reads the value of a public static field via reflection.
 * <p>
 * Any reflection failure (missing field, access problem, non-static field)
 * is deliberately swallowed and reported as {@code null} — callers use this
 * as a best-effort probe, so a {@code null} sentinel is the contract.
 *
 * @param clazz     class declaring the field
 * @param fieldName name of the public static field
 * @param <E>       expected value type; the cast is unchecked, a mismatch
 *                  surfaces as ClassCastException at the call site
 * @return the field value, or {@code null} if it cannot be read
 */
@SuppressWarnings("unchecked") // cast to E is the documented best-effort contract
public static <E> E findStaticFieldValue(Class<?> clazz, String fieldName) {
    try {
        Field field = clazz.getField(fieldName);
        return (E) field.get(null);
    } catch (Exception ignore) {
        // best-effort lookup: absence or inaccessibility maps to null
        return null;
    }
}
|
// Looking up a non-existent field must return null rather than throw.
@Test
public void test_whenFieldNotExist() {
    Object value = findStaticFieldValue(ClassWithStaticField.class, "nonexisting");
    assertNull(value);
}
|
/**
 * Creates a temporary local file for the given remote path, prefixing the
 * remote file name with a random alphanumeric string to avoid collisions.
 *
 * @param file remote path whose name is used as the suffix
 * @return local file named {@code <random>-<filename>}
 */
@Override
public Local create(final Path file) {
    final String randomPrefix = new AlphanumericRandomStringService().random();
    final String name = String.format("%s-%s", randomPrefix, file.getName());
    return this.create(name);
}
|
// The temporary local name must be "<random>-t.txt" (not the plain remote name),
// and must be created directly under the configured temp directory.
@Test
public void testTemporaryPath() {
    final Path file = new Path("/f1/f2/t.txt", EnumSet.of(Path.Type.file));
    file.attributes().setDuplicate(true);
    file.attributes().setVersionId("1");
    final Local local = new FlatTemporaryFileService().create(file);
    assertTrue(local.getParent().exists());
    assertEquals("t.txt", file.getName());
    assertNotEquals("t.txt", local.getName());
    assertTrue(local.getName().endsWith("-t.txt"));
    assertEquals(LocalFactory.get(PreferencesFactory.get().getProperty("tmp.dir")), LocalFactory.get(local.getParent().getAbsolute()));
}
|
/**
 * Validates the parameter count and stores the parameters for later execution.
 *
 * @param parameters function parameters supplied by the test plan
 * @throws InvalidVariableException if the count is outside
 *         [MIN_PARAMETER_COUNT, MAX_PARAMETER_COUNT]
 */
@Override
public void setParameters(Collection<CompoundVariable> parameters) throws InvalidVariableException {
    checkParameterCount(parameters, MIN_PARAMETER_COUNT, MAX_PARAMETER_COUNT);
    // Zero-length array is the preferred toArray idiom: safe under concurrent
    // modification and at least as fast on modern JVMs as a pre-sized array.
    values = parameters.toArray(new CompoundVariable[0]);
}
|
// Only two parameters are supplied where the function requires more,
// so setParameters must reject them with InvalidVariableException.
@Test
void testDateConvertError() throws Exception {
    params.add(new CompoundVariable("2017-01-02"));
    params.add(new CompoundVariable("yyyy-MM-dd"));
    assertThrows(
            InvalidVariableException.class,
            () -> dateConvert.setParameters(params));
}
|
/**
 * REST endpoint returning statistics for a single index set.
 * Requires read permission on the index set; responds 404 when the id
 * is unknown in the registry.
 *
 * @param id index set identifier from the URL path
 * @return statistics computed by {@code indexSetStatsCreator}
 */
@GET
@Path("{id}/stats")
@Timed
@ApiOperation(value = "Get index set statistics")
@ApiResponses(value = {
        @ApiResponse(code = 403, message = "Unauthorized"),
        @ApiResponse(code = 404, message = "Index set not found"),
})
public IndexSetStats indexSetStatistics(@ApiParam(name = "id", required = true)
                                        @PathParam("id") String id) {
    // authorization first: unauthorized callers must not learn whether the id exists
    checkPermission(RestPermissions.INDEXSETS_READ, id);
    return indexSetRegistry.get(id)
            .map(indexSetStatsCreator::getForIndexSet)
            .orElseThrow(() -> new NotFoundException("Couldn't load index set with ID <" + id + ">"));
}
|
// Happy path: a known id resolves via the registry and the stats creator's
// result is returned untouched.
@Test
public void indexSetStatistics() {
    final IndexSet indexSet = mock(IndexSet.class);
    final IndexSetStats indexSetStats = IndexSetStats.create(5L, 23L, 42L);
    when(indexSetRegistry.get("id")).thenReturn(Optional.of(indexSet));
    when(indexSetStatsCreator.getForIndexSet(indexSet)).thenReturn(indexSetStats);
    assertThat(indexSetsResource.indexSetStatistics("id")).isEqualTo(indexSetStats);
}
|
/**
 * Convenience overload: compiles the regex and delegates to the
 * {@link java.util.regex.Pattern} variant.
 *
 * @param regex      regular expression with at least two capture groups
 * @param keyGroup   group index producing the key
 * @param valueGroup group index producing the value
 * @return the configured transform
 */
public static MatchesKV matchesKV(String regex, int keyGroup, int valueGroup) {
  final Pattern compiled = Pattern.compile(regex);
  return matchesKV(compiled, keyGroup, valueGroup);
}
|
// When the regex matches nothing in the input, the output PCollection is empty.
@Test
@Category(NeedsRunner.class)
public void testKVMatchesNone() {
  PCollection<KV<String, String>> output =
      p.apply(Create.of("x y z")).apply(Regex.matchesKV("a (b) (c)", 1, 2));
  PAssert.that(output).empty();
  p.run();
}
|
/**
 * Appends all pages of the source document to the destination document and merges
 * the document-level structures of both catalogs: information dictionary, version,
 * open action, AcroForm, article threads, name/destination dictionaries, outline,
 * page mode, page labels, XMP metadata, optional content properties, output intents
 * and the logical (tagging) structure tree.
 *
 * @param destination document to append to; modified in place
 * @param source document whose pages and catalog entries are copied; not modified
 * @throws IOException if either document is closed, the source contains a dynamic
 * XFA form, or cloning of an underlying COS structure fails
 */
public void appendDocument(PDDocument destination, PDDocument source) throws IOException
{
    if (source.getDocument().isClosed())
    {
        throw new IOException("Error: source PDF is closed.");
    }
    if (destination.getDocument().isClosed())
    {
        throw new IOException("Error: destination PDF is closed.");
    }
    PDDocumentCatalog srcCatalog = source.getDocumentCatalog();
    // dynamic XFA forms are rendered from XML, not from pages, so a page-wise merge is impossible
    if (isDynamicXfa(srcCatalog.getAcroForm()))
    {
        throw new IOException("Error: can't merge source document containing dynamic XFA form content.");
    }
    // merge the document information dictionaries (Title, Author, ...); source wins on conflicts
    PDDocumentInformation destInfo = destination.getDocumentInformation();
    PDDocumentInformation srcInfo = source.getDocumentInformation();
    mergeInto(srcInfo.getCOSObject(), destInfo.getCOSObject(), Collections.emptySet());
    // use the highest version number for the resulting pdf
    float destVersion = destination.getVersion();
    float srcVersion = source.getVersion();
    if (destVersion < srcVersion)
    {
        destination.setVersion(srcVersion);
    }
    // source-page index targeted by the adopted open action; -1 means "none"
    int pageIndexOpenActionDest = -1;
    PDDocumentCatalog destCatalog = destination.getDocumentCatalog();
    // only adopt the source open action when the destination has none of its own
    if (destCatalog.getOpenAction() == null)
    {
        // PDFBOX-3972: get local dest page index, it must be reassigned after the page cloning
        PDDestinationOrAction openAction = null;
        try
        {
            openAction = srcCatalog.getOpenAction();
        }
        catch (IOException ex)
        {
            // PDFBOX-4223
            LOG.error("Invalid OpenAction ignored", ex);
        }
        PDDestination openActionDestination = null;
        if (openAction instanceof PDActionGoTo)
        {
            openActionDestination = ((PDActionGoTo) openAction).getDestination();
        }
        else if (openAction instanceof PDDestination)
        {
            openActionDestination = (PDDestination) openAction;
        }
        // note that it can also be something else, e.g. PDActionJavaScript, then do nothing
        if (openActionDestination instanceof PDPageDestination)
        {
            PDPage page = ((PDPageDestination) openActionDestination).getPage();
            if (page != null)
            {
                pageIndexOpenActionDest = srcCatalog.getPages().indexOf(page);
            }
        }
        destCatalog.setOpenAction(openAction);
    }
    // the cloner tracks already-cloned objects so shared resources are cloned only once
    PDFCloneUtility cloner = new PDFCloneUtility(destination);
    mergeAcroForm(cloner, destCatalog, srcCatalog);
    // merge article threads.
    // NOTE(review): srcThreads is cloned from destCatalog, not srcCatalog — this looks
    // like a copy/paste bug (source /Threads would never be merged). TODO confirm upstream.
    COSArray destThreads = destCatalog.getCOSObject().getCOSArray(COSName.THREADS);
    COSArray srcThreads = (COSArray) cloner.cloneForNewDocument(destCatalog.getCOSObject().getDictionaryObject(
            COSName.THREADS));
    if (destThreads == null)
    {
        destCatalog.getCOSObject().setItem(COSName.THREADS, srcThreads);
    }
    else
    {
        destThreads.addAll(srcThreads);
    }
    // merge the /Names name dictionaries (embedded files, JavaScript, named dests, ...)
    PDDocumentNameDictionary destNames = destCatalog.getNames();
    PDDocumentNameDictionary srcNames = srcCatalog.getNames();
    if (srcNames != null)
    {
        if (destNames == null)
        {
            destCatalog.getCOSObject().setItem(COSName.NAMES,
                    cloner.cloneForNewDocument(srcNames.getCOSObject()));
        }
        else
        {
            cloner.cloneMerge(srcNames, destNames);
        }
    }
    if (destNames != null && destNames.getCOSObject().containsKey(COSName.ID_TREE))
    {
        // found in 001031.pdf from PDFBOX-4417 and doesn't belong there
        destNames.getCOSObject().removeItem(COSName.ID_TREE);
        LOG.warn("Removed /IDTree from /Names dictionary, doesn't belong there");
    }
    // merge the legacy (pre-PDF-1.2) /Dests named destination dictionary
    PDDocumentNameDestinationDictionary srcDests = srcCatalog.getDests();
    if (srcDests != null)
    {
        PDDocumentNameDestinationDictionary destDests = destCatalog.getDests();
        if (destDests == null)
        {
            destCatalog.getCOSObject().setItem(COSName.DESTS,
                    cloner.cloneForNewDocument(srcDests.getCOSObject()));
        }
        else
        {
            cloner.cloneMerge(srcDests, destDests);
        }
    }
    // merge document outlines (bookmarks): either adopt the cloned source outline
    // wholesale, or append the source's top-level items after the destination's last one
    PDDocumentOutline srcOutline = srcCatalog.getDocumentOutline();
    if (srcOutline != null)
    {
        PDDocumentOutline destOutline = destCatalog.getDocumentOutline();
        if (destOutline == null || destOutline.getFirstChild() == null)
        {
            PDDocumentOutline cloned = new PDDocumentOutline(
                    cloner.cloneForNewDocument(srcOutline.getCOSObject()));
            destCatalog.setDocumentOutline(cloned);
        }
        else
        {
            // search last sibling for dest, because /Last entry is sometimes wrong
            PDOutlineItem destLastOutlineItem = destOutline.getFirstChild();
            while (true)
            {
                PDOutlineItem outlineItem = destLastOutlineItem.getNextSibling();
                if (outlineItem == null)
                {
                    break;
                }
                destLastOutlineItem = outlineItem;
            }
            for (PDOutlineItem item : srcOutline.children())
            {
                // get each child, clone its dictionary, remove siblings info,
                // append outline item created from there
                COSDictionary clonedDict = cloner.cloneForNewDocument(item.getCOSObject());
                clonedDict.removeItem(COSName.PREV);
                clonedDict.removeItem(COSName.NEXT);
                PDOutlineItem clonedItem = new PDOutlineItem(clonedDict);
                destLastOutlineItem.insertSiblingAfter(clonedItem);
                destLastOutlineItem = destLastOutlineItem.getNextSibling();
            }
        }
    }
    // adopt the source page mode only when the destination does not set one
    PageMode destPageMode = destCatalog.getPageMode();
    if (destPageMode == null)
    {
        PageMode srcPageMode = srcCatalog.getPageMode();
        destCatalog.setPageMode(srcPageMode);
    }
    // merge /PageLabels: source label indices are shifted by the destination's page count
    COSDictionary srcLabels = srcCatalog.getCOSObject().getCOSDictionary(COSName.PAGE_LABELS);
    if (srcLabels != null)
    {
        int destPageCount = destination.getNumberOfPages();
        COSArray destNums;
        COSDictionary destLabels = destCatalog.getCOSObject().getCOSDictionary(COSName.PAGE_LABELS);
        if (destLabels == null)
        {
            destLabels = new COSDictionary();
            destNums = new COSArray();
            destLabels.setItem(COSName.NUMS, destNums);
            destCatalog.getCOSObject().setItem(COSName.PAGE_LABELS, destLabels);
        }
        else
        {
            destNums = (COSArray) destLabels.getDictionaryObject(COSName.NUMS);
        }
        COSArray srcNums = (COSArray) srcLabels.getDictionaryObject(COSName.NUMS);
        if (srcNums != null)
        {
            // /Nums is a flat [index label index label ...] array, hence the step of 2
            int startSize = destNums.size();
            for (int i = 0; i < srcNums.size(); i += 2)
            {
                COSBase base = srcNums.getObject(i);
                if (!(base instanceof COSNumber))
                {
                    LOG.error("page labels ignored, index {} should be a number, but is {}", i,
                            base);
                    // remove what we added
                    while (destNums.size() > startSize)
                    {
                        destNums.remove(startSize);
                    }
                    break;
                }
                COSNumber labelIndex = (COSNumber) base;
                long labelIndexValue = labelIndex.intValue();
                destNums.add(COSInteger.get(labelIndexValue + destPageCount));
                destNums.add(cloner.cloneForNewDocument(srcNums.getObject(i + 1)));
            }
        }
    }
    // adopt the source XMP metadata stream only when the destination has none
    COSStream destMetadata = destCatalog.getCOSObject().getCOSStream(COSName.METADATA);
    COSStream srcMetadata = srcCatalog.getCOSObject().getCOSStream(COSName.METADATA);
    if (destMetadata == null && srcMetadata != null)
    {
        try
        {
            PDStream newStream = new PDStream(destination, srcMetadata.createInputStream(), (COSName) null);
            // copy stream dictionary entries, but keep the new stream's own /Filter and /Length
            mergeInto(srcMetadata, newStream.getCOSObject(),
                    new HashSet<>(Arrays.asList(COSName.FILTER, COSName.LENGTH)));
            destCatalog.getCOSObject().setItem(COSName.METADATA, newStream);
        }
        catch (IOException ex)
        {
            // PDFBOX-4227 cleartext XMP stream with /Flate
            LOG.error("Metadata skipped because it could not be read", ex);
        }
    }
    // merge optional content (layer) properties
    COSDictionary destOCP = destCatalog.getCOSObject().getCOSDictionary(COSName.OCPROPERTIES);
    COSDictionary srcOCP = srcCatalog.getCOSObject().getCOSDictionary(COSName.OCPROPERTIES);
    if (destOCP == null && srcOCP != null)
    {
        destCatalog.getCOSObject().setItem(COSName.OCPROPERTIES, cloner.cloneForNewDocument(srcOCP));
    }
    else if (destOCP != null && srcOCP != null)
    {
        cloner.cloneMerge(srcOCP, destOCP);
    }
    mergeOutputIntents(cloner, srcCatalog, destCatalog);
    // merge logical structure hierarchy
    boolean mergeStructTree = false;
    int destParentTreeNextKey = -1;
    Map<Integer, COSObjectable> srcNumberTreeAsMap = null;
    Map<Integer, COSObjectable> destNumberTreeAsMap = null;
    PDStructureTreeRoot srcStructTree = srcCatalog.getStructureTreeRoot();
    PDStructureTreeRoot destStructTree = destCatalog.getStructureTreeRoot();
    if (destStructTree == null && srcStructTree != null)
    {
        // create a dummy structure tree in the destination, so that the source
        // tree is cloned. (We can't just copy the tree reference due to PDFBOX-3999)
        destStructTree = new PDStructureTreeRoot();
        destCatalog.setStructureTreeRoot(destStructTree);
        destStructTree.setParentTree(new PDNumberTreeNode(PDParentTreeValue.class));
        // PDFBOX-4429: remove bogus StructParent(s)
        for (PDPage page : destCatalog.getPages())
        {
            page.getCOSObject().removeItem(COSName.STRUCT_PARENTS);
            for (PDAnnotation ann : page.getAnnotations())
            {
                ann.getCOSObject().removeItem(COSName.STRUCT_PARENT);
            }
        }
    }
    if (destStructTree != null)
    {
        // determine the next free parent-tree key; the struct trees are merged only
        // when both sides have a usable (non-empty) parent number tree
        PDNumberTreeNode destParentTree = destStructTree.getParentTree();
        destParentTreeNextKey = destStructTree.getParentTreeNextKey();
        if (destParentTree != null)
        {
            destNumberTreeAsMap = getNumberTreeAsMap(destParentTree);
            if (destParentTreeNextKey < 0)
            {
                // /ParentTreeNextKey missing: recompute it as max existing key + 1
                if (destNumberTreeAsMap.isEmpty())
                {
                    destParentTreeNextKey = 0;
                }
                else
                {
                    destParentTreeNextKey = Collections.max(destNumberTreeAsMap.keySet()) + 1;
                }
            }
            if (destParentTreeNextKey >= 0 && srcStructTree != null)
            {
                PDNumberTreeNode srcParentTree = srcStructTree.getParentTree();
                if (srcParentTree != null)
                {
                    srcNumberTreeAsMap = getNumberTreeAsMap(srcParentTree);
                    if (!srcNumberTreeAsMap.isEmpty())
                    {
                        mergeStructTree = true;
                    }
                }
            }
        }
    }
    // clone and append every source page; objMapping records old->new dictionaries
    // so structure-tree references can be retargeted afterwards
    Map<COSDictionary, COSDictionary> objMapping = new HashMap<>();
    int pageIndex = 0;
    PDPageTree destinationPageTree = destination.getPages(); // cache PageTree
    for (PDPage page : srcCatalog.getPages())
    {
        PDPage newPage = new PDPage(cloner.cloneForNewDocument(page.getCOSObject()));
        if (!mergeStructTree)
        {
            // PDFBOX-4429: remove bogus StructParent(s)
            newPage.getCOSObject().removeItem(COSName.STRUCT_PARENTS);
            for (PDAnnotation ann : newPage.getAnnotations())
            {
                ann.getCOSObject().removeItem(COSName.STRUCT_PARENT);
            }
        }
        newPage.setCropBox(page.getCropBox());
        newPage.setMediaBox(page.getMediaBox());
        newPage.setRotation(page.getRotation());
        PDResources resources = page.getResources();
        if (resources != null)
        {
            // this is smart enough to just create references for resources that are used on multiple pages
            newPage.setResources(new PDResources(
                    cloner.cloneForNewDocument(resources.getCOSObject())));
        }
        else
        {
            newPage.setResources(new PDResources());
        }
        if (mergeStructTree)
        {
            // add the value of the destination ParentTreeNextKey to every source element
            // StructParent(s) value so that these don't overlap with the existing values
            updateStructParentEntries(newPage, destParentTreeNextKey);
            objMapping.put(page.getCOSObject(), newPage.getCOSObject());
            List<PDAnnotation> oldAnnots = page.getAnnotations();
            List<PDAnnotation> newAnnots = newPage.getAnnotations();
            for (int i = 0; i < oldAnnots.size(); i++)
            {
                objMapping.put(oldAnnots.get(i).getCOSObject(), newAnnots.get(i).getCOSObject());
            }
            // TODO update mapping for XObjects
        }
        destinationPageTree.add(newPage);
        if (pageIndex == pageIndexOpenActionDest)
        {
            // PDFBOX-3972: reassign the page.
            // The openAction is either a PDActionGoTo or a PDPageDestination
            PDDestinationOrAction openAction = destCatalog.getOpenAction();
            PDPageDestination pageDestination;
            if (openAction instanceof PDActionGoTo)
            {
                pageDestination = (PDPageDestination) ((PDActionGoTo) openAction).getDestination();
            }
            else
            {
                pageDestination = (PDPageDestination) openAction;
            }
            pageDestination.setPage(newPage);
        }
        ++pageIndex;
    }
    if (mergeStructTree)
    {
        // rewrite cloned structure elements so their page/annotation references point
        // to the new (cloned) objects, then append the shifted source parent-tree entries
        updatePageReferences(cloner, srcNumberTreeAsMap, objMapping);
        int maxSrcKey = -1;
        for (Map.Entry<Integer, COSObjectable> entry : srcNumberTreeAsMap.entrySet())
        {
            int srcKey = entry.getKey();
            maxSrcKey = Math.max(srcKey, maxSrcKey);
            destNumberTreeAsMap.put(destParentTreeNextKey + srcKey,
                    cloner.cloneForNewDocument(entry.getValue().getCOSObject()));
        }
        destParentTreeNextKey += maxSrcKey + 1;
        PDNumberTreeNode newParentTreeNode = new PDNumberTreeNode(PDParentTreeValue.class);
        // Note that all elements are stored flatly. This could become a problem for large files
        // when these are opened in a viewer that uses the tagging information.
        // If this happens, then PDNumberTreeNode should be improved with a convenience method that
        // stores the map into a B+Tree, see https://en.wikipedia.org/wiki/B+_tree
        newParentTreeNode.setNumbers(destNumberTreeAsMap);
        destStructTree.setParentTree(newParentTreeNode);
        destStructTree.setParentTreeNextKey(destParentTreeNextKey);
        // merge the remaining tagging-related catalog entries
        mergeKEntries(cloner, srcStructTree, destStructTree);
        mergeRoleMap(srcStructTree, destStructTree);
        mergeIDTree(cloner, srcStructTree, destStructTree);
        mergeMarkInfo(destCatalog, srcCatalog);
        mergeLanguage(destCatalog, srcCatalog);
        mergeViewerPreferences(destCatalog, srcCatalog);
    }
}
|
// PDFBOX-4418: when /ParentTreeNextKey is missing it must be recalculated
// from the highest existing parent-tree key, and the merged keys shifted by it.
@Test
void testMissingParentTreeNextKey() throws IOException
{
    PDFMergerUtility pdfMergerUtility = new PDFMergerUtility();
    PDDocument src = Loader.loadPDF(new File(TARGETPDFDIR, "PDFBOX-4418-000314.pdf"));
    PDDocument dst = Loader.loadPDF(new File(TARGETPDFDIR, "PDFBOX-4418-000314.pdf"));
    // existing numbers are 321..327; ParentTreeNextKey is 408.
    // After deletion, it is recalculated in the merge 328.
    // That value is added to all numbers of the destination,
    // so the new numbers should be 321+328..327+328, i.e. 649..655,
    // and this ParentTreeNextKey is 656 at the end.
    dst.getDocumentCatalog().getStructureTreeRoot().getCOSObject().removeItem(COSName.PARENT_TREE_NEXT_KEY);
    pdfMergerUtility.appendDocument(dst, src);
    src.close();
    dst.save(new File(TARGETTESTDIR, "PDFBOX-4418-000314-merged.pdf"));
    dst.close();
    // reload the merged result to verify what was actually written to disk
    dst = Loader.loadPDF(new File(TARGETTESTDIR, "PDFBOX-4418-000314-merged.pdf"));
    assertEquals(656, dst.getDocumentCatalog().getStructureTreeRoot().getParentTreeNextKey());
    dst.close();
}
|
/**
 * Returns the accumulated content as a string.
 * May only be called once the writer has been finalized; verifyState throws otherwise.
 */
public String toString() {
    verifyState(State.FINALIZED);
    return builder.toString();
}
|
// A freshly finalized writer with no explicit title must render the default
// header (with the "Untitled page" placeholder title) followed by the footer.
@Test
void testStructure() {
    HttpWriter writer = new HttpWriter();
    String header = defaultHeader.replace(defaultTitle, "Untitled page");
    assertEquals(header + defaultFooter, writer.toString());
}
|
/**
 * Returns the filters this repository was initialized with.
 * NOTE(review): this exposes the internal array directly — callers could mutate
 * it. If external mutation is a concern, return a clone; verify no caller
 * relies on identity before changing.
 */
public InputFileFilter[] getInputFileFilters() {
  return inputFileFilters;
}
|
// The repository must hand back exactly the filters it was constructed with.
@Test
public void should_return_filters_from_initialization() {
  InputFileFilterRepository underTest = new InputFileFilterRepository(f -> true);
  assertThat(underTest.getInputFileFilters()).isNotNull();
  assertThat(underTest.getInputFileFilters()).hasSize(1);
}
|
/**
 * Two SelectionParameters are equal when every selection criterion matches:
 * qualifiers, qualifying names, condition qualifiers/names, source RHS and
 * result type. Type-based fields are compared via the private type-aware
 * {@code equals} helper; the rest via {@link Objects#equals}.
 */
@Override
public boolean equals(Object obj) {
    if ( this == obj ) {
        return true;
    }
    if ( obj == null || getClass() != obj.getClass() ) {
        return false;
    }
    final SelectionParameters other = (SelectionParameters) obj;
    return equals( this.qualifiers, other.qualifiers )
        && Objects.equals( this.qualifyingNames, other.qualifyingNames )
        && Objects.equals( this.conditionQualifiers, other.conditionQualifiers )
        && Objects.equals( this.conditionQualifyingNames, other.conditionQualifyingNames )
        && Objects.equals( this.sourceRHS, other.sourceRHS )
        && equals( this.resultType, other.resultType );
}
|
// Builds two SelectionParameters from independently-constructed but identical
// inputs and checks equals() is symmetric and true.
@Test
public void testAllEqual() {
    List<String> qualifyingNames = Arrays.asList( "language", "german" );
    TypeMirror resultType = new TestTypeMirror( "resultType" );
    List<TypeMirror> qualifiers = new ArrayList<>();
    qualifiers.add( new TestTypeMirror( "org.mapstruct.test.SomeType" ) );
    qualifiers.add( new TestTypeMirror( "org.mapstruct.test.SomeOtherType" ) );
    SelectionParameters params = new SelectionParameters( qualifiers, qualifyingNames, resultType, typeUtils );
    // second instance: distinct objects, equal content
    List<String> qualifyingNames2 = Arrays.asList( "language", "german" );
    TypeMirror resultType2 = new TestTypeMirror( "resultType" );
    List<TypeMirror> qualifiers2 = new ArrayList<>();
    qualifiers2.add( new TestTypeMirror( "org.mapstruct.test.SomeType" ) );
    qualifiers2.add( new TestTypeMirror( "org.mapstruct.test.SomeOtherType" ) );
    SelectionParameters params2 = new SelectionParameters( qualifiers2, qualifyingNames2, resultType2, typeUtils );
    assertThat( params.equals( params2 ) ).as( "All equal" ).isTrue();
    assertThat( params2.equals( params ) ).as( "All equal" ).isTrue();
}
|
/**
 * Validates and stores the JDBC resource properties.
 * Rejects any key outside the supported set, then verifies each required
 * setting is present and finally computes the driver checksum.
 *
 * @param properties resource properties; must not be null
 * @throws DdlException on an unknown key or a missing required setting
 */
@Override
protected void setProperties(Map<String, String> properties) throws DdlException {
    Preconditions.checkState(properties != null);
    for (Map.Entry<String, String> entry : properties.entrySet()) {
        String key = entry.getKey();
        boolean supported = DRIVER_URL.equals(key) || URI.equals(key) || USER.equals(key)
                || PASSWORD.equals(key) || TYPE.equals(key) || NAME.equals(key)
                || DRIVER_CLASS.equals(key);
        if (!supported) {
            throw new DdlException("Property " + key + " is unknown");
        }
    }
    configs = properties;
    // every required connection setting must be present and non-empty
    checkProperties(DRIVER_URL);
    checkProperties(DRIVER_CLASS);
    checkProperties(URI);
    checkProperties(USER);
    checkProperties(PASSWORD);
    computeDriverChecksum();
}
|
// Removing the required driver_class property must make setProperties fail.
@Test(expected = DdlException.class)
public void testWithoutDriverClass() throws Exception {
    Map<String, String> configs = getMockConfigs();
    configs.remove(JDBCResource.DRIVER_CLASS);
    JDBCResource resource = new JDBCResource("jdbc_resource_test");
    resource.setProperties(configs);
}
|
/**
 * UDF overload: joins the array elements using the default delimiter.
 * Delegates to the two-argument variant.
 *
 * @param array elements to join; null handling is defined by the delegate
 * @return the joined string
 */
@Udf
public <T> String join(
    @UdfParameter(description = "the array to join using the default delimiter '"
        + DEFAULT_DELIMITER + "'") final List<T> array
) {
  return join(array, DEFAULT_DELIMITER);
}
|
// Exercises join() across all primitive-ish element types (boolean, int, long,
// double, BigDecimal, String) with three delimiter variants each:
// empty string, the default delimiter (one-arg overload), and a custom one.
// Null elements render as the literal "null"; a null delimiter joins with nothing.
@Test
public void shouldReturnCorrectStringForFlatArraysWithPrimitiveTypes() {
  assertThat(arrayJoinUDF.join(Arrays.asList(true, null, false),""),
      is("truenullfalse")
  );
  assertThat(arrayJoinUDF.join(Arrays.asList(true, null, false)),
      is("true,null,false")
  );
  assertThat(arrayJoinUDF.join(Arrays.asList(true,null,false),CUSTOM_DELIMITER),
      is("true"+CUSTOM_DELIMITER+"null"+CUSTOM_DELIMITER+"false")
  );
  assertThat(arrayJoinUDF.join(Arrays.asList(1,23,-42,0),null), is("123-420"));
  assertThat(arrayJoinUDF.join(Arrays.asList(1,23,-42,0)), is("1,23,-42,0"));
  assertThat(arrayJoinUDF.join(Arrays.asList(1,23,-42,0),CUSTOM_DELIMITER),
      is("1"+CUSTOM_DELIMITER+"23"+CUSTOM_DELIMITER+"-42"+CUSTOM_DELIMITER+"0")
  );
  assertThat(arrayJoinUDF.join(Arrays.asList(-4294967297L, 8589934592L),""),
      is("-42949672978589934592")
  );
  assertThat(arrayJoinUDF.join(Arrays.asList(-4294967297L, 8589934592L)),
      is("-4294967297,8589934592")
  );
  assertThat(arrayJoinUDF.join(Arrays.asList(-4294967297L, 8589934592L), CUSTOM_DELIMITER),
      is("-4294967297"+CUSTOM_DELIMITER+"8589934592")
  );
  assertThat(arrayJoinUDF.join(Arrays.asList(1.23,-23.42,0.0),null),
      is("1.23-23.420.0")
  );
  assertThat(arrayJoinUDF.join(Arrays.asList(1.23,-23.42,0.0)),
      is("1.23,-23.42,0.0")
  );
  assertThat(arrayJoinUDF.join(Arrays.asList(1.23,-23.42,0.0),CUSTOM_DELIMITER),
      is("1.23"+CUSTOM_DELIMITER+"-23.42"+CUSTOM_DELIMITER+"0.0")
  );
  assertThat(arrayJoinUDF.join(
      Arrays.asList(new BigDecimal("123.45"), new BigDecimal("987.65")),null
      ),
      is("123.45987.65")
  );
  assertThat(arrayJoinUDF.join(Arrays.asList(new BigDecimal("123.45"), new BigDecimal("987.65"))),
      is("123.45,987.65")
  );
  assertThat(arrayJoinUDF.join(
      Arrays.asList(new BigDecimal("123.45"), new BigDecimal("987.65")),CUSTOM_DELIMITER),
      is("123.45"+CUSTOM_DELIMITER+"987.65")
  );
  // empty strings join as empty segments; trailing null renders as "null"
  assertThat(arrayJoinUDF.join(Arrays.asList("Hello","From","","Ksqldb","Udf"),""),
      is("HelloFromKsqldbUdf")
  );
  assertThat(arrayJoinUDF.join(Arrays.asList("Hello","From","","Ksqldb","Udf")),
      is("Hello,From,,Ksqldb,Udf")
  );
  assertThat(
      arrayJoinUDF.join(Arrays.asList("hello","from","","ksqldb","udf",null),CUSTOM_DELIMITER),
      is("hello"+CUSTOM_DELIMITER+"from"+CUSTOM_DELIMITER+CUSTOM_DELIMITER
          +"ksqldb"+CUSTOM_DELIMITER+"udf"+CUSTOM_DELIMITER+"null")
  );
}
|
/**
 * Parses a resource-configuration qualifier string into {@code out},
 * applying version defaults (delegates with applyVersionForCompat = true).
 *
 * @return true if the string was a valid configuration description
 */
public static boolean parse(final String str, ResTable_config out) {
  return parse(str, out, true);
}
|
// "large" must set the SCREENSIZE_LARGE bits in the screenLayout field.
@Test
public void parse_screenSize_large() {
  ResTable_config config = new ResTable_config();
  ConfigDescription.parse("large", config);
  assertThat(config.screenLayout).isEqualTo(SCREENSIZE_LARGE);
}
|
/**
 * Initializes the token authentication provider from the service configuration.
 * <p>
 * Resolves the (optionally prefixed) setting names, loads the validation key
 * (the algorithm must be read first because it determines how the key bytes
 * are interpreted), configures the JWT parser with the allowed clock skew,
 * and validates that an audience is configured whenever an audience claim is.
 *
 * @param config service configuration to read settings from
 * @throws IOException if the configured key cannot be read
 * @throws IllegalArgumentException if an audience claim is configured without
 *         a matching audience value
 */
@Override
public void initialize(ServiceConfiguration config) throws IOException, IllegalArgumentException {
    // optional prefix lets several token providers coexist in one configuration
    String prefix = (String) config.getProperty(CONF_TOKEN_SETTING_PREFIX);
    if (null == prefix) {
        prefix = "";
    }
    this.confTokenSecretKeySettingName = prefix + CONF_TOKEN_SECRET_KEY;
    this.confTokenPublicKeySettingName = prefix + CONF_TOKEN_PUBLIC_KEY;
    this.confTokenAuthClaimSettingName = prefix + CONF_TOKEN_AUTH_CLAIM;
    this.confTokenPublicAlgSettingName = prefix + CONF_TOKEN_PUBLIC_ALG;
    this.confTokenAudienceClaimSettingName = prefix + CONF_TOKEN_AUDIENCE_CLAIM;
    this.confTokenAudienceSettingName = prefix + CONF_TOKEN_AUDIENCE;
    this.confTokenAllowedClockSkewSecondsSettingName = prefix + CONF_TOKEN_ALLOWED_CLOCK_SKEW_SECONDS;
    // we need to fetch the algorithm before we fetch the key
    this.publicKeyAlg = getPublicKeyAlgType(config);
    this.validationKey = getValidationKey(config);
    this.roleClaim = getTokenRoleClaim(config);
    this.audienceClaim = getTokenAudienceClaim(config);
    this.audience = getTokenAudience(config);
    long allowedSkew = getConfTokenAllowedClockSkewSeconds(config);
    // parser is built once here and reused for every token validation
    this.parser = Jwts.parserBuilder()
            .setAllowedClockSkewSeconds(allowedSkew)
            .setSigningKey(this.validationKey)
            .build();
    // an audience claim without an audience value can never match any token
    if (audienceClaim != null && audience == null) {
        throw new IllegalArgumentException("Token Audience Claim [" + audienceClaim
                                           + "] configured, but Audience stands for this broker not.");
    }
}
|
// A file:// secret-key URL pointing at a non-existent file must make
// initialize() fail with an IOException.
@Test(expectedExceptions = IOException.class)
public void testInitializeWhenSecretKeyFilePathIsInvalid() throws IOException {
    Properties properties = new Properties();
    properties.setProperty(AuthenticationProviderToken.CONF_TOKEN_SECRET_KEY,
            "file://" + "invalid_secret_key_file");
    ServiceConfiguration conf = new ServiceConfiguration();
    conf.setProperties(properties);
    new AuthenticationProviderToken().initialize(conf);
}
|
/**
 * Redis ECHO: sends the message to the server and returns the server's copy.
 * Executed as a read command with raw byte-array encoding.
 */
@Override
public byte[] echo(byte[] message) {
    return read(null, ByteArrayCodec.INSTANCE, ECHO, message);
}
|
// ECHO must return the exact bytes that were sent.
@Test
public void testEcho() {
    assertThat(connection.echo("test".getBytes())).isEqualTo("test".getBytes());
}
|
/**
 * Returns the component qualifier for this portfolio:
 * a root portfolio is a view, anything nested is a sub-view.
 */
@Override
public String getQualifier() {
  return isRoot() ? Qualifiers.VIEW : Qualifiers.SUBVIEW;
}
|
// A freshly created PortfolioDto is a root, so its qualifier must be VIEW.
@Test
void getQualifier_whenRoot_shouldReturnVW() {
  PortfolioDto dto = new PortfolioDto();
  assertThat(dto.getQualifier()).isEqualTo(Qualifiers.VIEW);
}
|
/**
 * Runs all Kafka spec checks and collects the resulting warnings.
 * Common checks always run; the remainder depend on whether the cluster
 * uses KRaft or ZooKeeper for metadata.
 *
 * @param useKRaft true for a KRaft-based cluster, false for ZooKeeper-based
 * @return warning conditions found (empty when the spec is clean)
 */
List<Condition> run(boolean useKRaft) {
    List<Condition> warnings = new ArrayList<>();
    // checks common to both metadata modes
    checkKafkaReplicationConfig(warnings);
    checkKafkaBrokersStorage(warnings);
    if (useKRaft) {
        // Additional checks done for KRaft clusters
        checkKRaftControllerStorage(warnings);
        checkKRaftControllerCount(warnings);
        checkKafkaMetadataVersion(warnings);
        checkInterBrokerProtocolVersionInKRaft(warnings);
        checkLogMessageFormatVersionInKRaft(warnings);
    } else {
        // Additional checks done for ZooKeeper-based clusters
        checkKafkaLogMessageFormatVersion(warnings);
        checkKafkaInterBrokerProtocolVersion(warnings);
        checkKRaftMetadataStorageConfiguredForZooBasedCLuster(warnings);
    }
    return warnings;
}
|
// A metadata version older than the broker version must yield exactly one
// KafkaMetadataVersion warning hinting at an incomplete upgrade.
@Test
public void testMetadataVersionIsOlderThanKafkaVersion() {
    Kafka kafka = new KafkaBuilder(KAFKA)
            .editSpec()
                .editKafka()
                    .withVersion(KafkaVersionTestUtils.LATEST_KAFKA_VERSION)
                    .withMetadataVersion(KafkaVersionTestUtils.PREVIOUS_METADATA_VERSION)
                .endKafka()
            .endSpec()
            .build();
    KafkaSpecChecker checker = generateChecker(kafka, List.of(CONTROLLERS, POOL_A), new KafkaVersionChange(VERSIONS.defaultVersion(), VERSIONS.defaultVersion(), null, null, KafkaVersionTestUtils.PREVIOUS_METADATA_VERSION));
    List<Condition> warnings = checker.run(true);
    assertThat(warnings, hasSize(1));
    assertThat(warnings.get(0).getReason(), is("KafkaMetadataVersion"));
    assertThat(warnings.get(0).getMessage(), is("Metadata version is older than the Kafka version used by the cluster, which suggests that an upgrade is incomplete."));
}
|
/**
 * Resolves the SQL type of the expression with no lambda-parameter
 * type mappings (delegates with an empty map).
 */
public SqlType getExpressionSqlType(final Expression expression) {
  return getExpressionSqlType(expression, Collections.emptyMap());
}
|
// A UDF call's type must be resolved by looking up the function for the
// argument types and returning the function's declared return type.
@Test
public void shouldEvaluateTypeForUDF() {
  // Given:
  givenUdfWithNameAndReturnType("FLOOR", SqlTypes.DOUBLE);
  final Expression expression =
      new FunctionCall(FunctionName.of("FLOOR"), ImmutableList.of(COL3));
  // When:
  final SqlType exprType = expressionTypeManager.getExpressionSqlType(expression);
  // Then:
  assertThat(exprType, is(SqlTypes.DOUBLE));
  verify(udfFactory).getFunction(ImmutableList.of(SqlArgument.of(SqlTypes.DOUBLE)));
  verify(function).getReturnType(ImmutableList.of(SqlArgument.of(SqlTypes.DOUBLE)));
}
|
/**
 * Blocks until the asynchronous task has finished and returns its value.
 * If the task is already completed the value is returned immediately.
 *
 * @param asyncResult handle of the task to wait for
 * @return the task's result
 * @throws ExecutionException if the task failed
 * @throws InterruptedException if the wait was interrupted
 */
@Override
public <T> T endProcess(AsyncResult<T> asyncResult) throws ExecutionException,
    InterruptedException {
  if (asyncResult.isCompleted()) {
    return asyncResult.getValue();
  }
  asyncResult.await();
  return asyncResult.getValue();
}
|
// Full endProcess lifecycle: reading the value before completion throws
// IllegalStateException, endProcess blocks until the task finishes, and a
// second endProcess on the completed result returns the same value without
// re-running the task.
@Test
void testEndProcess() {
  assertTimeout(ofMillis(5000), () -> {
    // Instantiate a new executor and start a new 'null' task ...
    final var executor = new ThreadAsyncExecutor();
    final var result = new Object();
    // task takes 1.5s so the pre-completion checks below run while it is in flight
    when(task.call()).thenAnswer(i -> {
      Thread.sleep(1500);
      return result;
    });
    final var asyncResult = executor.startProcess(task);
    assertNotNull(asyncResult);
    assertFalse(asyncResult.isCompleted());
    try {
      asyncResult.getValue();
      fail("Expected IllegalStateException when calling AsyncResult#getValue on a non-completed task");
    } catch (IllegalStateException e) {
      assertNotNull(e.getMessage());
    }
    assertSame(result, executor.endProcess(asyncResult));
    verify(task, times(1)).call();
    assertTrue(asyncResult.isCompleted());
    // Calling end process a second time while already finished should give the same result
    assertSame(result, executor.endProcess(asyncResult));
    verifyNoMoreInteractions(task);
  });
}
|
/**
 * Creates a stats rule that derives remote-source statistics from the
 * per-fragment stats provider.
 *
 * @param fragmentStatsProvider source of per-fragment statistics; must not be null
 * @param normalizer stats normalizer passed to the base rule
 */
public RemoteSourceStatsRule(FragmentStatsProvider fragmentStatsProvider, StatsNormalizer normalizer)
{
    super(normalizer);
    // message previously said "metadata is null", which named the wrong parameter
    this.fragmentStatsProvider = requireNonNull(fragmentStatsProvider, "fragmentStatsProvider is null");
}
|
// Two remote fragments each carrying 1000 bytes (row count unknown) must be
// summed by the rule: total size 2000, output row count still unknown.
@Test
public void testRemoteSourceStatsRule()
{
    QueryId queryId = new QueryId("testqueryid");
    Session session = testSessionBuilder()
            .setQueryId(queryId)
            .build();
    LocalQueryRunner localQueryRunner = new LocalQueryRunner(session);
    StatsCalculatorTester tester = new StatsCalculatorTester(localQueryRunner);
    FragmentStatsProvider fragmentStatsProvider = localQueryRunner.getFragmentStatsProvider();
    fragmentStatsProvider.putStats(queryId, new PlanFragmentId(1), new PlanNodeStatsEstimate(NaN, 1000, FACT, ImmutableMap.of(), JoinNodeStatsEstimate.unknown(), TableWriterNodeStatsEstimate.unknown(), PartialAggregationStatsEstimate.unknown()));
    fragmentStatsProvider.putStats(queryId, new PlanFragmentId(2), new PlanNodeStatsEstimate(NaN, 1000, FACT, ImmutableMap.of(), JoinNodeStatsEstimate.unknown(), TableWriterNodeStatsEstimate.unknown(), PartialAggregationStatsEstimate.unknown()));
    tester.assertStatsFor(planBuilder -> planBuilder.remoteSource(ImmutableList.of(new PlanFragmentId(1), new PlanFragmentId(2))))
            .check(check -> check.totalSize(2000)
                    .outputRowsCountUnknown());
    tester.close();
}
|
/**
 * Selects the AMQP string encoding for the value: buffers of up to 255 bytes
 * fit the compact (str8) form, anything longer needs the large (str32) form.
 */
@Override
public PrimitiveTypeEncoding<UTF8Buffer> getEncoding(UTF8Buffer value) {
    if (value.getLength() <= 255) {
        return smallBufferEncoding;
    }
    return largeBufferEncoding;
}
|
// A buffer longer than 255 bytes must select the STR32 (large) encoding, with a
// 1-byte constructor and a value size of payload length plus a 4-byte length prefix.
@Test
public void testGetEncodingForLargeUTF8Buffer() {
    PrimitiveTypeEncoding<UTF8Buffer> encoding = utf8BufferEncoding.getEncoding(largeBuffer);
    assertTrue(encoding instanceof UTF8BufferType.LargeUTF8BufferEncoding);
    assertEquals(1, encoding.getConstructorSize());
    assertEquals(largeBuffer.getLength() + Integer.BYTES, encoding.getValueSize(largeBuffer));
    assertEquals(EncodingCodes.STR32, encoding.getEncodingCode());
    assertFalse(encoding.encodesJavaPrimitive());
    assertEquals(utf8BufferEncoding, encoding.getType());
}
|
/**
 * Parses a CGM Session Run Time packet.
 * <p>
 * Valid packets are 2 bytes (the run-time value alone) or 4 bytes (value
 * followed by a CRC-16/MCRF4XX checksum over the first 2 bytes). Any other
 * size is reported as invalid data; a CRC mismatch is reported separately.
 */
@Override
public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
	super.onDataReceived(device, data);
	// packet must be exactly value-only (2) or value + CRC (4)
	if (data.size() != 2 && data.size() != 4) {
		onInvalidDataReceived(device, data);
		return;
	}
	// run-time value, little-endian uint16 (unit not defined here — presumably
	// hours per the CGM Session Run Time characteristic; TODO confirm)
	final int sessionRunTime = data.getIntValue(Data.FORMAT_UINT16_LE, 0);
	final boolean crcPresent = data.size() == 4;
	if (crcPresent) {
		// CRC is computed over the first 2 bytes and stored at offset 2
		final int actualCrc = CRC16.MCRF4XX(data.getValue(), 0, 2);
		final int expectedCrc = data.getIntValue(Data.FORMAT_UINT16_LE, 2);
		if (actualCrc != expectedCrc) {
			onContinuousGlucoseMonitorSessionRunTimeReceivedWithCrcError(device, data);
			return;
		}
	}
	onContinuousGlucoseMonitorSessionRunTimeReceived(device, sessionRunTime, crcPresent);
}
|
// A 2-byte packet (no CRC) must invoke the success callback with the decoded
// value and secured=false; the CRC-error and invalid-data paths must not fire.
@Test
public void onContinuousGlucoseMonitorSessionRunTimeReceived_noCrc() {
	final DataReceivedCallback callback = new CGMSessionRunTimeDataCallback() {
		@Override
		public void onContinuousGlucoseMonitorSessionRunTimeReceived(@NonNull final BluetoothDevice device, final int sessionRunTime, final boolean secured) {
			called = true;
			assertEquals("Session Run Time", 2, sessionRunTime);
			assertFalse(secured);
		}

		@Override
		public void onContinuousGlucoseMonitorSessionRunTimeReceivedWithCrcError(@NonNull final BluetoothDevice device, @NonNull final Data data) {
			// reaching this path means a valid packet was wrongly flagged
			assertEquals("Correct packet but invalid CRC reported", 1, 2);
		}

		@Override
		public void onInvalidDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
			// reaching this path means a valid packet was wrongly rejected
			assertEquals("Correct packet but invalid data reported", 1, 2);
		}
	};
	// little-endian uint16 value 2, no CRC bytes
	final Data data = new Data(new byte[] { 2, 0 });
	called = false;
	//noinspection DataFlowIssue
	callback.onDataReceived(null, data);
	assertTrue(called);
}
|
@Udf(description = "Converts a string representation of a date in the given format"
    + " into the number of milliseconds since 1970-01-01 00:00:00 UTC/GMT."
    + " Single quotes in the timestamp format can be escaped with '',"
    + " for example: 'yyyy-MM-dd''T''HH:mm:ssX'."
    + " The system default time zone is used when no time zone is explicitly provided.")
public long stringToTimestamp(
    @UdfParameter(
        description = "The string representation of a date.") final String formattedTimestamp,
    @UdfParameter(
        description = "The format pattern should be in the format expected by"
            + " java.time.format.DateTimeFormatter.") final String formatPattern) {
  // NB: We do not perform a null here preferring to throw an exception as
  // there is no sentinel value for a "null" Date.
  try {
    // parsers caches one StringToTimestampParser per format pattern;
    // ExecutionException comes from the cache loader, RuntimeException from parsing
    final StringToTimestampParser timestampParser = parsers.get(formatPattern);
    return timestampParser.parse(formattedTimestamp);
  } catch (final ExecutionException | RuntimeException e) {
    // wrap with both inputs so the SQL user can see what failed to parse
    throw new KsqlFunctionException("Failed to parse timestamp '" + formattedTimestamp
        + "' with formatter '" + formatPattern
        + "': " + e.getMessage(), e);
  }
}
|
@Test
public void shouldConvertStringToTimestamp() throws ParseException {
  // Given:
  final String input = "2021-12-01 12:10:11.123";
  final String pattern = "yyyy-MM-dd HH:mm:ss.SSS";

  // When:
  final Object result = udf.stringToTimestamp(input, pattern);

  // Then: the UDF agrees with SimpleDateFormat on the same pattern and input.
  final long expectedResult = new SimpleDateFormat(pattern).parse(input).getTime();
  assertThat(result, is(expectedResult));
}
|
/**
 * Returns a {@link ParamWindowedValueCoder} wrapping the given value coder;
 * delegates directly to {@code ParamWindowedValueCoder.of}.
 *
 * @param valueCoder the coder used for the value portion of the windowed value
 */
public static <T> ParamWindowedValueCoder<T> getParamWindowedValueCoder(Coder<T> valueCoder) {
  return ParamWindowedValueCoder.of(valueCoder);
}
|
@Test
public void testParamWindowedValueCoderIsSerializableWithWellKnownCoderType() {
  // The coder must survive Java serialization when built over a well-known
  // coder (GlobalWindow.Coder), as required for distribution to workers.
  CoderProperties.coderSerializable(
      WindowedValue.getParamWindowedValueCoder(GlobalWindow.Coder.INSTANCE));
}
|
@Override
public void deleteCategory(Long id) {
    // Fail fast if the category does not exist.
    validateProductCategoryExists(id);
    // A category that still has children must not be deleted.
    if (productCategoryMapper.selectCountByParentId(id) > 0) {
        throw exception(CATEGORY_EXISTS_CHILDREN);
    }
    // A category still referenced by SPUs must not be deleted.
    final Long boundSpuCount = productSpuService.getSpuCountByCategoryId(id);
    if (boundSpuCount > 0) {
        throw exception(CATEGORY_HAVE_BIND_SPU);
    }
    // All checks passed — remove the category.
    productCategoryMapper.deleteById(id);
}
|
@Test
public void testDeleteCategory_notExists() {
    // An id that was never inserted.
    final Long missingId = randomLongId();

    // Deleting an unknown category must raise CATEGORY_NOT_EXISTS.
    assertServiceException(() -> productCategoryService.deleteCategory(missingId), CATEGORY_NOT_EXISTS);
}
|
/**
 * Extracts the app id (the second token) from a role name of the form
 * {@code <roleType><sep><appId>[...]}; returns {@code null} when the name has
 * no tokens, an invalid role type, or no token after the role type.
 */
public static String extractAppIdFromRoleName(String roleName) {
    final Iterator<String> tokens = STRING_SPLITTER.split(roleName).iterator();
    if (!tokens.hasNext()) {
        return null;
    }
    final String roleType = tokens.next();
    if (!RoleType.isValidRoleType(roleType) || !tokens.hasNext()) {
        return null;
    }
    return tokens.next();
}
|
@Test
public void testExtractAppIdFromRoleName() throws Exception {
  // Two-token role name: app id is the second token.
  assertEquals("someApp", RoleUtils.extractAppIdFromRoleName("Master+someApp"));
  // Extra trailing tokens are ignored; only the second token is returned.
  assertEquals("someApp", RoleUtils.extractAppIdFromRoleName("ModifyNamespace+someApp+xx"));
  assertEquals("app1", RoleUtils.extractAppIdFromRoleName("ReleaseNamespace+app1+application"));
}
|
/**
 * Builds a managed write transform for the named sink. The sink name is resolved
 * case-insensitively against {@code WRITE_TRANSFORMS}; an unknown name fails fast
 * with the list of supported sinks.
 */
public static ManagedTransform write(String sink) {
  final String identifier =
      Preconditions.checkNotNull(
          WRITE_TRANSFORMS.get(sink.toLowerCase()),
          "An unsupported sink was specified: '%s'. Please specify one of the following sinks: %s",
          sink,
          WRITE_TRANSFORMS.keySet());
  return new AutoValue_Managed_ManagedTransform.Builder()
      .setIdentifier(identifier)
      .setSupportedIdentifiers(new ArrayList<>(WRITE_TRANSFORMS.values()))
      .build();
}
|
@Test
public void testManagedTestProviderWithConfigFile() throws Exception {
  // Absolute path of the YAML configuration shipped with the test resources.
  final String configPath =
      Paths.get(getClass().getClassLoader().getResource("test_config.yaml").toURI())
          .toFile()
          .getAbsolutePath();

  // Rebuild the Iceberg write op against the test provider, then point it at the file.
  Managed.ManagedTransform writeOp =
      Managed.write(Managed.ICEBERG)
          .toBuilder()
          .setIdentifier(TestSchemaTransformProvider.IDENTIFIER)
          .build();
  writeOp =
      writeOp
          .withSupportedIdentifiers(Arrays.asList(TestSchemaTransformProvider.IDENTIFIER))
          .withConfigUrl(configPath);

  runTestProviderTest(writeOp);
}
|
/**
 * Returns {@code true} if the given collection is empty. Defined as the exact
 * negation of {@link #isNotEmpty(Collection)}, so {@code null} handling follows
 * that method — presumably a {@code null} collection counts as empty; confirm there.
 */
public static boolean isEmpty(Collection<?> col) {
    return !isNotEmpty(col);
}
|
@Test
public void testIsEmptyWithArrays() {
    // NOTE: these calls take arrays, so they exercise an array overload of
    // isEmpty, not the Collection overload defined above.
    String[] emptyArray = {};
    String[] filledArray = {"Foo", "Bar"};
    Assertions.assertTrue(CollectionUtils.isEmpty(emptyArray));
    Assertions.assertFalse(CollectionUtils.isEmpty(filledArray));
}
|
/**
 * Creates a {@code LengthPrefixCoder} wrapping the given value coder.
 *
 * @param valueCoder the coder for the wrapped values; must not be null
 * @throws NullPointerException if {@code valueCoder} is null
 */
public static <T> LengthPrefixCoder<T> of(Coder<T> valueCoder) {
  checkNotNull(valueCoder, "Coder not expected to be null");
  return new LengthPrefixCoder<>(valueCoder);
}
|
@Test
public void testMultiCoderCycle() throws Exception {
  // Round-trip: value -> length-prefixed bytes -> raw bytes (prefix stripped)
  // -> re-prefixed bytes -> value. The intermediate byte form must differ from
  // the prefixed form, but re-encoding must reproduce it exactly.
  LengthPrefixCoder<Long> lengthPrefixedValueCoder =
      LengthPrefixCoder.of(BigEndianLongCoder.of());

  LengthPrefixCoder<byte[]> lengthPrefixedBytesCoder = LengthPrefixCoder.of(ByteArrayCoder.of());

  // [0x08, 0, 0, 0, 0, 0, 0, 0, 0x16]
  byte[] userEncoded = CoderUtils.encodeToByteArray(lengthPrefixedValueCoder, 22L);

  // [0, 0, 0, 0, 0, 0, 0, 0x16]
  byte[] decodedToBytes = CoderUtils.decodeFromByteArray(lengthPrefixedBytesCoder, userEncoded);

  // [0x08, 0, 0, 0, 0, 0, 0, 0, 0x16]
  byte[] reencodedBytes = CoderUtils.encodeToByteArray(lengthPrefixedBytesCoder, decodedToBytes);

  long userDecoded = CoderUtils.decodeFromByteArray(lengthPrefixedValueCoder, reencodedBytes);

  assertFalse(
      "Length-prefix decoding to bytes should drop the length",
      Arrays.equals(userEncoded, decodedToBytes));
  assertArrayEquals(userEncoded, reencodedBytes);
  assertEquals(22L, userDecoded);
}
|
/**
 * Checks that the actual iterable contains at least the given elements; collects the
 * varargs via {@code accumulate} and delegates to {@link #containsAtLeastElementsIn}.
 * The returned {@code Ordered} presumably allows a follow-up in-order assertion —
 * see {@code containsAtLeastElementsIn} for the exact contract.
 */
@CanIgnoreReturnValue
public final Ordered containsAtLeast(
    @Nullable Object firstExpected,
    @Nullable Object secondExpected,
    @Nullable Object @Nullable ... restOfExpected) {
  return containsAtLeastElementsIn(accumulate(firstExpected, secondExpected, restOfExpected));
}
|
@Test
public void iterableContainsAtLeastWithMany() {
  // A strict superset of the expected elements must pass.
  assertThat(asList(1, 2, 3)).containsAtLeast(1, 2);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.