focal_method
stringlengths 13
60.9k
| test_case
stringlengths 25
109k
|
|---|---|
/**
 * FEEL number() function: parses {@code from} into a BigDecimal, honoring an
 * optional grouping separator (space, dot or comma) and an optional decimal
 * separator (dot or comma). Returns an InvalidParametersEvent error for a null
 * input, an illegal separator, equal separators, or an unparseable value.
 */
public FEELFnResult<BigDecimal> invoke(@ParameterName("from") String from, @ParameterName("grouping separator") String group, @ParameterName("decimal separator") String decimal) {
    if ( from == null ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "from", "cannot be null"));
    }
    if ( group != null && !group.equals( " " ) && !group.equals( "." ) && !group.equals( "," ) ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "group", "not a valid one, can only be one of: dot ('.'), comma (','), space (' ') "));
    }
    if ( decimal != null ) {
        if (!decimal.equals( "." ) && !decimal.equals( "," )) {
            return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "decimal", "not a valid one, can only be one of: dot ('.'), comma (',') "));
        } else if (group != null && decimal.equals( group )) {
            return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "decimal", "cannot be the same as parameter 'group' "));
        }
    }
    // Strip grouping separators and normalize the decimal separator to '.'.
    // Pattern.quote escapes regex meta-characters robustly (the old "\\" + sep
    // prefix happened to work for the three allowed separators but was fragile).
    if ( group != null ) {
        from = from.replaceAll( java.util.regex.Pattern.quote( group ), "" );
    }
    if ( decimal != null ) {
        from = from.replaceAll( java.util.regex.Pattern.quote( decimal ), "." );
    }
    BigDecimal result = NumberEvalHelper.getBigDecimalOrNull( from );
    // 'from' is guaranteed non-null here (checked at entry), so the old
    // "from != null &&" condition was dead and has been removed.
    if ( result == null ) {
        // conversion failed
        return FEELFnResult.ofError( new InvalidParametersEvent(Severity.ERROR, "unable to calculate final number result" ) );
    }
    return FEELFnResult.ofResult( result );
}
|
@Test
void invokeEmptyDecimal() {
    // An empty decimal separator is invalid: only "." or "," are accepted.
    final FEELFnResult<BigDecimal> result = numberFunction.invoke("1.1", null, "");
    FunctionTestUtil.assertResultError(result, InvalidParametersEvent.class);
}
|
/**
 * Replaces an existing resource set with a new version carrying the same id.
 * Owner and client id are always carried over from the old version.
 */
@Override
public ResourceSet update(ResourceSet oldRs, ResourceSet newRs) {
    // Both ids must be present and equal for the update to be meaningful.
    final boolean idsMatch = oldRs.getId() != null
            && newRs.getId() != null
            && oldRs.getId().equals(newRs.getId());
    if (!idsMatch) {
        throw new IllegalArgumentException("Resource set IDs mismatched");
    }
    if (!checkScopeConsistency(newRs)) {
        throw new IllegalArgumentException("Can't save a resource set with inconsistent claims.");
    }
    // preserve the owner tag and client id across updates
    newRs.setOwner(oldRs.getOwner());
    newRs.setClientId(oldRs.getClientId());
    return repository.save(newRs);
}
|
@Test(expected = IllegalArgumentException.class)
public void testUpdate_nullId2() {
    // The old resource set has no id while the replacement does: ids mismatch.
    ResourceSet original = new ResourceSet();
    ResourceSet replacement = new ResourceSet();
    replacement.setId(1L);
    resourceSetService.update(original, replacement);
}
|
/**
 * Reads a header as a Long via the message's typed lookup, returning
 * {@code defaultValue} when the header is absent.
 */
public Long getLongHeader(Message in, String header, Long defaultValue) {
    final Long headerValue = in.getHeader(header, defaultValue, Long.class);
    return headerValue;
}
|
@Test
public void testGetLongHeader() {
    // Stub the typed header lookup, then verify the producer forwards the exact
    // header name, default value and target class — in order, with no extra calls.
    when(in.getHeader(HEADER_HISTOGRAM_VALUE, 19L, Long.class)).thenReturn(201L);
    assertThat(okProducer.getLongHeader(in, HEADER_HISTOGRAM_VALUE, 19L), is(201L));
    inOrder.verify(in, times(1)).getHeader(HEADER_HISTOGRAM_VALUE, 19L, Long.class);
    inOrder.verifyNoMoreInteractions();
}
|
/**
 * Returns the Kafka Streams config, tagged with the application id under the
 * metrics resource-label key so metric collectors can attribute it.
 */
public Map<String, Object> getKsqlStreamConfigProps(final String applicationId) {
    final Map<String, Object> props = new HashMap<>(getKsqlStreamConfigProps());
    final String appIdLabelKey =
        MetricCollectors.RESOURCE_LABEL_PREFIX + StreamsConfig.APPLICATION_ID_CONFIG;
    props.put(appIdLabelKey, applicationId);
    // Streams client metrics aren't used in Confluent deployment
    possiblyConfigureConfluentTelemetry(props);
    return Collections.unmodifiableMap(props);
}
|
@Test
public void shouldSetStreamsConfigProducerUnprefixedProperties() {
    // An unprefixed producer property must be picked up and parsed to its native type.
    final KsqlConfig ksqlConfig =
        new KsqlConfig(Collections.singletonMap(ProducerConfig.BUFFER_MEMORY_CONFIG, "1024"));
    final Object bufferMemory =
        ksqlConfig.getKsqlStreamConfigProps().get(ProducerConfig.BUFFER_MEMORY_CONFIG);
    assertThat(bufferMemory, equalTo(1024L));
}
|
/**
 * Tags each pair's second component with its original position, orders by the
 * component value, and returns the original positions in that order.
 */
@VisibleForTesting
protected static List<Integer> getQueryOutputIndices(List<Pair<Column, Integer>> mvColumnPairs) {
    return Streams
        .mapWithIndex(
            mvColumnPairs.stream(),
            (columnPair, position) -> Pair.create(columnPair.second, (int) position))
        .sorted(Comparator.comparingInt(tagged -> tagged.first))
        .map(tagged -> tagged.second)
        .collect(Collectors.toList());
}
|
@Test
public void testGetQueryOutputIndices() {
    // Each case supplies the expected output indices, the input order, and a
    // boolean — presumably whether reordering is required; TODO confirm the
    // flag's meaning against checkQueryOutputIndices.
    checkQueryOutputIndices(Arrays.asList(1, 2, 0, 3), "2,0,1,3", true);
    checkQueryOutputIndices(Arrays.asList(0, 1, 2, 3), "0,1,2,3", false);
    checkQueryOutputIndices(Arrays.asList(3, 2, 1, 0), "3,2,1,0", true);
    checkQueryOutputIndices(Arrays.asList(1, 2, 3, 0), "3,0,1,2", true);
    checkQueryOutputIndices(Arrays.asList(0, 1), "0,1", false);
}
|
/**
 * Converts the given string to underscore-separated form; the {@code true}
 * flag presumably selects upper-casing — confirm against {@code toUnderline}.
 */
public static String toUpperUnderline(String src) {
    return toUnderline(src, '_', true);
}
|
@Test
public void testToUpperUnderline() {
    // CamelCase words become underscore-separated upper-case tokens.
    Assert.assertEquals("TO_UPPER_UNDERLINE", FieldUtils.toUpperUnderline("ToUpperUnderline"));
}
|
/**
 * Convenience overload: copies bytes starting at {@code index} into the whole
 * destination array, delegating to the four-argument variant for bounds and
 * reference-count handling. Returns this buffer for chaining.
 */
@Override
public ByteBuf getBytes(int index, byte[] dst) {
    getBytes(index, dst, 0, dst.length);
    return this;
}
|
@Test
public void testGetBytesAfterRelease8() {
    // Accessing a released buffer must fail with an illegal reference count.
    assertThrows(IllegalReferenceCountException.class,
        () -> releasedBuffer().getBytes(0, new DevNullGatheringByteChannel(), 1));
}
|
/**
 * Removes the record stored under {@code key}, if any; removing an absent key
 * is a no-op (standard Map.remove semantics).
 */
public void remove(Object key) {
    this.recordMap.remove(key);
}
|
@Test
public void remove_whenNotExist_thenCallIgnored() {
    // Removing a key that was never recorded must complete without throwing.
    final TransactionLog transactionLog = new TransactionLog();
    transactionLog.remove("not exist");
}
|
/**
 * Adds the given POJO to the write state via its type mapper and returns the
 * ordinal assigned by the mapper.
 */
public int add(Object o) {
    final HollowTypeMapper mapper = getTypeMapper(o.getClass(), null, null);
    return mapper.write(o);
}
|
@Test
public void testDate() throws IOException {
    // Round-trip a java.util.Date and check its epoch millis survive as the "value" long field.
    final long epochMillis = System.currentTimeMillis();
    final HollowObjectMapper mapper = new HollowObjectMapper(writeStateEngine);
    mapper.add(new Date(epochMillis));
    roundTripSnapshot();
    final int ordinal = readStateEngine.getTypeState("Date").maxOrdinal();
    final GenericHollowObject hollowDate = new GenericHollowObject(readStateEngine, "Date", ordinal);
    Assert.assertEquals(epochMillis, hollowDate.getLong("value"));
}
|
/**
 * Returns whether {@code input} matches this instance's glob pattern,
 * delegating to the shared MATCHER.
 */
public boolean matches(String input) {
    return MATCHER.matches(input, pattern);
}
|
@Test
public void testMatchesOnSingleCharacterLowerCase() throws Exception {
    // The lower-case "a*" glob accepts upper-case "AABBCC": matching is case-insensitive here.
    GlobMatcher globMatcher = new GlobMatcher("a*");
    assertTrue(globMatcher.matches("AABBCC"));
    assertFalse(globMatcher.matches("FFFF"));
}
|
/**
 * Normalizes and validates user-declared mapping fields, then resolves the key
 * and value halves and returns the merged field list (first occurrence of each
 * name wins). Throws QueryException on rename of __key/this, on an invalid
 * external name, or when nothing resolves.
 */
public List<MappingField> resolveAndValidateFields(
        List<MappingField> userFields,
        Map<String, String> options,
        NodeEngine nodeEngine
) {
    final InternalSerializationService serializationService = (InternalSerializationService) nodeEngine
            .getSerializationService();
    final AbstractRelationsStorage relationsStorage = ((CalciteSqlOptimizer) nodeEngine.getSqlService().getOptimizer())
            .relationsStorage();

    // normalize and validate the names and external names
    for (MappingField field : userFields) {
        String name = field.name();
        String externalName = field.externalName();
        if (externalName == null) {
            if (name.equals(KEY) || name.equals(VALUE)) {
                externalName = name;
            } else {
                externalName = VALUE_PREFIX + name;
            }
            // BUG FIX: previously this stored the plain name, discarding the
            // VALUE_PREFIX-qualified default computed just above.
            field.setExternalName(externalName);
        }
        // The key/value roots themselves may not be renamed.
        if ((name.equals(KEY) && !externalName.equals(KEY))
                || (name.equals(VALUE) && !externalName.equals(VALUE))) {
            throw QueryException.error("Cannot rename field: '" + name + '\'');
        }
        if (!EXT_NAME_PATTERN.matcher(externalName).matches()) {
            throw QueryException.error("Invalid external name: " + externalName);
        }
    }

    // Resolve both halves; on duplicate names the key-side entry is kept
    // because putIfAbsent ignores later occurrences.
    Stream<MappingField> keyFields = resolveAndValidateFields(true, userFields, options,
            serializationService, relationsStorage);
    Stream<MappingField> valueFields = resolveAndValidateFields(false, userFields, options,
            serializationService, relationsStorage);

    Map<String, MappingField> fields = Stream.concat(keyFields, valueFields)
            .collect(LinkedHashMap::new, (map, field) -> map.putIfAbsent(field.name(), field), Map::putAll);

    if (fields.isEmpty()) {
        throw QueryException.error("The resolved field list is empty");
    }
    return new ArrayList<>(fields.values());
}
|
@Test
public void when_invalidExternalName_then_throws() {
    // An external name with an unsupported prefix must be rejected during resolution.
    final String badExternalName = "invalid_prefix.name";
    final MappingField field = field("field_name", QueryDataType.INT, badExternalName);
    assertThatThrownBy(() -> resolvers.resolveAndValidateFields(singletonList(field), emptyMap(), nodeEngine))
            .hasMessage("Invalid external name: " + badExternalName);
}
|
/**
 * Reads a 4-byte little-endian unsigned int from the stream.
 *
 * @throws EOFException if the stream ends before 4 bytes are read — previously
 *         the -1 returned by read() at EOF was OR-ed into the result, silently
 *         producing a corrupt value.
 * @throws IOException if the underlying stream fails
 */
public static int readUnsignedIntLE(InputStream in) throws IOException {
    final int b0 = in.read();
    final int b1 = in.read();
    final int b2 = in.read();
    final int b3 = in.read();
    // Any read() returning -1 makes the OR of all four negative.
    if ((b0 | b1 | b2 | b3) < 0) {
        throw new EOFException("stream ended before 4 bytes of a little-endian int were read");
    }
    return b0 | (b1 << 8) | (b2 << 16) | (b3 << 24);
}
|
@Test
public void testReadUnsignedIntLEFromArray() {
    // Little-endian: the byte at the starting offset becomes the least significant byte.
    byte[] array1 = {0x01, 0x02, 0x03, 0x04, 0x05};
    assertEquals(0x04030201, ByteUtils.readUnsignedIntLE(array1, 0));
    assertEquals(0x05040302, ByteUtils.readUnsignedIntLE(array1, 1));
    // Bytes with the high bit set must not be sign-extended individually.
    byte[] array2 = {(byte) 0xf1, (byte) 0xf2, (byte) 0xf3, (byte) 0xf4, (byte) 0xf5, (byte) 0xf6};
    assertEquals(0xf4f3f2f1, ByteUtils.readUnsignedIntLE(array2, 0));
    assertEquals(0xf6f5f4f3, ByteUtils.readUnsignedIntLE(array2, 2));
}
|
/**
 * Validates one rule-builder step against its function descriptor: checks the
 * output variable (none allowed for void returns), required parameters, and
 * parameter types — including "$variable" references resolved from previously
 * stored variables. A result with {@code failed == true} carries the reason.
 */
@Override
public ValidationResult validate(RuleBuilderStep step) {
    final RuleFragment ruleFragment = actions.get(step.function());
    FunctionDescriptor<?> functionDescriptor = ruleFragment.descriptor();
    Map<String, Object> stepParameters = step.parameters();

    // Add output to map
    String outputvariable = step.outputvariable();
    if (StringUtils.isNotBlank(outputvariable)) {
        if (functionDescriptor.returnType() == Void.class) {
            // BUG FIX: the message had no format specifier, so the function name
            // passed as an argument was silently ignored; the unused argument is dropped.
            return new ValidationResult(true, "Return type is void. No output variable allowed");
        }
        storeVariable(outputvariable, functionDescriptor.returnType());
    }

    ImmutableList<ParameterDescriptor> parameterDescriptors = functionDescriptor.params();
    for (ParameterDescriptor parameterDescriptor : parameterDescriptors) {
        String parameterName = parameterDescriptor.name();
        Object value = stepParameters.get(parameterName);
        if (!parameterDescriptor.optional() && value == null) {
            return new ValidationResult(true, f("Missing parameter %s", parameterName));
        }
        Class<?> variableType = getVariableType(value);
        // '$' means it is stored in another variable and we need to fetch and verify that type
        if (value instanceof String s && s.startsWith("$")) {
            String referencedName = s.substring(1);
            Class<?> passedVariableType = variables.get(referencedName);
            if (Objects.isNull(passedVariableType)) {
                return new ValidationResult(true, f("Could not find passed variable %s", value));
            }
            variableType = passedVariableType;
        }
        // Check if variable type matches function expectation
        Class<?> paramType = parameterDescriptor.type();
        if (value != null && paramType != Object.class && variableType != paramType) {
            return new ValidationResult(true, f("Found a wrong parameter type for parameter %s", parameterName));
        }
    }
    return new ValidationResult(false, "");
}
|
@Test
void failsWhenPassedWrongTypeParam() {
    // Pass a String value where the function under test expects an integer parameter.
    HashMap<String, Object> parameters = new HashMap<>();
    parameters.put(INT_PARAM, "1");
    RuleBuilderStep stepWithValidNegation = RuleBuilderStep.builder()
    .parameters(parameters)
    .function(INTEGER_FUNCTION)
    .build();
    // Validation must fail and name the offending parameter.
    ValidationResult result = classUnderTest.validate(stepWithValidNegation);
    assertThat(result.failed()).isTrue();
    assertThat(result.failureReason()).isEqualTo("Found a wrong parameter type for parameter " + INT_PARAM);
}
|
/**
 * Asserts the subject contains at least the given elements. The returned
 * {@link Ordered} can be used to additionally constrain their order. The two
 * fixed parameters plus varargs force callers to supply at least two elements.
 */
@CanIgnoreReturnValue
public final Ordered containsAtLeast(
    @Nullable Object firstExpected,
    @Nullable Object secondExpected,
    @Nullable Object @Nullable ... restOfExpected) {
    // Flatten the fixed arguments and the varargs into one collection and delegate.
    return containsAtLeastElementsIn(accumulate(firstExpected, secondExpected, restOfExpected));
}
|
@Test
public void iterableContainsAtLeastWithNull() {
    // null must be usable as an expected element; the cast disambiguates the varargs call.
    assertThat(asList(1, null, 3)).containsAtLeast(3, (Integer) null);
}
|
/**
 * Returns the pending span for {@code context}, creating and registering a new
 * one when absent. The new span inherits the parent's clock when the parent is
 * still pending; otherwise a fresh tick clock is created. When {@code start}
 * is true the start timestamp is set from that clock. Safe under racing
 * callers: the loser of the put race returns the winner's span.
 */
public PendingSpan getOrCreate(
    @Nullable TraceContext parent, TraceContext context, boolean start) {
    PendingSpan result = get(context);
    if (result != null) return result;

    MutableSpan span = new MutableSpan(context, defaultSpan);
    PendingSpan parentSpan = parent != null ? get(parent) : null;

    // save overhead calculating time if the parent is in-progress (usually is)
    TickClock clock;
    if (parentSpan != null) {
        // Prefer the parent's own context when it has one, so the assert below
        // sees the canonical parent.
        TraceContext parentContext = parentSpan.context();
        if (parentContext != null) parent = parentContext;
        clock = parentSpan.clock;
        if (start) span.startTimestamp(clock.currentTimeMicroseconds());
    } else {
        long currentTimeMicroseconds = this.clock.currentTimeMicroseconds();
        clock = new TickClock(platform, currentTimeMicroseconds, platform.nanoTime());
        if (start) span.startTimestamp(currentTimeMicroseconds);
    }

    PendingSpan newSpan = new PendingSpan(context, span, clock);
    // Probably absent because we already checked with get() at the entrance of this method
    PendingSpan previousSpan = putIfProbablyAbsent(context, newSpan);
    if (previousSpan != null) return previousSpan; // lost race

    // We've now allocated a new trace context.
    assert parent != null || context.isLocalRoot() :
        "Bug (or unexpected call to internal code): parent can only be null in a local root!";
    spanHandler.begin(newSpan.handlerContext, newSpan.span, parentSpan != null
        ? parentSpan.handlerContext : null);
    return newSpan;
}
|
// A context flagged shared=true (server side of a shared span) must get its
// own pending span, distinct from the client's span for the same ids.
@Test void getOrCreate_splitsSharedServerDataFromClient() {
    TraceContext context2 = context.toBuilder().shared(true).build();
    assertThat(pendingSpans.getOrCreate(null, context, false)).isNotEqualTo(
        pendingSpans.getOrCreate(null, context2, false));
}
|
/**
 * Returns whether {@code ip} names a usable IP entity. A null entity is always
 * valid; otherwise validity means InetAddress can resolve the string.
 * NOTE(review): getByName may perform a DNS lookup for non-literal input.
 */
static boolean isValidIpEntity(String ip) {
    if (ip == null) {
        return true;
    }
    boolean resolvable;
    try {
        InetAddress.getByName(ip);
        resolvable = true;
    } catch (UnknownHostException e) {
        resolvable = false;
    }
    return resolvable;
}
|
@Test
public void testIsValidIpEntityWithUnresolvableHostname() {
    // example.invalid will never be valid, as per RFC 2606.
    // Resolution failure must map to false, not an exception.
    assertFalse(ClientQuotaControlManager.isValidIpEntity("example.invalid"));
}
|
/**
 * Instantiates the connector identified by the given class name or alias.
 */
public Connector newConnector(String connectorClassOrAlias) {
    final Class<? extends Connector> resolvedClass = connectorClass(connectorClassOrAlias);
    return newPlugin(resolvedClass);
}
|
@Test
public void shouldThrowIfStaticInitializerThrows() {
    // A connector whose static initializer fails must surface as a ConnectException.
    assertThrows(ConnectException.class, () -> plugins.newConnector(
    TestPlugin.BAD_PACKAGING_STATIC_INITIALIZER_THROWS_CONNECTOR.className()
    ));
}
|
/**
 * Returns the configured limit of allowed planning failures.
 * NOTE(review): -1 is the only legal negative value per the builder's
 * validation — presumably meaning "unlimited"; confirm at the builder.
 */
public int maxAllowedPlanningFailures() {
    return this.maxAllowedPlanningFailures;
}
|
@Test
void testMaxAllowedPlanningFailures() {
    // Only -1 is a legal negative value for maxAllowedPlanningFailures.
    ScanContext context = ScanContext.builder().maxAllowedPlanningFailures(-2).build();
    assertException(
    context, "Cannot set maxAllowedPlanningFailures to a negative number other than -1.");
}
|
/**
 * Realizes each element of {@code objs} to the corresponding type in
 * {@code types}; the two arrays must have equal length.
 */
public static Object[] realize(Object[] objs, Class<?>[] types) {
    if (objs.length != types.length) {
        throw new IllegalArgumentException("args.length != types.length");
    }
    final int count = objs.length;
    final Object[] realized = new Object[count];
    for (int index = 0; index < count; index++) {
        realized[index] = realize(objs[index], types[index]);
    }
    return realized;
}
|
@Test
void testJsonObjectToMap() throws Exception {
    // A JSONObject should realize into the Map type declared by setMap's
    // parameter, with the string key "1" converted to the Integer key 1.
    Method method = PojoUtilsTest.class.getMethod("setMap", Map.class);
    assertNotNull(method);
    JSONObject jsonObject = new JSONObject();
    jsonObject.put("1", "test");
    @SuppressWarnings("unchecked")
    Map<Integer, Object> value = (Map<Integer, Object>)
    PojoUtils.realize(jsonObject, method.getParameterTypes()[0], method.getGenericParameterTypes()[0]);
    // The realized map must be accepted by the reflective setter call.
    method.invoke(new PojoUtilsTest(), value);
    assertEquals("test", value.get(1));
}
|
/**
 * Intentionally empty: this implementation uses no configurable properties,
 * but must still satisfy the interface contract.
 */
@Override
public void setProperties(final Properties properties) {
}
|
@Test
public void setPropertiesTest() {
    // The no-op setter must accept any Properties instance without throwing.
    final OpenGaussSQLQueryInterceptor interceptor = new OpenGaussSQLQueryInterceptor();
    Assertions.assertDoesNotThrow(() -> interceptor.setProperties(mock(Properties.class)));
}
|
/**
 * Registers {@code cleanupTask} to run for {@code object} once it is collected
 * (tracking presumably happens via AutomaticCleanerReference and a reference
 * queue drained by CLEANER_TASK — confirm there). Lazily starts the single
 * low-priority daemon cleaner thread on first use.
 */
public static void register(Object object, Runnable cleanupTask) {
    AutomaticCleanerReference reference = new AutomaticCleanerReference(object,
        ObjectUtil.checkNotNull(cleanupTask, "cleanupTask"));
    // It's important to add the reference to the LIVE_SET before we access CLEANER_RUNNING to ensure correct
    // behavior in multi-threaded environments.
    LIVE_SET.add(reference);

    // Check if there is already a cleaner running.
    if (CLEANER_RUNNING.compareAndSet(false, true)) {
        final Thread cleanupThread = new FastThreadLocalThread(CLEANER_TASK);
        cleanupThread.setPriority(Thread.MIN_PRIORITY);
        // Set to null to ensure we not create classloader leaks by holding a strong reference to the inherited
        // classloader.
        // See:
        // - https://github.com/netty/netty/issues/7290
        // - https://bugs.openjdk.java.net/browse/JDK-7008595
        AccessController.doPrivileged(new PrivilegedAction<Void>() {
            @Override
            public Void run() {
                cleanupThread.setContextClassLoader(null);
                return null;
            }
        });
        cleanupThread.setName(CLEANER_THREAD_NAME);

        // Mark this as a daemon thread to ensure that the JVM can exit if this is the only thread that is
        // running.
        cleanupThread.setDaemon(true);
        cleanupThread.start();
    }
}
|
@Test
@Timeout(value = 5000, unit = TimeUnit.MILLISECONDS)
public void testCleanup() throws Exception {
    final AtomicBoolean freeCalled = new AtomicBoolean();
    final CountDownLatch latch = new CountDownLatch(1);
    temporaryThread = new Thread(() -> {
        try {
            latch.await();
        } catch (InterruptedException ignore) {
            // just ignore
        }
    });
    temporaryThread.start();
    ObjectCleaner.register(temporaryThread, () -> freeCalled.set(true));

    latch.countDown();
    temporaryThread.join();
    // The cleanup task must not run while the object is still strongly reachable.
    assertFalse(freeCalled.get());

    // Null out the temporary object to ensure it is enqueued for GC.
    temporaryThread = null;

    // Poll until the cleaner observes collection and runs the task
    // (the @Timeout bounds this loop).
    while (!freeCalled.get()) {
        System.gc();
        System.runFinalization();
        Thread.sleep(100);
    }
}
|
/**
 * Looks up a config value: the app-level Apollo config when {@code group}
 * names this application, the group's own namespace otherwise, and the default
 * dubbo namespace when no group is given. {@code timeout} is not used here.
 */
@Override
public String getConfig(String key, String group, long timeout) throws IllegalStateException {
    if (!StringUtils.isNotEmpty(group)) {
        return dubboConfig.getProperty(key, null);
    }
    if (group.equals(url.getApplication())) {
        return ConfigService.getAppConfig().getProperty(key, null);
    }
    return ConfigService.getConfig(group).getProperty(key, null);
}
|
@Test
void testGetRule() {
    // A value published to the namespace should be readable back through
    // getConfig; an unknown key yields null.
    String mockKey = "mockKey1";
    String mockValue = String.valueOf(new Random().nextInt());
    putMockRuleData(mockKey, mockValue, DEFAULT_NAMESPACE);
    apolloDynamicConfiguration = new ApolloDynamicConfiguration(url, applicationModel);
    assertEquals(mockValue, apolloDynamicConfiguration.getConfig(mockKey, DEFAULT_NAMESPACE, 3000L));
    mockKey = "notExistKey";
    assertNull(apolloDynamicConfiguration.getConfig(mockKey, DEFAULT_NAMESPACE, 3000L));
}
|
/**
 * Parses the plugin's SCM-configuration-validation response body into a
 * ValidationResult via the shared JSON message handler.
 */
@Override
public ValidationResult responseMessageForIsSCMConfigurationValid(String responseBody) {
    return jsonResultMessageHandler.toValidationResult(responseBody);
}
|
@Test
public void shouldBuildSuccessValidationResultFromCheckSCMConfigurationValidResponse() throws Exception {
    // Empty and null response bodies are both treated as successful validation.
    assertThat(messageHandler.responseMessageForIsSCMConfigurationValid("").isSuccessful(), is(true));
    assertThat(messageHandler.responseMessageForIsSCMConfigurationValid(null).isSuccessful(), is(true));
}
|
// Returns a fresh cursor over this 16-byte-key hash slot array.
// NOTE(review): the implementation class is named CursorLongKey2 — presumably
// "two long keys", matching the 16-byte width; confirm at its declaration.
@Override public HashSlotCursor16byteKey cursor() {
    return new CursorLongKey2();
}
|
@Test
public void testCursor_valueAddress() {
    // After one insert, the first advance must land on the inserted slot's value address.
    final SlotAssignmentResult slot = insert(randomKey(), randomKey());
    HashSlotCursor16byteKey cursor = hsa.cursor();
    cursor.advance();
    assertEquals(slot.address(), cursor.valueAddress());
}
|
/**
 * Returns whether every consumed partition group of the vertex is consumable,
 * memoizing per-group results in {@code consumableStatusCache}.
 */
@Override
public boolean isInputConsumable(
        SchedulingExecutionVertex executionVertex,
        Set<ExecutionVertexID> verticesToDeploy,
        Map<ConsumedPartitionGroup, Boolean> consumableStatusCache) {
    for (ConsumedPartitionGroup partitionGroup : executionVertex.getConsumedPartitionGroups()) {
        final boolean consumable =
                consumableStatusCache.computeIfAbsent(
                        partitionGroup, this::isConsumableBasedOnFinishedProducers);
        if (!consumable) {
            return false;
        }
    }
    return true;
}
|
@Test
void testAllFinishedHybridInput() {
    // When all hybrid-full partitions have produced all data, both consumers'
    // inputs are consumable even with an empty set of vertices to deploy.
    final TestingSchedulingTopology topology = new TestingSchedulingTopology();
    final List<TestingSchedulingExecutionVertex> producers =
    topology.addExecutionVertices().withParallelism(2).finish();
    final List<TestingSchedulingExecutionVertex> consumer =
    topology.addExecutionVertices().withParallelism(2).finish();
    topology.connectAllToAll(producers, consumer)
    .withResultPartitionState(ResultPartitionState.ALL_DATA_PRODUCED)
    .withResultPartitionType(ResultPartitionType.HYBRID_FULL)
    .finish();
    PartialFinishedInputConsumableDecider inputConsumableDecider =
    createPartialFinishedInputConsumableDecider();
    assertThat(
    inputConsumableDecider.isInputConsumable(
    consumer.get(0), Collections.emptySet(), new HashMap<>()))
    .isTrue();
    assertThat(
    inputConsumableDecider.isInputConsumable(
    consumer.get(1), Collections.emptySet(), new HashMap<>()))
    .isTrue();
}
|
/**
 * Retries {@code callable} until it succeeds or {@code timeoutDuration}
 * elapses, waiting {@code retryBackoffMs} between attempts; delegates to the
 * overload parameterized on a clock, using the system clock.
 */
public static <T> T retryUntilTimeout(Callable<T> callable, Supplier<String> description, Duration timeoutDuration, long retryBackoffMs) throws Exception {
    return retryUntilTimeout(callable, description, timeoutDuration, retryBackoffMs, Time.SYSTEM);
}
|
@Test
public void testInvalidRetryTimeout() throws Exception {
    // A negative backoff must not prevent retrying: the first attempt throws,
    // the second succeeds, and exactly two calls are made.
    Mockito.when(mockCallable.call())
    .thenThrow(new TimeoutException("timeout"))
    .thenReturn("success");
    assertEquals("success", RetryUtil.retryUntilTimeout(mockCallable, testMsg, Duration.ofMillis(100), -1, mockTime));
    Mockito.verify(mockCallable, Mockito.times(2)).call();
}
|
/**
 * Writes one record between startMessage/endMessage markers. On failure the
 * offending protobuf message is logged (built from the Builder when the record
 * is one) before the exception is rethrown.
 */
@Override
public void write(T record) {
    recordConsumer.startMessage();
    try {
        messageWriter.writeTopLevelMessage(record);
    } catch (RuntimeException e) {
        Message protoMessage = (record instanceof Message.Builder)
                ? ((Message.Builder) record).build()
                : (Message) record;
        LOG.error("Cannot write message {}: {}", e.getMessage(), protoMessage);
        throw e;
    }
    recordConsumer.endMessage();
}
|
@Test
public void testProto3OptionalInnerMessage() throws Exception {
    // For a present inner message, unset proto3 string fields are written as
    // empty binaries; verify the exact field-event sequence for MessageA.
    RecordConsumer readConsumerMock = Mockito.mock(RecordConsumer.class);
    ProtoWriteSupport<TestProto3.MessageA> instance =
    createReadConsumerInstance(TestProto3.MessageA.class, readConsumerMock);
    TestProto3.MessageA.Builder msg = TestProto3.MessageA.newBuilder();
    msg.getInnerBuilder().setOne("one");
    instance.write(msg.build());
    InOrder inOrder = Mockito.inOrder(readConsumerMock);
    inOrder.verify(readConsumerMock).startMessage();
    inOrder.verify(readConsumerMock).startField("inner", 0);
    inOrder.verify(readConsumerMock).startGroup();
    inOrder.verify(readConsumerMock).startField("one", 0);
    inOrder.verify(readConsumerMock).addBinary(Binary.fromConstantByteArray("one".getBytes()));
    inOrder.verify(readConsumerMock).endField("one", 0);
    inOrder.verify(readConsumerMock).startField("two", 1);
    inOrder.verify(readConsumerMock).addBinary(Binary.fromConstantByteArray("".getBytes()));
    inOrder.verify(readConsumerMock).endField("two", 1);
    inOrder.verify(readConsumerMock).startField("three", 2);
    inOrder.verify(readConsumerMock).addBinary(Binary.fromConstantByteArray("".getBytes()));
    inOrder.verify(readConsumerMock).endField("three", 2);
    inOrder.verify(readConsumerMock).endGroup();
    inOrder.verify(readConsumerMock).endField("inner", 0);
    inOrder.verify(readConsumerMock).endMessage();
    Mockito.verifyNoMoreInteractions(readConsumerMock);
}
|
/**
 * Key-agnostic join overload: adapts the plain ValueJoiner to the
 * with-key variant and delegates to the main join implementation.
 */
@Override
public <VO, VR> KStream<K, VR> join(final KStream<K, VO> otherStream,
    final ValueJoiner<? super V, ? super VO, ? extends VR> joiner,
    final JoinWindows windows) {
    return join(otherStream, toValueJoinerWithKey(joiner), windows);
}
|
@Test
public void shouldNotAllowNullValueJoinerWithKeyOnJoinWithGlobalTableWithNamed() {
    // The null check must fire for the joiner argument and mention it by name.
    final NullPointerException exception = assertThrows(
    NullPointerException.class,
    () -> testStream.join(
    testGlobalTable,
    MockMapper.selectValueMapper(),
    (ValueJoiner<? super String, ? super String, ?>) null,
    Named.as("name")));
    assertThat(exception.getMessage(), equalTo("joiner can't be null"));
}
|
/**
 * Logs a REMOVE_IMAGE_CLEANUP event carrying sessionId, streamId,
 * correlationId and the channel text (the third int in the length budget
 * presumably covers the string's length prefix — confirm against
 * encodeImageRemoval). The event is dropped if the ring-buffer claim fails.
 */
public void logImageRemoval(final String channel, final int sessionId, final int streamId, final long correlationId)
{
    final int length = SIZE_OF_INT * 3 + SIZE_OF_LONG + channel.length();
    final int captureLength = captureLength(length);
    final int encodedLength = encodedLength(captureLength);
    final ManyToOneRingBuffer ringBuffer = this.ringBuffer;
    final int index = ringBuffer.tryClaim(toEventCodeId(REMOVE_IMAGE_CLEANUP), encodedLength);
    // A non-positive index means the claim failed; the event is silently dropped.
    if (index > 0)
    {
        try
        {
            final UnsafeBuffer buffer = (UnsafeBuffer)ringBuffer.buffer();
            encodeImageRemoval(buffer, index, captureLength, length, channel, sessionId, streamId, correlationId);
        }
        finally
        {
            // Always commit the claimed slot, even if encoding throws.
            ringBuffer.commit(index);
        }
    }
}
|
@Test
void logImageRemoval()
{
    // Pre-position the ring-buffer tail, log the event, then verify the header
    // and the encoded field order: sessionId, streamId, correlationId, channel.
    final int recordOffset = align(192, ALIGNMENT);
    logBuffer.putLong(CAPACITY + TAIL_POSITION_OFFSET, recordOffset);
    final String uri = "uri";
    final int sessionId = 8;
    final int streamId = 61;
    final long id = 19;
    final int captureLength = uri.length() + SIZE_OF_INT * 3 + SIZE_OF_LONG;
    logger.logImageRemoval(uri, sessionId, streamId, id);
    verifyLogHeader(
    logBuffer, recordOffset, toEventCodeId(REMOVE_IMAGE_CLEANUP), captureLength, captureLength);
    assertEquals(sessionId, logBuffer.getInt(encodedMsgOffset(recordOffset + LOG_HEADER_LENGTH), LITTLE_ENDIAN));
    assertEquals(streamId,
    logBuffer.getInt(encodedMsgOffset(recordOffset + LOG_HEADER_LENGTH + SIZE_OF_INT), LITTLE_ENDIAN));
    assertEquals(id,
    logBuffer.getLong(encodedMsgOffset(recordOffset + LOG_HEADER_LENGTH + SIZE_OF_INT * 2), LITTLE_ENDIAN));
    assertEquals(uri, logBuffer.getStringAscii(
    encodedMsgOffset(recordOffset + LOG_HEADER_LENGTH + SIZE_OF_INT * 2 + SIZE_OF_LONG), LITTLE_ENDIAN));
}
|
/**
 * Returns the SQL decimal type capable of representing {@code value}.
 */
public static SqlType fromValue(final BigDecimal value) {
    // SqlDecimal does not support negative scale.
    // (java.math.RoundingMode.UNNECESSARY replaces the deprecated
    // BigDecimal.ROUND_UNNECESSARY int constant; behavior is identical, and it
    // never throws here because widening a negative scale to 0 loses no digits.)
    final BigDecimal decimal = value.scale() < 0
        ? value.setScale(0, java.math.RoundingMode.UNNECESSARY)
        : value;
    /* We can't use BigDecimal.precision() directly for all cases, since it defines
     * precision differently from SQL Decimal.
     * In particular, if the decimal is between -0.1 and 0.1, BigDecimal precision can be
     * lower than scale, which is disallowed in SQL Decimal. For example, 0.005 in
     * BigDecimal has a precision,scale of 1,3; whereas we expect 4,3.
     * If the decimal is in (-1,1) but outside (-0.1,0.1), the code doesn't throw, but
     * gives lower precision than expected (e.g., 0.8 has precision 1 instead of 2).
     * To account for this edge case, we just take the scale and add one and use that
     * for the precision instead. This works since BigDecimal defines scale as the
     * number of digits to the right of the period; which is one lower than the precision for
     * anything in the range (-1, 1).
     * This covers the case where BigDecimal has a value of 0.
     * Note: This solution differs from the SQL definition in that it returns (4, 3) for
     * both "0.005" and ".005", whereas SQL expects (3, 3) for the latter. This is unavoidable
     * if we use BigDecimal as an intermediate representation, since the two strings are parsed
     * identically by it to have precision 1.
     */
    if (decimal.compareTo(BigDecimal.ONE) < 0 && decimal.compareTo(BigDecimal.ONE.negate()) > 0) {
        return SqlTypes.decimal(decimal.scale() + 1, decimal.scale());
    }
    return SqlTypes.decimal(decimal.precision(), Math.max(decimal.scale(), 0));
}
|
@Test
public void shouldExtractPrecisionFromZeroValue() {
    // 0.00 has BigDecimal precision 1 but must map to SQL DECIMAL(3, 2): scale + 1.
    // When:
    final SqlType zeroDecimal = DecimalUtil
    .fromValue(BigDecimal.ZERO.setScale(2, RoundingMode.UNNECESSARY));
    // Then:
    assertThat(zeroDecimal, is(SqlTypes.decimal(3, 2)));
}
|
/**
 * Parses forwarded/non-forwarded/read-field annotation strings into
 * {@code result}, delegating to the full overload with its final boolean flag
 * disabled — TODO confirm the flag's meaning at the delegate.
 */
public static void getSemanticPropsSingleFromString(
    SingleInputSemanticProperties result,
    String[] forwarded,
    String[] nonForwarded,
    String[] readSet,
    TypeInformation<?> inType,
    TypeInformation<?> outType) {
    getSemanticPropsSingleFromString(
    result, forwarded, nonForwarded, readSet, inType, outType, false);
}
|
@Test
void testForwardedInvalidTargetFieldType4() {
    // This forward specification is invalid for the given output type and must
    // be rejected as an invalid semantic annotation.
    String[] forwardedFields = {"int1; string1"};
    SingleInputSemanticProperties sp = new SingleInputSemanticProperties();
    assertThatThrownBy(
    () ->
    SemanticPropUtil.getSemanticPropsSingleFromString(
    sp,
    forwardedFields,
    null,
    null,
    pojoInTupleType,
    pojo2Type))
    .isInstanceOf(InvalidSemanticAnnotationException.class);
}
|
/**
 * Returns whether the config map's leader annotation names the given lock
 * identity. Note this is a substring match on the annotation value, not an
 * exact comparison.
 */
public static boolean hasLeadership(KubernetesConfigMap configMap, String lockIdentity) {
    final String annotation = configMap.getAnnotations().get(LEADER_ANNOTATION_KEY);
    if (annotation == null) {
        return false;
    }
    return annotation.contains(lockIdentity);
}
|
@Test
void testAnnotationNotMatch() {
    // An annotation value not containing the lock identity means no leadership.
    leaderConfigMap.getAnnotations().put(LEADER_ANNOTATION_KEY, "wrong lock");
    assertThat(KubernetesLeaderElector.hasLeadership(leaderConfigMap, lockIdentity)).isFalse();
}
|
/**
 * Submits a CreateTopics request for all representable, non-duplicate topic
 * names and returns per-topic futures. Unrepresentable names get an
 * already-failed future; duplicate names share the first occurrence's future.
 */
@Override
public CreateTopicsResult createTopics(final Collection<NewTopic> newTopics,
    final CreateTopicsOptions options) {
    final Map<String, KafkaFutureImpl<TopicMetadataAndConfig>> topicFutures = new HashMap<>(newTopics.size());
    final CreatableTopicCollection topics = new CreatableTopicCollection();
    for (NewTopic newTopic : newTopics) {
    if (topicNameIsUnrepresentable(newTopic.name())) {
    KafkaFutureImpl<TopicMetadataAndConfig> future = new KafkaFutureImpl<>();
    future.completeExceptionally(new InvalidTopicException("The given topic name '" +
    newTopic.name() + "' cannot be represented in a request."));
    topicFutures.put(newTopic.name(), future);
    } else if (!topicFutures.containsKey(newTopic.name())) {
    topicFutures.put(newTopic.name(), new KafkaFutureImpl<>());
    topics.add(newTopic.convertToCreatableTopic());
    }
    }
    // Only send a request when at least one new, representable topic remains.
    if (!topics.isEmpty()) {
    final long now = time.milliseconds();
    final long deadline = calcDeadlineMs(now, options.timeoutMs());
    final Call call = getCreateTopicsCall(options, topicFutures, topics,
    Collections.emptyMap(), now, deadline);
    runnable.call(call, now);
    }
    return new CreateTopicsResult(new HashMap<>(topicFutures));
}
|
@Test
public void testConnectionFailureOnMetadataUpdate() throws Exception {
    // This tests the scenario in which we successfully connect to the bootstrap server, but
    // the server disconnects before sending the full response
    Cluster cluster = mockBootstrapCluster();
    try (final AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(Time.SYSTEM, cluster)) {
    Cluster discoveredCluster = mockCluster(3, 0);
    env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
    // First metadata response is cut short (the trailing true presumably flags
    // a disconnect — confirm against prepareResponse), forcing a retry.
    env.kafkaClient().prepareResponse(request -> request instanceof MetadataRequest, null, true);
    env.kafkaClient().prepareResponse(request -> request instanceof MetadataRequest,
    RequestTestUtils.metadataResponse(discoveredCluster.nodes(), discoveredCluster.clusterResource().clusterId(),
    1, Collections.emptyList()));
    env.kafkaClient().prepareResponse(body -> body instanceof CreateTopicsRequest,
    prepareCreateTopicsResponse("myTopic", Errors.NONE));
    KafkaFuture<Void> future = env.adminClient().createTopics(
    singleton(new NewTopic("myTopic", Collections.singletonMap(0, asList(0, 1, 2)))),
    new CreateTopicsOptions().timeoutMs(10000)).all();
    // Despite the initial disconnect, topic creation must eventually succeed.
    future.get();
    }
}
|
/**
 * Creates or updates a plain access (ACL) config, pushing it either to a
 * single broker (-b) or to every master/slave broker in a cluster (-c).
 * Prints command-line help when neither target option is supplied.
 */
@Override
public void execute(CommandLine commandLine, Options options,
    RPCHook rpcHook) throws SubCommandException {
    DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
    defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));

    try {
        PlainAccessConfig accessConfig = new PlainAccessConfig();
        // AccessKey (required)
        accessConfig.setAccessKey(commandLine.getOptionValue('a').trim());

        // SecretKey
        if (commandLine.hasOption('s')) {
            accessConfig.setSecretKey(commandLine.getOptionValue('s').trim());
        }

        // Admin
        if (commandLine.hasOption('m')) {
            accessConfig.setAdmin(Boolean.parseBoolean(commandLine.getOptionValue('m').trim()));
        }

        // DefaultTopicPerm
        if (commandLine.hasOption('i')) {
            accessConfig.setDefaultTopicPerm(commandLine.getOptionValue('i').trim());
        }

        // DefaultGroupPerm
        if (commandLine.hasOption('u')) {
            accessConfig.setDefaultGroupPerm(commandLine.getOptionValue('u').trim());
        }

        // WhiteRemoteAddress
        if (commandLine.hasOption('w')) {
            accessConfig.setWhiteRemoteAddress(commandLine.getOptionValue('w').trim());
        }

        // TopicPerms list value. String.split never returns null, so the old
        // null check around the loop was dead code and has been removed.
        if (commandLine.hasOption('t')) {
            List<String> topicPermList = new ArrayList<>();
            for (String topicPerm : commandLine.getOptionValue('t').trim().split(",")) {
                topicPermList.add(topicPerm);
            }
            accessConfig.setTopicPerms(topicPermList);
        }

        // GroupPerms list value (same dead null check removed).
        if (commandLine.hasOption('g')) {
            List<String> groupPermList = new ArrayList<>();
            for (String groupPerm : commandLine.getOptionValue('g').trim().split(",")) {
                groupPermList.add(groupPerm);
            }
            accessConfig.setGroupPerms(groupPermList);
        }

        if (commandLine.hasOption('b')) {
            // Single-broker target.
            String addr = commandLine.getOptionValue('b').trim();
            defaultMQAdminExt.start();
            defaultMQAdminExt.createAndUpdatePlainAccessConfig(addr, accessConfig);
            System.out.printf("create or update plain access config to %s success.%n", addr);
            System.out.printf("%s", accessConfig);
            return;
        } else if (commandLine.hasOption('c')) {
            // Cluster target: push to every master and slave broker.
            String clusterName = commandLine.getOptionValue('c').trim();
            defaultMQAdminExt.start();
            Set<String> brokerAddrSet =
                CommandUtil.fetchMasterAndSlaveAddrByClusterName(defaultMQAdminExt, clusterName);
            for (String addr : brokerAddrSet) {
                defaultMQAdminExt.createAndUpdatePlainAccessConfig(addr, accessConfig);
                System.out.printf("create or update plain access config to %s success.%n", addr);
            }
            System.out.printf("%s", accessConfig);
            return;
        }

        ServerUtil.printCommandLineHelp("mqadmin " + this.commandName(), options);
    } catch (Exception e) {
        throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
    } finally {
        defaultMQAdminExt.shutdown();
    }
}
|
/**
 * End-to-end parse check for the updateAccessConfig command line: every option
 * value survives parsing, and the ';'-separated perm lists split as expected.
 */
@Test
public void testExecute() {
    UpdateAccessConfigSubCommand cmd = new UpdateAccessConfigSubCommand();
    Options options = ServerUtil.buildCommandlineOptions(new Options());
    String[] subargs = new String[] {
        "-b", "127.0.0.1:10911",
        "-a", "RocketMQ",
        "-s", "12345678",
        "-w", "192.168.0.*",
        "-i", "DENY",
        "-u", "SUB",
        "-t", "topicA=DENY;topicB=PUB|SUB",
        "-g", "groupA=DENY;groupB=SUB",
        "-m", "true"
    };
    // The POSIX-style parser copes with option values that themselves contain '='.
    final CommandLine commandLine =
        ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs,
            cmd.buildCommandlineOptions(options), new DefaultParser());
    assertThat(commandLine.getOptionValue('b').trim()).isEqualTo("127.0.0.1:10911");
    assertThat(commandLine.getOptionValue('a').trim()).isEqualTo("RocketMQ");
    assertThat(commandLine.getOptionValue('s').trim()).isEqualTo("12345678");
    assertThat(commandLine.getOptionValue('w').trim()).isEqualTo("192.168.0.*");
    assertThat(commandLine.getOptionValue('i').trim()).isEqualTo("DENY");
    assertThat(commandLine.getOptionValue('u').trim()).isEqualTo("SUB");
    assertThat(commandLine.getOptionValue('t').trim()).isEqualTo("topicA=DENY;topicB=PUB|SUB");
    assertThat(commandLine.getOptionValue('g').trim()).isEqualTo("groupA=DENY;groupB=SUB");
    assertThat(commandLine.getOptionValue('m').trim()).isEqualTo("true");
    PlainAccessConfig accessConfig = new PlainAccessConfig();
    // Topic permissions: one list entry per ';'-separated token.
    if (commandLine.hasOption('t')) {
        List<String> topicPermList = new ArrayList<>();
        Collections.addAll(topicPermList, commandLine.getOptionValue('t').trim().split(";"));
        accessConfig.setTopicPerms(topicPermList);
    }
    // Group permissions parsed the same way.
    if (commandLine.hasOption('g')) {
        List<String> groupPermList =
            new ArrayList<>(Arrays.asList(commandLine.getOptionValue('g').trim().split(";")));
        accessConfig.setGroupPerms(groupPermList);
    }
    Assert.assertTrue(accessConfig.getTopicPerms().contains("topicB=PUB|SUB"));
    Assert.assertTrue(accessConfig.getGroupPerms().contains("groupB=SUB"));
}
|
/**
 * Views the (non-null) array under test as a {@code List} so iterable
 * assertions can be chained; routes through
 * {@code checkNoNeedToDisplayBothValues("asList()")} for failure reporting.
 */
public IterableSubject asList() {
    return checkNoNeedToDisplayBothValues("asList()").that(Arrays.asList(checkNotNull(actual)));
}
|
/** Smoke test: an Object[] viewed via asList() supports contains(). */
@Test
public void asList() {
    assertThat(objectArray("A", 5L)).asList().contains("A");
}
|
/**
 * Snapshots the members accepted by this collection's selector into a new
 * {@code Object[]}; members the selector rejects are omitted.
 */
@Override
public Object[] toArray() {
    final List<Object> selected = new ArrayList<>();
    for (final M candidate : members) {
        if (!selector.select(candidate)) {
            continue;
        }
        selected.add(candidate);
    }
    return selected.toArray(new Object[0]);
}
|
/** toArray() must contain exactly the members accepted by the lite-member selector. */
@Test
public void testToArrayWhenLiteMembersSelected() {
    Collection<MemberImpl> collection = new MemberSelectingCollection<>(members, LITE_MEMBER_SELECTOR);
    Object[] array = collection.toArray();
    assertArray(collection, array);
}
|
/**
 * Records every deserialization failure against the source topic, then decides
 * whether processing continues: schema-access authorization failures abort the
 * task (FAIL); anything else skips the record (CONTINUE).
 */
@Override
public DeserializationHandlerResponse handle(
    final ProcessorContext context,
    final ConsumerRecord<byte[], byte[]> record,
    final Exception exception
) {
    // Same position string is reused by both log lines below.
    final String position = String.format(
        "taskId: %s, topic: %s, partition: %d, offset: %d",
        context.taskId(), record.topic(), record.partition(), record.offset());
    log.debug("Exception caught during Deserialization, " + position, exception);
    streamsErrorCollector.recordError(record.topic());
    if (!isCausedByAuthorizationError(exception)) {
        return DeserializationHandlerResponse.CONTINUE;
    }
    log.info("Authorization error when attempting to access the schema during deserialization. "
        + position);
    return DeserializationHandlerResponse.FAIL;
}
|
/** A 403 schema-registry error in the cause chain must yield FAIL, not CONTINUE. */
@Test
public void shouldReturnFailForAuthorizationExceptions() {
    assertThat(exceptionHandler.handle(context, record,
        new Exception("", new RestClientException("", 403, 40301))),
        equalTo(DeserializationHandlerResponse.FAIL));
}
|
/**
 * Counts all rows of the config_info table via the dialect-specific mapper.
 * A null result from the JDBC template is treated as a hard error.
 */
@Override
public int configInfoCount() {
    final ConfigInfoMapper mapper = mapperManager.findMapper(
        dataSourceService.getDataSourceType(), TableConstant.CONFIG_INFO);
    final Integer count = jt.queryForObject(mapper.count(null), Integer.class);
    if (null == count) {
        throw new IllegalArgumentException("configInfoCount error");
    }
    return count.intValue();
}
|
/**
 * configInfoCount(tenant) returns the driver's count, and a null count must
 * surface as IllegalArgumentException (asserted via assertThrows instead of the
 * try/assertTrue(false) anti-pattern; also drops deprecated {@code new Integer}).
 */
@Test
void testConfigInfoCountByTenant() {
    String tenant = "tenant124";
    // mock total count
    when(jdbcTemplate.queryForObject(anyString(), eq(new Object[] {tenant}), eq(Integer.class))).thenReturn(90);
    int count = externalConfigInfoPersistService.configInfoCount(tenant);
    assertEquals(90, count);
    // a null count from the driver must be rejected
    when(jdbcTemplate.queryForObject(anyString(), eq(new Object[] {tenant}), eq(Integer.class))).thenReturn(null);
    assertThrows(IllegalArgumentException.class, () -> externalConfigInfoPersistService.configInfoCount(tenant));
}
|
/**
 * Validates that the kafka streams state directory exists (creating it if
 * needed), is a directory, and is writable/traversable by this process.
 *
 * @param streamsStateDir the configured streams state directory
 * @throws KsqlServerException if the directory cannot be created, is not a
 *     directory, or is not writable/executable (messages unchanged — tests
 *     assert on their exact text)
 */
@VisibleForTesting
static void enforceStreamStateDirAvailability(final File streamsStateDir) {
    if (!streamsStateDir.exists() && !streamsStateDir.mkdirs()) {
        throw new KsqlServerException("Could not create the kafka streams state directory: "
            + streamsStateDir.getPath()
            + writableStateDirHint(true)
        );
    }
    if (!streamsStateDir.isDirectory()) {
        throw new KsqlServerException(streamsStateDir.getPath()
            + " is not a directory."
            + writableStateDirHint(true)
        );
    }
    if (!streamsStateDir.canWrite() || !streamsStateDir.canExecute()) {
        throw new KsqlServerException("The kafka streams state directory is not writable "
            + "for KSQL server: "
            + streamsStateDir.getPath()
            + writableStateDirHint(false)
        );
    }
}

/**
 * Builds the remediation hint shared by all three failure messages above;
 * {@code mentionParentDir} adds the "parent directory" line used by the
 * create/not-a-directory cases.
 */
private static String writableStateDirHint(final boolean mentionParentDir) {
    return "\n Make sure the directory exists and is writable for KSQL server "
        + (mentionParentDir ? "\n or its parent directory is writable by KSQL server" : "")
        + "\n or change it to a writable directory by setting '"
        + KsqlConfig.KSQL_STREAMS_PREFIX + StreamsConfig.STATE_DIR_CONFIG
        + "' config in the properties file.";
}
|
/** A non-executable (non-traversable) state dir must fail with the exact documented message. */
@Test
public void shouldFailIfStreamsStateDirectoryIsNotExacutable() {
    // Given:
    when(mockStreamsStateDir.canExecute()).thenReturn(false);
    // When:
    final Exception e = assertThrows(
        KsqlServerException.class,
        () -> KsqlServerMain.enforceStreamStateDirAvailability(mockStreamsStateDir)
    );
    // Then:
    assertThat(e.getMessage(), containsString(
        "The kafka streams state directory is not writable for KSQL server: /var/lib/kafka-streams\n"
        + " Make sure the directory exists and is writable for KSQL server \n"
        + " or change it to a writable directory by setting 'ksql.streams.state.dir' "
        + "config in the properties file."));
}
|
/**
 * Runs the given tasks sequentially on the calling thread and returns the
 * result of the first one that completes without throwing.
 *
 * @param tasks candidate tasks, tried in iteration order
 * @return result of the first successful task
 * @throws ExecutionException if no task succeeds; the cause is the last
 *     task's exception (null when {@code tasks} is empty)
 */
@Override
@Nonnull
public <T> T invokeAny(@Nonnull Collection<? extends Callable<T>> tasks)
        throws ExecutionException {
    throwRejectedExecutionExceptionIfShutdown();
    Exception lastFailure = null;
    for (final Callable<T> candidate : tasks) {
        try {
            return candidate.call();
        } catch (final Exception failure) {
            // remember the failure and fall through to the next candidate
            lastFailure = failure;
        }
    }
    throw new ExecutionException("No tasks finished successfully.", lastFailure);
}
|
/** Exercises the rejected-execution path of invokeAny via the shared helper. */
@Test
void testRejectedInvokeAny() {
    testRejectedExecutionException(
        testInstance -> testInstance.invokeAny(Collections.singleton(() -> null)));
}
|
/**
 * Creates a response handler that applies the given CORS settings to matched
 * responses.
 *
 * @param configs zero or more CORS configurations (behavior with none is
 *     delegated to {@code MocoCorsHandler})
 * @return a CORS-applying response handler
 */
public static ResponseHandler cors(final CorsConfig... configs) {
    return new MocoCorsHandler(configs);
}
|
/** cors() with no explicit config must answer a cross-origin request with a wildcard origin. */
@Test
public void should_support_cors() throws Exception {
    server.response(cors());
    running(server, () -> {
        ClassicHttpResponse response = helper.getResponseWithHeader(root(), of("Origin", "https://www.github.com/"));
        assertThat(response.getHeader("Access-Control-Allow-Origin").getValue(), is("*"));
    });
}
|
/**
 * Signs the given claims with the RSA private key from the static JWT config
 * and returns the compact JWS serialization
 * (base64url {@code Header.Payload.Signature}), using RS256 and a kid header
 * prefixed with the two-digit provider id.
 *
 * @param claims the JWT claims to sign
 * @return the compact JWS serialization
 * @throws JoseException if signing fails
 * @deprecated relies on static config; newer call sites pass the kid and
 *     private key explicitly — NOTE(review): confirm the intended replacement overload.
 */
@Deprecated
public static String getJwt(JwtClaims claims) throws JoseException {
    String jwt;
    RSAPrivateKey privateKey = (RSAPrivateKey) getPrivateKey(
        jwtConfig.getKey().getFilename(),jwtConfig.getKey().getPassword(), jwtConfig.getKey().getKeyName());
    // A JWT is a JWS and/or a JWE with JSON claims as the payload.
    // In this example it is a JWS nested inside a JWE
    // So we first create a JsonWebSignature object.
    JsonWebSignature jws = new JsonWebSignature();
    // The payload of the JWS is JSON content of the JWT Claims
    jws.setPayload(claims.toJson());
    // The JWT is signed using the sender's private key
    jws.setKey(privateKey);
    // Get provider from security config file, it should be two digit
    // And the provider id will set as prefix for keyid in the token header, for example: 05100
    // if there is no provider id, we use "00" for the default value
    String provider_id = "";
    if (jwtConfig.getProviderId() != null) {
        provider_id = jwtConfig.getProviderId();
        if (provider_id.length() == 1) {
            // left-pad a single-digit provider id to two digits
            provider_id = "0" + provider_id;
        } else if (provider_id.length() > 2) {
            // misconfigured: keep only the first two characters but log it
            logger.error("provider_id defined in the security.yml file is invalid; the length should be 2");
            provider_id = provider_id.substring(0, 2);
        }
    }
    jws.setKeyIdHeaderValue(provider_id + jwtConfig.getKey().getKid());
    // Set the signature algorithm on the JWT/JWS that will integrity protect the claims
    jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.RSA_USING_SHA256);
    // Sign the JWS and produce the compact serialization, which will be the inner JWT/JWS
    // representation, which is a string consisting of three dot ('.') separated
    // base64url-encoded parts in the form Header.Payload.Signature
    jwt = jws.getCompactSerialization();
    return jwt;
}
|
/**
 * Generates and prints a long-lived (~10 year) bootstrap token for the config
 * server/controller; no assertions — this test doubles as a token generator.
 */
@Test
public void sidecarReferenceBootstrap() throws Exception {
    JwtClaims claims = ClaimsUtil.getTestCcClaimsScopeService("f7d42348-c647-4efb-a52d-4c5787421e72", "portal.r portal.w", "0100");
    claims.setExpirationTimeMinutesInTheFuture(5256000);
    String jwt = JwtIssuer.getJwt(claims, long_kid, KeyUtil.deserializePrivateKey(long_key, KeyUtil.RSA));
    System.out.println("***Reference Long lived Bootstrap token for config server and controller: " + jwt);
}
|
/**
 * Creates a "my new issues" notification whose details are resolved from the
 * given assignees map.
 *
 * @param assigneesByUuid assignee users indexed by UUID; validated by
 *     {@code verifyAssigneesByUuid} before use
 */
public MyNewIssuesNotification newMyNewIssuesNotification(Map<String, UserDto> assigneesByUuid) {
    verifyAssigneesByUuid(assigneesByUuid);
    return new MyNewIssuesNotification(new DetailsSupplierImpl(assigneesByUuid));
}
|
/** With no rules registered, the details supplier resolves every rule key to empty. */
@Test
public void newMyNewIssuesNotification_DetailsSupplier_getRuleDefinitionByRuleKey_always_returns_empty_if_RuleRepository_is_empty() {
    MyNewIssuesNotification underTest = this.underTest.newMyNewIssuesNotification(emptyMap());
    DetailsSupplier detailsSupplier = readDetailsSupplier(underTest);
    assertThat(detailsSupplier.getRuleDefinitionByRuleKey(RuleKey.of("foo", "bar"))).isEmpty();
    assertThat(detailsSupplier.getRuleDefinitionByRuleKey(RuleKey.of("bar", "foo"))).isEmpty();
}
|
/**
 * Hash derived from this object's textual form (toStringBuilder()); when
 * hardware equality is enforced the Bluetooth address is appended first, so
 * otherwise-identical objects from different devices hash differently.
 */
@Override
public int hashCode() {
    StringBuilder sb = toStringBuilder();
    if (sHardwareEqualityEnforced) {
        sb.append(mBluetoothAddress);
    }
    return sb.toString().hashCode();
}
|
/** hashCode() must not throw when one of the beacon's identifiers is null. */
@Test
public void testHashCodeWithNullIdentifier() {
    Beacon beacon = new AltBeacon.Builder()
        .setIdentifiers(Arrays.asList(
            Identifier.parse("0x1234"),
            null))
        .build();
    assertTrue("hashCode() should not throw exception", beacon.hashCode() >= Integer.MIN_VALUE);
}
|
/**
 * Compares two criterion values under this criterion's polarity: when
 * {@code lessIsBetter} is set the smaller value wins, otherwise the larger.
 */
@Override
public boolean betterThan(Num criterionValue1, Num criterionValue2) {
    if (lessIsBetter) {
        return criterionValue1.isLessThan(criterionValue2);
    }
    return criterionValue1.isGreaterThan(criterionValue2);
}
|
/** With lessIsBetter=true, the smaller profit/loss value must be judged better. */
@Test
public void betterThanWithLessIsBetter() {
    AnalysisCriterion criterion = getCriterion(new ProfitLossCriterion(), true);
    assertFalse(criterion.betterThan(numOf(5000), numOf(4500)));
    assertTrue(criterion.betterThan(numOf(4500), numOf(5000)));
}
|
/**
 * Registers one meter per log level (plus an "all" meter) in the metric
 * registry, then starts the appender via the superclass.
 */
@Override
public void start() {
    this.all = registry.meter(name(getName(), "all"));
    this.trace = registry.meter(name(getName(), "trace"));
    this.debug = registry.meter(name(getName(), "debug"));
    this.info = registry.meter(name(getName(), "info"));
    this.warn = registry.meter(name(getName(), "warn"));
    this.error = registry.meter(name(getName(), "error"));
    super.start();
}
|
/** The appender must pick up the shared registry named by the system property. */
@Test
public void usesRegistryFromProperty() {
    SharedMetricRegistries.add("something_else", registry);
    System.setProperty(InstrumentedAppender.REGISTRY_PROPERTY_NAME, "something_else");
    final InstrumentedAppender shared = new InstrumentedAppender();
    shared.start();
    when(event.getLevel()).thenReturn(Level.INFO);
    shared.doAppend(event);
    assertThat(SharedMetricRegistries.names()).contains("something_else");
    assertThat(registry.meter(METRIC_NAME_PREFIX + ".info").getCount())
        .isEqualTo(1);
}
|
/**
 * Builds a {@code UserOperatorConfig} from the given environment map: only
 * keys the operator declares are kept, then parsed against CONFIG_VALUES.
 */
public static UserOperatorConfig buildFromMap(Map<String, String> map) {
    final Map<String, String> relevantEntries = new HashMap<>(map);
    relevantEntries.keySet().retainAll(UserOperatorConfig.keyNames());
    final Map<String, Object> parsedValues = ConfigParameter.define(relevantEntries, CONFIG_VALUES);
    return new UserOperatorConfig(parsedValues);
}
|
/** A non-numeric reconciliation interval must be rejected during config parsing. */
@Test
public void testFromMapInvalidReconciliationIntervalThrows() {
    Map<String, String> envVars = new HashMap<>(UserOperatorConfigTest.ENV_VARS);
    envVars.put(UserOperatorConfig.RECONCILIATION_INTERVAL_MS.key(), "not_a_long");
    assertThrows(InvalidConfigurationException.class, () -> UserOperatorConfig.buildFromMap(envVars));
}
|
/**
 * Repeatedly substitutes environment-variable references (matched by the
 * VAR_SUBBER pattern) in the input until no reference remains, so values that
 * themselves contain references are expanded transitively.
 *
 * NOTE(review): a variable whose expansion (directly or via a cycle) keeps
 * producing a reference makes this loop run forever — confirm inputs are acyclic.
 *
 * @param innerInput string possibly containing variable references
 * @param env variable name to value mapping
 * @return the fully expanded string
 * @throws UploaderException if a referenced variable is missing from {@code env}
 */
@VisibleForTesting
String expandEnvironmentVariables(String innerInput, Map<String, String> env)
    throws UploaderException {
    boolean found;
    do {
        found = false;
        Matcher matcher = VAR_SUBBER.matcher(innerInput);
        StringBuffer stringBuffer = new StringBuffer();
        while (matcher.find()) {
            found = true;
            String var = matcher.group(1);
            // replace $env with the child's env constructed by tt's
            String replace = env.get(var);
            // an unknown variable is a configuration error, not something we default
            if (replace == null) {
                throw new UploaderException("Environment variable does not exist " +
                    var);
            }
            // quoteReplacement: the value must be inserted literally ($ and \ escaped)
            matcher.appendReplacement(
                stringBuffer, Matcher.quoteReplacement(replace));
        }
        matcher.appendTail(stringBuffer);
        innerInput = stringBuffer.toString();
    } while (found);
    return innerInput;
}
|
/** $B expands to "$C", which must be expanded again to "Y" on the next pass. */
@Test
void testRecursiveEnvironmentReplacement()
    throws UploaderException {
    String input = "C/$A/B,$B,D";
    Map<String, String> map = new HashMap<>();
    map.put("A", "X");
    map.put("B", "$C");
    map.put("C", "Y");
    FrameworkUploader uploader = new FrameworkUploader();
    String output = uploader.expandEnvironmentVariables(input, map);
    assertEquals("C/X/B,Y,D", output, "Environment not expanded");
}
|
/**
 * Renders a human-readable report of active processing bundles: for each
 * bundle with a tracked status, its instruction id, tracked thread, and time
 * since the last state transition, showing at most the 10 longest-stalled
 * bundles (descending by time since transition).
 */
@VisibleForTesting
String getActiveProcessBundleState() {
    StringJoiner activeBundlesState = new StringJoiner("\n");
    activeBundlesState.add("========== ACTIVE PROCESSING BUNDLES ==========");
    if (processBundleCache.getActiveBundleProcessors().isEmpty()) {
        activeBundlesState.add("No active processing bundles.");
    } else {
        List<BundleState> bundleStates = new ArrayList<>();
        // Snapshot every active bundle that has a tracked status.
        // (Map.forEach instead of entrySet().stream().forEach: a stream whose
        // only purpose is a side effect is an anti-pattern.)
        processBundleCache.getActiveBundleProcessors().forEach(
            (instruction, bundleProcessor) -> {
                ExecutionStateTrackerStatus executionStateTrackerStatus =
                    bundleProcessor.getStateTracker().getStatus();
                if (executionStateTrackerStatus != null) {
                    bundleStates.add(
                        new BundleState(
                            instruction,
                            executionStateTrackerStatus.getTrackedThread().getName(),
                            DateTimeUtils.currentTimeMillis()
                                - executionStateTrackerStatus.getLastTransitionTimeMillis()));
                }
            });
        bundleStates.stream()
            // reverse sort active bundle by time since last transition.
            .sorted(Comparator.comparing(BundleState::getTimeSinceTransition).reversed())
            .limit(10) // only keep top 10
            .forEachOrdered(
                bundleState -> {
                    activeBundlesState.add(
                        String.format("---- Instruction %s ----", bundleState.getInstruction()));
                    activeBundlesState.add(
                        String.format("Tracked thread: %s", bundleState.getTrackedThreadName()));
                    activeBundlesState.add(
                        String.format(
                            "Time since transition: %.2f seconds%n",
                            bundleState.getTimeSinceTransition() / 1000.0));
                });
    }
    return activeBundlesState.toString();
}
|
/**
 * With 11 active bundles, the report must list the 10 with the largest time
 * since transition, in descending order, and omit the 11th.
 */
@Test
public void testActiveBundleState() {
    ProcessBundleHandler handler = mock(ProcessBundleHandler.class);
    BundleProcessorCache processorCache = mock(BundleProcessorCache.class);
    Map<String, BundleProcessor> bundleProcessorMap = new HashMap<>();
    for (int i = 0; i < 11; i++) {
        BundleProcessor processor = mock(BundleProcessor.class);
        ExecutionStateTracker executionStateTracker = mock(ExecutionStateTracker.class);
        when(processor.getStateTracker()).thenReturn(executionStateTracker);
        when(executionStateTracker.getStatus())
            .thenReturn(
                ExecutionStateTrackerStatus.create(
                    "ptransformId", "ptransformIdName", Thread.currentThread(), i * 1000, null));
        String instruction = Integer.toString(i);
        when(processorCache.find(instruction)).thenReturn(processor);
        bundleProcessorMap.put(instruction, processor);
    }
    when(handler.getBundleProcessorCache()).thenReturn(processorCache);
    when(processorCache.getActiveBundleProcessors()).thenReturn(bundleProcessorMap);
    ManagedChannelFactory channelFactory = ManagedChannelFactory.createInProcess();
    BeamFnStatusClient client =
        new BeamFnStatusClient(
            apiServiceDescriptor,
            channelFactory::forDescriptor,
            handler.getBundleProcessorCache(),
            PipelineOptionsFactory.create(),
            Caches.noop());
    StringJoiner joiner = new StringJoiner("\n");
    joiner.add(client.getActiveProcessBundleState());
    String actualState = joiner.toString();
    List<String> expectedInstructions = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        expectedInstructions.add(String.format("Instruction %d", i));
    }
    // bundle 10 has the smallest time-since-transition, so it is the one dropped
    assertThat(actualState, stringContainsInOrder(expectedInstructions));
    assertThat(actualState, not(containsString("Instruction 10")));
}
|
/**
 * Forwards a device-state queue message to the state service, first recording
 * it in the stats collector when stats are enabled.
 */
void forwardToStateService(DeviceStateServiceMsgProto deviceStateServiceMsg, TbCallback callback) {
    if (statsEnabled) {
        stats.log(deviceStateServiceMsg);
    }
    stateService.onQueueMsg(deviceStateServiceMsg, callback);
}
|
/** With statsEnabled=true, forwarding a device-state message must log it to stats. */
@Test
public void givenStatsEnabled_whenForwardingDeviceStateMsgToStateService_thenStatsAreRecorded() {
    // GIVEN
    ReflectionTestUtils.setField(defaultTbCoreConsumerServiceMock, "stats", statsMock);
    ReflectionTestUtils.setField(defaultTbCoreConsumerServiceMock, "statsEnabled", true);
    var stateMsg = TransportProtos.DeviceStateServiceMsgProto.newBuilder()
        .setTenantIdMSB(tenantId.getId().getMostSignificantBits())
        .setTenantIdLSB(tenantId.getId().getLeastSignificantBits())
        .setDeviceIdMSB(deviceId.getId().getMostSignificantBits())
        .setDeviceIdLSB(deviceId.getId().getLeastSignificantBits())
        .setAdded(true)
        .setUpdated(false)
        .setDeleted(false)
        .build();
    doCallRealMethod().when(defaultTbCoreConsumerServiceMock).forwardToStateService(stateMsg, tbCallbackMock);
    // WHEN
    defaultTbCoreConsumerServiceMock.forwardToStateService(stateMsg, tbCallbackMock);
    // THEN
    then(statsMock).should().log(stateMsg);
}
|
/**
 * Maps a stored document status onto the externally visible {@code Status}.
 * Only documents whose MU status is ACTIEF, or whose type is NI, expose their
 * real status; everything else reports INACTIVE.
 */
public Status currentStatus(FetchRequest request) {
    final DocumentStatus documentStatus = fetchStatus(request);
    final boolean statusVisible =
        MUStatusType.ACTIEF == documentStatus.getStatusMu() || documentStatus.getDocType() == DocTypeType.NI;
    if (!statusVisible) {
        return Status.INACTIVE;
    }
    switch (documentStatus.getStatus()) {
        case GEACTIVEERD:
            return Status.ACTIVE;
        case UITGEREIKT:
            return Status.ISSUED;
        case GEBLOKKEERD:
            return Status.BLOCKED;
        default:
            return Status.INACTIVE;
    }
}
|
/** An empty repository lookup must surface as DocumentNotFoundException from currentStatus. */
@Test
public void getDocumentStatusNotFoundExceptionTest() throws Exception {
    when(bsnkPseudonymDecryptorMock.decryptEp(anyString(), anyString(), anyString())).thenReturn(pseudonym);
    when(documentStatusRepositoryMock.findByPseudonymAndDocTypeAndSequenceNo(anyString(), any(DocTypeType.class), anyString())).thenReturn(Optional.empty());
    FetchRequest request = new FetchRequest();
    request.setEpsc(encrypted);
    assertThrows(DocumentNotFoundException.class, () -> {
        documentStatusService.currentStatus(request);
    });
}
|
/**
 * Resolves a method on {@code clazz} by name and (optionally) parameter type
 * names. With {@code parameterTypes == null} the name must match exactly one
 * public method.
 *
 * @param clazz class to search
 * @param methodName target method name
 * @param parameterTypes parameter type names, or null to match by name only
 * @return the resolved method
 * @throws NoSuchMethodException if no method matches
 * @throws ClassNotFoundException if a parameter type name cannot be resolved
 * @throws IllegalStateException if the bare name matches more than one method
 */
@Deprecated
public static Method findMethodByMethodSignature(Class<?> clazz, String methodName, String[] parameterTypes)
    throws NoSuchMethodException, ClassNotFoundException {
    if (parameterTypes != null) {
        // Exact signature lookup: resolve each type name first.
        Class<?>[] resolved = new Class<?>[parameterTypes.length];
        for (int i = 0; i < parameterTypes.length; i++) {
            resolved[i] = ReflectUtils.name2class(parameterTypes[i]);
        }
        return clazz.getMethod(methodName, resolved);
    }
    // Name-only lookup: must be unambiguous.
    List<Method> candidates = new ArrayList<>();
    for (Method candidate : clazz.getMethods()) {
        if (candidate.getName().equals(methodName)) {
            candidates.add(candidate);
        }
    }
    if (candidates.isEmpty()) {
        throw new NoSuchMethodException("No such method " + methodName + " in class " + clazz);
    }
    if (candidates.size() > 1) {
        throw new IllegalStateException(String.format(
            "Not unique method for method name(%s) in class(%s), find %d methods.",
            methodName, clazz.getName(), candidates.size()));
    }
    return candidates.get(0);
}
|
/** Name-only lookup must resolve the single method1 and report its int parameter. */
@Test
void testFindMethodByMethodSignature() throws Exception {
    Method m = ReflectUtils.findMethodByMethodSignature(TestedClass.class, "method1", null);
    assertEquals("method1", m.getName());
    Class<?>[] parameterTypes = m.getParameterTypes();
    assertEquals(1, parameterTypes.length);
    assertEquals(int.class, parameterTypes[0]);
}
|
/**
 * Executes a SHOW statement by delegating to the global state manager's
 * show-executor visitor.
 */
public static ShowResultSet execute(ShowStmt statement, ConnectContext context) {
    return GlobalStateMgr.getCurrentState().getShowExecutor().showExecutorVisitor.visit(statement, context);
}
|
/** SHOW MATERIALIZED VIEWS on testDb as root must return the expected result set. */
@Test
public void testShowMaterializedView() throws AnalysisException, DdlException {
    ctx.setCurrentUserIdentity(UserIdentity.ROOT);
    ctx.setCurrentRoleIds(Sets.newHashSet(PrivilegeBuiltinConstants.ROOT_ROLE_ID));
    ShowMaterializedViewsStmt stmt = new ShowMaterializedViewsStmt("testDb", (String) null);
    ShowResultSet resultSet = ShowExecutor.execute(stmt, ctx);
    verifyShowMaterializedViewResult(resultSet);
}
|
/**
 * Strips a CGLib/enhancer suffix ({@code $$...$$}) from the job's class name.
 * Names without the marker pass through unchanged (same instance).
 */
@Override
public JobDetails postProcess(JobDetails jobDetails) {
    final String className = jobDetails.getClassName();
    final String enhancerMarker = substringBetween(className, "$$", "$$");
    if (!isNotNullOrEmpty(enhancerMarker)) {
        return jobDetails;
    }
    return new JobDetails(
        substringBefore(className, "$$"),
        jobDetails.getStaticFieldName(),
        jobDetails.getMethodName(),
        jobDetails.getJobParameters());
}
|
/** A class name without the CGLib marker must be returned as the very same instance. */
@Test
void postProcessWithoutCGLibReturnsSameJobDetails() {
    // GIVEN
    final JobDetails jobDetails = defaultJobDetails().build();
    // WHEN
    final JobDetails result = cgLibPostProcessor.postProcess(jobDetails);
    // THEN
    assertThat(result).isSameAs(jobDetails);
}
|
/**
 * Signs the given claims with the RSA private key from the static JWT config
 * and returns the compact JWS serialization
 * (base64url {@code Header.Payload.Signature}), using RS256 and a kid header
 * prefixed with the two-digit provider id.
 *
 * @param claims the JWT claims to sign
 * @return the compact JWS serialization
 * @throws JoseException if signing fails
 * @deprecated relies on static config; newer call sites pass the kid and
 *     private key explicitly — NOTE(review): confirm the intended replacement overload.
 */
@Deprecated
public static String getJwt(JwtClaims claims) throws JoseException {
    String jwt;
    RSAPrivateKey privateKey = (RSAPrivateKey) getPrivateKey(
        jwtConfig.getKey().getFilename(),jwtConfig.getKey().getPassword(), jwtConfig.getKey().getKeyName());
    // A JWT is a JWS and/or a JWE with JSON claims as the payload.
    // In this example it is a JWS nested inside a JWE
    // So we first create a JsonWebSignature object.
    JsonWebSignature jws = new JsonWebSignature();
    // The payload of the JWS is JSON content of the JWT Claims
    jws.setPayload(claims.toJson());
    // The JWT is signed using the sender's private key
    jws.setKey(privateKey);
    // Get provider from security config file, it should be two digit
    // And the provider id will set as prefix for keyid in the token header, for example: 05100
    // if there is no provider id, we use "00" for the default value
    String provider_id = "";
    if (jwtConfig.getProviderId() != null) {
        provider_id = jwtConfig.getProviderId();
        if (provider_id.length() == 1) {
            // left-pad a single-digit provider id to two digits
            provider_id = "0" + provider_id;
        } else if (provider_id.length() > 2) {
            // misconfigured: keep only the first two characters but log it
            logger.error("provider_id defined in the security.yml file is invalid; the length should be 2");
            provider_id = provider_id.substring(0, 2);
        }
    }
    jws.setKeyIdHeaderValue(provider_id + jwtConfig.getKey().getKid());
    // Set the signature algorithm on the JWT/JWS that will integrity protect the claims
    jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.RSA_USING_SHA256);
    // Sign the JWS and produce the compact serialization, which will be the inner JWT/JWS
    // representation, which is a string consisting of three dot ('.') separated
    // base64url-encoded parts in the form Header.Payload.Signature
    jwt = jws.getCompactSerialization();
    return jwt;
}
|
/**
 * Generates and prints a long-lived (~10 year) bootstrap token scoped to a
 * service id; no assertions — this test doubles as a token generator.
 */
@Test
public void sidecarReferenceBootstrapWithServiceId() throws Exception {
    JwtClaims claims = ClaimsUtil.getTestCcClaimsScopeService("f7d42348-c647-4efb-a52d-4c5787421e72", "A8E73740C0041C03D67C3A951AA1D7533C8F9F2FB57D7BA107210B9BC9E06DA2", "com.networknt.petstore-1.0.0");
    claims.setExpirationTimeMinutesInTheFuture(5256000);
    String jwt = JwtIssuer.getJwt(claims, long_kid, KeyUtil.deserializePrivateKey(long_key, KeyUtil.RSA));
    System.out.println("***Reference Long lived Bootstrap token for config server and controller: " + jwt);
}
|
/**
 * Convenience overload that forwards to the five-argument variant with a
 * null trailing argument.
 */
@Override
public void notify(final String group, final String identifier, final String title, final String description) {
    this.notify(group, identifier, title, description, null);
}
|
/** notify() must tolerate null group and identifier without throwing. */
@Test
public void testNotify() {
    final NotificationService n = new NotificationCenter();
    n.notify(null, null, "title", "test");
}
|
/**
 * Derives a Java package name from the API host by reversing its dot-separated
 * labels (port stripped). A missing host or "localhost" yields the default
 * package name.
 */
static String generatePackageName(final OpenAPI document) {
    final String host = RestDslGenerator.determineHostFrom(document);
    if (!ObjectHelper.isNotEmpty(host)) {
        return DEFAULT_PACKAGE_NAME;
    }
    final String hostWithoutPort = host.replaceFirst(":.*", "");
    if ("localhost".equalsIgnoreCase(hostWithoutPort)) {
        return DEFAULT_PACKAGE_NAME;
    }
    final String[] labels = hostWithoutPort.split("\\.");
    final StringBuilder packageName = new StringBuilder();
    // Walk the labels back-to-front, joining with dots.
    for (int i = labels.length - 1; i > 0; i--) {
        packageName.append(labels[i]).append('.');
    }
    packageName.append(labels[0]);
    return packageName.toString();
}
|
/** "localhost" (even with a port) must map to the default package name. */
@Test
public void shouldUseDefaultPackageNameForLocalhostWithPort() {
    final OpenAPI openapi = new OpenAPI();
    Server server = new Server();
    server.url("http://localhost:8080");
    openapi.addServersItem(server);
    assertThat(RestDslSourceCodeGenerator.generatePackageName(openapi))
        .isEqualTo(RestDslSourceCodeGenerator.DEFAULT_PACKAGE_NAME);
}
|
/**
 * Resolves a source-level type expression (e.g. "java.util.List<?>") to a
 * codemodel {@code JClass} by parsing it as a synthetic field declaration
 * ("{type} foo;") and walking the resulting AST.
 *
 * @param _package the codemodel container used to look up referenced classes
 * @param typeDefinition the type expression to parse
 * @throws GenerationException wrapping any {@code ParseException}
 */
public static JClass resolveType(JClassContainer _package, String typeDefinition) {
    try {
        FieldDeclaration fieldDeclaration = (FieldDeclaration) JavaParser.parseBodyDeclaration(typeDefinition + " foo;");
        ClassOrInterfaceType c = (ClassOrInterfaceType) ((ReferenceType) fieldDeclaration.getType()).getType();
        return buildClass(_package, c, 0);
    } catch (ParseException e) {
        throw new GenerationException("Couldn't parse type: " + typeDefinition, e);
    }
}
|
/** "List<?>" must resolve to a parameterized List whose wildcard extends Object. */
@Test
public void testResolveTypeCanHandleWildcard() {
    final JCodeModel codeModel = new JCodeModel();
    final JClass _class = TypeUtil.resolveType(codeModel.rootPackage(), "java.util.List<?>");
    assertThat(_class.erasure(), equalTo(codeModel.ref(List.class)));
    assertThat(_class.typeParams(), emptyArray());
    assertThat(_class.isParameterized(), is(Boolean.TRUE));
    assertThat(_class.getTypeParameters(), hasSize(1));
    assertThat(_class.getTypeParameters().get(0)._extends(), is(equalTo(codeModel.ref(Object.class))));
}
|
/**
 * Fetches the cluster's node list via the cat "nodes" API, requesting a fixed
 * set of columns (including disk usage) and full node ids.
 */
public List<NodeResponse> nodes() {
    final Request request = request("GET", "nodes");
    request.addParameter("h", "id,name,role,host,ip,fileDescriptorMax,diskUsed,diskTotal,diskUsedPercent");
    request.addParameter("full_id", "true");
    return perform(request, new TypeReference<>() {}, "Unable to retrieve nodes list");
}
|
/** Nodes missing disk statistics must still be parsed (with null disk fields). */
@Test
void testNodesMethodParsesAndReturnsEvenNodesThatMissDiskUsageInfo() throws Exception {
    final ObjectMapper objectMapper = new ObjectMapperProvider().get();
    final OpenSearchClient client = mock(OpenSearchClient.class);
    final CatApi toTest = new CatApi(objectMapper, client);
    when(client.execute(any(), anyString()))
        .thenReturn(objectMapper.readValue(SAMPLE_CAT_NODES_RESPONSE, new TypeReference<List<NodeResponse>>() {}));
    final List<NodeResponse> nodes = toTest.nodes();
    assertThat(nodes)
        .hasSize(2)
        .contains(NodeResponse.create("nodeWithCorrectInfo",
            "nodeWithCorrectInfo",
            "dimr",
            null,
            "182.88.0.2",
            "45gb",
            "411.5gb",
            10.95d,
            1048576L))
        .contains(NodeResponse.create("nodeWithMissingDiskStatistics",
            "nodeWithMissingDiskStatistics",
            "dimr",
            null,
            "182.88.0.1",
            null,
            null,
            null,
            null));
}
|
/**
 * Sets the store properties, rejecting null (checkNotNull).
 *
 * @param properties the store properties; must not be null
 * @return this config, for chaining
 */
public RingbufferStoreConfig setProperties(Properties properties) {
    this.properties = checkNotNull(properties, "Ringbuffer store config properties cannot be null!");
    return this;
}
|
/** Properties set on the config must be readable back unchanged. */
@Test
public void setProperties() {
    Properties properties = new Properties();
    properties.setProperty("key", "value");
    config.setProperties(properties);
    assertEquals(properties, config.getProperties());
}
|
/**
 * Reflectively instantiates a custom {@code FileIO} implementation via its
 * no-arg constructor, applies the Hadoop configuration (if the implementation
 * accepts one), and initializes it with the given properties.
 *
 * @param impl fully qualified class name of the FileIO implementation
 * @param properties initialization properties passed to {@code initialize}
 * @param hadoopConf Hadoop configuration object (kept as Object to avoid a
 *     hard Hadoop dependency here)
 * @return the configured, initialized FileIO
 * @throws IllegalArgumentException if the class has no usable no-arg
 *     constructor or does not implement FileIO
 */
public static FileIO loadFileIO(String impl, Map<String, String> properties, Object hadoopConf) {
    LOG.info("Loading custom FileIO implementation: {}", impl);
    DynConstructors.Ctor<FileIO> ctor;
    try {
        ctor =
            DynConstructors.builder(FileIO.class)
                .loader(CatalogUtil.class.getClassLoader())
                .impl(impl)
                .buildChecked();
    } catch (NoSuchMethodException e) {
        throw new IllegalArgumentException(
            String.format("Cannot initialize FileIO implementation %s: %s", impl, e.getMessage()), e);
    }
    FileIO fileIO;
    try {
        fileIO = ctor.newInstance();
    } catch (ClassCastException e) {
        // newInstance casts to FileIO; a ClassCastException means impl is unrelated
        throw new IllegalArgumentException(
            String.format("Cannot initialize FileIO, %s does not implement FileIO.", impl), e);
    }
    configureHadoopConf(fileIO, hadoopConf);
    fileIO.initialize(properties);
    return fileIO;
}
|
/** A Configurable FileIO implementation must receive the supplied Hadoop configuration. */
@Test
public void loadCustomFileIO_configurable() {
    Configuration configuration = new Configuration();
    configuration.set("key", "val");
    FileIO fileIO =
        CatalogUtil.loadFileIO(
            TestFileIOConfigurable.class.getName(), Maps.newHashMap(), configuration);
    assertThat(fileIO).isInstanceOf(TestFileIOConfigurable.class);
    assertThat(((TestFileIOConfigurable) fileIO).configuration).isEqualTo(configuration);
}
|
/**
 * Appends all stages of the given pipeline to this one; a null pipeline is a
 * no-op.
 */
public void copyStages(PipelineConfig pipeline) {
    if (pipeline != null) {
        addAll(pipeline);
    }
}
|
/** copyStages(null) must leave the template unchanged rather than throw. */
@Test
public void copyStagesShouldNotThrowExceptionIfInputPipelineConfigIsNull() {
    PipelineTemplateConfig template = PipelineTemplateConfigMother.createTemplateWithParams("template-name", "foo", "bar", "baz");
    int sizeBeforeCopy = template.size();
    template.copyStages(null);
    assertThat(template.size(), is(sizeBeforeCopy));
}
|
/**
 * Coerces a JSON node to a {@code Date}: numeric nodes and numeric strings are
 * interpreted as days since the epoch; anything else is rejected.
 */
static Date toDate(final JsonNode object) {
    if (object instanceof NumericNode) {
        return getDateFromEpochDays(object.asLong());
    }
    if (!(object instanceof TextNode)) {
        throw invalidConversionException(object, SqlBaseType.DATE);
    }
    final long epochDays;
    try {
        epochDays = Long.parseLong(object.textValue());
    } catch (final NumberFormatException e) {
        throw failedStringCoercionException(SqlBaseType.DATE);
    }
    return getDateFromEpochDays(epochDays);
}
|
/** "10" means 10 epoch days: 10 * 86_400_000 ms = 864000000. */
@Test
public void shouldConvertStringToDateCorrectly() {
    final Date d = JsonSerdeUtils.toDate(JsonNodeFactory.instance.textNode("10"));
    assertThat(d.getTime(), equalTo(864000000L));
}
|
/** Hash delegates to {@code Float.hashCode} on the wrapped primitive value. */
@Override
public int hashCode()
{
    return Float.hashCode(value);
}
|
/** Runs the shared equals/hashCode contract checks via the HashCodeTester harness. */
@Test
void testHashCode()
{
    new HashCodeTester().runTests();
}
|
/**
 * Updates the last stable offset of {@code tp} if the partition is currently
 * assigned.
 *
 * @return true if the partition was assigned and the offset was updated,
 *     false otherwise
 */
synchronized boolean tryUpdatingLastStableOffset(TopicPartition tp, long lastStableOffset) {
    final TopicPartitionState state = assignedStateOrNull(tp);
    if (state == null) {
        return false;
    }
    // Reuse the state we already looked up instead of a second
    // assignedState(tp) lookup of the same partition.
    state.lastStableOffset(lastStableOffset);
    return true;
}
|
/** Updating succeeds for an assigned partition and is refused for an unassigned one. */
@Test
public void testTryUpdatingLastStableOffset() {
    state.assignFromUser(Collections.singleton(tp0));
    final TopicPartition unassignedPartition = new TopicPartition("unassigned", 0);
    final long lastStableOffset = 10L;
    assertTrue(state.tryUpdatingLastStableOffset(tp0, lastStableOffset));
    assertEquals(lastStableOffset, state.partitionEndOffset(tp0, IsolationLevel.READ_COMMITTED));
    assertFalse(state.tryUpdatingLastStableOffset(unassignedPartition, lastStableOffset));
}
|
/**
 * Unsupported: this headers view is read-only and always throws
 * {@code UnsupportedOperationException}.
 */
@Override
public T addFloat(K name, float value) {
    throw new UnsupportedOperationException("read only");
}
|
/** addFloat on the read-only headers must throw UnsupportedOperationException. */
@Test
public void testAddFloat() {
    // Lambda instead of an anonymous Executable — same semantics, idiomatic Java 8+.
    assertThrows(UnsupportedOperationException.class, () -> HEADERS.addFloat("name", 0));
}
|
/** Returns this manager's name (see the server-list naming tests for its format). */
public String getName() {
    return name;
}
|
/**
 * The endpoint cluster name and endpoint context path must drive both the
 * address-server URL and the manager name; the plain cluster name must not.
 */
@Test
void testWithEndpointClusterName() throws NacosException {
    Properties properties = new Properties();
    String endpoint = "127.0.0.1";
    properties.setProperty(PropertyKeyConst.ENDPOINT, endpoint);
    String endpointPort = "9090";
    properties.setProperty(PropertyKeyConst.ENDPOINT_PORT, endpointPort);
    String testEndpointClusterName = "testEndpointClusterName";
    properties.setProperty(PropertyKeyConst.ENDPOINT_CLUSTER_NAME, testEndpointClusterName);
    String testClusterName = "testClusterName";
    properties.setProperty(PropertyKeyConst.CLUSTER_NAME, testClusterName);
    String endpointContextPath = "/endpointContextPath";
    properties.setProperty(PropertyKeyConst.ENDPOINT_CONTEXT_PATH, endpointContextPath);
    String contextPath = "/contextPath";
    properties.setProperty(PropertyKeyConst.CONTEXT_PATH, contextPath);
    final NacosClientProperties clientProperties = NacosClientProperties.PROTOTYPE.derive(properties);
    ServerListManager serverListManager = new ServerListManager(clientProperties);
    assertTrue(serverListManager.addressServerUrl.contains(endpointContextPath));
    assertTrue(serverListManager.getName().contains("endpointContextPath"));
    assertTrue(serverListManager.addressServerUrl.contains(testEndpointClusterName));
    assertTrue(serverListManager.getName().contains(testEndpointClusterName));
    assertFalse(serverListManager.addressServerUrl.contains(testClusterName));
    assertFalse(serverListManager.getName().contains(testClusterName));
}
|
/**
 * Creates a {@code SharedBuffer} backed by the given keyed state store and value
 * serializer, using the default {@link SharedBufferCacheConfig}.
 */
@VisibleForTesting
public SharedBuffer(KeyedStateStore stateStore, TypeSerializer<V> valueSerializer) {
    this(stateStore, valueSerializer, new SharedBufferCacheConfig());
}
|
@Test
public void testSharedBuffer() throws Exception {
    // Builds a shared buffer of 8 events wired into a versioned (Dewey-numbered)
    // graph of pattern nodes, then extracts and releases three distinct matches
    // and checks reference counting fully empties the buffer at the end.
    SharedBuffer<Event> sharedBuffer =
            TestSharedBuffer.createTestBuffer(Event.createTypeSerializer(), cacheConfig);
    int numberEvents = 8;
    Event[] events = new Event[numberEvents];
    EventId[] eventIds = new EventId[numberEvents];
    final long timestamp = 1L;
    for (int i = 0; i < numberEvents; i++) {
        events[i] = new Event(i + 1, "e" + (i + 1), i);
        eventIds[i] = sharedBuffer.registerEvent(events[i], timestamp);
    }
    // Expected materialized matches for the three terminal "b" nodes below.
    Map<String, List<Event>> expectedPattern1 = new HashMap<>();
    expectedPattern1.put("a1", new ArrayList<>());
    expectedPattern1.get("a1").add(events[2]);
    expectedPattern1.put("a[]", new ArrayList<>());
    expectedPattern1.get("a[]").add(events[3]);
    expectedPattern1.put("b", new ArrayList<>());
    expectedPattern1.get("b").add(events[5]);
    Map<String, List<Event>> expectedPattern2 = new HashMap<>();
    expectedPattern2.put("a1", new ArrayList<>());
    expectedPattern2.get("a1").add(events[0]);
    expectedPattern2.put("a[]", new ArrayList<>());
    expectedPattern2.get("a[]").add(events[1]);
    expectedPattern2.get("a[]").add(events[2]);
    expectedPattern2.get("a[]").add(events[3]);
    expectedPattern2.get("a[]").add(events[4]);
    expectedPattern2.put("b", new ArrayList<>());
    expectedPattern2.get("b").add(events[5]);
    Map<String, List<Event>> expectedPattern3 = new HashMap<>();
    expectedPattern3.put("a1", new ArrayList<>());
    expectedPattern3.get("a1").add(events[0]);
    expectedPattern3.put("a[]", new ArrayList<>());
    expectedPattern3.get("a[]").add(events[1]);
    expectedPattern3.get("a[]").add(events[2]);
    expectedPattern3.get("a[]").add(events[3]);
    expectedPattern3.get("a[]").add(events[4]);
    expectedPattern3.get("a[]").add(events[5]);
    expectedPattern3.get("a[]").add(events[6]);
    expectedPattern3.put("b", new ArrayList<>());
    expectedPattern3.get("b").add(events[7]);
    try (SharedBufferAccessor<Event> sharedBufferAccessor = sharedBuffer.getAccessor()) {
        // Build two interleaved chains of "a1" -> "a[]"* -> "b" nodes.
        NodeId a10 =
                sharedBufferAccessor.put("a1", eventIds[0], null, DeweyNumber.fromString("1"));
        NodeId aLoop0 =
                sharedBufferAccessor.put(
                        "a[]", eventIds[1], a10, DeweyNumber.fromString("1.0"));
        NodeId a11 =
                sharedBufferAccessor.put("a1", eventIds[2], null, DeweyNumber.fromString("2"));
        NodeId aLoop1 =
                sharedBufferAccessor.put(
                        "a[]", eventIds[2], aLoop0, DeweyNumber.fromString("1.0"));
        NodeId aLoop2 =
                sharedBufferAccessor.put(
                        "a[]", eventIds[3], aLoop1, DeweyNumber.fromString("1.0"));
        NodeId aSecondLoop0 =
                sharedBufferAccessor.put(
                        "a[]", eventIds[3], a11, DeweyNumber.fromString("2.0"));
        NodeId aLoop3 =
                sharedBufferAccessor.put(
                        "a[]", eventIds[4], aLoop2, DeweyNumber.fromString("1.0"));
        DeweyNumber b0Version = DeweyNumber.fromString("1.0.0");
        NodeId b0 = sharedBufferAccessor.put("b", eventIds[5], aLoop3, b0Version);
        NodeId aLoop4 =
                sharedBufferAccessor.put(
                        "a[]", eventIds[5], aLoop3, DeweyNumber.fromString("1.1"));
        DeweyNumber b1Version = DeweyNumber.fromString("2.0.0");
        NodeId b1 = sharedBufferAccessor.put("b", eventIds[5], aSecondLoop0, b1Version);
        NodeId aLoop5 =
                sharedBufferAccessor.put(
                        "a[]", eventIds[6], aLoop4, DeweyNumber.fromString("1.1"));
        DeweyNumber b3Version = DeweyNumber.fromString("1.1.0");
        NodeId b3 = sharedBufferAccessor.put("b", eventIds[7], aLoop5, b3Version);
        sharedBufferAccessor.lockNode(b0, b0Version);
        sharedBufferAccessor.lockNode(b1, b1Version);
        sharedBufferAccessor.lockNode(b3, b3Version);
        // Extract each match once; a released node must yield no further matches.
        List<Map<String, List<EventId>>> patterns3 =
                sharedBufferAccessor.extractPatterns(b3, b3Version);
        assertEquals(1L, patterns3.size());
        assertEquals(expectedPattern3, sharedBufferAccessor.materializeMatch(patterns3.get(0)));
        sharedBufferAccessor.releaseNode(b3, b3Version);
        List<Map<String, List<EventId>>> patterns4 =
                sharedBufferAccessor.extractPatterns(b3, b3Version);
        assertEquals(0L, patterns4.size());
        assertTrue(patterns4.isEmpty());
        List<Map<String, List<EventId>>> patterns1 =
                sharedBufferAccessor.extractPatterns(b1, b1Version);
        assertEquals(1L, patterns1.size());
        assertEquals(expectedPattern1, sharedBufferAccessor.materializeMatch(patterns1.get(0)));
        sharedBufferAccessor.releaseNode(b1, b1Version);
        List<Map<String, List<EventId>>> patterns2 =
                sharedBufferAccessor.extractPatterns(b0, b0Version);
        assertEquals(1L, patterns2.size());
        assertEquals(expectedPattern2, sharedBufferAccessor.materializeMatch(patterns2.get(0)));
        sharedBufferAccessor.releaseNode(b0, b0Version);
        for (EventId eventId : eventIds) {
            sharedBufferAccessor.releaseEvent(eventId);
        }
    }
    // After all nodes and events are released the buffer must hold no state.
    assertTrue(sharedBuffer.isEmpty());
}
|
/**
 * Parses a Temperature Measurement characteristic value:
 * flags (1 byte), temperature (4-byte FLOAT), optional timestamp (7 bytes),
 * optional temperature type (1 byte). Invalid/short packets are routed to
 * {@code onInvalidDataReceived}.
 */
@Override
public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
    super.onDataReceived(device, data);

    // Minimum length: 1 flags byte + 4-byte FLOAT temperature.
    if (data.size() < 5) {
        onInvalidDataReceived(device, data);
        return;
    }

    int offset = 0;
    final int flags = data.getIntValue(Data.FORMAT_UINT8, offset);
    // Bit 0: unit (Celsius/Fahrenheit); bit 1: timestamp present; bit 2: type present.
    final int unit = (flags & 0x01) == UNIT_C ? UNIT_C : UNIT_F;
    final boolean timestampPresent = (flags & 0x02) != 0;
    final boolean temperatureTypePresent = (flags & 0x04) != 0;
    offset += 1;

    // Re-validate the total length against the optional fields announced by the flags.
    if (data.size() < 5 + (timestampPresent ? 7 : 0) + (temperatureTypePresent ? 1 : 0)) {
        onInvalidDataReceived(device, data);
        return;
    }

    // Fix: read at the tracked offset instead of the hard-coded index 1, so the
    // parser stays consistent should any field before the temperature change.
    final float temperature = data.getFloatValue(Data.FORMAT_FLOAT, offset);
    offset += 4;

    Calendar calendar = null;
    if (timestampPresent) {
        calendar = DateTimeDataCallback.readDateTime(data, offset);
        offset += 7;
    }

    Integer type = null;
    if (temperatureTypePresent) {
        type = data.getIntValue(Data.FORMAT_UINT8, offset);
        // offset += 1;
    }

    onTemperatureMeasurementReceived(device, temperature, unit, calendar, type);
}
|
@Test
public void onInvalidDataReceived() {
    final ProfileReadResponse response = new TemperatureMeasurementDataCallback() {
        @Override
        public void onTemperatureMeasurementReceived(@NonNull final BluetoothDevice device,
                                                     final float temperature, final int unit,
                                                     @Nullable final Calendar calendar,
                                                     @Nullable final Integer type) {
            called = true;
        }
    };
    // Flags 0x06 announce both timestamp (7 bytes) and type (1 byte), requiring
    // 13 bytes in total, but only 12 are supplied — the packet must be rejected.
    final Data data = new Data(new byte[] { 0x06, (byte) 0xB0, 0x0E, 0x00, (byte) 0xFE, (byte) 0xDC, 0x07, 0x0C, 0x05, 0x0B, 0x32, 0x1B });
    called = false;
    response.onDataReceived(null, data);
    assertFalse(called);
    assertFalse(response.isValid());
}
|
/**
 * Returns an immutable snapshot of all Kubernetes services known to the store.
 */
@Override
public Set<Service> services() {
    return ImmutableSet.copyOf(k8sServiceStore.services());
}
|
@Test
public void testGetServices() {
    // Seed one service and verify it is visible through the manager.
    createBasicServices();
    assertEquals("Number of service did not match", 1, target.services().size());
}
|
/** Returns the Kafka artifact version this transform was built against. */
@Override
public String version() {
    return AppInfoParser.getVersion();
}
|
@Test
public void testExtractFieldVersionRetrievedFromAppInfoParser() {
    // The transform must report the same version string as AppInfoParser.
    assertEquals(AppInfoParser.getVersion(), xformKey.version());
}
|
/**
 * Forwards the end-of-input signal to the user function with the
 * non-partitioned context.
 */
@Override
public void endInput() throws Exception {
    userFunction.endInput(nonPartitionedContext);
}
|
@Test
void testEndInput() throws Exception {
    // Verifies that endInput() reaches the user function and that records
    // emitted from applyToAllPartitions land on the main and side outputs.
    AtomicInteger counter = new AtomicInteger();
    OutputTag<Long> sideOutputTag = new OutputTag<Long>("side-output") {};
    TwoOutputProcessOperator<Integer, Integer, Long> processOperator =
            new TwoOutputProcessOperator<>(
                    new TwoOutputStreamProcessFunction<Integer, Integer, Long>() {
                        @Override
                        public void processRecord(
                                Integer record,
                                Collector<Integer> output1,
                                Collector<Long> output2,
                                PartitionedContext ctx) {
                            // do nothing.
                        }
                        @Override
                        public void endInput(
                                TwoOutputNonPartitionedContext<Integer, Long> ctx) {
                            try {
                                ctx.applyToAllPartitions(
                                        (firstOutput, secondOutput, context) -> {
                                            counter.incrementAndGet();
                                            firstOutput.collect(1);
                                            secondOutput.collect(2L);
                                        });
                            } catch (Exception e) {
                                throw new RuntimeException(e);
                            }
                        }
                    },
                    sideOutputTag);
    try (OneInputStreamOperatorTestHarness<Integer, Integer> testHarness =
            new OneInputStreamOperatorTestHarness<>(processOperator)) {
        testHarness.open();
        testHarness.endInput();
        // Exactly one (non-keyed) partition, so the callback fires once.
        assertThat(counter).hasValue(1);
        Collection<StreamRecord<Integer>> firstOutput = testHarness.getRecordOutput();
        ConcurrentLinkedQueue<StreamRecord<Long>> secondOutput =
                testHarness.getSideOutput(sideOutputTag);
        assertThat(firstOutput).containsExactly(new StreamRecord<>(1));
        assertThat(secondOutput).containsExactly(new StreamRecord<>(2L));
    }
}
|
/**
 * Builds the Ant command-line arguments: an optional quoted {@code -f <buildfile>}
 * (with separators normalized to Unix style) followed by the optional target.
 */
@Override
public String arguments() {
    final ArrayList<String> parts = new ArrayList<>();
    if (buildFile != null) {
        parts.add("-f \"" + FilenameUtils.separatorsToUnix(buildFile) + "\"");
    }
    if (target != null) {
        parts.add(target);
    }
    return String.join(" ", parts);
}
|
@Test
public void shouldGiveArgumentsIncludingBuildfileAndTarget() {
    // Both buildfile (quoted, -f prefixed) and target should appear, in order.
    AntTask task = new AntTask();
    task.setBuildFile("build/build.xml");
    task.setTarget("compile");
    assertThat(task.arguments(), is("-f \"build/build.xml\" compile"));
}
|
/**
 * Builds an empty results block matching the shape of the given query:
 * selection, aggregation (with or without group-by), or distinct.
 */
public static BaseResultsBlock buildEmptyQueryResults(QueryContext queryContext) {
    if (QueryContextUtils.isSelectionQuery(queryContext)) {
        return buildEmptySelectionQueryResults(queryContext);
    }
    if (QueryContextUtils.isAggregationQuery(queryContext)) {
        if (queryContext.getGroupByExpressions() == null) {
            return buildEmptyAggregationQueryResults(queryContext);
        } else {
            return buildEmptyGroupByQueryResults(queryContext);
        }
    }
    // Only remaining query type; assert documents the exhaustiveness assumption.
    assert QueryContextUtils.isDistinctQuery(queryContext);
    return buildEmptyDistinctQueryResults(queryContext);
}
|
@Test
public void testBuildEmptyQueryResults()
    throws IOException {
    // Selection: empty block keeps the '*' placeholder schema with zero rows.
    QueryContext queryContext = QueryContextConverterUtils.getQueryContext("SELECT * FROM testTable WHERE foo = 'bar'");
    DataTable dataTable = ResultsBlockUtils.buildEmptyQueryResults(queryContext).getDataTable();
    DataSchema dataSchema = dataTable.getDataSchema();
    assertEquals(dataSchema.getColumnNames(), new String[]{"*"});
    assertEquals(dataSchema.getColumnDataTypes(), new ColumnDataType[]{ColumnDataType.STRING});
    assertEquals(dataTable.getNumberOfRows(), 0);
    // Aggregation: a single row of identity values (COUNT=0, SUM=0.0, MAX=-inf).
    queryContext =
        QueryContextConverterUtils.getQueryContext("SELECT COUNT(*), SUM(a), MAX(b) FROM testTable WHERE foo = 'bar'");
    dataTable = ResultsBlockUtils.buildEmptyQueryResults(queryContext).getDataTable();
    dataSchema = dataTable.getDataSchema();
    assertEquals(dataSchema.getColumnNames(), new String[]{"count(*)", "sum(a)", "max(b)"});
    assertEquals(dataSchema.getColumnDataTypes(), new ColumnDataType[]{
        ColumnDataType.LONG, ColumnDataType.DOUBLE, ColumnDataType.DOUBLE
    });
    assertEquals(dataTable.getNumberOfRows(), 1);
    assertEquals(dataTable.getLong(0, 0), 0L);
    assertEquals(dataTable.getDouble(0, 1), 0.0);
    assertEquals(dataTable.getDouble(0, 2), Double.NEGATIVE_INFINITY);
    // Group-by: full schema (group keys + aggregations), zero rows.
    queryContext = QueryContextConverterUtils.getQueryContext(
        "SELECT c, d, COUNT(*), SUM(a), MAX(b) FROM testTable WHERE foo = 'bar' GROUP BY c, d");
    dataTable = ResultsBlockUtils.buildEmptyQueryResults(queryContext).getDataTable();
    dataSchema = dataTable.getDataSchema();
    assertEquals(dataSchema.getColumnNames(), new String[]{"c", "d", "count(*)", "sum(a)", "max(b)"});
    assertEquals(dataSchema.getColumnDataTypes(), new ColumnDataType[]{
        ColumnDataType.STRING, ColumnDataType.STRING, ColumnDataType.LONG, ColumnDataType.DOUBLE, ColumnDataType.DOUBLE
    });
    assertEquals(dataTable.getNumberOfRows(), 0);
    // Distinct: projected columns only, zero rows.
    queryContext = QueryContextConverterUtils.getQueryContext("SELECT DISTINCT a, b FROM testTable WHERE foo = 'bar'");
    dataTable = ResultsBlockUtils.buildEmptyQueryResults(queryContext).getDataTable();
    dataSchema = dataTable.getDataSchema();
    assertEquals(dataSchema.getColumnNames(), new String[]{"a", "b"});
    assertEquals(dataSchema.getColumnDataTypes(), new ColumnDataType[]{
        ColumnDataType.STRING, ColumnDataType.STRING
    });
    assertEquals(dataTable.getNumberOfRows(), 0);
}
|
/**
 * Returns {@code true} if the column statistics prove the predicate can never
 * match any record in the row group, i.e. the chunk can be safely dropped.
 *
 * @throws NullPointerException if {@code pred} or {@code columns} is null
 */
public static boolean canDrop(FilterPredicate pred, List<ColumnChunkMetaData> columns) {
    Objects.requireNonNull(pred, "pred cannot be null");
    Objects.requireNonNull(columns, "columns cannot be null");
    return pred.accept(new StatisticsFilter(columns));
}
|
@Test
public void testAnd() {
    // An AND can be dropped if EITHER side is droppable; it survives only
    // when neither side can be proven to match nothing.
    FilterPredicate yes = eq(intColumn, 9);
    FilterPredicate no = eq(doubleColumn, 50D);
    assertTrue(canDrop(and(yes, yes), columnMetas));
    assertTrue(canDrop(and(yes, no), columnMetas));
    assertTrue(canDrop(and(no, yes), columnMetas));
    assertFalse(canDrop(and(no, no), columnMetas));
}
|
/**
 * Formats the job start time with the given formatter, or returns the
 * {@code NA} placeholder when the start time is negative (i.e. not yet set).
 */
public String getFormattedStartTimeStr(final DateFormat dateFormat) {
    if (startTime < 0) {
        return NA;
    }
    return dateFormat.format(new Date(startTime));
}
|
@Test
public void testGetFormattedStartTimeStr() {
    // A negative start time means "not started": expect the NA placeholder.
    JobReport jobReport = mock(JobReport.class);
    when(jobReport.getStartTime()).thenReturn(-1L);
    Job job = mock(Job.class);
    when(job.getReport()).thenReturn(jobReport);
    when(job.getName()).thenReturn("TestJobInfo");
    when(job.getState()).thenReturn(JobState.SUCCEEDED);
    JobId jobId = MRBuilderUtils.newJobId(1L, 1, 1);
    when(job.getID()).thenReturn(jobId);
    DateFormat dateFormat = new SimpleDateFormat();
    JobInfo jobInfo = new JobInfo(job);
    Assert.assertEquals(
        JobInfo.NA, jobInfo.getFormattedStartTimeStr(dateFormat));
    // A real start time must round-trip through the supplied formatter.
    Date date = new Date();
    when(jobReport.getStartTime()).thenReturn(date.getTime());
    jobInfo = new JobInfo(job);
    Assert.assertEquals(
        dateFormat.format(date), jobInfo.getFormattedStartTimeStr(dateFormat));
}
|
/**
 * Returns a transform that materializes a {@code PCollection<KV<K, V>>} as a
 * multimap side input, keeping duplicate values per key.
 */
public static <K, V> AsMultimap<K, V> asMultimap() {
    // NOTE(review): the boolean flag's meaning is defined by the AsMultimap
    // constructor elsewhere — confirm before relying on it.
    return new AsMultimap<>(false);
}
|
@Test
@Category(ValidatesRunner.class)
public void testMultimapSideInput() {
    // Side input maps first letters to all values, duplicates included
    // ("a" -> [1, 1, 2]); each main-input word fans out per matching value.
    final PCollectionView<Map<String, Iterable<Integer>>> view =
        pipeline
            .apply(
                "CreateSideInput",
                Create.of(KV.of("a", 1), KV.of("a", 1), KV.of("a", 2), KV.of("b", 3)))
            .apply(View.asMultimap());
    PCollection<KV<String, Integer>> output =
        pipeline
            .apply("CreateMainInput", Create.of("apple", "banana", "blackberry"))
            .apply(
                "OutputSideInputs",
                ParDo.of(
                        new DoFn<String, KV<String, Integer>>() {
                          @ProcessElement
                          public void processElement(ProcessContext c) {
                            // Look up by the word's first letter.
                            for (Integer v : c.sideInput(view).get(c.element().substring(0, 1))) {
                              c.output(KV.of(c.element(), v));
                            }
                          }
                        })
                    .withSideInputs(view));
    PAssert.that(output)
        .containsInAnyOrder(
            KV.of("apple", 1),
            KV.of("apple", 1),
            KV.of("apple", 2),
            KV.of("banana", 3),
            KV.of("blackberry", 3));
    pipeline.run();
}
|
/** Static factory: returns a {@code KV} pairing the given key and value. */
public static <K, V> KV<K, V> of(K key, V value) {
    return new KV<>(key, value);
}
|
@Test
public void testOrderByValue() {
    // Exhaustively checks that OrderByValue compares only the values,
    // ignoring the keys, for every key/value combination in TEST_VALUES.
    Comparator<KV<Integer, Integer>> orderByValue = new KV.OrderByValue<>();
    for (Integer key1 : TEST_VALUES) {
        for (Integer val1 : TEST_VALUES) {
            for (Integer key2 : TEST_VALUES) {
                for (Integer val2 : TEST_VALUES) {
                    assertEquals(
                        compareInt(val1, val2), orderByValue.compare(KV.of(key1, val1), KV.of(key2, val2)));
                }
            }
        }
    }
}
|
/**
 * Periodic sync pass, run once the catalog is ready: clean up unused shards,
 * shard groups, and workers, then reconcile table meta and colocation info.
 */
@Override
protected void runAfterCatalogReady() {
    deleteUnusedShardAndShardGroup();
    deleteUnusedWorker();
    syncTableMetaAndColocationInfo();
}
|
@Test
public void testNormal() throws Exception {
    // Zero threshold so every shard group is immediately eligible for cleanup.
    Config.shard_group_clean_threshold_sec = 0;
    List<Long> allShardGroupId = Stream.of(1L, 2L, 3L, 4L, 12L).collect(Collectors.toList());
    // build shardGroupInfos
    List<ShardGroupInfo> shardGroupInfos = new ArrayList<>();
    for (long groupId : allShardGroupId) {
        ShardGroupInfo info = ShardGroupInfo.newBuilder()
                .setGroupId(groupId)
                .putProperties("createTime", String.valueOf(System.currentTimeMillis()))
                .build();
        shardGroupInfos.add(info);
    }
    // Mock the StarOS agent so deletions mutate the in-memory lists above.
    new MockUp<StarOSAgent>() {
        @Mock
        public void deleteShardGroup(List<Long> groupIds) throws
                StarClientException {
            allShardGroupId.removeAll(groupIds);
            for (long groupId : groupIds) {
                shardGroupInfos.removeIf(item -> item.getGroupId() == groupId);
            }
        }
        @Mock
        public List<ShardGroupInfo> listShardGroup() {
            return shardGroupInfos;
        }
    };
    starMgrMetaSyncer.runAfterCatalogReady();
    // Only one shard group should survive the cleanup pass.
    Assert.assertEquals(1, starOSAgent.listShardGroup().size());
}
|
/**
 * Splits {@code input} into entries on {@code entryDelimiter}, then splits each
 * entry into a key and value on {@code kvDelimiter}. Entries without the
 * key-value delimiter are skipped; for duplicate keys the last value wins.
 * Returns {@code null} for null arguments, empty delimiters, or identical
 * delimiters.
 */
@Udf
public Map<String, String> splitToMap(
    @UdfParameter(
        description = "The input string to parse into map entries") final String input,
    @UdfParameter(
        description = "The delimiter that separates entries from each other") final String entryDelimiter,
    @UdfParameter(
        description = "The delimiter that separates each key from its value") final String kvDelimiter) {
  if (input == null || entryDelimiter == null || kvDelimiter == null) {
    return null;
  }
  if (entryDelimiter.isEmpty() || kvDelimiter.isEmpty() || entryDelimiter.equals(kvDelimiter)) {
    return null;
  }
  final Iterable<String> entries = Splitter.on(entryDelimiter).omitEmptyStrings().split(input);
  return StreamSupport.stream(entries.spliterator(), false)
      .filter(e -> e.contains(kvDelimiter))
      .map(kv -> Splitter.on(kvDelimiter).split(kv).iterator())
      // Left-to-right argument evaluation (JLS 15.7.4) guarantees the first
      // next() yields the key and the second the value.
      .collect(Collectors.toMap(
          Iterator::next,
          Iterator::next,
          (v1, v2) -> v2));
}
|
@Test
public void shouldSplitStringWithOnlyOneEntry() {
    // A single "key=value" entry yields exactly one mapping.
    Map<String, String> result = udf.splitToMap("foo=apple", ";", "=");
    assertThat(result, hasEntry("foo", "apple"));
    assertThat(result.size(), equalTo(1));
}
|
/**
 * Creates a DevOps project creation context by first resolving an access token
 * from the ALM settings, then building the project against the provider.
 */
@Override
public DevOpsProjectCreationContext create(AlmSettingDto almSettingDto, DevOpsProjectDescriptor devOpsProjectDescriptor) {
    AccessToken accessToken = getAccessToken(almSettingDto);
    return createDevOpsProject(almSettingDto, devOpsProjectDescriptor, accessToken);
}
|
@Test
void create_whenRepoNotFound_throws() {
    // Valid token but unknown repository: creation must fail with a clear message.
    AlmSettingDto almSettingDto = mockAlmSettingDto();
    mockValidAccessToken(almSettingDto);
    assertThatIllegalStateException()
        .isThrownBy(() -> githubDevOpsProjectService.create(almSettingDto, DEV_OPS_PROJECT_DESCRIPTOR))
        .withMessage("Impossible to find the repository 'repository-identifier' on GitHub, using the devops config alm-config-key");
}
|
/**
 * Attaches define expressions to schema columns by name. Column-name matching
 * is case-insensitive; only the first matching column per name is updated.
 */
@VisibleForTesting
public void setColumnsDefineExpr(Map<String, Expr> columnNameToDefineExpr) {
    for (Map.Entry<String, Expr> mapping : columnNameToDefineExpr.entrySet()) {
        String targetName = mapping.getKey();
        for (Column candidate : schema) {
            if (candidate.getName().equalsIgnoreCase(targetName)) {
                candidate.setDefineExpr(mapping.getValue());
                break;
            }
        }
    }
}
|
@Test
public void testSetDefineExprCaseInsensitive() {
    // Column declared as "UPPER" must be matched by the lower-case key "upper".
    List<Column> schema = Lists.newArrayList();
    Column column = new Column("UPPER", Type.ARRAY_VARCHAR);
    schema.add(column);
    MaterializedIndexMeta meta = new MaterializedIndexMeta(0, schema, 0, 0,
            (short) 0, TStorageType.COLUMN, KeysType.DUP_KEYS, null);
    Map<String, Expr> columnNameToDefineExpr = Maps.newHashMap();
    columnNameToDefineExpr.put("upper", new StringLiteral());
    meta.setColumnsDefineExpr(columnNameToDefineExpr);
    Assert.assertNotNull(column.getDefineExpr());
}
|
/**
 * Registers with the new registry center exactly once (CAS-guarded), and,
 * unless migration mode is open, suppresses registration with the original
 * registry by short-circuiting the intercepted call.
 */
@Override
public void register(FixedResult result) {
    if (isRegistered.compareAndSet(false, true)) {
        RegisterManager.INSTANCE.register();
        if (!getRegisterConfig().isOpenMigration()) {
            // Prevent registration with the original registry center.
            result.setResult(null);
        }
    }
}
|
@Test
public void register() {
    // Migration closed: the original registration must be skipped.
    final FixedResult fixedResult = new FixedResult();
    registerCenterService.register(fixedResult);
    Assert.assertTrue(fixedResult.isSkip());
    // Migration open: the original registration proceeds normally.
    registerConfig.setOpenMigration(true);
    final FixedResult openResult = new FixedResult();
    registerCenterService.register(openResult);
    Assert.assertFalse(openResult.isSkip());
}
|
/**
 * Deserializes via the fallible {@code tryDeserialize} path and unwraps the
 * result, propagating any deserialization failure to the caller.
 */
@Override
public T deserialize(final String topic, final byte[] bytes) {
    return tryDeserialize(topic, bytes).get();
}
|
@Test
public void shouldDeserializeWithDelegate() {
    // Given: the delegate returns a row for any input.
    when(delegate.deserialize(any(), any())).thenReturn(SOME_ROW);
    // When:
    deserializer.deserialize("some topic", SOME_BYTES);
    // Then: the call is forwarded to the delegate with the same arguments.
    verify(delegate).deserialize("some topic", SOME_BYTES);
}
|
/**
 * Returns a read-only view of the map state's values; mutating the returned
 * collection (or its iterator) throws {@link UnsupportedOperationException}.
 */
@Override
public Iterable<V> values() {
    return Collections.unmodifiableCollection(state.values());
}
|
@Test
void testValues() throws Exception {
    // Sanity-check the pre-populated entries.
    assertThat(mapState.contains(1L)).isTrue();
    long value = mapState.get(1L);
    assertThat(value).isEqualTo(5L);
    assertThat(mapState.contains(2L)).isTrue();
    value = mapState.get(2L);
    assertThat(value).isEqualTo(5L);
    // The values() view must be unmodifiable: iterator removal is rejected.
    assertThatThrownBy(
            () -> {
                Iterator<Long> iterator = mapState.values().iterator();
                while (iterator.hasNext()) {
                    iterator.remove();
                }
            })
        .isInstanceOf(UnsupportedOperationException.class);
}
|
/**
 * Validates that the mining model declares a usable target field.
 *
 * @throws KiePMMLInternalException if the target field is null, empty, or
 *         whitespace-only
 */
void validateMining(final KiePMMLMiningModel toValidate) {
    if (toValidate.getTargetField() == null || StringUtils.isEmpty(toValidate.getTargetField().trim())) {
        throw new KiePMMLInternalException(String.format(TARGET_FIELD_REQUIRED_RETRIEVED,
                                                         toValidate.getTargetField()));
    }
}
|
@Test
void validateMiningEmptyTargetField() {
    // A whitespace-only target field must be rejected just like a missing one.
    assertThatExceptionOfType(KiePMMLInternalException.class).isThrownBy(() -> {
        String name = "NAME";
        KiePMMLMiningModel kiePMMLMiningModel = KiePMMLMiningModel.builder("FILENAME", name, Collections.emptyList(),
                                                                           MINING_FUNCTION.ASSOCIATION_RULES)
                .withTargetField(" ")
                .build();
        evaluator.validateMining(kiePMMLMiningModel);
    });
}
|
/**
 * Sets the registry password.
 *
 * @param password the password for registry authentication
 * @return this builder, for call chaining
 */
public RegistryBuilder password(String password) {
    this.password = password;
    return getThis();
}
|
@Test
void password() {
    // The password set on the builder must be carried into the built config.
    Assertions.assertEquals("password", new RegistryBuilder().password("password").build().getPassword());
}
|
/**
 * Resolves a setting value, searching in priority order:
 * system properties, then environment variables, then the thread-local cache
 * (when enabled), then the database. Cached lookups memoize misses as nulls
 * so a missing key is only loaded from the database once per cache lifetime.
 */
@Override
protected Optional<String> get(String key) {
    // search for the first value available in
    // 1. system properties
    // 2. core property from environment variable
    // 3. thread local cache (if enabled)
    // 4. db
    String value = systemProps.getProperty(key);
    if (value != null) {
        return Optional.of(value);
    }
    Optional<String> envVal = getDefinitions().getValueFromEnv(key);
    if (envVal.isPresent()) {
        return envVal;
    }
    Map<String, String> dbProps = CACHE.get();
    // caching is disabled
    if (dbProps == null) {
        return Optional.ofNullable(load(key));
    }
    String loadedValue;
    if (dbProps.containsKey(key)) {
        // property may not exist in db. In this case key is present
        // in cache but value is null
        loadedValue = dbProps.get(key);
    } else {
        // cache the effective value (null if the property
        // is not persisted)
        loadedValue = load(key);
        dbProps.put(key, loadedValue);
    }
    return Optional.ofNullable(loadedValue);
}
|
@Test
public void database_properties_are_not_cached_by_default() {
    // With caching disabled, every get() hits the database directly.
    insertPropertyIntoDb("foo", "from db");
    underTest = create(system, Collections.emptyMap());
    assertThat(underTest.get("foo")).hasValue("from db");
    deletePropertyFromDb("foo");
    // no cache, change is visible immediately
    assertThat(underTest.get("foo")).isNotPresent();
}
|
/**
 * Books the room with the given number.
 *
 * @param roomNumber the room to book
 * @throws Exception if the room does not exist or is already booked
 */
public void bookRoom(int roomNumber) throws Exception {
    var room = hotelDao.getById(roomNumber);
    if (room.isEmpty()) {
        throw new Exception("Room number: " + roomNumber + " does not exist");
    }
    var booking = room.get();
    if (booking.isBooked()) {
        throw new Exception("Room already booked!");
    }
    booking.setBooked(true);
    hotelDao.update(booking);
}
|
@Test
void bookingRoomShouldChangeBookedStatusToTrue() throws Exception {
    // Booking room 1 must persist the booked flag through the DAO.
    hotel.bookRoom(1);
    assertTrue(dao.getById(1).isPresent());
    assertTrue(dao.getById(1).get().isBooked());
}
|
/**
 * Returns an IPv4 prefix built from a packed 32-bit address and a prefix length.
 */
public static IpPrefix valueOf(int address, int prefixLength) {
    return new IpPrefix(IpAddress.valueOf(address), prefixLength);
}
|
@Test(expected = NullPointerException.class)
public void testInvalidValueOfNullString() {
    // Passing a null string to valueOf must throw NullPointerException.
    IpPrefix ipPrefix;
    String fromString;
    fromString = null;
    ipPrefix = IpPrefix.valueOf(fromString);
}
|
/**
 * Static factory: creates a formatter for the given pattern and time-zone id.
 */
public static TimestampFormatter of(final String pattern, final String zoneIdString) {
    return new TimestampFormatter(pattern, zoneIdString);
}
|
@Test
public void testRuby() {
    // A UTC instant rendered with a Ruby-style pattern in the -09:00 zone
    // must shift the wall-clock time back nine hours.
    testRubyToFormat(OffsetDateTime.of(2017, 2, 28, 2, 0, 45, 0, ZoneOffset.UTC).toInstant(),
            "%Y-%m-%dT%H:%M:%S %Z",
            "-09:00",
            "2017-02-27T17:00:45 -09:00");
}
|
/**
 * Executes the pipeline on the direct runner: round-trips the options through
 * JSON (defensive copy + validation), rewrites the pipeline, builds the
 * execution graph and evaluation context, then starts the parallel executor.
 * When {@code isBlockOnRun()} is set, blocks until the pipeline finishes.
 */
@Override
public DirectPipelineResult run(Pipeline pipeline) {
    try {
        // Serialize/deserialize options to get an independent, validated copy.
        options =
            MAPPER
                .readValue(MAPPER.writeValueAsBytes(options), PipelineOptions.class)
                .as(DirectOptions.class);
    } catch (IOException e) {
        throw new IllegalArgumentException(
            "PipelineOptions specified failed to serialize to JSON.", e);
    }
    performRewrites(pipeline);
    MetricsEnvironment.setMetricsSupported(true);
    try {
        DirectGraphVisitor graphVisitor = new DirectGraphVisitor();
        pipeline.traverseTopologically(graphVisitor);
        @SuppressWarnings("rawtypes")
        KeyedPValueTrackingVisitor keyedPValueVisitor = KeyedPValueTrackingVisitor.create();
        pipeline.traverseTopologically(keyedPValueVisitor);
        DisplayDataValidator.validatePipeline(pipeline);
        DisplayDataValidator.validateOptions(options);
        ExecutorService metricsPool =
            Executors.newCachedThreadPool(
                new ThreadFactoryBuilder()
                    .setThreadFactory(MoreExecutors.platformThreadFactory())
                    .setDaemon(false) // otherwise you say you want to leak, please don't!
                    .setNameFormat("direct-metrics-counter-committer")
                    .build());
        DirectGraph graph = graphVisitor.getGraph();
        EvaluationContext context =
            EvaluationContext.create(
                clockSupplier.get(),
                Enforcement.bundleFactoryFor(enabledEnforcements, graph),
                graph,
                keyedPValueVisitor.getKeyedPValues(),
                metricsPool);
        TransformEvaluatorRegistry registry =
            TransformEvaluatorRegistry.javaSdkNativeRegistry(context, options);
        PipelineExecutor executor =
            ExecutorServiceParallelExecutor.create(
                options.getTargetParallelism(),
                registry,
                Enforcement.defaultModelEnforcements(enabledEnforcements),
                context,
                metricsPool);
        executor.start(graph, RootProviderRegistry.javaNativeRegistry(context, options));
        DirectPipelineResult result = new DirectPipelineResult(executor, context);
        if (options.isBlockOnRun()) {
            try {
                result.waitUntilFinish();
            } catch (UserCodeException userException) {
                // Unwrap the user-code failure so callers see the real cause.
                throw new PipelineExecutionException(userException.getCause());
            } catch (Throwable t) {
                if (t instanceof RuntimeException) {
                    throw (RuntimeException) t;
                }
                throw new RuntimeException(t);
            }
        }
        return result;
    } finally {
        MetricsEnvironment.setMetricsSupported(false);
    }
}
|
@Test
public void testMutatingOutputThenOutputDoFnError() throws Exception {
    // Mutating a previously-emitted element before emitting again must be
    // detected by the direct runner's immutability enforcement.
    Pipeline pipeline = getPipeline();
    pipeline
        .apply(Create.of(42))
        .apply(
            ParDo.of(
                new DoFn<Integer, List<Integer>>() {
                  @ProcessElement
                  public void processElement(ProcessContext c) {
                    List<Integer> outputList = Arrays.asList(1, 2, 3, 4);
                    c.output(outputList);
                    outputList.set(0, 37);
                    c.output(outputList);
                  }
                }));
    thrown.expect(IllegalMutationException.class);
    thrown.expectMessage("output");
    thrown.expectMessage("must not be mutated");
    pipeline.run();
}
|
/**
 * Returns true if the two addresses (both IPv4, 4 bytes, or both IPv6, 16
 * bytes) match in their first {@code prefixLength} bits. Addresses of
 * different lengths never match; any other length is rejected.
 */
public static boolean isMatchWithPrefix(final byte[] candidate, final byte[] expected, final int prefixLength)
{
    if (candidate.length != expected.length)
    {
        return false;
    }

    switch (candidate.length)
    {
        case 4:
        {
            final int mask = prefixLengthToIpV4Mask(prefixLength);
            return (toInt(candidate) & mask) == (toInt(expected) & mask);
        }

        case 16:
        {
            // IPv6 is compared as two 64-bit halves with independent masks.
            final long upperMask = prefixLengthToIpV6Mask(min(prefixLength, 64));
            final long lowerMask = prefixLengthToIpV6Mask(max(prefixLength - 64, 0));

            return
                (upperMask & toLong(candidate, 0)) == (upperMask & toLong(expected, 0)) &&
                (lowerMask & toLong(candidate, 8)) == (lowerMask & toLong(expected, 8));
        }

        default:
            throw new IllegalArgumentException("how many bytes does an IP address have again?");
    }
}
|
@Test
void shouldMatchIfAllBytesWithPrefixMatch()
{
    // /24 covers only the first three bytes, so a differing fourth byte
    // must not prevent a match.
    final byte[] a = { 'a', 'b', 'c', 'd' };
    final byte[] b = { 'a', 'b', 'c', 'e' };
    assertTrue(isMatchWithPrefix(a, b, 24));
}
|
/** Returns this plugin's position in the chain: the REWRITE plugin's code. */
@Override
public int getOrder() {
    return PluginEnum.REWRITE.getCode();
}
|
@Test
public void testGetOrder() {
    // assertEquals takes (expected, actual); the original call had them
    // swapped, which would produce a misleading message on failure.
    assertEquals(90, rewritePlugin.getOrder());
}
|
/**
 * Creates a post (job position) after validating its name and code are unique,
 * and returns the generated id.
 */
@Override
public Long createPost(PostSaveReqVO createReqVO) {
    // Validate correctness (unique name and code)
    validatePostForCreateOrUpdate(null, createReqVO.getName(), createReqVO.getCode());
    // Insert the post record
    PostDO post = BeanUtils.toBean(createReqVO, PostDO.class);
    postMapper.insert(post);
    return post.getId();
}
|
@Test
public void testValidatePost_nameDuplicateForCreate() {
    // Mock data: insert an existing post row first
    PostDO postDO = randomPostDO();
    postMapper.insert(postDO);
    // Prepare the request with a deliberately duplicated name
    PostSaveReqVO reqVO = randomPojo(PostSaveReqVO.class,
            // simulate a duplicate name
            o -> o.setName(postDO.getName()));
    assertServiceException(() -> postService.createPost(reqVO), POST_NAME_DUPLICATE);
}
|
/**
 * Evicts every piece of cached state tied to the given selector: upstream
 * handles, the config cache entry, the watch listener, the default-rule
 * handle, and the pooled gRPC client.
 */
public void invalidate(final String selectorId) {
    grpcUpstreamCachedHandle.get().removeHandle(selectorId);
    cache.invalidate(selectorId);
    watchUpstreamListener.remove(selectorId);
    ruleCachedHandle.get().removeHandle(CacheKeyUtils.INST.getKey(selectorId, Constants.DEFAULT_RULE));
    GrpcClientCache.removeClient(selectorId);
}
|
@Test
public void testInvalidate() {
    // After invalidation, the selector's cached instances must be gone.
    this.applicationConfigCache.invalidate(selector.getName());
    final List<ShenyuServiceInstance> shenyuServiceInstances = this.applicationConfigCache.get(selector.getName()).getShenyuServiceInstances();
    // Fixed typo in the assertion message ("mast is" -> "must be").
    assertTrue(CollectionUtils.isEmpty(shenyuServiceInstances), "shenyuServiceInstances must be empty");
}
|
/**
 * Pretty-prints the given XML with the specified indentation width, without
 * line declarations or coloring. (Method name keeps the historical
 * "petty" spelling for API compatibility.)
 */
public static String pettyPrint(String xml, int blanks) throws Exception {
    return doParse(xml, blanks, false, new NoopColor());
}
|
@Test
public void testPrettyPrintBeer() throws Exception {
    // Single-line XML should be expanded with 2-space indentation and
    // element text placed on its own line.
    String xml = "<beer alc=\"4.4%\"> <kind>Fosters</kind> <name>Bell Expedition</name> </beer>";
    String expected = """
            <beer alc="4.4%">
              <kind>
                Fosters
              </kind>
              <name>
                Bell Expedition
              </name>
            </beer>""";
    String pretty = XmlPrettyPrinter.pettyPrint(xml, 2, false);
    Assertions.assertEquals(expected, pretty);
}
|
/**
 * Reads a whitespace-separated list property. Returns {@code defaultVal} when
 * the property is missing, blank, or any error occurs while reading it
 * (best-effort: failures are logged, never thrown).
 */
public static String[] getArrayPropDefault(String propName, String[] defaultVal) {
    try {
        String strVal = appProperties.getProperty(propName);
        if (StringUtils.isNotBlank(strVal)) {
            // Trim first so leading/trailing whitespace yields no empty tokens.
            return strVal.trim().split("\\s+");
        }
    } catch (Exception e) {
        log.warn("Exception '{}' occurred when fetching Array property:'{}', defaulting to: {}",
                e.getMessage(), propName, defaultVal != null ? Arrays.toString(defaultVal) : null);
    }
    return defaultVal;
}
|
@Test
public void testGetArrayPropDefault() throws Exception {
    Path props = Files.createTempFile("testGetArrayPropDefault", ".properties");
    JMeterUtils.loadJMeterProperties(props.toString());
    JMeterUtils.getJMeterProperties().setProperty("testGetArrayPropDefaultEmpty", " ");
    JMeterUtils.getJMeterProperties().setProperty("testGetArrayPropDefault",
            " Tolstoi Dostoievski Pouchkine Gorki ");
    // Surrounding and repeated whitespace must be collapsed into clean tokens.
    assertArrayEquals(new String[]{"Tolstoi", "Dostoievski", "Pouchkine", "Gorki"},
            JMeterUtils.getArrayPropDefault("testGetArrayPropDefault", null));
    // Missing property falls back to the supplied default.
    assertArrayEquals(new String[]{"Gilels", "Richter"},
            JMeterUtils.getArrayPropDefault("testGetArrayPropDefaultMissing",
                    new String[]{"Gilels", "Richter"}));
    // A blank property also falls back to the default (null here).
    assertArrayEquals(null,
            JMeterUtils.getArrayPropDefault("testGetArrayPropDefaultEmpty", null));
}
|
/**
 * Maps provider-metadata auth methods to Spring's enum, preferring
 * client_secret_basic, then client_secret_post, then none; returns null when
 * no supported method is advertised.
 */
private static ClientAuthenticationMethod getClientAuthenticationMethod(
        List<com.nimbusds.oauth2.sdk.auth.ClientAuthenticationMethod> metadataAuthMethods) {
    if (metadataAuthMethods == null || metadataAuthMethods
            .contains(com.nimbusds.oauth2.sdk.auth.ClientAuthenticationMethod.CLIENT_SECRET_BASIC)) {
        // If null, the default includes client_secret_basic
        return ClientAuthenticationMethod.CLIENT_SECRET_BASIC;
    }
    if (metadataAuthMethods.contains(com.nimbusds.oauth2.sdk.auth.ClientAuthenticationMethod.CLIENT_SECRET_POST)) {
        return ClientAuthenticationMethod.CLIENT_SECRET_POST;
    }
    if (metadataAuthMethods.contains(com.nimbusds.oauth2.sdk.auth.ClientAuthenticationMethod.NONE)) {
        return ClientAuthenticationMethod.NONE;
    }
    return null;
}
|
@Test
public void buildWhenClientCredentialsGrantAllAttributesProvidedThenAllAttributesAreSet() {
    // Every attribute set on the builder must round-trip unchanged through build().
    // @formatter:off
    ClientRegistration registration = ClientRegistration.withRegistrationId(REGISTRATION_ID)
            .clientId(CLIENT_ID)
            .clientSecret(CLIENT_SECRET)
            .clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
            .authorizationGrantType(AuthorizationGrantType.CLIENT_CREDENTIALS)
            .scope(SCOPES.toArray(new String[0]))
            .tokenUri(TOKEN_URI)
            .clientName(CLIENT_NAME)
            .build();
    // @formatter:on
    assertThat(registration.getRegistrationId()).isEqualTo(REGISTRATION_ID);
    assertThat(registration.getClientId()).isEqualTo(CLIENT_ID);
    assertThat(registration.getClientSecret()).isEqualTo(CLIENT_SECRET);
    assertThat(registration.getClientAuthenticationMethod())
            .isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
    assertThat(registration.getAuthorizationGrantType()).isEqualTo(AuthorizationGrantType.CLIENT_CREDENTIALS);
    assertThat(registration.getScopes()).isEqualTo(SCOPES);
    assertThat(registration.getProviderDetails().getTokenUri()).isEqualTo(TOKEN_URI);
    assertThat(registration.getClientName()).isEqualTo(CLIENT_NAME);
}
|
/**
 * Sanity-checks an OVER_PROVISIONED response against cluster constraints:
 * downgrades to RIGHT_SIZED when the cluster is below the minimum broker
 * count, and caps the recommended broker drop so that enough brokers remain
 * to host the maximum replication factor. Non-over-provisioned responses are
 * returned untouched.
 *
 * @throws IllegalArgumentException if an over-provisioned response does not
 *         carry exactly one recommendation
 */
public static ProvisionResponse validateProvisionResponse(ProvisionResponse provisionResponse, ClusterModel clusterModel,
                                                          int overprovisionedMinBrokers) {
    if (provisionResponse.status() != ProvisionStatus.OVER_PROVISIONED) {
        return provisionResponse;
    }
    // ensure that a cluster is not identified as over provisioned unless it has the minimum required number of alive brokers
    if (clusterModel.aliveBrokers().size() < overprovisionedMinBrokers) {
        return new ProvisionResponse(ProvisionStatus.RIGHT_SIZED);
    }
    // when status is OVER_PROVISIONED goal is expected to have exactly 1 recommendation
    if (provisionResponse.recommendationByRecommender().size() != 1) {
        throw new IllegalArgumentException(String.format("Expected to have exactly 1 provision recommendation, but got: %d",
                                                         provisionResponse.recommendationByRecommender().size()));
    }
    String goalName = provisionResponse.recommendationByRecommender().keySet().iterator().next();
    int numBrokersToDrop = provisionResponse.recommendationByRecommender().values().iterator().next().numBrokers();
    // this variable indicates the maximum number of brokers that can be safely dropped.
    int maxAllowedNumBrokersToDrop = clusterModel.aliveBrokers().size() - clusterModel.maxReplicationFactor();
    if (numBrokersToDrop <= maxAllowedNumBrokersToDrop) {
        // return provision response as it is if the recommended number of brokers to drop is smaller or equal to the max allowed number of
        // brokers that can be safely dropped
        return provisionResponse;
    } else if (maxAllowedNumBrokersToDrop > 0) {
        // change the recommended number of brokers to drop if the original recommendation can not be safely satisfied
        ProvisionRecommendation recommendation =
            new ProvisionRecommendation.Builder(ProvisionStatus.OVER_PROVISIONED).numBrokers(maxAllowedNumBrokersToDrop).build();
        return new ProvisionResponse(ProvisionStatus.OVER_PROVISIONED, recommendation, goalName);
    } else {
        // change provision response status to RIGHT_SIZED if none of the brokers can be safely dropped
        return new ProvisionResponse(ProvisionStatus.RIGHT_SIZED);
    }
}
|
@Test
public void testValidateProvisionResponse() {
// min required num of brokers > num of brokers
ProvisionResponse response = GoalUtils.validateProvisionResponse(provisionResponse(1), clusterModel(3), 6);
assertEquals(RIGHT_SIZED, response.status());
// recommended num of brokers to drop is 1 and max allowed num of brokers to drop is 2, so 1 broker can be safely dropped
response = GoalUtils.validateProvisionResponse(provisionResponse(1), clusterModel(2), 4);
assertEquals(OVER_PROVISIONED, response.status());
assertEquals(1, response.recommendationByRecommender().values().iterator().next().numBrokers());
// recommended num of brokers to drop is 3 but max allowed num of brokers to drop is 1, so final provision response is to drop 1 broker
response = GoalUtils.validateProvisionResponse(provisionResponse(3), clusterModel(3), 4);
assertEquals(OVER_PROVISIONED, response.status());
assertEquals(1, response.recommendationByRecommender().values().iterator().next().numBrokers());
// recommended num of brokers to drop is 1 but max allowed num of brokers to drop is 0, so none of the brokers can be dropped
response = GoalUtils.validateProvisionResponse(provisionResponse(1), clusterModel(4), 4);
assertEquals(RIGHT_SIZED, response.status());
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.