Dataset schema: two string columns per row — `focal_method` (length 13 to 60.9k characters) and `test_case` (length 25 to 109k characters). Rows below are `|`-separated pairs of a focal method and its accompanying test.
|---|---|
@Override
public Long sendSingleNotify(Long userId, Integer userType, String templateCode, Map<String, Object> templateParams) {
    // Validate the template exists; validateNotifyTemplate throws for an unknown code.
    NotifyTemplateDO template = validateNotifyTemplate(templateCode);
    // A disabled template is not an error: log and return null so nothing is sent.
    if (Objects.equals(template.getStatus(), CommonStatusEnum.DISABLE.getStatus())) {
        log.info("[sendSingleNotify][模版({})已经关闭,无法给用户({}/{})发送]", templateCode, userId, userType);
        return null;
    }
    // Validate the supplied parameters against the template's declared parameters.
    validateTemplateParams(template, templateParams);
    // Render the template content and create the in-app notification message;
    // returns the new message id.
    String content = notifyTemplateService.formatNotifyTemplateContent(template.getContent(), templateParams);
    return notifyMessageService.createNotifyMessage(userId, userType, template, content, templateParams);
}
|
@Test
public void testSendSingleMail_successWhenSmsTemplateDisable() {
    // NOTE(review): name says "Mail"/"SmsTemplate" but this exercises the notify (in-app
    // message) path with a disabled template — consider renaming for clarity.
    // Prepare arguments.
    Long userId = randomLongId();
    Integer userType = randomEle(UserTypeEnum.values()).getValue();
    String templateCode = randomString();
    Map<String, Object> templateParams = MapUtil.<String, Object>builder().put("code", "1234")
        .put("op", "login").build();
    // Mock NotifyTemplateService: return a DISABLED template for the code.
    NotifyTemplateDO template = randomPojo(NotifyTemplateDO.class, o -> {
        o.setStatus(CommonStatusEnum.DISABLE.getStatus());
        o.setContent("验证码为{code}, 操作为{op}");
        o.setParams(Lists.newArrayList("code", "op"));
    });
    when(notifyTemplateService.getNotifyTemplateByCodeFromCache(eq(templateCode))).thenReturn(template);
    // Invoke.
    Long resultMessageId = notifySendService.sendSingleNotify(userId, userType, templateCode, templateParams);
    // Assert: disabled template yields null and neither formatting nor message creation happens.
    assertNull(resultMessageId);
    verify(notifyTemplateService, never()).formatNotifyTemplateContent(anyString(), anyMap());
    verify(notifyMessageService, never()).createNotifyMessage(anyLong(), anyInt(), any(), anyString(), anyMap());
}
|
@Override
public MergedResult decorate(final QueryResult queryResult, final SQLStatementContext sqlStatementContext, final ShardingSphereRule rule) {
    // Pass-through decorator: wraps the query result without any merging logic.
    return new TransparentMergedResult(queryResult);
}
|
@Test
void assertDecorateQueryResult() throws SQLException {
    // The transparent decorator must delegate next() to the wrapped QueryResult.
    QueryResult queryResult = mock(QueryResult.class);
    when(queryResult.next()).thenReturn(true);
    TransparentResultDecorator decorator = new TransparentResultDecorator();
    assertTrue(decorator.decorate(queryResult, mock(SQLStatementContext.class), mock(ShardingSphereRule.class)).next());
}
|
/**
 * Logs an error-level message if the backing logger supports error output.
 * Formats {@code text} with {@code args} once, then forwards the result to the
 * logger and to the internal log buffer. Synchronized to serialize log writes.
 */
public static synchronized void e(final String tag, String text, Object... args) {
    // Bail out early when error logging is disabled — avoids formatting cost.
    if (!msLogger.supportsE()) {
        return;
    }
    final String message = getFormattedString(text, args);
    msLogger.e(tag, message);
    addLog(LVL_E, tag, message);
}
|
@Test
public void testE1() throws Exception {
    // Formatted message: placeholder is substituted before reaching the logger.
    Logger.e("mTag", "Text with %d digits", 0);
    Mockito.verify(mMockLog).e("mTag", "Text with 0 digits");
    // No-args message: passed through unchanged.
    Logger.e("mTag", "Text with no digits");
    Mockito.verify(mMockLog).e("mTag", "Text with no digits");
}
|
@Override
public Flux<String> getServices() {
    // Defer so the (blocking) discovery call happens per-subscription, and run it on
    // the bounded-elastic scheduler to keep it off reactive event-loop threads.
    return Flux.defer(() -> {
        try {
            return Flux.fromIterable(polarisServiceDiscovery.getServices());
        }
        catch (Exception e) {
            // Degrade to an empty stream on failure: subscribers see completion, not an error.
            LOGGER.error("get services from polaris server fail,", e);
            return Flux.empty();
        }
    }).subscribeOn(Schedulers.boundedElastic());
}
|
@Test
public void testGetServices() throws PolarisException {
    // Stub: first call returns two services, every later call throws — exercises both
    // the success path and the exception-to-empty-Flux path.
    when(serviceDiscovery.getServices()).thenAnswer(invocation -> {
        if (count == 0) {
            count++;
            return Arrays.asList(SERVICE_PROVIDER + 1, SERVICE_PROVIDER + 2);
        }
        else {
            throw new PolarisException(ErrorCode.UNKNOWN_SERVER_ERROR);
        }
    });
    // Normal
    Flux<String> services = this.client.getServices();
    StepVerifier.create(services).expectNext(SERVICE_PROVIDER + 1, SERVICE_PROVIDER + 2).expectComplete().verify();
    // PolarisException: the error is swallowed and surfaced as an empty, completed Flux.
    services = this.client.getServices();
    StepVerifier.create(services).expectNextCount(0).expectComplete().verify();
}
|
/**
 * Looks up a bean of {@code type} in the child context registered under {@code name},
 * without consulting ancestor contexts.
 *
 * @return the matching bean, or {@code null} if none (or more than one) is found —
 *         any {@link BeansException} is deliberately swallowed and mapped to null.
 */
@Nullable
public <T> T getInstanceWithoutAncestors(String name, Class<T> type) {
    try {
        return BeanFactoryUtils.beanOfType(getContext(name), type);
    }
    catch (BeansException ex) {
        return null;
    }
}
|
@Test
void getInstanceWithoutAncestors() {
    // Parent context is empty; the bean comes from the "demo" child configuration only.
    AnnotationConfigApplicationContext parent = new AnnotationConfigApplicationContext();
    parent.refresh();
    FeignClientFactory feignClientFactory = new FeignClientFactory();
    feignClientFactory.setApplicationContext(parent);
    feignClientFactory.setConfigurations(Lists.newArrayList(getSpec("demo", null, DemoConfiguration.class)));
    Logger.Level level = feignClientFactory.getInstanceWithoutAncestors("demo", Logger.Level.class);
    // DemoConfiguration is expected to declare a Logger.Level.FULL bean.
    assertThat(level).isEqualTo(Logger.Level.FULL);
}
|
/**
 * Writes the share-group state for this share partition via the persister and validates
 * the response.
 *
 * @param stateBatches the state batches to persist for this partition
 * @return {@code true} on success; {@code false} if the persister reported a
 *         partition-level error code
 * @throws IllegalStateException if the persister call fails or the response is malformed
 */
boolean isWriteShareGroupStateSuccessful(List<PersisterStateBatch> stateBatches) {
    WriteShareGroupStateResult response;
    try {
        response = persister.writeState(new WriteShareGroupStateParameters.Builder()
            .setGroupTopicPartitionData(new GroupTopicPartitionData.Builder<PartitionStateBatchData>()
                .setGroupId(this.groupId)
                .setTopicsData(Collections.singletonList(new TopicData<>(topicIdPartition.topicId(),
                    Collections.singletonList(PartitionFactory.newPartitionStateBatchData(
                        topicIdPartition.partition(), stateEpoch, startOffset, 0, stateBatches))))
            ).build()).build()).get();
    } catch (InterruptedException | ExecutionException e) {
        // Restore the interrupt flag so callers up the stack can observe the interruption.
        if (e instanceof InterruptedException) {
            Thread.currentThread().interrupt();
        }
        log.error("Failed to write the share group state for share partition: {}-{}", groupId, topicIdPartition, e);
        throw new IllegalStateException(String.format("Failed to write the share group state for share partition %s-%s",
            groupId, topicIdPartition), e);
    }
    // The response must contain exactly one topic entry for the requested partition.
    if (response == null || response.topicsData() == null || response.topicsData().size() != 1) {
        log.error("Failed to write the share group state for share partition: {}-{}. Invalid state found: {}",
            groupId, topicIdPartition, response);
        throw new IllegalStateException(String.format("Failed to write the share group state for share partition %s-%s",
            groupId, topicIdPartition));
    }
    TopicData<PartitionErrorData> state = response.topicsData().get(0);
    // Fix: topicId() returns an object, so compare with equals() rather than reference
    // identity (!=), which could spuriously reject a logically-equal topic id.
    if (!state.topicId().equals(topicIdPartition.topicId()) || state.partitions().size() != 1
        || state.partitions().get(0).partition() != topicIdPartition.partition()) {
        log.error("Failed to write the share group state for share partition: {}-{}. Invalid topic partition response: {}",
            groupId, topicIdPartition, response);
        throw new IllegalStateException(String.format("Failed to write the share group state for share partition %s-%s",
            groupId, topicIdPartition));
    }
    PartitionErrorData partitionData = state.partitions().get(0);
    // A partition-level error code is reported to the caller as false, not an exception.
    if (partitionData.errorCode() != Errors.NONE.code()) {
        Exception exception = Errors.forCode(partitionData.errorCode()).exception(partitionData.errorMessage());
        log.error("Failed to write the share group state for share partition: {}-{} due to exception",
            groupId, topicIdPartition, exception);
        return false;
    }
    return true;
}
|
@Test
public void testIsWriteShareGroupStateFailure() {
    Persister persister = Mockito.mock(Persister.class);
    mockPersisterReadStateMethod(persister);
    SharePartition sharePartition = SharePartitionBuilder.builder().withPersister(persister).build();
    // Mock Write state RPC to return error response.
    WriteShareGroupStateResult writeShareGroupStateResult = Mockito.mock(WriteShareGroupStateResult.class);
    Mockito.when(writeShareGroupStateResult.topicsData()).thenReturn(Collections.singletonList(
        new TopicData<>(TOPIC_ID_PARTITION.topicId(), Collections.singletonList(
            PartitionFactory.newPartitionErrorData(0, Errors.NOT_COORDINATOR.code(), Errors.NOT_COORDINATOR.message())))));
    Mockito.when(persister.writeState(Mockito.any())).thenReturn(CompletableFuture.completedFuture(writeShareGroupStateResult));
    // Fix: Mockito.anyList() is an argument matcher and is only valid inside when()/verify()
    // stubbing; using it as a real call argument corrupts the matcher stack. Pass a real
    // (empty) list instead.
    assertFalse(sharePartition.isWriteShareGroupStateSuccessful(Collections.emptyList()));
}
|
/**
 * Returns the column names of the backing index segment.
 *
 * @throws IllegalStateException if this lazy row has not been initialized with a segment
 */
public Set<String> getColumnNames() {
    if (_segment != null) {
        return _segment.getColumnNames();
    }
    throw new IllegalStateException("Index segment for Lazy row is uninitialized.");
}
|
@Test
public void testGetColumnNames() {
    // An initialized LazyRow must delegate getColumnNames() to its segment.
    IndexSegment segment = getMockSegment();
    LazyRow lazyRow = new LazyRow();
    lazyRow.init(segment, 1);
    HashSet<String> columnNames = new HashSet<>(Arrays.asList("col1", "col2"));
    when(segment.getColumnNames()).thenReturn(columnNames);
    assertEquals(lazyRow.getColumnNames(), columnNames);
}
|
/**
 * ABI-encodes a dynamic array: the element count (as a uint) followed by the
 * per-element offsets section and the encoded element values.
 */
static <T extends Type> String encodeDynamicArray(DynamicArray<T> value) {
    // Head: the element count, encoded as a uint.
    final String lengthPart = encode(new Uint(BigInteger.valueOf(value.getValue().size())));
    // Offsets section, as produced by encodeArrayValuesOffsets.
    final String offsetsPart = encodeArrayValuesOffsets(value);
    // Tail: the encoded element values themselves.
    final String valuesPart = encodeArrayValues(value);
    return lengthPart + offsetsPart + valuesPart;
}
|
@Test
public void testDynamicArray() {
    DynamicArray<Uint> array =
        new DynamicArray<>(
            Uint.class,
            new Uint(BigInteger.ONE),
            new Uint(BigInteger.valueOf(2)),
            new Uint(BigInteger.valueOf(3)));
    // Expected layout: 32-byte length word (3) followed by the three 32-byte values.
    assertEquals(
        TypeEncoder.encodeDynamicArray(array),
        ("0000000000000000000000000000000000000000000000000000000000000003"
            + "0000000000000000000000000000000000000000000000000000000000000001"
            + "0000000000000000000000000000000000000000000000000000000000000002"
            + "0000000000000000000000000000000000000000000000000000000000000003"));
}
|
@SuppressWarnings("unchecked")
@Override
public void handle(ContainerAllocatorEvent event) {
    // Local (uber) allocator: instead of asking the RM, satisfy container requests
    // with a synthetic container on the AM's own node.
    if (event.getType() == ContainerAllocator.EventType.CONTAINER_REQ) {
        LOG.info("Processing the event " + event.toString());
        // Assign the same container ID as the AM
        ContainerId cID =
            ContainerId.newContainerId(getContext().getApplicationAttemptId(),
                this.containerId.getContainerId());
        Container container = recordFactory.newRecordInstance(Container.class);
        container.setId(cID);
        NodeId nodeId = NodeId.newInstance(this.nmHost, this.nmPort);
        // Zero resource: the task runs inside the AM, so no real allocation exists.
        container.setResource(Resource.newInstance(0, 0));
        container.setNodeId(nodeId);
        container.setContainerToken(null);
        container.setNodeHttpAddress(this.nmHost + ":" + this.nmHttpPort);
        // send the container-assigned event to task attempt
        if (event.getAttemptID().getTaskId().getTaskType() == TaskType.MAP) {
            JobCounterUpdateEvent jce =
                new JobCounterUpdateEvent(event.getAttemptID().getTaskId()
                    .getJobId());
            // TODO Setting OTHER_LOCAL_MAP for now.
            jce.addCounterUpdate(JobCounter.OTHER_LOCAL_MAPS, 1);
            eventHandler.handle(jce);
        }
        eventHandler.handle(new TaskAttemptContainerAssignedEvent(
            event.getAttemptID(), container, applicationACLs));
    }
}
|
@Test
public void testAllocatedContainerResourceIsNotNull() {
    // The local allocator must hand out a container whose Resource is a real (0,0)
    // object, never null — downstream code dereferences it.
    ArgumentCaptor<TaskAttemptContainerAssignedEvent> containerAssignedCaptor
        = ArgumentCaptor.forClass(TaskAttemptContainerAssignedEvent.class);
    @SuppressWarnings("unchecked")
    EventHandler<Event> eventHandler = mock(EventHandler.class);
    AppContext context = mock(AppContext.class) ;
    when(context.getEventHandler()).thenReturn(eventHandler);
    ContainerId containerId = ContainerId.fromString(
        "container_1427562107907_0002_01_000001");
    LocalContainerAllocator containerAllocator = new LocalContainerAllocator(
        mock(ClientService.class), context, "localhost", -1, -1, containerId);
    ContainerAllocatorEvent containerAllocatorEvent =
        createContainerRequestEvent();
    containerAllocator.handle(containerAllocatorEvent);
    verify(eventHandler, times(1)).handle(containerAssignedCaptor.capture());
    Container container = containerAssignedCaptor.getValue().getContainer();
    Resource containerResource = container.getResource();
    Assert.assertNotNull(containerResource);
    assertThat(containerResource.getMemorySize()).isEqualTo(0);
    assertThat(containerResource.getVirtualCores()).isEqualTo(0);
}
|
@Override
public CompletionStage<V> removeAsync(K key) {
    // Adapter: delegates directly to the underlying map's async removal.
    return map.removeAsync(key);
}
|
@Test
public void testRemoveAsync() throws Exception {
    // removeAsync must return the removed value and actually remove the entry.
    map.put(23, "value-23");
    assertTrue(map.containsKey(23));
    String value = adapter.removeAsync(23).toCompletableFuture().get();
    assertEquals("value-23", value);
    assertFalse(map.containsKey(23));
}
|
@Override
public BasicTypeDefine reconvert(Column column) {
    try {
        return super.reconvert(column);
    } catch (SeaTunnelRuntimeException e) {
        // Re-label a generic conversion failure as a Kingbase-specific connector error
        // carrying the offending SQL type and column name.
        // NOTE(review): the original cause `e` is not chained — consider attaching it.
        throw CommonError.convertToConnectorTypeError(
            DatabaseIdentifier.KINGBASE,
            column.getDataType().getSqlType().name(),
            column.getName());
    }
}
|
@Test
public void testReconvertFloat() {
    // FLOAT must map to Kingbase/PG "real" for both column type and data type.
    Column column =
        PhysicalColumn.builder().name("test").dataType(BasicType.FLOAT_TYPE).build();
    BasicTypeDefine typeDefine = KingbaseTypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(KingbaseTypeConverter.PG_REAL, typeDefine.getColumnType());
    Assertions.assertEquals(KingbaseTypeConverter.PG_REAL, typeDefine.getDataType());
}
|
@Override
public int getOrder() {
    // Plugin ordering is centralized in PluginEnum; this plugin uses its declared code.
    return PluginEnum.SPRING_CLOUD.getCode();
}
|
@Test
public void getOrder() {
    // The plugin's order must match the enum-declared code.
    final int result = springCloudPlugin.getOrder();
    assertEquals(PluginEnum.SPRING_CLOUD.getCode(), result);
}
|
/**
 * Packs a semantic version into a single int as 0x00MMmmpp (major, minor, patch).
 *
 * @param major major version, 0-255
 * @param minor minor version, 0-255
 * @param patch patch version, 0-255
 * @return the packed version value
 * @throws IllegalArgumentException if any part is out of range or all parts are zero
 */
public static int compose(final int major, final int minor, final int patch)
{
    checkComponent("major", major);
    checkComponent("minor", minor);
    checkComponent("patch", patch);

    // 0.0.0 is reserved as "no version".
    if (0 == major + minor + patch)
    {
        throw new IllegalArgumentException("all parts cannot be zero");
    }

    return (major << 16) | (minor << 8) | patch;
}

// Validates that a single version component fits in one unsigned byte.
private static void checkComponent(final String name, final int value)
{
    if (value < 0 || value > 255)
    {
        throw new IllegalArgumentException(name + " must be 0-255: " + value);
    }
}
|
@Test
void shouldDetectExcessivePatch()
{
    // patch is limited to one byte (0-255); 256 must be rejected.
    assertThrows(IllegalArgumentException.class, () -> SemanticVersion.compose(1, 1, 256));
}
|
// Asserts that the subject is exactly Float.NEGATIVE_INFINITY.
public final void isNegativeInfinity() {
    isEqualTo(Float.NEGATIVE_INFINITY);
}
|
@Test
public void isNegativeInfinity() {
    // Only NEGATIVE_INFINITY passes; finite values, +inf, NaN, and null must fail.
    assertThat(Float.NEGATIVE_INFINITY).isNegativeInfinity();
    assertThatIsNegativeInfinityFails(1.23f);
    assertThatIsNegativeInfinityFails(Float.POSITIVE_INFINITY);
    assertThatIsNegativeInfinityFails(Float.NaN);
    assertThatIsNegativeInfinityFails(null);
}
|
@Override
public BeamSqlTable buildBeamSqlTable(Table table) {
    // Conversion options (e.g. write disposition) are parsed from the table's properties.
    return new BigQueryTable(table, getConversionOptions(table.getProperties()));
}
|
@Test
public void testSelectWriteDispositionMethodAppend() {
    // A table whose properties set the write-disposition property to WRITE_APPEND
    // must produce a BigQueryTable with that disposition.
    Table table =
        fakeTableWithProperties(
            "hello",
            "{ "
                + WRITE_DISPOSITION_PROPERTY
                + ": "
                + "\""
                + WriteDisposition.WRITE_APPEND.toString()
                + "\" }");
    BigQueryTable sqlTable = (BigQueryTable) provider.buildBeamSqlTable(table);
    assertEquals(WriteDisposition.WRITE_APPEND, sqlTable.writeDisposition);
}
|
// Looks up column metadata by 1-based ordinal position (JDBC convention),
// delegating to the name-based overload.
public PipelineColumnMetaData getColumnMetaData(final int columnIndex) {
    return getColumnMetaData(columnNames.get(columnIndex - 1));
}
|
@Test
void assertGetColumnMetaDataGivenColumnName() {
    // Known column resolves with its full metadata; unknown column yields null.
    PipelineColumnMetaData actual = pipelineTableMetaData.getColumnMetaData("test");
    assertNull(pipelineTableMetaData.getColumnMetaData("non_exist"));
    assertThat(actual.getOrdinalPosition(), is(1));
    assertThat(actual.getName(), is("test"));
    assertThat(actual.getDataType(), is(Types.INTEGER));
    assertTrue(actual.isPrimaryKey());
}
|
// Convenience overload: loads the mapping's TransMeta with variable inheritance
// enabled (the trailing boolean of the full overload defaults to true here).
public static TransMeta loadMappingMeta( StepWithMappingMeta mappingMeta, Repository rep,
        IMetaStore metaStore, VariableSpace space ) throws KettleException {
    return loadMappingMeta( mappingMeta, rep, metaStore, space, true );
}
|
@Test
public void loadMappingMetaTest() throws Exception {
    // Verifies variable propagation when loading a mapping transformation:
    // parent values win on conflict, child-only values survive, and all parent
    // variables are copied down (inherit-all enabled).
    String childParam = "childParam";
    String childValue = "childValue";
    String paramOverwrite = "paramOverwrite";
    String parentParam = "parentParam";
    String parentValue = "parentValue";
    String variablePath = "Internal.Entry.Current.Directory";
    String virtualDir = "/testFolder/CDA-91";
    String fileName = "testTrans.ktr";
    VariableSpace variables = new Variables();
    variables.setVariable( parentParam, parentValue );
    variables.setVariable( paramOverwrite, parentValue );
    StepMeta stepMeta = new StepMeta();
    TransMeta parentTransMeta = new TransMeta();
    stepMeta.setParentTransMeta( parentTransMeta );
    RepositoryDirectoryInterface repositoryDirectory = Mockito.mock( RepositoryDirectoryInterface.class );
    when( repositoryDirectory.toString() ).thenReturn( virtualDir );
    stepMeta.getParentTransMeta().setRepositoryDirectory( repositoryDirectory );
    // Mapping is referenced by FILENAME containing a variable that must be expanded.
    StepWithMappingMeta mappingMetaMock = mock( StepWithMappingMeta.class );
    when( mappingMetaMock.getSpecificationMethod() ).thenReturn( ObjectLocationSpecificationMethod.FILENAME );
    when( mappingMetaMock.getFileName() ).thenReturn( "${" + variablePath + "}/" + fileName );
    when( mappingMetaMock.getParentStepMeta() ).thenReturn( stepMeta );
    Repository rep = mock( Repository.class );
    Mockito.doReturn( Mockito.mock( RepositoryDirectoryInterface.class ) ).when( rep ).findDirectory( anyString() );
    TransMeta child = new TransMeta();
    child.setVariable( childParam, childValue );
    child.setVariable( paramOverwrite, childValue );
    Mockito.doReturn( child ).when( rep ).loadTransformation( anyString(), any(), any(), anyBoolean(), any() );
    TransMeta transMeta = StepWithMappingMeta.loadMappingMeta( mappingMetaMock, rep, null, variables, true );
    Assert.assertNotNull( transMeta );
    //When the child parameter does exist in the parent parameters, overwrite the child parameter by the parent parameter.
    Assert.assertEquals( parentValue, transMeta.getVariable( paramOverwrite ) );
    //When the child parameter does not exist in the parent parameters, keep it.
    Assert.assertEquals( childValue, transMeta.getVariable( childParam ) );
    //All other parent parameters need to get copied into the child parameters (when the 'Inherit all
    //variables from the transformation?' option is checked).
    Assert.assertEquals( parentValue, transMeta.getVariable( parentParam ) );
}
|
/**
 * Asserts that the actual value is NOT an instance of {@code clazz}.
 * A null actual value passes (null is not an instance of anything).
 *
 * @throws NullPointerException if {@code clazz} is null
 * @throws UnsupportedOperationException if class metadata is unavailable on this platform
 */
public void isNotInstanceOf(Class<?> clazz) {
    if (clazz == null) {
        throw new NullPointerException("clazz");
    }
    if (Platform.classMetadataUnsupported()) {
        throw new UnsupportedOperationException(
            "isNotInstanceOf is not supported under -XdisableClassMetadata");
    }
    if (actual == null) {
        return; // null is not an instance of clazz.
    }
    if (isInstanceOfType(actual, clazz)) {
        failWithActual("expected not to be an instance of", clazz.getName());
        /*
         * TODO(cpovirk): Consider including actual.getClass() if it's not clazz itself but only a
         * subtype.
         */
    }
}
|
@Test
public void isNotInstanceOfSuperclass() {
    // 5 (an Integer) IS a Number, so isNotInstanceOf(Number.class) must fail.
    expectFailure.whenTesting().that(5).isNotInstanceOf(Number.class);
}
|
/**
 * Validates that the projection contains the required join key column(s).
 * For a foreign-key join, every key column of the leftmost source table must appear
 * (qualified, or via its generated join-column alias). For other joins, at least one
 * viable join key expression must be projected.
 */
@Override
void validateKeyPresent(final SourceName sinkName, final Projection projection) {
    if (joinKey.isForeignKey()) {
        final DataSourceNode leftInputTable = getLeftmostSourceNode();
        final SourceName leftInputTableName = leftInputTable.getAlias();
        final List<Column> leftInputTableKeys = leftInputTable.getDataSource().getSchema().key();
        // A key is "missing" only if neither its qualified form nor its generated
        // alias form appears in the projection.
        final List<Column> missingKeys =
            leftInputTableKeys.stream().filter(
                k -> !projection.containsExpression(
                    new QualifiedColumnReferenceExp(leftInputTableName, k.name()))
                    && !projection.containsExpression(new UnqualifiedColumnReferenceExp(
                        ColumnNames.generatedJoinColumnAlias(leftInputTableName, k.name())
                    ))
            ).collect(Collectors.toList());
        if (!missingKeys.isEmpty()) {
            throwMissingKeyColumnForFkJoinException(missingKeys, leftInputTableName);
        }
    } else {
        // Non-FK join: any one viable key expression in the projection suffices.
        final boolean atLeastOneKey = joinKey.getAllViableKeys(schema).stream()
            .anyMatch(projection::containsExpression);
        if (!atLeastOneKey) {
            final boolean synthetic = joinKey.isSynthetic();
            final List<? extends Expression> viable = joinKey.getOriginalViableKeys(schema);
            throwKeysNotIncludedError(sinkName, "join expression", viable, false, synthetic);
        }
    }
}
|
@Test
public void shouldThrowIfProjectionDoesNotIncludeAnyJoinColumns() {
    // Given: a projection that contains none of the viable join key expressions.
    final JoinNode joinNode = new JoinNode(nodeId, LEFT, joinKey, true, left, right,
        empty(),"KAFKA");
    when(joinKey.getAllViableKeys(any()))
        .thenReturn((List) ImmutableList.of(expression1, expression2));
    when(projection.containsExpression(any())).thenReturn(false);
    when(joinKey.getOriginalViableKeys(any()))
        .thenReturn((List) ImmutableList.of(expression1, expression1, expression2));
    // When:
    final KsqlException e = assertThrows(
        KsqlException.class,
        () -> joinNode.validateKeyPresent(SINK, projection)
    );
    // Then: the error message lists the (de-duplicated display of) viable expressions.
    assertThat(e.getMessage(), containsString("The query used to build `sink` "
        + "must include the join expressions expression1, expression1 or expression2 "
        + "in its projection (eg, SELECT expression1..."));
}
|
/**
 * Replaces the value for {@code key} only if the key is already present
 * (open-addressing linear probe).
 *
 * @param key   the key whose mapping should be replaced
 * @param value the new value; must not be null after null-mapping
 * @return the previous value, or null if the key was absent
 * @throws NullPointerException if the (mapped) value is null
 */
@SuppressWarnings("unchecked")
public V replace(final int key, final V value)
{
    final V val = (V)mapNullValue(value);
    requireNonNull(val, "value cannot be null");
    final int[] keys = this.keys;
    final Object[] values = this.values;
    // Capacity is a power of two, so (length - 1) works as a bit mask for wrap-around.
    @DoNotSub final int mask = values.length - 1;
    @DoNotSub int index = Hashing.hash(key, mask);
    Object oldValue;
    // Probe until an empty slot (key absent) or the matching key is found.
    // NOTE(review): termination relies on the table always keeping at least one
    // empty slot — presumably guaranteed by the map's load-factor resizing; verify.
    while (null != (oldValue = values[index]))
    {
        if (key == keys[index])
        {
            values[index] = val;
            break;
        }
        index = ++index & mask;
    }
    return unmapNullValue(oldValue);
}
|
@Test
void replaceThrowsNullPointerExceptionIfValueIsNull()
{
    // Null values are rejected up front with the documented message.
    final NullPointerException exception =
        assertThrowsExactly(NullPointerException.class, () -> intToObjectMap.replace(42, null));
    assertEquals("value cannot be null", exception.getMessage());
}
|
// Records the current tick as the start of a new request; used later to attribute
// elapsed time to the in-flight request. Synchronized with the other state accessors.
public synchronized void startRequest()
{
    lastRequestStart = ticker.read();
}
|
@Test
public void testStartRequest()
{
    // Verifies that time spent inside a request (between startRequest() and the next
    // failure()) is accumulated into the failure-request-time total.
    TestingTicker ticker = new TestingTicker();
    ticker.increment(1, NANOSECONDS);
    Backoff backoff = new Backoff(1, new Duration(15, SECONDS), ticker, ImmutableList.of(new Duration(10, MILLISECONDS)));
    ticker.increment(10, MICROSECONDS);
    assertFalse(backoff.failure());
    assertEquals(backoff.getFailureCount(), 1);
    assertEquals(backoff.getFailureDuration().roundTo(SECONDS), 0);
    assertEquals(backoff.getFailureRequestTimeTotal().roundTo(SECONDS), 0);
    ticker.increment(7, SECONDS);
    backoff.startRequest();
    ticker.increment(7, SECONDS);
    assertFalse(backoff.failure());
    assertEquals(backoff.getFailureCount(), 2);
    assertEquals(backoff.getFailureDuration().roundTo(SECONDS), 14);
    // failed request took 7 seconds.
    assertEquals(backoff.getFailureRequestTimeTotal().roundTo(SECONDS), 7);
    ticker.increment(1, SECONDS);
    backoff.startRequest();
    ticker.increment(1, SECONDS);
    // After exceeding the max failure duration (15s), failure() reports true.
    assertTrue(backoff.failure());
    assertEquals(backoff.getFailureCount(), 3);
    assertEquals(backoff.getFailureDuration().roundTo(SECONDS), 16);
    // failed requests took 7+1 seconds.
    assertEquals(backoff.getFailureRequestTimeTotal().roundTo(SECONDS), 8);
}
|
/**
 * In-place union with {@code other}. This value's internal representation
 * (EMPTY / SINGLE_VALUE / SET_VALUE / BITMAP_VALUE) may be upgraded as needed,
 * dispatching on the representations of both operands.
 */
public void or(BitmapValue other) {
    switch (other.bitmapType) {
        case EMPTY:
            // Union with empty is a no-op.
            break;
        case SINGLE_VALUE:
            // add() handles this value's own representation upgrades.
            add(other.singleValue);
            break;
        case BITMAP_VALUE:
            // Other is a bitmap: result is always a bitmap.
            switch (this.bitmapType) {
                case EMPTY:
                    this.bitmap = new Roaring64Map();
                    this.bitmap.or(other.bitmap);
                    this.bitmapType = BITMAP_VALUE;
                    break;
                case SINGLE_VALUE:
                    // Copy other's bitmap, then fold in our single value.
                    this.bitmap = new Roaring64Map();
                    this.bitmap.or(other.bitmap);
                    this.bitmap.add(this.singleValue);
                    this.bitmapType = BITMAP_VALUE;
                    break;
                case BITMAP_VALUE:
                    this.bitmap.or(other.bitmap);
                    break;
                case SET_VALUE: {
                    // Copy other's bitmap, then fold in our set elements.
                    this.bitmap = new Roaring64Map();
                    this.bitmap.or(other.bitmap);
                    for (Long v : this.set) {
                        this.bitmap.add(v);
                    }
                    this.bitmapType = BITMAP_VALUE;
                    this.set = null;
                    break;
                }
            }
            break;
        case SET_VALUE:
            switch (this.bitmapType) {
                case EMPTY: {
                    // Take a copy of other's set (never alias the argument's storage).
                    this.set = new HashSet<>();
                    this.set.addAll(other.set);
                    this.bitmapType = SET_VALUE;
                    break;
                }
                case SINGLE_VALUE: {
                    this.set = new HashSet<>(other.set);
                    // Stay a set while small; otherwise upgrade to a bitmap.
                    // NOTE(review): 32 looks like the set-to-bitmap threshold used
                    // elsewhere in this class — confirm it matches add()/fromSetToBitmap().
                    if (other.set.size() < 32) {
                        this.set.add(singleValue);
                        this.bitmapType = SET_VALUE;
                    } else {
                        fromSetToBitmap();
                        this.bitmap.add(singleValue);
                    }
                    break;
                }
                case SET_VALUE: {
                    // add() upgrades to a bitmap automatically if the set grows too large.
                    for (Long v : other.set) {
                        add(v);
                    }
                    break;
                }
                case BITMAP_VALUE: {
                    for (Long v : other.set) {
                        bitmap.add(v);
                    }
                    break;
                }
            }
            break;
    }
}
|
@Test
public void testBitmapValueOr() throws IOException {
    // Exhaustively exercises or() across all representation pairs
    // (EMPTY / SINGLE_VALUE / SET_VALUE / BITMAP_VALUE) including the
    // set-to-bitmap upgrade thresholds.
    // empty or empty
    BitmapValue bitmap = new BitmapValue(emptyBitmap);
    bitmap.or(emptyBitmap);
    checkBitmap(bitmap, BitmapValue.EMPTY, 0, 0);
    // empty or single
    bitmap = new BitmapValue(emptyBitmap);
    bitmap.or(singleBitmap);
    checkBitmap(bitmap, BitmapValue.SINGLE_VALUE, 1, 2);
    // empty or set
    bitmap = new BitmapValue(emptyBitmap);
    bitmap.or(mediumBitmap);
    checkBitmap(bitmap, BitmapValue.SET_VALUE, 0, 10);
    // empty or bitmap
    bitmap = new BitmapValue(emptyBitmap);
    bitmap.or(largeBitmap);
    checkBitmap(bitmap, BitmapValue.BITMAP_VALUE, 0, 40);
    // single or empty
    bitmap = new BitmapValue(singleBitmap);
    bitmap.or(emptyBitmap);
    checkBitmap(bitmap, BitmapValue.SINGLE_VALUE, 1, 2);
    // single or single (equal)
    bitmap = new BitmapValue(singleBitmap);
    bitmap.or(singleBitmap);
    checkBitmap(bitmap, BitmapValue.SINGLE_VALUE, 1, 2);
    // single or single (not equal)
    bitmap = new BitmapValue(singleBitmap);
    bitmap.or(new BitmapValue(2));
    checkBitmap(bitmap, BitmapValue.SET_VALUE, 1, 3);
    // single or bitmap
    bitmap = new BitmapValue(singleBitmap);
    bitmap.or(new BitmapValue(10, 70));
    checkBitmap(bitmap, BitmapValue.BITMAP_VALUE, 1, 2, 10, 70);
    // single or set (->set)
    bitmap = new BitmapValue(singleBitmap);
    bitmap.or(new BitmapValue(10, 20));
    checkBitmap(bitmap, BitmapValue.SET_VALUE, 1, 2, 10, 20);
    // single or set (->bitmap)
    bitmap = new BitmapValue(singleBitmap);
    bitmap.or(new BitmapValue(10, 42));
    checkBitmap(bitmap, BitmapValue.BITMAP_VALUE, 1, 2, 10, 42);
    // single or set (->set)
    bitmap = new BitmapValue(singleBitmap);
    bitmap.or(new BitmapValue(5, 10));
    checkBitmap(bitmap, BitmapValue.SET_VALUE, 1, 2, 5, 10);
    // bitmap or empty
    bitmap = new BitmapValue(largeBitmap);
    bitmap.or(emptyBitmap);
    checkBitmap(bitmap, BitmapValue.BITMAP_VALUE, 0, 40);
    // bitmap or single
    bitmap = new BitmapValue(largeBitmap);
    bitmap.or(new BitmapValue(100));
    checkBitmap(bitmap, BitmapValue.BITMAP_VALUE, 0, 40, 100, 101);
    // bitmap or bitmap
    bitmap = new BitmapValue(largeBitmap);
    bitmap.or(new BitmapValue(30, 80));
    checkBitmap(bitmap, BitmapValue.BITMAP_VALUE, 0, 80);
    // bitmap or set
    bitmap = new BitmapValue(largeBitmap);
    bitmap.or(new BitmapValue(30, 50));
    checkBitmap(bitmap, BitmapValue.BITMAP_VALUE, 0, 50);
    // set or empty
    bitmap = new BitmapValue(mediumBitmap);
    bitmap.or(emptyBitmap);
    checkBitmap(bitmap, BitmapValue.SET_VALUE, 0, 10);
    // set or single (->map)
    bitmap = new BitmapValue(0, 32);
    bitmap.or(new BitmapValue(32));
    checkBitmap(bitmap, BitmapValue.BITMAP_VALUE, 0, 33);
    // set or single (->set)
    bitmap = new BitmapValue(mediumBitmap);
    bitmap.or(new BitmapValue(10));
    checkBitmap(bitmap, BitmapValue.SET_VALUE, 0, 11);
    // set or bitmap
    bitmap = new BitmapValue(mediumBitmap);
    bitmap.or(new BitmapValue(8, 100));
    checkBitmap(bitmap, BitmapValue.BITMAP_VALUE, 0, 100);
    // set or set
    bitmap = new BitmapValue(mediumBitmap);
    bitmap.or(new BitmapValue(3, 15));
    checkBitmap(bitmap, BitmapValue.SET_VALUE, 0, 15);
}
|
/**
 * Deletes a MEP from the store and from its device, then cleans up remote-MEP
 * entries on peer devices. If this was the last MEP in the MD (or MA) on the
 * device, the MD (or MA) itself is removed from the device.
 *
 * @return {@code true} if the MEP was deleted on the device; {@code false} if it
 *         was not found in the store
 * @throws CfmConfigException if the device driver does not support CfmMepProgrammable
 */
@Override
public boolean deleteMep(MdId mdName, MaIdShort maName, MepId mepId,
                         Optional<MaintenanceDomain> oldMd) throws CfmConfigException {
    MepKeyId key = new MepKeyId(mdName, maName, mepId);
    //Will throw IllegalArgumentException if ma does not exist
    cfmMdService.getMaintenanceAssociation(mdName, maName);
    //Get the device ID from the MEP
    Optional<Mep> deletedMep = mepStore.getMep(key);
    if (!deletedMep.isPresent()) {
        log.warn("MEP {} not found when deleting Mep", key);
        return false;
    }
    DeviceId mepDeviceId = deletedMep.get().deviceId();
    boolean deleted = mepStore.deleteMep(key);
    Device mepDevice = deviceService.getDevice(mepDeviceId);
    if (mepDevice == null || !mepDevice.is(CfmMepProgrammable.class)) {
        throw new CfmConfigException("Unexpeced fault on device driver for "
                + mepDeviceId);
    }
    try {
        deleted = mepDevice.as(CfmMepProgrammable.class)
                .deleteMep(mdName, maName, mepId, oldMd);
    } catch (CfmConfigException e) {
        // Best effort: the MEP may already be gone on the device.
        log.warn("MEP could not be deleted on device - perhaps it "
                + "does not exist. Continuing");
    }
    //Iterate through all other devices and remove as a Remote Mep
    int mepsOnMdCount = 0;
    int mepsOnMaCount = 0;
    List<DeviceId> alreadyHandledDevices = new ArrayList<>();
    for (Mep mep : mepStore.getAllMeps()) {
        // Count remaining MEPs on the same device/MD (and MA) to decide cleanup below.
        if (mep.deviceId().equals(mepDeviceId) && mdName.equals(mep.mdId())) {
            mepsOnMdCount++;
            if (maName.equals(mep.maId())) {
                mepsOnMaCount++;
            }
        }
        // Only touch peer devices in the same MD/MA, once each.
        if (mep.deviceId().equals(mepDeviceId) || !mep.mdId().equals(mdName) ||
                !mep.maId().equals(maName) ||
                alreadyHandledDevices.contains(mep.deviceId())) {
            continue;
        }
        deviceService.getDevice(mep.deviceId())
                .as(CfmMepProgrammable.class)
                .deleteMaRemoteMepOnDevice(mdName, maName, mepId);
        alreadyHandledDevices.add(mep.deviceId());
        log.info("Deleted RMep entry on {} on device {}",
                mdName.mdName() + "/" + maName.maName(), mep.deviceId());
    }
    //Also if this is the last MEP in this MA then delete this MA from device
    //If this is the last MA in this MD on device, then delete the MD from the device
    if (mepsOnMdCount == 0) {
        // Fix: result was previously captured in an unused local (deletedMd); call directly.
        deviceService.getDevice(mepDeviceId)
                .as(CfmMepProgrammable.class).deleteMdOnDevice(mdName, oldMd);
        log.info("Deleted MD {} from Device {}", mdName.mdName(), mepDeviceId);
    } else if (mepsOnMaCount == 0) {
        // Fix: result was previously captured in an unused local (deletedMa); call directly.
        deviceService.getDevice(mepDeviceId)
                .as(CfmMepProgrammable.class).deleteMaOnDevice(mdName, maName, oldMd);
        log.info("Deleted MA {} from Device {}",
                mdName.mdName() + "/" + maName.maName(), mepDeviceId);
    }
    return deleted;
}
|
@Test
public void testDeleteMep() throws CfmConfigException {
    // EasyMock: stub MD service, device service and driver lookups, then expect
    // a successful delete for an existing MEP.
    expect(mdService.getMaintenanceAssociation(MDNAME1, MANAME1))
        .andReturn(Optional.ofNullable(ma1))
        .anyTimes();
    replay(mdService);
    expect(deviceService.getDevice(DEVICE_ID1)).andReturn(device1).anyTimes();
    expect(deviceService.getDevice(DEVICE_ID2)).andReturn(device2).anyTimes();
    replay(deviceService);
    expect(driverService.getDriver(DEVICE_ID1)).andReturn(testDriver).anyTimes();
    expect(driverService.getDriver(DEVICE_ID2)).andReturn(testDriver).anyTimes();
    replay(driverService);
    assertTrue(mepManager.deleteMep(MDNAME1, MANAME1, MEPID1, Optional.empty()));
}
|
/**
 * Renders this routine-load description back to its SQL clause form, joining only
 * the clauses that are set (separators, columns, partitions, where) with ", ".
 */
public String toSql() {
    List<String> subSQLs = new ArrayList<>();
    if (columnSeparator != null) {
        subSQLs.add("COLUMNS TERMINATED BY " + columnSeparator.toSql());
    }
    if (rowDelimiter != null) {
        subSQLs.add("ROWS TERMINATED BY " + rowDelimiter.toSql());
    }
    if (columnsInfo != null) {
        String subSQL = "COLUMNS(" +
            columnsInfo.getColumns().stream().map(this::columnToString)
                .collect(Collectors.joining(", ")) +
            ")";
        subSQLs.add(subSQL);
    }
    if (partitionNames != null) {
        String subSQL = null;
        if (partitionNames.isTemp()) {
            subSQL = "TEMPORARY PARTITION";
        } else {
            subSQL = "PARTITION";
        }
        // pack() presumably quotes/escapes each partition name — see its definition.
        subSQL += "(" + partitionNames.getPartitionNames().stream().map(this::pack)
            .collect(Collectors.joining(", "))
            + ")";
        subSQLs.add(subSQL);
    }
    if (wherePredicate != null) {
        // Normalize slot refs before rendering the predicate.
        castSlotRef(wherePredicate.getExpr());
        subSQLs.add("WHERE " + wherePredicate.getExpr().toSql());
    }
    return String.join(", ", subSQLs);
}
|
@Test
public void testToSql() throws Exception {
    // Builds the clause list incrementally and checks toSql() output after each
    // addition, verifying clause order and ", " joining.
    RoutineLoadDesc originLoad = CreateRoutineLoadStmt.getLoadDesc(new OriginStatement("CREATE ROUTINE LOAD job ON tbl " +
        "COLUMNS TERMINATED BY ';', " +
        "ROWS TERMINATED BY '\n', " +
        "COLUMNS(`a`, `b`, `c`=1), " +
        "TEMPORARY PARTITION(`p1`, `p2`), " +
        "WHERE a = 1 " +
        "PROPERTIES (\"desired_concurrent_number\"=\"3\") " +
        "FROM KAFKA (\"kafka_topic\" = \"my_topic\")", 0), null);
    RoutineLoadDesc desc = new RoutineLoadDesc();
    // set column separator and check
    desc.setColumnSeparator(originLoad.getColumnSeparator());
    Assert.assertEquals("COLUMNS TERMINATED BY ';'", desc.toSql());
    // set row delimiter and check
    desc.setRowDelimiter(originLoad.getRowDelimiter());
    Assert.assertEquals("COLUMNS TERMINATED BY ';', " +
        "ROWS TERMINATED BY '\n'", desc.toSql());
    // set columns and check
    desc.setColumnsInfo(originLoad.getColumnsInfo());
    Assert.assertEquals("COLUMNS TERMINATED BY ';', " +
        "ROWS TERMINATED BY '\n', " +
        "COLUMNS(`a`, `b`, `c` = 1)", desc.toSql());
    // set partitions and check
    desc.setPartitionNames(originLoad.getPartitionNames());
    Assert.assertEquals("COLUMNS TERMINATED BY ';', " +
        "ROWS TERMINATED BY '\n', " +
        "COLUMNS(`a`, `b`, `c` = 1), " +
        "TEMPORARY PARTITION(`p1`, `p2`)",
        desc.toSql());
    // set where and check
    desc.setWherePredicate(originLoad.getWherePredicate());
    Assert.assertEquals("COLUMNS TERMINATED BY ';', " +
        "ROWS TERMINATED BY '\n', " +
        "COLUMNS(`a`, `b`, `c` = 1), " +
        "TEMPORARY PARTITION(`p1`, `p2`), " +
        "WHERE `a` = 1",
        desc.toSql());
}
|
/**
 * Converts a Kinesis record's payload to a UTF-8 string. Uses the backing array
 * directly when available; otherwise copies the remaining bytes out of the buffer.
 */
@Converter
public static String toString(Record dataRecord) {
    final ByteBuffer payload = dataRecord.data().asByteBuffer();
    final byte[] raw;
    if (payload.hasArray()) {
        // Array-backed buffer: asByteArray() gives us the bytes without manual copying.
        raw = dataRecord.data().asByteArray();
    } else {
        // Direct/read-only buffer: drain the remaining bytes into a fresh array.
        raw = new byte[payload.remaining()];
        payload.get(raw);
    }
    return new String(raw, StandardCharsets.UTF_8);
}
|
@Test
public void convertRecordToString() {
    // Round-trip: a UTF-8 payload wrapped in SdkBytes converts back to the same string.
    Record record = Record.builder().sequenceNumber("1")
        .data(SdkBytes.fromByteBuffer(ByteBuffer.wrap("this is a String".getBytes(StandardCharsets.UTF_8)))).build();
    String result = RecordStringConverter.toString(record);
    assertThat(result, is("this is a String"));
}
|
@Override
public void commitAndIndexOnEntityEvent(DbSession dbSession, Collection<String> entityUuids, EntityEvent cause) {
    // Delegates to the shared pipeline: each registered indexer prepares its
    // recovery items for the given entities, then commit and indexing happen
    // together inside indexOnEvent.
    indexOnEvent(dbSession, indexer -> indexer.prepareForRecoveryOnEntityEvent(dbSession, entityUuids, cause));
}
|
@Test
public void commitAndIndexOnEntityEvent_shouldCallIndexerWithSupportedItems() {
    // Two indexers each report their own queue items for the same entity event;
    // the facade must forward each indexer exactly its own items.
    List<EsQueueDto> items1 = List.of(EsQueueDto.create("fake/fake1", "P1"), EsQueueDto.create("fake/fake1", "P1"));
    List<EsQueueDto> items2 = List.of(EsQueueDto.create("fake/fake2", "P1"));
    EventIndexer indexer1 = mock(EventIndexer.class);
    EventIndexer indexer2 = mock(EventIndexer.class);
    DbSession dbSession = mock(DbSession.class);
    IndexersImpl underTest = new IndexersImpl(indexer1, indexer2);
    when(indexer1.prepareForRecoveryOnEntityEvent(dbSession, Set.of("P1"), CREATION)).thenReturn(items1);
    when(indexer2.prepareForRecoveryOnEntityEvent(dbSession, Set.of("P1"), CREATION)).thenReturn(items2);
    underTest.commitAndIndexOnEntityEvent(dbSession, Set.of("P1"), CREATION);
    // Each indexer is handed back only the items it prepared.
    verify(indexer1).index(dbSession, items1);
    verify(indexer2).index(dbSession, items2);
}
|
@Override
public SelType call(String methodName, SelType[] args) {
  // Route to the matching helper, selecting first on argument count and then
  // on method name; any unmatched combination is an unsupported call.
  switch (args.length) {
    case 1:
      if ("dateIntToTs".equals(methodName)) {
        return dateIntToTs(args[0]);
      }
      if ("tsToDateInt".equals(methodName)) {
        return tsToDateInt(args[0]);
      }
      break;
    case 2:
      if ("incrementDateInt".equals(methodName)) {
        return incrementDateInt(args[0], args[1]);
      }
      if ("timeoutForDateTimeDeadline".equals(methodName)) {
        return timeoutForDateTimeDeadline(args[0], args[1]);
      }
      if ("timeoutForDateIntDeadline".equals(methodName)) {
        return timeoutForDateIntDeadline(args[0], args[1]);
      }
      break;
    case 3:
      if ("dateIntsBetween".equals(methodName)) {
        return dateIntsBetween(args[0], args[1], args[2]);
      }
      if ("intsBetween".equals(methodName)) {
        return intsBetween(args[0], args[1], args[2]);
      }
      break;
    case 5:
      if ("dateIntHourToTs".equals(methodName)) {
        return dateIntHourToTs(args);
      }
      break;
    default:
      break;
  }
  throw new UnsupportedOperationException(
      type()
          + " DO NOT support calling method: "
          + methodName
          + " with args: "
          + Arrays.toString(args));
}
|
// dateIntToTs must reject impossible calendar dates such as 2020-02-30;
// the underlying date parsing raises IllegalFieldValueException.
@Test(expected = IllegalFieldValueException.class)
public void testCallDateIntToTsInvalid() {
    SelUtilFunc.INSTANCE.call("dateIntToTs", new SelType[] {SelLong.of(20200230)});
}
|
// UDAF factory: returns a fresh Collect aggregator that gathers column values
// of any type T into a single list.
@UdafFactory(description = "collect values of a field into a single Array")
public static <T> TableUdaf<T, List<T>, List<T>> createCollectListT() {
    return new Collect<>();
}
|
@Test
public void shouldCollectTimestamps() {
    // Aggregating two timestamps one by one must keep them in arrival order.
    final TableUdaf<Timestamp, List<Timestamp>, List<Timestamp>> udaf = CollectListUdaf.createCollectListT();
    List<Timestamp> aggregate = udaf.initialize();
    aggregate = udaf.aggregate(new Timestamp(1), aggregate);
    aggregate = udaf.aggregate(new Timestamp(2), aggregate);
    assertThat(aggregate, contains(new Timestamp(1), new Timestamp(2)));
}
|
@Override
public long length() {
    // Synchronous facade: blocks on the async variant and returns its result.
    return get(lengthAsync());
}
|
@Test
public void testLength() {
    // length() is the index of the highest set bit plus one (0 when empty),
    // mirroring java.util.BitSet.length() semantics.
    RBitSet bs = redisson.getBitSet("testbitset");
    bs.set(0, 5);
    bs.clear(0, 1);
    assertThat(bs.length()).isEqualTo(5);
    bs.clear();
    bs.set(28);
    bs.set(31);
    assertThat(bs.length()).isEqualTo(32);
    bs.clear();
    bs.set(3);
    bs.set(7);
    assertThat(bs.length()).isEqualTo(8);
    bs.clear();
    bs.set(3);
    bs.set(120);
    bs.set(121);
    assertThat(bs.length()).isEqualTo(122);
    bs.clear();
    bs.set(0);
    assertThat(bs.length()).isEqualTo(1);
}
|
// Config binding for the "metadata-uri" property; returns this builder-style
// for fluent configuration chaining.
@Config("metadata-uri")
public ExampleConfig setMetadata(URI metadata)
{
    this.metadata = metadata;
    return this;
}
|
@Test
public void testDefaults()
{
    // The metadata URI has no default: a freshly-constructed config records null.
    ConfigAssertions.assertRecordedDefaults(ConfigAssertions.recordDefaults(ExampleConfig.class)
            .setMetadata(null));
}
|
// Evaluates this expression against the file's column metrics; a true result
// means the file may contain matching rows and must be scanned, false means
// it can be safely skipped.
public boolean eval(ContentFile<?> file) {
    // TODO: detect the case where a column is missing from the file using file's max field id.
    return new MetricsEvalVisitor().eval(file);
}
|
@Test
public void testZeroRecordFile() {
    // A file with zero records can never satisfy any predicate, so the
    // evaluator must report "skip" for every expression kind.
    DataFile empty = new TestDataFile("file.parquet", Row.of(), 0);
    Expression[] exprs =
        new Expression[] {
          lessThan("id", 5),
          lessThanOrEqual("id", 30),
          equal("id", 70),
          greaterThan("id", 78),
          greaterThanOrEqual("id", 90),
          notEqual("id", 101),
          isNull("some_nulls"),
          notNull("some_nulls"),
          isNaN("some_nans"),
          notNaN("some_nans"),
        };
    for (Expression expr : exprs) {
      boolean shouldRead = new InclusiveMetricsEvaluator(SCHEMA, expr).eval(empty);
      assertThat(shouldRead).as("Should never read 0-record file: " + expr).isFalse();
    }
}
|
/**
 * Recursively sums the line counts of all regular files under the given path.
 * If the path is itself a regular file, returns that single file's line count.
 *
 * @param dirPath a file or directory path; must not be null
 * @return the total line count; 0 when the directory cannot be listed
 */
public static Long getFileLineNumberFromDir(@NonNull String dirPath) {
    File file = new File(dirPath);
    if (!file.isDirectory()) {
        return getFileLineNumber(file.getPath());
    }
    File[] children = file.listFiles();
    // listFiles() returns null on an I/O error or if the directory vanished;
    // treat that as an empty directory rather than failing.
    if (children == null) {
        return 0L;
    }
    // mapToLong directly avoids the boxed map(...).mapToLong(Long::longValue)
    // round trip of the previous implementation.
    return Arrays.stream(children)
            .mapToLong(
                    child ->
                            child.isDirectory()
                                    ? getFileLineNumberFromDir(child.getPath())
                                    : getFileLineNumber(child.getPath()))
            .sum();
}
|
@Test
public void testGetFileLineNumberFromDir() throws Exception {
    // Build a two-level directory tree with four files and verify the
    // recursive count sums all of them. writeTestDataToFile appears to write
    // 100 lines per file (the assertion expects 100 * 4) — confirm in helper.
    String rootPath = "/tmp/test/file_utils1";
    String dirPath1 = rootPath + "/dir1";
    String dirPath2 = rootPath + "/dir2";
    String file1 = dirPath1 + "/file1.txt";
    String file2 = dirPath1 + "/file2.txt";
    String file3 = dirPath2 + "/file3.txt";
    String file4 = dirPath2 + "/file4.txt";
    // Normalize separators so the test also works on Windows.
    file1 = file1.replace("/", File.separator);
    file2 = file2.replace("/", File.separator);
    file3 = file3.replace("/", File.separator);
    file4 = file4.replace("/", File.separator);
    FileUtils.createNewFile(file1);
    FileUtils.createNewFile(file2);
    FileUtils.createNewFile(file3);
    FileUtils.createNewFile(file4);
    writeTestDataToFile(file1);
    writeTestDataToFile(file2);
    writeTestDataToFile(file3);
    writeTestDataToFile(file4);
    Long lines = FileUtils.getFileLineNumberFromDir(rootPath);
    Assertions.assertEquals(100 * 4, lines);
}
|
// Sets whether the method should be retried on failure; returns the builder
// (via getThis()) for fluent chaining.
public MethodBuilder retry(Boolean retry) {
    this.retry = retry;
    return getThis();
}
|
@Test
void retry() {
    // retry(true) set on the builder must survive build() and be visible
    // through isRetry() on the built config.
    final MethodBuilder methodBuilder = MethodBuilder.newBuilder().retry(true);
    Assertions.assertTrue(methodBuilder.build().isRetry());
}
|
/**
 * Writes a MySQL length-encoded integer to the buffer.
 *
 * <p>Encoding: values below 0xfb fit in one byte (0xfb-0xff are reserved
 * markers); larger values are prefixed with a marker byte (0xfc/0xfd/0xfe)
 * followed by a 2-, 3- or 8-byte little-endian payload.
 *
 * @param value value to be written (treated as unsigned)
 */
public void writeIntLenenc(final long value) {
    if (value < 0xfb) {
        byteBuf.writeByte((int) value);
        return;
    }
    // Exact integer bounds replace the previous Math.pow(2D, 16D)/pow(2D, 24D)
    // floating-point comparisons — same results, no double conversion.
    if (value < (1L << 16)) {
        byteBuf.writeByte(0xfc);
        byteBuf.writeShortLE((int) value);
        return;
    }
    if (value < (1L << 24)) {
        byteBuf.writeByte(0xfd);
        byteBuf.writeMediumLE((int) value);
        return;
    }
    byteBuf.writeByte(0xfe);
    byteBuf.writeLongLE(value);
}
|
@Test
void assertWriteIntLenencWithThreeBytes() {
    // 2^24 - 1 is the largest value using the 3-byte form: marker 0xfd
    // followed by a little-endian medium.
    new MySQLPacketPayload(byteBuf, StandardCharsets.UTF_8).writeIntLenenc(Double.valueOf(Math.pow(2D, 24D)).longValue() - 1L);
    verify(byteBuf).writeByte(0xfd);
    verify(byteBuf).writeMediumLE(Double.valueOf(Math.pow(2D, 24D)).intValue() - 1);
}
|
// Splits a command line into shell-style words: single-quoted groups keep
// their content verbatim, fully double-quoted words are unwrapped, everything
// else is taken as matched by SHELLWORDS_PATTERN.
static List<String> parse(String cmdline) {
    List<String> words = new ArrayList<>();
    Matcher matcher = SHELLWORDS_PATTERN.matcher(cmdline);
    while (matcher.find()) {
        String singleQuoted = matcher.group(1);
        if (singleQuoted != null) {
            words.add(singleQuoted);
            continue;
        }
        String word = matcher.group();
        boolean wrappedInDoubleQuotes = word.startsWith("\"")
                && word.endsWith("\"")
                && word.length() > 2;
        if (wrappedInDoubleQuotes) {
            word = word.substring(1, word.length() - 1);
        }
        words.add(word);
    }
    return words;
}
|
@Test
void ensure_name_with_spaces_works_with_args() {
    // A single-quoted value containing a space must stay one word,
    // with the quotes stripped.
    assertThat(ShellWords.parse("--name 'some Name'"), contains("--name", "some Name"));
}
|
// Converts an array of Spark predicates into a single Iceberg expression by
// AND-ing the converted form of each one; any unconvertible predicate aborts
// the whole translation.
public static Expression convert(Predicate[] predicates) {
    Expression result = Expressions.alwaysTrue();
    for (Predicate predicate : predicates) {
        Expression icebergExpr = convert(predicate);
        Preconditions.checkArgument(
            icebergExpr != null, "Cannot convert Spark predicate to Iceberg expression: %s", predicate);
        result = Expressions.and(result, icebergExpr);
    }
    return result;
}
|
@Test
public void testNotEqualToNull() {
    // col <> NULL is always false in SQL; the converter is expected to reject
    // it for both operand orders rather than produce an expression.
    String col = "col";
    NamedReference namedReference = FieldReference.apply(col);
    LiteralValue value = new LiteralValue(null, DataTypes.IntegerType);
    org.apache.spark.sql.connector.expressions.Expression[] attrAndValue =
        new org.apache.spark.sql.connector.expressions.Expression[] {namedReference, value};
    org.apache.spark.sql.connector.expressions.Expression[] valueAndAttr =
        new org.apache.spark.sql.connector.expressions.Expression[] {value, namedReference};
    Predicate notEq1 = new Predicate("<>", attrAndValue);
    assertThatThrownBy(() -> SparkV2Filters.convert(notEq1))
        .isInstanceOf(NullPointerException.class)
        .hasMessageContaining("Expression is always false");
    Predicate notEq2 = new Predicate("<>", valueAndAttr);
    assertThatThrownBy(() -> SparkV2Filters.convert(notEq2))
        .isInstanceOf(NullPointerException.class)
        .hasMessageContaining("Expression is always false");
}
|
// Builds a Serde for the KAFKA format from the persistence schema.
// The KAFKA format supports exactly zero columns (void serde) or one column;
// anything else is a user error.
static Serde<List<?>> createSerde(final PersistenceSchema schema) {
    final List<SimpleColumn> columns = schema.columns();
    if (columns.isEmpty()) {
        // No columns:
        return new KsqlVoidSerde<>();
    }
    if (columns.size() != 1) {
        throw new KsqlException("The '" + FormatFactory.KAFKA.name()
            + "' format only supports a single field. Got: " + columns);
    }
    // Map the single column's SQL type to its Java type and delegate.
    final SimpleColumn singleColumn = columns.get(0);
    final Class<?> javaType = SchemaConverters.sqlToJavaConverter()
        .toJavaType(singleColumn.type());
    return createSerde(singleColumn, javaType);
}
|
@Test
public void shouldDeserializeNullAsNull() {
    // Given: a single-INTEGER schema
    final PersistenceSchema schema = schemaWithFieldOfType(SqlTypes.INTEGER);
    final Serde<List<?>> serde = KafkaSerdeFactory.createSerde(schema);
    // When: deserializing a null payload (e.g. a tombstone record)
    final Object result = serde.deserializer().deserialize("topic", null);
    // Then: null in, null out — no exception.
    assertThat(result, is(nullValue()));
}
|
// Reads two bytes starting at off, most significant first, as an unsigned
// 16-bit value (0..65535).
public static int bytesToUShortBE(byte[] bytes, int off) {
    final int high = bytes[off] & 0xFF;
    final int low = bytes[off + 1] & 0xFF;
    return (high << 8) | low;
}
|
@Test
public void testBytesToUShortBE() {
    // The big-endian bytes of (short) -12345 must read back as the unsigned
    // 16-bit value -12345 & 0xffff (= 53191).
    assertEquals(-12345 & 0xffff,
            ByteUtils.bytesToUShortBE(SHORT_12345_BE, 0));
}
|
// Merges derived (LIKE-clause) options on top of the source table's options.
// EXCLUDING drops the source options entirely; OVERWRITING lets derived
// options replace source ones; INCLUDING forbids duplicates.
public Map<String, String> mergeOptions(
        MergingStrategy mergingStrategy,
        Map<String, String> sourceOptions,
        Map<String, String> derivedOptions) {
    final Map<String, String> merged = new HashMap<>();
    if (mergingStrategy != MergingStrategy.EXCLUDING) {
        merged.putAll(sourceOptions);
    }
    for (Map.Entry<String, String> derived : derivedOptions.entrySet()) {
        final String key = derived.getKey();
        if (mergingStrategy != MergingStrategy.OVERWRITING
                && merged.containsKey(key)) {
            throw new ValidationException(
                    String.format(
                            "There already exists an option ['%s' -> '%s'] in the "
                                    + "base table. You might want to specify EXCLUDING OPTIONS or OVERWRITING OPTIONS.",
                            key, merged.get(key)));
        }
        merged.put(key, derived.getValue());
    }
    return merged;
}
|
@Test
void mergeIncludingOptionsFailsOnDuplicate() {
    // INCLUDING must reject a derived option that already exists in the
    // source table, and the error must report the source's value.
    Map<String, String> sourceOptions = new HashMap<>();
    sourceOptions.put("offset", "1");
    Map<String, String> derivedOptions = new HashMap<>();
    derivedOptions.put("offset", "2");
    assertThatThrownBy(
            () ->
                    util.mergeOptions(
                            MergingStrategy.INCLUDING, sourceOptions, derivedOptions))
            .isInstanceOf(ValidationException.class)
            .hasMessage(
                    "There already exists an option ['offset' -> '1'] in the base table. "
                            + "You might want to specify EXCLUDING OPTIONS or "
                            + "OVERWRITING OPTIONS.");
}
|
@Override
public int read() throws IOException
{
    checkClosed();
    // End of stream once the overall read pointer reaches the logical size.
    if (pointer >= this.size)
    {
        return -1;
    }
    // Current chunk is exhausted: advance to the next buffer, if there is one.
    if (currentBufferPointer >= chunkSize)
    {
        if (bufferListIndex >= bufferListMaxIndex)
        {
            return -1;
        }
        else
        {
            currentBuffer = bufferList.get(++bufferListIndex);
            currentBufferPointer = 0;
        }
    }
    pointer++;
    // Mask so the byte is returned as an unsigned value (0..255).
    return currentBuffer.get(currentBufferPointer++) & 0xff;
}
|
@Test
void testPDFBOX5161() throws IOException
{
    // Regression for PDFBOX-5161: reading across the boundary of the last,
    // partially-filled chunk (4099 bytes with 4096-byte chunks) must return
    // the remaining 3 bytes instead of failing.
    try (RandomAccessRead rar = new RandomAccessReadBuffer(new ByteArrayInputStream(new byte[4099])))
    {
        byte[] buf = new byte[4096];
        int bytesRead = rar.read(buf);
        assertEquals(4096, bytesRead);
        bytesRead = rar.read(buf, 0, 3);
        assertEquals(3, bytesRead);
    }
}
|
@Override
public void transform(Message message, DataType fromType, DataType toType) {
    // Reads the Google Sheets exchange properties (range, dimension, sheet id,
    // column names, split flag) with sensible defaults, then converts the body:
    // from a ValueRange, from split values, or to a ValueRange model.
    final Optional<ValueRange> valueRange = getValueRangeBody(message);
    String range = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "range", "A:A").toString();
    String majorDimension = message
            .getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "majorDimension", RangeCoordinate.DIMENSION_ROWS).toString();
    String spreadsheetId = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "spreadsheetId", "").toString();
    String[] columnNames
            = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "columnNames", "A").toString().split(",");
    boolean splitResults = Boolean
            .parseBoolean(message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "splitResults", "false").toString());
    if (valueRange.isPresent()) {
        // Body already is a ValueRange: map it to the internal representation.
        message.setBody(
                transformFromValueRangeModel(message, valueRange.get(), spreadsheetId, range, majorDimension, columnNames));
    } else if (splitResults) {
        message.setBody(transformFromSplitValuesModel(message, spreadsheetId, range, majorDimension, columnNames));
    } else {
        // Outbound direction: build a ValueRange from the message body.
        String valueInputOption
                = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "valueInputOption", "USER_ENTERED").toString();
        message.setBody(
                transformToValueRangeModel(message, spreadsheetId, range, majorDimension, valueInputOption, columnNames));
    }
}
|
@Test
public void testTransformToValueRangeColumnNames() throws Exception {
    // A JSON body with custom column names (Foo, Bar) must be mapped into a
    // single-row ValueRange honoring the declared order, with default
    // dimension (ROWS) and value input option (USER_ENTERED).
    Exchange inbound = new DefaultExchange(camelContext);
    inbound.getMessage().setHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "range", "A1:B1");
    inbound.getMessage().setHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "columnNames", "Foo,Bar");
    String model = "{" +
            "\"spreadsheetId\": \"" + spreadsheetId + "\"," +
            "\"Foo\": \"a1\"," +
            "\"Bar\": \"b1\"" +
            "}";
    inbound.getMessage().setBody(model);
    transformer.transform(inbound.getMessage(), DataType.ANY, DataType.ANY);
    Assertions.assertEquals(spreadsheetId, inbound.getMessage().getHeader(GoogleSheetsStreamConstants.SPREADSHEET_ID));
    Assertions.assertEquals("A1:B1", inbound.getMessage().getHeader(GoogleSheetsStreamConstants.RANGE));
    Assertions.assertEquals(RangeCoordinate.DIMENSION_ROWS,
            inbound.getMessage().getHeader(GoogleSheetsStreamConstants.MAJOR_DIMENSION));
    Assertions.assertEquals("USER_ENTERED",
            inbound.getMessage().getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "valueInputOption"));
    ValueRange valueRange = (ValueRange) inbound.getMessage().getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "values");
    Assertions.assertEquals(1L, valueRange.getValues().size());
    Assertions.assertEquals("a1", valueRange.getValues().get(0).get(0));
    Assertions.assertEquals("b1", valueRange.getValues().get(0).get(1));
}
|
// Thin delegation to ReflectUtil: looks up a public method (including
// inherited ones) by name and parameter types; returns null when not found
// or when the method is not public — see ReflectUtil.getPublicMethod.
public static Method getPublicMethod(Class<?> clazz, String methodName, Class<?>... paramTypes) throws SecurityException {
    return ReflectUtil.getPublicMethod(clazz, methodName, paramTypes);
}
|
@Test
public void getPublicMethod() {
    // Public methods — both inherited and declared on the subclass — are
    // found; private methods at either level are not.
    Method superPublicMethod = ClassUtil.getPublicMethod(TestSubClass.class, "publicMethod");
    assertNotNull(superPublicMethod);
    Method superPrivateMethod = ClassUtil.getPublicMethod(TestSubClass.class, "privateMethod");
    assertNull(superPrivateMethod);
    Method publicMethod = ClassUtil.getPublicMethod(TestSubClass.class, "publicSubMethod");
    assertNotNull(publicMethod);
    Method privateMethod = ClassUtil.getPublicMethod(TestSubClass.class, "privateSubMethod");
    assertNull(privateMethod);
}
|
// Wraps a target PreparedStatement together with its originating SQL so the
// proxy layer can intercept execution; all state handling is in the superclass.
public PreparedStatementProxy(AbstractConnectionProxy connectionProxy, PreparedStatement targetStatement,
                              String targetSQL) throws SQLException {
    super(connectionProxy, targetStatement, targetSQL);
}
|
@Test
public void testPreparedStatementProxy() {
    // Both proxy fixtures must have been constructed successfully in setup.
    Assertions.assertNotNull(preparedStatementProxy);
    Assertions.assertNotNull(unusedConstructorPreparedStatementProxy);
}
|
@Override
public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay way, IntsRef relationFlags) {
    // TODO for now the node tag overhead is not worth the effort due to very few data points
    // List<Map<String, Object>> nodeTags = way.getTag("node_tags", null);

    // Only store a restriction when the conditional tag parsed to a definite
    // value; null (unparseable/absent) leaves the edge untouched.
    Boolean b = getConditional(way.getTags());
    if (b != null)
        restrictionSetter.setBoolean(edgeId, edgeIntAccess, b);
}
|
@Test
public void testTaggingMistake() {
    // Malformed access:conditional values must be ignored, leaving MISSING.
    ArrayEdgeIntAccess edgeIntAccess = new ArrayEdgeIntAccess(1);
    int edgeId = 0;
    ReaderWay way = new ReaderWay(0L);
    way.setTag("highway", "road");
    // ignore incomplete values
    way.setTag("access:conditional", "no @ 2023 Mar-Oct");
    parser.handleWayTags(edgeId, edgeIntAccess, way, IntsRef.EMPTY);
    assertEquals(CarTemporalAccess.MISSING, restricted.getEnum(false, edgeId, edgeIntAccess));
    // here the "1" will be interpreted as year -> incorrect range
    way.setTag("access:conditional", "no @ 1 Nov - 1 Mar");
    parser.handleWayTags(edgeId, edgeIntAccess, way, IntsRef.EMPTY);
    assertEquals(CarTemporalAccess.MISSING, restricted.getEnum(false, edgeId, edgeIntAccess));
}
|
// Picks the elements at the given indexes out of an array (of unknown static
// type) and returns them as a new array of the same component type.
// A null array yields null; null indexes yield an empty result array.
public static <T> T[] getAny(Object array, int... indexes) {
    if (null == array) {
        return null;
    }
    final Class<?> componentType = array.getClass().getComponentType();
    if (null == indexes) {
        return newArray(componentType, 0);
    }
    final T[] picked = newArray(componentType, indexes.length);
    for (int i = 0; i < indexes.length; i++) {
        picked[i] = ArrayUtil.get(array, indexes[i]);
    }
    return picked;
}
|
@Test
public void getAnyTest() {
    // Indexes 3 and 4 of {a..e} must come back as {"d", "e"}.
    final String[] a = {"a", "b", "c", "d", "e"};
    final Object o = ArrayUtil.getAny(a, 3, 4);
    final String[] resultO = (String[]) o;
    final String[] c = {"d", "e"};
    assertTrue(ArrayUtil.containsAll(c, resultO[0], resultO[1]));
}
|
@Override
public void append(final LogEvent event) {
    // Nothing to log without a message.
    if(null == event.getMessage()) {
        return;
    }
    // Category name
    final String logger = String.format("%s %s", event.getThreadName(), event.getLoggerName());
    // Map log4j levels onto the unified system log types; unknown levels fall
    // back to the default type.
    Level level = event.getLevel();
    if(Level.FATAL.equals(level) || Level.ERROR.equals(level)) {
        this.log(OS_LOG_TYPE_ERROR, logger, event.getMessage().toString());
    }
    else if(Level.TRACE.equals(level)) {
        this.log(OS_LOG_TYPE_DEBUG, logger, event.getMessage().toString());
    }
    else if(Level.DEBUG.equals(level) || Level.INFO.equals(level)) {
        this.log(OS_LOG_TYPE_INFO, logger, event.getMessage().toString());
    }
    else {
        this.log(OS_LOG_TYPE_DEFAULT, logger, event.getMessage().toString());
    }
    if(ignoreExceptions()) {
        // Appender responsible for rendering
        final Throwable thrown = event.getThrown();
        if(thrown != null) {
            // Emit each stack frame as its own log line.
            final String[] trace = ExceptionUtils.getStackFrames(thrown);
            for(final String t : trace) {
                this.log(OS_LOG_TYPE_DEFAULT, logger, t);
            }
        }
    }
}
|
@Test
public void testAppend유준환() {
    // Smoke test: logging a non-ASCII (Korean) category and message must not
    // throw. (Method name intentionally contains the non-ASCII string.)
    final UnifiedSystemLogAppender a = new UnifiedSystemLogAppender();
    a.log(UnifiedSystemLogAppender.OS_LOG_TYPE_INFO, "http-유준환.txt-1", "유준환");
}
|
/**
 * Verifies that none of the given arguments are {@code null}.
 *
 * @param objs the arguments to check; the array itself must also be non-null
 * @throws NullPointerException if {@code objs} is null or any element of it is
 *         null, with a message identifying the offending index
 */
public static void requireNonNulls(Object... objs) {
    // Fail fast with a clear message if the varargs array itself is null
    // (an explicit (Object[]) null); previously this was a bare NPE from
    // objs.length with no message.
    Objects.requireNonNull(objs, "Argument array is null");
    for (int i = 0; i < objs.length; i++) {
        final int index = i; // effectively-final copy for the lambda
        Objects.requireNonNull(objs[i], () -> "Argument at index %d is null".formatted(index));
    }
}
|
@Test
void testNonNulls() {
    // All-non-null passes; a null at position 1 fails with an index-bearing message.
    assertDoesNotThrow(() -> Validation.requireNonNulls("hei", 123L, List.of("hoi")));
    var exception = assertThrows(NullPointerException.class, () -> Validation.requireNonNulls("hei", null, List.of("hoi")));
    assertEquals("Argument at index 1 is null", exception.getMessage());
}
|
// Fetches logs for an application, trying each candidate log-server URI in
// turn and returning the first successful response. If every URI fails, a 500
// is returned carrying the last failure's message.
public HttpResponse getLogs(ApplicationId applicationId, Optional<DomainName> hostname, Query apiParams) {
    Exception exception = null;
    for (var uri : getLogServerUris(applicationId, hostname)) {
        try {
            return logRetriever.getLogs(uri.withQuery(apiParams), activationTime(applicationId));
        } catch (RuntimeException e) {
            // Remember the failure and fall through to the next URI.
            exception = e;
            log.log(Level.INFO, e.getMessage());
        }
    }
    return HttpErrorResponse.internalServerError(Exceptions.toMessageString(exception));
}
|
@Test
public void getLogsForHostname() {
    // Requesting logs for a specific hostname of a deployed app must succeed.
    ApplicationId applicationId = ApplicationId.from("hosted-vespa", "tenant-host", "default");
    deployApp(testAppLogServerWithContainer, new PrepareParams.Builder().applicationId(applicationId).build());
    HttpResponse response = applicationRepository.getLogs(applicationId, Optional.of(DomainName.localhost), Query.empty());
    assertEquals(200, response.getStatus());
}
|
/**
 * Converts a plugin's JSON validation response into a {@link ValidationResult}.
 *
 * <p>Expected shape: {@code {"errors": {"<key>": "<message>", ...}}}. A null
 * or empty body yields a successful (empty) result. Non-string error messages
 * are treated as a malformed response and reported as a RuntimeException.
 *
 * @param responseBody the raw JSON returned by the plugin
 * @return the parsed validation result
 * @throws RuntimeException if the JSON is malformed or contains non-string
 *         error messages
 */
@Override
public ValidationResult toValidationResult(String responseBody) {
    ValidationResult validationResult = new ValidationResult();
    ArrayList<String> exceptions = new ArrayList<>();
    try {
        Map result = (Map) GSON.fromJson(responseBody, Object.class);
        if (result == null) return validationResult;
        final Map<String, Object> errors = (Map<String, Object>) result.get("errors");
        if (errors != null) {
            for (Map.Entry<String, Object> entry : errors.entrySet()) {
                if (!(entry.getValue() instanceof String)) {
                    exceptions.add(String.format("Key: '%s' - The Json for Validation Request must contain a not-null error message of type String", entry.getKey()));
                } else {
                    validationResult.addError(new ValidationError(entry.getKey(), entry.getValue().toString()));
                }
            }
        }
        if (!exceptions.isEmpty()) {
            // NOTE(review): this is caught by the catch below and re-wrapped
            // with the generic conversion-error message — presumably intended;
            // confirm before changing.
            throw new RuntimeException(StringUtils.join(exceptions, ", "));
        }
        return validationResult;
    } catch (Exception e) {
        LOGGER.error("Error occurred while converting the Json to Validation Result. Error: {}. The Json received was '{}'.", e.getMessage(), responseBody);
        // Preserve the original exception as the cause instead of discarding it
        // (previously only e.getMessage() survived, losing the stack trace).
        throw new RuntimeException(String.format("Error occurred while converting the Json to Validation Result. Error: %s.", e.getMessage()), e);
    }
}
|
@Test
public void shouldConvertJsonResponseToValidationResultWhenValidationPasses() {
    // An empty JSON object means no errors: the result must be successful.
    String jsonResponse = "{}";
    TaskConfig configuration = new TaskConfig();
    TaskConfigProperty property = new TaskConfigProperty("URL", "http://foo");
    property.with(Property.SECURE, false);
    property.with(Property.REQUIRED, true);
    configuration.add(property);
    ValidationResult result = new JsonBasedTaskExtensionHandler_V1().toValidationResult(jsonResponse);
    assertThat(result.isSuccessful(), is(true));
}
|
// After a transformation file has been saved to the repository, reload it and
// fire the TransImportAfterSaveToRepo extension point so plugins can react.
// Failures are logged but never propagated to the caller.
public void convertPostRepoSave( RepositoryFile repositoryFile ) {
    if ( repositoryFile != null ) {
        try {
            Repository repo = connectToRepository();
            if ( repo != null ) {
                TransMeta
                        transMeta =
                        repo.loadTransformation( new StringObjectId( repositoryFile.getId().toString() ), null );
                ExtensionPointHandler.callExtensionPoint( new LogChannel( this ),
                        KettleExtensionPoint.TransImportAfterSaveToRepo.id, transMeta );
            }
        } catch ( Exception e ) {
            // Best-effort hook: log under the extension-point id and continue.
            logger.error( KettleExtensionPoint.TransImportAfterSaveToRepo.id, e );
        }
    }
}
|
@Test
public void convertPostRepoSave() throws Exception {
    // Register a mock extension point for TransImportAfterSaveToRepo, then
    // verify the converter loads the trans from the repo and fires the hook
    // exactly once with the loaded TransMeta.
    StreamToTransNodeConverter converter = mock( StreamToTransNodeConverter.class );
    doCallRealMethod().when( converter ).convertPostRepoSave( any( RepositoryFile.class ) );
    Repository repository = mock( Repository.class );
    when( converter.connectToRepository() ).thenReturn( repository );
    TransMeta transMeta = mock( TransMeta.class );
    when( repository.loadTransformation( any(), any() ) ).thenReturn( transMeta );
    RepositoryFile file = mock( RepositoryFile.class );
    when( file.getId() ).thenReturn( "fileId" );
    PluginMockInterface pluginInterface = mock( PluginMockInterface.class );
    when( pluginInterface.getName() ).thenReturn( KettleExtensionPoint.TransImportAfterSaveToRepo.id );
    when( pluginInterface.getMainType() ).thenReturn( (Class) ExtensionPointInterface.class );
    when( pluginInterface.getIds() ).thenReturn( new String[] {"extensionpointId"} );
    ExtensionPointInterface extensionPoint = mock( ExtensionPointInterface.class );
    when( pluginInterface.loadClass( ExtensionPointInterface.class ) ).thenReturn( extensionPoint );
    PluginRegistry.addPluginType( ExtensionPointPluginType.getInstance() );
    PluginRegistry.getInstance().registerPlugin( ExtensionPointPluginType.class, pluginInterface );
    converter.convertPostRepoSave( file );
    verify( extensionPoint, times( 1 ) ).callExtensionPoint( any( LogChannelInterface.class ), same( transMeta ) );
}
|
/**
 * Constructs a DataflowRunner from the given pipeline options, validating
 * everything the service requires up front: required options (appName,
 * region), GCS locations, files to stage, job name format, project id,
 * worker settings, and debug options. Fails with IllegalArgumentException on
 * the first invalid setting so misconfiguration is caught before submission.
 */
public static DataflowRunner fromOptions(PipelineOptions options) {
    DataflowPipelineOptions dataflowOptions =
        PipelineOptionsValidator.validate(DataflowPipelineOptions.class, options);
    ArrayList<String> missing = new ArrayList<>();

    // Collect all missing required options so the error lists them together.
    if (dataflowOptions.getAppName() == null) {
      missing.add("appName");
    }

    // Region is only mandatory when talking to the hosted Dataflow service.
    if (Strings.isNullOrEmpty(dataflowOptions.getRegion())
        && isServiceEndpoint(dataflowOptions.getDataflowEndpoint())) {
      missing.add("region");
    }
    if (missing.size() > 0) {
      throw new IllegalArgumentException(
          "Missing required pipeline options: " + Joiner.on(',').join(missing));
    }

    validateWorkerSettings(
        PipelineOptionsValidator.validate(DataflowPipelineWorkerPoolOptions.class, options));

    // Both temp and staging locations must be writable output prefixes.
    PathValidator validator = dataflowOptions.getPathValidator();
    String gcpTempLocation;
    try {
      gcpTempLocation = dataflowOptions.getGcpTempLocation();
    } catch (Exception e) {
      throw new IllegalArgumentException(
          "DataflowRunner requires gcpTempLocation, "
              + "but failed to retrieve a value from PipelineOptions",
          e);
    }
    validator.validateOutputFilePrefixSupported(gcpTempLocation);

    String stagingLocation;
    try {
      stagingLocation = dataflowOptions.getStagingLocation();
    } catch (Exception e) {
      throw new IllegalArgumentException(
          "DataflowRunner requires stagingLocation, "
              + "but failed to retrieve a value from PipelineOptions",
          e);
    }
    validator.validateOutputFilePrefixSupported(stagingLocation);

    if (!isNullOrEmpty(dataflowOptions.getSaveProfilesToGcs())) {
      validator.validateOutputFilePrefixSupported(dataflowOptions.getSaveProfilesToGcs());
    }

    if (dataflowOptions.getFilesToStage() != null) {
      // The user specifically requested these files, so fail now if they do not exist.
      // (automatically detected classpath elements are permitted to not exist, so later
      // staging will not fail on nonexistent files)
      dataflowOptions.getFilesToStage().stream()
          .forEach(
              stagedFileSpec -> {
                File localFile;
                // A spec may be "destName=localPath" or just "localPath".
                if (stagedFileSpec.contains("=")) {
                  String[] components = stagedFileSpec.split("=", 2);
                  localFile = new File(components[1]);
                } else {
                  localFile = new File(stagedFileSpec);
                }
                if (!localFile.exists()) {
                  // should be FileNotFoundException, but for build-time backwards compatibility
                  // cannot add checked exception
                  throw new RuntimeException(
                      String.format("Non-existent files specified in filesToStage: %s", localFile));
                }
              });
    } else {
      // No explicit list: stage whatever is on the classpath.
      dataflowOptions.setFilesToStage(
          detectClassPathResourcesToStage(DataflowRunner.class.getClassLoader(), options));
      if (dataflowOptions.getFilesToStage().isEmpty()) {
        throw new IllegalArgumentException("No files to stage has been found.");
      } else {
        LOG.info(
            "PipelineOptions.filesToStage was not specified. "
                + "Defaulting to files from the classpath: will stage {} files. "
                + "Enable logging at DEBUG level to see which files will be staged.",
            dataflowOptions.getFilesToStage().size());
        LOG.debug("Classpath elements: {}", dataflowOptions.getFilesToStage());
      }
    }

    // Verify jobName according to service requirements, truncating converting to lowercase if
    // necessary.
    String jobName = dataflowOptions.getJobName().toLowerCase();
    checkArgument(
        jobName.matches("[a-z]([-a-z0-9]*[a-z0-9])?"),
        "JobName invalid; the name must consist of only the characters "
            + "[-a-z0-9], starting with a letter and ending with a letter "
            + "or number");
    if (!jobName.equals(dataflowOptions.getJobName())) {
      LOG.info(
          "PipelineOptions.jobName did not match the service requirements. "
              + "Using {} instead of {}.",
          jobName,
          dataflowOptions.getJobName());
    }
    dataflowOptions.setJobName(jobName);

    // Verify project
    String project = dataflowOptions.getProject();
    if (project.matches("[0-9]*")) {
      // All-digits means the user gave the numeric project number, not the id.
      throw new IllegalArgumentException(
          "Project ID '"
              + project
              + "' invalid. Please make sure you specified the Project ID, not project number.");
    } else if (!project.matches(PROJECT_ID_REGEXP)) {
      throw new IllegalArgumentException(
          "Project ID '"
              + project
              + "' invalid. Please make sure you specified the Project ID, not project"
              + " description.");
    }

    DataflowPipelineDebugOptions debugOptions =
        dataflowOptions.as(DataflowPipelineDebugOptions.class);
    // Verify the number of worker threads is a valid value
    if (debugOptions.getNumberOfWorkerHarnessThreads() < 0) {
      throw new IllegalArgumentException(
          "Number of worker harness threads '"
              + debugOptions.getNumberOfWorkerHarnessThreads()
              + "' invalid. Please make sure the value is non-negative.");
    }

    // Verify that if recordJfrOnGcThrashing is set, the pipeline is at least on java 11
    if (dataflowOptions.getRecordJfrOnGcThrashing()
        && Environments.getJavaVersion() == Environments.JavaVersion.java8) {
      throw new IllegalArgumentException(
          "recordJfrOnGcThrashing is only supported on java 9 and up.");
    }

    if (dataflowOptions.isStreaming() && dataflowOptions.getGcsUploadBufferSizeBytes() == null) {
      dataflowOptions.setGcsUploadBufferSizeBytes(GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT);
    }

    // Adding the Java version to the SDK name for user's and support convenience.
    String agentJavaVer = "(JRE 8 environment)";
    if (Environments.getJavaVersion() != Environments.JavaVersion.java8) {
      agentJavaVer =
          String.format("(JRE %s environment)", Environments.getJavaVersion().specification());
    }

    DataflowRunnerInfo dataflowRunnerInfo = DataflowRunnerInfo.getDataflowRunnerInfo();
    String userAgentName = dataflowRunnerInfo.getName();
    Preconditions.checkArgument(
        !userAgentName.equals(""), "Dataflow runner's `name` property cannot be empty.");
    String userAgentVersion = dataflowRunnerInfo.getVersion();
    Preconditions.checkArgument(
        !userAgentVersion.equals(""), "Dataflow runner's `version` property cannot be empty.");
    // Spaces are not allowed in user-agent tokens; collapse them to underscores.
    String userAgent =
        String.format("%s/%s%s", userAgentName, userAgentVersion, agentJavaVer).replace(" ", "_");
    dataflowOptions.setUserAgent(userAgent);

    return new DataflowRunner(dataflowOptions);
}
|
@Test
public void testRegionRequiredForServiceRunner() throws IOException {
    // Against the hosted service endpoint, a missing region must be rejected.
    DataflowPipelineOptions options = buildPipelineOptions();
    options.setRegion(null);
    options.setDataflowEndpoint("https://dataflow.googleapis.com");
    assertThrows(IllegalArgumentException.class, () -> DataflowRunner.fromOptions(options));
}
|
// Shuts down the repository: closes the cache and file distribution first,
// then stops the executors and waits (bounded) for in-flight work to finish.
// An interrupt during the waits is logged and the interrupt flag restored.
public void close() {
    directoryCache.close();
    fileDistributionFactory.close();
    try {
        zkCacheExecutor.shutdown();
        checkForRemovedApplicationsService.shutdown();
        zkApplicationWatcherExecutor.shutdownAndWait();
        zkSessionWatcherExecutor.shutdownAndWait();
        // Bounded waits so close() cannot hang indefinitely.
        zkCacheExecutor.awaitTermination(50, TimeUnit.SECONDS);
        checkForRemovedApplicationsService.awaitTermination(50, TimeUnit.SECONDS);
    }
    catch (InterruptedException e) {
        log.log(Level.WARNING, "Interrupted while shutting down.", e);
        Thread.currentThread().interrupt();
    }
}
|
@Test
public void testFailingBootstrap() {
    // Bootstrapping with a repository that fails tenant creation must surface
    // a RuntimeException naming the tenant that could not be created.
    tenantRepository.close(); // stop using the one setup in Before method
    expectedException.expect(RuntimeException.class);
    expectedException.expectMessage("Could not create all tenants when bootstrapping, failed to create: [default]");
    new FailingDuringBootstrapTenantRepository(configserverConfig);
}
|
// Message-only constructor; delegates straight to the superclass.
public JsonParseException(String message) {
    super(message);
}
|
@Test
public void testJsonParseException() {
    // All three constructor overloads (message, message+cause, cause) must
    // produce a throwable of exactly JsonParseException.
    Assertions.assertThrowsExactly(JsonParseException.class, () -> {
        throw new JsonParseException("error");
    });
    Assertions.assertThrowsExactly(JsonParseException.class, () -> {
        throw new JsonParseException("error", new Throwable("error"));
    });
    Assertions.assertThrowsExactly(JsonParseException.class, () -> {
        throw new JsonParseException(new Throwable("error"));
    });
}
|
/**
 * Compares an old and a new provider list and collects the differences.
 *
 * @param oldList the previous provider list (may be null or empty)
 * @param newList the current provider list (may be null or empty)
 * @param add     output: providers present in newList but not in oldList
 * @param remove  output: providers present in oldList but not in newList
 */
public static void compareProviders(List<ProviderInfo> oldList, List<ProviderInfo> newList,
                                    List<ProviderInfo> add, List<ProviderInfo> remove) {
    if (CommonUtils.isEmpty(oldList)) {
        // empty -> non-empty: everything new is an addition; empty -> empty is a no-op
        if (CommonUtils.isNotEmpty(newList)) {
            add.addAll(newList);
        }
        return;
    }
    if (CommonUtils.isEmpty(newList)) {
        // non-empty -> empty: everything old is a removal
        remove.addAll(oldList);
        return;
    }
    // non-empty -> non-empty: diff the two lists. (The previous version
    // re-checked isNotEmpty(oldList) here, which is always true on this path.)
    List<ProviderInfo> additionCandidates = new ArrayList<ProviderInfo>(newList);
    for (ProviderInfo oldProvider : oldList) {
        if (additionCandidates.contains(oldProvider)) {
            // unchanged provider: not an addition, not a removal
            additionCandidates.remove(oldProvider);
        } else {
            // present before, gone now
            remove.add(oldProvider);
        }
    }
    // whatever is left of newList was not in oldList
    add.addAll(additionCandidates);
}
|
// Exercises ProviderHelper.compareGroup over the four diff scenarios:
// empty->empty, non-empty->empty, empty->non-empty, and partially overlapping lists.
@Test
public void compareProviders() throws Exception {
    ProviderGroup group1 = new ProviderGroup("a");
    ProviderGroup group2 = new ProviderGroup("a");
    List<ProviderInfo> oldList = new ArrayList<ProviderInfo>();
    List<ProviderInfo> newList = new ArrayList<ProviderInfo>();
    List<ProviderInfo> add = new ArrayList<ProviderInfo>();
    List<ProviderInfo> remove = new ArrayList<ProviderInfo>();
    group1.setProviderInfos(oldList);
    group2.setProviderInfos(newList);
    {
        // both empty: no additions, no removals
        ProviderHelper.compareGroup(group1, group2, add, remove);
        Assert.assertEquals(add.size(), 0);
        Assert.assertEquals(remove.size(), 0);
    }
    {
        // old has three entries, new is empty: all three removed
        oldList.clear();
        newList.clear();
        add.clear();
        remove.clear();
        oldList.add(ProviderHelper.toProviderInfo("127.0.0.1:12200?p=11&v=4.0"));
        oldList.add(ProviderHelper.toProviderInfo("127.0.0.2:12200?p=11&v=4.0"));
        oldList.add(ProviderHelper.toProviderInfo("127.0.0.3:12200?p=11&v=4.0"));
        ProviderHelper.compareGroup(group1, group2, add, remove);
        Assert.assertEquals(add.size(), 0);
        Assert.assertEquals(remove.size(), 3);
    }
    {
        // old empty, new has three entries: all three added
        oldList.clear();
        newList.clear();
        add.clear();
        remove.clear();
        newList.add(ProviderHelper.toProviderInfo("127.0.0.1:12200?p=11&v=4.0"));
        newList.add(ProviderHelper.toProviderInfo("127.0.0.2:12200?p=11&v=4.0"));
        newList.add(ProviderHelper.toProviderInfo("127.0.0.3:12200?p=11&v=4.0"));
        ProviderHelper.compareGroup(group1, group2, add, remove);
        Assert.assertEquals(add.size(), 3);
        Assert.assertEquals(remove.size(), 0);
    }
    {
        // one common entry (.1), two unique on each side: two added, two removed
        oldList.clear();
        newList.clear();
        add.clear();
        remove.clear();
        oldList.add(ProviderHelper.toProviderInfo("127.0.0.1:12200?p=11&v=4.0"));
        oldList.add(ProviderHelper.toProviderInfo("127.0.0.2:12200?p=11&v=4.0"));
        oldList.add(ProviderHelper.toProviderInfo("127.0.0.3:12200?p=11&v=4.0"));
        newList.add(ProviderHelper.toProviderInfo("127.0.0.1:12200?p=11&v=4.0"));
        newList.add(ProviderHelper.toProviderInfo("127.0.0.4:12200?p=11&v=4.0"));
        newList.add(ProviderHelper.toProviderInfo("127.0.0.5:12200?p=11&v=4.0"));
        ProviderHelper.compareGroup(group1, group2, add, remove);
        Assert.assertEquals(add.size(), 2);
        Assert.assertEquals(remove.size(), 2);
    }
}
|
@Override
public void setNonNullParameter(final PreparedStatement preparedStatement, final int columnIndex,
                                final Boolean columnValue, final JdbcType jdbcType) throws SQLException {
    // Persist the boolean as an integer column: TRUE -> 1, FALSE -> 0.
    if (columnValue) {
        preparedStatement.setInt(columnIndex, 1);
    } else {
        preparedStatement.setInt(columnIndex, 0);
    }
}
|
// Verifies that binding a non-null Boolean via the handler completes without throwing.
@Test
public void setNonNullParameterTest() {
    final OpenGaussSQLBooleanHandler openGaussSQLBooleanHandler = new OpenGaussSQLBooleanHandler();
    Assertions.assertDoesNotThrow(() -> openGaussSQLBooleanHandler.setNonNullParameter(mock(PreparedStatement.class), 1, true, JdbcType.BIGINT));
}
|
/**
 * Converts an HTTP/1.x message's headers into HTTP/2 headers.
 * <p>
 * For requests, fills the {@code :path}, {@code :scheme}, {@code :authority} and
 * {@code :method} pseudo-headers from the request line and the HOST header; for
 * responses, fills {@code :status}. All remaining HTTP/1.x headers are then copied
 * via the headers-only overload.
 *
 * @param in              the HTTP/1.x message to convert
 * @param validateHeaders whether the resulting {@link Http2Headers} should validate entries
 * @return the converted HTTP/2 headers
 */
public static Http2Headers toHttp2Headers(HttpMessage in, boolean validateHeaders) {
    HttpHeaders inHeaders = in.headers();
    final Http2Headers out = new DefaultHttp2Headers(validateHeaders, inHeaders.size());
    if (in instanceof HttpRequest) {
        HttpRequest request = (HttpRequest) in;
        String host = inHeaders.getAsString(HttpHeaderNames.HOST);
        if (isOriginForm(request.uri()) || isAsteriskForm(request.uri())) {
            // origin-form ("/path") or asterisk-form ("*"): the URI is the path verbatim
            out.path(new AsciiString(request.uri()));
            setHttp2Scheme(inHeaders, out);
        } else {
            // absolute-form or authority-form: parse the URI for path/authority/scheme
            URI requestTargetUri = URI.create(request.uri());
            out.path(toHttp2Path(requestTargetUri));
            // Take from the request-line if HOST header was empty
            host = isNullOrEmpty(host) ? requestTargetUri.getAuthority() : host;
            setHttp2Scheme(inHeaders, requestTargetUri, out);
        }
        setHttp2Authority(host, out);
        out.method(request.method().asciiName());
    } else if (in instanceof HttpResponse) {
        HttpResponse response = (HttpResponse) in;
        out.status(response.status().codeAsText());
    }
    // Add the HTTP headers which have not been consumed above
    toHttp2Headers(inHeaders, out);
    return out;
}
|
// Verifies that a TE header whose CSV value only resembles (but is not exactly)
// "trailers" — here "trailersfoo" — is stripped entirely during HTTP/2 conversion.
@Test
public void stripTEHeadersCsvSeparatedAccountsForValueSimilarToTrailers() {
    HttpHeaders inHeaders = new DefaultHttpHeaders();
    inHeaders.add(TE, GZIP + "," + TRAILERS + "foo");
    Http2Headers out = new DefaultHttp2Headers();
    HttpConversionUtil.toHttp2Headers(inHeaders, out);
    assertFalse(out.contains(TE));
}
|
/**
 * Reports whether the member registered under the given address is unhealthy.
 * An unknown address is not considered unhealthy.
 *
 * @param address the member address to look up
 * @return true only when the member exists and its state is not UP
 */
public boolean isUnHealth(String address) {
    Member found = serverList.get(address);
    return found != null && !NodeState.UP.equals(found.getState());
}
|
// An address absent from the server list must not be reported as unhealthy.
@Test
void testIsUnHealth() {
    assertFalse(serverMemberManager.isUnHealth("1.1.1.1"));
}
|
/**
 * Records a data-influence entry for an audited argument. A collection argument
 * produces one entry per element; any other argument produces a single entry.
 * Nothing is recorded when any of the three inputs is null.
 */
void parseArgAndAppend(String entityName, String fieldName, Object arg) {
    if (entityName == null || fieldName == null || arg == null) {
        return;
    }
    if (arg instanceof Collection) {
        for (Object element : (Collection<?>) arg) {
            api.appendDataInfluence(entityName, ApolloAuditConstants.ANY_MATCHED_ID, fieldName,
                String.valueOf(element));
        }
    } else {
        api.appendDataInfluence(entityName, ApolloAuditConstants.ANY_MATCHED_ID, fieldName,
            String.valueOf(arg));
    }
}
|
// A non-collection argument must produce exactly one appendDataInfluence call
// with the wildcard entity id.
@Test
public void testParseArgAndAppendCaseNormalTypeArg() {
    final String entityName = "App";
    final String fieldName = "Name";
    Object arg = new Object();
    {
        doNothing().when(api).appendDataInfluence(any(), any(), any(), any());
    }
    aspect.parseArgAndAppend(entityName, fieldName, arg);
    verify(api, times(1)).appendDataInfluence(eq(entityName),
        eq(ApolloAuditConstants.ANY_MATCHED_ID), eq(fieldName), any());
}
|
/**
 * @return the middle-line indicator of this facade
 */
public NumericIndicator middle() {
    return middle;
}
|
// The facade's middle indicator must be built on the same bar series it was created with.
@Test
public void testCreation() {
    final KeltnerChannelFacade facade = new KeltnerChannelFacade(data, 14, 14, 2);
    assertEquals(data, facade.middle().getBarSeries());
}
|
/**
 * Resolves all relative segments and symbolic links of the given path.
 *
 * @param path the path to canonicalize
 * @return the real path, or null when it cannot be resolved (e.g. the file does not exist)
 */
public static Path resolveRealPath(Path path) {
    try {
        return path.toRealPath();
    } catch (IOException e) {
        // toRealPath requires an existing file; treat any failure as "unresolvable".
        LOG.error("Could not resolve real location of path [{}].", path, e);
        return null;
    }
}
|
// Resolving a non-existent path must yield null rather than throwing.
@Test
public void realPathNullWhenDoesNotExist() {
    final Path path = Paths.get("non-existent-file-path");
    assertNull(AllowedAuxiliaryPathChecker.resolveRealPath(path));
}
|
/**
 * Determines the storage quota for the current session.
 * <p>
 * Prefers the quota attribute of the home folder when one is set; otherwise falls
 * back to the account-wide space usage and limit reported by the customer info API.
 *
 * @return available and used space
 * @throws BackgroundException when the customer info request fails
 */
@Override
public Space get() throws BackgroundException {
    try {
        final Path home = new DefaultHomeFinderService(session).find();
        if(!home.isRoot()) {
            if(SDSQuotaFeature.unknown == home.attributes().getQuota()) {
                log.warn(String.format("No quota set for node %s", home));
            }
            else {
                return home.attributes().getQuota();
            }
        }
        // No per-node quota available: fall back to account-level customer info
        final CustomerData info = new UserApi(session.getClient()).requestCustomerInfo(StringUtils.EMPTY);
        return new Space(info.getSpaceUsed(), info.getSpaceLimit() - info.getSpaceUsed());
    }
    catch(ApiException e) {
        throw new SDSExceptionMappingService(nodeid).map("Failure to read attributes of {0}", e,
            new Path(String.valueOf(Path.DELIMITER), EnumSet.of(Path.Type.volume, Path.Type.directory)));
    }
}
|
// Integration test: the quota feature must report non-null available and used space.
@Test
public void testAccount() throws Exception {
    final SDSNodeIdProvider nodeid = new SDSNodeIdProvider(session);
    final Quota.Space quota = new SDSQuotaFeature(session, nodeid).get();
    assertNotNull(quota.available);
    assertNotNull(quota.used);
}
|
/**
 * Looks up a table by name. Only the single default database is exposed by this
 * metadata implementation; any other database name yields null.
 */
@Override
public Table getTable(String dbName, String tblName) {
    return DEFAULT_DB.equalsIgnoreCase(dbName)
            ? toEsTable(esRestClient, properties, tblName, dbName, catalogName)
            : null;
}
|
// A missing index in the default database and any non-default database must both yield null.
@Test
public void testGetTable(@Mocked EsRestClient client) {
    ElasticsearchMetadata metadata = new ElasticsearchMetadata(client, new HashMap<>(), "catalog");
    Assert.assertNull(metadata.getTable("default_db", "not_exist_index"));
    Assert.assertNull(metadata.getTable("aaaa", "tbl"));
}
|
/**
 * Rotates the given index set, delegating the rotation decision to {@code shouldRotate}.
 */
@Override
public void rotate(IndexSet indexSet) {
    indexRotator.rotate(indexSet, this::shouldRotate);
}
|
// A null index-set configuration must be rejected with a descriptive NullPointerException.
@Test
public void shouldRotateThrowsNPEIfIndexSetConfigIsNull() throws Exception {
    when(indexSet.getConfig()).thenReturn(null);
    when(indexSet.getNewestIndex()).thenReturn(IGNORED);
    expectedException.expect(NullPointerException.class);
    expectedException.expectMessage("Index set configuration must not be null");
    rotationStrategy.rotate(indexSet);
}
|
/**
 * Creates a bundle factory bound to the given executable stage.
 */
@Override
public StageBundleFactory forStage(ExecutableStage executableStage) {
    return new SimpleStageBundleFactory(executableStage);
}
|
// Verifies that requesting two stage bundle factories for the same environment
// creates the underlying environment only once (i.e. the environment is cached).
@Test
public void cachesEnvironment() throws Exception {
    try (DefaultJobBundleFactory bundleFactory =
        createDefaultJobBundleFactory(envFactoryProviderMap)) {
      StageBundleFactory bf1 = bundleFactory.forStage(getExecutableStage(environment));
      StageBundleFactory bf2 = bundleFactory.forStage(getExecutableStage(environment));
      // NOTE: We hang on to stage bundle references to ensure their underlying environments are not
      // garbage collected. For additional safety, we print the factories to ensure the references
      // are not optimized away.
      System.out.println("bundle factory 1:" + bf1);
      // Fixed copy-paste label: the second factory was previously also printed as "1".
      System.out.println("bundle factory 2:" + bf2);
      verify(envFactory).createEnvironment(eq(environment), any());
      verifyNoMoreInteractions(envFactory);
    }
}
|
/**
 * Offers an item to the outbox. Ordinal -1 broadcasts to all edges; any other valid
 * ordinal targets a single edge. The ordinal equal to bucketCount() addresses the
 * snapshot queue and is rejected here.
 *
 * @return true when the item was accepted
 */
@Override
public final boolean offer(int ordinal, @Nonnull Object item) {
    // -1 is the broadcast ordinal: emit to every edge at once.
    if (ordinal == -1) {
        return offerInternal(allEdges, item);
    }
    if (ordinal == bucketCount()) {
        // ordinal beyond bucketCount will add to snapshot queue, which we don't allow through this method
        throw new IllegalArgumentException("Illegal edge ordinal: " + ordinal);
    }
    singleEdge[0] = ordinal;
    return offerInternal(singleEdge, item);
}
|
// Offering through the int[]-ordinal overload must also be subject to rate limiting.
@Test
public void when_offer4_then_rateLimited() {
    do_when_offer_then_rateLimited(e -> outbox.offer(new int[] {0}, e));
}
|
/**
 * Creates a stream summary that tracks at most {@code capacity} distinct elements.
 *
 * @param capacity maximum number of counters to maintain
 */
public StreamSummary(int capacity) {
    this.capacity = capacity;
    this.counterMap = new HashMap<T, ListNode2<Counter<T>>>();
    this.bucketList = new DoublyLinkedList<Bucket>();
}
|
// Smoke test: feeds a fixed element stream into a capacity-3 summary and prints
// the summary after each offer (no assertions; visual inspection only).
@Test
public void testStreamSummary() {
    StreamSummary<String> vs = new StreamSummary<String>(3);
    String[] stream = {"X", "X", "Y", "Z", "A", "B", "C", "X", "X", "A", "A", "A"};
    for (String i : stream) {
        vs.offer(i);
        /*
        for(String s : vs.poll(3))
        System.out.print(s+" ");
        */
        System.out.println(vs);
    }
}
|
/**
 * Builds the versioned key-value store by wrapping the supplier's bytes store with
 * optional change logging and a metering layer. The supplier must return a
 * {@link VersionedBytesStore}; caching is deliberately not applied to versioned stores.
 *
 * @return the fully wrapped store
 * @throws IllegalStateException when the supplier returns a non-versioned store
 */
@Override
public VersionedKeyValueStore<K, V> build() {
    final KeyValueStore<Bytes, byte[]> store = storeSupplier.get();
    if (!(store instanceof VersionedBytesStore)) {
        throw new IllegalStateException("VersionedBytesStoreSupplier.get() must return an instance of VersionedBytesStore");
    }
    return new MeteredVersionedKeyValueStore<>(
        maybeWrapLogging((VersionedBytesStore) store), // no caching layer for versioned stores
        storeSupplier.metricsScope(),
        time,
        keySerde,
        valueSerde);
}
|
// By default the built store must be metered on the outside and change-logging underneath.
@Test
public void shouldHaveChangeLoggingStoreByDefault() {
    setUp();
    final VersionedKeyValueStore<String, String> store = builder.build();
    assertThat(store, instanceOf(MeteredVersionedKeyValueStore.class));
    final StateStore next = ((WrappedStateStore) store).wrapped();
    assertThat(next, instanceOf(ChangeLoggingVersionedKeyValueBytesStore.class));
}
|
/**
 * Creates the component for a branch by delegating to the edition-specific implementation.
 *
 * @throws IllegalStateException when the current edition does not provide branch support
 */
ComponentDto createBranchComponent(DbSession dbSession, ComponentKey componentKey, ComponentDto mainComponentDto, BranchDto mainComponentBranchDto) {
    checkState(delegate != null, "Current edition does not support branch feature");
    return delegate.createBranchComponent(dbSession, componentKey, mainComponentDto, mainComponentBranchDto);
}
|
// The branch-enabled implementation must pass all arguments through to the delegate
// and return the delegate's result unchanged.
@Test
public void createBranchComponent_delegates_to_delegate() {
    DbSession dbSession = mock(DbSession.class);
    ComponentKey componentKey = mock(ComponentKey.class);
    ComponentDto mainComponentDto = new ComponentDto();
    ComponentDto expected = new ComponentDto();
    BranchDto mainComponentBranchDto = new BranchDto();
    when(branchSupportDelegate.createBranchComponent(dbSession, componentKey, mainComponentDto, mainComponentBranchDto))
        .thenReturn(expected);
    ComponentDto dto = underTestWithBranch.createBranchComponent(dbSession, componentKey, mainComponentDto, mainComponentBranchDto);
    assertThat(dto).isSameAs(expected);
}
|
/**
 * Parses the rest-of-path of an i18n bundle request into its components:
 * plugin name, plugin version, bundle name and an optional locale
 * (language[-country[-variant]]).
 *
 * @param restOfPath path after the servlet mapping, e.g. "/blue/rest/i18n/plugin/1.0/bundle/en-US"
 * @return the parsed parameters, or null when the path does not have 3 or 4 usable segments
 */
@CheckForNull
static BundleParams getBundleParameters(String restOfPath) {
    if (restOfPath == null || restOfPath.length() == 0) {
        return null;
    }
    String[] pathTokens = restOfPath.split("/");
    List<String> bundleParameters = new ArrayList<>();
    for (String pathToken : pathTokens) {
        if (pathToken.length() > 0) {
            bundleParameters.add(urlDecode(pathToken));
        }
    }
    // Path should be prefixed with /blue/rest/i18n.
    // Let's remove those. Guard each get(0): a path consisting only of separators
    // or only of prefix tokens (e.g. "/" or "/blue") would otherwise throw
    // IndexOutOfBoundsException instead of returning null.
    if (!bundleParameters.isEmpty() && bundleParameters.get(0).equals("blue")) {
        bundleParameters.remove(0);
    }
    if (!bundleParameters.isEmpty() && bundleParameters.get(0).equals("rest")) {
        bundleParameters.remove(0);
    }
    if (!bundleParameters.isEmpty() && bundleParameters.get(0).equals("i18n")) {
        bundleParameters.remove(0);
    }
    if (bundleParameters.size() != 3 && bundleParameters.size() != 4) {
        return null;
    }
    BundleParams bundleParams = new BundleParams(
        bundleParameters.get(0),
        bundleParameters.get(1),
        bundleParameters.get(2)
    );
    if (bundleParameters.size() == 4) {
        // https://www.w3.org/International/questions/qa-lang-priorities
        // in case we have regions/countries in the language query parameter
        String locale = bundleParameters.get(3);
        String[] localeTokens = locale.split("-|_");
        bundleParams.language = localeTokens[0];
        if (localeTokens.length > 1) {
            bundleParams.country = localeTokens[1];
            if (localeTokens.length > 2) {
                bundleParams.variant = localeTokens[2];
            }
        }
    }
    return bundleParams;
}
|
// Parses a valid bundle URL without a locale (all locale fields null) and one with
// a plain language locale ("en"); both must yield the plugin/version/bundle triple.
@Test
public void test_getBundleParameters_valid_url() {
    BlueI18n.BundleParams bundleParameters = BlueI18n.getBundleParameters("/blue/rest/i18n/pluginx/1.0.0/pluginx.bundle");
    Assert.assertNotNull(bundleParameters);
    Assert.assertEquals("pluginx", bundleParameters.pluginName);
    Assert.assertEquals("1.0.0", bundleParameters.pluginVersion);
    Assert.assertEquals("pluginx.bundle", bundleParameters.bundleName);
    Assert.assertNull(bundleParameters.language);
    Assert.assertNull(bundleParameters.country);
    Assert.assertNull(bundleParameters.variant);
    Assert.assertNull(bundleParameters.getLocale());
    bundleParameters = BlueI18n.getBundleParameters("/blue/rest/i18n/pluginx/1.0.0/pluginx.bundle/en");
    Assert.assertNotNull(bundleParameters);
    Assert.assertEquals("pluginx", bundleParameters.pluginName);
    Assert.assertEquals("1.0.0", bundleParameters.pluginVersion);
    Assert.assertEquals("pluginx.bundle", bundleParameters.bundleName);
    Assert.assertEquals("en", bundleParameters.language);
    Assert.assertNull(bundleParameters.country);
    Assert.assertNull(bundleParameters.variant);
    Assert.assertNotNull(bundleParameters.getLocale());
    Assert.assertEquals("en", bundleParameters.getLocale().toString());
}
|
/**
 * Resolves the identity provider for the request and, when one is found, delegates
 * the init handling to it. When resolution fails the response has already been written.
 */
@Override
public void doFilter(HttpRequest request, HttpResponse response, FilterChain chain) {
    IdentityProvider provider = resolveProviderOrHandleResponse(request, response, INIT_CONTEXT);
    if (provider != null) {
        handleProvider(request, response, provider);
    }
}
|
// Initializing a session against a basic identity provider must invoke its init handling
// without emitting any authentication event.
@Test
public void do_filter_on_basic_identity_provider() {
    when(request.getRequestURI()).thenReturn("/sessions/init/" + BASIC_PROVIDER_KEY);
    identityProviderRepository.addIdentityProvider(baseIdentityProvider);
    underTest.doFilter(request, response, chain);
    assertBasicInitCalled();
    verifyNoInteractions(authenticationEvent);
}
|
/**
 * Returns one page of configuration change history for the given data id, group and namespace.
 */
public Page<ConfigHistoryInfo> listConfigHistory(String dataId, String group, String namespaceId, Integer pageNo,
        Integer pageSize) {
    return historyConfigInfoPersistService.findConfigHistory(dataId, group, namespaceId, pageNo, pageSize);
}
|
// The service must pass paging arguments through to the persist service and return
// its page unchanged (same size and item contents).
@Test
void testListConfigHistory() {
    ConfigHistoryInfo configHistoryInfo = new ConfigHistoryInfo();
    configHistoryInfo.setDataId(TEST_DATA_ID);
    configHistoryInfo.setGroup(TEST_GROUP);
    configHistoryInfo.setContent(TEST_CONTENT);
    configHistoryInfo.setCreatedTime(new Timestamp(new Date().getTime()));
    configHistoryInfo.setLastModifiedTime(new Timestamp(new Date().getTime()));
    List<ConfigHistoryInfo> configHistoryInfoList = new ArrayList<>();
    configHistoryInfoList.add(configHistoryInfo);
    Page<ConfigHistoryInfo> page = new Page<>();
    page.setTotalCount(15);
    page.setPageNumber(1);
    page.setPagesAvailable(2);
    page.setPageItems(configHistoryInfoList);
    when(historyConfigInfoPersistService.findConfigHistory(TEST_DATA_ID, TEST_GROUP, TEST_TENANT, 1, 10)).thenReturn(page);
    Page<ConfigHistoryInfo> pageResult = historyService.listConfigHistory(TEST_DATA_ID, TEST_GROUP, TEST_TENANT, 1, 10);
    verify(historyConfigInfoPersistService).findConfigHistory(TEST_DATA_ID, TEST_GROUP, TEST_TENANT, 1, 10);
    List<ConfigHistoryInfo> resultList = pageResult.getPageItems();
    ConfigHistoryInfo resConfigHistoryInfo = resultList.get(0);
    assertEquals(configHistoryInfoList.size(), resultList.size());
    assertEquals(configHistoryInfo.getDataId(), resConfigHistoryInfo.getDataId());
    assertEquals(configHistoryInfo.getGroup(), resConfigHistoryInfo.getGroup());
    assertEquals(configHistoryInfo.getContent(), resConfigHistoryInfo.getContent());
}
|
/**
 * @return this object's string representation, used as its UUID value
 */
public String getUuid() {
    return toString();
}
|
// Repeated calls on the same instance must return the same UUID string.
@Test
public void uuidDoesNotChangeBetweenRuns() {
    // Given
    Uuid uuid = new Uuid();
    // When
    String firstUuid = uuid.getUuid();
    String secondUuid = uuid.getUuid();
    // Then
    assertEquals(secondUuid, firstUuid);
}
|
/**
 * @return a new builder with no columns
 */
public static Builder builder() {
    return new Builder(ImmutableList.of());
}
|
// Adding a second HEADERS column to the schema builder must be rejected.
@Test
public void shouldThrowOnMultipleHeadersColumns() {
    // Given:
    final Builder builder = LogicalSchema.builder()
        .headerColumn(H0, Optional.empty());
    // When:
    final Exception e = assertThrows(
        KsqlException.class,
        () -> builder.headerColumn(F0, Optional.empty())
    );
    // Then:
    assertThat(e.getMessage(),
        containsString("Schema already contains a HEADERS column."));
}
|
/**
 * Returns a logger named after the given class.
 *
 * @param clazz the class whose fully-qualified name becomes the logger name
 */
public static InternalLogger getInstance(Class<?> clazz) {
    return getInstance(clazz.getName());
}
|
// error() calls must be forwarded to the underlying logger implementation.
@Test
public void testError() {
    final InternalLogger logger = InternalLoggerFactory.getInstance("mock");
    logger.error("a");
    verify(mockLogger).error("a");
}
|
/**
 * Creates a typed query for the given non-null HQL string bound to the DAO's entity class.
 */
protected Query<E> query(String queryString) {
    return currentSession().createQuery(requireNonNull(queryString), getEntityClass());
}
|
// The DAO must create its query with both the HQL string and the entity class.
@Test
void getsTypedQueries() throws Exception {
    assertThat(dao.query("HQL"))
        .isEqualTo(query);
    verify(session).createQuery("HQL", String.class);
}
|
/**
 * Probability mass function of the negative binomial distribution:
 * P(K = k) = Gamma(r + k) / (k! * Gamma(r)) * p^r * (1 - p)^k, and 0 for k &lt; 0.
 * NOTE(review): gamma/factorial may overflow to infinity for large k or r before
 * the ratio is taken — confirm intended input range with callers.
 *
 * @param k the number of failures before the r-th success
 * @return the probability of observing exactly k
 */
@Override
public double p(int k) {
    if (k < 0) {
        return 0.0;
    } else {
        return gamma(r + k) / (factorial(k) * gamma(r)) * Math.pow(p, r) * Math.pow(1 - p, k);
    }
}
|
// Checks the pmf of NegativeBinomial(r=3, p=0.3) against hand-computed values.
@Test
public void testP() {
    System.out.println("p");
    NegativeBinomialDistribution instance = new NegativeBinomialDistribution(3, 0.3);
    instance.rand();
    assertEquals(0.027, instance.p(0), 1E-7);
    assertEquals(0.0567, instance.p(1), 1E-7);
    assertEquals(0.07938, instance.p(2), 1E-7);
    assertEquals(0.09261, instance.p(3), 1E-7);
    assertEquals(0.05033709, instance.p(10), 1E-7);
}
|
/**
 * Registers the RocketMQ logging plugin as a Spring bean.
 */
@Bean
public ShenyuPlugin loggingRocketMQPlugin() {
    return new LoggingRocketMQPlugin();
}
|
// When the rocketmq logging property is enabled, both the data handler and the plugin
// beans must be registered, and the plugin must report its expected name.
@Test
public void testLoggingRocketMQPlugin() {
    applicationContextRunner
        .withPropertyValues(
            "debug=true",
            "shenyu.logging.rocketmq.enabled=true"
        )
        .run(context -> {
            PluginDataHandler pluginDataHandler = context.getBean("loggingRocketMQPluginDataHandler", PluginDataHandler.class);
            assertNotNull(pluginDataHandler);
            ShenyuPlugin plugin = context.getBean("loggingRocketMQPlugin", ShenyuPlugin.class);
            assertNotNull(plugin);
            assertThat(plugin.named()).isEqualTo(PluginEnum.LOGGING_ROCKETMQ.getName());
        });
}
|
/**
 * Returns the given list when it is already a mutable {@link ArrayList}, otherwise
 * returns a new mutable {@link ArrayList} containing the same elements in order.
 *
 * @param list the list to make mutable; must not be null
 * @return a mutable list with the same elements
 */
public static <E> List<E> ensureMutable(List<E> list) {
    if (list instanceof ArrayList) return list;
    // The copy constructor goes through toArray(): O(n) even for non-random-access
    // lists, unlike the previous indexed get(i) loop which was O(n^2) on a LinkedList.
    return new ArrayList<E>(list);
}
|
// An unmodifiable input must be copied into a mutable ArrayList with identical contents.
@Test void ensureMutable_copiesImmutable() {
    List<Object> list = Collections.unmodifiableList(Arrays.asList("foo", "bar"));
    assertThat(Lists.ensureMutable(list))
        .isInstanceOf(ArrayList.class)
        .containsExactlyElementsOf(list);
}
|
/**
 * Encodes a long value as a big-endian byte array of the given length.
 *
 * @param value  the value to encode
 * @param length the number of bytes to produce
 * @return the big-endian encoding of {@code value} in {@code length} bytes
 * @throws IllegalArgumentException when the value needs more than {@code length} bytes
 */
public static byte[] toByteArray(long value, int length) {
    final byte[] buffer = ByteBuffer.allocate(8).putLong(value).array();
    // Every byte that would be truncated away must be zero, otherwise the value
    // cannot be represented in 'length' bytes.
    for (int i = 0; i < 8 - length; i++) {
        if (buffer[i] != 0) {
            // Message fixed: previously read "Value is does not fit into byte array".
            throw new IllegalArgumentException(
                "Value does not fit into byte array: " + (8 - i) + " > " + length);
        }
    }
    return adjustLength(buffer, length);
}
|
// A value that fits in a single byte must encode to exactly that one byte.
@Test
public void toByteArrayLongShouldAFit() {
    assertArrayEquals(new byte[] { 1 }, ByteArrayUtils.toByteArray(1L, 1));
}
|
/**
 * Signature-protected endpoint returning a fixed success payload for an order lookup.
 */
@Signature
@GetMapping("/orders/{orderId}")
public String getOrder(@PathVariable String orderId, @RequestParam String name, @RequestParam Integer amount){
    return "success";
}
|
// A GET request carrying a valid signature header (built from path, method, content type
// and query params) must be accepted with HTTP 200.
@Test
void getOrder() throws Exception {
    Map<String, String[]> params = Maps.newHashMap();
    params.put("name", new String[]{"iphone"});
    params.put("amount", new String[]{"1"});
    SignatureVo signatureVo = new SignatureVo();
    signatureVo.setPath("/orders/order1");
    signatureVo.setContentType(MediaType.APPLICATION_JSON_VALUE);
    signatureVo.setHttpMethod("GET");
    signatureVo.setParams(params);
    MvcResult mvcResult = mockMvc.perform(MockMvcRequestBuilders.get("/orders/order1?name=iphone&amount=1")
            .headers(getHeaders(signatureVo)))
        .andExpect(MockMvcResultMatchers.status().isOk())
        .andDo(MockMvcResultHandlers.print())
        .andReturn();
    System.out.println("输出 " + mvcResult.getResponse().getContentAsString());
}
|
/**
 * Returns a copy of the replica array with every occurrence of {@code value} removed,
 * preserving the order of the remaining entries.
 *
 * @param replicas the source replica ids
 * @param value    the replica id to exclude
 * @return a new array without {@code value}
 */
public static int[] copyWithout(int[] replicas, int value) {
    return Arrays.stream(replicas)
        .filter(replica -> replica != value)
        .toArray();
}
|
// Exercises the array-valued overload copyWithout(int[], int[]) that removes a set of values.
@Test
public void testCopyWithout2() {
    assertArrayEquals(new int[] {}, Replicas.copyWithout(new int[] {}, new int[] {}));
    assertArrayEquals(new int[] {}, Replicas.copyWithout(new int[] {1}, new int[] {1}));
    assertArrayEquals(new int[] {1, 3},
        Replicas.copyWithout(new int[] {1, 2, 3}, new int[]{2, 4}));
    assertArrayEquals(new int[] {4},
        Replicas.copyWithout(new int[] {4, 2, 2, 1}, new int[]{2, 1}));
}
|
/**
 * Clears the set by clearing every underlying section.
 */
public void clear() {
    for (int i = 0; i < sections.length; i++) {
        sections[i].clear();
    }
}
|
// With auto-shrink enabled, clearing a set that grew from capacity 4 to 8 must
// shrink it back to its initial capacity.
@Test
public void testClear() {
    ConcurrentOpenHashSet<String> set =
        ConcurrentOpenHashSet.<String>newBuilder()
            .expectedItems(2)
            .concurrencyLevel(1)
            .autoShrink(true)
            .mapIdleFactor(0.25f)
            .build();
    assertEquals(set.capacity(), 4);
    assertTrue(set.add("k1"));
    assertTrue(set.add("k2"));
    assertTrue(set.add("k3"));
    assertEquals(set.capacity(), 8);
    set.clear();
    assertEquals(set.capacity(), 4);
}
|
/**
 * Evaluates a unary expression against a result value. Structured result types
 * (collections/maps) are verified recursively; scalar types go through the plain
 * unary evaluation.
 */
@Override
public ExpressionEvaluatorResult evaluateUnaryExpression(String rawExpression, Object resultValue, Class<?> resultClass) {
    if (isStructuredResult(resultClass)) {
        return verifyResult(rawExpression, resultValue, resultClass);
    } else {
        return ExpressionEvaluatorResult.of(internalUnaryEvaluation(rawExpression, resultValue, resultClass, false));
    }
}
|
// Null expression and value must evaluate successfully for scalar, map and list result types.
@Test
public void evaluateUnaryExpression() {
    assertThat(expressionEvaluator.evaluateUnaryExpression(null, null, String.class)).is(successful);
    assertThat(expressionEvaluator.evaluateUnaryExpression(null, null, Map.class)).is(successful);
    assertThat(expressionEvaluator.evaluateUnaryExpression(null, null, List.class)).is(successful);
}
|
/**
 * Reads the attributes of a file, directory or container in Swift.
 * <p>
 * Containers are resolved via container info (size and region); other paths via
 * object metadata. For a directory whose placeholder object is missing, falls back
 * to listing for a common prefix; for a missing file, falls back to a pending large
 * object (segmented) upload before giving up.
 *
 * @throws NotfoundException when the path does not exist or its MIME type does not
 *         match the requested file/directory type
 */
@Override
public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException {
    if(file.isRoot()) {
        return PathAttributes.EMPTY;
    }
    final Region region = regionService.lookup(file);
    try {
        if(containerService.isContainer(file)) {
            final ContainerInfo info = session.getClient().getContainerInfo(region,
                containerService.getContainer(file).getName());
            final PathAttributes attributes = new PathAttributes();
            attributes.setSize(info.getTotalSize());
            attributes.setRegion(info.getRegion().getRegionId());
            return attributes;
        }
        final ObjectMetadata metadata;
        try {
            try {
                metadata = session.getClient().getObjectMetaData(region,
                    containerService.getContainer(file).getName(), containerService.getKey(file));
            }
            catch(GenericException e) {
                throw new SwiftExceptionMappingService().map("Failure to read attributes of {0}", e, file);
            }
        }
        catch(NotfoundException e) {
            if(file.isDirectory()) {
                // Directory placeholder file may be missing. Still return empty attributes when we find children
                try {
                    new SwiftObjectListService(session).list(file, new CancellingListProgressListener());
                }
                catch(ListCanceledException l) {
                    // Found common prefix
                    return PathAttributes.EMPTY;
                }
                catch(NotfoundException n) {
                    throw e;
                }
                // Common prefix only
                return PathAttributes.EMPTY;
            }
            // Try to find pending large file upload
            final Write.Append append = new SwiftLargeObjectUploadFeature(session, regionService, new SwiftWriteFeature(session, regionService)).append(file, new TransferStatus());
            if(append.append) {
                return new PathAttributes().withSize(append.offset);
            }
            throw e;
        }
        // A path typed as directory must carry the directory placeholder MIME type, and vice versa
        if(file.isDirectory()) {
            if(!StringUtils.equals(SwiftDirectoryFeature.DIRECTORY_MIME_TYPE, metadata.getMimeType())) {
                throw new NotfoundException(String.format("File %s has set MIME type %s but expected %s",
                    file.getAbsolute(), metadata.getMimeType(), SwiftDirectoryFeature.DIRECTORY_MIME_TYPE));
            }
        }
        if(file.isFile()) {
            if(StringUtils.equals(SwiftDirectoryFeature.DIRECTORY_MIME_TYPE, metadata.getMimeType())) {
                throw new NotfoundException(String.format("File %s has set MIME type %s",
                    file.getAbsolute(), metadata.getMimeType()));
            }
        }
        return this.toAttributes(metadata);
    }
    catch(GenericException e) {
        throw new SwiftExceptionMappingService().map("Failure to read attributes of {0}", e, file);
    }
    catch(IOException e) {
        throw new DefaultIOExceptionMappingService().map("Failure to read attributes of {0}", e, file);
    }
}
|
// Finding a container must return non-empty attributes with a size and region set.
@Test
public void testFindContainer() throws Exception {
    final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume));
    container.attributes().setRegion("IAD");
    final PathAttributes attributes = new SwiftAttributesFinderFeature(session).find(container);
    assertNotEquals(PathAttributes.EMPTY, attributes);
    assertNotEquals(-1L, attributes.getSize());
    assertNotNull(attributes.getRegion());
}
|
/**
 * @return the future that completes once this component has terminated
 */
public CompletableFuture<Result> getTerminationFuture() {
    return terminationFuture;
}
|
// When the task executor service fails, the task manager's working directory must be
// preserved (not cleaned up) for post-mortem inspection.
@Test
void testWorkingDirIsNotDeletedInCaseOfFailure() throws Exception {
    final File workingDirBase = TempDirUtils.newFolder(temporaryFolder);
    final ResourceID resourceId = ResourceID.generate();
    final Configuration configuration =
        createConfigurationWithWorkingDirectory(workingDirBase, resourceId);
    final TaskManagerRunner taskManagerRunner =
        createTaskManagerRunner(
            configuration, new TestingFailingTaskExecutorServiceFactory());
    taskManagerRunner.getTerminationFuture().join();
    assertThat(
        ClusterEntrypointUtils.generateTaskManagerWorkingDirectoryFile(
            configuration, resourceId))
        .exists();
}
|
/**
 * Creates a metastore client, trying each configured address in turn (in random order
 * when load balancing is enabled) until one connects. The last connection failure is
 * chained into the exception thrown when all addresses fail.
 *
 * @param token optional delegation token for the connection
 * @return a connected client, with the configured metastore username applied when set
 * @throws TException when no metastore address could be reached
 */
@Override
public HiveMetastoreClient createMetastoreClient(Optional<String> token)
        throws TException
{
    List<HostAndPort> metastores = new ArrayList<>(addresses);
    if (metastoreLoadBalancingEnabled) {
        Collections.shuffle(metastores);
    }
    TException lastException = null;
    for (HostAndPort metastore : metastores) {
        try {
            HiveMetastoreClient client = clientFactory.create(metastore, token);
            if (!isNullOrEmpty(metastoreUsername)) {
                client.setUGI(metastoreUsername);
            }
            return client;
        }
        catch (TException e) {
            // Remember the failure and fall through to the next address
            lastException = e;
        }
    }
    throw new TException("Failed connecting to Hive metastore: " + addresses, lastException);
}
|
// With a single configured client, createMetastoreClient must return that default client.
@Test
public void testDefaultHiveMetastore()
        throws TException
{
    HiveCluster cluster = createHiveCluster(CONFIG_WITH_FALLBACK, singletonList(DEFAULT_CLIENT));
    assertEquals(cluster.createMetastoreClient(Optional.empty()), DEFAULT_CLIENT);
}
|
/**
 * Maps FTP client exceptions to backend exception types. The instanceof checks are
 * ordered from most to least specific; anything unrecognized falls through to the
 * generic IO exception mapping.
 */
@Override
public BackgroundException map(final IOException e) {
    final StringBuilder buffer = new StringBuilder();
    this.append(buffer, e.getMessage());
    if(e instanceof FTPConnectionClosedException) {
        return new ConnectionRefusedException(buffer.toString(), e);
    }
    if(e instanceof FTPException) {
        return this.handle((FTPException) e, buffer);
    }
    if(e instanceof MalformedServerReplyException) {
        return new InteroperabilityException(buffer.toString(), e);
    }
    return new DefaultIOExceptionMappingService().map(e);
}
|
// Socket-level aborts and closures must map to ConnectionRefusedException.
@Test
public void testMap() {
    assertEquals(ConnectionRefusedException.class,
        new FTPExceptionMappingService().map(new SocketException("Software caused connection abort")).getClass());
    assertEquals(ConnectionRefusedException.class,
        new FTPExceptionMappingService().map(new SocketException("Socket closed")).getClass());
}
|
/**
 * Aborts any in-flight word correction and resets all prediction state: pending
 * suggestions, the composing word, revert tracking and timing state. When
 * {@code disabledUntilNextInputStart} is true, prediction is additionally switched
 * off and the cancel-suggestions strip action is removed until the next input starts.
 */
@CallSuper
protected void abortCorrectionAndResetPredictionState(boolean disabledUntilNextInputStart) {
    mSuggest.resetNextWordSentence();
    mLastSpaceTimeStamp = NEVER_TIME_STAMP;
    mJustAutoAddedWord = false;
    mKeyboardHandler.removeAllSuggestionMessages();
    final InputConnection ic = getCurrentInputConnection();
    markExpectingSelectionUpdate();
    if (ic != null) ic.finishComposingText();
    clearSuggestions();
    mWord.reset();
    mWordRevertLength = 0;
    // Removed a duplicate "mJustAutoAddedWord = false;" that re-assigned the flag
    // already cleared above.
    if (disabledUntilNextInputStart) {
        Logger.d(TAG, "abortCorrection will abort correct forever");
        final KeyboardViewContainerView inputViewContainer = getInputViewContainer();
        if (inputViewContainer != null) {
            inputViewContainer.removeStripAction(mCancelSuggestionsAction);
        }
        mPredictionOn = false;
    }
}
|
// Aborting correction with disabledUntilNextInputStart=true must remove the
// close-suggestions strip action from the input view container.
@Test
public void testStripActionRemovedWhenAbortingPrediction() {
    Assert.assertNotNull(
        mAnySoftKeyboardUnderTest
            .getInputViewContainer()
            .findViewById(R.id.close_suggestions_strip_text));
    mAnySoftKeyboardUnderTest.abortCorrectionAndResetPredictionState(true);
    Assert.assertNull(
        mAnySoftKeyboardUnderTest
            .getInputViewContainer()
            .findViewById(R.id.close_suggestions_strip_text));
}
|
/**
 * Strips the 2-byte FTDI status header from every 64-byte USB packet in the raw data.
 *
 * @param ftdiData raw data as received from the FTDI chip, headers included
 * @return the payload bytes with all per-packet headers removed
 */
static byte[] adaptArray(byte[] ftdiData)
{
    int length = ftdiData.length;
    if(length > 64)
    {
        // Each (possibly partial) 64-byte packet carries a 2-byte header, so the
        // payload length is the total minus 2 bytes per packet.
        // ceil(length / 64) computed arithmetically instead of the previous
        // incremental while loop.
        int n = (length + 63) / 64;
        int realLength = length - n*2;
        byte[] data = new byte[realLength];
        copyData(ftdiData, data);
        return data;
    }
    else if (length == 2) // special case optimization that returns the same instance.
    {
        return EMPTY_BYTE_ARRAY;
    }
    else
    {
        // Single packet: just drop the leading 2-byte header.
        return Arrays.copyOfRange(ftdiData, 2, length);
    }
}
|
// One full 64-byte packet plus a partial second packet: both 2-byte headers must be stripped.
@Test
public void testMultiplePartial() {
    byte[] withHeaders = {1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64, 1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62};
    byte[] wanted = {3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64, 3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62};
    Assert.assertArrayEquals(wanted, FTDISerialDevice.adaptArray(withHeaders));
}
|
/**
 * Applies a manual workflow transition to the issue on behalf of the given user.
 */
public void doManualTransition(DefaultIssue issue, String transitionKey, String userUuid) {
    workflow.doManualTransition(issue, transitionKey, getIssueChangeContextWithUser(userUuid));
}
|
// The service must forward the transition to the workflow with a user-scoped change context.
@Test
public void doManualTransition() {
    DefaultIssue issue = new DefaultIssue();
    String transitionKey = "transitionKey";
    String userUuid = "userUuid";
    underTest.doManualTransition(issue, transitionKey, userUuid);
    verify(workflow).doManualTransition(issue, transitionKey, getIssueChangeContextWithUser(userUuid));
}
|
/**
 * Collector that indexes elements into an {@link ImmutableSetMultimap}: each element
 * maps to one key and a stream of values, all of which are flattened under that key.
 * Neither function may return null for any element.
 *
 * @param keyFunction   derives the multimap key from an element (must not return null)
 * @param valueFunction derives a stream of values from an element (must not return null)
 */
public static <K, E, V> Collector<E, ImmutableSetMultimap.Builder<K, V>, ImmutableSetMultimap<K, V>> unorderedFlattenIndex(
    Function<? super E, K> keyFunction, Function<? super E, Stream<V>> valueFunction) {
    verifyKeyAndValueFunctions(keyFunction, valueFunction);
    BiConsumer<ImmutableSetMultimap.Builder<K, V>, E> accumulator = (map, element) -> {
        K key = requireNonNull(keyFunction.apply(element), KEY_FUNCTION_CANT_RETURN_NULL_MESSAGE);
        Stream<V> valueStream = requireNonNull(valueFunction.apply(element), VALUE_FUNCTION_CANT_RETURN_NULL_MESSAGE);
        valueStream.forEach(value -> map.put(key, value));
    };
    // Merger used for parallel streams: fold one partial builder into the other
    BinaryOperator<ImmutableSetMultimap.Builder<K, V>> merger = (m1, m2) -> {
        for (Map.Entry<K, V> entry : m2.build().entries()) {
            m1.put(entry.getKey(), entry.getValue());
        }
        return m1;
    };
    return Collector.of(
        ImmutableSetMultimap::builder,
        accumulator,
        merger,
        ImmutableSetMultimap.Builder::build);
}
|
// Collecting an empty stream must produce an empty multimap.
@Test
public void unorderedFlattenIndex_empty_stream_returns_empty_map() {
    assertThat(Stream.<MyObj2>empty()
        .collect(unorderedFlattenIndex(MyObj2::getId, MyObj2::getTexts))
        .size()).isZero();
}
|
/**
 * Resolves public IPs for the given private addresses via the EC2
 * DescribeNetworkInterfaces API. On any failure this degrades gracefully: a warning
 * is logged once and every private address is mapped to a null public IP instead of
 * the whole discovery being aborted.
 *
 * @param privateAddresses private IPs to resolve (an empty list yields an empty map)
 * @param credentials      credentials used to sign the AWS request
 * @return map from private IP to public IP (values may be null on failure)
 */
Map<String, String> describeNetworkInterfaces(List<String> privateAddresses, AwsCredentials credentials) {
    if (privateAddresses.isEmpty()) {
        return Collections.emptyMap();
    }
    try {
        Map<String, String> attributes = createAttributesDescribeNetworkInterfaces(privateAddresses);
        Map<String, String> headers = createHeaders(attributes, credentials);
        String response = callAwsService(attributes, headers);
        return parseDescribeNetworkInterfaces(response);
    } catch (Exception e) {
        LOGGER.finest(e);
        // Log warning only once.
        if (!isNoPublicIpAlreadyLogged) {
            LOGGER.warning("Cannot fetch the public IPs of ECS Tasks. You won't be able to use "
                + "Hazelcast Smart Client from outside of this VPC.");
            isNoPublicIpAlreadyLogged = true;
        }
        Map<String, String> map = new HashMap<>();
        privateAddresses.forEach(k -> map.put(k, null));
        return map;
    }
}
|
@Test
public void describeNetworkInterfaces() {
    // given
    // Stub the EC2 DescribeNetworkInterfaces call for two private IPs and return an XML
    // response associating each with a public IP.
    List<String> privateAddresses = asList("10.0.1.207", "10.0.1.82");
    String requestUrl = "/?Action=DescribeNetworkInterfaces"
        + "&Filter.1.Name=addresses.private-ip-address"
        + "&Filter.1.Value.1=10.0.1.207"
        + "&Filter.1.Value.2=10.0.1.82"
        + "&Version=2016-11-15";
    //language=XML
    String response = """
        <?xml version="1.0" encoding="UTF-8"?>
        <DescribeNetworkInterfacesResponse xmlns="http://ec2.amazonaws.com/doc/2016-11-15/">
            <requestId>21bc9f93-2196-4107-87a3-9e5b2b3f29d9</requestId>
            <networkInterfaceSet>
                <item>
                    <availabilityZone>eu-central-1a</availabilityZone>
                    <privateIpAddress>10.0.1.207</privateIpAddress>
                    <association>
                        <publicIp>54.93.217.194</publicIp>
                    </association>
                </item>
                <item>
                    <availabilityZone>eu-central-1a</availabilityZone>
                    <privateIpAddress>10.0.1.82</privateIpAddress>
                    <association>
                        <publicIp>35.156.192.128</publicIp>
                    </association>
                </item>
            </networkInterfaceSet>
        </DescribeNetworkInterfacesResponse>""";
    // The stub also asserts the SigV4 headers (date, authorization, security token) are sent.
    stubFor(get(urlEqualTo(requestUrl))
        .withHeader("X-Amz-Date", equalTo("20200403T102518Z"))
        .withHeader("Authorization", equalTo(AUTHORIZATION_HEADER))
        .withHeader("X-Amz-Security-Token", equalTo(TOKEN))
        .willReturn(aResponse().withStatus(HttpURLConnection.HTTP_OK).withBody(response)));
    // when
    Map<String, String> result = awsEc2Api.describeNetworkInterfaces(privateAddresses, CREDENTIALS);
    // then
    // Each private IP is mapped to the public IP parsed from the XML response.
    assertEquals(2, result.size());
    assertEquals("54.93.217.194", result.get("10.0.1.207"));
    assertEquals("35.156.192.128", result.get("10.0.1.82"));
}
|
/**
 * Creates a RetryTransformer applying the given Retry to a transformed reactive sequence.
 *
 * @param retry the retry instance to apply
 * @param <T>   the value type of the upstream sequence
 * @return a new RetryTransformer wrapping {@code retry}
 */
public static <T> RetryTransformer<T> of(Retry retry) {
    return new RetryTransformer<>(retry);
}
|
@Test
public void retryOnResultFailAfterMaxAttemptsUsingObservable() throws InterruptedException {
    // Configure retry-on-result: any "retry" result triggers a retry, up to 3 attempts.
    RetryConfig config = RetryConfig.<String>custom()
        .retryOnResult("retry"::equals)
        .waitDuration(Duration.ofMillis(50))
        .maxAttempts(3).build();
    Retry retry = Retry.of("testName", config);
    given(helloWorldService.returnHelloWorld())
        .willReturn("retry");
    // After exhausting max attempts the last result is emitted and the stream completes
    // (retry-on-result does not error out, unlike retry-on-exception).
    Observable.fromCallable(helloWorldService::returnHelloWorld)
        .compose(RetryTransformer.of(retry))
        .test()
        .await()
        .assertValueCount(1)
        .assertValue("retry")
        .assertComplete()
        .assertSubscribed();
    // The service must have been invoked exactly maxAttempts times.
    then(helloWorldService).should(times(3)).returnHelloWorld();
}
|
/**
 * Computes an edit distance between two identifiers, normalized to the worst-case cost of
 * the chosen term pairing so the result is comparable across identifiers of different sizes.
 * Both identifiers are split into lowercase terms, terms are paired by a minimum-cost
 * assignment (Hungarian algorithm), and unmatched terms are charged as whole-term
 * deletions or additions.
 */
public double getNormalizedEditDistance(String source, String target) {
    ImmutableList<String> sourceTerms = NamingConventions.splitToLowercaseTerms(source);
    ImmutableList<String> targetTerms = NamingConventions.splitToLowercaseTerms(target);
    // costMatrix[s][t]: edit distance between source term s and target term t.
    double[][] costMatrix =
        sourceTerms.stream()
            .map(sourceTerm ->
                targetTerms.stream()
                    .mapToDouble(targetTerm -> editDistanceFn.apply(sourceTerm, targetTerm))
                    .toArray())
            .toArray(double[][]::new);
    // worstCaseMatrix[s][t]: worst-case distance for terms of those lengths.
    double[][] worstCaseMatrix =
        sourceTerms.stream()
            .map(String::length)
            .map(sourceLength ->
                targetTerms.stream()
                    .map(String::length)
                    .mapToDouble(targetLength -> maxDistanceFn.apply(sourceLength, targetLength))
                    .toArray())
            .toArray(double[][]::new);
    // Cost of dropping each source term entirely / adding each target term entirely.
    double[] sourceTermDeletionCosts =
        sourceTerms.stream().mapToDouble(term -> maxDistanceFn.apply(term.length(), 0)).toArray();
    double[] targetTermAdditionCosts =
        targetTerms.stream().mapToDouble(term -> maxDistanceFn.apply(0, term.length())).toArray();
    // assignments[i] == j pairs source term i with target term j; -1 marks an unassigned
    // source term (more source than target terms). Target terms never referenced in the
    // array are unmatched additions.
    int[] assignments = new HungarianAlgorithm(costMatrix).execute();
    double assignmentCost =
        computeCost(assignments, costMatrix, sourceTermDeletionCosts, targetTermAdditionCosts);
    double maxCost =
        computeCost(assignments, worstCaseMatrix, sourceTermDeletionCosts, targetTermAdditionCosts);
    return assignmentCost / maxCost;
}
|
@Test
public void getNormalizedEditDistance_isSymmetric_withExtraTerm() {
    // The normalized distance should be symmetric even when one identifier has an
    // extra term ("foo") that must be matched against nothing.
    TermEditDistance termEditDistance = new TermEditDistance();
    String identifier = "fooBarBaz";
    String otherIdentifier = "barBaz";
    double distanceFwd = termEditDistance.getNormalizedEditDistance(identifier, otherIdentifier);
    double distanceBwd = termEditDistance.getNormalizedEditDistance(otherIdentifier, identifier);
    assertThat(distanceFwd).isEqualTo(distanceBwd);
}
|
/**
 * Returns the URI under which the HTTP API is advertised to other nodes.
 * Falls back to the default HTTP URI when "http_publish_uri" is not configured, or when it
 * points at a wildcard address (which peers could not actually connect to); otherwise the
 * configured URI is normalized with the default port.
 */
public URI getHttpPublishUri() {
    if (httpPublishUri == null) {
        // Not configured: advertise the default HTTP URI.
        final URI defaultHttpUri = getDefaultHttpUri();
        LOG.debug("No \"http_publish_uri\" set. Using default <{}>.", defaultHttpUri);
        return defaultHttpUri;
    }
    final InetAddress inetAddress = toInetAddress(httpPublishUri.getHost());
    if (Tools.isWildcardInetAddress(inetAddress)) {
        // A wildcard address is unreachable from other hosts; warn and fall back,
        // preserving the configured path.
        final URI defaultHttpUri = getDefaultHttpUri(httpPublishUri.getPath());
        LOG.warn("\"{}\" is not a valid setting for \"http_publish_uri\". Using default <{}>.", httpPublishUri, defaultHttpUri);
        return defaultHttpUri;
    }
    return Tools.normalizeURI(httpPublishUri, httpPublishUri.getScheme(), GRAYLOG_DEFAULT_PORT, httpPublishUri.getPath());
}
|
@Test
public void testHttpPublishUriIPv6Wildcard() throws RepositoryException, ValidationException {
    // An IPv6 wildcard publish URI ([::]) is unreachable from other hosts, so the
    // configuration must NOT return it verbatim — it falls back to a default URI.
    final Map<String, String> properties = ImmutableMap.of(
        "http_bind_address", "[::]:9000",
        "http_publish_uri", "http://[::]:9000/");
    jadConfig.setRepository(new InMemoryRepository(properties)).addConfigurationBean(configuration).process();
    assertThat(configuration.getHttpPublishUri()).isNotEqualTo(URI.create("http://[::]:9000/"));
}
|
/**
 * Deletes the given files using the Dropbox asynchronous batch-delete API.
 * Files are grouped by their container (namespace); one batch job is launched per
 * container and polled on a scheduler until it reports completion or failure. The calling
 * thread blocks on a latch until the poll loop signals, then rethrows any recorded failure.
 *
 * @param files    files to delete mapped to their transfer status
 * @param prompt   password callback (unused by this implementation)
 * @param callback notified once per file before the batch is launched
 * @throws BackgroundException when a file is missing, the batch fails, or polling errors out
 */
@Override
public void delete(final Map<Path, TransferStatus> files, final PasswordCallback prompt, final Callback callback) throws BackgroundException {
    final CountDownLatch signal = new CountDownLatch(1);
    // Holds the first failure observed by the poller or an uncaught scheduler exception.
    final AtomicReference<BackgroundException> failure = new AtomicReference<>();
    final ScheduledThreadPool scheduler = new ScheduledThreadPool(new LoggingUncaughtExceptionHandler() {
        @Override
        public void uncaughtException(final Thread t, final Throwable e) {
            super.uncaughtException(t, e);
            // Record the crash and release the waiting thread so we do not hang forever.
            failure.set(new BackgroundException(e));
            signal.countDown();
        }
    }, "deletebatch");
    try {
        // Group file keys by container: Dropbox batch jobs operate per namespace.
        final Map<Path, List<String>> containers = new HashMap<>();
        for(Path f : files.keySet()) {
            final Path container = containerService.getContainer(f);
            if(containers.containsKey(container)) {
                containers.get(container).add(containerService.getKey(f));
            }
            else {
                final List<String> keys = new ArrayList<>();
                keys.add(containerService.getKey(f));
                containers.put(container, keys);
            }
            callback.delete(f);
        }
        for(Path container : containers.keySet()) {
            final DbxUserFilesRequests requests = new DbxUserFilesRequests(session.getClient(container));
            // Launch the asynchronous batch delete for all keys of this container.
            final DeleteBatchLaunch job = requests.deleteBatch(containers.get(container).stream().map(DeleteArg::new).collect(Collectors.toList()));
            final ScheduledFuture<?> f = scheduler.repeat(() -> {
                try {
                    // Poll status
                    final DeleteBatchJobStatus status = requests.deleteBatchCheck(job.getAsyncJobIdValue());
                    if(status.isComplete()) {
                        // Inspect per-entry results; record the first failure found.
                        final List<DeleteBatchResultEntry> entries = status.getCompleteValue().getEntries();
                        for(DeleteBatchResultEntry entry : entries) {
                            if(entry.isFailure()) {
                                switch(entry.getFailureValue().tag()) {
                                    case PATH_LOOKUP:
                                        failure.set(new NotfoundException(entry.getFailureValue().toString()));
                                        break;
                                    default:
                                        failure.set(new InteroperabilityException());
                                }
                            }
                        }
                        signal.countDown();
                    }
                    if(status.isFailed()) {
                        signal.countDown();
                    }
                }
                catch(DbxException e) {
                    failure.set(new DropboxExceptionMappingService().map(e));
                    signal.countDown();
                }
            }, new HostPreferences(session.getHost()).getLong("dropbox.delete.poll.interval.ms"), TimeUnit.MILLISECONDS);
            // Wait for the poller, periodically checking whether the scheduled task itself
            // terminated with an exception (which would otherwise be silently lost).
            while(!Uninterruptibles.awaitUninterruptibly(signal, Duration.ofSeconds(1))) {
                try {
                    if(f.isDone()) {
                        Uninterruptibles.getUninterruptibly(f);
                    }
                }
                catch(ExecutionException e) {
                    // Prefer rethrowing a BackgroundException from the cause chain as-is.
                    for(Throwable cause : ExceptionUtils.getThrowableList(e)) {
                        Throwables.throwIfInstanceOf(cause, BackgroundException.class);
                    }
                    throw new DefaultExceptionMappingService().map(Throwables.getRootCause(e));
                }
            }
            if(null != failure.get()) {
                throw failure.get();
            }
        }
    }
    catch(DbxException e) {
        throw new DropboxExceptionMappingService().map(e);
    }
    finally {
        scheduler.shutdown();
    }
}
|
@Test
public void testDeleteFiles() throws Exception {
    // Create two files, batch-delete them in one call, then assert neither can be found.
    final Path file1 = new DropboxTouchFeature(session).touch(
        new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus());
    final Path file2 = new DropboxTouchFeature(session).touch(
        new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus());
    new DropboxBatchDeleteFeature(session).delete(Arrays.asList(file1, file2), new DisabledLoginCallback(), new Delete.DisabledCallback());
    assertFalse(new DropboxFindFeature(session).find(file1));
    assertFalse(new DropboxFindFeature(session).find(file2));
}
|
/**
 * Builds a {@link ProjectMeasuresQuery} from the parsed filter criteria.
 *
 * @param criteria     criteria to apply to the query, processed in order
 * @param projectUuids optional restriction to a set of project UUIDs; ignored when null
 * @return the populated query
 */
static ProjectMeasuresQuery newProjectMeasuresQuery(List<Criterion> criteria, @Nullable Set<String> projectUuids) {
    ProjectMeasuresQuery query = new ProjectMeasuresQuery();
    if (projectUuids != null) {
        query.setProjectUuids(projectUuids);
    }
    for (Criterion criterion : criteria) {
        processCriterion(criterion, query);
    }
    return query;
}
|
@Test
public void fail_to_create_query_on_quality_gate_when_value_is_incorrect() {
    // An unrecognized quality gate status value must be rejected with a descriptive
    // IllegalArgumentException rather than being silently ignored.
    assertThatThrownBy(() -> {
        newProjectMeasuresQuery(singletonList(Criterion.builder().setKey("alert_status").setOperator(EQ).setValue("unknown").build()), emptySet());
    })
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Unknown quality gate status : 'unknown'");
}
|
/**
 * Validates a client certificate's ASN.1 structure and reports problems.
 * A certificate with no extensions attribute at all is accepted. A certificate carrying an
 * explicitly empty extension sequence (forbidden by the ASN.1 specification) is only logged
 * as an INFO deploy message, not rejected. Encoding failures are passed to {@code reporter}.
 *
 * @param clusterName container cluster the client belongs to (used in messages)
 * @param clientId    id of the client owning the certificate (used in messages)
 * @param cert        certificate to validate
 * @param reporter    invoked with an error message and cause on encoding failures
 * @param state       deploy state used to emit the deploy-log warning
 */
static void validateCertificate(String clusterName, String clientId, X509Certificate cert, BiConsumer<String, Throwable> reporter, DeployState state) {
    try {
        var extensions = TBSCertificate.getInstance(cert.getTBSCertificate()).getExtensions();
        if (extensions == null) return; // Certificate without any extensions is okay
        if (extensions.getExtensionOIDs().length == 0) {
            /*
             BouncyCastle 1.77 and 1.78 did not accept certificates having an empty sequence of extensions.
             Earlier releases violated the ASN.1 specification as the specification forbids empty extension sequence.
             See https://github.com/bcgit/bc-java/issues/1479.
             The restriction was lifted on 1.78.1 although it's a reasonble to warn users still.
             */
            var message = "The certificate's ASN.1 structure contains an empty sequence of extensions, " +
                    "which is a violation of the ASN.1 specification. " +
                    "Please update the application package with a new certificate, " +
                    "e.g by generating a new one using the Vespa CLI `$ vespa auth cert`. ";
            state.getDeployLogger()
                    .log(Level.INFO, errorMessage(clusterName, clientId, message));
        }
    } catch (CertificateEncodingException e) {
        reporter.accept(errorMessage(clusterName, clientId, e.getMessage()), e);
    }
}
|
@Test
void accepts_valid_certificate() {
    // A well-formed certificate must pass validation without throwing and without
    // producing any deploy-log entries.
    var logger = new DeployLoggerStub();
    var state = new DeployState.Builder().deployLogger(logger).build();
    var cert = readTestCertificate("valid-cert.pem");
    assertDoesNotThrow(() -> CloudClientsValidator.validateCertificate("default", "my-feed-client", cert,
            (msg, cause) -> { throw new IllegalArgumentException(msg, cause); },
            state));
    assertEquals(0, logger.entries.size());
}
|
/**
 * Returns the containers of the given application attempt, aggregated across all active
 * sub-clusters of the federation.
 *
 * @param req          servlet request (forwarded to the sub-cluster interceptors)
 * @param res          servlet response (forwarded to the sub-cluster interceptors)
 * @param appId        application id, e.g. application_1617362436000_0001
 * @param appAttemptId application attempt id, e.g. appattempt_1617362436000_0001_000001
 * @return the merged ContainersInfo from every active sub-cluster (possibly empty)
 * @throws IllegalArgumentException if appId or appAttemptId is malformed
 * @throws RuntimeException if querying the sub-clusters fails
 */
@Override
public ContainersInfo getContainers(HttpServletRequest req,
    HttpServletResponse res, String appId, String appAttemptId) {
  // Check that the appId/appAttemptId format is accurate
  try {
    RouterServerUtil.validateApplicationId(appId);
    RouterServerUtil.validateApplicationAttemptId(appAttemptId);
  } catch (IllegalArgumentException e) {
    RouterAuditLogger.logFailure(getUser().getShortUserName(), GET_CONTAINERS,
        UNKNOWN, TARGET_WEB_SERVICE, e.getLocalizedMessage());
    routerMetrics.incrGetContainersFailedRetrieved();
    throw e;
  }
  try {
    long startTime = clock.getTime();
    ContainersInfo containersInfo = new ContainersInfo();
    Collection<SubClusterInfo> subClustersActive = federationFacade.getActiveSubClusters();
    // Fan the call out to every active sub-cluster concurrently and merge the results.
    Class<?>[] argsClasses = new Class<?>[]{
        HttpServletRequest.class, HttpServletResponse.class, String.class, String.class};
    Object[] args = new Object[]{req, res, appId, appAttemptId};
    ClientMethod remoteMethod = new ClientMethod("getContainers", argsClasses, args);
    Map<SubClusterInfo, ContainersInfo> containersInfoMap =
        invokeConcurrent(subClustersActive, remoteMethod, ContainersInfo.class);
    if (containersInfoMap != null && !containersInfoMap.isEmpty()) {
      containersInfoMap.values().forEach(containers ->
          containersInfo.addAll(containers.getContainers()));
    }
    // containersInfo is instantiated above and never reassigned, so the former
    // `if (containersInfo != null)` guard was dead code — return unconditionally.
    long stopTime = clock.getTime();
    RouterAuditLogger.logSuccess(getUser().getShortUserName(), GET_CONTAINERS,
        TARGET_WEB_SERVICE);
    routerMetrics.succeededGetContainersRetrieved(stopTime - startTime);
    return containersInfo;
  } catch (NotFoundException e) {
    routerMetrics.incrGetContainersFailedRetrieved();
    RouterAuditLogger.logFailure(getUser().getShortUserName(), GET_CONTAINERS,
        UNKNOWN, TARGET_WEB_SERVICE, e.getLocalizedMessage());
    RouterServerUtil.logAndThrowRunTimeException(e, "getContainers error, appId = %s, " +
        " appAttemptId = %s, Probably getActiveSubclusters error.", appId, appAttemptId);
  } catch (IOException | YarnException e) {
    routerMetrics.incrGetContainersFailedRetrieved();
    RouterAuditLogger.logFailure(getUser().getShortUserName(), GET_CONTAINERS,
        UNKNOWN, TARGET_WEB_SERVICE, e.getLocalizedMessage());
    RouterServerUtil.logAndThrowRunTimeException(e, "getContainers error, appId = %s, " +
        " appAttemptId = %s.", appId, appAttemptId);
  }
  // Unreachable at runtime (the helpers above always throw), but required for the
  // compiler since logAndThrowRunTimeException's throw is not statically visible.
  routerMetrics.incrGetContainersFailedRetrieved();
  RouterAuditLogger.logFailure(getUser().getShortUserName(), GET_CONTAINERS,
      UNKNOWN, TARGET_WEB_SERVICE, "getContainers failed.");
  throw RouterServerUtil.logAndReturnRunTimeException(
      "getContainers failed, appId: %s, appAttemptId: %s.", appId, appAttemptId);
}
|
@Test
public void testGetContainers()
    throws YarnException, IOException, InterruptedException {
  // Submit an application so the interceptor can resolve its home sub-cluster.
  ApplicationId appId = ApplicationId.newInstance(Time.now(), 1);
  ApplicationSubmissionContextInfo context =
      new ApplicationSubmissionContextInfo();
  context.setApplicationId(appId.toString());
  // Submit the application we want the report later
  Response response = interceptor.submitApplication(context, null);
  Assert.assertNotNull(response);
  Assert.assertNotNull(stateStoreUtil.queryApplicationHomeSC(appId));
  ApplicationAttemptId appAttempt = ApplicationAttemptId.newInstance(appId, 1);
  // Fetch containers federated across sub-clusters; 4 entries are expected —
  // presumably one per mocked sub-cluster in the test fixture — TODO confirm.
  ContainersInfo responseGet = interceptor.getContainers(
      null, null, appId.toString(), appAttempt.toString());
  Assert.assertEquals(4, responseGet.getContainers().size());
}
|
/**
 * Rewrites the expression so that it only references variables matching the given scope,
 * using the known equalities; delegates to the overload with full rewrite enabled.
 *
 * @param expression    expression to rewrite; must be deterministic
 * @param variableScope predicate selecting the variables allowed in the result
 * @return the rewritten expression (possibly null per the delegate's contract —
 *         the overload is not visible here, TODO confirm)
 * @throws IllegalArgumentException if the expression is non-deterministic
 */
public RowExpression rewriteExpression(RowExpression expression, Predicate<VariableReferenceExpression> variableScope)
{
    checkArgument(determinismEvaluator.isDeterministic(expression), "Only deterministic expressions may be considered for rewrite");
    return rewriteExpression(expression, variableScope, true);
}
|
@Test
public void testParseEqualityExpression()
{
    // With a1 = b1 = c1 inferred (including the redundant reversed pair), rewriting an
    // expression over a1/b1 into the scope {c1} must substitute c1 for both.
    EqualityInference inference = new EqualityInference.Builder(METADATA)
        .addEquality(equals("a1", "b1"))
        .addEquality(equals("a1", "c1"))
        .addEquality(equals("c1", "a1"))
        .build();
    RowExpression expression = inference.rewriteExpression(someExpression("a1", "b1"), matchesVariables("c1"));
    assertEquals(expression, someExpression("c1", "c1"));
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.