focal_method
stringlengths 13
60.9k
| test_case
stringlengths 25
109k
|
|---|---|
@Override
public Long createPost(PostSaveReqVO createReqVO) {
    // Validate name/code uniqueness first; null id signals a creation check.
    validatePostForCreateOrUpdate(null, createReqVO.getName(), createReqVO.getCode());
    // Map the request VO onto the persistence entity and insert it.
    PostDO entity = BeanUtils.toBean(createReqVO, PostDO.class);
    postMapper.insert(entity);
    // The mapper populates the generated primary key on insert.
    return entity.getId();
}
|
@Test
public void testCreatePost_success() {
    // Build a random request with a valid status; clear the id so the DB generates one.
    PostSaveReqVO request = randomPojo(PostSaveReqVO.class,
            o -> o.setStatus(randomEle(CommonStatusEnum.values()).getStatus()))
            .setId(null);
    // Execute the creation.
    Long postId = postService.createPost(request);
    // A generated id must be returned.
    assertNotNull(postId);
    // The persisted row must mirror the request (id excluded: it is DB-generated).
    PostDO saved = postMapper.selectById(postId);
    assertPojoEquals(request, saved, "id");
}
|
public void updateConfig(DynamicConfigEvent event) {
    // Only react to events addressed to this resolver's configuration; the
    // post-update hook runs only when the default-mode update reports success.
    // Short-circuit evaluation preserves the original guard-then-update order.
    if (isTargetConfig(event) && updateWithDefaultMode(event)) {
        afterUpdateConfig();
    }
}
|
@Test
public void testUpdateGraceConfig() {
    // Stub the static plugin-config lookup so the resolver starts from a default GraceConfig.
    try (MockedStatic<PluginConfigManager> pluginConfigManagerMockedStatic = Mockito.mockStatic(PluginConfigManager.class)) {
        pluginConfigManagerMockedStatic.when(() -> PluginConfigManager.getPluginConfig(GraceConfig.class))
                .thenReturn(new GraceConfig());
        RegistryConfigResolver configResolver = new GraceConfigResolver();
        // First event: a full YAML rule set overriding every grace setting.
        final DynamicConfigEvent event = Mockito.mock(DynamicConfigEvent.class);
        Mockito.when(event.getContent()).thenReturn("rule:\n"
                + " enableSpring: true # SpringCloud Elegant On/Off Line Switch\n"
                + " startDelayTime: 20 # Elegant up/down line start-up delay time, in S\n"
                + " enableWarmUp: true # Specifies whether to enable preheating\n"
                + " warmUpTime: 1200 # Warm-up time, in S\n"
                + " enableGraceShutdown: false # Whether to enable elegant offline\n"
                + " shutdownWaitTime: 300 # The maximum wait time for correlation traffic detection before closing"
                + ", in S. enabledGraceShutdown must be enabled for it to take effect\n"
                + " enableOfflineNotify: true # Specifies whether to enable offline notifications\n"
                + " httpServerPort: 26688 # Enable the HTTP server port for active notifications when you go offline\n"
                + " upstreamAddressMaxSize: 5000 # The default size of the cache upstream address\n"
                + " upstreamAddressExpiredTime: 600 # The expiration time of the upstream address of the cache");
        Mockito.when(event.getKey()).thenReturn("sermant.agent.grace");
        configResolver.updateConfig(event);
        // Every value from the YAML must now be reflected in the resolved config.
        final GraceConfig graceConfig = config(configResolver, GraceConfig.class);
        Assert.assertTrue(graceConfig.isEnableSpring());
        Assert.assertEquals(graceConfig.getStartDelayTime(), TEST_START_DELAY_TIME);
        Assert.assertTrue(graceConfig.isEnableWarmUp());
        Assert.assertEquals(graceConfig.getWarmUpTime(), TEST_WARM_UP_TIME);
        Assert.assertFalse(graceConfig.isEnableGraceShutdown());
        Assert.assertEquals(graceConfig.getShutdownWaitTime(), TEST_SHUTDOWN_WAIT_TIME);
        Assert.assertTrue(graceConfig.isEnableOfflineNotify());
        Assert.assertEquals(graceConfig.getHttpServerPort(), TEST_HTTP_SERVER_PORT);
        Assert.assertEquals(graceConfig.getUpstreamAddressMaxSize(), TEST_UPSTREAM_ADDRESS_MAXSIZE);
        Assert.assertEquals(graceConfig.getUpstreamAddressExpiredTime(), TEST_UPSTREAM_ADDRESS_EXPIRED_TIME);
        // Second event: a minimal rule set — the omitted shutdownWaitTime must fall
        // back to its default value.
        Mockito.when(event.getContent()).thenReturn("rule:\n"
                + " enableSpring: true # SpringCloud Elegant On/Off Line Switch");
        configResolver.updateConfig(event);
        final GraceConfig config = config(configResolver, GraceConfig.class);
        Assert.assertEquals(config.getShutdownWaitTime(), TEST_DEFAULT_SHUTDOWN_WAIT_TIME);
    }
}
|
/**
 * Compares the primary-index cache expire seconds reported by a backend against
 * the value currently configured on the FE side, and issues a metadata-update
 * task to the backend for every tablet whose reported value is stale.
 *
 * @param backendId      id of the reporting backend
 * @param backendTablets tablets reported by that backend, keyed by tablet id
 */
private static void handleSetPrimaryIndexCacheExpireSec(long backendId, Map<Long, TTablet> backendTablets) {
    List<Pair<Long, Integer>> tabletToPrimaryCacheExpireSec = Lists.newArrayList();
    TabletInvertedIndex invertedIndex = GlobalStateMgr.getCurrentState().getTabletInvertedIndex();
    for (TTablet backendTablet : backendTablets.values()) {
        for (TTabletInfo tabletInfo : backendTablet.tablet_infos) {
            // Only consider tablets that actually report the setting.
            if (!tabletInfo.isSetPrimary_index_cache_expire_sec()) {
                continue;
            }
            long tabletId = tabletInfo.getTablet_id();
            int bePrimaryIndexCacheExpireSec = tabletInfo.primary_index_cache_expire_sec;
            // Resolve the owning db/table via the inverted index; a missing meta
            // yields NOT_EXIST_VALUE, which makes the db lookup fail and skips the tablet.
            TabletMeta tabletMeta = invertedIndex.getTabletMeta(tabletId);
            long dbId = tabletMeta != null ? tabletMeta.getDbId() : TabletInvertedIndex.NOT_EXIST_VALUE;
            long tableId = tabletMeta != null ? tabletMeta.getTableId() : TabletInvertedIndex.NOT_EXIST_VALUE;
            Database db = GlobalStateMgr.getCurrentState().getDb(dbId);
            if (db == null) {
                continue;
            }
            OlapTable olapTable = (OlapTable) db.getTable(tableId);
            if (olapTable == null) {
                continue;
            }
            // Read the FE-side value under a table READ lock; unlock is guaranteed
            // by the finally block.
            Locker locker = new Locker();
            locker.lockTablesWithIntensiveDbLock(db, Lists.newArrayList(olapTable.getId()), LockType.READ);
            try {
                int fePrimaryIndexCacheExpireSec = olapTable.primaryIndexCacheExpireSec();
                if (bePrimaryIndexCacheExpireSec != fePrimaryIndexCacheExpireSec) {
                    tabletToPrimaryCacheExpireSec.add(new Pair<>(tabletId, fePrimaryIndexCacheExpireSec));
                }
            } finally {
                locker.unLockTablesWithIntensiveDbLock(db, Lists.newArrayList(olapTable.getId()), LockType.READ);
            }
        }
    }
    if (!tabletToPrimaryCacheExpireSec.isEmpty()) {
        LOG.info("find [{}] tablet(s) which need to be set primary index cache expire sec",
                tabletToPrimaryCacheExpireSec.size());
        // Batch all stale tablets into a single update task targeted at this backend.
        AgentBatchTask batchTask = new AgentBatchTask();
        TabletMetadataUpdateAgentTask task = TabletMetadataUpdateAgentTaskFactory
                .createPrimaryIndexCacheExpireTimeUpdateTask(backendId, tabletToPrimaryCacheExpireSec);
        batchTask.addTask(task);
        // Unit tests exercise only the collection logic; skip actual dispatch.
        if (!FeConstants.runningUnitTest) {
            AgentTaskExecutor.submit(batchTask);
        }
    }
}
|
@Test
public void testHandleSetPrimaryIndexCacheExpireSec() {
    // Look up the database/table created by the suite's fixture.
    Database db = GlobalStateMgr.getCurrentState().getDb("test");
    long dbId = db.getId();
    OlapTable olapTable = (OlapTable) db.getTable("primary_index_cache_expire_sec_test");
    long backendId = 10001L;
    // The backend must already own tablets for the scenario to be meaningful.
    List<Long> tabletIds = GlobalStateMgr.getCurrentState().getTabletInvertedIndex().getTabletIdsByBackendId(10001);
    Assert.assertFalse(tabletIds.isEmpty());
    // Build a fake BE report where every tablet carries a 7200s expire value.
    List<TTabletInfo> infos = Lists.newArrayList();
    TTablet reportedTablet = new TTablet(infos);
    for (Long tabletId : tabletIds) {
        TTabletInfo info = new TTabletInfo();
        info.setTablet_id(tabletId);
        info.setSchema_hash(60000);
        info.setPrimary_index_cache_expire_sec(7200);
        reportedTablet.tablet_infos.add(info);
    }
    Map<Long, TTablet> backendTablets = new HashMap<>();
    backendTablets.put(backendId, reportedTablet);
    // Exercise the handler through its test entry point.
    ReportHandler handler = new ReportHandler();
    handler.testHandleSetPrimaryIndexCacheExpireSec(backendId, backendTablets);
}
|
protected URI getOrigin(final Path container, final Distribution.Method method) throws BackgroundException {
    // Origin is the container's virtual-host-style endpoint on the bookmark's
    // default hostname, e.g. http://bucket.s3.amazonaws.com
    final String hostname = bookmark.getProtocol().getDefaultHostname();
    return URI.create("http://" + container.getName() + "." + hostname);
}
|
@Test
public void testGetOrigin() throws Exception {
    final S3Session session = new S3Session(new Host(new S3Protocol(), new S3Protocol().getDefaultHostname()));
    final CloudFrontDistributionConfiguration configuration
            = new CloudFrontDistributionConfiguration(session, new S3LocationFeature(session), new DisabledX509TrustManager(), new DefaultX509KeyManager());
    // The origin host must be the bucket name prefixed to the S3 default endpoint.
    final Path container = new Path("/bbb", EnumSet.of(Path.Type.directory, Path.Type.volume));
    assertEquals("bbb.s3.amazonaws.com",
            configuration.getOrigin(container, Distribution.DOWNLOAD).getHost());
}
|
@Override
public @NonNull LiteralCommandNode<S> createNode(
        final @NonNull String label,
        final @NonNull CommandNode<C> cloudCommand,
        final @NonNull Command<S> executor,
        final @NonNull BrigadierPermissionChecker<C> permissionChecker
) {
    // Build the literal root, gated by the cloud command's permission requirement.
    final LiteralArgumentBuilder<S> builder = LiteralArgumentBuilder
            .<S>literal(label)
            .requires(this.requirement(cloudCommand, permissionChecker));
    this.updateExecutes(builder, cloudCommand, executor);
    final LiteralCommandNode<S> root = builder.build();
    // Recursively translate every cloud child into a Brigadier child node.
    for (final CommandNode<C> child : cloudCommand.children()) {
        root.addChild(this.constructCommandNode(child, permissionChecker, executor).build());
    }
    return root;
}
|
@Test
void testSimple() throws Exception {
    // Arrange: a cloud command with a literal, a bounded required integer and an
    // optional greedy string argument with a fixed suggestion provider.
    final Command<Object> command = this.commandManager.commandBuilder("command")
            .literal("literal")
            .required("integer", integerParser(0, 10))
            .optional("string", greedyStringParser(),
                    org.incendo.cloud.suggestion.SuggestionProvider.suggesting(Arrays.asList(
                            Suggestion.suggestion("some"),
                            Suggestion.suggestion("suggestions")
                    ))
            ).build();
    this.commandManager.command(command);
    final com.mojang.brigadier.Command<Object> brigadierCommand = ctx -> 0;
    // Act
    final LiteralCommandNode<Object> commandNode = this.literalBrigadierNodeFactory.createNode(
            "command",
            command,
            brigadierCommand
    );
    this.dispatcher.getRoot().addChild(commandNode);
    // Assert: the root literal node has the expected shape and no executor of its own.
    assertThat(commandNode).isNotNull();
    assertThat(commandNode.getLiteral()).isEqualTo("command");
    assertThat(commandNode.isValidInput("command")).isTrue();
    assertThat(commandNode.getChildren()).hasSize(1);
    assertThat(commandNode.getCommand()).isNull();
    assertThat(commandNode.getChild("literal")).isNotNull();
    assertThat(commandNode.getChild("literal")).isInstanceOf(LiteralCommandNode.class);
    assertThat(commandNode.getChild("literal").getChildren()).hasSize(1);
    assertThat(commandNode.getCommand()).isNull();
    // The required integer argument: correct name, type, bounds and one child.
    assertThat(commandNode.getChild("literal").getChild("integer")).isNotNull();
    assertThat(commandNode.getChild("literal").getChild("integer")).isInstanceOf(ArgumentCommandNode.class);
    final ArgumentCommandNode<Object, Integer> integerArgument = (ArgumentCommandNode<Object, Integer>)
            commandNode.getChild("literal").getChild("integer");
    assertThat(integerArgument.getName()).isEqualTo("integer");
    assertThat(integerArgument.getType()).isInstanceOf(IntegerArgumentType.class);
    assertThat(integerArgument.getType()).isEqualTo(IntegerArgumentType.integer(0, 10));
    assertThat(integerArgument.getChildren()).hasSize(1);
    assertThat(integerArgument.getCommand()).isEqualTo(brigadierCommand); // Following is optional.
    // The optional greedy string argument terminates the chain and is executable.
    assertThat(integerArgument.getChild("string")).isNotNull();
    assertThat(integerArgument.getChild("string")).isInstanceOf(ArgumentCommandNode.class);
    final ArgumentCommandNode<Object, String> stringArgument = (ArgumentCommandNode<Object, String>)
            integerArgument.getChild("string");
    assertThat(stringArgument.getName()).isEqualTo("string");
    assertThat(stringArgument.getType()).isInstanceOf(StringArgumentType.class);
    assertThat(((StringArgumentType) stringArgument.getType()).getType())
            .isEqualTo(StringArgumentType.StringType.GREEDY_PHRASE);
    assertThat(stringArgument.getChildren()).isEmpty();
    assertThat(stringArgument.getCommand()).isEqualTo(brigadierCommand);
    assertThat(stringArgument.getCustomSuggestions()).isInstanceOf(CloudDelegatingSuggestionProvider.class);
    // The delegating provider must surface the cloud-side suggestions to Brigadier.
    final String suggestionString = "command literal 9 ";
    final SuggestionProvider<Object> suggestionProvider = stringArgument.getCustomSuggestions();
    final Suggestions suggestions = suggestionProvider.getSuggestions(
            this.dispatcher.parse(suggestionString, new Object()).getContext().build(suggestionString),
            new SuggestionsBuilder(suggestionString, suggestionString.length())
    ).get();
    assertThat(suggestions.getList().stream().map(com.mojang.brigadier.suggestion.Suggestion::getText))
            .containsExactly("some", "suggestions");
}
|
/**
 * Returns the simple name of the given reflective field.
 *
 * @param field the field to inspect; may be {@code null}
 * @return the field's name, or {@code null} when no field was supplied
 */
public static String getFieldName(Field field) {
    // Null-safe accessor: a missing field simply yields no name.
    return field == null ? null : field.getName();
}
|
@Test
public void getFieldNameTest() {
    // A field declared on the subclass itself must resolve to a non-null name.
    Field ownField = ReflectUtil.getField(TestSubClass.class, "privateField");
    String ownFieldName = ReflectUtil.getFieldName(ownField);
    Assert.assertNotNull(ownFieldName);
    // Likewise for a field only present on the subclass level.
    Field declaredField = ReflectUtil.getField(TestSubClass.class, "subField");
    String declaredFieldName = ReflectUtil.getFieldName(declaredField);
    Assert.assertNotNull(declaredFieldName);
}
|
/**
 * Parses a Blue Ocean URL path into its recognised {@link UrlPart} tokens.
 * <p>
 * Returns {@code null} unless the first path token is "blue" (case-insensitive).
 * Otherwise returns a tokenizer holding whichever parts were recognised — a bare
 * "/blue" yields a tokenizer with no parts.
 */
public static @CheckForNull
BlueUrlTokenizer parse(@NonNull String url) {
    Iterator<String> urlTokens = extractTokens(url);
    //
    // Yes, the following code is quite ugly, but it's easy enough to understand atm.
    // Unless this gets a lot more detailed, please don't get super clever ideas about using
    // some fancy-pants abstractions/patterns/3rd-party-libs for parsing the URL that, while
    // might make the code look neater structurally, also makes the code logic a lot harder
    // to follow (without using a debugger).
    //
    if (urlTokens.hasNext()) {
        if (urlTokens.next().equalsIgnoreCase("blue")) {
            BlueUrlTokenizer blueUrlTokenizer = new BlueUrlTokenizer();
            if (urlTokens.hasNext()) {
                String next = urlTokens.next();
                if (next.equalsIgnoreCase("pipelines")) {
                    // i.e. /blue/pipelines/
                    blueUrlTokenizer.addPart(UrlPart.DASHBOARD_PIPELINES, next);
                } else if (next.equalsIgnoreCase("organizations")) {
                    // i.e. /blue/organizations/...
                    if (urlTokens.hasNext()) {
                        // e.g. /blue/organizations/jenkins/...
                        blueUrlTokenizer.addPart(UrlPart.ORGANIZATION, urlTokens.next());
                        if (urlTokens.hasNext()) {
                            // e.g. /blue/organizations/jenkins/f1%2Ff3%20with%20spaces%2Ff3%20pipeline/...
                            // Pipeline names arrive URL-encoded and must be decoded.
                            blueUrlTokenizer.addPart(UrlPart.PIPELINE, urlDecode(urlTokens.next()));
                            if (urlTokens.hasNext()) {
                                next = urlTokens.next();
                                if (next.equalsIgnoreCase("detail")) {
                                    // e.g. /blue/organizations/jenkins/f1%2Ff3%20with%20spaces%2Ff3%20pipeline/detail/...
                                    blueUrlTokenizer.addPart(UrlPart.PIPELINE_RUN_DETAIL, next);
                                    if (urlTokens.hasNext()) {
                                        // e.g. /blue/organizations/jenkins/f1%2Ff3%20with%20spaces%2Ff3%20pipeline/detail/magic-branch-X/...
                                        blueUrlTokenizer.addPart(UrlPart.BRANCH, urlDecode(urlTokens.next()));
                                        if (urlTokens.hasNext()) {
                                            // e.g. /blue/organizations/jenkins/f1%2Ff3%20with%20spaces%2Ff3%20pipeline/detail/magic-branch-X/55/...
                                            blueUrlTokenizer.addPart(UrlPart.PIPELINE_RUN_DETAIL_ID, urlDecode(urlTokens.next()));
                                            if (urlTokens.hasNext()) {
                                                next = urlTokens.next();
                                                // Only record the tab when it is one of the known run-detail tabs.
                                                if (PIPELINE_RUN_DETAIL_TABS.contains(next.toLowerCase())) {
                                                    // e.g. /blue/organizations/jenkins/f1%2Ff3%20with%20spaces%2Ff3%20pipeline/detail/magic-branch-X/55/pipeline
                                                    blueUrlTokenizer.addPart(UrlPart.PIPELINE_RUN_DETAIL_TAB, next.toLowerCase());
                                                }
                                            }
                                        }
                                    }
                                } else if (PIPELINE_TABS.contains(next.toLowerCase())) {
                                    // e.g. /blue/organizations/jenkins/f1%2Ff3%20with%20spaces%2Ff3%20pipeline/activity/
                                    blueUrlTokenizer.addPart(UrlPart.PIPELINE_TAB, next.toLowerCase());
                                }
                            }
                        }
                    }
                }
            }
            return blueUrlTokenizer;
        }
    }
    // Not a Blue Ocean URL.
    return null;
}
|
@Test
public void test_MalformedURLException() {
    // A path that does not start with the "blue" token must not tokenize.
    BlueUrlTokenizer tokenizer = BlueUrlTokenizer.parse("/a");
    Assert.assertNull(tokenizer);
}
|
/**
 * Returns a snapshot of the number of failed getClusterInfo retrievals.
 */
public int getClusterInfoFailedRetrieved() {
    return this.numGetClusterInfoFailedRetrieved.value();
}
|
@Test
public void testGetClusterInfoRetrievedFailed() {
    // Record the current failure count, trigger one failure, and expect the
    // metric to have advanced by exactly one.
    long failedBefore = metrics.getClusterInfoFailedRetrieved();
    badSubCluster.getClusterInfoFailed();
    Assert.assertEquals(failedBefore + 1, metrics.getClusterInfoFailedRetrieved());
}
|
@Override
public Optional<NativeEntity<Collector>> findExisting(Entity entity, Map<String, ValueReference> parameters) {
    // Guard: this facade only understands V1 content-pack entities.
    if (!(entity instanceof EntityV1)) {
        throw new IllegalArgumentException("Unsupported entity version: " + entity.getClass());
    }
    return findExisting((EntityV1) entity, parameters);
}
|
@Test
@MongoDBFixtures("SidecarCollectorFacadeTest.json")
public void findExisting() {
    // Content-pack entity describing the "filebeat" collector seeded by the fixture.
    final Entity entity = EntityV1.builder()
            .id(ModelId.of("0"))
            .type(ModelTypes.SIDECAR_COLLECTOR_V1)
            .data(objectMapper.convertValue(SidecarCollectorEntity.create(
                    ValueReference.of("filebeat"),
                    ValueReference.of("exec"),
                    ValueReference.of("linux"),
                    ValueReference.of("/usr/lib/graylog-sidecar/filebeat"),
                    ValueReference.of("-c %s"),
                    ValueReference.of("test config -c %s"),
                    ValueReference.of("")), JsonNode.class))
            .build();
    // The facade must resolve the already-installed collector rather than report absence.
    final NativeEntity<Collector> existingCollector = facade.findExisting(entity, Collections.emptyMap())
            .orElseThrow(AssertionError::new);
    final Collector collector = collectorService.findByName("filebeat");
    assertThat(collector).isNotNull();
    // Descriptor must link the entity id to the stored collector.
    // NOTE(review): the trailing boolean's meaning is defined by
    // NativeEntityDescriptor.create — confirm against its signature when editing.
    final NativeEntityDescriptor expectedDescriptor = NativeEntityDescriptor.create(entity.id(), collector.id(), ModelTypes.SIDECAR_COLLECTOR_V1, collector.name(), false);
    assertThat(existingCollector.descriptor()).isEqualTo(expectedDescriptor);
    assertThat(existingCollector.entity()).isEqualTo(collector);
}
|
@Override
public GetApplicationReportResponse getApplicationReport(
    GetApplicationReportRequest request) throws YarnException, IOException {
    final ApplicationId applicationId = request.getApplicationId();
    try {
        // Wrap the stored application report in a protocol response.
        return GetApplicationReportResponse.newInstance(
                history.getApplication(applicationId));
    } catch (IOException e) {
        // Log before propagating so the server-side log captures the failure.
        LOG.error(e.getMessage(), e);
        throw e;
    }
}
|
@Test
void testApplicationReport() throws IOException, YarnException {
    // Request the report of the application seeded by the test setup.
    // (The original dead `appId = null` initialization was removed.)
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    GetApplicationReportRequest request =
        GetApplicationReportRequest.newInstance(appId);
    GetApplicationReportResponse response =
        clientService.getApplicationReport(request);
    ApplicationReport appReport = response.getApplicationReport();
    assertNotNull(appReport);
    // Resource usage must match the values stored by the fixture.
    assertEquals(123, appReport.getApplicationResourceUsageReport()
        .getMemorySeconds());
    assertEquals(345, appReport.getApplicationResourceUsageReport()
        .getVcoreSeconds());
    assertEquals("application_0_0001", appReport.getApplicationId()
        .toString());
    assertEquals("test app type",
        appReport.getApplicationType().toString());
    assertEquals("test queue", appReport.getQueue().toString());
}
|
/**
 * Maps a SeaTunnel column definition back onto a DB2 physical type definition,
 * clamping precision, scale and length values into DB2's supported ranges and
 * logging a warning whenever a value had to be adjusted.
 *
 * @param column the SeaTunnel column to convert
 * @return the DB2 type definition for the column
 * @throws RuntimeException (via CommonError) for SQL types DB2 cannot represent
 */
@Override
public BasicTypeDefine reconvert(Column column) {
    BasicTypeDefine.BasicTypeDefineBuilder builder =
            BasicTypeDefine.builder()
                    .name(column.getName())
                    .nullable(column.isNullable())
                    .comment(column.getComment())
                    .defaultValue(column.getDefaultValue());
    switch (column.getDataType().getSqlType()) {
        case BOOLEAN:
            builder.columnType(DB2_BOOLEAN);
            builder.dataType(DB2_BOOLEAN);
            break;
        case TINYINT:
        case SMALLINT:
            // Both map to SMALLINT: TINYINT gets widened.
            builder.columnType(DB2_SMALLINT);
            builder.dataType(DB2_SMALLINT);
            break;
        case INT:
            builder.columnType(DB2_INT);
            builder.dataType(DB2_INT);
            break;
        case BIGINT:
            builder.columnType(DB2_BIGINT);
            builder.dataType(DB2_BIGINT);
            break;
        case FLOAT:
            builder.columnType(DB2_REAL);
            builder.dataType(DB2_REAL);
            break;
        case DOUBLE:
            builder.columnType(DB2_DOUBLE);
            builder.dataType(DB2_DOUBLE);
            break;
        case DECIMAL:
            // Clamp precision first (non-positive -> defaults; too large -> max,
            // shrinking scale by the same amount), then clamp scale independently.
            DecimalType decimalType = (DecimalType) column.getDataType();
            long precision = decimalType.getPrecision();
            int scale = decimalType.getScale();
            if (precision <= 0) {
                precision = DEFAULT_PRECISION;
                scale = DEFAULT_SCALE;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which is precision less than 0, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        precision,
                        scale);
            } else if (precision > MAX_PRECISION) {
                scale = (int) Math.max(0, scale - (precision - MAX_PRECISION));
                precision = MAX_PRECISION;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which exceeds the maximum precision of {}, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        MAX_PRECISION,
                        precision,
                        scale);
            }
            if (scale < 0) {
                scale = 0;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which is scale less than 0, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        precision,
                        scale);
            } else if (scale > MAX_SCALE) {
                scale = MAX_SCALE;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which exceeds the maximum scale of {}, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        MAX_SCALE,
                        precision,
                        scale);
            }
            builder.columnType(String.format("%s(%s,%s)", DB2_DECIMAL, precision, scale));
            builder.dataType(DB2_DECIMAL);
            builder.precision(precision);
            builder.scale(scale);
            break;
        case BYTES:
            // Pick the narrowest binary type that fits: VARBINARY(max) for
            // unknown length, BINARY/VARBINARY for bounded lengths, BLOB otherwise.
            if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
                builder.columnType(
                        String.format("%s(%s)", DB2_VARBINARY, MAX_VARBINARY_LENGTH));
                builder.dataType(DB2_VARBINARY);
                // NOTE(review): length is set from the original (null or <= 0)
                // column length here, not MAX_VARBINARY_LENGTH — confirm intended.
                builder.length(column.getColumnLength());
            } else if (column.getColumnLength() <= MAX_BINARY_LENGTH) {
                builder.columnType(
                        String.format("%s(%s)", DB2_BINARY, column.getColumnLength()));
                builder.dataType(DB2_BINARY);
                builder.length(column.getColumnLength());
            } else if (column.getColumnLength() <= MAX_VARBINARY_LENGTH) {
                builder.columnType(
                        String.format("%s(%s)", DB2_VARBINARY, column.getColumnLength()));
                builder.dataType(DB2_VARBINARY);
                builder.length(column.getColumnLength());
            } else {
                long length = column.getColumnLength();
                if (length > MAX_BLOB_LENGTH) {
                    length = MAX_BLOB_LENGTH;
                    log.warn(
                            "The length of blob type {} is out of range, "
                                    + "it will be converted to {}({})",
                            column.getName(),
                            DB2_BLOB,
                            length);
                }
                builder.columnType(String.format("%s(%s)", DB2_BLOB, length));
                builder.dataType(DB2_BLOB);
                builder.length(length);
            }
            break;
        case STRING:
            // Same narrowing strategy for character data: VARCHAR(max) for
            // unknown length, CHAR/VARCHAR for bounded lengths, CLOB otherwise.
            if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
                builder.columnType(String.format("%s(%s)", DB2_VARCHAR, MAX_VARCHAR_LENGTH));
                builder.dataType(DB2_VARCHAR);
                // NOTE(review): as in the BYTES branch, length keeps the original
                // (null or <= 0) column length — confirm intended.
                builder.length(column.getColumnLength());
            } else if (column.getColumnLength() <= MAX_CHAR_LENGTH) {
                builder.columnType(String.format("%s(%s)", DB2_CHAR, column.getColumnLength()));
                builder.dataType(DB2_CHAR);
                builder.length(column.getColumnLength());
            } else if (column.getColumnLength() <= MAX_VARCHAR_LENGTH) {
                builder.columnType(
                        String.format("%s(%s)", DB2_VARCHAR, column.getColumnLength()));
                builder.dataType(DB2_VARCHAR);
                builder.length(column.getColumnLength());
            } else {
                long length = column.getColumnLength();
                if (length > MAX_CLOB_LENGTH) {
                    length = MAX_CLOB_LENGTH;
                    log.warn(
                            "The length of clob type {} is out of range, "
                                    + "it will be converted to {}({})",
                            column.getName(),
                            DB2_CLOB,
                            length);
                }
                builder.columnType(String.format("%s(%s)", DB2_CLOB, length));
                builder.dataType(DB2_CLOB);
                builder.length(length);
            }
            break;
        case DATE:
            builder.columnType(DB2_DATE);
            builder.dataType(DB2_DATE);
            break;
        case TIME:
            builder.columnType(DB2_TIME);
            builder.dataType(DB2_TIME);
            break;
        case TIMESTAMP:
            // Scale is only emitted when explicitly positive, clamped to DB2's maximum.
            if (column.getScale() != null && column.getScale() > 0) {
                int timestampScale = column.getScale();
                if (column.getScale() > MAX_TIMESTAMP_SCALE) {
                    timestampScale = MAX_TIMESTAMP_SCALE;
                    log.warn(
                            "The timestamp column {} type timestamp({}) is out of range, "
                                    + "which exceeds the maximum scale of {}, "
                                    + "it will be converted to timestamp({})",
                            column.getName(),
                            column.getScale(),
                            MAX_TIMESTAMP_SCALE,
                            timestampScale);
                }
                builder.columnType(String.format("%s(%s)", DB2_TIMESTAMP, timestampScale));
                builder.scale(timestampScale);
            } else {
                builder.columnType(DB2_TIMESTAMP);
            }
            builder.dataType(DB2_TIMESTAMP);
            break;
        default:
            // Any other SQL type has no DB2 representation.
            throw CommonError.convertToConnectorTypeError(
                    DatabaseIdentifier.DB_2,
                    column.getDataType().getSqlType().name(),
                    column.getName());
    }
    return builder.build();
}
|
@Test
public void testReconvertBoolean() {
    // A SeaTunnel BOOLEAN column must map onto DB2's BOOLEAN type verbatim.
    Column booleanColumn =
            PhysicalColumn.builder().name("test").dataType(BasicType.BOOLEAN_TYPE).build();
    BasicTypeDefine define = DB2TypeConverter.INSTANCE.reconvert(booleanColumn);
    Assertions.assertEquals(booleanColumn.getName(), define.getName());
    Assertions.assertEquals(DB2TypeConverter.DB2_BOOLEAN, define.getColumnType());
    Assertions.assertEquals(DB2TypeConverter.DB2_BOOLEAN, define.getDataType());
}
|
@RequiresApi(Build.VERSION_CODES.R)
@Override
public boolean onInlineSuggestionsResponse(@NonNull InlineSuggestionsResponse response) {
    // Show the suggestion strip only when the autofill service supplied suggestions.
    // Idiom fix: use isEmpty() consistently instead of mixing size() > 0 with !isEmpty().
    final List<InlineSuggestion> inlineSuggestions = response.getInlineSuggestions();
    final boolean hasSuggestions = !inlineSuggestions.isEmpty();
    if (hasSuggestions) {
        mInlineSuggestionAction.onNewSuggestions(inlineSuggestions);
        getInputViewContainer().addStripAction(mInlineSuggestionAction, true);
        getInputViewContainer().setActionsStripVisibility(true);
    }
    // Reporting true tells the IME framework the suggestions were consumed.
    return hasSuggestions;
}
|
@Test
public void testClosesInlineSuggestionsOnPick() {
    simulateOnStartInputFlow();
    // Deliver two mocked inline suggestion views to the keyboard.
    var inlineView1 = Mockito.mock(InlineContentView.class);
    var inlineView2 = Mockito.mock(InlineContentView.class);
    mAnySoftKeyboardUnderTest.onInlineSuggestionsResponse(mockResponse(inlineView1, inlineView2));
    // Tap the strip root to expand the suggestions list.
    var rootView =
            mAnySoftKeyboardUnderTest
                    .getInputViewContainer()
                    .findViewById(R.id.inline_suggestions_strip_root);
    Shadows.shadowOf(rootView).getOnClickListener().onClick(rootView);
    var lister =
            (ScrollViewAsMainChild)
                    mAnySoftKeyboardUnderTest
                            .getInputViewContainer()
                            .findViewById(R.id.inline_suggestions_list);
    // While the list is expanded the keyboard view is hidden and both
    // suggestions appear as list items.
    Assert.assertEquals(
            View.GONE, ((View) mAnySoftKeyboardUnderTest.getInputView()).getVisibility());
    Assert.assertEquals(2, lister.getItemsCount());
    // Each suggestion view must receive a click handler (the pick action).
    Mockito.verify(inlineView1).setOnClickListener(Mockito.notNull());
    Mockito.verify(inlineView2).setOnClickListener(Mockito.notNull());
    /*due to inability to test InlineContentView, I have to remove the following checks*/
    // clickCaptor.getValue().onClick(inlineView1);
    //
    // Assert.assertEquals(0, lister.getChildCount());
    // Assert.assertEquals(
    //         View.VISIBLE, ((View)
    // mAnySoftKeyboardUnderTest.getInputView()).getVisibility());
    // Assert.assertNull(
    //         mAnySoftKeyboardUnderTest
    //                 .getInputViewContainer()
    //                 .findViewById(R.id.inline_suggestions_scroller));
}
|
/**
 * Builds the Kafka producer properties for the data generator: fixed client id,
 * the caller-supplied bootstrap server and schema registry URL, plus any extra
 * properties loaded from an optional properties file.
 */
static Properties getProperties(final Arguments arguments) throws IOException {
    final Properties props = new Properties();
    // Base settings; independent keys, so insertion order is irrelevant.
    props.put("client.id", "KSQLDataGenProducer");
    props.put("bootstrap.servers", arguments.bootstrapServer);
    props.put(KsqlConfig.SCHEMA_REGISTRY_URL_PROPERTY, arguments.schemaRegistryUrl);
    // Entries from the optional file override the defaults above for identical keys.
    if (arguments.propertiesFile != null) {
        props.load(arguments.propertiesFile);
    }
    return props;
}
|
@Test
public void shouldPassSchemaRegistryUrl() throws Exception {
    // Construct Arguments positionally with only the bootstrap server,
    // topic/key, counts and the schema registry url set; the rest are
    // nulls/defaults. NOTE(review): the positional meanings are defined by the
    // DataGen.Arguments constructor — verify against it when editing.
    final DataGen.Arguments args = new DataGen.Arguments(
        false,
        "bootstrap",
        null,
        null,
        null,
        null,
        "topic",
        "key",
        null,
        0,
        "srUrl",
        null,
        1,
        -1,
        true
    );
    final Properties props = DataGen.getProperties(args);
    // The schema registry url must be copied through into the producer properties.
    assertThat(props.getProperty(KsqlConfig.SCHEMA_REGISTRY_URL_PROPERTY), equalTo("srUrl"));
}
|
/**
 * Opens the bundled NOTICE file as a classpath resource stream.
 * <p>
 * The resource is a packaging invariant; a missing resource now fails fast with
 * a descriptive {@link NullPointerException} instead of relying on a JVM
 * {@code assert}, which is disabled unless {@code -ea} is set and would
 * otherwise let a {@code null} escape to the caller.
 *
 * @return a non-null stream over the NOTICE resource; callers should close it
 */
public static InputStream getNoticeStream() {
    InputStream res = FileUtils.getInputStreamForClasspathFile(NOTICE_RESOURCE);
    return java.util.Objects.requireNonNull(
            res, "NOTICE resource missing from classpath: " + NOTICE_RESOURCE);
}
|
@Test
public void getNoticeStreamTest() throws IOException {
    // Contributors that the NOTICE file must credit verbatim on a line.
    Set<String> expectedStrings = Sets.newHashSet(
            "Florian Schmaus"
            , "Paul Schaub"
    );
    int maxLineLength = 0;
    try (InputStream inputStream = Smack.getNoticeStream()) {
        BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
        while (reader.ready()) {
            String line = reader.readLine();
            maxLineLength = Math.max(maxLineLength, line.length());
            // Tick off any expected contributor matched exactly by this line
            // (Set.remove uses equals, same effect as the removeIf predicate).
            expectedStrings.remove(line);
        }
    }
    // All contributors found, and every line stays within the 60-column limit.
    assertTrue(expectedStrings.isEmpty());
    assertTrue(maxLineLength < 60);
}
|
@VisibleForTesting
List<String> getFuseInfo() {
    // Exposes the collected fuse info lines for inspection in tests.
    return this.mFuseInfo;
}
|
@Test
public void UnderFileSystemS3() {
    // An s3:// under-file-system address must be reported as "s3" in the fuse info.
    try (FuseUpdateChecker checker = getUpdateCheckerWithUfs("s3://alluxio-test/")) {
        Assert.assertTrue(containsTargetInfo(checker.getFuseInfo(), "s3"));
    }
}
|
@Override
public BeamSqlTable buildBeamSqlTable(Table table) {
    // Wrap the table definition in a BigQuery-backed SQL table, deriving the
    // conversion options from the table's declared properties.
    return new BigQueryTable(table, getConversionOptions(table.getProperties()));
}
|
@Test
public void testRuntimeExceptionThrown_whenAnInvalidPropertyIsSpecified() {
    // An unrecognized write-method property value must make table construction fail.
    Table table = fakeTableWithProperties("hello", "{" + METHOD_PROPERTY + ": \"blahblah\" }");
    assertThrows(RuntimeException.class, () -> provider.buildBeamSqlTable(table));
}
|
@Override
public OAuth2CodeDO consumeAuthorizationCode(String code) {
    // An authorization code is single-use: look it up, validate it, then delete it.
    OAuth2CodeDO authCode = oauth2CodeMapper.selectByCode(code);
    if (authCode == null) {
        throw exception(OAUTH2_CODE_NOT_EXISTS);
    }
    if (DateUtils.isExpired(authCode.getExpiresTime())) {
        throw exception(OAUTH2_CODE_EXPIRE);
    }
    // Remove the row so the code cannot be replayed, then hand it back.
    oauth2CodeMapper.deleteById(authCode.getId());
    return authCode;
}
|
@Test
public void testConsumeAuthorizationCode_success() {
    String code = "test_code";
    // Seed a non-expired authorization code row.
    OAuth2CodeDO stored = randomPojo(OAuth2CodeDO.class).setCode(code)
            .setExpiresTime(LocalDateTime.now().plusDays(1));
    oauth2CodeMapper.insert(stored);
    // Consuming must return the row and delete it (single-use semantics).
    OAuth2CodeDO consumed = oauth2CodeService.consumeAuthorizationCode(code);
    assertPojoEquals(stored, consumed);
    assertNull(oauth2CodeMapper.selectByCode(code));
}
|
@Restricted(NoExternalUse.class)
public boolean supportIsDescendant() {
    // Base implementation opts out of descendant checks; subclasses override
    // this to return true when they can answer isDescendant queries.
    return false;
}
|
@Test
public void testSupportIsDescendant_AbstractBase() {
    // The minimal base implementation must opt out of descendant checks.
    VirtualFile file = new VirtualFileMinimalImplementation();
    assertFalse(file.supportIsDescendant());
}
|
/**
 * Maps uncaught exceptions from REST calls to HTTP responses.
 * <p>
 * Logging: full stack trace at debug, message-only at info, to keep logs quiet
 * for expected client errors.
 */
@Override
public Response toResponse(Throwable e) {
    if (log.isDebugEnabled()) {
        log.debug("Uncaught exception in REST call: ", e);
    } else if (log.isInfoEnabled()) {
        log.info("Uncaught exception in REST call: {}", e.getMessage());
    }
    // Branch order matters: InvalidTypeIdException is a subtype of
    // JsonMappingException and must be matched first so unknown-type payloads
    // map to 501 rather than the generic 400.
    if (e instanceof NotFoundException) {
        return buildResponse(Response.Status.NOT_FOUND, e);
    } else if (e instanceof InvalidRequestException) {
        return buildResponse(Response.Status.BAD_REQUEST, e);
    } else if (e instanceof InvalidTypeIdException) {
        return buildResponse(Response.Status.NOT_IMPLEMENTED, e);
    } else if (e instanceof JsonMappingException) {
        return buildResponse(Response.Status.BAD_REQUEST, e);
    } else if (e instanceof ClassNotFoundException) {
        return buildResponse(Response.Status.NOT_IMPLEMENTED, e);
    } else if (e instanceof SerializationException) {
        return buildResponse(Response.Status.BAD_REQUEST, e);
    } else if (e instanceof RequestConflictException) {
        return buildResponse(Response.Status.CONFLICT, e);
    } else {
        // Anything unclassified is a server-side failure.
        return buildResponse(Response.Status.INTERNAL_SERVER_ERROR, e);
    }
}
|
@Test
public void testToResponseSerializationException() {
    // SerializationException must map to 400 Bad Request.
    RestExceptionMapper mapper = new RestExceptionMapper();
    Response resp = mapper.toResponse(new SerializationException());
    // Fix: JUnit's assertEquals takes (expected, actual) — the original passed
    // the actual status first, which produces misleading failure messages.
    assertEquals(Response.Status.BAD_REQUEST.getStatusCode(), resp.getStatus());
}
|
/**
 * Imports a batch of configurations, resolving duplicates according to the
 * given policy: ABORT stops at the first conflict (remaining items reported as
 * skipped), SKIP ignores the conflicting item, OVERWRITE updates the existing
 * config. Returns a summary map with succCount/skipCount and, when non-empty,
 * failData/skipData detail lists.
 *
 * @throws NacosException on validation failure or any non-duplicate error
 */
@Override
public Map<String, Object> batchInsertOrUpdate(List<ConfigAllInfo> configInfoList, String srcUser, String srcIp,
        Map<String, Object> configAdvanceInfo, SameConfigPolicy policy) throws NacosException {
    int succCount = 0;
    int skipCount = 0;
    List<Map<String, String>> failData = null;
    List<Map<String, String>> skipData = null;
    // Callback handed to addConfigInfo: surface any asynchronous failure as a
    // runtime exception instead of swallowing it.
    final BiConsumer<Boolean, Throwable> callFinally = (result, t) -> {
        if (t != null) {
            throw new NacosRuntimeException(0, t);
        }
    };
    for (int i = 0; i < configInfoList.size(); i++) {
        ConfigAllInfo configInfo = configInfoList.get(i);
        // Validate the item eagerly; a malformed entry aborts the whole batch.
        try {
            ParamUtils.checkParam(configInfo.getDataId(), configInfo.getGroup(), "datumId",
                    configInfo.getContent());
        } catch (Throwable e) {
            DEFAULT_LOG.error("data verification failed", e);
            throw e;
        }
        ConfigInfo configInfo2Save = new ConfigInfo(configInfo.getDataId(), configInfo.getGroup(),
                configInfo.getTenant(), configInfo.getAppName(), configInfo.getContent());
        configInfo2Save.setEncryptedDataKey(
                configInfo.getEncryptedDataKey() == null ? "" : configInfo.getEncryptedDataKey());
        String type = configInfo.getType();
        if (StringUtils.isBlank(type)) {
            // simple judgment of file type based on suffix
            if (configInfo.getDataId().contains(SPOT)) {
                String extName = configInfo.getDataId().substring(configInfo.getDataId().lastIndexOf(SPOT) + 1);
                FileTypeEnum fileTypeEnum = FileTypeEnum.getFileTypeEnumByFileExtensionOrFileType(extName);
                type = fileTypeEnum.getFileType();
            } else {
                type = FileTypeEnum.getFileTypeEnumByFileExtensionOrFileType(null).getFileType();
            }
        }
        if (configAdvanceInfo == null) {
            configAdvanceInfo = new HashMap<>(16);
        }
        configAdvanceInfo.put("type", type);
        configAdvanceInfo.put("desc", configInfo.getDesc());
        try {
            // Emulate a relational unique-key conflict: the catch block below
            // string-matches "DuplicateKeyException" in e.toString(), so the
            // message prefix here is load-bearing.
            ConfigInfoStateWrapper foundCfg = findConfigInfoState(configInfo2Save.getDataId(),
                    configInfo2Save.getGroup(), configInfo2Save.getTenant());
            if (foundCfg != null) {
                throw new Throwable("DuplicateKeyException: config already exists, should be overridden");
            }
            addConfigInfo(srcIp, srcUser, configInfo2Save, configAdvanceInfo, callFinally);
            succCount++;
        } catch (Throwable e) {
            // Anything that is not a duplicate-key conflict is a hard failure.
            if (!StringUtils.contains(e.toString(), "DuplicateKeyException")) {
                throw new NacosException(NacosException.SERVER_ERROR, e);
            }
            // uniqueness constraint conflict
            if (SameConfigPolicy.ABORT.equals(policy)) {
                // Record the conflicting item as failed and every later item as skipped.
                failData = new ArrayList<>();
                skipData = new ArrayList<>();
                Map<String, String> faileditem = new HashMap<>(2);
                faileditem.put("dataId", configInfo2Save.getDataId());
                faileditem.put("group", configInfo2Save.getGroup());
                failData.add(faileditem);
                for (int j = (i + 1); j < configInfoList.size(); j++) {
                    ConfigInfo skipConfigInfo = configInfoList.get(j);
                    Map<String, String> skipitem = new HashMap<>(2);
                    skipitem.put("dataId", skipConfigInfo.getDataId());
                    skipitem.put("group", skipConfigInfo.getGroup());
                    skipData.add(skipitem);
                    skipCount++;
                }
                break;
            } else if (SameConfigPolicy.SKIP.equals(policy)) {
                skipCount++;
                if (skipData == null) {
                    skipData = new ArrayList<>();
                }
                Map<String, String> skipitem = new HashMap<>(2);
                skipitem.put("dataId", configInfo2Save.getDataId());
                skipitem.put("group", configInfo2Save.getGroup());
                skipData.add(skipitem);
            } else if (SameConfigPolicy.OVERWRITE.equals(policy)) {
                succCount++;
                updateConfigInfo(configInfo2Save, srcIp, srcUser, configAdvanceInfo);
            }
        }
    }
    // Summary payload; detail lists are attached only when non-empty.
    Map<String, Object> result = new HashMap<>(4);
    result.put("succCount", succCount);
    result.put("skipCount", skipCount);
    if (failData != null && !failData.isEmpty()) {
        result.put("failData", failData);
    }
    if (skipData != null && !skipData.isEmpty()) {
        result.put("skipData", skipData);
    }
    return result;
}
|
@Test
void testBatchInsertOrUpdateAbort() throws NacosException {
    // Three configs: #1 inserts cleanly, #2 already exists and (ABORT policy)
    // stops the batch, #3 is never attempted and must be reported as skipped.
    List<ConfigAllInfo> configInfoList = new ArrayList<>();
    //insert direct
    configInfoList.add(createMockConfigAllInfo(0));
    //exist config and overwrite
    configInfoList.add(createMockConfigAllInfo(1));
    //insert direct
    configInfoList.add(createMockConfigAllInfo(2));
    String srcUser = "srcUser1324";
    String srcIp = "srcIp1243";
    Map<String, Object> configAdvanceInfo = new HashMap<>();
    //mock add config 1 success,config 2 fail and abort,config 3 not operated
    Mockito.when(databaseOperate.queryOne(anyString(),
            eq(new Object[] {configInfoList.get(0).getDataId(), configInfoList.get(0).getGroup(), configInfoList.get(0).getTenant()}),
            eq(CONFIG_INFO_STATE_WRAPPER_ROW_MAPPER))).thenReturn(null);
    Mockito.when(databaseOperate.queryOne(anyString(),
            eq(new Object[] {configInfoList.get(1).getDataId(), configInfoList.get(1).getGroup(), configInfoList.get(1).getTenant()}),
            eq(CONFIG_INFO_STATE_WRAPPER_ROW_MAPPER))).thenReturn(new ConfigInfoStateWrapper());
    // Fixed copy-paste bug: this stub previously matched on config 2's tenant
    // (get(1)) instead of config 3's own tenant (get(2)).
    Mockito.when(databaseOperate.queryOne(anyString(),
            eq(new Object[] {configInfoList.get(2).getDataId(), configInfoList.get(2).getGroup(), configInfoList.get(2).getTenant()}),
            eq(CONFIG_INFO_STATE_WRAPPER_ROW_MAPPER))).thenReturn(null);
    Map<String, Object> stringObjectMap = embeddedConfigInfoPersistService.batchInsertOrUpdate(configInfoList, srcUser, srcIp,
            configAdvanceInfo, SameConfigPolicy.ABORT);
    assertEquals(1, stringObjectMap.get("succCount"));
    assertEquals(1, stringObjectMap.get("skipCount"));
    // config 2 failed
    assertEquals(configInfoList.get(1).getDataId(), ((List<Map<String, String>>) stringObjectMap.get("failData")).get(0).get("dataId"));
    //skip config 3
    assertEquals(configInfoList.get(2).getDataId(), ((List<Map<String, String>>) stringObjectMap.get("skipData")).get(0).get("dataId"));
}
|
@Override
public String getMessage() {
    // Expose the lazily/externally computed message field verbatim.
    return this.message;
}
|
@Test
final void requireMixOfMessageAndNoMessageWorks() {
    // Builds a three-deep cause chain mixing message-only, cause-only and
    // message+cause constructors, and checks the flattened wrapper message.
    // NOTE(review): the expected string hard-codes the source line numbers
    // (41-43) of the Throwable construction sites below — editing this file
    // shifts them and breaks the assertion; consider a line-agnostic check.
    final Throwable t0 = new Throwable("t0");
    final Throwable t1 = new Throwable(t0);
    final Throwable t2 = new Throwable("t2", t1);
    final ExceptionWrapper e = new ExceptionWrapper(t2);
    final String expected = "Throwable(\"t2\") at com.yahoo.jdisc.http.server.jetty.ExceptionWrapperTest(ExceptionWrapperTest.java:43):"
            + " Throwable(\"java.lang.Throwable: t0\") at com.yahoo.jdisc.http.server.jetty.ExceptionWrapperTest(ExceptionWrapperTest.java:42):"
            + " Throwable(\"t0\") at com.yahoo.jdisc.http.server.jetty.ExceptionWrapperTest(ExceptionWrapperTest.java:41)";
    assertThat(e.getMessage(), equalTo(expected));
}
|
@Override
public PCollectionsImmutableNavigableSet<E> descendingSet() {
    // Obtain the reversed view from the underlying persistent set, then wrap
    // it so callers keep receiving the immutable wrapper type.
    var reversed = underlying().descendingSet();
    return new PCollectionsImmutableNavigableSet<>(reversed);
}
|
@Test
public void testDelegationOfDescendingSet() {
    // Verifies descendingSet() delegates to the underlying TreePSet and wraps
    // the returned reversed set in a new immutable wrapper.
    TreePSet<Integer> testSet = TreePSet.from(Arrays.asList(2, 3, 4));
    new PCollectionsTreeSetWrapperDelegationChecker<>()
        .defineMockConfigurationForFunctionInvocation(TreePSet::descendingSet, testSet.descendingSet())
        .defineWrapperFunctionInvocationAndMockReturnValueTransformation(PCollectionsImmutableNavigableSet::descendingSet, identity())
        .expectWrapperToWrapMockFunctionReturnValue()
        .doFunctionDelegationCheck();
}
|
@Override
public void collectNoValue(MetricDescriptor descriptor) {
    // Composite collector: fan the "no value" notification out to every
    // registered delegate, in registration order.
    for (MetricsCollector collector : collectors) {
        collector.collectNoValue(descriptor);
    }
}
|
@Test
public void testCollectNoValue() {
    // Both registered mock collectors must receive the same descriptor.
    compositeCollector.collectNoValue(metricsDescriptor);
    verify(collectorMock1).collectNoValue(metricsDescriptor);
    verify(collectorMock2).collectNoValue(metricsDescriptor);
}
|
public static Resource multiplyAndAddTo(
    Resource lhs, Resource rhs, double by) {
  // Computes lhs += rhs * by for every countable resource type, mutating and
  // returning lhs. The product is truncated to long before the addition.
  int resourceTypeCount = ResourceUtils.getNumberOfCountableResourceTypes();
  for (int idx = 0; idx < resourceTypeCount; idx++) {
    try {
      long scaledRhs = (long) (rhs.getResourceInformation(idx).getValue() * by);
      long currentLhs = lhs.getResourceInformation(idx).getValue();
      lhs.setResourceValue(idx, currentLhs + scaledRhs);
    } catch (ResourceNotFoundException ye) {
      // A type known to the registry may be absent from a Resource instance;
      // skip it rather than fail the whole accumulation.
      LOG.warn("Resource is missing:" + ye.getMessage());
    }
  }
  return lhs;
}
|
@Test
void testMultiplyAndAddTo() throws Exception {
    // Exercises lhs += rhs * 1.5 with and without the extra resource type
    // configured; (2 * 1.5) = 3, so e.g. (3,1) + (2,2)*1.5 = (6,4).
    unsetExtraResourceType();
    setupExtraResourceType();
    assertEquals(createResource(6, 4),
        multiplyAndAddTo(createResource(3, 1), createResource(2, 2), 1.5));
    assertEquals(createResource(6, 4, 0),
        multiplyAndAddTo(createResource(3, 1), createResource(2, 2), 1.5));
    assertEquals(createResource(4, 7),
        multiplyAndAddTo(createResource(1, 1), createResource(2, 4), 1.5));
    assertEquals(createResource(4, 7, 0),
        multiplyAndAddTo(createResource(1, 1), createResource(2, 4), 1.5));
    assertEquals(createResource(6, 4, 0),
        multiplyAndAddTo(createResource(3, 1, 0), createResource(2, 2, 0),
            1.5));
    // The third (extra) resource also accumulates: 2 + 3*1.5 = 6 (truncated).
    assertEquals(createResource(6, 4, 6),
        multiplyAndAddTo(createResource(3, 1, 2), createResource(2, 2, 3),
            1.5));
}
|
/**
 * Returns whether the given topic name is a v2 pop-retry topic, i.e. it
 * carries the retry-group prefix and contains the v2 separator character.
 *
 * @param retryTopic topic name to inspect; {@code null} yields {@code false}
 * @return {@code true} only for v2 pop-retry topic names
 */
public static boolean isPopRetryTopicV2(String retryTopic) {
    // Guard against null to avoid an NPE on startsWith(); a null topic can
    // never be a retry topic.
    return retryTopic != null
            && retryTopic.startsWith(MixAll.RETRY_GROUP_TOPIC_PREFIX)
            && retryTopic.contains(String.valueOf(POP_RETRY_SEPARATOR_V2));
}
|
@Test
public void testIsPopRetryTopicV2() {
    // A v2-built retry topic is recognized; a v1-built one is not.
    String popRetryTopic = KeyBuilder.buildPopRetryTopicV2(topic, group);
    assertThat(KeyBuilder.isPopRetryTopicV2(popRetryTopic)).isEqualTo(true);
    String popRetryTopicV1 = KeyBuilder.buildPopRetryTopicV1(topic, group);
    assertThat(KeyBuilder.isPopRetryTopicV2(popRetryTopicV1)).isEqualTo(false);
}
|
@Override
public void save(final CallContext ctx) {
    // Remembers the originally requested URL in the session so the user can be
    // sent back to it after authentication completes.
    val webContext = ctx.webContext();
    val sessionStore = ctx.sessionStore();
    val requestedUrl = getRequestedUrl(webContext, sessionStore);
    if (WebContextHelper.isPost(webContext)) {
        // A POST cannot simply be replayed via redirect: capture the submitted
        // form data as a self-posting page wrapped in an OkAction instead.
        LOGGER.debug("requestedUrl with data: {}", requestedUrl);
        val formPost = HttpActionHelper.buildFormPostContent(webContext);
        sessionStore.set(webContext, Pac4jConstants.REQUESTED_URL, new OkAction(formPost));
    } else {
        LOGGER.debug("requestedUrl: {}", requestedUrl);
        sessionStore.set(webContext, Pac4jConstants.REQUESTED_URL, requestedUrl);
    }
}
|
@Test
public void testSaveGet() {
    // For a GET request, the full request URL itself is stored in the session
    // under REQUESTED_URL (no form-post wrapping).
    val context = MockWebContext.create().setFullRequestURL(PAC4J_URL);
    val sessionStore = new MockSessionStore();
    handler.save(new CallContext(context, sessionStore));
    val location = (String) sessionStore.get(context, Pac4jConstants.REQUESTED_URL).get();
    assertEquals(PAC4J_URL, location);
}
|
@Override
public void deleteObject(String accountName, ObjectType objectType, String objectKey) {
    // Route the deletion to the repository that owns this object type.
    // Note: accountName is accepted for interface compatibility but unused.
    switch (objectType) {
        case CANARY_RESULT_ARCHIVE:
            sqlCanaryArchiveRepo.deleteById(objectKey);
            return;
        case CANARY_CONFIG:
            sqlCanaryConfigRepo.deleteById(objectKey);
            return;
        case METRIC_SET_PAIR_LIST:
            sqlMetricSetPairsRepo.deleteById(objectKey);
            return;
        case METRIC_SET_LIST:
            sqlMetricSetsRepo.deleteById(objectKey);
            return;
        default:
            throw new IllegalArgumentException("Unsupported object type: " + objectType);
    }
}
|
@Test
public void testDeleteObjectWhenMetricSets() {
    // METRIC_SET_LIST deletions must be routed to sqlMetricSetsRepo only.
    var testAccountName = UUID.randomUUID().toString();
    var testObjectType = ObjectType.METRIC_SET_LIST;
    var testObjectKey = UUID.randomUUID().toString();
    sqlStorageService.deleteObject(testAccountName, testObjectType, testObjectKey);
    verify(sqlMetricSetsRepo, times(1)).deleteById(testObjectKey);
}
|
/**
 * Computes the CRC-16/CCITT-FALSE checksum over {@code data[offset..offset+length)}.
 * Parameters of the variant: polynomial 0x1021, initial value 0xFFFF,
 * no input/output reflection, final XOR 0x0000.
 *
 * @param data   bytes to checksum
 * @param offset first index to include
 * @param length number of bytes to include
 * @return the 16-bit CRC value
 */
public static int CCITT_FALSE(@NonNull final byte[] data, final int offset, final int length) {
    final int polynomial = 0x1021;
    final int initialValue = 0xFFFF;
    final int finalXor = 0x0000;
    return CRC(polynomial, initialValue, data, offset, length, false, false, finalXor);
}
|
@Test
public void CCITT_FALSE_A() {
    // 0xB915 is the expected CRC-16/CCITT-FALSE value this implementation
    // produces for the single byte 'A' — TODO confirm against a reference
    // implementation if the CRC parameters are ever changed.
    final byte[] data = new byte[] { 'A' };
    assertEquals(0xB915, CRC16.CCITT_FALSE(data, 0, 1));
}
|
/**
 * Creates a generic schema from the given schema info, delegating to
 * {@code of(schemaInfo, true)} (the boolean overload's exact semantics are
 * defined by that method).
 */
public static GenericSchemaImpl of(SchemaInfo schemaInfo) {
    return of(schemaInfo, true);
}
|
@Test
public void testKeyValueSchema() {
    // Round-trips KeyValue data through every combination of JSON/AVRO key and
    // value schemas, decoding via AUTO_CONSUME with a mocked schema provider.
    // configure the schema info provider
    MultiVersionSchemaInfoProvider multiVersionSchemaInfoProvider = mock(MultiVersionSchemaInfoProvider.class);
    List<Schema<Foo>> encodeSchemas = Lists.newArrayList(
        Schema.JSON(Foo.class),
        Schema.AVRO(Foo.class)
    );
    for (Schema<Foo> keySchema : encodeSchemas) {
        for (Schema<Foo> valueSchema : encodeSchemas) {
            // configure encode schema
            Schema<KeyValue<Foo, Foo>> kvSchema = KeyValueSchemaImpl.of(
                keySchema, valueSchema
            );
            // configure decode schema
            Schema<KeyValue<GenericRecord, GenericRecord>> decodeSchema = KeyValueSchemaImpl.of(
                Schema.AUTO_CONSUME(), Schema.AUTO_CONSUME()
            );
            decodeSchema.configureSchemaInfo(
                "test-topic", "topic",kvSchema.getSchemaInfo()
            );
            GenericSchema genericAvroSchema = GenericSchemaImpl.of(Schema.AVRO(Foo.class).getSchemaInfo());
            // Any version lookup resolves to the current key/value pair,
            // encoded INLINE.
            when(multiVersionSchemaInfoProvider.getSchemaByVersion(any(byte[].class)))
                .thenReturn(CompletableFuture.completedFuture(
                    KeyValueSchemaInfo.encodeKeyValueSchemaInfo(
                        keySchema,
                        valueSchema,
                        KeyValueEncodingType.INLINE
                    )
                ));
            decodeSchema.setSchemaInfoProvider(multiVersionSchemaInfoProvider);
            testEncodeAndDecodeKeyValues(kvSchema, decodeSchema);
        }
    }
}
|
@Override
public Set<String> getOutputResourceFields( XMLOutputMeta meta ) {
    // Collect the name of every configured XML output field; duplicates
    // naturally collapse in the set.
    Set<String> fieldNames = new HashSet<>();
    for ( XMLField outputField : meta.getOutputFields() ) {
        fieldNames.add( outputField.getFieldName() );
    }
    return fieldNames;
}
|
@Test
public void testGetOutputResourceFields() throws Exception {
    // Every output field name declared on the meta must appear in the set
    // returned by the analyzer, with no extras.
    XMLField[] outputFields = new XMLField[2];
    XMLField field1 = mock( XMLField.class );
    XMLField field2 = mock( XMLField.class );
    outputFields[0] = field1;
    outputFields[1] = field2;
    when( field1.getFieldName() ).thenReturn( "field1" );
    when( field2.getFieldName() ).thenReturn( "field2" );
    when( meta.getOutputFields() ).thenReturn( outputFields );
    Set<String> outputResourceFields = analyzer.getOutputResourceFields( meta );
    assertEquals( outputFields.length, outputResourceFields.size() );
    for ( XMLField outputField : outputFields ) {
        assertTrue( outputResourceFields.contains( outputField.getFieldName() ) );
    }
}
|
/**
 * Computes the HMAC-SHA256 of {@code data} keyed with {@code key}, delegating
 * to the byte-array overload.
 *
 * @param data message text, encoded as UTF-8 bytes
 * @param key  secret key text, encoded as UTF-8 bytes
 * @return the hex-encoded digest produced by the byte-array overload
 */
public static String hmacSHA256(String data, String key) {
    // Use an explicit charset: the no-arg getBytes() depends on the platform
    // default encoding, making digests of non-ASCII input non-portable.
    // (Identical output for ASCII input, so existing digests are unaffected.)
    return hmacSHA256(data.getBytes(java.nio.charset.StandardCharsets.UTF_8),
            key.getBytes(java.nio.charset.StandardCharsets.UTF_8));
}
|
@Test
public void testHmacSHA256() throws Exception {
    // The String, byte[] and to-byte variants must all agree on the known
    // digest of "biezhi" under the shared test key.
    String biezhiHmacSHA256 = "65e377c552b81d0978343e5fe7cf92bdc867d19a73d8479f0437db93b0f0b2af";
    Assert.assertEquals(
        biezhiHmacSHA256,
        EncryptKit.hmacSHA256("biezhi", biezhiHmackey)
    );
    Assert.assertEquals(
        biezhiHmacSHA256,
        EncryptKit.hmacSHA256("biezhi".getBytes(), biezhiHmackey.getBytes())
    );
    TestCase.assertTrue(
        Arrays.equals(
            ConvertKit.hexString2Bytes(biezhiHmacSHA256),
            EncryptKit.hmacSHA256ToByte("biezhi".getBytes(), biezhiHmackey.getBytes())
        )
    );
}
|
/**
 * Converts a Hive "map<key,value>" type string into a MapType by splitting it
 * into its key/value type strings and converting each recursively.
 */
public static Type fromHiveTypeToMapType(String typeStr) {
    String[] keyAndValue = getKeyValueStr(typeStr);
    Type keyType = fromHiveType(keyAndValue[0]);
    Type valueType = fromHiveType(keyAndValue[1]);
    return new MapType(keyType, valueType);
}
|
@Test
public void testMapString() {
    // Converts a range of Hive map type strings — primitive, decimal,
    // char/varchar with lengths, and nested array/map values — and checks the
    // resulting MapType. Also covers case-insensitivity ("Map<INT,INTEGER>").
    ScalarType keyType = ScalarType.createType(PrimitiveType.TINYINT);
    ScalarType valueType = ScalarType.createType(PrimitiveType.SMALLINT);
    MapType mapType = new MapType(keyType, valueType);
    String typeStr = "map<tinyint,smallint>";
    Type resType = fromHiveTypeToMapType(typeStr);
    Assert.assertEquals(mapType, resType);
    keyType = ScalarType.createType(PrimitiveType.INT);
    valueType = ScalarType.createType(PrimitiveType.INT);
    mapType = new MapType(keyType, valueType);
    typeStr = "Map<INT,INTEGER>";
    resType = fromHiveTypeToMapType(typeStr);
    Assert.assertEquals(mapType, resType);
    keyType = ScalarType.createType(PrimitiveType.FLOAT);
    valueType = ScalarType.createType(PrimitiveType.DOUBLE);
    mapType = new MapType(keyType, valueType);
    typeStr = "map<float,double>";
    resType = fromHiveTypeToMapType(typeStr);
    Assert.assertEquals(mapType, resType);
    // Hive "timestamp" maps to DATETIME; decimal carries precision/scale.
    keyType = ScalarType.createUnifiedDecimalType(10, 7);
    valueType = ScalarType.createType(PrimitiveType.DATETIME);
    mapType = new MapType(keyType, valueType);
    typeStr = "map<decimal(10,7),timestamp>";
    resType = fromHiveTypeToMapType(typeStr);
    Assert.assertEquals(mapType, resType);
    keyType = ScalarType.createType(PrimitiveType.DATE);
    valueType = ScalarType.createDefaultCatalogString();
    mapType = new MapType(keyType, valueType);
    typeStr = "map<date,string>";
    resType = fromHiveTypeToMapType(typeStr);
    Assert.assertEquals(mapType, resType);
    keyType = ScalarType.createVarcharType(10);
    valueType = ScalarType.createCharType(5);
    mapType = new MapType(keyType, valueType);
    typeStr = "map<varchar(10),char(5)>";
    resType = fromHiveTypeToMapType(typeStr);
    Assert.assertEquals(mapType, resType);
    keyType = ScalarType.createType(PrimitiveType.BOOLEAN);
    valueType = ScalarType.createVarcharType(10);
    mapType = new MapType(keyType, valueType);
    typeStr = "map<boolean,varchar(10)>";
    resType = fromHiveTypeToMapType(typeStr);
    Assert.assertEquals(mapType, resType);
    // Nested value type: array<int>.
    keyType = ScalarType.createCharType(10);
    ScalarType itemType = ScalarType.createType(PrimitiveType.INT);
    ArrayType vType = new ArrayType(itemType);
    mapType = new MapType(keyType, vType);
    typeStr = "map<char(10),array<int>>";
    resType = fromHiveTypeToMapType(typeStr);
    Assert.assertEquals(mapType, resType);
    // Doubly nested value type: map<int,array<timestamp>>.
    keyType = ScalarType.createCharType(10);
    ScalarType inKeyType = ScalarType.createType(PrimitiveType.INT);
    itemType = ScalarType.createType(PrimitiveType.DATETIME);
    ArrayType inValueType = new ArrayType(itemType);
    MapType mValueType = new MapType(inKeyType, inValueType);
    mapType = new MapType(keyType, mValueType);
    typeStr = "map<char(10),map<int,array<timestamp>>>";
    resType = fromHiveTypeToMapType(typeStr);
    Assert.assertEquals(mapType, resType);
}
|
/**
 * Retrieves the cached image metadata (manifests_configs.json) for the given
 * image reference, if present.
 *
 * @param imageReference the image whose cache directory is consulted
 * @return the parsed metadata, or {@link Optional#empty()} when no metadata
 *     file exists for this image
 * @throws IOException if reading or locking fails
 * @throws CacheCorruptedException if the stored metadata fails verification
 */
Optional<ImageMetadataTemplate> retrieveMetadata(ImageReference imageReference)
    throws IOException, CacheCorruptedException {
  Path imageDirectory = cacheStorageFiles.getImageDirectory(imageReference);
  Path metadataPath = imageDirectory.resolve("manifests_configs.json");
  if (!Files.exists(metadataPath)) {
    return Optional.empty();
  }
  ImageMetadataTemplate metadata;
  // Hold the per-image lock file only while reading, so concurrent writers
  // cannot interleave with the read; verification happens after release.
  try (LockFile ignored = LockFile.lock(imageDirectory.resolve("lock"))) {
    metadata = JsonTemplateMapper.readJsonFromFile(metadataPath, ImageMetadataTemplate.class);
  }
  verifyImageMetadata(metadata, imageDirectory);
  return Optional.of(metadata);
}
|
@Test
public void testRetrieveMetadata_ociImageIndex()
    throws IOException, URISyntaxException, CacheCorruptedException {
  // A cached OCI image index must round-trip: manifest list typed as
  // OciIndexTemplate, one manifest descriptor with the expected digest, and
  // one manifest/config pair whose config reports the "wasm" architecture.
  setupCachedMetadataOciImageIndex(cacheDirectory);
  ImageMetadataTemplate metadata =
      cacheStorageReader.retrieveMetadata(ImageReference.of("test", "image", "tag")).get();
  MatcherAssert.assertThat(
      metadata.getManifestList(), CoreMatchers.instanceOf(OciIndexTemplate.class));
  List<? extends ContentDescriptorTemplate> manifestDescriptors =
      ((OciIndexTemplate) metadata.getManifestList()).getManifests();
  Assert.assertEquals(1, manifestDescriptors.size());
  Assert.assertEquals(
      "8c662931926fa990b41da3c9f42663a537ccd498130030f9149173a0493832ad",
      manifestDescriptors.get(0).getDigest().getHash());
  Assert.assertEquals(1, metadata.getManifestsAndConfigs().size());
  ManifestAndConfigTemplate manifestAndConfig = metadata.getManifestsAndConfigs().get(0);
  OciManifestTemplate manifestTemplate = (OciManifestTemplate) manifestAndConfig.getManifest();
  Assert.assertEquals(2, manifestTemplate.getSchemaVersion());
  Assert.assertEquals(
      "8c662931926fa990b41da3c9f42663a537ccd498130030f9149173a0493832ad",
      manifestTemplate.getContainerConfiguration().getDigest().getHash());
  Assert.assertEquals("wasm", manifestAndConfig.getConfig().getArchitecture());
}
|
/**
 * Parses the given raw JSON text by building a JSONObject once and delegating
 * to the object-based overload.
 */
public void parse(String jsonString) {
    final JSONObject json = new JSONObject(jsonString);
    parse(json);
}
|
@Test
public void parseTest() {
    // Binding a flat JSON object onto TestBean must populate all four
    // properties, including the non-ASCII message text.
    String jsonstr = "{\n" +
        "  \"location\": \"https://hutool.cn\",\n" +
        "  \"message\": \"这是一条测试消息\",\n" +
        "  \"requestId\": \"123456789\",\n" +
        "  \"traceId\": \"987654321\"\n" +
        "}";
    final TestBean testBean = JSONUtil.toBean(jsonstr, TestBean.class);
    assertEquals("https://hutool.cn", testBean.getLocation());
    assertEquals("这是一条测试消息", testBean.getMessage());
    assertEquals("123456789", testBean.getRequestId());
    assertEquals("987654321", testBean.getTraceId());
}
|
/**
 * Validates a proposed capacity-scheduler configuration by spinning up a
 * scratch CapacityScheduler with the old configuration and reinitializing it
 * with the new one; any validation failure surfaces as an exception from
 * reinitialize.
 *
 * @param oldConfParam current configuration (deep-copied, never mutated)
 * @param newConf proposed configuration to validate
 * @param rmContext RM context providing the live scheduler and its nodes
 * @return {@code true} when the new configuration passes validation
 * @throws IOException if initialization/reinitialization rejects the config
 */
public static boolean validateCSConfiguration(
    final Configuration oldConfParam, final Configuration newConf,
    final RMContext rmContext) throws IOException {
  // ensure that the oldConf is deep copied
  Configuration oldConf = new Configuration(oldConfParam);
  // Mark both configs so metrics code knows this is validation, not a real
  // scheduler start.
  QueueMetrics.setConfigurationValidation(oldConf, true);
  QueueMetrics.setConfigurationValidation(newConf, true);
  CapacityScheduler liveScheduler = (CapacityScheduler) rmContext.getScheduler();
  CapacityScheduler newCs = new CapacityScheduler();
  try {
    //TODO: extract all the validation steps and replace reinitialize with
    //the specific validation steps
    newCs.setConf(oldConf);
    newCs.setRMContext(rmContext);
    newCs.init(oldConf);
    // Mirror the live node set so per-node checks see realistic capacity.
    newCs.addNodes(liveScheduler.getAllNodes());
    newCs.reinitialize(newConf, rmContext, true);
    return true;
  } finally {
    // Always tear the scratch scheduler down, even when validation throws.
    newCs.stop();
  }
}
|
@Test
public void testValidateCSConfigDominantRCAbsoluteModeParentMaxGPUExceeded() throws Exception {
  // Raising a leaf queue's max GPU above its parent's capacity must make
  // validation fail with a "Max resource configuration" cause.
  setUpMockRM(true);
  RMContext rmContext = mockRM.getRMContext();
  CapacitySchedulerConfiguration oldConfiguration = cs.getConfiguration();
  CapacitySchedulerConfiguration newConfiguration =
      new CapacitySchedulerConfiguration(cs.getConfiguration());
  newConfiguration.setMaximumResourceRequirement("",
      LEAF_A_FULL_PATH, GPU_EXCEEDED_MAXRES_GPU);
  try {
    CapacitySchedulerConfigValidator
        .validateCSConfiguration(oldConfiguration, newConfiguration, rmContext);
    fail("Parent maximum capacity exceeded");
  } catch (IOException e) {
    Assert.assertTrue(e.getCause().getMessage()
        .startsWith("Max resource configuration"));
  } finally {
    mockRM.stop();
  }
}
|
/**
 * Returns whether two expressions are equivalent over the given struct:
 * both are NOT-rewritten, bound against the struct, and compared with
 * {@code isEquivalentTo}.
 */
public static boolean equivalent(
    Expression left, Expression right, Types.StructType struct, boolean caseSensitive) {
  Expression boundLeft = Binder.bind(struct, Expressions.rewriteNot(left), caseSensitive);
  Expression boundRight = Binder.bind(struct, Expressions.rewriteNot(right), caseSensitive);
  return boundLeft.isEquivalentTo(boundRight);
}
|
@Test
public void testInequalityEquivalence() {
  // For integral-typed columns, strict and non-strict comparisons shifted by
  // one must be detected as equivalent (e.g. x < 34 ≡ x <= 33); expressions
  // over different columns or differently-typed columns must not be.
  String[] cols = new String[] {"id", "val", "ts", "date", "time"};
  for (String col : cols) {
    assertThat(
            ExpressionUtil.equivalent(
                Expressions.lessThan(col, 34L),
                Expressions.lessThanOrEqual(col, 33L),
                STRUCT,
                true))
        .as("Should detect < to <= equivalence: " + col)
        .isTrue();
    assertThat(
            ExpressionUtil.equivalent(
                Expressions.lessThanOrEqual(col, 34L),
                Expressions.lessThan(col, 35L),
                STRUCT,
                true))
        .as("Should detect <= to < equivalence: " + col)
        .isTrue();
    assertThat(
            ExpressionUtil.equivalent(
                Expressions.greaterThan(col, 34L),
                Expressions.greaterThanOrEqual(col, 35L),
                STRUCT,
                true))
        .as("Should detect > to >= equivalence: " + col)
        .isTrue();
    assertThat(
            ExpressionUtil.equivalent(
                Expressions.greaterThanOrEqual(col, 34L),
                Expressions.greaterThan(col, 33L),
                STRUCT,
                true))
        .as("Should detect >= to > equivalence: " + col)
        .isTrue();
  }
  assertThat(
          ExpressionUtil.equivalent(
              Expressions.lessThan("val", 34L),
              Expressions.lessThanOrEqual("val2", 33L),
              STRUCT,
              true))
      .as("Should not detect equivalence for different columns")
      .isFalse();
  assertThat(
          ExpressionUtil.equivalent(
              Expressions.lessThan("val", 34L),
              Expressions.lessThanOrEqual("id", 33L),
              STRUCT,
              true))
      .as("Should not detect equivalence for different types")
      .isFalse();
}
|
@Override
public boolean isAutoTrackEventTypeIgnored(SensorsDataAPI.AutoTrackEventType eventType) {
    // This implementation ignores every auto-track event type unconditionally
    // (the eventType argument is deliberately not consulted).
    return true;
}
|
@Test
public void testIsAutoTrackEventTypeIgnored() {
    // APP_START must be reported as ignored by this implementation.
    Assert.assertTrue(mSensorsAPI.isAutoTrackEventTypeIgnored(SensorsAnalyticsAutoTrackEventType.APP_START));
}
|
/** Accessor for the escape-unicode option flag. */
public boolean isEscapeUnicode() {
    return this.escapeUnicode;
}
|
@Test
public void testEscapeUnicodeOption() {
    // The flag is set only when --escape-unicode is passed; default is false.
    assertThat(parse("--escape-unicode").isEscapeUnicode()).isTrue();
    assertThat(parse("").isEscapeUnicode()).isFalse();
}
|
private MergeSortedPages() {}
|
@Test
public void testEmptyStreams()
        throws Exception
{
    // Merging four sources that produce only empty pages (or nothing at all)
    // must yield an empty result rather than failing.
    List<Type> types = ImmutableList.of(INTEGER, BIGINT, DOUBLE);
    MaterializedResult actual = mergeSortedPages(
            types,
            ImmutableList.of(0, 1),
            ImmutableList.of(ASC_NULLS_FIRST, ASC_NULLS_FIRST),
            ImmutableList.of(
                    rowPagesBuilder(types)
                            .pageBreak()
                            .pageBreak()
                            .build(),
                    rowPagesBuilder(types)
                            .pageBreak()
                            .build(),
                    rowPagesBuilder(types)
                            .pageBreak()
                            .build(),
                    rowPagesBuilder(types)
                            .build()));
    MaterializedResult expected = resultBuilder(TEST_SESSION, types)
            .build();
    assertEquals(actual, expected);
}
|
/**
 * Returns {@code true} when at least one of this instance's tags is also
 * contained in {@code other}; short-circuits on the first match.
 */
public boolean intersects(Tags other) {
    for (String tag : this.tags) {
        if (other.contains(tag)) {
            return true;
        }
    }
    return false;
}
|
@Test
public void testIntersects() {
    // The sets share "a", so intersection must hold in both directions.
    Tags tags1 = new Tags(Set.of("a", "tag2", "3"));
    Tags tags2 = new Tags(Set.of("a", "tag3"));
    assertTrue(tags1.intersects(tags2));
    assertTrue(tags2.intersects(tags1));
}
|
/**
 * Resolves the function overload matching the given arguments. An exact
 * (cast-free) match is always preferred; implicit casts are consulted only
 * when this index supports them. Throws when no candidate matches.
 */
T getFunction(final List<SqlArgument> arguments) {
    final Optional<T> exactMatch = findMatchingCandidate(arguments, false);
    if (exactMatch.isPresent()) {
        return exactMatch.get();
    }
    if (supportsImplicitCasts) {
        final Optional<T> coercedMatch = findMatchingCandidate(arguments, true);
        if (coercedMatch.isPresent()) {
            return coercedMatch.get();
        }
    }
    throw createNoMatchingFunctionException(arguments);
}
|
@Test
public void shouldChooseLaterVariadicWhenTwoVariadicsMatch() {
    // Given: two overloads whose varargs sit at different positions, both of
    // which can match (BIGINT, INT, STRING, DOUBLE); the later-varargs one
    // (EXPECTED) must win the tie-break.
    givenFunctions(
        function(OTHER, 1, LONG, INT_VARARGS, STRING, DOUBLE),
        function(EXPECTED, 2, LONG, INT, STRING_VARARGS, DOUBLE)
    );
    // When:
    final KsqlScalarFunction fun = udfIndex.getFunction(ImmutableList.of(
        SqlArgument.of(SqlTypes.BIGINT),
        SqlArgument.of(SqlTypes.INTEGER),
        SqlArgument.of(SqlTypes.STRING),
        SqlArgument.of(SqlTypes.DOUBLE))
    );
    // Then:
    assertThat(fun.name(), equalTo(EXPECTED));
}
|
@Override
public void pluginJarAdded(BundleOrPluginFileDetails bundleOrPluginFileDetails) {
    // Builds the bundle descriptor for a newly discovered plugin jar, runs the
    // validation chain, and registers the plugin.
    final GoPluginBundleDescriptor bundleDescriptor = goPluginBundleDescriptorBuilder.build(bundleOrPluginFileDetails);
    try {
        LOGGER.info("Plugin load starting: {}", bundleOrPluginFileDetails.file());
        validateIfExternalPluginRemovingBundledPlugin(bundleDescriptor);
        validatePluginCompatibilityWithCurrentOS(bundleDescriptor);
        validatePluginCompatibilityWithGoCD(bundleDescriptor);
        addPlugin(bundleOrPluginFileDetails, bundleDescriptor);
    } finally {
        // Logged even when validation/addPlugin throws, bracketing the attempt.
        LOGGER.info("Plugin load finished: {}", bundleOrPluginFileDetails.file());
    }
}
|
@Test
void shouldNotLoadAPluginWhenTargetedGocdVersionIsGreaterThanCurrentGocdVersion() throws Exception {
    // A bundle containing one plugin targeting GoCD 9999.0.0 must mark BOTH
    // plugins in the bundle invalid, while still being registered (loadPlugin
    // is called) and never handed to the OSGi plugin loader.
    File pluginJarFile = new File(pluginWorkDir, PLUGIN_JAR_FILE_NAME);
    copyPluginToTheDirectory(pluginWorkDir, PLUGIN_JAR_FILE_NAME);
    final GoPluginDescriptor pluginDescriptor1 = getPluginDescriptor("some.old.id.1", "1.0", pluginJarFile.getAbsolutePath(),
        new File(PLUGIN_JAR_FILE_NAME), false, "17.5.0", "Linux", "Mac OS X");
    final GoPluginDescriptor pluginDescriptor2 = getPluginDescriptor("some.old.id.2", "1.0", pluginJarFile.getAbsolutePath(),
        new File(PLUGIN_JAR_FILE_NAME), false, "9999.0.0", "Linux", "Mac OS X");
    GoPluginBundleDescriptor bundleDescriptor = new GoPluginBundleDescriptor(pluginDescriptor1, pluginDescriptor2);
    when(goPluginBundleDescriptorBuilder.build(new BundleOrPluginFileDetails(pluginJarFile, true, pluginWorkDir))).thenReturn(bundleDescriptor);
    listener = new DefaultPluginJarChangeListener(registry, osgiManifestGenerator, pluginLoader, goPluginBundleDescriptorBuilder, systemEnvironment);
    listener.pluginJarAdded(new BundleOrPluginFileDetails(pluginJarFile, true, pluginWorkDir));
    verify(registry, times(1)).loadPlugin(bundleDescriptor);
    verifyNoMoreInteractions(pluginLoader);
    assertThat(pluginDescriptor1.getStatus().getMessages().size()).isEqualTo(1);
    assertThat(pluginDescriptor1.getStatus().getMessages().get(0)).isEqualTo("Plugins with IDs ([some.old.id.1, some.old.id.2]) are not valid: Incompatible with GoCD version '" + CurrentGoCDVersion.getInstance().goVersion() + "'. Compatible version is: 9999.0.0.");
    assertThat(pluginDescriptor2.getStatus().getMessages().size()).isEqualTo(1);
    assertThat(pluginDescriptor2.getStatus().getMessages().get(0)).isEqualTo("Plugins with IDs ([some.old.id.1, some.old.id.2]) are not valid: Incompatible with GoCD version '" + CurrentGoCDVersion.getInstance().goVersion() + "'. Compatible version is: 9999.0.0.");
}
|
@Override
public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay readerWay, IntsRef relationFlags) {
    // Derives a single Crossing value for the edge from the tag maps of the
    // way's nodes ("node_tags"). Railway crossings and explicit signals win
    // immediately (return); weaker evidence lets later nodes refine the value.
    List<Map<String, Object>> nodeTags = readerWay.getTag("node_tags", null);
    if (nodeTags == null)
        return;
    for (int i = 0; i < nodeTags.size(); i++) {
        Map<String, Object> tags = nodeTags.get(i);
        if ("crossing".equals(tags.get("railway")) || "level_crossing".equals(tags.get("railway"))) {
            // Railway crossing: distinguish barrier vs. no/absent barrier.
            String barrierVal = (String) tags.get("crossing:barrier");
            crossingEnc.setEnum(false, edgeId, edgeIntAccess, (Helper.isEmpty(barrierVal) || "no".equals(barrierVal)) ? Crossing.RAILWAY : Crossing.RAILWAY_BARRIER);
            return;
        }
        String crossingSignals = (String) tags.get("crossing:signals");
        if ("yes".equals(crossingSignals)) {
            crossingEnc.setEnum(false, edgeId, edgeIntAccess, Crossing.TRAFFIC_SIGNALS);
            return;
        }
        String crossingMarkings = (String) tags.get("crossing:markings");
        if ("yes".equals(crossingMarkings)) {
            crossingEnc.setEnum(false, edgeId, edgeIntAccess, Crossing.MARKED);
            return;
        }
        String crossingValue = (String) tags.get("crossing");
        // some crossing values like "no" do not require highway=crossing and sometimes no crossing value exists although highway=crossing
        if (Helper.isEmpty(crossingValue) && ("no".equals(crossingSignals) || "no".equals(crossingMarkings)
                || "crossing".equals(tags.get("highway")) || "crossing".equals(tags.get("footway")) || "crossing".equals(tags.get("cycleway")))) {
            crossingEnc.setEnum(false, edgeId, edgeIntAccess, Crossing.UNMARKED);
            // next node could have more specific Crossing value
            continue;
        }
        Crossing crossing = Crossing.find(crossingValue);
        if (crossing != Crossing.MISSING)
            crossingEnc.setEnum(false, edgeId, edgeIntAccess, crossing);
    }
}
|
@Test
public void testSignals() {
    // crossing=traffic_signals and crossing:signals=yes both map to
    // TRAFFIC_SIGNALS; crossing:signals=no alone downgrades to UNMARKED.
    EdgeIntAccess edgeIntAccess = new ArrayEdgeIntAccess(1);
    int edgeId = 0;
    parser.handleWayTags(edgeId, edgeIntAccess,
        createReader(new PMap().putObject("crossing", "traffic_signals").toMap()), null);
    assertEquals(Crossing.TRAFFIC_SIGNALS, crossingEV.getEnum(false, edgeId, edgeIntAccess));
    parser.handleWayTags(edgeId, edgeIntAccess = new ArrayEdgeIntAccess(1),
        createReader(new PMap().putObject("crossing:signals", "yes").toMap()), null);
    assertEquals(Crossing.TRAFFIC_SIGNALS, crossingEV.getEnum(false, edgeId, edgeIntAccess));
    parser.handleWayTags(edgeId, edgeIntAccess = new ArrayEdgeIntAccess(1),
        createReader(new PMap().putObject("crossing:signals", "no").toMap()), null);
    assertEquals(Crossing.UNMARKED, crossingEV.getEnum(false, edgeId, edgeIntAccess));
}
|
/**
 * Processes one input row: applies the configured select, deselect and
 * metadata transformations in that order and forwards the result.
 *
 * @return {@code false} when no more rows are expected (stream finished),
 *     {@code true} to be called again
 * @throws KettleException on conversion errors when the step is not
 *     configured for error handling
 */
public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
    meta = (SelectValuesMeta) smi;
    data = (SelectValuesData) sdi;
    Object[] rowData = getRow(); // get row from rowset, wait for our turn, indicate busy!
    if ( rowData == null ) { // no more input to be expected...
      setOutputDone();
      return false;
    }
    Object[] rowCopy = null;
    if ( getStepMeta().isDoingErrorHandling() ) {
      // Keep a pristine copy so the unmodified row can be sent to the error
      // stream if a later transformation fails.
      rowCopy = getInputRowMeta().cloneRow( rowData );
    }
    if ( log.isRowLevel() ) {
      logRowlevel( BaseMessages.getString( PKG, "SelectValues.Log.GotRowFromPreviousStep" )
        + getInputRowMeta().getString( rowData ) );
    }
    if ( first ) {
      first = false;
      // Derive the three row metadata stages once, from the first row:
      // select -> deselect -> metadata, each building on the previous.
      data.selectRowMeta = getInputRowMeta().clone();
      meta.getSelectFields( data.selectRowMeta, getStepname() );
      data.deselectRowMeta = data.selectRowMeta.clone();
      meta.getDeleteFields( data.deselectRowMeta );
      data.metadataRowMeta = data.deselectRowMeta.clone();
      meta.getMetadataFields( data.metadataRowMeta, getStepname(), this );
    }
    try {
      Object[] outputData = rowData;
      if ( data.select ) {
        outputData = selectValues( getInputRowMeta(), outputData );
      }
      if ( data.deselect ) {
        outputData = removeValues( data.selectRowMeta, outputData );
      }
      if ( data.metadata ) {
        outputData = metadataValues( data.deselectRowMeta, outputData );
      }
      if ( outputData == null ) {
        setOutputDone(); // signal end to receiver(s)
        return false;
      }
      // Send the row on its way
      //
      putRow( data.metadataRowMeta, outputData );
      if ( log.isRowLevel() ) {
        logRowlevel( BaseMessages.getString( PKG, "SelectValues.Log.WroteRowToNextStep" )
          + data.metadataRowMeta.getString( outputData ) );
      }
    } catch ( KettleException e ) {
      if ( getStepMeta().isDoingErrorHandling() ) {
        // Report the first offending field name when the failure was a
        // conversion error; otherwise no field can be attributed.
        String field;
        if ( e instanceof KettleConversionException ) {
          List<ValueMetaInterface> fields = ( (KettleConversionException) e ).getFields();
          field = fields.isEmpty() ? null : fields.get( 0 ).getName();
        } else {
          field = null;
        }
        putError( getInputRowMeta(), rowCopy, 1, e.getMessage(), field, "SELECT001" );
      } else {
        throw e;
      }
    }
    if ( checkFeedback( getLinesRead() ) ) {
      logBasic( BaseMessages.getString( PKG, "SelectValues.Log.LineNumber" ) + getLinesRead() );
    }
    return true;
}
|
/**
 * Regression test for conversion-mask defaulting when a select-values metadata
 * change converts between numeric types. The method name references PDI-16368
 * while the original inline comment referenced PDI-16388 — one of the two issue
 * IDs is a typo; confirm against JIRA.
 *
 * First three cases exercise the legacy behavior (compatibility variable unset):
 * the conversion mask follows the SOURCE value meta's default mask.
 * Last three cases set KETTLE_COMPATIBILITY_SELECT_VALUES_TYPE_CHANGE_USES_TYPE_DEFAULTS=Y:
 * the conversion mask follows the TARGET type's default mask.
 */
@Test
public void testPDI16368() throws Exception {
  // This tests that the fix for PDI-16388 doesn't get re-broken.
  //
  SelectValuesHandler step2;
  Object[] inputRow2;
  RowMeta inputRowMeta;
  SelectValuesMeta stepMeta;
  SelectValuesData stepData;
  ValueMetaInterface vmi;
  // First, test current behavior (it's worked this way since 5.x or so)
  //
  // Case 1 (legacy): BigNumber input converted to Integer -> keeps BigNumber mask.
  step2 = new SelectValuesHandler( helper.stepMeta, helper.stepDataInterface, 1, helper.transMeta, helper.trans );
  step2 = spy( step2 );
  inputRow2 = new Object[] { new BigDecimal( "589" ) }; // Starting with a BigDecimal (no places)
  doReturn( inputRow2 ).when( step2 ).getRow();
  doNothing().when( step2 )
    .putError( any( RowMetaInterface.class ), any( Object[].class ), anyLong(), anyString(), anyString(),
      anyString() );
  inputRowMeta = new RowMeta();
  inputRowMeta.addValueMeta( new ValueMetaBigNumber( SELECTED_FIELD ) );
  step2.setInputRowMeta( inputRowMeta );
  stepMeta = new SelectValuesMeta();
  stepMeta.allocate( 1, 0, 1 );
  stepMeta.getSelectFields()[0] = new SelectField();
  stepMeta.getSelectFields()[0].setName( SELECTED_FIELD );
  stepMeta.getMeta()[ 0 ] =
    new SelectMetadataChange( stepMeta, SELECTED_FIELD, null, ValueMetaInterface.TYPE_INTEGER, -2, -2,
      ValueMetaInterface.STORAGE_TYPE_NORMAL, null, false, null, null, false, null, null, null ); // no specified conversion type so should have default conversion mask.
  stepData = new SelectValuesData();
  stepData.select = true;
  stepData.metadata = true;
  stepData.firstselect = true;
  stepData.firstmetadata = true;
  step2.processRow( stepMeta, stepData );
  vmi = step2.rowMeta.getValueMeta( 0 );
  assertEquals( ValueMetaBase.DEFAULT_BIG_NUMBER_FORMAT_MASK, vmi.getConversionMask() );
  // Case 2 (legacy): BigNumber input converted to Number -> still keeps the
  // SOURCE (BigNumber) default mask, not the Number one.
  step2 = new SelectValuesHandler( helper.stepMeta, helper.stepDataInterface, 1, helper.transMeta, helper.trans );
  step2 = spy( step2 );
  doReturn( inputRow2 ).when( step2 ).getRow();
  doNothing().when( step2 )
    .putError( any( RowMetaInterface.class ), any( Object[].class ), anyLong(), anyString(), anyString(),
      anyString() );
  inputRowMeta = new RowMeta();
  inputRowMeta.addValueMeta( new ValueMetaBigNumber( SELECTED_FIELD ) );
  step2.setInputRowMeta( inputRowMeta );
  stepMeta = new SelectValuesMeta();
  stepMeta.allocate( 1, 0, 1 );
  stepMeta.getSelectFields()[0] = new SelectField();
  stepMeta.getSelectFields()[0].setName( SELECTED_FIELD );
  stepMeta.getMeta()[ 0 ] =
    new SelectMetadataChange( stepMeta, SELECTED_FIELD, null, ValueMetaInterface.TYPE_NUMBER, -2, -2,
      ValueMetaInterface.STORAGE_TYPE_NORMAL, null, false, null, null, false, null, null, null ); // legacy behavior keeps the source (BigNumber) mask even though the target is Number.
  stepData = new SelectValuesData();
  stepData.select = true;
  stepData.metadata = true;
  stepData.firstselect = true;
  stepData.firstmetadata = true;
  step2.processRow( stepMeta, stepData );
  vmi = step2.rowMeta.getValueMeta( 0 );
  assertEquals( ValueMetaBase.DEFAULT_BIG_NUMBER_FORMAT_MASK, vmi.getConversionMask() );
  // Case 3 (legacy): Integer input converted to BigNumber -> keeps Integer mask.
  step2 = new SelectValuesHandler( helper.stepMeta, helper.stepDataInterface, 1, helper.transMeta, helper.trans );
  step2 = spy( step2 );
  inputRow2 = new Object[] { 589L }; // Starting with a Long
  doReturn( inputRow2 ).when( step2 ).getRow();
  doNothing().when( step2 )
    .putError( any( RowMetaInterface.class ), any( Object[].class ), anyLong(), anyString(), anyString(),
      anyString() );
  inputRowMeta = new RowMeta();
  inputRowMeta.addValueMeta( new ValueMetaInteger( SELECTED_FIELD ) );
  step2.setInputRowMeta( inputRowMeta );
  stepMeta = new SelectValuesMeta();
  stepMeta.allocate( 1, 0, 1 );
  stepMeta.getSelectFields()[0] = new SelectField();
  stepMeta.getSelectFields()[0].setName( SELECTED_FIELD );
  // no specified conversion type so should have default conversion mask for BigNumber
  stepMeta.getMeta()[ 0 ] =
    new SelectMetadataChange( stepMeta, SELECTED_FIELD, null, ValueMetaInterface.TYPE_BIGNUMBER, -2, -2,
      ValueMetaInterface.STORAGE_TYPE_NORMAL, null, false, null, null, false, null, null, null );
  stepData = new SelectValuesData();
  stepData.select = true;
  stepData.metadata = true;
  stepData.firstselect = true;
  stepData.firstmetadata = true;
  step2.processRow( stepMeta, stepData );
  vmi = step2.rowMeta.getValueMeta( 0 );
  assertEquals( ValueMetaBase.DEFAULT_INTEGER_FORMAT_MASK, vmi.getConversionMask() );
  // Now, test that setting the variable results in getting the default conversion mask
  // Case 4 (variable=Y): BigNumber -> Integer uses the TARGET (Integer) mask.
  step2 = new SelectValuesHandler( helper.stepMeta, helper.stepDataInterface, 1, helper.transMeta, helper.trans );
  step2.setVariable( Const.KETTLE_COMPATIBILITY_SELECT_VALUES_TYPE_CHANGE_USES_TYPE_DEFAULTS, "Y" );
  step2 = spy( step2 );
  inputRow2 = new Object[] { new BigDecimal( "589" ) }; // Starting with a BigDecimal (no places)
  doReturn( inputRow2 ).when( step2 ).getRow();
  doNothing().when( step2 )
    .putError( any( RowMetaInterface.class ), any( Object[].class ), anyLong(), anyString(), anyString(),
      anyString() );
  inputRowMeta = new RowMeta();
  inputRowMeta.addValueMeta( new ValueMetaBigNumber( SELECTED_FIELD ) );
  step2.setInputRowMeta( inputRowMeta );
  stepMeta = new SelectValuesMeta();
  stepMeta.allocate( 1, 0, 1 );
  stepMeta.getSelectFields()[0] = new SelectField();
  stepMeta.getSelectFields()[0].setName( SELECTED_FIELD );
  stepMeta.getMeta()[ 0 ] =
    new SelectMetadataChange( stepMeta, SELECTED_FIELD, null, ValueMetaInterface.TYPE_INTEGER, -2, -2,
      ValueMetaInterface.STORAGE_TYPE_NORMAL, null, false, null, null, false, null, null, null ); // no specified conversion type so should have default conversion mask.
  stepData = new SelectValuesData();
  stepData.select = true;
  stepData.metadata = true;
  stepData.firstselect = true;
  stepData.firstmetadata = true;
  step2.processRow( stepMeta, stepData );
  vmi = step2.rowMeta.getValueMeta( 0 );
  assertEquals( ValueMetaBase.DEFAULT_INTEGER_FORMAT_MASK, vmi.getConversionMask() );
  // Case 5 (variable=Y): BigNumber -> Number uses the TARGET (Number) mask.
  step2 = new SelectValuesHandler( helper.stepMeta, helper.stepDataInterface, 1, helper.transMeta, helper.trans );
  step2.setVariable( Const.KETTLE_COMPATIBILITY_SELECT_VALUES_TYPE_CHANGE_USES_TYPE_DEFAULTS, "Y" );
  step2 = spy( step2 );
  doReturn( inputRow2 ).when( step2 ).getRow();
  doNothing().when( step2 )
    .putError( any( RowMetaInterface.class ), any( Object[].class ), anyLong(), anyString(), anyString(),
      anyString() );
  inputRowMeta = new RowMeta();
  inputRowMeta.addValueMeta( new ValueMetaBigNumber( SELECTED_FIELD ) );
  step2.setInputRowMeta( inputRowMeta );
  stepMeta = new SelectValuesMeta();
  stepMeta.allocate( 1, 0, 1 );
  stepMeta.getSelectFields()[0] = new SelectField();
  stepMeta.getSelectFields()[0].setName( SELECTED_FIELD );
  stepMeta.getMeta()[ 0 ] =
    new SelectMetadataChange( stepMeta, SELECTED_FIELD, null, ValueMetaInterface.TYPE_NUMBER, -2, -2,
      ValueMetaInterface.STORAGE_TYPE_NORMAL, null, false, null, null, false, null, null, null ); // no specified conversion type so should have default conversion mask for Double.
  stepData = new SelectValuesData();
  stepData.select = true;
  stepData.metadata = true;
  stepData.firstselect = true;
  stepData.firstmetadata = true;
  step2.processRow( stepMeta, stepData );
  vmi = step2.rowMeta.getValueMeta( 0 );
  assertEquals( ValueMetaBase.DEFAULT_NUMBER_FORMAT_MASK, vmi.getConversionMask() );
  // Case 6 (variable=Y): Integer -> BigNumber uses the TARGET (BigNumber) mask.
  step2 = new SelectValuesHandler( helper.stepMeta, helper.stepDataInterface, 1, helper.transMeta, helper.trans );
  step2.setVariable( Const.KETTLE_COMPATIBILITY_SELECT_VALUES_TYPE_CHANGE_USES_TYPE_DEFAULTS, "Y" );
  step2 = spy( step2 );
  inputRow2 = new Object[] { 589L }; // Starting with a Long
  doReturn( inputRow2 ).when( step2 ).getRow();
  doNothing().when( step2 )
    .putError( any( RowMetaInterface.class ), any( Object[].class ), anyLong(), anyString(), anyString(),
      anyString() );
  inputRowMeta = new RowMeta();
  inputRowMeta.addValueMeta( new ValueMetaInteger( SELECTED_FIELD ) );
  step2.setInputRowMeta( inputRowMeta );
  stepMeta = new SelectValuesMeta();
  stepMeta.allocate( 1, 0, 1 );
  stepMeta.getSelectFields()[0] = new SelectField();
  stepMeta.getSelectFields()[0].setName( SELECTED_FIELD );
  // no specified conversion type so should have default conversion mask for BigNumber
  stepMeta.getMeta()[ 0 ] =
    new SelectMetadataChange( stepMeta, SELECTED_FIELD, null, ValueMetaInterface.TYPE_BIGNUMBER, -2, -2,
      ValueMetaInterface.STORAGE_TYPE_NORMAL, null, false, null, null, false, null, null, null );
  stepData = new SelectValuesData();
  stepData.select = true;
  stepData.metadata = true;
  stepData.firstselect = true;
  stepData.firstmetadata = true;
  step2.processRow( stepMeta, stepData );
  vmi = step2.rowMeta.getValueMeta( 0 );
  assertEquals( ValueMetaBase.DEFAULT_BIG_NUMBER_FORMAT_MASK, vmi.getConversionMask() );
}
|
public static <T> CompressedSerializedValue<T> fromBytes(byte[] compressedSerializedData) {
return new CompressedSerializedValue<>(compressedSerializedData);
}
|
@Test
void testFromNullBytes() {
    // A null byte array must be rejected eagerly with a NullPointerException.
    assertThatThrownBy(() -> CompressedSerializedValue.fromBytes(null))
            .isInstanceOf(NullPointerException.class);
}
|
/**
 * Queries workflow lineage within a project, optionally filtered by workflow name.
 *
 * @param loginUser    current session user (injected, hidden from API docs)
 * @param projectCode  code of the project to search in
 * @param workFlowName optional workflow name filter; escapes are sanitized first
 * @return list of matching workflow lineage entries
 */
@Operation(summary = "queryLineageByWorkFlowName", description = "QUERY_WORKFLOW_LINEAGE_BY_NAME_NOTES")
@GetMapping(value = "/query-by-name")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_WORKFLOW_LINEAGE_ERROR)
public Result<List<WorkFlowLineage>> queryWorkFlowLineageByName(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser,
                                                                @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
                                                                @RequestParam(value = "workFlowName", required = false) String workFlowName) {
    // Sanitize the user-supplied name before it reaches the service layer.
    String sanitizedName = ParameterUtils.handleEscapes(workFlowName);
    return Result.success(workFlowLineageService.queryWorkFlowLineageByName(projectCode, sanitizedName));
}
|
@Test
public void testQueryWorkFlowLineageByName() {
    // The service is stubbed to return no lineage; the controller must not throw.
    long projectCode = 1L;
    String workFlowName = "test";
    Mockito.when(workFlowLineageService.queryWorkFlowLineageByName(projectCode, workFlowName))
            .thenReturn(Collections.emptyList());
    assertDoesNotThrow(
            () -> workFlowLineageController.queryWorkFlowLineageByName(user, projectCode, workFlowName));
}
|
/**
 * Checks whether any remote instance advertises a host that is in the
 * illegal-address set for cluster mode (ILLEGAL_NODE_ADDRESS_IN_CLUSTER_MODE).
 *
 * @param remoteInstances cluster nodes to inspect; may be null or empty
 * @return true if at least one instance's host is an illegal address
 */
public static boolean hasIllegalNodeAddress(List<RemoteInstance> remoteInstances) {
    if (CollectionUtils.isEmpty(remoteInstances)) {
        return false;
    }
    // Short-circuit on the first illegal host instead of materializing a full
    // host set and intersecting it — same result, no intermediate allocation.
    return remoteInstances.stream()
            .map(remoteInstance -> remoteInstance.getAddress().getHost())
            .anyMatch(ILLEGAL_NODE_ADDRESS_IN_CLUSTER_MODE::contains);
}
|
@Test
public void hasIllegalNodeAddressWithNull() {
    // A null instance list trivially contains no illegal address.
    Assertions.assertFalse(OAPNodeChecker.hasIllegalNodeAddress(null));
}
|
/**
 * Returns a new bit set equal to {@code this AND NOT other}: every bit set in
 * this set and not set in {@code other}. Neither operand is modified.
 * Both sets must have been built with the same log2 segment size.
 *
 * The computation works on a one-time snapshot of both segment arrays;
 * concurrent modifications after the snapshot are not reflected in the result.
 *
 * @throws IllegalArgumentException if the segment sizes differ
 */
public ThreadSafeBitSet andNot(ThreadSafeBitSet other) {
    if(other.log2SegmentSize != log2SegmentSize)
        throw new IllegalArgumentException("Segment sizes must be the same");
    // Snapshot both segment arrays once.
    ThreadSafeBitSetSegments thisSegments = this.segments.get();
    ThreadSafeBitSetSegments otherSegments = other.segments.get();
    // Result is sized to THIS set's segments: bits beyond this set's range are
    // zero in "this" and therefore zero in the result anyway.
    ThreadSafeBitSetSegments newSegments = new ThreadSafeBitSetSegments(thisSegments.numSegments(), numLongsPerSegment);
    for(int i=0;i<thisSegments.numSegments();i++) {
        AtomicLongArray thisArray = thisSegments.getSegment(i);
        // "other" may have fewer segments; a missing segment is treated as all zeros.
        AtomicLongArray otherArray = (i < otherSegments.numSegments()) ? otherSegments.getSegment(i) : null;
        AtomicLongArray newArray = newSegments.getSegment(i);
        for(int j=0;j<thisArray.length();j++) {
            long thisLong = thisArray.get(j);
            long otherLong = (otherArray == null) ? 0 : otherArray.get(j);
            newArray.set(j, thisLong & ~otherLong);
        }
    }
    ThreadSafeBitSet andNot = new ThreadSafeBitSet(log2SegmentSize);
    andNot.segments.set(newSegments);
    return andNot;
}
|
@Test
public void testAndNot() {
    ThreadSafeBitSet left = new ThreadSafeBitSet();
    ThreadSafeBitSet right = new ThreadSafeBitSet();
    for (int i = 0; i < 3; i++) {
        left.set(i);      // bits 0, 1, 2
        right.set(i * 2); // bits 0, 2, 4
    }
    // Build the expected result of (left AND NOT right) by hand, both as a
    // plain BitSet and as a ThreadSafeBitSet.
    BitSet expectedBitSet = new BitSet();
    ThreadSafeBitSet expectedTsb = new ThreadSafeBitSet();
    for (int ordinal = left.nextSetBit(0); ordinal != -1; ordinal = left.nextSetBit(ordinal + 1)) {
        if (!right.get(ordinal)) {
            expectedBitSet.set(ordinal);
            expectedTsb.set(ordinal);
        }
    }
    // Sanity: the two operands differ.
    Assert.assertFalse(left.equals(right));
    Assert.assertNotEquals(left, right);
    Assert.assertNotEquals(left.toBitSet(), right.toBitSet());
    // Validate the computed andNot against the hand-built expectation.
    ThreadSafeBitSet actual = left.andNot(right);
    Assert.assertEquals(expectedTsb.cardinality(), actual.cardinality());
    Assert.assertTrue(expectedTsb.equals(actual));
    Assert.assertEquals(expectedTsb, actual);
    Assert.assertEquals(expectedBitSet, actual.toBitSet());
}
|
/**
 * Decodes a PostgreSQL int8[] (bigint array) parameter transmitted in text mode.
 *
 * @param parameterBytes UTF-8 encoded textual array representation
 * @param isBinary whether the parameter arrived in binary mode (unsupported)
 * @return the decoded elements as a long array, in their textual order
 * @throws UnsupportedSQLOperationException if binary mode is requested
 * @throws NumberFormatException if an element is not a valid long
 */
public long[] decodeInt8Array(final byte[] parameterBytes, final boolean isBinary) {
    // Binary transfer of int8[] parameters is not implemented.
    ShardingSpherePreconditions.checkState(!isBinary, () -> new UnsupportedSQLOperationException("binary mode"));
    Collection<String> elements = decodeText(new String(parameterBytes, StandardCharsets.UTF_8));
    return elements.stream().mapToLong(Long::parseLong).toArray();
}
|
@Test
void assertParseInt8ArrayNormalTextMode() {
    // Text-mode decoding of the sample int8 array yields its two elements in order.
    long[] decoded = DECODER.decodeInt8Array(INT_ARRAY_STR.getBytes(), false);
    long[] expected = {11L, 12L};
    assertThat(decoded.length, is(expected.length));
    for (int i = 0; i < expected.length; i++) {
        assertThat(decoded[i], is(expected[i]));
    }
}
|
/**
 * Picks the partition for a message. A message key always overrides
 * round-robin routing; otherwise the choice depends on batching mode.
 */
@Override
public int choosePartition(Message<?> msg, TopicMetadata topicMetadata) {
    final int partitionCount = topicMetadata.numPartitions();
    // If the message has a key, it supersedes the round robin routing policy.
    if (msg.hasKey()) {
        return signSafeMod(hash.makeHash(msg.getKey()), partitionCount);
    }
    if (!isBatchingEnabled) {
        // Plain round robin across successive calls.
        return signSafeMod(PARTITION_INDEX_UPDATER.getAndIncrement(this), partitionCount);
    }
    // With batching enabled, stick to one partition per `partitionSwitchMs` window.
    return signSafeMod(clock.millis() / partitionSwitchMs + startPtnIdx, partitionCount);
}
|
@Test
public void testChoosePartitionWithoutKey() {
    // Without a key and without batching, routing is plain round robin.
    Message<?> msg = mock(Message.class);
    when(msg.getKey()).thenReturn(null);
    final int numPartitions = 5;
    RoundRobinPartitionMessageRouterImpl router = new RoundRobinPartitionMessageRouterImpl(
            HashingScheme.JavaStringHash, 0, false, 0);
    for (int call = 0; call < 10; call++) {
        assertEquals(call % numPartitions, router.choosePartition(msg, new TopicMetadataImpl(numPartitions)));
    }
}
|
/**
 * Reads the configured ZooKeeper quorum and strips all whitespace from it
 * (ZooKeeper rejects whitespace in the connect string).
 *
 * @param flinkConf configuration holding HA_ZOOKEEPER_QUORUM
 * @return the whitespace-free quorum connect string
 * @throws IllegalConfigurationException if no quorum is configured or it is blank
 */
public static String getZooKeeperEnsemble(Configuration flinkConf)
        throws IllegalConfigurationException {
    final String quorum = flinkConf.getValue(HighAvailabilityOptions.HA_ZOOKEEPER_QUORUM);
    if (quorum == null || StringUtils.isBlank(quorum)) {
        throw new IllegalConfigurationException("No ZooKeeper quorum specified in config.");
    }
    // Remove every whitespace character, including those inside host:port pairs.
    return quorum.replaceAll("\\s+", "");
}
|
@Test
void testZooKeeperEnsembleConnectStringConfiguration() throws Exception {
    // ZooKeeper does not like whitespace in the quorum connect String, so every
    // whitespace variant must normalize to the same clean connect string.
    Configuration conf = new Configuration();
    final String singleHost = "localhost:2891";
    for (String raw : new String[] {
            "localhost:2891",
            " localhost:2891 ",   // leading and trailing whitespace
            "localhost :2891"}) { // whitespace before the port
        setQuorum(conf, raw);
        assertThat(ZooKeeperUtils.getZooKeeperEnsemble(conf)).isEqualTo(singleHost);
    }
    final String twoHosts = "localhost:2891,localhost:2891";
    for (String raw : new String[] {
            "localhost:2891,localhost:2891",
            "localhost:2891, localhost:2891",
            "localhost :2891, localhost:2891",
            " localhost:2891, localhost:2891 "}) {
        setQuorum(conf, raw);
        assertThat(ZooKeeperUtils.getZooKeeperEnsemble(conf)).isEqualTo(twoHosts);
    }
}
|
/**
 * Creates an open-ended token location: the start is the given line/column and
 * the end coordinates are unbounded (Integer.MAX_VALUE).
 */
public static TokenLocation of(final int line, final int charPositionInLine) {
    final int unbounded = Integer.MAX_VALUE;
    return new TokenLocation(line, charPositionInLine, unbounded, unbounded);
}
|
@Test
public void shouldImplementEqualsProperly() {
    // TokenLocation.of(line, col) pins both end coordinates to Integer.MAX_VALUE,
    // so it must NOT be equal to a fully-specified location with the same start.
    new EqualsTester()
        .addEqualityGroup(TokenLocation.of(1, 2))
        .addEqualityGroup(new TokenLocation(1, 2, 3, 4), new TokenLocation(1, 2, 3, 4))
        .testEquals();
}
|
/**
 * Unsubscribes the listener from the service across all clusters
 * (delegates with an empty cluster list).
 *
 * Note: a mutable ArrayList is passed rather than Collections.emptyList();
 * whether the overload relies on mutability is not visible here — confirm
 * before swapping in an immutable empty list.
 */
@Override
public void unsubscribe(String serviceName, EventListener listener) throws NacosException {
    unsubscribe(serviceName, new ArrayList<>(), listener);
}
|
@Test
public void testUnSubscribe5() throws NacosException {
    //given
    String serviceName = "service1";
    String groupName = "group1";
    EventListener listener = event -> {
    };
    // No remaining subscription after removal, so the proxy-level unsubscribe
    // is expected to be invoked as well.
    when(changeNotifier.isSubscribed(groupName, serviceName)).thenReturn(false);
    //when
    client.unsubscribe(serviceName, groupName, NamingSelectorFactory.HEALTHY_SELECTOR, listener);
    NamingSelectorWrapper wrapper = new NamingSelectorWrapper(NamingSelectorFactory.HEALTHY_SELECTOR, listener);
    //then
    verify(changeNotifier, times(1)).deregisterListener(groupName, serviceName, wrapper);
    verify(proxy, times(1)).unsubscribe(serviceName, groupName, Constants.NULL);
}
|
/**
 * Parses a comma-separated ZooKeeper ACL specification into ACL objects.
 * Each component must have the form {@code scheme:id:perm}; the id itself may
 * contain colons (e.g. Kerberos principals), hence the first/last colon split.
 *
 * @param aclString comma-separated ACL spec, or null for an empty result
 * @return parsed ACLs in input order
 * @throws BadAclFormatException if a component is not of the expected form
 */
public static List<ACL> parseACLs(String aclString) throws
    BadAclFormatException {
  List<ACL> parsedAcls = Lists.newArrayList();
  if (aclString == null) {
    return parsedAcls;
  }
  // Split on commas, dropping empty entries and surrounding whitespace.
  Iterable<String> components =
      Splitter.on(',').omitEmptyStrings().trimResults().split(aclString);
  for (String component : components) {
    // from ZooKeeperMain private method
    int schemeEnd = component.indexOf(':');
    int permStart = component.lastIndexOf(':');
    if (schemeEnd == -1 || permStart == -1 || schemeEnd == permStart) {
      throw new BadAclFormatException(
          "ACL '" + component + "' not of expected form scheme:id:perm");
    }
    ACL parsed = new ACL();
    parsed.setId(new Id(component.substring(0, schemeEnd),
        component.substring(schemeEnd + 1, permStart)));
    parsed.setPerms(getPermFromString(component.substring(permStart + 1)));
    parsedAcls.add(parsed);
  }
  return parsedAcls;
}
|
@Test
public void testGoodACLs() {
    List<ACL> parsed = ZKUtil.parseACLs(
        "sasl:hdfs/host1@MY.DOMAIN:cdrwa, sasl:hdfs/host2@MY.DOMAIN:ca");
    // First entry: full permission set (cdrwa).
    ACL first = parsed.get(0);
    assertEquals(Perms.CREATE | Perms.DELETE | Perms.READ | Perms.WRITE | Perms.ADMIN,
        first.getPerms());
    assertEquals("sasl", first.getId().getScheme());
    assertEquals("hdfs/host1@MY.DOMAIN", first.getId().getId());
    // Second entry: create + admin only (ca).
    ACL second = parsed.get(1);
    assertEquals(Perms.CREATE | Perms.ADMIN, second.getPerms());
    assertEquals("sasl", second.getId().getScheme());
    assertEquals("hdfs/host2@MY.DOMAIN", second.getId().getId());
}
|
/**
 * Collects timeline events for each requested entity id by querying every
 * timeline store that may hold data for that entity, and merging all events
 * into a single TimelineEvents result.
 *
 * Returns an empty result when entityIds is null or empty.
 *
 * @throws IOException if an underlying store read fails
 */
@Override
public TimelineEvents getEntityTimelines(String entityType,
    SortedSet<String> entityIds, Long limit, Long windowStart,
    Long windowEnd, Set<String> eventTypes) throws IOException {
  LOG.debug("getEntityTimelines type={} ids={}", entityType, entityIds);
  TimelineEvents returnEvents = new TimelineEvents();
  // Cache items touched during this call; lifecycle is managed by
  // getTimelineStoresForRead (not visible here).
  List<EntityCacheItem> relatedCacheItems = new ArrayList<>();
  if (entityIds == null || entityIds.isEmpty()) {
    return returnEvents;
  }
  for (String entityId : entityIds) {
    LOG.debug("getEntityTimeline type={} id={}", entityType, entityId);
    // An entity may be served by more than one store (e.g. summary + cache).
    List<TimelineStore> stores
        = getTimelineStoresForRead(entityId, entityType, relatedCacheItems);
    for (TimelineStore store : stores) {
      LOG.debug("Try timeline store {}:{} for the request", store.getName(),
          store.toString());
      // Query each store for this single id; results are accumulated.
      SortedSet<String> entityIdSet = new TreeSet<>();
      entityIdSet.add(entityId);
      TimelineEvents events =
          store.getEntityTimelines(entityType, entityIdSet, limit,
              windowStart, windowEnd, eventTypes);
      if (events != null) {
        returnEvents.addEvents(events.getAllEvents());
      }
    }
  }
  return returnEvents;
}
|
@Test
void testNullCheckGetEntityTimelines() throws Exception {
    // A null entityIds set must be tolerated (empty result), not blow up with an NPE.
    try {
        store.getEntityTimelines("YARN_APPLICATION", null, null, null, null,
            null);
    } catch (NullPointerException e) {
        fail("NPE when getEntityTimelines called with Null EntityIds");
    }
}
|
/**
 * Convenience overload: creates a stream execution environment from the
 * pipeline options, defaulting the files-to-stage list to empty when unset
 * and reading the Flink conf dir from the options.
 */
@VisibleForTesting
static StreamExecutionEnvironment createStreamExecutionEnvironment(FlinkPipelineOptions options) {
    return createStreamExecutionEnvironment(
        options,
        MoreObjects.firstNonNull(options.getFilesToStage(), Collections.emptyList()),
        options.getFlinkConfDir());
}
|
@Test
public void shouldAutoSetIdleSourcesFlagWithoutCheckpointing() {
    // Checkpointing disabled, shut down sources immediately
    // (creating the environment mutates the options in place).
    FlinkPipelineOptions options = getDefaultPipelineOptions();
    FlinkExecutionEnvironments.createStreamExecutionEnvironment(options);
    assertThat(options.getShutdownSourcesAfterIdleMs(), is(0L));
}
|
/**
 * Validates the webhook options: when HTTPS is enabled, the configured
 * key store file must exist and be readable.
 *
 * @throws TelegramApiException if the key store is missing or unreadable
 */
public void validate() throws TelegramApiException {
    if (!useHttps) {
        return; // nothing to validate for plain HTTP
    }
    File keyStore = new File(keyStorePath);
    if (!keyStore.exists() || !keyStore.canRead()) {
        throw new TelegramApiException("Can't find or access server keystore file.");
    }
}
|
@Test
public void testWhenHttpsEnabledKeyStoreFileMustBePresent() {
    // With HTTPS on and a readable key store file configured, validation passes.
    WebhookOptions webhookOptions = new WebhookOptions();
    webhookOptions.setUseHttps(true);
    webhookOptions.setKeyStorePath(temporaryKeyStoreFile.getAbsolutePath());
    try {
        webhookOptions.validate();
    } catch (TelegramApiException e) {
        fail("Exception raised during Https webhook options validation");
    }
}
|
/**
 * Parses the authority (and trailing path) portion of a URI from the reader
 * into the host, in strict order: user info, then hostname, then path.
 * The order matters — each stage consumes its part of the reader.
 *
 * @param decorator receives non-fatal parse problems; may be null
 * @throws HostParserException on a fatal parse error
 */
static void parseAuthority(final StringReader reader, final Host host, final Consumer<HostParserException> decorator) throws HostParserException {
    parseUserInfo(reader, host, decorator);
    parseHostname(reader, host, decorator);
    parsePath(reader, host, false, decorator);
}
|
@Test
public void testParseAuthorityUserPasswordDomain() throws HostParserException {
    // "user:password@domain.tld" splits into credentials and hostname.
    final Host parsed = new Host(new TestProtocol());
    final HostParser.StringReader reader =
        new HostParser.StringReader("user:password@domain.tld");
    HostParser.parseAuthority(reader, parsed, null);
    assertEquals("user", parsed.getCredentials().getUsername());
    assertEquals("password", parsed.getCredentials().getPassword());
    assertEquals("domain.tld", parsed.getHostname());
}
|
/**
 * Returns a new Frequency whose value is this frequency divided by the given
 * divisor using floor division (Math.floorDiv — rounds toward negative infinity).
 *
 * @param value the divisor
 * @throws ArithmeticException if value is zero
 */
public Frequency floorDivision(long value) {
    return new Frequency(Math.floorDiv(this.frequency, value));
}
|
@Test
public void testfloorDivision() {
    // 1 GHz floor-divided by 5 is exactly 200 MHz.
    assertThat(Frequency.ofGHz(1).floorDivision(5), is(Frequency.ofMHz(200)));
}
|
/**
 * Returns the sub table starting at the given row/column (inclusive) and
 * extending to the table's full height and width.
 */
public DataTable subTable(int fromRow, int fromColumn) {
    return subTable(fromRow, fromColumn, height(), width());
}
|
@Test
void subTable_throws_for_negative_from_row() {
    // Negative start indices are rejected, mirroring List#subList semantics.
    DataTable table = createSimpleTable();
    assertThrows(IndexOutOfBoundsException.class, () -> table.subTable(-1, 0, 1, 1));
}
|
/**
 * Converts every field of the row to its string representation, in schema
 * field order, using convertFieldToString for the per-type formatting.
 */
@Override
public List<String> mapRow(Row element) {
    Schema schema = element.getSchema();
    int fieldCount = schema.getFieldCount();
    List<String> converted = new ArrayList<>(fieldCount);
    for (int fieldIndex = 0; fieldIndex < fieldCount; fieldIndex++) {
        converted.add(
            convertFieldToString(schema.getField(fieldIndex).getType(), element.getValue(fieldIndex)));
    }
    return converted;
}
|
@Test
public void testBigNegativeNumbers() {
    // Verifies that the most negative (or largest-magnitude negative) value of
    // each numeric field type round-trips to its exact string representation.
    Schema.Builder schemaBuilder = new Schema.Builder();
    schemaBuilder.addField("byte", Schema.FieldType.BYTE);
    schemaBuilder.addField("int16", Schema.FieldType.INT16);
    schemaBuilder.addField("int32", Schema.FieldType.INT32);
    schemaBuilder.addField("int64", Schema.FieldType.INT64);
    schemaBuilder.addField("float", Schema.FieldType.FLOAT);
    schemaBuilder.addField("double", Schema.FieldType.DOUBLE);
    schemaBuilder.addField("decimal", Schema.FieldType.DECIMAL);
    Schema schema = schemaBuilder.build();
    Row.Builder rowBuilder = Row.withSchema(schema);
    rowBuilder.addValue(Byte.MIN_VALUE);
    rowBuilder.addValue(Short.MIN_VALUE);
    rowBuilder.addValue(Integer.MIN_VALUE);
    rowBuilder.addValue(Long.MIN_VALUE);
    // Floats/doubles have no finite MIN like integers; the largest-magnitude
    // negative value is -MAX_VALUE.
    rowBuilder.addValue(-Float.MAX_VALUE);
    rowBuilder.addValue(-Double.MAX_VALUE);
    rowBuilder.addValue(new BigDecimal("-10000000000000.1000000000000000000000"));
    Row row = rowBuilder.build();
    SingleStoreDefaultUserDataMapper mapper = new SingleStoreDefaultUserDataMapper();
    List<String> res = mapper.mapRow(row);
    assertEquals(7, res.size());
    assertEquals("-128", res.get(0));
    assertEquals("-32768", res.get(1));
    assertEquals("-2147483648", res.get(2));
    assertEquals("-9223372036854775808", res.get(3));
    assertEquals("-3.4028235E38", res.get(4));
    assertEquals("-1.7976931348623157E308", res.get(5));
    // BigDecimal keeps its full scale, including trailing zeros.
    assertEquals("-10000000000000.1000000000000000000000", res.get(6));
}
|
/**
 * Sets the key store type (e.g. "JKS", "PKCS12", or a Windows keystore name).
 * Bound from configuration via Jackson.
 */
@JsonProperty
public void setKeyStoreType(String keyStoreType) {
    this.keyStoreType = keyStoreType;
}
|
@Test
void windowsKeyStoreValidation() {
    // OS-managed Windows keystores need neither a password nor a file path,
    // so those validations must not be triggered for this store type.
    HttpsConnectorFactory factory = new HttpsConnectorFactory();
    factory.setKeyStoreType(WINDOWS_MY_KEYSTORE_NAME);
    assertThat(getViolationProperties(validator.validate(factory)))
        .doesNotContain("validKeyStorePassword")
        .doesNotContain("validKeyStorePath");
}
|
/**
 * Creates a dynamic proxy for {@code iface} whose method invocations are
 * retried on {@code implementation} according to a single policy applied to
 * every method. The implementation is wrapped in a
 * DefaultFailoverProxyProvider (no real failover targets).
 *
 * @return a proxy implementing {@code iface}; callers cast as needed
 */
public static <T> Object create(Class<T> iface, T implementation,
    RetryPolicy retryPolicy) {
  return RetryProxy.create(iface,
      new DefaultFailoverProxyProvider<T>(iface, implementation),
      retryPolicy);
}
|
@Test
public void testRetryByRemoteException() {
    // FatalException is mapped to try-once-then-fail, so the wrapped
    // RemoteException must propagate instead of being retried forever.
    Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap =
        Collections.<Class<? extends Exception>, RetryPolicy>singletonMap(FatalException.class, TRY_ONCE_THEN_FAIL);
    UnreliableInterface unreliable = (UnreliableInterface)
        RetryProxy.create(UnreliableInterface.class, unreliableImpl,
            retryByRemoteException(RETRY_FOREVER, exceptionToPolicyMap));
    try {
        unreliable.alwaysFailsWithRemoteFatalException();
        fail("Should fail");
    } catch (RemoteException e) {
        // expected
    }
}
|
/**
 * Checks whether the pair (item1, item2) is present in the set.
 * Takes the read lock, so it may run concurrently with other readers.
 */
public boolean contains(long item1, long item2) {
    lock.readLock().lock();
    try {
        RoaringBitmap bitSet = map.get(item1);
        // Range form [item2, item2 + 1) is a single-point membership test.
        return bitSet != null && bitSet.contains(item2, item2 + 1);
    } finally {
        lock.readLock().unlock();
    }
}
|
@Test
public void testContains() {
    ConcurrentBitmapSortedLongPairSet pairSet = new ConcurrentBitmapSortedLongPairSet();
    // Nothing is contained in an empty set.
    assertFalse(pairSet.contains(1, 1));
    final int count = 10;
    for (int value = 0; value < count; value++) {
        pairSet.add(1, value);
    }
    // Every added pair is reported present.
    for (int value = 0; value < count; value++) {
        assertTrue(pairSet.contains(1, value));
    }
    // A pair that was never added is reported absent.
    assertFalse(pairSet.contains(1, 10000));
}
|
/**
 * Builds the Redis key for a plugin's info entry:
 * the plugin name with the PLUGIN_INFO suffix appended.
 */
public static String pluginInfoKey(final String pluginName) {
    return pluginName + PLUGIN_INFO;
}
|
@Test
public void testPlugInfoKey() {
    String mockPlugin = "MockPlugin";
    String mockPluginInfoKey = RedisKeyConstants.pluginInfoKey(mockPlugin);
    // Assert on the PRODUCED key (the original asserted the input literal,
    // which was trivially non-null and verified nothing).
    assertThat(mockPluginInfoKey, notNullValue());
    assertThat(mockPluginInfoKey, equalTo(String.join("", mockPlugin, PLUGIN_INFO)));
}
|
/**
 * Blocks until the gobbler thread terminates. A null gobbler is a no-op.
 * If interrupted while waiting, the gobbler is considered finished and the
 * interrupt flag is restored for the caller.
 */
static void waitUntilFinish(@Nullable StreamGobbler gobbler) {
    if (gobbler == null) {
        return;
    }
    try {
        gobbler.join();
    } catch (InterruptedException ignored) {
        // consider as finished, restore the interrupted flag
        Thread.currentThread().interrupt();
    }
}
|
@Test
public void forward_stream_to_log() {
    // Each line of the child process stream must be forwarded as one INFO log call.
    InputStream stream = IOUtils.toInputStream("one\nsecond log\nthird log\n", StandardCharsets.UTF_8);
    Logger logger = mock(Logger.class);
    Logger startupLogger = mock(Logger.class);
    StreamGobbler gobbler = new StreamGobbler(stream, "WEB", appSettings, logger, startupLogger);
    // Nothing is logged before the gobbler thread starts.
    verifyNoInteractions(logger);
    gobbler.start();
    StreamGobbler.waitUntilFinish(gobbler);
    verify(logger).info("one");
    verify(logger).info("second log");
    verify(logger).info("third log");
    verifyNoMoreInteractions(logger);
    // The startup logger is reserved for other message kinds and must stay untouched.
    verifyNoInteractions(startupLogger);
}
|
/**
 * Checks whether the given placement constraint can be satisfied on the node
 * for the application. A null constraint is trivially satisfied.
 *
 * Single constraints are first normalized via SingleConstraintTransformer,
 * then dispatched by expression type (Single / And / Or).
 *
 * @param dcOpt optional diagnostics collector for explaining failures
 * @throws InvalidAllocationTagsQueryException for unsupported constraint types
 *         or tag-query failures in the delegates
 */
private static boolean canSatisfyConstraints(ApplicationId appId,
    PlacementConstraint constraint, SchedulerNode node,
    AllocationTagsManager atm,
    Optional<DiagnosticsCollector> dcOpt)
    throws InvalidAllocationTagsQueryException {
  if (constraint == null) {
    LOG.debug("Constraint is found empty during constraint validation for"
        + " app:{}", appId);
    return true;
  }
  // If this is a single constraint, transform to SingleConstraint
  SingleConstraintTransformer singleTransformer =
      new SingleConstraintTransformer(constraint);
  constraint = singleTransformer.transform();
  AbstractConstraint sConstraintExpr = constraint.getConstraintExpr();
  // TODO handle other type of constraints, e.g CompositeConstraint
  if (sConstraintExpr instanceof SingleConstraint) {
    SingleConstraint single = (SingleConstraint) sConstraintExpr;
    return canSatisfySingleConstraint(appId, single, node, atm, dcOpt);
  } else if (sConstraintExpr instanceof And) {
    And and = (And) sConstraintExpr;
    return canSatisfyAndConstraint(appId, and, node, atm, dcOpt);
  } else if (sConstraintExpr instanceof Or) {
    Or or = (Or) sConstraintExpr;
    return canSatisfyOrConstraint(appId, or, node, atm, dcOpt);
  } else {
    throw new InvalidAllocationTagsQueryException(
        "Unsupported type of constraint: "
            + sConstraintExpr.getClass().getSimpleName());
  }
}
|
/**
 * Verifies an inter-app anti-affinity constraint addressed via the app-tag
 * namespace: app2's constraint targets "hbase-m" containers belonging to any
 * application tagged "test-tag" (i.e. app1). Nodes hosting app1's hbase-m
 * containers (n0, n2) must fail the constraint; the others (n1, n3) pass.
 */
@Test
public void testInterAppConstriantsByAppTag()
    throws InvalidAllocationTagsQueryException {
  ApplicationId application1 = BuilderUtils.newApplicationId(1000, 123);
  ApplicationId application2 = BuilderUtils.newApplicationId(1001, 124);
  // app1: test-tag
  // app2: N/A
  RMContext mockedContext = Mockito.spy(rmContext);
  ConcurrentMap<ApplicationId, RMApp> allApps = new ConcurrentHashMap<>();
  allApps.put(application1, new MockRMApp(123, 1000,
      RMAppState.NEW, "userA", ImmutableSet.of("test-tag")));
  allApps.put(application2, new MockRMApp(124, 1001,
      RMAppState.NEW, "userA", ImmutableSet.of("")));
  when(mockedContext.getRMApps()).thenReturn(allApps);
  AllocationTagsManager tm = new AllocationTagsManager(mockedContext);
  PlacementConstraintManagerService pcm =
      new MemoryPlacementConstraintManager();
  mockedContext.setAllocationTagsManager(tm);
  mockedContext.setPlacementConstraintManager(pcm);
  // Register App1 with anti-affinity constraint map.
  RMNode n0r1 = rmNodes.get(0);
  RMNode n1r1 = rmNodes.get(1);
  RMNode n2r2 = rmNodes.get(2);
  RMNode n3r2 = rmNodes.get(3);
  /**
   * Place container:
   * n0: app1/hbase-m(1)
   * n1: ""
   * n2: app1/hbase-m(1)
   * n3: ""
   */
  tm.addContainer(n0r1.getNodeID(),
      newContainerId(application1), ImmutableSet.of("hbase-m"));
  tm.addContainer(n2r2.getNodeID(),
      newContainerId(application1), ImmutableSet.of("hbase-m"));
  SchedulerNode schedulerNode0 = newSchedulerNode(n0r1.getHostName(),
      n0r1.getRackName(), n0r1.getNodeID());
  SchedulerNode schedulerNode1 = newSchedulerNode(n1r1.getHostName(),
      n1r1.getRackName(), n1r1.getNodeID());
  SchedulerNode schedulerNode2 = newSchedulerNode(n2r2.getHostName(),
      n2r2.getRackName(), n2r2.getNodeID());
  SchedulerNode schedulerNode3 = newSchedulerNode(n3r2.getHostName(),
      n3r2.getRackName(), n3r2.getNodeID());
  // Constraint for app2: NOT on a node holding "hbase-m" containers of any
  // app tagged "test-tag" (resolved through the app-tag namespace).
  TargetApplicationsNamespace namespace =
      new TargetApplicationsNamespace.AppTag("test-tag");
  Map<Set<String>, PlacementConstraint> constraintMap = new HashMap<>();
  PlacementConstraint constraint2 = PlacementConstraints
      .targetNotIn(NODE, allocationTagWithNamespace(namespace.toString(),
          "hbase-m"))
      .build();
  Set<String> srcTags2 = ImmutableSet.of("app2");
  constraintMap.put(srcTags2, constraint2);
  pcm.registerApplication(application2, constraintMap);
  // Anti-affinity with app-tag/test-tag/hbase-m,
  // app1 has tag "test-tag" so the constraint is equally to work on app1
  // onto n1 and n3 as they don't have "hbase-m" from app1.
  Assert.assertFalse(PlacementConstraintsUtil.canSatisfyConstraints(
      application2, createSchedulingRequest(srcTags2),
      schedulerNode0, pcm, tm));
  Assert.assertTrue(PlacementConstraintsUtil.canSatisfyConstraints(
      application2, createSchedulingRequest(srcTags2),
      schedulerNode1, pcm, tm));
  Assert.assertFalse(PlacementConstraintsUtil.canSatisfyConstraints(
      application2, createSchedulingRequest(srcTags2),
      schedulerNode2, pcm, tm));
  Assert.assertTrue(PlacementConstraintsUtil.canSatisfyConstraints(
      application2, createSchedulingRequest(srcTags2),
      schedulerNode3, pcm, tm));
  pcm.unregisterApplication(application1);
  pcm.unregisterApplication(application2);
}
|
/**
 * Builds a JibContainerBuilder for containerizing a JAR.
 *
 * The base image comes from the CLI options when given, otherwise from the
 * processor's default. An explicit entrypoint from the CLI overrides the
 * processor-computed one (in which case the configured JVM flags are ignored,
 * since they only feed the computed entrypoint).
 *
 * @throws IOException if layer creation fails
 * @throws InvalidImageReferenceException if the base image reference is invalid
 */
public static JibContainerBuilder toJibContainerBuilder(
    ArtifactProcessor processor,
    Jar jarOptions,
    CommonCliOptions commonCliOptions,
    CommonContainerConfigCliOptions commonContainerConfigCliOptions,
    ConsoleLogger logger)
    throws IOException, InvalidImageReferenceException {
  String imageReference =
      commonContainerConfigCliOptions.getFrom().orElseGet(() -> getDefaultBaseImage(processor));
  JibContainerBuilder containerBuilder =
      ContainerBuilders.create(imageReference, Collections.emptySet(), commonCliOptions, logger);
  List<FileEntriesLayer> layers = processor.createLayers();
  List<String> customEntrypoint = commonContainerConfigCliOptions.getEntrypoint();
  List<String> entrypoint =
      customEntrypoint.isEmpty()
          ? processor.computeEntrypoint(jarOptions.getJvmFlags())
          : customEntrypoint;
  containerBuilder
      .setEntrypoint(entrypoint)
      .setFileEntriesLayers(layers)
      .setExposedPorts(commonContainerConfigCliOptions.getExposedPorts())
      .setVolumes(commonContainerConfigCliOptions.getVolumes())
      .setEnvironment(commonContainerConfigCliOptions.getEnvironment())
      .setLabels(commonContainerConfigCliOptions.getLabels())
      .setProgramArguments(commonContainerConfigCliOptions.getProgramArguments());
  // Optional settings are only applied when explicitly provided.
  commonContainerConfigCliOptions.getUser().ifPresent(containerBuilder::setUser);
  commonContainerConfigCliOptions.getFormat().ifPresent(containerBuilder::setFormat);
  commonContainerConfigCliOptions.getCreationTime().ifPresent(containerBuilder::setCreationTime);
  return containerBuilder;
}
|
/**
 * Verifies that every optional container-configuration CLI option (base image, exposed ports,
 * volumes, environment, labels, user, image format, program arguments, entrypoint, creation
 * time) is propagated into the resulting container build plan.
 */
@Test
public void testToJibContainerBuilder_optionalParameters()
    throws IOException, InvalidImageReferenceException {
  when(mockCommonContainerConfigCliOptions.getFrom()).thenReturn(Optional.of("base-image"));
  when(mockCommonContainerConfigCliOptions.getExposedPorts())
      .thenReturn(ImmutableSet.of(Port.udp(123)));
  when(mockCommonContainerConfigCliOptions.getVolumes())
      .thenReturn(
          ImmutableSet.of(AbsoluteUnixPath.get("/volume1"), AbsoluteUnixPath.get("/volume2")));
  when(mockCommonContainerConfigCliOptions.getEnvironment())
      .thenReturn(ImmutableMap.of("key1", "value1"));
  when(mockCommonContainerConfigCliOptions.getLabels())
      .thenReturn(ImmutableMap.of("label", "mylabel"));
  when(mockCommonContainerConfigCliOptions.getUser()).thenReturn(Optional.of("customUser"));
  when(mockCommonContainerConfigCliOptions.getFormat()).thenReturn(Optional.of(ImageFormat.OCI));
  when(mockCommonContainerConfigCliOptions.getProgramArguments())
      .thenReturn(ImmutableList.of("arg1"));
  when(mockCommonContainerConfigCliOptions.getEntrypoint())
      .thenReturn(ImmutableList.of("custom", "entrypoint"));
  when(mockCommonContainerConfigCliOptions.getCreationTime())
      .thenReturn(Optional.of(Instant.ofEpochSecond(5)));
  JibContainerBuilder containerBuilder =
      JarFiles.toJibContainerBuilder(
          mockStandardExplodedProcessor,
          mockJarCommand,
          mockCommonCliOptions,
          mockCommonContainerConfigCliOptions,
          mockLogger);
  // The build plan is the observable output; assert each configured option round-trips.
  ContainerBuildPlan buildPlan = containerBuilder.toContainerBuildPlan();
  assertThat(buildPlan.getBaseImage()).isEqualTo("base-image");
  assertThat(buildPlan.getExposedPorts()).isEqualTo(ImmutableSet.of(Port.udp(123)));
  assertThat(buildPlan.getVolumes())
      .isEqualTo(
          ImmutableSet.of(AbsoluteUnixPath.get("/volume1"), AbsoluteUnixPath.get("/volume2")));
  assertThat(buildPlan.getEnvironment()).isEqualTo(ImmutableMap.of("key1", "value1"));
  assertThat(buildPlan.getLabels()).isEqualTo(ImmutableMap.of("label", "mylabel"));
  assertThat(buildPlan.getUser()).isEqualTo("customUser");
  assertThat(buildPlan.getFormat()).isEqualTo(ImageFormat.OCI);
  assertThat(buildPlan.getCmd()).isEqualTo(ImmutableList.of("arg1"));
  assertThat(buildPlan.getEntrypoint()).isEqualTo(ImmutableList.of("custom", "entrypoint"));
  assertThat(buildPlan.getCreationTime()).isEqualTo(Instant.ofEpochSecond(5));
}
|
/**
 * Runs a native {@code git blame} on the given file and parses its output into blame lines.
 *
 * <p>Returns an empty list when the file has uncommitted changes, since blame information
 * for such a file cannot be trusted.
 *
 * @param baseDir root of the git working tree (also passed as the git directory arguments)
 * @param fileName path of the file to blame, relative to {@code baseDir}
 * @return one {@code BlameLine} per line of the file, or an empty list on uncommitted changes
 * @throws Exception if the underlying git process fails to execute
 */
public List<BlameLine> blame(Path baseDir, String fileName) throws Exception {
  BlameOutputProcessor outputProcessor = new BlameOutputProcessor();
  try {
    // Flag constants (porcelain/whitespace flags) are defined elsewhere in this class.
    this.processWrapperFactory.create(
        baseDir,
        outputProcessor::process,
        gitCommand,
        GIT_DIR_FLAG, String.format(GIT_DIR_ARGUMENT, baseDir), GIT_DIR_FORCE_FLAG, baseDir.toString(),
        BLAME_COMMAND,
        BLAME_LINE_PORCELAIN_FLAG, IGNORE_WHITESPACES, FILENAME_SEPARATOR_FLAG, fileName)
      .execute();
  } catch (UncommittedLineException e) {
    LOG.debug("Unable to blame file '{}' - it has uncommitted changes", fileName);
    return emptyList();
  }
  return outputProcessor.getBlameLines();
}
|
/**
 * Verifies that blaming with a nonexistent git executable surfaces an {@link IOException}
 * rather than failing silently.
 */
@Test
public void throw_exception_if_command_fails() throws Exception {
  Path baseDir = temp.newFolder().toPath();
  NativeGitBlameCommand blameCommand = new NativeGitBlameCommand("randomcmdthatwillneverbefound", System2.INSTANCE, processWrapperFactory);
  assertThatThrownBy(() -> blameCommand.blame(baseDir, "file")).isInstanceOf(IOException.class);
}
|
/**
 * Distributes {@code getWorkBudget} across the given budget owners.
 *
 * <p>No-ops when there are no owners or no budget to distribute. For each owner, the
 * desired share is computed and an adjustment is issued only when the owner's remaining
 * budget has dropped below fifty percent of its target.
 */
@Override
public <T extends GetWorkBudgetSpender> void distributeBudget(
    ImmutableCollection<T> budgetOwners, GetWorkBudget getWorkBudget) {
  if (budgetOwners.isEmpty()) {
    LOG.debug("Cannot distribute budget to no owners.");
    return;
  }
  if (getWorkBudget.equals(GetWorkBudget.noBudget())) {
    LOG.debug("Cannot distribute 0 budget.");
    return;
  }
  computeDesiredBudgets(budgetOwners, getWorkBudget)
      .forEach(
          (owner, desired) -> {
            GetWorkBudget remaining = owner.remainingBudget();
            if (isBelowFiftyPercentOfTarget(remaining, desired)) {
              owner.adjustBudget(desired.subtract(remaining));
            }
          });
}
|
/**
 * With a remaining byte budget far below the target (1 of 10 bytes), the distributor must
 * issue exactly one adjustment equal to the shortfall in both items and bytes.
 */
@Test
public void testDistributeBudget_adjustsStreamBudgetWhenRemainingByteBudgetTooLowNoActiveWork() {
  GetWorkBudget streamRemainingBudget =
      GetWorkBudget.builder().setItems(10L).setBytes(1L).build();
  GetWorkBudget totalGetWorkBudget = GetWorkBudget.builder().setItems(10L).setBytes(10L).build();
  GetWorkBudgetSpender getWorkBudgetSpender =
      spy(createGetWorkBudgetOwnerWithRemainingBudgetOf(streamRemainingBudget));
  createBudgetDistributor(0L)
      .distributeBudget(ImmutableList.of(getWorkBudgetSpender), totalGetWorkBudget);
  verify(getWorkBudgetSpender, times(1))
      .adjustBudget(
          eq(totalGetWorkBudget.items() - streamRemainingBudget.items()),
          eq(totalGetWorkBudget.bytes() - streamRemainingBudget.bytes()));
}
|
/**
 * Parses a duration string of the form {@code "<digits>[ ]<unit>"} (e.g. {@code "10 s"},
 * {@code "987654d"}) into a {@link Duration}. A missing unit label defaults to milliseconds.
 *
 * @param text the string to parse; must be non-null and contain at least one leading digit
 * @return the parsed duration
 * @throws NumberFormatException if the trimmed text does not start with a digit
 * @throws IllegalArgumentException if the text is blank, the unit label is unknown, or the
 *     value overflows what {@code java.time.Duration} can represent
 */
public static Duration parseDuration(String text) {
  checkNotNull(text);
  final String trimmed = text.trim();
  checkArgument(!trimmed.isEmpty(), "argument is an empty- or whitespace-only string");

  // Split into a leading run of ASCII digits and the remaining unit label.
  int digitEnd = 0;
  while (digitEnd < trimmed.length()) {
    final char c = trimmed.charAt(digitEnd);
    if (c < '0' || c > '9') {
      break;
    }
    digitEnd++;
  }
  final String digits = trimmed.substring(0, digitEnd);
  final String unitLabel = trimmed.substring(digitEnd).trim().toLowerCase(Locale.US);

  if (digits.isEmpty()) {
    throw new NumberFormatException("text does not start with a number");
  }

  final BigInteger value;
  try {
    value = new BigInteger(digits); // this throws a NumberFormatException
  } catch (NumberFormatException e) {
    throw new IllegalArgumentException(
        "The value '" + digits + "' cannot be represented as an integer number.", e);
  }

  final ChronoUnit unit =
      unitLabel.isEmpty() ? ChronoUnit.MILLIS : LABEL_TO_UNIT_MAP.get(unitLabel);
  if (unit == null) {
    throw new IllegalArgumentException(
        "Time interval unit label '"
            + unitLabel
            + "' does not match any of the recognized units: "
            + TimeUnit.getAllUnits());
  }

  try {
    return convertBigIntToDuration(value, unit);
  } catch (ArithmeticException e) {
    throw new IllegalArgumentException(
        "The value '"
            + digits
            + "' cannot be represented as java.time.Duration (numeric overflow).",
        e);
  }
}
|
/**
 * Verifies that all accepted day-unit spellings ("d", "day", "days") and an optional space
 * before the unit parse to the same number of days.
 */
@Test
void testParseDurationDays() {
  assertThat(TimeUtils.parseDuration("987654d").toDays()).isEqualTo(987654);
  assertThat(TimeUtils.parseDuration("987654day").toDays()).isEqualTo(987654);
  assertThat(TimeUtils.parseDuration("987654days").toDays()).isEqualTo(987654);
  assertThat(TimeUtils.parseDuration("987654 d").toDays()).isEqualTo(987654);
}
|
/**
 * CLI entry point: parses an optional {@code -o <option>} flag followed by an input file
 * and (for all modes except "report") an output file, then dispatches to the matching
 * recovery routine.
 *
 * @return 0-or-tool-specific code from the dispatched routine; 1 on usage errors
 */
@Override
public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
  if (args.size() < 2) {
    printInfo(err);
    return 1;
  }
  int pos = 0;
  String option = "all"; // default mode when no -o flag is given
  if ("-o".equals(args.get(0))) {
    option = args.get(1);
    pos = 2;
  }
  if (!OPTIONS.contains(option) || args.size() - pos < 1) {
    printInfo(err);
    return 1;
  }
  String inputFile = args.get(pos++);
  // Every mode except "report" also requires an output-file argument.
  if (!REPORT.equals(option) && args.size() - pos < 1) {
    printInfo(err);
    return 1;
  }
  if (ALL.equals(option)) {
    return recoverAll(inputFile, args.get(pos), out, err);
  }
  if (PRIOR.equals(option)) {
    return recoverPrior(inputFile, args.get(pos), out, err);
  }
  if (AFTER.equals(option)) {
    return recoverAfter(inputFile, args.get(pos), out, err);
  }
  if (REPORT.equals(option)) {
    return reportOnly(inputFile, out, err);
  }
  return 1;
}
|
/**
 * Repairs a data file with one corrupt block in "all" mode and verifies the reported block
 * and record counts plus the surviving record contents.
 */
@Test
void repairAllCorruptBlock() throws Exception {
  String output = run(new DataFileRepairTool(), "-o", "all", corruptBlockFile.getPath(), repairedFile.getPath());
  assertTrue(output.contains("Number of blocks: 2 Number of corrupt blocks: 1"), output);
  assertTrue(output.contains("Number of records: 5 Number of corrupt records: 0"), output);
  checkFileContains(repairedFile, "apple", "banana", "celery", "guava", "hazelnut");
}
|
/**
 * Marks the current position of the given buffer so a later {@code reset()} returns to it.
 *
 * <p>NOTE(review): this thin wrapper presumably exists to pin the {@link Buffer}-typed
 * overload (avoiding covariant-return incompatibilities across JDK versions) — confirm
 * against the project's compatibility notes.
 *
 * @param buffer the buffer to mark; must be non-null
 */
public static void mark(Buffer buffer) {
  buffer.mark();
}
|
/** Marking a freshly written ByteBuffer must not throw. */
@Test
public void testMark() {
  ByteBuffer byteBuffer = ByteBuffer.allocate(4);
  byteBuffer.putInt(1);
  Assertions.assertDoesNotThrow(() -> BufferUtils.mark(byteBuffer));
}
|
/**
 * Sends an event with Bundle payload to JS, converting the Bundle to a WritableMap first.
 *
 * @return {@code true} if a React context was available and the event was forwarded,
 *     {@code false} otherwise (nothing is sent)
 */
public boolean sendEventToJS(String eventName, Bundle data, ReactContext reactContext) {
  if (reactContext == null) {
    return false;
  }
  sendEventToJS(eventName, Arguments.fromBundle(data), reactContext);
  return true;
}
|
/** Without a React context the send must report false and emit nothing. */
@Test
public void sendEventToJS_noReactContext_returnsFalse() throws Exception {
  WritableMap data = mock(WritableMap.class);
  final JsIOHelper uut = createUUT();
  boolean result = uut.sendEventToJS("my-event", data, null);
  assertFalse(result);
  verify(mRCTDeviceEventEmitter, never()).emit(anyString(), any(WritableMap.class));
}
|
/**
 * Returns the maximum non-null element of the given list, or {@code null} when the list is
 * {@code null} or contains no non-null elements.
 */
@Udf
public <T extends Comparable<? super T>> T arrayMax(@UdfParameter(
    description = "Array of values from which to find the maximum") final List<T> input) {
  if (input == null) {
    return null;
  }
  T max = null;
  for (T value : input) {
    // Nulls are skipped; the first non-null value seeds the running maximum.
    if (value != null && (max == null || value.compareTo(max) > 0)) {
      max = value;
    }
  }
  return max;
}
|
/** The maximum of a mixed positive/negative double list is the largest value. */
@Test
public void shouldFindDoubleMax() {
  final List<Double> input =
      Arrays.asList(Double.valueOf(1.1), Double.valueOf(3.1), Double.valueOf(-1.1));
  assertThat(udf.arrayMax(input), is(Double.valueOf(3.1)));
}
|
/**
 * Builds the producer ACL bindings implied by the request and applies them on the given
 * cluster via its admin client.
 *
 * @return a {@code Mono} that completes when the ACLs have been created
 */
public Mono<Void> createProducerAcl(KafkaCluster cluster, CreateProducerAclDTO request) {
  return adminClientService.get(cluster)
      .flatMap(ac -> createAclsWithLogging(ac, createProducerBindings(request)))
      .then();
}
|
/**
 * When topics and transactional-id are given by prefix and idempotence is disabled, exactly
 * five prefixed ACL bindings must be created: WRITE/DESCRIBE/CREATE on topics and
 * WRITE/DESCRIBE on transactional ids (no cluster-level IDEMPOTENT_WRITE).
 */
@Test
void createsProducerDependantAclsWhenTopicsAndTxIdSpecifiedByPrefix() {
  ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class);
  when(adminClientMock.createAcls(createdCaptor.capture()))
      .thenReturn(Mono.empty());
  var principal = UUID.randomUUID().toString();
  var host = UUID.randomUUID().toString();
  aclsService.createProducerAcl(
      CLUSTER,
      new CreateProducerAclDTO()
          .principal(principal)
          .host(host)
          .topicsPrefix("topicPref")
          .transactionsIdPrefix("txIdPref")
          .idempotent(false)
  ).block();
  //Write, Describe, Create permission on topics, Write, Describe on transactionalIds
  //IDEMPOTENT_WRITE on cluster if idempotent is enabled (false)
  Collection<AclBinding> createdBindings = createdCaptor.getValue();
  assertThat(createdBindings)
      .hasSize(5)
      .contains(new AclBinding(
          new ResourcePattern(ResourceType.TOPIC, "topicPref", PatternType.PREFIXED),
          new AccessControlEntry(principal, host, AclOperation.WRITE, AclPermissionType.ALLOW)))
      .contains(new AclBinding(
          new ResourcePattern(ResourceType.TOPIC, "topicPref", PatternType.PREFIXED),
          new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)))
      .contains(new AclBinding(
          new ResourcePattern(ResourceType.TOPIC, "topicPref", PatternType.PREFIXED),
          new AccessControlEntry(principal, host, AclOperation.CREATE, AclPermissionType.ALLOW)))
      .contains(new AclBinding(
          new ResourcePattern(ResourceType.TRANSACTIONAL_ID, "txIdPref", PatternType.PREFIXED),
          new AccessControlEntry(principal, host, AclOperation.WRITE, AclPermissionType.ALLOW)))
      .contains(new AclBinding(
          new ResourcePattern(ResourceType.TRANSACTIONAL_ID, "txIdPref", PatternType.PREFIXED),
          new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)));
}
|
/**
 * Returns the header-row data of all stored query and load profiles.
 *
 * <p>Rows are prepended while iterating, so the map's iteration order is reversed in the
 * result (most recently iterated element first). The read lock guards concurrent access
 * to both profile maps.
 *
 * @return one row per profile element, each row holding the values for {@code PROFILE_HEADERS}
 */
public List<List<String>> getAllQueries() {
    List<List<String>> result = Lists.newLinkedList();
    readLock.lock();
    try {
        // Same extraction logic for both maps; factored out to avoid duplication.
        appendProfileRows(profileMap.values(), result);
        appendProfileRows(loadProfileMap.values(), result);
    } finally {
        readLock.unlock();
    }
    return result;
}

/**
 * Prepends one header row per profile element to {@code result}; inserting at index 0
 * reverses the iteration order so newer entries come first.
 */
private void appendProfileRows(Iterable<ProfileElement> elements, List<List<String>> result) {
    for (ProfileElement element : elements) {
        Map<String, String> infoStrings = element.infoStrings;
        List<String> row = Lists.newArrayList();
        for (String header : PROFILE_HEADERS) {
            row.add(infoStrings.get(header));
        }
        result.add(0, row);
    }
}
|
/**
 * Pushing one query profile and one load profile must yield two rows from getAllQueries();
 * profiles are cleared afterwards to keep the singleton manager clean.
 */
@Test
public void testGetAllQueries() {
  ProfileManager manager = ProfileManager.getInstance();
  assertTrue(manager.getAllProfileElements().isEmpty());
  RuntimeProfile profile1 = buildRuntimeProfile("123", "Query");
  manager.pushProfile(null, profile1);
  RuntimeProfile profile2 = buildRuntimeProfile("124", "Load");
  manager.pushProfile(null, profile2);
  assertEquals(2, manager.getAllQueries().size());
  manager.clearProfiles();
}
|
/**
 * Creates a SonarQube project from a Bitbucket Server repository and binds it to the
 * DevOps platform configuration.
 *
 * <p>Fetches the repository and its default branch via the REST client (using the user's
 * personal access token), creates the project — generating key/name from the repository
 * when none are supplied — and persists the project/ALM binding.
 *
 * @param projectKey optional explicit project key; derived from the repository when null
 * @param projectName optional explicit project name; derived from the repository when null
 */
@Override
public ComponentCreationData createProjectAndBindToDevOpsPlatform(DbSession dbSession, CreationMethod creationMethod, Boolean monorepo, @Nullable String projectKey,
    @Nullable String projectName) {
  String pat = findPersonalAccessTokenOrThrow(dbSession);
  String url = requireNonNull(almSettingDto.getUrl(), "DevOps Platform url cannot be null");
  String bitbucketRepo = devOpsProjectDescriptor.repositoryIdentifier();
  String bitbucketProject = getBitbucketProjectOrThrow();
  Repository repository = bitbucketServerRestClient.getRepo(url, pat, bitbucketProject, bitbucketRepo);
  String defaultBranchName = getDefaultBranchName(url, pat, bitbucketProject, bitbucketRepo);
  ComponentCreationData componentCreationData = projectCreator.createProject(
      dbSession,
      getProjectKey(projectKey, repository),
      getProjectName(projectName, repository),
      defaultBranchName,
      creationMethod);
  // projectDto must exist after creation; absence indicates a programming error upstream.
  ProjectDto projectDto = Optional.ofNullable(componentCreationData.projectDto()).orElseThrow();
  createProjectAlmSettingDto(dbSession, repository, projectDto, almSettingDto, monorepo);
  return componentCreationData;
}
|
/**
 * With no explicit key/name, the project key must be generated from the Bitbucket project
 * key and slug, and the persisted ALM binding must carry the repository identifiers and
 * the monorepo flag.
 */
@Test
void createProjectAndBindToDevOpsPlatform_whenNoKeyAndNameSpecified_generatesKeyAndUsersBitbucketRepositoryName() {
  mockValidUserSession();
  mockValidAlmSettingsDto();
  mockValidPatForUser();
  mockValidProjectDescriptor();
  Repository repository = mockValidBitBucketRepository();
  String generatedProjectKey = "generatedProjectKey";
  when(projectKeyGenerator.generateUniqueProjectKey(repository.getProject().getKey(), repository.getSlug())).thenReturn(generatedProjectKey);
  mockProjectCreation(generatedProjectKey, repository.getName());
  underTest.createProjectAndBindToDevOpsPlatform(mock(DbSession.class), CreationMethod.ALM_IMPORT_API, true, null, null);
  ArgumentCaptor<ProjectAlmSettingDto> projectAlmSettingCaptor = ArgumentCaptor.forClass(ProjectAlmSettingDto.class);
  verify(dbClient.projectAlmSettingDao()).insertOrUpdate(any(), projectAlmSettingCaptor.capture(), eq(ALM_SETTING_KEY), eq(repository.getName()), eq(generatedProjectKey));
  ProjectAlmSettingDto createdProjectAlmSettingDto = projectAlmSettingCaptor.getValue();
  assertThat(createdProjectAlmSettingDto.getAlmSettingUuid()).isEqualTo(ALM_SETTING_UUID);
  assertThat(createdProjectAlmSettingDto.getAlmRepo()).isEqualTo(DOP_PROJECT_ID);
  assertThat(createdProjectAlmSettingDto.getAlmSlug()).isEqualTo(DOP_REPOSITORY_ID);
  assertThat(createdProjectAlmSettingDto.getProjectUuid()).isEqualTo(PROJECT_UUID);
  assertThat(createdProjectAlmSettingDto.getMonorepo()).isTrue();
}
|
/**
 * Applies the mining field's invalid-value treatment to the given parameter.
 *
 * <p>RETURN_INVALID (also the default when unspecified) throws; AS_MISSING schedules the
 * parameter for removal; AS_IS keeps the value untouched; AS_VALUE replaces the value with
 * the configured replacement converted to the field's data type.
 *
 * @throws KiePMMLInputDataException on RETURN_INVALID, or when AS_VALUE lacks a replacement
 * @throws KiePMMLException on an unrecognized treatment method
 */
static void manageInvalidValues(final KiePMMLMiningField miningField, final ParameterInfo parameterInfo,
                                final List<ParameterInfo> toRemove) {
    final INVALID_VALUE_TREATMENT_METHOD treatment =
            miningField.getInvalidValueTreatmentMethod() != null
                    ? miningField.getInvalidValueTreatmentMethod()
                    : INVALID_VALUE_TREATMENT_METHOD.RETURN_INVALID;
    final Object originalValue = parameterInfo.getValue();
    if (treatment == INVALID_VALUE_TREATMENT_METHOD.RETURN_INVALID) {
        throw new KiePMMLInputDataException("Invalid value " + originalValue + " for " + miningField.getName());
    } else if (treatment == INVALID_VALUE_TREATMENT_METHOD.AS_MISSING) {
        toRemove.add(parameterInfo);
    } else if (treatment == INVALID_VALUE_TREATMENT_METHOD.AS_IS) {
        // deliberately keep the invalid value untouched
    } else if (treatment == INVALID_VALUE_TREATMENT_METHOD.AS_VALUE) {
        final String invalidValueReplacement = miningField.getInvalidValueReplacement();
        if (invalidValueReplacement == null) {
            throw new KiePMMLInputDataException("Missing required invalidValueReplacement for " + miningField.getName());
        }
        final Object requiredValue = miningField.getDataType().getActualValue(invalidValueReplacement);
        parameterInfo.setType(miningField.getDataType().getMappedClass());
        parameterInfo.setValue(requiredValue);
    } else {
        throw new KiePMMLException("Unmanaged INVALID_VALUE_TREATMENT_METHOD " + treatment);
    }
}
|
/**
 * RETURN_INVALID treatment must raise a KiePMMLException (the thrown
 * KiePMMLInputDataException is expected to satisfy this supertype check).
 */
@Test
void manageInvalidValuesReturnInvalid() {
  assertThatExceptionOfType(KiePMMLException.class).isThrownBy(() -> {
    final ParameterInfo parameterInfo = new ParameterInfo();
    // RETURN_INVALID
    KiePMMLMiningField miningField = KiePMMLMiningField.builder("FIELD", null)
        .withDataType(DATA_TYPE.STRING)
        .withInvalidValueTreatmentMethod(INVALID_VALUE_TREATMENT_METHOD.RETURN_INVALID)
        .build();
    List<ParameterInfo> toRemove = new ArrayList<>();
    PreProcess.manageInvalidValues(miningField, parameterInfo, toRemove);
  });
}
|
/**
 * Extracts the plugin name from an icon src attribute made of space-separated tokens,
 * e.g. {@code "symbol-padlock plugin-design-library"} yields {@code "design-library"}.
 *
 * @param iconSrc the icon source string; may be null
 * @return the name following the first {@code plugin-} token, or an empty string when
 *     the input is null or no token starts with {@code plugin-}
 */
@Restricted(NoExternalUse.class)
public static String extractPluginNameFromIconSrc(String iconSrc) {
    if (iconSrc == null || !iconSrc.contains("plugin-")) {
        return "";
    }
    for (String token : iconSrc.split(" ")) {
        if (token.startsWith("plugin-")) {
            // startsWith guarantees the prefix sits at index 0, so a plain substring works.
            return token.substring("plugin-".length());
        }
    }
    return "";
}
|
/** The token prefixed with "plugin-" yields the bare plugin name. */
@Test
public void extractPluginNameFromIconSrcExtractsPlugin() {
  String result = Functions.extractPluginNameFromIconSrc("symbol-padlock plugin-design-library");
  assertThat(result, is(equalTo("design-library")));
}
|
/**
 * Creates a share-session cache.
 *
 * @param maxEntries maximum number of sessions the cache holds (enforcement lives in the
 *     insertion/eviction paths, not shown here)
 * @param evictionMs age threshold in milliseconds used by the eviction logic
 */
public ShareSessionCache(int maxEntries, long evictionMs) {
    this.maxEntries = maxEntries;
    this.evictionMs = evictionMs;
}
|
/**
 * Exercises capacity and eviction: a full cache rejects new sessions, touching a session
 * keeps it alive, and a stale session is evicted in favor of a new one only when its
 * last-used time is old enough.
 */
@Test
public void testShareSessionCache() {
  ShareSessionCache cache = new ShareSessionCache(3, 100);
  assertEquals(0, cache.size());
  ShareSessionKey key1 = cache.maybeCreateSession("grp", Uuid.randomUuid(), 0, mockedSharePartitionMap(10));
  ShareSessionKey key2 = cache.maybeCreateSession("grp", Uuid.randomUuid(), 10, mockedSharePartitionMap(20));
  ShareSessionKey key3 = cache.maybeCreateSession("grp", Uuid.randomUuid(), 20, mockedSharePartitionMap(30));
  // Cache is full (3 entries): further creations are rejected.
  assertNull(cache.maybeCreateSession("grp", Uuid.randomUuid(), 30, mockedSharePartitionMap(40)));
  assertNull(cache.maybeCreateSession("grp", Uuid.randomUuid(), 40, mockedSharePartitionMap(5)));
  assertShareCacheContains(cache, new ArrayList<>(Arrays.asList(key1, key2, key3)));
  cache.touch(cache.get(key1), 200);
  ShareSessionKey key4 = cache.maybeCreateSession("grp", Uuid.randomUuid(), 210, mockedSharePartitionMap(11));
  // key2 (oldest untouched) was evicted to make room for key4.
  assertShareCacheContains(cache, new ArrayList<>(Arrays.asList(key1, key3, key4)));
  cache.touch(cache.get(key1), 400);
  cache.touch(cache.get(key3), 390);
  cache.touch(cache.get(key4), 400);
  // All entries recently touched: nothing is stale enough to evict.
  ShareSessionKey key5 = cache.maybeCreateSession("grp", Uuid.randomUuid(), 410, mockedSharePartitionMap(50));
  assertNull(key5);
}
|
/**
 * Requests the plugin's static assets and extracts them from the response body using the
 * message converter matching the resolved extension version.
 *
 * @param pluginId id of the plugin to query
 * @return the static assets payload as returned by the version-specific converter
 */
public String getStaticAssets(String pluginId) {
  return pluginRequestHelper.submitRequest(pluginId, REQUEST_GET_STATIC_ASSETS, new DefaultPluginInteractionCallback<>() {
    @Override
    public String onSuccess(String responseBody, Map<String, String> responseHeaders, String resolvedExtensionVersion) {
      return getMessageConverter(resolvedExtensionVersion).getStaticAssetsFromResponseBody(responseBody);
    }
  });
}
|
/** An empty JSON response (no assets) must surface as a RuntimeException. */
@Test
public void shouldErrorOutInAbsenceOfStaticAssets() {
  when(pluginManager.submitTo(eq(PLUGIN_ID), eq(ANALYTICS_EXTENSION), requestArgumentCaptor.capture())).thenReturn(new DefaultGoPluginApiResponse(SUCCESS_RESPONSE_CODE, "{}"));
  assertThatThrownBy(() -> analyticsExtension.getStaticAssets(PLUGIN_ID))
      .isInstanceOf(RuntimeException.class)
      .hasMessageContaining("No assets defined!");
}
|
/**
 * Always throws: this framework implementation is disabled and rejects package refreshes.
 *
 * @throws RuntimeException always, created by {@code newException()}
 */
@Override
public void refreshPackages() {
    throw newException();
}
|
/** The disabled OSGi framework must reject refreshPackages with a RuntimeException. */
@Test
void require_that_refreshPackages_throws_exception() {
    DisableOsgiFramework framework = new DisableOsgiFramework();
    assertThrows(RuntimeException.class, framework::refreshPackages);
}
|
/**
 * Decrypts the secret key and content for cipher-prefixed data ids.
 *
 * <p>Non-cipher data ids, and data ids whose algorithm has no registered encryption
 * plugin, are returned unchanged.
 *
 * @param dataId config data id; cipher handling applies only when it passes checkCipher
 * @param secretKey the (possibly encrypted) secret key
 * @param content the (possibly encrypted) content
 * @return pair of (decrypted secret key, decrypted content), or the inputs unchanged
 */
public static Pair<String, String> decryptHandler(String dataId, String secretKey, String content) {
    if (!checkCipher(dataId)) {
        return Pair.with(secretKey, content);
    }
    EncryptionPluginService service = parseAlgorithmName(dataId)
            .flatMap(EncryptionPluginManager.instance()::findEncryptionService)
            .orElse(null);
    if (service == null) {
        LOGGER.warn("[EncryptionHandler] [decryptHandler] No encryption program with the corresponding name found");
        return Pair.with(secretKey, content);
    }
    String plainSecretKey = service.decryptSecretKey(secretKey);
    return Pair.with(plainSecretKey, service.decrypt(plainSecretKey, content));
}
|
/**
 * Round-trip: content encrypted with the mock algorithm and an encrypted secret key must
 * decrypt back to the original content and original secret key.
 */
@Test
void testDecrypt() {
  String dataId = "cipher-mockAlgo-application";
  String oContent = "content";
  String oSec = mockEncryptionPluginService.generateSecretKey();
  String content = mockEncryptionPluginService.encrypt(oSec, oContent);
  String sec = mockEncryptionPluginService.encryptSecretKey(oSec);
  Pair<String, String> pair = EncryptionHandler.decryptHandler(dataId, sec, content);
  assertNotNull(pair);
  assertEquals(oContent, pair.getSecond(), "should return original content.");
  assertEquals(oSec, pair.getFirst(), "should return original secret key.");
}
|
/**
 * Creates the initial theta-sketch union for a raw column value.
 *
 * <p>Accepts a single serialized sketch ({@code byte[]}), multiple serialized sketches
 * ({@code byte[][]}), or any other object, which is fed in as a single-item update.
 * Also tracks the largest union size seen so far in {@code _maxByteSize}.
 *
 * @param rawValue serialized sketch bytes, an array of them, or a single raw item
 * @return a {@code Union} seeded with the given value(s)
 */
@Override
public Object getInitialAggregatedValue(Object rawValue) {
  Union thetaUnion = _setOperationBuilder.buildUnion();
  if (rawValue instanceof byte[]) { // Serialized Sketch
    byte[] bytes = (byte[]) rawValue;
    Sketch sketch = deserializeAggregatedValue(bytes);
    thetaUnion.union(sketch);
  } else if (rawValue instanceof byte[][]) { // Multiple Serialized Sketches
    byte[][] serializedSketches = (byte[][]) rawValue;
    for (byte[] sketchBytes : serializedSketches) {
      thetaUnion.union(deserializeAggregatedValue(sketchBytes));
    }
  } else {
    singleItemUpdate(thetaUnion, rawValue);
  }
  _maxByteSize = Math.max(_maxByteSize, thetaUnion.getCurrentBytes());
  return thetaUnion;
}
|
/** A single raw item must produce a sketch whose distinct-count estimate is exactly 1. */
@Test
public void initialShouldCreateSingleItemSketch() {
  DistinctCountThetaSketchValueAggregator agg = new DistinctCountThetaSketchValueAggregator();
  assertEquals(toSketch(agg.getInitialAggregatedValue("hello world")).getEstimate(), 1.0);
}
|
/**
 * Returns the distinct elements of {@code left} that do not occur in {@code right},
 * preserving the order of first occurrence in {@code left}.
 *
 * @return the filtered list, or {@code null} when either input is {@code null}
 */
@Udf
public <T> List<T> except(
    @UdfParameter(description = "Array of values") final List<T> left,
    @UdfParameter(description = "Array of exceptions") final List<T> right) {
  if (left == null || right == null) {
    return null;
  }
  final Set<T> exclusions = new HashSet<>(right);
  // distinct() keeps the first occurrence in encounter order, matching a LinkedHashSet scan.
  return left.stream()
      .distinct()
      .filter(element -> !exclusions.contains(element))
      .collect(Collectors.toList());
}
|
/** Null inputs short-circuit to a null result. */
@Test
public void shouldReturnNullForNullInputs() {
  @SuppressWarnings("rawtypes")
  final List result = udf.except(null, null);
  assertThat(result, is(nullValue()));
}
|
/**
 * Converts a Java {@link Type} to its {@code ParamType} using the varargs-specific
 * java-to-argument-type mapping.
 */
public static ParamType getVarArgsSchemaFromType(final Type type) {
  return getSchemaFromType(type, VARARGS_JAVA_TO_ARG_TYPE);
}
|
/**
 * A {@code Function<Long, Integer>} parameter must map to a LambdaType with LONG input
 * and INTEGER return.
 */
@Test
public void shouldGetFunctionVariadic() throws NoSuchMethodException {
  final Type type = getClass().getDeclaredMethod("functionType", Function.class)
      .getGenericParameterTypes()[0];
  final ParamType schema = UdfUtil.getVarArgsSchemaFromType(type);
  assertThat(schema, instanceOf(LambdaType.class));
  assertThat(((LambdaType) schema).inputTypes(), equalTo(ImmutableList.of(ParamTypes.LONG)));
  assertThat(((LambdaType) schema).returnType(), equalTo(ParamTypes.INTEGER));
}
|
/**
 * Parses a string literal into a {@code SchemaAndValue}, delegating type inference to
 * {@code ValueParser} (e.g. a short-range integer literal parses as INT16, per the
 * accompanying test).
 *
 * <p>A {@code null} input yields the shared null schema-and-value; an empty string is
 * returned as-is with a STRING schema.
 */
public static SchemaAndValue parseString(String value) {
  if (value == null) {
    return NULL_SCHEMA_AND_VALUE;
  }
  if (value.isEmpty()) {
    return new SchemaAndValue(Schema.STRING_SCHEMA, value);
  }
  ValueParser parser = new ValueParser(new Parser(value));
  return parser.parse(false);
}
|
/** Both extremes of the short range must parse to INT16 with Short values. */
@Test
public void shouldParseShortAsInt16() {
  Short value = Short.MAX_VALUE;
  SchemaAndValue schemaAndValue = Values.parseString(
      String.valueOf(value)
  );
  assertEquals(Schema.INT16_SCHEMA, schemaAndValue.schema());
  assertInstanceOf(Short.class, schemaAndValue.value());
  assertEquals(value.shortValue(), ((Short) schemaAndValue.value()).shortValue());
  value = Short.MIN_VALUE;
  schemaAndValue = Values.parseString(
      String.valueOf(value)
  );
  assertEquals(Schema.INT16_SCHEMA, schemaAndValue.schema());
  assertInstanceOf(Short.class, schemaAndValue.value());
  assertEquals(value.shortValue(), ((Short) schemaAndValue.value()).shortValue());
}
|
/**
 * Simulates a remote call that sleeps a random 0-999 ms; succeeds (value * 10) when the
 * wait stayed within THRESHOLD, otherwise reports the failure status value.
 */
@Override
public long doRemoteFunction(int value) {
  long waitTime = (long) Math.floor(randomProvider.random() * 1000);
  try {
    sleep(waitTime);
  } catch (InterruptedException e) {
    LOGGER.error("Thread sleep state interrupted", e);
    // Restore the interrupt flag so callers can observe the interruption.
    Thread.currentThread().interrupt();
  }
  if (waitTime <= THRESHOLD) {
    return value * 10;
  }
  return RemoteServiceStatus.FAILURE.getRemoteServiceStatusValue();
}
|
/** With a low random value (short wait), the call succeeds and returns value * 10. */
@Test
void testSuccessfulCall() {
  var remoteService = new RemoteService(new StaticRandomProvider(0.2));
  var result = remoteService.doRemoteFunction(10);
  assertEquals(100, result);
}
|
/**
 * Sets the registry group and returns this builder for chaining.
 */
public RegistryBuilder group(String group) {
    this.group = group;
    return getThis();
}
|
/** The group set on the builder must survive into the built registry config. */
@Test
void group() {
  RegistryBuilder builder = new RegistryBuilder();
  builder.group("group");
  Assertions.assertEquals("group", builder.build().getGroup());
}
|
/**
 * Reads Jetty options from the given config node and publishes each one as a JVM system
 * property. A null or empty option map results in no changes.
 */
protected void setUpJettyOptions( Node node ) {
  Map<String, String> jettyOptions = parseJettyOptions( node );
  if ( jettyOptions == null || jettyOptions.isEmpty() ) {
    return;
  }
  for ( Entry<String, String> option : jettyOptions.entrySet() ) {
    System.setProperty( option.getKey(), option.getValue() );
  }
}
|
/**
 * All three Jetty options from the config (acceptors, accept queue size, low-resource max
 * idle time) must be published as system properties with the expected values.
 */
@Test
public void testSetUpJettyOptionsAsSystemParameters() throws KettleXMLException {
  Node configNode = getConfigNode( getConfigWithAllOptions() );
  slServerConfig.setUpJettyOptions( configNode );
  assertTrue( "Expected containing jetty option " + EXPECTED_ACCEPTORS_KEY, System.getProperties().containsKey(
    EXPECTED_ACCEPTORS_KEY ) );
  assertEquals( EXPECTED_ACCEPTORS_VALUE, System.getProperty( EXPECTED_ACCEPTORS_KEY ) );
  assertTrue( "Expected containing jetty option " + EXPECTED_ACCEPT_QUEUE_SIZE_KEY, System.getProperties()
    .containsKey( EXPECTED_ACCEPT_QUEUE_SIZE_KEY ) );
  assertEquals( EXPECTED_ACCEPT_QUEUE_SIZE_VALUE, System.getProperty( EXPECTED_ACCEPT_QUEUE_SIZE_KEY ) );
  assertTrue( "Expected containing jetty option " + EXPECTED_LOW_RES_MAX_IDLE_TIME_KEY, System.getProperties()
    .containsKey( EXPECTED_LOW_RES_MAX_IDLE_TIME_KEY ) );
  assertEquals( EXPECTED_LOW_RES_MAX_IDLE_TIME_VALUE, System.getProperty( EXPECTED_LOW_RES_MAX_IDLE_TIME_KEY ) );
}
|
/**
 * Returns the user-level configuration directory, derived from the current JVM system
 * properties and process environment.
 */
public static Path getConfigHome() {
  return getConfigHome(System.getProperties(), System.getenv());
}
|
/**
 * On a mac/darwin os.name, the config home must resolve under
 * {@code ~/Library/Preferences/Google/Jib}.
 */
@Test
public void testGetConfigHome_mac() throws IOException {
  Path libraryApplicationSupport = Paths.get(fakeConfigHome, "Library", "Preferences");
  Files.createDirectories(libraryApplicationSupport);
  Properties fakeProperties = new Properties();
  fakeProperties.setProperty("user.home", fakeConfigHome);
  // os.name matching is case-insensitive substring ("mac" / "darwin").
  fakeProperties.setProperty("os.name", "os is mAc or DaRwIn");
  Assert.assertEquals(
      libraryApplicationSupport.resolve("Google").resolve("Jib"),
      XdgDirectories.getConfigHome(fakeProperties, Collections.emptyMap()));
}
|
/**
 * Votes for an existing equal item, or adds the item when it is not yet in the list.
 *
 * @return the existing weighted entry that received the vote, or the newly added entry
 */
public WeightedItem<T> addOrVote(T item) {
    // Linear scan: an equal item already present gets a vote instead of a new entry.
    for (WeightedItem<T> existing : list) {
        if (existing.item.equals(item)) {
            voteFor(existing);
            return existing;
        }
    }
    return organizeAndAdd(item);
}
|
/** Adding the same item three times keeps one entry whose weight is 3. */
@Test
public void testDuplicateAddIncreasesWeight() {
  WeightedEvictableList<String> list = new WeightedEvictableList<>(3, 3);
  list.addOrVote("a");
  list.addOrVote("a");
  list.addOrVote("a");
  assertItemsInOrder(list, "a");
  assertWeightsInOrder(list, 3);
}
|
/**
 * Creates a {@code Matches} transform for the given regular expression, emitting group 0
 * (the entire match).
 */
public static Matches matches(String regex) {
  return matches(regex, 0);
}
|
/** Only elements fully matching the character class pass through the transform. */
@Test
@Category(NeedsRunner.class)
public void testMatches() {
  PCollection<String> output =
      p.apply(Create.of("a", "x", "y", "z")).apply(Regex.matches("[xyz]"));
  PAssert.that(output).containsInAnyOrder("x", "y", "z");
  p.run();
}
|
/**
 * Lists member endpoints by querying the Kubernetes API for the pods of the configured
 * namespace and enriching them with public addresses. Known REST failures are delegated
 * to {@code handleKnownException}.
 */
List<Endpoint> endpoints() {
  try {
    String urlString = String.format("%s/api/v1/namespaces/%s/pods", kubernetesMaster, namespace);
    return enrichWithPublicAddresses(parsePodsList(callGet(urlString)));
  } catch (RestClientException e) {
    return handleKnownException(e);
  }
}
|
/**
 * When the endpoints response lacks node names, public addresses must still be resolved by
 * following pod -> node lookups; private and public address sets are asserted separately.
 */
@Test
public void endpointsWithoutNodeName() throws JsonProcessingException {
  stub(String.format("/api/v1/namespaces/%s/services/hazelcast-0", NAMESPACE),
      serviceLb(servicePort(32124, 5701, 31916), "35.232.226.200"));
  stub(String.format("/api/v1/namespaces/%s/services/service-1", NAMESPACE),
      serviceLb(servicePort(32124, 5701, 31916), "35.232.226.201"));
  stub(String.format("/api/v1/namespaces/%s/endpoints", NAMESPACE), endpointsListResponseWithoutNodeName());
  stub(String.format("/api/v1/namespaces/%s/pods", NAMESPACE), podsListResponse());
  stub(String.format("/api/v1/namespaces/%s/pods/hazelcast-0", NAMESPACE),
      pod("hazelcast-0", NAMESPACE, "node-name-1", 5701));
  stub(String.format("/api/v1/namespaces/%s/pods/hazelcast-1", NAMESPACE),
      pod("hazelcast-1", NAMESPACE, "node-name-2", 5701));
  stub("/api/v1/nodes/node-name-1", node("node-name-1", "10.240.0.21", "35.232.226.200"));
  stub("/api/v1/nodes/node-name-2", node("node-name-2", "10.240.0.22", "35.232.226.201"));
  List<Endpoint> result = kubernetesClient.endpoints();
  assertThat(formatPrivate(result)).containsExactlyInAnyOrder(ready("192.168.0.25", 5701), ready("172.17.0.5", 5702));
  assertThat(formatPublic(result)).containsExactlyInAnyOrder(ready("35.232.226.200", 32124), ready("35.232.226.201", 32124));
}
|
/**
 * Reads the long-polling client identification header from the request.
 *
 * @return the header value, or an empty string when the header is absent or blank
 */
public static String getClientIdentify(HttpServletRequest request) {
    String identify = request.getHeader(LONG_PULLING_CLIENT_IDENTIFICATION);
    if (StringUtil.isBlank(identify)) {
        return "";
    }
    return identify;
}
|
/** A request without the identification header yields an empty string, never null. */
@Test
public void notExistClientIdentifyTest() {
  MockHttpServletRequest request = new MockHttpServletRequest();
  String clientIdentify = RequestUtil.getClientIdentify(request);
  Assert.isTrue(Objects.equals(clientIdentify, ""));
}
|
/**
 * Randomly chooses a node having the requested storage type using a two-trial strategy:
 * first an unconstrained random pick (the cheap, inherited search); only if that node
 * lacks the storage type is the storage-type-aware search performed.
 *
 * <p>A scope starting with {@code "~"} means "anywhere except that scope". The topology
 * read lock is held for the whole selection.
 *
 * @param scope the scope to search in, or (with a leading {@code ~}) to exclude
 * @param excludedNodes individual nodes to skip; may be null
 * @param type the storage type the chosen node must provide
 * @return a matching {@code DatanodeDescriptor}, or {@code null} when none is available
 */
public Node chooseRandomWithStorageTypeTwoTrial(final String scope,
    final Collection<Node> excludedNodes, StorageType type) {
  netlock.readLock().lock();
  try {
    String searchScope;
    String excludedScope;
    if (scope.startsWith("~")) {
      searchScope = NodeBase.ROOT;
      excludedScope = scope.substring(1);
    } else {
      searchScope = scope;
      excludedScope = null;
    }
    // next do a two-trial search
    // first trial, call the old method, inherited from NetworkTopology
    Node n = chooseRandom(searchScope, excludedScope, excludedNodes);
    if (n == null) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("No node to choose.");
      }
      // this means there is simply no node to choose from
      return null;
    }
    Preconditions.checkArgument(n instanceof DatanodeDescriptor);
    DatanodeDescriptor dnDescriptor = (DatanodeDescriptor)n;
    if (dnDescriptor.hasStorageType(type)) {
      // the first trial succeeded, just return
      return dnDescriptor;
    } else {
      // otherwise, make the second trial by calling the new method
      LOG.debug("First trial failed, node has no type {}, " +
          "making second trial carrying this type", type);
      return chooseRandomWithStorageType(searchScope, excludedScope,
          excludedNodes, type);
    }
  } finally {
    netlock.readLock().unlock();
  }
}
|
/**
 * Exercises the two-trial chooser with both plain scopes and "~"-excluded scopes, with and
 * without explicitly excluded nodes, asserting the chosen host is always one that carries
 * the requested storage type.
 */
@Test
public void testChooseRandomWithStorageTypeTwoTrial() throws Exception {
  Node n;
  DatanodeDescriptor dd;
  n = CLUSTER.chooseRandomWithStorageType("/l2/d3/r4", null, null,
      StorageType.ARCHIVE);
  HashSet<Node> excluded = new HashSet<>();
  // exclude the host on r4 (since there is only one host, no randomness here)
  excluded.add(n);
  // search with given scope being desired scope
  for (int i = 0; i < 10; i++) {
    n = CLUSTER.chooseRandomWithStorageTypeTwoTrial(
        "/l2/d3", null, StorageType.ARCHIVE);
    assertTrue(n instanceof DatanodeDescriptor);
    dd = (DatanodeDescriptor) n;
    assertTrue(dd.getHostName().equals("host13") ||
        dd.getHostName().equals("host14"));
  }
  for (int i = 0; i < 10; i++) {
    n = CLUSTER.chooseRandomWithStorageTypeTwoTrial(
        "/l2/d3", excluded, StorageType.ARCHIVE);
    assertTrue(n instanceof DatanodeDescriptor);
    dd = (DatanodeDescriptor) n;
    assertTrue(dd.getHostName().equals("host14"));
  }
  // search with given scope being exclude scope
  // a total of 4 ramdisk nodes:
  // /l1/d2/r3/host7, /l2/d3/r2/host10, /l2/d4/r1/host7 and /l2/d4/r1/host10
  // so if we exclude /l2/d4/r1, if should be always either host7 or host10
  for (int i = 0; i < 10; i++) {
    n = CLUSTER.chooseRandomWithStorageTypeTwoTrial(
        "~/l2/d4", null, StorageType.RAM_DISK);
    assertTrue(n instanceof DatanodeDescriptor);
    dd = (DatanodeDescriptor) n;
    assertTrue(dd.getHostName().equals("host7") ||
        dd.getHostName().equals("host11"));
  }
  // similar to above, except that we also exclude host10 here. so it should
  // always be host7
  n = CLUSTER.chooseRandomWithStorageType("/l2/d3/r2", null, null,
      StorageType.RAM_DISK);
  // add host10 to exclude
  excluded.add(n);
  for (int i = 0; i < 10; i++) {
    n = CLUSTER.chooseRandomWithStorageTypeTwoTrial(
        "~/l2/d4", excluded, StorageType.RAM_DISK);
    assertTrue(n instanceof DatanodeDescriptor);
    dd = (DatanodeDescriptor) n;
    assertTrue(dd.getHostName().equals("host7"));
  }
}
|
/**
 * Returns the monetary amount expressed in cents (the minor currency unit).
 *
 * @return the cent value held by this object
 */
public long getCent() {
    return this.cent;
}
|
/** Verifies yuan-to-cent conversion via both the Money type and MathUtil. */
@Test
public void yuanToCentTest() {
    final long expectedCents = 123456;
    assertEquals(expectedCents, new Money("1234.56").getCent());
    assertEquals(expectedCents, MathUtil.yuanToCent(1234.56));
}
|
/**
 * Resolves a {@link TransportClient} for the given d2 request.
 * <p>
 * Resolution order: (1) an explicit target URI from TargetHints or a
 * HostOverrideList in the request context bypasses load balancing entirely;
 * (2) otherwise a tracker client is chosen, honoring custom affinity routing
 * when a {@link CustomAffinityRoutingURIProvider} is present and enabled.
 * The result (or a {@link ServiceUnavailableException}) is delivered through
 * {@code clientCallback}.
 *
 * @param request        the d2 request; its URI must use the d2 scheme
 * @param requestContext per-request context carrying hints/overrides
 * @param clientCallback receives the resolved client or an error
 * @throws IllegalArgumentException if the request URI is not a d2 URI
 */
@Override
public void getClient(Request request, RequestContext requestContext, Callback<TransportClient> clientCallback)
{
  URI uri = request.getURI();
  debug(_log, "get client for uri: ", uri);
  if (!D2_SCHEME_NAME.equalsIgnoreCase(uri.getScheme()))
  {
    throw new IllegalArgumentException("Unsupported scheme in URI " + uri);
  }
  // get the service for this uri
  String extractedServiceName = LoadBalancerUtil.getServiceNameFromUri(uri);
  listenToServiceAndCluster(extractedServiceName, Callbacks.handle(service -> {
    String serviceName = service.getServiceName();
    String clusterName = service.getClusterName();
    try
    {
      ClusterProperties cluster = getClusterProperties(serviceName, clusterName);
      // Check if we want to override the service URL and bypass choosing among the existing
      // tracker clients. This is useful when the service we want is not announcing itself to
      // the cluster, ie a private service for a set of clients. This mechanism is deprecated;
      // use host override list instead.
      @SuppressWarnings("deprecation")
      URI targetService = LoadBalancerUtil.TargetHints.getRequestContextTargetService(requestContext);
      // Checks if we have a host override list provided in the request context. If present,
      // get the override URI available override for the current cluster and service names.
      HostOverrideList overrides = (HostOverrideList) requestContext.getLocalAttr(HOST_OVERRIDE_LIST);
      URI override = overrides == null ? null : overrides.getOverride(clusterName, serviceName);
      if (targetService == null && override == null)
      {
        // No explicit target: pick a tracker client via the load-balancing strategies.
        LoadBalancerStateItem<UriProperties> uriItem = getUriItem(serviceName, clusterName, cluster);
        UriProperties uris = uriItem.getProperty();
        List<LoadBalancerState.SchemeStrategyPair> orderedStrategies =
            _state.getStrategiesForService(serviceName, service.getPrioritizedSchemes());
        TrackerClient trackerClient = null;
        // Use client provided by CustomURIAffinityRoutingProvider when it's enabled
        CustomAffinityRoutingURIProvider customAffinityRoutingURIProvider =
            (CustomAffinityRoutingURIProvider) requestContext.getLocalAttr(CustomAffinityRoutingURIProvider.CUSTOM_AFFINITY_ROUTING_URI_PROVIDER);
        boolean enableCustomAffinityRouting = isCustomAffinityRoutingEnabled(requestContext, customAffinityRoutingURIProvider);
        if (enableCustomAffinityRouting)
        {
          trackerClient = customAffinityRoutingURIProvider.getTargetHostURI(clusterName)
              .map(targetHost -> _state.getClient(serviceName, targetHost))
              .orElse(null);
        }
        if (trackerClient == null)
        {
          trackerClient =
              chooseTrackerClient(request, requestContext, serviceName, clusterName, cluster, uriItem, uris,
                  orderedStrategies, service);
          // set host URI for the cluster. with that next time, for the same inbound request, if downstream request is
          // made to same cluster and custom affinity routing is enabled, then it will go to same box.
          if (enableCustomAffinityRouting)
          {
            customAffinityRoutingURIProvider.setTargetHostURI(clusterName, trackerClient.getUri());
          }
        }
        String clusterAndServiceUriString = trackerClient.getUri() + service.getPath();
        _serviceAvailableStats.inc();
        clientCallback.onSuccess(new RewriteLoadBalancerClient(serviceName,
            URI.create(clusterAndServiceUriString),
            trackerClient));
      }
      else
      {
        // Explicit target: HostOverrideList wins over TargetHints when both are present.
        URI target = override == null ? targetService : URI.create(override + service.getPath());
        if (targetService != null && override != null)
        {
          // SLF4J uses {} placeholders, not %s; the original %s left the target
          // URI out of the logged message.
          _log.warn("Both TargetHints and HostOverrideList are found. HostOverrideList will take precedence {}.", target);
        }
        if (_log.isDebugEnabled())
        {
          _log.debug("Rewrite URI as specified in the TargetHints/HostOverrideList {} for cluster {} and service {}.",
              target, clusterName, serviceName);
        }
        TransportClient transportClient = _state.getClient(serviceName, target.getScheme());
        if (transportClient == null)
        {
          throw new ServiceUnavailableException(serviceName, String.format(
              "PEGA_1001. Cannot find transportClient for service %s and scheme %s with URI specified in"
                  + "TargetHints/HostOverrideList %s", serviceName, target.getScheme(), target));
        }
        clientCallback.onSuccess(new RewriteLoadBalancerClient(serviceName, target, transportClient));
      }
    }
    catch (ServiceUnavailableException e)
    {
      clientCallback.onError(e);
    }
  }, clientCallback));
}
|
/**
 * Verifies that custom affinity routing is skipped (the provider's target-host
 * cache stays empty) for the combinations supplied by the data provider, and
 * that the client resolves to the expected URI in each case.
 */
@Test(dataProvider = "customAffinityRoutingSkippedDataProvider")
public void testCustomAffinityRoutingSkipped(boolean enableTargetHostHint, boolean enableCustomAffinityRouting,
    URI expectedURI) throws Exception
{
  MockStore<ServiceProperties> serviceStore = new MockStore<>();
  MockStore<ClusterProperties> clusterStore = new MockStore<>();
  MockStore<UriProperties> uriStore = new MockStore<>();
  List<String> schemes = new ArrayList<>();
  SimpleLoadBalancer loadBalancer = setupLoadBalancer(serviceStore, clusterStore, uriStore);

  // One regular host always present; a second host is added only when the
  // target-host-hint scenario is being exercised.
  URI regularHost = URI.create("http://test.qd.com:1234");
  Map<Integer, PartitionData> partitionWeights = new HashMap<>(1);
  partitionWeights.put(DEFAULT_PARTITION_ID, new PartitionData(1d));
  Map<URI, Map<Integer, PartitionData>> uriToPartitions = new HashMap<>(2);
  uriToPartitions.put(regularHost, partitionWeights);

  URI hintedHost = URI.create("http://targethosthint.qd.com:1234");
  RequestContext requestContext = new RequestContext();
  if (enableTargetHostHint) {
    uriToPartitions.put(hintedHost, partitionWeights);
    KeyMapper.TargetHostHints.setRequestContextTargetHost(requestContext, hintedHost);
  }

  schemes.add(PropertyKeys.HTTP_SCHEME);
  Set<URI> banned = new HashSet<>();
  clusterStore.put(CLUSTER1_NAME, new ClusterProperties(CLUSTER1_NAME, Collections.emptyList(),
      Collections.emptyMap(), banned, NullPartitionProperties.getInstance()));
  serviceStore.put("foo", new ServiceProperties("foo",
      CLUSTER1_NAME,
      "/foo", Collections.singletonList("degrader"),
      Collections.<String,Object>emptyMap(),
      null,
      null,
      schemes,
      null));
  uriStore.put(CLUSTER1_NAME, new UriProperties(CLUSTER1_NAME, uriToPartitions));

  URIRequest request = new URIRequest("d2://foo/52");
  // Affinity provider backed by a simple in-memory map; isEnabled() reflects
  // the scenario under test.
  requestContext.putLocalAttr(
      CustomAffinityRoutingURIProvider.CUSTOM_AFFINITY_ROUTING_URI_PROVIDER, new CustomAffinityRoutingURIProvider() {
        private final Map<String, URI> hostByCluster = new HashMap<>();

        @Override
        public boolean isEnabled() {
          return enableCustomAffinityRouting;
        }

        @Override
        public Optional<URI> getTargetHostURI(String clusterName) {
          return Optional.ofNullable(hostByCluster.get(clusterName));
        }

        @Override
        public void setTargetHostURI(String clusterName, URI targetHostURI) {
          hostByCluster.put(clusterName, targetHostURI);
        }
      });

  RewriteLoadBalancerClient rewriteClient =
      (RewriteLoadBalancerClient) loadBalancer.getClient(request, requestContext);
  Assert.assertEquals(rewriteClient.getUri(), expectedURI);

  // Affinity routing must have been skipped, so no target host was cached.
  CustomAffinityRoutingURIProvider provider
      = (CustomAffinityRoutingURIProvider) requestContext.getLocalAttr(CustomAffinityRoutingURIProvider.CUSTOM_AFFINITY_ROUTING_URI_PROVIDER);
  Assert.assertFalse(provider.getTargetHostURI(CLUSTER1_NAME).isPresent());
}
|
/**
 * Converts the given value to the specified class, delegating to the
 * {@link Type}-based overload.
 *
 * @param <T>   the target type
 * @param type  the destination class
 * @param value the value to convert
 * @return the converted value
 * @throws ConvertException if the conversion fails
 */
public static <T> T convert(Class<T> type, Object value) throws ConvertException {
    final Type targetType = type;
    return convert(targetType, value);
}
|
/** Converting a single char should yield the equivalent one-character String. */
@Test
public void toStrTest3() {
    final String converted = Convert.convert(String.class, 'a');
    assertEquals("a", converted);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.