| focal_method (string, length 13–60.9k) | test_case (string, length 25–109k) |
|---|---|
@Override
public void close() {
    // Container-managed data sources (e.g. Atomikos pools) tear down their own
    // XA resources on close; everything else must have its recovery resource
    // explicitly de-registered from the XA transaction manager.
    final boolean containerManaged =
            CONTAINER_DATASOURCE_NAMES.contains(dataSource.getClass().getSimpleName());
    if (containerManaged) {
        close(dataSource);
    } else {
        xaTransactionManagerProvider.removeRecoveryResource(resourceName, xaDataSource);
    }
    // Drop the thread-local bookkeeping of enlisted transactions in all cases.
    enlistedTransactions.remove();
}
|
@Test
void assertCloseHikariDataSource() {
    // HikariDataSource is not in CONTAINER_DATASOURCE_NAMES, so close() is
    // expected to take the else-branch and de-register the XA recovery
    // resource rather than closing the pool directly.
    DataSource dataSource = DataSourceUtils.build(HikariDataSource.class, TypedSPILoader.getService(DatabaseType.class, "H2"), "ds1");
    XATransactionDataSource transactionDataSource = new XATransactionDataSource(TypedSPILoader.getService(DatabaseType.class, "H2"), "ds1", dataSource, xaTransactionManagerProvider);
    transactionDataSource.close();
    verify(xaTransactionManagerProvider).removeRecoveryResource(anyString(), any(XADataSource.class));
}
|
@Override
public int compare(String version1, String version2) {
    // Fast path: same reference or equal strings. ObjectUtil.equal also
    // returns true when BOTH arguments are null, so the former explicit
    // (null && null) branch below it was unreachable dead code and has been
    // removed.
    if (ObjectUtil.equal(version1, version2)) {
        return 0;
    }
    // At this point at most one side is null; null sorts first as the
    // smallest version.
    if (version1 == null) {
        return -1;
    } else if (version2 == null) {
        return 1;
    }
    // Delegate to component-wise version comparison (numeric, not lexical).
    return CompareUtil.compare(Version.of(version1), Version.of(version2));
}
|
@Test
public void versionComparatorTest1() {
    // "1.2.1" < "1.12.1": components compare numerically (2 < 12), not lexically.
    int compare = VersionComparator.INSTANCE.compare("1.2.1", "1.12.1");
    assertTrue(compare < 0);
    // Antisymmetry: swapping the arguments flips the sign.
    compare = VersionComparator.INSTANCE.compare("1.12.1", "1.2.1");
    assertTrue(compare > 0);
}
|
@Override
public void execute(ComputationStep.Context context) {
    // Visit only the project-level component: depth is capped at PROJECT and
    // the adapter overrides nothing but visitProject.
    final TypeAwareVisitorAdapter projectOnlyVisitor =
            new TypeAwareVisitorAdapter(CrawlerDepthLimit.PROJECT, PRE_ORDER) {
                @Override
                public void visitProject(Component project) {
                    executeForProject(project);
                }
            };
    new DepthTraversalTypeAwareCrawler(projectOnlyVisitor).visit(treeRootHolder.getRoot());
}
|
@Test
void new_measures_have_ERROR_level_if_at_least_one_updated_measure_has_ERROR_level() {
    // Same raw value (3) is measured against two "less than" conditions; per
    // the assertions below, the first evaluates to ERROR and the second to OK.
    int rawValue = 3;
    Condition equalsOneErrorCondition = createLessThanCondition(INT_METRIC_1, "4");
    Condition equalsOneOkCondition = createLessThanCondition(INT_METRIC_2, "2");
    Measure rawMeasure = newMeasureBuilder().create(rawValue, null);
    // Install a quality gate containing both conditions, then attach the raw
    // measure to both metrics and run the step under test.
    qualityGateHolder.setQualityGate(new QualityGate(SOME_QG_UUID, SOME_QG_NAME, of(equalsOneErrorCondition, equalsOneOkCondition)));
    measureRepository.addRawMeasure(PROJECT_REF, INT_METRIC_1_KEY, rawMeasure);
    measureRepository.addRawMeasure(PROJECT_REF, INT_METRIC_2_KEY, rawMeasure);
    underTest.execute(new TestComputationStepContext());
    Optional<Measure> rawMeasure1 = measureRepository.getAddedRawMeasure(PROJECT_REF, INT_METRIC_1_KEY);
    Optional<Measure> rawMeasure2 = measureRepository.getAddedRawMeasure(PROJECT_REF, INT_METRIC_2_KEY);
    // Per-metric levels: one ERROR, one OK.
    assertThat(rawMeasure1.get())
    .hasQualityGateLevel(ERROR)
    .hasQualityGateText(dumbResultTextAnswer(equalsOneErrorCondition, ERROR, rawValue));
    assertThat(rawMeasure2.get())
    .hasQualityGateLevel(OK)
    .hasQualityGateText(dumbResultTextAnswer(equalsOneOkCondition, OK, rawValue));
    // Aggregated alert status: worst level (ERROR) wins; text concatenates
    // both condition results.
    assertThat(getAlertStatusMeasure())
    .hasQualityGateLevel(ERROR)
    .hasQualityGateText(dumbResultTextAnswer(equalsOneErrorCondition, ERROR, rawValue) + ", "
    + dumbResultTextAnswer(equalsOneOkCondition, OK, rawValue));
    // JSON details measure mirrors the two evaluated conditions.
    assertThat(getQGDetailsMeasure())
    .hasValue(new QualityGateDetailsData(ERROR, of(
    new EvaluatedCondition(equalsOneErrorCondition, ERROR, rawValue),
    new EvaluatedCondition(equalsOneOkCondition, OK, rawValue)), false).toJson());
    // Status holder exposes the same overall result and per-condition detail.
    QualityGateStatusHolderAssertions.assertThat(qualityGateStatusHolder)
    .hasStatus(QualityGateStatus.ERROR)
    .hasConditionCount(2)
    .hasCondition(equalsOneErrorCondition, ConditionStatus.EvaluationStatus.ERROR, String.valueOf(rawValue))
    .hasCondition(equalsOneOkCondition, ConditionStatus.EvaluationStatus.OK, String.valueOf(rawValue));
}
|
/**
 * Lists the contents of a B2 directory, paging through the remote listing
 * with a marker until exhausted. Honours versioning when enabled and fails
 * with {@link NotfoundException} when neither a placeholder nor any object
 * exists for the directory.
 */
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
    try {
        final AttributedList<Path> objects = new AttributedList<>();
        // Pagination cursor; starts empty and is advanced by parse().
        Marker marker = new Marker(null, null);
        final String containerId = fileid.getVersionId(containerService.getContainer(directory));
        // Seen placeholders
        final Map<String, Long> revisions = new HashMap<>();
        // A bucket itself always "exists"; for sub-directories we only accept
        // the listing if a placeholder or at least one object is found.
        boolean hasDirectoryPlaceholder = containerService.isContainer(directory);
        do {
            if(log.isDebugEnabled()) {
                log.debug(String.format("List directory %s with marker %s", directory, marker));
            }
            final B2ListFilesResponse response;
            if(versioning.isEnabled()) {
                // In alphabetical order by file name, and by reverse of date/time uploaded for
                // versions of files with the same name.
                response = session.getClient().listFileVersions(containerId,
                        marker.nextFilename, marker.nextFileId, chunksize,
                        this.createPrefix(directory),
                        String.valueOf(Path.DELIMITER));
            }
            else {
                response = session.getClient().listFileNames(containerId,
                        marker.nextFilename, chunksize,
                        this.createPrefix(directory),
                        String.valueOf(Path.DELIMITER));
            }
            // parse() appends entries to objects and returns the next cursor.
            marker = this.parse(directory, objects, response, revisions);
            if(null == marker.nextFileId) {
                if(!response.getFiles().isEmpty()) {
                    // Non-empty page implies the directory effectively exists.
                    hasDirectoryPlaceholder = true;
                }
            }
            // Notify the listener after every page so callers can stream results.
            listener.chunk(directory, objects);
        }
        while(marker.hasNext());
        if(!hasDirectoryPlaceholder && objects.isEmpty()) {
            if(log.isWarnEnabled()) {
                log.warn(String.format("No placeholder found for directory %s", directory));
            }
            throw new NotfoundException(directory.getAbsolute());
        }
        return objects;
    }
    catch(B2ApiException e) {
        throw new B2ExceptionMappingService(fileid).map("Listing directory {0} failed", e, directory);
    }
    catch(IOException e) {
        throw new DefaultIOExceptionMappingService().map(e);
    }
}
|
@Test
public void testDisplayFolderInBucketMissingPlaceholder() throws Exception {
    // A folder created implicitly through a file upload (no placeholder
    // object) must still appear when listing the bucket.
    final B2VersionIdProvider fileid = new B2VersionIdProvider(session);
    final Path bucket = new B2DirectoryFeature(session, fileid).mkdir(
            new Path(String.format("test-%s", new AsciiRandomStringService().random()), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus());
    final Path folder1 = new Path(bucket, "1-d", EnumSet.of(Path.Type.directory));
    final Path file1 = new B2TouchFeature(session, fileid).touch(new Path(folder1, "2-f", EnumSet.of(Path.Type.file)), new TransferStatus());
    assertNotNull(file1.attributes().getVersionId());
    {
        // Folder is visible via its contained file.
        final AttributedList<Path> list = new B2ObjectListService(session, fileid).list(bucket, new DisabledListProgressListener());
        assertEquals(1, list.size());
        final Path foundFolder1 = list.iterator().next();
        assertEquals(folder1, foundFolder1);
        assertFalse(foundFolder1.attributes().isDuplicate());
    }
    // Nullify version to add delete marker
    new B2DeleteFeature(session, fileid).delete(Collections.singletonList(new Path(file1).withAttributes(new PathAttributes(file1.attributes()).withVersionId(null))), new DisabledLoginCallback(), new Delete.DisabledCallback());
    // Non-versioned listing no longer sees the hidden file.
    assertTrue(new B2ObjectListService(session, fileid, 1, VersioningConfiguration.empty()).list(bucket, new DisabledListProgressListener()).isEmpty());
    {
        // Default (versioned) listing still shows the folder.
        final AttributedList<Path> list = new B2ObjectListService(session, fileid).list(bucket, new DisabledListProgressListener());
        assertEquals(1, list.size());
        assertEquals(folder1, list.iterator().next());
    }
    // Cleanup: remove remaining versions, then the bucket itself.
    for(Path f : new B2ObjectListService(session, fileid).list(folder1, new DisabledListProgressListener())) {
        new B2DeleteFeature(session, fileid).delete(Collections.singletonList(f), new DisabledLoginCallback(), new Delete.DisabledCallback());
    }
    new B2DeleteFeature(session, fileid).delete(Collections.singletonList(bucket), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
|
/**
 * Applies forward-index/dictionary operations computed for each column.
 * The per-operation order within a column matters; sanity checks after each
 * mutation guard the dictionary/forward-index invariants.
 */
@Override
public void updateIndices(SegmentDirectory.Writer segmentWriter)
        throws Exception {
    Map<String, List<Operation>> columnOperationsMap = computeOperations(segmentWriter);
    if (columnOperationsMap.isEmpty()) {
        // Nothing to update for this segment.
        return;
    }
    for (Map.Entry<String, List<Operation>> entry : columnOperationsMap.entrySet()) {
        String column = entry.getKey();
        List<Operation> operations = entry.getValue();
        for (Operation operation : operations) {
            switch (operation) {
                case DISABLE_FORWARD_INDEX:
                    // Deletion of the forward index will be handled outside the index handler to ensure that other index
                    // handlers that need the forward index to construct their own indexes will have it available.
                    _tmpForwardIndexColumns.add(column);
                    break;
                case ENABLE_FORWARD_INDEX:
                    ColumnMetadata columnMetadata = createForwardIndexIfNeeded(segmentWriter, column, false);
                    // Invariant check: dictionary presence must match the
                    // column's dictionary-encoding after the rebuild.
                    if (columnMetadata.hasDictionary()) {
                        if (!segmentWriter.hasIndexFor(column, StandardIndexes.dictionary())) {
                            throw new IllegalStateException(String.format(
                                    "Dictionary should still exist after rebuilding forward index for dictionary column: %s", column));
                        }
                    } else {
                        if (segmentWriter.hasIndexFor(column, StandardIndexes.dictionary())) {
                            throw new IllegalStateException(
                                    String.format("Dictionary should not exist after rebuilding forward index for raw column: %s",
                                            column));
                        }
                    }
                    break;
                case DISABLE_DICTIONARY:
                    // Columns whose forward index is also disabled need the
                    // dictionary stripped in place; others get a raw forward
                    // index rebuilt without the dictionary.
                    Set<String> newForwardIndexDisabledColumns =
                            FieldIndexConfigsUtil.columnsWithIndexDisabled(_fieldIndexConfigs.keySet(), StandardIndexes.forward(),
                                    _fieldIndexConfigs);
                    if (newForwardIndexDisabledColumns.contains(column)) {
                        removeDictionaryFromForwardIndexDisabledColumn(column, segmentWriter);
                        if (segmentWriter.hasIndexFor(column, StandardIndexes.dictionary())) {
                            throw new IllegalStateException(
                                    String.format("Dictionary should not exist after disabling dictionary for column: %s", column));
                        }
                    } else {
                        disableDictionaryAndCreateRawForwardIndex(column, segmentWriter);
                    }
                    break;
                case ENABLE_DICTIONARY:
                    createDictBasedForwardIndex(column, segmentWriter);
                    if (!segmentWriter.hasIndexFor(column, StandardIndexes.forward())) {
                        throw new IllegalStateException(String.format("Forward index was not created for column: %s", column));
                    }
                    break;
                case CHANGE_INDEX_COMPRESSION_TYPE:
                    rewriteForwardIndexForCompressionChange(column, segmentWriter);
                    break;
                default:
                    // computeOperations() should never emit anything else.
                    throw new IllegalStateException("Unsupported operation for column " + column);
            }
        }
    }
}
|
@Test
public void testAddOtherIndexWhenForwardIndexDisabledAndInvertedIndexOrDictionaryDisabled()
        throws Exception {
    // Building a range index requires regenerating the forward index, which
    // in turn needs either an inverted index or a dictionary — both failure
    // modes are exercised here.
    SegmentMetadataImpl existingSegmentMetadata = new SegmentMetadataImpl(_segmentDirectory);
    SegmentDirectory segmentLocalFSDirectory =
            new SegmentLocalFSDirectory(_segmentDirectory, existingSegmentMetadata, ReadMode.mmap);
    SegmentDirectory.Writer writer = segmentLocalFSDirectory.createWriter();
    IndexLoadingConfig indexLoadingConfig = new IndexLoadingConfig(null, _tableConfig);
    // Add a forward index and inverted index disabled column to the range index list
    indexLoadingConfig.addRangeIndexColumns(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER_WITHOUT_INV_IDX);
    RangeIndexHandler rangeIndexHandler = new RangeIndexHandler(segmentLocalFSDirectory, indexLoadingConfig);
    try {
        rangeIndexHandler.updateIndices(writer);
        Assert.fail("Creating the range index on forward index and inverted index disabled column should fail");
    } catch (IllegalStateException e) {
        // Missing inverted index → forward index cannot be regenerated.
        assertEquals(e.getMessage(), "Forward index disabled column "
                + "DIM_SV_FORWARD_INDEX_DISABLED_INTEGER_WITHOUT_INV_IDX must have an inverted index to regenerate the "
                + "forward index. Regeneration of the forward index is required to create new indexes as well. Please "
                + "refresh or back-fill the forward index");
    }
    // Remove inverted index disabled column from range index list and add a raw column instead
    indexLoadingConfig.removeRangeIndexColumns(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER_WITHOUT_INV_IDX);
    indexLoadingConfig.addRangeIndexColumns(DIM_RAW_SV_FORWARD_INDEX_DISABLED_INTEGER);
    rangeIndexHandler = new RangeIndexHandler(segmentLocalFSDirectory, indexLoadingConfig);
    try {
        rangeIndexHandler.updateIndices(writer);
        Assert.fail("Creating the range index on forward index and inverted index disabled column should fail");
    } catch (IllegalStateException e) {
        // Raw (no-dictionary) column → forward index cannot be regenerated.
        assertEquals(e.getMessage(), "Forward index disabled column "
                + "DIM_RAW_SV_FORWARD_INDEX_DISABLED_INTEGER must have a dictionary to regenerate the forward index. "
                + "Regeneration of the forward index is required to create new indexes as well. Please refresh or back-fill "
                + "the forward index");
    }
}
|
@Override
public UsersSearchRestResponse toUsersForResponse(List<UserInformation> userInformations, PaginationInformation paginationInformation) {
    // Map the users first, then attach the paging metadata.
    final List<UserRestResponse> users = toUsersForResponse(userInformations);
    final PageRestResponse page = new PageRestResponse(
            paginationInformation.pageIndex(),
            paginationInformation.pageSize(),
            paginationInformation.total());
    return new UsersSearchRestResponse(users, page);
}
|
@Test
public void toUsersForResponse_whenAdmin_mapsAllFields() {
    // A logged-in system administrator gets the full (admin) field set for
    // every user in the response, plus correct paging metadata.
    when(userSession.isLoggedIn()).thenReturn(true);
    when(userSession.isSystemAdministrator()).thenReturn(true);
    PaginationInformation paging = forPageIndex(1).withPageSize(2).andTotal(3);
    UserInformation userInformation1 = mockSearchResult(1, true);
    UserInformation userInformation2 = mockSearchResult(2, false);
    UsersSearchRestResponse usersForResponse = usersSearchRestResponseGenerator.toUsersForResponse(List.of(userInformation1, userInformation2), paging);
    UserRestResponseForAdmins expectUser1 = buildExpectedResponseForAdmin(userInformation1);
    UserRestResponseForAdmins expectUser2 = buildExpectedResponseForAdmin(userInformation2);
    // Order must be preserved.
    assertThat(usersForResponse.users()).containsExactly(expectUser1, expectUser2);
    assertPaginationInformationAreCorrect(paging, usersForResponse.page());
}
|
/**
 * Builds a Pod for a StrimziPodSet with stateful-set-like identity (stable
 * hostname/subdomain). Template values, when present, override the provided
 * defaults; a revision annotation is stamped on the result.
 */
@SuppressWarnings({"checkstyle:ParameterNumber"})
public static Pod createStatefulPod(
        Reconciliation reconciliation,
        String name,
        String namespace,
        Labels labels,
        String strimziPodSetName,
        String serviceAccountName,
        PodTemplate template,
        Map<String, String> defaultPodLabels,
        Map<String, String> podAnnotations,
        String headlessServiceName,
        Affinity affinity,
        List<Container> initContainers,
        List<Container> containers,
        List<Volume> volumes,
        List<LocalObjectReference> defaultImagePullSecrets,
        PodSecurityContext podSecurityContext
) {
    Pod pod = new PodBuilder()
            .withNewMetadata()
                // Labels combine pod identity, pod-set controller reference and
                // merged default + template labels.
                .withName(name)
                .withLabels(labels.withStrimziPodName(name).withStatefulSetPod(name).withStrimziPodSetController(strimziPodSetName).withAdditionalLabels(Util.mergeLabelsOrAnnotations(defaultPodLabels, TemplateUtils.labels(template))).toMap())
                .withNamespace(namespace)
                .withAnnotations(Util.mergeLabelsOrAnnotations(podAnnotations, TemplateUtils.annotations(template)))
            .endMetadata()
            .withNewSpec()
                .withRestartPolicy("Always")
                // Stable network identity: hostname = pod name, subdomain =
                // headless service, mirroring StatefulSet behaviour.
                .withHostname(name)
                .withSubdomain(headlessServiceName)
                .withServiceAccountName(serviceAccountName)
                // Template-driven values fall back to null (cluster default)
                // or an explicit default when the template is absent.
                .withEnableServiceLinks(template != null ? template.getEnableServiceLinks() : null)
                .withAffinity(affinity)
                .withInitContainers(initContainers)
                .withContainers(containers)
                .withVolumes(volumes)
                .withTolerations(template != null && template.getTolerations() != null ? template.getTolerations() : null)
                .withTerminationGracePeriodSeconds(template != null ? (long) template.getTerminationGracePeriodSeconds() : 30L)
                .withImagePullSecrets(imagePullSecrets(template, defaultImagePullSecrets))
                .withSecurityContext(podSecurityContext)
                .withPriorityClassName(template != null ? template.getPriorityClassName() : null)
                .withSchedulerName(template != null && template.getSchedulerName() != null ? template.getSchedulerName() : "default-scheduler")
                .withHostAliases(template != null ? template.getHostAliases() : null)
                .withTopologySpreadConstraints(template != null ? template.getTopologySpreadConstraints() : null)
            .endSpec()
            .build();
    // Set the pod revision annotation
    pod.getMetadata().getAnnotations().put(PodRevision.STRIMZI_REVISION_ANNOTATION, PodRevision.getRevision(reconciliation, pod));
    return pod;
}
|
@Test
public void testCreateStatefulPodWithNullValuesAndNullTemplate() {
    // With a null template, every template-driven field must fall back to its
    // default (or null) and the hard-coded defaults must be applied.
    Pod pod = WorkloadUtils.createStatefulPod(
            Reconciliation.DUMMY_RECONCILIATION,
            NAME + "-0", // => Pod name
            NAMESPACE,
            LABELS,
            NAME, // => Workload name
            NAME + "-sa", // => Service Account name
            null,
            Map.of("default-label", "default-value"),
            Map.of("extra", "annotations"),
            HEADLESS_SERVICE_NAME,
            DEFAULT_AFFINITY,
            List.of(new ContainerBuilder().withName("init-container").build()),
            List.of(new ContainerBuilder().withName("container").build()),
            VolumeUtils.createPodSetVolumes(NAME + "-0", DEFAULT_STORAGE, false),
            List.of(new LocalObjectReference("some-pull-secret")),
            DEFAULT_POD_SECURITY_CONTEXT
    );
    assertThat(pod.getMetadata().getName(), is(NAME + "-0"));
    assertThat(pod.getMetadata().getNamespace(), is(NAMESPACE));
    assertThat(pod.getMetadata().getLabels(), is(LABELS
            .withStrimziPodSetController(NAME)
            .withStrimziPodName(NAME + "-0")
            .withAdditionalLabels(Map.of("statefulset.kubernetes.io/pod-name", "my-workload-0", "default-label", "default-value"))
            .toMap()));
    // Revision annotation is computed from the pod content and is expected to
    // be stable for identical input.
    assertThat(pod.getMetadata().getAnnotations(), is(Map.of("extra", "annotations", PodRevision.STRIMZI_REVISION_ANNOTATION, "da09ff49")));
    assertThat(pod.getSpec().getRestartPolicy(), is("Always"));
    assertThat(pod.getSpec().getHostname(), is(NAME + "-0"));
    assertThat(pod.getSpec().getServiceAccountName(), is(NAME + "-sa"));
    assertThat(pod.getSpec().getEnableServiceLinks(), is(nullValue()));
    assertThat(pod.getSpec().getAffinity(), is(DEFAULT_AFFINITY));
    assertThat(pod.getSpec().getInitContainers().size(), is(1));
    assertThat(pod.getSpec().getInitContainers().get(0).getName(), is("init-container"));
    assertThat(pod.getSpec().getContainers().size(), is(1));
    assertThat(pod.getSpec().getContainers().get(0).getName(), is("container"));
    assertThat(pod.getSpec().getVolumes(), is(VolumeUtils.createPodSetVolumes(NAME + "-0", DEFAULT_STORAGE, false)));
    assertThat(pod.getSpec().getTolerations(), is(nullValue()));
    assertThat(pod.getSpec().getTerminationGracePeriodSeconds(), is(30L));
    assertThat(pod.getSpec().getImagePullSecrets(), is(List.of(new LocalObjectReference("some-pull-secret"))));
    assertThat(pod.getSpec().getSecurityContext(), is(DEFAULT_POD_SECURITY_CONTEXT));
    assertThat(pod.getSpec().getPriorityClassName(), is(nullValue()));
    assertThat(pod.getSpec().getSchedulerName(), is("default-scheduler"));
    assertThat(pod.getSpec().getHostAliases(), is(nullValue()));
    assertThat(pod.getSpec().getTopologySpreadConstraints(), is(nullValue()));
}
|
public synchronized ResultSet fetchResults(FetchOrientation orientation, int maxFetchSize) {
    // Resolve the token to fetch: NEXT reads the current position, PRIOR
    // re-reads the previous one. Anything else is unsupported.
    final long token;
    if (orientation == FetchOrientation.FETCH_NEXT) {
        token = currentToken;
        // Block until at least one row is buffered so FETCH_NEXT does not
        // return prematurely with an empty chunk.
        if (bufferedResults.isEmpty()) {
            resultStore.waitUntilHasData();
        }
    } else if (orientation == FetchOrientation.FETCH_PRIOR) {
        token = currentToken - 1;
    } else {
        throw new UnsupportedOperationException(
                String.format("Unknown fetch orientation: %s.", orientation));
    }
    return fetchResults(token, maxFetchSize);
}
|
@Test
void testFetchResultWithToken() {
    ResultFetcher fetcher =
            buildResultFetcher(Collections.singletonList(data.iterator()), data.size());
    Long nextToken = 0L;
    List<RowData> actual = new ArrayList<>();
    ResultSet resultSetBefore = null;
    // Walk the token chain to EOS, checking that fetches are idempotent for
    // both the previous and the current token.
    while (nextToken != null) {
        if (resultSetBefore != null) {
            // Re-fetching the prior token returns the identical result set.
            assertEquals(resultSetBefore, fetcher.fetchResults(nextToken - 1, data.size()));
        }
        ResultSet resultSet = fetcher.fetchResults(nextToken, data.size());
        ResultSet resultSetWithSameToken = fetcher.fetchResults(nextToken, data.size());
        // Fetching the same token twice must not advance the cursor.
        assertEquals(resultSet, resultSetWithSameToken);
        if (resultSet.getResultType() == ResultSet.ResultType.EOS) {
            break;
        }
        resultSetBefore = resultSet;
        actual.addAll(checkNotNull(resultSet.getData()));
        nextToken = resultSet.getNextToken();
    }
    // All rows must have been delivered exactly once, in order.
    assertEquals(data, actual);
}
|
@Override
public void doRun() {
    // Delegates to the parameterized variant with 'false' — NOTE(review):
    // presumably the non-forced/default mode; confirm against doRun(boolean).
    doRun(false);
}
|
@Test
void scheduledExecutionIsSkippedWhenServerIsNotRunning() {
    // While the server is HALTING, the periodical must not touch the cluster.
    when(serverStatus.getLifecycle()).thenReturn(Lifecycle.HALTING);
    periodical.doRun();
    verifyNoInteractions(cluster);
}
|
/**
 * Pairs each field name of the given (composite) data type with its data
 * type, preserving positional order. Returns an empty list for types that
 * have no fields.
 */
public static List<DataTypes.Field> getFields(DataType dataType) {
    final List<String> fieldNames = getFieldNames(dataType);
    final List<DataType> fieldTypes = getFieldDataTypes(dataType);
    return IntStream.range(0, fieldNames.size())
            .mapToObj(pos -> DataTypes.FIELD(fieldNames.get(pos), fieldTypes.get(pos)))
            .collect(Collectors.toList());
}
|
@Test
void testGetFields() {
    // ROW types expose their declared fields in order.
    assertThat(
                    DataType.getFields(
                            ROW(
                                    FIELD("c0", BOOLEAN()),
                                    FIELD("c1", DOUBLE()),
                                    FIELD("c2", INT()))))
            .containsExactly(FIELD("c0", BOOLEAN()), FIELD("c1", DOUBLE()), FIELD("c2", INT()));
    // STRUCTURED types behave the same, including bridged primitive types.
    assertThat(
                    DataType.getFields(
                            STRUCTURED(
                                    DataTypesTest.SimplePojo.class,
                                    FIELD("name", STRING()),
                                    FIELD("count", INT().notNull().bridgedTo(int.class)))))
            .containsExactly(
                    FIELD("name", STRING()),
                    FIELD("count", INT().notNull().bridgedTo(int.class)));
    // Non-composite types have no fields.
    assertThat(DataType.getFields(ARRAY(INT()))).isEmpty();
    assertThat(DataType.getFields(INT())).isEmpty();
}
|
/**
 * Static factory creating a {@code CronPattern} from the given cron
 * expression string.
 *
 * @param pattern cron expression, e.g. {@code "* * * * * *"}
 * @return a new {@code CronPattern}
 */
public static CronPattern of(String pattern) {
    return new CronPattern(pattern);
}
|
@Test
public void matchAllTest() {
    CronPattern pattern;
    // An all-wildcard pattern matches any point in time.
    pattern = CronPattern.of("* * * * * *");
    assertMatch(pattern, DateUtil.now());
}
|
@Transactional(readOnly = true)
public AuthFindDto.FindUsernameRes findUsername(String phone) {
    // Resolves the general (non-OAuth) sign-up user for the phone number;
    // NOTE(review): readGeneralSignUpUser presumably throws for OAuth or
    // missing users — confirm against its implementation.
    User user = readGeneralSignUpUser(phone);
    return AuthFindDto.FindUsernameRes.of(user);
}
|
@DisplayName("휴대폰 번호로 유저를 찾았으나 OAuth 유저일 때 AuthFinderException을 발생시킨다.")
@Test
void findUsernameIfUserIsOAuth() {
    // given: the phone number resolves to an OAuth-registered user
    String phone = "010-1234-5678";
    User user = UserFixture.OAUTH_USER.toUser();
    given(userService.readUserByPhone(phone)).willReturn(Optional.of(user));
    // when - then: looking up the username for an OAuth user must fail
    UserErrorException exception = assertThrows(UserErrorException.class, () -> authFindService.findUsername(phone));
    log.debug(exception.getExplainError());
}
|
/**
 * Returns the given path relative to {@code basePath} when it is absolute;
 * already-relative paths are returned unchanged.
 */
public static java.nio.file.Path relativizePath(
        java.nio.file.Path basePath, java.nio.file.Path pathToRelativize) {
    if (!pathToRelativize.isAbsolute()) {
        // Nothing to do for relative inputs.
        return pathToRelativize;
    }
    return basePath.relativize(pathToRelativize);
}
|
@Test
void testRelativizeOfAbsolutePath() throws IOException {
    // An absolute path under the root must come back relative, and resolving
    // it against the root must reconstruct the original absolute path.
    final java.nio.file.Path absolutePath =
            TempDirUtils.newFolder(temporaryFolder).toPath().toAbsolutePath();
    final java.nio.file.Path rootPath = temporaryFolder.getRoot();
    final java.nio.file.Path relativePath = FileUtils.relativizePath(rootPath, absolutePath);
    assertThat(relativePath).isRelative();
    assertThat(absolutePath).isEqualTo(rootPath.resolve(relativePath));
}
|
@DoNotSub public int size()
{
    // Element count = values stored in the backing array, plus one when the
    // special missing-value sentinel is logically contained in the set.
    return sizeOfArrayValues + (containsMissingValue ? 1 : 0);
}
|
@Test
void sizeIncrementsWithNumberOfAddedElements()
{
    // Adding two distinct elements yields size() == 2.
    addTwoElements(testSet);
    assertEquals(2, testSet.size());
}
|
@Override
public String getSessionId() {
    // Session identifier negotiated with the NETCONF server.
    return sessionID;
}
|
@Test
public void testUnLockRequest() {
    log.info("Starting unlock async");
    // A valid session id is a precondition for issuing the unlock RPC.
    assertNotNull("Incorrect sessionId", session1.getSessionId());
    try {
        assertTrue("NETCONF unlock request failed", session1.unlock());
    } catch (NetconfException e) {
        // Any transport/protocol failure fails the test explicitly.
        e.printStackTrace();
        fail("NETCONF unlock test failed: " + e.getMessage());
    }
    log.info("Finishing unlock async");
}
|
// Builds the authentication cookie from the session's token and expiry,
// deriving cookie attributes (e.g. path) from the request context.
NewCookie createAuthenticationCookie(SessionResponse token, ContainerRequestContext requestContext) {
    return makeCookie(token.getAuthenticationToken(), token.validUntil(), requestContext);
}
|
@Test
void pathFromRequest() {
    // The cookie path must be taken from the override header URL, not from
    // any configured external URI.
    containerRequest.getHeaders().put(HttpConfiguration.OVERRIDE_HEADER,
            List.of("http://graylog.local/path/from/request/"));
    final CookieFactory cookieFactory = new CookieFactory(new HttpConfiguration());
    final NewCookie cookie = cookieFactory.createAuthenticationCookie(sessionResponse, containerRequest);
    assertThat(cookie.getPath()).isEqualTo("/path/from/request/");
}
|
@Override
public void define(Context context) {
    // Register the branches controller, then attach every configured action
    // before sealing the controller definition.
    NewController controller = context.createController(CONTROLLER);
    controller.setSince("6.6");
    controller.setDescription("Manage branch");
    Arrays.stream(actions).forEach(wsAction -> wsAction.define(controller));
    controller.done();
}
|
@Test
public void define_ws() {
    // A single action registered through the ws must be exposed under the
    // branches controller path.
    BranchesWs underTest = new BranchesWs(new BranchWsAction() {
        @Override
        public void define(WebService.NewController context) {
            context.createAction("foo").setHandler(this);
        }
        @Override
        public void handle(Request request, Response response) {
            // No-op handler; only the definition is under test.
        }
    });
    WebService.Context context = new WebService.Context();
    underTest.define(context);
    assertThat(context.controller("api/project_branches").action("foo")).isNotNull();
}
|
/**
 * Converts the transaction (and optional signature) into the list of RLP
 * elements used for encoding; delegates to the concrete transaction type.
 */
public static List<RlpType> asRlpValues(
        RawTransaction rawTransaction, Sign.SignatureData signatureData) {
    return rawTransaction.getTransaction().asRlpValues(signatureData);
}
|
@Test
public void testContractAsRlpValues() {
    // An unsigned contract-creation transaction encodes to six RLP elements;
    // index 3 (the "to" address) is empty for contract creation.
    List<RlpType> rlpStrings =
            TransactionEncoder.asRlpValues(createContractTransaction(), null);
    // JUnit's assertEquals takes (expected, actual); the original had the
    // arguments swapped, which produces misleading failure messages.
    assertEquals(6, rlpStrings.size());
    assertEquals(RlpString.create(""), rlpStrings.get(3));
}
|
/**
 * Finds the record matching number {@code n} per the given direction:
 * EXACT returns the record for {@code n} or null; ASC returns the first
 * loadable record with number >= n; DESC the first loadable record with
 * number <= n. Unloadable records are skipped.
 */
public @CheckForNull R search(final int n, final Direction d) {
    switch (d) {
    case EXACT:
        return getByNumber(n);
    case ASC:
        // numberOnDisk is iterated in ascending order; skip entries below n.
        for (int m : numberOnDisk) {
            if (m < n) {
                // TODO could be made more efficient with numberOnDisk.find
                continue;
            }
            R r = getByNumber(m);
            if (r != null) {
                return r;
            }
        }
        return null;
    case DESC:
        // TODO again could be made more efficient
        // Iterate from the end backwards; skip entries above n.
        ListIterator<Integer> iterator = numberOnDisk.listIterator(numberOnDisk.size());
        while (iterator.hasPrevious()) {
            int m = iterator.previous();
            if (m > n) {
                continue;
            }
            R r = getByNumber(m);
            if (r != null) {
                return r;
            }
        }
        return null;
    default:
        // Direction is a closed enum; any other value is a programming error.
        throw new AssertionError();
    }
}
|
@Test
public void unloadableData() throws IOException {
    // Entry 3 exists on disk but cannot be loaded: EXACT yields null, while
    // ASC/DESC fall through to the nearest loadable neighbours (5 and 1).
    FakeMap m = localBuilder.add(1).addUnloadable(3).add(5).make();
    assertNull(m.search(3, Direction.EXACT));
    m.search(3, Direction.DESC).asserts(1);
    m.search(3, Direction.ASC).asserts(5);
}
|
@Override
public boolean open(final Local file) {
    // Serialize access on the NSWorkspace class monitor, matching the
    // original synchronization scope.
    synchronized(NSWorkspace.class) {
        final boolean opened = workspace.openFile(file.getAbsolute());
        if(!opened) {
            log.warn(String.format("Error opening file %s", file));
        }
        return opened;
    }
}
|
@Test
public void testOpen() throws Exception {
    // Opening a non-existent file must not throw; then open a real temp file
    // and clean up afterwards.
    new WorkspaceApplicationLauncher().open(new NullLocal("t"));
    final NullLocal file = new NullLocal(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString());
    LocalTouchFactory.get().touch(file);
    new WorkspaceApplicationLauncher().open(file);
    file.delete();
}
|
/**
 * Extracts the flattened JSON key/value pairs from {@code value} as extractor
 * results. Parse failures are wrapped in {@link ExtractorException} so callers
 * handle a single failure mode.
 */
@Override
protected Result[] run(String value) {
    final Map<String, Object> extractedJson;
    try {
        extractedJson = extractJson(value);
    } catch (IOException e) {
        throw new ExtractorException(e);
    }
    final List<Result> results = new ArrayList<>(extractedJson.size());
    for (Map.Entry<String, Object> entry : extractedJson.entrySet()) {
        // -1/-1: no begin/end offsets apply to JSON-derived results.
        results.add(new Result(entry.getValue(), entry.getKey(), -1, -1));
    }
    // Zero-length array is the idiomatic toArray form (and fastest on modern
    // JVMs), replacing the presized new Result[results.size()].
    return results.toArray(new Result[0]);
}
|
@Test
public void testRunWithWhitespaceInNestedKey() throws Exception {
    // Nested keys are flattened with '_' and whitespace inside keys is
    // replaced with the configured character ('-').
    final String value = "{\"foo\":{\"b a r\":{\"b a z\": 42}}}";
    final JsonExtractor jsonExtractor = new JsonExtractor(
            new MetricRegistry(),
            "json",
            "title",
            0L,
            Extractor.CursorStrategy.COPY,
            "source",
            "target",
            ImmutableMap.of("replace_key_whitespace", true, "key_whitespace_replacement", "-"),
            "user",
            Collections.emptyList(),
            Extractor.ConditionType.NONE,
            "");
    assertThat(jsonExtractor.run(value)).containsOnly(
            new Extractor.Result(42, "foo_b-a-r_b-a-z", -1, -1)
    );
}
|
/**
 * Parses plain text: auto-detects the character encoding (using any incoming
 * content type as a hint), records the detected type/charset in the metadata,
 * and streams the decoded text as a single XHTML paragraph.
 */
@Override
public void parse(InputStream stream, ContentHandler handler, Metadata metadata,
                  ParseContext context) throws IOException, SAXException, TikaException {
    // Automatically detect the character encoding
    try (AutoDetectReader reader = new AutoDetectReader(CloseShieldInputStream.wrap(stream),
            metadata, getEncodingDetector(context))) {
        //try to get detected content type; could be a subclass of text/plain
        //such as vcal, etc.
        String incomingMime = metadata.get(Metadata.CONTENT_TYPE);
        MediaType mediaType = MediaType.TEXT_PLAIN;
        if (incomingMime != null) {
            MediaType tmpMediaType = MediaType.parse(incomingMime);
            if (tmpMediaType != null) {
                mediaType = tmpMediaType;
            }
        }
        Charset charset = reader.getCharset();
        // Re-publish the (possibly refined) media type with the detected charset.
        MediaType type = new MediaType(mediaType, charset);
        metadata.set(Metadata.CONTENT_TYPE, type.toString());
        // deprecated, see TIKA-431
        metadata.set(Metadata.CONTENT_ENCODING, charset.name());
        XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata);
        xhtml.startDocument();
        xhtml.startElement("p");
        // Copy the decoded characters in 4 KiB chunks.
        char[] buffer = new char[4096];
        int n = reader.read(buffer);
        while (n != -1) {
            xhtml.characters(buffer, 0, n);
            n = reader.read(buffer);
        }
        xhtml.endElement("p");
        xhtml.endDocument();
    }
}
|
@Test
public void testUseIncomingCharsetAsHint() throws Exception {
    // Could be ISO 8859-1 or ISO 8859-15 or ...
    // u00e1 is latin small letter a with acute
    final String test2 = "the name is \u00e1ndre";
    Metadata metadata = new Metadata();
    // Without a hint the detector settles on ISO-8859-1.
    parser.parse(new ByteArrayInputStream(test2.getBytes(ISO_8859_1)), new BodyContentHandler(),
            metadata, new ParseContext());
    assertEquals("text/plain; charset=ISO-8859-1", metadata.get(Metadata.CONTENT_TYPE));
    assertEquals("ISO-8859-1", metadata.get(Metadata.CONTENT_ENCODING)); // deprecated
    // With an explicit charset hint in the incoming content type, detection
    // must honour the hint.
    metadata.set(Metadata.CONTENT_TYPE, "text/plain; charset=ISO-8859-15");
    parser.parse(new ByteArrayInputStream(test2.getBytes(ISO_8859_1)), new BodyContentHandler(),
            metadata, new ParseContext());
    assertEquals("text/plain; charset=ISO-8859-15", metadata.get(Metadata.CONTENT_TYPE));
    assertEquals("ISO-8859-15", metadata.get(Metadata.CONTENT_ENCODING)); // deprecated
}
|
/**
 * Renders a SQL parameter for logging: null marker for null, width-limited
 * via abbreviation, and newlines escaped so the log entry stays on one line.
 */
public static String reformatParam(@Nullable Object param) {
    if (param == null) {
        return PARAM_NULL;
    }
    final String truncated = abbreviate(param.toString(), PARAM_MAX_WIDTH);
    return NEWLINE_PATTERN.matcher(truncated).replaceAll("\\\\n");
}
|
@Test
public void reformatParam_escapes_newlines() {
    // Each '\n' in the parameter is rendered as the literal two characters "\n".
    assertThat(SqlLogFormatter.reformatParam("foo\n bar\nbaz")).isEqualTo("foo\\n bar\\nbaz");
}
|
/**
 * Parses the build output for bundled dependencies, deployed modules and
 * NOTICE files, then delegates to the checking overload.
 *
 * @return the number of detected problems (0 when everything is consistent)
 */
static int run(File buildResult, Path root) throws IOException {
    // parse included dependencies from build output
    final Map<String, Set<Dependency>> modulesWithBundledDependencies =
            combineAndFilterFlinkDependencies(
                    ShadeParser.parseShadeOutput(buildResult.toPath()),
                    DependencyParser.parseDependencyCopyOutput(buildResult.toPath()));

    final Set<String> deployedModules = DeployParser.parseDeployOutput(buildResult);

    // BUGFIX: values().size() is just the number of map entries (i.e. the
    // module count again); the total number of bundled dependencies is the
    // sum of the per-module set sizes.
    final int totalDependencies =
            modulesWithBundledDependencies.values().stream().mapToInt(Set::size).sum();

    LOG.info(
            "Extracted "
                    + deployedModules.size()
                    + " modules that were deployed and "
                    + modulesWithBundledDependencies.keySet().size()
                    + " modules which bundle dependencies with a total of "
                    + totalDependencies
                    + " dependencies");

    // find modules producing a shaded-jar
    List<Path> noticeFiles = findNoticeFiles(root);
    LOG.info("Found {} NOTICE files to check", noticeFiles.size());

    final Map<String, Optional<NoticeContents>> moduleToNotice =
            noticeFiles.stream()
                    .collect(
                            Collectors.toMap(
                                    NoticeFileChecker::getModuleFromNoticeFile,
                                    noticeFile -> {
                                        try {
                                            return NoticeParser.parseNoticeFile(noticeFile);
                                        } catch (IOException e) {
                                            // some machine issue
                                            throw new RuntimeException(e);
                                        }
                                    }));

    return run(modulesWithBundledDependencies, deployedModules, moduleToNotice);
}
|
@Test
void testRunIncludesBundledNonDeployedModules() throws IOException {
    final Map<String, Set<Dependency>> bundledDependencies = new HashMap<>();
    final Map<String, Optional<NoticeContents>> notices = new HashMap<>();
    // a module that is not deployed but bundles another dependency with an empty NOTICE
    final String nonDeployedModuleName = "nonDeployed";
    final Dependency nonDeployedDependency =
            Dependency.create("a", nonDeployedModuleName, "c", null);
    final Dependency bundledDependency = Dependency.create("a", "b", "c", null);
    bundledDependencies.put(nonDeployedModuleName, Collections.singleton(bundledDependency));
    // this would usually not be a problem, but since the module is not bundled it's not OK!
    final Optional<NoticeContents> emptyNotice =
            Optional.of(new NoticeContents(nonDeployedModuleName, Collections.emptyList()));
    notices.put(nonDeployedModuleName, emptyNotice);
    // a module that is deploys and bundles the above
    final String bundlingModule = "bundling";
    bundledDependencies.put(bundlingModule, Collections.singleton(nonDeployedDependency));
    final Optional<NoticeContents> correctNotice =
            Optional.of(
                    new NoticeContents(
                            bundlingModule, Collections.singletonList(nonDeployedDependency)));
    notices.put(bundlingModule, correctNotice);
    final Set<String> deployedModules = Collections.singleton(bundlingModule);
    // Exactly one violation expected: the transitively-bundled module's
    // incomplete NOTICE.
    assertThat(NoticeFileChecker.run(bundledDependencies, deployedModules, notices))
            .isEqualTo(1);
}
|
/**
 * Releases the manager's resources: closes the timer and stops the persister.
 * The persister is stopped in a finally block so it is not leaked when
 * closing the timer throws.
 */
@Override
public void close() throws Exception {
    try {
        this.timer.close();
    } finally {
        this.persister.stop();
    }
}
|
@Test
public void testCloseSharePartitionManager() throws Exception {
    Timer timer = Mockito.mock(SystemTimerReaper.class);
    Persister persister = Mockito.mock(Persister.class);
    SharePartitionManager sharePartitionManager = SharePartitionManagerBuilder.builder()
            .withTimer(timer).withShareGroupPersister(persister).build();
    // Verify that 0 calls are made to timer.close() and persister.stop().
    Mockito.verify(timer, times(0)).close();
    Mockito.verify(persister, times(0)).stop();
    // Closing the sharePartitionManager closes timer object in sharePartitionManager.
    sharePartitionManager.close();
    // Verify that the timer object in sharePartitionManager is closed by checking the calls to timer.close() and persister.stop().
    Mockito.verify(timer, times(1)).close();
    Mockito.verify(persister, times(1)).stop();
}
|
/**
 * Deletes the given attached live object (and any reachable attached objects),
 * tracking already-deleted ids in a fresh set to avoid revisiting them.
 */
@Override
public <T> void delete(T attachedObject) {
    addExpireListener(commandExecutor);
    delete(attachedObject, new HashSet<>());
}
|
// Deleting a live object that was never stored must report zero deleted entities.
@Test
public void testDeleteNotExisted() {
    RLiveObjectService service = redisson.getLiveObjectService();
    assertThat(service.delete(Customer.class, "id")).isZero();
}
|
/**
 * Returns the serializer registered under the configured default serializer name.
 */
public static Serializer getDefault() {
    return SERIALIZER_MAP.get(defaultSerializer);
}
|
/**
 * Round-trips a small map through the default serializer and verifies both that
 * serialization produces bytes and that deserialization restores the original content.
 */
@Test
void testMapSerialize() {
    Serializer serializer = SerializeFactory.getDefault();
    Map<Integer, Integer> logsMap = new HashMap<>();
    for (int i = 0; i < 4; i++) {
        logsMap.put(i, i);
    }
    byte[] data = serializer.serialize(logsMap);
    assertNotEquals(0, data.length);
    Map<Integer, Integer> result = serializer.deserialize(data, HashMap.class);
    // Verify the round trip instead of printing the result to stdout.
    assertEquals(logsMap, result);
}
|
/**
 * Refreshes the metadata cache: an empty payload clears every subscriber's cache,
 * otherwise each metadata entry is pushed to every subscriber.
 */
@Override
protected void refresh(final List<MetaData> data) {
    if (CollectionUtils.isEmpty(data)) {
        LOG.info("clear all metaData cache");
        metaDataSubscribers.forEach(MetaDataSubscriber::refresh);
        return;
    }
    for (MetaData metaData : data) {
        for (MetaDataSubscriber subscriber : metaDataSubscribers) {
            subscriber.onSubscribe(metaData);
        }
    }
}
|
// Smoke test: exercises both refresh() branches (empty list -> clear-all path,
// non-empty list -> per-entry subscribe path). No assertions; it only verifies
// neither branch throws.
@Test
public void testRefreshCoverage() {
    final MetaDataRefresh metaDataRefresh = mockMetaDataRefresh;
    MetaData metaData = new MetaData();
    List<MetaData> metaDataList = new ArrayList<>();
    metaDataRefresh.refresh(metaDataList);
    metaDataList.add(metaData);
    metaDataRefresh.refresh(metaDataList);
}
|
/**
 * FEEL sort(): sorts {@code list} using the user-supplied {@code precedes} function as the
 * ordering; when no function is given, falls back to the natural-ordering overload.
 */
public FEELFnResult<List<Object>> invoke(@ParameterName( "ctx" ) EvaluationContext ctx,
                                         @ParameterName("list") List list,
                                         @ParameterName("precedes") FEELFunction function) {
    if ( function == null ) {
        return invoke( list );
    } else {
        return invoke(list, (a, b) -> {
            // Adapt the boolean "precedes" function to a Comparator: true (or any
            // non-Boolean result) means "a precedes b". Note this comparator never
            // returns 0, so elements are always strictly ordered by the function.
            final Object result = function.invokeReflectively(ctx, new Object[]{a, b});
            if (!(result instanceof Boolean) || ((Boolean) result)) {
                return -1;
            } else {
                return 1;
            }
        } );
    }
}
|
// Natural-ordering sort (no "precedes" function) for both numeric and string lists.
@Test
void invokeList() {
    FunctionTestUtil.assertResultList(sortFunction.invoke(Arrays.asList(10, 4, 5, 12)), Arrays.asList(4, 5, 10, 12));
    FunctionTestUtil.assertResultList(sortFunction.invoke(Arrays.asList("a", "c", "b")), Arrays.asList("a", "b", "c"));
}
|
/**
 * Prints the given entities in the configured output format: raw JSON, or one table per
 * entity. In tabular mode the statement text is echoed before each table only when more
 * than one entity is printed, so each table can be attributed to its statement.
 */
public void printKsqlEntityList(final List<KsqlEntity> entityList) {
    switch (outputFormat) {
        case JSON:
            printAsJson(entityList);
            break;
        case TABULAR:
            final boolean includeStatementText = entityList.size() > 1;
            entityList.forEach(entity -> {
                writer().println();
                if (includeStatementText) {
                    writer().println(entity.getStatementText());
                }
                printAsTable(entity);
            });
            break;
        default:
            throw new RuntimeException(String.format(
                "Unexpected output format: '%s'",
                outputFormat.name()
            ));
    }
}
|
@Test
public void shouldPrintConnectorsList() {
    // Given: a connector list containing one fully-populated entry and one with null
    // type/class/status, to exercise null-safe rendering.
    final KsqlEntityList entities = new KsqlEntityList(ImmutableList.of(
        new ConnectorList(
            "statement",
            ImmutableList.of(),
            ImmutableList.of(
                new SimpleConnectorInfo("foo", ConnectorType.SOURCE, "clazz", "STATUS"),
                new SimpleConnectorInfo("bar", null, null, null)
            ))
    ));
    // When:
    console.printKsqlEntityList(entities);
    // Then: output is compared against the checked-in approval snapshot.
    final String output = terminal.getOutputString();
    Approvals.verify(output, approvalOptions);
}
|
/**
 * Deletes committed offsets for the given partitions of a consumer group by driving a
 * {@link DeleteConsumerGroupOffsetsHandler} through the admin API driver; the returned
 * result exposes the per-partition future keyed by the group's coordinator.
 */
@Override
public DeleteConsumerGroupOffsetsResult deleteConsumerGroupOffsets(
    String groupId,
    Set<TopicPartition> partitions,
    DeleteConsumerGroupOffsetsOptions options) {
    SimpleAdminApiFuture<CoordinatorKey, Map<TopicPartition, Errors>> future =
        DeleteConsumerGroupOffsetsHandler.newFuture(groupId);
    DeleteConsumerGroupOffsetsHandler handler = new DeleteConsumerGroupOffsetsHandler(groupId, partitions, logContext);
    invokeDriver(handler, future, options.timeoutMs);
    return new DeleteConsumerGroupOffsetsResult(future.get(CoordinatorKey.byGroupId(groupId)), partitions);
}
|
@Test
public void testDeleteConsumerGroupOffsetsFindCoordinatorNonRetriableErrors() throws Exception {
    // Non-retriable FindCoordinatorResponse errors throw an exception
    final TopicPartition tp1 = new TopicPartition("foo", 0);
    try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(mockCluster(1, 0))) {
        env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
        env.kafkaClient().prepareResponse(
            prepareFindCoordinatorResponse(Errors.GROUP_AUTHORIZATION_FAILED, Node.noNode()));
        final DeleteConsumerGroupOffsetsResult errorResult = env.adminClient()
            .deleteConsumerGroupOffsets(GROUP_ID, Stream.of(tp1).collect(Collectors.toSet()));
        // Both the whole-group future and the per-partition future must surface the error.
        TestUtils.assertFutureError(errorResult.all(), GroupAuthorizationException.class);
        TestUtils.assertFutureError(errorResult.partitionResult(tp1), GroupAuthorizationException.class);
    }
}
|
/**
 * Builds a {@code TupleIdentifier} for the given id, deriving its name from the id.
 */
static TupleIdentifier createTupleIdentifierById(String id) {
    return new TupleIdentifier(id, generateNameFromId(id));
}
|
// The factory must keep the given id and derive a non-null name from it.
@Test
void createTupleIdentifierById() {
    String id = "123124";
    TupleIdentifier retrieved = TupleIdentifier.createTupleIdentifierById(id);
    assertThat(retrieved).isNotNull();
    assertThat(retrieved.getId()).isEqualTo(id);
    assertThat(retrieved.getName()).isNotNull();
}
|
/**
 * Transforms an OpenWire {@link ActiveMQMessage} into an AMQP {@link EncodedMessage}.
 * JMS headers are mapped onto the AMQP Header/Properties sections; properties carrying the
 * {@code JMS_AMQP_} prefix restore AMQP sections that were captured on the inbound
 * transformation; all remaining properties become AMQP application properties. Sections are
 * created lazily so that only the sections actually populated are encoded.
 */
@Override
public EncodedMessage transform(ActiveMQMessage message) throws Exception {
    if (message == null) {
        return null;
    }
    long messageFormat = 0;
    Header header = null;
    Properties properties = null;
    Map<Symbol, Object> daMap = null;
    Map<Symbol, Object> maMap = null;
    Map<String,Object> apMap = null;
    Map<Object, Object> footerMap = null;
    Section body = convertBody(message);
    // JMS persistent delivery maps to the AMQP durable flag.
    if (message.isPersistent()) {
        if (header == null) {
            header = new Header();
        }
        header.setDurable(true);
    }
    // Only encode priority when it differs from the JMS default.
    byte priority = message.getPriority();
    if (priority != Message.DEFAULT_PRIORITY) {
        if (header == null) {
            header = new Header();
        }
        header.setPriority(UnsignedByte.valueOf(priority));
    }
    // JMSType maps to the AMQP subject field.
    String type = message.getType();
    if (type != null) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setSubject(type);
    }
    MessageId messageId = message.getMessageId();
    if (messageId != null) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setMessageId(getOriginalMessageId(message));
    }
    // Destination and reply-to become the AMQP to/reply-to addresses; the original
    // destination kind (queue/topic/...) is preserved as a message annotation.
    ActiveMQDestination destination = message.getDestination();
    if (destination != null) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setTo(destination.getQualifiedName());
        if (maMap == null) {
            maMap = new HashMap<>();
        }
        maMap.put(JMS_DEST_TYPE_MSG_ANNOTATION, destinationType(destination));
    }
    ActiveMQDestination replyTo = message.getReplyTo();
    if (replyTo != null) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setReplyTo(replyTo.getQualifiedName());
        if (maMap == null) {
            maMap = new HashMap<>();
        }
        maMap.put(JMS_REPLY_TO_TYPE_MSG_ANNOTATION, destinationType(replyTo));
    }
    // Try to restore a typed AMQP correlation id; fall back to the raw string form.
    String correlationId = message.getCorrelationId();
    if (correlationId != null) {
        if (properties == null) {
            properties = new Properties();
        }
        try {
            properties.setCorrelationId(AMQPMessageIdHelper.INSTANCE.toIdObject(correlationId));
        } catch (AmqpProtocolException e) {
            properties.setCorrelationId(correlationId);
        }
    }
    // JMSExpiration maps to both a remaining ttl and the absolute expiry time.
    long expiration = message.getExpiration();
    if (expiration != 0) {
        long ttl = expiration - System.currentTimeMillis();
        if (ttl < 0) {
            // Already expired: clamp to the smallest positive ttl.
            ttl = 1;
        }
        if (header == null) {
            header = new Header();
        }
        header.setTtl(new UnsignedInteger((int) ttl));
        if (properties == null) {
            properties = new Properties();
        }
        properties.setAbsoluteExpiryTime(new Date(expiration));
    }
    long timeStamp = message.getTimestamp();
    if (timeStamp != 0) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setCreationTime(new Date(timeStamp));
    }
    // JMSX Message Properties
    int deliveryCount = message.getRedeliveryCounter();
    if (deliveryCount > 0) {
        if (header == null) {
            header = new Header();
        }
        header.setDeliveryCount(UnsignedInteger.valueOf(deliveryCount));
    }
    String userId = message.getUserID();
    if (userId != null) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setUserId(new Binary(userId.getBytes(StandardCharsets.UTF_8)));
    }
    String groupId = message.getGroupID();
    if (groupId != null) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setGroupId(groupId);
    }
    int groupSequence = message.getGroupSequence();
    if (groupSequence > 0) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setGroupSequence(UnsignedInteger.valueOf(groupSequence));
    }
    final Map<String, Object> entries;
    try {
        entries = message.getProperties();
    } catch (IOException e) {
        throw JMSExceptionSupport.create(e);
    }
    // Route each message property either back into the AMQP section it originally came
    // from (JMS_AMQP_* prefix) or into the application-properties section.
    for (Map.Entry<String, Object> entry : entries.entrySet()) {
        String key = entry.getKey();
        Object value = entry.getValue();
        if (key.startsWith(JMS_AMQP_PREFIX)) {
            if (key.startsWith(NATIVE, JMS_AMQP_PREFIX_LENGTH)) {
                // skip transformer appended properties
                continue;
            } else if (key.startsWith(ORIGINAL_ENCODING, JMS_AMQP_PREFIX_LENGTH)) {
                // skip transformer appended properties
                continue;
            } else if (key.startsWith(MESSAGE_FORMAT, JMS_AMQP_PREFIX_LENGTH)) {
                messageFormat = (long) TypeConversionSupport.convert(entry.getValue(), Long.class);
                continue;
            } else if (key.startsWith(HEADER, JMS_AMQP_PREFIX_LENGTH)) {
                // Marker only: forces the Header section to be emitted.
                if (header == null) {
                    header = new Header();
                }
                continue;
            } else if (key.startsWith(PROPERTIES, JMS_AMQP_PREFIX_LENGTH)) {
                // Marker only: forces the Properties section to be emitted.
                if (properties == null) {
                    properties = new Properties();
                }
                continue;
            } else if (key.startsWith(MESSAGE_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
                if (maMap == null) {
                    maMap = new HashMap<>();
                }
                String name = key.substring(JMS_AMQP_MESSAGE_ANNOTATION_PREFIX.length());
                maMap.put(Symbol.valueOf(name), value);
                continue;
            } else if (key.startsWith(FIRST_ACQUIRER, JMS_AMQP_PREFIX_LENGTH)) {
                if (header == null) {
                    header = new Header();
                }
                header.setFirstAcquirer((boolean) TypeConversionSupport.convert(value, Boolean.class));
                continue;
            } else if (key.startsWith(CONTENT_TYPE, JMS_AMQP_PREFIX_LENGTH)) {
                if (properties == null) {
                    properties = new Properties();
                }
                properties.setContentType(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class)));
                continue;
            } else if (key.startsWith(CONTENT_ENCODING, JMS_AMQP_PREFIX_LENGTH)) {
                if (properties == null) {
                    properties = new Properties();
                }
                properties.setContentEncoding(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class)));
                continue;
            } else if (key.startsWith(REPLYTO_GROUP_ID, JMS_AMQP_PREFIX_LENGTH)) {
                if (properties == null) {
                    properties = new Properties();
                }
                properties.setReplyToGroupId((String) TypeConversionSupport.convert(value, String.class));
                continue;
            } else if (key.startsWith(DELIVERY_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
                if (daMap == null) {
                    daMap = new HashMap<>();
                }
                String name = key.substring(JMS_AMQP_DELIVERY_ANNOTATION_PREFIX.length());
                daMap.put(Symbol.valueOf(name), value);
                continue;
            } else if (key.startsWith(FOOTER_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
                if (footerMap == null) {
                    footerMap = new HashMap<>();
                }
                String name = key.substring(JMS_AMQP_FOOTER_PREFIX.length());
                footerMap.put(Symbol.valueOf(name), value);
                continue;
            }
        } else if (key.startsWith(AMQ_SCHEDULED_MESSAGE_PREFIX )) {
            // strip off the scheduled message properties
            continue;
        }
        // The property didn't map into any other slot so we store it in the
        // Application Properties section of the message.
        if (apMap == null) {
            apMap = new HashMap<>();
        }
        apMap.put(key, value);
        // NOTE(review): this advisory-type marker is re-put on every loop iteration and is
        // only added when at least one application property exists — confirm both are intended.
        int messageType = message.getDataStructureType();
        if (messageType == CommandTypes.ACTIVEMQ_MESSAGE) {
            // Type of command to recognize advisory message
            Object data = message.getDataStructure();
            if(data != null) {
                apMap.put("ActiveMqDataStructureType", data.getClass().getSimpleName());
            }
        }
    }
    // Encode populated sections in the order mandated by the AMQP message format:
    // header, delivery annotations, message annotations, properties, application
    // properties, body, footer.
    final AmqpWritableBuffer buffer = new AmqpWritableBuffer();
    encoder.setByteBuffer(buffer);
    if (header != null) {
        encoder.writeObject(header);
    }
    if (daMap != null) {
        encoder.writeObject(new DeliveryAnnotations(daMap));
    }
    if (maMap != null) {
        encoder.writeObject(new MessageAnnotations(maMap));
    }
    if (properties != null) {
        encoder.writeObject(properties);
    }
    if (apMap != null) {
        encoder.writeObject(new ApplicationProperties(apMap));
    }
    if (body != null) {
        encoder.writeObject(body);
    }
    if (footerMap != null) {
        encoder.writeObject(new Footer(footerMap));
    }
    return new EncodedMessage(messageFormat, buffer.getArray(), 0, buffer.getArrayLength());
}
|
/**
 * A JMS MapMessage must convert to an AMQP message whose body is an AmqpValue wrapping a
 * Map containing the same entries.
 */
@Test
public void testConvertMapMessageToAmqpMessage() throws Exception {
    ActiveMQMapMessage outbound = createMapMessage();
    outbound.setString("property-1", "string");
    outbound.setInt("property-2", 1);
    outbound.setBoolean("property-3", true);
    outbound.onSend();
    outbound.storeContent();
    JMSMappingOutboundTransformer transformer = new JMSMappingOutboundTransformer();
    EncodedMessage encoded = transformer.transform(outbound);
    assertNotNull(encoded);
    Message amqp = encoded.decode();
    assertNotNull(amqp.getBody());
    assertTrue(amqp.getBody() instanceof AmqpValue);
    assertTrue(((AmqpValue) amqp.getBody()).getValue() instanceof Map);
    @SuppressWarnings("unchecked")
    Map<Object, Object> amqpMap = (Map<Object, Object>) ((AmqpValue) amqp.getBody()).getValue();
    assertEquals(3, amqpMap.size());
    // assertEquals gives a useful failure message, unlike assertTrue(a.equals(b)).
    assertEquals("string", amqpMap.get("property-1"));
}
|
/**
 * Builds the download file name for the tile containing (lat, lon):
 * "<area>/<N|S><2-digit lat><E|W><3-digit lon>", or null when the coordinate is
 * outside any known area.
 */
@Override
String getFileName(double lat, double lon) {
    int intKey = calcIntKey(lat, lon);
    String area = areas.get(intKey);
    if (area == null)
        return null;
    int latDeg = Math.abs(down(lat));
    int lonDeg = Math.abs(down(lon));
    StringBuilder name = new StringBuilder(area).append('/');
    // Latitude: hemisphere letter plus the degree value zero-padded to two digits.
    name.append(lat >= 0 ? 'N' : 'S');
    if (latDeg < 10)
        name.append('0');
    name.append(latDeg);
    // Longitude: hemisphere letter plus the degree value zero-padded to three digits.
    name.append(lon >= 0 ? 'E' : 'W');
    if (lonDeg < 10)
        name.append('0');
    if (lonDeg < 100)
        name.append('0');
    name.append(lonDeg);
    return name.toString();
}
|
// NOTE(review): disabled with no reason given — presumably because it downloads real
// elevation data from the remote server; confirm before re-enabling.
@Disabled
@Test
public void testDownloadIssue_1274() {
    instance = new SRTMProvider();
    // The file is incorrectly named on the sever: N55W061hgt.zip (it should be N55W061.hgt.zip)
    assertEquals("North_America/N55W061", instance.getFileName(55.055,-60.541));
    assertEquals(204, instance.getEle(55.055,-60.541), .1);
}
|
/**
 * Returns true only when the IME's container currently has a candidate view and
 * that view is visible.
 */
public boolean shouldShow(@NonNull PublicNotices ime) {
    final View candidateView = ime.getInputViewContainer().getCandidateView();
    if (candidateView == null) {
        return false;
    }
    return candidateView.getVisibility() == View.VISIBLE;
}
|
// Walks shouldShow() through every relevant state: no candidate view, then a view in
// each visibility state — only View.VISIBLE should yield true.
@Test
@SuppressWarnings("ResultOfMethodCallIgnored")
public void testHappyPath() {
    CandidateViewShowingHelper helper = new CandidateViewShowingHelper();
    final PublicNotices ime = Mockito.mock(PublicNotices.class);
    final KeyboardViewContainerView container = Mockito.mock(KeyboardViewContainerView.class);
    Mockito.doReturn(container).when(ime).getInputViewContainer();
    // No candidate view at all.
    Mockito.doReturn(null).when(container).getCandidateView();
    Assert.assertFalse(helper.shouldShow(ime));
    CandidateView candidate = Mockito.mock(CandidateView.class);
    Mockito.doReturn(candidate).when(container).getCandidateView();
    // Present but GONE or INVISIBLE still means "don't show".
    Mockito.doReturn(View.GONE).when(candidate).getVisibility();
    Assert.assertFalse(helper.shouldShow(ime));
    Mockito.doReturn(View.INVISIBLE).when(candidate).getVisibility();
    Assert.assertFalse(helper.shouldShow(ime));
    Mockito.doReturn(View.VISIBLE).when(candidate).getVisibility();
    Assert.assertTrue(helper.shouldShow(ime));
}
|
/**
 * Renders the given expression as its formatted source-text representation.
 */
public String process(final Expression expression) {
    return formatExpression(expression);
}
|
@Test
public void shouldEscapeQuotesInStringLiteralQuote() {
    // Given: a SQL string literal containing a backslash and a double quote.
    final Expression expression = new StringLiteral("\\\"");
    // When:
    final String javaExpression = sqlToJavaVisitor.process(expression);
    // Then: both characters are escaped inside the generated Java string literal.
    assertThat(javaExpression, equalTo("\"\\\\\\\"\""));
}
|
/**
 * Maps a duration in seconds to a human-readable representation using the first rule
 * whose upper bound covers the duration; durations beyond every rule fall back to the
 * "over two years" bucket.
 * NOTE(review): correctness relies on RULES iterating its keys in ascending order of
 * seconds — confirm RULES is a sorted (or carefully ordered) map.
 */
public ConvertedTime getConvertedTime(long duration) {
    Set<Seconds> keys = RULES.keySet();
    for (Seconds seconds : keys) {
        if (duration <= seconds.getSeconds()) {
            return RULES.get(seconds).getConvertedTime(duration);
        }
    }
    return new TimeConverter.OverTwoYears().getConvertedTime(duration);
}
|
// 30 days minus 31 seconds (= 29d 23h 59m 29s) is still reported as "about 29 days".
@Test
public void testShouldReport29DaysFor29Days23Hours59Minutes29Seconds() throws Exception {
    assertEquals(TimeConverter.ABOUT_X_DAYS_AGO.argument(29), timeConverter
            .getConvertedTime(30 * TimeConverter.DAY_IN_SECONDS - 31));
}
|
/**
 * Derives the websocket remote address ("ws://host:port" or "wss://host:port" for HTTPS
 * requests) from the given servlet request.
 *
 * @throws IllegalArgumentException if {@code request} is null
 */
public static String generateWsRemoteAddress(HttpServletRequest request) {
    if (request == null) {
        throw new IllegalArgumentException("HttpServletRequest must not be null.");
    }
    // Constant-first equalsIgnoreCase is null-safe: a null scheme falls back to ws://.
    final String prefix = "https".equalsIgnoreCase(request.getScheme()) ? "wss://" : "ws://";
    return prefix + request.getRemoteAddr() + ":" + request.getRemotePort();
}
|
// A null request must be rejected with IllegalArgumentException.
// (Renamed from testNullHttpServleRequest to fix the "Servle" typo; test methods have no callers.)
@Test(expected=IllegalArgumentException.class)
public void testNullHttpServletRequest() {
    HttpTransportUtils.generateWsRemoteAddress(null);
}
|
/**
 * Fails unless the subject contains every key/value entry of {@code expectedMultimap};
 * extra entries in the subject are allowed. The returned {@link Ordered} can be used to
 * additionally assert that the expected entries appear in the given order.
 */
@CanIgnoreReturnValue
public final Ordered containsAtLeastEntriesIn(Multimap<?, ?> expectedMultimap) {
    checkNotNull(expectedMultimap, "expectedMultimap");
    checkNotNull(actual);
    // Entries expected but absent from the subject.
    ListMultimap<?, ?> missing = difference(expectedMultimap, actual);
    // TODO(kak): Possible enhancement: Include "[1 copy]" if the element does appear in
    // the subject but not enough times. Similarly for unexpected extra items.
    if (!missing.isEmpty()) {
        failWithActual(
            fact("missing", countDuplicatesMultimap(annotateEmptyStringsMultimap(missing))),
            simpleFact("---"),
            fact("expected to contain at least", annotateEmptyStringsMultimap(expectedMultimap)));
        return ALREADY_FAILED;
    }
    return new MultimapInOrder(/* allowUnexpected = */ true, expectedMultimap);
}
|
// Passing null as the expected multimap must be rejected with NullPointerException.
@Test
public void containsAtLeastRejectsNull() {
    ImmutableMultimap<Integer, String> multimap =
        ImmutableMultimap.of(3, "one", 3, "six", 3, "two", 4, "five", 4, "four");
    try {
        assertThat(multimap).containsAtLeastEntriesIn(null);
        fail("Should have thrown.");
    } catch (NullPointerException expected) {
    }
}
|
/**
 * Resolves each intermediate data set produced by this vertex into its logical result
 * via the result retriever.
 */
@Override
public Iterable<DefaultLogicalResult> getProducedResults() {
    return jobVertex.getProducedDataSets().stream()
            .map(dataSet -> resultRetriever.apply(dataSet.getId()))
            .collect(Collectors.toList());
}
|
// The upstream vertex must report exactly the pre-built set of produced results.
@Test
public void testGetProducedResults() {
    assertResultsEquals(results, upstreamLogicalVertex.getProducedResults());
}
|
/**
 * Builds a config from the given map, considering only the keys this config declares
 * (unknown entries are dropped before parsing).
 */
public static LeaderElectionManagerConfig fromMap(Map<String, String> map) {
    Map<String, String> relevantEntries = new HashMap<>(map);
    relevantEntries.keySet().retainAll(LeaderElectionManagerConfig.keyNames());
    return new LeaderElectionManagerConfig(ConfigParameter.define(relevantEntries, CONFIG_VALUES));
}
|
/**
 * Each of the three required leader-election keys must be rejected when null while the
 * other two hold valid values.
 */
@Test
public void testSomeRequiredValuesNotNull() {
    assertNullValueRejected(
            LeaderElectionManagerConfig.ENV_VAR_LEADER_ELECTION_LEASE_NAME.key(),
            LeaderElectionManagerConfig.ENV_VAR_LEADER_ELECTION_LEASE_NAMESPACE.key(),
            LeaderElectionManagerConfig.ENV_VAR_LEADER_ELECTION_IDENTITY.key());
}

/**
 * For each key in turn, builds an env map where that key is null and every other key has a
 * valid value, and asserts that parsing fails with the expected message.
 */
private static void assertNullValueRejected(String... keys) {
    for (String nullKey : keys) {
        Map<String, String> envVars = new HashMap<>();
        for (String key : keys) {
            envVars.put(key, key.equals(nullKey) ? null : "some-value");
        }
        InvalidConfigurationException e = assertThrows(InvalidConfigurationException.class, () -> LeaderElectionManagerConfig.fromMap(envVars));
        assertThat(e.getMessage(), is("Failed to parse. Value cannot be empty or null"));
    }
}
|
/**
 * Case-insensitive computeIfPresent: normalizes the key to lower case and delegates to the
 * default Map implementation (which works through this map's get/put/remove).
 * NOTE(review): uses the default-locale toLowerCase(); if the rest of this class lowers
 * keys with an explicit Locale, align with it — confirm against the other overrides.
 */
@Override
public V computeIfPresent(String key, BiFunction<? super String, ? super V, ? extends V> remappingFunction) {
    return Map.super.computeIfPresent(key.toLowerCase(), remappingFunction);
}
|
@Test
void computeIfPresent() {
    Map<String, Object> map = new LowerCaseLinkHashMap<>(lowerCaseLinkHashMap);
    // Present key: the remapping result replaces the stored value.
    Object result = map.computeIfPresent("key", (key,value)-> key.toUpperCase());
    Assertions.assertEquals("KEY", result);
    Assertions.assertEquals("KEY", map.get("key"));
    // Remapping to null removes the entry.
    result = map.computeIfPresent("key", (key, value) -> null);
    Assertions.assertNull(result);
    Assertions.assertFalse(map.containsKey("key"));
    // Absent key: nothing is computed or stored.
    result = map.computeIfPresent("computeIfPresent", (key,value)-> key.toUpperCase());
    Assertions.assertNull(result);
    Assertions.assertFalse(map.containsKey("computeIfPresent"));
}
|
/**
 * Converts the single input data file into an Avro container file: reads records through an
 * optional column projection and writes them with the requested compression codec.
 * Returns 0 on success; wraps any record-level failure with the failing record's index.
 */
@Override
@SuppressWarnings("unchecked")
public int run() throws IOException {
    Preconditions.checkArgument(targets != null && targets.size() == 1, "A data file is required.");
    String source = targets.get(0);
    CodecFactory codecFactory = Codecs.avroCodec(compressionCodecName);
    // Prefer an explicitly supplied Avro schema; otherwise infer it from the source file.
    final Schema schema;
    if (avroSchemaFile != null) {
        schema = Schemas.fromAvsc(open(avroSchemaFile));
    } else {
        schema = getAvroSchema(source);
    }
    // Narrow the schema to the requested columns, if any.
    final Schema projection = filterSchema(schema, columns);
    Iterable<Record> reader = openDataFile(source, projection);
    boolean threw = true;
    long count = 0;
    // NOTE(review): the DatumWriter uses the full schema while the output file is created
    // with the projection; these differ when columns are filtered — confirm intended.
    DatumWriter<Record> datumWriter = new GenericDatumWriter<>(schema);
    try (DataFileWriter<Record> fileWriter = new DataFileWriter<>(datumWriter)) {
        fileWriter.setCodec(codecFactory);
        try (OutputStream os = overwrite ? create(outputPath) : createWithNoOverwrite(outputPath);
             DataFileWriter<Record> writer = fileWriter.create(projection, os)) {
            for (Record record : reader) {
                writer.append(record);
                count += 1;
            }
        }
        threw = false;
    } catch (RuntimeException e) {
        throw new RuntimeException("Failed on record " + count, e);
    } finally {
        // The reader is only closeable for some sources; swallow close errors when we are
        // already propagating an exception (threw == true).
        if (reader instanceof Closeable) {
            Closeables.close((Closeable) reader, threw);
        }
    }
    return 0;
}
|
/**
 * End-to-end: the to-avro command must convert a newline-delimited JSON file and exit
 * with status 0.
 */
@Test
public void testToAvroCommandFromJson() throws IOException {
    final File jsonInputFile = folder.newFile("sample.json");
    final File avroOutputFile = folder.newFile("sample.avro");
    // Write the json to the file, so we can read it again.
    final String inputJson = "{\"id\": 1, \"name\": \"Alice\"}\n" + "{\"id\": 2, \"name\": \"Bob\"}\n"
        + "{\"id\": 3, \"name\": \"Carol\"}\n"
        + "{\"id\": 4, \"name\": \"Dave\"}";
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(jsonInputFile))) {
        writer.write(inputJson);
    }
    ToAvroCommand cmd = new ToAvroCommand(null);
    JCommander.newBuilder()
        .addObject(cmd)
        .build()
        .parse("--overwrite", jsonInputFile.getAbsolutePath(), "--output", avroOutputFile.getAbsolutePath());
    // Use a JUnit assertion: the bare `assert` keyword is a no-op unless the JVM runs
    // with -ea, so the original check could never fail in a normal test run.
    org.junit.Assert.assertEquals(0, cmd.run());
}
|
/**
 * Dispatches one keystore CLI invocation: parses {@code primaryCommand} plus
 * {@code allArguments} and executes the matching sub-command (create/list/add/remove),
 * writing all interaction through the attached terminal. Unknown input prints help;
 * parse errors are reported and abort the invocation.
 */
public void command(String primaryCommand, SecureConfig config, String... allArguments) {
    terminal.writeLine("");
    final Optional<CommandLine> commandParseResult;
    try {
        commandParseResult = Command.parse(primaryCommand, allArguments);
    } catch (InvalidCommandException e) {
        terminal.writeLine(String.format("ERROR: %s", e.getMessage()));
        return;
    }
    // No recognized command: show usage instead of failing.
    if (commandParseResult.isEmpty()) {
        printHelp();
        return;
    }
    final CommandLine commandLine = commandParseResult.get();
    switch (commandLine.getCommand()) {
        case CREATE: {
            if (commandLine.hasOption(CommandOptions.HELP)){
                terminal.writeLine("Creates a new keystore. For example: 'bin/logstash-keystore create'");
                return;
            }
            // Creating over an existing keystore requires interactive confirmation.
            if (secretStoreFactory.exists(config.clone())) {
                terminal.write("An Logstash keystore already exists. Overwrite ? [y/N] ");
                if (isYes(terminal.readLine())) {
                    create(config);
                }
            } else {
                create(config);
            }
            break;
        }
        case LIST: {
            if (commandLine.hasOption(CommandOptions.HELP)){
                terminal.writeLine("List all secret identifiers from the keystore. For example: " +
                        "`bin/logstash-keystore list`. Note - only the identifiers will be listed, not the secrets.");
                return;
            }
            // The internal marker entry is filtered out; only user identifiers are shown.
            Collection<SecretIdentifier> ids = secretStoreFactory.load(config).list();
            List<String> keys = ids.stream().filter(id -> !id.equals(LOGSTASH_MARKER)).map(id -> id.getKey()).collect(Collectors.toList());
            Collections.sort(keys);
            keys.forEach(terminal::writeLine);
            break;
        }
        case ADD: {
            if (commandLine.hasOption(CommandOptions.HELP)){
                terminal.writeLine("Add secrets to the keystore. For example: " +
                        "`bin/logstash-keystore add my-secret`, at the prompt enter your secret. You will use the identifier ${my-secret} in your Logstash configuration.");
                return;
            }
            if (commandLine.getArguments().isEmpty()) {
                terminal.writeLine("ERROR: You must supply an identifier to add. (e.g. bin/logstash-keystore add my-secret)");
                return;
            }
            if (secretStoreFactory.exists(config.clone())) {
                final SecretStore secretStore = secretStoreFactory.load(config);
                for (String argument : commandLine.getArguments()) {
                    final SecretIdentifier id = new SecretIdentifier(argument);
                    // If the identifier already exists, ask before overwriting; the
                    // retrieved copy of the old value is zeroed out immediately.
                    final byte[] existingValue = secretStore.retrieveSecret(id);
                    if (existingValue != null) {
                        SecretStoreUtil.clearBytes(existingValue);
                        terminal.write(String.format("%s already exists. Overwrite ? [y/N] ", argument));
                        if (!isYes(terminal.readLine())) {
                            continue;
                        }
                    }
                    // Prompt until a non-empty, ASCII-only secret is entered.
                    final String enterValueMessage = String.format("Enter value for %s: ", argument);
                    char[] secret = null;
                    while(secret == null) {
                        terminal.write(enterValueMessage);
                        final char[] readSecret = terminal.readSecret();
                        if (readSecret == null || readSecret.length == 0) {
                            terminal.writeLine("ERROR: Value cannot be empty");
                            continue;
                        }
                        if (!ASCII_ENCODER.canEncode(CharBuffer.wrap(readSecret))) {
                            terminal.writeLine("ERROR: Value must contain only ASCII characters");
                            continue;
                        }
                        secret = readSecret;
                    }
                    add(secretStore, id, SecretStoreUtil.asciiCharToBytes(secret));
                }
            } else {
                terminal.writeLine("ERROR: Logstash keystore not found. Use 'create' command to create one.");
            }
            break;
        }
        case REMOVE: {
            if (commandLine.hasOption(CommandOptions.HELP)){
                terminal.writeLine("Remove secrets from the keystore. For example: " +
                        "`bin/logstash-keystore remove my-secret`");
                return;
            }
            if (commandLine.getArguments().isEmpty()) {
                terminal.writeLine("ERROR: You must supply a value to remove. (e.g. bin/logstash-keystore remove my-secret)");
                return;
            }
            final SecretStore secretStore = secretStoreFactory.load(config);
            for (String argument : commandLine.getArguments()) {
                SecretIdentifier id = new SecretIdentifier(argument);
                if (secretStore.containsSecret(id)) {
                    secretStore.purgeSecret(id);
                    terminal.writeLine(String.format("Removed '%s' from the Logstash keystore.", id.getKey()));
                } else {
                    terminal.writeLine(String.format("ERROR: '%s' does not exist in the Logstash keystore.", argument));
                }
            }
            break;
        }
    }
}
|
@Test
public void testList() {
    cli.command("list", existingStoreConfig);
    // The pre-built store holds one entry per letter; iterate ASCII 65-90 ('A'-'Z') and
    // check each lower-cased identifier is listed.
    for (int i = 65; i <= 90; i++) {
        String expected = new String(new byte[]{(byte) i});
        assertListed(expected.toLowerCase());
    }
    // The internal seed secret must never appear in the listing.
    assertThat(terminal.out).doesNotContain("keystore.seed");
}
|
/**
 * Builds the JVM memory gauge set: totals (heap + non-heap), per-region heap/non-heap
 * init/used/max/committed/usage gauges, and the same per memory pool. The returned map
 * is unmodifiable; each gauge reads the MXBeans lazily on access.
 */
@Override
public Map<String, Metric> getMetrics() {
    final Map<String, Metric> gauges = new HashMap<>();
    gauges.put("total.init", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getInit() +
        mxBean.getNonHeapMemoryUsage().getInit());
    gauges.put("total.used", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getUsed() +
        mxBean.getNonHeapMemoryUsage().getUsed());
    // When the non-heap max is undefined (-1), total.max reports -1 as well.
    gauges.put("total.max", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getMax() == -1 ?
        -1 : mxBean.getHeapMemoryUsage().getMax() + mxBean.getNonHeapMemoryUsage().getMax());
    gauges.put("total.committed", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getCommitted() +
        mxBean.getNonHeapMemoryUsage().getCommitted());
    gauges.put("heap.init", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getInit());
    gauges.put("heap.used", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getUsed());
    gauges.put("heap.max", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getMax());
    gauges.put("heap.committed", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getCommitted());
    gauges.put("heap.usage", new RatioGauge() {
        @Override
        protected Ratio getRatio() {
            final MemoryUsage usage = mxBean.getHeapMemoryUsage();
            return Ratio.of(usage.getUsed(), usage.getMax());
        }
    });
    gauges.put("non-heap.init", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getInit());
    gauges.put("non-heap.used", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getUsed());
    gauges.put("non-heap.max", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getMax());
    gauges.put("non-heap.committed", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getCommitted());
    gauges.put("non-heap.usage", new RatioGauge() {
        @Override
        protected Ratio getRatio() {
            // Fall back to the committed size when the max is undefined (-1).
            final MemoryUsage usage = mxBean.getNonHeapMemoryUsage();
            return Ratio.of(usage.getUsed(), usage.getMax() == -1 ? usage.getCommitted() : usage.getMax());
        }
    });
    for (final MemoryPoolMXBean pool : memoryPools) {
        // Pool names may contain spaces; replace them so metric names stay well-formed.
        final String poolName = name("pools", WHITESPACE.matcher(pool.getName()).replaceAll("-"));
        gauges.put(name(poolName, "usage"), new RatioGauge() {
            @Override
            protected Ratio getRatio() {
                MemoryUsage usage = pool.getUsage();
                return Ratio.of(usage.getUsed(),
                    usage.getMax() == -1 ? usage.getCommitted() : usage.getMax());
            }
        });
        gauges.put(name(poolName, "max"), (Gauge<Long>) () -> pool.getUsage().getMax());
        gauges.put(name(poolName, "used"), (Gauge<Long>) () -> pool.getUsage().getUsed());
        gauges.put(name(poolName, "committed"), (Gauge<Long>) () -> pool.getUsage().getCommitted());
        // Only register GC usage metrics if the memory pool supports usage statistics.
        if (pool.getCollectionUsage() != null) {
            gauges.put(name(poolName, "used-after-gc"), (Gauge<Long>) () ->
                pool.getCollectionUsage().getUsed());
        }
        gauges.put(name(poolName, "init"), (Gauge<Long>) () -> pool.getUsage().getInit());
    }
    return Collections.unmodifiableMap(gauges);
}
|
// The default constructor must discover the platform memory MXBean and pools on its own.
@Test
public void autoDetectsMemoryUsageBeanAndMemoryPools() {
    assertThat(new MemoryUsageGaugeSet().getMetrics().keySet())
        .isNotEmpty();
}
|
/**
 * Returns the local attribute stored under {@code key} if it is an instance of
 * {@code clazz}, or {@code null} when the attribute is absent or of a different type.
 *
 * @param key the local-attribute key to look up
 * @param requestContext the context holding the attribute
 * @param clazz the expected type of the stored value
 * @return the attribute as {@code T}, or {@code null}
 */
public static <T> T getObjectWithKey(String key, RequestContext requestContext, Class<T> clazz)
{
  final Object object = requestContext.getLocalAttr(key);
  // Class.cast is a checked cast, so no @SuppressWarnings("unchecked") is needed.
  return (clazz.isInstance(object)) ? clazz.cast(object) : null;
}
|
// Looking up with a supertype (List) of the stored value's class (ArrayList) must match.
@Test
public void testGetObjectWithKeySuperclass()
{
  final ArrayList<String> value = new ArrayList<>();
  _requestContext.putLocalAttr(KEY, value);
  Assert.assertEquals(RequestContextUtil.getObjectWithKey(KEY, _requestContext, List.class), value);
}
|
/**
 * Returns the scheduler that holds this manager's pending/running task runs.
 */
public TaskRunScheduler getTaskRunScheduler() {
    return taskRunScheduler;
}
|
/**
 * Two mergeable task runs for the same task must collapse into a single pending run
 * that keeps the higher priority, regardless of arrival order (higher-priority first here).
 */
@Test
public void testTaskRunMergePriorityFirst2() {
    TaskRunManager taskRunManager = new TaskRunManager();
    Task task = new Task("test");
    task.setDefinition("select 1");
    long taskId = 1;
    TaskRun taskRun1 = TaskRunBuilder
            .newBuilder(task)
            .setExecuteOption(DEFAULT_MERGE_OPTION)
            .build();
    long now = System.currentTimeMillis();
    taskRun1.setTaskId(taskId);
    taskRun1.initStatus("1", now);
    taskRun1.getStatus().setPriority(0);
    TaskRun taskRun2 = TaskRunBuilder
            .newBuilder(task)
            .setExecuteOption(DEFAULT_MERGE_OPTION)
            .build();
    taskRun2.setTaskId(taskId);
    taskRun2.initStatus("2", now);
    taskRun2.getStatus().setPriority(10);
    // Submit the high-priority run first, then the low-priority one.
    taskRunManager.arrangeTaskRun(taskRun2, false);
    taskRunManager.arrangeTaskRun(taskRun1, false);
    TaskRunScheduler taskRunScheduler = taskRunManager.getTaskRunScheduler();
    List<TaskRun> taskRuns = Lists.newArrayList(taskRunScheduler.getPendingTaskRunsByTaskId(taskId));
    // assertNotNull states the intent directly instead of assertTrue(x != null).
    Assert.assertNotNull(taskRuns);
    Assert.assertEquals(1, taskRuns.size());
    Assert.assertEquals(10, taskRuns.get(0).getStatus().getPriority());
}
|
/**
 * Returns true when every window except the current (in-progress) one holds valid metrics
 * and at most {@code maxAllowedWindowsWithExtrapolation} of them required extrapolation.
 */
public synchronized boolean isValid(int maxAllowedWindowsWithExtrapolation) {
    int currentArrayIndex = arrayIndex(currentWindowIndex());
    // The total number of valid window indices should exclude the current window index.
    int numValidIndicesAdjustment = _validity.get(currentArrayIndex) ? 1 : 0;
    // _counts.length - 1 is the number of completed windows (current window excluded).
    boolean allIndicesValid = _validity.cardinality() - numValidIndicesAdjustment == _counts.length - 1;
    // All indices should be valid and should not have more than maxAllowedWindowsWithExtrapolation extrapolations.
    return allIndicesValid && numWindowsWithExtrapolation() <= maxAllowedWindowsWithExtrapolation;
}
|
@Test
public void testIsValid() {
  RawMetricValues rawValues = new RawMetricValues(NUM_WINDOWS_TO_KEEP, MIN_SAMPLES_PER_WINDOW, NUM_RAW_METRICS);
  rawValues.updateOldestWindowIndex(0);
  MetricSample<String, IntegerEntity> m = getMetricSample(10, 10, 10);
  // Fill every window with one sample fewer than the minimum, so each window
  // needs extrapolation to be considered valid.
  for (int i = 0; i < NUM_WINDOWS_TO_KEEP; i++) {
    for (int j = 0; j < MIN_SAMPLES_PER_WINDOW - 1; j++) {
      addSample(rawValues, m, i);
    }
  }
  // All windows extrapolated: valid with a budget of 5, invalid with 4.
  assertTrue(rawValues.isValid(5));
  assertFalse(rawValues.isValid(4));
  // Topping up window 0 to the minimum removes one extrapolation.
  addSample(rawValues, m, 0);
  assertTrue(rawValues.isValid(4));
}
|
/** Returns the precomputed result metadata for this transform function. */
@Override
public TransformResultMetadata getResultMetadata() {
  return _resultMetadata;
}
|
@Test
public void testCaseTransformFunctionWithoutCastForFloatValues() {
  boolean[] predicateResults = new boolean[1];
  Arrays.fill(predicateResults, true);
  int[] expectedValues = new int[1];
  int index = -1;
  // Find a row whose float value loses precision when round-tripped through
  // a "%f" string representation.
  for (int i = 0; i < NUM_ROWS; i++) {
    if (Double.compare(_floatSVValues[i], Double.parseDouble(String.format("%f", _floatSVValues[i]))) != 0) {
      index = i;
      expectedValues[0] = predicateResults[0] ? _intSVValues[i] : 10;
      break;
    }
  }
  if (index != -1) {
    // Build a CASE on an equality predicate against the formatted (lossy)
    // literal for that row.
    String predicate = String.format("%s(%s, %s)", TransformFunctionType.EQUALS, FLOAT_SV_COLUMN,
        String.format("%f", _floatSVValues[index]));
    String expression = String.format("CASE WHEN %s THEN %s ELSE 10 END", predicate, INT_SV_COLUMN);
    ExpressionContext expressionContext = RequestContextUtils.getExpression(expression);
    TransformFunction transformFunction = TransformFunctionFactory.get(expressionContext, _dataSourceMap);
    Assert.assertTrue(transformFunction instanceof CaseTransformFunction);
    assertEquals(transformFunction.getResultMetadata().getDataType(), DataType.INT);
    int[] intValues = transformFunction.transformToIntValuesSV(_projectionBlock);
    // NOTE(review): asserts the predicate does NOT match, i.e. without a cast
    // the lossy literal differs from the stored float — confirm this is the
    // intended contract rather than a typo for assertEquals.
    assertNotEquals(intValues[index], expectedValues[0]);
  }
}
|
/**
 * Computes the Redis cluster hash slot for the given key by issuing a
 * KEYSLOT command and blocking on the result.
 */
@Override
public Integer clusterGetSlotForKey(byte[] key) {
    RFuture<Integer> f = executorService.readAsync((String)null, StringCodec.INSTANCE, RedisCommands.KEYSLOT, key);
    return syncFuture(f);
}
|
@Test
public void testClusterGetSlotForKey() {
    // Any key must map to some (non-null) cluster hash slot.
    Integer slot = connection.clusterGetSlotForKey("123".getBytes());
    assertThat(slot).isNotNull();
}
|
@Override
public void fulfillFinishedTaskStatus(Map<OperatorID, OperatorState> operatorStates) {
    // Nothing to do unless some task may have finished.
    if (!mayHaveFinishedTasks) {
        return;
    }
    // Collect vertices that have at least one finished task but are neither
    // fully finished nor finished on restore.
    Map<JobVertexID, ExecutionJobVertex> partlyFinishedVertices = new HashMap<>();
    for (Execution finishedTask : finishedTasks) {
        JobVertexID vertexId = finishedTask.getVertex().getJobvertexId();
        if (!fullyFinishedOrFinishedOnRestoreVertices.containsKey(vertexId)) {
            partlyFinishedVertices.put(vertexId, finishedTask.getVertex().getJobVertex());
        }
    }
    // Union list state cannot be safely checkpointed for partly finished vertices.
    checkNoPartlyFinishedVertexUsedUnionListState(partlyFinishedVertices, operatorStates);
    checkNoPartlyOperatorsFinishedVertexUsedUnionListState(
            partlyFinishedVertices, operatorStates);
    fulfillFullyFinishedOrFinishedOnRestoreOperatorStates(operatorStates);
    fulfillSubtaskStateForPartiallyFinishedOperators(operatorStates);
}
|
@Test
void testFulfillFinishedStates() throws Exception {
    // Three vertices, each with two subtasks and one operator:
    //  - fully finished (both subtasks finished),
    //  - finished on restore,
    //  - partially finished (one of two subtasks finished).
    JobVertexID fullyFinishedVertexId = new JobVertexID();
    JobVertexID finishedOnRestoreVertexId = new JobVertexID();
    JobVertexID partiallyFinishedVertexId = new JobVertexID();
    OperatorID fullyFinishedOperatorId = new OperatorID();
    OperatorID finishedOnRestoreOperatorId = new OperatorID();
    OperatorID partiallyFinishedOperatorId = new OperatorID();
    ExecutionGraph executionGraph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(
                            fullyFinishedVertexId,
                            2,
                            2,
                            Collections.singletonList(
                                    OperatorIDPair.generatedIDOnly(fullyFinishedOperatorId)),
                            true)
                    .addJobVertex(
                            finishedOnRestoreVertexId,
                            2,
                            2,
                            Collections.singletonList(
                                    OperatorIDPair.generatedIDOnly(
                                            finishedOnRestoreOperatorId)),
                            true)
                    .addJobVertex(
                            partiallyFinishedVertexId,
                            2,
                            2,
                            Collections.singletonList(
                                    OperatorIDPair.generatedIDOnly(
                                            partiallyFinishedOperatorId)),
                            true)
                    .build(EXECUTOR_EXTENSION.getExecutor());
    ExecutionVertex[] fullyFinishedVertexTasks =
            executionGraph.getJobVertex(fullyFinishedVertexId).getTaskVertices();
    ExecutionVertex[] finishedOnRestoreVertexTasks =
            executionGraph.getJobVertex(finishedOnRestoreVertexId).getTaskVertices();
    ExecutionVertex[] partiallyFinishedVertexTasks =
            executionGraph.getJobVertex(partiallyFinishedVertexId).getTaskVertices();
    // Mark the finished tasks before building the checkpoint plan.
    Arrays.stream(fullyFinishedVertexTasks)
            .forEach(task -> task.getCurrentExecutionAttempt().markFinished());
    partiallyFinishedVertexTasks[0].getCurrentExecutionAttempt().markFinished();
    CheckpointPlan checkpointPlan = createCheckpointPlan(executionGraph);
    Arrays.stream(finishedOnRestoreVertexTasks)
            .forEach(checkpointPlan::reportTaskFinishedOnRestore);
    Map<OperatorID, OperatorState> operatorStates = new HashMap<>();
    checkpointPlan.fulfillFinishedTaskStatus(operatorStates);
    // Fully-finished and finished-on-restore operators become fully finished;
    // the partially finished operator keeps per-subtask finished flags.
    assertThat(operatorStates).hasSize(3);
    assertThat(operatorStates.get(fullyFinishedOperatorId).isFullyFinished()).isTrue();
    assertThat(operatorStates.get(finishedOnRestoreOperatorId).isFullyFinished()).isTrue();
    OperatorState operatorState = operatorStates.get(partiallyFinishedOperatorId);
    assertThat(operatorState.isFullyFinished()).isFalse();
    assertThat(operatorState.getState(0).isFinished()).isTrue();
}
|
/**
 * Builds an AST {@link Statement} from the parse tree, first extracting the
 * data sources referenced by the tree so they are available during the build.
 */
public Statement buildStatement(final ParserRuleContext parseTree) {
  return build(Optional.of(getSources(parseTree)), parseTree);
}
|
@Test
public void shouldExtractUnaliasedDataSources() {
  // Given: a query over TEST1 without an explicit alias.
  final SingleStatementContext stmt = givenQuery("SELECT * FROM TEST1;");
  // When:
  final Query result = (Query) builder.buildStatement(stmt);
  // Then: the source name itself is used as the alias.
  assertThat(result.getFrom(), is(new AliasedRelation(TEST1, TEST1_NAME)));
}
|
/**
 * Discovers the cluster members, notifies the listener registry with the
 * discovered private addresses, and returns them as primary addresses.
 * The discovery response is retained for later lookups.
 */
@Override
public Addresses loadAddresses(ClientConnectionProcessListenerRegistry listenerRunner) throws Exception {
    response = discovery.discoverNodes();
    List<Address> privateAddresses = response.getPrivateMemberAddresses();
    listenerRunner.onPossibleAddressesCollected(privateAddresses);
    return new Addresses(privateAddresses);
}
|
@Test
public void testLoadAddresses_withTpc() throws Exception {
    setUpWithTpc();
    ViridianAddressProvider provider = new ViridianAddressProvider(createDiscovery());
    ClientConnectionProcessListenerRegistry listenerRunner = createListenerRunner();
    Addresses addresses = provider.loadAddresses(listenerRunner);
    // With TPC enabled, only the private member address is reported, and it
    // is a primary address; there are no secondaries.
    assertThat(addresses.primary()).containsExactly(PRIVATE_MEMBER_ADDRESS);
    assertThat(addresses.secondary()).isEmpty();
    // The listener must be told about the collected addresses exactly once.
    verify(listenerRunner, times(1)).onPossibleAddressesCollected(Collections.singletonList(PRIVATE_MEMBER_ADDRESS));
}
|
/**
 * Starts a fresh MiniCluster sized for the job's maximum parallelism, submits
 * the job, waits for job initialization to finish, and returns a JobClient
 * configured to shut the cluster down when the job finalizes. On any failure
 * during submission the cluster is shut down to avoid leaking resources.
 */
public CompletableFuture<JobClient> submitJob(
        JobGraph jobGraph, ClassLoader userCodeClassloader) throws Exception {
    MiniClusterConfiguration miniClusterConfig =
            getMiniClusterConfig(jobGraph.getMaximumParallelism());
    MiniCluster miniCluster = miniClusterFactory.apply(miniClusterConfig);
    miniCluster.start();
    return miniCluster
            .submitJob(jobGraph)
            .thenApplyAsync(
                    FunctionUtils.uncheckedFunction(
                            submissionResult -> {
                                // Block until the job leaves the INITIALIZING
                                // state (or fails), so callers get a usable
                                // client.
                                org.apache.flink.client.ClientUtils
                                        .waitUntilJobInitializationFinished(
                                                () ->
                                                        miniCluster
                                                                .getJobStatus(
                                                                        submissionResult
                                                                                .getJobID())
                                                                .get(),
                                                () ->
                                                        miniCluster
                                                                .requestJobResult(
                                                                        submissionResult
                                                                                .getJobID())
                                                                .get(),
                                                userCodeClassloader);
                                return submissionResult;
                            }))
            .thenApply(
                    result ->
                            // SHUTDOWN_CLUSTER ties the cluster lifetime to the
                            // job: it is torn down when the job finishes.
                            new MiniClusterJobClient(
                                    result.getJobID(),
                                    miniCluster,
                                    userCodeClassloader,
                                    MiniClusterJobClient.JobFinalizationBehavior
                                            .SHUTDOWN_CLUSTER))
            .whenComplete(
                    (ignored, throwable) -> {
                        if (throwable != null) {
                            // We failed to create the JobClient and must shutdown to ensure
                            // cleanup.
                            shutDownCluster(miniCluster);
                        }
                    })
            .thenApply(Function.identity());
}
|
@Test
void testJobClientInteractionAfterShutdown() throws Exception {
    PerJobMiniClusterFactory perJobMiniClusterFactory = initializeMiniCluster();
    JobClient jobClient =
            perJobMiniClusterFactory
                    .submitJob(getNoopJobGraph(), ClassLoader.getSystemClassLoader())
                    .get();
    // Let the job run to completion, which shuts the per-job cluster down.
    jobClient.getJobExecutionResult().get();
    assertThatMiniClusterIsShutdown();
    // Any further interaction with the client must fail fast.
    assertThatThrownBy(jobClient::cancel)
            .isInstanceOf(IllegalStateException.class)
            .hasMessageContaining(
                    "MiniCluster is not yet running or has already been shut down.");
}
|
/**
 * Validates a context path, rejecting any path that matches the illegal-path
 * pattern. A {@code null} path means "not configured" and is accepted.
 *
 * @throws IllegalArgumentException if the path matches the illegal pattern
 */
public static void checkContextPath(String contextPath) {
    if (contextPath == null) {
        return;
    }
    if (CONTEXT_PATH_MATCH.matcher(contextPath).find()) {
        throw new IllegalArgumentException("Illegal url path expression");
    }
}
|
@Test
void testContextPathIllegal2() {
    // A path with consecutive slashes is illegal and must be rejected.
    assertThrows(IllegalArgumentException.class,
            () -> ValidatorUtils.checkContextPath("/nacos//"));
}
|
/**
 * Builds a Pubsub read transform from the configuration.
 *
 * <p>Validation: exactly one of subscription/topic must be set; unless the
 * format is RAW, schema and format must be provided together (leaving both
 * blank selects the Pubsub schema service).
 *
 * @throws IllegalArgumentException on invalid source, schema/format pairing,
 *     or an unsupported format
 */
@Override
public SchemaTransform from(PubsubReadSchemaTransformConfiguration configuration) {
  if (configuration.getSubscription() == null && configuration.getTopic() == null) {
    throw new IllegalArgumentException(
        "To read from Pubsub, a subscription name or a topic name must be provided");
  }
  if (configuration.getSubscription() != null && configuration.getTopic() != null) {
    throw new IllegalArgumentException(
        "To read from Pubsub, a subscription name or a topic name must be provided. Not both.");
  }
  // Normalize the format once so that validation and dispatch agree on case.
  // Previously the RAW short-circuit compared the raw (possibly lower-case)
  // value while the dispatch below compared the upper-cased value, so e.g.
  // format "raw" was incorrectly subjected to the schema/format pairing check.
  String format =
      configuration.getFormat() == null ? null : configuration.getFormat().toUpperCase();
  if (!"RAW".equals(format)) {
    if ((Strings.isNullOrEmpty(configuration.getSchema())
            && !Strings.isNullOrEmpty(configuration.getFormat()))
        || (!Strings.isNullOrEmpty(configuration.getSchema())
            && Strings.isNullOrEmpty(configuration.getFormat()))) {
      throw new IllegalArgumentException(
          "A schema was provided without a data format (or viceversa). Please provide "
              + "both of these parameters to read from Pubsub, or if you would like to use the Pubsub schema service,"
              + " please leave both of these blank.");
    }
  }
  Schema payloadSchema;
  SerializableFunction<byte[], Row> payloadMapper;
  if ("RAW".equals(format)) {
    // RAW: deliver the message payload as-is in a single BYTES field.
    payloadSchema = Schema.of(Schema.Field.of("payload", Schema.FieldType.BYTES));
    payloadMapper = input -> Row.withSchema(payloadSchema).addValue(input).build();
  } else if ("JSON".equals(format)) {
    payloadSchema = JsonUtils.beamSchemaFromJsonSchema(configuration.getSchema());
    payloadMapper = JsonUtils.getJsonBytesToRowFunction(payloadSchema);
  } else if ("AVRO".equals(format)) {
    payloadSchema =
        AvroUtils.toBeamSchema(
            new org.apache.avro.Schema.Parser().parse(configuration.getSchema()));
    payloadMapper = AvroUtils.getAvroBytesToRowFunction(payloadSchema);
  } else {
    throw new IllegalArgumentException(
        String.format(
            "Format %s not supported. Only supported formats are %s",
            configuration.getFormat(), VALID_FORMATS_STR));
  }
  PubsubReadSchemaTransform transform =
      new PubsubReadSchemaTransform(configuration, payloadSchema, payloadMapper);
  // Test hooks: allow injecting a Pubsub client factory and clock.
  if (configuration.getClientFactory() != null) {
    transform.setClientFactory(configuration.getClientFactory());
  }
  if (configuration.getClock() != null) {
    transform.setClock(configuration.getClock());
  }
  return transform;
}
|
@Test
public void testInvalidConfigInvalidFormat() {
  PCollection­RowTuple begin = PCollectionRowTuple.empty(p);
  // An unknown format must be rejected during transform construction.
  assertThrows(
      IllegalArgumentException.class,
      () ->
          begin.apply(
              new PubsubReadSchemaTransformProvider()
                  .from(
                      PubsubReadSchemaTransformConfiguration.builder()
                          .setSchema(SCHEMA)
                          .setFormat("BadFormat")
                          .setSubscription(SUBSCRIPTION)
                          .build())));
  p.run().waitUntilFinish();
}
|
/**
 * Converts a generated Thrift class into its struct-type description by
 * looking up the class's cached {@code TStructDescriptor}.
 */
public static ThriftType.StructType toStructType(Class<? extends TBase<?, ?>> thriftClass) {
  final TStructDescriptor struct = TStructDescriptor.getInstance(thriftClass);
  return toStructType(struct);
}
|
@Test
public void testToThriftType() throws Exception {
  // Round-trip: struct type -> JSON -> struct type must preserve the JSON form.
  final StructType structType = ThriftSchemaConverter.toStructType(AddressBook.class);
  final String asJson = structType.toJSON();
  final ThriftType roundTripped = StructType.fromJSON(asJson);
  assertEquals(asJson, roundTripped.toJSON());
}
|
/**
 * Determines whether the given path exists by probing its attributes.
 * The root is always assumed to exist; an access-denied response counts as
 * existing (the object is there, just not readable by this user).
 */
@Override
public boolean find(final Path file, final ListProgressListener listener) throws BackgroundException {
    if(file.isRoot()) {
        return true;
    }
    try {
        attributes.find(file, listener);
    }
    catch(NotfoundException e) {
        return false;
    }
    catch(AccessDeniedException e) {
        // Object is inaccessible to current user, but does exist.
    }
    return true;
}
|
@Test
public void testFindFileNotFound() throws Exception {
    // A random (never-created) object key in the bucket must not be found.
    final Path container = new Path("cyberduck-test-eu", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final GoogleStorageFindFeature f = new GoogleStorageFindFeature(session);
    assertFalse(f.find(new Path(container, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file))));
}
|
/**
 * JSON creator: builds an installation request, substituting an empty map
 * when no parameters are supplied so consumers never see {@code null}.
 */
@JsonCreator
public static ContentPackInstallationRequest create(
        @JsonProperty("parameters") @Nullable Map<String, ValueReference> parameters,
        @JsonProperty("comment") @Nullable String comment) {
    final Map<String, ValueReference> parameterMap;
    if (parameters == null) {
        parameterMap = Collections.emptyMap();
    } else {
        parameterMap = parameters;
    }
    return new AutoValue_ContentPackInstallationRequest(parameterMap, comment);
}
|
@Test
public void testDeserialisation() throws IOException {
    // Exercises the @JsonCreator path with every supported parameter value
    // type (string, integer, double, boolean) plus a comment.
    final String json = "{"
            + "\"parameters\":{"
            + "  \"param1\":{\"@type\":\"string\",\"@value\":\"string\"},"
            + "  \"param2\":{\"@type\":\"integer\",\"@value\":42},"
            + "  \"param3\":{\"@type\":\"double\",\"@value\":3.14},"
            + "  \"param4\":{\"@type\":\"boolean\",\"@value\":true}"
            + "},"
            + "\"comment\":\"comment\"" +
            "}";
    final ContentPackInstallationRequest actual = objectMapper.readValue(json, ContentPackInstallationRequest.class);
    final ImmutableMap<String, ValueReference> parameters = ImmutableMap.of(
            "param1", ValueReference.of("string"),
            "param2", ValueReference.of(42),
            "param3", ValueReference.of(3.14d),
            "param4", ValueReference.of(true));
    final ContentPackInstallationRequest expected = ContentPackInstallationRequest.create(parameters, "comment");
    assertThat(actual).isEqualTo(expected);
}
|
/**
 * Builds an error response carrying the error code mapped from the given
 * throwable. The throttle time parameter is unused for this request type.
 */
@Override
public ControlledShutdownResponse getErrorResponse(int throttleTimeMs, Throwable e) {
    ControlledShutdownResponseData data = new ControlledShutdownResponseData()
        .setErrorCode(Errors.forException(e).code());
    return new ControlledShutdownResponse(data);
}
|
@Test
public void testGetErrorResponse() {
    // Every protocol version must map the exception to the matching error code.
    for (short version : CONTROLLED_SHUTDOWN.allVersions()) {
        ControlledShutdownRequest.Builder builder = new ControlledShutdownRequest.Builder(
                new ControlledShutdownRequestData().setBrokerId(1), version);
        ControlledShutdownRequest request = builder.build();
        ControlledShutdownResponse response = request.getErrorResponse(0,
                new ClusterAuthorizationException("Not authorized"));
        assertEquals(Errors.CLUSTER_AUTHORIZATION_FAILED, response.error());
    }
}
|
/**
 * Runs the base validation and, if it passed, additionally rejects non-null
 * string values longer than {@code maxLength} characters.
 */
@Override
public ValidationResult validate(Object value) {
    final ValidationResult baseResult = super.validate(value);
    if (!(baseResult instanceof ValidationResult.ValidationPassed)) {
        return baseResult;
    }
    // Base validation passed, so the value is a (possibly null) String here.
    final String stringValue = (String) value;
    if (stringValue != null && stringValue.length() > maxLength) {
        return new ValidationResult.ValidationFailed("Value is longer than " + maxLength + " characters!");
    }
    return baseResult;
}
|
@Test
public void testValidateNoString() {
    // A non-string value must already fail the base validation.
    assertThat(new LimitedOptionalStringValidator(1).validate(123))
            .isInstanceOf(ValidationResult.ValidationFailed.class);
}
|
/**
 * Builds and runs the data tokenization pipeline: reads rows from the file
 * system or Pub/Sub, tokenizes them via a remote RPC service, and writes the
 * results to the file system, BigQuery, or BigTable. Tokenization failures
 * can optionally be written to a dead-letter location.
 *
 * @param options pipeline options selecting source, sink, and tokenizer
 * @return the handle of the launched pipeline
 * @throws IllegalStateException if no source or no sink is configured
 */
@SuppressWarnings({"dereference.of.nullable", "argument"})
public static PipelineResult run(DataTokenizationOptions options) {
  // The data schema is required; fail fast if it could not be loaded.
  SchemasUtils schema = null;
  try {
    schema = new SchemasUtils(options.getDataSchemaPath(), StandardCharsets.UTF_8);
  } catch (IOException e) {
    LOG.error("Failed to retrieve schema for data.", e);
  }
  checkArgument(schema != null, "Data schema is mandatory.");
  // Create the pipeline
  Pipeline pipeline = Pipeline.create(options);
  // Register the coder for pipeline
  CoderRegistry coderRegistry = pipeline.getCoderRegistry();
  coderRegistry.registerCoderForType(
      FAILSAFE_ELEMENT_CODER.getEncodedTypeDescriptor(), FAILSAFE_ELEMENT_CODER);
  coderRegistry.registerCoderForType(
      RowCoder.of(schema.getBeamSchema()).getEncodedTypeDescriptor(),
      RowCoder.of(schema.getBeamSchema()));
  /*
   * Row/Row Coder for FailsafeElement.
   */
  FailsafeElementCoder<Row, Row> coder =
      FailsafeElementCoder.of(
          RowCoder.of(schema.getBeamSchema()), RowCoder.of(schema.getBeamSchema()));
  coderRegistry.registerCoderForType(coder.getEncodedTypeDescriptor(), coder);
  // Source selection: file system takes precedence over Pub/Sub.
  PCollection<Row> rows;
  if (options.getInputFilePattern() != null) {
    rows = new TokenizationFileSystemIO(options).read(pipeline, schema);
  } else if (options.getPubsubTopic() != null) {
    rows =
        pipeline
            .apply(
                "ReadMessagesFromPubsub",
                PubsubIO.readStrings().fromTopic(options.getPubsubTopic()))
            .apply(
                "TransformToBeamRow",
                new JsonToBeamRow(options.getNonTokenizedDeadLetterPath(), schema));
    if (options.getOutputDirectory() != null) {
      // Windowing is needed for file-based output of an unbounded source.
      rows = rows.apply(Window.into(FixedWindows.of(parseDuration(options.getWindowDuration()))));
    }
  } else {
    throw new IllegalStateException(
        "No source is provided, please configure File System or Pub/Sub");
  }
  /*
  Tokenize data using remote API call
  */
  PCollectionTuple tokenizedRows =
      rows.setRowSchema(schema.getBeamSchema())
          .apply(
              MapElements.into(
                      TypeDescriptors.kvs(TypeDescriptors.integers(), TypeDescriptors.rows()))
                  .via((Row row) -> KV.of(0, row)))
          .setCoder(KvCoder.of(VarIntCoder.of(), RowCoder.of(schema.getBeamSchema())))
          .apply(
              "DsgTokenization",
              RowToTokenizedRow.newBuilder()
                  .setBatchSize(options.getBatchSize())
                  .setRpcURI(options.getRpcUri())
                  .setSchema(schema.getBeamSchema())
                  .setSuccessTag(TOKENIZATION_OUT)
                  .setFailureTag(TOKENIZATION_DEADLETTER_OUT)
                  .build());
  String csvDelimiter = options.getCsvDelimiter();
  if (options.getNonTokenizedDeadLetterPath() != null) {
    /*
    Write tokenization errors to dead-letter sink
    */
    tokenizedRows
        .get(TOKENIZATION_DEADLETTER_OUT)
        .apply(
            "ConvertToCSV",
            MapElements.into(FAILSAFE_ELEMENT_CODER.getEncodedTypeDescriptor())
                .via(
                    (FailsafeElement<Row, Row> fse) ->
                        FailsafeElement.of(
                            new RowToCsv(csvDelimiter).getCsvFromRow(fse.getOriginalPayload()),
                            new RowToCsv(csvDelimiter).getCsvFromRow(fse.getPayload()))))
        .apply(
            "WriteTokenizationErrorsToFS",
            ErrorConverters.WriteErrorsToTextIO.<String, String>newBuilder()
                .setErrorWritePath(options.getNonTokenizedDeadLetterPath())
                .setTranslateFunction(SerializableFunctions.getCsvErrorConverter())
                .build());
  }
  // Sink selection: file system, then BigQuery (with insert-failure
  // dead-lettering), then BigTable.
  if (options.getOutputDirectory() != null) {
    new TokenizationFileSystemIO(options)
        .write(tokenizedRows.get(TOKENIZATION_OUT), schema.getBeamSchema());
  } else if (options.getBigQueryTableName() != null) {
    WriteResult writeResult =
        TokenizationBigQueryIO.write(
            tokenizedRows.get(TOKENIZATION_OUT),
            options.getBigQueryTableName(),
            schema.getBigQuerySchema());
    writeResult
        .getFailedInsertsWithErr()
        .apply(
            "WrapInsertionErrors",
            MapElements.into(FAILSAFE_ELEMENT_CODER.getEncodedTypeDescriptor())
                .via(TokenizationBigQueryIO::wrapBigQueryInsertError))
        .setCoder(FAILSAFE_ELEMENT_CODER)
        .apply(
            "WriteInsertionFailedRecords",
            ErrorConverters.WriteStringMessageErrors.newBuilder()
                .setErrorRecordsTable(
                    options.getBigQueryTableName() + DEFAULT_DEADLETTER_TABLE_SUFFIX)
                .setErrorRecordsTableSchema(DEADLETTER_SCHEMA)
                .build());
  } else if (options.getBigTableInstanceId() != null) {
    new TokenizationBigTableIO(options)
        .write(tokenizedRows.get(TOKENIZATION_OUT), schema.getBeamSchema());
  } else {
    throw new IllegalStateException(
        "No sink is provided, please configure BigQuery or BigTable.");
  }
  return pipeline.run();
}
|
@Test
public void testFileSystemIOReadJSON() throws IOException {
  // Reading the JSON fixture must yield the expected rows.
  PCollection<Row> jsons = fileSystemIORead(JSON_FILE_PATH, FORMAT.JSON);
  assertRows(jsons);
  testPipeline.run();
}
|
/**
 * Deep-compares two flat-record object nodes. Two nulls are equal; one null
 * is not. Nodes with different top-level schema names can never be equal.
 * Otherwise the nodes are aligned on their common schema and compared
 * field by field.
 */
public static boolean equals(FlatRecordTraversalObjectNode left, FlatRecordTraversalObjectNode right) {
    if (left == null) {
        return right == null;
    }
    if (right == null) {
        return false;
    }
    if (!left.getSchema().getName().equals(right.getSchema().getName())) {
        return false;
    }
    // Align both nodes on their shared schema before the field-wise comparison.
    extractCommonObjectSchema(left, right);
    return compare(left, right);
}
|
@Test
public void shouldProvideCollisionGuaranteesForIntegerCollisions_onObjects() {
    // Two records with the same int field sums (15+5 == 13+7) must still be
    // detected as unequal — field-wise comparison must not collapse to a
    // colliding aggregate.
    IntTypeState1 intTypeState1 = new IntTypeState1();
    intTypeState1.intA = 15;
    intTypeState1.intB = 5;
    writer1.reset();
    mapper1.writeFlat(intTypeState1, writer1);
    FlatRecord rec1 = writer1.generateFlatRecord();
    IntTypeState2 intTypeState2 = new IntTypeState2();
    intTypeState2.intA = 13;
    intTypeState2.intB = 7;
    writer2.reset();
    mapper2.writeFlat(intTypeState2, writer2);
    FlatRecord rec2 = writer2.generateFlatRecord();
    FlatRecordTraversalObjectNode leftNode = new FlatRecordTraversalObjectNode(rec1);
    FlatRecordTraversalObjectNode rightNode = new FlatRecordTraversalObjectNode(rec2);
    // Equality must be symmetric.
    assertThat(FlatRecordTraversalObjectNodeEquality.equals(leftNode, rightNode)).isFalse();
    assertThat(FlatRecordTraversalObjectNodeEquality.equals(rightNode, leftNode)).isFalse();
}
|
/**
 * Recursively collects all parameter-marker expressions ({@code ?} placeholders)
 * contained in the given expressions, in traversal order.
 */
public static List<ParameterMarkerExpressionSegment> getParameterMarkerExpressions(final Collection<ExpressionSegment> expressions) {
    List<ParameterMarkerExpressionSegment> markers = new ArrayList<>();
    extractParameterMarkerExpressions(markers, expressions);
    return markers;
}
|
@Test
void assertExtractGetParameterMarkerExpressions() {
    // IF(number + 1 <= ?, 1, -1): only the single nested ? marker inside the
    // first argument should be extracted from the function's parameters.
    FunctionSegment functionSegment = new FunctionSegment(0, 0, "IF", "IF(number + 1 <= ?, 1, -1)");
    BinaryOperationExpression param1 = new BinaryOperationExpression(0, 0,
            new BinaryOperationExpression(0, 0, new ColumnSegment(0, 0, new IdentifierValue("number")), new LiteralExpressionSegment(0, 0, 1), "+", "number + 1"),
            new ParameterMarkerExpressionSegment(0, 0, 2), "<=", "number + 1 <= ?");
    CommonExpressionSegment param2 = new CommonExpressionSegment(0, 0, "1");
    CommonExpressionSegment param3 = new CommonExpressionSegment(0, 0, "-1");
    functionSegment.getParameters().add(param1);
    functionSegment.getParameters().add(param2);
    functionSegment.getParameters().add(param3);
    assertThat(ExpressionExtractUtils.getParameterMarkerExpressions(Collections.singleton(functionSegment)).size(), is(1));
}
|
/**
 * Looks up the negotiated ability status for the given key on the current
 * connection, or returns {@code null} when no connection is established yet.
 */
public AbilityStatus getConnectionAbility(AbilityKey abilityKey) {
    if (currentConnection == null) {
        // Connection not ready — no ability information available.
        return null;
    }
    return currentConnection.getConnectionAbility(abilityKey);
}
|
@Test
void testGetConnectionAbilityWithNullConnection() {
    // Before a connection is established, the ability lookup must return null.
    AbilityStatus abilityStatus = rpcClient.getConnectionAbility(AbilityKey.SERVER_TEST_1);
    assertNull(abilityStatus);
}
|
/** Returns this category's numeric identifier. */
@Override
public int getID() {
    return this.id;
}
|
@Test
public void checkCategoryIDs() {
    // IDs are expected to be assigned sequentially in fixture creation order.
    Assert.assertEquals(0, this.fastFood.getID());
    Assert.assertEquals(1, this.bars.getID());
    Assert.assertEquals(2, this.restaurants.getID());
    Assert.assertEquals(3, this.electronics.getID());
    Assert.assertEquals(4, this.clothes.getID());
    Assert.assertEquals(5, this.shops.getID());
    Assert.assertEquals(6, this.root.getID());
}
|
/**
 * Rewrites a record against a (possibly different) schema: each field of the
 * new schema is copied from the old record or set to its default. For specific
 * records, metadata fields are skipped because the record manages them itself.
 */
public static GenericRecord rewriteRecord(GenericRecord oldRecord, Schema newSchema) {
  GenericRecord newRecord = new GenericData.Record(newSchema);
  boolean isSpecificRecord = oldRecord instanceof SpecificRecordBase;
  for (Schema.Field field : newSchema.getFields()) {
    boolean skipMetadataField = isSpecificRecord && isMetadataField(field.name());
    if (!skipMetadataField) {
      copyOldValueOrSetDefault(oldRecord, newRecord, field);
    }
  }
  return newRecord;
}
|
@Test
public void testNonNullableFieldWithoutDefault() {
  // A record missing a value for a non-nullable, defaultless field in the
  // target schema must fail the rewrite with SchemaCompatibilityException.
  GenericRecord rec = new GenericData.Record(new Schema.Parser().parse(EXAMPLE_SCHEMA));
  rec.put("_row_key", "key1");
  rec.put("non_pii_col", "val1");
  rec.put("pii_col", "val2");
  rec.put("timestamp", 3.5);
  assertThrows(SchemaCompatibilityException.class, () -> HoodieAvroUtils.rewriteRecord(rec, new Schema.Parser().parse(SCHEMA_WITH_NON_NULLABLE_FIELD)));
}
|
/**
 * Upgrades a rule node's configuration to the node class's current version.
 * Falls back to the default configuration when the stored configuration is
 * missing/not an object, or when both the upgrade and a parse with the
 * current config class fail. The configuration version is always bumped to
 * the current version, even when the configuration itself is left unchanged.
 */
public static void upgradeConfigurationAndVersion(RuleNode node, RuleNodeClassInfo nodeInfo) {
    JsonNode oldConfiguration = node.getConfiguration();
    int configurationVersion = node.getConfigurationVersion();
    int currentVersion = nodeInfo.getCurrentVersion();
    var configClass = nodeInfo.getAnnotation().configClazz();
    if (oldConfiguration == null || !oldConfiguration.isObject()) {
        log.warn("Failed to upgrade rule node with id: {} type: {} fromVersion: {} toVersion: {}. " +
                        "Current configuration is null or not a json object. " +
                        "Going to set default configuration ... ",
                node.getId(), node.getType(), configurationVersion, currentVersion);
        node.setConfiguration(getDefaultConfig(configClass));
    } else {
        var tbVersionedNode = getTbVersionedNode(nodeInfo);
        try {
            // Capture the queue name before the upgrade possibly rewrites it.
            JsonNode queueName = oldConfiguration.get(QUEUE_NAME);
            TbPair<Boolean, JsonNode> upgradeResult = tbVersionedNode.upgrade(configurationVersion, oldConfiguration);
            if (upgradeResult.getFirst()) {
                node.setConfiguration(upgradeResult.getSecond());
                // Preserve a textual queue name for node types that carry one.
                if (nodeInfo.getAnnotation().hasQueueName() && queueName != null && queueName.isTextual()) {
                    node.setQueueName(queueName.asText());
                }
            }
        } catch (Exception e) {
            try {
                // If the old configuration still parses with the current config
                // class, keep it as-is (only the version is bumped below).
                JacksonUtil.treeToValue(oldConfiguration, configClass);
            } catch (Exception ex) {
                // NOTE(review): logs the outer upgrade exception `e`, not the
                // parse exception `ex` — confirm `ex` is intentionally dropped.
                log.warn("Failed to upgrade rule node with id: {} type: {} fromVersion: {} toVersion: {}. " +
                                "Going to set default configuration ... ",
                        node.getId(), node.getType(), configurationVersion, currentVersion, e);
                node.setConfiguration(getDefaultConfig(configClass));
            }
        }
    }
    node.setConfigurationVersion(currentVersion);
}
|
@Test
public void testUpgradeRuleNodeConfigurationWithNonNullConfig() throws Exception {
    // GIVEN: a node holding a version-0 configuration and a node class whose
    // current version is 1.
    var node = new RuleNode();
    var nodeInfo = mock(RuleNodeClassInfo.class);
    var nodeConfigClazz = TbGetAttributesNodeConfiguration.class;
    var annotation = mock(org.thingsboard.rule.engine.api.RuleNode.class);
    var defaultConfig = JacksonUtil.valueToTree(nodeConfigClazz.getDeclaredConstructor().newInstance().defaultConfiguration());
    when(nodeInfo.getClazz()).thenReturn((Class) TbGetAttributesNode.class);
    when(nodeInfo.getCurrentVersion()).thenReturn(1);
    when(nodeInfo.getAnnotation()).thenReturn(annotation);
    when(annotation.configClazz()).thenReturn((Class) nodeConfigClazz);
    String versionZeroDefaultConfigStr = "{\"fetchToData\":false," +
            "\"clientAttributeNames\":[]," +
            "\"sharedAttributeNames\":[]," +
            "\"serverAttributeNames\":[]," +
            "\"latestTsKeyNames\":[]," +
            "\"tellFailureIfAbsent\":true," +
            "\"getLatestValueWithTs\":false}";
    node.setConfiguration(JacksonUtil.toJsonNode(versionZeroDefaultConfigStr));
    // WHEN
    TbNodeUpgradeUtils.upgradeConfigurationAndVersion(node, nodeInfo);
    // THEN: the configuration is upgraded to the current default and the
    // version is bumped to 1.
    Assertions.assertThat(node.getConfiguration()).isEqualTo(defaultConfig);
    Assertions.assertThat(node.getConfigurationVersion()).isEqualTo(1);
}
|
/**
 * Fetches the tag with the given metadata name and maps it to a view object.
 * The Mono completes empty when no such tag exists.
 */
@Override
public Mono<TagVo> getByName(String name) {
    return client.fetch(Tag.class, name)
        .map(TagVo::from);
}
|
@Test
void getByName() throws JSONException {
    when(client.fetch(eq(Tag.class), eq("t1")))
        .thenReturn(Mono.just(tag(1)));
    TagVo tagVo = tagFinder.getByName("t1").block();
    // Clear the timestamp so the JSON comparison is deterministic.
    tagVo.getMetadata().setCreationTimestamp(null);
    JSONAssert.assertEquals("""
            {
                "metadata": {
                    "name": "t1",
                    "annotations": {
                        "K1": "V1"
                    }
                },
                "spec": {
                    "displayName": "displayName-1",
                    "slug": "slug-1",
                    "color": "color-1",
                    "cover": "cover-1"
                },
                "status": {
                    "permalink": "permalink-1",
                    "postCount": 2,
                    "visiblePostCount": 1
                },
                "postCount": 1
            }
            """,
        JsonUtils.objectToJson(tagVo),
        true);
}
|
/**
 * Creates a {@code Read} transform preconfigured with the default fetch size
 * and output parallelization enabled; callers supply query, mapper, etc.
 * via the fluent {@code with*} methods.
 */
public static <T> Read<T> read() {
  return new AutoValue_JdbcIO_Read.Builder<T>()
      .setFetchSize(DEFAULT_FETCH_SIZE)
      .setOutputParallelization(true)
      .build();
}
|
@Test
public void testReadWithSingleStringParameter() {
  // A parameterized query bound via a StatementPreparator should return
  // exactly the single matching row.
  PCollection<TestRow> rows =
      pipeline.apply(
          JdbcIO.<TestRow>read()
              .withDataSourceConfiguration(DATA_SOURCE_CONFIGURATION)
              .withQuery(String.format("select name,id from %s where name = ?", READ_TABLE_NAME))
              .withStatementPreparator(
                  preparedStatement -> preparedStatement.setString(1, TestRow.getNameForSeed(1)))
              .withRowMapper(new JdbcTestHelper.CreateTestRowOfNameAndId())
              .withCoder(SerializableCoder.of(TestRow.class)));
  PAssert.thatSingleton(rows.apply("Count All", Count.globally())).isEqualTo(1L);
  Iterable<TestRow> expectedValues = Collections.singletonList(TestRow.fromSeed(1));
  PAssert.that(rows).containsInAnyOrder(expectedValues);
  pipeline.run();
}
|
/**
 * Parses game chat messages for kill counts, personal bests, duel arena
 * streaks, Hallowed Sepulchre times, pets, and Guardians of the Rift games,
 * persisting the extracted values via the set/unset helpers.
 */
@Subscribe
public void onChatMessage(ChatMessage chatMessage)
{
	if (chatMessage.getType() != ChatMessageType.TRADE
		&& chatMessage.getType() != ChatMessageType.GAMEMESSAGE
		&& chatMessage.getType() != ChatMessageType.SPAM
		&& chatMessage.getType() != ChatMessageType.FRIENDSCHATNOTIFICATION)
	{
		return;
	}
	String message = chatMessage.getMessage();
	Matcher matcher = KILLCOUNT_PATTERN.matcher(message);
	if (matcher.find())
	{
		final String boss = matcher.group("boss");
		final int kc = Integer.parseInt(matcher.group("kc"));
		final String pre = matcher.group("pre");
		final String post = matcher.group("post");
		if (Strings.isNullOrEmpty(pre) && Strings.isNullOrEmpty(post))
		{
			unsetKc(boss);
			return;
		}
		String renamedBoss = KILLCOUNT_RENAMES
			.getOrDefault(boss, boss)
			// The config service doesn't support keys with colons in them
			.replace(":", "");
		// Compare by value, not reference: `boss != renamedBoss` only worked
		// by accident when getOrDefault and a no-op replace() happened to
		// return the same String instance.
		if (!boss.equals(renamedBoss))
		{
			// Unset old TOB kc
			unsetKc(boss);
			unsetPb(boss);
			unsetKc(boss.replace(":", "."));
			unsetPb(boss.replace(":", "."));
			// Unset old story mode
			unsetKc("Theatre of Blood Story Mode");
			unsetPb("Theatre of Blood Story Mode");
		}
		setKc(renamedBoss, kc);
		// We either already have the pb, or need to remember the boss for the upcoming pb
		if (lastPb > -1)
		{
			log.debug("Got out-of-order personal best for {}: {}", renamedBoss, lastPb);
			if (renamedBoss.contains("Theatre of Blood"))
			{
				// TOB team size isn't sent in the kill message, but can be computed from varbits
				int tobTeamSize = tobTeamSize();
				lastTeamSize = tobTeamSize == 1 ? "Solo" : (tobTeamSize + " players");
			}
			else if (renamedBoss.contains("Tombs of Amascut"))
			{
				// TOA team size isn't sent in the kill message, but can be computed from varbits
				int toaTeamSize = toaTeamSize();
				lastTeamSize = toaTeamSize == 1 ? "Solo" : (toaTeamSize + " players");
			}
			final double pb = getPb(renamedBoss);
			// If a raid with a team size, only update the pb if it is lower than the existing pb
			// so that the pb is the overall lowest of any team size
			if (lastTeamSize == null || pb == 0 || lastPb < pb)
			{
				log.debug("Setting overall pb (old: {})", pb);
				setPb(renamedBoss, lastPb);
			}
			if (lastTeamSize != null)
			{
				log.debug("Setting team size pb: {}", lastTeamSize);
				setPb(renamedBoss + " " + lastTeamSize, lastPb);
			}
			lastPb = -1;
			lastTeamSize = null;
		}
		else
		{
			lastBossKill = renamedBoss;
			lastBossTime = client.getTickCount();
		}
		return;
	}
	matcher = DUEL_ARENA_WINS_PATTERN.matcher(message);
	if (matcher.find())
	{
		final int oldWins = getKc("Duel Arena Wins");
		final int wins = matcher.group(2).equals("one") ? 1 :
			Integer.parseInt(matcher.group(2).replace(",", ""));
		final String result = matcher.group(1);
		int winningStreak = getKc("Duel Arena Win Streak");
		int losingStreak = getKc("Duel Arena Lose Streak");
		if (result.equals("won") && wins > oldWins)
		{
			losingStreak = 0;
			winningStreak += 1;
		}
		else if (result.equals("were defeated"))
		{
			losingStreak += 1;
			winningStreak = 0;
		}
		else
		{
			log.warn("unrecognized duel streak chat message: {}", message);
		}
		setKc("Duel Arena Wins", wins);
		setKc("Duel Arena Win Streak", winningStreak);
		setKc("Duel Arena Lose Streak", losingStreak);
	}
	matcher = DUEL_ARENA_LOSSES_PATTERN.matcher(message);
	if (matcher.find())
	{
		int losses = matcher.group(1).equals("one") ? 1 :
			Integer.parseInt(matcher.group(1).replace(",", ""));
		setKc("Duel Arena Losses", losses);
	}
	matcher = KILL_DURATION_PATTERN.matcher(message);
	if (matcher.find())
	{
		matchPb(matcher);
	}
	matcher = NEW_PB_PATTERN.matcher(message);
	if (matcher.find())
	{
		matchPb(matcher);
	}
	matcher = RAIDS_PB_PATTERN.matcher(message);
	if (matcher.find())
	{
		matchPb(matcher);
	}
	matcher = RAIDS_DURATION_PATTERN.matcher(message);
	if (matcher.find())
	{
		matchPb(matcher);
	}
	matcher = HS_PB_PATTERN.matcher(message);
	if (matcher.find())
	{
		// Floor time/pb always present; overall time/pb only on a full run.
		int floor = Integer.parseInt(matcher.group("floor"));
		String floortime = matcher.group("floortime");
		String floorpb = matcher.group("floorpb");
		String otime = matcher.group("otime");
		String opb = matcher.group("opb");
		String pb = MoreObjects.firstNonNull(floorpb, floortime);
		setPb("Hallowed Sepulchre Floor " + floor, timeStringToSeconds(pb));
		if (otime != null)
		{
			pb = MoreObjects.firstNonNull(opb, otime);
			setPb("Hallowed Sepulchre", timeStringToSeconds(pb));
		}
	}
	matcher = HS_KC_FLOOR_PATTERN.matcher(message);
	if (matcher.find())
	{
		int floor = Integer.parseInt(matcher.group(1));
		int kc = Integer.parseInt(matcher.group(2).replaceAll(",", ""));
		setKc("Hallowed Sepulchre Floor " + floor, kc);
	}
	matcher = HS_KC_GHC_PATTERN.matcher(message);
	if (matcher.find())
	{
		int kc = Integer.parseInt(matcher.group(1).replaceAll(",", ""));
		setKc("Hallowed Sepulchre", kc);
	}
	matcher = HUNTER_RUMOUR_KC_PATTERN.matcher(message);
	if (matcher.find())
	{
		int kc = Integer.parseInt(matcher.group(1).replaceAll(",", ""));
		setKc("Hunter Rumours", kc);
	}
	// A remembered boss kill only pairs with a pb message on the same tick.
	if (lastBossKill != null && lastBossTime != client.getTickCount())
	{
		lastBossKill = null;
		lastBossTime = -1;
	}
	matcher = COLLECTION_LOG_ITEM_PATTERN.matcher(message);
	if (matcher.find())
	{
		String item = matcher.group(1);
		int petId = findPet(item);
		if (petId != -1)
		{
			final List<Integer> petList = new ArrayList<>(getPetList());
			if (!petList.contains(petId))
			{
				log.debug("New pet added: {}/{}", item, petId);
				petList.add(petId);
				setPetList(petList);
			}
		}
	}
	matcher = GUARDIANS_OF_THE_RIFT_PATTERN.matcher(message);
	if (matcher.find())
	{
		int kc = Integer.parseInt(matcher.group(1));
		setKc("Guardians of the Rift", kc);
	}
}
|
@Test
public void testTemporossNoPb()
{
// Tempoross reports the kill duration and the personal best in the same line;
// the pb must be taken from the "Personal best:" suffix, not the slower duration.
ChatMessage chatMessage = new ChatMessage(null, GAMEMESSAGE, "", "Subdued in <col=ef1020>7:40</col>. Personal best: 5:38.", null, 0);
chatCommandsPlugin.onChatMessage(chatMessage);
chatMessage = new ChatMessage(null, GAMEMESSAGE, "", "Your Tempoross kill count is: <col=ff0000>55</col>.", null, 0);
chatCommandsPlugin.onChatMessage(chatMessage);
verify(configManager).setRSProfileConfiguration("killcount", "tempoross", 55);
verify(configManager).setRSProfileConfiguration("personalbest", "tempoross", 5 * 60 + 38.0);
// Precise times (fractional seconds) must also be parsed into a double pb.
chatMessage = new ChatMessage(null, GAMEMESSAGE, "", "Subdued in <col=ef1020>6:19.80</col>. Personal best: 5:42.60.", null, 0);
chatCommandsPlugin.onChatMessage(chatMessage);
chatMessage = new ChatMessage(null, GAMEMESSAGE, "", "Your Tempoross kill count is: <col=ff0000>55</col>.", null, 0);
chatCommandsPlugin.onChatMessage(chatMessage);
verify(configManager).setRSProfileConfiguration("personalbest", "tempoross", 5 * 60 + 42.6);
}
|
@Override
public TypeSerializer<T> createSerializer(SerializerConfig config) {
// Always serialize with Avro for this type info; the config is intentionally unused.
return new AvroSerializer<>(getTypeClass());
}
|
@Test
void testAvroByDefault() {
// AvroTypeInfo must produce an AvroSerializer regardless of the serializer config.
final TypeSerializer<User> serializer =
new AvroTypeInfo<>(User.class).createSerializer(new SerializerConfigImpl());
assertThat(serializer).isInstanceOf(AvroSerializer.class);
}
|
/** Returns the number of failed getAttributesToNodes retrievals recorded so far. */
public int getAttributesToNodesFailedRetrieved() {
return numGetAttributesToNodesFailedRetrieved.value();
}
|
@Test
public void testGetAttributesToNodesRetrievedFailed() {
// The failed-retrieval counter must grow by exactly one per recorded failure.
long failedBefore = metrics.getAttributesToNodesFailedRetrieved();
badSubCluster.getAttributesToNodesFailed();
long failedAfter = metrics.getAttributesToNodesFailedRetrieved();
Assert.assertEquals(failedBefore + 1, failedAfter);
}
|
// NOTE(review): the factory description mentions Bigint, but the method is generic
// over T — presumably the annotation text predates the generic version; confirm.
@UdafFactory(description = "collect distinct values of a Bigint field into a single Array")
public static <T> Udaf<T, List<T>, List<T>> createCollectSetT() {
return new Collect<>();
}
|
@Test
public void shouldRespectSizeLimitString() {
// The limit config is passed as a String ("1000") and must still cap the set size.
final Udaf<Integer, List<Integer>, List<Integer>> udaf = CollectSetUdaf.createCollectSetT();
((Configurable) udaf).configure(ImmutableMap.of(CollectSetUdaf.LIMIT_CONFIG, "1000"));
List<Integer> runningList = udaf.initialize();
for (int i = 1; i < 2500; i++) {
runningList = udaf.aggregate(i, runningList);
}
// Values up to the limit are kept; anything beyond is silently dropped.
assertThat(runningList, hasSize(1000));
assertThat(runningList, hasItem(1));
assertThat(runningList, hasItem(1000));
assertThat(runningList, not(hasItem(1001)));
}
|
/**
 * Enqueues the runnable and, if no execution loop is currently pending, schedules one.
 *
 * @param runnable the task to run serially after all previously submitted tasks
 */
public void execute(final PrioritizableRunnable runnable) {
_queue.add(runnable);
// Guarantees that the execution loop is scheduled only once to the underlying executor:
// only the submitter that raises _pendingCount from 0 starts the loop.
// The atomic increment here, paired with the loop's decrementAndGet (presumably in the
// drain loop elsewhere in this class — confirm), also establishes a happens-before edge
// so all memory effects of the last Runnable are visible to the next one.
if (_pendingCount.getAndIncrement() == 0) {
tryExecuteLoop();
}
}
|
@Test(dataProvider = "draining")
public void testRejectOnFirstExecute(boolean draining) throws InterruptedException {
// First fill up the underlying executor service so that a subsequent
// submission of an execution loop by the serial executor will fail.
_executorService.execute(new NeverEndingRunnable());
assertFalse(_rejectionHandler.wasExecuted());
_executorService.execute(new NeverEndingRunnable());
assertFalse(_rejectionHandler.wasExecuted());
// Now submit our task to serial executor. The underlying executor should
// throw RejectedExecutionException and the rejectionRunnable should run.
_serialExecutor.execute(new NeverEndingRunnable());
assertTrue(_rejectionHandler.await(5, TimeUnit.SECONDS));
// The handler must have observed the rejection as a RejectedExecutionException.
assertTrue(
"Expected " + _rejectionHandler.getLastError() + " to be instance of "
+ RejectedExecutionException.class.getName(),
_rejectionHandler.getLastError() instanceof RejectedExecutionException);
}
|
/**
 * Splits a logger name into its dot/dollar-separated parts, preserving empty segments.
 *
 * @param loggerName the fully qualified logger name
 * @return the list of name segments, in order
 */
public static List<String> computeNameParts(String loggerName) {
List<String> parts = new ArrayList<String>();
int start = 0;
int separator;
// Walk separator to separator, collecting each segment in between.
while ((separator = getSeparatorIndexOf(loggerName, start)) != -1) {
parts.add(loggerName.substring(start, separator));
start = separator + 1;
}
// Trailing segment after the last separator (or the whole name if none).
parts.add(loggerName.substring(start));
return parts;
}
|
@Test
public void supportNestedClassesAtBeginning() {
// Both '$' (nested class) and '.' must act as separators.
List<String> expected = new ArrayList<String>();
expected.add("foo");
expected.add("Nested");
expected.add("bar");
List<String> actual = LoggerNameUtil.computeNameParts("foo$Nested.bar");
assertEquals(expected, actual);
}
|
/**
 * Rejects requests carrying an invalid Authorization token with 401 when access
 * control is enabled; otherwise passes the request down the chain unchanged.
 */
@Override
public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain)
throws IOException, ServletException {
if (bizConfig.isAdminServiceAccessControlEnabled()) {
HttpServletRequest request = (HttpServletRequest) req;
HttpServletResponse response = (HttpServletResponse) resp;
String token = request.getHeader(HttpHeaders.AUTHORIZATION);
if (!checkAccessToken(token)) {
logger.warn("Invalid access token: {} for uri: {}", token, request.getRequestURI());
response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Unauthorized");
// Stop the chain: an unauthorized request must not reach the servlet.
return;
}
}
chain.doFilter(req, resp);
}
|
@Test
public void testWithAccessControlEnabledWithTokenSpecifiedWithNoTokenPassed() throws Exception {
// Access control on, a valid token configured, but the request carries no
// Authorization header -> the filter must reply 401 and never invoke the chain.
String someValidToken = "someValidToken";
when(bizConfig.isAdminServiceAccessControlEnabled()).thenReturn(true);
when(bizConfig.getAdminServiceAccessTokens()).thenReturn(someValidToken);
when(servletRequest.getHeader(HttpHeaders.AUTHORIZATION)).thenReturn(null);
authenticationFilter.doFilter(servletRequest, servletResponse, filterChain);
verify(bizConfig, times(1)).isAdminServiceAccessControlEnabled();
verify(bizConfig, times(1)).getAdminServiceAccessTokens();
verify(servletResponse, times(1))
.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Unauthorized");
verify(filterChain, never()).doFilter(servletRequest, servletResponse);
}
|
/** Iterates over the path components, including the bucket root when a bucket is set. */
@Override
public Iterator<Path> iterator() {
return new NameIterator(fs, !bucket.isEmpty(), bucketAndObject());
}
|
@Test
public void testIterator() {
// The iterator yields the bucket root first, then each object-name segment.
GcsPath path = GcsPath.fromComponents("bucket", "a/b/c");
Iterator<Path> it = path.iterator();
String[] expectedParts = {"gs://bucket/", "a", "b", "c"};
for (String part : expectedParts) {
assertTrue(it.hasNext());
assertEquals(part, it.next().toString());
}
assertFalse(it.hasNext());
}
|
/**
 * Builds the output row metadata for this step.
 * <p>
 * The step receives no regular input fields in {@code r} — everything arrives on the
 * info streams — so the output row is simply all info rows merged in order. Any value
 * left without a name by the merge is attributed to this step via its origin.
 *
 * @param r    the row metadata to populate (modified in place)
 * @param name the name of this step, used as origin for unnamed fields
 * @param info the row metadata of the info streams feeding this step, may be null
 */
public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // We don't have any input fields here in "r" as they are all info fields.
  // So we just merge in the info fields.
  if ( info != null ) {
    for ( RowMetaInterface infoRow : info ) {
      if ( infoRow != null ) {
        r.mergeRowMeta( infoRow, name );
      }
    }
  }
  // Attribute any value the merge left nameless to this step.
  for ( int i = 0; i < r.size(); i++ ) {
    ValueMetaInterface vmi = r.getValueMeta( i );
    if ( vmi != null && Utils.isEmpty( vmi.getName() ) ) {
      vmi.setOrigin( name );
    }
  }
}
|
@Test
public void testGetFieldsEmptyInput() throws Exception {
// Two info streams share the field name "field1"; getFields must keep both,
// renaming the second occurrence to "field1_1" and re-attributing its origin.
RowMeta outputRowMeta = new RowMeta();
MergeJoinMeta meta = new MergeJoinMeta();
RowMeta inputRow1 = new RowMeta();
ValueMetaInteger field1_row1 = new ValueMetaInteger( "field1" );
field1_row1.setOrigin( "inputStep1" );
inputRow1.addValueMeta( field1_row1 );
ValueMetaString field2_row1 = new ValueMetaString( "field2" );
field2_row1.setOrigin( "inputStep1" );
inputRow1.addValueMeta( field2_row1 );
RowMeta inputRow2 = new RowMeta();
ValueMetaString field1_row2 = new ValueMetaString( "field1" );
field1_row2.setOrigin( "inputStep2" );
inputRow2.addValueMeta( field1_row2 );
ValueMetaString field3_row2 = new ValueMetaString( "field3" );
field3_row2.setOrigin( "inputStep2" );
inputRow2.addValueMeta( field3_row2 );
StepMeta stepMeta = new StepMeta( "Merge", meta );
meta.getFields( outputRowMeta, "Merge Join",
new RowMetaInterface[]{ inputRow1, inputRow2 }, stepMeta, new Variables(), null, null );
assertNotNull( outputRowMeta );
assertFalse( outputRowMeta.isEmpty() );
assertEquals( 4, outputRowMeta.size() );
List<ValueMetaInterface> vmi = outputRowMeta.getValueMetaList();
assertNotNull( vmi );
// Proceed in order
ValueMetaInterface field1 = outputRowMeta.getValueMeta( 0 );
assertNotNull( field1 );
assertEquals( "field1", field1.getName() );
assertTrue( field1 instanceof ValueMetaInteger );
assertEquals( "inputStep1", field1.getOrigin() );
ValueMetaInterface field2 = outputRowMeta.getValueMeta( 1 );
assertNotNull( field2 );
assertEquals( "field2", field2.getName() );
assertTrue( field2 instanceof ValueMetaString );
assertEquals( "inputStep1", field2.getOrigin() );
// The clashing name from the second stream is uniquified and re-originated.
ValueMetaInterface field1_1 = outputRowMeta.getValueMeta( 2 );
assertNotNull( field1_1 );
assertEquals( "field1_1", field1_1.getName() );
assertTrue( field1_1 instanceof ValueMetaString );
assertEquals( "Merge Join", field1_1.getOrigin() );
ValueMetaInterface field3 = outputRowMeta.getValueMeta( 3 );
assertNotNull( field3 );
assertEquals( "field3", field3.getName() );
assertTrue( field3 instanceof ValueMetaString );
assertEquals( "inputStep2", field3.getOrigin() );
}
|
/** Creates or returns the hedge registered under the given name, using the default config. */
@Override
public Hedge hedge(final String name) {
return hedge(name, getDefaultConfig(), emptyMap());
}
|
@Test
public void hedgePositive() {
// The registry caches instances by name: same name yields the same instance,
// a different name yields a different one.
HedgeRegistry registry = HedgeRegistry.builder().withDefaultConfig(config).build();
Hedge hedgeA = registry.hedge("test");
Hedge hedgeB = registry.hedge("test1");
Hedge hedgeAAgain = registry.hedge("test");
then(hedgeA).isEqualTo(hedgeAAgain);
then(hedgeA).isNotEqualTo(hedgeB);
}
|
/**
 * Reads a string field, delegating to the incompatible-field handler so that a
 * type mismatch in the stored field surfaces consistently.
 */
@Override
@Nullable
public String readString(@Nonnull String fieldName) throws IOException {
return readIncompatibleField(fieldName, UTF, super::readString);
}
|
@Test(expected = IncompatibleClassChangeError.class)
public void testReadUTF_IncompatibleClass() throws Exception {
// Reading a byte-typed field as a string must raise IncompatibleClassChangeError.
reader.readString("byte");
}
|
/**
 * Runs a breadth-first traversal from {@code startNode}, invoking {@link #goFurther}
 * on each dequeued node and {@link #checkAdjacent} on each outgoing edge.
 */
@Override
public void start(EdgeExplorer explorer, int startNode) {
// Classic BFS: FIFO of discovered-but-unprocessed nodes plus a visited set.
GHBitSet seen = createBitSet();
SimpleIntDeque queue = new SimpleIntDeque();
seen.add(startNode);
queue.push(startNode);
while (!queue.isEmpty()) {
int node = queue.pop();
// Subclasses may prune the traversal here.
if (!goFurther(node))
continue;
EdgeIterator edges = explorer.setBaseNode(node);
while (edges.next()) {
int adjacent = edges.getAdjNode();
if (checkAdjacent(edges) && !seen.contains(adjacent)) {
seen.add(adjacent);
queue.push(adjacent);
}
}
}
}
|
@Test
public void testBFS() {
// Subclass records visitation order and asserts each node is visited exactly once.
BreadthFirstSearch bfs = new BreadthFirstSearch() {
@Override
protected GHBitSet createBitSet() {
return new GHTBitSet();
}
@Override
public boolean goFurther(int v) {
counter++;
assertFalse(set.contains(v), "v " + v + " is already contained in set. iteration:" + counter);
set.add(v);
list.add(v);
return super.goFurther(v);
}
};
BaseGraph g = new BaseGraph.Builder(1).create();
g.edge(0, 1);
g.edge(0, 2);
g.edge(0, 3);
g.edge(0, 5);
g.edge(1, 6);
g.edge(2, 7);
g.edge(3, 8);
g.edge(4, 8);
g.edge(8, 10);
g.edge(6, 9);
g.edge(9, 10);
g.edge(5, 10);
bfs.start(g.createEdgeExplorer(), 0);
assertTrue(counter > 0);
// Every node of the graph is reachable from 0 and must be visited.
assertEquals(g.getNodes(), counter);
// Exact BFS level order for this explorer's edge iteration order.
assertEquals("[0, 5, 3, 2, 1, 10, 8, 7, 6, 9, 4]", list.toString());
}
|
/** Returns true if the intent carries the launched-from-notification flag (defaults to false). */
public boolean isLaunchIntentOfNotification(Intent intent) {
return intent.getBooleanExtra(LAUNCH_FLAG_KEY_NAME, false);
}
|
@Test
public void isLaunchIntentOfNotification_hasFlagInBundle_returnTrue() throws Exception {
// An intent whose boolean extra is set must be recognized as a notification launch.
Intent intent = mock(Intent.class);
when(intent.getBooleanExtra(eq(LAUNCHED_FROM_NOTIF_BOOLEAN_EXTRA_NAME), eq(false))).thenReturn(true);
final AppLaunchHelper uut = getUUT();
boolean result = uut.isLaunchIntentOfNotification(intent);
assertTrue(result);
}
|
/**
 * Updates an admin user and its post associations, recording an operation log.
 * The password field is deliberately never updated through this method.
 */
@Override
@Transactional(rollbackFor = Exception.class)
@LogRecord(type = SYSTEM_USER_TYPE, subType = SYSTEM_USER_UPDATE_SUB_TYPE, bizNo = "{{#updateReqVO.id}}",
success = SYSTEM_USER_UPDATE_SUCCESS)
public void updateUser(UserSaveReqVO updateReqVO) {
updateReqVO.setPassword(null); // special case: the password is never updated here
// 1. Validate correctness (uniqueness of username/mobile/email, dept and posts exist).
AdminUserDO oldUser = validateUserForCreateOrUpdate(updateReqVO.getId(), updateReqVO.getUsername(),
updateReqVO.getMobile(), updateReqVO.getEmail(), updateReqVO.getDeptId(), updateReqVO.getPostIds());
// 2.1 Update the user record.
AdminUserDO updateObj = BeanUtils.toBean(updateReqVO, AdminUserDO.class);
userMapper.updateById(updateObj);
// 2.2 Update the user's post associations.
updateUserPost(updateReqVO, updateObj);
// 3. Record the operation-log context (old object for diffing, plus the user).
LogRecordContext.putVariable(DiffParseFunction.OLD_OBJECT, BeanUtils.toBean(oldUser, UserSaveReqVO.class));
LogRecordContext.putVariable("user", oldUser);
}
|
@Test
public void testUpdateUser_success() {
// Mock data: existing user with posts {1, 2}.
AdminUserDO dbUser = randomAdminUserDO(o -> o.setPostIds(asSet(1L, 2L)));
userMapper.insert(dbUser);
userPostMapper.insert(new UserPostDO().setUserId(dbUser.getId()).setPostId(1L));
userPostMapper.insert(new UserPostDO().setUserId(dbUser.getId()).setPostId(2L));
// Prepare the update request: change posts to {2, 3}.
UserSaveReqVO reqVO = randomPojo(UserSaveReqVO.class, o -> {
o.setId(dbUser.getId());
o.setSex(RandomUtil.randomEle(SexEnum.values()).getSex());
o.setMobile(randomString());
o.setPostIds(asSet(2L, 3L));
});
// Mock deptService: the target department exists and is enabled.
DeptDO dept = randomPojo(DeptDO.class, o -> {
o.setId(reqVO.getDeptId());
o.setStatus(CommonStatusEnum.ENABLE.getStatus());
});
when(deptService.getDept(eq(dept.getId()))).thenReturn(dept);
// Mock postService: all requested posts exist and are enabled.
List<PostDO> posts = CollectionUtils.convertList(reqVO.getPostIds(), postId ->
randomPojo(PostDO.class, o -> {
o.setId(postId);
o.setStatus(CommonStatusEnum.ENABLE.getStatus());
}));
when(postService.getPostList(eq(reqVO.getPostIds()), isNull())).thenReturn(posts);
// Invoke.
userService.updateUser(reqVO);
// Assert the user record was updated (password excluded by design).
AdminUserDO user = userMapper.selectById(reqVO.getId());
assertPojoEquals(reqVO, user, "password");
// Assert the post associations were replaced with {2, 3}.
List<UserPostDO> userPosts = userPostMapper.selectListByUserId(user.getId());
assertEquals(2L, userPosts.get(0).getPostId());
assertEquals(3L, userPosts.get(1).getPostId());
}
|
/**
 * Builds a JobConf from streaming command-line arguments by running the full
 * StreamJob setup pipeline (init, pre-process, parse, post-process, configure).
 *
 * @param argv the streaming command-line arguments
 * @return the fully configured job configuration
 */
static public JobConf createJob(String[] argv) throws IOException {
StreamJob job = new StreamJob();
job.argv_ = argv;
// The ordering of these calls mirrors StreamJob's own setup sequence.
job.init();
job.preProcessArgs();
job.parseArgv();
job.postProcessArgs();
job.setJobConf();
return job.jobConf_;
}
|
@Test
public void testCreateJob() throws IOException {
JobConf job;
// Minimal required streaming args; input format is varied per case below.
ArrayList<String> dummyArgs = new ArrayList<String>();
dummyArgs.add("-input"); dummyArgs.add("dummy");
dummyArgs.add("-output"); dummyArgs.add("dummy");
dummyArgs.add("-mapper"); dummyArgs.add("dummy");
dummyArgs.add("-reducer"); dummyArgs.add("dummy");
ArrayList<String> args;
args = new ArrayList<String>(dummyArgs);
args.add("-inputformat");
args.add("org.apache.hadoop.mapred.KeyValueTextInputFormat");
job = StreamJob.createJob(args.toArray(new String[] {}));
assertEquals(KeyValueTextInputFormat.class, job.getInputFormat().getClass());
args = new ArrayList<String>(dummyArgs);
args.add("-inputformat");
args.add("org.apache.hadoop.mapred.SequenceFileInputFormat");
job = StreamJob.createJob(args.toArray(new String[] {}));
assertEquals(SequenceFileInputFormat.class, job.getInputFormat().getClass());
// Specifying an -inputreader overrides the input format with StreamInputFormat.
args = new ArrayList<String>(dummyArgs);
args.add("-inputformat");
args.add("org.apache.hadoop.mapred.KeyValueTextInputFormat");
args.add("-inputreader");
args.add("StreamXmlRecordReader,begin=<doc>,end=</doc>");
job = StreamJob.createJob(args.toArray(new String[] {}));
assertEquals(StreamInputFormat.class, job.getInputFormat().getClass());
}
|
/**
 * Validates that the role exists and may be updated.
 *
 * @param id the role id
 * @return the existing role
 * @throws exception ROLE_NOT_EXISTS if absent, ROLE_CAN_NOT_UPDATE_SYSTEM_TYPE_ROLE if built-in
 */
@VisibleForTesting
RoleDO validateRoleForUpdate(Long id) {
RoleDO role = roleMapper.selectById(id);
if (role == null) {
throw exception(ROLE_NOT_EXISTS);
}
// Built-in (system) roles must not be updated.
if (RoleTypeEnum.SYSTEM.getType().equals(role.getType())) {
throw exception(ROLE_CAN_NOT_UPDATE_SYSTEM_TYPE_ROLE);
}
return role;
}
|
@Test
public void testValidateUpdateRole_systemRoleCanNotBeUpdate() {
// A role of SYSTEM type must be rejected by the update validation.
RoleDO roleDO = randomPojo(RoleDO.class, o -> o.setType(RoleTypeEnum.SYSTEM.getType()));
roleMapper.insert(roleDO);
// Prepare parameters.
Long id = roleDO.getId();
assertServiceException(() -> roleService.validateRoleForUpdate(id),
ROLE_CAN_NOT_UPDATE_SYSTEM_TYPE_ROLE);
}
|
/** Delegates the global-white-address update to the ACL plug-in engine. */
@Override
public boolean updateGlobalWhiteAddrsConfig(List<String> globalWhiteAddrsList) {
return aclPlugEngine.updateGlobalWhiteAddrsConfig(globalWhiteAddrsList);
}
|
@Test
public void testUpdateSpecifiedAclFileGlobalWhiteAddrsConfig() throws IOException {
// Copies the ACL fixture into a temp home, exercises three update paths, then
// restores the original file contents and removes the temp directory.
String folder = "update_global_white_addr";
File home = AclTestHelper.copyResources(folder);
System.setProperty("rocketmq.home.dir", home.getAbsolutePath());
System.setProperty("rocketmq.acl.plain.file", "/conf/plain_acl.yml".replace("/", File.separator));
// Back up all three YAML files so they can be restored at the end.
String targetFileName = Joiner.on(File.separator).join(new String[]{home.getAbsolutePath(), "conf", "plain_acl.yml"});
PlainAccessData backUpAclConfigMap = AclUtils.getYamlDataObject(targetFileName, PlainAccessData.class);
String targetFileName1 = Joiner.on(File.separator).join(new String[]{home.getAbsolutePath(), "conf", "acl", "plain_acl.yml"});
PlainAccessData backUpAclConfigMap1 = AclUtils.getYamlDataObject(targetFileName1, PlainAccessData.class);
String targetFileName2 = Joiner.on(File.separator).join(new String[]{home.getAbsolutePath(), "conf", "acl", "empty.yml"});
PlainAccessData backUpAclConfigMap2 = AclUtils.getYamlDataObject(targetFileName2, PlainAccessData.class);
PlainAccessValidator plainAccessValidator = new PlainAccessValidator();
List<String> globalWhiteAddrsList1 = new ArrayList<>();
globalWhiteAddrsList1.add("10.10.154.1");
List<String> globalWhiteAddrsList2 = new ArrayList<>();
globalWhiteAddrsList2.add("10.10.154.2");
List<String> globalWhiteAddrsList3 = new ArrayList<>();
globalWhiteAddrsList3.add("10.10.154.3");
//Test parameter p is null
plainAccessValidator.updateGlobalWhiteAddrsConfig(globalWhiteAddrsList1, null);
String defaultAclFile = targetFileName;
PlainAccessData defaultAclFileMap = AclUtils.getYamlDataObject(defaultAclFile, PlainAccessData.class);
List<String> defaultAclFileGlobalWhiteAddrList = defaultAclFileMap.getGlobalWhiteRemoteAddresses();
Assert.assertTrue(defaultAclFileGlobalWhiteAddrList.contains("10.10.154.1"));
//Test parameter p is not null
plainAccessValidator.updateGlobalWhiteAddrsConfig(globalWhiteAddrsList2, targetFileName1);
PlainAccessData aclFileMap1 = AclUtils.getYamlDataObject(targetFileName1, PlainAccessData.class);
List<String> aclFileGlobalWhiteAddrList1 = aclFileMap1.getGlobalWhiteRemoteAddresses();
Assert.assertTrue(aclFileGlobalWhiteAddrList1.contains("10.10.154.2"));
//Test parameter p is not null, but the file does not have globalWhiteRemoteAddresses
plainAccessValidator.updateGlobalWhiteAddrsConfig(globalWhiteAddrsList3, targetFileName2);
PlainAccessData aclFileMap2 = AclUtils.getYamlDataObject(targetFileName2, PlainAccessData.class);
List<String> aclFileGlobalWhiteAddrList2 = aclFileMap2.getGlobalWhiteRemoteAddresses();
Assert.assertTrue(aclFileGlobalWhiteAddrList2.contains("10.10.154.3"));
// Restore the backed-up files and clean up the temp home directory.
AclUtils.writeDataObject(targetFileName, backUpAclConfigMap);
AclUtils.writeDataObject(targetFileName1, backUpAclConfigMap1);
AclUtils.writeDataObject(targetFileName2, backUpAclConfigMap2);
AclTestHelper.recursiveDelete(home);
}
|
/**
 * Defines a string flag with no value validation: delegates to the full overload
 * with a validator that accepts every value.
 */
public static UnboundStringFlag defineStringFlag(String flagId, String defaultValue, List<String> owners,
String createdAt, String expiresAt, String description,
String modificationEffect, Dimension... dimensions) {
return defineStringFlag(flagId, defaultValue, owners,
createdAt, expiresAt, description,
modificationEffect, value -> true,
dimensions);
}
|
@Test
void testString() {
// Round-trips a string flag through the generic flag test harness.
testGeneric(Flags.defineStringFlag("string-id", "default value", List.of("owner"), "1970-01-01", "2100-01-01", "description",
"modification effect", Dimension.ZONE_ID, Dimension.HOSTNAME),
"other value");
}
|
/** Returns the unqualified name of this type. */
public String getName() {
return type.name;
}
|
@Test
void qualifiedName() {
// Built-in type names are always qualified with their own namespace,
// regardless of the default namespace passed in.
Arrays.stream(Type.values()).forEach((Type t) -> {
final Schema.Name name = new Schema.Name(t.getName(), "space");
assertEquals("space." + t.getName(), name.getQualified("space"));
assertEquals("space." + t.getName(), name.getQualified("otherdefault"));
});
// A plain name is unqualified when the default namespace matches its own.
final Schema.Name name = new Schema.Name("name", "space");
assertEquals("name", name.getQualified("space"));
assertEquals("space.name", name.getQualified("otherdefault"));
final Schema.Name nameInt = new Schema.Name("Int", "space");
assertEquals("Int", nameInt.getQualified("space"));
}
|
/** Creates a labeled statement; the statement must be a USimpleStatement. */
static ULabeledStatement create(CharSequence label, UStatement statement) {
return new AutoValue_ULabeledStatement(StringName.of(label), (USimpleStatement) statement);
}
|
@Test
public void serialization() {
// A labeled statement must survive Java serialization round-tripping.
SerializableTester.reserializeAndAssert(ULabeledStatement.create("foo", USkip.INSTANCE));
}
|
/**
 * Encodes a vtap criterion into JSON: source/destination IP (address part only),
 * protocol as a string, and source/destination transport ports.
 */
@Override
public ObjectNode encode(OpenstackVtapCriterion entity, CodecContext context) {
String protoStr = getProtocolStringFromType(entity.ipProtocol());
return context.mapper().createObjectNode()
.put(SRC_IP, entity.srcIpPrefix().address().toString())
.put(DST_IP, entity.dstIpPrefix().address().toString())
.put(IP_PROTOCOL, protoStr)
.put(SRC_PORT, entity.srcTpPort().toString())
.put(DST_PORT, entity.dstTpPort().toString());
}
|
@Test
public void testOpenstackVtapCriterionEncode() {
// A fully populated criterion must encode into JSON matching every field.
OpenstackVtapCriterion criterion = DefaultOpenstackVtapCriterion.builder()
.srcIpPrefix(IpPrefix.valueOf(IpAddress.valueOf("10.10.10.10"), 32))
.dstIpPrefix(IpPrefix.valueOf(IpAddress.valueOf("20.20.20.20"), 32))
.ipProtocol(getProtocolTypeFromString("tcp"))
.srcTpPort(TpPort.tpPort(8080))
.dstTpPort(TpPort.tpPort(9090))
.build();
ObjectNode criterionJson = vtapCriterionCodec.encode(criterion, context);
assertThat(criterionJson, matchVtapCriterion(criterion));
}
|
/**
 * Records this handle's size under LOCAL_DISK or REMOTE, classified by whether the
 * file URI's scheme matches the not-local pattern.
 */
@Override
public void collectSizeStats(StateObjectSizeStatsCollector collector) {
final StateObjectLocation location;
if (NOT_LOCAL_FILER.matcher(filePath.toUri().toString()).matches()) {
location = StateObjectLocation.REMOTE;
} else {
location = StateObjectLocation.LOCAL_DISK;
}
collector.add(location, getStateSize());
}
|
@Test
void testCollectSizeStats() throws Exception {
final long stateSize = 123L;
// file:// URI -> local disk.
StateObject.StateObjectSizeStatsCollector statsCollector =
StateObject.StateObjectSizeStatsCollector.create();
FileStateHandle handle =
new FileStateHandle(new Path(new URI("file:///home/test.txt")), stateSize);
handle.collectSizeStats(statsCollector);
checkStats(statsCollector, StateObject.StateObjectLocation.LOCAL_DISK, stateSize);
// Scheme-less path -> also local disk.
statsCollector = StateObject.StateObjectSizeStatsCollector.create();
handle = new FileStateHandle(new Path(new URI("/home/test.txt")), stateSize);
handle.collectSizeStats(statsCollector);
checkStats(statsCollector, StateObject.StateObjectLocation.LOCAL_DISK, stateSize);
// Non-local scheme (s3) -> remote.
statsCollector = StateObject.StateObjectSizeStatsCollector.create();
handle = new FileStateHandle(new Path(new URI("s3:///folder/test.txt")), stateSize);
handle.collectSizeStats(statsCollector);
checkStats(statsCollector, StateObject.StateObjectLocation.REMOTE, stateSize);
}
|
/**
 * Precise sharding: treats the single value as a one-point range and returns the
 * first matching target, or null when no target matches.
 */
@Override
public String doSharding(final Collection<String> availableTargetNames, final PreciseShardingValue<Comparable<?>> shardingValue) {
ShardingSpherePreconditions.checkNotNull(shardingValue.getValue(), NullShardingValueException::new);
return doSharding(availableTargetNames, Range.singleton(shardingValue.getValue())).stream().findFirst().orElse(null);
}
|
@Test
void assertPreciseDoShardingByQuarter() {
// Both January and February timestamps fall into the same quarterly table.
assertThat(shardingAlgorithmByQuarter.doSharding(availableTablesForQuarterDataSources,
new PreciseShardingValue<>("t_order", "create_time", DATA_NODE_INFO, "2020-01-01 00:00:01")), is("t_order_202001"));
assertThat(shardingAlgorithmByQuarter.doSharding(availableTablesForQuarterDataSources,
new PreciseShardingValue<>("t_order", "create_time", DATA_NODE_INFO, "2020-02-01 00:00:01")), is("t_order_202001"));
}
|
/**
 * Fetches one message asynchronously, from the local store when this broker owns it,
 * otherwise from the remote broker.
 *
 * @return a triple of (message or null, error description, whether the caller should retry)
 */
public CompletableFuture<Triple<MessageExt, String, Boolean>> getMessageAsync(String topic, long offset, int queueId, String brokerName, boolean deCompressBody) {
MessageStore messageStore = brokerController.getMessageStoreByBrokerName(brokerName);
if (messageStore != null) {
return messageStore.getMessageAsync(innerConsumerGroupName, topic, queueId, offset, 1, null)
.thenApply(result -> {
if (result == null) {
LOG.warn("getMessageResult is null , innerConsumerGroupName {}, topic {}, offset {}, queueId {}", innerConsumerGroupName, topic, offset, queueId);
return Triple.of(null, "getMessageResult is null", false); // local store, so no retry
}
List<MessageExt> list = decodeMsgList(result, deCompressBody);
if (list == null || list.isEmpty()) {
// OFFSET_FOUND_NULL returned by TieredMessageStore indicates exception occurred
boolean needRetry = GetMessageStatus.OFFSET_FOUND_NULL.equals(result.getStatus())
&& messageStore instanceof TieredMessageStore;
LOG.warn("Can not get msg , topic {}, offset {}, queueId {}, needRetry {}, result is {}",
topic, offset, queueId, needRetry, result);
return Triple.of(null, "Can not get msg", needRetry);
}
// Exactly one message was requested; return the first decoded entry.
return Triple.of(list.get(0), "", false);
});
} else {
// Broker not local: fall back to a remote fetch.
return getMessageFromRemoteAsync(topic, offset, queueId, brokerName);
}
}
|
@Test
public void getMessageAsyncTest_localStore_getMessageAsync_null() {
// A null result from the local store must produce (null, error text, no retry).
when(brokerController.getMessageStoreByBrokerName(any())).thenReturn(defaultMessageStore);
when(defaultMessageStore.getMessageAsync(anyString(), anyString(), anyInt(), anyLong(), anyInt(), any()))
.thenReturn(CompletableFuture.completedFuture(null));
Triple<MessageExt, String, Boolean> rst = escapeBridge.getMessageAsync(TEST_TOPIC, 0, DEFAULT_QUEUE_ID, BROKER_NAME, false).join();
Assert.assertNull(rst.getLeft());
Assert.assertEquals("getMessageResult is null", rst.getMiddle());
Assert.assertFalse(rst.getRight()); // no retry
}
|
/** Submits a forwarding objective for the device to the common processing pipeline. */
@Override
public void forward(DeviceId deviceId, ForwardingObjective forwardingObjective) {
process(deviceId, forwardingObjective);
}
|
@Test
public void forwardTimeout() {
// Counts how many objectives fail with INSTALLATIONTIMEOUT.
final AtomicInteger counter = new AtomicInteger(0);
ForwardingObjective fwdTimeout = buildFwdObjective(S1, NID2).add(new ObjectiveContext() {
@Override
public void onError(Objective objective, ObjectiveError error) {
if (Objects.equals(ObjectiveError.INSTALLATIONTIMEOUT, error)) {
counter.incrementAndGet();
}
}
});
List<ForwardingObjective> expectFwdObjsTimeout = Lists.newCopyOnWriteArrayList(
Lists.newArrayList(fwdTimeout, FWD1, FWD2));
// Reduce timeout so the unit test doesn't have to wait many seconds
internalSetup(TIMEOUT_THRESH);
expect(mgr.flowObjectiveStore.getNextGroup(NID1)).andReturn(NGRP1).times(1);
expect(mgr.flowObjectiveStore.getNextGroup(NID2)).andReturn(NGRP2).times(2);
replay(mgr.flowObjectiveStore);
// Force this objective to time out
offset = mgr.objectiveTimeoutMs * 3;
expectFwdObjsTimeout.forEach(fwdObj -> mgr.forward(DEV1, fwdObj));
// Wait for the pipeline operation to complete
int expectedTime = (bound + offset) * 3;
assertAfter(expectedTime, expectedTime * 5, () -> assertEquals(expectFwdObjsTimeout.size(), actualObjs.size()));
assertAfter(expectedTime, expectedTime * 5, () -> assertTrue(counter.get() != 0));
// The timed-out objective was submitted first, so it must appear before FWD1.
assertTrue(actualObjs.indexOf(fwdTimeout) < actualObjs.indexOf(FWD1));
verify(mgr.flowObjectiveStore);
}
|
/** Sets the SASL mechanism used when connecting to the Kafka broker. */
public void setSaslMechanism(String saslMechanism) {
this.saslMechanism = saslMechanism;
}
|
@Test
public void testSetSaslMechanism() {
// The appender's SaslMechanism property must flow into the producer's sasl.mechanism config.
Properties props = getLog4jConfig(false);
props.put("log4j.appender.KAFKA.SaslMechanism", "PLAIN");
PropertyConfigurator.configure(props);
MockKafkaLog4jAppender mockKafkaLog4jAppender = getMockKafkaLog4jAppender();
assertEquals(mockKafkaLog4jAppender.getProducerProperties().getProperty(SaslConfigs.SASL_MECHANISM), "PLAIN");
}
|
/**
 * Returns the union of stream ids stored in the message's streams field and the ids
 * of the attached stream objects. A non-collection field value is treated as empty.
 */
@SuppressWarnings("unchecked")
public Set<String> getStreamIds() {
Collection<String> streamField;
try {
streamField = getFieldAs(Collection.class, FIELD_STREAMS);
} catch (ClassCastException e) {
// The field held something other than a collection; ignore it.
LOG.trace("Couldn't cast {} to List", FIELD_STREAMS, e);
streamField = Collections.emptySet();
}
final Set<String> streamIds = streamField == null ? new HashSet<>(streams.size()) : new HashSet<>(streamField);
for (Stream stream : streams) {
streamIds.add(stream.getId());
}
return streamIds;
}
|
@Test
public void testGetStreamIds() throws Exception {
// An id stored in the "streams" field must be returned by getStreamIds().
message.addField("streams", Lists.newArrayList("stream-id"));
assertThat(message.getStreamIds()).containsOnly("stream-id");
}
|
/**
 * Removes a (specId, partition) entry. Only Pair instances whose first element is an
 * Integer and whose second is null or a StructLike are removable; anything else is a no-op.
 */
@Override
public boolean remove(Object o) {
if (!(o instanceof Pair)) {
return false;
}
Pair<?, ?> pair = (Pair<?, ?>) o;
Object specId = pair.first();
Object partition = pair.second();
boolean removable = specId instanceof Integer
&& (partition == null || partition instanceof StructLike);
return removable && remove((Integer) specId, (StructLike) partition);
}
|
@Test
public void testRemove() {
PartitionSet set = PartitionSet.create(SPECS);
set.add(BY_DATA_SPEC.specId(), Row.of("a"));
set.add(UNPARTITIONED_SPEC.specId(), null);
set.add(UNPARTITIONED_SPEC.specId(), Row.of());
set.add(BY_DATA_CATEGORY_BUCKET_SPEC.specId(), CustomRow.of("a", 1));
assertThat(set).hasSize(4);
// Removal must work across StructLike implementations (Row vs CustomRow) and null.
assertThat(set.remove(BY_DATA_SPEC.specId(), CustomRow.of("a"))).isTrue();
assertThat(set.remove(UNPARTITIONED_SPEC.specId(), null)).isTrue();
assertThat(set.remove(UNPARTITIONED_SPEC.specId(), CustomRow.of())).isTrue();
assertThat(set.remove(BY_DATA_CATEGORY_BUCKET_SPEC.specId(), Row.of("a", 1))).isTrue();
assertThat(set).isEmpty();
}
|
/**
 * Wraps the delegate's overlay data and, when the primary text color is too close in
 * luminance to the background, replaces the text colors with high-contrast defaults.
 * Invalid overlays are only adjusted when mFixInvalid is set.
 */
@Override
public OverlayData createOverlayData(ComponentName remoteApp) {
final OverlayData original = mOriginal.createOverlayData(remoteApp);
if (original.isValid() || mFixInvalid) {
final int backgroundLuminance = luminance(original.getPrimaryColor());
final int diff = backgroundLuminance - luminance(original.getPrimaryTextColor());
// Contrast too low: pick black-on-light or white-on-dark text.
if (mRequiredTextColorDiff > Math.abs(diff)) {
if (backgroundLuminance > GRAY_LUM) {
// closer to white, text will be black
original.setPrimaryTextColor(Color.BLACK);
original.setSecondaryTextColor(Color.DKGRAY);
} else {
original.setPrimaryTextColor(Color.WHITE);
original.setSecondaryTextColor(Color.LTGRAY);
}
}
}
// NOTE: the delegate's object is mutated and returned, not copied.
return original;
}
|
@Test
public void testReturnsOriginalIfInvalid() {
// An invalid overlay (all-gray) is passed through untouched — same instance, still invalid.
OverlayData original = setupOriginal(Color.GRAY, Color.GRAY, Color.GRAY);
final OverlayData fixed = mUnderTest.createOverlayData(mTestComponent);
Assert.assertSame(original, fixed);
Assert.assertFalse(fixed.isValid());
}
|
/** Generates a globally unique identifier for an RPC call. */
public static String createRpcId() {
UUID uniqueId = UUID.randomUUID();
return uniqueId.toString();
}
|
@Test
public void createRpcId() throws Exception {
// Two generated ids must each be non-empty and distinct from one another.
String idA = IdUtils.createRpcId();
String idB = IdUtils.createRpcId();
assertTrue(!idA.isEmpty());
assertTrue(!idB.isEmpty());
assertNotEquals(idA, idB);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.