| focal_method (string, lengths 13–60.9k) | test_case (string, lengths 25–109k) |
|---|---|
@SuppressWarnings("unchecked")
public static <K> Set<K> toPropertySet(String key, List<?> list) {
Set<K> set = new HashSet<>();
if (CollectionUtils.isEmpty(list)) { // guard against callers passing in an empty list
return set;
}
try {
Class<?> clazz = list.get(0).getClass();
Field field = deepFindField(clazz, key);
if (field == null) {
throw new IllegalArgumentException("Could not find the key");
}
field.setAccessible(true);
for (Object o : list) {
set.add((K)field.get(o));
}
} catch (Exception e) {
throw new BeanUtilsException(e);
}
return set;
}
|
@Test(expected = BeanUtilsException.class)
public void testToPropertySetNotEmptyThrowsEx() {
someAnotherList.add(new KeyClass());
assertNotNull(BeanUtils.toPropertySet("wrongKey", someAnotherList));
}
|
public static DataSchema buildSkeletonSchema(DataSchema schema) throws CloneNotSupportedException
{
switch (schema.getType())
{
case RECORD:
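// Copy only the name, aliases, doc and properties; fields are deliberately left out of the skeleton.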
RecordDataSchema newRecordSchema = new RecordDataSchema(new Name(((RecordDataSchema) schema).getFullName()),
RecordDataSchema.RecordType.RECORD);
RecordDataSchema originalRecordSchema = (RecordDataSchema) schema;
if (originalRecordSchema.getAliases() != null)
{
newRecordSchema.setAliases(originalRecordSchema.getAliases());
}
if (originalRecordSchema.getDoc() != null)
{
newRecordSchema.setDoc(originalRecordSchema.getDoc());
}
if (originalRecordSchema.getProperties() != null)
{
newRecordSchema.setProperties(originalRecordSchema.getProperties());
}
return newRecordSchema;
case UNION:
UnionDataSchema newUnionDataSchema = new UnionDataSchema();
UnionDataSchema unionDataSchema = (UnionDataSchema) schema;
if (unionDataSchema.getProperties() != null)
{
newUnionDataSchema.setProperties(unionDataSchema.getProperties());
}
return newUnionDataSchema;
case TYPEREF:
TyperefDataSchema originalTypeRefSchema = (TyperefDataSchema) schema;
TyperefDataSchema newTypeRefSchema = new TyperefDataSchema(new Name(originalTypeRefSchema.getFullName()));
if (originalTypeRefSchema.getProperties() != null)
{
newTypeRefSchema.setProperties(originalTypeRefSchema.getProperties());
}
if (originalTypeRefSchema.getDoc() != null)
{
newTypeRefSchema.setDoc(originalTypeRefSchema.getDoc());
}
if (originalTypeRefSchema.getAliases() != null)
{
newTypeRefSchema.setAliases(originalTypeRefSchema.getAliases());
}
return newTypeRefSchema;
case ARRAY:
ArrayDataSchema originalArrayDataSchema = (ArrayDataSchema) schema;
//Set null item types for this skeleton
ArrayDataSchema newArrayDataSchema = new ArrayDataSchema(DataSchemaConstants.NULL_DATA_SCHEMA);
if (originalArrayDataSchema.getProperties() != null)
{
newArrayDataSchema.setProperties(originalArrayDataSchema.getProperties());
}
return newArrayDataSchema;
case MAP:
MapDataSchema originalMapDataSchema = (MapDataSchema) schema;
//Set null value types for this skeleton
MapDataSchema newMapDataSchema = new MapDataSchema(DataSchemaConstants.NULL_DATA_SCHEMA);
if (originalMapDataSchema.getProperties() != null)
{
newMapDataSchema.setProperties(originalMapDataSchema.getProperties());
}
return newMapDataSchema;
case FIXED:
case ENUM:
default:
// Primitive types, FIXED, ENUM: using schema's clone method
return schema.clone();
}
}
|
@Test
public void testBuildSkeletonSchema() throws Exception
{
DataSchema oldSchema = null;
RecordDataSchema fooSchema = (RecordDataSchema) TestUtil.dataSchemaFromString(fooSchemaText);
// Test Record
RecordDataSchema newRecordSchema = (RecordDataSchema) CopySchemaUtil.buildSkeletonSchema(fooSchema);
assert((newRecordSchema.getFields().size() == 0) && Objects.equals(newRecordSchema.getDoc(), fooSchema.getDoc())
&& Objects.equals(newRecordSchema.getProperties(), fooSchema.getProperties())
&& Objects.equals(newRecordSchema.getAliases(), fooSchema.getAliases()));
// Test TypeRef
oldSchema = fooSchema.getField("typeRefField").getType();
TyperefDataSchema newTypeRefDataSchema = (TyperefDataSchema) CopySchemaUtil.buildSkeletonSchema(oldSchema);
assert( Objects.equals(newTypeRefDataSchema.getDoc(), ((TyperefDataSchema) oldSchema).getDoc())
&& Objects.equals(newTypeRefDataSchema.getProperties(), oldSchema.getProperties())
&& Objects.equals(newTypeRefDataSchema.getAliases(), ((TyperefDataSchema)oldSchema).getAliases()));
// Test Union
oldSchema = fooSchema.getField("unionField").getType();
UnionDataSchema newUnionDataSchema = (UnionDataSchema) CopySchemaUtil.buildSkeletonSchema(oldSchema);
assert(newUnionDataSchema.getMembers().size() == 0 && Objects.equals(newUnionDataSchema.getProperties(), oldSchema.getProperties()));
// Test map
oldSchema = fooSchema.getField("mapField").getType();
MapDataSchema mapDataSchema = (MapDataSchema) CopySchemaUtil.buildSkeletonSchema(oldSchema);
assert (Objects.equals(mapDataSchema.getProperties(), oldSchema.getProperties()) &&
Objects.equals(mapDataSchema.getValues(), DataSchemaConstants.NULL_DATA_SCHEMA));
// Test array
oldSchema = fooSchema.getField("arrayField").getType();
ArrayDataSchema arrayDataSchema = (ArrayDataSchema) CopySchemaUtil.buildSkeletonSchema(oldSchema);
assert (Objects.equals(arrayDataSchema.getProperties(), oldSchema.getProperties()) &&
Objects.equals(arrayDataSchema.getItems(), DataSchemaConstants.NULL_DATA_SCHEMA));
// Test ENUM
oldSchema = fooSchema.getField("enumField").getType();
EnumDataSchema enumDataSchema = (EnumDataSchema) CopySchemaUtil.buildSkeletonSchema(oldSchema);
Assert.assertEquals(enumDataSchema, oldSchema);
// Test FIXED
oldSchema = fooSchema.getField("fixedField").getType();
FixedDataSchema fixedDataSchema = (FixedDataSchema) CopySchemaUtil.buildSkeletonSchema(oldSchema);
Assert.assertEquals(fixedDataSchema, oldSchema);
// Test primitive
oldSchema = fooSchema.getField("intField").getType();
PrimitiveDataSchema primitiveDataSchema = (PrimitiveDataSchema) CopySchemaUtil.buildSkeletonSchema(oldSchema);
Assert.assertEquals(primitiveDataSchema, oldSchema);
}
|
@Override
@Deprecated
public <KR, VR> KStream<KR, VR> transform(final org.apache.kafka.streams.kstream.TransformerSupplier<? super K, ? super V, KeyValue<KR, VR>> transformerSupplier,
final String... stateStoreNames) {
Objects.requireNonNull(transformerSupplier, "transformerSupplier can't be null");
final String name = builder.newProcessorName(TRANSFORM_NAME);
return flatTransform(new TransformerSupplierAdapter<>(transformerSupplier), Named.as(name), stateStoreNames);
}
|
@Test
@SuppressWarnings("deprecation")
public void shouldNotAllowNullTransformerSupplierOnTransformWithNamed() {
final NullPointerException exception = assertThrows(
NullPointerException.class,
() -> testStream.transform(null, Named.as("transformer")));
assertThat(exception.getMessage(), equalTo("transformerSupplier can't be null"));
}
|
@Override
public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
super.onDataReceived(device, data);
if (data.size() < 3) {
onInvalidDataReceived(device, data);
return;
}
final int opCode = data.getIntValue(Data.FORMAT_UINT8, 0);
if (opCode != OP_CODE_NUMBER_OF_STORED_RECORDS_RESPONSE && opCode != OP_CODE_RESPONSE_CODE) {
onInvalidDataReceived(device, data);
return;
}
final int operator = data.getIntValue(Data.FORMAT_UINT8, 1);
if (operator != OPERATOR_NULL) {
onInvalidDataReceived(device, data);
return;
}
switch (opCode) {
case OP_CODE_NUMBER_OF_STORED_RECORDS_RESPONSE -> {
// Field size is defined per service
int numberOfRecords;
switch (data.size() - 2) {
case 1 -> numberOfRecords = data.getIntValue(Data.FORMAT_UINT8, 2);
case 2 -> numberOfRecords = data.getIntValue(Data.FORMAT_UINT16_LE, 2);
case 4 -> numberOfRecords = data.getIntValue(Data.FORMAT_UINT32_LE, 2);
default -> {
// Other field sizes are not supported
onInvalidDataReceived(device, data);
return;
}
}
onNumberOfRecordsReceived(device, numberOfRecords);
}
case OP_CODE_RESPONSE_CODE -> {
if (data.size() != 4) {
onInvalidDataReceived(device, data);
return;
}
final int requestCode = data.getIntValue(Data.FORMAT_UINT8, 2);
final int responseCode = data.getIntValue(Data.FORMAT_UINT8, 3);
if (responseCode == RACP_RESPONSE_SUCCESS) {
onRecordAccessOperationCompleted(device, requestCode);
} else if (responseCode == RACP_ERROR_NO_RECORDS_FOUND) {
onRecordAccessOperationCompletedWithNoRecordsFound(device, requestCode);
} else {
onRecordAccessOperationError(device, requestCode, responseCode);
}
}
}
}
|
@Test
public void onRecordAccessOperationError_procedureNotCompleted() {
final Data data = new Data(new byte[] { 6, 0, 2, 8 });
callback.onDataReceived(null, data);
assertEquals(8, error);
}
|
@Override
public void monitor(RedisServer master) {
connection.sync(RedisCommands.SENTINEL_MONITOR, master.getName(), master.getHost(),
master.getPort().intValue(), master.getQuorum().intValue());
}
|
@Test
public void testMonitor() {
Collection<RedisServer> masters = connection.masters();
RedisServer master = masters.iterator().next();
master.setName(master.getName() + ":");
connection.monitor(master);
}
|
@Override
public ScalarOperator visitArraySlice(ArraySliceOperator array, Void context) {
return shuttleIfUpdate(array);
}
|
@Test
void visitArraySlice() {
ArrayOperator arrayOperator = new ArrayOperator(ARRAY_TINYINT, true,
Lists.newArrayList(ConstantOperator.createInt(3), ConstantOperator.createInt(10)));
ConstantOperator offset = ConstantOperator.createInt(0);
ConstantOperator length = ConstantOperator.createInt(1);
ArraySliceOperator operator = new ArraySliceOperator(TINYINT, Lists.newArrayList(arrayOperator, offset, length));
{
ScalarOperator newOperator = shuttle.visitArraySlice(operator, null);
assertEquals(operator, newOperator);
}
{
ScalarOperator newOperator = shuttle2.visitArraySlice(operator, null);
assertEquals(operator, newOperator);
}
}
|
public static <T> List<List<T>> diffList(Collection<T> oldList, Collection<T> newList,
BiFunction<T, T, Boolean> sameFunc) {
List<T> createList = new LinkedList<>(newList); // assume everything is newly created by default; matched entries are removed below
List<T> updateList = new ArrayList<>();
List<T> deleteList = new ArrayList<>();
// Iterate with oldList as the primary source to work out updateList and deleteList
for (T oldObj : oldList) {
// 1. Look for a matching element
T foundObj = null;
for (Iterator<T> iterator = createList.iterator(); iterator.hasNext(); ) {
T newObj = iterator.next();
// 1.1 No match, skip it
if (!sameFunc.apply(oldObj, newObj)) {
continue;
}
// 1.2 Match found: remove it from createList and stop searching
iterator.remove();
foundObj = newObj;
break;
}
// 2. A match goes into updateList; no match means the old element goes into deleteList
if (foundObj != null) {
updateList.add(foundObj);
} else {
deleteList.add(oldObj);
}
}
return asList(createList, updateList, deleteList);
}
|
@Test
public void testDiffList() {
// prepare parameters
Collection<Dog> oldList = Arrays.asList(
new Dog(1, "花花", "hh"),
new Dog(2, "旺财", "wc")
);
Collection<Dog> newList = Arrays.asList(
new Dog(null, "花花2", "hh"),
new Dog(null, "小白", "xb")
);
BiFunction<Dog, Dog, Boolean> sameFunc = (oldObj, newObj) -> {
boolean same = oldObj.getCode().equals(newObj.getCode());
// if they match, copy the id over so the update can be applied later
if (same) {
newObj.setId(oldObj.getId());
}
return same;
};
// invoke
List<List<Dog>> result = CollectionUtils.diffList(oldList, newList, sameFunc);
// assert
assertEquals(result.size(), 3);
// assert create list
assertEquals(result.get(0).size(), 1);
assertEquals(result.get(0).get(0), new Dog(null, "小白", "xb"));
// assert update list
assertEquals(result.get(1).size(), 1);
assertEquals(result.get(1).get(0), new Dog(1, "花花2", "hh"));
// assert delete list
assertEquals(result.get(2).size(), 1);
assertEquals(result.get(2).get(0), new Dog(2, "旺财", "wc"));
}
|
public static URL getEmptyUrl(String service, String category) {
String group = null;
String version = null;
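// service is expected in the form [group/]interface[:version]; split out the optional group and version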
int i = service.indexOf('/');
if (i > 0) {
group = service.substring(0, i);
service = service.substring(i + 1);
}
i = service.lastIndexOf(':');
if (i > 0) {
version = service.substring(i + 1);
service = service.substring(0, i);
}
return URL.valueOf(EMPTY_PROTOCOL + "://0.0.0.0/" + service + URL_PARAM_STARTING_SYMBOL
+ CATEGORY_KEY + "=" + category
+ (group == null ? "" : "&" + GROUP_KEY + "=" + group)
+ (version == null ? "" : "&" + VERSION_KEY + "=" + version));
}
|
@Test
void testGetEmptyUrl() throws Exception {
URL url = UrlUtils.getEmptyUrl("dubbo/a.b.c.Foo:1.0.0", "test");
assertThat(url.toFullString(), equalTo("empty://0.0.0.0/a.b.c.Foo?category=test&group=dubbo&version=1.0.0"));
}
|
String getUtcDate(Date date) {
// package-private for test.
Calendar calendar = new GregorianCalendar(TimeZone.getTimeZone("GMT"));
calendar.setTime(date);
// This makes sure the date is formatted as the xs:dateTime type.
return DatatypeConverter.printDateTime(calendar);
}
|
@Test
public void testUtcDate() {
IQEntityTimeHandler iqEntityTimeHandler = new IQEntityTimeHandler();
Date date = new Date();
Calendar calendar = new GregorianCalendar();
calendar.setTime(date);
calendar.setTimeZone(TimeZone.getTimeZone("GMT"));
assertEquals(iqEntityTimeHandler.getUtcDate(date), DatatypeConverter.printDateTime(calendar));
}
|
@CheckForNull
public Set<ChangedFile> branchChangedFilesWithFileMovementDetection(String targetBranchName, Path rootBaseDir) {
try (Repository repo = buildRepo(rootBaseDir)) {
Ref targetRef = resolveTargetRef(targetBranchName, repo);
if (targetRef == null) {
addWarningTargetNotFound(targetBranchName);
return null;
}
if (isDiffAlgoInvalid(repo.getConfig())) {
LOG.warn("The diff algorithm configured in git is not supported. "
+ "No information regarding changes in the branch will be collected, which can lead to unexpected results.");
return null;
}
Optional<RevCommit> mergeBaseCommit = findMergeBase(repo, targetRef);
if (mergeBaseCommit.isEmpty()) {
LOG.warn(composeNoMergeBaseFoundWarning(targetRef.getName()));
return null;
}
AbstractTreeIterator mergeBaseTree = prepareTreeParser(repo, mergeBaseCommit.get());
// we compare a commit with HEAD, so no point ignoring line endings (it will be whatever is committed)
try (Git git = newGit(repo)) {
List<DiffEntry> diffEntries = git.diff()
.setShowNameAndStatusOnly(true)
.setOldTree(mergeBaseTree)
.setNewTree(prepareNewTree(repo))
.call();
return computeChangedFiles(repo, diffEntries);
}
} catch (IOException | GitAPIException e) {
LOG.warn(e.getMessage(), e);
}
return null;
}
|
@Test
public void branchChangedFilesWithFileMovementDetection_correctly_detects_several_file_moves_in_pull_request_base_branch() throws IOException, GitAPIException {
String fileM1 = "file-m1.xoo";
String newFileM1 = "new-file-m1.xoo";
String fileM2 = "file-m2.xoo";
String newFileM2 = "new-file-m2.xoo";
Path newFileM1AbsolutPath = worktree.resolve(newFileM1);
Path newFileM2AbsolutPath = worktree.resolve(newFileM2);
createAndCommitFile(fileM1);
createAndCommitFile(fileM2);
createBranch();
editLineOfFile(fileM1, 1);
commit(fileM1);
moveAndCommitFile(fileM1, newFileM1);
moveAndCommitFile(fileM2, newFileM2);
assertThat(newScmProvider().branchChangedFilesWithFileMovementDetection("master", worktree))
.extracting(ChangedFile::getAbsolutFilePath, ChangedFile::getOldRelativeFilePathReference)
.containsExactlyInAnyOrder(
tuple(newFileM1AbsolutPath, fileM1),
tuple(newFileM2AbsolutPath, fileM2)
);
}
|
public void ensureActiveGroup() {
while (!ensureActiveGroup(time.timer(Long.MAX_VALUE))) {
log.warn("still waiting to ensure active group");
}
}
|
@Test
public void testWakeupAfterJoinGroupSentExternalCompletion() throws Exception {
setupCoordinator();
mockClient.prepareResponse(groupCoordinatorResponse(node, Errors.NONE));
mockClient.prepareResponse(new MockClient.RequestMatcher() {
private int invocations = 0;
@Override
public boolean matches(AbstractRequest body) {
invocations++;
boolean isJoinGroupRequest = body instanceof JoinGroupRequest;
if (isJoinGroupRequest && invocations == 1)
// simulate wakeup before the request returns
throw new WakeupException();
return isJoinGroupRequest;
}
}, joinGroupFollowerResponse(1, memberId, leaderId, Errors.NONE));
mockClient.prepareResponse(syncGroupResponse(Errors.NONE));
AtomicBoolean heartbeatReceived = prepareFirstHeartbeat();
try {
coordinator.ensureActiveGroup();
fail("Should have woken up from ensureActiveGroup()");
} catch (WakeupException ignored) {
}
assertEquals(1, coordinator.onJoinPrepareInvokes);
assertEquals(0, coordinator.onJoinCompleteInvokes);
assertFalse(heartbeatReceived.get());
// the join group completes in this poll()
consumerClient.poll(mockTime.timer(0));
coordinator.ensureActiveGroup();
assertEquals(1, coordinator.onJoinPrepareInvokes);
assertEquals(1, coordinator.onJoinCompleteInvokes);
awaitFirstHeartbeat(heartbeatReceived);
}
|
@Override
public void pluginJarAdded(BundleOrPluginFileDetails bundleOrPluginFileDetails) {
final GoPluginBundleDescriptor bundleDescriptor = goPluginBundleDescriptorBuilder.build(bundleOrPluginFileDetails);
try {
LOGGER.info("Plugin load starting: {}", bundleOrPluginFileDetails.file());
validateIfExternalPluginRemovingBundledPlugin(bundleDescriptor);
validatePluginCompatibilityWithCurrentOS(bundleDescriptor);
validatePluginCompatibilityWithGoCD(bundleDescriptor);
addPlugin(bundleOrPluginFileDetails, bundleDescriptor);
} finally {
LOGGER.info("Plugin load finished: {}", bundleOrPluginFileDetails.file());
}
}
|
@Test
void shouldFailToLoadAPluginWhenActivatorJarIsNotAvailable() throws Exception {
systemEnvironment = mock(SystemEnvironment.class);
when(systemEnvironment.get(PLUGIN_ACTIVATOR_JAR_PATH)).thenReturn("some-path-which-does-not-exist.jar");
File pluginJarFile = new File(pluginWorkDir, PLUGIN_JAR_FILE_NAME);
File bundleDirectory = new File(bundleDir, PLUGIN_JAR_FILE_NAME);
copyPluginToTheDirectory(pluginWorkDir, PLUGIN_JAR_FILE_NAME);
String pluginJarFileLocation = pluginJarFile.getAbsolutePath();
GoPluginDescriptor descriptor = GoPluginDescriptor.builder()
.id("some.old.id")
.bundleLocation(bundleDirectory)
.pluginJarFileLocation(pluginJarFileLocation)
.isBundledPlugin(true)
.build();
when(goPluginBundleDescriptorBuilder.build(new BundleOrPluginFileDetails(pluginJarFile, true, pluginWorkDir))).thenReturn(new GoPluginBundleDescriptor(descriptor));
listener = new DefaultPluginJarChangeListener(registry, osgiManifestGenerator, pluginLoader, goPluginBundleDescriptorBuilder, systemEnvironment);
assertThatCode(() -> listener.pluginJarAdded(new BundleOrPluginFileDetails(pluginJarFile, true, pluginWorkDir)))
.isInstanceOf(RuntimeException.class);
}
|
@VisibleForTesting
int createTimelineSchema(String[] args, Configuration conf) throws Exception {
String schemaCreatorClassName = conf.get(
YarnConfiguration.TIMELINE_SERVICE_SCHEMA_CREATOR_CLASS,
YarnConfiguration.DEFAULT_TIMELINE_SERVICE_SCHEMA_CREATOR_CLASS);
LOG.info("Using {} for creating Timeline Service Schema ",
schemaCreatorClassName);
try {
Class<?> schemaCreatorClass = Class.forName(schemaCreatorClassName);
if (SchemaCreator.class.isAssignableFrom(schemaCreatorClass)) {
SchemaCreator schemaCreator = (SchemaCreator) ReflectionUtils
.newInstance(schemaCreatorClass, conf);
schemaCreator.createTimelineSchema(args);
return 0;
} else {
throw new YarnRuntimeException("Class: " + schemaCreatorClassName
+ " not instance of " + SchemaCreator.class.getCanonicalName());
}
} catch (ClassNotFoundException e) {
throw new YarnRuntimeException("Could not instantiate TimelineReader: "
+ schemaCreatorClassName, e);
}
}
|
@Test
void testTimelineSchemaCreation() throws Exception {
Configuration conf = new Configuration();
conf.set(YarnConfiguration.TIMELINE_SERVICE_SCHEMA_CREATOR_CLASS,
"org.apache.hadoop.yarn.server.timelineservice.storage" +
".DummyTimelineSchemaCreator");
TimelineSchemaCreator timelineSchemaCreator = new TimelineSchemaCreator();
assertEquals(0, timelineSchemaCreator
.createTimelineSchema(new String[]{}, conf));
}
|
public boolean appliesTo(String pipelineName, String stageName) {
boolean pipelineMatches = this.pipelineName.equals(pipelineName) ||
this.pipelineName.equals(GoConstants.ANY_PIPELINE);
boolean stageMatches = this.stageName.equals(stageName) ||
this.stageName.equals(GoConstants.ANY_STAGE);
return pipelineMatches && stageMatches;
}
|
@Test
void specificStageShouldApplyToAnyPipeline() {
NotificationFilter filter = new NotificationFilter(GoConstants.ANY_PIPELINE, "dev", StageEvent.Breaks, false);
assertThat(filter.appliesTo("cruise1", "dev")).isTrue();
assertThat(filter.appliesTo("cruise2", "dev")).isTrue();
assertThat(filter.appliesTo("cruise2", "not-dev")).isFalse();
}
|
public CsvReader includeFields(boolean... fields) {
if (fields == null || fields.length == 0) {
throw new IllegalArgumentException(
"The set of included fields must not be null or empty.");
}
int lastTruePos = -1;
for (int i = 0; i < fields.length; i++) {
if (fields[i]) {
lastTruePos = i;
}
}
if (lastTruePos == -1) {
throw new IllegalArgumentException(
"The description of fields to parse excluded all fields. At least one fields must be included.");
}
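// Trim trailing false entries so the mask ends at the last included field.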
if (lastTruePos == fields.length - 1) {
this.includedMask = fields;
} else {
this.includedMask = Arrays.copyOfRange(fields, 0, lastTruePos + 1);
}
return this;
}
|
@Test
void testIncludeFieldsDense() {
CsvReader reader = getCsvReader();
reader.includeFields(true, true, true);
assertThat(reader.includedMask).containsExactly(true, true, true);
reader = getCsvReader();
reader.includeFields("ttt");
assertThat(reader.includedMask).containsExactly(true, true, true);
reader = getCsvReader();
reader.includeFields("TTT");
assertThat(reader.includedMask).containsExactly(true, true, true);
reader = getCsvReader();
reader.includeFields("111");
assertThat(reader.includedMask).containsExactly(true, true, true);
reader = getCsvReader();
reader.includeFields(0x7L);
assertThat(reader.includedMask).containsExactly(true, true, true);
}
|
public Map<String, List<Pair<PipelineConfig, PipelineConfigs>>> getPackageUsageInPipelines() {
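// Double-checked locking: build the package-id -> pipelines map lazily, at most once.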
if (packageToPipelineMap == null) {
synchronized (this) {
if (packageToPipelineMap == null) {
packageToPipelineMap = new HashMap<>();
for (PipelineConfigs pipelineConfigs : this) {
for (PipelineConfig pipelineConfig : pipelineConfigs) {
for (PackageMaterialConfig packageMaterialConfig : pipelineConfig.packageMaterialConfigs()) {
String packageId = packageMaterialConfig.getPackageId();
if (!packageToPipelineMap.containsKey(packageId)) {
packageToPipelineMap.put(packageId, new ArrayList<>());
}
packageToPipelineMap.get(packageId).add(new Pair<>(pipelineConfig, pipelineConfigs));
}
}
}
}
}
}
return packageToPipelineMap;
}
|
@Test
public void shouldComputePackageUsageInPipelinesOnlyOnce() throws Exception {
PackageMaterialConfig packageOne = new PackageMaterialConfig("package-id-one");
PackageMaterialConfig packageTwo = new PackageMaterialConfig("package-id-two");
final PipelineConfig p1 = PipelineConfigMother.pipelineConfig("pipeline1", new MaterialConfigs(packageOne, packageTwo), new JobConfigs(new JobConfig(new CaseInsensitiveString("jobName"))));
final PipelineConfig p2 = PipelineConfigMother.pipelineConfig("pipeline2", new MaterialConfigs(packageTwo), new JobConfigs(new JobConfig(new CaseInsensitiveString("jobName"))));
PipelineGroups groups = new PipelineGroups();
groups.addAll(List.of(new BasicPipelineConfigs(p1), new BasicPipelineConfigs(p2)));
Map<String, List<Pair<PipelineConfig, PipelineConfigs>>> result1 = groups.getPackageUsageInPipelines();
Map<String, List<Pair<PipelineConfig, PipelineConfigs>>> result2 = groups.getPackageUsageInPipelines();
assertSame(result1, result2);
}
|
@Udf
public <T> List<T> concat(
@UdfParameter(description = "First array of values") final List<T> left,
@UdfParameter(description = "Second array of values") final List<T> right) {
if (left == null && right == null) {
return null;
}
final int leftSize = left != null ? left.size() : 0;
final int rightSize = right != null ? right.size() : 0;
final List<T> result = new ArrayList<>(leftSize + rightSize);
if (left != null) {
result.addAll(left);
}
if (right != null) {
result.addAll(right);
}
return result;
}
|
@Test
public void shouldConcatArraysOfOnlyNulls() {
final List<String> input1 = Arrays.asList(null, null);
final List<String> input2 = Arrays.asList(null, null, null);
final List<String> result = udf.concat(input1, input2);
assertThat(result, is(Arrays.asList(null, null, null, null, null)));
}
|
static void run(
final SystemExit systemExit,
final String... args
) throws Throwable {
final Arguments arguments = new Arguments.Builder()
.parseArgs(args)
.build();
if (arguments.help) {
usage();
return;
}
final Properties props = getProperties(arguments);
final DataGenProducer dataProducer = ProducerFactory
.getProducer(arguments.keyFormat, arguments.valueFormat, arguments.valueDelimiter, props);
final Optional<RateLimiter> rateLimiter = arguments.msgRate != -1
? Optional.of(RateLimiter.create(arguments.msgRate)) : Optional.empty();
final Executor executor = Executors.newFixedThreadPool(
arguments.numThreads,
r -> {
final Thread thread = new Thread(r);
thread.setDaemon(true);
return thread;
}
);
final CompletionService<Void> service = new ExecutorCompletionService<>(executor);
for (int i = 0; i < arguments.numThreads; i++) {
service.submit(getProducerTask(arguments, dataProducer, props, rateLimiter));
}
for (int i = 0; i < arguments.numThreads; i++) {
try {
service.take().get();
} catch (final InterruptedException e) {
System.err.println("Interrupted waiting for threads to exit.");
systemExit.exit(1);
} catch (final ExecutionException e) {
throw e.getCause();
}
}
}
|
@Test(expected = DataGen.Arguments.ArgumentParseException.class)
public void valueDelimiterCanOnlyBeSingleCharacter() throws Throwable {
DataGen.run(
mockSystem,
"schema=./src/main/resources/purchase.avro",
"key=id",
"format=delimited",
"value_delimiter=@@",
"topic=foo"
);
}
|
@Override
@SuppressWarnings("UseOfSystemOutOrSystemErr")
public void run(Namespace namespace, Liquibase liquibase) throws Exception {
final String context = getContext(namespace);
final Integer count = namespace.getInt("count");
final boolean dryRun = namespace.getBoolean("dry-run") != null && namespace.getBoolean("dry-run");
if (count != null) {
if (dryRun) {
liquibase.update(count, context, new OutputStreamWriter(outputStream, StandardCharsets.UTF_8));
} else {
liquibase.update(count, context);
}
} else {
if (dryRun) {
liquibase.update(context, new OutputStreamWriter(outputStream, StandardCharsets.UTF_8));
} else {
liquibase.update(context);
}
}
}
|
@Test
void testRunFirstTwoMigration() throws Exception {
migrateCommand.run(null, new Namespace(Collections.singletonMap("count", 2)), conf);
try (Handle handle = Jdbi.create(databaseUrl, "sa", "").open()) {
assertThat(handle.select("select * from persons").mapToMap()).isEmpty();
}
}
|
public static Catalog loadCatalog(
String impl, String catalogName, Map<String, String> properties, Object hadoopConf) {
Preconditions.checkNotNull(impl, "Cannot initialize custom Catalog, impl class name is null");
DynConstructors.Ctor<Catalog> ctor;
try {
ctor = DynConstructors.builder(Catalog.class).impl(impl).buildChecked();
} catch (NoSuchMethodException e) {
throw new IllegalArgumentException(
String.format("Cannot initialize Catalog implementation %s: %s", impl, e.getMessage()),
e);
}
Catalog catalog;
try {
catalog = ctor.newInstance();
} catch (ClassCastException e) {
throw new IllegalArgumentException(
String.format("Cannot initialize Catalog, %s does not implement Catalog.", impl), e);
}
configureHadoopConf(catalog, hadoopConf);
catalog.initialize(catalogName, properties);
return catalog;
}
|
@Test
public void loadCustomCatalog_NotImplementCatalog() {
Map<String, String> options = Maps.newHashMap();
options.put("key", "val");
Configuration hadoopConf = new Configuration();
String name = "custom";
assertThatThrownBy(
() ->
CatalogUtil.loadCatalog(
TestCatalogNoInterface.class.getName(), name, options, hadoopConf))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageStartingWith("Cannot initialize Catalog")
.hasMessageContaining("does not implement Catalog");
}
|
public static <T> SamplerFunction<T> neverSample() {
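// The shared NEVER_SAMPLE constant never inspects its argument, so the unchecked cast is safe for any T.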
return (SamplerFunction<T>) Constants.NEVER_SAMPLE;
}
|
@Test void neverSample_returnsFalse() {
assertThat(neverSample().trySample(null)).isFalse();
assertThat(neverSample().trySample("1")).isFalse();
}
|
static void validateFileExtension(Path jarPath) {
String fileName = jarPath.getFileName().toString();
if (!fileName.endsWith(".jar")) {
throw new JetException("File name extension should be .jar");
}
}
|
@Test
public void testValidateFileExtension() {
Path jarPath = Paths.get("foo");
assertThatThrownBy(() -> JarOnMemberValidator.validateFileExtension(jarPath))
.isInstanceOf(JetException.class)
.hasMessageContaining("File name extension should be .jar");
}
|
@Override
public void accept(T t) {
updateTimeHighWaterMark(t.time());
shortTermStorage.add(t);
drainDueToLatestInput(t); //standard drain policy
drainDueToTimeHighWaterMark(); //prevent blow-up when data goes backwards in time
sizeHighWaterMark = Math.max(sizeHighWaterMark, shortTermStorage.size());
}
|
@Test
public void testVeryOldPointsAreAutoEvicted() {
ConsumingArrayList<Point> downstreamConsumer = newConsumingArrayList();
ApproximateTimeSorter<Point> sorter = new ApproximateTimeSorter<>(
Duration.ofSeconds(10),
downstreamConsumer
);
sorter.accept(Point.builder().latLong(0.0,0.0).time(EPOCH).build());
sorter.accept(Point.builder().latLong(0.0,0.0).time(EPOCH.plusSeconds(4)).build());
sorter.accept(Point.builder().latLong(0.0,0.0).time(EPOCH.plusSeconds(8)).build());
assertThat("no evictions yet", downstreamConsumer, empty());
//Adding this point evicts the 1st point because it is more than 10 seconds older than the most recent input
sorter.accept(Point.builder().latLong(0.0,0.0).time(EPOCH.plusSeconds(12)).build()); //evict Epoch + 0
assertThat(downstreamConsumer, hasSize(1));
assertThat(downstreamConsumer.get(0).time(), equalTo(EPOCH));
//This stale point should get instantly evicted because it is too old (with respect to time highwater mark)
sorter.accept(Point.builder().latLong(0.0,0.0).time(EPOCH.minusSeconds(20)).build());
assertThat(downstreamConsumer, hasSize(2));
assertThat(downstreamConsumer.get(1).time(), equalTo(EPOCH.minusSeconds(20)));
}
|
@Deprecated
public static String getJwt(JwtClaims claims) throws JoseException {
String jwt;
RSAPrivateKey privateKey = (RSAPrivateKey) getPrivateKey(
jwtConfig.getKey().getFilename(),jwtConfig.getKey().getPassword(), jwtConfig.getKey().getKeyName());
// A JWT is a JWS and/or a JWE with JSON claims as the payload.
// In this example it is a JWS nested inside a JWE
// So we first create a JsonWebSignature object.
JsonWebSignature jws = new JsonWebSignature();
// The payload of the JWS is JSON content of the JWT Claims
jws.setPayload(claims.toJson());
// The JWT is signed using the sender's private key
jws.setKey(privateKey);
// Get the provider id from the security config file; it should be two digits.
// The provider id will be set as the prefix of the keyid in the token header, for example: 05100.
// if there is no provider id, we use "00" for the default value
String provider_id = "";
if (jwtConfig.getProviderId() != null) {
provider_id = jwtConfig.getProviderId();
if (provider_id.length() == 1) {
provider_id = "0" + provider_id;
} else if (provider_id.length() > 2) {
logger.error("provider_id defined in the security.yml file is invalid; the length should be 2");
provider_id = provider_id.substring(0, 2);
}
}
jws.setKeyIdHeaderValue(provider_id + jwtConfig.getKey().getKid());
// Set the signature algorithm on the JWT/JWS that will integrity protect the claims
jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.RSA_USING_SHA256);
// Sign the JWS and produce the compact serialization, which will be the inner JWT/JWS
// representation, which is a string consisting of three dot ('.') separated
// base64url-encoded parts in the form Header.Payload.Signature
jwt = jws.getCompactSerialization();
return jwt;
}
|
@Test
public void AcRoleAccessControlWrong() throws Exception {
JwtClaims claims = ClaimsUtil.getTestClaims("stevehu", "CUSTOMER", "f7d42348-c647-4efb-a52d-4c5787421e72", Arrays.asList("account.r", "account.w"), "user");
claims.setExpirationTimeMinutesInTheFuture(5256000);
String jwt = JwtIssuer.getJwt(claims, long_kid, KeyUtil.deserializePrivateKey(long_key, KeyUtil.RSA));
System.out.println("***Long lived token Authorization code customer with roles***: " + jwt);
}
|
public static String getUniqueName(String baseName) {
return baseName + UNIQUE_KEY_COUNTER.incrementAndGet();
}
|
@Test
public void testGetUniqueName()
{
final Set<String> names = new HashSet<>();
for (int i = 0; i < 10000; i++) {
final String uniqueName = TimingKey.getUniqueName("baseName");
Assert.assertTrue(uniqueName.contains("baseName"));
Assert.assertFalse(names.contains(uniqueName));
names.add(uniqueName);
}
}
|
public Map<String, String> mergeLocalParams(Map<String, String> localMap) {
String ump = localMap.get(URL_MERGE_PROCESSOR_KEY);
ProviderURLMergeProcessor providerUrlMergeProcessor;
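// Pick the ProviderURLMergeProcessor extension named by URL_MERGE_PROCESSOR_KEY, falling back to the "default" extension.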
if (StringUtils.isNotEmpty(ump)) {
providerUrlMergeProcessor = applicationModel
.getExtensionLoader(ProviderURLMergeProcessor.class)
.getExtension(ump);
} else {
providerUrlMergeProcessor = applicationModel
.getExtensionLoader(ProviderURLMergeProcessor.class)
.getExtension("default");
}
return providerUrlMergeProcessor.mergeLocalParams(localMap);
}
|
@Test
void testMergeLocalParams() {
// Verify default ProviderURLMergeProcessor
URL consumerURL = new URLBuilder(DUBBO_PROTOCOL, "localhost", 55555)
.addParameter(PID_KEY, "1234")
.addParameter(THREADPOOL_KEY, "foo")
.addParameter(APPLICATION_KEY, "consumer")
.addParameter(REFERENCE_FILTER_KEY, "filter3")
.addParameter(TAG_KEY, "UUU")
.build();
Map<String, String> params = clusterUtils.mergeLocalParams(consumerURL.getParameters());
Assertions.assertEquals("1234", params.get(PID_KEY));
Assertions.assertEquals("foo", params.get(THREADPOOL_KEY));
Assertions.assertEquals("consumer", params.get(APPLICATION_KEY));
Assertions.assertEquals("filter3", params.get(REFERENCE_FILTER_KEY));
Assertions.assertEquals("UUU", params.get(TAG_KEY));
// Verify custom ProviderURLMergeProcessor
URL consumerUrlForTag = new URLBuilder(DUBBO_PROTOCOL, "localhost", 55555)
.addParameter(PID_KEY, "1234")
.addParameter(THREADPOOL_KEY, "foo")
.addParameter(APPLICATION_KEY, "consumer")
.addParameter(REFERENCE_FILTER_KEY, "filter3")
.addParameter(TAG_KEY, "UUU")
.addParameter(URL_MERGE_PROCESSOR_KEY, "tag")
.build();
Map<String, String> paramsForTag = clusterUtils.mergeLocalParams(consumerUrlForTag.getParameters());
Assertions.assertEquals("1234", paramsForTag.get(PID_KEY));
Assertions.assertEquals("foo", paramsForTag.get(THREADPOOL_KEY));
Assertions.assertEquals("consumer", paramsForTag.get(APPLICATION_KEY));
Assertions.assertEquals("filter3", paramsForTag.get(REFERENCE_FILTER_KEY));
Assertions.assertNull(paramsForTag.get(TAG_KEY));
}
|
public static Set<Result> anaylze(String log) {
Set<Result> results = new HashSet<>();
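// Run every rule's pattern against the log and collect a Result for each rule that matches.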
for (Rule rule : Rule.values()) {
Matcher matcher = rule.pattern.matcher(log);
if (matcher.find()) {
results.add(new Result(rule, log, matcher));
}
}
return results;
}
|
@Test
public void noClassDefFoundError1() throws IOException {
CrashReportAnalyzer.Result result = findResultByRule(
CrashReportAnalyzer.anaylze(loadLog("/crash-report/no_class_def_found_error.txt")),
CrashReportAnalyzer.Rule.NO_CLASS_DEF_FOUND_ERROR);
assertEquals("blk", result.getMatcher().group("class"));
}
|
void handleTestStepFinished(TestStepFinished event) {
if (event.getTestStep() instanceof PickleStepTestStep && event.getResult().getStatus().is(Status.PASSED)) {
PickleStepTestStep testStep = (PickleStepTestStep) event.getTestStep();
addUsageEntry(event.getResult(), testStep);
}
}
|
@Test
void resultWithPassedStep() {
OutputStream out = new ByteArrayOutputStream();
UsageFormatter usageFormatter = new UsageFormatter(out);
TestStep testStep = mockTestStep();
Result result = new Result(Status.PASSED, Duration.ofMillis(12345L), null);
usageFormatter
.handleTestStepFinished(new TestStepFinished(Instant.EPOCH, mock(TestCase.class), testStep, result));
Map<String, List<UsageFormatter.StepContainer>> usageMap = usageFormatter.usageMap;
assertThat(usageMap.size(), is(equalTo(1)));
List<UsageFormatter.StepContainer> durationEntries = usageMap.get("stepDef");
assertThat(durationEntries.size(), is(equalTo(1)));
assertThat(durationEntries.get(0).getName(), is(equalTo("step")));
assertThat(durationEntries.get(0).getDurations().size(), is(equalTo(1)));
assertThat(durationEntries.get(0).getDurations().get(0).getDuration(), is(closeTo(12.345, EPSILON)));
}
|
public URLNormalizer decodeUnreservedCharacters() {
if (url.contains("%")) {
StringBuffer sb = new StringBuffer();
Matcher m = PATTERN_PERCENT_ENCODED_CHAR.matcher(url);
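// Decode only percent-encodings of RFC 3986 unreserved characters (ALPHA, DIGIT, '-', '.', '_', '~'); reserved ones stay encoded.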
try {
while (m.find()) {
String enc = m.group(1).toUpperCase();
if (isEncodedUnreservedCharacter(enc)) {
m.appendReplacement(sb, URLDecoder.decode(
enc, StandardCharsets.UTF_8.toString()));
}
}
} catch (UnsupportedEncodingException e) {
logger.debug("UTF-8 is not supported by your system. "
+ "URL will remain unchanged:" + url, e);
}
url = m.appendTail(sb).toString();
}
return this;
}
|
@Test
public void testDecodeUnreservedCharacters() {
// ALPHA (%41-%5A and %61-%7A), DIGIT (%30-%39), hyphen (%2D),
// period (%2E), underscore (%5F), or tilde (%7E)
s = "http://www.example.com/%41%42%59%5Aalpha"
+ "%61%62%79%7A/digit%30%31%38%39/%2Dhyphen/period%2E"
+ "/underscore%5F/%7Etilde/reserved%2F%3A%5B%26";
t = "http://www.example.com/ABYZalphaabyz/digit0189"
+ "/-hyphen/period./underscore_/~tilde/reserved%2F%3A%5B%26";
assertEquals(t, n(s).decodeUnreservedCharacters().toString());
}
|
@Override
public CompletableFuture<Void> cleanupAsync(JobID jobId) {
mainThreadExecutor.assertRunningInMainThread();
CompletableFuture<Void> cleanupFuture = FutureUtils.completedVoidFuture();
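// Chain the prioritized cleanups so they run strictly one after another, in order.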
for (CleanupWithLabel<T> cleanupWithLabel : prioritizedCleanup) {
cleanupFuture =
cleanupFuture.thenCompose(
ignoredValue ->
withRetry(
jobId,
cleanupWithLabel.getLabel(),
cleanupWithLabel.getCleanup()));
}
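// After all prioritized cleanups complete, run the regular cleanups concurrently.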
return cleanupFuture.thenCompose(
ignoredValue ->
FutureUtils.completeAll(
regularCleanup.stream()
.map(
cleanupWithLabel ->
withRetry(
jobId,
cleanupWithLabel.getLabel(),
cleanupWithLabel.getCleanup()))
.collect(Collectors.toList())));
}
|
@Test
void testCleanupWithSingleRetryInHighPriorityTask() {
final Collection<JobID> actualJobIds = new ArrayList<>();
final CleanupCallback cleanupWithRetry = cleanupWithInitialFailingRuns(actualJobIds, 1);
final CleanupCallback oneRunHigherPriorityCleanup =
SingleCallCleanup.withCompletionOnCleanup();
final SingleCallCleanup oneRunCleanup = SingleCallCleanup.withCompletionOnCleanup();
final CompletableFuture<Void> compositeCleanupResult =
createTestInstanceBuilder(TestingRetryStrategies.createWithNumberOfRetries(1))
.withPrioritizedCleanup("Prio #0", cleanupWithRetry)
.withPrioritizedCleanup("Prio #1", oneRunHigherPriorityCleanup)
.withRegularCleanup("Reg #0", oneRunCleanup)
.build()
.cleanupAsync(JOB_ID);
assertThatFuture(compositeCleanupResult).eventuallySucceeds();
assertThat(oneRunCleanup.getProcessedJobId()).isEqualTo(JOB_ID);
assertThat(oneRunCleanup.isDone()).isTrue();
assertThat(actualJobIds).containsExactly(JOB_ID, JOB_ID);
}
|
@PostMapping("/authorize")
@Operation(summary = "Request authorization", description = "For the code (authorization code) grant or the implicit grant; invoked by the Submit action on the sso.vue single sign-on page")
@Parameters({
@Parameter(name = "response_type", required = true, description = "response type", example = "code"),
@Parameter(name = "client_id", required = true, description = "client id", example = "tudou"),
@Parameter(name = "scope", description = "authorization scope", example = "userinfo.read"), // a Map<String, Boolean> payload; Spring MVC cannot bind request parameters in this form yet
@Parameter(name = "redirect_uri", required = true, description = "redirect URI", example = "https://www.iocoder.cn"),
@Parameter(name = "auto_approve", required = true, description = "whether the user approves", example = "true"),
@Parameter(name = "state", example = "1")
})
public CommonResult<String> approveOrDeny(@RequestParam("response_type") String responseType,
@RequestParam("client_id") String clientId,
@RequestParam(value = "scope", required = false) String scope,
@RequestParam("redirect_uri") String redirectUri,
@RequestParam(value = "auto_approve") Boolean autoApprove,
@RequestParam(value = "state", required = false) String state) {
@SuppressWarnings("unchecked")
Map<String, Boolean> scopes = JsonUtils.parseObject(scope, Map.class);
scopes = ObjectUtil.defaultIfNull(scopes, Collections.emptyMap());
// 0. Verify that the user is logged in; handled by Spring Security.
// 1.1 Validate that responseType is either code or token.
OAuth2GrantTypeEnum grantTypeEnum = getGrantTypeEnum(responseType);
// 1.2 Validate that the redirectUri domain is allowed and that the scopes are within the client's authorized scopes.
OAuth2ClientDO client = oauth2ClientService.validOAuthClientFromCache(clientId, null,
grantTypeEnum.getGrantType(), scopes.keySet(), redirectUri);
// 2.1 If approved is null, this is scenario one (auto-approval).
if (Boolean.TRUE.equals(autoApprove)) {
// If auto-approval is not possible, return an empty url so the frontend does not redirect.
if (!oauth2ApproveService.checkForPreApproval(getLoginUserId(), getUserType(), clientId, scopes.keySet())) {
return success(null);
}
} else { // 2.2 approved is non-null, meaning this is scenario two
// If the evaluation denies the request, redirect to an error link.
if (!oauth2ApproveService.updateAfterApproval(getLoginUserId(), getUserType(), clientId, scopes)) {
return success(OAuth2Utils.buildUnsuccessfulRedirect(redirectUri, responseType, state,
"access_denied", "User denied access"));
}
}
// 3.1 Authorization code grant: issue the authorization code and redirect.
List<String> approveScopes = convertList(scopes.entrySet(), Map.Entry::getKey, Map.Entry::getValue);
if (grantTypeEnum == OAuth2GrantTypeEnum.AUTHORIZATION_CODE) {
return success(getAuthorizationCodeRedirect(getLoginUserId(), client, approveScopes, redirectUri, state));
}
// 3.2 Otherwise (token) it is the implicit grant: issue the access token and redirect.
return success(getImplicitGrantRedirect(getLoginUserId(), client, approveScopes, redirectUri, state));
}
|
@Test // autoApprove = false, approved + code
public void testApproveOrDeny_approveWithCode() {
// prepare parameters
String responseType = "code";
String clientId = randomString();
String scope = "{\"read\": true, \"write\": false}";
String redirectUri = "https://www.iocoder.cn";
String state = "test";
// mock method (client)
OAuth2ClientDO client = randomPojo(OAuth2ClientDO.class).setClientId(clientId).setAdditionalInformation(null);
when(oauth2ClientService.validOAuthClientFromCache(eq(clientId), isNull(), eq("authorization_code"),
eq(asSet("read", "write")), eq(redirectUri))).thenReturn(client);
// mock method (scenario two)
when(oauth2ApproveService.updateAfterApproval(isNull(), eq(UserTypeEnum.ADMIN.getValue()), eq(clientId),
eq(MapUtil.builder(new LinkedHashMap<String, Boolean>()).put("read", true).put("write", false).build())))
.thenReturn(true);
// mock method (authorization code)
String authorizationCode = "test_code";
when(oauth2GrantService.grantAuthorizationCodeForCode(isNull(), eq(UserTypeEnum.ADMIN.getValue()),
eq(clientId), eq(ListUtil.toList("read")), eq(redirectUri), eq(state))).thenReturn(authorizationCode);
// invoke
CommonResult<String> result = oauth2OpenController.approveOrDeny(responseType, clientId,
scope, redirectUri, false, state);
// assert
assertEquals(0, result.getCode());
assertEquals("https://www.iocoder.cn?code=test_code&state=test", result.getData());
}
|
public RingbufferConfig setInMemoryFormat(InMemoryFormat inMemoryFormat) {
checkNotNull(inMemoryFormat, "inMemoryFormat can't be null");
checkFalse(inMemoryFormat == NATIVE, "InMemoryFormat " + NATIVE + " is not supported");
this.inMemoryFormat = inMemoryFormat;
return this;
}
|
@Test
public void setInMemoryFormat() {
RingbufferConfig config = new RingbufferConfig(NAME);
RingbufferConfig returned = config.setInMemoryFormat(InMemoryFormat.OBJECT);
assertSame(config, returned);
assertEquals(InMemoryFormat.OBJECT, config.getInMemoryFormat());
}
|
public boolean greaterThan(SentinelVersion version) {
if (version == null) {
return true;
}
return getFullVersion() > version.getFullVersion();
}
|
@Test
public void testGreater() {
assertTrue(new SentinelVersion(2, 0, 0).greaterThan(new SentinelVersion(1, 0, 0)));
assertTrue(new SentinelVersion(1, 1, 0).greaterThan(new SentinelVersion(1, 0, 0)));
assertTrue(new SentinelVersion(1, 1, 2).greaterThan(new SentinelVersion(1, 1, 0)));
assertTrue(new SentinelVersion(1, 1, 4).greaterThan(new SentinelVersion(1, 1, 3)));
assertFalse(new SentinelVersion(1, 0, 0).greaterThan(new SentinelVersion(1, 0, 0)));
assertFalse(new SentinelVersion(1, 0, 0).greaterThan(new SentinelVersion(1, 1, 0)));
assertFalse(new SentinelVersion(1, 1, 3).greaterThan(new SentinelVersion(1, 1, 3)));
assertFalse(new SentinelVersion(1, 1, 2).greaterThan(new SentinelVersion(1, 1, 3)));
assertFalse(new SentinelVersion(1, 0, 0, "").greaterThan(new SentinelVersion(1, 0, 0)));
assertTrue(new SentinelVersion(1, 0, 1).greaterThan(new SentinelVersion(1, 0, 0)));
assertTrue(new SentinelVersion(1, 0, 1, "a").greaterThan(new SentinelVersion(1, 0, 0, "b")));
assertFalse(new SentinelVersion(1, 0, 0, "b").greaterThan(new SentinelVersion(1, 0, 0, "a")));
}
|
public List<Class<? extends AbstractAttributeConverter>> getConverters() {
return List.copyOf(converterClasses);
}
|
@Test
public void setProfileAttrs() throws Exception {
var client = new GenericOAuth20Client();
Map map = new HashMap();
map.put(AGE, "Integer|age");
//map.put("creation_time", "Date:|creation_time");
map.put(IS_ADMIN, "Boolean|is_admin");
map.put(BG_COLOR, "Color|bg_color");
map.put(GENDER, "Gender|gender");
map.put(BIRTHDAY, "Locale|birthday");
map.put(ID, "Long|id");
map.put(BLOG, "URI|blog");
client.setProfileAttrs(map);
client.setCallbackUrl(CALLBACK_URL);
client.init();
var profileDefinition = (GenericOAuth20ProfileDefinition) client.getConfiguration().getProfileDefinition();
assertTrue(profileDefinition.getConverters().get(AGE) instanceof IntegerConverter);
assertTrue(profileDefinition.getConverters().get(IS_ADMIN) instanceof BooleanConverter);
assertTrue(profileDefinition.getConverters().get(BG_COLOR) instanceof ColorConverter);
assertTrue(profileDefinition.getConverters().get(GENDER) instanceof GenderConverter);
assertTrue(profileDefinition.getConverters().get(BIRTHDAY) instanceof LocaleConverter);
assertTrue(profileDefinition.getConverters().get(ID) instanceof LongConverter);
assertTrue(profileDefinition.getConverters().get(BLOG) instanceof UrlConverter);
}
|
@Override
public double cdf(double x) {
if (x < 0.0) {
throw new IllegalArgumentException("Invalid x: " + x);
}
if (x == 0.0) {
return 0.;
}
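// Log-normal CDF: Phi((ln x - mu) / sigma), expressed via the complementary error function; -0.7071... is -1/sqrt(2).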
return 0.5 * Erf.erfc(-0.707106781186547524 * (Math.log(x) - mu) / sigma);
}
|
@Test
public void testCdf() {
System.out.println("cdf");
LogNormalDistribution instance = new LogNormalDistribution(1.0, 1.0);
instance.rand();
assertEquals(1.040252e-08, instance.cdf(0.01), 1E-12);
assertEquals(0.0004789901, instance.cdf(0.1), 1E-7);
assertEquals(0.1586553, instance.cdf(1.0), 1E-7);
assertEquals(0.3794777, instance.cdf(2.0), 1E-7);
assertEquals(0.7288829, instance.cdf(5.0), 1E-7);
assertEquals(0.9036418, instance.cdf(10.0), 1E-7);
}
|
public T getElement(final T key) {
// validate key
if (key == null) {
throw new IllegalArgumentException("Null element is not supported.");
}
// find element
final int hashCode = key.hashCode();
final int index = getIndex(hashCode);
return getContainedElem(index, key, hashCode);
}
|
@Test
public void testGetElement() {
LightWeightHashSet<TestObject> objSet = new LightWeightHashSet<TestObject>();
TestObject objA = new TestObject("object A");
TestObject equalToObjA = new TestObject("object A");
TestObject objB = new TestObject("object B");
objSet.add(objA);
objSet.add(objB);
assertSame(objA, objSet.getElement(objA));
assertSame(objA, objSet.getElement(equalToObjA));
assertSame(objB, objSet.getElement(objB));
assertNull(objSet.getElement(new TestObject("not in set")));
}
|
public static synchronized AbstractAbilityControlManager getInstance() {
if (null == abstractAbilityControlManager) {
initAbilityControlManager();
}
return abstractAbilityControlManager;
}
|
@Test
void testGetInstanceByType() {
assertNotNull(NacosAbilityManagerHolder.getInstance(HigherMockAbilityManager.class));
}
|
public <T> ObjectConstructor<T> get(TypeToken<T> typeToken) {
final Type type = typeToken.getType();
final Class<? super T> rawType = typeToken.getRawType();
// first try an instance creator
@SuppressWarnings("unchecked") // types must agree
final InstanceCreator<T> typeCreator = (InstanceCreator<T>) instanceCreators.get(type);
if (typeCreator != null) {
return new ObjectConstructor<T>() {
@Override
public T construct() {
return typeCreator.createInstance(type);
}
};
}
// Next try raw type match for instance creators
@SuppressWarnings("unchecked") // types must agree
final InstanceCreator<T> rawTypeCreator = (InstanceCreator<T>) instanceCreators.get(rawType);
if (rawTypeCreator != null) {
return new ObjectConstructor<T>() {
@Override
public T construct() {
return rawTypeCreator.createInstance(type);
}
};
}
// First consider special constructors before checking for no-args constructors
// below to avoid matching internal no-args constructors which might be added in
// future JDK versions
ObjectConstructor<T> specialConstructor = newSpecialCollectionConstructor(type, rawType);
if (specialConstructor != null) {
return specialConstructor;
}
FilterResult filterResult =
ReflectionAccessFilterHelper.getFilterResult(reflectionFilters, rawType);
ObjectConstructor<T> defaultConstructor = newDefaultConstructor(rawType, filterResult);
if (defaultConstructor != null) {
return defaultConstructor;
}
ObjectConstructor<T> defaultImplementation = newDefaultImplementationConstructor(type, rawType);
if (defaultImplementation != null) {
return defaultImplementation;
}
// Check whether type is instantiable; otherwise ReflectionAccessFilter recommendation
// of adjusting filter suggested below is irrelevant since it would not solve the problem
final String exceptionMessage = checkInstantiable(rawType);
if (exceptionMessage != null) {
return new ObjectConstructor<T>() {
@Override
public T construct() {
throw new JsonIOException(exceptionMessage);
}
};
}
// Consider usage of Unsafe as reflection, so don't use if BLOCK_ALL
// Additionally, since it is not calling any constructor at all, don't use if BLOCK_INACCESSIBLE
if (filterResult == FilterResult.ALLOW) {
// finally try unsafe
return newUnsafeAllocator(rawType);
} else {
final String message =
"Unable to create instance of "
+ rawType
+ "; ReflectionAccessFilter does not permit using reflection or Unsafe. Register an"
+ " InstanceCreator or a TypeAdapter for this type or adjust the access filter to"
+ " allow using reflection.";
return new ObjectConstructor<T>() {
@Override
public T construct() {
throw new JsonIOException(message);
}
};
}
}
|
@Test
public void testGet_Interface() {
ObjectConstructor<Interface> constructor =
constructorConstructor.get(TypeToken.get(Interface.class));
var e = assertThrows(RuntimeException.class, () -> constructor.construct());
assertThat(e)
.hasMessageThat()
.isEqualTo(
"Interfaces can't be instantiated! Register an InstanceCreator or a TypeAdapter for"
+ " this type. Interface name:"
+ " com.google.gson.internal.ConstructorConstructorTest$Interface");
}
|
public Boolean editNamespace(String namespaceId, String namespaceName, String namespaceDesc) {
// TODO: fetch the kp to use
namespacePersistService.updateTenantNameAtomic(DEFAULT_KP, namespaceId, namespaceName, namespaceDesc);
return true;
}
|
@Test
void testEditNamespace() {
namespaceOperationService.editNamespace(TEST_NAMESPACE_ID, TEST_NAMESPACE_NAME, TEST_NAMESPACE_DESC);
verify(namespacePersistService).updateTenantNameAtomic(DEFAULT_KP, TEST_NAMESPACE_ID, TEST_NAMESPACE_NAME, TEST_NAMESPACE_DESC);
}
|
public static ReservationDefinition convertReservationDefinition(
ReservationDefinitionInfo definitionInfo) {
if (definitionInfo == null || definitionInfo.getReservationRequests() == null
|| definitionInfo.getReservationRequests().getReservationRequest() == null
|| definitionInfo.getReservationRequests().getReservationRequest().isEmpty()) {
throw new RuntimeException("definitionInfo Or ReservationRequests is Null.");
}
// basic variable
long arrival = definitionInfo.getArrival();
long deadline = definitionInfo.getDeadline();
// ReservationRequests reservationRequests
String name = definitionInfo.getReservationName();
String recurrenceExpression = definitionInfo.getRecurrenceExpression();
Priority priority = Priority.newInstance(definitionInfo.getPriority());
// reservation requests info
List<ReservationRequest> reservationRequestList = new ArrayList<>();
ReservationRequestsInfo reservationRequestsInfo = definitionInfo.getReservationRequests();
List<ReservationRequestInfo> reservationRequestInfos =
reservationRequestsInfo.getReservationRequest();
for (ReservationRequestInfo resRequestInfo : reservationRequestInfos) {
ResourceInfo resourceInfo = resRequestInfo.getCapability();
Resource capability =
Resource.newInstance(resourceInfo.getMemorySize(), resourceInfo.getvCores());
ReservationRequest reservationRequest = ReservationRequest.newInstance(capability,
resRequestInfo.getNumContainers(), resRequestInfo.getMinConcurrency(),
resRequestInfo.getDuration());
reservationRequestList.add(reservationRequest);
}
ReservationRequestInterpreter[] values = ReservationRequestInterpreter.values();
ReservationRequestInterpreter reservationRequestInterpreter =
values[reservationRequestsInfo.getReservationRequestsInterpreter()];
ReservationRequests reservationRequests = ReservationRequests.newInstance(
reservationRequestList, reservationRequestInterpreter);
ReservationDefinition definition = ReservationDefinition.newInstance(
arrival, deadline, reservationRequests, name, recurrenceExpression, priority);
return definition;
}
|
@Test
public void testConvertReservationDefinition() {
// Prepare parameters
ReservationId reservationId = ReservationId.newInstance(Time.now(), 1);
ReservationSubmissionRequestInfo requestInfo =
getReservationSubmissionRequestInfo(reservationId);
ReservationDefinitionInfo expectDefinitionInfo = requestInfo.getReservationDefinition();
// Convert ReservationDefinitionInfo to ReservationDefinition
ReservationDefinition convertDefinition =
RouterServerUtil.convertReservationDefinition(expectDefinitionInfo);
// reservationDefinition is not null
assertNotNull(convertDefinition);
assertEquals(expectDefinitionInfo.getArrival(), convertDefinition.getArrival());
assertEquals(expectDefinitionInfo.getDeadline(), convertDefinition.getDeadline());
Priority priority = convertDefinition.getPriority();
assertNotNull(priority);
assertEquals(expectDefinitionInfo.getPriority(), priority.getPriority());
assertEquals(expectDefinitionInfo.getRecurrenceExpression(),
convertDefinition.getRecurrenceExpression());
assertEquals(expectDefinitionInfo.getReservationName(), convertDefinition.getReservationName());
ReservationRequestsInfo expectRequestsInfo = expectDefinitionInfo.getReservationRequests();
List<ReservationRequestInfo> expectRequestsInfoList =
expectRequestsInfo.getReservationRequest();
ReservationRequests convertReservationRequests =
convertDefinition.getReservationRequests();
assertNotNull(convertReservationRequests);
List<ReservationRequest> convertRequestList =
convertReservationRequests.getReservationResources();
assertNotNull(convertRequestList);
assertEquals(1, convertRequestList.size());
ReservationRequestInfo expectResRequestInfo = expectRequestsInfoList.get(0);
ReservationRequest convertResRequest = convertRequestList.get(0);
assertNotNull(convertResRequest);
assertEquals(expectResRequestInfo.getNumContainers(), convertResRequest.getNumContainers());
assertEquals(expectResRequestInfo.getDuration(), convertResRequest.getDuration());
ResourceInfo expectResourceInfo = expectResRequestInfo.getCapability();
Resource convertResource = convertResRequest.getCapability();
assertNotNull(expectResourceInfo);
assertEquals(expectResourceInfo.getMemorySize(), convertResource.getMemorySize());
assertEquals(expectResourceInfo.getvCores(), convertResource.getVirtualCores());
}
|
@Override
public DeviceCredentials findByCredentialsId(TenantId tenantId, String credentialsId) {
log.trace("[{}] findByCredentialsId [{}]", tenantId, credentialsId);
return DaoUtil.getData(deviceCredentialsRepository.findByCredentialsId(credentialsId));
}
|
@Test
public void findByCredentialsId() {
DeviceCredentials foundedDeviceCredentials = deviceCredentialsDao.findByCredentialsId(SYSTEM_TENANT_ID, neededDeviceCredentials.getCredentialsId());
assertNotNull(foundedDeviceCredentials);
assertEquals(neededDeviceCredentials.getId(), foundedDeviceCredentials.getId());
}
|
@VisibleForTesting
static String getProjectCacheDirectoryFromProject(Path path) {
try {
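// Hex-encoded SHA-256 of the canonical project path gives a stable, filesystem-safe cache directory name.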
byte[] hashedBytes =
MessageDigest.getInstance("SHA-256")
.digest(path.toFile().getCanonicalPath().getBytes(Charsets.UTF_8));
StringBuilder stringBuilder = new StringBuilder(2 * hashedBytes.length);
for (byte b : hashedBytes) {
stringBuilder.append(String.format("%02x", b));
}
return stringBuilder.toString();
} catch (IOException | SecurityException ex) {
throw new RuntimeException(
"Unable to create cache directory for project path: "
+ path
+ " - you can try to configure --project-cache manually",
ex);
} catch (NoSuchAlgorithmException ex) {
throw new RuntimeException(
"SHA-256 algorithm implementation not found - might be a broken JVM");
}
}
|
@Test
public void testGetProjectCacheDirectoryFromProject_different() {
assertThat(CacheDirectories.getProjectCacheDirectoryFromProject(Paths.get("1")))
.isNotEqualTo(CacheDirectories.getProjectCacheDirectoryFromProject(Paths.get("2")));
}
|
public String toJson(boolean pretty) { return SlimeUtils.toJson(inspector, !pretty); }
|
@Test
void add_all() {
var expected =
"""
[
1,
2,
3,
4,
5,
6
]
""";
var json = Json.Builder.newArray()
.addAll(Json.Builder.Array.newArray().add(1).add(2).add(3))
.add(4)
.addAll(Json.Builder.Array.newArray().add(5))
.add(6)
.build()
.toJson(true);
assertEquals(expected, json);
}
|
protected ValidationTaskResult loadHdfsConfig() {
Pair<String, String> clientConfFiles = getHdfsConfPaths();
String coreConfPath = clientConfFiles.getFirst();
String hdfsConfPath = clientConfFiles.getSecond();
mCoreConf = accessAndParseConf("core-site.xml", coreConfPath);
mHdfsConf = accessAndParseConf("hdfs-site.xml", hdfsConfPath);
return new ValidationTaskResult(mState, getName(), mMsg.toString(), mAdvice.toString());
}
|
@Test
public void loadedConf() {
String hdfsSite = Paths.get(sTestDir.toPath().toString(), "hdfs-site.xml").toString();
ValidationTestUtils.writeXML(hdfsSite, ImmutableMap.of("key2", "value2"));
String coreSite = Paths.get(sTestDir.toPath().toString(), "core-site.xml").toString();
ValidationTestUtils.writeXML(coreSite, ImmutableMap.of("key1", "value1"));
CONF.set(PropertyKey.UNDERFS_HDFS_CONFIGURATION,
hdfsSite + HdfsConfValidationTask.SEPARATOR + coreSite);
HdfsConfValidationTask task =
new HdfsConfValidationTask("hdfs://namenode:9000/alluxio", CONF);
ValidationTaskResult result = task.loadHdfsConfig();
assertEquals(result.getState(), ValidationUtils.State.OK);
}
|
public void updateMemoryUsage(
long deltaUserMemoryInBytes,
long deltaTotalMemoryInBytes,
long taskUserMemoryInBytes,
long taskTotalMemoryInBytes,
long peakNodeTotalMemoryInBytes)
{
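        // Apply the deltas to the current counters, then refresh the peak values for query, task and node memory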
currentUserMemory.addAndGet(deltaUserMemoryInBytes);
currentTotalMemory.addAndGet(deltaTotalMemoryInBytes);
peakUserMemory.updateAndGet(currentPeakValue -> Math.max(currentUserMemory.get(), currentPeakValue));
peakTotalMemory.updateAndGet(currentPeakValue -> Math.max(currentTotalMemory.get(), currentPeakValue));
peakTaskUserMemory.accumulateAndGet(taskUserMemoryInBytes, Math::max);
peakTaskTotalMemory.accumulateAndGet(taskTotalMemoryInBytes, Math::max);
peakNodeTotalMemory.accumulateAndGet(peakNodeTotalMemoryInBytes, Math::max);
}
|
@Test
public void testUpdateMemoryUsage()
{
QueryStateMachine stateMachine = createQueryStateMachine();
stateMachine.updateMemoryUsage(5, 10, 1, 3, 3);
assertEquals(stateMachine.getPeakUserMemoryInBytes(), 5);
assertEquals(stateMachine.getPeakTotalMemoryInBytes(), 10);
assertEquals(stateMachine.getPeakTaskUserMemory(), 1);
assertEquals(stateMachine.getPeakTaskTotalMemory(), 3);
assertEquals(stateMachine.getPeakNodeTotalMemory(), 3);
stateMachine.updateMemoryUsage(0, 0, 2, 2, 2);
assertEquals(stateMachine.getPeakUserMemoryInBytes(), 5);
assertEquals(stateMachine.getPeakTotalMemoryInBytes(), 10);
assertEquals(stateMachine.getPeakTaskUserMemory(), 2);
assertEquals(stateMachine.getPeakTaskTotalMemory(), 3);
assertEquals(stateMachine.getPeakNodeTotalMemory(), 3);
stateMachine.updateMemoryUsage(1, 1, 1, 5, 5);
assertEquals(stateMachine.getPeakUserMemoryInBytes(), 6);
assertEquals(stateMachine.getPeakTotalMemoryInBytes(), 11);
assertEquals(stateMachine.getPeakTaskUserMemory(), 2);
assertEquals(stateMachine.getPeakTaskTotalMemory(), 5);
assertEquals(stateMachine.getPeakNodeTotalMemory(), 5);
stateMachine.updateMemoryUsage(3, 3, 5, 2, 2);
assertEquals(stateMachine.getPeakUserMemoryInBytes(), 9);
assertEquals(stateMachine.getPeakTotalMemoryInBytes(), 14);
assertEquals(stateMachine.getPeakTaskUserMemory(), 5);
assertEquals(stateMachine.getPeakTaskTotalMemory(), 5);
assertEquals(stateMachine.getPeakNodeTotalMemory(), 5);
}
|
@Override
public Map<String, String> getProperties() {
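        // Properties from the single message metadata take precedence; duplicate keys keep the last value seen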
if (setSingleMessageMetadata && singleMessageMetadata.getPropertiesCount() > 0) {
return singleMessageMetadata.getPropertiesList().stream()
.collect(Collectors.toMap(KeyValue::getKey, KeyValue::getValue,
(oldValue, newValue) -> newValue));
} else if (msgMetadata.getMetadata().getPropertiesCount() > 0) {
return msgMetadata.getMetadata().getPropertiesList().stream()
.collect(Collectors.toMap(KeyValue::getKey, KeyValue::getValue));
} else {
return Collections.emptyMap();
}
}
|
@Test
public void testGetProperties() {
ReferenceCountedMessageMetadata refCntMsgMetadata =
ReferenceCountedMessageMetadata.get(mock(ByteBuf.class));
SingleMessageMetadata singleMessageMetadata = new SingleMessageMetadata();
singleMessageMetadata.addProperty().setKey(HARD_CODE_KEY).setValue(KEY_VALUE_FIRST);
singleMessageMetadata.addProperty().setKey(HARD_CODE_KEY).setValue(KEY_VALUE_SECOND);
singleMessageMetadata.addProperty().setKey(HARD_CODE_KEY_ID).setValue(HARD_CODE_KEY_ID_VALUE);
RawMessage msg = RawMessageImpl.get(refCntMsgMetadata, singleMessageMetadata, null, 0, 0, 0);
Map<String, String> properties = msg.getProperties();
        assertEquals(properties.get(HARD_CODE_KEY), KEY_VALUE_SECOND);
        assertEquals(properties.get(HARD_CODE_KEY_ID), HARD_CODE_KEY_ID_VALUE);
}
|
public Range<PartitionKey> handleNewSinglePartitionDesc(Map<ColumnId, Column> schema, SingleRangePartitionDesc desc,
long partitionId, boolean isTemp) throws DdlException {
Range<PartitionKey> range;
try {
range = checkAndCreateRange(schema, desc, isTemp);
setRangeInternal(partitionId, isTemp, range);
} catch (IllegalArgumentException e) {
// Range.closedOpen may throw this if (lower > upper)
throw new DdlException("Invalid key range: " + e.getMessage());
}
idToDataProperty.put(partitionId, desc.getPartitionDataProperty());
idToReplicationNum.put(partitionId, desc.getReplicationNum());
idToInMemory.put(partitionId, desc.isInMemory());
idToStorageCacheInfo.put(partitionId, desc.getDataCacheInfo());
return range;
}
|
@Test
public void testFixedRange4() throws DdlException, AnalysisException {
//add columns
int columns = 2;
Column k1 = new Column("k1", new ScalarType(PrimitiveType.INT), true, null, "", "");
Column k2 = new Column("k2", new ScalarType(PrimitiveType.BIGINT), true, null, "", "");
partitionColumns.add(k1);
partitionColumns.add(k2);
//add RangePartitionDescs
PartitionKeyDesc p1 = new PartitionKeyDesc(
Lists.newArrayList(new PartitionValue("20190101"), new PartitionValue("100")),
Lists.newArrayList(new PartitionValue("20190201")));
singleRangePartitionDescs.add(new SingleRangePartitionDesc(false, "p1", p1, null));
partitionInfo = new RangePartitionInfo(partitionColumns);
for (SingleRangePartitionDesc singleRangePartitionDesc : singleRangePartitionDescs) {
singleRangePartitionDesc.analyze(columns, null);
partitionInfo.handleNewSinglePartitionDesc(MetaUtils.buildIdToColumn(partitionColumns),
singleRangePartitionDesc, 20000L, false);
}
}
|
@Override
public void execute(String commandName, BufferedReader reader, BufferedWriter writer)
throws Py4JException, IOException {
char subCommand = safeReadLine(reader).charAt(0);
String returnCommand = null;
if (subCommand == ARRAY_GET_SUB_COMMAND_NAME) {
returnCommand = getArray(reader);
} else if (subCommand == ARRAY_SET_SUB_COMMAND_NAME) {
returnCommand = setArray(reader);
} else if (subCommand == ARRAY_SLICE_SUB_COMMAND_NAME) {
returnCommand = sliceArray(reader);
} else if (subCommand == ARRAY_LEN_SUB_COMMAND_NAME) {
returnCommand = lenArray(reader);
} else if (subCommand == ARRAY_CREATE_SUB_COMMAND_NAME) {
returnCommand = createArray(reader);
} else {
returnCommand = Protocol.getOutputErrorCommand("Unknown Array SubCommand Name: " + subCommand);
}
logger.finest("Returning command: " + returnCommand);
writer.write(returnCommand);
writer.flush();
}
|
@Test
public void testSlice() {
int[] array3 = new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
String[][] array4 = new String[][] { { "111", "222" }, { "aaa", "bbb" }, { "88", "99" } };
gateway.putNewObject(array3);
gateway.putNewObject(array4);
String inputCommand = ArrayCommand.ARRAY_SLICE_SUB_COMMAND_NAME + "\n" + "o2" + "\ni1\ni5\ne\n";
try {
command.execute("a", new BufferedReader(new StringReader(inputCommand)), writer);
assertEquals("!yto4\n", sWriter.toString());
int[] intarray = (int[]) gateway.getObject("o4");
assertEquals(2, intarray.length);
assertEquals(6, intarray[1]);
inputCommand = ArrayCommand.ARRAY_SLICE_SUB_COMMAND_NAME + "\n" + "o3" + "\ni2\ne\n";
command.execute("a", new BufferedReader(new StringReader(inputCommand)), writer);
assertEquals("!yto4\n!yto5\n", sWriter.toString());
String[][] stringarray = (String[][]) gateway.getObject("o5");
assertEquals(1, stringarray.length);
assertEquals("99", stringarray[0][1]);
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
|
public static UFreeIdent create(CharSequence identifier) {
return new AutoValue_UFreeIdent(StringName.of(identifier));
}
|
@Test
public void inlinesExpression() {
bind(new UFreeIdent.Key("foo"), parseExpression("\"abcdefg\".charAt(x + 1)"));
assertInlines(
parseExpression("\"abcdefg\".charAt(x + 1)").toString(), UFreeIdent.create("foo"));
}
|
@Override
public void handle(final Callback[] callbacks)
throws IOException, UnsupportedCallbackException {
for (final Callback callback : callbacks) {
if (callback instanceof NameCallback) {
final NameCallback nc = (NameCallback) callback;
nc.setName(getUserName());
} else if (callback instanceof ObjectCallback) {
final ObjectCallback oc = (ObjectCallback)callback;
oc.setObject(getCredential());
} else if (callback instanceof PasswordCallback) {
final PasswordCallback pc = (PasswordCallback) callback;
pc.setPassword(((String) getCredential()).toCharArray());
} else if (callback instanceof TextOutputCallback) {
final TextOutputCallback toc = (TextOutputCallback) callback;
switch (toc.getMessageType()) {
case TextOutputCallback.ERROR:
log.error(toc.getMessage());
break;
case TextOutputCallback.WARNING:
log.warn(toc.getMessage());
break;
case TextOutputCallback.INFORMATION:
log.info(toc.getMessage());
break;
default:
throw new IOException("Unsupported message type: " + toc.getMessageType());
}
} else {
// We ignore unknown callback types - e.g. Jetty implementation might pass us Jetty specific
// stuff which we can't deal with
}
}
}
|
@Test
public void shouldHandlePasswordCallback() throws Exception {
// When:
callbackHandler.handle(new Callback[]{nameCallback, passwordCallback});
// Then:
verify(nameCallback).setName(USERNAME);
verify(passwordCallback).setPassword(PASSWORD.toCharArray());
}
|
@Override
public boolean upload(String destPath, File file) {
Assert.notNull(file, "file to upload is null !");
return upload(destPath, file.getName(), file);
}
|
@Test
@Disabled
public void uploadTest() {
final Ftp ftp = new Ftp("localhost");
final boolean upload = ftp.upload("/temp", FileUtil.file("d:/test/test.zip"));
Console.log(upload);
IoUtil.close(ftp);
}
|
@Override
public CommittableCollector<CommT> deserialize(int version, byte[] serialized)
throws IOException {
final DataInputDeserializer in = new DataInputDeserializer(serialized);
if (version == 1) {
return deserializeV1(in);
}
if (version == 2) {
validateMagicNumber(in);
return deserializeV2(in);
}
throw new IOException("Unrecognized version or corrupt state: " + version);
}
|
@Test
void testCommittableCollectorV1SerDe() throws IOException {
final List<Integer> legacyState = Arrays.asList(1, 2, 3);
final DataOutputSerializer out = new DataOutputSerializer(256);
out.writeInt(SinkV1CommittableDeserializer.MAGIC_NUMBER);
SimpleVersionedSerialization.writeVersionAndSerializeList(
COMMITTABLE_SERIALIZER, legacyState, out);
final byte[] serialized = out.getCopyOfBuffer();
final CommittableCollector<Integer> committableCollector =
SERIALIZER.deserialize(1, serialized);
assertThat(committableCollector.getNumberOfSubtasks()).isEqualTo(1);
assertThat(committableCollector.isFinished()).isFalse();
assertThat(committableCollector.getSubtaskId()).isEqualTo(0);
final Collection<CheckpointCommittableManagerImpl<Integer>> checkpointCommittables =
committableCollector.getCheckpointCommittables();
assertThat(checkpointCommittables).hasSize(1);
final SubtaskCommittableManager<Integer> subtaskCommittableManager =
checkpointCommittables.iterator().next().getSubtaskCommittableManager(0);
assertThat(
subtaskCommittableManager
.getPendingRequests()
.map(CommitRequestImpl::getCommittable)
.collect(Collectors.toList()))
.containsExactly(1, 2, 3);
}
|
@Override
public SnowflakeTableMetadata loadTableMetadata(SnowflakeIdentifier tableIdentifier) {
Preconditions.checkArgument(
tableIdentifier.type() == SnowflakeIdentifier.Type.TABLE,
"loadTableMetadata requires a TABLE identifier, got '%s'",
tableIdentifier);
SnowflakeTableMetadata tableMeta;
try {
final String finalQuery = "SELECT SYSTEM$GET_ICEBERG_TABLE_INFORMATION(?) AS METADATA";
tableMeta =
connectionPool.run(
conn ->
queryHarness.query(
conn,
finalQuery,
TABLE_METADATA_RESULT_SET_HANDLER,
tableIdentifier.toIdentifierString()));
} catch (SQLException e) {
throw snowflakeExceptionToIcebergException(
tableIdentifier,
e,
String.format("Failed to get table metadata for '%s'", tableIdentifier));
} catch (InterruptedException e) {
throw new UncheckedInterruptedException(
e, "Interrupted while getting table metadata for '%s'", tableIdentifier);
}
return tableMeta;
}
|
@SuppressWarnings("unchecked")
@Test
public void testGetGcsTableMetadata() throws SQLException {
when(mockResultSet.next()).thenReturn(true);
when(mockResultSet.getString("METADATA"))
.thenReturn(
"{\"metadataLocation\":\"gcs://tab5/metadata/v793.metadata.json\",\"status\":\"success\"}");
SnowflakeTableMetadata actualMetadata =
snowflakeClient.loadTableMetadata(
SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_1"));
verify(mockQueryHarness)
.query(
eq(mockConnection),
eq("SELECT SYSTEM$GET_ICEBERG_TABLE_INFORMATION(?) AS METADATA"),
any(JdbcSnowflakeClient.ResultSetParser.class),
eq("DB_1.SCHEMA_1.TABLE_1"));
SnowflakeTableMetadata expectedMetadata =
new SnowflakeTableMetadata(
"gcs://tab5/metadata/v793.metadata.json",
"gs://tab5/metadata/v793.metadata.json",
"success",
null);
assertThat(actualMetadata).isEqualTo(expectedMetadata);
}
|
public DefaultIssue setLine(@Nullable Integer l) {
Preconditions.checkArgument(l == null || l > 0, "Line must be null or greater than zero (got %s)", l);
this.line = l;
return this;
}
|
@Test
void setLine_whenLineIsNegative_shouldThrowException() {
int anyNegativeValue = Integer.MIN_VALUE;
assertThatThrownBy(() -> issue.setLine(anyNegativeValue))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(String.format("Line must be null or greater than zero (got %s)", anyNegativeValue));
}
|
public static int ge0(int value, String name) {
return (int) ge0((long) value, name);
}
|
@Test(expected = IllegalArgumentException.class)
public void checkGELessThanZero() {
Check.ge0(-1, "test");
}
|
private Mono<ServerResponse> getByName(ServerRequest request) {
String name = request.pathVariable("name");
return client.get(Category.class, name)
.map(CategoryVo::from)
.flatMap(categoryVo -> ServerResponse.ok()
.contentType(MediaType.APPLICATION_JSON)
.bodyValue(categoryVo)
);
}
|
@Test
void getByName() {
Category category = new Category();
category.setMetadata(new Metadata());
category.getMetadata().setName("test");
when(client.get(eq(Category.class), eq("test"))).thenReturn(Mono.just(category));
webTestClient.get()
.uri("/categories/test")
.exchange()
.expectStatus().isOk()
.expectHeader().contentType(MediaType.APPLICATION_JSON)
.expectBody()
.jsonPath("$.metadata.name").isEqualTo(category.getMetadata().getName());
}
|
public String getNodeGroup(String loc) {
netlock.readLock().lock();
try {
loc = NodeBase.normalize(loc);
Node locNode = getNode(loc);
if (locNode instanceof InnerNodeWithNodeGroup) {
InnerNodeWithNodeGroup node = (InnerNodeWithNodeGroup) locNode;
if (node.isNodeGroup()) {
return loc;
} else if (node.isRack()) {
          // the node group of a rack cannot be determined
return null;
} else {
// may be a leaf node
if(!(node.getNetworkLocation() == null ||
node.getNetworkLocation().isEmpty())) {
return getNodeGroup(node.getNetworkLocation());
} else {
return NodeBase.ROOT;
}
}
} else {
// not in cluster map, don't handle it
return loc;
}
} finally {
netlock.readLock().unlock();
}
}
|
@Test
public void testNodeGroup() throws Exception {
String res = cluster.getNodeGroup("");
assertTrue("NodeGroup should be NodeBase.ROOT for empty location",
res.equals(NodeBase.ROOT));
    try {
      cluster.getNodeGroup(null);
      fail("Null Network Location should throw exception!");
    } catch (IllegalArgumentException e) {
      assertTrue("Null Network Location should throw exception!",
          e.getMessage().contains("Network Location is null"));
    }
}
|
boolean dropSession(final String clientId, boolean removeSessionState) {
LOG.debug("Disconnecting client: {}", clientId);
if (clientId == null) {
return false;
}
final Session client = pool.get(clientId);
if (client == null) {
LOG.debug("Client {} not found, nothing disconnected", clientId);
return false;
}
client.closeImmediately();
if (removeSessionState) {
purgeSessionState(client);
}
LOG.debug("Client {} successfully disconnected from broker", clientId);
return true;
}
|
@Test
public void testDropSessionWithNotExistingClientId() {
assertFalse(sut.dropSession(FAKE_CLIENT_ID, ANY_BOOLEAN), "Can't be successful when non existing clientId is passed");
}
|
@Retryable(DataAccessResourceFailureException.class)
@CacheEvict(value = CACHE_AVERAGE_REVIEW_RATING, allEntries = true)
public void updateSearchIndex() {
if (!isEnabled()) {
return;
}
var stopWatch = new StopWatch();
stopWatch.start();
updateSearchIndex(false);
stopWatch.stop();
logger.info("Updated search index in " + stopWatch.getTotalTimeMillis() + " ms");
}
|
@Test
public void testHardUpdateExists() {
var index = mockIndex(true);
mockExtensions();
search.updateSearchIndex(true);
assertThat(index.created).isTrue();
assertThat(index.deleted).isTrue();
assertThat(index.entries).hasSize(3);
}
|
@Nonnull
public InstanceConfig setBackupCount(int newBackupCount) {
checkBackupCount(newBackupCount, 0);
this.backupCount = newBackupCount;
return this;
}
|
@Test
public void when_NegativeBackupCount_thenThrowsException() {
// When
InstanceConfig instanceConfig = new InstanceConfig();
// Then
Assert.assertThrows(IllegalArgumentException.class, () -> instanceConfig.setBackupCount(-1));
}
|
@Override
public String[] split(String text) {
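        // Optionally expand common English contractions so that each word becomes its own token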
if (splitContraction) {
text = WONT_CONTRACTION.matcher(text).replaceAll("$1ill not");
text = SHANT_CONTRACTION.matcher(text).replaceAll("$1ll not");
text = AINT_CONTRACTION.matcher(text).replaceAll("$1m not");
for (Pattern regexp : NOT_CONTRACTIONS) {
text = regexp.matcher(text).replaceAll("$1 not");
}
for (Pattern regexp : CONTRACTIONS2) {
text = regexp.matcher(text).replaceAll("$1 $2");
}
for (Pattern regexp : CONTRACTIONS3) {
text = regexp.matcher(text).replaceAll("$1 $2 $3");
}
}
text = DELIMITERS[0].matcher(text).replaceAll(" $1 ");
text = DELIMITERS[1].matcher(text).replaceAll(" $1");
text = DELIMITERS[2].matcher(text).replaceAll(" $1");
text = DELIMITERS[3].matcher(text).replaceAll(" . ");
text = DELIMITERS[4].matcher(text).replaceAll(" $1 ");
String[] words = WHITESPACE.split(text);
if (words.length > 1 && words[words.length-1].equals(".")) {
if (EnglishAbbreviations.contains(words[words.length-2])) {
words[words.length-2] = words[words.length-2] + ".";
}
}
ArrayList<String> result = new ArrayList<>();
for (String token : words) {
if (!token.isEmpty()) {
result.add(token);
}
}
return result.toArray(new String[0]);
}
|
@Test
public void testTokenizeNonLatinChars() {
System.out.println("tokenize words containing non-Latin chars");
// See https://en.wikipedia.org/wiki/Zero-width_non-joiner
String text = "میخواهم עֲוֹנֹת Auflage";
String[] expResult = {"میخواهم", "עֲוֹנֹת", "Auflage"};
SimpleTokenizer instance = new SimpleTokenizer();
String[] result = instance.split(text);
assertEquals(expResult.length, result.length);
for (int i = 0; i < result.length; i++) {
assertEquals(expResult[i], result[i]);
}
}
|
@Override
public void accept(MetadataShellState state) {
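        // Resolve a relative glob against the current working directory before matching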
String fullGlob = glob.startsWith("/") ? glob :
state.workingDirectory() + "/" + glob;
List<String> globComponents =
CommandUtils.stripDotPathComponents(CommandUtils.splitPath(fullGlob));
MetadataNode root = state.root();
if (root == null) {
throw new RuntimeException("Invalid null root");
}
if (!accept(globComponents, 0, root, new String[0])) {
handler.accept(Optional.empty());
}
}
|
@Test
public void testAbsoluteGlob() {
InfoConsumer consumer = new InfoConsumer();
GlobVisitor visitor = new GlobVisitor("/a?pha", consumer);
visitor.accept(DATA);
assertEquals(Optional.of(Collections.singletonList(
new MetadataNodeInfo(new String[]{"alpha"},
DATA.root().child("alpha")))), consumer.infos);
}
|
@Override
public void execute(Exchange exchange) throws SmppException {
SubmitSm[] submitSms = createSubmitSm(exchange);
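        // A long message may have been split into several SubmitSm segments; submit each one and collect the returned message ids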
List<String> messageIDs = new ArrayList<>(submitSms.length);
String messageID = null;
for (int i = 0; i < submitSms.length; i++) {
SubmitSm submitSm = submitSms[i];
messageID = null;
if (log.isDebugEnabled()) {
log.debug("Sending short message {} for exchange id '{}'...", i, exchange.getExchangeId());
}
try {
SubmitSmResult result = session.submitShortMessage(
submitSm.getServiceType(),
TypeOfNumber.valueOf(submitSm.getSourceAddrTon()),
NumberingPlanIndicator.valueOf(submitSm.getSourceAddrNpi()),
submitSm.getSourceAddr(),
TypeOfNumber.valueOf(submitSm.getDestAddrTon()),
NumberingPlanIndicator.valueOf(submitSm.getDestAddrNpi()),
submitSm.getDestAddress(),
new ESMClass(submitSm.getEsmClass()),
submitSm.getProtocolId(),
submitSm.getPriorityFlag(),
submitSm.getScheduleDeliveryTime(),
submitSm.getValidityPeriod(),
new RegisteredDelivery(submitSm.getRegisteredDelivery()),
submitSm.getReplaceIfPresent(),
DataCodings.newInstance(submitSm.getDataCoding()),
(byte) 0,
submitSm.getShortMessage(),
submitSm.getOptionalParameters());
if (result != null) {
messageID = result.getMessageId();
}
} catch (Exception e) {
throw new SmppException(e);
}
if (messageID != null) {
messageIDs.add(messageID);
}
}
if (log.isDebugEnabled()) {
log.debug("Sent short message for exchange id '{}' and received message ids '{}'",
exchange.getExchangeId(), messageIDs);
}
Message message = ExchangeHelper.getResultMessage(exchange);
message.setHeader(SmppConstants.ID, messageIDs);
message.setHeader(SmppConstants.SENT_MESSAGE_COUNT, messageIDs.size());
}
|
@Test
public void bodyWithSMPP8bitDataCodingNotModified() throws Exception {
final byte dataCoding = (byte) 0x04; /* SMPP 8-bit */
byte[] body = { (byte) 0xFF, 'A', 'B', (byte) 0x00, (byte) 0xFF, (byte) 0x7F, 'C', (byte) 0xFF };
Exchange exchange = new DefaultExchange(new DefaultCamelContext(), ExchangePattern.InOut);
exchange.getIn().setHeader(SmppConstants.COMMAND, "SubmitSm");
exchange.getIn().setHeader(SmppConstants.DATA_CODING, dataCoding);
exchange.getIn().setBody(body);
when(session.submitShortMessage(eq("CMT"),
eq(TypeOfNumber.UNKNOWN),
eq(NumberingPlanIndicator.UNKNOWN),
eq("1616"),
eq(TypeOfNumber.UNKNOWN),
eq(NumberingPlanIndicator.UNKNOWN),
eq("1717"),
eq(new ESMClass()),
eq((byte) 0),
eq((byte) 1),
(String) isNull(),
(String) isNull(),
eq(new RegisteredDelivery(SMSCDeliveryReceipt.SUCCESS_FAILURE)),
eq(ReplaceIfPresentFlag.DEFAULT.value()),
eq(DataCodings.newInstance(dataCoding)),
eq((byte) 0),
eq(body)))
.thenReturn(new SubmitSmResult(new MessageId("1"), null));
command.execute(exchange);
assertEquals(Collections.singletonList("1"), exchange.getMessage().getHeader(SmppConstants.ID));
}
|
@VisibleForTesting
void validateDictTypeNameUnique(Long id, String name) {
DictTypeDO dictType = dictTypeMapper.selectByName(name);
if (dictType == null) {
return;
}
        // If id is null, there is no need to check whether the existing dict type has the same id
if (id == null) {
throw exception(DICT_TYPE_NAME_DUPLICATE);
}
if (!dictType.getId().equals(id)) {
throw exception(DICT_TYPE_NAME_DUPLICATE);
}
}
|
@Test
    public void testValidateDictTypeNameUnique_success() {
        // Invoke; it should succeed
dictTypeService.validateDictTypeNameUnique(randomLongId(), randomString());
}
|
@Override
public void clear() {
complete(asyncCounterMap.clear());
}
|
@Test
public void testClear() {
atomicCounterMap.putIfAbsent(KEY1, VALUE1);
assertThat(atomicCounterMap.size(), is(1));
atomicCounterMap.clear();
assertThat(atomicCounterMap.size(), is(0));
}
|
public MetricSampleCompleteness<G, E> completeness(long from, long to, AggregationOptions<G, E> options) {
_windowRollingLock.lock();
try {
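      // Clamp the requested range to the windows currently maintained; the still-open current window is excluded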
long fromWindowIndex = Math.max(windowIndex(from), _oldestWindowIndex);
long toWindowIndex = Math.min(windowIndex(to), _currentWindowIndex - 1);
if (fromWindowIndex > _currentWindowIndex || toWindowIndex < _oldestWindowIndex) {
return new MetricSampleCompleteness<>(generation(), _windowMs);
}
maybeUpdateAggregatorState();
return _aggregatorState.completeness(fromWindowIndex,
toWindowIndex,
interpretAggregationOptions(options),
generation());
} finally {
_windowRollingLock.unlock();
}
}
|
@Test
public void testAggregationOption1() {
MetricSampleAggregator<String, IntegerEntity> aggregator = prepareCompletenessTestEnv();
    // Let the group coverage be 1
AggregationOptions<String, IntegerEntity> options =
new AggregationOptions<>(0.5, 1, NUM_WINDOWS, 5,
new HashSet<>(Arrays.asList(ENTITY1, ENTITY2, ENTITY3)),
AggregationOptions.Granularity.ENTITY, true);
MetricSampleCompleteness<String, IntegerEntity> completeness =
aggregator.completeness(-1, Long.MAX_VALUE, options);
assertTrue(completeness.validWindowIndices().isEmpty());
assertTrue(completeness.validEntities().isEmpty());
assertTrue(completeness.validEntityGroups().isEmpty());
assertCompletenessByWindowIndex(completeness);
}
|
public static Dict loadByPath(String path) {
return loadByPath(path, Dict.class);
}
|
@Test
public void loadByPathTest() {
final Dict result = YamlUtil.loadByPath("test.yaml");
assertEquals("John", result.getStr("firstName"));
final List<Integer> numbers = result.getByPath("contactDetails.number");
assertEquals(123456789, (int) numbers.get(0));
assertEquals(456786868, (int) numbers.get(1));
}
|
@Override
public Num calculate(BarSeries series, Position position) {
int beginIndex = position.getEntry().getIndex();
int endIndex = series.getEndIndex();
return criterion.calculate(series, createEnterAndHoldTrade(series, beginIndex, endIndex));
}
|
@Test
public void calculateOnlyWithGainPositions() {
MockBarSeries series = new MockBarSeries(numFunction, 100, 105, 110, 100, 95, 105);
TradingRecord tradingRecord = new BaseTradingRecord(Trade.buyAt(0, series), Trade.sellAt(2, series),
Trade.buyAt(3, series), Trade.sellAt(5, series));
// buy and hold of ReturnCriterion
AnalysisCriterion buyAndHoldReturn = getCriterion(new ReturnCriterion());
assertNumEquals(1.05, buyAndHoldReturn.calculate(series, tradingRecord));
// sell and hold of ReturnCriterion
AnalysisCriterion sellAndHoldReturn = getCriterion(TradeType.SELL, new ReturnCriterion());
assertNumEquals(0.95, sellAndHoldReturn.calculate(series, tradingRecord));
// buy and hold of ProfitLossPercentageCriterion
AnalysisCriterion buyAndHoldPnlPercentage = getCriterion(new ProfitLossPercentageCriterion());
assertNumEquals(5, buyAndHoldPnlPercentage.calculate(series, tradingRecord));
// sell and hold of ProfitLossPercentageCriterion
AnalysisCriterion sellAndHoldPnlPercentage = getCriterion(TradeType.SELL, new ProfitLossPercentageCriterion());
assertNumEquals(-5, sellAndHoldPnlPercentage.calculate(series, tradingRecord));
}
|
@Override
public Result reconcile(Request request) {
client.fetch(Tag.class, request.name())
.ifPresent(tag -> {
if (ExtensionUtil.isDeleted(tag)) {
if (removeFinalizers(tag.getMetadata(), Set.of(FINALIZER_NAME))) {
client.update(tag);
}
return;
}
addFinalizers(tag.getMetadata(), Set.of(FINALIZER_NAME));
Map<String, String> annotations = MetadataUtil.nullSafeAnnotations(tag);
String newPattern = tagPermalinkPolicy.pattern();
annotations.put(Constant.PERMALINK_PATTERN_ANNO, newPattern);
String permalink = tagPermalinkPolicy.permalink(tag);
var status = tag.getStatusOrDefault();
status.setPermalink(permalink);
// Update the observed version.
status.setObservedVersion(tag.getMetadata().getVersion() + 1);
client.update(tag);
});
return Result.doNotRetry();
}
|
@Test
void reconcile() {
Tag tag = tag();
when(client.fetch(eq(Tag.class), eq("fake-tag")))
.thenReturn(Optional.of(tag));
when(tagPermalinkPolicy.permalink(any()))
.thenAnswer(arg -> "/tags/" + tag.getSpec().getSlug());
ArgumentCaptor<Tag> captor = ArgumentCaptor.forClass(Tag.class);
tagReconciler.reconcile(new TagReconciler.Request("fake-tag"));
verify(client).update(captor.capture());
Tag capture = captor.getValue();
assertThat(capture.getStatus().getPermalink()).isEqualTo("/tags/fake-slug");
// change slug
tag.getSpec().setSlug("new-slug");
tagReconciler.reconcile(new TagReconciler.Request("fake-tag"));
verify(client, times(2)).update(captor.capture());
assertThat(capture.getStatus().getPermalink()).isEqualTo("/tags/new-slug");
}
|
public void shutDown() throws InterruptedException {
this.stopped = true;
interrupt();
try {
join(5000);
} catch (InterruptedException ie) {
LOG.warn("Got interrupt while joining " + getName(), ie);
}
if (sslFactory != null) {
sslFactory.destroy();
}
}
|
@Test(timeout=10000)
public void testInterruptOnDisk() throws Exception {
final int FETCHER = 7;
Path p = new Path("file:///tmp/foo");
Path pTmp = OnDiskMapOutput.getTempPath(p, FETCHER);
FileSystem mFs = mock(FileSystem.class, RETURNS_DEEP_STUBS);
IFileWrappedMapOutput<Text,Text> odmo =
spy(new OnDiskMapOutput<Text,Text>(map1ID, mm, 100L, job,
FETCHER, true, mFs, p));
when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
.thenReturn(odmo);
doNothing().when(mm).waitForResource();
when(ss.getHost()).thenReturn(host);
String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);
when(connection.getResponseCode()).thenReturn(200);
when(connection.getHeaderField(
SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH)).thenReturn(replyHash);
ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
ByteArrayOutputStream bout = new ByteArrayOutputStream();
header.write(new DataOutputStream(bout));
final StuckInputStream in =
new StuckInputStream(new ByteArrayInputStream(bout.toByteArray()));
when(connection.getInputStream()).thenReturn(in);
when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
.thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
.thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
doAnswer(new Answer<Void>() {
public Void answer(InvocationOnMock ignore) throws IOException {
in.close();
return null;
}
}).when(connection).disconnect();
Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
r, metrics, except, key, connection, FETCHER);
underTest.start();
// wait for read in inputstream
in.waitForFetcher();
underTest.shutDown();
underTest.join(); // rely on test timeout to kill if stuck
assertTrue(in.wasClosedProperly());
verify(mFs).create(eq(pTmp));
verify(mFs).delete(eq(pTmp), eq(false));
verify(odmo).abort();
}
|
@Override
public ResultSet getSchemas() throws SQLException {
return createDatabaseMetaDataResultSet(getDatabaseMetaData().getSchemas());
}
|
@Test
void assertGetSchemas() throws SQLException {
when(databaseMetaData.getSchemas()).thenReturn(resultSet);
assertThat(shardingSphereDatabaseMetaData.getSchemas(), instanceOf(DatabaseMetaDataResultSet.class));
}
|
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
try {
if(!session.getClient().changeWorkingDirectory(directory.getAbsolute())) {
throw new FTPException(session.getClient().getReplyCode(), session.getClient().getReplyString());
}
if(!session.getClient().setFileType(FTPClient.ASCII_FILE_TYPE)) {
// Set transfer type for traditional data socket file listings. The data transfer is over the
// data connection in type ASCII or type EBCDIC.
throw new FTPException(session.getClient().getReplyCode(), session.getClient().getReplyString());
}
final List<String> list = new DataConnectionActionExecutor(session).data(new DataConnectionAction<List<String>>() {
@Override
public List<String> execute() throws BackgroundException {
try {
return session.getClient().list(command.getCommand(), command.getArg());
}
catch(IOException e) {
throw new FTPExceptionMappingService().map(e);
}
}
});
return reader.read(directory, list);
}
catch(IOException e) {
throw new FTPExceptionMappingService().map("Listing directory {0} failed", e, directory);
}
}
|
@Test
public void testListDefaultFlag() throws Exception {
final ListService list = new FTPDefaultListService(session, new CompositeFileEntryParser(Collections.singletonList(new UnixFTPEntryParser())),
FTPListService.Command.lista);
final Path directory = new FTPWorkdirService(session).find();
final Path file = new Path(directory, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
new FTPTouchFeature(session).touch(file, new TransferStatus());
assertTrue(list.list(directory, new DisabledListProgressListener()).contains(file));
new FTPDeleteFeature(session).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
|
public List<JobTriggerDto> getAllForJob(String jobDefinitionId) {
if (isNullOrEmpty(jobDefinitionId)) {
throw new IllegalArgumentException("jobDefinitionId cannot be null or empty");
}
return stream(collection.find(eq(FIELD_JOB_DEFINITION_ID, jobDefinitionId))).toList();
}
|
@Test
@MongoDBFixtures("job-triggers.json")
public void getAllForJob() {
        // We expect an IllegalStateException when there is more than one trigger for a single job definition
assertThatCode(() -> dbJobTriggerService.getOneForJob("54e3deadbeefdeadbeefaff3"))
.isInstanceOf(IllegalStateException.class)
.hasMessageContaining("54e3deadbeefdeadbeefaff3");
        // But we can obtain all of them by calling the following method:
assertThat(dbJobTriggerService.getAllForJob("54e3deadbeefdeadbeefaff3"))
.hasSize(2)
.allSatisfy(trigger -> assertThat(trigger.jobDefinitionId()).isEqualTo("54e3deadbeefdeadbeefaff3"));
}
|
public long betweenYear(boolean isReset) {
final Calendar beginCal = DateUtil.calendar(begin);
final Calendar endCal = DateUtil.calendar(end);
int result = endCal.get(Calendar.YEAR) - beginCal.get(Calendar.YEAR);
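        // Unless resetting to the start of the year, reduce the raw year difference when a full year has not yet elapsed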
if (false == isReset) {
final int beginMonthBase0 = beginCal.get(Calendar.MONTH);
final int endMonthBase0 = endCal.get(Calendar.MONTH);
if (beginMonthBase0 < endMonthBase0) {
return result;
} else if (beginMonthBase0 > endMonthBase0) {
return result - 1;
} else if (Calendar.FEBRUARY == beginMonthBase0
&& CalendarUtil.isLastDayOfMonth(beginCal)
&& CalendarUtil.isLastDayOfMonth(endCal)) {
            // Handle February in leap years:
            // if both dates fall on the last day of February, treat the months as equal by normalizing both to the 1st
beginCal.set(Calendar.DAY_OF_MONTH, 1);
endCal.set(Calendar.DAY_OF_MONTH, 1);
}
endCal.set(Calendar.YEAR, beginCal.get(Calendar.YEAR));
long between = endCal.getTimeInMillis() - beginCal.getTimeInMillis();
if (between < 0) {
return result - 1;
}
}
return result;
}
|
@Test
public void betweenYearTest() {
Date start = DateUtil.parse("2017-02-01 12:23:46");
Date end = DateUtil.parse("2018-02-01 12:23:46");
long betweenYear = new DateBetween(start, end).betweenYear(false);
assertEquals(1, betweenYear);
Date start1 = DateUtil.parse("2017-02-01 12:23:46");
Date end1 = DateUtil.parse("2018-03-01 12:23:46");
long betweenYear1 = new DateBetween(start1, end1).betweenYear(false);
assertEquals(1, betweenYear1);
        // Less than one full year
Date start2 = DateUtil.parse("2017-02-01 12:23:46");
Date end2 = DateUtil.parse("2018-02-01 11:23:46");
long betweenYear2 = new DateBetween(start2, end2).betweenYear(false);
assertEquals(0, betweenYear2);
}
|
@Override
public BasicTypeDefine reconvert(Column column) {
BasicTypeDefine.BasicTypeDefineBuilder builder =
BasicTypeDefine.builder()
.name(column.getName())
.nullable(column.isNullable())
.comment(column.getComment())
.defaultValue(column.getDefaultValue());
switch (column.getDataType().getSqlType()) {
case BOOLEAN:
builder.columnType(XUGU_BOOLEAN);
builder.dataType(XUGU_BOOLEAN);
break;
case TINYINT:
builder.columnType(XUGU_TINYINT);
builder.dataType(XUGU_TINYINT);
break;
case SMALLINT:
builder.columnType(XUGU_SMALLINT);
builder.dataType(XUGU_SMALLINT);
break;
case INT:
builder.columnType(XUGU_INTEGER);
builder.dataType(XUGU_INTEGER);
break;
case BIGINT:
builder.columnType(XUGU_BIGINT);
builder.dataType(XUGU_BIGINT);
break;
case FLOAT:
builder.columnType(XUGU_FLOAT);
builder.dataType(XUGU_FLOAT);
break;
case DOUBLE:
builder.columnType(XUGU_DOUBLE);
builder.dataType(XUGU_DOUBLE);
break;
case DECIMAL:
DecimalType decimalType = (DecimalType) column.getDataType();
long precision = decimalType.getPrecision();
int scale = decimalType.getScale();
if (precision <= 0) {
precision = DEFAULT_PRECISION;
scale = DEFAULT_SCALE;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which is precision less than 0, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
precision,
scale);
} else if (precision > MAX_PRECISION) {
scale = (int) Math.max(0, scale - (precision - MAX_PRECISION));
precision = MAX_PRECISION;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which exceeds the maximum precision of {}, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
MAX_PRECISION,
precision,
scale);
}
if (scale < 0) {
scale = 0;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which is scale less than 0, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
precision,
scale);
} else if (scale > MAX_SCALE) {
scale = MAX_SCALE;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which exceeds the maximum scale of {}, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
MAX_SCALE,
precision,
scale);
}
builder.columnType(String.format("%s(%s,%s)", XUGU_NUMERIC, precision, scale));
builder.dataType(XUGU_NUMERIC);
builder.precision(precision);
builder.scale(scale);
break;
case BYTES:
if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
builder.columnType(XUGU_BLOB);
builder.dataType(XUGU_BLOB);
} else if (column.getColumnLength() <= MAX_BINARY_LENGTH) {
builder.columnType(XUGU_BINARY);
builder.dataType(XUGU_BINARY);
} else {
builder.columnType(XUGU_BLOB);
builder.dataType(XUGU_BLOB);
}
break;
case STRING:
if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
builder.columnType(String.format("%s(%s)", XUGU_VARCHAR, MAX_VARCHAR_LENGTH));
builder.dataType(XUGU_VARCHAR);
} else if (column.getColumnLength() <= MAX_VARCHAR_LENGTH) {
builder.columnType(
String.format("%s(%s)", XUGU_VARCHAR, column.getColumnLength()));
builder.dataType(XUGU_VARCHAR);
} else {
builder.columnType(XUGU_CLOB);
builder.dataType(XUGU_CLOB);
}
break;
case DATE:
builder.columnType(XUGU_DATE);
builder.dataType(XUGU_DATE);
break;
case TIME:
builder.dataType(XUGU_TIME);
if (column.getScale() != null && column.getScale() > 0) {
Integer timeScale = column.getScale();
if (timeScale > MAX_TIME_SCALE) {
timeScale = MAX_TIME_SCALE;
log.warn(
"The time column {} type time({}) is out of range, "
+ "which exceeds the maximum scale of {}, "
+ "it will be converted to time({})",
column.getName(),
column.getScale(),
                            MAX_TIME_SCALE,
timeScale);
}
builder.columnType(String.format("%s(%s)", XUGU_TIME, timeScale));
builder.scale(timeScale);
} else {
builder.columnType(XUGU_TIME);
}
break;
case TIMESTAMP:
if (column.getScale() == null || column.getScale() <= 0) {
builder.columnType(XUGU_TIMESTAMP);
} else {
int timestampScale = column.getScale();
if (column.getScale() > MAX_TIMESTAMP_SCALE) {
timestampScale = MAX_TIMESTAMP_SCALE;
log.warn(
"The timestamp column {} type timestamp({}) is out of range, "
+ "which exceeds the maximum scale of {}, "
+ "it will be converted to timestamp({})",
column.getName(),
column.getScale(),
MAX_TIMESTAMP_SCALE,
timestampScale);
}
builder.columnType(String.format("TIMESTAMP(%s)", timestampScale));
builder.scale(timestampScale);
}
builder.dataType(XUGU_TIMESTAMP);
break;
default:
throw CommonError.convertToConnectorTypeError(
DatabaseIdentifier.XUGU,
column.getDataType().getSqlType().name(),
column.getName());
}
return builder.build();
}
|
@Test
public void testReconvertDate() {
Column column =
PhysicalColumn.builder()
.name("test")
.dataType(LocalTimeType.LOCAL_DATE_TYPE)
.build();
BasicTypeDefine typeDefine = XuguTypeConverter.INSTANCE.reconvert(column);
Assertions.assertEquals(column.getName(), typeDefine.getName());
Assertions.assertEquals(XuguTypeConverter.XUGU_DATE, typeDefine.getColumnType());
Assertions.assertEquals(XuguTypeConverter.XUGU_DATE, typeDefine.getDataType());
}
|
public static RedissonClient create() {
Config config = new Config();
config.useSingleServer()
.setAddress("redis://127.0.0.1:6379");
return create(config);
}
|
@Test
public void testSingleConnectionFail() {
Assertions.assertThrows(RedisConnectionException.class, () -> {
Config config = new Config();
config.useSingleServer().setAddress("redis://127.99.0.1:1111");
Redisson.create(config);
Thread.sleep(1500);
});
}
|
@Override
public <T> byte[] serialize(T data) {
return JacksonUtils.toJsonBytes(data);
}
|
@Test
void testSerialize() {
String actual = new String(serializer.serialize(switchDomain));
assertTrue(actual.contains("\"defaultPushCacheMillis\":10000"));
assertTrue(actual.contains("\"clientBeatInterval\":5000"));
assertTrue(actual.contains("\"defaultCacheMillis\":3000"));
assertTrue(actual.contains("\"distroEnabled\":true"));
}
|
public boolean updateGlobalWhiteAddrsConfig(List<String> globalWhiteAddrsList) {
return this.updateGlobalWhiteAddrsConfig(globalWhiteAddrsList, this.defaultAclFile);
}
|
@Test
public void updateGlobalWhiteAddrsConfigTest() {
final boolean flag = plainPermissionManager.updateGlobalWhiteAddrsConfig(Lists.newArrayList("192.168.1.2"));
        Assert.assertTrue(flag);
        final AclConfig config = plainPermissionManager.getAllAclConfig();
        Assert.assertTrue(config.getGlobalWhiteAddrs().contains("192.168.1.2"));
}
|
@VisibleForTesting
boolean doWork(WorkItem workItem, WorkItemStatusClient workItemStatusClient) throws IOException {
LOG.debug("Executing: {}", workItem);
DataflowWorkExecutor worker = null;
try {
// Populate PipelineOptions with data from work unit.
options.setProject(workItem.getProjectId());
final String stageName;
if (workItem.getMapTask() != null) {
stageName = workItem.getMapTask().getStageName();
} else if (workItem.getSourceOperationTask() != null) {
stageName = workItem.getSourceOperationTask().getStageName();
} else {
throw new RuntimeException("Unknown kind of work item: " + workItem);
}
CounterSet counterSet = new CounterSet();
BatchModeExecutionContext executionContext =
BatchModeExecutionContext.create(
counterSet,
sideInputDataCache,
sideInputWeakReferenceCache,
readerRegistry,
options,
stageName,
String.valueOf(workItem.getId()));
if (workItem.getMapTask() != null) {
MutableNetwork<Node, Edge> network = mapTaskToNetwork.apply(workItem.getMapTask());
if (LOG.isDebugEnabled()) {
LOG.debug("Network as Graphviz .dot: {}", Networks.toDot(network));
}
worker =
mapTaskExecutorFactory.create(
network,
options,
stageName,
readerRegistry,
sinkRegistry,
executionContext,
counterSet,
idGenerator);
} else if (workItem.getSourceOperationTask() != null) {
worker =
SourceOperationExecutorFactory.create(
options,
workItem.getSourceOperationTask(),
counterSet,
executionContext,
stageName);
} else {
throw new IllegalStateException("Work Item was neither a MapTask nor a SourceOperation");
}
workItemStatusClient.setWorker(worker, executionContext);
DataflowWorkProgressUpdater progressUpdater =
new DataflowWorkProgressUpdater(workItemStatusClient, workItem, worker, options);
executeWork(worker, progressUpdater);
workItemStatusClient.reportSuccess();
return true;
} catch (Throwable e) {
workItemStatusClient.reportError(e);
return false;
} finally {
if (worker != null) {
try {
worker.close();
} catch (Exception exn) {
LOG.warn(
"Uncaught exception while closing worker. All work has already committed or "
+ "been marked for retry.",
exn);
}
}
}
}
|
@Test
public void testWhenProcessingWorkUnitFailsWeReportStatus() throws Exception {
BatchDataflowWorker worker =
new BatchDataflowWorker(
mockWorkUnitClient, IntrinsicMapTaskExecutorFactory.defaultFactory(), options);
// In practice this value is always 1, but for the sake of testing send a different value.
long initialReportIndex = 4L;
WorkItem workItem =
new WorkItem()
.setId(1L)
.setJobId("Expected to fail the job")
.setInitialReportIndex(initialReportIndex);
WorkItemStatusClient workItemStatusClient = mock(WorkItemStatusClient.class);
worker.doWork(workItem, workItemStatusClient);
ArgumentCaptor<Throwable> errorCaptor = ArgumentCaptor.forClass(Throwable.class);
verify(workItemStatusClient).reportError(errorCaptor.capture());
Throwable error = errorCaptor.getValue();
assertThat(error, notNullValue());
assertThat(error.getMessage(), equalTo("Unknown kind of work item: " + workItem));
}
|
@Activate
public void activate() {
localNodeId = clusterService.getLocalNode().id();
leadershipService.addListener(leaderListener);
listenerRegistry = new ListenerRegistry<>();
eventDispatcher.addSink(WorkPartitionEvent.class, listenerRegistry);
for (int i = 0; i < NUM_PARTITIONS; i++) {
leadershipService.runForLeadership(getPartitionPath(i));
log.debug("Registered to run for {}", getPartitionPath(i));
}
executor.scheduleAtFixedRate(() -> scheduleRebalance(0), 0,
CHECK_PARTITION_BALANCE_PERIOD_SEC, TimeUnit.SECONDS);
log.info("Started");
}
|
@Test
public void testRebalanceScheduling() {
// We have all the partitions so we'll need to relinquish some
setUpLeadershipService(WorkPartitionManager.NUM_PARTITIONS);
replay(leadershipService);
partitionManager.activate();
// Send in the event
leaderListener.event(event);
assertTrue(partitionManager.rebalanceScheduled.get());
verify(leadershipService);
}
|
public MonitorBuilder appendParameter(String key, String value) {
this.parameters = appendParameter(parameters, key, value);
return getThis();
}
|
@Test
void appendParameter() {
MonitorBuilder builder = MonitorBuilder.newBuilder();
builder.appendParameter("default.num", "one").appendParameter("num", "ONE");
Map<String, String> parameters = builder.build().getParameters();
Assertions.assertTrue(parameters.containsKey("default.num"));
Assertions.assertEquals("ONE", parameters.get("num"));
}
|
@Override
public void delete(final Map<Path, TransferStatus> files, final PasswordCallback prompt, final Callback callback) throws BackgroundException {
for(Path file : files.keySet()) {
try {
callback.delete(file);
if(containerService.isContainer(file)) {
final Storage.Buckets.Delete request = session.getClient().buckets().delete(file.getName());
if(containerService.getContainer(file).attributes().getCustom().containsKey(GoogleStorageAttributesFinderFeature.KEY_REQUESTER_PAYS)) {
request.setUserProject(session.getHost().getCredentials().getUsername());
}
request.execute();
}
else {
final Storage.Objects.Delete request = session.getClient().objects().delete(containerService.getContainer(file).getName(), containerService.getKey(file));
if(containerService.getContainer(file).attributes().getCustom().containsKey(GoogleStorageAttributesFinderFeature.KEY_REQUESTER_PAYS)) {
request.setUserProject(session.getHost().getCredentials().getUsername());
}
final VersioningConfiguration versioning = null != session.getFeature(Versioning.class) ? session.getFeature(Versioning.class).getConfiguration(
containerService.getContainer(file)
) : VersioningConfiguration.empty();
if(versioning.isEnabled()) {
if(StringUtils.isNotBlank(file.attributes().getVersionId())) {
// You permanently delete versions of objects by including the generation number in the deletion request
request.setGeneration(Long.parseLong(file.attributes().getVersionId()));
}
}
request.execute();
}
}
catch(IOException e) {
final BackgroundException failure = new GoogleStorageExceptionMappingService().map("Cannot delete {0}", e, file);
if(file.isDirectory()) {
if(failure instanceof NotfoundException) {
// No placeholder file may exist but we just have a common prefix
continue;
}
}
throw failure;
}
}
}
|
@Test
public void testDeleteContainer() throws Exception {
final Path container = new Path(new AsciiRandomStringService().random().toLowerCase(Locale.ROOT), EnumSet.of(Path.Type.volume, Path.Type.directory));
new GoogleStorageDirectoryFeature(session).mkdir(container, new TransferStatus().withRegion("us"));
assertTrue(new GoogleStorageFindFeature(session).find(container));
new GoogleStorageDeleteFeature(session).delete(Collections.singletonList(container), new DisabledLoginCallback(), new Delete.DisabledCallback());
assertFalse(new GoogleStorageFindFeature(session).find(container));
}
|
@Override
public FsCheckpointStateOutputStream createCheckpointStateOutputStream(
CheckpointedStateScope scope) throws IOException {
Path target = getTargetPath(scope);
int bufferSize = Math.max(writeBufferSize, fileStateThreshold);
// Whether the file system dynamically injects entropy into the file paths.
final boolean entropyInjecting = EntropyInjector.isEntropyInjecting(filesystem, target);
final boolean absolutePath = entropyInjecting || scope == CheckpointedStateScope.SHARED;
return new FsCheckpointStateOutputStream(
target, filesystem, bufferSize, fileStateThreshold, !absolutePath);
}
|
@Test
void testSharedStateHasAbsolutePathHandles() throws IOException {
final FsCheckpointStreamFactory factory = createFactory(FileSystem.getLocalFileSystem(), 0);
final FsCheckpointStreamFactory.FsCheckpointStateOutputStream stream =
factory.createCheckpointStateOutputStream(CheckpointedStateScope.SHARED);
stream.write(0);
final StreamStateHandle handle = stream.closeAndGetHandle();
assertThat(handle).isInstanceOf(FileStateHandle.class);
assertThat(handle).isNotInstanceOf(RelativeFileStateHandle.class);
assertPathsEqual(sharedStateDir, ((FileStateHandle) handle).getFilePath().getParent());
}
|
public static void error(Logger logger, Throwable e) {
if (logger == null) {
return;
}
if (logger.isErrorEnabled()) {
logger.error(e);
}
}
|
@Test
void testError() {
Logger logger = Mockito.mock(Logger.class);
when(logger.isErrorEnabled()).thenReturn(true);
LogHelper.error(logger, "error");
verify(logger).error("error");
Throwable t = new RuntimeException();
LogHelper.error(logger, t);
verify(logger).error(t);
LogHelper.error(logger, "error", t);
verify(logger).error("error", t);
}
|
@Override
public void run() {
logSubprocess();
}
|
@Test
public void shouldNotLogAnythingWhenNoChildProcessesFound() {
CurrentProcess currentProcess = mock(CurrentProcess.class);
logger = new SubprocessLogger(currentProcess);
try (LogFixture log = logFixtureFor(SubprocessLogger.class, Level.ALL)) {
logger.run();
String result;
synchronized (log) {
result = log.getLog();
}
assertThat(result, is(""));
}
}
|
public static <T> List<T> sub(List<T> list, int start, int end) {
return ListUtil.sub(list, start, end);
}
|
@Test
public void subInput1ZeroPositivePositiveOutput1() {
// Arrange
final List<Integer> list = new ArrayList<>();
list.add(null);
final int start = 0;
final int end = 1;
final int step = 2;
// Act
final List<Integer> retval = CollUtil.sub(list, start, end, step);
// Assert result
final List<Integer> arrayList = new ArrayList<>();
arrayList.add(null);
assertEquals(arrayList, retval);
}
|
@Override
public void distributeIssueChangeEvent(DefaultIssue issue, @Nullable String severity, @Nullable String type, @Nullable String transition,
BranchDto branch, String projectKey) {
Issue changedIssue = new Issue(issue.key(), branch.getKey());
Boolean resolved = isResolved(transition);
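    // Nothing to push if neither severity, type nor resolution changed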
if (severity == null && type == null && resolved == null) {
return;
}
IssueChangedEvent event = new IssueChangedEvent(projectKey, new Issue[]{changedIssue},
resolved, severity, type);
persistEvent(event, branch.getProjectUuid());
}
|
@Test
public void distributeIssueChangeEvent_whenBulkIssueChange_shouldDistributesEvents() {
RuleDto rule = db.rules().insert();
ProjectData projectData1 = db.components().insertPublicProject();
ProjectDto project1 = projectData1.getProjectDto();
BranchDto branch1 = projectData1.getMainBranchDto();
ComponentDto componentDto1 = projectData1.getMainBranchComponent();
IssueDto issue1 = db.issues().insert(rule, branch1, componentDto1, i -> i.setSeverity(MAJOR.name()).setType(RuleType.BUG));
ProjectData projectData2 = db.components().insertPublicProject();
ProjectDto project2 = projectData2.getProjectDto();
BranchDto branch2 = projectData2.getMainBranchDto();
ComponentDto componentDto2 = projectData2.getMainBranchComponent();
IssueDto issue2 = db.issues().insert(rule, branch2, componentDto2, i -> i.setSeverity(MAJOR.name()).setType(RuleType.BUG));
ProjectData projectData3 = db.components().insertPublicProject();
ProjectDto project3 = projectData3.getProjectDto();
BranchDto branch3 = projectData3.getMainBranchDto();
ComponentDto componentDto3 = projectData3.getMainBranchComponent();
IssueDto issue3 = db.issues().insert(rule, branch3, componentDto3, i -> i.setSeverity(MAJOR.name()).setType(RuleType.BUG));
DefaultIssue defaultIssue1 = issue1.toDefaultIssue().setCurrentChangeWithoutAddChange(new FieldDiffs()
.setDiff("resolution", null, null)
.setDiff("severity", MAJOR.name(), CRITICAL.name())
.setDiff("type", RuleType.BUG.name(), CODE_SMELL.name()));
DefaultIssue defaultIssue2 = issue2.toDefaultIssue().setCurrentChangeWithoutAddChange(new FieldDiffs()
.setDiff("resolution", "OPEN", "FALSE-POSITIVE")
.setDiff("severity", MAJOR.name(), CRITICAL.name())
.setDiff("type", RuleType.BUG.name(), CODE_SMELL.name()));
Set<DefaultIssue> issues = Set.of(defaultIssue1, defaultIssue2, issue3.toDefaultIssue());
Map<String, ComponentDto> projectsByUuid = new HashMap<>();
projectsByUuid.put(componentDto1.branchUuid(), componentDto1);
projectsByUuid.put(componentDto2.branchUuid(), componentDto2);
projectsByUuid.put(componentDto3.branchUuid(), componentDto3);
Map<String, BranchDto> branchesByProjectUuid = new HashMap<>();
branchesByProjectUuid.put(componentDto1.branchUuid(), branch1);
branchesByProjectUuid.put(componentDto2.branchUuid(), branch2);
branchesByProjectUuid.put(componentDto3.branchUuid(), branch3);
underTest.distributeIssueChangeEvent(issues, projectsByUuid, branchesByProjectUuid);
Deque<PushEventDto> issueChangedEvents = db.getDbClient().pushEventDao()
.selectChunkByProjectUuids(db.getSession(), Set.of(project1.getUuid(), project2.getUuid()),
        1L, null, 3);
assertThat(issueChangedEvents).hasSize(2);
assertThat(issueChangedEvents)
.extracting(PushEventDto::getName, PushEventDto::getProjectUuid)
.containsExactlyInAnyOrder(
tuple("IssueChanged", project1.getUuid()),
tuple("IssueChanged", project2.getUuid()));
Optional<PushEventDto> project1Event = issueChangedEvents.stream().filter(e -> e.getProjectUuid().equals(project1.getUuid())).findFirst();
Optional<PushEventDto> project2Event = issueChangedEvents.stream().filter(e -> e.getProjectUuid().equals(project2.getUuid())).findFirst();
assertThat(project1Event).isPresent();
assertThat(project2Event).isPresent();
String firstPayload = new String(project1Event.get().getPayload(), StandardCharsets.UTF_8);
assertThat(firstPayload)
.contains("\"userSeverity\":\"" + CRITICAL.name() + "\"",
"\"userType\":\"" + CODE_SMELL.name() + "\"",
"\"resolved\":" + false);
String secondPayload = new String(project2Event.get().getPayload(), StandardCharsets.UTF_8);
assertThat(secondPayload)
.contains("\"userSeverity\":\"" + CRITICAL.name() + "\"",
"\"userType\":\"" + CODE_SMELL.name() + "\"",
"\"resolved\":" + true);
}
|
@Nullable
public TrackerClient getTrackerClient(Request request,
RequestContext requestContext,
Ring<URI> ring,
Map<URI, TrackerClient> trackerClients)
{
TrackerClient trackerClient;
URI targetHostUri = KeyMapper.TargetHostHints.getRequestContextTargetHost(requestContext);
if (targetHostUri != null)
{
trackerClient = getTrackerClientFromTarget(targetHostUri, requestContext, trackerClients);
}
else
{
trackerClient = getTrackerClientFromRing(request, requestContext, ring, trackerClients);
}
addToExcludedHosts(trackerClient, requestContext);
return trackerClient;
}
|
@Test
public void testSubstituteClientFromRing()
{
URI newUri = URI.create("new_uri");
@SuppressWarnings("unchecked")
Ring<URI> ring = Mockito.mock(Ring.class);
Mockito.when(ring.get(anyInt())).thenReturn(newUri);
    List<URI> ringIteratorList = Arrays.asList(newUri, URI_1, URI_2, URI_3);
    Mockito.when(ring.getIterator(anyInt())).thenReturn(ringIteratorList.iterator());
TrackerClient trackerClient = _clientSelector.getTrackerClient(_request, _requestContext, ring, DEFAULT_TRACKER_CLIENT_MAP);
assertTrue(DEFAULT_TRACKER_CLIENT_MAP.containsKey(trackerClient.getUri()));
}
|
boolean connectedToRepository() {
return repository.isConnected();
}
|
@Test
public void connectedToRepository() {
when( repository.isConnected() ).thenReturn( true );
assertTrue( timeoutHandler.connectedToRepository() );
}
|
public void createNewCodeDefinition(DbSession dbSession, String projectUuid, String mainBranchUuid,
String defaultBranchName, String newCodeDefinitionType, @Nullable String newCodeDefinitionValue) {
boolean isCommunityEdition = editionProvider.get().filter(EditionProvider.Edition.COMMUNITY::equals).isPresent();
NewCodePeriodType newCodePeriodType = parseNewCodeDefinitionType(newCodeDefinitionType);
NewCodePeriodDto dto = new NewCodePeriodDto();
dto.setType(newCodePeriodType);
dto.setProjectUuid(projectUuid);
if (isCommunityEdition) {
dto.setBranchUuid(mainBranchUuid);
}
getNewCodeDefinitionValueProjectCreation(newCodePeriodType, newCodeDefinitionValue, defaultBranchName).ifPresent(dto::setValue);
if (!CaycUtils.isNewCodePeriodCompliant(dto.getType(), dto.getValue())) {
throw new IllegalArgumentException("Failed to set the New Code Definition. The given value is not compatible with the Clean as You Code methodology. "
+ "Please refer to the documentation for compliant options.");
}
dbClient.newCodePeriodDao().insert(dbSession, dto);
}
|
@Test
public void createNewCodeDefinition_throw_IAE_if_value_is_set_for_reference_branch() {
assertThatThrownBy(() -> newCodeDefinitionResolver.createNewCodeDefinition(dbSession, DEFAULT_PROJECT_ID, MAIN_BRANCH_UUID, MAIN_BRANCH, REFERENCE_BRANCH.name(), "feature/zw"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("Unexpected value for newCodeDefinitionType 'REFERENCE_BRANCH'");
}
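For contrast with the failure case above, a short usage sketch reusing the test's own identifiers (the compliant-value claim assumes the CaYC rules accept NUMBER_OF_DAYS values up to 90, which is not shown here):
// Illustrative only: a compliant definition that should pass the CaycUtils check and be inserted.
newCodeDefinitionResolver.createNewCodeDefinition(dbSession, DEFAULT_PROJECT_ID, MAIN_BRANCH_UUID,
    MAIN_BRANCH, NewCodePeriodType.NUMBER_OF_DAYS.name(), "30");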
|
@Override
public String[] requiredModules() {
return new String[] {CoreModule.NAME};
}
|
@Test
public void requiredModules() {
String[] modules = provider.requiredModules();
assertArrayEquals(new String[] {CoreModule.NAME}, modules);
}
|
public long scan(
final UnsafeBuffer termBuffer,
final long rebuildPosition,
final long hwmPosition,
final long nowNs,
final int termLengthMask,
final int positionBitsToShift,
final int initialTermId)
{
boolean lossFound = false;
int rebuildOffset = (int)rebuildPosition & termLengthMask;
if (rebuildPosition < hwmPosition)
{
final int rebuildTermCount = (int)(rebuildPosition >>> positionBitsToShift);
final int hwmTermCount = (int)(hwmPosition >>> positionBitsToShift);
final int rebuildTermId = initialTermId + rebuildTermCount;
final int hwmTermOffset = (int)hwmPosition & termLengthMask;
final int limitOffset = rebuildTermCount == hwmTermCount ? hwmTermOffset : termLengthMask + 1;
rebuildOffset = scanForGap(termBuffer, rebuildTermId, rebuildOffset, limitOffset, this);
if (rebuildOffset < limitOffset)
{
if (scannedTermOffset != activeTermOffset || scannedTermId != activeTermId)
{
activateGap(nowNs);
lossFound = true;
}
checkTimerExpiry(nowNs);
}
}
return pack(rebuildOffset, lossFound);
}
|
@Test
void shouldHandleMoreThan2Gaps()
{
long rebuildPosition = ACTIVE_TERM_POSITION;
final long hwmPosition = ACTIVE_TERM_POSITION + (ALIGNED_FRAME_LENGTH * 7L);
insertDataFrame(offsetOfMessage(0));
insertDataFrame(offsetOfMessage(2));
insertDataFrame(offsetOfMessage(4));
insertDataFrame(offsetOfMessage(6));
lossDetector.scan(termBuffer, rebuildPosition, hwmPosition, currentTime, MASK, POSITION_BITS_TO_SHIFT, TERM_ID);
currentTime = TimeUnit.MILLISECONDS.toNanos(40);
lossDetector.scan(termBuffer, rebuildPosition, hwmPosition, currentTime, MASK, POSITION_BITS_TO_SHIFT, TERM_ID);
insertDataFrame(offsetOfMessage(1));
rebuildPosition += (ALIGNED_FRAME_LENGTH * 3L);
lossDetector.scan(termBuffer, rebuildPosition, hwmPosition, currentTime, MASK, POSITION_BITS_TO_SHIFT, TERM_ID);
currentTime = TimeUnit.MILLISECONDS.toNanos(80);
lossDetector.scan(termBuffer, rebuildPosition, hwmPosition, currentTime, MASK, POSITION_BITS_TO_SHIFT, TERM_ID);
final InOrder inOrder = inOrder(lossHandler);
inOrder.verify(lossHandler, atLeast(1)).onGapDetected(TERM_ID, offsetOfMessage(1), gapLength());
inOrder.verify(lossHandler, atLeast(1)).onGapDetected(TERM_ID, offsetOfMessage(3), gapLength());
inOrder.verify(lossHandler, never()).onGapDetected(TERM_ID, offsetOfMessage(5), gapLength());
}
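The scan above returns both results packed into a single long via the pack helper, whose implementation is not shown here. One plausible encoding (an assumption for illustration, not the verified source) puts the rebuild offset in the upper 32 bits and the loss flag in the lowest bit:
// Hypothetical sketch of the un-shown pack helper: offset in the high 32 bits, loss flag in bit 0.
static long pack(final int rebuildOffset, final boolean lossFound)
{
    return ((long)rebuildOffset << 32) | (lossFound ? 1L : 0L);
}
// Callers would then unpack with (int)(result >>> 32) and (result & 1L) == 1L.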
|
public static double kendall(int[] x, int[] y) {
if (x.length != y.length) {
throw new IllegalArgumentException("Input vector sizes are different.");
}
int is = 0, n2 = 0, n1 = 0, n = x.length;
double aa, a2, a1;
for (int j = 0; j < n - 1; j++) {
for (int k = j + 1; k < n; k++) {
a1 = x[j] - x[k];
a2 = y[j] - y[k];
aa = a1 * a2;
if (aa != 0.0) {
++n1;
++n2;
if (aa > 0) {
++is;
} else {
--is;
}
} else {
if (a1 != 0.0) {
++n1;
}
if (a2 != 0.0) {
++n2;
}
}
}
}
return is / (sqrt(n1) * sqrt(n2));
}
|
@Test
public void testKendall_doubleArr_doubleArr() {
System.out.println("kendall");
double[] x = {-2.1968219, -0.9559913, -0.0431738, 1.0567679, 0.3853515};
double[] y = {-1.7781325, -0.6659839, 0.9526148, -0.9460919, -0.3925300};
assertEquals(0.2, MathEx.kendall(x, y), 1E-7);
}
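To make the concordant/discordant bookkeeping concrete, a small worked example (an illustrative sketch; it assumes the int[] overload shown above is exposed as MathEx.kendall):
// For x = {1, 2, 3} and y = {1, 3, 2} there are three pairs:
// (0,1) and (0,2) are concordant, (1,2) is discordant, and there are no ties,
// so is = 2 - 1 = 1, n1 = n2 = 3 and tau = 1 / (sqrt(3) * sqrt(3)) = 1/3.
int[] x = {1, 2, 3};
int[] y = {1, 3, 2};
double tau = MathEx.kendall(x, y); // expected ~0.3333 under this assumption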
|
public FEELFnResult<BigDecimal> invoke(@ParameterName("from") String from, @ParameterName("grouping separator") String group, @ParameterName("decimal separator") String decimal) {
if ( from == null ) {
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "from", "cannot be null"));
}
if ( group != null && !group.equals( " " ) && !group.equals( "." ) && !group.equals( "," ) ) {
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "group", "not a valid one, can only be one of: dot ('.'), comma (','), space (' ') "));
}
if ( decimal != null ) {
if (!decimal.equals( "." ) && !decimal.equals( "," )) {
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "decimal", "not a valid one, can only be one of: dot ('.'), comma (',') "));
} else if (group != null && decimal.equals( group )) {
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "decimal", "cannot be the same as parameter 'group' "));
}
}
if ( group != null ) {
from = from.replaceAll( "\\" + group, "" );
}
if ( decimal != null ) {
from = from.replaceAll( "\\" + decimal, "." );
}
BigDecimal result = NumberEvalHelper.getBigDecimalOrNull(from );
if( from != null && result == null ) {
// conversion failed
return FEELFnResult.ofError( new InvalidParametersEvent(Severity.ERROR, "unable to calculate final number result" ) );
} else {
return FEELFnResult.ofResult( result );
}
}
|
@Test
void invokeNull() {
FunctionTestUtil.assertResultError(numberFunction.invoke(null, null, null), InvalidParametersEvent.class);
FunctionTestUtil.assertResultError(numberFunction.invoke(null, " ", null), InvalidParametersEvent.class);
FunctionTestUtil.assertResultError(numberFunction.invoke(null, null, "."), InvalidParametersEvent.class);
FunctionTestUtil.assertResultError(numberFunction.invoke(null, " ", "."), InvalidParametersEvent.class);
}
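A short usage sketch reusing the test's numberFunction: the grouping separator is stripped and the decimal separator is normalised to a dot before parsing. The expected values are assumptions derived from the code above, not taken from the FEEL test suite:
// Illustrative calls and their expected results under the parsing shown above.
numberFunction.invoke("1 000 000.01", " ", "."); // -> 1000000.01
numberFunction.invoke("1.000.000,01", ".", ","); // -> 1000000.01
numberFunction.invoke("9.876", ".", null); // -> 9876 (dot treated as a grouping separator)
numberFunction.invoke("1,000", ",", "."); // -> 1000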
|
public static byte[] blob2Bytes(Blob blob) {
if (blob == null) {
return null;
}
try {
return blob.getBytes(1, (int) blob.length());
} catch (Exception e) {
throw new ShouldNeverHappenException(e);
}
}
|
@Test
public void testBlob2Bytes() throws UnsupportedEncodingException, SQLException {
assertNull(BlobUtils.blob2Bytes(null));
byte[] bs = "xxaaadd".getBytes(Constants.DEFAULT_CHARSET_NAME);
assertThat(BlobUtils.blob2Bytes(new SerialBlob(bs))).isEqualTo(bs);
}
|
public List<String> collectErrorsFromAllNodes() {
List<String> errors = new ArrayList<>();
for (T node : mNodeResults.values()) {
// add all the errors for this node, prefixing each message with the node id
for (String err : node.getErrors()) {
errors.add(String.format("%s :%s", node.getBaseParameters().mId, err));
}
}
return errors;
}
|
@Test
public void collectErrorFromAllNodesWithErrors() {
// test summary with errors
TestMultipleNodeSummary summary = new TestMultipleNodeSummary();
summary.addTaskResultWithoutErrors(4);
summary.addTaskResultWithErrors(3);
List<String> list = summary.collectErrorsFromAllNodes();
assertEquals(list.size(), 3);
Set<String> set = new HashSet<>(list);
for (int i = 4; i < 6; i++) {
String message = String.format("task%s :error%s", i, i);
assertTrue(set.contains(message));
}
}
|
@Override
public List<PrivilegedOperation> bootstrap(Configuration conf)
throws ResourceHandlerException {
super.bootstrap(conf);
swappiness = conf
.getInt(YarnConfiguration.NM_MEMORY_RESOURCE_CGROUPS_SWAPPINESS,
YarnConfiguration.DEFAULT_NM_MEMORY_RESOURCE_CGROUPS_SWAPPINESS);
if (swappiness < 0 || swappiness > 100) {
throw new ResourceHandlerException(
"Illegal value '" + swappiness + "' for "
+ YarnConfiguration.NM_MEMORY_RESOURCE_CGROUPS_SWAPPINESS
+ ". Value must be between 0 and 100.");
}
return null;
}
|
@Test
public void testOpportunistic() throws Exception {
Configuration conf = new YarnConfiguration();
conf.setBoolean(YarnConfiguration.NM_PMEM_CHECK_ENABLED, false);
conf.setBoolean(YarnConfiguration.NM_VMEM_CHECK_ENABLED, false);
cGroupsMemoryResourceHandler.bootstrap(conf);
ContainerTokenIdentifier tokenId = mock(ContainerTokenIdentifier.class);
when(tokenId.getExecutionType()).thenReturn(ExecutionType.OPPORTUNISTIC);
Container container = mock(Container.class);
String id = "container_01_01";
ContainerId mockContainerId = mock(ContainerId.class);
when(mockContainerId.toString()).thenReturn(id);
when(container.getContainerId()).thenReturn(mockContainerId);
when(container.getContainerTokenIdentifier()).thenReturn(tokenId);
when(container.getResource()).thenReturn(Resource.newInstance(1024, 2));
cGroupsMemoryResourceHandler.preStart(container);
verify(mockCGroupsHandler, times(1))
.updateCGroupParam(CGroupsHandler.CGroupController.MEMORY, id,
CGroupsHandler.CGROUP_PARAM_MEMORY_SOFT_LIMIT_BYTES, "0M");
verify(mockCGroupsHandler, times(1))
.updateCGroupParam(CGroupsHandler.CGroupController.MEMORY, id,
CGroupsHandler.CGROUP_PARAM_MEMORY_SWAPPINESS, "100");
verify(mockCGroupsHandler, times(1))
.updateCGroupParam(CGroupsHandler.CGroupController.MEMORY, id,
CGroupsHandler.CGROUP_PARAM_MEMORY_HARD_LIMIT_BYTES, "1024M");
}
|
public static Set<Result> anaylze(String log) {
Set<Result> results = new HashSet<>();
for (Rule rule : Rule.values()) {
Matcher matcher = rule.pattern.matcher(log);
if (matcher.find()) {
results.add(new Result(rule, log, matcher));
}
}
return results;
}
|
@Test
public void config() throws IOException {
CrashReportAnalyzer.Result result = findResultByRule(
CrashReportAnalyzer.anaylze(loadLog("/crash-report/config.txt")),
CrashReportAnalyzer.Rule.CONFIG);
assertEquals("jumbofurnace", result.getMatcher().group("id"));
assertEquals("jumbofurnace-server.toml", result.getMatcher().group("file"));
}
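A minimal sketch of the rule shape that anaylze relies on (a hypothetical enum and regex for illustration, not HMCL's actual Rule definitions): each rule carries a compiled Pattern, and named groups such as id and file are read back later through the Matcher stored in Result.
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Hypothetical rule: one compiled pattern per rule, using named capture groups.
enum SketchRule {
    CONFIG(Pattern.compile("Failed loading config file (?<file>\\S+).*for modid (?<id>\\S+)"));

    final Pattern pattern;

    SketchRule(Pattern pattern) {
        this.pattern = pattern;
    }
}

// Usage: run every rule over the log and keep the ones that match.
// Matcher matcher = SketchRule.CONFIG.pattern.matcher(log);
// if (matcher.find()) { String modId = matcher.group("id"); }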
|
@Override
public void invoke() throws Exception {
// --------------------------------------------------------------------
// Initialize
// --------------------------------------------------------------------
initInputFormat();
LOG.debug(getLogString("Start registering input and output"));
try {
initOutputs(getEnvironment().getUserCodeClassLoader());
} catch (Exception ex) {
throw new RuntimeException(
"The initialization of the DataSource's outputs caused an error: "
+ ex.getMessage(),
ex);
}
LOG.debug(getLogString("Finished registering input and output"));
// --------------------------------------------------------------------
// Invoke
// --------------------------------------------------------------------
LOG.debug(getLogString("Starting data source operator"));
RuntimeContext ctx = createRuntimeContext();
final Counter numRecordsOut;
{
Counter tmpNumRecordsOut;
try {
InternalOperatorIOMetricGroup ioMetricGroup =
((InternalOperatorMetricGroup) ctx.getMetricGroup()).getIOMetricGroup();
ioMetricGroup.reuseInputMetricsForTask();
if (this.config.getNumberOfChainedStubs() == 0) {
ioMetricGroup.reuseOutputMetricsForTask();
}
tmpNumRecordsOut = ioMetricGroup.getNumRecordsOutCounter();
} catch (Exception e) {
LOG.warn("An exception occurred during the metrics setup.", e);
tmpNumRecordsOut = new SimpleCounter();
}
numRecordsOut = tmpNumRecordsOut;
}
Counter completedSplitsCounter = ctx.getMetricGroup().counter("numSplitsProcessed");
if (RichInputFormat.class.isAssignableFrom(this.format.getClass())) {
((RichInputFormat) this.format).setRuntimeContext(ctx);
LOG.debug(getLogString("Rich Source detected. Initializing runtime context."));
((RichInputFormat) this.format).openInputFormat();
LOG.debug(getLogString("Rich Source detected. Opening the InputFormat."));
}
ExecutionConfig executionConfig = getExecutionConfig();
boolean objectReuseEnabled = executionConfig.isObjectReuseEnabled();
LOG.debug(
"DataSourceTask object reuse: "
+ (objectReuseEnabled ? "ENABLED" : "DISABLED")
+ ".");
final TypeSerializer<OT> serializer = this.serializerFactory.getSerializer();
try {
// start all chained tasks
BatchTask.openChainedTasks(this.chainedTasks, this);
// get input splits to read
final Iterator<InputSplit> splitIterator = getInputSplits();
// for each assigned input split
while (!this.taskCanceled && splitIterator.hasNext()) {
// get start and end
final InputSplit split = splitIterator.next();
LOG.debug(getLogString("Opening input split " + split.toString()));
final InputFormat<OT, InputSplit> format = this.format;
// open input format
format.open(split);
LOG.debug(getLogString("Starting to read input from split " + split.toString()));
try {
final Collector<OT> output =
new CountingCollector<>(this.output, numRecordsOut);
if (objectReuseEnabled) {
OT reuse = serializer.createInstance();
// as long as there is data to read
while (!this.taskCanceled && !format.reachedEnd()) {
OT returned;
if ((returned = format.nextRecord(reuse)) != null) {
output.collect(returned);
}
}
} else {
// as long as there is data to read
while (!this.taskCanceled && !format.reachedEnd()) {
OT returned;
if ((returned = format.nextRecord(serializer.createInstance()))
!= null) {
output.collect(returned);
}
}
}
if (LOG.isDebugEnabled() && !this.taskCanceled) {
LOG.debug(getLogString("Closing input split " + split.toString()));
}
} finally {
// close. We close here such that a regular close throwing an exception marks a
// task as failed.
format.close();
}
completedSplitsCounter.inc();
} // end for all input splits
// close all chained tasks letting them report failure
BatchTask.closeChainedTasks(this.chainedTasks, this);
// close the output collector
this.output.close();
} catch (Exception ex) {
// close the input, but do not report any exceptions, since we already have another root
// cause
try {
this.format.close();
} catch (Throwable ignored) {
}
BatchTask.cancelChainedTasks(this.chainedTasks);
ex = ExceptionInChainedStubException.exceptionUnwrap(ex);
if (ex instanceof CancelTaskException) {
// forward canceling exception
throw ex;
} else if (!this.taskCanceled) {
// drop exception, if the task was canceled
BatchTask.logAndThrowException(ex, this);
}
} finally {
BatchTask.clearWriters(eventualOutputs);
// --------------------------------------------------------------------
// Closing
// --------------------------------------------------------------------
if (this.format != null
&& RichInputFormat.class.isAssignableFrom(this.format.getClass())) {
((RichInputFormat) this.format).closeInputFormat();
LOG.debug(getLogString("Rich Source detected. Closing the InputFormat."));
}
}
if (!this.taskCanceled) {
LOG.debug(getLogString("Finished data source operator"));
} else {
LOG.debug(getLogString("Data source operator cancelled"));
}
}
|
@Test
void testFailingDataSourceTask() throws IOException {
int keyCnt = 20;
int valCnt = 10;
this.outList = new NirvanaOutputList();
File tempTestFile = new File(tempFolder.toFile(), UUID.randomUUID().toString());
InputFilePreparator.prepareInputFile(
new UniformRecordGenerator(keyCnt, valCnt, false), tempTestFile, false);
super.initEnvironment(MEMORY_MANAGER_SIZE, NETWORK_BUFFER_SIZE);
super.addOutput(this.outList);
DataSourceTask<Record> testTask = new DataSourceTask<>(this.mockEnv);
super.registerFileInputTask(
testTask, MockFailingInputFormat.class, tempTestFile.toURI().toString(), "\n");
boolean stubFailed = false;
try {
testTask.invoke();
} catch (Exception e) {
stubFailed = true;
}
assertThat(stubFailed).withFailMessage("Function exception was not forwarded.").isTrue();
// assert that temp file was created
assertThat(tempTestFile).withFailMessage("Temp output file does not exist").exists();
}
|
@Override
public T get(int unused)
{
if (_cumulativePointsMap.isEmpty())
{
LOG.warn("Calling get on an empty ring, null value will be returned");
return null;
}
int rand = ThreadLocalRandom.current().nextInt(_totalPoints);
return _cumulativePointsMap.higherEntry(rand).getValue();
}
|
@Test
public void testLoadBalancingCapacity() throws Exception {
Map<URI, Integer> pointsMap = new HashMap<>();
Map<URI, Integer> countsMap = new HashMap<>();
List<URI> goodHosts = addHostsToPointMap(10, 100, pointsMap);
List<URI> averageHosts = addHostsToPointMap(10, 80, pointsMap);
List<URI> badHosts = addHostsToPointMap(10, 40, pointsMap);
goodHosts.forEach((host) -> {
countsMap.put(host, 0);
});
averageHosts.forEach((host) -> {
countsMap.put(host, 0);
});
badHosts.forEach((host) -> {
countsMap.put(host, 0);
});
Ring<URI> ring = new DistributionNonDiscreteRingFactory<URI>().createRing(pointsMap);
int trials = 100000;
trial(trials, countsMap, ring);
double goodAvg = goodHosts.stream().map((host) -> {
return countsMap.get(host);
}).reduce(0, (a, b) -> a + b) / goodHosts.size();
double averageAvg = averageHosts.stream().map((host) -> {
return countsMap.get(host);
}).reduce(0, (a, b) -> a + b) / averageHosts.size();
double badAvg = badHosts.stream().map((host) -> {
return countsMap.get(host);
}).reduce(0, (a, b) -> a + b) / badHosts.size();
Assert.assertTrue(goodAvg > averageAvg);
Assert.assertTrue(averageAvg > badAvg);
}
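The get above performs weighted random selection over a cumulative-points map: each host owns a slice of [0, totalPoints) proportional to its points, and higherEntry finds the slice the random draw falls into. A self-contained sketch of the same idea (a hypothetical class, not d2's DistributionNonDiscreteRing itself):
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ThreadLocalRandom;

// Minimal weighted picker: hosts with more points are returned proportionally more often.
final class WeightedPicker<T> {
    private final TreeMap<Integer, T> cumulative = new TreeMap<>();
    private int totalPoints = 0;

    WeightedPicker(Map<T, Integer> pointsMap) {
        for (Map.Entry<T, Integer> e : pointsMap.entrySet()) {
            int points = e.getValue();
            if (points <= 0) {
                continue; // zero-point hosts never get traffic and would collide on the same key
            }
            totalPoints += points;
            // the key is the cumulative upper bound of this host's slice
            cumulative.put(totalPoints, e.getKey());
        }
    }

    T pick() {
        if (cumulative.isEmpty() || totalPoints == 0) {
            return null; // mirror the empty-ring behaviour above
        }
        int rand = ThreadLocalRandom.current().nextInt(totalPoints);
        return cumulative.higherEntry(rand).getValue();
    }
}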
|