focal_method
stringlengths 13
60.9k
| test_case
stringlengths 25
109k
|
|---|---|
@Override
public boolean containsAll(Collection c) {
// True iff every element of c is contained in this collection; short-circuits on the first miss.
return c.stream().allMatch(this::contains);
}
|
/**
 * Verifies that a TopicIds view over a four-topic metadata image reports both
 * per-element contains() and bulk containsAll() for all four topic ids.
 */
@Test
public void testContainsAll() {
Uuid fooUuid = Uuid.randomUuid();
Uuid barUuid = Uuid.randomUuid();
Uuid bazUuid = Uuid.randomUuid();
Uuid quxUuid = Uuid.randomUuid();
// Build a metadata image with four topics of 3 partitions each.
TopicsImage topicsImage = new MetadataImageBuilder()
.addTopic(fooUuid, "foo", 3)
.addTopic(barUuid, "bar", 3)
.addTopic(bazUuid, "baz", 3)
.addTopic(quxUuid, "qux", 3)
.build()
.topics();
Set<Uuid> topicIds = new TopicIds(mkSet("foo", "bar", "baz", "qux"), topicsImage);
assertTrue(topicIds.contains(fooUuid));
assertTrue(topicIds.contains(barUuid));
assertTrue(topicIds.contains(bazUuid));
assertTrue(topicIds.contains(quxUuid));
// Bulk check exercises containsAll() over all four ids at once.
assertTrue(topicIds.containsAll(mkSet(fooUuid, barUuid, bazUuid, quxUuid)));
}
|
/**
 * Publishes a release for the given namespace and notifies config clients via a
 * release message.
 *
 * Throws NotFoundException when the namespace does not exist. When the namespace
 * has a parent (gray-release/branch case), the message is keyed by the parent's
 * cluster so clients of the main cluster are notified.
 */
@Transactional
@PostMapping("/apps/{appId}/clusters/{clusterName}/namespaces/{namespaceName}/releases")
public ReleaseDTO publish(@PathVariable("appId") String appId,
@PathVariable("clusterName") String clusterName,
@PathVariable("namespaceName") String namespaceName,
@RequestParam("name") String releaseName,
@RequestParam(name = "comment", required = false) String releaseComment,
@RequestParam("operator") String operator,
@RequestParam(name = "isEmergencyPublish", defaultValue = "false") boolean isEmergencyPublish) {
Namespace namespace = namespaceService.findOne(appId, clusterName, namespaceName);
if (namespace == null) {
throw NotFoundException.namespaceNotFound(appId, clusterName, namespaceName);
}
Release release = releaseService.publish(namespace, releaseName, releaseComment, operator, isEmergencyPublish);
//send release message
Namespace parentNamespace = namespaceService.findParentNamespace(namespace);
String messageCluster;
if (parentNamespace != null) {
// Child namespace: notify using the parent namespace's cluster.
messageCluster = parentNamespace.getClusterName();
} else {
messageCluster = clusterName;
}
messageSender.sendMessage(ReleaseMessageKeyGenerator.generate(appId, messageCluster, namespaceName),
Topics.APOLLO_RELEASE_TOPIC);
return BeanUtils.transform(ReleaseDTO.class, release);
}
|
/**
 * Verifies that publishing a release sends exactly one release message whose key
 * is appId+cluster+namespace joined by the cluster-namespace separator.
 */
@Test
public void testMessageSendAfterBuildRelease() throws Exception {
String someAppId = "someAppId";
String someNamespaceName = "someNamespace";
String someCluster = "someCluster";
String someName = "someName";
String someComment = "someComment";
NamespaceService someNamespaceService = mock(NamespaceService.class);
ReleaseService someReleaseService = mock(ReleaseService.class);
MessageSender someMessageSender = mock(MessageSender.class);
Namespace someNamespace = mock(Namespace.class);
ReleaseController releaseController = new ReleaseController(someReleaseService, someNamespaceService, someMessageSender, null);
// The namespace lookup must succeed for publish() to proceed.
when(someNamespaceService.findOne(someAppId, someCluster, someNamespaceName))
.thenReturn(someNamespace);
releaseController
.publish(someAppId, someCluster, someNamespaceName, someName, someComment, "test", false);
// No parent namespace is mocked, so the message key uses the request's own cluster.
verify(someMessageSender, times(1))
.sendMessage(Joiner.on(ConfigConsts.CLUSTER_NAMESPACE_SEPARATOR)
.join(someAppId, someCluster, someNamespaceName),
Topics.APOLLO_RELEASE_TOPIC);
}
|
/**
 * Parses the gateway flow parameters for the given resource from the request.
 *
 * Returns an empty array when: arguments are invalid, no rules exist for the
 * resource, or the rule predicate does not uniformly accept all param rules
 * (mixed/false predicate results short-circuit). Each parsed value is stored at
 * the index declared by its rule's param item; when any rule without a param
 * item exists, one extra trailing slot holds the default gateway parameter.
 */
public Object[] parseParameterFor(String resource, T request, Predicate<GatewayFlowRule> rulePredicate) {
if (StringUtil.isEmpty(resource) || request == null || rulePredicate == null) {
return new Object[0];
}
Set<GatewayFlowRule> gatewayRules = new HashSet<>();
Set<Boolean> predSet = new HashSet<>();
boolean hasNonParamRule = false;
for (GatewayFlowRule rule : GatewayRuleManager.getRulesForResource(resource)) {
if (rule.getParamItem() != null) {
gatewayRules.add(rule);
predSet.add(rulePredicate.test(rule));
} else {
// Rules without a param item still reserve the trailing default slot.
hasNonParamRule = true;
}
}
if (!hasNonParamRule && gatewayRules.isEmpty()) {
return new Object[0];
}
// predSet holding both true and false, or only false, means the predicate rejected some rule.
if (predSet.size() > 1 || predSet.contains(false)) {
return new Object[0];
}
int size = hasNonParamRule ? gatewayRules.size() + 1 : gatewayRules.size();
Object[] arr = new Object[size];
for (GatewayFlowRule rule : gatewayRules) {
GatewayParamFlowItem paramItem = rule.getParamItem();
// Each param item carries its pre-assigned slot index.
int idx = paramItem.getIndex();
String param = parseInternal(paramItem, request);
arr[idx] = param;
}
if (hasNonParamRule) {
arr[size - 1] = SentinelGatewayConstants.GATEWAY_DEFAULT_PARAM;
}
return arr;
}
|
/**
 * Verifies that a rule whose param item has an empty pattern still parses the
 * header value verbatim — an empty pattern must not take effect as a matcher.
 */
@Test
public void testParseParametersWithEmptyItemPattern() {
RequestItemParser<Object> itemParser = mock(RequestItemParser.class);
GatewayParamParser<Object> paramParser = new GatewayParamParser<>(itemParser);
// Create a fake request.
Object request = new Object();
// Prepare gateway rules.
Set<GatewayFlowRule> rules = new HashSet<>();
final String routeId = "my_test_route_DS(*H";
final String headerName = "X-Sentinel-Flag";
GatewayFlowRule routeRule1 = new GatewayFlowRule(routeId)
.setCount(10)
.setIntervalSec(2)
.setParamItem(new GatewayParamFlowItem()
.setParseStrategy(SentinelGatewayConstants.PARAM_PARSE_STRATEGY_HEADER)
.setFieldName(headerName)
.setPattern("")
.setMatchStrategy(SentinelGatewayConstants.PARAM_MATCH_STRATEGY_EXACT)
);
rules.add(routeRule1);
GatewayRuleManager.loadRules(rules);
// Stub the parser so the header resolves to a known value.
mockSingleHeader(itemParser, headerName, "Sent1nel");
Object[] params = paramParser.parseParameterFor(routeId, request, routeIdPredicate);
assertThat(params.length).isEqualTo(1);
// Empty pattern should not take effect.
assertThat(params[routeRule1.getParamItem().getIndex()]).isEqualTo("Sent1nel");
}
|
/**
 * Inserts a new, empty BackgroundData at the given position.
 *
 * @param index insertion point; valid range is 0..size() inclusive (size() appends)
 * @return the newly created BackgroundData
 * @throws IndexOutOfBoundsException when index is negative or greater than the current size
 */
@Override
public BackgroundData addData(int index) {
if (index < 0 || index > scesimData.size()) {
// Plain concatenation instead of an explicit StringBuilder for a two-part message.
throw new IndexOutOfBoundsException("Index out of range " + index);
}
BackgroundData backgroundData = new BackgroundData();
scesimData.add(index, backgroundData);
return backgroundData;
}
|
/**
 * Verifies addData() bounds checking: a valid in-range insert succeeds while a
 * negative index and an index past size() both throw IndexOutOfBoundsException.
 */
@Test
public void addData() {
background.addData(1);
assertThatThrownBy(() -> background.addData(-1)).isInstanceOf(IndexOutOfBoundsException.class);
assertThatThrownBy(() -> background.addData(3)).isInstanceOf(IndexOutOfBoundsException.class);
}
|
/**
 * Accepts the file unless the superclass rejects it or its name matches the
 * exclusion regex, in which case the file is skipped (with a debug log entry).
 */
@Override
public boolean accept(final Path file) {
if(!super.accept(file)) {
return false;
}
// Names matching the configured pattern are excluded from download.
if(pattern.matcher(file.getName()).matches()) {
if(log.isDebugEnabled()) {
log.debug(String.format("Skip %s excluded with regex", file.getAbsolute()));
}
return false;
}
return true;
}
|
/**
 * Verifies the regex download filter: names matching the exclusion pattern are
 * rejected, non-matching names pass, and an empty pattern excludes nothing.
 */
@Test
public void testAccept() {
final Pattern pattern = Pattern.compile(".*~\\..*|\\.DS_Store|\\.svn|CVS|RCS|SCCS|\\.git|\\.bzr|\\.bzrignore|\\.bzrtags|\\.hg|\\.hgignore|\\.hgtags|_darcs|\\.file-segments");
assertFalse(new DownloadRegexFilter(pattern).accept(new Path(".DS_Store", EnumSet.of(Path.Type.file))));
assertTrue(new DownloadRegexFilter(pattern).accept(new Path("f", EnumSet.of(Path.Type.file))));
// Removed a verbatim duplicate of this assertion that added no coverage.
assertTrue(new DownloadRegexFilter(Pattern.compile("")).accept(new Path("f", EnumSet.of(Path.Type.file))));
}
|
/**
 * Converts the given value to a Date, returning defaultValue instead of
 * throwing when conversion fails (delegates to convertQuietly).
 */
public static Date toDate(Object value, Date defaultValue) {
return convertQuietly(Date.class, value, defaultValue);
}
|
/**
 * Verifies that an unparseable/ambiguous input ("2021-01") converts quietly to
 * null rather than throwing.
 */
@Test
public void toDateTest2() {
final Date date = Convert.toDate("2021-01");
assertNull(date);
}
|
@Override
public String getServerStatus() {
// Report UP when the worker's health check succeeds, DOWN otherwise.
return worker.isHealthServer() ? UP : DOWN;
}
|
/**
 * Verifies both branches of getServerStatus(): a healthy worker yields "UP",
 * an unhealthy one yields "DOWN", with one health-check call per invocation.
 */
@Test
void testGetServerStatus() {
Mockito.when(mockWoker.isHealthServer()).thenReturn(true);
assertEquals("UP", nacosConfigService.getServerStatus());
Mockito.verify(mockWoker, Mockito.times(1)).isHealthServer();
Mockito.when(mockWoker.isHealthServer()).thenReturn(false);
assertEquals("DOWN", nacosConfigService.getServerStatus());
// Cumulative count: two status queries means two health-check calls in total.
Mockito.verify(mockWoker, Mockito.times(2)).isHealthServer();
}
|
/**
 * Applies the given stats-update function to the table via the underlying
 * metastore. When the delegate is not a CachingHiveMetastore, the table is
 * refreshed afterwards (even if the update throws) so readers see fresh state.
 */
public void updateTableStatistics(String dbName, String tableName, Function<HivePartitionStats, HivePartitionStats> update) {
try {
metastore.updateTableStatistics(dbName, tableName, update);
} finally {
// A caching metastore maintains its own invalidation; otherwise force a refresh.
if (!(metastore instanceof CachingHiveMetastore)) {
refreshTable(dbName, tableName, true);
}
}
}
|
/**
 * Smoke test: updating table statistics through a CachingHiveMetastore completes
 * without triggering an extra refresh (the caching branch is exercised).
 */
@Test
public void testUpdateTableStats() {
CachingHiveMetastore cachingHiveMetastore = new CachingHiveMetastore(
metastore, executor, expireAfterWriteSec, refreshAfterWriteSec, 1000, false);
HivePartitionStats partitionStats = HivePartitionStats.empty();
cachingHiveMetastore.updateTableStatistics("db", "table", ignore -> partitionStats);
}
|
/**
 * Maps a local-variable slot to its remapped slot, allocating a new slot when
 * the existing mapping cannot be reused for the requested value size.
 *
 * @param var  original variable slot (may already carry REMAP_FLAG)
 * @param size value size in slots: 1 (int-like) or 2 (long/double)
 * @return the unmasked remapped slot index
 */
public int remap(int var, int size) {
// Already-remapped vars are returned as-is (flag stripped).
if ((var & REMAP_FLAG) != 0) {
return unmask(var);
}
int offset = var - argsSize;
if (offset < 0) {
// self projection for method arguments
return var;
}
// Grow the mapping table on demand (amortized doubling).
if (offset >= mapping.length) {
mapping = Arrays.copyOf(mapping, Math.max(mapping.length * 2, offset + 1));
}
int mappedVar = mapping[offset];
int unmasked = unmask(mappedVar);
boolean isRemapped = ((mappedVar & REMAP_FLAG) != 0);
if (size == 2) {
if ((mappedVar & DOUBLE_SLOT_FLAG) == 0) {
// no double slot mapping over an int slot;
// must re-map unless the int slot is the last used one or there is a free double-ext slot
isRemapped = false;
}
} else {
// size == 1
if ((mappedVar & DOUBLE_SLOT_FLAG_2) != 0) {
// no mapping over a previously 2-slot value
isRemapped = false;
} else if ((mappedVar & DOUBLE_SLOT_FLAG) != 0) {
// the previously second part of the double slot is free to reuse
mapping[unmasked + 1] = (unmasked + 1) | REMAP_FLAG;
}
}
// No reusable mapping: allocate a fresh slot and record it.
if (!isRemapped) {
mappedVar = remapVar(newVarIdxInternal(size), size);
setMapping(offset, mappedVar, size);
}
unmasked = unmask(mappedVar);
// adjust the mapping pointer if remapping with variable occupying 2 slots
nextMappedVar = Math.max(unmasked + size, nextMappedVar);
return unmasked;
}
|
/**
 * Exercises remap() with interleaved single- and double-slot values: identity
 * mapping while slots are free, forced re-allocation when a double would
 * overlap a live slot, and reuse of freed double-slot extensions.
 */
@Test
void testRemapDoublesAndSingles() {
assertEquals(0, instance.remap(0, 1));
assertEquals(1, instance.remap(1, 2));
assertEquals(3, instance.remap(3, 2));
assertEquals(5, instance.remap(5, 1));
assertEquals(6, instance.remap(6, 1));
assertEquals(1, instance.remap(1, 1)); // change slot 1 to single-slot value
assertEquals(2, instance.remap(2, 1)); // 1 slot value should fit easily
assertEquals(7, instance.remap(5, 2)); // can't overwrite slot 6
assertEquals(9, instance.remap(4, 1)); // can not write to the double-slot-extension
assertEquals(3, instance.remap(3, 1)); // change slot 3 to single-slot value
assertEquals(4, instance.remap(4, 1)); // and suddenly the slot 4 is free to grab
}
|
/**
 * Flushes, fsyncs and closes the temp-file stream, then atomically promotes the
 * temp file over the original on success. On failure the temp file is deleted
 * (and the stream closed if flush failed) so the original stays untouched.
 *
 * @throws IOException if flushing/closing fails, or the rename/delete of the
 *                     destination fails on the success path
 */
@Override
public void close() throws IOException {
boolean triedToClose = false, success = false;
try {
flush();
// Force data to disk before the rename so the swap is durable.
((FileOutputStream)out).getChannel().force(true);
triedToClose = true;
super.close();
success = true;
} finally {
if (success) {
boolean renamed = tmpFile.renameTo(origFile);
if (!renamed) {
// On windows, renameTo does not replace.
if (origFile.exists()) {
try {
Files.delete(origFile.toPath());
} catch (IOException e) {
throw new IOException("Could not delete original file " + origFile, e);
}
}
// Fall back to native rename after removing the destination.
try {
NativeIO.renameTo(tmpFile, origFile);
} catch (NativeIOException e) {
throw new IOException("Could not rename temporary file " + tmpFile
+ " to " + origFile + " due to failure in native rename. "
+ e.toString());
}
}
} else {
if (!triedToClose) {
// If we failed when flushing, try to close it to not leak an FD
IOUtils.closeStream(out);
}
// close wasn't successful, try to delete the tmp file
if (!tmpFile.delete()) {
LOG.warn("Unable to delete tmp file " + tmpFile);
}
}
}
}
|
/**
 * Verifies the failure path of close(): when flushing fails, close() throws,
 * the original destination file is left untouched, and the temp file is
 * cleaned up from the directory.
 */
@Test
public void testFailToFlush() throws IOException {
// Create a file at destination
FileOutputStream fos = new FileOutputStream(DST_FILE);
fos.write(TEST_STRING_2.getBytes());
fos.close();
OutputStream failingStream = createFailingStream();
failingStream.write(TEST_STRING.getBytes());
try {
failingStream.close();
fail("Close didn't throw exception");
} catch (IOException ioe) {
// expected
}
// Should not have touched original file
assertEquals(TEST_STRING_2, DFSTestUtil.readFile(DST_FILE));
// Directory listing contains only the destination — temp file was deleted.
assertEquals("Temporary file should have been cleaned up",
DST_FILE.getName(), Joiner.on(",").join(TEST_DIR.list()));
}
|
@Override
public void connectionEstablished(
TieredStorageSubpartitionId subpartitionId,
NettyConnectionWriter nettyConnectionWriter) {
// Resolve the target subpartition index once, then register the writer and flag it connected.
int targetSubpartition = subpartitionId.getSubpartitionId();
subpartitionProducerAgents[targetSubpartition].connectionEstablished(nettyConnectionWriter);
nettyConnectionEstablished[targetSubpartition] = true;
}
|
/**
 * Verifies that clearing the partition's resources from the registry closes the
 * netty connection writer previously registered via connectionEstablished().
 */
@Test
void testRelease() {
TieredStorageResourceRegistry resourceRegistry = new TieredStorageResourceRegistry();
MemoryTierProducerAgent memoryTierProducerAgent =
createMemoryTierProducerAgent(false, SEGMENT_SIZE_BYTES, resourceRegistry);
// Flag flipped by the writer's close callback.
AtomicBoolean isClosed = new AtomicBoolean(false);
memoryTierProducerAgent.connectionEstablished(
SUBPARTITION_ID,
new TestingNettyConnectionWriter.Builder()
.setCloseFunction(
throwable -> {
isClosed.set(true);
return null;
})
.build());
resourceRegistry.clearResourceFor(PARTITION_ID);
assertThat(isClosed).isTrue();
}
|
/**
 * Wraps the JSON node in an ISO8601-aware value provider bound to this decoder's
 * column handle.
 */
@Override
public FieldValueProvider decode(JsonNode value)
{
return new ISO8601JsonValueProvider(value, columnHandle);
}
|
/**
 * Verifies ISO-8601 decoding across column types: timestamps (naive, Zulu and
 * offset forms normalize to the same epoch millis), times with and without
 * seconds/zone, dates as epoch days, and zone-preserving packed values for the
 * WITH TIME ZONE types.
 */
@Test
public void testDecode()
{
tester.assertDecodedAs("\"2018-02-19T09:20:11\"", TIMESTAMP, 1519032011000L);
tester.assertDecodedAs("\"2018-02-19T09:20:11Z\"", TIMESTAMP, 1519032011000L);
tester.assertDecodedAs("\"2018-02-19T09:20:11+10:00\"", TIMESTAMP, 1519032011000L);
tester.assertDecodedAs("\"13:15:18\"", TIME, 47718000);
tester.assertDecodedAs("\"13:15\"", TIME, 47700000);
tester.assertDecodedAs("\"13:15:18Z\"", TIME, 47718000);
tester.assertDecodedAs("\"13:15Z\"", TIME, 47700000);
tester.assertDecodedAs("\"13:15:18+10:00\"", TIME, 47718000);
tester.assertDecodedAs("\"13:15+10:00\"", TIME, 47700000);
tester.assertDecodedAs("\"2018-02-11\"", DATE, 17573);
tester.assertDecodedAs("\"2018-02-19T09:20:11Z\"", TIMESTAMP_WITH_TIME_ZONE, packDateTimeWithZone(1519032011000L, UTC_KEY));
tester.assertDecodedAs("\"2018-02-19T12:20:11+03:00\"", TIMESTAMP_WITH_TIME_ZONE, packDateTimeWithZone(1519032011000L, "+03:00"));
tester.assertDecodedAs("\"13:15:18Z\"", TIME_WITH_TIME_ZONE, packDateTimeWithZone(47718000, UTC_KEY));
tester.assertDecodedAs("\"13:15:18+10:00\"", TIME_WITH_TIME_ZONE, packDateTimeWithZone(47718000, "+10:00"));
}
|
/**
 * Generates an EC (secp256r1) key pair and a SHA256withECDSA content signer.
 *
 * @return the generated key pair with signer, or {@code null} when the platform
 *         does not support the algorithm (the caller is expected to fall back
 *         to RSA — see the logged message)
 */
protected static KeyPair signWithEcdsa() {
KeyPair keyPair = null;
try {
ECGenParameterSpec ecSpec = new ECGenParameterSpec("secp256r1");
KeyPairGenerator g = KeyPairGenerator.getInstance("EC");
g.initialize(ecSpec, new SecureRandom());
// java.security.KeyPair is fully qualified to avoid clashing with the local KeyPair type.
java.security.KeyPair keypair = g.generateKeyPair();
PublicKey publicKey = keypair.getPublic();
PrivateKey privateKey = keypair.getPrivate();
ContentSigner signer = new JcaContentSignerBuilder("SHA256withECDSA").build(privateKey);
keyPair = new KeyPair(publicKey, privateKey, signer);
} catch (NoSuchAlgorithmException | InvalidAlgorithmParameterException | OperatorCreationException e) {
// Best-effort: log and return null so the caller can retry with RSA.
logger.error(
CONFIG_SSL_CERT_GENERATE_FAILED,
"",
"",
"Generate Key with secp256r1 algorithm failed. Please check if your system support. "
+ "Will attempt to generate with RSA2048.",
e);
}
return keyPair;
}
|
/**
 * Verifies ECDSA key-pair generation succeeds on this platform and populates
 * private key, public key and signer.
 */
@Test
void testSignWithEcdsa() {
DubboCertManager.KeyPair keyPair = DubboCertManager.signWithEcdsa();
Assertions.assertNotNull(keyPair);
Assertions.assertNotNull(keyPair.getPrivateKey());
Assertions.assertNotNull(keyPair.getPublicKey());
Assertions.assertNotNull(keyPair.getSigner());
}
|
/**
 * Returns true iff the whole input string matches the precompiled IPv4 pattern.
 */
public static boolean isIPv4Address(final String input) {
return IPV4_PATTERN.matcher(input).matches();
}
|
/**
 * Verifies a well-formed dotted-quad address is accepted as IPv4.
 */
@Test
void isIPv4Address() {
assertTrue(InetAddressValidator.isIPv4Address("192.168.1.2"));
}
|
/**
 * Returns the number of mapped fields, i.e. the length of the
 * actual-to-meta field mapping array.
 */
@Override
public int size() {
return actualToMetaFieldMapping.length;
}
|
/**
 * Verifies size() reflects the two-entry mapping prepared by the fixture.
 */
@Test
public void size() {
assertEquals( 2, fieldsMapping.size() );
}
|
/**
 * Static factory returning a fresh Builder instance.
 */
public static Builder newBuilder() {
return new Builder();
}
|
/**
 * Verifies that building an AuthenticationException without a source fails fast
 * with an NPE carrying the "source can't be null" message.
 */
@Test
public void build_fails_with_NPE_if_source_is_null() {
AuthenticationException.Builder builder = AuthenticationException.newBuilder()
.setLogin("login")
.setMessage("message");
assertThatThrownBy(builder::build)
.isInstanceOf(NullPointerException.class)
.hasMessage("source can't be null");
}
|
/**
 * Pre-checks (when enabled) whether the estimated result size over a bounded
 * sample of local partitions already exceeds the configured limit, throwing
 * QueryResultSizeExceededException early instead of materializing the result.
 * Also increments the map's exceeded-count statistic when stats are available.
 */
void precheckMaxResultLimitOnLocalPartitions(String mapName) {
// check if feature is enabled
if (!isPreCheckEnabled) {
return;
}
// limit number of local partitions to check to keep runtime constant
PartitionIdSet localPartitions = mapServiceContext.getCachedOwnedPartitions();
int partitionsToCheck = min(localPartitions.size(), maxLocalPartitionsLimitForPreCheck);
if (partitionsToCheck == 0) {
return;
}
// calculate size of local partitions
int localPartitionSize = getLocalPartitionSize(mapName, localPartitions, partitionsToCheck);
if (localPartitionSize == 0) {
return;
}
// check local result size
long localResultLimit = getNodeResultLimit(partitionsToCheck);
if (localPartitionSize > localResultLimit * MAX_RESULT_LIMIT_FACTOR_FOR_PRECHECK) {
// Record the rejection in per-map stats before aborting the query.
var localMapStatsProvider = mapServiceContext.getLocalMapStatsProvider();
if (localMapStatsProvider != null && localMapStatsProvider.hasLocalMapStatsImpl(mapName)) {
localMapStatsProvider.getLocalMapStatsImpl(mapName).incrementQueryResultSizeExceededCount();
}
throw new QueryResultSizeExceededException(maxResultLimit, " Result size exceeded in local pre-check.");
}
}
|
/**
 * Verifies the pre-check is a no-op when the only local partition is empty
 * (local size 0 short-circuits before the limit comparison).
 */
@Test
public void testLocalPreCheckEnabledWithEmptyPartition() {
int[] partitionsSizes = {0};
populatePartitions(partitionsSizes);
initMocksWithConfiguration(200000, 1);
limiter.precheckMaxResultLimitOnLocalPartitions(ANY_MAP_NAME);
}
|
/**
 * Resolves approver user ids by climbing {@code level} steps up the department
 * tree from the process starter's department and returning that department's
 * leader (empty set when no leader or no starter department can be found).
 *
 * @param execution current process execution, used to locate the process instance
 * @param level how many department levels to climb; must be > 0
 * @return singleton set with the leader's user id, or an empty set
 */
public Set<Long> calculateUsers(DelegateExecution execution, int level) {
Assert.isTrue(level > 0, "level 必须大于 0");
// Resolve the process starter.
ProcessInstance processInstance = processInstanceService.getProcessInstance(execution.getProcessInstanceId());
Long startUserId = NumberUtils.parseLong(processInstance.getStartUserId());
// Find the department at the requested level.
DeptRespDTO dept = null;
for (int i = 0; i < level; i++) {
// Resolve the department for the current level.
if (dept == null) {
dept = getStartUserDept(startUserId);
if (dept == null) { // The starter's department cannot be found, so this rule cannot apply.
return emptySet();
}
} else {
DeptRespDTO parentDept = deptApi.getDept(dept.getParentId());
if (parentDept == null) { // No parent department — stop climbing; e.g. senior users sit in shallower hierarchies.
break;
}
dept = parentDept;
}
}
return dept.getLeaderUserId() != null ? asSet(dept.getLeaderUserId()) : emptySet();
}
|
/**
 * Verifies that with level=2 the calculation climbs one parent step from the
 * starter's department and returns the parent department leader's user id.
 */
@Test
public void testCalculateUsers_existParentDept() {
// Prepare arguments.
DelegateExecution execution = mockDelegateExecution(1L);
// Mock the process starter's user and department.
AdminUserRespDTO startUser = randomPojo(AdminUserRespDTO.class, o -> o.setDeptId(10L));
when(adminUserApi.getUser(eq(1L))).thenReturn(startUser);
DeptRespDTO startUserDept = randomPojo(DeptRespDTO.class, o -> o.setId(10L).setParentId(100L)
.setLeaderUserId(20L));
when(deptApi.getDept(eq(10L))).thenReturn(startUserDept);
// Mock the parent department (its leader is the expected result).
DeptRespDTO parentDept = randomPojo(DeptRespDTO.class, o -> o.setId(100L).setParentId(1000L)
.setLeaderUserId(200L));
when(deptApi.getDept(eq(100L))).thenReturn(parentDept);
// Invoke.
Set<Long> result = expression.calculateUsers(execution, 2);
// Assert: the parent department leader's id is returned.
assertEquals(asSet(200L), result);
}
|
/**
 * Propagates the failure to the wrapped request producer (pure delegation).
 */
@Override
public void failed(Exception ex) {
httpAsyncRequestProducer.failed(ex);
}
|
/**
 * Verifies the decorator forwards failed() to its delegate exactly once,
 * passing through the (here null) exception unchanged.
 */
@Test
public void failed() {
final HttpAsyncRequestProducer delegate = Mockito.mock(HttpAsyncRequestProducer.class);
final HttpAsyncRequestProducerDecorator decorator = new HttpAsyncRequestProducerDecorator(
delegate, null, null);
decorator.failed(null);
Mockito.verify(delegate, Mockito.times(1)).failed(null);
}
|
/**
 * Builds a Call transform from the given caller and response coder. The caller
 * is round-tripped through serialization first, so a non-serializable caller
 * fails fast here rather than at pipeline execution time.
 */
static <RequestT, ResponseT> Call<RequestT, ResponseT> of(
Caller<RequestT, ResponseT> caller, Coder<ResponseT> responseTCoder) {
caller = SerializableUtils.ensureSerializable(caller);
return new Call<>(
Configuration.<RequestT, ResponseT>builder()
.setCaller(caller)
.setResponseCoder(responseTCoder)
.build());
}
|
/**
 * Verifies Call.of() rejects a non-serializable caller eagerly with an
 * IllegalArgumentException (via the serializability pre-check).
 */
@Test
public void givenCallerNotSerializable_throwsError() {
assertThrows(
IllegalArgumentException.class,
() -> Call.of(new UnSerializableCaller(), NON_DETERMINISTIC_RESPONSE_CODER));
}
|
/**
 * Returns an unmodifiable copy of the Kafka Streams config props, tagged with
 * the given application id under the metrics resource-label prefix and with
 * Confluent telemetry settings applied where applicable.
 */
public Map<String, Object> getKsqlStreamConfigProps(final String applicationId) {
final Map<String, Object> map = new HashMap<>(getKsqlStreamConfigProps());
map.put(
MetricCollectors.RESOURCE_LABEL_PREFIX
+ StreamsConfig.APPLICATION_ID_CONFIG,
applicationId
);
// Streams client metrics aren't used in Confluent deployment
possiblyConfigureConfluentTelemetry(map);
return Collections.unmodifiableMap(map);
}
|
/**
 * Verifies that a `ksql.streams.`-prefixed property is exposed under its
 * unprefixed Streams name (parsed to its native type) and that the prefixed
 * key itself is stripped from the resulting props.
 */
@Test
public void shouldSetPrefixedStreamsConfigProperties() {
final KsqlConfig ksqlConfig = new KsqlConfig(Collections.singletonMap(
KsqlConfig.KSQL_STREAMS_PREFIX + StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, "128"));
assertThat(ksqlConfig.getKsqlStreamConfigProps().
get(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG), equalTo(128L));
assertThat(ksqlConfig.getKsqlStreamConfigProps().
get(KsqlConfig.KSQL_STREAMS_PREFIX + StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG),
is(nullValue()));
}
|
/**
 * Convenience overload: adapts the SearchesConfig into a SearchCommand and
 * delegates to {@code create(SearchCommand)}.
 */
public SearchSourceBuilder create(SearchesConfig config) {
return create(SearchCommand.from(config));
}
|
/**
 * Verifies that an inline query-string search filter supplied on the command is
 * rendered into the generated Elasticsearch search source.
 */
@Test
void searchIncludesSearchFilters() {
final SearchSourceBuilder search = this.searchRequestFactory.create(ChunkCommand.builder()
.filters(Collections.singletonList(InlineQueryStringSearchFilter.builder()
.title("filter 1")
.queryString("test-filter-value")
.build()))
.indices(Collections.singleton("graylog_0"))
.range(RANGE)
.batchSize(BATCH_SIZE)
.build());
// The rendered query JSON must contain the expected filter fragment.
assertThat(search.toString()).contains(TEST_SEARCH_FILTERS_STRING);
}
|
/**
 * Atomic-style compare-and-set on the off-heap value slot for the key:
 * writes newValue only when the key exists and its current value equals
 * oldValue. Neither argument may be the null-sentinel (asserted).
 *
 * @return true when the value was replaced, false when the key is absent or
 *         the current value differs from oldValue
 */
@Override public boolean replace(long key, long oldValue, long newValue) {
assert oldValue != nullValue : "replace() called with null-sentinel oldValue " + nullValue;
assert newValue != nullValue : "replace() called with null-sentinel newValue " + nullValue;
// Locate the value slot address for the key; NULL_ADDRESS means absent.
final long valueAddr = hsa.get(key);
if (valueAddr == NULL_ADDRESS) {
return false;
}
final long actualValue = mem.getLong(valueAddr);
if (actualValue != oldValue) {
return false;
}
mem.putLong(valueAddr, newValue);
return true;
}
|
/**
 * Verifies (with assertions enabled) that passing the null-sentinel value to
 * replace() trips the guard assertion.
 */
@Test(expected = AssertionError.class)
@RequireAssertEnabled
public void test_replace_invalidValue() {
map.replace(newKey(), MISSING_VALUE);
}
|
/**
 * Sorts the given PDF file list in place according to the selected option;
 * unrecognized options leave the list unchanged.
 */
public void performSortOperation(int option, List<File> pdf) {
if (option == DATE_INDEX) {
sortFilesByDateNewestToOldest(pdf);
} else if (option == NAME_INDEX) {
sortByNameAlphabetical(pdf);
} else if (option == SIZE_INCREASING_ORDER_INDEX) {
sortFilesBySizeIncreasingOrder(pdf);
} else if (option == SIZE_DECREASING_ORDER_INDEX) {
sortFilesBySizeDecreasingOrder(pdf);
}
}
|
/**
 * Verifies SIZE_INCREASING_ORDER_INDEX sorts the mocked files by ascending
 * length: {10000,1000,100,50,2000,2500} → indices {3,2,1,4,5,0}.
 */
@Test
public void shouldReturnArraySortedByAscendingSize() {
// given
long[] sizes = {10000, 1000, 100, 50, 2000, 2500};
for (int i = 0; i < sizes.length; i++)
when(mFiles.get(i).length()).thenReturn(sizes[i]);
File[] expected = new File[]{mFiles.get(3), mFiles.get(2), mFiles.get(1),
mFiles.get(4), mFiles.get(5), mFiles.get(0)};
// when
mInstance.performSortOperation(mInstance.SIZE_INCREASING_ORDER_INDEX, mFiles);
// then
Assert.assertEquals(asList(expected), mFiles);
}
|
/**
 * Populates the given method declaration's body so it declares one local
 * variable per PMML output field and returns them via Arrays.asList(...).
 *
 * @param methodDeclaration method whose body is (re)generated in place
 * @param outputFields PMML output fields to materialize as KiePMML variables
 */
static void commonPopulateGetCreatedKiePMMLOutputFieldsMethod(final MethodDeclaration methodDeclaration,
final List<org.dmg.pmml.OutputField> outputFields) {
BlockStmt body = new BlockStmt();
NodeList<Expression> arguments = new NodeList<>();
for (org.dmg.pmml.OutputField outputField : outputFields) {
// Each field gets a sanitized, lower-cased local variable name.
String outputFieldVariableName = getSanitizedVariableName(outputField.getName()).toLowerCase();
BlockStmt toAdd = getOutputFieldVariableDeclaration(outputFieldVariableName, outputField);
toAdd.getStatements().forEach(body::addStatement);
arguments.add(new NameExpr(outputFieldVariableName));
}
// Build the trailing `return Arrays.asList(var1, var2, ...);` statement.
MethodCallExpr methodCallExpr = new MethodCallExpr();
methodCallExpr.setScope(new NameExpr(Arrays.class.getSimpleName()));
methodCallExpr.setName("asList");
methodCallExpr.setArguments(arguments);
ReturnStmt returnStmt = new ReturnStmt();
returnStmt.setExpression(methodCallExpr);
body.addStatement(returnStmt);
methodDeclaration.setBody(body);
}
|
/**
 * Verifies the generated method body matches the expected source snapshot
 * (TEST_05_SOURCE) node-for-node after populating from the model's output fields.
 */
@Test
void commonPopulateGetCreatedKiePMMLOutputFieldsMethod() throws IOException {
final CompilationDTO compilationDTO = CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME,
pmmlModel,
model,
new PMMLCompilationContextMock(), SOURCE_BASE);
final MethodDeclaration methodDeclaration = new MethodDeclaration();
KiePMMLModelFactoryUtils.commonPopulateGetCreatedKiePMMLOutputFieldsMethod(methodDeclaration,
compilationDTO.getOutput().getOutputFields());
// Compare against the golden source file parsed into an AST.
String text = getFileContent(TEST_05_SOURCE);
MethodDeclaration expected = JavaParserUtils.parseMethod(text);
assertThat(JavaParserUtils.equalsNode(expected, methodDeclaration)).isTrue();
}
|
/**
 * Convenience overload: fills a bean of the given class with the supplied
 * property paths/values, using the default empty-error message.
 */
public static <T> T fillBean(String className, Map<List<String>, Object> params, ClassLoader classLoader) {
return fillBean(errorEmptyMessage(), className, params, classLoader);
}
|
/**
 * Verifies fillBean() throws ScenarioException when the class name is null.
 */
@Test(expected = ScenarioException.class)
public void fillBeanFailNullClassTest() {
Map<List<String>, Object> paramsToSet = new HashMap<>();
paramsToSet.put(List.of("fakeField"), null);
ScenarioBeanUtil.fillBean(errorEmptyMessage(), null, paramsToSet, classLoader);
}
|
/**
 * Scans the eligible job vertices of the execution graph and collects, per
 * non-terminal execution vertex, the execution attempts whose execution time
 * (weighted by input bytes) exceeds the vertex's computed baseline.
 *
 * @return map from execution vertex id to its slow execution attempts;
 *         vertices with no slow attempts are omitted
 */
@VisibleForTesting
Map<ExecutionVertexID, Collection<ExecutionAttemptID>> findSlowTasks(
final ExecutionGraph executionGraph) {
// Single timestamp so all baselines/comparisons use a consistent "now".
final long currentTimeMillis = System.currentTimeMillis();
final Map<ExecutionVertexID, Collection<ExecutionAttemptID>> slowTasks = new HashMap<>();
final List<ExecutionJobVertex> jobVerticesToCheck = getJobVerticesToCheck(executionGraph);
for (ExecutionJobVertex ejv : jobVerticesToCheck) {
final ExecutionTimeWithInputBytes baseline = getBaseline(ejv, currentTimeMillis);
for (ExecutionVertex ev : ejv.getTaskVertices()) {
// Finished/failed/canceled vertices cannot be "slow".
if (ev.getExecutionState().isTerminal()) {
continue;
}
final List<ExecutionAttemptID> slowExecutions =
findExecutionsExceedingBaseline(
ev.getCurrentExecutions(), baseline, currentTimeMillis);
if (!slowExecutions.isEmpty()) {
slowTasks.put(ev.getID(), slowExecutions);
}
}
}
return slowTasks;
}
|
/**
 * Verifies slow-task detection in a dynamic graph: with one of three tasks of
 * vertex1 finished (meeting the 0.3 finished ratio) and a zero baseline, the
 * two remaining running tasks of vertex1 are reported as slow (vertex2's
 * parallelism is still undecided and contributes nothing).
 */
@Test
void testFinishedTaskExceedRatioInDynamicGraph() throws Exception {
final int parallelism = 3;
final JobVertex jobVertex1 = createNoOpVertex(parallelism);
// create jobVertex2 and leave its parallelism unset
final JobVertex jobVertex2 = new JobVertex("vertex2");
jobVertex2.setInvokableClass(NoOpInvokable.class);
jobVertex2.connectNewDataSetAsInput(
jobVertex1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING);
final ExecutionGraph executionGraph = createDynamicExecutionGraph(jobVertex1, jobVertex2);
// ratio=0.3, multiplier=1, lower-bound=0 — any running task exceeds the baseline.
final ExecutionTimeBasedSlowTaskDetector slowTaskDetector =
createSlowTaskDetector(0.3, 1, 0);
final ExecutionVertex ev13 =
executionGraph.getJobVertex(jobVertex1.getID()).getTaskVertices()[2];
ev13.getCurrentExecutionAttempt().markFinished();
final Map<ExecutionVertexID, Collection<ExecutionAttemptID>> slowTasks =
slowTaskDetector.findSlowTasks(executionGraph);
assertThat(slowTasks).hasSize(2);
}
|
/**
 * Determines the HTTP method for the exchange using Camel's precedence rules:
 * explicit HTTP_METHOD header (String, Enum, or type-convertible), then GET
 * when a query string is present (header or endpoint URI), then POST when a
 * body exists, otherwise GET.
 */
public static String getHttpMethod(Exchange exchange, Endpoint endpoint) {
// 1. Use method provided in header.
Object method = exchange.getIn().getHeader(Exchange.HTTP_METHOD);
if (method instanceof String) {
return (String) method;
} else if (method instanceof Enum) {
return ((Enum<?>) method).name();
} else if (method != null) {
// Fall back to Camel's type converter for arbitrary header values.
return exchange.getContext().getTypeConverter().tryConvertTo(String.class, exchange, method);
}
// 2. GET if query string is provided in header.
if (exchange.getIn().getHeader(Exchange.HTTP_QUERY) != null) {
return GET_METHOD;
}
// 3. GET if endpoint is configured with a query string.
if (endpoint.getEndpointUri().indexOf('?') != -1) {
return GET_METHOD;
}
// 4. POST if there is data to send (body is not null).
if (exchange.getIn().getBody() != null) {
return POST_METHOD;
}
// 5. GET otherwise.
return GET_METHOD;
}
|
/**
 * Verifies the default branch of getHttpMethod(): with no method header, no
 * query string and no body, GET is returned.
 */
@Test
public void testGetMethodDefault() {
Endpoint endpoint = Mockito.mock(Endpoint.class);
Exchange exchange = Mockito.mock(Exchange.class);
Message message = Mockito.mock(Message.class);
Mockito.when(endpoint.getEndpointUri()).thenReturn(TEST_URI);
Mockito.when(exchange.getIn()).thenReturn(message);
Mockito.when(message.getHeader(Exchange.HTTP_URI)).thenReturn(TEST_URI);
assertEquals(AbstractHttpSpanDecorator.GET_METHOD,
AbstractHttpSpanDecorator.getHttpMethod(exchange, endpoint));
}
|
/**
 * Loads, configures and initializes a custom FileIO implementation by class name.
 *
 * @param impl fully-qualified class name implementing FileIO with a no-arg constructor
 * @param properties passed to {@code FileIO.initialize}
 * @param hadoopConf optional Hadoop Configuration applied when the impl is Hadoop-configurable
 * @return the initialized FileIO instance
 * @throws IllegalArgumentException when the class has no usable constructor or
 *         does not implement FileIO
 */
public static FileIO loadFileIO(String impl, Map<String, String> properties, Object hadoopConf) {
LOG.info("Loading custom FileIO implementation: {}", impl);
DynConstructors.Ctor<FileIO> ctor;
try {
ctor =
DynConstructors.builder(FileIO.class)
.loader(CatalogUtil.class.getClassLoader())
.impl(impl)
.buildChecked();
} catch (NoSuchMethodException e) {
throw new IllegalArgumentException(
String.format("Cannot initialize FileIO implementation %s: %s", impl, e.getMessage()), e);
}
FileIO fileIO;
try {
fileIO = ctor.newInstance();
} catch (ClassCastException e) {
// Constructed fine, but the class does not actually implement FileIO.
throw new IllegalArgumentException(
String.format("Cannot initialize FileIO, %s does not implement FileIO.", impl), e);
}
configureHadoopConf(fileIO, hadoopConf);
fileIO.initialize(properties);
return fileIO;
}
|
/**
 * Verifies loadFileIO() rejects a class that does not implement FileIO with an
 * IllegalArgumentException carrying the expected message.
 */
@Test
public void loadCustomFileIO_badClass() {
assertThatThrownBy(
() ->
CatalogUtil.loadFileIO(TestFileIONotImpl.class.getName(), Maps.newHashMap(), null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageStartingWith("Cannot initialize FileIO")
.hasMessageContaining("does not implement FileIO");
}
|
/**
 * Creates a database in the underlying Iceberg catalog, failing fast when a
 * database with the same name already exists.
 *
 * @throws AlreadyExistsException when the database already exists
 */
@Override
public void createDb(String dbName, Map<String, String> properties) throws AlreadyExistsException {
if (dbExists(dbName)) {
throw new AlreadyExistsException("Database Already Exists");
}
icebergCatalog.createDb(dbName, properties);
}
|
/**
 * Verifies createDb() surfaces an invalid "location" property ("hdfs:xx/aaaxx")
 * as a StarRocksConnectorException mentioning "Invalid location URI".
 */
@Test
public void testCreateDbInvalidateLocation() {
IcebergHiveCatalog icebergHiveCatalog = new IcebergHiveCatalog(CATALOG_NAME, new Configuration(), DEFAULT_CONFIG);
IcebergMetadata metadata = new IcebergMetadata(CATALOG_NAME, HDFS_ENVIRONMENT, icebergHiveCatalog,
Executors.newSingleThreadExecutor(), Executors.newSingleThreadExecutor(), null);
// No existing databases, so the duplicate check passes and location validation runs.
new Expectations(icebergHiveCatalog) {
{
icebergHiveCatalog.listAllDatabases();
result = Lists.newArrayList();
minTimes = 0;
}
};
try {
metadata.createDb("iceberg_db", ImmutableMap.of("location", "hdfs:xx/aaaxx"));
Assert.fail();
} catch (Exception e) {
Assert.assertTrue(e instanceof StarRocksConnectorException);
Assert.assertTrue(e.getMessage().contains("Invalid location URI"));
}
}
|
/**
 * Setter for the file path property.
 */
public void setFilePath(PropertyType filePath) {
this.filePath = filePath;
}
|
/**
 * Intentionally empty placeholder: the setter is covered elsewhere; kept so the
 * IDE does not regenerate a stub (S2699 "no assertion" warning suppressed).
 */
@Test
@SuppressWarnings("squid:S2699")
public void testSetFilePath() {
//already tested, this is just left so the IDE doesn't recreate it.
}
|
/**
 * Deletes an application: marks any active session DELETE, removes container
 * endpoints, application roles, endpoint certificates and the application
 * entry itself in one nested transaction, releasing provisioned hosts when a
 * host provisioner is present. Waits up to 30s for other servers to observe
 * the removal.
 *
 * @return true when an active session existed (i.e. something was deployed)
 */
public boolean delete(ApplicationId applicationId) {
Tenant tenant = getTenant(applicationId);
TenantApplications tenantApplications = tenant.getApplicationRepo();
NestedTransaction transaction = new NestedTransaction();
// Provisioner lock (when present) wraps the whole transaction.
Optional<ApplicationTransaction> applicationTransaction = hostProvisioner.map(provisioner -> provisioner.lock(applicationId))
.map(lock -> new ApplicationTransaction(lock, transaction));
try (@SuppressWarnings("unused") var applicationLock = tenantApplications.lock(applicationId)) {
Optional<Long> activeSession = tenantApplications.activeSessionOf(applicationId);
CompletionWaiter waiter;
if (activeSession.isPresent()) {
try {
Session session = getRemoteSession(tenant, activeSession.get());
transaction.add(tenant.getSessionRepository().createSetStatusTransaction(session, Session.Status.DELETE));
} catch (NotFoundException e) {
log.log(Level.INFO, TenantRepository.logPre(applicationId) + "Active session exists, but has not been deleted properly. Trying to cleanup");
}
waiter = tenantApplications.createRemoveApplicationWaiter(applicationId);
} else {
// If there's no active session, we still want to clean up any resources created in a failing prepare
waiter = new NoopCompletionWaiter();
}
Curator curator = tenantRepository.getCurator();
transaction.add(new ContainerEndpointsCache(tenant.getPath(), curator).delete(applicationId)); // TODO: Not unit tested
// Delete any application roles
transaction.add(new ApplicationRolesStore(curator, tenant.getPath()).delete(applicationId));
// Delete endpoint certificates
transaction.add(new EndpointCertificateMetadataStore(curator, tenant.getPath()).delete(applicationId));
// This call will remove application in zookeeper. Watches in TenantApplications will remove the application
// and allocated hosts in model and handlers in RPC server
transaction.add(tenantApplications.createDeleteTransaction(applicationId));
transaction.onCommitted(() -> log.log(Level.INFO, "Deleted " + applicationId));
if (applicationTransaction.isPresent()) {
// With a provisioner: release hosts, then commit the nested transaction it wraps.
hostProvisioner.get().remove(applicationTransaction.get());
applicationTransaction.get().nested().commit();
} else {
transaction.commit();
}
// Wait for app being removed on other servers
waiter.awaitCompletion(Duration.ofSeconds(30));
return activeSession.isPresent();
} finally {
applicationTransaction.ifPresent(ApplicationTransaction::close);
}
}
|
@Test
public void delete() {
    SessionRepository sessionRepository = tenant().getSessionRepository();
    {
        // Deploy an app, then delete it and verify the session is soft-deleted
        // (status DELETE) while its ZooKeeper node and on-disk files remain.
        PrepareResult result = deployApp(testApp);
        long sessionId = result.sessionId();
        Session applicationData = sessionRepository.getLocalSession(sessionId);
        assertNotNull(applicationData);
        assertNotNull(applicationData.getApplicationId());
        assertNotNull(sessionRepository.getLocalSession(sessionId));
        assertNotNull(applicationRepository.getActiveSession(applicationId()));
        Path sessionNode = sessionRepository.getSessionPath(sessionId);
        assertTrue(curator.exists(sessionNode));
        TenantFileSystemDirs tenantFileSystemDirs = tenant().getApplicationRepo().getTenantFileSystemDirs();
        File sessionFile = new File(tenantFileSystemDirs.sessionsPath(), String.valueOf(sessionId));
        assertTrue(sessionFile.exists());
        // Delete app and verify that it has been deleted from repos and no application set exists
        assertTrue(applicationRepository.delete(applicationId()));
        assertTrue(applicationRepository.getActiveSession(applicationId()).isEmpty());
        assertEquals(Optional.empty(), sessionRepository.getRemoteSession(sessionId).applicationVersions());
        assertTrue(curator.exists(sessionNode));
        assertEquals(Session.Status.DELETE.name(), Utf8.toString(curator.getData(sessionNode.append("sessionState")).get()));
        assertTrue(sessionFile.exists());
        // Deleting a non-existent application will return false
        assertFalse(applicationRepository.delete(applicationId()));
    }
    {
        // Verify that deleting one application does not affect another deployed application.
        deployApp(testApp);
        assertTrue(applicationRepository.delete(applicationId()));
        deployApp(testApp);
        // Deploy another app (with id fooId)
        ApplicationId fooId = applicationId("fooId");
        PrepareParams prepareParams2 = new PrepareParams.Builder().applicationId(fooId).build();
        deployApp(testAppJdiscOnly, prepareParams2);
        assertNotNull(applicationRepository.getActiveSession(fooId));
        // Delete app with id fooId, should not affect original app
        assertTrue(applicationRepository.delete(fooId));
        assertNotNull(applicationRepository.getActiveSession(applicationId()));
        assertTrue(applicationRepository.delete(applicationId()));
    }
}
|
/**
 * Receives a heartbeat / status update from a task attempt and returns feedback to it.
 *
 * When {@code taskStatus} is null the call is treated as a simple liveness ping;
 * otherwise the reported progress, state, phase, counters, per-phase finish times and
 * fetch failures are converted to YARN types and coalesced into the attempt's
 * last-status reference for asynchronous processing.
 *
 * @param taskAttemptID the attempt reporting status
 * @param taskStatus    the reported status, or null for a plain ping
 * @return feedback telling the task whether it is known and whether it should preempt itself
 */
@Override
public AMFeedback statusUpdate(TaskAttemptID taskAttemptID,
    TaskStatus taskStatus) throws IOException, InterruptedException {
  org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId yarnAttemptID =
      TypeConverter.toYarn(taskAttemptID);
  AMFeedback feedback = new AMFeedback();
  feedback.setTaskFound(true);
  AtomicReference<TaskAttemptStatus> lastStatusRef =
      attemptIdToStatus.get(yarnAttemptID);
  if (lastStatusRef == null) {
    // The task is not known, but it could be in the process of tearing
    // down gracefully or receiving a thread dump signal. Tolerate unknown
    // tasks as long as they have unregistered recently.
    if (!taskHeartbeatHandler.hasRecentlyUnregistered(yarnAttemptID)) {
      LOG.error("Status update was called with illegal TaskAttemptId: "
          + yarnAttemptID);
      feedback.setTaskFound(false);
    }
    return feedback;
  }
  // Propagating preemption to the task if TASK_PREEMPTION is enabled
  if (getConfig().getBoolean(MRJobConfig.TASK_PREEMPTION, false)
      && preemptionPolicy.isPreempted(yarnAttemptID)) {
    feedback.setPreemption(true);
    LOG.info("Setting preemption bit for task: "+ yarnAttemptID
        + " of type " + yarnAttemptID.getTaskId().getTaskType());
  }
  if (taskStatus == null) {
    //We are using statusUpdate only as a simple ping
    if (LOG.isDebugEnabled()) {
      LOG.debug("Ping from " + taskAttemptID.toString());
    }
    // Consider ping from the tasks for liveliness check
    if (getConfig().getBoolean(MRJobConfig.MR_TASK_ENABLE_PING_FOR_LIVELINESS_CHECK,
        MRJobConfig.DEFAULT_MR_TASK_ENABLE_PING_FOR_LIVELINESS_CHECK)) {
      taskHeartbeatHandler.progressing(yarnAttemptID);
    }
    return feedback;
  }
  // if we are here there is an actual status update to be processed
  taskHeartbeatHandler.progressing(yarnAttemptID);
  TaskAttemptStatus taskAttemptStatus =
      new TaskAttemptStatus();
  taskAttemptStatus.id = yarnAttemptID;
  // Task sends the updated progress to the TT.
  taskAttemptStatus.progress = taskStatus.getProgress();
  // log the new progress
  taskAttemptLogProgressStamps.computeIfAbsent(taskAttemptID,
      k -> new TaskProgressLogPair(taskAttemptID))
      .update(taskStatus.getProgress());
  // Task sends the updated state-string to the TT.
  taskAttemptStatus.stateString = taskStatus.getStateString();
  // Task sends the updated phase to the TT.
  taskAttemptStatus.phase = TypeConverter.toYarn(taskStatus.getPhase());
  // Counters are updated by the task. Convert counters into new format as
  // that is the primary storage format inside the AM to avoid multiple
  // conversions and unnecessary heap usage.
  taskAttemptStatus.counters = new org.apache.hadoop.mapreduce.Counters(
    taskStatus.getCounters());
  // Map Finish time set by the task (map only)
  if (taskStatus.getIsMap() && taskStatus.getMapFinishTime() != 0) {
    taskAttemptStatus.mapFinishTime = taskStatus.getMapFinishTime();
  }
  // Shuffle Finish time set by the task (reduce only).
  if (!taskStatus.getIsMap() && taskStatus.getShuffleFinishTime() != 0) {
    taskAttemptStatus.shuffleFinishTime = taskStatus.getShuffleFinishTime();
  }
  // Sort finish time set by the task (reduce only).
  if (!taskStatus.getIsMap() && taskStatus.getSortFinishTime() != 0) {
    taskAttemptStatus.sortFinishTime = taskStatus.getSortFinishTime();
  }
  // Not Setting the task state. Used by speculation - will be set in TaskAttemptImpl
  //taskAttemptStatus.taskState =  TypeConverter.toYarn(taskStatus.getRunState());
  //set the fetch failures
  if (taskStatus.getFetchFailedMaps() != null
      && taskStatus.getFetchFailedMaps().size() > 0) {
    taskAttemptStatus.fetchFailedMaps =
        new ArrayList<org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId>();
    for (TaskAttemptID failedMapId : taskStatus.getFetchFailedMaps()) {
      taskAttemptStatus.fetchFailedMaps.add(
          TypeConverter.toYarn(failedMapId));
    }
  }
  // Task sends the information about the nextRecordRange to the TT
  //    TODO: The following are not needed here, but needed to be set somewhere inside AppMaster.
  //    taskStatus.getRunState(); // Set by the TT/JT. Transform into a state TODO
  //    taskStatus.getStartTime(); // Used to be set by the TaskTracker. This should be set by getTask().
  //    taskStatus.getFinishTime(); // Used to be set by TT/JT. Should be set when task finishes
  //    // This was used by TT to do counter updates only once every minute. So this
  //    // isn't ever changed by the Task itself.
  //    taskStatus.getIncludeCounters();
  coalesceStatusUpdate(yarnAttemptID, taskAttemptStatus, lastStatusRef);
  return feedback;
}
|
@Test
public void testStatusUpdateProgress()
    throws IOException, InterruptedException {
  // Verify that a null-status ping does not count as progress while a real
  // status update does trigger the heartbeat handler's progressing() call.
  configureMocks();
  startListener(true);
  verify(hbHandler).register(attemptId);
  // make sure a ping doesn't report progress
  AMFeedback feedback = listener.statusUpdate(attemptID, null);
  assertTrue(feedback.getTaskFound());
  verify(hbHandler, never()).progressing(eq(attemptId));
  // make sure a status update does report progress
  MapTaskStatus mockStatus = new MapTaskStatus(attemptID, 0.0f, 1,
      TaskStatus.State.RUNNING, "", "RUNNING", "", TaskStatus.Phase.MAP,
      new Counters());
  feedback = listener.statusUpdate(attemptID, mockStatus);
  assertTrue(feedback.getTaskFound());
  verify(hbHandler).progressing(eq(attemptId));
}
|
/**
 * Expands the source tile area by {@code mBorder} tiles on every side and stores
 * the result in {@code pReuse} (or a fresh area when {@code pReuse} is null).
 *
 * @param pSource the area to expand; an empty source yields a reset (empty) result
 * @param pReuse  optional area instance to reuse for the result
 * @return the expanded area
 */
@Override
public MapTileArea computeFromSource(final MapTileArea pSource, final MapTileArea pReuse) {
    final MapTileArea result = pReuse == null ? new MapTileArea() : pReuse;
    if (pSource.size() == 0) {
        result.reset();
        return result;
    }
    // Shift the top-left corner out by the border, and widen both dimensions by
    // 2 * border (the -1 compensates for set() using inclusive coordinates).
    final int expandedLeft = pSource.getLeft() - mBorder;
    final int expandedTop = pSource.getTop() - mBorder;
    final int growth = 2 * mBorder - 1;
    result.set(pSource.getZoom(),
            expandedLeft, expandedTop,
            expandedLeft + pSource.getWidth() + growth,
            expandedTop + pSource.getHeight() + growth);
    return result;
}
|
@Test
public void testOnePointModulo() {
    // A single source tile near the map edge (y = 31 at zoom 5) must expand to a
    // border area whose coordinates wrap modulo the map size.
    final MapTileArea source = new MapTileArea();
    final MapTileArea dest = new MapTileArea();
    final Set<Long> set = new HashSet<>();
    final int border = 2;
    final MapTileAreaBorderComputer computer = new MapTileAreaBorderComputer(border);
    final int zoom = 5;
    final int sourceX = 1;
    final int sourceY = 31;
    source.set(zoom, sourceX, sourceY, sourceX, sourceY);
    add(set, zoom, sourceX, sourceY, border);
    computer.computeFromSource(source, dest);
    check(dest, set, zoom);
}
|
/**
 * Parses the configuration value for {@code key} as a {@code long}.
 *
 * @param key   the configuration key (used only for error messages)
 * @param value the raw value; must be non-null and non-empty
 * @return the parsed long value
 * @throws IllegalArgumentException if the value is missing or not a valid long
 */
static long parseLong(String key, @Nullable String value) {
  requireArgument((value != null) && !value.isEmpty(), "value of key %s was omitted", key);
  try {
    return Long.parseLong(value);
  } catch (NumberFormatException e) {
    String message = String.format(US,
        "key %s value was set to %s, must be a long", key, value);
    throw new IllegalArgumentException(message, e);
  }
}
|
@Test
public void parseLong_exception() {
  // A non-numeric value must be rejected with an IllegalArgumentException.
  assertThrows(IllegalArgumentException.class, () -> CaffeineSpec.parseLong("key", "value"));
}
|
/**
 * Builds an Avro coder from the schema resolved for this deserializer provider.
 *
 * @param coderRegistry the coder registry (unused; the coder is derived from the Avro schema)
 * @return an {@link AvroCoder} for the resolved Avro schema
 */
@Override
@SuppressWarnings("unchecked") // AvroCoder.of is not parameterized by T; the cast is safe because
                               // T is the type produced for this provider's Avro schema.
public Coder<T> getCoder(CoderRegistry coderRegistry) {
  return (Coder<T>) AvroCoder.of(getAvroSchema());
}
|
@Test
public void testGetCoder() {
  // Verify the coder derives its schema from the registry: the latest schema
  // when no version is given, and the registered schema for an explicit version.
  String schemaRegistryUrl = "mock://my-scope-name";
  String subject = "mytopic";
  SchemaRegistryClient mockRegistryClient = mockSchemaRegistryClient(schemaRegistryUrl, subject);
  CoderRegistry coderRegistry = CoderRegistry.createDefault();
  AvroCoder<Object> coderV0 =
      (AvroCoder<Object>)
          mockDeserializerProvider(schemaRegistryUrl, subject, null).getCoder(coderRegistry);
  assertEquals(AVRO_SCHEMA, coderV0.getSchema());
  try {
    Integer version = mockRegistryClient.getVersion(subject, AVRO_SCHEMA_V1);
    AvroCoder<Object> coderV1 =
        (AvroCoder<Object>)
            mockDeserializerProvider(schemaRegistryUrl, subject, version).getCoder(coderRegistry);
    assertEquals(AVRO_SCHEMA_V1, coderV1.getSchema());
  } catch (IOException | RestClientException e) {
    throw new RuntimeException("Unable to register schema for subject: " + subject, e);
  }
}
|
/**
 * Rewrites the given expression by running it through the operator plugin's
 * tree rewriter.
 *
 * @param expression the expression to rewrite
 * @return the rewritten expression
 */
public Expression rewrite(final Expression expression) {
  return new ExpressionTreeRewriter<>(new OperatorPlugin()::process)
      .rewrite(expression, null);
}
|
@Test
public void shouldPassRowTimeStringsToTheParser() {
  // Given: a predicate comparing ROWTIME against a timestamp string literal
  final Expression predicate = getPredicate(
      "SELECT * FROM orders where ROWTIME = '2017-01-01T00:44:00.000';");
  // When:
  rewriter.rewrite(predicate);
  // Then: the raw string (unchanged) is handed to the timestamp parser
  verify(parser).parse("2017-01-01T00:44:00.000");
}
|
/**
 * Parses a date string into a {@link DateTime} using the given format.
 *
 * @param dateStr    the date string to parse
 * @param dateFormat the format describing the string
 * @return the parsed {@link DateTime}
 */
public static DateTime parse(CharSequence dateStr, DateFormat dateFormat) {
    return new DateTime(dateStr, dateFormat);
}
|
@Test
public void parseUTCTest3() {
    // issue#I5M6DP: an ISO-like string without seconds ("yyyy-MM-dd'T'HH:mm")
    // must still parse, with seconds defaulting to 00.
    final String dateStr = "2022-08-13T09:30";
    final DateTime dateTime = DateUtil.parse(dateStr);
    assertNotNull(dateTime);
    assertEquals("2022-08-13 09:30:00", dateTime.toString());
}
|
@SuppressWarnings("JdkObsolete")
void runNonGui(String testFile, String logFile, boolean remoteStart, String remoteHostsString, boolean generateReportDashboard)
throws ConfigurationException {
try {
File f = new File(testFile);
if (!f.exists() || !f.isFile()) {
throw new ConfigurationException("The file " + f.getAbsolutePath() + " doesn't exist or can't be opened");
}
FileServer.getFileServer().setBaseForScript(f);
HashTree tree = SaveService.loadTree(f);
@SuppressWarnings("deprecation") // Deliberate use of deprecated ctor
JMeterTreeModel treeModel = new JMeterTreeModel(new Object());// NOSONAR Create non-GUI version to avoid headless problems
JMeterTreeNode root = (JMeterTreeNode) treeModel.getRoot();
treeModel.addSubTree(tree, root);
// Hack to resolve ModuleControllers in non GUI mode
SearchByClass<ReplaceableController> replaceableControllers =
new SearchByClass<>(ReplaceableController.class);
tree.traverse(replaceableControllers);
Collection<ReplaceableController> replaceableControllersRes = replaceableControllers.getSearchResults();
for (ReplaceableController replaceableController : replaceableControllersRes) {
replaceableController.resolveReplacementSubTree(root);
}
// Ensure tree is interpreted (ReplaceableControllers are replaced)
// For GUI runs this is done in Start.java
HashTree clonedTree = convertSubTree(tree, true);
Summariser summariser = null;
String summariserName = JMeterUtils.getPropDefault("summariser.name", "");//$NON-NLS-1$
if (summariserName.length() > 0) {
log.info("Creating summariser <{}>", summariserName);
println("Creating summariser <" + summariserName + ">");
summariser = new Summariser(summariserName);
}
ResultCollector resultCollector = null;
if (logFile != null) {
resultCollector = new ResultCollector(summariser);
resultCollector.setFilename(logFile);
clonedTree.add(clonedTree.getArray()[0], resultCollector);
}
else {
// only add Summariser if it can not be shared with the ResultCollector
if (summariser != null) {
clonedTree.add(clonedTree.getArray()[0], summariser);
}
}
if (deleteResultFile) {
SearchByClass<ResultCollector> resultListeners = new SearchByClass<>(ResultCollector.class);
clonedTree.traverse(resultListeners);
for (ResultCollector rc : resultListeners.getSearchResults()) {
File resultFile = new File(rc.getFilename());
if (resultFile.exists() && !resultFile.delete()) {
throw new IllegalStateException("Could not delete results file " + resultFile.getAbsolutePath()
+ "(canRead:" + resultFile.canRead() + ", canWrite:" + resultFile.canWrite() + ")");
}
}
}
ReportGenerator reportGenerator = null;
if (logFile != null && generateReportDashboard) {
reportGenerator = new ReportGenerator(logFile, resultCollector);
}
// Used for remote notification of threads start/stop,see BUG 54152
// Summariser uses this feature to compute correctly number of threads
// when NON GUI mode is used
clonedTree.add(clonedTree.getArray()[0], new RemoteThreadsListenerTestElement());
List<JMeterEngine> engines = new ArrayList<>();
println("Created the tree successfully using "+testFile);
if (!remoteStart) {
JMeterEngine engine = new StandardJMeterEngine();
clonedTree.add(clonedTree.getArray()[0], new ListenToTest(
org.apache.jmeter.JMeter.ListenToTest.RunMode.LOCAL, false, reportGenerator));
engine.configure(clonedTree);
Instant now = Instant.now();
println("Starting standalone test @ "+ formatLikeDate(now) + " (" + now.toEpochMilli() + ')');
engines.add(engine);
engine.runTest();
} else {
java.util.StringTokenizer st = new java.util.StringTokenizer(remoteHostsString.trim(), ",");//$NON-NLS-1$
List<String> hosts = new ArrayList<>();
while (st.hasMoreElements()) {
hosts.add(((String) st.nextElement()).trim());
}
ListenToTest testListener = new ListenToTest(
org.apache.jmeter.JMeter.ListenToTest.RunMode.REMOTE, remoteStop, reportGenerator);
clonedTree.add(clonedTree.getArray()[0], testListener);
DistributedRunner distributedRunner=new DistributedRunner(this.remoteProps);
distributedRunner.setStdout(System.out); // NOSONAR
distributedRunner.setStdErr(System.err); // NOSONAR
distributedRunner.init(hosts, clonedTree);
engines.addAll(distributedRunner.getEngines());
testListener.setStartedRemoteEngines(engines);
distributedRunner.start();
}
startUdpDdaemon(engines);
} catch (ConfigurationException e) {
throw e;
} catch (Exception e) {
System.out.println("Error in NonGUIDriver " + e.toString());//NOSONAR
log.error("Error in NonGUIDriver", e);
throw new ConfigurationException("Error in NonGUIDriver " + e.getMessage(), e);
}
}
|
@Test
void testFailureWithMissingPlugin() throws IOException {
    // Write a test plan that references a third-party sampler class
    // (kg.apc.jmeter.samplers.DummySampler) which is not on the classpath, and
    // verify that runNonGui fails with a descriptive ConfigurationException.
    File temp = File.createTempFile("testPlan", ".jmx");
    String testPlan = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
            + "<jmeterTestPlan version=\"1.2\" properties=\"5.0\" jmeter=\"5.2-SNAPSHOT.20190506\">\n"
            + "  <hashTree>\n"
            + "    <TestPlan guiclass=\"TestPlanGui\" testclass=\"TestPlan\" testname=\"Test Plan\" enabled=\"true\">\n"
            + "      <stringProp name=\"TestPlan.comments\"></stringProp>\n"
            + "      <boolProp name=\"TestPlan.functional_mode\">false</boolProp>\n"
            + "      <boolProp name=\"TestPlan.tearDown_on_shutdown\">true</boolProp>\n"
            + "      <boolProp name=\"TestPlan.serialize_threadgroups\">false</boolProp>\n"
            + "      <elementProp name=\"TestPlan.user_defined_variables\" elementType=\"Arguments\" "
            + "guiclass=\"ArgumentsPanel\" testclass=\"Arguments\" testname=\"User Defined Variables\" enabled=\"true\">\n"
            + "        <collectionProp name=\"Arguments.arguments\"/>\n" + "      </elementProp>\n"
            + "      <stringProp name=\"TestPlan.user_define_classpath\"></stringProp>\n" + "    </TestPlan>\n"
            + "    <hashTree>\n" + "      <hashTree>\n"
            + "        <kg.apc.jmeter.samplers.DummySampler guiclass=\"kg.apc.jmeter.samplers.DummySamplerGui\" "
            + "testclass=\"kg.apc.jmeter.samplers.DummySampler\" testname=\"jp@gc - Dummy Sampler\" enabled=\"true\">\n"
            + "          <boolProp name=\"WAITING\">true</boolProp>\n"
            + "          <boolProp name=\"SUCCESFULL\">true</boolProp>\n"
            + "          <stringProp name=\"RESPONSE_CODE\">200</stringProp>\n"
            + "          <stringProp name=\"RESPONSE_MESSAGE\">OK</stringProp>\n"
            + "          <stringProp name=\"REQUEST_DATA\">{"email":"user1", "password":"password1"};"
            + "</stringProp>\n"
            + "          <stringProp name=\"RESPONSE_DATA\">{"successful": true, "account_id":"0123456789"}</stringProp>\n"
            + "          <stringProp name=\"RESPONSE_TIME\">${__Random(50,500)}</stringProp>\n"
            + "          <stringProp name=\"LATENCY\">${__Random(1,50)}</stringProp>\n"
            + "          <stringProp name=\"CONNECT\">${__Random(1,5)}</stringProp>\n"
            + "        </kg.apc.jmeter.samplers.DummySampler></hashTree></hashTree>\n"
            + "  </hashTree></jmeterTestPlan><hashTree/></hashTree>\n" + "</jmeterTestPlan>";
    try (FileOutputStream os = new FileOutputStream(temp);
            Writer fw = new OutputStreamWriter(os, StandardCharsets.UTF_8);
            BufferedWriter out = new BufferedWriter(fw)) {
        out.write(testPlan);
    }
    JMeter jmeter = new JMeter();
    try {
        jmeter.runNonGui(temp.getAbsolutePath(), null, false, null, false);
        Assertions.fail("Expected ConfigurationException to be thrown");
    } catch (ConfigurationException e) {
        Assertions.assertTrue(e.getMessage().contains("Error in NonGUIDriver Problem loading XML from"),
                "When the plugin doesn't exist, the method 'runNonGui' should have a detailed message");
    } finally {
        // Always clean up the temporary test plan file.
        Assertions.assertTrue(temp.delete(), () -> "File " + temp.getAbsolutePath() + " should have been deleted");
    }
}
|
/**
 * Executes a Telegram Bot API request synchronously by delegating to the API client.
 *
 * @param request the request to send
 * @return the API response for the given request type
 */
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) {
    return api.send(request);
}
|
@Test
public void setChatPermissions() {
    // Round-trip all chat permissions through the live API: set every flag to
    // the same boolean, then read the chat back and verify the flags.
    for (boolean bool : new boolean[]{false, true}) {
        ChatPermissions setPerms = new ChatPermissions();
        setPerms.canSendMessages(bool);
        setPerms.canSendAudios(bool);
        setPerms.canSendDocuments(bool);
        setPerms.canSendPhotos(bool);
        setPerms.canSendVideos(bool);
        setPerms.canSendVideoNotes(bool);
        setPerms.canSendVoiceNotes(bool);
        setPerms.canSendPolls(bool);
        setPerms.canSendOtherMessages(bool);
        setPerms.canAddWebPagePreviews(bool);
        setPerms.canChangeInfo(bool);
        setPerms.canInviteUsers(bool);
        setPerms.canPinMessages(bool);
        setPerms.canManageTopics(bool);
        BaseResponse response = bot.execute(new SetChatPermissions(groupId, setPerms)
                .useIndependentChatPermissions(false));
        assertTrue(response.isOk());
        ChatFullInfo chat = bot.execute(new GetChat(groupId)).chat();
        ChatPermissions permissions = chat.permissions();
        if (bool) {
            assertTrue(permissions.canSendMessages());
            assertTrue(permissions.canSendAudios());
            assertTrue(permissions.canSendDocuments());
            assertTrue(permissions.canSendPhotos());
            assertTrue(permissions.canSendVideos());
            assertTrue(permissions.canSendVideoNotes());
            assertTrue(permissions.canSendVoiceNotes());
            assertTrue(permissions.canSendPolls());
            assertTrue(permissions.canSendOtherMessages());
            assertTrue(permissions.canAddWebPagePreviews());
            // NOTE(review): the following three are asserted false even though they were
            // set to true above — presumably the test group's settings prevent Telegram
            // from granting them. TODO confirm against the target group's configuration.
            assertFalse(permissions.canChangeInfo());
            assertTrue(permissions.canInviteUsers());
            assertFalse(permissions.canPinMessages());
            assertFalse(permissions.canManageTopics());
        } else {
            assertFalse(permissions.canSendMessages());
            assertFalse(permissions.canSendAudios());
            assertFalse(permissions.canSendDocuments());
            assertFalse(permissions.canSendPhotos());
            assertFalse(permissions.canSendVideos());
            assertFalse(permissions.canSendVideoNotes());
            assertFalse(permissions.canSendVoiceNotes());
            assertFalse(permissions.canSendPolls());
            assertFalse(permissions.canSendOtherMessages());
            assertFalse(permissions.canAddWebPagePreviews());
            assertFalse(permissions.canChangeInfo());
            assertFalse(permissions.canInviteUsers());
            assertFalse(permissions.canPinMessages());
            assertFalse(permissions.canManageTopics());
        }
    }
}
|
/**
 * Concatenates a metric name with optional additional parts, delegating to
 * {@code append} for each part (which handles separators and null/empty values).
 *
 * @param name  the first part of the name
 * @param names further parts; may be null or contain nulls, which are elided
 * @return the joined metric name
 */
public static String name(String name, String... names) {
    final StringBuilder joined = new StringBuilder();
    append(joined, name);
    if (names == null) {
        return joined.toString();
    }
    for (final String part : names) {
        append(joined, part);
    }
    return joined.toString();
}
|
@Test
public void elidesNullValuesFromNamesWhenManyNullsPassedIn() {
    // Multiple trailing nulls must be skipped entirely, leaving just the base name.
    assertThat(name("one", null, null))
            .isEqualTo("one");
}
|
/**
 * Moves any Rest.li projection parameters (fields, metadata fields, paging fields)
 * from the given data map into the result map, leaving non-projection entries intact.
 *
 * @param dataMap the query parameters; projection entries are removed from it in place
 * @param result  receives the serialized projection values keyed by parameter name
 * @return the same {@code dataMap} instance, with projection parameters removed
 */
public static DataMap processProjections(DataMap dataMap, Map<String, List<String>> result)
{
  //We send this through the pipeline and migrate from the dataMap into the result
  for (final String parameterName : RestConstants.PROJECTION_PARAMETERS)
  {
    if (dataMap.containsKey(parameterName))
    {
      processIndividualProjection(dataMap, result, parameterName);
    }
  }
  //If there were no projection parameters, we simply return the unmodified dataMap
  return dataMap;
}
|
@Test
public void testProcessProjections()
{
  // Build a data map containing all three projection types plus one ordinary query
  // parameter, then verify that processProjections extracts exactly the projections
  // and leaves the ordinary parameter behind.
  //Construct a MaskTree from a series of PathSpecs. Extract the subsequent Datamap representation.
  final MaskTree rootObjectsMask = MaskCreator
      .createPositiveMask(new PathSpec("foo", PathSpec.WILDCARD, "bar"));
  final MaskTree metadataMask = MaskCreator
      .createPositiveMask(new PathSpec("foo", "bar"), new PathSpec("bar", "baz"), new PathSpec("qux"));
  final MaskTree pagingMask = MaskCreator
      .createPositiveMask(new PathSpec("total"), new PathSpec("count"), new PathSpec("links", PathSpec.WILDCARD, "rel"));
  final DataMap resultMap = new DataMap(4); //For each type of projection, plus one query string parameter
  resultMap.put(RestConstants.FIELDS_PARAM, rootObjectsMask.getDataMap());
  resultMap.put(RestConstants.METADATA_FIELDS_PARAM, metadataMask.getDataMap());
  resultMap.put(RestConstants.PAGING_FIELDS_PARAM, pagingMask.getDataMap());
  resultMap.put("someQueryString", "someValue");
  final Map<String, List<String>> processedProjections = new LinkedHashMap<>();
  final DataMap processedDataMap = QueryParamsDataMap.processProjections(resultMap, processedProjections);
  Assert.assertTrue(processedDataMap.size() == 1, "Processed datamap should only have one item left!");
  final Map<String, Set<String>> expectedProcessedProjections = new LinkedHashMap<>();
  //"{fields=[foo:($*:(bar))], metadataFields=[foo:(bar),bar:(baz),qux], pagingFields=[total,count,links:($*:(rel))]}"
  expectedProcessedProjections.put(RestConstants.FIELDS_PARAM, Collections.singleton("foo:($*:(bar))"));
  expectedProcessedProjections.put(RestConstants.METADATA_FIELDS_PARAM,
      new HashSet<>(Arrays.asList("foo:(bar)", "bar:(baz)", "qux")));
  expectedProcessedProjections.put(RestConstants.PAGING_FIELDS_PARAM,
      new HashSet<>(Arrays.asList("total", "count", "links:($*:(rel))")));
  Assert.assertEquals(processedProjections.size(), expectedProcessedProjections.size(), "We must have the correct number of" +
      " expected projections!");
  for (final Map.Entry<String, List<String>> entry : processedProjections.entrySet())
  {
    //Acceptable because these are always comma delimited
    final Set<String> actualProjectionValueSet = new HashSet<>(Arrays.asList(entry.getValue().get(0).split(",")));
    Assert.assertEquals(actualProjectionValueSet, expectedProcessedProjections.get(entry.getKey()), "The individual projection " +
        "for " + entry.getKey() + " does not match what is expected!");
  }
}
|
/**
 * Registers everything a report analysis needs into the task container:
 * settings loader, the task itself, the component classes, components from
 * all providers, and finally the ordered computation step classes.
 *
 * @param container the container to populate
 */
@Override
public void populateContainer(TaskContainer container) {
  ComputationSteps steps = new ReportComputationSteps(container);
  container.add(SettingsLoader.class);
  container.add(task);
  container.add(steps);
  container.add(componentClasses());
  // Components contributed by plugins/extensions.
  for (ReportAnalysisComponentProvider componentProvider : componentProviders) {
    container.add(componentProvider.getComponents());
  }
  // Steps must be added last, in execution order.
  container.add(steps.orderedStepClasses());
}
|
@Test
public void all_computation_steps_are_added_in_order_to_the_container() {
  // Populate a recording container and collect the canonical names of every
  // ComputationStep class that was registered.
  ListTaskContainer container = new ListTaskContainer();
  underTest.populateContainer(container);
  Set<String> computationStepClassNames = container.getAddedComponents().stream()
      .filter(component -> component instanceof Class)
      .map(component -> (Class<?>) component)
      .filter(ComputationStep.class::isAssignableFrom)
      .map(Class::getCanonicalName)
      .collect(Collectors.toSet());
  // Every step class found in the step package must have been registered.
  assertThat(difference(retrieveStepPackageStepsCanonicalNames(PROJECTANALYSIS_STEP_PACKAGE), computationStepClassNames)).isEmpty();
}
|
/**
 * Validates an update of a source configuration and merges it into the existing one.
 *
 * Identity fields (tenant, namespace, name) and processing guarantees must not change,
 * and a source cannot switch between a regular source and a batch source. All other
 * non-empty/non-null fields of {@code newConfig} overwrite (or, for resources, are
 * merged into) the corresponding fields of a clone of {@code existingConfig}.
 *
 * @param existingConfig the currently deployed configuration
 * @param newConfig      the requested update
 * @return a new merged configuration; neither input is modified
 * @throws IllegalArgumentException if an immutable field differs between the configs
 */
public static SourceConfig validateUpdate(SourceConfig existingConfig, SourceConfig newConfig) {
    SourceConfig mergedConfig = clone(existingConfig);
    if (!existingConfig.getTenant().equals(newConfig.getTenant())) {
        throw new IllegalArgumentException("Tenants differ");
    }
    if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) {
        throw new IllegalArgumentException("Namespaces differ");
    }
    if (!existingConfig.getName().equals(newConfig.getName())) {
        throw new IllegalArgumentException("Function Names differ");
    }
    if (!StringUtils.isEmpty(newConfig.getClassName())) {
        mergedConfig.setClassName(newConfig.getClassName());
    }
    if (!StringUtils.isEmpty(newConfig.getTopicName())) {
        mergedConfig.setTopicName(newConfig.getTopicName());
    }
    if (!StringUtils.isEmpty(newConfig.getSerdeClassName())) {
        mergedConfig.setSerdeClassName(newConfig.getSerdeClassName());
    }
    if (!StringUtils.isEmpty(newConfig.getSchemaType())) {
        mergedConfig.setSchemaType(newConfig.getSchemaType());
    }
    if (newConfig.getConfigs() != null) {
        mergedConfig.setConfigs(newConfig.getConfigs());
    }
    if (newConfig.getSecrets() != null) {
        mergedConfig.setSecrets(newConfig.getSecrets());
    }
    if (!StringUtils.isEmpty(newConfig.getLogTopic())) {
        mergedConfig.setLogTopic(newConfig.getLogTopic());
    }
    // Processing guarantees are immutable once a source is created.
    if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees()
            .equals(existingConfig.getProcessingGuarantees())) {
        throw new IllegalArgumentException("Processing Guarantees cannot be altered");
    }
    if (newConfig.getParallelism() != null) {
        mergedConfig.setParallelism(newConfig.getParallelism());
    }
    if (newConfig.getResources() != null) {
        // Resources are merged field-by-field rather than replaced wholesale.
        mergedConfig
                .setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources()));
    }
    if (!StringUtils.isEmpty(newConfig.getArchive())) {
        mergedConfig.setArchive(newConfig.getArchive());
    }
    if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) {
        mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags());
    }
    if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) {
        mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions());
    }
    // A source cannot change kind (regular <-> batch) via an update.
    if (isBatchSource(existingConfig) != isBatchSource(newConfig)) {
        throw new IllegalArgumentException("Sources cannot be update between regular sources and batchsource");
    }
    if (newConfig.getBatchSourceConfig() != null) {
        validateBatchSourceConfigUpdate(existingConfig.getBatchSourceConfig(), newConfig.getBatchSourceConfig());
        mergedConfig.setBatchSourceConfig(newConfig.getBatchSourceConfig());
    }
    if (newConfig.getProducerConfig() != null) {
        mergedConfig.setProducerConfig(newConfig.getProducerConfig());
    }
    return mergedConfig;
}
|
@Test
public void testMergeDifferentBatchSourceConfig() {
    // Updating the batch source's discovery-triggerer config must replace that
    // config in the merged result while leaving every other field untouched.
    SourceConfig sourceConfig = createSourceConfigWithBatch();
    BatchSourceConfig batchSourceConfig = createBatchSourceConfig();
    Map<String, Object> newConfig = new HashMap<>();
    newConfig.put("something", "different");
    batchSourceConfig.setDiscoveryTriggererConfig(newConfig);
    SourceConfig newSourceConfig = createUpdatedSourceConfig("batchSourceConfig", batchSourceConfig);
    SourceConfig mergedConfig = SourceConfigUtils.validateUpdate(sourceConfig, newSourceConfig);
    assertEquals(
            mergedConfig.getBatchSourceConfig().getDiscoveryTriggererConfig().get("something"),
            "different"
    );
    // Restore the original triggerer config; the merged config should then be
    // identical (field-for-field) to the original.
    mergedConfig.getBatchSourceConfig().setDiscoveryTriggererConfig(sourceConfig.getBatchSourceConfig().getDiscoveryTriggererConfig());
    assertEquals(
            new Gson().toJson(sourceConfig),
            new Gson().toJson(mergedConfig)
    );
}
|
/**
 * Returns a {@link Permutor} that reorders a list of feed items according to the
 * given sort order. Comparator-based orders are wrapped in a sorting permutor;
 * shuffle orders produce dedicated permutors.
 *
 * @param sortOrder the requested ordering
 * @return a permutor implementing that ordering
 * @throws IllegalArgumentException if the sort order has no implementation
 */
@NonNull
public static Permutor<FeedItem> getPermutor(@NonNull SortOrder sortOrder) {
    Comparator<FeedItem> comparator = null;
    Permutor<FeedItem> permutor = null;
    switch (sortOrder) {
        case EPISODE_TITLE_A_Z:
            comparator = (f1, f2) -> itemTitle(f1).compareTo(itemTitle(f2));
            break;
        case EPISODE_TITLE_Z_A:
            comparator = (f1, f2) -> itemTitle(f2).compareTo(itemTitle(f1));
            break;
        case DATE_OLD_NEW:
            comparator = (f1, f2) -> pubDate(f1).compareTo(pubDate(f2));
            break;
        case DATE_NEW_OLD:
            comparator = (f1, f2) -> pubDate(f2).compareTo(pubDate(f1));
            break;
        case DURATION_SHORT_LONG:
            comparator = (f1, f2) -> Integer.compare(duration(f1), duration(f2));
            break;
        case DURATION_LONG_SHORT:
            comparator = (f1, f2) -> Integer.compare(duration(f2), duration(f1));
            break;
        case EPISODE_FILENAME_A_Z:
            comparator = (f1, f2) -> itemLink(f1).compareTo(itemLink(f2));
            break;
        case EPISODE_FILENAME_Z_A:
            comparator = (f1, f2) -> itemLink(f2).compareTo(itemLink(f1));
            break;
        case FEED_TITLE_A_Z:
            comparator = (f1, f2) -> feedTitle(f1).compareTo(feedTitle(f2));
            break;
        case FEED_TITLE_Z_A:
            comparator = (f1, f2) -> feedTitle(f2).compareTo(feedTitle(f1));
            break;
        case RANDOM:
            permutor = Collections::shuffle;
            break;
        case SMART_SHUFFLE_OLD_NEW:
            permutor = (queue) -> smartShuffle(queue, true);
            break;
        case SMART_SHUFFLE_NEW_OLD:
            permutor = (queue) -> smartShuffle(queue, false);
            break;
        case SIZE_SMALL_LARGE:
            comparator = (f1, f2) -> Long.compare(size(f1), size(f2));
            break;
        case SIZE_LARGE_SMALL:
            comparator = (f1, f2) -> Long.compare(size(f2), size(f1));
            break;
        case COMPLETION_DATE_NEW_OLD:
            // NOTE(review): assumes getMedia() and getPlaybackCompletionDate() are
            // non-null for every item sorted with this order — TODO confirm callers
            // only use it on completed items.
            comparator = (f1, f2) -> f2.getMedia().getPlaybackCompletionDate()
                    .compareTo(f1.getMedia().getPlaybackCompletionDate());
            break;
        default:
            throw new IllegalArgumentException("Permutor not implemented");
    }
    if (comparator != null) {
        // Capture in an effectively-final local so it can be used in the lambda.
        final Comparator<FeedItem> comparator2 = comparator;
        permutor = (queue) -> Collections.sort(queue, comparator2);
    }
    return permutor;
}
|
@Test
public void testPermutorForRule_size_desc() {
    // SIZE_LARGE_SMALL must reorder the fixture items by descending media size.
    Permutor<FeedItem> permutor = FeedItemPermutors.getPermutor(SortOrder.SIZE_LARGE_SMALL);
    List<FeedItem> itemList = getTestList();
    assertTrue(checkIdOrder(itemList, 1, 3, 2)); // before sorting
    permutor.reorder(itemList);
    assertTrue(checkIdOrder(itemList, 3, 2, 1)); // after sorting
}
|
/**
 * Emits the Camel REST DSL code for a single OpenAPI parameter: name, type,
 * data type, allowable values, collection format, default value, array item
 * type, required flag and description, wrapped in param()/endParam() calls.
 *
 * @param parameter the OpenAPI parameter to translate
 * @return the emitter, for call chaining
 */
CodeEmitter<T> emit(final Parameter parameter) {
    emitter.emit("param");
    emit("name", parameter.getName());
    final String parameterType = parameter.getIn();
    if (ObjectHelper.isNotEmpty(parameterType)) {
        emit("type", RestParamType.valueOf(parameterType));
    }
    // Body parameters carry no schema-derived attributes in the DSL.
    if (!"body".equals(parameterType)) {
        final Schema schema = parameter.getSchema();
        if (schema != null) {
            final String dataType = schema.getType();
            if (ObjectHelper.isNotEmpty(dataType)) {
                emit("dataType", dataType);
            }
            emit("allowableValues", asStringList(schema.getEnum()));
            final StyleEnum style = parameter.getStyle();
            if (ObjectHelper.isNotEmpty(style)) {
                if (style.equals(StyleEnum.FORM)) {
                    // Guard against null explode value
                    // See: https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.3.md#fixed-fields-10
                    if (Boolean.FALSE.equals(parameter.getExplode())) {
                        emit("collectionFormat", CollectionFormat.csv);
                    } else {
                        emit("collectionFormat", CollectionFormat.multi);
                    }
                }
            }
            if (ObjectHelper.isNotEmpty(schema.getDefault())) {
                final String value = StringHelper.removeLeadingAndEndingQuotes(schema.getDefault().toString());
                emit("defaultValue", value);
            }
            if ("array".equals(dataType) && schema.getItems() != null) {
                emit("arrayType", schema.getItems().getType());
            }
        }
    }
    // OpenAPI "required" defaults to false when absent.
    if (parameter.getRequired() != null) {
        emit("required", parameter.getRequired());
    } else {
        emit("required", Boolean.FALSE);
    }
    emit("description", parameter.getDescription());
    emitter.emit("endParam");
    return emitter;
}
|
@Test
public void shouldEmitCodeForOas3ParameterWithType() {
    // A typed query parameter must produce name, type, dataType and the default
    // required(false) in the generated DSL code.
    final Builder method = MethodSpec.methodBuilder("configure");
    final MethodBodySourceCodeEmitter emitter = new MethodBodySourceCodeEmitter(method);
    final OperationVisitor<?> visitor = new OperationVisitor<>(emitter, null, null, null, null);
    final Parameter parameter = new Parameter();
    parameter.setName("param");
    parameter.setIn("query");
    Schema schema = new Schema();
    schema.setType("integer");
    parameter.setSchema(schema);
    visitor.emit(parameter);
    assertThat(method.build().toString()).isEqualTo("void configure() {\n"
                                                    + "  param()\n"
                                                    + "    .name(\"param\")\n"
                                                    + "    .type(org.apache.camel.model.rest.RestParamType.query)\n"
                                                    + "    .dataType(\"integer\")\n"
                                                    + "    .required(false)\n"
                                                    + "    .endParam()\n"
                                                    + "  }\n");
}
|
/**
 * Returns the cached {@link RefererConfig} for the given path, building it on
 * first access via the loading cache.
 *
 * @param path the cache key (service path)
 * @param <T>  the referer's service interface type
 * @return the cached referer config
 * @throws ShenyuException if loading the cache entry fails
 */
@SuppressWarnings("unchecked")
public <T> RefererConfig<T> get(final String path) {
    try {
        return (RefererConfig<T>) cache.get(path);
    } catch (ExecutionException e) {
        // Unwrap the ExecutionException so the real failure is the cause,
        // consistent with the other loading-cache accessors in this codebase.
        throw new ShenyuException(e.getCause());
    }
}
|
@Test
public void testMethodInfo() {
    // MethodInfo should hand back exactly the params list it was given.
    List<Pair<String, String>> params = new ArrayList<>();
    Pair<String, String> pair = Pair.of("left", "right");
    params.add(pair);
    ApplicationConfigCache.MethodInfo methodInfo = new ApplicationConfigCache.MethodInfo();
    methodInfo.setParams(params);
    Assertions.assertEquals(methodInfo.getParams().get(0).getLeft(), "left");
}
|
/**
 * Names this context. Setting the same name again is a no-op; a context may
 * only be renamed while it is unnamed or still carries the default name.
 *
 * @param name the new context name
 * @throws IllegalStateException if the context already has a custom name
 */
public void setName(String name) throws IllegalStateException {
    // Re-assigning the current name is idempotent.
    if (name != null && name.equals(this.name)) {
        return;
    }
    boolean renamable = this.name == null || CoreConstants.DEFAULT_CONTEXT_NAME.equals(this.name);
    if (!renamable) {
        throw new IllegalStateException("Context has been already given a name");
    }
    this.name = name;
}
|
@Test
public void idempotentNameTest() {
    // Setting the same name twice must not throw (idempotent rename).
    context.setName("hello");
    context.setName("hello");
}
|
@Override
public Num calculate(BarSeries series, Position position) {
    // Delegates entirely to the wrapped number-of-positions criterion.
    return numberOfPositionsCriterion.calculate(series, position);
}
|
@Test
public void calculateWithOnePosition() {
    // Winning/losing ratio over a single position: a losing position counts 0
    // for the PROFIT filter and 1 for the LOSS filter, and vice versa.
    BarSeries series = new MockBarSeries(numFunction, 100d, 95d, 102d, 105d, 97d, 113d);
    Position position = new Position(Trade.buyAt(0, series), Trade.sellAt(1, series));
    // 0 winning position
    AnalysisCriterion winningPositionsRatio = getCriterion(PositionFilter.PROFIT);
    assertNumEquals(numOf(0), winningPositionsRatio.calculate(series, position));
    // 1 winning position
    position = new Position(Trade.buyAt(1, series), Trade.sellAt(2, series));
    assertNumEquals(1, winningPositionsRatio.calculate(series, position));
    // 1 losing position
    position = new Position(Trade.buyAt(0, series), Trade.sellAt(1, series));
    AnalysisCriterion losingPositionsRatio = getCriterion(PositionFilter.LOSS);
    assertNumEquals(numOf(1), losingPositionsRatio.calculate(series, position));
    // 0 losing position
    position = new Position(Trade.buyAt(1, series), Trade.sellAt(2, series));
    assertNumEquals(0, losingPositionsRatio.calculate(series, position));
}
|
/**
 * Parses a version string of the form {@code major[.minor[.fix]][-postfix]}
 * (e.g. {@code "1.2.3-SNAPSHOT"}).
 *
 * @param verStr the raw version string, may be blank
 * @return the parsed version, or empty if the input is blank, starts with
 *         {@code "-"}, is non-numeric, or has a major version below 1
 */
public static Optional<SentinelVersion> parseVersion(String verStr) {
    if (StringUtil.isBlank(verStr)) {
        return Optional.empty();
    }
    try {
        String remaining = verStr;
        SentinelVersion version = new SentinelVersion();
        // Split off an optional "-postfix" first.
        int dashIndex = remaining.indexOf('-');
        if (dashIndex == 0) {
            // A version may not start with "-".
            return Optional.empty();
        }
        if (dashIndex > 0) {
            // A trailing "-" yields an empty postfix, which is simply ignored.
            if (dashIndex < remaining.length() - 1) {
                version.setPostfix(remaining.substring(dashIndex + 1));
            }
            remaining = remaining.substring(0, dashIndex);
        }
        // Parse up to three dot-separated numeric segments: major.minor.fix.
        // Missing segments default to 0; extra segments are ignored.
        int[] parts = new int[3];
        for (int segment = 0; segment < parts.length; segment++) {
            int dotIndex = remaining.indexOf('.');
            if (dotIndex < 0) {
                if (remaining.length() > 0) {
                    parts[segment] = Integer.parseInt(remaining);
                }
                break;
            }
            parts[segment] = Integer.parseInt(remaining.substring(0, dotIndex));
            remaining = remaining.substring(dotIndex + 1);
        }
        if (parts[0] < 1) {
            // Wrong format, return empty.
            return Optional.empty();
        }
        return Optional.of(version
            .setMajorVersion(parts[0])
            .setMinorVersion(parts[1])
            .setFixVersion(parts[2]));
    } catch (Exception ex) {
        // Parse fail, return empty.
        return Optional.empty();
    }
}
|
@Test
public void test() {
    // Full x.y.z version.
    Optional<SentinelVersion> version = VersionUtils.parseVersion("1.2.3");
    assertTrue(version.isPresent());
    assertEquals(1, version.get().getMajorVersion());
    assertEquals(2, version.get().getMinorVersion());
    assertEquals(3, version.get().getFixVersion());
    assertNull(version.get().getPostfix());
    // Missing fix segment defaults to 0.
    version = VersionUtils.parseVersion("1.2");
    assertTrue(version.isPresent());
    assertEquals(1, version.get().getMajorVersion());
    assertEquals(2, version.get().getMinorVersion());
    assertEquals(0, version.get().getFixVersion());
    assertNull(version.get().getPostfix());
    // Trailing dot: missing segments default to 0.
    version = VersionUtils.parseVersion("1.");
    assertTrue(version.isPresent());
    assertEquals(1, version.get().getMajorVersion());
    assertEquals(0, version.get().getMinorVersion());
    assertEquals(0, version.get().getFixVersion());
    assertNull(version.get().getPostfix());
    version = VersionUtils.parseVersion("1.2.");
    assertTrue(version.isPresent());
    assertEquals(1, version.get().getMajorVersion());
    assertEquals(2, version.get().getMinorVersion());
    assertEquals(0, version.get().getFixVersion());
    assertNull(version.get().getPostfix());
    version = VersionUtils.parseVersion("1.2.3.");
    assertTrue(version.isPresent());
    assertEquals(1, version.get().getMajorVersion());
    assertEquals(2, version.get().getMinorVersion());
    assertEquals(3, version.get().getFixVersion());
    assertNull(version.get().getPostfix());
    // A fourth segment is ignored.
    version = VersionUtils.parseVersion("1.2.3.4");
    assertTrue(version.isPresent());
    assertEquals(1, version.get().getMajorVersion());
    assertEquals(2, version.get().getMinorVersion());
    assertEquals(3, version.get().getFixVersion());
    assertNull(version.get().getPostfix());
    // Bare major version.
    version = VersionUtils.parseVersion("1");
    assertTrue(version.isPresent());
    assertEquals(1, version.get().getMajorVersion());
    assertEquals(0, version.get().getMinorVersion());
    assertEquals(0, version.get().getFixVersion());
    assertNull(version.get().getPostfix());
    // Trailing "-" yields no postfix.
    version = VersionUtils.parseVersion("1.2.3-");
    assertTrue(version.isPresent());
    assertEquals(1, version.get().getMajorVersion());
    assertEquals(2, version.get().getMinorVersion());
    assertEquals(3, version.get().getFixVersion());
    assertNull(version.get().getPostfix());
    // Invalid inputs: leading "-", blank, or null all yield empty.
    version = VersionUtils.parseVersion("-");
    assertFalse(version.isPresent());
    version = VersionUtils.parseVersion("-t");
    assertFalse(version.isPresent());
    version = VersionUtils.parseVersion("");
    assertFalse(version.isPresent());
    version = VersionUtils.parseVersion(null);
    assertFalse(version.isPresent());
    // Postfix is captured verbatim.
    version = VersionUtils.parseVersion("1.2.3-SNAPSHOTS");
    assertTrue(version.isPresent());
    assertEquals(1, version.get().getMajorVersion());
    assertEquals(2, version.get().getMinorVersion());
    assertEquals(3, version.get().getFixVersion());
    assertEquals("SNAPSHOTS", version.get().getPostfix());
}
|
@JsonProperty
public DateTime getTimestamp() {
    // Exposes the wrapped message's timestamp for JSON serialization.
    return message.getTimestamp();
}
|
@Test
public void testGetTimestamp() throws Exception {
    // The summary must pass through the underlying message's timestamp.
    assertEquals(message.getTimestamp(), messageSummary.getTimestamp());
}
|
/**
 * Looks up the read method (getter) for a JavaBean property of the given class.
 *
 * @param o              the bean class to introspect; may be {@code null}
 * @param propertiesName the property name, e.g. {@code "name"} for {@code getName()}
 * @return the getter {@link Method}, {@code null} if {@code o} is {@code null}
 *         or the property has no read method
 * @throws RuntimeException if the property cannot be introspected
 */
public static Method getter(Class<?> o, String propertiesName) {
    if (o == null) {
        return null;
    }
    try {
        PropertyDescriptor descriptor = new PropertyDescriptor(propertiesName, o);
        return descriptor.getReadMethod();
    } catch (IntrospectionException e) {
        // Fixed message spacing: was "not find getter for<name>in<class>".
        throw new RuntimeException(
            "Cannot find getter for '" + propertiesName + "' in " + o.getName(), e);
    }
}
|
@Test
public void testGetter() {
    // Property "name" should resolve to the conventional getName() accessor.
    Method name = BeanUtil.getter(Customer.class, "name");
    Assert.assertEquals("getName", name.getName());
}
|
/**
 * Loads the common Flink configuration from the command-line arguments,
 * filtering out options not known to the cluster configuration parser.
 */
public static Configuration loadConfiguration(String[] args) throws FlinkException {
    return ConfigurationParserUtils.loadCommonConfiguration(
        filterCmdArgs(args, ClusterConfigurationParserFactory.options()),
        BashJavaUtils.class.getSimpleName());
}
|
@TestTemplate
void testLoadConfigurationDynamicPropertyWithoutSpace() throws Exception {
    // A -Dkey=value dynamic property (no space after -D) must land in the config.
    String[] args = {"--configDir", confDir.toFile().getAbsolutePath(), "-Dkey=value"};
    Configuration configuration = FlinkConfigLoader.loadConfiguration(args);
    verifyConfiguration(configuration, "key", "value");
}
|
/**
 * Transitions the group to the given state, recording the previous state, the
 * transition timestamp, and a metrics event. Throws if the transition is not
 * legal from the current state (via assertValidTransition).
 */
public void transitionTo(ClassicGroupState groupState) {
    assertValidTransition(groupState);
    previousState = state;
    state = groupState;
    currentStateTimestamp = Optional.of(time.milliseconds());
    metrics.onClassicGroupStateTransition(previousState, state);
}
|
@Test
public void testDeadToAwaitingRebalanceIllegalTransition() {
    // DEAD is terminal: moving on to COMPLETING_REBALANCE must be rejected.
    group.transitionTo(PREPARING_REBALANCE);
    group.transitionTo(DEAD);
    assertThrows(IllegalStateException.class, () -> group.transitionTo(COMPLETING_REBALANCE));
}
|
public void run() {
    // Build a JSON runner from the global settings that apply to this env,
    // converted to runner settings, then start it.
    runner = newJsonRunnerWithSetting(
        globalSettings.stream()
            .filter(byEnv(this.env))
            .map(toRunnerSetting())
            .collect(toList()), startArgs);
    runner.run();
}
|
@Test
public void should_run_with_setting() throws IOException {
    // Running with settings.json should serve both configured endpoints.
    stream = getResourceAsStream("settings/settings.json");
    runner = new SettingRunner(stream, createStartArgs(12306));
    runner.run();
    assertThat(helper.get(remoteUrl("/foo")), is("foo"));
    assertThat(helper.get(remoteUrl("/bar")), is("bar"));
}
|
/**
 * Opens a download stream for the given file over an FTP data connection.
 * Switches the control connection to binary mode, applies a REST offset when
 * resuming, and wraps the socket stream so the transfer reply is consumed on
 * close.
 */
@Override
public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    try {
        // Binary mode must be negotiated before opening the data connection.
        if(!session.getClient().setFileType(FTP.BINARY_FILE_TYPE)) {
            throw new FTPException(session.getClient().getReplyCode(), session.getClient().getReplyString());
        }
        if(status.isAppend()) {
            // Resume: instruct the server to start at the given offset (REST).
            session.getClient().setRestartOffset(status.getOffset());
        }
        final InputStream in = new DataConnectionActionExecutor(session).data(new DataConnectionAction<InputStream>() {
            @Override
            public InputStream execute() throws BackgroundException {
                try {
                    return session.getClient().retrieveFileStream(file.getAbsolute());
                }
                catch(IOException e) {
                    throw new FTPExceptionMappingService().map(e);
                }
            }
        });
        // Reads the final transfer reply (or sends ABOR) when the stream closes.
        return new ReadReplyInputStream(in, status);
    }
    catch(IOException e) {
        throw new FTPExceptionMappingService().map("Download {0} failed", e, file);
    }
}
|
@Test
public void testAbortNoRead() throws Exception {
    // Closing a download stream before reading it completely must send ABOR
    // and leave the control connection usable for subsequent commands.
    final TransferStatus status = new TransferStatus();
    status.setLength(5L);
    final Path workdir = new FTPWorkdirService(session).find();
    final Path file = new Path(workdir, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    new DefaultTouchFeature<>(new FTPWriteFeature(session)).touch(file, new TransferStatus());
    final InputStream in = new FTPReadFeature(session).read(file, status, new DisabledConnectionCallback());
    assertNotNull(in);
    // Send ABOR because stream was not read completly
    in.close();
    // Make sure subsequent PWD command works
    assertEquals(workdir, new FTPWorkdirService(session).find());
    new FTPDeleteFeature(session).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
|
/**
 * Returns the contained type with the given name, or {@code null} if this
 * composite holds no type by that name.
 */
public Type getType(final String name)
{
    return containedTypeByNameMap.get(name);
}
|
@Test
void shouldHandleCompositeHasNullableType() throws Exception
{
    // A composite member declared presence="optional" with an explicit
    // nullValue should expose that null value on the parsed EncodedDataType.
    final String nullValStr = "9223372036854775807";
    final String testXmlString =
        "<types>" +
        "<composite name=\"PRICENULL\" description=\"Price NULL\" semanticType=\"Price\">" +
        "    <type name=\"mantissa\" description=\"mantissa\" presence=\"optional\" nullValue=\"" +
        nullValStr + "\" primitiveType=\"int64\"/>" +
        "    <type name=\"exponent\" description=\"exponent\" presence=\"constant\" primitiveType=\"int8\">" +
        "-7</type>" +
        "</composite>" +
        "</types>";
    final Map<String, Type> map = parseTestXmlWithMap("/types/composite", testXmlString);
    final CompositeType c = (CompositeType)map.get("PRICENULL");
    final EncodedDataType mantissa = (EncodedDataType)c.getType("mantissa");
    assertThat(mantissa.nullValue(), is(PrimitiveValue.parse(nullValStr, PrimitiveType.INT64)));
}
|
/**
 * Returns the cached service instance list for the given context path,
 * loading it on first access.
 *
 * @param contextPath the cache key
 * @return the service instances for the context path
 * @throws ShenyuException wrapping the underlying cause if loading fails
 */
public ShenyuServiceInstanceLists get(final String contextPath) {
    try {
        return cache.get(contextPath);
    } catch (ExecutionException e) {
        // Unwrap ExecutionException so callers see the real failure as cause.
        throw new ShenyuException(e.getCause());
    }
}
|
@Test
public void testGet() {
    // The loading cache should materialize an entry on first access.
    assertNotNull(this.applicationConfigCache.get("/test"));
}
|
/**
 * Builds an Accept-Encoding header value from the given encodings in
 * preference order, assigning strictly decreasing nonzero quality values,
 * e.g. {@code "gzip;q=1.00,snappy;q=0.67"}.
 *
 * @param acceptedEncodings encodings in descending preference order
 * @return the Accept-Encoding header value (empty string for an empty array)
 */
static String buildAcceptEncodingHeader(EncodingType[] acceptedEncodings)
{
  //Essentially, we want to assign nonzero quality values to all those specified;
  float delta = 1.0f/(acceptedEncodings.length + 1);
  float currentQuality = 1.0f;
  //Special case so we don't end with an unnecessary delimiter
  StringBuilder acceptEncodingValue = new StringBuilder();
  for (int i = 0; i < acceptedEncodings.length; i++)
  {
    EncodingType t = acceptedEncodings[i];
    if (i > 0)
    {
      acceptEncodingValue.append(CompressionConstants.ENCODING_DELIMITER);
    }
    acceptEncodingValue.append(t.getHttpName());
    acceptEncodingValue.append(CompressionConstants.QUALITY_DELIMITER);
    acceptEncodingValue.append(CompressionConstants.QUALITY_PREFIX);
    // Pin the locale: HTTP qvalues always use '.' as the decimal separator,
    // but String.format without a locale uses the platform default and would
    // emit e.g. "q=1,00" in comma-decimal locales, producing an invalid header.
    acceptEncodingValue.append(String.format(java.util.Locale.US, "%.2f", currentQuality));
    currentQuality = currentQuality - delta;
  }
  return acceptEncodingValue.toString();
}
|
@Test(dataProvider = "contentEncodingGeneratorDataProvider")
public void testEncodingGeneration(EncodingType[] encoding, String acceptEncoding)
{
    // Each encoding array must render to its expected Accept-Encoding value.
    Assert.assertEquals(ClientCompressionFilter.buildAcceptEncodingHeader(encoding), acceptEncoding);
}
|
/**
 * Returns a singleton local-property list expressing that the output is
 * grouped on the given columns.
 */
public static <T> List<LocalProperty<T>> grouped(Collection<T> columns)
{
    return ImmutableList.of(new GroupingProperty<>(columns));
}
|
@Test
public void testMatchedGroupHierarchy()
{
    // Actual grouping hierarchy: a, then b, then c. Each desired grouping is
    // matched against it; Optional.empty() means fully satisfied, otherwise
    // the unsatisfied residue of the desired grouping is returned.
    List<LocalProperty<String>> actual = builder()
        .grouped("a")
        .grouped("b")
        .grouped("c")
        .build();
    // "d" is not provided anywhere, so it remains unsatisfied.
    assertMatch(
        actual,
        builder().grouped("a", "b", "c", "d").build(),
        Optional.of(grouped("d")));
    assertMatch(
        actual,
        builder().grouped("a", "b", "c").build(),
        Optional.empty());
    assertMatch(
        actual,
        builder().grouped("a", "b").build(),
        Optional.empty());
    assertMatch(
        actual,
        builder().grouped("a").build(),
        Optional.empty());
    // "b" alone is not a prefix of the hierarchy, so it is unsatisfied.
    assertMatch(
        actual,
        builder().grouped("b").build(),
        Optional.of(grouped("b")));
    assertMatch(
        actual,
        builder().grouped("b", "c").build(),
        Optional.of(grouped("b", "c")));
    assertMatch(
        actual,
        builder().grouped("a", "c").build(),
        Optional.of(grouped("c")));
    assertMatch(
        actual,
        builder().grouped("c").build(),
        Optional.of(grouped("c")));
    // Repeated prefixes of the hierarchy are all satisfied.
    assertMatch(
        actual,
        builder()
            .grouped("a")
            .grouped("a")
            .grouped("a")
            .grouped("a")
            .grouped("b")
            .build(),
        Optional.empty(),
        Optional.empty(),
        Optional.empty(),
        Optional.empty(),
        Optional.empty());
}
|
/**
 * Converts a set of volume paths into the Docker image-config representation:
 * a map from path string to an empty object. Returns {@code null} for a
 * {@code null} input (delegating to setToMap).
 */
@VisibleForTesting
@Nullable
static Map<String, Map<String, String>> volumesSetToMap(@Nullable Set<AbsoluteUnixPath> volumes) {
    return setToMap(volumes, AbsoluteUnixPath::toString);
}
|
@Test
public void testVolumeListToMap() {
    // Each volume path must map to an empty object, as in Docker image config.
    ImmutableSet<AbsoluteUnixPath> input =
        ImmutableSet.of(
            AbsoluteUnixPath.get("/var/job-result-data"),
            AbsoluteUnixPath.get("/var/log/my-app-logs"));
    ImmutableSortedMap<String, Map<?, ?>> expected =
        ImmutableSortedMap.of(
            "/var/job-result-data", ImmutableMap.of(), "/var/log/my-app-logs", ImmutableMap.of());
    Assert.assertEquals(expected, ImageToJsonTranslator.volumesSetToMap(input));
}
|
@Override
public String pluginNamed() {
    // Identifies this handler as belonging to the WAF plugin.
    return PluginEnum.WAF.getName();
}
|
@Test
public void testPluginNamed() {
    // The handler must report the WAF plugin name.
    final String result = wafPluginDataHandlerUnderTest.pluginNamed();
    assertEquals(PluginEnum.WAF.getName(), result);
}
|
/**
 * Tracks charges of degradable jewellery and other charged items by parsing
 * game/spam chat messages. Each branch either updates the stored charge count
 * from a check/use message or resets it to the maximum (optionally notifying)
 * when an item breaks.
 */
@Subscribe
public void onChatMessage(ChatMessage event)
{
	if (event.getType() == ChatMessageType.GAMEMESSAGE || event.getType() == ChatMessageType.SPAM)
	{
		String message = Text.removeTags(event.getMessage());
		Matcher dodgyCheckMatcher = DODGY_CHECK_PATTERN.matcher(message);
		Matcher dodgyProtectMatcher = DODGY_PROTECT_PATTERN.matcher(message);
		Matcher dodgyBreakMatcher = DODGY_BREAK_PATTERN.matcher(message);
		Matcher bindingNecklaceCheckMatcher = BINDING_CHECK_PATTERN.matcher(message);
		Matcher bindingNecklaceUsedMatcher = BINDING_USED_PATTERN.matcher(message);
		Matcher ringOfForgingCheckMatcher = RING_OF_FORGING_CHECK_PATTERN.matcher(message);
		Matcher amuletOfChemistryCheckMatcher = AMULET_OF_CHEMISTRY_CHECK_PATTERN.matcher(message);
		Matcher amuletOfChemistryUsedMatcher = AMULET_OF_CHEMISTRY_USED_PATTERN.matcher(message);
		Matcher amuletOfChemistryBreakMatcher = AMULET_OF_CHEMISTRY_BREAK_PATTERN.matcher(message);
		Matcher amuletOfBountyCheckMatcher = AMULET_OF_BOUNTY_CHECK_PATTERN.matcher(message);
		Matcher amuletOfBountyUsedMatcher = AMULET_OF_BOUNTY_USED_PATTERN.matcher(message);
		Matcher chronicleAddMatcher = CHRONICLE_ADD_PATTERN.matcher(message);
		Matcher chronicleUseAndCheckMatcher = CHRONICLE_USE_AND_CHECK_PATTERN.matcher(message);
		Matcher slaughterActivateMatcher = BRACELET_OF_SLAUGHTER_ACTIVATE_PATTERN.matcher(message);
		Matcher slaughterCheckMatcher = BRACELET_OF_SLAUGHTER_CHECK_PATTERN.matcher(message);
		Matcher expeditiousActivateMatcher = EXPEDITIOUS_BRACELET_ACTIVATE_PATTERN.matcher(message);
		Matcher expeditiousCheckMatcher = EXPEDITIOUS_BRACELET_CHECK_PATTERN.matcher(message);
		Matcher bloodEssenceCheckMatcher = BLOOD_ESSENCE_CHECK_PATTERN.matcher(message);
		Matcher bloodEssenceExtractMatcher = BLOOD_ESSENCE_EXTRACT_PATTERN.matcher(message);
		Matcher braceletOfClayCheckMatcher = BRACELET_OF_CLAY_CHECK_PATTERN.matcher(message);
		if (message.contains(RING_OF_RECOIL_BREAK_MESSAGE))
		{
			notifier.notify(config.recoilNotification(), "Your Ring of Recoil has shattered");
		}
		else if (dodgyBreakMatcher.find())
		{
			notifier.notify(config.dodgyNotification(), "Your dodgy necklace has crumbled to dust.");
			updateDodgyNecklaceCharges(MAX_DODGY_CHARGES);
		}
		else if (dodgyCheckMatcher.find())
		{
			updateDodgyNecklaceCharges(Integer.parseInt(dodgyCheckMatcher.group(1)));
		}
		else if (dodgyProtectMatcher.find())
		{
			updateDodgyNecklaceCharges(Integer.parseInt(dodgyProtectMatcher.group(1)));
		}
		else if (amuletOfChemistryCheckMatcher.find())
		{
			updateAmuletOfChemistryCharges(Integer.parseInt(amuletOfChemistryCheckMatcher.group(1)));
		}
		else if (amuletOfChemistryUsedMatcher.find())
		{
			// The message spells out "one" instead of the numeral 1.
			final String match = amuletOfChemistryUsedMatcher.group(1);
			int charges = 1;
			if (!match.equals("one"))
			{
				charges = Integer.parseInt(match);
			}
			updateAmuletOfChemistryCharges(charges);
		}
		else if (amuletOfChemistryBreakMatcher.find())
		{
			notifier.notify(config.amuletOfChemistryNotification(), "Your amulet of chemistry has crumbled to dust.");
			updateAmuletOfChemistryCharges(MAX_AMULET_OF_CHEMISTRY_CHARGES);
		}
		else if (amuletOfBountyCheckMatcher.find())
		{
			updateAmuletOfBountyCharges(Integer.parseInt(amuletOfBountyCheckMatcher.group(1)));
		}
		else if (amuletOfBountyUsedMatcher.find())
		{
			updateAmuletOfBountyCharges(Integer.parseInt(amuletOfBountyUsedMatcher.group(1)));
		}
		else if (message.equals(AMULET_OF_BOUNTY_BREAK_TEXT))
		{
			updateAmuletOfBountyCharges(MAX_AMULET_OF_BOUNTY_CHARGES);
		}
		else if (message.contains(BINDING_BREAK_TEXT))
		{
			notifier.notify(config.bindingNotification(), BINDING_BREAK_TEXT);
			// This chat message triggers before the used message so add 1 to the max charges to ensure proper sync
			updateBindingNecklaceCharges(MAX_BINDING_CHARGES + 1);
		}
		else if (bindingNecklaceUsedMatcher.find())
		{
			final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT);
			// Guard against a missing equipment container, as the other
			// equipment-dependent branches below do.
			if (equipment != null && equipment.contains(ItemID.BINDING_NECKLACE))
			{
				updateBindingNecklaceCharges(getItemCharges(ItemChargeConfig.KEY_BINDING_NECKLACE) - 1);
			}
		}
		else if (bindingNecklaceCheckMatcher.find())
		{
			final String match = bindingNecklaceCheckMatcher.group(1);
			int charges = 1;
			if (!match.equals("one"))
			{
				charges = Integer.parseInt(match);
			}
			updateBindingNecklaceCharges(charges);
		}
		else if (ringOfForgingCheckMatcher.find())
		{
			final String match = ringOfForgingCheckMatcher.group(1);
			int charges = 1;
			if (!match.equals("one"))
			{
				charges = Integer.parseInt(match);
			}
			updateRingOfForgingCharges(charges);
		}
		else if (message.equals(RING_OF_FORGING_USED_TEXT) || message.equals(RING_OF_FORGING_VARROCK_PLATEBODY))
		{
			final ItemContainer inventory = client.getItemContainer(InventoryID.INVENTORY);
			final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT);
			// Determine if the player smelted with a Ring of Forging equipped.
			if (equipment == null)
			{
				return;
			}
			// Null-check inventory before counting ore; it is only consulted
			// for the Varrock platebody message.
			if (equipment.contains(ItemID.RING_OF_FORGING)
				&& (message.equals(RING_OF_FORGING_USED_TEXT) || (inventory != null && inventory.count(ItemID.IRON_ORE) > 1)))
			{
				int charges = Ints.constrainToRange(getItemCharges(ItemChargeConfig.KEY_RING_OF_FORGING) - 1, 0, MAX_RING_OF_FORGING_CHARGES);
				updateRingOfForgingCharges(charges);
			}
		}
		else if (message.equals(RING_OF_FORGING_BREAK_TEXT))
		{
			notifier.notify(config.ringOfForgingNotification(), "Your ring of forging has melted.");
			// This chat message triggers before the used message so add 1 to the max charges to ensure proper sync
			updateRingOfForgingCharges(MAX_RING_OF_FORGING_CHARGES + 1);
		}
		else if (chronicleAddMatcher.find())
		{
			final String match = chronicleAddMatcher.group(1);
			if (match.equals("one"))
			{
				setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1);
			}
			else
			{
				setItemCharges(ItemChargeConfig.KEY_CHRONICLE, Integer.parseInt(match));
			}
		}
		else if (chronicleUseAndCheckMatcher.find())
		{
			setItemCharges(ItemChargeConfig.KEY_CHRONICLE, Integer.parseInt(chronicleUseAndCheckMatcher.group(1)));
		}
		else if (message.equals(CHRONICLE_ONE_CHARGE_TEXT))
		{
			setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1);
		}
		else if (message.equals(CHRONICLE_EMPTY_TEXT) || message.equals(CHRONICLE_NO_CHARGES_TEXT))
		{
			setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 0);
		}
		else if (message.equals(CHRONICLE_FULL_TEXT))
		{
			setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1000);
		}
		else if (slaughterActivateMatcher.find())
		{
			// A null group means the bracelet broke rather than reporting charges.
			final String found = slaughterActivateMatcher.group(1);
			if (found == null)
			{
				updateBraceletOfSlaughterCharges(MAX_SLAYER_BRACELET_CHARGES);
				notifier.notify(config.slaughterNotification(), BRACELET_OF_SLAUGHTER_BREAK_TEXT);
			}
			else
			{
				updateBraceletOfSlaughterCharges(Integer.parseInt(found));
			}
		}
		else if (slaughterCheckMatcher.find())
		{
			updateBraceletOfSlaughterCharges(Integer.parseInt(slaughterCheckMatcher.group(1)));
		}
		else if (expeditiousActivateMatcher.find())
		{
			// A null group means the bracelet broke rather than reporting charges.
			final String found = expeditiousActivateMatcher.group(1);
			if (found == null)
			{
				updateExpeditiousBraceletCharges(MAX_SLAYER_BRACELET_CHARGES);
				notifier.notify(config.expeditiousNotification(), EXPEDITIOUS_BRACELET_BREAK_TEXT);
			}
			else
			{
				updateExpeditiousBraceletCharges(Integer.parseInt(found));
			}
		}
		else if (expeditiousCheckMatcher.find())
		{
			updateExpeditiousBraceletCharges(Integer.parseInt(expeditiousCheckMatcher.group(1)));
		}
		else if (bloodEssenceCheckMatcher.find())
		{
			updateBloodEssenceCharges(Integer.parseInt(bloodEssenceCheckMatcher.group(1)));
		}
		else if (bloodEssenceExtractMatcher.find())
		{
			updateBloodEssenceCharges(getItemCharges(ItemChargeConfig.KEY_BLOOD_ESSENCE) - Integer.parseInt(bloodEssenceExtractMatcher.group(1)));
		}
		else if (message.contains(BLOOD_ESSENCE_ACTIVATE_TEXT))
		{
			updateBloodEssenceCharges(MAX_BLOOD_ESSENCE_CHARGES);
		}
		else if (braceletOfClayCheckMatcher.find())
		{
			updateBraceletOfClayCharges(Integer.parseInt(braceletOfClayCheckMatcher.group(1)));
		}
		else if (message.equals(BRACELET_OF_CLAY_USE_TEXT) || message.equals(BRACELET_OF_CLAY_USE_TEXT_TRAHAEARN))
		{
			final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT);
			// Determine if the player mined with a Bracelet of Clay equipped.
			if (equipment != null && equipment.contains(ItemID.BRACELET_OF_CLAY))
			{
				final ItemContainer inventory = client.getItemContainer(InventoryID.INVENTORY);
				// Charge is not used if only 1 inventory slot is available when mining in Prifddinas
				boolean ignore = inventory != null
					&& inventory.count() == 27
					&& message.equals(BRACELET_OF_CLAY_USE_TEXT_TRAHAEARN);
				if (!ignore)
				{
					int charges = Ints.constrainToRange(getItemCharges(ItemChargeConfig.KEY_BRACELET_OF_CLAY) - 1, 0, MAX_BRACELET_OF_CLAY_CHARGES);
					updateBraceletOfClayCharges(charges);
				}
			}
		}
		else if (message.equals(BRACELET_OF_CLAY_BREAK_TEXT))
		{
			notifier.notify(config.braceletOfClayNotification(), "Your bracelet of clay has crumbled to dust");
			updateBraceletOfClayCharges(MAX_BRACELET_OF_CLAY_CHARGES);
		}
	}
}
|
@Test
public void testBloodEssenceExtract()
{
    // Extracting 67 from a blood essence holding 1000 charges leaves 933.
    ChatMessage chatMessage = new ChatMessage(null, ChatMessageType.GAMEMESSAGE, "", EXTRACT_BLOOD_ESSENCE, "", 0);
    when(configManager.getConfiguration(ItemChargeConfig.GROUP, ItemChargeConfig.KEY_BLOOD_ESSENCE, Integer.class)).thenReturn(1000);
    itemChargePlugin.onChatMessage(chatMessage);
    verify(configManager).setRSProfileConfiguration(ItemChargeConfig.GROUP, ItemChargeConfig.KEY_BLOOD_ESSENCE, 933);
}
|
@Override
public Optional<String> getContentHash() {
    // Empty until a content hash has been recorded (e.g. after upload completes).
    return Optional.ofNullable(mContentHash);
}
|
@Test
public void writeByteArrayForLargeFile() throws Exception {
    // Writing one byte past the partition size must trigger a multipart
    // upload, split the write across parts, and expose the multipart ETag
    // as the content hash after close.
    int partSize = (int) FormatUtils.parseSpaceSize(PARTITION_SIZE);
    byte[] b = new byte[partSize + 1];
    mStream.write(b, 0, b.length);
    Mockito.verify(mMockOssClient)
        .initiateMultipartUpload(any(InitiateMultipartUploadRequest.class));
    Mockito.verify(mMockOutputStream).write(b, 0, b.length - 1);
    Mockito.verify(mMockOutputStream).write(b, b.length - 1, 1);
    Mockito.verify(mMockExecutor).submit(any(Callable.class));
    mStream.close();
    Mockito.verify(mMockOssClient)
        .completeMultipartUpload(any(CompleteMultipartUploadRequest.class));
    assertTrue(mStream.getContentHash().isPresent());
    assertEquals("multiTag", mStream.getContentHash().get());
}
|
/**
 * Returns whether this version is the same as, or newer than, the given one.
 */
public boolean isNewerOrEqualTo(VersionNumber versionNumber) {
    if (equals(versionNumber)) {
        return true;
    }
    return isNewerThan(versionNumber);
}
|
@Test
void testIsNewerOrEqualTo() {
    // Equal, newer (including pre-release ordering), and partial versions are accepted.
    assertThat(v("6.0.0").isNewerOrEqualTo(v("6.0.0"))).isTrue();
    assertThat(v("6.0.0").isNewerOrEqualTo(v("5.0.0"))).isTrue();
    assertThat(v("10.0.0").isNewerOrEqualTo(v("9.0.0"))).isTrue();
    assertThat(v("10.0.0").isNewerOrEqualTo(v("1.0.0"))).isTrue();
    assertThat(v("5.0.1").isNewerOrEqualTo(v("5.0.0"))).isTrue();
    assertThat(v("10.6").isNewerOrEqualTo(v("10.0.0"))).isTrue();
    assertThat(v("7.0.0-beta.2").isNewerOrEqualTo(v("6.0.0"))).isTrue();
    assertThat(v("7.0.0-beta.1").isNewerOrEqualTo(v("7.0.0-alpha.1"))).isTrue();
    assertThat(v("7.0.0-beta.3").isNewerOrEqualTo(v("7.0.0-beta.2"))).isTrue();
    // Older versions are rejected, including legacy "1.8.0_241"-style strings.
    assertThat(v("5.0.1").isNewerOrEqualTo(v("6.0.0"))).isFalse();
    assertThat(v("1.0.0").isNewerOrEqualTo(v("10.0.0"))).isFalse();
    assertThat(v("9.0.0").isNewerOrEqualTo(v("10.0.0"))).isFalse();
    assertThat(v("10.6").isNewerOrEqualTo(v("11.0.0"))).isFalse();
    assertThat(v("1.8.0_241").isNewerOrEqualTo(v("21"))).isFalse();
}
|
/** Returns the language this metric was measured for (e.g. "java"). */
public String getLanguage() {
    return language;
}
|
@Test
void gettersAndSetters() {
    // All constructor arguments must be exposed unchanged by the getters.
    LanguageMetric metric = new LanguageMetric("ncloc", 100, "java", TelemetryDataType.INTEGER, Granularity.MONTHLY);
    assertThat(metric.getLanguage()).isEqualTo("java");
    assertThat(metric.getValue()).isEqualTo(100);
    assertThat(metric.getKey()).isEqualTo("ncloc");
    assertThat(metric.getGranularity()).isEqualTo(Granularity.MONTHLY);
    assertThat(metric.getType()).isEqualTo(TelemetryDataType.INTEGER);
}
|
/**
 * Fetches a partition from the underlying metastore by database, table,
 * and partition values.
 */
public Partition getPartition(String dbName, String tableName, List<String> partitionValues) {
    return metastore.getPartition(dbName, tableName, partitionValues);
}
|
@Test
public void testGetPartition() {
    // Both a concrete partition value and an empty value list should resolve
    // a partition carrying the expected format and parameters.
    Partition partition = hmsOps.getPartition(
        "db1", "tbl1", Lists.newArrayList("par1"));
    Assert.assertEquals(ORC, partition.getFileFormat());
    Assert.assertEquals("100", partition.getParameters().get(TOTAL_SIZE));
    partition = hmsOps.getPartition("db1", "tbl1", Lists.newArrayList());
    Assert.assertEquals("100", partition.getParameters().get(TOTAL_SIZE));
}
|
/**
 * Builds a URI from its parts, encoding the path and query parameters.
 *
 * @param schema   the URI scheme, e.g. {@code "http"}
 * @param hostPort host or {@code host:port}
 * @param path     the path component (encoded by the builder)
 * @param params   query parameters, appended in iteration order
 * @return the assembled URI
 * @throws RuntimeException wrapping a URISyntaxException if the parts do not
 *         form a valid URI
 */
public static URI buildURI(String schema, String hostPort, String path, Map<String, String> params) {
    // Split "host[:port]".
    StringTokenizer tokenizer = new StringTokenizer(hostPort, ":");
    String host = tokenizer.nextToken();
    String port = null;
    if (tokenizer.hasMoreTokens()) {
        port = tokenizer.nextToken();
    }
    URIBuilder uriBuilder = new URIBuilder().setScheme(schema).setHost(host).setPath(path);
    if (port != null) {
        uriBuilder.setPort(Integer.parseInt(port));
    }
    for (Map.Entry<String, String> entry : params.entrySet()) {
        uriBuilder.addParameter(entry.getKey(), entry.getValue());
    }
    try {
        return uriBuilder.build();
    } catch (java.net.URISyntaxException e) {
        // Catch only the checked exception build() declares instead of the
        // broad Exception, so programming errors are not masked.
        throw new RuntimeException(e);
    }
}
|
@Test
public void testBuildURI() {
    // Basic host, host:port, query-parameter encoding, and path encoding.
    URI uri = URIUtils.buildURI("http", "foo", "bar", Collections.emptyMap());
    Assert.assertEquals(uri.toString(), "http://foo/bar");
    uri = URIUtils.buildURI("http", "foo:8080", "bar/moo", Collections.emptyMap());
    Assert.assertEquals(uri.toString(), "http://foo:8080/bar/moo");
    Assert.assertEquals(uri.getHost(), "foo");
    Assert.assertEquals(uri.getPort(), 8080);
    // test that params get encoded
    Map<String, String> params = new LinkedHashMap<>();
    params.put("stringParam", "aString");
    params.put("stringParamNeedsEncoding", "{\"format\":\"JSON\",\"timeout\":1000}");
    uri = URIUtils.buildURI("http", "foo", "bar", params);
    Assert.assertEquals(uri.toString(), "http://foo/bar?stringParam=aString&stringParamNeedsEncoding=" + URIUtils
        .encode("{\"format\":\"JSON\",\"timeout\":1000}"));
    // test that path gets encoded
    uri = URIUtils.buildURI("http", "foo", "bar%moo{}", Collections.emptyMap());
    Assert.assertEquals(uri.toString(), "http://foo/" + URIUtils.encode("bar%moo{}"));
}
|
/**
 * Extracts the major Java version from a version string, trying the dotted
 * form first and then a bare leading integer; falls back to 6 (the minimum
 * supported JDK) when neither parse succeeds.
 */
static int parseMajorJavaVersion(String javaVersion) {
    int version = parseDotted(javaVersion);
    if (version == -1) {
        version = extractBeginningInt(javaVersion);
    }
    // -1 signals that neither strategy could parse the string.
    return version == -1 ? 6 : version;
}
|
@Test
public void testJava6() {
    // Legacy "1.x" scheme: the major version is the second component.
    // http://www.oracle.com/technetwork/java/javase/version-6-141920.html
    assertThat(JavaVersion.parseMajorJavaVersion("1.6.0")).isEqualTo(6);
}
|
/**
 * Returns the configured bind-address supplier, or {@code null} if none was
 * set.
 */
@Nullable
public Supplier<? extends SocketAddress> bindAddressSupplier() {
    return bindAddressSupplier;
}
|
@Test
void bindAddressSupplierBadValues() {
    // The builder must reject a null supplier eagerly.
    assertThatExceptionOfType(NullPointerException.class)
        .isThrownBy(() -> builder.bindAddressSupplier(null));
}
|
/** Returns the ranges making up this Thrift range value set. */
@ThriftField(1)
public List<PrestoThriftRange> getRanges()
{
    return ranges;
}
|
@Test
public void testFromValueSetOf()
{
    // A discrete value set converts to single-point ranges (EXACTLY bounds).
    PrestoThriftValueSet thriftValueSet = fromValueSet(ValueSet.of(BIGINT, 1L, 2L, 3L));
    assertNotNull(thriftValueSet.getRangeValueSet());
    assertEquals(thriftValueSet.getRangeValueSet().getRanges(), ImmutableList.of(
        new PrestoThriftRange(new PrestoThriftMarker(longValue(1), EXACTLY), new PrestoThriftMarker(longValue(1), EXACTLY)),
        new PrestoThriftRange(new PrestoThriftMarker(longValue(2), EXACTLY), new PrestoThriftMarker(longValue(2), EXACTLY)),
        new PrestoThriftRange(new PrestoThriftMarker(longValue(3), EXACTLY), new PrestoThriftMarker(longValue(3), EXACTLY))));
}
|
/**
 * Parses the GIF header (and frame metadata) from the data set via setData().
 * If an error has already been flagged, returns the partially-read header as
 * is; a parse yielding a negative frame count marks the header as
 * STATUS_FORMAT_ERROR.
 *
 * @throws IllegalStateException if setData() was not called first
 */
@NonNull
public GifHeader parseHeader() {
    if (rawData == null) {
        throw new IllegalStateException("You must call setData() before parseHeader()");
    }
    if (err()) {
        return header;
    }
    readHeader();
    if (!err()) {
        readContents();
        if (header.frameCount < 0) {
            header.status = STATUS_FORMAT_ERROR;
        }
    }
    return header;
}
|
@Test(expected = IllegalStateException.class)
public void testThrowsIfParseHeaderCalledBeforeSetData() {
    // parseHeader() without setData() must fail fast.
    GifHeaderParser parser = new GifHeaderParser();
    parser.parseHeader();
}
|
/**
 * Unmarshals the XML output of the GPU discovery script into a
 * {@link GpuDeviceInformation}. Synchronized because the shared JAXB
 * unmarshaller and XML reader are not thread-safe.
 *
 * @param xmlContent the raw XML emitted by the script
 * @throws YarnException if the XML cannot be parsed
 */
public synchronized GpuDeviceInformation parseXml(String xmlContent)
    throws YarnException {
    InputSource inputSource = new InputSource(new StringReader(xmlContent));
    SAXSource source = new SAXSource(xmlReader, inputSource);
    try {
        return (GpuDeviceInformation) unmarshaller.unmarshal(source);
    } catch (JAXBException e) {
        String msg = "Failed to parse XML output of " +
            GPU_SCRIPT_REFERENCE + "!";
        LOG.error(msg, e);
        throw new YarnException(msg, e);
    }
}
|
@Test
public void testParseInvalidRootElement() throws YarnException {
    // Deliberately malformed XML (unterminated closing tag) must raise
    // a YarnException.
    expected.expect(YarnException.class);
    GpuDeviceInformationParser parser = new GpuDeviceInformationParser();
    parser.parseXml("<nvidia_smiiiii></nvidia_smiiiii");
}
|
/**
 * Decodes MySQL packets from the inbound buffer. A packet is a 3-byte
 * little-endian payload length, a 1-byte sequence id, then the payload.
 * Incomplete packets leave the buffer untouched; payloads of exactly the
 * maximum packet length are buffered and aggregated with their continuation
 * packets.
 */
@Override
public void decode(final ChannelHandlerContext context, final ByteBuf in, final List<Object> out) {
    int payloadLength = in.markReaderIndex().readUnsignedMediumLE();
    int remainPayloadLength = SEQUENCE_LENGTH + payloadLength;
    if (in.readableBytes() < remainPayloadLength) {
        // Not a whole packet yet; rewind and wait for more bytes.
        in.resetReaderIndex();
        return;
    }
    ByteBuf message = in.readRetainedSlice(remainPayloadLength);
    if (MAX_PACKET_LENGTH == payloadLength) {
        // Max-length payload: more packets of the same message follow.
        pendingMessages.add(message.skipBytes(SEQUENCE_LENGTH));
    } else if (pendingMessages.isEmpty()) {
        out.add(message);
    } else {
        // Final fragment of a multi-packet message: stitch them together.
        aggregateMessages(context, message, out);
    }
}
|
@Test
void assertDecodeWithStickyPacket() {
    // A declared payload of 50 bytes with no readable bytes behind it must
    // produce no output (decoder waits for the rest of the packet).
    when(byteBuf.markReaderIndex()).thenReturn(byteBuf);
    when(byteBuf.readUnsignedMediumLE()).thenReturn(50);
    List<Object> out = new LinkedList<>();
    new MySQLPacketCodecEngine().decode(context, byteBuf, out);
    assertTrue(out.isEmpty());
}
|
// Reads Pub/Sub message payloads as UTF-8 strings, using the UTF-8 string coder.
public static Read<String> readStrings() {
return Read.newBuilder(
(PubsubMessage message) -> new String(message.getPayload(), StandardCharsets.UTF_8))
.setCoder(StringUtf8Coder.of())
.build();
}
|
@Test
public void testFailedParseWithErrorHandlerConfigured() throws Exception {
// One message is delivered, the parse function always throws, and the error
// handler is expected to absorb the failure: the read output stays empty and
// the error sink counts exactly one bad record.
ByteString data = ByteString.copyFrom("Hello, World!".getBytes(StandardCharsets.UTF_8));
RuntimeException exception = new RuntimeException("Some error message");
ImmutableList<IncomingMessage> expectedReads =
ImmutableList.of(
IncomingMessage.of(
com.google.pubsub.v1.PubsubMessage.newBuilder().setData(data).build(),
1234L,
0,
UUID.randomUUID().toString(),
UUID.randomUUID().toString()));
// Nothing should be published back.
ImmutableList<OutgoingMessage> expectedWrites = ImmutableList.of();
clientFactory =
PubsubTestClient.createFactoryForPullAndPublish(
SUBSCRIPTION, TOPIC, CLOCK, 60, expectedReads, expectedWrites, ImmutableList.of());
ErrorHandler<BadRecord, PCollection<Long>> errorHandler =
pipeline.registerBadRecordErrorHandler(new ErrorSinkTransform());
PCollection<String> read =
pipeline.apply(
PubsubIO.readStrings()
.fromSubscription(SUBSCRIPTION.getPath())
.withErrorHandler(errorHandler)
.withClock(CLOCK)
.withClientFactory(clientFactory)
.withCoderAndParseFn(
StringUtf8Coder.of(),
SimpleFunction.fromSerializableFunctionWithOutputType(
message -> {
// Deliberately fail every element so it is routed to the error handler.
throw exception;
},
TypeDescriptors.strings())));
errorHandler.close();
PAssert.thatSingleton(errorHandler.getOutput()).isEqualTo(1L);
PAssert.that(read).empty();
pipeline.run();
}
|
@Override
protected boolean hasLeadership(String componentId, UUID leaderSessionId) {
    synchronized (lock) {
        // A null driver means the service has been closed.
        if (leaderElectionDriver == null) {
            LOG.debug("hasLeadership is called after the service is closed, returning false.");
            return false;
        }
        if (!leaderContenderRegistry.containsKey(componentId)) {
            LOG.debug(
                    "hasLeadership is called for component '{}' while there is no contender registered under that ID in the service, returning false.",
                    componentId);
            return false;
        }
        // Leadership holds only if the driver still has it and the queried session
        // matches the session this service issued.
        return leaderElectionDriver.hasLeadership()
                && leaderSessionId.equals(issuedLeaderSessionID);
    }
}
|
@Test
void testHasLeadershipWithLeadershipButNoGrantEventProcessed() throws Exception {
// Events are triggered manually and the grant event is never processed, so
// hasLeadership must report false for both the granted session ID and a
// random one, for every registered contender.
new Context() {
{
runTestWithManuallyTriggeredEvents(
executorService -> {
final UUID expectedSessionID = UUID.randomUUID();
grantLeadership(expectedSessionID);
applyToBothContenderContexts(
ctx -> {
assertThat(
leaderElectionService.hasLeadership(
ctx.componentId, expectedSessionID))
.isFalse();
assertThat(
leaderElectionService.hasLeadership(
ctx.componentId, UUID.randomUUID()))
.isFalse();
});
});
}
};
}
|
void doSyntaxCheck(NamespaceTextModel model) {
    // Blank config text needs no validation.
    if (StringUtils.isBlank(model.getConfigText())) {
        return;
    }
    // Syntax checking is only supported for the YAML/YML formats.
    ConfigFileFormat format = model.getFormat();
    if (format != ConfigFileFormat.YAML && format != ConfigFileFormat.YML) {
        return;
    }
    // Converting YAML to properties throws on malformed input, which is exactly
    // the validation we rely on here.
    TypeLimitedYamlPropertiesFactoryBean factoryBean = new TypeLimitedYamlPropertiesFactoryBean();
    factoryBean.setResources(new ByteArrayResource(model.getConfigText().getBytes()));
    try {
        factoryBean.getObject();
    } catch (Exception ex) {
        throw new BadRequestException(ex.getMessage());
    }
}
|
@Test(expected = BadRequestException.class)
public void yamlSyntaxCheckWithDuplicatedValue() throws Exception {
// case2.yaml contains a duplicated value, which the YAML syntax check must reject.
String yaml = loadYaml("case2.yaml");
itemController.doSyntaxCheck(assemble(ConfigFileFormat.YAML.getValue(), yaml));
}
|
@Override
public <R> R create(final Class<R> resourceClass)
{
    // Resolve the resource instance through the underlying container adaptor.
    final R resource = _containerAdaptor.getBean(resourceClass);
    return resource;
}
|
@Test
public void testMockInjectionSubClass()
{
    // A bean registered under the base type's name but holding a derived instance
    // must still be injected and pass validation.
    SimpleBeanProvider beanProvider = new SimpleBeanProvider()
        .add("counterBean", new CounterBean())
        .add("mySpecialBean", new DerivedBean());
    InjectMockResourceFactory resourceFactory =
        new InjectMockResourceFactory(beanProvider, Resource.class);
    resourceFactory.create(Resource.class).validateBeans();
}
|
@Override
public V put(K key, V value) {
    // Nulls are rejected up front, matching HashMap-style null-hostile semantics.
    Objects.requireNonNull(key);
    Objects.requireNonNull(value);
    // Record the mapping in the snapshottable store and surface the displaced
    // value, if any, to honor the Map.put contract.
    TimelineHashMapEntry<K, V> previous =
        snapshottableAddOrReplace(new TimelineHashMapEntry<>(key, value));
    return previous == null ? null : previous.getValue();
}
|
@Test
public void testNullsForbidden() {
    // TimelineHashMap must reject null keys and null values alike.
    TimelineHashMap<String, Boolean> timelineMap =
        new TimelineHashMap<>(new SnapshotRegistry(new LogContext()), 1);
    assertThrows(NullPointerException.class, () -> timelineMap.put(null, true));
    assertThrows(NullPointerException.class, () -> timelineMap.put("abc", null));
    assertThrows(NullPointerException.class, () -> timelineMap.put(null, null));
}
|
@Override
public ValueType deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
    // Only JSON strings can name a ValueType; any other token is a token-type error.
    if (p.currentTokenId() != JsonTokenId.ID_STRING) {
        throw ctxt.wrongTokenException(p, handledType(), JsonToken.VALUE_STRING, "expected String " + Arrays.toString(ValueType.values()));
    }
    // Enum names are matched case-insensitively via an upper-case normalization.
    final String normalized = StringUtils.upperCase(p.getText(), Locale.ROOT);
    try {
        return ValueType.valueOf(normalized);
    } catch (IllegalArgumentException e) {
        // Keep Jackson's standard "weird string" error shape for unknown names.
        throw ctxt.weirdStringException(normalized, ValueType.class, e.getMessage());
    }
}
|
@Test
public void deserialize() throws IOException {
// Lower-case names must map to their enum constants (case-insensitive matching).
assertThat(objectMapper.readValue("\"boolean\"", ValueType.class)).isEqualTo(ValueType.BOOLEAN);
assertThat(objectMapper.readValue("\"double\"", ValueType.class)).isEqualTo(ValueType.DOUBLE);
assertThat(objectMapper.readValue("\"float\"", ValueType.class)).isEqualTo(ValueType.FLOAT);
assertThat(objectMapper.readValue("\"integer\"", ValueType.class)).isEqualTo(ValueType.INTEGER);
assertThat(objectMapper.readValue("\"long\"", ValueType.class)).isEqualTo(ValueType.LONG);
assertThat(objectMapper.readValue("\"string\"", ValueType.class)).isEqualTo(ValueType.STRING);
assertThat(objectMapper.readValue("\"parameter\"", ValueType.class)).isEqualTo(ValueType.PARAMETER);
// Strings that are not enum names fail with the "weird string" mapping error.
assertThatThrownBy(() -> objectMapper.readValue("\"\"", ValueType.class))
.isInstanceOf(JsonMappingException.class)
.hasMessageStartingWith("Cannot deserialize value of type `org.graylog2.contentpacks.model.entities.references.ValueType` from String \"\": No enum constant org.graylog2.contentpacks.model.entities.references.ValueType");
assertThatThrownBy(() -> objectMapper.readValue("\"UNKNOWN\"", ValueType.class))
.isInstanceOf(JsonMappingException.class)
.hasMessageStartingWith("Cannot deserialize value of type `org.graylog2.contentpacks.model.entities.references.ValueType` from String \"UNKNOWN\": No enum constant org.graylog2.contentpacks.model.entities.references.ValueType");
// Non-string JSON tokens fail with the wrong-token mapping error.
assertThatThrownBy(() -> objectMapper.readValue("0", ValueType.class))
.isInstanceOf(JsonMappingException.class)
.hasMessageStartingWith("Unexpected token (VALUE_NUMBER_INT), expected VALUE_STRING: expected String");
assertThatThrownBy(() -> objectMapper.readValue("true", ValueType.class))
.isInstanceOf(JsonMappingException.class)
.hasMessageStartingWith("Unexpected token (VALUE_TRUE), expected VALUE_STRING: expected String");
assertThatThrownBy(() -> objectMapper.readValue("{}", ValueType.class))
.isInstanceOf(JsonMappingException.class)
.hasMessageStartingWith("Unexpected token (START_OBJECT), expected VALUE_STRING: expected String");
assertThatThrownBy(() -> objectMapper.readValue("[]", ValueType.class))
.isInstanceOf(JsonMappingException.class)
.hasMessageStartingWith("Unexpected token (START_ARRAY), expected VALUE_STRING: expected String");
}
|
// Builds a Getter backed by a reflective method call. The modifier string
// (such as "[any]" in the tests) is forwarded to the getter machinery;
// presumably it controls collection/array extraction — confirm in newGetter.
public static Getter newMethodGetter(Object object, Getter parent, Method method, String modifier) throws Exception {
return newGetter(object, parent, modifier, method.getReturnType(), method::invoke,
(t, et) -> new MethodGetter(parent, method, modifier, t, et));
}
|
@Test
public void newMethodGetter_whenExtractingFromNonEmpty_Collection_nullFirst_FieldAndParentIsNonEmptyMultiResult_thenInferReturnType()
throws Exception {
    // Extracting with [any] across nested collections should infer the element
    // type (Integer) rather than the raw collection type.
    OuterObject outer = new OuterObject("name", null, new InnerObject("inner", 0, 1, 2, 3));
    Getter innersGetter = GetterFactory.newMethodGetter(outer, null, innersCollectionMethod, "[any]");
    Getter attributesGetter =
        GetterFactory.newMethodGetter(outer, innersGetter, innerAttributesCollectionMethod, "[any]");
    assertEquals(Integer.class, attributesGetter.getReturnType());
}
|
// Captures the runtime options consulted later when computing the exit status.
public ExitStatus(Options options) {
this.options = options;
}
|
@Test
void with_failed_scenarios() {
// A single FAILED test case must map to exit code 1.
createRuntime();
bus.send(testCaseFinishedWithStatus(Status.FAILED));
assertThat(exitStatus.exitStatus(), is(equalTo((byte) 0x1)));
}
|
// Advances the slot state machine by one step: dispatches to the handler for
// the current MachineState and returns the resulting state. Throws
// IllegalStateException for states this code does not yet handle.
static DynamicState stateMachineStep(DynamicState dynamicState, StaticState staticState) throws Exception {
LOG.debug("STATE {}", dynamicState.state);
switch (dynamicState.state) {
case EMPTY:
return handleEmpty(dynamicState, staticState);
case RUNNING:
return handleRunning(dynamicState, staticState);
case WAITING_FOR_WORKER_START:
return handleWaitingForWorkerStart(dynamicState, staticState);
case KILL_BLOB_UPDATE:
return handleKillBlobUpdate(dynamicState, staticState);
case KILL_AND_RELAUNCH:
return handleKillAndRelaunch(dynamicState, staticState);
case KILL:
return handleKill(dynamicState, staticState);
case WAITING_FOR_BLOB_LOCALIZATION:
return handleWaitingForBlobLocalization(dynamicState, staticState);
case WAITING_FOR_BLOB_UPDATE:
return handleWaitingForBlobUpdate(dynamicState, staticState);
default:
throw new IllegalStateException("Code not ready to handle a state of " + dynamicState.state);
}
}
|
@Test
public void testReschedule() throws Exception {
// Walks the slot state machine through replacing a running assignment
// (CURRENT) with a new one (NEW): kill, force-kill, cleanup, blob
// localization, launch, and finally steady-state RUNNING.
try (SimulatedTime ignored = new SimulatedTime(1010)) {
int port = 8080;
String cTopoId = "CURRENT";
List<ExecutorInfo> cExecList = mkExecutorInfoList(1, 2, 3, 4, 5);
LocalAssignment cAssignment =
mkLocalAssignment(cTopoId, cExecList, mkWorkerResources(100.0, 100.0, 100.0));
BlobChangingCallback cb = mock(BlobChangingCallback.class);
Container cContainer = mock(Container.class);
LSWorkerHeartbeat chb = mkWorkerHB(cTopoId, port, cExecList, Time.currentTimeSecs());
when(cContainer.readHeartbeat()).thenReturn(chb);
// The current container's processes die only on the third poll, forcing the
// state machine through kill -> force-kill -> cleanup.
when(cContainer.areAllProcessesDead()).thenReturn(false, false, true);
String nTopoId = "NEW";
List<ExecutorInfo> nExecList = mkExecutorInfoList(1, 2, 3, 4, 5);
LocalAssignment nAssignment =
mkLocalAssignment(nTopoId, nExecList, mkWorkerResources(100.0, 100.0, 100.0));
AsyncLocalizer localizer = mock(AsyncLocalizer.class);
Container nContainer = mock(Container.class);
LocalState state = mock(LocalState.class);
ContainerLauncher containerLauncher = mock(ContainerLauncher.class);
when(containerLauncher.launchContainer(port, nAssignment, state)).thenReturn(nContainer);
LSWorkerHeartbeat nhb = mkWorkerHB(nTopoId, 100, nExecList, Time.currentTimeSecs());
when(nContainer.readHeartbeat()).thenReturn(nhb, nhb);
@SuppressWarnings("unchecked")
CompletableFuture<Void> blobFuture = mock(CompletableFuture.class);
when(localizer.requestDownloadTopologyBlobs(nAssignment, port, cb)).thenReturn(blobFuture);
ISupervisor iSuper = mock(ISupervisor.class);
SlotMetrics slotMetrics = new SlotMetrics(new StormMetricsRegistry());
StaticState staticState = new StaticState(localizer, 5000, 120000, 1000, 1000,
containerLauncher, "localhost", port, iSuper, state, cb, null, null, slotMetrics);
DynamicState dynamicState = new DynamicState(cAssignment, cContainer, nAssignment, slotMetrics);
// Step 1: new assignment detected -> KILL the current container and start
// downloading the new topology's blobs.
DynamicState nextState = Slot.stateMachineStep(dynamicState, staticState);
assertEquals(MachineState.KILL, nextState.state);
verify(cContainer).kill();
verify(localizer).requestDownloadTopologyBlobs(nAssignment, port, cb);
assertSame(blobFuture, nextState.pendingDownload, "pendingDownload not set properly");
assertEquals(nAssignment, nextState.pendingLocalization);
assertTrue(Time.currentTimeMillis() > 1000);
// Step 2: processes still alive -> escalate to forceKill, stay in KILL.
nextState = Slot.stateMachineStep(nextState, staticState);
assertEquals(MachineState.KILL, nextState.state);
verify(cContainer).forceKill();
assertSame(blobFuture, nextState.pendingDownload, "pendingDownload not set properly");
assertEquals(nAssignment, nextState.pendingLocalization);
assertTrue(Time.currentTimeMillis() > 2000);
// Step 3: processes dead -> clean up the old container and release its slot.
nextState = Slot.stateMachineStep(nextState, staticState);
assertEquals(MachineState.WAITING_FOR_BLOB_LOCALIZATION, nextState.state);
verify(cContainer).cleanUp();
verify(localizer).releaseSlotFor(cAssignment, port);
assertTrue(Time.currentTimeMillis() > 2000);
// Step 4: blobs localized -> launch the new container.
nextState = Slot.stateMachineStep(nextState, staticState);
verify(blobFuture).get(1000, TimeUnit.MILLISECONDS);
verify(containerLauncher).launchContainer(port, nAssignment, state);
assertEquals(MachineState.WAITING_FOR_WORKER_START, nextState.state);
assertNull(nextState.pendingDownload, "pendingDownload is not null");
assertNull(nextState.pendingLocalization);
assertSame(nAssignment, nextState.currentAssignment);
assertSame(nContainer, nextState.container);
assertTrue(Time.currentTimeMillis() > 2000);
// Steps 5-7: heartbeats arrive -> transition to RUNNING and remain there.
nextState = Slot.stateMachineStep(nextState, staticState);
assertEquals(MachineState.RUNNING, nextState.state);
assertNull(nextState.pendingDownload, "pendingDownload is not null");
assertNull(nextState.pendingLocalization);
assertSame(nAssignment, nextState.currentAssignment);
assertSame(nContainer, nextState.container);
assertTrue(Time.currentTimeMillis() > 2000);
nextState = Slot.stateMachineStep(nextState, staticState);
assertEquals(MachineState.RUNNING, nextState.state);
assertNull(nextState.pendingDownload, "pendingDownload is not null");
assertNull(nextState.pendingLocalization);
assertSame(nAssignment, nextState.currentAssignment);
assertSame(nContainer, nextState.container);
assertTrue(Time.currentTimeMillis() > 3000);
nextState = Slot.stateMachineStep(nextState, staticState);
assertEquals(MachineState.RUNNING, nextState.state);
assertNull(nextState.pendingDownload, "pendingDownload is not null");
assertNull(nextState.pendingLocalization);
assertSame(nAssignment, nextState.currentAssignment);
assertSame(nContainer, nextState.container);
assertTrue(Time.currentTimeMillis() > 4000);
}
}
|
@Override
public void updateUserPassword(String username, String password) {
    try {
        final String sql = "UPDATE users SET password = ? WHERE username=?";
        EmbeddedStorageContextHolder.addSqlContext(sql, password, username);
        databaseOperate.blockUpdate();
    } finally {
        // Always clear the thread-bound SQL context, even when the update fails.
        EmbeddedStorageContextHolder.cleanAllContext();
    }
}
|
@Test
void testUpdateUserPassword() {
// The password update must be funneled through DatabaseOperate.blockUpdate().
embeddedUserPersistService.updateUserPassword("username", "password");
Mockito.verify(databaseOperate).blockUpdate();
}
|
// SQL scalar function: arc cosine of a double, in radians.
// Per Math.acos, returns NaN for inputs outside [-1, 1].
@Description("arc cosine")
@ScalarFunction
@SqlType(StandardTypes.DOUBLE)
public static double acos(@SqlType(StandardTypes.DOUBLE) double num)
{
return Math.acos(num);
}
|
@Test
public void testAcos()
{
    // acos must agree with Math.acos for both DOUBLE and REAL arguments,
    // and must be null-preserving.
    for (double value : DOUBLE_VALUES) {
        assertFunction("acos(" + value + ")", DOUBLE, Math.acos(value));
        assertFunction("acos(REAL '" + (float) value + "')", DOUBLE, Math.acos((float) value));
    }
    assertFunction("acos(NULL)", DOUBLE, null);
}
|
@Override
public void processElement(StreamRecord<FlinkInputSplit> element) {
// Queue the incoming split, then schedule asynchronous processing of the
// queued splits.
splits.add(element.getValue());
enqueueProcessSplits();
}
|
@TestTemplate
public void testProcessAllRecords() throws Exception {
// Feeds ten splits one at a time: each mailbox step must read exactly one
// split's records, and the accumulated output must match the committed data.
List<List<Record>> expectedRecords = generateRecordsAndCommitTxn(10);
List<FlinkInputSplit> splits = generateSplits();
assertThat(splits).hasSize(10);
try (OneInputStreamOperatorTestHarness<FlinkInputSplit, RowData> harness = createReader()) {
harness.setup();
harness.open();
SteppingMailboxProcessor processor = createLocalMailbox(harness);
List<Record> expected = Lists.newArrayList();
for (int i = 0; i < splits.size(); i++) {
// Process this element to enqueue to mail-box.
harness.processElement(splits.get(i), -1);
// Run the mail-box once to read all records from the given split.
assertThat(processor.runMailboxStep()).as("Should processed 1 split").isTrue();
// Assert the output has expected elements.
expected.addAll(expectedRecords.get(i));
TestHelpers.assertRecords(readOutputValues(harness), expected, SCHEMA);
}
}
}
|
// Returns the comparator that prefers addresses of the given protocol family.
// Throws IllegalArgumentException for unrecognized families.
static PreferredAddressTypeComparator comparator(InternetProtocolFamily family) {
    switch (family) {
        case IPv4:
            return IPv4;
        case IPv6:
            return IPv6;
        default:
            // Defensive: include the offending value so failures are diagnosable
            // (previously thrown without a message).
            throw new IllegalArgumentException("Unsupported InternetProtocolFamily: " + family);
    }
}
|
@Test
public void testIpv6() throws UnknownHostException {
    // With IPv6 preferred, all IPv6 addresses must sort before any IPv4 address,
    // while the relative order within each family is preserved.
    InetAddress ipv4Address1 = InetAddress.getByName("10.0.0.1");
    InetAddress ipv4Address2 = InetAddress.getByName("10.0.0.2");
    InetAddress ipv4Address3 = InetAddress.getByName("10.0.0.3");
    InetAddress ipv6Address1 = InetAddress.getByName("::1");
    InetAddress ipv6Address2 = InetAddress.getByName("::2");
    InetAddress ipv6Address3 = InetAddress.getByName("::3");
    PreferredAddressTypeComparator preferIpv6 =
        PreferredAddressTypeComparator.comparator(InternetProtocolFamily.IPv6);
    List<InetAddress> addressList = new ArrayList<InetAddress>();
    Collections.addAll(addressList, ipv4Address1, ipv4Address2, ipv6Address1,
        ipv6Address2, ipv4Address3, ipv6Address3);
    Collections.sort(addressList, preferIpv6);
    assertEquals(Arrays.asList(ipv6Address1, ipv6Address2, ipv6Address3,
        ipv4Address1, ipv4Address2, ipv4Address3), addressList);
}
|
@Override
public DescriptiveUrl toDownloadUrl(final Path file, final Sharee sharee, final ShareCreationRequestModel options, final PasswordCallback callback) throws BackgroundException {
// Download URLs are implemented as guest share links; the sharee argument is
// not consulted here.
return this.toGuestUrl(file, options, callback);
}
|
@Test
public void testDownloadUrlForFile() throws Exception {
// Creates a file, requests a download (guest) URL twice — expecting the same
// share to be reused — and checks the URL matches the signed URL reported by
// the share URL provider. Cleans up the folder afterwards.
final EueResourceIdProvider fileid = new EueResourceIdProvider(session);
final Path sourceFolder = new EueDirectoryFeature(session, fileid).mkdir(new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus());
final Path file = new Path(sourceFolder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
createFile(fileid, file, RandomUtils.nextBytes(0));
assertTrue(new EueFindFeature(session, fileid).find(file));
final EueShareFeature feature = new EueShareFeature(session, fileid);
final DescriptiveUrl url = feature.toDownloadUrl(file, Share.Sharee.world, null, new DisabledPasswordCallback() {
@Override
public Credentials prompt(final Host bookmark, final String title, final String reason, final LoginOptions options) {
return new Credentials(null, new AlphanumericRandomStringService().random());
}
});
assertNotEquals(DescriptiveUrl.EMPTY, url);
// Test returning same share
assertEquals(url, feature.toDownloadUrl(file, Share.Sharee.world, null, new DisabledPasswordCallback() {
@Override
public Credentials prompt(final Host bookmark, final String title, final String reason, final LoginOptions options) {
return new Credentials(null, new AlphanumericRandomStringService().random());
}
}));
assertEquals(url, new EueShareUrlProvider(session.getHost(), session.userShares()).toUrl(file).find(DescriptiveUrl.Type.signed));
new EueDeleteFeature(session, fileid).delete(Collections.singletonList(sourceFolder), new DisabledPasswordCallback(), new Delete.DisabledCallback());
}
|
// Submits a Spark ETL job for the given load job: clears the previous output
// path, writes the job config JSON to HDFS (via broker or directly), launches
// spark-submit through SparkLauncher, monitors the launcher process until an
// appId/state is available, and records the appId on the attachment.
// Throws LoadException on any submission failure, on a cancelled/killed app
// state, or when no appId could be obtained in time.
public void submitEtlJob(long loadJobId, String loadLabel, EtlJobConfig etlJobConfig, SparkResource resource,
BrokerDesc brokerDesc, SparkLoadAppHandle handle, SparkPendingTaskAttachment attachment,
Long sparkLoadSubmitTimeout)
throws LoadException {
// delete outputPath
deleteEtlOutputPath(etlJobConfig.outputPath, brokerDesc);
// init local dir
if (!FeConstants.runningUnitTest) {
initLocalDir();
}
// prepare dpp archive
SparkRepository.SparkArchive archive = resource.prepareArchive();
SparkRepository.SparkLibrary dppLibrary = archive.getDppLibrary();
SparkRepository.SparkLibrary spark2xLibrary = archive.getSpark2xLibrary();
// spark home
String sparkHome = Config.spark_home_default_dir;
// etl config path
String configsHdfsDir = etlJobConfig.outputPath + "/" + JOB_CONFIG_DIR + "/";
// etl config json path
String jobConfigHdfsPath = configsHdfsDir + CONFIG_FILE_NAME;
// spark submit app resource path
String appResourceHdfsPath = dppLibrary.remotePath;
// spark yarn archive path
String jobArchiveHdfsPath = spark2xLibrary.remotePath;
// spark yarn stage dir
String jobStageHdfsPath = resource.getWorkingDir();
// spark launcher log path
String logFilePath = Config.spark_launcher_log_dir + "/" + String.format(LAUNCHER_LOG, loadJobId, loadLabel);
// update archive and stage configs here; user-provided values take precedence
Map<String, String> sparkConfigs = resource.getSparkConfigs();
if (Strings.isNullOrEmpty(sparkConfigs.get("spark.yarn.archive"))) {
sparkConfigs.put("spark.yarn.archive", jobArchiveHdfsPath);
}
if (Strings.isNullOrEmpty(sparkConfigs.get("spark.yarn.stage.dir"))) {
sparkConfigs.put("spark.yarn.stage.dir", jobStageHdfsPath);
}
// persist the ETL job config so the Spark job can read it from HDFS
try {
byte[] configData = etlJobConfig.configToJson().getBytes(StandardCharsets.UTF_8);
if (brokerDesc.hasBroker()) {
BrokerUtil.writeFile(configData, jobConfigHdfsPath, brokerDesc);
} else {
HdfsUtil.writeFile(configData, jobConfigHdfsPath, brokerDesc);
}
} catch (UserException e) {
throw new LoadException(e.getMessage());
}
SparkLauncher launcher = new SparkLauncher();
// master | deployMode
// ------------|-------------
// yarn | cluster
// spark://xx | client
launcher.setMaster(resource.getMaster())
.setDeployMode(resource.getDeployMode().name().toLowerCase())
.setAppResource(appResourceHdfsPath)
.setMainClass(SparkEtlJob.class.getCanonicalName())
.setAppName(String.format(ETL_JOB_NAME, loadLabel))
.setSparkHome(sparkHome)
.addAppArgs(jobConfigHdfsPath)
.redirectError();
// spark configs
for (Map.Entry<String, String> entry : resource.getSparkConfigs().entrySet()) {
launcher.setConf(entry.getKey(), entry.getValue());
}
// start app
State state = null;
String appId = null;
String logPath = null;
String errMsg = "start spark app failed. error: ";
try {
Process process = launcher.launch();
handle.setProcess(process);
if (!FeConstants.runningUnitTest) {
// Monitor the launcher's output to extract appId/state, with a submit
// timeout; the monitor also redirects the launcher log to logFilePath.
SparkLauncherMonitor.LogMonitor logMonitor = SparkLauncherMonitor.createLogMonitor(handle);
logMonitor.setSubmitTimeoutMs(sparkLoadSubmitTimeout);
logMonitor.setRedirectLogPath(logFilePath);
logMonitor.start();
try {
logMonitor.join();
} catch (InterruptedException e) {
logMonitor.interrupt();
throw new LoadException(errMsg + e.getMessage());
}
}
appId = handle.getAppId();
state = handle.getState();
logPath = handle.getLogPath();
} catch (IOException e) {
LOG.warn(errMsg, e);
throw new LoadException(errMsg + e.getMessage());
}
if (fromSparkState(state) == TEtlState.CANCELLED) {
// Best-effort: make sure the YARN application is really gone before failing.
if (state == State.KILLED) {
try {
killYarnApplication(appId, loadJobId, resource);
} catch (UserException e) {
LOG.warn(errMsg, e);
}
}
throw new LoadException(
errMsg + "spark app state: " + state.toString() + ", loadJobId:" + loadJobId + ", logPath:" +
logPath);
}
if (appId == null) {
throw new LoadException(errMsg + "Waiting too much time to get appId from handle. spark app state: "
+ state.toString() + ", loadJobId:" + loadJobId);
}
// success
attachment.setAppId(appId);
attachment.setHandle(handle);
}
|
@Test
public void testSubmitEtlJob(@Mocked BrokerUtil brokerUtil, @Mocked SparkLauncher launcher,
@Injectable Process process,
@Mocked SparkLoadAppHandle handle) throws IOException, LoadException {
// Mocks the launcher/handle so the submission path runs without a real Spark
// cluster; a RUNNING state with a known appId must be treated as success.
new Expectations() {
{
launcher.launch();
result = process;
handle.getAppId();
result = appId;
handle.getState();
result = SparkLoadAppHandle.State.RUNNING;
}
};
EtlJobConfig etlJobConfig = new EtlJobConfig(Maps.newHashMap(), etlOutputPath, label, null);
SparkResource resource = new SparkResource(resourceName);
new Expectations(resource) {
{
resource.prepareArchive();
result = archive;
}
};
Map<String, String> sparkConfigs = resource.getSparkConfigs();
sparkConfigs.put("spark.master", "yarn");
sparkConfigs.put("spark.submit.deployMode", "cluster");
sparkConfigs.put("spark.hadoop.yarn.resourcemanager.address", "127.0.0.1:9999");
BrokerDesc brokerDesc = new BrokerDesc(broker, Maps.newHashMap());
SparkPendingTaskAttachment attachment = new SparkPendingTaskAttachment(pendingTaskId);
SparkEtlJobHandler handler = new SparkEtlJobHandler();
long sparkLoadSubmitTimeout = Config.spark_load_submit_timeout_second;
handler.submitEtlJob(loadJobId, label, etlJobConfig, resource, brokerDesc, handle, attachment, sparkLoadSubmitTimeout);
// check submit etl job success
Assert.assertEquals(appId, attachment.getAppId());
}
|
// Static factory for a Reshuffle transform; key/value type parameters are
// inferred at the call site.
public static <K, V> Reshuffle<K, V> of() {
return new Reshuffle<>();
}
|
@Test
public void testNoOldTransformByDefault() {
// Reshuffle.of() must not expand into the old transform that
// OldTransformSeeker detects.
pipeline.enableAbandonedNodeEnforcement(false);
pipeline.apply(Create.of(KV.of("arbitrary", "kv"))).apply(Reshuffle.of());
OldTransformSeeker seeker = new OldTransformSeeker();
pipeline.traverseTopologically(seeker);
assertFalse(seeker.isOldTransformFound);
}
|
public static Optional<String> getRuleName(final String rulePath) {
    // Extract the rule-name path segment that sits between the rule-name node
    // prefix and the active-version suffix; empty when the path does not match.
    Pattern pattern = Pattern.compile(getRuleNameNode() + "/(\\w+)" + ACTIVE_VERSION_SUFFIX, Pattern.CASE_INSENSITIVE);
    Matcher matcher = pattern.matcher(rulePath);
    if (!matcher.find()) {
        return Optional.empty();
    }
    return Optional.of(matcher.group(1));
}
|
@Test
void assertGetRuleName() {
    // "/rules/transaction/active_version" should yield the rule name "transaction".
    Optional<String> ruleName = GlobalNodePath.getRuleName("/rules/transaction/active_version");
    assertTrue(ruleName.isPresent());
    assertThat(ruleName.get(), is("transaction"));
}
|
// Copies all bytes from in to out with the given buffer size. When close is
// true, both streams are closed: first via explicit close() calls so close
// failures propagate, and the references are nulled out so the best-effort
// closeStream() calls in the finally block do not close them a second time
// (they only fire if copy or close threw first).
public static void copyBytes(InputStream in, OutputStream out,
int buffSize, boolean close)
throws IOException {
try {
copyBytes(in, out, buffSize);
if(close) {
out.close();
out = null;
in.close();
in = null;
}
} finally {
if(close) {
closeStream(out);
closeStream(in);
}
}
}
|
@Test
public void testCopyBytesShouldNotCloseStreamsWhenCloseIsFalse()
    throws Exception {
    // With close == false, copyBytes must leave both streams open for the caller.
    InputStream inputStream = Mockito.mock(InputStream.class);
    OutputStream outputStream = Mockito.mock(OutputStream.class);
    // EOF immediately so the copy loop terminates.
    Mockito.doReturn(-1).when(inputStream).read(new byte[1]);
    IOUtils.copyBytes(inputStream, outputStream, 1, false);
    // never() states the intent directly (was the roundabout atMost(0)).
    Mockito.verify(inputStream, Mockito.never()).close();
    Mockito.verify(outputStream, Mockito.never()).close();
}
|
// Convenience overload: delegates to the full variant with the trailing
// boolean flag set to false.
public static void getSemanticPropsSingleFromString(
SingleInputSemanticProperties result,
String[] forwarded,
String[] nonForwarded,
String[] readSet,
TypeInformation<?> inType,
TypeInformation<?> outType) {
getSemanticPropsSingleFromString(
result, forwarded, nonForwarded, readSet, inType, outType, false);
}
|
@Test
void testForwardedInvalidTargetFieldType5() {
// Forwarding a nested wildcard ("f0.*") to a full wildcard target ("*") with
// incompatible tuple types must be rejected as an invalid annotation.
String[] forwardedFields = {"f0.*->*"};
SingleInputSemanticProperties sp = new SingleInputSemanticProperties();
assertThatThrownBy(
() ->
SemanticPropUtil.getSemanticPropsSingleFromString(
sp,
forwardedFields,
null,
null,
nestedTupleType,
fiveIntTupleType))
.isInstanceOf(InvalidSemanticAnnotationException.class);
}
|
@Override
public Optional<ReadableRegion> getReadableRegion(
        int subpartitionId, int bufferIndex, int consumingOffset) {
    synchronized (lock) {
        // Translate the internal region (if any) into a readable view anchored at
        // bufferIndex, and drop views that expose no readable buffers.
        return getInternalRegion(subpartitionId, bufferIndex)
                .map(region -> region.toReadableRegion(bufferIndex, consumingOffset))
                .filter(readable -> readable.numReadable > 0);
    }
}
|
@Test
void testGetReadableRegion() {
// Buffers 0,1,3,4,5 are spilled; 1,3,4 are then released. A readable region
// must start at the requested buffer and never span a discontinuity.
final int subpartitionId = 0;
hsDataIndex.addBuffers(createSpilledBuffers(subpartitionId, Arrays.asList(0, 1, 3, 4, 5)));
hsDataIndex.markBufferReleased(subpartitionId, 1);
hsDataIndex.markBufferReleased(subpartitionId, 3);
hsDataIndex.markBufferReleased(subpartitionId, 4);
assertThat(hsDataIndex.getReadableRegion(subpartitionId, 1, 0))
.hasValueSatisfying(
readableRegion -> {
assertRegionStartWithTargetBufferIndex(readableRegion, 1);
// Readable region will not include discontinuous buffer.
assertThat(readableRegion.numReadable).isEqualTo(1);
});
assertThat(hsDataIndex.getReadableRegion(subpartitionId, 3, 0))
.hasValueSatisfying(
readableRegion -> {
assertRegionStartWithTargetBufferIndex(readableRegion, 3);
assertThat(readableRegion.numReadable)
.isGreaterThanOrEqualTo(1)
.isLessThanOrEqualTo(2);
});
assertThat(hsDataIndex.getReadableRegion(subpartitionId, 4, 0))
.hasValueSatisfying(
readableRegion -> {
assertRegionStartWithTargetBufferIndex(readableRegion, 4);
assertThat(readableRegion.numReadable).isEqualTo(1);
});
}
|
/**
 * Computes the page numbers to display in a pagination "rainbow" window of
 * {@code displayCount} pages centered on {@code pageNo} (skewed one to the
 * right when the window width is even), clamped to [1, totalPage].
 */
public static int[] rainbow(int pageNo, int totalPage, int displayCount) {
    final boolean evenWidth = (displayCount & 1) == 0;
    final int half = displayCount >> 1;
    // For an even window the right side holds one more page than the left.
    final int left = half;
    final int right = evenWidth ? half + 1 : half;
    // Fewer total pages than the window: show them all.
    if (totalPage < displayCount) {
        int[] pages = new int[totalPage];
        for (int i = 0; i < pages.length; i++) {
            pages[i] = i + 1;
        }
        return pages;
    }
    // Pick the first page of the window, clamping at both ends of the range.
    final int start;
    if (pageNo <= left) {
        start = 1;
    } else if (pageNo > totalPage - right) {
        start = totalPage - displayCount + 1;
    } else {
        start = pageNo - left + (evenWidth ? 1 : 0);
    }
    int[] pages = new int[displayCount];
    for (int i = 0; i < pages.length; i++) {
        pages[i] = start + i;
    }
    return pages;
}
|
@Test
public void rainbowTest() {
    // Page 5 of 20 with a 6-wide window (even width skews right) yields 3..8.
    assertArrayEquals(new int[]{3, 4, 5, 6, 7, 8}, PageUtil.rainbow(5, 20, 6));
}
|
@Override
public void onProjectsRekeyed(Set<RekeyedProject> rekeyedProjects) {
checkNotNull(rekeyedProjects, "rekeyedProjects can't be null");
// Nothing to broadcast for an empty set.
if (rekeyedProjects.isEmpty()) {
return;
}
// safelyCallListener isolates each listener, so one failing listener does not
// prevent the remaining ones from being notified (see the Error-throwing test).
Arrays.stream(listeners)
.forEach(safelyCallListener(listener -> listener.onProjectsRekeyed(rekeyedProjects)));
}
|
@Test
@UseDataProvider("oneOrManyRekeyedProjects")
public void onProjectsRekeyed_calls_all_listeners_even_if_one_throws_an_Error(Set<RekeyedProject> projects) {
// listener2 throws an Error, yet listener3 must still be notified, in order.
InOrder inOrder = Mockito.inOrder(listener1, listener2, listener3);
doThrow(new Error("Faking listener2 throwing an Error"))
.when(listener2)
.onProjectsRekeyed(any());
underTestWithListeners.onProjectsRekeyed(projects);
inOrder.verify(listener1).onProjectsRekeyed(same(projects));
inOrder.verify(listener2).onProjectsRekeyed(same(projects));
inOrder.verify(listener3).onProjectsRekeyed(same(projects));
inOrder.verifyNoMoreInteractions();
}
|
public static boolean isCoastedPoint(NopHit starsPoint) {
    // The raw message of a STARS point is a StarsRadarHit; delegate to the
    // radar-hit-level check.
    return isCoastedRadarHit((StarsRadarHit) starsPoint.rawMessage());
}
|
@Test
public void testIsCoastedPoint() {
    // Active tracks are not coasted; coasted and dropped tracks both count as coasted.
    assertFalse(StarsSmoothing.isCoastedPoint(new NopHit(ACTIVE_STARS)));
    assertTrue(StarsSmoothing.isCoastedPoint(new NopHit(COASTED_STARS)));
    assertTrue(StarsSmoothing.isCoastedPoint(new NopHit(DROPPED_STARS)));
}
|
// Splits this summation into nParts contiguous sub-summations by advancing the
// N and E arithmetic progressions in lock-step: the i-th cut point is placed
// proportionally along E's total step count, so the parts tile the full range
// with no gaps or overlap; the last part runs to the original limits.
public Summation[] partition(final int nParts) {
final Summation[] parts = new Summation[nParts];
// Total number of steps E takes from its start value to its limit.
final long steps = (E.limit - E.value)/E.delta + 1;
long prevN = N.value;
long prevE = E.value;
for(int i = 1; i < parts.length; i++) {
final long k = (i * steps)/parts.length;
final long currN = N.skip(k);
final long currE = E.skip(k);
parts[i - 1] = new Summation(
new ArithmeticProgression(N.symbol, prevN, N.delta, currN),
new ArithmeticProgression(E.symbol, prevE, E.delta, currE));
prevN = currN;
prevE = currE;
}
parts[parts.length - 1] = new Summation(
new ArithmeticProgression(N.symbol, prevN, N.delta, N.limit),
new ArithmeticProgression(E.symbol, prevE, E.delta, E.limit));
return parts;
}
|
@Test
void testSubtract() {
// Subtracting random subsets of a partition from the whole summation must
// behave consistently; note the sort inside the loop is commented out, so
// subtract is also exercised with the remaining parts in arbitrary order.
final Summation sigma = newSummation(3, 10000, 20);
final int size = 10;
final List<Summation> parts = Arrays.asList(sigma.partition(size));
Collections.sort(parts);
runTestSubtract(sigma, new ArrayList<Summation>());
runTestSubtract(sigma, parts);
for (int n = 1; n < size; n++) {
for (int j = 0; j < 10; j++) {
// Remove n random parts, leaving a random strict subset.
final List<Summation> diff = new ArrayList<Summation>(parts);
for (int i = 0; i < n; i++)
diff.remove(RANDOM.nextInt(diff.size()));
/// Collections.sort(diff);
runTestSubtract(sigma, diff);
}
}
}
|
// Returns the Zuul request message previously stashed on the channel's
// ATTR_ZUUL_REQ attribute, or null if none was recorded.
public static HttpRequestMessage getRequestFromChannel(Channel ch) {
return ch.attr(ATTR_ZUUL_REQ).get();
}
|
@Test
void multipleHostHeaders_setBadRequestStatus() {
// A request carrying two Host headers must be flagged as a client bad request
// with the corresponding error message and status-category reason.
ClientRequestReceiver receiver = new ClientRequestReceiver(null);
EmbeddedChannel channel = new EmbeddedChannel(new HttpRequestEncoder());
PassportLoggingHandler loggingHandler = new PassportLoggingHandler(new DefaultRegistry());
// Required for messages
channel.attr(SourceAddressChannelHandler.ATTR_SERVER_LOCAL_PORT).set(1234);
channel.pipeline().addLast(new HttpServerCodec());
channel.pipeline().addLast(receiver);
channel.pipeline().addLast(loggingHandler);
HttpRequest httpRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/post");
httpRequest.headers().add("Host", "foo.bar.com");
httpRequest.headers().add("Host", "bar.foo.com");
// Round-trip the request through the embedded pipeline: encode it outbound,
// then feed the raw bytes back in as if received from a client.
channel.writeOutbound(httpRequest);
ByteBuf byteBuf = channel.readOutbound();
channel.writeInbound(byteBuf);
channel.readInbound();
channel.close();
HttpRequestMessage request = ClientRequestReceiver.getRequestFromChannel(channel);
SessionContext context = request.getContext();
assertEquals(ZuulStatusCategory.FAILURE_CLIENT_BAD_REQUEST, StatusCategoryUtils.getStatusCategory(context));
assertEquals("Multiple Host headers", context.getError().getMessage());
assertEquals(
"Invalid request provided: Multiple Host headers",
StatusCategoryUtils.getStatusCategoryReason(context));
}
|
@Override
public int read() throws IOException {
// advanceStream() refreshes/advances the buffered window; false means EOF.
if (advanceStream()) {
final int value = buffer[bufferStart];
incrementRead();
// NOTE(review): if buffer is a byte[], this sign-extends values >= 0x80 and
// can return negatives other than -1, violating the InputStream.read()
// contract — confirm buffer's element type; mask with & 0xFF if it is byte[].
return value;
}
return -1;
}
|
@Test()
public void testRead_3args() throws Exception {
    // A 2048-byte JSON object must be wrapped as a JSON array: '[' + input + ']'.
    byte[] input = new byte[2048];
    Arrays.fill(input, (byte) ' ');
    input[0] = '{';
    input[2047] = '}';
    byte[] results = new byte[2050];
    byte[] expected = new byte[2050];
    Arrays.fill(expected, (byte) ' ');
    expected[0] = '[';
    expected[1] = '{';
    expected[2048] = '}';
    expected[2049] = ']';
    try (InputStream sample = new ByteArrayInputStream(input);
            JsonArrayFixingInputStream instance = new JsonArrayFixingInputStream(sample)) {
        int pos = 0;
        int read;
        // Bug fix: the previous loop added the terminal -1 to pos after EOF,
        // leaving pos off by one; only advance pos on successful reads.
        while ((read = instance.read(results, pos, 2050 - pos)) >= 0) {
            pos += read;
        }
        Assert.assertEquals(2050, pos);
        Assert.assertArrayEquals(expected, results);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.