| focal_method (string, length 13–60.9k) | test_case (string, length 25–109k) |
|---|---|
@Override
public Stream<MappingField> resolveAndValidateFields(
        boolean isKey,
        List<MappingField> userFields,
        Map<String, String> options,
        InternalSerializationService serializationService
) {
    // Determine the Java class backing this key/value: prefer metadata carried by
    // the user-declared fields, falling back to the class named in the options.
    Map<QueryPath, MappingField> pathToField = extractFields(userFields, isKey);
    Class<?> resolvedClass = getMetadata(pathToField)
            .<Class<?>>map(KvMetadataJavaResolver::loadClass)
            .orElseGet(() -> loadClass(options, isKey));
    QueryDataType resolvedType = QueryDataTypeUtils.resolveTypeForClass(resolvedClass);

    boolean primitiveLike = resolvedType.getTypeFamily() != QueryDataTypeFamily.OBJECT
            || resolvedType.isCustomType();
    boolean noDeclaredFields = userFields.isEmpty();
    if (primitiveLike) {
        if (noDeclaredFields) {
            return resolvePrimitiveField(isKey, resolvedType);
        }
        return resolveAndValidatePrimitiveField(isKey, pathToField, resolvedType);
    }
    if (noDeclaredFields) {
        return resolveObjectFields(isKey, resolvedClass);
    }
    return resolveAndValidateObjectFields(isKey, pathToField, resolvedClass);
}
|
@Test
@Parameters({
    "true, __key",
    "false, this"
})
public void when_userDeclaresFields_then_fieldsFromClassNotAdded(boolean key, String prefix) {
    // Configure a Java-format mapping pointing at Type; the resolver must not
    // augment the explicitly declared field list with fields discovered on the class.
    Map<String, String> options = Map.of(
        (key ? OPTION_KEY_FORMAT : OPTION_VALUE_FORMAT), JAVA_FORMAT,
        (key ? OPTION_KEY_CLASS : OPTION_VALUE_CLASS), Type.class.getName()
    );
    Stream<MappingField> fields = INSTANCE.resolveAndValidateFields(
        key,
        singletonList(field("field2", QueryDataType.VARCHAR, prefix + ".field2")),
        options,
        null
    );
    // Only the user-declared field survives resolution.
    assertThat(fields).containsExactly(field("field2", QueryDataType.VARCHAR, prefix + ".field2"));
}
|
@Override
public void initialize(Map<String, String> props) {
    // Snapshot the configuration in a serializable form and prepare a lazy supplier:
    // the GCS client is only built when the supplier is invoked.
    this.properties = SerializableMap.copyOf(props);
    this.gcpProperties = new GCPProperties(properties);
    this.storageSupplier =
        () -> {
          StorageOptions.Builder builder = StorageOptions.newBuilder();
          // Apply only the options the user actually configured.
          gcpProperties.projectId().ifPresent(builder::setProjectId);
          gcpProperties.clientLibToken().ifPresent(builder::setClientLibToken);
          gcpProperties.serviceHost().ifPresent(builder::setHost);
          // Google Cloud APIs default to automatically detect the credentials to use, which is
          // in most cases the convenient way, especially in GCP.
          // See javadoc of com.google.auth.oauth2.GoogleCredentials.getApplicationDefault().
          if (gcpProperties.noAuth()) {
            // Explicitly allow "no credentials" for testing purposes.
            builder.setCredentials(NoCredentials.getInstance());
          }
          // NOTE: an explicit OAuth token, when present, overrides the no-auth
          // credentials set above (last setCredentials call wins).
          gcpProperties
              .oauth2Token()
              .ifPresent(
                  token -> {
                    // Explicitly configure an OAuth token.
                    AccessToken accessToken =
                        new AccessToken(token, gcpProperties.oauth2TokenExpiresAt().orElse(null));
                    builder.setCredentials(OAuth2Credentials.create(accessToken));
                  });
          return builder.build().getService();
        };
    initMetrics(properties);
}
|
@Test
public void testResolvingFileIOLoad() {
    // ResolvingFileIO picks a concrete FileIO by URI scheme; a gs:// path must
    // resolve to the GCS implementation.
    ResolvingFileIO resolvingFileIO = new ResolvingFileIO();
    resolvingFileIO.setConf(new Configuration());
    resolvingFileIO.initialize(ImmutableMap.of());
    // io(String) is package-private/hidden, so invoke it reflectively.
    FileIO result =
        DynMethods.builder("io")
            .hiddenImpl(ResolvingFileIO.class, String.class)
            .build(resolvingFileIO)
            .invoke("gs://foo/bar");
    assertThat(result).isInstanceOf(GCSFileIO.class);
}
|
/**
 * Builds the registry path of a table's active-version node:
 * {@code <metadata>/<db>/schemas/<schema>/tables/<table>/active_version}.
 */
public static String getTableActiveVersionNode(final String databaseName, final String schemaName, final String tableName) {
    return getMetaDataNode() + "/" + databaseName + "/" + SCHEMAS_NODE + "/" + schemaName
            + "/" + TABLES_NODE + "/" + tableName + "/" + ACTIVE_VERSION;
}
|
@Test
void assertGetTableActiveVersionNode() {
    // Pin the exact path layout of the active-version registry node.
    assertThat(TableMetaDataNode.getTableActiveVersionNode("foo_db", "foo_schema", "foo_table"), is("/metadata/foo_db/schemas/foo_schema/tables/foo_table/active_version"));
}
|
/**
 * Reports whether the given appender instance is currently attached.
 * Comparison is by identity ({@code ==}), not {@code equals()}: attachment
 * tracks the exact instance.
 */
public boolean isAttached(Appender<E> appender) {
    if (appender == null) {
        return false;
    }
    for (Appender<E> candidate : appenderList) {
        if (candidate == appender) {
            return true;
        }
    }
    return false;
}
|
@Test
public void testIsAttached() throws Exception {
    // Attach two distinct appenders and verify both are reported attached.
    NOPAppender<TestEvent> ta = new NOPAppender<TestEvent>();
    ta.start();
    aai.addAppender(ta);
    NOPAppender<TestEvent> tab = new NOPAppender<TestEvent>();
    tab.setName("test");
    tab.start();
    aai.addAppender(tab);
    assertTrue("Appender is not attached", aai.isAttached(ta));
    assertTrue("Appender is not attached", aai.isAttached(tab));
}
|
@Override
public KStream<K, V> filterNot(final Predicate<? super K, ? super V> predicate) {
    // Delegates to the named variant with an empty name; argument validation
    // (e.g. null predicate) happens in the delegate.
    return filterNot(predicate, NamedInternal.empty());
}
|
@Test
public void shouldNotAllowNullPredicateOnFilterNotWithName() {
    // The named filterNot variant must reject a null predicate with a clear message.
    final NullPointerException exception = assertThrows(
        NullPointerException.class,
        () -> testStream.filterNot(null, Named.as("filter")));
    assertThat(exception.getMessage(), equalTo("predicate can't be null"));
}
|
@Override
public Set<V> readDiff(String... names) {
    // Synchronous wrapper: block on the async diff computation and return its result.
    return get(readDiffAsync(names));
}
|
@Test
public void testReadDiff() {
    // set = {5,7,6}; subtracting set1 and set2 (both contain 5) leaves {7,6}.
    RSet<Integer> set = redisson.getSet("set");
    set.add(5);
    set.add(7);
    set.add(6);
    RSet<Integer> set1 = redisson.getSet("set1");
    set1.add(1);
    set1.add(2);
    set1.add(5);
    RSet<Integer> set2 = redisson.getSet("set2");
    set2.add(3);
    set2.add(4);
    set2.add(5);
    assertThat(set.readDiff("set1", "set2")).containsOnly(7, 6);
    // readDiff is read-only: the source set must be unchanged.
    assertThat(set).containsOnly(6, 5, 7);
}
|
public <T extends SELF> T setParam(String key, @Nullable String value) {
    // Fluent setter. The unchecked cast to T is the usual self-referential
    // builder pattern: SELF is the concrete subtype, so the cast is safe.
    return (T) setSingleValueParam(key, value);
}
|
@Test
public void skip_null_value_in_multi_param() {
    // Null entries are dropped from multi-value params, but empty strings are kept.
    underTest.setParam("key", asList("v1", null, "", "v3"));
    assertMultiValueParameters(entry("key", asList("v1", "", "v3")));
}
|
/**
 * Combines two results: the combination passes (or descends) only when
 * both constituents pass (or descend).
 */
public Result combine(Result other) {
    boolean bothPass = this.isPass() && other.isPass();
    boolean bothDescend = this.isDescend() && other.isDescend();
    return new Result(bothPass, bothDescend);
}
|
@Test
public void equalsStop() {
    // STOP combined with STOP must still equal STOP (combine is idempotent here).
    Result one = Result.STOP;
    Result two = Result.STOP.combine(Result.STOP);
    assertEquals(one, two);
}
|
@Override
public ConfigDO getConfigByKey(String key) {
    // Straight pass-through to the mapper's key lookup.
    return configMapper.selectByKey(key);
}
|
@Test
public void testGetConfigByKey() {
    // mock 数据 (insert a row so the lookup has something to find)
    ConfigDO dbConfig = randomConfigDO();
    configMapper.insert(dbConfig);// @Sql: 先插入出一条存在的数据
    // 准备参数 (prepare the lookup key)
    String key = dbConfig.getConfigKey();
    // 调用 (invoke)
    ConfigDO config = configService.getConfigByKey(key);
    // 断言 (assert the fetched row matches what was inserted)
    assertNotNull(config);
    assertPojoEquals(dbConfig, config);
}
|
public static void executeLongPolling(Runnable runnable) {
    // Submit the task to the dedicated long-polling executor (asynchronous).
    LONG_POLLING_EXECUTOR.execute(runnable);
}
|
@Test
void testExecuteLongPolling() throws InterruptedException {
    // Use a latch instead of a fixed 20 ms sleep: the task runs asynchronously,
    // and a hard-coded sleep makes the test flaky on slow/loaded machines.
    AtomicInteger atomicInteger = new AtomicInteger();
    java.util.concurrent.CountDownLatch latch = new java.util.concurrent.CountDownLatch(1);
    Runnable runnable = () -> {
        atomicInteger.incrementAndGet();
        latch.countDown();
    };
    ConfigExecutor.executeLongPolling(runnable);
    // Wait (bounded) for the task to run; if it never does, the assertion fails.
    latch.await(5, TimeUnit.SECONDS);
    assertEquals(1, atomicInteger.get());
}
|
/**
 * Loads a route context from the sharding route cache, computing and caching it
 * on a miss. Returns {@code Optional.empty()} when the query is not cacheable
 * (too long, not probably-cacheable, or parameter indexes out of range), in
 * which case the caller should fall back to normal routing.
 */
public Optional<RouteContext> loadRouteContext(final OriginSQLRouter originSQLRouter, final QueryContext queryContext, final RuleMetaData globalRuleMetaData,
                                               final ShardingSphereDatabase database, final ShardingCache shardingCache, final ConfigurationProperties props,
                                               final ConnectionContext connectionContext) {
    // Guard 1: overly long SQL is never cached.
    if (queryContext.getSql().length() > shardingCache.getConfiguration().getAllowedMaxSqlLength()) {
        return Optional.empty();
    }
    // Guard 2: the checker must deem the statement probably cacheable.
    ShardingRouteCacheableCheckResult cacheableCheckResult = shardingCache.getRouteCacheableChecker().check(database, queryContext);
    if (!cacheableCheckResult.isProbablyCacheable()) {
        return Optional.empty();
    }
    // Collect the parameter values that participate in sharding conditions;
    // bail out if any referenced marker index is beyond the supplied parameters.
    List<Object> shardingConditionParams = new ArrayList<>(cacheableCheckResult.getShardingConditionParameterMarkerIndexes().size());
    for (int each : cacheableCheckResult.getShardingConditionParameterMarkerIndexes()) {
        if (each >= queryContext.getParameters().size()) {
            return Optional.empty();
        }
        shardingConditionParams.add(queryContext.getParameters().get(each));
    }
    Optional<RouteContext> cachedResult = shardingCache.getRouteCache().get(new ShardingRouteCacheKey(queryContext.getSql(), shardingConditionParams))
            .flatMap(ShardingRouteCacheValue::getCachedRouteContext);
    // Cache miss: compute the route via the original router.
    RouteContext result = cachedResult.orElseGet(
            () -> originSQLRouter.createRouteContext(queryContext, globalRuleMetaData, database, shardingCache.getShardingRule(), props, connectionContext));
    // Only freshly computed single-shard routes are worth caching.
    if (!cachedResult.isPresent() && hitOneShardOnly(result)) {
        shardingCache.getRouteCache().put(new ShardingRouteCacheKey(queryContext.getSql(), shardingConditionParams), new ShardingRouteCacheValue(result));
    }
    return Optional.of(result);
}
|
@Test
void assertCreateRouteContextWithNotCacheableQuery() {
    // Checker reports "not probably cacheable" → loadRouteContext must return empty.
    QueryContext queryContext =
            new QueryContext(sqlStatementContext, "insert into t values (?), (?)", Collections.emptyList(), new HintValueContext(), mockConnectionContext(), mock(ShardingSphereMetaData.class));
    when(shardingCache.getConfiguration()).thenReturn(new ShardingCacheConfiguration(100, null));
    when(shardingCache.getRouteCacheableChecker()).thenReturn(mock(ShardingRouteCacheableChecker.class));
    when(shardingCache.getRouteCacheableChecker().check(null, queryContext)).thenReturn(new ShardingRouteCacheableCheckResult(false, Collections.emptyList()));
    Optional<RouteContext> actual = new CachedShardingSQLRouter().loadRouteContext(null, queryContext, mock(RuleMetaData.class), null, shardingCache, null, null);
    assertFalse(actual.isPresent());
}
|
// Package-private accessor, exposed for tests to inspect the configured retriever.
AccessTokenRetriever getAccessTokenRetriever() {
    return accessTokenRetriever;
}
|
@Test
public void testConfigureWithAccessTokenFile() throws Exception {
    // A file:// token endpoint URL must configure a file-based token retriever.
    String expected = "{}";
    File tmpDir = createTempDir("access-token");
    File accessTokenFile = createTempFile(tmpDir, "access-token-", ".json", expected);
    OAuthBearerLoginCallbackHandler handler = new OAuthBearerLoginCallbackHandler();
    Map<String, ?> configs = getSaslConfigs(SASL_OAUTHBEARER_TOKEN_ENDPOINT_URL, accessTokenFile.toURI().toString());
    Map<String, Object> jaasConfigs = Collections.emptyMap();
    configureHandler(handler, configs, jaasConfigs);
    assertInstanceOf(FileTokenRetriever.class, handler.getAccessTokenRetriever());
}
|
@Udf(description = "Returns a new string encoded using the outputEncoding ")
public String encode(
    @UdfParameter(
        description = "The source string. If null, then function returns null.") final String str,
    @UdfParameter(
        description = "The input encoding."
            + " If null, then function returns null.") final String inputEncoding,
    @UdfParameter(
        description = "The output encoding."
            + " If null, then function returns null.") final String outputEncoding) {
  if (str == null || inputEncoding == null || outputEncoding == null) {
    return null;
  }
  // Use a locale-independent lower-casing for the map lookup: with the default
  // JVM locale (e.g. Turkish), "ASCII".toLowerCase() does not yield "ascii",
  // which would make valid encoding pairs unresolvable.
  final String encodedString = inputEncoding.toLowerCase(java.util.Locale.ROOT)
      + outputEncoding.toLowerCase(java.util.Locale.ROOT);
  final Encode.Encoder encoder = ENCODER_MAP.get(encodedString);
  if (encoder == null) {
    throw new KsqlFunctionException("Supported input and output encodings are: "
        + "hex, utf8, ascii and base64");
  }
  return encoder.apply(str);
}
|
@Test
public void shouldReturnNullOnNullValue() {
    // Any null argument (source, input encoding, output encoding) yields null.
    assertThat(udf.encode(null, "hex", "ascii"), is(nullValue()));
    assertThat(udf.encode(null, "utf8", "base64"), is(nullValue()));
    assertThat(udf.encode("some string", null, "utf8"), is(nullValue()));
    assertThat(udf.encode("some string", "hex", null), is(nullValue()));
}
|
public void register(final InstanceInfo info) throws Exception {
long expiryTime = System.currentTimeMillis() + getLeaseRenewalOf(info);
batchingDispatcher.process(
taskId("register", info),
new InstanceReplicationTask(targetHost, Action.Register, info, null, true) {
public EurekaHttpResponse<Void> execute() {
return replicationClient.register(info);
}
},
expiryTime
);
}
|
@Test
public void testRegistrationBatchReplication() throws Exception {
    // Registering through a peer node must produce exactly one batched
    // replication request carrying the Register action.
    createPeerEurekaNode().register(instanceInfo);
    ReplicationInstance replicationInstance = expectSingleBatchRequest();
    assertThat(replicationInstance.getAction(), is(equalTo(Action.Register)));
}
|
/**
 * List overload: converts Karate/Cucumber-style tag options to a selector
 * expression. A null or empty list means no tag filtering and yields null.
 */
public static String fromKarateOptionsTags(List<String> tags) {
    if (tags == null) {
        return null;
    }
    if (tags.isEmpty()) {
        return null;
    }
    String[] tagsArray = tags.toArray(new String[0]);
    return fromKarateOptionsTags(tagsArray);
}
|
@Test
public void testCucumberOptionsTagsConversion() {
    // Comma-separated values OR within one anyOf(); separate args AND together;
    // '~' negates a tag.
    assertEquals("anyOf('@foo')", Tags.fromKarateOptionsTags("@foo"));
    assertEquals("anyOf('@foo','@bar')", Tags.fromKarateOptionsTags("@foo, @bar"));
    assertEquals("anyOf('@foo') && anyOf('@bar')", Tags.fromKarateOptionsTags("@foo", "@bar"));
    assertEquals("anyOf('@foo') && not('@bar')", Tags.fromKarateOptionsTags("@foo", "~@bar"));
    // detect new syntax and use as-is
    assertEquals("anyOf('@foo')", Tags.fromKarateOptionsTags("anyOf('@foo')"));
}
|
@Override
public ObjectNode encode(Criterion criterion, CodecContext context) {
    // Delegate the per-criterion-type dispatch to the encode helper.
    return new EncodeCriterionCodecHelper(criterion, context).encode();
}
|
@Test
public void matchVlanPcpTest() {
    // A VLAN-PCP criterion must round-trip through the JSON codec.
    Criterion criterion = Criteria.matchVlanPcp((byte) 7);
    ObjectNode result = criterionCodec.encode(criterion, context);
    assertThat(result, matchesCriterion(criterion));
}
|
@Override
public void handleRequest(RestRequest request, RequestContext requestContext, Callback<RestResponse> callback)
{
    //This code path cannot accept content types or accept types that contain
    //multipart/related. This is because these types of requests will usually have very large payloads and therefore
    //would degrade server performance since RestRequest reads everything into memory.
    // isMultipart() invokes the callback with an error itself when the request is
    // multipart, so the rejected branch needs no handling here.
    if (!isMultipart(request, requestContext, callback))
    {
        _restRestLiServer.handleRequest(request, requestContext, callback);
    }
}
|
@Test(dataProvider = "validClientProtocolVersionDataStreamOnly")
public void testValidReactiveUnstructuredDataRequest(RestLiServer server,
                                                     ProtocolVersion clientProtocolVersion,
                                                     String headerConstant)
        throws URISyntaxException, IOException
{
    // Build an empty-body stream request against the reactive download resource.
    StreamRequest streamRequest = new StreamRequestBuilder(new URI("/reactiveFeedDownloads/1")).setHeader(headerConstant,
                                                                                                          clientProtocolVersion.toString())
            .build(EntityStreams.emptyStream());
    // Expect exactly one get(1L, callback) call, delegating to the real resource.
    final FeedDownloadResourceReactive resource = getMockResource(FeedDownloadResourceReactive.class);
    resource.get(eq(1L), anyObject());
    EasyMock.expectLastCall().andDelegateTo(new FeedDownloadResourceReactive()).once();
    replay(resource);
    // Capture the streamed response handed to the R2 callback.
    @SuppressWarnings("unchecked")
    Callback<StreamResponse> r2Callback = createMock(Callback.class);
    final Capture<StreamResponse> streamResponse = EasyMock.newCapture();
    r2Callback.onSuccess(capture(streamResponse));
    expectLastCall().once();
    replay(r2Callback);
    RequestContext requestContext = new RequestContext();
    server.handleRequest(streamRequest, requestContext, r2Callback);
    verify(resource);
    verify(r2Callback);
    assertNotNull(streamResponse);
    // Content type must come from the resource, and the streamed entity must
    // match the resource's payload byte-for-byte.
    assertEquals(streamResponse.getValue().getHeader(RestConstants.HEADER_CONTENT_TYPE), FeedDownloadResourceReactive.CONTENT_TYPE);
    FullEntityReader fullEntityReader = new FullEntityReader(new Callback<ByteString>() {
        @Override
        public void onError(Throwable e)
        {
            fail("Error inside callback!! Failed to read response data from stream!", e);
        }
        @Override
        public void onSuccess(ByteString result)
        {
            assertEquals(result, FeedDownloadResourceReactive.CONTENT);
        }
    });
    streamResponse.getValue().getEntityStream().setReader(fullEntityReader);
}
|
@Override
public void filter(final ContainerRequestContext requestContext, final ContainerResponseContext responseContext) {
    // Stamp every outgoing response with the server's current wall-clock time.
    final long nowMillis = System.currentTimeMillis();
    responseContext.getHeaders().add(HeaderUtils.TIMESTAMP_HEADER, nowMillis);
}
|
@Test
void testFilter() {
    // The filter must add the timestamp header to the response's header map.
    final ContainerRequestContext requestContext = mock(ContainerRequestContext.class);
    final ContainerResponseContext responseContext = mock(ContainerResponseContext.class);
    final MultivaluedMap<String, Object> headers = HeaderUtils.createOutbound();
    when(responseContext.getHeaders()).thenReturn(headers);
    new TimestampResponseFilter().filter(requestContext, responseContext);
    assertTrue(headers.containsKey(org.whispersystems.textsecuregcm.util.HeaderUtils.TIMESTAMP_HEADER));
}
|
/**
 * Derives an annotated cluster state from the given params: computes each node's
 * effective state, applies group-availability down-marking, decides whether the
 * whole cluster is down, and infers the distribution bit count.
 */
static AnnotatedClusterState generatedStateFrom(final Params params) {
    final ContentCluster cluster = params.cluster;
    final ClusterState workingState = ClusterState.emptyState();
    final Map<Node, NodeStateReason> nodeStateReasons = new HashMap<>();
    // Per-node effective states first; reasons are accumulated as a side effect.
    for (final NodeInfo nodeInfo : cluster.getNodeInfos()) {
        final NodeState nodeState = computeEffectiveNodeState(nodeInfo, params, nodeStateReasons);
        workingState.setNodeState(nodeInfo.getNode(), nodeState);
    }
    // Group-level availability can force additional nodes down after the per-node pass.
    takeDownGroupsWithTooLowAvailability(workingState, nodeStateReasons, params);
    final Optional<ClusterStateReason> reasonToBeDown = clusterDownReason(workingState, params);
    if (reasonToBeDown.isPresent()) {
        workingState.setClusterState(State.DOWN);
    }
    // Distribution bits depend on the final node states, so this runs last.
    workingState.setDistributionBits(inferDistributionBitCount(cluster, workingState, params));
    return new AnnotatedClusterState(workingState, reasonToBeDown, nodeStateReasons);
}
|
@Test
void transient_maintenance_mode_does_not_override_wanted_down_state() {
    final ClusterFixture fixture = ClusterFixture.forFlatCluster(5).bringEntireClusterUp();
    final ClusterStateGenerator.Params params = fixture.generatorParams()
            .currentTimeInMillis(10_000)
            .transitionTimes(2000);
    // Node 2 is explicitly wanted Down and also reports Down...
    fixture.proposeStorageNodeWantedState(2, State.DOWN);
    fixture.reportStorageNodeState(2, State.DOWN);
    // ...within the transition window (9000 + 2000 > 10_000), which would normally
    // yield transient maintenance mode.
    final NodeInfo nodeInfo = fixture.cluster.getNodeInfo(new Node(NodeType.STORAGE, 2));
    nodeInfo.setTransitionTime(9000);
    final AnnotatedClusterState state = ClusterStateGenerator.generatedStateFrom(params);
    // Should _not_ be in maintenance mode, since we explicitly want it to stay down.
    assertThat(state.toString(), equalTo("distributor:5 storage:5 .2.s:d"));
}
|
@Override
public Object handle(String targetService, List<Object> invokers, Object invocation, Map<String, String> queryMap,
        String serviceInterface) {
    // Narrow the candidates via the tag rules, then let the next handler in the
    // chain refine the result; otherwise pass the invokers through untouched.
    if (shouldHandle(invokers)) {
        List<Object> matchedInvokers = getTargetInvokersByRules(invokers, targetService);
        return super.handle(targetService, matchedInvokers, invocation, queryMap, serviceInterface);
    }
    return invokers;
}
|
@Test
public void testGetTargetInvokerByTagRules() {
    // initialize the routing rule
    RuleInitializationUtils.initTagMatchRule();
    List<Object> invokers = new ArrayList<>();
    ApacheInvoker<Object> invoker1 = new ApacheInvoker<>("1.0.0");
    invokers.add(invoker1);
    ApacheInvoker<Object> invoker2 = new ApacheInvoker<>("1.0.1");
    invokers.add(invoker2);
    // Attachment "bar" drives the tag match in the configured rule.
    Invocation invocation = new ApacheInvocation();
    invocation.setAttachment("bar", "bar1");
    Map<String, String> queryMap = new HashMap<>();
    queryMap.put("side", "consumer");
    queryMap.put("group", "red");
    queryMap.put("version", "0.0.1");
    queryMap.put("interface", "io.sermant.foo.FooTest");
    Map<String, String> parameters = new HashMap<>();
    parameters.put(RouterConstant.PARAMETERS_KEY_PREFIX + "group", "red");
    DubboCache.INSTANCE.setParameters(parameters);
    DubboCache.INSTANCE.putApplication("io.sermant.foo.FooTest", "foo");
    List<Object> targetInvokers = (List<Object>) tagRouteHandler.handle(DubboCache.INSTANCE.getApplication("io.sermant.foo.FooTest")
            , invokers, invocation, queryMap, "io.sermant.foo.FooTest");
    // Only the invoker matching the tag rule should remain.
    Assert.assertEquals(1, targetInvokers.size());
    Assert.assertEquals(invoker2, targetInvokers.get(0));
    // Clean up the shared rule cache so other tests are unaffected.
    ConfigCache.getLabel(RouterConstant.DUBBO_CACHE_NAME).resetRouteRule(Collections.emptyMap());
}
|
// Formats this amount with the shared human-readable formatter (e.g. "1.23 BTC").
public String toFriendlyString() {
    return FRIENDLY_FORMAT.format(this).toString();
}
|
@Test
public void testToFriendlyString() {
    // Whole coins, fractions, sub-milli amounts, and negatives all format cleanly.
    assertEquals("1.00 BTC", COIN.toFriendlyString());
    assertEquals("1.23 BTC", valueOf(1, 23).toFriendlyString());
    assertEquals("0.001 BTC", COIN.divide(1000).toFriendlyString());
    assertEquals("-1.23 BTC", valueOf(1, 23).negate().toFriendlyString());
}
|
@Override
public boolean match(Message msg, StreamRule rule) {
    // An inverted rule never matches; an absent (null) inverted flag counts
    // as "not inverted", so the rule always matches.
    Boolean inverted = rule.getInverted();
    if (inverted == null) {
        return true;
    }
    return !inverted;
}
|
@Test
public void matchAlwaysReturnsTrue() throws Exception {
    // Matches both when "inverted" is absent and when it is explicitly false.
    assertThat(matcher.match(
            message,
            new StreamRuleMock(Map.of("_id", "stream-rule-id"))))
            .isTrue();
    assertThat(matcher.match(
            message,
            new StreamRuleMock(Map.of("_id", "stream-rule-id", "inverted", false))))
            .isTrue();
}
|
/**
 * Returns a new filter containing the union of this filter's bits and those of
 * every given filter. The receiver is never mutated. Rejects arguments that are
 * not BloomFilters; a null varargs array is treated as "nothing to merge".
 */
public Filter merge(Filter... filters) {
    BloomFilter result = new BloomFilter(this.getHashCount(), (BitSet) this.filter().clone());
    if (filters != null) {
        for (Filter candidate : filters) {
            if (!(candidate instanceof BloomFilter)) {
                throw new IllegalArgumentException("Cannot merge filters of different class");
            }
            result.addAll((BloomFilter) candidate);
        }
    }
    return result;
}
|
@Test(expected=IllegalArgumentException.class)
public void testMergeException() {
    // Merging incompatible filters must throw. The previous version declared an
    // unused array (bfs) and an unused result local (mergeBf); both were dead code.
    BloomFilter bf3 = new BloomFilter(ELEMENTS*10, 1);
    bf3.merge(bf);
}
|
@Override
public void resolveArtifacts(
        ArtifactApi.ResolveArtifactsRequest request,
        StreamObserver<ArtifactApi.ResolveArtifactsResponse> responseObserver) {
    // Resolve all requested artifacts in one pass, emit the single response,
    // then complete the gRPC stream.
    ArtifactApi.ResolveArtifactsResponse response =
            ArtifactApi.ResolveArtifactsResponse.newBuilder()
                    .addAllReplacements(resolver.resolveArtifacts(request.getArtifactsList()))
                    .build();
    responseObserver.onNext(response);
    responseObserver.onCompleted();
}
|
@Test
public void testResolveArtifacts() throws IOException {
    // A file artifact should resolve to itself (identity replacement).
    RunnerApi.ArtifactInformation artifact = fileArtifact(Paths.get("somePath"));
    ArtifactApi.ResolveArtifactsResponse resolved =
            retrievalBlockingStub.resolveArtifacts(
                    ArtifactApi.ResolveArtifactsRequest.newBuilder().addArtifacts(artifact).build());
    assertEquals(1, resolved.getReplacementsCount());
    assertEquals(artifact, resolved.getReplacements(0));
}
|
// True once this future has been completed, either with a proxy or with an error.
boolean isSetAndInitialized() {
    return proxy != null || error != null;
}
|
@Test
public void isSet_returnsFalse_whenNotSet() {
    // A fresh future has neither proxy nor error.
    assertFalse(future.isSetAndInitialized());
}
|
@Override
public KubevirtFloatingIp floatingIpByPodName(String podName) {
    checkArgument(!Strings.isNullOrEmpty(podName), ERR_NULL_POD_NAME);
    // Linear scan over the store; returns the first floating IP associated
    // with the pod, or null when none is associated.
    for (KubevirtFloatingIp floatingIp : kubevirtRouterStore.floatingIps()) {
        if (podName.equals(floatingIp.podName())) {
            return floatingIp;
        }
    }
    return null;
}
|
@Test
public void testGetFloatingIpByPodName() {
    // A known pod resolves to its floating IP; an unknown name resolves to null.
    createBasicFloatingIpAssociated();
    assertNotNull("Floating IP did not match", target.floatingIpByPodName(POD_NAME));
    assertNull("Floating IP did not match", target.floatingIpByPodName(UNKNOWN_ID));
}
|
/**
 * Fetches the value for {@code key} at window start {@code time}, consulting
 * each underlying store in order and returning the first non-null hit, or
 * null when no store contains it.
 *
 * @throws InvalidStateStoreException if an underlying store has been closed or
 *         migrated; rethrown with a caller-friendly message.
 */
@Override
public V fetch(final K key, final long time) {
    Objects.requireNonNull(key, "key can't be null");
    final List<ReadOnlyWindowStore<K, V>> stores = provider.stores(storeName, windowStoreType);
    for (final ReadOnlyWindowStore<K, V> windowStore : stores) {
        try {
            final V result = windowStore.fetch(key, time);
            if (result != null) {
                return result;
            }
        } catch (final InvalidStateStoreException e) {
            // Rethrow with guidance for the caller, preserving the original
            // exception as the cause so the underlying failure is not lost.
            throw new InvalidStateStoreException(
                "State store is not available anymore and may have been migrated to another instance; " +
                    "please re-discover its location from the state metadata.", e);
        }
    }
    return null;
}
|
@Test
public void shouldThrowInvalidStateStoreExceptionIfFetchThrows() {
    // Closing the underlying store makes fetch throw; the composite store must
    // translate that into the standard "re-discover" message.
    underlyingWindowStore.setOpen(false);
    final CompositeReadOnlyWindowStore<Object, Object> store =
            new CompositeReadOnlyWindowStore<>(
                    new WrappingStoreProvider(singletonList(stubProviderOne), StoreQueryParameters.fromNameAndType("window-store", QueryableStoreTypes.windowStore())),
                    QueryableStoreTypes.windowStore(),
                    "window-store"
            );
    try {
        store.fetch("key", ofEpochMilli(1), ofEpochMilli(10));
        fail("InvalidStateStoreException was expected");
    } catch (final InvalidStateStoreException e) {
        assertEquals("State store is not available anymore and may have been migrated to another instance; " +
                "please re-discover its location from the state metadata.", e.getMessage());
    }
}
|
// Returns the Parquet message (schema root) type built by this converter.
public MessageType getMessageType()
{
    return messageType;
}
|
@Test
public void testMapKeyRepetitionLevel()
{
    // Map with primitive key: key_value is REPEATED, key REQUIRED, value OPTIONAL.
    ParquetSchemaConverter schemaConverter = new ParquetSchemaConverter(
            ImmutableList.of(mapType(VARCHAR, INTEGER)),
            ImmutableList.of("test"));
    GroupType mapType = schemaConverter.getMessageType().getType(0).asGroupType();
    GroupType keyValueValue = mapType.getType(0).asGroupType();
    assertEquals(keyValueValue.isRepetition(REPEATED), true);
    Type keyType = keyValueValue.getType(0).asPrimitiveType();
    assertEquals(keyType.isRepetition(REQUIRED), true);
    PrimitiveType valueType = keyValueValue.getType(1).asPrimitiveType();
    assertEquals(valueType.isRepetition(OPTIONAL), true);
    // Map with a row-typed key: the key group is still REQUIRED while its
    // individual row fields are OPTIONAL.
    schemaConverter = new ParquetSchemaConverter(
            ImmutableList.of(mapType(RowType.from(asList(field("a", VARCHAR), field("b", BIGINT))), INTEGER)),
            ImmutableList.of("test"));
    mapType = schemaConverter.getMessageType().getType(0).asGroupType();
    keyValueValue = mapType.getType(0).asGroupType();
    assertEquals(keyValueValue.isRepetition(REPEATED), true);
    keyType = keyValueValue.getType(0).asGroupType();
    assertEquals(keyType.isRepetition(REQUIRED), true);
    assertEquals(keyType.asGroupType().getType(0).asPrimitiveType().isRepetition(OPTIONAL), true);
    assertEquals(keyType.asGroupType().getType(1).asPrimitiveType().isRepetition(OPTIONAL), true);
    valueType = keyValueValue.getType(1).asPrimitiveType();
    assertEquals(valueType.isRepetition(OPTIONAL), true);
}
|
/**
 * Refreshes the public IDs of the given extension and its namespace, skipping
 * built-in extensions. Database writes only happen when an ID actually changed.
 */
public void update(String namespaceName, String extensionName) throws InterruptedException {
    if(BuiltInExtensionUtil.isBuiltIn(namespaceName)) {
        LOGGER.debug("SKIP BUILT-IN EXTENSION {}", NamingUtil.toExtensionId(namespaceName, extensionName));
        return;
    }
    var extension = repositories.findPublicId(namespaceName, extensionName);
    // Collect extension-id updates first; persist only if something changed.
    var extensionUpdates = new HashMap<Long, String>();
    updateExtensionPublicId(extension, extensionUpdates, false);
    if(!extensionUpdates.isEmpty()) {
        repositories.updateExtensionPublicIds(extensionUpdates);
    }
    // Same pattern for the namespace public IDs.
    var namespaceUpdates = new HashMap<Long, String>();
    updateNamespacePublicId(extension, namespaceUpdates, false);
    if(!namespaceUpdates.isEmpty()) {
        repositories.updateNamespacePublicIds(namespaceUpdates);
    }
}
|
@Test
public void testMustUpdateRandomExistsDb() throws InterruptedException {
    // Extension 1 (foo/bar) gets upstream public IDs that currently belong to
    // extension 2 (baz/foobar); extension 2 must then receive fresh random IDs.
    var namespaceName1 = "foo";
    var namespaceUuid1 = UUID.randomUUID().toString();
    var extensionName1 = "bar";
    var extensionUuid1 = UUID.randomUUID().toString();
    var namespace1 = new Namespace();
    namespace1.setId(1L);
    namespace1.setName(namespaceName1);
    var extension1 = new Extension();
    extension1.setId(2L);
    extension1.setName(extensionName1);
    extension1.setNamespace(namespace1);
    var namespaceName2 = "baz";
    var namespacePublicId2 = UUID.randomUUID().toString();
    var extensionName2 = "foobar";
    var extensionPublicId2 = UUID.randomUUID().toString();
    var namespace2 = new Namespace();
    namespace2.setId(3L);
    namespace2.setName(namespaceName2);
    namespace2.setPublicId(namespaceUuid1);
    var extension2 = new Extension();
    extension2.setId(4L);
    extension2.setName(extensionName2);
    extension2.setPublicId(extensionUuid1);
    extension2.setNamespace(namespace2);
    // The first random IDs drawn collide with existing DB rows, forcing a retry.
    var dbExtensionPublicId = UUID.randomUUID().toString();
    var dbNamespacePublicId = UUID.randomUUID().toString();
    Mockito.when(repositories.extensionPublicIdExists(dbExtensionPublicId)).thenReturn(true);
    Mockito.when(repositories.namespacePublicIdExists(dbNamespacePublicId)).thenReturn(true);
    Mockito.when(repositories.findPublicId(namespaceName1, extensionName1)).thenReturn(extension1);
    Mockito.when(repositories.findPublicId(extensionUuid1)).thenReturn(extension2);
    Mockito.when(repositories.findNamespacePublicId(namespaceUuid1)).thenReturn(extension2);
    var upstreamPublicIds = new PublicIds(namespaceUuid1, extensionUuid1);
    Mockito.when(idService.getUpstreamPublicIds(extension1)).thenReturn(upstreamPublicIds);
    Mockito.when(idService.getUpstreamPublicIds(extension2)).thenReturn(upstreamPublicIds);
    Mockito.when(idService.getRandomPublicId()).thenReturn(dbExtensionPublicId, extensionPublicId2, dbNamespacePublicId, namespacePublicId2);
    updateService.update(namespaceName1, extensionName1);
    // Both extensions' IDs are written in a single batched update.
    Mockito.verify(repositories).updateExtensionPublicIds(Mockito.argThat((Map<Long, String> map) -> {
        return map.size() == 2
                && map.get(extension1.getId()).equals(extensionUuid1)
                && map.get(extension2.getId()).equals(extensionPublicId2);
    }));
    Mockito.verify(repositories).updateNamespacePublicIds(Mockito.argThat((Map<Long, String> map) -> {
        return map.size() == 2
                && map.get(namespace1.getId()).equals(namespaceUuid1)
                && map.get(namespace2.getId()).equals(namespacePublicId2);
    }));
}
|
/** Returns this object's name. */
public String name() { return name; }
|
@Test
public void metricsAreUnregistered() throws Exception {
    // A minimal RPC server whose call handler is irrelevant to this test.
    Configuration conf = new Configuration();
    Server server = new Server("0.0.0.0", 0, LongWritable.class, 1, conf) {
        @Override
        public Writable call(
                RPC.RpcKind rpcKind, String protocol, Writable param,
                long receiveTime) throws Exception {
            return null;
        }
    };
    MetricsSystem metricsSystem = DefaultMetricsSystem.instance();
    RpcMetrics rpcMetrics = server.getRpcMetrics();
    RpcDetailedMetrics rpcDetailedMetrics = server.getRpcDetailedMetrics();
    // Sources exist while the server is alive...
    assertNotNull(metricsSystem.getSource(rpcMetrics.name()));
    assertNotNull(metricsSystem.getSource(rpcDetailedMetrics.name()));
    server.stop();
    // ...and must be unregistered once it stops.
    assertNull(metricsSystem.getSource(rpcMetrics.name()));
    assertNull(metricsSystem.getSource(rpcDetailedMetrics.name()));
}
|
@Override
@CheckForNull
public EmailMessage format(Notification notif) {
    // Only notifications about changes on the user's issues are handled here.
    if (!(notif instanceof ChangesOnMyIssuesNotification)) {
        return null;
    }
    ChangesOnMyIssuesNotification changesNotification = (ChangesOnMyIssuesNotification) notif;
    if (!(changesNotification.getChange() instanceof AnalysisChange)) {
        return formatMultiProject(changesNotification);
    }
    // Analysis-triggered changes always carry at least one changed issue;
    // format the notification around the first one.
    checkState(!changesNotification.getChangedIssues().isEmpty(), "changedIssues can't be empty");
    return formatAnalysisNotification(changesNotification.getChangedIssues().keySet().iterator().next(), changesNotification);
}
|
@Test
public void format_sets_static_subject_when_change_from_User() {
    // Regardless of how many issues changed, a user-initiated change produces
    // a fixed subject line.
    Set<ChangedIssue> changedIssues = IntStream.range(0, 2 + new Random().nextInt(4))
            .mapToObj(i -> newChangedIssue(i + "", randomValidStatus(), newProject("prj_" + i), newRandomNotAHotspotRule("rule_" + i)))
            .collect(toSet());
    UserChange userChange = newUserChange();
    EmailMessage emailMessage = underTest.format(new ChangesOnMyIssuesNotification(userChange, changedIssues));
    assertThat(emailMessage.getSubject()).isEqualTo("A manual update has changed some of your issues/hotspots");
}
|
@Override
public boolean isEmpty() {
    // The command topic is empty exactly when its end offset is still zero.
    return commandTopic.getEndOffset() == 0;
}
|
@Test
public void shouldComputeNotEmptyCorrectly() {
    // Given: the topic has at least one record (end offset > 0)
    when(commandTopic.getEndOffset()).thenReturn(1L);
    // When/Then: the store reports non-empty
    assertThat(commandStore.isEmpty(), is(false));
}
|
@Override
public ConfigErrors errors() {
    // Exposes the validation errors accumulated on this config object.
    return errors;
}
|
@Test
public void shouldValidatePipelineLabelWithEnvironmentVariable() {
    // env:-prefixed tokens are legal in label templates, so no error is recorded.
    String labelFormat = "pipeline-${COUNT}-${env:SOME_VAR}";
    PipelineConfig pipelineConfig = createAndValidatePipelineLabel(labelFormat);
    assertThat(pipelineConfig.errors().on(PipelineConfig.LABEL_TEMPLATE), is(nullValue()));
}
|
/**
 * Builds a StrimziPodSet with {@code replicas} pods produced by {@code podCreator}.
 * Labels and annotations supplied via the resource template are merged with the
 * provided ones; the selector uses only {@code selectorLabels}.
 */
public static StrimziPodSet createPodSet(
        String name,
        String namespace,
        Labels labels,
        OwnerReference ownerReference,
        ResourceTemplate template,
        int replicas,
        Map<String, String> annotations,
        Labels selectorLabels,
        Function<Integer, Pod> podCreator
) {
    // Pods are stored inside the PodSet spec as generic maps, one per replica index.
    List<Map<String, Object>> pods = new ArrayList<>(replicas);
    for (int i = 0; i < replicas; i++) {
        Pod pod = podCreator.apply(i);
        pods.add(PodSetUtils.podToMap(pod));
    }
    return new StrimziPodSetBuilder()
            .withNewMetadata()
                .withName(name)
                .withLabels(labels.withAdditionalLabels(TemplateUtils.labels(template)).toMap())
                .withNamespace(namespace)
                .withAnnotations(Util.mergeLabelsOrAnnotations(annotations, TemplateUtils.annotations(template)))
                .withOwnerReferences(ownerReference)
            .endMetadata()
            .withNewSpec()
                .withSelector(new LabelSelectorBuilder().withMatchLabels(selectorLabels.toMap()).build())
                .withPods(pods)
            .endSpec()
            .build();
}
|
@Test
public void testCreateStrimziPodSetFromNodeReferencesWithTemplate() {
    // Record which pod names the creator function was asked to build.
    List<String> podNames = new ArrayList<>();
    StrimziPodSet sps = WorkloadUtils.createPodSet(
            NAME,
            NAMESPACE,
            LABELS,
            OWNER_REFERENCE,
            new ResourceTemplateBuilder()
                    .withNewMetadata()
                        .withLabels(Map.of("label-3", "value-3", "label-4", "value-4"))
                        .withAnnotations(Map.of("anno-1", "value-1", "anno-2", "value-2"))
                    .endMetadata()
                    .build(),
            NODES,
            Map.of("extra", "annotations"),
            Labels.fromMap(Map.of("custom", "selector")),
            n -> {
                podNames.add(n.podName());
                return new PodBuilder()
                        .withNewMetadata()
                            .withName(n.podName())
                        .endMetadata()
                        .build();
            }
    );
    assertThat(sps.getMetadata().getName(), is(NAME));
    assertThat(sps.getMetadata().getNamespace(), is(NAMESPACE));
    assertThat(sps.getMetadata().getOwnerReferences(), is(List.of(OWNER_REFERENCE)));
    // Template labels/annotations are merged into the defaults.
    assertThat(sps.getMetadata().getLabels(), is(LABELS.withAdditionalLabels(Map.of("label-3", "value-3", "label-4", "value-4")).toMap()));
    assertThat(sps.getMetadata().getAnnotations(), is(Map.of("extra", "annotations", "anno-1", "value-1", "anno-2", "value-2")));
    // The selector uses only the custom selector labels.
    assertThat(sps.getSpec().getSelector().getMatchLabels().size(), is(1));
    assertThat(sps.getSpec().getSelector().getMatchLabels(), is(Map.of("custom", "selector")));
    // Test generating pods from the PodCreator method
    assertThat(podNames.size(), is(3));
    assertThat(podNames, hasItems("my-cluster-nodes-10", "my-cluster-nodes-11", "my-cluster-nodes-12"));
    assertThat(sps.getSpec().getPods().size(), is(3));
    assertThat(sps.getSpec().getPods().stream().map(pod -> PodSetUtils.mapToPod(pod).getMetadata().getName()).toList(), hasItems("my-cluster-nodes-10", "my-cluster-nodes-11", "my-cluster-nodes-12"));
}
|
/**
 * Stores the given configuration property under {@code name}.
 * <p>
 * The property name must contain at least one non-whitespace character and the
 * value must be non-null; the exception message for an invalid name is part of
 * the established contract and is kept verbatim.
 *
 * @param name  property key, must not be null or blank
 * @param value property value, must not be null
 * @return this {@code Config} instance for call chaining
 * @throws IllegalArgumentException if {@code name} is null or blank
 */
public Config setProperty(@Nonnull String name, @Nonnull String value) {
    final boolean blankName = isNullOrEmptyAfterTrim(name);
    if (blankName) {
        throw new IllegalArgumentException("argument 'name' can't be null or empty");
    }
    isNotNull(value, "value");
    properties.setProperty(name, value);
    return this;
}
|
// A null property name must be rejected with IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void testSetConfigPropertyNameNull() {
    config.setProperty(null, "test");
}
|
/**
 * Derives the {@link UpdateRequirement}s that must hold on the server side before the
 * given metadata updates may be committed against {@code base}.
 * <p>
 * Every commit at minimum asserts that the table UUID is unchanged; each individual
 * update may contribute further requirements through the builder.
 *
 * @param base            current table metadata, must not be null
 * @param metadataUpdates updates about to be applied, must not be null
 * @return the requirements to validate before committing
 */
public static List<UpdateRequirement> forUpdateTable(
    TableMetadata base, List<MetadataUpdate> metadataUpdates) {
  Preconditions.checkArgument(null != base, "Invalid table metadata: null");
  Preconditions.checkArgument(null != metadataUpdates, "Invalid metadata updates: null");
  final Builder builder = new Builder(base, false);
  // The table identity (UUID) must always be asserted, regardless of the updates.
  builder.require(new UpdateRequirement.AssertTableUUID(base.uuid()));
  for (MetadataUpdate update : metadataUpdates) {
    builder.update(update);
  }
  return builder.build();
}
|
// Setting or removing table properties requires only the table-UUID assertion;
// no property-specific requirements are generated.
@Test
public void setAndRemoveProperties() {
    // SetProperties: only AssertTableUUID expected.
    List<UpdateRequirement> requirements =
        UpdateRequirements.forUpdateTable(
            metadata,
            ImmutableList.of(new MetadataUpdate.SetProperties(ImmutableMap.of("test", "test"))));
    requirements.forEach(req -> req.validate(metadata));

    assertThat(requirements)
        .hasSize(1)
        .hasOnlyElementsOfTypes(UpdateRequirement.AssertTableUUID.class);

    assertTableUUID(requirements);

    // RemoveProperties: same single requirement.
    requirements =
        UpdateRequirements.forUpdateTable(
            metadata,
            ImmutableList.of(new MetadataUpdate.RemoveProperties(Sets.newHashSet("test"))));
    requirements.forEach(req -> req.validate(metadata));

    assertThat(requirements)
        .hasSize(1)
        .hasOnlyElementsOfTypes(UpdateRequirement.AssertTableUUID.class);

    assertTableUUID(requirements);
}
|
/**
 * Configures the DNS server addresses the client resolver should use.
 * <p>
 * Each address host must be a literal IPv4 or IPv6 address; hostnames are rejected
 * because they would themselves require DNS resolution.
 *
 * @param addresses DNS server socket addresses with literal IP hosts
 * @return this builder for chaining
 * @throws IllegalArgumentException if any host is not a literal IP address
 */
@Override
public ClientBuilder dnsServerAddresses(List<InetSocketAddress> addresses) {
    addresses.forEach(address -> {
        final String host = address.getHostString();
        checkArgument(InetAddressUtils.isIPv4Address(host) || InetAddressUtils.isIPv6Address(host),
                "DnsServerAddresses need to be valid IPv4 or IPv6 addresses");
    });
    conf.setDnsServerAddresses(addresses);
    return this;
}
|
// A hostname ("localhost") instead of a literal IP must be rejected, even when
// mixed with valid literal addresses.
@Test(expectedExceptions = IllegalArgumentException.class)
public void testClientBuilderWithIllegalDNSServerHostname() throws PulsarClientException {
    PulsarClient.builder().dnsServerAddresses(
            Arrays.asList(new InetSocketAddress("1.2.3.4", 53), new InetSocketAddress("localhost", 53)));
}
|
/**
 * Reads a process identifier from the given pid file.
 * <p>
 * On Windows the file is expected to contain a container ID, so the first line
 * that parses as a {@code ContainerId} is returned. On other platforms the first
 * line containing a positive numeric pid is returned. Returns {@code null} when
 * the file does not exist or no suitable line is found.
 *
 * @param path location of the pid file; must not be null
 * @return the process id string, or {@code null} if none could be read
 * @throws IOException if {@code path} is null or the file cannot be read
 */
public static String getProcessId(Path path) throws IOException {
  if (path == null) {
    throw new IOException("Trying to access process id from a null path");
  }

  LOG.debug("Accessing pid from pid file {}", path);
  String processId = null;
  File file = new File(path.toString());
  if (file.exists()) {
    // try-with-resources guarantees the reader (and the underlying stream) is
    // closed even if readLine throws, replacing the previous manual finally block.
    try (BufferedReader bufReader = new BufferedReader(
        new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8))) {
      String line;
      while ((line = bufReader.readLine()) != null) {
        String temp = line.trim();
        if (temp.isEmpty()) {
          continue; // skip blank lines
        }
        if (Shell.WINDOWS) {
          // On Windows, pid is expected to be a container ID, so find first
          // line that parses successfully as a container ID.
          try {
            ContainerId.fromString(temp);
            processId = temp;
            break;
          } catch (Exception ignored) {
            // not a container ID; keep scanning
          }
        } else {
          // Otherwise, find first line containing a positive numeric pid.
          try {
            long pid = Long.parseLong(temp);
            if (pid > 0) {
              processId = temp;
              break;
            }
          } catch (Exception ignored) {
            // not a number; keep scanning
          }
        }
      }
    }
  }
  LOG.debug("Got pid {} from path {}",
      (processId != null ? processId : "null"), path);
  return processId;
}
|
// Writes a pid file containing a platform-appropriate id (container ID on
// Windows, numeric pid elsewhere) and verifies it is read back unchanged.
@Test (timeout = 30000)
public void testSimpleGet() throws IOException {
    String rootDir = new File(System.getProperty(
        "test.build.data", "/tmp")).getAbsolutePath();
    File testFile = null;
    String expectedProcessId = Shell.WINDOWS ?
        "container_1353742680940_0002_01_000001" :
        "56789";
    try {
        testFile = new File(rootDir, "temp.txt");
        PrintWriter fileWriter = new PrintWriter(testFile);
        fileWriter.println(expectedProcessId);
        fileWriter.close();
        String processId = null;
        processId = ProcessIdFileReader.getProcessId(
            new Path(rootDir + Path.SEPARATOR + "temp.txt"));
        Assert.assertEquals(expectedProcessId, processId);
    } finally {
        // Always clean up the temp file, even on assertion failure.
        if (testFile != null
            && testFile.exists()) {
            testFile.delete();
        }
    }
}
|
/**
 * Whether the webhook delivery completed with an HTTP 2xx status.
 *
 * @return {@code true} only when a status is recorded and lies in [200, 300)
 */
public boolean isSuccess() {
    // Integer division: status / 100 == 2 exactly for codes 200..299.
    return httpStatus != null && httpStatus / 100 == 2;
}
|
// 204 is within the 2xx range, so the delivery must report success.
@Test
public void isSuccess_returns_true_if_http_response_returns_2xx_code() {
    WebhookDelivery delivery = newBuilderTemplate()
        .setHttpStatus(204)
        .build();
    assertThat(delivery.isSuccess()).isTrue();
}
|
/**
 * Clamps the given time range to the cluster-wide query time range limit.
 * <p>
 * When no limit is configured (missing config or a {@code Period.ZERO} limit) the
 * range is returned unchanged; otherwise the {@code from} timestamp is moved forward
 * so the range never exceeds the configured maximum length ending at {@code to}.
 *
 * @param timeRange the requested time range
 * @return an absolute range honoring the configured limit
 */
protected org.graylog2.plugin.indexer.searches.timeranges.TimeRange restrictTimeRange(final org.graylog2.plugin.indexer.searches.timeranges.TimeRange timeRange) {
    final DateTime to = timeRange.getTo();
    DateTime from = timeRange.getFrom();

    final SearchesClusterConfig config = clusterConfigService.get(SearchesClusterConfig.class);
    if (config != null && !Period.ZERO.equals(config.queryTimeRangeLimit())) {
        final DateTime earliestAllowed = to.minus(config.queryTimeRangeLimit());
        // Only clamp when the requested start precedes the earliest allowed start.
        if (earliestAllowed.isAfter(from)) {
            from = earliestAllowed;
        }
    }

    return AbsoluteRange.create(from, to);
}
|
// With the limit explicitly set to Period.ZERO, even a one-year range must be
// passed through unchanged.
@Test
public void restrictTimeRangeReturnsGivenTimeRangeIfNoLimitHasBeenSet() {
    when(clusterConfigService.get(SearchesClusterConfig.class)).thenReturn(SearchesClusterConfig.createDefault()
        .toBuilder()
        .queryTimeRangeLimit(Period.ZERO)
        .build());
    final SearchResource resource = new SearchResource(searches, clusterConfigService, decoratorProcessor, searchExecutor) {
    };

    final DateTime from = new DateTime(2015, 1, 15, 12, 0, DateTimeZone.UTC);
    final DateTime to = from.plusYears(1);
    final TimeRange timeRange = AbsoluteRange.create(from, to);
    final TimeRange restrictedTimeRange = resource.restrictTimeRange(timeRange);

    assertThat(restrictedTimeRange).isNotNull();
    assertThat(restrictedTimeRange.getFrom()).isEqualTo(from);
    assertThat(restrictedTimeRange.getTo()).isEqualTo(to);
}
|
/**
 * Registers a task under "/<task name>" and builds its executor, wrapping it with
 * metric decorators for any Metrics annotations found on the task's
 * {@code execute(Map, PrintWriter)} method.
 * <p>
 * Decorators are applied in order (Timed, then Metered, then ExceptionMetered),
 * so each wraps the previous one. Tasks whose class does not declare the expected
 * execute signature are registered without instrumentation.
 */
public void add(Task task) {
    tasks.put('/' + task.getName(), task);

    TaskExecutor taskExecutor = new TaskExecutor(task);
    try {
        // Look up the concrete execute method so annotations on subclasses are seen.
        final Method executeMethod = task.getClass().getMethod("execute",
            Map.class, PrintWriter.class);

        if (executeMethod.isAnnotationPresent(Timed.class)) {
            final Timed annotation = executeMethod.getAnnotation(Timed.class);
            final String name = chooseName(annotation.name(),
                annotation.absolute(),
                task);
            taskExecutor = new TimedTask(taskExecutor, metricRegistry.timer(name));
        }

        if (executeMethod.isAnnotationPresent(Metered.class)) {
            final Metered annotation = executeMethod.getAnnotation(Metered.class);
            final String name = chooseName(annotation.name(),
                annotation.absolute(),
                task);
            taskExecutor = new MeteredTask(taskExecutor, metricRegistry.meter(name));
        }

        if (executeMethod.isAnnotationPresent(ExceptionMetered.class)) {
            final ExceptionMetered annotation = executeMethod.getAnnotation(ExceptionMetered.class);
            final String name = chooseName(annotation.name(),
                annotation.absolute(),
                task,
                ExceptionMetered.DEFAULT_NAME_SUFFIX);
            taskExecutor = new ExceptionMeteredTask(taskExecutor, metricRegistry.meter(name), annotation.cause());
        }
    } catch (NoSuchMethodException ignored) {
        // No matching execute method: register the task without metric wrappers.
    }

    taskExecutors.put(task, taskExecutor);
}
|
// A task whose execute method is annotated @Timed must register a timer under the
// annotation's name after being invoked through the servlet.
@Test
void testRunsTimedTask() throws Exception {
    final ServletInputStream bodyStream = new TestServletInputStream(
        new ByteArrayInputStream("".getBytes(StandardCharsets.UTF_8)));
    final Task timedTask = new Task("timed-task") {
        @Override
        @Timed(name = "vacuum-cleaning")
        public void execute(Map<String, List<String>> parameters, PrintWriter output) {
            output.println("Vacuum cleaning");
        }
    };
    servlet.add(timedTask);

    // Simulate a POST to /timed-task with an empty body.
    when(request.getInputStream()).thenReturn(bodyStream);
    when(request.getParameterNames()).thenReturn(Collections.emptyEnumeration());
    when(request.getMethod()).thenReturn("POST");
    when(request.getPathInfo()).thenReturn("/timed-task");
    when(response.getWriter()).thenReturn(mock(PrintWriter.class));

    servlet.service(request, response);

    assertThat(metricRegistry.getTimers()).containsKey(name(timedTask.getClass(), "vacuum-cleaning"));
}
|
/**
 * Resolves the span operation name for a timer exchange.
 * <p>
 * Prefers the {@code Exchange.TIMER_NAME} exchange property when it is a String;
 * otherwise falls back to the superclass's default naming.
 */
@Override
public String getOperationName(Exchange exchange, Endpoint endpoint) {
    final Object name = exchange.getProperty(Exchange.TIMER_NAME);
    return name instanceof String
        ? (String) name
        : super.getOperationName(exchange, endpoint);
}
|
// When the TIMER_NAME exchange property is set, it must be used verbatim as the
// operation name.
@Test
public void testGetOperationName() {
    Exchange exchange = Mockito.mock(Exchange.class);
    Mockito.when(exchange.getProperty(Exchange.TIMER_NAME)).thenReturn(TEST_NAME);

    // Minimal concrete subclass; getComponent is irrelevant to this test.
    SpanDecorator decorator = new TimerSpanDecorator() {
        @Override
        public String getComponent() {
            return null;
        }
    };

    assertEquals(TEST_NAME, decorator.getOperationName(exchange, null));
}
|
/**
 * Computes the set's hash code as the sum of its stored element hashes,
 * so the result is independent of iteration order as required by the Set contract.
 */
@Override
public int hashCode() {
    int sum = 0;
    for (int i = 0; i < hashes.length; i++) {
        sum += hashes[i];
    }
    return sum;
}
|
// For elements 0..9 the hash of an Integer is its value, so the expected
// order-independent sum is 0+1+...+9 = 45.
@Test
public void testHashCode() {
    final OAHashSet<Integer> set = new OAHashSet<>(8);
    populateSet(set, 10);

    final int expectedHashCode = 45;
    assertEquals(expectedHashCode, set.hashCode());
}
|
/**
 * Runs the Louvain-style modularity optimization on the given community structure.
 * <p>
 * Repeatedly moves nodes to their best neighboring community until no local move
 * improves modularity, then zooms out (collapses communities into super-nodes) and
 * repeats. Finally writes the flat community assignment into {@code comStructure}
 * and returns the modularity both at resolution 1 and at {@code currentResolution}.
 *
 * @param graph             the graph being partitioned
 * @param theStructure      working community structure (mutated in place)
 * @param comStructure      output: community index per node
 * @param currentResolution resolution parameter for the resolution-scaled result
 * @param randomized        if true, node traversal starts at a random offset
 * @param weighted          whether edge weights are taken into account
 * @return map with keys "modularity" and "modularityResolution"; may be incomplete
 *         (empty) if the computation was canceled
 */
protected HashMap<String, Double> computeModularity(Graph graph, CommunityStructure theStructure,
                                                    int[] comStructure,
                                                    double currentResolution, boolean randomized,
                                                    boolean weighted) {
    isCanceled = false;
    Progress.start(progress);
    Random rand = new Random();

    // Snapshot totals before the structure is mutated by moves/zoom-outs.
    double totalWeight = theStructure.graphWeightSum;
    double[] nodeDegrees = theStructure.weights.clone();

    HashMap<String, Double> results = new HashMap<>();

    if (isCanceled) {
        return results;
    }
    boolean someChange = true;
    // Outer loop: one pass per hierarchy level; zoom out after each improving pass.
    while (someChange) {
        someChange = false;
        boolean localChange = true;
        // Inner loop: keep sweeping nodes until a full sweep makes no move.
        while (localChange) {
            localChange = false;
            int start = 0;
            if (randomized) {
                // Random starting node to avoid order-induced bias.
                start = Math.abs(rand.nextInt()) % theStructure.N;
            }
            int step = 0;
            // Circular sweep over all N nodes beginning at 'start'.
            for (int i = start; step < theStructure.N; i = (i + 1) % theStructure.N) {
                step++;
                Community bestCommunity = updateBestCommunity(theStructure, i, currentResolution);
                if ((theStructure.nodeCommunities[i] != bestCommunity) && (bestCommunity != null)) {
                    theStructure.moveNodeTo(i, bestCommunity);
                    localChange = true;
                }
                if (isCanceled) {
                    return results;
                }
            }
            someChange = localChange || someChange;
            if (isCanceled) {
                return results;
            }
        }

        if (someChange) {
            // Collapse current communities into super-nodes for the next level.
            theStructure.zoomOut();
        }
    }
    // Flatten the hierarchy into per-node community indices and compute final Q.
    fillComStructure(graph, theStructure, comStructure);
    double[] degreeCount = fillDegreeCount(graph, theStructure, comStructure, nodeDegrees, weighted);

    double computedModularity = finalQ(comStructure, degreeCount, graph, theStructure, totalWeight, 1., weighted);
    double computedModularityResolution =
        finalQ(comStructure, degreeCount, graph, theStructure, totalWeight, currentResolution, weighted);

    results.put("modularity", computedModularity);
    results.put("modularityResolution", computedModularityResolution);

    return results;
}
|
// Builds a weighted cycle of 8 nodes including parallel edges, and checks that
// heavily-weighted links keep their endpoints in the same community while
// lightly-linked neighbors are split apart.
@Test
public void testCyclicWithWeightsGraphModularity() {
    GraphModel graphModel = GraphModel.Factory.newInstance();
    UndirectedGraph undirectedGraph = graphModel.getUndirectedGraph();
    Node node1 = graphModel.factory().newNode("0");
    Node node2 = graphModel.factory().newNode("1");
    Node node3 = graphModel.factory().newNode("2");
    Node node4 = graphModel.factory().newNode("3");
    Node node5 = graphModel.factory().newNode("4");
    Node node6 = graphModel.factory().newNode("5");
    Node node7 = graphModel.factory().newNode("6");
    Node node8 = graphModel.factory().newNode("7");
    undirectedGraph.addNode(node1);
    undirectedGraph.addNode(node2);
    undirectedGraph.addNode(node3);
    undirectedGraph.addNode(node4);
    undirectedGraph.addNode(node5);
    undirectedGraph.addNode(node6);
    undirectedGraph.addNode(node7);
    undirectedGraph.addNode(node8);
    //Test 3 parallel edges summing weight = 10
    //Related issues ==> #1419 Getting null pointer error when trying to calculate modularity; #1526 NullPointerException on Modularity Statistics with gexf with kind / parallel nodes
    Edge edge12_1 = graphModel.factory().newEdge(node1, node2, 1, 2.f, false);
    Edge edge12_2 = graphModel.factory().newEdge(node1, node2, 2, 5.f, false);
    Edge edge12_3 = graphModel.factory().newEdge(node1, node2, 2, 3.f, false);
    Edge edge23 = graphModel.factory().newEdge(node2, node3, false);
    Edge edge34 = graphModel.factory().newEdge(node3, node4, 0, 10.f, false);
    Edge edge45 = graphModel.factory().newEdge(node4, node5, false);
    Edge edge56 = graphModel.factory().newEdge(node5, node6, 0, 10.f, false);
    Edge edge67 = graphModel.factory().newEdge(node6, node7, false);
    //Test 2 parallel edges summing weight = 10
    Edge edge78_1 = graphModel.factory().newEdge(node7, node8, 0, 5.f, false);
    Edge edge78_2 = graphModel.factory().newEdge(node7, node8, 0, 5.f, false);
    Edge edge81 = graphModel.factory().newEdge(node8, node1, false);
    undirectedGraph.addEdge(edge12_1);
    undirectedGraph.addEdge(edge12_2);
    undirectedGraph.addEdge(edge12_3);
    undirectedGraph.addEdge(edge23);
    undirectedGraph.addEdge(edge34);
    undirectedGraph.addEdge(edge45);
    undirectedGraph.addEdge(edge56);
    undirectedGraph.addEdge(edge67);
    undirectedGraph.addEdge(edge78_1);
    undirectedGraph.addEdge(edge78_2);
    undirectedGraph.addEdge(edge81);
    UndirectedGraph graph = graphModel.getUndirectedGraph();
    Modularity mod = new Modularity();
    Modularity.CommunityStructure theStructure = mod.new CommunityStructure(graph);
    int[] comStructure = new int[graph.getNodeCount()];
    // Weighted, randomized run at resolution 1.0.
    HashMap<String, Double> modularityValues = mod.computeModularity(graph, theStructure, comStructure,
        1., true, true);
    int class1 = comStructure[0];
    int class2 = comStructure[1];
    int class4 = comStructure[3];
    int class5 = comStructure[4];
    int class7 = comStructure[6];
    int class8 = comStructure[7];
    // Nodes joined by weight-10 (parallel) links share a community; nodes 4 and 5,
    // joined only by a weight-1 edge, do not.
    assertEquals(class1, class2);
    assertEquals(class7, class8);
    Assert.assertNotEquals(class4, class5);
}
|
/**
 * Creates a binary-expression template node for the given operator and operands.
 *
 * @param binaryOp the AST kind of the binary operator; must be one of the
 *                 operators registered in {@code OP_CODES}
 * @param lhs      left-hand operand template
 * @param rhs      right-hand operand template
 * @throws IllegalArgumentException if {@code binaryOp} is not a supported operator
 */
public static UBinary create(Kind binaryOp, UExpression lhs, UExpression rhs) {
  checkArgument(
      OP_CODES.containsKey(binaryOp), "%s is not a supported binary operation", binaryOp);
  return new AutoValue_UBinary(binaryOp, lhs, rhs);
}
|
// UBinary instances must survive Java serialization round-trips for a variety of
// operator kinds and operand literal types.
@Test
public void serialization() {
    ULiteral oneLit = ULiteral.intLit(1);
    ULiteral twoLit = ULiteral.intLit(2);
    ULiteral piLit = ULiteral.doubleLit(Math.PI);
    ULiteral trueLit = ULiteral.booleanLit(true);
    ULiteral falseLit = ULiteral.booleanLit(false);
    SerializableTester.reserializeAndAssert(UBinary.create(Kind.PLUS, oneLit, twoLit));
    SerializableTester.reserializeAndAssert(UBinary.create(Kind.PLUS, oneLit, piLit));
    SerializableTester.reserializeAndAssert(UBinary.create(Kind.PLUS, piLit, twoLit));
    SerializableTester.reserializeAndAssert(UBinary.create(Kind.MINUS, oneLit, twoLit));
    SerializableTester.reserializeAndAssert(UBinary.create(Kind.XOR, oneLit, twoLit));
    SerializableTester.reserializeAndAssert(UBinary.create(Kind.CONDITIONAL_OR, trueLit, falseLit));
    SerializableTester.reserializeAndAssert(UBinary.create(Kind.OR, trueLit, falseLit));
}
|
/**
 * Reads exactly {@code length} bytes from the stream into {@code bytes} at
 * {@code offset}, failing if the stream ends first.
 *
 * @throws EOFException if the stream is exhausted before {@code length} bytes are read
 * @throws IOException  if the underlying read fails
 */
public static void readFully(InputStream stream, byte[] bytes, int offset, int length)
    throws IOException {
  final int bytesRead = readRemaining(stream, bytes, offset, length);
  if (bytesRead >= length) {
    return;
  }
  throw new EOFException(
      "Reached the end of stream with " + (length - bytesRead) + " bytes left to read");
}
|
// The mock stream yields only 10 bytes (2+3+3 plus remainder) for an 11-byte
// request: readFully must throw EOF naming the 1 missing byte, yet still have
// consumed everything available.
@Test
public void testReadFullyUnderflow() {
    final byte[] buffer = new byte[11];
    final MockInputStream stream = new MockInputStream(2, 3, 3);

    assertThatThrownBy(() -> IOUtil.readFully(stream, buffer, 0, buffer.length))
        .isInstanceOf(EOFException.class)
        .hasMessage("Reached the end of stream with 1 bytes left to read");

    assertThat(Arrays.copyOfRange(buffer, 0, 10))
        .as("Should have consumed bytes")
        .isEqualTo(MockInputStream.TEST_ARRAY);
    assertThat(stream.getPos()).as("Stream position should reflect bytes read").isEqualTo(10);
}
|
/**
 * Asserts the map under test contains at least the given key/value entries.
 * Varargs are accumulated into a map (duplicate keys rejected) and delegated to
 * {@link #containsAtLeastEntriesIn}; the returned {@code Ordered} allows an
 * optional in-order check.
 */
@CanIgnoreReturnValue
public final Ordered containsAtLeast(
    @Nullable Object k0, @Nullable Object v0, @Nullable Object... rest) {
  return containsAtLeastEntriesIn(accumulateMap("containsAtLeast", k0, v0, rest));
}
|
// Failure message must report both the wrong value for an existing key ("march")
// and the entirely missing key ("feb"), with indexed fact entries for each.
@Test
public void containsAtLeastExtraKeyAndMissingKeyAndWrongValue() {
    ImmutableMap<String, Integer> actual = ImmutableMap.of("jan", 1, "march", 3);
    expectFailureWhenTestingThat(actual).containsAtLeast("march", 33, "feb", 2);
    assertFailureKeys(
        "keys with wrong values",
        "for key",
        "expected value",
        "but got value",
        "missing keys",
        "for key",
        "expected value",
        "---",
        "expected to contain at least",
        "but was");
    assertFailureValueIndexed("for key", 0, "march");
    assertFailureValueIndexed("expected value", 0, "33");
    assertFailureValue("but got value", "3");
    assertFailureValueIndexed("for key", 1, "feb");
    assertFailureValueIndexed("expected value", 1, "2");
}
|
/**
 * Handles anonymous (unauthenticated-channel) unversioned profile requests.
 * <p>
 * PNI lookups are refused outright. The caller must prove authorization either via
 * a group-send token covering the target or via the target's unidentified-access
 * key; any other authentication case is an invalid argument.
 *
 * @return the unversioned profile response, or an error Mono with UNAUTHENTICATED,
 *         NOT_FOUND, or INVALID_ARGUMENT status
 */
@Override
public Mono<GetUnversionedProfileResponse> getUnversionedProfile(final GetUnversionedProfileAnonymousRequest request) {
    final ServiceIdentifier targetIdentifier =
        ServiceIdentifierUtil.fromGrpcServiceIdentifier(request.getRequest().getServiceIdentifier());

    // Callers must be authenticated to request unversioned profiles by PNI
    if (targetIdentifier.identityType() == IdentityType.PNI) {
        throw Status.UNAUTHENTICATED.asRuntimeException();
    }

    final Mono<Account> account = switch (request.getAuthenticationCase()) {
        case GROUP_SEND_TOKEN ->
            // Token must be valid for the target; missing accounts surface as NOT_FOUND.
            groupSendTokenUtil.checkGroupSendToken(request.getGroupSendToken(), List.of(targetIdentifier))
                .then(Mono.fromFuture(() -> accountsManager.getByServiceIdentifierAsync(targetIdentifier)))
                .flatMap(Mono::justOrEmpty)
                .switchIfEmpty(Mono.error(Status.NOT_FOUND.asException()));
        case UNIDENTIFIED_ACCESS_KEY ->
            getTargetAccountAndValidateUnidentifiedAccess(targetIdentifier, request.getUnidentifiedAccessKey().toByteArray());
        default -> Mono.error(Status.INVALID_ARGUMENT.asException());
    };

    // Anonymous callers get no per-requester data (second argument is null).
    return account.map(targetAccount -> ProfileGrpcHelper.buildUnversionedProfileResponse(targetIdentifier,
        null,
        targetAccount,
        profileBadgeConverter));
}
|
// A valid group-send token whose target account does not exist must yield NOT_FOUND.
@Test
void getUnversionedProfileGroupSendEndorsementAccountNotFound() throws Exception {
    final AciServiceIdentifier serviceIdentifier = new AciServiceIdentifier(UUID.randomUUID());

    // Expiration must be on a day boundary; we want one in the future
    final Instant expiration = Instant.now().plus(Duration.ofDays(1)).truncatedTo(ChronoUnit.DAYS);
    final byte[] token = AuthHelper.validGroupSendToken(SERVER_SECRET_PARAMS, List.of(serviceIdentifier), expiration);

    when(accountsManager.getByServiceIdentifierAsync(any())).thenReturn(CompletableFuture.completedFuture(Optional.empty()));

    final GetUnversionedProfileAnonymousRequest request = GetUnversionedProfileAnonymousRequest.newBuilder()
        .setGroupSendToken(ByteString.copyFrom(token))
        .setRequest(GetUnversionedProfileRequest.newBuilder()
            .setServiceIdentifier(ServiceIdentifierUtil.toGrpcServiceIdentifier(serviceIdentifier)))
        .build();

    assertStatusException(Status.NOT_FOUND, () -> unauthenticatedServiceStub().getUnversionedProfile(request));
}
|
/**
 * Converts an absolute text offset into a (major, minor) position by advancing
 * from the document origin by {@code offset} with the given {@code bias}
 * (which decides which side of a boundary the position lands on).
 */
@Override
public Position offsetToPosition(int offset, Bias bias) {
    return position(0, 0).offsetBy(offset, bias);
}
|
// Offset 10 with Backward bias inside the first segment maps to (major 0, minor 10).
@Test
public void testPositiveOffsetWithBackwardBias() {
    Position pos = navigator.offsetToPosition(10, Backward);
    assertEquals(0, pos.getMajor());
    assertEquals(10, pos.getMinor());
}
|
/**
 * Returns the n-th (1-based) string of the varargs, or null when the index is
 * out of range or the varargs array itself is null.
 */
@Udf
public String elt(
    @UdfParameter(description = "the nth element to extract") final int n,
    @UdfParameter(description = "the strings of which to extract the nth") final String... args
) {
  // A null array and an out-of-range index are both mapped to null (SQL-style).
  final boolean selectable = args != null && n >= 1 && n <= args.length;
  return selectable ? args[n - 1] : null;
}
|
// elt uses 1-based indexing: index 1 of ("a", "b") is "a".
@Test
public void shouldSelectFirstElementOfTwo() {
    // When:
    final String el = elt.elt(1, "a", "b");

    // Then:
    assertThat(el, equalTo("a"));
}
|
/**
 * Parses a socket address string into either a Unix domain socket address
 * ("unix:///abs/path") or a TCP/IP address ("host:port").
 * <p>
 * For the Unix form the socket file is created if absent; files created here are
 * registered for deletion on JVM exit so the process cleans up its own sockets
 * but leaves pre-existing ones alone.
 *
 * @param value address string; either "unix://" + absolute path, or host:port
 * @return a {@code DomainSocketAddress} or {@code InetSocketAddress}
 * @throws IllegalArgumentException if the Unix path is relative or the TCP form
 *                                  lacks a port
 * @throws RuntimeException         wrapping any IOException from file creation
 */
public static SocketAddress createFrom(String value) {
    if (value.startsWith(UNIX_DOMAIN_SOCKET_PREFIX)) {
        // Unix Domain Socket address.
        // Create the underlying file for the Unix Domain Socket.
        String filePath = value.substring(UNIX_DOMAIN_SOCKET_PREFIX.length());
        File file = new File(filePath);
        if (!file.isAbsolute()) {
            throw new IllegalArgumentException("File path must be absolute: " + filePath);
        }
        try {
            if (file.createNewFile()) {
                // If this application created the file, delete it when the application exits.
                file.deleteOnExit();
            }
        } catch (IOException ex) {
            throw new RuntimeException(ex);
        }
        // Create the SocketAddress referencing the file.
        return new DomainSocketAddress(file);
    } else {
        // Standard TCP/IP address.
        HostAndPort hostAndPort = HostAndPort.fromString(value);
        checkArgument(
            hostAndPort.hasPort(),
            "Address must be a unix:// path or be in the form host:port. Got: %s",
            value);
        return new InetSocketAddress(hostAndPort.getHost(), hostAndPort.getPort());
    }
}
|
// A unix:// URI must produce a DomainSocketAddress pointing at the given file.
@Test
public void testDomainSocket() throws Exception {
    File tmpFile = tmpFolder.newFile();
    SocketAddress socketAddress =
        SocketAddressFactory.createFrom("unix://" + tmpFile.getAbsolutePath());
    assertThat(socketAddress, Matchers.instanceOf(DomainSocketAddress.class));
    assertEquals(tmpFile.getAbsolutePath(), ((DomainSocketAddress) socketAddress).path());
}
|
/**
 * Validates the action parameters before execution; delegates to {@code comment(...)}
 * which throws if the required "comment" parameter is missing.
 * The issues and user session are not consulted during verification.
 *
 * @return always {@code true} when validation passes
 */
@Override
public boolean verify(Map<String, Object> properties, Collection<DefaultIssue> issues, UserSession userSession) {
    comment(properties);
    return true;
}
|
// verify must reject a parameter map lacking the "comment" key, and must not
// touch the issue updater when validation fails.
@Test
public void should_verify_fail_if_parameter_not_found() {
    Map<String, Object> properties = singletonMap("unknwown", "unknown value");
    try {
        action.verify(properties, new ArrayList<>(), new AnonymousMockUserSession());
        fail();
    } catch (Exception e) {
        assertThat(e).isInstanceOf(IllegalArgumentException.class).hasMessage("Missing parameter : 'comment'");
    }
    verifyNoInteractions(issueUpdater);
}
|
/**
 * Generates a fresh random (type 4) UUID and returns its canonical string form.
 */
@Udf
public String uuid() {
    final java.util.UUID randomId = java.util.UUID.randomUUID();
    return randomId.toString();
}
|
// 1000 consecutive invocations must all produce distinct UUID strings
// (set size equals invocation count only if there were no collisions).
@Test
public void shouldReturnDistinctValueEachInvocation() {
    int capacity = 1000;
    final Set<String> outputs = new HashSet<String>(capacity);
    for (int i = 0; i < capacity; i++) {
        outputs.add(udf.uuid());
    }
    assertThat(outputs, hasSize(capacity));
}
|
/**
 * Whether the given epoch-millis timestamp string refers to a future instant.
 * Null or empty input is treated as "not in the future".
 *
 * @param timestamp epoch milliseconds as a decimal string; may be null or empty
 * @return {@code true} only for a non-empty timestamp that lies in the future
 * @throws NumberFormatException if a non-empty timestamp is not a valid long
 */
public static boolean isFuture(String timestamp) {
    if (StringUtils.isEmpty(timestamp)) {
        return false;
    }
    return isFuture(Long.parseLong(timestamp));
}
|
// The Long overload of isFuture must tolerate a null value and report false.
@Test
public void isFutureManagesNullValues() {
    Long longValue = null;
    assertFalse(DateUtils.isFuture(longValue));
}
|
/**
 * Converts an infix predicate expression into prefix (Polish-like) order using a
 * shunting-yard style operator stack.
 * <p>
 * Tokens that are not operators go straight to the output; operators are held on
 * a stack and popped by precedence; parentheses delimit sub-expressions. The
 * result lists operands before their operator (e.g. "b between 10 and 15" →
 * ["b", "10", "15", "between"]).
 *
 * @param in the infix expression (IN clauses are normalized first)
 * @return tokens in prefix evaluation order
 */
public List<String> toPrefix(String in) {
    List<String> tokens = buildTokens(alignINClause(in));
    List<String> output = new ArrayList<>();
    // Operator stack; the list's tail is the stack top.
    List<String> stack = new ArrayList<>();
    for (String token : tokens) {
        if (isOperand(token)) {
            if (token.equals(")")) {
                // Pop operators until the matching "(".
                while (openParanthesesFound(stack)) {
                    output.add(stack.remove(stack.size() - 1));
                }
                if (!stack.isEmpty()) {
                    // temporarily fix for issue #189
                    stack.remove(stack.size() - 1);
                }
            } else {
                // Pop operators of higher-or-equal precedence before pushing this one.
                while (openParanthesesFound(stack) && !hasHigherPrecedence(token, stack.get(stack.size() - 1))) {
                    output.add(stack.remove(stack.size() - 1));
                }
                stack.add(token);
            }
        } else {
            output.add(token);
        }
    }
    // Flush remaining operators.
    while (!stack.isEmpty()) {
        output.add(stack.remove(stack.size() - 1));
    }
    return output;
}
|
// BETWEEN is emitted after both range bounds in the prefix output.
@Test
public void testBetween() {
    String query = "b between 10 and 15";
    List<String> list = parser.toPrefix(query);
    assertEquals(Arrays.asList("b", "10", "15", "between"), list);
}
|
/**
 * Rebuilds the command topic from a backup: the existing topic is deleted,
 * recreated with the server configuration, and then replayed with the backed-up
 * key/value command records in order.
 *
 * @param backupCommands ordered key/value pairs to write back to the command topic
 */
public void restore(final List<Pair<byte[], byte[]>> backupCommands) {
    // Delete the command topic
    deleteCommandTopicIfExists();

    // Create the command topic
    KsqlInternalTopicUtils.ensureTopic(commandTopicName, serverConfig, topicClient);

    // Restore the commands
    restoreCommandTopic(backupCommands);
}
|
// If the topic-existence check itself fails, restore must propagate the failure
// and never touch the producer.
@SuppressFBWarnings("RV_RETURN_VALUE_IGNORED_NO_SIDE_EFFECT")
@Test
public void shouldThrowIfCannotDescribeTopicExists() {
    // Given:
    doThrow(new RuntimeException("denied")).when(topicClient).isTopicExists(COMMAND_TOPIC_NAME);

    // When:
    final Exception e = assertThrows(
        RuntimeException.class,
        () -> restoreCommandTopic.restore(Collections.singletonList(BACKUP_COMMANDS.get(0))));

    // Then:
    assertThat(e.getMessage(), containsString("denied"));
    verifyNoMoreInteractions(kafkaProducer);
}
|
/**
 * Whether the given string is null or has zero length.
 * Whitespace-only strings are NOT considered empty.
 *
 * @param str string to check; may be null
 * @return {@code true} for null or "" only
 */
public static boolean isNullOrEmpty(String str) {
    return str == null || str.length() == 0;
}
|
// null and "" are empty; non-empty and whitespace-only strings are not.
@Test
public void testIsNullOrEmpty() {
    assertThat(StringUtils.isNullOrEmpty(null)).isTrue();
    assertThat(StringUtils.isNullOrEmpty("abc")).isFalse();
    assertThat(StringUtils.isNullOrEmpty("")).isTrue();
    assertThat(StringUtils.isNullOrEmpty(" ")).isFalse();
}
|
/**
 * Maps an EUE {@link ApiException} onto the corresponding {@link BackgroundException}.
 * <p>
 * Builds a localized message buffer from the comma-separated failure message and any
 * X-UI-ENHANCED-STATUS response headers, maps well-known causes (processing, socket,
 * HTTP, I/O, illegal-state) directly, then handles status-specific cases: 422 maps
 * to a locked error and 429 honors a Retry-After header when one is present.
 * Anything else falls through to the generic HTTP response mapping.
 * <p>
 * Fix: the 429 branch now guards {@code failure.getResponseHeaders()} against null,
 * matching the guard already applied earlier in this method — previously a 429
 * without response headers would throw a NullPointerException here.
 */
@Override
public BackgroundException map(final ApiException failure) {
    final StringBuilder buffer = new StringBuilder();
    if(StringUtils.isNotBlank(failure.getMessage())) {
        for(String s : StringUtils.split(failure.getMessage(), ",")) {
            this.append(buffer, LocaleFactory.localizedString(s, "EUE"));
        }
    }
    if(null != failure.getResponseHeaders()) {
        final List<List<String>> headers = failure.getResponseHeaders().entrySet().stream()
                .filter(e -> "X-UI-ENHANCED-STATUS".equalsIgnoreCase(e.getKey())).map(Map.Entry::getValue).collect(Collectors.toList());
        for(List<String> header : headers) {
            for(String s : header) {
                this.append(buffer, LocaleFactory.localizedString(s, "EUE"));
            }
        }
    }
    for(Throwable cause : ExceptionUtils.getThrowableList(failure)) {
        if(cause instanceof ProcessingException) {
            return new InteroperabilityException(cause.getMessage(), cause);
        }
        if(cause instanceof SocketException) {
            // Map Connection has been shutdown: javax.net.ssl.SSLException: java.net.SocketException: Broken pipe
            return new DefaultSocketExceptionMappingService().map((SocketException) cause);
        }
        if(cause instanceof HttpResponseException) {
            return new DefaultHttpResponseExceptionMappingService().map((HttpResponseException) cause);
        }
        if(cause instanceof IOException) {
            return new DefaultIOExceptionMappingService().map((IOException) cause);
        }
        if(cause instanceof IllegalStateException) {
            // Caused by: ApiException: javax.ws.rs.ProcessingException: java.lang.IllegalStateException: Connection pool shut down
            return new ConnectionCanceledException(cause);
        }
    }
    switch(failure.getCode()) {
        case HttpStatus.SC_UNPROCESSABLE_ENTITY:
            return new LockedException(buffer.toString(), failure);
        case HttpStatus.SC_TOO_MANY_REQUESTS:
            // Response headers may be null; without the guard this dereference threw NPE
            if(null != failure.getResponseHeaders()) {
                final Optional<Map.Entry<String, List<String>>> header
                        = failure.getResponseHeaders().entrySet().stream().filter(e -> HttpHeaders.RETRY_AFTER.equals(e.getKey())).findAny();
                if(header.isPresent()) {
                    final Optional<String> value = header.get().getValue().stream().findAny();
                    return value.map(s -> new RetriableAccessDeniedException(buffer.toString(),
                            Duration.ofSeconds(Long.parseLong(s)), failure)).orElseGet(() -> new RetriableAccessDeniedException(buffer.toString(), failure));
                }
            }
            // No Retry-After available: fall through to the generic mapping below
    }
    return new DefaultHttpResponseExceptionMappingService().map(failure, buffer, failure.getCode());
}
|
// A 429 with a Retry-After header must map to a retriable error carrying the
// header's delay in seconds.
@Test
public void testRetry() {
    final BackgroundException failure = new EueExceptionMappingService().map(new ApiException(429, "",
        Collections.singletonMap("Retry-After", Collections.singletonList("5")), ""));
    assertTrue(failure instanceof RetriableAccessDeniedException);
    assertEquals(5, ((RetriableAccessDeniedException) failure).getDelay().getSeconds());
}
|
/**
 * Reads a single byte from the backing UFS stream.
 * <p>
 * Returns -1 at end of file (either because the logical position has reached the
 * file length or the underlying stream is exhausted). On a successful read the
 * position advances by one and the UFS bytes-read metric is incremented.
 */
@Override
public int read() throws IOException {
    if (mPosition == mLength) { // at end of file
        return -1;
    }
    updateStreamIfNeeded();
    final int value = mUfsInStream.get().read();
    if (value != -1) {
        mPosition++;
        Metrics.BYTES_READ_FROM_UFS.inc(1);
    }
    return value;
}
|
// Reading into the middle of a destination buffer must honor offset/length and
// return sequential file bytes starting at the file's beginning.
@Test
public void readOffset() throws IOException, AlluxioException {
    AlluxioURI ufsPath = getUfsPath();
    createFile(ufsPath, CHUNK_SIZE);
    int start = CHUNK_SIZE / 4;
    int len = CHUNK_SIZE / 2;
    try (FileInStream inStream = getStream(ufsPath)) {
        byte[] res = new byte[CHUNK_SIZE];
        assertEquals(CHUNK_SIZE / 2, inStream.read(res, start, len));
        // Bytes land at res[start..start+len); file content is 0,1,2,...
        for (int i = start; i < start + len; i++) {
            assertEquals(i - start, res[i]);
        }
    }
}
|
/**
 * Tracks Ring of endurance charges from game chat messages.
 * <p>
 * Two message families are handled: the passive-effect message (a charge is
 * consumed, and a low-charge warning may be queued once) and the charge/check
 * messages (the absolute charge count is parsed and stored, resetting the
 * warning flag when charges are back above the passive threshold).
 */
@Subscribe
public void onChatMessage(ChatMessage event)
{
    if (event.getType() != ChatMessageType.GAMEMESSAGE && event.getType() != ChatMessageType.SPAM)
    {
        return;
    }

    String message = event.getMessage();
    if (message.equals("Your Ring of endurance doubles the duration of your stamina potion's effect."))
    {
        Integer charges = getRingOfEnduranceCharges();
        if (charges == null)
        {
            // Charges were never observed via a check/charge message; nothing to decrement.
            log.debug("Ring of endurance charge with no known charges");
            return;
        }

        // subtract the used charge
        charges--;
        setRingOfEnduranceCharges(charges);

        // Warn only once until charges are replenished above the threshold.
        if (!roeWarningSent && charges < RING_OF_ENDURANCE_PASSIVE_EFFECT && energyConfig.ringOfEnduranceChargeMessage())
        {
            String chatMessage = new ChatMessageBuilder()
                .append(ChatColorType.HIGHLIGHT)
                .append("Your Ring of endurance now has less than " + RING_OF_ENDURANCE_PASSIVE_EFFECT + " charges. Add more charges to regain its passive stamina effect.")
                .build();

            chatMessageManager.queue(QueuedMessage.builder()
                .type(ChatMessageType.CONSOLE)
                .runeLiteFormattedMessage(chatMessage)
                .build());

            roeWarningSent = true;
        }
    }
    else if (message.startsWith("Your Ring of endurance is charged with") || message.startsWith("You load your Ring of endurance with"))
    {
        // NOTE(review): Pattern.compile on every message; consider hoisting to a
        // static final field at class level.
        Matcher matcher = Pattern.compile("([0-9,]+)").matcher(message);
        int charges = -1;
        // The LAST number in the message is the charge count (earlier numbers may
        // be part of the loaded-dose phrasing).
        while (matcher.find())
        {
            charges = Integer.parseInt(matcher.group(1).replaceAll(",", ""));
        }
        setRingOfEnduranceCharges(charges);

        if (charges >= RING_OF_ENDURANCE_PASSIVE_EFFECT)
        {
            roeWarningSent = false;
        }
    }
}
|
// A charge-check message must persist the parsed absolute charge count (52).
@Test
public void testCheckMessage()
{
    String checkMessage = "Your Ring of endurance is charged with 52 stamina doses.";
    ChatMessage chatMessage = new ChatMessage(null, ChatMessageType.SPAM, "", checkMessage, "", 0);
    runEnergyPlugin.onChatMessage(chatMessage);
    verify(configManager).setRSProfileConfiguration(RunEnergyConfig.GROUP_NAME, "ringOfEnduranceCharges", 52);
}
|
/**
 * Returns the remote endpoint of this HTTP/2 connection, whose flow controller
 * governs outbound (remote-bound) data.
 */
@Override
public Endpoint<Http2RemoteFlowController> remote() {
    return remoteEndpoint;
}
|
// Incrementing the server's remote-endpoint stream id past MAX_VALUE must be
// handled without overflowing (delegated to the shared overflow check).
@Test
public void serverRemoteIncrementAndGetStreamShouldRespectOverflow() throws Http2Exception {
    incrementAndGetStreamShouldRespectOverflow(server.remote(), MAX_VALUE);
}
|
/**
 * Normalizes a user-entered feed URL into an http(s) URL.
 * <p>
 * Recursively strips podcast-specific pseudo-schemes (feed://, pcast://, pcast:,
 * itpc://, antennapod-subscribe://), unwraps the antennapod subscribe deeplink's
 * {@code url} query parameter, and finally prefixes bare URLs with "http://".
 * <p>
 * Fix: the itpc check previously used {@code startsWith("itpc")} while stripping
 * {@code "itpc://".length()} characters, corrupting "itpc:" URLs without "//";
 * the check now matches the stripped prefix exactly. The deeplink branch also
 * guards against a missing {@code url} parameter instead of throwing NPE.
 *
 * @param url the raw URL as typed or pasted by the user
 * @return a cleaned-up URL starting with http:// or https://
 */
public static String prepareUrl(@NonNull String url) {
    url = url.trim();
    String lowerCaseUrl = url.toLowerCase(Locale.ROOT); // protocol names are case insensitive
    if (lowerCaseUrl.startsWith("feed://")) {
        Log.d(TAG, "Replacing feed:// with http://");
        return prepareUrl(url.substring("feed://".length()));
    } else if (lowerCaseUrl.startsWith("pcast://")) {
        Log.d(TAG, "Removing pcast://");
        return prepareUrl(url.substring("pcast://".length()));
    } else if (lowerCaseUrl.startsWith("pcast:")) {
        Log.d(TAG, "Removing pcast:");
        return prepareUrl(url.substring("pcast:".length()));
    } else if (lowerCaseUrl.startsWith("itpc://")) {
        // Check must match the prefix being stripped ("itpc://", not just "itpc"),
        // otherwise "itpc:" URLs lose arbitrary characters.
        Log.d(TAG, "Replacing itpc:// with http://");
        return prepareUrl(url.substring("itpc://".length()));
    } else if (lowerCaseUrl.startsWith(AP_SUBSCRIBE)) {
        Log.d(TAG, "Removing antennapod-subscribe://");
        return prepareUrl(url.substring(AP_SUBSCRIBE.length()));
    } else if (lowerCaseUrl.contains(AP_SUBSCRIBE_DEEPLINK)) {
        Log.d(TAG, "Removing " + AP_SUBSCRIBE_DEEPLINK);
        String query = Uri.parse(url).getQueryParameter("url");
        if (query == null) {
            // Deeplink without a "url" parameter: nothing to unwrap, return as-is.
            return url;
        }
        try {
            return prepareUrl(URLDecoder.decode(query, "UTF-8"));
        } catch (UnsupportedEncodingException e) {
            return prepareUrl(query);
        }
    } else if (!(lowerCaseUrl.startsWith("http://") || lowerCaseUrl.startsWith("https://"))) {
        Log.d(TAG, "Adding http:// at the beginning of the URL");
        return "http://" + url;
    } else {
        return url;
    }
}
|
// itpc:// wrapping an explicit https URL must be stripped, leaving the inner URL.
@Test
public void testItpcProtocolWithScheme() {
    final String in = "itpc://https://example.com";
    final String out = UrlChecker.prepareUrl(in);
    assertEquals("https://example.com", out);
}
|
/**
 * Returns the action's registered identifier ("readFile"), used to look the
 * action up by name.
 */
@Override
public String getName() {
    return "readFile";
}
|
// The action must report its registered identifier verbatim.
@Test
void testGetName() {
    assertEquals("readFile", readFileAction.getName());
}
|
/**
 * Translates a Java/OSGi {@code Permission} into the equivalent ONOS
 * {@code org.onosproject.security.Permission}.
 * <p>
 * {@code AppPermission} is special-cased with empty actions; every other
 * supported permission type is mapped with its class name, name and actions.
 * Types are tested in a fixed order (same order as the original
 * instanceof chain), and unsupported types yield {@code null}.
 *
 * @param permission the permission to translate
 * @return the ONOS permission, or {@code null} if the type is not supported
 */
public static org.onosproject.security.Permission getOnosPermission(Permission permission) {
    if (permission instanceof AppPermission) {
        // App permissions carry no actions string.
        return new org.onosproject.security.Permission(AppPermission.class.getName(), permission.getName(), "");
    }
    // Ordered table of all supported permission types. Order matters:
    // Class.isInstance mirrors the original instanceof chain.
    Class<?>[] supportedTypes = {
        FilePermission.class,
        SerializablePermission.class,
        NetPermission.class,
        RuntimePermission.class,
        SocketPermission.class,
        SQLPermission.class,
        PropertyPermission.class,
        LoggingPermission.class,
        SSLPermission.class,
        AuthPermission.class,
        PrivateCredentialPermission.class,
        DelegationPermission.class,
        javax.security.auth.kerberos.ServicePermission.class,
        AudioPermission.class,
        AdaptPermission.class,
        BundlePermission.class,
        CapabilityPermission.class,
        PackagePermission.class,
        ServicePermission.class,
        AdminPermission.class,
        // ConfigurationPermission intentionally left unsupported (was
        // commented out in the original mapping).
    };
    for (Class<?> type : supportedTypes) {
        if (type.isInstance(permission)) {
            return new org.onosproject.security.Permission(
                type.getName(), permission.getName(), permission.getActions());
        }
    }
    return null;
}
|
// Mirrors the AppPermission branch of getOnosPermission: an AppPermission maps
// to an ONOS Permission with an empty actions string and the same name.
@Test
public void testGetOnosPermission() {
    org.onosproject.security.Permission result = null;
    if (testJavaPerm instanceof AppPermission) {
        result = new org.onosproject.security.Permission(AppPermission.class.getName(), testJavaPerm.getName(), "");
    }
    assertNotNull(result);
    assertEquals("TESTNAME", result.getName());
}
|
/**
 * Formats the expression with default options built from a predicate that
 * returns {@code false} for every identifier.
 *
 * @param expression the expression to format
 * @return the formatted text
 */
public static String formatExpression(final Expression expression) {
    return formatExpression(expression, FormatOptions.of(s -> false));
}
|
// A struct field named with a reserved word must be back-quoted in the
// formatted type text.
@Test
public void shouldFormatStructWithColumnWithReservedWordName() {
    final SqlStruct struct = SqlStruct.builder()
        .field("RESERVED", SqlTypes.INTEGER)
        .build();
    assertThat(
        ExpressionFormatter.formatExpression(new Type(struct), FormatOptions.none()),
        equalTo("STRUCT<`RESERVED` INTEGER>"));
}
|
/**
 * Streams the provider's entities through the criteria filter, sorts with the
 * effective sort (a configured static sort takes precedence over the caller's),
 * pages the stream, and returns copies of the surviving entities.
 *
 * @param entities entity source; must not be null
 * @param pageable optional paging request
 * @param clazz    entity class handed to the sorter; may be null
 * @param sort     optional caller-supplied sort, overridden by the static sort
 * @return the filtered, sorted and paged entity copies
 */
protected List<T> getEntities(
    @Nonnull final EntityProvider<T, ?> entities,
    @Nullable final Pageable pageable,
    @Nullable final Class<T> clazz,
    @Nullable final Sort sort)
{
    Objects.requireNonNull(entities);
    Stream<? extends T> entityStream = entities
        .stream()
        .filter(this.criteria::evaluate);
    // Static sort wins; fall back to the caller-supplied sort (may be null).
    final Sort sortToUse = this.staticSort.orElse(sort);
    if(sortToUse != null)
    {
        entityStream = EntitySorter.sortEntitiesStream(clazz, sortToUse, entityStream);
    }
    entityStream = this.pageEntityStream(pageable, entityStream);
    // Detach results from the underlying store via the copier.
    final List<T> result = this.copyEntities(entityStream);
    if(LOG.isTraceEnabled())
    {
        LOG.trace("Found {} entries.", result.size());
    }
    return result;
}
|
// Sorting by a property that does not exist on Customer must surface as a
// RuntimeException from getEntities.
@Test
void getEntities_EmptyCriteria_NoPageable_Sortable_IsSortedByInvalidProperty()
{
    final PageableSortableCollectionQuerier<Customer> querier = new PageableSortableCollectionQuerier<>(
        new DummyWorkingCopier<>(),
        new CriteriaSingleNode<>()
    );
    final Pageable unpaged = Pageable.unpaged();
    final Sort invalid = Sort.by("invalid");
    Assertions.assertThrows(
        RuntimeException.class,
        () -> querier.getEntities(DATA_CUSTOMERS_DABC_ABCD, unpaged, Customer.class, invalid));
}
|
/** Clears cached UTM properties from both in-memory state and local storage. */
public static void clearUtm() {
    clearMemoryUtm();
    clearLocalUtm();
}
|
// After clearing, getUtmProperties() must return an empty JSON object.
@Test
public void clearUtm() {
    ChannelUtils.clearUtm();
    JSONObject jsonObject = ChannelUtils.getUtmProperties();
    Assert.assertEquals(0, jsonObject.length());
}
|
/**
 * Renders the user-provided custom message template against a model built from
 * the event's message backlog.
 *
 * @param ctx      notification context of the triggering event
 * @param config   Teams notification configuration (supplies backlog and timezone)
 * @param template the user-defined message template
 * @return the rendered message text
 * @throws PermanentEventNotificationException if the template cannot be rendered
 */
String buildCustomMessage(EventNotificationContext ctx, TeamsEventNotificationConfig config, String template) throws PermanentEventNotificationException {
    final List<MessageSummary> backlog = getMessageBacklog(ctx, config);
    Map<String, Object> model = getCustomMessageModel(ctx, config.type(), backlog, config.timeZone());
    try {
        return templateEngine.transform(template, model);
    } catch (Exception e) {
        String error = "Invalid Custom Message template.";
        LOG.error("{} [{}]", error, e.toString());
        // FIX: keep the full exception as the cause (previously only
        // e.getCause() was preserved, losing the template engine's own stack
        // trace when it had no cause), and separate the message parts with a
        // space instead of concatenating them directly.
        throw new PermanentEventNotificationException(error + " " + e, e);
    }
}
|
// A plain-text template with no placeholders must render to a non-empty string.
@Test
public void buildCustomMessage() throws PermanentEventNotificationException {
    String expectedCustomMessage = teamsEventNotification.buildCustomMessage(eventNotificationContext, teamsEventNotificationConfig, "test");
    assertThat(expectedCustomMessage).isNotEmpty();
}
|
/**
 * Snapshots the current collector readings and records them on this metrics
 * instance: consumption/production rates, consumption/byte totals, per-query
 * consumption rates and the error rate.
 */
public void updateMetrics() {
    recordMessagesConsumed(metricCollectors.currentConsumptionRate());
    recordTotalMessagesConsumed(metricCollectors.totalMessageConsumption());
    recordTotalBytesConsumed(metricCollectors.totalBytesConsumption());
    recordMessagesProduced(metricCollectors.currentProductionRate());
    recordMessageConsumptionByQueryStats(metricCollectors.currentConsumptionRateByQuery());
    recordErrorRate(metricCollectors.currentErrorRate());
}
|
// Two consumer groups with a 100x consumption difference: the min statistic
// (checked under both the current and the legacy metric name) must track the
// smaller group. Note numMessagesConsumed / 100 uses integer division.
@Test
public void shouldRecordMinMessagesConsumedByQuery() {
    final int numMessagesConsumed = 500;
    consumeMessages(numMessagesConsumed, "group1");
    consumeMessages(numMessagesConsumed * 100, "group2");
    engineMetrics.updateMetrics();
    final double value = getMetricValue("messages-consumed-min");
    final double legacyValue = getMetricValueLegacy("messages-consumed-min");
    assertThat(Math.floor(value), closeTo(numMessagesConsumed / 100, 0.01));
    assertThat(Math.floor(legacyValue), closeTo(numMessagesConsumed / 100, 0.01));
}
|
/**
 * Proceeds with the intercepted invocation and decorates the returned RxJava 3
 * type with retry semantics from the given {@code Retry} configuration.
 *
 * @param proceedingJoinPoint the intercepted invocation
 * @param retry               retry configuration to apply to the reactive result
 * @param methodName          name of the intercepted method (not used here)
 * @return the decorated reactive return value
 * @throws Throwable if the join point itself throws
 */
@Override
public Object handle(ProceedingJoinPoint proceedingJoinPoint, Retry retry, String methodName)
    throws Throwable {
    RetryTransformer<?> retryTransformer = RetryTransformer.of(retry);
    Object returnValue = proceedingJoinPoint.proceed();
    return executeRxJava3Aspect(retryTransformer, returnValue);
}
|
// Both Single and Flowable return values must be accepted and decorated
// (non-null result) by the retry aspect.
@Test
public void testRxTypes() throws Throwable {
    Retry retry = Retry.ofDefaults("test");
    when(proceedingJoinPoint.proceed()).thenReturn(Single.just("Test"));
    assertThat(rxJava3RetryAspectExt.handle(proceedingJoinPoint, retry, "testMethod"))
        .isNotNull();
    when(proceedingJoinPoint.proceed()).thenReturn(Flowable.just("Test"));
    assertThat(rxJava3RetryAspectExt.handle(proceedingJoinPoint, retry, "testMethod"))
        .isNotNull();
}
|
/**
 * Re-registers this entry's current-directory listener when the parent job
 * metadata changes: removes the listener from the previous parent (if any)
 * and adds it to the new one (if any). The previous parent is captured
 * before delegating to super so the swap is ordered correctly.
 */
@Override
public void setParentJobMeta( JobMeta parentJobMeta ) {
    JobMeta previous = getParentJobMeta();
    super.setParentJobMeta( parentJobMeta );
    if ( previous != null ) {
        previous.removeCurrentDirectoryChangedListener( this.dirListener );
    }
    if ( parentJobMeta != null ) {
        parentJobMeta.addCurrentDirectoryChangedListener( this.dirListener );
    }
}
|
// Setting the parent job meta to a value and then back to null must register
// the current-directory listener exactly once and unregister it exactly once.
@Test
public void testCurrDirListener() throws Exception {
    try ( MockedConstruction<JobMeta> jobMetaMockedConstruction = mockConstruction( JobMeta.class );
        MockedConstruction<CurrentDirectoryResolver> currentDirectoryResolverMockedConstruction = mockConstruction(
            CurrentDirectoryResolver.class, ( mock, context ) ->
            {
                doCallRealMethod().when( mock ).normalizeSlashes( anyString() );
                doReturn( space ).when( mock ).resolveCurrentDirectory( any( ObjectLocationSpecificationMethod.class ),
                    any( VariableSpace.class ), nullable( Repository.class ), nullable( Job.class ), anyString() );
            } ) ) {
        JobMeta meta = mock( JobMeta.class );
        JobEntryJob jej = getJej();
        jej.setParentJobMeta( null );
        jej.setParentJobMeta( meta );
        jej.setParentJobMeta( null );
        verify( meta, times( 1 ) ).addCurrentDirectoryChangedListener( any() );
        verify( meta, times( 1 ) ).removeCurrentDirectoryChangedListener( any() );
    }
}
|
/**
 * Hides the perspective with the given id by flipping its visibility flag.
 *
 * @param perspectiveId id of the perspective to hide
 */
public void hidePerspective( final String perspectiveId ) {
    changePerspectiveVisibility( perspectiveId, true );
}
|
// Hiding a perspective must mark it hidden through its perspective manager.
@Test
public void hidePerspective() {
    SpoonPerspectiveManager.PerspectiveManager perspectiveManager = perspectiveManagerMap.get( perspective );
    spoonPerspectiveManager.hidePerspective( perspective.getId() );
    verify( perspectiveManager ).setPerspectiveHidden( PERSPECTIVE_NAME, true );
}
|
/** Forwards errors from the leader-election driver to the registered leader contender. */
@Override
public void onError(Throwable t) {
    forwardErrorToLeaderContender(t);
}
|
// An error reported to the leader election service must reach every registered
// contender, wrapped with the original exception as cause.
@Test
void testErrorForwarding() throws Exception {
    new Context() {
        {
            runTestWithSynchronousEventHandling(
                () -> {
                    final Exception testException = new Exception("test leader exception");
                    leaderElectionService.onError(testException);
                    applyToBothContenderContexts(
                        contenderContext -> {
                            assertThat(contenderContext.contender.getError())
                                .isNotNull()
                                .hasCause(testException);
                            contenderContext.contender.clearError();
                        });
                });
        }
    };
}
|
/**
 * Validates a consumer/producer group name: it must be non-blank, no longer
 * than the configured maximum, and contain only the allowed characters.
 *
 * @param group the group name to validate
 * @throws MQClientException if the name is blank, too long, or contains
 *         characters outside {@code ^[%|a-zA-Z0-9_-]+$}
 */
public static void checkGroup(String group) throws MQClientException {
    if (UtilAll.isBlank(group)) {
        throw new MQClientException("the specified group is blank", null);
    }
    if (group.length() > CHARACTER_MAX_LENGTH) {
        throw new MQClientException("the specified group is longer than group max length 255.", null);
    }
    if (!isTopicOrGroupIllegal(group)) {
        return;
    }
    String message = String.format(
        "the specified group[%s] contains illegal characters, allowing only %s", group,
        "^[%|a-zA-Z0-9_-]+$");
    throw new MQClientException(message, null);
}
|
// checkGroup(null) must be rejected with the "group is blank" error message.
@Test
public void testGroupNameBlank() {
    try {
        Validators.checkGroup(null);
        fail("excepted MQClientException for group name is blank");
    } catch (MQClientException e) {
        assertThat(e.getErrorMessage()).isEqualTo("the specified group is blank");
    }
}
|
/** Hash code derived solely from the label strings. */
@Override
public int hashCode() {
    return Objects.hash(labelStrings);
}
|
// Two MultiLabels generated from the same string must be equal and agree on
// their hash codes (equals/hashCode contract).
@Test
public void testEqualsAndHashCode() {
    MultiLabel a = new MultiLabelFactory().generateOutput("a");
    MultiLabel b = new MultiLabelFactory().generateOutput("a");
    assertEquals(a, b);
    assertEquals(a.hashCode(), b.hashCode());
}
|
/**
 * Looks up the latest active release for the given app/cluster/namespace.
 * The client notification messages are not consulted in this implementation.
 */
@Override
protected Release findLatestActiveRelease(String configAppId, String configClusterName, String configNamespace,
                                          ApolloNotificationMessages clientMessages) {
    return releaseService.findLatestActiveRelease(configAppId, configClusterName,
        configNamespace);
}
|
// loadConfig must delegate exactly once to the release service and return the
// release it provides.
@Test
public void testLoadConfig() throws Exception {
    when(releaseService.findLatestActiveRelease(someConfigAppId, someClusterName, defaultNamespaceName))
        .thenReturn(someRelease);
    Release release = configService
        .loadConfig(someClientAppId, someClientIp, someClientLabel, someConfigAppId, someClusterName, defaultNamespaceName, someDataCenter,
            someNotificationMessages);
    verify(releaseService, times(1)).findLatestActiveRelease(someConfigAppId, someClusterName, defaultNamespaceName);
    assertEquals(someRelease, release);
}
|
/**
 * Resolves the recipients matching this exchange's filters and stores them in
 * the recipient-list header of the exchange's message.
 */
public void prepareExchange(Exchange exchange) {
    Message message = exchange.getMessage();
    String recipients = matchFilters(exchange);
    message.setHeader(RECIPIENT_LIST_HEADER, recipients);
}
|
// The endpoints matched by the filter service must be written into the
// recipient-list header exactly once.
@Test
void testPrepareExchange() {
    when(exchange.getMessage()).thenReturn(message);
    when(filterService.getMatchingEndpointsForExchangeByChannel(exchange, TEST_CHANNEL, true, false))
        .thenReturn(MOCK_ENDPOINT);
    processor.prepareExchange(exchange);
    verify(message, new Times(1)).setHeader(RECIPIENT_LIST_HEADER, MOCK_ENDPOINT);
}
|
/**
 * Persists the serialized application id to ZooKeeper under this session's
 * application-id path.
 *
 * @param id the application id to write
 * @throws IllegalArgumentException if the id belongs to a different tenant
 *         than this client's tenant
 */
public void writeApplicationId(ApplicationId id) {
    if ( ! id.tenant().equals(tenantName))
        throw new IllegalArgumentException("Cannot write application id '" + id + "' for tenant '" + tenantName + "'");
    curator.set(applicationIdPath(), Utf8.toBytes(id.serializedForm()));
}
|
// Writing an application id must create the session's application-id node in
// ZooKeeper holding the id's serialized form.
@Test
public void require_that_application_id_is_written_to_zk() {
    ApplicationId id = new ApplicationId.Builder()
        .tenant(tenantName)
        .applicationName("foo")
        .instanceName("bim")
        .build();
    int sessionId = 3;
    SessionZooKeeperClient zkc = createSessionZKClient(sessionId);
    zkc.writeApplicationId(id);
    Path path = sessionPath(sessionId).append(APPLICATION_ID_PATH);
    assertTrue(curator.exists(path));
    assertEquals(id.serializedForm(), Utf8.toString(curator.getData(path).get()));
}
|
/**
 * Inserts or updates the given user account in the user-account collection,
 * keyed by user id.
 *
 * @param userAccount the account to persist
 * @return the same account instance that was passed in
 */
@Override
public UserAccount upsertDb(final UserAccount userAccount) {
    final String userId = userAccount.getUserId();
    // Match on user id; on a miss the upsert option inserts a new document.
    final Document filter = new Document(USER_ID, userId);
    final Document replacementFields = new Document(USER_ID, userId)
        .append(USER_NAME, userAccount.getUserName())
        .append(ADD_INFO, userAccount.getAdditionalInfo());
    db.getCollection(CachingConstants.USER_ACCOUNT).updateOne(
        filter,
        new Document("$set", replacementFields),
        new UpdateOptions().upsert(true)
    );
    return userAccount;
}
|
// With the collection mocked, an upsert must complete without throwing.
@Test
void upsertDb() {
    MongoCollection<Document> mongoCollection = mock(MongoCollection.class);
    when(db.getCollection(CachingConstants.USER_ACCOUNT)).thenReturn(mongoCollection);
    assertDoesNotThrow(()-> {mongoDb.upsertDb(userAccount);});
}
|
/** SQL {@code random()}/{@code rand()}: a pseudo-random double in [0, 1) from the current thread's generator. */
@Description("a pseudo-random value")
@ScalarFunction(alias = "rand", deterministic = false)
@SqlType(StandardTypes.DOUBLE)
public static double random()
{
    return ThreadLocalRandom.current().nextDouble();
}
|
// random()/rand() evaluate without error in all engines (values are
// non-deterministic so no exact result is asserted); bounded variants adopt
// the bound's type, and non-positive bounds are rejected.
@Test
public void testRandom()
{
    // random is non-deterministic
    functionAssertions.tryEvaluateWithAll("rand()", DOUBLE, TEST_SESSION);
    functionAssertions.tryEvaluateWithAll("random()", DOUBLE, TEST_SESSION);
    functionAssertions.tryEvaluateWithAll("rand(1000)", INTEGER, TEST_SESSION);
    functionAssertions.tryEvaluateWithAll("random(2000)", INTEGER, TEST_SESSION);
    functionAssertions.tryEvaluateWithAll("random(3000000000)", BIGINT, TEST_SESSION);
    assertInvalidFunction("rand(-1)", "bound must be positive");
    assertInvalidFunction("rand(-3000000000)", "bound must be positive");
}
|
/**
 * Remoting entry point: delegates to the channel-based overload, passing
 * {@code true} for its boolean flag (see that overload for its meaning).
 */
@Override
public RemotingCommand processRequest(final ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    return this.processRequest(ctx.channel(), request, true);
}
|
// Acks with offsets outside the queue's [min, max] range must be rejected
// with NO_MESSAGE and an "offset is illegal" remark.
@Test
public void testSingleAck_OffsetCheck() throws RemotingCommandException {
    {
        // Offset below the queue minimum.
        AckMessageRequestHeader requestHeader = new AckMessageRequestHeader();
        requestHeader.setTopic(topic);
        requestHeader.setQueueId(0);
        requestHeader.setOffset(MIN_OFFSET_IN_QUEUE - 1);
        //requestHeader.setOffset(maxOffsetInQueue + 1);
        RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.ACK_MESSAGE, requestHeader);
        request.makeCustomHeaderToNet();
        RemotingCommand response = ackMessageProcessor.processRequest(handlerContext, request);
        assertThat(response.getCode()).isEqualTo(ResponseCode.NO_MESSAGE);
        assertThat(response.getRemark()).contains("offset is illegal");
    }
    {
        // Offset above the queue maximum.
        AckMessageRequestHeader requestHeader = new AckMessageRequestHeader();
        requestHeader.setTopic(topic);
        requestHeader.setQueueId(0);
        //requestHeader.setOffset(minOffsetInQueue - 1);
        requestHeader.setOffset(MAX_OFFSET_IN_QUEUE + 1);
        RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.ACK_MESSAGE, requestHeader);
        request.makeCustomHeaderToNet();
        RemotingCommand response = ackMessageProcessor.processRequest(handlerContext, request);
        assertThat(response.getCode()).isEqualTo(ResponseCode.NO_MESSAGE);
        assertThat(response.getRemark()).contains("offset is illegal");
    }
}
|
/** Returns the service info carried by this request. */
public ServiceInfo getServiceInfo() {
    return serviceInfo;
}
|
// A JSON payload must deserialize into a NotifySubscriberRequest whose nested
// ServiceInfo yields the expected "group@@service" key.
@Test
void testDeserialize() throws JsonProcessingException {
    String json = "{\"headers\":{},\"namespace\":\"namespace\",\"serviceName\":\"service\",\"groupName\":\"group\","
        + "\"serviceInfo\":{\"name\":\"service\",\"groupName\":\"group\",\"cacheMillis\":1000,\"hosts\":[],"
        + "\"lastRefTime\":0,\"checksum\":\"\",\"allIPs\":false,\"reachProtectionThreshold\":false,"
        + "\"valid\":true},\"module\":\"naming\"}";
    NotifySubscriberRequest actual = mapper.readValue(json, NotifySubscriberRequest.class);
    checkRequestBasedInfo(actual);
    assertEquals(GROUP + "@@" + SERVICE, actual.getServiceInfo().getKey());
}
|
/**
 * Parses a task ID from its string form ({@code task_<jt>_<job>_<type>_<id>}).
 *
 * @param str the task ID string; {@code null} yields {@code null}
 * @return the parsed TaskID, or {@code null} for null input
 * @throws IllegalArgumentException if the string does not match the task ID pattern
 */
public static TaskID forName(String str)
    throws IllegalArgumentException {
    if (str == null) {
        return null;
    }
    Matcher m = taskIdPattern.matcher(str);
    if (!m.matches()) {
        // Include the matcher state to aid debugging of malformed IDs.
        String exceptionMsg = "TaskId string : " + str + " is not properly formed" +
            "\nReason: " + m.toString();
        throw new IllegalArgumentException(exceptionMsg);
    }
    return new org.apache.hadoop.mapred.TaskID(m.group(1),
        Integer.parseInt(m.group(2)),
        CharTaskTypeMaps.getTaskType(m.group(3).charAt(0)),
        Integer.parseInt(m.group(4)));
}
|
/**
 * Verifies that TaskID.forName round-trips well-formed task ID strings
 * (zero-padding the task number on re-serialization) and rejects malformed
 * ones with IllegalArgumentException.
 */
@Test
public void testForName() {
    // Well-formed IDs parse and re-serialize with zero-padded task numbers.
    assertEquals("The forName() method did not parse the task ID string "
        + "correctly", "task_1_0001_m_000000",
        TaskID.forName("task_1_0001_m_000").toString());
    assertEquals("The forName() method did not parse the task ID string "
        + "correctly", "task_23_0002_r_000001",
        TaskID.forName("task_23_0002_r_0001").toString());
    assertEquals("The forName() method did not parse the task ID string "
        + "correctly", "task_345_0003_s_000002",
        TaskID.forName("task_345_0003_s_00002").toString());
    assertEquals("The forName() method did not parse the task ID string "
        + "correctly", "task_6789_0004_c_000003",
        TaskID.forName("task_6789_0004_c_000003").toString());
    assertEquals("The forName() method did not parse the task ID string "
        + "correctly", "task_12345_0005_t_4000000",
        TaskID.forName("task_12345_0005_t_4000000").toString());
    // Malformed IDs must be rejected. The original test repeated nine
    // near-identical try/catch blocks (one of them verbatim twice); this
    // data-driven loop covers the same cases without the duplication.
    String[] invalidIds = {
        "tisk_12345_0005_t_4000000",   // wrong prefix
        "task_abc_0005_t_4000000",     // non-numeric jobtracker id
        "task_12345_xyz_t_4000000",    // non-numeric job number
        "task_12345_0005_x_4000000",   // unknown task type character
        "task_12345_0005_t_jkl",       // non-numeric task number
        "task_12345_0005_t",           // missing task number
        "task_12345_0005_4000000",     // missing task type
        "task_12345_t_4000000",        // missing job number
        "12345_0005_t_4000000",        // missing "task" prefix
    };
    for (String invalid : invalidIds) {
        try {
            TaskID.forName(invalid);
            fail("The forName() method parsed an invalid job ID: " + invalid);
        } catch (IllegalArgumentException ex) {
            // Expected
        }
    }
}
|
/**
 * Parses command-line arguments and runs the formatter.
 *
 * @param args the raw command-line arguments
 * @return the process exit code (0 on success; file/stdin formatting defines
 *         other codes)
 * @throws UsageException if {@code --help} was requested
 */
public int format(String... args) throws UsageException {
    CommandLineOptions parameters = processArgs(args);
    if (parameters.version()) {
        // --version prints to stderr and exits successfully.
        errWriter.println(versionString());
        return 0;
    }
    if (parameters.help()) {
        throw new UsageException();
    }
    JavaFormatterOptions options =
        JavaFormatterOptions.builder()
            .style(parameters.aosp() ? Style.AOSP : Style.GOOGLE)
            .formatJavadoc(parameters.formatJavadoc())
            .build();
    if (parameters.stdin()) {
        return formatStdin(parameters, options);
    } else {
        return formatFiles(parameters, options);
    }
}
|
// Dry run (-n) over stdin input that needs reformatting: "<stdin>" is listed
// on stdout, stderr stays empty, and the asserted exit code is 0.
@Test
public void dryRunStdinChanged() throws Exception {
    StringWriter out = new StringWriter();
    StringWriter err = new StringWriter();
    String input = "class Test {\n}\n";
    Main main =
        new Main(
            new PrintWriter(out, true),
            new PrintWriter(err, true),
            new ByteArrayInputStream(input.getBytes(UTF_8)));
    assertThat(main.format("-n", "-")).isEqualTo(0);
    assertThat(out.toString()).isEqualTo("<stdin>" + System.lineSeparator());
    assertThat(err.toString()).isEmpty();
}
|
/**
 * Sets the registry check flag.
 *
 * @param check whether registry checking is enabled
 * @return this builder for chaining
 */
public RegistryBuilder isCheck(Boolean check) {
    this.check = check;
    return getThis();
}
|
// The check flag set on the builder must survive into the built registry config.
@Test
void isCheck() {
    RegistryBuilder builder = new RegistryBuilder();
    builder.isCheck(true);
    Assertions.assertTrue(builder.build().isCheck());
}
|
/** Verifies the OAuth2 state using the default state parameter name. */
public void verifyState(HttpRequest request, HttpResponse response, OAuth2IdentityProvider provider) {
    verifyState(request, response, provider, DEFAULT_STATE_PARAMETER_NAME);
}
|
// A state parameter matching the hashed OAUTHSTATE cookie must verify, and the
// cookie must then be cleared (null value, max-age 0, path "/").
@Test
public void verify_state() {
    String state = "state";
    when(request.getCookies()).thenReturn(new Cookie[]{wrapCookie("OAUTHSTATE", sha256Hex(state))});
    when(request.getParameter("aStateParameter")).thenReturn(state);
    underTest.verifyState(request, response, identityProvider, "aStateParameter");
    verify(response).addCookie(cookieArgumentCaptor.capture());
    Cookie updatedCookie = cookieArgumentCaptor.getValue();
    assertThat(updatedCookie.getName()).isEqualTo("OAUTHSTATE");
    assertThat(updatedCookie.getValue()).isNull();
    assertThat(updatedCookie.getPath()).isEqualTo("/");
    assertThat(updatedCookie.getMaxAge()).isZero();
}
|
/**
 * Returns the length of the time range in whole seconds, or 0 when either
 * endpoint is missing or the endpoints form an invalid interval.
 *
 * @param timeRange the range to measure
 * @return seconds between from and to, or 0 if not computable
 */
public static int toSeconds(TimeRange timeRange) {
    if (timeRange.getFrom() != null && timeRange.getTo() != null) {
        try {
            return Seconds.secondsBetween(timeRange.getFrom(), timeRange.getTo()).getSeconds();
        } catch (IllegalArgumentException ignored) {
            // Invalid interval — fall through to the zero default.
        }
    }
    return 0;
}
|
// A time range missing either endpoint (null "to" or null "from") must yield 0.
@Test
public void toSecondsHandlesIncompleteTimeRange() throws Exception {
    // Missing "to" endpoint.
    assertThat(TimeRanges.toSeconds(new TimeRange() {
        @Override
        public String type() {
            return AbsoluteRange.ABSOLUTE;
        }
        @Override
        public DateTime getFrom() {
            return DateTime.now(DateTimeZone.UTC);
        }
        @Override
        public DateTime getTo() {
            return null;
        }
        @Override
        public TimeRange withReferenceDate(DateTime now) {
            return this;
        }
    })).isEqualTo(0);
    // Missing "from" endpoint.
    assertThat(TimeRanges.toSeconds(new TimeRange() {
        @Override
        public String type() {
            return AbsoluteRange.ABSOLUTE;
        }
        @Override
        public DateTime getFrom() {
            return null;
        }
        @Override
        public DateTime getTo() {
            return DateTime.now(DateTimeZone.UTC);
        }
        @Override
        public TimeRange withReferenceDate(DateTime now) {
            return this;
        }
    })).isEqualTo(0);
}
|
/**
 * Extracts the entries whose keys start with the given prefix, returning them
 * with the prefix (including its trailing dot) stripped from the keys.
 *
 * @param batchConfigMap the source configuration map
 * @param prefix         the key prefix; a trailing dot separator is appended
 *                       if absent
 * @return a new map of suffix-key to value for all matching entries
 */
public static Map<String, String> getConfigMapWithPrefix(Map<String, String> batchConfigMap, String prefix) {
    Map<String, String> props = new HashMap<>();
    if (!prefix.endsWith(DOT_SEPARATOR)) {
        prefix = prefix + DOT_SEPARATOR;
    }
    for (Map.Entry<String, String> entry : batchConfigMap.entrySet()) {
        String configKey = entry.getKey();
        if (configKey.startsWith(prefix)) {
            // FIX: strip the prefix literally with substring. The previous
            // configKey.split(prefix, 2) treated the prefix as a REGEX, so a
            // prefix containing metacharacters (e.g. "(") would throw
            // PatternSyntaxException, and the "." separator matched any char.
            props.put(configKey.substring(prefix.length()), entry.getValue());
        }
    }
    return props;
}
|
// Both "k1" and "k1." prefixes must select the two dotted keys ("k1.k2",
// "k1.k3") and exclude the non-prefixed ones ("k1", "k4").
@Test
public void testGetConfigMapWithPrefix() {
    Map<String, String> testMap = ImmutableMap.of("k1", "v1", "k1.k2", "v2", "k1.k3", "v3", "k4", "v4");
    Assert.assertEquals(2, IngestionConfigUtils.getConfigMapWithPrefix(testMap, "k1").size());
    Assert.assertEquals(2, IngestionConfigUtils.getConfigMapWithPrefix(testMap, "k1.").size());
}
|
/**
 * Sets the grace period (in seconds) passed to the stop command via the
 * "time" argument.
 *
 * @param value grace period in seconds
 * @return this command for chaining
 */
public DockerStopCommand setGracePeriod(int value) {
    super.addCommandArguments("time", Integer.toString(value));
    return this;
}
|
// After setting the grace period, the command map must hold exactly the stop
// command, the container name, and the stringified grace period.
@Test
public void testSetGracePeriod() throws Exception {
    dockerStopCommand.setGracePeriod(GRACE_PERIOD);
    assertEquals("stop", StringUtils.join(",",
        dockerStopCommand.getDockerCommandWithArguments()
            .get("docker-command")));
    assertEquals("foo", StringUtils.join(",",
        dockerStopCommand.getDockerCommandWithArguments().get("name")));
    assertEquals("10", StringUtils.join(",",
        dockerStopCommand.getDockerCommandWithArguments().get("time")));
    assertEquals(3, dockerStopCommand.getDockerCommandWithArguments().size());
}
|
/** Returns all admin users with the given status via the user mapper. */
@Override
public List<AdminUserDO> getUserListByStatus(Integer status) {
    return userMapper.selectListByStatus(status);
}
|
// Only users whose status matches the queried value must be returned.
@Test
public void testGetUserListByStatus() {
    // Mock data: a disabled user that should match the query.
    AdminUserDO user = randomAdminUserDO(o -> o.setStatus(CommonStatusEnum.DISABLE.getStatus()));
    userMapper.insert(user);
    // Insert a user whose status does not match.
    userMapper.insert(randomAdminUserDO(o -> o.setStatus(CommonStatusEnum.ENABLE.getStatus())));
    // Prepare the parameter.
    Integer status = CommonStatusEnum.DISABLE.getStatus();
    // Invoke.
    List<AdminUserDO> result = userService.getUserListByStatus(status);
    // Assert.
    assertEquals(1, result.size());
    assertEquals(user, result.get(0));
}
|
/**
 * Deletes the node at the given key (including its children) if it exists.
 * Any ZooKeeper exception is routed to the shared exception handler instead
 * of propagating.
 */
@Override
public void delete(final String key) {
    try {
        if (isExisted(key)) {
            client.delete().deletingChildrenIfNeeded().forPath(key);
        }
        // CHECKSTYLE:OFF
    } catch (final Exception ex) {
        // CHECKSTYLE:ON
        ZookeeperExceptionHandler.handleException(ex);
    }
}
|
// Deleting a non-existent key must not invoke the client's delete operation.
@Test
void assertDeleteNotExistKey() {
    REPOSITORY.delete("/test/children/1");
    verify(client, times(0)).delete();
}
|
/**
 * Compares the fast stats of two partitions.
 *
 * @param oldPart the previous partition; may be null
 * @param newPart the new partition; may be null
 * @return true only when both partitions have parameters and every fast stat
 *         is present in both with the same numeric value; false otherwise
 *         (a false result signals that stats need recalculation)
 */
public static boolean isFastStatsSame(Partition oldPart, Partition newPart) {
    // Missing partitions or parameters force a stats recalculation.
    if (oldPart == null || !oldPart.isSetParameters() || newPart == null || !newPart.isSetParameters()) {
        return false;
    }
    for (String stat : StatsSetupConst.FAST_STATS) {
        boolean bothPresent = oldPart.getParameters().containsKey(stat)
            && newPart.getParameters().containsKey(stat);
        if (!bothPresent) {
            return false;
        }
        long oldStat = Long.parseLong(oldPart.getParameters().get(stat));
        String newStat = newPart.getParameters().get(stat);
        if (newStat == null || oldStat != Long.parseLong(newStat)) {
            return false;
        }
    }
    return true;
}
|
// Any null partition argument (either side, or both) must yield false.
@Test
public void isFastStatsSameWithNullPartitions() {
    Partition partition = new Partition();
    assertFalse(MetaStoreServerUtils.isFastStatsSame(null, null));
    assertFalse(MetaStoreServerUtils.isFastStatsSame(null, partition));
    assertFalse(MetaStoreServerUtils.isFastStatsSame(partition, null));
}
|
/** Returns the underlying database's table types wrapped in the sharding-aware result set. */
@Override
public ResultSet getTableTypes() throws SQLException {
    return createDatabaseMetaDataResultSet(getDatabaseMetaData().getTableTypes());
}
|
// getTableTypes must wrap the delegate result set in DatabaseMetaDataResultSet.
@Test
void assertGetTableTypes() throws SQLException {
    when(databaseMetaData.getTableTypes()).thenReturn(resultSet);
    assertThat(shardingSphereDatabaseMetaData.getTableTypes(), instanceOf(DatabaseMetaDataResultSet.class));
}
|
/**
 * Decodes a raw message payload using the configured charset and parses it,
 * timing the parse with the decode timer. The sender address is passed to the
 * parser when the raw message carries a resolvable remote address, otherwise
 * null.
 *
 * @param rawMessage the raw message to decode; payload bytes are interpreted
 *                   with {@code charset}
 * @return the parsed message, or null per the parser's contract
 */
@Nullable
@Override
public Message decode(@Nonnull RawMessage rawMessage) {
    final String msg = new String(rawMessage.getPayload(), charset);
    try (Timer.Context ignored = this.decodeTime.time()) {
        final ResolvableInetSocketAddress address = rawMessage.getRemoteAddress();
        final InetSocketAddress remoteAddress;
        if (address == null) {
            remoteAddress = null;
        } else {
            remoteAddress = address.getInetSocketAddress();
        }
        return parse(msg, remoteAddress == null ? null : remoteAddress.getAddress(), rawMessage.getTimestamp());
    }
}
|
// With "timezone" configured as MST: messages lacking zone info adopt MST,
// while messages carrying explicit UTC or offset timestamps keep their zone.
@Test
public void testDefaultTimezoneConfig() {
    when(configuration.getString("timezone")).thenReturn("MST");
    SyslogCodec codec = new SyslogCodec(configuration, metricRegistry, messageFactory);
    final Message msgWithoutTimezone = codec.decode(buildRawMessage(UNSTRUCTURED));
    final Message msgWithUTCTimezone = codec.decode(buildRawMessage(STRUCTURED));
    final Message msgWithTimezoneOffset = codec.decode(buildRawMessage(STRUCTURED_ISSUE_845_EMPTY));
    assertEquals(new DateTime(YEAR + "-10-21T12:09:37", DateTimeZone.forID("MST")).toDate(), ((DateTime) msgWithoutTimezone.getField("timestamp")).toDate());
    assertEquals(new DateTime("2012-12-25T22:14:15.003Z", DateTimeZone.UTC), ((DateTime) msgWithUTCTimezone.getField("timestamp")).withZone(DateTimeZone.UTC));
    assertEquals(new DateTime("2015-01-11T16:35:21.335797", DateTimeZone.forOffsetHours(1)).toDate(), ((DateTime) msgWithTimezoneOffset.getField("timestamp")).toDate());
}
|
/**
 * Gathers the scattered batch responses back into a single response for the
 * original request.
 * <p>
 * Successful entities, per-key failures and unmapped keys are all merged into
 * one response data map; failures and unmapped keys become per-key error
 * entries. The callback always receives a success: if at least one scattered
 * request succeeded, the first success supplies the response metadata;
 * otherwise a synthetic 200 response is built whose body carries the per-key
 * errors.
 *
 * @param request          the original (un-scattered) batch request
 * @param protocolVersion  rest.li protocol version used for serialization
 * @param successResponses scattered requests that succeeded, with responses
 * @param failureResponses scattered requests that failed, with their errors
 * @param unmappedKeys     keys for which no host could be resolved; may be null
 * @param callback         receives the gathered response
 */
@Override
@SuppressWarnings("unchecked")
public <K, T> void onAllResponsesReceived(Request<T> request, ProtocolVersion protocolVersion,
    Map<RequestInfo, Response<T>> successResponses,
    Map<RequestInfo, Throwable> failureResponses,
    Map<Integer, Set<K>> unmappedKeys,
    Callback<Response<T>> callback)
{
    BatchRequest<T> batchRequest = safeCastRequest(request);
    // initialize an empty dataMap for final response entity
    DataMap gatheredResponseDataMap = initializeResponseContainer();
    // gather success response
    successResponses.forEach((req, response) -> gatherResponse(gatheredResponseDataMap, req, response.getEntity()));
    // gather failure response
    failureResponses.forEach((req, e) ->
    {
        // Every key of a failed scattered request is marked failed with that
        // request's exception.
        Set<K> failedKeys = (Set<K>)((BatchRequest<T>)req.getRequest()).getObjectIds();
        gatherException(gatheredResponseDataMap, failedKeys, e, protocolVersion);
    });
    // gather unmapped keys
    if (unmappedKeys != null && !unmappedKeys.isEmpty())
    {
        Set<K> unmapped = unmappedKeys.values().stream().flatMap(Collection::stream).collect(Collectors.toSet());
        gatherException(gatheredResponseDataMap, unmapped,
            new RestLiScatterGatherException("Unable to find a host for keys :" + unmapped),
            protocolVersion);
    }
    T gatheredResponse = constructResponseFromDataMap(batchRequest, protocolVersion, gatheredResponseDataMap);
    if (!successResponses.isEmpty())
    {
        Response<T> firstResponse = successResponses.values().iterator().next();
        callback.onSuccess(new ResponseImpl<>(firstResponse, gatheredResponse));
    }
    else
    {
        // all scattered requests are failing, we still return 200 for original request, but body will contain
        // failed response for each key.
        callback.onSuccess(new ResponseImpl<>(HttpStatus.S_200_OK.getCode(),
            Collections.emptyMap(), Collections.emptyList(), gatheredResponse, null));
    }
}
|
// One scattered success (key 1 ok, key 2 a 404 error), one scattered failure
// (key 3) and one unmapped key (key 4) must gather into a single 200 response
// with one result and three per-key errors.
@Test(dataProvider = TestConstants.RESTLI_PROTOCOL_1_2_PREFIX + "protocol")
public void testGatherBatchResponse(ProtocolVersion version)
{
    Map<RequestInfo, Response<BatchResponse<TestRecord>>> successResponses = new HashMap<>();
    successResponses.put(
        new RequestInfo(createBatchGetRequest(1L, 2L), getTargetHostRequestContext(_host1URI)),
        createBatchResponse(Collections.singleton(1L), Collections.singleton(2L)));
    Map<RequestInfo, Throwable> failResponses = new HashMap<>();
    failResponses.put(
        new RequestInfo(createBatchGetRequest(3L), getTargetHostRequestContext(_host2URI)),
        new RestLiScatterGatherException("Partition host is unavailable!"));
    Callback<Response<BatchResponse<TestRecord>>> testCallback = new Callback<Response<BatchResponse<TestRecord>>>()
    {
        @Override
        public void onError(Throwable e)
        {
        }
        @Override
        public void onSuccess(Response<BatchResponse<TestRecord>> result)
        {
            Assert.assertNotNull(result.getEntity());
            Assert.assertEquals(result.getStatus(), HttpStatus.S_200_OK.getCode());
            Assert.assertTrue(result.getEntity().getResults().size() == 1);
            Assert.assertTrue(result.getEntity().getResults().containsKey("1"));
            Assert.assertTrue(result.getEntity().getErrors().size() == 3);
            ErrorResponse keyError = result.getEntity().getErrors().get("2");
            Assert.assertEquals(keyError.getStatus().intValue(), HttpStatus.S_404_NOT_FOUND.getCode());
            ErrorResponse failError = result.getEntity().getErrors().get("3");
            Assert.assertEquals(failError.getExceptionClass(), RestLiScatterGatherException.class.getName());
            Assert.assertEquals(failError.getMessage(), "Partition host is unavailable!");
            ErrorResponse unmappedError = result.getEntity().getErrors().get("4");
            Assert.assertEquals(unmappedError.getExceptionClass(), RestLiScatterGatherException.class.getName());
            Assert.assertEquals(unmappedError.getMessage(), "Unable to find a host for keys :[4]");
        }
    };
    _sgStrategy.onAllResponsesReceived(_batchGetRequest, version, successResponses, failResponses, _unmappedKeys, testCallback);
}
|
/**
 * Unregisters a previously added task decorator.
 * Removes the first occurrence of {@code decorator} from the registered list;
 * a no-op if the decorator was never added.
 *
 * @param decorator the decorator instance to remove (matched by equality)
 */
public void removeDecorator(TaskDecorator decorator) {
    decorators.remove(decorator);
}
|
/**
 * Adding then removing the same decorator must leave the plugin with no
 * registered decorators.
 */
@Test
public void testRemoveDecorator() {
    TaskDecoratorPlugin plugin = new TaskDecoratorPlugin();
    TaskDecorator identityDecorator = task -> task;
    plugin.addDecorator(identityDecorator);
    plugin.removeDecorator(identityDecorator);
    Assert.assertEquals(plugin.getDecorators().size(), 0);
}
|
/**
 * Creates a SonarQube project from an Azure DevOps repository and binds it to the
 * DevOps platform configuration held by this instance.
 *
 * <p>Flow: resolve the stored personal access token, fetch the remote repository,
 * create the project (key/name fall back to repository-derived values when null),
 * then persist the ALM binding for the new project.</p>
 *
 * @param dbSession      open DB session used for all persistence in this call
 * @param creationMethod how the creation was triggered (UI, API import, ...)
 * @param monorepo       whether the binding is part of a monorepo setup
 * @param projectKey     explicit project key, or null to derive one from the repo
 * @param projectName    explicit project name, or null to derive one from the repo
 * @return the created project/components data
 */
@Override
public ComponentCreationData createProjectAndBindToDevOpsPlatform(DbSession dbSession, CreationMethod creationMethod, Boolean monorepo, @Nullable String projectKey,
  @Nullable String projectName) {
  // Fails fast when no PAT is stored for this ALM configuration.
  String pat = findPersonalAccessTokenOrThrow(dbSession, almSettingDto);
  String url = requireNonNull(almSettingDto.getUrl(), "DevOps Platform url cannot be null");
  // Azure DevOps addresses repositories as <project identifier>/<repository identifier>.
  checkArgument(devOpsProjectDescriptor.projectIdentifier() != null, "DevOps Project Identifier cannot be null for Azure DevOps");
  GsonAzureRepo repo = fetchAzureDevOpsProject(url, pat, devOpsProjectDescriptor.projectIdentifier(), devOpsProjectDescriptor.repositoryIdentifier());
  ComponentCreationData componentCreationData = projectCreator.createProject(
    dbSession,
    getProjectKey(projectKey, repo),
    getProjectName(projectName, repo),
    repo.getDefaultBranchName(),
    creationMethod);
  // Project creation above is expected to always produce a ProjectDto; missing one is a programming error.
  ProjectDto projectDto = Optional.ofNullable(componentCreationData.projectDto()).orElseThrow();
  createProjectAlmSettingDto(dbSession, repo, projectDto, almSettingDto, monorepo);
  return componentCreationData;
}
|
/**
 * Without a stored personal access token the creation must be rejected
 * before any remote call is attempted.
 */
@Test
void createProjectAndBindToDevOpsPlatform_whenPatIsMissing_shouldThrow() {
  DbSession dbSession = mock(DbSession.class);
  assertThatExceptionOfType(IllegalArgumentException.class)
    .isThrownBy(() -> underTest.createProjectAndBindToDevOpsPlatform(dbSession, CreationMethod.ALM_IMPORT_API, false, null, null))
    .withMessage("personal access token for 'azuredevops_config_1' is missing");
}
|
/**
 * Returns the human-readable name of this analyzer as shown in reports.
 *
 * @return the analyzer's display name
 */
@Override
public String getName() {
    final String analyzerName = "Perl cpanfile Analyzer";
    return analyzerName;
}
|
/**
 * The analyzer must report its fixed display name.
 */
@Test
public void testGetName() {
    PerlCpanfileAnalyzer analyzer = new PerlCpanfileAnalyzer();
    assertEquals("Perl cpanfile Analyzer", analyzer.getName());
}
|
/**
 * Evaluates each row in declaration order and returns the first value produced,
 * or an empty {@code Optional} when no row matches the given column pairs.
 *
 * @param columnPairsMap column name/value pairs a row must match
 * @param outputColumn   name of the column whose value is returned on a match
 * @param regexField     optional field driving regex-based matching (may be null)
 * @return the first matching row's output value, if any
 */
public Optional<Object> evaluate(final Map<String, Object> columnPairsMap, final String outputColumn, final String regexField) {
    // flatMap(Optional::stream) drops empty row results; findFirst short-circuits
    // exactly like the filter(isPresent)/map(get) formulation it replaces.
    return rows.stream()
            .map(row -> row.evaluate(columnPairsMap, outputColumn, regexField))
            .flatMap(Optional::stream)
            .findFirst();
}
|
/**
 * When one of the supplied column values (an Integer where a String is
 * expected) prevents any row from matching, the evaluation yields no result.
 */
@Test
void evaluateKeyFoundMultipleNotMatching() {
    KiePMMLInlineTable inlineTable = new KiePMMLInlineTable("name", Collections.emptyList(), ROWS);
    Map<String, Object> columnPairs = IntStream.range(0, 2).boxed()
            .collect(Collectors.toMap(index -> "KEY-1-" + index,
                                      index -> "VALUE-1-" + index));
    // Integer value where the table holds a String: no row can match.
    columnPairs.put("KEY-1-2", 4);
    Optional<Object> result = inlineTable.evaluate(columnPairs, "KEY-0-0", null);
    assertThat(result).isNotPresent();
}
|
/**
 * Deserializes a plugin's validation response body into a {@link ValidationResult}.
 *
 * <p>The body is expected to be a JSON array of error objects, each a map with a
 * string {@code message} and an optional string {@code key}. Errors without a key
 * become general (key-less) validation errors. An empty body yields an empty result.</p>
 *
 * @param responseBody JSON response from the plugin; may be null or empty
 * @return the parsed validation result
 * @throws RuntimeException if the body is not a list of maps, or a key/message is
 *         not a string; the original exception is preserved as the cause
 */
public ValidationResult toValidationResult(String responseBody) {
    try {
        ValidationResult validationResult = new ValidationResult();
        if (isEmpty(responseBody)) return validationResult;
        List<Map<String, Object>> errors;
        try {
            errors = GSON.fromJson(responseBody, new TypeToken<List<Map<String, Object>>>() {}.getType());
        } catch (Exception e) {
            // Keep the parse failure as the cause so the malformed payload can be diagnosed.
            throw new RuntimeException("Validation errors should be returned as list of errors, with each error represented as a map", e);
        }
        for (Map<String, Object> error : errors) {
            String key;
            try {
                key = (String) error.get("key");
            } catch (Exception e) {
                throw new RuntimeException("Validation error key should be of type string", e);
            }
            String message;
            try {
                message = (String) error.get("message");
            } catch (Exception e) {
                throw new RuntimeException("Validation message should be of type string", e);
            }
            if (isEmpty(key)) {
                // No key: record as a general error.
                validationResult.addError(new ValidationError(message));
            } else {
                validationResult.addError(new ValidationError(key, message));
            }
        }
        return validationResult;
    } catch (Exception e) {
        // Preserve the original exception as the cause instead of flattening it to its message.
        throw new RuntimeException(format("Unable to de-serialize json response. %s", e.getMessage()), e);
    }
}
|
/**
 * A response containing one keyed error and one general (key-less) error must
 * produce two validation errors, the key-less one with an empty key.
 */
@Test
public void shouldBuildValidationResultFromResponseBody() {
    String keyedError = "{\"key\":\"key-one\",\"message\":\"incorrect value\"}";
    String generalError = "{\"message\":\"general error\"}";
    ValidationResult validationResult = messageHandler.toValidationResult("[" + keyedError + "," + generalError + "]");
    assertValidationError(validationResult.getErrors().get(0), "key-one", "incorrect value");
    assertValidationError(validationResult.getErrors().get(1), "", "general error");
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.