focal_method
stringlengths 13
60.9k
| test_case
stringlengths 25
109k
|
|---|---|
/**
 * Opens a PostgreSQL connection configured for logical replication.
 * Credentials and URL come from the given pipeline data source configuration.
 *
 * @param pipelineDataSourceConfig source of URL, username and password
 * @return a new replication-capable JDBC connection
 * @throws SQLException if the driver cannot establish the connection
 */
public Connection createConnection(final StandardPipelineDataSourceConfiguration pipelineDataSourceConfig) throws SQLException {
    final Properties connectionProps = new Properties();
    PGProperty.USER.set(connectionProps, pipelineDataSourceConfig.getUsername());
    PGProperty.PASSWORD.set(connectionProps, pipelineDataSourceConfig.getPassword());
    // Replication streaming needs at least server 9.6 and the simple query protocol.
    PGProperty.ASSUME_MIN_SERVER_VERSION.set(connectionProps, "9.6");
    PGProperty.REPLICATION.set(connectionProps, "database");
    PGProperty.PREFER_QUERY_MODE.set(connectionProps, "simple");
    return DriverManager.getConnection(pipelineDataSourceConfig.getUrl(), connectionProps);
}
|
@Test
void assertCreatePgConnectionSuccess() throws SQLException {
    // H2 in PostgreSQL-compatibility mode stands in for a real PG server.
    Map<String, Object> dataSourceProps = new HashMap<>(3, 1F);
    dataSourceProps.put("url", "jdbc:h2:mem:test;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=PostgreSQL");
    dataSourceProps.put("username", "root");
    dataSourceProps.put("password", "root");
    Connection createdConnection = logicalReplication.createConnection(new StandardPipelineDataSourceConfiguration(dataSourceProps));
    assertFalse(createdConnection.isClosed());
}
|
@Override
protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
    // Try each delegate loader in registration order; the first hit wins.
    for (ClassLoader delegate : classLoaders) {
        try {
            return delegate.loadClass(name);
        } catch (ClassNotFoundException e) {
            LOG.trace("Class " + name + " not found", e);
        }
    }
    // No delegate could resolve the class.
    throw new ClassNotFoundException("Class " + name + " not found.");
}
|
@Test(expected = ClassNotFoundException.class)
public void loadThrowsClassNotFoundExceptionIfClassDoesNotExist() throws Exception {
    final ChainingClassLoader loader = new ChainingClassLoader(getClass().getClassLoader());
    // The epoch-millis suffix makes the class name unresolvable on any classpath.
    loader.loadClass("ThisClassHopeFullyDoesNotExist" + Instant.now().toEpochMilli());
}
|
/**
 * Loads the attributes map from the given data node into the attributes holder.
 * Convenience overload that delegates to the three-argument variant using the
 * default {@code NODE_ATTRIBUTE_GROUPS} group node name.
 *
 * @throws KettleException if reading the attributes from the node fails
 */
public static final void loadAttributesMap( DataNode dataNode, AttributesInterface attributesInterface )
throws KettleException {
loadAttributesMap( dataNode, attributesInterface, NODE_ATTRIBUTE_GROUPS );
}
|
/**
 * Loading from an empty data node with a custom tag must still leave the target
 * with a non-null (empty) attributes map.
 */
@Test
public void testLoadAttributesMap_CustomTag_EmptyDataNode() throws Exception {
try ( MockedStatic<AttributesMapUtil> mockedAttributesMapUtil = mockStatic( AttributesMapUtil.class ) ) {
// Stub the static mock so the real three-argument implementation runs.
mockedAttributesMapUtil.when( () -> AttributesMapUtil.loadAttributesMap( any( DataNode.class ),
any( AttributesInterface.class ), anyString() ) ).thenCallRealMethod();
DataNode dataNode = new DataNode( CNST_DUMMY );
JobEntryCopy jobEntryCopy = new JobEntryCopy();
AttributesMapUtil.loadAttributesMap( dataNode, jobEntryCopy, CUSTOM_TAG );
assertNotNull( jobEntryCopy.getAttributesMap() );
}
}
|
/**
 * Commits all pending partitions unconditionally: the always-true
 * (subtaskIndex, attemptNumber) filter accepts every task attempt.
 */
public void commitPartitions() throws Exception {
commitPartitions((subtaskIndex, attemptNumber) -> true);
}
|
/**
 * Verifies commit semantics for empty partitions in three phases:
 * 1) committing an empty partition creates a dir containing only the success file;
 * 2) with overwrite enabled, re-committing an empty partition wipes existing data;
 * 3) with overwrite disabled, existing data files survive a subsequent empty commit.
 */
@Test
void testEmptyPartition() throws Exception {
LinkedHashMap<String, String> staticPartitions = new LinkedHashMap<>();
// add new empty partition
staticPartitions.put("dt", "2022-08-02");
// Overwrite enabled for the first two phases.
FileSystemCommitter committer =
new FileSystemCommitter(
fileSystemFactory,
metaStoreFactory,
true,
new Path(path.toString()),
1,
false,
identifier,
staticPartitions,
policies);
createFile(path, "task-1-attempt-0/dt=2022-08-02/");
createFile(path, "task-2-attempt-0/dt=2022-08-02/");
committer.commitPartitions();
File emptyPartitionFile = new File(outputPath.toFile(), "dt=2022-08-02");
// assert partition dir is empty with only success file
assertThat(emptyPartitionFile).exists();
assertThat(emptyPartitionFile).isDirectory();
assertThat(emptyPartitionFile).isNotEmptyDirectory();
assertThat(emptyPartitionFile)
.isDirectoryNotContaining(file -> !file.getName().equals(SUCCESS_FILE_NAME));
// Add new empty partition to overwrite the old one with data
createFile(outputPath, "dt=2022-08-02/f1");
assertThat(new File(emptyPartitionFile, "f1")).exists();
createFile(path, "task-1-attempt-0/dt=2022-08-02/");
createFile(path, "task-2-attempt-0/dt=2022-08-02/");
committer.commitPartitions();
// assert partition dir is still empty because the partition dir is overwritten
assertThat(emptyPartitionFile).exists();
assertThat(emptyPartitionFile).isDirectory();
assertThat(emptyPartitionFile).isNotEmptyDirectory();
assertThat(emptyPartitionFile)
.isDirectoryNotContaining(file -> !file.getName().equals(SUCCESS_FILE_NAME));
// Add empty partition to the old one with data
createFile(outputPath, "dt=2022-08-02/f1");
assertThat(new File(emptyPartitionFile, "f1")).exists();
createFile(path, "task-1-attempt-0/dt=2022-08-02/");
createFile(path, "task-2-attempt-0/dt=2022-08-02/");
// Third phase: new committer with overwrite disabled (third constructor arg false).
committer =
new FileSystemCommitter(
fileSystemFactory,
metaStoreFactory,
false,
new Path(path.toString()),
1,
false,
identifier,
staticPartitions,
policies);
committer.commitPartitions();
// assert the partition dir contains remaining 'f1' because overwrite is disabled
assertThat(emptyPartitionFile).exists();
assertThat(emptyPartitionFile).isDirectory();
assertThat(emptyPartitionFile).isNotEmptyDirectory();
assertThat(new File(emptyPartitionFile, "f1")).exists();
assertThat(new File(emptyPartitionFile, SUCCESS_FILE_NAME)).exists();
}
|
/**
 * Validates both the host and the port of the given plugin configuration.
 *
 * @param type plugin type name, used in validation error messages
 * @param pluginConfig configuration to validate
 */
public static void validateHostAndPort(final String type, final PluginConfiguration pluginConfig) {
validateHost(type, pluginConfig);
validatePort(type, pluginConfig);
}
|
// An empty host string must be rejected with IllegalArgumentException.
@Test
void assertValidateHostAndPortWhenHostIsEmpty() {
assertThrows(IllegalArgumentException.class, () -> PluginConfigurationValidator.validateHostAndPort("foo_type", new PluginConfiguration("", 8080, "pwd", null)));
}
|
/**
 * Renders a client quota entity as a stable string of the form
 * {@code clientId(...)_ip(...)_user(...)}, including only the dimensions present.
 * Values are escaped; an empty entity or an unknown dimension key is an error.
 */
static String clientQuotaEntityToString(ClientQuotaEntity entity) {
    if (entity.entries().isEmpty()) {
        throw new RuntimeException("Invalid empty entity");
    }
    String clientId = null;
    String ip = null;
    String user = null;
    // Pick out the three recognized entity dimensions; anything else is an error.
    for (Map.Entry<String, String> entry : entity.entries().entrySet()) {
        String key = entry.getKey();
        if (key.equals(CLIENT_ID)) {
            clientId = entry.getValue();
        } else if (key.equals(IP)) {
            ip = entry.getValue();
        } else if (key.equals(USER)) {
            user = entry.getValue();
        } else {
            throw new RuntimeException("Invalid entity type " + key);
        }
    }
    // Join the present dimensions with '_', each rendered as name(escapedValue).
    StringBuilder result = new StringBuilder();
    if (clientId != null) {
        result.append("clientId(").append(escape(clientId)).append(')');
    }
    if (ip != null) {
        if (result.length() > 0) {
            result.append('_');
        }
        result.append("ip(").append(escape(ip)).append(')');
    }
    if (user != null) {
        if (result.length() > 0) {
            result.append('_');
        }
        result.append("user(").append(escape(user)).append(')');
    }
    return result.toString();
}
|
// An unrecognized entity dimension key must produce a descriptive RuntimeException.
@Test
public void testErrorOnInvalidEntityType() {
assertEquals("Invalid entity type foobar",
assertThrows(RuntimeException.class, () -> ClientQuotasImageNode.
clientQuotaEntityToString(new ClientQuotaEntity(singletonMap("foobar", "baz")))).
getMessage());
}
|
/**
 * Updates this field's value in the given trace context.
 * Returns true and flushes the change to any open correlation scopes only when the
 * underlying context actually accepted the update; returns false for a null context
 * or an unchanged value.
 */
public boolean updateValue(@Nullable TraceContext context, @Nullable String value) {
    if (context == null || !this.context.updateValue(this, context, value)) {
        return false;
    }
    CorrelationFlushScope.flush(this, value);
    return true;
}
|
// Passing a null TraceContextOrSamplingFlags (distinct from a null TraceContext)
// is a programming error and must raise NullPointerException.
@Test void updateValue_extracted_invalid() {
assertThatThrownBy(() -> REQUEST_ID.updateValue((TraceContextOrSamplingFlags) null, null))
.isInstanceOf(NullPointerException.class);
}
|
@Override
public void onPeriodicEmit(WatermarkOutput output) {
// Watermark trails the highest seen timestamp by the allowed out-of-orderness;
// the extra -1 makes the bound exclusive (an event at exactly the watermark is still on time).
output.emitWatermark(new Watermark(maxTimestamp - outOfOrdernessMillis - 1));
}
|
// Before any record is seen, the periodic emit must produce the initial
// Long.MIN_VALUE watermark (maxTimestamp - outOfOrderness - 1 saturates to MIN_VALUE).
@Test
void testWatermarkBeforeRecords() {
final TestingWatermarkOutput output = new TestingWatermarkOutput();
final BoundedOutOfOrdernessWatermarks<Object> watermarks =
new BoundedOutOfOrdernessWatermarks<>(Duration.ofMillis(10));
watermarks.onPeriodicEmit(output);
assertThat(output.lastWatermark()).isNotNull();
assertThat(output.lastWatermark().getTimestamp()).isEqualTo(Long.MIN_VALUE);
}
|
// Returns the shared matcher that matches every parameter (including null).
// The unchecked cast is safe because the constant never inspects its argument.
public static <P> Matcher<P> alwaysMatch() {
return (Matcher<P>) Constants.ALWAYS_MATCH;
}
|
// The always-match matcher must accept even a null parameter.
@Test void alwaysMatch_matched() {
assertThat(alwaysMatch().matches(null)).isTrue();
}
|
/**
 * Prepares and sends fetch requests to all nodes with fetchable partitions.
 * The success/failure callbacks re-acquire the Fetcher lock because they are
 * invoked later (presumably from the network/response path, outside this
 * synchronized call — confirm against caller threading).
 *
 * @return the number of fetch requests sent
 */
public synchronized int sendFetches() {
final Map<Node, FetchSessionHandler.FetchRequestData> fetchRequests = prepareFetchRequests();
sendFetchesInternal(
fetchRequests,
(fetchTarget, data, clientResponse) -> {
synchronized (Fetcher.this) {
handleFetchSuccess(fetchTarget, data, clientResponse);
}
},
(fetchTarget, data, error) -> {
synchronized (Fetcher.this) {
handleFetchFailure(fetchTarget, data, error);
}
});
return fetchRequests.size();
}
|
/**
 * Closing the fetcher must send a final fetch request that tells the broker to
 * close the fetch session: same session id, FINAL_EPOCH, and no partition data.
 */
@Test
public void testFetcherCloseClosesFetchSessionsInBroker() {
buildFetcher();
assignFromUser(singleton(tp0));
subscriptions.seek(tp0, 0);
// normal fetch
assertEquals(1, sendFetches());
assertFalse(fetcher.hasCompletedFetches());
final FetchResponse fetchResponse = fullFetchResponse(tidp0, records, Errors.NONE, 100L, 0);
client.prepareResponse(fetchResponse);
consumerClient.poll(time.timer(0));
assertTrue(fetcher.hasCompletedFetches());
assertEquals(0, consumerClient.pendingRequestCount());
final ArgumentCaptor<FetchRequest.Builder> argument = ArgumentCaptor.forClass(FetchRequest.Builder.class);
// send request to close the fetcher
fetcher.close(time.timer(Duration.ofSeconds(10)));
// validate that Fetcher.close() has sent a request with final epoch. 2 requests are sent, one for the normal
// fetch earlier and another for the finish fetch here.
verify(consumerClient, times(2)).send(any(Node.class), argument.capture());
// getValue() returns the last captured value, i.e. the session-close request.
FetchRequest.Builder builder = argument.getValue();
// session Id is the same
assertEquals(fetchResponse.sessionId(), builder.metadata().sessionId());
// contains final epoch
assertEquals(FetchMetadata.FINAL_EPOCH, builder.metadata().epoch()); // final epoch indicates we want to close the session
assertTrue(builder.fetchData().isEmpty()); // partition data should be empty
}
|
/**
 * Prefixes the line with {@code indent} spaces and appends a trailing newline.
 * A negative indent is treated as zero.
 */
protected String addIndent(String line, int indent) {
    final StringBuilder indented = new StringBuilder();
    for (int i = 0; i < indent; i++) {
        indented.append(' ');
    }
    return indented.append(line).append('\n').toString();
}
|
// Verifies space-prefixing and newline-appending for positive and zero indents.
@Test
public void testAddIndent() {
MessageCodeGen messageCodeGen = new MessageCodeGen();
// Fix: JUnit's assertEquals takes (expected, actual); the original had them
// reversed, which produces misleading failure messages.
assertEquals("   add indentation\n", messageCodeGen.addIndent("add indentation", 3));
assertEquals("add indentation\n", messageCodeGen.addIndent("add indentation", 0));
}
|
/**
 * Parses a unified diff from the given stream.
 *
 * @param stream raw diff bytes; decoded as UTF-8
 * @return the parsed {@link UnifiedDiff}
 * @throws IOException on read failure
 * @throws UnifiedDiffParserException on malformed diff content
 */
public static UnifiedDiff parseUnifiedDiff(InputStream stream) throws IOException, UnifiedDiffParserException {
// Fix: specify the charset explicitly. Before Java 18, InputStreamReader without a
// charset uses the platform default, making parsing environment-dependent.
UnifiedDiffReader parser = new UnifiedDiffReader(new BufferedReader(
new InputStreamReader(stream, java.nio.charset.StandardCharsets.UTF_8)));
return parser.parse();
}
|
// Regression test for issue 182: a diff deleting a binary file must record the
// deleted file name in getBinaryDeleted().
@Test
public void testParseIssue182delete() throws IOException {
UnifiedDiff diff = UnifiedDiffReader.parseUnifiedDiff(
UnifiedDiffReaderTest.class.getResourceAsStream("problem_diff_issue182_delete.diff"));
UnifiedDiffFile file1 = diff.getFiles().get(0);
assertThat(file1.getBinaryDeleted()).isEqualTo("some-image.png");
}
|
// Factory for the FilterNode variant of the pull-up-expression-in-lambda rule.
public Rule<FilterNode> filterNodeRule()
{
return new PullUpExpressionInLambdaFilterNodeRule();
}
|
// The rule must not fire when the lambda contains a non-deterministic call
// (random()): pulling it out of the lambda would change evaluation semantics.
@Test
public void testNonDeterministicFilter()
{
tester().assertThat(new PullUpExpressionInLambdaRules(getFunctionManager()).filterNodeRule())
.setSystemProperty(PULL_EXPRESSION_FROM_LAMBDA_ENABLED, "true")
.on(p ->
{
p.variable("idmap", new MapType(BIGINT, BIGINT, KEY_BLOCK_EQUALS, KEY_BLOCK_HASH_CODE));
return p.filter(
p.rowExpression("cardinality(map_filter(idmap, (k, v) -> array_position(array_sort(array[random(), random()]), k) <= 200)) > 0"),
p.values(p.variable("idmap", new MapType(BIGINT, BIGINT, KEY_BLOCK_EQUALS, KEY_BLOCK_HASH_CODE))));
}).doesNotFire();
}
|
@Override
public List<String> splitAndEvaluate() {
// Null/empty expression -> empty list. Otherwise: substitute placeholders, split the
// expression into sub-expressions (GroovyUtils.split), evaluate each, and flatten
// the evaluation results into a single list of strings.
return Strings.isNullOrEmpty(inlineExpression) ? Collections.emptyList() : flatten(evaluate(GroovyUtils.split(handlePlaceHolder(inlineExpression))));
}
|
// A $->{} placeholder expression with a nested list and a range must expand to
// the full cartesian product: 2 list entries x 2 range values = 4 results.
@Test
void assertEvaluateForExpressionPlaceHolder() {
List<String> expected = TypedSPILoader.getService(InlineExpressionParser.class, "GROOVY", PropertiesBuilder.build(
new PropertiesBuilder.Property(InlineExpressionParser.INLINE_EXPRESSION_KEY, "t_$->{[\"new$->{1+2}\",'old']}_order_$->{1..2}"))).splitAndEvaluate();
assertThat(expected.size(), is(4));
assertThat(expected, hasItems("t_new3_order_1", "t_new3_order_2", "t_old_order_1", "t_old_order_2"));
}
|
/**
 * Queries consume time-span information for the topic from every broker serving it.
 * Route data is fetched from the name server (3s timeout); each broker's selected
 * address is then queried (3s timeout) and the results are aggregated.
 *
 * @param topic topic to query
 * @return combined time spans from all brokers (empty if the route has no brokers)
 */
public List<QueueTimeSpan> queryConsumeTimeSpan(final String topic)
throws RemotingException, MQClientException, InterruptedException, MQBrokerException {
List<QueueTimeSpan> queueTimeSpan = new ArrayList<>();
TopicRouteData routeData = this.mQClientFactory.getMQClientAPIImpl().getTopicRouteInfoFromNameServer(topic, 3000);
for (BrokerData brokerData : routeData.getBrokerDatas()) {
String addr = brokerData.selectBrokerAddr();
queueTimeSpan.addAll(this.mQClientFactory.getMQClientAPIImpl().queryConsumeTimeSpan(addr, topic, groupName(), 3000));
}
return queueTimeSpan;
}
|
// With a mocked route containing one broker and a default (empty) time-span reply,
// the aggregated result must be a non-null empty list.
@Test
public void testQueryConsumeTimeSpan() throws InterruptedException, MQClientException, MQBrokerException, RemotingException {
TopicRouteData topicRouteData = new TopicRouteData();
topicRouteData.getBrokerDatas().add(createBrokerData());
when(mqClientAPIImpl.getTopicRouteInfoFromNameServer(any(), anyLong())).thenReturn(topicRouteData);
List<QueueTimeSpan> actual = defaultMQPushConsumerImpl.queryConsumeTimeSpan(defaultTopic);
assertNotNull(actual);
assertEquals(0, actual.size());
}
|
/**
 * Resolves the effective method name of a Dubbo invocation.
 * Generic invocations ($invoke / $invokeAsync) carry the real method name as their
 * first argument. Returns null when no non-empty name can be determined.
 */
@Nullable static String method(Invocation invocation) {
    String methodName = invocation.getMethodName();
    if ("$invoke".equals(methodName) || "$invokeAsync".equals(methodName)) {
        Object[] arguments = invocation.getArguments();
        methodName = (arguments != null && arguments.length > 0 && arguments[0] instanceof String)
            ? (String) arguments[0]
            : null;
    }
    if (methodName == null || methodName.isEmpty()) {
        return null;
    }
    return methodName;
}
|
// An empty method name must resolve to null rather than an empty string.
@Test void method_malformed() {
when(invocation.getMethodName()).thenReturn("");
assertThat(DubboParser.method(invocation)).isNull();
}
|
// Returns the underlying metrics properties.
// NOTE(review): exposes the internal mutable Properties object directly — callers
// can mutate configuration state; consider returning a defensive copy.
public Properties getProperties() {
return mProperties;
}
|
// The config built from the test properties must expose exactly the four
// expected sink settings with their configured values.
@Test
public void setProperties() {
MetricsConfig config = new MetricsConfig(mMetricsProps);
Properties masterProp = config.getProperties();
assertEquals(4, masterProp.size());
assertEquals("alluxio.metrics.sink.ConsoleSink",
masterProp.getProperty("sink.console.class"));
assertEquals("15", masterProp.getProperty("sink.console.period"));
assertEquals("minutes", masterProp.getProperty("sink.console.unit"));
assertEquals("alluxio.metrics.sink.JmxSink", masterProp.getProperty("sink.jmx.class"));
}
|
/**
 * Looks up the metadata for the given id within the given service.
 * Returns empty when the service has no metadata map at all, or when the map
 * contains no entry for the id.
 */
public Optional<InstanceMetadata> getInstanceMetadata(Service service, String metadataId) {
    return Optional.ofNullable(instanceMetadataMap.get(service))
            .map(metadataForService -> metadataForService.get(metadataId));
}
|
// Previously-registered instance metadata must be retrievable by (service, id).
@Test
void testGetInstanceMetadata() {
Optional<InstanceMetadata> instanceMetadata = namingMetadataManager.getInstanceMetadata(service, METADATA_ID);
assertTrue(instanceMetadata.isPresent());
assertNotNull(instanceMetadata.get());
}
|
/**
 * Compares two numeric Comparables of different runtime classes.
 * Preconditions (asserted): the classes differ and both operands are Numbers.
 * Double/long mixed comparisons go through compareLongWithDouble so the long is
 * not silently rounded by conversion to double; when neither side is long- or
 * double-representable the call falls back to lhs.compareTo(rhs), which may
 * throw ClassCastException for incompatible types.
 */
@SuppressWarnings("unchecked")
public static int compare(Comparable lhs, Comparable rhs) {
Class lhsClass = lhs.getClass();
Class rhsClass = rhs.getClass();
assert lhsClass != rhsClass;
assert lhs instanceof Number;
assert rhs instanceof Number;
Number lhsNumber = (Number) lhs;
Number rhsNumber = (Number) rhs;
if (isDoubleRepresentable(lhsClass)) {
if (isDoubleRepresentable(rhsClass)) {
return Double.compare(lhsNumber.doubleValue(), rhsNumber.doubleValue());
} else if (isLongRepresentable(rhsClass)) {
// Arguments are swapped relative to the helper, so negate the sign.
return -Integer.signum(compareLongWithDouble(rhsNumber.longValue(), lhsNumber.doubleValue()));
}
} else if (isLongRepresentable(lhsClass)) {
if (isDoubleRepresentable(rhsClass)) {
return compareLongWithDouble(lhsNumber.longValue(), rhsNumber.doubleValue());
} else if (isLongRepresentable(rhsClass)) {
return Long.compare(lhsNumber.longValue(), rhsNumber.longValue());
}
}
return lhs.compareTo(rhs);
}
|
// A null right-hand operand must fail fast (NPE or assertion error, hence
// the broad Throwable expectation).
@SuppressWarnings("ConstantConditions")
@Test(expected = Throwable.class)
public void testNullRhsInCompareThrows() {
compare(1, null);
}
|
/**
 * Parses svn log XML output into a list of Modifications.
 * Any failure (malformed XML, unexpected structure) is wrapped and rethrown
 * via bomb(...), with the offending output included in the message.
 */
public List<Modification> parse(String svnLogOutput, String path, SAXBuilder builder) {
try {
Document document = builder.build(new StringReader(svnLogOutput));
return parseDOMTree(document, path);
} catch (Exception e) {
throw bomb("Unable to parse svn log output: " + svnLogOutput, e);
}
}
|
// A log entry with no <msg> element must parse successfully and yield a
// modification whose comment is null (not empty).
@Test
public void shouldParseLogEntryWithoutComment() {
SvnLogXmlParser parser = new SvnLogXmlParser();
List<Modification> materialRevisions = parser.parse("""
<?xml version="1.0"?>
<log>
<logentry
revision="3">
<author>cceuser</author>
<date>2008-03-11T07:52:41.162075Z</date>
<paths>
<path
action="A">/trunk/revision3.txt</path>
</paths>
</logentry>
</log>""", "", new SAXBuilder());
assertThat(materialRevisions.size()).isEqualTo(1);
Modification mod = materialRevisions.get(0);
assertThat(mod.getRevision()).isEqualTo("3");
assertThat(mod.getComment()).isNull();
}
|
/**
 * Computes the average percentage of these resources consumed by {@code used},
 * averaging the per-resource usage ratios over memory, CPU and all "other"
 * resources. Resources whose total is zero are skipped (their ratio is not
 * meaningful) and excluded from the divisor. Throws (via
 * throwBecauseUsedIsNotSubsetOfTotal) if any used amount exceeds its total.
 *
 * @param used the used resources (must be a subset of this total)
 * @param totalMemoryMb total memory, tracked separately from otherResources
 * @param usedMemoryMb used memory
 * @return average used percentage in [0, 100]; 100.0 when every total is zero
 */
public double calculateAveragePercentageUsedBy(NormalizedResources used, double totalMemoryMb, double usedMemoryMb) {
int skippedResourceTypes = 0;
double total = 0.0;
if (usedMemoryMb > totalMemoryMb) {
throwBecauseUsedIsNotSubsetOfTotal(used, totalMemoryMb, usedMemoryMb);
}
if (totalMemoryMb != 0.0) {
total += usedMemoryMb / totalMemoryMb;
} else {
skippedResourceTypes++;
}
double totalCpu = getTotalCpu();
// NOTE(review): totalCpu is cached above but getTotalCpu() is re-invoked below;
// harmless if the getter is pure — consider using the local consistently.
if (used.getTotalCpu() > getTotalCpu()) {
throwBecauseUsedIsNotSubsetOfTotal(used, totalMemoryMb, usedMemoryMb);
}
if (totalCpu != 0.0) {
total += used.getTotalCpu() / getTotalCpu();
} else {
skippedResourceTypes++;
}
if (used.otherResources.length > otherResources.length) {
throwBecauseUsedIsNotSubsetOfTotal(used, totalMemoryMb, usedMemoryMb);
}
for (int i = 0; i < otherResources.length; i++) {
double totalValue = otherResources[i];
double usedValue;
if (i >= used.otherResources.length) {
//Resources missing from used are using none of that resource
usedValue = 0.0;
} else {
usedValue = used.otherResources[i];
}
if (usedValue > totalValue) {
throwBecauseUsedIsNotSubsetOfTotal(used, totalMemoryMb, usedMemoryMb);
}
if (totalValue == 0.0) {
//Skip any resources where the total is 0, the percent used for this resource isn't meaningful.
//We fall back to prioritizing by cpu, memory and any other resources by ignoring this value
skippedResourceTypes++;
continue;
}
total += usedValue / totalValue;
}
//Adjust the divisor for the average to account for any skipped resources (those where the total was 0)
int divisor = 2 + otherResources.length - skippedResourceTypes;
if (divisor == 0) {
/*
* This is an arbitrary choice to make the result consistent with calculateMin. Any value would be valid here, becase there are
* no (non-zero) resources in the total set of resources, so we're trying to average 0 values.
*/
return 100.0;
} else {
return (total * 100.0) / divisor;
}
}
|
// Average of three ratios: memory 1/4 (25%), cpu 1/2 (50%), gpu 1/10 (10%) -> /3.
@Test
public void testCalculateAvgWithCpuMemAndGenericResource() {
Map<String, Double> allResourcesMap = new HashMap<>();
allResourcesMap.put(Constants.COMMON_CPU_RESOURCE_NAME, 2.0);
allResourcesMap.put(gpuResourceName, 10.0);
NormalizedResources resources = new NormalizedResources(normalize(allResourcesMap));
Map<String, Double> usedResourcesMap = new HashMap<>();
usedResourcesMap.put(Constants.COMMON_CPU_RESOURCE_NAME, 1.0);
usedResourcesMap.put(gpuResourceName, 1.0);
NormalizedResources usedResources = new NormalizedResources(normalize(usedResourcesMap));
double avg = resources.calculateAveragePercentageUsedBy(usedResources, 4, 1);
assertThat(avg, is((50.0 + 25.0 + 10.0)/3));
}
|
/**
 * Splits a template into an alternating sequence of literal and parsed
 * expressions, where expressions are delimited by the given prefix/suffix
 * (e.g. "{{" / "}}"). Text outside delimiters becomes LiteralExpressions;
 * text inside is trimmed and handed to doParseExpression.
 *
 * @throws ParseException on an unterminated delimiter or an empty expression
 */
public static Expression[] parseExpressions(String template, EvaluationContext context, String expressionPrefix,
String expressionSuffix) throws ParseException {
// Prepare an array for results.
List<Expression> expressions = new ArrayList<>();
int startIdx = 0;
while (startIdx < template.length()) {
int prefixIndex = template.indexOf(expressionPrefix, startIdx);
if (prefixIndex >= startIdx) {
// an inner expression was found - this is a composite
if (prefixIndex > startIdx) {
log.debug("Found a literal expression starting at {}", startIdx);
expressions.add(new LiteralExpression(template.substring(startIdx, prefixIndex)));
}
int afterPrefixIndex = prefixIndex + expressionPrefix.length();
// skipToCorrectEndSuffix accounts for nesting; -1 means unterminated.
int suffixIndex = skipToCorrectEndSuffix(expressionSuffix, template, afterPrefixIndex);
if (suffixIndex == -1) {
log.info("No ending suffix '{}' for expression starting at character {}: {}", expressionSuffix,
prefixIndex, template.substring(prefixIndex));
throw new ParseException(template, prefixIndex,
"No ending suffix '" + expressionSuffix + "' for expression starting at character " + prefixIndex
+ ": " + template.substring(prefixIndex));
}
if (suffixIndex == afterPrefixIndex) {
log.info("No expression defined within delimiter '{}' at character {}", expressionPrefix, prefixIndex);
throw new ParseException(template, prefixIndex, "No expression defined within delimiter '"
+ expressionPrefix + expressionSuffix + "' at character " + prefixIndex);
}
String expr = template.substring(prefixIndex + expressionPrefix.length(), suffixIndex);
expr = expr.trim();
if (expr.isEmpty()) {
log.info("No expression defined within delimiter '{}' at character {}", expressionPrefix, prefixIndex);
throw new ParseException(template, prefixIndex, "No expression defined within delimiter '"
+ expressionPrefix + expressionSuffix + "' at character " + prefixIndex);
}
expressions.add(doParseExpression(expr, context));
// Resume scanning just past the suffix of the expression we consumed.
startIdx = suffixIndex + expressionSuffix.length();
log.debug("Expression accumulated. Pursuing with index {} on {}", startIdx, template.length());
} else {
// no more expression. finalize with a literal.
expressions.add(new LiteralExpression(template.substring(startIdx, template.length())));
break;
}
}
return expressions.toArray(new Expression[0]);
}
|
// A template mixing an XPath variable reference and a function call must split
// into literal / variable / literal / function, and the XPath must resolve
// against the registered request body.
@Test
void testXpathAttributeExpressionWithNestedFunction() {
String template = "Hello {{ request.body/request/name/@firstname }} it's {{ now() }}";
// Build a suitable context.
EvaluationContext context = new EvaluationContext();
context.registerFunction("now", NowELFunction.class);
context.setVariable("request", new EvaluableRequest("<request><name firstname=\"Laurent\"/></request>", null));
Expression[] expressions = ExpressionParser.parseExpressions(template, context, "{{", "}}");
assertEquals(4, expressions.length);
assertTrue(expressions[0] instanceof LiteralExpression);
assertTrue(expressions[1] instanceof VariableReferenceExpression);
assertTrue(expressions[2] instanceof LiteralExpression);
assertTrue(expressions[3] instanceof FunctionExpression);
assertEquals("Laurent", ((VariableReferenceExpression) expressions[1]).getValue(context));
}
|
/**
 * Walks the expression with a clause-specific Validator and returns the set of
 * source names it references.
 */
Set<SourceName> analyzeExpression(
    final Expression expression,
    final String clauseType
) {
    final Validator validator = new Validator(clauseType);
    validator.process(expression, null);
    return validator.referencedSources;
}
|
// Outside of a join there is no synthetic key, so referencing ROWKEY must fail
// with an unknown-column error.
@Test
public void shouldThrowOnPossibleSyntheticKeyColumnIfNotJoin() {
// Given:
when(sourceSchemas.isJoin()).thenReturn(false);
// When:
final Exception e = assertThrows(
UnknownColumnException.class,
() -> analyzer.analyzeExpression(POSSIBLE_SYNTHETIC_KEY, "SELECT")
);
// Then:
assertThat(e.getMessage(), containsString(
"SELECT column 'ROWKEY' cannot be resolved."));
}
|
/**
 * Loads a pod template file and splits it into the main container (matched by
 * name) and the rest of the pod. The main container is removed from the pod's
 * container list; a missing spec is replaced with an empty one, and a missing
 * main container with an empty container.
 *
 * @param kubeClient client used to parse the template file
 * @param podTemplateFile pod template to load
 * @param mainContainerName name identifying the main container in the template
 * @return the pod (without the main container) paired with the main container
 */
public static FlinkPod loadPodFromTemplateFile(
FlinkKubeClient kubeClient, File podTemplateFile, String mainContainerName) {
final KubernetesPod pod = kubeClient.loadPodFromTemplateFile(podTemplateFile);
final List<Container> otherContainers = new ArrayList<>();
Container mainContainer = null;
if (null != pod.getInternalResource().getSpec()) {
for (Container container : pod.getInternalResource().getSpec().getContainers()) {
if (mainContainerName.equals(container.getName())) {
mainContainer = container;
} else {
otherContainers.add(container);
}
}
// Keep only the non-main containers on the pod itself.
pod.getInternalResource().getSpec().setContainers(otherContainers);
} else {
// Set an empty spec for pod template
pod.getInternalResource().setSpec(new PodSpecBuilder().build());
}
if (mainContainer == null) {
LOG.info(
"Could not find main container {} in pod template, using empty one to initialize.",
mainContainerName);
mainContainer = new ContainerBuilder().build();
}
return new FlinkPod(pod.getInternalResource(), mainContainer);
}
|
// A template without a spec must yield an empty main container and a pod with
// no remaining containers.
@Test
void testLoadPodFromNoSpecTemplate() {
final FlinkPod flinkPod =
KubernetesUtils.loadPodFromTemplateFile(
flinkKubeClient,
KubernetesPodTemplateTestUtils.getNoSpecPodTemplateFile(),
KubernetesPodTemplateTestUtils.TESTING_MAIN_CONTAINER_NAME);
assertThat(flinkPod.getMainContainer()).isEqualTo(EMPTY_POD.getMainContainer());
// AssertJ idiom: isEmpty() reads better and fails better than hasSize(0).
assertThat(flinkPod.getPodWithoutMainContainer().getSpec().getContainers()).isEmpty();
}
|
/**
 * Returns the name of the method that called this one.
 * Stack index 2 is used because [0] is Thread#getStackTrace, [1] is this
 * method, and [2] is the direct caller.
 *
 * @param isFullName when true, prefix the result with the caller's class name
 * @return simple or fully-qualified caller method name
 */
public static String getCallerMethodName(boolean isFullName){
    final StackTraceElement stackTraceElement = Thread.currentThread().getStackTrace()[2];
    final String methodName = stackTraceElement.getMethodName();
    // Fix: replace the Yoda-style 'false == isFullName' with idiomatic negation.
    if (!isFullName) {
        return methodName;
    }
    return stackTraceElement.getClassName() + "." + methodName;
}
|
// The caller of getCallerMethodName is this test method itself, so both the
// simple and the fully-qualified forms must name it.
@Test
public void getCallerMethodNameTest() {
final String callerMethodName = CallerUtil.getCallerMethodName(false);
assertEquals("getCallerMethodNameTest", callerMethodName);
final String fullCallerMethodName = CallerUtil.getCallerMethodName(true);
assertEquals("cn.hutool.core.lang.caller.CallerUtilTest.getCallerMethodNameTest", fullCallerMethodName);
}
|
/**
 * Activates the first applicable http and https proxy from Maven settings by
 * setting the corresponding JVM system properties. Protocols whose proxy
 * properties are already set are left untouched. Proxy credentials are
 * decrypted first; decryption errors/fatals abort the build.
 *
 * @throws MojoExecutionException if proxy settings cannot be decrypted
 */
static void activateHttpAndHttpsProxies(Settings settings, SettingsDecrypter decrypter)
throws MojoExecutionException {
List<Proxy> proxies = new ArrayList<>(2);
for (String protocol : ImmutableList.of("http", "https")) {
// Respect proxy settings already supplied on the command line / environment.
if (areProxyPropertiesSet(protocol)) {
continue;
}
settings.getProxies().stream()
.filter(Proxy::isActive)
.filter(proxy -> protocol.equals(proxy.getProtocol()))
.findFirst()
.ifPresent(proxies::add);
}
if (proxies.isEmpty()) {
return;
}
SettingsDecryptionRequest request = new DefaultSettingsDecryptionRequest().setProxies(proxies);
SettingsDecryptionResult result = decrypter.decrypt(request);
for (SettingsProblem problem : result.getProblems()) {
if (problem.getSeverity() == SettingsProblem.Severity.ERROR
|| problem.getSeverity() == SettingsProblem.Severity.FATAL) {
throw new MojoExecutionException(
"Unable to decrypt proxy info from settings.xml: " + problem);
}
}
result.getProxies().forEach(MavenSettingsProxyProvider::setProxyProperties);
}
|
// With only http proxies configured, the first active one must be applied to
// http.proxyHost and https must remain unset.
@Test
public void testActivateHttpAndHttpsProxies_firstActiveHttpProxy() throws MojoExecutionException {
MavenSettingsProxyProvider.activateHttpAndHttpsProxies(
httpOnlyProxySettings, settingsDecrypter);
Assert.assertEquals("proxy2.example.com", System.getProperty("http.proxyHost"));
Assert.assertNull(System.getProperty("https.proxyHost"));
}
|
// Overridden without behavior change; delegates directly to the superclass
// implementation (presumably to narrow/republish the inherited contract — confirm
// against the class's Javadoc).
@Override
public void isNotEqualTo(@Nullable Object expected) {
super.isNotEqualTo(expected);
}
|
// isNotEqualTo on element-wise identical arrays (including NaN and signed zeros)
// must fail, since the arrays compare equal under array equality semantics.
@Test
public void isNotEqualTo_WithoutToleranceParameter_NaN_plusZero_FailEquals() {
expectFailureWhenTestingThat(
array(2.2d, 5.4d, POSITIVE_INFINITY, NEGATIVE_INFINITY, NaN, 0.0, -0.0))
.isNotEqualTo(array(2.2d, 5.4d, POSITIVE_INFINITY, NEGATIVE_INFINITY, NaN, 0.0, -0.0));
}
|
/**
 * Injects schema information into CREATE SOURCE / CREATE AS SELECT statements;
 * all other statement types pass through unchanged. KsqlStatementExceptions
 * propagate as-is; other KsqlExceptions are wrapped with the statement text for
 * better error reporting.
 */
@SuppressWarnings("unchecked")
@Override
public <T extends Statement> ConfiguredStatement<T> inject(
final ConfiguredStatement<T> statement
) {
if (!(statement.getStatement() instanceof CreateSource)
&& !(statement.getStatement() instanceof CreateAsSelect)) {
return statement;
}
try {
if (statement.getStatement() instanceof CreateSource) {
final ConfiguredStatement<CreateSource> createStatement =
(ConfiguredStatement<CreateSource>) statement;
// Empty Optional means no injection was needed; keep the original statement.
return (ConfiguredStatement<T>) forCreateStatement(createStatement).orElse(createStatement);
} else {
final ConfiguredStatement<CreateAsSelect> createStatement =
(ConfiguredStatement<CreateAsSelect>) statement;
return (ConfiguredStatement<T>) forCreateAsStatement(createStatement).orElse(
createStatement);
}
} catch (final KsqlStatementException e) {
throw e;
} catch (final KsqlException e) {
throw new KsqlStatementException(
ErrorMessageUtil.buildErrorMessage(e),
statement.getMaskedStatementText(),
e.getCause());
}
}
|
// Supplying VALUE_SCHEMA_ID with a value format that cannot infer schemas
// (DELIMITED) must be rejected with a descriptive error.
@Test
public void shouldThrowIfValueFormatDoesNotSupportSchemaIdInference() {
// Given
givenKeyButNotValueInferenceSupported(
ImmutableMap.of("value_schema_id", new IntegerLiteral(123),
"WRAP_SINGLE_VALUE", new BooleanLiteral(true)));
when(cs.getElements()).thenReturn(SOME_KEY_ELEMENTS_STREAM);
// When:
final Exception e = assertThrows(
KsqlException.class,
() -> injector.inject(csStatement)
);
// Then:
assertThat(e.getMessage(),
containsString("VALUE_FORMAT should support schema inference when VALUE_SCHEMA_ID is provided. "
+ "Current format is DELIMITED."));
}
|
/**
 * Resolves a human-readable kind description for a filename.
 * Looks up by extension when one is present, otherwise by the full filename;
 * a blank lookup result falls back to the localized "Unknown".
 */
@Override
public String getKind(final String filename) {
// Refactor: the original duplicated the blank-kind fallback in both branches;
// a single lookup key collapses the two paths without changing behavior.
final String extension = Path.getExtension(filename);
final String kind = this.kind(StringUtils.isBlank(extension) ? filename : extension);
if(StringUtils.isBlank(kind)) {
return LocaleFactory.localizedString("Unknown");
}
return kind;
}
|
// A bare extension-less name must still resolve to some non-null kind string.
@Test
public void testGetKindWithoutExtension() {
assertNotNull(new LaunchServicesFileDescriptor().getKind("txt"));
}
|
/** Creates a JobControl backed by the given state store; state must be non-null. */
public JobControl(JobControlState state) {
this.state = Objects.requireNonNull(state);
}
|
/**
 * Exercises job registration, active-flag toggling through the state store,
 * on-demand runs, and that on-demand runs ignore the inactive flag.
 */
@Test
public void testJobControl() {
JobControlStateMock state = new JobControlStateMock();
JobControl jobControl = new JobControl(state);
String job1 = "Job1";
String job2 = "Job2";
// Constructing the maintainers registers their jobs with jobControl.
TestMaintainer maintainer1 = new TestMaintainer(job1, jobControl, new NoopJobMetrics());
TestMaintainer maintainer2 = new TestMaintainer(job2, jobControl, new NoopJobMetrics());
assertEquals(2, jobControl.jobs().size());
assertTrue(jobControl.jobs().contains(job1));
assertTrue(jobControl.jobs().contains(job2));
assertTrue(jobControl.isActive(job1));
assertTrue(jobControl.isActive(job2));
// Toggling each job's flag in the state store must be reflected independently.
state.setActive(job1, false);
assertFalse(jobControl.isActive(job1));
assertTrue(jobControl.isActive(job2));
state.setActive(job2, false);
assertFalse(jobControl.isActive(job1));
assertFalse(jobControl.isActive(job2));
state.setActive(job1, true);
assertTrue(jobControl.isActive(job1));
assertFalse(jobControl.isActive(job2));
state.setActive(job2, true);
assertTrue(jobControl.isActive(job1));
assertTrue(jobControl.isActive(job2));
// Run jobs on-demand
jobControl.run(job1);
jobControl.run(job1);
assertEquals(2, maintainer1.totalRuns());
jobControl.run(job2);
assertEquals(1, maintainer2.totalRuns());
// Running jobs on-demand ignores inactive flag
state.setActive(job1, false);
jobControl.run(job1);
assertEquals(3, maintainer1.totalRuns());
}
|
/**
 * Begins a new epoch for this journal. Verifies the journal is formatted and
 * the namespace matches, rejects epochs not strictly greater than the last
 * promise, then persists the new promise, aborts any in-progress segment, and
 * reports the first txid of the latest edits segment (if any) so the caller
 * can run recovery.
 *
 * @throws IOException if unformatted, namespace-mismatched, or epoch is stale
 */
synchronized NewEpochResponseProto newEpoch(
NamespaceInfo nsInfo, long epoch) throws IOException {
checkFormatted();
storage.checkConsistentNamespace(nsInfo);
// Check that the new epoch being proposed is in fact newer than
// any other that we've promised.
if (epoch <= getLastPromisedEpoch()) {
throw new IOException("Proposed epoch " + epoch + " <= last promise " +
getLastPromisedEpoch() + " ; journal id: " + journalId);
}
updateLastPromisedEpoch(epoch);
abortCurSegment();
NewEpochResponseProto.Builder builder =
NewEpochResponseProto.newBuilder();
EditLogFile latestFile = scanStorageForLatestEdits();
if (latestFile != null) {
builder.setLastSegmentTxId(latestFile.getFirstTxId());
}
return builder.build();
}
|
// newEpoch with a mismatched namespace must be rejected with an
// "Incompatible namespaceID" IOException.
@Test (timeout = 10000)
public void testNamespaceVerification() throws Exception {
journal.newEpoch(FAKE_NSINFO, 1);
try {
journal.newEpoch(FAKE_NSINFO_2, 2);
fail("Did not fail newEpoch() when namespaces mismatched");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains(
"Incompatible namespaceID", ioe);
}
}
|
/**
 * Parses game/spam chat messages to keep tracked item charges in sync and
 * to fire break/activation notifications for charge-bearing jewellery.
 *
 * <p>Fix: {@code client.getItemContainer(...)} may return {@code null} when
 * a container has not been loaded; the binding-necklace branch dereferenced
 * equipment without a null check, and the ring-of-forging branch dereferenced
 * inventory without one. Both are now guarded.
 */
@Subscribe
public void onChatMessage(ChatMessage event)
{
    if (event.getType() == ChatMessageType.GAMEMESSAGE || event.getType() == ChatMessageType.SPAM)
    {
        String message = Text.removeTags(event.getMessage());
        Matcher dodgyCheckMatcher = DODGY_CHECK_PATTERN.matcher(message);
        Matcher dodgyProtectMatcher = DODGY_PROTECT_PATTERN.matcher(message);
        Matcher dodgyBreakMatcher = DODGY_BREAK_PATTERN.matcher(message);
        Matcher bindingNecklaceCheckMatcher = BINDING_CHECK_PATTERN.matcher(message);
        Matcher bindingNecklaceUsedMatcher = BINDING_USED_PATTERN.matcher(message);
        Matcher ringOfForgingCheckMatcher = RING_OF_FORGING_CHECK_PATTERN.matcher(message);
        Matcher amuletOfChemistryCheckMatcher = AMULET_OF_CHEMISTRY_CHECK_PATTERN.matcher(message);
        Matcher amuletOfChemistryUsedMatcher = AMULET_OF_CHEMISTRY_USED_PATTERN.matcher(message);
        Matcher amuletOfChemistryBreakMatcher = AMULET_OF_CHEMISTRY_BREAK_PATTERN.matcher(message);
        Matcher amuletOfBountyCheckMatcher = AMULET_OF_BOUNTY_CHECK_PATTERN.matcher(message);
        Matcher amuletOfBountyUsedMatcher = AMULET_OF_BOUNTY_USED_PATTERN.matcher(message);
        Matcher chronicleAddMatcher = CHRONICLE_ADD_PATTERN.matcher(message);
        Matcher chronicleUseAndCheckMatcher = CHRONICLE_USE_AND_CHECK_PATTERN.matcher(message);
        Matcher slaughterActivateMatcher = BRACELET_OF_SLAUGHTER_ACTIVATE_PATTERN.matcher(message);
        Matcher slaughterCheckMatcher = BRACELET_OF_SLAUGHTER_CHECK_PATTERN.matcher(message);
        Matcher expeditiousActivateMatcher = EXPEDITIOUS_BRACELET_ACTIVATE_PATTERN.matcher(message);
        Matcher expeditiousCheckMatcher = EXPEDITIOUS_BRACELET_CHECK_PATTERN.matcher(message);
        Matcher bloodEssenceCheckMatcher = BLOOD_ESSENCE_CHECK_PATTERN.matcher(message);
        Matcher bloodEssenceExtractMatcher = BLOOD_ESSENCE_EXTRACT_PATTERN.matcher(message);
        Matcher braceletOfClayCheckMatcher = BRACELET_OF_CLAY_CHECK_PATTERN.matcher(message);
        if (message.contains(RING_OF_RECOIL_BREAK_MESSAGE))
        {
            notifier.notify(config.recoilNotification(), "Your Ring of Recoil has shattered");
        }
        else if (dodgyBreakMatcher.find())
        {
            notifier.notify(config.dodgyNotification(), "Your dodgy necklace has crumbled to dust.");
            updateDodgyNecklaceCharges(MAX_DODGY_CHARGES);
        }
        else if (dodgyCheckMatcher.find())
        {
            updateDodgyNecklaceCharges(Integer.parseInt(dodgyCheckMatcher.group(1)));
        }
        else if (dodgyProtectMatcher.find())
        {
            updateDodgyNecklaceCharges(Integer.parseInt(dodgyProtectMatcher.group(1)));
        }
        else if (amuletOfChemistryCheckMatcher.find())
        {
            updateAmuletOfChemistryCharges(Integer.parseInt(amuletOfChemistryCheckMatcher.group(1)));
        }
        else if (amuletOfChemistryUsedMatcher.find())
        {
            final String match = amuletOfChemistryUsedMatcher.group(1);
            int charges = 1;
            if (!match.equals("one"))
            {
                charges = Integer.parseInt(match);
            }
            updateAmuletOfChemistryCharges(charges);
        }
        else if (amuletOfChemistryBreakMatcher.find())
        {
            notifier.notify(config.amuletOfChemistryNotification(), "Your amulet of chemistry has crumbled to dust.");
            updateAmuletOfChemistryCharges(MAX_AMULET_OF_CHEMISTRY_CHARGES);
        }
        else if (amuletOfBountyCheckMatcher.find())
        {
            updateAmuletOfBountyCharges(Integer.parseInt(amuletOfBountyCheckMatcher.group(1)));
        }
        else if (amuletOfBountyUsedMatcher.find())
        {
            updateAmuletOfBountyCharges(Integer.parseInt(amuletOfBountyUsedMatcher.group(1)));
        }
        else if (message.equals(AMULET_OF_BOUNTY_BREAK_TEXT))
        {
            updateAmuletOfBountyCharges(MAX_AMULET_OF_BOUNTY_CHARGES);
        }
        else if (message.contains(BINDING_BREAK_TEXT))
        {
            notifier.notify(config.bindingNotification(), BINDING_BREAK_TEXT);
            // This chat message triggers before the used message so add 1 to the max charges to ensure proper sync
            updateBindingNecklaceCharges(MAX_BINDING_CHARGES + 1);
        }
        else if (bindingNecklaceUsedMatcher.find())
        {
            final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT);
            // Equipment container may not be loaded yet; guard against NPE.
            if (equipment != null && equipment.contains(ItemID.BINDING_NECKLACE))
            {
                updateBindingNecklaceCharges(getItemCharges(ItemChargeConfig.KEY_BINDING_NECKLACE) - 1);
            }
        }
        else if (bindingNecklaceCheckMatcher.find())
        {
            final String match = bindingNecklaceCheckMatcher.group(1);
            int charges = 1;
            if (!match.equals("one"))
            {
                charges = Integer.parseInt(match);
            }
            updateBindingNecklaceCharges(charges);
        }
        else if (ringOfForgingCheckMatcher.find())
        {
            final String match = ringOfForgingCheckMatcher.group(1);
            int charges = 1;
            if (!match.equals("one"))
            {
                charges = Integer.parseInt(match);
            }
            updateRingOfForgingCharges(charges);
        }
        else if (message.equals(RING_OF_FORGING_USED_TEXT) || message.equals(RING_OF_FORGING_VARROCK_PLATEBODY))
        {
            final ItemContainer inventory = client.getItemContainer(InventoryID.INVENTORY);
            final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT);
            // Determine if the player smelted with a Ring of Forging equipped.
            if (equipment == null)
            {
                return;
            }
            // The inventory container may also be absent; only consult it when
            // present (it is only needed for the Varrock platebody message).
            if (equipment.contains(ItemID.RING_OF_FORGING)
                && (message.equals(RING_OF_FORGING_USED_TEXT) || (inventory != null && inventory.count(ItemID.IRON_ORE) > 1)))
            {
                int charges = Ints.constrainToRange(getItemCharges(ItemChargeConfig.KEY_RING_OF_FORGING) - 1, 0, MAX_RING_OF_FORGING_CHARGES);
                updateRingOfForgingCharges(charges);
            }
        }
        else if (message.equals(RING_OF_FORGING_BREAK_TEXT))
        {
            notifier.notify(config.ringOfForgingNotification(), "Your ring of forging has melted.");
            // This chat message triggers before the used message so add 1 to the max charges to ensure proper sync
            updateRingOfForgingCharges(MAX_RING_OF_FORGING_CHARGES + 1);
        }
        else if (chronicleAddMatcher.find())
        {
            final String match = chronicleAddMatcher.group(1);
            if (match.equals("one"))
            {
                setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1);
            }
            else
            {
                setItemCharges(ItemChargeConfig.KEY_CHRONICLE, Integer.parseInt(match));
            }
        }
        else if (chronicleUseAndCheckMatcher.find())
        {
            setItemCharges(ItemChargeConfig.KEY_CHRONICLE, Integer.parseInt(chronicleUseAndCheckMatcher.group(1)));
        }
        else if (message.equals(CHRONICLE_ONE_CHARGE_TEXT))
        {
            setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1);
        }
        else if (message.equals(CHRONICLE_EMPTY_TEXT) || message.equals(CHRONICLE_NO_CHARGES_TEXT))
        {
            setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 0);
        }
        else if (message.equals(CHRONICLE_FULL_TEXT))
        {
            setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1000);
        }
        else if (slaughterActivateMatcher.find())
        {
            // No captured count means the bracelet broke (reset to max); a
            // captured count is the remaining charge.
            final String found = slaughterActivateMatcher.group(1);
            if (found == null)
            {
                updateBraceletOfSlaughterCharges(MAX_SLAYER_BRACELET_CHARGES);
                notifier.notify(config.slaughterNotification(), BRACELET_OF_SLAUGHTER_BREAK_TEXT);
            }
            else
            {
                updateBraceletOfSlaughterCharges(Integer.parseInt(found));
            }
        }
        else if (slaughterCheckMatcher.find())
        {
            updateBraceletOfSlaughterCharges(Integer.parseInt(slaughterCheckMatcher.group(1)));
        }
        else if (expeditiousActivateMatcher.find())
        {
            final String found = expeditiousActivateMatcher.group(1);
            if (found == null)
            {
                updateExpeditiousBraceletCharges(MAX_SLAYER_BRACELET_CHARGES);
                notifier.notify(config.expeditiousNotification(), EXPEDITIOUS_BRACELET_BREAK_TEXT);
            }
            else
            {
                updateExpeditiousBraceletCharges(Integer.parseInt(found));
            }
        }
        else if (expeditiousCheckMatcher.find())
        {
            updateExpeditiousBraceletCharges(Integer.parseInt(expeditiousCheckMatcher.group(1)));
        }
        else if (bloodEssenceCheckMatcher.find())
        {
            updateBloodEssenceCharges(Integer.parseInt(bloodEssenceCheckMatcher.group(1)));
        }
        else if (bloodEssenceExtractMatcher.find())
        {
            updateBloodEssenceCharges(getItemCharges(ItemChargeConfig.KEY_BLOOD_ESSENCE) - Integer.parseInt(bloodEssenceExtractMatcher.group(1)));
        }
        else if (message.contains(BLOOD_ESSENCE_ACTIVATE_TEXT))
        {
            updateBloodEssenceCharges(MAX_BLOOD_ESSENCE_CHARGES);
        }
        else if (braceletOfClayCheckMatcher.find())
        {
            updateBraceletOfClayCharges(Integer.parseInt(braceletOfClayCheckMatcher.group(1)));
        }
        else if (message.equals(BRACELET_OF_CLAY_USE_TEXT) || message.equals(BRACELET_OF_CLAY_USE_TEXT_TRAHAEARN))
        {
            final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT);
            // Determine if the player mined with a Bracelet of Clay equipped.
            if (equipment != null && equipment.contains(ItemID.BRACELET_OF_CLAY))
            {
                final ItemContainer inventory = client.getItemContainer(InventoryID.INVENTORY);
                // Charge is not used if only 1 inventory slot is available when mining in Prifddinas
                boolean ignore = inventory != null
                    && inventory.count() == 27
                    && message.equals(BRACELET_OF_CLAY_USE_TEXT_TRAHAEARN);
                if (!ignore)
                {
                    int charges = Ints.constrainToRange(getItemCharges(ItemChargeConfig.KEY_BRACELET_OF_CLAY) - 1, 0, MAX_BRACELET_OF_CLAY_CHARGES);
                    updateBraceletOfClayCharges(charges);
                }
            }
        }
        else if (message.equals(BRACELET_OF_CLAY_BREAK_TEXT))
        {
            notifier.notify(config.braceletOfClayNotification(), "Your bracelet of clay has crumbled to dust");
            updateBraceletOfClayCharges(MAX_BRACELET_OF_CLAY_CHARGES);
        }
    }
}
|
// Feeds the 3-dose amulet-of-chemistry break message through the plugin's
// chat handler; the message must be processed without throwing.
@Test
public void testChemistryBreak3()
{
    final ChatMessage breakMessage =
        new ChatMessage(null, ChatMessageType.GAMEMESSAGE, "", BREAK_AMULET_OF_CHEMISTRY_3_DOSES, "", 0);
    itemChargePlugin.onChatMessage(breakMessage);
}
|
/**
 * Initializes the etcd client and keep-alive lease from the discovery config.
 * Idempotent: a second call is a no-op once the client exists.
 */
@Override
public void init(final DiscoveryConfig config) {
    try {
        // Already initialized — nothing to do.
        if (this.etcdClient != null) {
            return;
        }
        Properties props = config.getProps();
        // Optional tuning knobs with sensible defaults (ms / seconds).
        this.timeout = Long.parseLong(props.getProperty("etcdTimeout", "3000"));
        this.ttl = Long.parseLong(props.getProperty("etcdTTL", "5"));
        // Server list is a comma-separated endpoint string.
        this.etcdClient = Client.builder().endpoints(config.getServerList().split(",")).build();
        LOGGER.info("Etcd Discovery Service initialize successfully");
        // Acquire the keep-alive lease only once per service lifetime.
        if (leaseId == 0) {
            initLease();
        }
    } catch (Exception e) {
        // Wrap anything (parse errors, connection failures) in the framework exception.
        LOGGER.error("Error initializing Etcd Discovery Service", e);
        throw new ShenyuException(e);
    }
}
|
/**
 * Exercises init(): lease acquisition, keep-alive observer callbacks, and
 * idempotency when the client already exists.
 *
 * <p>Fix: the {@code MockedStatic<Client>} was never closed, leaking the
 * static mock registration into every subsequent test in the JVM. It is now
 * scoped with try-with-resources.
 */
@Test
public void testInit() throws ExecutionException, InterruptedException, NoSuchFieldException, IllegalAccessException {
    setField(EtcdDiscoveryService.class, "etcdClient", null);
    final ClientBuilder builder = mock(ClientBuilder.class);
    when(builder.endpoints(anyString())).thenReturn(builder);
    when(builder.build()).thenReturn(etcdClient);
    // Static mocks must be closed, otherwise they stay registered for the
    // whole thread and corrupt later tests.
    try (MockedStatic<Client> client = mockStatic(Client.class)) {
        client.when(Client::builder).thenReturn(builder);
        final Lease lease = mock(Lease.class);
        final CompletableFuture<LeaseGrantResponse> leaseGrantFuture = mock(CompletableFuture.class);
        when(lease.grant(anyLong())).thenReturn(leaseGrantFuture);
        final LeaseGrantResponse leaseGrantResponse = mock(LeaseGrantResponse.class);
        when(leaseGrantFuture.get()).thenReturn(leaseGrantResponse);
        when(etcdClient.getLeaseClient()).thenReturn(lease);
        // Capture the keep-alive observer so we can drive its callbacks below.
        ArrayList<StreamObserver<LeaseKeepAliveResponse>> observerList = new ArrayList<>();
        doAnswer(invocationOnMock -> {
            observerList.add(invocationOnMock.getArgument(1));
            return lease;
        }).when(lease).keepAlive(anyLong(), any());
        final Properties props = new Properties();
        props.put("etcdTimeout", "3000");
        props.put("etcdTTL", "5");
        final DiscoveryConfig discoveryConfig = new DiscoveryConfig();
        discoveryConfig.setProps(props);
        discoveryConfig.setServerList("localhost:2379");
        etcdDiscoveryServiceUnderTest.init(discoveryConfig);
        // Drive the full observer lifecycle; none of these may propagate.
        final LeaseKeepAliveResponse leaseKeepAliveResponse = mock(LeaseKeepAliveResponse.class);
        observerList.forEach(observer -> {
            observer.onNext(leaseKeepAliveResponse);
            observer.onError(new ShenyuException("test"));
            observer.onCompleted();
        });
        // Second init() short-circuits on the existing client, so the now-failing
        // lease grant must never be reached.
        doThrow(new InterruptedException("test")).when(leaseGrantFuture).get();
        assertDoesNotThrow(() -> etcdDiscoveryServiceUnderTest.init(discoveryConfig));
    }
}
|
/**
 * Statements the pool runs once on every freshly opened connection.
 * Returns the dialect's shared constant list.
 */
@Override
public List<String> getConnectionInitStatements() {
    return INIT_STATEMENTS;
}
|
// The dialect must expose exactly the PostgreSQL init-statement constant.
@Test
void should_set_connection_properties() {
    assertThat(underTest.getConnectionInitStatements()).isEqualTo(PostgreSql.INIT_STATEMENTS);
}
|
/** Delegates the frozen state to the wrapped writer. */
@Override
public boolean isFrozen() {
    return writer.isFrozen();
}
|
// Closing a writer that was never frozen must close the delegate but must
// NOT invoke the freeze-notification callback.
@Test
void testCloseWithoutFreeze() {
    NoopRawSnapshotWriter underlying = new NoopRawSnapshotWriter();
    AtomicBoolean callbackFired = new AtomicBoolean(false);
    Consumer<OffsetAndEpoch> onFrozen = snapshotId -> callbackFired.set(true);
    try (RawSnapshotWriter writer = new NotifyingRawSnapshotWriter(underlying, onFrozen)) {
        // Intentionally empty: close without ever freezing.
    }
    assertFalse(callbackFired.get());
    assertFalse(underlying.isFrozen());
    assertTrue(underlying.closed);
}
|
/**
 * Convenience overload: wraps a single command and defaults the target OS
 * to Linux before delegating to the list-based variant.
 */
public static List<String> scriptCommands(List<String> interpreter, List<String> beforeCommands, String command) {
    return scriptCommands(interpreter, beforeCommands, List.of(command), TargetOS.LINUX);
}
|
// The interpreter line comes first; before-commands and the command are
// joined with a newline into the second entry.
@Test
void scriptCommands() {
    var commands = ScriptService.scriptCommands(List.of("interpreter"), List.of("beforeCommand"), List.of("command"));
    assertThat(commands, hasSize(2));
    assertThat(commands.getFirst(), is("interpreter"));
    assertThat(commands.get(1), is("beforeCommand\ncommand"));
}
|
/** Logs at DEBUG level after interpolating printf-style args into the message. */
@Override
public void d(String tag, String message, Object... args) {
    Log.d(tag, formatString(message, args));
}
|
// Format arguments must be interpolated before the message reaches the
// underlying platform logger.
@Test
public void debugLoggedCorrectly() {
    logger.d(tag, "Hello %s", "World");
    assertLogged(DEBUG, tag, "Hello World", null);
}
|
/**
 * Returns the effective config of a topic, delegating with retries enabled
 * (the boolean flag — see topicConfig — controls retry-on-transient-error).
 */
@Override
public Map<String, String> getTopicConfig(final String topicName) {
    return topicConfig(topicName, true);
}
|
// A transient DisconnectException from describeConfigs must be retried:
// the first answer fails, the second succeeds, and exactly two attempts
// are expected.
@Test
public void shouldHandleRetryableGetTopicConfigError() {
    // Given:
    givenTopicConfigs(
        "fred",
        overriddenConfigEntry(TopicConfig.RETENTION_MS_CONFIG, "12345"),
        defaultConfigEntry(TopicConfig.COMPRESSION_TYPE_CONFIG, "producer")
    );
    // First call throws a retryable error, second call returns normally.
    when(adminClient.describeConfigs(any()))
        .thenAnswer(describeConfigsResult(new DisconnectException()))
        .thenAnswer(describeConfigsResult());
    // When:
    kafkaTopicClient.getTopicConfig("fred");
    // Then:
    verify(adminClient, times(2)).describeConfigs(any());
}
|
/** Human-readable analyzer name shown in reports and logs. */
@Override
public String getName() {
    return "Yarn Audit Analyzer";
}
|
// The analyzer advertises itself under a fixed, human-readable name.
@Test
public void testGetName() {
    assertThat(new YarnAuditAnalyzer().getName(), is("Yarn Audit Analyzer"));
}
|
/**
 * Adds a value to the histogram: bumps its segment's bucket count and
 * records it in the bounded recent-additions history.
 *
 * <p>Fix: the eviction check used {@code >} before pushing, which let the
 * history grow to {@code MAX_HISTORY_SIZE + 1}; {@code >=} caps it at
 * exactly {@code MAX_HISTORY_SIZE}.
 *
 * @param value value to record; segmentation may throw for out-of-range values
 */
public void add(T value) {
    int i = segmenter.segment(value);
    counts[i]++;
    totalCount++;
    // Evict the oldest entry before pushing so the deque never exceeds the cap.
    if (this.recentAdditions.size() >= Histogram.MAX_HISTORY_SIZE) {
        this.recentAdditions.pollLast();
    }
    this.recentAdditions.push(value);
}
|
// A value below the lowest segment boundary has no bucket: add() must fail
// with a SegmenterException.
@Test
void badValue() {
    Segmenter<String, Long> segmenter = new Histogram.TreeMapSegmenter<>(
        new TreeSet<>(Arrays.asList(0L, 1L, 2L, 4L, 8L, 16L)));
    Histogram<String, Long> histogram = new Histogram<>(segmenter);
    assertThrows(Histogram.SegmenterException.class, () -> histogram.add(-1L));
}
|
/**
 * Subtracts b from element (i, j) in place and returns the updated value.
 */
public float sub(int i, int j, float b) {
    return A[index(i, j)] -= b;
}
|
// Element-wise matrix subtraction: a.sub(b) must mutate a into A - B,
// verified against the precomputed expectation C within float tolerance.
@Test
public void testSub() {
    System.out.println("sub");
    float[][] A = {
        { 0.7220180f, 0.07121225f, 0.6881997f},
        {-0.2648886f, -0.89044952f, 0.3700456f},
        {-0.6391588f, 0.44947578f, 0.6240573f}
    };
    float[][] B = {
        {0.6881997f, -0.07121225f, 0.7220180f},
        {0.3700456f, 0.89044952f, -0.2648886f},
        {0.6240573f, -0.44947578f, -0.6391588f}
    };
    // C = A - B, precomputed.
    float[][] C = {
        { 0.0338183f, 0.1424245f, -0.0338183f},
        {-0.6349342f, -1.7808990f, 0.6349342f},
        {-1.2632161f, 0.8989516f, 1.2632161f}
    };
    Matrix a = Matrix.of(A);
    Matrix b = Matrix.of(B);
    a.sub(b);
    assertTrue(MathEx.equals(C, a.toArray(), 1E-6f));
}
|
/**
 * Sets the task type, enforcing the DB column limit of 40 characters.
 * A null argument fails with NullPointerException (from s.length()),
 * which callers/tests rely on.
 *
 * @return this, for chaining
 */
public CeQueueDto setTaskType(String s) {
    checkArgument(s.length() <= 40, "Value of task type is too long: %s", s);
    this.taskType = s;
    return this;
}
|
// Null task types are rejected with an NPE (raised by s.length() inside the setter).
@Test
void setTaskType_throws_NPE_if_argument_is_null() {
    assertThatThrownBy(() -> underTest.setTaskType(null))
        .isInstanceOf(NullPointerException.class);
}
|
/**
 * Enables or disables a compute node after validating the transition, then
 * persists the new instance state.
 *
 * <p>Improvement: the requested state string was compared twice; it is now
 * evaluated once into a boolean.
 */
@Override
public void executeUpdate(final SetComputeNodeStateStatement sqlStatement, final ContextManager contextManager) {
    final boolean disabling = "DISABLE".equals(sqlStatement.getState());
    if (disabling) {
        checkDisablingIsValid(contextManager, sqlStatement.getInstanceId());
    } else {
        checkEnablingIsValid(contextManager, sqlStatement.getInstanceId());
    }
    contextManager.getPersistServiceFacade().getComputeNodePersistService().updateComputeNodeState(sqlStatement.getInstanceId(),
            disabling ? InstanceState.CIRCUIT_BREAK : InstanceState.OK);
}
|
// Enabling an instance ID that does not exist must fail validation with
// UnsupportedSQLOperationException before any state is persisted.
@Test
void assertExecuteUpdateWithNotExistsInstanceID() {
    assertThrows(UnsupportedSQLOperationException.class, () -> executor.executeUpdate(new SetComputeNodeStateStatement("ENABLE", "instanceID"), mock(ContextManager.class, RETURNS_DEEP_STUBS)));
}
|
/**
 * Returns a range iterator spanning every underlying store instance.
 * Store-migration failures are rethrown with a hint to re-discover the
 * store's location from the state metadata.
 */
@Override
public KeyValueIterator<K, V> range(final K from, final K to) {
    final NextIteratorFunction<K, V, ReadOnlyKeyValueStore<K, V>> nextIteratorFunction = store -> {
        try {
            return store.range(from, to);
        } catch (final InvalidStateStoreException e) {
            throw new InvalidStateStoreException("State store is not available anymore and may have been migrated to another instance; please re-discover its location from the state metadata.");
        }
    };
    return new DelegatingPeekingKeyValueIterator<>(
        storeName,
        new CompositeKeyValueIterator<>(
            storeProvider.stores(storeName, storeType).iterator(),
            nextIteratorFunction));
}
|
// Composite range iterators are read-only views over the underlying stores;
// remove() must be rejected.
@Test
public void shouldThrowUnsupportedOperationExceptionWhileRange() {
    stubOneUnderlying.put("a", "1");
    stubOneUnderlying.put("b", "1");
    try (final KeyValueIterator<String, String> keyValueIterator = theStore.range("a", "b")) {
        assertThrows(UnsupportedOperationException.class, keyValueIterator::remove);
    }
}
|
/** Number of columns in this matrix. */
@Override
public int ncol() {
    return n;
}
|
// The sparse fixture has exactly 3 columns.
@Test
public void testNcols() {
    System.out.println("ncol");
    assertEquals(3, sparse.ncol());
}
|
/**
 * This runner handles jobs whose target method is static but whose job
 * details do NOT carry a static field name (those go to a different runner).
 */
@Override
public boolean supports(Job job) {
    JobDetails jobDetails = job.getJobDetails();
    return !jobDetails.hasStaticFieldName() && Modifier.isStatic(getJobMethod(jobDetails).getModifiers());
}
|
// System.out.println jobs reference a static FIELD (System.out), so the
// static-method runner must decline them.
@Test
void doesNotSupportJobIfJobHasStaticField() {
    Job job = anEnqueuedJob()
        .withJobDetails(systemOutPrintLnJobDetails("This is a test"))
        .build();
    assertThat(backgroundStaticJobWithoutIocRunner.supports(job)).isFalse();
}
|
/**
 * Builds the full JobManager Kubernetes specification by threading the pod
 * template through an ordered chain of step decorators, collecting any
 * accompanying resources (services, config maps, secrets) along the way.
 *
 * <p>Decorator order matters: each decorator sees the pod as transformed by
 * its predecessors.
 *
 * @param podTemplate base pod to decorate; copied so the input is not mutated
 * @param kubernetesJobManagerParameters resolved JobManager parameters
 * @return the deployment plus all accompanying Kubernetes resources
 * @throws IOException if a decorator fails to build its resources
 */
public static KubernetesJobManagerSpecification buildKubernetesJobManagerSpecification(
        FlinkPod podTemplate, KubernetesJobManagerParameters kubernetesJobManagerParameters)
        throws IOException {
    FlinkPod flinkPod = Preconditions.checkNotNull(podTemplate).copy();
    List<HasMetadata> accompanyingResources = new ArrayList<>();
    final List<KubernetesStepDecorator> stepDecorators =
            new ArrayList<>(
                    Arrays.asList(
                            new InitJobManagerDecorator(kubernetesJobManagerParameters),
                            new EnvSecretsDecorator(kubernetesJobManagerParameters),
                            new MountSecretsDecorator(kubernetesJobManagerParameters),
                            new CmdJobManagerDecorator(kubernetesJobManagerParameters),
                            new InternalServiceDecorator(kubernetesJobManagerParameters),
                            new ExternalServiceDecorator(kubernetesJobManagerParameters)));
    Configuration configuration = kubernetesJobManagerParameters.getFlinkConfiguration();
    // Hadoop/Kerberos mounts are opt-in via configuration flags.
    if (configuration.get(KUBERNETES_HADOOP_CONF_MOUNT_DECORATOR_ENABLED)) {
        stepDecorators.add(new HadoopConfMountDecorator(kubernetesJobManagerParameters));
    }
    if (configuration.get(KUBERNETES_KERBEROS_MOUNT_DECORATOR_ENABLED)) {
        stepDecorators.add(new KerberosMountDecorator(kubernetesJobManagerParameters));
    }
    // Flink conf and pod-template mounts come last so they see the final pod shape.
    stepDecorators.addAll(
            Arrays.asList(
                    new FlinkConfMountDecorator(kubernetesJobManagerParameters),
                    new PodTemplateMountDecorator(kubernetesJobManagerParameters)));
    for (KubernetesStepDecorator stepDecorator : stepDecorators) {
        flinkPod = stepDecorator.decorateFlinkPod(flinkPod);
        accompanyingResources.addAll(stepDecorator.buildAccompanyingKubernetesResources());
    }
    final Deployment deployment =
            createJobManagerDeployment(flinkPod, kubernetesJobManagerParameters);
    return new KubernetesJobManagerSpecification(deployment, accompanyingResources);
}
|
// The generated flink-conf ConfigMap must carry both logging config files
// and a flink-conf whose entry-point class round-trips correctly.
@Test
void testFlinkConfConfigMap() throws IOException {
    kubernetesJobManagerSpecification =
            KubernetesJobManagerFactory.buildKubernetesJobManagerSpecification(
                    flinkPod, kubernetesJobManagerParameters);
    final ConfigMap resultConfigMap =
            (ConfigMap)
                    getConfigMapList(
                                    FlinkConfMountDecorator.getFlinkConfConfigMapName(
                                            CLUSTER_ID))
                            .get(0);
    assertThat(resultConfigMap.getMetadata().getLabels()).hasSize(2);
    // Expect exactly log4j config, logback config, and the flink conf file.
    final Map<String, String> resultDatas = resultConfigMap.getData();
    assertThat(resultDatas).hasSize(3);
    assertThat(resultDatas.get(CONFIG_FILE_LOG4J_NAME)).isEqualTo("some data");
    assertThat(resultDatas.get(CONFIG_FILE_LOGBACK_NAME)).isEqualTo("some data");
    final Configuration resultFlinkConfig =
            KubernetesTestUtils.loadConfigurationFromString(
                    resultDatas.get(FLINK_CONF_FILENAME));
    assertThat(resultFlinkConfig.get(KubernetesConfigOptionsInternal.ENTRY_POINT_CLASS))
            .isEqualTo(ENTRY_POINT_CLASS);
}
|
/**
 * Creates, initializes and starts an audit manager. When auditing is
 * disabled by configuration a no-op stub is returned instead, so callers
 * always get a started manager.
 *
 * @param conf configuration (consulted for the audit-enabled flag)
 * @param iostatistics statistics store; required when auditing is enabled
 * @return a started audit manager (real or stub)
 */
public static AuditManagerS3A createAndStartAuditManager(
    Configuration conf,
    IOStatisticsStore iostatistics) {
  AuditManagerS3A auditManager;
  if (conf.getBoolean(AUDIT_ENABLED, AUDIT_ENABLED_DEFAULT)) {
    auditManager = new ActiveAuditManagerS3A(
        requireNonNull(iostatistics));
  } else {
    LOG.debug("auditing is disabled");
    auditManager = stubAuditManager();
  }
  // Lifecycle: init must precede start for both real and stub managers.
  auditManager.init(conf);
  auditManager.start();
  LOG.debug("Started Audit Manager {}", auditManager);
  return auditManager;
}
|
// A noop-audit manager must expose exactly one execution interceptor, and
// driving the pre-request interceptor sequence must attach a span to the
// execution attributes (a no-op span, hence not "valid").
@Test
public void testSingleExecutionInterceptor() throws Throwable {
    AuditManagerS3A manager = AuditIntegration.createAndStartAuditManager(
        noopAuditConfig(),
        ioStatistics);
    List<ExecutionInterceptor> interceptors
        = manager.createExecutionInterceptors();
    assertThat(interceptors)
        .hasSize(1);
    ExecutionInterceptor interceptor = interceptors.get(0);
    RequestFactory requestFactory = RequestFactoryImpl.builder()
        .withBucket("bucket")
        .build();
    HeadObjectRequest.Builder requestBuilder =
        requestFactory.newHeadObjectRequestBuilder("/");
    // The interceptor must also implement the audit callback interface.
    assertThat(interceptor instanceof AWSAuditEventCallbacks).isTrue();
    ((AWSAuditEventCallbacks)interceptor).requestCreated(requestBuilder);
    HeadObjectRequest request = requestBuilder.build();
    SdkHttpRequest httpRequest = SdkHttpRequest.builder()
        .protocol("https")
        .host("test")
        .method(SdkHttpMethod.HEAD)
        .build();
    ExecutionAttributes attributes = ExecutionAttributes.builder().build();
    InterceptorContext context = InterceptorContext.builder()
        .request(request)
        .httpRequest(httpRequest)
        .build();
    // test the basic pre-request sequence while avoiding
    // the complexity of recreating the full sequence
    // (and probably getting it wrong)
    interceptor.beforeExecution(context, attributes);
    interceptor.modifyRequest(context, attributes);
    interceptor.beforeMarshalling(context, attributes);
    interceptor.afterMarshalling(context, attributes);
    interceptor.modifyHttpRequest(context, attributes);
    interceptor.beforeTransmission(context, attributes);
    AuditSpanS3A span = attributes.getAttribute(AUDIT_SPAN_EXECUTION_ATTRIBUTE);
    assertThat(span).isNotNull();
    // Noop auditing produces a span object, but not a *valid* one.
    assertThat(span.isValidSpan()).isFalse();
}
|
/**
 * Returns the server info payload. The version, cluster id and service id
 * are lazily-computed suppliers, so repeated calls reuse cached values.
 */
public EndpointResponse get() {
    return EndpointResponse.ok(new ServerInfo(
        appVersion.get(),
        kafkaClusterId.get(),
        ksqlServiceId.get(),
        serverStatus.get().toString()));
}
|
// The cluster id supplier must memoize: two resource calls may hit the
// admin client's describeCluster exactly once.
@Test
public void shouldGetKafkaClusterIdOnce() {
    // When:
    serverInfoResource.get();
    serverInfoResource.get();
    // Then:
    verify(adminClient).describeCluster();
    verifyNoMoreInteractions(adminClient);
}
|
/**
 * Decides whether two operation names refer to the same operation, trying
 * progressively looser comparisons: case-insensitive equality, OpenAPI
 * template ("/{id}") rewritten to Postman style ("/:id"), and finally the
 * candidate prefixed with an HTTP verb matching the reference as a regex.
 *
 * @param operationNameRef       reference operation name
 * @param operationNameCandidate candidate to compare against
 * @return true when the names are considered equivalent
 */
public static boolean areOperationsEquivalent(String operationNameRef, String operationNameCandidate) {
    // Cheapest check first: case-insensitive equality.
    if (operationNameRef.equalsIgnoreCase(operationNameCandidate)) {
        return true;
    }
    // The reference may use OpenAPI path templates; rewrite to Postman form
    // and compare again.
    if (operationNameRef.contains("/{")) {
        final String postmanStyleName = operationNameRef.replaceAll("/\\{", "/:").replace("}", "");
        if (postmanStyleName.equalsIgnoreCase(operationNameCandidate)) {
            return true;
        }
    }
    // Last resort: candidate may prefix the reference with a verb. The
    // reference is used as a regex and may be malformed, hence the guard.
    boolean equivalent = false;
    try {
        equivalent = operationNameCandidate.matches(OPERATION_NAME_EXPRESSION_PREFIX + operationNameRef);
    } catch (PatternSyntaxException pse) {
        log.warn("{}{} throws a PatternSyntaxException", OPERATION_NAME_EXPRESSION_PREFIX, operationNameRef);
    }
    return equivalent;
}
|
// Case-insensitive match, OpenAPI-template-to-Postman rewrite, and
// verb-prefixed candidate must all be treated as equivalent.
@Test
void testAreOperationsEquivalent() {
    assertTrue(PostmanUtil.areOperationsEquivalent("GET /PaStRiEs", "get /pastries"));
    assertTrue(PostmanUtil.areOperationsEquivalent("GET /PaStRiEs/{name}", "get /pastries/:name"));
    assertTrue(PostmanUtil.areOperationsEquivalent("recommendation", "POST recommendation"));
}
|
/**
 * Masks a value by replacing every character with a random substitute from
 * the class-appropriate replacement pool: ASCII uppercase, lowercase and
 * digits each map to their own pool; everything else maps to the special
 * pool. Null and empty inputs are returned unchanged.
 */
@Override
public String mask(final Object plainValue) {
    final String original = null == plainValue ? null : String.valueOf(plainValue);
    if (Strings.isNullOrEmpty(original)) {
        return original;
    }
    final char[] masked = original.toCharArray();
    for (int index = 0; index < masked.length; index++) {
        final char current = masked[index];
        if (current >= 'A' && current <= 'Z') {
            masked[index] = uppercaseLetterCodes.get(random.nextInt(uppercaseLetterCodes.size()));
        } else if (current >= 'a' && current <= 'z') {
            masked[index] = lowercaseLetterCodes.get(random.nextInt(lowercaseLetterCodes.size()));
        } else if (current >= '0' && current <= '9') {
            masked[index] = digitalCodes.get(random.nextInt(digitalCodes.size()));
        } else {
            masked[index] = specialCodes.get(random.nextInt(specialCodes.size()));
        }
    }
    return new String(masked);
}
|
// Each character class must be replaced by a character from the matching
// default pool: uppercase->uppercase, lowercase->lowercase, digit->digit,
// special->special.
@Test
void assertMaskWithEmptyProps() {
    GenericTableRandomReplaceAlgorithm maskAlgorithm = (GenericTableRandomReplaceAlgorithm) TypedSPILoader.getService(MaskAlgorithm.class, "GENERIC_TABLE_RANDOM_REPLACE", new Properties());
    assertThat(maskAlgorithm.mask("Ab1!").substring(0, 1), anyOf(Arrays.stream("ABCDEFGHIJKLMNOPQRSTUVWXYZ".split("")).map(CoreMatchers::is).collect(Collectors.toList())));
    assertThat(maskAlgorithm.mask("Ab1!").substring(1, 2), anyOf(Arrays.stream("abcdefghijklmnopqrstuvwxyz".split("")).map(CoreMatchers::is).collect(Collectors.toList())));
    assertThat(maskAlgorithm.mask("Ab1!").substring(2, 3), anyOf(Arrays.stream("0123456789".split("")).map(CoreMatchers::is).collect(Collectors.toList())));
    assertThat(maskAlgorithm.mask("Ab1!").substring(3, 4), anyOf(Arrays.stream("~!@#$%^&*:<>|".split("")).map(CoreMatchers::is).collect(Collectors.toList())));
}
|
/**
 * Reads Iceberg table metadata from the given location.
 *
 * <p>Fix: the previous {@code CompletableFuture.supplyAsync(...).join()}
 * blocked the calling thread anyway while also borrowing a ForkJoin
 * common-pool thread and wrapping parser failures in CompletionException;
 * the parser is now invoked directly.
 *
 * @param metadataLocation URI string of the metadata file (e.g. s3://...)
 * @return parsed table metadata
 */
public TableMetadata readTableMetadata(String metadataLocation) {
    URI metadataLocationUri = URI.create(metadataLocation);
    // TODO: cache fileIO
    FileIO fileIO = fileIOFactory.getFileIO(metadataLocationUri);
    return TableMetadataParser.read(fileIO, metadataLocation);
}
|
// End-to-end read through a mocked S3 client: upload a known metadata file,
// read it back via its s3:// location, and verify the table UUID.
@SneakyThrows
@Test
public void testGetTableMetadataFromS3() {
    when(mockFileIOFactory.getFileIO(any())).thenReturn(new S3FileIO(() -> mockS3Client));
    mockS3Client.createBucket(builder -> builder.bucket(TEST_BUCKET).build());
    String simpleMetadataJson =
        IOUtils.toString(
            Objects.requireNonNull(
                this.getClass()
                    .getResourceAsStream("/" + TEST_SIMPLE_ICEBERG_V1_METADATA_FILE_NAME)));
    mockS3Client.putObject(
        builder ->
            builder
                .bucket(TEST_BUCKET)
                .key(TEST_LOCATION + "/" + TEST_SIMPLE_ICEBERG_V1_METADATA_FILE_NAME)
                .build(),
        RequestBody.fromString(simpleMetadataJson));
    String metadataLocation =
        "s3://"
            + TEST_BUCKET
            + "/"
            + TEST_LOCATION
            + "/"
            + TEST_SIMPLE_ICEBERG_V1_METADATA_FILE_NAME;
    TableMetadata tableMetadata = metadataService.readTableMetadata(metadataLocation);
    // UUID fixed by the checked-in test metadata file.
    assertThat(tableMetadata.uuid()).isEqualTo("11111111-2222-3333-4444-555555555555");
}
|
/**
 * Verifies that content extraction is permitted by the document's access
 * permissions, throwing when it is not.
 *
 * @param metadata metadata carrying the access-permission flags
 * @throws AccessPermissionException when extraction (or, in
 *         accessibility-only mode, accessibility extraction) is forbidden
 */
public void check(Metadata metadata) throws AccessPermissionException {
    // Checking disabled entirely (e.g. the password granted full access).
    if (!needToCheck) {
        return;
    }
    // General extraction is permitted unless the flag is literally "false".
    if (!"false".equals(metadata.get(AccessPermissions.EXTRACT_CONTENT))) {
        return;
    }
    if (!allowExtractionForAccessibility) {
        throw new AccessPermissionException("Content extraction is not allowed.");
    }
    // Accessibility-only mode: permitted only when the document grants it.
    if ("true".equals(metadata.get(AccessPermissions.EXTRACT_FOR_ACCESSIBILITY))) {
        return;
    }
    throw new AccessPermissionException(
            "Content extraction for accessibility is not allowed.");
}
|
// In accessibility mode: extraction-forbidden + accessibility-allowed passes,
// but extraction-forbidden + accessibility-forbidden must throw.
@Test
public void testExtractOnlyForAccessibility() throws AccessPermissionException {
    Metadata m = getMetadata(false, true);
    //allow accessibility
    AccessChecker checker = new AccessChecker(true);
    checker.check(m);
    assertTrue(true, "no exception");
    boolean ex = false;
    try {
        // Neither extraction nor accessibility extraction allowed.
        m = getMetadata(false, false);
        checker.check(m);
    } catch (AccessPermissionException e) {
        ex = true;
    }
    assertTrue(ex, "correct exception");
}
|
/**
 * Builds the instance URI from host and port, using https when the backing
 * service instance is marked secure and http otherwise.
 */
@Override
public URI getUri() {
    final String scheme = microServiceInstance.isSecure() ? "https" : "http";
    return URI.create(String.format(Locale.ENGLISH, "%s://%s:%s", scheme, getHost(), getPort()));
}
|
// The URI scheme must follow the instance's secure flag: https when secure,
// http otherwise.
@Test
public void testScheme() {
    DiscoveryServiceInstance secureInstance = new DiscoveryServiceInstance(
        buildInstance(8001, true), serviceName);
    Assert.assertEquals(secureInstance.getUri().getScheme(), "https");
    DiscoveryServiceInstance plainInstance = new DiscoveryServiceInstance(
        buildInstance(8001, false), serviceName);
    Assert.assertEquals(plainInstance.getUri().getScheme(), "http");
}
|
/**
 * Synchronous put with TTL: delegates to the async variant and blocks for
 * the result (the previous value, per the map-cache contract).
 */
@Override
public V put(K key, V value, long ttl, TimeUnit unit) {
    return get(putAsync(key, value, ttl, unit));
}
|
// Ten TTL'd entries must report at least the expected lower bound of
// serialized in-memory size (exact size varies with codec/versions).
@Test
public void testSizeInMemory() {
    RMapCache<Integer, Integer> map = redisson.getMapCache("test");
    for (int i = 0; i < 10; i++) {
        map.put(i, i, 5, TimeUnit.SECONDS);
    }
    assertThat(map.sizeInMemory()).isGreaterThanOrEqualTo(466);
}
|
/**
 * Returns true when {@code left} contains {@code right}, after stripping a
 * surrounding pair of double quotes from {@code right} if present.
 *
 * <p>Fix: the quote-stripping branch required only startsWith/endsWith, so a
 * {@code right} of exactly {@code "} (one character) satisfied both and
 * {@code substring(1, 0)} threw StringIndexOutOfBoundsException; a length
 * guard now prevents that.
 *
 * @param left  candidate values; null yields false
 * @param right value to look for, optionally double-quoted
 */
public boolean match(List<String> left, String right) {
    if (Objects.isNull(left)) {
        return false;
    }
    if (right.length() > 1 && right.startsWith("\"") && right.endsWith("\"")) {
        right = right.substring(1, right.length() - 1);
    }
    return left.contains(right);
}
|
// Null list -> false; membership -> true; absent value -> false.
@Test
public void match() {
    ContainMatch containMatch = new ContainMatch();
    assertFalse(containMatch.match(null, "http.method:GET"));
    assertTrue(containMatch.match(Arrays.asList("http.method:GET", "http.method:POST"), "http.method:GET"));
    assertFalse(
        containMatch.match(Arrays.asList("http.method:GET", "http.method:POST"), "http.method:PUT"));
}
|
/**
 * Parses the field text into a LocalDate using the configured formatter.
 * Unparseable input is logged at WARNING and mapped to Optional.empty()
 * rather than propagating the exception.
 */
@Override
protected Optional<LocalDate> extractField(String s) {
    try {
        return Optional.of(LocalDate.parse(s, formatter));
    } catch (DateTimeParseException e) {
        logger.log(Level.WARNING, e.getParsedString());
        logger.log(Level.WARNING, String.format("Unable to parse date %s with formatter %s", s, formatter.toString()));
        return Optional.empty();
    }
}
|
// Two failure modes: an invalid format string must fail at construction
// time; a valid formatter with unparseable input must yield empty.
@Test
public void testInvalidBehaviour() {
    String notADateFormatString = "not-a-date-format-string";
    try {
        DateExtractor extractor = new DateExtractor("test","date", notADateFormatString);
        fail("Should have thrown on failing to parse the date format string");
    } catch (PropertyException e) {
        // pass
    }
    String isoFormat = "uuuu-MM-dd";
    DateExtractor extractor = new DateExtractor("test", "date", isoFormat);
    Optional<LocalDate> extractedMetadata = extractor.extractField("definitely-not-a-date");
    assertFalse(extractedMetadata.isPresent());
}
|
/**
 * Returns the next date in the range: the range start on the first call,
 * then the previous cursor advanced by the configured day increment.
 *
 * @throws NoSuchElementException when the iteration is exhausted
 */
@Override
public ZonedDateTime next() {
    if (!hasNext()) {
        throw new NoSuchElementException();
    }
    cursor = (cursor == null) ? start : cursor.plusDays(increment);
    return cursor;
}
|
// A forward (before -> after) iterator must yield consecutive days starting
// at the range's start date.
@Test
void nextAscendantTest() {
    List<ZonedDateTime> expected = Arrays.asList(getZonedDateTime(2021, 1, 1, 10, 15),
        getZonedDateTime(2021, 1, 2, 10, 15),
        getZonedDateTime(2021, 1, 3, 10, 15));
    ZonedDateTimeRangeIterator iterator = new ZonedDateTimeRangeIterator(before, after);
    IntStream.range(0, 3).forEach(i -> assertEquals(expected.get(i), iterator.next()));
}
|
/**
 * Parses a schema file with no import context (delegates with a null base).
 */
public ParseResult parse(File file) throws IOException, SchemaParseException {
    return parse(file, null);
}
|
// Parsing the JSON schema text from a Reader must produce the expected schema.
@Test
void testParseReader() throws IOException {
    Schema schema = new SchemaParser().parse(new StringReader(SCHEMA_JSON)).mainSchema();
    assertEquals(SCHEMA_REAL, schema);
}
|
/**
 * Loads all given keys via a single SQL query, keyed by the configured id
 * column. When the row has exactly one non-key column and single-column
 * mode is enabled, that column is returned directly as the value;
 * otherwise the row is converted to a GenericRecord.
 *
 * <p>Improvements: the single-column decision is invariant for the whole
 * result set and is now computed once instead of per row, and the id
 * extraction is no longer duplicated in both branches.
 */
@Override
public Map<K, V> loadAll(Collection<K> keys) {
    awaitSuccessfulInit();
    Object[] keysArray = keys.toArray();
    String sql = queries.loadAll(keys.size());
    try (SqlResult queryResult = sqlService.execute(sql, keysArray)) {
        // Invariant across all rows of this result set.
        boolean useSingleColumnValue =
                queryResult.getRowMetadata().getColumnCount() == 2 && genericMapStoreProperties.singleColumnAsValue;
        Iterator<SqlRow> it = queryResult.iterator();
        Map<K, V> result = new HashMap<>();
        while (it.hasNext()) {
            SqlRow sqlRow = it.next();
            K id = sqlRow.getObject(genericMapStoreProperties.idColumn);
            if (useSingleColumnValue) {
                // Single non-key column: return it directly as the value.
                result.put(id, sqlRow.getObject(1));
            } else {
                //noinspection unchecked
                V record = (V) toGenericRecord(sqlRow, genericMapStoreProperties);
                result.put(id, record);
            }
        }
        return result;
    }
}
|
// Loading a key with no matching row must yield an empty map, not null.
@Test
public void givenRowDoesNotExist_whenLoadAllWithSingleColumn_thenReturnEmptyMap() {
    objectProvider.createObject(mapName);
    mapLoaderSingleColumn = createMapLoaderSingleColumn();
    Map<Integer, String> names = mapLoaderSingleColumn.loadAll(newArrayList(0));
    assertThat(names).isEmpty();
}
|
/**
 * Satisfied when the reference indicator's value at {@code index} is a real
 * number and equals the lowest value over the look-back window of
 * {@code barCount} bars.
 */
@Override
public boolean isSatisfied(int index, TradingRecord tradingRecord) {
    final Num lowestValue = new LowestValueIndicator(ref, barCount).getValue(index);
    final Num currentValue = ref.getValue(index);
    final boolean satisfied = !currentValue.isNaN() && !lowestValue.isNaN() && currentValue.equals(lowestValue);
    traceIsSatisfied(index, satisfied);
    return satisfied;
}
|
// Checks the rule against a fixture series: satisfied exactly at indices
// where the value is the lowest of the look-back window.
@Test
public void isSatisfied() {
    assertTrue(rule.isSatisfied(0));
    assertTrue(rule.isSatisfied(1));
    assertFalse(rule.isSatisfied(2));
    assertTrue(rule.isSatisfied(3));
    assertFalse(rule.isSatisfied(4));
    assertTrue(rule.isSatisfied(5));
    assertFalse(rule.isSatisfied(6));
    assertFalse(rule.isSatisfied(7));
    assertFalse(rule.isSatisfied(8));
    assertTrue(rule.isSatisfied(9));
}
|
/**
 * Determines whether the given Java version string denotes Java 9 or later.
 * Accepts both the modern scheme ({@code "9"}, {@code "10"}, ...) and the
 * legacy scheme ({@code "1.9"}). Multi-component versions such as
 * {@code "9.0.1"} — which previously caused a {@link NumberFormatException} —
 * are decided by their major component; unparseable input yields {@code false}.
 *
 * @param targetVersion the version string to test; may be {@code null} or blank
 * @return {@code true} if the version is Java 9 or later
 */
public static boolean is9OrLater(final String targetVersion) {
    if (targetVersion == null || targetVersion.trim().isEmpty()) {
        return false;
    }
    try {
        // parseDouble avoids the boxing of Double.valueOf.
        final double v = Double.parseDouble(targetVersion);
        return (v >= 9) || (v < 2 && v >= 1.9);
    } catch (final NumberFormatException e) {
        // Versions with more than one dot ("9.0.1", "1.8.0_292") cannot be
        // parsed as a double; fall back to comparing the major component.
        final int dot = targetVersion.indexOf('.');
        if (dot > 0) {
            try {
                return Integer.parseInt(targetVersion.substring(0, dot).trim()) >= 9;
            } catch (final NumberFormatException ignored) {
                // fall through: not a numeric major component
            }
        }
        return false;
    }
}
|
// Exhaustive table of pre-9 and 9+ version strings, covering both the legacy
// "1.x" scheme and the modern single-number scheme.
@Test
public void testIs9OrLater() {
    assertThat(JavaVersion.is9OrLater(null), is(false));
    assertThat(JavaVersion.is9OrLater(""), is(false));
    assertThat(JavaVersion.is9OrLater("1.1"), is(false));
    assertThat(JavaVersion.is9OrLater("1.2"), is(false));
    assertThat(JavaVersion.is9OrLater("1.3"), is(false));
    assertThat(JavaVersion.is9OrLater("1.4"), is(false));
    assertThat(JavaVersion.is9OrLater("1.5"), is(false));
    assertThat(JavaVersion.is9OrLater("5"), is(false));
    assertThat(JavaVersion.is9OrLater("1.6"), is(false));
    assertThat(JavaVersion.is9OrLater("6"), is(false));
    assertThat(JavaVersion.is9OrLater("1.7"), is(false));
    assertThat(JavaVersion.is9OrLater("7"), is(false));
    assertThat(JavaVersion.is9OrLater("1.8"), is(false));
    assertThat(JavaVersion.is9OrLater("8"), is(false));
    assertThat(JavaVersion.is9OrLater("1.9"), is(true));
    assertThat(JavaVersion.is9OrLater("9"), is(true));
    assertThat(JavaVersion.is9OrLater("10"), is(true));
    assertThat(JavaVersion.is9OrLater("11"), is(true));
}
|
/**
 * Creates instance metadata matching the given instance type.
 *
 * @param instanceId instance identifier
 * @param instanceType instance type to dispatch on
 * @param attributes raw attribute string (e.g. {@code host@port} for proxies)
 * @param version product version
 * @return JDBC metadata for {@code JDBC} instances, proxy metadata otherwise
 */
public static InstanceMetaData create(final String instanceId, final InstanceType instanceType, final String attributes, final String version) {
    if (InstanceType.JDBC == instanceType) {
        return new JDBCInstanceMetaData(instanceId, attributes, version);
    }
    return new ProxyInstanceMetaData(instanceId, attributes, version);
}
|
// A PROXY instance type must produce ProxyInstanceMetaData with ip/port split
// out of the "host@port" attribute string.
@Test
void assertCreateProxyInstanceMetaDataWithInstanceId() {
    ProxyInstanceMetaData actual = (ProxyInstanceMetaData) InstanceMetaDataFactory.create("foo_id", InstanceType.PROXY, "127.0.0.1@3307", "foo_version");
    assertThat(actual.getId(), is("foo_id"));
    assertThat(actual.getIp(), is("127.0.0.1"));
    assertThat(actual.getPort(), is(3307));
    assertThat(actual.getAttributes(), is("127.0.0.1@3307"));
    assertThat(actual.getVersion(), is("foo_version"));
    assertThat(actual.getType(), is(InstanceType.PROXY));
}
|
@Udf
public String trim(
    @UdfParameter(
        description = "The string to trim") final String input) {
    // SQL semantics: null propagates; otherwise strip leading/trailing whitespace.
    return input == null ? null : input.trim();
}
|
// Trailing spaces and tabs are removed; interior whitespace is preserved.
@Test
public void shouldRemoveTrailingWhitespace() {
    final String result = udf.trim("Foo Bar \t ");
    assertThat(result, is("Foo Bar"));
}
|
@Override
public boolean equals(Object obj) {
    if (obj == this) {
        return true;
    }
    // Strict class check (not instanceof): subclasses are deliberately never equal.
    if (obj == null || obj.getClass() != WorkerResourceSpec.class) {
        return false;
    }
    final WorkerResourceSpec other = (WorkerResourceSpec) obj;
    return Objects.equals(cpuCores, other.cpuCores)
            && Objects.equals(taskHeapSize, other.taskHeapSize)
            && Objects.equals(taskOffHeapSize, other.taskOffHeapSize)
            && Objects.equals(networkMemSize, other.networkMemSize)
            && Objects.equals(managedMemSize, other.managedMemSize)
            && Objects.equals(numSlots, other.numSlots)
            && Objects.equals(extendedResources, other.extendedResources);
}
|
// Verifies equals(): spec1/spec2 are identical; spec3..spec10 each differ from
// spec1 in exactly one component (cpu, task heap, task off-heap, network,
// managed memory, slots, extended-resource value, extended-resource presence).
@Test
void testEquals() {
    final WorkerResourceSpec spec1 =
            new WorkerResourceSpec.Builder()
                    .setCpuCores(1.0)
                    .setTaskHeapMemoryMB(100)
                    .setTaskOffHeapMemoryMB(100)
                    .setNetworkMemoryMB(100)
                    .setManagedMemoryMB(100)
                    .setNumSlots(1)
                    .setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1))
                    .build();
    final WorkerResourceSpec spec2 =
            new WorkerResourceSpec.Builder()
                    .setCpuCores(1.0)
                    .setTaskHeapMemoryMB(100)
                    .setTaskOffHeapMemoryMB(100)
                    .setNetworkMemoryMB(100)
                    .setManagedMemoryMB(100)
                    .setNumSlots(1)
                    .setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1))
                    .build();
    final WorkerResourceSpec spec3 =
            new WorkerResourceSpec.Builder()
                    .setCpuCores(1.1)
                    .setTaskHeapMemoryMB(100)
                    .setTaskOffHeapMemoryMB(100)
                    .setNetworkMemoryMB(100)
                    .setManagedMemoryMB(100)
                    .setNumSlots(1)
                    .setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1))
                    .build();
    final WorkerResourceSpec spec4 =
            new WorkerResourceSpec.Builder()
                    .setCpuCores(1.0)
                    .setTaskHeapMemoryMB(110)
                    .setTaskOffHeapMemoryMB(100)
                    .setNetworkMemoryMB(100)
                    .setManagedMemoryMB(100)
                    .setNumSlots(1)
                    .setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1))
                    .build();
    final WorkerResourceSpec spec5 =
            new WorkerResourceSpec.Builder()
                    .setCpuCores(1.0)
                    .setTaskHeapMemoryMB(100)
                    .setTaskOffHeapMemoryMB(110)
                    .setNetworkMemoryMB(100)
                    .setManagedMemoryMB(100)
                    .setNumSlots(1)
                    .setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1))
                    .build();
    final WorkerResourceSpec spec6 =
            new WorkerResourceSpec.Builder()
                    .setCpuCores(1.0)
                    .setTaskHeapMemoryMB(100)
                    .setTaskOffHeapMemoryMB(100)
                    .setNetworkMemoryMB(110)
                    .setManagedMemoryMB(100)
                    .setNumSlots(1)
                    .setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1))
                    .build();
    final WorkerResourceSpec spec7 =
            new WorkerResourceSpec.Builder()
                    .setCpuCores(1.0)
                    .setTaskHeapMemoryMB(100)
                    .setTaskOffHeapMemoryMB(100)
                    .setNetworkMemoryMB(100)
                    .setManagedMemoryMB(110)
                    .setNumSlots(1)
                    .setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1))
                    .build();
    final WorkerResourceSpec spec8 =
            new WorkerResourceSpec.Builder()
                    .setCpuCores(1.0)
                    .setTaskHeapMemoryMB(100)
                    .setTaskOffHeapMemoryMB(100)
                    .setNetworkMemoryMB(100)
                    .setManagedMemoryMB(100)
                    .setNumSlots(2)
                    .setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 1))
                    .build();
    final WorkerResourceSpec spec9 =
            new WorkerResourceSpec.Builder()
                    .setCpuCores(1.0)
                    .setTaskHeapMemoryMB(100)
                    .setTaskOffHeapMemoryMB(100)
                    .setNetworkMemoryMB(100)
                    .setManagedMemoryMB(100)
                    .setNumSlots(1)
                    .setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME, 2))
                    .build();
    final WorkerResourceSpec spec10 =
            new WorkerResourceSpec.Builder()
                    .setCpuCores(1.0)
                    .setTaskHeapMemoryMB(100)
                    .setTaskOffHeapMemoryMB(100)
                    .setNetworkMemoryMB(100)
                    .setManagedMemoryMB(100)
                    .setNumSlots(1)
                    .build();
    assertThat(spec1).isEqualTo(spec1);
    assertThat(spec1).isEqualTo(spec2);
    assertThat(spec1).isNotEqualTo(spec3);
    assertThat(spec1).isNotEqualTo(spec4);
    assertThat(spec1).isNotEqualTo(spec5);
    assertThat(spec1).isNotEqualTo(spec6);
    assertThat(spec1).isNotEqualTo(spec7);
    assertThat(spec1).isNotEqualTo(spec8);
    assertThat(spec1).isNotEqualTo(spec9);
    assertThat(spec1).isNotEqualTo(spec10);
}
|
/**
 * Returns the given string with its first character lower-cased using the root
 * locale (locale-independent, so e.g. Turkish dotted/dotless-i rules do not apply).
 *
 * @param string the string to decapitalize; may be {@code null} or empty
 * @return the decapitalized string, the empty string unchanged, or {@code null} for {@code null} input
 */
public static String decapitalize(String string) {
    if ( string == null || string.isEmpty() ) {
        // Previously an empty string threw StringIndexOutOfBoundsException on substring( 0, 1 ).
        return string;
    }
    return string.substring( 0, 1 ).toLowerCase( Locale.ROOT ) + string.substring( 1 );
}
|
// With the default locale forced to Turkish, decapitalize must still use
// Locale.ROOT ("I" -> "i", not the Turkish dotless "ı").
@Test
@DefaultLocale("tr")
public void decapitalizeTurkish() {
    String international = Strings.decapitalize( "International" );
    assertThat( international ).isEqualTo( "international" );
}
|
@Override
public synchronized CryptoVault load(final Session<?> session, final PasswordCallback prompt) throws BackgroundException {
    // Idempotent: a vault that is already unlocked is returned as-is.
    if(this.isUnlocked()) {
        log.warn(String.format("Skip unlock of open vault %s", this));
        return this;
    }
    final Host bookmark = session.getHost();
    // Look up a saved passphrase in the keychain, keyed by the account username.
    String passphrase = keychain.getPassword(String.format("Cryptomator Passphrase (%s)", bookmark.getCredentials().getUsername()),
            new DefaultUrlProvider(bookmark).toUrl(masterkey).find(DescriptiveUrl.Type.provider).getUrl());
    if(null == passphrase) {
        // Legacy
        // Fall back to the older keychain entry format keyed by hostname only.
        passphrase = keychain.getPassword(String.format("Cryptomator Passphrase %s", bookmark.getHostname()),
                new DefaultUrlProvider(bookmark).toUrl(masterkey).find(DescriptiveUrl.Type.provider).getUrl());
    }
    // NOTE(review): a null passphrase presumably causes unlock() to prompt the
    // user via the callback — confirm against the unlock implementation.
    return this.unlock(session, prompt, bookmark, passphrase);
}
|
// End-to-end vault open against an in-memory session: serves a fixed v8
// masterkey plus a signed vault.cryptomator JWT, unlocks with the passphrase,
// then exercises encrypt/decrypt path mapping invariants (stable directory ids,
// placeholder flags, metadata vs. content encryption) and finally closes.
@Test
public void testLoad() throws Exception {
    final NullSession session = new NullSession(new Host(new TestProtocol())) {
        @Override
        @SuppressWarnings("unchecked")
        public <T> T _getFeature(final Class<T> type) {
            if(type == Read.class) {
                return (T) new Read() {
                    @Override
                    public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
                        // Static vault-format-8 master key fixture.
                        final String masterKey = "{\n" +
                            "  \"version\": 8,\n" +
                            "  \"scryptSalt\": \"RVAAirkArDU=\",\n" +
                            "  \"scryptCostParam\": 32768,\n" +
                            "  \"scryptBlockSize\": 8,\n" +
                            "  \"primaryMasterKey\": \"+03NkJNWVsJ9Tb1CTpKhXyfINzjDirFFI+iJLOWIOySyxB+abpx34Q==\",\n" +
                            "  \"hmacMasterKey\": \"aMoDtn7Y6kIXxyHo2zl47p5jCYTlRnfx3l3AMgULmIDSYAxVAraSgg==\",\n" +
                            "  \"versionMac\": \"FzirA8UhwCmS5RsC4JvxbO+ZBxaCbIkzqD2Ocagd+A8=\"\n" +
                            "}";
                        if("masterkey.cryptomator".equals(file.getName())) {
                            return IOUtils.toInputStream(masterKey, Charset.defaultCharset());
                        }
                        if("vault.cryptomator".equals(file.getName())) {
                            return IOUtils.toInputStream(createJWT(masterKey, VAULT_VERSION, CryptorProvider.Scheme.SIV_GCM, "vault123"), Charset.defaultCharset());
                        }
                        throw new NotfoundException(String.format("%s not found", file.getName()));
                    }
                    @Override
                    public boolean offset(final Path file) {
                        return false;
                    }
                };
            }
            return super._getFeature(type);
        }
    };
    final Path home = new Path("/", EnumSet.of((Path.Type.directory)));
    final CryptoVault vault = new CryptoVault(home);
    vault.load(session, new DisabledPasswordCallback() {
        @Override
        public Credentials prompt(final Host bookmark, final String title, final String reason, final LoginOptions options) {
            return new VaultCredentials("vault123");
        }
    });
    assertTrue(vault.getFileContentCryptor().getClass().getName().contains("v2"));
    assertTrue(vault.getFileHeaderCryptor().getClass().getName().contains("v2"));
    assertEquals(Vault.State.open, vault.getState());
    // Encryption must yield a distinct path object but be deterministic.
    assertNotSame(home, vault.encrypt(session, home));
    assertEquals(vault.encrypt(session, home), vault.encrypt(session, home));
    final Path directory = new Path(home, "dir", EnumSet.of(Path.Type.directory));
    assertNull(directory.attributes().getVault());
    assertEquals(home, vault.encrypt(session, directory).attributes().getVault());
    assertEquals(home, directory.attributes().getVault());
    assertEquals(vault.encrypt(session, directory), vault.encrypt(session, directory));
    assertEquals(new Path(home, directory.getName(), EnumSet.of(Path.Type.directory, Path.Type.decrypted)), vault.decrypt(session, vault.encrypt(session, directory, true)));
    final Path placeholder = new Path(home, "placeholder", EnumSet.of(Path.Type.directory, Path.Type.placeholder));
    assertTrue(vault.encrypt(session, placeholder, true).getType().contains(Path.Type.placeholder));
    assertTrue(vault.decrypt(session, vault.encrypt(session, placeholder, true)).getType().contains(Path.Type.placeholder));
    assertEquals(new Path(home, placeholder.getName(), EnumSet.of(Path.Type.directory, Path.Type.placeholder, Path.Type.decrypted)), vault.decrypt(session, vault.encrypt(session, placeholder, true)));
    // Metadata encryption (flag=true) differs from content encryption.
    assertNotEquals(
        vault.encrypt(session, new Path(home, "dir", EnumSet.of(Path.Type.directory))),
        vault.encrypt(session, new Path(home, "dir", EnumSet.of(Path.Type.directory)), true)
    );
    assertEquals(
        vault.encrypt(session, new Path(home, "dir", EnumSet.of(Path.Type.directory))).attributes().getDirectoryId(),
        vault.encrypt(session, new Path(home, "dir", EnumSet.of(Path.Type.directory))).attributes().getDirectoryId()
    );
    assertEquals(
        vault.encrypt(session, vault.encrypt(session, new Path(home, "dir", EnumSet.of(Path.Type.directory)))).attributes().getDirectoryId(),
        vault.encrypt(session, vault.encrypt(session, new Path(home, "dir", EnumSet.of(Path.Type.directory)))).attributes().getDirectoryId()
    );
    assertNull(vault.encrypt(session, new Path(home, "dir", EnumSet.of(Path.Type.directory)), true).attributes().getDirectoryId());
    assertNull(vault.encrypt(session, vault.encrypt(session, new Path(home, "dir", EnumSet.of(Path.Type.directory))), true).attributes().getDirectoryId());
    assertNotEquals(
        vault.encrypt(session, new Path(home, "dir", EnumSet.of(Path.Type.directory))).attributes().getDirectoryId(),
        vault.encrypt(session, new Path(home, "dir", EnumSet.of(Path.Type.directory)), true).attributes().getDirectoryId()
    );
    vault.close();
    assertEquals(Vault.State.closed, vault.getState());
}
|
@Override
public Serializable read(final MySQLBinlogColumnDef columnDef, final MySQLPacketPayload payload) {
    // MySQL binlog encodes FLOAT as a 4-byte IEEE 754 value in little-endian order.
    return payload.getByteBuf().readFloatLE();
}
|
// The protocol value must return exactly the little-endian float read from the buffer.
@Test
void assertRead() {
    when(payload.getByteBuf()).thenReturn(byteBuf);
    when(byteBuf.readFloatLE()).thenReturn(1.1F);
    MySQLFloatBinlogProtocolValue actual = new MySQLFloatBinlogProtocolValue();
    assertThat(actual.read(columnDef, payload), is(1.1F));
}
|
/**
 * Maps a nested list of raw values into a list of outer container types, each
 * wrapping the inner element type.
 *
 * @param input nested raw values
 * @param outerDestType outer container class; must expose a {@code (Class, List)} constructor
 * @param innerType inner element type class
 * @return the mapped outer instances, one per inner list
 * @throws TypeMappingException if reflective construction fails
 */
public static <T, R extends Type<T>, E extends Type<T>> List<E> typeMap(
        List<List<T>> input, Class<E> outerDestType, Class<R> innerType) {
    // Presize: one output element per input list.
    List<E> result = new ArrayList<>(input.size());
    try {
        Constructor<E> constructor =
                outerDestType.getDeclaredConstructor(Class.class, List.class);
        for (List<T> ts : input) {
            result.add(constructor.newInstance(innerType, typeMap(ts, innerType)));
        }
    } catch (ReflectiveOperationException e) {
        // Common supertype of NoSuchMethod/IllegalAccess/Instantiation/InvocationTarget.
        throw new TypeMappingException(e);
    }
    return result;
}
|
// Mapping an empty input produces an empty result list.
@Test
public void testTypeMapEmpty() {
    assertEquals(typeMap(new ArrayList<>(), Uint256.class), (new ArrayList<Uint256>()));
}
|
/**
 * Builds a feature-update map that disables (level 0) every feature named on
 * the command line, rejecting duplicates, and submits it.
 *
 * @param namespace parsed CLI arguments ("feature" list, "dry_run" flag)
 * @param adminClient admin client used to apply the updates
 * @throws TerseException if the update operation fails
 */
static void handleDisable(Namespace namespace, Admin adminClient) throws TerseException {
    FeatureUpdate.UpgradeType upgradeType = downgradeType(namespace);
    Map<String, FeatureUpdate> updates = new HashMap<>();
    List<String> features = namespace.getList("feature");
    if (features != null) {
        for (String feature : features) {
            // put() returning non-null means the feature was already listed.
            FeatureUpdate previous = updates.put(feature, new FeatureUpdate((short) 0, upgradeType));
            if (previous != null) {
                throw new RuntimeException("Feature " + feature + " was specified more than once.");
            }
        }
    }
    update("disable", adminClient, updates, namespace.getBoolean("dry_run"));
}
|
// A dry-run disable reports per-feature outcomes on stdout and throws a
// TerseException summarizing the one feature that cannot be downgraded.
@Test
public void testHandleDisableDryRun() {
    Map<String, Object> namespace = new HashMap<>();
    namespace.put("feature", Arrays.asList("foo.bar", "metadata.version", "quux"));
    namespace.put("dry_run", true);
    String disableOutput = ToolsTestUtils.captureStandardOut(() -> {
        Throwable t = assertThrows(TerseException.class, () -> FeatureCommand.handleDisable(new Namespace(namespace), buildAdminClient()));
        assertTrue(t.getMessage().contains("1 out of 3 operation(s) failed."));
    });
    assertEquals(format("foo.bar can be disabled.%n" +
        "Can not disable metadata.version. Can't downgrade below 4%n" +
        "quux can be disabled."), disableOutput);
}
|
/**
 * Sets the property key.
 *
 * @param key the key; must not be {@code null}
 * @return this instance, for chaining
 * @throws NullPointerException if {@code key} is {@code null}
 */
public AnalysisPropertyDto setKey(String key) {
    this.key = requireNonNull(key, "key cannot be null");
    return this;
}
|
// setKey(null) must reject with an NPE carrying the documented message.
@Test
void null_key_should_throw_NPE() {
    underTest = new AnalysisPropertyDto();
    assertThatThrownBy(() -> underTest.setKey(null))
        .isInstanceOf(NullPointerException.class)
        .hasMessage("key cannot be null");
}
|
/**
 * Ensures the Tars proxy class for the given metadata path is loaded, then
 * refreshes the invoker list for its context path if upstream data is cached.
 * Spins until another thread's in-flight load becomes visible in the cache.
 *
 * @param metaData the selector metadata describing the Tars service
 */
public void initPrx(final MetaData metaData) {
    while (true) {
        Class<?> prxClass = prxClassCache.get(metaData.getPath());
        try {
            if (Objects.isNull(prxClass)) {
                // Spin's Attempt to Load
                tryLockedLoadMetaData(metaData);
            } else {
                if (Objects.nonNull(metaData.getContextPath()) && Objects.nonNull(refreshUpstreamCache.get(metaData.getContextPath()))) {
                    refreshTarsInvokePrxList(metaData, refreshUpstreamCache.get(metaData.getContextPath()));
                }
                break;
            }
        } catch (Exception e) {
            // Pass the throwable as the last argument so the stack trace is
            // logged; previously only the message was recorded.
            LOG.error("ShenyuTarsPluginInitializeException: init tars ref ex:{}", e.getMessage(), e);
            break;
        }
    }
}
|
// Without a running Tars runtime, initPrx is expected to fail with an NPE
// while wiring the proxy for the metadata; the cache lookup never succeeds.
@Test
public void testInitPrx() {
    final MetaData metaData = new MetaData("id", "127.0.0.1:8080", "contextPath",
        "path6", RpcTypeEnum.TARS.getName(), "serviceName6", "method1",
        "parameterTypes", "{\"methodInfo\":[{\"methodName\":\"method1\",\"params\":[{\"left\":\"int\",\"right\":\"param1\"},"
        + "{\"left\":\"java.lang.Integer\",\"right\":\"param2\"}],\"returnType\":\"java.lang.String\"}]}", false);
    assertThrows(NullPointerException.class, () -> {
        applicationConfigCacheUnderTest.initPrx(metaData);
        final TarsInvokePrxList result = applicationConfigCacheUnderTest.get("path6");
        assertEquals("promise_method1", result.getMethod().getName());
    });
}
|
/**
 * Extracts server identities from the certificate by trying each configured
 * identity mapping in order; the first mapping that yields any identities wins.
 *
 * @param x509Certificate the certificate to inspect
 * @return the identities from the first successful mapping, or an empty list
 */
public static List<String> getServerIdentities(X509Certificate x509Certificate) {
    List<String> names = new ArrayList<>();
    for (CertificateIdentityMapping mapping : serverCertMapping) {
        List<String> identities = mapping.mapIdentity(x509Certificate);
        Log.debug("CertificateManager: " + mapping.name() + " returned " + identities.toString());
        if (!identities.isEmpty()) {
            // First non-empty result short-circuits the remaining mappings.
            names.addAll(identities);
            break;
        }
    }
    return names;
}
|
// When a certificate carries a dNSName subjectAltName, that name alone is the
// server identity — the subject CN must not be used.
@Test
public void testServerIdentitiesDNS() throws Exception
{
    // Setup fixture.
    final String subjectCommonName = "MySubjectCommonName";
    final String subjectAltNameDNS = "MySubjectAltNameDNS";
    final X509v3CertificateBuilder builder = new JcaX509v3CertificateBuilder(
        new X500Name( "CN=MyIssuer" ), // Issuer
        BigInteger.valueOf( Math.abs( new SecureRandom().nextInt() ) ), // Random serial number
        new Date( System.currentTimeMillis() - ( 1000L * 60 * 60 * 24 * 30 ) ), // Not before 30 days ago
        new Date( System.currentTimeMillis() + ( 1000L * 60 * 60 * 24 * 99 ) ), // Not after 99 days from now
        new X500Name( "CN=" + subjectCommonName ), // Subject
        subjectKeyPair.getPublic()
    );
    final GeneralNames generalNames = new GeneralNames(new GeneralName(GeneralName.dNSName, subjectAltNameDNS));
    builder.addExtension( Extension.subjectAlternativeName, false, generalNames );
    final X509CertificateHolder certificateHolder = builder.build( contentSigner );
    final X509Certificate cert = new JcaX509CertificateConverter().getCertificate( certificateHolder );
    // Execute system under test
    final List<String> serverIdentities = CertificateManager.getServerIdentities( cert );
    // Verify result
    assertEquals( 1, serverIdentities.size() );
    assertTrue( serverIdentities.contains( subjectAltNameDNS ) );
    assertFalse( serverIdentities.contains( subjectCommonName ) );
}
|
@Override
public void onChange(Job job) {
    // Push the updated job to the SSE client.
    sendObject(job);
    // Terminal job states end the event stream.
    final boolean isTerminal = job.hasState(SUCCEEDED) || job.hasState(FAILED) || job.hasState(DELETED);
    if (isTerminal) {
        close();
    }
}
|
// A SUCCEEDED job is terminal: the exchange must deregister its listener (close).
@Test
void sseConnectionIsClosedIfJobStateIsSucceeded() throws IOException {
    JobSseExchange jobSseExchange = new JobSseExchange(httpExchange, storageProvider, new JacksonJsonMapper());
    jobSseExchange.onChange(aSucceededJob().build());
    verify(storageProvider).removeJobStorageOnChangeListener(jobSseExchange);
}
|
@Override
public String buildContext() {
    final PluginDO after = (PluginDO) getAfter();
    final String eventTypeName = StringUtils.lowerCase(getType().getType().toString());
    // Events without a "before" snapshot (e.g. create/delete) render no contrast.
    if (Objects.isNull(getBefore())) {
        return String.format("the plugin [%s] is %s", after.getName(), eventTypeName);
    }
    return String.format("the plugin [%s] is %s : %s", after.getName(), eventTypeName, contrast());
}
|
// A delete event (no "before" state) renders the short context without contrast.
@Test
public void deletePluginBuildContextTest() {
    PluginChangedEvent pluginDeleteEvent = new PluginChangedEvent(pluginDO, null, EventTypeEnum.PLUGIN_DELETE, "test-operator");
    String context =
        String.format("the plugin [%s] is %s", pluginDO.getName(), StringUtils.lowerCase(EventTypeEnum.PLUGIN_DELETE.getType().toString()));
    assertEquals(context, pluginDeleteEvent.buildContext());
}
|
// Kicks off the budget refresh subscription on the dedicated executor.
// The returned Future is intentionally ignored (fire-and-forget), hence the suppression.
@SuppressWarnings("FutureReturnValueIgnored")
public void start() {
    budgetRefreshExecutor.submit(this::subscribeToRefreshBudget);
}
|
// With refresh paused, the scheduled budget redistribution must never fire,
// even after waiting past the scheduled refresh interval.
@Test
public void testScheduledBudgetRefresh_doesNotRunWhenBudgetRefreshPaused()
    throws InterruptedException {
    CountDownLatch redistributeBudgetLatch = new CountDownLatch(1);
    Runnable redistributeBudget = redistributeBudgetLatch::countDown;
    GetWorkBudgetRefresher budgetRefresher = createBudgetRefresher(true, redistributeBudget);
    budgetRefresher.start();
    boolean redistributeBudgetRan =
        redistributeBudgetLatch.await(
            GetWorkBudgetRefresher.SCHEDULED_BUDGET_REFRESH_MILLIS + WAIT_BUFFER,
            TimeUnit.MILLISECONDS);
    // Make sure that redistributeBudgetLatch.countDown() is never called.
    assertThat(redistributeBudgetLatch.getCount()).isEqualTo(1);
    assertFalse(redistributeBudgetRan);
}
|
/**
 * Extracts the schema name from a table metadata node path such as
 * {@code /metadata/<db>/schemas/<schema>/...}.
 *
 * @param tablePath metadata node path of the table
 * @return the schema name, or empty if the path does not match or carries no schema
 */
public static Optional<String> getSchemaNameByTableNode(final String tablePath) {
    Pattern pattern = Pattern.compile(getMetaDataNode() + "/([\\w\\-]+)/schemas/([\\w\\-]+)?", Pattern.CASE_INSENSITIVE);
    Matcher matcher = pattern.matcher(tablePath);
    // Group 2 is optional in the pattern, so it can be null on a match ending at
    // ".../schemas/"; Optional.of(null) would throw NPE — use ofNullable.
    return matcher.find() ? Optional.ofNullable(matcher.group(2)) : Optional.empty();
}
|
// A well-formed table node path yields the schema path segment.
@Test
void assertGetSchemaNameByTableNode() {
    Optional<String> actual = DatabaseMetaDataNode.getSchemaNameByTableNode("/metadata/foo_db/schemas/foo_schema/tables");
    assertTrue(actual.isPresent());
    assertThat(actual.get(), is("foo_schema"));
}
|
/**
 * Translates a raw HTTP error response into an {@code S3ServiceException},
 * picking up MinIO-specific error headers, then maps it to the framework's
 * {@code BackgroundException} hierarchy.
 *
 * @param response the failed HTTP response
 * @return the mapped exception
 * @throws IOException if the response body cannot be read
 */
public BackgroundException map(HttpResponse response) throws IOException {
    final S3ServiceException failure;
    if(null == response.getEntity()) {
        // No body: only the status line reason phrase is available.
        failure = new S3ServiceException(response.getStatusLine().getReasonPhrase());
    }
    else {
        // Buffer the entity so it can be consumed for the error message text.
        EntityUtils.updateEntity(response, new BufferedHttpEntity(response.getEntity()));
        failure = new S3ServiceException(response.getStatusLine().getReasonPhrase(),
            EntityUtils.toString(response.getEntity()));
    }
    failure.setResponseCode(response.getStatusLine().getStatusCode());
    // MinIO reports error details in custom headers rather than an XML body.
    if(response.containsHeader(MINIO_ERROR_CODE)) {
        failure.setErrorCode(response.getFirstHeader(MINIO_ERROR_CODE).getValue());
    }
    if(response.containsHeader(MINIO_ERROR_DESCRIPTION)) {
        failure.setErrorMessage(response.getFirstHeader(MINIO_ERROR_DESCRIPTION).getValue());
    }
    return this.map(failure);
}
|
// A 403 without an XML body still maps to AccessDeniedException.
@Test
public void test403NoXml() {
    final ServiceException f = new ServiceException();
    f.setResponseCode(403);
    assertTrue(new S3ExceptionMappingService().map(f) instanceof AccessDeniedException);
}
|
/**
 * Computes Phoenix input splits for the reader job and derives one DataX task
 * configuration per split, embedding the serialized split (Base64) under
 * {@code Key.SPLIT_KEY} so each task can deserialize its own range.
 *
 * @param readerConfig the HBase SQL reader configuration
 * @return one cloned task configuration per Phoenix input split
 */
public static List<Configuration> split(HbaseSQLReaderConfig readerConfig) {
    PhoenixInputFormat inputFormat = new PhoenixInputFormat<PhoenixRecordWritable>();
    org.apache.hadoop.conf.Configuration conf = generatePhoenixConf(readerConfig);
    // A synthetic job id/context is enough for getSplits(); no real MR job runs here.
    JobID jobId = new JobID(Key.MOCK_JOBID_IDENTIFIER, Key.MOCK_JOBID);
    JobContextImpl jobContext = new JobContextImpl(conf, jobId);
    List<Configuration> resultConfigurations = new ArrayList<Configuration>();
    List<InputSplit> rawSplits = null;
    try {
        rawSplits = inputFormat.getSplits(jobContext);
        LOG.info("split size is " + rawSplits.size());
        for (InputSplit split : rawSplits) {
            // Each task gets its own clone of the original config plus its split.
            Configuration cfg = readerConfig.getOriginalConfig().clone();
            byte[] splitSer = HadoopSerializationUtil.serialize((PhoenixInputSplit) split);
            String splitBase64Str = org.apache.commons.codec.binary.Base64.encodeBase64String(splitSer);
            cfg.set(Key.SPLIT_KEY, splitBase64Str);
            resultConfigurations.add(cfg);
        }
    } catch (IOException e) {
        throw DataXException.asDataXException(
            HbaseSQLReaderErrorCode.GET_PHOENIX_SPLITS_ERROR, "获取表的split信息时出现了异常,请检查hbase集群服务是否正常," + e.getMessage(), e);
    } catch (InterruptedException e) {
        throw DataXException.asDataXException(
            HbaseSQLReaderErrorCode.GET_PHOENIX_SPLITS_ERROR, "获取表的split信息时被中断,请重试,若还有问题请联系datax管理员," + e.getMessage(), e);
    }
    return resultConfigurations;
}
|
// Smoke test: parsing the config and splitting must not throw.
// NOTE(review): this test has no assertions — it only prints the split count;
// consider asserting on the returned list.
@Test
public void testSplit() {
    Configuration config = Configuration.from(jsonStr);
    HbaseSQLReaderConfig readerConfig = HbaseSQLHelper.parseConfig(config);
    List<Configuration> splits = HbaseSQLHelper.split(readerConfig);
    System.out.println("split size = " + splits.size());
}
|
/**
 * Registers a serializer for the given type after validating its inputs.
 *
 * @param type the class to register the serializer for; must not be {@code null}
 * @param serializer the serializer; its type id must be positive
 * @throws IllegalArgumentException if {@code type} is null or the type id is not positive
 */
public final void register(Class type, Serializer serializer) {
    if (type == null) {
        throw new IllegalArgumentException("type is required");
    }
    final int typeId = serializer.getTypeId();
    if (typeId <= 0) {
        throw new IllegalArgumentException(
            "Type ID must be positive. Current: " + typeId + ", Serializer: " + serializer);
    }
    safeRegister(type, createSerializerAdapter(serializer));
}
|
// Registering a serializer against a null type must be rejected.
@Test(expected = IllegalArgumentException.class)
public void testRegister_nullType() {
    abstractSerializationService.register(null, new StringBufferSerializer(true));
}
|
@Override
public LibSVMModel<Event> train(Dataset<Event> dataset, Map<String, Provenance> instanceProvenance) {
    // One-class SVMs learn from expected data only: reject any training set
    // that contains anomalous events.
    for (Pair<String, Long> outputCount : dataset.getOutputInfo().outputCountsIterable()) {
        final boolean isAnomalous = outputCount.getA().equals(EventType.ANOMALOUS.toString());
        if (isAnomalous && outputCount.getB() > 0) {
            throw new IllegalArgumentException("LibSVMAnomalyTrainer only supports EXPECTED events at training time.");
        }
    }
    return super.train(dataset, instanceProvenance);
}
|
// Trains a one-class RBF SVM on Gaussian data with 20% anomalies held out for
// evaluation, then checks detection quality and (proto) serialization round-trips.
@Test
public void gaussianDataTest() {
    Pair<Dataset<Event>,Dataset<Event>> pair = AnomalyDataGenerator.gaussianAnomaly(1000,0.2);
    SVMParameters<Event> params = new SVMParameters<>(new SVMAnomalyType(SVMAnomalyType.SVMMode.ONE_CLASS), KernelType.RBF);
    params.setGamma(1.0);
    params.setNu(0.1);
    LibSVMAnomalyTrainer trainer = new LibSVMAnomalyTrainer(params);
    LibSVMModel<Event> model = trainer.train(pair.getA());
    AnomalyEvaluator evaluator = new AnomalyEvaluator();
    AnomalyEvaluation evaluation = evaluator.evaluate(model,pair.getB());
    assertEquals(200,evaluation.getTruePositives());
    assertTrue(650 < evaluation.getTrueNegatives());
    assertEquals(0,evaluation.getFalseNegatives());
    String confusion = evaluation.confusionString();
    String output = evaluation.toString();
    // logging a few outputs for easy checking, and to prevent dead code elimination.
    logger.finer(confusion);
    logger.finer(output);
    // Test serialization
    Helpers.testModelSerialization(model,Event.class);
    // Test protobuf serialization
    Helpers.testModelProtoSerialization(model, Event.class, pair.getB());
}
|
/**
 * Loads a protocol handler from its NAR archive: builds an isolated NAR class
 * loader, reads the handler definition, instantiates the declared handler
 * class, and wraps handler + class loader together.
 *
 * @param metadata handler metadata carrying the archive path
 * @param narExtractionDirectory directory to extract the NAR into
 * @return the handler paired with its class loader
 * @throws IOException if the definition is missing, the class is not a
 *         ProtocolHandler, or any load/instantiation error occurs
 */
static ProtocolHandlerWithClassLoader load(ProtocolHandlerMetadata metadata,
                                           String narExtractionDirectory) throws IOException {
    final File narFile = metadata.getArchivePath().toAbsolutePath().toFile();
    NarClassLoader ncl = NarClassLoaderBuilder.builder()
            .narFile(narFile)
            .parentClassLoader(ProtocolHandler.class.getClassLoader())
            .extractionDirectory(narExtractionDirectory)
            .build();
    ProtocolHandlerDefinition phDef = getProtocolHandlerDefinition(ncl);
    if (StringUtils.isBlank(phDef.getHandlerClass())) {
        throw new IOException("Protocol handler `" + phDef.getName() + "` does NOT provide a protocol"
                + " handler implementation");
    }
    try {
        Class handlerClass = ncl.loadClass(phDef.getHandlerClass());
        Object handler = handlerClass.getDeclaredConstructor().newInstance();
        if (!(handler instanceof ProtocolHandler)) {
            throw new IOException("Class " + phDef.getHandlerClass()
                    + " does not implement protocol handler interface");
        }
        ProtocolHandler ph = (ProtocolHandler) handler;
        return new ProtocolHandlerWithClassLoader(ph, ncl);
    } catch (Throwable t) {
        // rethrowIOException always throws; the return below is unreachable
        // but required by the compiler.
        rethrowIOException(t);
        return null;
    }
}
|
// Loads a handler through a mocked NAR class loader (static-mocking the builder)
// and verifies the returned wrapper exposes both the handler and the loader.
@Test
public void testLoadProtocolHandler() throws Exception {
    ProtocolHandlerDefinition def = new ProtocolHandlerDefinition();
    def.setHandlerClass(MockProtocolHandler.class.getName());
    def.setDescription("test-protocol-handler");
    String archivePath = "/path/to/protocol/handler/nar";
    ProtocolHandlerMetadata metadata = new ProtocolHandlerMetadata();
    metadata.setDefinition(def);
    metadata.setArchivePath(Paths.get(archivePath));
    NarClassLoader mockLoader = mock(NarClassLoader.class);
    when(mockLoader.getServiceDefinition(eq(PULSAR_PROTOCOL_HANDLER_DEFINITION_FILE)))
        .thenReturn(ObjectMapperFactory.getYamlMapper().writer().writeValueAsString(def));
    Class handlerClass = MockProtocolHandler.class;
    when(mockLoader.loadClass(eq(MockProtocolHandler.class.getName())))
        .thenReturn(handlerClass);
    final NarClassLoaderBuilder mockedBuilder = mock(NarClassLoaderBuilder.class, RETURNS_SELF);
    when(mockedBuilder.build()).thenReturn(mockLoader);
    try (MockedStatic<NarClassLoaderBuilder> builder = Mockito.mockStatic(NarClassLoaderBuilder.class)) {
        builder.when(() -> NarClassLoaderBuilder.builder()).thenReturn(mockedBuilder);
        ProtocolHandlerWithClassLoader returnedPhWithCL = ProtocolHandlerUtils.load(metadata, "");
        ProtocolHandler returnedPh = returnedPhWithCL.getHandler();
        assertSame(mockLoader, returnedPhWithCL.getClassLoader());
        assertTrue(returnedPh instanceof MockProtocolHandler);
    }
}
|
@Override
public void putAll(Map<? extends K, ? extends V> map) {
    // Issue the asynchronous bulk write and block until it completes.
    sync(putAllAsync(map));
}
|
// Bulk-inserts 10k entries via the JCache API backed by Redisson and verifies
// every key is present afterwards; elapsed time is printed for reference only.
@Test
public void testPutAll() throws Exception {
    URL configUrl = getClass().getResource("redisson-jcache.yaml");
    Config cfg = Config.fromYAML(configUrl);
    Configuration<String, String> config = RedissonConfiguration.fromConfig(cfg);
    Cache<String, String> cache = Caching.getCachingProvider().getCacheManager()
        .createCache("test", config);
    Map<String, String> map = new HashMap<>();
    for (int i = 0; i < 10000; i++) {
        map.put("" + i, "" + i);
    }
    long start = System.currentTimeMillis();
    cache.putAll(map);
    System.out.println(System.currentTimeMillis() - start);
    for (int i = 0; i < 10000; i++) {
        assertThat(cache.containsKey("" + i)).isTrue();
    }
    cache.close();
}
|
@Override
public ConfigData get(String path) {
    // Environment variables form a flat namespace: a non-empty path is invalid.
    assertNoPath(path);
    return new ConfigData(getEnv());
}
|
// Requesting a non-existent environment variable yields empty config data.
@Test
public void testNonExistingEnvReturnsEmpty() {
    Set<String> set = new HashSet<>();
    set.add("NON_EXISTING_ENV");
    ConfigData actual;
    try (EnvConfigProvider configProvider = new EnvConfigProvider()) {
        actual = configProvider.get("", set);
    }
    assertEquals(0, actual.data().size());
}
|
/**
 * Casts an {@code Integer} to a decimal of the given precision and scale by
 * delegating to the {@code long} overload.
 *
 * @param value the value to cast; {@code null} yields {@code null}
 * @param precision target precision
 * @param scale target scale
 * @return the decimal value, or {@code null} for {@code null} input
 */
public static BigDecimal cast(final Integer value, final int precision, final int scale) {
    return value == null ? null : cast(value.longValue(), precision, scale);
}
|
// Casting a negative decimal string pads the scale ("-1.1" -> "-1.10").
@Test
public void shouldCastStringNegative() {
    // When:
    final BigDecimal decimal = DecimalUtil.cast("-1.1", 3, 2);
    // Then:
    assertThat(decimal, is(new BigDecimal("-1.10")));
}
|
/**
 * Sets the registry version and returns this builder for chaining.
 *
 * @param version the registry version string
 * @return this builder
 */
public RegistryBuilder version(String version) {
    this.version = version;
    return getThis();
}
|
// The version set on the builder must be carried into the built registry config.
@Test
void version() {
    RegistryBuilder builder = new RegistryBuilder();
    builder.version("version");
    Assertions.assertEquals("version", builder.build().getVersion());
}
|
/**
 * Formats the host part of a socket address for use in an HTTP Host header:
 * IPv6 literals are wrapped in square brackets (RFC 3986 host syntax),
 * everything else is returned as-is.
 *
 * @param addr the socket address to format
 * @return the formatted host name
 */
public static String formatHostnameForHttp(InetSocketAddress addr) {
    String hostString = NetUtil.getHostname(addr);
    if (NetUtil.isValidIpV6Address(hostString)) {
        if (!addr.isUnresolved()) {
            // Resolved address: use the canonical textual form of the IP.
            hostString = NetUtil.toAddressString(addr.getAddress());
        } else if (hostString.charAt(0) == '[' && hostString.charAt(hostString.length() - 1) == ']') {
            // If IPv6 address already contains brackets, let's return as is.
            return hostString;
        }
        return '[' + hostString + ']';
    }
    return hostString;
}
|
// An unresolved bare IPv6 literal must come back bracketed.
@Test
public void testIpv6Unresolved() {
    InetSocketAddress socketAddress = InetSocketAddress.createUnresolved("::1", 8080);
    assertEquals("[::1]", HttpUtil.formatHostnameForHttp(socketAddress));
}
|
/**
 * Checks whether a notification reason's subject matches the subject declared
 * by a subscription. Kind and apiVersion always participate in the comparison;
 * a blank name on the subscription side acts as a wildcard matching any name.
 *
 * @param subscription the subscription whose subject filter to apply
 * @param reasonSubject the subject of the reason being delivered
 * @return {@code true} if the reason subject matches the subscription's filter
 */
static boolean subjectMatch(Subscription subscription, Reason.Subject reasonSubject) {
    Assert.notNull(subscription, "The subscription must not be null");
    Assert.notNull(reasonSubject, "The reasonSubject must not be null");
    final var sourceSubject = subscription.getSpec().getReason().getSubject();
    // Build a comparable subject from the reason's kind/apiVersion.
    var matchSubject = new Subscription.ReasonSubject();
    matchSubject.setKind(reasonSubject.getKind());
    matchSubject.setApiVersion(reasonSubject.getApiVersion());
    if (StringUtils.isBlank(sourceSubject.getName())) {
        // Blank name on the subscription means "match any subject name".
        return sourceSubject.equals(matchSubject);
    }
    // Otherwise the name must match too.
    matchSubject.setName(reasonSubject.getName());
    return sourceSubject.equals(matchSubject);
}
|
// Covers subjectMatch: wildcard (blank-name) subscriptions match any name of
// the right kind, kind mismatches never match, and named subscriptions match
// only the identical name.
@Test
void subjectMatchTest() {
    var subscriber = new Subscription.Subscriber();
    subscriber.setName("test");
    final var subscription = createSubscription(subscriber);
    // match all name subscription
    var subject = new Reason.Subject();
    subject.setApiVersion("content.halo.run/v1alpha1");
    subject.setKind("Post");
    subject.setName("fake-post");
    assertThat(RecipientResolverImpl.subjectMatch(subscription, subject)).isTrue();
    // different kind
    subject = new Reason.Subject();
    subject.setApiVersion("content.halo.run/v1alpha1");
    subject.setKind("SinglePage");
    subject.setName("fake-post");
    assertThat(RecipientResolverImpl.subjectMatch(subscription, subject)).isFalse();
    // special case
    subscription.getSpec().getReason().getSubject().setName("other-post");
    subject = new Reason.Subject();
    subject.setApiVersion("content.halo.run/v1alpha1");
    subject.setKind("Post");
    subject.setName("fake-post");
    assertThat(RecipientResolverImpl.subjectMatch(subscription, subject)).isFalse();
    subject.setName("other-post");
    assertThat(RecipientResolverImpl.subjectMatch(subscription, subject)).isTrue();
}
|
/**
 * Returns the locally recorded time of this request as a string.
 * NOTE(review): the stored format is whatever the setter received — presumably an
 * ISO-8601 {@code LocalDateTime} string; confirm against callers of the setter.
 *
 * @return the local time string, possibly {@code null} if never set
 */
public String getTimeLocal() {
    return timeLocal;
}
|
/** The getter must return exactly the string previously passed to the setter. */
@Test
public void testGetTimeLocal() {
    final String expected = LocalDateTime.now().toString();
    shenyuRequestLog.setTimeLocal(expected);
    Assertions.assertEquals(shenyuRequestLog.getTimeLocal(), expected);
}
|
/**
 * Orders two versions by delegating to the underlying comparable version value.
 *
 * @param that the version to compare against; must not be null
 * @return a negative, zero, or positive value per the {@link Comparable} contract
 */
@Override
public int compareTo(@Nonnull Version that) {
    // requireNonNull returns its argument, so the null check and the
    // delegation collapse into a single expression.
    return version.compareTo(requireNonNull(that).getVersion());
}
|
/**
 * Exhaustive ordering checks for {@code Version.compareTo}: major/minor/patch
 * precedence and pre-release ("rc", "beta", "alpha") ordering, where a
 * pre-release sorts below the corresponding final release.
 */
@Test
public void testCompareTo() {
    // 0.20.2 against assorted releases and pre-releases
    Version v = Version.from(0, 20, 2);
    assertTrue(v.compareTo(Version.from(0, 19, 0)) > 0);
    assertTrue(v.compareTo(Version.from(0, 18, 2)) > 0);
    assertTrue(v.compareTo(Version.from(0, 19, 9001)) > 0);
    assertTrue(v.compareTo(Version.from(0, 20, 2)) == 0);
    assertTrue(v.compareTo(Version.from(0, 20, 0)) > 0);
    assertTrue(v.compareTo(Version.from(1, 0, 0)) < 0);
    assertTrue(v.compareTo(Version.from(1, 0, 9001)) < 0);
    assertTrue(v.compareTo(Version.from(1, 20, 0)) < 0);
    assertTrue(v.compareTo(Version.from(1, 1, 0)) < 0);
    assertTrue(v.compareTo(Version.from(3, 2, 1)) < 0);
    assertTrue(v.compareTo(Version.from(0, 19, 0, "rc.1")) > 0);
    assertTrue(v.compareTo(Version.from(1, 19, 0, "rc.1")) < 0);
    assertTrue(v.compareTo(Version.from(0, 21, 0, "rc.1")) < 0);
    assertTrue(v.compareTo(Version.from(0, 20, 1, "rc.1")) > 0);
    assertTrue(v.compareTo(Version.from(0, 20, 0, "rc.1")) > 0);
    // the final release outranks its own release candidate
    assertTrue(v.compareTo(Version.from(0, 20, 2, "rc.1")) > 0);
    assertTrue(v.compareTo(Version.from(0, 20, 3, "rc.1")) < 0);
    // 1.5.0, including the extreme bounds
    v = Version.from(1, 5, 0);
    assertTrue(v.compareTo(Version.from(0, 19, 0)) > 0);
    assertTrue(v.compareTo(Version.from(1, 0, 0)) > 0);
    assertTrue(v.compareTo(Version.from(0, 19, 9001)) > 0);
    assertTrue(v.compareTo(Version.from(1, 5, 0)) == 0);
    assertTrue(v.compareTo(Version.from(1, 4, 9)) > 0);
    assertTrue(v.compareTo(Version.from(1, 6, 0)) < 0);
    assertTrue(v.compareTo(Version.from(3, 0, 0)) < 0);
    assertTrue(v.compareTo(Version.from(1, 5, 9001)) < 0);
    assertTrue(v.compareTo(Version.from(1, 20, 0)) < 0);
    assertTrue(v.compareTo(Version.from(1, 20, 5)) < 0);
    assertTrue(v.compareTo(Version.from(3, 2, 1)) < 0);
    assertTrue(v.compareTo(Version.from(0, 19, 0, "rc.1")) > 0);
    assertTrue(v.compareTo(Version.from(2, 19, 0, "rc.1")) < 0);
    assertTrue(v.compareTo(Version.from(0, 0, 0)) > 0);
    assertTrue(v.compareTo(Version.from(Integer.MAX_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE)) < 0);
    // pre-release suffixes: alpha < beta < rc < final, numbered within each stage
    v = Version.from(1, 0, 0, "beta.2");
    assertTrue(v.compareTo(Version.from(1, 0, 0, "beta.1")) > 0);
    assertTrue(v.compareTo(Version.from(1, 0, 0, "beta.2")) == 0);
    assertTrue(v.compareTo(Version.from(1, 0, 0, "beta.3")) < 0);
    assertTrue(v.compareTo(Version.from(1, 0, 0, "alpha.1")) > 0);
    assertTrue(v.compareTo(Version.from(1, 0, 0, "alpha.3")) > 0);
    assertTrue(v.compareTo(Version.from(1, 0, 0, "rc.1")) < 0);
    assertTrue(v.compareTo(Version.from(1, 0, 0, "rc.3")) < 0);
    assertTrue(v.compareTo(Version.from(1, 0, 0)) < 0);
}
|
/**
 * Encodes the string using the first configured codec.
 * NOTE(review): the {@code delimiters} argument is ignored by this implementation —
 * presumably because this character set never switches code pages at delimiters,
 * so the parameter exists only to satisfy the common signature; confirm against
 * the multi-codec overrides.
 *
 * @param val the string to encode
 * @param delimiters delimiter characters (unused here)
 * @return the encoded bytes
 */
public byte[] encode(String val, String delimiters) {
    return codecs[0].encode(val);
}
|
/** A Greek person name must round-trip to the expected ISO 8859-7 byte sequence. */
@Test
public void testEncodeGreekPersonName() {
    assertArrayEquals(GREEK_PERSON_NAME_BYTE,
            iso8859_7().encode(GREEK_PERSON_NAME, PN_DELIMS));
}
|
/**
 * Alters SCRAM credentials (upsertions and deletions) for the given users.
 *
 * <p>Alterations that can be recognized as illegal locally (empty user name, empty
 * password, unknown SCRAM mechanism, or a failure while computing the salted
 * credential) are never sent to the broker. Their futures are completed
 * exceptionally only after the broker response for the remaining alterations has
 * been handled, so that a NOT_CONTROLLER retry leaves all per-user results
 * consistent.
 *
 * @param alterations the credential upsertions/deletions to apply
 * @param options request options such as the timeout
 * @return a result exposing one future per distinct user named in {@code alterations}
 */
@Override
public AlterUserScramCredentialsResult alterUserScramCredentials(List<UserScramCredentialAlteration> alterations,
                                                                 AlterUserScramCredentialsOptions options) {
    final long now = time.milliseconds();
    // One future per user; several alterations for the same user share a future.
    final Map<String, KafkaFutureImpl<Void>> futures = new HashMap<>();
    for (UserScramCredentialAlteration alteration: alterations) {
        futures.put(alteration.user(), new KafkaFutureImpl<>());
    }
    final Map<String, Exception> userIllegalAlterationExceptions = new HashMap<>();
    // We need to keep track of users with deletions of an unknown SCRAM mechanism
    final String usernameMustNotBeEmptyMsg = "Username must not be empty";
    // made final for consistency with the sibling message constants above/below
    final String passwordMustNotBeEmptyMsg = "Password must not be empty";
    final String unknownScramMechanismMsg = "Unknown SCRAM mechanism";
    // Phase 1: locally validate deletions (user name present, mechanism known).
    alterations.stream().filter(a -> a instanceof UserScramCredentialDeletion).forEach(alteration -> {
        final String user = alteration.user();
        if (user == null || user.isEmpty()) {
            userIllegalAlterationExceptions.put(alteration.user(), new UnacceptableCredentialException(usernameMustNotBeEmptyMsg));
        } else {
            UserScramCredentialDeletion deletion = (UserScramCredentialDeletion) alteration;
            ScramMechanism mechanism = deletion.mechanism();
            if (mechanism == null || mechanism == ScramMechanism.UNKNOWN) {
                userIllegalAlterationExceptions.put(user, new UnsupportedSaslMechanismException(unknownScramMechanismMsg));
            }
        }
    });
    // Creating an upsertion may throw InvalidKeyException or NoSuchAlgorithmException,
    // so keep track of which users are affected by such a failure so we can fail all their alterations later
    final Map<String, Map<ScramMechanism, AlterUserScramCredentialsRequestData.ScramCredentialUpsertion>> userInsertions = new HashMap<>();
    // Phase 2: locally validate upsertions and pre-compute the salted credentials.
    alterations.stream().filter(a -> a instanceof UserScramCredentialUpsertion)
        .filter(alteration -> !userIllegalAlterationExceptions.containsKey(alteration.user()))
        .forEach(alteration -> {
            final String user = alteration.user();
            if (user == null || user.isEmpty()) {
                userIllegalAlterationExceptions.put(alteration.user(), new UnacceptableCredentialException(usernameMustNotBeEmptyMsg));
            } else {
                UserScramCredentialUpsertion upsertion = (UserScramCredentialUpsertion) alteration;
                try {
                    byte[] password = upsertion.password();
                    if (password == null || password.length == 0) {
                        userIllegalAlterationExceptions.put(user, new UnacceptableCredentialException(passwordMustNotBeEmptyMsg));
                    } else {
                        ScramMechanism mechanism = upsertion.credentialInfo().mechanism();
                        if (mechanism == null || mechanism == ScramMechanism.UNKNOWN) {
                            userIllegalAlterationExceptions.put(user, new UnsupportedSaslMechanismException(unknownScramMechanismMsg));
                        } else {
                            userInsertions.putIfAbsent(user, new HashMap<>());
                            userInsertions.get(user).put(mechanism, getScramCredentialUpsertion(upsertion));
                        }
                    }
                } catch (NoSuchAlgorithmException e) {
                    // we might overwrite an exception from a previous alteration, but we don't really care
                    // since we just need to mark this user as having at least one illegal alteration
                    // and make an exception instance available for completing the corresponding future exceptionally
                    userIllegalAlterationExceptions.put(user, new UnsupportedSaslMechanismException(unknownScramMechanismMsg));
                } catch (InvalidKeyException e) {
                    // generally shouldn't happen since we deal with the empty password case above,
                    // but we still need to catch/handle it
                    userIllegalAlterationExceptions.put(user, new UnacceptableCredentialException(e.getMessage(), e));
                }
            }
        });
    // submit alterations only for users that do not have an illegal alteration as identified above
    Call call = new Call("alterUserScramCredentials", calcDeadlineMs(now, options.timeoutMs()),
        new ControllerNodeProvider()) {
        @Override
        public AlterUserScramCredentialsRequest.Builder createRequest(int timeoutMs) {
            return new AlterUserScramCredentialsRequest.Builder(
                new AlterUserScramCredentialsRequestData().setUpsertions(alterations.stream()
                    .filter(a -> a instanceof UserScramCredentialUpsertion)
                    .filter(a -> !userIllegalAlterationExceptions.containsKey(a.user()))
                    .map(a -> userInsertions.get(a.user()).get(((UserScramCredentialUpsertion) a).credentialInfo().mechanism()))
                    .collect(Collectors.toList()))
                .setDeletions(alterations.stream()
                    .filter(a -> a instanceof UserScramCredentialDeletion)
                    .filter(a -> !userIllegalAlterationExceptions.containsKey(a.user()))
                    .map(d -> getScramCredentialDeletion((UserScramCredentialDeletion) d))
                    .collect(Collectors.toList())));
        }
        @Override
        public void handleResponse(AbstractResponse abstractResponse) {
            AlterUserScramCredentialsResponse response = (AlterUserScramCredentialsResponse) abstractResponse;
            // Check for controller change
            for (Errors error : response.errorCounts().keySet()) {
                if (error == Errors.NOT_CONTROLLER) {
                    handleNotControllerError(error);
                }
            }
            /* Now that we have the results for the ones we sent,
             * fail any users that have an illegal alteration as identified above.
             * Be sure to do this after the NOT_CONTROLLER error check above
             * so that all errors are consistent in that case.
             */
            userIllegalAlterationExceptions.entrySet().stream().forEach(entry ->
                futures.get(entry.getKey()).completeExceptionally(entry.getValue())
            );
            response.data().results().forEach(result -> {
                KafkaFutureImpl<Void> future = futures.get(result.user());
                if (future == null) {
                    log.warn("Server response mentioned unknown user {}", result.user());
                } else {
                    Errors error = Errors.forCode(result.errorCode());
                    if (error != Errors.NONE) {
                        future.completeExceptionally(error.exception(result.errorMessage()));
                    } else {
                        future.complete(null);
                    }
                }
            });
            completeUnrealizedFutures(
                futures.entrySet().stream(),
                user -> "The broker response did not contain a result for user " + user);
        }
        @Override
        void handleFailure(Throwable throwable) {
            completeAllExceptionally(futures.values(), throwable);
        }
    };
    runnable.call(call, now);
    return new AlterUserScramCredentialsResult(new HashMap<>(futures));
}
|
/**
 * Alterations with an UNKNOWN SCRAM mechanism (one deletion, one upsertion) must fail
 * locally without being sent, while the valid alteration for user2 still succeeds;
 * {@code result.all()} must then fail because at least one per-user future failed.
 */
@Test
public void testAlterUserScramCredentialsUnknownMechanism() throws ExecutionException, InterruptedException {
    try (AdminClientUnitTestEnv env = mockClientEnv()) {
        env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
        final String user0Name = "user0";
        ScramMechanism user0ScramMechanism0 = ScramMechanism.UNKNOWN;
        final String user1Name = "user1";
        ScramMechanism user1ScramMechanism0 = ScramMechanism.UNKNOWN;
        final String user2Name = "user2";
        ScramMechanism user2ScramMechanism0 = ScramMechanism.SCRAM_SHA_256;
        // The broker is only ever asked about user2, so only user2 appears in the response.
        AlterUserScramCredentialsResponseData responseData = new AlterUserScramCredentialsResponseData();
        responseData.setResults(singletonList(
            new AlterUserScramCredentialsResponseData.AlterUserScramCredentialsResult().setUser(user2Name)));
        env.kafkaClient().prepareResponse(new AlterUserScramCredentialsResponse(responseData));
        AlterUserScramCredentialsResult result = env.adminClient().alterUserScramCredentials(asList(
            new UserScramCredentialDeletion(user0Name, user0ScramMechanism0),
            new UserScramCredentialUpsertion(user1Name, new ScramCredentialInfo(user1ScramMechanism0, 8192), "password"),
            new UserScramCredentialUpsertion(user2Name, new ScramCredentialInfo(user2ScramMechanism0, 4096), "password")));
        Map<String, KafkaFuture<Void>> resultData = result.values();
        assertEquals(3, resultData.size());
        // user0 and user1 were rejected locally and must complete exceptionally
        Stream.of(user0Name, user1Name).forEach(u -> {
            assertTrue(resultData.containsKey(u));
            assertThrows(Exception.class, () -> resultData.get(u).get(), "Expected request for user " + u + " to complete exceptionally, but it did not");
        });
        assertTrue(resultData.containsKey(user2Name));
        resultData.get(user2Name).get();
        assertThrows(Exception.class, () -> result.all().get(), "Expected 'result.all().get()' to throw an exception since at least one user failed, but it did not");
    }
}
|
/**
 * Allocates one slot assignment per execution attempt.
 *
 * <p>Each attempt is first indexed by its execution vertex; the allocator rejects
 * inputs where one vertex has more than one concurrent execution, because the
 * per-vertex allocation below could not be mapped back unambiguously.
 *
 * @param executionAttemptIds the executions to allocate slots for
 * @return a map from execution attempt to its slot assignment
 */
@Override
public Map<ExecutionAttemptID, ExecutionSlotAssignment> allocateSlotsFor(
        List<ExecutionAttemptID> executionAttemptIds) {
    final Map<ExecutionVertexID, ExecutionAttemptID> executionsByVertex = new HashMap<>();
    final List<ExecutionVertexID> vertexIds =
            executionAttemptIds.stream()
                    .map(ExecutionAttemptID::getExecutionVertexId)
                    .collect(Collectors.toList());
    for (ExecutionAttemptID executionId : executionAttemptIds) {
        executionsByVertex.put(executionId.getExecutionVertexId(), executionId);
    }
    // A duplicate vertex id collapses two map entries, which this size check detects.
    checkState(
            executionsByVertex.size() == executionAttemptIds.size(),
            "SlotSharingExecutionSlotAllocator does not support one execution vertex to have multiple concurrent executions");
    return allocateSlotsForVertices(vertexIds).stream()
            .collect(
                    Collectors.toMap(
                            vertexAssignment ->
                                    executionsByVertex.get(
                                            vertexAssignment.getExecutionVertexId()),
                            vertexAssignment ->
                                    new ExecutionSlotAssignment(
                                            executionsByVertex.get(
                                                    vertexAssignment.getExecutionVertexId()),
                                            vertexAssignment.getLogicalSlotFuture())));
}
|
/**
 * Two executions in the same sharing group must reuse one physical slot:
 * the second allocateSlotsFor call gets a logical slot from the shared slot
 * requested by the first call, so only one slot request reaches the provider.
 */
@Test
void testAllocateLogicalSlotFromAvailableSharedSlot() {
    AllocationContext context = AllocationContext.newBuilder().addGroup(EV1, EV2).build();
    context.allocateSlotsFor(EV1);
    Map<ExecutionAttemptID, ExecutionSlotAssignment> executionSlotAssignments =
            context.allocateSlotsFor(EV2);
    Collection<ExecutionVertexID> assignIds = getAssignIds(executionSlotAssignments.values());
    // execution 0 from the first allocateSlotsFor call and execution 1 from the second
    // allocateSlotsFor call
    // share a slot, therefore only one physical slot allocation should happen
    assertThat(assignIds).containsExactly(EV2);
    assertThat(context.getSlotProvider().getRequests()).hasSize(1);
}
|
/**
 * Callback invoked when a class loader is added to the scope model; refreshes the
 * serialize-security configuration from resources visible to that loader.
 * The {@code scopeModel} argument is not used by this implementation.
 */
@Override
public void onAddClassLoader(ModuleModel scopeModel, ClassLoader classLoader) {
    refreshClassLoader(classLoader);
}
|
/**
 * When the application config requests STRICT serialize checking, the
 * SerializeSecurityManager must report STRICT after the configurator processes
 * the context class loader.
 */
@Test
void testStatus3() {
    FrameworkModel frameworkModel = new FrameworkModel();
    ApplicationModel applicationModel = frameworkModel.newApplication();
    ModuleModel moduleModel = applicationModel.newModule();
    ApplicationConfig applicationConfig = new ApplicationConfig("Test");
    applicationConfig.setSerializeCheckStatus(SerializeCheckStatus.STRICT.name());
    applicationModel.getApplicationConfigManager().setApplication(applicationConfig);
    SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
    SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
    serializeSecurityConfigurator.onAddClassLoader(
            moduleModel, Thread.currentThread().getContextClassLoader());
    Assertions.assertEquals(SerializeCheckStatus.STRICT, ssm.getCheckStatus());
    // tear down the framework model so state does not leak into other tests
    frameworkModel.destroy();
}
|
/**
 * Deserializes JSON read from an input stream into an instance of the given
 * template class using the shared {@code objectMapper}.
 *
 * @param <T> the template type to produce
 * @param jsonStream the stream supplying the JSON text
 * @param templateClass the class to deserialize into
 * @return the deserialized template instance
 * @throws IOException if reading or parsing fails
 */
public static <T extends JsonTemplate> T readJson(InputStream jsonStream, Class<T> templateClass)
        throws IOException {
    return objectMapper.readValue(jsonStream, templateClass);
}
|
/** JSON supplied via an InputStream must deserialize into the expected field values. */
@Test
public void testReadJson_inputStream() throws IOException {
    final byte[] payload = "{\"number\":3, \"text\":\"cool\"}".getBytes(StandardCharsets.UTF_8);
    final TestJson parsed =
            JsonTemplateMapper.readJson(new ByteArrayInputStream(payload), TestJson.class);
    Assert.assertEquals(3, parsed.number);
    Assert.assertEquals("cool", parsed.text);
}
|
/**
 * Fallback endpoint invoked when the resilience4j plugin trips; wraps the
 * RESILIENCE4J_PLUGIN_FALLBACK error code into the standard ShenYu result envelope.
 *
 * @return the wrapped fallback error payload
 */
@GetMapping("/resilience4j")
public Object resilience4jFallBack() {
    return ShenyuResultWrap.error(ShenyuResultEnum.RESILIENCE4J_PLUGIN_FALLBACK, null);
}
|
/** The fallback endpoint must answer HTTP 200 (the error is carried in the body envelope). */
@Test
public void resilience4jFallBack() throws Exception {
    final MockHttpServletResponse response = this.mockMvc.perform(MockMvcRequestBuilders.get("/fallback/resilience4j"))
            .andReturn().getResponse();
    assertThat(response.getStatus()).isEqualTo(HttpStatus.OK.value());
}
|
/**
 * Asserts that a general (non-OAuth) signed-up user exists for the given phone number.
 * NOTE(review): delegates to {@code readGeneralSignUpUser} and discards the result —
 * presumably that method throws (e.g. UserErrorException, per the test below) when no
 * such user exists; confirm its contract.
 *
 * @param phone the phone number to look up
 */
@Transactional(readOnly = true)
public void existsGeneralSignUpUser(String phone) {
    readGeneralSignUpUser(phone);
}
|
// Verifies that a password-recovery verification request for an unknown phone
// number surfaces as a UserErrorException.
@DisplayName("존재하지 않는 사용자의 번호로 비밀번호 찾기 인증요청이 올 경우 UserErrorException을 발생시킨다.")
@Test
void findPasswordVerificationIfUserNotFound() {
    // given: no user is registered under this phone number
    String phone = "010-1234-5678";
    given(userService.readUserByPhone(phone)).willReturn(Optional.empty());
    // when - then
    assertThrows(UserErrorException.class, () -> authFindService.existsGeneralSignUpUser(phone));
}
|
/**
 * Computes the weight of an edge as travel time divided by priority plus a
 * distance-proportional cost. Any of the guards below can make the edge
 * effectively impassable by returning positive infinity.
 *
 * @param edgeState the edge being weighted
 * @param reverse whether the edge is traversed against its storage direction
 * @return the edge weight, or {@code Double.POSITIVE_INFINITY} if the edge is blocked
 */
@Override
public double calcEdgeWeight(EdgeIteratorState edgeState, boolean reverse) {
    double priority = edgeToPriorityMapping.get(edgeState, reverse);
    // priority 0 marks the edge as blocked for this profile
    if (priority == 0) return Double.POSITIVE_INFINITY;
    final double distance = edgeState.getDistance();
    double seconds = calcSeconds(distance, edgeState, reverse);
    if (Double.isInfinite(seconds)) return Double.POSITIVE_INFINITY;
    // add penalty at start/stop/via points
    if (edgeState.get(EdgeIteratorState.UNFAVORED_EDGE)) seconds += headingPenaltySeconds;
    double distanceCosts = distance * distanceInfluence;
    if (Double.isInfinite(distanceCosts)) return Double.POSITIVE_INFINITY;
    // higher priority lowers the time term; distance cost is added unscaled
    return seconds / priority + distanceCosts;
}
|
/**
 * A road_class_link priority multiplier of 0.5 must raise the weight of link edges
 * (the time term doubles when priority halves) while leaving non-link edges unchanged.
 */
@Test
public void testSpeedFactorBooleanEV() {
    EdgeIteratorState edge = graph.edge(0, 1).set(avSpeedEnc, 15, 15).setDistance(10);
    Weighting weighting = createWeighting(createSpeedCustomModel(avSpeedEnc).setDistanceInfluence(70d));
    assertEquals(3.1, weighting.calcEdgeWeight(edge, false), 0.01);
    // here we increase weight for edges that are road class links
    weighting = createWeighting(createSpeedCustomModel(avSpeedEnc)
            .setDistanceInfluence(70d)
            .addToPriority(If(RoadClassLink.KEY, MULTIPLY, "0.5")));
    BooleanEncodedValue rcLinkEnc = encodingManager.getBooleanEncodedValue(RoadClassLink.KEY);
    assertEquals(3.1, weighting.calcEdgeWeight(edge.set(rcLinkEnc, false), false), 0.01);
    assertEquals(5.5, weighting.calcEdgeWeight(edge.set(rcLinkEnc, true), false), 0.01);
}
|
/**
 * Registers the URI plugin bean, which rewrites/builds the downstream request URI
 * in the ShenYu plugin chain.
 *
 * @return a new {@link URIPlugin} instance
 */
@Bean
public ShenyuPlugin uriPlugin() {
    return new URIPlugin();
}
|
/**
 * The auto-configuration must expose a bean named "uriPlugin" whose plugin name
 * matches {@code PluginEnum.URI}.
 */
@Test
public void testURIPlugin() {
    new ApplicationContextRunner()
            .withConfiguration(AutoConfigurations.of(URIPluginConfiguration.class))
            .withBean(URIPluginConfigurationTest.class)
            .withPropertyValues("debug=true")
            .run(context -> {
                ShenyuPlugin plugin = context.getBean("uriPlugin", ShenyuPlugin.class);
                assertNotNull(plugin);
                assertThat(plugin.named()).isEqualTo(PluginEnum.URI.getName());
            });
}
|
/**
 * Flattens every validation error attached anywhere in the invalid config tree
 * into a single display string.
 *
 * @return all error messages from {@code cruiseConfig}, joined by AllConfigErrors
 */
public String getAllErrorMessages() {
    return new AllConfigErrors(cruiseConfig.getAllErrors()).asString();
}
|
/**
 * Errors registered on the top-level config and on a nested pipeline must both
 * surface in the flattened, comma-joined message string.
 */
@Test
public void shouldReturnAllErrorMessagesOnCruiseConfig(){
    BasicCruiseConfig cruiseConfig = GoConfigMother.defaultCruiseConfig();
    cruiseConfig.addError("BasicCruiseConfig_key", "BasicCruiseConfig_error");
    PipelineConfig pipelineConfig = PipelineConfigMother.pipelineConfig("");
    pipelineConfig.addError("PipelineConfig_key", "PipelineConfig_error");
    cruiseConfig.addPipeline("default", pipelineConfig);
    GoConfigInvalidException exception = new GoConfigInvalidException(cruiseConfig, "");
    assertThat(exception.getAllErrorMessages(), is("BasicCruiseConfig_error, PipelineConfig_error"));
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.