focal_method
stringlengths 13
60.9k
| test_case
stringlengths 25
109k
|
|---|---|
/**
 * Executes the given statement after running it through the configured injectors.
 * If execution fails, any side effects the injectors produced are reverted before
 * the exception is rethrown.
 *
 * @throws KsqlServerException if the command runner has reported a warning state
 */
public StatementExecutorResponse execute(
    final ConfiguredStatement<? extends Statement> statement,
    final KsqlExecutionContext executionContext,
    final KsqlSecurityContext securityContext
) {
  // Refuse new statements while the command runner is unhealthy (non-empty warning).
  final String commandRunnerWarningString = commandRunnerWarning.get();
  if (!commandRunnerWarningString.equals("")) {
    throw new KsqlServerException("Failed to handle Ksql Statement."
        + System.lineSeparator()
        + commandRunnerWarningString);
  }
  // Wrap the injector so its side effects (e.g. created resources) can be undone.
  final InjectorWithSideEffects injector = InjectorWithSideEffects.wrap(
      injectorFactory.apply(executionContext, securityContext.getServiceContext()));
  final ConfiguredStatementWithSideEffects<?> injectedWithSideEffects =
      injector.injectWithSideEffects(statement);
  try {
    return executeInjected(
        injectedWithSideEffects.getStatement(),
        statement,
        executionContext,
        securityContext);
  } catch (Exception e) {
    // Roll back whatever the injectors created, then propagate the original failure.
    injector.revertSideEffects(injectedWithSideEffects);
    throw e;
  }
}
|
@Test
public void shouldNotAbortTransactionIfInitTransactionFails() {
  // Given: initTransactions() fails before any transaction has begun
  doThrow(TimeoutException.class).when(transactionalProducer).initTransactions();
  // When: executing the statement, the failure surfaces as a KsqlServerException
  assertThrows(
      KsqlServerException.class,
      () -> distributor.execute(CONFIGURED_STATEMENT, executionContext, securityContext)
  );
  // Then: no abort is attempted, since no transaction was ever started
  verify(transactionalProducer, times(0)).abortTransaction();
}
|
/**
 * Subscribes to the given topics without a rebalance listener
 * (delegates to {@code subscribeInternal} with an empty listener).
 */
@Override
public void subscribe(Collection<String> topics) {
  subscribeInternal(topics, Optional.empty());
}
|
@Test
public void testSubscribeToNullTopicCollection() {
  consumer = newConsumer();
  // A null topic collection is rejected up front with IllegalArgumentException.
  assertThrows(IllegalArgumentException.class, () -> consumer.subscribe((List<String>) null));
}
|
/**
 * Returns the Euclidean distance between two integer vectors,
 * i.e. the square root of their squared distance.
 */
public static double distance(int[] a, int[] b) {
    final double squared = squaredDistance(a, b);
    return sqrt(squared);
}
|
@Test
public void testDistance_doubleArr_doubleArr() {
  System.out.println("distance");
  // NOTE(review): this exercises the double[] overload, not the int[] one shown above.
  double[] x = {-2.1968219, -0.9559913, -0.0431738, 1.0567679, 0.3853515};
  double[] y = {-1.7781325, -0.6659839, 0.9526148, -0.9460919, -0.3925300};
  // Expected value pre-computed; 1E-6 tolerance for floating point.
  assertEquals(2.422302, MathEx.distance(x, y), 1E-6);
}
|
/**
 * Removes the template whose name matches the given one.
 * If several templates match, the last match wins; if none match,
 * the call is a no-op (removing null from the collection does nothing).
 */
public void removeTemplateNamed(CaseInsensitiveString name) {
    PipelineTemplateConfig match = null;
    for (PipelineTemplateConfig candidate : this) {
        if (candidate.matches(name)) {
            match = candidate;
        }
    }
    remove(match);
}
|
@Test
public void shouldRemoveATemplateByName() {
  // Given two templates, removing one by name leaves only the other.
  PipelineTemplateConfig template2 = template("template2");
  TemplatesConfig templates = new TemplatesConfig(template("template1"), template2);
  templates.removeTemplateNamed(new CaseInsensitiveString("template1"));
  assertThat(templates.size(), is(1));
  assertThat(templates.get(0), is(template2));
}
|
/**
 * Creates a new instance of {@code destCls} and copies the matching
 * properties of {@code origin} into it.
 *
 * @return the freshly created, populated instance
 */
public static <T> T copy(Object origin, Class<T> destCls) {
    final T target = ReflectKit.newInstance(destCls);
    copy(origin, target);
    return target;
}
|
@Test
public void testCopy() {
  Person source = new Person("jack", "nu", 22);
  // Copy into an existing instance.
  Person dest = new Person();
  BeanKit.copy(source, dest);
  Assert.assertEquals(source.toString(), dest.toString());
  // Copy into a freshly created instance of the same class.
  Person dest2 = BeanKit.copy(source, Person.class);
  Assert.assertEquals(source.toString(), dest2.toString());
  // Copy into a different (compatible) class; matching properties carry over.
  MyPerson myPerson = BeanKit.copy(source, MyPerson.class);
  Assert.assertNotNull(myPerson.getName());
}
|
/**
 * CSV-escapes the given value without trimming whitespace
 * (delegates to the two-arg overload with {@code trimWhiteSpace = false}).
 */
public static CharSequence escapeCsv(CharSequence value) {
  return escapeCsv(value, false);
}
|
@Test
public void escapeCsvWithSingleQuoteAndCharacter() {
  // A value containing a double quote must be wrapped in quotes with the quote doubled.
  CharSequence value = "\"f";
  CharSequence expected = "\"\"\"f\"";
  // NOTE(review): escapeCsv(value, expected) is presumably a local asserting helper
  // (escape-then-compare); confirm it is not the production two-arg overload.
  escapeCsv(value, expected);
}
|
/**
 * Returns the action's declared return type, or {@code null}
 * when the underlying resource method is not an ACTION.
 */
@Override
public Class<?> getActionReturnType()
{
    if (_resourceMethod.getMethodType() != ResourceMethod.ACTION)
    {
        return null;
    }
    return _resourceMethod.getActionReturnType();
}
|
@Test
public void testGetActionReturnType()
{
  // ACTION method type: the declared action return type is exposed.
  when(resourceMethod.getMethodType()).thenReturn(ResourceMethod.ACTION);
  Mockito.doReturn(String.class).when(resourceMethod).getActionReturnType();
  FilterRequestContext filterContext = new FilterRequestContextInternalImpl(context, resourceMethod, null);
  Assert.assertEquals(filterContext.getActionReturnType(), String.class);
  // Non-ACTION method type: null is returned instead.
  when(resourceMethod.getMethodType()).thenReturn(ResourceMethod.GET);
  Assert.assertNull(filterContext.getActionReturnType());
}
|
/**
 * Logs a warning on initialization: H2 is not supported for production use.
 * The metadata argument is intentionally unused.
 */
@Override
public void init(DatabaseMetaData metaData) {
  LoggerFactory.getLogger(getClass()).warn("H2 database should be used for evaluation purpose only.");
}
|
@Test
public void init_logs_warning() {
  underTest.init(mock(DatabaseMetaData.class));
  // The H2 evaluation-only warning must be emitted at WARN level.
  assertThat(logs.logs(Level.WARN)).contains("H2 database should be used for evaluation purpose only.");
}
|
/**
 * Persists the job inside a single transaction and, only after a successful
 * commit, notifies job-stats listeners. SQL failures are wrapped in
 * {@link StorageException}.
 */
@Override
public Job save(Job jobToSave) {
  // Connection is opened before the Transaction and both close in reverse order.
  try (final Connection conn = dataSource.getConnection(); final Transaction transaction = new Transaction(conn)) {
    final Job savedJob = jobTable(conn).save(jobToSave);
    transaction.commit();
    // Listeners fire only after commit, so they never observe uncommitted state.
    notifyJobStatsOnChangeListeners();
    return savedJob;
  } catch (SQLException e) {
    throw new StorageException(e);
  }
}
|
@Test
void saveJob() throws SQLException {
  // One row updated signals a successful insert/update at the JDBC level.
  when(preparedStatement.executeUpdate()).thenReturn(1);
  assertThatCode(() -> jobStorageProvider.save(anEnqueuedJob().build())).doesNotThrowAnyException();
}
|
/**
 * Validates that the experience value for the given level sits strictly between
 * the experience of all lower levels and all higher levels in the list.
 * The entry with the same id (the one being edited) is skipped.
 */
@VisibleForTesting
void validateExperienceOutRange(List<MemberLevelDO> list, Long id, Integer level, Integer experience) {
  for (MemberLevelDO levelDO : list) {
    if (levelDO.getId().equals(id)) {
      // skip the record being updated itself
      continue;
    }
    if (levelDO.getLevel() < level) {
      // experience must be greater than that of every lower level
      if (experience <= levelDO.getExperience()) {
        throw exception(LEVEL_EXPERIENCE_MIN, levelDO.getName(), levelDO.getExperience());
      }
    } else if (levelDO.getLevel() > level) {
      // experience must be less than that of every higher level
      if (experience >= levelDO.getExperience()) {
        throw exception(LEVEL_EXPERIENCE_MAX, levelDO.getName(), levelDO.getExperience());
      }
    }
  }
}
|
@Test
public void testUpdateLevel_experienceOutRange() {
  // Prepare parameters.
  int level = 10;
  int experience = 10;
  Long id = randomLongId();
  String name = randomString();
  // Mock data: insert an existing level record to validate against.
  memberlevelMapper.insert(randomLevelDO(o -> {
    o.setLevel(level);
    o.setExperience(experience);
    o.setName(name);
  }));
  List<MemberLevelDO> list = memberlevelMapper.selectList();
  // Higher level with lower experience -> violates the lower bound.
  assertServiceException(() -> levelService.validateExperienceOutRange(list, id, level + 1, experience - 1), LEVEL_EXPERIENCE_MIN, name, level);
  // Lower level with higher experience -> violates the upper bound.
  assertServiceException(() -> levelService.validateExperienceOutRange(list, id, level - 1, experience + 1), LEVEL_EXPERIENCE_MAX, name, level);
}
|
/**
 * Re-timestamps the incoming record via the timestamp assigner, forwards it
 * downstream, and then feeds the event to the watermark generator.
 */
@Override
public void processElement(final StreamRecord<T> element) throws Exception {
  final T event = element.getValue();
  // Records with no timestamp present Long.MIN_VALUE to the assigner.
  final long previousTimestamp =
      element.hasTimestamp() ? element.getTimestamp() : Long.MIN_VALUE;
  final long newTimestamp = timestampAssigner.extractTimestamp(event, previousTimestamp);
  element.setTimestamp(newTimestamp);
  // Emit first, then update the watermark generator with the same timestamp.
  output.collect(element);
  watermarkGenerator.onEvent(event, newTimestamp, wmOutput);
}
|
@Test
void testNegativeTimestamps() throws Exception {
  // Harness with a generator that never emits watermarks, so only timestamps matter.
  OneInputStreamOperatorTestHarness<Long, Long> testHarness =
      createTestHarness(
          WatermarkStrategy.forGenerator((ctx) -> new NeverWatermarkGenerator())
              .withTimestampAssigner((ctx) -> new LongExtractor()));
  // Covers the full long range, including negative values and extremes.
  long[] values = {Long.MIN_VALUE, -1L, 0L, 1L, 2L, 3L, Long.MAX_VALUE};
  for (long value : values) {
    testHarness.processElement(new StreamRecord<>(value));
  }
  // Each record's assigned timestamp must equal its value, unmodified.
  for (long value : values) {
    assertThat(pollNextStreamRecord(testHarness).getTimestamp()).isEqualTo(value);
  }
}
|
/**
 * Looks up the directory entry for the given path relative to the working
 * directory, honoring the given link options.
 *
 * @throws NoSuchFileException if an intermediate component of the path does
 *     not exist or is not a directory
 */
public DirectoryEntry lookUp(
    File workingDirectory, JimfsPath path, Set<? super LinkOption> options) throws IOException {
  checkNotNull(path);
  checkNotNull(options);
  final DirectoryEntry entry = lookUp(workingDirectory, path, options, 0);
  if (entry != null) {
    return entry;
  }
  // null signals a missing or non-directory intermediate component
  throw new NoSuchFileException(path.toString());
}
|
@Test
public void testLookup_absolute_intermediateSymlink_parentExists() throws IOException {
  // Paths that traverse an intermediate symlink still resolve to the real parent.
  assertParentExists(lookup("/work/four/five/baz"), "foo");
  assertParentExists(lookup("/work/four/six/baz"), "one");
}
|
/**
 * Parses a media-type string (e.g. "text/plain") into a {@link MediaType}.
 *
 * @throws EncodingException via {@code CONTAINER.missingMediaType()} when the
 *     input is null or empty
 */
@ProtoFactory
public static MediaType fromString(String tree) {
    if (tree == null || tree.isEmpty()) {
        throw CONTAINER.missingMediaType();
    }
    final Matcher matcher = TREE_PATTERN.matcher(tree);
    return parseSingleMediaType(tree, matcher, false);
}
|
// Null input must be rejected with an EncodingException.
@Test(expected = EncodingException.class)
public void testParsingNull() {
  MediaType.fromString(null);
}
|
/**
 * Returns the REST API configuration.
 *
 * @deprecated since 5.5, scheduled for removal; migrate off this accessor.
 */
@Deprecated(since = "5.5", forRemoval = true)
public RestApiConfig getRestApiConfig() {
  return restApiConfig;
}
|
@Test
public void testRestApiConfig_isNotNullByDefault() {
  // Even though deprecated, the accessor must return a non-null default config.
  assertNotNull(networkConfig.getRestApiConfig());
}
|
/**
 * Validates that the tenant exists, is enabled, and has not expired.
 * Throws a business exception describing the first failed check.
 */
@Override
public void validTenant(Long id) {
    final TenantDO tenantDO = getTenant(id);
    // Check 1: the tenant must exist.
    if (tenantDO == null) {
        throw exception(TENANT_NOT_EXISTS);
    }
    // Check 2: the tenant must not be disabled.
    if (tenantDO.getStatus().equals(CommonStatusEnum.DISABLE.getStatus())) {
        throw exception(TENANT_DISABLE, tenantDO.getName());
    }
    // Check 3: the tenant's package must not have expired.
    if (DateUtils.isExpired(tenantDO.getExpireTime())) {
        throw exception(TENANT_EXPIRE, tenantDO.getName());
    }
}
|
@Test
public void testValidTenant_success() {
  // Mock data: an enabled tenant that expires one day from now.
  TenantDO tenant = randomPojo(TenantDO.class, o -> o.setId(1L).setStatus(CommonStatusEnum.ENABLE.getStatus())
      .setExpireTime(LocalDateTime.now().plusDays(1)));
  tenantMapper.insert(tenant);
  // Invoke; completing without an exception is the success criterion.
  tenantService.validTenant(1L);
}
|
/**
 * Derives the fully qualified package name for a class file by joining the
 * base package with the subpackage relative to the base directory. Empty
 * components (the default package) are omitted from the result.
 */
static String determinePackageName(Path baseDir, String basePackageName, Path classFile) {
    String subPackageName = determineSubpackageName(baseDir, classFile);
    if (basePackageName.isEmpty()) {
        return subPackageName;
    }
    if (subPackageName.isEmpty()) {
        return basePackageName;
    }
    return basePackageName + PACKAGE_SEPARATOR_STRING + subPackageName;
}
|
@Test
void determinePackageNameFromComPackage() {
  // The base dir ends at "com"; the remainder of the class file path forms the subpackage.
  Path baseDir = Paths.get("path", "to", "com");
  String basePackageName = "com";
  Path classFile = Paths.get("path", "to", "com", "example", "app", "App.class");
  String packageName = ClasspathSupport.determinePackageName(baseDir, basePackageName, classFile);
  assertEquals("com.example.app", packageName);
}
|
/**
 * Configures this inspect command to query the container's status
 * ({@code --format={{.State.Status}}}) and returns {@code this} for chaining.
 */
public DockerInspectCommand getContainerStatus() {
  // NOTE(review): the format is recorded both as a command argument and in
  // commandArguments — presumably two consumers need it; confirm both are used.
  super.addCommandArguments("format", STATUS_TEMPLATE);
  this.commandArguments = String.format("--format=%s", STATUS_TEMPLATE);
  return this;
}
|
@Test
public void testGetContainerStatus() throws Exception {
  dockerInspectCommand.getContainerStatus();
  // The command map must carry the docker sub-command, the status format
  // template, the container name, and nothing else (exactly 3 entries).
  assertEquals("inspect", StringUtils.join(",",
      dockerInspectCommand.getDockerCommandWithArguments()
          .get("docker-command")));
  assertEquals("{{.State.Status}}", StringUtils.join(",",
      dockerInspectCommand.getDockerCommandWithArguments().get("format")));
  assertEquals("foo", StringUtils.join(",",
      dockerInspectCommand.getDockerCommandWithArguments().get("name")));
  assertEquals(3,
      dockerInspectCommand.getDockerCommandWithArguments().size());
}
|
/** Returns how many validation errors were collected. */
public int getNumberOfErrors() {
  return errors.size();
}
|
@Test
void testErrorsEmpty() {
  // An exception constructed with no error details reports zero errors.
  assertEquals(0, new JsonValidationException(null, null, new Exception()).getNumberOfErrors());
}
|
/** Encodes the given span into its serialized byte representation. */
public abstract byte[] encode(MutableSpan input);
|
// Verifies JSON v2 encoding escapes control characters, quotes, backslashes,
// and the JavaScript-hostile line/paragraph separators (U+2028/U+2029).
@Test void specialCharsInJson_JSON_V2() {
  assertThat(new String(encoder.encode(utf8Span), UTF_8))
      .isEqualTo(
          "{\"traceId\":\"0000000000000001\",\"id\":\"0000000000000001\",\"name\":\"\\\"\\\\\\t\\b\\n\\r\\f\",\"annotations\":[{\"timestamp\":1,\"value\":\"\\u2028 and \\u2029\"}],\"tags\":{\"\\\"foo\":\"Database error: ORA-00942:\\u2028 and \\u2029 table or view does not exist\\n\"}}");
}
|
/**
 * INT variant of the hyperbolic-sine UDF: widens the value to a Double and
 * delegates to the Double overload, which handles null by returning null.
 */
@Udf(description = "Returns the hyperbolic sine of an INT value")
public Double sinh(
    @UdfParameter(
        value = "value",
        description = "The value in radians to get the hyperbolic sine of."
    ) final Integer value
) {
  final Double radians = value == null ? null : value.doubleValue();
  return sinh(radians);
}
|
@Test
public void shouldHandleNull() {
  // Every overload (Integer, Long, Double) must propagate null rather than throw.
  assertThat(udf.sinh((Integer) null), is(nullValue()));
  assertThat(udf.sinh((Long) null), is(nullValue()));
  assertThat(udf.sinh((Double) null), is(nullValue()));
}
|
/**
 * Renders the FreeMarker template named after the step's function, using the
 * step's parameters. Blank string parameters are dropped; strings starting
 * with "$" are passed as bare variable references (the "$" is stripped); all
 * other strings are wrapped in double quotes; non-string values pass through.
 *
 * @throws IllegalArgumentException if the template is missing or rendering fails
 */
static final String generateForFragment(RuleBuilderStep step, Configuration configuration) {
    final String fragmentName = step.function();
    try {
        Template template = configuration.getTemplate(fragmentName);
        StringWriter writer = new StringWriter();
        Map<String, Object> filteredParams = new HashMap<>();
        if (step.parameters() != null) {
            for (Map.Entry<String, Object> val : step.parameters().entrySet()) {
                if (val.getValue() instanceof String s) {
                    if (StringUtils.isBlank(s)) {
                        // blank strings are intentionally omitted from the params
                    } else if (s.startsWith("$")) {
                        // "$name" means: pass the bare variable reference "name"
                        filteredParams.put(val.getKey(), s.substring(1));
                    } else {
                        // plain strings are rendered as quoted literals
                        filteredParams.put(val.getKey(), "\"" + s + "\"");
                    }
                } else {
                    filteredParams.put(val.getKey(), val.getValue());
                }
            }
        }
        template.process(filteredParams, writer);
        writer.close();
        return writer.toString();
    } catch (TemplateNotFoundException e) {
        // Preserve the original exception as the cause so the lookup stack trace survives.
        throw new IllegalArgumentException(f("No template found for fragment %s", fragmentName), e);
    } catch (Exception e) {
        throw new IllegalArgumentException("Error converting fragment template to fragment.", e);
    }
}
|
@Test
public void generateForFragmentThrowsException_WhenTemplateNotFound() {
  // A fragment name with no backing template must surface as IllegalArgumentException.
  RuleBuilderStep step = mock(RuleBuilderStep.class);
  when(step.function()).thenReturn("unknown");
  assertThatThrownBy(() -> ParserUtil.generateForFragment(step, configuration))
      .isInstanceOf(IllegalArgumentException.class);
}
|
/**
 * Checks whether the given path lies under one of the configured allowed paths.
 * With no allowed paths configured, everything is permitted. Paths are compared
 * after resolving to real paths, so symlinks cannot escape the allow list.
 */
public boolean fileIsInAllowedPath(Path path) {
    // No restrictions configured: allow everything.
    if (allowedPaths.isEmpty()) {
        return true;
    }
    final Path realFilePath = resolveRealPath(path);
    if (realFilePath == null) {
        // The path cannot be resolved to a real file: deny.
        return false;
    }
    for (Path allowedPath : allowedPaths) {
        final Path realAllowedPath = resolveRealPath(allowedPath);
        if (realAllowedPath == null) {
            continue;
        }
        if (realFilePath.startsWith(realAllowedPath)) {
            return true;
        }
    }
    return false;
}
|
@Test
public void verifyToRealPathCalled() throws IOException {
  final Path permittedPath = mock(Path.class);
  final Path filePath = mock(Path.class);
  pathChecker = new AllowedAuxiliaryPathChecker(new TreeSet<>(Collections.singleton(permittedPath)));
  when(filePath.toRealPath()).thenReturn(filePath);
  pathChecker.fileIsInAllowedPath(filePath);
  // Both the candidate file and the allowed path must be resolved via toRealPath()
  // so that symlinks are compared by their real targets.
  verify(permittedPath, times(1)).toRealPath();
  verify(filePath, times(1)).toRealPath();
}
|
/**
 * Forwards the request either to a named servlet (named dispatcher) or to the
 * configured dispatch path. The response's buffered output is reset first;
 * headers and cookies already set are kept.
 *
 * @throws IllegalStateException if the dispatcher has no container handler or
 *     the response has already been committed
 */
@Override
@SuppressWarnings("unchecked")
public void forward(ServletRequest servletRequest, ServletResponse servletResponse)
        throws ServletException, IOException {
    if (lambdaContainerHandler == null) {
        throw new IllegalStateException("Null container handler in dispatcher");
    }
    if (servletResponse.isCommitted()) {
        throw new IllegalStateException("Cannot forward request with committed response");
    }
    // Reset any output that has been buffered, but keep headers/cookies.
    // resetBuffer() throws IllegalStateException on a committed response; the
    // explicit check above already guards that, so no catch-and-rethrow is needed.
    servletResponse.resetBuffer();
    if (isNamedDispatcher) {
        lambdaContainerHandler.doFilter((HttpServletRequest) servletRequest, (HttpServletResponse) servletResponse, ((AwsServletRegistration)servletRequest.getServletContext().getServletRegistration(dispatchTo)).getServlet());
        return;
    }
    // Path-based dispatch: mark the request as FORWARD and rewrite its path.
    servletRequest.setAttribute(DISPATCHER_TYPE_ATTRIBUTE, DispatcherType.FORWARD);
    setRequestPath(servletRequest, dispatchTo);
    lambdaContainerHandler.doFilter((HttpServletRequest) servletRequest, (HttpServletResponse) servletResponse, getServlet((HttpServletRequest)servletRequest));
}
|
@Test
void forwardRequest_committedResponse_throwsIllegalStateException() throws InvalidRequestEventException {
  AwsProxyRequest proxyRequest = new AwsProxyRequestBuilder("/hello", "GET").build();
  HttpServletRequest servletRequest = requestReader.readRequest(proxyRequest, null, new MockLambdaContext(), ContainerConfig.defaultConfig());
  AwsProxyRequestDispatcher dispatcher = new AwsProxyRequestDispatcher(FORWARD_PATH, false, mockLambdaHandler(null));
  AwsHttpServletResponse resp = new AwsHttpServletResponse(servletRequest, new CountDownLatch(1));
  try {
    // Committing the response first must make forward() fail.
    resp.flushBuffer();
    dispatcher.forward(servletRequest, resp);
  } catch (ServletException e) {
    fail("Unexpected ServletException");
  } catch (IOException e) {
    fail("Unexpected IOException");
  } catch (Exception e) {
    // The only acceptable failure is IllegalStateException from forward().
    assertTrue(e instanceof IllegalStateException);
    return;
  }
  // Reaching here means forward() did not throw at all.
  fail();
}
|
/**
 * Returns true if the beacon falls within this region: every region identifier
 * must either be null (wildcard) or equal the beacon's identifier at the same
 * position, and the bluetooth address (when set) must match case-insensitively.
 */
public boolean matchesBeacon(Beacon beacon) {
  // All identifiers must match, or the corresponding region identifier must be null.
  for (int i = mIdentifiers.size(); --i >= 0; ) {
    final Identifier identifier = mIdentifiers.get(i);
    // A beacon with fewer identifiers than the region has a null at this slot.
    Identifier beaconIdentifier = null;
    if (i < beacon.mIdentifiers.size()) {
      beaconIdentifier = beacon.getIdentifier(i);
    }
    // Mismatch when the region requires an identifier the beacon lacks, or both
    // exist but differ. A null region identifier matches anything.
    if ((beaconIdentifier == null && identifier != null) ||
        (beaconIdentifier != null && identifier != null && !identifier.equals(beaconIdentifier))) {
      return false;
    }
  }
  // When the region pins a bluetooth address, it must match (case-insensitive).
  if (mBluetoothAddress != null && !mBluetoothAddress.equalsIgnoreCase(beacon.mBluetoothAddress)) {
    return false;
  }
  return true;
}
|
// Verifies a beacon does NOT match a region pinned to a different bluetooth MAC.
@Test
public void testBeaconDoesNotMatchRegionWithDiffrentBluetoothMac() {
  Beacon beacon = new AltBeacon.Builder().setId1("1").setId2("2").setId3("3").setRssi(4)
      .setBeaconTypeCode(5).setTxPower(6).setBluetoothAddress("01:02:03:04:05:06").build();
  Region region = new Region("myRegion", "01:02:03:04:05:99");
  // Fixed assertion message: it previously claimed the MACs were "the same",
  // contradicting the assertFalse on a mismatched address.
  assertFalse("Beacon should not match region with a different mac", region.matchesBeacon(beacon));
}
|
/**
 * Returns (and lazily creates) the proxy implementing the given options
 * interface, backed by this handler. Uses double-checked locking against the
 * immutable computedProperties snapshot so repeated calls are lock-free once
 * the proxy exists.
 */
<T extends PipelineOptions> T as(Class<T> iface) {
  checkNotNull(iface);
  checkArgument(iface.isInterface(), "Not an interface: %s", iface);
  // Fast path: proxy already cached in the current snapshot.
  T existingOption = computedProperties.interfaceToProxyCache.getInstance(iface);
  if (existingOption == null) {
    synchronized (this) {
      // double check
      existingOption = computedProperties.interfaceToProxyCache.getInstance(iface);
      if (existingOption == null) {
        // Validate the interface against the already-known interfaces before
        // building its proxy.
        Registration<T> registration =
            PipelineOptionsFactory.CACHE
                .get()
                .validateWellFormed(iface, computedProperties.knownInterfaces);
        List<PropertyDescriptor> propertyDescriptors = registration.getPropertyDescriptors();
        Class<T> proxyClass = registration.getProxyClass();
        existingOption =
            InstanceBuilder.ofType(proxyClass)
                .fromClass(proxyClass)
                .withArg(InvocationHandler.class, this)
                .build();
        // Publish a new immutable snapshot containing the new proxy; readers
        // outside the lock only ever see fully-built snapshots.
        computedProperties =
            computedProperties.updated(iface, existingOption, propertyDescriptors);
      }
    }
  }
  return existingOption;
}
|
@Test
public void testJsonConversionOfAJsonConvertedType() throws Exception {
  SimpleTypes options = PipelineOptionsFactory.as(SimpleTypes.class);
  options.setString("TestValue");
  options.setInteger(5);
  // It is important here that our first serialization goes to our most basic
  // type so that we handle the case when we don't know the types of certain
  // properties because the intermediate instance of PipelineOptions never
  // saw their interface.
  SimpleTypes options2 =
      serializeDeserialize(
          SimpleTypes.class, serializeDeserialize(PipelineOptions.class, options));
  // Values must survive the double round-trip intact.
  assertEquals(5, options2.getInteger());
  assertEquals("TestValue", options2.getString());
}
|
/**
 * Builds the job-exceptions response, honoring the optional upper-limit and
 * failure-label-filter query parameters. Missing parameters fall back to the
 * default maximum and an empty label filter respectively.
 */
@Override
protected JobExceptionsInfoWithHistory handleRequest(
        HandlerRequest<EmptyRequestBody> request, ExecutionGraphInfo executionGraph) {
    final List<Integer> maxSizeParams =
            request.getQueryParameter(UpperLimitExceptionParameter.class);
    // First occurrence wins; absent parameter means the configured default.
    final int exceptionToReportMaxSize =
            maxSizeParams.isEmpty() ? MAX_NUMBER_EXCEPTION_TO_REPORT : maxSizeParams.get(0);
    final List<FailureLabelFilterParameter.FailureLabel> labelParams =
            request.getQueryParameter(FailureLabelFilterParameter.class);
    final List<FailureLabelFilterParameter.FailureLabel> failureLabelFilter =
            labelParams.isEmpty() ? EMPTY_FAILURE_LABEL_FILTER : labelParams;
    return createJobExceptionsInfo(
            executionGraph, exceptionToReportMaxSize, failureLabelFilter);
}
|
@Test
void testOnlyExceptionHistory()
    throws HandlerRequestException, ExecutionException, InterruptedException {
  // Given: an execution graph that has exception history but no root failure cause.
  final RuntimeException rootThrowable = new RuntimeException("exception #0");
  final long rootTimestamp = System.currentTimeMillis();
  final RootExceptionHistoryEntry rootEntry = fromGlobalFailure(rootThrowable, rootTimestamp);
  final ExecutionGraphInfo executionGraphInfo =
      createExecutionGraphInfoWithoutFailureCause(rootEntry);
  final HandlerRequest<EmptyRequestBody> request =
      createRequest(executionGraphInfo.getJobId(), 10);
  final JobExceptionsInfoWithHistory response =
      testInstance.handleRequest(request, executionGraphInfo);
  // Then: the legacy root-exception fields stay null while the history is populated.
  assertThat(response.getRootException()).isNull();
  assertThat(response.getRootTimestamp()).isNull();
  assertThat(response.getExceptionHistory().getEntries())
      .satisfies(
          matching(
              contains(
                  historyContainsGlobalFailure(
                      rootThrowable, rootTimestamp))));
}
|
/**
 * Starts the consumer: creates the Azure Service Bus processor client wired to
 * this consumer's message and error callbacks, then starts it.
 */
@Override
protected void doStart() throws Exception {
  super.doStart();
  LOG.debug("Creating connection to Azure ServiceBus");
  client = getEndpoint().getServiceBusClientFactory().createServiceBusProcessorClient(getConfiguration(),
      this::processMessage, this::processError);
  client.start();
}
|
@Test
void synchronizationDoesNotCompleteMessageWhenReceiveModeIsReceiveAndDelete() throws Exception {
  try (ServiceBusConsumer consumer = new ServiceBusConsumer(endpoint, processor)) {
    // In RECEIVE_AND_DELETE mode the broker removes messages on receipt,
    // so the exchange synchronization must not call complete() on the context.
    when(configuration.getServiceBusReceiveMode()).thenReturn(ServiceBusReceiveMode.RECEIVE_AND_DELETE);
    consumer.doStart();
    verify(client).start();
    verify(clientFactory).createServiceBusProcessorClient(any(), any(), any());
    when(messageContext.getMessage()).thenReturn(message);
    // Drive the captured message callback as the processor client would.
    processMessageCaptor.getValue().accept(messageContext);
    verify(messageContext).getMessage();
    Exchange exchange = exchangeCaptor.getValue();
    assertThat(exchange).isNotNull();
    Synchronization synchronization = exchange.getExchangeExtension().handoverCompletions().get(0);
    synchronization.onComplete(exchange);
    // Only getMessage() may have been called; no completion/abandon on the context.
    verifyNoMoreInteractions(messageContext);
  }
}
|
/**
 * Appends a String-typed fragment to this storage ID and returns {@code this}
 * for chaining.
 *
 * @throws IllegalArgumentException if the fragment name is blank
 * @throws IllegalStateException if this ID has already been sealed
 */
public StorageID append(String name, String value) {
    if (StringUtil.isBlank(name)) {
        throw new IllegalArgumentException("The name of storage ID should not be null or empty.");
    }
    if (sealed) {
        throw new IllegalStateException("The storage ID is sealed. Can't append a new fragment, name=" + name);
    }
    final Fragment fragment = new Fragment(new String[] {name}, String.class, false, value);
    fragments.add(fragment);
    return this;
}
|
// Two storage IDs built from identical fragments must compare equal.
@Test
public void testEqual() {
  StorageID id = new StorageID();
  id.append("time_bucket", 202212141438L) //2022-12-14 14:38
      .append("entity_id", "encoded-service-name");
  StorageID id2 = new StorageID();
  id2.append("time_bucket", 202212141438L) //2022-12-14 14:38
      .append("entity_id", "encoded-service-name");
  // Use assertEquals(expected, actual) directly instead of
  // assertEquals(true, id.equals(id2)): it still exercises equals() and
  // produces a far more useful failure message.
  Assertions.assertEquals(id, id2);
}
|
/**
 * Serializes the session into a signed JWT: session properties become claims,
 * plus a last-refresh-time claim, the token uuid as JWT id, the user login as
 * subject, and issued-at/expiration timestamps.
 *
 * @throws NullPointerException (via checkIsStarted) if the serializer was not started
 */
String encode(JwtSession jwtSession) {
  checkIsStarted();
  return Jwts.builder()
      .claims(jwtSession.getProperties())
      .claim(LAST_REFRESH_TIME_PARAM, system2.now())
      .id(jwtSession.getSessionTokenUuid())
      .subject(jwtSession.getUserLogin())
      .issuedAt(new Date(system2.now()))
      .expiration(new Date(jwtSession.getExpirationTime()))
      .signWith(secretKey, SIGNATURE_ALGORITHM)
      .compact();
}
|
@Test
public void encode_fail_when_not_started() {
  JwtSession jwtSession = new JwtSession(USER_LOGIN, SESSION_TOKEN_UUID, addMinutes(new Date(), 10).getTime());
  // Encoding before start() must fail the started-state precondition.
  assertThatThrownBy(() -> underTest.encode(jwtSession))
      .isInstanceOf(NullPointerException.class)
      .hasMessage("org.sonar.server.authentication.JwtSerializer not started");
}
|
/**
 * Extracts the numeric counter that follows the index prefix, e.g. the 42 in
 * "graylog2_42". Returns empty when the name is shorter than the prefix or the
 * suffix is not a valid integer.
 */
@Override
public Optional<Integer> extractIndexNumber(final String indexName) {
    final int prefixLength = indexPrefixLength(indexName);
    if (indexName.length() < prefixLength) {
        return Optional.empty();
    }
    try {
        final int number = Integer.parseInt(indexName.substring(prefixLength));
        return Optional.of(number);
    } catch (NumberFormatException e) {
        // Non-numeric suffix (e.g. a restored-archive name): treat as no number.
        return Optional.empty();
    }
}
|
@Test
public void testExtractIndexNumberWithMalformedFormatReturnsEmptyOptional() {
  // A non-numeric suffix after the prefix must yield an empty Optional, not throw.
  assertThat(mongoIndexSet.extractIndexNumber("graylog2_hunderttausend")).isEmpty();
}
|
/**
 * Returns the number of entries scanned after filtering as reported by the
 * broker response, or -1 when the stat is absent.
 */
public long getNumEntriesScannedPostFilter() {
    if (!_brokerResponse.has(NUM_ENTRIES_SCANNED_POST_FILTER)) {
        return -1L;
    }
    return _brokerResponse.get(NUM_ENTRIES_SCANNED_POST_FILTER).asLong();
}
|
@Test
public void testGetNumEntriesScannedPostFilter() {
  // Run the test
  final long result = _executionStatsUnderTest.getNumEntriesScannedPostFilter();
  // Verify the results: fixture response reports 10 entries scanned post-filter.
  assertEquals(10L, result);
}
|
/**
 * Prepares fetch requests for the fetchable partitions and delegates to
 * pollInternal with the success/failure handlers. The timestamp argument is
 * unused here.
 */
@Override
public PollResult poll(long currentTimeMs) {
  return pollInternal(
      prepareFetchRequests(),
      this::handleFetchSuccess,
      this::handleFetchFailure
  );
}
|
// Verifies READ_COMMITTED filtering against a compacted topic: aborted
// transactional records are dropped even when the abort markers or the
// transactions' first offsets have been compacted away from the log.
@Test
public void testReadCommittedWithCompactedTopic() {
  buildFetcher(OffsetResetStrategy.EARLIEST, new StringDeserializer(),
      new StringDeserializer(), Integer.MAX_VALUE, IsolationLevel.READ_COMMITTED);
  ByteBuffer buffer = ByteBuffer.allocate(1024);
  long pid1 = 1L;
  long pid2 = 2L;
  long pid3 = 3L;
  // pid3's first transaction (offsets 3-4) is later committed at offset 35.
  appendTransactionalRecords(buffer, pid3, 3L,
      new SimpleRecord("3".getBytes(), "value".getBytes()),
      new SimpleRecord("4".getBytes(), "value".getBytes()));
  // pid2's records (15-17) are aborted at offset 28.
  appendTransactionalRecords(buffer, pid2, 15L,
      new SimpleRecord("15".getBytes(), "value".getBytes()),
      new SimpleRecord("16".getBytes(), "value".getBytes()),
      new SimpleRecord("17".getBytes(), "value".getBytes()));
  appendTransactionalRecords(buffer, pid1, 22L,
      new SimpleRecord("22".getBytes(), "value".getBytes()),
      new SimpleRecord("23".getBytes(), "value".getBytes()));
  abortTransaction(buffer, pid2, 28L);
  appendTransactionalRecords(buffer, pid3, 30L,
      new SimpleRecord("30".getBytes(), "value".getBytes()),
      new SimpleRecord("31".getBytes(), "value".getBytes()),
      new SimpleRecord("32".getBytes(), "value".getBytes()));
  commitTransaction(buffer, pid3, 35L);
  appendTransactionalRecords(buffer, pid1, 39L,
      new SimpleRecord("39".getBytes(), "value".getBytes()),
      new SimpleRecord("40".getBytes(), "value".getBytes()));
  // transaction from pid1 is aborted, but the marker is not included in the fetch
  buffer.flip();
  // send the fetch
  assignFromUser(singleton(tp0));
  subscriptions.seek(tp0, 0);
  assertEquals(1, sendFetches());
  // prepare the response. the aborted transactions begin at offsets which are no longer in the log
  List<FetchResponseData.AbortedTransaction> abortedTransactions = Arrays.asList(
      new FetchResponseData.AbortedTransaction().setProducerId(pid2).setFirstOffset(6),
      new FetchResponseData.AbortedTransaction().setProducerId(pid1).setFirstOffset(0)
  );
  client.prepareResponse(fullFetchResponseWithAbortedTransactions(MemoryRecords.readableRecords(buffer),
      abortedTransactions, Errors.NONE, 100L, 100L, 0));
  networkClientDelegate.poll(time.timer(0));
  assertTrue(fetcher.hasCompletedFetches());
  Map<TopicPartition, List<ConsumerRecord<String, String>>> allFetchedRecords = fetchRecords();
  assertTrue(allFetchedRecords.containsKey(tp0));
  List<ConsumerRecord<String, String>> fetchedRecords = allFetchedRecords.get(tp0);
  // Only pid3's committed records survive: offsets 3, 4 and 30-32.
  assertEquals(5, fetchedRecords.size());
  assertEquals(Arrays.asList(3L, 4L, 30L, 31L, 32L), collectRecordOffsets(fetchedRecords));
}
|
/**
 * Starts building the operation over the given input collection with no
 * explicit name (delegates to {@code named(null)}).
 */
public static <InputT> KeyByBuilder<InputT> of(PCollection<InputT> input) {
  return named(null).of(input);
}
|
@Test
public void testBuild_ImplicitName() {
  final PCollection<String> dataset = TestUtils.createMockDataset(TypeDescriptors.strings());
  // Building without named(...) must leave the operator's name unset.
  final PCollection<KV<String, Long>> reduced =
      ReduceByKey.of(dataset).keyBy(s -> s).valueBy(s -> 1L).combineBy(Sums.ofLongs()).output();
  final ReduceByKey reduce = (ReduceByKey) TestUtils.getProducer(reduced);
  assertFalse(reduce.getName().isPresent());
}
|
/**
 * Merges {@code newOpts} on top of a deep clone of {@code oriOpts} and returns
 * the merged copy (neither argument is mutated). Maps (labels, annotations,
 * node selectors) are merged key-by-key with new values overwriting old ones;
 * tolerations are appended; resource limits/requests are overwritten only for
 * the resources named in RESOURCES; job name/namespace are replaced only when
 * non-empty in newOpts.
 */
public static RuntimeOpts mergeRuntimeOpts(RuntimeOpts oriOpts, RuntimeOpts newOpts) {
  RuntimeOpts mergedOpts = oriOpts.partialDeepClone();
  // Normalize null collections on the clone so the merge loops below are safe.
  if (mergedOpts.getExtraLabels() == null) {
    mergedOpts.setExtraLabels(new HashMap<>());
  }
  if (mergedOpts.getExtraAnnotations() == null) {
    mergedOpts.setExtraAnnotations(new HashMap<>());
  }
  if (mergedOpts.getNodeSelectorLabels() == null) {
    mergedOpts.setNodeSelectorLabels(new HashMap<>());
  }
  if (mergedOpts.getTolerations() == null) {
    mergedOpts.setTolerations(new ArrayList<>());
  }
  if (mergedOpts.getResourceRequirements() == null) {
    mergedOpts.setResourceRequirements(new V1ResourceRequirements());
  }
  if (!StringUtils.isEmpty(newOpts.getJobName())) {
    mergedOpts.setJobName(newOpts.getJobName());
  }
  if (!StringUtils.isEmpty(newOpts.getJobNamespace())) {
    mergedOpts.setJobNamespace(newOpts.getJobNamespace());
  }
  if (newOpts.getExtraLabels() != null && !newOpts.getExtraLabels().isEmpty()) {
    newOpts.getExtraLabels().forEach((key, labelsItem) -> {
      // NOTE(review): this logs "changed" when the key is NEW (not contained),
      // and logs nothing when an existing value is actually overwritten —
      // the condition looks inverted; confirm the intended semantics.
      if (!mergedOpts.getExtraLabels().containsKey(key)) {
        log.debug("extra label {} has been changed to {}", key, labelsItem);
      }
      mergedOpts.getExtraLabels().put(key, labelsItem);
    });
  }
  if (newOpts.getExtraAnnotations() != null && !newOpts.getExtraAnnotations().isEmpty()) {
    newOpts.getExtraAnnotations().forEach((key, annotationsItem) -> {
      // NOTE(review): same possibly-inverted log condition as for labels above.
      if (!mergedOpts.getExtraAnnotations().containsKey(key)) {
        log.debug("extra annotation {} has been changed to {}", key, annotationsItem);
      }
      mergedOpts.getExtraAnnotations().put(key, annotationsItem);
    });
  }
  if (newOpts.getNodeSelectorLabels() != null && !newOpts.getNodeSelectorLabels().isEmpty()) {
    newOpts.getNodeSelectorLabels().forEach((key, nodeSelectorItem) -> {
      // NOTE(review): same possibly-inverted log condition as for labels above.
      if (!mergedOpts.getNodeSelectorLabels().containsKey(key)) {
        log.debug("node selector label {} has been changed to {}", key, nodeSelectorItem);
      }
      mergedOpts.getNodeSelectorLabels().put(key, nodeSelectorItem);
    });
  }
  if (newOpts.getResourceRequirements() != null) {
    V1ResourceRequirements mergedResourcesRequirements = mergedOpts.getResourceRequirements();
    V1ResourceRequirements newResourcesRequirements = newOpts.getResourceRequirements();
    Map<String, Quantity> limits = newResourcesRequirements.getLimits();
    Map<String, Quantity> requests = newResourcesRequirements.getRequests();
    // Only the whitelisted resource names are merged; others are ignored.
    for (String resource : RESOURCES) {
      if (limits != null && limits.containsKey(resource)) {
        mergedResourcesRequirements.putLimitsItem(resource, limits.get(resource));
      }
      if (requests != null && requests.containsKey(resource)) {
        mergedResourcesRequirements.putRequestsItem(resource, requests.get(resource));
      }
    }
    mergedOpts.setResourceRequirements(mergedResourcesRequirements);
  }
  if (newOpts.getTolerations() != null && !newOpts.getTolerations().isEmpty()) {
    mergedOpts.getTolerations().addAll(newOpts.getTolerations());
  }
  return mergedOpts;
}
|
// Verifies mergeRuntimeOpts layers new options on top of the customizer's
// configured base options: name replaced, tolerations appended, maps merged.
@Test
public void TestMergeRuntimeOpts() {
  Map<String, Object> configs = new Gson().fromJson(KubernetesRuntimeTest.createRuntimeCustomizerConfig(), HashMap.class);
  BasicKubernetesManifestCustomizer customizer = new BasicKubernetesManifestCustomizer();
  customizer.initialize(configs);
  BasicKubernetesManifestCustomizer.RuntimeOpts newOpts = new BasicKubernetesManifestCustomizer.RuntimeOpts();
  newOpts.setJobName("merged-name");
  // Removed a dead setTolerations(Collections.emptyList()) call that was
  // immediately overwritten by the singleton list below.
  V1Toleration toleration = new V1Toleration();
  toleration.setKey("merge-key");
  toleration.setEffect("NoSchedule");
  toleration.setOperator("Equal");
  toleration.setTolerationSeconds(6000L);
  newOpts.setTolerations(Collections.singletonList(toleration));
  V1ResourceRequirements resourceRequirements = new V1ResourceRequirements();
  resourceRequirements.putLimitsItem("cpu", new Quantity("20"));
  resourceRequirements.putLimitsItem("memory", new Quantity("10240"));
  newOpts.setResourceRequirements(resourceRequirements);
  newOpts.setNodeSelectorLabels(Collections.singletonMap("disktype", "ssd"));
  newOpts.setExtraAnnotations(Collections.singletonMap("functiontype", "sink"));
  newOpts.setExtraLabels(Collections.singletonMap("functiontype", "sink"));
  BasicKubernetesManifestCustomizer.RuntimeOpts mergedOpts = BasicKubernetesManifestCustomizer.mergeRuntimeOpts(
      customizer.getRuntimeOpts(), newOpts);
  assertEquals(mergedOpts.getJobName(), "merged-name");
  assertEquals(mergedOpts.getTolerations().size(), 2);
  assertEquals(mergedOpts.getExtraAnnotations().size(), 2);
  assertEquals(mergedOpts.getExtraLabels().size(), 2);
  assertEquals(mergedOpts.getNodeSelectorLabels().size(), 2);
  assertEquals(mergedOpts.getResourceRequirements().getLimits().get("cpu").getNumber().intValue(), 20);
  assertEquals(mergedOpts.getResourceRequirements().getLimits().get("memory").getNumber().intValue(), 10240);
}
|
/**
 * Validates a consumer group id: it must be non-blank and match the group-id
 * pattern (letter first, then letters, digits, '-' or '_').
 *
 * @throws IllegalArgumentException if the id is blank or malformed
 */
public static void verifyGroupId(final String groupId) {
    if (StringUtils.isBlank(groupId)) {
        throw new IllegalArgumentException("Blank groupId");
    }
    final boolean wellFormed = GROUP_ID_PATTER.matcher(groupId).matches();
    if (!wellFormed) {
        throw new IllegalArgumentException(
            "Invalid group id, it should be started with character 'a'-'z' or 'A'-'Z',"
                + " and followed with numbers, english alphabet, '-' or '_'. ");
    }
}
|
// A group id starting with a digit is invalid.
// NOTE(review): method name has a typo ("tets" -> "test"); rename when convenient.
@Test(expected = IllegalArgumentException.class)
public void tetsVerifyGroupId3() {
  Utils.verifyGroupId("1abc");
}
|
/**
 * Materializes this scan as a Spark batch over the planned task groups,
 * using this scan's hash code as the batch's cache/identity key.
 */
@Override
public Batch toBatch() {
  return new SparkBatch(
      sparkContext, table, readConf, groupingKeyType(), taskGroups(), expectedSchema, hashCode());
}
|
@TestTemplate
public void testPartitionedIsNotNull() throws Exception {
  createPartitionedTable(spark, tableName, "truncate(4, data)");
  SparkScanBuilder builder = scanBuilder();
  // Push IS_NOT_NULL over truncate(4, data): nothing can be pruned (10 partitions).
  TruncateFunction.TruncateString function = new TruncateFunction.TruncateString();
  UserDefinedScalarFunc udf = toUDF(function, expressions(intLit(4), fieldRef("data")));
  Predicate predicate = new Predicate("IS_NOT_NULL", expressions(udf));
  pushFilters(builder, predicate);
  Batch scan = builder.build().toBatch();
  assertThat(scan.planInputPartitions().length).isEqualTo(10);
  // NOT IsNotNULL
  builder = scanBuilder();
  predicate = new Not(predicate);
  pushFilters(builder, predicate);
  scan = builder.build().toBatch();
  // IS NULL over a non-null partition column prunes everything.
  assertThat(scan.planInputPartitions().length).isEqualTo(0);
}
|
/** Returns the process-wide singleton, lazily held by the initialization-on-demand holder class. */
public static NamingHttpClientManager getInstance() {
    return NamingHttpClientManagerInstance.INSTANCE;
}
|
// The singleton accessor must never return null.
@Test
void testGetInstance() {
    assertNotNull(NamingHttpClientManager.getInstance());
}
|
/**
 * Executes the Redis TIME command on the given cluster node and blocks for the result.
 */
@Override
public Long time(RedisClusterNode node) {
    RedisClient entry = getEntry(node);
    // Read directly from the targeted node so the reported time is that node's clock.
    RFuture<Long> f = executorService.readAsync(entry, LongCodec.INSTANCE, RedisCommands.TIME_LONG);
    return syncFuture(f);
}
|
// Sanity check: the master's reported epoch time is a plausible (post-1970) value.
@Test
public void testTime() {
    RedisClusterNode master = getFirstMaster();
    Long time = connection.time(master);
    assertThat(time).isGreaterThan(1000);
}
|
/**
 * Validates that a file name follows the "name.extension" format: it must contain
 * at least one dot and no empty segments (so names like ".ext", "name." or "a..b"
 * are rejected).
 *
 * @param originalFilename the uploaded file name to validate
 * @throws BadRequestException if the format is invalid
 */
static void checkFormat(final String originalFilename) {
    final List<String> segments = Splitter.on(".").splitToList(originalFilename);
    final boolean missingExtension = segments.size() <= 1;
    final boolean hasEmptySegment = segments.stream().anyMatch(StringUtils::isEmpty);
    if (missingExtension || hasEmptySegment) {
        throw new BadRequestException("The file format is invalid.");
    }
}
|
// A name without any "." separator must be rejected.
@Test(expected = BadRequestException.class)
public void checkFormatWithException0() {
    ConfigFileUtils.checkFormat("1234+defaultes");
}
|
/**
 * Serializes a logging event as one newline-terminated JSON object following the
 * Google Cloud structured-logging field conventions. Field order is fixed by the
 * sequence of write* calls below. On any failure (including an NPE from a partially
 * mocked/populated event) a fallback error payload is returned instead of throwing.
 */
@Override
public byte[] encode(ILoggingEvent event) {
  var baos = new ByteArrayOutputStream();
  try (var generator = jsonFactory.createGenerator(baos)) {
    generator.writeStartObject();
    // https://cloud.google.com/logging/docs/structured-logging#structured_logging_special_fields
    // https://github.com/googleapis/java-logging-logback/blob/main/src/main/java/com/google/cloud/logging/logback/LoggingAppender.java
    writeTimestamp(generator, event);
    writeSeverity(generator, event);
    writeLogger(generator, event);
    writeMessage(generator, event);
    writeThread(generator, event);
    writeServiceContext(generator);
    writeTraceContext(generator);
    var mdc = event.getMDCPropertyMap();
    writeMdc(generator, mdc);
    writeKeyValue(generator, event);
    // Extra error reporting fields are only emitted for ERROR-level events.
    if ("ERROR".equals(event.getLevel().toString())) {
      writeError(generator, event, mdc);
    }
    writeStackTrace(generator, event);
    generator.writeEndObject();
    generator.writeRaw('\n');
    generator.flush();
  } catch (NullPointerException | IOException e) {
    return logFallbackError(event, e);
  }
  return baos.toByteArray();
}
|
// Key-value pairs (including nested maps) are flattened into top-level JSON fields,
// with keys sorted in the output; non-string scalar values are rendered as strings.
@Test
void encode_kv() {
  var e = mockEvent();
  when(e.getLevel()).thenReturn(Level.DEBUG);
  when(e.getFormattedMessage()).thenReturn("oha, sup?");
  when(e.getKeyValuePairs())
      .thenReturn(
          List.of(
              new KeyValuePair("req", Map.of("url", "https://example.com", "method", "GET")),
              new KeyValuePair("a", Map.of("b", Map.of("c", "d"))),
              new KeyValuePair("status", 500)));

  var msg = encoder.encode(e);

  assertMatchesJson(
      """
      {"a":{"b":{"c":"d"}},"logger":"com.example.MyLogger","message":"oha, sup?","req":{"method":"GET","url":"https://example.com"},"severity":"DEBUG","status":"500","thread_name":"main","time":"2024-08-09T14:13:33Z"}
      """,
      msg);
}
|
/**
 * Fast local pre-check that estimates whether a query result would exceed the
 * configured maximum. Samples at most {@code maxLocalPartitionsLimitForPreCheck}
 * owned partitions to keep the check's cost constant, and throws
 * {@link QueryResultSizeExceededException} when the sampled size exceeds the
 * per-partition limit scaled by {@code MAX_RESULT_LIMIT_FACTOR_FOR_PRECHECK}.
 */
void precheckMaxResultLimitOnLocalPartitions(String mapName) {
    // check if feature is enabled
    if (!isPreCheckEnabled) {
        return;
    }

    // limit number of local partitions to check to keep runtime constant
    PartitionIdSet localPartitions = mapServiceContext.getCachedOwnedPartitions();
    int partitionsToCheck = min(localPartitions.size(), maxLocalPartitionsLimitForPreCheck);
    if (partitionsToCheck == 0) {
        return;
    }

    // calculate size of local partitions
    int localPartitionSize = getLocalPartitionSize(mapName, localPartitions, partitionsToCheck);
    if (localPartitionSize == 0) {
        return;
    }

    // check local result size
    long localResultLimit = getNodeResultLimit(partitionsToCheck);
    if (localPartitionSize > localResultLimit * MAX_RESULT_LIMIT_FACTOR_FOR_PRECHECK) {
        // record the rejection in the map's local statistics before failing
        var localMapStatsProvider = mapServiceContext.getLocalMapStatsProvider();
        if (localMapStatsProvider != null && localMapStatsProvider.hasLocalMapStatsImpl(mapName)) {
            localMapStatsProvider.getLocalMapStatsImpl(mapName).incrementQueryResultSizeExceededCount();
        }
        throw new QueryResultSizeExceededException(maxResultLimit, " Result size exceeded in local pre-check.");
    }
}
|
// Two partitions just below the scaled limit must pass the pre-check without throwing.
@Test
public void testLocalPreCheckEnabledWitTwoPartitionsBelowLimit() {
    int[] partitionsSizes = {849, 849};
    populatePartitions(partitionsSizes);

    initMocksWithConfiguration(200000, 2);
    limiter.precheckMaxResultLimitOnLocalPartitions(ANY_MAP_NAME);
}
|
/**
 * Looks up the value for {@code key}, resolving the weak reference to a strong one.
 * If the reference was garbage collected (or never existed), the stale entry is
 * removed, the loss is recorded, and a fresh value is created via {@code create}.
 *
 * @param key the lookup key
 * @return the resolved or newly created value
 */
public V get(K key) {
  final WeakReference<V> currentWeakRef = lookup(key);
  // resolve it, after which if not null, we have a strong reference
  V strongVal = resolve(currentWeakRef);
  if (strongVal != null) {
    // all good.
    return strongVal;
  }

  // here, either currentWeakRef was null, or its reference was GC'd.
  if (currentWeakRef != null) {
    // garbage collection removed the reference.

    // explicitly remove the weak ref from the map if it has not
    // been updated by this point
    // this is here just for completeness.
    map.remove(key, currentWeakRef);

    // log/report the loss.
    noteLost(key);
  }

  // create a new value and add it to the map
  return create(key);
}
|
// A factory that yields null must surface as an NPE from get(), not a silent null.
@Test
public void testFactoryReturningNull() throws Throwable {
  referenceMap = new WeakReferenceMap<>(
      (k) -> null,
      null);
  intercept(NullPointerException.class, () ->
      referenceMap.get(0));
}
|
@Override // The mappedStatementId parameter is currently unused; caching keyed on mappedStatementId + DataPermission may be added later.
public List<DataPermissionRule> getDataPermissionRule(String mappedStatementId) {
    // 1. No data-permission rules registered at all.
    if (CollUtil.isEmpty(rules)) {
        return Collections.emptyList();
    }
    // 2. No annotation in context: permissions default to enabled, return every rule.
    DataPermission dataPermission = DataPermissionContextHolder.get();
    if (dataPermission == null) {
        return rules;
    }
    // 3. Annotation present but explicitly disabled.
    if (!dataPermission.enable()) {
        return Collections.emptyList();
    }
    // 4. Annotation selects only the whitelisted rules.
    if (ArrayUtil.isNotEmpty(dataPermission.includeRules())) {
        return rules.stream().filter(rule -> ArrayUtil.contains(dataPermission.includeRules(), rule.getClass()))
                .collect(Collectors.toList()); // Rule lists are small, so a linear scan is used instead of a HashSet.
    }
    // 5. Annotation excludes the blacklisted rules.
    if (ArrayUtil.isNotEmpty(dataPermission.excludeRules())) {
        return rules.stream().filter(rule -> !ArrayUtil.contains(dataPermission.excludeRules(), rule.getClass()))
                .collect(Collectors.toList()); // Rule lists are small, so a linear scan is used instead of a HashSet.
    }
    // 6. Annotation present with no include/exclude filter: all rules apply.
    return rules;
}
|
// TestClass03's @DataPermission annotation is expected to disable all rules,
// so the factory must return an empty list.
@Test
public void testGetDataPermissionRule_03() {
    // Prepare arguments
    String mappedStatementId = randomString();
    // Mock the context with TestClass03's annotation
    DataPermissionContextHolder.add(AnnotationUtils.findAnnotation(TestClass03.class, DataPermission.class));

    // Invoke
    List<DataPermissionRule> result = dataPermissionRuleFactory.getDataPermissionRule(mappedStatementId);
    // Assert
    assertTrue(result.isEmpty());
}
|
/**
 * Diffs the previously known plugin files against the current set and fires the
 * matching listener callback: removed for vanished files, added for new files,
 * and updated for files whose timestamp changed.
 */
public void notify(PluginJarChangeListener listener, Collection<BundleOrPluginFileDetails> knowPluginFiles, Collection<BundleOrPluginFileDetails> currentPluginFiles) {
    List<BundleOrPluginFileDetails> previouslyKnown = new ArrayList<>(knowPluginFiles);

    // Anything known before but absent now has been removed.
    subtract(previouslyKnown, currentPluginFiles).forEach(listener::pluginJarRemoved);

    for (BundleOrPluginFileDetails candidate : currentPluginFiles) {
        int position = previouslyKnown.indexOf(candidate);
        if (position < 0) {
            listener.pluginJarAdded(candidate);
        } else if (candidate.doesTimeStampDiffer(previouslyKnown.get(position))) {
            listener.pluginJarUpdated(candidate);
        }
    }
}
|
// A plugin present in both sets whose timestamp differs triggers only pluginJarUpdated.
@Test
void shouldNotifyWhenPluginIsUpdated() {
    final PluginJarChangeListener listener = mock(PluginJarChangeListener.class);
    BundleOrPluginFileDetails pluginOne = mock(BundleOrPluginFileDetails.class);
    BundleOrPluginFileDetails pluginTwo = mock(BundleOrPluginFileDetails.class);
    BundleOrPluginFileDetails pluginThree = mock(BundleOrPluginFileDetails.class);
    List<BundleOrPluginFileDetails> knownPlugins = List.of(pluginOne, pluginTwo, pluginThree);
    List<BundleOrPluginFileDetails> newPlugins = List.of(pluginOne, pluginTwo, pluginThree);
    when(pluginOne.doesTimeStampDiffer(pluginOne)).thenReturn(true);

    pluginChangeNotifier.notify(listener, knownPlugins, newPlugins);

    verify(listener).pluginJarUpdated(pluginOne);
    verify(listener, never()).pluginJarAdded(any());
    verify(listener, never()).pluginJarRemoved(any());
}
|
/**
 * Populates the Shenyu context from the Motan metadata and fixes the RPC type to MOTAN.
 * Returns the same (mutated) context instance for chaining.
 */
@Override
public ShenyuContext decorator(final ShenyuContext shenyuContext, final MetaData metaData) {
    shenyuContext.setModule(metaData.getAppName());
    shenyuContext.setMethod(metaData.getServiceName());
    shenyuContext.setContextPath(metaData.getContextPath());
    shenyuContext.setRpcType(RpcTypeEnum.MOTAN.getName());
    return shenyuContext;
}
|
// The context's module field must be copied from the metadata's app name.
@Test
public void testDecorator() {
    metaData.setAppName("app");
    metaData.setServiceName("service");
    metaData.setContextPath("localhost");
    motanShenyuContextDecorator.decorator(shenyuContext, metaData);
    Assertions.assertEquals(shenyuContext.getModule(), "app");
}
|
/**
 * Creates a Split transform for the given regex; empty tokens are dropped
 * (delegates with outputEmpty = false).
 */
public static Split split(String regex) {
  return split(Pattern.compile(regex), false);
}
|
// With outputEmpty = true, every empty token produced by consecutive delimiters is kept.
@Test
@Category(NeedsRunner.class)
public void testSplitsWithEmpty() {
  PCollection<String> output =
      p.apply(Create.of("The  quick   brown fox jumps over    the lazy dog"))
          .apply(Regex.split("\\s", true));

  PAssert.that(output)
      .containsInAnyOrder(
          "The", "", "quick", "brown", "", "", "fox", "jumps", "over", "", "", "", "the", "lazy",
          "dog");
  p.run();
}
|
/**
 * Converts a WAL event into a pipeline record: filtered or non-row events become
 * placeholder records, while write/update/delete row events are translated using
 * the table's metadata.
 *
 * @param event the WAL event to convert
 * @return the converted record
 * @throws UnsupportedSQLOperationException if the row event type is not handled
 */
public Record convert(final AbstractWALEvent event) {
    if (filter(event)) {
        return createPlaceholderRecord(event);
    }
    if (!(event instanceof AbstractRowEvent)) {
        return createPlaceholderRecord(event);
    }
    PipelineTableMetaData tableMetaData = getPipelineTableMetaData(((AbstractRowEvent) event).getTableName());
    if (event instanceof WriteRowEvent) {
        return handleWriteRowEvent((WriteRowEvent) event, tableMetaData);
    }
    if (event instanceof UpdateRowEvent) {
        return handleUpdateRowEvent((UpdateRowEvent) event, tableMetaData);
    }
    if (event instanceof DeleteRowEvent) {
        return handleDeleteRowEvent((DeleteRowEvent) event, tableMetaData);
    }
    // Fixed: previously thrown with an empty message, which made failures undiagnosable.
    throw new UnsupportedSQLOperationException("Unsupported row event type: " + event.getClass().getName());
}
|
// A BEGIN transaction event is not a row event, so it converts to a placeholder
// record that still carries the original log sequence number.
@Test
void assertConvertBeginTXEvent() {
    BeginTXEvent beginTXEvent = new BeginTXEvent(100L, null);
    beginTXEvent.setLogSequenceNumber(new PostgreSQLLogSequenceNumber(logSequenceNumber));
    Record record = walEventConverter.convert(beginTXEvent);
    assertInstanceOf(PlaceholderRecord.class, record);
    assertThat(((WALPosition) record.getPosition()).getLogSequenceNumber().asString(), is(logSequenceNumber.asString()));
}
|
/**
 * Entry point: picks the database backend from the args and runs each caching
 * strategy demo in turn, separated by a divider line in the log output.
 */
public static void main(final String[] args) {
  // VirtualDB (instead of MongoDB) was used in running the JUnit tests
  // and the App class to avoid Maven compilation errors. Set flag to
  // true to run the tests with MongoDB (provided that MongoDB is
  // installed and socket connection is open).
  boolean isDbMongo = isDbMongo(args);
  if (isDbMongo) {
    LOGGER.info("Using the Mongo database engine to run the application.");
  } else {
    LOGGER.info("Using the 'in Memory' database to run the application.");
  }
  App app = new App(isDbMongo);
  app.useReadAndWriteThroughStrategy();
  String splitLine = "==============================================";
  LOGGER.info(splitLine);
  app.useReadThroughAndWriteAroundStrategy();
  LOGGER.info(splitLine);
  app.useReadThroughAndWriteBehindStrategy();
  LOGGER.info(splitLine);
  // NOTE(review): "Stategy" is a typo in the method's declaration elsewhere; kept as-is here.
  app.useCacheAsideStategy();
  LOGGER.info(splitLine);
}
|
// Smoke test: running the whole demo application must not throw.
@Test
void shouldExecuteApplicationWithoutException() {
  assertDoesNotThrow(() -> App.main(new String[]{}));
}
|
/** Returns the build version string, loading the build info on first access. */
public static String version() {
  ensureLoaded();
  return version;
}
|
// The packaged build info must resolve to a real version, never the "unknown" fallback.
@Test
public void testVersion() {
  assertThat(IcebergBuild.version()).as("Should not use unknown version").isNotEqualTo("unknown");
}
|
/**
 * Renders all registered metrics into the diagnostics log. The writer is attached
 * to the collector only for the duration of the collection and detached afterwards.
 */
@Override
public void run(DiagnosticsLogWriter writer) {
    metricCollector.writer = writer;
    // we set the time explicitly so that for this particular rendering of the probes, all metrics have exactly
    // the same timestamp
    metricCollector.timeMillis = System.currentTimeMillis();

    metricsRegistry.collect(metricCollector);
    metricCollector.writer = null;
}
|
// A probe that throws must not abort the rendering; the exception is reported
// inline as the metric's value.
@Test
public void testRunWithProblematicProbe() {
    metricsRegistry.registerStaticProbe(this, "broken", MANDATORY, (LongProbeFunction) source -> {
        throw new RuntimeException("error");
    });
    plugin.run(logWriter);
    assertContains("[metric=broken]=java.lang.RuntimeException:error");
}
|
/** Exposes the current suppression rules (used by tests and subclasses). */
protected SuppressionRules rules() {
    return rules;
}
|
// After a CONFIG_UPDATED event, the suppressed device-type set must contain
// both the original and the newly added device type.
@Test
public void updateDeviceTypeRule() {
    Device.Type deviceType1 = Device.Type.ROADM;
    Device.Type deviceType2 = Device.Type.SWITCH;
    Set<Device.Type> deviceTypes = new HashSet<>();
    deviceTypes.add(deviceType1);

    cfg.deviceTypes(deviceTypes);
    configEvent(NetworkConfigEvent.Type.CONFIG_ADDED);

    deviceTypes.add(deviceType2);
    cfg.deviceTypes(deviceTypes);
    configEvent(NetworkConfigEvent.Type.CONFIG_UPDATED);

    assertAfter(EVENT_MS, () -> {
        assertTrue(provider.rules().getSuppressedDeviceType().contains(deviceType1));
        assertTrue(provider.rules().getSuppressedDeviceType().contains(deviceType2));
    });
}
|
/**
 * Creates a partitioned JDBC read for the given partition-column type, with the
 * default number of partitions and fetch size, and Beam-schema inference disabled.
 */
public static <T, PartitionColumnT> ReadWithPartitions<T, PartitionColumnT> readWithPartitions(
    TypeDescriptor<PartitionColumnT> partitioningColumnType) {
  return new AutoValue_JdbcIO_ReadWithPartitions.Builder<T, PartitionColumnT>()
      .setPartitionColumnType(partitioningColumnType)
      .setNumPartitions(DEFAULT_NUM_PARTITIONS)
      .setFetchSize(DEFAULT_FETCH_SIZE)
      .setUseBeamSchema(false)
      .build();
}
|
// Equal lower and upper bounds violate the lowerBound < upperBound precondition
// and must fail pipeline construction with a descriptive message.
@Test
public void testLowerBoundIsMoreThanUpperBound() {
  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage(
      "The lower bound of partitioning column is larger or equal than the upper bound");
  pipeline.apply(
      JdbcIO.<TestRow>readWithPartitions()
          .withDataSourceConfiguration(DATA_SOURCE_CONFIGURATION)
          .withRowMapper(new JdbcTestHelper.CreateTestRowOfNameAndId())
          .withTable(READ_TABLE_NAME)
          .withNumPartitions(5)
          .withPartitionColumn("id")
          .withLowerBound(100L)
          .withUpperBound(100L));
  pipeline.run();
}
|
/** Creates a new Searcher over this index; each call returns a fresh instance. */
public Searcher searcher() {
    return new Searcher();
}
|
// Searching with country=no, gender=male matches only document 1; adding
// gender=female also matches the second document (id 0x3fffffe = 67108862).
@Test
void requireThatPredicateIndexCanSearch() {
    PredicateIndexBuilder builder = new PredicateIndexBuilder(10);
    builder.indexDocument(1, Predicate.fromString("country in ['no', 'se'] and gender in ['male']"));
    builder.indexDocument(0x3fffffe, Predicate.fromString("country in ['no'] and gender in ['female']"));
    PredicateIndex index = builder.build();
    PredicateIndex.Searcher searcher = index.searcher();

    PredicateQuery query = new PredicateQuery();
    query.addFeature("country", "no");
    query.addFeature("gender", "male");
    assertEquals("[1]", searcher.search(query).toList().toString());
    query.addFeature("gender", "female");
    assertEquals("[1, 67108862]", searcher.search(query).toList().toString());
}
|
/**
 * Removes the timer registered under the given event name, if any.
 * Synchronized on the timer map since it is shared across threads.
 */
public void removeTimer(String eventName) {
    synchronized (mTrackTimer) {
        mTrackTimer.remove(eventName);
    }
}
|
// After removal, looking up the timer by name must yield null.
@Test
public void removeTimer() {
    mInstance.addEventTimer("EventTimer", new EventTimer(TimeUnit.SECONDS, 10000L));
    mInstance.removeTimer("EventTimer");
    Assert.assertNull(mInstance.getEventTimer("EventTimer"));
}
|
/**
 * Resets the buffer's position to its previously marked position.
 * Thin wrapper over {@link java.nio.Buffer#reset()}; throws
 * InvalidMarkException if no mark has been set.
 */
public static void reset(Buffer buffer) {
    buffer.reset();
}
|
// reset() after mark() must succeed without throwing InvalidMarkException.
@Test
public void testReset() {
    ByteBuffer byteBuffer = ByteBuffer.allocate(4);
    byteBuffer.putInt(1);
    BufferUtils.mark(byteBuffer);
    Assertions.assertDoesNotThrow(() -> BufferUtils.reset(byteBuffer));
}
|
/** This dialect/metadata implementation reports no transaction support. */
@Override
public boolean supportsTransactions() {
    return false;
}
|
// The metadata must consistently report that transactions are unsupported.
@Test
void assertSupportsTransactions() {
    assertFalse(metaData.supportsTransactions());
}
|
/**
 * Creates a Pub/Sub read that decodes each message payload as a UTF-8 string.
 */
public static Read<String> readStrings() {
  return Read.newBuilder(
          (PubsubMessage message) -> new String(message.getPayload(), StandardCharsets.UTF_8))
      .setCoder(StringUtf8Coder.of())
      .build();
}
|
// A topic path containing '*' is not a valid Pub/Sub topic name and must be rejected.
@Test
public void testTopicValidationBadCharacter() throws Exception {
  thrown.expect(IllegalArgumentException.class);
  PubsubIO.readStrings().fromTopic("projects/my-project/topics/abc-*-abc");
}
|
/**
 * Merges output parameters into the full parameter map in place. Every output
 * parameter must already be defined in {@code allParams}, otherwise a validation
 * error is raised. Literal MAP and STRING_MAP parameters are deep-merged with the
 * existing definition; all other parameter types replace the existing definition
 * via a merged param definition that keeps the incoming evaluated result.
 *
 * @param allParams the complete parameter map, mutated by this call
 * @param params the output parameters to merge in
 * @throws MaestroValidationException if an output parameter has no base definition
 */
public static void mergeOutputDataParams(
    Map<String, Parameter> allParams, Map<String, Parameter> params) {
  params.forEach(
      (name, param) -> {
        if (!allParams.containsKey(name)) {
          throw new MaestroValidationException(
              "Invalid output parameter [%s], not defined in params", name);
        }
        MergeContext context = MergeContext.stepCreate(ParamSource.OUTPUT_PARAMETER);
        if (param.getType() == ParamType.MAP && param.isLiteral()) {
          // Literal MAP: recursively merge nested param definitions into the base map.
          ParamDefinition baseDef = allParams.get(name).toDefinition();
          Map<String, ParamDefinition> baseMap = baseDef.asMapParamDef().getValue();
          ParamDefinition toMergeDef = param.toDefinition();
          Map<String, ParamDefinition> toMergeMap = toMergeDef.asMapParamDef().getValue();
          mergeParams(baseMap, toMergeMap, context);
          Parameter mergedParam =
              buildMergedParamDefinition(name, toMergeDef, baseDef, context, baseMap)
                  .toParameter();
          populateEvaluatedResultAndTime(mergedParam, param.getEvaluatedTime());
          allParams.put(name, mergedParam);
        } else if (param.getType() == ParamType.STRING_MAP && param.isLiteral()) {
          // Literal STRING_MAP: shallow-merge string entries, incoming values win.
          ParamDefinition baseDef = allParams.get(name).toDefinition();
          Map<String, String> baseMap = baseDef.asStringMapParamDef().getValue();
          ParamDefinition toMergeDef = param.toDefinition();
          Map<String, String> toMergeMap = toMergeDef.asStringMapParamDef().getValue();
          baseMap.putAll(toMergeMap);
          Parameter mergedParam =
              buildMergedParamDefinition(name, toMergeDef, baseDef, context, baseMap)
                  .toParameter();
          populateEvaluatedResultAndTime(mergedParam, param.getEvaluatedTime());
          allParams.put(name, mergedParam);
        } else {
          // All other types: replace with a merged definition that carries the
          // incoming evaluated result and evaluation time.
          ParamDefinition paramDefinition =
              ParamsMergeHelper.buildMergedParamDefinition(
                  name,
                  param.toDefinition(),
                  allParams.get(name).toDefinition(),
                  MergeContext.stepCreate(ParamSource.OUTPUT_PARAMETER),
                  param.getValue());
          Parameter parameter = paramDefinition.toParameter();
          parameter.setEvaluatedResult(param.getEvaluatedResult());
          parameter.setEvaluatedTime(param.getEvaluatedTime());
          allParams.put(name, parameter);
        }
      });
}
|
// Non-map output params replace their base values (e.g. long_array_param becomes
// [4,5]); params absent from the output (str_param) keep their original values.
@Test
public void testMergeOutputDataParamsOtherParams() throws JsonProcessingException {
  Map<String, Parameter> allParams =
      parseParamMap(
          "{\"long_array_param\":{\"evaluated_result\":[1,2,3],\"evaluated_time\":1626893775979,\"type\":\"LONG_ARRAY\",\"value\":[1,2,3]},\"long_param\":{\"evaluated_result\":21,\"evaluated_time\":1626893775979,\"type\":\"LONG\",\"value\":21},\"str_param\":{\"evaluated_result\":\"hello\",\"evaluated_time\":1626893775979,\"type\":\"STRING\",\"value\":\"hello\"},\"double_param\":{\"evaluated_result\":3.14,\"evaluated_time\":1626893775979,\"type\":\"DOUBLE\",\"value\":3.14},\"double_array_param\":{\"evaluated_result\":[1.1,-0.5,3.2],\"evaluated_time\":1626893775979,\"type\":\"DOUBLE_ARRAY\",\"value\":[1.1,-0.5,3.2]},\"boolean_param\":{\"evaluated_result\":false,\"evaluated_time\":1626893775979,\"type\":\"BOOLEAN\",\"value\":false},\"boolean_array_param\":{\"evaluated_result\":[true,false,true],\"evaluated_time\":1626893775979,\"type\":\"BOOLEAN_ARRAY\",\"value\":[true,false,true]}}");
  Map<String, Parameter> paramsToMerge =
      parseParamMap(
          "{\"long_array_param\":{\"evaluated_result\":[4,5],\"evaluated_time\":1626893775979,\"type\":\"LONG_ARRAY\",\"value\":[4,5]},\"long_param\":{\"evaluated_result\":21,\"evaluated_time\":1626893775979,\"type\":\"LONG\",\"value\":21},\"double_param\":{\"evaluated_result\":3.14,\"evaluated_time\":1626893775979,\"type\":\"DOUBLE\",\"value\":3.14},\"double_array_param\":{\"evaluated_result\":[1.1],\"evaluated_time\":1626893775979,\"type\":\"DOUBLE_ARRAY\",\"value\":[1.1]},\"boolean_param\":{\"evaluated_result\":false,\"evaluated_time\":1626893775979,\"type\":\"BOOLEAN\",\"value\":false},\"boolean_array_param\":{\"evaluated_result\":[true,true],\"evaluated_time\":1626893775979,\"type\":\"BOOLEAN_ARRAY\",\"value\":[true,true]}}");
  ParamsMergeHelper.mergeOutputDataParams(allParams, paramsToMerge);
  assertArrayEquals(
      new long[] {4, 5}, allParams.get("long_array_param").asLongArrayParam().getValue());
  assertEquals(21, (long) allParams.get("long_param").asLongParam().getValue());
  assertEquals("hello", allParams.get("str_param").asStringParam().getValue());
  assertEquals(
      3.14, allParams.get("double_param").asDoubleParam().getValue().doubleValue(), 0.01);
  assertArrayEquals(
      new double[] {1.1},
      allParams.get("double_array_param").asDoubleArrayParam().getEvaluatedResult(),
      0.01);
  assertEquals(false, allParams.get("boolean_param").asBooleanParam().getValue());
  assertArrayEquals(
      new boolean[] {true, true},
      allParams.get("boolean_array_param").asBooleanArrayParam().getValue());
}
|
/** Returns a copy of this state with the leader epoch advanced (leader and ISR unchanged). */
public LeaderAndIsr newEpoch() {
    return newLeaderAndIsrWithBrokerEpoch(leader, isrWithBrokerEpoch);
}
|
// newEpoch() must bump the leader epoch from its initial 0 to 1.
@Test
public void testNewEpoch() {
    LeaderAndIsr leaderAndIsr = new LeaderAndIsr(3, Arrays.asList(1, 2, 3));

    assertEquals(0, leaderAndIsr.leaderEpoch());

    LeaderAndIsr leaderWithNewEpoch = leaderAndIsr.newEpoch();

    assertEquals(1, leaderWithNewEpoch.leaderEpoch());
}
|
/**
 * Substitutes named parameters (":name") in the SQL string with their rendered
 * values from the parameter source, for logging purposes. Iterable values are
 * expanded into a comma-separated list; SqlParameterValue wrappers are unwrapped
 * first. Parameters without a value in the source are left untouched.
 *
 * Fixes: PascalCase local variable, string concatenation in a loop (now a
 * StringBuilder), and a raw Iterable type.
 *
 * @param sql the SQL string containing named parameters
 * @param paramSource the source of parameter values
 * @return the SQL string with parameter placeholders replaced
 */
String substituteParametersInSqlString(String sql, SqlParameterSource paramSource) {
    ParsedSql parsedSql = NamedParameterUtils.parseSqlStatement(sql);
    List<SqlParameter> declaredParams = NamedParameterUtils.buildSqlParameterList(parsedSql, paramSource);
    if (declaredParams.isEmpty()) {
        return sql;
    }
    for (SqlParameter declaredParam : declaredParams) {
        String paramName = declaredParam.getName();
        if (!paramSource.hasValue(paramName)) {
            continue;
        }
        Object value = paramSource.getValue(paramName);
        if (value instanceof SqlParameterValue) {
            value = ((SqlParameterValue) value).getValue();
        }
        String replacement;
        if (value instanceof Iterable) {
            // Expand the iterable into "v1, v2, ..." using each element's SQL rendering.
            StringBuilder joined = new StringBuilder();
            for (Object element : (Iterable<?>) value) {
                if (joined.length() > 0) {
                    joined.append(", ");
                }
                joined.append(getValueForSQLQuery(element));
            }
            replacement = joined.toString();
        } else {
            replacement = getValueForSQLQuery(value);
        }
        // NOTE(review): replace(":" + name, ...) also rewrites longer parameter names
        // sharing this prefix (e.g. ":id" inside ":idx"); behavior preserved — confirm
        // parameter names never overlap in practice.
        sql = sql.replace(":" + paramName, replacement);
    }
    return sql;
}
|
// Boolean parameters are rendered as bare true/false literals in the substituted SQL.
@Test
public void substituteParametersInSqlString_BooleanType() {
    String sql = "Select * from Table Where check = :check AND mark = :mark";
    String sqlToUse = "Select * from Table Where check = true AND mark = false";
    ctx.addBooleanParameter("check", true);
    ctx.addBooleanParameter("mark", false);
    String sqlToUseResult = queryLog.substituteParametersInSqlString(sql, ctx);
    assertEquals(sqlToUse, sqlToUseResult);
}
|
/**
 * Maps an exception to an HTTP response. WebApplicationExceptions pass their own
 * response through (5xx are additionally logged); AuthenticationException maps to
 * 401; ValidationException maps to a 303 redirect (if a see-other URI is set) or a
 * content-negotiated 400; everything else is logged as unexpected, with
 * FederationException yielding a specific 500 message and all other throwables a
 * generic 500.
 */
@Override
public Response toResponse(Throwable exception) {
  debugLog(exception);

  if (exception instanceof WebApplicationException w) {
    var res = w.getResponse();
    if (res.getStatus() >= 500) {
      log(w);
    }
    return res;
  }

  if (exception instanceof AuthenticationException) {
    return Response.status(Status.UNAUTHORIZED).build();
  }

  if (exception instanceof ValidationException ve) {
    if (ve.seeOther() != null) {
      return Response.seeOther(ve.seeOther()).build();
    }
    return buildContentNegotiatedErrorResponse(ve.localizedMessage(), Status.BAD_REQUEST);
  }

  // the remaining exceptions are unexpected, let's log them
  log(exception);

  if (exception instanceof FederationException fe) {
    var errorMessage = new Message(FEDERATION_ERROR_MESSAGE, fe.reason().name());
    return buildContentNegotiatedErrorResponse(errorMessage, Status.INTERNAL_SERVER_ERROR);
  }

  var status = Status.INTERNAL_SERVER_ERROR;
  var errorMessage = new Message(SERVER_ERROR_MESSAGE, (String) null);
  return buildContentNegotiatedErrorResponse(errorMessage, status);
}
|
// A ValidationException carrying a see-other URI must map to a 303 redirect.
@Test
void toResponse_withBody_seeOthers() {

  // when
  var res =
      mapper.toResponse(
          new ValidationException(
              new Message("error.unsupportedScope", "https://example.com/see/other"),
              URI.create("https://example.com/see/other")));

  // then
  assertEquals(303, res.getStatus());
}
|
/**
 * Combines two metrics reporters into one. Null or identical arguments collapse to
 * the single non-null reporter; otherwise both are flattened (composites are
 * unwrapped into their members, deduplicated by identity) into a new composite.
 */
public static MetricsReporter combine(MetricsReporter first, MetricsReporter second) {
  if (first == null) {
    return second;
  }
  if (second == null || first == second) {
    return first;
  }

  Set<MetricsReporter> reporters = Sets.newIdentityHashSet();
  for (MetricsReporter reporter : new MetricsReporter[] {first, second}) {
    if (reporter instanceof CompositeMetricsReporter) {
      // Flatten nested composites so the result is a single-level composite.
      reporters.addAll(((CompositeMetricsReporter) reporter).reporters());
    } else {
      reporters.add(reporter);
    }
  }

  return new CompositeMetricsReporter(reporters);
}
|
// Nested combine() flattens to a 3-member composite; one member throwing must not
// stop the other two from receiving the report (counter reaches 2).
@Test
public void reportWithMultipleMetricsReportersOneFails() {
  AtomicInteger counter = new AtomicInteger();
  MetricsReporter combined =
      MetricsReporters.combine(
          MetricsReporters.combine(
              report -> counter.incrementAndGet(),
              report -> {
                throw new RuntimeException("invalid report");
              }),
          report -> counter.incrementAndGet());

  combined.report(new MetricsReport() {});

  assertThat(combined).isInstanceOf(MetricsReporters.CompositeMetricsReporter.class);
  assertThat(((MetricsReporters.CompositeMetricsReporter) combined).reporters()).hasSize(3);
  assertThat(counter.get()).isEqualTo(2);
}
|
/**
 * Finds the schedule of a specific attendee of a meeting, identified by the
 * meeting UUID and attendee id. Fails if either the meeting or the attendee
 * (within that meeting) does not exist.
 */
@Transactional(readOnly = true)
public AttendeeScheduleResponse findMySchedule(String uuid, long attendeeId) {
    Meeting meeting = meetingRepository.findByUuid(uuid)
            .orElseThrow(() -> new MomoException(MeetingErrorCode.NOT_FOUND_MEETING));
    Attendee attendee = attendeeRepository.findByIdAndMeeting(attendeeId, meeting)
            .orElseThrow(() -> new MomoException(AttendeeErrorCode.NOT_FOUND_ATTENDEE));
    List<Schedule> schedules = scheduleRepository.findAllByAttendee(attendee);
    return AttendeeScheduleResponse.of(attendee, DateTimesResponse.from(schedules));
}
|
// Looks up an attendee's own schedule by meeting UUID and attendee id and verifies
// the attendee name and the grouped schedule/time counts.
@DisplayName("UUID와 참가자 ID로 자신의 스케줄을 조회한다.")
@Test
void findMySchedule() {
    createAttendeeSchedule(attendee);

    AttendeeScheduleResponse result = scheduleService.findMySchedule(meeting.getUuid(), attendee.getId());
    DateTimesResponse firstTimeResponse = result.schedules().get(0);

    assertAll(
            () -> assertThat(result.attendeeName()).isEqualTo(attendee.name()),
            () -> assertThat(result.schedules()).hasSize(2),
            () -> assertThat(firstTimeResponse.times()).hasSize(3)
    );
}
|
/**
 * Compares two non-null Comparables. Same-class operands use their natural
 * ordering; mixed Number types are compared numerically via Numbers.compare;
 * any other mixed-type pair falls through to lhs.compareTo(rhs), which may
 * throw ClassCastException.
 */
@SuppressWarnings("unchecked")
public static int compare(Comparable lhs, Comparable rhs) {
    assert lhs != null;
    assert rhs != null;

    if (lhs.getClass() == rhs.getClass()) {
        return lhs.compareTo(rhs);
    }

    if (lhs instanceof Number && rhs instanceof Number) {
        return Numbers.compare(lhs, rhs);
    }

    return lhs.compareTo(rhs);
}
|
// A null left-hand operand must fail (assertion error or NPE, both are Throwable).
@SuppressWarnings("ConstantConditions")
@Test(expected = Throwable.class)
public void testNullLhsInCompareThrows() {
    compare(null, 1);
}
|
/** Convenience entry point: parses the schema string with a fresh SchemaParser. */
public static TableElements parse(final String schema, final TypeRegistry typeRegistry) {
  return new SchemaParser(typeRegistry).parse(schema);
}
|
// A HEADER('k1') key column parses into a table element carrying the header-key constraint.
@Test
public void shouldParseValidSchemaWithSingleHeaderKeyField() {
  // Given:
  final String schema = "K STRING HEADER('k1'), bar INT";

  // When:
  final TableElements elements = parser.parse(schema);

  // Then:
  assertThat(elements, contains(
      new TableElement(ColumnName.of("K"), new Type(SqlTypes.STRING), HEADER_KEY1_CONSTRAINT),
      new TableElement(BAR, new Type(SqlTypes.INTEGER))
  ));
}
|
/** Synchronously deletes the JSON value at the given path by blocking on the async variant. */
@Override
public long delete(String path) {
    return get(deleteAsync(path));
}
|
// delete() reports false on a missing bucket and true once a value has been set.
@Test
public void testDelete() {
    RJsonBucket<TestType> al = redisson.getJsonBucket("test", new JacksonCodec<>(TestType.class));
    assertThat(al.delete()).isFalse();

    TestType t = new TestType();
    t.setName("name1");
    al.set(t);
    assertThat(al.delete()).isTrue();
}
|
/**
 * Delegates to the merged result set for the character stream at the given column.
 */
@Override
public Reader getCharacterStream(final int columnIndex) throws SQLException {
    // TODO To be supported: encrypt, mask, and so on
    return mergeResultSet.getCharacterStream(columnIndex);
}
|
// Column-label access must resolve to the same stream as index-based access on the merged result set.
@Test
void assertGetCharacterStreamWithColumnLabel() throws SQLException {
    Reader reader = mock(Reader.class);
    when(mergeResultSet.getCharacterStream(1)).thenReturn(reader);
    assertThat(shardingSphereResultSet.getCharacterStream("label"), is(reader));
}
|
/**
 * Parses a string into a schema-and-value pair. Null maps to the shared null
 * result; an empty string is returned as-is with a STRING schema; anything else
 * goes through the value parser (non-embedded mode).
 */
public static SchemaAndValue parseString(String value) {
    if (value == null) {
        return NULL_SCHEMA_AND_VALUE;
    }
    if (value.isEmpty()) {
        return new SchemaAndValue(Schema.STRING_SCHEMA, value);
    }
    ValueParser parser = new ValueParser(new Parser(value));
    return parser.parse(false);
}
|
// An empty input string parses to an empty STRING value, not to null.
@Test
public void shouldParseEmptyString() {
    SchemaAndValue schemaAndValue = Values.parseString("");
    assertEquals(Schema.STRING_SCHEMA, schemaAndValue.schema());
    assertEquals("", schemaAndValue.value());
}
|
/** For this criterion, higher values are better (strictly greater-than comparison). */
@Override
public boolean betterThan(Num criterionValue1, Num criterionValue2) {
    return criterionValue1.isGreaterThan(criterionValue2);
}
|
// Higher-is-better: 2.0 beats 1.5 and not vice versa.
@Test
public void betterThan() {
    AnalysisCriterion criterion = getCriterion();
    assertTrue(criterion.betterThan(numOf(2.0), numOf(1.5)));
    assertFalse(criterion.betterThan(numOf(1.5), numOf(2.0)));
}
|
/**
 * Returns whether the given output-directory URI scheme refers to local storage.
 * A null scheme (plain path) counts as local, as does any scheme starting with
 * "file" (e.g. "file", "file:").
 */
public static boolean isLocalOutputDir(String outputDirURIScheme) {
  if (outputDirURIScheme == null) {
    return true;
  }
  return outputDirURIScheme.startsWith("file");
}
|
// "file" schemes are local; "hdfs" is remote.
@Test
public void testIsLocalOutputDir() {
  assertTrue(MinionTaskUtils.isLocalOutputDir("file"));
  assertFalse(MinionTaskUtils.isLocalOutputDir("hdfs"));
}
|
/**
 * Splits the source text into individual statements across block boundaries
 * (';', newline, '{', '}' as code-aware delimiters), dropping empty statements
 * and line comments.
 */
public static List<String> splitStatementsAcrossBlocks(CharSequence string) {
    return codeAwareSplitOnChar(string, false, true, ';', '\n', '{', '}').stream()
            .filter(statement -> !statement.isEmpty() && !statement.startsWith("//"))
            .collect(Collectors.toList());
}
|
// Commented-out block delimiters are discarded, leaving only the real statements.
@Test
public void splitStatementsAcrossBlocksCommentedIfMissingEndingBrace() {
    String text = "// if (true) {\n" +
            "    $fact.value1 = 2;\n" +
            "    drools.update($fact);\n" +
            "//";
    List<String> statements = splitStatementsAcrossBlocks(text);
    assertThat(statements.get(0)).isEqualTo("$fact.value1 = 2");
    assertThat(statements.get(1)).isEqualTo("drools.update($fact)");
}
|
@Override
public void deletePost(Long id) {
    // Verify the post exists before deleting.
    validatePostExists(id);
    // Delete the post. (Original comment said "delete department" — likely a
    // copy-paste leftover; this mapper deletes a post row.)
    postMapper.deleteById(id);
}
|
// Deleting an existing post removes it: a subsequent lookup by id returns null.
@Test
public void testDeletePost_success() {
    // Mock data
    PostDO postDO = randomPostDO();
    postMapper.insert(postDO);

    // Prepare arguments
    Long id = postDO.getId();

    // Invoke
    postService.deletePost(id);
    assertNull(postMapper.selectById(id));
}
|
/**
 * Records a purchase linking the product to the customer, using a parameterized
 * insert. Connection and statement are closed via try-with-resources.
 *
 * @throws SQLException if the insert fails
 */
@Override
public void addProduct(Product product, Customer customer) throws SQLException {
  var sql = "insert into PURCHASES (product_name, customer_name) values (?,?)";
  try (var connection = dataSource.getConnection();
       var preparedStatement = connection.prepareStatement(sql)) {
    preparedStatement.setString(1, product.getName());
    preparedStatement.setString(2, customer.getName());
    preparedStatement.executeUpdate();
  }
}
|
// After addProduct, exactly one PURCHASES row exists with the matching names.
@Test
void shouldAddProductToPurchases() throws SQLException {
  TestUtils.executeSQL(INSERT_CUSTOMER_SQL, dataSource);
  TestUtils.executeSQL(ProductDaoImplTest.INSERT_PRODUCT_SQL, dataSource);
  customerDao.addProduct(product, customer);
  try (var connection = dataSource.getConnection();
       var statement = connection.createStatement();
       ResultSet rs = statement.executeQuery(SELECT_PURCHASES_SQL)) {
    assertTrue(rs.next());
    assertEquals(product.getName(), rs.getString("product_name"));
    assertEquals(customer.getName(), rs.getString("customer_name"));
    assertFalse(rs.next());
  }
}
|
/**
 * Poll handler for the WID flow: reports the current session state back to the app.
 * VERIFIED is only reported (and the session marked valid) when the card status
 * checks out; terminal states are echoed; anything else is reported as PENDING.
 * The attestApp flag is set when attestation is enabled and the session's action
 * is in the allowed list.
 */
@Override
public AppResponse process(Flow flow, AppRequest request) throws FlowNotDefinedException, IOException, NoSuchAlgorithmException {
    var response = new WidPollResponse(attestEnabled && Arrays.asList(allowedActions).contains(appSession.getAction()));
    setValid(false);
    switch (appSession.getState()) {
        case "VERIFIED" -> {
            if (validateCardStatus()) {
                setValid(true);
                response.setStatus(appSession.getState());
            }
        }
        case "COMPLETED", "AUTHENTICATED", "CONFIRMED", "CANCELLED", "ABORTED" -> response.setStatus(appSession.getState());
        default -> response.setStatus("PENDING");
    }
    // ABORTED additionally carries the session's error detail (the status was
    // already set by the switch arm above; re-setting it here is harmless).
    if ("ABORTED".equals(appSession.getState())) {
        response.setStatus("ABORTED");
        response.setError(appSession.getError());
    }
    return response;
}
|
// With attestation enabled and an allowed action, the response flags attestApp
// and a fresh session reports PENDING.
@Test
void processAttestEnabled() throws FlowNotDefinedException, IOException, NoSuchAlgorithmException {
    setupWidPolling(true);
    WidPollResponse appResponse = (WidPollResponse) widPolling.process(mockedFlow, mockedAbstractAppRequest);
    assertTrue(appResponse.getAttestApp());
    assertEquals("PENDING", appResponse.getStatus());
}
|
/**
 * Starts a processor span for the given record, continuing any trace context found
 * in (and cleared from) the record's headers. When no context was extracted and the
 * span is sampled, record tags are added.
 */
public Span nextSpan(ConsumerRecord<?, ?> record) {
  // Even though the type is ConsumerRecord, this is not a (remote) consumer span. Only "poll"
  // events create consumer spans. Since this is a processor span, we use the normal sampler.
  TraceContextOrSamplingFlags extracted =
      extractAndClearTraceIdHeaders(processorExtractor, record.headers(), record.headers());
  Span result = tracer.nextSpan(extracted);
  if (extracted.context() == null && !result.isNoop()) {
    addTags(record, result);
  }
  return result;
}
|
@Test void nextSpan_should_not_clear_other_headers() {
    // Only trace-id headers are cleared by extraction; unrelated headers survive.
    consumerRecord.headers().add("foo", new byte[0]);
    kafkaTracing.nextSpan(consumerRecord);
    assertThat(consumerRecord.headers().headers("foo")).isNotEmpty();
}
|
/**
 * Serializes one {@link ConnectionLogEntry} as a single JSON object to the stream.
 * Optional scalar fields are omitted when absent, and the "proxyProtocol",
 * "http" and "ssl" sub-objects are emitted only when at least one of their
 * fields is present.
 */
@Override
public void write(ConnectionLogEntry record, OutputStream outputStream) throws IOException {
    try (JsonGenerator generator = createJsonGenerator(outputStream)) {
        generator.writeStartObject();
        // id and timestamp are always present on an entry.
        generator.writeStringField("id", record.id());
        generator.writeStringField("timestamp", record.timestamp().toString());
        writeOptionalSeconds(generator, "duration", unwrap(record.durationSeconds()));
        writeOptionalString(generator, "peerAddress", unwrap(record.peerAddress()));
        writeOptionalInteger(generator, "peerPort", unwrap(record.peerPort()));
        writeOptionalString(generator, "localAddress", unwrap(record.localAddress()));
        writeOptionalInteger(generator, "localPort", unwrap(record.localPort()));
        // --- proxyProtocol sub-object (only when any field is set) ---
        String proxyProtocolVersion = unwrap(record.proxyProtocolVersion());
        String proxyProtocolRemoteAddress = unwrap(record.remoteAddress());
        Integer proxyProtocolRemotePort = unwrap(record.remotePort());
        if (isAnyValuePresent(proxyProtocolVersion, proxyProtocolRemoteAddress, proxyProtocolRemotePort)) {
            generator.writeObjectFieldStart("proxyProtocol");
            writeOptionalString(generator, "version", proxyProtocolVersion);
            writeOptionalString(generator, "remoteAddress", proxyProtocolRemoteAddress);
            writeOptionalInteger(generator, "remotePort", proxyProtocolRemotePort);
            generator.writeEndObject();
        }
        // --- http sub-object (only when any field is set) ---
        String httpVersion = unwrap(record.httpProtocol());
        Long httpBytesReceived = unwrap(record.httpBytesReceived());
        Long httpBytesSent = unwrap(record.httpBytesSent());
        Long httpRequests = unwrap(record.requests());
        Long httpResponses = unwrap(record.responses());
        if (isAnyValuePresent(httpVersion, httpBytesReceived, httpBytesSent, httpRequests, httpResponses)) {
            generator.writeObjectFieldStart("http");
            writeOptionalString(generator, "version", httpVersion);
            writeOptionalLong(generator, "bytesReceived", httpBytesReceived);
            writeOptionalLong(generator, "responses", httpResponses);
            writeOptionalLong(generator, "bytesSent", httpBytesSent);
            writeOptionalLong(generator, "requests", httpRequests);
            generator.writeEndObject();
        }
        // --- ssl sub-object (only when any field is set) ---
        String sslProtocol = unwrap(record.sslProtocol());
        String sslSessionId = unwrap(record.sslSessionId());
        String sslCipherSuite = unwrap(record.sslCipherSuite());
        String sslPeerSubject = unwrap(record.sslPeerSubject());
        Instant sslPeerNotBefore = unwrap(record.sslPeerNotBefore());
        Instant sslPeerNotAfter = unwrap(record.sslPeerNotAfter());
        String sslSniServerName = unwrap(record.sslSniServerName());
        String sslPeerIssuerSubject = unwrap(record.sslPeerIssuerSubject());
        String sslPeerFingerprint = unwrap(record.sslPeerFingerprint());
        Long sslBytesReceived = unwrap(record.sslBytesReceived());
        Long sslBytesSent = unwrap(record.sslBytesSent());
        ConnectionLogEntry.SslHandshakeFailure sslHandshakeFailure = unwrap(record.sslHandshakeFailure());
        List<String> sslSubjectAlternativeNames = record.sslSubjectAlternativeNames();
        // NOTE(review): sslSubjectAlternativeNames is intentionally not part of this
        // presence check — an entry with only SANs would be skipped. Confirm intended.
        if (isAnyValuePresent(
                sslProtocol, sslSessionId, sslCipherSuite, sslPeerSubject, sslPeerNotBefore, sslPeerNotAfter,
                sslSniServerName, sslHandshakeFailure, sslPeerIssuerSubject, sslPeerFingerprint,
                sslBytesReceived, sslBytesSent)) {
            generator.writeObjectFieldStart("ssl");
            writeOptionalString(generator, "protocol", sslProtocol);
            writeOptionalString(generator, "sessionId", sslSessionId);
            writeOptionalString(generator, "cipherSuite", sslCipherSuite);
            writeOptionalString(generator, "peerSubject", sslPeerSubject);
            writeOptionalString(generator, "peerIssuerSubject", sslPeerIssuerSubject);
            writeOptionalTimestamp(generator, "peerNotBefore", sslPeerNotBefore);
            writeOptionalTimestamp(generator, "peerNotAfter", sslPeerNotAfter);
            writeOptionalString(generator, "peerFingerprint", sslPeerFingerprint);
            writeOptionalString(generator, "sniServerName", sslSniServerName);
            writeOptionalLong(generator, "bytesReceived", sslBytesReceived);
            writeOptionalLong(generator, "bytesSent", sslBytesSent);
            // Handshake failure: the exception chain is written as an array of
            // {cause, message} objects followed by the failure type.
            if (sslHandshakeFailure != null) {
                generator.writeObjectFieldStart("handshake-failure");
                generator.writeArrayFieldStart("exception");
                for (ExceptionEntry entry : sslHandshakeFailure.exceptionChain()) {
                    generator.writeStartObject();
                    generator.writeStringField("cause", entry.name());
                    generator.writeStringField("message", entry.message());
                    generator.writeEndObject();
                }
                generator.writeEndArray();
                generator.writeStringField("type", sslHandshakeFailure.type());
                generator.writeEndObject();
            }
            // Subject alternative names as a plain string array, when present.
            if (!sslSubjectAlternativeNames.isEmpty()) {
                generator.writeArrayFieldStart("san");
                for (String sanEntry : sslSubjectAlternativeNames) {
                    generator.writeString(sanEntry);
                }
                generator.writeEndArray();
            }
            generator.writeEndObject();
        }
    }
}
|
@Test
void test_serialization() throws IOException {
    // Build an entry exercising a scalar field, a nested handshake-failure
    // object with a two-element exception chain, and the SAN array.
    var id = UUID.randomUUID();
    var instant = Instant.parse("2021-01-13T12:12:12Z");
    ConnectionLogEntry entry = ConnectionLogEntry.builder(id, instant)
            .withPeerPort(1234)
            .withSslHandshakeFailure(new ConnectionLogEntry.SslHandshakeFailure("UNKNOWN",
                    List.of(
                            new ConnectionLogEntry.SslHandshakeFailure.ExceptionEntry("javax.net.ssl.SSLHandshakeException", "message"),
                            new ConnectionLogEntry.SslHandshakeFailure.ExceptionEntry("java.io.IOException", "cause message"))))
            .withSslSubjectAlternativeNames(List.of("sandns", "sanemail"))
            .build();
    // Expected JSON: absent optional fields are omitted entirely.
    String expectedJson = "{" +
            "\"id\":\""+id.toString()+"\"," +
            "\"timestamp\":\"2021-01-13T12:12:12Z\"," +
            "\"peerPort\":1234," +
            "\"ssl\":{\"handshake-failure\":{\"exception\":[" +
            "{\"cause\":\"javax.net.ssl.SSLHandshakeException\",\"message\":\"message\"}," +
            "{\"cause\":\"java.io.IOException\",\"message\":\"cause message\"}" +
            "],\"type\":\"UNKNOWN\"},\"san\":[\"sandns\",\"sanemail\"]}}";
    JsonConnectionLogWriter writer = new JsonConnectionLogWriter();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    writer.write(entry, out);
    String actualJson = out.toString(StandardCharsets.UTF_8);
    JsonTestHelper.assertJsonEquals(actualJson, expectedJson);
}
|
/**
 * Releases step resources: deletes the temporary cache files and then
 * delegates to the base-class dispose.
 */
public void dispose( StepMetaInterface smi, StepDataInterface sdi ) {
  meta = (JoinRowsMeta) smi;
  data = (JoinRowsData) sdi;
  // Remove the temporary files. The loop deliberately starts at index 1 —
  // slot 0 is skipped (presumably the first stream is not file-backed; kept as-is).
  if ( data.file != null ) {
    for ( int idx = 1; idx < data.file.length; idx++ ) {
      if ( data.file[idx] == null ) {
        continue;
      }
      data.file[idx].delete();
    }
  }
  super.dispose( meta, data );
}
|
@Test
public void disposeDataFiles() throws Exception {
    // Slot 0 is null by design; dispose() must delete only the non-null files.
    File mockFile1 = mock( File.class );
    File mockFile2 = mock( File.class );
    data.file = new File[] {null, mockFile1, mockFile2};
    getJoinRows().dispose( meta, data );
    verify( mockFile1, times( 1 ) ).delete();
    verify( mockFile2, times( 1 ) ).delete();
}
|
/**
 * Returns the portion of {@code text} before the last occurrence of
 * {@code before}, or {@code null} when {@code before} does not occur.
 *
 * @param text   the text to search in (must not be null)
 * @param before the delimiter to locate (must not be null)
 * @return the prefix of {@code text} up to, and excluding, the last
 *         occurrence of {@code before}; {@code null} if absent
 */
public static String beforeLast(String text, String before) {
    // Single scan: lastIndexOf both detects absence (< 0) and locates the cut
    // point, instead of a contains() scan followed by a second lastIndexOf().
    final int index = text.lastIndexOf(before);
    if (index < 0) {
        return null;
    }
    return text.substring(0, index);
}
|
@Test
public void testBeforeLast() {
    // Cuts at the LAST occurrence; returns null when the delimiter is absent.
    assertEquals("Hello ", SshShellOutputStringHelper.beforeLast("Hello World", "World"));
    assertEquals("Hello World ", SshShellOutputStringHelper.beforeLast("Hello World World", "World"));
    assertEquals("Hello ", SshShellOutputStringHelper.beforeLast("Hello World Again", "World"));
    assertNull(SshShellOutputStringHelper.beforeLast("Hello Again", "Foo"));
    // Predicate overload: applies the predicate to the prefix before the last separator.
    assertTrue(SshShellOutputStringHelper.beforeLast("mykey:ignore:hello", ":", "mykey:ignore"::equals).orElse(false));
    assertFalse(SshShellOutputStringHelper.beforeLast("ignore:ignore:world", ":", "mykey"::equals).orElse(false));
}
|
/**
 * Deletes each given path via the Dropbox API, notifying the callback first.
 * Duplicates (previous versions) are permanently deleted by version id; all
 * other paths use the regular delete, which removes folders recursively.
 */
@Override
public void delete(final Map<Path, TransferStatus> files, final PasswordCallback prompt, final Callback callback) throws BackgroundException {
    for(Path file : files.keySet()) {
        try {
            callback.delete(file);
            // Delete the file or folder at a given path. If the path is a folder, all its contents will be deleted too.
            if(file.attributes().isDuplicate()) {
                new DbxUserFilesRequests(session.getClient(file)).permanentlyDelete(containerService.getKey(file),
                        file.attributes().getVersionId());
            }
            else {
                new DbxUserFilesRequests(session.getClient(file)).deleteV2(containerService.getKey(file));
            }
        }
        catch(DbxException e) {
            // Translate the Dropbox SDK failure into the application's exception type.
            throw new DropboxExceptionMappingService().map("Cannot delete {0}", e, file);
        }
    }
}
|
@Test
public void testDeleteDirectory() throws Exception {
    // Create a folder containing one file, then delete only the folder:
    // the contained file must be gone too (recursive delete).
    final Path folder = new DropboxDirectoryFeature(session).mkdir(
            new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.volume, Path.Type.directory)), new TransferStatus());
    final Path file = new DropboxTouchFeature(session).touch(
            new Path(folder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus());
    new DropboxDeleteFeature(session).delete(Collections.singletonList(folder), new DisabledLoginCallback(), new Delete.DisabledCallback());
    assertFalse(new DropboxFindFeature(session).find(file));
}
|
/**
 * Looks up a single user by username, returning only the username and
 * password columns mapped through {@code USER_ROW_MAPPER}.
 */
@Override
public User findUserByUsername(String username) {
    final String query = "SELECT username,password FROM users WHERE username=? ";
    final Object[] arguments = {username};
    return databaseOperate.queryOne(query, arguments, USER_ROW_MAPPER);
}
|
@Test
void testFindUserByUsername() {
    // With no matching row in the backing store, the lookup returns null.
    User user = embeddedUserPersistService.findUserByUsername("username");
    assertNull(user);
}
|
/**
 * Verifies the signature and registered claims of the supplied JWT.
 * <p>
 * The verification algorithm is selected from {@code publicKeyAlg}
 * (RS256/384/512 or ES256/384/512); any other value is rejected. Each failure
 * path increments the corresponding failure metric before throwing.
 *
 * @param publicKey    key used to verify the token signature
 * @param publicKeyAlg expected algorithm of the public key; must not be null
 * @param jwt          the decoded token to verify
 * @return the verified token
 * @throws AuthenticationException if the algorithm is null, unsupported, or
 *         does not match the key type, or if the token is expired, has an
 *         invalid signature or claim, or cannot be decoded
 */
DecodedJWT verifyJWT(PublicKey publicKey,
                     String publicKeyAlg,
                     DecodedJWT jwt) throws AuthenticationException {
    if (publicKeyAlg == null) {
        incrementFailureMetric(AuthenticationExceptionCode.UNSUPPORTED_ALGORITHM);
        throw new AuthenticationException("PublicKey algorithm cannot be null");
    }
    Algorithm alg;
    try {
        switch (publicKeyAlg) {
            case ALG_RS256:
                alg = Algorithm.RSA256((RSAPublicKey) publicKey, null);
                break;
            case ALG_RS384:
                alg = Algorithm.RSA384((RSAPublicKey) publicKey, null);
                break;
            case ALG_RS512:
                alg = Algorithm.RSA512((RSAPublicKey) publicKey, null);
                break;
            case ALG_ES256:
                alg = Algorithm.ECDSA256((ECPublicKey) publicKey, null);
                break;
            case ALG_ES384:
                alg = Algorithm.ECDSA384((ECPublicKey) publicKey, null);
                break;
            case ALG_ES512:
                alg = Algorithm.ECDSA512((ECPublicKey) publicKey, null);
                break;
            default:
                incrementFailureMetric(AuthenticationExceptionCode.UNSUPPORTED_ALGORITHM);
                throw new AuthenticationException("Unsupported algorithm: " + publicKeyAlg);
        }
    } catch (ClassCastException e) {
        incrementFailureMetric(AuthenticationExceptionCode.ALGORITHM_MISMATCH);
        // Fixed message: previously read "does match", which inverted the meaning.
        throw new AuthenticationException("Expected PublicKey alg [" + publicKeyAlg + "] does not match actual alg.");
    }
    // We verify issuer when retrieving the PublicKey, so it is not verified here.
    // The claim presence requirements are based on https://openid.net/specs/openid-connect-basic-1_0.html#IDToken
    Verification verifierBuilder = JWT.require(alg)
            .acceptLeeway(acceptedTimeLeewaySeconds)
            .withAnyOfAudience(allowedAudiences)
            .withClaimPresence(RegisteredClaims.ISSUED_AT)
            .withClaimPresence(RegisteredClaims.EXPIRES_AT)
            .withClaimPresence(RegisteredClaims.NOT_BEFORE)
            .withClaimPresence(RegisteredClaims.SUBJECT);
    if (isRoleClaimNotSubject) {
        verifierBuilder = verifierBuilder.withClaimPresence(roleClaim);
    }
    JWTVerifier verifier = verifierBuilder.build();
    try {
        return verifier.verify(jwt);
    } catch (TokenExpiredException e) {
        incrementFailureMetric(AuthenticationExceptionCode.EXPIRED_JWT);
        throw new AuthenticationException("JWT expired: " + e.getMessage());
    } catch (SignatureVerificationException e) {
        incrementFailureMetric(AuthenticationExceptionCode.ERROR_VERIFYING_JWT_SIGNATURE);
        throw new AuthenticationException("JWT signature verification exception: " + e.getMessage());
    } catch (InvalidClaimException e) {
        incrementFailureMetric(AuthenticationExceptionCode.INVALID_JWT_CLAIM);
        throw new AuthenticationException("JWT contains invalid claim: " + e.getMessage());
    } catch (AlgorithmMismatchException e) {
        incrementFailureMetric(AuthenticationExceptionCode.ALGORITHM_MISMATCH);
        throw new AuthenticationException("JWT algorithm does not match Public Key algorithm: " + e.getMessage());
    } catch (JWTDecodeException e) {
        incrementFailureMetric(AuthenticationExceptionCode.ERROR_DECODING_JWT);
        throw new AuthenticationException("Error while decoding JWT: " + e.getMessage());
    } catch (JWTVerificationException | IllegalArgumentException e) {
        incrementFailureMetric(AuthenticationExceptionCode.ERROR_VERIFYING_JWT);
        throw new AuthenticationException("JWT verification failed: " + e.getMessage());
    }
}
|
@Test
public void testThatUnsupportedAlgsThrowExceptions() {
    // Every algorithm NOT in the supported set must be rejected up front.
    Set<SignatureAlgorithm> unsupportedAlgs = new HashSet<>(Set.of(SignatureAlgorithm.values()));
    Arrays.stream(supportedAlgorithms()).map(o -> (SignatureAlgorithm) o[0]).toList()
            .forEach(unsupportedAlgs::remove);
    unsupportedAlgs.forEach(unsupportedAlg -> {
        try {
            @Cleanup
            AuthenticationProviderOpenID provider = new AuthenticationProviderOpenID();
            // We don't create a public key because it's irrelevant
            Assert.assertThrows(AuthenticationException.class,
                    () -> provider.verifyJWT(null, unsupportedAlg.getValue(), null));
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    });
}
|
/**
 * Parses the CLI options and dispatches to the matching cluster command:
 * list node labels, list node attributes, or print usage.
 *
 * @return 0 on success or when printing help/usage; -1 on a missing option argument
 */
@Override
public int run(String[] args) throws Exception {
    Options opts = new Options();
    opts.addOption("lnl", LIST_LABELS_CMD, false,
            "List cluster node-label collection");
    opts.addOption("lna", LIST_CLUSTER_ATTRIBUTES, false,
            "List cluster node-attribute collection");
    opts.addOption("h", HELP_CMD, false, "Displays help for all commands.");
    opts.addOption("dnl", DIRECTLY_ACCESS_NODE_LABEL_STORE, false,
            "This is DEPRECATED, will be removed in future releases. Directly access node label store, "
                    + "with this option, all node label related operations"
                    + " will NOT connect RM. Instead, they will"
                    + " access/modify stored node labels directly."
                    + " By default, it is false (access via RM)."
                    + " AND PLEASE NOTE: if you configured "
                    + YarnConfiguration.FS_NODE_LABELS_STORE_ROOT_DIR
                    + " to a local directory"
                    + " (instead of NFS or HDFS), this option will only work"
                    + " when the command run on the machine where RM is running."
                    + " Also, this option is UNSTABLE, could be removed in future"
                    + " releases.");
    int exitCode = -1;
    CommandLine parsedCli = null;
    try {
        parsedCli = new GnuParser().parse(opts, args);
    } catch (MissingArgumentException ex) {
        sysout.println("Missing argument for options");
        printUsage(opts);
        return exitCode;
    }
    createAndStartYarnClient();
    if (parsedCli.hasOption(DIRECTLY_ACCESS_NODE_LABEL_STORE)) {
        // Deprecated mode: bypass the RM and read the node-label store directly.
        accessLocal = true;
    }
    if (parsedCli.hasOption(LIST_LABELS_CMD)) {
        printClusterNodeLabels();
    } else if(parsedCli.hasOption(LIST_CLUSTER_ATTRIBUTES)){
        printClusterNodeAttributes();
    } else if (parsedCli.hasOption(HELP_CMD)) {
        printUsage(opts);
        return 0;
    } else {
        // NOTE(review): invalid usage prints an error yet still falls through to
        // return 0 below (not exitCode) — confirm this is intended.
        syserr.println("Invalid Command Usage : ");
        printUsage(opts);
    }
    return 0;
}
|
@Test
public void testHelp() throws Exception {
    // --help must return 0 and print the exact, fully-wrapped usage text.
    ClusterCLI cli = createAndGetClusterCLI();
    int rc =
            cli.run(new String[] { "cluster", "--help" });
    assertEquals(0, rc);
    // Reconstruct the expected usage output line by line for a strict match.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    PrintWriter pw = new PrintWriter(baos);
    pw.println("usage: yarn cluster");
    pw.println(" -dnl,--directly-access-node-label-store   This is DEPRECATED, will be");
    pw.println("                                           removed in future releases.");
    pw.println("                                           Directly access node label");
    pw.println("                                           store, with this option, all");
    pw.println("                                           node label related operations");
    pw.println("                                           will NOT connect RM. Instead,");
    pw.println("                                           they will access/modify stored");
    pw.println("                                           node labels directly. By");
    pw.println("                                           default, it is false (access");
    pw.println("                                           via RM). AND PLEASE NOTE: if");
    pw.println("                                           you configured");
    pw.println("                                           yarn.node-labels.fs-store.root-");
    pw.println("                                           dir to a local directory");
    pw.println("                                           (instead of NFS or HDFS), this");
    pw.println("                                           option will only work when the");
    pw.println("                                           command run on the machine");
    pw.println("                                           where RM is running. Also, this");
    pw.println("                                           option is UNSTABLE, could be");
    pw.println("                                           removed in future releases.");
    pw.println(" -h,--help                                 Displays help for all commands.");
    pw.println(" -lna,--list-node-attributes               List cluster node-attribute");
    pw.println("                                           collection");
    pw.println(" -lnl,--list-node-labels                   List cluster node-label");
    pw.println("                                           collection");
    pw.close();
    verify(sysOut).println(baos.toString("UTF-8"));
}
|
/**
 * Returns a factory that, given a KsqlConfig, instantiates the UDF class,
 * configures it (if it implements Configurable) and wraps it in a
 * PluggableUdf — additionally wrapped in a UdfMetricProducer when metrics
 * collection is enabled.
 */
private Function<KsqlConfig, Kudf> getUdfFactory(
        final Method method,
        final UdfDescription udfDescriptionAnnotation,
        final String functionName,
        final FunctionInvoker invoker,
        final String sensorName
) {
    return ksqlConfig -> {
        final Object actualUdf = FunctionLoaderUtils.instantiateFunctionInstance(
                method.getDeclaringClass(), udfDescriptionAnnotation.name());
        if (actualUdf instanceof Configurable) {
            // Run user-supplied configure() inside the restricted UDF security
            // scope; popOutUdf() in finally guarantees the scope is always exited.
            ExtensionSecurityManager.INSTANCE.pushInUdf();
            try {
                ((Configurable) actualUdf)
                        .configure(ksqlConfig.getKsqlFunctionsConfigProps(functionName));
            } finally {
                ExtensionSecurityManager.INSTANCE.popOutUdf();
            }
        }
        final PluggableUdf theUdf = new PluggableUdf(invoker, actualUdf);
        // Wrap with the metric producer only when a metrics registry is present.
        return metrics.<Kudf>map(m -> new UdfMetricProducer(
                m.getSensor(sensorName),
                theUdf,
                Time.SYSTEM
        )).orElse(theUdf);
    };
}
|
@Test
public void shouldCollectMetricsWhenMetricCollectionEnabled() {
    // Given:
    final UdfFactory substring = FUNC_REG_WITH_METRICS.getUdfFactory(FunctionName.of("substring"));
    final KsqlScalarFunction function = substring
            .getFunction(Arrays.asList(SqlArgument.of(SqlTypes.STRING), SqlArgument.of(SqlTypes.INTEGER)));
    // When:
    final Kudf kudf = function.newInstance(ksqlConfig);
    // Then: the UDF is wrapped for metrics, and the sensor plus the
    // count/max/avg/rate metrics are all registered for it.
    assertThat(kudf, instanceOf(UdfMetricProducer.class));
    final Sensor sensor = METRICS.getSensor("ksql-udf-substring");
    assertThat(sensor, not(nullValue()));
    assertThat(METRICS.metric(METRICS.metricName("ksql-udf-substring-count", "ksql-udf")),
            not(nullValue()));
    assertThat(METRICS.metric(METRICS.metricName("ksql-udf-substring-max", "ksql-udf")),
            not(nullValue()));
    assertThat(METRICS.metric(METRICS.metricName("ksql-udf-substring-avg", "ksql-udf")),
            not(nullValue()));
    assertThat(METRICS.metric(METRICS.metricName("ksql-udf-substring-rate", "ksql-udf")),
            not(nullValue()));
}
|
/**
 * Populates the thread-bound request context from the incoming HTTP request,
 * runs the rest of the filter chain, and always clears the context afterwards
 * so no state leaks across requests.
 */
@Override
public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain)
        throws IOException, ServletException {
    final RequestContext context = RequestContextHolder.getContext();
    try {
        context.getBasicContext().setRequestProtocol(BasicContext.HTTP_PROTOCOL);
        final HttpServletRequest httpRequest = (HttpServletRequest) servletRequest;
        setRequestTarget(httpRequest, context);
        setEncoding(httpRequest, context);
        setAddressContext(httpRequest, context);
        setOtherBasicContext(httpRequest, context);
        filterChain.doFilter(servletRequest, servletResponse);
    } finally {
        RequestContextHolder.removeContext();
    }
}
|
@Test
public void testGetAppNameWithFallback() throws Exception {
    // With an empty app header, the downstream filter must observe the
    // fallback app name; any assertion error inside it is rethrown here.
    when(servletRequest.getHeader(HttpHeaderConsts.APP_FILED)).thenReturn("");
    MockNextFilter nextFilter = new MockNextFilter("unknown", "GBK");
    filter.doFilter(servletRequest, servletResponse, new MockFilterChain(servlet, nextFilter));
    if (null != nextFilter.error) {
        throw nextFilter.error;
    }
}
|
/**
 * Deprecated overload: adapts the value-only transformer supplier to the
 * with-key variant and delegates with no explicit name.
 */
@Override
@Deprecated
public <VR> KStream<K, VR> flatTransformValues(final org.apache.kafka.streams.kstream.ValueTransformerSupplier<? super V, Iterable<VR>> valueTransformerSupplier,
                                               final String... stateStoreNames) {
    Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null");
    return doFlatTransformValues(
            toValueTransformerWithKeySupplier(valueTransformerSupplier),
            NamedInternal.empty(),
            stateStoreNames);
}
|
@Test
@SuppressWarnings("deprecation")
public void shouldNotAllowNullNamedOnFlatTransformValuesWithFlatValueWithKeySupplier() {
    // A null Named argument must be rejected with a descriptive NPE.
    final NullPointerException exception = assertThrows(
            NullPointerException.class,
            () -> testStream.flatTransformValues(
                    flatValueTransformerWithKeySupplier,
                    (Named) null));
    assertThat(exception.getMessage(), equalTo("named can't be null"));
}
|
/**
 * Delegates ring construction to the configured underlying ring factory.
 */
@Override
public Ring<T> createRing(Map<T, Integer> pointsMap) {
    return _ringFactory.createRing(pointsMap);
}
|
@Test(groups = { "small", "back-end" })
public void testFactoryWithMultiProbeAndHashMethod() {
    // A "multiProbe" configuration must yield an MPConsistentHashRing instance.
    RingFactory<String> factory = new DelegatingRingFactory<>(configBuilder("multiProbe", "uriRegex"));
    Ring<String> ring = factory.createRing(buildPointsMap(10));
    assertTrue(ring instanceof MPConsistentHashRing);
}
|
/**
 * Static factory for a {@code PCollections} flatten transform.
 */
public static <T> PCollections<T> pCollections() {
    return new PCollections<>();
}
|
@Test
@Category(ValidatesRunner.class)
public void testFlattenWithDifferentInputAndOutputCoders2() {
    // This test exists to prevent a regression in Dataflow. It tests a
    // GroupByKey followed by a Flatten with an SDK-specific output coder.
    PCollection<KV<String, Iterable<String>>> flattenInput =
            p.apply(Create.of(LINES))
                    .apply(WithKeys.of("a"))
                    .setCoder(KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of()))
                    .apply(GroupByKey.create());
    // Flatten with a SerializableCoder, then unwrap values and re-flatten the
    // grouped iterables back into individual lines.
    PCollection<String> output =
            PCollectionList.of(flattenInput)
                    .apply(Flatten.pCollections())
                    .setCoder(SerializableCoder.of(new TypeDescriptor<KV<String, Iterable<String>>>() {}))
                    .apply(Values.create())
                    .setCoder(IterableCoder.of(StringUtf8Coder.of()))
                    .apply(
                            FlatMapElements.into(TypeDescriptors.strings())
                                    .via((Iterable<String> values) -> values));
    // Round trip through coders must preserve the original lines.
    PAssert.that(output).containsInAnyOrder(LINES);
    p.run();
}
|
/**
 * Forwards the text event to the wrapped listener, then records the key in
 * the usage history — but only when both the key label and the text are
 * non-empty.
 */
@Override
public void onText(Keyboard.Key key, CharSequence text) {
    mKeyboardActionListener.onText(key, text);
    // Guard clause: nothing to record without both a label and a value.
    if (TextUtils.isEmpty(key.label) || TextUtils.isEmpty(text)) {
        return;
    }
    mHistoryQuickTextKey.recordUsedKey(String.valueOf(key.label), String.valueOf(text));
}
|
@Test
public void onTextWithText() throws Exception {
    // A labeled key with non-empty text is both forwarded and recorded.
    Keyboard.Key key = Mockito.mock(Keyboard.Key.class);
    key.label = "testing";
    mUnderTest.onText(key, "testing_value");
    Mockito.verify(mKeyboardListener).onText(key, "testing_value");
    Mockito.verify(mHistoryKey).recordUsedKey("testing", "testing_value");
}
|
/**
 * Closes the writer without keeping data, delegating to {@code close(false)}.
 */
@Override
public void close() throws IOException {
    close(false);
}
|
@Test
public void testClose() throws Exception {
    // close() must delegate to the snapshot storage with keepDataOnError=false.
    this.writer.close();
    Mockito.verify(this.snapshotStorage, Mockito.only()).close(this.writer, false);
}
|
/**
 * Creates a consumer view for the given subpartition, registering it with
 * both the file-backed and in-memory data managers and assigning it a fresh,
 * strictly increasing consumer id.
 *
 * @throws PartitionNotFoundException if the partition's data file is unreadable
 */
@Override
protected ResultSubpartitionView createSubpartitionView(
        int subpartitionId, BufferAvailabilityListener availabilityListener)
        throws IOException {
    checkState(!isReleased(), "ResultPartition already released.");
    // If data file is not readable, throw PartitionNotFoundException to mark this result
    // partition failed. Otherwise, the partition data is not regenerated, so failover can not
    // recover the job.
    if (!Files.isReadable(dataFilePath)) {
        throw new PartitionNotFoundException(getPartitionId());
    }
    // if broadcastOptimize is enabled, map every subpartitionId to the special broadcast
    // subpartition.
    subpartitionId = isBroadcastOnly ? BROADCAST_SUBPARTITION : subpartitionId;
    HsSubpartitionConsumer subpartitionConsumer =
            new HsSubpartitionConsumer(availabilityListener);
    HsConsumerId lastConsumerId = lastConsumerIds[subpartitionId];
    // Reject a second concurrent consumer when the configuration forbids it.
    checkMultipleConsumerIsAllowed(lastConsumerId, hybridShuffleConfiguration);
    // assign a unique id for each consumer, now it is guaranteed by the value that is one
    // higher than the last consumerId's id field.
    HsConsumerId consumerId = HsConsumerId.newId(lastConsumerId);
    lastConsumerIds[subpartitionId] = consumerId;
    // Register with disk first, then memory, and wire both views into the
    // consumer before returning it.
    HsDataView diskDataView =
            fileDataManager.registerNewConsumer(
                    subpartitionId, consumerId, subpartitionConsumer);
    HsDataView memoryDataView =
            checkNotNull(memoryDataManager)
                    .registerNewConsumer(subpartitionId, consumerId, subpartitionConsumer);
    subpartitionConsumer.setDiskDataView(diskDataView);
    subpartitionConsumer.setMemoryDataView(memoryDataView);
    return subpartitionConsumer;
}
|
@Test
void testFullSpillingStrategyRegisterMultipleConsumer() throws Exception {
    // Under the FULL spilling strategy, registering a second consumer for the
    // same subpartition must be allowed (no exception thrown).
    final int numSubpartitions = 2;
    BufferPool bufferPool = globalPool.createBufferPool(2, 2);
    try (HsResultPartition partition =
            createHsResultPartition(
                    2,
                    bufferPool,
                    HybridShuffleConfiguration.builder(
                                    numSubpartitions, readBufferPool.getNumBuffersPerRequest())
                            .setSpillingStrategyType(
                                    HybridShuffleConfiguration.SpillingStrategyType.FULL)
                            .build())) {
        partition.createSubpartitionView(
                new ResultSubpartitionIndexSet(0), new NoOpBufferAvailablityListener());
        assertThatNoException()
                .isThrownBy(
                        () ->
                                partition.createSubpartitionView(
                                        new ResultSubpartitionIndexSet(0),
                                        new NoOpBufferAvailablityListener()));
    }
}
|
/**
 * Wraps the exchange's request so its body is served from the cached
 * output message instead of the original stream.
 */
private ServerHttpRequestDecorator decorate(final ServerWebExchange exchange, final CachedBodyOutputMessage outputMessage) {
    return new ServerHttpRequestDecorator(exchange.getRequest()) {
        @Override
        public Flux<DataBuffer> getBody() {
            // Replay the cached body rather than the (already consumed) original.
            return outputMessage.getBody();
        }
    };
}
|
@Test
public void testDecorate() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
    // Invoke the private decorate(...) via reflection and verify the decorated
    // request's body is exactly the cached output message's body.
    ServerWebExchange webExchangeTextPlain =
            MockServerWebExchange.from(MockServerHttpRequest
                    .post("http://localhost:8080")
                    .contentType(MediaType.TEXT_PLAIN)
                    .contentLength(4)
                    .body("test"));
    FileSizeFilter fileSizeFilterError = new FileSizeFilter(1);
    Method declaredMethod = FileSizeFilter.class.getDeclaredMethod("decorate", ServerWebExchange.class, CachedBodyOutputMessage.class);
    CachedBodyOutputMessage cachedBodyOutputMessage = mock(CachedBodyOutputMessage.class);
    declaredMethod.setAccessible(true);
    ServerHttpRequestDecorator decorator = (ServerHttpRequestDecorator) declaredMethod.invoke(fileSizeFilterError, webExchangeTextPlain, cachedBodyOutputMessage);
    Assertions.assertEquals(decorator.getBody(), cachedBodyOutputMessage.getBody());
}
|
/**
 * Returns the serialized form of this range: a predefined period is
 * represented by its code, while a custom range is encoded as
 * "start&lt;separator&gt;end" using the localized date format.
 */
public String getValue() {
    if (period != null) {
        return period.getCode();
    }
    final DateFormat dateFormat = I18N.createDateFormat();
    return dateFormat.format(startDate) + CUSTOM_PERIOD_SEPARATOR + dateFormat.format(endDate);
}
|
@Test
public void testGetValue() {
    // Both a predefined-period range and a custom range serialize to non-null values.
    assertNotNull("getValue", periodRange.getValue());
    assertNotNull("getValue", customRange.getValue());
}
|
/**
 * Loads the posts matching the given ids; an empty or null id collection
 * short-circuits to an empty list without hitting the database.
 */
@Override
public List<PostDO> getPostList(Collection<Long> ids) {
    return CollUtil.isEmpty(ids)
            ? Collections.emptyList()
            : postMapper.selectBatchIds(ids);
}
|
@Test
public void testGetPostList() {
    // mock 数据 (mock data)
    PostDO postDO01 = randomPojo(PostDO.class);
    postMapper.insert(postDO01);
    // 测试 id 不匹配 (a second row whose id is NOT queried, to prove filtering)
    PostDO postDO02 = randomPojo(PostDO.class);
    postMapper.insert(postDO02);
    // 准备参数 (prepare arguments)
    List<Long> ids = singletonList(postDO01.getId());
    // 调用 (invoke)
    List<PostDO> list = postService.getPostList(ids);
    // 断言 (assert: only the matching row is returned)
    assertEquals(1, list.size());
    assertPojoEquals(postDO01, list.get(0));
}
|
/**
 * Converts a persisted live measure into a domain {@link Measure}, dispatching
 * on the metric's value type. Returns empty when the DTO is null.
 *
 * @throws NullPointerException if {@code metric} is null
 * @throws IllegalArgumentException for an unknown value type
 */
public Optional<Measure> toMeasure(@Nullable LiveMeasureDto measureDto, Metric metric) {
    requireNonNull(metric);
    if (measureDto == null) {
        return Optional.empty();
    }
    Double value = measureDto.getValue();
    String data = measureDto.getDataAsString();
    // Each value type has its own conversion; numeric ones use the raw value,
    // string-like ones use the data column.
    switch (metric.getType().getValueType()) {
        case INT:
            return toIntegerMeasure(value, data);
        case LONG:
            return toLongMeasure(value, data);
        case DOUBLE:
            return toDoubleMeasure(value, data);
        case BOOLEAN:
            return toBooleanMeasure(value, data);
        case STRING:
            return toStringMeasure(data);
        case LEVEL:
            return toLevelMeasure(data);
        case NO_VALUE:
            return toNoValueMeasure();
        default:
            throw new IllegalArgumentException("Unsupported Measure.ValueType " + metric.getType().getValueType());
    }
}
|
@Test
public void toMeasure_should_not_loose_decimals_of_float_values() {
    // A FLOAT metric must preserve the stored value's full precision.
    MetricImpl metric = new MetricImpl("42", "double", "name", Metric.MetricType.FLOAT, 5, null, false, false);
    LiveMeasureDto LiveMeasureDto = new LiveMeasureDto()
            .setValue(0.12345);
    Optional<Measure> measure = underTest.toMeasure(LiveMeasureDto, metric);
    assertThat(measure.get().getDoubleValue()).isEqualTo(0.12345, Offset.offset(0.000001));
}
|
/**
 * Compares two ranges for equality, first normalizing both to a common
 * numeric type when their endpoint types differ (e.g. Long vs BigInteger);
 * falls back to plain {@code equals} when no common type applies.
 */
public static boolean safeRangeEquals(final Range<Comparable<?>> sourceRange, final Range<Comparable<?>> targetRange) {
    Class<?> clazz = getRangeTargetNumericType(sourceRange, targetRange);
    if (null == clazz) {
        // No numeric normalization needed — compare as-is.
        return sourceRange.equals(targetRange);
    }
    Range<Comparable<?>> newSourceRange = createTargetNumericTypeRange(sourceRange, clazz);
    Range<Comparable<?>> newTargetRange = createTargetNumericTypeRange(targetRange, clazz);
    return newSourceRange.equals(newTargetRange);
}
|
@Test
void assertSafeRangeEqualsForLong() {
    // Long and BigInteger endpoints with the same value must compare equal.
    assertTrue(SafeNumberOperationUtils.safeRangeEquals(Range.greaterThan(1L), Range.greaterThan(BigInteger.ONE)));
}
|
/**
 * Static factory: builds a UStaticIdent for the given class, member name
 * and member type.
 */
public static UStaticIdent create(UClassIdent classIdent, CharSequence member, UType memberType) {
    return new AutoValue_UStaticIdent(classIdent, StringName.of(member), memberType);
}
|
@Test
public void equality() {
    // Idents differing in member name OR member type must not be equal;
    // each group below is internally equal and distinct from the others.
    new EqualsTester()
            .addEqualityGroup(
                    UStaticIdent.create(
                            "java.lang.Integer",
                            "valueOf",
                            UMethodType.create(
                                    UClassType.create("java.lang.Integer"), UClassType.create("java.lang.String"))))
            .addEqualityGroup(
                    UStaticIdent.create(
                            "java.lang.Integer",
                            "valueOf",
                            UMethodType.create(
                                    UClassType.create("java.lang.Integer"),
                                    UClassType.create("java.lang.String"),
                                    UPrimitiveType.INT)))
            .addEqualityGroup(
                    UStaticIdent.create(
                            "java.lang.Integer",
                            "getInteger",
                            UMethodType.create(
                                    UClassType.create("java.lang.Integer"), UClassType.create("java.lang.String"))))
            .testEquals();
}
|
/**
 * Conditionally wraps a throwable: when {@code condition} holds the throwable
 * is always passed through {@code wrap}; otherwise a RuntimeException is
 * returned as-is and anything else is wrapped in a plain RuntimeException.
 */
public static RuntimeException wrapIf(boolean condition, Throwable t) {
    if (condition) {
        return wrap(t);
    }
    return (t instanceof RuntimeException)
            ? (RuntimeException) t
            : new RuntimeException(t);
}
|
@Test
public void testWrapIfReturnsSourceRuntimeExceptionWhenFalse() {
    // With condition=false, a RuntimeException is returned unwrapped (same instance).
    RuntimeException runtimeException = new RuntimeException("oh noes!");
    RuntimeException wrapped = UserCodeException.wrapIf(false, runtimeException);
    assertEquals(runtimeException, wrapped);
}
|
/**
 * Looks up the select-list for the given query values in the hash index.
 * Returns null when no bucket matches, or when any lookup is attempted
 * against an empty bucket. Null query values are rejected.
 *
 * @throws IllegalStateException    if the index was never initialized
 * @throws IllegalArgumentException if any query value is null
 */
public HollowHashIndexResult findMatches(Object... query) {
    if (hashStateVolatile == null) {
        throw new IllegalStateException(this + " wasn't initialized");
    }
    // Combine the per-field hash codes of all query values.
    int hashCode = 0;
    for(int i=0;i<query.length;i++) {
        if(query[i] == null)
            throw new IllegalArgumentException("querying by null unsupported; i=" + i);
        hashCode ^= HashCodes.hashInt(keyHashCode(query[i], i));
    }
    HollowHashIndexResult result;
    HollowHashIndexState hashState;
    // Optimistic concurrency: retry the whole probe if hashStateVolatile was
    // swapped out (e.g. by a concurrent update) while we were reading it.
    do {
        result = null;
        hashState = hashStateVolatile;
        long bucket = hashCode & hashState.getMatchHashMask();
        long hashBucketBit = bucket * hashState.getBitsPerMatchHashEntry();
        boolean bucketIsEmpty = hashState.getMatchHashTable().getElementValue(hashBucketBit, hashState.getBitsPerTraverserField()[0]) == 0;
        // Linear probing: advance to the next bucket (wrapping via the mask)
        // until the keys match or an empty bucket ends the search.
        while (!bucketIsEmpty) {
            if (matchIsEqual(hashState.getMatchHashTable(), hashBucketBit, query)) {
                int selectSize = (int) hashState.getMatchHashTable().getElementValue(hashBucketBit + hashState.getBitsPerMatchHashKey(), hashState.getBitsPerSelectTableSize());
                long selectBucketPointer = hashState.getMatchHashTable().getElementValue(hashBucketBit + hashState.getBitsPerMatchHashKey() + hashState.getBitsPerSelectTableSize(), hashState.getBitsPerSelectTablePointer());
                result = new HollowHashIndexResult(hashState, selectBucketPointer, selectSize);
                break;
            }
            bucket = (bucket + 1) & hashState.getMatchHashMask();
            hashBucketBit = bucket * hashState.getBitsPerMatchHashEntry();
            bucketIsEmpty = hashState.getMatchHashTable().getElementValue(hashBucketBit, hashState.getBitsPerTraverserField()[0]) == 0;
        }
    } while (hashState != hashStateVolatile);
    return result;
}
|
@Test
public void testIndexingFloatTypeFieldWithNullValues() throws Exception {
    // Index a type where one record has a null float field; lookups for an
    // unindexed value return null, while the real value matches its ordinal.
    mapper.add(new TypeFloat(null));
    mapper.add(new TypeFloat(-1.0f));
    roundTripSnapshot();
    HollowHashIndex index = new HollowHashIndex(readStateEngine, "TypeFloat", "", "data.value");
    Assert.assertNull(index.findMatches(2.0f));
    assertIteratorContainsAll(index.findMatches(-1.0f).iterator(), 1);
}
|
/**
 * Maps a Redshift column definition to a SeaTunnel {@link Column}.
 * <p>
 * Each recognized Redshift data type is translated to the corresponding
 * SeaTunnel type, normalizing the {@code sourceType} string and filling in
 * length/precision/scale metadata where applicable. Unrecognized types are
 * delegated to the parent converter; if that also fails, a conversion error
 * naming the offending column is raised.
 *
 * @param typeDefine the Redshift column definition to convert
 * @return the converted SeaTunnel column
 * @throws SeaTunnelRuntimeException if the data type cannot be converted
 */
@Override
public Column convert(BasicTypeDefine typeDefine) {
    // Attributes shared by every branch.
    PhysicalColumn.PhysicalColumnBuilder builder =
            PhysicalColumn.builder()
                    .name(typeDefine.getName())
                    .sourceType(typeDefine.getColumnType())
                    .nullable(typeDefine.isNullable())
                    .defaultValue(typeDefine.getDefaultValue())
                    .comment(typeDefine.getComment());
    String dataType = typeDefine.getDataType().toUpperCase();
    switch (dataType) {
        case REDSHIFT_BOOLEAN:
            builder.sourceType(REDSHIFT_BOOLEAN);
            builder.dataType(BasicType.BOOLEAN_TYPE);
            break;
        case REDSHIFT_SMALLINT:
            builder.sourceType(REDSHIFT_SMALLINT);
            builder.dataType(BasicType.SHORT_TYPE);
            break;
        case REDSHIFT_INTEGER:
            builder.sourceType(REDSHIFT_INTEGER);
            builder.dataType(BasicType.INT_TYPE);
            break;
        case REDSHIFT_BIGINT:
            builder.sourceType(REDSHIFT_BIGINT);
            builder.dataType(BasicType.LONG_TYPE);
            break;
        case REDSHIFT_REAL:
            builder.sourceType(REDSHIFT_REAL);
            builder.dataType(BasicType.FLOAT_TYPE);
            break;
        case REDSHIFT_DOUBLE_PRECISION:
            builder.sourceType(REDSHIFT_DOUBLE_PRECISION);
            builder.dataType(BasicType.DOUBLE_TYPE);
            break;
        case REDSHIFT_NUMERIC:
            Long precision = typeDefine.getPrecision();
            Integer scale = typeDefine.getScale();
            if (precision == null || precision <= 0) {
                precision = Long.valueOf(DEFAULT_PRECISION);
                scale = DEFAULT_SCALE;
            } else if (precision > MAX_PRECISION) {
                // Clamp over-wide precision and shrink the scale by the same
                // amount so the count of integral digits is preserved.
                // FIX: guard against a null scale here, which previously
                // triggered an unboxing NPE on the subtraction.
                scale = (scale == null ? DEFAULT_SCALE : scale) - (int) (precision - MAX_PRECISION);
                precision = Long.valueOf(MAX_PRECISION);
            }
            if (scale == null) {
                // NUMERIC declared without an explicit scale: fall back to the
                // default to avoid an unboxing NPE in DecimalType below.
                scale = DEFAULT_SCALE;
            }
            builder.sourceType(String.format("%s(%d,%d)", REDSHIFT_NUMERIC, precision, scale));
            builder.dataType(new DecimalType(Math.toIntExact(precision), scale));
            break;
        case REDSHIFT_CHARACTER:
            Long characterLength = typeDefine.getLength();
            if (characterLength == null || characterLength <= 0) {
                // Unknown length: assume the widest CHAR Redshift supports.
                characterLength = Long.valueOf(MAX_CHARACTER_LENGTH);
            }
            builder.sourceType(String.format("%s(%d)", REDSHIFT_CHARACTER, characterLength));
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(characterLength);
            break;
        case REDSHIFT_CHARACTER_VARYING:
            Long characterVaryingLength = typeDefine.getLength();
            if (characterVaryingLength == null || characterVaryingLength <= 0) {
                // Unknown length: assume the widest VARCHAR Redshift supports.
                characterVaryingLength = Long.valueOf(MAX_CHARACTER_VARYING_LENGTH);
            }
            builder.sourceType(
                    String.format(
                            "%s(%d)", REDSHIFT_CHARACTER_VARYING, characterVaryingLength));
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(characterVaryingLength);
            break;
        case REDSHIFT_HLLSKETCH:
            // HLL sketches are surfaced as strings with the maximum length.
            builder.sourceType(REDSHIFT_HLLSKETCH);
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(MAX_HLLSKETCH_LENGTH);
            break;
        case REDSHIFT_SUPER:
            // Semi-structured SUPER values are surfaced as strings.
            builder.sourceType(REDSHIFT_SUPER);
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(MAX_SUPER_LENGTH);
            break;
        case REDSHIFT_VARBYTE:
        case REDSHIFT_BINARY_VARYING:
            builder.sourceType(
                    String.format(
                            "%s(%d)", typeDefine.getDataType(), MAX_BINARY_VARYING_LENGTH));
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(MAX_BINARY_VARYING_LENGTH);
            break;
        case REDSHIFT_TIME:
            builder.sourceType(REDSHIFT_TIME);
            builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
            builder.scale(MAX_TIME_SCALE);
            break;
        case REDSHIFT_TIMETZ:
            builder.sourceType(REDSHIFT_TIMETZ);
            builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
            builder.scale(MAX_TIME_SCALE);
            break;
        case REDSHIFT_TIMESTAMP:
            builder.sourceType(REDSHIFT_TIMESTAMP);
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            builder.scale(MAX_TIMESTAMP_SCALE);
            break;
        case REDSHIFT_TIMESTAMPTZ:
            builder.sourceType(REDSHIFT_TIMESTAMPTZ);
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            builder.scale(MAX_TIMESTAMP_SCALE);
            break;
        default:
            try {
                // Let the generic converter handle common ANSI types.
                return super.convert(typeDefine);
            } catch (SeaTunnelRuntimeException e) {
                throw CommonError.convertToSeaTunnelTypeError(
                        DatabaseIdentifier.REDSHIFT,
                        typeDefine.getDataType(),
                        typeDefine.getName());
            }
    }
    return builder.build();
}
|
@Test
public void testConvertUnsupported() {
    // An unrecognized dataType must surface as a SeaTunnelRuntimeException
    // (raised via CommonError in the converter's default branch).
    BasicTypeDefine<Object> typeDefine =
            BasicTypeDefine.builder().name("test").columnType("aaa").dataType("aaa").build();
    // assertThrows has the same semantics as the old try/catch/fail pattern:
    // it fails if no exception is thrown or if a different type is thrown.
    Assertions.assertThrows(
            SeaTunnelRuntimeException.class,
            () -> RedshiftTypeConverter.INSTANCE.convert(typeDefine));
}
|
/**
 * Brent's method for finding a real root of a univariate function.
 * Combines bisection, the secant method, and inverse quadratic
 * interpolation, keeping bisection's guaranteed convergence while
 * usually converging superlinearly.
 *
 * @param func    the function whose root is sought
 * @param x1      the left endpoint of the bracketing interval
 * @param x2      the right endpoint of the bracketing interval
 * @param tol     the desired accuracy of the root
 * @param maxIter the maximum number of iterations
 * @return the root, or the best estimate when maxIter is exhausted
 * @throws IllegalArgumentException if tol or maxIter is not positive,
 *         or if f(x1) and f(x2) have the same sign (root not bracketed)
 */
public static double find(Function func, double x1, double x2, double tol, int maxIter) {
    if (tol <= 0.0) {
        throw new IllegalArgumentException("Invalid tolerance: " + tol);
    }
    if (maxIter <= 0) {
        throw new IllegalArgumentException("Invalid maximum number of iterations: " + maxIter);
    }
    double a = x1, b = x2, c = x2, d = 0, e = 0;
    double fa = func.apply(a), fb = func.apply(b), fc, p, q, r, s, xm;
    if ((fa > 0.0 && fb > 0.0) || (fa < 0.0 && fb < 0.0)) {
        throw new IllegalArgumentException("Root must be bracketed.");
    }
    fc = fb;
    for (int iter = 1; iter <= maxIter; iter++) {
        if ((fb > 0.0 && fc > 0.0) || (fb < 0.0 && fc < 0.0)) {
            // b and c lie on the same side of the root: rebracket with [a, b].
            c = a;
            fc = fa;
            e = d = b - a;
        }
        if (Math.abs(fc) < Math.abs(fb)) {
            // Keep b as the best estimate (smallest |f|), c as the other bracket.
            a = b;
            b = c;
            c = a;
            fa = fb;
            fb = fc;
            fc = fa;
        }
        // Machine-precision-aware tolerance for this iteration. FIX: this must
        // be a local (tol1), not a reassignment of the parameter; the old
        // "tol = 2*EPS*|b| + 0.5*tol" halved the requested tolerance on every
        // pass, tightening the stopping criterion far beyond what the caller
        // asked for and forcing unnecessary extra iterations.
        double tol1 = 2.0 * MathEx.EPSILON * Math.abs(b) + 0.5 * tol;
        xm = 0.5 * (c - b);
        if (iter % 10 == 0) {
            logger.info(String.format("Brent: the root after %3d iterations: %.5g, error = %.5g", iter, b, xm));
        }
        if (Math.abs(xm) <= tol1 || fb == 0.0) {
            logger.info(String.format("Brent finds the root after %d iterations: %.5g, error = %.5g", iter, b, xm));
            return b;
        }
        if (Math.abs(e) >= tol1 && Math.abs(fa) > Math.abs(fb)) {
            // Attempt inverse quadratic interpolation (secant when a == c).
            s = fb / fa;
            if (a == c) {
                p = 2.0 * xm * s;
                q = 1.0 - s;
            } else {
                q = fa / fc;
                r = fb / fc;
                p = s * (2.0 * xm * q * (q - r) - (b - a) * (r - 1.0));
                q = (q - 1.0) * (r - 1.0) * (s - 1.0);
            }
            if (p > 0.0) {
                q = -q;
            }
            p = Math.abs(p);
            // Accept the interpolated step only if it stays in bounds and
            // shrinks faster than bisection would; otherwise bisect.
            double min1 = 3.0 * xm * q - Math.abs(tol1 * q);
            double min2 = Math.abs(e * q);
            if (2.0 * p < Math.min(min1, min2)) {
                e = d;
                d = p / q;
            } else {
                d = xm;
                e = d;
            }
        } else {
            // Interpolation unusable: fall back to bisection.
            d = xm;
            e = d;
        }
        a = b;
        fa = fb;
        if (Math.abs(d) > tol1) {
            b += d;
        } else {
            // Step too small: move by the minimum meaningful amount toward the root.
            b += Math.copySign(tol1, xm);
        }
        fb = func.apply(b);
    }
    logger.error("Brent exceeded the maximum number of iterations.");
    return b;
}
|
@Test
public void testBrent() {
    System.out.println("Brent");
    // f(x) = x^3 + x^2 - 5x + 3 = (x - 1)^2 (x + 3) has a root at -3 in [-4, -2].
    double root = Root.find(x -> x * x * x + x * x - 5 * x + 3, -4, -2, 1E-7, 500);
    assertEquals(-3, root, 1E-7);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.