| focal_method (string, length 13–60.9k) | test_case (string, length 25–109k) |
|---|---|
// Deserializes the queued command and executes it in EXECUTE mode.
// Fails fast (IllegalStateException, via throwIfNotConfigured) when the
// executor has not been configured yet — see the companion test.
void handleStatement(final QueuedCommand queuedCommand) {
    throwIfNotConfigured();
    handleStatementWithTerminatedQueries(
        // deserialize the raw command payload with the configured deserializer
        queuedCommand.getAndDeserializeCommand(commandDeserializer),
        queuedCommand.getAndDeserializeCommandId(),
        queuedCommand.getStatus(),
        Mode.EXECUTE,
        queuedCommand.getOffset(),
        false // NOTE(review): flag semantics not visible from this block — confirm against callee signature
    );
}
|
// Verifies that handleStatement() rejects calls before the executor is
// configured: with APPLICATION_SERVER_CONFIG removed from the engine's
// KsqlConfig, handleStatement must throw IllegalStateException.
@Test(expected = IllegalStateException.class)
public void shouldThrowOnHandleStatementIfNotConfigured() {
    // Given: a freshly built (unconfigured) executor
    statementExecutor = new InteractiveStatementExecutor(
        serviceContext,
        mockEngine,
        mockParser,
        mockQueryIdGenerator,
        commandDeserializer
    );
    // ... whose engine config lacks the application-server setting
    final Map<String, Object> withoutAppServer = ksqlConfig.originals();
    withoutAppServer.remove(StreamsConfig.APPLICATION_SERVER_CONFIG);
    when(mockEngine.getKsqlConfig()).thenReturn(new KsqlConfig(withoutAppServer));
    // When: handling any queued command (expected to throw, asserted via @Test)
    statementExecutor.handleStatement(queuedCommand);
}
|
/**
 * Resets the queue offset time for the task that was scheduled as the retry
 * of a previously failed task.
 *
 * @param previousFailedTaskId id of the failed task whose retry should be re-queued
 * @return {@code true} if a retry task was found and its offset reset via the
 *     queue DAO; {@code false} when no retry of the failed task exists in the workflow
 */
public boolean resetTaskOffset(String previousFailedTaskId) {
    final Task failedTask = executionDAOFacade.getTaskById(previousFailedTaskId);
    final Workflow workflow =
        executionDAOFacade.getWorkflowById(failedTask.getWorkflowInstanceId(), true);
    // Find the task whose retriedTaskId points back at the failed task, i.e. its retry.
    return workflow.getTasks().stream()
        .filter(candidate -> failedTask.getTaskId().equals(candidate.getRetriedTaskId()))
        .findFirst()
        .map(retryTask -> queueDAO.resetOffsetTime(
            QueueUtils.getQueueName(retryTask), retryTask.getTaskId()))
        .orElse(false);
}
|
// Verifies resetTaskOffset() returns false and never touches the queue DAO
// when no task in the workflow is a retry of the failed task (here the second
// task has retryCount set but its retriedTaskId does not point at taskId).
@Test
public void testResetOffsetNotFound() {
    String workflowId = "workflow-id";
    String taskId = "task-id-1";
    String taskId1 = "task-id-2";
    // Given: the failed task ...
    Task maestroTask = new Task();
    maestroTask.setTaskType(Constants.MAESTRO_TASK_NAME);
    maestroTask.setReferenceTaskName("maestroTask");
    maestroTask.setWorkflowInstanceId(workflowId);
    maestroTask.setScheduledTime(System.currentTimeMillis());
    maestroTask.setTaskId(taskId);
    maestroTask.setStatus(Task.Status.FAILED);
    maestroTask.setStartTime(123);
    maestroTask.setCallbackAfterSeconds(0);
    // ... and a second scheduled task that is NOT linked as its retry
    // (no retriedTaskId set, only a retry count)
    Task maestroTask1 = new Task();
    maestroTask1.setTaskType(Constants.MAESTRO_TASK_NAME);
    maestroTask1.setReferenceTaskName("maestroTask");
    maestroTask1.setWorkflowInstanceId(workflowId);
    maestroTask1.setScheduledTime(System.currentTimeMillis());
    maestroTask1.setTaskId(taskId1);
    maestroTask1.setStatus(Task.Status.SCHEDULED);
    maestroTask1.setStartTime(123);
    maestroTask1.setCallbackAfterSeconds(0);
    maestroTask1.setRetryCount(1);
    List<Task> tasks = new ArrayList<>();
    tasks.add(maestroTask);
    tasks.add(maestroTask1);
    Workflow workflow = new Workflow();
    workflow.setWorkflowId(workflowId);
    workflow.setStatus(Workflow.WorkflowStatus.RUNNING);
    workflow.setTasks(tasks);
    when(executionDAOFacade.getTaskById(taskId)).thenReturn(maestroTask);
    when(executionDAOFacade.getWorkflowById(workflowId, true)).thenReturn(workflow);
    // Then: no retry found -> false, and the queue DAO is untouched
    assertFalse(maestroWorkflowExecutor.resetTaskOffset(taskId));
    Mockito.verifyNoInteractions(queueDAO);
}
|
/**
 * Builds a PreparedStatement that inserts one config_info row.
 * Optional "advance info" attributes (desc/use/effect/type/schema) bind as
 * NULL when the map is absent; the gmt_create/gmt_modified columns are filled
 * by the mapper-generated SQL (the {@code @NOW()} markers), not bound here.
 *
 * @throws SQLException if statement creation or parameter binding fails
 */
PreparedStatement createPsForInsertConfigInfo(final String srcIp, final String srcUser, final ConfigInfo configInfo,
        Map<String, Object> configAdvanceInfo, Connection connection, ConfigInfoMapper configInfoMapper)
        throws SQLException {
    final String appName = StringUtils.defaultEmptyIfBlank(configInfo.getAppName());
    final String tenant = StringUtils.defaultEmptyIfBlank(configInfo.getTenant());
    // All advance-info fields are optional.
    final String desc = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("desc");
    final String use = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("use");
    final String effect = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("effect");
    final String type = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("type");
    final String schema = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("schema");
    final String encryptedDataKey =
        configInfo.getEncryptedDataKey() == null ? StringUtils.EMPTY : configInfo.getEncryptedDataKey();
    final String contentMd5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE);
    final String insertSql = configInfoMapper.insert(
        Arrays.asList("data_id", "group_id", "tenant_id", "app_name", "content", "md5", "src_ip", "src_user",
            "gmt_create@NOW()", "gmt_modified@NOW()", "c_desc", "c_use", "effect", "type", "c_schema",
            "encrypted_data_key"));
    final PreparedStatement ps = connection.prepareStatement(insertSql, configInfoMapper.getPrimaryKeyGeneratedKeys());
    // Bind parameters in the same order as the column list above (1-based JDBC indexes).
    final String[] bindValues = {
        configInfo.getDataId(), configInfo.getGroup(), tenant, appName, configInfo.getContent(),
        contentMd5, srcIp, srcUser, desc, use, effect, type, schema, encryptedDataKey
    };
    for (int index = 0; index < bindValues.length; index++) {
        ps.setString(index + 1, bindValues[index]);
    }
    return ps;
}
|
// Verifies createPsForInsertConfigInfo binds exactly 14 string parameters
// (the gmt_create/gmt_modified columns are handled by the SQL, not bound).
@Test
void testCreatePsForInsertConfigInfo() throws SQLException {
    // Given: a full advance-info map and a basic ConfigInfo
    Map<String, Object> configAdvanceInfo = new HashMap<>();
    configAdvanceInfo.put("config_tags", "tag1,tag2");
    configAdvanceInfo.put("desc", "desc11");
    configAdvanceInfo.put("use", "use2233");
    configAdvanceInfo.put("effect", "effect222");
    configAdvanceInfo.put("type", "type3");
    configAdvanceInfo.put("schema", "schema");
    String dataId = "dataId";
    String group = "group";
    String tenant = "tenant";
    String content = "content132456";
    ConfigInfo configInfo = new ConfigInfo(dataId, group, tenant, null, content);
    // ... and a mocked JDBC connection that hands back a mock statement
    Connection mockConnection = Mockito.mock(Connection.class);
    PreparedStatement preparedStatement = Mockito.mock(PreparedStatement.class);
    ConfigInfoMapper configInfoMapper = externalConfigInfoPersistService.mapperManager.findMapper(dataSourceService.getDataSourceType(),
        TableConstant.CONFIG_INFO);
    Mockito.when(mockConnection.prepareStatement(anyString(), any(String[].class))).thenReturn(preparedStatement);
    String srcIp = "srcIp";
    String srcUser = "srcUser";
    // When
    externalConfigInfoPersistService.createPsForInsertConfigInfo(srcIp, srcUser, configInfo, configAdvanceInfo, mockConnection,
        configInfoMapper);
    // Then: all 14 bound columns were set as strings
    Mockito.verify(preparedStatement, times(14)).setString(anyInt(), anyString());
}
|
/**
 * Creates a StringBuilder pre-loaded with the given sequences, in order.
 * A {@code null} or empty varargs array yields an empty builder; note that a
 * {@code null} element is appended as the literal text "null"
 * (StringBuilder.append semantics).
 *
 * @param charSequences sequences to concatenate; may be null or empty
 * @return a new builder containing the concatenation
 */
public static StringBuilder createBuilder(CharSequence... charSequences) {
    final StringBuilder result = new StringBuilder();
    if (charSequences != null) {
        for (int i = 0; i < charSequences.length; i++) {
            result.append(charSequences[i]);
        }
    }
    return result;
}
|
// Verifies createBuilder for both the null-array case (empty builder)
// and the normal concatenation case.
@Test
public void createBuilder() {
    StringBuilder s1 = StringUtil.createBuilder(null);
    assertEquals("", s1.toString());
    StringBuilder s2 = StringUtil.createBuilder("H", "ippo", "4j");
    Assert.assertEquals("Hippo4j", s2.toString());
}
|
/**
 * Cancels the given timer.
 *
 * @param timer the timer to cancel; must have been created by this instance
 *     (enforced by assertion only, so unchecked in production builds)
 * @return the result of {@link Timer#cancel()}
 * @deprecated delegate is trivial — presumably callers should invoke
 *     {@code timer.cancel()} directly; confirm against the class Javadoc
 */
@Deprecated
public boolean cancel(Timer timer)
{
    // Ownership sanity check: timer must belong to this timer set.
    assert (timer.parent == this);
    return timer.cancel();
}
|
// Verifies that a cancelled timer never fires: after cancel(), waiting past
// the timeout and running execute() must invoke zero handlers.
@Test
public void testCancel()
{
    long fullTimeout = 100;
    Timers.Timer timer = timers.add(fullTimeout, handler, invoked);
    // cancel timer
    long timeout = timers.timeout();
    boolean ret = timer.cancel();
    assertThat(ret, is(true));
    // Sleep well past the timer's deadline so it would have fired if alive.
    ZMQ.msleep(timeout * 2);
    int rc = timers.execute();
    // No timers executed, handler never invoked.
    assertThat(rc, is(0));
    assertThat(invoked.get(), is(false));
}
|
/**
 * Creates a timestamp-based lock from the given instant.
 *
 * @param time the point in time; its epoch-second value must be at least THRESHOLD
 * @return a TimeLock wrapping the epoch-second value
 * @throws IllegalArgumentException if the epoch seconds fall below THRESHOLD
 */
public static TimeLock ofTimestamp(Instant time) {
    final long epochSeconds = time.getEpochSecond();
    if (epochSeconds >= THRESHOLD) {
        return new TimeLock(epochSeconds);
    }
    throw new IllegalArgumentException("timestamp too low: " + epochSeconds);
}
|
// A timestamp before the epoch is below the validity threshold and must be rejected.
@Test(expected = IllegalArgumentException.class)
public void ofTimestamp_negative() {
    LockTime.ofTimestamp(Instant.EPOCH.minusSeconds(1));
}
|
/**
 * Fetches the session iterator for the given key from the first underlying
 * store that has data for it.
 *
 * <p>Stores without data for the key have their (empty) iterators closed
 * immediately so no resources leak while probing.
 *
 * @param key the key to look up; must not be null
 * @return the first non-empty iterator found, or an empty iterator if no
 *     store holds data for the key
 * @throws NullPointerException if {@code key} is null
 * @throws InvalidStateStoreException if an underlying store has migrated;
 *     rethrown with added context and the original exception as cause
 */
@Override
public KeyValueIterator<Windowed<K>, V> fetch(final K key) {
    Objects.requireNonNull(key, "key can't be null");
    final List<ReadOnlySessionStore<K, V>> stores = storeProvider.stores(storeName, queryableStoreType);
    for (final ReadOnlySessionStore<K, V> store : stores) {
        try {
            final KeyValueIterator<Windowed<K>, V> result = store.fetch(key);
            if (!result.hasNext()) {
                // Nothing for this key in this store; release the iterator and keep probing.
                result.close();
            } else {
                return result;
            }
        } catch (final InvalidStateStoreException ise) {
            // Preserve the original exception as the cause so the underlying
            // stack trace is not lost (previously only its toString was kept).
            throw new InvalidStateStoreException("State store [" + storeName + "] is not available anymore" +
                " and may have been migrated to another instance; " +
                "please re-discover its location from the state metadata. " +
                "Original error message: " + ise, ise);
        }
    }
    return KeyValueIterators.emptyIterator();
}
|
// fetch() must null-check its key argument before touching any store.
@Test
public void shouldThrowNullPointerExceptionIfFetchingNullKey() {
    assertThrows(NullPointerException.class, () -> sessionStore.fetch(null));
}
|
/**
 * Removes the link between the given connect points, if it exists and has a
 * base provider.
 *
 * @param src source connect point of the link
 * @param dst destination connect point of the link
 * @return the resulting link event, or null if there was nothing to remove
 */
@Override
public LinkEvent removeLink(ConnectPoint src, ConnectPoint dst) {
    final LinkKey key = LinkKey.linkKey(src, dst);
    final ProviderId baseProviderId = getBaseProviderId(key);
    // Without a base provider there is no authoritative description to remove.
    if (baseProviderId == null) {
        return null;
    }
    final LinkDescription removed =
        linkDescriptions.remove(new Provided<>(key, baseProviderId));
    // Only purge the cache (and emit an event) when something was actually removed.
    return removed == null ? null : purgeLinkCache(key);
}
|
// Verifies removeLink(): first removal emits LINK_REMOVED with the original
// annotations, a second removal of the same link returns null, the reverse
// link is untouched, and annotations do not survive re-adding the link.
@Test
public final void testRemoveLink() {
    final ConnectPoint d1P1 = new ConnectPoint(DID1, P1);
    final ConnectPoint d2P2 = new ConnectPoint(DID2, P2);
    LinkKey linkId1 = LinkKey.linkKey(d1P1, d2P2);
    LinkKey linkId2 = LinkKey.linkKey(d2P2, d1P1);
    // Given: both directions of the link, with different annotations
    putLink(linkId1, DIRECT, A1);
    putLink(linkId2, DIRECT, A2);
    // DID1,P1 => DID2,P2
    // DID2,P2 => DID1,P1
    // DID1,P2 => DID2,P3
    // When/Then: removing the forward link emits a removal event with A1
    LinkEvent event = linkStore.removeLink(d1P1, d2P2);
    assertEquals(LINK_REMOVED, event.type());
    assertAnnotationsEquals(event.subject().annotations(), A1);
    // Removing it again is a no-op (null event)
    LinkEvent event2 = linkStore.removeLink(d1P1, d2P2);
    assertNull(event2);
    // The reverse link and its annotations are unaffected
    assertLink(linkId2, DIRECT, linkStore.getLink(d2P2, d1P1));
    assertAnnotationsEquals(linkStore.getLink(d2P2, d1P1).annotations(), A2);
    // annotations, etc. should not survive remove
    putLink(linkId1, DIRECT);
    assertLink(linkId1, DIRECT, linkStore.getLink(d1P1, d2P2));
    assertAnnotationsEquals(linkStore.getLink(d1P1, d2P2).annotations());
}
|
/**
 * Implements the OAuth2 refresh-token grant by delegating entirely to the
 * token service.
 *
 * @param refreshToken the refresh token presented by the client
 * @param clientId the requesting client's id
 * @return the newly issued access token
 */
@Override
public OAuth2AccessTokenDO grantRefreshToken(String refreshToken, String clientId) {
    final OAuth2AccessTokenDO refreshedToken = oauth2TokenService.refreshAccessToken(refreshToken, clientId);
    return refreshedToken;
}
|
// Verifies the refresh-token grant is a pure delegation to
// oauth2TokenService.refreshAccessToken and returns its result unchanged.
@Test
public void testGrantRefreshToken() {
    // Given: random token and client id
    String refreshToken = randomString();
    String clientId = randomString();
    // ... and a stubbed token service returning a random access token
    OAuth2AccessTokenDO accessTokenDO = randomPojo(OAuth2AccessTokenDO.class);
    when(oauth2TokenService.refreshAccessToken(eq(refreshToken), eq(clientId)))
        .thenReturn(accessTokenDO);
    // When/Then: the grant returns exactly the stubbed token
    assertPojoEquals(accessTokenDO, oauth2GrantService.grantRefreshToken(
        refreshToken, clientId));
}
|
/**
 * Returns the interpreter result messages.
 * NOTE: exposes the internal list instance directly (no defensive copy).
 */
public List<InterpreterResultMessage> message() {
    return this.msg;
}
|
// Verifies how InterpreterResult splits mixed output on %-magic words
// (%table, %html): text before the first magic word becomes its own message,
// and each magic word starts a new message containing the text after it.
@Test
void testComplexMagicData() {
    InterpreterResult result = null;
    // Plain text followed by %table: two messages.
    result = new InterpreterResult(InterpreterResult.Code.SUCCESS,
        "some text before\n%table col1\tcol2\naaa\t123\n");
    assertEquals("some text before\n", result.message().get(0).getData(), "text before %table");
    assertEquals("col1\tcol2\naaa\t123\n", result.message().get(1).getData(), "text after %table");
    // %html then %table: the magic word prefix is stripped from each message.
    result = new InterpreterResult(InterpreterResult.Code.SUCCESS,
        "%html <h3> This is a hack </h3>\n%table\ncol1\tcol2\naaa\t123\n");
    assertEquals(" <h3> This is a hack </h3>\n", result.message().get(0).getData());
    assertEquals("col1\tcol2\naaa\t123\n", result.message().get(1).getData());
    // Three sections: leading text, %table, trailing %html.
    result = new InterpreterResult(InterpreterResult.Code.SUCCESS,
        "some text before magic word\n%table col1\tcol2\naaa\t123\n\n%html " +
            "<h3> This is a hack </h3>");
    assertEquals("<h3> This is a hack </h3>", result.message().get(2).getData());
    // %table / %html / %table: third message holds the second table body.
    result = new InterpreterResult(InterpreterResult.Code.SUCCESS,
        "%table col1\tcol2\naaa\t123\n\n%html <h3> This is a hack </h3>\n%table col1\naaa\n123\n");
    assertEquals("col1\naaa\n123\n", result.message().get(2).getData());
    // Two consecutive %table sections split into two messages.
    result = new InterpreterResult(InterpreterResult.Code.SUCCESS,
        "%table " + "col1\tcol2\naaa\t123\n\n%table col1\naaa\n123\n");
    assertEquals("col1\tcol2\naaa\t123\n", result.message().get(0).getData());
    assertEquals("col1\naaa\n123\n", result.message().get(1).getData());
}
|
// Creates an empty date cache; entries are added later (see the populate()
// call in the companion test).
public DateCache() {
    cache = new HashMap<String, Date>();
}
|
// Populating the cache for leap year 2016 yields 366 entries, and lookups
// return correct java.util.Date fields (deprecated getters, hence the suppression).
@SuppressWarnings( "deprecation" )
@Test
public void testDateCache() {
    DateCache cache = new DateCache();
    cache.populate( "yyyy-MM-dd", 2016, 2016 );
    assertEquals( 366, cache.getSize() ); // Leap year
    assertEquals( Calendar.FEBRUARY, cache.lookupDate( "2016-02-29" ).getMonth() );
    assertEquals( 29, cache.lookupDate( "2016-02-29" ).getDate() );
    // Date.getYear() is offset from 1900.
    assertEquals( ( 2016 - 1900 ), cache.lookupDate( "2016-02-29" ).getYear() );
}
|
/**
 * Attaches the given body to the request as an HttpEntity, using the
 * Content-Type from the supplied header.
 *
 * <p>No-op when the body is null or the request cannot carry an entity.
 * byte[] bodies become a ByteArrayEntity; Strings are used as-is and any
 * other object is JSON-serialized into a StringEntity.
 *
 * @throws Exception if entity construction fails
 */
public static void initRequestEntity(HttpRequestBase requestBase, Object body, Header header) throws Exception {
    // Nothing to attach, or the request type cannot enclose an entity.
    if (body == null || !(requestBase instanceof HttpEntityEnclosingRequest)) {
        return;
    }
    final HttpEntityEnclosingRequest enclosingRequest = (HttpEntityEnclosingRequest) requestBase;
    final MediaType mediaType = MediaType.valueOf(header.getValue(HttpHeaderConsts.CONTENT_TYPE));
    final ContentType contentType = ContentType.create(mediaType.getType(), mediaType.getCharset());
    final HttpEntity entity;
    if (body instanceof byte[]) {
        entity = new ByteArrayEntity((byte[]) body, contentType);
    } else {
        final String text = body instanceof String ? (String) body : JacksonUtils.toJson(body);
        entity = new StringEntity(text, contentType);
    }
    enclosingRequest.setEntity(entity);
}
|
// Verifies a plain String body becomes a StringEntity carrying the original
// text, with the Content-Type header applied (charset defaulted to UTF-8).
@Test
void testInitRequestEntity3() throws Exception {
    // Given: a GET-with-entity request and a text/html content-type header
    BaseHttpMethod.HttpGetWithEntity httpRequest = new BaseHttpMethod.HttpGetWithEntity("");
    Header header = Header.newInstance();
    header.addParam(HttpHeaderConsts.CONTENT_TYPE, "text/html");
    // When
    HttpUtils.initRequestEntity(httpRequest, "common text", header);
    // Then: the entity content round-trips the body text
    HttpEntity entity = httpRequest.getEntity();
    InputStream contentStream = entity.getContent();
    byte[] bytes = new byte[contentStream.available()];
    contentStream.read(bytes);
    assertEquals("common text", new String(bytes, Constants.ENCODE));
    assertEquals(HttpHeaderConsts.CONTENT_TYPE, entity.getContentType().getName());
    assertEquals("text/html; charset=UTF-8", entity.getContentType().getValue());
}
|
/**
 * Applies the script's GSUB features to the glyph id sequence, in the fixed
 * order given by FEATURES_IN_ORDER, after initial reph/glyph repositioning.
 *
 * @param originalGlyphIds glyph ids before substitution
 * @return an unmodifiable list of glyph ids after all applicable features
 */
@Override
public List<Integer> applyTransforms(List<Integer> originalGlyphIds)
{
    // Pre-GSUB reordering passes.
    List<Integer> intermediateGlyphsFromGsub = adjustRephPosition(originalGlyphIds);
    intermediateGlyphsFromGsub = repositionGlyphs(intermediateGlyphsFromGsub);
    for (String feature : FEATURES_IN_ORDER)
    {
        if (!gsubData.isFeatureSupported(feature))
        {
            // Fallback: when the font lacks rkrf but provides vatu, synthesize
            // the rkrf substitutions from the vatu feature instead.
            if (feature.equals(RKRF_FEATURE) && gsubData.isFeatureSupported(VATU_FEATURE))
            {
                // Create your own rkrf feature from vatu feature
                intermediateGlyphsFromGsub = applyRKRFFeature(
                        gsubData.getFeature(VATU_FEATURE),
                        intermediateGlyphsFromGsub);
            }
            LOG.debug("the feature {} was not found", feature);
            continue;
        }
        LOG.debug("applying the feature {}", feature);
        ScriptFeature scriptFeature = gsubData.getFeature(feature);
        intermediateGlyphsFromGsub = applyGsubFeature(scriptFeature,
                intermediateGlyphsFromGsub);
    }
    // Callers must not mutate the result.
    return Collections.unmodifiableList(intermediateGlyphsFromGsub);
}
|
// Verifies the Gujarati GSUB worker produces the expected glyph sequence for
// a string exercising below-base (blws) substitutions.
@Test
void testApplyTransforms_blws()
{
    // given: the expected post-GSUB glyph ids
    List<Integer> glyphsAfterGsub = Arrays.asList(278,76,333,337,276);
    // when
    List<Integer> result = gsubWorkerForGujarati.applyTransforms(getGlyphIds("હૃટ્રુણુરુ"));
    // then
    assertEquals(glyphsAfterGsub, result);
}
|
/**
 * Prints the configured plugins as a table; with --all, additionally prints
 * the supported built-in plugins that are not yet configured.
 *
 * @return 0 (always succeeds)
 * @throws Exception if loading the plugin configuration fails
 */
@Override
public Integer doCall() throws Exception {
    List<Row> rows = new ArrayList<>();
    JsonObject plugins = loadConfig().getMap("plugins");
    plugins.forEach((key, value) -> {
        JsonObject details = (JsonObject) value;
        // Fall back to the map key / name for missing fields.
        String name = details.getStringOrDefault("name", key);
        String command = details.getStringOrDefault("command", name);
        String dependency = details.getStringOrDefault("dependency",
            "org.apache.camel:camel-jbang-plugin-%s".formatted(command));
        String description
            = details.getStringOrDefault("description", "Plugin %s called with command %s".formatted(name, command));
        rows.add(new Row(name, command, dependency, description));
    });
    printRows(rows);
    if (all) {
        // Second table: known plugin types not present in the configuration.
        rows.clear();
        for (PluginType camelPlugin : PluginType.values()) {
            if (plugins.get(camelPlugin.getName()) == null) {
                String dependency = "org.apache.camel:camel-jbang-plugin-%s".formatted(camelPlugin.getCommand());
                rows.add(new Row(
                    camelPlugin.getName(), camelPlugin.getCommand(), dependency,
                    camelPlugin.getDescription()));
            }
        }
        if (!rows.isEmpty()) {
            printer().println();
            printer().println("Supported plugins:");
            printer().println();
            printRows(rows);
        }
    }
    return 0;
}
|
// With no plugins configured (and --all not set), the command prints nothing.
@Test
public void shouldGetEmptyPlugins() throws Exception {
    PluginGet command = new PluginGet(new CamelJBangMain().withPrinter(printer));
    command.doCall();
    Assertions.assertEquals("", printer.getOutput());
}
|
/**
 * Appends the wrapped cookie to the request's cookie list.
 * A null argument is ignored; existing cookies are preserved.
 */
public void addCookie(JDiscCookieWrapper cookie) {
    if (cookie == null) {
        return;
    }
    // Rebuild the cookie list: existing cookies first, then the new one.
    final List<Cookie> updated = new ArrayList<>();
    final List<Cookie> existing = getCookies();
    if (existing != null) {
        updated.addAll(existing);
    }
    updated.add(cookie.getCookie());
    setCookies(updated);
}
|
// Verifies addCookie() makes the wrapped cookie visible via getCookies().
// Fixed: assertEquals arguments were in (actual, expected) order, which
// produces misleading failure messages; JUnit's contract is (expected, actual).
@Test
void testAddCookie() {
    // Given: a filter request wrapping a plain GET request
    URI uri = URI.create("http://example.yahoo.com/test");
    HttpRequest httpReq = newRequest(uri, HttpRequest.Method.GET, HttpRequest.Version.HTTP_1_1);
    DiscFilterRequest request = new DiscFilterRequest(httpReq);
    // When: adding one wrapped cookie
    request.addCookie(JDiscCookieWrapper.wrap(new Cookie("name", "value")));
    // Then: exactly that cookie is present
    List<Cookie> cookies = request.getCookies();
    assertEquals(1, cookies.size());
    assertEquals("name", cookies.get(0).getName());
    assertEquals("value", cookies.get(0).getValue());
}
|
/**
 * Sets the geomap for this layout.
 * Geomap and sprites are mutually exclusive: if sprites were already set,
 * this call is rejected.
 *
 * @param geomap the geomap identifier to use
 * @return self, for chaining
 * @throws IllegalArgumentException if sprites have already been set
 */
public UiTopoLayout geomap(String geomap) {
    if (sprites == null) {
        this.geomap = geomap;
        return this;
    }
    throw new IllegalArgumentException(E_SPRITES_SET);
}
|
// Verifies the geomap starts unset on a fresh root layout and that
// geomap(String) stores the given value.
@Test
public void setGeomap() {
    mkRootLayout();
    assertEquals("geo to start", null, layout.geomap());
    layout.geomap(GEOMAP);
    assertEquals("wrong geo", GEOMAP, layout.geomap());
}
|
/**
 * Not supported by this adapter.
 *
 * @throws MethodNotAvailableException always
 */
@Override
@MethodNotAvailable
public CompletionStage<Void> setAsync(K key, V value) {
    throw new MethodNotAvailableException();
}
|
// setAsync is annotated @MethodNotAvailable and must always throw.
@Test(expected = MethodNotAvailableException.class)
public void testSetAsync() {
    adapter.setAsync(42, "value");
}
|
/**
 * Maps the pair's function type onto its stored representation, keeping the
 * column unchanged.
 *
 * @param functionColumnPair the logical function/column pair
 * @return a pair with the stored function type and the same column
 */
public static AggregationFunctionColumnPair resolveToStoredType(AggregationFunctionColumnPair functionColumnPair) {
    return new AggregationFunctionColumnPair(
        getStoredType(functionColumnPair.getFunctionType()),
        functionColumnPair.getColumn());
}
|
// Verifies resolveToStoredType: a "raw" variant maps to its stored function
// type, while already-stored types (count, sum) map to themselves.
@Test
public void testResolveToStoredType() {
    // Raw theta sketch resolves to the stored theta sketch type.
    assertEquals(AggregationFunctionColumnPair.fromColumnName("distinctCountThetaSketch__dimX"),
        AggregationFunctionColumnPair.resolveToStoredType(
            AggregationFunctionColumnPair.fromColumnName("distinctCountRawThetaSketch__dimX")));
    // Non-raw types are identity-mapped.
    assertEquals(AggregationFunctionColumnPair.fromColumnName("count__*"),
        AggregationFunctionColumnPair.resolveToStoredType(AggregationFunctionColumnPair.fromColumnName("count__*")));
    assertEquals(AggregationFunctionColumnPair.fromColumnName("sum__dimY"),
        AggregationFunctionColumnPair.resolveToStoredType(AggregationFunctionColumnPair.fromColumnName("sum__dimY")));
}
|
/**
 * Imports all classes reachable from the given URL.
 * Convenience overload delegating to {@link #importUrls} with a single-element list.
 */
@PublicAPI(usage = ACCESS)
public JavaClasses importUrl(URL url) {
    return importUrls(singletonList(url));
}
|
// Verifies a subclass imported from the test-example hierarchy exposes its
// expected members: 3 constructors, 1 field, 3 methods, and a static
// initializer that contains method calls.
@Test
public void imports_subclass_in_class_hierarchy_correctly() {
    JavaClass subclass = new ClassFileImporter().importUrl(getClass().getResource("testexamples/classhierarchyimport")).get(Subclass.class);
    assertThat(subclass.getConstructors()).hasSize(3);
    assertThat(subclass.getFields()).hasSize(1);
    assertThat(subclass.getMethods()).hasSize(3);
    assertThat(subclass.getStaticInitializer().get().getMethodCallsFromSelf()).isNotEmpty();
}
|
/**
 * Wraps the given operator into a non-parallelism-configured
 * OneInputTransformation over the input stream's transformation.
 *
 * @param operatorName name for the new transformation
 * @param inputStream upstream data stream supplying the input transformation
 * @param outTypeInformation output type of the new transformation
 * @param operator the user operator to wrap
 * @return the new one-input transformation
 */
public static <T, R> OneInputTransformation<T, R> getOneInputTransformation(
        String operatorName,
        AbstractDataStream<T> inputStream,
        TypeInformation<R> outTypeInformation,
        OneInputStreamOperator<T, R> operator) {
    // read the output type of the input Transform to coax out errors about MissingTypeInfo
    inputStream.getTransformation().getOutputType();
    return new OneInputTransformation<>(
            inputStream.getTransformation(),
            operatorName,
            SimpleUdfStreamOperatorFactory.of(operator),
            outTypeInformation,
            inputStream.getEnvironment().getParallelism(),
            false);
}
|
// Verifies getOneInputTransformation wires the operator and output type into
// the resulting transformation, with no key selector (non-keyed input).
@Test
void testGetOneInputTransformation() throws Exception {
    ExecutionEnvironmentImpl env = StreamTestUtils.getEnv();
    ProcessOperator<Integer, Long> operator =
        new ProcessOperator<>(new StreamTestUtils.NoOpOneInputStreamProcessFunction());
    OneInputTransformation<Integer, Long> transformation =
        StreamUtils.getOneInputTransformation(
            "op",
            new NonKeyedPartitionStreamImpl<>(
                env, new TestingTransformation<>("t", Types.INT, 1)),
            Types.LONG,
            operator);
    assertThat(transformation.getOperator()).isEqualTo(operator);
    assertThat(transformation.getOutputType()).isEqualTo(Types.LONG);
    // Non-keyed input -> no state key selector.
    assertThat(transformation.getStateKeySelector()).isNull();
}
|
/**
 * Creates a Partition transform that routes each element into one of
 * {@code numPartitions} outputs via the supplied side-input-aware function.
 *
 * @param numPartitions the fixed number of output partitions
 * @param partitionFn maps (element, numPartitions, context) to a partition index
 * @param requirements side-input requirements of {@code partitionFn}
 */
public static <T> Partition<T> of(
        int numPartitions,
        PartitionWithSideInputsFn<? super T> partitionFn,
        Requirements requirements) {
    // Adapt the user function into a Contextful fn, capturing numPartitions.
    Contextful ctfFn =
        Contextful.fn(
            (T element, Contextful.Fn.Context c) ->
                partitionFn.partitionFor(element, numPartitions, c),
            requirements);
    return new Partition<>(new PartitionDoFn<T>(numPartitions, ctfFn, partitionFn));
}
|
// Verifies Partition.of registers numPartitions and the partition function
// class in its display data.
@Test
public void testDisplayDataOfSideViewFunction() {
    Partition<?> partition = Partition.of(123, new IdentitySideViewFn(), Requirements.empty());
    DisplayData displayData = DisplayData.from(partition);
    assertThat(displayData, hasDisplayItem("numPartitions", 123));
    assertThat(displayData, hasDisplayItem("partitionFn", IdentitySideViewFn.class));
}
|
/**
 * Builds fetch requests for the assigned partitions and polls them,
 * delegating success/failure handling to the dedicated handlers.
 */
@Override
public PollResult poll(long currentTimeMs) {
    return pollInternal(
        prepareFetchRequests(),
        this::handleFetchSuccess,
        this::handleFetchFailure
    );
}
|
// Verifies fetch metrics exclude data returned at a stale offset: after
// seeking tp1 forward mid-fetch, only tp0's records count towards
// fetch-size/records-per-request averages.
@Test
public void testFetchResponseMetricsWithOnePartitionAtTheWrongOffset() {
    buildFetcher();
    assignFromUser(mkSet(tp0, tp1));
    subscriptions.seek(tp0, 0);
    subscriptions.seek(tp1, 0);
    Map<MetricName, KafkaMetric> allMetrics = metrics.metrics();
    KafkaMetric fetchSizeAverage = allMetrics.get(metrics.metricInstance(metricsRegistry.fetchSizeAvg));
    KafkaMetric recordsCountAverage = allMetrics.get(metrics.metricInstance(metricsRegistry.recordsPerRequestAvg));
    // send the fetch and then seek to a new offset
    assertEquals(1, sendFetches());
    subscriptions.seek(tp1, 5);
    // Build 3 records for tp0 (these are at the correct offset).
    MemoryRecordsBuilder builder = MemoryRecords.builder(ByteBuffer.allocate(1024), Compression.NONE,
        TimestampType.CREATE_TIME, 0L);
    for (int v = 0; v < 3; v++)
        builder.appendWithOffset(v, RecordBatch.NO_TIMESTAMP, "key".getBytes(), ("value-" + v).getBytes());
    MemoryRecords records = builder.build();
    Map<TopicIdPartition, FetchResponseData.PartitionData> partitions = new HashMap<>();
    partitions.put(tidp0, new FetchResponseData.PartitionData()
        .setPartitionIndex(tp0.partition())
        .setHighWatermark(100)
        .setLogStartOffset(0)
        .setRecords(records));
    // tp1's data arrives at offset 0, but the subscription now points at 5.
    partitions.put(tidp1, new FetchResponseData.PartitionData()
        .setPartitionIndex(tp1.partition())
        .setHighWatermark(100)
        .setLogStartOffset(0)
        .setRecords(MemoryRecords.withRecords(Compression.NONE, new SimpleRecord("val".getBytes()))));
    client.prepareResponse(FetchResponse.of(Errors.NONE, 0, INVALID_SESSION_ID, new LinkedHashMap<>(partitions)));
    networkClientDelegate.poll(time.timer(0));
    collectFetch();
    // we should have ignored the record at the wrong offset
    int expectedBytes = 0;
    for (Record record : records.records())
        expectedBytes += record.sizeInBytes();
    assertEquals(expectedBytes, (Double) fetchSizeAverage.metricValue(), EPSILON);
    assertEquals(3, (Double) recordsCountAverage.metricValue(), EPSILON);
}
|
/**
 * Prepares the export target file and runs the report export.
 * Deletes a pre-existing target (or fails if it cannot), ensures the parent
 * folder exists (creating it when configured to), then delegates to
 * {@code execute()}. Any failure is reported via the status listener.
 */
public void run() {
    try {
        targetFile = KettleVFS.getFileObject( targetPath );
        if ( targetFile.exists() ) {
            // Refuse to proceed if the stale target cannot be removed.
            if ( !targetFile.delete() ) {
                throw new ReportProcessingException( messages.getErrorString( "ReportExportTask.ERROR_0001_TARGET_EXISTS" ) ); //$NON-NLS-1$
            }
        }
        if ( createParentFolder ) {
            targetFile.getParent().createFolder();
        } else if ( !targetFile.getParent().exists() ) {
            throw new ReportProcessingException( messages.getString(
                "ReportExportTask.PARENT_FOLDER_DOES_NOT_EXIST", targetFile.getParent().getName().getPath() ) );
        }
        execute();
    } catch ( Exception ex ) {
        statusListener.setStatus( StatusType.ERROR, messages.getString( "ReportExportTask.USER_EXPORT_FAILED" ), ex ); //$NON-NLS-1$
        // Log the exception itself, not just a marker string, so the failure
        // cause and stack trace reach the log output.
        logger.error( "Failed", ex ); //$NON-NLS-1$
    }
}
|
// Verifies the export task completes without an ERROR status even when the
// GUI context reports a locale the task has no resource bundle for.
@Test
public void testExportReportWithUnsupportedLocale() {
    // Given: mocked report + GUI context using Locale.UK
    when( masterReport.getConfiguration() ).thenReturn( mock( Configuration.class ) );
    when( masterReport.getResourceManager() ).thenReturn( new ResourceManager() );
    when( swingGuiContext.getLocale() ).thenReturn( Locale.UK );
    when( swingGuiContext.getStatusListener() ).thenReturn( mock( StatusListener.class ) );
    // ... and a task producing a PDF report processor
    Runnable exportTask = new ReportExportTask( masterReport, swingGuiContext, targetPath, createParentFolder ) {
        protected ReportProcessor createReportProcessor( OutputStream fout ) throws Exception {
            PdfOutputProcessor outputProcessor =
                new PdfOutputProcessor( masterReport.getConfiguration(), fout, masterReport.getResourceManager() );
            return new PageableReportProcessor( masterReport, outputProcessor );
        }
    };
    assertNotNull( exportTask );
    // When/Then: running it does not flip the status to ERROR
    exportTask.run();
    assertThat( swingGuiContext.getStatusType(), not( StatusType.ERROR ) );
}
|
/**
 * Fetches the IDP list, memoizing successful results per URL string.
 * Subsequent calls with the same URL return the cached JWS without hitting
 * the delegate. NOTE(review): exceptions thrown by the delegate are not
 * cached (computeIfAbsent stores nothing on failure) — confirm intended.
 */
@NonNull
@Override
public IdpListJWS fetchIdpList(URI idpListUrl) {
    return idpListCache.computeIfAbsent(
        idpListUrl.toString(), k -> delegate.fetchIdpList(idpListUrl));
}
|
// Verifies a cache miss delegates to the underlying fetcher and returns its result.
@Test
void fetchIdpList() {
    // given
    var uri = URI.create("https://example.com/idpList");
    var expected = new IdpListJWS(null, null);
    when(delegate.fetchIdpList(uri)).thenReturn(expected);
    // when
    var got = sut.fetchIdpList(uri);
    // then
    verify(delegate).fetchIdpList(uri);
    assertEquals(expected, got);
}
|
/**
 * Creates sharding conditions for an INSERT statement, from either its VALUES
 * clause or its INSERT...SELECT source, then appends generated-key conditions.
 *
 * @param sqlStatementContext the insert statement context
 * @param params SQL parameter values
 * @return the sharding conditions (mutated in place by the generated-key pass)
 */
public List<ShardingCondition> createShardingConditions(final InsertStatementContext sqlStatementContext, final List<Object> params) {
    final List<ShardingCondition> result;
    if (null == sqlStatementContext.getInsertSelectContext()) {
        result = createShardingConditionsWithInsertValues(sqlStatementContext, params);
    } else {
        result = createShardingConditionsWithInsertSelect(sqlStatementContext, params);
    }
    appendGeneratedKeyConditions(sqlStatementContext, result);
    return result;
}
|
// Verifies that an insert with an empty common-expression segment plus a
// generated-key context still yields a condition starting at index 0 with a
// single value contributed by the generated key.
@Test
void assertCreateShardingConditionsInsertStatementWithGeneratedKeyContextUsingCommonExpressionSegmentEmpty() {
    // Given: an insert value context with an empty-text expression segment and a generated key
    when(insertStatementContext.getInsertValueContexts()).thenReturn(Collections.singletonList(createInsertValueContextAsCommonExpressionSegmentEmptyText()));
    when(insertStatementContext.getGeneratedKeyContext()).thenReturn(Optional.of(mock(GeneratedKeyContext.class)));
    when(shardingRule.findShardingColumn("foo_col_1", "foo_table")).thenReturn(Optional.of("foo_col_1"));
    // When
    List<ShardingCondition> shardingConditions = shardingConditionEngine.createShardingConditions(insertStatementContext, Collections.emptyList());
    // Then
    assertThat(shardingConditions.get(0).getStartIndex(), is(0));
    assertThat(shardingConditions.get(0).getValues().size(), is(1));
}
|
/**
 * Resolves the table schema and the projected read schema, then builds the
 * Iceberg object inspector for this SerDe.
 *
 * @param configuration job/session configuration; used for projection and flags
 * @param serDeProperties table properties (may carry a serialized schema)
 * @throws SerDeException if the object inspector cannot be created
 */
@Override
public void initialize(@Nullable Configuration configuration, Properties serDeProperties)
    throws SerDeException {
    // HiveIcebergSerDe.initialize is called multiple places in Hive code:
    // - When we are trying to create a table - HiveDDL data is stored at the serDeProperties, but
    // no Iceberg table
    // is created yet.
    // - When we are compiling the Hive query on HiveServer2 side - We only have table information
    // (location/name),
    // and we have to read the schema using the table data. This is called multiple times so there
    // is room for
    // optimizing here.
    // - When we are executing the Hive query in the execution engine - We do not want to load the
    // table data on every
    // executor, but serDeProperties are populated by
    // HiveIcebergStorageHandler.configureInputJobProperties() and
    // the resulting properties are serialized and distributed to the executors
    if (serDeProperties.get(InputFormatConfig.TABLE_SCHEMA) != null) {
        // Fast path: a serialized schema was shipped with the properties.
        this.tableSchema =
            SchemaParser.fromJson((String) serDeProperties.get(InputFormatConfig.TABLE_SCHEMA));
    } else {
        try {
            // always prefer the original table schema if there is one
            this.tableSchema = Catalogs.loadTable(configuration, serDeProperties).schema();
            LOG.info("Using schema from existing table {}", SchemaParser.toJson(tableSchema));
        } catch (Exception e) {
            boolean autoConversion =
                configuration.getBoolean(InputFormatConfig.SCHEMA_AUTO_CONVERSION, false);
            // If we can not load the table try the provided hive schema
            this.tableSchema = hiveSchemaOrThrow(serDeProperties, e, autoConversion);
        }
    }
    Schema projectedSchema;
    if (serDeProperties.get(HiveIcebergStorageHandler.WRITE_KEY) != null) {
        // when writing out data, we should not do projection pushdown
        projectedSchema = tableSchema;
    } else {
        configuration.setBoolean(InputFormatConfig.CASE_SENSITIVE, false);
        String[] selectedColumns = ColumnProjectionUtils.getReadColumnNames(configuration);
        // When same table is joined multiple times, it is possible some selected columns are
        // duplicated,
        // in this case wrong recordStructField position leads wrong value or
        // ArrayIndexOutOfBoundException
        String[] distinctSelectedColumns =
            Arrays.stream(selectedColumns).distinct().toArray(String[]::new);
        projectedSchema =
            distinctSelectedColumns.length > 0
                ? tableSchema.caseInsensitiveSelect(distinctSelectedColumns)
                : tableSchema;
        // the input split mapper handles does not belong to this table
        // it is necessary to ensure projectedSchema equals to tableSchema,
        // or we cannot find selectOperator's column from inspector
        if (projectedSchema.columns().size() != distinctSelectedColumns.length) {
            projectedSchema = tableSchema;
        }
    }
    try {
        this.inspector = IcebergObjectInspector.create(projectedSchema);
    } catch (Exception e) {
        // Surface inspector-construction failures through the SerDe contract.
        throw new SerDeException(e);
    }
}
|
// Verifies initialize() against a real HadoopTables table: the SerDe must
// expose an object inspector matching the table schema.
@Test
public void testInitialize() throws IOException, SerDeException {
    // Given: a fresh (non-existent) location backed by a newly created table
    File location = tmp.toFile();
    assertThat(location.delete()).isTrue();
    Configuration conf = new Configuration();
    Properties properties = new Properties();
    properties.setProperty("location", location.toString());
    properties.setProperty(InputFormatConfig.CATALOG_NAME, Catalogs.ICEBERG_HADOOP_TABLE_NAME);
    HadoopTables tables = new HadoopTables(conf);
    tables.create(SCHEMA, location.toString());
    // When
    HiveIcebergSerDe serDe = new HiveIcebergSerDe();
    serDe.initialize(conf, properties);
    // Then: the inspector reflects the created table's schema
    assertThat(serDe.getObjectInspector()).isEqualTo(IcebergObjectInspector.create(SCHEMA));
}
|
/**
 * Builds a ConfigMap with the given identity, labels, owner reference, and data.
 *
 * @param name name of the ConfigMap
 * @param namespace namespace it lives in
 * @param labels labels to attach (converted to a plain map)
 * @param ownerReference owner reference for garbage collection
 * @param data the key/value payload
 * @return the assembled ConfigMap
 */
public static ConfigMap createConfigMap(
        String name,
        String namespace,
        Labels labels,
        OwnerReference ownerReference,
        Map<String, String> data
) {
    return new ConfigMapBuilder()
            .withData(data)
            .withNewMetadata()
                .withNamespace(namespace)
                .withName(name)
                .withOwnerReferences(ownerReference)
                .withLabels(labels.toMap())
            .endMetadata()
            .build();
}
|
// Verifies createConfigMap wires name, namespace, owner reference, labels and
// data into the resulting ConfigMap, and adds no annotations.
@Test
public void testConfigMapCreation() {
    ConfigMap cm = ConfigMapUtils.createConfigMap(NAME, NAMESPACE, LABELS, OWNER_REFERENCE, Map.of("key1", "value1", "key2", "value2"));
    assertThat(cm.getMetadata().getName(), is(NAME));
    assertThat(cm.getMetadata().getNamespace(), is(NAMESPACE));
    assertThat(cm.getMetadata().getOwnerReferences(), is(List.of(OWNER_REFERENCE)));
    assertThat(cm.getMetadata().getLabels(), is(LABELS.toMap()));
    assertThat(cm.getMetadata().getAnnotations(), is(Map.of()));
    assertThat(cm.getData(), is(Map.of("key1", "value1", "key2", "value2")));
}
|
/**
 * Finds the index of the ')' that closes the method-argument list which opens
 * at or after {@code startOfMethodArgsIndex}, honoring nesting and quoted text.
 *
 * <p>Parentheses inside single- or double-quoted strings are ignored, as are
 * escaped quotes. Fixed: escape detection now counts consecutive preceding
 * backslashes, so an escaped backslash before a quote (e.g. {@code "my\\"})
 * no longer makes the quote appear escaped.
 *
 * @param string the text to scan
 * @param startOfMethodArgsIndex index to start scanning from
 * @return index of the matching closing parenthesis, or -1 if none is found
 */
public static int findEndOfMethodArgsIndex(CharSequence string, int startOfMethodArgsIndex) {
    boolean isDoubleQuoted = false;
    boolean isSingleQuoted = false;
    int nestingLevel = 0;
    for (int charIndex = startOfMethodArgsIndex; charIndex < string.length(); charIndex++) {
        boolean isCurrentCharEscaped = isEscaped(string, charIndex);
        switch (string.charAt(charIndex)) {
            case '(':
                if (!isDoubleQuoted && !isSingleQuoted) {
                    nestingLevel++;
                }
                break;
            case ')':
                if (!isDoubleQuoted && !isSingleQuoted) {
                    nestingLevel--;
                    if (nestingLevel == 0) {
                        return charIndex;
                    }
                }
                break;
            case '"':
                if (isCurrentCharEscaped || isSingleQuoted) {
                    // ignore escaped double quote and double quote inside single quotes (e.g 'text " text')
                    continue;
                }
                isDoubleQuoted = !isDoubleQuoted;
                break;
            case '\'':
                if (isCurrentCharEscaped || isDoubleQuoted) {
                    // ignore escaped single quote and single quote inside double quotes (e.g. "text ' text")
                    continue;
                }
                isSingleQuoted = !isSingleQuoted;
                break;
            default:
                // nothing to do, just continue with next character
        }
    }
    return -1;
}

// True when the character at charIndex is escaped, i.e. preceded by an ODD
// number of consecutive backslashes ("\x" escapes x, but "\\x" does not).
private static boolean isEscaped(CharSequence string, int charIndex) {
    int backslashCount = 0;
    for (int i = charIndex - 1; i >= 0 && string.charAt(i) == '\\'; i--) {
        backslashCount++;
    }
    return backslashCount % 2 == 1;
}
|
// Exercises findEndOfMethodArgsIndex over quoting/escaping/nesting cases; the
// helper presumably starts the scan at the first '(' and asserts the returned
// closing-paren index equals the expected value — confirm against the helper.
@Test
public void testFindEndOfMethodArgsIndex() {
    // Simple single/double-quoted arguments, with and without trailing calls.
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId(\"myId\")", 12);
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId(\"myId\").call()", 12);
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId('myId')", 12);
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId('myId').call()", 12);
    // Opposite-style quote characters embedded inside a quoted argument.
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId(\"my'Id\")", 13);
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId(\"my'Id\").call()", 13);
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId(\"my'Id'\")", 14);
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId(\"my'Id'\").call()", 14);
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId('my\"Id\"')", 14);
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId('my\"Id\"').call()", 14);
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId('my\"Id')", 13);
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId('my\"Id').call()", 13);
    // Backslash-escaped quotes inside a quoted argument.
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId(\"my\\\"Id\")", 14);
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId(\"my\\\"Id\").call()", 14);
    // Multiple arguments.
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId('myId', 'something')", 25);
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId(\"myId\", \"something\")", 25);
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId(\"my'Id\", \"somet'hing\")", 27);
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId(\"my'Id'\", \"somet'hing\")", 28);
    // Parentheses inside quotes must not affect nesting.
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setId(\"my'(Id\", \"somet'(hing'\")", 30);
    // Nested (real) parentheses from constructor calls.
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setObject(new Object())", 22);
    findEndOfMethodArgsIndexAndAssertItEqualsToExpected("setObject(new Object(\"string param\"))", 36);
}
|
/**
 * Generates the dialect-specific SQL statements for the configured column updates.
 *
 * @return the SQL statements to execute
 * @throws IllegalStateException when no column definition has been added
 */
public List<String> build() {
    // At least one column definition is required before any SQL can be produced.
    if (columnDefs.isEmpty()) {
        throw new IllegalStateException("No column has been defined");
    }
    // Dispatch on the dialect identifier; MS SQL and H2 share a common statement form.
    String dialectId = dialect.getId();
    if (PostgreSql.ID.equals(dialectId)) {
        return createPostgresQuery();
    }
    if (Oracle.ID.equals(dialectId)) {
        return createOracleQuery();
    }
    return createMsSqlAndH2Queries();
}
|
@Test
public void update_not_nullable_column_on_oracle() {
    // Oracle uses a single ALTER TABLE ... MODIFY statement with the NOT NULL constraint inline.
    assertThat(createNotNullableBuilder(new Oracle()).build())
        .containsOnly("ALTER TABLE issues MODIFY (name VARCHAR2 (10 CHAR) NOT NULL)");
}
|
/**
 * Advances the cursor by delegating directly to the underlying merged result set.
 *
 * @return true if there is a next row
 * @throws SQLException if the underlying result set fails
 */
@Override
public boolean next() throws SQLException {
    return mergeResultSet.next();
}
|
@Test
void assertNext() throws SQLException {
    // next() must simply forward to the merged result set's cursor.
    when(mergeResultSet.next()).thenReturn(true);
    assertTrue(shardingSphereResultSet.next());
}
|
/**
 * Imports the given classes, delegating to the collection-based overload.
 *
 * @param classes the classes to import
 * @return the imported {@link JavaClasses}
 */
@PublicAPI(usage = ACCESS)
public JavaClasses importClasses(Class<?>... classes) {
    return importClasses(Arrays.asList(classes));
}
|
@Test
public void imports_enclosing_method_of_anonymous_class() throws ClassNotFoundException {
    // An anonymous class created inside a method must report both the enclosing
    // code unit (the method) and the enclosing class after import.
    @SuppressWarnings("unused")
    class ClassCreatingAnonymousClassInMethod {
        void someMethod() {
            new Serializable() {
            };
        }
    }
    // Anonymous classes get the synthetic "$1" suffix from the compiler.
    String anonymousClassName = ClassCreatingAnonymousClassInMethod.class.getName() + "$1";
    JavaClasses classes = new ClassFileImporter().importClasses(
        ClassCreatingAnonymousClassInMethod.class, Class.forName(anonymousClassName)
    );
    JavaClass enclosingClass = classes.get(ClassCreatingAnonymousClassInMethod.class);
    JavaClass anonymousClass = classes.get(anonymousClassName);
    assertThat(anonymousClass.getEnclosingCodeUnit()).contains(enclosingClass.getMethod("someMethod"));
    assertThat(anonymousClass.getEnclosingClass()).contains(enclosingClass);
}
|
/**
 * Deletes the contents of the given directory (but not the directory itself).
 * A non-existing directory is treated as already clean and is a no-op.
 *
 * @param directory the directory to clean; must not be null
 * @throws NullPointerException if {@code directory} is null
 * @throws IOException          if deletion of the contents fails
 */
public static void cleanDirectory(File directory) throws IOException {
    requireNonNull(directory, DIRECTORY_CAN_NOT_BE_NULL);
    // Check existence on the File directly instead of the redundant
    // toPath().toFile().exists() round-trip of the previous implementation.
    if (!directory.exists()) {
        return;
    }
    cleanDirectoryImpl(directory.toPath());
}
|
@Test
public void cleanDirectory_throws_NPE_if_file_is_null() throws IOException {
    // The null guard must fire before any filesystem access, with the dedicated message.
    assertThatThrownBy(() -> FileUtils.cleanDirectory(null))
        .isInstanceOf(NullPointerException.class)
        .hasMessage("Directory can not be null");
}
|
/**
 * Hash code consistent with equals: combines material type, pipeline name and
 * stage name (the material's display name is deliberately excluded).
 */
@Override
public int hashCode() {
    int hash = materialType == null ? 0 : materialType.hashCode();
    hash = 31 * hash + (pipelineName == null ? 0 : pipelineName.hashCode());
    hash = 31 * hash + (stageName == null ? 0 : stageName.hashCode());
    return hash;
}
|
@Test
void hashCodeImplementation() throws Exception {
    // Two materials with the same pipeline/stage must hash equally even when their
    // display names differ (name is excluded from the hash), while a different
    // pipeline name must produce a different hash.
    DependencyMaterial one = new DependencyMaterial(new CaseInsensitiveString("pipelineName"), new CaseInsensitiveString("stage"));
    DependencyMaterial two = new DependencyMaterial(new CaseInsensitiveString("pipelineName"), new CaseInsensitiveString("stage"));
    two.setName(new CaseInsensitiveString("other-name-that-should-be-ignored-in-hashcode-generation"));
    assertThat(one.hashCode()).isEqualTo(two.hashCode());
    DependencyMaterial three = new DependencyMaterial(new CaseInsensitiveString("otherPipelineName"), new CaseInsensitiveString("stage"));
    assertThat(three.hashCode()).isNotEqualTo(one.hashCode());
}
|
/**
 * Registers the Hibernate Jackson module on the application's ObjectMapper so
 * Hibernate-managed entities (lazy proxies etc.) serialize correctly.
 */
@Override
public final void initialize(Bootstrap<?> bootstrap) {
    bootstrap.getObjectMapper().registerModule(createHibernate5Module());
}
|
@Test
void addsHibernateSupportToJackson() throws Exception {
    // initialize() must register exactly a Hibernate5JakartaModule on the bootstrap's mapper.
    final ObjectMapper objectMapperFactory = mock(ObjectMapper.class);
    final Bootstrap<?> bootstrap = mock(Bootstrap.class);
    when(bootstrap.getObjectMapper()).thenReturn(objectMapperFactory);
    bundle.initialize(bootstrap);
    // Capture the registered module to assert on its concrete type.
    final ArgumentCaptor<Module> captor = ArgumentCaptor.forClass(Module.class);
    verify(objectMapperFactory).registerModule(captor.capture());
    assertThat(captor.getValue()).isInstanceOf(Hibernate5JakartaModule.class);
}
|
/**
 * Removes the given partition schema from every open transformation,
 * keeping the shared object state in sync across transformations.
 */
public synchronized void deletePartitionSchema( PartitionSchema removed ) {
    synchronizeTransformations( true, transMeta -> transMeta.getPartitionSchemas().remove( removed ) );
}
|
@Test
public void synchronizePartitionSchemasDeleteFromRepository() throws Exception {
    // Deleting a partition schema from one transformation (backed by a repository) must
    // delete it from the repository AND remove it from every other open transformation
    // that shares the same schema (matched by object id).
    try {
        spoon.rep = repository;
        when( spoon.getRepository() ).thenReturn( repository );
        final String objectId = "object-id";
        final String partitionName = "partsch";
        // Two transformations, each holding a schema with the same object id.
        TransMeta trans1 = createTransMeta();
        trans1.setRepository( repository );
        trans1.setSharedObjects( createSharedObjects( SHARED_OBJECTS_FILE ) );
        PartitionSchema part1 = createPartitionSchema( partitionName, false );
        part1.setObjectId( new StringObjectId( objectId ) );
        trans1.addOrReplacePartitionSchema( part1 );
        spoon.delegates.trans.addTransformation( trans1 );
        TransMeta trans2 = createTransMeta();
        trans2.setRepository( repository );
        trans2.setSharedObjects( createSharedObjects( SHARED_OBJECTS_FILE ) );
        PartitionSchema part2 = createPartitionSchema( partitionName, false );
        part2.setObjectId( new StringObjectId( objectId ) );
        trans2.addOrReplacePartitionSchema( part2 );
        spoon.delegates.trans.addTransformation( trans2 );
        assertFalse( trans1.getPartitionSchemas().isEmpty() );
        // Delete from trans2; trans1's copy must disappear too.
        spoon.delegates.partitions.delPartitionSchema( trans2, part2 );
        verify( repository ).deletePartitionSchema( part2.getObjectId() );
        assertTrue( trans1.getPartitionSchemas().isEmpty() );
    } finally {
        // Reset the shared Spoon mock state so other tests are unaffected.
        spoon.rep = null;
        when( spoon.getRepository() ).thenReturn( null );
    }
}
|
/**
 * Resolves plugin-scoped template references of the form "plugin:&lt;name&gt;:&lt;template&gt;"
 * for both the owner template and the template itself. When both carry a plugin
 * prefix, the template's plugin name takes precedence over the owner's.
 *
 * @return a MatchResult carrying the resolved plugin name, owner template name,
 *         template name and whether any plugin prefix matched
 */
MatchResult matchPluginTemplate(String ownerTemplate, String template) {
    String pluginName = null;
    String ownerTemplateName = ownerTemplate;
    String templateName = template;
    boolean pluginScoped = false;
    // A plugin-prefixed owner template rebinds both the plugin name and the owner template name.
    if (StringUtils.isNotBlank(ownerTemplate)) {
        Matcher ownerMatcher = PLUGIN_TEMPLATE_PATTERN.matcher(ownerTemplate);
        if (ownerMatcher.matches()) {
            pluginScoped = true;
            pluginName = ownerMatcher.group(1);
            ownerTemplateName = ownerMatcher.group(2);
        }
    }
    // The template's own plugin prefix, when present, overrides the owner's.
    Matcher templateMatcher = PLUGIN_TEMPLATE_PATTERN.matcher(template);
    if (templateMatcher.matches()) {
        pluginScoped = true;
        pluginName = templateMatcher.group(1);
        templateName = templateMatcher.group(2);
    }
    return new MatchResult(pluginName, ownerTemplateName, templateName, pluginScoped);
}
|
@Test
void matchPluginTemplateWhenTemplateMatch() {
    // When only the template carries the "plugin:" prefix, the plugin name and template
    // name come from the template while the owner template name passes through unchanged.
    var result =
        templateResolver.matchPluginTemplate("doc", "plugin:fake-plugin:modules/layout");
    assertThat(result.matches()).isTrue();
    assertThat(result.pluginName()).isEqualTo("fake-plugin");
    assertThat(result.templateName()).isEqualTo("modules/layout");
    assertThat(result.ownerTemplateName()).isEqualTo("doc");
}
|
/**
 * Delegates string conversion to the wrapped datum.
 */
@Override
public String toString() {
    return datum.toString();
}
|
@Test
void testToString() {
    // The wrapper's toString must be exactly the wrapped datum's toString.
    String datum = "my string";
    AvroWrapper<CharSequence> wrapper = new AvroWrapper<>(datum);
    assertEquals(datum, wrapper.toString());
}
|
/**
 * Loads the Flume configuration from the backing properties file.
 * <p>
 * The Properties implementation can be swapped via the {@code propertiesImplementation}
 * system property. Any failure (I/O, resolver class missing, reflective instantiation)
 * is logged and an EMPTY configuration is returned instead of propagating the error.
 *
 * @return the parsed configuration, or an empty configuration on failure
 */
@Override
public FlumeConfiguration getFlumeConfiguration() {
  // try-with-resources replaces the previous manual finally/close block; a failure
  // while closing is delivered through the normal catch clauses (or suppressed).
  try (BufferedReader reader = new BufferedReader(new FileReader(file))) {
    String resolverClassName = System.getProperty("propertiesImplementation",
        DEFAULT_PROPERTIES_IMPLEMENTATION);
    Class<? extends Properties> propsclass = Class.forName(resolverClassName)
        .asSubclass(Properties.class);
    Properties properties = propsclass.getDeclaredConstructor().newInstance();
    properties.load(reader);
    return new FlumeConfiguration(toMap(properties));
  } catch (IOException ex) {
    LOGGER.error("Unable to load file:" + file
        + " (I/O failure) - Exception follows.", ex);
  } catch (ClassNotFoundException | NoClassDefFoundError e) {
    LOGGER.error("Configuration resolver class not found", e);
  } catch (InstantiationException e) {
    LOGGER.error("Instantiation exception", e);
  } catch (IllegalAccessException e) {
    LOGGER.error("Illegal access exception", e);
  } catch (InvocationTargetException e) {
    LOGGER.error("Invocation target exception", e);
  } catch (NoSuchMethodException e) {
    LOGGER.error("No such method exception", e);
  }
  // Fall-through on any handled failure: behave as if the file were empty.
  return new FlumeConfiguration(new HashMap<String, String>());
}
|
@Test
public void testPropertyRead() {
    // Parses the fixture file and checks both the expected validation errors and
    // the successfully validated agent configuration for "host1".
    FlumeConfiguration configuration = provider.getFlumeConfiguration();
    assertNotNull(configuration);
    /*
     * Test the known errors in the file
     */
    List<String> expected = Lists.newArrayList();
    expected.add("host5 CONFIG_ERROR");
    expected.add("host5 INVALID_PROPERTY");
    expected.add("host4 CONFIG_ERROR");
    expected.add("host4 CONFIG_ERROR");
    expected.add("host4 PROPERTY_VALUE_NULL");
    expected.add("host4 PROPERTY_VALUE_NULL");
    expected.add("host4 PROPERTY_VALUE_NULL");
    expected.add("host4 AGENT_CONFIGURATION_INVALID");
    expected.add("ch2 ATTRS_MISSING");
    expected.add("host3 CONFIG_ERROR");
    expected.add("host3 PROPERTY_VALUE_NULL");
    expected.add("host3 AGENT_CONFIGURATION_INVALID");
    expected.add("host2 PROPERTY_VALUE_NULL");
    expected.add("host2 AGENT_CONFIGURATION_INVALID");
    List<String> actual = Lists.newArrayList();
    for (FlumeConfigurationError error : configuration.getConfigurationErrors()) {
        actual.add(error.getComponentName() + " " + error.getErrorType().toString());
    }
    // Sort both sides so the comparison is order-independent.
    Collections.sort(expected);
    Collections.sort(actual);
    assertEquals(actual, expected);
    AgentConfiguration agentConfiguration =
        configuration.getConfigurationFor("host1");
    assertNotNull(agentConfiguration);
    LOGGER.info(agentConfiguration.getPrevalidationConfig());
    LOGGER.info(agentConfiguration.getPostvalidationConfig());
    // host1 is the valid agent: exactly one source, sink and channel survive validation.
    Set<String> sources = Sets.newHashSet("source1");
    Set<String> sinks = Sets.newHashSet("sink1");
    Set<String> channels = Sets.newHashSet("channel1");
    assertEquals(agentConfiguration.getSourceSet(), sources);
    assertEquals(agentConfiguration.getSinkSet(), sinks);
    assertEquals(agentConfiguration.getChannelSet(), channels);
}
|
/**
 * Returns the URL of this artifact's POM file.
 */
public String getPomUrl() {
    return pomUrl;
}
|
@Test
public void getPomUrl() {
    // The POM URL derived from the jar URL must swap the ".jar" suffix for ".pom".
    // Given
    final MavenArtifact mavenArtifact = new MavenArtifact("com.google.code.gson", "gson", "2.1",
        "https://artifactory.techno.ingenico.com/artifactory/jcenter-cache/com/google/code/gson/gson/2.1/gson-2.1.jar", MavenArtifact.derivePomUrl("gson", "2.1",
        "https://artifactory.techno.ingenico.com/artifactory/jcenter-cache/com/google/code/gson/gson/2.1/gson-2.1.jar"));
    // When
    final String pomUrl = mavenArtifact.getPomUrl();
    // Then
    assertEquals("https://artifactory.techno.ingenico.com/artifactory/jcenter-cache/com/google/code/gson/gson/2.1/gson-2.1.pom", pomUrl);
}
|
/**
 * Records a successful login attempt in the authentication debug log.
 * <p>
 * The login value is sanitized (control characters replaced) and length-capped
 * before logging to prevent log injection and log flooding. Arguments are passed
 * as suppliers so the work is only done when DEBUG is enabled.
 *
 * @param request the incoming HTTP request; must not be null
 * @param login   the authenticated login, may be null (logged as empty)
 * @param source  the authentication source; must not be null
 */
@Override
public void loginSuccess(HttpRequest request, @Nullable String login, Source source) {
    checkRequest(request);
    requireNonNull(source, "source can't be null");
    LOGGER.atDebug().setMessage("login success [method|{}][provider|{}|{}][IP|{}|{}][login|{}]")
        .addArgument(source::getMethod)
        .addArgument(source::getProvider)
        .addArgument(source::getProviderName)
        .addArgument(request::getRemoteAddr)
        .addArgument(() -> getAllIps(request))
        .addArgument(() -> preventLogFlood(sanitizeLog(emptyIfNull(login))))
        .log();
}
|
@Test
public void login_success_message_is_sanitized() {
    // CR/LF characters in the login must be replaced (here with '_') so a malicious
    // login cannot forge extra log lines (log injection).
    logTester.setLevel(Level.DEBUG);
    underTest.loginSuccess(mockRequest("1.2.3.4"), "login with \n malicious line \r return", Source.sso());
    assertThat(logTester.logs()).isNotEmpty()
        .contains("login success [method|SSO][provider|SSO|sso][IP|1.2.3.4|][login|login with _ malicious line _ return]");
}
|
/**
 * Signs the given text with HMAC-SHA1 using the given key and returns the
 * Base64-encoded signature.
 *
 * @param encryptText the text to sign
 * @param encryptKey  the secret key material
 * @return Base64-encoded HMAC-SHA1 signature
 * @throws RuntimeException wrapping any failure (bad encoding, missing algorithm,
 *                          invalid key, or null input)
 */
public static String signWithHmacSha1Encrypt(String encryptText, String encryptKey) {
    try {
        byte[] data = encryptKey.getBytes(Constants.ENCODE);
        // Construct a key according to the given byte array, and the second parameter specifies the name of a key algorithm
        SecretKey secretKey = new SecretKeySpec(data, SHA_ENCRYPT);
        // Generate a Mac object specifying Mac algorithm
        Mac mac = Mac.getInstance(SHA_ENCRYPT);
        // Initialize the Mac object with the given key
        mac.init(secretKey);
        byte[] text = encryptText.getBytes(Constants.ENCODE);
        byte[] textFinal = mac.doFinal(text);
        // Complete Mac operation, base64 encoding, convert byte array to string
        return new String(Base64.encodeBase64(textFinal), Constants.ENCODE);
    } catch (Exception e) {
        // Message fixed to match the actual method name (was "signWithhmacSHA1Encrypt").
        throw new RuntimeException("signWithHmacSha1Encrypt fail", e);
    }
}
|
@Test
void testSignWithHmacSha1EncryptWithException() {
    // A null text triggers an exception (NPE on getBytes) which surfaces to the caller.
    assertThrows(Exception.class, () -> {
        SpasAdapter.signWithHmacSha1Encrypt(null, "123");
    });
}
|
/**
 * Routes all assertion failures through the gatherer so they can be collected
 * and reported together at the end of the test rather than aborting on the first.
 */
private Expect(ExpectationGatherer gatherer) {
    super(FailureMetadata.forFailureStrategy(gatherer));
    this.gatherer = checkNotNull(gatherer);
}
|
@Test
public void expectFailWithExceptionBeforeExpectFailures() {
    // An exception thrown before any expect-failure is recorded must propagate as-is;
    // the expect calls after throwException() are intentionally unreachable.
    thrown.expect(IllegalStateException.class);
    thrown.expectMessage("testing");
    throwException();
    expect.withMessage("x").fail();
    expect.withMessage("y").fail();
}
|
/**
 * Renders the subject's principals as a bracketed prefix for log output,
 * or an empty string when the subject has no principals.
 */
protected String toString(Subject subject) {
    PrincipalCollection principals = subject.getPrincipals();
    if (principals == null || principals.isEmpty()) {
        return "";
    }
    return "[" + principals.toString() + "] ";
}
|
@Test
public void testSubjectToString() {
    // A subject without principals must render as an empty string, not "[null] ".
    Subject subject = new PermsSubject() {
        @Override
        public PrincipalCollection getPrincipals() {
            return null;
        }
    };
    String string = filter.toString(subject);
    assertEquals("", string);
}
|
/**
 * Returns whether the value contains at least one Chinese character,
 * as matched by {@code ReUtil.RE_CHINESES}.
 */
public static boolean hasChinese(CharSequence value) {
    return ReUtil.contains(ReUtil.RE_CHINESES, value);
}
|
@Test
public void hasChineseTest() {
    // Covers all-Chinese, mixed Latin/Chinese, a supplementary-plane CJK character
    // (surrogate pair), and a pure-ASCII negative case.
    assertTrue(Validator.hasChinese("黄单桑米"));
    assertTrue(Validator.hasChinese("Kn 四兄弟"));
    assertTrue(Validator.hasChinese("\uD840\uDDA3"));
    assertFalse(Validator.hasChinese("Abc"));
}
|
/**
 * Formats the timer event into a log message and hands it to the configured consumer.
 */
@Override
public void accept(TimerEvent timerEvent) {
    logMessageConsumer.accept(buildLogMessage(timerEvent));
}
|
@Test
public void testAccept() {
    // Feeds START/LAP/FINISHED events for the root timer plus a LAP for a child timer,
    // then verifies the exact formatted message produced for each.
    TimerEventHandler timerEventHandler = new TimerEventHandler(logMessageQueue::add);
    timerEventHandler.accept(
        new TimerEvent(State.START, ROOT_TIMER, Duration.ZERO, Duration.ZERO, "description"));
    timerEventHandler.accept(
        new TimerEvent(State.LAP, ROOT_TIMER, Duration.ofMillis(10), Duration.ZERO, "description"));
    timerEventHandler.accept(
        new TimerEvent(
            State.FINISHED, ROOT_TIMER, Duration.ofMillis(100), Duration.ZERO, "description"));
    // Child event: its parent is the root timer, so the message gets an extra indent.
    timerEventHandler.accept(
        new TimerEvent(
            State.LAP,
            () -> Optional.of(ROOT_TIMER),
            Duration.ZERO,
            Duration.ZERO,
            "child description"));
    String rootStartMessage = logMessageQueue.poll();
    Assert.assertNotNull(rootStartMessage);
    Assert.assertEquals("TIMING\tdescription", rootStartMessage);
    String rootInProgressMessage = logMessageQueue.poll();
    Assert.assertNotNull(rootInProgressMessage);
    Assert.assertEquals("TIMED\tdescription : 10.0 ms", rootInProgressMessage);
    String rootFinishedMessage = logMessageQueue.poll();
    Assert.assertNotNull(rootFinishedMessage);
    Assert.assertEquals("TIMED\tdescription : 100.0 ms", rootFinishedMessage);
    String childMessage = logMessageQueue.poll();
    Assert.assertNotNull(childMessage);
    Assert.assertEquals("\tTIMED\tchild description : 0.0 ms", childMessage);
    // No extra messages may have been produced.
    Assert.assertTrue(logMessageQueue.isEmpty());
}
|
/**
 * Renders columns and rows as a Zeppelin TABLE result (tab-separated cells,
 * newline-separated rows, header row first).
 * <p>
 * Rows shorter than the header are padded with nulls so every row has one cell
 * per column. NOTE: padding intentionally mutates the caller's row lists, as before.
 *
 * @param cols  column names; when empty an empty SUCCESS result is returned
 * @param lines table rows, each a list of cell values
 * @return a SUCCESS result carrying the rendered table
 */
private InterpreterResult renderTable(List<String> cols, List<List<String>> lines) {
    LOGGER.info("Executing renderTable method");
    // Early return replaces the previous null-initialized StringBuilder dance:
    // no columns means no table.
    if (cols.isEmpty()) {
        return new InterpreterResult(Code.SUCCESS, "");
    }
    StringBuilder msg = new StringBuilder(TABLE);
    msg.append(NEW_LINE);
    msg.append(StringUtils.join(cols, TAB));
    msg.append(NEW_LINE);
    for (List<String> line : lines) {
        // Pad short rows with nulls up to the column count.
        while (line.size() < cols.size()) {
            line.add(null);
        }
        msg.append(StringUtils.join(line, TAB));
        msg.append(NEW_LINE);
    }
    return new InterpreterResult(Code.SUCCESS, msg.toString());
}
|
@Test
void testRenderTable() {
    // Runs a Cypher query against the test graph and checks the rendered TABLE output:
    // a tab-separated header followed by one row per matched node.
    interpreter.open();
    InterpreterResult result = interpreter.interpret("MATCH (n:Person) "
        + "WHERE n.name IN ['name1', 'name2', 'name3'] "
        + "RETURN n.name AS name, n.age AS age, "
        + "n.address AS address, n.birth AS birth", context);
    assertEquals(Code.SUCCESS, result.code());
    final String tableResult = "name\tage\taddress\tbirth\n" +
        "name1\t1\tPoint{srid=4979, x=56.7, y=12.78, z=8.0}\t1984-04-04\n" +
        "name2\t2\tPoint{srid=4979, x=56.7, y=12.78, z=8.0}\t1984-04-04\n" +
        "name3\t3\tPoint{srid=4979, x=56.7, y=12.78, z=8.0}\t1984-04-04\n";
    // Strip the %table prefix before comparing the payload.
    assertEquals(tableResult, result.toString().replace(TABLE_RESULT_PREFIX, StringUtils.EMPTY));
}
|
/**
 * Verifies a server host key against the keys previously accepted and stored
 * in preferences. A stored key that matches is accepted immediately; otherwise
 * the decision is delegated to the unknown-key or changed-key callback.
 *
 * @return true when the key is accepted
 */
@Override
public boolean verify(final Host host, final PublicKey key) throws BackgroundException {
    String lookup = preferences.getProperty(this.toFormat(host, key));
    if(StringUtils.isEmpty(lookup)) {
        // Backward compatiblity to find keys with no port number saved
        lookup = preferences.getProperty(this.toFormat(host, key, false));
    }
    final String encoded = Base64.toBase64String(key.getEncoded());
    if(StringUtils.equals(encoded, lookup)) {
        if(log.isInfoEnabled()) {
            log.info(String.format("Accepted host key %s matching %s", key, lookup));
        }
        return true;
    }
    // No stored entry means the key is unknown; a mismatching entry means it changed.
    return null == lookup
            ? this.isUnknownKeyAccepted(host, key)
            : this.isChangedKeyAccepted(host, key);
}
|
@Test
public void testVerifyDenyServerHostKey() throws Exception {
    // When both the unknown-key and changed-key callbacks reject, verify() must
    // return false and nothing may be persisted to preferences.
    PreferencesHostKeyVerifier v = new PreferencesHostKeyVerifier() {
        @Override
        public boolean isChangedKeyAccepted(Host hostname, PublicKey key) {
            return false;
        }
        @Override
        public boolean isUnknownKeyAccepted(Host hostname, final PublicKey key) {
            return false;
        }
    };
    // A fixed RSA public key built from known modulus/exponent values.
    final PublicKey key = SecurityUtils.getKeyFactory("RSA").generatePublic(new RSAPublicKeySpec(new BigInteger("a19f65e93926d9a2f5b52072db2c38c54e6cf0113d31fa92ff827b0f3bec609c45ea84264c88e64adba11ff093ed48ee0ed297757654b0884ab5a7e28b3c463bc9074b32837a2b69b61d914abf1d74ccd92b20fa44db3b31fb208c0dd44edaeb4ab097118e8ee374b6727b89ad6ce43f1b70c5a437ccebc36d2dad8ae973caad15cd89ae840fdae02cae42d241baef8fda8aa6bbaa54fd507a23338da6f06f61b34fb07d560e63fbce4a39c073e28573c2962cedb292b14b80d1b4e67b0465f2be0e38526232d0a7f88ce91a055fde082038a87ed91f3ef5ff971e30ea6cccf70d38498b186621c08f8fdceb8632992b480bf57fc218e91f2ca5936770fe9469", 16),
        new BigInteger("23", 16)));
    assertFalse(v.verify(new Host(new SFTPProtocol(), "bhostname", 22), key));
    assertNull(PreferencesFactory.get().getProperty("ssh.hostkey.ssh-rsa.bhostname"));
}
|
/**
 * Extracts the WHERE segments of join clauses from the statement's FROM part,
 * returning an empty collection when the statement has no FROM.
 */
public static Collection<WhereSegment> getJoinWhereSegments(final SelectStatement selectStatement) {
    return selectStatement.getFrom().map(WhereExtractUtils::getJoinWhereSegments).orElseGet(Collections::emptyList);
}
|
@Test
void assertGetJoinWhereSegmentsWithEmptySelectStatement() {
    // A statement without a FROM clause (mock returns Optional.empty) yields an empty collection.
    assertTrue(WhereExtractUtils.getJoinWhereSegments(mock(SelectStatement.class)).isEmpty());
}
|
/**
 * Reads a length-prefixed short array from the stream.
 * <p>
 * A length equal to {@code NULL_ARRAY_LENGTH} yields {@code null}; a length of
 * zero (or any other non-positive value) yields an empty array.
 *
 * @return the decoded array, an empty array, or null
 * @throws EOFException if the stream ends prematurely
 */
@Override
@Nullable
public short[] readShortArray() throws EOFException {
    final int len = readInt();
    if (len == NULL_ARRAY_LENGTH) {
        return null;
    }
    if (len <= 0) {
        return new short[0];
    }
    final short[] values = new short[len];
    for (int i = 0; i < len; i++) {
        values[i] = readShort();
    }
    return values;
}
|
@Test
public void testReadShortArray() throws Exception {
    // Byte layouts encode, in order: a zero-length array, a one-element array {1},
    // and the NULL_ARRAY_LENGTH marker (-1) at the tail.
    byte[] bytesBE = {0, 0, 0, 0, 0, 0, 0, 1, 0, 1, -1, -1, -1, -1};
    byte[] bytesLE = {0, 0, 0, 0, 1, 0, 0, 0, 1, 0, -1, -1, -1, -1};
    in.init((byteOrder == BIG_ENDIAN ? bytesBE : bytesLE), 0);
    // Position at the -1 length marker -> null array.
    in.position(bytesLE.length - 4);
    short[] theNullArray = in.readShortArray();
    // Position at the leading zero length -> empty array.
    in.position(0);
    short[] theZeroLengthArray = in.readShortArray();
    // Position at the length-1 prefix -> single-element array.
    in.position(4);
    short[] booleanArray = in.readShortArray();
    assertNull(theNullArray);
    assertArrayEquals(new short[0], theZeroLengthArray);
    assertArrayEquals(new short[]{1}, booleanArray);
}
|
/**
 * Records a Real User Monitoring (RUM) hit parsed from request parameters.
 * <p>
 * Requires a {@code requestName} parameter plus numeric {@code serverTime},
 * {@code timeToFirstByte}, {@code domProcessing} and {@code pageRendering}
 * values. Malformed or missing numeric parameters are silently ignored.
 */
public static void addRumHit(HttpServletRequest httpRequest, Counter httpCounter) {
    final String requestName = httpRequest.getParameter("requestName");
    if (requestName == null) {
        return;
    }
    try {
        final long serverTime = Long.parseLong(httpRequest.getParameter("serverTime"));
        final long timeToFirstByte = Long
                .parseLong(httpRequest.getParameter("timeToFirstByte"));
        final long domProcessing = Long.parseLong(httpRequest.getParameter("domProcessing"));
        final long pageRendering = Long.parseLong(httpRequest.getParameter("pageRendering"));
        // Network time is the gap between first byte and server time, clamped at zero.
        final long networkTime = Math.max(timeToFirstByte - serverTime, 0);
        httpCounter.addRumHit(requestName, networkTime, domProcessing, pageRendering);
    } catch (final NumberFormatException ignored) {
        // Best-effort: a hit with unparseable timing values is deliberately dropped.
    }
}
|
@Test
public void testAddRumHit() {
    // Smoke-tests addRumHit against null names, unparseable values, valid values
    // (with and without a pre-existing request), and out-of-range timings.
    final Counter httpCounter = new Counter(Counter.HTTP_COUNTER_NAME, "dbweb.png");
    // test null requestName
    addRumHit(httpCounter, null, null, null, null, null);
    final String requestName = "test";
    // test non-parseable values
    addRumHit(httpCounter, requestName, null, null, null, null);
    addRumHit(httpCounter, requestName, "a", "b", "c", "d");
    // test valid values without existing request
    addRumHit(httpCounter, requestName, "100", "200", "300", "400");
    // test with existing request and creating rumData
    httpCounter.addRequest(requestName, 100, 10, 10, false, 1);
    addRumHit(httpCounter, requestName, "100", "200", "300", "400");
    // test with existing request and rumData
    addRumHit(httpCounter, requestName, "100", "200", "300", "400");
    // test aberrant values
    addRumHit(httpCounter, requestName, "-100", "200", "300", "400");
    addRumHit(httpCounter, requestName, "300001", "200", "300", "400");
    addRumHit(httpCounter, requestName, "100", "-200", "300", "400");
    addRumHit(httpCounter, requestName, "100", "300101", "300", "400");
    addRumHit(httpCounter, requestName, "100", "200", "-300", "400");
    addRumHit(httpCounter, requestName, "100", "200", "300001", "400");
    addRumHit(httpCounter, requestName, "100", "200", "300", "-400");
    addRumHit(httpCounter, requestName, "100", "200", "300", "300001");
}
|
/**
 * Returns a PTransform that reads Pub/Sub message payloads as UTF-8 strings.
 */
public static Read<String> readStrings() {
    return Read.newBuilder(
            (PubsubMessage message) -> new String(message.getPayload(), StandardCharsets.UTF_8))
        .setCoder(StringUtf8Coder.of())
        .build();
}
|
@Test
public void testTopicValidationTooLong() throws Exception {
    // A topic name exceeding the Pub/Sub length limit must be rejected at configuration time.
    thrown.expect(IllegalArgumentException.class);
    PubsubIO.readStrings()
        .fromTopic(
            new StringBuilder()
                .append("projects/my-project/topics/A-really-long-one-")
                .append(
                    "111111111111111111111111111111111111111111111111111111111111111111111111111111111")
                .append(
                    "111111111111111111111111111111111111111111111111111111111111111111111111111111111")
                .append(
                    "1111111111111111111111111111111111111111111111111111111111111111111111111111")
                .toString());
}
|
/**
 * Accepts a point pair only when both the time delta and the distance between
 * the two points are small.
 */
@Override
public boolean test(Pair<Point, Point> pair) {
    /*
     * Reject points with large time deltas up front because we don't want to rely
     * on a numerically unstable process.
     */
    if (!timeDeltaIsSmall(pair.first().time(), pair.second().time())) {
        return false;
    }
    return distIsSmall(pair);
}
|
@Test
public void testCase4() {
    // Two points within half the distance threshold and half the time threshold
    // must pass the filter in either order (the predicate is symmetric here).
    DistanceFilter filter = newTestFilter();
    LatLong position1 = new LatLong(0.0, 0.0);
    double notTooFarInNm = MAX_DISTANCE_IN_FEET * 0.5 / Spherical.feetPerNM();
    Point p1 = new PointBuilder()
        .latLong(position1)
        .time(Instant.EPOCH)
        .altitude(Distance.ofFeet(500.0))
        .build();
    Point p2 = new PointBuilder()
        .latLong(position1.projectOut(90.0, notTooFarInNm)) //move the position
        .time(Instant.EPOCH.plusMillis(MAX_TIME_DELTA_IN_MILLISEC / 2))
        .altitude(Distance.ofFeet(500.0))
        .build();
    assertTrue(filter.test(Pair.of(p1, p2)));
    assertTrue(filter.test(Pair.of(p2, p1)));
}
|
/**
 * Builds a schema restricted to the projected fields, with no always-included fields.
 * Convenience overload delegating to the three-argument variant.
 */
public static DataSchema buildSchemaByProjection(DataSchema schema, DataMap maskMap)
{
  return buildSchemaByProjection(schema, maskMap, Collections.emptyList());
}
|
@Test
public void testBuildSchemaByEmptyProjection()
{
  // An empty projection mask is invalid input and must raise IllegalArgumentException
  // with the exact "Invalid projection masks." message.
  DataSchema schema = DataTemplateUtil.getSchema(RecordTemplateWithPrimitiveKey.class);
  DataMap projectionMask = buildProjectionMaskDataMap();
  try
  {
    buildSchemaByProjection(schema, projectionMask);
  }
  catch (IllegalArgumentException e)
  {
    Assert.assertEquals(e.getMessage(), "Invalid projection masks.");
    return;
  }
  Assert.fail("Building schema by empty projection should throw an IllegalArgumentException");
}
|
/**
 * Copies a file server-side via the Storegate files API, overwriting any existing
 * target, and returns the target path with attributes refreshed from the new file.
 *
 * @param source   file to copy
 * @param target   destination path (an existing file is overwritten)
 * @param status   transfer status; its length is reported to the listener on success
 * @param callback unused here (no interactive prompt is needed for server-side copy)
 * @param listener notified with the transferred byte count
 * @return the target path carrying the copied file's attributes
 * @throws BackgroundException when the API call fails
 */
@Override
public Path copy(final Path source, final Path target, final TransferStatus status, final ConnectionCallback callback, final StreamListener listener) throws BackgroundException {
    try {
        final CopyFileRequest copy = new CopyFileRequest()
            .name(target.getName())
            .parentID(fileid.getFileId(target.getParent()))
            .mode(1); // Overwrite
        final File file = new FilesApi(session.getClient()).filesCopy(
            fileid.getFileId(source), copy);
        listener.sent(status.getLength());
        // Cache the new file id so subsequent lookups of the target resolve without a listing.
        fileid.cache(target, file.getId());
        return target.withAttributes(new StoregateAttributesFinderFeature(session, fileid).toAttributes(file));
    }
    catch(ApiException e) {
        throw new StoregateExceptionMappingService(fileid).map("Cannot copy {0}", e, source);
    }
}
|
@Test
public void testCopyServerSideToExistingFile() throws Exception {
    // Copying onto an already-existing target file must succeed (overwrite mode) and
    // yield a file id different from the source's.
    final StoregateIdProvider fileid = new StoregateIdProvider(session);
    final Path top = new StoregateDirectoryFeature(session, fileid).mkdir(new Path(
        String.format("/My files/%s", new AlphanumericRandomStringService().random()), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus());
    final Path sourceFolder = new Path(top, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory));
    final Path targetFolder = new Path(top, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory));
    new StoregateDirectoryFeature(session, fileid).mkdir(sourceFolder, new TransferStatus());
    new StoregateDirectoryFeature(session, fileid).mkdir(targetFolder, new TransferStatus());
    final Path test = new StoregateTouchFeature(session, fileid).touch(new Path(sourceFolder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus());
    // Pre-create the target so the copy exercises the overwrite path.
    final Path copy = new Path(targetFolder, test.getName(), EnumSet.of(Path.Type.file));
    new StoregateTouchFeature(session, fileid).touch(copy, new TransferStatus());
    final StoregateCopyFeature feature = new StoregateCopyFeature(session, fileid);
    assertTrue(feature.isSupported(test, copy));
    assertNotEquals(test.attributes().getFileId(), new StoregateCopyFeature(session, fileid).copy(test, copy, new TransferStatus().exists(true), new DisabledConnectionCallback(), new DisabledStreamListener()).attributes().getFileId());
    final Find find = new DefaultFindFeature(session);
    // NOTE(review): `files` is never asserted on — the listing call looks like leftover setup.
    final AttributedList<Path> files = new StoregateListService(session, fileid).list(targetFolder, new DisabledListProgressListener());
    assertTrue(find.find(copy));
    // Clean up the whole test tree.
    new StoregateDeleteFeature(session, fileid).delete(Collections.singletonList(top), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
|
/**
 * Blocking variant: takes the last element of this queue (waiting if empty) and
 * pushes it to the head of the named queue, delegating to the async implementation.
 *
 * @throws InterruptedException if interrupted while waiting for an element
 */
@Override
public V takeLastAndOfferFirstTo(String queueName) throws InterruptedException {
    return commandExecutor.getInterrupted(takeLastAndOfferFirstToAsync(queueName));
}
|
@Test
public void testTakeLastAndOfferFirstTo() throws InterruptedException {
    final RBlockingQueue<Integer> queue1 = redisson.getBlockingQueue("{queue}1");
    // Deliver the element ~3s later so takeLastAndOfferFirstTo has to block for it.
    Executors.newSingleThreadScheduledExecutor().schedule(() -> {
        try {
            queue1.put(3);
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing the exception
            // (previously printStackTrace with a leftover TODO).
            Thread.currentThread().interrupt();
        }
    }, 3, TimeUnit.SECONDS);
    RBlockingQueue<Integer> queue2 = redisson.getBlockingQueue("{queue}2");
    queue2.put(4);
    queue2.put(5);
    queue2.put(6);
    long startTime = System.currentTimeMillis();
    Integer value = queue1.takeLastAndOfferFirstTo(queue2.getName());
    // The call must have blocked until the scheduled put (~3s).
    assertThat(System.currentTimeMillis() - startTime).isBetween(2900L, 3200L);
    assertThat(value).isEqualTo(3);
    // The taken element lands at the HEAD of queue2.
    assertThat(queue2).containsExactly(3, 4, 5, 6);
}
|
/**
 * Validates that every given dictionary value exists under the dict type and is enabled.
 * An empty or null value collection is trivially valid.
 *
 * @param dictType the dictionary type the values belong to
 * @param values   the dictionary values to validate
 */
@Override
public void validateDictDataList(String dictType, Collection<String> values) {
    if (CollUtil.isEmpty(values)) {
        return;
    }
    // Fetch all matching entries in one query and index them by value.
    Map<String, DictDataDO> dictDataMap = CollectionUtils.convertMap(
            dictDataMapper.selectByDictTypeAndValues(dictType, values), DictDataDO::getValue);
    // Validate each value: it must resolve to an existing, enabled dictionary entry.
    values.forEach(value -> {
        DictDataDO dictData = dictDataMap.get(value);
        if (dictData == null) {
            throw exception(DICT_DATA_NOT_EXISTS);
        }
        if (!CommonStatusEnum.ENABLE.getStatus().equals(dictData.getStatus())) {
            throw exception(DICT_DATA_NOT_ENABLE, dictData.getLabel());
        }
    });
}
|
@Test
public void testValidateDictDataList_notFound() {
    // Prepare parameters: a random value that does not exist for the dict type.
    String dictType = randomString();
    List<String> values = singletonList(randomString());
    // Call and assert the DICT_DATA_NOT_EXISTS service exception is raised.
    assertServiceException(() -> dictDataService.validateDictDataList(dictType, values), DICT_DATA_NOT_EXISTS);
}
|
/**
 * Converts the given expression into its generated-code string form.
 */
public String process(final Expression expression) {
    return formatExpression(expression);
}
|
@Test
public void shouldGenerateCorrectCodeForDateDateLT() {
    // A Date < Date comparison must compile to a compareTo(...) < 0 expression on java.sql.Date.
    // Given:
    final ComparisonExpression compExp = new ComparisonExpression(
        Type.LESS_THAN,
        DATECOL,
        DATECOL
    );
    // When:
    final String java = sqlToJavaVisitor.process(compExp);
    // Then:
    assertThat(java, containsString("(((java.sql.Date) arguments.get(\"COL13\")).compareTo(((java.sql.Date) arguments.get(\"COL13\"))) < 0)"));
}
|
/**
 * Elects a master for the broker set named in the request.
 * <p>
 * Outcomes, in order of evaluation:
 * <ul>
 *   <li>broker set unregistered -> CONTROLLER_BROKER_NEED_TO_BE_REGISTERED;</li>
 *   <li>the elected broker equals the current master -> CONTROLLER_MASTER_STILL_EXIST
 *       (current epochs and master info returned, nothing changes);</li>
 *   <li>a new master is elected -> epochs are bumped, the sync-state set is reset to
 *       the new master only, and an ElectMasterEvent is emitted;</li>
 *   <li>election fails -> CONTROLLER_MASTER_NOT_AVAILABLE (controller-triggered, with a
 *       "master down" event) or CONTROLLER_ELECT_MASTER_FAILED (broker-triggered).</li>
 * </ul>
 *
 * @param request     carries the broker set name, the requesting broker id, and
 *                    whether this broker is designated to be elected by force
 * @param electPolicy strategy used to pick the new master from the candidate set
 * @return result holding the response header, optional response body and raft events
 */
public ControllerResult<ElectMasterResponseHeader> electMaster(final ElectMasterRequestHeader request,
    final ElectPolicy electPolicy) {
    final String brokerName = request.getBrokerName();
    final Long brokerId = request.getBrokerId();
    final ControllerResult<ElectMasterResponseHeader> result = new ControllerResult<>(new ElectMasterResponseHeader());
    final ElectMasterResponseHeader response = result.getResponse();
    if (!isContainsBroker(brokerName)) {
        // this broker set hasn't been registered
        result.setCodeAndRemark(ResponseCode.CONTROLLER_BROKER_NEED_TO_BE_REGISTERED, "Broker hasn't been registered");
        return result;
    }
    final SyncStateInfo syncStateInfo = this.syncStateSetInfoTable.get(brokerName);
    final BrokerReplicaInfo brokerReplicaInfo = this.replicaInfoTable.get(brokerName);
    final Set<Long> syncStateSet = syncStateInfo.getSyncStateSet();
    final Long oldMaster = syncStateInfo.getMasterBrokerId();
    // With unclean election enabled, ANY replica is a candidate; otherwise the policy
    // is restricted to the in-sync set (allReplicaBrokers stays null).
    Set<Long> allReplicaBrokers = controllerConfig.isEnableElectUncleanMaster() ? brokerReplicaInfo.getAllBroker() : null;
    Long newMaster = null;
    if (syncStateInfo.isFirstTimeForElect()) {
        // If never have a master in this broker set, in other words, it is the first time to elect a master
        // elect it as the first master
        newMaster = brokerId;
    }
    // elect by policy
    if (newMaster == null || newMaster == -1) {
        // we should assign this assignedBrokerId when the brokerAddress need to be elected by force
        Long assignedBrokerId = request.getDesignateElect() ? brokerId : null;
        newMaster = electPolicy.elect(brokerReplicaInfo.getClusterName(), brokerReplicaInfo.getBrokerName(), syncStateSet, allReplicaBrokers, oldMaster, assignedBrokerId);
    }
    if (newMaster != null && newMaster.equals(oldMaster)) {
        // old master still valid, change nothing
        String err = String.format("The old master %s is still alive, not need to elect new master for broker %s", oldMaster, brokerReplicaInfo.getBrokerName());
        LOGGER.warn("{}", err);
        // the master still exist
        response.setMasterEpoch(syncStateInfo.getMasterEpoch());
        response.setSyncStateSetEpoch(syncStateInfo.getSyncStateSetEpoch());
        response.setMasterBrokerId(oldMaster);
        response.setMasterAddress(brokerReplicaInfo.getBrokerAddress(oldMaster));
        result.setBody(new ElectMasterResponseBody(syncStateSet).encode());
        result.setCodeAndRemark(ResponseCode.CONTROLLER_MASTER_STILL_EXIST, err);
        return result;
    }
    // a new master is elected
    if (newMaster != null) {
        final int masterEpoch = syncStateInfo.getMasterEpoch();
        final int syncStateSetEpoch = syncStateInfo.getSyncStateSetEpoch();
        // The sync-state set collapses to just the new master; replicas re-join later.
        final HashSet<Long> newSyncStateSet = new HashSet<>();
        newSyncStateSet.add(newMaster);
        response.setMasterBrokerId(newMaster);
        response.setMasterAddress(brokerReplicaInfo.getBrokerAddress(newMaster));
        response.setMasterEpoch(masterEpoch + 1);
        response.setSyncStateSetEpoch(syncStateSetEpoch + 1);
        ElectMasterResponseBody responseBody = new ElectMasterResponseBody(newSyncStateSet);
        BrokerMemberGroup brokerMemberGroup = buildBrokerMemberGroup(brokerReplicaInfo);
        if (null != brokerMemberGroup) {
            responseBody.setBrokerMemberGroup(brokerMemberGroup);
        }
        result.setBody(responseBody.encode());
        final ElectMasterEvent event = new ElectMasterEvent(brokerName, newMaster);
        result.addEvent(event);
        LOGGER.info("Elect new master {} for broker {}", newMaster, brokerName);
        return result;
    }
    // If elect failed and the electMaster is triggered by controller (we can figure it out by brokerAddress),
    // we still need to apply an ElectMasterEvent to tell the statemachine
    // that the master was shutdown and no new master was elected.
    if (request.getBrokerId() == null || request.getBrokerId() == -1) {
        final ElectMasterEvent event = new ElectMasterEvent(false, brokerName);
        result.addEvent(event);
        result.setCodeAndRemark(ResponseCode.CONTROLLER_MASTER_NOT_AVAILABLE, "Old master has down and failed to elect a new broker master");
    } else {
        result.setCodeAndRemark(ResponseCode.CONTROLLER_ELECT_MASTER_FAILED, "Failed to elect a new master");
    }
    LOGGER.warn("Failed to elect a new master for broker {}", brokerName);
    return result;
}
|
// Verifies the full master-election flow: an initial controller-triggered
// election that must not pick broker 1, a sync-state-set update, a failed
// admin-triggered election of a dead broker, a rejected re-election of the
// still-alive master, and a successful admin reassignment to broker 1.
@Test
public void testElectMaster() {
mockMetaData();
final ElectMasterRequestHeader request = ElectMasterRequestHeader.ofControllerTrigger(DEFAULT_BROKER_NAME);
final ControllerResult<ElectMasterResponseHeader> cResult = this.replicasInfoManager.electMaster(request,
new DefaultElectPolicy((cluster, brokerName, brokerId) -> !brokerId.equals(1L), null));
final ElectMasterResponseHeader response = cResult.getResponse();
assertEquals(2, response.getMasterEpoch().intValue());
assertNotEquals(1L, response.getMasterBrokerId().longValue());
assertNotEquals(DEFAULT_IP[0], response.getMasterAddress());
apply(cResult.getEvents());
final Set<Long> brokerSet = new HashSet<>();
brokerSet.add(1L);
brokerSet.add(2L);
brokerSet.add(3L);
assertTrue(alterNewInSyncSet(DEFAULT_BROKER_NAME, response.getMasterBrokerId(), response.getMasterEpoch(), brokerSet, response.getSyncStateSetEpoch()));
// Admin tries to elect an assigned master, but it isn't alive: election fails.
final ElectMasterRequestHeader assignRequest = ElectMasterRequestHeader.ofAdminTrigger(DEFAULT_CLUSTER_NAME, DEFAULT_BROKER_NAME, 1L);
final ControllerResult<ElectMasterResponseHeader> cResult1 = this.replicasInfoManager.electMaster(assignRequest,
new DefaultElectPolicy((cluster, brokerName, brokerId) -> !brokerId.equals(1L), null));
assertEquals(cResult1.getResponseCode(), ResponseCode.CONTROLLER_ELECT_MASTER_FAILED);
// Admin tries to elect an assigned master while the old master is still alive
// and equal to the assigned master: request is rejected.
final ElectMasterRequestHeader assignRequest1 = ElectMasterRequestHeader.ofAdminTrigger(DEFAULT_CLUSTER_NAME, DEFAULT_BROKER_NAME, response.getMasterBrokerId());
final ControllerResult<ElectMasterResponseHeader> cResult2 = this.replicasInfoManager.electMaster(assignRequest1,
new DefaultElectPolicy((cluster, brokerName, brokerId) -> true, null));
assertEquals(cResult2.getResponseCode(), ResponseCode.CONTROLLER_MASTER_STILL_EXIST);
// Admin successfully elects the assigned master (broker 1); epoch advances to 3.
final ElectMasterRequestHeader assignRequest2 = ElectMasterRequestHeader.ofAdminTrigger(DEFAULT_CLUSTER_NAME, DEFAULT_BROKER_NAME, 1L);
final ControllerResult<ElectMasterResponseHeader> cResult3 = this.replicasInfoManager.electMaster(assignRequest2,
new DefaultElectPolicy((cluster, brokerName, brokerId) -> !brokerId.equals(response.getMasterBrokerId()), null));
assertEquals(cResult3.getResponseCode(), ResponseCode.SUCCESS);
final ElectMasterResponseHeader response3 = cResult3.getResponse();
assertEquals(1L, response3.getMasterBrokerId().longValue());
assertEquals(DEFAULT_IP[0], response3.getMasterAddress());
assertEquals(3, response3.getMasterEpoch().intValue());
}
|
/**
 * Resolves the NFS access privilege for a client address.
 *
 * <p>Delegates to the string-based overload using the client's literal IP
 * and its canonical host name.
 *
 * @param addr the client address to check
 * @return the access privilege granted to this client
 */
public AccessPrivilege getAccessPrivilege(InetAddress addr) {
    final String clientIp = addr.getHostAddress();
    final String clientHost = addr.getCanonicalHostName();
    return getAccessPrivilege(clientIp, clientHost);
}
|
// Verifies that a regex export entry ("[a-z]+.b.com rw") grants READ_WRITE,
// and that a second lookup for the same address takes the cached result even
// when paired with a different hostname.
@Test
public void testRegexHostRW() {
NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
"[a-z]+.b.com rw");
Assert.assertEquals(AccessPrivilege.READ_WRITE,
matcher.getAccessPrivilege(address1, hostname1));
// address1 will hit the cache
Assert.assertEquals(AccessPrivilege.READ_WRITE,
matcher.getAccessPrivilege(address1, hostname2));
}
|
/**
 * Returns the Telegram Bot API method name used for this request
 * (the {@code PATH} constant of this class).
 */
@Override
public String getMethod() {
return PATH;
}
|
// Builds a SetChatMenuButton request with the default menu button and checks
// that it reports the correct API method name and passes validation.
@Test
public void testGetChatMenuButtonAsDefault() {
SetChatMenuButton setChatMenuButton = SetChatMenuButton
.builder()
.menuButton(MenuButtonDefault.builder().build())
.build();
assertEquals("setChatMenuButton", setChatMenuButton.getMethod());
assertDoesNotThrow(setChatMenuButton::validate);
}
|
/**
 * Configures the alert severity filter on the OLT device via NETCONF.
 *
 * <p>Only the local mastership owner may push configuration; requests on
 * non-master nodes and unknown severity levels are rejected.
 *
 * @param severity severity level to set; must be one of {@code SEVERITYLEVELS}
 * @return true if the edit-config succeeded, false otherwise
 */
@Override
public boolean setAlertFilter(String severity) {
    DriverHandler handler = handler();
    NetconfController controller = handler.get(NetconfController.class);
    MastershipService mastershipService = handler.get(MastershipService.class);
    DeviceId ncDeviceId = handler.data().deviceId();
    checkNotNull(controller, "Netconf controller is null");
    // Only the mastership owner of this device is allowed to push config.
    if (!mastershipService.isLocalMaster(ncDeviceId)) {
        log.warn("Not master for {} Use {} to execute command",
                 ncDeviceId,
                 mastershipService.getMasterFor(ncDeviceId));
        return false;
    }
    if (!SEVERITYLEVELS.contains(severity)) {
        log.error("Invalid severity level: {}", severity);
        return false;
    }
    try {
        // Build <volt-ne><volt-alerts><alert-filter>SEVERITY</...> payload.
        StringBuilder request = new StringBuilder();
        request.append(VOLT_NE_OPEN + VOLT_NE_NAMESPACE);
        request.append(ANGLE_RIGHT + NEW_LINE);
        request.append(buildStartTag(VOLT_ALERTS))
            .append(buildStartTag(ALERT_FILTER, false))
            .append(severity)
            .append(buildEndTag(ALERT_FILTER))
            .append(buildEndTag(VOLT_ALERTS))
            .append(VOLT_NE_CLOSE);
        controller.getDevicesMap().get(ncDeviceId).getSession().
            editConfig(RUNNING, null, request.toString());
    } catch (NetconfException e) {
        // Pass the exception as the final argument (no placeholder) so SLF4J
        // records the full stack trace instead of only its toString().
        log.error("Cannot communicate to device {}", ncDeviceId, e);
        return false;
    }
    return true;
}
|
// Iterates over all valid severity values and asserts that setAlertFilter
// accepts each one; currentKey tracks the test-case index for mock replies.
@Test
public void testValidSetAlertFilter() throws Exception {
String target;
boolean result;
for (int i = ZERO; i < VALID_SET_TCS.length; i++) {
target = VALID_SET_TCS[i];
currentKey = i;
result = voltConfig.setAlertFilter(target);
assertTrue("Incorrect response for ", result);
}
}
|
/**
 * Builds a human-readable "operation ongoing" lull message for a thread stuck
 * in this execution state, including the step name (when labelled), the
 * elapsed duration, the state name, and the thread's full stack trace.
 *
 * @param trackedThread the thread whose stack trace is reported
 * @param millis how long the operation has been ongoing
 * @return a multi-line diagnostic message
 */
@VisibleForTesting
public String getLullMessage(Thread trackedThread, Duration millis) {
    // TODO(ajamato): Share getLullMessage code with DataflowExecutionState.
    final StringBuilder sb = new StringBuilder("Operation ongoing");
    final String stepName =
        this.labelsMetadata.getOrDefault(MonitoringInfoConstants.Labels.PTRANSFORM, null);
    if (stepName != null) {
        sb.append(" in step ").append(stepName);
    }
    sb.append(" for at least ")
        .append(formatDuration(millis))
        .append(" without outputting or completing in state ")
        .append(getStateName())
        .append("\n");
    for (StackTraceElement frame : trackedThread.getStackTrace()) {
        sb.append(" at ").append(frame).append("\n");
    }
    return sb.toString();
}
|
// Verifies the lull message contains both the state name and the PTRANSFORM
// step name taken from the labels metadata.
@Test
public void testGetLullReturnsARelevantMessageWithStepName() {
HashMap<String, String> labelsMetadata = new HashMap<String, String>();
labelsMetadata.put(MonitoringInfoConstants.Labels.PTRANSFORM, "myPTransform");
SimpleExecutionState testObject = new SimpleExecutionState("myState", null, labelsMetadata);
String message = testObject.getLullMessage(new Thread(), Duration.millis(100_000));
assertThat(message, containsString("myState"));
assertThat(message, containsString("myPTransform"));
}
|
/**
 * Generates the source for a KiePMMLRegressionTable implementation class from
 * the given PMML regression table.
 *
 * <p>A fresh class name is derived from a global counter ({@code classArity})
 * so repeated invocations never collide. The class template is cloned, its
 * static getter method is populated from the regression table, and the result
 * is returned as a (fully-qualified class name, source text) pair.
 *
 * @param regressionTable the PMML regression table to compile
 * @param compilationDTO compilation context (package name, fields, ...)
 * @return entry of fully-qualified generated class name to its Java source
 * @throws KiePMMLException if the template's main class cannot be found
 */
public static Map.Entry<String, String> getRegressionTableBuilder(final RegressionTable regressionTable,
final RegressionCompilationDTO compilationDTO) {
logger.trace("getRegressionTableBuilder {}", regressionTable);
String className = "KiePMMLRegressionTable" + classArity.addAndGet(1);
CompilationUnit cloneCU = JavaParserUtils.getKiePMMLModelCompilationUnit(className,
compilationDTO.getPackageName(),
KIE_PMML_REGRESSION_TABLE_TEMPLATE_JAVA, KIE_PMML_REGRESSION_TABLE_TEMPLATE);
ClassOrInterfaceDeclaration tableTemplate = cloneCU.getClassByName(className)
.orElseThrow(() -> new KiePMMLException(MAIN_CLASS_NOT_FOUND + ": " + className));
final MethodDeclaration staticGetterMethod =
tableTemplate.getMethodsByName(GETKIEPMML_TABLE).get(0);
setStaticGetter(regressionTable, compilationDTO, staticGetterMethod, className.toLowerCase());
return new AbstractMap.SimpleEntry<>(getFullClassName(cloneCU), cloneCU.toString());
}
|
// Builds a minimal PMML model (one categorical target field, one regression
// table with CAUCHIT normalization), generates the regression-table source via
// the factory, and checks the generated source compiles.
@Test
void getRegressionTableBuilder() {
regressionTable = getRegressionTable(3.5, "professional");
RegressionModel regressionModel = new RegressionModel();
regressionModel.setNormalizationMethod(RegressionModel.NormalizationMethod.CAUCHIT);
regressionModel.addRegressionTables(regressionTable);
regressionModel.setModelName(getGeneratedClassName("RegressionModel"));
String targetField = "targetField";
DataField dataField = new DataField();
dataField.setName(targetField);
dataField.setOpType(OpType.CATEGORICAL);
DataDictionary dataDictionary = new DataDictionary();
dataDictionary.addDataFields(dataField);
MiningField miningField = new MiningField();
miningField.setUsageType(MiningField.UsageType.TARGET);
miningField.setName(dataField.getName());
MiningSchema miningSchema = new MiningSchema();
miningSchema.addMiningFields(miningField);
regressionModel.setMiningSchema(miningSchema);
PMML pmml = new PMML();
pmml.setDataDictionary(dataDictionary);
pmml.addModels(regressionModel);
final CommonCompilationDTO<RegressionModel> source =
CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME,
pmml,
regressionModel,
new PMMLCompilationContextMock(),
"FILENAME");
final RegressionCompilationDTO compilationDTO =
RegressionCompilationDTO.fromCompilationDTORegressionTablesAndNormalizationMethod(source,
new ArrayList<>(),
regressionModel.getNormalizationMethod());
Map.Entry<String, String> retrieved = KiePMMLRegressionTableFactory.getRegressionTableBuilder(regressionTable
, compilationDTO);
assertThat(retrieved).isNotNull();
Map<String, String> sources = new HashMap<>();
sources.put(retrieved.getKey(), retrieved.getValue());
commonValidateCompilation(sources);
}
|
/**
 * Formats a SQL string via {@code FormatProcess} and trims the surrounding
 * whitespace from the result.
 *
 * @param source the raw SQL text
 * @return the formatted, trimmed SQL
 */
public static String format(String source) {
return new FormatProcess(source).perform().trim();
}
|
// Disabled manual check: formats a query containing a backquoted keyword
// identifier and prints the result for visual inspection (no assertions).
@Test
@Disabled
public void testKeyword() {
final String sql = "select * from `order`";
final String format = SqlFormatter.format(sql);
System.out.println(format);
}
|
/**
 * Dispatches a container lifecycle event to the matching action.
 *
 * <p>Launch-style events ({@code LAUNCH}/{@code RELAUNCH}/{@code RECOVER}*)
 * submit a {@link ContainerLaunch} task to the launcher executor and (except
 * for recovered paused containers) track it in {@code running}. Signal, pause
 * and resume events act on the tracked launch, if any; cleanup events delegate
 * to {@code cleanup}.
 *
 * @param event the launcher event to process
 */
@Override
public void handle(ContainersLauncherEvent event) {
  // TODO: ContainersLauncher launches containers one by one!!
  Container container = event.getContainer();
  ContainerId containerId = container.getContainerId();
  switch (event.getType()) {
    case LAUNCH_CONTAINER:
      Application app =
          context.getApplications().get(
              containerId.getApplicationAttemptId().getApplicationId());
      ContainerLaunch launch =
          new ContainerLaunch(context, getConfig(), dispatcher, exec, app,
              event.getContainer(), dirsHandler, containerManager);
      containerLauncher.submit(launch);
      running.put(containerId, launch);
      break;
    case RELAUNCH_CONTAINER:
      app = context.getApplications().get(
          containerId.getApplicationAttemptId().getApplicationId());
      ContainerRelaunch relaunch =
          new ContainerRelaunch(context, getConfig(), dispatcher, exec, app,
              event.getContainer(), dirsHandler, containerManager);
      containerLauncher.submit(relaunch);
      running.put(containerId, relaunch);
      break;
    case RECOVER_CONTAINER:
      app = context.getApplications().get(
          containerId.getApplicationAttemptId().getApplicationId());
      launch = new RecoveredContainerLaunch(context, getConfig(), dispatcher,
          exec, app, event.getContainer(), dirsHandler, containerManager);
      containerLauncher.submit(launch);
      running.put(containerId, launch);
      break;
    case RECOVER_PAUSED_CONTAINER:
      app = context.getApplications().get(
          containerId.getApplicationAttemptId().getApplicationId());
      launch = new RecoverPausedContainerLaunch(context, getConfig(),
          dispatcher, exec, app, event.getContainer(), dirsHandler,
          containerManager);
      // NOTE(review): unlike the other launch paths this one is not put into
      // `running` — presumably intentional for paused recovery; confirm.
      containerLauncher.submit(launch);
      break;
    case CLEANUP_CONTAINER:
      cleanup(event, containerId, true);
      break;
    case CLEANUP_CONTAINER_FOR_REINIT:
      cleanup(event, containerId, false);
      break;
    case SIGNAL_CONTAINER:
      SignalContainersLauncherEvent signalEvent =
          (SignalContainersLauncherEvent) event;
      ContainerLaunch runningContainer = running.get(containerId);
      if (runningContainer == null) {
        // Container not launched. So nothing needs to be done.
        LOG.info("Container " + containerId + " not running, nothing to signal.");
        return;
      }
      try {
        runningContainer.signalContainer(signalEvent.getCommand());
      } catch (IOException e) {
        // Include the exception so the failure cause and stack trace are not
        // silently dropped from the log.
        LOG.warn("Got exception while signaling container " + containerId
            + " with command " + signalEvent.getCommand(), e);
      }
      break;
    case PAUSE_CONTAINER:
      ContainerLaunch launchedContainer = running.get(containerId);
      if (launchedContainer == null) {
        // Container not launched. So nothing needs to be done.
        return;
      }
      // Pause the container
      try {
        launchedContainer.pauseContainer();
      } catch (Exception e) {
        LOG.info("Got exception while pausing container: " +
            StringUtils.stringifyException(e));
      }
      break;
    case RESUME_CONTAINER:
      ContainerLaunch launchCont = running.get(containerId);
      if (launchCont == null) {
        // Container not launched. So nothing needs to be done.
        return;
      }
      // Resume the container.
      try {
        launchCont.resumeContainer();
      } catch (Exception e) {
        LOG.info("Got exception while resuming container: " +
            StringUtils.stringifyException(e));
      }
      break;
  }
}
|
// Sends a SIGNAL_CONTAINER event for a tracked container and verifies the
// GRACEFUL_SHUTDOWN command is forwarded to the ContainerLaunch exactly once
// while the running map is left untouched.
@Test
public void testSignalContainerEvent()
throws IllegalArgumentException, IllegalAccessException, IOException {
SignalContainersLauncherEvent dummyEvent =
mock(SignalContainersLauncherEvent.class);
when(dummyEvent.getContainer()).thenReturn(container);
when(container.getContainerId()).thenReturn(containerId);
when(containerId.getApplicationAttemptId()).thenReturn(appAttemptId);
when(containerId.getApplicationAttemptId().getApplicationId())
.thenReturn(appId);
spy.running.clear();
spy.running.put(containerId, containerLaunch);
when(dummyEvent.getType())
.thenReturn(ContainersLauncherEventType.SIGNAL_CONTAINER);
when(dummyEvent.getCommand())
.thenReturn(SignalContainerCommand.GRACEFUL_SHUTDOWN);
doNothing().when(containerLaunch)
.signalContainer(SignalContainerCommand.GRACEFUL_SHUTDOWN);
spy.handle(dummyEvent);
assertEquals(1, spy.running.size());
Mockito.verify(containerLaunch, Mockito.times(1))
.signalContainer(SignalContainerCommand.GRACEFUL_SHUTDOWN);
}
|
/**
 * Converts a scalar operator into a predicate by dispatching through this
 * visitor; a null operator converts to a null predicate.
 *
 * @param operator the operator to convert, may be null
 * @return the converted predicate, or null when the input is null
 */
public Predicate convert(ScalarOperator operator) {
    return operator == null ? null : operator.accept(this, null);
}
|
// Converts (F0 > 2) AND (F0 < 5) and verifies the result is a compound AND
// predicate whose two leaves carry the GreaterThan/LessThan functions and the
// original literal values.
@Test
public void testAnd() {
BinaryPredicateOperator op1 = new BinaryPredicateOperator(
BinaryType.GT, F0, ConstantOperator.createInt(2));
BinaryPredicateOperator op2 = new BinaryPredicateOperator(
BinaryType.LT, F0, ConstantOperator.createInt(5));
ScalarOperator op = new CompoundPredicateOperator(CompoundPredicateOperator.CompoundType.AND, op1, op2);
Predicate result = CONVERTER.convert(op);
Assert.assertTrue(result instanceof CompoundPredicate);
CompoundPredicate compoundPredicate = (CompoundPredicate) result;
Assert.assertTrue(compoundPredicate.function() instanceof And);
Assert.assertEquals(2, compoundPredicate.children().size());
Assert.assertTrue(compoundPredicate.children().get(0) instanceof LeafPredicate);
LeafPredicate p1 = (LeafPredicate) compoundPredicate.children().get(0);
Assert.assertTrue(p1.function() instanceof GreaterThan);
Assert.assertEquals(2, p1.literals().get(0));
Assert.assertTrue(compoundPredicate.children().get(1) instanceof LeafPredicate);
LeafPredicate p2 = (LeafPredicate) compoundPredicate.children().get(1);
Assert.assertTrue(p2.function() instanceof LessThan);
Assert.assertEquals(5, p2.literals().get(0));
}
|
/**
 * Transforms a changelog row stream into update-aware output: carry-over rows
 * are removed first, then matching DELETE/INSERT pairs (by identifier fields)
 * are rewritten as UPDATE_BEFORE/UPDATE_AFTER; null placeholders produced by
 * the iterator are filtered out.
 *
 * @param rowIterator the raw changelog rows
 * @param rowType schema of the rows
 * @param identifierFields columns that identify a logical row
 * @return iterator over the computed update stream
 */
public static Iterator<Row> computeUpdates(
Iterator<Row> rowIterator, StructType rowType, String[] identifierFields) {
Iterator<Row> carryoverRemoveIterator = removeCarryovers(rowIterator, rowType);
ChangelogIterator changelogIterator =
new ComputeUpdateIterator(carryoverRemoveIterator, rowType, identifierFields);
return Iterators.filter(changelogIterator, Objects::nonNull);
}
|
// Verifies computeUpdates handles null values correctly: unmatched rows pass
// through, a DELETE/INSERT pair differing only in non-identifier columns
// becomes an update, and a pair differing in identifier columns stays as-is.
@Test
public void testRowsWithNullValue() {
final List<Row> rowsWithNull =
Lists.newArrayList(
new GenericRowWithSchema(new Object[] {2, null, null, DELETE, 0, 0}, null),
new GenericRowWithSchema(new Object[] {3, null, null, INSERT, 0, 0}, null),
new GenericRowWithSchema(new Object[] {4, null, null, DELETE, 0, 0}, null),
new GenericRowWithSchema(new Object[] {4, null, null, INSERT, 0, 0}, null),
// mixed null and non-null value in non-identifier columns
new GenericRowWithSchema(new Object[] {5, null, null, DELETE, 0, 0}, null),
new GenericRowWithSchema(new Object[] {5, null, "data", INSERT, 0, 0}, null),
// mixed null and non-null value in identifier columns
new GenericRowWithSchema(new Object[] {6, null, null, DELETE, 0, 0}, null),
new GenericRowWithSchema(new Object[] {6, "name", null, INSERT, 0, 0}, null));
Iterator<Row> iterator =
ChangelogIterator.computeUpdates(rowsWithNull.iterator(), SCHEMA, IDENTIFIER_FIELDS);
List<Row> result = Lists.newArrayList(iterator);
assertEquals(
"Rows should match",
Lists.newArrayList(
new Object[] {2, null, null, DELETE, 0, 0},
new Object[] {3, null, null, INSERT, 0, 0},
new Object[] {5, null, null, UPDATE_BEFORE, 0, 0},
new Object[] {5, null, "data", UPDATE_AFTER, 0, 0},
new Object[] {6, null, null, DELETE, 0, 0},
new Object[] {6, "name", null, INSERT, 0, 0}),
rowsToJava(result));
}
|
/**
 * Returns the number of listeners registered on this topic's channel,
 * delegating to the subscribe service.
 */
@Override
public int countListeners() {
return subscribeService.countListeners(channelName);
}
|
// Verifies countListeners is scoped per topic: adding/removing a listener on
// one topic changes only that topic's count.
@Test
public void testCountListeners() {
RTopic topic1 = redisson.getTopic("topic", LongCodec.INSTANCE);
assertThat(topic1.countListeners()).isZero();
int id = topic1.addListener(Long.class, (channel, msg) -> {
});
assertThat(topic1.countListeners()).isOne();
RTopic topic2 = redisson.getTopic("topic2", LongCodec.INSTANCE);
assertThat(topic2.countListeners()).isZero();
int id2 = topic2.addListener(Long.class, (channel, msg) -> {
});
assertThat(topic2.countListeners()).isOne();
topic1.removeListener(id);
assertThat(topic1.countListeners()).isZero();
topic2.removeListener(id2);
assertThat(topic2.countListeners()).isZero();
}
|
/**
 * Closes the watcher: marks it closed under the lock (idempotently), wakes
 * the watcher thread by closing its notification socket, and waits for the
 * thread to terminate.
 *
 * @throws IOException if closing the notification socket fails
 */
@Override
public void close() throws IOException {
lock.lock();
try {
// Idempotent: a second close returns without touching the sockets again.
if (closed) return;
if (LOG.isDebugEnabled()) {
LOG.debug(this + ": closing");
}
closed = true;
} finally {
lock.unlock();
}
// Close notificationSockets[0], so that notificationSockets[1] gets an EOF
// event. This will wake up the thread immediately if it is blocked inside
// the select() system call.
notificationSockets[0].close();
// Wait for the select thread to terminate.
Uninterruptibles.joinUninterruptibly(watcherThread);
}
|
// Interrupting the watcher thread must let it terminate cleanly, after which
// close() still completes without hanging.
@Test(timeout=60000)
public void testInterruption() throws Exception {
final DomainSocketWatcher watcher = newDomainSocketWatcher(10);
watcher.watcherThread.interrupt();
Uninterruptibles.joinUninterruptibly(watcher.watcherThread);
watcher.close();
}
|
/**
 * Loads this step's metadata from its XML node by delegating to
 * {@code readData}; the metaStore parameter is accepted for interface
 * compatibility but not used here.
 *
 * @throws KettleXMLException if the XML cannot be parsed
 */
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
readData( stepnode, databases );
}
|
// Round-trip check: loads TableOutputMeta from a fixture XML node, asserts
// every parsed property, then re-serializes and compares against the exact
// expected XML (normalized to Unix line separators).
@Test
public void testLoadXml() throws Exception {
TableOutputMeta tableOutputMeta = new TableOutputMeta();
tableOutputMeta.loadXML( getTestNode(), databases, metaStore );
assertEquals( "1000", tableOutputMeta.getCommitSize() );
assertEquals( null, tableOutputMeta.getGeneratedKeyField() );
assertEquals( "public", tableOutputMeta.getSchemaName() );
assertEquals( "sales_csv", tableOutputMeta.getTableName() );
assertEquals( null, tableOutputMeta.getPartitioningField() );
assertTrue( tableOutputMeta.truncateTable() );
assertTrue( tableOutputMeta.specifyFields() );
assertFalse( tableOutputMeta.ignoreErrors() );
assertFalse( tableOutputMeta.isPartitioningEnabled() );
assertTrue( tableOutputMeta.useBatchUpdate() );
assertFalse( tableOutputMeta.isTableNameInField() );
assertTrue( tableOutputMeta.isTableNameInTable() );
assertFalse( tableOutputMeta.isReturningGeneratedKeys() );
String expectedXml = ""
+ "    <connection/>\n"
+ "    <schema>public</schema>\n"
+ "    <table>sales_csv</table>\n"
+ "    <commit>1000</commit>\n"
+ "    <truncate>Y</truncate>\n"
+ "    <ignore_errors>N</ignore_errors>\n"
+ "    <use_batch>Y</use_batch>\n"
+ "    <specify_fields>Y</specify_fields>\n"
+ "    <partitioning_enabled>N</partitioning_enabled>\n"
+ "    <partitioning_field/>\n"
+ "    <partitioning_daily>N</partitioning_daily>\n"
+ "    <partitioning_monthly>Y</partitioning_monthly>\n"
+ "    <tablename_in_field>N</tablename_in_field>\n"
+ "    <tablename_field/>\n"
+ "    <tablename_in_table>Y</tablename_in_table>\n"
+ "    <return_keys>N</return_keys>\n"
+ "    <return_field/>\n"
+ "    <fields>\n"
+ "        <field>\n"
+ "            <column_name>ORDERNUMBER</column_name>\n"
+ "            <stream_name>ORDERNUMBER</stream_name>\n"
+ "        </field>\n"
+ "        <field>\n"
+ "            <column_name>QUANTITYORDERED</column_name>\n"
+ "            <stream_name>QUANTITYORDERED</stream_name>\n"
+ "        </field>\n"
+ "        <field>\n"
+ "            <column_name>PRICEEACH</column_name>\n"
+ "            <stream_name>PRICEEACH</stream_name>\n"
+ "        </field>\n"
+ "    </fields>\n";
String actualXml = TestUtils.toUnixLineSeparators( tableOutputMeta.getXML() );
assertEquals( expectedXml, actualXml );
}
|
/**
 * Retains only the elements contained in the given collection: each stored
 * serialized item is decoded and removed when the deserialized value is not
 * in {@code c}.
 *
 * @param c the collection of elements to retain
 * @return true if this set changed as a result of the call
 */
@Override
public boolean retainAll(Collection<?> c) {
boolean changed = false;
// NOTE(review): this removes from `items` while iterating it with for-each;
// that would throw ConcurrentModificationException on a plain java.util
// collection — presumably `items` is a backing collection that tolerates
// concurrent removal. Confirm before reusing this pattern.
for (byte[] item : items) {
E deserialized = serializer.decode(item);
if (!c.contains(deserialized)) {
changed = items.remove(item) || changed;
}
}
return changed;
}
|
// Exercises retainAll three ways: retaining against an empty set empties the
// set, retaining a superset changes nothing, and retaining a proper subset
// removes exactly the missing element.
@Test
public void testRetainAll() throws Exception {
//Test ability to generate the intersection set
Set<Integer> retainSet = Sets.newHashSet();
fillSet(10, set);
assertTrue("The set should have changed.", set.retainAll(retainSet));
assertTrue("The set should have been emptied.", set.isEmpty());
fillSet(10, set);
fillSet(10, retainSet);
Set<Integer> duplicateSet = new HashSet<>(set);
assertFalse("The set should not have changed.", set.retainAll(retainSet));
assertEquals("The set should be the same as the duplicate.", duplicateSet, set);
retainSet.remove(9);
assertTrue("The set should have changed.", set.retainAll(retainSet));
duplicateSet.remove(9);
assertEquals("The set should have had the nine element removed.", duplicateSet, set);
}
|
/**
 * Returns the shared Aliyun SLS log collect client singleton used by this
 * plugin data handler.
 */
public static AliyunSlsLogCollectClient getAliyunSlsLogCollectClient() {
return ALIYUN_SLS_LOG_COLLECT_CLIENT;
}
|
// Sanity check: the handler exposes a client of the expected concrete type.
@Test
public void testGetAliyunSlsLogCollectClient() {
Assertions.assertEquals(LoggingAliyunSlsPluginDataHandler.getAliyunSlsLogCollectClient().getClass(), AliyunSlsLogCollectClient.class);
}
|
/**
 * Wraps rendered markdown HTML in a {@code <div>} carrying the
 * {@code markdown-body} CSS class, with the HTML on its own lines.
 *
 * @param html the rendered HTML fragment
 * @return the fragment surrounded by the wrapper div
 */
public static String wrapWithMarkdownClassDiv(String html) {
    return "<div class=\"markdown-body\">\n" + html + "\n</div>";
}
|
// Verifies a two-item ordered markdown list renders to the expected <ol>
// HTML wrapped in the markdown-body div.
@Test
void testOrderedList() {
String input =
new StringBuilder()
.append("1. First ordered list item\n")
.append("2. Another item")
.toString();
String expected =
new StringBuilder()
.append("<ol>\n")
.append("<li>First ordered list item</li>\n")
.append("<li>Another item</li>\n")
.append("</ol>\n")
.toString();
InterpreterResult result = md.interpret(input, null);
assertEquals(wrapWithMarkdownClassDiv(expected), result.message().get(0).getData());
}
|
/**
 * Returns the shared immutable empty result instance.
 */
public static StringSetResult empty() {
return EmptyStringSetResult.INSTANCE;
}
|
// The empty result must expose an empty AND immutable string set: mutation
// attempts throw UnsupportedOperationException.
@Test
public void empty() {
// Test empty returns an immutable set
StringSetResult empptyStringSetResult = StringSetResult.empty();
assertTrue(empptyStringSetResult.getStringSet().isEmpty());
assertThrows(
UnsupportedOperationException.class,
() -> empptyStringSetResult.getStringSet().add("should-fail"));
}
|
/**
 * Reports whether the named plugin is available, i.e. it is registered with
 * the plugin manager and currently in the STARTED state.
 *
 * @param pluginName plugin identifier; blank or unknown names are unavailable
 * @return true only when the plugin exists and has been started
 */
@Override
public boolean available(String pluginName) {
    if (StringUtils.isBlank(pluginName)) {
        return false;
    }
    final PluginWrapper wrapper = pluginManager.getPlugin(pluginName);
    return wrapper != null && PluginState.STARTED.equals(wrapper.getPluginState());
}
|
// Covers all availability outcomes: null name, unknown plugin, a plugin that
// is registered but not started (RESOLVED), and a started plugin.
@Test
void available() {
assertThat(pluginFinder.available(null)).isFalse();
boolean available = pluginFinder.available("fake-plugin");
assertThat(available).isFalse();
PluginWrapper mockPluginWrapper = Mockito.mock(PluginWrapper.class);
when(haloPluginManager.getPlugin(eq("fake-plugin")))
.thenReturn(mockPluginWrapper);
when(mockPluginWrapper.getPluginState()).thenReturn(PluginState.RESOLVED);
available = pluginFinder.available("fake-plugin");
assertThat(available).isFalse();
when(mockPluginWrapper.getPluginState()).thenReturn(PluginState.STARTED);
available = pluginFinder.available("fake-plugin");
assertThat(available).isTrue();
}
|
/**
 * Opens the function: runs the superclass initialization, then instantiates
 * the code-generated record equaliser with the user-code classloader.
 *
 * @throws Exception if superclass open or equaliser instantiation fails
 */
@Override
public void open(ExecutionContext ctx) throws Exception {
super.open(ctx);
equaliser =
genRecordEqualiser.newInstance(ctx.getRuntimeContext().getUserCodeClassLoader());
}
|
// With generateUpdateBefore=false and generateInsert=false, the mini-batch
// deduplicate function emits only UPDATE_AFTER records for the last row per
// key, and only when a bundle is triggered (here by element count, then on
// close which flushes the remaining bundle).
@Test
public void testWithoutGenerateUpdateBeforeAndInsert() throws Exception {
ProcTimeMiniBatchDeduplicateKeepLastRowFunction func =
createFunction(false, false, minTime.toMilliseconds());
OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(func);
testHarness.open();
testHarness.processElement(insertRecord("book", 1L, 10));
testHarness.processElement(insertRecord("book", 2L, 11));
// output is empty because bundle not trigger yet.
assertThat(testHarness.getOutput()).isEmpty();
testHarness.processElement(insertRecord("book", 1L, 13));
List<Object> expectedOutput = new ArrayList<>();
expectedOutput.add(updateAfterRecord("book", 2L, 11));
expectedOutput.add(updateAfterRecord("book", 1L, 13));
assertor.assertOutputEqualsSorted("output wrong.", expectedOutput, testHarness.getOutput());
testHarness.processElement(insertRecord("book", 1L, 12));
testHarness.processElement(insertRecord("book", 2L, 11));
testHarness.processElement(insertRecord("book", 3L, 11));
expectedOutput.add(updateAfterRecord("book", 1L, 12));
expectedOutput.add(updateAfterRecord("book", 2L, 11));
expectedOutput.add(updateAfterRecord("book", 3L, 11));
testHarness.close();
assertor.assertOutputEqualsSorted("output wrong.", expectedOutput, testHarness.getOutput());
}
|
/**
 * Aggregates {@link ClusterMetricsInfo} across all active sub-clusters.
 *
 * <p>Each sub-cluster is queried in parallel through its REST interceptor;
 * a sub-cluster that fails to answer is logged and skipped so one bad member
 * cannot fail the whole federation request.
 *
 * @return the merged metrics of every reachable sub-cluster
 */
@Override
public ClusterMetricsInfo getClusterMetricsInfo() {
  ClusterMetricsInfo metrics = new ClusterMetricsInfo();
  Collection<SubClusterInfo> subClusterInfos = federationFacade.getActiveSubClusters();
  Stream<ClusterMetricsInfo> clusterMetricsInfoStream = subClusterInfos.parallelStream()
      .map(subClusterInfo -> {
        DefaultRequestInterceptorREST interceptor =
            getOrCreateInterceptorForSubCluster(subClusterInfo);
        try {
          return interceptor.getClusterMetricsInfo();
        } catch (Exception e) {
          // Pass the exception as the last argument so the failure cause is
          // logged with its stack trace instead of being dropped.
          LOG.error("Subcluster {} failed to return Cluster Metrics.",
              subClusterInfo.getSubClusterId(), e);
          return null;
        }
      });
  clusterMetricsInfoStream.forEach(clusterMetricsInfo -> {
    try {
      if (clusterMetricsInfo != null) {
        RouterWebServiceUtil.mergeMetrics(metrics, clusterMetricsInfo);
      }
    } catch (Throwable e) {
      // Message fixed: this path merges cluster metrics, not node reports
      // (the old text was copy-pasted from the nodes-report handler).
      LOG.warn("Failed to merge Cluster Metrics.", e);
    }
  });
  return metrics;
}
|
// Each mock sub-cluster i reports i submitted apps; the merged metrics must
// equal the sum 0 + 1 + ... + (NUM_SUBCLUSTER - 1).
@Test
public void testGetClusterMetrics() {
ClusterMetricsInfo responseGet = interceptor.getClusterMetricsInfo();
Assert.assertNotNull(responseGet);
int expectedAppSubmitted = 0;
for (int i = 0; i < NUM_SUBCLUSTER; i++) {
expectedAppSubmitted += i;
}
Assert.assertEquals(expectedAppSubmitted, responseGet.getAppsSubmitted());
// The merge operations is tested in TestRouterWebServiceUtil
}
|
/**
 * Queries the topics consumed by a consumer group from the given broker.
 *
 * <p>Completes the returned future with the decoded {@link TopicList} on
 * success, or exceptionally with an {@link MQClientException} on a non-success
 * response code, or with the transport failure itself.
 *
 * @param address broker address to query
 * @param requestHeader the query request header
 * @param timeoutMillis RPC timeout in milliseconds
 * @return future holding the topic list
 */
@Override
public CompletableFuture<TopicList> queryTopicsByConsumer(String address,
    QueryTopicsByConsumerRequestHeader requestHeader, long timeoutMillis) {
    CompletableFuture<TopicList> future = new CompletableFuture<>();
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.QUERY_TOPICS_BY_CONSUMER, requestHeader);
    remotingClient.invoke(address, request, timeoutMillis).whenComplete((response, throwable) -> {
        if (throwable != null) {
            // Bug fix: with thenAccept, a transport failure left the returned
            // future forever incomplete; propagate the failure explicitly.
            future.completeExceptionally(throwable);
        } else if (response.getCode() == ResponseCode.SUCCESS) {
            TopicList topicList = TopicList.decode(response.getBody(), TopicList.class);
            future.complete(topicList);
        } else {
            log.warn("queryTopicsByConsumer getResponseCommand failed, {} {}", response.getCode(), response.getRemark());
            future.completeExceptionally(new MQClientException(response.getCode(), response.getRemark()));
        }
    });
    return future;
}
|
// With the remoting layer primed to return an error response, the future must
// complete exceptionally with an MQClientException carrying SYSTEM_ERROR.
@Test
public void assertQueryTopicsByConsumerWithError() {
setResponseError();
QueryTopicsByConsumerRequestHeader requestHeader = mock(QueryTopicsByConsumerRequestHeader.class);
CompletableFuture<TopicList> actual = mqClientAdminImpl.queryTopicsByConsumer(defaultBrokerAddr, requestHeader, defaultTimeout);
Throwable thrown = assertThrows(ExecutionException.class, actual::get);
assertTrue(thrown.getCause() instanceof MQClientException);
MQClientException mqException = (MQClientException) thrown.getCause();
assertEquals(ResponseCode.SYSTEM_ERROR, mqException.getResponseCode());
assertTrue(mqException.getMessage().contains("CODE: 1 DESC: null"));
}
|
/**
 * Returns the wrapped value, or throws {@code NoSuchElementException} with
 * the message "No value present" when no value is set. Delegates to the
 * supplier-based overload.
 *
 * @return the non-null wrapped value
 */
public T orElseThrow() {
return orElseThrow(NoSuchElementException::new, "No value present");
}
|
// orElseThrow() on an empty Opt must raise NoSuchElementException.
@Test
public void orElseThrowTest() {
assertThrows(NoSuchElementException.class, () -> {
// Fetch a value that must be non-null, otherwise NoSuchElementException is thrown
Object obj = Opt.ofNullable(null).orElseThrow();
assertNull(obj);
});
}
|
/**
 * Builds a config payload from a {@code <config>} XML element: validates the
 * config name, then parses every child element into a fresh payload builder.
 *
 * @param configE the {@code <config>} root element
 * @return the populated payload builder
 */
public ConfigPayloadBuilder build(Element configE) {
    parseConfigName(configE);
    final ConfigPayloadBuilder payload = new ConfigPayloadBuilder(configDefinition);
    XML.getChildren(configE).forEach(child -> parseElement(child, payload, null));
    return payload;
}
|
// Building a payload whose <longval> holds a non-numeric string must raise an
// IllegalArgumentException against the simpletypes definition.
@Test
void require_that_exceptions_are_issued() throws FileNotFoundException {
assertThrows(IllegalArgumentException.class, () -> {
Element configRoot = getDocument(
"<config name=\"test.simpletypes\">" +
"<longval>invalid</longval>" +
"</config>");
DefParser defParser = new DefParser("simpletypes",
new FileReader("src/test/resources/configdefinitions/test.simpletypes.def"));
ConfigDefinition def = ConfigDefinitionBuilder.createConfigDefinition(defParser.getTree());
ConfigPayloadBuilder unused = new DomConfigPayloadBuilder(def).build(configRoot);
});
}
|
/**
 * Decides whether a WebSocket host path matches a target path.
 *
 * <p>Host paths ending in {@code *} are Vert.x wildcard routes and match on
 * the path prefix; all other host paths require an exact, segment-for-segment
 * match. Paths are normalized (with a trailing slash) before comparison.
 *
 * @param hostPath the configured host path, possibly ending in {@code *}
 * @param targetPath the incoming path to test
 * @return true when the target path matches the host path
 */
public static boolean webSocketHostPathMatches(String hostPath, String targetPath) {
    if (ObjectHelper.isEmpty(hostPath) || ObjectHelper.isEmpty(targetPath)) {
        // Should not be possible in practice: both values originate from the
        // vertx-websocket consumer / producer URI.
        return false;
    }
    // A trailing '*' marks a Vert.x wildcard route: compare on the prefix only.
    final boolean prefixMatch = hostPath.endsWith("*");
    String effectiveHostPath = hostPath;
    if (prefixMatch) {
        effectiveHostPath = effectiveHostPath.substring(0, effectiveHostPath.lastIndexOf('*'));
    }
    final String normalizedHost = HttpUtils.normalizePath(effectiveHostPath + "/");
    final String normalizedTarget = HttpUtils.normalizePath(targetPath + "/");
    if (prefixMatch) {
        return normalizedTarget.startsWith(normalizedHost);
    }
    // Exact match requires the same number of path segments.
    if (normalizedHost.split("/").length != normalizedTarget.split("/").length) {
        return false;
    }
    return normalizedHost.equals(normalizedTarget);
}
|
// An exact (non-wildcard) host path with more segments than the target must
// not match.
@Test
void webSocketHostExactPathNotEnoughElementsNotMatches() {
String hostPath = "/foo/bar/cheese/wine";
String targetPath = "/foo/bar";
assertFalse(VertxWebsocketHelper.webSocketHostPathMatches(hostPath, targetPath));
}
|
/**
 * Computes the indices at which both this vector and {@code other} have a
 * stored (active) element, via a merge over the two index-sorted iterators.
 *
 * @param other the other sparse vector
 * @return the sorted indices present in both vectors
 */
public int[] intersection(SparseVector other) {
    List<Integer> diffIndicesList = new ArrayList<>();
    Iterator<VectorTuple> itr = iterator();
    Iterator<VectorTuple> otherItr = other.iterator();
    // If either vector has no entries the intersection is empty.
    if (itr.hasNext() && otherItr.hasNext()) {
        VectorTuple tuple = itr.next();
        VectorTuple otherTuple = otherItr.next();
        // Classic sorted-merge: advance the side with the smaller index,
        // recording an index whenever both sides agree.
        while (itr.hasNext() && otherItr.hasNext()) {
            if (tuple.index == otherTuple.index) {
                diffIndicesList.add(tuple.index);
                tuple = itr.next();
                otherTuple = otherItr.next();
            } else if (tuple.index < otherTuple.index) {
                tuple = itr.next();
            } else {
                otherTuple = otherItr.next();
            }
        }
        // One iterator is exhausted; drain the other, still comparing each
        // element against the last element taken from the exhausted side.
        while (itr.hasNext()) {
            if (tuple.index == otherTuple.index) {
                diffIndicesList.add(tuple.index);
            }
            tuple = itr.next();
        }
        while (otherItr.hasNext()) {
            if (tuple.index == otherTuple.index) {
                diffIndicesList.add(tuple.index);
            }
            otherTuple = otherItr.next();
        }
        // The final pair has not yet been compared by the loops above.
        if (tuple.index == otherTuple.index) {
            diffIndicesList.add(tuple.index);
        }
    }
    return Util.toPrimitiveInt(diffIndicesList);
}
|
@Test
public void intersection() {
    SparseVector first = generateVectorA();
    SparseVector second = generateVectorB();
    SparseVector third = generateVectorC();
    SparseVector emptyVector = generateEmptyVector();
    // Shared active indices between A and each of the other vectors.
    assertArrayEquals(new int[]{0, 1, 4, 5, 8}, first.intersection(second));
    assertArrayEquals(new int[]{1, 5}, first.intersection(third));
    // Intersecting with an empty vector yields no indices.
    assertArrayEquals(new int[0], first.intersection(emptyVector));
}
|
/**
 * Sets the monitor group and returns this builder for fluent chaining.
 *
 * @param group the group name
 * @return this builder
 */
public MonitorBuilder group(String group) {
    this.group = group;
    return getThis();
}
|
@Test
void group() {
    MonitorBuilder monitorBuilder = MonitorBuilder.newBuilder();
    monitorBuilder.group("group");
    // The configured group must survive the build.
    Assertions.assertEquals("group", monitorBuilder.build().getGroup());
}
|
/**
 * Fetches all windows for {@code key} in the given partition whose window
 * start AND window end both fall inside the supplied bounds.
 *
 * @param key               the key to look up
 * @param partition         the partition holding the key
 * @param windowStartBounds bounds on the window start instant
 * @param windowEndBounds   bounds on the window end instant
 * @param position          not used by this implementation
 * @return an iterator-backed result over the matching windowed rows
 * @throws MaterializationException wrapping any failure during the fetch
 */
@Override
public KsMaterializedQueryResult<WindowedRow> get(
    final GenericKey key,
    final int partition,
    final Range<Instant> windowStartBounds,
    final Range<Instant> windowEndBounds,
    final Optional<Position> position
) {
    try {
        final ReadOnlyWindowStore<GenericKey, ValueAndTimestamp<GenericRow>> store = stateStore
            .store(QueryableStoreTypes.timestampedWindowStore(), partition);

        // Derive a single fetch range wide enough for both bound sets; the
        // exact per-window filtering happens inside the loop below.
        final Instant lower = calculateLowerBound(windowStartBounds, windowEndBounds);
        final Instant upper = calculateUpperBound(windowStartBounds, windowEndBounds);

        // Fetch via the cache-bypassing helper so reads hit the store directly.
        try (WindowStoreIterator<ValueAndTimestamp<GenericRow>> it
            = cacheBypassFetcher.fetch(store, key, lower, upper)) {

            final Builder<WindowedRow> builder = ImmutableList.builder();

            while (it.hasNext()) {
                final KeyValue<Long, ValueAndTimestamp<GenericRow>> next = it.next();

                // The iterator key is the window start in epoch millis.
                final Instant windowStart = Instant.ofEpochMilli(next.key);
                if (!windowStartBounds.contains(windowStart)) {
                    continue;
                }

                // Windows here have a fixed size, so the end is derived
                // rather than stored.
                final Instant windowEnd = windowStart.plus(windowSize);
                if (!windowEndBounds.contains(windowEnd)) {
                    continue;
                }

                final TimeWindow window =
                    new TimeWindow(windowStart.toEpochMilli(), windowEnd.toEpochMilli());

                final WindowedRow row = WindowedRow.of(
                    stateStore.schema(),
                    new Windowed<>(key, window),
                    next.value.value(),
                    next.value.timestamp()
                );

                builder.add(row);
            }

            return KsMaterializedQueryResult.rowIterator(builder.build().iterator());
        }
    } catch (final Exception e) {
        throw new MaterializationException("Failed to get value from materialized table", e);
    }
}
|
@Test
public void shouldReturnValuesForOpenStartBounds_fetchAll() {
    // Given: an open start range, and a store iterator yielding three windows:
    // one starting exactly at the (excluded) lower endpoint, one strictly
    // inside the range, and one at the (excluded) upper endpoint.
    final Range<Instant> start = Range.open(
        NOW,
        NOW.plusSeconds(10)
    );

    when(keyValueIterator.hasNext())
        .thenReturn(true, true, true, false);
    when(keyValueIterator.next())
        .thenReturn(new KeyValue<>(new Windowed<>(A_KEY,
            new TimeWindow(start.lowerEndpoint().toEpochMilli(),
                start.lowerEndpoint().toEpochMilli() + WINDOW_SIZE.toMillis())), VALUE_1))
        .thenReturn(new KeyValue<>(new Windowed<>(A_KEY2,
            new TimeWindow(start.lowerEndpoint().plusMillis(1).toEpochMilli(),
                start.lowerEndpoint().toEpochMilli() + WINDOW_SIZE.toMillis() + 1)), VALUE_2))
        .thenReturn(new KeyValue<>(new Windowed<>(A_KEY3,
            new TimeWindow(start.upperEndpoint().toEpochMilli(),
                start.upperEndpoint().toEpochMilli() + WINDOW_SIZE.toMillis())), VALUE_3))
        // A fourth call would indicate the iterator was over-consumed.
        .thenThrow(new AssertionError());

    // When:
    final Iterator<WindowedRow> rowIterator =
        table.get(PARTITION, start, Range.all()).rowIterator;

    // Then: only the window strictly inside the open range survives.
    assertThat(rowIterator.hasNext(), is(true));
    assertThat(rowIterator.next(),
        is (WindowedRow.of(
            SCHEMA,
            windowedKey(A_KEY2, start.lowerEndpoint().plusMillis(1)),
            VALUE_2.value(),
            VALUE_2.timestamp())));
    assertThat(rowIterator.hasNext(), is(false));
}
|
/**
 * Applies the configured case change to the first argument.
 *
 * <p>The optional second argument selects the mode; when absent or blank the
 * mode falls back to {@code UPPER}. The optional third argument names a
 * variable the result is stored into.
 */
@Override
public String execute(SampleResult previousResult, Sampler currentSampler) throws InvalidVariableException {
    String input = values[0].execute();
    String requestedMode = values.length > 1 ? values[1].execute() : null;
    if (StringUtils.isEmpty(requestedMode)) {
        // No (or blank) mode supplied: default to upper-casing.
        requestedMode = ChangeCaseMode.UPPER.getName();
    }
    String result = changeCase(input, requestedMode);
    addVariableValue(result, values, 2);
    return result;
}
|
@Test
public void testChangeCaseWrongMode() throws Exception {
    // An unrecognized mode leaves the input unchanged.
    String result = execute("myUpperTest", "Wrong");
    assertEquals("myUpperTest", result);
}
|
/**
 * Removes duplicate suggestions in place, keeping the first occurrence of
 * each value. Removed entries are recycled into {@code stringsPool}.
 *
 * @param suggestions the list to deduplicate (modified in place)
 * @param stringsPool pool that receives recycled entries
 */
public static void removeDupes(
    final List<CharSequence> suggestions, List<CharSequence> stringsPool) {
    if (suggestions.size() < 2) return;
    // The size is re-read on every iteration because removals shrink the list.
    for (int i = 1; i < suggestions.size(); i++) {
        final CharSequence current = suggestions.get(i);
        // Compare the current entry against every earlier entry.
        for (int j = 0; j < i; j++) {
            if (TextUtils.equals(current, suggestions.get(j))) {
                // Recycle the duplicate, then re-examine index i, which now
                // holds the element that followed the removed one.
                removeSuggestion(suggestions, i, stringsPool);
                i--;
                break;
            }
        }
    }
}
|
@Test
public void testRemoveDupesNoDupes() throws Exception {
    // A list without duplicates must come back unchanged and in order.
    ArrayList<CharSequence> suggestions =
        new ArrayList<>(Arrays.<CharSequence>asList("typed", "something", "banana", "car"));
    IMEUtil.removeDupes(suggestions, mStringPool);
    Assert.assertEquals(4, suggestions.size());
    Assert.assertEquals("typed", suggestions.get(0));
    Assert.assertEquals("something", suggestions.get(1));
    Assert.assertEquals("banana", suggestions.get(2));
    Assert.assertEquals("car", suggestions.get(3));
}
|
/**
 * Creates a pre-signed URL authorizing the given HTTP method on a bucket/key.
 *
 * @param secret secret access key used to sign the request
 * @param bucket target bucket
 * @param region bucket region; when blank and the signature version is
 *               AWS4-HMAC-SHA256 the AWS default region is substituted,
 *               because that signature scheme requires a region
 * @param key    object key
 * @param method HTTP verb the URL authorizes
 * @param expiry expiry timestamp in milliseconds since the epoch
 * @return the signed URL string
 */
public String create(final String secret, final String bucket, String region, final String key, final String method, final long expiry) {
    if(StringUtils.isBlank(region)) {
        // Only for AWS
        switch(session.getSignatureVersion()) {
            case AWS4HMACSHA256:
                // Region is required for AWS4-HMAC-SHA256 signature
                region = S3LocationFeature.DEFAULT_REGION.getIdentifier();
        }
    }
    final Host bookmark = session.getHost();
    // Anonymous service subclass: resolves the signing endpoint from the
    // bookmark and deliberately skips proxy initialization.
    return new RestS3Service(new AWSCredentials(StringUtils.strip(bookmark.getCredentials().getUsername()), StringUtils.strip(secret))) {
        @Override
        public String getEndpoint() {
            // AWS-hosted buckets sign against the protocol's default hostname;
            // third-party S3 endpoints use the bookmark hostname as-is.
            if(S3Session.isAwsHostname(bookmark.getHostname())) {
                return bookmark.getProtocol().getDefaultHostname();
            }
            return bookmark.getHostname();
        }

        @Override
        protected void initializeProxy(final HttpClientBuilder httpClientBuilder) {
            // Intentionally empty: no proxy setup for URL signing.
        }
    }.createSignedUrlUsingSignatureVersion(
        session.getSignatureVersion().toString(),
        // expiry is converted from milliseconds to the seconds the API expects
        region, method, bucket, key, null, null, expiry / 1000, false, true,
        new HostPreferences(bookmark).getBoolean("s3.bucket.virtualhost.disable"));
}
|
@Test
public void testCreateEuCentral() throws Exception {
    // Sign a GET URL for a eu-central-1 bucket, expiring one week from now.
    final Calendar expiresAt = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
    expiresAt.add(Calendar.MILLISECOND, (int) TimeUnit.DAYS.toMillis(7));
    final String signedUrl = new S3PresignedUrlProvider(session).create(PROPERTIES.get("s3.secret"),
            "test-eu-central-1-cyberduck", "eu-central-1", "f", "GET", expiresAt.getTimeInMillis());
    assertNotNull(signedUrl);
    assertEquals("test-eu-central-1-cyberduck.s3.amazonaws.com", URI.create(signedUrl).getHost());
}
|
/**
 * Reacts to game chat messages relevant to woodcutting: log chops, anima bark
 * gains, bird-nest spawns, sapling ingredient hints, and flower-state resets.
 */
@Subscribe
public void onChatMessage(ChatMessage event)
{
    // Only these message types can carry the texts handled below.
    if (event.getType() != ChatMessageType.SPAM
        && event.getType() != ChatMessageType.GAMEMESSAGE
        && event.getType() != ChatMessageType.MESBOX)
    {
        return;
    }

    final var msg = event.getMessage();

    // Log-chop message: lazily start a session and record the chop.
    if (WOOD_CUT_PATTERN.matcher(msg).matches())
    {
        if (session == null)
        {
            session = new WoodcuttingSession();
        }

        session.setLastChopping();
        session.incrementLogsCut();
    }

    // Anima bark message: the captured group is the amount gained.
    var matcher = ANIMA_BARK_PATTERN.matcher(msg);
    if (matcher.matches())
    {
        if (session == null)
        {
            session = new WoodcuttingSession();
        }

        session.setLastChopping();

        int num = Integer.parseInt(matcher.group(1));
        session.incrementBark(num);
    }

    // Bird nest spawn: notify unless a clue nest below the configured tier
    // just spawned (clueTierSpawned is set elsewhere in the plugin).
    if (msg.contains("A bird's nest falls out of the tree"))
    {
        if (clueTierSpawned == null || clueTierSpawned.ordinal() >= config.clueNestNotifyTier().ordinal())
        {
            notifier.notify(config.showNestNotification(), "A bird nest has spawned!");
        }

        // Clear the clue tier that has previously spawned
        clueTierSpawned = null;
    }

    // Sapling hint: map "first"/"second"/"third" to an order slot, then match
    // the mentioned ingredient name against nearby ingredient objects.
    if (msg.startsWith("The sapling seems to love"))
    {
        int ingredientNum = msg.contains("first") ? 1 : (msg.contains("second") ? 2 : (msg.contains("third") ? 3 : -1));
        if (ingredientNum == -1)
        {
            log.debug("unable to find ingredient index from message: {}", msg);
            return;
        }

        GameObject ingredientObj = saplingIngredients.stream()
            .filter(obj -> msg.contains(client.getObjectDefinition(obj.getId()).getName().toLowerCase()))
            .findAny()
            .orElse(null);
        if (ingredientObj == null)
        {
            log.debug("unable to find ingredient from message: {}", msg);
            return;
        }

        saplingOrder[ingredientNum - 1] = ingredientObj;
    }

    // Flower feedback: any of these messages means the tracked flower state
    // is stale, so clear the active set if it contains the last interaction.
    if (msg.equals("There are no open, unpollinated flowers on this bush yet.")
        || msg.equals("The flowers on this bush have not yet opened enough to harvest pollen.")
        || msg.equals("<col=06600c>The bush is already fruiting and won't benefit from <col=06600c>any more pollen.</col>"))
    {
        if (activeFlowers.contains(lastInteractFlower))
        {
            log.debug("Flowers reset");
            activeFlowers.clear();
        }
    }
}
|
@Test
public void testLogs()
{
    // A woodcutting SPAM message must start a session on the plugin.
    ChatMessage logsMessage = new ChatMessage(null, ChatMessageType.SPAM, "", "You get some logs.", "", 0);
    woodcuttingPlugin.onChatMessage(logsMessage);
    assertNotNull(woodcuttingPlugin.getSession());
}
|
/**
 * Creates a new {@link HttpClient} backed by its own connection provider.
 *
 * @return a new client instance
 */
public static HttpClient create() {
    HttpConnectionProvider connectionProvider = new HttpConnectionProvider();
    return new HttpClientConnect(connectionProvider);
}
|
@Test
void testConnectionNoIdleTimeElasticPool() throws Exception {
    // Given an elastic pool with no idle timeout, two sequential requests
    // must reuse the same underlying channel.
    ConnectionProvider provider =
        ConnectionProvider.create("testConnectionNoIdleTimeElasticPool", Integer.MAX_VALUE);
    ChannelId[] ids = doTestConnectionIdleTime(provider);
    assertThat(ids[0]).isEqualTo(ids[1]);
}
|
/**
 * Returns the measure's value as a double.
 *
 * <p>Fails via {@code checkValueType} when the stored value type is not
 * {@code DOUBLE} — NOTE(review): exact exception type depends on
 * checkValueType, not visible here.
 */
@Override
public double getDoubleValue() {
    checkValueType(DOUBLE);
    return measure.getDoubleValue();
}
|
@Test
public void get_double_value() {
    // A measure created from a double must expose that exact value.
    MeasureImpl doubleMeasure = new MeasureImpl(Measure.newMeasureBuilder().create(1d, 1));
    assertThat(doubleMeasure.getDoubleValue()).isEqualTo(1d);
}
|
/**
 * Reports whether the URL uses a protocol this manager supports
 * (plain HTTP or HTTPS, compared case-insensitively).
 */
static boolean isSupportedProtocol(URL url) {
    final String protocol = url.getProtocol().toLowerCase(java.util.Locale.ENGLISH);
    return HTTPConstants.PROTOCOL_HTTP.equals(protocol)
            || HTTPConstants.PROTOCOL_HTTPS.equals(protocol);
}
|
@Test
public void testHttp() throws Exception {
    // A bare "http:" URL is enough to carry the protocol under test.
    final URL httpUrl = new URL("http:");
    Assertions.assertTrue(AuthManager.isSupportedProtocol(httpUrl));
}
|
/**
 * Validates a sink-config update against the existing config and returns a
 * merged copy.
 *
 * <p>Identity fields (tenant, namespace, name) and several runtime-critical
 * settings (subscription name, input topics, processing guarantees, ordering
 * flags, auto-ack) must not change; all other non-empty fields of
 * {@code newConfig} override the existing values.
 *
 * <p>NOTE(review): {@code newConfig}'s input-spec map is mutated in place
 * below (topics/patterns are folded into it) — confirm callers do not reuse
 * the update object afterwards.
 *
 * @param existingConfig the currently deployed config (never mutated)
 * @param newConfig      the requested update
 * @return a merged config based on a clone of {@code existingConfig}
 * @throws IllegalArgumentException if an immutable field differs
 */
public static SinkConfig validateUpdate(SinkConfig existingConfig, SinkConfig newConfig) {
    // Work on a clone so the existing config is never mutated.
    SinkConfig mergedConfig = clone(existingConfig);
    // Identity fields must match exactly.
    if (!existingConfig.getTenant().equals(newConfig.getTenant())) {
        throw new IllegalArgumentException("Tenants differ");
    }
    if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) {
        throw new IllegalArgumentException("Namespaces differ");
    }
    if (!existingConfig.getName().equals(newConfig.getName())) {
        throw new IllegalArgumentException("Sink Names differ");
    }
    if (!StringUtils.isEmpty(newConfig.getClassName())) {
        mergedConfig.setClassName(newConfig.getClassName());
    }
    // The source subscription is fixed once set.
    if (!StringUtils.isEmpty(newConfig.getSourceSubscriptionName()) && !newConfig.getSourceSubscriptionName()
        .equals(existingConfig.getSourceSubscriptionName())) {
        throw new IllegalArgumentException("Subscription Name cannot be altered");
    }
    // Normalize both input-spec maps so the merge logic below can assume
    // they exist.
    if (newConfig.getInputSpecs() == null) {
        newConfig.setInputSpecs(new HashMap<>());
    }
    if (mergedConfig.getInputSpecs() == null) {
        mergedConfig.setInputSpecs(new HashMap<>());
    }
    if (!StringUtils.isEmpty(newConfig.getLogTopic())) {
        mergedConfig.setLogTopic(newConfig.getLogTopic());
    }
    // Fold plain input topics into the spec map as non-regex consumers.
    if (newConfig.getInputs() != null) {
        newConfig.getInputs().forEach((topicName -> {
            newConfig.getInputSpecs().putIfAbsent(topicName,
                ConsumerConfig.builder().isRegexPattern(false).build());
        }));
    }
    // A topics pattern becomes a regex consumer spec.
    if (newConfig.getTopicsPattern() != null && !newConfig.getTopicsPattern().isEmpty()) {
        newConfig.getInputSpecs().put(newConfig.getTopicsPattern(),
            ConsumerConfig.builder()
                .isRegexPattern(true)
                .build());
    }
    // Per-topic serde / schema settings also become consumer specs.
    if (newConfig.getTopicToSerdeClassName() != null) {
        newConfig.getTopicToSerdeClassName().forEach((topicName, serdeClassName) -> {
            newConfig.getInputSpecs().put(topicName,
                ConsumerConfig.builder()
                    .serdeClassName(serdeClassName)
                    .isRegexPattern(false)
                    .build());
        });
    }
    if (newConfig.getTopicToSchemaType() != null) {
        newConfig.getTopicToSchemaType().forEach((topicName, schemaClassname) -> {
            newConfig.getInputSpecs().put(topicName,
                ConsumerConfig.builder()
                    .schemaType(schemaClassname)
                    .isRegexPattern(false)
                    .build());
        });
    }
    // Input topics may only be reconfigured, never added or switched between
    // literal and regex form.
    if (!newConfig.getInputSpecs().isEmpty()) {
        SinkConfig finalMergedConfig = mergedConfig;
        newConfig.getInputSpecs().forEach((topicName, consumerConfig) -> {
            if (!existingConfig.getInputSpecs().containsKey(topicName)) {
                throw new IllegalArgumentException("Input Topics cannot be altered");
            }
            if (consumerConfig.isRegexPattern() != existingConfig.getInputSpecs().get(topicName).isRegexPattern()) {
                throw new IllegalArgumentException(
                    "isRegexPattern for input topic " + topicName + " cannot be altered");
            }
            finalMergedConfig.getInputSpecs().put(topicName, consumerConfig);
        });
    }
    if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees()
        .equals(existingConfig.getProcessingGuarantees())) {
        throw new IllegalArgumentException("Processing Guarantees cannot be altered");
    }
    if (newConfig.getConfigs() != null) {
        mergedConfig.setConfigs(newConfig.getConfigs());
    }
    if (newConfig.getSecrets() != null) {
        mergedConfig.setSecrets(newConfig.getSecrets());
    }
    if (newConfig.getParallelism() != null) {
        mergedConfig.setParallelism(newConfig.getParallelism());
    }
    if (newConfig.getRetainOrdering() != null && !newConfig.getRetainOrdering()
        .equals(existingConfig.getRetainOrdering())) {
        throw new IllegalArgumentException("Retain Ordering cannot be altered");
    }
    if (newConfig.getRetainKeyOrdering() != null && !newConfig.getRetainKeyOrdering()
        .equals(existingConfig.getRetainKeyOrdering())) {
        throw new IllegalArgumentException("Retain Key Ordering cannot be altered");
    }
    if (newConfig.getAutoAck() != null && !newConfig.getAutoAck().equals(existingConfig.getAutoAck())) {
        throw new IllegalArgumentException("AutoAck cannot be altered");
    }
    // Resources are merged field-by-field rather than replaced wholesale.
    if (newConfig.getResources() != null) {
        mergedConfig
            .setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources()));
    }
    if (newConfig.getTimeoutMs() != null) {
        mergedConfig.setTimeoutMs(newConfig.getTimeoutMs());
    }
    if (newConfig.getCleanupSubscription() != null) {
        mergedConfig.setCleanupSubscription(newConfig.getCleanupSubscription());
    }
    if (!StringUtils.isEmpty(newConfig.getArchive())) {
        mergedConfig.setArchive(newConfig.getArchive());
    }
    if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) {
        mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags());
    }
    if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) {
        mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions());
    }
    if (newConfig.getTransformFunction() != null) {
        mergedConfig.setTransformFunction(newConfig.getTransformFunction());
    }
    if (newConfig.getTransformFunctionClassName() != null) {
        mergedConfig.setTransformFunctionClassName(newConfig.getTransformFunctionClassName());
    }
    if (newConfig.getTransformFunctionConfig() != null) {
        mergedConfig.setTransformFunctionConfig(newConfig.getTransformFunctionConfig());
    }
    return mergedConfig;
}
|
@Test
public void testMergeDifferentResources() {
    SinkConfig existingConfig = createSinkConfig();
    // Build an update that changes only the resource requirements.
    Resources updatedResources = new Resources();
    updatedResources.setCpu(0.3);
    updatedResources.setRam(1232L);
    updatedResources.setDisk(123456L);
    SinkConfig updateConfig = createUpdatedSinkConfig("resources", updatedResources);
    SinkConfig mergedConfig = SinkConfigUtils.validateUpdate(existingConfig, updateConfig);
    // The merged config picks up the new resources...
    assertEquals(
        mergedConfig.getResources(),
        updatedResources
    );
    // ...and, with resources restored, is otherwise identical to the original.
    mergedConfig.setResources(existingConfig.getResources());
    assertEquals(
        new Gson().toJson(existingConfig),
        new Gson().toJson(mergedConfig)
    );
}
|
/**
 * Checks whether the given string is a valid JVM type or method descriptor.
 * Method descriptors start with '('; field descriptors must be object
 * ('L...;') or array ('[...') form — bare primitives are rejected here.
 *
 * @param desc descriptor to validate, may be {@code null}
 * @return {@code true} when ASM can parse the descriptor
 */
@SuppressWarnings("all")
public static boolean isValidDesc(@Nullable String desc) {
    if (desc == null || desc.isEmpty())
        return false;
    switch (desc.charAt(0)) {
        case '(':
            // Method descriptor: let ASM parse both argument and return types.
            try {
                Type methodType = Type.getMethodType(desc);
                methodType.getArgumentTypes();
                methodType.getReturnType();
                return true;
            } catch (Throwable t) {
                return false;
            }
        case 'L':
        case '[':
            // Field descriptor: object types must be ';'-terminated and
            // arrays must carry a resolvable element type.
            try {
                Type type = Type.getType(desc);
                if (type.getSort() == Type.OBJECT && !desc.endsWith(";"))
                    return false;
                if (type.getSort() == Type.ARRAY && type.getElementType() == null)
                    return false;
                return true;
            } catch (Throwable t) {
                return false;
            }
        default:
            // Anything else (including bare primitives) is not accepted.
            return false;
    }
}
|
@Test
void testIsValidDesc() {
    // Well-formed method, array and object descriptors are accepted.
    assertTrue(Types.isValidDesc("([I[[J[[I)V"), "method desc");
    assertTrue(Types.isValidDesc("[I"), "array desc");
    assertTrue(Types.isValidDesc("Ljava/lang/String;"), "object desc");
    // Syntactically valid but unusual internal names are still accepted.
    assertTrue(Types.isValidDesc("LLLLj/av/a/la/ng/S/t/ri/n/g;"), "ugly but valid");
    assertTrue(Types.isValidDesc("L\0;"), "null-terminator is valid");
    assertTrue(Types.isValidDesc("L\n;"), "newline is valid");
    assertTrue(Types.isValidDesc("L;"), "empty is valid");
    // Null, empty, malformed arrays, internal names and unresolvable method
    // argument lists are all rejected.
    assertFalse(Types.isValidDesc(null), "null is invalid");
    assertFalse(Types.isValidDesc(""), "empty string is invalid");
    assertFalse(Types.isValidDesc("["), "array without type is invalid");
    assertFalse(Types.isValidDesc("[P"), "array without valid type is invalid");
    assertFalse(Types.isValidDesc("java/lang/String"), "internal name is invalid");
    assertFalse(Types.isValidDesc("(L;;)V"), "double ;; is unresolvable in method desc args");
}
|
/**
 * Unions this schema with {@code toMerge}, delegating to
 * {@link #union(MessageType, boolean)} with strict mode enabled.
 */
public MessageType union(MessageType toMerge) {
    return union(toMerge, true);
}
|
@Test
public void testMergeSchemaWithOriginalType() throws Exception {
    // Two schemas sharing group "g2": t5's g2 has no logical type and only
    // field "b"; t6's g2 is a LIST carrying "g3" and "b".
    MessageType t5 = new MessageType(
        "root1",
        new GroupType(REQUIRED, "g1", LIST, new PrimitiveType(OPTIONAL, BINARY, "a")),
        new GroupType(REQUIRED, "g2", new PrimitiveType(OPTIONAL, BINARY, "b")));
    MessageType t6 = new MessageType(
        "root1",
        new GroupType(REQUIRED, "g1", LIST, new PrimitiveType(OPTIONAL, BINARY, "a")),
        new GroupType(
            REQUIRED,
            "g2",
            LIST,
            new GroupType(REQUIRED, "g3", new PrimitiveType(OPTIONAL, BINARY, "c")),
            new PrimitiveType(OPTIONAL, BINARY, "b")));
    // The union keeps t5's field order first ("b" before "g3") while adopting
    // t6's LIST annotation and extra group.
    assertEquals(
        new MessageType(
            "root1",
            new GroupType(REQUIRED, "g1", LIST, new PrimitiveType(OPTIONAL, BINARY, "a")),
            new GroupType(
                REQUIRED,
                "g2",
                LIST,
                new PrimitiveType(OPTIONAL, BINARY, "b"),
                new GroupType(REQUIRED, "g3", new PrimitiveType(OPTIONAL, BINARY, "c")))),
        t5.union(t6));
}
|
/**
 * Selects every available compute node and returns their ids.
 *
 * <p>Returns an empty list when this provider was created without compute
 * node usage enabled; otherwise each returned id is also marked as selected.
 */
@Override
public List<Long> selectAllComputeNodes() {
    if (!usedComputeNode) {
        return Collections.emptyList();
    }
    List<Long> selectedIds = availableID2ComputeNode.values().stream()
            .map(node -> node.getId())
            .collect(Collectors.toList());
    // Record every returned worker as selected.
    for (Long workerId : selectedIds) {
        selectWorkerUnchecked(workerId);
    }
    return selectedIds;
}
|
@Test
public void testChooseAllComputedNodes() {
    DefaultWorkerProvider workerProvider;
    List<Long> computeNodes;

    // With compute-node usage disabled, no nodes are returned.
    workerProvider =
        new DefaultWorkerProvider(id2Backend, id2ComputeNode, availableId2Backend, availableId2ComputeNode,
            false);
    computeNodes = workerProvider.selectAllComputeNodes();
    Assert.assertTrue(computeNodes.isEmpty());

    // With it enabled, every available compute node is returned and marked used.
    workerProvider =
        new DefaultWorkerProvider(id2Backend, id2ComputeNode, availableId2Backend, availableId2ComputeNode,
            true);
    computeNodes = workerProvider.selectAllComputeNodes();
    Assert.assertEquals(availableId2ComputeNode.size(), computeNodes.size());
    Set<Long> computeNodeSet = new HashSet<>(computeNodes);
    for (ComputeNode computeNode : availableId2ComputeNode.values()) {
        Assert.assertTrue(computeNodeSet.contains(computeNode.getId()));
        testUsingWorkerHelper(workerProvider, computeNode.getId());
    }
}
|
/**
 * Two mapping keys are equal when their mapping addresses are equal.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // Also rejects null, since null is never an instanceof match.
    if (!(obj instanceof DefaultMappingKey)) {
        return false;
    }
    DefaultMappingKey other = (DefaultMappingKey) obj;
    return Objects.equals(address, other.address);
}
|
@Test
public void testEquals() {
    // Two keys built from the same address form one equality group.
    IpPrefix prefix1 = IpPrefix.valueOf(IP_ADDRESS_1);
    MappingAddress firstAddress = MappingAddresses.ipv4MappingAddress(prefix1);
    MappingKey firstKey = DefaultMappingKey.builder()
            .withAddress(firstAddress)
            .build();
    MappingKey copyOfFirstKey = DefaultMappingKey.builder()
            .withAddress(firstAddress)
            .build();

    // A key built from a different address forms its own group.
    IpPrefix prefix2 = IpPrefix.valueOf(IP_ADDRESS_2);
    MappingAddress secondAddress = MappingAddresses.ipv4MappingAddress(prefix2);
    MappingKey secondKey = DefaultMappingKey.builder()
            .withAddress(secondAddress)
            .build();

    new EqualsTester()
            .addEqualityGroup(firstKey, copyOfFirstKey)
            .addEqualityGroup(secondKey)
            .testEquals();
}
|
/**
 * Compiles a point-to-point intent into installable link-collection intents.
 *
 * <p>Resolution order: a viable suggested path first, then zero-hop when
 * ingress and egress share a device, then an unprotected path, then a
 * protected (disjoint) pair, finally a single-path fallback when no disjoint
 * path exists.
 */
@Override
public List<Intent> compile(PointToPointIntent intent, List<Intent> installable) {
    log.trace("compiling {} {}", intent, installable);
    ConnectPoint ingressPoint = intent.filteredIngressPoint().connectPoint();
    ConnectPoint egressPoint = intent.filteredEgressPoint().connectPoint();

    //TODO: handle protected path case with suggested path!!
    //Idea: use suggested path as primary and another path from path service as protection
    if (intent.suggestedPath() != null && intent.suggestedPath().size() > 0) {
        Path path = new DefaultPath(PID, intent.suggestedPath(), new ScalarWeight(1));
        //Check intent constraints against suggested path and suggested path availability
        if (checkPath(path, intent.constraints()) && pathAvailable(intent)) {
            // Reserve bandwidth on the suggested path before emitting it.
            allocateIntentBandwidth(intent, path);
            return asList(createLinkCollectionIntent(ImmutableSet.copyOf(intent.suggestedPath()),
                DEFAULT_COST, intent));
        }
    }

    // Same-device ingress/egress needs no links at all.
    if (ingressPoint.deviceId().equals(egressPoint.deviceId())) {
        return createZeroHopLinkCollectionIntent(intent);
    }

    // proceed with no protected paths
    if (!ProtectionConstraint.requireProtectedPath(intent)) {
        return createUnprotectedLinkCollectionIntent(intent);
    }

    try {
        // attempt to compute and implement backup path
        return createProtectedIntent(ingressPoint, egressPoint, intent, installable);
    } catch (PathNotFoundException e) {
        log.warn("Could not find disjoint Path for {}", intent);
        // no disjoint path extant -- maximum one path exists between devices
        return createSinglePathIntent(ingressPoint, egressPoint, intent, installable);
    }
}
|
@Test
public void testBandwidthConstrainedIntentSuccess() {
    // Given plenty of available bandwidth, compiling a bandwidth-constrained
    // intent must succeed and produce one installable intent.
    final double bpsTotal = 1000.0;
    final double bpsToReserve = 100.0;

    final ResourceService resourceService =
        MockResourceService.makeCustomBandwidthResourceService(bpsTotal);
    final List<Constraint> constraints =
        Collections.singletonList(new BandwidthConstraint(Bandwidth.bps(bpsToReserve)));

    final PointToPointIntent intent = makeIntent(new ConnectPoint(DID_1, PORT_1),
        new ConnectPoint(DID_3, PORT_2),
        constraints);

    String[] hops = {S1, S2, S3};
    final PointToPointIntentCompiler compiler = makeCompiler(hops,
        resourceService);

    final List<Intent> compiledIntents = compiler.compile(intent, null);

    assertThat(compiledIntents, Matchers.notNullValue());
    assertThat(compiledIntents, hasSize(1));

    // The compiled intent must carry the original intent's key.
    assertThat("key is inherited",
        compiledIntents.stream().map(Intent::key).collect(Collectors.toList()),
        everyItem(is(intent.key())));
}
|
/**
 * Returns the SQL {@code Array} value at the given column from the merged
 * result set.
 */
@Override
public Array getArray(final int columnIndex) throws SQLException {
    Object value = mergeResultSet.getValue(columnIndex, Array.class);
    return (Array) value;
}
|
@Test
void assertGetArrayWithColumnIndex() throws SQLException {
    // The result set must pass the Array value straight through.
    Array expectedArray = mock(Array.class);
    when(mergeResultSet.getValue(1, Array.class)).thenReturn(expectedArray);
    assertThat(shardingSphereResultSet.getArray(1), is(expectedArray));
}
|
/**
 * Reports whether any partition in the cluster has more in-sync replicas
 * than assigned replicas — an inconsistent metadata state.
 */
static boolean hasPartitionsWithIsrGreaterThanReplicas(Cluster cluster) {
    // Scan every partition of every topic; flag on the first offender.
    for (String topic : cluster.topics()) {
        for (PartitionInfo partition : cluster.partitionsForTopic(topic)) {
            if (partition.inSyncReplicas().length > partition.replicas().length) {
                return true;
            }
        }
    }
    return false;
}
|
@Test
public void testHasPartitionsWithIsrGreaterThanReplicas() {
    Cluster cluster = getCluster(Arrays.asList(new TopicPartition(TOPIC0, 0), new TopicPartition(TOPIC0, 1)));
    // Verify: No signal when the cluster has all replicas in sync.
    assertFalse(MonitorUtils.hasPartitionsWithIsrGreaterThanReplicas(cluster));

    // Verify: No signal when the cluster has an URP (ISR smaller than replicas).
    Node[] singletonNode0 = {NODE_0};
    PartitionInfo urp = new PartitionInfo(TOPIC1, 0, NODE_0, nodes(), singletonNode0);
    cluster = cluster.withPartitions(Collections.singletonMap(new TopicPartition(TOPIC1, 0), urp));
    assertFalse(MonitorUtils.hasPartitionsWithIsrGreaterThanReplicas(cluster));

    // Verify: Expect signal when the cluster has partitions with ISR greater than replicas.
    PartitionInfo badPartition = new PartitionInfo(TOPIC1, 1, NODE_0, singletonNode0, nodes());
    cluster = cluster.withPartitions(Collections.singletonMap(new TopicPartition(TOPIC1, 1), badPartition));
    assertTrue(MonitorUtils.hasPartitionsWithIsrGreaterThanReplicas(cluster));
}
|
/**
 * Runs the message-count alert check: counts messages on the stream within
 * the configured relative time range and triggers when the count crosses the
 * configured threshold (MORE / LESS). When triggered, up to {@code backlog}
 * recent messages are attached as summaries.
 */
@Override
public CheckResult runCheck() {
    try {
        // Create an absolute range from the relative range to make sure it doesn't change during the two
        // search requests. (count and find messages)
        // This is needed because the RelativeRange computes the range from NOW on every invocation of getFrom() and
        // getTo().
        // See: https://github.com/Graylog2/graylog2-server/issues/2382
        final RelativeRange relativeRange = RelativeRange.create(time * 60);
        final AbsoluteRange range = AbsoluteRange.create(relativeRange.getFrom(), relativeRange.getTo());

        final String filter = buildQueryFilter(stream.getId(), query);
        final CountResult result = searches.count("*", range, filter);
        final long count = result.count();

        LOG.debug("Alert check <{}> result: [{}]", id, count);

        // Strict comparison: a count exactly at the threshold never triggers.
        final boolean triggered;
        switch (thresholdType) {
            case MORE:
                triggered = count > threshold;
                break;
            case LESS:
                triggered = count < threshold;
                break;
            default:
                triggered = false;
        }

        if (triggered) {
            final List<MessageSummary> summaries = Lists.newArrayList();
            // Fetch the newest matching messages as backlog, if configured.
            if (getBacklog() > 0) {
                final SearchResult backlogResult = searches.search("*", filter,
                    range, getBacklog(), 0, new Sorting(Message.FIELD_TIMESTAMP, Sorting.Direction.DESC));
                for (ResultMessage resultMessage : backlogResult.getResults()) {
                    final Message msg = resultMessage.getMessage();
                    summaries.add(new MessageSummary(resultMessage.getIndex(), msg));
                }
            }

            final String resultDescription = "Stream had " + count + " messages in the last " + time
                + " minutes with trigger condition " + thresholdType.toString().toLowerCase(Locale.ENGLISH)
                + " than " + threshold + " messages. " + "(Current grace time: " + grace + " minutes)";
            return new CheckResult(true, this, resultDescription, Tools.nowUTC(), summaries);
        } else {
            return new NegativeCheckResult();
        }
    } catch (InvalidRangeParametersException e) {
        // cannot happen lol
        LOG.error("Invalid timerange.", e);
        return null;
    }
}
|
@Test
public void testRunCheckLessNegative() throws Exception {
    // With a LESS threshold and a count equal to the threshold, the
    // condition must not trigger (the comparison is strict).
    final MessageCountAlertCondition.ThresholdType lessType = MessageCountAlertCondition.ThresholdType.LESS;
    final MessageCountAlertCondition condition = getConditionWithParameters(lessType, threshold);
    searchCountShouldReturn(threshold);
    final AlertCondition.CheckResult checkResult = condition.runCheck();
    assertNotTriggered(checkResult);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.