| focal_method (string, length 13–60.9k) | test_case (string, length 25–109k) |
|---|---|
@Override
public boolean isEmpty() {
    // Unconditionally reports empty — presumably a null/no-op cache implementation
    // that never stores entries (see NULL_QUERY_CACHE usage in the tests); TODO confirm.
    return true;
}
|
@Test
public void testIsEmpty() throws Exception {
    // The null (no-op) query cache must always report itself as empty.
    assertTrue(NULL_QUERY_CACHE.isEmpty());
}
|
/**
 * Generates output files for every queued schema, plus one interface file when a
 * protocol is present.
 *
 * @return the generated {@code OutputFile}s (one per schema, optionally one for the protocol)
 */
Collection<OutputFile> compile() {
    final List<OutputFile> results = new ArrayList<>(queue.size() + 1);
    for (final Schema queuedSchema : queue) {
        results.add(compile(queuedSchema));
    }
    if (protocol == null) {
        return results;
    }
    results.add(compileInterface(protocol));
    return results;
}
|
@Test
void maxParameterCounts() throws Exception {
    // Schemas at or around MAX_FIELD_PARAMETER_UNIT_COUNT must still compile successfully.
    Schema validSchema1 = createSampleRecordSchema(SpecificCompiler.MAX_FIELD_PARAMETER_UNIT_COUNT, 0);
    assertTrue(new SpecificCompiler(validSchema1).compile().size() > 0);
    Schema validSchema2 = createSampleRecordSchema(SpecificCompiler.MAX_FIELD_PARAMETER_UNIT_COUNT - 2, 1);
    assertTrue(new SpecificCompiler(validSchema2).compile().size() > 0);
    Schema validSchema3 = createSampleRecordSchema(SpecificCompiler.MAX_FIELD_PARAMETER_UNIT_COUNT - 1, 1);
    assertTrue(new SpecificCompiler(validSchema3).compile().size() > 0);
    // NOTE(review): despite the "valid" name this exceeds the limit by one; the compiler
    // apparently still succeeds — confirm whether exceeding the limit is meant to compile.
    Schema validSchema4 = createSampleRecordSchema(SpecificCompiler.MAX_FIELD_PARAMETER_UNIT_COUNT + 1, 0);
    assertTrue(new SpecificCompiler(validSchema4).compile().size() > 0);
}
|
/**
 * Maps a DM (Dameng) database column definition onto a SeaTunnel {@code Column}.
 *
 * <p>Name, nullability, default value and comment are carried over unchanged; the
 * SeaTunnel data type, canonical source-type string, length and scale are derived
 * from the DM data-type keyword. Unsupported DM types raise the standard SeaTunnel
 * conversion error.
 *
 * @param typeDefine the raw column definition read from DM
 * @return the converted physical column
 */
@Override
public Column convert(BasicTypeDefine typeDefine) {
    // Metadata that is independent of the concrete data type.
    PhysicalColumn.PhysicalColumnBuilder builder =
            PhysicalColumn.builder()
                    .name(typeDefine.getName())
                    .nullable(typeDefine.isNullable())
                    .defaultValue(typeDefine.getDefaultValue())
                    .comment(typeDefine.getComment());
    // DM type keywords are matched case-insensitively.
    String dmType = typeDefine.getDataType().toUpperCase();
    switch (dmType) {
        // --- boolean / integral types ---
        case DM_BIT:
            builder.sourceType(DM_BIT);
            builder.dataType(BasicType.BOOLEAN_TYPE);
            break;
        case DM_TINYINT:
            builder.sourceType(DM_TINYINT);
            builder.dataType(BasicType.BYTE_TYPE);
            break;
        case DM_BYTE:
            builder.sourceType(DM_BYTE);
            builder.dataType(BasicType.BYTE_TYPE);
            break;
        case DM_SMALLINT:
            builder.sourceType(DM_SMALLINT);
            builder.dataType(BasicType.SHORT_TYPE);
            break;
        case DM_INT:
            builder.sourceType(DM_INT);
            builder.dataType(BasicType.INT_TYPE);
            break;
        case DM_INTEGER:
            builder.sourceType(DM_INTEGER);
            builder.dataType(BasicType.INT_TYPE);
            break;
        case DM_PLS_INTEGER:
            builder.sourceType(DM_PLS_INTEGER);
            builder.dataType(BasicType.INT_TYPE);
            break;
        case DM_BIGINT:
            builder.sourceType(DM_BIGINT);
            builder.dataType(BasicType.LONG_TYPE);
            break;
        // --- floating-point types ---
        case DM_REAL:
            builder.sourceType(DM_REAL);
            builder.dataType(BasicType.FLOAT_TYPE);
            break;
        case DM_FLOAT:
            builder.sourceType(DM_FLOAT);
            builder.dataType(BasicType.DOUBLE_TYPE);
            break;
        case DM_DOUBLE:
            builder.sourceType(DM_DOUBLE);
            builder.dataType(BasicType.DOUBLE_TYPE);
            break;
        case DM_DOUBLE_PRECISION:
            builder.sourceType(DM_DOUBLE_PRECISION);
            builder.dataType(BasicType.DOUBLE_TYPE);
            break;
        // --- exact numeric types; all normalized to DECIMAL(p,s) ---
        case DM_NUMERIC:
        case DM_NUMBER:
        case DM_DECIMAL:
        case DM_DEC:
            DecimalType decimalType;
            if (typeDefine.getPrecision() != null && typeDefine.getPrecision() > 0) {
                decimalType =
                        new DecimalType(
                                typeDefine.getPrecision().intValue(), typeDefine.getScale());
            } else {
                // No usable precision declared: fall back to the converter defaults.
                decimalType = new DecimalType(DEFAULT_PRECISION, DEFAULT_SCALE);
            }
            builder.sourceType(
                    String.format(
                            "%s(%s,%s)",
                            DM_DECIMAL, decimalType.getPrecision(), decimalType.getScale()));
            builder.dataType(decimalType);
            builder.columnLength((long) decimalType.getPrecision());
            builder.scale(decimalType.getScale());
            break;
        // --- character types; lengths are converted to a 4-byte-per-char budget ---
        case DM_CHAR:
        case DM_CHARACTER:
            builder.sourceType(String.format("%s(%s)", DM_CHAR, typeDefine.getLength()));
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength()));
            break;
        case DM_VARCHAR:
        case DM_VARCHAR2:
            builder.sourceType(String.format("%s(%s)", DM_VARCHAR2, typeDefine.getLength()));
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength()));
            break;
        // --- large character types; the declared length is taken as-is ---
        case DM_TEXT:
            builder.sourceType(DM_TEXT);
            builder.dataType(BasicType.STRING_TYPE);
            // dm text max length is 2147483647
            builder.columnLength(typeDefine.getLength());
            break;
        case DM_LONG:
            builder.sourceType(DM_LONG);
            builder.dataType(BasicType.STRING_TYPE);
            // dm long max length is 2147483647
            builder.columnLength(typeDefine.getLength());
            break;
        case DM_LONGVARCHAR:
            builder.sourceType(DM_LONGVARCHAR);
            builder.dataType(BasicType.STRING_TYPE);
            // dm longvarchar max length is 2147483647
            builder.columnLength(typeDefine.getLength());
            break;
        case DM_CLOB:
            builder.sourceType(DM_CLOB);
            builder.dataType(BasicType.STRING_TYPE);
            // dm clob max length is 2147483647
            builder.columnLength(typeDefine.getLength());
            break;
        // --- binary types ---
        case DM_BINARY:
            builder.sourceType(String.format("%s(%s)", DM_BINARY, typeDefine.getLength()));
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(typeDefine.getLength());
            break;
        case DM_VARBINARY:
            builder.sourceType(String.format("%s(%s)", DM_VARBINARY, typeDefine.getLength()));
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(typeDefine.getLength());
            break;
        case DM_LONGVARBINARY:
            builder.sourceType(DM_LONGVARBINARY);
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(typeDefine.getLength());
            break;
        case DM_IMAGE:
            builder.sourceType(DM_IMAGE);
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(typeDefine.getLength());
            break;
        case DM_BLOB:
            builder.sourceType(DM_BLOB);
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(typeDefine.getLength());
            break;
        // BFILE is an external-file locator, exposed here as a string reference.
        case DM_BFILE:
            builder.sourceType(DM_BFILE);
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(typeDefine.getLength());
            break;
        // --- date/time types; a null scale means "no fractional precision declared" ---
        case DM_DATE:
            builder.sourceType(DM_DATE);
            builder.dataType(LocalTimeType.LOCAL_DATE_TYPE);
            break;
        case DM_TIME:
            if (typeDefine.getScale() == null) {
                builder.sourceType(DM_TIME);
            } else {
                builder.sourceType(String.format("%s(%s)", DM_TIME, typeDefine.getScale()));
            }
            builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
            builder.scale(typeDefine.getScale());
            break;
        case DM_TIME_WITH_TIME_ZONE:
            if (typeDefine.getScale() == null) {
                builder.sourceType(DM_TIME_WITH_TIME_ZONE);
            } else {
                builder.sourceType(
                        String.format("TIME(%s) WITH TIME ZONE", typeDefine.getScale()));
            }
            builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
            builder.scale(typeDefine.getScale());
            break;
        case DM_TIMESTAMP:
            if (typeDefine.getScale() == null) {
                builder.sourceType(DM_TIMESTAMP);
            } else {
                builder.sourceType(
                        String.format("%s(%s)", DM_TIMESTAMP, typeDefine.getScale()));
            }
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            builder.scale(typeDefine.getScale());
            break;
        case DM_DATETIME:
            if (typeDefine.getScale() == null) {
                builder.sourceType(DM_DATETIME);
            } else {
                builder.sourceType(String.format("%s(%s)", DM_DATETIME, typeDefine.getScale()));
            }
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            builder.scale(typeDefine.getScale());
            break;
        case DM_DATETIME_WITH_TIME_ZONE:
            if (typeDefine.getScale() == null) {
                builder.sourceType(DM_DATETIME_WITH_TIME_ZONE);
            } else {
                builder.sourceType(
                        String.format("DATETIME(%s) WITH TIME ZONE", typeDefine.getScale()));
            }
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            builder.scale(typeDefine.getScale());
            break;
        default:
            // Unknown DM keyword: surface the standard conversion error with context.
            throw CommonError.convertToSeaTunnelTypeError(
                    DatabaseIdentifier.DAMENG, typeDefine.getDataType(), typeDefine.getName());
    }
    return builder.build();
}
|
@Test
public void testConvertUnsupported() {
    // An unknown DM data type must be rejected with the SeaTunnel conversion error.
    // assertThrows replaces the legacy try/fail/catch pattern: it asserts the exact
    // exception type and fails automatically on no-throw or a different throwable.
    BasicTypeDefine<Object> typeDefine =
            BasicTypeDefine.builder().name("test").columnType("aaa").dataType("aaa").build();
    Assertions.assertThrows(
            SeaTunnelRuntimeException.class,
            () -> DmdbTypeConverter.INSTANCE.convert(typeDefine));
}
|
/**
 * Removes {@code prefix} from the start of {@code str}, ignoring case. Returns the
 * string unchanged when either argument is empty/null or the prefix does not match.
 *
 * @param str    the source character sequence (may be {@code null})
 * @param prefix the prefix to strip (may be {@code null})
 * @return the remainder after the prefix, or the original string
 */
public static String removePrefixIgnoreCase(CharSequence str, CharSequence prefix) {
    // Nothing to strip when either side is empty; str(null) preserves a null input.
    if (isEmpty(str) || isEmpty(prefix)) {
        return str(str);
    }
    final String source = str.toString();
    return startWithIgnoreCase(str, prefix)
            ? subSuf(source, prefix.length()) // drop the matched prefix, keep the tail
            : source;
}
|
@Test
public void removePrefixIgnoreCaseTest(){
    // Prefix is stripped regardless of case.
    assertEquals("de", CharSequenceUtil.removePrefixIgnoreCase("ABCde", "abc"));
    assertEquals("de", CharSequenceUtil.removePrefixIgnoreCase("ABCde", "ABC"));
    assertEquals("de", CharSequenceUtil.removePrefixIgnoreCase("ABCde", "Abc"));
    // Empty/null prefix leaves the input untouched.
    assertEquals("ABCde", CharSequenceUtil.removePrefixIgnoreCase("ABCde", ""));
    assertEquals("ABCde", CharSequenceUtil.removePrefixIgnoreCase("ABCde", null));
    // Whole-string prefix yields the empty string; a longer "prefix" does not match.
    assertEquals("", CharSequenceUtil.removePrefixIgnoreCase("ABCde", "ABCde"));
    assertEquals("ABCde", CharSequenceUtil.removePrefixIgnoreCase("ABCde", "ABCdef"));
    // Null input is returned as null.
    assertNull(CharSequenceUtil.removePrefixIgnoreCase(null, "ABCdef"));
}
|
@Override
public void processElement(final StreamRecord<T> element) throws Exception {
    final T event = element.getValue();
    // Pass any pre-existing record timestamp to the assigner; Long.MIN_VALUE marks "none".
    final long previousTimestamp =
            element.hasTimestamp() ? element.getTimestamp() : Long.MIN_VALUE;
    final long newTimestamp = timestampAssigner.extractTimestamp(event, previousTimestamp);
    // Re-stamp and forward the record first, then let the watermark generator observe it —
    // this order ensures a watermark triggered by the event cannot precede the event itself.
    element.setTimestamp(newTimestamp);
    output.collect(element);
    watermarkGenerator.onEvent(event, newTimestamp, wmOutput);
}
|
@Test
void periodicWatermarksEmitOnPeriodicEmitStreamMode() throws Exception {
    // Periodic generator: watermarks are emitted on the processing-time timer,
    // after the re-stamped records themselves.
    OneInputStreamOperatorTestHarness<Long, Long> testHarness =
            createTestHarness(
                    WatermarkStrategy.forGenerator((ctx) -> new PeriodicWatermarkGenerator())
                            .withTimestampAssigner((ctx) -> new LongExtractor()));
    testHarness.processElement(new StreamRecord<>(2L, 1));
    testHarness.setProcessingTime(AUTO_WATERMARK_INTERVAL);
    // Record is re-stamped from the extracted value; watermark follows on the timer tick.
    assertThat(pollNextStreamRecord(testHarness)).is(matching(streamRecord(2L, 2L)));
    assertThat(pollNextLegacyWatermark(testHarness)).is(matching(legacyWatermark(1L)));
    testHarness.processElement(new StreamRecord<>(4L, 1));
    testHarness.setProcessingTime(AUTO_WATERMARK_INTERVAL * 2);
    assertThat(pollNextStreamRecord(testHarness)).is(matching(streamRecord(4L, 4L)));
    assertThat(pollNextLegacyWatermark(testHarness)).is(matching(legacyWatermark(3L)));
}
|
/**
 * Converts raw database table metadata into a codegen table row and fills in
 * the derived default fields.
 *
 * @param tableInfo table metadata read from the database
 * @return the populated {@code CodegenTableDO}
 */
public CodegenTableDO buildTable(TableInfo tableInfo) {
    final CodegenTableDO result = CodegenConvert.INSTANCE.convert(tableInfo);
    initTableDefault(result);
    return result;
}
|
@Test
public void testBuildTable() {
    // Prepare arguments
    TableInfo tableInfo = mock(TableInfo.class);
    // Mock behavior
    when(tableInfo.getName()).thenReturn("system_user");
    when(tableInfo.getComment()).thenReturn("用户");
    // Invoke
    CodegenTableDO table = codegenBuilder.buildTable(tableInfo);
    // Assert: defaults are derived from the table name ("system_user" ->
    // module "system", business "user", class "User").
    assertEquals("system_user", table.getTableName());
    assertEquals("用户", table.getTableComment());
    assertEquals("system", table.getModuleName());
    assertEquals("user", table.getBusinessName());
    assertEquals("User", table.getClassName());
    assertEquals("用户", table.getClassComment());
}
|
/**
 * Applies every supported GSUB feature, in the fixed {@code FEATURES_IN_ORDER}
 * order, to the given glyph id sequence.
 *
 * @param originalGlyphIds the glyph ids before substitution
 * @return an unmodifiable list of glyph ids after all supported features were applied
 */
@Override
public List<Integer> applyTransforms(List<Integer> originalGlyphIds)
{
    List<Integer> result = originalGlyphIds;
    for (String feature : FEATURES_IN_ORDER)
    {
        if (gsubData.isFeatureSupported(feature))
        {
            LOG.debug("applying the feature {}", feature);
            ScriptFeature scriptFeature = gsubData.getFeature(feature);
            result = applyGsubFeature(scriptFeature, result);
        }
        else
        {
            // Font does not provide this feature; skip it.
            LOG.debug("the feature {} was not found", feature);
        }
    }
    return Collections.unmodifiableList(result);
}
|
@Test
void testApplyLigaturesFoglihtenNo07() throws IOException
{
    // Verifies ligature substitution (ffi, tt, Th, La, ...) against known glyph ids
    // of the FoglihtenNo07 font.
    CmapLookup cmapLookup;
    GsubWorker gsubWorkerForLatin;
    try (TrueTypeFont ttf = new OTFParser().parse(
            new RandomAccessReadBufferedFile("src/test/resources/otf/FoglihtenNo07.otf")))
    {
        cmapLookup = ttf.getUnicodeCmapLookup();
        gsubWorkerForLatin = new GsubWorkerFactory().getGsubWorker(cmapLookup, ttf.getGsubData());
    }
    // Expected ids were captured from the font; ligature glyphs replace letter runs.
    assertEquals(Arrays.asList(66, 1590, 645, 70),
            gsubWorkerForLatin.applyTransforms(getGlyphIds("affine", cmapLookup)));
    assertEquals(Arrays.asList(538, 633, 85, 86, 69, 70),
            gsubWorkerForLatin.applyTransforms(getGlyphIds("attitude", cmapLookup)));
    assertEquals(Arrays.asList(66, 1590, 525, 74, 683),
            gsubWorkerForLatin.applyTransforms(getGlyphIds("affiliate", cmapLookup)));
    assertEquals(Arrays.asList(542, 1, 1591, 498),
            gsubWorkerForLatin.applyTransforms(getGlyphIds("The film", cmapLookup)));
    assertEquals(Arrays.asList(542, 1, 45, 703, 85),
            gsubWorkerForLatin.applyTransforms(getGlyphIds("The Last", cmapLookup)));
    assertEquals(Arrays.asList(81, 77, 538, 71, 80, 83, 78),
            gsubWorkerForLatin.applyTransforms(getGlyphIds("platform", cmapLookup)));
}
|
@Override
public void restartConnector(final String connName, final Callback<Void> callback) {
    // Delegate to the delayed variant with zero delay, i.e. restart immediately.
    restartConnector(0, connName, callback);
}
|
@Test
public void testRestartConnector() throws Exception {
    // get the initial assignment
    when(member.memberId()).thenReturn("leader");
    when(member.currentProtocolVersion()).thenReturn(CONNECT_PROTOCOL_V0);
    when(worker.isSinkConnector(CONN1)).thenReturn(Boolean.TRUE);
    expectRebalance(1, singletonList(CONN1), Collections.emptyList(), true);
    expectConfigRefreshAndSnapshot(SNAPSHOT);
    when(statusBackingStore.connectors()).thenReturn(Collections.emptySet());
    expectMemberPoll();
    // Complete any connector start immediately with STARTED.
    ArgumentCaptor<Callback<TargetState>> onStart = ArgumentCaptor.forClass(Callback.class);
    doAnswer(invocation -> {
        onStart.getValue().onCompletion(null, TargetState.STARTED);
        return true;
    }).when(worker).startConnector(eq(CONN1), any(), any(), eq(herder), eq(TargetState.STARTED), onStart.capture());
    expectExecuteTaskReconfiguration(true, conn1SinkConfig, invocation -> TASK_CONFIGS);
    // Initial rebalance where this member becomes the leader
    herder.tick();
    expectMemberEnsureActive();
    doNothing().when(worker).stopAndAwaitConnector(CONN1);
    // Request the restart and drive one more tick so the herder processes it.
    FutureCallback<Void> callback = new FutureCallback<>();
    herder.restartConnector(CONN1, callback);
    herder.tick();
    callback.get(1000L, TimeUnit.MILLISECONDS);
    // Connector was started twice (initial assignment + restart) and stopped once in between.
    verify(worker, times(2)).startConnector(eq(CONN1), any(), any(), eq(herder), eq(TargetState.STARTED), any());
    verify(worker, times(2)).connectorTaskConfigs(eq(CONN1), any());
    verify(worker).stopAndAwaitConnector(CONN1);
    verifyNoMoreInteractions(worker, member, configBackingStore, statusBackingStore);
}
|
public int read(final MessageHandler handler)
{
    // Convenience overload: read with no bound on the number of messages per call.
    return read(handler, Integer.MAX_VALUE);
}
|
@Test
void shouldReadNothingFromEmptyBuffer()
{
    // With head at 0 (nothing written), read must consume zero messages and never
    // invoke the handler.
    final long head = 0L;
    when(buffer.getLong(HEAD_COUNTER_INDEX)).thenReturn(head);
    final MessageHandler handler = (msgTypeId, buffer, index, length) -> fail("should not be called");
    final int messagesRead = ringBuffer.read(handler);
    assertThat(messagesRead, is(0));
}
|
/** The unique id of this event; (de)serialized under the {@code FIELD_ID} JSON property. */
@JsonProperty(FIELD_ID)
public abstract String id();
|
@Test
public void ignoreIdFieldWithUnderscore() throws Exception {
    // An Elasticsearch-style document carrying "_id" must still map onto the
    // annotated id property rather than being dropped or duplicated.
    final URL eventString = Resources.getResource(getClass(), "filter-event-from-elasticsearch.json");
    final ObjectMapper objectMapper = new ObjectMapperProvider().get();
    final EventDto eventDto = objectMapper.readValue(eventString, EventDto.class);
    assertThat(eventDto.id()).isEqualTo("01DNM0DVJDV52NA5VEBTYJ6PJY");
}
|
/**
 * Expands the registered macros in the given rule source.
 *
 * @param raw the raw source text; {@code null} or blank input is returned unchanged
 * @return the source with macros expanded
 */
public static String fix(final String raw) {
    // Null/blank input has nothing to expand.
    if (raw == null || raw.trim().isEmpty()) {
        return raw;
    }
    final MacroProcessor processor = new MacroProcessor();
    processor.setMacros(macros);
    return processor.parse(raw);
}
|
@Test
public void testLeaveAssertAlone() {
    // Source that contains no macros must pass through the fixer unchanged.
    final String original = "drools.insert(foo)";
    assertEqualsIgnoreWhitespace( original,
            KnowledgeHelperFixerTest.fixer.fix( original ) );
}
|
/**
 * Creates a {@code MetricName} with no tags.
 *
 * @param group    metric group
 * @param typeName metric type
 * @param name     metric name
 * @return the tag-less metric name
 */
public static MetricName getMetricName(String group, String typeName, String name) {
    // Delegate to the tagged variant; null means "no tags".
    return getMetricName(group, typeName, name, null);
}
|
@Test
public void testTaggedMetricNameWithEmptyValue() {
    // Tags with empty values must be dropped from both MBean name and scope;
    // insertion order drives the MBean name, key order drives the scope.
    LinkedHashMap<String, String> tags = new LinkedHashMap<>();
    tags.put("foo", "bar");
    tags.put("bar", "");
    tags.put("baz", "raz.taz");
    MetricName metricName = KafkaYammerMetrics.getMetricName(
        "kafka.metrics",
        "TestMetrics",
        "TaggedMetric",
        tags
    );
    assertEquals("kafka.metrics", metricName.getGroup());
    assertEquals("TestMetrics", metricName.getType());
    assertEquals("TaggedMetric", metricName.getName());
    // MBean name should preserve initial ordering (with empty key value removed)
    assertEquals("kafka.metrics:type=TestMetrics,name=TaggedMetric,foo=bar,baz=raz.taz",
        metricName.getMBeanName());
    // Scope should be sorted by key (with empty key value removed)
    assertEquals("baz.raz_taz.foo.bar", metricName.getScope());
}
|
@Override
protected void processOptions(LinkedList<String> args)
    throws IOException {
  // Parse the supported ls flags; remaining args are the paths to list.
  CommandFormat cf = new CommandFormat(0, Integer.MAX_VALUE,
      OPTION_PATHONLY, OPTION_DIRECTORY, OPTION_HUMAN,
      OPTION_HIDENONPRINTABLE, OPTION_RECURSIVE, OPTION_REVERSE,
      OPTION_MTIME, OPTION_SIZE, OPTION_ATIME, OPTION_ECPOLICY);
  cf.parse(args);
  pathOnly = cf.getOpt(OPTION_PATHONLY);
  // -d lists the directory itself, which also disables -R.
  dirRecurse = !cf.getOpt(OPTION_DIRECTORY);
  setRecursive(cf.getOpt(OPTION_RECURSIVE) && dirRecurse);
  humanReadable = cf.getOpt(OPTION_HUMAN);
  hideNonPrintable = cf.getOpt(OPTION_HIDENONPRINTABLE);
  orderReverse = cf.getOpt(OPTION_REVERSE);
  // -t (mtime) takes precedence over -S (size) when both are given.
  orderTime = cf.getOpt(OPTION_MTIME);
  orderSize = !orderTime && cf.getOpt(OPTION_SIZE);
  useAtime = cf.getOpt(OPTION_ATIME);
  displayECPolicy = cf.getOpt(OPTION_ECPOLICY);
  // Default to the current directory when no path was given.
  if (args.isEmpty()) args.add(Path.CUR_DIR);
  initialiseOrderComparator();
}
|
// Requesting EC policy display (-e) against a filesystem that does not support
// erasure coding must surface UnsupportedOperationException.
@Test(expected = UnsupportedOperationException.class)
public void processPathFileDisplayECPolicyWhenUnsupported()
    throws IOException {
  TestFile testFile = new TestFile("testDirectory", "testFile");
  LinkedList<PathData> pathData = new LinkedList<PathData>();
  pathData.add(testFile.getPathData());
  Ls ls = new Ls();
  LinkedList<String> options = new LinkedList<String>();
  options.add("-e");
  ls.processOptions(options);
  ls.processArguments(pathData);
}
|
/**
 * Registers a task executor by first connecting to its gateway and then completing
 * the registration on the main thread.
 *
 * <p>The pending connection future is remembered per resource id so that a newer
 * registration attempt for the same task executor invalidates an older, still
 * in-flight one (checked by identity below).
 *
 * @param taskExecutorRegistration the registration request
 * @param timeout RPC timeout (currently unused here; connection uses the RPC service defaults)
 * @return future completing with success or a {@code RegistrationResponse.Failure}
 */
@Override
public CompletableFuture<RegistrationResponse> registerTaskExecutor(
        final TaskExecutorRegistration taskExecutorRegistration, final Time timeout) {
    CompletableFuture<TaskExecutorGateway> taskExecutorGatewayFuture =
            getRpcService()
                    .connect(
                            taskExecutorRegistration.getTaskExecutorAddress(),
                            TaskExecutorGateway.class);
    taskExecutorGatewayFutures.put(
            taskExecutorRegistration.getResourceId(), taskExecutorGatewayFuture);
    return taskExecutorGatewayFuture.handleAsync(
            (TaskExecutorGateway taskExecutorGateway, Throwable throwable) -> {
                final ResourceID resourceId = taskExecutorRegistration.getResourceId();
                // Identity check: only the most recent connection attempt for this
                // resource id may complete the registration.
                if (taskExecutorGatewayFuture == taskExecutorGatewayFutures.get(resourceId)) {
                    taskExecutorGatewayFutures.remove(resourceId);
                    if (throwable != null) {
                        return new RegistrationResponse.Failure(throwable);
                    } else {
                        return registerTaskExecutorInternal(
                                taskExecutorGateway, taskExecutorRegistration);
                    }
                } else {
                    // A newer attempt superseded this one; decline the stale connection.
                    log.debug(
                            "Ignoring outdated TaskExecutorGateway connection for {}.",
                            resourceId.getStringWithMetadata());
                    return new RegistrationResponse.Failure(
                            new FlinkException("Decline outdated task executor registration."));
                }
            },
            getMainThreadExecutor());
}
|
@Test
void testTaskExecutorBecomesUnreachableTriggersDisconnect() throws Exception {
    // A heartbeat that fails with RecipientUnreachableException must make the RM
    // disconnect the task executor and stop the worker.
    final ResourceID taskExecutorId = ResourceID.generate();
    final CompletableFuture<Exception> disconnectFuture = new CompletableFuture<>();
    final CompletableFuture<ResourceID> stopWorkerFuture = new CompletableFuture<>();
    final TaskExecutorGateway taskExecutorGateway =
            new TestingTaskExecutorGatewayBuilder()
                    .setAddress(UUID.randomUUID().toString())
                    .setDisconnectResourceManagerConsumer(disconnectFuture::complete)
                    .setHeartbeatResourceManagerFunction(
                            resourceId ->
                                    FutureUtils.completedExceptionally(
                                            new RecipientUnreachableException(
                                                    "sender",
                                                    "recipient",
                                                    "task executor is unreachable")))
                    .createTestingTaskExecutorGateway();
    rpcService.registerGateway(taskExecutorGateway.getAddress(), taskExecutorGateway);
    runHeartbeatTargetBecomesUnreachableTest(
            builder -> builder.withStopWorkerConsumer(stopWorkerFuture::complete),
            resourceManagerGateway ->
                    registerTaskExecutor(
                            resourceManagerGateway,
                            taskExecutorId,
                            taskExecutorGateway.getAddress()),
            resourceManagerResourceId -> {
                // The TE is told to disconnect with an RM-side exception, and the
                // worker for exactly this task executor is stopped.
                assertThatFuture(disconnectFuture)
                        .eventuallySucceeds()
                        .isInstanceOf(ResourceManagerException.class);
                assertThatFuture(stopWorkerFuture)
                        .eventuallySucceeds()
                        .isEqualTo(taskExecutorId);
            });
}
|
/**
 * Adds a partition to the current transaction if it is not already part of it.
 *
 * <p>No-op for non-transactional producers. For transactional producers this
 * validates the state machine first: a producer id must have been obtained via
 * initTransactions and the producer must currently be IN_TRANSACTION.
 *
 * @param topicPartition the partition being sent to
 * @throws IllegalStateException if called before initTransactions or outside a transaction
 */
public synchronized void maybeAddPartition(TopicPartition topicPartition) {
    maybeFailWithError();
    throwIfPendingState("send");
    if (isTransactional()) {
        if (!hasProducerId()) {
            throw new IllegalStateException("Cannot add partition " + topicPartition +
                " to transaction before completing a call to initTransactions");
        } else if (currentState != State.IN_TRANSACTION) {
            throw new IllegalStateException("Cannot add partition " + topicPartition +
                " to transaction while in state " + currentState);
        } else if (isPartitionAdded(topicPartition) || isPartitionPendingAdd(topicPartition)) {
            // Already tracked (either confirmed or queued) — nothing to do.
            return;
        } else {
            log.debug("Begin adding new partition {} to transaction", topicPartition);
            txnPartitionMap.getOrCreate(topicPartition);
            newPartitionsInTransaction.add(topicPartition);
        }
    }
}
|
@Test
public void testFailIfNotReadyForSendNoProducerId() {
    // Without a producer id (no initTransactions), adding a partition must fail.
    assertThrows(IllegalStateException.class, () -> transactionManager.maybeAddPartition(tp0));
}
|
/**
 * Analyzes a predicate tree, computing its total size, the minimum number of
 * features required to match, and the per-subtree sizes.
 *
 * @param predicate the root of the predicate tree
 * @return the analysis result (minFeature, treeSize, subtree size map)
 */
public static PredicateTreeAnalyzerResult analyzePredicateTree(Predicate predicate) {
    AnalyzerContext context = new AnalyzerContext();
    int treeSize = aggregatePredicateStatistics(predicate, false, context);
    // minFeature is rounded up; a negation anywhere adds one extra required feature.
    int minFeature = ((int)Math.ceil(findMinFeature(predicate, false, context))) + (context.hasNegationPredicate ? 1 : 0);
    return new PredicateTreeAnalyzerResult(minFeature, treeSize, context.subTreeSizes);
}
|
@Test
void require_that_multilevel_and_stores_size() {
    // Nested AND: 5 leaves total, so minFeature == treeSize == 5; the size map
    // records every sub-AND and every leaf (7 entries).
    Predicate p =
            and(
                    and(
                            feature("foo").inSet("bar"),
                            feature("baz").inSet("qux"),
                            feature("quux").inSet("corge")),
                    and(
                            feature("grault").inSet("garply"),
                            feature("waldo").inSet("fred")));
    PredicateTreeAnalyzerResult r = PredicateTreeAnalyzer.analyzePredicateTree(p);
    assertEquals(5, r.minFeature);
    assertEquals(5, r.treeSize);
    assertEquals(7, r.sizeMap.size());
    // Each sub-AND stores its leaf count; each leaf stores size 1.
    assertSizeMapContains(r, pred(p).child(0), 3);
    assertSizeMapContains(r, pred(p).child(1), 2);
    assertSizeMapContains(r, pred(p).child(0).child(0), 1);
    assertSizeMapContains(r, pred(p).child(0).child(1), 1);
    assertSizeMapContains(r, pred(p).child(0).child(2), 1);
    assertSizeMapContains(r, pred(p).child(1).child(0), 1);
    assertSizeMapContains(r, pred(p).child(1).child(1), 1);
}
|
@Override
public ClusterHealth checkCluster() {
    // Cluster-mode only; both preconditions are programming-error guards.
    checkState(!nodeInformation.isStandalone(), "Clustering is not enabled");
    checkState(sharedHealthState != null, "HealthState instance can't be null when clustering is enabled");
    Set<NodeHealth> nodeHealths = sharedHealthState.readAll();
    // Run every cluster check over the same node snapshot and merge the results,
    // starting from GREEN (merge presumably keeps the worst status — confirm in HealthReducer).
    Health health = clusterHealthChecks.stream()
        .map(clusterHealthCheck -> clusterHealthCheck.check(nodeHealths))
        .reduce(Health.GREEN, HealthReducer::merge);
    return new ClusterHealth(health, nodeHealths);
}
|
@Test
public void checkCluster_returns_GREEN_status_if_only_GREEN_statuses_returned_by_ClusterHealthChecks() {
    when(nodeInformation.isStandalone()).thenReturn(false);
    // A random (1..20) number of checks, all reporting GREEN.
    List<Health.Status> statuses = IntStream.range(1, 1 + random.nextInt(20)).mapToObj(i -> GREEN).toList();
    HealthCheckerImpl underTest = newClusterHealthCheckerImpl(statuses.stream());
    // Merging only GREEN statuses must yield GREEN.
    assertThat(underTest.checkCluster().getHealth().getStatus())
        .describedAs("%s should have been computed from %s statuses", GREEN, statuses)
        .isEqualTo(GREEN);
}
|
@Override
public Collection<LocalDataQueryResultRow> getRows(final ShowGlobalClockRuleStatement sqlStatement, final ContextManager contextManager) {
    // The global clock rule is a singleton: exactly one row (type, provider, enabled, props).
    GlobalClockRuleConfiguration ruleConfig = rule.getConfiguration();
    return Collections.singleton(new LocalDataQueryResultRow(ruleConfig.getType(), ruleConfig.getProvider(), ruleConfig.isEnabled(), ruleConfig.getProps()));
}
|
@Test
void assertGlobalClockRule() throws SQLException {
    engine.executeQuery();
    Collection<LocalDataQueryResultRow> actual = engine.getRows();
    // Exactly one row with the columns (type, provider, enabled, props) in order.
    assertThat(actual.size(), is(1));
    Iterator<LocalDataQueryResultRow> iterator = actual.iterator();
    LocalDataQueryResultRow row = iterator.next();
    assertThat(row.getCell(1), is("TSO"));
    assertThat(row.getCell(2), is("local"));
    assertThat(row.getCell(3), is("false"));
    assertThat(row.getCell(4), is("{\"key\":\"value\"}"));
}
|
/**
 * Returns a {@code PublishResult} coder that encodes response metadata and HTTP
 * metadata but drops the HTTP headers.
 */
public static Coder<PublishResult> fullPublishResultWithoutHeaders() {
    return new PublishResultCoder(
        RESPONSE_METADATA_CODER, NullableCoder.of(AwsCoders.sdkHttpMetadataWithoutHeaders()));
}
|
@Test
public void testFullPublishResultWithoutHeadersDecodeEncodeEquals() throws Exception {
    // Round-trip of a minimal result must be lossless.
    CoderProperties.coderDecodeEncodeEqual(
        PublishResultCoders.fullPublishResultWithoutHeaders(),
        new PublishResult().withMessageId(UUID.randomUUID().toString()));
    PublishResult value = buildFullPublishResult();
    PublishResult clone =
        CoderUtils.clone(PublishResultCoders.fullPublishResultWithoutHeaders(), value);
    // Metadata and status survive the round-trip …
    assertThat(
        clone.getSdkResponseMetadata().getRequestId(),
        equalTo(value.getSdkResponseMetadata().getRequestId()));
    assertThat(
        clone.getSdkHttpMetadata().getHttpStatusCode(),
        equalTo(value.getSdkHttpMetadata().getHttpStatusCode()));
    // … but the HTTP headers are intentionally dropped by this coder variant.
    assertThat(clone.getSdkHttpMetadata().getHttpHeaders().isEmpty(), equalTo(true));
}
|
public JunctionTree junctionTree(List<OpenBitSet> cliques, boolean init) {
    // Convenience overload: no precomputed graph/ordering data, just the cliques.
    return junctionTree(null, null, null, cliques, init);
}
|
@Test
public void testJunctionWithPruning2() {
    // Builds a junction tree from four overlapping cliques over eight nodes and
    // verifies the resulting parent/child structure and separator bitsets.
    Graph<BayesVariable> graph = new BayesNetwork();
    GraphNode x0 = addNode(graph);
    GraphNode x1 = addNode(graph);
    GraphNode x2 = addNode(graph);
    GraphNode x3 = addNode(graph);
    GraphNode x4 = addNode(graph);
    GraphNode x5 = addNode(graph);
    GraphNode x6 = addNode(graph);
    GraphNode x7 = addNode(graph);
    // Cliques expressed as variable bitsets.
    OpenBitSet clique1 = bitSet("00001111");
    OpenBitSet clique2 = bitSet("00111100");
    OpenBitSet clique3 = bitSet("11100000");
    OpenBitSet clique4 = bitSet("00100001");
    // Pairwise intersections, used as the expected separator bitsets.
    OpenBitSet intersect1And2 = clique2.clone();
    intersect1And2.and(clique1);
    OpenBitSet intersect2And3 = clique2.clone();
    intersect2And3.and(clique3);
    OpenBitSet intersect1And4 = clique1.clone();
    intersect1And4.and(clique4);
    List<OpenBitSet> cliques = new ArrayList<OpenBitSet>();
    cliques.add(clique1);
    cliques.add(clique2);
    cliques.add(clique3);
    cliques.add(clique4);
    JunctionTreeBuilder jtBuilder = new JunctionTreeBuilder( graph );
    JunctionTreeClique root = jtBuilder.junctionTree(cliques, false).getRoot();
    // Root is clique1 with two children: clique2 (leading to clique3) and clique4.
    assertThat(root.getBitSet()).isEqualTo(clique1);
    assertThat(root.getChildren().size()).isEqualTo(2);
    JunctionTreeSeparator sep = root.getChildren().get(0);
    assertThat(sep.getParent().getBitSet()).isEqualTo(clique1);
    assertThat(sep.getChild().getBitSet()).isEqualTo(clique2);
    assertThat(sep.getChild().getChildren().size()).isEqualTo(1);
    JunctionTreeClique middle = sep.getChild();
    assertThat(middle.getBitSet()).isEqualTo(clique2);
    assertThat(middle.getChildren().size()).isEqualTo(1);
    sep = middle.getChildren().get(0);
    assertThat(sep.getParent().getBitSet()).isEqualTo(clique2);
    assertThat(sep.getChild().getBitSet()).isEqualTo(clique3);
    assertThat(sep.getBitSet()).isEqualTo(intersect2And3);
    assertThat(sep.getChild().getChildren().size()).isEqualTo(0);
    sep = root.getChildren().get(1);
    assertThat(sep.getParent().getBitSet()).isEqualTo(clique1);
    assertThat(sep.getChild().getBitSet()).isEqualTo(clique4);
    assertThat(sep.getBitSet()).isEqualTo(intersect1And4);
    assertThat(sep.getChild().getChildren().size()).isEqualTo(0);
}
|
@Override
public String getName() {
    // Returns the name stored in the _name field (set elsewhere in this class).
    return _name;
}
|
@Test
public void testStringRepeatTransformFunction() {
    // Case 1: repeat(column, n) — plain repetition, no separator.
    int repeatCount = 21;
    ExpressionContext expression =
        RequestContextUtils.getExpression(String.format("repeat(%s, %d)", STRING_ALPHANUM_SV_COLUMN, repeatCount));
    TransformFunction transformFunction = TransformFunctionFactory.get(expression, _dataSourceMap);
    assertTrue(transformFunction instanceof ScalarTransformFunctionWrapper);
    assertEquals(transformFunction.getName(), "repeat");
    String[] expectedValues = new String[NUM_ROWS];
    for (int row = 0; row < NUM_ROWS; row++) {
      expectedValues[row] = StringUtils.repeat(_stringAlphaNumericSVValues[row], repeatCount);
    }
    testTransformFunction(transformFunction, expectedValues);
    // Case 2: repeat(column, separator, n) — repetition joined by a separator.
    String separator = "::";
    expression = RequestContextUtils.getExpression(
        String.format("repeat(%s, '%s', %d)", STRING_ALPHANUM_SV_COLUMN, separator, repeatCount));
    transformFunction = TransformFunctionFactory.get(expression, _dataSourceMap);
    assertTrue(transformFunction instanceof ScalarTransformFunctionWrapper);
    assertEquals(transformFunction.getName(), "repeat");
    expectedValues = new String[NUM_ROWS];
    for (int row = 0; row < NUM_ROWS; row++) {
      expectedValues[row] = StringUtils.repeat(_stringAlphaNumericSVValues[row], separator, repeatCount);
    }
    testTransformFunction(transformFunction, expectedValues);
    // Case 3: negative repeat count yields empty strings.
    repeatCount = -1;
    expression = RequestContextUtils.getExpression(
        String.format("repeat(%s, '%s', %d)", STRING_ALPHANUM_SV_COLUMN, separator, repeatCount));
    transformFunction = TransformFunctionFactory.get(expression, _dataSourceMap);
    assertTrue(transformFunction instanceof ScalarTransformFunctionWrapper);
    assertEquals(transformFunction.getName(), "repeat");
    expectedValues = new String[NUM_ROWS];
    for (int row = 0; row < NUM_ROWS; row++) {
      expectedValues[row] = "";
    }
    testTransformFunction(transformFunction, expectedValues);
}
|
/**
 * Parses a PEM-encoded public key into a {@link PublicKey}.
 *
 * @param publicPemKey the PEM text (e.g. "-----BEGIN PUBLIC KEY-----...")
 * @return the parsed public key
 * @throws RuntimeException wrapping the {@link IOException}/PEMException when the
 *         input is not valid PEM
 */
public PublicKey convertPublicKey(final String publicPemKey) {
    // try-with-resources: the original leaked the PEMParser (a Reader) on every call.
    try (PEMParser pemParser = new PEMParser(new StringReader(publicPemKey))) {
        SubjectPublicKeyInfo publicKeyInfo =
                SubjectPublicKeyInfo.getInstance(pemParser.readObject());
        return new JcaPEMKeyConverter().getPublicKey(publicKeyInfo);
    } catch (IOException exception) {
        // Preserve the original contract: callers expect RuntimeException with the
        // IOException/PEMException as cause.
        throw new RuntimeException(exception);
    }
}
|
@Test
void givenEmptyPublicKey_whenConvertPublicKey_thenThrowRuntimeException() {
    // Given: no PEM content at all
    String emptyPublicPemKey = "";
    // When & Then: parsing fails and is surfaced as RuntimeException caused by PEMException
    assertThatThrownBy(() -> KeyConverter.convertPublicKey(emptyPublicPemKey))
        .isInstanceOf(RuntimeException.class)
        .hasCauseInstanceOf(PEMException.class)
        .hasMessageContaining("PEMException");
}
|
@Override
@Transactional(rollbackFor = Exception.class)
public void deleteJob(Long id) throws SchedulerException {
    // Validate that the job exists (throws otherwise)
    JobDO job = validateJobExists(id);
    // Delete the database record
    jobMapper.deleteById(id);
    // Remove the corresponding job from Quartz
    schedulerManager.deleteJob(job.getHandlerName());
}
|
@Test
public void testDeleteJob_success() throws SchedulerException {
    // Prepare data
    JobDO job = randomPojo(JobDO.class);
    jobMapper.insert(job);
    // Invoke
    jobService.deleteJob(job.getId());
    // Assert the record is gone
    assertNull(jobMapper.selectById(job.getId()));
    // Assert Quartz was told to delete the job
    verify(schedulerManager).deleteJob(eq(job.getHandlerName()));
}
|
@Override
public Headers add(Header header) throws IllegalStateException {
    // Null check first, then the writability check — order is part of the contract
    // (a null header on read-only headers still raises NPE, not IllegalState).
    Objects.requireNonNull(header, "Header cannot be null.");
    // canWrite() presumably throws the declared IllegalStateException once the
    // headers are read-only — confirm against its definition.
    canWrite();
    headers.add(header);
    return this;
}
|
@Test
public void shouldThrowNpeWhenAddingNullHeader() {
    // Adding null must fail fast with NullPointerException.
    final RecordHeaders recordHeaders = new RecordHeaders();
    assertThrows(NullPointerException.class, () -> recordHeaders.add(null));
}
|
@Override
public Mono<Long> delete(final long id) {
    // Check all four kinds of attached resources concurrently …
    return Mono.zip(
            dataSourceRepository.existsByNamespace(id),
            collectorRepository.existsByNamespace(id),
            termRepository.existsByNamespace(id),
            dataEntityRepository.existsNonDeletedByNamespaceId(id)
        )
        // … any one of them existing blocks the delete.
        .map(t -> BooleanUtils.toBoolean(t.getT1())
            || BooleanUtils.toBoolean(t.getT2())
            || BooleanUtils.toBoolean(t.getT3())
            || BooleanUtils.toBoolean(t.getT4()))
        // filter passes only when nothing is attached; otherwise the empty Mono
        // is turned into a CascadeDeleteException.
        .filter(exists -> !exists)
        .switchIfEmpty(Mono.error(new CascadeDeleteException(
            "Namespace cannot be deleted: there are still resources attached")))
        .flatMap(ign -> namespaceRepository.delete(id))
        .map(NamespacePojo::getId);
}
|
@Test
@DisplayName("Deletes a namespace which isn't tied with any data sources, collector or term from the database")
public void testDelete() {
    final long namespaceId = 1L;
    final NamespacePojo namespace = new NamespacePojo()
        .setId(namespaceId)
        .setDeletedAt(LocalDateTime.now());
    // No attached resources of any kind -> deletion is allowed.
    when(dataSourceRepository.existsByNamespace(eq(namespaceId))).thenReturn(Mono.just(false));
    when(collectorRepository.existsByNamespace(eq(namespaceId))).thenReturn(Mono.just(false));
    when(termRepository.existsByNamespace(eq(namespaceId))).thenReturn(Mono.just(false));
    when(dataEntityRepository.existsNonDeletedByNamespaceId(eq(namespaceId))).thenReturn(Mono.just(false));
    when(namespaceRepository.delete(eq(namespaceId))).thenReturn(Mono.just(namespace));
    namespaceService.delete(namespaceId)
        .as(StepVerifier::create)
        .assertNext(deletedNamespaceId -> assertThat(deletedNamespaceId).isEqualTo(namespaceId))
        .verifyComplete();
    // Each collaborator is touched exactly once, with the expected id.
    verify(namespaceRepository, only()).delete(eq(namespaceId));
    verify(dataSourceRepository, only()).existsByNamespace(eq(namespaceId));
    verify(collectorRepository, only()).existsByNamespace(eq(namespaceId));
    verify(termRepository, only()).existsByNamespace(eq(namespaceId));
    verify(dataEntityRepository, only()).existsNonDeletedByNamespaceId(eq(namespaceId));
}
|
/**
 * Derives the cluster-specific high-availability storage path by resolving the
 * configured cluster id against the configured HA storage root.
 *
 * @param configuration configuration providing {@code HA_STORAGE_PATH} and {@code HA_CLUSTER_ID}
 * @return the path {@code <ha-storage-path>/<cluster-id>}
 * @throws IllegalConfigurationException if the storage path is missing/blank or either
 *     path component cannot be parsed into a valid {@code Path}
 */
public static Path getClusterHighAvailableStoragePath(Configuration configuration) {
    final String storagePath = configuration.getValue(HighAvailabilityOptions.HA_STORAGE_PATH);
    if (isNullOrWhitespaceOnly(storagePath)) {
        throw new IllegalConfigurationException(
                "Configuration is missing the mandatory parameter: "
                        + HighAvailabilityOptions.HA_STORAGE_PATH);
    }
    final Path rootPath;
    try {
        rootPath = new Path(storagePath);
    } catch (Exception e) {
        throw new IllegalConfigurationException(
                "Invalid path for highly available storage ("
                        + HighAvailabilityOptions.HA_STORAGE_PATH.key()
                        + ')',
                e);
    }
    final String clusterId = configuration.getValue(HighAvailabilityOptions.HA_CLUSTER_ID);
    try {
        // Resolve the cluster id relative to the HA root; a malformed cluster id
        // surfaces here as an invalid child path.
        return new Path(rootPath, clusterId);
    } catch (Exception e) {
        throw new IllegalConfigurationException(
                String.format(
                        "Cannot create cluster high available storage path '%s/%s'. This indicates that an invalid cluster id (%s) has been specified.",
                        storagePath, clusterId, HighAvailabilityOptions.HA_CLUSTER_ID.key()),
                e);
    }
}
|
@Test
public void testGetClusterHighAvailableStoragePath() throws IOException {
    // Use a real temp directory as the HA root and a random cluster id.
    final String haStorageRootDirectory = temporaryFolder.newFolder().getAbsolutePath();
    final String clusterId = UUID.randomUUID().toString();
    final Configuration configuration = new Configuration();
    configuration.set(HighAvailabilityOptions.HA_STORAGE_PATH, haStorageRootDirectory);
    configuration.set(HighAvailabilityOptions.HA_CLUSTER_ID, clusterId);
    final Path clusterHighAvailableStoragePath =
            HighAvailabilityServicesUtils.getClusterHighAvailableStoragePath(configuration);
    // The result must be the cluster id resolved directly under the HA root.
    final Path expectedPath = new Path(haStorageRootDirectory, clusterId);
    assertThat(clusterHighAvailableStoragePath, is(expectedPath));
}
|
/**
 * Returns the first suggestion whose search URL contains the (raw-encoded) query,
 * or the first suggestion in the list when none matches.
 *
 * <p>Note: callers must pass a non-empty list; an empty list raises
 * {@link IndexOutOfBoundsException} from the fallback.
 */
static SuggestedItem findClosestSuggestedItem(List<SuggestedItem> r, String query) {
    // The encoded form is loop-invariant, so compute it once up front.
    final String encodedQuery = Util.rawEncode(query);
    for (SuggestedItem candidate : r) {
        if (LOGGER.isLoggable(Level.FINE)) {
            LOGGER.fine(String.format("item's searchUrl:%s;query=%s", candidate.item.getSearchUrl(), query));
        }
        if (candidate.item.getSearchUrl().contains(encodedQuery)) {
            return candidate;
        }
    }
    // couldn't find an item with the query in the url so just
    // return the first one
    return r.get(0);
}
|
@Test
public void findClosestSuggestedItem() {
    final String query = "foobar 123";
    final String searchName = "sameDisplayName";
    // Item whose URL embeds the raw-encoded query -> should be preferred.
    SearchItem searchItemHit = new SearchItem() {
        @Override
        public SearchIndex getSearchIndex() {
            return null;
        }
        @Override
        public String getSearchName() {
            return searchName;
        }
        @Override
        public String getSearchUrl() {
            return "/job/" + Util.rawEncode(query) + "/";
        }
    };
    // Item whose URL does not contain the query -> only picked as fallback.
    SearchItem searchItemNoHit = new SearchItem() {
        @Override
        public SearchIndex getSearchIndex() {
            return null;
        }
        @Override
        public String getSearchName() {
            return searchName;
        }
        @Override
        public String getSearchUrl() {
            return "/job/someotherJob/";
        }
    };
    SuggestedItem suggestedHit = new SuggestedItem(searchItemHit);
    SuggestedItem suggestedNoHit = new SuggestedItem(searchItemNoHit);
    ArrayList<SuggestedItem> list = new ArrayList<>();
    list.add(suggestedNoHit);
    list.add(suggestedHit); // make sure the hit is the second item
    // URL match wins even though it is not the first element.
    SuggestedItem found = Search.findClosestSuggestedItem(list, query);
    assertEquals(searchItemHit, found.item);
    // No URL contains "abcd": the first element is returned as a fallback.
    SuggestedItem found2 = Search.findClosestSuggestedItem(list, "abcd");
    assertEquals(searchItemNoHit, found2.item);
}
|
@Override
public InterpreterResult interpret(final String st, final InterpreterContext context)
    throws InterpreterException {
  if (LOGGER.isDebugEnabled()) {
    LOGGER.debug("st:\n{}", st);
  }
  final FormType form = getFormType();
  RemoteInterpreterProcess interpreterProcess = null;
  try {
    interpreterProcess = getOrCreateInterpreterProcess();
  } catch (IOException e) {
    throw new InterpreterException(e);
  }
  // Fail fast rather than attempting an RPC against a dead process.
  if (!interpreterProcess.isRunning()) {
    return new InterpreterResult(InterpreterResult.Code.ERROR,
        "Interpreter process is not running\n" + interpreterProcess.getErrorMessage());
  }
  return interpreterProcess.callRemoteFunction(client -> {
    // Execute the paragraph remotely and mirror the returned config/GUI state
    // back into the local context.
    RemoteInterpreterResult remoteResult = client.interpret(
        sessionId, className, st, convert(context));
    Map<String, Object> remoteConfig = (Map<String, Object>) GSON.fromJson(
        remoteResult.getConfig(), new TypeToken<Map<String, Object>>() {
        }.getType());
    // Replace the local config wholesale with the remote one (clear + putAll).
    context.getConfig().clear();
    if (remoteConfig != null) {
      context.getConfig().putAll(remoteConfig);
    }
    GUI currentGUI = context.getGui();
    GUI currentNoteGUI = context.getNoteGui();
    if (form == FormType.NATIVE) {
      // NATIVE forms: the remote side owns the full GUI state, so overwrite it.
      GUI remoteGui = GUI.fromJson(remoteResult.getGui());
      GUI remoteNoteGui = GUI.fromJson(remoteResult.getNoteGui());
      currentGUI.clear();
      currentGUI.setParams(remoteGui.getParams());
      currentGUI.setForms(remoteGui.getForms());
      currentNoteGUI.setParams(remoteNoteGui.getParams());
      currentNoteGUI.setForms(remoteNoteGui.getForms());
    } else if (form == FormType.SIMPLE) {
      // SIMPLE forms: merge remote forms/params into the existing local ones.
      final Map<String, Input> currentForms = currentGUI.getForms();
      final Map<String, Object> currentParams = currentGUI.getParams();
      final GUI remoteGUI = GUI.fromJson(remoteResult.getGui());
      final Map<String, Input> remoteForms = remoteGUI.getForms();
      final Map<String, Object> remoteParams = remoteGUI.getParams();
      currentForms.putAll(remoteForms);
      currentParams.putAll(remoteParams);
    }
    return convert(remoteResult);
  }
  );
}
|
@Test
public void testParallelScheduler() throws InterruptedException, InterpreterException {
  interpreterSetting.getOption().setPerUser(InterpreterOption.SHARED);
  // Enable the parallel scheduler so two interpret calls may run concurrently.
  interpreterSetting.setProperty("zeppelin.SleepInterpreter.parallel", "true");
  final Interpreter interpreter1 = interpreterSetting.getInterpreter("user1", note1Id, "sleep");
  final InterpreterContext context1 = createDummyInterpreterContext();
  // run this dummy interpret method first to launch the RemoteInterpreterProcess to avoid the
  // time overhead of launching the process.
  interpreter1.interpret("1", context1);
  Thread thread1 = new Thread() {
    @Override
    public void run() {
      try {
        assertEquals(Code.SUCCESS, interpreter1.interpret("100", context1).code());
      } catch (InterpreterException e) {
        e.printStackTrace();
        fail();
      }
    }
  };
  Thread thread2 = new Thread() {
    @Override
    public void run() {
      try {
        assertEquals(Code.SUCCESS, interpreter1.interpret("100", context1).code());
      } catch (InterpreterException e) {
        e.printStackTrace();
        fail();
      }
    }
  };
  long start = System.currentTimeMillis();
  thread1.start();
  thread2.start();
  thread1.join();
  thread2.join();
  long end = System.currentTimeMillis();
  // Two 100ms sleeps completing within 200ms implies they overlapped.
  // NOTE(review): wall-clock based and potentially flaky on loaded CI hosts — consider
  // a larger margin or a latch-based concurrency check. TODO confirm.
  assertTrue((end - start) <= 200);
}
|
/**
 * Performs an HTTP POST over the given connection: writes the headers and request
 * body (via {@code handleInput}), then reads and returns the response body
 * (via {@code handleOutput}).
 *
 * @param connectTimeoutMs optional connect timeout in milliseconds (null = default)
 * @param readTimeoutMs optional read timeout in milliseconds (null = default)
 * @throws IOException on I/O failure while sending or receiving
 * @throws UnretryableException when the failure must not be retried
 */
public static String post(HttpURLConnection con,
    Map<String, String> headers,
    String requestBody,
    Integer connectTimeoutMs,
    Integer readTimeoutMs)
    throws IOException, UnretryableException {
    handleInput(con, headers, requestBody, connectTimeoutMs, readTimeoutMs);
    return handleOutput(con);
}
|
@Test
public void testErrorResponseRetryableCode() throws IOException {
    HttpURLConnection mockedCon = createHttpURLConnection("dummy");
    // Force the success path to fail so the error stream is consulted instead.
    when(mockedCon.getInputStream()).thenThrow(new IOException("Can't read"));
    when(mockedCon.getErrorStream()).thenReturn(new ByteArrayInputStream(
        "{\"error\":\"some_arg\", \"error_description\":\"some problem with arg\"}"
            .getBytes(StandardCharsets.UTF_8)));
    // 5xx -> retryable, so a plain IOException (not UnretryableException) is expected.
    when(mockedCon.getResponseCode()).thenReturn(HttpURLConnection.HTTP_INTERNAL_ERROR);
    IOException ioe = assertThrows(IOException.class,
        () -> HttpAccessTokenRetriever.post(mockedCon, null, null, null, null));
    // Known OAuth error keys are condensed into "{code - description}" form.
    assertTrue(ioe.getMessage().contains("{\"some_arg\" - \"some problem with arg\"}"));
    // error response body has different keys
    when(mockedCon.getErrorStream()).thenReturn(new ByteArrayInputStream(
        "{\"errorCode\":\"some_arg\", \"errorSummary\":\"some problem with arg\"}"
            .getBytes(StandardCharsets.UTF_8)));
    ioe = assertThrows(IOException.class,
        () -> HttpAccessTokenRetriever.post(mockedCon, null, null, null, null));
    assertTrue(ioe.getMessage().contains("{\"some_arg\" - \"some problem with arg\"}"));
    // error response is valid json but unknown keys
    when(mockedCon.getErrorStream()).thenReturn(new ByteArrayInputStream(
        "{\"err\":\"some_arg\", \"err_des\":\"some problem with arg\"}"
            .getBytes(StandardCharsets.UTF_8)));
    ioe = assertThrows(IOException.class,
        () -> HttpAccessTokenRetriever.post(mockedCon, null, null, null, null));
    // Unknown keys: the raw JSON body is propagated verbatim in the message.
    assertTrue(ioe.getMessage().contains("{\"err\":\"some_arg\", \"err_des\":\"some problem with arg\"}"));
}
|
/**
 * Builds the details info for the current execution attempt of the given subtask,
 * attaching any other concurrently running attempts when speculative execution
 * produced more than one.
 */
@Override
protected SubtaskExecutionAttemptDetailsInfo handleRequest(
        HandlerRequest<EmptyRequestBody> request, AccessExecutionVertex executionVertex)
        throws RestHandlerException {
    final AccessExecution currentAttempt = executionVertex.getCurrentExecutionAttempt();
    final JobID jobID = request.getPathParameter(JobIDPathParameter.class);
    final JobVertexID jobVertexID = request.getPathParameter(JobVertexIdPathParameter.class);
    final Collection<AccessExecution> allAttempts = executionVertex.getCurrentExecutions();
    // Stays null when there is only the single current attempt.
    List<SubtaskExecutionAttemptDetailsInfo> otherConcurrentAttempts = null;
    if (allAttempts.size() > 1) {
        otherConcurrentAttempts = new ArrayList<>();
        for (AccessExecution attempt : allAttempts) {
            // Skip the current attempt itself; it is the top-level result below.
            if (attempt.getAttemptNumber() == currentAttempt.getAttemptNumber()) {
                continue;
            }
            otherConcurrentAttempts.add(
                    SubtaskExecutionAttemptDetailsInfo.create(
                            attempt, metricFetcher, jobID, jobVertexID, null));
        }
    }
    return SubtaskExecutionAttemptDetailsInfo.create(
            currentAttempt, metricFetcher, jobID, jobVertexID, otherConcurrentAttempts);
}
|
@Test
void testHandleRequest() throws Exception {
    // Prepare the execution graph.
    final JobID jobID = new JobID();
    final JobVertexID jobVertexID = new JobVertexID();
    // The testing subtask.
    final long deployingTs = System.currentTimeMillis() - 1024;
    final long finishedTs = System.currentTimeMillis();
    final long bytesIn = 1L;
    final long bytesOut = 10L;
    final long recordsIn = 20L;
    final long recordsOut = 30L;
    final long accumulateIdleTime = 40L;
    final long accumulateBusyTime = 50L;
    final long accumulateBackPressuredTime = 60L;
    final IOMetrics ioMetrics =
            new IOMetrics(
                    bytesIn,
                    bytesOut,
                    recordsIn,
                    recordsOut,
                    accumulateIdleTime,
                    accumulateBusyTime,
                    accumulateBackPressuredTime);
    // Per-state start/end timestamps, indexed by ExecutionState ordinal.
    final long[] timestamps = new long[ExecutionState.values().length];
    final long[] endTimestamps = new long[ExecutionState.values().length];
    timestamps[ExecutionState.DEPLOYING.ordinal()] = deployingTs;
    endTimestamps[ExecutionState.DEPLOYING.ordinal()] = deployingTs + 10;
    final ExecutionState expectedState = ExecutionState.FINISHED;
    timestamps[expectedState.ordinal()] = finishedTs;
    final LocalTaskManagerLocation assignedResourceLocation = new LocalTaskManagerLocation();
    final AllocationID allocationID = new AllocationID();
    final int subtaskIndex = 1;
    final int attempt = 2;
    final ArchivedExecution execution =
            new ArchivedExecution(
                    new StringifiedAccumulatorResult[0],
                    ioMetrics,
                    createExecutionAttemptId(jobVertexID, subtaskIndex, attempt),
                    expectedState,
                    null,
                    assignedResourceLocation,
                    allocationID,
                    timestamps,
                    endTimestamps);
    final ArchivedExecutionVertex executionVertex =
            new ArchivedExecutionVertex(
                    subtaskIndex,
                    "Test archived execution vertex",
                    execution,
                    new ExecutionHistory(0));
    // Instance the handler.
    final RestHandlerConfiguration restHandlerConfiguration =
            RestHandlerConfiguration.fromConfiguration(new Configuration());
    final MetricFetcher metricFetcher =
            new MetricFetcherImpl<>(
                    () -> null,
                    address -> null,
                    Executors.directExecutor(),
                    Time.milliseconds(1000L),
                    MetricOptions.METRIC_FETCHER_UPDATE_INTERVAL.defaultValue().toMillis());
    final SubtaskCurrentAttemptDetailsHandler handler =
            new SubtaskCurrentAttemptDetailsHandler(
                    () -> null,
                    Time.milliseconds(100),
                    Collections.emptyMap(),
                    SubtaskCurrentAttemptDetailsHeaders.getInstance(),
                    new DefaultExecutionGraphCache(
                            restHandlerConfiguration.getTimeout(),
                            Time.milliseconds(restHandlerConfiguration.getRefreshInterval())),
                    Executors.directExecutor(),
                    metricFetcher);
    final HashMap<String, String> receivedPathParameters = new HashMap<>(2);
    receivedPathParameters.put(JobIDPathParameter.KEY, jobID.toString());
    receivedPathParameters.put(JobVertexIdPathParameter.KEY, jobVertexID.toString());
    final HandlerRequest<EmptyRequestBody> request =
            HandlerRequest.resolveParametersAndCreate(
                    EmptyRequestBody.getInstance(),
                    new SubtaskMessageParameters(),
                    receivedPathParameters,
                    Collections.emptyMap(),
                    Collections.emptyList());
    // Handle request.
    final SubtaskExecutionAttemptDetailsInfo detailsInfo =
            handler.handleRequest(request, executionVertex);
    // Verify
    final IOMetricsInfo ioMetricsInfo =
            new IOMetricsInfo(
                    bytesIn,
                    true,
                    bytesOut,
                    true,
                    recordsIn,
                    true,
                    recordsOut,
                    true,
                    accumulateBackPressuredTime,
                    accumulateIdleTime,
                    accumulateBusyTime);
    // Only DEPLOYING has both start and end timestamps set above (duration 10);
    // the other pre-FINISHED states are expected to report -1 (unknown).
    final Map<ExecutionState, Long> statusDuration = new HashMap<>();
    statusDuration.put(ExecutionState.CREATED, -1L);
    statusDuration.put(ExecutionState.SCHEDULED, -1L);
    statusDuration.put(ExecutionState.DEPLOYING, 10L);
    statusDuration.put(ExecutionState.INITIALIZING, -1L);
    statusDuration.put(ExecutionState.RUNNING, -1L);
    final SubtaskExecutionAttemptDetailsInfo expectedDetailsInfo =
            new SubtaskExecutionAttemptDetailsInfo(
                    subtaskIndex,
                    expectedState,
                    attempt,
                    assignedResourceLocation.getHostname(),
                    assignedResourceLocation.getEndpoint(),
                    deployingTs,
                    finishedTs,
                    finishedTs - deployingTs,
                    ioMetricsInfo,
                    assignedResourceLocation.getResourceID().getResourceIdString(),
                    statusDuration,
                    null);
    assertThat(detailsInfo).isEqualTo(expectedDetailsInfo);
}
|
/**
 * Resolves a single resource by delegating to {@link #getResources(String)} and
 * returning the first hit, or {@code null} when nothing matches or lookup fails.
 */
@Override
public URL getResource(final String name) {
    try {
        final Enumeration<URL> candidates = getResources(name);
        return candidates.hasMoreElements() ? candidates.nextElement() : null;
    } catch (IOException ignored) {
        // mimics the behavior of the JDK
        return null;
    }
}
|
@Test
void testComponentFirstResourceFoundIgnoresOwner() throws Exception {
    // Create a real file in the temp folder so the component URL can serve it.
    String resourceToLoad = TempDirUtils.newFile(tempFolder).getName();
    // The owner loader would return a different URL for the same resource name.
    TestUrlClassLoader owner =
            new TestUrlClassLoader(resourceToLoad, RESOURCE_RETURNED_BY_OWNER);
    final ComponentClassLoader componentClassLoader =
            new ComponentClassLoader(
                    new URL[] {tempFolder.toUri().toURL()},
                    owner,
                    new String[0],
                    new String[] {resourceToLoad},
                    Collections.emptyMap());
    final URL loadedResource = componentClassLoader.getResource(resourceToLoad);
    // Component-first: the resource must come from the component URL, not the owner.
    assertThat(loadedResource.toString()).contains(resourceToLoad);
}
|
/**
 * Builds the polling response for the current app session state.
 *
 * <p>VERIFIED additionally requires a valid card status before it is reported;
 * ABORTED carries the session's error; unknown states are reported as PENDING.
 */
@Override
public AppResponse process(Flow flow, AppRequest request) throws FlowNotDefinedException, IOException, NoSuchAlgorithmException {
    var response = new WidPollResponse(attestEnabled && Arrays.asList(allowedActions).contains(appSession.getAction()));
    setValid(false);
    final String state = appSession.getState();
    switch (state) {
        case "VERIFIED" -> {
            // Only report VERIFIED when the card status check passes;
            // otherwise the status is left untouched.
            if (validateCardStatus()) {
                setValid(true);
                response.setStatus(state);
            }
        }
        case "ABORTED" -> {
            // Aborted sessions surface their error alongside the status.
            response.setStatus("ABORTED");
            response.setError(appSession.getError());
        }
        case "COMPLETED", "AUTHENTICATED", "CONFIRMED", "CANCELLED" -> response.setStatus(state);
        default -> response.setStatus("PENDING");
    }
    return response;
}
|
@Test
void processAttestDisabled() throws FlowNotDefinedException, IOException, NoSuchAlgorithmException {
    // Attestation disabled: the response must not ask the app to attest,
    // and an unknown/initial session state polls as PENDING.
    setupWidPolling(false);
    WidPollResponse appResponse = (WidPollResponse) widPolling.process(mockedFlow, mockedAbstractAppRequest);
    assertFalse(appResponse.getAttestApp());
    assertEquals("PENDING", appResponse.getStatus());
}
|
/**
 * Flattens a map into a list of "key{separator}value" strings; entries whose
 * value is empty contribute the bare key.
 *
 * @param map source map; {@code null} yields {@code null} (contract preserved for callers)
 * @param separator string placed between each key and its non-empty value
 * @return the joined entries, an empty list for an empty map, or {@code null} for a null map
 */
public static List<String> join(Map<String, String> map, String separator) {
    if (map == null) {
        return null;
    }
    final List<String> joined = new ArrayList<>(map.size());
    map.forEach((key, value) -> {
        if (StringUtils.isEmpty(value)) {
            joined.add(key);
        } else {
            joined.add(key + separator + value);
        }
    });
    return joined;
}
|
@Test
void testJoinList() {
    // NOTE(review): this exercises the List-joining overload of join(), not the
    // Map overload defined above — TODO confirm intended coverage of the Map variant.
    List<String> list = emptyList();
    assertEquals("", CollectionUtils.join(list, "/"));
    list = Arrays.asList("x");
    assertEquals("x", CollectionUtils.join(list, "-"));
    list = Arrays.asList("a", "b");
    assertEquals("a/b", CollectionUtils.join(list, "/"));
}
|
/**
 * Returns whether the 1-based input id is selected in the bit mask.
 *
 * <p>Input {@code n} corresponds to bit {@code n - 1} of {@code inputMask}.
 * Java long shifts use the shift count modulo 64, so ids outside 1..64 wrap
 * rather than fail — callers are expected to stay within that range.
 */
public boolean isInputSelected(int inputId) {
    return ((inputMask >>> (inputId - 1)) & 1L) != 0;
}
|
@Test
void testIsInputSelected() {
    // No selection -> nothing is selected.
    assertThat(new Builder().build().isInputSelected(1)).isFalse();
    // Selecting one input does not select the others.
    assertThat(new Builder().select(2).build().isInputSelected(1)).isFalse();
    assertThat(new Builder().select(1).build().isInputSelected(1)).isTrue();
    assertThat(new Builder().select(1).select(2).build().isInputSelected(1)).isTrue();
    // Edge ids: the builder accepts -1 and the maximum id 64 (bit 63 of the long mask).
    assertThat(new Builder().select(-1).build().isInputSelected(1)).isTrue();
    assertThat(new Builder().select(64).build().isInputSelected(64)).isTrue();
}
|
/**
 * Intentionally a no-op: this scheduler discards every submitted task.
 */
@Override
public void schedule(Object task) {
}
|
@Test
public void test() {
    // Smoke test: scheduling on the no-op scheduler must simply not throw.
    NopScheduler scheduler = new NopScheduler();
    scheduler.schedule(new IOBuffer(64));
}
|
// Handles any uncaught exception: filters known OS bugs, writes a detailed crash
// report to a private file, then forwards the exception to the OS default handler.
@Override
public void uncaughtException(@NonNull Thread thread, Throwable ex) {
  ex.printStackTrace();
  Logger.e(TAG, "Caught an unhandled exception!!!", ex);
  // https://github.com/AnySoftKeyboard/AnySoftKeyboard/issues/15
  // https://github.com/AnySoftKeyboard/AnySoftKeyboard/issues/433
  final String stackTrace = Logger.getStackTrace(ex);
  // Known Android framework bugs: swallow them instead of reporting a crash.
  if (ex instanceof NullPointerException) {
    if (stackTrace.contains(
            "android.inputmethodservice.IInputMethodSessionWrapper.executeMessage(IInputMethodSessionWrapper.java")
        || stackTrace.contains(
            "android.inputmethodservice.IInputMethodWrapper.executeMessage(IInputMethodWrapper.java")) {
      Logger.w(TAG, "An OS bug has been adverted. Move along, there is nothing to see here.");
      return;
    }
  } else if (ex instanceof java.util.concurrent.TimeoutException
      && stackTrace.contains(".finalize")) {
    Logger.w(TAG, "An OS bug has been adverted. Move along, there is nothing to see here.");
    return;
  }
  // Build the human-readable crash report: timestamp, app details, exception,
  // stack trace and device info.
  StringBuilder reportMessage = new StringBuilder();
  reportMessage
      .append("Hi. It seems that we have crashed.... Here are some details:")
      .append(NEW_LINE)
      .append("****** UTC Time: ")
      .append(DateFormat.format("kk:mm:ss dd.MM.yyyy", System.currentTimeMillis()))
      .append(NEW_LINE)
      .append("****** Application name: ")
      .append(getAppDetails())
      .append(NEW_LINE)
      .append("******************************")
      .append(NEW_LINE)
      .append(ex.getClass().getName())
      .append(NEW_LINE)
      .append("****** Exception message: ")
      .append(ex.getMessage())
      .append(NEW_LINE)
      .append(HEADER_BREAK_LINE)
      .append(NEW_LINE)
      // NOTE(review): "Trace trace" looks like a typo for "Stack trace" in the
      // emitted report text — confirm before changing, tests may count report lines.
      .append("****** Trace trace:")
      .append(NEW_LINE)
      .append(stackTrace)
      .append(NEW_LINE)
      .append("******************************")
      .append(NEW_LINE)
      .append("****** Device information:")
      .append(NEW_LINE)
      .append(ChewbaccaUtils.getSysInfo(mApp))
      .append(NEW_LINE);
  // For OOMs (direct or as the cause) include the JVM memory figures.
  if (ex instanceof OutOfMemoryError
      || (ex.getCause() != null && ex.getCause() instanceof OutOfMemoryError)) {
    reportMessage
        .append("******************************")
        .append(NEW_LINE)
        .append("****** Memory: ")
        .append(Runtime.getRuntime().totalMemory())
        .append(NEW_LINE)
        .append("Free: ")
        .append(Runtime.getRuntime().freeMemory())
        .append(NEW_LINE)
        .append("Max: ")
        .append(Runtime.getRuntime().maxMemory())
        .append(NEW_LINE);
  }
  reportMessage
      .append("******************************")
      .append(NEW_LINE)
      .append("****** Log-Cat: ")
      .append(NEW_LINE)
      .append(Logger.getAllLogLines())
      .append(NEW_LINE);
  // Persist the report; failures here are logged but must not mask the crash.
  try (OutputStreamWriter writer =
      new OutputStreamWriter(
          mApp.openFileOutput(NEW_CRASH_FILENAME, Context.MODE_PRIVATE),
          Charset.forName("UTF-8"))) {
    writer.write(reportMessage.toString());
    Logger.i(TAG, "Wrote crash report to %s.", NEW_CRASH_FILENAME);
    Logger.d(TAG, "Crash report:");
    for (String line : TextUtils.split(reportMessage.toString(), NEW_LINE)) {
      Logger.d(TAG, "err: %s", line);
    }
  } catch (Exception writeEx) {
    Logger.e(TAG, writeEx, "Failed to write crash report file!");
  }
  // and sending to the OS
  if (mOsDefaultHandler != null) {
    Logger.i(TAG, "Sending the exception to OS exception handler...");
    mOsDefaultHandler.uncaughtException(thread, ex);
  }
}
|
@Test
public void testCrashLogFileWasCreated() throws Exception {
  Application app = ApplicationProvider.getApplicationContext();
  NotificationDriver notificationDriver = Mockito.mock(NotificationDriver.class);
  TestableChewbaccaUncaughtExceptionHandler underTest =
      new TestableChewbaccaUncaughtExceptionHandler(app, null, notificationDriver);
  underTest.uncaughtException(Thread.currentThread(), new IOException("an error"));
  // Crash handling writes a file; it must not raise a user notification here.
  Mockito.verify(notificationDriver, Mockito.never()).notify(Mockito.any(), Mockito.anyBoolean());
  File newReport =
      new File(app.getFilesDir(), ChewbaccaUncaughtExceptionHandler.NEW_CRASH_FILENAME);
  Assert.assertTrue(newReport.isFile());
  List<String> text = Files.readAllLines(newReport.toPath());
  Assert.assertEquals(
      44 /*this is fragile, and can change when crash report is changed*/, text.size());
  Assert.assertEquals(
      "Hi. It seems that we have crashed.... Here are some details:", text.get(0));
  // The header break line must appear somewhere in the report body.
  Assert.assertEquals(
      ChewbaccaUncaughtExceptionHandler.HEADER_BREAK_LINE,
      text.stream()
          .filter(ChewbaccaUncaughtExceptionHandler.HEADER_BREAK_LINE::equals)
          .findFirst()
          .orElse(null));
}
|
/**
 * Creates a {@link Checksum} for the given fully-qualified class name.
 *
 * <p>The built-in {@code Crc32}/{@code Crc32c} names resolve to their shared
 * instances; any other class is instantiated reflectively via its no-arg constructor.
 *
 * @param className fully-qualified class name, never {@code null}
 * @return the resolved checksum instance
 * @throws NullPointerException if {@code className} is {@code null}
 * @throws IllegalArgumentException if the class cannot be loaded or instantiated
 */
public static Checksum newInstance(final String className)
{
    Objects.requireNonNull(className, "className is required!");
    if (Crc32.class.getName().equals(className))
    {
        return crc32();
    }
    if (Crc32c.class.getName().equals(className))
    {
        return crc32c();
    }
    try
    {
        return (Checksum)Class.forName(className).getDeclaredConstructor().newInstance();
    }
    catch (final ReflectiveOperationException ex)
    {
        throw new IllegalArgumentException("failed to create Checksum instance for class: " + className, ex);
    }
}
|
@Test
void newInstanceThrowsIllegalArgumentExceptionIfClassIsNotFound()
{
    // An unknown class name must surface as IllegalArgumentException
    // with the original ClassNotFoundException preserved as the cause.
    final IllegalArgumentException exception = assertThrows(
        IllegalArgumentException.class, () -> Checksums.newInstance("a.b.c.MissingClass"));
    assertEquals(ClassNotFoundException.class, exception.getCause().getClass());
}
|
// Decodes as many complete binlog events as the buffer holds; leaves any
// partial trailing event in the buffer for the next read (reader index is
// marked before each attempt so checkEventIntegrity can rewind).
@Override
protected void decode(final ChannelHandlerContext ctx, final ByteBuf in, final List<Object> out) {
    // 1 status byte + fixed event header must be available before attempting a parse.
    while (in.readableBytes() >= 1 + MySQLBinlogEventHeader.MYSQL_BINLOG_EVENT_HEADER_LENGTH) {
        in.markReaderIndex();
        MySQLPacketPayload payload = new MySQLPacketPayload(in, ctx.channel().attr(CommonConstants.CHARSET_ATTRIBUTE_KEY).get());
        checkPayload(payload);
        MySQLBinlogEventHeader binlogEventHeader = new MySQLBinlogEventHeader(payload, binlogContext.getChecksumLength());
        // Event body not fully buffered yet: integrity check rewinds and we wait for more bytes.
        if (!checkEventIntegrity(in, binlogEventHeader)) {
            return;
        }
        Optional<MySQLBaseBinlogEvent> binlogEvent = decodeEvent(binlogEventHeader, payload);
        if (!binlogEvent.isPresent()) {
            skipChecksum(binlogEventHeader.getEventType(), in);
            return;
        }
        // Placeholder events are forwarded as-is and end this decode round.
        if (binlogEvent.get() instanceof PlaceholderBinlogEvent) {
            out.add(binlogEvent.get());
            skipChecksum(binlogEventHeader.getEventType(), in);
            return;
        }
        // Either buffer events until transaction boundaries or emit immediately.
        if (decodeWithTX) {
            processEventWithTX(binlogEvent.get(), out);
        } else {
            processEventIgnoreTX(binlogEvent.get(), out);
        }
        skipChecksum(binlogEventHeader.getEventType(), in);
    }
}
|
@Test
void assertBinlogEventBodyIncomplete() {
    ByteBuf byteBuf = ByteBufAllocator.DEFAULT.buffer();
    // One complete event, one complete header-only fragment, then a truncated event body.
    byte[] completeData = StringUtil.decodeHexDump("002a80a862200100000038000000c569000000007400000000000100020004ff0801000000000000000100000007535543434553531c9580c5");
    byteBuf.writeBytes(completeData);
    byteBuf.writeBytes(StringUtil.decodeHexDump("006acb656410010000001f000000fa29000000001643000000000000b13f8340"));
    byte[] notCompleteData = StringUtil.decodeHexDump("00cb38a962130100000041000000be7d000000007b000000000001000464735f310009745f6f726465725f31000408030f");
    byteBuf.writeBytes(notCompleteData);
    List<Object> decodedEvents = new LinkedList<>();
    // Table id 116 must be known so the row event can resolve its column definitions.
    binlogContext.getTableMap().put(116L, tableMapEventPacket);
    when(tableMapEventPacket.getColumnDefs()).thenReturn(columnDefs);
    binlogEventPacketDecoder.decode(channelHandlerContext, byteBuf, decodedEvents);
    // Only the first (complete) event decodes; the truncated tail stays buffered.
    assertThat(decodedEvents.size(), is(1));
}
|
/**
 * Builds the Iceberg catalog declared by the sink config, using a Hadoop
 * configuration assembled from the config's Hadoop properties.
 */
static Catalog loadCatalog(IcebergSinkConfig config) {
  return CatalogUtil.buildIcebergCatalog(
      config.catalogName(), config.catalogProps(), loadHadoopConfig(config));
}
|
@Test
public void testLoadCatalogNoHadoopDir() {
  Map<String, String> props =
      ImmutableMap.of(
          "topics",
          "mytopic",
          "iceberg.tables",
          "mytable",
          // Hadoop property supplied via sink config (no hadoop conf dir configured).
          "iceberg.hadoop.conf-prop",
          "conf-value",
          "iceberg.catalog.catalog-impl",
          TestCatalog.class.getName());
  IcebergSinkConfig config = new IcebergSinkConfig(props);
  Catalog result = CatalogUtils.loadCatalog(config);
  assertThat(result).isInstanceOf(TestCatalog.class);
  Configuration conf = ((TestCatalog) result).conf;
  assertThat(conf).isNotNull();
  // check that the sink config property was added
  assertThat(conf.get("conf-prop")).isEqualTo("conf-value");
  // check that core-site.xml was loaded
  assertThat(conf.get("foo")).isEqualTo("bar");
}
|
/**
 * Sends the request without any additional network-service context
 * (delegates to the two-argument overload with a null second argument).
 */
@Override
public HttpResponse send(HttpRequest httpRequest) throws IOException {
  return send(httpRequest, null);
}
|
@Test
public void send_default_userAgent() throws IOException, InterruptedException {
  String responseBody = "test response";
  mockWebServer.enqueue(
      new MockResponse()
          .setResponseCode(HttpStatus.OK.code())
          .setHeader(CONTENT_TYPE, MediaType.PLAIN_TEXT_UTF_8.toString())
          .setBody(responseBody));
  mockWebServer.start();
  HttpUrl baseUrl = mockWebServer.url("/");
  httpClient.send(get(baseUrl.toString()).withEmptyHeaders().build());
  // Even with no headers supplied, the client must stamp its default User-Agent.
  assertThat(mockWebServer.takeRequest().getHeader(USER_AGENT))
      .isEqualTo(HttpClient.TSUNAMI_USER_AGENT);
}
|
/**
 * Static factory for a fresh {@link Builder} used to assemble a changeset.
 */
public static Builder newChangesetBuilder() {
  return new Builder();
}
|
@Test
public void test_to_string() {
  Changeset underTest = Changeset.newChangesetBuilder()
    .setAuthor("john")
    .setDate(123456789L)
    .setRevision("rev-1")
    .build();
  // toString lists revision, author and date in that fixed format.
  assertThat(underTest).hasToString("Changeset{revision='rev-1', author='john', date=123456789}");
}
|
/**
 * Strips the fragment part ("#..." to the end of the URL) from the current URL,
 * if one is present.
 *
 * <p>Implemented with {@link String#indexOf(int)} instead of the previous
 * {@code replaceFirst("(.*?)(#.*)", "$1")}: this avoids recompiling a regex on
 * every call and also removes fragments that contain line terminators, which the
 * regex {@code .} would have left behind.
 *
 * @return this instance, for call chaining
 */
public URLNormalizer removeFragment() {
    final int fragmentIndex = url.indexOf('#');
    if (fragmentIndex != -1) {
        url = url.substring(0, fragmentIndex);
    }
    return this;
}
|
@Test
public void testRemoveFragment() {
    // A normal fragment is removed entirely.
    s = "http://www.example.com/bar.html#section1";
    t = "http://www.example.com/bar.html";
    assertEquals(t, n(s).removeFragment().toString());
    // A trailing bare '#' (empty fragment) is removed as well.
    s = "http://www.example.com/bar.html#";
    t = "http://www.example.com/bar.html";
    assertEquals(t, n(s).removeFragment().toString());
}
|
@Override
public ColumnStatisticsObj aggregate(List<ColStatsObjWithSourceInfo> colStatsWithSourceInfo,
List<String> partNames, boolean areAllPartsFound) throws MetaException {
checkStatisticsList(colStatsWithSourceInfo);
ColumnStatisticsObj statsObj = null;
String colType;
String colName = null;
// check if all the ColumnStatisticsObjs contain stats and all the ndv are
// bitvectors
boolean doAllPartitionContainStats = partNames.size() == colStatsWithSourceInfo.size();
NumDistinctValueEstimator ndvEstimator = null;
boolean areAllNDVEstimatorsMergeable = true;
for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
ColumnStatisticsObj cso = csp.getColStatsObj();
if (statsObj == null) {
colName = cso.getColName();
colType = cso.getColType();
statsObj = ColumnStatsAggregatorFactory.newColumnStaticsObj(colName, colType,
cso.getStatsData().getSetField());
LOG.trace("doAllPartitionContainStats for column: {} is: {}", colName, doAllPartitionContainStats);
}
DateColumnStatsDataInspector columnStatsData = dateInspectorFromStats(cso);
// check if we can merge NDV estimators
if (columnStatsData.getNdvEstimator() == null) {
areAllNDVEstimatorsMergeable = false;
break;
} else {
NumDistinctValueEstimator estimator = columnStatsData.getNdvEstimator();
if (ndvEstimator == null) {
ndvEstimator = estimator;
} else {
if (!ndvEstimator.canMerge(estimator)) {
areAllNDVEstimatorsMergeable = false;
break;
}
}
}
}
if (areAllNDVEstimatorsMergeable && ndvEstimator != null) {
ndvEstimator = NumDistinctValueEstimatorFactory.getEmptyNumDistinctValueEstimator(ndvEstimator);
}
LOG.debug("all of the bit vectors can merge for {} is {}", colName, areAllNDVEstimatorsMergeable);
ColumnStatisticsData columnStatisticsData = initColumnStatisticsData();
if (doAllPartitionContainStats || colStatsWithSourceInfo.size() < 2) {
DateColumnStatsDataInspector aggregateData = null;
long lowerBound = 0;
long higherBound = 0;
double densityAvgSum = 0.0;
DateColumnStatsMerger merger = new DateColumnStatsMerger();
for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
ColumnStatisticsObj cso = csp.getColStatsObj();
DateColumnStatsDataInspector newData = dateInspectorFromStats(cso);
lowerBound = Math.max(lowerBound, newData.getNumDVs());
higherBound += newData.getNumDVs();
if (newData.isSetLowValue() && newData.isSetHighValue()) {
densityAvgSum += ((double) diff(newData.getHighValue(), newData.getLowValue())) / newData.getNumDVs();
}
if (areAllNDVEstimatorsMergeable && ndvEstimator != null) {
ndvEstimator.mergeEstimators(newData.getNdvEstimator());
}
if (aggregateData == null) {
aggregateData = newData.deepCopy();
} else {
aggregateData.setLowValue(merger.mergeLowValue(
merger.getLowValue(aggregateData), merger.getLowValue(newData)));
aggregateData.setHighValue(merger.mergeHighValue(
merger.getHighValue(aggregateData), merger.getHighValue(newData)));
aggregateData.setNumNulls(merger.mergeNumNulls(aggregateData.getNumNulls(), newData.getNumNulls()));
aggregateData.setNumDVs(merger.mergeNumDVs(aggregateData.getNumDVs(), newData.getNumDVs()));
}
}
if (areAllNDVEstimatorsMergeable && ndvEstimator != null) {
// if all the ColumnStatisticsObjs contain bitvectors, we do not need to
// use uniform distribution assumption because we can merge bitvectors
// to get a good estimation.
aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
} else {
long estimation;
if (useDensityFunctionForNDVEstimation && aggregateData != null
&& aggregateData.isSetLowValue() && aggregateData.isSetHighValue()) {
// We have estimation, lowerbound and higherbound. We use estimation
// if it is between lowerbound and higherbound.
double densityAvg = densityAvgSum / partNames.size();
estimation = (long) (diff(aggregateData.getHighValue(), aggregateData.getLowValue()) / densityAvg);
if (estimation < lowerBound) {
estimation = lowerBound;
} else if (estimation > higherBound) {
estimation = higherBound;
}
} else {
estimation = (long) (lowerBound + (higherBound - lowerBound) * ndvTuner);
}
aggregateData.setNumDVs(estimation);
}
columnStatisticsData.setDateStats(aggregateData);
} else {
// TODO: bail out if missing stats are over a certain threshold
// we need extrapolation
LOG.debug("start extrapolation for {}", colName);
Map<String, Integer> indexMap = new HashMap<>();
for (int index = 0; index < partNames.size(); index++) {
indexMap.put(partNames.get(index), index);
}
Map<String, Double> adjustedIndexMap = new HashMap<>();
Map<String, ColumnStatisticsData> adjustedStatsMap = new HashMap<>();
// while we scan the css, we also get the densityAvg, lowerbound and
// higherbound when useDensityFunctionForNDVEstimation is true.
double densityAvgSum = 0.0;
if (!areAllNDVEstimatorsMergeable) {
// if not every partition uses bitvector for ndv, we just fall back to
// the traditional extrapolation methods.
for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
ColumnStatisticsObj cso = csp.getColStatsObj();
String partName = csp.getPartName();
DateColumnStatsData newData = cso.getStatsData().getDateStats();
if (useDensityFunctionForNDVEstimation && newData.isSetLowValue() && newData.isSetHighValue()) {
densityAvgSum += ((double) diff(newData.getHighValue(), newData.getLowValue())) / newData.getNumDVs();
}
adjustedIndexMap.put(partName, (double) indexMap.get(partName));
adjustedStatsMap.put(partName, cso.getStatsData());
}
} else {
// we first merge all the adjacent bitvectors that we could merge and
// derive new partition names and index.
StringBuilder pseudoPartName = new StringBuilder();
double pseudoIndexSum = 0;
int length = 0;
int curIndex = -1;
DateColumnStatsDataInspector aggregateData = null;
for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
ColumnStatisticsObj cso = csp.getColStatsObj();
String partName = csp.getPartName();
DateColumnStatsDataInspector newData = dateInspectorFromStats(cso);
// newData.isSetBitVectors() should be true for sure because we
// already checked it before.
if (indexMap.get(partName) != curIndex) {
// There is bitvector, but it is not adjacent to the previous ones.
if (length > 0) {
// we have to set ndv
adjustedIndexMap.put(pseudoPartName.toString(), pseudoIndexSum / length);
aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
ColumnStatisticsData csd = new ColumnStatisticsData();
csd.setDateStats(aggregateData);
adjustedStatsMap.put(pseudoPartName.toString(), csd);
if (useDensityFunctionForNDVEstimation) {
densityAvgSum += ((double) diff(aggregateData.getHighValue(), aggregateData.getLowValue()))
/ aggregateData.getNumDVs();
}
// reset everything
pseudoPartName = new StringBuilder();
pseudoIndexSum = 0;
length = 0;
ndvEstimator = NumDistinctValueEstimatorFactory.getEmptyNumDistinctValueEstimator(ndvEstimator);
}
aggregateData = null;
}
curIndex = indexMap.get(partName);
pseudoPartName.append(partName);
pseudoIndexSum += curIndex;
length++;
curIndex++;
if (aggregateData == null) {
aggregateData = newData.deepCopy();
} else {
aggregateData.setLowValue(min(aggregateData.getLowValue(), newData.getLowValue()));
aggregateData.setHighValue(max(aggregateData.getHighValue(), newData.getHighValue()));
aggregateData.setNumNulls(aggregateData.getNumNulls() + newData.getNumNulls());
}
ndvEstimator.mergeEstimators(newData.getNdvEstimator());
}
if (length > 0) {
// we have to set ndv
adjustedIndexMap.put(pseudoPartName.toString(), pseudoIndexSum / length);
aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
ColumnStatisticsData csd = new ColumnStatisticsData();
csd.setDateStats(aggregateData);
adjustedStatsMap.put(pseudoPartName.toString(), csd);
if (useDensityFunctionForNDVEstimation) {
densityAvgSum += ((double) diff(aggregateData.getHighValue(), aggregateData.getLowValue()))
/ aggregateData.getNumDVs();
}
}
}
extrapolate(columnStatisticsData, partNames.size(), colStatsWithSourceInfo.size(),
adjustedIndexMap, adjustedStatsMap, densityAvgSum / adjustedStatsMap.size());
}
LOG.debug(
"Ndv estimation for {} is {}. # of partitions requested: {}. # of partitions found: {}",
colName, columnStatisticsData.getDateStats().getNumDVs(), partNames.size(),
colStatsWithSourceInfo.size());
KllHistogramEstimator mergedKllHistogramEstimator = mergeHistograms(colStatsWithSourceInfo);
if (mergedKllHistogramEstimator != null) {
columnStatisticsData.getDateStats().setHistogram(mergedKllHistogramEstimator.serialize());
}
statsObj.setStatsData(columnStatisticsData);
return statsObj;
}
|
@Test
public void testAggregateSingleStat() throws MetaException {
  // With exactly one partition, aggregation should be the identity:
  // the aggregated statistics equal the single partition's statistics
  // (nulls, NDV, low/high bounds, HLL and KLL sketches all unchanged).
  List<String> partitions = Collections.singletonList("part1");
  long[] values = { DATE_1.getDaysSinceEpoch(), DATE_4.getDaysSinceEpoch() };
  ColumnStatisticsData data1 = new ColStatsBuilder<>(Date.class).numNulls(1).numDVs(2).low(DATE_1).high(DATE_4)
      .hll(values).kll(values).build();
  List<ColStatsObjWithSourceInfo> statsList =
      Collections.singletonList(createStatsWithInfo(data1, TABLE, COL, partitions.get(0)));
  DateColumnStatsAggregator aggregator = new DateColumnStatsAggregator();
  // 'true' requests density-function-based NDV estimation; irrelevant for a single partition.
  ColumnStatisticsObj computedStatsObj = aggregator.aggregate(statsList, partitions, true);
  assertEqualStatistics(data1, computedStatsObj.getStatsData());
}
|
@Override
public int getScale(final int column) {
    // Only a single column is exposed; reject any other index the same way
    // Preconditions.checkArgument would (IllegalArgumentException, no message).
    if (column != 1) {
        throw new IllegalArgumentException();
    }
    // The column holds integral data, so its decimal scale is always zero.
    return 0;
}
|
@Test
void assertGetScale() throws SQLException {
    // The single metadata column is integral, so its scale must be reported as 0.
    assertThat(actualMetaData.getScale(1), is(0));
}
|
@Override
public void updateApiErrorLogProcess(Long id, Integer processStatus, Long processUserId) {
    // Load the error log and validate it exists and has not been processed yet.
    ApiErrorLogDO errorLog = apiErrorLogMapper.selectById(id);
    if (errorLog == null) {
        throw exception(API_ERROR_LOG_NOT_FOUND);
    }
    boolean alreadyProcessed =
            !ApiErrorLogProcessStatusEnum.INIT.getStatus().equals(errorLog.getProcessStatus());
    if (alreadyProcessed) {
        throw exception(API_ERROR_LOG_PROCESSED);
    }
    // Mark as processed: record the new status, the operator and the processing time.
    ApiErrorLogDO update = ApiErrorLogDO.builder()
            .id(id)
            .processStatus(processStatus)
            .processUserId(processUserId)
            .processTime(LocalDateTime.now())
            .build();
    apiErrorLogMapper.updateById(update);
}
|
@Test
public void testUpdateApiErrorLogProcess_success() {
    // Prepare data: insert a log entry still in INIT state so it is eligible for processing.
    ApiErrorLogDO apiErrorLogDO = randomPojo(ApiErrorLogDO.class,
            o -> o.setProcessStatus(ApiErrorLogProcessStatusEnum.INIT.getStatus()));
    apiErrorLogMapper.insert(apiErrorLogDO);
    // Prepare parameters
    Long id = apiErrorLogDO.getId();
    Integer processStatus = randomEle(ApiErrorLogProcessStatusEnum.values()).getStatus();
    Long processUserId = randomLongId();
    // Invoke
    apiErrorLogService.updateApiErrorLogProcess(id, processStatus, processUserId);
    // Assert: status and operator were persisted and a processing timestamp was set.
    ApiErrorLogDO dbApiErrorLogDO = apiErrorLogMapper.selectById(apiErrorLogDO.getId());
    assertEquals(processStatus, dbApiErrorLogDO.getProcessStatus());
    assertEquals(processUserId, dbApiErrorLogDO.getProcessUserId());
    assertNotNull(dbApiErrorLogDO.getProcessTime());
}
|
/**
 * Builds an immutable view of batch-ingestion settings by extracting the known
 * {@link BatchConfigProperties} keys from the raw config map.
 *
 * @param tableNameWithType table name including its type suffix (e.g. "foo_REALTIME")
 * @param batchConfigsMap   flat key/value batch configuration; unknown keys are ignored
 */
public BatchConfig(String tableNameWithType, Map<String, String> batchConfigsMap) {
  _batchConfigMap = batchConfigsMap;
  _tableNameWithType = tableNameWithType;
  // Input format is optional; when present it must match a FileFormat enum constant
  // (comparison is case-insensitive via toUpperCase).
  String inputFormat = batchConfigsMap.get(BatchConfigProperties.INPUT_FORMAT);
  if (inputFormat != null) {
    _inputFormat = FileFormat.valueOf(inputFormat.toUpperCase());
  } else {
    _inputFormat = null;
  }
  // Input/output locations and filesystem plugins; FS-specific properties are
  // collected by prefix and keep their full (prefixed) keys.
  _inputDirURI = batchConfigsMap.get(BatchConfigProperties.INPUT_DIR_URI);
  _inputFsClassName = batchConfigsMap.get(BatchConfigProperties.INPUT_FS_CLASS);
  _inputFsProps =
      IngestionConfigUtils.extractPropsMatchingPrefix(batchConfigsMap, BatchConfigProperties.INPUT_FS_PROP_PREFIX);
  _outputDirURI = batchConfigsMap.get(BatchConfigProperties.OUTPUT_DIR_URI);
  _outputFsClassName = batchConfigsMap.get(BatchConfigProperties.OUTPUT_FS_CLASS);
  _outputFsProps =
      IngestionConfigUtils.extractPropsMatchingPrefix(batchConfigsMap, BatchConfigProperties.OUTPUT_FS_PROP_PREFIX);
  // Boolean.parseBoolean returns false for a missing key, so overwrite defaults to off.
  _overwriteOutput = Boolean.parseBoolean(batchConfigsMap.get(BatchConfigProperties.OVERWRITE_OUTPUT));
  // Record-reader plugin and its prefixed properties.
  _recordReaderClassName = batchConfigsMap.get(BatchConfigProperties.RECORD_READER_CLASS);
  _recordReaderConfigClassName = batchConfigsMap.get(BatchConfigProperties.RECORD_READER_CONFIG_CLASS);
  _recordReaderProps = IngestionConfigUtils.extractPropsMatchingPrefix(batchConfigsMap,
      BatchConfigProperties.RECORD_READER_PROP_PREFIX);
  // Segment-name generation settings come from a dedicated sub-map of properties.
  _segmentNameGeneratorType = IngestionConfigUtils.getSegmentNameGeneratorType(batchConfigsMap);
  _segmentNameGeneratorConfigs = IngestionConfigUtils.extractPropsMatchingPrefix(batchConfigsMap,
      BatchConfigProperties.SEGMENT_NAME_GENERATOR_PROP_PREFIX);
  Map<String, String> segmentNameGeneratorProps = IngestionConfigUtils.getSegmentNameGeneratorProps(batchConfigsMap);
  _segmentName = segmentNameGeneratorProps.get(BatchConfigProperties.SEGMENT_NAME);
  _segmentNamePrefix = segmentNameGeneratorProps.get(BatchConfigProperties.SEGMENT_NAME_PREFIX);
  _segmentNamePostfix = segmentNameGeneratorProps.get(BatchConfigProperties.SEGMENT_NAME_POSTFIX);
  _excludeSequenceId = Boolean.parseBoolean(segmentNameGeneratorProps.get(BatchConfigProperties.EXCLUDE_SEQUENCE_ID));
  _sequenceId = batchConfigsMap.get(BatchConfigProperties.SEQUENCE_ID);
  _appendUUIDToSegmentName =
      Boolean.parseBoolean(segmentNameGeneratorProps.get(BatchConfigProperties.APPEND_UUID_TO_SEGMENT_NAME));
  _excludeTimeInSegmentName =
      Boolean.parseBoolean(segmentNameGeneratorProps.get(BatchConfigProperties.EXCLUDE_TIME_IN_SEGMENT_NAME));
  // Segment-push behavior; the IngestionConfigUtils getters supply defaults when unset.
  _pushMode = IngestionConfigUtils.getPushMode(batchConfigsMap);
  _pushAttempts = IngestionConfigUtils.getPushAttempts(batchConfigsMap);
  _pushParallelism = IngestionConfigUtils.getPushParallelism(batchConfigsMap);
  _pushIntervalRetryMillis = IngestionConfigUtils.getPushRetryIntervalMillis(batchConfigsMap);
  _pushSegmentURIPrefix = batchConfigsMap.get(BatchConfigProperties.PUSH_SEGMENT_URI_PREFIX);
  _pushSegmentURISuffix = batchConfigsMap.get(BatchConfigProperties.PUSH_SEGMENT_URI_SUFFIX);
  _pushControllerURI = batchConfigsMap.get(BatchConfigProperties.PUSH_CONTROLLER_URI);
  _outputSegmentDirURI = batchConfigsMap.get(BatchConfigProperties.OUTPUT_SEGMENT_DIR_URI);
}
|
@Test
public void testBatchConfig() {
  // Build a config map covering all core properties: directories, FS classes,
  // input format, record reader classes, and prefixed FS/record-reader props.
  Map<String, String> batchConfigMap = new HashMap<>();
  String tableName = "foo_REALTIME";
  String inputDir = "s3://foo/input";
  String outputDir = "s3://foo/output";
  String inputFsClass = "org.apache.S3FS";
  String outputFsClass = "org.apache.GcsGS";
  String region = "us-west";
  String username = "foo";
  String accessKey = "${ACCESS_KEY}";
  String secretKey = "${SECRET_KEY}";
  String inputFormat = "csv";
  String recordReaderClass = "org.foo.CSVRecordReader";
  String recordReaderConfigClass = "org.foo.CSVRecordReaderConfig";
  String separator = "|";
  batchConfigMap.put(BatchConfigProperties.INPUT_DIR_URI, inputDir);
  batchConfigMap.put(BatchConfigProperties.OUTPUT_DIR_URI, outputDir);
  batchConfigMap.put(BatchConfigProperties.INPUT_FS_CLASS, inputFsClass);
  batchConfigMap.put(BatchConfigProperties.OUTPUT_FS_CLASS, outputFsClass);
  batchConfigMap.put(BatchConfigProperties.INPUT_FORMAT, inputFormat);
  batchConfigMap.put(BatchConfigProperties.RECORD_READER_CLASS, recordReaderClass);
  batchConfigMap.put(BatchConfigProperties.RECORD_READER_CONFIG_CLASS, recordReaderConfigClass);
  batchConfigMap.put(BatchConfigProperties.INPUT_FS_PROP_PREFIX + ".region", region);
  batchConfigMap.put(BatchConfigProperties.INPUT_FS_PROP_PREFIX + ".username", username);
  batchConfigMap.put(BatchConfigProperties.OUTPUT_FS_PROP_PREFIX + ".region", region);
  batchConfigMap.put(BatchConfigProperties.OUTPUT_FS_PROP_PREFIX + ".accessKey", accessKey);
  batchConfigMap.put(BatchConfigProperties.OUTPUT_FS_PROP_PREFIX + ".secretKey", secretKey);
  batchConfigMap.put(BatchConfigProperties.RECORD_READER_PROP_PREFIX + ".separator", separator);
  // config with all the right properties
  BatchConfig batchConfig = new BatchConfig(tableName, batchConfigMap);
  assertEquals(batchConfig.getInputDirURI(), inputDir);
  assertEquals(batchConfig.getOutputDirURI(), outputDir);
  assertEquals(batchConfig.getInputFsClassName(), inputFsClass);
  assertEquals(batchConfig.getOutputFsClassName(), outputFsClass);
  // "csv" must be parsed case-insensitively into the FileFormat enum.
  assertEquals(batchConfig.getInputFormat(), FileFormat.CSV);
  assertEquals(batchConfig.getRecordReaderClassName(), recordReaderClass);
  assertEquals(batchConfig.getRecordReaderConfigClassName(), recordReaderConfigClass);
  // Prefixed properties are collected per-prefix and keep their full keys.
  assertEquals(batchConfig.getInputFsProps().size(), 2);
  assertEquals(batchConfig.getInputFsProps().get(BatchConfigProperties.INPUT_FS_PROP_PREFIX + ".region"), region);
  assertEquals(batchConfig.getInputFsProps().get(BatchConfigProperties.INPUT_FS_PROP_PREFIX + ".username"), username);
  assertEquals(batchConfig.getOutputFsProps().size(), 3);
  assertEquals(batchConfig.getOutputFsProps().get(BatchConfigProperties.OUTPUT_FS_PROP_PREFIX + ".region"), region);
  assertEquals(batchConfig.getOutputFsProps().get(BatchConfigProperties.OUTPUT_FS_PROP_PREFIX + ".accessKey"),
      accessKey);
  assertEquals(batchConfig.getOutputFsProps().get(BatchConfigProperties.OUTPUT_FS_PROP_PREFIX + ".secretKey"),
      secretKey);
  assertEquals(batchConfig.getRecordReaderProps().size(), 1);
  assertEquals(batchConfig.getRecordReaderProps().get(BatchConfigProperties.RECORD_READER_PROP_PREFIX + ".separator"),
      separator);
  assertEquals(batchConfig.getTableNameWithType(), tableName);
}
|
/**
 * Maps the current app-session state to an authentication-status response.
 *
 * <p>States before retrieval report PENDING (not yet retrieved); retrieved/awaiting
 * states report PENDING with the retrieved flag set; terminal states map to their
 * dedicated responses. For ABORTED sessions caused by an invalid verification code,
 * a remote log entry is written whose code depends on the requesting app type.
 *
 * @param flow    current flow (unused here, part of the handler contract)
 * @param request carries the app type used to pick the remote-log code
 * @return the status response for the session's state
 */
@Override
public AppResponse process(Flow flow, CheckAuthenticationStatusRequest request){
    switch(appSession.getState()) {
        case "AUTHENTICATION_REQUIRED", "AWAITING_QR_SCAN":
            return new CheckAuthenticationStatusResponse("PENDING", false);
        case "RETRIEVED", "AWAITING_CONFIRMATION":
            return new CheckAuthenticationStatusResponse("PENDING", true);
        case "CONFIRMED":
            return new StatusResponse("PENDING_CONFIRMED");
        case "AUTHENTICATED":
            return new OkResponse();
        case "CANCELLED":
            return new StatusResponse("CANCELLED");
        case "ABORTED":
            // Constant-first equals avoids an NPE when the session has no abort code set.
            if ("verification_code_invalid".equals(appSession.getAbortCode())) {
                String logCode = "wid_checker".equals(request.getAppType()) ? "1320" : "1368";
                digidClient.remoteLog(logCode, Map.of(HIDDEN, true));
            }
            return new NokResponse();
        default:
            // Unknown states are treated conservatively as still pending.
            return new CheckAuthenticationStatusResponse("PENDING", false);
    }
}
|
@Test
void processAuthenticated() {
    // A session in the AUTHENTICATED state must yield a plain OkResponse.
    appSession.setState("AUTHENTICATED");
    assertTrue(checkAuthenticationStatus.process(flow, request) instanceof OkResponse);
}
|
/**
 * Verifies that the supplied password matches this one.
 *
 * @param other the password to compare against
 * @throws MomoException with {@code PASSWORD_MISMATCHED} when the passwords differ
 */
public void verifyPassword(AttendeePassword other) {
    if (this.equals(other)) {
        return;
    }
    throw new MomoException(AttendeeErrorCode.PASSWORD_MISMATCHED);
}
|
@DisplayName("비밀번호가 서로 다르면 예외를 발생시킨다.")
@Test
void throwsExceptionForMismatchedPasswords() {
    // Comparing two different passwords must raise MomoException carrying
    // the PASSWORD_MISMATCHED error code's message.
    AttendeePassword password = new AttendeePassword("1234");
    AttendeePassword other = new AttendeePassword("123456");
    assertThatThrownBy(() -> password.verifyPassword(other))
            .isInstanceOf(MomoException.class)
            .hasMessage(AttendeeErrorCode.PASSWORD_MISMATCHED.message());
}
|
/**
 * Creates a retrying dynamic proxy for {@code implementation} that applies
 * {@code retryPolicy} to every method of {@code iface}.
 */
public static <T> Object create(Class<T> iface, T implementation,
                                RetryPolicy retryPolicy) {
  // Wrap the single implementation in the trivial (non-failover) proxy provider
  // and delegate to the provider-based factory.
  DefaultFailoverProxyProvider<T> proxyProvider =
      new DefaultFailoverProxyProvider<T>(iface, implementation);
  return RetryProxy.create(iface, proxyProvider, retryPolicy);
}
|
@Test
public void testRetryForever() throws UnreliableException {
  // Under RETRY_FOREVER every call eventually succeeds, regardless of how
  // many transient failures precede success.
  UnreliableInterface unreliable = (UnreliableInterface)
    RetryProxy.create(UnreliableInterface.class, unreliableImpl, RETRY_FOREVER);
  unreliable.alwaysSucceeds();
  unreliable.failsOnceThenSucceeds();
  unreliable.failsTenTimesThenSucceeds();
}
|
/**
 * Atomically moves {@code source} to {@code destination}, treating an
 * already-existing destination as success (the cached content is identical or
 * being written concurrently). The rename is retried to tolerate transient
 * interference, e.g. from antivirus scanners holding the file open.
 *
 * @throws IOException if the move still fails after all retries, with a message
 *     pointing at the usual culprits; any underlying IOException is preserved
 *     as the cause
 */
@VisibleForTesting
static void moveIfDoesNotExist(Path source, Path destination) throws IOException {
  String errorMessage =
      String.format(
          "unable to move: %s to %s; such failures are often caused by interference from "
              + "antivirus (https://github.com/GoogleContainerTools/jib/issues/3127#issuecomment-796838294), "
              + "or rarely if the operation is not supported by the file system (for example: "
              + "special non-local file system)",
          source, destination);
  try {
    Action<IOException> rename =
        () -> {
          if (Files.exists(destination)) {
            // If the file already exists, we skip renaming and use the existing file.
            // This happens, e.g., if a new layer happens to have the same content as a
            // previously-cached layer or the same layer is being cached concurrently.
            return true;
          }
          Files.move(source, destination);
          // Re-check after the move; returning false triggers another retry attempt.
          return Files.exists(destination);
        };
    // Some Windows users report java.nio.file.AccessDeniedException that we suspect is caused
    // by anti-virus programs, like Windows Defender, that open new files for scanning.
    // Retry the rename up to 10 times, with 15ms pause between each retry.
    // Note: FileSystemException covers AccessDeniedException and similar FS-level failures.
    if (!Retry.action(rename)
        .maximumRetries(10)
        .retryOnException(ex -> ex instanceof FileSystemException)
        .sleep(15, TimeUnit.MILLISECONDS)
        .run()) {
      // Retries exhausted without an exception but the destination never appeared.
      throw new IOException(errorMessage);
    }
  } catch (IOException ex) {
    // Preserve the original failure as the cause while pointing at likely explanations.
    throw new IOException(errorMessage, ex);
  }
}
|
@Test
public void testMoveIfDoesNotExist_exceptionAfterFailure() {
  // Moving a nonexistent source must fail with the explanatory wrapper message
  // while preserving the underlying NoSuchFileException as the cause.
  Exception exception =
      assertThrows(
          IOException.class,
          () -> CacheStorageWriter.moveIfDoesNotExist(Paths.get("foo"), Paths.get("bar")));
  assertThat(exception)
      .hasMessageThat()
      .contains(
          "unable to move: foo to bar; such failures are often caused by interference from "
              + "antivirus");
  assertThat(exception).hasCauseThat().isInstanceOf(NoSuchFileException.class);
  assertThat(exception.getCause()).hasMessageThat().isEqualTo("foo");
}
|
/**
 * Sets the limit to the given percentage of the JVM's maximum heap.
 * Non-positive percentages are ignored and leave the current limit untouched.
 */
public void setPercentOfJvmHeap(int percentOfJvmHeap) {
    if (percentOfJvmHeap <= 0) {
        return;
    }
    // Keep the exact original arithmetic (long multiply, then double divide)
    // so rounding behavior is unchanged.
    setLimit(Math.round(Runtime.getRuntime().maxMemory() * percentOfJvmHeap / 100.0));
}
|
@Test
public void testPercentOfJvmHeap() throws Exception {
    // 50% of the max heap must round exactly like Math.round(maxMemory / 2.0).
    underTest.setPercentOfJvmHeap(50);
    assertEquals("limit is half jvm limit", Math.round(Runtime.getRuntime().maxMemory() / 2.0), underTest.getLimit());
}
|
/**
 * Binding for HTTP GET requests. Dispatches on the {@code op} query parameter
 * to the corresponding HDFS read operation (OPEN, GETFILESTATUS, LISTSTATUS,
 * snapshot/xattr/acl/EC queries, etc.), executes it as the authenticated user,
 * and renders the result as JSON or an octet stream (for OPEN).
 *
 * @param path    the file-system path (made absolute before use)
 * @param uriInfo request URI, used to build redirection URLs
 * @param op      the requested operation; determines the switch branch
 * @param params  typed accessor for the remaining query parameters
 * @param request servlet request, used for audit/MDC metadata
 * @return the HTTP response for the operation
 * @throws IOException               on invalid operations or execution failures
 * @throws FileSystemAccessException if the filesystem operation fails
 */
@GET
@Path("{path:.*}")
@Produces({MediaType.APPLICATION_OCTET_STREAM + "; " + JettyUtils.UTF_8,
    MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8})
public Response get(@PathParam("path") String path,
    @Context UriInfo uriInfo,
    @QueryParam(OperationParam.NAME) OperationParam op,
    @Context Parameters params,
    @Context HttpServletRequest request)
    throws IOException, FileSystemAccessException {
  // Restrict access to only GETFILESTATUS and LISTSTATUS in write-only mode
  if((op.value() != HttpFSFileSystem.Operation.GETFILESTATUS) &&
      (op.value() != HttpFSFileSystem.Operation.LISTSTATUS) &&
      accessMode == AccessMode.WRITEONLY) {
    return Response.status(Response.Status.FORBIDDEN).build();
  }
  UserGroupInformation user = HttpUserGroupInformation.get();
  Response response;
  path = makeAbsolute(path);
  // Record operation and caller address for per-request log correlation.
  MDC.put(HttpFSFileSystem.OP_PARAM, op.value().name());
  MDC.put("hostname", request.getRemoteAddr());
  switch (op.value()) {
  case OPEN: {
    Boolean noRedirect = params.get(
        NoRedirectParam.NAME, NoRedirectParam.class);
    if (noRedirect) {
      // Client asked for the data-access URL instead of being redirected.
      URI redirectURL = createOpenRedirectionURL(uriInfo);
      final String js = JsonUtil.toJsonString("Location", redirectURL);
      response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    } else {
      //Invoking the command directly using an unmanaged FileSystem that is
      // released by the FileSystemReleaseFilter
      final FSOperations.FSOpen command = new FSOperations.FSOpen(path);
      final FileSystem fs = createFileSystem(user);
      InputStream is = null;
      UserGroupInformation ugi = UserGroupInformation
          .createProxyUser(user.getShortUserName(),
              UserGroupInformation.getLoginUser());
      try {
        is = ugi.doAs(new PrivilegedExceptionAction<InputStream>() {
          @Override
          public InputStream run() throws Exception {
            return command.execute(fs);
          }
        });
      } catch (InterruptedException ie) {
        // Preserve interrupt status; 'is' stays null and the entity streams nothing.
        LOG.warn("Open interrupted.", ie);
        Thread.currentThread().interrupt();
      }
      Long offset = params.get(OffsetParam.NAME, OffsetParam.class);
      Long len = params.get(LenParam.NAME, LenParam.class);
      AUDIT_LOG.info("[{}] offset [{}] len [{}]",
          new Object[] { path, offset, len });
      InputStreamEntity entity = new InputStreamEntity(is, offset, len);
      response = Response.ok(entity).type(MediaType.APPLICATION_OCTET_STREAM)
          .build();
    }
    break;
  }
  case GETFILESTATUS: {
    FSOperations.FSFileStatus command = new FSOperations.FSFileStatus(path);
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case LISTSTATUS: {
    String filter = params.get(FilterParam.NAME, FilterParam.class);
    FSOperations.FSListStatus command =
        new FSOperations.FSListStatus(path, filter);
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("[{}] filter [{}]", path, (filter != null) ? filter : "-");
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETHOMEDIRECTORY: {
    // Only valid against the root path.
    enforceRootPath(op.value(), path);
    FSOperations.FSHomeDir command = new FSOperations.FSHomeDir();
    JSONObject json = fsExecute(user, command);
    AUDIT_LOG.info("Home Directory for [{}]", user);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case INSTRUMENTATION: {
    // Root-only and restricted to members of the configured admin group.
    enforceRootPath(op.value(), path);
    Groups groups = HttpFSServerWebApp.get().get(Groups.class);
    Set<String> userGroups = groups.getGroupsSet(user.getShortUserName());
    if (!userGroups.contains(HttpFSServerWebApp.get().getAdminGroup())) {
      throw new AccessControlException(
          "User not in HttpFSServer admin group");
    }
    Instrumentation instrumentation =
        HttpFSServerWebApp.get().get(Instrumentation.class);
    Map snapshot = instrumentation.getSnapshot();
    response = Response.ok(snapshot).build();
    break;
  }
  case GETCONTENTSUMMARY: {
    FSOperations.FSContentSummary command =
        new FSOperations.FSContentSummary(path);
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("Content summary for [{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETQUOTAUSAGE: {
    FSOperations.FSQuotaUsage command =
        new FSOperations.FSQuotaUsage(path);
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("Quota Usage for [{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETFILECHECKSUM: {
    FSOperations.FSFileChecksum command =
        new FSOperations.FSFileChecksum(path);
    Boolean noRedirect = params.get(
        NoRedirectParam.NAME, NoRedirectParam.class);
    AUDIT_LOG.info("[{}]", path);
    if (noRedirect) {
      URI redirectURL = createOpenRedirectionURL(uriInfo);
      final String js = JsonUtil.toJsonString("Location", redirectURL);
      response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    } else {
      Map json = fsExecute(user, command);
      response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    }
    break;
  }
  case GETFILEBLOCKLOCATIONS: {
    // Offset/len default to the whole file; only positive values override.
    long offset = 0;
    long len = Long.MAX_VALUE;
    Long offsetParam = params.get(OffsetParam.NAME, OffsetParam.class);
    Long lenParam = params.get(LenParam.NAME, LenParam.class);
    AUDIT_LOG.info("[{}] offset [{}] len [{}]", path, offsetParam, lenParam);
    if (offsetParam != null && offsetParam > 0) {
      offset = offsetParam;
    }
    if (lenParam != null && lenParam > 0) {
      len = lenParam;
    }
    FSOperations.FSFileBlockLocations command =
        new FSOperations.FSFileBlockLocations(path, offset, len);
    @SuppressWarnings("rawtypes")
    Map locations = fsExecute(user, command);
    final String json = JsonUtil.toJsonString("BlockLocations", locations);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETACLSTATUS: {
    FSOperations.FSAclStatus command = new FSOperations.FSAclStatus(path);
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("ACL status for [{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETXATTRS: {
    List<String> xattrNames =
        params.getValues(XAttrNameParam.NAME, XAttrNameParam.class);
    XAttrCodec encoding =
        params.get(XAttrEncodingParam.NAME, XAttrEncodingParam.class);
    FSOperations.FSGetXAttrs command =
        new FSOperations.FSGetXAttrs(path, xattrNames, encoding);
    @SuppressWarnings("rawtypes") Map json = fsExecute(user, command);
    AUDIT_LOG.info("XAttrs for [{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case LISTXATTRS: {
    FSOperations.FSListXAttrs command = new FSOperations.FSListXAttrs(path);
    @SuppressWarnings("rawtypes") Map json = fsExecute(user, command);
    AUDIT_LOG.info("XAttr names for [{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case LISTSTATUS_BATCH: {
    // 'startAfter' is the pagination token from the previous batch.
    String startAfter = params.get(
        HttpFSParametersProvider.StartAfterParam.NAME,
        HttpFSParametersProvider.StartAfterParam.class);
    byte[] token = HttpFSUtils.EMPTY_BYTES;
    if (startAfter != null) {
      token = startAfter.getBytes(StandardCharsets.UTF_8);
    }
    FSOperations.FSListStatusBatch command = new FSOperations
        .FSListStatusBatch(path, token);
    @SuppressWarnings("rawtypes") Map json = fsExecute(user, command);
    AUDIT_LOG.info("[{}] token [{}]", path, token);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETTRASHROOT: {
    FSOperations.FSTrashRoot command = new FSOperations.FSTrashRoot(path);
    JSONObject json = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETALLSTORAGEPOLICY: {
    FSOperations.FSGetAllStoragePolicies command =
        new FSOperations.FSGetAllStoragePolicies();
    JSONObject json = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSTORAGEPOLICY: {
    FSOperations.FSGetStoragePolicy command =
        new FSOperations.FSGetStoragePolicy(path);
    JSONObject json = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSNAPSHOTDIFF: {
    String oldSnapshotName = params.get(OldSnapshotNameParam.NAME,
        OldSnapshotNameParam.class);
    String snapshotName = params.get(SnapshotNameParam.NAME,
        SnapshotNameParam.class);
    FSOperations.FSGetSnapshotDiff command =
        new FSOperations.FSGetSnapshotDiff(path, oldSnapshotName,
            snapshotName);
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSNAPSHOTDIFFLISTING: {
    String oldSnapshotName = params.get(OldSnapshotNameParam.NAME,
        OldSnapshotNameParam.class);
    String snapshotName = params.get(SnapshotNameParam.NAME,
        SnapshotNameParam.class);
    String snapshotDiffStartPath = params
        .get(HttpFSParametersProvider.SnapshotDiffStartPathParam.NAME,
            HttpFSParametersProvider.SnapshotDiffStartPathParam.class);
    Integer snapshotDiffIndex = params.get(HttpFSParametersProvider.SnapshotDiffIndexParam.NAME,
        HttpFSParametersProvider.SnapshotDiffIndexParam.class);
    FSOperations.FSGetSnapshotDiffListing command =
        new FSOperations.FSGetSnapshotDiffListing(path, oldSnapshotName,
            snapshotName, snapshotDiffStartPath, snapshotDiffIndex);
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSNAPSHOTTABLEDIRECTORYLIST: {
    FSOperations.FSGetSnapshottableDirListing command =
        new FSOperations.FSGetSnapshottableDirListing();
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", "/");
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSNAPSHOTLIST: {
    FSOperations.FSGetSnapshotListing command =
        new FSOperations.FSGetSnapshotListing(path);
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", "/");
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSERVERDEFAULTS: {
    FSOperations.FSGetServerDefaults command =
        new FSOperations.FSGetServerDefaults();
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", "/");
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case CHECKACCESS: {
    String mode = params.get(FsActionParam.NAME, FsActionParam.class);
    FsActionParam fsparam = new FsActionParam(mode);
    FSOperations.FSAccess command = new FSOperations.FSAccess(path,
        FsAction.getFsAction(fsparam.value()));
    // Throws on access denial; an empty 200 means access is permitted.
    fsExecute(user, command);
    AUDIT_LOG.info("[{}]", "/");
    response = Response.ok().build();
    break;
  }
  case GETECPOLICY: {
    FSOperations.FSGetErasureCodingPolicy command =
        new FSOperations.FSGetErasureCodingPolicy(path);
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETECPOLICIES: {
    FSOperations.FSGetErasureCodingPolicies command =
        new FSOperations.FSGetErasureCodingPolicies();
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETECCODECS: {
    FSOperations.FSGetErasureCodingCodecs command =
        new FSOperations.FSGetErasureCodingCodecs();
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GET_BLOCK_LOCATIONS: {
    // Legacy variant of GETFILEBLOCKLOCATIONS; same offset/len handling,
    // different command and JSON root element.
    long offset = 0;
    long len = Long.MAX_VALUE;
    Long offsetParam = params.get(OffsetParam.NAME, OffsetParam.class);
    Long lenParam = params.get(LenParam.NAME, LenParam.class);
    AUDIT_LOG.info("[{}] offset [{}] len [{}]", path, offsetParam, lenParam);
    if (offsetParam != null && offsetParam > 0) {
      offset = offsetParam;
    }
    if (lenParam != null && lenParam > 0) {
      len = lenParam;
    }
    FSOperations.FSFileBlockLocationsLegacy command =
        new FSOperations.FSFileBlockLocationsLegacy(path, offset, len);
    @SuppressWarnings("rawtypes")
    Map locations = fsExecute(user, command);
    final String json = JsonUtil.toJsonString("LocatedBlocks", locations);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETFILELINKSTATUS: {
    FSOperations.FSFileLinkStatus command =
        new FSOperations.FSFileLinkStatus(path);
    @SuppressWarnings("rawtypes") Map js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSTATUS: {
    FSOperations.FSStatus command = new FSOperations.FSStatus(path);
    @SuppressWarnings("rawtypes") Map js = fsExecute(user, command);
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETTRASHROOTS: {
    Boolean allUsers = params.get(AllUsersParam.NAME, AllUsersParam.class);
    FSOperations.FSGetTrashRoots command = new FSOperations.FSGetTrashRoots(allUsers);
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("allUsers [{}]", allUsers);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  default: {
    // Any operation not handled above is not a valid GET operation.
    throw new IOException(
        MessageFormat.format("Invalid HTTP GET operation [{0}]", op.value()));
  }
  }
  return response;
}
|
@Test
@TestDir
@TestJetty
@TestHdfs
public void testGetServerDefaults() throws Exception {
  // Spin up an HttpFS server (no security, no delegation tokens) and verify
  // that GETSERVERDEFAULTS returns values matching the backing DFS.
  createHttpFSServer(false, false);
  String pathStr1 = "/";
  Path path1 = new Path(pathStr1);
  DistributedFileSystem dfs = (DistributedFileSystem) FileSystem
      .get(path1.toUri(), TestHdfsHelper.getHdfsConf());
  verifyGetServerDefaults(dfs);
}
|
/**
 * Sets the label prepended to each printed record.
 *
 * @param label the label to use; must not be null
 * @return this instance, for chaining
 * @throws NullPointerException if {@code label} is null
 */
public Printed<K, V> withLabel(final String label) {
    // Same contract as Objects.requireNonNull: NPE with this exact message.
    if (label == null) {
        throw new NullPointerException("label can't be null");
    }
    this.label = label;
    return this;
}
|
@Test
public void shouldPrintWithLabel() throws UnsupportedEncodingException {
    // Records printed through a labeled Printed instance must be prefixed
    // with "[label]: " on stdout.
    final Processor<String, Integer, Void, Void> processor = new PrintedInternal<>(sysOutPrinter.withLabel("label"))
            .build("processor")
            .get();
    processor.process(new Record<>("hello", 3, 0L));
    processor.close();
    assertThat(sysOut.toString(StandardCharsets.UTF_8.name()), equalTo("[label]: hello, 3\n"));
}
|
/**
 * Returns the shared, immutable dispatcher metadata for this handler.
 */
public static NotificationDispatcherMetadata newMetadata() {
  return METADATA;
}
|
@Test
public void verify_myNewIssues_notification_dispatcher_key() {
  // The dispatcher key is part of the public notification contract and must
  // stay stable across releases.
  NotificationDispatcherMetadata metadata = MyNewIssuesNotificationHandler.newMetadata();
  assertThat(metadata.getDispatcherKey()).isEqualTo(MY_NEW_ISSUES_DISPATCHER_KEY);
}
|
/**
 * Entry point for multiplexed requests: validates the HTTP method, parses and
 * filters the individual requests, runs them in parallel on the parseq engine,
 * and finally aggregates the individual responses into a single RestResponse
 * delivered through {@code callback}.
 *
 * <p>Errors during parsing are reported via {@code callback.onError}; individual
 * request failures surface inside the aggregated response, not as a top-level error.
 */
@Override
public void handleRequest(RestRequest request, RequestContext requestContext, final Callback<RestResponse> callback)
{
  // Multiplexed requests are POST-only by contract.
  if (HttpMethod.POST != HttpMethod.valueOf(request.getMethod()))
  {
    _log.error("POST is expected, but " + request.getMethod() + " received");
    callback.onError(RestException.forError(HttpStatus.S_405_METHOD_NOT_ALLOWED.getCode(), "Invalid method"));
    return;
  }
  // Disable server-side latency instrumentation for multiplexed requests
  requestContext.putLocalAttr(TimingContextUtil.TIMINGS_DISABLED_KEY_NAME, true);
  IndividualRequestMap individualRequests;
  try
  {
    individualRequests = extractIndividualRequests(request);
    if (_multiplexerSingletonFilter != null) {
      // Optional filter may reject or rewrite individual requests before execution.
      individualRequests = _multiplexerSingletonFilter.filterRequests(individualRequests);
    }
  }
  catch (RestException e)
  {
    // Already carries an HTTP status; propagate as-is.
    _log.error("Invalid multiplexed request", e);
    callback.onError(e);
    return;
  }
  catch (Exception e)
  {
    // Any other parse failure maps to a 400.
    _log.error("Invalid multiplexed request", e);
    callback.onError(RestException.forError(HttpStatus.S_400_BAD_REQUEST.getCode(), e));
    return;
  }
  // prepare the map of individual responses to be collected
  final IndividualResponseMap individualResponses = new IndividualResponseMap(individualRequests.size());
  final Map<String, HttpCookie> responseCookies = new HashMap<>();
  // all tasks are Void and side effect based, that will be useful when we add streaming
  Task<?> requestProcessingTask = createParallelRequestsTask(request, requestContext, individualRequests, individualResponses, responseCookies);
  Task<Void> responseAggregationTask = Task.action("send aggregated response", () ->
  {
    // Runs after all individual requests complete; merges their results and cookies.
    RestResponse aggregatedResponse = aggregateResponses(individualResponses, responseCookies);
    callback.onSuccess(aggregatedResponse);
  }
  );
  _engine.run(requestProcessingTask.andThen(responseAggregationTask), MUX_PLAN_CLASS);
}
|
@Test(dataProvider = "multiplexerConfigurations")
public void testHandleError(MultiplexerRunMode multiplexerRunMode) throws Exception
{
  // One individual request succeeds and one throws; the multiplexer must still
  // return an aggregated 200 whose body contains a per-request error entry
  // rather than failing the whole multiplexed call.
  SynchronousRequestHandler mockHandler = createMockHandler();
  MultiplexedRequestHandlerImpl multiplexer = createMultiplexer(mockHandler, multiplexerRunMode);
  RequestContext requestContext = new RequestContext();
  RestRequest request = fakeMuxRestRequest(ImmutableMap.of("0", fakeIndRequest(FOO_URL), "1", fakeIndRequest(BAR_URL)));
  // set expectations
  expect(mockHandler.handleRequestSync(fakeIndRestRequest(FOO_URL), requestContext)).andReturn(fakeIndRestResponse(FOO_ENTITY));
  expect(mockHandler.handleRequestSync(fakeIndRestRequest(BAR_URL), requestContext)).andThrow(new NullPointerException());
  // switch into replay mode
  replay(mockHandler);
  FutureCallback<RestResponse> callback = new FutureCallback<>();
  multiplexer.handleRequest(request, requestContext, callback);
  RestResponse muxRestResponse = callback.get();
  RestResponse expectedMuxRestResponse = fakeMuxRestResponse(ImmutableMap.of(0, fakeIndResponse(FOO_JSON_BODY), 1, errorIndResponse()));
  assertEquals(muxRestResponse, expectedMuxRestResponse);
  verify(mockHandler);
}
|
/**
 * Creates a dynamic proxy implementing the given configuration interface, backed by
 * the shared invocation handler.
 *
 * @param clazz public configuration interface to proxy
 * @param <T>   configuration interface type
 * @return a proxy instance of {@code clazz}
 * @throws RuntimeException if {@code clazz} is not public (default methods could not
 *                          be invoked on non-public interfaces)
 */
public <T extends Config> T getConfig(Class<T> clazz)
{
  if (!Modifier.isPublic(clazz.getModifiers()))
  {
    throw new RuntimeException("Non-public configuration classes can't have default methods invoked");
  }
  // Safe: the proxy is created with exactly the requested interface, so the cast holds.
  @SuppressWarnings("unchecked")
  T proxy = (T) Proxy.newProxyInstance(clazz.getClassLoader(), new Class<?>[]
  {
    clazz
  }, handler);
  return proxy;
}
|
/** Verifies that a value stored via the manager is readable through the proxied config interface. */
@Test
public void testGetConfig() throws IOException
{
  manager.setConfiguration("test", "key", "moo");
  TestConfig conf = manager.getConfig(TestConfig.class);
  Assert.assertEquals("moo", conf.key());
}
|
/**
 * Appends {@code length} bytes from {@code src} starting at {@code srcIndex},
 * growing the buffer if needed and advancing the writer index.
 */
@Override
public ByteBuf writeBytes(byte[] src, int srcIndex, int length) {
    // Make room, copy, then advance the writer cursor by the amount written.
    ensureWritable(length);
    final int start = writerIndex;
    setBytes(start, src, srcIndex, length);
    writerIndex = start + length;
    return this;
}
|
/** Writing from a channel into an already-released buffer must fail with IllegalReferenceCountException. */
@Test
public void testWriteBytesAfterRelease8() {
    assertThrows(IllegalReferenceCountException.class, new Executable() {
        @Override
        public void execute() throws IOException {
            releasedBuffer().writeBytes(new TestScatteringByteChannel(), 1);
        }
    });
}
|
/**
 * Loads a class with a child-first strategy for selected classes.
 * Order: reject blacklisted names, reuse an already-loaded class, try this
 * loader for child-eligible names, then fall back to the parent loader.
 *
 * @throws ClassNotFoundException if the name is blacklisted or cannot be resolved at all
 */
@Override
protected Class<?> loadClass(final String name, final boolean resolve)
    throws ClassNotFoundException {
  if (blacklist.test(name)) {
    throw new ClassNotFoundException("The requested class is not permitted to be used from a "
        + "udf. Class " + name);
  }
  Class<?> clazz = findLoadedClass(name);
  if (clazz == null) {
    try {
      // Child-first: only classes matching the child predicate are defined here.
      if (shouldLoadFromChild(name)) {
        clazz = findClass(name);
      }
    } catch (final ClassNotFoundException e) {
      // Not fatal: fall through to the parent loader below.
      logger.trace("Class {} not found in {} using parent classloader",
          name,
          path);
    }
  }
  if (clazz == null) {
    // Delegate to the parent; resolution is deferred to the block below.
    clazz = super.loadClass(name, false);
  }
  if (resolve) {
    resolveClass(clazz);
  }
  return clazz;
}
|
/** A class present in both parent and jar must be loaded from the child (jar), not the parent. */
@Test
public void shouldLoadNonConfluentClassesFromChildFirst() throws ClassNotFoundException {
    assertThat(parentLoader.findClass(IN_PARENT_AND_JAR), is(equalTo(InParentAndJar.class)));
    assertThat(udfClassLoader.loadClass(IN_PARENT_AND_JAR, true), not(InParentAndJar.class));
}
|
/**
 * Varargs convenience overload: removes from {@code values} every element matching
 * one of the given patterns, delegating to the List-based implementation.
 */
public static void removeMatching(Collection<String> values, String... patterns) {
    removeMatching(values, Arrays.asList(patterns));
}
|
/** A single value matching the sole pattern leaves the collection empty. */
@Test
public void testRemoveMatchingWithMatchingPattern() throws Exception {
    Collection<String> values = stringToList("A");
    StringCollectionUtil.removeMatching(values, "A");
    assertTrue(values.isEmpty());
}
|
/**
 * Returns an iterator over the result entries; an empty iterator when there is
 * no backing entry list.
 */
@Override
public Iterator iterator() {
    // No entries means nothing to iterate over.
    return entries == null ? Collections.emptyIterator() : new ResultIterator();
}
|
/** An empty ResultSet must yield an iterator with no elements. */
@Test
public void testIterator_whenEmpty() {
    List<Map.Entry> emptyList = Collections.emptyList();
    ResultSet resultSet = new ResultSet(emptyList, IterationType.KEY);
    Iterator iterator = resultSet.iterator();
    assertFalse(iterator.hasNext());
}
|
/**
 * Retrieves raw metrics for the given node from its Prometheus exporter endpoint,
 * configuring buffering, basic auth and SSL from the cluster's metrics settings.
 *
 * @param c    cluster whose metrics configuration is used
 * @param node broker node to scrape
 * @return stream of parsed raw metrics
 */
@Override
public Flux<RawMetric> retrieve(KafkaCluster c, Node node) {
  // Resolve the cluster's metrics settings once and reuse the local below
  // (the original re-read c.getMetricsConfig() in several places).
  MetricsConfig metricsConfig = c.getMetricsConfig();
  log.debug("Retrieving metrics from prometheus exporter: {}:{}", node.host(), metricsConfig.getPort());
  var webClient = new WebClientConfigurator()
      .configureBufferSize(DataSize.ofMegabytes(20))
      .configureBasicAuth(metricsConfig.getUsername(), metricsConfig.getPassword())
      .configureSsl(
          c.getOriginalProperties().getSsl(),
          new ClustersProperties.KeystoreConfig(
              metricsConfig.getKeystoreLocation(),
              metricsConfig.getKeystorePassword()))
      .build();
  return retrieve(webClient, node.host(), metricsConfig);
}
|
/** Scrapes a mocked secured /metrics endpoint and checks conversion to RawMetric, skipping NaN values. */
@Test
void callsSecureMetricsEndpointAndConvertsResponceToRawMetric() {
    var url = mockWebServer.url("/metrics");
    mockWebServer.enqueue(prepareResponse());
    MetricsConfig metricsConfig = prepareMetricsConfig(url.port(), "username", "password");
    StepVerifier.create(retriever.retrieve(WebClient.create(), url.host(), metricsConfig))
        .expectNextSequence(expectedRawMetrics())
        // third metric should not be present, since it has "NaN" value
        .verifyComplete();
}
|
/**
 * Asserts the subject is at most {@code other}, delegating to the double view.
 * NOTE(review): the int is widened to double for comparison; ints above 2^53
 * would lose precision, though every int is exactly representable — safe here.
 */
public final void isAtMost(int other) {
    asDouble.isAtMost(other);
}
|
/** 2^30 as a float is greater than (2^30 - 1), so isAtMost must fail. */
@Test
public void isAtMost_int_withNoExactFloatRepresentation() {
    expectFailureWhenTestingThat(0x1.0p30f).isAtMost((1 << 30) - 1);
}
|
/**
 * Handles product-creation form submission. On success, redirects to the new
 * product's page; on validation failure (BadRequestException from the REST
 * client), re-renders the form with the submitted payload and error messages
 * and a 400 status.
 */
@PostMapping("create")
public String createProduct(NewProductPayload payload,
                            Model model,
                            HttpServletResponse response) {
    try {
        Product product = this.productsRestClient.createProduct(payload.title(), payload.details());
        return "redirect:/catalogue/products/%d".formatted(product.id());
    } catch (BadRequestException exception) {
        // Re-render the form: keep the user's input and surface the errors.
        response.setStatus(HttpStatus.BAD_REQUEST.value());
        model.addAttribute("payload", payload);
        model.addAttribute("errors", exception.getErrors());
        return "catalogue/products/new_product";
    }
}
|
/** Invalid payload: controller must return the form view with payload, errors and a 400 status. */
@Test
@DisplayName("createProduct вернёт страницу с ошибками, если запрос невалиден")
void createProduct_RequestIsInvalid_ReturnsProductFormWithErrors() {
    // given
    var payload = new NewProductPayload(" ", null);
    var model = new ConcurrentModel();
    var response = new MockHttpServletResponse();
    doThrow(new BadRequestException(List.of("Ошибка 1", "Ошибка 2")))
        .when(this.productsRestClient)
        .createProduct(" ", null);
    // when
    var result = this.controller.createProduct(payload, model, response);
    // then
    assertEquals("catalogue/products/new_product", result);
    assertEquals(payload, model.getAttribute("payload"));
    assertEquals(List.of("Ошибка 1", "Ошибка 2"), model.getAttribute("errors"));
    assertEquals(HttpStatus.BAD_REQUEST.value(), response.getStatus());
    verify(this.productsRestClient).createProduct(" ", null);
    verifyNoMoreInteractions(this.productsRestClient);
}
|
/**
 * Persists a brand-new client after validation and normalization.
 * Rejects clients that already have an ID or a blacklisted redirect URI,
 * assigns a random client id when empty, enforces several consistency rules,
 * timestamps the client, then saves and resets the stats cache.
 *
 * @param client client to create; must not have an ID yet
 * @return the persisted client entity
 * @throws IllegalArgumentException if the client has an ID, a blacklisted URI,
 *                                  or fails one of the consistency checks
 */
@Override
public ClientDetailsEntity saveNewClient(ClientDetailsEntity client) {
    if (client.getId() != null) { // if it's not null, it's already been saved, this is an error
        throw new IllegalArgumentException("Tried to save a new client with an existing ID: " + client.getId());
    }
    if (client.getRegisteredRedirectUri() != null) {
        for (String uri : client.getRegisteredRedirectUri()) {
            if (blacklistedSiteService.isBlacklisted(uri)) {
                throw new IllegalArgumentException("Client URI is blacklisted: " + uri);
            }
        }
    }
    // assign a random clientid if it's empty
    // NOTE: don't assign a random client secret without asking, since public clients have no secret
    if (Strings.isNullOrEmpty(client.getClientId())) {
        client = generateClientId(client);
    }
    // make sure that clients with the "refresh_token" grant type have the "offline_access" scope, and vice versa
    ensureRefreshTokenConsistency(client);
    // make sure we don't have both a JWKS and a JWKS URI
    ensureKeyConsistency(client);
    // check consistency when using HEART mode
    checkHeartMode(client);
    // timestamp this to right now
    client.setCreatedAt(new Date());
    // check the sector URI
    checkSectorIdentifierUri(client);
    // strip any scopes the client isn't allowed to request
    ensureNoReservedScopes(client);
    ClientDetailsEntity c = clientRepository.saveClient(client);
    // stats are per-client, so the cache is stale after adding one
    statsService.resetCache();
    return c;
}
|
/** In HEART mode, mixing "implicit" with code/client_credentials grants must be rejected. */
@Test(expected = IllegalArgumentException.class)
public void heartMode_implicit_invalidGrants() {
    Mockito.when(config.isHeartMode()).thenReturn(true);
    ClientDetailsEntity client = new ClientDetailsEntity();
    Set<String> grantTypes = new LinkedHashSet<>();
    grantTypes.add("implicit");
    grantTypes.add("authorization_code");
    grantTypes.add("client_credentials");
    client.setGrantTypes(grantTypes);
    client.setTokenEndpointAuthMethod(AuthMethod.NONE);
    client.setRedirectUris(Sets.newHashSet("https://foo.bar/"));
    client.setJwksUri("https://foo.bar/jwks");
    service.saveNewClient(client);
}
|
/**
 * Intersects two connected ranges, tolerating endpoints of different numeric types.
 * If the direct intersection throws ClassCastException (mixed endpoint types such as
 * Integer vs BigInteger), both ranges are converted to a common target numeric type
 * and intersected again.
 *
 * @throws ClassCastException if no common numeric target type can be determined
 */
public static Range<Comparable<?>> safeIntersection(final Range<Comparable<?>> range, final Range<Comparable<?>> connectedRange) {
    try {
        return range.intersection(connectedRange);
    } catch (final ClassCastException ex) {
        // Endpoints are of incompatible Comparable types; find a common numeric type.
        Class<?> clazz = getRangeTargetNumericType(range, connectedRange);
        if (null == clazz) {
            // Not a numeric mismatch we can repair — re-throw the original failure.
            throw ex;
        }
        Range<Comparable<?>> newRange = createTargetNumericTypeRange(range, clazz);
        Range<Comparable<?>> newConnectedRange = createTargetNumericTypeRange(connectedRange, clazz);
        return newRange.intersection(newConnectedRange);
    }
}
|
/** Integer endpoint is coerced to BigInteger so the mixed-type intersection succeeds. */
@Test
void assertSafeIntersectionForBigInteger() {
    Range<Comparable<?>> range = Range.upTo(new BigInteger("131323233123211"), BoundType.CLOSED);
    Range<Comparable<?>> connectedRange = Range.downTo(35, BoundType.OPEN);
    Range<Comparable<?>> newRange = SafeNumberOperationUtils.safeIntersection(range, connectedRange);
    assertThat(newRange.lowerEndpoint(), is(new BigInteger("35")));
    assertThat(newRange.lowerBoundType(), is(BoundType.OPEN));
    assertThat(newRange.upperEndpoint(), is(new BigInteger("131323233123211")));
    assertThat(newRange.upperBoundType(), is(BoundType.CLOSED));
}
|
/**
 * Returns the stored password.
 *
 * @return the password value held by this object
 */
public String getPassword() {
    return this.password;
}
|
/** The credentials fixture must expose its configured password "p". */
@Test
void hasAPassword() {
    assertThat(credentials.getPassword()).isEqualTo("p");
}
|
/**
 * Serializes this message as JSON with alphabetically sorted properties, sorted map
 * entries and null values omitted.
 *
 * @return the JSON representation, or a diagnostic string if serialization fails
 */
@Override
public String toString() {
    try {
        // NOTE(review): constructing an ObjectMapper per call is expensive; consider
        // caching a configured instance in a static final field on the class.
        return new ObjectMapper().configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true)
                .configure(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, true)
                .setSerializationInclusion(Include.NON_NULL).writeValueAsString(this);
    } catch (JsonProcessingException e) {
        // Fixed typo in the diagnostic message: "AlertaMassage" -> "AlertaMessage".
        return "AlertaMessage Object parsing error : " + e.getMessage();
    }
}
|
/** Null fields must be omitted and keys sorted alphabetically in the JSON output. */
@Test
public void testAlertaMessageJsonFormatWithoutNullValues() {
    AlertaMessage alertaMessage = new AlertaMessage("resource", "event");
    String expectedJson =
        "{\"event\":\"event\","
            + "\"resource\":\"resource\"}";
    assertEquals(expectedJson, alertaMessage.toString());
}
|
/**
 * Reads exactly {@code length} bytes from the stream into the internal buffer
 * starting at {@code offset}, growing the buffer if necessary, and sets the
 * logical size to {@code offset + length}.
 *
 * @throws IOException if the stream ends before {@code length} bytes are read
 */
public void readFrom(InputStream inStream, int offset, int length) throws IOException {
    ensureCapacity(offset + length);
    // ByteStreams.readFully throws rather than returning a short count.
    ByteStreams.readFully(inStream, buffer, offset, length);
    size = offset + length;
}
|
/** Reading 2 bytes at offset 3 must zero-pad positions 0-2 and set size to 5. */
@Test
public void testReadFrom() throws Exception {
    ByteArrayInputStream bais = new ByteArrayInputStream(TEST_DATA_A);
    RandomAccessData stream = new RandomAccessData();
    stream.readFrom(bais, 3, 2);
    assertArrayEquals(
        new byte[] {0x00, 0x00, 0x00, 0x01, 0x02}, Arrays.copyOf(stream.array(), stream.size()));
    bais.close();
}
|
/**
 * Evaluates the expression in the given expression language with an empty
 * variable context, delegating to the map-accepting overload.
 */
public static String analytical(String expression, String language) throws Exception {
    return analytical(expression, new HashMap<>(), language);
}
|
/**
 * Evaluates a JSON template with SpEL placeholders (including hyphenated property
 * keys like "a-r-str") against a nested JSON context and prints the result.
 * NOTE(review): no assertion is made — this only checks evaluation doesn't throw.
 */
@Test
public void testJson2(){
    String js = ExpressionUtils.analytical("{\n" +
            "  \"msgtype\": \"markdown\",\n" +
            "  \"markdown\": {\n" +
            "    \"title\":\"消息类型:${messageType}\",\n" +
            "    \"text\": \" - 设备ID: `${deviceId}` 值:`${properties.a-r-str}` \\n - 设备型号: `${headers.productId}`\\n - 设备名称: `${headers.deviceName}`\"" +
            "  \n},\n" +
            "  \"at\": {\n" +
            "    \"isAtAll\": false\n" +
            "  }\n" +
            "}", JSON.parseObject("{\n" +
            "  \"deviceId\": \"VIS-Mandrake-12289\",\n" +
            "  \"headers\": {\n" +
            "    \"productId\": \"VIS-Mandrake\",\n" +
            "    \"deviceName\": \"能见度仪-曼德克-01\"\n" +
            "  }, \"properties\": {\n" +
            "    \"a-r-str\": \"a2\"\n" +
            "  },\n" +
            "  \"messageType\": \"OFFLINE\",\n" +
            "  \"timestamp\": 1592098397277\n" +
            "}"), "spel");
    System.out.println(js);
}
|
/**
 * Enters a new scenario scope, lazily creating the default injector when
 * neither properties nor annotations supplied an injector source.
 */
public void start() {
    // Last minute init. Neither properties nor annotations provided an
    // injector source, so fall back to the default module.
    if (injector == null) {
        injector = createDefaultScenarioModuleInjectorSource().getInjector();
    }
    scenarioScope = injector.getInstance(ScenarioScope.class);
    scenarioScope.enterScope();
}
|
/** start() must not throw when no injector source was configured (default is created lazily). */
@Test
void shouldStartWhenInjectorSourceIsNull() {
    factory = new GuiceFactory();
    factory.start();
}
|
/**
 * Reports whether none of the Consul config keys (plugin, auth, metadata) exist.
 * Short-circuits to {@code false} on the first key that is present.
 */
@Override
protected boolean notExist() {
    String[] dataKeys = {ConsulConstants.PLUGIN_DATA, ConsulConstants.AUTH_DATA, ConsulConstants.META_DATA};
    for (String dataKey : dataKeys) {
        if (!dataKeyNotExist(dataKey)) {
            return false;
        }
    }
    return true;
}
|
/**
 * Walks the three Consul keys one by one: as long as any of plugin/auth/metadata
 * has a value, notExist() is false; once all return null it is true.
 */
@Test
public void testNotExist() throws Exception {
    ConsulDataChangedInit consulDataChangedInit = new ConsulDataChangedInit(consulClient);
    assertNotNull(consulDataChangedInit);
    Response<GetValue> pluginResponse = mock(Response.class);
    when(consulClient.getKVValue(ConsulConstants.PLUGIN_DATA)).thenReturn(pluginResponse);
    when(pluginResponse.getValue()).thenReturn(new GetValue());
    boolean pluginExist = consulDataChangedInit.notExist();
    assertFalse(pluginExist, "plugin exist.");
    when(pluginResponse.getValue()).thenReturn(null);
    Response<GetValue> appAuthResponse = mock(Response.class);
    when(consulClient.getKVValue(ConsulConstants.AUTH_DATA)).thenReturn(appAuthResponse);
    when(appAuthResponse.getValue()).thenReturn(new GetValue());
    boolean appAuthExist = consulDataChangedInit.notExist();
    assertFalse(appAuthExist, "app auth exist.");
    when(appAuthResponse.getValue()).thenReturn(null);
    Response<GetValue> mataDataResponse = mock(Response.class);
    when(consulClient.getKVValue(ConsulConstants.META_DATA)).thenReturn(mataDataResponse);
    when(mataDataResponse.getValue()).thenReturn(new GetValue());
    boolean metaDataExist = consulDataChangedInit.notExist();
    assertFalse(metaDataExist, "metadata exist.");
    when(mataDataResponse.getValue()).thenReturn(null);
    boolean metaDataNotExist = consulDataChangedInit.notExist();
    assertTrue(metaDataNotExist, "metadata not exist.");
}
|
/**
 * Computes the group assignment. An empty member set yields an empty assignment;
 * otherwise the homogeneous fast path is used when all members share the same
 * subscription, and the heterogeneous path otherwise.
 *
 * @throws PartitionAssignorException if the assignment cannot be computed
 */
@Override
public GroupAssignment assign(
    GroupSpec groupSpec,
    SubscribedTopicDescriber subscribedTopicDescriber
) throws PartitionAssignorException {
    // Guard: no members means nothing to assign.
    if (groupSpec.memberIds().isEmpty()) {
        return new GroupAssignment(Collections.emptyMap());
    }
    boolean homogeneous = groupSpec.subscriptionType().equals(HOMOGENEOUS);
    return homogeneous
        ? assignHomogenous(groupSpec, subscribedTopicDescriber)
        : assignHeterogeneous(groupSpec, subscribedTopicDescriber);
}
|
/** Subscribing to a topic absent from the metadata must raise PartitionAssignorException. */
@Test
public void testAssignWithSubscribedToNonExistentTopic() {
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        Collections.singletonMap(
            TOPIC_1_UUID,
            new TopicMetadata(
                TOPIC_1_UUID,
                TOPIC_1_NAME,
                3,
                Collections.emptyMap()
            )
        )
    );
    // The member subscribes to TOPIC_2, which is not in the metadata above.
    Map<String, MemberSubscriptionAndAssignmentImpl> members = Collections.singletonMap(
        MEMBER_A,
        new MemberSubscriptionAndAssignmentImpl(
            Optional.empty(),
            Optional.empty(),
            mkSet(TOPIC_2_UUID),
            Assignment.EMPTY
        )
    );
    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HOMOGENEOUS,
        Collections.emptyMap()
    );
    assertThrows(PartitionAssignorException.class,
        () -> assignor.assign(groupSpec, subscribedTopicMetadata));
}
|
/**
 * Generates a random (type 4) UUID in canonical 36-character hyphenated form.
 *
 * @return the UUID string, e.g. "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx"
 */
public static String randomUUID() {
    final UUID uuid = UUID.randomUUID();
    return uuid.toString();
}
|
/** simpleUUID drops hyphens (32 chars); randomUUID keeps the canonical 36-char form. */
@Test
public void randomUUIDTest() {
    String simpleUUID = IdUtil.simpleUUID();
    assertEquals(32, simpleUUID.length());
    String randomUUID = IdUtil.randomUUID();
    assertEquals(36, randomUUID.length());
}
|
/**
 * Determines the type (source/sink) of the connector named in the given config.
 * Returns {@link ConnectorType#UNKNOWN} when the config is null, lacks a
 * connector class, or the plugin cannot be instantiated.
 */
public ConnectorType connectorType(Map<String, String> connConfig) {
    if (connConfig == null) {
        return ConnectorType.UNKNOWN;
    }
    final String connClass = connConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG);
    if (connClass == null) {
        return ConnectorType.UNKNOWN;
    }
    try {
        return ConnectorType.from(getConnector(connClass).getClass());
    } catch (ConnectException e) {
        // Missing/broken plugin: degrade to UNKNOWN instead of propagating.
        log.warn("Unable to retrieve connector type", e);
        return ConnectorType.UNKNOWN;
    }
}
|
/** When the plugin cannot be loaded, connectorType must fall back to UNKNOWN. */
@Test
public void testGetConnectorTypeWithMissingPlugin() {
    String connName = "AnotherPlugin";
    AbstractHerder herder = testHerder();
    when(worker.getPlugins()).thenReturn(plugins);
    when(plugins.newConnector(anyString())).thenThrow(new ConnectException("No class found"));
    assertEquals(ConnectorType.UNKNOWN, herder.connectorType(Collections.singletonMap(ConnectorConfig.CONNECTOR_CLASS_CONFIG, connName)));
}
|
/**
 * Renders the schema as SQL type text (walker output plus the type postfix),
 * optionally stripping the top-level STRUCT wrapper when formatting as a
 * column list.
 */
@Override
public String format(final Schema schema) {
    final String formatted = SchemaWalker.visit(schema, new Converter()) + typePostFix(schema);
    if (options.contains(Option.AS_COLUMN_LIST)) {
        return stripTopLevelStruct(formatted);
    }
    return formatted;
}
|
/** An optional MAP<STRING, FLOAT64> formats identically under DEFAULT and STRICT options. */
@Test
public void shouldFormatOptionalMap() {
    // Given:
    final Schema schema = SchemaBuilder
        .map(Schema.OPTIONAL_STRING_SCHEMA, Schema.OPTIONAL_FLOAT64_SCHEMA)
        .optional()
        .build();
    // Then:
    assertThat(DEFAULT.format(schema), is("MAP<VARCHAR, DOUBLE>"));
    assertThat(STRICT.format(schema), is("MAP<VARCHAR, DOUBLE>"));
}
|
/**
 * Determines the required Kafka version change by chaining asynchronous steps:
 * read the version from the controller, list the pods, detect from/to versions,
 * then prepare the resulting version change.
 *
 * @return future completing with the computed version change
 */
public Future<KafkaVersionChange> reconcile() {
    return getVersionFromController()
            .compose(i -> getPods())
            .compose(this::detectToAndFromVersions)
            .compose(i -> prepareVersionChange());
}
|
/**
 * A new cluster (no existing pods) with old protocol/format settings keeps the
 * default version and leaves inter-broker protocol / log message format unset.
 */
@Test
public void testNewClusterWithOldProtocolVersion(VertxTestContext context) {
    VersionChangeCreator vcc = mockVersionChangeCreator(
        mockKafka(VERSIONS.defaultVersion().version(), "2.8", "2.7"),
        mockNewCluster(null, null, List.of())
    );
    Checkpoint async = context.checkpoint();
    vcc.reconcile().onComplete(context.succeeding(c -> context.verify(() -> {
        assertThat(c.from(), is(VERSIONS.defaultVersion()));
        assertThat(c.to(), is(VERSIONS.defaultVersion()));
        assertThat(c.interBrokerProtocolVersion(), nullValue());
        assertThat(c.logMessageFormatVersion(), nullValue());
        assertThat(c.metadataVersion(), is(VERSIONS.defaultVersion().metadataVersion()));
        async.flag();
    })));
}
|
/**
 * ANDs the given item with the current root, installs the combined item as the
 * new root, and returns it.
 */
public Item and(Item item) {
    Item combined = and(getRoot(), item);
    setRoot(combined);
    return combined;
}
|
/** ANDing a word yields "AND a b"; ANDing a NotItem folds into the "+a -b" form. */
@Test
void testAddQueryItemWithRoot() {
    assertEquals("AND a b",
            new QueryTree(new WordItem("a")).and(new WordItem("b")).toString());
    NotItem not = new NotItem();
    not.addNegativeItem(new WordItem("b"));
    assertEquals("+a -b",
            new QueryTree(new WordItem("a")).and(not).toString());
}
|
/**
 * Opens a download stream for the given object in Google Cloud Storage.
 * Handles zero-length downloads, requester-pays buckets, versioned reads and
 * ranged (resumed) downloads.
 *
 * @param file     object to read
 * @param status   transfer status carrying length, append/range and version info
 * @param callback connection prompt callback (unused here)
 * @return stream over the object content (or an empty stream for length 0)
 * @throws BackgroundException mapped from any I/O failure
 */
@Override
public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    try {
        // Nothing to transfer: avoid a remote call entirely.
        if(0L == status.getLength()) {
            return new NullInputStream(0L);
        }
        final Storage.Objects.Get request = session.getClient().objects().get(
                containerService.getContainer(file).getName(), containerService.getKey(file));
        // Requester-pays buckets need the billing project set on the request.
        if(containerService.getContainer(file).attributes().getCustom().containsKey(GoogleStorageAttributesFinderFeature.KEY_REQUESTER_PAYS)) {
            request.setUserProject(session.getHost().getCredentials().getUsername());
        }
        final VersioningConfiguration versioning = null != session.getFeature(Versioning.class) ? session.getFeature(Versioning.class).getConfiguration(
                containerService.getContainer(file)
        ) : VersioningConfiguration.empty();
        if(versioning.isEnabled()) {
            // A specific object generation is requested via the path's version id.
            if(StringUtils.isNotBlank(file.attributes().getVersionId())) {
                request.setGeneration(Long.parseLong(file.attributes().getVersionId()));
            }
        }
        if(status.isAppend()) {
            // Resumed download: request only the remaining byte range.
            final HttpRange range = HttpRange.withStatus(status);
            final String header;
            if(TransferStatus.UNKNOWN_LENGTH == range.getEnd()) {
                header = String.format("bytes=%d-", range.getStart());
            }
            else {
                header = String.format("bytes=%d-%d", range.getStart(), range.getEnd());
            }
            if(log.isDebugEnabled()) {
                log.debug(String.format("Add range header %s for file %s", header, file));
            }
            final HttpHeaders headers = request.getRequestHeaders();
            headers.setRange(header);
            // Disable compression so byte offsets in the range match the stored object.
            headers.setAcceptEncoding("identity");
        }
        return request.executeMediaAsInputStream();
    }
    catch(IOException e) {
        throw new GoogleStorageExceptionMappingService().map("Download {0} failed", e, file);
    }
}
|
/**
 * Writes new content over an empty object, then reads the previous (empty)
 * generation by version id and verifies it is still 0 bytes while the latest
 * generation has the new length.
 */
@Test
public void testReadPreviousVersion() throws Exception {
    final int length = 8;
    final byte[] content = RandomUtils.nextBytes(length);
    final Path container = new Path("cyberduck-test-eu", EnumSet.of(Path.Type.directory, Path.Type.volume));
    assertTrue(new GoogleStorageVersioningFeature(session).getConfiguration(container).isEnabled());
    final Path file = new GoogleStorageTouchFeature(session).touch(new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus());
    final String initialVersion = file.attributes().getVersionId();
    final TransferStatus status = new TransferStatus().withLength(content.length);
    status.setChecksum(new SHA256ChecksumCompute().compute(new ByteArrayInputStream(content), status));
    final HttpResponseOutputStream<StorageObject> out = new GoogleStorageWriteFeature(session).write(file, status, new DisabledConnectionCallback());
    new StreamCopier(new TransferStatus(), new TransferStatus()).transfer(new ByteArrayInputStream(content), out);
    assertEquals(0L, new GoogleStorageAttributesFinderFeature(session).find(file.withAttributes(new PathAttributes(file.attributes()).withVersionId(initialVersion))).getSize());
    // Read previous version
    status.setLength(0L);
    final InputStream in = new GoogleStorageReadFeature(session).read(file, status, new DisabledConnectionCallback());
    assertNotNull(in);
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream(0);
    new StreamCopier(status, status).transfer(in, buffer);
    assertEquals(0, buffer.size());
    in.close();
    file.attributes().setVersionId(String.valueOf(out.getStatus().getGeneration()));
    assertEquals(length, new GoogleStorageAttributesFinderFeature(session).find(file).getSize());
    new GoogleStorageDeleteFeature(session).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
|
/**
 * Looks up attributes for a path. The root path is answered locally (the server
 * rejects node id 0), with an ACL granting room creation for room managers.
 * Other paths are resolved by node id; on a stale cached id the lookup is
 * retried once with a refreshed id.
 *
 * @throws BackgroundException if the node cannot be found even after retry
 */
@Override
public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException {
    if(file.isRoot()) {
        // {"code":400,"message":"Bad Request","debugInfo":"Node ID must be positive.","errorCode":-80001}
        final PathAttributes attributes = new PathAttributes();
        if(session.userAccount().isUserInRole(SDSPermissionsFeature.ROOM_MANAGER_ROLE)) {
            // We need to map user roles to ACLs in order to decide if creating a top-level room is allowed
            final Acl acl = new Acl();
            acl.addAll(new Acl.CanonicalUser(), SDSPermissionsFeature.CREATE_ROLE);
            attributes.setAcl(acl);
        }
        return attributes;
    }
    // Throw failure if looking up file fails
    final String id = nodeid.getVersionId(file);
    try {
        return this.findNode(file, id);
    }
    catch(NotfoundException e) {
        if(log.isWarnEnabled()) {
            log.warn(String.format("Previously cached node id %s no longer found for file %s", id, file));
        }
        // Try with reset cache after failure finding node id
        return this.findNode(file, nodeid.getVersionId(file));
    }
}
|
/**
 * Seeds the node-id cache with a bogus id, then verifies the attributes finder
 * recovers by refreshing the cache and returning the server's real node id.
 */
@Test
public void testChangedNodeId() throws Exception {
    final SDSNodeIdProvider nodeid = new SDSNodeIdProvider(session);
    final Path room = new SDSDirectoryFeature(session, nodeid).mkdir(
        new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus());
    final Path test = new SDSTouchFeature(session, nodeid).touch(new Path(room, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus());
    final String latestnodeid = test.attributes().getVersionId();
    assertNotNull(latestnodeid);
    // Assume previously seen but changed on server
    final String invalidId = String.valueOf(RandomUtils.nextLong());
    test.attributes().setVersionId(invalidId);
    nodeid.cache(test, invalidId);
    final SDSAttributesFinderFeature f = new SDSAttributesFinderFeature(session, nodeid);
    assertEquals(latestnodeid, f.find(test).getVersionId());
    assertEquals(latestnodeid, test.attributes().getVersionId());
    new SDSDeleteFeature(session, nodeid).delete(Collections.singletonList(room), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
|
/**
 * Processes a status message from a receiver and returns the updated sender limit,
 * delegating to the common processing with the tag-match result for this flyweight.
 *
 * @param flyweight           decoded status message
 * @param receiverAddress     source address of the message (unused in the computation here)
 * @param senderLimit         current sender position limit
 * @param initialTermId       initial term id of the stream
 * @param positionBitsToShift shift for converting term id/offset into a position
 * @param timeNs              current time in nanoseconds
 * @return the new sender limit
 */
public long onStatusMessage(
    final StatusMessageFlyweight flyweight,
    final InetSocketAddress receiverAddress,
    final long senderLimit,
    final int initialTermId,
    final int positionBitsToShift,
    final long timeNs)
{
    return processStatusMessage(
        flyweight, senderLimit, initialTermId, positionBitsToShift, timeNs, matchesTag(flyweight));
}
|
/**
 * Min flow control must ignore a receiver lagging more than a window behind:
 * the first two (too-far-behind) receivers leave the limit unchanged; the
 * third, within the window, sets the limit to its position plus the window.
 */
@Test
void shouldNotIncludeReceiverMoreThanWindowSizeBehindMinPosition()
{
    final UdpChannel udpChannel = UdpChannel.parse(
        "aeron:udp?endpoint=224.20.30.39:24326|interface=localhost|fc=min,g:123/2");
    flowControl.initialize(
        newContext(), countersManager, udpChannel, 0, 0, 0, 0, 0);
    final int senderLimit = 5000;
    final int termOffset0 = WINDOW_LENGTH * 2;
    final int termOffset1 = termOffset0 - (WINDOW_LENGTH + 1);
    final int termOffset2 = termOffset0 - (WINDOW_LENGTH);
    assertEquals(senderLimit, onStatusMessage(flowControl, 1, termOffset0, senderLimit, 123L));
    assertEquals(senderLimit, onStatusMessage(flowControl, 2, termOffset1, senderLimit, 123L));
    assertEquals(termOffset2 + WINDOW_LENGTH, onStatusMessage(flowControl, 3, termOffset2, senderLimit, 123L));
}
|
/**
 * Computes the 32-bit MurmurHash3 (x86 variant) of the byte range with the
 * default seed. The assertion guards against {@code offset + len} overflowing
 * int (checked in long arithmetic, only when assertions are enabled).
 */
public static int MurmurHash3_x86_32(byte[] data, int offset, int len) {
    final long endIndex = (long) offset + len - 1;
    assert endIndex >= Integer.MIN_VALUE && endIndex <= Integer.MAX_VALUE
        : String.format("offset %,d len %,d would cause int overflow", offset, len);
    return MurmurHash3_x86_32(BYTE_ARRAY_LOADER, data, offset, len, DEFAULT_MURMUR_SEED);
}
|
/** offset + len overflowing int must trip the guard assertion (requires -ea). */
@Test(expected = AssertionError.class)
@RequireAssertEnabled
public void testMurmurHash3_x86_32_withIntOverflow() {
    MurmurHash3_x86_32(null, Integer.MAX_VALUE, Integer.MAX_VALUE);
}
|
/** Reports whether a header with the given name is present (lookup yields non-null). */
@Override
public boolean contains(CharSequence name) {
    return null != get(name);
}
|
/** Name lookup is case-insensitive; value matching is case-sensitive unless requested otherwise. */
@Test
public void testContainsNameAndValue() {
    Http2Headers headers = newClientHeaders();
    assertTrue(headers.contains("Name1", "value1"));
    assertFalse(headers.contains("Name1", "Value1"));
    assertTrue(headers.contains("name2", "Value2", true));
    assertFalse(headers.contains("name2", "Value2", false));
    assertTrue(headers.contains(Http2Headers.PseudoHeaderName.PATH.value(), "/foo"));
    assertFalse(headers.contains(Http2Headers.PseudoHeaderName.STATUS.value(), "200"));
    assertFalse(headers.contains("a missing header", "a missing value"));
}
|
/**
 * For each plugin rule update: creates and inserts a rule change, then records a
 * quality-profile change for every profile using that rule. All profile changes
 * are bulk-inserted at the end.
 *
 * @param dbSession         open DB session used for all inserts
 * @param pluginRuleUpdates rule updates detected during plugin loading
 */
public void createQprofileChangesForRuleUpdates(DbSession dbSession, Set<PluginRuleUpdate> pluginRuleUpdates) {
    // Use a plain loop instead of a stream: insertRuleChange is a DB side effect
    // and does not belong inside a flatMap pipeline.
    List<QProfileChangeDto> changesToPersist = new java.util.ArrayList<>();
    for (PluginRuleUpdate pluginRuleUpdate : pluginRuleUpdates) {
        RuleChangeDto ruleChangeDto = createNewRuleChange(pluginRuleUpdate);
        insertRuleChange(dbSession, ruleChangeDto);
        for (String qualityProfileUuid : findQualityProfilesForRule(dbSession, pluginRuleUpdate.getRuleUuid())) {
            changesToPersist.add(buildQprofileChangeDtoForRuleChange(qualityProfileUuid, ruleChangeDto));
        }
    }
    if (!changesToPersist.isEmpty()) {
        dbClient.qProfileChangeDao().bulkInsert(dbSession, changesToPersist);
    }
}
|
/**
 * Two rules change their clean-code attribute and impacts (one within the same
 * software quality, one across qualities); each must produce exactly one rule
 * change insert with the correct old/new attribute and impact values.
 */
@Test
public void updateWithoutCommit_whenTwoRulesChangedTheirImpactsAndAttributes_thenInsertRuleChangeAndImpactChange() {
    PluginRuleUpdate pluginRuleUpdate = new PluginRuleUpdate();
    pluginRuleUpdate.setNewCleanCodeAttribute(CleanCodeAttribute.CLEAR);
    pluginRuleUpdate.setOldCleanCodeAttribute(CleanCodeAttribute.TESTED);
    pluginRuleUpdate.setRuleUuid(RULE_UUID);
    //testing here detecting the change with 2 the same software qualities
    pluginRuleUpdate.addNewImpact(SoftwareQuality.RELIABILITY, Severity.LOW);
    pluginRuleUpdate.addOldImpact(SoftwareQuality.RELIABILITY, Severity.MEDIUM);
    PluginRuleUpdate pluginRuleUpdate2 = new PluginRuleUpdate();
    pluginRuleUpdate2.setNewCleanCodeAttribute(CleanCodeAttribute.EFFICIENT);
    pluginRuleUpdate2.setOldCleanCodeAttribute(CleanCodeAttribute.DISTINCT);
    pluginRuleUpdate2.setRuleUuid("ruleUuid2");
    //testing here detecting the change with 2 the different software qualities
    pluginRuleUpdate2.addNewImpact(SoftwareQuality.SECURITY, Severity.HIGH);
    pluginRuleUpdate2.addOldImpact(SoftwareQuality.RELIABILITY, Severity.MEDIUM);
    underTest.createQprofileChangesForRuleUpdates(dbSession, Set.of(pluginRuleUpdate, pluginRuleUpdate2));
    ArgumentCaptor<RuleChangeDto> captor = ArgumentCaptor.forClass(RuleChangeDto.class);
    verify(ruleChangeDao, times(2)).insert(argThat(dbSession::equals), captor.capture());
    RuleChangeDto firstChange = captor.getAllValues().stream().filter(change -> change.getRuleUuid().equals(RULE_UUID)).findFirst().get();
    RuleChangeDto secondChange = captor.getAllValues().stream().filter(change -> change.getRuleUuid().equals("ruleUuid2")).findFirst().get();
    assertThat(firstChange.getNewCleanCodeAttribute()).isEqualTo(CleanCodeAttribute.CLEAR);
    assertThat(firstChange.getOldCleanCodeAttribute()).isEqualTo(CleanCodeAttribute.TESTED);
    assertThat(firstChange.getRuleUuid()).isEqualTo(RULE_UUID);
    assertThat(firstChange.getRuleImpactChanges()).hasSize(1);
    assertThat(firstChange.getRuleImpactChanges()).extracting(RuleImpactChangeDto::getNewSoftwareQuality,
        RuleImpactChangeDto::getOldSoftwareQuality, RuleImpactChangeDto::getOldSeverity, RuleImpactChangeDto::getNewSeverity)
        .containsExactly(tuple(SoftwareQuality.RELIABILITY, SoftwareQuality.RELIABILITY, Severity.MEDIUM, Severity.LOW));
    assertThat(secondChange.getNewCleanCodeAttribute()).isEqualTo(CleanCodeAttribute.EFFICIENT);
    assertThat(secondChange.getOldCleanCodeAttribute()).isEqualTo(CleanCodeAttribute.DISTINCT);
    assertThat(secondChange.getRuleUuid()).isEqualTo("ruleUuid2");
    assertThat(secondChange.getRuleImpactChanges()).hasSize(1);
    assertThat(secondChange.getRuleImpactChanges()).extracting(RuleImpactChangeDto::getNewSoftwareQuality,
        RuleImpactChangeDto::getOldSoftwareQuality, RuleImpactChangeDto::getOldSeverity, RuleImpactChangeDto::getNewSeverity)
        .containsExactly(tuple(SoftwareQuality.SECURITY, SoftwareQuality.RELIABILITY, Severity.MEDIUM, Severity.HIGH));
}
|
/** Declares this bolt's output schema by forwarding the fields owned by the injected output. */
@Override
public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
    outputFieldsDeclarer.declare(this.output.fields());
}
|
/** The declarer must receive exactly the Fields instance supplied by the output. */
@Test
public void fieldsAreDeclaredThroughProvidedOutput() {
    Fields fields = new Fields(UUID.randomUUID().toString());
    when(output.fields()).thenReturn(fields);
    OutputFieldsDeclarer declarer = mock(OutputFieldsDeclarer.class);
    bolt.declareOutputFields(declarer);
    ArgumentCaptor<Fields> declaredFields = ArgumentCaptor.forClass(Fields.class);
    verify(declarer).declare(declaredFields.capture());
    assertThat(declaredFields.getValue(), is(fields));
}
|
/**
 * Builds fetch requests for the current time and submits them, wiring the
 * success/failure handlers for the responses.
 *
 * @param currentTimeMs current time in milliseconds
 * @return the poll result describing the requests to send
 */
@Override
public PollResult poll(long currentTimeMs) {
    return pollInternal(
        prepareFetchRequests(),
        this::handleFetchSuccess,
        this::handleFetchFailure
    );
}
|
/** A fetch response carrying CORRUPT_MESSAGE must surface as a KafkaException when records are read. */
@Test
public void testCorruptMessageError() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    assertEquals(1, sendFetches());
    assertFalse(fetcher.hasCompletedFetches());
    // Prepare a response with the CORRUPT_MESSAGE error.
    client.prepareResponse(fullFetchResponse(
        tidp0,
        buildRecords(1L, 1, 1),
        Errors.CORRUPT_MESSAGE,
        100L, 0));
    networkClientDelegate.poll(time.timer(0));
    assertTrue(fetcher.hasCompletedFetches());
    // Trigger the exception.
    assertThrows(KafkaException.class, this::fetchRecords);
}
|
/**
 * Builds the TCC business-action context from the method parameters annotated
 * with {@code @BusinessActionContextParameter}.
 *
 * @param method    the intercepted action method
 * @param arguments the runtime arguments of the invocation
 * @return context map populated from the annotated parameters
 * @throws IllegalArgumentException if an annotated parameter value is {@code null}
 */
protected Map<String, Object> fetchActionRequestContext(Method method, Object[] arguments) {
    Map<String, Object> context = new HashMap<>(8);
    Annotation[][] parameterAnnotations = method.getParameterAnnotations();
    for (int i = 0; i < parameterAnnotations.length; i++) {
        for (int j = 0; j < parameterAnnotations[i].length; j++) {
            if (parameterAnnotations[i][j] instanceof BusinessActionContextParameter) {
                // get annotation
                BusinessActionContextParameter annotation = (BusinessActionContextParameter) parameterAnnotations[i][j];
                // get param; annotated parameters are mandatory
                Object paramObject = arguments[i];
                if (paramObject == null) {
                    throw new IllegalArgumentException("@BusinessActionContextParameter 's params can not null");
                }
                // NOTE: the original also had an unreachable null-continue after the
                // throw above; that dead branch has been removed.
                // load param by the config of annotation, and then put into the context
                ActionContextUtil.loadParamByAnnotationAndPutToContext(ParamType.PARAM, "", paramObject, annotation, context);
            }
        }
    }
    return context;
}
|
/**
 * Extracts annotated parameters of TestAction#prepare into the context:
 * int "a", list element "b", and the nested "email" field of the TestParam.
 * The first argument (BusinessActionContext) is not annotated and may be null.
 */
@Test
public void testBusinessActionContext() throws NoSuchMethodException {
    Method prepareMethod = TestAction.class.getDeclaredMethod("prepare",
        BusinessActionContext.class, int.class, List.class, TestParam.class);
    List<Object> list = new ArrayList<>();
    list.add("b");
    TestParam tccParam = new TestParam(1, "abc@ali.com");
    Map<String, Object> paramContext = actionInterceptorHandler.fetchActionRequestContext(prepareMethod,
        new Object[]{null, 10, list, tccParam});
    System.out.println(paramContext);
    Assertions.assertEquals(10, paramContext.get("a"));
    Assertions.assertEquals("b", paramContext.get("b"));
    Assertions.assertEquals("abc@ali.com", paramContext.get("email"));
}
|
/**
 * Returns the server base URL: the configured {@code SERVER_BASE_URL} when set,
 * otherwise a computed default — always without a trailing slash.
 */
public String getBaseUrl() {
    final String configured = config.get(SERVER_BASE_URL).orElse("");
    final String url = isEmpty(configured) ? computeBaseUrl() : configured;
    // Normalize by dropping a trailing slash.
    return StringUtils.removeEnd(url, "/");
}
|
/** A negative configured port must fall back to the default http://localhost:9000. */
@Test
public void base_url_is_http_localhost_9000_when_port_is_negative() {
    settings.setProperty(PORT_PORPERTY, -23);
    assertThat(underTest().getBaseUrl()).isEqualTo("http://localhost:9000");
}
|
/**
 * Always throws: this headers view is read-only and rejects all mutation.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public T setDouble(K name, double value) {
    throw new UnsupportedOperationException("read only");
}
|
/** Mutating the read-only headers via setDouble must throw UnsupportedOperationException. */
@Test
public void testSetDouble() {
    assertThrows(UnsupportedOperationException.class, new Executable() {
        @Override
        public void execute() {
            HEADERS.setDouble("name", 0);
        }
    });
}
|
/**
 * Searches for paths matching the filter. From the root of a plain S3 endpoint
 * (no bucket in the hostname) every bucket is listed and searched; otherwise the
 * working directory itself is listed. A missing working directory yields an
 * empty result rather than an error.
 */
@Override
public AttributedList<Path> search(final Path workdir, final Filter<Path> regex, final ListProgressListener listener) throws BackgroundException {
    if(workdir.isRoot()) {
        if(StringUtils.isEmpty(RequestEntityRestStorageService.findBucketInHostname(session.getHost()))) {
            final AttributedList<Path> result = new AttributedList<>();
            // Search every bucket's contents, then include matching buckets themselves.
            final AttributedList<Path> buckets = new S3BucketListService(session).list(workdir, listener);
            for(Path bucket : buckets) {
                result.addAll(filter(regex, new S3ObjectListService(session, acl).list(bucket, listener, null)));
            }
            result.addAll(filter(regex, buckets));
            return result;
        }
    }
    try {
        return filter(regex, new S3ObjectListService(session, acl).list(workdir, listener, null));
    }
    catch(NotfoundException e) {
        // A vanished working directory is treated as "no matches".
        return AttributedList.emptyList();
    }
}
|
/**
 * Integration test: a freshly created object is found when searching by its
 * full name, by substrings of the name, and from the root; it is not found
 * when searching an unrelated subdirectory.
 */
@Test
public void testSearchInBucket() throws Exception {
    final String name = new AlphanumericRandomStringService().random();
    final Path root = new Path("/", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final Path bucket = new Path(root, "test-eu-central-1-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final Path file = new S3TouchFeature(session, new S3AccessControlListFeature(session)).touch(new Path(bucket, name, EnumSet.of(Path.Type.file)), new TransferStatus());
    final S3SearchFeature feature = new S3SearchFeature(session, new S3AccessControlListFeature(session));
    // Exact name, suffix and prefix searches must all match the file.
    assertNotNull(feature.search(bucket, new SearchFilter(name), new DisabledListProgressListener()).find(new SimplePathPredicate(file)));
    assertNotNull(feature.search(bucket, new SearchFilter(StringUtils.substring(name, 2)), new DisabledListProgressListener()).find(new SimplePathPredicate(file)));
    assertNotNull(feature.search(bucket, new SearchFilter(StringUtils.substring(name, 0, name.length() - 2)), new DisabledListProgressListener()).find(new SimplePathPredicate(file)));
    // Searching from the root walks into the bucket.
    assertNotNull(feature.search(root, new SearchFilter(name), new DisabledListProgressListener()).find(new SimplePathPredicate(file)));
    // A sibling directory must not contain the file.
    final Path subdir = new Path(bucket, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory));
    assertNull(feature.search(subdir, new SearchFilter(name), new DisabledListProgressListener()).find(new SimplePathPredicate(file)));
    new S3DefaultDeleteFeature(session).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
|
/**
 * Builds a serde for the given persistence schema.
 *
 * The KAFKA format handles exactly zero columns (void serde) or one column;
 * anything else is rejected.
 *
 * @param schema schema describing the columns to (de)serialize
 * @return serde for the schema
 * @throws KsqlException if the schema has more than one column
 */
static Serde<List<?>> createSerde(final PersistenceSchema schema) {
    final List<SimpleColumn> columns = schema.columns();
    switch (columns.size()) {
        case 0:
            // No columns:
            return new KsqlVoidSerde<>();
        case 1: {
            final SimpleColumn only = columns.get(0);
            final Class<?> javaType = SchemaConverters.sqlToJavaConverter()
                .toJavaType(only.type());
            return createSerde(only, javaType);
        }
        default:
            throw new KsqlException("The '" + FormatFactory.KAFKA.name()
                + "' format only supports a single field. Got: " + columns);
    }
}
|
/**
 * A schema with no key columns must round-trip through the void serde:
 * serialization of an empty list and deserialization of null both yield null.
 */
@Test
public void shouldHandleEmptyKey() {
    // Given: a schema whose key side has no columns.
    final LogicalSchema logicalSchema = LogicalSchema.builder()
        .valueColumn(ColumnName.of("f0"), SqlTypes.INTEGER)
        .build();
    final PersistenceSchema keySchema = PhysicalSchema
        .from(logicalSchema, SerdeFeatures.of(), SerdeFeatures.of())
        .keySchema();
    final Serde<List<?>> serde = KafkaSerdeFactory.createSerde(keySchema);
    // When: round-tripping an empty key.
    final byte[] serialized = serde.serializer().serialize("topic", ImmutableList.of());
    final Object deserialized = serde.deserializer().deserialize("topic", null);
    // Then: both directions map to null.
    assertThat(serialized, is(nullValue()));
    assertThat(deserialized, is(nullValue()));
}
|
/**
 * Closes this wrapper by closing the underlying store delegate.
 */
@Override
public void close() {
    store.close();
}
|
/**
 * Verifies that close() is delegated to the wrapped versioned store.
 */
@Test
public void shouldCloseVersionedStore() {
    givenWrapperWithVersionedStore();
    wrapper.close();
    verify(versionedStore).close();
}
|
/**
 * Persists the given user account as a document in the user-account
 * collection and returns the same (unmodified) account.
 *
 * @param userAccount account to store
 * @return the account that was written
 */
@Override
public UserAccount writeToDb(final UserAccount userAccount) {
    final Document document = new Document(USER_ID, userAccount.getUserId())
        .append(USER_NAME, userAccount.getUserName())
        .append(ADD_INFO, userAccount.getAdditionalInfo());
    db.getCollection(USER_ACCOUNT).insertOne(document);
    return userAccount;
}
|
/**
 * Writing an account through the mocked collection must complete without
 * throwing.
 */
@Test
void writeToDb() {
    final MongoCollection<Document> collection = mock(MongoCollection.class);
    when(db.getCollection(CachingConstants.USER_ACCOUNT)).thenReturn(collection);
    assertDoesNotThrow(() -> mongoDb.writeToDb(userAccount));
}
|
/**
 * Renders the given expression tree to its string form by delegating to
 * {@code formatExpression}.
 *
 * @param expression expression to format
 * @return formatted expression string
 */
public String process(final Expression expression) {
    return formatExpression(expression);
}
|
/**
 * A DATE cast used on both sides of a comparison must translate to Java
 * code that compiles and evaluates without throwing.
 */
@Test
public void shouldGenerateCastExpressionsWhichAreComparable() {
    // Given: the same DATE cast on both sides of >=.
    final Expression cast = new Cast(new StringLiteral("2020-01-01"), new io.confluent.ksql.execution.expression.tree.Type(SqlTypes.DATE));
    final ComparisonExpression exp = new ComparisonExpression(Type.GREATER_THAN_OR_EQUAL, cast, cast);
    // When: translating the comparison to Java source.
    final String java = sqlToJavaVisitor.process(exp);
    // Then: the generated code cooks and evaluates cleanly.
    final Evaluator evaluator = CodeGenTestUtil.cookCode(java, Boolean.class);
    evaluator.evaluate();
}
|
/**
 * Sends a single mail to a member user.
 *
 * @param mail           target address; when empty, the member's registered
 *                       email (looked up by {@code userId}) is used instead
 * @param templateCode   mail template code
 * @param templateParams template substitution parameters
 * @return the created mail log id
 */
@Override
public Long sendSingleMailToMember(String mail, Long userId,
        String templateCode, Map<String, Object> templateParams) {
    // Resolve the member's registered email when no explicit address given.
    final String targetMail = StrUtil.isEmpty(mail)
            ? memberService.getMemberUserEmail(userId)
            : mail;
    // Delegate to the generic single-mail sender with the MEMBER user type.
    return sendSingleMail(targetMail, userId, UserTypeEnum.MEMBER.getValue(), templateCode, templateParams);
}
|
/**
 * When no mail address is passed, the member's registered email is resolved,
 * the template is rendered, a mail log is created, and the send message is
 * published to the producer.
 */
@Test
public void testSendSingleMailToMember() {
    // Prepare arguments
    Long userId = randomLongId();
    String templateCode = RandomUtils.randomString();
    Map<String, Object> templateParams = MapUtil.<String, Object>builder().put("code", "1234")
        .put("op", "login").build();
    // Mock memberService: resolve the member's registered email
    String mail = randomEmail();
    when(memberService.getMemberUserEmail(eq(userId))).thenReturn(mail);
    // Mock MailTemplateService: template lookup and title/content rendering
    MailTemplateDO template = randomPojo(MailTemplateDO.class, o -> {
        o.setStatus(CommonStatusEnum.ENABLE.getStatus());
        o.setContent("验证码为{code}, 操作为{op}");
        o.setParams(Lists.newArrayList("code", "op"));
    });
    when(mailTemplateService.getMailTemplateByCodeFromCache(eq(templateCode))).thenReturn(template);
    String title = RandomUtils.randomString();
    when(mailTemplateService.formatMailTemplateContent(eq(template.getTitle()), eq(templateParams)))
        .thenReturn(title);
    String content = RandomUtils.randomString();
    when(mailTemplateService.formatMailTemplateContent(eq(template.getContent()), eq(templateParams)))
        .thenReturn(content);
    // Mock MailAccountService: sending account lookup
    MailAccountDO account = randomPojo(MailAccountDO.class);
    when(mailAccountService.getMailAccountFromCache(eq(template.getAccountId()))).thenReturn(account);
    // Mock MailLogService: log creation returns the id we assert on
    Long mailLogId = randomLongId();
    when(mailLogService.createMailLog(eq(userId), eq(UserTypeEnum.MEMBER.getValue()), eq(mail),
        eq(account), eq(template), eq(content), eq(templateParams), eq(true))).thenReturn(mailLogId);
    // Invoke with a null mail address to force the member-email lookup
    Long resultMailLogId = mailSendService.sendSingleMailToMember(null, userId, templateCode, templateParams);
    // Assert the returned log id
    assertEquals(mailLogId, resultMailLogId);
    // Verify the message was handed to the producer
    verify(mailProducer).sendMailSendMessage(eq(mailLogId), eq(mail),
        eq(account.getId()), eq(template.getNickname()), eq(title), eq(content));
}
|
/**
 * Determines the GELF message type from the first two payload bytes.
 *
 * @return the detected type
 * @throws IllegalStateException if the payload is shorter than the header
 */
public Type getGELFType() {
    // The first two bytes are required to classify the payload.
    if (payload.length >= Type.HEADER_SIZE) {
        return Type.determineType(payload[0], payload[1]);
    }
    throw new IllegalStateException("GELF message is too short. Not even the type header would fit.");
}
|
/**
 * A payload starting with '{' (the opening of a JSON document) must be
 * detected as an uncompressed GELF message.
 */
@Test
public void testGetGELFTypeDetectsUncompressedMessage() throws Exception {
    byte[] fakeData = new byte[20];
    fakeData[0] = (byte) '{';
    fakeData[1] = (byte) '\n';
    GELFMessage msg = new GELFMessage(fakeData);
    assertEquals(GELFMessage.Type.UNCOMPRESSED, msg.getGELFType());
}
|
/**
 * Executes an eth_call against the given contract address.
 *
 * @param to                    contract address to call
 * @param data                  encoded call data
 * @param defaultBlockParameter block at which to evaluate the call
 * @return the call's return value; when absent, the error's data (possibly
 *         null)
 * @throws IOException if the RPC request fails
 */
@Override
public String sendCall(String to, String data, DefaultBlockParameter defaultBlockParameter)
        throws IOException {
    final EthCall ethCall =
            web3j.ethCall(
                    Transaction.createEthCallTransaction(getFromAddress(), to, data),
                    defaultBlockParameter)
                .send();
    assertCallNotReverted(ethCall);
    if (ethCall.getValue() != null) {
        return ethCall.getValue();
    }
    // No value: surface the error payload when one is present.
    return ethCall.getError() == null ? null : ethCall.getError().getData();
}
|
/**
 * A call that returns an error with null data must be reported as a
 * ContractCallException rather than yielding a value.
 */
@Test
void sendCallErrorRevertByDataNull() throws IOException {
    // Build an EthCall response carrying an error with no data payload.
    EthCall lookupDataHex = new EthCall();
    Response.Error error = new Response.Error();
    error.setCode(3);
    error.setData(null);
    lookupDataHex.setError(error);
    // Stub the RPC layer to return that response.
    Request request = mock(Request.class);
    when(request.send()).thenReturn(lookupDataHex);
    when(web3j.ethCall(any(Transaction.class), any(DefaultBlockParameter.class)))
        .thenReturn(request);
    assertThrows(
        ContractCallException.class,
        () ->
            clientTransactionManager.sendCall(
                "0xAdress", "data", DefaultBlockParameter.valueOf("latest")));
}
|
/**
 * Maps this angle to the closest of the four cardinal directions.
 *
 * Values wrap every 2048 units, giving 512 units per direction; bit 8
 * (value 256) marks the halfway point used to round up to the next one.
 */
public Direction getNearestDirection()
{
	int sector = angle >>> 9;
	if ((angle & 256) != 0)
	{
		// Past the halfway mark: round up to the next direction.
		++sector;
	}
	switch (sector & 3)
	{
		case 0:
			return SOUTH;
		case 1:
			return WEST;
		case 2:
			return NORTH;
		case 3:
			return EAST;
		default:
			throw new IllegalStateException();
	}
}
|
@Test
public void getNearestDirection()
{
    // 512 + 10: sector 1 with the halfway bit clear -> no rounding, WEST.
    Angle angle = new Angle(512 + 10);
    assertEquals(WEST, angle.getNearestDirection());
    // 512 + 256 + 1: halfway bit set -> rounds up to sector 2, NORTH.
    angle = new Angle(512 + 256 + 1);
    assertEquals(NORTH, angle.getNearestDirection());
}
|
/**
 * Closes both contexts in a fixed order and rethrows the first captured
 * failure: as-is when it is an UnavailableException, otherwise wrapped in a
 * RuntimeException.
 *
 * @throws UnavailableException if a context close failed with it
 */
@Override
public void close() throws UnavailableException {
    // JournalContext is closed before block deletion context so that file system master changes
    // are written before block master changes. If a failure occurs between deleting an inode and
    // remove its blocks, it's better to have an orphaned block than an inode with a missing block.
    closeQuietly(mJournalContext);
    closeQuietly(mBlockDeletionContext);
    // closeQuietly records the first throwable in mThrown; later failures are
    // presumably attached as suppressed — confirm in closeQuietly.
    if (mThrown != null) {
        Throwables.propagateIfPossible(mThrown, UnavailableException.class);
        throw new RuntimeException(mThrown);
    }
}
|
/**
 * When both contexts fail on close, the journal context's exception (the
 * first close) is thrown and the block deletion context's exception is
 * attached as suppressed.
 */
@Test
public void throwTwoUnavailableExceptions() throws Throwable {
    Exception bdcException = new UnavailableException("block deletion context exception");
    Exception jcException = new UnavailableException("journal context exception");
    doThrow(bdcException).when(mMockBDC).close();
    doThrow(jcException).when(mMockJC).close();
    try {
        mRpcContext.close();
        fail("Expected an exception to be thrown");
    } catch (UnavailableException e) {
        assertEquals(jcException, e);
        // journal context is closed first, so the block deletion context exception should be
        // suppressed.
        assertEquals(bdcException, e.getSuppressed()[0]);
    }
}
|
/**
 * Returns the name server stream for the most specific configured domain
 * matching the hostname.
 *
 * Walks the hostname from most- to least-specific: looks the current name
 * up in the domain map, then strips the leading label and retries. When no
 * further label can be stripped (no dot past position 0, or only a trailing
 * dot remains), the default name servers are used.
 */
@Override
public DnsServerAddressStream nameServerAddressStream(String hostname) {
    for (;;) {
        int i = hostname.indexOf('.', 1);
        if (i < 0 || i == hostname.length() - 1) {
            // No sub-domain left to try: fall back to the defaults.
            return defaultNameServerAddresses.stream();
        }
        DnsServerAddresses addresses = domainToNameServerStreamMap.get(hostname);
        if (addresses != null) {
            return addresses.stream();
        }
        // Strip the leading label and retry with the parent domain.
        hostname = hostname.substring(i + 1);
    }
}
|
/**
 * An empty/non-existent etc-resolver directory listing must not break
 * construction; resolution falls back to the parsed resolv.conf servers.
 */
@Test
public void emptyEtcResolverDirectoryDoesNotThrow(@TempDir Path tempDir) throws IOException {
    File f = buildFile(tempDir, "domain linecorp.local\n" +
        "nameserver 127.0.0.2\n" +
        "nameserver 127.0.0.3\n");
    // listFiles() returns null for a directory that does not exist — the
    // provider must tolerate that.
    UnixResolverDnsServerAddressStreamProvider p =
        new UnixResolverDnsServerAddressStreamProvider(f, tempDir.resolve("netty-empty").toFile().listFiles());
    DnsServerAddressStream stream = p.nameServerAddressStream("somehost");
    assertHostNameEquals("127.0.0.2", stream.next());
}
|
/**
 * Runs every known crash {@link Rule} pattern against the given log and
 * collects a {@link Result} for each rule that matches.
 *
 * @param log the full crash-report / game-log text to scan
 * @return the set of matched results; empty when no rule matches
 */
public static Set<Result> analyze(String log) {
    Set<Result> results = new HashSet<>();
    for (Rule rule : Rule.values()) {
        Matcher matcher = rule.pattern.matcher(log);
        if (matcher.find()) {
            results.add(new Result(rule, log, matcher));
        }
    }
    return results;
}

/**
 * @deprecated misspelled; use {@link #analyze(String)}. Kept as a
 * delegating alias so existing callers keep working.
 */
@Deprecated
public static Set<Result> anaylze(String log) {
    return analyze(log);
}
|
/**
 * The FABRIC_WARNINGS rule must capture the entire multi-line warnings
 * section into the "reason" group; whitespace is normalised on both sides
 * before comparison to ignore formatting differences.
 */
@Test
public void fabricWarnings() throws IOException {
    CrashReportAnalyzer.Result result = findResultByRule(
        CrashReportAnalyzer.anaylze(loadLog("/logs/fabric_warnings.txt")),
        CrashReportAnalyzer.Rule.FABRIC_WARNINGS);
    assertEquals((" - Conflicting versions found for fabric-api-base: used 0.3.0+a02b446313, also found 0.3.0+a02b44633d, 0.3.0+a02b446318\n" +
        " - Conflicting versions found for fabric-rendering-data-attachment-v1: used 0.1.5+a02b446313, also found 0.1.5+a02b446318\n" +
        " - Conflicting versions found for fabric-rendering-fluids-v1: used 0.1.13+a02b446318, also found 0.1.13+a02b446313\n" +
        " - Conflicting versions found for fabric-lifecycle-events-v1: used 1.4.4+a02b44633d, also found 1.4.4+a02b446318\n" +
        " - Mod 'Sodium Extra' (sodium-extra) recommends any version of mod reeses-sodium-options, which is missing!\n" +
        "\t - You must install any version of reeses-sodium-options.\n" +
        " - Conflicting versions found for fabric-screen-api-v1: used 1.0.4+155f865c18, also found 1.0.4+198a96213d\n" +
        " - Conflicting versions found for fabric-key-binding-api-v1: used 1.0.4+a02b446318, also found 1.0.4+a02b44633d\n").replaceAll("\\s+", ""),
        result.getMatcher().group("reason").replaceAll("\\s+", ""));
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.