| focal_method (string, 13–60.9k chars) | test_case (string, 25–109k chars) |
|---|---|
/**
 * Returns whether the given annotation instance is a Guice binding annotation,
 * i.e. whether its type (or a type reachable from it) is meta-annotated with
 * {@code @BindingAnnotation}.
 *
 * <p>The search covers the runtime class of the annotation, that class's
 * directly implemented interfaces (the annotation type itself for proxies),
 * and the superclass chain of each visited class.
 */
public static boolean isBindingAnnotation(Annotation annotation) {
    LinkedList<Class<?>> pending = new LinkedList<>();
    pending.add(annotation.getClass());
    pending.addAll(List.of(annotation.getClass().getInterfaces()));
    while (!pending.isEmpty()) {
        Class<?> candidate = pending.removeFirst();
        if (candidate.getAnnotation(BindingAnnotation.class) != null) {
            return true;
        }
        // Walk up the superclass chain; interfaces report a null superclass.
        Class<?> parent = candidate.getSuperclass();
        if (parent != null) {
            pending.addFirst(parent);
        }
    }
    return false;
}
|
@Test
void check_if_annotation_is_a_binding_annotation() {
    // Names.named(...) yields an instance whose type should be recognized as a
    // binding annotation.
    Annotation named = Names.named("name");
    assertTrue(isBindingAnnotation(named));
    // An arbitrary annotation found on the Named type itself is not one.
    Annotation nonBinding = Named.class.getAnnotations()[0];
    assertFalse(isBindingAnnotation(nonBinding));
}
|
/**
 * Computes, per column, the list of {@link Operation}s needed to reconcile the
 * segment's existing forward-index / dictionary state with the latest index
 * configuration.
 *
 * <p>Returns an empty map for segment versions older than V3 (rewrites are not
 * supported there). Columns missing from the schema are skipped.
 *
 * @param segmentReader reader over the segment directory, used to look up
 *     currently-present indexes (e.g. inverted index)
 * @return map from column name to the ordered operations to perform; columns
 *     needing no change are absent from the map
 * @throws Exception if reading segment metadata or index state fails
 */
@VisibleForTesting
Map<String, List<Operation>> computeOperations(SegmentDirectory.Reader segmentReader)
    throws Exception {
  Map<String, List<Operation>> columnOperationsMap = new HashMap<>();
  // Does not work for segment versions < V3.
  if (_segmentDirectory.getSegmentMetadata().getVersion().compareTo(SegmentVersion.v3) < 0) {
    return columnOperationsMap;
  }
  Set<String> existingAllColumns = _segmentDirectory.getSegmentMetadata().getAllColumns();
  Set<String> existingDictColumns = _segmentDirectory.getColumnsWithIndex(StandardIndexes.dictionary());
  Set<String> existingForwardIndexColumns = _segmentDirectory.getColumnsWithIndex(StandardIndexes.forward());
  for (String column : existingAllColumns) {
    if (_schema != null && !_schema.hasColumn(column)) {
      // _schema will be null only in tests
      LOGGER.info("Column {} is not in schema, skipping updating forward index", column);
      continue;
    }
    boolean existingHasDict = existingDictColumns.contains(column);
    boolean existingHasFwd = existingForwardIndexColumns.contains(column);
    FieldIndexConfigs newConf = _fieldIndexConfigs.get(column);
    boolean newIsFwd = newConf.getConfig(StandardIndexes.forward()).isEnabled();
    boolean newIsDict = newConf.getConfig(StandardIndexes.dictionary()).isEnabled();
    boolean newIsRange = newConf.getConfig(StandardIndexes.range()).isEnabled();
    if (existingHasFwd && !newIsFwd) {
      // Existing column has a forward index. New column config disables the forward index
      ColumnMetadata columnMetadata = _segmentDirectory.getSegmentMetadata().getColumnMetadataFor(column);
      // Null-check added for consistency with the enable-forward-index branch below,
      // which guards the same lookup against a missing column metadata entry.
      if (columnMetadata != null && columnMetadata.isSorted()) {
        // Check if the column is sorted. If sorted, disabling forward index should be a no-op. Do not return an
        // operation for this column related to disabling forward index.
        LOGGER.warn("Trying to disable the forward index for a sorted column {}, ignoring", column);
        continue;
      }
      if (existingHasDict) {
        if (!newIsDict) {
          // Dictionary was also disabled. Just disable the dictionary and remove it along with the forward index
          // If range index exists, don't try to regenerate it on toggling the dictionary, throw an error instead
          Preconditions.checkState(!newIsRange, String.format(
              "Must disable range (enabled) index to disable the dictionary and forward index for column: %s or "
                  + "refresh / back-fill the forward index", column));
          columnOperationsMap.put(column,
              Arrays.asList(Operation.DISABLE_FORWARD_INDEX, Operation.DISABLE_DICTIONARY));
        } else {
          // Dictionary is still enabled, keep it but remove the forward index
          columnOperationsMap.put(column, Collections.singletonList(Operation.DISABLE_FORWARD_INDEX));
        }
      } else {
        if (!newIsDict) {
          // Dictionary remains disabled and we should not reconstruct temporary forward index as dictionary based
          columnOperationsMap.put(column, Collections.singletonList(Operation.DISABLE_FORWARD_INDEX));
        } else {
          // Dictionary is enabled, creation of dictionary and conversion to dictionary based forward index is needed
          columnOperationsMap.put(column,
              Arrays.asList(Operation.DISABLE_FORWARD_INDEX, Operation.ENABLE_DICTIONARY));
        }
      }
    } else if (!existingHasFwd && newIsFwd) {
      // Existing column does not have a forward index. New column config enables the forward index
      ColumnMetadata columnMetadata = _segmentDirectory.getSegmentMetadata().getColumnMetadataFor(column);
      if (columnMetadata != null && columnMetadata.isSorted()) {
        // Check if the column is sorted. If sorted, disabling forward index should be a no-op and forward index
        // should already exist. Do not return an operation for this column related to enabling forward index.
        LOGGER.warn("Trying to enable the forward index for a sorted column {}, ignoring", column);
        continue;
      }
      // Get list of columns with inverted index
      Set<String> existingInvertedIndexColumns =
          segmentReader.toSegmentDirectory().getColumnsWithIndex(StandardIndexes.inverted());
      if (!existingHasDict || !existingInvertedIndexColumns.contains(column)) {
        // If either dictionary or inverted index is missing on the column there is no way to re-generate the forward
        // index. Treat this as a no-op and log a warning.
        LOGGER.warn("Trying to enable the forward index for a column {} missing either the dictionary ({}) and / or "
            + "the inverted index ({}) is not possible. Either a refresh or back-fill is required to get the "
            + "forward index, ignoring", column, existingHasDict ? "enabled" : "disabled",
            existingInvertedIndexColumns.contains(column) ? "enabled" : "disabled");
        continue;
      }
      columnOperationsMap.put(column, Collections.singletonList(Operation.ENABLE_FORWARD_INDEX));
    } else if (!existingHasFwd) {
      // Forward index is disabled for the existing column and should remain disabled based on the latest config
      // Need some checks to see whether the dictionary is being enabled or disabled here and take appropriate actions
      // If the dictionary is not enabled on the existing column it must be on the new noDictionary column list.
      // Cannot enable the dictionary for a column with forward index disabled.
      Preconditions.checkState(existingHasDict || !newIsDict,
          String.format("Cannot regenerate the dictionary for column %s with forward index disabled. Please "
              + "refresh or back-fill the data to add back the forward index", column));
      if (existingHasDict && !newIsDict) {
        // Dictionary is currently enabled on this column but is supposed to be disabled. Remove the dictionary
        // and update the segment metadata If the range index exists then throw an error since we are not
        // regenerating the range index on toggling the dictionary
        Preconditions.checkState(!newIsRange, String.format(
            "Must disable range (enabled) index to disable the dictionary for a forwardIndexDisabled column: %s or "
                + "refresh / back-fill the forward index", column));
        columnOperationsMap.put(column, Collections.singletonList(Operation.DISABLE_DICTIONARY));
      }
    } else if (!existingHasDict && newIsDict) {
      // Existing column is RAW. New column is dictionary enabled.
      if (_schema == null || _tableConfig == null) {
        // This can only happen in tests.
        LOGGER.warn("Cannot enable dictionary for column={} as schema or tableConfig is null.", column);
        continue;
      }
      ColumnMetadata existingColumnMetadata = _segmentDirectory.getSegmentMetadata().getColumnMetadataFor(column);
      if (DictionaryIndexType.ignoreDictionaryOverride(_tableConfig.getIndexingConfig().isOptimizeDictionary(),
          _tableConfig.getIndexingConfig().isOptimizeDictionaryForMetrics(),
          _tableConfig.getIndexingConfig().getNoDictionarySizeRatioThreshold(), existingColumnMetadata.getFieldSpec(),
          _fieldIndexConfigs.get(column), existingColumnMetadata.getCardinality(),
          existingColumnMetadata.getTotalNumberOfEntries())) {
        columnOperationsMap.put(column, Collections.singletonList(Operation.ENABLE_DICTIONARY));
      }
    } else if (existingHasDict && !newIsDict) {
      // Existing column has dictionary. New config for the column is RAW.
      if (shouldDisableDictionary(column, _segmentDirectory.getSegmentMetadata().getColumnMetadataFor(column))) {
        columnOperationsMap.put(column, Collections.singletonList(Operation.DISABLE_DICTIONARY));
      }
    } else if (!existingHasDict) {
      // Both existing and new column is RAW forward index encoded. Check if compression needs to be changed.
      // TODO: Also check if raw index version needs to be changed
      if (shouldChangeRawCompressionType(column, segmentReader)) {
        columnOperationsMap.put(column, Collections.singletonList(Operation.CHANGE_INDEX_COMPRESSION_TYPE));
      }
    } else {
      // Both existing and new column is dictionary encoded. Check if compression needs to be changed.
      if (shouldChangeDictIdCompressionType(column, segmentReader)) {
        columnOperationsMap.put(column, Collections.singletonList(Operation.CHANGE_INDEX_COMPRESSION_TYPE));
      }
    }
  }
  return columnOperationsMap;
}
|
/**
 * Exercises {@code ForwardIndexHandler.computeOperations} for the disable-forward-index
 * paths: already-disabled columns (no-op), dictionary and raw columns, multi-column
 * combinations, dictionary toggles on forward-index-disabled columns, and the error
 * cases (re-enabling a dictionary without a forward index; disabling a dictionary
 * while a range index is present).
 */
@Test
public void testComputeOperationDisableForwardIndex()
    throws Exception {
  // Setup
  SegmentMetadataImpl existingSegmentMetadata = new SegmentMetadataImpl(_segmentDirectory);
  SegmentDirectory segmentLocalFSDirectory =
      new SegmentLocalFSDirectory(_segmentDirectory, existingSegmentMetadata, ReadMode.mmap);
  SegmentDirectory.Writer writer = segmentLocalFSDirectory.createWriter();
  // TEST1: Disable forward index for a column which already has forward index disabled
  IndexLoadingConfig indexLoadingConfig = new IndexLoadingConfig(null, _tableConfig);
  indexLoadingConfig.addForwardIndexDisabledColumns(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER);
  ForwardIndexHandler fwdIndexHandler = new ForwardIndexHandler(segmentLocalFSDirectory, indexLoadingConfig, null);
  Map<String, List<ForwardIndexHandler.Operation>> operationMap = fwdIndexHandler.computeOperations(writer);
  assertEquals(operationMap, Collections.EMPTY_MAP);
  // TEST2: Disable forward index for a dictionary column with forward index enabled
  indexLoadingConfig = new IndexLoadingConfig(null, _tableConfig);
  indexLoadingConfig.addForwardIndexDisabledColumns(DIM_DICT_INTEGER);
  indexLoadingConfig.addInvertedIndexColumns(DIM_DICT_INTEGER);
  fwdIndexHandler = new ForwardIndexHandler(segmentLocalFSDirectory, indexLoadingConfig, _schema);
  operationMap = fwdIndexHandler.computeOperations(writer);
  assertEquals(operationMap.size(), 1);
  assertEquals(operationMap.get(DIM_DICT_INTEGER),
      Collections.singletonList(ForwardIndexHandler.Operation.DISABLE_FORWARD_INDEX));
  // TEST3: Disable forward index for a raw column with forward index enabled and enable inverted index and
  // dictionary
  indexLoadingConfig = new IndexLoadingConfig(null, _tableConfig);
  indexLoadingConfig.addForwardIndexDisabledColumns(DIM_LZ4_INTEGER);
  indexLoadingConfig.removeNoDictionaryColumns(DIM_LZ4_INTEGER);
  indexLoadingConfig.addInvertedIndexColumns(DIM_LZ4_INTEGER);
  fwdIndexHandler = new ForwardIndexHandler(segmentLocalFSDirectory, indexLoadingConfig, _schema);
  operationMap = fwdIndexHandler.computeOperations(writer);
  assertEquals(operationMap.size(), 1);
  // Order of the two operations is not asserted; compare as sets.
  Set<ForwardIndexHandler.Operation> operations = new HashSet<>(operationMap.get(DIM_LZ4_INTEGER));
  assertEquals(operations.size(), 2);
  Set<ForwardIndexHandler.Operation> expectedOperations = new HashSet<>(
      Arrays.asList(ForwardIndexHandler.Operation.DISABLE_FORWARD_INDEX,
          ForwardIndexHandler.Operation.ENABLE_DICTIONARY));
  assertEquals(expectedOperations, operations);
  // TEST4: Disable forward index for two dictionary columns with forward index enabled
  indexLoadingConfig = new IndexLoadingConfig(null, _tableConfig);
  indexLoadingConfig.addForwardIndexDisabledColumns(DIM_DICT_LONG);
  indexLoadingConfig.addForwardIndexDisabledColumns(DIM_DICT_STRING);
  indexLoadingConfig.addInvertedIndexColumns(DIM_DICT_LONG);
  indexLoadingConfig.addInvertedIndexColumns(DIM_DICT_STRING);
  fwdIndexHandler = new ForwardIndexHandler(segmentLocalFSDirectory, indexLoadingConfig, _schema);
  operationMap = fwdIndexHandler.computeOperations(writer);
  assertEquals(operationMap.size(), 2);
  assertEquals(operationMap.get(DIM_DICT_LONG),
      Collections.singletonList(ForwardIndexHandler.Operation.DISABLE_FORWARD_INDEX));
  assertEquals(operationMap.get(DIM_DICT_STRING),
      Collections.singletonList(ForwardIndexHandler.Operation.DISABLE_FORWARD_INDEX));
  // TEST5: Disable forward index for two raw columns with forward index enabled and enable dictionary
  indexLoadingConfig = new IndexLoadingConfig(null, _tableConfig);
  indexLoadingConfig.addForwardIndexDisabledColumns(DIM_LZ4_LONG);
  indexLoadingConfig.removeNoDictionaryColumns(DIM_LZ4_LONG);
  indexLoadingConfig.addInvertedIndexColumns(DIM_LZ4_LONG);
  indexLoadingConfig.addForwardIndexDisabledColumns(DIM_SNAPPY_STRING);
  indexLoadingConfig.removeNoDictionaryColumns(DIM_SNAPPY_STRING);
  indexLoadingConfig.addInvertedIndexColumns(DIM_SNAPPY_STRING);
  fwdIndexHandler = new ForwardIndexHandler(segmentLocalFSDirectory, indexLoadingConfig, _schema);
  operationMap = fwdIndexHandler.computeOperations(writer);
  assertEquals(operationMap.size(), 2);
  operations = new HashSet<>(operationMap.get(DIM_LZ4_LONG));
  assertEquals(operations.size(), 2);
  expectedOperations = new HashSet<>(Arrays.asList(ForwardIndexHandler.Operation.DISABLE_FORWARD_INDEX,
      ForwardIndexHandler.Operation.ENABLE_DICTIONARY));
  assertEquals(expectedOperations, operations);
  operations = new HashSet<>(operationMap.get(DIM_SNAPPY_STRING));
  assertEquals(operations.size(), 2);
  assertEquals(expectedOperations, operations);
  // TEST6: Disable forward index for a dictionary and a raw column with forward index enabled
  indexLoadingConfig = new IndexLoadingConfig(null, _tableConfig);
  indexLoadingConfig.addForwardIndexDisabledColumns(DIM_ZSTANDARD_INTEGER);
  indexLoadingConfig.removeNoDictionaryColumns(DIM_ZSTANDARD_INTEGER);
  indexLoadingConfig.addInvertedIndexColumns(DIM_ZSTANDARD_INTEGER);
  indexLoadingConfig.addInvertedIndexColumns(DIM_DICT_STRING);
  indexLoadingConfig.addForwardIndexDisabledColumns(DIM_DICT_STRING);
  fwdIndexHandler = new ForwardIndexHandler(segmentLocalFSDirectory, indexLoadingConfig, _schema);
  operationMap = fwdIndexHandler.computeOperations(writer);
  assertEquals(operationMap.size(), 2);
  operations = new HashSet<>(operationMap.get(DIM_ZSTANDARD_INTEGER));
  assertEquals(operations.size(), 2);
  expectedOperations = new HashSet<>(Arrays.asList(ForwardIndexHandler.Operation.DISABLE_FORWARD_INDEX,
      ForwardIndexHandler.Operation.ENABLE_DICTIONARY));
  assertEquals(expectedOperations, operations);
  assertEquals(operationMap.get(DIM_DICT_STRING),
      Collections.singletonList(ForwardIndexHandler.Operation.DISABLE_FORWARD_INDEX));
  // TEST7: Disable forward index for a raw column without enabling dictionary or inverted index
  indexLoadingConfig = new IndexLoadingConfig(null, _tableConfig);
  indexLoadingConfig.addForwardIndexDisabledColumns(DIM_LZ4_INTEGER);
  fwdIndexHandler = new ForwardIndexHandler(segmentLocalFSDirectory, indexLoadingConfig, _schema);
  operationMap = fwdIndexHandler.computeOperations(writer);
  assertEquals(operationMap.size(), 1);
  assertEquals(operationMap.get(DIM_LZ4_INTEGER),
      Collections.singletonList(ForwardIndexHandler.Operation.DISABLE_FORWARD_INDEX));
  // TEST8: Disable forward index for a dictionary column and also disable dictionary and inverted index
  indexLoadingConfig = new IndexLoadingConfig(null, _tableConfig);
  indexLoadingConfig.addForwardIndexDisabledColumns(DIM_DICT_INTEGER);
  indexLoadingConfig.addNoDictionaryColumns(DIM_DICT_INTEGER);
  fwdIndexHandler = new ForwardIndexHandler(segmentLocalFSDirectory, indexLoadingConfig, _schema);
  operationMap = fwdIndexHandler.computeOperations(writer);
  assertEquals(operationMap.size(), 1);
  operations = new HashSet<>(operationMap.get(DIM_DICT_INTEGER));
  assertEquals(operations.size(), 2);
  expectedOperations = new HashSet<>(Arrays.asList(ForwardIndexHandler.Operation.DISABLE_FORWARD_INDEX,
      ForwardIndexHandler.Operation.DISABLE_DICTIONARY));
  assertEquals(expectedOperations, operations);
  // TEST9: Disable dictionary on a column that already has forward index disabled
  indexLoadingConfig = new IndexLoadingConfig(null, _tableConfig);
  indexLoadingConfig.addForwardIndexDisabledColumns(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER);
  indexLoadingConfig.addNoDictionaryColumns(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER);
  indexLoadingConfig.removeInvertedIndexColumns(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER);
  fwdIndexHandler = new ForwardIndexHandler(segmentLocalFSDirectory, indexLoadingConfig, null);
  operationMap = fwdIndexHandler.computeOperations(writer);
  assertEquals(operationMap.size(), 1);
  assertEquals(operationMap.get(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER),
      Collections.singletonList(ForwardIndexHandler.Operation.DISABLE_DICTIONARY));
  // TEST10: Disable inverted index on a column that already has forward index disabled
  indexLoadingConfig = new IndexLoadingConfig(null, _tableConfig);
  indexLoadingConfig.addForwardIndexDisabledColumns(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER);
  indexLoadingConfig.removeInvertedIndexColumns(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER);
  fwdIndexHandler = new ForwardIndexHandler(segmentLocalFSDirectory, indexLoadingConfig, null);
  operationMap = fwdIndexHandler.computeOperations(writer);
  assertEquals(operationMap, Collections.EMPTY_MAP);
  // TEST11: Disable dictionary on a column that already has forward index disabled and inverted index disabled
  indexLoadingConfig = new IndexLoadingConfig(null, _tableConfig);
  indexLoadingConfig.addNoDictionaryColumns(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER_WITHOUT_INV_IDX);
  fwdIndexHandler = new ForwardIndexHandler(segmentLocalFSDirectory, indexLoadingConfig, null);
  operationMap = fwdIndexHandler.computeOperations(writer);
  assertEquals(operationMap.size(), 1);
  assertEquals(operationMap.get(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER_WITHOUT_INV_IDX),
      Collections.singletonList(ForwardIndexHandler.Operation.DISABLE_DICTIONARY));
  // TEST12: Enable dictionary on a column that already has forward index disabled and dictionary disabled
  indexLoadingConfig = new IndexLoadingConfig(null, _tableConfig);
  indexLoadingConfig.addForwardIndexDisabledColumns(DIM_RAW_SV_FORWARD_INDEX_DISABLED_INTEGER);
  indexLoadingConfig.removeNoDictionaryColumns(DIM_RAW_SV_FORWARD_INDEX_DISABLED_INTEGER);
  fwdIndexHandler = new ForwardIndexHandler(segmentLocalFSDirectory, indexLoadingConfig, null);
  try {
    operationMap = fwdIndexHandler.computeOperations(writer);
    Assert.fail("Enabling dictionary on forward index disabled column is not possible");
  } catch (IllegalStateException e) {
    assertEquals(e.getMessage(), "Cannot regenerate the dictionary for column "
        + "DIM_RAW_SV_FORWARD_INDEX_DISABLED_INTEGER with forward index disabled. Please refresh or back-fill "
        + "the data to add back the forward index");
  }
  // TEST13: Disable dictionary on a column that already has forward index disabled without an inverted index but
  // with a range index
  indexLoadingConfig = new IndexLoadingConfig(null, _tableConfig);
  indexLoadingConfig.addForwardIndexDisabledColumns(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER_WITH_RANGE_INDEX);
  indexLoadingConfig.addNoDictionaryColumns(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER_WITH_RANGE_INDEX);
  fwdIndexHandler = new ForwardIndexHandler(segmentLocalFSDirectory, indexLoadingConfig, null);
  try {
    operationMap = fwdIndexHandler.computeOperations(writer);
    Assert.fail("Disabling dictionary on forward index disabled column without inverted index but which has a "
        + "range index is not possible");
  } catch (IllegalStateException e) {
    assertEquals(e.getMessage(), "Must disable range (enabled) index to disable the dictionary for a "
        + "forwardIndexDisabled column: DIM_SV_FORWARD_INDEX_DISABLED_INTEGER_WITH_RANGE_INDEX or refresh / "
        + "back-fill the forward index");
  }
  // TEST14 (was mislabeled TEST13): Disable dictionary on a column that already has forward index disabled and
  // inverted index enabled with a range index
  indexLoadingConfig = new IndexLoadingConfig(null, _tableConfig);
  indexLoadingConfig.addNoDictionaryColumns(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER);
  indexLoadingConfig.addRangeIndexColumns(DIM_SV_FORWARD_INDEX_DISABLED_INTEGER);
  fwdIndexHandler = new ForwardIndexHandler(segmentLocalFSDirectory, indexLoadingConfig, null);
  try {
    operationMap = fwdIndexHandler.computeOperations(writer);
    Assert.fail("Disabling dictionary on forward index disabled column with inverted index and a range index "
        + "is not possible");
  } catch (IllegalStateException e) {
    assertEquals(e.getMessage(), "Must disable range (enabled) index to disable the dictionary for a "
        + "forwardIndexDisabled column: DIM_SV_FORWARD_INDEX_DISABLED_INTEGER or refresh / back-fill the "
        + "forward index");
  }
}
|
/**
 * Serializes this LACP frame: version byte followed by the actor, partner,
 * collector and terminator TLVs, each written as |type|length|payload|.
 *
 * <p>A TLV missing from the {@code tlv} map is written with an empty payload
 * (its type and declared length bytes are still emitted).
 *
 * @return the wire-format byte image of this frame
 */
@Override
public byte[] serialize() {
    final byte[] actorInfo = serializedTlvPayload(TYPE_ACTOR);
    final byte[] partnerInfo = serializedTlvPayload(TYPE_PARTNER);
    final byte[] collectorInfo = serializedTlvPayload(TYPE_COLLECTOR);
    final byte[] terminatorInfo = serializedTlvPayload(TYPE_TERMINATOR);
    final byte[] data = new byte[HEADER_LENGTH
        + LacpTlv.HEADER_LENGTH + actorInfo.length
        + LacpTlv.HEADER_LENGTH + partnerInfo.length
        + LacpTlv.HEADER_LENGTH + collectorInfo.length
        + LacpTlv.HEADER_LENGTH + terminatorInfo.length];
    final ByteBuffer bb = ByteBuffer.wrap(data);
    bb.put(lacpVersion);
    bb.put(TYPE_ACTOR);
    bb.put(LacpBaseTlv.LENGTH);
    bb.put(actorInfo);
    bb.put(TYPE_PARTNER);
    bb.put(LacpBaseTlv.LENGTH);
    bb.put(partnerInfo);
    bb.put(TYPE_COLLECTOR);
    bb.put(LacpCollectorTlv.LENGTH);
    bb.put(collectorInfo);
    bb.put(TYPE_TERMINATOR);
    bb.put(LacpTerminatorTlv.LENGTH);
    bb.put(terminatorInfo);
    return data;
}

/**
 * Returns the serialized payload of the TLV registered under {@code type},
 * or an empty array if no such TLV is present. Extracted to remove the
 * fourfold duplication of the Optional chain in {@link #serialize()}.
 */
private byte[] serializedTlvPayload(byte type) {
    return Optional.ofNullable(tlv.get(type)).map(LacpTlv::serialize)
        .orElse(new byte[0]);
}
|
@Test
public void serialize() {
    // The serialized frame must reproduce the reference byte image exactly.
    final byte[] serialized = LACP.serialize();
    assertArrayEquals(data, serialized);
}
|
/**
 * Converts a JSON schema string into Flink {@code TypeInformation}.
 * The parser is lenient: comments, unquoted field names and single quotes
 * are accepted.
 *
 * @throws IllegalArgumentException if the input is not parseable JSON
 */
@SuppressWarnings("unchecked")
public static <T> TypeInformation<T> convert(String jsonSchema) {
    Preconditions.checkNotNull(jsonSchema, "JSON schema");
    final ObjectMapper schemaMapper = JacksonMapperFactory.createObjectMapper();
    schemaMapper.getFactory()
        .enable(JsonParser.Feature.ALLOW_COMMENTS)
        .enable(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES)
        .enable(JsonParser.Feature.ALLOW_SINGLE_QUOTES);
    JsonNode rootNode;
    try {
        rootNode = schemaMapper.readTree(jsonSchema);
    } catch (IOException e) {
        throw new IllegalArgumentException("Invalid JSON schema.", e);
    }
    return (TypeInformation<T>) convertType("<root>", rootNode, rootNode);
}
|
@Test
void testWrongType() {
    // An unrecognized "type" value must be rejected with IllegalArgumentException.
    final String schemaWithUnknownType = "{ type: 'whatever' }";
    assertThatThrownBy(() -> JsonRowSchemaConverter.convert(schemaWithUnknownType))
        .isInstanceOf(IllegalArgumentException.class);
}
|
/**
 * Builds a {@code CreateSourceAsProperties} from the supplied literals,
 * rewording any {@code ConfigException} so users see "property" instead of
 * the internal "configuration" terminology.
 *
 * @throws KsqlException if the literals are invalid
 */
public static CreateSourceAsProperties from(final Map<String, Literal> literals) {
    try {
        return new CreateSourceAsProperties(literals, false);
    } catch (final ConfigException e) {
        // Re-surface validation failures in user-facing terminology.
        throw new KsqlException(e.getMessage().replace("configuration", "property"), e);
    }
}
|
@Test
public void shouldThrowIfValueSchemaNameAndAvroSchemaNameProvided() {
    // Given: both the generic and the Avro-specific schema-name properties set.
    final ImmutableMap<String, Literal> props = ImmutableMap.<String, Literal>builder()
        .put(VALUE_SCHEMA_FULL_NAME, new StringLiteral("value_schema"))
        .put(VALUE_AVRO_SCHEMA_FULL_NAME, new StringLiteral("value_schema"))
        .build();
    // When:
    final Exception exception =
        assertThrows(KsqlException.class, () -> CreateSourceAsProperties.from(props));
    // Then:
    assertThat(exception.getMessage(), is("Cannot supply both 'VALUE_AVRO_SCHEMA_FULL_NAME' "
        + "and 'VALUE_SCHEMA_FULL_NAME' properties. Please only set 'VALUE_SCHEMA_FULL_NAME'."));
}
|
/**
 * Creates (or returns the cached) singleton extension instance for the given class.
 *
 * <p>Instances are cached per class loader so plugins with isolated loaders get
 * independent singletons. Only classes named in {@code extensionClassNames} are
 * cached (an empty set means "cache everything"); others are created fresh on
 * every call.
 *
 * <p>NOTE(review): the backing maps appear to be plain {@code HashMap}s — this
 * method does not look thread-safe; confirm callers are single-threaded.
 */
@Override
@SuppressWarnings("unchecked")
public <T> T create(Class<T> extensionClass) {
    String extensionClassName = extensionClass.getName();
    ClassLoader extensionClassLoader = extensionClass.getClassLoader();
    // computeIfAbsent replaces the containsKey/put/get triple.
    Map<String, Object> classLoaderBucket =
        cache.computeIfAbsent(extensionClassLoader, classLoader -> new HashMap<>());
    if (classLoaderBucket.containsKey(extensionClassName)) {
        return (T) classLoaderBucket.get(extensionClassName);
    }
    T extension = super.create(extensionClass);
    // Empty whitelist means every extension is a singleton.
    if (extensionClassNames.isEmpty() || extensionClassNames.contains(extensionClassName)) {
        classLoaderBucket.put(extensionClassName, extension);
    }
    return extension;
}
|
@Test
public void createNewEachTime() {
    // Only "FailTestExtension.class" is registered for singleton caching, so
    // TestExtension should yield a fresh instance on every create() call.
    ExtensionFactory factory =
        new SingletonExtensionFactory(pluginManager, "FailTestExtension.class");
    Object first = factory.create(TestExtension.class);
    Object second = factory.create(TestExtension.class);
    assertNotSame(first, second);
}
|
/**
 * Returns the {@link DoFnSignature} for the given {@link DoFn} subclass,
 * parsing it on first request and serving cached results thereafter.
 */
public static <FnT extends DoFn<?, ?>> DoFnSignature getSignature(Class<FnT> fn) {
    return signatureCache.computeIfAbsent(fn, fnClass -> DoFnSignatures.parseSignature(fnClass));
}
|
@Test
public void testDeclAndUsageOfTimerInSuperclass() throws Exception {
    DoFnSignature signature =
        DoFnSignatures.getSignature(new DoFnOverridingAbstractTimerUse().getClass());
    assertThat(signature.timerDeclarations().size(), equalTo(1));
    assertThat(signature.processElement().extraParameters().size(), equalTo(2));
    DoFnSignature.TimerDeclaration timerDecl =
        signature.timerDeclarations()
            .get(TimerDeclaration.PREFIX + DoFnOverridingAbstractTimerUse.TIMER_ID);
    TimerParameter timerParameter =
        (TimerParameter) signature.processElement().extraParameters().get(1);
    // The field we pull out is the superclass declaration; this is what allows
    // validation to remain simple. The later invokeDynamic instruction causes it
    // to invoke the actual implementation.
    assertThat(
        timerDecl.field(),
        equalTo(DoFnDeclaringTimerAndAbstractUse.class.getDeclaredField("myTimerSpec")));
    assertThat(timerParameter.referent(), equalTo(timerDecl));
}
|
/**
 * Computes the number of bytes needed to transport {@code value}.
 *
 * <p>Layout for fixed-width values: |type flag (1 byte)|value|.
 * Layout for strings: |type flag (1 byte)|length (4 bytes)|content|.
 * {@code null} and unsupported (e.g. POJO) values are not transported and
 * report size 0.
 *
 * <p>Note: the original code also tested {@code int.class.isInstance(value)}
 * and friends; {@code Class#isInstance} on a primitive class always returns
 * false for an {@code Object} argument (values arrive boxed), so those checks
 * were dead code and have been removed — behavior is unchanged.
 */
int calculateParamTransportSize(Object value) {
    if (value == null) {
        return 0;
    }
    if (value instanceof Integer) {
        return 1 + 4;
    }
    if (value instanceof String) {
        // NOTE(review): getBytes() uses the platform default charset; this must
        // match the charset used when the string content is actually written —
        // confirm against the serializer.
        return 1 + 4 + ((String) value).getBytes().length;
    }
    if (value instanceof Boolean) {
        return 1 + 1;
    }
    if (value instanceof Long) {
        return 1 + 8;
    }
    if (value instanceof Double) {
        return 1 + 8;
    }
    if (value instanceof Float) {
        return 1 + 4;
    }
    if (value instanceof Byte) {
        return 1 + 1;
    }
    if (value instanceof Short) {
        return 1 + 2;
    }
    // Ignore unexpected type.
    return 0;
}
|
@Test
public void testCalculateParamTransportSize() {
    ParamFlowRequestDataWriter dataWriter = new ParamFlowRequestDataWriter();
    // A POJO (non-primitive type) should not be regarded as a valid parameter.
    assertEquals(0, dataWriter.calculateParamTransportSize(new SomePojo().setParam1("abc")));
    // Fixed-width values: 1 type-flag byte + the value's own width.
    assertEquals(1 + 4, dataWriter.calculateParamTransportSize(1));
    assertEquals(1 + 1, dataWriter.calculateParamTransportSize((byte) 1));
    assertEquals(1 + 1, dataWriter.calculateParamTransportSize(false));
    assertEquals(1 + 8, dataWriter.calculateParamTransportSize(2L));
    assertEquals(1 + 8, dataWriter.calculateParamTransportSize(4.0d));
    // Strings: 1 type-flag byte + 4-byte length prefix + encoded content.
    final String paramStr = "Sentinel";
    assertEquals(1 + 4 + paramStr.getBytes().length,
        dataWriter.calculateParamTransportSize(paramStr));
}
|
/**
 * Returns a new heap buffer containing the remaining bytes of {@code buf}
 * (position to limit), flipped and ready for reading. The source buffer's
 * position is left untouched on both paths.
 *
 * @param buf source buffer (heap or direct); not modified
 * @return an independent heap copy of the remaining bytes
 */
public static ByteBuffer cloneByteBuffer(ByteBuffer buf) {
    int remaining = buf.limit() - buf.position();
    ByteBuffer ret = ByteBuffer.allocate(remaining);
    if (buf.hasArray()) {
        // Bug fix: the backing array may start at a non-zero arrayOffset()
        // (e.g. for slice()d or offset-wrapped buffers); it must be added to
        // the position when indexing into the raw array.
        ret.put(buf.array(), buf.arrayOffset() + buf.position(), remaining);
    } else {
        // Direct (or read-only) buffer. Copy via a duplicate so the source's
        // position is not consumed — the array path above never moved it,
        // while a bulk put(buf) would have (bug fix for consistency).
        ret.put(buf.duplicate());
    }
    ret.flip();
    return ret;
}
|
/**
 * Verifies that cloning a direct buffer copies its remaining bytes.
 *
 * <p>Bug fix: the original test never flipped {@code bufDirect} after writing,
 * so position == limit, the clone was zero-length, and the equality assertion
 * passed vacuously. The clone's content is now checked explicitly (rather than
 * via {@code equals} against the source, whose position-based "remaining" view
 * would make the comparison fragile).
 */
@Test
public void cloneDirectByteBuffer() {
    final int bufferSize = 10;
    ByteBuffer bufDirect = ByteBuffer.allocateDirect(bufferSize);
    for (byte i = 0; i < bufferSize; i++) {
        bufDirect.put(i);
    }
    // Make the written bytes the "remaining" region before cloning.
    bufDirect.flip();
    ByteBuffer bufClone = BufferUtils.cloneByteBuffer(bufDirect);
    assertEquals(bufferSize, bufClone.remaining());
    for (int i = 0; i < bufferSize; i++) {
        assertEquals((byte) i, bufClone.get(i));
    }
}
|
/**
 * Declares the Drools rule(s) for a single scorecard {@code Attribute},
 * delegating rule construction to {@link KiePMMLPredicateASTFactory}.
 * No rule is declared at all when the attribute's predicate is {@code False}.
 */
protected void declareRuleFromAttribute(final Attribute attribute, final String parentPath,
                                        final int attributeIndex,
                                        final List<KiePMMLDroolsRule> rules,
                                        final String statusToSet,
                                        final String characteristicReasonCode,
                                        final Number characteristicBaselineScore,
                                        final boolean isLastCharacteristic) {
    logger.trace("declareRuleFromAttribute {} {}", attribute, parentPath);
    final Predicate predicate = attribute.getPredicate();
    // A "False" predicate means the rule should not be created at all.
    // Different semantics would have to be implemented if the "False"/"True"
    // predicates were declared inside an XOR compound predicate.
    if (predicate instanceof False) {
        return;
    }
    final String currentRule = String.format(PATH_PATTERN, parentPath, attributeIndex);
    final KiePMMLReasonCodeAndValue reasonCodeAndValue =
            getKiePMMLReasonCodeAndValue(attribute, characteristicReasonCode, characteristicBaselineScore);
    final PredicateASTFactoryData predicateASTFactoryData =
            new PredicateASTFactoryData(predicate, outputFields, rules, parentPath, currentRule, fieldTypeMap);
    KiePMMLPredicateASTFactory.factory(predicateASTFactoryData)
            .declareRuleFromPredicate(attribute.getPartialScore(), statusToSet, reasonCodeAndValue,
                    isLastCharacteristic);
}
|
@Test
void declareRuleFromAttributeWithSimplePredicateLastCharacteristic() {
    final Attribute simplePredicateAttribute = getSimplePredicateAttribute();
    final String expectedParentPath = "parent_path";
    final int expectedAttributeIndex = 2;
    final List<KiePMMLDroolsRule> generatedRules = new ArrayList<>();
    final String expectedStatus = "status_to_set";
    final String reasonCode = "REASON_CODE";
    final double baselineScore = 12;
    // Declaring for the *last* characteristic.
    final boolean lastCharacteristic = true;
    getKiePMMLScorecardModelCharacteristicASTFactory()
            .declareRuleFromAttribute(simplePredicateAttribute, expectedParentPath, expectedAttributeIndex,
                    generatedRules, expectedStatus, reasonCode, baselineScore, lastCharacteristic);
    assertThat(generatedRules).hasSize(1);
    commonValidateRule(generatedRules.get(0),
                       simplePredicateAttribute,
                       expectedStatus,
                       expectedParentPath,
                       expectedAttributeIndex,
                       lastCharacteristic,
                       1,
                       null,
                       BOOLEAN_OPERATOR.AND,
                       "value <= 5.0",
                       1);
}
|
/**
 * Extracts the view name from a metadata node path, or empty when the path
 * does not denote a view node.
 *
 * <p>NOTE(review): {@code group(3)} relies on {@code getMetaDataNode()} +
 * {@code VIEWS_PATTERN} contributing exactly two capturing groups ahead of the
 * view-name group — confirm if either pattern changes.
 */
public static Optional<String> getViewName(final String path) {
    final Pattern viewNamePattern = Pattern.compile(
            getMetaDataNode() + VIEWS_PATTERN + "/([\\w\\-]+)$", Pattern.CASE_INSENSITIVE);
    final Matcher matcher = viewNamePattern.matcher(path);
    if (!matcher.find()) {
        return Optional.empty();
    }
    return Optional.of(matcher.group(3));
}
|
@Test
void assertGetViewName() {
    final String viewPath = "/metadata/foo_db/schemas/foo_schema/views/foo_view";
    Optional<String> actual = ViewMetaDataNode.getViewName(viewPath);
    assertTrue(actual.isPresent());
    assertThat(actual.get(), is("foo_view"));
}
|
/**
 * Extracts SQL-hint key/value pairs from an inline SQL comment and maps the
 * recognized keys onto a {@link HintValueContext}.
 *
 * <p>Recognized keys: data source name, write-route-only, skip-SQL-rewrite,
 * disable-audit-names (comma-split), shadow, and per-table sharding
 * database/table values (matched by key prefix and stored upper-cased).
 * Returns an empty context when the SQL carries no hint comment.
 *
 * @param sql SQL text that may contain a hint comment
 * @return the populated hint context (never null)
 */
public static HintValueContext extractHint(final String sql) {
    if (!containsSQLHint(sql)) {
        return new HintValueContext();
    }
    HintValueContext result = new HintValueContext();
    // Isolate the "key=value,..." text between the hint prefix and the closing
    // comment marker.
    int hintKeyValueBeginIndex = getHintKeyValueBeginIndex(sql);
    String hintKeyValueText = sql.substring(hintKeyValueBeginIndex, sql.indexOf(SQL_COMMENT_SUFFIX, hintKeyValueBeginIndex));
    Map<String, String> hintKeyValues = getSQLHintKeyValues(hintKeyValueText);
    if (containsHintKey(hintKeyValues, SQLHintPropertiesKey.DATASOURCE_NAME_KEY)) {
        result.setDataSourceName(getHintValue(hintKeyValues, SQLHintPropertiesKey.DATASOURCE_NAME_KEY));
    }
    if (containsHintKey(hintKeyValues, SQLHintPropertiesKey.WRITE_ROUTE_ONLY_KEY)) {
        result.setWriteRouteOnly(Boolean.parseBoolean(getHintValue(hintKeyValues, SQLHintPropertiesKey.WRITE_ROUTE_ONLY_KEY)));
    }
    if (containsHintKey(hintKeyValues, SQLHintPropertiesKey.SKIP_SQL_REWRITE_KEY)) {
        result.setSkipSQLRewrite(Boolean.parseBoolean(getHintValue(hintKeyValues, SQLHintPropertiesKey.SKIP_SQL_REWRITE_KEY)));
    }
    if (containsHintKey(hintKeyValues, SQLHintPropertiesKey.DISABLE_AUDIT_NAMES_KEY)) {
        String property = getHintValue(hintKeyValues, SQLHintPropertiesKey.DISABLE_AUDIT_NAMES_KEY);
        result.getDisableAuditNames().addAll(getSplitterSQLHintValue(property));
    }
    if (containsHintKey(hintKeyValues, SQLHintPropertiesKey.SHADOW_KEY)) {
        result.setShadow(Boolean.parseBoolean(getHintValue(hintKeyValues, SQLHintPropertiesKey.SHADOW_KEY)));
    }
    // Sharding value keys are dynamic (they embed a table name), so every
    // remaining entry is tested against the database/table key prefixes.
    for (Entry<String, String> entry : hintKeyValues.entrySet()) {
        Object value = convert(entry.getValue());
        // Non-Comparable converted values fall back to their string form so they
        // can still be stored as sharding values.
        Comparable<?> comparable = value instanceof Comparable ? (Comparable<?>) value : Objects.toString(value);
        if (containsHintKey(Objects.toString(entry.getKey()), SQLHintPropertiesKey.SHARDING_DATABASE_VALUE_KEY)) {
            result.getShardingDatabaseValues().put(Objects.toString(entry.getKey()).toUpperCase(), comparable);
        }
        if (containsHintKey(Objects.toString(entry.getKey()), SQLHintPropertiesKey.SHARDING_TABLE_VALUE_KEY)) {
            result.getShardingTableValues().put(Objects.toString(entry.getKey()).toUpperCase(), comparable);
        }
    }
    return result;
}
|
@Test
void assertSQLHintSkipSQLRewrite() {
    final String hintComment = "/* SHARDINGSPHERE_HINT: SKIP_SQL_REWRITE=true */";
    HintValueContext actual = SQLHintUtils.extractHint(hintComment);
    assertTrue(actual.isSkipSQLRewrite());
}
|
/**
 * Reads message fragments from a term buffer, dispatching each non-padding frame to the
 * supplied {@link FragmentHandler}, and advances the subscriber position by the number of
 * bytes consumed.
 *
 * @param termBuffer         buffer containing the term to read from.
 * @param termOffset         offset within the term at which to begin reading.
 * @param handler            callback invoked for each data fragment (padding frames are skipped).
 * @param fragmentsLimit     maximum number of fragments to dispatch in this call.
 * @param header             reusable header flyweight; rebound to {@code termBuffer} here.
 * @param errorHandler       sink for any exception thrown by the handler.
 * @param currentPosition    subscriber position before this read.
 * @param subscriberPosition position to update (ordered) with the bytes consumed.
 * @return the number of fragments dispatched to the handler.
 */
public static int read(
    final UnsafeBuffer termBuffer,
    final int termOffset,
    final FragmentHandler handler,
    final int fragmentsLimit,
    final Header header,
    final ErrorHandler errorHandler,
    final long currentPosition,
    final Position subscriberPosition)
{
    int fragmentsRead = 0;
    int offset = termOffset;
    final int capacity = termBuffer.capacity();
    header.buffer(termBuffer);
    try
    {
        while (fragmentsRead < fragmentsLimit && offset < capacity)
        {
            // Volatile read: a frame length <= 0 means the frame is not yet committed
            // by the publisher, so stop here rather than read a partial message.
            final int frameLength = frameLengthVolatile(termBuffer, offset);
            if (frameLength <= 0)
            {
                break;
            }
            final int frameOffset = offset;
            // Advance past the whole aligned frame even if it turns out to be padding.
            offset += BitUtil.align(frameLength, FRAME_ALIGNMENT);
            if (!isPaddingFrame(termBuffer, frameOffset))
            {
                ++fragmentsRead;
                header.offset(frameOffset);
                handler.onFragment(termBuffer, frameOffset + HEADER_LENGTH, frameLength - HEADER_LENGTH, header);
            }
        }
    }
    catch (final Exception ex)
    {
        errorHandler.onError(ex);
    }
    finally
    {
        // Always publish progress for the bytes consumed, even if the handler threw:
        // offset has already been advanced past every frame that was visited.
        final long newPosition = currentPosition + (offset - termOffset);
        if (newPosition > currentPosition)
        {
            subscriberPosition.setOrdered(newPosition);
        }
    }
    return fragmentsRead;
}
|
@Test
void shouldReadFirstMessage()
{
    // A single committed frame at offset 0 should yield exactly one fragment and then
    // advance the subscriber position by the aligned frame length.
    final int msgLength = 1;
    final int frameLength = HEADER_LENGTH + msgLength;
    final int alignedFrameLength = align(frameLength, FRAME_ALIGNMENT);
    final int termOffset = 0;
    when(termBuffer.getIntVolatile(0)).thenReturn(frameLength);
    when(termBuffer.getShort(typeOffset(0))).thenReturn((short)HDR_TYPE_DATA);
    final int readOutcome = TermReader.read(
        termBuffer, termOffset, handler, Integer.MAX_VALUE, header, errorHandler, 0, subscriberPosition);
    assertEquals(1, readOutcome);
    // Verify the volatile length read happens before dispatch, and dispatch before
    // the position update.
    final InOrder inOrder = inOrder(termBuffer, handler, subscriberPosition);
    inOrder.verify(termBuffer).getIntVolatile(0);
    inOrder.verify(handler).onFragment(eq(termBuffer), eq(HEADER_LENGTH), eq(msgLength), any(Header.class));
    inOrder.verify(subscriberPosition).setOrdered(alignedFrameLength);
}
|
/**
 * Produces a description of this topology's structure, delegating to the
 * internal topology builder.
 *
 * @return the {@link TopologyDescription} for this topology.
 */
public synchronized TopologyDescription describe() {
    final TopologyDescription description = internalTopologyBuilder.describe();
    return description;
}
|
@Test
public void kGroupedStreamZeroArgCountWithTopologyConfigShouldPreserveTopologyStructure() {
    // override the default store into in-memory
    final StreamsBuilder builder = new StreamsBuilder(overrideDefaultStore(StreamsConfig.IN_MEMORY));
    builder.stream("input-topic")
        .groupByKey()
        .count();
    final Topology topology = builder.build();
    final TopologyDescription describe = topology.describe();
    // The described structure must be identical regardless of the store override.
    assertEquals(
        "Topology: my-topology:\n" +
            "   Sub-topology: 0\n" +
            "    Source: KSTREAM-SOURCE-0000000000 (topics: [input-topic])\n" +
            "      --> KSTREAM-AGGREGATE-0000000002\n" +
            "    Processor: KSTREAM-AGGREGATE-0000000002 (stores: [KSTREAM-AGGREGATE-STATE-STORE-0000000001])\n" +
            "      --> none\n" +
            "      <-- KSTREAM-SOURCE-0000000000\n\n",
        describe.toString()
    );
    topology.internalTopologyBuilder.setStreamsConfig(streamsConfig);
    // In-memory override means the built topology must not report a persistent local store.
    assertThat(topology.internalTopologyBuilder.setApplicationId("test").buildTopology().hasPersistentLocalStore(), is(false));
}
|
/**
 * Parses a percent-encoded URL string into a {@link URL}.
 *
 * <p>The path/query boundary is the percent-encoded '?' ("%3F" or "%3f"). The previous
 * implementation located it by calling {@code toUpperCase()} on the whole string, which
 * both allocates a copy and is locale-sensitive (e.g. "\u00df" upper-cases to "SS",
 * shifting every subsequent index). Searching for both hex-case variants directly avoids
 * both problems.
 *
 * @param encodedURLStr the fully percent-encoded URL string.
 * @return the decoded {@link URL}.
 */
public static URL parseEncodedStr(String encodedURLStr) {
    Map<String, String> parameters = null;
    // Earliest occurrence of the encoded '?' in either hex case.
    int upperIdx = encodedURLStr.indexOf("%3F");
    int lowerIdx = encodedURLStr.indexOf("%3f");
    int pathEndIdx;
    if (upperIdx < 0) {
        pathEndIdx = lowerIdx;
    } else if (lowerIdx < 0) {
        pathEndIdx = upperIdx;
    } else {
        pathEndIdx = Math.min(upperIdx, lowerIdx);
    }
    if (pathEndIdx >= 0) {
        // Skip the 3-character "%3F" token itself before parsing parameters.
        parameters = parseEncodedParams(encodedURLStr, pathEndIdx + 3);
    } else {
        pathEndIdx = encodedURLStr.length();
    }
    // decodedBody format: [protocol://][username:password@][host:port]/[path]
    String decodedBody = decodeComponent(encodedURLStr, 0, pathEndIdx, false, DECODE_TEMP_BUF.get());
    return parseURLBody(encodedURLStr, decodedBody, parameters);
}
|
@Test
void testEncoded() {
    // Round-trip: encoding a valid URL string and parsing it back must reproduce
    // the original URL value.
    testCases.forEach(testCase -> {
        assertThat(URLStrParser.parseEncodedStr(URL.encode(testCase)), equalTo(URL.valueOf(testCase)));
    });
    // Malformed encoded inputs must be rejected with a RuntimeException.
    errorEncodedCases.forEach(errorCase -> {
        Assertions.assertThrows(RuntimeException.class, () -> URLStrParser.parseEncodedStr(errorCase));
    });
}
|
/**
 * Selects {@code count} preferred workers for a file via the consistent-hash provider.
 *
 * <p>Workers chosen by the hash ring that are missing from the caller's cluster view
 * (a transient membership inconsistency) are logged and skipped, so the returned list
 * may contain fewer than {@code count} entries.
 *
 * @param workerClusterView the caller's view of cluster membership.
 * @param fileId            the file identifier used as the hash key.
 * @param count             the number of workers requested.
 * @return worker info for each selected worker still present in the view.
 * @throws ResourceExhaustedException if the cluster, or the hash ring, cannot supply
 *     {@code count} workers.
 */
@Override
public List<BlockWorkerInfo> getPreferredWorkers(WorkerClusterView workerClusterView,
    String fileId, int count) throws ResourceExhaustedException {
  // Fail fast when the cluster cannot possibly satisfy the request.
  if (workerClusterView.size() < count) {
    throw new ResourceExhaustedException(String.format(
        "Not enough workers in the cluster %d workers in the cluster but %d required",
        workerClusterView.size(), count));
  }
  // Rebuild the ring from current membership, then pick the candidates for this file.
  mHashProvider.refresh(workerClusterView.workerIds());
  List<WorkerIdentity> candidates = mHashProvider.getMultiple(fileId, count);
  if (candidates.size() != count) {
    throw new ResourceExhaustedException(String.format(
        "Found %d workers from the hash ring but %d required", candidates.size(), count));
  }
  ImmutableList.Builder<BlockWorkerInfo> resultBuilder = ImmutableList.builder();
  for (WorkerIdentity candidate : candidates) {
    Optional<WorkerInfo> maybeInfo = workerClusterView.getWorkerById(candidate);
    if (!maybeInfo.isPresent()) {
      // the worker returned by the policy does not exist in the cluster view
      // supplied by the client.
      // this can happen when the membership changes and some callers fail to update
      // to the latest worker cluster view.
      // in this case, just skip this worker
      LOG.debug("Inconsistency between caller's view of cluster and that of "
          + "the consistent hash policy's: worker {} selected by policy does not exist in "
          + "caller's view {}. Skipping this worker.",
          candidate, workerClusterView);
      continue;
    }
    WorkerInfo info = maybeInfo.get();
    resultBuilder.add(new BlockWorkerInfo(
        candidate, info.getAddress(), info.getCapacityBytes(),
        info.getUsedBytes(), info.getState() == WorkerState.LIVE));
  }
  return resultBuilder.build();
}
|
@Test
public void workerAddrUpdateWithIdUnchanged() throws Exception {
    // Worker selection must be stable on identity: when a worker keeps its id but
    // moves to a new host, the same workers are selected and the new address is seen.
    MultiProbeHashPolicy policy = new MultiProbeHashPolicy(mConf);
    List<WorkerInfo> workers = new ArrayList<>();
    workers.add(new WorkerInfo().setIdentity(WorkerIdentityTestUtils.ofLegacyId(1L))
        .setAddress(new WorkerNetAddress().setHost("host1"))
        .setCapacityBytes(0)
        .setUsedBytes(0)
        .setState(WorkerState.LIVE));
    workers.add(new WorkerInfo().setIdentity(WorkerIdentityTestUtils.ofLegacyId(2L))
        .setAddress(new WorkerNetAddress().setHost("host2"))
        .setCapacityBytes(0)
        .setUsedBytes(0)
        .setState(WorkerState.LIVE));
    List<BlockWorkerInfo> selectedWorkers =
        policy.getPreferredWorkers(new WorkerClusterView(workers), "fileId", 2);
    assertEquals("host1",
        selectedWorkers.stream()
            .filter(w -> w.getIdentity().equals(WorkerIdentityTestUtils.ofLegacyId(1L)))
            .findFirst()
            .get()
            .getNetAddress()
            .getHost());
    // now the worker 1 has migrated to host 3
    workers.set(0, new WorkerInfo().setIdentity(WorkerIdentityTestUtils.ofLegacyId(1L))
        .setAddress(new WorkerNetAddress().setHost("host3"))
        .setCapacityBytes(0)
        .setUsedBytes(0)
        .setState(WorkerState.LIVE));
    List<BlockWorkerInfo> updatedWorkers =
        policy.getPreferredWorkers(new WorkerClusterView(workers), "fileId", 2);
    // Same identities selected before and after the migration.
    assertEquals(
        selectedWorkers.stream().map(BlockWorkerInfo::getIdentity).collect(Collectors.toList()),
        updatedWorkers.stream().map(BlockWorkerInfo::getIdentity).collect(Collectors.toList()));
    // The refreshed selection reflects the new address of worker 1.
    assertEquals("host3",
        updatedWorkers.stream()
            .filter(w -> w.getIdentity().equals(WorkerIdentityTestUtils.ofLegacyId(1L)))
            .findFirst()
            .get()
            .getNetAddress()
            .getHost());
}
|
/**
 * Returns all registered plugins, sorted by {@code pluginComparator} when sorting
 * is enabled.
 *
 * <p>Both branches now return a snapshot. Previously the unsorted branch returned
 * {@code registeredPlugins.values()} directly — a live view of the internal map that
 * escaped the read lock, so callers could observe (or fail on) concurrent
 * registrations and deregistrations after this method returned.
 *
 * @return a snapshot collection of all registered plugins.
 */
@Override
public Collection<ThreadPoolPlugin> getAllPlugins() {
    return mainLock.applyWithReadLock(() -> {
        // sort if necessary
        if (isEnableSort()) {
            return registeredPlugins.values().stream()
                .sorted(pluginComparator)
                .collect(Collectors.toList());
        }
        // Defensive copy taken while still holding the read lock.
        return registeredPlugins.values().stream()
            .collect(Collectors.toList());
    });
}
|
@Test
public void testGetAllPlugins() {
    // Two distinct plugins registered -> both must be reported.
    manager.register(new TestExecuteAwarePlugin());
    manager.register(new TestRejectedAwarePlugin());
    Assert.assertEquals(2, manager.getAllPlugins().size());
}
|
/**
 * Processes an encoded CSV file, then persists any services collected during parsing
 * and wires up their parent/child relations.
 *
 * @param encodedCsvData the Base64/encoded CSV payload.
 * @param dryRun         when true, downstream steps run without persisting changes.
 * @return the result map produced by the superclass processing.
 * @throws JsonProcessingException if the superclass fails to process the payload.
 */
@Override
public Map<String, Object> processCsvFile(String encodedCsvData, boolean dryRun) throws JsonProcessingException {
    // Reset per-invocation accumulators that the superclass callbacks fill in.
    services = new HashMap<>();
    serviceParentChildren = new HashMap<>();
    final Map<String, Object> outcome = super.processCsvFile(encodedCsvData, dryRun);
    if (services.isEmpty()) {
        // Nothing was parsed; skip the persistence phase entirely.
        return outcome;
    }
    retrieveLegacyServiceIds();
    saveAll(dryRun);
    processServiceParentChildren(serviceParentChildren, dryRun);
    return outcome;
}
|
@Test
void processCsvFileFailDigidTrueWithMultipleServiceOrganizationRolesTest() throws IOException {
    // A service with digid=false and multiple organization roles must be rejected:
    // the row ends up in the "failed" list and nothing succeeds.
    // NOTE(review): the CSV literal below is anonymized fixture data.
    mockconnection();
    String csvData = """SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS""";
    Service service = new Service();
    service.setDigid(false);
    service.setServiceOrganizationRoles(List.of(new ServiceOrganizationRole(), new ServiceOrganizationRole()));
    Optional<Service> optService = Optional.of(service);
    when(serviceRepositoryMock.findFirstByServiceUuid("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS")).thenReturn(optService);
    Map<String, Object> resultMap = csvService.processCsvFile(encodeCsv(csvData), false);
    String expectedValue = "SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS";
    assertEquals("Bestand verwerkt", resultMap.get("result"));
    assertTrue(((ArrayList) resultMap.get("succeeded")).isEmpty());
    assertTrue(((ArrayList) resultMap.get("failed")).size() == 1);
    assertTrue(((ArrayList) resultMap.get("failed")).contains(expectedValue));
}
|
/**
 * Determines whether a STARS radar hit is "coasted", i.e. its track status field
 * matches one of the known coasted flags.
 *
 * @param starsRh the radar hit to inspect.
 * @return true when the track status is non-null and listed in
 *     {@code STARS_COASTED_FLAGS}; false otherwise (including a null status).
 */
public static boolean isCoastedRadarHit(StarsRadarHit starsRh) {
    String statusFieldValue = starsRh.trackStatus();
    // A null status can never match a coasted flag; short-circuit avoids the
    // ternary-returning-a-boolean-literal anti-pattern of the original.
    return statusFieldValue != null && STARS_COASTED_FLAGS.contains(statusFieldValue);
}
|
@Test
public void testIsCoastedRadarHit() {
    // Active tracks are not coasted; coasted and dropped messages both count as coasted.
    StarsRadarHit active = (StarsRadarHit) NopMessageType.parse(ACTIVE_STARS);
    StarsRadarHit coasted = (StarsRadarHit) NopMessageType.parse(COASTED_STARS);
    StarsRadarHit dropped = (StarsRadarHit) NopMessageType.parse(DROPPED_STARS);
    assertFalse(StarsSmoothing.isCoastedRadarHit(active));
    assertTrue(StarsSmoothing.isCoastedRadarHit(coasted));
    assertTrue(StarsSmoothing.isCoastedRadarHit(dropped));
}
|
/**
 * Lists the contents of a directory in B2, paging through results with a marker until
 * the listing is exhausted.
 *
 * <p>When versioning is enabled, file versions are listed (alphabetical by name,
 * reverse-chronological within a name); otherwise only current file names are listed.
 * Throws {@link NotfoundException} when neither a directory placeholder nor any child
 * object exists for the requested path.
 *
 * @param directory the directory to list.
 * @param listener  notified with each chunk of results as pages arrive.
 * @return the accumulated directory listing.
 * @throws BackgroundException on B2 API or I/O failure (mapped accordingly).
 */
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
    try {
        final AttributedList<Path> objects = new AttributedList<>();
        Marker marker = new Marker(null, null);
        final String containerId = fileid.getVersionId(containerService.getContainer(directory));
        // Seen placeholders
        final Map<String, Long> revisions = new HashMap<>();
        // A container itself never needs a placeholder object.
        boolean hasDirectoryPlaceholder = containerService.isContainer(directory);
        do {
            if(log.isDebugEnabled()) {
                log.debug(String.format("List directory %s with marker %s", directory, marker));
            }
            final B2ListFilesResponse response;
            if(versioning.isEnabled()) {
                // In alphabetical order by file name, and by reverse of date/time uploaded for
                // versions of files with the same name.
                response = session.getClient().listFileVersions(containerId,
                    marker.nextFilename, marker.nextFileId, chunksize,
                    this.createPrefix(directory),
                    String.valueOf(Path.DELIMITER));
            }
            else {
                response = session.getClient().listFileNames(containerId,
                    marker.nextFilename, chunksize,
                    this.createPrefix(directory),
                    String.valueOf(Path.DELIMITER));
            }
            marker = this.parse(directory, objects, response, revisions);
            if(null == marker.nextFileId) {
                // Any returned file implies the directory effectively exists.
                if(!response.getFiles().isEmpty()) {
                    hasDirectoryPlaceholder = true;
                }
            }
            listener.chunk(directory, objects);
        }
        while(marker.hasNext());
        if(!hasDirectoryPlaceholder && objects.isEmpty()) {
            if(log.isWarnEnabled()) {
                log.warn(String.format("No placeholder found for directory %s", directory));
            }
            throw new NotfoundException(directory.getAbsolute());
        }
        return objects;
    }
    catch(B2ApiException e) {
        throw new B2ExceptionMappingService(fileid).map("Listing directory {0} failed", e, directory);
    }
    catch(IOException e) {
        throw new DefaultIOExceptionMappingService().map(e);
    }
}
|
@Test
public void testListFileNameDot() throws Exception {
    // A file literally named "." must be listable as a child of its folder.
    final B2VersionIdProvider fileid = new B2VersionIdProvider(session);
    final Path bucket = new B2DirectoryFeature(session, fileid).mkdir(
        new Path(String.format("test-%s", new AsciiRandomStringService().random()), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus());
    final Path folder = new B2DirectoryFeature(session, fileid).mkdir(new Path(bucket, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus());
    final Path file = new B2TouchFeature(session, fileid).touch(new Path(folder, ".", EnumSet.of(Path.Type.file)), new TransferStatus());
    assertEquals(".", file.getName());
    assertEquals(folder, file.getParent());
    assertTrue(new B2ObjectListService(session, fileid).list(folder, new DisabledListProgressListener()).contains(file));
    // Clean up everything created by this test.
    new B2DeleteFeature(session, fileid).delete(Arrays.asList(file, folder, bucket), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
|
/**
 * Builds the compensating SQL for an INSERT: the rows recorded in the after image
 * are the rows that were inserted, so undoing means deleting them.
 *
 * @return the generated DELETE statement.
 * @throws ShouldNeverHappenException if the undo log carries no after-image rows.
 */
@Override
protected String buildUndoSQL() {
    final TableRecords afterImage = sqlUndoLog.getAfterImage();
    final List<Row> insertedRows = afterImage.getRows();
    if (CollectionUtils.isEmpty(insertedRows)) {
        throw new ShouldNeverHappenException("Invalid UNDO LOG");
    }
    return generateDeleteSql(insertedRows, afterImage);
}
|
@Test
public void buildUndoSQL() {
    // The undo for an upper-cased INSERT must be a DELETE referencing the key
    // column and the table name.
    OracleUndoInsertExecutor executor = upperCase();
    String sql = executor.buildUndoSQL();
    Assertions.assertNotNull(sql);
    Assertions.assertTrue(sql.contains("DELETE"));
    Assertions.assertTrue(sql.contains("ID"));
    Assertions.assertTrue(sql.contains("TABLE_NAME"));
}
|
/**
 * Generates the code block declaring a {@code KiePMMLApply} variable for the given PMML
 * {@code Apply} element.
 *
 * <p>Nested expressions are emitted first (each into its own generated variable named
 * {@code <variableName>_<index>}), then the template's builder chain is rewritten with the
 * variable name, function, default value, map-missing-to, invalid-value treatment, and the
 * list of nested variable references.
 *
 * @param variableName the name of the generated variable.
 * @param apply        the PMML Apply to translate.
 * @return a {@link BlockStmt} containing the nested declarations followed by the apply
 *     declaration.
 */
static BlockStmt getApplyVariableDeclaration(final String variableName, final Apply apply) {
    // Clone the method template so the shared template AST is never mutated.
    final MethodDeclaration methodDeclaration = APPLY_TEMPLATE.getMethodsByName(GETKIEPMMLAPPLY).get(0).clone();
    final BlockStmt applyBody =
        methodDeclaration.getBody().orElseThrow(() -> new KiePMMLException(String.format(MISSING_BODY_TEMPLATE, methodDeclaration)));
    final VariableDeclarator variableDeclarator =
        getVariableDeclarator(applyBody, APPLY).orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, APPLY, applyBody)));
    variableDeclarator.setName(variableName);
    final BlockStmt toReturn = new BlockStmt();
    int counter = 0;
    final NodeList<Expression> arguments = new NodeList<>();
    // Emit each nested expression before the apply declaration, collecting a
    // NameExpr reference to each generated variable.
    for (org.dmg.pmml.Expression expression : apply.getExpressions()) {
        String nestedVariableName = String.format(VARIABLE_NAME_TEMPLATE, variableName, counter);
        arguments.add(new NameExpr(nestedVariableName));
        BlockStmt toAdd = getKiePMMLExpressionBlockStmt(nestedVariableName, expression);
        toAdd.getStatements().forEach(toReturn::addStatement);
        counter++;
    }
    final MethodCallExpr initializer = variableDeclarator.getInitializer()
        .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, APPLY, toReturn)))
        .asMethodCallExpr();
    // Rewrite the template's builder chain with the concrete values from `apply`.
    final MethodCallExpr builder = getChainedMethodCallExprFrom("builder", initializer);
    final StringLiteralExpr nameExpr = new StringLiteralExpr(variableName);
    final StringLiteralExpr functionExpr = new StringLiteralExpr(apply.getFunction());
    builder.setArgument(0, nameExpr);
    builder.setArgument(2, functionExpr);
    getChainedMethodCallExprFrom("withDefaultValue", initializer).setArgument(0, getExpressionForObject(apply.getDefaultValue()));
    getChainedMethodCallExprFrom("withMapMissingTo", initializer).setArgument(0, getExpressionForObject(apply.getMapMissingTo()));
    // Invalid-value treatment may legitimately be absent; emit a null literal then.
    final Expression invalidTreatmentExpr = apply.getInvalidValueTreatment() != null ? new StringLiteralExpr(apply.getInvalidValueTreatment().value()) : new NullLiteralExpr();
    getChainedMethodCallExprFrom("withInvalidValueTreatmentMethod", initializer).setArgument(0, invalidTreatmentExpr);
    getChainedMethodCallExprFrom("asList", initializer).setArguments(arguments);
    applyBody.getStatements().forEach(toReturn::addStatement);
    return toReturn;
}
|
@Test
void getApplyVariableDeclarationWithFieldRefs() throws IOException {
    // An Apply with two FieldRef children must generate a block matching the
    // reference source, and that block must compile with the expected imports.
    String variableName = "variableName";
    Apply apply = new Apply();
    apply.setFunction(function);
    String mapMissingTo = "mapMissingTo";
    apply.setMapMissingTo(mapMissingTo);
    String defaultValue = "defaultValue";
    apply.setDefaultValue(defaultValue);
    InvalidValueTreatmentMethod invalidValueTreatmentMethod = InvalidValueTreatmentMethod.AS_MISSING;
    apply.setInvalidValueTreatment(invalidValueTreatmentMethod);
    FieldRef fieldRef1 = new FieldRef();
    fieldRef1.setField(PARAM_1);
    FieldRef fieldRef2 = new FieldRef();
    fieldRef2.setField(PARAM_2);
    apply.addExpressions(fieldRef1, fieldRef2);
    BlockStmt retrieved =
        org.kie.pmml.compiler.commons.codegenfactories.KiePMMLApplyFactory.getApplyVariableDeclaration(variableName, apply);
    String text = getFileContent(TEST_02_SOURCE);
    Statement expected = JavaParserUtils.parseBlock(String.format(text, PARAM_1, PARAM_2, variableName, function,
                                                                  defaultValue, mapMissingTo,
                                                                  invalidValueTreatmentMethod.value()));
    assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue();
    List<Class<?>> imports = Arrays.asList(KiePMMLFieldRef.class, KiePMMLApply.class, Collections.class,
                                           Arrays.class);
    commonValidateCompilationWithImports(retrieved, imports);
}
|
/**
 * Issues a new access token from a refresh token.
 *
 * <p>Validates that the refresh token exists and has not expired, that the requesting
 * client owns it and is allowed to refresh, and that any requested scopes are a subset
 * of the scopes originally granted (no up-scoping). Depending on client configuration,
 * existing access tokens may be cleared and the refresh token may be rotated.
 *
 * @param refreshTokenValue the presented refresh token value.
 * @param authRequest       the token request, carrying client id and requested scopes.
 * @return the newly minted, persisted access token.
 * @throws InvalidTokenException  if the refresh token is missing, unknown, or expired.
 * @throws InvalidClientException if the client does not own the token or may not refresh.
 * @throws InvalidScopeException  if the request asks for scopes beyond those granted.
 */
@Override
@Transactional(value="defaultTransactionManager")
public OAuth2AccessTokenEntity refreshAccessToken(String refreshTokenValue, TokenRequest authRequest) throws AuthenticationException {
    if (Strings.isNullOrEmpty(refreshTokenValue)) {
        // throw an invalid token exception if there's no refresh token value at all
        throw new InvalidTokenException("Invalid refresh token: " + refreshTokenValue);
    }
    // Expired tokens are purged during lookup; a null here means unknown or expired.
    OAuth2RefreshTokenEntity refreshToken = clearExpiredRefreshToken(tokenRepository.getRefreshTokenByValue(refreshTokenValue));
    if (refreshToken == null) {
        // throw an invalid token exception if we couldn't find the token
        throw new InvalidTokenException("Invalid refresh token: " + refreshTokenValue);
    }
    ClientDetailsEntity client = refreshToken.getClient();
    AuthenticationHolderEntity authHolder = refreshToken.getAuthenticationHolder();
    // make sure that the client requesting the token is the one who owns the refresh token
    ClientDetailsEntity requestingClient = clientDetailsService.loadClientByClientId(authRequest.getClientId());
    if (!client.getClientId().equals(requestingClient.getClientId())) {
        // A mismatched client is treated as a compromise signal: revoke the token.
        tokenRepository.removeRefreshToken(refreshToken);
        throw new InvalidClientException("Client does not own the presented refresh token");
    }
    //Make sure this client allows access token refreshing
    if (!client.isAllowRefresh()) {
        throw new InvalidClientException("Client does not allow refreshing access token!");
    }
    // clear out any access tokens
    if (client.isClearAccessTokensOnRefresh()) {
        tokenRepository.clearAccessTokensForRefreshToken(refreshToken);
    }
    if (refreshToken.isExpired()) {
        tokenRepository.removeRefreshToken(refreshToken);
        throw new InvalidTokenException("Expired refresh token: " + refreshTokenValue);
    }
    OAuth2AccessTokenEntity token = new OAuth2AccessTokenEntity();
    // get the stored scopes from the authentication holder's authorization request; these are the scopes associated with the refresh token
    Set<String> refreshScopesRequested = new HashSet<>(refreshToken.getAuthenticationHolder().getAuthentication().getOAuth2Request().getScope());
    Set<SystemScope> refreshScopes = scopeService.fromStrings(refreshScopesRequested);
    // remove any of the special system scopes
    refreshScopes = scopeService.removeReservedScopes(refreshScopes);
    Set<String> scopeRequested = authRequest.getScope() == null ? new HashSet<String>() : new HashSet<>(authRequest.getScope());
    Set<SystemScope> scope = scopeService.fromStrings(scopeRequested);
    // remove any of the special system scopes
    scope = scopeService.removeReservedScopes(scope);
    if (scope != null && !scope.isEmpty()) {
        // ensure a proper subset of scopes
        if (refreshScopes != null && refreshScopes.containsAll(scope)) {
            // set the scope of the new access token if requested
            token.setScope(scopeService.toStrings(scope));
        } else {
            String errorMsg = "Up-scoping is not allowed.";
            logger.error(errorMsg);
            throw new InvalidScopeException(errorMsg);
        }
    } else {
        // otherwise inherit the scope of the refresh token (if it's there -- this can return a null scope set)
        token.setScope(scopeService.toStrings(refreshScopes));
    }
    token.setClient(client);
    if (client.getAccessTokenValiditySeconds() != null) {
        // 1000L keeps the multiplication in long arithmetic to avoid int overflow.
        Date expiration = new Date(System.currentTimeMillis() + (client.getAccessTokenValiditySeconds() * 1000L));
        token.setExpiration(expiration);
    }
    if (client.isReuseRefreshToken()) {
        // if the client re-uses refresh tokens, do that
        token.setRefreshToken(refreshToken);
    } else {
        // otherwise, make a new refresh token
        OAuth2RefreshTokenEntity newRefresh = createRefreshToken(client, authHolder);
        token.setRefreshToken(newRefresh);
        // clean up the old refresh token
        tokenRepository.removeRefreshToken(refreshToken);
    }
    token.setAuthenticationHolder(authHolder);
    // Let the enhancer add claims (e.g. id tokens) before persisting.
    tokenEnhancer.enhance(token, authHolder.getAuthentication());
    tokenRepository.saveAccessToken(token);
    return token;
}
|
@Test
public void refreshAccessToken_requestingEmptyScope() {
    // Requesting an empty scope set must fall back to the scopes stored with the
    // refresh token, after reserved scopes have been filtered out.
    Set<String> emptyScope = newHashSet();
    tokenRequest.setScope(emptyScope);
    OAuth2AccessTokenEntity token = service.refreshAccessToken(refreshTokenValue, tokenRequest);
    verify(scopeService, atLeastOnce()).removeReservedScopes(anySet());
    assertThat(token.getScope(), equalTo(storedScope));
}
|
/**
 * For every element named {@code mainContainerNodeName} in the document, collects the
 * mapping from its {@code containerNodeName} descendants to their {@code childNodeName}
 * children, merged into a single map.
 *
 * @param document              the parsed XML document to scan.
 * @param mainContainerNodeName tag name of the outermost container elements.
 * @param containerNodeName     tag name of the container elements used as map keys.
 * @param childNodeName         tag name of the child elements collected per container.
 * @return the merged container-to-children map.
 */
public static Map<Node, List<Node>> getNestedChildrenNodesMap(Document document, String mainContainerNodeName, String containerNodeName, String childNodeName) {
    final Map<Node, List<Node>> result = new HashMap<>();
    asStream(document.getElementsByTagName(mainContainerNodeName))
            .forEach(mainContainerNode ->
                             result.putAll(getChildrenNodesMap(mainContainerNode, containerNodeName, childNodeName)));
    return result;
}
|
@Test
public void getNestedChildrenNodesMap() throws Exception {
    Document document = DOMParserUtil.getDocument(XML);
    // Top level: each CHILD_NODE under MAIN_NODE maps to exactly one TEST_NODE.
    Map<Node, List<Node>> retrieved = DOMParserUtil.getNestedChildrenNodesMap(document, MAIN_NODE, CHILD_NODE, TEST_NODE);
    assertThat(retrieved).isNotNull();
    assertThat(retrieved).hasSize(2);
    retrieved.forEach((childNode, testNodes) -> {
        assertThat(childNode.getNodeName()).isEqualTo(CHILD_NODE);
        assertThat(testNodes).hasSize(1);
        assertThat(testNodes.get(0).getNodeName()).isEqualTo(TEST_NODE);
    });
    // Deeper level: each NESTING_NODE under CHILD_NODE maps to exactly one NESTED_NODE.
    retrieved = DOMParserUtil.getNestedChildrenNodesMap(document, CHILD_NODE, NESTING_NODE, NESTED_NODE);
    assertThat(retrieved).isNotNull();
    assertThat(retrieved).hasSize(2);
    retrieved.forEach((nestingNode, nestedNodes) -> {
        assertThat(nestingNode.getNodeName()).isEqualTo(NESTING_NODE);
        assertThat(nestedNodes).hasSize(1);
        assertThat(nestedNodes.get(0).getNodeName()).isEqualTo(NESTED_NODE);
    });
}
|
/**
 * Creates a {@link JSONSchema} from a schema definition, falling back to
 * Jackson-based reader/writer implementations when the definition does not
 * supply its own.
 *
 * @param schemaDefinition the schema definition (POJO class plus optional reader/writer).
 * @param <T>              the POJO type described by the schema.
 * @return a JSON schema for the definition's POJO.
 */
public static <T> JSONSchema<T> of(SchemaDefinition<T> schemaDefinition) {
    final SchemaReader<T> schemaReader = schemaDefinition.getSchemaReaderOpt()
            .orElseGet(() -> new JacksonJsonReader<>(jsonMapper(), schemaDefinition.getPojo()));
    final SchemaWriter<T> schemaWriter = schemaDefinition.getSchemaWriterOpt()
            .orElseGet(() -> new JacksonJsonWriter<>(jsonMapper()));
    return new JSONSchema<>(parseSchemaInfo(schemaDefinition, SchemaType.JSON),
            schemaDefinition.getPojo(), schemaReader, schemaWriter);
}
|
@Test
public void testAllowNullNestedClasses() {
    // POJOs containing nested objects and nested lists must round-trip through
    // encode/decode without loss.
    JSONSchema<NestedBar> jsonSchema = JSONSchema.of(SchemaDefinition.<NestedBar>builder().withPojo(NestedBar.class).build());
    JSONSchema<NestedBarList> listJsonSchema = JSONSchema.of(SchemaDefinition.<NestedBarList>builder().withPojo(NestedBarList.class).build());
    Bar bar = new Bar();
    bar.setField1(true);
    NestedBar nested = new NestedBar();
    nested.setField1(true);
    nested.setNested(bar);
    byte[] bytes = jsonSchema.encode(nested);
    Assert.assertTrue(bytes.length > 0);
    Assert.assertEquals(jsonSchema.decode(bytes), nested);
    List<Bar> list = Collections.singletonList(bar);
    NestedBarList nestedList = new NestedBarList();
    nestedList.setField1(true);
    nestedList.setList(list);
    bytes = listJsonSchema.encode(nestedList);
    Assert.assertTrue(bytes.length > 0);
    Assert.assertEquals(listJsonSchema.decode(bytes), nestedList);
}
|
/**
 * Builds a StarRocks data sink from the pipeline context.
 *
 * <p>Validates all options except the table-create and raw sink prefixes, then
 * assembles connector options, table-creation and schema-change configuration, and
 * resolves the pipeline-local time zone (falling back to the system default when the
 * configured value equals the option's default).
 *
 * @param context factory context carrying the pipeline configuration.
 * @return a configured {@link StarRocksDataSink}.
 */
@Override
public DataSink createDataSink(Context context) {
    FactoryHelper.createFactoryHelper(this, context)
            .validateExcept(TABLE_CREATE_PROPERTIES_PREFIX, SINK_PROPERTIES_PREFIX);
    StarRocksSinkOptions connectorOptions =
            buildSinkConnectorOptions(context.getFactoryConfiguration());
    TableCreateConfig createConfig =
            TableCreateConfig.from(context.getFactoryConfiguration());
    SchemaChangeConfig changeConfig =
            SchemaChangeConfig.from(context.getFactoryConfiguration());
    String configuredZone = context.getFactoryConfiguration().get(PIPELINE_LOCAL_TIME_ZONE);
    ZoneId zoneId;
    if (PIPELINE_LOCAL_TIME_ZONE.defaultValue().equals(configuredZone)) {
        zoneId = ZoneId.systemDefault();
    } else {
        zoneId = ZoneId.of(configuredZone);
    }
    return new StarRocksDataSink(connectorOptions, createConfig, changeConfig, zoneId);
}
|
@Test
void testCreateDataSink() {
    // The "starrocks" identifier must resolve to the StarRocks factory, and a
    // minimal valid configuration must yield a StarRocksDataSink.
    DataSinkFactory sinkFactory =
        FactoryDiscoveryUtils.getFactoryByIdentifier("starrocks", DataSinkFactory.class);
    Assertions.assertThat(sinkFactory).isInstanceOf(StarRocksDataSinkFactory.class);
    Configuration conf =
        Configuration.fromMap(
            ImmutableMap.<String, String>builder()
                .put("jdbc-url", "jdbc:mysql://127.0.0.1:9030")
                .put("load-url", "127.0.0.1:8030")
                .put("username", "root")
                .put("password", "")
                .build());
    DataSink dataSink =
        sinkFactory.createDataSink(
            new FactoryHelper.DefaultContext(
                conf, conf, Thread.currentThread().getContextClassLoader()));
    Assertions.assertThat(dataSink).isInstanceOf(StarRocksDataSink.class);
}
|
/**
 * Tests whether the given bean's runtime class is considered primitive, delegating
 * to the {@code Class}-based overload.
 *
 * @param bean the object to inspect (must not be null).
 * @return true when the bean's class qualifies as primitive.
 */
public static boolean isPrimitive(Object bean) {
    final Class<?> beanClass = bean.getClass();
    return isPrimitive(beanClass);
}
|
@Test
public void testIsPrimitive() {
    // All eight Java primitive types qualify.
    Assert.assertTrue(ReflectKit.isPrimitive(int.class));
    Assert.assertTrue(ReflectKit.isPrimitive(long.class));
    Assert.assertTrue(ReflectKit.isPrimitive(boolean.class));
    Assert.assertTrue(ReflectKit.isPrimitive(short.class));
    Assert.assertTrue(ReflectKit.isPrimitive(byte.class));
    Assert.assertTrue(ReflectKit.isPrimitive(char.class));
    // Wrapper and reference types do not.
    Assert.assertFalse(ReflectKit.isPrimitive(Integer.class));
    Assert.assertFalse(ReflectKit.isPrimitive(Date.class));
    Assert.assertFalse(ReflectKit.isPrimitive(BigDecimal.class));
}
|
/**
 * Parses a logical expression over {@link Predicate} class names into a composed
 * {@link Predicate}, using a shunting-yard-style algorithm with a predicate stack and
 * an operator stack.
 *
 * <p>Tokens are either operator characters (from {@code OPERATORS}, including
 * parentheses) or fully-qualified class names, which are instantiated reflectively via
 * their no-arg constructor. Operator precedence is taken from
 * {@code OPERATOR_PRECEDENCE}.
 *
 * @param expression the expression to parse (whitespace is stripped first).
 * @return the composed predicate.
 * @throws RuntimeException if a named class does not implement {@link Predicate}, cannot
 *     be instantiated, if the expression leaves more than one predicate on the stack, or
 *     (via {@link java.util.EmptyStackException} from stack operations) if parentheses
 *     are unbalanced.
 */
public static Predicate parse(String expression)
{
    final Stack<Predicate> predicateStack = new Stack<>();
    final Stack<Character> operatorStack = new Stack<>();
    final String trimmedExpression = TRIMMER_PATTERN.matcher(expression).replaceAll("");
    final StringTokenizer tokenizer = new StringTokenizer(trimmedExpression, OPERATORS, true);
    // isTokenMode == true: consume the next token from the tokenizer.
    // isTokenMode == false: re-process an operator popped back off the stack after a
    // precedence-forced evaluation.
    boolean isTokenMode = true;
    while (true)
    {
        final Character operator;
        final String token;
        if (isTokenMode)
        {
            if (tokenizer.hasMoreTokens())
            {
                token = tokenizer.nextToken();
            }
            else
            {
                break;
            }
            if (OPERATORS.contains(token))
            {
                operator = token.charAt(0);
            }
            else
            {
                operator = null;
            }
        }
        else
        {
            operator = operatorStack.pop();
            token = null;
        }
        isTokenMode = true;
        if (operator == null)
        {
            // Non-operator token: treat as a Predicate class name and instantiate it.
            try
            {
                predicateStack.push(Class.forName(token).asSubclass(Predicate.class).getDeclaredConstructor().newInstance());
            }
            catch (ClassCastException e)
            {
                throw new RuntimeException(token + " must implement " + Predicate.class.getName(), e);
            }
            catch (Exception e)
            {
                throw new RuntimeException(e);
            }
        }
        else
        {
            if (operatorStack.empty() || operator == '(')
            {
                operatorStack.push(operator);
            }
            else if (operator == ')')
            {
                // Reduce until the matching '('; peek() on an empty stack throws
                // EmptyStackException for an unmatched ')'.
                while (operatorStack.peek() != '(')
                {
                    evaluate(predicateStack, operatorStack);
                }
                operatorStack.pop();
            }
            else
            {
                // Lower-precedence operator forces evaluation of the pending one;
                // the popped operator is re-processed on the next iteration.
                if (OPERATOR_PRECEDENCE.get(operator) < OPERATOR_PRECEDENCE.get(operatorStack.peek()))
                {
                    evaluate(predicateStack, operatorStack);
                    isTokenMode = false;
                }
                operatorStack.push(operator);
            }
        }
    }
    // Apply any remaining operators.
    while (!operatorStack.empty())
    {
        evaluate(predicateStack, operatorStack);
    }
    if (predicateStack.size() > 1)
    {
        throw new RuntimeException("Invalid logical expression");
    }
    return predicateStack.pop();
}
|
// An unmatched ')' must fail: the parser reduces toward a '(' that is never found,
// surfacing as a RuntimeException (EmptyStackException).
@Test(expectedExceptions = RuntimeException.class)
public void testMissingOpenParen()
{
    PredicateExpressionParser.parse("com.linkedin.data.it.AlwaysFalsePredicate)");
}
|
/**
 * Builds the timer key used to track a group member's session timeout.
 *
 * @param groupId  the consumer group id.
 * @param memberId the member id within the group.
 * @return the key in the form {@code session-timeout-<groupId>-<memberId>}.
 */
public static String groupSessionTimeoutKey(String groupId, String memberId) {
    return String.format("session-timeout-%s-%s", groupId, memberId);
}
|
@Test
public void testConsumerGroupMemberUsingClassicProtocolFencedWhenSessionTimeout() {
    // A classic-protocol member whose session timer fires must be fenced: its
    // assignment/subscription records are tombstoned and the group epoch is bumped.
    String groupId = "group-id";
    String memberId = Uuid.randomUuid().toString();
    int sessionTimeout = 5000;
    List<ConsumerGroupMemberMetadataValue.ClassicProtocol> protocols = Collections.singletonList(
        new ConsumerGroupMemberMetadataValue.ClassicProtocol()
            .setName("range")
            .setMetadata(Utils.toArray(ConsumerProtocol.serializeSubscription(
                new ConsumerPartitionAssignor.Subscription(
                    Collections.singletonList("foo"),
                    null,
                    Collections.emptyList()
                )
            )))
    );
    // Consumer group with a member using the classic protocol.
    GroupMetadataManagerTestContext context = new GroupMetadataManagerTestContext.Builder()
        .withConsumerGroupAssignors(Collections.singletonList(new MockPartitionAssignor("range")))
        .withConsumerGroup(new ConsumerGroupBuilder(groupId, 10)
            .withMember(new ConsumerGroupMember.Builder(memberId)
                .setClassicMemberMetadata(
                    new ConsumerGroupMemberMetadataValue.ClassicMemberMetadata()
                        .setSessionTimeoutMs(sessionTimeout)
                        .setSupportedProtocols(protocols)
                )
                .setMemberEpoch(10)
                .build()))
        .build();
    // Heartbeat to schedule the session timeout.
    HeartbeatRequestData request = new HeartbeatRequestData()
        .setGroupId(groupId)
        .setMemberId(memberId)
        .setGenerationId(10);
    context.sendClassicGroupHeartbeat(request);
    context.assertSessionTimeout(groupId, memberId, sessionTimeout);
    // Advance clock by session timeout + 1.
    List<ExpiredTimeout<Void, CoordinatorRecord>> timeouts = context.sleep(sessionTimeout + 1);
    // The member is fenced from the group.
    assertEquals(1, timeouts.size());
    ExpiredTimeout<Void, CoordinatorRecord> timeout = timeouts.get(0);
    assertEquals(groupSessionTimeoutKey(groupId, memberId), timeout.key);
    assertRecordsEquals(
        Arrays.asList(
            // The member is removed.
            GroupCoordinatorRecordHelpers.newConsumerGroupCurrentAssignmentTombstoneRecord(groupId, memberId),
            GroupCoordinatorRecordHelpers.newConsumerGroupTargetAssignmentTombstoneRecord(groupId, memberId),
            GroupCoordinatorRecordHelpers.newConsumerGroupMemberSubscriptionTombstoneRecord(groupId, memberId),
            // The group epoch is bumped.
            GroupCoordinatorRecordHelpers.newConsumerGroupEpochRecord(groupId, 11)
        ),
        timeout.result.records()
    );
}
|
/**
 * Accepts a task for serialized execution: the task is enqueued first so it is
 * visible to the drain loop, then {@code schedule} is invoked to trigger processing.
 *
 * @param r the task to run.
 */
@Override
public void execute(Runnable r) {
    runQueue.add(r);
    schedule(r);
}
|
@Test
void testSerial() throws InterruptedException {
    // Submits many tasks and checks two serialization properties: no two tasks run
    // concurrently (semaphore), and tasks run in submission order (counter == index).
    int total = 10000;
    Map<String, Integer> map = new HashMap<>();
    map.put("val", 0);
    Semaphore semaphore = new Semaphore(1);
    CountDownLatch startLatch = new CountDownLatch(1);
    AtomicBoolean failed = new AtomicBoolean(false);
    for (int i = 0; i < total; i++) {
        final int index = i;
        serializingExecutor.execute(() -> {
            // tryAcquire failing means another task holds the permit -> concurrency.
            if (!semaphore.tryAcquire()) {
                System.out.println("Concurrency");
                failed.set(true);
            }
            try {
                startLatch.await();
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
            int num = map.get("val");
            map.put("val", num + 1);
            // Out-of-order execution shows up as a counter/index mismatch.
            if (num != index) {
                System.out.println("Index error. Excepted :" + index + " but actual: " + num);
                failed.set(true);
            }
            semaphore.release();
        });
    }
    startLatch.countDown();
    await().until(() -> map.get("val") == total);
    Assertions.assertFalse(failed.get());
}
|
/**
 * Returns a view of the float array as an iterable whose comparisons use the
 * exact-equality correspondence ({@code EXACT_EQUALITY_CORRESPONDENCE}).
 */
public FloatArrayAsIterable usingExactEquality() {
    return new FloatArrayAsIterable(EXACT_EQUALITY_CORRESPONDENCE, iterableSubject());
}
|
@Test
public void usingExactEquality_contains_otherTypes_doubleNotSupported() {
    // Passing a double expectation into an exact float-equality assertion must fail
    // with an IllegalArgumentException explaining that doubles are unsupported.
    double expected = 2.0;
    float[] actual = array(1.0f, 2.0f, 3.0f);
    expectFailureWhenTestingThat(actual).usingExactEquality().contains(expected);
    assertFailureKeys(
        "value of",
        "expected to contain",
        "testing whether",
        "but was",
        "additionally, one or more exceptions were thrown while comparing elements",
        "first exception");
    assertThatFailure()
        .factValue("first exception")
        .startsWith(
            "compare("
                + actual[0]
                + ", "
                + expected
                + ") threw java.lang.IllegalArgumentException");
    assertThatFailure()
        .factValue("first exception")
        .contains(
            "Expected value in assertion using exact float equality was a double, which is not "
                + "supported as a double may not have an exact float representation");
}
|
@Udf(description = "Returns a masked version of the input string. All characters except for the"
    + " last n will be replaced according to the default masking rules.")
@SuppressWarnings("MethodMayBeStatic") // Invoked via reflection
public String mask(
    @UdfParameter("input STRING to be masked") final String input,
    @UdfParameter("number of characters to keep unmasked at the end") final int numChars
) {
    // Delegates to doMask with a fresh default Masker; numChars is the count of
    // trailing characters left unmasked.
    return doMask(new Masker(), input, numChars);
}
|
@Test
public void shouldReturnNullForNullInput() {
    // A null input string must pass through as null rather than throwing.
    final String result = udf.mask(null, 5);
    assertThat(result, is(nullValue()));
}
|
// Saves the namespace image unless a sufficiently recent checkpoint already
// exists. Returns true if a new image was saved, false if the save was skipped.
public synchronized boolean saveNamespace(long timeWindow, long txGap,
    FSNamesystem source) throws IOException {
    // Only consider skipping when a time window or transaction gap is configured.
    if (timeWindow > 0 || txGap > 0) {
        final FSImageStorageInspector inspector = storage.readAndInspectDirs(
            EnumSet.of(NameNodeFile.IMAGE, NameNodeFile.IMAGE_ROLLBACK),
            StartupOption.REGULAR);
        FSImageFile image = inspector.getLatestImages().get(0);
        File imageFile = image.getFile();
        final long checkpointTxId = image.getCheckpointTxId();
        final long checkpointAge = Time.now() - imageFile.lastModified();
        // Skip only when BOTH hold: the latest image is newer than timeWindow
        // seconds AND its txid is within txGap of the last applied/written txid.
        if (checkpointAge <= timeWindow * 1000 &&
            checkpointTxId >= this.getCorrectLastAppliedOrWrittenTxId() - txGap) {
            return false; // existing checkpoint is fresh enough; nothing saved
        }
    }
    saveNamespace(source, NameNodeFile.IMAGE, null);
    return true;
}
|
@Test
public void testSaveAndLoadErasureCodingPolicies() throws IOException{
    // End-to-end check that EC policies (built-in and user-added) survive a
    // saveNamespace + NameNode restart cycle.
    Configuration conf = new Configuration();
    final int blockSize = 16 * 1024 * 1024;
    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
    try (MiniDFSCluster cluster =
        new MiniDFSCluster.Builder(conf).numDataNodes(10).build()) {
        cluster.waitActive();
        DistributedFileSystem fs = cluster.getFileSystem();
        DFSTestUtil.enableAllECPolicies(fs);
        // Save namespace and restart NameNode
        fs.setSafeMode(SafeModeAction.ENTER);
        fs.saveNamespace();
        fs.setSafeMode(SafeModeAction.LEAVE);
        cluster.restartNameNodes();
        cluster.waitActive();
        // Built-in policies must all be present after the restart.
        assertEquals("Erasure coding policy number should match",
            SystemErasureCodingPolicies.getPolicies().size(),
            ErasureCodingPolicyManager.getInstance().getPolicies().length);
        // Add new erasure coding policy
        ECSchema newSchema = new ECSchema("rs", 5, 4);
        ErasureCodingPolicy newPolicy =
            new ErasureCodingPolicy(newSchema, 2 * 1024, (byte) 254);
        ErasureCodingPolicy[] policies = new ErasureCodingPolicy[]{newPolicy};
        AddErasureCodingPolicyResponse[] ret =
            fs.addErasureCodingPolicies(policies);
        assertEquals(1, ret.length);
        assertEquals(true, ret[0].isSucceed());
        newPolicy = ret[0].getPolicy();
        // Save namespace and restart NameNode
        fs.setSafeMode(SafeModeAction.ENTER);
        fs.saveNamespace();
        fs.setSafeMode(SafeModeAction.LEAVE);
        cluster.restartNameNodes();
        cluster.waitActive();
        // The user-added policy must be persisted alongside the built-ins.
        assertEquals("Erasure coding policy number should match",
            SystemErasureCodingPolicies.getPolicies().size() + 1,
            ErasureCodingPolicyManager.getInstance().getPolicies().length);
        ErasureCodingPolicy ecPolicy =
            ErasureCodingPolicyManager.getInstance().getByID(newPolicy.getId());
        assertEquals("Newly added erasure coding policy is not found",
            newPolicy, ecPolicy);
        // Newly added policies start out disabled until explicitly enabled.
        assertEquals(
            "Newly added erasure coding policy should be of disabled state",
            ErasureCodingPolicyState.DISABLED,
            DFSTestUtil.getECPolicyState(ecPolicy));
        // Test enable/disable/remove user customized erasure coding policy
        testChangeErasureCodingPolicyState(cluster, blockSize, newPolicy, false);
        // Test enable/disable default built-in erasure coding policy
        testChangeErasureCodingPolicyState(cluster, blockSize,
            SystemErasureCodingPolicies.getByID((byte) 1), true);
        // Test enable/disable non-default built-in erasure coding policy
        testChangeErasureCodingPolicyState(cluster, blockSize,
            SystemErasureCodingPolicies.getByID((byte) 2), false);
    }
}
|
// PUT handler that updates a Neutron subnet by id. When this node is not the
// HA-active instance, the request is forwarded to the active node instead.
@PUT
@Path("{id}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Response updateSubnet(@PathParam("id") String id, InputStream input) throws IOException {
    log.trace(String.format(MESSAGE, "UPDATE " + id));
    String inputStr = IOUtils.toString(input, REST_UTF8);
    // Not active and a real active IP exists: relay the PUT to the active node.
    if (!haService.isActive()
        && !DEFAULT_ACTIVE_IP_ADDRESS.equals(haService.getActiveIp())) {
        return syncPut(haService, SUBNETS, id, inputStr);
    }
    final NeutronSubnet subnet = (NeutronSubnet)
        jsonToModelEntity(inputStr, NeutronSubnet.class);
    adminService.updateSubnet(subnet);
    return status(Response.Status.OK).build();
}
|
@Test
public void testUpdateSubnetWithNonexistId() {
    // The admin service throws IllegalArgumentException for an unknown id; the
    // resource must translate that into a 400 Bad Request.
    expect(mockOpenstackHaService.isActive()).andReturn(true).anyTimes();
    replay(mockOpenstackHaService);
    mockOpenstackNetworkAdminService.updateSubnet(anyObject());
    expectLastCall().andThrow(new IllegalArgumentException());
    replay(mockOpenstackNetworkAdminService);
    final WebTarget wt = target();
    InputStream jsonStream = OpenstackSubnetWebResourceTest.class
        .getResourceAsStream("openstack-subnet.json");
    Response response = wt.path(PATH + "/non-exist-id")
        .request(MediaType.APPLICATION_JSON_TYPE)
        .put(Entity.json(jsonStream));
    final int status = response.getStatus();
    assertThat(status, is(400));
    verify(mockOpenstackNetworkAdminService);
}
|
// Throws SMS_TEMPLATE_CODE_DUPLICATE when {@code code} is already used by a
// template other than the one identified by {@code id}.
@VisibleForTesting
public void validateSmsTemplateCodeDuplicate(Long id, String code) {
    SmsTemplateDO template = smsTemplateMapper.selectByCode(code);
    // No template with this code yet, so there is nothing to conflict with.
    if (template == null) {
        return;
    }
    // A null id means a create (any existing template with the code is a
    // duplicate); otherwise the code may only belong to the record being
    // updated, i.e. the one with the same id.
    if (id == null || !template.getId().equals(id)) {
        throw exception(SMS_TEMPLATE_CODE_DUPLICATE, code);
    }
}
|
@Test
public void testValidateDictDataValueUnique_valueDuplicateForUpdate() {
    // Prepare parameters: a random id distinct from the stored template's id.
    Long id = randomLongId();
    String code = randomString();
    // Mock data: insert an existing template that already uses the code.
    smsTemplateMapper.insert(randomSmsTemplateDO(o -> o.setCode(code)));
    // Invoke and expect the duplicate-code service exception.
    assertServiceException(() -> smsTemplateService.validateSmsTemplateCodeDuplicate(id, code),
        SMS_TEMPLATE_CODE_DUPLICATE, code);
}
|
// Resolves the command class for the request and instantiates it via its
// no-arg constructor.
Command getCommand(String request) {
    var commandClass = getCommandClass(request);
    try {
        return (Command) commandClass.getDeclaredConstructor().newInstance();
    } catch (Exception e) {
        // Reflective failures (missing ctor, access, instantiation) are wrapped
        // in the application's unchecked exception type.
        throw new ApplicationException(e);
    }
}
|
@Test
void testGetCommandUnknown() {
    // Unrecognized requests resolve to an UnknownCommand instance, not null.
    Command command = dispatcher.getCommand("Unknown");
    assertNotNull(command);
    assertTrue(command instanceof UnknownCommand);
}
|
// Quarkus startup hook: lazily starts only the components enabled in the
// build-time configuration (lazy Instance#get avoids creating disabled beans).
void startup(@Observes StartupEvent event) {
    if (jobRunrBuildTimeConfiguration.backgroundJobServer().enabled()) {
        backgroundJobServerInstance.get().start();
    }
    if (jobRunrBuildTimeConfiguration.dashboard().enabled()) {
        dashboardWebServerInstance.get().start();
    }
}
|
@Test
void jobRunrStarterDoesNotStartDashboardIfNotConfigured() {
    when(dashboardConfiguration.enabled()).thenReturn(false);
    jobRunrStarter.startup(new StartupEvent());
    // The dashboard bean must not even be resolved when the feature is disabled.
    verify(dashboardWebServerInstance, never()).get();
}
|
/**
 * Compiles the given pattern into an {@link NFAFactory}. A {@code null}
 * pattern yields a factory that produces empty NFAs.
 *
 * @param pattern the pattern to compile, may be {@code null}
 * @param timeoutHandling whether timed-out partial matches should be reported
 * @return a factory producing NFAs for the pattern
 */
@SuppressWarnings("unchecked")
public static <T> NFAFactory<T> compileFactory(
        final Pattern<T, ?> pattern, boolean timeoutHandling) {
    // Guard clause: no pattern means an empty NFA with no states or windows.
    if (pattern == null) {
        return new NFAFactoryImpl<>(
                0,
                Collections.<String, Long>emptyMap(),
                Collections.<State<T>>emptyList(),
                timeoutHandling);
    }
    // Run the compiler once and capture its state machine and window times.
    final NFAFactoryCompiler<T> compiler = new NFAFactoryCompiler<>(pattern);
    compiler.compileFactory();
    return new NFAFactoryImpl<>(
            compiler.getWindowTime(),
            compiler.getWindowTimes(),
            compiler.getStates(),
            timeoutHandling);
}
|
@Test
public void testWindowTimesCorrectlySet() {
    // Each within(PREVIOUS_AND_CURRENT) clause must be recorded against the
    // stage it applies to in the compiler's window-times map.
    Pattern<Event, ?> pattern =
        Pattern.<Event>begin("start")
            .followedBy("middle")
            .within(Time.seconds(10), WithinType.PREVIOUS_AND_CURRENT)
            .followedBy("then")
            .within(Time.seconds(20), WithinType.PREVIOUS_AND_CURRENT)
            .followedBy("end");
    NFACompiler.NFAFactoryCompiler<Event> factory =
        new NFACompiler.NFAFactoryCompiler<>(pattern);
    factory.compileFactory();
    Map<String, Long> expectedWindowTimes = new HashMap<>();
    expectedWindowTimes.put("middle", Time.seconds(10).toMilliseconds());
    expectedWindowTimes.put("then", Time.seconds(20).toMilliseconds());
    assertEquals(expectedWindowTimes, factory.getWindowTimes());
}
|
// Encodes a flow criterion into JSON by delegating to the per-type helper.
@Override
public ObjectNode encode(Criterion criterion, CodecContext context) {
    EncodeCriterionCodecHelper encoder = new EncodeCriterionCodecHelper(criterion, context);
    return encoder.encode();
}
|
@Test
public void matchIPv6SrcTest() {
    // Round-trip: an IPv6-source criterion encodes to JSON that matches itself.
    Criterion criterion = Criteria.matchIPv6Src(ipPrefix6);
    ObjectNode result = criterionCodec.encode(criterion, context);
    assertThat(result, matchesCriterion(criterion));
}
|
// Filters each field's terms independently (via the single-list overload) and
// concatenates the surviving terms from all fields into one list.
public List<ParsedTerm> filterElementsContainingUsefulInformation(final Map<String, List<ParsedTerm>> parsedTermsGroupedByField) {
    return parsedTermsGroupedByField.values()
        .stream()
        .flatMap(termsForField -> filterElementsContainingUsefulInformation(termsForField).stream())
        .collect(Collectors.toList());
}
|
@Test
void doesNothingOnEmptyInput() {
    // An empty grouping map must produce an empty result, not null or an error.
    assertThat(toTest.filterElementsContainingUsefulInformation(Map.of()))
        .isEmpty();
}
|
// Recursively copies srcDir into destDir. Thin wrapper over Commons IO
// FileUtils.copyDirectory; propagates its IOException.
public static void copyDirectory(File srcDir, File destDir) throws IOException {
    FileUtils.copyDirectory(srcDir, destDir);
}
|
@Test
void testCopyDirectory() throws IOException {
    // Create a source dir with one temp file, copy it, and verify the file
    // exists under the destination; clean both dirs up afterwards.
    Path srcPath = Paths.get(TMP_PATH, UUID.randomUUID().toString());
    DiskUtils.forceMkdir(srcPath.toString());
    File nacos = DiskUtils.createTmpFile(srcPath.toString(), "nacos", ".ut");
    Path destPath = Paths.get(TMP_PATH, UUID.randomUUID().toString());
    DiskUtils.copyDirectory(srcPath.toFile(), destPath.toFile());
    File file = Paths.get(destPath.toString(), nacos.getName()).toFile();
    assertTrue(file.exists());
    DiskUtils.deleteDirectory(srcPath.toString());
    DiskUtils.deleteDirectory(destPath.toString());
}
|
// Decides whether metric aggregation should be enabled; only tables with a
// realtime component (REALTIME or HYBRID) are eligible.
@Override
public void run()
    throws InvalidInputException {
    String tableType = _input.getTableType();
    final boolean hasRealtimeComponent =
        tableType.equalsIgnoreCase(REALTIME) || tableType.equalsIgnoreCase(HYBRID);
    if (hasRealtimeComponent) {
        _output.setAggregateMetrics(shouldAggregate(_input));
    }
}
|
@Test
public void testRunNonAggregate()
    throws Exception {
    // One query uses avg(b) (not a plain sum over all metrics), so the rule
    // must decide NOT to aggregate metrics.
    Set<String> metrics = ImmutableSet.of("a", "b", "c");
    InputManager input =
        createInput(metrics, "select sum(a), sum(b), sum(c) from tableT", "select sum(a), avg(b) from tableT2");
    ConfigManager output = new ConfigManager();
    AggregateMetricsRule rule = new AggregateMetricsRule(input, output);
    rule.run();
    assertFalse(output.isAggregateMetrics());
}
|
// Discards all log events captured so far in the static logList buffer.
public static void clear() {
    logList.clear();
}
|
@Test
void testClear() {
    // Capture one event, then verify clear() empties the static buffer.
    DubboAppender.doStart();
    DubboAppender appender = new DubboAppender();
    appender.append(event);
    assumeTrue(1 == DubboAppender.logList.size());
    DubboAppender.clear();
    assertThat(DubboAppender.logList, hasSize(0));
}
|
// Appends OP_0/OP_FALSE to the script (encoded as the number zero push).
public ScriptBuilder opFalse() {
    return number(0); // push OP_0/OP_FALSE
}
|
@Test
public void testOpFalse() {
    // opFalse() must serialize to exactly the single OP_FALSE byte.
    byte[] expected = new byte[] { OP_FALSE };
    byte[] s = new ScriptBuilder().opFalse().build().program();
    assertArrayEquals(expected, s);
}
|
// Returns true if the throwable itself, or any throwable in its cause chain,
// is an instance of causeType (subtypes included).
public static boolean hasCauseOf(Throwable t, Class<? extends Throwable> causeType) {
    for (Throwable link : Throwables.getCausalChain(t)) {
        if (causeType.isInstance(link)) {
            return true;
        }
    }
    return false;
}
|
@Test
public void hasCauseOf_returnsTrueIfTheExceptionItselfIsSubtypeOfTheProvidedType() {
    // The top-level exception counts: both an exact match and a subtype match.
    assertThat(ExceptionUtils.hasCauseOf(new SocketTimeoutException("asdasd"), IOException.class)).isTrue();
    assertThat(ExceptionUtils.hasCauseOf(new IOException("asdasd"), IOException.class)).isTrue();
}
|
// Returns the REST endpoint URL constant for this message header.
@Override
public String getTargetRestEndpointURL() {
    return URL;
}
|
@Test
void testURL() {
    // The endpoint path must end with the expected "yarn-stop" suffix.
    assertThat(instance.getTargetRestEndpointURL()).endsWith("yarn-stop");
}
|
// Creates a book via the library client (synchronous Retrofit call) and writes
// an audit record for both success and error responses. Returns the client's
// response body on success, null when the call failed without an error body,
// or a generic error LibResponse when an exception is thrown.
public LibResponse createBook(BookDto bookDto) {
    log.info("Book DTO from POST request : {}", bookDto);
    LibResponse resp = null;
    AuditDto audit = null;
    try {
        Call<LibResponse> callLibResponse = libraryClient.createNewBook(bookDto);
        Response<LibResponse> libResponse = callLibResponse.execute();
        if (libResponse.isSuccessful()) {
            resp = libResponse.body();
            audit = auditMapper.populateAuditLogForPostAndPut(bookDto, resp, HttpMethod.POST);
        } else {
            log.error("Error calling library client: {}", libResponse.errorBody());
            if (Objects.nonNull(libResponse.errorBody())) {
                // NOTE(review): a new ObjectMapper is built per call here;
                // consider a shared static instance if this path is hot.
                audit = auditMapper.populateAuditLogForException(
                    new ObjectMapper().writeValueAsString(bookDto),
                    HttpMethod.POST, libResponse.errorBody().string());
            }
        }
        // Persist the audit entry when either branch produced one.
        if (Objects.nonNull(audit)) {
            AuditLog savedObj = auditRepository.save(libraryMapper.auditDtoToAuditLog(audit));
            log.info("Saved into audit successfully: {}", savedObj);
        }
        return resp;
    } catch (Exception ex) {
        // Best-effort API: failures are logged and reported as an error response.
        log.error("Error handling retrofit call for createNewBook", ex);
        return new LibResponse(Constants.ERROR, "Failed");
    }
}
|
@Test
@DisplayName("Successful call to create a book")
public void postBookRequestTest() throws Exception {
    // Load the request fixture and stub the client to return a success response.
    String booksResponse = getBooksResponse("/request/postBook.json");
    BookDto bookDto = new ObjectMapper().readValue(booksResponse, BookDto.class);
    LibResponse response = new LibResponse("Success", "Book created successfully");
    when(libraryClient.createNewBook(bookDto)).thenReturn(Calls.response(response));
    doReturn(null).when(auditRepository).save(any());
    LibResponse libResponse = libraryAuditService.createBook(bookDto);
    assertAll(
        () -> assertNotNull(libResponse),
        () -> assertTrue(libResponse.getResponseCode().equals("Success"))
    );
}
|
// Transforms one decoded row into the reused Result. A decoded row may expand
// into multiple records (stored under MULTIPLE_RECORDS_KEY), in which case
// each expanded record is processed individually.
public void processRow(GenericRow decodedRow, Result reusedResult)
    throws Exception {
    // Result is reused across rows to avoid per-row allocation.
    reusedResult.reset();
    if (_complexTypeTransformer != null) {
        // TODO: consolidate complex type transformer into composite type transformer
        decodedRow = _complexTypeTransformer.transform(decodedRow);
    }
    Collection<GenericRow> rows = (Collection<GenericRow>) decodedRow.getValue(GenericRow.MULTIPLE_RECORDS_KEY);
    if (rows != null) {
        for (GenericRow row : rows) {
            processPlainRow(row, reusedResult);
        }
    } else {
        processPlainRow(decodedRow, reusedResult);
    }
}
|
@Test
public void testMultipleRow()
    throws Exception {
    // A row carrying MULTIPLE_RECORDS_KEY must fan out into one transformed
    // row per embedded record, with nothing skipped.
    TableConfig config = createTestTableConfig();
    Schema schema = Fixtures.createSchema();
    TransformPipeline pipeline = new TransformPipeline(config, schema);
    GenericRow multipleRow = Fixtures.createMultipleRow(9527);
    Collection<GenericRow> rows = (Collection<GenericRow>) multipleRow.getValue(GenericRow.MULTIPLE_RECORDS_KEY);
    TransformPipeline.Result result = new TransformPipeline.Result();
    pipeline.processRow(multipleRow, result);
    Assert.assertNotNull(result);
    Assert.assertEquals(result.getTransformedRows().size(), rows.size());
    Assert.assertEquals(result.getSkippedRowCount(), 0);
    Assert.assertEquals(result.getTransformedRows(), rows);
}
|
// Parses a single CSV cell into the Java value matching the field's schema
// type. Throws IllegalArgumentException (with the original parse failure as
// cause) on malformed input, and UnsupportedOperationException for types that
// require custom record parsing.
static Object parseCell(String cell, Schema.Field field) {
    Schema.FieldType fieldType = field.getType();
    try {
        switch (fieldType.getTypeName()) {
            case STRING:
                return cell;
            case INT16:
                return Short.parseShort(cell);
            case INT32:
                return Integer.parseInt(cell);
            case INT64:
                return Long.parseLong(cell);
            case BOOLEAN:
                return Boolean.parseBoolean(cell);
            case BYTE:
                return Byte.parseByte(cell);
            case DECIMAL:
                return new BigDecimal(cell);
            case DOUBLE:
                return Double.parseDouble(cell);
            case FLOAT:
                return Float.parseFloat(cell);
            case DATETIME:
                return Instant.parse(cell);
            default:
                throw new UnsupportedOperationException(
                    "Unsupported type: " + fieldType + ", consider using withCustomRecordParsing");
        }
    } catch (IllegalArgumentException e) {
        // Preserve the original exception as the cause instead of discarding it,
        // so callers can see where the parse actually failed.
        throw new IllegalArgumentException(
            e.getMessage() + " field " + field.getName() + " was received -- type mismatch", e);
    }
}
|
@Test
public void givenBigDecimalWithSurroundingSpaces_throws() {
    // BigDecimal(String) rejects surrounding whitespace; the helper must
    // surface that as an IllegalArgumentException.
    BigDecimal decimal = new BigDecimal("123.456");
    DefaultMapEntry cellToExpectedValue = new DefaultMapEntry(" 123.456 ", decimal);
    Schema schema =
        Schema.builder().addDecimalField("a_decimal").addStringField("a_string").build();
    assertThrows(
        IllegalArgumentException.class,
        () ->
            CsvIOParseHelpers.parseCell(
                cellToExpectedValue.getKey().toString(), schema.getField("a_decimal")));
}
|
// Proto-aware equality assertion. Three cases: same class but different
// descriptors (possible with DynamicMessage) gets a descriptor-revealing
// failure; values not comparable as same-descriptor messages fall back to
// plain object equality; otherwise a field-by-field diff is reported.
@Override
public void isEqualTo(@Nullable Object expected) {
    if (sameClassMessagesWithDifferentDescriptors(actual, expected)) {
        // This can happen with DynamicMessages, and it's very confusing if they both have the
        // same string.
        failWithoutActual(
            simpleFact("Not true that messages compare equal; they have different descriptors."),
            fact("expected", expected),
            fact("with descriptor", ((Message) expected).getDescriptorForType()),
            fact("but was", actual),
            fact("with descriptor", actual.getDescriptorForType()));
    } else if (notMessagesWithSameDescriptor(actual, expected)) {
        // Not message-to-message comparable; delegate to Subject#isEqualTo.
        super.isEqualTo(expected);
    } else {
        // Structured diff; only fail (with the printed diff) when fields differ.
        DiffResult diffResult =
            makeDifferencer((Message) expected).diffMessages(actual, (Message) expected);
        if (!diffResult.isMatched()) {
            failWithoutActual(
                simpleFact(
                    "Not true that messages compare equal.\n"
                        + diffResult.printToString(config.reportMismatchesOnly())));
        }
    }
}
|
@Test
public void testDifferentDynamicDescriptors() throws InvalidProtocolBufferException {
    // Only test once.
    if (!isProto3()) {
        return;
    }
    // Two DynamicMessages with identical bytes but different descriptors must
    // fail with the descriptor-specific message.
    DynamicMessage message1 =
        DynamicMessage.parseFrom(
            TestMessage2.getDescriptor(),
            TestMessage2.newBuilder().setOInt(43).build().toByteString());
    DynamicMessage message2 =
        DynamicMessage.parseFrom(
            TestMessage3.getDescriptor(),
            TestMessage3.newBuilder().setOInt(43).build().toByteString());
    expectFailureWhenTesting().that(message1).isEqualTo(message2);
    expectThatFailure().hasMessageThat().contains("different descriptors");
}
|
// Replaces the paragraph text, stamps the update time, and re-parses the text
// (which may throw if the embedded properties are malformed).
public void setText(String newText) {
    this.text = newText;
    this.dateUpdated = new Date();
    parseText();
}
|
@Test
void testInvalidProperties() {
    // A malformed property ("p1=v1=v2") must make setText throw during parsing.
    Note note = createNote();
    Paragraph paragraph = new Paragraph(note, null);
    assertThrows(RuntimeException.class,
        () -> {
            paragraph.setText("%test(p1=v1=v2) a");
        },
        "Invalid paragraph properties format");
}
|
// Parses a single upstream JSON object into a one-element list; blank input
// yields an empty list. This parser registers itself as the Gson type adapter
// so custom deserialization rules apply.
@Override
public List<DiscoveryUpstreamData> parseValue(final String jsonString) {
    if (StringUtils.isBlank(jsonString)) {
        return Collections.emptyList();
    }
    final Gson gson = new GsonBuilder()
        .registerTypeAdapter(DiscoveryUpstreamData.class, this)
        .create();
    final DiscoveryUpstreamData upstream = gson.fromJson(jsonString, DiscoveryUpstreamData.class);
    return Collections.singletonList(upstream);
}
|
@Test
void testParseValue() {
    // Valid JSON must deserialize into a single-element list whose fields
    // match the payload.
    final String jsonString = "{\"protocol\":\"tcp\",\"url\":\"127.0.0.1:8188\",\"status\":1,\"weight\":1}";
    final List<DiscoveryUpstreamData> expectDiscoveryUpstreamData = new ArrayList<>();
    DiscoveryUpstreamData discoveryUpstreamData = new DiscoveryUpstreamData();
    discoveryUpstreamData.setUrl("127.0.0.1:8188");
    discoveryUpstreamData.setProtocol("tcp");
    discoveryUpstreamData.setStatus(1);
    discoveryUpstreamData.setWeight(1);
    expectDiscoveryUpstreamData.add(discoveryUpstreamData);
    List<DiscoveryUpstreamData> actualDiscoveryUpstreamData = customDiscoveryUpstreamParser.parseValue(jsonString);
    assertEquals(expectDiscoveryUpstreamData, actualDiscoveryUpstreamData);
}
|
// Loads the config for the given id, throwing CONFIG_NOT_EXISTS when no row
// matches. A null id is treated as "nothing to validate" and returns null.
@VisibleForTesting
public ConfigDO validateConfigExists(Long id) {
    if (id == null) {
        return null;
    }
    ConfigDO config = configMapper.selectById(id);
    if (config == null) {
        throw exception(CONFIG_NOT_EXISTS);
    }
    return config;
}
|
@Test
public void testValidateConfigExists_success() {
    // Mock data: insert an existing config row first.
    ConfigDO dbConfigDO = randomConfigDO();
    configMapper.insert(dbConfigDO);// @Sql: insert an existing record up front
    // Invoking with the existing id must succeed without throwing.
    configService.validateConfigExists(dbConfigDO.getId());
}
|
// RangeSet is immutable: any removal attempt is rejected.
@Override
public boolean remove(Object o) {
    throw new UnsupportedOperationException("RangeSet is immutable");
}
|
// remove() on the immutable RangeSet must always throw.
@Test(expected = UnsupportedOperationException.class)
public void remove1() throws Exception {
    RangeSet rs = new RangeSet(4);
    rs.remove(Integer.valueOf(1));
}
|
// Formats an expression with default options (no identifiers are escaped).
public static String formatExpression(final Expression expression) {
    return formatExpression(expression, FormatOptions.of(s -> false));
}
|
@Test
public void shouldFormatDateLiteral() {
    // 864000000 ms = 10 days after the epoch, formatted as an ISO local date.
    assertThat(ExpressionFormatter.formatExpression(new DateLiteral(new Date(864000000))), equalTo("1970-01-11"));
}
|
// Returns the configured Bitbucket API base URL (the default constant here).
public String apiURL() {
    return DEFAULT_API_URL;
}
|
@Test
public void default_apiUrl() {
    // The default API URL must point at the public Bitbucket Cloud endpoint.
    assertThat(underTest.apiURL()).isEqualTo("https://api.bitbucket.org/");
}
|
// Encodes a mapping address into JSON by delegating to the per-type helper.
@Override
public ObjectNode encode(MappingAddress address, CodecContext context) {
    EncodeMappingAddressCodecHelper encoder =
        new EncodeMappingAddressCodecHelper(address, context);
    return encoder.encode();
}
|
@Test
public void asMappingAddressTest() {
    // Round-trip: an AS mapping address encodes to JSON that matches itself.
    MappingAddress address = MappingAddresses.asMappingAddress(AS);
    ObjectNode result = addressCodec.encode(address, context);
    assertThat(result, matchesMappingAddress(address));
}
|
// Renders a value expression, prefixing an explicit cast (e.g. "(Foo) x")
// only when the value's type differs from the required type.
public static String castValue(String value, Type requiredType, String valueType) {
    final String requiredTypeName = requiredType.asString();
    if (requiredTypeName.equals(valueType)) {
        return value;
    }
    return String.format("(%s) %s", requiredTypeName, value);
}
|
@Test
void castReturnValue_whenAValueIsNotAssignedByReturnShouldBeCasted() {
    // Differing types (Object value, String required) must produce a cast prefix.
    Type returnType = StaticJavaParser.parseType("String");
    Type valueType = StaticJavaParser.parseType("Object");
    assertEquals(
        String.format("(%s) %s", returnType, RETURN_VALUE),
        castValue(RETURN_VALUE, returnType, valueType.asString()));
}
|
/**
 * Returns whether {@code array} contains {@code item}, using a linear scan
 * with null-safe equality: a {@code null} item matches a {@code null} slot.
 *
 * @param array the array to search (must be non-null, as in the original)
 * @param item the item to look for, may be {@code null}
 * @return {@code true} if some element equals {@code item}
 */
public static <T> boolean contains(T[] array, T item) {
    for (T o : array) {
        // Null-safe comparison collapses the original nested null checks.
        if (o == null ? item == null : o.equals(item)) {
            return true;
        }
    }
    return false;
}
|
@Test
public void contains_nullValue_returnsFalse() {
    // Searching for null in an array with no null slots must return false.
    Object[] array = new Object[1];
    array[0] = new Object();
    assertFalse(ArrayUtils.contains(array, null));
}
|
// Logical NOT of the wrapped rule: satisfied exactly when the negated rule is not.
@Override
public boolean isSatisfied(int index, TradingRecord tradingRecord) {
    final boolean satisfied = !ruleToNegate.isSatisfied(index, tradingRecord);
    traceIsSatisfied(index, satisfied);
    return satisfied;
}
|
@Test
public void isSatisfied() {
    // negation() must invert the wrapped rule's result at every index.
    assertFalse(satisfiedRule.negation().isSatisfied(0));
    assertTrue(unsatisfiedRule.negation().isSatisfied(0));
    assertFalse(satisfiedRule.negation().isSatisfied(10));
    assertTrue(unsatisfiedRule.negation().isSatisfied(10));
}
|
// Inserts a content pack unless the same (id, revision) already exists.
// Returns the saved pack, or Optional.empty() when it was a duplicate.
public Optional<ContentPack> insert(final ContentPack pack) {
    if (findByIdAndRevision(pack.id(), pack.revision()).isPresent()) {
        LOG.debug("Content pack already found: id: {} revision: {}. Did not insert!", pack.id(), pack.revision());
        return Optional.empty();
    }
    final WriteResult<ContentPack, ObjectId> writeResult = dbCollection.insert(pack);
    return Optional.of(writeResult.getSavedObject());
}
|
@Test
public void insertDuplicate() {
    // Inserting the same (id, revision) twice: the second call must be a no-op
    // returning an empty Optional.
    final ContentPackV1 contentPack = ContentPackV1.builder()
        .id(ModelId.of("id"))
        .revision(1)
        .name("name")
        .description("description")
        .summary("summary")
        .vendor("vendor")
        .url(URI.create("https://www.graylog.org/"))
        .entities(ImmutableSet.of())
        .build();
    contentPackPersistenceService.insert(contentPack);
    final Optional<ContentPack> savedContentPack2 = contentPackPersistenceService.insert(contentPack);
    assertThat(savedContentPack2)
        .isEmpty();
}
|
// Intentional no-op: this lifecycle callback requires no handling here.
@Override
public void onActivityDestroyed(Activity activity) {
}
|
@Test
public void onActivityDestroyed() {
    // Smoke test: the no-op callback must not throw.
    mActivityLifecycle.onActivityDestroyed(mActivity);
}
|
// Sends one snapshot of all registered metrics to Graphite: connect, report
// each metric family, flush, and always close the connection.
@Override
@SuppressWarnings("rawtypes")
public void report(SortedMap<String, Gauge> gauges,
                   SortedMap<String, Counter> counters,
                   SortedMap<String, Histogram> histograms,
                   SortedMap<String, Meter> meters,
                   SortedMap<String, Timer> timers) {
    // Graphite expects epoch seconds, not milliseconds.
    final long timestamp = clock.getTime() / 1000;
    try {
        graphite.connect();
        for (Map.Entry<String, Gauge> entry : gauges.entrySet()) {
            reportGauge(entry.getKey(), entry.getValue(), timestamp);
        }
        for (Map.Entry<String, Counter> entry : counters.entrySet()) {
            reportCounter(entry.getKey(), entry.getValue(), timestamp);
        }
        for (Map.Entry<String, Histogram> entry : histograms.entrySet()) {
            reportHistogram(entry.getKey(), entry.getValue(), timestamp);
        }
        for (Map.Entry<String, Meter> entry : meters.entrySet()) {
            reportMetered(entry.getKey(), entry.getValue(), timestamp);
        }
        for (Map.Entry<String, Timer> entry : timers.entrySet()) {
            reportTimer(entry.getKey(), entry.getValue(), timestamp);
        }
        graphite.flush();
    } catch (IOException e) {
        // Fix: the original message had no "{}" placeholder, so the graphite
        // argument was silently dropped by SLF4J's parameterized logging.
        LOGGER.warn("Unable to report to Graphite {}", graphite, e);
    } finally {
        // Always close, even when reporting failed part-way through.
        try {
            graphite.close();
        } catch (IOException e1) {
            LOGGER.warn("Error closing Graphite {}", graphite, e1);
        }
    }
}
|
@Test
public void reportsMetersInMinutes() throws Exception {
    // The per-minute reporter must scale per-second rates by 60 (2.0/s ->
    // "120.00"/min etc.) and follow the connect/send/flush/close protocol.
    final Meter meter = mock(Meter.class);
    when(meter.getCount()).thenReturn(1L);
    when(meter.getOneMinuteRate()).thenReturn(2.0);
    when(meter.getFiveMinuteRate()).thenReturn(3.0);
    when(meter.getFifteenMinuteRate()).thenReturn(4.0);
    when(meter.getMeanRate()).thenReturn(5.0);
    minuteRateReporter.report(this.map(),
        this.map(),
        this.map(),
        this.map("meter", meter),
        this.map());
    final InOrder inOrder = inOrder(graphite);
    inOrder.verify(graphite).connect();
    inOrder.verify(graphite).send("prefix.meter.count", "1", timestamp);
    inOrder.verify(graphite).send("prefix.meter.m1_rate", "120.00", timestamp);
    inOrder.verify(graphite).send("prefix.meter.m5_rate", "180.00", timestamp);
    inOrder.verify(graphite).send("prefix.meter.m15_rate", "240.00", timestamp);
    inOrder.verify(graphite).send("prefix.meter.mean_rate", "300.00", timestamp);
    inOrder.verify(graphite).flush();
    inOrder.verify(graphite).close();
    verifyNoMoreInteractions(graphite);
}
|
// Accessor for the upstream health-check configuration section.
public UpstreamCheck getUpstreamCheck() {
    return upstreamCheck;
}
|
@Test
public void testUpstreamCheck() {
    // Setter/getter round-trip: all upstream-check fields must be non-empty
    // after being set.
    ShenyuConfig.UpstreamCheck upstreamCheck = config.getUpstreamCheck();
    upstreamCheck.setEnabled(false);
    upstreamCheck.setPoolSize(10);
    upstreamCheck.setHealthyThreshold(4);
    upstreamCheck.setTimeout(10);
    upstreamCheck.setInterval(5);
    upstreamCheck.setUnhealthyThreshold(5);
    upstreamCheck.setPrintEnabled(false);
    upstreamCheck.setPrintInterval(5);
    notEmptyElements(upstreamCheck.getEnabled(), upstreamCheck.getPoolSize(), upstreamCheck.getHealthyThreshold(), upstreamCheck.getTimeout(),
        upstreamCheck.getInterval(), upstreamCheck.getUnhealthyThreshold(), upstreamCheck.getPrintInterval(), upstreamCheck.getPrintEnabled());
}
|
// Resolves the table name, giving path parameters precedence over query
// parameters; returns null when neither carries one.
@VisibleForTesting
static String extractTableName(MultivaluedMap<String, String> pathParameters,
    MultivaluedMap<String, String> queryParameters) {
    final String fromPath = extractTableName(pathParameters);
    return (fromPath != null) ? fromPath : extractTableName(queryParameters);
}
|
@Test
public void testExtractTableNameWithSchemaNameInPathParams() {
    // The schemaName path parameter ("C") must win over every query parameter.
    MultivaluedMap<String, String> pathParams = new MultivaluedHashMap<>();
    MultivaluedMap<String, String> queryParams = new MultivaluedHashMap<>();
    pathParams.putSingle("schemaName", "C");
    queryParams.putSingle("tableName", "D");
    queryParams.putSingle("tableNameWithType", "E");
    queryParams.putSingle("schemaName", "F");
    assertEquals(AuthenticationFilter.extractTableName(pathParams, queryParams), "C");
}
|
// Installs core batch components and, after them, all extensions declared by
// the installed plugins that pass the matcher. Returns this for chaining.
public ExtensionInstaller install(ExtensionContainer container, ExtensionMatcher matcher) {
    // core components
    for (Object o : BatchComponents.all()) {
        doInstall(container, matcher, null, o);
    }
    // plugin extensions
    installExtensionsForPlugins(container, matcher, pluginRepository.getPluginInfos());
    return this;
}
|
@Test
public void should_filter_extensions_to_install() {
    // The matcher accepts Foo only, so Bar must not be added to the container.
    when(pluginRepository.getPluginInfos()).thenReturn(Arrays.asList(new PluginInfo("foo")));
    when(pluginRepository.getPluginInstance("foo")).thenReturn(newPluginInstance(Foo.class, Bar.class));
    ListContainer container = new ListContainer();
    ExtensionInstaller installer = new ExtensionInstaller(mock(SonarRuntime.class), pluginRepository, settings.asConfig());
    installer.install(container, new FooMatcher());
    assertThat(container.getAddedObjects())
        .contains(Foo.class)
        .doesNotContain(Bar.class);
}
|
// Updates the auth path details for an app; requires the edit-resource-details
// permission. Delegates entirely to the app auth service.
@PostMapping("/updateDetailPath")
@RequiresPermissions("system:authen:editResourceDetails")
public ShenyuAdminResult updateDetailPath(@RequestBody @Valid final AuthPathWarpDTO authPathWarpDTO) {
    return appAuthService.updateDetailPath(authPathWarpDTO);
}
|
@Test
public void testUpdateDetailPath() throws Exception {
    // Build a warp DTO with one auth path entry for an existing app auth id.
    final AuthPathDTO authPathDTO = new AuthPathDTO();
    authPathDTO.setAppName("testApp");
    authPathDTO.setPath("/test");
    authPathDTO.setEnabled(true);
    List<AuthPathDTO> authPathDTOS = new ArrayList<>();
    authPathDTOS.add(authPathDTO);
    final AuthPathWarpDTO authPathWarpDTO = new AuthPathWarpDTO();
    authPathWarpDTO.setId("0001");
    authPathWarpDTO.setAuthPathDTOList(authPathDTOS);
    // Stub the Spring context lookup and the existence check used by validation.
    ConfigurableApplicationContext context = mock(ConfigurableApplicationContext.class);
    SpringBeanUtils.getInstance().setApplicationContext(context);
    when(SpringBeanUtils.getInstance().getBean(AppAuthMapper.class)).thenReturn(appAuthMapper);
    when(appAuthMapper.existed(authPathWarpDTO.getId())).thenReturn(true);
    given(this.appAuthService.updateDetailPath(authPathWarpDTO)).willReturn(ShenyuAdminResult.success());
    this.mockMvc.perform(MockMvcRequestBuilders.post("/appAuth/updateDetailPath")
        .contentType(MediaType.APPLICATION_JSON)
        .content(GsonUtils.getInstance().toJson(authPathWarpDTO)))
        .andExpect(status().isOk())
        .andExpect(jsonPath("$.code", is(CommonErrorCode.SUCCESSFUL)))
        .andReturn();
}
|
// Initializes the monitor thread and starts it.
// NOTE(review): initializeMonitorThread() is defined outside this block; the
// companion test indicates it rejects a second start — confirm there.
@Override
public void start() {
    initializeMonitorThread();
    monitorThread.start();
}
|
@Test
void shouldNotAllowMonitorToBeStartedMultipleTimes() {
    // A second start() must fail fast with an IllegalStateException.
    monitor.start();
    assertThatThrownBy(() -> monitor.start()).isExactlyInstanceOf(IllegalStateException.class)
        .hasMessageContaining("Cannot start the monitor multiple times.");
}
|
// Renames an existing column. The rename is merged with any pending update for
// the same field id, and the identifier-field name set is kept in sync.
@Override
public UpdateSchema renameColumn(String name, String newName) {
    Types.NestedField field = findField(name);
    Preconditions.checkArgument(field != null, "Cannot rename missing column: %s", name);
    Preconditions.checkArgument(newName != null, "Cannot rename a column to null");
    Preconditions.checkArgument(
        !deletes.contains(field.fieldId()),
        "Cannot rename a column that will be deleted: %s",
        field.name());

    // Base the renamed field on the pending update when one exists, otherwise
    // on the original field; either way only the name changes.
    int fieldId = field.fieldId();
    Types.NestedField pending = updates.get(fieldId);
    Types.NestedField base = (pending != null) ? pending : field;
    updates.put(
        fieldId,
        Types.NestedField.of(fieldId, base.isOptional(), newName, base.type(), base.doc()));

    // If the old name was an identifier field, carry the new name over.
    if (identifierFieldNames.remove(name)) {
        identifierFieldNames.add(newName);
    }
    return this;
}
|
@Test
public void testRenameMissingColumn() {
    // Renaming a column that does not exist must fail the precondition check.
    assertThatThrownBy(
        () -> {
            UpdateSchema update = new SchemaUpdate(SCHEMA, SCHEMA_LAST_COLUMN_ID);
            update.renameColumn("col", "fail");
        })
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Cannot rename missing column: col");
}
|
// Updates (or creates) an ACL access config. Known access keys are updated in
// the ACL file they were loaded from; unknown keys are appended to the default
// ACL file. Both the YAML file and the in-memory maps are updated, and the
// result of the versioned file write is returned.
public boolean updateAccessConfig(PlainAccessConfig plainAccessConfig) {
    if (plainAccessConfig == null) {
        log.error("Parameter value plainAccessConfig is null,Please check your parameter");
        throw new AclException("Parameter value plainAccessConfig is null, Please check your parameter");
    }
    // Validate the config and its topic/group permission strings up front.
    checkPlainAccessConfig(plainAccessConfig);
    Permission.checkResourcePerms(plainAccessConfig.getTopicPerms());
    Permission.checkResourcePerms(plainAccessConfig.getGroupPerms());
    if (accessKeyTable.containsKey(plainAccessConfig.getAccessKey())) {
        // Known access key: rewrite its entry in the ACL file it came from.
        PlainAccessConfig updateAccountMap = null;
        String aclFileName = accessKeyTable.get(plainAccessConfig.getAccessKey());
        PlainAccessData aclAccessConfigMap = AclUtils.getYamlDataObject(aclFileName, PlainAccessData.class);
        List<PlainAccessConfig> accounts = aclAccessConfigMap.getAccounts();
        if (null != accounts) {
            for (PlainAccessConfig account : accounts) {
                if (account.getAccessKey().equals(plainAccessConfig.getAccessKey())) {
                    // Update acl access config elements
                    // NOTE(review): removing from the list inside this for-each is
                    // only safe because of the immediate break below — the iterator
                    // is never advanced after the modification.
                    accounts.remove(account);
                    updateAccountMap = createAclAccessConfigMap(account, plainAccessConfig);
                    accounts.add(updateAccountMap);
                    aclAccessConfigMap.setAccounts(accounts);
                    break;
                }
            }
        } else {
            // Maybe deleted in file, add it back
            accounts = new LinkedList<>();
            updateAccountMap = createAclAccessConfigMap(null, plainAccessConfig);
            accounts.add(updateAccountMap);
            aclAccessConfigMap.setAccounts(accounts);
        }
        // Refresh the in-memory per-file resource map for this access key.
        Map<String, PlainAccessResource> accountMap = aclPlainAccessResourceMap.get(aclFileName);
        if (accountMap == null) {
            accountMap = new HashMap<>(1);
            accountMap.put(plainAccessConfig.getAccessKey(), buildPlainAccessResource(plainAccessConfig));
        } else if (accountMap.isEmpty()) {
            accountMap.put(plainAccessConfig.getAccessKey(), buildPlainAccessResource(plainAccessConfig));
        } else {
            for (Map.Entry<String, PlainAccessResource> entry : accountMap.entrySet()) {
                if (entry.getValue().getAccessKey().equals(plainAccessConfig.getAccessKey())) {
                    PlainAccessResource plainAccessResource = buildPlainAccessResource(plainAccessConfig);
                    accountMap.put(entry.getKey(), plainAccessResource);
                    break;
                }
            }
        }
        aclPlainAccessResourceMap.put(aclFileName, accountMap);
        // Persist with a bumped config-file version.
        return AclUtils.writeDataObject(aclFileName, updateAclConfigFileVersion(aclFileName, aclAccessConfigMap));
    } else {
        // Unknown access key: append it to the default ACL file.
        String fileName = MixAll.dealFilePath(defaultAclFile);
        //Create acl access config elements on the default acl file
        if (aclPlainAccessResourceMap.get(defaultAclFile) == null || aclPlainAccessResourceMap.get(defaultAclFile).size() == 0) {
            try {
                File defaultAclFile = new File(fileName);
                if (!defaultAclFile.exists()) {
                    defaultAclFile.createNewFile();
                }
            } catch (IOException e) {
                // Best effort: a missing default file is logged, not fatal here.
                log.warn("create default acl file has exception when update accessConfig. ", e);
            }
        }
        PlainAccessData aclAccessConfigMap = AclUtils.getYamlDataObject(defaultAclFile, PlainAccessData.class);
        if (aclAccessConfigMap == null) {
            aclAccessConfigMap = new PlainAccessData();
        }
        List<PlainAccessConfig> accounts = aclAccessConfigMap.getAccounts();
        // When no accounts defined
        if (null == accounts) {
            accounts = new ArrayList<>();
        }
        accounts.add(createAclAccessConfigMap(null, plainAccessConfig));
        aclAccessConfigMap.setAccounts(accounts);
        // Record which file this access key now lives in.
        accessKeyTable.put(plainAccessConfig.getAccessKey(), fileName);
        if (aclPlainAccessResourceMap.get(fileName) == null) {
            Map<String, PlainAccessResource> plainAccessResourceMap = new HashMap<>(1);
            plainAccessResourceMap.put(plainAccessConfig.getAccessKey(), buildPlainAccessResource(plainAccessConfig));
            aclPlainAccessResourceMap.put(fileName, plainAccessResourceMap);
        } else {
            Map<String, PlainAccessResource> plainAccessResourceMap = aclPlainAccessResourceMap.get(fileName);
            plainAccessResourceMap.put(plainAccessConfig.getAccessKey(), buildPlainAccessResource(plainAccessConfig));
            aclPlainAccessResourceMap.put(fileName, plainAccessResourceMap);
        }
        return AclUtils.writeDataObject(defaultAclFile, updateAclConfigFileVersion(defaultAclFile, aclAccessConfigMap));
    }
}
|
// Exercises updateAccessConfig(): a null config, an invalid secret key and a
// malformed group permission are all rejected, then a create followed by an
// in-place update succeed for the same access key.
@Test
public void updateAccessConfigTest() {
// Null config must be rejected outright.
Assert.assertThrows(AclException.class, () -> plainPermissionManager.updateAccessConfig(null));
plainAccessConfig.setAccessKey("admin_test");
// Invalid parameter: "123456" is rejected (presumably below the minimum
// secret-key length — the 8-char key below is accepted).
plainAccessConfig.setSecretKey("123456");
plainAccessConfig.setAdmin(true);
Assert.assertThrows(AclException.class, () -> plainPermissionManager.updateAccessConfig(plainAccessConfig));
plainAccessConfig.setSecretKey("12345678");
// Invalid parameter: "groupA!SUB" is not a valid "resource=permission" entry.
plainAccessConfig.setGroupPerms(Lists.newArrayList("groupA!SUB"));
Assert.assertThrows(AclException.class, () -> plainPermissionManager.updateAccessConfig(plainAccessConfig));
// first update: the key does not exist yet, so the entry is created.
plainAccessConfig.setGroupPerms(Lists.newArrayList("groupA=SUB"));
plainPermissionManager.updateAccessConfig(plainAccessConfig);
// second update: the key now exists, so the stored entry is modified.
plainAccessConfig.setTopicPerms(Lists.newArrayList("topicA=SUB"));
plainPermissionManager.updateAccessConfig(plainAccessConfig);
}
|
/**
 * Returns the primary (first configured) server address.
 */
public URI getServerAddress() {
    final URI primary = serverAddresses.get(0);
    return primary;
}
|
// An https server address must round-trip into the equivalent URI.
@Test
public void shouldParseHttpsAddress() throws Exception {
    // Given:
    final String address = "https://singleServer:8088";
    final URI expectedUri = new URI(address);
    // When:
    try (KsqlRestClient client = clientWithServerAddresses(address)) {
        // Then:
        assertThat(client.getServerAddress(), is(expectedUri));
    }
}
|
/**
 * Describes the decorator and its collaborators for logging/debugging.
 */
@Override
public String toString() {
    return new StringBuilder("DelegateAndSkipOnConcurrentExecutionDecorator{")
            .append("isAlreadyRunning=").append(isAlreadyRunning)
            .append(", runnable=").append(runnable)
            .append(", executor=").append(executor)
            .append(", throwable=").append(throwable)
            .append('}')
            .toString();
}
|
// The decorator's toString() must mention the wrapped runnable so log lines
// identify what is being decorated.
@Test
public void toString_contains_runnables_info() {
    ResumableCountingRunnable delegate = new ResumableCountingRunnable();
    Executor noopExecutor = command -> {
    };
    String description = new DelegateAndSkipOnConcurrentExecutionDecorator(delegate, noopExecutor).toString();
    assertTrue(description.contains("ResumableCountingRunnable"));
}
|
/**
 * Refreshes the proxy-user (superuser) configuration from a freshly loaded
 * server-side {@code Configuration} built from default resources.
 */
public static void refreshSuperUserGroupsConfiguration() {
//load server side configuration;
refreshSuperUserGroupsConfiguration(new Configuration());
}
|
// Proxy-user rules registered under a custom config prefix ("x.") must be
// honored: the authorized proxy user is allowed only from the configured IP,
// and a non-authorized proxy user is denied from every IP.
@Test
public void testProxyUsersWithCustomPrefix() throws Exception {
Configuration conf = new Configuration(false);
conf.set("x." + REAL_USER_NAME + ".users",
StringUtils.join(",", Arrays.asList(AUTHORIZED_PROXY_USER_NAME)));
conf.set("x." + REAL_USER_NAME+ ".hosts", PROXY_IP);
// Load the rules using the custom "x" prefix instead of the default one.
ProxyUsers.refreshSuperUserGroupsConfiguration(conf, "x");
// First try proxying a user that's allowed
UserGroupInformation realUserUgi = UserGroupInformation
.createRemoteUser(REAL_USER_NAME);
UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
AUTHORIZED_PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
// From good IP
assertAuthorized(proxyUserUgi, "1.2.3.4");
// From bad IP
assertNotAuthorized(proxyUserUgi, "1.2.3.5");
// Now try proxying a user that's not allowed
realUserUgi = UserGroupInformation.createRemoteUser(REAL_USER_NAME);
proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
// From good IP
assertNotAuthorized(proxyUserUgi, "1.2.3.4");
// From bad IP
assertNotAuthorized(proxyUserUgi, "1.2.3.5");
}
|
/**
 * Normalizes an IP literal: null/empty or unrecognized input yields null,
 * IPv4-embedded IPv6 literals are reduced to their IPv4 suffix, and all other
 * recognized literals (including the localhost special cases) pass through.
 */
@Nullable public static String ipOrNull(@Nullable String ip) {
  if (ip == null || ip.isEmpty()) return null;
  // special-case localhost
  if ("::1".equals(ip) || "127.0.0.1".equals(ip)) return ip;
  switch (detectFamily(ip)) {
    case IPv4Embedded:
      // e.g. "::ffff:1.2.3.4" -> keep only the dotted-quad after the last ':'
      return ip.substring(ip.lastIndexOf(':') + 1);
    case Unknown:
      return null;
    default:
      return ip;
  }
}
|
// A plain IPv6 literal must pass through ipOrNull unchanged.
@Test void ipOrNull_ipv6() {
assertThat(IpLiteral.ipOrNull("2001:db8::c001")).isEqualTo("2001:db8::c001");
}
|
/**
 * Delegates size resolution to the SizeDeterminer, which invokes {@code cb}
 * once the view's dimensions are available.
 */
@Override
public final void getSize(@NonNull SizeReadyCallback cb) {
sizeDeterminer.getSize(cb);
}
|
// Dispatching preDraw on a dead ViewTreeObserver must not throw, and the
// pending size callback must still be delivered with the laid-out dimensions.
@Test
public void testDoesNotThrowOnPreDrawIfViewTreeObserverIsDead() {
target.getSize(cb);
int width = 1;
int height = 2;
LayoutParams layoutParams = new FrameLayout.LayoutParams(width, height);
view.setLayoutParams(layoutParams);
// Capture the observer before layout; it is replaced (and dies) afterwards.
ViewTreeObserver vto = view.getViewTreeObserver();
view.requestLayout();
activity.visible();
assertFalse(vto.isAlive());
vto.dispatchOnPreDraw();
verify(cb).onSizeReady(eq(width), eq(height));
}
|
/**
 * Two table models are equal when all of their components match; the exact
 * runtime class must match as well (subclasses are never equal).
 */
@Override
public boolean equals(Object obj) {
    if (obj == this) {
        return true;
    }
    if (obj == null || obj.getClass() != getClass()) {
        return false;
    }
    final P4TableModel that = (P4TableModel) obj;
    return Objects.equals(this.id, that.id)
            && Objects.equals(this.tableType, that.tableType)
            && Objects.equals(this.actionProfile, that.actionProfile)
            && Objects.equals(this.maxSize, that.maxSize)
            && Objects.equals(this.counters, that.counters)
            && Objects.equals(this.meters, that.meters)
            && Objects.equals(this.supportAging, that.supportAging)
            && Objects.equals(this.matchFields, that.matchFields)
            && Objects.equals(this.actions, that.actions)
            && Objects.equals(this.constDefaultAction, that.constDefaultAction)
            && Objects.equals(this.oneShotOnly, that.oneShotOnly);
}
|
// Equality contract: model 1 equals its structural copy; models 2 and 3 form
// their own distinct equality groups.
@Test
public void testEquals() {
new EqualsTester()
.addEqualityGroup(P4_TABLE_MODEL_1, SAME_AS_P4_TABLE_MODEL_1)
.addEqualityGroup(P4_TABLE_MODEL_2)
.addEqualityGroup(P4_TABLE_MODEL_3)
.testEquals();
}
|
/**
 * Lists the non-hidden functions exposed by every module currently in use.
 */
public Set<String> listFunctions() {
    return usedModules.stream()
            .flatMap(moduleName -> loadedModules.get(moduleName).listFunctions(false).stream())
            .collect(Collectors.toSet());
}
|
// Only functions from modules currently in use are listed, and hidden
// (internal) functions are excluded by listFunctions(false).
@Test
void testListFunctions() {
ModuleMock x = new ModuleMock("x");
manager.loadModule(x.getType(), x);
assertThat(manager.listFunctions()).contains(ModuleMock.DUMMY_FUNCTION_NAME);
// hidden functions not in the default list
assertThat(manager.listFunctions()).doesNotContain(ModuleMock.INTERNAL_FUNCTION_NAME);
// should not return function name of an unused module
manager.useModules(CoreModuleFactory.IDENTIFIER);
assertThat(manager.listFunctions()).doesNotContain(ModuleMock.DUMMY_FUNCTION_NAME);
}
|
/**
 * Authenticates the consumer via the Authorization header token. Unresolvable
 * tokens get a 401 and the chain stops; otherwise the consumer id is stored on
 * the request, the access is audited, and processing continues.
 */
@Override
public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain) throws
    IOException, ServletException {
  final HttpServletRequest httpRequest = (HttpServletRequest) req;
  final HttpServletResponse httpResponse = (HttpServletResponse) resp;
  final String authToken = httpRequest.getHeader(HttpHeaders.AUTHORIZATION);
  final Long consumerId = consumerAuthUtil.getConsumerId(authToken);
  if (consumerId == null) {
    // Unknown/invalid token: reject and do not continue the chain.
    httpResponse.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Unauthorized");
    return;
  }
  consumerAuthUtil.storeConsumerId(httpRequest, consumerId);
  consumerAuditUtil.audit(httpRequest, consumerId);
  chain.doFilter(req, resp);
}
|
// When the token resolves to no consumer id the filter must answer 401 and
// stop the chain without storing a consumer id or auditing anything.
@Test
public void testAuthFailed() throws Exception {
String someInvalidToken = "someInvalidToken";
when(request.getHeader(HttpHeaders.AUTHORIZATION)).thenReturn(someInvalidToken);
when(consumerAuthUtil.getConsumerId(someInvalidToken)).thenReturn(null);
authenticationFilter.doFilter(request, response, filterChain);
verify(response, times(1)).sendError(eq(HttpServletResponse.SC_UNAUTHORIZED), anyString());
verify(consumerAuthUtil, never()).storeConsumerId(eq(request), anyLong());
verify(consumerAuditUtil, never()).audit(eq(request), anyLong());
verify(filterChain, never()).doFilter(request, response);
}
|
/**
 * Returns the value parsed as a double; only valid for FLOAT or DOUBLE
 * typed properties.
 */
public double asDouble() {
    final boolean isFloatingPoint = type == Type.FLOAT || type == Type.DOUBLE;
    checkState(isFloatingPoint, "Value is not a float or double");
    return Double.parseDouble(value);
}
|
// asDouble() must parse the configured value of a DOUBLE property.
@Test
public void asDouble() {
ConfigProperty p = defineProperty("foo", DOUBLE, "123.0", "Foo Prop");
validate(p, "foo", DOUBLE, "123.0", "123.0");
assertEquals("incorrect value", 123.0, p.asDouble(), 0.01);
}
|
/**
 * Returns the originally requested host, computing it lazily from the headers
 * and server name on first access and caching the result. A malformed host
 * header surfaces as IllegalArgumentException.
 */
@Override
public String getOriginalHost() {
    if (originalHost == null) {
        try {
            originalHost = getOriginalHost(getHeaders(), getServerName());
        } catch (URISyntaxException e) {
            throw new IllegalArgumentException(e);
        }
    }
    return originalHost;
}
|
/**
 * Host resolution rules: X-Forwarded-Host wins over Host, a trailing
 * ":port" is stripped, and IPv6 literals keep their brackets.
 * The original built ten identical requests inline; the construction is
 * factored into helpers so each case is one line.
 */
@Test
void testGetOriginalHost() {
    // Plain host name.
    request = newRequestWithHeaders(headersOf("Host", "blah.netflix.com"));
    assertEquals("blah.netflix.com", request.getOriginalHost());
    // IPv4 literal, with and without a port.
    request = newRequestWithHeaders(headersOf("Host", "0.0.0.1"));
    assertEquals("0.0.0.1", request.getOriginalHost());
    request = newRequestWithHeaders(headersOf("Host", "0.0.0.1:2"));
    assertEquals("0.0.0.1", request.getOriginalHost());
    // Bracketed IPv6 literal, with and without a port.
    request = newRequestWithHeaders(headersOf("Host", "[::2]"));
    assertEquals("[::2]", request.getOriginalHost());
    request = newRequestWithHeaders(headersOf("Host", "[::2]:3"));
    assertEquals("[::2]", request.getOriginalHost());
    // X-Forwarded-Host takes precedence over Host, and works alone too.
    request = newRequestWithHeaders(
            headersOf("Host", "blah.netflix.com", "X-Forwarded-Host", "foo.netflix.com"));
    assertEquals("foo.netflix.com", request.getOriginalHost());
    request = newRequestWithHeaders(headersOf("X-Forwarded-Host", "foo.netflix.com"));
    assertEquals("foo.netflix.com", request.getOriginalHost());
    // Port is stripped from a host name as well.
    request = newRequestWithHeaders(headersOf("Host", "blah.netflix.com:8080"));
    assertEquals("blah.netflix.com", request.getOriginalHost());
}

/** Builds a Headers object from alternating name/value pairs. */
private static Headers headersOf(String... nameValuePairs) {
    Headers headers = new Headers();
    for (int i = 0; i < nameValuePairs.length; i += 2) {
        headers.add(nameValuePairs[i], nameValuePairs[i + 1]);
    }
    return headers;
}

/** Builds a request identical to the ones the original test constructed inline. */
private static HttpRequestMessageImpl newRequestWithHeaders(Headers headers) {
    return new HttpRequestMessageImpl(
            new SessionContext(),
            "HTTP/1.1",
            "POST",
            "/some/where",
            new HttpQueryParams(),
            headers,
            "192.168.0.2",
            "https",
            7002,
            "localhost");
}
|
/**
 * Reads a little-endian 32-bit word from the stream. The four bytes are
 * consumed in order; no end-of-stream check is performed (reads past EOF
 * yield -1 bytes), matching the original contract.
 *
 * @param in the stream to read from
 * @return the 32-bit value assembled least-significant byte first
 * @throws IOException if the underlying stream read fails
 */
public static int readUnsignedIntLE(InputStream in) throws IOException {
    int byte0 = in.read();
    int byte1 = in.read();
    int byte2 = in.read();
    int byte3 = in.read();
    return byte0 | (byte1 << 8) | (byte2 << 16) | (byte3 << 24);
}
|
// readUnsignedIntLE must assemble four bytes little-endian and advance the
// stream so consecutive calls return consecutive words; high-bit bytes must
// not sign-extend into the result.
@Test
public void testReadUnsignedIntLEFromInputStream() throws IOException {
byte[] array1 = {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09};
ByteArrayInputStream is1 = new ByteArrayInputStream(array1);
assertEquals(0x04030201, ByteUtils.readUnsignedIntLE(is1));
assertEquals(0x08070605, ByteUtils.readUnsignedIntLE(is1));
byte[] array2 = {(byte) 0xf1, (byte) 0xf2, (byte) 0xf3, (byte) 0xf4, (byte) 0xf5, (byte) 0xf6, (byte) 0xf7, (byte) 0xf8};
ByteArrayInputStream is2 = new ByteArrayInputStream(array2);
assertEquals(0xf4f3f2f1, ByteUtils.readUnsignedIntLE(is2));
assertEquals(0xf8f7f6f5, ByteUtils.readUnsignedIntLE(is2));
}
|
/**
 * Returns the cells of the given 0-based row, streaming the sheet XML
 * forward from the current reader position. Rows before the first non-empty
 * row yield an empty array; seeking backwards resets the reader; asking for
 * a row past the last one updates {@code numRows} and throws
 * ArrayIndexOutOfBoundsException.
 *
 * @param rownr 0-based row index (the sheet XML "r" attribute is 1-based)
 */
@Override
public KCell[] getRow( int rownr ) {
// xlsx raw row numbers are 1-based index, KSheet is 0-based
// Don't check the upper limit as not all rows may have been read!
// If it's found that the row does not exist, the exception will be thrown at the end of this method.
if ( rownr < 0 ) {
// KSheet requires out of bounds here
throw new ArrayIndexOutOfBoundsException( rownr );
}
if ( rownr + 1 < firstRow ) {
// before first non-empty row
return new KCell[0];
}
// Fast path: the requested row is the one most recently parsed.
if ( rownr > 0 && currentRow == rownr + 1 ) {
if ( currentRowCells != null ) {
return currentRowCells;
}
// The case when the table contains the empty row(s) before the header
// but at the same time user wants to read starting from 0 row
return new KCell[0];
}
try {
if ( currentRow >= rownr + 1 ) {
// allow random access per api despite performance hit
resetSheetReader();
}
// Stream forward until the requested row element (or the end of sheetData).
while ( sheetReader.hasNext() ) {
int event = sheetReader.next();
if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( TAG_ROW ) ) {
String rowIndicator = sheetReader.getAttributeValue( null, "r" );
currentRow = Integer.parseInt( rowIndicator );
if ( currentRow < rownr + 1 ) {
continue;
}
currentRowCells = parseRow();
return currentRowCells;
}
if ( event == XMLStreamConstants.END_ELEMENT && sheetReader.getLocalName().equals( TAG_SHEET_DATA ) ) {
// There're no more columns, no need to continue to read
break;
}
}
} catch ( Exception e ) {
throw new RuntimeException( e );
}
// We've read all document rows, let's update the final count.
numRows = currentRow;
// And, as this was an invalid row to ask for, throw the proper exception!
throw new ArrayIndexOutOfBoundsException( rownr );
}
|
// Sheets without an explicit used-range ("dimension") element must still be
// readable: leading empty rows yield empty cell arrays and data rows parse
// with the correct cell types and values.
@Test
public void testNoUsedRangeSpecified() throws Exception {
final String sheetId = "1";
final String sheetName = "Sheet 1";
SharedStringsTable sharedStringsTableMock =
mockSharedStringsTable( "Report ID", "Report ID", "Approval Status", "Total Report Amount", "Policy",
"ReportIdValue_1", "ReportIdValue_1", "ApprovalStatusValue_1", "PolicyValue_1" );
XSSFReader reader =
mockXSSFReader( sheetId, SHEET_NO_USED_RANGE_SPECIFIED, sharedStringsTableMock, mock( StylesTable.class ) );
StaxPoiSheet spSheet = new StaxPoiSheet( reader, sheetName, sheetId );
// The first row is empty - it should have empty rowCells
KCell[] rowCells = spSheet.getRow( 0 );
assertEquals( 0, rowCells.length );
// The second row - is the header - just skip it
rowCells = spSheet.getRow( 1 );
assertEquals( 0, rowCells.length );
// The row3 - is the first row with data - validating it
rowCells = spSheet.getRow( 2 );
assertEquals( KCellType.LABEL, rowCells[ 0 ].getType() );
assertEquals( "ReportIdValue_1", rowCells[ 0 ].getValue() );
assertEquals( KCellType.LABEL, rowCells[ 1 ].getType() );
assertEquals( "ReportIdValue_1", rowCells[ 1 ].getValue() );
assertEquals( KCellType.LABEL, rowCells[ 2 ].getType() );
assertEquals( "ApprovalStatusValue_1", rowCells[ 2 ].getValue() );
assertEquals( KCellType.NUMBER, rowCells[ 3 ].getType() );
assertEquals( 2623.0, rowCells[ 3 ].getValue() );
assertEquals( KCellType.LABEL, rowCells[ 4 ].getType() );
assertEquals( "PolicyValue_1", rowCells[ 4 ].getValue() );
}
|
/**
 * Resolves a date-format string for the given regex using the default
 * locale mapping (no locale supplied); see the two-argument overload.
 */
public static String getDateFormatByRegex( String regex ) {
return getDateFormatByRegex( regex, null );
}
|
// Regex-to-format resolution: null regex always yields null; without a locale
// the non-US (EU) format is returned even for a US regexp, while passing
// en_US selects the US variant.
@Test
public void testGetDateFormatByRegexLocale() {
assertNull( DateDetector.getDateFormatByRegex( null, null ) );
assertNull( DateDetector.getDateFormatByRegex( null, LOCALE_en_US ) );
// return eu if we pass en_US regexp without locale
assertEquals( SAMPLE_DATE_FORMAT, DateDetector.getDateFormatByRegex( SAMPLE_REGEXP_US ) );
assertEquals( SAMPLE_DATE_FORMAT_US, DateDetector.getDateFormatByRegex( SAMPLE_REGEXP_US, LOCALE_en_US ) );
}
|
/**
 * Asynchronously removes the job graph for {@code jobId} from the state
 * handle store and from the local cache, running under the store lock.
 *
 * @param jobId the job to clean up (must not be null)
 * @param executor executor that runs the removal
 * @return future completing once the job graph has been removed
 */
@Override
public CompletableFuture<Void> globalCleanupAsync(JobID jobId, Executor executor) {
checkNotNull(jobId, "Job ID");
return runAsyncWithLockAssertRunning(
() -> {
LOG.debug("Removing job graph {} from {}.", jobId, jobGraphStateHandleStore);
final String name = jobGraphStoreUtil.jobIDToName(jobId);
// Release and remove the handle; failures surface as CompletionException.
releaseAndRemoveOrThrowCompletionException(jobId, name);
addedJobGraphs.remove(jobId);
LOG.info("Removed job graph {} from {}.", jobId, jobGraphStateHandleStore);
},
executor);
}
|
// Cleaning up a job that was never added must still invoke the store's
// remove function and let the cleanup future complete normally.
@Test
public void testGlobalCleanupWithNonExistName() throws Exception {
final CompletableFuture<JobID> removeFuture = new CompletableFuture<>();
final TestingStateHandleStore<JobGraph> stateHandleStore =
builder.setRemoveFunction(name -> removeFuture.complete(JobID.fromHexString(name)))
.build();
final JobGraphStore jobGraphStore = createAndStartJobGraphStore(stateHandleStore);
jobGraphStore
.globalCleanupAsync(testingJobGraph.getJobID(), Executors.directExecutor())
.join();
assertThat(removeFuture.isDone(), is(true));
}
|
/**
 * Serializes the record and appends it to the buffer under the buffer lock.
 * A record larger than the maximum batch size is rejected outright; if the
 * buffer is full, the call blocks until enough space has been freed.
 */
@Override
public void invoke(IN value, Context context) throws Exception {
    bufferLock.lock();
    try {
        // TODO this implementation is not very effective,
        // optimize this with MemorySegment if needed
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputViewStreamWrapper wrapper = new DataOutputViewStreamWrapper(baos);
        serializer.serialize(value, wrapper);
        invokingRecordBytes = baos.size();
        if (invokingRecordBytes > maxBytesPerBatch) {
            throw new RuntimeException(
                    "Record size is too large for CollectSinkFunction. Record size is "
                            + invokingRecordBytes
                            + " bytes, "
                            + "but max bytes per batch is only "
                            + maxBytesPerBatch
                            + " bytes. "
                            + "Please consider increasing max bytes per batch value by setting "
                            + CollectSinkOperatorFactory.MAX_BATCH_SIZE.key());
        }
        // Wait in a loop: Condition#await is subject to spurious wakeups and the
        // capacity may still be exceeded when we are signalled, so the check must
        // be re-evaluated before adding the record. The original used a single
        // 'if', which could overfill the buffer after a spurious/stale wakeup.
        while (currentBufferBytes + invokingRecordBytes > bufferSizeLimitBytes) {
            bufferCanAddNextResultCondition.await();
        }
        buffer.add(baos.toByteArray());
        currentBufferBytes += baos.size();
    } finally {
        bufferLock.unlock();
    }
}
|
// Requests with increasing offsets (tokens) return the buffered results from
// that offset onward; an offset past the end returns an empty batch.
@Test
void testIncreasingToken() throws Exception {
functionWrapper.openFunction();
for (int i = 0; i < 6; i++) {
functionWrapper.invoke(i);
}
String version = initializeVersion();
CollectCoordinationResponse response;
response = functionWrapper.sendRequestAndGetResponse(version, 0);
assertResponseEquals(response, version, 0, Arrays.asList(0, 1, 2));
response = functionWrapper.sendRequestAndGetResponse(version, 4);
assertResponseEquals(response, version, 0, Arrays.asList(4, 5));
response = functionWrapper.sendRequestAndGetResponse(version, 6);
assertResponseEquals(response, version, 0, Collections.emptyList());
functionWrapper.closeFunctionNormally();
}
|
/**
 * Stops the first {@code numOfServicesStarted} child services in reverse
 * order of their start. Depending on the policy, only STARTED services are
 * stopped, or INITED ones as well. The first stop failure is remembered and
 * rethrown once every service has been given the chance to stop.
 */
private void stop(int numOfServicesStarted, boolean stopOnlyStartedServices) {
    Exception firstException = null;
    List<Service> services = getServices();
    for (int index = numOfServicesStarted - 1; index >= 0; index--) {
        Service service = services.get(index);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Stopping service #" + index + ": " + service);
        }
        STATE state = service.getServiceState();
        boolean shouldStop = state == STATE.STARTED
            || (!stopOnlyStartedServices && state == STATE.INITED);
        if (!shouldStop) {
            continue;
        }
        Exception stopException = ServiceOperations.stopQuietly(LOG, service);
        if (firstException == null && stopException != null) {
            firstException = stopException;
        }
    }
    // After stopping all services, rethrow the first exception raised.
    if (firstException != null) {
        throw ServiceStateException.convert(firstException);
    }
}
|
// A child that was already INITed before being added must be started by the
// parent's start() and stopped by the parent's stop().
@Test(timeout = 10000)
public void testAddInitedChildInInit() throws Throwable {
CompositeService parent = new CompositeService("parent");
BreakableService child = new BreakableService();
child.init(new Configuration());
parent.init(new Configuration());
AddSiblingService.addChildToService(parent, child);
parent.start();
assertInState(STATE.STARTED, child);
parent.stop();
assertInState(STATE.STOPPED, child);
}
|
/**
 * Executes the SOFA RPC call for the matched selector/rule: validates the
 * route meta data, requires a body when parameter types are declared,
 * propagates any SOFA request baggage from the general context, then performs
 * the generic invocation and continues the plugin chain.
 */
@Override
protected Mono<Void> doExecute(final ServerWebExchange exchange, final ShenyuPluginChain chain, final SelectorData selector, final RuleData rule) {
String param = exchange.getAttribute(Constants.PARAM_TRANSFORM);
ShenyuContext shenyuContext = exchange.getAttribute(Constants.CONTEXT);
assert shenyuContext != null;
MetaData metaData = exchange.getAttribute(Constants.META_DATA);
if (!checkMetaData(metaData)) {
assert metaData != null;
LOG.error(" path is :{}, meta data have error.... {}", shenyuContext.getPath(), metaData);
exchange.getResponse().setStatusCode(HttpStatus.INTERNAL_SERVER_ERROR);
Object error = ShenyuResultWrap.error(exchange, ShenyuResultEnum.META_DATA_ERROR);
return WebFluxResultUtils.result(exchange, error);
}
// Parameter types declared but no body: the generic call cannot be built.
if (StringUtils.isNoneBlank(metaData.getParameterTypes()) && StringUtils.isBlank(param)) {
exchange.getResponse().setStatusCode(HttpStatus.INTERNAL_SERVER_ERROR);
Object error = ShenyuResultWrap.error(exchange, ShenyuResultEnum.SOFA_HAVE_BODY_PARAM);
return WebFluxResultUtils.result(exchange, error);
}
// Copy the SOFA plugin's per-request baggage into the RPC invoke context.
Map<String, Map<String, String>> rpcContext = exchange.getAttribute(Constants.GENERAL_CONTEXT);
Optional.ofNullable(rpcContext).map(context -> context.get(PluginEnum.SOFA.getName())).ifPresent(context -> RpcInvokeContext.getContext().putAllRequestBaggage(context));
final Mono<Object> result = sofaProxyService.genericInvoker(param, metaData, exchange);
return result.then(chain.execute(exchange));
}
|
// With null parameter types and method name the plugin must still invoke the
// proxy service and complete the chain without error.
@Test
public void testSofaPlugin2() {
ShenyuContext context = mock(ShenyuContext.class);
exchange.getAttributes().put(Constants.CONTEXT, context);
exchange.getAttributes().put(Constants.META_DATA, metaData);
when(chain.execute(exchange)).thenReturn(Mono.empty());
SelectorData selectorData = mock(SelectorData.class);
metaData.setParameterTypes(null);
metaData.setMethodName(null);
RuleData data = mock(RuleData.class);
StepVerifier.create(sofaPlugin.doExecute(exchange, chain, selectorData, data)).expectSubscription().verifyComplete();
}
|
/**
 * Returns a read-only view of the recent job activities; the view reflects
 * later changes to the backing list but cannot be modified by callers.
 */
public List<JobInfo> getRecentActivities() {
return Collections.unmodifiableList(mRecentActivities);
}
|
/**
 * Verifies size and exact ordering of the recent-activities list.
 * The seven copy-pasted assertions are replaced with a table-driven loop,
 * and the misspelled local ("recentActvitiesArray") is fixed.
 */
@Test
public void testRecentActivities() {
    Collection<JobInfo> recentActivities = mSummary.getRecentActivities();
    assertEquals("Unexpected length of recent activities", 7, recentActivities.size());
    JobInfo[] recentActivitiesArray = recentActivities.toArray(new JobInfo[0]);
    // Expected job ids, in the order the summary should report them.
    long[] expectedIds = {1, 6, 5, 3, 2, 4, 7};
    for (int i = 0; i < expectedIds.length; i++) {
        assertEquals(expectedIds[i], recentActivitiesArray[i].getId());
    }
}
|
/**
 * Parses the AFM data into font metrics (without the reduced-dataset option).
 *
 * @return the parsed FontMetrics
 * @throws IOException if the AFM data is malformed or cannot be read
 */
public FontMetrics parse() throws IOException
{
return parseFontMetric(false);
}
|
/**
 * A malformed integer ("3.4") in the AFM must surface as an IOException
 * caused by NumberFormatException. The input stream is now opened with
 * try-with-resources; the original leaked the FileInputStream.
 */
@Test
void testMalformedInteger() throws IOException
{
    try (FileInputStream input =
            new FileInputStream("src/test/resources/afm/MalformedInteger.afm"))
    {
        AFMParser parser = new AFMParser(input);
        parser.parse();
        fail("The AFMParser should have thrown an IOException because of a malformed int value");
    }
    catch (IOException e)
    {
        assertTrue(e.getCause() instanceof NumberFormatException);
        assertTrue(e.getMessage().contains("3.4"));
    }
}
|
/**
 * Only secure (https) endpoints require a custom SSL socket factory.
 */
public boolean shouldCreateSSLSocketFactory(URL url) {
    final String protocol = url.getProtocol();
    return "https".equalsIgnoreCase(protocol);
}
|
// Only https URLs should trigger creation of an SSL socket factory; http and
// file URLs must not.
@Test
public void testShouldUseSslClientConfig() throws Exception {
JaasOptionsUtils jou = new JaasOptionsUtils(Collections.emptyMap());
assertFalse(jou.shouldCreateSSLSocketFactory(new URL("http://example.com")));
assertTrue(jou.shouldCreateSSLSocketFactory(new URL("https://example.com")));
assertFalse(jou.shouldCreateSSLSocketFactory(new URL("file:///tmp/test.txt")));
}
|
/**
 * Looks up the parsed User-Agent stored on the remote channel mapped to the
 * given local address; empty when no channel is mapped or the user-agent
 * attribute was never set on it.
 */
public Optional<UserAgent> getUserAgent(final LocalAddress localAddress) {
return Optional.ofNullable(remoteChannelsByLocalAddress.get(localAddress))
.map(remoteChannel -> remoteChannel.attr(PARSED_USER_AGENT_ATTRIBUTE_KEY).get());
}
|
// Before the attribute is set, getUserAgent is empty; once the parsed agent
// is attached to the remote channel it is returned for the local address.
@Test
void getUserAgent() throws UnrecognizedUserAgentException {
clientConnectionManager.handleConnectionEstablished(localChannel, remoteChannel, Optional.empty());
assertEquals(Optional.empty(),
clientConnectionManager.getUserAgent(localChannel.localAddress()));
final UserAgent userAgent = UserAgentUtil.parseUserAgentString("Signal-Desktop/1.2.3 Linux");
remoteChannel.attr(ClientConnectionManager.PARSED_USER_AGENT_ATTRIBUTE_KEY).set(userAgent);
assertEquals(Optional.of(userAgent),
clientConnectionManager.getUserAgent(localChannel.localAddress()));
}
|
/**
 * Returns the single element of {@code iterable}.
 *
 * @throws IllegalArgumentException if the iterable is null or contains more
 *         than one element
 * (An empty iterable surfaces the iterator's NoSuchElementException, as in
 * the original implementation.)
 */
public static <T> T getOnlyElement(Iterable<T> iterable) {
    if (iterable == null) {
        throw new IllegalArgumentException("iterable cannot be null.");
    }
    Iterator<T> elements = iterable.iterator();
    T onlyElement = elements.next();
    if (elements.hasNext()) {
        throw new IllegalArgumentException(buildExceptionMessage(elements, onlyElement));
    }
    return onlyElement;
}
|
// A null iterable must be rejected with IllegalArgumentException.
@Test
void testGetOnlyElementIllegalArgumentException2() {
assertThrows(IllegalArgumentException.class, () -> {
CollectionUtils.getOnlyElement(null);
});
}
|
/**
 * Evaluates an input that falls outside the configured normalization range,
 * according to the outlier treatment method:
 * AS_IS interpolates with the first or last pair of linear norms,
 * AS_MISSING_VALUES returns the configured missing replacement, and
 * AS_EXTREME_VALUES clamps to the first/last norm value.
 */
Number evaluateOutlierValue(final Number input) {
    switch (outlierTreatmentMethod) {
        case AS_IS: {
            final KiePMMLLinearNorm[] boundaryPair;
            if (input.doubleValue() < firstLinearNorm.getOrig()) {
                // Below range: extrapolate from the first two linear norms.
                boundaryPair = linearNorms.subList(0, 2).toArray(new KiePMMLLinearNorm[0]);
            } else {
                // Above range: extrapolate from the last two linear norms.
                boundaryPair = linearNorms.subList(linearNorms.size() - 2, linearNorms.size())
                        .toArray(new KiePMMLLinearNorm[0]);
            }
            return evaluate(input, boundaryPair);
        }
        case AS_MISSING_VALUES:
            return mapMissingTo;
        case AS_EXTREME_VALUES:
            return input.doubleValue() < firstLinearNorm.getOrig()
                    ? firstLinearNorm.getNorm()
                    : lastLinearNorm.getNorm();
        default:
            throw new KiePMMLException("Unknown outlierTreatmentMethod " + outlierTreatmentMethod);
    }
}
|
// AS_EXTREME_VALUES: inputs below the first linear-norm origin clamp to the
// first norm value; inputs above clamp to the last norm value.
@Test
void evaluateOutlierValueAsExtremeValues() {
KiePMMLNormContinuous kiePMMLNormContinuous = getKiePMMLNormContinuous(null,
OUTLIER_TREATMENT_METHOD.AS_EXTREME_VALUES, null);
Number input = 23;
Number retrieved = kiePMMLNormContinuous.evaluateOutlierValue(input);
assertThat(retrieved).isEqualTo(kiePMMLNormContinuous.linearNorms.get(0).getNorm());
input = 41;
retrieved = kiePMMLNormContinuous.evaluateOutlierValue(input);
assertThat(retrieved).isEqualTo(kiePMMLNormContinuous.linearNorms.get(3).getNorm());
}
|
/**
 * Returns the decimal precision recorded in the schema's parameters, or
 * {@code PRECISION_DEFAULT} when no precision parameter is present.
 *
 * @param schema a Connect schema; must be a decimal schema
 * @return the parsed precision
 * @throws KsqlException if the precision parameter is not a valid integer
 */
public static int precision(final Schema schema) {
  requireDecimal(schema);
  final String precisionString = schema.parameters().get(PRECISION_FIELD);
  if (precisionString == null) {
    return PRECISION_DEFAULT;
  }
  try {
    return Integer.parseInt(precisionString);
  } catch (final NumberFormatException e) {
    // Include the offending value: the original message ended in ": " with
    // nothing after it, which made failures hard to diagnose.
    throw new KsqlException(
        "Invalid precision parameter found in Decimal schema: " + precisionString, e);
  }
}
|
// A decimal schema without a precision parameter falls back to the default
// precision (64).
@Test
public void shouldUseDefaultPrecisionIfNotPresentInSchema() {
// When:
final int precision = DecimalUtil.precision(decimalSchemaWithoutPrecision(3));
// Then:
assertThat(precision, is(64));
}
|
/**
 * Stores the public key for the backup-id carried by the presentation, after
 * verifying the presentation and checking the signature against the key being
 * set. A conditional write rejects a conflicting existing key; that conflict
 * is counted and surfaced to the caller as UNAUTHENTICATED.
 *
 * @param presentation credential presentation identifying the backup-id
 * @param signature signature over the serialized presentation
 * @param publicKey the key to store (and to verify the signature with)
 * @return future completing when the key has been stored
 */
public CompletableFuture<Void> setPublicKey(
final BackupAuthCredentialPresentation presentation,
final byte[] signature,
final ECPublicKey publicKey) {
// Note: this is a special case where we can't validate the presentation signature against the stored public key
// because we are currently setting it. We check against the provided public key, but we must also verify that
// there isn't an existing, different stored public key for the backup-id (verified with a condition expression)
final BackupLevel backupLevel = verifyPresentation(presentation).verifySignature(signature, publicKey);
return backupsDb.setPublicKey(presentation.getBackupId(), backupLevel, publicKey)
.exceptionally(ExceptionUtils.exceptionallyHandler(PublicKeyConflictException.class, ex -> {
Metrics.counter(ZK_AUTHN_COUNTER_NAME,
SUCCESS_TAG_NAME, String.valueOf(false),
FAILURE_REASON_TAG_NAME, "public_key_conflict")
.increment();
throw Status.UNAUTHENTICATED
.withDescription("public key does not match existing public key for the backup-id")
.asRuntimeException();
}));
}
|
// Setting a public key is first-write-wins: a different key for the same
// backup-id is rejected with UNAUTHENTICATED, while re-setting the same key
// is an accepted no-op.
@Test
public void mismatchedPublicKey() throws VerificationFailedException {
final BackupAuthCredentialPresentation presentation = backupAuthTestUtil.getPresentation(
BackupLevel.MESSAGES, backupKey, aci);
final ECKeyPair keyPair1 = Curve.generateKeyPair();
final ECKeyPair keyPair2 = Curve.generateKeyPair();
final byte[] signature1 = keyPair1.getPrivateKey().calculateSignature(presentation.serialize());
final byte[] signature2 = keyPair2.getPrivateKey().calculateSignature(presentation.serialize());
backupManager.setPublicKey(presentation, signature1, keyPair1.getPublicKey()).join();
// shouldn't be able to set a different public key
assertThat(CompletableFutureTestUtil.assertFailsWithCause(
StatusRuntimeException.class,
backupManager.setPublicKey(presentation, signature2, keyPair2.getPublicKey()))
.getStatus().getCode())
.isEqualTo(Status.UNAUTHENTICATED.getCode());
// should be able to set the same public key again (noop)
backupManager.setPublicKey(presentation, signature1, keyPair1.getPublicKey()).join();
}
|
// Static utility holder: private constructor prevents instantiation.
private MergeSortedPages() {}
|
// Merging a single already-sorted input (split across a page break) must
// reproduce the rows unchanged and in order.
@Test
public void testSingleStream()
throws Exception
{
List<Type> types = ImmutableList.of(INTEGER, INTEGER);
MaterializedResult actual = mergeSortedPages(
types,
ImmutableList.of(0, 1),
ImmutableList.of(ASC_NULLS_FIRST, DESC_NULLS_FIRST),
ImmutableList.of(
rowPagesBuilder(types)
.row(1, 4)
.row(2, 3)
.pageBreak()
.row(3, 2)
.row(4, 1)
.build()));
MaterializedResult expected = resultBuilder(TEST_SESSION, types)
.row(1, 4)
.row(2, 3)
.row(3, 2)
.row(4, 1)
.build();
assertEquals(actual, expected);
}
|
/**
 * Recursively deletes the contents of {@code directory} while keeping the
 * directory itself; a non-existing directory is a no-op.
 *
 * @param directory the directory to clean; must not be null
 * @throws IOException if a file or sub-directory cannot be deleted
 */
public static void cleanDirectory(File directory) throws IOException {
  requireNonNull(directory, DIRECTORY_CAN_NOT_BE_NULL);
  // Check existence on the File directly instead of round-tripping through
  // Path.toFile(), which allocated a new File just for the exists() call.
  if (!directory.exists()) {
    return;
  }
  cleanDirectoryImpl(directory.toPath());
}
|
// cleanDirectory on a symlink must clean the target directory's contents
// without deleting or recreating either the target directory or the link
// itself (verified via stable file keys).
@Test
public void cleanDirectory_follows_symlink_to_target_directory() throws IOException {
assumeTrue(SystemUtils.IS_OS_UNIX);
Path target = temporaryFolder.newFolder().toPath();
Path symToDir = Files.createSymbolicLink(temporaryFolder.newFolder().toPath().resolve("sym_to_dir"), target);
Path childFile1 = Files.createFile(target.resolve("file1.txt"));
Path childDir1 = Files.createDirectory(target.resolve("subDir1"));
Path childFile2 = Files.createFile(childDir1.resolve("file2.txt"));
Path childDir2 = Files.createDirectory(childDir1.resolve("subDir2"));
assertThat(target).isDirectory();
assertThat(symToDir).isSymbolicLink();
assertThat(childFile1).isRegularFile();
assertThat(childDir1).isDirectory();
assertThat(childFile2).isRegularFile();
assertThat(childDir2).isDirectory();
// on supporting FileSystem, target will change if directory is recreated
Object targetKey = getFileKey(target);
Object symLinkKey = getFileKey(symToDir);
FileUtils.cleanDirectory(symToDir.toFile());
assertThat(target).isDirectory();
assertThat(symToDir).isSymbolicLink();
assertThat(childFile1).doesNotExist();
assertThat(childDir1).doesNotExist();
assertThat(childFile2).doesNotExist();
assertThat(childDir2).doesNotExist();
assertThat(getFileKey(target)).isEqualTo(targetKey);
assertThat(getFileKey(symToDir)).isEqualTo(symLinkKey);
}
|
/**
 * Registers {@code job} as being processed by {@code thread}. If a job with
 * the same id is already in progress, that job is marked deleted
 * ("Job has been replaced") and its processing thread is interrupted before
 * the new entry is recorded.
 *
 * NOTE(review): the second map() looks up the thread using the value returned
 * by delete(); this assumes delete() returns the same (equal) Job key under
 * which the thread was registered — TODO confirm.
 */
public void startProcessing(Job job, Thread thread) {
Optional<Job> optionalExistingThatMayBeReplacedJob = jobsCurrentlyInProgress.keySet().stream().filter(j -> j.getId().equals(job.getId())).findFirst();
optionalExistingThatMayBeReplacedJob
.map(j -> j.delete("Job has been replaced"))
.map(jobsCurrentlyInProgress::get)
.ifPresent(Thread::interrupt);
jobsCurrentlyInProgress.put(job, thread);
}
|
// A steward run must persist in-progress jobs so their ProcessingState gets
// a heartbeat (updatedAt moves past createdAt).
@Test
void jobsThatAreProcessedAreBeingUpdatedWithAHeartbeat() {
jobSteward = initializeBackgroundJobServerWithJobSteward();
final Job job = anEnqueuedJob().withId().build();
job.startProcessingOn(backgroundJobServer);
jobSteward.startProcessing(job, mock(Thread.class));
jobSteward.run();
verify(storageProvider).save(singletonList(job));
ProcessingState processingState = job.getJobState();
assertThat(processingState.getUpdatedAt()).isAfter(processingState.getCreatedAt());
}
|
/**
 * Create a new SharedFileDescriptorFactory rooted in the first usable path
 * from the given candidates.
 *
 * Usability is probed by creating (and immediately closing) a small test
 * descriptor in the candidate path; stale temporary files left behind by
 * previous processes are then removed from the chosen path.
 *
 * @param prefix  Prefix added to the names of created file descriptors.
 * @param paths   Candidate directories, tried in order.
 * @return        A factory bound to the first usable path.
 * @throws IOException If native support is unavailable, no paths were
 *                     configured, or none of the paths were usable (the
 *                     message aggregates the per-path errors).
 */
public static SharedFileDescriptorFactory create(String prefix,
    String[] paths) throws IOException {
  String loadingFailureReason = getLoadingFailureReason();
  if (loadingFailureReason != null) {
    throw new IOException(loadingFailureReason);
  }
  if (paths.length == 0) {
    throw new IOException("no SharedFileDescriptorFactory paths were " +
        "configured.");
  }
  StringBuilder errors = new StringBuilder();
  String strPrefix = "";
  for (String path : paths) {
    try {
      // Probe the path: successfully creating a descriptor proves the
      // directory is writable and suitable.  try-with-resources guarantees
      // the probe stream is closed on every path through this block.
      try (FileInputStream fis =
          new FileInputStream(createDescriptor0(prefix + "test", path, 1))) {
        // Probe succeeded; nothing needs to be read from the stream.
      }
      deleteStaleTemporaryFiles0(prefix, path);
      return new SharedFileDescriptorFactory(prefix, path);
    } catch (IOException e) {
      // Record why this path failed and continue with the next candidate.
      errors.append(strPrefix).append("Error creating file descriptor in ").
          append(path).append(": ").append(e.getMessage());
      strPrefix = ", ";
    }
  }
  throw new IOException(errors.toString());
}
|
/**
 * Creating a SharedFileDescriptorFactory must delete stale temporary files
 * ("remainders") that earlier processes left in the chosen directory.
 */
@Test(timeout=10000)
public void testCleanupRemainders() throws Exception {
  Assume.assumeTrue(NativeIO.isAvailable());
  Assume.assumeTrue(SystemUtils.IS_OS_UNIX);

  File testDir = new File(TEST_BASE, "testCleanupRemainders");
  testDir.mkdirs();
  String stale1 = testDir.getAbsolutePath() +
      Path.SEPARATOR + "woot2_remainder1";
  String stale2 = testDir.getAbsolutePath() +
      Path.SEPARATOR + "woot2_remainder2";
  createTempFile(stale1);
  createTempFile(stale2);

  SharedFileDescriptorFactory.create("woot2_",
      new String[] { testDir.getAbsolutePath() });

  // Factory construction must have cleaned the stale "woot2_" files.
  Assert.assertFalse(new File(stale1).exists());
  Assert.assertFalse(new File(stale2).exists());
  FileUtil.fullyDelete(testDir);
}
|
/**
 * Creates a KubevirtNetwork from the JSON request body.
 *
 * @param input network JSON input stream
 * @return 201 CREATED with the new network's id as the Location URI;
 *         malformed input surfaces as IllegalArgumentException
 */
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response createNetwork(InputStream input) {
    log.trace(String.format(MESSAGE, "CREATE"));
    KubevirtNetworkAdminService service = get(KubevirtNetworkAdminService.class);
    try {
        ObjectNode json = readTreeFromStream(mapper(), input);
        KubevirtNetwork network = codec(KubevirtNetwork.class).decode(json, this);
        service.createNetwork(network);
        return Response.created(new URI(network.networkId())).build();
    } catch (IOException | URISyntaxException e) {
        // Invalid JSON or an unusable network id both map to a 400-style error.
        throw new IllegalArgumentException(e);
    }
}
|
/**
 * Posting a valid network JSON document must invoke
 * KubevirtNetworkAdminService#createNetwork and answer 201 CREATED.
 */
@Test
public void testCreateNetworkWithCreateOperation() {
    mockAdminService.createNetwork(anyObject());
    replay(mockAdminService);

    InputStream jsonStream = KubevirtNetworkWebResourceTest.class
            .getResourceAsStream("kubevirt-network.json");
    Response response = target().path(PATH)
            .request(MediaType.APPLICATION_JSON_TYPE)
            .post(Entity.json(jsonStream));

    assertThat(response.getStatus(), is(201));
    verify(mockAdminService);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.