index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/clientImpl/ScannerOptionsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.clientImpl;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.SortedSet;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.data.Column;
import org.apache.accumulo.core.iterators.DebugIterator;
import org.apache.accumulo.core.iterators.user.WholeRowIterator;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
/**
* Test that scanner options are set/unset correctly
*/
/**
 * Verifies that scanner options can be set and cleared as expected.
 */
public class ScannerOptionsTest {
  /**
   * Adding an iterator should grow the server-side iterator list; removing it by name should
   * shrink the list back to empty.
   */
  @Test
  public void testAddRemoveIterator() {
    try (ScannerOptions opts = new ScannerOptions()) {
      opts.addScanIterator(new IteratorSetting(1, "NAME", WholeRowIterator.class));
      assertEquals(1, opts.serverSideIteratorList.size());
      opts.removeScanIterator("NAME");
      assertEquals(0, opts.serverSideIteratorList.size());
    }
  }
  @Test
  public void testIteratorConflict() {
    try (ScannerOptions opts = new ScannerOptions()) {
      opts.addScanIterator(new IteratorSetting(1, "NAME", DebugIterator.class));
      // reusing the same name at a different priority must be rejected
      assertThrows(IllegalArgumentException.class,
          () -> opts.addScanIterator(new IteratorSetting(2, "NAME", DebugIterator.class)));
      // reusing the same priority under a different name must be rejected as well
      assertThrows(IllegalArgumentException.class,
          () -> opts.addScanIterator(new IteratorSetting(1, "NAME2", DebugIterator.class)));
    }
  }
  @Test
  public void testFetchColumn() {
    try (ScannerOptions opts = new ScannerOptions()) {
      assertEquals(0, opts.getFetchedColumns().size());
      IteratorSetting.Column requested =
          new IteratorSetting.Column(new Text("family"), new Text("qualifier"));
      opts.fetchColumn(requested);
      SortedSet<Column> fetched = opts.getFetchedColumns();
      assertEquals(1, fetched.size());
      // the stored column should round-trip the family and qualifier we asked for
      Column stored = fetched.first();
      assertEquals(requested.getColumnFamily(), new Text(stored.getColumnFamily()));
      assertEquals(requested.getColumnQualifier(), new Text(stored.getColumnQualifier()));
    }
  }
  @Test
  public void testFetchNullColumn() {
    try (ScannerOptions opts = new ScannerOptions()) {
      // a null Column must be rejected up front
      assertThrows(IllegalArgumentException.class, () -> opts.fetchColumn(null));
    }
  }
}
| 9,400 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/clientImpl | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/clientImpl/bulk/ConcurrentKeyExtentCacheTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.clientImpl.bulk;
import static org.apache.accumulo.core.util.LazySingletons.RANDOM;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.stream.Stream;
import org.apache.accumulo.core.data.TableId;
import org.apache.accumulo.core.dataImpl.KeyExtent;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
public class ConcurrentKeyExtentCacheTest {
  // 255 contiguous extents covering the entire row space of table "1"
  private static List<KeyExtent> extents = new ArrayList<>();
  private static Set<KeyExtent> extentsSet = new HashSet<>();
  @BeforeAll
  public static void setupSplits() {
    Text prev = null;
    for (int i = 1; i < 255; i++) {
      Text endRow = new Text(String.format("%02x", i));
      extents.add(new KeyExtent(TableId.of("1"), endRow, prev));
      prev = endRow;
    }
    // final extent covers everything after the last split point
    extents.add(new KeyExtent(TableId.of("1"), null, prev));
    extentsSet.addAll(extents);
  }
  /**
   * Cache that serves lookups from the static extent list and records every extent added to the
   * cache, asserting that no extent is ever cached twice even under concurrent lookups.
   */
  private static class TestCache extends ConcurrentKeyExtentCache {
    ConcurrentSkipListSet<KeyExtent> seen = new ConcurrentSkipListSet<>();
    TestCache() {
      super(null, null);
    }
    @Override
    protected void updateCache(KeyExtent e) {
      super.updateCache(e);
      // each extent should be cached exactly once
      assertTrue(seen.add(e));
    }
    @Override
    protected Stream<KeyExtent> lookupExtents(Text row) {
      // find the first extent containing the row, then return a bounded batch starting there
      int index = -1;
      for (int i = 0; i < extents.size(); i++) {
        if (extents.get(i).contains(row)) {
          index = i;
          break;
        }
      }
      // simulate a slow metadata scan to encourage thread interleaving
      try {
        Thread.sleep(3);
      } catch (InterruptedException ex) {
        // restore the interrupt status; the test does not otherwise react to interruption
        Thread.currentThread().interrupt();
      }
      return extents.subList(index, extents.size()).stream().limit(73);
    }
  }
  // looks up a row and verifies the returned extent contains it and is a known extent
  private void testLookup(TestCache tc, Text lookupRow) {
    KeyExtent extent = tc.lookup(lookupRow);
    assertTrue(extent.contains(lookupRow));
    assertTrue(extentsSet.contains(extent));
  }
  @Test
  public void testExactEndRows() {
    TestCache tc = new TestCache();
    RANDOM.get().ints(20000, 0, 256).mapToObj(i -> new Text(String.format("%02x", i))).sequential()
        .forEach(lookupRow -> testLookup(tc, lookupRow));
    assertEquals(extentsSet, tc.seen);
    // try parallel
    TestCache tc2 = new TestCache();
    RANDOM.get().ints(20000, 0, 256).mapToObj(i -> new Text(String.format("%02x", i))).parallel()
        .forEach(lookupRow -> testLookup(tc2, lookupRow));
    // BUGFIX: previously asserted tc.seen here, leaving the parallel cache unverified
    assertEquals(extentsSet, tc2.seen);
  }
  @Test
  public void testRandom() {
    TestCache tc = new TestCache();
    RANDOM.get().ints(20000).mapToObj(i -> new Text(String.format("%08x", i))).sequential()
        .forEach(lookupRow -> testLookup(tc, lookupRow));
    assertEquals(extentsSet, tc.seen);
    // try parallel
    TestCache tc2 = new TestCache();
    RANDOM.get().ints(20000).mapToObj(i -> new Text(String.format("%08x", i))).parallel()
        .forEach(lookupRow -> testLookup(tc2, lookupRow));
    assertEquals(extentsSet, tc2.seen);
  }
}
| 9,401 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/clientImpl | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/clientImpl/bulk/BulkSerializeTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.clientImpl.bulk;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.accumulo.core.Constants;
import org.apache.accumulo.core.clientImpl.bulk.Bulk.FileInfo;
import org.apache.accumulo.core.clientImpl.bulk.Bulk.Files;
import org.apache.accumulo.core.clientImpl.bulk.BulkSerialize.Input;
import org.apache.accumulo.core.data.TableId;
import org.apache.accumulo.core.dataImpl.KeyExtent;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
/**
 * Round-trip tests for the bulk load serialization helpers in BulkSerialize.
 */
public class BulkSerializeTest {
  @Test
  public void writeReadLoadMapping() throws Exception {
    TableId tableId = TableId.of("3");
    SortedMap<KeyExtent,Bulk.Files> written = generateMapping(tableId);
    // serialize into memory, then deserialize and compare to the original
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BulkSerialize.writeLoadMapping(written, "/some/dir", p -> out);
    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    try (LoadMappingIterator lmi = BulkSerialize.readLoadMapping("/some/dir", tableId, p -> in)) {
      SortedMap<KeyExtent,Bulk.Files> read = new TreeMap<>();
      lmi.forEachRemaining(e -> read.put(e.getKey(), e.getValue()));
      assertEquals(written, read);
    }
  }
  @Test
  public void writeReadRenames() throws Exception {
    Map<String,String> renames = new HashMap<>();
    for (String f : "f1 f2 f3 f4 f5".split(" ")) {
      renames.put("old_" + f + ".rf", "new_" + f + ".rf");
    }
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BulkSerialize.writeRenameMap(renames, "/some/dir", p -> out);
    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    Map<String,String> readMap = BulkSerialize.readRenameMap("/some/dir", p -> in);
    assertEquals(renames.size(), readMap.size(), "Read renames file wrong size");
    assertEquals(renames, readMap, "Read renames file different from what was written.");
  }
  @Test
  public void testRemap() throws Exception {
    TableId tableId = TableId.of("3");
    SortedMap<KeyExtent,Bulk.Files> mapping = generateMapping(tableId);
    // build the expected post-rename mapping alongside the old->new name table
    SortedMap<KeyExtent,Bulk.Files> expectedRenamed = new TreeMap<>();
    Map<String,String> oldToNew = new HashMap<>();
    mapping.forEach((extent, files) -> {
      Files renamedFiles = new Files();
      files.forEach(fi -> {
        renamedFiles.add(new FileInfo("N" + fi.name, fi.estSize, fi.estEntries));
        oldToNew.put(fi.name, "N" + fi.name);
      });
      expectedRenamed.put(extent, renamedFiles);
    });
    ByteArrayOutputStream mappingBytes = new ByteArrayOutputStream();
    ByteArrayOutputStream nameBytes = new ByteArrayOutputStream();
    BulkSerialize.writeRenameMap(oldToNew, "/some/dir", p -> nameBytes);
    BulkSerialize.writeLoadMapping(mapping, "/some/dir", p -> mappingBytes);
    // serve whichever of the two serialized artifacts is requested by file name
    Input input = p -> {
      if (p.getName().equals(Constants.BULK_LOAD_MAPPING)) {
        return new ByteArrayInputStream(mappingBytes.toByteArray());
      } else if (p.getName().equals(Constants.BULK_RENAME_FILE)) {
        return new ByteArrayInputStream(nameBytes.toByteArray());
      } else {
        throw new IllegalArgumentException("bad path " + p);
      }
    };
    try (LoadMappingIterator lmi =
        BulkSerialize.getUpdatedLoadMapping("/some/dir", tableId, input)) {
      SortedMap<KeyExtent,Bulk.Files> actual = new TreeMap<>();
      lmi.forEachRemaining(e -> actual.put(e.getKey(), e.getValue()));
      assertEquals(expectedRenamed, actual);
    }
  }
  /** Builds a three-extent fixture mapping with deterministic file sizes/entry counts. */
  public SortedMap<KeyExtent,Bulk.Files> generateMapping(TableId tableId) {
    SortedMap<KeyExtent,Bulk.Files> mapping = new TreeMap<>();
    Bulk.Files filesA = new Bulk.Files();
    Bulk.Files filesB = new Bulk.Files();
    Bulk.Files filesC = new Bulk.Files();
    long counter = 0L;
    for (String f : "f1 f2 f3".split(" ")) {
      counter++;
      filesA.add(new Bulk.FileInfo(f, counter, counter));
    }
    counter = 0L;
    for (String f : "g1 g2 g3".split(" ")) {
      counter++;
      filesB.add(new Bulk.FileInfo(f, counter, counter));
    }
    // NOTE: counter is deliberately not reset before the third group, so h1..h3 get 4..6,
    // matching the original fixture values
    for (String f : "h1 h2 h3".split(" ")) {
      counter++;
      filesC.add(new Bulk.FileInfo(f, counter, counter));
    }
    // add out of order to test sorting
    mapping.put(new KeyExtent(tableId, new Text("d"), new Text("c")), filesA);
    mapping.put(new KeyExtent(tableId, new Text("c"), new Text("b")), filesB);
    mapping.put(new KeyExtent(tableId, new Text("b"), new Text("a")), filesC);
    return mapping;
  }
}
| 9,402 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/clientImpl | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/clientImpl/bulk/BulkImportTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.clientImpl.bulk;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.accumulo.core.clientImpl.bulk.Bulk.FileInfo;
import org.apache.accumulo.core.clientImpl.bulk.Bulk.Files;
import org.apache.accumulo.core.data.TableId;
import org.apache.accumulo.core.dataImpl.KeyExtent;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
/**
 * Tests for BulkImport.mergeOverlapping, which reconciles load mappings computed against tablets
 * that split (or merged) while the input files were being examined.
 */
public class BulkImportTest {
  @Test
  public void testMergeOverlappingSingleSplit() {
    SortedMap<KeyExtent,Files> mappings = new TreeMap<>();
    // simulate the tablet (m,s] splitting into (m,p] and (p,s] while files are being examined
    putMapping(mappings, null, "m", "f0");
    putMapping(mappings, "m", "s", "f1", "f2");
    putMapping(mappings, "p", "s", "f3");
    putMapping(mappings, "m", "p", "f4");
    putMapping(mappings, "s", null, "f5");
    SortedMap<KeyExtent,Files> expected = new TreeMap<>();
    putMapping(expected, null, "m", "f0");
    putMapping(expected, "m", "s", "f1", "f2", "f3", "f4");
    putMapping(expected, "s", null, "f5");
    assertEquals(expected, BulkImport.mergeOverlapping(mappings));
  }
  @Test
  public void testMergeOverlappingMultipleSplit() {
    SortedMap<KeyExtent,Files> mappings = new TreeMap<>();
    // simulate the tablet (m,s] splitting into (m,o],(o,p],(p,s] while files are being examined
    putMapping(mappings, null, "m", "f0");
    putMapping(mappings, "m", "s", "f1");
    putMapping(mappings, "m", "o", "f2");
    putMapping(mappings, "o", "p", "f3");
    putMapping(mappings, "p", "s", "f4");
    putMapping(mappings, "s", null, "f5");
    SortedMap<KeyExtent,Files> expected = new TreeMap<>();
    putMapping(expected, null, "m", "f0");
    putMapping(expected, "m", "s", "f1", "f2", "f3", "f4");
    putMapping(expected, "s", null, "f5");
    assertEquals(expected, BulkImport.mergeOverlapping(mappings));
  }
  @Test
  public void testMergeOverlappingTabletsMergedAway() {
    // simulate the tablets (m,p] and (p,s] being merged into (m,s] and that splitting into
    // (m,q],(q,s] while files are being examined
    SortedMap<KeyExtent,Files> mappings = new TreeMap<>();
    putMapping(mappings, null, "m", "f0");
    putMapping(mappings, "p", "s", "f1");
    putMapping(mappings, "m", "p", "f2");
    putMapping(mappings, "m", "s", "f3");
    putMapping(mappings, "q", "s", "f4");
    putMapping(mappings, "m", "q", "f5");
    putMapping(mappings, "s", null, "f6");
    // a merge cannot be reconciled, so this must fail
    assertThrows(RuntimeException.class, () -> BulkImport.mergeOverlapping(mappings));
  }
  /** Adds the extent (prevRow, endRow] with the given files to the mapping; null = open-ended. */
  private void putMapping(SortedMap<KeyExtent,Files> mappings, String prevRow, String endRow,
      String... fileNames) {
    Text end = endRow == null ? null : new Text(endRow);
    Text prev = prevRow == null ? null : new Text(prevRow);
    Files files = new Files();
    for (String fileName : fileNames) {
      files.add(new FileInfo(fileName, 2, 2));
    }
    mappings.put(new KeyExtent(TableId.of("42"), end, prev), files);
  }
}
| 9,403 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/clientImpl | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/clientImpl/bulk/BulkImportFilterInvalidTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.clientImpl.bulk;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.IOException;
import java.util.Arrays;
import org.apache.accumulo.core.file.FileOperations;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Tests for BulkImport.filterInvalid, which screens candidate bulk import files by extension
 * and file type.
 */
public class BulkImportFilterInvalidTest {
  FileSystem fs;
  Path testdir = new Path("testing");
  @BeforeEach
  public void setup() throws IOException {
    fs = FileSystem.getLocal(new Configuration());
    fs.mkdirs(testdir);
  }
  @AfterEach
  public void cleanup() throws IOException {
    fs.delete(testdir, true);
  }
  /** Creates an empty file under the test directory and returns its status. */
  private FileStatus createFile(String name) throws IOException {
    Path path = new Path(testdir, name);
    fs.createNewFile(path);
    return fs.getFileStatus(path);
  }
  @Test
  public void testFilterInvalidGood() throws IOException {
    FileStatus[] candidates = new FileStatus[FileOperations.getValidExtensions().size()];
    int count = 0;
    for (String extension : FileOperations.getValidExtensions()) {
      candidates[count++] = createFile("testFile." + extension);
    }
    // every candidate carries a valid extension, so all should survive the filter
    assertEquals(count, BulkImport.filterInvalid(candidates).size());
    assertArrayEquals(candidates, BulkImport.filterInvalid(candidates).toArray());
  }
  @Test
  public void testFilterInvalidFile() throws IOException {
    FileStatus[] candidates = new FileStatus[2];
    int count = 0;
    // one file with no extension at all, one with an unrecognized extension
    for (String name : Arrays.asList("testFile", "testFile.bad")) {
      candidates[count++] = createFile(name);
    }
    assertEquals(0, BulkImport.filterInvalid(candidates).size());
  }
  @Test
  public void testFilterInvalidwithDir() throws IOException {
    Path dirPath = new Path(testdir, "justadir");
    fs.mkdirs(dirPath);
    FileStatus[] candidates = {fs.getFileStatus(dirPath)};
    // directories are never valid bulk import candidates
    assertEquals(0, BulkImport.filterInvalid(candidates).size());
  }
  @Test
  public void testFilterInvalidwithWorkingFile() throws IOException {
    FileStatus[] candidates = new FileStatus[FileOperations.getBulkWorkingFiles().size()];
    int count = 0;
    for (String workingFile : FileOperations.getBulkWorkingFiles()) {
      candidates[count++] = createFile(workingFile);
    }
    // bulk working files are bookkeeping artifacts, not importable data files
    assertEquals(0, BulkImport.filterInvalid(candidates).size());
  }
  @Test
  public void testFilterInvalidMixGoodBad() throws IOException {
    FileStatus[] candidates = new FileStatus[FileOperations.getValidExtensions().size() + 1];
    int count = 0;
    for (String extension : FileOperations.getValidExtensions()) {
      candidates[count++] = createFile("testFile." + extension);
    }
    // append a single bad file at the end
    candidates[count] = createFile("testFile.bad");
    // everything but the trailing bad file should survive
    assertEquals(count, BulkImport.filterInvalid(candidates).size());
    assertArrayEquals(Arrays.copyOf(candidates, count),
        BulkImport.filterInvalid(candidates).toArray());
  }
}
| 9,404 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/clientImpl | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/clientImpl/lexicoder/ByteUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.clientImpl.lexicoder;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import org.junit.jupiter.api.Test;
/**
 * Tests for ByteUtils splitting and escaping of byte arrays around 0x00 delimiters.
 */
public class ByteUtilsTest {
  private final byte[] empty = new byte[0];
  private final byte[] noSplits = "nosplits".getBytes();
  private final byte[] splitAt5 = ("1234" + (char) 0x00 + "56789").getBytes();
  @Test
  public void testSplit() {
    // arrays without a delimiter come back as a single element
    byte[][] parts = ByteUtils.split(empty);
    assertEquals(1, parts.length);
    assertArrayEquals(empty, parts[0]);
    parts = ByteUtils.split(noSplits);
    assertEquals(1, parts.length);
    assertArrayEquals(noSplits, parts[0]);
    // a 0x00 byte separates the array into two pieces
    parts = ByteUtils.split(splitAt5);
    assertEquals(2, parts.length);
    assertArrayEquals("1234".getBytes(), parts[0]);
    assertArrayEquals("56789".getBytes(), parts[1]);
  }
  @Test
  public void testSplitWithOffset() {
    // starting just before the delimiter still yields two pieces, the first empty
    byte[][] parts = ByteUtils.split(splitAt5, 4, splitAt5.length - 4);
    assertEquals(2, parts.length);
    assertArrayEquals(empty, parts[0]);
    assertArrayEquals("56789".getBytes(), parts[1]);
    // starting right at the delimiter leaves a single piece
    parts = ByteUtils.split(splitAt5, 5, splitAt5.length - 5);
    assertEquals(1, parts.length);
    assertArrayEquals("56789".getBytes(), parts[0]);
    // shortening the length trims the trailing byte from the single piece
    parts = ByteUtils.split(splitAt5, 5, splitAt5.length - 6);
    assertEquals(1, parts.length);
    assertArrayEquals("5678".getBytes(), parts[0]);
  }
  @Test
  public void testEscape() {
    // escape then unescape must round-trip
    byte[] raw = {0x00, 0x01};
    assertArrayEquals(raw, ByteUtils.unescape(ByteUtils.escape(raw)));
    // input containing no escape sequences is returned unchanged
    byte[] untouched = {0x02, 0x02, 0x02};
    assertArrayEquals(untouched, ByteUtils.unescape(untouched));
  }
  @Test
  public void testIllegalArgument() {
    // a dangling escape byte used to trigger ArrayIndexOutOfBounds in the past
    byte[] truncated = {0x01};
    assertThrows(IllegalArgumentException.class, () -> ByteUtils.unescape(truncated));
  }
}
| 9,405 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/clientImpl | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/clientImpl/lexicoder/AbstractLexicoderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.clientImpl.lexicoder;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import org.apache.accumulo.core.client.lexicoder.AbstractLexicoder;
import org.apache.accumulo.core.client.lexicoder.LexicoderTest;
import org.apache.commons.lang3.ArrayUtils;
/**
* Assists in Testing classes that extend
* {@link org.apache.accumulo.core.client.lexicoder.AbstractEncoder}. It references methods not
* formally defined in the {@link org.apache.accumulo.core.client.lexicoder.Lexicoder} interface.
*
* @since 1.7.0
*/
public abstract class AbstractLexicoderTest extends LexicoderTest {
  /**
   * Checks plain decoding via the parent helper, then verifies the offset/length overload by
   * padding the encoding on both sides and decoding only the interior slice.
   */
  public static <T> void assertDecodes(AbstractLexicoder<T> lexicoder, T expected) {
    LexicoderTest.assertDecodes(lexicoder, expected);
    byte[] enc = lexicoder.encode(expected);
    assertOutOfBoundsFails(lexicoder, enc);
    // munge bytes at start and end, then use offset and length to decode
    byte[] padded = ArrayUtils.addAll(ArrayUtils.addAll(START_PAD, enc), END_PAD);
    assertEquals(expected, lexicoder.decode(padded, START_PAD.length, enc.length));
  }
  /** Byte-array variant of {@link #assertDecodes}; compares with assertEqualsB. */
  public void assertDecodesB(AbstractLexicoder<byte[]> lexicoder, byte[] expected) {
    super.assertDecodesB(lexicoder, expected);
    byte[] enc = lexicoder.encode(expected);
    assertOutOfBoundsFails(lexicoder, enc);
    // munge bytes at start and end, then use offset and length to decode
    byte[] padded = ArrayUtils.addAll(ArrayUtils.addAll(START_PAD, enc), END_PAD);
    assertEqualsB(expected, lexicoder.decode(padded, START_PAD.length, enc.length));
  }
  /** Asserts every illegal null/offset/length combination is rejected by decode. */
  protected static <T> void assertOutOfBoundsFails(AbstractLexicoder<T> lexicoder, byte[] encoded) {
    // decoding a null array should fail fast
    assertThrows(NullPointerException.class, () -> lexicoder.decode(null, 0, encoded.length),
        "Should throw on null bytes.");
    // each out-of-bounds offset/length combination must be rejected
    assertThrows(IllegalArgumentException.class,
        () -> lexicoder.decode(encoded, 0, encoded.length + 1),
        "Should throw on exceeding length.");
    assertThrows(IllegalArgumentException.class,
        () -> lexicoder.decode(encoded, -1, encoded.length), "Should throw on negative offset.");
    assertThrows(IllegalArgumentException.class, () -> lexicoder.decode(encoded, 0, -1),
        "Should throw on negative length.");
    assertThrows(IllegalArgumentException.class, () -> lexicoder.decode(encoded, 1, -1),
        "Should throw on negative length, even if (offset+len) is within bounds.");
  }
}
| 9,406 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/tabletserver | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/tabletserver/log/LogEntryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.tabletserver.log;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.nio.file.Path;
import java.util.List;
import java.util.Map.Entry;
import java.util.UUID;
import java.util.stream.Stream;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.metadata.schema.MetadataSchema;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import com.google.common.net.HostAndPort;
/**
 * Tests LogEntry construction, metadata round-tripping, equality, and file-path validation.
 */
public class LogEntryTest {
  final HostAndPort validHost = HostAndPort.fromParts("default", 8080);
  final UUID validUUID = UUID.randomUUID();
  final String validFilename = Path.of(validHost.toString(), validUUID.toString()).toString();
  @Test
  public void test() throws Exception {
    String uuid = UUID.randomUUID().toString();
    String filename = Path.of("default", uuid).toString();
    LogEntry logEntry = new LogEntry(filename);
    assertEquals(filename, logEntry.getFilePath());
    assertEquals(filename, logEntry.toString());
    assertEquals(new Text("log"), MetadataSchema.TabletsSection.LogColumnFamily.NAME);
    assertEquals(new Text("-/" + filename), logEntry.getColumnQualifier());
    // a metadata entry whose value carries the log entry's serialized form
    Key metaKey = new Key(new Text("1<"), new Text("log"), new Text("localhost:1234/default/foo"));
    var metaEntry = new Entry<Key,Value>() {
      @Override
      public Key getKey() {
        return metaKey;
      }
      @Override
      public Value getValue() {
        return logEntry.getValue();
      }
      @Override
      public Value setValue(Value value) {
        throw new UnsupportedOperationException();
      }
    };
    // a LogEntry reconstructed from the metadata entry should match the original
    LogEntry roundTripped = LogEntry.fromMetaWalEntry(metaEntry);
    assertEquals(logEntry.toString(), roundTripped.toString());
    assertEquals(uuid, logEntry.getUniqueID());
    assertEquals("-/" + filename, logEntry.getColumnQualifier().toString());
    assertEquals(new Value(filename), logEntry.getValue());
  }
  @Test
  public void testEquals() {
    LogEntry first = new LogEntry(validFilename);
    LogEntry second = new LogEntry(validFilename);
    // distinct instances built from the same path compare equal in every respect
    assertNotSame(first, second);
    assertEquals(first.toString(), second.toString());
    assertEquals(first.getColumnQualifier(), second.getColumnQualifier());
    assertEquals(first.getUniqueID(), second.getUniqueID());
    assertEquals(first.getValue(), second.getValue());
    assertEquals(first, second);
    assertEquals(first, first);
    assertEquals(second, second);
  }
  @Nested
  class ValidateFilePath {
    @Test
    public void testValidPaths() {
      Path base = Path.of(validHost.toString(), validUUID.toString());
      Path nested = Path.of("dir1", base.toString());
      Path deeplyNested = Path.of("dir2", nested.toString());
      // any depth of prefix directories is acceptable as long as tserver/UUID come last
      Stream.of(base, nested, deeplyNested).map(Path::toString)
          .forEach(goodPath -> assertDoesNotThrow(() -> new LogEntry(goodPath)));
    }
    @Test
    public void testBadPathLength() {
      // paths missing the tserver/UUID tail should be rejected
      for (String badFilePath : List.of("foo", "", validHost.toString())) {
        var iae = assertThrows(IllegalArgumentException.class, () -> new LogEntry(badFilePath));
        assertTrue(iae.getMessage().contains("The path should at least contain tserver/UUID."));
      }
    }
    @Test
    public void testInvalidHostPort() {
      final String badHostAndPort = "default:badPort";
      final Path badFilepathHostPort = Path.of(badHostAndPort, validUUID.toString());
      var iae = assertThrows(IllegalArgumentException.class,
          () -> new LogEntry(badFilepathHostPort.toString()));
      assertTrue(
          iae.getMessage().contains("Expected format: host:port. Found '" + badHostAndPort + "'"));
    }
    @Test
    public void testInvalidUUID() {
      final String badUUID = "badUUID";
      String filePathWithBadUUID = Path.of(validHost.toString(), badUUID).toString();
      var iae =
          assertThrows(IllegalArgumentException.class, () -> new LogEntry(filePathWithBadUUID));
      assertTrue(iae.getMessage().contains("Expected valid UUID. Found '" + badUUID + "'"));
    }
  }
}
| 9,407 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/rpc/SaslConnectionParamsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.rpc;
import static org.apache.accumulo.core.clientImpl.AuthenticationTokenIdentifier.createTAuthIdentifier;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import java.security.PrivilegedExceptionAction;
import java.util.Map;
import java.util.Properties;
import javax.security.sasl.Sasl;
import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
import org.apache.accumulo.core.client.security.tokens.KerberosToken;
import org.apache.accumulo.core.clientImpl.AuthenticationTokenIdentifier;
import org.apache.accumulo.core.clientImpl.DelegationTokenImpl;
import org.apache.accumulo.core.conf.ClientProperty;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.rpc.SaslConnectionParams.QualityOfProtection;
import org.apache.accumulo.core.rpc.SaslConnectionParams.SaslMechanism;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.security.UserGroupInformation;
import org.easymock.EasyMock;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
public class SaslConnectionParamsTest {

  private UserGroupInformation testUser;
  private String username;

  // Kerberos "primary" component used for the server principal in every test
  private static final String primary = "accumulo";

  @BeforeEach
  public void setup() {
    // Point the JVM at a fake KDC so UserGroupInformation can be switched into
    // kerberos mode without a real Kerberos environment.
    System.setProperty("java.security.krb5.realm", "accumulo");
    System.setProperty("java.security.krb5.kdc", "fake");
    Configuration conf = new Configuration(false);
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    testUser = UserGroupInformation.createUserForTesting("test_user", new String[0]);
    username = testUser.getUserName();
  }

  /**
   * Builds {@link SaslConnectionParams} from client properties with SASL enabled and the shared
   * server primary, for the given token.
   */
  private static SaslConnectionParams createSaslParams(AuthenticationToken token) {
    Properties props = new Properties();
    props.setProperty(ClientProperty.SASL_KERBEROS_SERVER_PRIMARY.getKey(), primary);
    props.setProperty(ClientProperty.SASL_ENABLED.getKey(), "true");
    return new SaslConnectionParams(props, token);
  }

  /**
   * Creates a delegation token with a fixed identifier; repeated calls produce equal tokens.
   */
  private static DelegationTokenImpl createDelegationToken() {
    return new DelegationTokenImpl(new byte[0], new AuthenticationTokenIdentifier(
        createTAuthIdentifier("user", 1, 10L, 20L, "instanceid")));
  }

  /**
   * Asserts the default SASL parameters (server primary, QOP, sasl properties and principal)
   * produced for the given token while running as the test user.
   */
  private void assertDefaultSaslParams(AuthenticationToken token) throws Exception {
    testUser.doAs((PrivilegedExceptionAction<Void>) () -> {
      final SaslConnectionParams saslParams = createSaslParams(token);
      assertEquals(primary, saslParams.getKerberosServerPrimary());
      final QualityOfProtection defaultQop =
          QualityOfProtection.get(Property.RPC_SASL_QOP.getDefaultValue());
      assertEquals(defaultQop, saslParams.getQualityOfProtection());
      Map<String,String> properties = saslParams.getSaslProperties();
      assertEquals(1, properties.size());
      assertEquals(defaultQop.getQuality(), properties.get(Sasl.QOP));
      assertEquals(username, saslParams.getPrincipal());
      return null;
    });
  }

  @Test
  public void testDefaultParamsAsClient() throws Exception {
    assertDefaultSaslParams(EasyMock.createMock(KerberosToken.class));
  }

  @Test
  public void testDefaultParams() throws Exception {
    // Same checks as testDefaultParamsAsClient; both names kept for existing test coverage.
    assertDefaultSaslParams(EasyMock.createMock(KerberosToken.class));
  }

  @Test
  public void testDelegationTokenImpl() throws Exception {
    final DelegationTokenImpl token = createDelegationToken();
    testUser.doAs((PrivilegedExceptionAction<Void>) () -> {
      final SaslConnectionParams saslParams = createSaslParams(token);
      assertEquals(primary, saslParams.getKerberosServerPrimary());
      final QualityOfProtection defaultQop =
          QualityOfProtection.get(Property.RPC_SASL_QOP.getDefaultValue());
      assertEquals(defaultQop, saslParams.getQualityOfProtection());
      // Delegation tokens authenticate via DIGEST-MD5 with a client callback handler
      assertEquals(SaslMechanism.DIGEST_MD5, saslParams.getMechanism());
      assertNotNull(saslParams.getCallbackHandler());
      assertEquals(SaslClientDigestCallbackHandler.class,
          saslParams.getCallbackHandler().getClass());
      Map<String,String> properties = saslParams.getSaslProperties();
      assertEquals(1, properties.size());
      assertEquals(defaultQop.getQuality(), properties.get(Sasl.QOP));
      assertEquals(username, saslParams.getPrincipal());
      return null;
    });
  }

  @Test
  public void testEquality() throws Exception {
    final KerberosToken token = EasyMock.createMock(KerberosToken.class);
    SaslConnectionParams params1 = testUser
        .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () -> createSaslParams(token));
    SaslConnectionParams params2 = testUser
        .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () -> createSaslParams(token));
    assertEquals(params1, params2);
    assertEquals(params1.hashCode(), params2.hashCode());

    // kerberos-token params differ from delegation-token params
    final DelegationTokenImpl delToken1 = createDelegationToken();
    SaslConnectionParams params3 = testUser
        .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () -> createSaslParams(delToken1));
    assertNotEquals(params1, params3);
    assertNotEquals(params1.hashCode(), params3.hashCode());
    assertNotEquals(params2, params3);
    assertNotEquals(params2.hashCode(), params3.hashCode());

    // two delegation tokens with identical identifiers yield equal params
    final DelegationTokenImpl delToken2 = createDelegationToken();
    SaslConnectionParams params4 = testUser
        .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () -> createSaslParams(delToken2));
    assertNotEquals(params1, params4);
    assertNotEquals(params1.hashCode(), params4.hashCode());
    assertNotEquals(params2, params4);
    assertNotEquals(params2.hashCode(), params4.hashCode());
    assertEquals(params3, params4);
    assertEquals(params3.hashCode(), params4.hashCode());
  }
}
| 9,408 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/rpc/TTimeoutTransportTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.rpc;
import static java.util.concurrent.TimeUnit.MINUTES;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.createMockBuilder;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.io.IOException;
import java.io.InputStream;
import java.net.Socket;
import java.net.SocketAddress;
import org.apache.thrift.transport.TTransportException;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@link TTimeoutTransport}.
 */
public class TTimeoutTransportTest {

  // Records the tuning expectations applied to every newly opened socket
  void expectedSocketSetup(Socket socket) throws IOException {
    socket.setSoLinger(false, 0);
    expectLastCall().once();
    socket.setTcpNoDelay(true);
    expectLastCall().once();
  }

  @Test
  public void testFailedSocketOpenIsClosed() throws IOException {
    SocketAddress address = createMock(SocketAddress.class);
    Socket socket = createMock(Socket.class);
    TTimeoutTransport transport = createMockBuilder(TTimeoutTransport.class)
        .addMockedMethod("openSocketChannel").createMock();

    // hand back the mocked socket
    expect(transport.openSocketChannel()).andReturn(socket).once();
    // solinger and tcpnodelay get applied first
    expectedSocketSetup(socket);
    // connecting to the address fails...
    socket.connect(address, 1);
    expectLastCall().andThrow(new IOException());
    // ...so the socket must be closed afterwards
    socket.close();

    replay(address, socket, transport);
    assertThrows(IOException.class, () -> transport.openSocket(address, 1));
    verify(address, socket, transport);
  }

  @Test
  public void testFailedInputStreamClosesSocket() throws IOException {
    long timeout = MINUTES.toMillis(2);
    SocketAddress address = createMock(SocketAddress.class);
    Socket socket = createMock(Socket.class);
    TTimeoutTransport transport = createMockBuilder(TTimeoutTransport.class)
        .addMockedMethod("openSocketChannel").addMockedMethod("wrapInputStream").createMock();

    // hand back the mocked socket, tune it, and connect successfully
    expect(transport.openSocketChannel()).andReturn(socket).once();
    expectedSocketSetup(socket);
    socket.connect(address, (int) timeout);
    expectLastCall().once();
    // wrapping the input stream fails, so the socket must be closed
    expect(transport.wrapInputStream(socket, timeout)).andThrow(new IOException());
    socket.close();

    replay(address, socket, transport);
    assertThrows(TTransportException.class, () -> transport.createInternal(address, timeout));
    verify(address, socket, transport);
  }

  @Test
  public void testFailedOutputStreamClosesSocket() throws IOException {
    long timeout = MINUTES.toMillis(2);
    SocketAddress address = createMock(SocketAddress.class);
    Socket socket = createMock(Socket.class);
    InputStream input = createMock(InputStream.class);
    TTimeoutTransport transport =
        createMockBuilder(TTimeoutTransport.class).addMockedMethod("openSocketChannel")
            .addMockedMethod("wrapInputStream").addMockedMethod("wrapOutputStream").createMock();

    // hand back the mocked socket, tune it, and connect successfully
    expect(transport.openSocketChannel()).andReturn(socket).once();
    expectedSocketSetup(socket);
    socket.connect(address, (int) timeout);
    expectLastCall().once();
    // the input stream wraps fine, but wrapping the output stream fails...
    expect(transport.wrapInputStream(socket, timeout)).andReturn(input);
    expect(transport.wrapOutputStream(socket, timeout)).andThrow(new IOException());
    // ...so the socket must be closed
    socket.close();

    replay(address, socket, transport);
    assertThrows(TTransportException.class, () -> transport.createInternal(address, timeout));
    verify(address, socket, transport);
  }
}
| 9,409 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/rpc/SaslClientDigestCallbackHandlerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.rpc;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.Test;
public class SaslClientDigestCallbackHandlerTest {

  @Test
  public void testEquality() {
    // Two handlers built from identical credentials must be equal and hash alike
    SaslClientDigestCallbackHandler first =
        new SaslClientDigestCallbackHandler("user", "mypass".toCharArray());
    SaslClientDigestCallbackHandler second =
        new SaslClientDigestCallbackHandler("user", "mypass".toCharArray());
    assertEquals(first, second);
    assertEquals(first.hashCode(), second.hashCode());
  }
}
| 9,410 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/rpc/ThriftUtilTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.rpc;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.apache.thrift.transport.TByteBuffer;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.layered.TFramedTransport;
import org.junit.jupiter.api.Test;
public class ThriftUtilTest {

  // TFramedTransport prefixes each frame with a 4-byte length header
  public static final int FRAME_HDR_SIZE = 4;
  public static final int MB1 = 1 * 1024 * 1024;
  public static final int MB10 = 10 * 1024 * 1024;
  public static final int MB100 = 100 * 1024 * 1024;
  public static final int GB = 1 * 1024 * 1024 * 1024;

  @Test
  public void testDefaultTFramedTransportFactory() throws TTransportException {
    // This test confirms that the default maxMessageSize in Thrift is 100MB
    // even when we set the frame size to be 1GB
    TByteBuffer underlyingTransport = new TByteBuffer(ByteBuffer.allocate(1024));
    TFramedTransport.Factory factory = new TFramedTransport.Factory(GB);
    TTransport framedTransport = factory.getTransport(underlyingTransport);
    // expected value first, per JUnit convention, so failures report correctly
    assertEquals(GB, framedTransport.getConfiguration().getMaxFrameSize());
    assertEquals(MB100, framedTransport.getConfiguration().getMaxMessageSize());
  }

  @Test
  public void testAccumuloTFramedTransportFactory() throws TTransportException {
    // This test confirms that our custom FramedTransportFactory sets the max
    // message size and max frame size to the value that we want.
    TByteBuffer underlyingTransport = new TByteBuffer(ByteBuffer.allocate(1024));
    AccumuloTFramedTransportFactory factory = new AccumuloTFramedTransportFactory(GB);
    TTransport framedTransport = factory.getTransport(underlyingTransport);
    assertEquals(GB, framedTransport.getConfiguration().getMaxFrameSize());
    assertEquals(GB, framedTransport.getConfiguration().getMaxMessageSize());
  }

  @Test
  public void testMessageSizeReadWriteSuccess() throws Exception {
    // This test creates an 10MB buffer in memory as the underlying transport, then
    // creates a TFramedTransport with a 1MB maxFrameSize and maxMessageSize. It then
    // writes 1MB - 4 bytes (to account for the frame header) to the transport and
    // reads the data back out.
    TByteBuffer underlyingTransport = new TByteBuffer(ByteBuffer.allocate(MB10));
    AccumuloTFramedTransportFactory factory = new AccumuloTFramedTransportFactory(MB1);
    TTransport framedTransport = factory.getTransport(underlyingTransport);
    assertEquals(MB1, framedTransport.getConfiguration().getMaxFrameSize());
    assertEquals(MB1, framedTransport.getConfiguration().getMaxMessageSize());
    byte[] writeBuf = new byte[MB1 - FRAME_HDR_SIZE];
    Arrays.fill(writeBuf, (byte) 1);
    framedTransport.write(writeBuf);
    framedTransport.flush();
    // exactly one full frame (payload + header) landed in the underlying buffer
    assertEquals(MB1, underlyingTransport.getByteBuffer().position());
    underlyingTransport.flip();
    assertEquals(0, underlyingTransport.getByteBuffer().position());
    assertEquals(MB1, underlyingTransport.getByteBuffer().limit());
    byte[] readBuf = new byte[MB1];
    framedTransport.read(readBuf, 0, MB1);
  }

  @Test
  public void testMessageSizeWriteFailure() throws Exception {
    // This test creates an 10MB buffer in memory as the underlying transport, then
    // creates a TFramedTransport with a 1MB maxFrameSize and maxMessageSize. It then
    // writes 1MB + 100 bytes to the transport, which fails as it's larger than the
    // configured frame and message size.
    TByteBuffer underlyingTransport = new TByteBuffer(ByteBuffer.allocate(MB10));
    AccumuloTFramedTransportFactory factory = new AccumuloTFramedTransportFactory(MB1);
    TTransport framedTransport = factory.getTransport(underlyingTransport);
    assertEquals(MB1, framedTransport.getConfiguration().getMaxFrameSize());
    assertEquals(MB1, framedTransport.getConfiguration().getMaxMessageSize());
    // Write more than 1MB to the TByteBuffer, it's possible to write more data
    // than allowed by the frame, it's enforced on the read.
    final int ourSize = MB1 + 100;
    byte[] writeBuf = new byte[ourSize];
    Arrays.fill(writeBuf, (byte) 1);
    framedTransport.write(writeBuf);
    framedTransport.flush();
    assertEquals(ourSize + FRAME_HDR_SIZE, underlyingTransport.getByteBuffer().position());
    underlyingTransport.flip();
    assertEquals(0, underlyingTransport.getByteBuffer().position());
    assertEquals(ourSize + FRAME_HDR_SIZE, underlyingTransport.getByteBuffer().limit());
    byte[] readBuf = new byte[ourSize];
    var e =
        assertThrows(TTransportException.class, () -> framedTransport.read(readBuf, 0, ourSize));
    assertEquals("Frame size (" + ourSize + ") larger than max length (" + MB1 + ")!",
        e.getMessage());
  }
}
| 9,411 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data/NamespaceIdTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.apache.accumulo.core.WithTestNames;
import org.apache.accumulo.core.clientImpl.Namespace;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tests the NamespaceId class, mainly the internal cache.
 */
public class NamespaceIdTest extends WithTestNames {

  private static final Logger LOG = LoggerFactory.getLogger(NamespaceIdTest.class);

  /**
   * Returns the true number of live entries in the NamespaceId cache.
   */
  private static long cacheCount() {
    // guava cache size() is approximate, and can include garbage-collected entries
    // so we iterate to get the actual cache size
    return NamespaceId.cache.asMap().entrySet().stream().count();
  }

  @Test
  public void testCacheNoDuplicates() {
    // the next line just preloads the built-ins, since they now exist in a separate class from
    // NamespaceId, and aren't preloaded when the NamespaceId class is referenced
    assertNotSame(Namespace.ACCUMULO.id(), Namespace.DEFAULT.id());

    String namespaceString = "namespace-" + testName();
    long initialSize = cacheCount();
    NamespaceId nsId = NamespaceId.of(namespaceString);
    assertEquals(initialSize + 1, cacheCount());
    assertEquals(namespaceString, nsId.canonical());

    // ensure duplicates are not created
    NamespaceId builtInNamespaceId = NamespaceId.of("+accumulo");
    assertSame(Namespace.ACCUMULO.id(), builtInNamespaceId);
    builtInNamespaceId = NamespaceId.of("+default");
    assertSame(Namespace.DEFAULT.id(), builtInNamespaceId);
    nsId = NamespaceId.of(namespaceString);
    assertEquals(initialSize + 1, cacheCount());
    assertEquals(namespaceString, nsId.canonical());
    NamespaceId nsId2 = NamespaceId.of(namespaceString);
    assertEquals(initialSize + 1, cacheCount());
    assertSame(nsId, nsId2);
  }

  @Test
  @Timeout(30)
  public void testCacheIncreasesAndDecreasesAfterGC() {
    long initialSize = cacheCount();
    assertTrue(initialSize < 20); // verify initial amount is reasonably low
    LOG.info("Initial cache size: {}", initialSize);
    LOG.info(NamespaceId.cache.asMap().toString());

    // add one and check increase
    String namespaceString = "namespace-" + testName();
    NamespaceId nsId = NamespaceId.of(namespaceString);
    assertEquals(initialSize + 1, cacheCount());
    assertEquals(namespaceString, nsId.canonical());

    // create a bunch more and throw them away; the runtime concatenation already yields
    // fresh, un-interned strings, so no explicit String copy is needed (matches TableIdTest)
    long preGCSize = 0;
    int i = 0;
    while ((preGCSize = cacheCount()) < 100) {
      NamespaceId.of("namespace" + i++);
    }
    LOG.info("Entries before System.gc(): {}", preGCSize);
    assertEquals(100, preGCSize);

    // repeatedly force GC until the observed cache count drops below its pre-GC size
    long postGCSize = preGCSize;
    while (postGCSize >= preGCSize) {
      TableIdTest.tryToGc();
      postGCSize = cacheCount();
      LOG.info("Entries after System.gc(): {}", postGCSize);
    }
  }
}
| 9,412 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data/TableIdTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import org.apache.accumulo.core.WithTestNames;
import org.apache.accumulo.core.metadata.MetadataTable;
import org.apache.accumulo.core.metadata.RootTable;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
/**
 * Tests the Table ID class, mainly the internal cache.
 */
public class TableIdTest extends WithTestNames {

  private static final Logger LOG = LoggerFactory.getLogger(TableIdTest.class);

  /**
   * Returns the true number of live entries in the TableId cache.
   */
  private static long cacheCount() {
    // guava cache size() is approximate, and can include garbage-collected entries
    // so we iterate to get the actual cache size
    return TableId.cache.asMap().entrySet().stream().count();
  }

  @Test
  public void testCacheNoDuplicates() {
    // the next line just preloads the built-ins, since they now exist in a separate class from
    // TableId, and aren't preloaded when the TableId class is referenced
    assertNotSame(RootTable.ID, MetadataTable.ID);

    String tableString = "table-" + testName();
    long initialSize = cacheCount();
    TableId table1 = TableId.of(tableString);
    assertEquals(initialSize + 1, cacheCount());
    assertEquals(tableString, table1.canonical());

    // ensure duplicates are not created
    TableId builtInTableId = TableId.of("!0");
    assertSame(MetadataTable.ID, builtInTableId);
    builtInTableId = TableId.of("+r");
    assertSame(RootTable.ID, builtInTableId);
    table1 = TableId.of(tableString);
    assertEquals(initialSize + 1, cacheCount());
    assertEquals(tableString, table1.canonical());
    TableId table2 = TableId.of(tableString);
    assertEquals(initialSize + 1, cacheCount());
    assertEquals(tableString, table2.canonical());
    assertSame(table1, table2);
  }

  @Test
  @Timeout(30)
  public void testCacheIncreasesAndDecreasesAfterGC() {
    long initialSize = cacheCount();
    assertTrue(initialSize < 20); // verify initial amount is reasonably low
    LOG.info("Initial cache size: {}", initialSize);
    LOG.info(TableId.cache.asMap().toString());

    // add one and check increase
    String tableString = "table-" + testName();
    TableId table1 = TableId.of(tableString);
    assertEquals(initialSize + 1, cacheCount());
    assertEquals(tableString, table1.canonical());

    // create a bunch more and throw them away
    long preGCSize = 0;
    int i = 0;
    while ((preGCSize = cacheCount()) < 100) {
      TableId.of("table" + i++);
    }
    LOG.info("Entries before System.gc(): {}", preGCSize);
    assertEquals(100, preGCSize);

    // repeatedly force GC until the observed cache count drops below its pre-GC size
    long postGCSize = preGCSize;
    while (postGCSize >= preGCSize) {
      tryToGc();
      postGCSize = cacheCount();
      LOG.info("Entries after System.gc(): {}", postGCSize);
    }
  }

  /**
   * Requests a GC and sleeps briefly to give the collector a chance to run. Also used by
   * NamespaceIdTest.
   */
  @SuppressFBWarnings(value = "DM_GC", justification = "gc is okay for test")
  static void tryToGc() {
    System.gc();
    try {
      Thread.sleep(100);
    } catch (InterruptedException e) {
      // restore the interrupt status before failing, per standard interrupt handling
      Thread.currentThread().interrupt();
      fail("Thread interrupted while waiting for GC");
    }
  }
}
| 9,413 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data/LoadPlanTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.stream.Collectors.toSet;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.HashSet;
import java.util.Set;
import org.apache.accumulo.core.data.LoadPlan.Destination;
import org.apache.accumulo.core.data.LoadPlan.RangeType;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
public class LoadPlanTest {

  // Expects that building a plan for "f1.rf" with the given range fails validation
  private static void assertInvalidRange(RangeType type, String begin, String end) {
    assertThrows(IllegalArgumentException.class,
        () -> LoadPlan.builder().loadFileTo("f1.rf", type, begin, end).build());
  }

  @Test
  public void testBadRange1() {
    assertInvalidRange(RangeType.TABLE, "a", "a");
  }

  @Test
  public void testBadRange2() {
    assertInvalidRange(RangeType.TABLE, "b", "a");
  }

  @Test
  public void testBadRange3() {
    assertInvalidRange(RangeType.FILE, "b", "a");
  }

  @Test
  public void testBadRange4() {
    assertInvalidRange(RangeType.FILE, null, "a");
  }

  @Test
  public void testBadRange5() {
    assertInvalidRange(RangeType.FILE, "a", null);
  }

  @Test
  public void testTypes() {
    // Exercise every loadFileTo row-type overload (String, byte[], Text), both range
    // types, null rows, and a nested plan merged in via addPlan()
    LoadPlan loadPlan = LoadPlan.builder().loadFileTo("f1.rf", RangeType.FILE, "1112", "1145")
        .loadFileTo("f2.rf", RangeType.FILE, "abc".getBytes(UTF_8), "def".getBytes(UTF_8))
        .loadFileTo("f3.rf", RangeType.FILE, new Text("368"), new Text("479"))
        .loadFileTo("f4.rf", RangeType.TABLE, null, "aaa")
        .loadFileTo("f5.rf", RangeType.TABLE, "yyy", null)
        .loadFileTo("f6.rf", RangeType.TABLE, null, "bbb".getBytes(UTF_8))
        .loadFileTo("f7.rf", RangeType.TABLE, "www".getBytes(UTF_8), null)
        .loadFileTo("f8.rf", RangeType.TABLE, null, new Text("ccc"))
        .loadFileTo("f9.rf", RangeType.TABLE, new Text("xxx"), null)
        .loadFileTo("fa.rf", RangeType.TABLE, "1138", "1147")
        .loadFileTo("fb.rf", RangeType.TABLE, "heg".getBytes(UTF_8), "klt".getBytes(UTF_8))
        .loadFileTo("fc.rf", RangeType.TABLE, new Text("agt"), new Text("ctt"))
        .addPlan(
            LoadPlan.builder().loadFileTo("fd.rf", RangeType.TABLE, (String) null, null).build())
        .build();

    Set<String> expected = Set.of("f1.rf:FILE:1112:1145", "f2.rf:FILE:abc:def",
        "f3.rf:FILE:368:479", "f4.rf:TABLE:null:aaa", "f5.rf:TABLE:yyy:null",
        "f6.rf:TABLE:null:bbb", "f7.rf:TABLE:www:null", "f8.rf:TABLE:null:ccc",
        "f9.rf:TABLE:xxx:null", "fa.rf:TABLE:1138:1147", "fb.rf:TABLE:heg:klt",
        "fc.rf:TABLE:agt:ctt", "fd.rf:TABLE:null:null");

    Set<String> actual = new HashSet<>();
    for (Destination dest : loadPlan.getDestinations()) {
      actual.add(toString(dest));
    }
    assertEquals(expected, actual);
  }

  // Renders a destination as fileName:rangeType:startRow:endRow; absent rows render as "null"
  private static String toString(Destination dest) {
    return String.join(":", dest.getFileName(), String.valueOf(dest.getRangeType()),
        toString(dest.getStartRow()), toString(dest.getEndRow()));
  }

  private static String toString(byte[] row) {
    return row == null ? null : new String(row, UTF_8);
  }
}
| 9,414 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data/KeyTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.accumulo.core.dataImpl.thrift.TKey;
import org.apache.accumulo.core.dataImpl.thrift.TKeyValue;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@link Key}: ordering semantics of the delete flag, data sharing vs copying in
 * constructors, equivalence of the String/Text/byte[] constructor families, Thrift round-trips,
 * and relative-key compression.
 *
 * <p>
 * Note: string literals are converted with the charset-less {@code getBytes()}; every literal
 * here is ASCII, so the resulting bytes are the same under any platform default charset.
 */
public class KeyTest {

  @Test
  public void testDeletedCompare() {
    // k1/k2 are not deleted, k3/k4 are; equal fields plus equal delete flags compare equal,
    // and a key flagged deleted sorts BEFORE the otherwise-identical non-deleted key.
    Key k1 = new Key("r1".getBytes(), "cf".getBytes(), "cq".getBytes(), new byte[0], 0, false);
    Key k2 = new Key("r1".getBytes(), "cf".getBytes(), "cq".getBytes(), new byte[0], 0, false);
    Key k3 = new Key("r1".getBytes(), "cf".getBytes(), "cq".getBytes(), new byte[0], 0, true);
    Key k4 = new Key("r1".getBytes(), "cf".getBytes(), "cq".getBytes(), new byte[0], 0, true);
    assertEquals(k1, k2);
    assertEquals(k3, k4);
    assertTrue(k1.compareTo(k3) > 0);
    assertTrue(k3.compareTo(k1) < 0);
  }

  @Test
  public void testCopyData() {
    // The last constructor argument selects copy semantics: copy=false shares the caller's
    // arrays (assertSame), copy=true makes defensive copies (assertNotSame). Either way the
    // two keys are equal.
    byte[] row = "r".getBytes();
    byte[] cf = "cf".getBytes();
    byte[] cq = "cq".getBytes();
    byte[] cv = "cv".getBytes();
    Key k1 = new Key(row, cf, cq, cv, 5L, false, false);
    Key k2 = new Key(row, cf, cq, cv, 5L, false, true);
    assertSame(row, k1.getRowBytes());
    assertSame(cf, k1.getColFamily());
    assertSame(cq, k1.getColQualifier());
    assertSame(cv, k1.getColVisibility());
    assertSame(row, k1.getRowData().getBackingArray());
    assertSame(cf, k1.getColumnFamilyData().getBackingArray());
    assertSame(cq, k1.getColumnQualifierData().getBackingArray());
    assertSame(cv, k1.getColumnVisibilityData().getBackingArray());
    assertNotSame(row, k2.getRowBytes());
    assertNotSame(cf, k2.getColFamily());
    assertNotSame(cq, k2.getColQualifier());
    assertNotSame(cv, k2.getColVisibility());
    assertNotSame(row, k2.getRowData().getBackingArray());
    assertNotSame(cf, k2.getColumnFamilyData().getBackingArray());
    assertNotSame(cq, k2.getColumnQualifierData().getBackingArray());
    assertNotSame(cv, k2.getColumnVisibilityData().getBackingArray());
    assertEquals(k1, k2);
  }

  @Test
  public void testCopyDataWithByteArrayConstructors() {
    // The plain byte[] constructors must copy their inputs rather than share them.
    byte[] row = "r".getBytes();
    byte[] cf = "cf".getBytes();
    byte[] cq = "cq".getBytes();
    byte[] cv = "cv".getBytes();
    byte[] empty = "".getBytes();

    Key kRow = new Key(row);
    Key kRowcolFam = new Key(row, cf);
    Key kRowcolFamColQual = new Key(row, cf, cq);
    Key kRowcolFamColQualColVis = new Key(row, cf, cq, cv);
    Key kRowcolFamColQualColVisTimeStamp = new Key(row, cf, cq, cv, 5L);

    // test row constructor
    assertNotSameByteArray(kRow, row, empty, empty, empty);
    // test row, column family constructor
    assertNotSameByteArray(kRowcolFam, row, cf, empty, empty);
    // test row, column family, column qualifier constructor
    assertNotSameByteArray(kRowcolFamColQual, row, cf, cq, empty);
    // test row, column family, column qualifier, column visibility constructor
    assertNotSameByteArray(kRowcolFamColQualColVis, row, cf, cq, cv);
    // test row, column family, column qualifier, column visibility, timestamp constructor
    assertNotSameByteArray(kRowcolFamColQualColVisTimeStamp, row, cf, cq, cv);
  }

  /**
   * Asserts that each non-empty component of {@code key} is equal in content to, but not the
   * same array instance as, the corresponding input array (i.e. it was defensively copied).
   */
  private void assertNotSameByteArray(Key key, byte[] row, byte[] cf, byte[] cq, byte[] cv) {
    if (key.getRowBytes().length != 0) {
      assertNotSame(row, key.getRowBytes());
      assertNotSame(row, key.getRowData().getBackingArray());
      assertArrayEquals(row, key.getRowBytes());
    }
    if (key.getColFamily().length != 0) {
      assertNotSame(cf, key.getColFamily());
      assertNotSame(cf, key.getColumnFamilyData().getBackingArray());
      assertArrayEquals(cf, key.getColFamily());
    }
    if (key.getColQualifier().length != 0) {
      assertNotSame(cq, key.getColQualifier());
      assertNotSame(cq, key.getColumnQualifierData().getBackingArray());
      assertArrayEquals(cq, key.getColQualifier());
    }
    if (key.getColVisibility().length != 0) {
      assertNotSame(cv, key.getColVisibility());
      assertNotSame(cv, key.getColumnVisibilityData().getBackingArray());
      assertArrayEquals(cv, key.getColVisibility());
    }
  }

  @Test
  public void testTextConstructorByteArrayConversion() {
    // Keys built from Text values must expose the same bytes as the Text contents.
    Text rowText = new Text("r");
    Text cfText = new Text("cf");
    Text cqText = new Text("cq");
    Text cvText = new Text("cv");

    // make Keys from Text parameters
    Key kRow = new Key(rowText);
    Key kRowColFam = new Key(rowText, cfText);
    Key kRowColFamColQual = new Key(rowText, cfText, cqText);
    Key kRowColFamColQualColVis = new Key(rowText, cfText, cqText, cvText);
    Key kRowColFamColQualColVisTimeStamp = new Key(rowText, cfText, cqText, cvText, 5L);

    // test row constructor
    assertTextValueConversionToByteArray(kRow);
    // test row, column family constructor
    assertTextValueConversionToByteArray(kRowColFam);
    // test row, column family, column qualifier constructor
    assertTextValueConversionToByteArray(kRowColFamColQual);
    // test row, column family, column qualifier, column visibility constructor
    assertTextValueConversionToByteArray(kRowColFamColQualColVis);
    // test row, column family, column qualifier, column visibility, timestamp constructor
    assertTextValueConversionToByteArray(kRowColFamColQualColVisTimeStamp);
  }

  /**
   * Asserts that each non-empty component of {@code key} holds the byte-array equivalent of
   * the Text value the key was constructed from.
   */
  private void assertTextValueConversionToByteArray(Key key) {
    byte[] row = "r".getBytes();
    byte[] cf = "cf".getBytes();
    byte[] cq = "cq".getBytes();
    byte[] cv = "cv".getBytes();
    // show Text values submitted in constructor
    // are converted to byte array containing
    // the same value
    if (key.getRowBytes().length != 0) {
      assertArrayEquals(row, key.getRowBytes());
    }
    if (key.getColFamily().length != 0) {
      assertArrayEquals(cf, key.getColFamily());
    }
    if (key.getColQualifier().length != 0) {
      assertArrayEquals(cq, key.getColQualifier());
    }
    if (key.getColVisibility().length != 0) {
      assertArrayEquals(cv, key.getColVisibility());
    }
  }

  @Test
  public void testString() {
    // Each String-based constructor must build a key equal to its Text-based twin.
    Key k1 = new Key("r1");
    Key k2 = new Key(new Text("r1"));
    assertEquals(k2, k1);
    k1 = new Key("r1", "cf1");
    k2 = new Key(new Text("r1"), new Text("cf1"));
    assertEquals(k2, k1);
    k1 = new Key("r1", "cf2", "cq2");
    k2 = new Key(new Text("r1"), new Text("cf2"), new Text("cq2"));
    assertEquals(k2, k1);
    k1 = new Key("r1", "cf2", "cq2", "cv");
    k2 = new Key(new Text("r1"), new Text("cf2"), new Text("cq2"), new Text("cv"));
    assertEquals(k2, k1);
    k1 = new Key("r1", "cf2", "cq2", "cv", 89);
    k2 = new Key(new Text("r1"), new Text("cf2"), new Text("cq2"), new Text("cv"), 89);
    assertEquals(k2, k1);
    k1 = new Key("r1", "cf2", "cq2", 89);
    k2 = new Key(new Text("r1"), new Text("cf2"), new Text("cq2"), 89);
    assertEquals(k2, k1);
  }

  @Test
  public void testVisibilityFollowingKey() {
    // followingKey through COLVIS appends a 0x00 byte (rendered "%00;") to the visibility.
    Key k = new Key("r", "f", "q", "v");
    assertEquals(k.followingKey(PartialKey.ROW_COLFAM_COLQUAL_COLVIS).toString(),
        "r f:q [v%00;] " + Long.MAX_VALUE + " false");
  }

  @Test
  public void testVisibilityGetters() {
    // Raw visibility Text and the parsed ColumnVisibility expression must agree.
    Key k = new Key("r", "f", "q", "v1|(v2&v3)");
    Text expression = k.getColumnVisibility();
    ColumnVisibility parsed = k.getColumnVisibilityParsed();
    assertEquals(expression, new Text(parsed.getExpression()));
  }

  @Test
  public void testThrift() {
    // Thrift serialization round-trip preserves equality.
    Key k = new Key("r1", "cf2", "cq2", "cv");
    TKey tk = k.toThrift();
    Key k2 = new Key(tk);
    assertEquals(k, k2);
  }

  @Test
  public void testThrift_Invalid() {
    // A TKey with a null row is rejected when converting back to a Key.
    Key k = new Key("r1", "cf2", "cq2", "cv");
    TKey tk = k.toThrift();
    tk.setRow((byte[]) null);
    assertThrows(IllegalArgumentException.class, () -> new Key(tk));
  }

  @Test
  public void testCompressDecompress() {
    // Relative-key compression followed by decompression must reproduce every key, including
    // runs sharing prefixes, exact duplicates, and empty keys.
    List<KeyValue> kvs = new ArrayList<>();
    kvs.add(new KeyValue(new Key(), new byte[] {}));
    kvs.add(new KeyValue(new Key("r"), new byte[] {}));
    kvs.add(new KeyValue(new Key("r", "cf"), new byte[] {}));
    kvs.add(new KeyValue(new Key("r2", "cf"), new byte[] {}));
    kvs.add(new KeyValue(new Key("r", "cf", "cq"), new byte[] {}));
    kvs.add(new KeyValue(new Key("r2", "cf2", "cq"), new byte[] {}));
    kvs.add(new KeyValue(new Key("r", "cf", "cq", "cv"), new byte[] {}));
    kvs.add(new KeyValue(new Key("r2", "cf2", "cq2", "cv"), new byte[] {}));
    kvs.add(new KeyValue(new Key("r2", "cf2", "cq2", "cv"), new byte[] {}));
    kvs.add(new KeyValue(new Key(), new byte[] {}));
    List<TKeyValue> tkvs = Key.compress(kvs);
    Key.decompress(tkvs);
    assertEquals(kvs.size(), tkvs.size());
    Iterator<KeyValue> kvi = kvs.iterator();
    Iterator<TKeyValue> tkvi = tkvs.iterator();
    while (kvi.hasNext()) {
      KeyValue kv = kvi.next();
      TKeyValue tkv = tkvi.next();
      assertEquals(kv.getKey(), new Key(tkv.getKey()));
    }
  }

  @Test
  public void testBytesText() {
    // Keys built from raw byte arrays (including non-printable bytes) must equal keys built
    // from Text wrappers of the same bytes, across every constructor pairing.
    byte[] row = {1};
    Key bytesRowKey = new Key(row);
    Key textRowKey = new Key(new Text(row));
    assertEquals(bytesRowKey, textRowKey);
    byte[] colFamily = {0, 1};
    Key bytesColFamilyKey = new Key(row, colFamily);
    Key textColFamilyKey = new Key(new Text(row), new Text(colFamily));
    assertEquals(bytesColFamilyKey, textColFamilyKey);
    byte[] colQualifier = {0, 0, 1};
    Key bytesColQualifierKey = new Key(row, colFamily, colQualifier);
    Key textColQualifierKey = new Key(new Text(row), new Text(colFamily), new Text(colQualifier));
    assertEquals(bytesColQualifierKey, textColQualifierKey);
    byte[] colVisibility = {0, 0, 0, 1};
    Key bytesColVisibilityKey = new Key(row, colFamily, colQualifier, colVisibility);
    Key textColVisibilityKey = new Key(new Text(row), new Text(colFamily), new Text(colQualifier),
        new Text(colVisibility));
    assertEquals(bytesColVisibilityKey, textColVisibilityKey);
    long ts = 0L;
    Key bytesTSKey = new Key(row, colFamily, colQualifier, colVisibility, ts);
    Key textTSKey = new Key(new Text(row), new Text(colFamily), new Text(colQualifier),
        new Text(colVisibility), ts);
    assertEquals(bytesTSKey, textTSKey);
    Key bytesTSKey2 = new Key(row, ts);
    Key textTSKey2 = new Key(new Text(row), ts);
    assertEquals(bytesTSKey2, textTSKey2);
    Key bytesTSKey3 = new Key(row, colFamily, colQualifier, ts);
    Key testTSKey3 = new Key(new Text(row), new Text(colFamily), new Text(colQualifier), ts);
    assertEquals(bytesTSKey3, testTSKey3);
    ColumnVisibility colVisibility2 = new ColumnVisibility("v1");
    Key bytesColVisibilityKey2 = new Key(row, colFamily, colQualifier, colVisibility2, ts);
    Key textColVisibilityKey2 =
        new Key(new Text(row), new Text(colFamily), new Text(colQualifier), colVisibility2, ts);
    assertEquals(bytesColVisibilityKey2, textColVisibilityKey2);
  }
}
| 9,415 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data/ValueTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.nio.ByteBuffer;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@link Value}: constructor null-handling, copy vs share semantics,
 * Writable serialization, and the equals/hashCode/compareTo contracts.
 */
public class ValueTest {

  private static final byte[] toBytes(String s) {
    return s.getBytes(UTF_8);
  }

  // Shared fixture bytes; DATABUFF wraps the same content in a ByteBuffer.
  private static final byte[] DATA = toBytes("data");
  private static final ByteBuffer DATABUFF = ByteBuffer.allocate(DATA.length);
  static {
    DATABUFF.put(DATA);
  }

  @BeforeEach
  public void setUp() {
    // Reset the shared buffer's position so each test reads it from the start.
    DATABUFF.rewind();
  }

  @Test
  public void testDefault() {
    Value v = new Value();
    assertEquals(0, v.get().length);
  }

  @Test
  public void testNullBytesConstructor() {
    assertThrows(NullPointerException.class, () -> new Value((byte[]) null));
  }

  @Test
  public void testNullCopyConstructor() {
    assertThrows(NullPointerException.class, () -> new Value((Value) null));
  }

  @Test
  public void testNullByteBufferConstructor() {
    assertThrows(NullPointerException.class, () -> new Value((ByteBuffer) null));
  }

  @Test
  public void testNullSet() {
    Value v = new Value();
    assertThrows(NullPointerException.class, () -> v.set(null));
  }

  @Test
  public void testByteArray() {
    // The byte[] constructor shares the caller's array (no defensive copy).
    Value v = new Value(DATA);
    assertArrayEquals(DATA, v.get());
    assertSame(DATA, v.get());
  }

  @Test
  public void testByteArrayCopy() {
    // With copy=true the constructor must make its own copy of the array.
    Value v = new Value(DATA, true);
    assertArrayEquals(DATA, v.get());
    assertNotSame(DATA, v.get());
  }

  @Test
  public void testByteBuffer() {
    Value v = new Value(DATABUFF);
    assertArrayEquals(DATA, v.get());
  }

  @Test
  public void testValueCopy() {
    // Copy constructor must read its data through get()/getSize() of the source Value.
    Value ov = createMock(Value.class);
    expect(ov.get()).andReturn(DATA);
    expect(ov.getSize()).andReturn(4);
    replay(ov);
    Value v = new Value(ov);
    assertArrayEquals(DATA, v.get());
  }

  @Test
  public void testByteArrayOffsetLength() {
    Value v = new Value(DATA, 0, 4);
    assertArrayEquals(DATA, v.get());
  }

  @Test
  public void testSet() {
    // set() shares the given array, same as the byte[] constructor.
    Value v = new Value();
    v.set(DATA);
    assertArrayEquals(DATA, v.get());
    assertSame(DATA, v.get());
  }

  @Test
  public void testCopy() {
    // copy() must duplicate the given array, unlike set().
    Value v = new Value();
    v.copy(DATA);
    assertArrayEquals(DATA, v.get());
    assertNotSame(DATA, v.get());
  }

  @Test
  public void testGetSize() {
    Value v = new Value(DATA);
    assertEquals(DATA.length, v.getSize());
  }

  @Test
  public void testGetSizeDefault() {
    Value v = new Value();
    assertEquals(0, v.getSize());
  }

  @Test
  public void testWriteRead() throws Exception {
    // Writable round-trip: write() then readFields() reproduces the bytes.
    Value v = new Value(DATA);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    v.write(dos);
    dos.close();
    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    DataInputStream dis = new DataInputStream(bais);
    Value v2 = new Value();
    v2.readFields(dis);
    dis.close();
    assertArrayEquals(DATA, v2.get());
  }

  @Test
  public void testHashCode() {
    // Equal values must produce equal hash codes.
    Value v1 = new Value(DATA);
    Value v2 = new Value(DATA);
    assertEquals(v1.hashCode(), v2.hashCode());
  }

  @Test
  public void testCompareTo() {
    // Byte-lexicographic ordering: "data" < "datb" < "datc"; equal content compares 0.
    Value v1 = new Value(DATA);
    Value v2 = new Value(toBytes("datb"));
    assertTrue(v1.compareTo(v2) < 0);
    assertTrue(v2.compareTo(v1) > 0);
    Value v1a = new Value(DATA);
    assertEquals(0, v1.compareTo(v1a));
    Value v3 = new Value(toBytes("datc"));
    assertTrue(v2.compareTo(v3) < 0);
    assertTrue(v1.compareTo(v3) < 0);
  }

  @Test
  public void testEquals() {
    // equals() is reflexive, symmetric, and content-based.
    Value v1 = new Value(DATA);
    assertEquals(v1, v1);
    Value v2 = new Value(DATA);
    assertEquals(v1, v2);
    assertEquals(v2, v1);
    Value v3 = new Value(toBytes("datb"));
    assertNotEquals(v1, v3);
  }

  @Test
  public void testString() {
    Value v1 = new Value("abc");
    Value v2 = new Value("abc".getBytes(UTF_8));
    assertEquals(v2, v1);
  }

  @Test
  public void testNullCharSequence() {
    assertThrows(NullPointerException.class, () -> new Value((CharSequence) null));
  }

  @Test
  public void testText() {
    Value v1 = new Value(new Text("abc"));
    Value v2 = new Value("abc");
    assertEquals(v2, v1);
  }

  @Test
  public void testNullText() {
    assertThrows(NullPointerException.class, () -> new Value((Text) null));
  }
}
| 9,416 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data/ConstraintViolationSummaryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.Test;
/**
 * Verifies the exact rendering of {@link ConstraintViolationSummary#toString()}, including how
 * null fields appear. ("constrainClass" is the literal label the production code emits.)
 */
public class ConstraintViolationSummaryTest {

  @Test
  public void testToString() {
    // A fully populated summary renders every field.
    String expected = "ConstraintViolationSummary(constrainClass:fooClass,"
        + " violationCode:1, violationDescription:fooDescription,"
        + " numberOfViolatingMutations:100)";
    assertEquals(expected,
        new ConstraintViolationSummary("fooClass", (short) 1, "fooDescription", 100L).toString());

    // Null class name and description are rendered as the literal "null".
    String expectedWithNulls = "ConstraintViolationSummary(constrainClass:null,"
        + " violationCode:2, violationDescription:null, numberOfViolatingMutations:101)";
    assertEquals(expectedWithNulls,
        new ConstraintViolationSummary(null, (short) 2, null, 101L).toString());
  }
}
| 9,417 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data/ConditionalMutationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.List;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@link ConditionalMutation}: every constructor variant must carry the row and the
 * conditions through unchanged, condition order must matter for equality, and a
 * ConditionalMutation must never equal a plain {@link Mutation}.
 */
public class ConditionalMutationTest {

  private static final byte[] ROW = "row".getBytes(UTF_8);
  private static final String FAMILY = "family";
  private static final String QUALIFIER = "qualifier";
  private static final String QUALIFIER2 = "qualifier2";
  private static final String QUALIFIER3 = "qualifier3";

  private Condition c1, c2;
  private ConditionalMutation cm;

  @BeforeEach
  public void setUp() {
    c1 = new Condition(FAMILY, QUALIFIER);
    c2 = new Condition(FAMILY, QUALIFIER2);
    assertNotEquals(c1, c2);
    cm = new ConditionalMutation(ROW, c1, c2);
  }

  /** Asserts that {@code actual} carries exactly c1 followed by c2. */
  private void assertHasStandardConditions(ConditionalMutation actual) {
    List<Condition> conditions = actual.getConditions();
    assertEquals(2, conditions.size());
    assertEquals(c1, conditions.get(0));
    assertEquals(c2, conditions.get(1));
  }

  @Test
  public void testConstruction_ByteArray() {
    assertArrayEquals(ROW, cm.getRow());
    assertHasStandardConditions(cm);
  }

  @Test
  public void testConstruction_ByteArray_StartAndLength() {
    // Only one byte ("o") of the row array should be used.
    cm = new ConditionalMutation(ROW, 1, 1, c1, c2);
    assertArrayEquals("o".getBytes(UTF_8), cm.getRow());
    assertHasStandardConditions(cm);
  }

  @Test
  public void testConstruction_Text() {
    cm = new ConditionalMutation(new Text(ROW), c1, c2);
    assertArrayEquals(ROW, cm.getRow());
    assertHasStandardConditions(cm);
  }

  @Test
  public void testConstruction_CharSequence() {
    cm = new ConditionalMutation(new String(ROW, UTF_8), c1, c2);
    assertArrayEquals(ROW, cm.getRow());
    assertHasStandardConditions(cm);
  }

  @Test
  public void testConstruction_ByteSequence() {
    cm = new ConditionalMutation(new ArrayByteSequence(ROW), c1, c2);
    assertArrayEquals(ROW, cm.getRow());
    assertHasStandardConditions(cm);
  }

  @Test
  public void testCopyConstructor() {
    ConditionalMutation copy = new ConditionalMutation(cm);
    assertArrayEquals(cm.getRow(), copy.getRow());
    assertEquals(cm.getConditions(), copy.getConditions());
  }

  @Test
  public void testAddCondition() {
    // A condition added after construction is appended to the end of the list.
    Condition c3 = new Condition(FAMILY, QUALIFIER3);
    cm.addCondition(c3);
    List<Condition> conditions = cm.getConditions();
    assertEquals(3, conditions.size());
    assertEquals(c1, conditions.get(0));
    assertEquals(c2, conditions.get(1));
    assertEquals(c3, conditions.get(2));
  }

  @Test
  public void testEquals() {
    // reflexivity
    assertTrue(cm.equals(cm));
    // non-nullity
    assertNotEquals(cm, (Object) null);
    // symmetry
    ConditionalMutation same = new ConditionalMutation(ROW, c1, c2);
    assertTrue(cm.equals(same));
    assertTrue(same.equals(cm));
    // a different row breaks equality
    ConditionalMutation differentRow = new ConditionalMutation("row2".getBytes(UTF_8), c1, c2);
    assertFalse(cm.equals(differentRow));
    // condition order matters
    ConditionalMutation reordered = new ConditionalMutation(ROW, c2, c1);
    assertNotEquals(cm.getConditions(), reordered.getConditions());
    assertFalse(cm.equals(reordered));
  }

  @Test
  public void testEquals_Mutation() {
    // Never equal to a plain Mutation, in either direction.
    Mutation plain = new Mutation(ROW);
    assertFalse(plain.equals(cm));
    assertFalse(cm.equals(plain));
  }

  @Test
  public void testHashcode() {
    // Equal mutations must produce equal hash codes.
    ConditionalMutation equalMutation = new ConditionalMutation(ROW, c1, c2);
    assertTrue(cm.equals(equalMutation));
    assertEquals(equalMutation.hashCode(), cm.hashCode());
  }
}
| 9,418 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data/MutationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
import org.apache.accumulo.core.dataImpl.thrift.TMutation;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
public class MutationTest {
/**
 * Renders each byte as lowercase hex via {@code %x} (no zero padding; ASCII test data always
 * yields two digits per byte).
 */
private static String toHexString(byte[] ba) {
  StringBuilder hex = new StringBuilder();
  for (int i = 0; i < ba.length; i++) {
    hex.append(String.format("%x", ba[i]));
  }
  return hex.toString();
}
/*
 * Test constructing a Mutation using a byte array. The byte array returned as the row is
 * converted to a hexadecimal string for easy comparison.
 */
@Test
public void testByteConstructor() {
  // Use an explicit charset so the expected hex bytes never depend on the platform default
  // (the file already imports UTF_8 statically and uses it elsewhere).
  Mutation m = new Mutation("0123456789".getBytes(UTF_8));
  assertEquals("30313233343536373839", toHexString(m.getRow()));
}
@Test
public void testLimitedByteConstructor() {
  // Offset/length constructor: start at index 2, take 5 bytes ("23456").
  // Explicit charset avoids platform-default-charset dependence.
  Mutation m = new Mutation("0123456789".getBytes(UTF_8), 2, 5);
  assertEquals("3233343536", toHexString(m.getRow()));
}
/** A single put without a timestamp yields one update with no timestamp set. */
@Test
public void test1() {
  Mutation m = new Mutation(new Text("r1"));
  m.put(new Text("cf1"), new Text("cq1"), new Value("v1"));
  List<ColumnUpdate> updates = m.getUpdates();
  assertEquals(1, updates.size());
  ColumnUpdate cu = updates.get(0);
  // Decode with an explicit charset rather than the platform default.
  assertEquals("cf1", new String(cu.getColumnFamily(), UTF_8));
  assertEquals("cq1", new String(cu.getColumnQualifier(), UTF_8));
  assertEquals("", new String(cu.getColumnVisibility(), UTF_8));
  assertFalse(cu.hasTimestamp());
}
/**
 * Two puts — one with a timestamp, one without — survive a serialization round-trip via
 * {@link #cloneMutation(Mutation)} with all fields intact.
 */
@Test
public void test2() throws IOException {
  Mutation m = new Mutation(new Text("r1"));
  m.put(new Text("cf1"), new Text("cq1"), new Value("v1"));
  m.put(new Text("cf2"), new Text("cq2"), 56, new Value("v2"));

  // Check the in-memory mutation. Decode all byte[] fields with an explicit charset
  // rather than the platform default.
  List<ColumnUpdate> updates = m.getUpdates();
  assertEquals(2, updates.size());
  assertEquals("r1", new String(m.getRow(), UTF_8));
  ColumnUpdate cu = updates.get(0);
  assertEquals("cf1", new String(cu.getColumnFamily(), UTF_8));
  assertEquals("cq1", new String(cu.getColumnQualifier(), UTF_8));
  assertEquals("", new String(cu.getColumnVisibility(), UTF_8));
  assertFalse(cu.hasTimestamp());
  cu = updates.get(1);
  assertEquals("cf2", new String(cu.getColumnFamily(), UTF_8));
  assertEquals("cq2", new String(cu.getColumnQualifier(), UTF_8));
  assertEquals("", new String(cu.getColumnVisibility(), UTF_8));
  assertTrue(cu.hasTimestamp());
  assertEquals(56, cu.getTimestamp());

  // Round-trip through serialization and re-check everything.
  m = cloneMutation(m);
  assertEquals("r1", new String(m.getRow(), UTF_8));
  updates = m.getUpdates();
  assertEquals(2, updates.size());
  cu = updates.get(0);
  assertEquals("cf1", new String(cu.getColumnFamily(), UTF_8));
  assertEquals("cq1", new String(cu.getColumnQualifier(), UTF_8));
  assertEquals("", new String(cu.getColumnVisibility(), UTF_8));
  assertFalse(cu.hasTimestamp());
  cu = updates.get(1);
  assertEquals("cf2", new String(cu.getColumnFamily(), UTF_8));
  assertEquals("cq2", new String(cu.getColumnQualifier(), UTF_8));
  assertEquals("", new String(cu.getColumnVisibility(), UTF_8));
  assertTrue(cu.hasTimestamp());
  assertEquals(56, cu.getTimestamp());
}
/**
 * Serializes the mutation with {@code write()} and reads it back with {@code readFields()},
 * returning the deserialized copy. Uses try-with-resources so the streams are always closed.
 *
 * @param m mutation to round-trip
 * @return a new Mutation deserialized from m's serialized form
 * @throws IOException if serialization fails (byte-array streams make this unlikely)
 */
private Mutation cloneMutation(Mutation m) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  try (DataOutputStream dos = new DataOutputStream(baos)) {
    m.write(dos);
  }
  Mutation clone = new Mutation();
  try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
    clone.readFields(dis);
  }
  return clone;
}
/**
 * Values sized around {@code VALUE_SIZE_COPY_CUTOFF} (the copy-vs-reference threshold) must
 * survive repeated serialization round-trips unchanged.
 */
@Test
public void test3() throws IOException {
  Mutation m = new Mutation(new Text("r1"));
  // Five values straddling the cutoff, each filled with its index byte.
  for (int i = 0; i < 5; i++) {
    int len = Mutation.VALUE_SIZE_COPY_CUTOFF - 2 + i;
    byte[] val = new byte[len];
    for (int j = 0; j < len; j++) {
      val[j] = (byte) i;
    }
    m.put(new Text("cf" + i), new Text("cq" + i), new Value(val));
  }
  // Verify, round-trip, and verify again — three times.
  for (int r = 0; r < 3; r++) {
    // Decode with an explicit charset rather than the platform default.
    assertEquals("r1", new String(m.getRow(), UTF_8));
    List<ColumnUpdate> updates = m.getUpdates();
    assertEquals(5, updates.size());
    for (int i = 0; i < 5; i++) {
      ColumnUpdate cu = updates.get(i);
      assertEquals("cf" + i, new String(cu.getColumnFamily(), UTF_8));
      assertEquals("cq" + i, new String(cu.getColumnQualifier(), UTF_8));
      assertEquals("", new String(cu.getColumnVisibility(), UTF_8));
      assertFalse(cu.hasTimestamp());
      byte[] val = cu.getValue();
      int len = Mutation.VALUE_SIZE_COPY_CUTOFF - 2 + i;
      assertEquals(len, val.length);
      for (int j = 0; j < len; j++) {
        assertEquals(i, val[j]);
      }
    }
    m = cloneMutation(m);
  }
}
/** Shorthand: wraps a String in a new {@link Text}. */
private Text nt(String s) {
  return new Text(s);
}
/** Shorthand: wraps a String in a new {@link Value}. */
private Value nv(String s) {
  return new Value(s);
}
/**
 * Every {@code family()} overload of the fluent builder (byte[], ByteBuffer, CharSequence,
 * Text) must produce the same mutation as the classic {@code put()}.
 */
@Test
public void testAtFamilyTypes() {
  final String fam = "f16bc";
  final String qual = "q1pm2";
  final String val = "v8672194923750";

  Mutation expected = new Mutation("row5");
  expected.put(fam, qual, val);

  // Test all family methods, keeping qual and val constant as Strings
  // fam: byte[]
  Mutation actual = new Mutation("row5");
  actual.at().family(fam.getBytes(UTF_8)).qualifier(qual).put(val);
  assertEquals(expected, actual);

  // fam: ByteBuffer
  final ByteBuffer bbFam = ByteBuffer.wrap(fam.getBytes(UTF_8));
  final int bbFamStartPos = bbFam.position();
  actual = new Mutation("row5");
  actual.at().family(bbFam).qualifier(qual).put(val);
  assertEquals(expected, actual);
  // the builder must not consume the buffer: its position is unchanged by the call
  assertEquals(bbFamStartPos, bbFam.position());

  // fam: CharSequence (String implementation)
  actual = new Mutation("row5");
  actual.at().family(fam).qualifier(qual).put(val);
  assertEquals(expected, actual);

  // fam: Text
  actual = new Mutation("row5");
  actual.at().family(new Text(fam)).qualifier(qual).put(val);
  assertEquals(expected, actual);
}
/**
 * Every {@code qualifier()} overload of the fluent builder (byte[], ByteBuffer, CharSequence,
 * Text) must produce the same mutation as the classic {@code put()}.
 */
@Test
public void testAtQualifierTypes() {
  final String fam = "f16bc";
  final String qual = "q1pm2";
  final String val = "v8672194923750";

  Mutation expected = new Mutation("row5");
  expected.put(fam, qual, val);

  // Test all qualifier methods, keeping fam and val constant as Strings
  // qual: byte[]
  Mutation actual = new Mutation("row5");
  actual.at().family(fam).qualifier(qual.getBytes(UTF_8)).put(val);
  assertEquals(expected, actual);

  // qual: ByteBuffer
  final ByteBuffer bbQual = ByteBuffer.wrap(qual.getBytes(UTF_8));
  final int bbQualStartPos = bbQual.position();
  actual = new Mutation("row5");
  actual.at().family(fam).qualifier(bbQual).put(val);
  assertEquals(expected, actual);
  // the builder must not consume the buffer: its position is unchanged by the call
  assertEquals(bbQualStartPos, bbQual.position());

  // qual: CharSequence (String implementation)
  actual = new Mutation("row5");
  actual.at().family(fam).qualifier(qual).put(val);
  assertEquals(expected, actual);

  // qual: Text
  actual = new Mutation("row5");
  actual.at().family(fam).qualifier(new Text(qual)).put(val);
  assertEquals(expected, actual);
}
/**
 * Every {@code visibility()} overload of the fluent builder (byte[], ByteBuffer, CharSequence,
 * ColumnVisibility, Text) must produce the same mutation as the classic {@code put()}.
 */
@Test
public void testAtVisiblityTypes() {
  final byte[] fam = "f16bc".getBytes(UTF_8);
  final byte[] qual = "q1pm2".getBytes(UTF_8);
  final ColumnVisibility vis = new ColumnVisibility("v35x2");
  final byte[] val = "v8672194923750".getBytes(UTF_8);

  Mutation expected = new Mutation("row5");
  expected.put(fam, qual, vis, val);

  // Test all visibility methods, keeping fam, qual, and val constant as byte arrays
  // vis: byte[]
  Mutation actual = new Mutation("row5");
  actual.at().family(fam).qualifier(qual).visibility(vis.getExpression()).put(val);
  assertEquals(expected, actual);

  // vis: ByteBuffer
  final ByteBuffer bbVis = ByteBuffer.wrap(vis.getExpression());
  final int bbVisStartPos = bbVis.position();
  actual = new Mutation("row5");
  actual.at().family(fam).qualifier(qual).visibility(bbVis).put(val);
  assertEquals(expected, actual);
  // the builder must not consume the buffer: its position is unchanged by the call
  assertEquals(bbVisStartPos, bbVis.position());

  // vis: CharSequence (String implementation)
  actual = new Mutation("row5");
  actual.at().family(fam).qualifier(qual).visibility(new String(vis.getExpression())).put(val);
  assertEquals(expected, actual);

  // vis: ColumnVisibility
  actual = new Mutation("row5");
  actual.at().family(fam).qualifier(qual).visibility(vis).put(val);
  assertEquals(expected, actual);

  // vis: Text
  actual = new Mutation("row5");
  actual.at().family(fam).qualifier(qual).visibility(new Text(vis.getExpression())).put(val);
  assertEquals(expected, actual);
}
/** The fluent {@code timestamp()} step must match the classic timestamped {@code put()}. */
@Test
public void testAtTimestampTypes() {
  final String fam = "f16bc";
  final String qual = "q1pm2";
  final long ts = 324324L;
  final String val = "v8672194923750";

  Mutation expected = new Mutation("row5");
  expected.put(fam, qual, ts, val);

  // Test timestamp method, keeping fam and val constant as Strings
  Mutation actual = new Mutation("row5");
  actual.at().family(fam).qualifier(qual).timestamp(ts).put(val);
  assertEquals(expected, actual);
}
@Test
public void testAtPutTypes() {
final String fam = "f16bc";
final String qual = "q1pm2";
final String val = "v8672194923750";
Mutation expected = new Mutation("row5");
expected.put(fam, qual, val);
// Test all pull methods, keeping fam and qual,constant as Strings
// put: byte[]
Mutation actual = new Mutation("row5");
actual.at().family(fam).qualifier(qual).put(val.getBytes(UTF_8));
assertEquals(expected, actual);
// put: ByteBuffer
final ByteBuffer bbVal = ByteBuffer.wrap(val.getBytes(UTF_8));
final int bbValStartPos = bbVal.position();
actual = new Mutation("row5");
actual.at().family(fam).qualifier(qual).put(bbVal);
assertEquals(expected, actual);
// make sure the ByteBuffer last byte filled in the buffer (its position) is same as before the
// API call
assertEquals(bbValStartPos, bbVal.position());
// put: CharSequence (String implementation)
actual = new Mutation("row5");
actual.at().family(fam).qualifier(qual).put(val);
assertEquals(expected, actual);
// put: Text
actual = new Mutation("row5");
actual.at().family(fam).qualifier(qual).put(val);
assertEquals(expected, actual);
// put: Value
actual = new Mutation("row5");
actual.at().family(fam).qualifier(qual).put(new Value(val));
assertEquals(expected, actual);
}
@Test
public void testFluentPutNull() {
final String fam = "f16bc";
final String qual = "q1pm2";
final String val = "v8672194923750";
Mutation expected = new Mutation("row5");
expected.put(fam, qual, val);
Mutation actual = new Mutation("row5");
actual.at().family(fam).qualifier(qual).put(val.getBytes());
assertEquals(expected, actual);
assertEquals(34, actual.numBytes());
assertThrows(IllegalStateException.class,
() -> actual.at().family(fam).qualifier(qual).put("test2"));
}
  /**
   * Verifies the fluent API with a value just past VALUE_SIZE_COPY_CUTOFF, and that the mutation
   * reports the expected serialized size for this row/family/qualifier/value combination.
   */
  @Test
  public void testFluentPutLarge() {
    byte[] largeVal = new byte[Mutation.VALUE_SIZE_COPY_CUTOFF + 13];
    Arrays.fill(largeVal, (byte) 3);
    Mutation m = new Mutation("row123");
    m.at().family("fam").qualifier("qual").put(largeVal);
    assertEquals(32800, m.numBytes());
  }
@Test
public void testAtDelete() {
final String fam = "f16bc";
final String qual = "q1pm2";
Mutation expected = new Mutation("row5");
expected.putDelete(fam, qual);
Mutation actual = new Mutation("row5");
actual.at().family(fam).qualifier(qual).delete();
assertEquals(expected, actual);
}
  /**
   * Exercises every Text-based put/putDelete overload (no extras, visibility only, timestamp only,
   * visibility + timestamp) and verifies each resulting ColumnUpdate.
   */
  @Test
  public void testPuts() {
    Mutation m = new Mutation(new Text("r1"));
    m.put(nt("cf1"), nt("cq1"), nv("v1"));
    m.put(nt("cf2"), nt("cq2"), new ColumnVisibility("cv2"), nv("v2"));
    m.put(nt("cf3"), nt("cq3"), 3L, nv("v3"));
    m.put(nt("cf4"), nt("cq4"), new ColumnVisibility("cv4"), 4L, nv("v4"));
    m.putDelete(nt("cf5"), nt("cq5"));
    m.putDelete(nt("cf6"), nt("cq6"), new ColumnVisibility("cv6"));
    m.putDelete(nt("cf7"), nt("cq7"), 7L);
    m.putDelete(nt("cf8"), nt("cq8"), new ColumnVisibility("cv8"), 8L);
    assertEquals(8, m.size());
    List<ColumnUpdate> updates = m.getUpdates();
    // size must be unchanged by the serialization that getUpdates() triggers
    assertEquals(8, m.size());
    assertEquals(8, updates.size());
    verifyColumnUpdate(updates.get(0), "cf1", "cq1", "", 0L, false, false, "v1");
    verifyColumnUpdate(updates.get(1), "cf2", "cq2", "cv2", 0L, false, false, "v2");
    verifyColumnUpdate(updates.get(2), "cf3", "cq3", "", 3L, true, false, "v3");
    verifyColumnUpdate(updates.get(3), "cf4", "cq4", "cv4", 4L, true, false, "v4");
    verifyColumnUpdate(updates.get(4), "cf5", "cq5", "", 0L, false, true, "");
    verifyColumnUpdate(updates.get(5), "cf6", "cq6", "cv6", 0L, false, true, "");
    verifyColumnUpdate(updates.get(6), "cf7", "cq7", "", 7L, true, true, "");
    verifyColumnUpdate(updates.get(7), "cf8", "cq8", "cv8", 8L, true, true, "");
  }
  /**
   * Exercises every String-family put/putDelete overload taking a Value, mirroring
   * {@link #testPuts()} but with CharSequence columns.
   */
  @Test
  public void testPutsString() {
    Mutation m = new Mutation("r1");
    m.put("cf1", "cq1", nv("v1"));
    m.put("cf2", "cq2", new ColumnVisibility("cv2"), nv("v2"));
    m.put("cf3", "cq3", 3L, nv("v3"));
    m.put("cf4", "cq4", new ColumnVisibility("cv4"), 4L, nv("v4"));
    m.putDelete("cf5", "cq5");
    m.putDelete("cf6", "cq6", new ColumnVisibility("cv6"));
    m.putDelete("cf7", "cq7", 7L);
    m.putDelete("cf8", "cq8", new ColumnVisibility("cv8"), 8L);
    assertEquals(8, m.size());
    List<ColumnUpdate> updates = m.getUpdates();
    // size must be unchanged by the serialization that getUpdates() triggers
    assertEquals(8, m.size());
    assertEquals(8, updates.size());
    verifyColumnUpdate(updates.get(0), "cf1", "cq1", "", 0L, false, false, "v1");
    verifyColumnUpdate(updates.get(1), "cf2", "cq2", "cv2", 0L, false, false, "v2");
    verifyColumnUpdate(updates.get(2), "cf3", "cq3", "", 3L, true, false, "v3");
    verifyColumnUpdate(updates.get(3), "cf4", "cq4", "cv4", 4L, true, false, "v4");
    verifyColumnUpdate(updates.get(4), "cf5", "cq5", "", 0L, false, true, "");
    verifyColumnUpdate(updates.get(5), "cf6", "cq6", "cv6", 0L, false, true, "");
    verifyColumnUpdate(updates.get(6), "cf7", "cq7", "", 7L, true, true, "");
    verifyColumnUpdate(updates.get(7), "cf8", "cq8", "cv8", 8L, true, true, "");
  }
@Test
public void testPutsStringString() {
Mutation m = new Mutation("r1");
m.put("cf1", "cq1", "v1");
m.put("cf2", "cq2", new ColumnVisibility("cv2"), "v2");
m.put("cf3", "cq3", 3L, "v3");
m.put("cf4", "cq4", new ColumnVisibility("cv4"), 4L, "v4");
m.putDelete("cf5", "cq5");
m.putDelete("cf6", "cq6", new ColumnVisibility("cv6"));
m.putDelete("cf7", "cq7", 7L);
m.putDelete("cf8", "cq8", new ColumnVisibility("cv8"), 8L);
assertEquals(8, m.size());
assertEquals("r1", new String(m.getRow()));
List<ColumnUpdate> updates = m.getUpdates();
assertEquals(8, m.size());
assertEquals(8, updates.size());
verifyColumnUpdate(updates.get(0), "cf1", "cq1", "", 0L, false, false, "v1");
verifyColumnUpdate(updates.get(1), "cf2", "cq2", "cv2", 0L, false, false, "v2");
verifyColumnUpdate(updates.get(2), "cf3", "cq3", "", 3L, true, false, "v3");
verifyColumnUpdate(updates.get(3), "cf4", "cq4", "cv4", 4L, true, false, "v4");
verifyColumnUpdate(updates.get(4), "cf5", "cq5", "", 0L, false, true, "");
verifyColumnUpdate(updates.get(5), "cf6", "cq6", "cv6", 0L, false, true, "");
verifyColumnUpdate(updates.get(6), "cf7", "cq7", "", 7L, true, true, "");
verifyColumnUpdate(updates.get(7), "cf8", "cq8", "cv8", 8L, true, true, "");
}
@Test
public void testByteArrays() {
Mutation m = new Mutation("r1".getBytes());
m.put("cf1".getBytes(), "cq1".getBytes(), "v1".getBytes());
m.put("cf2".getBytes(), "cq2".getBytes(), new ColumnVisibility("cv2"), "v2".getBytes());
m.put("cf3".getBytes(), "cq3".getBytes(), 3L, "v3".getBytes());
m.put("cf4".getBytes(), "cq4".getBytes(), new ColumnVisibility("cv4"), 4L, "v4".getBytes());
m.putDelete("cf5".getBytes(), "cq5".getBytes());
m.putDelete("cf6".getBytes(), "cq6".getBytes(), new ColumnVisibility("cv6"));
m.putDelete("cf7".getBytes(), "cq7".getBytes(), 7L);
m.putDelete("cf8".getBytes(), "cq8".getBytes(), new ColumnVisibility("cv8"), 8L);
assertEquals(8, m.size());
List<ColumnUpdate> updates = m.getUpdates();
assertEquals(8, m.size());
assertEquals(8, updates.size());
verifyColumnUpdate(updates.get(0), "cf1", "cq1", "", 0L, false, false, "v1");
verifyColumnUpdate(updates.get(1), "cf2", "cq2", "cv2", 0L, false, false, "v2");
verifyColumnUpdate(updates.get(2), "cf3", "cq3", "", 3L, true, false, "v3");
verifyColumnUpdate(updates.get(3), "cf4", "cq4", "cv4", 4L, true, false, "v4");
verifyColumnUpdate(updates.get(4), "cf5", "cq5", "", 0L, false, true, "");
verifyColumnUpdate(updates.get(5), "cf6", "cq6", "cv6", 0L, false, true, "");
verifyColumnUpdate(updates.get(6), "cf7", "cq7", "", 7L, true, true, "");
verifyColumnUpdate(updates.get(7), "cf8", "cq8", "cv8", 8L, true, true, "");
}
  /**
   * Test for regression on bug 3422. If a {@link Mutation} object is reused for multiple calls to
   * readFields, the mutation would previously be "locked in" to the first set of column updates
   * (and value lengths). Hadoop input formats reuse objects when reading, so if Mutations are used
   * with an input format (or as the input to a combiner or reducer) then they will be used in this
   * fashion.
   */
  @Test
  public void testMultipleReadFieldsCalls() throws IOException {
    // Create test mutations and write them to a byte output stream
    Mutation m1 = new Mutation("row1");
    m1.put("cf1.1", "cq1.1", new ColumnVisibility("A|B"), "val1.1");
    m1.put("cf1.2", "cq1.2", new ColumnVisibility("C|D"), "val1.2");
    // a value past VALUE_SIZE_COPY_CUTOFF exercises the large-value code path
    byte[] val1_3 = new byte[Mutation.VALUE_SIZE_COPY_CUTOFF + 3];
    Arrays.fill(val1_3, (byte) 3);
    m1.put("cf1.3", "cq1.3", new ColumnVisibility("E|F"), new String(val1_3));
    int size1 = m1.size();
    long nb1 = m1.numBytes();
    Mutation m2 = new Mutation("row2");
    byte[] val2 = new byte[Mutation.VALUE_SIZE_COPY_CUTOFF + 2];
    Arrays.fill(val2, (byte) 2);
    m2.put("cf2", "cq2", new ColumnVisibility("G|H"), 1234, new String(val2));
    int size2 = m2.size();
    long nb2 = m2.numBytes();
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(bos);
    m1.write(dos);
    m2.write(dos);
    dos.close();
    // Now read the mutations back in from the byte array, making sure to
    // reuse the same mutation object, and make sure everything is correct.
    ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
    DataInputStream dis = new DataInputStream(bis);
    Mutation m = new Mutation();
    m.readFields(dis);
    assertEquals("row1", new String(m.getRow()));
    assertEquals(size1, m.size());
    assertEquals(nb1, m.numBytes());
    assertEquals(3, m.getUpdates().size());
    verifyColumnUpdate(m.getUpdates().get(0), "cf1.1", "cq1.1", "A|B", 0L, false, false, "val1.1");
    verifyColumnUpdate(m.getUpdates().get(1), "cf1.2", "cq1.2", "C|D", 0L, false, false, "val1.2");
    verifyColumnUpdate(m.getUpdates().get(2), "cf1.3", "cq1.3", "E|F", 0L, false, false,
        new String(val1_3));
    // Reuse the same mutation object (which is what happens in the hadoop framework
    // when objects are read by an input format)
    m.readFields(dis);
    assertEquals("row2", new String(m.getRow()));
    assertEquals(size2, m.size());
    assertEquals(nb2, m.numBytes());
    assertEquals(1, m.getUpdates().size());
    verifyColumnUpdate(m.getUpdates().get(0), "cf2", "cq2", "G|H", 1234L, true, false,
        new String(val2));
  }
  /**
   * Asserts that a single {@link ColumnUpdate} carries the expected family, qualifier, visibility,
   * delete flag, and value. The timestamp is only compared when {@code timeSet} is true; otherwise
   * only the hasTimestamp() flag itself is checked.
   */
  private void verifyColumnUpdate(ColumnUpdate cu, String cf, String cq, String cv, long ts,
      boolean timeSet, boolean deleted, String val) {
    assertEquals(cf, new String(cu.getColumnFamily()));
    assertEquals(cq, new String(cu.getColumnQualifier()));
    assertEquals(cv, new String(cu.getColumnVisibility()));
    assertEquals(timeSet, cu.hasTimestamp());
    if (timeSet) {
      assertEquals(ts, cu.getTimestamp());
    }
    assertEquals(deleted, cu.isDeleted());
    assertEquals(val, new String(cu.getValue()));
  }
  /**
   * Verifies that puts applied to a Mutation before readFields() are discarded: after
   * deserialization the mutation must reflect only the serialized data (m1's contents), not the
   * puts previously made on m2.
   */
  @Test
  public void test4() throws Exception {
    Mutation m1 = new Mutation(new Text("r1"));
    m1.put(nt("cf1"), nt("cq1"), nv("v1"));
    m1.put(nt("cf2"), nt("cq2"), new ColumnVisibility("cv2"), nv("v2"));
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(bos);
    m1.write(dos);
    dos.close();
    // m2 gets its own puts before deserializing m1's bytes into it
    Mutation m2 = new Mutation(new Text("r2"));
    m2.put(nt("cf3"), nt("cq3"), nv("v3"));
    m2.put(nt("cf4"), nt("cq4"), new ColumnVisibility("cv2"), nv("v4"));
    ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
    DataInputStream dis = new DataInputStream(bis);
    // used to be a bug where puts done before readFields would be seen
    // after readFields
    m2.readFields(dis);
    assertEquals("r1", new String(m2.getRow()));
    assertEquals(2, m2.getUpdates().size());
    assertEquals(2, m2.size());
    verifyColumnUpdate(m2.getUpdates().get(0), "cf1", "cq1", "", 0L, false, false, "v1");
    verifyColumnUpdate(m2.getUpdates().get(1), "cf2", "cq2", "cv2", 0L, false, false, "v2");
  }
Mutation convert(OldMutation old) throws IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(bos);
old.write(dos);
dos.close();
ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
DataInputStream dis = new DataInputStream(bis);
Mutation m = new Mutation();
m.readFields(dis);
dis.close();
return m;
}
  /**
   * Serializes an OldMutation, reads it back, converts it to a new Mutation, then checks that the
   * new serialization format is smaller than the old one — down to the exact encoded bytes. Do not
   * change the literals here; they pin the on-disk format.
   */
  @Test
  public void testNewSerialization() throws Exception {
    // write an old mutation
    OldMutation m2 = new OldMutation("r1");
    m2.put("cf1", "cq1", "v1");
    m2.put("cf2", "cq2", new ColumnVisibility("cv2"), "v2");
    m2.putDelete("cf3", "cq3");
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(bos);
    m2.write(dos);
    dos.close();
    long oldSize = dos.size();
    ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
    DataInputStream dis = new DataInputStream(bis);
    m2.readFields(dis);
    dis.close();
    // check it
    assertEquals("r1", new String(m2.getRow()));
    assertEquals(3, m2.getUpdates().size());
    assertEquals(3, m2.size());
    verifyColumnUpdate(m2.getUpdates().get(0), "cf1", "cq1", "", 0L, false, false, "v1");
    verifyColumnUpdate(m2.getUpdates().get(1), "cf2", "cq2", "cv2", 0L, false, false, "v2");
    verifyColumnUpdate(m2.getUpdates().get(2), "cf3", "cq3", "", 0L, false, true, "");
    // the converted (new-format) mutation must carry identical data
    Mutation m1 = convert(m2);
    assertEquals("r1", new String(m1.getRow()));
    assertEquals(3, m1.getUpdates().size());
    assertEquals(3, m1.size());
    verifyColumnUpdate(m1.getUpdates().get(0), "cf1", "cq1", "", 0L, false, false, "v1");
    verifyColumnUpdate(m1.getUpdates().get(1), "cf2", "cq2", "cv2", 0L, false, false, "v2");
    verifyColumnUpdate(m1.getUpdates().get(2), "cf3", "cq3", "", 0L, false, true, "");
    // compare serialized sizes using a long row so the fixed overhead is visible
    Text exampleRow = new Text(" 123456789 123456789 123456789 123456789 123456789");
    int exampleLen = exampleRow.getLength();
    m1 = new Mutation(exampleRow);
    m1.put("", "", "");
    bos = new ByteArrayOutputStream();
    dos = new DataOutputStream(bos);
    m1.write(dos);
    dos.close();
    long newSize = dos.size();
    assertTrue(newSize < oldSize);
    assertEquals(10, newSize - exampleLen);
    assertEquals(68, oldSize - exampleLen);
    // I am converting to integer to avoid comparing floats which are inaccurate
    assertEquals(14705, (int) (((newSize - exampleLen) * 100. / (oldSize - exampleLen)) * 1000));
    // dump the serialized bytes as hex, grouped four bytes at a time
    StringBuilder sb = new StringBuilder();
    byte[] ba = bos.toByteArray();
    for (int i = 0; i < bos.size(); i += 4) {
      for (int j = i; j < bos.size() && j < i + 4; j++) {
        sb.append(String.format("%02x", ba[j]));
      }
      sb.append(" ");
    }
    assertEquals("80322031 32333435 36373839 20313233 34353637"
        + " 38392031 32333435 36373839 20313233 34353637"
        + " 38392031 32333435 36373839 06000000 00000001 ", sb.toString());
  }
@Test
public void testReserialize() throws Exception {
// test reading in a new mutation from an old mutation and reserializing the new mutation...
// this was failing
OldMutation om = new OldMutation("r1");
om.put("cf1", "cq1", "v1");
om.put("cf2", "cq2", new ColumnVisibility("cv2"), "v2");
om.putDelete("cf3", "cq3");
StringBuilder bigVal = new StringBuilder();
for (int i = 0; i < 100000; i++) {
bigVal.append('a');
}
om.put("cf2", "big", bigVal);
Mutation m1 = convert(om);
ByteArrayOutputStream bos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(bos);
m1.write(dos);
dos.close();
Mutation m2 = new Mutation();
ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
DataInputStream dis = new DataInputStream(bis);
m2.readFields(dis);
assertEquals("r1", new String(m1.getRow()));
assertEquals(4, m2.getUpdates().size());
assertEquals(4, m2.size());
verifyColumnUpdate(m2.getUpdates().get(0), "cf1", "cq1", "", 0L, false, false, "v1");
verifyColumnUpdate(m2.getUpdates().get(1), "cf2", "cq2", "cv2", 0L, false, false, "v2");
verifyColumnUpdate(m2.getUpdates().get(2), "cf3", "cq3", "", 0L, false, true, "");
verifyColumnUpdate(m2.getUpdates().get(3), "cf2", "big", "", 0L, false, false,
bigVal.toString());
}
// populate for testInitialBufferSizesEquals method
private static void populate(Mutation... muts) {
for (Mutation m : muts) {
m.put("cf1", "cq1", "v1");
m.put("cf1", "cq1", new ColumnVisibility("A&B"), "v2");
m.put("cf1", "cq1", 3, "v3");
m.put("cf1", "cq1", new ColumnVisibility("A&B&C"), 4, "v4");
m.putDelete("cf2", "cf3");
m.putDelete("cf2", "cf4", 3);
m.putDelete("cf2", "cf4", new ColumnVisibility("A&B&C"), 3);
}
}
  /**
   * Verifies that the initial buffer size passed to the various Mutation constructors does not
   * affect equality: identically-populated mutations compare equal regardless of how they were
   * constructed.
   */
  @Test
  public void testInitialBufferSizesEquals() {
    // m1 uses CharSequence constructor
    Mutation m1 = new Mutation("r1");
    // m2 uses a different buffer size
    Mutation m2 = new Mutation("r1", 4242);
    // m3 uses Text constructor
    Mutation m3 = new Mutation(new Text("r1"));
    // m4 uses a different buffer size
    Mutation m4 = new Mutation(new Text("r1"), 4242);
    // m5 uses bytes constructor with offset/length
    byte[] r1Bytes = "r1".getBytes(UTF_8);
    Mutation m5 = new Mutation(r1Bytes);
    // m6 uses a different buffer size
    Mutation m6 = new Mutation(r1Bytes, 4242);
    // m7 uses bytes constructor with offset/length
    Mutation m7 = new Mutation(r1Bytes, 0, r1Bytes.length);
    // m8 uses a different buffer size
    Mutation m8 = new Mutation(r1Bytes, 0, r1Bytes.length, 4242);
    Mutation[] muts = {m1, m2, m3, m4, m5, m6, m7, m8};
    populate(muts);
    for (Mutation m : muts) {
      assertEquals(m1, m);
    }
  }
@Test
public void testEquals() {
Mutation m1 = new Mutation("r1");
m1.put("cf1", "cq1", "v1");
m1.put("cf1", "cq1", new ColumnVisibility("A&B"), "v2");
m1.put("cf1", "cq1", 3, "v3");
m1.put("cf1", "cq1", new ColumnVisibility("A&B&C"), 4, "v4");
m1.putDelete("cf2", "cf3");
m1.putDelete("cf2", "cf4", 3);
m1.putDelete("cf2", "cf4", new ColumnVisibility("A&B&C"), 3);
// m2 has same data as m1
Mutation m2 = new Mutation("r1");
m2.put("cf1", "cq1", "v1");
m2.put("cf1", "cq1", new ColumnVisibility("A&B"), "v2");
m2.put("cf1", "cq1", 3, "v3");
m2.put("cf1", "cq1", new ColumnVisibility("A&B&C"), 4, "v4");
m2.putDelete("cf2", "cf3");
m2.putDelete("cf2", "cf4", 3);
m2.putDelete("cf2", "cf4", new ColumnVisibility("A&B&C"), 3);
// m3 has different row than m1
Mutation m3 = new Mutation("r2");
m3.put("cf1", "cq1", "v1");
m3.put("cf1", "cq1", new ColumnVisibility("A&B"), "v2");
m3.put("cf1", "cq1", 3, "v3");
m3.put("cf1", "cq1", new ColumnVisibility("A&B&C"), 4, "v4");
m3.putDelete("cf2", "cf3");
m3.putDelete("cf2", "cf4", 3);
m3.putDelete("cf2", "cf4", new ColumnVisibility("A&B&C"), 3);
// m4 has a different column than m1
Mutation m4 = new Mutation("r1");
m4.put("cf2", "cq1", "v1");
m4.put("cf1", "cq1", new ColumnVisibility("A&B"), "v2");
m4.put("cf1", "cq1", 3, "v3");
m4.put("cf1", "cq1", new ColumnVisibility("A&B&C"), 4, "v4");
m4.putDelete("cf2", "cf3");
m4.putDelete("cf2", "cf4", 3);
m4.putDelete("cf2", "cf4", new ColumnVisibility("A&B&C"), 3);
// m5 has a different value than m1
Mutation m5 = new Mutation("r1");
m5.put("cf1", "cq1", "v1");
m5.put("cf1", "cq1", new ColumnVisibility("A&B"), "v2");
m5.put("cf1", "cq1", 3, "v4");
m5.put("cf1", "cq1", new ColumnVisibility("A&B&C"), 4, "v4");
m5.putDelete("cf2", "cf3");
m5.putDelete("cf2", "cf4", 3);
m5.putDelete("cf2", "cf4", new ColumnVisibility("A&B&C"), 3);
assertEquals(m1, m1);
assertEquals(m1, m2);
assertEquals(m2, m1);
assertEquals(m2.hashCode(), m1.hashCode());
assertNotEquals(0, m1.hashCode());
assertFalse(m1.equals(m3));
assertFalse(m3.equals(m1));
assertFalse(m1.equals(m4));
assertFalse(m4.equals(m1));
assertFalse(m3.equals(m4));
assertFalse(m1.equals(m5));
assertFalse(m5.equals(m1));
assertFalse(m3.equals(m5));
assertFalse(m4.equals(m5));
}
@Test
public void testThrift() {
Mutation m1 = new Mutation("r1");
m1.put("cf1", "cq1", "v1");
TMutation tm1 = m1.toThrift();
Mutation m2 = new Mutation(tm1);
assertEquals(m1, m2);
}
@Test
public void testThrift_Invalid() {
Mutation m1 = new Mutation("r1");
m1.put("cf1", "cq1", "v1");
TMutation tm1 = m1.toThrift();
tm1.setRow((byte[]) null);
assertThrows(IllegalArgumentException.class, () -> new Mutation(tm1));
}
  /*
   * The following two tests assert that no exception is thrown after calling hashCode or equals on
   * a Mutation. These guard against the condition noted in ACCUMULO-3718.
   */
  @Test
  public void testPutAfterHashCode() {
    Mutation m = new Mutation("r");
    // hashCode serializes the mutation; a subsequent put must still work
    m.hashCode();
    try {
      m.put("cf", "cq", "v");
    } catch (IllegalStateException e) {
      fail("Calling Mutation#hashCode then Mutation#put should not result in an"
          + " IllegalStateException.");
    }
  }
  /**
   * Guard against ACCUMULO-3718: calling equals (which serializes both mutations) must not
   * prevent subsequent puts on either mutation.
   */
  @Test
  public void testPutAfterEquals() {
    Mutation m = new Mutation("r");
    Mutation m2 = new Mutation("r2");
    m.equals(m2);
    try {
      m.put("cf", "cq", "v");
      m2.put("cf", "cq", "v");
    } catch (IllegalStateException e) {
      fail("Calling Mutation#equals then Mutation#put should not result in an"
          + " IllegalStateException.");
    }
  }
  /**
   * Verifies that a put is rejected once the mutation's estimated size exceeds the sanity limit.
   */
  @Test
  public void testSanityCheck() {
    Mutation m = new Mutation("too big mutation");
    m.put("cf", "cq1", "v");
    // artificially inflate the tracked size so the next put trips the size check
    m.estRowAndLargeValSize += (Long.MAX_VALUE / 2);
    assertThrows(IllegalArgumentException.class, () -> m.put("cf", "cq2", "v"));
  }
@Test
public void testPrettyPrint() {
String row = "row";
String fam1 = "fam1";
String fam2 = "fam2";
String qual1 = "qual1";
String qual2 = "qual2";
String value1 = "value1";
Mutation m = new Mutation("row");
m.put(fam1, qual1, value1);
m.putDelete(fam2, qual2);
m.getUpdates(); // serialize
String expected = "mutation: " + row + "\n update: " + fam1 + ":" + qual1 + " value " + value1
+ "\n update: " + fam2 + ":" + qual2 + " value [delete]\n";
assertEquals(expected, m.prettyPrint());
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for {@link Condition}: construction from each column-representation type, the
 * timestamp/value/visibility/iterator setters, and the equals/hashCode contract.
 */
public class ConditionTest {
  private static final ByteSequence EMPTY = new ArrayByteSequence(new byte[0]);
  private static final String FAMILY = "family";
  private static final String QUALIFIER = "qualifier";
  private static final String VISIBILITY = "visibility";
  private static final String VALUE = "value";
  // three settings with unique names and unique priorities (both are required by setIterators)
  private static final IteratorSetting[] ITERATORS = {new IteratorSetting(1, "first", "someclass"),
      new IteratorSetting(2, "second", "someotherclass"),
      new IteratorSetting(3, "third", "yetanotherclass")};
  // Decodes a ByteSequence as UTF-8 for easy comparison; null-safe.
  private String toString(ByteSequence bs) {
    if (bs == null) {
      return null;
    }
    return new String(bs.toArray(), UTF_8);
  }
  // fresh condition under test, rebuilt before each test
  private Condition c;
  @BeforeEach
  public void setUp() {
    c = new Condition(FAMILY, QUALIFIER);
  }
  @Test
  public void testConstruction_CharSequence() {
    assertEquals(FAMILY, toString(c.getFamily()));
    assertEquals(QUALIFIER, toString(c.getQualifier()));
    // visibility defaults to the empty sequence, not null
    assertEquals(EMPTY, c.getVisibility());
  }
  @Test
  public void testConstruction_ByteArray() {
    c = new Condition(FAMILY.getBytes(UTF_8), QUALIFIER.getBytes(UTF_8));
    assertEquals(FAMILY, toString(c.getFamily()));
    assertEquals(QUALIFIER, toString(c.getQualifier()));
    assertEquals(EMPTY, c.getVisibility());
  }
  @Test
  public void testConstruction_Text() {
    c = new Condition(new Text(FAMILY), new Text(QUALIFIER));
    assertEquals(FAMILY, toString(c.getFamily()));
    assertEquals(QUALIFIER, toString(c.getQualifier()));
    assertEquals(EMPTY, c.getVisibility());
  }
  @Test
  public void testConstruction_ByteSequence() {
    c = new Condition(new ArrayByteSequence(FAMILY.getBytes(UTF_8)),
        new ArrayByteSequence(QUALIFIER.getBytes(UTF_8)));
    assertEquals(FAMILY, toString(c.getFamily()));
    assertEquals(QUALIFIER, toString(c.getQualifier()));
    assertEquals(EMPTY, c.getVisibility());
  }
  @Test
  public void testGetSetTimestamp() {
    c.setTimestamp(1234L);
    assertEquals(Long.valueOf(1234L), c.getTimestamp());
  }
  @Test
  public void testSetValue_CharSequence() {
    c.setValue(VALUE);
    assertEquals(VALUE, toString(c.getValue()));
  }
  @Test
  public void testSetValue_ByteArray() {
    c.setValue(VALUE.getBytes(UTF_8));
    assertEquals(VALUE, toString(c.getValue()));
  }
  @Test
  public void testSetValue_Text() {
    c.setValue(new Text(VALUE));
    assertEquals(VALUE, toString(c.getValue()));
  }
  @Test
  public void testSetValue_ByteSequence() {
    c.setValue(new ArrayByteSequence(VALUE.getBytes(UTF_8)));
    assertEquals(VALUE, toString(c.getValue()));
  }
  @Test
  public void testGetSetVisibility() {
    ColumnVisibility vis = new ColumnVisibility(VISIBILITY);
    c.setVisibility(vis);
    assertEquals(VISIBILITY, toString(c.getVisibility()));
  }
  @Test
  public void testGetSetIterators() {
    c.setIterators(ITERATORS);
    assertArrayEquals(ITERATORS, c.getIterators());
  }
  @Test
  public void testSetIterators_DuplicateName() {
    // two settings share the name "first" — must be rejected
    IteratorSetting[] iterators = {new IteratorSetting(1, "first", "someclass"),
        new IteratorSetting(2, "second", "someotherclass"),
        new IteratorSetting(3, "first", "yetanotherclass")};
    assertThrows(IllegalArgumentException.class, () -> c.setIterators(iterators));
  }
  @Test
  public void testSetIterators_DuplicatePriority() {
    // two settings share priority 1 — must be rejected
    IteratorSetting[] iterators = {new IteratorSetting(1, "first", "someclass"),
        new IteratorSetting(2, "second", "someotherclass"),
        new IteratorSetting(1, "third", "yetanotherclass")};
    assertThrows(IllegalArgumentException.class, () -> c.setIterators(iterators));
  }
  @Test
  public void testEquals() {
    ColumnVisibility cvis = new ColumnVisibility(VISIBILITY);
    c.setVisibility(cvis);
    c.setValue(VALUE);
    c.setTimestamp(1234L);
    c.setIterators(ITERATORS);
    // reflexivity
    assertEquals(c, c);
    // non-nullity
    assertFalse(c.equals(null));
    // symmetry
    Condition c2 = new Condition(FAMILY, QUALIFIER);
    c2.setVisibility(cvis);
    c2.setValue(VALUE);
    c2.setTimestamp(1234L);
    c2.setIterators(ITERATORS);
    assertEquals(c, c2);
    assertEquals(c2, c);
    // a different family breaks equality in both directions
    Condition c3 = new Condition("nope", QUALIFIER);
    c3.setVisibility(cvis);
    c3.setValue(VALUE);
    c3.setTimestamp(1234L);
    c3.setIterators(ITERATORS);
    assertNotEquals(c, c3);
    assertNotEquals(c3, c);
    // a different qualifier breaks equality in both directions
    c3 = new Condition(FAMILY, "nope");
    c3.setVisibility(cvis);
    c3.setValue(VALUE);
    c3.setTimestamp(1234L);
    c3.setIterators(ITERATORS);
    assertNotEquals(c, c3);
    assertNotEquals(c3, c);
    // changing each optional field in turn must break, then restore, equality
    c2.setVisibility(new ColumnVisibility("sekrit"));
    assertNotEquals(c, c2);
    assertNotEquals(c2, c);
    c2.setVisibility(cvis);
    c2.setValue(EMPTY);
    assertNotEquals(c, c2);
    assertNotEquals(c2, c);
    c2.setValue(VALUE);
    c2.setTimestamp(2345L);
    assertNotEquals(c, c2);
    assertNotEquals(c2, c);
    c2.setTimestamp(1234L);
    c2.setIterators();
    assertNotEquals(c, c2);
    assertNotEquals(c2, c);
    c2.setIterators(ITERATORS);
    assertEquals(c, c2);
    assertEquals(c2, c);
    // set everything but vis, so its null
    Condition c4 = new Condition(FAMILY, QUALIFIER);
    c4.setValue(VALUE);
    c4.setTimestamp(1234L);
    c4.setIterators(ITERATORS);
    assertNotEquals(c, c4);
    assertNotEquals(c4, c);
    // set everything but timestamp, so its null
    Condition c5 = new Condition(FAMILY, QUALIFIER);
    c5.setVisibility(cvis);
    c5.setValue(VALUE);
    c5.setIterators(ITERATORS);
    assertNotEquals(c, c5);
    assertNotEquals(c5, c);
    // set everything but value
    Condition c6 = new Condition(FAMILY, QUALIFIER);
    c6.setVisibility(cvis);
    c6.setTimestamp(1234L);
    c6.setIterators(ITERATORS);
    assertNotEquals(c, c6);
    assertNotEquals(c6, c);
    // test w/ no optional fields set
    Condition c7 = new Condition(FAMILY, QUALIFIER);
    Condition c8 = new Condition(FAMILY, QUALIFIER);
    assertEquals(c7, c8);
    assertEquals(c8, c7);
  }
  @Test
  public void testHashCode() {
    // equal conditions must produce equal hash codes
    ColumnVisibility cvis = new ColumnVisibility(VISIBILITY);
    c.setVisibility(cvis);
    c.setValue(VALUE);
    c.setTimestamp(1234L);
    c.setIterators(ITERATORS);
    int hc1 = c.hashCode();
    Condition c2 = new Condition(FAMILY, QUALIFIER);
    c2.setVisibility(cvis);
    c2.setValue(VALUE);
    c2.setTimestamp(1234L);
    c2.setIterators(ITERATORS);
    assertEquals(c, c2);
    assertEquals(hc1, c2.hashCode());
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.accumulo.core.dataImpl.thrift.TColumn;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for {@link Column}: equals/compareTo consistency, Writable serialization round
 * trips, and thrift round trips over a mix of populated, empty, and null columns.
 */
public class ColumnTest {
  // col[0] and col[1] are equal; col[2] is all-empty arrays; col[3] is all-null; col[4] differs
  static Column[] col;
  @BeforeAll
  public static void setup() {
    col = new Column[5];
    col[0] = new Column("colfam".getBytes(), "colq".getBytes(), "colv".getBytes());
    col[1] = new Column("colfam".getBytes(), "colq".getBytes(), "colv".getBytes());
    col[2] = new Column(new byte[0], new byte[0], new byte[0]);
    col[3] = new Column(null, null, null);
    col[4] = new Column("colfam".getBytes(), "cq".getBytes(), "cv".getBytes());
  }
  @Test
  public void testEquals() {
    // only the (0,1) pair and self-comparisons are equal
    for (int i = 0; i < col.length; i++) {
      for (int j = 0; j < col.length; j++) {
        if (i == j || (i == 0 && j == 1) || (i == 1 && j == 0)) {
          assertTrue(col[i].equals(col[j]));
        } else {
          assertFalse(col[i].equals(col[j]));
        }
      }
    }
  }
  @Test
  public void testCompare() {
    // compareTo must agree with the equality structure above
    for (int i = 0; i < col.length; i++) {
      for (int j = 0; j < col.length; j++) {
        if (i == j || (i == 0 && j == 1) || (i == 1 && j == 0)) {
          assertEquals(0, col[i].compareTo(col[j]));
        } else {
          assertNotEquals(0, col[i].compareTo(col[j]));
        }
      }
    }
  }
  @Test
  public void testEqualsCompare() {
    // equals and compareTo == 0 must be consistent for every pair
    for (Column value : col) {
      for (Column column : col) {
        assertEquals(value.equals(column), value.compareTo(column) == 0);
      }
    }
  }
  @Test
  public void testWriteReadFields() throws IOException {
    // every column (including empty and null variants) must survive a Writable round trip
    for (Column c : col) {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      c.write(new DataOutputStream(baos));
      Column other = new Column();
      other.readFields(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
      assertEquals(c, other);
    }
  }
  @Test
  public void testThriftRoundTrip() {
    // every column must survive a thrift round trip
    for (Column c : col) {
      TColumn tc = c.toThrift();
      assertEquals(c, new Column(tc));
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.accumulo.core.dataImpl.thrift.TMutation;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.accumulo.core.util.ByteBufferUtil;
import org.apache.accumulo.core.util.TextUtil;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
/**
* Will read/write old mutations.
*/
public class OldMutation implements Writable {
static final int VALUE_SIZE_COPY_CUTOFF = 1 << 15;
private byte[] row;
private byte[] data;
private int entries;
private List<byte[]> values;
// created this little class instead of using ByteArrayOutput stream and DataOutputStream
// because both are synchronized... lots of small syncs slow things down
private static class ByteBuffer {
int offset;
byte[] data = new byte[64];
private void reserve(int l) {
if (offset + l > data.length) {
int newSize = data.length * 2;
while (newSize <= offset + l) {
newSize = newSize * 2;
}
byte[] newData = new byte[newSize];
System.arraycopy(data, 0, newData, 0, offset);
data = newData;
}
}
void add(byte[] b) {
reserve(b.length);
System.arraycopy(b, 0, data, offset, b.length);
offset += b.length;
}
public void add(byte[] bytes, int off, int length) {
reserve(length);
System.arraycopy(bytes, off, data, offset, length);
offset += length;
}
void add(boolean b) {
reserve(1);
if (b) {
data[offset++] = 1;
} else {
data[offset++] = 0;
}
}
void add(long v) {
reserve(8);
data[offset++] = (byte) (v >>> 56);
data[offset++] = (byte) (v >>> 48);
data[offset++] = (byte) (v >>> 40);
data[offset++] = (byte) (v >>> 32);
data[offset++] = (byte) (v >>> 24);
data[offset++] = (byte) (v >>> 16);
data[offset++] = (byte) (v >>> 8);
data[offset++] = (byte) (v >>> 0);
}
void add(int i) {
reserve(4);
data[offset++] = (byte) (i >>> 24);
data[offset++] = (byte) (i >>> 16);
data[offset++] = (byte) (i >>> 8);
data[offset++] = (byte) (i >>> 0);
}
public byte[] toArray() {
byte[] ret = new byte[offset];
System.arraycopy(data, 0, ret, 0, offset);
return ret;
}
}
private static class SimpleReader {
int offset;
byte[] data;
SimpleReader(byte[] b) {
this.data = b;
}
int readInt() {
return (data[offset++] << 24) + ((data[offset++] & 255) << 16) + ((data[offset++] & 255) << 8)
+ ((data[offset++] & 255) << 0);
}
long readLong() {
return (((long) data[offset++] << 56) + ((long) (data[offset++] & 255) << 48)
+ ((long) (data[offset++] & 255) << 40) + ((long) (data[offset++] & 255) << 32)
+ ((long) (data[offset++] & 255) << 24) + ((data[offset++] & 255) << 16)
+ ((data[offset++] & 255) << 8) + ((data[offset++] & 255) << 0));
}
void readBytes(byte[] b) {
System.arraycopy(data, offset, b, 0, b.length);
offset += b.length;
}
boolean readBoolean() {
return (data[offset++] == 1);
}
}
private ByteBuffer buffer;
private List<ColumnUpdate> updates;
private static final byte[] EMPTY_BYTES = new byte[0];
private void serialize() {
if (buffer != null) {
data = buffer.toArray();
buffer = null;
}
}
public OldMutation(Text row) {
this.row = new byte[row.getLength()];
System.arraycopy(row.getBytes(), 0, this.row, 0, row.getLength());
buffer = new ByteBuffer();
}
public OldMutation(CharSequence row) {
this(new Text(row.toString()));
}
public OldMutation() {}
public OldMutation(TMutation tmutation) {
this.row = ByteBufferUtil.toBytes(tmutation.row);
this.data = ByteBufferUtil.toBytes(tmutation.data);
this.entries = tmutation.entries;
this.values = ByteBufferUtil.toBytesList(tmutation.values);
if (this.row == null) {
throw new IllegalArgumentException("null row");
}
if (this.data == null) {
throw new IllegalArgumentException("null serialized data");
}
}
public byte[] getRow() {
return row;
}
private void put(byte[] b) {
buffer.add(b.length);
buffer.add(b);
}
private void put(Text t) {
buffer.add(t.getLength());
buffer.add(t.getBytes(), 0, t.getLength());
}
private void put(boolean b) {
buffer.add(b);
}
private void put(int i) {
buffer.add(i);
}
private void put(long l) {
buffer.add(l);
}
private void put(Text cf, Text cq, byte[] cv, boolean hasts, long ts, boolean deleted,
byte[] val) {
if (buffer == null) {
throw new IllegalStateException("Can not add to mutation after serializing it");
}
put(cf);
put(cq);
put(cv);
put(hasts);
put(ts);
put(deleted);
if (val.length < VALUE_SIZE_COPY_CUTOFF) {
put(val);
} else {
if (values == null) {
values = new ArrayList<>();
}
byte[] copy = new byte[val.length];
System.arraycopy(val, 0, copy, 0, val.length);
values.add(copy);
put(-1 * values.size());
}
entries++;
}
private void put(CharSequence cf, CharSequence cq, byte[] cv, boolean hasts, long ts,
boolean deleted, byte[] val) {
put(new Text(cf.toString()), new Text(cq.toString()), cv, hasts, ts, deleted, val);
}
private void put(CharSequence cf, CharSequence cq, byte[] cv, boolean hasts, long ts,
boolean deleted, CharSequence val) {
put(cf, cq, cv, hasts, ts, deleted, TextUtil.getBytes(new Text(val.toString())));
}
public void put(Text columnFamily, Text columnQualifier, Value value) {
put(columnFamily, columnQualifier, EMPTY_BYTES, false, 0L, false, value.get());
}
public void put(Text columnFamily, Text columnQualifier, ColumnVisibility columnVisibility,
Value value) {
put(columnFamily, columnQualifier, columnVisibility.getExpression(), false, 0L, false,
value.get());
}
public void put(Text columnFamily, Text columnQualifier, long timestamp, Value value) {
put(columnFamily, columnQualifier, EMPTY_BYTES, true, timestamp, false, value.get());
}
public void put(Text columnFamily, Text columnQualifier, ColumnVisibility columnVisibility,
long timestamp, Value value) {
put(columnFamily, columnQualifier, columnVisibility.getExpression(), true, timestamp, false,
value.get());
}
public void putDelete(Text columnFamily, Text columnQualifier) {
put(columnFamily, columnQualifier, EMPTY_BYTES, false, 0L, true, EMPTY_BYTES);
}
public void putDelete(Text columnFamily, Text columnQualifier,
ColumnVisibility columnVisibility) {
put(columnFamily, columnQualifier, columnVisibility.getExpression(), false, 0L, true,
EMPTY_BYTES);
}
public void putDelete(Text columnFamily, Text columnQualifier, long timestamp) {
put(columnFamily, columnQualifier, EMPTY_BYTES, true, timestamp, true, EMPTY_BYTES);
}
public void putDelete(Text columnFamily, Text columnQualifier, ColumnVisibility columnVisibility,
long timestamp) {
put(columnFamily, columnQualifier, columnVisibility.getExpression(), true, timestamp, true,
EMPTY_BYTES);
}
public void put(CharSequence columnFamily, CharSequence columnQualifier, Value value) {
put(columnFamily, columnQualifier, EMPTY_BYTES, false, 0L, false, value.get());
}
public void put(CharSequence columnFamily, CharSequence columnQualifier,
ColumnVisibility columnVisibility, Value value) {
put(columnFamily, columnQualifier, columnVisibility.getExpression(), false, 0L, false,
value.get());
}
public void put(CharSequence columnFamily, CharSequence columnQualifier, long timestamp,
Value value) {
put(columnFamily, columnQualifier, EMPTY_BYTES, true, timestamp, false, value.get());
}
public void put(CharSequence columnFamily, CharSequence columnQualifier,
ColumnVisibility columnVisibility, long timestamp, Value value) {
put(columnFamily, columnQualifier, columnVisibility.getExpression(), true, timestamp, false,
value.get());
}
public void putDelete(CharSequence columnFamily, CharSequence columnQualifier) {
put(columnFamily, columnQualifier, EMPTY_BYTES, false, 0L, true, EMPTY_BYTES);
}
public void putDelete(CharSequence columnFamily, CharSequence columnQualifier,
ColumnVisibility columnVisibility) {
put(columnFamily, columnQualifier, columnVisibility.getExpression(), false, 0L, true,
EMPTY_BYTES);
}
public void putDelete(CharSequence columnFamily, CharSequence columnQualifier, long timestamp) {
put(columnFamily, columnQualifier, EMPTY_BYTES, true, timestamp, true, EMPTY_BYTES);
}
public void putDelete(CharSequence columnFamily, CharSequence columnQualifier,
ColumnVisibility columnVisibility, long timestamp) {
put(columnFamily, columnQualifier, columnVisibility.getExpression(), true, timestamp, true,
EMPTY_BYTES);
}
public void put(CharSequence columnFamily, CharSequence columnQualifier, CharSequence value) {
put(columnFamily, columnQualifier, EMPTY_BYTES, false, 0L, false, value);
}
public void put(CharSequence columnFamily, CharSequence columnQualifier,
ColumnVisibility columnVisibility, CharSequence value) {
put(columnFamily, columnQualifier, columnVisibility.getExpression(), false, 0L, false, value);
}
public void put(CharSequence columnFamily, CharSequence columnQualifier, long timestamp,
CharSequence value) {
put(columnFamily, columnQualifier, EMPTY_BYTES, true, timestamp, false, value);
}
public void put(CharSequence columnFamily, CharSequence columnQualifier,
ColumnVisibility columnVisibility, long timestamp, CharSequence value) {
put(columnFamily, columnQualifier, columnVisibility.getExpression(), true, timestamp, false,
value);
}
private byte[] readBytes(SimpleReader in) {
int len = in.readInt();
if (len == 0) {
return EMPTY_BYTES;
}
byte[] bytes = new byte[len];
in.readBytes(bytes);
return bytes;
}
public List<ColumnUpdate> getUpdates() {
serialize();
SimpleReader in = new SimpleReader(data);
if (updates == null) {
if (entries == 1) {
updates = Collections.singletonList(deserializeColumnUpdate(in));
} else {
ColumnUpdate[] tmpUpdates = new ColumnUpdate[entries];
for (int i = 0; i < entries; i++) {
tmpUpdates[i] = deserializeColumnUpdate(in);
}
updates = Arrays.asList(tmpUpdates);
}
}
return updates;
}
private ColumnUpdate deserializeColumnUpdate(SimpleReader in) {
byte[] cf = readBytes(in);
byte[] cq = readBytes(in);
byte[] cv = readBytes(in);
boolean hasts = in.readBoolean();
long ts = in.readLong();
boolean deleted = in.readBoolean();
byte[] val;
int valLen = in.readInt();
if (valLen < 0) {
val = values.get((-1 * valLen) - 1);
} else if (valLen == 0) {
val = EMPTY_BYTES;
} else {
val = new byte[valLen];
in.readBytes(val);
}
return new ColumnUpdate(cf, cq, cv, hasts, ts, deleted, val);
}
private int cachedValLens = -1;
long getValueLengths() {
if (values == null) {
return 0;
}
if (cachedValLens == -1) {
int tmpCVL = 0;
for (byte[] val : values) {
tmpCVL += val.length;
}
cachedValLens = tmpCVL;
}
return cachedValLens;
}
public long numBytes() {
serialize();
return row.length + data.length + getValueLengths();
}
public long estimatedMemoryUsed() {
return numBytes() + 230;
}
/**
* @return the number of column value pairs added to the mutation
*/
public int size() {
return entries;
}
@Override
public void readFields(DataInput in) throws IOException {
// Clear out cached column updates and value lengths so
// that we recalculate them based on the (potentially) new
// data we are about to read in.
updates = null;
cachedValLens = -1;
buffer = null;
int len = in.readInt();
row = new byte[len];
in.readFully(row);
len = in.readInt();
data = new byte[len];
in.readFully(data);
entries = in.readInt();
boolean valuesPresent = in.readBoolean();
if (valuesPresent) {
values = new ArrayList<>();
int numValues = in.readInt();
for (int i = 0; i < numValues; i++) {
len = in.readInt();
byte[] val = new byte[len];
in.readFully(val);
values.add(val);
}
} else {
values = null;
}
}
@Override
public void write(DataOutput out) throws IOException {
serialize();
out.writeInt(row.length);
out.write(row);
out.writeInt(data.length);
out.write(data);
out.writeInt(entries);
if (values == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeInt(values.size());
for (byte[] val : values) {
out.writeInt(val.length);
out.write(val);
}
}
}
@Override
public boolean equals(Object o) {
if (o instanceof OldMutation) {
return equals((OldMutation) o);
}
return false;
}
@Override
public int hashCode() {
return toThrift().hashCode();
}
public boolean equals(OldMutation m) {
serialize();
if (!Arrays.equals(row, m.getRow())) {
return false;
}
List<ColumnUpdate> oldcus = this.getUpdates();
List<ColumnUpdate> newcus = m.getUpdates();
if (oldcus.size() != newcus.size()) {
return false;
}
for (int i = 0; i < newcus.size(); i++) {
ColumnUpdate oldcu = oldcus.get(i);
ColumnUpdate newcu = newcus.get(i);
if (!oldcu.equals(newcu)) {
return false;
}
}
return false;
}
public TMutation toThrift() {
serialize();
return new TMutation(java.nio.ByteBuffer.wrap(row), java.nio.ByteBuffer.wrap(data),
ByteBufferUtil.toByteBuffers(values), entries);
}
}
| 9,422 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data/KeyBuilderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertSame;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
public class KeyBuilderTest {
private static final byte[] EMPTY_BYTES = new byte[0];
byte[] rowBytes = "row".getBytes(UTF_8);
byte[] familyBytes = "family".getBytes(UTF_8);
byte[] qualifierBytes = "qualifier".getBytes(UTF_8);
byte[] visibilityBytes = "visibility".getBytes(UTF_8);
Text rowText = new Text(rowBytes);
Text familyText = new Text(familyBytes);
Text qualifierText = new Text(qualifierBytes);
Text visibilityText = new Text(visibilityBytes);
ColumnVisibility visibilityVisibility = new ColumnVisibility(visibilityBytes);
@Test
public void testKeyBuildingFromRow() {
Key keyBuilt = Key.builder().row("foo").build();
Key keyExpected = new Key("foo");
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamily() {
Key keyBuilt = Key.builder().row("foo").family("bar").build();
Key keyExpected = new Key("foo", "bar");
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamilyQualifier() {
Key keyBuilt = Key.builder().row("foo").family("bar").qualifier("baz").build();
Key keyExpected = new Key("foo", "bar", "baz");
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamilyQualifierVisibility() {
Key keyBuilt = Key.builder().row("foo").family("bar").qualifier("baz").visibility("v").build();
Key keyExpected = new Key("foo", "bar", "baz", "v");
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamilyQualifierVisibilityTimestamp() {
Key keyBuilt = Key.builder().row("foo").family("bar").qualifier("baz").visibility("v")
.timestamp(1L).build();
Key keyExpected = new Key("foo", "bar", "baz", "v", 1L);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamilyQualifierVisibilityTimestampDeleted() {
Key keyBuilt = Key.builder().row("foo").family("bar").qualifier("baz").visibility("v")
.timestamp(10L).deleted(true).build();
Key keyExpected = new Key("foo", "bar", "baz", "v", 10L);
keyExpected.setDeleted(true);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowVisibility() {
Key keyBuilt = Key.builder().row("foo").visibility("v").build();
Key keyExpected = new Key("foo", "", "", "v");
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamilyVisibility() {
Key keyBuilt = Key.builder().row("foo").family("bar").visibility("v").build();
Key keyExpected = new Key("foo", "bar", "", "v");
assertEquals(keyExpected, keyBuilt);
}
@Test
public void textKeyBuildingFromRowTimestamp() {
Key keyBuilt = Key.builder().row("foo").timestamp(3L).build();
Key keyExpected = new Key("foo");
keyExpected.setTimestamp(3L);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowBytes() {
Key keyBuilt = Key.builder().row(rowBytes).build();
Key keyExpected = new Key(rowBytes, EMPTY_BYTES, EMPTY_BYTES, EMPTY_BYTES, Long.MAX_VALUE);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamilyBytes() {
Key keyBuilt = Key.builder().row(rowBytes).family(familyBytes).build();
Key keyExpected = new Key(rowBytes, familyBytes, EMPTY_BYTES, EMPTY_BYTES, Long.MAX_VALUE);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamilyQualifierBytes() {
Key keyBuilt =
Key.builder().row(rowBytes).family(familyBytes).qualifier(qualifierBytes).build();
Key keyExpected = new Key(rowBytes, familyBytes, qualifierBytes, EMPTY_BYTES, Long.MAX_VALUE);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamilyQualifierVisibilityBytes() {
Key keyBuilt = Key.builder().row(rowBytes).family(familyBytes).qualifier(qualifierBytes)
.visibility(visibilityBytes).build();
Key keyExpected =
new Key(rowBytes, familyBytes, qualifierBytes, visibilityBytes, Long.MAX_VALUE);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamilyQualifierVisibilityTimestampBytes() {
Key keyBuilt = Key.builder().row(rowBytes).family(familyBytes).qualifier(qualifierBytes)
.visibility(visibilityBytes).timestamp(1L).build();
Key keyExpected = new Key(rowBytes, familyBytes, qualifierBytes, visibilityBytes, 1L);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamilyQualifierVisibilityTimestampDeletedBytes() {
Key keyBuilt = Key.builder().row(rowBytes).family(familyBytes).qualifier(qualifierBytes)
.visibility(visibilityBytes).timestamp(10L).deleted(true).build();
Key keyExpected = new Key(rowBytes, familyBytes, qualifierBytes, visibilityBytes, 10L);
keyExpected.setDeleted(true);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowVisibilityBytes() {
Key keyBuilt = Key.builder().row(rowBytes).visibility(visibilityBytes).build();
Key keyExpected = new Key(rowBytes, EMPTY_BYTES, EMPTY_BYTES, visibilityBytes, Long.MAX_VALUE);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamilyVisibilityBytes() {
Key keyBuilt =
Key.builder().row(rowBytes).family(familyBytes).visibility(visibilityBytes).build();
Key keyExpected = new Key(rowBytes, familyBytes, EMPTY_BYTES, visibilityBytes, Long.MAX_VALUE);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void textKeyBuildingFromRowTimestampBytes() {
Key keyBuilt = Key.builder().row(rowBytes).timestamp(3L).build();
Key keyExpected = new Key(rowBytes, EMPTY_BYTES, EMPTY_BYTES, EMPTY_BYTES, Long.MAX_VALUE);
keyExpected.setTimestamp(3L);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowText() {
Key keyBuilt = Key.builder().row(rowText).build();
Key keyExpected = new Key(rowText);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamilyText() {
Key keyBuilt = Key.builder().row(rowText).family(familyText).build();
Key keyExpected = new Key(rowText, familyText);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamilyQualifierText() {
Key keyBuilt = Key.builder().row(rowText).family(familyText).qualifier(qualifierText).build();
Key keyExpected = new Key(rowText, familyText, qualifierText);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamilyQualifierVisibilityText() {
Key keyBuilt = Key.builder().row(rowText).family(familyText).qualifier(qualifierText)
.visibility(visibilityText).build();
Key keyExpected = new Key(rowText, familyText, qualifierText, visibilityText);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamilyQualifierVisibilityTimestampText() {
Key keyBuilt = Key.builder().row(rowText).family(familyText).qualifier(qualifierText)
.visibility(visibilityText).timestamp(1L).build();
Key keyExpected = new Key(rowText, familyText, qualifierText, visibilityText, 1L);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamilyQualifierVisibilityTimestampDeletedText() {
Key keyBuilt = Key.builder().row(rowText).family(familyText).qualifier(qualifierText)
.visibility(visibilityText).timestamp(10L).deleted(true).build();
Key keyExpected = new Key(rowText, familyText, qualifierText, visibilityText, 10L);
keyExpected.setDeleted(true);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowVisibilityText() {
Key keyBuilt = Key.builder().row(rowText).visibility(visibilityText).build();
Key keyExpected = new Key(rowText, new Text(), new Text(), visibilityText);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamilyVisibilityText() {
Key keyBuilt = Key.builder().row(rowText).family(familyText).visibility(visibilityText).build();
Key keyExpected = new Key(rowText, familyText, new Text(), visibilityText);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowFamilyVisibilityVisibility() {
Key keyBuilt =
Key.builder().row(rowText).family(familyText).visibility(visibilityVisibility).build();
Key keyExpected =
new Key(rowText, familyText, new Text(), visibilityVisibility, Long.MAX_VALUE);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingFromRowTimestampText() {
Key keyBuilt = Key.builder().row(rowText).timestamp(3L).build();
Key keyExpected = new Key(rowText);
keyExpected.setTimestamp(3L);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingReusingBytes() {
byte[] reuse = {1, 2, 3};
KeyBuilder.Build keyBuilder = Key.builder(false).row(reuse);
Key keyBuilt = keyBuilder.build();
assertSame(reuse, keyBuilt.getRowBytes());
}
@Test
public void testKeyBuildingCopyBytes() {
byte[] reuse = {1, 2, 3};
KeyBuilder.Build keyBuilder = Key.builder(true).row(reuse);
Key keyBuilt = keyBuilder.build();
assertNotEquals(reuse, keyBuilt.getRowBytes());
Key keyBuilt2 = keyBuilder.build();
assertNotEquals(reuse, keyBuilt2.getRowBytes());
}
@Test
public void testKeyHeterogeneous() {
Key keyBuilt = Key.builder().row(rowText).family(familyBytes).qualifier("foo").build();
Text fooText = new Text("foo");
Key keyExpected =
new Key(rowText.getBytes(), 0, rowText.getLength(), familyBytes, 0, familyBytes.length,
fooText.getBytes(), 0, fooText.getLength(), EMPTY_BYTES, 0, 0, Long.MAX_VALUE);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyUsingSubsetOfBytes() {
Key keyBuilt = Key.builder().row(rowBytes, 0, rowBytes.length - 1).build();
Key keyExpected = new Key(rowBytes, 0, rowBytes.length - 1, EMPTY_BYTES, 0, 0, EMPTY_BYTES, 0,
0, EMPTY_BYTES, 0, 0, Long.MAX_VALUE);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingWithMultipleTimestamps() {
Key keyBuilt = Key.builder().row("r").timestamp(44).timestamp(99).build();
Key keyExpected = new Key("r", "", "", 99);
assertEquals(keyExpected, keyBuilt);
}
@Test
public void testKeyBuildingWithMultipleDeleted() {
Key keyBuilt = Key.builder().row("r").deleted(true).deleted(false).build();
Key keyExpected = new Key("r");
keyExpected.setDeleted(false);
assertEquals(keyExpected, keyBuilt);
}
/**
* Tests bug where a String of 10 chars or longer was being encoded incorrectly.
*/
@Test
public void test10CharactersBug() {
Key keyBuilt1 = Key.builder().row(rowText).family("1234567890").build();
Key keyBuilt2 = Key.builder().row(rowText).family(new Text("1234567890")).build();
assertEquals(keyBuilt1, keyBuilt2);
}
}
| 9,423 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data/KeyExtentTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.accumulo.core.dataImpl.KeyExtent;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
public class KeyExtentTest {
KeyExtent nke(String t, String er, String per) {
return new KeyExtent(TableId.of(t), er == null ? null : new Text(er),
per == null ? null : new Text(per));
}
KeyExtent ke;
@Test
public void testDecodingMetadataRow() {
Text flattenedExtent = new Text("foo;bar");
ke = KeyExtent.fromMetaRow(flattenedExtent);
assertEquals(new Text("bar"), ke.endRow());
assertEquals("foo", ke.tableId().canonical());
assertNull(ke.prevEndRow());
flattenedExtent = new Text("foo<");
ke = KeyExtent.fromMetaRow(flattenedExtent);
assertNull(ke.endRow());
assertEquals("foo", ke.tableId().canonical());
assertNull(ke.prevEndRow());
flattenedExtent = new Text("foo;bar;");
ke = KeyExtent.fromMetaRow(flattenedExtent);
assertEquals(new Text("bar;"), ke.endRow());
assertEquals("foo", ke.tableId().canonical());
assertNull(ke.prevEndRow());
}
private static boolean overlaps(KeyExtent extent, SortedMap<KeyExtent,Object> extents) {
return !KeyExtent.findOverlapping(extent, extents).isEmpty();
}
@Test
public void testOverlaps() {
SortedMap<KeyExtent,Object> set0 = new TreeMap<>();
set0.put(nke("a", null, null), null);
// Nothing overlaps with the empty set
assertFalse(overlaps(nke("t", null, null), null));
assertFalse(overlaps(nke("t", null, null), set0));
SortedMap<KeyExtent,Object> set1 = new TreeMap<>();
// Everything overlaps with the infinite range
set1.put(nke("t", null, null), null);
assertTrue(overlaps(nke("t", null, null), set1));
assertTrue(overlaps(nke("t", "b", "a"), set1));
assertTrue(overlaps(nke("t", null, "a"), set1));
set1.put(nke("t", "b", "a"), null);
assertTrue(overlaps(nke("t", null, null), set1));
assertTrue(overlaps(nke("t", "b", "a"), set1));
assertTrue(overlaps(nke("t", null, "a"), set1));
// simple overlaps
SortedMap<KeyExtent,Object> set2 = new TreeMap<>();
set2.put(nke("a", null, null), null);
set2.put(nke("t", "m", "j"), null);
set2.put(nke("z", null, null), null);
assertTrue(overlaps(nke("t", null, null), set2));
assertTrue(overlaps(nke("t", "m", "j"), set2));
assertTrue(overlaps(nke("t", "z", "a"), set2));
assertFalse(overlaps(nke("t", "j", "a"), set2));
assertFalse(overlaps(nke("t", "z", "m"), set2));
// non-overlaps
assertFalse(overlaps(nke("t", "b", "a"), set2));
assertFalse(overlaps(nke("t", "z", "y"), set2));
assertFalse(overlaps(nke("t", "b", null), set2));
assertFalse(overlaps(nke("t", null, "y"), set2));
assertFalse(overlaps(nke("t", "j", null), set2));
assertFalse(overlaps(nke("t", null, "m"), set2));
// infinite overlaps
SortedMap<KeyExtent,Object> set3 = new TreeMap<>();
set3.put(nke("t", "j", null), null);
set3.put(nke("t", null, "m"), null);
assertTrue(overlaps(nke("t", "k", "a"), set3));
assertTrue(overlaps(nke("t", "k", null), set3));
assertTrue(overlaps(nke("t", "z", "k"), set3));
assertTrue(overlaps(nke("t", null, "k"), set3));
assertTrue(overlaps(nke("t", null, null), set3));
// falls between
assertFalse(overlaps(nke("t", "l", "k"), set3));
SortedMap<KeyExtent,Object> set4 = new TreeMap<>();
set4.put(nke("t", null, null), null);
assertTrue(overlaps(nke("t", "k", "a"), set4));
assertTrue(overlaps(nke("t", "k", null), set4));
assertTrue(overlaps(nke("t", "z", "k"), set4));
assertTrue(overlaps(nke("t", null, "k"), set4));
assertTrue(overlaps(nke("t", null, null), set4));
assertTrue(overlaps(nke("t", null, null), set4));
for (String er : new String[] {"z", "y", "r", null}) {
for (String per : new String[] {"a", "b", "d", null}) {
assertTrue(nke("t", "y", "b").overlaps(nke("t", er, per)));
assertTrue(nke("t", "y", null).overlaps(nke("t", er, per)));
assertTrue(nke("t", null, "b").overlaps(nke("t", er, per)));
assertTrue(nke("t", null, null).overlaps(nke("t", er, per)));
}
}
assertFalse(nke("t", "y", "b").overlaps(nke("t", "z", "y")));
assertFalse(nke("t", "y", "b").overlaps(nke("t", null, "y")));
assertFalse(nke("t", "y", null).overlaps(nke("t", "z", "y")));
assertFalse(nke("t", "y", null).overlaps(nke("t", null, "y")));
assertFalse(nke("t", "y", "b").overlaps(nke("t", "b", "a")));
assertFalse(nke("t", "y", "b").overlaps(nke("t", "b", null)));
assertFalse(nke("t", null, "b").overlaps(nke("t", "b", "a")));
assertFalse(nke("t", null, "b").overlaps(nke("t", "b", null)));
}
@Test
public void testWriteReadFields() throws Exception {
ke = nke("t", "e", "b");
assertEquals(ke, writeAndReadFields(ke));
ke = nke("t", "e", null);
assertEquals(ke, writeAndReadFields(ke));
ke = nke("t", null, "b");
assertEquals(ke, writeAndReadFields(ke));
ke = nke("t", null, null);
assertEquals(ke, writeAndReadFields(ke));
}
private KeyExtent writeAndReadFields(KeyExtent in) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
in.writeTo(new DataOutputStream(baos));
ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
return KeyExtent.readFrom(new DataInputStream(bais));
}
}
| 9,424 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data/ArrayByteSequenceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.nio.ByteBuffer;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for {@code ArrayByteSequence}: constructor bounds checking, byteAt and subSequence
 * argument validation, and construction from (read-only) ByteBuffers.
 */
public class ArrayByteSequenceTest {

  ArrayByteSequence abs; // sequence under test, rebuilt before each test
  byte[] data; // backing array holding the bytes of "smiles"

  @BeforeEach
  public void setUp() {
    data = new byte[] {'s', 'm', 'i', 'l', 'e', 's'};
    abs = new ArrayByteSequence(data);
  }

  // A negative offset is rejected.
  @Test
  public void testInvalidByteBufferBounds0() {
    assertThrows(IllegalArgumentException.class, () -> abs = new ArrayByteSequence(data, -1, 0));
  }

  // An offset past the end of the array is rejected.
  @Test
  public void testInvalidByteBufferBounds1() {
    assertThrows(IllegalArgumentException.class,
        () -> abs = new ArrayByteSequence(data, data.length + 1, 0));
  }

  // A negative length is rejected.
  @Test
  public void testInvalidByteBufferBounds2() {
    assertThrows(IllegalArgumentException.class, () -> abs = new ArrayByteSequence(data, 0, -1));
  }

  // offset + length extending past the end of the array is rejected.
  @Test
  public void testInvalidByteBufferBounds3() {
    assertThrows(IllegalArgumentException.class, () -> abs = new ArrayByteSequence(data, 6, 2));
  }

  // byteAt rejects a negative index.
  @Test
  public void testInvalidByteAt0() {
    assertThrows(IllegalArgumentException.class, () -> abs.byteAt(-1));
  }

  // byteAt rejects an index equal to the sequence length.
  @Test
  public void testInvalidByteAt1() {
    assertThrows(IllegalArgumentException.class, () -> abs.byteAt(data.length));
  }

  // Empty and interior sub-sequences of "smiles".
  @Test
  public void testSubSequence() {
    assertEquals(0, abs.subSequence(0, 0).length());
    assertEquals("mile", abs.subSequence(1, 5).toString());
  }

  // start greater than end is rejected.
  @Test
  public void testInvalidSubsequence0() {
    assertThrows(IllegalArgumentException.class, () -> abs.subSequence(5, 1));
  }

  // A negative start is rejected.
  @Test
  public void testInvalidSubsequence1() {
    assertThrows(IllegalArgumentException.class, () -> abs.subSequence(-1, 1));
  }

  // An end past the sequence length is rejected.
  @Test
  public void testInvalidSubsequence3() {
    assertThrows(IllegalArgumentException.class, () -> abs.subSequence(0, 10));
  }

  // Construction from a ByteBuffer honors the buffer's position/limit window,
  // for both array-backed and read-only buffers.
  @Test
  public void testFromByteBuffer() {
    ByteBuffer bb = ByteBuffer.wrap(data, 1, 4);
    abs = new ArrayByteSequence(bb);
    assertEquals("mile", abs.toString());
    bb = bb.asReadOnlyBuffer();
    abs = new ArrayByteSequence(bb);
    assertEquals("mile", abs.toString());
  }

  // The empty string must survive the String -> ArrayByteSequence -> String round trip.
  @Test
  public void testToString() {
    assertEquals("", new ArrayByteSequence("").toString(),
        "String conversion should round trip correctly");
  }
}
| 9,425 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data/RangeTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.InvalidObjectException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import org.apache.accumulo.core.dataImpl.KeyExtent;
import org.apache.accumulo.core.dataImpl.thrift.TRange;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
public class RangeTest {
/**
 * Builds a Range over Keys with timestamp 0; a null bound becomes an infinite bound.
 */
private Range newRange(String k1, String k2) {
  Key startKey = (k1 == null) ? null : new Key(new Text(k1), 0L);
  Key stopKey = (k2 == null) ? null : new Key(new Text(k2), 0L);
  return new Range(startKey, stopKey);
}

/** Fixed-size list view over the varargs array. */
private List<Range> newRangeList(Range... ranges) {
  return Arrays.asList(ranges);
}

/**
 * Asserts that the two lists hold the same ranges, comparing as sets because
 * merge results are order-independent.
 */
private void check(List<Range> rl, List<Range> expected) {
  assertEquals(new HashSet<>(rl), new HashSet<>(expected),
      "got : " + rl + " expected : " + expected);
}
// A containing range absorbs a contained range.
@Test
public void testMergeOverlapping1() {
  List<Range> rl = newRangeList(newRange("a", "c"), newRange("a", "b"));
  List<Range> expected = newRangeList(newRange("a", "c"));
  check(Range.mergeOverlapping(rl), expected);
}

// Disjoint ranges are left untouched.
@Test
public void testMergeOverlapping2() {
  List<Range> rl = newRangeList(newRange("a", "c"), newRange("d", "f"));
  List<Range> expected = newRangeList(newRange("a", "c"), newRange("d", "f"));
  check(Range.mergeOverlapping(rl), expected);
}

// A chain of overlapping ranges collapses; a disjoint one survives.
@Test
public void testMergeOverlapping3() {
  List<Range> rl = newRangeList(newRange("a", "e"), newRange("b", "f"), newRange("c", "r"),
      newRange("g", "j"), newRange("t", "x"));
  List<Range> expected = newRangeList(newRange("a", "r"), newRange("t", "x"));
  check(Range.mergeOverlapping(rl), expected);
}

// An overlapping chain collapses to a single covering range.
@Test
public void testMergeOverlapping4() {
  List<Range> rl = newRangeList(newRange("a", "e"), newRange("b", "f"), newRange("c", "r"),
      newRange("g", "j"));
  List<Range> expected = newRangeList(newRange("a", "r"));
  check(Range.mergeOverlapping(rl), expected);
}

// A single range is returned unchanged.
@Test
public void testMergeOverlapping5() {
  List<Range> rl = newRangeList(newRange("a", "e"));
  List<Range> expected = newRangeList(newRange("a", "e"));
  check(Range.mergeOverlapping(rl), expected);
}

// An empty input yields an empty result.
@Test
public void testMergeOverlapping6() {
  List<Range> rl = newRangeList();
  List<Range> expected = newRangeList();
  check(Range.mergeOverlapping(rl), expected);
}

// Multiple disjoint ranges all survive.
@Test
public void testMergeOverlapping7() {
  List<Range> rl = newRangeList(newRange("a", "e"), newRange("g", "q"), newRange("r", "z"));
  List<Range> expected = newRangeList(newRange("a", "e"), newRange("g", "q"), newRange("r", "z"));
  check(Range.mergeOverlapping(rl), expected);
}

// Duplicate ranges are deduplicated.
@Test
public void testMergeOverlapping8() {
  List<Range> rl = newRangeList(newRange("a", "c"), newRange("a", "c"));
  List<Range> expected = newRangeList(newRange("a", "c"));
  check(Range.mergeOverlapping(rl), expected);
}

// The fully infinite range is preserved.
@Test
public void testMergeOverlapping9() {
  List<Range> rl = newRangeList(newRange(null, null));
  List<Range> expected = newRangeList(newRange(null, null));
  check(Range.mergeOverlapping(rl), expected);
}

// The infinite range swallows a finite one (infinite first).
@Test
public void testMergeOverlapping10() {
  List<Range> rl = newRangeList(newRange(null, null), newRange("a", "c"));
  List<Range> expected = newRangeList(newRange(null, null));
  check(Range.mergeOverlapping(rl), expected);
}

// The infinite range swallows a finite one (finite first).
@Test
public void testMergeOverlapping11() {
  List<Range> rl = newRangeList(newRange("a", "c"), newRange(null, null));
  List<Range> expected = newRangeList(newRange(null, null));
  check(Range.mergeOverlapping(rl), expected);
}

// Overlap with an infinite-stop range extends the merge to infinity.
@Test
public void testMergeOverlapping12() {
  List<Range> rl = newRangeList(newRange("b", "d"), newRange("c", null));
  List<Range> expected = newRangeList(newRange("b", null));
  check(Range.mergeOverlapping(rl), expected);
}

// An infinite-stop range starting earlier absorbs a finite range.
@Test
public void testMergeOverlapping13() {
  List<Range> rl = newRangeList(newRange("b", "d"), newRange("a", null));
  List<Range> expected = newRangeList(newRange("a", null));
  check(Range.mergeOverlapping(rl), expected);
}

// A disjoint infinite-stop range is not merged.
@Test
public void testMergeOverlapping14() {
  List<Range> rl = newRangeList(newRange("b", "d"), newRange("e", null));
  List<Range> expected = newRangeList(newRange("b", "d"), newRange("e", null));
  check(Range.mergeOverlapping(rl), expected);
}

// A bridging range connects a finite and an infinite-stop range.
@Test
public void testMergeOverlapping15() {
  List<Range> rl = newRangeList(newRange("b", "d"), newRange("e", null), newRange("c", "f"));
  List<Range> expected = newRangeList(newRange("b", null));
  check(Range.mergeOverlapping(rl), expected);
}

// A gap before the infinite-stop range keeps it separate.
@Test
public void testMergeOverlapping16() {
  List<Range> rl = newRangeList(newRange("b", "d"), newRange("f", null), newRange("c", "e"));
  List<Range> expected = newRangeList(newRange("b", "e"), newRange("f", null));
  check(Range.mergeOverlapping(rl), expected);
}

// Two overlap clusters, one ending at infinity.
@Test
public void testMergeOverlapping17() {
  List<Range> rl = newRangeList(newRange("b", "d"), newRange("r", null), newRange("c", "e"),
      newRange("g", "t"));
  List<Range> expected = newRangeList(newRange("b", "e"), newRange("g", null));
  check(Range.mergeOverlapping(rl), expected);
}

// Same as 17, but the first cluster starts at negative infinity.
@Test
public void testMergeOverlapping18() {
  List<Range> rl = newRangeList(newRange(null, "d"), newRange("r", null), newRange("c", "e"),
      newRange("g", "t"), newRange("d", "h"));
  List<Range> expected = newRangeList(newRange(null, null));
  check(Range.mergeOverlapping(rl), expected);
}
// Endpoint inclusivity at a shared boundary: ranges merge only when the shared
// point "b" is covered by at least one of them.
@Test
public void testMergeOverlapping20() {
  List<Range> rl = newRangeList(new Range(new Text("a"), true, new Text("b"), false),
      new Range(new Text("b"), false, new Text("c"), false));
  List<Range> expected = newRangeList(new Range(new Text("a"), true, new Text("b"), false),
      new Range(new Text("b"), false, new Text("c"), false));
  check(Range.mergeOverlapping(rl), expected);
  rl = newRangeList(new Range(new Text("a"), true, new Text("b"), false),
      new Range(new Text("b"), true, new Text("c"), false));
  expected = newRangeList(new Range(new Text("a"), true, new Text("c"), false));
  check(Range.mergeOverlapping(rl), expected);
  rl = newRangeList(new Range(new Text("a"), true, new Text("b"), true),
      new Range(new Text("b"), false, new Text("c"), false));
  expected = newRangeList(new Range(new Text("a"), true, new Text("c"), false));
  check(Range.mergeOverlapping(rl), expected);
  rl = newRangeList(new Range(new Text("a"), true, new Text("b"), true),
      new Range(new Text("b"), true, new Text("c"), false));
  expected = newRangeList(new Range(new Text("a"), true, new Text("c"), false));
  check(Range.mergeOverlapping(rl), expected);
}

// Adjacent KeyExtent metadata ranges merge into the covering extent's range;
// various subsets verify partial coverage.
@Test
public void testMergeOverlapping22() {
  Range ke1 = new KeyExtent(TableId.of("tab1"), new Text("Bank"), null).toMetaRange();
  Range ke2 =
      new KeyExtent(TableId.of("tab1"), new Text("Fails"), new Text("Bank")).toMetaRange();
  Range ke3 = new KeyExtent(TableId.of("tab1"), new Text("Sam"), new Text("Fails")).toMetaRange();
  Range ke4 = new KeyExtent(TableId.of("tab1"), new Text("bails"), new Text("Sam")).toMetaRange();
  Range ke5 = new KeyExtent(TableId.of("tab1"), null, new Text("bails")).toMetaRange();
  List<Range> rl = newRangeList(ke1, ke2, ke3, ke4, ke5);
  List<Range> expected =
      newRangeList(new KeyExtent(TableId.of("tab1"), null, null).toMetaRange());
  check(Range.mergeOverlapping(rl), expected);
  rl = newRangeList(ke1, ke2, ke4, ke5);
  expected =
      newRangeList(new KeyExtent(TableId.of("tab1"), new Text("Fails"), null).toMetaRange(),
          new KeyExtent(TableId.of("tab1"), null, new Text("Sam")).toMetaRange());
  check(Range.mergeOverlapping(rl), expected);
  rl = newRangeList(ke2, ke3, ke4, ke5);
  expected =
      newRangeList(new KeyExtent(TableId.of("tab1"), null, new Text("Bank")).toMetaRange());
  check(Range.mergeOverlapping(rl), expected);
  rl = newRangeList(ke1, ke2, ke3, ke4);
  expected =
      newRangeList(new KeyExtent(TableId.of("tab1"), new Text("bails"), null).toMetaRange());
  check(Range.mergeOverlapping(rl), expected);
  rl = newRangeList(ke2, ke3, ke4);
  expected = newRangeList(
      new KeyExtent(TableId.of("tab1"), new Text("bails"), new Text("Bank")).toMetaRange());
  check(Range.mergeOverlapping(rl), expected);
}

// Exhaustively checks every combination of start/stop inclusivity flags for
// overlapping Key-bounded ranges; merged inclusivity is the OR of the inputs'
// flags at a shared endpoint.
@Test
public void testMergeOverlapping21() {
  for (boolean b1 : new boolean[] {true, false}) {
    for (boolean b2 : new boolean[] {true, false}) {
      for (boolean b3 : new boolean[] {true, false}) {
        for (boolean b4 : new boolean[] {true, false}) {
          // System.out.println("b1:"+b1+" b2:"+b2+" b3:"+b3+" b4:"+b4);
          List<Range> rl =
              newRangeList(new Range(new Key(new Text("a")), b1, new Key(new Text("m")), b2),
                  new Range(new Key(new Text("b")), b3, new Key(new Text("n")), b4));
          List<Range> expected =
              newRangeList(new Range(new Key(new Text("a")), b1, new Key(new Text("n")), b4));
          check(Range.mergeOverlapping(rl), expected);
          rl = newRangeList(new Range(new Key(new Text("a")), b1, new Key(new Text("m")), b2),
              new Range(new Key(new Text("a")), b3, new Key(new Text("n")), b4));
          expected = newRangeList(
              new Range(new Key(new Text("a")), b1 || b3, new Key(new Text("n")), b4));
          check(Range.mergeOverlapping(rl), expected);
          rl = newRangeList(new Range(new Key(new Text("a")), b1, new Key(new Text("n")), b2),
              new Range(new Key(new Text("b")), b3, new Key(new Text("n")), b4));
          expected = newRangeList(
              new Range(new Key(new Text("a")), b1, new Key(new Text("n")), b2 || b4));
          check(Range.mergeOverlapping(rl), expected);
          rl = newRangeList(new Range(new Key(new Text("a")), b1, new Key(new Text("n")), b2),
              new Range(new Key(new Text("a")), b3, new Key(new Text("n")), b4));
          expected = newRangeList(
              new Range(new Key(new Text("a")), b1 || b3, new Key(new Text("n")), b2 || b4));
          check(Range.mergeOverlapping(rl), expected);
        }
      }
    }
  }
}
// equals() must treat null (infinite) bounds consistently: equal only when both
// sides have the same null/non-null bounds.
@Test
public void testEqualsNull() {
  assertTrue(newRange(null, "d").equals(newRange(null, "d")));
  assertTrue(newRange(null, null).equals(newRange(null, null)));
  assertTrue(newRange("a", null).equals(newRange("a", null)));
  assertFalse(newRange(null, "d").equals(newRange("a", "d")));
  assertFalse(newRange("a", "d").equals(newRange(null, "d")));
  assertFalse(newRange(null, null).equals(newRange("a", "d")));
  assertFalse(newRange("a", "d").equals(newRange(null, null)));
  assertFalse(newRange("a", null).equals(newRange("a", "d")));
  assertFalse(newRange("a", "d").equals(newRange("a", null)));
}

// equals() on finite ranges: differing start or stop keys make ranges unequal.
@Test
public void testEquals() {
  assertFalse(newRange("b", "d").equals(newRange("a", "d")));
  assertFalse(newRange("a", "d").equals(newRange("b", "d")));
  assertFalse(newRange("x", "y").equals(newRange("a", "d")));
  assertFalse(newRange("a", "d").equals(newRange("x", "y")));
  assertFalse(newRange("a", "z").equals(newRange("a", "d")));
  assertFalse(newRange("a", "d").equals(newRange("a", "z")));
  assertTrue(newRange("a", "z").equals(newRange("a", "z")));
}
// Single-row constructor: contains every key in that row and nothing else.
@Test
public void testRow1() {
  Range rowRange = new Range(new Text("r1"));
  assertTrue(rowRange.contains(new Key(new Text("r1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1"), new Text("cf1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1"), new Text("cf1"), new Text("cq1"))));
  assertFalse(rowRange.contains(new Key(new Text("r1")).followingKey(PartialKey.ROW)));
  assertFalse(rowRange.contains(new Key(new Text("r11"))));
  assertFalse(rowRange.contains(new Key(new Text("r0"))));
}

// Two-row constructor: inclusive on both row bounds by default.
@Test
public void testRow2() {
  Range rowRange = new Range(new Text("r1"), new Text("r2"));
  assertTrue(rowRange.contains(new Key(new Text("r1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1"), new Text("cf1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1"), new Text("cf1"), new Text("cq1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1")).followingKey(PartialKey.ROW)));
  assertTrue(rowRange.contains(new Key(new Text("r11"))));
  assertTrue(rowRange.contains(new Key(new Text("r2"))));
  assertTrue(rowRange.contains(new Key(new Text("r2"), new Text("cf1"))));
  assertTrue(rowRange.contains(new Key(new Text("r2"), new Text("cf1"), new Text("cq1"))));
  assertFalse(rowRange.contains(new Key(new Text("r0"))));
  assertFalse(rowRange.contains(new Key(new Text("r2")).followingKey(PartialKey.ROW)));
}

// Exclusive on both row bounds: neither boundary row is contained.
@Test
public void testRow3() {
  Range rowRange = new Range(new Text("r1"), false, new Text("r2"), false);
  assertFalse(rowRange.contains(new Key(new Text("r1"))));
  assertFalse(rowRange.contains(new Key(new Text("r1"), new Text("cf1"))));
  assertFalse(rowRange.contains(new Key(new Text("r1"), new Text("cf1"), new Text("cq1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1")).followingKey(PartialKey.ROW)));
  assertTrue(rowRange.contains(new Key(new Text("r11"))));
  assertFalse(rowRange.contains(new Key(new Text("r2"))));
  assertFalse(rowRange.contains(new Key(new Text("r2"), new Text("cf1"))));
  assertFalse(rowRange.contains(new Key(new Text("r2"), new Text("cf1"), new Text("cq1"))));
  assertFalse(rowRange.contains(new Key(new Text("r0"))));
  assertFalse(rowRange.contains(new Key(new Text("r2")).followingKey(PartialKey.ROW)));
}

// Inclusive start, exclusive stop.
@Test
public void testRow4() {
  Range rowRange = new Range(new Text("r1"), true, new Text("r2"), false);
  assertTrue(rowRange.contains(new Key(new Text("r1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1"), new Text("cf1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1"), new Text("cf1"), new Text("cq1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1")).followingKey(PartialKey.ROW)));
  assertTrue(rowRange.contains(new Key(new Text("r11"))));
  assertFalse(rowRange.contains(new Key(new Text("r2"))));
  assertFalse(rowRange.contains(new Key(new Text("r2"), new Text("cf1"))));
  assertFalse(rowRange.contains(new Key(new Text("r2"), new Text("cf1"), new Text("cq1"))));
  assertFalse(rowRange.contains(new Key(new Text("r0"))));
  assertFalse(rowRange.contains(new Key(new Text("r2")).followingKey(PartialKey.ROW)));
}

// Exclusive start, inclusive stop.
@Test
public void testRow5() {
  Range rowRange = new Range(new Text("r1"), false, new Text("r2"), true);
  assertFalse(rowRange.contains(new Key(new Text("r1"))));
  assertFalse(rowRange.contains(new Key(new Text("r1"), new Text("cf1"))));
  assertFalse(rowRange.contains(new Key(new Text("r1"), new Text("cf1"), new Text("cq1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1")).followingKey(PartialKey.ROW)));
  assertTrue(rowRange.contains(new Key(new Text("r11"))));
  assertTrue(rowRange.contains(new Key(new Text("r2"))));
  assertTrue(rowRange.contains(new Key(new Text("r2"), new Text("cf1"))));
  assertTrue(rowRange.contains(new Key(new Text("r2"), new Text("cf1"), new Text("cq1"))));
  assertFalse(rowRange.contains(new Key(new Text("r0"))));
  assertFalse(rowRange.contains(new Key(new Text("r2")).followingKey(PartialKey.ROW)));
}

// Null stop row = infinite stop: everything at or after r1 is contained.
@Test
public void testRow6() {
  Range rowRange = new Range(new Text("r1"), true, null, true);
  assertTrue(rowRange.contains(new Key(new Text("r1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1"), new Text("cf1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1"), new Text("cf1"), new Text("cq1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1")).followingKey(PartialKey.ROW)));
  assertTrue(rowRange.contains(new Key(new Text("r11"))));
  assertTrue(rowRange.contains(new Key(new Text("r2"))));
  assertTrue(rowRange.contains(new Key(new Text("r2"), new Text("cf1"))));
  assertTrue(rowRange.contains(new Key(new Text("r2"), new Text("cf1"), new Text("cq1"))));
  assertFalse(rowRange.contains(new Key(new Text("r0"))));
  assertTrue(rowRange.contains(new Key(new Text("r2")).followingKey(PartialKey.ROW)));
}

// Null start row = infinite start: everything up to and including r2 is contained.
@Test
public void testRow7() {
  Range rowRange = new Range(null, true, new Text("r2"), true);
  assertTrue(rowRange.contains(new Key(new Text("r1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1"), new Text("cf1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1"), new Text("cf1"), new Text("cq1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1")).followingKey(PartialKey.ROW)));
  assertTrue(rowRange.contains(new Key(new Text("r11"))));
  assertTrue(rowRange.contains(new Key(new Text("r2"))));
  assertTrue(rowRange.contains(new Key(new Text("r2"), new Text("cf1"))));
  assertTrue(rowRange.contains(new Key(new Text("r2"), new Text("cf1"), new Text("cq1"))));
  assertTrue(rowRange.contains(new Key(new Text("r0"))));
  assertFalse(rowRange.contains(new Key(new Text("r2")).followingKey(PartialKey.ROW)));
}

// Null row = fully infinite range: contains every key.
@Test
public void testRow8() {
  Range rowRange = new Range((Text) null);
  assertTrue(rowRange.contains(new Key(new Text("r1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1"), new Text("cf1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1"), new Text("cf1"), new Text("cq1"))));
  assertTrue(rowRange.contains(new Key(new Text("r1")).followingKey(PartialKey.ROW)));
  assertTrue(rowRange.contains(new Key(new Text("r11"))));
  assertTrue(rowRange.contains(new Key(new Text("r2"))));
  assertTrue(rowRange.contains(new Key(new Text("r2"), new Text("cf1"))));
  assertTrue(rowRange.contains(new Key(new Text("r2"), new Text("cf1"), new Text("cq1"))));
  assertTrue(rowRange.contains(new Key(new Text("r0"))));
  assertTrue(rowRange.contains(new Key(new Text("r2")).followingKey(PartialKey.ROW)));
}
/**
 * Builds a row-based range; null rows become infinite bounds, and the boolean
 * flags control start/stop inclusivity.
 */
private static Range newRange(String r1, boolean r1i, String r2, boolean r2i) {
  Text startRow = (r1 == null) ? null : new Text(r1);
  Text stopRow = (r2 == null) ? null : new Text(r2);
  return new Range(startRow, r1i, stopRow, r2i);
}

/** Key with only the row portion set. */
private static Key newKey(String r) {
  return new Key(new Text(r));
}
// Clipping a range identical to the fence: resulting inclusivity is the AND of
// fence and range inclusivity at each bound.
@Test
public void testClip1() {
  Range fence = newRange("a", false, "c", false);
  runClipTest(fence, newRange("a", false, "c", false), newRange("a", false, "c", false));
  runClipTest(fence, newRange("a", true, "c", false), newRange("a", false, "c", false));
  runClipTest(fence, newRange("a", false, "c", true), newRange("a", false, "c", false));
  runClipTest(fence, newRange("a", true, "c", true), newRange("a", false, "c", false));
  fence = newRange("a", true, "c", false);
  runClipTest(fence, newRange("a", false, "c", false), newRange("a", false, "c", false));
  runClipTest(fence, newRange("a", true, "c", false), newRange("a", true, "c", false));
  runClipTest(fence, newRange("a", false, "c", true), newRange("a", false, "c", false));
  runClipTest(fence, newRange("a", true, "c", true), newRange("a", true, "c", false));
  fence = newRange("a", false, "c", true);
  runClipTest(fence, newRange("a", false, "c", false), newRange("a", false, "c", false));
  runClipTest(fence, newRange("a", true, "c", false), newRange("a", false, "c", false));
  runClipTest(fence, newRange("a", false, "c", true), newRange("a", false, "c", true));
  runClipTest(fence, newRange("a", true, "c", true), newRange("a", false, "c", true));
  fence = newRange("a", true, "c", true);
  runClipTest(fence, newRange("a", false, "c", false), newRange("a", false, "c", false));
  runClipTest(fence, newRange("a", true, "c", false), newRange("a", true, "c", false));
  runClipTest(fence, newRange("a", false, "c", true), newRange("a", false, "c", true));
  runClipTest(fence, newRange("a", true, "c", true), newRange("a", true, "c", true));
}

// Clipping ranges with infinite bounds: result is bounded by the fence.
@Test
public void testClip2() {
  Range fence = newRange("a", false, "c", false);
  runClipTest(fence, newRange(null, true, null, true), newRange("a", false, "c", false));
  runClipTest(fence, newRange(null, true, "c", true), newRange("a", false, "c", false));
  runClipTest(fence, newRange("a", true, null, true), newRange("a", false, "c", false));
  runClipTest(fence, newRange("a", true, "c", true), newRange("a", false, "c", false));
}

// Clipping ranges that extend beyond or lie within the fence on either side.
@Test
public void testClip3() {
  Range fence = newRange("a", false, "c", false);
  runClipTest(fence, newRange("0", false, "z", false), newRange("a", false, "c", false));
  runClipTest(fence, newRange("0", true, "z", false), newRange("a", false, "c", false));
  runClipTest(fence, newRange("0", false, "z", true), newRange("a", false, "c", false));
  runClipTest(fence, newRange("0", true, "z", true), newRange("a", false, "c", false));
  runClipTest(fence, newRange("0", false, "b", false), newRange("a", false, "b", false));
  runClipTest(fence, newRange("0", true, "b", false), newRange("a", false, "b", false));
  runClipTest(fence, newRange("0", false, "b", true), newRange("a", false, "b", true));
  runClipTest(fence, newRange("0", true, "b", true), newRange("a", false, "b", true));
  runClipTest(fence, newRange("a1", false, "z", false), newRange("a1", false, "c", false));
  runClipTest(fence, newRange("a1", true, "z", false), newRange("a1", true, "c", false));
  runClipTest(fence, newRange("a1", false, "z", true), newRange("a1", false, "c", false));
  runClipTest(fence, newRange("a1", true, "z", true), newRange("a1", true, "c", false));
  runClipTest(fence, newRange("a1", false, "b", false), newRange("a1", false, "b", false));
  runClipTest(fence, newRange("a1", true, "b", false), newRange("a1", true, "b", false));
  runClipTest(fence, newRange("a1", false, "b", true), newRange("a1", false, "b", true));
  runClipTest(fence, newRange("a1", true, "b", true), newRange("a1", true, "b", true));
}

// Key-bounded fences: non-overlapping ranges must make clip throw; ranges
// touching only at a boundary clip to a single-point range when inclusive.
@Test
public void testClip4() {
  Range fence = new Range(newKey("c"), false, newKey("n"), false);
  runClipTest(fence, new Range(newKey("a"), false, newKey("c"), false));
  runClipTest(fence, new Range(newKey("a"), false, newKey("c"), true));
  runClipTest(fence, new Range(newKey("n"), false, newKey("r"), false));
  runClipTest(fence, new Range(newKey("n"), true, newKey("r"), false));
  runClipTest(fence, new Range(newKey("a"), true, newKey("b"), false));
  runClipTest(fence, new Range(newKey("a"), true, newKey("b"), true));
  fence = new Range(newKey("c"), true, newKey("n"), true);
  runClipTest(fence, new Range(newKey("a"), false, newKey("c"), false));
  runClipTest(fence, new Range(newKey("a"), false, newKey("c"), true),
      new Range(newKey("c"), true, newKey("c"), true));
  runClipTest(fence, new Range(newKey("n"), false, newKey("r"), false));
  runClipTest(fence, new Range(newKey("n"), true, newKey("r"), false),
      new Range(newKey("n"), true, newKey("n"), true));
  runClipTest(fence, new Range(newKey("q"), false, newKey("r"), false));
  runClipTest(fence, new Range(newKey("q"), true, newKey("r"), false));
  fence = newRange("b", true, "b", true);
  runClipTest(fence, newRange("b", false, "c", false));
  runClipTest(fence, newRange("b", true, "c", false), newRange("b", true, "b", true));
  runClipTest(fence, newRange("a", false, "b", false));
  runClipTest(fence, newRange("a", false, "b", true), newRange("b", true, "b", true));
}
@Test
public void testBug1() {
  // unit test related to a bug that was observed (bug was not in range, but want to ensure the
  // following works)
  // clip caught the scanner going to a tablet passed the end of the scan range
  Range fence = new Range(new Text("10<"), false, new Text("~"), true);
  Key k1 = new Key(new Text("10<"), new Text("~tab"), new Text("~pr"));
  Range range = new Range(k1, true, k1.followingKey(PartialKey.ROW), false);
  runClipTest(fence, range);
  // scanner was not handling edge case properly...
  Range scanRange =
      new Range(
          new Key("10;007cdc5b0".getBytes(), "~tab".getBytes(), "~pr".getBytes(), "".getBytes(),
              130962, false),
          false, new Key(new Text("10<")).followingKey(PartialKey.ROW), false);
  // below is the proper check the scanner now does instead of just comparing the row bytes
  // NOTE(review): the boolean returned by afterEndKey is discarded, so this line only verifies
  // the call completes without throwing — consider asserting the expected result.
  scanRange.afterEndKey(new Key(new Text("10<")).followingKey(PartialKey.ROW));
}
/** Asserts that clipping {@code range} against {@code fence} fails (no overlap). */
private void runClipTest(Range fence, Range range) {
  assertThrows(IllegalArgumentException.class, () -> fence.clip(range));
}

/** Asserts that clipping {@code range} against {@code fence} yields {@code expected}. */
private void runClipTest(Range fence, Range range, Range expected) {
  assertEquals(expected, fence.clip(range));
}
/** Key with row, column family, and column qualifier set. */
private static Key newKey(String row, String fam, String qual) {
  return new Key(new Text(row), new Text(fam), new Text(qual));
}

/** Key with row, column family, column qualifier, and column visibility set. */
private static Key newKey(String row, String fam, String qual, String vis) {
  return new Key(new Text(row), new Text(fam), new Text(qual), new Text(vis));
}

/** Column with a family and an optional qualifier (no visibility). */
private static Column newColumn(String fam, String qual) {
  byte[] qualBytes = null;
  if (qual != null) {
    qualBytes = qual.getBytes();
  }
  return new Column(fam.getBytes(), qualBytes, null);
}

/** Column with only a family. */
private static Column newColumn(String fam) {
  return newColumn(fam, null);
}

/** Range covering exactly one row. */
private static Range newRange(String row) {
  return new Range(new Text(row));
}
// bound() on a whole-row range restricts containment to the given column
// family interval, inclusive at both ends.
@Test
public void testBound1() {
  Range range1 = newRange("row1");
  Range range2 = range1.bound(newColumn("b"), newColumn("e"));
  assertFalse(range2.contains(newKey("row1")));
  assertFalse(range2.contains(newKey("row1", "a", "z")));
  assertTrue(range2.contains(newKey("row1", "b", "")));
  assertTrue(range2.contains(newKey("row1", "b", "z")));
  assertTrue(range2.contains(newKey("row1", "c", "z")));
  assertTrue(range2.contains(newKey("row1", "e", "")));
  assertTrue(range2.contains(newKey("row1", "e", "z")));
  assertFalse(range2.contains(newKey("row1", "e", "").followingKey(PartialKey.ROW_COLFAM)));
  assertFalse(range2.contains(newKey("row1", "f", "")));
  assertFalse(range2.contains(newKey("row1", "f", "z")));
}

// bound() with column bounds wider than, narrower than, and equal to the
// range's existing key bounds; also exercises family+qualifier columns.
@Test
public void testBound2() {
  Range range1 = new Range(newKey("row1", "b", "x"), true, newKey("row1", "f", "x"), true);
  // Wider column bounds leave the range unchanged.
  Range range2 = range1.bound(newColumn("a"), newColumn("g"));
  assertEquals(range1, range2);
  assertFalse(range2.contains(newKey("row1", "a", "x")));
  assertTrue(range2.contains(newKey("row1", "b", "x")));
  assertTrue(range2.contains(newKey("row1", "f", "x")));
  assertFalse(range2.contains(newKey("row1", "g", "")));
  // Narrower family-only bounds shrink the range.
  Range range3 = range1.bound(newColumn("c"), newColumn("d"));
  assertFalse(range3.contains(newKey("row1", "b", "x")));
  assertTrue(range3.contains(newKey("row1", "c", "")));
  assertTrue(range3.contains(newKey("row1", "c", "z")));
  assertTrue(range3.contains(newKey("row1", "d", "")));
  assertTrue(range3.contains(newKey("row1", "d", "z")));
  assertFalse(range3.contains(newKey("row1", "e", "")));
  assertFalse(range3.contains(newKey("row1", "f", "x")));
  // Family+qualifier bounds are honored down to the qualifier.
  Range range4 = range1.bound(newColumn("c", "w"), newColumn("d", "z"));
  assertFalse(range4.contains(newKey("row1", "b", "x")));
  assertTrue(range4.contains(newKey("row1", "c", "w")));
  assertTrue(range4.contains(newKey("row1", "c", "w", "")));
  assertTrue(range4.contains(newKey("row1", "c", "w", "a")));
  assertTrue(range4.contains(newKey("row1", "d", "z", "")));
  assertTrue(range4.contains(newKey("row1", "d", "z", "a")));
  assertFalse(range4.contains(newKey("row1", "d", "{", "")));
  assertFalse(
      range4.contains(newKey("row1", "d", "z", "a").followingKey(PartialKey.ROW_COLFAM_COLQUAL)));
  assertFalse(range4.contains(newKey("row1", "f", "x")));
  // Column bounds just outside the key bounds leave the range unchanged.
  Range range5 = range1.bound(newColumn("b", "w"), newColumn("f", "z"));
  assertEquals(range1, range5);
  assertFalse(range5.contains(newKey("row1", "b", "w")));
  assertTrue(range5.contains(newKey("row1", "b", "x")));
  assertTrue(range5.contains(newKey("row1", "f", "x")));
  assertFalse(range5.contains(newKey("row1", "f", "z")));
  // Column bounds just inside the key bounds tighten the range.
  Range range6 = range1.bound(newColumn("b", "y"), newColumn("f", "w"));
  assertFalse(range6.contains(newKey("row1", "b", "x")));
  assertTrue(range6.contains(newKey("row1", "b", "y")));
  assertTrue(range6.contains(newKey("row1", "f", "w")));
  assertTrue(range6.contains(newKey("row1", "f", "w", "a")));
  assertFalse(
      range6.contains(newKey("row1", "f", "w").followingKey(PartialKey.ROW_COLFAM_COLQUAL)));
  assertFalse(range6.contains(newKey("row1", "f", "x")));
  // Bounds entirely outside the key bounds leave the range unchanged.
  Range range7 = range1.bound(newColumn("a", "y"), newColumn("g", "w"));
  assertEquals(range1, range7);
  assertFalse(range7.contains(newKey("row1", "b", "w")));
  assertTrue(range7.contains(newKey("row1", "b", "x")));
  assertTrue(range7.contains(newKey("row1", "f", "x")));
  assertFalse(range7.contains(newKey("row1", "f", "z")));
}
// The String-based Range constructors must be equivalent to the Text-based ones
// for every inclusivity combination.
@Test
public void testString() {
  Range r1 = new Range(new Text("r1"));
  Range r2 = new Range("r1");
  assertEquals(r1, r2);
  r1 = new Range(new Text("r1"), new Text("r2"));
  r2 = new Range("r1", "r2");
  assertEquals(r1, r2);
  r1 = new Range(new Text("r1"), false, new Text("r2"), true);
  r2 = new Range("r1", false, "r2", true);
  assertEquals(r1, r2);
  r1 = new Range(new Text("r1"), true, new Text("r2"), false);
  r2 = new Range("r1", true, "r2", false);
  assertEquals(r1, r2);
}
// Range.exact at each key depth (row, family, qualifier, visibility, timestamp):
// contains the exact value (and deeper components), but not prefixes/neighbors.
@Test
public void testExactRange() {
  Range r = Range.exact("abc");
  assertTrue(r.contains(new Key("abc")));
  assertTrue(r.contains(new Key("abc", "def")));
  assertFalse(r.contains(new Key("abcd")));
  assertFalse(r.contains(new Key("abb")));
  assertFalse(r.contains(new Key("abd")));
  r = Range.exact("abc", "def");
  assertTrue(r.contains(new Key("abc", "def", "ghi")));
  assertFalse(r.contains(new Key("abc", "defg")));
  assertFalse(r.contains(new Key("abc", "dee")));
  assertFalse(r.contains(new Key("abc", "deg")));
  r = Range.exact("abc", "def", "ghi");
  assertTrue(r.contains(new Key("abc", "def", "ghi", "j&k")));
  assertFalse(r.contains(new Key("abc", "def", "ghij")));
  assertFalse(r.contains(new Key("abc", "def", "ghh")));
  assertFalse(r.contains(new Key("abc", "def", "ghj")));
  r = Range.exact("abc", "def", "ghi", "j&k");
  assertTrue(r.contains(new Key("abc", "def", "ghi", "j&k", 7L)));
  assertFalse(r.contains(new Key("abc", "def", "ghi", "j&kl")));
  assertFalse(r.contains(new Key("abc", "def", "ghi", "j&j")));
  assertFalse(r.contains(new Key("abc", "def", "ghi", "j&l")));
  r = Range.exact("abc", "def", "ghi", "j&k", 7L);
  assertTrue(r.contains(new Key("abc", "def", "ghi", "j&k", 7L)));
  assertFalse(r.contains(new Key("abc", "def", "ghi", "j&k", 6L)));
  assertFalse(r.contains(new Key("abc", "def", "ghi", "j&k", 8L)));
}
@Test
public void testPrefixRange() {
Range r = Range.prefix("abc");
assertTrue(r.contains(new Key("abc")));
assertTrue(r.contains(new Key("abc", "def")));
assertTrue(r.contains(new Key("abcd")));
assertFalse(r.contains(new Key("abb")));
assertFalse(r.contains(new Key("abd")));
r = Range.prefix("abc", "def");
assertTrue(r.contains(new Key("abc", "def", "ghi")));
assertTrue(r.contains(new Key("abc", "defg")));
assertFalse(r.contains(new Key("abc", "dee")));
assertFalse(r.contains(new Key("abc", "deg")));
r = Range.prefix("abc", "def", "ghi");
assertTrue(r.contains(new Key("abc", "def", "ghi", "j&k")));
assertTrue(r.contains(new Key("abc", "def", "ghij")));
assertFalse(r.contains(new Key("abc", "def", "ghh")));
assertFalse(r.contains(new Key("abc", "def", "ghj")));
r = Range.prefix("abc", "def", "ghi", "j&k");
assertTrue(r.contains(new Key("abc", "def", "ghi", "j&k", 7L)));
assertTrue(r.contains(new Key("abc", "def", "ghi", "j&kl")));
assertFalse(r.contains(new Key("abc", "def", "ghi", "j&j")));
assertFalse(r.contains(new Key("abc", "def", "ghi", "j&l")));
r = Range.prefix(makeText((byte) 0x07, (byte) 0xff));
assertTrue(r.contains(new Key(makeText((byte) 0x07, (byte) 0xff))));
assertTrue(r.contains(new Key(makeText((byte) 0x07, (byte) 0xff, (byte) 0x00))));
assertFalse(r.contains(new Key(makeText((byte) 0x07, (byte) 0xfe))));
assertFalse(r.contains(new Key(makeText((byte) 0x08))));
r = Range.prefix(makeText((byte) 0xff));
assertTrue(r.isInfiniteStopKey());
assertTrue(r.contains(new Key(makeText((byte) 0xff))));
assertTrue(r.contains(new Key(makeText((byte) 0xff, (byte) 0x07))));
r = Range.prefix(new Text("abc"), makeText((byte) 0xff));
assertTrue(r.contains(new Key(new Text("abc"), makeText((byte) 0xff))));
assertTrue(r.contains(new Key(new Text("abc"), makeText((byte) 0xff, (byte) 0x07))));
assertFalse(r.contains(new Key(new Text("abcd"))));
assertFalse(r.contains(new Key(new Text("abd"))));
r = Range.prefix(new Text("abc"), new Text("def"), makeText((byte) 0xff));
assertTrue(r.contains(new Key(new Text("abc"), new Text("def"), makeText((byte) 0xff))));
assertTrue(
r.contains(new Key(new Text("abc"), new Text("def"), makeText((byte) 0xff, (byte) 0x07))));
assertFalse(r.contains(new Key(new Text("abc"), new Text("defg"))));
assertFalse(r.contains(new Key(new Text("abc"), new Text("deg"))));
r = Range.prefix(new Text("abc"), new Text("def"), new Text("ghi"), makeText((byte) 0xff));
assertTrue(r.contains(
new Key(new Text("abc"), new Text("def"), new Text("ghi"), makeText((byte) 0xff))));
assertTrue(r.contains(new Key(new Text("abc"), new Text("def"), new Text("ghi"),
makeText((byte) 0xff, (byte) 0x07))));
assertFalse(r.contains(new Key(new Text("abc"), new Text("def"), new Text("ghij"))));
assertFalse(r.contains(new Key(new Text("abc"), new Text("def"), new Text("ghj"))));
}
  /**
   * Builds a hadoop {@link Text} directly from raw bytes, so tests can construct values outside
   * the printable ASCII range (e.g. 0xff).
   */
  public static Text makeText(byte... b) {
    return new Text(b);
  }
@Test
public void testPrefix() {
assertEquals(Range.followingPrefix(makeText((byte) 0x07)), new Text(makeText((byte) 0x08)));
assertEquals(Range.followingPrefix(makeText((byte) 0xfe)), new Text(makeText((byte) 0xff)));
assertNull(Range.followingPrefix(makeText((byte) 0xff)));
assertNull(Range.followingPrefix(makeText((byte) 0xff, (byte) 0xff)));
assertEquals(Range.followingPrefix(makeText((byte) 0x07, (byte) 0xff)),
new Text(makeText((byte) 0x08)));
}
@Test
public void testReadFields() throws Exception {
Range r = newRange("nuts", "soup");
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
r.write(dos);
dos.close();
ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
DataInputStream dis = new DataInputStream(bais);
Range r2 = new Range();
r2.readFields(dis);
dis.close();
assertEquals(r, r2);
}
@Test
public void testReadFields_Check() throws Exception {
Range r =
new Range(new Key(new Text("soup")), true, false, new Key(new Text("nuts")), true, false);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
r.write(dos);
dos.close();
ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
Range r2 = new Range();
try (DataInputStream dis = new DataInputStream(bais)) {
assertThrows(InvalidObjectException.class, () -> r2.readFields(dis),
"readFields allowed invalid range");
}
}
@Test
public void testThrift() {
Range r = newRange("nuts", "soup");
TRange tr = r.toThrift();
Range r2 = new Range(tr);
assertEquals(r, r2);
}
@Test
public void testThrift_Check() {
Range r =
new Range(new Key(new Text("soup")), true, false, new Key(new Text("nuts")), true, false);
TRange tr = r.toThrift();
assertThrows(IllegalArgumentException.class, () -> new Range(tr),
"Thrift constructor allowed invalid range");
}
}
| 9,426 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data/ByteSequenceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.Test;
public class ByteSequenceTest {

  /**
   * compareBytes orders sequences lexicographically, so a proper prefix sorts before any
   * extension of it, and both sort before a lexicographically greater sequence.
   */
  @Test
  public void testCompareBytes() {
    ByteSequence a = new ArrayByteSequence("a");
    ByteSequence b = new ArrayByteSequence("b");
    ByteSequence abc = new ArrayByteSequence("abc");

    assertTrue(ByteSequence.compareBytes(a, b) < 0);
    assertTrue(ByteSequence.compareBytes(a, abc) < 0);
    assertTrue(ByteSequence.compareBytes(abc, b) < 0);
  }
}
| 9,427 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data/constraints/NoDeleteConstraintTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data.constraints;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import java.util.List;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.junit.jupiter.api.Test;
public class NoDeleteConstraintTest {

  @Test
  public void testConstraint() {
    NoDeleteConstraint constraint = new NoDeleteConstraint();

    // A mutation containing a delete violates the constraint with violation code 1.
    Mutation withDelete = new Mutation("r1");
    withDelete.putDelete("f1", "q1");
    List<Short> violations = constraint.check(null, withDelete);
    assertEquals(1, violations.size());
    assertEquals(1, violations.get(0).intValue());

    // A plain put produces no violations (null means "no violations").
    Mutation withPut = new Mutation("r1");
    withPut.put("f1", "q1", new Value("v1"));
    assertNull(constraint.check(null, withPut));
  }
}
| 9,428 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data/constraints/DefaultKeySizeConstraintTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data.constraints;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.Collections;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
public class DefaultKeySizeConstraintTest {

  Constraint constraint = new DefaultKeySizeConstraint();

  // Just over the 1MB total key-size limit enforced by the constraint.
  final private byte[] oversized = new byte[1048577];
  // Under the limit alone, but three of these combined exceed it.
  final private byte[] large = new byte[419430];

  /**
   * Verifies that oversized keys are flagged whether the excess is in the row, the column family,
   * the column qualifier, or spread across all parts.
   *
   * <p>Fix: the original repeated the "colf &gt; 1mb" case verbatim; the second copy was clearly
   * intended to exercise an oversized column qualifier, which was otherwise untested.
   */
  @Test
  public void testConstraint() {
    // pass constraints
    Mutation m = new Mutation("rowId");
    m.put("colf", "colq", new Value());
    assertEquals(Collections.emptyList(), constraint.check(null, m));

    // test with row id > 1mb
    m = new Mutation(oversized);
    m.put("colf", "colq", new Value());
    assertEquals(
        Collections.singletonList(DefaultKeySizeConstraint.MAX__KEY_SIZE_EXCEEDED_VIOLATION),
        constraint.check(null, m));

    // test with colf > 1mb
    m = new Mutation("rowid");
    m.put(new Text(oversized), new Text("colq"), new Value());
    assertEquals(
        Collections.singletonList(DefaultKeySizeConstraint.MAX__KEY_SIZE_EXCEEDED_VIOLATION),
        constraint.check(null, m));

    // test with colq > 1mb
    m = new Mutation("rowid");
    m.put(new Text("colf"), new Text(oversized), new Value());
    assertEquals(
        Collections.singletonList(DefaultKeySizeConstraint.MAX__KEY_SIZE_EXCEEDED_VIOLATION),
        constraint.check(null, m));

    // test sum of smaller sizes violates 1mb constraint
    m = new Mutation(large);
    m.put(new Text(large), new Text(large), new Value());
    assertEquals(
        Collections.singletonList(DefaultKeySizeConstraint.MAX__KEY_SIZE_EXCEEDED_VIOLATION),
        constraint.check(null, m));
  }
}
| 9,429 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/data/constraints/VisibilityConstraintTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data.constraints;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import java.util.Arrays;
import java.util.List;
import org.apache.accumulo.core.data.ArrayByteSequence;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.constraints.Constraint.Environment;
import org.apache.accumulo.core.security.AuthorizationContainer;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
public class VisibilityConstraintTest {

  // Recreated for every test by setUp().
  VisibilityConstraint vc;
  Environment env;
  Mutation mutation;

  static final ColumnVisibility good = new ColumnVisibility("good");
  static final ColumnVisibility bad = new ColumnVisibility("bad");

  // Placeholder for row/family/qualifier/value cells whose content is irrelevant to the test.
  static final String D = "don't care";

  // Expected violation list: code 2 means insufficient authorizations.
  static final List<Short> ENOAUTH = Arrays.asList((short) 2);

  @BeforeEach
  public void setUp() {
    vc = new VisibilityConstraint();
    mutation = new Mutation("r");

    // Mock an authorization container that grants only "good"; the nice mock returns the
    // default (false) for any other byte sequence asked about.
    ArrayByteSequence bs = new ArrayByteSequence("good".getBytes(UTF_8));

    AuthorizationContainer ac = createNiceMock(AuthorizationContainer.class);
    expect(ac.contains(bs)).andReturn(true);
    replay(ac);

    env = createMock(Environment.class);
    expect(env.getAuthorizationsContainer()).andReturn(ac);
    replay(env);
  }

  @Test
  public void testNoVisibility() {
    // A cell with no visibility is always allowed.
    mutation.put(D, D, D);
    assertNull(vc.check(env, mutation), "authorized");
  }

  @Test
  public void testVisibilityNoAuth() {
    // A visibility the user does not hold yields the ENOAUTH violation.
    mutation.put(D, D, bad, D);
    assertEquals(ENOAUTH, vc.check(env, mutation), "unauthorized");
  }

  @Test
  public void testGoodVisibilityAuth() {
    mutation.put(D, D, good, D);
    assertNull(vc.check(env, mutation), "authorized");
  }

  @Test
  public void testCachedVisibilities() {
    // Two cells with the same visibility: the second check hits the constraint's cache;
    // the mock only expects one contains() call, so a cache miss would fail here.
    mutation.put(D, D, good, "v");
    mutation.put(D, D, good, "v2");
    assertNull(vc.check(env, mutation), "authorized");
  }

  @Test
  public void testMixedVisibilities() {
    // One unauthorized cell is enough to flag the whole mutation.
    mutation.put(D, D, bad, D);
    mutation.put(D, D, good, D);
    assertEquals(ENOAUTH, vc.check(env, mutation), "unauthorized");
  }
}
| 9,430 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/conf/PropertyTypeTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.conf;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.lang.reflect.Method;
import java.util.Locale;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.apache.accumulo.core.WithTestNames;
import org.apache.accumulo.core.file.rfile.RFile;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import com.google.common.base.Joiner;
public class PropertyTypeTest extends WithTestNames {

  // The PropertyType under test, derived from the test method name by getPropertyTypeForTest().
  private PropertyType type;

  /**
   * Maps a test method named {@code testTypeFOO} to {@code PropertyType.FOO}, failing fast if no
   * such enum constant exists (which would indicate a misnamed test method).
   */
  @BeforeEach
  public void getPropertyTypeForTest() {
    if (testName().startsWith("testType")) {
      String tn = testName().substring("testType".length());
      try {
        type = PropertyType.valueOf(tn);
      } catch (IllegalArgumentException e) {
        throw new AssertionError("Unexpected test method for non-existent "
            + PropertyType.class.getSimpleName() + "." + tn);
      }
    }
  }

  @Test
  public void testGetFormatDescription() {
    assertEquals(
        "An arbitrary string of characters whose format is unspecified"
            + " and interpreted based on the context of the property to which it applies.",
        PropertyType.STRING.getFormatDescription());
  }

  @Test
  public void testToString() {
    assertEquals("string", PropertyType.STRING.toString());
  }

  /**
   * This test checks the remainder of the methods in this class to ensure each property type has a
   * corresponding test
   */
  @Test
  public void testFullCoverage() {
    String typePrefix = "testType";
    Set<String> typesTested = Stream.of(this.getClass().getMethods()).map(Method::getName)
        .filter(m -> m.startsWith(typePrefix)).map(m -> m.substring(typePrefix.length()))
        .collect(Collectors.toSet());
    Set<String> types =
        Stream.of(PropertyType.values()).map(Enum::name).collect(Collectors.toSet());
    assertEquals(types, typesTested, "Expected to see a test method for each property type");
  }

  // Asserts every argument is accepted by the current type's format check.
  private void valid(final String... args) {
    for (String s : args) {
      assertTrue(type.isValidFormat(s),
          s + " should be valid for " + PropertyType.class.getSimpleName() + "." + type.name());
    }
  }

  // Asserts every argument is rejected by the current type's format check.
  private void invalid(final String... args) {
    for (String s : args) {
      assertFalse(type.isValidFormat(s),
          s + " should be invalid for " + PropertyType.class.getSimpleName() + "." + type.name());
    }
  }

  @Test
  public void testTypeABSOLUTEPATH() {
    valid(null, "/foo", "/foo/c", "/", System.getProperty("user.dir"));
    // in Hadoop 2.x, Path only normalizes Windows paths properly when run on a Windows system
    // this makes the following checks fail
    // Fix: use Locale.ROOT so locale-sensitive case mapping (e.g. the Turkish dotless i turning
    // "WI" into "wı") cannot break the "windows" substring match.
    if (System.getProperty("os.name").toLowerCase(Locale.ROOT).contains("windows")) {
      valid("d:\\foo12", "c:\\foo\\g", "c:\\foo\\c", "c:\\");
    }
    invalid("foo12", "foo/g", "foo\\c");
  }

  @Test
  public void testTypeBOOLEAN() {
    valid(null, "True", "true", "False", "false", "tRUE", "fAlSe");
    invalid("foobar", "", "F", "T", "1", "0", "f", "t");
  }

  @Test
  public void testTypeCLASSNAME() {
    // inner/anonymous class names (with '$') are accepted
    valid(null, "", String.class.getName(), String.class.getName() + "$1",
        String.class.getName() + "$TestClass");
    invalid("abc-def", "-", "!@#$%");
  }

  @Test
  public void testTypeCLASSNAMELIST() {
    testTypeCLASSNAME(); // test single class name
    valid(null, Joiner.on(",").join(String.class.getName(), String.class.getName() + "$1",
        String.class.getName() + "$TestClass"));
  }

  @Test
  public void testTypeCOUNT() {
    // counts are non-negative ints
    valid(null, "0", "1024", Long.toString(Integer.MAX_VALUE));
    invalid(Long.toString(Integer.MAX_VALUE + 1L), "-65535", "-1");
  }

  @Test
  public void testTypeDURABILITY() {
    valid(null, "none", "log", "flush", "sync");
    invalid("", "other");
  }

  @Test
  public void testTypeGC_POST_ACTION() {
    valid(null, "none", "flush", "compact");
    invalid("", "other");
  }

  @Test
  public void testTypeLAST_LOCATION_MODE() {
    valid(null, "compaction", "assignment");
    invalid("", "other");
  }

  @Test
  public void testTypeFRACTION() {
    // fractions accept decimals, percentages, and scientific notation
    valid(null, "1", "0", "1.0", "25%", "2.5%", "10.2E-3", "10.2E-3%", ".3");
    invalid("", "other", "20%%", "-0.3", "3.6a", "%25", "3%a");
  }

  @Test
  public void testTypeHOSTLIST() {
    valid(null, "localhost", "server1,server2,server3", "server1:1111,server2:3333",
        "localhost:1111", "server2:1111", "www.server", "www.server:1111", "www.server.com",
        "www.server.com:111");
    invalid(":111", "local host");
  }

  @Test
  public void testTypeBYTES() {
    // BYTES takes absolute sizes only; percentages belong to MEMORY
    valid(null, "1024", "20B", "100K", "1500M", "2G");
    invalid("1M500K", "1M 2K", "1MB", "1.5G", "1,024K", "", "a", "10%");
  }

  @Test
  public void testTypeMEMORY() {
    valid(null, "1024", "20B", "100K", "1500M", "2G", "10%");
    invalid("1M500K", "1M 2K", "1MB", "1.5G", "1,024K", "", "a");
  }

  @Test
  public void testTypePATH() {
    valid(null, "", "/absolute/path", "relative/path", "/with/trailing/slash/",
        "with/trailing/slash/");
  }

  @Test
  public void testTypePORT() {
    // ports below 1024 (privileged) are rejected, except 0 meaning "any"
    valid(null, "0", "1024", "30000", "65535");
    invalid("65536", "-65535", "-1", "1023");
  }

  @Test
  public void testTypeJSON() {
    valid("{\"y\":123}",
        "[{'name':'small','type':'internal','maxSize':'32M','numThreads':1},{'name':'huge','type':'internal','numThreads':1}]"
            .replaceAll("'", "\""));
    invalid("not json", "{\"x}", "{\"y\"", "{name:value}",
        "{ \"foo\" : \"bar\", \"foo\" : \"baz\" }", "{\"y\":123}extra");
  }

  @Test
  public void testTypePREFIX() {
    // PREFIX is a marker type; no concrete value is ever valid for it
    invalid(null, "", "whatever");
  }

  @Test
  public void testTypeSTRING() {
    valid(null, "", "whatever");
  }

  @Test
  public void testTypeTIMEDURATION() {
    valid(null, "600", "30s", "45m", "30000ms", "3d", "1h");
    invalid("1w", "1h30m", "1s 200ms", "ms", "", "a");
  }

  @Test
  public void testTypeURI() {
    valid(null, "", "hdfs://hostname", "file:///path/", "hdfs://example.com:port/path");
  }

  @Test
  public void testTypeFILENAME_EXT() {
    // only the canonical lowercase RFile extension is accepted
    valid(RFile.EXTENSION, "rf");
    invalid(null, "RF", "map", "", "MAP", "rF", "Rf", " rf ");
  }
}
| 9,431 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/conf/HadoopCredentialProviderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.conf;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.io.File;
import java.net.URL;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
@SuppressFBWarnings(value = "PATH_TRAVERSAL_IN", justification = "paths not set by user input")
public class HadoopCredentialProviderTest {
private static final Configuration hadoopConf = new Configuration();
private static final Logger log = LoggerFactory.getLogger(HadoopCredentialProviderTest.class);
private static final String populatedKeyStoreName = "/accumulo.jceks",
emptyKeyStoreName = "/empty.jceks";
private static File emptyKeyStore, populatedKeyStore;
@BeforeAll
public static void checkCredentialProviderAvailable() {
URL populatedKeyStoreUrl =
HadoopCredentialProviderTest.class.getResource(populatedKeyStoreName),
emptyKeyStoreUrl = HadoopCredentialProviderTest.class.getResource(emptyKeyStoreName);
assertNotNull(populatedKeyStoreUrl, "Could not find " + populatedKeyStoreName);
assertNotNull(emptyKeyStoreUrl, "Could not find " + emptyKeyStoreName);
populatedKeyStore = new File(populatedKeyStoreUrl.getFile());
emptyKeyStore = new File(emptyKeyStoreUrl.getFile());
}
protected String getKeyStoreUrl(File absoluteFilePath) {
return "jceks://file" + absoluteFilePath.getAbsolutePath();
}
@Test
public void testNullConfigOnGetValue() {
assertThrows(NullPointerException.class,
() -> HadoopCredentialProvider.getValue(null, "alias"));
}
@Test
public void testNullAliasOnGetValue() {
assertThrows(NullPointerException.class,
() -> HadoopCredentialProvider.getValue(new Configuration(false), null));
}
protected void checkCredentialProviders(Configuration conf, Map<String,String> expectation) {
List<String> keys = HadoopCredentialProvider.getKeys(conf);
assertNotNull(keys);
assertEquals(expectation.keySet(), new HashSet<>(keys));
for (String expectedKey : keys) {
char[] value = HadoopCredentialProvider.getValue(conf, expectedKey);
assertNotNull(value);
assertEquals(expectation.get(expectedKey), new String(value));
}
}
@Test
public void testExtractFromProvider() {
String absPath = getKeyStoreUrl(populatedKeyStore);
Configuration conf = new Configuration();
HadoopCredentialProvider.setPath(conf, absPath);
Map<String,String> expectations = new HashMap<>();
expectations.put("key1", "value1");
expectations.put("key2", "value2");
checkCredentialProviders(conf, expectations);
}
@Test
public void testEmptyKeyStoreParses() {
String absPath = getKeyStoreUrl(emptyKeyStore);
Configuration conf = new Configuration();
HadoopCredentialProvider.setPath(conf, absPath);
checkCredentialProviders(conf, new HashMap<>());
}
@Test
public void testEmptyAndPopulatedKeyStores() {
String populatedAbsPath = getKeyStoreUrl(populatedKeyStore),
emptyAbsPath = getKeyStoreUrl(emptyKeyStore);
Configuration conf = new Configuration();
HadoopCredentialProvider.setPath(conf, populatedAbsPath + "," + emptyAbsPath);
Map<String,String> expectations = new HashMap<>();
expectations.put("key1", "value1");
expectations.put("key2", "value2");
checkCredentialProviders(conf, expectations);
}
@Test
public void testNonExistentClassesDoesntFail() {
Configuration conf = new Configuration();
HadoopCredentialProvider.setPath(conf, "jceks://file/foo/bar.jceks");
List<String> keys = HadoopCredentialProvider.getKeys(conf);
assertNotNull(keys);
assertEquals(Collections.emptyList(), keys);
assertNull(HadoopCredentialProvider.getValue(conf, "key1"));
}
@Test
public void testConfigurationCreation() {
final String path = "jceks://file/tmp/foo.jks";
final Configuration actualConf = hadoopConf;
HadoopCredentialProvider.setPath(actualConf, path);
assertNotNull(actualConf);
assertEquals(path, actualConf.get("hadoop.security.credential.provider.path"));
}
@Test
public void createKeystoreProvider() throws Exception {
File targetDir = new File(System.getProperty("user.dir") + "/target");
File keystoreFile = new File(targetDir, "create.jks");
if (keystoreFile.exists()) {
if (!keystoreFile.delete()) {
log.error("Unable to delete {}", keystoreFile);
}
}
String providerUrl = "jceks://file" + keystoreFile.getAbsolutePath();
Configuration conf = new Configuration();
HadoopCredentialProvider.setPath(conf, providerUrl);
String alias = "foo";
char[] credential = "bar".toCharArray();
HadoopCredentialProvider.createEntry(conf, alias, credential);
assertArrayEquals(credential, HadoopCredentialProvider.getValue(conf, alias));
}
@Test
public void extractFromHdfs() throws Exception {
File target = new File(System.getProperty("user.dir"), "target");
String prevValue = System.setProperty("test.build.data",
new File(target, this.getClass().getName() + "_minidfs").toString());
MiniDFSCluster dfsCluster = new MiniDFSCluster.Builder(new Configuration()).build();
try {
if (null != prevValue) {
System.setProperty("test.build.data", prevValue);
} else {
System.clearProperty("test.build.data");
}
// One namenode, One configuration
Configuration dfsConfiguration = dfsCluster.getConfiguration(0);
Path destPath = new Path("/accumulo.jceks");
FileSystem dfs = dfsCluster.getFileSystem();
// Put the populated keystore in hdfs
dfs.copyFromLocalFile(new Path(populatedKeyStore.toURI()), destPath);
Configuration cpConf = dfsConfiguration;
HadoopCredentialProvider.setPath(cpConf, "jceks://hdfs/accumulo.jceks");
// The values in the keystore
Map<String,String> expectations = new HashMap<>();
expectations.put("key1", "value1");
expectations.put("key2", "value2");
checkCredentialProviders(cpConf, expectations);
} finally {
dfsCluster.shutdown();
}
}
}
| 9,432 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/conf/ConfigurationTypeHelperTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.conf;
import static java.util.concurrent.TimeUnit.DAYS;
import static java.util.concurrent.TimeUnit.HOURS;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.function.Function;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
public class ConfigurationTypeHelperTest {

  @Test
  public void testGetMemoryInBytes() {
    // Fixed sizes must parse identically through both entry points, with suffixes
    // accepted in either case.
    Stream.<Function<String,Long>>of(ConfigurationTypeHelper::getFixedMemoryAsBytes,
        ConfigurationTypeHelper::getMemoryAsBytes).forEach(parse -> {
          long kib = 1024L;
          assertEquals(42L, parse.apply("42").longValue());
          assertEquals(42L, parse.apply("42b").longValue());
          assertEquals(42L, parse.apply("42B").longValue());
          assertEquals(42L * kib, parse.apply("42K").longValue());
          assertEquals(42L * kib, parse.apply("42k").longValue());
          assertEquals(42L * kib * kib, parse.apply("42M").longValue());
          assertEquals(42L * kib * kib, parse.apply("42m").longValue());
          assertEquals(42L * kib * kib * kib, parse.apply("42G").longValue());
          assertEquals(42L * kib * kib * kib, parse.apply("42g").longValue());
        });
    // Percentages are relative to the JVM's max heap and only valid for getMemoryAsBytes.
    long maxHeap = Runtime.getRuntime().maxMemory();
    assertEquals(maxHeap / 10, ConfigurationTypeHelper.getMemoryAsBytes("10%"));
    assertEquals(maxHeap / 5, ConfigurationTypeHelper.getMemoryAsBytes("20%"));
  }

  @Test
  public void testGetFixedMemoryAsBytesFailureCases1() {
    // unrecognized size suffix
    assertThrows(IllegalArgumentException.class,
        () -> ConfigurationTypeHelper.getFixedMemoryAsBytes("42x"));
  }

  @Test
  public void testGetFixedMemoryAsBytesFailureCases2() {
    // not a number at all
    assertThrows(IllegalArgumentException.class,
        () -> ConfigurationTypeHelper.getFixedMemoryAsBytes("FooBar"));
  }

  @Test
  public void testGetFixedMemoryAsBytesFailureCases3() {
    // percentages are only legal for getMemoryAsBytes, not the fixed variant
    assertThrows(IllegalArgumentException.class,
        () -> ConfigurationTypeHelper.getFixedMemoryAsBytes("40%"));
  }

  @Test
  public void testGetMemoryAsBytesFailureCases1() {
    assertThrows(IllegalArgumentException.class,
        () -> ConfigurationTypeHelper.getMemoryAsBytes("42x"));
  }

  @Test
  public void testGetMemoryAsBytesFailureCases2() {
    assertThrows(IllegalArgumentException.class,
        () -> ConfigurationTypeHelper.getMemoryAsBytes("FooBar"));
  }

  @Test
  public void testGetTimeInMillis() {
    // each unit suffix scales to milliseconds; a bare number is interpreted as seconds
    assertEquals(DAYS.toMillis(42), ConfigurationTypeHelper.getTimeInMillis("42d"));
    assertEquals(HOURS.toMillis(42), ConfigurationTypeHelper.getTimeInMillis("42h"));
    assertEquals(MINUTES.toMillis(42), ConfigurationTypeHelper.getTimeInMillis("42m"));
    assertEquals(SECONDS.toMillis(42), ConfigurationTypeHelper.getTimeInMillis("42s"));
    assertEquals(SECONDS.toMillis(42), ConfigurationTypeHelper.getTimeInMillis("42"));
    assertEquals(42L, ConfigurationTypeHelper.getTimeInMillis("42ms"));
  }

  @Test
  public void testGetTimeInMillisFailureCase1() {
    assertThrows(IllegalArgumentException.class,
        () -> ConfigurationTypeHelper.getTimeInMillis("abc"));
  }

  @Test
  public void testGetTimeInMillisFailureCase2() {
    // a unit with no magnitude is rejected
    assertThrows(IllegalArgumentException.class,
        () -> ConfigurationTypeHelper.getTimeInMillis("ms"));
  }

  @Test
  public void testGetFraction() {
    double eps = 0.0000000000001;
    // plain decimals pass through; percentages are divided by 100
    assertEquals(0.5d, ConfigurationTypeHelper.getFraction("0.5"), eps);
    assertEquals(3.0d, ConfigurationTypeHelper.getFraction("3"), eps);
    assertEquals(-0.25d, ConfigurationTypeHelper.getFraction("-25%"), eps);
    assertEquals(0.99546d, ConfigurationTypeHelper.getFraction("99.546%"), eps);
    assertEquals(0.0d, ConfigurationTypeHelper.getFraction("0%"), eps);
    assertEquals(0.0d, ConfigurationTypeHelper.getFraction("-0.000"), eps);
    assertEquals(0.001d, ConfigurationTypeHelper.getFraction(".1%"), eps);
    assertEquals(1d, ConfigurationTypeHelper.getFraction("1."), eps);
  }

  @Test
  public void testGetFractionFailureCase1() {
    assertThrows(IllegalArgumentException.class, () -> ConfigurationTypeHelper.getFraction("%"));
  }

  @Test
  public void testGetFractionFailureCase2() {
    assertThrows(IllegalArgumentException.class,
        () -> ConfigurationTypeHelper.getFraction("abc0%"));
  }

  @Test
  public void testGetFractionFailureCase3() {
    assertThrows(IllegalArgumentException.class, () -> ConfigurationTypeHelper.getFraction(".%"));
  }
}
| 9,433 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/conf/PropertyTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.conf;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.HashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.function.Predicate;
import java.util.stream.Collector;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tests the {@code Property} enum: default values, descriptions, prefixes, uniqueness,
 * port assignments, JSON/type validation, sensitivity marking, and key lookup.
 */
public class PropertyTest {
  private static final Logger LOG = LoggerFactory.getLogger(PropertyTest.class);

  @Test
  public void testProperties() {
    // collect all PREFIX-typed properties; every other property key must start
    // with one of these prefixes
    HashSet<String> validPrefixes = new HashSet<>();
    for (Property prop : Property.values()) {
      if (prop.getType().equals(PropertyType.PREFIX)) {
        validPrefixes.add(prop.getKey());
      }
    }
    HashSet<String> propertyNames = new HashSet<>();
    for (Property prop : Property.values()) {
      // make sure properties default values match their type
      if (prop.getType() == PropertyType.PREFIX) {
        assertNull(prop.getDefaultValue(),
            "PREFIX property " + prop.name() + " has unexpected non-null default value.");
      } else {
        assertTrue(Property.isValidProperty(prop.getKey(), prop.getDefaultValue()),
            "Property " + prop + " has invalid default value " + prop.getDefaultValue()
                + " for type " + prop.getType());
      }
      // make sure property has a description
      assertFalse(prop.getDescription() == null || prop.getDescription().isEmpty(),
          "Description not set for " + prop);
      // make sure property description ends with a period
      assertTrue(prop.getDescription().endsWith("."),
          "Property: " + prop.getKey() + " description does not end with period.");
      // make sure property starts with valid prefix
      boolean containsValidPrefix = false;
      for (String pre : validPrefixes) {
        if (prop.getKey().startsWith(pre)) {
          containsValidPrefix = true;
          break;
        }
      }
      assertTrue(containsValidPrefix, "Invalid prefix on prop " + prop);
      // make sure properties aren't duplicate; Set.add returns false on a
      // duplicate, so the check and the insertion are a single operation
      assertTrue(propertyNames.add(prop.getKey()), "Duplicate property name " + prop.getKey());
    }
  }

  @Test
  public void testPorts() {
    // default ports must be valid, unique across properties, and in the
    // non-privileged, in-range window (1024..65535)
    HashSet<Integer> usedPorts = new HashSet<>();
    for (Property prop : Property.values()) {
      if (prop.getType().equals(PropertyType.PORT)) {
        int port = Integer.parseInt(prop.getDefaultValue());
        assertTrue(Property.isValidProperty(prop.getKey(), Integer.toString(port)));
        // Set.add returns false when the port was already claimed by another property
        assertTrue(usedPorts.add(port), "Port already in use: " + port);
        assertTrue(port > 1023 && port < 65536, "Port out of range of valid ports: " + port);
      }
    }
  }

  @Test
  public void testJson() {
    // using "real" example
    String json1 =
        "[{'name':'small','type':'internal','maxSize':'32M','numThreads':2},{'name':'huge','type':'internal','numThreads':2}]"
            .replaceAll("'", "\"");
    // use synthetic, but valid json
    String json2 =
        "[{'foo':'bar','type':'test','fooBar':'32'},{'foo':'bar','type':'test','fooBar':32}]"
            .replaceAll("'", "\"");
    String json3 = "{'foo':'bar','type':'test','fooBar':'32'}".replaceAll("'", "\"");
    List<String> valids = List.of(json1, json2, json3);
    // malformed or ambiguous strings that the JSON validator must reject
    List<String> invalids = List.of("notJson", "also not json", "{\"x}", "{\"y\"", "{name:value}",
        "{ \"foo\" : \"bar\", \"foo\" : \"baz\" }", "{\"y\":123}extra");
    for (Property prop : Property.values()) {
      if (prop.getType().equals(PropertyType.JSON)) {
        valids.forEach(j -> assertTrue(Property.isValidProperty(prop.getKey(), j)));
        valids.forEach(j -> assertTrue(prop.getType().isValidFormat(j)));
        invalids.forEach(j -> assertFalse(Property.isValidProperty(prop.getKey(), j)));
        invalids.forEach(j -> assertFalse(prop.getType().isValidFormat(j)));
      }
    }
  }

  @Test
  public void testPropertyValidation() {
    // for each property type, verify a type-specific invalid value is rejected
    // while the property's own default value is accepted
    for (Property property : Property.values()) {
      PropertyType propertyType = property.getType();
      String invalidValue, validValue = property.getDefaultValue();
      LOG.debug("Testing property: {} with type: {}", property.getKey(), propertyType);
      switch (propertyType) {
        case URI:
        case PATH:
        case PREFIX:
        case STRING:
          // Skipping these values as they have default type of null
          LOG.debug("Skipping property {} due to property type: \"{}\"", property.getKey(),
              propertyType);
          continue;
        case TIMEDURATION:
          invalidValue = "1h30min";
          break;
        case BYTES:
          invalidValue = "1M500k";
          break;
        case MEMORY:
          invalidValue = "1.5G";
          break;
        case HOSTLIST:
          invalidValue = ":1000";
          break;
        case PORT:
          invalidValue = "65539";
          break;
        case COUNT:
          invalidValue = "-1";
          break;
        case FRACTION:
          invalidValue = "10Percent";
          break;
        case ABSOLUTEPATH:
          invalidValue = "~/foo";
          break;
        case CLASSNAME:
          LOG.debug("CLASSNAME properties currently fail this test");
          LOG.debug("Regex used for CLASSNAME property types may need to be modified");
          continue;
        case CLASSNAMELIST:
          invalidValue = "String,Object;Thing";
          break;
        case DURABILITY:
          invalidValue = "rinse";
          break;
        case GC_POST_ACTION:
          invalidValue = "expand";
          break;
        case BOOLEAN:
          invalidValue = "fooFalse";
          break;
        case JSON:
          invalidValue = "not json";
          break;
        default:
          LOG.debug("Property type: {} has no defined test case", propertyType);
          invalidValue = "foo";
      }
      assertFalse(Property.isValidProperty(property.getKey(), invalidValue));
      assertTrue(Property.isValidProperty(property.getKey(), validValue));
    }
  }

  // This test verifies all "sensitive" properties are properly marked as sensitive
  @Test
  public void testSensitiveKeys() {
    // add instance.crypto.opts, because it's a sensitive property not in the default configuration
    ConfigurationCopy conf = new ConfigurationCopy(DefaultConfiguration.getInstance());
    conf.set("instance.crypto.opts.sensitive.blah", "something");
    // ignores duplicates because ConfigurationCopy already de-duplicates
    Collector<Entry<String,String>,?,TreeMap<String,String>> treeMapCollector =
        Collectors.toMap(Entry::getKey, Entry::getValue, (a, b) -> a, TreeMap::new);
    // names that should be sensitive by convention (secret/password/crypto prefix)
    Predicate<Entry<String,String>> sensitiveNames =
        e -> e.getKey().equals(Property.INSTANCE_SECRET.getKey())
            || e.getKey().toLowerCase().contains("password")
            || e.getKey().toLowerCase().endsWith("secret")
            || e.getKey().startsWith(Property.INSTANCE_CRYPTO_SENSITIVE_PREFIX.getKey());
    Predicate<Entry<String,String>> isMarkedSensitive = e -> Property.isSensitive(e.getKey());
    TreeMap<String,String> expected = StreamSupport.stream(conf.spliterator(), false)
        .filter(sensitiveNames).collect(treeMapCollector);
    TreeMap<String,String> actual = StreamSupport.stream(conf.spliterator(), false)
        .filter(isMarkedSensitive).collect(treeMapCollector);
    // make sure instance.crypto.opts property wasn't excluded from both
    assertEquals("something", expected.get("instance.crypto.opts.sensitive.blah"));
    assertEquals(expected, actual);
  }

  @Test
  public void validatePropertyKeys() {
    // PREFIX-typed keys must end with '.' and must not carry a default value
    for (Property prop : Property.values()) {
      if (prop.getType().equals(PropertyType.PREFIX)) {
        assertTrue(prop.getKey().endsWith("."));
        assertNull(prop.getDefaultValue());
      }
    }
  }

  @Test
  public void testAnnotations() {
    // spot-check the @Experimental / @Sensitive / @Deprecated annotation wiring
    assertTrue(Property.GENERAL_VOLUME_CHOOSER.isExperimental());
    assertFalse(Property.TABLE_SAMPLER.isExperimental());
    assertTrue(Property.INSTANCE_SECRET.isSensitive());
    assertFalse(Property.INSTANCE_VOLUMES.isSensitive());
    assertFalse(Property.INSTANCE_VOLUMES_REPLACEMENTS.isDeprecated());
  }

  @Test
  public void testGetPropertyByKey() {
    // key lookup must return the identical enum constant for every property
    for (Property prop : Property.values()) {
      assertSame(prop, Property.getPropertyByKey(prop.getKey()));
    }
  }

  @Test
  public void testIsValidPropertyKey() {
    for (Property prop : Property.values()) {
      assertTrue(Property.isValidPropertyKey(prop.getKey()));
      // arbitrary suffixes are valid under a PREFIX property
      if (prop.getType().equals(PropertyType.PREFIX)) {
        assertTrue(Property.isValidPropertyKey(prop.getKey() + "foo9"));
      }
    }
    assertFalse(Property.isValidPropertyKey("abc.def"));
  }

  @Test
  public void testIsValidTablePropertyKey() {
    // only keys under "table." are valid table properties; suffixes are only
    // allowed under PREFIX-typed table properties
    for (Property prop : Property.values()) {
      if (prop.getKey().startsWith("table.") && !prop.getKey().equals("table.")) {
        assertTrue(Property.isValidTablePropertyKey(prop.getKey()), prop.getKey());
        if (prop.getType().equals(PropertyType.PREFIX)) {
          assertTrue(Property.isValidTablePropertyKey(prop.getKey() + "foo9"));
        } else {
          assertFalse(Property.isValidTablePropertyKey(prop.getKey() + "foo9"));
        }
      } else {
        assertFalse(Property.isValidTablePropertyKey(prop.getKey()));
      }
    }
    assertFalse(Property.isValidTablePropertyKey("abc.def"));
  }

  @Test
  public void testFixedPropertiesNonNull() {
    // fixed properties must always carry a usable (non-null, non-blank) default
    Property.fixedProperties.forEach(p -> {
      assertNotNull(p.getDefaultValue());
      assertFalse(p.getDefaultValue().isBlank());
    });
  }
}
| 9,434 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/conf/AccumuloConfigurationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.conf;
import static org.apache.accumulo.core.conf.Property.TABLE_ITERATOR_MINC_PREFIX;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.lang.reflect.Field;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.function.Predicate;
import org.apache.accumulo.core.conf.AccumuloConfiguration.ScanExecutorConfig;
import org.apache.accumulo.core.spi.scan.SimpleScanDispatcher;
import org.junit.jupiter.api.Test;
/**
 * Tests {@code AccumuloConfiguration}: key/property lookup parity, port parsing (single, any,
 * range, invalid), prefix-map caching identity semantics, scan executor configuration, and
 * deprecated-property resolution.
 */
public class AccumuloConfigurationTest {

  @Test
  public void testGetPropertyByString() {
    AccumuloConfiguration c = DefaultConfiguration.getInstance();
    boolean found = false;
    for (Property p : Property.values()) {
      if (p.getType() != PropertyType.PREFIX) {
        found = true;
        // ensure checking by property and by key works the same
        assertEquals(c.get(p), c.get(p.getKey()));
        // ensure that getting by key returns the expected value
        assertEquals(p.getDefaultValue(), c.get(p.getKey()));
      }
    }
    assertTrue(found, "test was a dud, and did nothing");
  }

  @Test
  public void testGetSinglePort() {
    AccumuloConfiguration c = DefaultConfiguration.getInstance();
    ConfigurationCopy cc = new ConfigurationCopy(c);
    cc.set(Property.TSERV_CLIENTPORT, "9997");
    int[] ports = cc.getPort(Property.TSERV_CLIENTPORT);
    assertEquals(1, ports.length);
    assertEquals(9997, ports[0]);
  }

  @Test
  public void testGetAnyPort() {
    // port 0 means "any available port" and is passed through unchanged
    AccumuloConfiguration c = DefaultConfiguration.getInstance();
    ConfigurationCopy cc = new ConfigurationCopy(c);
    cc.set(Property.TSERV_CLIENTPORT, "0");
    int[] ports = cc.getPort(Property.TSERV_CLIENTPORT);
    assertEquals(1, ports.length);
    assertEquals(0, ports[0]);
  }

  @Test
  public void testGetInvalidPort() {
    // an out-of-range single port silently falls back to the property default
    AccumuloConfiguration c = DefaultConfiguration.getInstance();
    ConfigurationCopy cc = new ConfigurationCopy(c);
    cc.set(Property.TSERV_CLIENTPORT, "1020");
    int[] ports = cc.getPort(Property.TSERV_CLIENTPORT);
    assertEquals(1, ports.length);
    assertEquals(Integer.parseInt(Property.TSERV_CLIENTPORT.getDefaultValue()), ports[0]);
  }

  @Test
  public void testGetPortRange() {
    // "lo-hi" expands to every port in the inclusive range
    AccumuloConfiguration c = DefaultConfiguration.getInstance();
    ConfigurationCopy cc = new ConfigurationCopy(c);
    cc.set(Property.TSERV_CLIENTPORT, "9997-9999");
    int[] ports = cc.getPort(Property.TSERV_CLIENTPORT);
    assertEquals(3, ports.length);
    assertEquals(9997, ports[0]);
    assertEquals(9998, ports[1]);
    assertEquals(9999, ports[2]);
  }

  @Test
  public void testGetPortRangeInvalidLow() {
    // a range reaching below 1024 is rejected outright
    AccumuloConfiguration c = DefaultConfiguration.getInstance();
    ConfigurationCopy cc = new ConfigurationCopy(c);
    cc.set(Property.TSERV_CLIENTPORT, "1020-1026");
    assertThrows(IllegalArgumentException.class, () -> {
      // NOTE(review): getPort throws, so the assertions below never execute;
      // they document the clamping behavior that would apply if it did not throw
      int[] ports = cc.getPort(Property.TSERV_CLIENTPORT);
      assertEquals(3, ports.length);
      assertEquals(1024, ports[0]);
      assertEquals(1025, ports[1]);
      assertEquals(1026, ports[2]);
    });
  }

  @Test
  public void testGetPortRangeInvalidHigh() {
    // a range reaching above 65535 is rejected outright
    AccumuloConfiguration c = DefaultConfiguration.getInstance();
    ConfigurationCopy cc = new ConfigurationCopy(c);
    cc.set(Property.TSERV_CLIENTPORT, "65533-65538");
    assertThrows(IllegalArgumentException.class, () -> {
      // NOTE(review): getPort throws, so the assertions below never execute
      int[] ports = cc.getPort(Property.TSERV_CLIENTPORT);
      assertEquals(3, ports.length);
      assertEquals(65533, ports[0]);
      assertEquals(65534, ports[1]);
      assertEquals(65535, ports[2]);
    });
  }

  @Test
  public void testGetPortInvalidSyntax() {
    AccumuloConfiguration c = DefaultConfiguration.getInstance();
    ConfigurationCopy cc = new ConfigurationCopy(c);
    cc.set(Property.TSERV_CLIENTPORT, "[65533,65538]");
    assertThrows(IllegalArgumentException.class, () -> cc.getPort(Property.TSERV_CLIENTPORT));
  }

  /**
   * Minimal mutable configuration with an optional parent fallback. Every {@link #set} bumps
   * {@code upCount} so {@link #getUpdateCount()} reflects each mutation, which lets the prefix-map
   * caching tests observe change detection.
   */
  private static class TestConfiguration extends AccumuloConfiguration {
    private final HashMap<String,String> props = new HashMap<>();
    private int upCount = 0;
    private AccumuloConfiguration parent;

    TestConfiguration() {
      parent = null;
    }

    TestConfiguration(AccumuloConfiguration parent) {
      this.parent = parent;
    }

    public void set(String p, String v) {
      props.put(p, v);
      upCount++;
    }

    @Override
    public boolean isPropertySet(Property prop) {
      return props.containsKey(prop.getKey());
    }

    @Override
    public long getUpdateCount() {
      return upCount;
    }

    @Override
    public String get(Property property) {
      String v = props.get(property.getKey());
      // fall back to the parent configuration when we have no local value
      // (fixed: was the non-short-circuit bitwise '&'; result is identical for
      // booleans, but '&&' is the idiomatic form)
      if (v == null && parent != null) {
        v = parent.get(property);
      }
      return v;
    }

    @Override
    public void getProperties(Map<String,String> output, Predicate<String> filter) {
      // parent first, so local values overwrite inherited ones
      if (parent != null) {
        parent.getProperties(output, filter);
      }
      for (Entry<String,String> entry : props.entrySet()) {
        if (filter.test(entry.getKey())) {
          output.put(entry.getKey(), entry.getValue());
        }
      }
    }
  }

  @Test
  public void testMutatePrefixMap() {
    // the prefix map handed out by getAllPropertiesWithPrefix must be immutable
    TestConfiguration tc = new TestConfiguration();
    tc.set(Property.TABLE_ARBITRARY_PROP_PREFIX.getKey() + "a1", "325");
    tc.set(Property.TABLE_ARBITRARY_PROP_PREFIX.getKey() + "a2", "asg34");
    Map<String,String> pm1 = tc.getAllPropertiesWithPrefix(Property.TABLE_ARBITRARY_PROP_PREFIX);
    Map<String,String> expected1 = new HashMap<>();
    expected1.put(Property.TABLE_ARBITRARY_PROP_PREFIX.getKey() + "a1", "325");
    expected1.put(Property.TABLE_ARBITRARY_PROP_PREFIX.getKey() + "a2", "asg34");
    assertEquals(expected1, pm1);
    assertThrows(UnsupportedOperationException.class, () -> pm1.put("k9", "v3"));
  }

  @Test
  public void testGetByPrefix() {
    // This test checks that when anything changes that all prefix maps are regenerated. However
    // when there are not changes the test expects all the exact same
    // map to always be returned. (assertSame/assertNotSame check object identity,
    // so statement order matters here.)
    TestConfiguration tc = new TestConfiguration();
    tc.set(Property.TABLE_ARBITRARY_PROP_PREFIX.getKey() + "a1", "325");
    tc.set(Property.TABLE_ARBITRARY_PROP_PREFIX.getKey() + "a2", "asg34");
    tc.set(Property.TABLE_ITERATOR_SCAN_PREFIX.getKey() + "i1", "class34");
    tc.set(Property.TABLE_ITERATOR_SCAN_PREFIX.getKey() + "i1.opt", "o99");
    Map<String,String> pm1 = tc.getAllPropertiesWithPrefix(Property.TABLE_ARBITRARY_PROP_PREFIX);
    Map<String,String> pm2 = tc.getAllPropertiesWithPrefix(Property.TABLE_ARBITRARY_PROP_PREFIX);
    assertSame(pm1, pm2);
    Map<String,String> expected1 = new HashMap<>();
    expected1.put(Property.TABLE_ARBITRARY_PROP_PREFIX.getKey() + "a1", "325");
    expected1.put(Property.TABLE_ARBITRARY_PROP_PREFIX.getKey() + "a2", "asg34");
    assertEquals(expected1, pm1);
    Map<String,String> pm3 = tc.getAllPropertiesWithPrefix(Property.TABLE_ITERATOR_SCAN_PREFIX);
    Map<String,String> pm4 = tc.getAllPropertiesWithPrefix(Property.TABLE_ITERATOR_SCAN_PREFIX);
    assertSame(pm3, pm4);
    Map<String,String> expected2 = new HashMap<>();
    expected2.put(Property.TABLE_ITERATOR_SCAN_PREFIX.getKey() + "i1", "class34");
    expected2.put(Property.TABLE_ITERATOR_SCAN_PREFIX.getKey() + "i1.opt", "o99");
    assertEquals(expected2, pm3);
    Map<String,String> pm5 = tc.getAllPropertiesWithPrefix(TABLE_ITERATOR_MINC_PREFIX);
    Map<String,String> pm6 = tc.getAllPropertiesWithPrefix(TABLE_ITERATOR_MINC_PREFIX);
    assertSame(pm5, pm6);
    assertEquals(0, pm5.size());
    // ensure getting one prefix does not cause others to unnecessarily regenerate
    Map<String,String> pm7 = tc.getAllPropertiesWithPrefix(Property.TABLE_ARBITRARY_PROP_PREFIX);
    assertSame(pm1, pm7);
    Map<String,String> pm8 = tc.getAllPropertiesWithPrefix(Property.TABLE_ITERATOR_SCAN_PREFIX);
    assertSame(pm3, pm8);
    Map<String,String> pm9 = tc.getAllPropertiesWithPrefix(TABLE_ITERATOR_MINC_PREFIX);
    assertSame(pm5, pm9);
    // mutate the config: every cached prefix map must be rebuilt
    tc.set(Property.TABLE_ITERATOR_SCAN_PREFIX.getKey() + "i2", "class42");
    tc.set(Property.TABLE_ITERATOR_SCAN_PREFIX.getKey() + "i2.opt", "o78234");
    expected2.put(Property.TABLE_ITERATOR_SCAN_PREFIX.getKey() + "i2", "class42");
    expected2.put(Property.TABLE_ITERATOR_SCAN_PREFIX.getKey() + "i2.opt", "o78234");
    Map<String,String> pmA = tc.getAllPropertiesWithPrefix(Property.TABLE_ITERATOR_SCAN_PREFIX);
    Map<String,String> pmB = tc.getAllPropertiesWithPrefix(Property.TABLE_ITERATOR_SCAN_PREFIX);
    assertNotSame(pm3, pmA);
    assertSame(pmA, pmB);
    assertEquals(expected2, pmA);
    Map<String,String> pmC = tc.getAllPropertiesWithPrefix(Property.TABLE_ARBITRARY_PROP_PREFIX);
    Map<String,String> pmD = tc.getAllPropertiesWithPrefix(Property.TABLE_ARBITRARY_PROP_PREFIX);
    assertNotSame(pm1, pmC);
    assertSame(pmC, pmD);
    assertEquals(expected1, pmC);
    tc.set(TABLE_ITERATOR_MINC_PREFIX.getKey() + "minc1", "abcd");
    Map<String,String> pmE = tc.getAllPropertiesWithPrefix(TABLE_ITERATOR_MINC_PREFIX);
    Map<String,String> pmF = tc.getAllPropertiesWithPrefix(TABLE_ITERATOR_MINC_PREFIX);
    assertSame(pmE, pmF);
    assertNotSame(pm5, pmE);
    assertEquals(Map.of(TABLE_ITERATOR_MINC_PREFIX.getKey() + "minc1", "abcd"), pmE);
    Map<String,String> pmG = tc.getAllPropertiesWithPrefix(Property.TABLE_ITERATOR_SCAN_PREFIX);
    Map<String,String> pmH = tc.getAllPropertiesWithPrefix(Property.TABLE_ITERATOR_SCAN_PREFIX);
    assertNotSame(pmA, pmG);
    assertSame(pmG, pmH);
    assertEquals(expected2, pmG);
    Map<String,String> pmI = tc.getAllPropertiesWithPrefix(Property.TABLE_ARBITRARY_PROP_PREFIX);
    Map<String,String> pmJ = tc.getAllPropertiesWithPrefix(Property.TABLE_ARBITRARY_PROP_PREFIX);
    assertNotSame(pmC, pmI);
    assertSame(pmI, pmJ);
    assertEquals(expected1, pmI);
    // no further mutations: cached maps are returned again
    Map<String,String> pmK = tc.getAllPropertiesWithPrefix(TABLE_ITERATOR_MINC_PREFIX);
    assertSame(pmE, pmK);
    Map<String,String> pmL = tc.getAllPropertiesWithPrefix(Property.TABLE_ITERATOR_SCAN_PREFIX);
    assertSame(pmG, pmL);
  }

  @Test
  public void testScanExecutors() {
    String defName = SimpleScanDispatcher.DEFAULT_SCAN_EXECUTOR_NAME;
    TestConfiguration tc = new TestConfiguration(DefaultConfiguration.getInstance());
    Collection<ScanExecutorConfig> executors = tc.getScanExecutors(false);
    assertEquals(2, executors.size());
    ScanExecutorConfig sec =
        executors.stream().filter(c -> c.name.equals(defName)).findFirst().orElseThrow();
    assertEquals(Integer.parseInt(Property.TSERV_SCAN_EXECUTORS_DEFAULT_THREADS.getDefaultValue()),
        sec.maxThreads);
    assertFalse(sec.priority.isPresent());
    assertTrue(sec.prioritizerClass.orElseThrow().isEmpty());
    assertTrue(sec.prioritizerOpts.isEmpty());
    // ensure new props override default props; a snapshot keeps its original
    // maxThreads, while getCurrentMaxThreads reflects the live configuration
    tc.set(Property.TSERV_SCAN_EXECUTORS_DEFAULT_THREADS.getKey(), "9");
    assertEquals(9, sec.getCurrentMaxThreads());
    assertEquals(Integer.parseInt(Property.TSERV_SCAN_EXECUTORS_DEFAULT_THREADS.getDefaultValue()),
        sec.maxThreads);
    ScanExecutorConfig sec3 = tc.getScanExecutors(false).stream()
        .filter(c -> c.name.equals(defName)).findFirst().orElseThrow();
    assertEquals(9, sec3.maxThreads);
    ScanExecutorConfig sec4 =
        executors.stream().filter(c -> c.name.equals("meta")).findFirst().orElseThrow();
    assertEquals(Integer.parseInt(Property.TSERV_SCAN_EXECUTORS_META_THREADS.getDefaultValue()),
        sec4.maxThreads);
    assertFalse(sec4.priority.isPresent());
    assertFalse(sec4.prioritizerClass.isPresent());
    assertTrue(sec4.prioritizerOpts.isEmpty());
    tc.set(Property.TSERV_SCAN_EXECUTORS_META_THREADS.getKey(), "2");
    assertEquals(2, sec4.getCurrentMaxThreads());
    ScanExecutorConfig sec5 = tc.getScanExecutors(false).stream().filter(c -> c.name.equals("meta"))
        .findFirst().orElseThrow();
    assertEquals(2, sec5.maxThreads);
    tc.set(Property.TSERV_SCAN_EXECUTORS_META_THREADS.getKey(), "3");
    assertEquals(3, sec4.getCurrentMaxThreads());
    ScanExecutorConfig sec6 = tc.getScanExecutors(false).stream().filter(c -> c.name.equals("meta"))
        .findFirst().orElseThrow();
    assertEquals(3, sec6.maxThreads);
    // a user-defined executor configured via the prefix property
    String prefix = Property.TSERV_SCAN_EXECUTORS_PREFIX.getKey();
    tc.set(prefix + "hulksmash.threads", "66");
    tc.set(prefix + "hulksmash.priority", "3");
    tc.set(prefix + "hulksmash.prioritizer", "com.foo.ScanPrioritizer");
    tc.set(prefix + "hulksmash.prioritizer.opts.k1", "v1");
    tc.set(prefix + "hulksmash.prioritizer.opts.k2", "v3");
    executors = tc.getScanExecutors(false);
    assertEquals(3, executors.size());
    ScanExecutorConfig sec7 =
        executors.stream().filter(c -> c.name.equals("hulksmash")).findFirst().orElseThrow();
    assertEquals(66, sec7.maxThreads);
    assertEquals(3, sec7.priority.getAsInt());
    assertEquals("com.foo.ScanPrioritizer", sec7.prioritizerClass.orElseThrow());
    assertEquals(Map.of("k1", "v1", "k2", "v3"), sec7.prioritizerOpts);
    tc.set(prefix + "hulksmash.threads", "44");
    assertEquals(66, sec7.maxThreads);
    assertEquals(44, sec7.getCurrentMaxThreads());
    ScanExecutorConfig sec8 = tc.getScanExecutors(false).stream()
        .filter(c -> c.name.equals("hulksmash")).findFirst().orElseThrow();
    assertEquals(44, sec8.maxThreads);
    // test scan server props (scanServer=true reads the SSERV_* properties)
    tc.set(Property.SSERV_SCAN_EXECUTORS_DEFAULT_THREADS.getKey(), "6");
    Collection<ScanExecutorConfig> scanServExecutors = tc.getScanExecutors(true);
    assertEquals(2, scanServExecutors.size());
    ScanExecutorConfig sec9 =
        scanServExecutors.stream().filter(c -> c.name.equals(defName)).findFirst().orElseThrow();
    // verify set to 6
    assertEquals(6, sec9.maxThreads);
    assertFalse(sec9.priority.isPresent());
    assertTrue(sec9.prioritizerClass.orElseThrow().isEmpty());
    assertTrue(sec9.prioritizerOpts.isEmpty());
    tc.set(Property.SSERV_SCAN_EXECUTORS_DEFAULT_THREADS.getKey(), "17");
    ScanExecutorConfig sec10 = tc.getScanExecutors(true).stream()
        .filter(c -> c.name.equals(defName)).findFirst().orElseThrow();
    assertEquals(17, sec10.maxThreads);
  }

  // note: this is hard to test if there aren't any deprecated properties
  // Update a couple of non-deprecated properties using reflection for testing purposes
  @Test
  public void testResolveDeprecated() throws Exception {
    var conf = new ConfigurationCopy();
    final Field isDeprecatedField =
        Property.INSTANCE_ZK_HOST.getClass().getDeclaredField("isDeprecated");
    isDeprecatedField.setAccessible(true);
    // Capture the original setting. These are not deprecated but just in case they are in the
    // future
    // this will prevent the test from breaking when we reset at the end
    final boolean origIsDepInstanceZkHost = Property.INSTANCE_ZK_HOST.isDeprecated();
    final boolean origIsDepInstanceZkTimeout = Property.INSTANCE_ZK_TIMEOUT.isDeprecated();
    try {
      // Mark these 2 properties as deprecated just for testing purposes to make sure resolve works
      isDeprecatedField.set(Property.INSTANCE_ZK_HOST, true);
      isDeprecatedField.set(Property.INSTANCE_ZK_TIMEOUT, true);
      // deprecated first argument
      var e1 =
          assertThrows(IllegalArgumentException.class, () -> conf.resolve(Property.INSTANCE_ZK_HOST,
              Property.INSTANCE_ZK_TIMEOUT, Property.INSTANCE_ZK_TIMEOUT));
      assertEquals("Unexpected deprecated INSTANCE_ZK_HOST", e1.getMessage());
      // non-deprecated second argument
      var e2 = assertThrows(IllegalArgumentException.class,
          () -> conf.resolve(Property.INSTANCE_VOLUMES, Property.INSTANCE_ZK_HOST,
              Property.INSTANCE_SECRET, Property.INSTANCE_ZK_TIMEOUT, Property.INSTANCE_VOLUMES));
      assertEquals("Unexpected non-deprecated [INSTANCE_SECRET, INSTANCE_VOLUMES]",
          e2.getMessage());
      // empty second argument always resolves to non-deprecated first argument
      assertSame(Property.INSTANCE_VOLUMES, conf.resolve(Property.INSTANCE_VOLUMES));
      // none are set, resolve to non-deprecated
      assertSame(Property.INSTANCE_VOLUMES, conf.resolve(Property.INSTANCE_VOLUMES,
          Property.INSTANCE_ZK_HOST, Property.INSTANCE_ZK_TIMEOUT));
      // resolve to first deprecated argument that's set; here, it's the final one
      conf.set(Property.INSTANCE_ZK_TIMEOUT, "");
      assertSame(Property.INSTANCE_ZK_TIMEOUT, conf.resolve(Property.INSTANCE_VOLUMES,
          Property.INSTANCE_ZK_HOST, Property.INSTANCE_ZK_TIMEOUT));
      // resolve to first deprecated argument that's set; now, it's the first one because both are
      // set
      conf.set(Property.INSTANCE_ZK_HOST, "");
      assertSame(Property.INSTANCE_ZK_HOST, conf.resolve(Property.INSTANCE_VOLUMES,
          Property.INSTANCE_ZK_HOST, Property.INSTANCE_ZK_TIMEOUT));
      // every property is set, so resolve to the non-deprecated one
      conf.set(Property.INSTANCE_VOLUMES, "");
      assertSame(Property.INSTANCE_VOLUMES, conf.resolve(Property.INSTANCE_VOLUMES,
          Property.INSTANCE_ZK_HOST, Property.INSTANCE_ZK_TIMEOUT));
    } finally {
      // Reset back to original setting
      isDeprecatedField.set(Property.INSTANCE_ZK_HOST, origIsDepInstanceZkHost);
      isDeprecatedField.set(Property.INSTANCE_ZK_TIMEOUT, origIsDepInstanceZkTimeout);
    }
  }
}
| 9,435 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/conf/DeprecatedPropertyUtilTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.conf;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertSame;
import java.util.function.BiConsumer;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
public class DeprecatedPropertyUtilTest {

  /** Callback that silently discards rename warnings so the tests stay quiet. */
  private static final BiConsumer<Logger,String> NOOP = (logger, replacement) -> {};

  /**
   * Exposes the protected renamer list of {@code DeprecatedPropertyUtil} so the test can install
   * a two-step rename chain: {@code old.*} -> {@code middle.*} -> {@code new.*}.
   */
  private static class TestPropertyUtil extends DeprecatedPropertyUtil {
    private static final String OLD_PREFIX = "old.";
    private static final String MIDDLE_PREFIX = "middle.";
    private static final String NEW_PREFIX = "new.";

    public static void registerTestRenamer() {
      renamers.add(PropertyRenamer.renamePrefix(OLD_PREFIX, MIDDLE_PREFIX));
      renamers.add(PropertyRenamer.renamePrefix(MIDDLE_PREFIX, NEW_PREFIX));
    }
  }

  @BeforeAll
  public static void setup() {
    TestPropertyUtil.registerTestRenamer();
  }

  @Test
  public void testNonDeprecatedPropertyRename() {
    // a name with no registered renamer comes back unchanged — the very same instance
    String original = "some_property_name";
    String resolved = DeprecatedPropertyUtil.getReplacementName(original, NOOP);
    assertSame(original, resolved);
  }

  @Test
  public void testDeprecatedPropertyRename() {
    // single-step rename: 'middle.test' -> 'new.test'
    String singleStep = DeprecatedPropertyUtil.getReplacementName("middle.test", NOOP);
    assertEquals("new.test", singleStep);
    // chained rename: 'old.test' -> 'middle.test' -> 'new.test'
    String chained = DeprecatedPropertyUtil.getReplacementName("old.test", NOOP);
    assertEquals("new.test", chained);
  }
}
| 9,436 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/conf/ConfigCheckUtilTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.conf;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.Map;
import org.apache.accumulo.core.conf.ConfigCheckUtil.ConfigCheckException;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
public class ConfigCheckUtilTest {
  // property map under test, re-created before every test method
  private Map<String,String> m;

  @BeforeEach
  public void setUp() {
    m = new java.util.HashMap<>();
  }

  /** Runs validation over the current property map; throws ConfigCheckException if invalid. */
  private void check() {
    ConfigCheckUtil.validate(m.entrySet(), "test");
  }

  /** Asserts that validation of the current property map fails. */
  private void checkFails() {
    assertThrows(ConfigCheckException.class, this::check);
  }

  @Test
  public void testPass() {
    m.put(Property.MANAGER_CLIENTPORT.getKey(), "9999");
    m.put(Property.MANAGER_TABLET_BALANCER.getKey(),
        "org.apache.accumulo.server.manager.balancer.TableLoadBalancer");
    m.put(Property.MANAGER_BULK_TIMEOUT.getKey(), "5m");
    check();
  }

  @Test
  public void testPass_Empty() {
    // an empty configuration is trivially valid
    check();
  }

  @Test
  public void testPass_UnrecognizedValidProperty() {
    // arbitrary suffixes under a known prefix property are accepted
    m.put(Property.MANAGER_CLIENTPORT.getKey(), "9999");
    m.put(Property.MANAGER_PREFIX.getKey() + "something", "abcdefg");
    check();
  }

  @Test
  public void testPass_UnrecognizedProperty() {
    // a completely unknown key is tolerated rather than rejected
    m.put(Property.MANAGER_CLIENTPORT.getKey(), "9999");
    m.put("invalid.prefix.value", "abcdefg");
    check();
  }

  @Test
  public void testFail_Prefix() {
    // a PREFIX property itself may never be assigned a value directly
    m.put(Property.MANAGER_CLIENTPORT.getKey(), "9999");
    m.put(Property.MANAGER_PREFIX.getKey(), "oops");
    checkFails();
  }

  @Test
  public void testFail_InstanceZkTimeoutOutOfRange() {
    m.put(Property.INSTANCE_ZK_TIMEOUT.getKey(), "10ms");
    checkFails();
  }

  @Test
  public void testFail_badCryptoFactory() {
    m.put(Property.INSTANCE_CRYPTO_FACTORY.getKey(), "DoesNotExistCryptoFactory");
    checkFails();
  }

  @Test
  public void testPass_defaultCryptoFactory() {
    m.put(Property.INSTANCE_CRYPTO_FACTORY.getKey(),
        Property.INSTANCE_CRYPTO_FACTORY.getDefaultValue());
    check();
  }
}
| 9,437 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/conf/SiteConfigurationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.conf;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import java.io.File;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import org.junit.jupiter.api.Test;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
/**
 * Tests for {@link SiteConfiguration}: extraction of sensitive properties from a Hadoop
 * credential provider, built-in defaults, loading from a properties file URL, and
 * programmatic overrides.
 */
public class SiteConfigurationTest {
  @SuppressFBWarnings(value = "PATH_TRAVERSAL_IN",
      justification = "path to keystore not provided by user input")
  @Test
  public void testOnlySensitivePropertiesExtractedFromCredentialProvider()
      throws SecurityException {
    // site-cfg.jceks={'ignored.property'=>'ignored', 'instance.secret'=>'mysecret',
    // 'general.rpc.timeout'=>'timeout'}
    URL keystore = SiteConfigurationTest.class.getResource("/site-cfg.jceks");
    assertNotNull(keystore);
    // build a jceks:// URI pointing at the test keystore on the local filesystem
    String credProvPath = "jceks://file" + new File(keystore.getFile()).getAbsolutePath();
    var overrides =
        Map.of(Property.GENERAL_SECURITY_CREDENTIAL_PROVIDER_PATHS.getKey(), credProvPath);
    var config = SiteConfiguration.empty().withOverrides(overrides).build();
    // the sensitive property (instance.secret) is pulled from the keystore
    assertEquals("mysecret", config.get(Property.INSTANCE_SECRET));
    // a key in the keystore that is not an Accumulo property is not exposed
    assertNull(config.get("ignored.property"));
    // a non-sensitive Accumulo property in the keystore is ignored; the default wins
    assertEquals(Property.GENERAL_RPC_TIMEOUT.getDefaultValue(),
        config.get(Property.GENERAL_RPC_TIMEOUT.getKey()));
  }

  /** An empty builder yields the documented default values. */
  @Test
  public void testDefault() {
    var conf = SiteConfiguration.empty().build();
    assertEquals("localhost:2181", conf.get(Property.INSTANCE_ZK_HOST));
    assertEquals("DEFAULT", conf.get(Property.INSTANCE_SECRET));
    assertEquals("", conf.get(Property.INSTANCE_VOLUMES));
    assertEquals("120s", conf.get(Property.GENERAL_RPC_TIMEOUT));
    assertEquals("1G", conf.get(Property.TSERV_WAL_MAX_SIZE));
    assertEquals("org.apache.accumulo.core.spi.crypto.NoCryptoServiceFactory",
        conf.get(Property.INSTANCE_CRYPTO_FACTORY));
  }

  /**
   * Loading from accumulo2.properties on the classpath picks up the file's values and
   * performs ${...} interpolation of system properties and environment variables.
   */
  @Test
  public void testFile() {
    // used by the ${DIR} interpolation assertion below
    System.setProperty("DIR", "/tmp/test/dir");
    URL propsUrl = getClass().getClassLoader().getResource("accumulo2.properties");
    var conf = new SiteConfiguration.Builder().fromUrl(propsUrl).build();
    assertEquals("myhost123:2181", conf.get(Property.INSTANCE_ZK_HOST));
    assertEquals("mysecret", conf.get(Property.INSTANCE_SECRET));
    assertEquals("hdfs://localhost:8020/accumulo123", conf.get(Property.INSTANCE_VOLUMES));
    assertEquals("123s", conf.get(Property.GENERAL_RPC_TIMEOUT));
    assertEquals("256M", conf.get(Property.TSERV_WAL_MAX_SIZE));
    assertEquals("org.apache.accumulo.core.spi.crypto.PerTableCryptoServiceFactory",
        conf.get(Property.INSTANCE_CRYPTO_FACTORY));
    // ${USER} env var and ${DIR} system property are interpolated into values
    assertEquals(System.getenv("USER"), conf.get("general.test.user.name"));
    assertEquals("/tmp/test/dir", conf.get("general.test.user.dir"));
  }

  /** Programmatic overrides replace defaults and are visible via getProperties. */
  @Test
  public void testConfigOverrides() {
    var conf = SiteConfiguration.empty().build();
    assertEquals("localhost:2181", conf.get(Property.INSTANCE_ZK_HOST));
    conf = SiteConfiguration.empty()
        .withOverrides(Map.of(Property.INSTANCE_ZK_HOST.getKey(), "myhost:2181")).build();
    assertEquals("myhost:2181", conf.get(Property.INSTANCE_ZK_HOST));
    var results = new HashMap<String,String>();
    conf.getProperties(results, p -> p.startsWith("instance"));
    assertEquals("myhost:2181", results.get(Property.INSTANCE_ZK_HOST.getKey()));
  }
}
| 9,438 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/conf/DefaultConfigurationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.conf;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.Map;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@link DefaultConfiguration}: single-property lookup, filtered bulk retrieval
 * of properties, and validation of the default configuration.
 */
public class DefaultConfigurationTest {
  // the shared singleton of default property values, fetched before each test
  private DefaultConfiguration defaults;

  @BeforeEach
  public void setUp() {
    defaults = DefaultConfiguration.getInstance();
  }

  @Test
  public void testGet() {
    // a direct get returns the property's declared default value
    assertEquals(Property.MANAGER_CLIENTPORT.getDefaultValue(),
        defaults.get(Property.MANAGER_CLIENTPORT));
  }

  @Test
  public void testGetProperties() {
    Map<String,String> snapshot = new java.util.HashMap<>();
    // an always-true filter should copy every concrete property
    defaults.getProperties(snapshot, key -> true);
    assertEquals(Property.MANAGER_CLIENTPORT.getDefaultValue(),
        snapshot.get(Property.MANAGER_CLIENTPORT.getKey()));
    // prefix pseudo-properties are not materialized in the snapshot
    assertFalse(snapshot.containsKey(Property.MANAGER_PREFIX.getKey()));
    assertTrue(snapshot.containsKey(Property.TSERV_DEFAULT_BLOCKSIZE.getKey()));
  }

  @Test
  public void testSanityCheck() {
    // the shipped defaults must pass configuration validation
    ConfigCheckUtil.validate(defaults, "test");
  }
}
| 9,439 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/conf/ClientPropertyTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.conf;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.Properties;
import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@link ClientProperty}: round-tripping authentication tokens through client
 * properties and the typed accessors for time- and byte-valued properties.
 */
public class ClientPropertyTest {
  @Test
  public void testAuthentication() {
    Properties props = new Properties();
    props.setProperty(ClientProperty.AUTH_PRINCIPAL.getKey(), "user");
    // setPassword stores the raw password string as the token value
    ClientProperty.setPassword(props, "testpass1");
    assertEquals("testpass1", ClientProperty.AUTH_TOKEN.getValue(props));
    AuthenticationToken token = ClientProperty.getAuthenticationToken(props);
    assertTrue(token instanceof PasswordToken);
    assertEquals("testpass1", new String(((PasswordToken) token).getPassword()));
    // setAuthenticationToken serializes the token (base64-encoded) instead of storing
    // the plain password; the expected literals below are the serialized forms
    ClientProperty.setAuthenticationToken(props, new PasswordToken("testpass2"));
    assertEquals("/////gAAAAl0ZXN0cGFzczI=", ClientProperty.AUTH_TOKEN.getValue(props));
    token = ClientProperty.getAuthenticationToken(props);
    assertTrue(token instanceof PasswordToken);
    assertEquals("testpass2", new String(((PasswordToken) token).getPassword()));
    ClientProperty.setAuthenticationToken(props, new PasswordToken("testpass3"));
    assertEquals("/////gAAAAl0ZXN0cGFzczM=", ClientProperty.AUTH_TOKEN.getValue(props));
    token = ClientProperty.getAuthenticationToken(props);
    assertTrue(token instanceof PasswordToken);
    assertEquals("testpass3", new String(((PasswordToken) token).getPassword()));
    // a kerberos keytab path is stored verbatim as the token value
    ClientProperty.setKerberosKeytab(props, "/path/to/keytab");
    assertEquals("/path/to/keytab", ClientProperty.AUTH_TOKEN.getValue(props));
  }

  @Test
  public void testTypes() {
    Properties props = new Properties();
    // "10s" parses to 10,000 milliseconds
    props.setProperty(ClientProperty.BATCH_WRITER_LATENCY_MAX.getKey(), "10s");
    Long value = ClientProperty.BATCH_WRITER_LATENCY_MAX.getTimeInMillis(props);
    assertEquals(10000L, value.longValue());
    // "555M" parses to 555 * 1024 * 1024 = 581,959,680 bytes
    props.setProperty(ClientProperty.BATCH_WRITER_MEMORY_MAX.getKey(), "555M");
    value = ClientProperty.BATCH_WRITER_MEMORY_MAX.getBytes(props);
    assertEquals(581959680L, value.longValue());
    // typed setters round-trip through the typed getters
    ClientProperty.BATCH_WRITER_MEMORY_MAX.setBytes(props, 5819L);
    value = ClientProperty.BATCH_WRITER_MEMORY_MAX.getBytes(props);
    assertEquals(5819L, value.longValue());
    ClientProperty.BATCH_WRITER_LATENCY_MAX.setTimeInMillis(props, 1234L);
    value = ClientProperty.BATCH_WRITER_LATENCY_MAX.getTimeInMillis(props);
    assertEquals(1234L, value.longValue());
    // reading a time-typed property via the bytes accessor is an error
    assertThrows(IllegalStateException.class,
        () -> ClientProperty.BATCH_WRITER_LATENCY_MAX.getBytes(props));
  }

  @Test
  public void validateThrowsNPEOnNullProperties() {
    assertThrows(NullPointerException.class, () -> ClientProperty.validate(null));
  }
}
| 9,440 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/conf | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/conf/cluster/ClusterConfigParserTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.conf.cluster;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
/**
 * Tests for {@link ClusterConfigParser}: parsing cluster YAML files into a flat property
 * map and emitting the corresponding shell variable assignments.
 */
@SuppressFBWarnings(value = "PATH_TRAVERSAL_IN", justification = "paths provided by test")
public class ClusterConfigParserTest {

  // JUnit-managed scratch directory shared by the shell-output tests
  @TempDir
  private static File tempDir;

  @Test
  public void testParse() throws Exception {
    URL configFile = ClusterConfigParserTest.class
        .getResource("/org/apache/accumulo/core/conf/cluster/cluster.yaml");
    assertNotNull(configFile);

    Map<String,String> contents =
        ClusterConfigParser.parseConfiguration(new File(configFile.toURI()).getAbsolutePath());

    // only the four core service groups should be present in the basic cluster file
    assertEquals(4, contents.size());
    assertTrue(contents.containsKey("manager"));
    assertEquals("localhost1 localhost2", contents.get("manager"));
    assertTrue(contents.containsKey("monitor"));
    assertEquals("localhost1 localhost2", contents.get("monitor"));
    assertTrue(contents.containsKey("gc"));
    assertEquals("localhost", contents.get("gc"));
    assertTrue(contents.containsKey("tserver"));
    assertEquals("localhost1 localhost2 localhost3 localhost4", contents.get("tserver"));
    // optional components must be absent
    assertFalse(contents.containsKey("compaction"));
    assertFalse(contents.containsKey("compaction.coordinator"));
    assertFalse(contents.containsKey("compaction.compactor"));
    assertFalse(contents.containsKey("compaction.compactor.queue"));
    assertFalse(contents.containsKey("compaction.compactor.q1"));
    assertFalse(contents.containsKey("compaction.compactor.q2"));
    assertFalse(contents.containsKey("tservers_per_host"));
    assertFalse(contents.containsKey("sservers_per_host"));
  }

  @Test
  public void testParseWithOptionalComponents() throws Exception {
    URL configFile = ClusterConfigParserTest.class
        .getResource("/org/apache/accumulo/core/conf/cluster/cluster-with-optional-services.yaml");
    assertNotNull(configFile);

    Map<String,String> contents =
        ClusterConfigParser.parseConfiguration(new File(configFile.toURI()).getAbsolutePath());

    assertEquals(12, contents.size());
    assertTrue(contents.containsKey("manager"));
    assertEquals("localhost1 localhost2", contents.get("manager"));
    assertTrue(contents.containsKey("monitor"));
    assertEquals("localhost1 localhost2", contents.get("monitor"));
    assertTrue(contents.containsKey("gc"));
    assertEquals("localhost", contents.get("gc"));
    assertTrue(contents.containsKey("tserver"));
    assertEquals("localhost1 localhost2 localhost3 localhost4", contents.get("tserver"));
    // compactor hosts are keyed by queue name, not by the bare section names
    assertFalse(contents.containsKey("compaction"));
    assertTrue(contents.containsKey("compaction.coordinator"));
    assertEquals("localhost1 localhost2", contents.get("compaction.coordinator"));
    assertFalse(contents.containsKey("compaction.compactor"));
    assertTrue(contents.containsKey("compaction.compactor.q1"));
    assertEquals("localhost1 localhost2", contents.get("compaction.compactor.q1"));
    assertTrue(contents.containsKey("compaction.compactor.q2"));
    assertEquals("localhost3 localhost4", contents.get("compaction.compactor.q2"));
    // scan server hosts are keyed by group name
    assertFalse(contents.containsKey("sserver"));
    assertTrue(contents.containsKey("sserver.default"));
    assertEquals("localhost1 localhost2", contents.get("sserver.default"));
    assertTrue(contents.containsKey("sserver.highmem"));
    assertEquals("hmvm1 hmvm2 hmvm3", contents.get("sserver.highmem"));
    assertTrue(contents.containsKey("sserver.cheap"));
    assertEquals("burstyvm1 burstyvm2", contents.get("sserver.cheap"));
    assertTrue(contents.containsKey("tservers_per_host"));
    assertEquals("2", contents.get("tservers_per_host"));
    assertTrue(contents.containsKey("sservers_per_host"));
    assertEquals("1", contents.get("sservers_per_host"));
  }

  @Test
  public void testShellOutput() throws Exception {
    testShellOutput(configFile -> {
      try {
        final Map<String,String> contents =
            ClusterConfigParser.parseConfiguration(new File(configFile.toURI()).getAbsolutePath());
        final File outputFile = new File(tempDir, "ClusterConfigParserTest_testShellOutput");
        if (!outputFile.createNewFile()) {
          fail("Unable to create file in " + tempDir);
        }
        outputFile.deleteOnExit();
        // try-with-resources so the stream is closed even if output fails
        try (PrintStream ps = new PrintStream(outputFile)) {
          ClusterConfigParser.outputShellVariables(contents, ps);
        }
        return outputFile;
      } catch (Exception e) {
        throw new RuntimeException(e.getMessage(), e);
      }
    });
  }

  @Test
  public void testShellOutputMain() throws Exception {
    // Test that the main method in ClusterConfigParser properly parses the configuration
    // and outputs to a given file instead of System.out when provided
    testShellOutput(configFile -> {
      try {
        File outputFile = new File(tempDir, "ClusterConfigParserTest_testShellOutputMain");
        ClusterConfigParser.main(new String[] {configFile.getFile(), outputFile.getAbsolutePath()});
        return outputFile;
      } catch (IOException e) {
        throw new RuntimeException(e.getMessage(), e);
      }
    });
  }

  /**
   * Shared body for the shell-output tests: applies {@code outputConfigFunction} to the
   * basic cluster.yaml and verifies the emitted shell variables match the expected,
   * double-quoted values.
   */
  private void testShellOutput(Function<URL,File> outputConfigFunction) throws Exception {
    final URL configFile = ClusterConfigParserTest.class
        .getResource("/org/apache/accumulo/core/conf/cluster/cluster.yaml");
    assertNotNull(configFile);

    final File f = outputConfigFunction.apply(configFile);

    Map<String,String> expected = new HashMap<>();
    expected.put("MANAGER_HOSTS", "localhost1 localhost2");
    expected.put("MONITOR_HOSTS", "localhost1 localhost2");
    expected.put("GC_HOSTS", "localhost");
    expected.put("TSERVER_HOSTS", "localhost1 localhost2 localhost3 localhost4");
    expected.put("NUM_TSERVERS", "${NUM_TSERVERS:=1}");
    expected.put("NUM_SSERVERS", "${NUM_SSERVERS:=1}");
    // every emitted value is wrapped in double quotes
    expected.replaceAll((k, v) -> '"' + v + '"');

    assertEquals(expected, readShellVariables(f));
  }

  @Test
  public void testShellOutputWithOptionalComponents() throws Exception {
    // write into the JUnit-managed temp dir, consistent with the other shell-output tests
    File f = new File(tempDir, "ClusterConfigParserTest_testShellOutputWithOptionalComponents");
    if (!f.createNewFile()) {
      fail("Unable to create file in " + tempDir);
    }
    f.deleteOnExit();

    URL configFile = ClusterConfigParserTest.class
        .getResource("/org/apache/accumulo/core/conf/cluster/cluster-with-optional-services.yaml");
    assertNotNull(configFile);

    Map<String,String> contents =
        ClusterConfigParser.parseConfiguration(new File(configFile.toURI()).getAbsolutePath());
    // try-with-resources so the stream is closed even if output fails
    try (PrintStream ps = new PrintStream(f)) {
      ClusterConfigParser.outputShellVariables(contents, ps);
    }

    Map<String,String> expected = new HashMap<>();
    expected.put("MANAGER_HOSTS", "localhost1 localhost2");
    expected.put("MONITOR_HOSTS", "localhost1 localhost2");
    expected.put("GC_HOSTS", "localhost");
    expected.put("TSERVER_HOSTS", "localhost1 localhost2 localhost3 localhost4");
    expected.put("COORDINATOR_HOSTS", "localhost1 localhost2");
    expected.put("COMPACTION_QUEUES", "q1 q2");
    expected.put("COMPACTOR_HOSTS_q1", "localhost1 localhost2");
    expected.put("COMPACTOR_HOSTS_q2", "localhost3 localhost4");
    expected.put("SSERVER_GROUPS", "default highmem cheap");
    expected.put("SSERVER_HOSTS_default", "localhost1 localhost2");
    expected.put("SSERVER_HOSTS_highmem", "hmvm1 hmvm2 hmvm3");
    expected.put("SSERVER_HOSTS_cheap", "burstyvm1 burstyvm2");
    expected.put("NUM_TSERVERS", "${NUM_TSERVERS:=2}");
    expected.put("NUM_SSERVERS", "${NUM_SSERVERS:=1}");
    // every emitted value is wrapped in double quotes
    expected.replaceAll((k, v) -> '"' + v + '"');

    assertEquals(expected, readShellVariables(f));
  }

  /** Reads the KEY=VALUE lines written by outputShellVariables into a map. */
  private static Map<String,String> readShellVariables(File f) throws IOException {
    Map<String,String> actual = new HashMap<>();
    try (BufferedReader rdr = Files.newBufferedReader(Paths.get(f.toURI()))) {
      rdr.lines().forEach(l -> {
        // split on the first '=' only; values may contain '=' (e.g. ${VAR:=default})
        String[] props = l.split("=", 2);
        actual.put(props[0], props[1]);
      });
    }
    return actual;
  }

  @Test
  public void testFileWithUnknownSections() throws Exception {
    URL configFile = ClusterConfigParserTest.class
        .getResource("/org/apache/accumulo/core/conf/cluster/bad-cluster.yaml");
    assertNotNull(configFile);

    Map<String,String> contents =
        ClusterConfigParser.parseConfiguration(new File(configFile.toURI()).getAbsolutePath());

    // an unknown section name ("vserver") must be rejected and named in the message
    try (var baos = new ByteArrayOutputStream(); var ps = new PrintStream(baos)) {
      var exception = assertThrows(IllegalArgumentException.class,
          () -> ClusterConfigParser.outputShellVariables(contents, ps));
      assertTrue(exception.getMessage().contains("vserver"));
    }
  }
}
| 9,441 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/volume/VolumeImplTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.volume;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.net.URI;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.Test;
import org.slf4j.LoggerFactory;
/**
 * Tests for {@link VolumeImpl}: filesystem equivalence checks, base-path ancestry checks,
 * and normalization/validation performed by {@code prefixChild}.
 */
public class VolumeImplTest {
  @Test
  public void testFileSystemInequivalence() {
    Configuration hadoopConf = createMock(Configuration.class);
    FileSystem fs = createMock(FileSystem.class), other = createMock(FileSystem.class);
    String basePath = "/accumulo";
    expect(fs.getConf()).andReturn(hadoopConf).anyTimes();
    // different authorities (localhost vs otherhost) => not equivalent
    expect(fs.getUri()).andReturn(URI.create("hdfs://localhost:8020")).anyTimes();
    expect(other.getUri()).andReturn(URI.create("hdfs://otherhost:8020")).anyTimes();
    replay(fs, other);
    VolumeImpl volume = new VolumeImpl(fs, basePath);
    assertFalse(volume.equivalentFileSystems(other));
    verify(fs, other);
  }

  @Test
  public void testFileSystemEquivalence() {
    Configuration hadoopConf = createMock(Configuration.class);
    FileSystem fs = createMock(FileSystem.class), other = createMock(FileSystem.class);
    String basePath = "/accumulo";
    expect(fs.getConf()).andReturn(hadoopConf).anyTimes();
    // same authority; a trailing slash on one URI must not break equivalence
    expect(fs.getUri()).andReturn(URI.create("hdfs://myhost:8020/")).anyTimes();
    expect(other.getUri()).andReturn(URI.create("hdfs://myhost:8020")).anyTimes();
    replay(fs, other);
    VolumeImpl volume = new VolumeImpl(fs, basePath);
    assertTrue(volume.equivalentFileSystems(other));
    verify(fs, other);
  }

  @Test
  public void testBasePathInequivalence() {
    FileSystem fs = createMock(FileSystem.class);
    VolumeImpl volume = new VolumeImpl(fs, "/accumulo");
    // base path appearing as a non-leading component is not an ancestor
    assertFalse(volume.isAncestorPathOf(new Path("/something/accumulo")));
    // a sibling path sharing a name prefix is not an ancestor
    assertFalse(volume.isAncestorPathOf(new Path("/accumulo2")));
    // a path that escapes the base via ".." is not an ancestor
    assertFalse(volume.isAncestorPathOf(new Path("/accumulo/..")));
  }

  @Test
  public void testBasePathEquivalence() {
    FileSystem fs = createMock(FileSystem.class);
    final String basePath = "/accumulo";
    VolumeImpl volume = new VolumeImpl(fs, basePath);
    // Bare path should match
    assertTrue(volume.isAncestorPathOf(new Path(basePath)));
    // Prefix should also match
    assertTrue(volume.isAncestorPathOf(new Path(basePath + "/tables/1/F000001.rf")));
  }

  @Test
  public void testPrefixChild() throws IOException {
    FileSystem fs = new Path("file:///").getFileSystem(new Configuration(false));
    var volume = new VolumeImpl(fs, "/tmp/accumulo/");
    assertEquals("file:/tmp/accumulo", volume.toString());
    // test normalization for effectively empty child
    Set.of(" ", "  ", "   ", " .", " ./", " .// ", " ././/./ ").forEach(s -> {
      assertEquals("file:/tmp/accumulo", volume.prefixChild(s).toString());
    });
    // test normalization for single depth child
    Set.of("/abc", "abc", " abc/ ", " abc/// ", "./abc/.", "./abc").forEach(s -> {
      assertEquals("file:/tmp/accumulo/abc", volume.prefixChild(s).toString());
    });
    // test normalization for multi depth child
    Set.of("abc/./def/", " abc/def/ ", " abc////def/ ", " ./abc/.//def/. ").forEach(s -> {
      assertEquals("file:/tmp/accumulo/abc/def", volume.prefixChild(s).toString());
    });
    // test failures for absolute paths
    Set.of("//abc", " //abc ", "///abc").forEach(s -> {
      var e = assertThrows(IllegalArgumentException.class, () -> {
        volume.prefixChild(s);
        // only reached if prefixChild did not throw; the assertThrows above then fails
        LoggerFactory.getLogger(VolumeImplTest.class).error("Should have thrown on " + s);
      });
      assertEquals("Cannot prefix " + s + " (absolute path) with volume file:/tmp/accumulo",
          e.getMessage());
    });
    // test failures for qualified paths
    Set.of("file:/abc", "hdfs://host:1234", " file:/def ").forEach(s -> {
      var e = assertThrows(IllegalArgumentException.class, () -> {
        volume.prefixChild(s);
        LoggerFactory.getLogger(VolumeImplTest.class).error("Should have thrown on " + s);
      });
      assertEquals("Cannot prefix " + s + " (qualified path) with volume file:/tmp/accumulo",
          e.getMessage());
    });
    // test failures for breakout paths
    Set.of("./abc/..", "abc/../def/", "../abc", " .. ").forEach(s -> {
      var e = assertThrows(IllegalArgumentException.class, () -> {
        volume.prefixChild(s);
        LoggerFactory.getLogger(VolumeImplTest.class).error("Should have thrown on " + s);
      });
      assertEquals("Cannot prefix " + s + " (path contains '..') with volume file:/tmp/accumulo",
          e.getMessage());
    });
    // quick check to verify with hdfs
    FileSystem fs2 = new Path("hdfs://127.0.0.1:1234/").getFileSystem(new Configuration(false));
    var volume2 = new VolumeImpl(fs2, "/tmp/accumulo/");
    assertEquals("hdfs://127.0.0.1:1234/tmp/accumulo", volume2.toString());
    assertEquals("hdfs://127.0.0.1:1234/tmp/accumulo/abc", volume2.prefixChild("abc").toString());
  }

  @Test
  public void testContains() throws IOException {
    FileSystem fs = new Path("file:///").getFileSystem(new Configuration(false));
    var volume = new VolumeImpl(fs, "/tmp/accumulo/");
    // every path produced by prefixChild must be reported as contained in the volume
    Set.of("abc", " abc/def/ ", " ghi/// ").forEach(s -> {
      assertTrue(volume.containsPath(volume.prefixChild(s)));
    });
  }
}
| 9,442 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/BatchWriterConfigTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.concurrent.TimeUnit.DAYS;
import static java.util.concurrent.TimeUnit.HOURS;
import static java.util.concurrent.TimeUnit.MICROSECONDS;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;
import org.apache.accumulo.core.conf.ClientProperty;
import org.junit.jupiter.api.Test;
public class BatchWriterConfigTest {
@Test
public void testReasonableDefaults() {
long expectedMaxMemory = 50 * 1024 * 1024L;
long expectedMaxLatency = 120000L;
long expectedTimeout = Long.MAX_VALUE;
int expectedMaxWriteThreads = 3;
Durability expectedDurability = Durability.DEFAULT;
BatchWriterConfig defaults = new BatchWriterConfig();
assertEquals(expectedMaxMemory, defaults.getMaxMemory());
assertEquals(expectedMaxLatency, defaults.getMaxLatency(MILLISECONDS));
assertEquals(expectedTimeout, defaults.getTimeout(MILLISECONDS));
assertEquals(expectedMaxWriteThreads, defaults.getMaxWriteThreads());
assertEquals(expectedDurability, defaults.getDurability());
}
@Test
public void testOverridingDefaults() {
BatchWriterConfig bwConfig = new BatchWriterConfig();
bwConfig.setMaxMemory(1123581321L);
bwConfig.setMaxLatency(22, HOURS);
bwConfig.setTimeout(33, DAYS);
bwConfig.setMaxWriteThreads(42);
bwConfig.setDurability(Durability.NONE);
assertEquals(1123581321L, bwConfig.getMaxMemory());
assertEquals(HOURS.toMillis(22), bwConfig.getMaxLatency(MILLISECONDS));
assertEquals(DAYS.toMillis(33), bwConfig.getTimeout(MILLISECONDS));
assertEquals(42, bwConfig.getMaxWriteThreads());
assertEquals(Durability.NONE, bwConfig.getDurability());
}
@Test
public void testZeroValues() {
BatchWriterConfig bwConfig = new BatchWriterConfig();
bwConfig.setMaxLatency(0, MILLISECONDS);
bwConfig.setTimeout(0, MILLISECONDS);
bwConfig.setMaxMemory(0);
assertEquals(Long.MAX_VALUE, bwConfig.getMaxLatency(MILLISECONDS));
assertEquals(Long.MAX_VALUE, bwConfig.getTimeout(MILLISECONDS));
assertEquals(0, bwConfig.getMaxMemory());
}
@Test
public void testNegativeMaxMemory() {
BatchWriterConfig bwConfig = new BatchWriterConfig();
assertThrows(IllegalArgumentException.class, () -> bwConfig.setMaxMemory(-1));
}
@Test
public void testNegativeMaxLatency() {
BatchWriterConfig bwConfig = new BatchWriterConfig();
assertThrows(IllegalArgumentException.class, () -> bwConfig.setMaxLatency(-1, DAYS));
}
@Test
public void testTinyTimeConversions() {
BatchWriterConfig bwConfig = new BatchWriterConfig();
bwConfig.setMaxLatency(999, MICROSECONDS);
bwConfig.setTimeout(999, MICROSECONDS);
assertEquals(1000, bwConfig.getMaxLatency(MICROSECONDS));
assertEquals(1000, bwConfig.getTimeout(MICROSECONDS));
assertEquals(1, bwConfig.getMaxLatency(MILLISECONDS));
assertEquals(1, bwConfig.getTimeout(MILLISECONDS));
bwConfig.setMaxLatency(10, NANOSECONDS);
bwConfig.setTimeout(10, NANOSECONDS);
assertEquals(1000000, bwConfig.getMaxLatency(NANOSECONDS));
assertEquals(1000000, bwConfig.getTimeout(NANOSECONDS));
assertEquals(1, bwConfig.getMaxLatency(MILLISECONDS));
assertEquals(1, bwConfig.getTimeout(MILLISECONDS));
}
@Test
public void testNegativeTimeout() {
  // Negative timeouts are rejected regardless of the time unit.
  final BatchWriterConfig config = new BatchWriterConfig();
  assertThrows(IllegalArgumentException.class, () -> config.setTimeout(-1, DAYS));
}
@Test
public void testZeroMaxWriteThreads() {
  // At least one write thread is required.
  final BatchWriterConfig config = new BatchWriterConfig();
  assertThrows(IllegalArgumentException.class, () -> config.setMaxWriteThreads(0));
}
@Test
public void testNegativeMaxWriteThreads() {
  // Negative thread counts are rejected just like zero.
  final BatchWriterConfig config = new BatchWriterConfig();
  assertThrows(IllegalArgumentException.class, () -> config.setMaxWriteThreads(-1));
}
@Test
public void testSerialize() throws IOException {
// make sure we aren't testing defaults
final BatchWriterConfig bwDefaults = new BatchWriterConfig();
assertNotEquals(7654321L, bwDefaults.getMaxLatency(MILLISECONDS));
assertNotEquals(9898989L, bwDefaults.getTimeout(MILLISECONDS));
assertNotEquals(42, bwDefaults.getMaxWriteThreads());
assertNotEquals(1123581321L, bwDefaults.getMaxMemory());
assertNotEquals(Durability.FLUSH, bwDefaults.getDurability());
// test setting all fields, then round-trip via createBytes/checkBytes
BatchWriterConfig bwConfig = new BatchWriterConfig();
bwConfig.setMaxLatency(7654321L, MILLISECONDS);
bwConfig.setTimeout(9898989L, MILLISECONDS);
bwConfig.setMaxWriteThreads(42);
bwConfig.setMaxMemory(1123581321L);
bwConfig.setDurability(Durability.FLUSH);
byte[] bytes = createBytes(bwConfig);
checkBytes(bwConfig, bytes);
// test human-readable serialization
// NOTE(review): the characters before '#' appear to be a padded payload-length
// header emitted by BatchWriterConfig.write() -- confirm against that impl.
bwConfig = new BatchWriterConfig();
bwConfig.setMaxWriteThreads(42);
bytes = createBytes(bwConfig);
assertEquals("     i#maxWriteThreads=42", new String(bytes, UTF_8));
checkBytes(bwConfig, bytes);
// test human-readable with 2 fields (comma-separated key=value pairs)
bwConfig = new BatchWriterConfig();
bwConfig.setMaxWriteThreads(24);
bwConfig.setTimeout(3, SECONDS);
bytes = createBytes(bwConfig);
assertEquals("     v#maxWriteThreads=24,timeout=3000", new String(bytes, UTF_8));
checkBytes(bwConfig, bytes);
// test human-readable durability
bwConfig = new BatchWriterConfig();
bwConfig.setDurability(Durability.LOG);
bytes = createBytes(bwConfig);
assertEquals("     e#durability=LOG", new String(bytes, UTF_8));
}
@Test
public void testDefaultEquality() {
  BatchWriterConfig first = new BatchWriterConfig();
  BatchWriterConfig second = new BatchWriterConfig();
  // Two default configs are equal with matching hash codes.
  assertEquals(first, second);
  assertEquals(first.hashCode(), second.hashCode());
  // Changing a single field breaks equality.
  second.setMaxMemory(1);
  assertNotEquals(first, second);
  // A non-default durability also breaks both equals and hashCode.
  second = new BatchWriterConfig();
  second.setDurability(Durability.FLUSH);
  assertNotEquals(first, second);
  assertNotEquals(first.hashCode(), second.hashCode());
}
@Test
public void testManualEquality() {
  // Equality must be unit-independent: 10 seconds equals 10000 milliseconds.
  BatchWriterConfig inSeconds = new BatchWriterConfig();
  BatchWriterConfig inMillis = new BatchWriterConfig();
  inSeconds.setMaxLatency(10, SECONDS);
  inSeconds.setTimeout(10, SECONDS);
  inSeconds.setMaxMemory(100);
  inMillis.setMaxLatency(10000, MILLISECONDS);
  inMillis.setTimeout(10000, MILLISECONDS);
  inMillis.setMaxMemory(100);
  assertEquals(inSeconds, inMillis);
  assertEquals(inSeconds.hashCode(), inMillis.hashCode());
}
@Test
public void testMerge() {
BatchWriterConfig cfg1 = new BatchWriterConfig(), cfg2 = new BatchWriterConfig();
// cfg1 explicitly sets max memory; cfg2 sets a conflicting max memory plus
// durability and latency, which cfg1 leaves unset.
cfg1.setMaxMemory(1234);
cfg2.setMaxMemory(5858);
cfg2.setDurability(Durability.LOG);
cfg2.setMaxLatency(456, MILLISECONDS);
assertEquals(Durability.DEFAULT, cfg1.getDurability());
// merge() keeps values explicitly set on the receiver (cfg1) and falls back
// to the argument (cfg2) for fields the receiver never set.
BatchWriterConfig merged = cfg1.merge(cfg2);
assertEquals(1234, merged.getMaxMemory());
assertEquals(Durability.LOG, merged.getDurability());
assertEquals(456, merged.getMaxLatency(MILLISECONDS));
// Neither config set write threads, so the merged config reports the
// default (asserted here to be 3).
assertEquals(3, merged.getMaxWriteThreads());
}
// Serializes the config through its Writable-style write() method and
// returns the raw bytes.
private byte[] createBytes(BatchWriterConfig bwConfig) throws IOException {
  final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  bwConfig.write(new DataOutputStream(buffer));
  return buffer.toByteArray();
}
// Deserializes the given bytes into a fresh config and asserts that every
// field matches the original.
private void checkBytes(BatchWriterConfig bwConfig, byte[] bytes) throws IOException {
  final BatchWriterConfig roundTripped = new BatchWriterConfig();
  roundTripped.readFields(new DataInputStream(new ByteArrayInputStream(bytes)));
  assertEquals(bwConfig.getMaxMemory(), roundTripped.getMaxMemory());
  assertEquals(bwConfig.getMaxLatency(MILLISECONDS), roundTripped.getMaxLatency(MILLISECONDS));
  assertEquals(bwConfig.getTimeout(MILLISECONDS), roundTripped.getTimeout(MILLISECONDS));
  assertEquals(bwConfig.getMaxWriteThreads(), roundTripped.getMaxWriteThreads());
}
@Test
public void countClientProps() {
  // Guard against a BATCH_WRITER client property being added in one place
  // but not the other.
  long batchWriterProps = Arrays.stream(ClientProperty.values())
      .filter(p -> p.name().startsWith("BATCH_WRITER")).count();
  assertEquals(5, batchWriterProps);
}
}
| 9,443 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/IteratorSettingTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.HashMap;
import java.util.Map;
import org.apache.accumulo.core.iterators.Combiner;
import org.apache.accumulo.core.iterators.DevNull;
import org.junit.jupiter.api.Test;
/**
* Test cases for the IteratorSetting class
*/
public class IteratorSettingTest {
// Three identical settings used to exercise the equals/hashCode contract
// (reflexive, symmetric, transitive).
IteratorSetting setting1 = new IteratorSetting(500, "combiner", Combiner.class.getName());
IteratorSetting setting2 = new IteratorSetting(500, "combiner", Combiner.class.getName());
IteratorSetting setting3 = new IteratorSetting(500, "combiner", Combiner.class.getName());
// Differs from setting1 in both name and iterator class.
IteratorSetting devnull = new IteratorSetting(500, "devNull", DevNull.class.getName());
final IteratorSetting nullsetting = null;
// Each differs from setting1 in exactly one field:
// priority (setting4), name (setting5), and class name (setting6).
IteratorSetting setting4 = new IteratorSetting(300, "combiner", Combiner.class.getName());
IteratorSetting setting5 = new IteratorSetting(500, "foocombiner", Combiner.class.getName());
IteratorSetting setting6 = new IteratorSetting(500, "combiner", "MySuperCombiner");
@Test
public final void testHashCodeSameObject() {
assertEquals(setting1.hashCode(), setting1.hashCode());
}
@Test
public final void testHashCodeEqualObjects() {
// equal objects must have equal hash codes
assertEquals(setting1.hashCode(), setting2.hashCode());
}
@Test
public final void testEqualsObjectReflexive() {
assertEquals(setting1, setting1);
}
@Test
public final void testEqualsObjectSymmetric() {
assertEquals(setting1, setting2);
assertEquals(setting2, setting1);
}
@Test
public final void testEqualsObjectTransitive() {
assertEquals(setting1, setting2);
assertEquals(setting2, setting3);
assertEquals(setting1, setting3);
}
@Test
public final void testEqualsNullSetting() {
// equals(null) must be false, not throw
assertNotEquals(setting1, nullsetting);
}
@Test
public final void testEqualsObjectNotEqual() {
assertNotEquals(setting1, devnull);
}
@Test
public final void testEqualsObjectProperties() {
// adding an iterator option breaks equality with an option-less setting
IteratorSetting mysettings = new IteratorSetting(500, "combiner", Combiner.class.getName());
assertEquals(setting1, mysettings);
mysettings.addOption("myoption1", "myvalue1");
assertNotEquals(setting1, mysettings);
}
@Test
public final void testEqualsDifferentMembers() {
// any single differing field (priority, name, class) breaks equality
assertNotEquals(setting1, setting4);
assertNotEquals(setting1, setting5);
assertNotEquals(setting1, setting6);
}
@Test
public void testEquivalentConstructor() {
// the short constructor defaults the name to the class's simple name and
// the options to an empty map, so these two are equal
IteratorSetting setting1 = new IteratorSetting(100, Combiner.class);
IteratorSetting setting2 =
new IteratorSetting(100, "Combiner", Combiner.class, new HashMap<>());
assertEquals(setting1, setting2);
IteratorSetting notEqual1 =
new IteratorSetting(100, "FooCombiner", Combiner.class, new HashMap<>());
assertNotEquals(setting1, notEqual1);
Map<String,String> props = new HashMap<>();
props.put("foo", "bar");
IteratorSetting notEquals2 = new IteratorSetting(100, "Combiner", Combiner.class, props);
assertNotEquals(setting1, notEquals2);
}
/**
* Iterator names cannot contain dots. Throw IllegalArgumentException is invalid name is used.
*/
@Test
public void testIteratorNameCannotContainDot() {
assertThrows(IllegalArgumentException.class,
() -> new IteratorSetting(500, "iterator.name.with.dots", Combiner.class.getName()));
}
}
| 9,444 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/RowIteratorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.TreeMap;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.junit.jupiter.api.Test;
public class RowIteratorTest {
// Builds a sorted key/value iterator from strings of the form
// "row cf cq value" (fields separated by a space or tab).
Iterator<Entry<Key,Value>> makeIterator(final String... args) {
final Map<Key,Value> result = new TreeMap<>();
for (String s : args) {
final String[] parts = s.split("[ \t]");
final Key key = new Key(parts[0], parts[1], parts[2]);
final Value value = new Value(parts[3]);
result.put(key, value);
}
return result.entrySet().iterator();
}
// Fully consumes a RowIterator, collecting each row's entries into a list.
List<List<Entry<Key,Value>>> getRows(final Iterator<Entry<Key,Value>> iter) {
final List<List<Entry<Key,Value>>> result = new ArrayList<>();
final RowIterator riter = new RowIterator(iter);
while (riter.hasNext()) {
final Iterator<Entry<Key,Value>> row = riter.next();
final List<Entry<Key,Value>> rlist = new ArrayList<>();
while (row.hasNext()) {
rlist.add(row.next());
}
result.add(rlist);
}
return result;
}
@Test
public void testRowIterator() {
// empty input yields no rows
List<List<Entry<Key,Value>>> rows = getRows(makeIterator());
assertEquals(0, rows.size());
rows = getRows(makeIterator("a b c d"));
assertEquals(1, rows.size());
assertEquals(1, rows.get(0).size());
// entries sharing a row key are grouped into one row
rows = getRows(makeIterator("a cf cq1 v", "a cf cq2 v", "a cf cq3 v", "b cf cq1 x"));
assertEquals(2, rows.size());
assertEquals(3, rows.get(0).size());
assertEquals(1, rows.get(1).size());
RowIterator i = new RowIterator(makeIterator());
assertThrows(NoSuchElementException.class, i::next);
i = new RowIterator(makeIterator("a b c d", "a 1 2 3"));
assertTrue(i.hasNext());
Iterator<Entry<Key,Value>> row = i.next();
assertTrue(row.hasNext());
row.next();
assertTrue(row.hasNext());
row.next();
assertFalse(row.hasNext());
assertThrows(NoSuchElementException.class, row::next);
// getKVCount is not bumped until the RowIterator itself advances past the
// row, as the assertions below demonstrate
assertEquals(0, i.getKVCount());
assertFalse(i.hasNext());
assertEquals(2, i.getKVCount());
assertThrows(NoSuchElementException.class, i::next);
}
@Test
public void testUnreadRow() {
final RowIterator i = new RowIterator(makeIterator("a b c d", "a 1 2 3", "b 1 2 3"));
assertTrue(i.hasNext());
Iterator<Entry<Key,Value>> firstRow = i.next();
assertEquals(0, i.getKVCount());
// hasNext() skips the rest of the unread first row, counting its two KVs
assertTrue(i.hasNext());
assertEquals(2, i.getKVCount());
Iterator<Entry<Key,Value>> nextRow = i.next();
assertEquals(2, i.getKVCount());
assertFalse(i.hasNext());
assertEquals(3, i.getKVCount());
// advancing the RowIterator invalidates previously returned row iterators
assertThrows(IllegalStateException.class, firstRow::hasNext);
assertThrows(IllegalStateException.class, nextRow::next);
}
}
| 9,445 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/TestThrift1474.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import org.apache.accumulo.core.clientImpl.thrift.ThriftSecurityException;
import org.apache.accumulo.core.clientImpl.thrift.ThriftTest;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.server.TServer;
import org.apache.thrift.server.TThreadPoolServer;
import org.apache.thrift.transport.TServerSocket;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.junit.jupiter.api.Test;
public class TestThrift1474 {
// Minimal in-process implementation of the ThriftTest service used to verify
// that return values and declared exceptions propagate through thrift.
static class TestServer implements ThriftTest.Iface {
@Override
public boolean success() throws TException {
return true;
}
@Override
public boolean fails() throws TException {
return false;
}
@Override
public boolean throwsError() throws ThriftSecurityException, TException {
// always throws, so the client side can assert exception propagation
throw new ThriftSecurityException();
}
}
@Test
public void test() throws IOException, TException, InterruptedException {
// port 0 asks the OS for an ephemeral port; read it back after listen()
TServerSocket serverTransport = new TServerSocket(0);
serverTransport.listen();
int port = serverTransport.getServerSocket().getLocalPort();
TestServer handler = new TestServer();
ThriftTest.Processor<ThriftTest.Iface> processor = new ThriftTest.Processor<>(handler);
TThreadPoolServer.Args args = new TThreadPoolServer.Args(serverTransport);
args.stopTimeoutVal = 10;
args.stopTimeoutUnit = MILLISECONDS;
final TServer server = new TThreadPoolServer(args.processor(processor));
// serve() blocks, so run it on its own thread
Thread thread = new Thread(server::serve);
thread.start();
// spin until the server thread reports it is accepting connections
while (!server.isServing()) {
Thread.sleep(10);
}
TTransport transport = new TSocket("localhost", port);
transport.open();
TProtocol protocol = new TBinaryProtocol(transport);
ThriftTest.Client client = new ThriftTest.Client(protocol);
assertTrue(client.success());
assertFalse(client.fails());
assertThrows(ThriftSecurityException.class, client::throwsError);
// clean shutdown: stop the server and wait for the serving thread to exit
server.stop();
thread.join();
}
}
| 9,446 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/ClientPropertiesTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.nio.file.Paths;
import java.util.Properties;
import org.apache.accumulo.core.conf.ClientProperty;
import org.junit.jupiter.api.Test;
public class ClientPropertiesTest {
@Test
public void testBasic() {
// as(user, CharSequence) records a plain password token with auth.type=password
Properties props1 =
Accumulo.newClientProperties().to("inst1", "zoo1").as("user1", "pass1").build();
assertEquals("inst1", ClientProperty.INSTANCE_NAME.getValue(props1));
assertEquals("zoo1", ClientProperty.INSTANCE_ZOOKEEPERS.getValue(props1));
assertEquals("user1", ClientProperty.AUTH_PRINCIPAL.getValue(props1));
assertEquals("password", ClientProperty.AUTH_TYPE.getValue(props1));
assertEquals("pass1", ClientProperty.AUTH_TOKEN.getValue(props1));
ClientProperty.validate(props1);
// from(props1) copies, then as(user, Path) switches auth to kerberos with
// the keytab path as the token
Properties props2 =
Accumulo.newClientProperties().from(props1).as("user2", Paths.get("./path2")).build();
// verify props1 is unchanged
assertEquals("inst1", ClientProperty.INSTANCE_NAME.getValue(props1));
assertEquals("zoo1", ClientProperty.INSTANCE_ZOOKEEPERS.getValue(props1));
assertEquals("user1", ClientProperty.AUTH_PRINCIPAL.getValue(props1));
assertEquals("password", ClientProperty.AUTH_TYPE.getValue(props1));
assertEquals("pass1", ClientProperty.AUTH_TOKEN.getValue(props1));
// verify props2 has new values for overridden fields
assertEquals("inst1", ClientProperty.INSTANCE_NAME.getValue(props2));
assertEquals("zoo1", ClientProperty.INSTANCE_ZOOKEEPERS.getValue(props2));
assertEquals("user2", ClientProperty.AUTH_PRINCIPAL.getValue(props2));
assertEquals("kerberos", ClientProperty.AUTH_TYPE.getValue(props2));
assertEquals("./path2", ClientProperty.AUTH_TOKEN.getValue(props2));
// validate() must reject properties missing a required key
props2.remove(ClientProperty.AUTH_PRINCIPAL.getKey());
var e = assertThrows(IllegalArgumentException.class, () -> ClientProperty.validate(props2));
assertEquals("auth.principal is not set", e.getMessage());
}
}
| 9,447 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/security/SecurityErrorCodeTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.security;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.HashSet;
import org.junit.jupiter.api.Test;
public class SecurityErrorCodeTest {

  @Test
  public void testEnumsSame() {
    // The public client-facing enum and the internal thrift enum must expose
    // exactly the same set of constant names.
    HashSet<String> publicNames = new HashSet<>();
    HashSet<String> thriftNames = new HashSet<>();
    for (SecurityErrorCode code : SecurityErrorCode.values()) {
      publicNames.add(code.name());
    }
    for (org.apache.accumulo.core.clientImpl.thrift.SecurityErrorCode code : org.apache.accumulo.core.clientImpl.thrift.SecurityErrorCode
        .values()) {
      thriftNames.add(code.name());
    }
    assertEquals(publicNames, thriftNames);
  }
}
| 9,448 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/security | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/security/tokens/PasswordTokenTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.security.tokens;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Base64;
import java.util.List;
import javax.security.auth.DestroyFailedException;
import org.junit.jupiter.api.Test;
public class PasswordTokenTest {
@Test
public void testMultiByte() throws DestroyFailedException {
// multi-byte UTF-8 passwords must survive both init(Properties) and the
// String constructor without corruption
PasswordToken pt = new PasswordToken();
AuthenticationToken.Properties props = new AuthenticationToken.Properties();
props.put("password", "五六");
pt.init(props);
props.destroy();
String s = new String(pt.getPassword(), UTF_8);
assertEquals("五六", s);
pt = new PasswordToken("五六");
s = new String(pt.getPassword(), UTF_8);
assertEquals("五六", s);
}
@Test
public void testReadingLegacyFormat() throws IOException {
// fixed Base64 fixtures: serialized forms of the password "testpass"
// produced by older (GZip-wrapped) and current serialization code; all
// must remain readable
String newFormat = "/////gAAAAh0ZXN0cGFzcw=="; // the new format without using GZip
String oldFormat1 = "AAAAHB+LCAAAAAAAAAArSS0uKUgsLgYAGRFm+ggAAAA="; // jdk 11 GZip produced this
String oldFormat2 = "AAAAHB+LCAAAAAAAAP8rSS0uKUgsLgYAGRFm+ggAAAA="; // jdk 17 GZip produced this
for (String format : List.of(oldFormat1, oldFormat2, newFormat)) {
byte[] array = Base64.getDecoder().decode(format);
try (var bais = new ByteArrayInputStream(array); var dis = new DataInputStream(bais)) {
var deserializedToken = new PasswordToken();
deserializedToken.readFields(dis);
assertArrayEquals("testpass".getBytes(UTF_8), deserializedToken.getPassword());
}
}
}
@Test
public void testReadingAndWriting() throws IOException {
// round-trip: write() then readFields() must reproduce the password
var originalToken = new PasswordToken("testpass");
byte[] saved;
try (var baos = new ByteArrayOutputStream(); var dos = new DataOutputStream(baos)) {
originalToken.write(dos);
saved = baos.toByteArray();
}
try (var bais = new ByteArrayInputStream(saved); var dis = new DataInputStream(bais)) {
var deserializedToken = new PasswordToken();
deserializedToken.readFields(dis);
assertArrayEquals(originalToken.getPassword(), deserializedToken.getPassword());
}
}
}
| 9,449 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/security | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/security/tokens/DelegationTokenImplTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.security.tokens;
import static org.apache.accumulo.core.clientImpl.AuthenticationTokenIdentifier.createTAuthIdentifier;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.accumulo.core.clientImpl.AuthenticationTokenIdentifier;
import org.apache.accumulo.core.clientImpl.DelegationTokenImpl;
import org.junit.jupiter.api.Test;
public class DelegationTokenImplTest {
@Test
public void testSerialization() throws IOException {
// write() then readFields() must reproduce an equal token with the same
// service name and hash code
byte[] passBytes = new byte[] {'f', 'a', 'k', 'e'};
AuthenticationTokenIdentifier identifier = new AuthenticationTokenIdentifier(
createTAuthIdentifier("user", 1, 1000L, 2000L, "instanceid"));
DelegationTokenImpl token = new DelegationTokenImpl(passBytes, identifier);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
token.write(new DataOutputStream(baos));
DelegationTokenImpl copy = new DelegationTokenImpl();
copy.readFields(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
assertEquals(token.getServiceName(), copy.getServiceName());
assertEquals(token, copy);
assertEquals(token.hashCode(), copy.hashCode());
}
@Test
public void testEquality() {
// tokens differing in identifier principal must not be equal
AuthenticationTokenIdentifier identifier = new AuthenticationTokenIdentifier(
createTAuthIdentifier("user", 1, 1000L, 2000L, "instanceid"));
// We don't need a real serialized Token for the password
DelegationTokenImpl token =
new DelegationTokenImpl(new byte[] {'f', 'a', 'k', 'e'}, identifier);
AuthenticationTokenIdentifier identifier2 = new AuthenticationTokenIdentifier(
createTAuthIdentifier("user1", 1, 1000L, 2000L, "instanceid"));
// We don't need a real serialized Token for the password
DelegationTokenImpl token2 =
new DelegationTokenImpl(new byte[] {'f', 'a', 'k', 'e'}, identifier2);
assertNotEquals(token, token2);
assertNotEquals(token.hashCode(), token2.hashCode());
// tokens differing only in password bytes must also not be equal
// We don't need a real serialized Token for the password
DelegationTokenImpl token3 =
new DelegationTokenImpl(new byte[] {'f', 'a', 'k', 'e', '0'}, identifier);
assertNotEquals(token, token3);
assertNotEquals(token.hashCode(), token3.hashCode());
assertNotEquals(token2, token3);
assertNotEquals(token2.hashCode(), token3.hashCode());
}
}
| 9,450 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/security | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/security/tokens/KerberosTokenTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.security.tokens;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.fail;
import java.util.Set;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.junit.jupiter.api.Test;
/**
* Test class for {@link KerberosToken}.
*/
public class KerberosTokenTest {

  @Test
  public void testAuthMethodAcceptance() {
    // Only KERBEROS and PROXY are acceptable login methods for a KerberosToken.
    // There is also KERBEROS_SSL but that appears to be deprecated/OBE.
    Set<AuthenticationMethod> allowedMethods =
        Set.of(AuthenticationMethod.KERBEROS, AuthenticationMethod.PROXY);
    for (AuthenticationMethod authMethod : AuthenticationMethod.values()) {
      if (allowedMethods.contains(authMethod)) {
        // allowed methods must validate without throwing
        assertDoesNotThrow(() -> KerberosToken.validateAuthMethod(authMethod),
            authMethod + " should not have triggered a thrown exception");
      } else {
        // every other method must be rejected
        assertThrows(IllegalArgumentException.class,
            () -> KerberosToken.validateAuthMethod(authMethod),
            authMethod + " should have triggered a thrown exception but it did not");
      }
    }
  }
}
| 9,451 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/security | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/security/tokens/CredentialProviderTokenTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.security.tokens;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.io.File;
import java.net.URL;
import org.apache.accumulo.core.client.security.tokens.AuthenticationToken.Properties;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
public class CredentialProviderTokenTest {
// Keystore contains: {'root.password':'password', 'bob.password':'bob'}
// keystorePath is a jceks:// URI pointing at the test-resource keystore,
// resolved once for all tests.
private static String keystorePath;
@SuppressFBWarnings(value = "PATH_TRAVERSAL_IN",
justification = "keystoreUrl location isn't provided by user input")
@BeforeAll
public static void setup() {
URL keystoreUrl = CredentialProviderTokenTest.class.getResource("/passwords.jceks");
assertNotNull(keystoreUrl);
keystorePath = "jceks://file/" + new File(keystoreUrl.getFile()).getAbsolutePath();
}
@Test
public void testPasswordsFromCredentialProvider() throws Exception {
// each alias in the keystore resolves to its stored password bytes
CredentialProviderToken token = new CredentialProviderToken("root.password", keystorePath);
assertEquals("root.password", token.getName());
assertEquals(keystorePath, token.getCredentialProviders());
assertArrayEquals("password".getBytes(UTF_8), token.getPassword());
token = new CredentialProviderToken("bob.password", keystorePath);
assertArrayEquals("bob".getBytes(UTF_8), token.getPassword());
}
@Test
public void testEqualityAfterInit() throws Exception {
// init(Properties) must produce the same password as the direct constructor
CredentialProviderToken token = new CredentialProviderToken("root.password", keystorePath);
CredentialProviderToken uninitializedToken = new CredentialProviderToken();
Properties props = new Properties();
props.put(CredentialProviderToken.NAME_PROPERTY, "root.password");
props.put(CredentialProviderToken.CREDENTIAL_PROVIDERS_PROPERTY, keystorePath);
uninitializedToken.init(props);
assertArrayEquals(token.getPassword(), uninitializedToken.getPassword());
}
@Test
public void cloneReturnsCorrectObject() throws Exception {
CredentialProviderToken token = new CredentialProviderToken("root.password", keystorePath);
CredentialProviderToken clone = token.clone();
assertEquals(token, clone);
assertArrayEquals(token.getPassword(), clone.getPassword());
}
@Test
public void missingProperties() {
// init() requires both the name and provider properties
CredentialProviderToken token = new CredentialProviderToken();
assertThrows(IllegalArgumentException.class, () -> token.init(new Properties()));
}
@Test
public void missingNameProperty() {
// only the name is supplied; the provider property is missing
CredentialProviderToken token = new CredentialProviderToken();
Properties props = new Properties();
props.put(CredentialProviderToken.NAME_PROPERTY, "root.password");
assertThrows(IllegalArgumentException.class, () -> token.init(props));
}
@Test
public void missingProviderProperty() {
// only the provider is supplied; the name property is missing
CredentialProviderToken token = new CredentialProviderToken();
Properties props = new Properties();
props.put(CredentialProviderToken.CREDENTIAL_PROVIDERS_PROPERTY, keystorePath);
assertThrows(IllegalArgumentException.class, () -> token.init(props));
}
}
| 9,452 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/admin/ImportConfigurationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.admin;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.apache.accumulo.core.clientImpl.ImportConfigurationImpl;
import org.junit.jupiter.api.Test;
public class ImportConfigurationTest {

  /** An empty configuration reports both keep-offline and keep-mappings as false. */
  @Test
  public void testEmpty() {
    ImportConfiguration emptyConfig = ImportConfiguration.empty();
    assertEquals(ImportConfiguration.EMPTY, emptyConfig);
    assertFalse(emptyConfig.isKeepOffline());
    assertFalse(emptyConfig.isKeepMappings());
  }

  /** Reading an unbuilt impl, or mutating an already-built one, must fail fast. */
  @Test
  public void testErrors() {
    ImportConfiguration unbuilt = new ImportConfigurationImpl();
    assertThrows(IllegalStateException.class, unbuilt::isKeepMappings);
    assertThrows(IllegalStateException.class, unbuilt::isKeepOffline);
    ImportConfigurationImpl built = (ImportConfigurationImpl) ImportConfiguration.builder().build();
    assertThrows(IllegalStateException.class, () -> built.setKeepMappings(true));
    assertThrows(IllegalStateException.class, () -> built.setKeepOffline(true));
  }

  /** Options set through the builder are reflected by the getters. */
  @Test
  public void testOptions() {
    ImportConfiguration built =
        ImportConfiguration.builder().setKeepMappings(true).setKeepOffline(true).build();
    assertTrue(built.isKeepMappings());
    assertTrue(built.isKeepOffline());
  }
}
| 9,453 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/admin/DelegationTokenConfigTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.admin;
import static java.util.concurrent.TimeUnit.DAYS;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import org.junit.jupiter.api.Test;
public class DelegationTokenConfigTest {

  /** Equal lifetimes expressed in different units compare equal (equals and hashCode). */
  @Test
  public void testTimeUnit() {
    DelegationTokenConfig millisConfig = new DelegationTokenConfig();
    DelegationTokenConfig secondsConfig = new DelegationTokenConfig();
    millisConfig.setTokenLifetime(1000, MILLISECONDS);
    secondsConfig.setTokenLifetime(1, SECONDS);
    assertEquals(millisConfig.getTokenLifetime(MILLISECONDS),
        secondsConfig.getTokenLifetime(MILLISECONDS));
    assertEquals(millisConfig, secondsConfig);
    assertEquals(millisConfig.hashCode(), secondsConfig.hashCode());
  }

  /** A zero lifetime is accepted and read back as zero. */
  @Test
  public void testNoTimeout() {
    DelegationTokenConfig config = new DelegationTokenConfig();
    config.setTokenLifetime(0, MILLISECONDS);
    assertEquals(0, config.getTokenLifetime(MILLISECONDS));
  }

  /** Negative lifetimes are rejected. */
  @Test
  public void testInvalidLifetime() {
    DelegationTokenConfig config = new DelegationTokenConfig();
    assertThrows(IllegalArgumentException.class, () -> config.setTokenLifetime(-1, DAYS));
  }

  /** A null unit passed to the setter is rejected. */
  @Test
  public void testSetInvalidTimeUnit() {
    DelegationTokenConfig config = new DelegationTokenConfig();
    assertThrows(NullPointerException.class, () -> config.setTokenLifetime(5, null));
  }

  /** A null unit passed to the getter is rejected. */
  @Test
  public void testGetInvalidTimeUnit() {
    DelegationTokenConfig config = new DelegationTokenConfig();
    assertThrows(NullPointerException.class, () -> config.getTokenLifetime(null));
  }
}
| 9,454 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/admin/NewTableConfigurationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.admin;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.sample.SamplerConfiguration;
import org.apache.accumulo.core.client.summary.Summarizer;
import org.apache.accumulo.core.client.summary.SummarizerConfiguration;
import org.apache.accumulo.core.client.summary.summarizers.FamilySummarizer;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
public class NewTableConfigurationTest {

  private SortedSet<Text> splits;
  private Map<String,String> options;

  @BeforeEach
  public void setup() {
    populateSplits();
    populateOptions();
  }

  public void populateSplits() {
    // Deliberately inserted out of order; the TreeSet is responsible for sorting.
    splits = new TreeSet<>();
    splits.add(new Text("ccccc"));
    splits.add(new Text("aaaaa"));
    splits.add(new Text("ddddd"));
    splits.add(new Text("abcde"));
    splits.add(new Text("bbbbb"));
  }

  /**
   * Assert that the given Texts are in strictly increasing lexicographic order.
   */
  private static void assertSorted(Iterable<Text> texts) {
    Text previous = new Text("");
    for (Text current : texts) {
      assertTrue(previous.toString().compareTo(current.toString()) < 0);
      previous = current;
    }
  }

  /**
   * Verify the withSplits/getSplits methods do as expected.
   *
   * The withSplits() takes a SortedSet as its input. Verify that the set orders the data even if
   * input non-ordered.
   *
   * The getSplits should return a SortedSet. Test verifies set performs ordering and the input set
   * and output set are equal.
   */
  @Test
  public void testWithAndGetSplits() {
    NewTableConfiguration ntc = new NewTableConfiguration().withSplits(splits);
    Collection<Text> ntcSplits = ntc.getSplits();
    // sizes must match so the element-by-element comparison below is exhaustive
    assertEquals(splits.size(), ntcSplits.size());
    Iterator<Text> splitIt = splits.iterator();
    Iterator<Text> ntcIt = ntcSplits.iterator();
    while (splitIt.hasNext() && ntcIt.hasNext()) {
      assertEquals(splitIt.next(), ntcIt.next());
    }
    // both the input set and the returned collection must be in sorted order
    assertSorted(splits);
    assertSorted(ntcSplits);
    // a configuration with no splits reports an empty collection
    NewTableConfiguration ntc2 = new NewTableConfiguration();
    Collection<Text> emptySplits = ntc2.getSplits();
    assertTrue(emptySplits.isEmpty());
  }

  /**
   * Verify that the createOffline option controls the initial table state.
   */
  @Test
  public void testCreateOffline() {
    NewTableConfiguration ntcOffline = new NewTableConfiguration().createOffline();
    assertEquals(InitialTableState.OFFLINE, ntcOffline.getInitialTableState());
    // default is online
    NewTableConfiguration ntcOnline = new NewTableConfiguration();
    assertEquals(InitialTableState.ONLINE, ntcOnline.getInitialTableState());
  }

  public void populateOptions() {
    options = new HashMap<>();
    options.put("hasher", "murmur3_32");
    options.put("modulus", "5");
  }

  /**
   * Verify enableSampling translates the sampler class and its options into table properties.
   */
  @Test
  public void testEnableSampling() {
    SamplerConfiguration sha1SamplerConfig = new SamplerConfiguration("com.mysampler");
    sha1SamplerConfig.setOptions(options);
    NewTableConfiguration ntcSample2 =
        new NewTableConfiguration().enableSampling(sha1SamplerConfig);
    assertEquals("com.mysampler", ntcSample2.getProperties().get("table.sampler"));
    assertEquals("5", ntcSample2.getProperties().get("table.sampler.opt.modulus"));
    assertEquals("murmur3_32", ntcSample2.getProperties().get("table.sampler.opt.hasher"));
  }

  /**
   * Verify enableSummarization returns SummarizerConfiguration with the expected class name(s).
   */
  @Test
  public void testEnableSummarization() {
    SummarizerConfiguration summarizerConfig1 = SummarizerConfiguration
        .builder("com.test.summarizer").setPropertyId("s1").addOption("opt1", "v1").build();
    NewTableConfiguration ntcSummarization1 =
        new NewTableConfiguration().enableSummarization(summarizerConfig1);
    assertEquals("v1", ntcSummarization1.getProperties().get("table.summarizer.s1.opt.opt1"));
    assertEquals("com.test.summarizer",
        ntcSummarization1.getProperties().get("table.summarizer.s1"));
    // a summarizer may also be specified by class rather than by name
    Class<? extends Summarizer> builderClass = FamilySummarizer.class;
    assertTrue(Summarizer.class.isAssignableFrom(builderClass));
    SummarizerConfiguration summarizerConfig2 = SummarizerConfiguration.builder(builderClass)
        .setPropertyId("s2").addOption("opt2", "v2").build();
    NewTableConfiguration ntcSummarization2 =
        new NewTableConfiguration().enableSummarization(summarizerConfig2);
    assertEquals("v2", ntcSummarization2.getProperties().get("table.summarizer.s2.opt.opt2"));
    assertEquals(builderClass.getName(),
        ntcSummarization2.getProperties().get("table.summarizer.s2"));
    // multiple summarizers may be enabled in a single call
    NewTableConfiguration ntcSummarization3 =
        new NewTableConfiguration().enableSummarization(summarizerConfig1, summarizerConfig2);
    assertEquals("v1", ntcSummarization1.getProperties().get("table.summarizer.s1.opt.opt1"));
    assertEquals("v2", ntcSummarization2.getProperties().get("table.summarizer.s2.opt.opt2"));
    assertEquals("com.test.summarizer",
        ntcSummarization3.getProperties().get("table.summarizer.s1"));
    assertEquals(builderClass.getName(),
        ntcSummarization3.getProperties().get("table.summarizer.s2"));
  }

  /**
   * Verify that you cannot have overlapping locality groups.
   *
   * Attempt to set a locality group with overlapping groups. This test should throw an
   * IllegalArgumentException indicating that groups overlap.
   */
  @Test
  public void testOverlappingGroupsFail() {
    NewTableConfiguration ntc = new NewTableConfiguration();
    Map<String,Set<Text>> lgroups = new HashMap<>();
    // colFamB appears in both groups
    lgroups.put("lg1", Set.of(new Text("colFamA"), new Text("colFamB")));
    lgroups.put("lg2", Set.of(new Text("colFamC"), new Text("colFamB")));
    assertThrows(IllegalArgumentException.class, () -> ntc.setLocalityGroups(lgroups));
  }

  /**
   * Verify iterator conflicts are discovered: same name, different class.
   */
  @Test
  public void testIteratorConflictFound1() {
    NewTableConfiguration ntc = new NewTableConfiguration();
    IteratorSetting setting = new IteratorSetting(10, "someName", "foo.bar");
    ntc.attachIterator(setting, EnumSet.of(IteratorScope.scan));
    IteratorSetting setting2 = new IteratorSetting(12, "someName", "foo2.bar");
    assertThrows(IllegalArgumentException.class,
        () -> ntc.attachIterator(setting2, EnumSet.of(IteratorScope.scan)));
  }

  /**
   * Verify iterator conflicts are discovered: same priority, different name.
   */
  @Test
  public void testIteratorConflictFound2() {
    NewTableConfiguration ntc = new NewTableConfiguration();
    IteratorSetting setting = new IteratorSetting(10, "someName", "foo.bar");
    ntc.attachIterator(setting, EnumSet.of(IteratorScope.scan));
    IteratorSetting setting2 = new IteratorSetting(10, "anotherName", "foo2.bar");
    assertThrows(IllegalArgumentException.class,
        () -> ntc.attachIterator(setting2, EnumSet.of(IteratorScope.scan)));
  }

  /**
   * Verify iterator conflicts are discovered: same name and class, different priority.
   */
  @Test
  public void testIteratorConflictFound3() {
    NewTableConfiguration ntc = new NewTableConfiguration();
    IteratorSetting setting = new IteratorSetting(10, "someName", "foo.bar");
    ntc.attachIterator(setting, EnumSet.of(IteratorScope.scan));
    IteratorSetting setting2 = new IteratorSetting(12, "someName", "foo.bar");
    assertThrows(IllegalArgumentException.class,
        () -> ntc.attachIterator(setting2, EnumSet.of(IteratorScope.scan)));
  }

  /**
   * Verify that properties set using NewTableConfiguration must be table properties.
   */
  @Test
  public void testInvalidTablePropertiesSet() {
    NewTableConfiguration ntc = new NewTableConfiguration();
    Map<String,String> props = new HashMap<>();
    // These properties should work just with no issue
    props.put(Property.TABLE_ARBITRARY_PROP_PREFIX.getKey() + "prop1", "val1");
    props.put(Property.TABLE_ARBITRARY_PROP_PREFIX.getKey() + "prop2", "val2");
    ntc.setProperties(props);
    // These properties should result in an IllegalArgumentException
    props.put("invalidProp1", "value1");
    props.put("invalidProp2", "value2");
    assertThrows(IllegalArgumentException.class, () -> ntc.setProperties(props));
  }

  /**
   * Verify checkDisjoint works with iterators groups.
   */
  @Test
  public void testAttachIteratorDisjointCheck() {
    NewTableConfiguration ntc = new NewTableConfiguration();
    Map<String,String> props = new HashMap<>();
    props.put("table.iterator.scan.someName", "10");
    ntc.setProperties(props);
    // attaching an iterator whose property was already set directly must fail
    IteratorSetting setting = new IteratorSetting(10, "someName", "foo.bar");
    assertThrows(IllegalArgumentException.class,
        () -> ntc.attachIterator(setting, EnumSet.of(IteratorScope.scan)));
  }

  /**
   * Verify that disjoint check works as expected with setProperties.
   */
  @Test
  public void testSetPropertiesDisjointCheck() {
    NewTableConfiguration ntc = new NewTableConfiguration();
    Map<String,Set<Text>> lgroups = new HashMap<>();
    lgroups.put("lg1", Set.of(new Text("dog")));
    ntc.setLocalityGroups(lgroups);
    // setting a property that collides with the locality group must fail
    Map<String,String> props = new HashMap<>();
    props.put("table.key1", "val1");
    props.put("table.group.lg1", "cat");
    assertThrows(IllegalArgumentException.class, () -> ntc.setProperties(props));
  }

  /**
   * Verify checkDisjoint works with locality groups.
   */
  @Test
  public void testSetLocalityGroupsDisjointCheck() {
    NewTableConfiguration ntc = new NewTableConfiguration();
    Map<String,String> props = new HashMap<>();
    props.put("table.group.lg1", "cat");
    ntc.setProperties(props);
    // defining a locality group already named via a raw property must fail
    Map<String,Set<Text>> lgroups = new HashMap<>();
    lgroups.put("lg1", Set.of(new Text("dog")));
    assertThrows(IllegalArgumentException.class, () -> ntc.setLocalityGroups(lgroups));
  }
}
| 9,455 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/rfile/RFileClientTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.rfile;
import static com.google.common.collect.MoreCollectors.onlyElement;
import static org.apache.accumulo.core.util.LazySingletons.RANDOM;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import java.io.File;
import java.io.IOException;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.client.admin.NewTableConfiguration;
import org.apache.accumulo.core.client.rfile.RFile.InputArguments.FencedPath;
import org.apache.accumulo.core.client.sample.RowSampler;
import org.apache.accumulo.core.client.sample.SamplerConfiguration;
import org.apache.accumulo.core.client.summary.CounterSummary;
import org.apache.accumulo.core.client.summary.SummarizerConfiguration;
import org.apache.accumulo.core.client.summary.Summary;
import org.apache.accumulo.core.client.summary.summarizers.FamilySummarizer;
import org.apache.accumulo.core.client.summary.summarizers.VisibilitySummarizer;
import org.apache.accumulo.core.conf.DefaultConfiguration;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.data.ArrayByteSequence;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.PartialKey;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.file.FileOperations;
import org.apache.accumulo.core.file.FileSKVIterator;
import org.apache.accumulo.core.file.rfile.RFile.RFileSKVIterator;
import org.apache.accumulo.core.file.rfile.RFile.Reader;
import org.apache.accumulo.core.iterators.user.RegExFilter;
import org.apache.accumulo.core.metadata.UnreferencedTabletFile;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.spi.crypto.NoCryptoServiceFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
@SuppressFBWarnings(value = "PATH_TRAVERSAL_IN", justification = "path is set by test, not user")
public class RFileClientTest {
private String createTmpTestFile() throws IOException {
File dir = new File(System.getProperty("user.dir") + "/target/rfile-test");
assertTrue(dir.mkdirs() || dir.isDirectory());
File testFile = File.createTempFile("test", ".rf", dir);
assertTrue(testFile.delete() || !testFile.exists());
return testFile.getAbsolutePath();
}
String rowStr(int r) {
return String.format("%06x", r);
}
String colStr(int c) {
return String.format("%04x", c);
}
  private SortedMap<Key,Value> createTestData(int rows, int families, int qualifiers) {
    // Convenience overload: start at row 0 and family 0.
    return createTestData(0, rows, 0, families, qualifiers);
  }
  private SortedMap<Key,Value> createTestData(int startRow, int rows, int startFamily, int families,
      int qualifiers) {
    // Convenience overload: a single empty visibility string per key.
    return createTestData(startRow, rows, startFamily, families, qualifiers, "");
  }
private SortedMap<Key,Value> createTestData(int startRow, int rows, int startFamily, int families,
int qualifiers, String... vis) {
TreeMap<Key,Value> testData = new TreeMap<>();
for (int r = 0; r < rows; r++) {
String row = rowStr(r + startRow);
for (int f = 0; f < families; f++) {
String fam = colStr(f + startFamily);
for (int q = 0; q < qualifiers; q++) {
String qual = colStr(q);
for (String v : vis) {
Key k = new Key(row, fam, qual, v);
testData.put(k, new Value(k.hashCode() + ""));
}
}
}
}
return testData;
}
private String createRFile(SortedMap<Key,Value> testData) throws Exception {
String testFile = createTmpTestFile();
try (RFileWriter writer = RFile.newWriter().to(testFile)
.withFileSystem(FileSystem.getLocal(new Configuration())).build()) {
writer.append(testData.entrySet());
// TODO ensure compressors are returned
}
return testFile;
}
  @Test
  public void testIndependance() throws Exception {
    // test to ensure two iterators allocated from same RFile scanner are independent.
    // (NOTE(review): method name carries a typo, "Independance"; left as-is since
    // test method names are discovered by the runner.)
    LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
    SortedMap<Key,Value> testData = createTestData(10, 10, 10);
    String testFile = createRFile(testData);
    Scanner scanner = RFile.newScanner().from(testFile).withFileSystem(localFs).build();
    // first iterator over an exact-row range
    Range range1 = Range.exact(rowStr(5));
    scanner.setRange(range1);
    Iterator<Entry<Key,Value>> scnIter1 = scanner.iterator();
    Iterator<Entry<Key,Value>> mapIter1 =
        testData.subMap(range1.getStartKey(), range1.getEndKey()).entrySet().iterator();
    // second iterator created after changing the scanner's range; it must not
    // disturb the first iterator
    Range range2 = new Range(rowStr(3), true, rowStr(4), true);
    scanner.setRange(range2);
    Iterator<Entry<Key,Value>> scnIter2 = scanner.iterator();
    Iterator<Entry<Key,Value>> mapIter2 =
        testData.subMap(range2.getStartKey(), range2.getEndKey()).entrySet().iterator();
    // interleave consumption of both iterators; each must track its own sub-map
    while (scnIter1.hasNext() || scnIter2.hasNext()) {
      if (scnIter1.hasNext()) {
        assertTrue(mapIter1.hasNext());
        assertEquals(scnIter1.next(), mapIter1.next());
      } else {
        assertFalse(mapIter1.hasNext());
      }
      if (scnIter2.hasNext()) {
        assertTrue(mapIter2.hasNext());
        assertEquals(scnIter2.next(), mapIter2.next());
      } else {
        assertFalse(mapIter2.hasNext());
      }
    }
    // both expected iterators must be fully consumed
    assertFalse(mapIter1.hasNext());
    assertFalse(mapIter2.hasNext());
    scanner.close();
  }
SortedMap<Key,Value> toMap(Scanner scanner) {
TreeMap<Key,Value> map = new TreeMap<>();
for (Entry<Key,Value> entry : scanner) {
map.put(entry.getKey(), entry.getValue());
}
return map;
}
SortedMap<Key,Value> toMap(FileSKVIterator iterator) throws IOException {
TreeMap<Key,Value> map = new TreeMap<>();
while (iterator.hasTop()) {
// Need to copy Value as the reference gets reused
map.put(iterator.getTopKey(), new Value(iterator.getTopValue()));
iterator.next();
}
return map;
}
  @Test
  public void testMultipleSources() throws Exception {
    // Scanning two rfiles at once must present their merged, sorted union.
    SortedMap<Key,Value> testData1 = createTestData(10, 10, 10);
    SortedMap<Key,Value> testData2 = createTestData(0, 10, 0, 10, 10);
    String testFile1 = createRFile(testData1);
    String testFile2 = createRFile(testData2);
    LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
    Scanner scanner = RFile.newScanner().from(testFile1, testFile2).withFileSystem(localFs).build();
    TreeMap<Key,Value> expected = new TreeMap<>(testData1);
    expected.putAll(testData2);
    assertEquals(expected, toMap(scanner));
    // restricting the range must restrict the merged view the same way
    Range range = new Range(rowStr(3), true, rowStr(14), true);
    scanner.setRange(range);
    assertEquals(expected.subMap(range.getStartKey(), range.getEndKey()), toMap(scanner));
    scanner.close();
  }
  @Test
  public void testFencingScanner() throws Exception {
    // A FencedPath restricts the file to a row range at the rfile level, without
    // any range being set on the scanner itself.
    SortedMap<Key,Value> testData = createTestData(10, 10, 10);
    String testFile = createRFile(testData);
    LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
    Range range = new Range(rowStr(3), true, rowStr(14), true);
    Scanner scanner =
        RFile.newScanner().from(new FencedPath(new Path(new File(testFile).toURI()), range))
            .withFileSystem(localFs).build();
    TreeMap<Key,Value> expected = new TreeMap<>(testData);
    // Range is set on the RFile iterator itself and not the scanner
    assertEquals(expected.subMap(range.getStartKey(), range.getEndKey()), toMap(scanner));
    scanner.close();
  }
  @Test
  public void testRequiresRowRange() throws Exception {
    SortedMap<Key,Value> testData = createTestData(10, 10, 10);
    String testFile = createRFile(testData);
    // Row Ranges may have null for start and/or end row or be set.
    // If start is set, it must be inclusive and if end is set it must be exclusive.
    // End key must also be an exclusive key (end in 0x00 byte).
    // Lastly only the row portion of a key is allowed.
    // Test valid Row Ranges
    new FencedPath(new Path(new File(testFile).toURI()), new Range());
    // This constructor converts to the proper inclusive/exclusive rows
    new FencedPath(new Path(new File(testFile).toURI()),
        new Range(rowStr(3), true, rowStr(14), true));
    new FencedPath(new Path(new File(testFile).toURI()), new Range(new Key(rowStr(3)), true,
        new Key(rowStr(14)).followingKey(PartialKey.ROW), false));
    // Test invalid Row Ranges
    // Missing 0x00 byte
    assertThrows(IllegalArgumentException.class,
        () -> new FencedPath(new Path(new File(testFile).toURI()),
            new Range(new Key(rowStr(3)), true, new Key(rowStr(14)), false)));
    // End key inclusive
    assertThrows(IllegalArgumentException.class,
        () -> new FencedPath(new Path(new File(testFile).toURI()),
            new Range(new Key(rowStr(3)), true, new Key(rowStr(14)), true)));
    // Start key exclusive
    assertThrows(IllegalArgumentException.class,
        () -> new FencedPath(new Path(new File(testFile).toURI()),
            new Range(new Key(rowStr(3)), false, new Key(rowStr(14)), false)));
    // CF is set which is not allowed
    assertThrows(IllegalArgumentException.class,
        () -> new FencedPath(new Path(new File(testFile).toURI()),
            new Range(new Key(rowStr(3), colStr(3)), true,
                new Key(rowStr(14)).followingKey(PartialKey.ROW), false)));
  }
  @Test
  public void testFencingReader() throws Exception {
    // Same fencing behavior as the scanner test, but via the low-level reader.
    SortedMap<Key,Value> testData = createTestData(10, 10, 10);
    String testFile = createRFile(testData);
    LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
    Range range = new Range(rowStr(3), true, rowStr(14), true);
    RFileSKVIterator reader =
        getReader(localFs, UnreferencedTabletFile.ofRanged(localFs, new File(testFile), range));
    // seek over everything; the fence on the file should do the restricting
    reader.seek(new Range(), List.of(), false);
    TreeMap<Key,Value> expected = new TreeMap<>(testData);
    // Range is set on the RFile iterator itself and not the scanner
    assertEquals(expected.subMap(range.getStartKey(), range.getEndKey()), toMap(reader));
    reader.close();
  }
  @Test
  public void testWriterTableProperties() throws Exception {
    // Table properties passed to the writer (tiny 1K block sizes here) must be
    // honored; verified indirectly via the number of index entries produced.
    LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
    String testFile = createTmpTestFile();
    Map<String,String> props = new HashMap<>();
    props.put(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE.getKey(), "1K");
    props.put(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE_INDEX.getKey(), "1K");
    RFileWriter writer =
        RFile.newWriter().to(testFile).withFileSystem(localFs).withTableProperties(props).build();
    SortedMap<Key,Value> testData1 = createTestData(10, 10, 10);
    writer.append(testData1.entrySet());
    writer.close();
    RFileSKVIterator reader =
        getReader(localFs, UnreferencedTabletFile.of(localFs, new File(testFile)));
    FileSKVIterator iiter = reader.getIndex();
    int count = 0;
    while (iiter.hasTop()) {
      count++;
      iiter.next();
    }
    // if settings are used then should create multiple index entries
    assertTrue(count > 10);
    reader.close();
    // the data itself must still round-trip intact
    Scanner scanner = RFile.newScanner().from(testFile).withFileSystem(localFs).build();
    assertEquals(testData1, toMap(scanner));
    scanner.close();
  }
  @Test
  public void testLocalityGroups() throws Exception {
    // Three disjoint family ranges: two named locality groups plus the default group.
    SortedMap<Key,Value> testData1 = createTestData(0, 10, 0, 2, 10);
    SortedMap<Key,Value> testData2 = createTestData(0, 10, 2, 1, 10);
    SortedMap<Key,Value> defaultData = createTestData(0, 10, 3, 7, 10);
    LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
    String testFile = createTmpTestFile();
    RFileWriter writer = RFile.newWriter().to(testFile).withFileSystem(localFs).build();
    writer.startNewLocalityGroup("z", colStr(0), colStr(1));
    writer.append(testData1.entrySet());
    writer.startNewLocalityGroup("h", colStr(2));
    writer.append(testData2.entrySet());
    writer.startDefaultLocalityGroup();
    writer.append(defaultData.entrySet());
    writer.close();
    // fetching a group's families must return exactly that group's data
    Scanner scanner = RFile.newScanner().from(testFile).withFileSystem(localFs).build();
    scanner.fetchColumnFamily(new Text(colStr(0)));
    scanner.fetchColumnFamily(new Text(colStr(1)));
    assertEquals(testData1, toMap(scanner));
    scanner.clearColumns();
    scanner.fetchColumnFamily(new Text(colStr(2)));
    assertEquals(testData2, toMap(scanner));
    scanner.clearColumns();
    for (int i = 3; i < 10; i++) {
      scanner.fetchColumnFamily(new Text(colStr(i)));
    }
    assertEquals(defaultData, toMap(scanner));
    scanner.clearColumns();
    // with no column filter, all groups merge back into the full data set
    assertEquals(createTestData(10, 10, 10), toMap(scanner));
    scanner.close();
    // verify the group -> families metadata written into the rfile
    Reader reader =
        (Reader) getReader(localFs, UnreferencedTabletFile.of(localFs, new File(testFile)));
    Map<String,ArrayList<ByteSequence>> lGroups = reader.getLocalityGroupCF();
    assertTrue(lGroups.containsKey("z"));
    assertEquals(2, lGroups.get("z").size());
    assertTrue(lGroups.get("z").contains(new ArrayByteSequence(colStr(0))));
    assertTrue(lGroups.get("z").contains(new ArrayByteSequence(colStr(1))));
    assertTrue(lGroups.containsKey("h"));
    assertEquals(Arrays.asList(new ArrayByteSequence(colStr(2))), lGroups.get("h"));
    reader.close();
  }
  @Test
  public void testIterators() throws Exception {
    // A scan-time iterator attached to the RFile scanner must filter the data;
    // the regex keeps only rows 000007 and 000008.
    LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
    SortedMap<Key,Value> testData = createTestData(10, 10, 10);
    String testFile = createRFile(testData);
    IteratorSetting is = new IteratorSetting(50, "regex", RegExFilter.class);
    RegExFilter.setRegexs(is, ".*00000[78].*", null, null, null, false);
    Scanner scanner = RFile.newScanner().from(testFile).withFileSystem(localFs).build();
    scanner.addScanIterator(is);
    assertEquals(createTestData(7, 2, 0, 10, 10), toMap(scanner));
    scanner.close();
  }
  @Test
  public void testAuths() throws Exception {
    // Entries with column visibilities must only be returned to scanners whose
    // authorizations satisfy the visibility expression.
    LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
    String testFile = createTmpTestFile();
    RFileWriter writer = RFile.newWriter().to(testFile).withFileSystem(localFs).build();
    Key k1 = new Key("r1", "f1", "q1", "A&B");
    Key k2 = new Key("r1", "f1", "q2", "A");
    Key k3 = new Key("r1", "f1", "q3");
    Value v1 = new Value("p");
    Value v2 = new Value("c");
    Value v3 = new Value("t");
    writer.append(k1, v1);
    writer.append(k2, v2);
    writer.append(k3, v3);
    writer.close();
    // auth "A": sees the "A" entry and the unlabeled one, not "A&B"
    Scanner scanner = RFile.newScanner().from(testFile).withFileSystem(localFs)
        .withAuthorizations(new Authorizations("A")).build();
    assertEquals(Map.of(k2, v2, k3, v3), toMap(scanner));
    assertEquals(new Authorizations("A"), scanner.getAuthorizations());
    scanner.close();
    // auths "A","B": sees everything
    scanner = RFile.newScanner().from(testFile).withFileSystem(localFs)
        .withAuthorizations(new Authorizations("A", "B")).build();
    assertEquals(Map.of(k1, v1, k2, v2, k3, v3), toMap(scanner));
    assertEquals(new Authorizations("A", "B"), scanner.getAuthorizations());
    scanner.close();
    // auth "B" alone: only the unlabeled entry is visible
    scanner = RFile.newScanner().from(testFile).withFileSystem(localFs)
        .withAuthorizations(new Authorizations("B")).build();
    assertEquals(Map.of(k3, v3), toMap(scanner));
    assertEquals(new Authorizations("B"), scanner.getAuthorizations());
    scanner.close();
  }
@Test
public void testNoSystemIters() throws Exception {
  // A delete entry normally suppresses the deleted key; withoutSystemIterators()
  // exposes the raw file contents, including the delete marker itself.
  LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
  String testFile = createTmpTestFile();
  Key k1 = new Key("r1", "f1", "q1");
  k1.setTimestamp(3);
  // same key at a later timestamp, marked deleted
  Key k2 = new Key("r1", "f1", "q1");
  k2.setTimestamp(6);
  k2.setDeleted(true);
  Value v1 = new Value("p");
  Value v2 = new Value("");
  // try-with-resources so resources close even if an assertion fails
  try (RFileWriter writer = RFile.newWriter().to(testFile).withFileSystem(localFs).build()) {
    writer.append(k2, v2);
    writer.append(k1, v1);
  }
  // with system iterators, the delete masks the put
  try (Scanner scanner = RFile.newScanner().from(testFile).withFileSystem(localFs).build()) {
    assertFalse(scanner.iterator().hasNext());
  }
  // without system iterators, both raw entries are returned
  try (Scanner scanner =
      RFile.newScanner().from(testFile).withFileSystem(localFs).withoutSystemIterators().build()) {
    assertEquals(Map.of(k2, v2, k1, v1), toMap(scanner));
    scanner.setRange(new Range("r2"));
    assertFalse(scanner.iterator().hasNext());
  }
}
@Test
public void testBounds() throws Exception {
  // Verify withBounds() restricts a scanner to the requested key range.
  LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
  SortedMap<Key,Value> testData = createTestData(10, 10, 10);
  String testFile = createRFile(testData);
  // set a lower bound row (exclusive start)
  assertEquals(createTestData(4, 6, 0, 10, 10),
      scanWithBounds(localFs, testFile, new Range(rowStr(3), false, null, true)));
  // set an upper bound row (inclusive end)
  assertEquals(createTestData(8, 10, 10),
      scanWithBounds(localFs, testFile, new Range(null, false, rowStr(7), true)));
  // set row bounds on both ends
  assertEquals(createTestData(4, 4, 0, 10, 10),
      scanWithBounds(localFs, testFile, new Range(rowStr(3), false, rowStr(7), true)));
  // set a row family bound
  assertEquals(createTestData(3, 1, 5, 1, 10),
      scanWithBounds(localFs, testFile, Range.exact(rowStr(3), colStr(5))));
}

// Scans the given file with the given bounds and returns every entry seen;
// the scanner is always closed, even when an assertion in the caller fails.
private Map<Key,Value> scanWithBounds(LocalFileSystem localFs, String testFile, Range bounds)
    throws Exception {
  try (Scanner scanner =
      RFile.newScanner().from(testFile).withFileSystem(localFs).withBounds(bounds).build()) {
    return toMap(scanner);
  }
}
@Test
public void testScannerTableProperties() throws Exception {
  // Two versions of the same key: a table config carrying the default
  // versioning iterator keeps only the newest, while a plain scan returns both.
  NewTableConfiguration ntc = new NewTableConfiguration();
  LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
  String testFile = createTmpTestFile();
  Key k1 = new Key("r1", "f1", "q1");
  k1.setTimestamp(3);
  Key k2 = new Key("r1", "f1", "q1");
  k2.setTimestamp(6);
  Value v1 = new Value("p");
  Value v2 = new Value("q");
  // try-with-resources so resources close even if an assertion fails
  try (RFileWriter writer = RFile.newWriter().to(testFile).withFileSystem(localFs).build()) {
    writer.append(k2, v2);
    writer.append(k1, v1);
  }
  // pass in table config that has versioning iterator configured
  try (Scanner scanner = RFile.newScanner().from(testFile).withFileSystem(localFs)
      .withTableProperties(ntc.getProperties()).build()) {
    assertEquals(Map.of(k2, v2), toMap(scanner));
  }
  // without table properties, both versions are returned
  try (Scanner scanner = RFile.newScanner().from(testFile).withFileSystem(localFs).build()) {
    assertEquals(Map.of(k2, v2, k1, v1), toMap(scanner));
  }
}
@Test
public void testSampling() throws Exception {
  // Write a file with a row-hash sampler configured, then verify that a scan
  // with the same sampler configuration returns exactly the sample set, and
  // that clearing the sampler restores the full data set.
  SortedMap<Key,Value> testData1 = createTestData(1000, 2, 1);
  LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
  String testFile = createTmpTestFile();
  SamplerConfiguration sc = new SamplerConfiguration(RowSampler.class)
      .setOptions(Map.of("hasher", "murmur3_32", "modulus", "19"));
  try (RFileWriter writer =
      RFile.newWriter().to(testFile).withFileSystem(localFs).withSampler(sc).build()) {
    writer.append(testData1.entrySet());
  }
  // compute the expected sample independently with the same sampler
  RowSampler rowSampler = new RowSampler();
  rowSampler.init(sc);
  SortedMap<Key,Value> sampleData = new TreeMap<>();
  for (Entry<Key,Value> e : testData1.entrySet()) {
    if (rowSampler.accept(e.getKey())) {
      sampleData.put(e.getKey(), e.getValue());
    }
  }
  assertTrue(sampleData.size() < testData1.size());
  // bug fix: the scanner was previously never closed
  try (Scanner scanner = RFile.newScanner().from(testFile).withFileSystem(localFs).build()) {
    scanner.setSamplerConfiguration(sc);
    assertEquals(sampleData, toMap(scanner));
    scanner.clearSamplerConfiguration();
    assertEquals(testData1, toMap(scanner));
  }
}
@Test
public void testAppendScanner() throws Exception {
  // Copy a file's contents to a new file via RFileWriter.append(scanner) and
  // verify the copy contains identical data.
  SortedMap<Key,Value> testData = createTestData(10000, 1, 1);
  String testFile = createRFile(testData);
  LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
  String testFile2 = createTmpTestFile();
  try (Scanner scanner = RFile.newScanner().from(testFile).withFileSystem(localFs).build()) {
    // consistency fix: build the writer with the same local file system as the
    // rest of this test (it was previously built without withFileSystem)
    try (RFileWriter writer =
        RFile.newWriter().to(testFile2).withFileSystem(localFs).build()) {
      writer.append(scanner);
    }
  }
  try (Scanner scanner = RFile.newScanner().from(testFile2).withFileSystem(localFs).build()) {
    assertEquals(testData, toMap(scanner));
  }
}
@Test
public void testCache() throws Exception {
  // Exercise the index and data caches by repeatedly scanning random single rows.
  SortedMap<Key,Value> testData = createTestData(10000, 1, 1);
  String testFile = createRFile(testData);
  LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
  // try-with-resources so the scanner is closed even if an assertion fails
  try (Scanner scanner = RFile.newScanner().from(testFile).withFileSystem(localFs)
      .withIndexCache(1000000).withDataCache(10000000).build()) {
    RANDOM.get().ints(100, 0, 10_000).forEach(r -> {
      scanner.setRange(new Range(rowStr(r)));
      // each row range should match exactly one entry, with that row
      String actual = scanner.stream().collect(onlyElement()).getKey().getRow().toString();
      assertEquals(rowStr(r), actual);
    });
  }
}
@Test
public void testSummaries() throws Exception {
// Writes RFiles configured with two summarizers (visibility and column
// family), then verifies the stored summary data can be read back, merged
// across files, filtered by summarizer, and restricted by row ranges.
SummarizerConfiguration sc1 =
SummarizerConfiguration.builder(VisibilitySummarizer.class).build();
SummarizerConfiguration sc2 = SummarizerConfiguration.builder(FamilySummarizer.class).build();
LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
String testFile = createTmpTestFile();
SortedMap<Key,Value> testData1 = createTestData(0, 100, 0, 4, 1, "A&B", "A&B&C");
RFileWriter writer =
RFile.newWriter().to(testFile).withFileSystem(localFs).withSummarizers(sc1, sc2).build();
writer.append(testData1.entrySet());
writer.close();
// verify summary data
Collection<Summary> summaries = RFile.summaries().from(testFile).withFileSystem(localFs).read();
assertEquals(2, summaries.size());
for (Summary summary : summaries) {
// single file, so the statistics should be exact: nothing inaccurate
assertEquals(0, summary.getFileStatistics().getInaccurate());
assertEquals(1, summary.getFileStatistics().getTotal());
String className = summary.getSummarizerConfiguration().getClassName();
CounterSummary counterSummary = new CounterSummary(summary);
if (className.equals(FamilySummarizer.class.getName())) {
Map<String,Long> counters = counterSummary.getCounters();
Map<String,Long> expected = Map.of("0000", 200L, "0001", 200L, "0002", 200L, "0003", 200L);
assertEquals(expected, counters);
} else if (className.equals(VisibilitySummarizer.class.getName())) {
Map<String,Long> counters = counterSummary.getCounters();
Map<String,Long> expected = Map.of("A&B", 400L, "A&B&C", 400L);
assertEquals(expected, counters);
} else {
fail("Unexpected classname " + className);
}
}
// check if writing summary data impacted normal rfile functionality
Scanner scanner = RFile.newScanner().from(testFile).withFileSystem(localFs)
.withAuthorizations(new Authorizations("A", "B", "C")).build();
assertEquals(testData1, toMap(scanner));
scanner.close();
// write a second file with the same summarizers over a later row range
String testFile2 = createTmpTestFile();
SortedMap<Key,Value> testData2 = createTestData(100, 100, 0, 4, 1, "A&B", "A&B&C");
writer =
RFile.newWriter().to(testFile2).withFileSystem(localFs).withSummarizers(sc1, sc2).build();
writer.append(testData2.entrySet());
writer.close();
// verify reading summaries from multiple files works
summaries = RFile.summaries().from(testFile, testFile2).withFileSystem(localFs).read();
assertEquals(2, summaries.size());
for (Summary summary : summaries) {
// counters from both files should be merged, doubling every count
assertEquals(0, summary.getFileStatistics().getInaccurate());
assertEquals(2, summary.getFileStatistics().getTotal());
String className = summary.getSummarizerConfiguration().getClassName();
CounterSummary counterSummary = new CounterSummary(summary);
if (className.equals(FamilySummarizer.class.getName())) {
Map<String,Long> counters = counterSummary.getCounters();
Map<String,Long> expected = Map.of("0000", 400L, "0001", 400L, "0002", 400L, "0003", 400L);
assertEquals(expected, counters);
} else if (className.equals(VisibilitySummarizer.class.getName())) {
Map<String,Long> counters = counterSummary.getCounters();
Map<String,Long> expected = Map.of("A&B", 800L, "A&B&C", 800L);
assertEquals(expected, counters);
} else {
fail("Unexpected classname " + className);
}
}
// verify reading a subset of summaries works
summaries = RFile.summaries().from(testFile, testFile2).withFileSystem(localFs)
.selectSummaries(sc -> sc.equals(sc1)).read();
checkSummaries(summaries, Map.of("A&B", 800L, "A&B&C", 800L), 0);
// the following test check boundary conditions for start row and end row
// (apparently the first file covers rows 0..99 and the second rows 100..199;
// a bound that cuts into a file makes that file's counts extra/inaccurate —
// TODO confirm against createTestData)
summaries = RFile.summaries().from(testFile, testFile2).withFileSystem(localFs)
.selectSummaries(sc -> sc.equals(sc1)).startRow(rowStr(99)).read();
checkSummaries(summaries, Map.of("A&B", 400L, "A&B&C", 400L), 0);
summaries = RFile.summaries().from(testFile, testFile2).withFileSystem(localFs)
.selectSummaries(sc -> sc.equals(sc1)).startRow(rowStr(98)).read();
checkSummaries(summaries, Map.of("A&B", 800L, "A&B&C", 800L), 1);
summaries = RFile.summaries().from(testFile, testFile2).withFileSystem(localFs)
.selectSummaries(sc -> sc.equals(sc1)).startRow(rowStr(0)).read();
checkSummaries(summaries, Map.of("A&B", 800L, "A&B&C", 800L), 1);
summaries = RFile.summaries().from(testFile, testFile2).withFileSystem(localFs)
.selectSummaries(sc -> sc.equals(sc1)).startRow("#").read();
checkSummaries(summaries, Map.of("A&B", 800L, "A&B&C", 800L), 0);
summaries = RFile.summaries().from(testFile, testFile2).withFileSystem(localFs)
.selectSummaries(sc -> sc.equals(sc1)).startRow(rowStr(100)).read();
checkSummaries(summaries, Map.of("A&B", 400L, "A&B&C", 400L), 1);
summaries = RFile.summaries().from(testFile, testFile2).withFileSystem(localFs)
.selectSummaries(sc -> sc.equals(sc1)).endRow(rowStr(99)).read();
checkSummaries(summaries, Map.of("A&B", 400L, "A&B&C", 400L), 0);
summaries = RFile.summaries().from(testFile, testFile2).withFileSystem(localFs)
.selectSummaries(sc -> sc.equals(sc1)).endRow(rowStr(100)).read();
checkSummaries(summaries, Map.of("A&B", 800L, "A&B&C", 800L), 1);
summaries = RFile.summaries().from(testFile, testFile2).withFileSystem(localFs)
.selectSummaries(sc -> sc.equals(sc1)).endRow(rowStr(199)).read();
checkSummaries(summaries, Map.of("A&B", 800L, "A&B&C", 800L), 0);
summaries = RFile.summaries().from(testFile, testFile2).withFileSystem(localFs)
.selectSummaries(sc -> sc.equals(sc1)).startRow(rowStr(50)).endRow(rowStr(150)).read();
checkSummaries(summaries, Map.of("A&B", 800L, "A&B&C", 800L), 2);
summaries = RFile.summaries().from(testFile, testFile2).withFileSystem(localFs)
.selectSummaries(sc -> sc.equals(sc1)).startRow(rowStr(120)).endRow(rowStr(150)).read();
checkSummaries(summaries, Map.of("A&B", 400L, "A&B&C", 400L), 1);
summaries = RFile.summaries().from(testFile, testFile2).withFileSystem(localFs)
.selectSummaries(sc -> sc.equals(sc1)).startRow(rowStr(50)).endRow(rowStr(199)).read();
checkSummaries(summaries, Map.of("A&B", 800L, "A&B&C", 800L), 1);
summaries = RFile.summaries().from(testFile, testFile2).withFileSystem(localFs)
.selectSummaries(sc -> sc.equals(sc1)).startRow("#").endRow(rowStr(150)).read();
checkSummaries(summaries, Map.of("A&B", 800L, "A&B&C", 800L), 1);
// row ranges entirely outside the data yield an empty counter map
summaries = RFile.summaries().from(testFile, testFile2).withFileSystem(localFs)
.selectSummaries(sc -> sc.equals(sc1)).startRow(rowStr(199)).read();
checkSummaries(summaries, Map.of(), 0);
summaries = RFile.summaries().from(testFile, testFile2).withFileSystem(localFs)
.selectSummaries(sc -> sc.equals(sc1)).startRow(rowStr(200)).read();
checkSummaries(summaries, Map.of(), 0);
summaries = RFile.summaries().from(testFile, testFile2).withFileSystem(localFs)
.selectSummaries(sc -> sc.equals(sc1)).endRow("#").read();
checkSummaries(summaries, Map.of(), 0);
summaries = RFile.summaries().from(testFile, testFile2).withFileSystem(localFs)
.selectSummaries(sc -> sc.equals(sc1)).endRow(rowStr(0)).read();
checkSummaries(summaries, Map.of("A&B", 400L, "A&B&C", 400L), 1);
}
// Asserts that exactly one summary is present, that it comes from the
// visibility summarizer with the expected counters, and that its file
// statistics report the given number of inaccurate/extra files out of two.
private void checkSummaries(Collection<Summary> summaries, Map<String,Long> expected, int extra) {
  assertEquals(1, summaries.size());
  Summary summary = summaries.iterator().next();
  assertEquals(extra, summary.getFileStatistics().getInaccurate());
  assertEquals(extra, summary.getFileStatistics().getExtra());
  assertEquals(2, summary.getFileStatistics().getTotal());
  String className = summary.getSummarizerConfiguration().getClassName();
  if (VisibilitySummarizer.class.getName().equals(className)) {
    assertEquals(expected, new CounterSummary(summary).getCounters());
  } else {
    fail("Unexpected classname " + className);
  }
}
@Test
public void testOutOfOrder() throws Exception {
  // test that exception declared in API is thrown when keys are appended out
  // of sorted order
  Key k1 = new Key("r1", "f1", "q1");
  Key k2 = new Key("r2", "f1", "q1");
  Value v1 = new Value("1");
  Value v2 = new Value("2");
  LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
  String testFile = createTmpTestFile();
  try (RFileWriter writer = RFile.newWriter().to(testFile).withFileSystem(localFs).build()) {
    writer.append(k2, v2);
    // k1 sorts before the already-appended k2
    assertThrows(IllegalArgumentException.class, () -> writer.append(k1, v1));
  }
}
@Test
public void testOutOfOrderIterable() throws Exception {
  // test that exception declared in API is thrown when an iterable of entries
  // is appended out of sorted order
  ArrayList<Entry<Key,Value>> data = new ArrayList<>();
  data.add(new AbstractMap.SimpleEntry<>(new Key("r2", "f1", "q1"), new Value("2")));
  data.add(new AbstractMap.SimpleEntry<>(new Key("r1", "f1", "q1"), new Value("1")));
  LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
  String testFile = createTmpTestFile();
  try (RFileWriter writer = RFile.newWriter().to(testFile).withFileSystem(localFs).build()) {
    assertThrows(IllegalArgumentException.class, () -> writer.append(data));
  }
}
@Test
public void testBadVis() throws Exception {
  // this test has two purposes: ensure an exception is thrown for a malformed
  // visibility, and ensure it is the exception documented in the javadoc
  LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
  String testFile = createTmpTestFile();
  try (RFileWriter writer = RFile.newWriter().to(testFile).withFileSystem(localFs).build()) {
    writer.startDefaultLocalityGroup();
    // "(A&(B" has unbalanced parentheses
    Key badVisKey = new Key("r1", "f1", "q1", "(A&(B");
    assertThrows(IllegalArgumentException.class, () -> writer.append(badVisKey, new Value("")));
  }
}
@Test
public void testBadVisIterable() throws Exception {
  // same malformed-visibility check as testBadVis, but via append(iterable)
  LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
  String testFile = createTmpTestFile();
  try (RFileWriter writer = RFile.newWriter().to(testFile).withFileSystem(localFs).build()) {
    writer.startDefaultLocalityGroup();
    Entry<Key,Value> badEntry =
        new AbstractMap.SimpleEntry<>(new Key("r1", "f1", "q1", "(A&(B"), new Value(""));
    assertThrows(IllegalArgumentException.class,
        () -> writer.append(Collections.singletonList(badEntry)));
  }
}
@Test
public void testDoubleStart() throws Exception {
  // starting the default locality group twice is an illegal state
  LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
  String testFile = createTmpTestFile();
  try (RFileWriter writer = RFile.newWriter().to(testFile).withFileSystem(localFs).build()) {
    writer.startDefaultLocalityGroup();
    assertThrows(IllegalStateException.class, writer::startDefaultLocalityGroup);
  }
}
@Test
public void testAppendStartDefault() throws Exception {
  // once data has been appended, the default locality group can no longer be
  // started
  LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
  String testFile = createTmpTestFile();
  try (RFileWriter writer = RFile.newWriter().to(testFile).withFileSystem(localFs).build()) {
    writer.append(new Key("r1", "f1", "q1"), new Value("1"));
    assertThrows(IllegalStateException.class, writer::startDefaultLocalityGroup);
  }
}
@Test
public void testStartAfter() throws Exception {
  // a new named locality group cannot be started after data has been appended
  LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
  String testFile = createTmpTestFile();
  try (RFileWriter writer = RFile.newWriter().to(testFile).withFileSystem(localFs).build()) {
    writer.append(new Key("r1", "f1", "q1"), new Value(""));
    assertThrows(IllegalStateException.class, () -> writer.startNewLocalityGroup("lg1", "fam1"));
  }
}
@Test
public void testIllegalColumn() throws Exception {
  // a locality group restricted to family "fam1" must reject other families
  LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
  String testFile = createTmpTestFile();
  try (RFileWriter writer = RFile.newWriter().to(testFile).withFileSystem(localFs).build()) {
    writer.startNewLocalityGroup("lg1", "fam1");
    // should not be able to append the column family f1
    Key wrongFamily = new Key("r1", "f1", "q1");
    assertThrows(IllegalArgumentException.class, () -> writer.append(wrongFamily, new Value("")));
  }
}
@Test
public void testWrongGroup() throws Exception {
  // a family assigned to a named locality group cannot later be appended to
  // the default locality group
  LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
  String testFile = createTmpTestFile();
  try (RFileWriter writer = RFile.newWriter().to(testFile).withFileSystem(localFs).build()) {
    writer.startNewLocalityGroup("lg1", "fam1");
    writer.append(new Key("r1", "fam1", "q1"), new Value(""));
    writer.startDefaultLocalityGroup();
    // should not be able to append the column family fam1 to default locality group
    Key groupedFamily = new Key("r1", "fam1", "q2");
    assertThrows(IllegalArgumentException.class,
        () -> writer.append(groupedFamily, new Value("")));
  }
}
/**
 * Opens the given file directly through the low-level file-operations layer, bypassing the public
 * scanner API, and returns the raw RFile iterator. Uses the default table configuration and no
 * encryption.
 */
private RFileSKVIterator getReader(LocalFileSystem localFs, UnreferencedTabletFile testFile)
throws IOException {
return (RFileSKVIterator) FileOperations.getInstance().newReaderBuilder()
.forFile(testFile, localFs, localFs.getConf(), NoCryptoServiceFactory.NONE)
.withTableConfiguration(DefaultConfiguration.getInstance()).build();
}
@Test
public void testMultipleFilesAndCache() throws Exception {
  // Partition the test data across three files by key hash, then verify a
  // scanner over all three files (with caches enabled) sees the full data set.
  SortedMap<Key,Value> testData = createTestData(100, 10, 10);
  List<String> files =
      Arrays.asList(createTmpTestFile(), createTmpTestFile(), createTmpTestFile());
  LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
  for (int i = 0; i < files.size(); i++) {
    try (
        RFileWriter writer = RFile.newWriter().to(files.get(i)).withFileSystem(localFs).build()) {
      for (Entry<Key,Value> entry : testData.entrySet()) {
        // each entry goes to the file selected by its key's hash
        // NOTE(review): assumes these keys' hashCodes are non-negative; a
        // negative hash would match no file — confirm
        if (entry.getKey().hashCode() % files.size() == i) {
          writer.append(entry.getKey(), entry.getValue());
        }
      }
    }
  }
  // idiom: toArray(new String[0]) preferred over presizing; scanner closed via
  // try-with-resources even if the assertion fails
  try (Scanner scanner = RFile.newScanner().from(files.toArray(new String[0]))
      .withFileSystem(localFs).withIndexCache(1000000).withDataCache(10000000).build()) {
    assertEquals(testData, toMap(scanner));
  }
}
}
| 9,456 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/lexicoder/IntegerLexicoderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.lexicoder;
import java.util.Arrays;
import org.apache.accumulo.core.clientImpl.lexicoder.AbstractLexicoderTest;
import org.junit.jupiter.api.Test;
public class IntegerLexicoderTest extends AbstractLexicoderTest {

  // single lexicoder instance shared by the assertions below
  private final IntegerLexicoder lexicoder = new IntegerLexicoder();

  @Test
  public void testSortOrder() {
    // lexicographic order of the encodings must match numeric order of the
    // values, including negatives expressed as hex int literals
    assertSortOrder(lexicoder, Arrays.asList(Integer.MIN_VALUE, 0xff123456, 0xffff3456,
        0xffffff56, -1, 0, 1, 0x12, 0x1234, 0x123456, 0x1234678, Integer.MAX_VALUE));
  }

  @Test
  public void testDecode() {
    // round-trip encode/decode for boundary and representative values
    for (int value : new int[] {Integer.MIN_VALUE, -1, 0, 1, Integer.MAX_VALUE}) {
      assertDecodes(lexicoder, value);
    }
  }
}
| 9,457 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/lexicoder/ListLexicoderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.lexicoder;
import static java.util.Collections.emptyList;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.ArrayList;
import java.util.List;
import java.util.TreeSet;
import org.apache.accumulo.core.clientImpl.lexicoder.AbstractLexicoderTest;
import org.apache.accumulo.core.util.TextUtil;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
public class ListLexicoderTest extends AbstractLexicoderTest {

  // fixture lists, re-created before every test by setUp()
  private List<Long> data1;
  private List<Long> data2;
  private List<Long> data3;
  private List<Long> data4;
  private List<Long> data5;

  // builds a mutable list of boxed longs from the given values
  private static List<Long> longs(long... values) {
    List<Long> list = new ArrayList<>();
    for (long v : values) {
      list.add(v);
    }
    return list;
  }

  @BeforeEach
  public void setUp() {
    data1 = longs(1, 2);
    data2 = longs(1);
    data3 = longs(1, 3);
    data4 = longs(1, 2, 3);
    data5 = longs(2, 1);
  }

  @Test
  public void testSortOrder() {
    // lists added in their expected sort order
    List<List<Long>> data = new ArrayList<>();
    data.add(data2);
    data.add(data1);
    data.add(data4);
    data.add(data3);
    data.add(data5);
    ListLexicoder<Long> listLexicoder = new ListLexicoder<>(new LongLexicoder());
    // Text sorts by raw bytes, so the TreeSet orders the encodings
    TreeSet<Text> sortedEnc = new TreeSet<>();
    for (List<Long> list : data) {
      sortedEnc.add(new Text(listLexicoder.encode(list)));
    }
    List<List<Long>> decoded = new ArrayList<>();
    for (Text enc : sortedEnc) {
      decoded.add(listLexicoder.decode(TextUtil.getBytes(enc)));
    }
    // sorting the encoded forms must reproduce the expected order
    assertEquals(data, decoded);
  }

  @Test
  public void testDecodes() {
    // every fixture list must survive an encode/decode round trip
    for (List<Long> list : List.of(data1, data2, data3, data4, data5)) {
      assertDecodes(new ListLexicoder<>(new LongLexicoder()), list);
    }
  }

  @Test
  public void testRejectsEmptyLists() {
    assertThrows(IllegalArgumentException.class,
        () -> new ListLexicoder<>(new LongLexicoder()).encode(emptyList()));
  }
}
| 9,458 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/lexicoder/DoubleLexicoderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.lexicoder;
import java.util.Arrays;
import org.apache.accumulo.core.clientImpl.lexicoder.AbstractLexicoderTest;
import org.junit.jupiter.api.Test;
public class DoubleLexicoderTest extends AbstractLexicoderTest {

  @Test
  public void testSortOrder() {
    // encoded byte order must match numeric order across extremes, both
    // infinities, and values adjacent to the boundaries
    assertSortOrder(new DoubleLexicoder(),
        Arrays.asList(Double.MIN_VALUE, Double.MAX_VALUE, Double.NEGATIVE_INFINITY,
            Double.POSITIVE_INFINITY, 0.0, 0.01, 0.001, 1.0, -1.0, -1.1, -1.01,
            Math.nextUp(Double.NEGATIVE_INFINITY), Math.nextAfter(0.0, Double.NEGATIVE_INFINITY),
            Math.nextAfter(Double.MAX_VALUE, Double.NEGATIVE_INFINITY), Math.pow(10.0, 30.0) * -1.0,
            Math.pow(10.0, 30.0), Math.pow(10.0, -30.0) * -1.0, Math.pow(10.0, -30.0)));
  }

  @Test
  public void testDecode() {
    // round-trip encode/decode for boundary and representative values
    DoubleLexicoder lexicoder = new DoubleLexicoder();
    for (double value : new double[] {Double.MIN_VALUE, -1.0, -Math.pow(10.0, -30.0), 0.0,
        Math.pow(10.0, -30.0), 1.0, Double.MAX_VALUE}) {
      assertDecodes(lexicoder, value);
    }
  }
}
| 9,459 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/lexicoder/UUIDLexicoderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.lexicoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.UUID;
import org.apache.accumulo.core.clientImpl.lexicoder.AbstractLexicoderTest;
import org.junit.jupiter.api.Test;
public class UUIDLexicoderTest extends AbstractLexicoderTest {

  @Test
  public void testSortOrder() {
    UUIDLexicoder lexicoder = new UUIDLexicoder();
    // arbitrary random UUIDs
    assertSortOrder(lexicoder, Arrays.asList(UUID.randomUUID(), UUID.randomUUID(),
        UUID.randomUUID(), UUID.randomUUID(), UUID.randomUUID(), UUID.randomUUID()));
    // a dense grid of UUIDs straddling the sign boundaries of both the most
    // and least significant 64-bit halves
    ArrayList<UUID> uuids = new ArrayList<>();
    for (long ms = -260L; ms < 260L; ms++) {
      for (long ls = -2L; ls < 2; ls++) {
        uuids.add(new UUID(ms, ls));
      }
    }
    assertSortOrder(lexicoder, uuids);
  }

  @Test
  public void testDecodes() {
    // round-trip a random UUID through encode/decode
    assertDecodes(new UUIDLexicoder(), UUID.randomUUID());
  }
}
| 9,460 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/lexicoder/BigIntegerLexicoderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.lexicoder;
import java.math.BigInteger;
import java.util.Arrays;
import org.apache.accumulo.core.clientImpl.lexicoder.AbstractLexicoderTest;
import org.junit.jupiter.api.Test;
public class BigIntegerLexicoderTest extends AbstractLexicoderTest {

  @Test
  public void testSortOrder() {
    // values straddling the byte, short, and int encoding boundaries, in both
    // signs; assertSortOrder sorts internally, so input order is irrelevant
    String[] values = {"-1", "0", "1", "-257", "-256", "-255", "255", "256", "257", "65534",
        "65535", "65536", "65537", "-65534", "-65535", "-65536", "-65537", "2147483648",
        "2147483647", "2147483649", "-2147483648", "-2147483647", "-2147483649", "32768", "32769",
        "32767", "-32768", "-32769", "-32767", "126", "127", "128", "129", "-126", "-127", "-128",
        "-129"};
    BigInteger[] data = new BigInteger[values.length];
    for (int i = 0; i < values.length; i++) {
      data[i] = new BigInteger(values[i]);
    }
    assertSortOrder(new BigIntegerLexicoder(), Arrays.asList(data));
  }

  @Test
  public void testDecode() {
    // round-trip encode/decode for boundary and representative values
    BigIntegerLexicoder lexicoder = new BigIntegerLexicoder();
    assertDecodes(lexicoder, new BigInteger("-2147483649"));
    assertDecodes(lexicoder, new BigInteger("-1"));
    assertDecodes(lexicoder, BigInteger.ZERO);
    assertDecodes(lexicoder, BigInteger.ONE);
    assertDecodes(lexicoder, new BigInteger("2147483647"));
  }
}
| 9,461 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/lexicoder/LexicoderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.lexicoder;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.apache.accumulo.core.util.TextUtil;
import org.apache.hadoop.io.Text;
public abstract class LexicoderTest {

  // charset fix: use an explicit charset instead of the platform default;
  // "start"/"end" are ASCII, so the bytes are unchanged
  public static final byte[] START_PAD = "start".getBytes(StandardCharsets.UTF_8);
  public static final byte[] END_PAD = "end".getBytes(StandardCharsets.UTF_8);

  /**
   * Asserts two byte arrays are equal, wrapping them as {@code Text} so failure messages show
   * readable content.
   *
   * @param ba1 the expected bytes
   * @param ba2 the bytes produced by the code under test
   */
  protected void assertEqualsB(byte[] ba1, byte[] ba2) {
    // bug fix: the arguments were previously passed to assertEquals in
    // (actual, expected) order, producing misleading failure messages
    assertEquals(new Text(ba1), new Text(ba2));
  }

  /**
   * Asserts that sorting the encoded forms of {@code data} (by raw byte order via {@code Text})
   * yields the same sequence as sorting the values themselves.
   *
   * @param lexicoder the lexicoder under test
   * @param comp comparator defining the expected value order, or null for natural ordering
   * @param data the values to encode and sort
   */
  public <T extends Comparable<T>> void assertSortOrder(Lexicoder<T> lexicoder, Comparator<T> comp,
      List<T> data) {
    List<T> list = new ArrayList<>();
    List<Text> encList = new ArrayList<>();
    for (T d : data) {
      list.add(d);
      encList.add(new Text(lexicoder.encode(d)));
    }
    if (comp != null) {
      list.sort(comp);
    } else {
      Collections.sort(list);
    }
    // Text compares by raw bytes, so this orders the encodings lexicographically
    Collections.sort(encList);
    List<T> decodedList = new ArrayList<>();
    for (Text t : encList) {
      decodedList.add(lexicoder.decode(TextUtil.getBytes(t)));
    }
    assertEquals(list, decodedList);
  }

  /** Same as {@link #assertSortOrder(Lexicoder, Comparator, List)} using natural ordering. */
  public <T extends Comparable<T>> void assertSortOrder(Lexicoder<T> lexicoder, List<T> data) {
    assertSortOrder(lexicoder, null, data);
  }

  /** Asserts a value can be encoded and decoded back to original value */
  public static <T> void assertDecodes(Lexicoder<T> lexicoder, T expected) {
    byte[] encoded = lexicoder.encode(expected);
    // decode full array
    T result = lexicoder.decode(encoded);
    assertEquals(expected, result);
  }

  /** Byte-array variant of {@link #assertDecodes(Lexicoder, Object)}. */
  public void assertDecodesB(Lexicoder<byte[]> lexicoder, byte[] expected) {
    byte[] encoded = lexicoder.encode(expected);
    // decode full array
    byte[] result = lexicoder.decode(encoded);
    assertEqualsB(expected, result);
  }
}
| 9,462 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/lexicoder/FloatLexicoderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.lexicoder;
import java.util.Arrays;
import org.apache.accumulo.core.clientImpl.lexicoder.AbstractLexicoderTest;
import org.junit.jupiter.api.Test;
public class FloatLexicoderTest extends AbstractLexicoderTest {

  /**
   * Encoded floats must sort byte-lexicographically in the natural float order, across the
   * infinities, their nearest finite neighbors, and values straddling zero.
   */
  @Test
  public void testSortOrder() {
    FloatLexicoder coder = new FloatLexicoder();
    assertSortOrder(coder,
        Arrays.asList(Float.MIN_VALUE, Float.MAX_VALUE, Float.NEGATIVE_INFINITY,
            Float.POSITIVE_INFINITY, 0.0F, 0.01F, 0.001F, 1.0F, -1.0F, -1.1F, -1.01F,
            Math.nextUp(Float.NEGATIVE_INFINITY), Math.nextAfter(0.0F, Float.NEGATIVE_INFINITY),
            Math.nextAfter(Float.MAX_VALUE, Float.NEGATIVE_INFINITY)));
  }

  /** Representative values, including neighbors of the infinities, must round-trip unchanged. */
  @Test
  public void testDecode() {
    FloatLexicoder coder = new FloatLexicoder();
    float[] samples = {Float.MIN_VALUE, Math.nextUp(Float.NEGATIVE_INFINITY), -1.0F, 0.0F, 1.0F,
        Math.nextAfter(Float.POSITIVE_INFINITY, 0.0F), Float.MAX_VALUE};
    for (float sample : samples) {
      assertDecodes(coder, sample);
    }
  }
}
| 9,463 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/lexicoder/ULongLexicoderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.lexicoder;
import java.util.Arrays;
import org.apache.accumulo.core.clientImpl.lexicoder.AbstractLexicoderTest;
import org.junit.jupiter.api.Test;
public class ULongLexicoderTest extends AbstractLexicoderTest {

  /**
   * Spot-checks the exact encoded bytes of the unsigned-long encoding. As the assertions below
   * show, the format is a leading count byte followed by the value's remaining significant bytes:
   * leading 0x00 bytes are dropped with count bytes 0x00-0x08 (count = bytes kept), while leading
   * 0xff bytes are dropped with count bytes 0x09-0x10, so -1L (all 0xff bytes) encodes to the
   * single byte 16.
   */
  @Test
  public void testEncoding() {
    ULongLexicoder ull = new ULongLexicoder();

    // values with leading 0x00 bytes: count byte is the number of significant bytes kept
    assertEqualsB(ull.encode(0L), new byte[] {0x00});
    assertEqualsB(ull.encode(0x01L), new byte[] {0x01, 0x01});
    assertEqualsB(ull.encode(0x1234L), new byte[] {0x02, 0x12, 0x34});
    assertEqualsB(ull.encode(0x123456L), new byte[] {0x03, 0x12, 0x34, 0x56});
    assertEqualsB(ull.encode(0x12345678L), new byte[] {0x04, 0x12, 0x34, 0x56, 0x78});
    assertEqualsB(ull.encode(0x1234567890L),
        new byte[] {0x05, 0x12, 0x34, 0x56, 0x78, (byte) 0x90});
    assertEqualsB(ull.encode(0x1234567890abL),
        new byte[] {0x06, 0x12, 0x34, 0x56, 0x78, (byte) 0x90, (byte) 0xab});
    assertEqualsB(ull.encode(0x1234567890abcdL),
        new byte[] {0x07, 0x12, 0x34, 0x56, 0x78, (byte) 0x90, (byte) 0xab, (byte) 0xcd});
    assertEqualsB(ull.encode(0x1234567890abcdefL), new byte[] {0x08, 0x12, 0x34, 0x56, 0x78,
        (byte) 0x90, (byte) 0xab, (byte) 0xcd, (byte) 0xef});

    // values with leading 0xff bytes: those bytes are dropped, count bytes run 0x09-0x10
    assertEqualsB(ull.encode(0xff34567890abcdefL),
        new byte[] {0x09, 0x34, 0x56, 0x78, (byte) 0x90, (byte) 0xab, (byte) 0xcd, (byte) 0xef});
    assertEqualsB(ull.encode(0xffff567890abcdefL),
        new byte[] {0x0a, 0x56, 0x78, (byte) 0x90, (byte) 0xab, (byte) 0xcd, (byte) 0xef});
    assertEqualsB(ull.encode(0xffffff7890abcdefL),
        new byte[] {0x0b, 0x78, (byte) 0x90, (byte) 0xab, (byte) 0xcd, (byte) 0xef});
    assertEqualsB(ull.encode(0xffffffff90abcdefL),
        new byte[] {0x0c, (byte) 0x90, (byte) 0xab, (byte) 0xcd, (byte) 0xef});
    assertEqualsB(ull.encode(0xffffffffffabcdefL),
        new byte[] {0x0d, (byte) 0xab, (byte) 0xcd, (byte) 0xef});
    assertEqualsB(ull.encode(0xffffffffffffcdefL), new byte[] {0x0e, (byte) 0xcd, (byte) 0xef});
    assertEqualsB(ull.encode(0xffffffffffffffefL), new byte[] {0x0f, (byte) 0xef});
    assertEqualsB(ull.encode(-1L), new byte[] {16});
  }

  /** Encoded non-negative values must sort byte-lexicographically in numeric order. */
  @Test
  public void testSortOrder() {
    // only testing non negative
    assertSortOrder(new ULongLexicoder(), Arrays.asList(0L, 0x01L, 0x1234L, 0x123456L, 0x12345678L,
        0x1234567890L, 0x1234567890abL, 0x1234567890abcdL, 0x1234567890abcdefL, Long.MAX_VALUE));
  }

  /** Boundary and small values, including negatives, must round-trip through encode/decode. */
  @Test
  public void testDecodes() {
    assertDecodes(new ULongLexicoder(), Long.MIN_VALUE);
    assertDecodes(new ULongLexicoder(), -1L);
    assertDecodes(new ULongLexicoder(), 0L);
    assertDecodes(new ULongLexicoder(), 1L);
    assertDecodes(new ULongLexicoder(), 2L);
    assertDecodes(new ULongLexicoder(), Long.MAX_VALUE);
  }
}
| 9,464 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/lexicoder/LongLexicoderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.lexicoder;
import java.util.Arrays;
import org.apache.accumulo.core.clientImpl.lexicoder.AbstractLexicoderTest;
import org.junit.jupiter.api.Test;
public class LongLexicoderTest extends AbstractLexicoderTest {

  /**
   * Encoded longs must sort byte-lexicographically in numeric order, covering negatives with
   * varying runs of leading 0xff bytes and positives with varying significant-byte counts.
   */
  @Test
  public void testSortOrder() {
    LongLexicoder coder = new LongLexicoder();
    assertSortOrder(coder,
        Arrays.asList(Long.MIN_VALUE, 0xff1234567890abcdL, 0xffff1234567890abL, 0xffffff567890abcdL,
            0xffffffff7890abcdL, 0xffffffffff90abcdL, 0xffffffffffffabcdL, 0xffffffffffffffcdL, -1L,
            0L, 0x01L, 0x1234L, 0x123456L, 0x12345678L, 0x1234567890L, 0x1234567890abL,
            0x1234567890abcdL, 0x1234567890abcdefL, Long.MAX_VALUE));
  }

  /** Boundary and small values must survive an encode/decode round trip unchanged. */
  @Test
  public void testDecodes() {
    LongLexicoder coder = new LongLexicoder();
    for (long value : new long[] {Long.MIN_VALUE, -1L, 0L, 1L, 2L, Long.MAX_VALUE}) {
      assertDecodes(coder, value);
    }
  }
}
| 9,465 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/lexicoder/SequenceLexicoderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.lexicoder;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.ArrayList;
import java.util.List;
import java.util.TreeSet;
import org.apache.accumulo.core.clientImpl.lexicoder.AbstractLexicoderTest;
import org.apache.accumulo.core.util.TextUtil;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for {@link SequenceLexicoder}.
 */
public class SequenceLexicoderTest extends AbstractLexicoderTest {

  // Fixtures chosen so that element-wise ordering, sequence-length differences, the empty
  // sequence, and the sequence holding only the empty string are all distinguishable.
  private final List<String> nodata = emptyList();
  private final List<String> data0 = singletonList("");
  private final List<String> data1 = asList("a", "b");
  private final List<String> data2 = singletonList("a");
  private final List<String> data3 = asList("a", "c");
  private final List<String> data4 = asList("a", "b", "c");
  private final List<String> data5 = asList("b", "a");

  /**
   * Encodes all fixtures, sorts the encodings byte-lexicographically, decodes them back, and
   * checks the result matches the expected logical ordering of the sequences.
   */
  @Test
  public void testSortOrder() {
    // expected sort order
    final List<List<String>> data = asList(nodata, data0, data2, data1, data4, data3, data5);
    final TreeSet<Text> sortedEnc = new TreeSet<>();
    final SequenceLexicoder<String> sequenceLexicoder =
        new SequenceLexicoder<>(new StringLexicoder());
    for (final List<String> list : data) {
      sortedEnc.add(new Text(sequenceLexicoder.encode(list)));
    }
    final List<List<String>> unenc = new ArrayList<>();
    for (final Text enc : sortedEnc) {
      unenc.add(sequenceLexicoder.decode(TextUtil.getBytes(enc)));
    }
    assertEquals(data, unenc);
  }

  /** Every fixture must survive an encode/decode round trip unchanged. */
  @Test
  public void testDecodes() {
    assertDecodes(new SequenceLexicoder<>(new StringLexicoder()), nodata);
    assertDecodes(new SequenceLexicoder<>(new StringLexicoder()), data0);
    assertDecodes(new SequenceLexicoder<>(new StringLexicoder()), data1);
    assertDecodes(new SequenceLexicoder<>(new StringLexicoder()), data2);
    assertDecodes(new SequenceLexicoder<>(new StringLexicoder()), data3);
    assertDecodes(new SequenceLexicoder<>(new StringLexicoder()), data4);
    assertDecodes(new SequenceLexicoder<>(new StringLexicoder()), data5);
  }

  /**
   * Decoding must reject input with trailing bytes that do not form a complete element.
   * (Renamed from the typo "tesRejectsTrailingBytes"; JUnit discovers tests by annotation,
   * so the rename is safe.)
   */
  @Test
  public void testRejectsTrailingBytes() {
    assertThrows(IllegalArgumentException.class,
        () -> new SequenceLexicoder<>(new StringLexicoder()).decode(new byte[] {10}));
  }
}
| 9,466 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/lexicoder/PairLexicoderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.lexicoder;
import java.util.Arrays;
import org.apache.accumulo.core.clientImpl.lexicoder.AbstractLexicoderTest;
import org.apache.accumulo.core.util.ComparablePair;
import org.junit.jupiter.api.Test;
public class PairLexicoderTest extends AbstractLexicoderTest {

  /**
   * Encoded pairs must sort first by the first element's encoding and then by the second's,
   * matching {@link ComparablePair}'s natural order, for both string/string and long/string pairs.
   */
  @Test
  public void testSortOrder() {
    PairLexicoder<String,String> stringPairCoder =
        new PairLexicoder<>(new StringLexicoder(), new StringLexicoder());
    assertSortOrder(stringPairCoder,
        Arrays.asList(new ComparablePair<>("a", "b"), new ComparablePair<>("a", "bc"),
            new ComparablePair<>("a", "c"), new ComparablePair<>("ab", "c"),
            new ComparablePair<>("ab", ""), new ComparablePair<>("ab", "d"),
            new ComparablePair<>("b", "f"), new ComparablePair<>("b", "a")));

    PairLexicoder<Long,String> longStringPairCoder =
        new PairLexicoder<>(new LongLexicoder(), new StringLexicoder());
    assertSortOrder(longStringPairCoder,
        Arrays.asList(new ComparablePair<>(0x100L, "a"), new ComparablePair<>(0x100L, "ab"),
            new ComparablePair<>(0xf0L, "a"), new ComparablePair<>(0xf0L, "ab")));
  }

  /** A pair must survive an encode/decode round trip unchanged. */
  @Test
  public void testDecodes() {
    PairLexicoder<String,String> stringPairCoder =
        new PairLexicoder<>(new StringLexicoder(), new StringLexicoder());
    assertDecodes(stringPairCoder, new ComparablePair<>("a", "b"));
  }
}
| 9,467 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/lexicoder/UIntegerLexicoderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.lexicoder;
import org.apache.accumulo.core.clientImpl.lexicoder.AbstractLexicoderTest;
import org.junit.jupiter.api.Test;
public class UIntegerLexicoderTest extends AbstractLexicoderTest {

  /**
   * Spot-checks the exact encoded bytes of the unsigned-int encoding. As the assertions below
   * show, the format is a leading count byte followed by the remaining significant bytes:
   * leading 0x00 bytes are dropped with count bytes 0x00-0x04 (count = bytes kept), while
   * leading 0xff bytes are dropped with count bytes 0x05-0x08, so -1 (all 0xff bytes) encodes
   * to the single byte 0x08.
   */
  @Test
  public void testEncoding() {
    UIntegerLexicoder uil = new UIntegerLexicoder();

    // values with leading 0x00 bytes: count byte is the number of significant bytes kept
    assertEqualsB(uil.encode(0), new byte[] {0x00});
    assertEqualsB(uil.encode(0x01), new byte[] {0x01, 0x01});
    assertEqualsB(uil.encode(0x0102), new byte[] {0x02, 0x01, 0x02});
    assertEqualsB(uil.encode(0x010203), new byte[] {0x03, 0x01, 0x02, 0x03});
    assertEqualsB(uil.encode(0x01020304), new byte[] {0x04, 0x01, 0x02, 0x03, 0x04});

    // values with leading 0xff bytes: those bytes are dropped, count bytes run 0x05-0x08
    assertEqualsB(uil.encode(0xff020304), new byte[] {0x05, 0x02, 0x03, 0x04});
    assertEqualsB(uil.encode(0xffff0304), new byte[] {0x06, 0x03, 0x04});
    assertEqualsB(uil.encode(0xffffff04), new byte[] {0x07, 0x04});
    assertEqualsB(uil.encode(-1), new byte[] {0x08});
  }

  /** Boundary values, including negatives, must round-trip through encode/decode. */
  @Test
  public void testDecode() {
    assertDecodes(new UIntegerLexicoder(), Integer.MIN_VALUE);
    assertDecodes(new UIntegerLexicoder(), -1);
    assertDecodes(new UIntegerLexicoder(), 0);
    assertDecodes(new UIntegerLexicoder(), 1);
    assertDecodes(new UIntegerLexicoder(), Integer.MAX_VALUE);
  }
}
| 9,468 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/lexicoder/TextLexicoderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.lexicoder;
import org.apache.accumulo.core.clientImpl.lexicoder.AbstractLexicoderTest;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
public class TextLexicoderTest extends AbstractLexicoderTest {

  /** Text values, empty and non-empty, must survive an encode/decode round trip unchanged. */
  @Test
  public void testDecode() {
    TextLexicoder textCoder = new TextLexicoder();
    assertDecodes(textCoder, new Text(""));
    assertDecodes(textCoder, new Text("accumulo"));
  }
}
| 9,469 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/lexicoder/StringLexicoderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.lexicoder;
import org.apache.accumulo.core.clientImpl.lexicoder.AbstractLexicoderTest;
import org.junit.jupiter.api.Test;
public class StringLexicoderTest extends AbstractLexicoderTest {

  /** Strings, including the empty string, must survive an encode/decode round trip unchanged. */
  @Test
  public void testDecode() {
    StringLexicoder stringCoder = new StringLexicoder();
    for (String sample : new String[] {"", "0", "accumulo"}) {
      assertDecodes(stringCoder, sample);
    }
  }
}
| 9,470 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/lexicoder/BytesLexicoderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.lexicoder;
import org.apache.accumulo.core.clientImpl.lexicoder.AbstractLexicoderTest;
import org.junit.jupiter.api.Test;
public class BytesLexicoderTest extends AbstractLexicoderTest {

  /** Byte arrays, including the empty array, must survive an encode/decode round trip. */
  @Test
  public void testDecodes() {
    BytesLexicoder byteCoder = new BytesLexicoder();
    assertDecodesB(byteCoder, new byte[] {});
    assertDecodesB(byteCoder, "accumulo".getBytes());
  }
}
| 9,471 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/lexicoder/ReverseLexicoderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.lexicoder;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import org.apache.accumulo.core.clientImpl.lexicoder.AbstractLexicoderTest;
import org.junit.jupiter.api.Test;
public class ReverseLexicoderTest extends AbstractLexicoderTest {

  /**
   * Wrapping a lexicoder in {@link ReverseLexicoder} must flip the encoded sort order: the
   * encodings sort byte-lexicographically in the reverse of the wrapped type's natural order.
   */
  @Test
  public void testSortOrder() {
    Comparator<Long> comp = Collections.reverseOrder();
    assertSortOrder(new ReverseLexicoder<>(new LongLexicoder()), comp,
        Arrays.asList(Long.MIN_VALUE, 0xff1234567890abcdL, 0xffff1234567890abL, 0xffffff567890abcdL,
            0xffffffff7890abcdL, 0xffffffffff90abcdL, 0xffffffffffffabcdL, 0xffffffffffffffcdL, -1L,
            0L, 0x01L, 0x1234L, 0x123456L, 0x12345678L, 0x1234567890L, 0x1234567890abL,
            0x1234567890abcdL, 0x1234567890abcdefL, Long.MAX_VALUE));

    Comparator<String> comp2 = Collections.reverseOrder();
    assertSortOrder(new ReverseLexicoder<>(new StringLexicoder()), comp2,
        Arrays.asList("a", "aa", "ab", "b", "aab"));
  }

  /**
   * Simple test verifying reverse indexed dates, including a date prior to the epoch
   * (regression coverage for ACCUMULO-3385). The leftover debug tail that truncated "now" to
   * the hour and printed it asserted nothing, so it has been removed.
   */
  @Test
  public void testReverseSortDates() {
    ReverseLexicoder<Date> revLex = new ReverseLexicoder<>(new DateLexicoder());

    Calendar cal = Calendar.getInstance();
    cal.set(1920, 1, 2, 3, 4, 5); // create an instance prior to 1970 for ACCUMULO-3385
    Date date0 = new Date(cal.getTimeInMillis());

    Date date1 = new Date();
    Date date2 = new Date(System.currentTimeMillis() + 10000);
    Date date3 = new Date(System.currentTimeMillis() + 500);

    Comparator<Date> comparator = Collections.reverseOrder();
    assertSortOrder(revLex, comparator, Arrays.asList(date0, date1, date2, date3));
  }

  /** Boundary long values must round-trip through the reverse wrapper unchanged. */
  @Test
  public void testDecodes() {
    assertDecodes(new ReverseLexicoder<>(new LongLexicoder()), Long.MIN_VALUE);
    assertDecodes(new ReverseLexicoder<>(new LongLexicoder()), -1L);
    assertDecodes(new ReverseLexicoder<>(new LongLexicoder()), 0L);
    assertDecodes(new ReverseLexicoder<>(new LongLexicoder()), 1L);
    assertDecodes(new ReverseLexicoder<>(new LongLexicoder()), Long.MAX_VALUE);
  }
}
| 9,472 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/lexicoder/DateLexicoderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.lexicoder;
import java.util.Date;
import org.apache.accumulo.core.clientImpl.lexicoder.AbstractLexicoderTest;
import org.junit.jupiter.api.Test;
public class DateLexicoderTest extends AbstractLexicoderTest {

  /** Dates must round-trip: "now", the epoch, and the maximum representable instant. */
  @Test
  public void testDecode() {
    DateLexicoder dateCoder = new DateLexicoder();
    for (Date sample : new Date[] {new Date(), new Date(0), new Date(Long.MAX_VALUE)}) {
      assertDecodes(dateCoder, sample);
    }
  }
}
| 9,473 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/summary/CountingSummarizerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.summary;
import static org.apache.accumulo.core.client.summary.CountingSummarizer.COUNTER_STAT_PREFIX;
import static org.apache.accumulo.core.client.summary.CountingSummarizer.DELETES_IGNORED_STAT;
import static org.apache.accumulo.core.client.summary.CountingSummarizer.EMITTED_STAT;
import static org.apache.accumulo.core.client.summary.CountingSummarizer.INGNORE_DELETES_OPT;
import static org.apache.accumulo.core.client.summary.CountingSummarizer.MAX_COUNTERS_OPT;
import static org.apache.accumulo.core.client.summary.CountingSummarizer.MAX_COUNTER_LEN_OPT;
import static org.apache.accumulo.core.client.summary.CountingSummarizer.SEEN_STAT;
import static org.apache.accumulo.core.client.summary.CountingSummarizer.TOO_LONG_STAT;
import static org.apache.accumulo.core.client.summary.CountingSummarizer.TOO_MANY_STAT;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.Arrays;
import java.util.HashMap;
import org.apache.accumulo.core.client.summary.Summarizer.Collector;
import org.apache.accumulo.core.client.summary.summarizers.FamilySummarizer;
import org.apache.accumulo.core.client.summary.summarizers.VisibilitySummarizer;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.junit.jupiter.api.Test;
public class CountingSummarizerTest {
  /**
   * Test summarizer whose converter emits three counters per accepted key: the 2-character
   * prefixes of the row, column family, and column qualifier. Used to verify that a single key
   * can contribute multiple emissions.
   */
  public static class MultiSummarizer extends CountingSummarizer<String> {
    @Override
    protected Converter<String> converter() {
      return (k, v, c) -> {
        c.accept("rp:" + k.getRowData().subSequence(0, 2));
        c.accept("fp:" + k.getColumnFamilyData().subSequence(0, 2));
        c.accept("qp:" + k.getColumnQualifierData().subSequence(0, 2));
      };
    }
  }
  /**
   * Test summarizer whose converter counts the 2-character prefix of each Value, verifying that
   * converters may derive counters from the value rather than the key.
   */
  public static class ValueSummarizer extends CountingSummarizer<String> {
    @Override
    protected Converter<String> converter() {
      return (k, v, c) -> {
        c.accept("vp:" + v.toString().subSequence(0, 2));
      };
    }
  }
  /**
   * Feeds 64 keys (4 rows x 4 families x 4 qualifiers) through a {@link MultiSummarizer}
   * collector. Each key emits three counters (row/family/qualifier prefixes), so the summary must
   * report 64 seen, 3 * 64 emitted, and nothing ignored.
   */
  @Test
  public void testMultipleEmit() {
    SummarizerConfiguration sc = SummarizerConfiguration.builder(MultiSummarizer.class).build();
    MultiSummarizer countSum = new MultiSummarizer();

    Summarizer.Collector collector = countSum.collector(sc);

    Value val = new Value("abc");

    HashMap<String,Long> expected = new HashMap<>();

    for (String row : new String[] {"ask", "asleep", "some", "soul"}) {
      for (String fam : new String[] {"hop", "hope", "nope", "noop"}) {
        for (String qual : new String[] {"mad", "lad", "lab", "map"}) {
          collector.accept(new Key(row, fam, qual), val);
          // track the same three prefix counters the converter emits
          expected.merge("rp:" + row.substring(0, 2), 1L, Long::sum);
          expected.merge("fp:" + fam.substring(0, 2), 1L, Long::sum);
          expected.merge("qp:" + qual.substring(0, 2), 1L, Long::sum);
        }
      }
    }

    HashMap<String,Long> stats = new HashMap<>();
    collector.summarize(stats::put);

    CounterSummary csum = new CounterSummary(stats);
    assertEquals(expected, csum.getCounters());
    assertEquals(64, csum.getSeen());
    assertEquals(3 * 64, csum.getEmitted());
    assertEquals(0, csum.getIgnored());
    assertEquals(0, csum.getDeletesIgnored());
  }
  /**
   * Exercises a {@link FamilySummarizer} limited to 5 counters of at most 10 characters. Per the
   * expected stats below: the delete key is ignored (the default behavior), the two over-long
   * family names count as TOO_LONG, and after families f1-f5 are tracked the later new families
   * (f6 and two f7s) count as TOO_MANY.
   */
  @Test
  public void testSummarizing() {
    SummarizerConfiguration sc = SummarizerConfiguration.builder(FamilySummarizer.class)
        .addOptions(MAX_COUNTERS_OPT, "5", MAX_COUNTER_LEN_OPT, "10").build();
    FamilySummarizer countSum = new FamilySummarizer();

    Value val = new Value("abc");

    Summarizer.Collector collector = countSum.collector(sc);
    for (String fam : Arrays.asList("f1", "f1", "f1", "f2", "f1", "f70000000000000000000",
        "f70000000000000000001", "f2", "f3", "f4", "f5", "f6", "f7", "f3", "f7")) {
      collector.accept(new Key("r", fam), val);
    }

    // a deleted key is seen but not counted when deletes are ignored (the default)
    Key dk = new Key("r", "f2");
    dk.setDeleted(true);
    collector.accept(dk, new Value(""));

    HashMap<String,Long> stats = new HashMap<>();
    collector.summarize(stats::put);

    String p = COUNTER_STAT_PREFIX;

    HashMap<String,Long> expected = new HashMap<>();
    expected.put(p + "f1", 4L);
    expected.put(p + "f2", 2L);
    expected.put(p + "f3", 2L);
    expected.put(p + "f4", 1L);
    expected.put(p + "f5", 1L);
    expected.put(TOO_LONG_STAT, 2L);
    expected.put(TOO_MANY_STAT, 3L);
    expected.put(SEEN_STAT, 16L);
    expected.put(EMITTED_STAT, 15L);
    expected.put(DELETES_IGNORED_STAT, 1L);

    assertEquals(expected, stats);

    // the CounterSummary view over the raw stats must agree with them
    CounterSummary csum = new CounterSummary(stats);
    assertEquals(5, csum.getIgnored());
    assertEquals(3, csum.getTooMany());
    assertEquals(2, csum.getTooLong());
    assertEquals(16, csum.getSeen());
    assertEquals(15, csum.getEmitted());
    assertEquals(1, csum.getDeletesIgnored());

    expected.clear();
    expected.put("f1", 4L);
    expected.put("f2", 2L);
    expected.put("f3", 2L);
    expected.put("f4", 1L);
    expected.put("f5", 1L);
    assertEquals(expected, csum.getCounters());
  }
@Test
public void testMerge() {
SummarizerConfiguration sc = SummarizerConfiguration.builder(VisibilitySummarizer.class)
.addOption(MAX_COUNTERS_OPT, "5").build();
VisibilitySummarizer countSum = new VisibilitySummarizer();
String p = COUNTER_STAT_PREFIX;
HashMap<String,Long> sm1 = new HashMap<>();
sm1.put(p + "f001", 9L);
sm1.put(p + "f002", 4L);
sm1.put(p + "f003", 2L);
sm1.put(p + "f004", 1L);
sm1.put(p + "f005", 19L);
sm1.put(EMITTED_STAT, 15L);
sm1.put(SEEN_STAT, 5L);
sm1.put(DELETES_IGNORED_STAT, 1L);
HashMap<String,Long> sm2 = new HashMap<>();
sm2.put(p + "f001", 1L);
sm2.put(p + "f002", 2L);
sm2.put(p + "f00a", 7L);
sm2.put(p + "f00b", 1L);
sm2.put(p + "f00c", 17L);
sm2.put(EMITTED_STAT, 18L);
sm2.put(SEEN_STAT, 6L);
sm2.put(DELETES_IGNORED_STAT, 2L);
countSum.combiner(sc).merge(sm1, sm2);
HashMap<String,Long> expected = new HashMap<>();
expected.put(p + "f001", 10L);
expected.put(p + "f002", 6L);
expected.put(p + "f005", 19L);
expected.put(p + "f00a", 7L);
expected.put(p + "f00c", 17L);
expected.put(TOO_LONG_STAT, 0L);
expected.put(TOO_MANY_STAT, 4L);
expected.put(EMITTED_STAT, 18L + 15L);
expected.put(SEEN_STAT, 6L + 5L);
expected.put(DELETES_IGNORED_STAT, 3L);
assertEquals(expected, sm1);
sm2.clear();
sm2.put(p + "f001", 19L);
sm2.put(p + "f002", 2L);
sm2.put(p + "f003", 3L);
sm2.put(p + "f00b", 13L);
sm2.put(p + "f00c", 2L);
sm2.put(TOO_LONG_STAT, 1L);
sm2.put(TOO_MANY_STAT, 3L);
sm2.put(EMITTED_STAT, 21L);
sm2.put(SEEN_STAT, 7L);
sm2.put(DELETES_IGNORED_STAT, 5L);
countSum.combiner(sc).merge(sm1, sm2);
expected.clear();
expected.put(p + "f001", 29L);
expected.put(p + "f002", 8L);
expected.put(p + "f005", 19L);
expected.put(p + "f00b", 13L);
expected.put(p + "f00c", 19L);
expected.put(TOO_LONG_STAT, 1L);
expected.put(TOO_MANY_STAT, 17L);
expected.put(EMITTED_STAT, 21L + 18 + 15);
expected.put(SEEN_STAT, 7L + 6 + 5);
expected.put(DELETES_IGNORED_STAT, 8L);
}
  /**
   * With {@code INGNORE_DELETES_OPT} (note: constant name misspelled in the public API) set to
   * "false", delete keys are counted like any other key: the delete of r1/f1 contributes to the
   * f1 counter and DELETES_IGNORED stays 0.
   */
  @Test
  public void testCountDeletes() {
    SummarizerConfiguration sc = SummarizerConfiguration.builder(FamilySummarizer.class)
        .addOptions(INGNORE_DELETES_OPT, "false").build();
    FamilySummarizer countSum = new FamilySummarizer();

    Key k1 = new Key("r1", "f1");
    Key k2 = new Key("r1", "f1");
    k2.setDeleted(true); // this delete must be counted, not ignored
    Key k3 = new Key("r1", "f2");

    Collector collector = countSum.collector(sc);
    collector.accept(k1, new Value(""));
    collector.accept(k2, new Value(""));
    collector.accept(k3, new Value(""));

    String p = COUNTER_STAT_PREFIX;

    HashMap<String,Long> expected = new HashMap<>();
    expected.put(p + "f1", 2L);
    expected.put(p + "f2", 1L);
    expected.put(TOO_LONG_STAT, 0L);
    expected.put(TOO_MANY_STAT, 0L);
    expected.put(SEEN_STAT, 3L);
    expected.put(EMITTED_STAT, 3L);
    expected.put(DELETES_IGNORED_STAT, 0L);

    HashMap<String,Long> stats = new HashMap<>();
    collector.summarize(stats::put);
    assertEquals(expected, stats);

    // the CounterSummary view must agree with the raw stats
    CounterSummary csum = new CounterSummary(stats);
    assertEquals(0, csum.getIgnored());
    assertEquals(0, csum.getTooMany());
    assertEquals(0, csum.getTooLong());
    assertEquals(3, csum.getSeen());
    assertEquals(3, csum.getEmitted());
    assertEquals(0, csum.getDeletesIgnored());

    expected.clear();
    expected.put("f1", 2L);
    expected.put("f2", 1L);
    assertEquals(expected, csum.getCounters());
  }
@Test
public void testConvertValue() {
SummarizerConfiguration sc = SummarizerConfiguration.builder(ValueSummarizer.class).build();
ValueSummarizer countSum = new ValueSummarizer();
Summarizer.Collector collector = countSum.collector(sc);
HashMap<String,Long> expected = new HashMap<>();
for (String row : new String[] {"ask", "asleep", "some", "soul"}) {
for (String fam : new String[] {"hop", "hope", "nope", "noop"}) {
for (String qual : new String[] {"mad", "lad", "lab", "map"}) {
for (Value value : new Value[] {new Value("ask"), new Value("asleep"), new Value("some"),
new Value("soul")}) {
collector.accept(new Key(row, fam, qual), value);
expected.merge("vp:" + value.toString().substring(0, 2), 1L, Long::sum);
}
}
}
}
HashMap<String,Long> stats = new HashMap<>();
collector.summarize(stats::put);
CounterSummary csum = new CounterSummary(stats);
assertEquals(expected, csum.getCounters());
assertEquals(256, csum.getSeen());
assertEquals(256, csum.getEmitted());
assertEquals(0, csum.getIgnored());
assertEquals(0, csum.getDeletesIgnored());
}
}
| 9,474 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/summary | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/summary/summarizers/AuthorizationSummarizerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.summary.summarizers;
import static org.apache.accumulo.core.client.summary.CountingSummarizer.COUNTER_STAT_PREFIX;
import static org.apache.accumulo.core.client.summary.CountingSummarizer.DELETES_IGNORED_STAT;
import static org.apache.accumulo.core.client.summary.CountingSummarizer.EMITTED_STAT;
import static org.apache.accumulo.core.client.summary.CountingSummarizer.SEEN_STAT;
import static org.apache.accumulo.core.client.summary.CountingSummarizer.TOO_LONG_STAT;
import static org.apache.accumulo.core.client.summary.CountingSummarizer.TOO_MANY_STAT;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.HashMap;
import org.apache.accumulo.core.client.summary.Summarizer.Collector;
import org.apache.accumulo.core.client.summary.SummarizerConfiguration;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.junit.jupiter.api.Test;
public class AuthorizationSummarizerTest {

  // Shared empty value; AuthorizationSummarizer only inspects keys.
  private static final Value EV = new Value();

  @Test
  public void testBasic() {
    SummarizerConfiguration sc =
        SummarizerConfiguration.builder(AuthorizationSummarizer.class).build();
    AuthorizationSummarizer authSummarizer = new AuthorizationSummarizer();

    // Feed one key per visibility expression; the summarizer counts each distinct
    // authorization that appears in an expression once per key.
    Collector collector = authSummarizer.collector(sc);
    for (String vis : new String[] {"", "A", "B", "A&B", "(C|D)&(A|B)", "(C|D)&(A|B)",
        "(D&E)|(D&C&F)"}) {
      collector.accept(new Key("r", "f", "q", vis), EV);
    }

    HashMap<String,Long> actual = new HashMap<>();
    collector.summarize(actual::put);

    String p = COUNTER_STAT_PREFIX;
    HashMap<String,Long> expected = new HashMap<>();
    expected.put(p + "A", 4L); // "A", "A&B", and both "(C|D)&(A|B)" keys
    expected.put(p + "B", 4L);
    expected.put(p + "C", 3L);
    expected.put(p + "D", 3L);
    expected.put(p + "E", 1L);
    expected.put(p + "F", 1L);
    expected.put(TOO_LONG_STAT, 0L);
    expected.put(TOO_MANY_STAT, 0L);
    expected.put(SEEN_STAT, 7L);
    expected.put(EMITTED_STAT, 16L); // 0+1+1+2+4+4+4 auths across the seven keys
    expected.put(DELETES_IGNORED_STAT, 0L);
    assertEquals(expected, actual);
  }
}
| 9,475 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/summary | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/client/summary/summarizers/EntryLengthSummarizersTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.summary.summarizers;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.HashMap;
import org.apache.accumulo.core.client.summary.Summarizer.Collector;
import org.apache.accumulo.core.client.summary.Summarizer.Combiner;
import org.apache.accumulo.core.client.summary.SummarizerConfiguration;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.junit.jupiter.api.Test;
public class EntryLengthSummarizersTest {
/* COLLECTOR TEST */
/* Basic Test: Each test adds to the next, all are simple lengths. */
@Test
public void testEmpty() {
SummarizerConfiguration sc =
SummarizerConfiguration.builder(EntryLengthSummarizer.class).build();
EntryLengthSummarizer entrySum = new EntryLengthSummarizer();
Collector collector = entrySum.collector(sc);
HashMap<String,Long> stats = new HashMap<>();
collector.summarize(stats::put);
HashMap<String,Long> expected = new HashMap<>();
expected.put("key.min", 0L);
expected.put("key.max", 0L);
expected.put("key.sum", 0L);
expected.put("row.min", 0L);
expected.put("row.max", 0L);
expected.put("row.sum", 0L);
expected.put("family.min", 0L);
expected.put("family.max", 0L);
expected.put("family.sum", 0L);
expected.put("qualifier.min", 0L);
expected.put("qualifier.max", 0L);
expected.put("qualifier.sum", 0L);
expected.put("visibility.min", 0L);
expected.put("visibility.max", 0L);
expected.put("visibility.sum", 0L);
expected.put("value.min", 0L);
expected.put("value.max", 0L);
expected.put("value.sum", 0L);
expected.put("total", 0L);
assertEquals(expected, stats);
}
@Test
public void testBasicRow() {
SummarizerConfiguration sc =
SummarizerConfiguration.builder(EntryLengthSummarizer.class).build();
EntryLengthSummarizer entrySum = new EntryLengthSummarizer();
Key k1 = new Key("r1");
Key k2 = new Key("r2");
Key k3 = new Key("r3");
Collector collector = entrySum.collector(sc);
collector.accept(k1, new Value(""));
collector.accept(k2, new Value(""));
collector.accept(k3, new Value(""));
HashMap<String,Long> stats = new HashMap<>();
collector.summarize(stats::put);
HashMap<String,Long> expected = new HashMap<>();
expected.put("key.min", 2L);
expected.put("key.max", 2L);
expected.put("key.sum", 6L);
// Log2 Histogram
expected.put("key.logHist.1", 3L);
expected.put("row.min", 2L);
expected.put("row.max", 2L);
expected.put("row.sum", 6L);
// Log2 Histogram
expected.put("row.logHist.1", 3L);
expected.put("family.min", 0L);
expected.put("family.max", 0L);
expected.put("family.sum", 0L);
// Log2 Histogram
expected.put("family.logHist.0", 3L);
expected.put("qualifier.min", 0L);
expected.put("qualifier.max", 0L);
expected.put("qualifier.sum", 0L);
// Log2 Histogram
expected.put("qualifier.logHist.0", 3L);
expected.put("visibility.min", 0L);
expected.put("visibility.max", 0L);
expected.put("visibility.sum", 0L);
// Log2 Histogram
expected.put("visibility.logHist.0", 3L);
expected.put("value.min", 0L);
expected.put("value.max", 0L);
expected.put("value.sum", 0L);
// Log2 Histogram
expected.put("value.logHist.0", 3L);
expected.put("total", 3L);
assertEquals(expected, stats);
}
@Test
public void testBasicFamily() {
SummarizerConfiguration sc =
SummarizerConfiguration.builder(EntryLengthSummarizer.class).build();
EntryLengthSummarizer entrySum = new EntryLengthSummarizer();
Key k1 = new Key("r1", "f1");
Key k2 = new Key("r2", "f2");
Key k3 = new Key("r3", "f3");
Collector collector = entrySum.collector(sc);
collector.accept(k1, new Value(""));
collector.accept(k2, new Value(""));
collector.accept(k3, new Value(""));
HashMap<String,Long> stats = new HashMap<>();
collector.summarize(stats::put);
HashMap<String,Long> expected = new HashMap<>();
expected.put("key.min", 4L);
expected.put("key.max", 4L);
expected.put("key.sum", 12L);
// Log2 Histogram
expected.put("key.logHist.2", 3L);
expected.put("row.min", 2L);
expected.put("row.max", 2L);
expected.put("row.sum", 6L);
// Log2 Histogram
expected.put("row.logHist.1", 3L);
expected.put("family.min", 2L);
expected.put("family.max", 2L);
expected.put("family.sum", 6L);
// Log2 Histogram
expected.put("family.logHist.1", 3L);
expected.put("qualifier.min", 0L);
expected.put("qualifier.max", 0L);
expected.put("qualifier.sum", 0L);
// Log2 Histogram
expected.put("qualifier.logHist.0", 3L);
expected.put("visibility.min", 0L);
expected.put("visibility.max", 0L);
expected.put("visibility.sum", 0L);
// Log2 Histogram
expected.put("visibility.logHist.0", 3L);
expected.put("value.min", 0L);
expected.put("value.max", 0L);
expected.put("value.sum", 0L);
// Log2 Histogram
expected.put("value.logHist.0", 3L);
expected.put("total", 3L);
assertEquals(expected, stats);
}
@Test
public void testBasicQualifier() {
SummarizerConfiguration sc =
SummarizerConfiguration.builder(EntryLengthSummarizer.class).build();
EntryLengthSummarizer entrySum = new EntryLengthSummarizer();
Key k1 = new Key("r1", "f1", "q1");
Key k2 = new Key("r2", "f2", "q2");
Key k3 = new Key("r3", "f3", "q3");
Collector collector = entrySum.collector(sc);
collector.accept(k1, new Value(""));
collector.accept(k2, new Value(""));
collector.accept(k3, new Value(""));
HashMap<String,Long> stats = new HashMap<>();
collector.summarize(stats::put);
HashMap<String,Long> expected = new HashMap<>();
expected.put("key.min", 6L);
expected.put("key.max", 6L);
expected.put("key.sum", 18L);
// Log2 Histogram
expected.put("key.logHist.3", 3L);
expected.put("row.min", 2L);
expected.put("row.max", 2L);
expected.put("row.sum", 6L);
// Log2 Histogram
expected.put("row.logHist.1", 3L);
expected.put("family.min", 2L);
expected.put("family.max", 2L);
expected.put("family.sum", 6L);
// Log2 Histogram
expected.put("family.logHist.1", 3L);
expected.put("qualifier.min", 2L);
expected.put("qualifier.max", 2L);
expected.put("qualifier.sum", 6L);
// Log2 Histogram
expected.put("qualifier.logHist.1", 3L);
expected.put("visibility.min", 0L);
expected.put("visibility.max", 0L);
expected.put("visibility.sum", 0L);
// Log2 Histogram
expected.put("visibility.logHist.0", 3L);
expected.put("value.min", 0L);
expected.put("value.max", 0L);
expected.put("value.sum", 0L);
// Log2 Histogram
expected.put("value.logHist.0", 3L);
expected.put("total", 3L);
assertEquals(expected, stats);
}
@Test
public void testBasicVisibility() {
SummarizerConfiguration sc =
SummarizerConfiguration.builder(EntryLengthSummarizer.class).build();
EntryLengthSummarizer entrySum = new EntryLengthSummarizer();
Key k1 = new Key("r1", "f1", "q1", "v1");
Key k2 = new Key("r2", "f2", "q2", "v2");
Key k3 = new Key("r3", "f3", "q3", "v3");
Collector collector = entrySum.collector(sc);
collector.accept(k1, new Value(""));
collector.accept(k2, new Value(""));
collector.accept(k3, new Value(""));
HashMap<String,Long> stats = new HashMap<>();
collector.summarize(stats::put);
HashMap<String,Long> expected = new HashMap<>();
expected.put("key.min", 8L);
expected.put("key.max", 8L);
expected.put("key.sum", 24L);
// Log2 Histogram
expected.put("key.logHist.3", 3L);
expected.put("row.min", 2L);
expected.put("row.max", 2L);
expected.put("row.sum", 6L);
// Log2 Histogram
expected.put("row.logHist.1", 3L);
expected.put("family.min", 2L);
expected.put("family.max", 2L);
expected.put("family.sum", 6L);
// Log2 Histogram
expected.put("family.logHist.1", 3L);
expected.put("qualifier.min", 2L);
expected.put("qualifier.max", 2L);
expected.put("qualifier.sum", 6L);
// Log2 Histogram
expected.put("qualifier.logHist.1", 3L);
expected.put("visibility.min", 2L);
expected.put("visibility.max", 2L);
expected.put("visibility.sum", 6L);
// Log2 Histogram
expected.put("visibility.logHist.1", 3L);
expected.put("value.min", 0L);
expected.put("value.max", 0L);
expected.put("value.sum", 0L);
// Log2 Histogram
expected.put("value.logHist.0", 3L);
expected.put("total", 3L);
assertEquals(expected, stats);
}
@Test
public void testBasicValue() {
SummarizerConfiguration sc =
SummarizerConfiguration.builder(EntryLengthSummarizer.class).build();
EntryLengthSummarizer entrySum = new EntryLengthSummarizer();
Key k1 = new Key("r1", "f1", "q1", "v1");
Key k2 = new Key("r2", "f2", "q2", "v2");
Key k3 = new Key("r3", "f3", "q3", "v3");
Collector collector = entrySum.collector(sc);
collector.accept(k1, new Value("v1"));
collector.accept(k2, new Value("v2"));
collector.accept(k3, new Value("v3"));
HashMap<String,Long> stats = new HashMap<>();
collector.summarize(stats::put);
HashMap<String,Long> expected = new HashMap<>();
expected.put("key.min", 8L);
expected.put("key.max", 8L);
expected.put("key.sum", 24L);
// Log2 Histogram
expected.put("key.logHist.3", 3L);
expected.put("row.min", 2L);
expected.put("row.max", 2L);
expected.put("row.sum", 6L);
// Log2 Histogram
expected.put("row.logHist.1", 3L);
expected.put("family.min", 2L);
expected.put("family.max", 2L);
expected.put("family.sum", 6L);
// Log2 Histogram
expected.put("family.logHist.1", 3L);
expected.put("qualifier.min", 2L);
expected.put("qualifier.max", 2L);
expected.put("qualifier.sum", 6L);
// Log2 Histogram
expected.put("qualifier.logHist.1", 3L);
expected.put("visibility.min", 2L);
expected.put("visibility.max", 2L);
expected.put("visibility.sum", 6L);
// Log2 Histogram
expected.put("visibility.logHist.1", 3L);
expected.put("value.min", 2L);
expected.put("value.max", 2L);
expected.put("value.sum", 6L);
// Log2 Histogram
expected.put("value.logHist.1", 3L);
expected.put("total", 3L);
assertEquals(expected, stats);
}
/* Complex Test: Each test adds to the next, all are mixed lengths. */
@Test
public void testComplexRow() {
SummarizerConfiguration sc =
SummarizerConfiguration.builder(EntryLengthSummarizer.class).build();
EntryLengthSummarizer entrySum = new EntryLengthSummarizer();
Key k1 = new Key("r1");
Key k2 = new Key("row2");
Key k3 = new Key("columnRow3");
Collector collector = entrySum.collector(sc);
collector.accept(k1, new Value(""));
collector.accept(k2, new Value(""));
collector.accept(k3, new Value(""));
HashMap<String,Long> stats = new HashMap<>();
collector.summarize(stats::put);
HashMap<String,Long> expected = new HashMap<>();
expected.put("key.min", 2L);
expected.put("key.max", 10L);
expected.put("key.sum", 16L);
// Log2 Histogram
expected.put("key.logHist.1", 1L);
expected.put("key.logHist.2", 1L);
expected.put("key.logHist.3", 1L);
expected.put("row.min", 2L);
expected.put("row.max", 10L);
expected.put("row.sum", 16L);
// Log2 Histogram
expected.put("row.logHist.1", 1L);
expected.put("row.logHist.2", 1L);
expected.put("row.logHist.3", 1L);
expected.put("family.min", 0L);
expected.put("family.max", 0L);
expected.put("family.sum", 0L);
// Log2 Histogram
expected.put("family.logHist.0", 3L);
expected.put("qualifier.min", 0L);
expected.put("qualifier.max", 0L);
expected.put("qualifier.sum", 0L);
// Log2 Histogram
expected.put("qualifier.logHist.0", 3L);
expected.put("visibility.min", 0L);
expected.put("visibility.max", 0L);
expected.put("visibility.sum", 0L);
// Log2 Histogram
expected.put("visibility.logHist.0", 3L);
expected.put("value.min", 0L);
expected.put("value.max", 0L);
expected.put("value.sum", 0L);
// Log2 Histogram
expected.put("value.logHist.0", 3L);
expected.put("total", 3L);
assertEquals(expected, stats);
}
@Test
public void testComplexFamily() {
SummarizerConfiguration sc =
SummarizerConfiguration.builder(EntryLengthSummarizer.class).build();
EntryLengthSummarizer entrySum = new EntryLengthSummarizer();
Key k1 = new Key("r1", "family1");
Key k2 = new Key("row2", "columnFamily2");
Key k3 = new Key("columnRow3", "f3");
Collector collector = entrySum.collector(sc);
collector.accept(k1, new Value(""));
collector.accept(k2, new Value(""));
collector.accept(k3, new Value(""));
HashMap<String,Long> stats = new HashMap<>();
collector.summarize(stats::put);
HashMap<String,Long> expected = new HashMap<>();
expected.put("key.min", 9L);
expected.put("key.max", 17L);
expected.put("key.sum", 38L);
// Log2 Histogram
expected.put("key.logHist.3", 1L);
expected.put("key.logHist.4", 2L);
expected.put("row.min", 2L);
expected.put("row.max", 10L);
expected.put("row.sum", 16L);
// Log2 Histogram
expected.put("row.logHist.1", 1L);
expected.put("row.logHist.2", 1L);
expected.put("row.logHist.3", 1L);
expected.put("family.min", 2L);
expected.put("family.max", 13L);
expected.put("family.sum", 22L);
// Log2 Histogram
expected.put("family.logHist.1", 1L);
expected.put("family.logHist.3", 1L);
expected.put("family.logHist.4", 1L);
expected.put("qualifier.min", 0L);
expected.put("qualifier.max", 0L);
expected.put("qualifier.sum", 0L);
// Log2 Histogram
expected.put("qualifier.logHist.0", 3L);
expected.put("visibility.min", 0L);
expected.put("visibility.max", 0L);
expected.put("visibility.sum", 0L);
// Log2 Histogram
expected.put("visibility.logHist.0", 3L);
expected.put("value.min", 0L);
expected.put("value.max", 0L);
expected.put("value.sum", 0L);
// Log2 Histogram
expected.put("value.logHist.0", 3L);
expected.put("total", 3L);
assertEquals(expected, stats);
}
@Test
public void testComplexQualifier() {
SummarizerConfiguration sc =
SummarizerConfiguration.builder(EntryLengthSummarizer.class).build();
EntryLengthSummarizer entrySum = new EntryLengthSummarizer();
Key k1 = new Key("r1", "family1", "columnQualifier1");
Key k2 = new Key("row2", "columnFamily2", "q2");
Key k3 = new Key("columnRow3", "f3", "qualifier3");
Collector collector = entrySum.collector(sc);
collector.accept(k1, new Value(""));
collector.accept(k2, new Value(""));
collector.accept(k3, new Value(""));
HashMap<String,Long> stats = new HashMap<>();
collector.summarize(stats::put);
HashMap<String,Long> expected = new HashMap<>();
expected.put("key.min", 19L);
expected.put("key.max", 25L);
expected.put("key.sum", 66L);
// Log2 Histogram
expected.put("key.logHist.4", 2L);
expected.put("key.logHist.5", 1L);
expected.put("row.min", 2L);
expected.put("row.max", 10L);
expected.put("row.sum", 16L);
// Log2 Histogram
expected.put("row.logHist.1", 1L);
expected.put("row.logHist.2", 1L);
expected.put("row.logHist.3", 1L);
expected.put("family.min", 2L);
expected.put("family.max", 13L);
expected.put("family.sum", 22L);
// Log2 Histogram
expected.put("family.logHist.1", 1L);
expected.put("family.logHist.3", 1L);
expected.put("family.logHist.4", 1L);
expected.put("qualifier.min", 2L);
expected.put("qualifier.max", 16L);
expected.put("qualifier.sum", 28L);
// Log2 Histogram
expected.put("qualifier.logHist.1", 1L);
expected.put("qualifier.logHist.3", 1L);
expected.put("qualifier.logHist.4", 1L);
expected.put("visibility.min", 0L);
expected.put("visibility.max", 0L);
expected.put("visibility.sum", 0L);
// Log2 Histogram
expected.put("visibility.logHist.0", 3L);
expected.put("value.min", 0L);
expected.put("value.max", 0L);
expected.put("value.sum", 0L);
// Log2 Histogram
expected.put("value.logHist.0", 3L);
expected.put("total", 3L);
assertEquals(expected, stats);
}
@Test
public void testComplexVisibility() {
SummarizerConfiguration sc =
SummarizerConfiguration.builder(EntryLengthSummarizer.class).build();
EntryLengthSummarizer entrySum = new EntryLengthSummarizer();
Key k1 = new Key("r1", "family1", "columnQualifier1", "v1");
Key k2 = new Key("row2", "columnFamily2", "q2", "visibility2");
Key k3 = new Key("columnRow3", "f3", "qualifier3", "columnVisibility3");
Collector collector = entrySum.collector(sc);
collector.accept(k1, new Value(""));
collector.accept(k2, new Value(""));
collector.accept(k3, new Value(""));
HashMap<String,Long> stats = new HashMap<>();
collector.summarize(stats::put);
HashMap<String,Long> expected = new HashMap<>();
expected.put("key.min", 27L);
expected.put("key.max", 39L);
expected.put("key.sum", 96L);
// Log2 Histogram
expected.put("key.logHist.5", 3L);
expected.put("row.min", 2L);
expected.put("row.max", 10L);
expected.put("row.sum", 16L);
// Log2 Histogram
expected.put("row.logHist.1", 1L);
expected.put("row.logHist.2", 1L);
expected.put("row.logHist.3", 1L);
expected.put("family.min", 2L);
expected.put("family.max", 13L);
expected.put("family.sum", 22L);
// Log2 Histogram
expected.put("family.logHist.1", 1L);
expected.put("family.logHist.3", 1L);
expected.put("family.logHist.4", 1L);
expected.put("qualifier.min", 2L);
expected.put("qualifier.max", 16L);
expected.put("qualifier.sum", 28L);
// Log2 Histogram
expected.put("qualifier.logHist.1", 1L);
expected.put("qualifier.logHist.3", 1L);
expected.put("qualifier.logHist.4", 1L);
expected.put("visibility.min", 2L);
expected.put("visibility.max", 17L);
expected.put("visibility.sum", 30L);
// Log2 Histogram
expected.put("visibility.logHist.1", 1L);
expected.put("visibility.logHist.3", 1L);
expected.put("visibility.logHist.4", 1L);
expected.put("value.min", 0L);
expected.put("value.max", 0L);
expected.put("value.sum", 0L);
// Log2 Histogram
expected.put("value.logHist.0", 3L);
expected.put("total", 3L);
assertEquals(expected, stats);
}
@Test
public void testComplexValue() {
SummarizerConfiguration sc =
SummarizerConfiguration.builder(EntryLengthSummarizer.class).build();
EntryLengthSummarizer entrySum = new EntryLengthSummarizer();
Key k1 = new Key("r1", "family1", "columnQualifier1", "v1");
Key k2 = new Key("row2", "columnFamily2", "q2", "visibility2");
Key k3 = new Key("columnRow3", "f3", "qualifier3", "columnVisibility3");
Collector collector = entrySum.collector(sc);
collector.accept(k1, new Value("v1"));
collector.accept(k2, new Value("value2"));
collector.accept(k3, new Value("keyValue3"));
HashMap<String,Long> stats = new HashMap<>();
collector.summarize(stats::put);
HashMap<String,Long> expected = new HashMap<>();
expected.put("key.min", 27L);
expected.put("key.max", 39L);
expected.put("key.sum", 96L);
// Log2 Histogram
expected.put("key.logHist.5", 3L);
expected.put("row.min", 2L);
expected.put("row.max", 10L);
expected.put("row.sum", 16L);
// Log2 Histogram
expected.put("row.logHist.1", 1L);
expected.put("row.logHist.2", 1L);
expected.put("row.logHist.3", 1L);
expected.put("family.min", 2L);
expected.put("family.max", 13L);
expected.put("family.sum", 22L);
// Log2 Histogram
expected.put("family.logHist.1", 1L);
expected.put("family.logHist.3", 1L);
expected.put("family.logHist.4", 1L);
expected.put("qualifier.min", 2L);
expected.put("qualifier.max", 16L);
expected.put("qualifier.sum", 28L);
// Log2 Histogram
expected.put("qualifier.logHist.1", 1L);
expected.put("qualifier.logHist.3", 1L);
expected.put("qualifier.logHist.4", 1L);
expected.put("visibility.min", 2L);
expected.put("visibility.max", 17L);
expected.put("visibility.sum", 30L);
// Log2 Histogram
expected.put("visibility.logHist.1", 1L);
expected.put("visibility.logHist.3", 1L);
expected.put("visibility.logHist.4", 1L);
expected.put("value.min", 2L);
expected.put("value.max", 9L);
expected.put("value.sum", 17L);
// Log2 Histogram
expected.put("value.logHist.1", 1L);
expected.put("value.logHist.3", 2L);
expected.put("total", 3L);
assertEquals(expected, stats);
}
/* Miscellaneous Test */
@Test
public void testAll() {
SummarizerConfiguration sc =
SummarizerConfiguration.builder(EntryLengthSummarizer.class).build();
EntryLengthSummarizer entrySum = new EntryLengthSummarizer();
Key k1 = new Key("maximumnoqualifier", "f1", "q", "vis1");
Key k2 = new Key("minKey", "fam2", "q2", "visibility2");
Key k3 = new Key("row3", "f3", "qualifier3", "v3");
Key k4 = new Key("r4", "family4", "qual4", "vis4");
Key k5 = new Key("fifthrow", "thirdfamily", "q5", "v5");
Key k6 = new Key("r6", "sixthfamily", "qual6", "visibi6");
Collector collector = entrySum.collector(sc);
collector.accept(k1, new Value("v1"));
collector.accept(k2, new Value("value2"));
collector.accept(k3, new Value("val3"));
collector.accept(k4, new Value("fourthvalue"));
collector.accept(k5, new Value(""));
collector.accept(k6, new Value("value6"));
HashMap<String,Long> stats = new HashMap<>();
collector.summarize(stats::put);
HashMap<String,Long> expected = new HashMap<>();
expected.put("key.min", 18L);
expected.put("key.max", 25L);
expected.put("key.sum", 132L);
// Log2 Histogram
expected.put("key.logHist.4", 2L);
expected.put("key.logHist.5", 4L);
expected.put("row.min", 2L);
expected.put("row.max", 18L);
expected.put("row.sum", 40L);
// Log2 Histogram
expected.put("row.logHist.1", 2L);
expected.put("row.logHist.2", 1L);
expected.put("row.logHist.3", 2L);
expected.put("row.logHist.4", 1L);
expected.put("family.min", 2L);
expected.put("family.max", 11L);
expected.put("family.sum", 37L);
// Log2 Histogram
expected.put("family.logHist.1", 2L);
expected.put("family.logHist.2", 1L);
expected.put("family.logHist.3", 3L);
expected.put("qualifier.min", 1L);
expected.put("qualifier.max", 10L);
expected.put("qualifier.sum", 25L);
// Log2 Histogram
expected.put("qualifier.logHist.0", 1L);
expected.put("qualifier.logHist.1", 2L);
expected.put("qualifier.logHist.2", 2L);
expected.put("qualifier.logHist.3", 1L);
expected.put("visibility.min", 2L);
expected.put("visibility.max", 11L);
expected.put("visibility.sum", 30L);
// Log2 Histogram
expected.put("visibility.logHist.1", 2L);
expected.put("visibility.logHist.2", 2L);
expected.put("visibility.logHist.3", 2L);
expected.put("value.min", 0L);
expected.put("value.max", 11L);
expected.put("value.sum", 29L);
// Log2 Histogram
expected.put("value.logHist.0", 1L);
expected.put("value.logHist.1", 1L);
expected.put("value.logHist.2", 1L);
expected.put("value.logHist.3", 3L);
expected.put("total", 6L);
assertEquals(expected, stats);
}
@Test
public void testLog2Histogram() {
SummarizerConfiguration sc =
SummarizerConfiguration.builder(EntryLengthSummarizer.class).build();
EntryLengthSummarizer entrySum = new EntryLengthSummarizer();
Key k1 = new Key("row1");
Key k2 = new Key("row2");
Key k3 = new Key("row3");
Collector collector = entrySum.collector(sc);
collector.accept(k1, new Value("01"));
collector.accept(k2, new Value("012345678"));
collector.accept(k3, new Value("012345679"));
HashMap<String,Long> stats = new HashMap<>();
collector.summarize(stats::put);
HashMap<String,Long> expected = new HashMap<>();
expected.put("key.min", 4L);
expected.put("key.max", 4L);
expected.put("key.sum", 12L);
// Log2 Histogram for Key
expected.put("key.logHist.2", 3L);
expected.put("row.min", 4L);
expected.put("row.max", 4L);
expected.put("row.sum", 12L);
// Log2 Histogram for Row
expected.put("row.logHist.2", 3L);
expected.put("family.min", 0L);
expected.put("family.max", 0L);
expected.put("family.sum", 0L);
// Log2 Histogram for Family
expected.put("family.logHist.0", 3L);
expected.put("qualifier.min", 0L);
expected.put("qualifier.max", 0L);
expected.put("qualifier.sum", 0L);
// Log2 Histogram for Qualifier
expected.put("qualifier.logHist.0", 3L);
expected.put("visibility.min", 0L);
expected.put("visibility.max", 0L);
expected.put("visibility.sum", 0L);
// Log2 Histogram for Visibility
expected.put("visibility.logHist.0", 3L);
expected.put("value.min", 2L);
expected.put("value.max", 9L);
expected.put("value.sum", 20L);
// Log2 Histogram for Value
expected.put("value.logHist.1", 1L);
expected.put("value.logHist.3", 2L);
expected.put("total", 3L);
assertEquals(expected, stats);
}
/* COMBINER TEST */
@Test
public void testCombine() {
SummarizerConfiguration sc =
SummarizerConfiguration.builder(EntryLengthSummarizer.class).build();
EntryLengthSummarizer entrySum = new EntryLengthSummarizer();
Collector collector1 = entrySum.collector(sc);
collector1.accept(new Key("1", "f1", "q1"), new Value("v1"));
collector1.accept(new Key("1234", "f1", "q1"), new Value("v111"));
collector1.accept(new Key("12345678", "f1", "q1"), new Value("v111111"));
HashMap<String,Long> stats1 = new HashMap<>();
collector1.summarize(stats1::put);
Collector collector2 = entrySum.collector(sc);
collector2.accept(new Key("5432", "f11", "q12"), new Value("2"));
collector2.accept(new Key("12", "f11", "q1234"), new Value("12"));
collector2.accept(new Key("12", "f11", "q11234567"), new Value("4444"));
HashMap<String,Long> stats2 = new HashMap<>();
collector2.summarize(stats2::put);
Combiner combiner = entrySum.combiner(sc);
combiner.merge(stats1, stats2);
HashMap<String,Long> expected = new HashMap<>();
expected.put("key.min", 5L);
expected.put("key.max", 14L);
expected.put("key.sum", 59L);
// Log2 Histogram for Key
expected.put("key.logHist.2", 1L);
expected.put("key.logHist.3", 3L);
expected.put("key.logHist.4", 2L);
expected.put("row.min", 1L);
expected.put("row.max", 8L);
expected.put("row.sum", 21L);
// Log2 Histogram for Row
expected.put("row.logHist.0", 1L);
expected.put("row.logHist.1", 2L);
expected.put("row.logHist.2", 2L);
expected.put("row.logHist.3", 1L);
expected.put("family.min", 2L);
expected.put("family.max", 3L);
expected.put("family.sum", 15L);
// Log2 Histogram for Family
expected.put("family.logHist.1", 3L);
expected.put("family.logHist.2", 3L);
expected.put("qualifier.min", 2L);
expected.put("qualifier.max", 9L);
expected.put("qualifier.sum", 23L);
// Log2 Histogram for Qualifier
expected.put("qualifier.logHist.1", 3L);
expected.put("qualifier.logHist.2", 2L);
expected.put("qualifier.logHist.3", 1L);
expected.put("visibility.min", 0L);
expected.put("visibility.max", 0L);
expected.put("visibility.sum", 0L);
// Log2 Histogram for Visibility
expected.put("visibility.logHist.0", 6L);
expected.put("value.min", 1L);
expected.put("value.max", 7L);
expected.put("value.sum", 20L);
// Log2 Histogram for Value
expected.put("value.logHist.0", 1L);
expected.put("value.logHist.1", 2L);
expected.put("value.logHist.2", 2L);
expected.put("value.logHist.3", 1L);
expected.put("total", 6L);
assertEquals(expected, stats1);
}
@Test
public void testCombine2() {
SummarizerConfiguration sc =
SummarizerConfiguration.builder(EntryLengthSummarizer.class).build();
EntryLengthSummarizer entrySum = new EntryLengthSummarizer();
Collector collector1 = entrySum.collector(sc);
collector1.accept(new Key("12345678901234567890", "f12345", "q123456"),
new Value("value1234567890"));
HashMap<String,Long> stats1 = new HashMap<>();
collector1.summarize(stats1::put);
Collector collector2 = entrySum.collector(sc);
collector2.accept(new Key("5432", "f11", "q12"), new Value("2"));
collector2.accept(new Key("12", "f11", "q1234"), new Value("12"));
collector2.accept(new Key("12", "f11", "q11234567"), new Value("4444"));
HashMap<String,Long> stats2 = new HashMap<>();
collector2.summarize(stats2::put);
Combiner combiner = entrySum.combiner(sc);
combiner.merge(stats1, stats2);
HashMap<String,Long> expected = new HashMap<>();
expected.put("key.min", 10L);
expected.put("key.max", 33L);
expected.put("key.sum", 67L);
// Log2 Histogram for Key
expected.put("key.logHist.3", 2L);
expected.put("key.logHist.4", 1L);
expected.put("key.logHist.5", 1L);
expected.put("row.min", 2L);
expected.put("row.max", 20L);
expected.put("row.sum", 28L);
// Log2 Histogram for Row
expected.put("row.logHist.1", 2L);
expected.put("row.logHist.2", 1L);
expected.put("row.logHist.4", 1L);
expected.put("family.min", 3L);
expected.put("family.max", 6L);
expected.put("family.sum", 15L);
// Log2 Histogram for Family
expected.put("family.logHist.2", 3L);
expected.put("family.logHist.3", 1L);
expected.put("qualifier.min", 3L);
expected.put("qualifier.max", 9L);
expected.put("qualifier.sum", 24L);
// Log2 Histogram for Qualifier
expected.put("qualifier.logHist.2", 2L);
expected.put("qualifier.logHist.3", 2L);
expected.put("visibility.min", 0L);
expected.put("visibility.max", 0L);
expected.put("visibility.sum", 0L);
// Log2 Histogram for Visibility
expected.put("visibility.logHist.0", 4L);
expected.put("value.min", 1L);
expected.put("value.max", 15L);
expected.put("value.sum", 22L);
// Log2 Histogram for Value
expected.put("value.logHist.0", 1L);
expected.put("value.logHist.1", 1L);
expected.put("value.logHist.2", 1L);
expected.put("value.logHist.4", 1L);
expected.put("total", 4L);
assertEquals(expected, stats1);
}
@Test
public void testCombine3() {
SummarizerConfiguration sc =
SummarizerConfiguration.builder(EntryLengthSummarizer.class).build();
EntryLengthSummarizer entrySum = new EntryLengthSummarizer();
Collector collector1 = entrySum.collector(sc);
collector1.accept(new Key("r1", "f1"), new Value("v1"));
HashMap<String,Long> stats1 = new HashMap<>();
collector1.summarize(stats1::put);
Collector collector2 = entrySum.collector(sc);
collector2.accept(new Key("row1", "family1", "q1"), new Value(""));
HashMap<String,Long> stats2 = new HashMap<>();
collector2.summarize(stats2::put);
Combiner combiner = entrySum.combiner(sc);
combiner.merge(stats1, stats2);
HashMap<String,Long> expected = new HashMap<>();
expected.put("key.min", 4L);
expected.put("key.max", 13L);
expected.put("key.sum", 17L);
// Log2 Histogram for Key
expected.put("key.logHist.2", 1L);
expected.put("key.logHist.4", 1L);
expected.put("row.min", 2L);
expected.put("row.max", 4L);
expected.put("row.sum", 6L);
// Log2 Histogram for Row
expected.put("row.logHist.1", 1L);
expected.put("row.logHist.2", 1L);
expected.put("family.min", 2L);
expected.put("family.max", 7L);
expected.put("family.sum", 9L);
// Log2 Histogram for Family
expected.put("family.logHist.1", 1L);
expected.put("family.logHist.3", 1L);
expected.put("qualifier.min", 0L);
expected.put("qualifier.max", 2L);
expected.put("qualifier.sum", 2L);
// Log2 Histogram for Qualifier
expected.put("qualifier.logHist.0", 1L);
expected.put("qualifier.logHist.1", 1L);
expected.put("visibility.min", 0L);
expected.put("visibility.max", 0L);
expected.put("visibility.sum", 0L);
// Log2 Histogram for Visibility
expected.put("visibility.logHist.0", 2L);
expected.put("value.min", 0L);
expected.put("value.max", 2L);
expected.put("value.sum", 2L);
// Log2 Histogram for Value
expected.put("value.logHist.0", 1L);
expected.put("value.logHist.1", 1L);
expected.put("total", 2L);
assertEquals(expected, stats1);
}
}
| 9,476 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/metadata/MetadataServicerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.metadata;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.HashMap;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.admin.TableOperations;
import org.apache.accumulo.core.clientImpl.ClientContext;
import org.apache.accumulo.core.data.TableId;
import org.easymock.EasyMock;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
public class MetadataServicerTest {

  private static final String userTableName = "tableName";
  private static final TableId userTableId = TableId.of("tableId");
  private static ClientContext context;

  /**
   * Builds a mock {@link ClientContext} whose table name-to-id map contains the root table, the
   * metadata table, and one user table.
   */
  @BeforeAll
  public static void setupContext() {
    HashMap<String,String> tableNameToIdMap = new HashMap<>();
    tableNameToIdMap.put(RootTable.NAME, RootTable.ID.canonical());
    tableNameToIdMap.put(MetadataTable.NAME, MetadataTable.ID.canonical());
    tableNameToIdMap.put(userTableName, userTableId.canonical());
    context = EasyMock.createMock(ClientContext.class);
    TableOperations tableOps = EasyMock.createMock(TableOperations.class);
    EasyMock.expect(tableOps.tableIdMap()).andReturn(tableNameToIdMap).anyTimes();
    EasyMock.expect(context.tableOperations()).andReturn(tableOps).anyTimes();
    EasyMock.replay(context, tableOps);
  }

  @Test
  public void checkSystemTableIdentifiers() {
    // the two system tables must never share an id or a name
    assertNotEquals(RootTable.ID, MetadataTable.ID);
    assertNotEquals(RootTable.NAME, MetadataTable.NAME);
  }

  /**
   * Whether looked up by table id or by table name, each table class must resolve to the expected
   * servicer type backed by the correct metadata-holding table.
   */
  @Test
  public void testGetCorrectServicer() throws AccumuloException, AccumuloSecurityException {
    assertRootServicer(MetadataServicer.forTableId(context, RootTable.ID));
    assertTableServicer(MetadataServicer.forTableId(context, MetadataTable.ID),
        ServicerForMetadataTable.class, RootTable.NAME, MetadataTable.ID);
    assertTableServicer(MetadataServicer.forTableId(context, userTableId),
        ServicerForUserTables.class, MetadataTable.NAME, userTableId);

    assertRootServicer(MetadataServicer.forTableName(context, RootTable.NAME));
    assertTableServicer(MetadataServicer.forTableName(context, MetadataTable.NAME),
        ServicerForMetadataTable.class, RootTable.NAME, MetadataTable.ID);
    assertTableServicer(MetadataServicer.forTableName(context, userTableName),
        ServicerForUserTables.class, MetadataTable.NAME, userTableId);
  }

  // The root tablet's servicer is not backed by another table, so it must not be a
  // TableMetadataServicer.
  private static void assertRootServicer(MetadataServicer ms) {
    assertTrue(ms instanceof ServicerForRootTable);
    assertFalse(ms instanceof TableMetadataServicer);
    assertEquals(RootTable.ID, ms.getServicedTableId());
  }

  // Verifies a table-backed servicer's concrete type, the name of the table holding its
  // metadata, and the id of the table being serviced.
  private static void assertTableServicer(MetadataServicer ms, Class<?> expectedType,
      String expectedServicingTable, TableId expectedServicedId) {
    assertTrue(expectedType.isInstance(ms));
    assertTrue(ms instanceof TableMetadataServicer);
    assertEquals(expectedServicingTable, ((TableMetadataServicer) ms).getServicingTableName());
    assertEquals(expectedServicedId, ms.getServicedTableId());
  }
}
| 9,477 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/metadata/ValidationUtilTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.metadata;
import static org.apache.accumulo.core.metadata.ValidationUtil.validateFileName;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.Set;
import org.junit.jupiter.api.Test;
public class ValidationUtilTest {

  @Test
  public void testValidateFileNameSuccess() {
    // plain file names, including the temporary-file suffix, are accepted
    for (String fileName : Set.of("F0001acd.rf", "F0001acd.rf_tmp")) {
      validateFileName(fileName);
    }
  }

  @Test
  public void testValidateFileNameException() {
    // empty names and anything containing path components must be rejected
    // with a message naming the offending input
    for (String fileName : Set.of("",
        "hdfs://nn:8020/accumulo/tables/2/default_tablet/F0001acd.rf", "./F0001acd.rf",
        "/F0001acd.rf")) {
      var ex = assertThrows(IllegalArgumentException.class, () -> validateFileName(fileName));
      assertEquals("Provided filename (" + fileName + ") is empty or contains invalid characters",
          ex.getMessage());
    }
  }

  @Test
  public void testValidateFileNameNull() {
    // a null name is a programming error, not a validation failure
    assertThrows(NullPointerException.class, () -> validateFileName(null));
  }
}
| 9,478 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/metadata | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/metadata/schema/DeleteMetadataTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.metadata.schema;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.DeletesSection;
import org.junit.jupiter.api.Test;
public class DeleteMetadataTest {

  @Test
  public void encodeRowTest() {
    // encoding then decoding a delete-marker path must be a lossless round
    // trip, including paths with a URI scheme and special characters
    for (String path : new String[] {"/dir/testpath",
        "hdfs://localhost:8020/dir/r+/1_table/f$%#"}) {
      assertEquals(path, DeletesSection.decodeRow(DeletesSection.encodeRow(path)));
    }
  }
}
| 9,479 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/metadata | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/metadata/schema/MetadataSchemaTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.metadata.schema;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.TabletColumnFamily;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
public class MetadataSchemaTest {

  @Test
  public void testDecodeEncodePrevEndRow() {
    // null (no previous end row), the empty row, and an ordinary row must all
    // survive an encode/decode round trip unchanged
    assertNull(roundTrip(null));
    Text empty = new Text();
    assertEquals(empty, roundTrip(empty));
    Text ab = new Text("ab");
    assertEquals(ab, roundTrip(ab));
  }

  // Encodes a previous end row value and immediately decodes it again.
  private static Text roundTrip(Text prevEndRow) {
    return TabletColumnFamily.decodePrevEndRow(TabletColumnFamily.encodePrevEndRow(prevEndRow));
  }
}
| 9,480 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/metadata | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/metadata/schema/LinkingIteratorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.metadata.schema;
import static org.apache.accumulo.core.metadata.schema.TabletMetadata.create;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Stream;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.TableId;
import org.apache.accumulo.core.dataImpl.KeyExtent;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
import com.google.common.collect.Lists;
public class LinkingIteratorTest {

  /**
   * Supplies tablet metadata iterators to the LinkingIterator under test. The first call to
   * {@link #apply(Range)} serves the {@code initial} list and every subsequent call serves the
   * {@code subsequent} list, simulating the metadata table changing between the iterator's
   * initial scan and its re-scans.
   */
  private static class IterFactory implements Function<Range,Iterator<TabletMetadata>> {
    // number of times apply() has been invoked; selects which tablet list to serve
    private int count;
    private List<TabletMetadata> initial;
    private List<TabletMetadata> subsequent;

    IterFactory(List<TabletMetadata> initial, List<TabletMetadata> subsequent) {
      this.initial = initial;
      this.subsequent = subsequent;
      count = 0;
    }

    @Override
    public Iterator<TabletMetadata> apply(Range range) {
      // serve the initial tablets on the first call and the subsequent tablets afterwards,
      // restricted to tablets whose metadata row falls inside the requested range
      Stream<TabletMetadata> stream = count++ == 0 ? initial.stream() : subsequent.stream();
      return stream.filter(tm -> range.contains(new Key(tm.getExtent().toMetaRow()))).iterator();
    }
  }

  // Scans the given range with a LinkingIterator and asserts that the extents produced match
  // the expected tablets, in order.
  private static void check(List<TabletMetadata> expected, IterFactory iterFactory, Range range) {
    List<KeyExtent> actual = new ArrayList<>();
    new LinkingIterator(iterFactory, range).forEachRemaining(tm -> actual.add(tm.getExtent()));
    assertEquals(Lists.transform(expected, TabletMetadata::getExtent), actual);
  }

  // Convenience overload: scan the full range.
  private static void check(List<TabletMetadata> expected, IterFactory iterFactory) {
    check(expected, iterFactory, new Range());
  }

  // Convenience overload: scan a single table's metadata section.
  private static void check(List<TabletMetadata> expected, IterFactory iterFactory,
      TableId tableId) {
    check(expected, iterFactory, TabletsSection.getRange(tableId));
  }

  @Test
  public void testHole() {
    // the initial scan is missing the (m,r] tablet; the iterator should detect the hole,
    // re-scan, and produce the complete second list
    List<TabletMetadata> tablets1 = Arrays.asList(create("4", null, "f"), create("4", "f", "m"),
        create("4", "r", "x"), create("4", "x", null));
    List<TabletMetadata> tablets2 = Arrays.asList(create("4", null, "f"), create("4", "f", "m"),
        create("4", "m", "r"), create("4", "r", "x"), create("4", "x", null));
    check(tablets2, new IterFactory(tablets1, tablets2));
  }

  @Test
  public void testMerge() {
    // test for case when a tablet is merged away
    List<TabletMetadata> tablets1 = Arrays.asList(create("4", null, "f"), create("4", "f", "m"),
        create("4", "f", "r"), create("4", "x", null));
    List<TabletMetadata> tablets2 = Arrays.asList(create("4", null, "f"), create("4", "f", "r"),
        create("4", "r", "x"), create("4", "x", null));
    LinkingIterator li = new LinkingIterator(new IterFactory(tablets1, tablets2), new Range());
    // draining the iterator should fail once it observes the tablet disappeared
    assertThrows(TabletDeletedException.class, () -> {
      while (li.hasNext()) {
        li.next();
      }
    });
  }

  @Test
  public void testBadTableTransition1() {
    // test when last tablet in table does not have null end row
    List<TabletMetadata> tablets1 =
        Arrays.asList(create("4", null, "f"), create("4", "f", "m"), create("5", null, null));
    List<TabletMetadata> tablets2 = Arrays.asList(create("4", null, "f"), create("4", "f", "m"),
        create("4", "m", null), create("5", null, null));
    check(tablets2, new IterFactory(tablets1, tablets2));
  }

  @Test
  public void testBadTableTransition2() {
    // test when first tablet in table does not have null prev end row
    List<TabletMetadata> tablets1 =
        Arrays.asList(create("4", null, "f"), create("4", "f", null), create("5", "h", null));
    List<TabletMetadata> tablets2 = Arrays.asList(create("4", null, "f"), create("4", "f", null),
        create("5", null, "h"), create("5", "h", null));
    check(tablets2, new IterFactory(tablets1, tablets2));
  }

  @Test
  public void testFirstTabletSplits() {
    // check when first tablet has a prev end row that points to a non existent tablet. This could
    // be caused by the first table splitting concurrently with a metadata scan of the first tablet.
    List<TabletMetadata> tablets1 = Arrays.asList(create("4", "f", "m"), create("4", "m", null));
    List<TabletMetadata> tablets2 =
        Arrays.asList(create("4", null, "f"), create("4", "f", "m"), create("4", "m", null));
    check(tablets2, new IterFactory(tablets1, tablets2), TableId.of("4"));
    check(tablets2, new IterFactory(tablets1, tablets2),
        new KeyExtent(TableId.of("4"), null, new Text("e")).toMetaRange());
    // following should not care about missing tablet
    check(tablets1, new IterFactory(tablets1, tablets2),
        new KeyExtent(TableId.of("4"), null, new Text("g")).toMetaRange());
    check(tablets1, new IterFactory(tablets1, tablets2),
        new KeyExtent(TableId.of("4"), null, new Text("f")).toMetaRange());
  }

  @Test
  public void testIncompleteTable() {
    // the last tablet in a table should have a null end row. Ensure the code detects when this does
    // not happen.
    List<TabletMetadata> tablets1 = Arrays.asList(create("4", null, "f"), create("4", "f", "m"));
    LinkingIterator li = new LinkingIterator(new IterFactory(tablets1, tablets1),
        TabletsSection.getRange(TableId.of("4")));
    assertThrows(IllegalStateException.class, () -> {
      while (li.hasNext()) {
        li.next();
      }
    });
  }

  @Test
  public void testIncompleteTableWithRange() {
    // because the scan range does not got to end of table, this should not care about missing
    // tablets at end of table.
    List<TabletMetadata> tablets1 = Arrays.asList(create("4", null, "f"), create("4", "f", "m"));
    check(tablets1, new IterFactory(tablets1, tablets1),
        new KeyExtent(TableId.of("4"), new Text("r"), new Text("e")).toMetaRange());
  }
}
| 9,481 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/metadata | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/metadata/schema/SortSkewTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.metadata.schema;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import org.junit.jupiter.api.Test;
public class SortSkewTest {

  // a trivially short path and one far longer, to exercise both extremes of input length
  private static final String shortpath = "1";
  // NOTE: the original declaration ended with a stray ";;" (an empty class-body
  // declaration); the extra semicolon has been removed
  private static final String longpath =
      "/verylongpath/12345679xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxiiiiiiiiiiiiiiiiii/zzzzzzzzzzzzzzzzzzzzz"
          + "aaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbccccccccccccccccccccccccccxxxxxxxxxxxxxxxxxxxxxxxxyyyyyyyyyyyyyyyyzzzzzzzzzzzzzzzz";

  // these are values previously generated from SortSkew.getCode() for the above; they pin
  // the hash output so it cannot silently change between releases
  private static final String shortcode = "9416ac93";
  private static final String longcode = "b9ddf266";

  @Test
  public void verifyCodeSize() {
    // the generated code is fixed width regardless of input path length
    int expectedLength = SortSkew.SORTSKEW_LENGTH;
    assertEquals(expectedLength, SortSkew.getCode(shortpath).length());
    assertEquals(expectedLength, SortSkew.getCode(longpath).length());
  }

  @Test
  public void verifySame() {
    // equal inputs yield equal codes, and the code depends on character order
    assertEquals(SortSkew.getCode("123"), SortSkew.getCode("123"));
    assertNotEquals(SortSkew.getCode("123"), SortSkew.getCode("321"));
  }

  @Test
  public void verifyStable() {
    // codes must match values generated by earlier versions (stability across releases)
    assertEquals(shortcode, SortSkew.getCode(shortpath));
    assertEquals(longcode, SortSkew.getCode(longpath));
  }
}
| 9,482 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/metadata | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/metadata/schema/ReferencedTabletFileTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.metadata.schema;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.PartialKey;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.TableId;
import org.apache.accumulo.core.metadata.ReferencedTabletFile;
import org.apache.accumulo.core.metadata.StoredTabletFile;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
public class ReferencedTabletFileTest {

  /**
   * Serializes {@code metadataFile} into a StoredTabletFile, converts it to a
   * ReferencedTabletFile, and asserts the parsed volume, table id, tablet directory, and file
   * name match the expected values. Returns the parsed file for further assertions.
   */
  private ReferencedTabletFile test(String metadataFile, String volume, String tableId,
      String tabletDir, String fileName) {

    String metadataPath = StoredTabletFile.serialize(metadataFile);
    StoredTabletFile storedTabletFile = new StoredTabletFile(metadataPath);
    ReferencedTabletFile tabletFile = storedTabletFile.getTabletFile();

    // Make sure original file name wasn't changed when serialized
    assertTrue(metadataPath.contains(metadataFile));

    assertEquals(volume, tabletFile.getVolume());
    assertEquals(metadataPath, storedTabletFile.getMetadata());
    assertEquals(TableId.of(tableId), tabletFile.getTableId());
    assertEquals(tabletDir, tabletFile.getTabletDir());
    assertEquals(fileName, tabletFile.getFileName());
    return tabletFile;
  }

  @Test
  public void testValidPaths() {
    // well-formed paths: <volume>/tables/<tableId>/<tabletDir>/<fileName>
    test("hdfs://localhost:8020/accumulo/tables/2a/default_tablet/F0000070.rf",
        "hdfs://localhost:8020/accumulo", "2a", "default_tablet", "F0000070.rf");
    test("hdfs://nn1:9000/accumulo/tables/5a/t-0005/C0009.rf", "hdfs://nn1:9000/accumulo", "5a",
        "t-0005", "C0009.rf");
    test(
        "file:/home/dude/workspace/accumulo/test/target/mini-tests/org.apache.accumulo.test.VolumeIT_test/volumes/v1/tables/1/t-0000003/F0000006.rf",
        "file:/home/dude/workspace/accumulo/test/target/mini-tests/org.apache.accumulo.test.VolumeIT_test/volumes/v1",
        "1", "t-0000003", "F0000006.rf");
  }

  @Test
  public void testBadPaths() {
    // 2a< srv:dir
    // malformed paths (relative, missing the "tables" component, or missing path levels)
    // must be rejected when parsed
    final String message = "Failed to throw error on bad path";

    assertThrows(IllegalArgumentException.class,
        () -> test("C0004.rf", "", "2a", "t-0003", "C0004.rf"), message);
    assertThrows(IllegalArgumentException.class, () -> test("dir", "", "2a", "", ""), message);
    // "tablets" instead of the required "tables" component
    assertThrows(IllegalArgumentException.class,
        () -> test("hdfs://localhost:8020/accumulo/tablets/2a/default_tablet/F0000070.rf",
            "hdfs://localhost:8020/accumulo", "2a", "default_tablet", "F0000070.rf"),
        message);
    // no "tables" component at all
    assertThrows(IllegalArgumentException.class,
        () -> test("hdfs://localhost:8020/accumulo/2a/default_tablet/F0000070.rf",
            " hdfs://localhost:8020/accumulo", "2a", "default_tablet", " F0000070.rf"),
        message);
    // no volume/scheme
    assertThrows(IllegalArgumentException.class,
        () -> test("/accumulo/tables/2a/default_tablet/F0000070.rf", "", "2a", "default_tablet",
            "F0000070.rf"),
        message);
    // missing tablet directory level
    assertThrows(IllegalArgumentException.class,
        () -> test("hdfs://localhost:8020/accumulo/tables/2a/F0000070.rf",
            "hdfs://localhost:8020/accumulo", "2a", "", "F0000070.rf"),
        message);
    // missing table id level
    assertThrows(IllegalArgumentException.class,
        () -> test("hdfs://localhost:8020/accumulo/tables/F0000070.rf",
            "hdfs://localhost:8020/accumulo", null, "", "F0000070.rf"),
        message);
  }

  // shared components used to assemble paths in the tests below
  private final String id = "2a";
  private final String dir = "t-0003";
  private final String filename = "C0004.rf";

  @Test
  public void testFullPathWithVolume() {
    String volume = "hdfs://1.2.3.4/accumulo";
    String metadataEntry = volume + "/tables/" + id + "/" + dir + "/" + filename;
    test(metadataEntry, volume, id, dir, filename);
  }

  @Test
  public void testNormalizePath() {
    // a path littered with ".", "..", and duplicate slashes must normalize to the same
    // file (equals and hashCode) as the clean form of that path
    String uglyVolume = "hdfs://nn.somewhere.com:86753/accumulo/blah/.././/bad/bad2/../.././/////";
    String metadataEntry = uglyVolume + "/tables/" + id + "/" + dir + "/" + filename;
    ReferencedTabletFile uglyFile =
        test(metadataEntry, "hdfs://nn.somewhere.com:86753/accumulo", id, dir, filename);
    ReferencedTabletFile niceFile = StoredTabletFile
        .of(new Path(
            "hdfs://nn.somewhere.com:86753/accumulo/tables/" + id + "/" + dir + "/" + filename))
        .getTabletFile();
    assertEquals(niceFile, uglyFile);
    assertEquals(niceFile.hashCode(), uglyFile.hashCode());
  }

  @Test
  public void testNonRowRange() {
    // a ReferencedTabletFile's range must be a row range: row-only keys, inclusive start,
    // exclusive end terminated by a trailing 0x00 byte
    Path testPath = new Path("hdfs://localhost:8020/accumulo/tables/2a/default_tablet/F0000070.rf");

    // range where start key is not a row key
    Range r1 = new Range(new Key("r1", "f1"), true, null, false);
    assertThrows(IllegalArgumentException.class, () -> new ReferencedTabletFile(testPath, r1));

    // range where end key is not a row key
    Range r2 = new Range(null, true, new Key("r1", "f1"), false);
    assertThrows(IllegalArgumentException.class, () -> new ReferencedTabletFile(testPath, r2));

    // range where the key looks like a row, but the start key inclusivity is not whats expected
    Range r3 = new Range(new Key("r1"), false, new Key("r2"), false);
    assertThrows(IllegalArgumentException.class, () -> new ReferencedTabletFile(testPath, r3));

    // range where the key looks like a row, but the end key inclusivity is not whats expected
    Range r4 = new Range(new Key("r1"), true, new Key("r2"), true);
    assertThrows(IllegalArgumentException.class, () -> new ReferencedTabletFile(testPath, r4));

    // range where end key does not end with correct byte and is marked exclusive false
    Range r5 = new Range(new Key("r1"), true, new Key("r2"), false);
    assertThrows(IllegalArgumentException.class, () -> new ReferencedTabletFile(testPath, r5));

    // This is valid as the end key is exclusive and ends in 0x00
    Range r6 = new Range(new Key("r1"), true, new Key("r2").followingKey(PartialKey.ROW), false);
    assertTrue(new ReferencedTabletFile(testPath, r6).hasRange());

    // This is valid as the end key will be converted to exclusive and 0x00 should be appended
    Range r7 = new Range(new Text("r1"), true, new Text("r2"), true);
    assertTrue(new ReferencedTabletFile(testPath, r7).hasRange());
  }
}
| 9,483 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/metadata | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/metadata/schema/RootTabletMetadataTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.metadata.schema;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.apache.accumulo.core.metadata.StoredTabletFile;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class RootTabletMetadataTest {
private static final Logger LOG = LoggerFactory.getLogger(RootTabletMetadataTest.class);
  @Test
  public void convertRoot1File() {
    // version-1 root tablet metadata as persisted in ZooKeeper by an Accumulo 2.1
    // instance, referencing a single data file by its plain path string
    String root21ZkData =
        "{\"version\":1,\"columnValues\":{\"file\":{\"hdfs://localhost:8020/accumulo/tables/+r/root_tablet/A000000v.rf\":\"1368,61\"},\"last\":{\"100025091780006\":\"localhost:9997\"},\"loc\":{\"100025091780006\":\"localhost:9997\"},\"srv\":{\"dir\":\"root_tablet\",\"flush\":\"3\",\"lock\":\"tservers/localhost:9997/zlock#9db8961a-4ee9-400e-8e80-3353148baadd#0000000000$100025091780006\",\"time\":\"L53\"},\"~tab\":{\"~pr\":\"\\u0000\"}}}";
    // upgrading must carry the single file reference over into the new format
    RootTabletMetadata rtm = RootTabletMetadata.upgrade(root21ZkData);
    LOG.debug("converted column values: {}", rtm.toTabletMetadata().getFiles());
    var files = rtm.toTabletMetadata().getFiles();
    LOG.info("FILES: {}", rtm.toTabletMetadata().getFilesMap());
    assertEquals(1, files.size());
    assertTrue(files.contains(StoredTabletFile
        .of(new Path("hdfs://localhost:8020/accumulo/tables/+r/root_tablet/A000000v.rf"))));
  }
  @Test
  public void convertRoot2Files() {
    // version-1 root tablet metadata from a 2.1.2 instance referencing two data files
    String root212ZkData2Files =
        "{\"version\":1,\"columnValues\":{\"file\":{\"hdfs://localhost:8020/accumulo/tables/+r/root_tablet/00000_00000.rf\":\"0,0\",\"hdfs://localhost:8020/accumulo/tables/+r/root_tablet/F000000c.rf\":\"926,18\"},\"last\":{\"10001a84d7d0005\":\"localhost:9997\"},\"loc\":{\"10001a84d7d0005\":\"localhost:9997\"},\"srv\":{\"dir\":\"root_tablet\",\"flush\":\"2\",\"lock\":\"tservers/localhost:9997/zlock#d21adaa4-0f97-4004-9ff8-cce9dbb6687f#0000000000$10001a84d7d0005\",\"time\":\"L6\"},\"~tab\":{\"~pr\":\"\\u0000\"}}}\n";
    // upgrading must preserve both file references
    RootTabletMetadata rtm = RootTabletMetadata.upgrade(root212ZkData2Files);
    LOG.debug("converted column values: {}", rtm.toTabletMetadata());
    var files = rtm.toTabletMetadata().getFiles();
    LOG.info("FILES: {}", rtm.toTabletMetadata().getFilesMap());
    assertEquals(2, files.size());
    assertTrue(files.contains(StoredTabletFile
        .of(new Path("hdfs://localhost:8020/accumulo/tables/+r/root_tablet/00000_00000.rf"))));
    assertTrue(files.contains(StoredTabletFile
        .of(new Path("hdfs://localhost:8020/accumulo/tables/+r/root_tablet/F000000c.rf"))));
  }
@Test
public void needsUpgradeTest() {
String root212ZkData2Files =
"{\"version\":1,\"columnValues\":{\"file\":{\"hdfs://localhost:8020/accumulo/tables/+r/root_tablet/00000_00000.rf\":\"0,0\",\"hdfs://localhost:8020/accumulo/tables/+r/root_tablet/F000000c.rf\":\"926,18\"},\"last\":{\"10001a84d7d0005\":\"localhost:9997\"},\"loc\":{\"10001a84d7d0005\":\"localhost:9997\"},\"srv\":{\"dir\":\"root_tablet\",\"flush\":\"2\",\"lock\":\"tservers/localhost:9997/zlock#d21adaa4-0f97-4004-9ff8-cce9dbb6687f#0000000000$10001a84d7d0005\",\"time\":\"L6\"},\"~tab\":{\"~pr\":\"\\u0000\"}}}\n";
assertTrue(RootTabletMetadata.needsUpgrade(root212ZkData2Files));
String converted =
"{\"version\":2,\"columnValues\":{\"file\":{\"{\\\"path\\\":\\\"hdfs://localhost:8020/accumulo/tables/+r/root_tablet/A0000013.rf\\\",\\\"startRow\\\":\\\"\\\",\\\"endRow\\\":\\\"\\\"}\":\"974,19\",\"{\\\"path\\\":\\\"hdfs://localhost:8020/accumulo/tables/+r/root_tablet/F0000014.rf\\\",\\\"startRow\\\":\\\"\\\",\\\"endRow\\\":\\\"\\\"}\":\"708,8\"},\"last\":{\"100024ec6110005\":\"localhost:9997\"},\"srv\":{\"dir\":\"root_tablet\",\"flush\":\"6\",\"lock\":\"tservers/localhost:9997/zlock#0f3000c9-ecf9-4bcd-8790-066c3f7a3818#0000000000$100024ec6110005\",\"time\":\"L43\"},\"~tab\":{\"~pr\":\"\\u0000\"}}}";
assertFalse(RootTabletMetadata.needsUpgrade(converted));
}
@Test
public void ignoresConvertedTest() {
String converted =
"{\"version\":2,\"columnValues\":{\"file\":{\"{\\\"path\\\":\\\"hdfs://localhost:8020/accumulo/tables/+r/root_tablet/A0000013.rf\\\",\\\"startRow\\\":\\\"\\\",\\\"endRow\\\":\\\"\\\"}\":\"974,19\",\"{\\\"path\\\":\\\"hdfs://localhost:8020/accumulo/tables/+r/root_tablet/F0000014.rf\\\",\\\"startRow\\\":\\\"\\\",\\\"endRow\\\":\\\"\\\"}\":\"708,8\"},\"last\":{\"100024ec6110005\":\"localhost:9997\"},\"srv\":{\"dir\":\"root_tablet\",\"flush\":\"6\",\"lock\":\"tservers/localhost:9997/zlock#0f3000c9-ecf9-4bcd-8790-066c3f7a3818#0000000000$100024ec6110005\",\"time\":\"L43\"},\"~tab\":{\"~pr\":\"\\u0000\"}}}";
assertFalse(RootTabletMetadata.needsUpgrade(converted));
RootTabletMetadata rtm = RootTabletMetadata.upgrade(converted);
var files = rtm.toTabletMetadata().getFiles();
assertEquals(2, files.size());
assertTrue(files.contains(StoredTabletFile
.of(new Path("hdfs://localhost:8020/accumulo/tables/+r/root_tablet/A0000013.rf"))));
assertTrue(files.contains(StoredTabletFile
.of(new Path("hdfs://localhost:8020/accumulo/tables/+r/root_tablet/F0000014.rf"))));
}
@Test
public void invalidVersionTest() {
String valid =
"{\"version\":1,\"columnValues\":{\"file\":{\"hdfs://localhost:8020/accumulo/tables/+r/root_tablet/A000000v.rf\":\"1368,61\"},\"last\":{\"100025091780006\":\"localhost:9997\"},\"loc\":{\"100025091780006\":\"localhost:9997\"},\"srv\":{\"dir\":\"root_tablet\",\"flush\":\"3\",\"lock\":\"tservers/localhost:9997/zlock#9db8961a-4ee9-400e-8e80-3353148baadd#0000000000$100025091780006\",\"time\":\"L53\"},\"~tab\":{\"~pr\":\"\\u0000\"}}}";
// only version changed to invalid value
String invalid =
"{\"version\":-1,\"columnValues\":{\"file\":{\"hdfs://localhost:8020/accumulo/tables/+r/root_tablet/A000000v.rf\":\"1368,61\"},\"last\":{\"100025091780006\":\"localhost:9997\"},\"loc\":{\"100025091780006\":\"localhost:9997\"},\"srv\":{\"dir\":\"root_tablet\",\"flush\":\"3\",\"lock\":\"tservers/localhost:9997/zlock#9db8961a-4ee9-400e-8e80-3353148baadd#0000000000$100025091780006\",\"time\":\"L53\"},\"~tab\":{\"~pr\":\"\\u0000\"}}}";
assertTrue(RootTabletMetadata.needsUpgrade(valid));
RootTabletMetadata rtm = RootTabletMetadata.upgrade(valid);
var files = rtm.toTabletMetadata().getFiles();
assertEquals(1, files.size());
assertTrue(files.contains(StoredTabletFile
.of(new Path("hdfs://localhost:8020/accumulo/tables/+r/root_tablet/A000000v.rf"))));
// valid json with files, so try conversion
assertTrue(RootTabletMetadata.needsUpgrade(invalid));
assertThrows(IllegalArgumentException.class, () -> RootTabletMetadata.upgrade(invalid));
}
}
| 9,484 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/metadata | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/metadata/schema/MetadataTimeTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.metadata.schema;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.apache.accumulo.core.client.admin.TimeType;
import org.junit.jupiter.api.Test;
public class MetadataTimeTest {
  // Fixtures: two distinct timestamps for each of the two time types.
  private static final MetadataTime m1234 = new MetadataTime(1234, TimeType.MILLIS);
  private static final MetadataTime m5678 = new MetadataTime(5678, TimeType.MILLIS);
  private static final MetadataTime l1234 = new MetadataTime(1234, TimeType.LOGICAL);
  private static final MetadataTime l5678 = new MetadataTime(5678, TimeType.LOGICAL);

  /** An unknown type code character is rejected. */
  @Test
  public void testGetInstance_InvalidType() {
    assertThrows(IllegalArgumentException.class, () -> MetadataTime.parse("X1234"));
  }

  /** A logical-type prefix with a non-numeric time portion is rejected. */
  @Test
  public void testGetInstance_Logical_ParseFailure() {
    assertThrows(IllegalArgumentException.class, () -> MetadataTime.parse("LABCD"));
  }

  /** A millis-type prefix with a non-numeric time portion is rejected. */
  @Test
  public void testGetInstance_Millis_ParseFailure() {
    assertThrows(IllegalArgumentException.class, () -> MetadataTime.parse("MABCD"));
  }

  @Test
  public void testGetInstance_nullArgument() {
    assertThrows(IllegalArgumentException.class, () -> MetadataTime.parse(null));
  }

  /** Empty strings and type-code-only strings are rejected. */
  @Test
  public void testGetInstance_Invalid_timestr() {
    assertThrows(IllegalArgumentException.class, () -> MetadataTime.parse(""));
    assertThrows(IllegalArgumentException.class, () -> MetadataTime.parse("X"));
  }

  @Test
  public void testGetInstance_Millis() {
    assertEquals(1234, m1234.getTime());
    assertEquals(TimeType.MILLIS, m1234.getType());
  }

  @Test
  public void testGetInstance_Logical() {
    assertEquals(1234, l1234.getTime());
    assertEquals(TimeType.LOGICAL, l1234.getType());
  }

  /** Equality requires both the same time value and the same time type. */
  @Test
  public void testEquality() {
    assertEquals(m1234, new MetadataTime(1234, TimeType.MILLIS));
    assertNotEquals(m1234, l1234);
    assertNotEquals(l1234, l5678);
  }

  @Test
  public void testValueOfM() {
    assertEquals(TimeType.MILLIS, MetadataTime.getType('M'));
  }

  @Test
  public void testValueOfL() {
    assertEquals(TimeType.LOGICAL, MetadataTime.getType('L'));
  }

  @Test
  public void testValueOfOtherChar() {
    assertThrows(IllegalArgumentException.class, () -> MetadataTime.getType('x'));
  }

  @Test
  public void testgetCodeforTimeType() {
    assertEquals('M', MetadataTime.getCode(TimeType.MILLIS));
    assertEquals('L', MetadataTime.getCode(TimeType.LOGICAL));
  }

  @Test
  public void testgetCodeforMillis() {
    assertEquals('M', m1234.getCode());
  }

  @Test
  public void testgetCodeforLogical() {
    assertEquals('L', l1234.getCode());
  }

  /** Encoding is the type code character followed by the decimal time value. */
  @Test
  public void testenCode() {
    assertEquals("M21", new MetadataTime(21, TimeType.MILLIS).encode());
    assertEquals("L45678", new MetadataTime(45678, TimeType.LOGICAL).encode());
  }

  /** Comparing across time types is not allowed (MILLIS vs LOGICAL). */
  @Test
  public void testCompareTypesDiffer1() {
    assertThrows(IllegalArgumentException.class, () -> m1234.compareTo(l1234));
  }

  @Test
  public void testCompareTypesDiffer2() {
    assertThrows(IllegalArgumentException.class, () -> l1234.compareTo(m1234));
  }

  @Test
  public void testCompareSame() {
    // assertEquals gives a precise failure message, unlike assertTrue(cmp == 0)
    assertEquals(0, m1234.compareTo(m1234));
    assertEquals(0, l1234.compareTo(l1234));
  }

  @Test
  public void testCompare1() {
    assertTrue(m1234.compareTo(m5678) < 0);
    assertTrue(l1234.compareTo(l5678) < 0);
  }

  @Test
  public void testCompare2() {
    assertTrue(m5678.compareTo(m1234) > 0);
    assertTrue(l5678.compareTo(l1234) > 0);
  }
}
| 9,485 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/metadata | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/metadata/schema/TabletMetadataTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.metadata.schema;
import static java.util.stream.Collectors.toSet;
import static org.apache.accumulo.core.metadata.StoredTabletFile.serialize;
import static org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.ServerColumnFamily.COMPACT_COLUMN;
import static org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.ServerColumnFamily.DIRECTORY_COLUMN;
import static org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.ServerColumnFamily.FLUSH_COLUMN;
import static org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.ServerColumnFamily.TIME_COLUMN;
import static org.apache.accumulo.core.metadata.schema.TabletMetadata.ColumnType.LAST;
import static org.apache.accumulo.core.metadata.schema.TabletMetadata.ColumnType.LOCATION;
import static org.apache.accumulo.core.metadata.schema.TabletMetadata.ColumnType.SUSPEND;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.EnumSet;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.UUID;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.TableId;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.dataImpl.KeyExtent;
import org.apache.accumulo.core.fate.FateTxId;
import org.apache.accumulo.core.metadata.StoredTabletFile;
import org.apache.accumulo.core.metadata.SuspendingTServer;
import org.apache.accumulo.core.metadata.TServerInstance;
import org.apache.accumulo.core.metadata.TabletState;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.BulkFileColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.ClonedColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.CurrentLocationColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.DataFileColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.FutureLocationColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.LastLocationColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.LogColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.ScanFileColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.SuspendLocationColumn;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.TabletColumnFamily;
import org.apache.accumulo.core.metadata.schema.TabletMetadata.ColumnType;
import org.apache.accumulo.core.metadata.schema.TabletMetadata.LocationType;
import org.apache.accumulo.core.tabletserver.log.LogEntry;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
import com.google.common.net.HostAndPort;
public class TabletMetadataTest {
  /**
   * Builds a mutation populating every column family/column this test covers, converts the
   * resulting row with all ColumnType values fetched, and verifies each getter round-trips.
   */
  @Test
  public void testAllColumns() {
    KeyExtent extent = new KeyExtent(TableId.of("5"), new Text("df"), new Text("da"));
    Mutation mutation = TabletColumnFamily.createPrevRowMutation(extent);
    // server columns: compact id, directory name, flush id, and time (millis, "M" prefix)
    COMPACT_COLUMN.put(mutation, new Value("5"));
    DIRECTORY_COLUMN.put(mutation, new Value("t-0001757"));
    FLUSH_COLUMN.put(mutation, new Value("6"));
    TIME_COLUMN.put(mutation, new Value("M123456789"));
    // bulk-load entries: qualifier is the serialized file, value is a FATE transaction id
    String bf1 = serialize("hdfs://nn1/acc/tables/1/t-0001/bf1");
    String bf2 = serialize("hdfs://nn1/acc/tables/1/t-0001/bf2");
    mutation.at().family(BulkFileColumnFamily.NAME).qualifier(bf1).put(FateTxId.formatTid(56));
    mutation.at().family(BulkFileColumnFamily.NAME).qualifier(bf2).put(FateTxId.formatTid(59));
    mutation.at().family(ClonedColumnFamily.NAME).qualifier("").put("OK");
    // data files with their DataFileValue (size, entry count) encodings
    DataFileValue dfv1 = new DataFileValue(555, 23);
    StoredTabletFile tf1 = StoredTabletFile.of(new Path("hdfs://nn1/acc/tables/1/t-0001/df1.rf"));
    StoredTabletFile tf2 = StoredTabletFile.of(new Path("hdfs://nn1/acc/tables/1/t-0001/df2.rf"));
    mutation.at().family(DataFileColumnFamily.NAME).qualifier(tf1.getMetadata()).put(dfv1.encode());
    DataFileValue dfv2 = new DataFileValue(234, 13);
    mutation.at().family(DataFileColumnFamily.NAME).qualifier(tf2.getMetadata()).put(dfv2.encode());
    // current location (session s001) and last location (session s000)
    mutation.at().family(CurrentLocationColumnFamily.NAME).qualifier("s001").put("server1:8555");
    mutation.at().family(LastLocationColumnFamily.NAME).qualifier("s000").put("server2:8555");
    // two write-ahead log entries with random UUID-based paths
    LogEntry le1 = new LogEntry("localhost:8020/" + UUID.randomUUID());
    mutation.at().family(LogColumnFamily.NAME).qualifier(le1.getColumnQualifier())
        .put(le1.getValue());
    LogEntry le2 = new LogEntry("localhost:8020/" + UUID.randomUUID());
    mutation.at().family(LogColumnFamily.NAME).qualifier(le2.getColumnQualifier())
        .put(le2.getValue());
    // scan file entries have empty values; only the qualifier (file) matters
    StoredTabletFile sf1 = StoredTabletFile.of(new Path("hdfs://nn1/acc/tables/1/t-0001/sf1.rf"));
    StoredTabletFile sf2 = StoredTabletFile.of(new Path("hdfs://nn1/acc/tables/1/t-0001/sf2.rf"));
    mutation.at().family(ScanFileColumnFamily.NAME).qualifier(sf1.getMetadata()).put("");
    mutation.at().family(ScanFileColumnFamily.NAME).qualifier(sf2.getMetadata()).put("");
    SortedMap<Key,Value> rowMap = toRowMap(mutation);
    TabletMetadata tm = TabletMetadata.convertRow(rowMap.entrySet().iterator(),
        EnumSet.allOf(ColumnType.class), true);
    // verify every value written above is surfaced through the corresponding getter
    assertEquals("OK", tm.getCloned());
    assertEquals(5L, tm.getCompactId().getAsLong());
    assertEquals("t-0001757", tm.getDirName());
    assertEquals(extent.endRow(), tm.getEndRow());
    assertEquals(extent, tm.getExtent());
    assertEquals(Set.of(tf1, tf2), Set.copyOf(tm.getFiles()));
    assertEquals(Map.of(tf1, dfv1, tf2, dfv2), tm.getFilesMap());
    assertEquals(6L, tm.getFlushId().getAsLong());
    assertEquals(rowMap, tm.getKeyValues());
    assertEquals(Map.of(new StoredTabletFile(bf1), 56L, new StoredTabletFile(bf2), 59L),
        tm.getLoaded());
    assertEquals(HostAndPort.fromParts("server1", 8555), tm.getLocation().getHostAndPort());
    assertEquals("s001", tm.getLocation().getSession());
    assertEquals(LocationType.CURRENT, tm.getLocation().getType());
    assertTrue(tm.hasCurrent());
    assertEquals(HostAndPort.fromParts("server2", 8555), tm.getLast().getHostAndPort());
    assertEquals("s000", tm.getLast().getSession());
    assertEquals(LocationType.LAST, tm.getLast().getType());
    assertEquals(Set.of(le1.getValue(), le2.getValue()),
        tm.getLogs().stream().map(LogEntry::getValue).collect(toSet()));
    assertEquals(extent.prevEndRow(), tm.getPrevEndRow());
    assertEquals(extent.tableId(), tm.getTableId());
    assertTrue(tm.sawPrevEndRow());
    assertEquals("M123456789", tm.getTime().encode());
    assertEquals(Set.of(sf1, sf2), Set.copyOf(tm.getScans()));
  }

  /**
   * A row with only a future location converts to LocationType.FUTURE, and the tablet is not
   * considered current.
   */
  @Test
  public void testFuture() {
    KeyExtent extent = new KeyExtent(TableId.of("5"), new Text("df"), new Text("da"));
    Mutation mutation = TabletColumnFamily.createPrevRowMutation(extent);
    mutation.at().family(FutureLocationColumnFamily.NAME).qualifier("s001").put("server1:8555");
    SortedMap<Key,Value> rowMap = toRowMap(mutation);
    TabletMetadata tm = TabletMetadata.convertRow(rowMap.entrySet().iterator(),
        EnumSet.allOf(ColumnType.class), false);
    assertEquals(extent, tm.getExtent());
    assertEquals(HostAndPort.fromParts("server1", 8555), tm.getLocation().getHostAndPort());
    assertEquals("s001", tm.getLocation().getSession());
    assertEquals(LocationType.FUTURE, tm.getLocation().getType());
    assertFalse(tm.hasCurrent());
  }

  /**
   * A row with both a current and a future location is invalid and conversion must throw.
   */
  @Test
  public void testFutureAndCurrent() {
    KeyExtent extent = new KeyExtent(TableId.of("5"), new Text("df"), new Text("da"));
    Mutation mutation = TabletColumnFamily.createPrevRowMutation(extent);
    mutation.at().family(CurrentLocationColumnFamily.NAME).qualifier("s001").put("server1:8555");
    mutation.at().family(FutureLocationColumnFamily.NAME).qualifier("s001").put("server1:8555");
    SortedMap<Key,Value> rowMap = toRowMap(mutation);
    assertThrows(IllegalStateException.class, () -> TabletMetadata
        .convertRow(rowMap.entrySet().iterator(), EnumSet.allOf(ColumnType.class), false));
  }

  /**
   * Exercises getTabletState against a set of live tservers for each state: ASSIGNED (future
   * location on a live server), HOSTED (current location on a live server),
   * ASSIGNED_TO_DEAD_SERVER (current location on a server not in the live set), UNASSIGNED (no
   * location), and SUSPENDED (suspend column set).
   */
  @Test
  public void testLocationStates() {
    KeyExtent extent = new KeyExtent(TableId.of("5"), new Text("df"), new Text("da"));
    TServerInstance ser1 = new TServerInstance(HostAndPort.fromParts("server1", 8555), "s001");
    TServerInstance ser2 = new TServerInstance(HostAndPort.fromParts("server2", 8111), "s002");
    // deadSer is intentionally left out of the live tserver set below
    TServerInstance deadSer = new TServerInstance(HostAndPort.fromParts("server3", 8000), "s003");
    Set<TServerInstance> tservers = new LinkedHashSet<>();
    tservers.add(ser1);
    tservers.add(ser2);
    EnumSet<ColumnType> colsToFetch = EnumSet.of(LOCATION, LAST, SUSPEND);
    // test assigned
    Mutation mutation = TabletColumnFamily.createPrevRowMutation(extent);
    mutation.at().family(FutureLocationColumnFamily.NAME).qualifier(ser1.getSession())
        .put(ser1.getHostPort());
    SortedMap<Key,Value> rowMap = toRowMap(mutation);
    TabletMetadata tm = TabletMetadata.convertRow(rowMap.entrySet().iterator(), colsToFetch, false);
    TabletState state = tm.getTabletState(tservers);
    assertEquals(TabletState.ASSIGNED, state);
    assertEquals(ser1, tm.getLocation().getServerInstance());
    assertEquals(ser1.getSession(), tm.getLocation().getSession());
    assertEquals(LocationType.FUTURE, tm.getLocation().getType());
    assertFalse(tm.hasCurrent());
    // test hosted
    mutation = TabletColumnFamily.createPrevRowMutation(extent);
    mutation.at().family(CurrentLocationColumnFamily.NAME).qualifier(ser2.getSession())
        .put(ser2.getHostPort());
    rowMap = toRowMap(mutation);
    tm = TabletMetadata.convertRow(rowMap.entrySet().iterator(), colsToFetch, false);
    assertEquals(TabletState.HOSTED, tm.getTabletState(tservers));
    assertEquals(ser2, tm.getLocation().getServerInstance());
    assertEquals(ser2.getSession(), tm.getLocation().getSession());
    assertEquals(LocationType.CURRENT, tm.getLocation().getType());
    assertTrue(tm.hasCurrent());
    // test ASSIGNED_TO_DEAD_SERVER
    mutation = TabletColumnFamily.createPrevRowMutation(extent);
    mutation.at().family(CurrentLocationColumnFamily.NAME).qualifier(deadSer.getSession())
        .put(deadSer.getHostPort());
    rowMap = toRowMap(mutation);
    tm = TabletMetadata.convertRow(rowMap.entrySet().iterator(), colsToFetch, false);
    assertEquals(TabletState.ASSIGNED_TO_DEAD_SERVER, tm.getTabletState(tservers));
    assertEquals(deadSer, tm.getLocation().getServerInstance());
    assertEquals(deadSer.getSession(), tm.getLocation().getSession());
    assertEquals(LocationType.CURRENT, tm.getLocation().getType());
    assertTrue(tm.hasCurrent());
    // test UNASSIGNED
    mutation = TabletColumnFamily.createPrevRowMutation(extent);
    rowMap = toRowMap(mutation);
    tm = TabletMetadata.convertRow(rowMap.entrySet().iterator(), colsToFetch, false);
    assertEquals(TabletState.UNASSIGNED, tm.getTabletState(tservers));
    assertNull(tm.getLocation());
    assertFalse(tm.hasCurrent());
    // test SUSPENDED
    mutation = TabletColumnFamily.createPrevRowMutation(extent);
    mutation.at().family(SuspendLocationColumn.SUSPEND_COLUMN.getColumnFamily())
        .qualifier(SuspendLocationColumn.SUSPEND_COLUMN.getColumnQualifier())
        .put(SuspendingTServer.toValue(ser2, 1000L));
    rowMap = toRowMap(mutation);
    tm = TabletMetadata.convertRow(rowMap.entrySet().iterator(), colsToFetch, false);
    assertEquals(TabletState.SUSPENDED, tm.getTabletState(tservers));
    assertEquals(1000L, tm.getSuspend().suspensionTime);
    assertEquals(ser2.getHostAndPort(), tm.getSuspend().server);
    assertNull(tm.getLocation());
    assertFalse(tm.hasCurrent());
  }

  /**
   * Converts a mutation's column updates into the sorted key/value map form that
   * TabletMetadata.convertRow consumes.
   */
  private SortedMap<Key,Value> toRowMap(Mutation mutation) {
    SortedMap<Key,Value> rowMap = new TreeMap<>();
    mutation.getUpdates().forEach(cu -> {
      Key k = new Key(mutation.getRow(), cu.getColumnFamily(), cu.getColumnQualifier(),
          cu.getTimestamp());
      Value v = new Value(cu.getValue());
      rowMap.put(k, v);
    });
    return rowMap;
  }
}
| 9,486 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/summary/SummaryCollectionTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.summary;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import org.apache.accumulo.core.client.summary.SummarizerConfiguration;
import org.apache.accumulo.core.client.summary.Summary;
import org.apache.accumulo.core.client.summary.Summary.FileStatistics;
import org.apache.accumulo.core.client.summary.summarizers.FamilySummarizer;
import org.apache.accumulo.core.summary.SummaryCollection.FileSummary;
import org.junit.jupiter.api.Test;
public class SummaryCollectionTest {
  /**
   * Merges summary collections representing files in different states and verifies the merged
   * file statistics (and their Thrift round-trip) report the expected counts.
   */
  @Test
  public void testDeleted() {
    SummarizerConfiguration config =
        SummarizerConfiguration.builder(FamilySummarizer.class).build();

    // a file with a normal summary (flag false)
    HashMap<String,Long> counts1 = new HashMap<>();
    counts1.put("c:foo", 9L);
    SummaryCollection scNormal =
        new SummaryCollection(Collections.singleton(new FileSummary(config, counts1, false)));

    // a file whose summary carries the flag (true) — counted under getExtra() below
    HashMap<String,Long> counts2 = new HashMap<>();
    counts2.put("c:foo", 5L);
    counts2.put("c:bar", 3L);
    SummaryCollection scFlagged =
        new SummaryCollection(Collections.singleton(new FileSummary(config, counts2, true)));

    // a collection with no file summaries, and one constructed with the deleted flag
    SummaryCollection scEmpty = new SummaryCollection(Collections.emptyList());
    SummaryCollection scDeleted = new SummaryCollection(Collections.emptyList(), true);

    // merge all parts; scDeleted is merged twice on purpose, so getDeleted() should be 2
    SummarizerFactory factory = new SummarizerFactory();
    SummaryCollection merged = new SummaryCollection();
    for (SummaryCollection part : Arrays.asList(scNormal, scFlagged, scEmpty, scDeleted,
        scDeleted)) {
      merged.merge(part, factory);
    }

    // the same statistics must survive a Thrift serialization round trip
    for (SummaryCollection sc : Arrays.asList(merged, new SummaryCollection(merged.toThrift()))) {
      List<Summary> summaries = sc.getSummaries();
      assertEquals(1, summaries.size());
      FileStatistics filestats = summaries.get(0).getFileStatistics();
      assertEquals(5, filestats.getTotal());
      assertEquals(1, filestats.getExtra());
      assertEquals(0, filestats.getLarge());
      assertEquals(1, filestats.getMissing());
      assertEquals(2, filestats.getDeleted());
      assertEquals(4, filestats.getInaccurate());
    }
  }
}
| 9,487 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators/DefaultIteratorEnvironment.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.iterators;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.conf.DefaultConfiguration;
import org.apache.hadoop.conf.Configuration;
/**
 * Minimal {@link IteratorEnvironment} implementation used by tests. Only
 * {@code isSamplingEnabled} is overridden here; other environment methods fall back to the
 * interface. NOTE(review): presumably the interface defaults are sufficient for the tests that
 * use this class — confirm against IteratorEnvironment.
 */
public class DefaultIteratorEnvironment implements IteratorEnvironment {
  // Accumulo configuration backing this environment (set by one of the constructors).
  AccumuloConfiguration conf;
  // Hadoop configuration built from default resources.
  Configuration hadoopConf = new Configuration();
  /**
   * Creates an environment backed by the given Accumulo configuration.
   */
  public DefaultIteratorEnvironment(AccumuloConfiguration conf) {
    this.conf = conf;
  }
  /**
   * Creates an environment backed by {@link DefaultConfiguration#getInstance()}.
   */
  public DefaultIteratorEnvironment() {
    this.conf = DefaultConfiguration.getInstance();
  }
  @Override
  public boolean isSamplingEnabled() {
    // sampling is never enabled in this test environment
    return false;
  }
}
| 9,488 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators/FirstEntryInRowTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.iterators;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.TreeMap;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.iteratorsImpl.system.SortedMapIterator;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
public class FirstEntryInRowTest {
  private static final Map<String,String> EMPTY_MAP = new HashMap<>();
  private static final Collection<ByteSequence> EMPTY_SET = new HashSet<>();

  /** Builds a key from string row/family/qualifier parts with the given timestamp. */
  private static Key key(String row, String cf, String cq, long ts) {
    return new Key(new Text(row), new Text(cf), new Text(cq), ts);
  }

  /** Builds a key whose parts are zero-padded six-digit renderings of the given ints. */
  private static Key key(int row, int cf, int cq, long ts) {
    return key(String.format("%06d", row), String.format("%06d", cf), String.format("%06d", cq),
        ts);
  }

  /** Inserts a string-valued entry into the test data map. */
  private static void insert(TreeMap<Key,Value> data, String row, String cf, String cq, long ts,
      String val) {
    data.put(key(row, cf, cq, ts), new Value(val));
  }

  /** Inserts an int-valued entry (stored as its decimal string) into the test data map. */
  private static void insert(TreeMap<Key,Value> data, int row, int cf, int cq, long ts, int val) {
    data.put(key(row, cf, cq, ts), new Value(Integer.toString(val)));
  }

  /** Asserts the iterator's current entry matches the expectation, then advances it. */
  private static void expectAndAdvance(FirstEntryInRowIterator iter, String row, String cf,
      String cq, long ts, String val) throws Exception {
    assertTrue(iter.hasTop());
    assertEquals(key(row, cf, cq, ts), iter.getTopKey());
    assertEquals(val, iter.getTopValue().toString());
    iter.next();
  }

  /** Asserts the iterator's current int-valued entry matches, then advances it. */
  private static void expectAndAdvance(FirstEntryInRowIterator iter, int row, int cf, int cq,
      long ts, int val) throws Exception {
    assertTrue(iter.hasTop());
    assertEquals(key(row, cf, cq, ts), iter.getTopKey());
    assertEquals(val, Integer.parseInt(iter.getTopValue().toString()));
    iter.next();
  }

  @Test
  public void test1() throws Exception {
    TreeMap<Key,Value> data = new TreeMap<>();
    insert(data, "r1", "cf1", "cq1", 5, "v1");
    insert(data, "r1", "cf1", "cq3", 5, "v2");
    insert(data, "r2", "cf1", "cq1", 5, "v3");
    insert(data, "r2", "cf2", "cq4", 5, "v4");
    insert(data, "r2", "cf2", "cq5", 5, "v5");
    insert(data, "r3", "cf3", "cq6", 5, "v6");

    FirstEntryInRowIterator iter = new FirstEntryInRowIterator();
    iter.init(new SortedMapIterator(data), EMPTY_MAP, null);
    iter.seek(new Range(), EMPTY_SET, false);

    // only the first entry of each of the three rows should be returned
    expectAndAdvance(iter, "r1", "cf1", "cq1", 5, "v1");
    expectAndAdvance(iter, "r2", "cf1", "cq1", 5, "v3");
    expectAndAdvance(iter, "r3", "cf3", "cq6", 5, "v6");
    assertFalse(iter.hasTop());
  }

  @Test
  public void test2() throws Exception {
    // rows 0..4, families row..99, qualifiers 3..5; value is the product of the parts
    TreeMap<Key,Value> data = new TreeMap<>();
    for (int row = 0; row < 5; row++) {
      for (int fam = row; fam < 100; fam++) {
        for (int qual = 3; qual < 6; qual++) {
          insert(data, row, fam, qual, 6, row * fam * qual);
        }
      }
    }

    FirstEntryInRowIterator iter = new FirstEntryInRowIterator();
    iter.init(new SortedMapIterator(data), EMPTY_MAP, null);

    // seek past row 0's first entry; rows 1-4 each yield their first entry
    iter.seek(new Range(key(0, 10, 0, 0), null), EMPTY_SET, false);
    expectAndAdvance(iter, 1, 1, 3, 6, 1 * 1 * 3);
    expectAndAdvance(iter, 2, 2, 3, 6, 2 * 2 * 3);
    expectAndAdvance(iter, 3, 3, 3, 6, 3 * 3 * 3);
    expectAndAdvance(iter, 4, 4, 3, 6, 4 * 4 * 3);
    assertFalse(iter.hasTop());

    // inclusive range covering the first entries of rows 1 through 3
    iter.seek(new Range(key(1, 1, 3, 6), key(3, 3, 3, 6)), EMPTY_SET, false);
    expectAndAdvance(iter, 1, 1, 3, 6, 1 * 1 * 3);
    expectAndAdvance(iter, 2, 2, 3, 6, 2 * 2 * 3);
    expectAndAdvance(iter, 3, 3, 3, 6, 3 * 3 * 3);
    assertFalse(iter.hasTop());

    // with both endpoints exclusive only row 2's first entry remains visible
    iter.seek(new Range(key(1, 1, 3, 6), false, key(3, 3, 3, 6), false), EMPTY_SET, false);
    expectAndAdvance(iter, 2, 2, 3, 6, 2 * 2 * 3);
    assertFalse(iter.hasTop());
  }
}
| 9,489 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators/CombinerTestUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.iterators;
/**
 * Test-only helpers exposing the package-private {@code loggedMsgCache} of {@link Combiner}.
 */
public class CombinerTestUtil {

  // Static-utility class; prevent instantiation.
  private CombinerTestUtil() {}

  /** Invalidates all entries in the combiner's logged-message cache. */
  public static void clearLogCache() {
    Combiner.loggedMsgCache.invalidateAll();
  }

  /** Returns the estimated number of entries in the combiner's logged-message cache. */
  public static long cacheSize() {
    return Combiner.loggedMsgCache.estimatedSize();
  }
}
| 9,490 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators/FirstEntryInRowIteratorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.iterators;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.Set;
import java.util.TreeMap;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.PartialKey;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.iteratorsImpl.system.CountingIterator;
import org.apache.accumulo.core.iteratorsImpl.system.SortedMapIterator;
import org.junit.jupiter.api.Test;
public class FirstEntryInRowIteratorTest {

  /**
   * Seeks a {@link FirstEntryInRowIterator} (stacked on a {@link CountingIterator} over
   * {@code sourceMap}) across {@code range} and drains every emitted key/value into
   * {@code resultMap}.
   *
   * @return the number of entries the underlying source iterator was advanced past
   * @throws IOException if the iterator stack fails during init/seek/next
   */
  private static long process(TreeMap<Key,Value> sourceMap, TreeMap<Key,Value> resultMap,
      Range range, IteratorSetting iteratorSetting) throws IOException {
    SortedMapIterator source = new SortedMapIterator(sourceMap);
    CountingIterator counter = new CountingIterator(source);
    FirstEntryInRowIterator feiri = new FirstEntryInRowIterator();
    IteratorEnvironment env = new DefaultIteratorEnvironment();
    feiri.init(counter, iteratorSetting.getOptions(), env);
    feiri.seek(range, Set.of(), false);
    while (feiri.hasTop()) {
      resultMap.put(feiri.getTopKey(), feiri.getTopValue());
      feiri.next();
    }
    return counter.getCount();
  }

  /**
   * Verifies that the iterator returns only the first entry of each row, and that after
   * NUM_SCANS next() calls within one row (configured to 10 here) it stops scanning —
   * the "+ 10" assertions below count those extra scan advances over the fat r2 row.
   */
  @Test
  public void test() throws IOException {
    TreeMap<Key,Value> sourceMap = new TreeMap<>();
    Value emptyValue = new Value("");
    IteratorSetting iteratorSetting = new IteratorSetting(1, FirstEntryInRowIterator.class);
    FirstEntryInRowIterator.setNumScansBeforeSeek(iteratorSetting, 10);
    assertTrue(
        iteratorSetting.getOptions().containsKey(FirstEntryInRowIterator.NUM_SCANS_STRING_NAME));
    sourceMap.put(new Key("r1", "cf", "cq"), emptyValue);
    sourceMap.put(new Key("r2", "cf", "cq"), emptyValue);
    sourceMap.put(new Key("r3", "cf", "cq"), emptyValue);
    TreeMap<Key,Value> resultMap = new TreeMap<>();
    long numSourceEntries = sourceMap.size();
    // One entry per row: every source entry is visited exactly once.
    long numNexts = process(sourceMap, resultMap, new Range(), iteratorSetting);
    assertEquals(numSourceEntries, numNexts);
    assertEquals(sourceMap.size(), resultMap.size());
    // Bulk up row r2 so the iterator must scan past NUM_SCANS entries inside it.
    for (int i = 0; i < 20; i++) {
      sourceMap.put(new Key("r2", "cf", "cq" + i), emptyValue);
    }
    resultMap.clear();
    numNexts = process(sourceMap, resultMap,
        new Range(new Key("r1"), (new Key("r2")).followingKey(PartialKey.ROW)), iteratorSetting);
    assertEquals(resultMap.size() + 10, numNexts);
    assertEquals(2, resultMap.size());
    resultMap.clear();
    numNexts = process(sourceMap, resultMap, new Range(new Key("r1"), new Key("r2", "cf2")),
        iteratorSetting);
    assertEquals(resultMap.size() + 10, numNexts);
    assertEquals(2, resultMap.size());
    resultMap.clear();
    numNexts =
        process(sourceMap, resultMap, new Range(new Key("r1"), new Key("r4")), iteratorSetting);
    assertEquals(resultMap.size() + 10, numNexts);
    assertEquals(3, resultMap.size());
  }
}
| 9,491 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators/SortedMapIteratorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.iterators;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.TreeMap;
import org.apache.accumulo.core.client.SampleNotPresentException;
import org.apache.accumulo.core.client.sample.RowSampler;
import org.apache.accumulo.core.client.sample.SamplerConfiguration;
import org.apache.accumulo.core.iteratorsImpl.system.SortedMapIterator;
import org.junit.jupiter.api.Test;
public class SortedMapIteratorTest {

  /**
   * deepCopy on a SortedMapIterator must refuse sampling: when the supplied environment reports
   * sampling enabled, the copy attempt throws SampleNotPresentException.
   */
  @Test
  public void testSampleNotPresent() {
    IteratorEnvironment samplingEnv = new IteratorEnvironment() {
      @Override
      public boolean isSamplingEnabled() {
        return true;
      }

      @Override
      public SamplerConfiguration getSamplerConfiguration() {
        return new SamplerConfiguration(RowSampler.class.getName());
      }
    };
    SortedMapIterator iter = new SortedMapIterator(new TreeMap<>());
    assertThrows(SampleNotPresentException.class, () -> iter.deepCopy(samplingEnv));
  }
}
| 9,492 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators/user/VersioningIteratorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.iterators.user;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map.Entry;
import java.util.TreeMap;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.lexicoder.Encoder;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.PartialKey;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.iterators.LongCombiner;
import org.apache.accumulo.core.iteratorsImpl.system.SortedMapIterator;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class VersioningIteratorTest {
  // TODO: add a dedicated test for the seek function
  private static final Collection<ByteSequence> EMPTY_COL_FAMS = new ArrayList<>();
  private static final Encoder<Long> encoder = LongCombiner.FIXED_LEN_ENCODER;

  /**
   * Builds a VersioningIterator over {@code tm}, initialized to retain at most
   * {@code maxVersions} versions of each key.
   */
  private static VersioningIterator newVersioningIterator(TreeMap<Key,Value> tm, int maxVersions)
      throws IOException {
    VersioningIterator it = new VersioningIterator();
    IteratorSetting is = new IteratorSetting(1, VersioningIterator.class);
    VersioningIterator.setMaxVersions(is, maxVersions);
    it.init(new SortedMapIterator(tm), is.getOptions(), null);
    return it;
  }

  /**
   * Fills {@code tm} with two rows ("000" and "001"), each holding timestamps 0-19 for the given
   * column; each value is the encoded timestamp.
   */
  void createTestData(TreeMap<Key,Value> tm, Text colf, Text colq) {
    for (int i = 0; i < 2; i++) {
      for (long j = 0; j < 20; j++) {
        Key k = new Key(new Text(String.format("%03d", i)), colf, colq, j);
        tm.put(k, new Value(encoder.encode(j)));
      }
    }
    assertEquals(40, tm.size(), "Initial size was " + tm.size());
  }

  /** Drains {@code it} into a sorted map of all remaining key/value pairs. */
  TreeMap<Key,Value> iteratorOverTestData(VersioningIterator it) throws IOException {
    TreeMap<Key,Value> tmOut = new TreeMap<>();
    while (it.hasTop()) {
      tmOut.put(it.getTopKey(), it.getTopValue());
      it.next();
    }
    return tmOut;
  }

  /** Keeping 3 versions of 2 rows x 20 timestamps leaves 6 entries, all with timestamp > 16. */
  @Test
  public void test1() throws IOException {
    Text colf = new Text("a");
    Text colq = new Text("b");
    TreeMap<Key,Value> tm = new TreeMap<>();
    createTestData(tm, colf, colq);
    VersioningIterator it = newVersioningIterator(tm, 3);
    it.seek(new Range(), EMPTY_COL_FAMS, false);
    TreeMap<Key,Value> tmOut = iteratorOverTestData(it);
    for (Entry<Key,Value> e : tmOut.entrySet()) {
      assertEquals(8, e.getValue().get().length);
      assertTrue(16 < encoder.decode(e.getValue().get()));
    }
    assertEquals(6, tmOut.size(), "size after keeping 3 versions was " + tmOut.size());
  }

  /**
   * Seeking into the middle of the newest three versions of row 001 yields only the two versions
   * at or below the seek timestamp.
   */
  @Test
  public void test2() throws IOException {
    Text colf = new Text("a");
    Text colq = new Text("b");
    TreeMap<Key,Value> tm = new TreeMap<>();
    createTestData(tm, colf, colq);
    VersioningIterator it = newVersioningIterator(tm, 3);
    // Seek to the middle of the most recent three keys of row 1.
    Key seekKey = new Key(new Text(String.format("%03d", 1)), colf, colq, 18);
    it.seek(new Range(seekKey, null), EMPTY_COL_FAMS, false);
    TreeMap<Key,Value> tmOut = iteratorOverTestData(it);
    for (Entry<Key,Value> e : tmOut.entrySet()) {
      assertEquals(8, e.getValue().get().length);
      assertTrue(16 < encoder.decode(e.getValue().get()));
    }
    assertEquals(2, tmOut.size(), "size after keeping 2 versions was " + tmOut.size());
  }

  /** Seeking entirely past the retained versions must yield nothing (or only later rows). */
  @Test
  public void test3() throws IOException {
    Text colf = new Text("a");
    Text colq = new Text("b");
    TreeMap<Key,Value> tm = new TreeMap<>();
    createTestData(tm, colf, colq);
    VersioningIterator it = newVersioningIterator(tm, 3);
    // Seeking below row 001's retained versions returns zero entries.
    Key seekKey = new Key(new Text(String.format("%03d", 1)), colf, colq, 15);
    it.seek(new Range(seekKey, null), EMPTY_COL_FAMS, false);
    TreeMap<Key,Value> tmOut = iteratorOverTestData(it);
    for (Entry<Key,Value> e : tmOut.entrySet()) {
      assertEquals(8, e.getValue().get().length);
      assertTrue(16 < encoder.decode(e.getValue().get()));
    }
    assertEquals(0, tmOut.size(), "size after seeking past versions was " + tmOut.size());
    // Seeking below row 000's versions skips row 000 but returns row 001's three versions.
    seekKey = new Key(new Text(String.format("%03d", 0)), colf, colq, 15);
    it.seek(new Range(seekKey, null), EMPTY_COL_FAMS, false);
    tmOut = iteratorOverTestData(it);
    for (Entry<Key,Value> e : tmOut.entrySet()) {
      assertEquals(8, e.getValue().get().length);
      assertTrue(16 < encoder.decode(e.getValue().get()));
    }
    assertEquals(3, tmOut.size(), "size after seeking past versions was " + tmOut.size());
  }

  /** For every max-versions setting 1..30, the iterator keeps min(40, 2 * maxVersions) entries. */
  @Test
  public void test4() throws IOException {
    Text colf = new Text("a");
    Text colq = new Text("b");
    TreeMap<Key,Value> tm = new TreeMap<>();
    createTestData(tm, colf, colq);
    for (int i = 1; i <= 30; i++) {
      VersioningIterator it = newVersioningIterator(tm, i);
      it.seek(new Range(), EMPTY_COL_FAMS, false);
      TreeMap<Key,Value> tmOut = iteratorOverTestData(it);
      assertEquals(Math.min(40, 2 * i), tmOut.size(),
          "size after keeping " + i + " versions was " + tmOut.size());
    }
  }

  /** A non-inclusive seek at the newest version must advance to the next retained version. */
  @Test
  public void test5() throws IOException {
    Text colf = new Text("a");
    Text colq = new Text("b");
    TreeMap<Key,Value> tm = new TreeMap<>();
    createTestData(tm, colf, colq);
    VersioningIterator it = newVersioningIterator(tm, 3);
    Key seekKey = new Key(new Text(String.format("%03d", 1)), colf, colq, 19);
    it.seek(new Range(seekKey, false, null, true), EMPTY_COL_FAMS, false);
    assertTrue(it.hasTop());
    assertEquals(18, it.getTopKey().getTimestamp());
  }

  /** A deep copy made before seeking behaves identically to the original iterator. */
  @Test
  public void test6() throws IOException {
    Text colf = new Text("a");
    Text colq = new Text("b");
    TreeMap<Key,Value> tm = new TreeMap<>();
    createTestData(tm, colf, colq);
    VersioningIterator it = newVersioningIterator(tm, 3);
    VersioningIterator it2 = it.deepCopy(null);
    Key seekKey = new Key(new Text(String.format("%03d", 1)), colf, colq, 19);
    it.seek(new Range(seekKey, false, null, true), EMPTY_COL_FAMS, false);
    it2.seek(new Range(seekKey, false, null, true), EMPTY_COL_FAMS, false);
    assertTrue(it.hasTop());
    assertEquals(18, it.getTopKey().getTimestamp());
    assertTrue(it2.hasTop());
    assertEquals(18, it2.getTopKey().getTimestamp());
  }

  /** An exclusive start key at Long.MAX_VALUE must not return the MAX_VALUE-stamped entry. */
  @Test
  public void test_maxLongExclusiveKey() throws IOException {
    Text row = new Text("a");
    Text colf = new Text("b");
    Text colq = new Text("c");
    Text cv = new Text();
    TreeMap<Key,Value> tm = new TreeMap<>();
    tm.put(new Key(row, colf, colq, cv, Long.MAX_VALUE), new Value("00"));
    tm.put(new Key(row, colf, colq, cv, Long.MAX_VALUE - 1), new Value("11"));
    VersioningIterator it = newVersioningIterator(tm, 1);
    Key startKey = new Key(row, colf, colq, cv, Long.MAX_VALUE);
    Range testRange = new Range(startKey, false, startKey.followingKey(PartialKey.ROW), true);
    it.seek(testRange, EMPTY_COL_FAMS, false);
    assertFalse(it.hasTop());
  }

  /** An inclusive start key at Long.MAX_VALUE returns only that newest version. */
  @Test
  public void test_maxLongInclusiveKey() throws IOException {
    Text row = new Text("a");
    Text colf = new Text("b");
    Text colq = new Text("c");
    Text cv = new Text();
    TreeMap<Key,Value> tm = new TreeMap<>();
    tm.put(new Key(row, colf, colq, cv, Long.MAX_VALUE), new Value("00"));
    tm.put(new Key(row, colf, colq, cv, Long.MAX_VALUE - 1), new Value("11"));
    VersioningIterator it = newVersioningIterator(tm, 1);
    Key startKey = new Key(row, colf, colq, cv, Long.MAX_VALUE);
    Range testRange = new Range(startKey, true, startKey.followingKey(PartialKey.ROW), true);
    it.seek(testRange, EMPTY_COL_FAMS, false);
    assertTrue(it.hasTop());
    assertTrue(it.getTopValue().contentEquals("00".getBytes()));
    it.next();
    assertFalse(it.hasTop());
  }
}
| 9,493 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators/user/TestCfCqSliceFilter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.iterators.user;
/**
 * Runs the shared {@link TestCfCqSlice} test suite against the {@code CfCqSliceFilter}
 * implementation.
 */
public class TestCfCqSliceFilter extends TestCfCqSlice {

  // Supplies the concrete filter class under test to the parent suite.
  @Override
  protected Class<CfCqSliceFilter> getFilterClass() {
    return CfCqSliceFilter.class;
  }
}
| 9,494 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators/user/RegExFilterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.iterators.user;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.TreeMap;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.iterators.DefaultIteratorEnvironment;
import org.apache.accumulo.core.iteratorsImpl.system.SortedMapIterator;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
public class RegExFilterTest {
  private static final Collection<ByteSequence> EMPTY_COL_FAMS = new ArrayList<>();

  /** Inserts a (row, cf, cq) key with the given value into {@code tm} and returns the key. */
  private Key newKeyValue(TreeMap<Key,Value> tm, String row, String cf, String cq, String val) {
    Key k = newKey(row, cf, cq);
    tm.put(k, new Value(val));
    return k;
  }

  /** Builds a key from row, column family, and column qualifier strings. */
  private Key newKey(String row, String cf, String cq) {
    return new Key(new Text(row), new Text(cf), new Text(cq));
  }

  /**
   * Exercises RegExFilter against row, column-family, column-qualifier, and value regexes,
   * individually and combined (AND and OR semantics), plus substring matching, deep copies, and
   * multi-byte (non-ASCII) values.
   */
  @Test
  public void test1() throws IOException {
    TreeMap<Key,Value> tm = new TreeMap<>();
    Key k1 = newKeyValue(tm, "boo1", "yup", "20080201", "dog");
    Key k2 = newKeyValue(tm, "boo1", "yap", "20080202", "cat");
    Key k3 = newKeyValue(tm, "boo2", "yip", "20080203", "hamster");
    RegExFilter rei = new RegExFilter();
    rei.describeOptions();
    IteratorSetting is = new IteratorSetting(1, RegExFilter.class);
    // Row regex: only "boo2" (k3) ends in 2.
    RegExFilter.setRegexs(is, ".*2", null, null, null, false);
    assertTrue(rei.validateOptions(is.getOptions()));
    rei.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    rei.seek(new Range(), EMPTY_COL_FAMS, false);
    assertTrue(rei.hasTop());
    assertEquals(k3, rei.getTopKey());
    rei.next();
    assertFalse(rei.hasTop());
    // -----------------------------------------------------
    // Substring value regex: "amst" should only match "hamster".
    is.clearOptions();
    RegExFilter.setRegexs(is, null, null, null, "amst", false, true);
    assertTrue(rei.validateOptions(is.getOptions()));
    rei.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    rei.seek(new Range(), EMPTY_COL_FAMS, false);
    assertTrue(rei.hasTop());
    assertEquals(k3, rei.getTopKey());
    rei.next();
    assertFalse(rei.hasTop());
    // -----------------------------------------------------
    // Column family regex: only "yap" (k2) matches "ya.*".
    is.clearOptions();
    RegExFilter.setRegexs(is, null, "ya.*", null, null, false);
    assertTrue(rei.validateOptions(is.getOptions()));
    rei.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    rei.seek(new Range(), EMPTY_COL_FAMS, false);
    assertTrue(rei.hasTop());
    assertEquals(k2, rei.getTopKey());
    rei.next();
    assertFalse(rei.hasTop());
    // -----------------------------------------------------
    // Column qualifier regex: only "20080201" (k1) matches ".*01".
    is.clearOptions();
    RegExFilter.setRegexs(is, null, null, ".*01", null, false);
    assertTrue(rei.validateOptions(is.getOptions()));
    rei.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    rei.seek(new Range(), EMPTY_COL_FAMS, false);
    assertTrue(rei.hasTop());
    assertEquals(k1, rei.getTopKey());
    rei.next();
    assertFalse(rei.hasTop());
    // -----------------------------------------------------
    // Value regex: only "cat" (k2) matches ".*at".
    is.clearOptions();
    RegExFilter.setRegexs(is, null, null, null, ".*at", false);
    assertTrue(rei.validateOptions(is.getOptions()));
    rei.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    rei.seek(new Range(), EMPTY_COL_FAMS, false);
    assertTrue(rei.hasTop());
    assertEquals(k2, rei.getTopKey());
    rei.next();
    assertFalse(rei.hasTop());
    // -----------------------------------------------------
    // No value matches ".*ap".
    is.clearOptions();
    RegExFilter.setRegexs(is, null, null, null, ".*ap", false);
    rei.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    rei.seek(new Range(), EMPTY_COL_FAMS, false);
    assertFalse(rei.hasTop());
    // -----------------------------------------------------
    // AND of column family and value regexes: only k2 satisfies both.
    is.clearOptions();
    RegExFilter.setRegexs(is, null, "ya.*", null, ".*at", false);
    rei.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    rei.seek(new Range(), EMPTY_COL_FAMS, false);
    assertTrue(rei.hasTop());
    assertEquals(k2, rei.getTopKey());
    rei.next();
    assertFalse(rei.hasTop());
    // -----------------------------------------------------
    // AND with an unmatchable value regex matches nothing.
    is.clearOptions();
    RegExFilter.setRegexs(is, null, "ya.*", null, ".*ap", false);
    rei.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    rei.seek(new Range(), EMPTY_COL_FAMS, false);
    assertFalse(rei.hasTop());
    // -----------------------------------------------------
    // Row regex "boo1" matches k2 then k1 (sorted key order).
    is.clearOptions();
    RegExFilter.setRegexs(is, "boo1", null, null, null, false);
    rei.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    rei.seek(new Range(), EMPTY_COL_FAMS, false);
    assertTrue(rei.hasTop());
    assertEquals(k2, rei.getTopKey());
    rei.next();
    assertTrue(rei.hasTop());
    assertEquals(k1, rei.getTopKey());
    rei.next();
    assertFalse(rei.hasTop());
    // -----------------------------------------------------
    // No regexes configured: everything passes through.
    is.clearOptions();
    rei.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    rei.seek(new Range(), EMPTY_COL_FAMS, false);
    assertTrue(rei.hasTop());
    assertEquals(k2, rei.getTopKey());
    rei.next();
    assertTrue(rei.hasTop());
    assertEquals(k1, rei.getTopKey());
    rei.next();
    assertTrue(rei.hasTop());
    assertEquals(k3, rei.getTopKey());
    rei.next();
    assertFalse(rei.hasTop());
    // -----------------------------------------------------
    // OR semantics: k3's value "hamster" satisfies at least one regex.
    is.clearOptions();
    RegExFilter.setRegexs(is, "hamster", null, "hamster", "hamster", true);
    rei.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    rei.seek(new Range(), EMPTY_COL_FAMS, false);
    assertTrue(rei.hasTop());
    assertEquals(k3, rei.getTopKey());
    rei.next();
    assertFalse(rei.hasTop());
    // -----------------------------------------------------
    // OR of column family "ya.*" or qualifier "hamster": only k2's cf matches.
    is.clearOptions();
    RegExFilter.setRegexs(is, null, "ya.*", "hamster", null, true);
    rei.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    rei.seek(new Range(), EMPTY_COL_FAMS, false);
    assertTrue(rei.hasTop());
    assertEquals(k2, rei.getTopKey());
    rei.next();
    assertFalse(rei.hasTop());
    // deepCopy after init/seek must not throw.
    is.clearOptions();
    RegExFilter.setRegexs(is, null, "ya.*", "hamster", null, true);
    rei.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    rei.seek(new Range(), EMPTY_COL_FAMS, false);
    rei.deepCopy(new DefaultIteratorEnvironment());
    // -----------------------------------------------------
    // Multi-byte (non-ASCII) value must be matched and returned intact.
    String multiByteText = "\u6d67\u6F68\u7067";
    String multiByteRegex = ".*\u6F68.*";
    Key k4 = new Key("boo4".getBytes(UTF_8), "hoo".getBytes(UTF_8), "20080203".getBytes(UTF_8),
        "".getBytes(UTF_8), 1L);
    Value inVal = new Value(multiByteText);
    tm.put(k4, inVal);
    is.clearOptions();
    RegExFilter.setRegexs(is, null, null, null, multiByteRegex, true);
    rei.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    rei.seek(new Range(), EMPTY_COL_FAMS, false);
    assertTrue(rei.hasTop());
    Value outValue = rei.getTopValue();
    String outVal = new String(outValue.get(), UTF_8);
    assertEquals(multiByteText, outVal);
  }

  /**
   * A substring row regex must find "second" even when the row contains an embedded null byte
   * between "first" and "second".
   */
  @Test
  public void testNullByteInKey() throws IOException {
    TreeMap<Key,Value> tm = new TreeMap<>();
    String s1 = "first", s2 = "second";
    byte[] b1 = s1.getBytes(UTF_8), b2 = s2.getBytes(UTF_8), ball;
    ball = new byte[b1.length + b2.length + 1];
    System.arraycopy(b1, 0, ball, 0, b1.length);
    ball[b1.length] = (byte) 0;
    System.arraycopy(b2, 0, ball, b1.length + 1, b2.length);
    Key key = new Key(ball, new byte[0], new byte[0], new byte[0], 90, false);
    Value val = new Value();
    tm.put(key, val);
    IteratorSetting is = new IteratorSetting(5, RegExFilter.class);
    RegExFilter.setRegexs(is, s2, null, null, null, true, true);
    RegExFilter filter = new RegExFilter();
    filter.init(new SortedMapIterator(tm), is.getOptions(), null);
    filter.seek(new Range(), EMPTY_COL_FAMS, false);
    assertTrue(filter.hasTop(), "iterator couldn't find a match when it should have");
  }
}
| 9,495 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators/user/FilterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.iterators.user;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.TreeMap;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Column;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.iterators.DefaultIteratorEnvironment;
import org.apache.accumulo.core.iterators.Filter;
import org.apache.accumulo.core.iterators.SortedKeyValueIterator;
import org.apache.accumulo.core.iteratorsImpl.system.ColumnQualifierFilter;
import org.apache.accumulo.core.iteratorsImpl.system.SortedMapIterator;
import org.apache.accumulo.core.iteratorsImpl.system.VisibilityFilter;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
public class FilterTest {
private static final Collection<ByteSequence> EMPTY_COL_FAMS = new ArrayList<>();
private static final Map<String,String> EMPTY_OPTS = new HashMap<>();
public static class SimpleFilter extends Filter {
  /** Accepts only keys whose row ends with the character '0'. */
  @Override
  public boolean accept(Key k, Value v) {
    String row = k.getRow().toString();
    return row.endsWith("0");
  }
}
public static class SimpleFilter2 extends Filter {
  /** Rejects every key in column family "a"; accepts all others. */
  @Override
  public boolean accept(Key k, Value v) {
    return !"a".equals(k.getColumnFamily().toString());
  }
}
/** Drains {@code iterator} and returns how many entries it produced. */
private static int size(SortedKeyValueIterator<Key,Value> iterator) throws IOException {
  int count = 0;
  for (; iterator.hasTop(); iterator.next()) {
    count++;
  }
  return count;
}
/**
 * Verifies basic filtering and filter stacking: SimpleFilter keeps rows ending in '0', and
 * stacking SimpleFilter2 (which rejects column family "a") on top filters out everything.
 */
@Test
public void test1() throws IOException {
  Text colf = new Text("a");
  Text colq = new Text("b");
  Value dv = new Value();
  TreeMap<Key,Value> tm = new TreeMap<>();
  // 1000 rows "000".."999", one entry each, all in column family "a".
  for (int i = 0; i < 1000; i++) {
    Key k = new Key(new Text(String.format("%03d", i)), colf, colq);
    tm.put(k, dv);
  }
  assertEquals(1000, tm.size());
  // Rows ending in '0': 100 of the 1000.
  Filter filter1 = new SimpleFilter();
  filter1.init(new SortedMapIterator(tm), EMPTY_OPTS, null);
  filter1.seek(new Range(), EMPTY_COL_FAMS, false);
  int size = size(filter1);
  assertEquals(100, size);
  // Seeking to row "500" leaves half of those matches.
  Filter fi = new SimpleFilter();
  fi.init(new SortedMapIterator(tm), EMPTY_OPTS, null);
  Key k = new Key(new Text("500"));
  fi.seek(new Range(k, null), EMPTY_COL_FAMS, false);
  size = size(fi);
  assertEquals(50, size);
  // Stacking SimpleFilter2 (rejects colf "a") on SimpleFilter removes every remaining entry.
  filter1 = new SimpleFilter();
  filter1.init(new SortedMapIterator(tm), EMPTY_OPTS, null);
  Filter filter2 = new SimpleFilter2();
  filter2.init(filter1, EMPTY_OPTS, null);
  filter2.seek(new Range(), EMPTY_COL_FAMS, false);
  size = size(filter2);
  assertEquals(0, size);
}
/**
 * Same scenarios as test1 but exercising the negate option, which inverts each filter's accept
 * decision.
 */
@Test
public void test1neg() throws IOException {
  Text colf = new Text("a");
  Text colq = new Text("b");
  Value dv = new Value();
  TreeMap<Key,Value> tm = new TreeMap<>();
  for (int i = 0; i < 1000; i++) {
    Key k = new Key(new Text(String.format("%03d", i)), colf, colq);
    tm.put(k, dv);
  }
  assertEquals(1000, tm.size());
  // Negated SimpleFilter keeps the 900 rows that do NOT end in '0'.
  Filter filter = new SimpleFilter();
  IteratorSetting is = new IteratorSetting(1, SimpleFilter.class);
  Filter.setNegate(is, true);
  filter.init(new SortedMapIterator(tm), is.getOptions(), null);
  filter.seek(new Range(), EMPTY_COL_FAMS, false);
  int size = size(filter);
  assertEquals(900, size);
  // Seeking to row "500" leaves 450 of those.
  filter.init(new SortedMapIterator(tm), is.getOptions(), null);
  Key k = new Key(new Text("500"));
  filter.seek(new Range(k, null), EMPTY_COL_FAMS, false);
  size = size(filter);
  assertEquals(450, size);
  // NOTE: the source filter is deliberately re-initialized WITHOUT negation (EMPTY_OPTS);
  // only filter2 gets the negate option. Un-negated SimpleFilter passes the 100 '0'-rows,
  // and negated SimpleFilter2 keeps only column family "a" — which is all of them.
  filter.init(new SortedMapIterator(tm), EMPTY_OPTS, null);
  Filter filter2 = new SimpleFilter2();
  filter2.init(filter, is.getOptions(), null);
  filter2.seek(new Range(), EMPTY_COL_FAMS, false);
  size = size(filter2);
  assertEquals(100, size);
}
/**
 * A deep copy made before seeking must observe the same data as the original: both the negated
 * SimpleFilter and its copy return the 900 rows not ending in '0'.
 */
@Test
public void testDeepCopy() throws IOException {
  Text colf = new Text("a");
  Text colq = new Text("b");
  Value dv = new Value();
  TreeMap<Key,Value> tm = new TreeMap<>();
  for (int i = 0; i < 1000; i++) {
    Key k = new Key(new Text(String.format("%03d", i)), colf, colq);
    tm.put(k, dv);
  }
  assertEquals(1000, tm.size());
  SimpleFilter filter = new SimpleFilter();
  IteratorSetting is = new IteratorSetting(1, SimpleFilter.class);
  Filter.setNegate(is, true);
  filter.init(new SortedMapIterator(tm), is.getOptions(), null);
  // Copy is taken after init but before any seek.
  SortedKeyValueIterator<Key,Value> copy = filter.deepCopy(null);
  filter.seek(new Range(), EMPTY_COL_FAMS, false);
  int size = size(filter);
  assertEquals(900, size);
  // The copy is seeked independently and must yield the same count.
  copy.seek(new Range(), EMPTY_COL_FAMS, false);
  size = size(copy);
  assertEquals(900, size);
}
  /**
   * {@code AgeOffFilter} with negate enabled: only the entries that would have been aged off are
   * kept. Also checks option validation and that deep copies behave like the original.
   */
  @Test
  public void test2() throws IOException {
    Text colf = new Text("a");
    Text colq = new Text("b");
    Value dv = new Value();
    TreeMap<Key,Value> tm = new TreeMap<>();
    // Timestamps 0..999 so the TTL arithmetic below is exact.
    for (int i = 0; i < 1000; i++) {
      Key k = new Key(new Text(String.format("%03d", i)), colf, colq);
      k.setTimestamp(i);
      tm.put(k, dv);
    }
    assertEquals(1000, tm.size());
    SortedKeyValueIterator<Key,Value> a = new AgeOffFilter();
    IteratorSetting is = new IteratorSetting(1, AgeOffFilter.class);
    // currentTime=1001, ttl=101: negation keeps exactly the 900 oldest entries, i.e. the ones
    // that the filter would otherwise have aged off.
    AgeOffFilter.setTTL(is, 101L);
    AgeOffFilter.setCurrentTime(is, 1001L);
    AgeOffFilter.setNegate(is, true);
    final AgeOffFilter finalA = (AgeOffFilter) a;
    // A populated option map is valid; an empty one (missing the required TTL) must be rejected.
    assertTrue((finalA.validateOptions(is.getOptions())));
    assertThrows(IllegalArgumentException.class, () -> finalA.validateOptions(EMPTY_OPTS));
    a.init(new SortedMapIterator(tm), is.getOptions(), null);
    // Copy the initialized filter, then copy the copy; both must filter identically.
    a = a.deepCopy(null);
    SortedKeyValueIterator<Key,Value> copy = a.deepCopy(null);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    assertEquals(900, size(a));
    copy.seek(new Range(), EMPTY_COL_FAMS, false);
    assertEquals(900, size(copy));
  }
  /**
   * {@code ColumnAgeOffFilter}: per-column TTLs, added and removed between re-inits, determine
   * how many entries survive. Entry i is written with timestamp {@code ts - i}, so its "age"
   * relative to the pinned current time {@code ts} is exactly i milliseconds.
   */
  @Test
  public void test2a() throws IOException {
    Text colf = new Text("a");
    Text colq = new Text("b");
    Value dv = new Value();
    TreeMap<Key,Value> tm = new TreeMap<>();
    // TTL of 901 ms on family "a".
    IteratorSetting is = new IteratorSetting(1, ColumnAgeOffFilter.class);
    ColumnAgeOffFilter.addTTL(is, new IteratorSetting.Column("a"), 901L);
    long ts = System.currentTimeMillis();
    for (long i = 0; i < 1000; i++) {
      Key k = new Key(new Text(String.format("%03d", i)), colf, colq, ts - i);
      tm.put(k, dv);
    }
    assertEquals(1000, tm.size());
    ColumnAgeOffFilter a = new ColumnAgeOffFilter();
    assertTrue(a.validateOptions(is.getOptions()));
    a.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    // Pin "now" to ts so the ages are deterministic.
    a.overrideCurrentTime(ts);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    // Ages 0..901 survive the 901 ms TTL.
    assertEquals(902, size(a));
    // A more specific a:b TTL of 101 ms takes effect: only ages 0..101 survive.
    ColumnAgeOffFilter.addTTL(is, new IteratorSetting.Column("a", "b"), 101L);
    a.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    a.overrideCurrentTime(ts);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    assertEquals(102, size(a));
    // Removing the a:b TTL restores the family-wide behavior — also through a deep copy.
    ColumnAgeOffFilter.removeTTL(is, new IteratorSetting.Column("a", "b"));
    a.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    a = (ColumnAgeOffFilter) a.deepCopy(null);
    a.overrideCurrentTime(ts);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    assertEquals(902, size(a));
  }
  /**
   * Regression test for ACCUMULO-1604: {@code ColumnAgeOffFilter} threw an error when the negate
   * option was enabled. Mirrors {@code test2a} with negate=true, so each count is the aged-off
   * complement of the corresponding count in that test (98 = 1000 - 902, 898 = 1000 - 102).
   */
  @Test
  public void test2aNegate() throws IOException {
    Text colf = new Text("a");
    Text colq = new Text("b");
    Value dv = new Value();
    TreeMap<Key,Value> tm = new TreeMap<>();
    IteratorSetting is = new IteratorSetting(1, ColumnAgeOffFilter.class);
    ColumnAgeOffFilter.addTTL(is, new IteratorSetting.Column("a"), 901L);
    ColumnAgeOffFilter.setNegate(is, true);
    long ts = System.currentTimeMillis();
    // Entry i has timestamp ts - i, i.e. an age of i milliseconds relative to ts.
    for (long i = 0; i < 1000; i++) {
      Key k = new Key(new Text(String.format("%03d", i)), colf, colq, ts - i);
      tm.put(k, dv);
    }
    assertEquals(1000, tm.size());
    ColumnAgeOffFilter a = new ColumnAgeOffFilter();
    // Must not throw even though "negate" is present among the options (the original bug).
    assertTrue(a.validateOptions(is.getOptions()));
    a.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    a.overrideCurrentTime(ts);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    // Negated 901 ms TTL keeps the 98 entries older than the TTL.
    assertEquals(98, size(a));
    ColumnAgeOffFilter.addTTL(is, new IteratorSetting.Column("a", "b"), 101L);
    a.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    a.overrideCurrentTime(ts);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    // Negated 101 ms a:b TTL keeps the 898 entries older than that TTL.
    assertEquals(898, size(a));
    // Removing the a:b TTL restores the family-wide behavior — also through a deep copy.
    ColumnAgeOffFilter.removeTTL(is, new IteratorSetting.Column("a", "b"));
    a.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    a = (ColumnAgeOffFilter) a.deepCopy(null);
    a.overrideCurrentTime(ts);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    assertEquals(98, size(a));
  }
  /**
   * Regression test for ACCUMULO-1604: {@code ColumnAgeOffFilter} was throwing an error when
   * using negate. This variant uses "negate" as an actual column family name to ensure the
   * filter distinguishes the reserved option key from a real column; the expected counts match
   * the non-negated {@code test2a}.
   */
  @Test
  public void test2b() throws IOException {
    Text colf = new Text("negate");
    Text colq = new Text("b");
    Value dv = new Value();
    TreeMap<Key,Value> tm = new TreeMap<>();
    // TTL of 901 ms keyed on the family literally named "negate".
    IteratorSetting is = new IteratorSetting(1, ColumnAgeOffFilter.class);
    ColumnAgeOffFilter.addTTL(is, new IteratorSetting.Column("negate"), 901L);
    long ts = System.currentTimeMillis();
    // Entry i has timestamp ts - i, i.e. an age of i milliseconds relative to ts.
    for (long i = 0; i < 1000; i++) {
      Key k = new Key(new Text(String.format("%03d", i)), colf, colq, ts - i);
      tm.put(k, dv);
    }
    assertEquals(1000, tm.size());
    ColumnAgeOffFilter a = new ColumnAgeOffFilter();
    assertTrue(a.validateOptions(is.getOptions()));
    a.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    a.overrideCurrentTime(ts);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    // Ages 0..901 survive the 901 ms TTL.
    assertEquals(902, size(a));
    // A more specific negate:b TTL of 101 ms takes effect: only ages 0..101 survive.
    ColumnAgeOffFilter.addTTL(is, new IteratorSetting.Column("negate", "b"), 101L);
    a.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    a.overrideCurrentTime(ts);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    assertEquals(102, size(a));
    // Removing the negate:b TTL restores the family-wide behavior — also through a deep copy.
    ColumnAgeOffFilter.removeTTL(is, new IteratorSetting.Column("negate", "b"));
    a.init(new SortedMapIterator(tm), is.getOptions(), new DefaultIteratorEnvironment());
    a = (ColumnAgeOffFilter) a.deepCopy(null);
    a.overrideCurrentTime(ts);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    assertEquals(902, size(a));
  }
@Test
public void test3() throws IOException {
Value dv = new Value();
TreeMap<Key,Value> tm = new TreeMap<>();
HashSet<Column> hsc = new HashSet<>();
hsc.add(new Column("c".getBytes(), null, null));
Text colf1 = new Text("a");
Text colq1 = new Text("b");
Text colf2 = new Text("c");
Text colq2 = new Text("d");
Text colf;
Text colq;
for (int i = 0; i < 1000; i++) {
if (Math.abs(Math.ceil(i / 2.0) - i / 2.0) < .001) {
colf = colf1;
colq = colq1;
} else {
colf = colf2;
colq = colq2;
}
Key k = new Key(new Text(String.format("%03d", i)), colf, colq);
k.setTimestamp(157L);
tm.put(k, dv);
}
assertEquals(1000, tm.size());
SortedKeyValueIterator<Key,Value> a =
ColumnQualifierFilter.wrap(new SortedMapIterator(tm), hsc);
a.seek(new Range(), EMPTY_COL_FAMS, false);
assertEquals(1000, size(a));
hsc = new HashSet<>();
hsc.add(new Column("a".getBytes(), "b".getBytes(), null));
a = ColumnQualifierFilter.wrap(new SortedMapIterator(tm), hsc);
a.seek(new Range(), EMPTY_COL_FAMS, false);
int size = size(a);
assertEquals(500, size);
hsc = new HashSet<>();
a = ColumnQualifierFilter.wrap(new SortedMapIterator(tm), hsc);
a.seek(new Range(), EMPTY_COL_FAMS, false);
size = size(a);
assertEquals(1000, size);
}
@Test
public void test4() throws IOException {
Value dv = new Value();
TreeMap<Key,Value> tm = new TreeMap<>();
ColumnVisibility le1 = new ColumnVisibility("L1");
ColumnVisibility le2 = new ColumnVisibility("L0&OFFICIAL");
ColumnVisibility le3 = new ColumnVisibility("L1&L2");
ColumnVisibility le4 = new ColumnVisibility("L1&L2&G1");
ColumnVisibility[] lea = {le1, le2, le3, le4};
Authorizations auths = new Authorizations("L1", "L2", "L0", "OFFICIAL");
for (int i = 0; i < 1000; i++) {
Key k = new Key(new Text(String.format("%03d", i)), new Text("a"), new Text("b"),
new Text(lea[i % 4].getExpression()));
tm.put(k, dv);
}
assertEquals(1000, tm.size());
SortedKeyValueIterator<Key,Value> a =
VisibilityFilter.wrap(new SortedMapIterator(tm), auths, le2.getExpression());
a.seek(new Range(), EMPTY_COL_FAMS, false);
int size = size(a);
assertEquals(750, size);
}
private SortedKeyValueIterator<Key,Value> ncqf(TreeMap<Key,Value> tm, Column... columns)
throws IOException {
HashSet<Column> hsc = new HashSet<>();
Collections.addAll(hsc, columns);
SortedKeyValueIterator<Key,Value> a =
ColumnQualifierFilter.wrap(new SortedMapIterator(tm), hsc);
a.seek(new Range(), EMPTY_COL_FAMS, false);
return a;
}
  /**
   * Spot-checks {@code ColumnQualifierFilter} (via {@link #ncqf}) over five entries spanning
   * families a/b and qualifiers x/y/z.
   */
  @Test
  public void test5() throws IOException {
    Value dv = new Value();
    TreeMap<Key,Value> tm = new TreeMap<>();
    tm.put(new Key(new Text(String.format("%03d", 1)), new Text("a"), new Text("x")), dv);
    tm.put(new Key(new Text(String.format("%03d", 2)), new Text("a"), new Text("y")), dv);
    tm.put(new Key(new Text(String.format("%03d", 3)), new Text("a"), new Text("z")), dv);
    tm.put(new Key(new Text(String.format("%03d", 4)), new Text("b"), new Text("x")), dv);
    tm.put(new Key(new Text(String.format("%03d", 5)), new Text("b"), new Text("y")), dv);
    assertEquals(5, tm.size());
    // A family-only column imposes no qualifier restriction: all 5 entries pass, even for a
    // family ("c") that does not occur in the data.
    int size = size(ncqf(tm, new Column("c".getBytes(), null, null)));
    assertEquals(5, size);
    size = size(ncqf(tm, new Column("a".getBytes(), null, null)));
    assertEquals(5, size);
    // With an explicit qualifier, only exact family:qualifier matches pass.
    size = size(ncqf(tm, new Column("a".getBytes(), "x".getBytes(), null)));
    assertEquals(1, size);
    size = size(ncqf(tm, new Column("a".getBytes(), "x".getBytes(), null),
        new Column("b".getBytes(), "x".getBytes(), null)));
    assertEquals(2, size);
    size = size(ncqf(tm, new Column("a".getBytes(), "x".getBytes(), null),
        new Column("b".getBytes(), "y".getBytes(), null)));
    assertEquals(2, size);
    // Mixing a qualified column (a:x) with a family-only column (b) passes a:x plus both b rows.
    size = size(ncqf(tm, new Column("a".getBytes(), "x".getBytes(), null),
        new Column("b".getBytes(), null, null)));
    assertEquals(3, size);
  }
@Test
public void testNoVisFilter() throws IOException {
TreeMap<Key,Value> tm = new TreeMap<>();
Value v = new Value();
for (int i = 0; i < 1000; i++) {
Key k = new Key(String.format("%03d", i), "a", "b", i % 10 == 0 ? "vis" : "");
tm.put(k, v);
}
assertEquals(1000, tm.size());
Filter filter = new ReqVisFilter();
filter.init(new SortedMapIterator(tm), EMPTY_OPTS, null);
filter.seek(new Range(), EMPTY_COL_FAMS, false);
int size = size(filter);
assertEquals(100, size);
}
  /**
   * Exercises {@code TimestampFilter} start/end bounds in every combination: string and long
   * setters, inclusive and exclusive endpoints, raw option keys, and option validation. The data
   * is 100 entries timestamped at one-second intervals from a 1999-01-01 00:00:00 GMT base, so
   * "second N" below means timestamp baseTime + N*1000.
   */
  @Test
  public void testTimestampFilter() throws IOException, ParseException {
    Text colf = new Text("a");
    Text colq = new Text("b");
    Value dv = new Value();
    TreeMap<Key,Value> tm = new TreeMap<>();
    for (int i = 0; i < 100; i++) {
      Key k = new Key(new Text(String.format("%02d", i)), colf, colq);
      k.setTimestamp(i);
      tm.put(k, dv);
    }
    assertEquals(100, tm.size());
    SimpleDateFormat dateParser = new SimpleDateFormat("yyyyMMddHHmmssz");
    long baseTime = dateParser.parse("19990101000000GMT").getTime();
    // Rebuild the table with real wall-clock timestamps: baseTime, baseTime+1s, ..., +99s.
    tm.clear();
    for (int i = 0; i < 100; i++) {
      Key k = new Key(new Text(String.format("%02d", i)), colf, colq);
      k.setTimestamp(baseTime + (i * 1000));
      tm.put(k, dv);
    }
    assertEquals(100, tm.size());
    TimestampFilter a = new TimestampFilter();
    IteratorSetting is = new IteratorSetting(1, TimestampFilter.class);
    // Inclusive range seconds 11..31 -> 21 entries; the GMT+01:00 strings denote the same
    // instants as the GMT ones used later, just expressed in a different zone.
    TimestampFilter.setRange(is, "19990101010011GMT+01:00", "19990101010031GMT+01:00");
    a.init(new SortedMapIterator(tm), is.getOptions(), null);
    a = (TimestampFilter) a.deepCopy(null);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    assertEquals(21, size(a));
    // Same range given as raw long timestamps.
    TimestampFilter.setRange(is, baseTime + 11000, baseTime + 31000);
    a.init(new SortedMapIterator(tm), is.getOptions(), null);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    assertEquals(21, size(a));
    // Exclusive end drops second 31.
    TimestampFilter.setEnd(is, "19990101000031GMT", false);
    a.init(new SortedMapIterator(tm), is.getOptions(), null);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    assertEquals(20, size(a));
    // Exclusive start additionally drops second 11.
    TimestampFilter.setStart(is, "19990101000011GMT", false);
    a.init(new SortedMapIterator(tm), is.getOptions(), null);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    assertEquals(19, size(a));
    // Making the end inclusive again restores second 31.
    TimestampFilter.setEnd(is, "19990101000031GMT", true);
    a.init(new SortedMapIterator(tm), is.getOptions(), null);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    assertEquals(20, size(a));
    // Start-only bounds: seconds 11..99 inclusive (89 entries) or 12..99 exclusive (88).
    is.clearOptions();
    TimestampFilter.setStart(is, "19990101000011GMT", true);
    a.init(new SortedMapIterator(tm), is.getOptions(), null);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    assertEquals(89, size(a));
    TimestampFilter.setStart(is, "19990101000011GMT", false);
    assertTrue(a.validateOptions(is.getOptions()));
    a.init(new SortedMapIterator(tm), is.getOptions(), null);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    assertEquals(88, size(a));
    // End-only bounds: seconds 0..31 inclusive (32 entries) or 0..30 exclusive (31).
    is.clearOptions();
    TimestampFilter.setEnd(is, "19990101000031GMT", true);
    a.init(new SortedMapIterator(tm), is.getOptions(), null);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    assertEquals(32, size(a));
    TimestampFilter.setEnd(is, "19990101000031GMT", false);
    assertTrue(a.validateOptions(is.getOptions()));
    a.init(new SortedMapIterator(tm), is.getOptions(), null);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    assertEquals(31, size(a));
    // A far-future end given as a raw long must be accepted by init without error.
    TimestampFilter.setEnd(is, 253402300800001L, true);
    a.init(new SortedMapIterator(tm), is.getOptions(), null);
    // Setting the raw START/END option keys directly behaves like the setters above.
    is.clearOptions();
    is.addOption(TimestampFilter.START, "19990101000011GMT");
    assertTrue(a.validateOptions(is.getOptions()));
    a.init(new SortedMapIterator(tm), is.getOptions(), null);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    assertEquals(89, size(a));
    is.clearOptions();
    is.addOption(TimestampFilter.END, "19990101000031GMT");
    assertTrue(a.validateOptions(is.getOptions()));
    a.init(new SortedMapIterator(tm), is.getOptions(), null);
    a.seek(new Range(), EMPTY_COL_FAMS, false);
    assertEquals(32, size(a));
    // At least one bound is required, so an empty option map is invalid.
    final TimestampFilter finalA = a;
    assertThrows(IllegalArgumentException.class, () -> finalA.validateOptions(EMPTY_OPTS));
  }
@Test
public void testDeletes() throws IOException {
Text colf = new Text("a");
Text colq = new Text("b");
Value dv = new Value();
TreeMap<Key,Value> tm = new TreeMap<>();
Key k = new Key(new Text("0"), colf, colq);
tm.put(k, dv);
k = new Key(new Text("1"), colf, colq, 10);
k.setDeleted(true);
tm.put(k, dv);
k = new Key(new Text("1"), colf, colq, 5);
tm.put(k, dv);
k = new Key(new Text("10"), colf, colq);
tm.put(k, dv);
assertEquals(4, tm.size());
Filter filter = new SimpleFilter();
filter.init(new SortedMapIterator(tm), EMPTY_OPTS, null);
filter.seek(new Range(), EMPTY_COL_FAMS, false);
int size = size(filter);
assertEquals(3, size);
}
}
| 9,496 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators/user/BigDecimalCombinerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.iterators.user;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.TreeMap;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.IteratorSetting.Column;
import org.apache.accumulo.core.client.lexicoder.Encoder;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.iterators.Combiner;
import org.apache.accumulo.core.iteratorsImpl.system.SortedMapIterator;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Tests the {@code BigDecimalCombiner} variants (sum, min, max): values in the configured column
 * collapse into a single combined entry while entries outside that column pass through untouched.
 */
public class BigDecimalCombinerTest {
  private static final Collection<ByteSequence> EMPTY_COL_FAMS = new ArrayList<>();
  // Tolerance for comparing decoded BigDecimal results as doubles. Declared final (it is a
  // constant; the previous non-final static was accidentally mutable).
  private static final double DELTA = 0.00001;
  Encoder<BigDecimal> encoder;
  TreeMap<Key,Value> tm1;
  List<Column> columns;
  Combiner ai;

  /**
   * Builds a table with three values (2, 2.3, -14) under one key that the combiner aggregates,
   * plus two values (99, -88) under other keys that must pass through unchanged.
   */
  @BeforeEach
  public void setup() {
    encoder = new BigDecimalCombiner.BigDecimalEncoder();
    tm1 = new TreeMap<>();
    columns = Collections.singletonList(new IteratorSetting.Column("cf001"));
    // keys that will aggregate
    CombinerTest.newKeyValue(tm1, 1, 1, 1, 1, false, BigDecimal.valueOf(2), encoder);
    CombinerTest.newKeyValue(tm1, 1, 1, 1, 2, false, BigDecimal.valueOf(2.3), encoder);
    CombinerTest.newKeyValue(tm1, 1, 1, 1, 3, false, BigDecimal.valueOf(-1.4E1), encoder);
    // and keys that will not aggregate
    CombinerTest.newKeyValue(tm1, 1, 2, 1, 1, false, BigDecimal.valueOf(99), encoder);
    CombinerTest.newKeyValue(tm1, 1, 3, 1, 1, false, BigDecimal.valueOf(-88), encoder);
  }

  /** Summing combiner: 2 + 2.3 + (-14) = -9.7. */
  @Test
  public void testSums() throws IOException {
    ai = new BigDecimalCombiner.BigDecimalSummingCombiner();
    IteratorSetting is = new IteratorSetting(1, BigDecimalCombiner.BigDecimalSummingCombiner.class);
    Combiner.setColumns(is, columns);
    ai.init(new SortedMapIterator(tm1), is.getOptions(), CombinerTest.SCAN_IE);
    ai.seek(new Range(), EMPTY_COL_FAMS, false);
    assertTrue(ai.hasTop());
    assertEquals(CombinerTest.newKey(1, 1, 1, 3), ai.getTopKey());
    assertEquals(-9.7, encoder.decode(ai.getTopValue().get()).doubleValue(), DELTA);
    verify();
  }

  /** Min combiner: min(2, 2.3, -14) = -14. */
  @Test
  public void testMin() throws IOException {
    ai = new BigDecimalCombiner.BigDecimalMinCombiner();
    IteratorSetting is = new IteratorSetting(1, BigDecimalCombiner.BigDecimalMinCombiner.class);
    Combiner.setColumns(is, columns);
    ai.init(new SortedMapIterator(tm1), is.getOptions(), CombinerTest.SCAN_IE);
    ai.seek(new Range(), EMPTY_COL_FAMS, false);
    assertTrue(ai.hasTop());
    assertEquals(CombinerTest.newKey(1, 1, 1, 3), ai.getTopKey());
    assertEquals(-14.0, encoder.decode(ai.getTopValue().get()).doubleValue(), DELTA);
    verify();
  }

  /** Max combiner: max(2, 2.3, -14) = 2.3. */
  @Test
  public void testMax() throws IOException {
    ai = new BigDecimalCombiner.BigDecimalMaxCombiner();
    IteratorSetting is = new IteratorSetting(1, BigDecimalCombiner.BigDecimalMaxCombiner.class);
    Combiner.setColumns(is, columns);
    ai.init(new SortedMapIterator(tm1), is.getOptions(), CombinerTest.SCAN_IE);
    ai.seek(new Range(), EMPTY_COL_FAMS, false);
    assertTrue(ai.hasTop());
    assertEquals(CombinerTest.newKey(1, 1, 1, 3), ai.getTopKey());
    assertEquals(2.3, encoder.decode(ai.getTopValue().get()).doubleValue(), DELTA);
    verify();
  }

  /** Asserts the two non-aggregating entries follow the combined one, then the data ends. */
  private void verify() throws IOException {
    ai.next(); // Skip the combined key, since we've already looked at it by now
    // Should have exactly two more keys left over
    assertEquals(CombinerTest.newKey(1, 2, 1, 1), ai.getTopKey());
    assertEquals(99.0, encoder.decode(ai.getTopValue().get()).doubleValue(), DELTA);
    ai.next();
    assertEquals(CombinerTest.newKey(1, 3, 1, 1), ai.getTopKey());
    assertEquals(-88.0, encoder.decode(ai.getTopValue().get()).doubleValue(), DELTA);
    ai.next();
    assertFalse(ai.hasTop());
  }
}
| 9,497 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators/user/TestCfCqSliceSeekingFilter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.iterators.user;
/**
 * Runs the shared {@code TestCfCqSlice} suite against the seeking variant of the slice filter,
 * {@code CfCqSliceSeekingFilter}.
 */
public class TestCfCqSliceSeekingFilter extends TestCfCqSlice {
  // Supplies the implementation under test to the base class's parameterized tests.
  @Override
  protected Class<CfCqSliceSeekingFilter> getFilterClass() {
    return CfCqSliceSeekingFilter.class;
  }
}
| 9,498 |
0 | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators | Create_ds/accumulo/core/src/test/java/org/apache/accumulo/core/iterators/user/WholeRowIteratorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.iterators.user;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.iteratorsImpl.system.MultiIterator;
import org.apache.accumulo.core.iteratorsImpl.system.SortedMapIterator;
import org.apache.hadoop.io.Text;
import org.junit.jupiter.api.Test;
/**
 * Tests {@code WholeRowIterator}: encoding a whole row into a single key/value pair, decoding it
 * back, filtering whole rows, and continuing (re-seeking) after a returned row.
 */
public class WholeRowIteratorTest {
  /** Decoding a row value that has been truncated mid-stream must fail with an IOException. */
  @Test
  public void testBadDecodeRow() throws IOException {
    Key k = new Key(new Text("r1"), new Text("cf1234567890"));
    Value v = new Value("v1");
    Value encoded = WholeRowIterator.encodeRow(List.of(k), List.of(v));
    encoded.set(Arrays.copyOfRange(encoded.get(), 0, 10)); // truncate to 10 bytes only
    assertThrows(IOException.class, () -> WholeRowIterator.decodeRow(k, encoded));
  }

  /**
   * Round-trips entries — including keys with empty row/family/qualifier/visibility parts —
   * through encode/decode, then checks that overriding {@code filter} keeps only chosen rows.
   */
  @Test
  public void testEmptyStuff() throws IOException {
    SortedMap<Key,Value> map = new TreeMap<>();
    SortedMap<Key,Value> map2 = new TreeMap<>();
    final Map<Text,Boolean> toInclude = new HashMap<>();
    map.put(new Key(new Text("r1"), new Text("cf1"), new Text("cq1"), new Text("cv1"), 1L),
        new Value("val1"));
    map.put(new Key(new Text("r1"), new Text("cf1"), new Text("cq2"), new Text("cv1"), 2L),
        new Value("val2"));
    map.put(new Key(new Text("r2"), new Text("cf1"), new Text("cq1"), new Text("cv1"), 3L),
        new Value("val3"));
    map.put(new Key(new Text("r2"), new Text("cf2"), new Text("cq1"), new Text("cv1"), 4L),
        new Value("val4"));
    map.put(new Key(new Text("r3"), new Text("cf1"), new Text("cq1"), new Text("cv1"), 5L),
        new Value("val4"));
    map.put(new Key(new Text("r3"), new Text("cf1"), new Text("cq1"), new Text("cv2"), 6L),
        new Value("val4"));
    map.put(new Key(new Text("r4"), new Text("cf1"), new Text("cq1"), new Text("cv1"), 7L),
        new Value(""));
    map.put(new Key(new Text("r4"), new Text("cf1"), new Text("cq1"), new Text(""), 8L),
        new Value("val1"));
    map.put(new Key(new Text("r4"), new Text("cf1"), new Text(""), new Text("cv1"), 9L),
        new Value("val1"));
    map.put(new Key(new Text("r4"), new Text(""), new Text("cq1"), new Text("cv1"), 10L),
        new Value("val1"));
    map.put(new Key(new Text(""), new Text("cf1"), new Text("cq1"), new Text("cv1"), 11L),
        new Value("val1"));
    // Flag every other distinct row for inclusion; map2 collects the entries of included rows
    // so the filtered scan below can be checked against it.
    boolean b = true;
    int trueCount = 0;
    for (Key k : map.keySet()) {
      if (toInclude.containsKey(k.getRow())) {
        if (toInclude.get(k.getRow())) {
          map2.put(k, map.get(k));
        }
        continue;
      }
      b = !b;
      toInclude.put(k.getRow(), b);
      if (b) {
        trueCount++;
        map2.put(k, map.get(k));
      }
    }
    SortedMapIterator source = new SortedMapIterator(map);
    WholeRowIterator iter = new WholeRowIterator(source);
    SortedMap<Key,Value> resultMap = new TreeMap<>();
    iter.seek(new Range(), new ArrayList<>(), false);
    // Each top entry is one encoded row; decoding all of them must reproduce the input exactly.
    int numRows = 0;
    while (iter.hasTop()) {
      numRows++;
      Key rowKey = iter.getTopKey();
      Value rowValue = iter.getTopValue();
      resultMap.putAll(WholeRowIterator.decodeRow(rowKey, rowValue));
      iter.next();
    }
    assertEquals(5, numRows);
    assertEquals(resultMap, map);
    // Same scan with a filter override: only rows flagged true in toInclude come back.
    WholeRowIterator iter2 = new WholeRowIterator(source) {
      @Override
      public boolean filter(Text row, List<Key> keys, List<Value> values) {
        return toInclude.get(row);
      }
    };
    resultMap.clear();
    iter2.seek(new Range(), new ArrayList<>(), false);
    numRows = 0;
    while (iter2.hasTop()) {
      numRows++;
      Key rowKey = iter2.getTopKey();
      Value rowValue = iter2.getTopValue();
      resultMap.putAll(WholeRowIterator.decodeRow(rowKey, rowValue));
      iter2.next();
    }
    assertEquals(numRows, trueCount);
    assertEquals(resultMap, map2);
  }

  /** Shorthand: puts a key built from the given parts, mapped to {@code val}, into {@code map}. */
  private void pkv(SortedMap<Key,Value> map, String row, String cf, String cq, String cv, long ts,
      String val) {
    map.put(new Key(new Text(row), new Text(cf), new Text(cq), new Text(cv), ts), new Value(val));
  }

  /**
   * After receiving one encoded row, a caller continues by seeking a range that starts just past
   * the returned key (exclusive); the next whole row must then be produced.
   */
  @Test
  public void testContinue() throws Exception {
    SortedMap<Key,Value> map1 = new TreeMap<>();
    pkv(map1, "row1", "cf1", "cq1", "cv1", 5, "foo");
    pkv(map1, "row1", "cf1", "cq2", "cv1", 6, "bar");
    SortedMap<Key,Value> map2 = new TreeMap<>();
    pkv(map2, "row2", "cf1", "cq1", "cv1", 5, "foo");
    pkv(map2, "row2", "cf1", "cq2", "cv1", 6, "bar");
    SortedMap<Key,Value> map3 = new TreeMap<>();
    pkv(map3, "row3", "cf1", "cq1", "cv1", 5, "foo");
    pkv(map3, "row3", "cf1", "cq2", "cv1", 6, "bar");
    SortedMap<Key,Value> map = new TreeMap<>();
    map.putAll(map1);
    map.putAll(map2);
    map.putAll(map3);
    SortedMapIterator source = new SortedMapIterator(map);
    WholeRowIterator iter = new WholeRowIterator(source);
    Range range = new Range(new Text("row1"), true, new Text("row2"), true);
    iter.seek(range, new ArrayList<>(), false);
    assertTrue(iter.hasTop());
    assertEquals(map1, WholeRowIterator.decodeRow(iter.getTopKey(), iter.getTopValue()));
    // simulate something continuing using the last key from the iterator
    // this is what client and server code will do
    range = new Range(iter.getTopKey(), false, range.getEndKey(), range.isEndKeyInclusive());
    iter.seek(range, new ArrayList<>(), false);
    assertTrue(iter.hasTop());
    assertEquals(map2, WholeRowIterator.decodeRow(iter.getTopKey(), iter.getTopValue()));
    iter.next();
    assertFalse(iter.hasTop());
  }

  /**
   * Regression test: with the underlying source clipped (via {@code MultiIterator}) to end at
   * row1, continuing past the returned row1 must simply report no more data rather than error.
   */
  @Test
  public void testBug1() throws Exception {
    SortedMap<Key,Value> map1 = new TreeMap<>();
    pkv(map1, "row1", "cf1", "cq1", "cv1", 5, "foo");
    pkv(map1, "row1", "cf1", "cq2", "cv1", 6, "bar");
    SortedMap<Key,Value> map2 = new TreeMap<>();
    pkv(map2, "row2", "cf1", "cq1", "cv1", 5, "foo");
    SortedMap<Key,Value> map = new TreeMap<>();
    map.putAll(map1);
    map.putAll(map2);
    MultiIterator source = new MultiIterator(Collections.singletonList(new SortedMapIterator(map)),
        new Range(null, true, new Text("row1"), true));
    WholeRowIterator iter = new WholeRowIterator(source);
    Range range = new Range(new Text("row1"), true, new Text("row2"), true);
    iter.seek(range, new ArrayList<>(), false);
    assertTrue(iter.hasTop());
    assertEquals(map1, WholeRowIterator.decodeRow(iter.getTopKey(), iter.getTopValue()));
    // simulate something continuing using the last key from the iterator
    // this is what client and server code will do
    range = new Range(iter.getTopKey(), false, range.getEndKey(), range.isEndKeyInclusive());
    iter.seek(range, new ArrayList<>(), false);
    assertFalse(iter.hasTop());
  }
}
| 9,499 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.