| src_fm_fc_ms_ff (string, lengths 43–86.8k) | target (string, lengths 20–276k) |
|---|---|
ConfigDef { public List<ConfigValue> validate(Map<String, String> props) { return new ArrayList<>(validateAll(props).values()); } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
|
@Test public void testValidate() { Map<String, ConfigValue> expected = new HashMap<>(); String errorMessageB = "Missing required configuration \"b\" which has no default value."; String errorMessageC = "Missing required configuration \"c\" which has no default value."; ConfigValue configA = new ConfigValue("a", 1, Arrays.<Object>asList(1, 2, 3), Collections.<String>emptyList()); ConfigValue configB = new ConfigValue("b", null, Arrays.<Object>asList(4, 5), Arrays.asList(errorMessageB, errorMessageB)); ConfigValue configC = new ConfigValue("c", null, Arrays.<Object>asList(4, 5), Arrays.asList(errorMessageC)); ConfigValue configD = new ConfigValue("d", 10, Arrays.<Object>asList(1, 2, 3), Collections.<String>emptyList()); expected.put("a", configA); expected.put("b", configB); expected.put("c", configC); expected.put("d", configD); ConfigDef def = new ConfigDef() .define("a", Type.INT, Importance.HIGH, "docs", "group", 1, Width.SHORT, "a", Arrays.asList("b", "c"), new IntegerRecommender(false)) .define("b", Type.INT, Importance.HIGH, "docs", "group", 2, Width.SHORT, "b", new IntegerRecommender(true)) .define("c", Type.INT, Importance.HIGH, "docs", "group", 3, Width.SHORT, "c", new IntegerRecommender(true)) .define("d", Type.INT, Importance.HIGH, "docs", "group", 4, Width.SHORT, "d", Arrays.asList("b"), new IntegerRecommender(false)); Map<String, String> props = new HashMap<>(); props.put("a", "1"); props.put("d", "10"); List<ConfigValue> configs = def.validate(props); for (ConfigValue config : configs) { String name = config.name(); ConfigValue expectedConfig = expected.get(name); assertEquals(expectedConfig, config); } }
|
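The pair above shows `ConfigDef.validate` reporting missing required configs rather than throwing. A minimal sketch of the same flow, assuming Kafka's `org.apache.kafka.common.config` package from the left column (`ValidateSketch` is a hypothetical class name; only `define(name, type, importance, documentation)` and `validate(Map)` from the signature are used, and `ConfigValue.errorMessages()` is assumed to expose the per-key error list):

```java
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.common.config.ConfigValue;

public class ValidateSketch {
    public static void main(String[] args) {
        // Defining without a default value makes the key required.
        ConfigDef def = new ConfigDef()
                .define("a", Type.INT, Importance.HIGH, "docs")
                .define("b", Type.INT, Importance.HIGH, "docs");

        Map<String, String> props = new HashMap<>();
        props.put("a", "1"); // "b" deliberately omitted

        // validate() never throws for missing keys; it collects error messages per key.
        for (ConfigValue cv : def.validate(props))
            System.out.println(cv.name() + " -> " + cv.errorMessages());
        // "b" carries the "Missing required configuration" message seen in testValidate.
    }
}
```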
ConfigDef { public Set<String> names() { return Collections.unmodifiableSet(configKeys.keySet()); } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
|
@Test public void testNames() { final ConfigDef configDef = new ConfigDef() .define("a", Type.STRING, Importance.LOW, "docs") .define("b", Type.STRING, Importance.LOW, "docs"); Set<String> names = configDef.names(); assertEquals(new HashSet<>(Arrays.asList("a", "b")), names); try { names.add("new"); fail(); } catch (UnsupportedOperationException e) { } }
|
ConfigDef { public String toRst() { StringBuilder b = new StringBuilder(); for (ConfigKey key : sortedConfigs()) { if (key.internalConfig) { continue; } getConfigKeyRst(key, b); b.append("\n"); } return b.toString(); } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
|
@Test public void toRst() { final ConfigDef def = new ConfigDef() .define("opt1", Type.STRING, "a", ValidString.in("a", "b", "c"), Importance.HIGH, "docs1") .define("opt2", Type.INT, Importance.MEDIUM, "docs2") .define("opt3", Type.LIST, Arrays.asList("a", "b"), Importance.LOW, "docs3"); final String expectedRst = "" + "``opt2``\n" + " docs2\n" + "\n" + " * Type: int\n" + " * Importance: medium\n" + "\n" + "``opt1``\n" + " docs1\n" + "\n" + " * Type: string\n" + " * Default: a\n" + " * Valid Values: [a, b, c]\n" + " * Importance: high\n" + "\n" + "``opt3``\n" + " docs3\n" + "\n" + " * Type: list\n" + " * Default: a,b\n" + " * Importance: low\n" + "\n"; assertEquals(expectedRst, def.toRst()); }
|
ConfigDef { public String toEnrichedRst() { StringBuilder b = new StringBuilder(); String lastKeyGroupName = ""; for (ConfigKey key : sortedConfigs()) { if (key.internalConfig) { continue; } if (key.group != null) { if (!lastKeyGroupName.equalsIgnoreCase(key.group)) { b.append(key.group).append("\n"); char[] underLine = new char[key.group.length()]; Arrays.fill(underLine, '^'); b.append(new String(underLine)).append("\n\n"); } lastKeyGroupName = key.group; } getConfigKeyRst(key, b); if (key.dependents != null && key.dependents.size() > 0) { int j = 0; b.append(" * Dependents: "); for (String dependent : key.dependents) { b.append("``"); b.append(dependent); if (++j == key.dependents.size()) b.append("``"); else b.append("``, "); } b.append("\n"); } b.append("\n"); } return b.toString(); } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
|
@Test public void toEnrichedRst() { final ConfigDef def = new ConfigDef() .define("opt1.of.group1", Type.STRING, "a", ValidString.in("a", "b", "c"), Importance.HIGH, "Doc doc.", "Group One", 0, Width.NONE, "..", Collections.<String>emptyList()) .define("opt2.of.group1", Type.INT, ConfigDef.NO_DEFAULT_VALUE, Importance.MEDIUM, "Doc doc doc.", "Group One", 1, Width.NONE, "..", Arrays.asList("some.option1", "some.option2")) .define("opt2.of.group2", Type.BOOLEAN, false, Importance.HIGH, "Doc doc doc doc.", "Group Two", 1, Width.NONE, "..", Collections.<String>emptyList()) .define("opt1.of.group2", Type.BOOLEAN, false, Importance.HIGH, "Doc doc doc doc doc.", "Group Two", 0, Width.NONE, "..", Collections.singletonList("some.option")) .define("poor.opt", Type.STRING, "foo", Importance.HIGH, "Doc doc doc doc."); final String expectedRst = "" + "``poor.opt``\n" + " Doc doc doc doc.\n" + "\n" + " * Type: string\n" + " * Default: foo\n" + " * Importance: high\n" + "\n" + "Group One\n" + "^^^^^^^^^\n" + "\n" + "``opt1.of.group1``\n" + " Doc doc.\n" + "\n" + " * Type: string\n" + " * Default: a\n" + " * Valid Values: [a, b, c]\n" + " * Importance: high\n" + "\n" + "``opt2.of.group1``\n" + " Doc doc doc.\n" + "\n" + " * Type: int\n" + " * Importance: medium\n" + " * Dependents: ``some.option1``, ``some.option2``\n" + "\n" + "Group Two\n" + "^^^^^^^^^\n" + "\n" + "``opt1.of.group2``\n" + " Doc doc doc doc doc.\n" + "\n" + " * Type: boolean\n" + " * Default: false\n" + " * Importance: high\n" + " * Dependents: ``some.option``\n" + "\n" + "``opt2.of.group2``\n" + " Doc doc doc doc.\n" + "\n" + " * Type: boolean\n" + " * Default: false\n" + " * Importance: high\n" + "\n"; assertEquals(expectedRst, def.toEnrichedRst()); }
|
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation,
String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup,
Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
|
@Test public void testConvertValueToStringBoolean() { assertEquals("true", ConfigDef.convertToString(true, Type.BOOLEAN)); assertNull(ConfigDef.convertToString(null, Type.BOOLEAN)); }
@Test public void testConvertValueToStringShort() { assertEquals("32767", ConfigDef.convertToString(Short.MAX_VALUE, Type.SHORT)); assertNull(ConfigDef.convertToString(null, Type.SHORT)); }
@Test public void testConvertValueToStringInt() { assertEquals("2147483647", ConfigDef.convertToString(Integer.MAX_VALUE, Type.INT)); assertNull(ConfigDef.convertToString(null, Type.INT)); }
@Test public void testConvertValueToStringLong() { assertEquals("9223372036854775807", ConfigDef.convertToString(Long.MAX_VALUE, Type.LONG)); assertNull(ConfigDef.convertToString(null, Type.LONG)); }
@Test public void testConvertValueToStringDouble() { assertEquals("3.125", ConfigDef.convertToString(3.125, Type.DOUBLE)); assertNull(ConfigDef.convertToString(null, Type.DOUBLE)); }
@Test public void testConvertValueToStringString() { assertEquals("foobar", ConfigDef.convertToString("foobar", Type.STRING)); assertNull(ConfigDef.convertToString(null, Type.STRING)); }
@Test public void testConvertValueToStringPassword() { assertEquals(Password.HIDDEN, ConfigDef.convertToString(new Password("foobar"), Type.PASSWORD)); assertEquals("foobar", ConfigDef.convertToString("foobar", Type.PASSWORD)); assertNull(ConfigDef.convertToString(null, Type.PASSWORD)); }
@Test public void testConvertValueToStringList() { assertEquals("a,bc,d", ConfigDef.convertToString(Arrays.asList("a", "bc", "d"), Type.LIST)); assertNull(ConfigDef.convertToString(null, Type.LIST)); }
@Test public void testConvertValueToStringClass() throws ClassNotFoundException { String actual = ConfigDef.convertToString(ConfigDefTest.class, Type.CLASS); assertEquals("org.apache.kafka.common.config.ConfigDefTest", actual); assertEquals(ConfigDefTest.class, Class.forName(actual)); assertNull(ConfigDef.convertToString(null, Type.CLASS)); }
@Test public void testConvertValueToStringNestedClass() throws ClassNotFoundException { String actual = ConfigDef.convertToString(NestedClass.class, Type.CLASS); assertEquals("org.apache.kafka.common.config.ConfigDefTest$NestedClass", actual); assertEquals(NestedClass.class, Class.forName(actual)); }
|
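The conversion tests above pair naturally with `parseType`, the inverse helper listed in the same signature. A round-trip sketch (illustrative only; `RoundTripSketch` is a hypothetical class name, and both static calls are taken verbatim from the left column):

```java
import java.util.List;

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Type;

public class RoundTripSketch {
    public static void main(String[] args) {
        // parseType turns the raw config string into a typed value...
        Object parsed = ConfigDef.parseType("my.list", "a,bc,d", Type.LIST);
        System.out.println(parsed instanceof List); // LIST parses to a java.util.List

        // ...and convertToString joins it back with commas, matching testConvertValueToStringList.
        System.out.println(ConfigDef.convertToString(parsed, Type.LIST)); // a,bc,d
    }
}
```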
AbstractConfig { public Map<String, Object> originalsWithPrefix(String prefix) { Map<String, Object> result = new RecordingMap<>(prefix, false); for (Map.Entry<String, ?> entry : originals.entrySet()) { if (entry.getKey().startsWith(prefix) && entry.getKey().length() > prefix.length()) result.put(entry.getKey().substring(prefix.length()), entry.getValue()); } return result; } @SuppressWarnings("unchecked") AbstractConfig(ConfigDef definition, Map<?, ?> originals, boolean doLog); AbstractConfig(ConfigDef definition, Map<?, ?> originals); void ignore(String key); Short getShort(String key); Integer getInt(String key); Long getLong(String key); Double getDouble(String key); @SuppressWarnings("unchecked") List<String> getList(String key); Boolean getBoolean(String key); String getString(String key); ConfigDef.Type typeOf(String key); Password getPassword(String key); Class<?> getClass(String key); Set<String> unused(); Map<String, Object> originals(); Map<String, String> originalsStrings(); Map<String, Object> originalsWithPrefix(String prefix); Map<String, Object> valuesWithPrefixOverride(String prefix); Map<String, ?> values(); void logUnused(); T getConfiguredInstance(String key, Class<T> t); List<T> getConfiguredInstances(String key, Class<T> t); List<T> getConfiguredInstances(String key, Class<T> t, Map<String, Object> configOverrides); @Override boolean equals(Object o); @Override int hashCode(); }
|
@Test public void testOriginalsWithPrefix() { Properties props = new Properties(); props.put("foo.bar", "abc"); props.put("setting", "def"); TestConfig config = new TestConfig(props); Map<String, Object> originalsWithPrefix = config.originalsWithPrefix("foo."); assertTrue(config.unused().contains("foo.bar")); originalsWithPrefix.get("bar"); assertFalse(config.unused().contains("foo.bar")); Map<String, Object> expected = new HashMap<>(); expected.put("bar", "abc"); assertEquals(expected, originalsWithPrefix); }
|
AbstractConfig { public Map<String, Object> valuesWithPrefixOverride(String prefix) { Map<String, Object> result = new RecordingMap<>(values(), prefix, true); for (Map.Entry<String, ?> entry : originals.entrySet()) { if (entry.getKey().startsWith(prefix) && entry.getKey().length() > prefix.length()) { String keyWithNoPrefix = entry.getKey().substring(prefix.length()); ConfigDef.ConfigKey configKey = definition.configKeys().get(keyWithNoPrefix); if (configKey != null) result.put(keyWithNoPrefix, definition.parseValue(configKey, entry.getValue(), true)); } } return result; } @SuppressWarnings("unchecked") AbstractConfig(ConfigDef definition, Map<?, ?> originals, boolean doLog); AbstractConfig(ConfigDef definition, Map<?, ?> originals); void ignore(String key); Short getShort(String key); Integer getInt(String key); Long getLong(String key); Double getDouble(String key); @SuppressWarnings("unchecked") List<String> getList(String key); Boolean getBoolean(String key); String getString(String key); ConfigDef.Type typeOf(String key); Password getPassword(String key); Class<?> getClass(String key); Set<String> unused(); Map<String, Object> originals(); Map<String, String> originalsStrings(); Map<String, Object> originalsWithPrefix(String prefix); Map<String, Object> valuesWithPrefixOverride(String prefix); Map<String, ?> values(); void logUnused(); T getConfiguredInstance(String key, Class<T> t); List<T> getConfiguredInstances(String key, Class<T> t); List<T> getConfiguredInstances(String key, Class<T> t, Map<String, Object> configOverrides); @Override boolean equals(Object o); @Override int hashCode(); }
|
@Test public void testValuesWithPrefixOverride() { String prefix = "prefix."; Properties props = new Properties(); props.put("sasl.mechanism", "PLAIN"); props.put("prefix.sasl.mechanism", "GSSAPI"); props.put("prefix.sasl.kerberos.kinit.cmd", "/usr/bin/kinit2"); props.put("prefix.ssl.truststore.location", "my location"); props.put("sasl.kerberos.service.name", "service name"); props.put("ssl.keymanager.algorithm", "algorithm"); TestSecurityConfig config = new TestSecurityConfig(props); Map<String, Object> valuesWithPrefixOverride = config.valuesWithPrefixOverride(prefix); assertTrue(config.unused().contains("prefix.sasl.mechanism")); assertTrue(config.unused().contains("sasl.mechanism")); assertEquals("GSSAPI", valuesWithPrefixOverride.get("sasl.mechanism")); assertFalse(config.unused().contains("sasl.mechanism")); assertFalse(config.unused().contains("prefix.sasl.mechanism")); assertTrue(config.unused().contains("prefix.sasl.kerberos.kinit.cmd")); assertFalse(config.unused().contains("sasl.kerberos.kinit.cmd")); assertEquals("/usr/bin/kinit2", valuesWithPrefixOverride.get("sasl.kerberos.kinit.cmd")); assertFalse(config.unused().contains("sasl.kerberos.kinit.cmd")); assertFalse(config.unused().contains("prefix.sasl.kerberos.kinit.cmd")); assertTrue(config.unused().contains("prefix.ssl.truststore.location")); assertFalse(config.unused().contains("ssl.truststore.location")); assertEquals("my location", valuesWithPrefixOverride.get("ssl.truststore.location")); assertFalse(config.unused().contains("ssl.truststore.location")); assertFalse(config.unused().contains("prefix.ssl.truststore.location")); assertTrue(config.unused().contains("ssl.keymanager.algorithm")); assertEquals("algorithm", valuesWithPrefixOverride.get("ssl.keymanager.algorithm")); assertFalse(config.unused().contains("ssl.keymanager.algorithm")); assertTrue(config.unused().contains("sasl.kerberos.service.name")); assertEquals("service name", valuesWithPrefixOverride.get("sasl.kerberos.service.name")); assertFalse(config.unused().contains("sasl.kerberos.service.name")); assertFalse(config.unused().contains("sasl.kerberos.min.time.before.relogin")); assertEquals(SaslConfigs.DEFAULT_KERBEROS_MIN_TIME_BEFORE_RELOGIN, valuesWithPrefixOverride.get("sasl.kerberos.min.time.before.relogin")); assertFalse(config.unused().contains("sasl.kerberos.min.time.before.relogin")); assertFalse(config.unused().contains("ssl.key.password")); assertNull(valuesWithPrefixOverride.get("ssl.key.password")); assertFalse(config.unused().contains("ssl.key.password")); }
|
AbstractConfig { public Set<String> unused() { Set<String> keys = new HashSet<>(originals.keySet()); keys.removeAll(used); return keys; } @SuppressWarnings("unchecked") AbstractConfig(ConfigDef definition, Map<?, ?> originals, boolean doLog); AbstractConfig(ConfigDef definition, Map<?, ?> originals); void ignore(String key); Short getShort(String key); Integer getInt(String key); Long getLong(String key); Double getDouble(String key); @SuppressWarnings("unchecked") List<String> getList(String key); Boolean getBoolean(String key); String getString(String key); ConfigDef.Type typeOf(String key); Password getPassword(String key); Class<?> getClass(String key); Set<String> unused(); Map<String, Object> originals(); Map<String, String> originalsStrings(); Map<String, Object> originalsWithPrefix(String prefix); Map<String, Object> valuesWithPrefixOverride(String prefix); Map<String, ?> values(); void logUnused(); T getConfiguredInstance(String key, Class<T> t); List<T> getConfiguredInstances(String key, Class<T> t); List<T> getConfiguredInstances(String key, Class<T> t, Map<String, Object> configOverrides); @Override boolean equals(Object o); @Override int hashCode(); }
|
@Test public void testUnused() { Properties props = new Properties(); String configValue = "org.apache.kafka.common.config.AbstractConfigTest$ConfiguredFakeMetricsReporter"; props.put(TestConfig.METRIC_REPORTER_CLASSES_CONFIG, configValue); props.put(FakeMetricsReporterConfig.EXTRA_CONFIG, "my_value"); TestConfig config = new TestConfig(props); assertTrue("metric.extra_config should be marked unused before getConfiguredInstances is called", config.unused().contains(FakeMetricsReporterConfig.EXTRA_CONFIG)); config.getConfiguredInstances(TestConfig.METRIC_REPORTER_CLASSES_CONFIG, MetricsReporter.class); assertTrue("All defined configurations should be marked as used", config.unused().isEmpty()); }
|
Cluster { public static Cluster bootstrap(List<InetSocketAddress> addresses) { List<Node> nodes = new ArrayList<>(); int nodeId = -1; for (InetSocketAddress address : addresses) nodes.add(new Node(nodeId--, address.getHostString(), address.getPort())); return new Cluster(null, true, nodes, new ArrayList<PartitionInfo>(0), Collections.<String>emptySet(), Collections.<String>emptySet(), null); } Cluster(String clusterId,
Collection<Node> nodes,
Collection<PartitionInfo> partitions,
Set<String> unauthorizedTopics,
Set<String> internalTopics); Cluster(String clusterId,
Collection<Node> nodes,
Collection<PartitionInfo> partitions,
Set<String> unauthorizedTopics,
Set<String> internalTopics,
Node controller); private Cluster(String clusterId,
boolean isBootstrapConfigured,
Collection<Node> nodes,
Collection<PartitionInfo> partitions,
Set<String> unauthorizedTopics,
Set<String> internalTopics,
Node controller); static Cluster empty(); static Cluster bootstrap(List<InetSocketAddress> addresses); Cluster withPartitions(Map<TopicPartition, PartitionInfo> partitions); List<Node> nodes(); Node nodeById(int id); Node leaderFor(TopicPartition topicPartition); PartitionInfo partition(TopicPartition topicPartition); List<PartitionInfo> partitionsForTopic(String topic); Integer partitionCountForTopic(String topic); List<PartitionInfo> availablePartitionsForTopic(String topic); List<PartitionInfo> partitionsForNode(int nodeId); Set<String> topics(); Set<String> unauthorizedTopics(); Set<String> internalTopics(); boolean isBootstrapConfigured(); ClusterResource clusterResource(); Node controller(); @Override String toString(); }
|
@Test public void testBootstrap() { String ipAddress = "140.211.11.105"; String hostName = "www.example.com"; Cluster cluster = Cluster.bootstrap(Arrays.asList( new InetSocketAddress(ipAddress, 9002), new InetSocketAddress(hostName, 9002) )); Set<String> expectedHosts = Utils.mkSet(ipAddress, hostName); Set<String> actualHosts = new HashSet<>(); for (Node node : cluster.nodes()) actualHosts.add(node.host()); assertEquals(expectedHosts, actualHosts); }
|
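One detail `testBootstrap` does not assert: `Cluster.bootstrap` hands out synthetic node ids, starting at -1 and decrementing, because real broker ids are unknown before the first metadata fetch. A small sketch (hypothetical class name `BootstrapSketch`; `Node.id()` and `Node.host()` are assumed accessors, while `isBootstrapConfigured()` appears in the signature above):

```java
import java.net.InetSocketAddress;
import java.util.Arrays;

import org.apache.kafka.common.Cluster;
import org.apache.kafka.common.Node;

public class BootstrapSketch {
    public static void main(String[] args) {
        Cluster cluster = Cluster.bootstrap(Arrays.asList(
                new InetSocketAddress("140.211.11.105", 9002),
                new InetSocketAddress("www.example.com", 9002)));

        // bootstrap() assigns ids -1, -2, ... since broker ids are not yet known.
        for (Node node : cluster.nodes())
            System.out.println(node.id() + " -> " + node.host());

        System.out.println(cluster.isBootstrapConfigured()); // true for bootstrap clusters
    }
}
```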
PartitionInfo { @Override public String toString() { return String.format("Partition(topic = %s, partition = %d, leader = %s, replicas = %s, isr = %s)", topic, partition, leader == null ? "none" : leader.idString(), formatNodeIds(replicas), formatNodeIds(inSyncReplicas)); } PartitionInfo(String topic, int partition, Node leader, Node[] replicas, Node[] inSyncReplicas); String topic(); int partition(); Node leader(); Node[] replicas(); Node[] inSyncReplicas(); @Override String toString(); }
|
@Test public void testToString() { String topic = "sample"; int partition = 0; Node leader = new Node(0, "localhost", 9092); Node r1 = new Node(1, "localhost", 9093); Node r2 = new Node(2, "localhost", 9094); Node[] replicas = new Node[] {leader, r1, r2}; Node[] inSyncReplicas = new Node[] {leader, r1, r2}; PartitionInfo partitionInfo = new PartitionInfo(topic, partition, leader, replicas, inSyncReplicas); String expected = String.format("Partition(topic = %s, partition = %d, leader = %s, replicas = %s, isr = %s)", topic, partition, leader.idString(), "[0,1,2]", "[0,1,2]"); Assert.assertEquals(expected, partitionInfo.toString()); }
|
RecordHeaders implements Headers { @Override public Headers add(Header header) throws IllegalStateException { canWrite(); headers.add(header); return this; } RecordHeaders(); RecordHeaders(Header[] headers); RecordHeaders(Iterable<Header> headers); @Override Headers add(Header header); @Override Headers add(String key, byte[] value); @Override Headers remove(String key); @Override Header lastHeader(String key); @Override Iterable<Header> headers(final String key); @Override Iterator<Header> iterator(); void setReadOnly(); Header[] toArray(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); }
|
@Test public void testAdd() { Headers headers = new RecordHeaders(); headers.add(new RecordHeader("key", "value".getBytes())); Header header = headers.iterator().next(); assertHeader("key", "value", header); headers.add(new RecordHeader("key2", "value2".getBytes())); assertHeader("key2", "value2", headers.lastHeader("key2")); assertEquals(2, getCount(headers)); }
|
RecordHeaders implements Headers { @Override public Iterable<Header> headers(final String key) { checkKey(key); return new Iterable<Header>() { @Override public Iterator<Header> iterator() { return new FilterByKeyIterator(headers.iterator(), key); } }; } RecordHeaders(); RecordHeaders(Header[] headers); RecordHeaders(Iterable<Header> headers); @Override Headers add(Header header); @Override Headers add(String key, byte[] value); @Override Headers remove(String key); @Override Header lastHeader(String key); @Override Iterable<Header> headers(final String key); @Override Iterator<Header> iterator(); void setReadOnly(); Header[] toArray(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); }
|
@Test public void testHeaders() throws IOException { RecordHeaders headers = new RecordHeaders(); headers.add(new RecordHeader("key", "value".getBytes())); headers.add(new RecordHeader("key1", "key1value".getBytes())); headers.add(new RecordHeader("key", "value2".getBytes())); headers.add(new RecordHeader("key2", "key2value".getBytes())); Iterator<Header> keyHeaders = headers.headers("key").iterator(); assertHeader("key", "value", keyHeaders.next()); assertHeader("key", "value2", keyHeaders.next()); assertFalse(keyHeaders.hasNext()); keyHeaders = headers.headers("key1").iterator(); assertHeader("key1", "key1value", keyHeaders.next()); assertFalse(keyHeaders.hasNext()); keyHeaders = headers.headers("key2").iterator(); assertHeader("key2", "key2value", keyHeaders.next()); assertFalse(keyHeaders.hasNext()); }
|
Topic { static boolean containsValidPattern(String topic) { return LEGAL_CHARS_PATTERN.matcher(topic).matches(); } static void validate(String topic); static boolean isInternal(String topic); static boolean hasCollisionChars(String topic); static boolean hasCollision(String topicA, String topicB); static final String GROUP_METADATA_TOPIC_NAME; static final String TRANSACTION_STATE_TOPIC_NAME; static final String LEGAL_CHARS; }
|
@Test public void shouldRecognizeInvalidCharactersInTopicNames() { char[] invalidChars = {'/', '\\', ',', '\u0000', ':', '"', '\'', ';', '*', '?', ' ', '\t', '\r', '\n', '='}; for (char c : invalidChars) { String topicName = "Is " + c + "illegal"; assertFalse(Topic.containsValidPattern(topicName)); } }
|
Java { public static boolean isIBMJdk() { return System.getProperty("java.vendor").contains("IBM"); } private Java(); static boolean isIBMJdk(); static final String JVM_SPEC_VERSION; static final boolean IS_JAVA9_COMPATIBLE; }
|
@Test public void testIsIBMJdk() { System.setProperty("java.vendor", "Oracle Corporation"); assertFalse(Java.isIBMJdk()); System.setProperty("java.vendor", "IBM Corporation"); assertTrue(Java.isIBMJdk()); }
@Test public void testLoadKerberosLoginModule() throws ClassNotFoundException { String clazz = Java.isIBMJdk() ? "com.ibm.security.auth.module.Krb5LoginModule" : "com.sun.security.auth.module.Krb5LoginModule"; Class.forName(clazz); }
|
Crc32C { public static Checksum create() { return CHECKSUM_FACTORY.create(); } static long compute(byte[] bytes, int offset, int size); static long compute(ByteBuffer buffer, int offset, int size); static Checksum create(); }
|
@Test public void testUpdate() { final byte[] bytes = "Any String you want".getBytes(); final int len = bytes.length; Checksum crc1 = Crc32C.create(); Checksum crc2 = Crc32C.create(); Checksum crc3 = Crc32C.create(); crc1.update(bytes, 0, len); for (int i = 0; i < len; i++) crc2.update(bytes[i]); crc3.update(bytes, 0, len / 2); crc3.update(bytes, len / 2, len - len / 2); assertEquals("Crc values should be the same", crc1.getValue(), crc2.getValue()); assertEquals("Crc values should be the same", crc1.getValue(), crc3.getValue()); }
|
Crc32C { public static long compute(byte[] bytes, int offset, int size) { Checksum crc = create(); crc.update(bytes, offset, size); return crc.getValue(); } static long compute(byte[] bytes, int offset, int size); static long compute(ByteBuffer buffer, int offset, int size); static Checksum create(); }
|
@Test public void testValue() { final byte[] bytes = "Some String".getBytes(); assertEquals(608512271, Crc32C.compute(bytes, 0, bytes.length)); }
|
Crc32 implements Checksum { @Override public void update(byte[] b, int off, int len) { if (off < 0 || len < 0 || off > b.length - len) throw new ArrayIndexOutOfBoundsException(); int localCrc = crc; while (len > 7) { final int c0 = (b[off + 0] ^ localCrc) & 0xff; final int c1 = (b[off + 1] ^ (localCrc >>>= 8)) & 0xff; final int c2 = (b[off + 2] ^ (localCrc >>>= 8)) & 0xff; final int c3 = (b[off + 3] ^ (localCrc >>>= 8)) & 0xff; localCrc = (T[T8_7_START + c0] ^ T[T8_6_START + c1]) ^ (T[T8_5_START + c2] ^ T[T8_4_START + c3]); final int c4 = b[off + 4] & 0xff; final int c5 = b[off + 5] & 0xff; final int c6 = b[off + 6] & 0xff; final int c7 = b[off + 7] & 0xff; localCrc ^= (T[T8_3_START + c4] ^ T[T8_2_START + c5]) ^ (T[T8_1_START + c6] ^ T[T8_0_START + c7]); off += 8; len -= 8; } switch (len) { case 7: localCrc = (localCrc >>> 8) ^ T[T8_0_START + ((localCrc ^ b[off++]) & 0xff)]; case 6: localCrc = (localCrc >>> 8) ^ T[T8_0_START + ((localCrc ^ b[off++]) & 0xff)]; case 5: localCrc = (localCrc >>> 8) ^ T[T8_0_START + ((localCrc ^ b[off++]) & 0xff)]; case 4: localCrc = (localCrc >>> 8) ^ T[T8_0_START + ((localCrc ^ b[off++]) & 0xff)]; case 3: localCrc = (localCrc >>> 8) ^ T[T8_0_START + ((localCrc ^ b[off++]) & 0xff)]; case 2: localCrc = (localCrc >>> 8) ^ T[T8_0_START + ((localCrc ^ b[off++]) & 0xff)]; case 1: localCrc = (localCrc >>> 8) ^ T[T8_0_START + ((localCrc ^ b[off++]) & 0xff)]; default: } crc = localCrc; } Crc32(); static long crc32(byte[] bytes); static long crc32(byte[] bytes, int offset, int size); static long crc32(ByteBuffer buffer, int offset, int size); @Override long getValue(); @Override void reset(); @Override void update(byte[] b, int off, int len); @Override final void update(int b); }
|
@Test public void testUpdate() { final byte[] bytes = "Any String you want".getBytes(); final int len = bytes.length; Checksum crc1 = new Crc32(); Checksum crc2 = new Crc32(); Checksum crc3 = new Crc32(); crc1.update(bytes, 0, len); for (int i = 0; i < len; i++) crc2.update(bytes[i]); crc3.update(bytes, 0, len / 2); crc3.update(bytes, len / 2, len - len / 2); assertEquals("Crc values should be the same", crc1.getValue(), crc2.getValue()); assertEquals("Crc values should be the same", crc1.getValue(), crc3.getValue()); }
|
Crc32 implements Checksum { public static long crc32(byte[] bytes) { return crc32(bytes, 0, bytes.length); } Crc32(); static long crc32(byte[] bytes); static long crc32(byte[] bytes, int offset, int size); static long crc32(ByteBuffer buffer, int offset, int size); @Override long getValue(); @Override void reset(); @Override void update(byte[] b, int off, int len); @Override final void update(int b); }
|
@Test public void testValue() { final byte[] bytes = "Some String".getBytes(); assertEquals(2021503672, Crc32.crc32(bytes)); }
|
ByteUtils { public static int readUnsignedIntLE(InputStream in) throws IOException { return in.read() | (in.read() << 8) | (in.read() << 16) | (in.read() << 24); } private ByteUtils(); static long readUnsignedInt(ByteBuffer buffer); static long readUnsignedInt(ByteBuffer buffer, int index); static int readUnsignedIntLE(InputStream in); static int readUnsignedIntLE(byte[] buffer, int offset); static void writeUnsignedInt(ByteBuffer buffer, int index, long value); static void writeUnsignedInt(ByteBuffer buffer, long value); static void writeUnsignedIntLE(OutputStream out, int value); static void writeUnsignedIntLE(byte[] buffer, int offset, int value); static int readVarint(ByteBuffer buffer); static int readVarint(DataInput in); static long readVarlong(DataInput in); static long readVarlong(ByteBuffer buffer); static void writeVarint(int value, DataOutput out); static void writeVarint(int value, ByteBuffer buffer); static void writeVarlong(long value, DataOutput out); static void writeVarlong(long value, ByteBuffer buffer); static int sizeOfVarint(int value); static int sizeOfVarlong(long value); }
|
@Test public void testReadUnsignedIntLEFromArray() { byte[] array1 = {0x01, 0x02, 0x03, 0x04, 0x05}; assertEquals(0x04030201, ByteUtils.readUnsignedIntLE(array1, 0)); assertEquals(0x05040302, ByteUtils.readUnsignedIntLE(array1, 1)); byte[] array2 = {(byte) 0xf1, (byte) 0xf2, (byte) 0xf3, (byte) 0xf4, (byte) 0xf5, (byte) 0xf6}; assertEquals(0xf4f3f2f1, ByteUtils.readUnsignedIntLE(array2, 0)); assertEquals(0xf6f5f4f3, ByteUtils.readUnsignedIntLE(array2, 2)); }
@Test public void testReadUnsignedIntLEFromInputStream() throws IOException { byte[] array1 = {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09}; ByteArrayInputStream is1 = new ByteArrayInputStream(array1); assertEquals(0x04030201, ByteUtils.readUnsignedIntLE(is1)); assertEquals(0x08070605, ByteUtils.readUnsignedIntLE(is1)); byte[] array2 = {(byte) 0xf1, (byte) 0xf2, (byte) 0xf3, (byte) 0xf4, (byte) 0xf5, (byte) 0xf6, (byte) 0xf7, (byte) 0xf8}; ByteArrayInputStream is2 = new ByteArrayInputStream(array2); assertEquals(0xf4f3f2f1, ByteUtils.readUnsignedIntLE(is2)); assertEquals(0xf8f7f6f5, ByteUtils.readUnsignedIntLE(is2)); }
|
ByteUtils { public static long readUnsignedInt(ByteBuffer buffer) { return buffer.getInt() & 0xffffffffL; } private ByteUtils(); static long readUnsignedInt(ByteBuffer buffer); static long readUnsignedInt(ByteBuffer buffer, int index); static int readUnsignedIntLE(InputStream in); static int readUnsignedIntLE(byte[] buffer, int offset); static void writeUnsignedInt(ByteBuffer buffer, int index, long value); static void writeUnsignedInt(ByteBuffer buffer, long value); static void writeUnsignedIntLE(OutputStream out, int value); static void writeUnsignedIntLE(byte[] buffer, int offset, int value); static int readVarint(ByteBuffer buffer); static int readVarint(DataInput in); static long readVarlong(DataInput in); static long readVarlong(ByteBuffer buffer); static void writeVarint(int value, DataOutput out); static void writeVarint(int value, ByteBuffer buffer); static void writeVarlong(long value, DataOutput out); static void writeVarlong(long value, ByteBuffer buffer); static int sizeOfVarint(int value); static int sizeOfVarlong(long value); }
|
@Test public void testReadUnsignedInt() { ByteBuffer buffer = ByteBuffer.allocate(4); long writeValue = 133444; ByteUtils.writeUnsignedInt(buffer, writeValue); buffer.flip(); long readValue = ByteUtils.readUnsignedInt(buffer); assertEquals(writeValue, readValue); }
|
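`readUnsignedInt` masks with `& 0xffffffffL`, so values above `Integer.MAX_VALUE` survive the round trip even though the buffer stores a signed 32-bit int; the test above only covers a small value. A sketch of the overflow case (hypothetical class name; the `writeUnsignedInt`/`readUnsignedInt` pair comes from the signature):

```java
import java.nio.ByteBuffer;

import org.apache.kafka.common.utils.ByteUtils;

public class UnsignedIntSketch {
    public static void main(String[] args) {
        ByteBuffer buffer = ByteBuffer.allocate(4);
        long big = 0xFFFFFFF0L; // 4294967280, does not fit in a signed int

        ByteUtils.writeUnsignedInt(buffer, big);
        buffer.flip();

        // The & 0xffffffffL mask restores the unsigned interpretation on read.
        System.out.println(ByteUtils.readUnsignedInt(buffer)); // 4294967280
    }
}
```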
ByteUtils { public static void writeUnsignedIntLE(OutputStream out, int value) throws IOException { out.write(value); out.write(value >>> 8); out.write(value >>> 16); out.write(value >>> 24); } private ByteUtils(); static long readUnsignedInt(ByteBuffer buffer); static long readUnsignedInt(ByteBuffer buffer, int index); static int readUnsignedIntLE(InputStream in); static int readUnsignedIntLE(byte[] buffer, int offset); static void writeUnsignedInt(ByteBuffer buffer, int index, long value); static void writeUnsignedInt(ByteBuffer buffer, long value); static void writeUnsignedIntLE(OutputStream out, int value); static void writeUnsignedIntLE(byte[] buffer, int offset, int value); static int readVarint(ByteBuffer buffer); static int readVarint(DataInput in); static long readVarlong(DataInput in); static long readVarlong(ByteBuffer buffer); static void writeVarint(int value, DataOutput out); static void writeVarint(int value, ByteBuffer buffer); static void writeVarlong(long value, DataOutput out); static void writeVarlong(long value, ByteBuffer buffer); static int sizeOfVarint(int value); static int sizeOfVarlong(long value); }
|
@Test public void testWriteUnsignedIntLEToArray() { int value1 = 0x04030201; byte[] array1 = new byte[4]; ByteUtils.writeUnsignedIntLE(array1, 0, value1); assertArrayEquals(new byte[] {0x01, 0x02, 0x03, 0x04}, array1); array1 = new byte[8]; ByteUtils.writeUnsignedIntLE(array1, 2, value1); assertArrayEquals(new byte[] {0, 0, 0x01, 0x02, 0x03, 0x04, 0, 0}, array1); int value2 = 0xf4f3f2f1; byte[] array2 = new byte[4]; ByteUtils.writeUnsignedIntLE(array2, 0, value2); assertArrayEquals(new byte[] {(byte) 0xf1, (byte) 0xf2, (byte) 0xf3, (byte) 0xf4}, array2); array2 = new byte[8]; ByteUtils.writeUnsignedIntLE(array2, 2, value2); assertArrayEquals(new byte[] {0, 0, (byte) 0xf1, (byte) 0xf2, (byte) 0xf3, (byte) 0xf4, 0, 0}, array2); }
@Test public void testWriteUnsignedIntLEToOutputStream() throws IOException { int value1 = 0x04030201; ByteArrayOutputStream os1 = new ByteArrayOutputStream(); ByteUtils.writeUnsignedIntLE(os1, value1); ByteUtils.writeUnsignedIntLE(os1, value1); assertArrayEquals(new byte[] {0x01, 0x02, 0x03, 0x04, 0x01, 0x02, 0x03, 0x04}, os1.toByteArray()); int value2 = 0xf4f3f2f1; ByteArrayOutputStream os2 = new ByteArrayOutputStream(); ByteUtils.writeUnsignedIntLE(os2, value2); assertArrayEquals(new byte[] {(byte) 0xf1, (byte) 0xf2, (byte) 0xf3, (byte) 0xf4}, os2.toByteArray()); }
|
ByteUtils { public static int readVarint(ByteBuffer buffer) { int value = 0; int i = 0; int b; while (((b = buffer.get()) & 0x80) != 0) { value |= (b & 0x7f) << i; i += 7; if (i > 28) throw illegalVarintException(value); } value |= b << i; return (value >>> 1) ^ -(value & 1); } private ByteUtils(); static long readUnsignedInt(ByteBuffer buffer); static long readUnsignedInt(ByteBuffer buffer, int index); static int readUnsignedIntLE(InputStream in); static int readUnsignedIntLE(byte[] buffer, int offset); static void writeUnsignedInt(ByteBuffer buffer, int index, long value); static void writeUnsignedInt(ByteBuffer buffer, long value); static void writeUnsignedIntLE(OutputStream out, int value); static void writeUnsignedIntLE(byte[] buffer, int offset, int value); static int readVarint(ByteBuffer buffer); static int readVarint(DataInput in); static long readVarlong(DataInput in); static long readVarlong(ByteBuffer buffer); static void writeVarint(int value, DataOutput out); static void writeVarint(int value, ByteBuffer buffer); static void writeVarlong(long value, DataOutput out); static void writeVarlong(long value, ByteBuffer buffer); static int sizeOfVarint(int value); static int sizeOfVarlong(long value); }
|
@Test(expected = IllegalArgumentException.class) public void testInvalidVarint() { ByteBuffer buf = ByteBuffer.wrap(new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, 0x01}); ByteUtils.readVarint(buf); }
|
ByteUtils { public static long readVarlong(DataInput in) throws IOException { long value = 0L; int i = 0; long b; while (((b = in.readByte()) & 0x80) != 0) { value |= (b & 0x7f) << i; i += 7; if (i > 63) throw illegalVarlongException(value); } value |= b << i; return (value >>> 1) ^ -(value & 1); } private ByteUtils(); static long readUnsignedInt(ByteBuffer buffer); static long readUnsignedInt(ByteBuffer buffer, int index); static int readUnsignedIntLE(InputStream in); static int readUnsignedIntLE(byte[] buffer, int offset); static void writeUnsignedInt(ByteBuffer buffer, int index, long value); static void writeUnsignedInt(ByteBuffer buffer, long value); static void writeUnsignedIntLE(OutputStream out, int value); static void writeUnsignedIntLE(byte[] buffer, int offset, int value); static int readVarint(ByteBuffer buffer); static int readVarint(DataInput in); static long readVarlong(DataInput in); static long readVarlong(ByteBuffer buffer); static void writeVarint(int value, DataOutput out); static void writeVarint(int value, ByteBuffer buffer); static void writeVarlong(long value, DataOutput out); static void writeVarlong(long value, ByteBuffer buffer); static int sizeOfVarint(int value); static int sizeOfVarlong(long value); }
|
@Test(expected = IllegalArgumentException.class) public void testInvalidVarlong() { ByteBuffer buf = ByteBuffer.wrap(new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, 0x01}); ByteUtils.readVarlong(buf); }
|
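The final `(value >>> 1) ^ -(value & 1)` in `readVarint`/`readVarlong` is zig-zag decoding, which maps the unsigned wire form back to signed values (0→0, 1→-1, 2→1, ...). A round-trip sketch (hypothetical class name; `writeVarint`, `readVarint`, and `sizeOfVarint` are from the signature), where -300 zig-zag encodes to 599 and fits in two bytes:

```java
import java.nio.ByteBuffer;

import org.apache.kafka.common.utils.ByteUtils;

public class VarintSketch {
    public static void main(String[] args) {
        int value = -300; // zig-zag encodes to 599, a two-byte varint

        ByteBuffer buffer = ByteBuffer.allocate(ByteUtils.sizeOfVarint(value));
        ByteUtils.writeVarint(value, buffer);
        buffer.flip();

        // readVarint undoes the zig-zag: (v >>> 1) ^ -(v & 1)
        System.out.println(ByteUtils.readVarint(buffer)); // -300
    }
}
```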
Utils { public static String getHost(String address) { Matcher matcher = HOST_PORT_PATTERN.matcher(address); return matcher.matches() ? matcher.group(1) : null; } static List<T> sorted(Collection<T> collection); static String utf8(byte[] bytes); static String utf8(ByteBuffer buffer, int length); static String utf8(ByteBuffer buffer, int offset, int length); static byte[] utf8(String string); static int abs(int n); static long min(long first, long ... rest); static long max(long first, long ... rest); static short min(short first, short second); static int utf8Length(CharSequence s); static byte[] toArray(ByteBuffer buffer); static byte[] toArray(ByteBuffer buffer, int size); static byte[] toNullableArray(ByteBuffer buffer); static ByteBuffer wrapNullable(byte[] array); static byte[] toArray(ByteBuffer buffer, int offset, int size); static T notNull(T t); static void sleep(long ms); static T newInstance(Class<T> c); static T newInstance(String klass, Class<T> base); static int murmur2(final byte[] data); static String getHost(String address); static Integer getPort(String address); static String formatAddress(String host, Integer port); static String join(T[] strs, String separator); static String join(Collection<T> list, String separator); static String mkString(Map<K, V> map, String begin, String end,
String keyValueSeparator, String elementSeparator); static Properties loadProps(String filename); static Map<String, String> propsToStringMap(Properties props); static String stackTrace(Throwable e); static Thread newThread(String name, Runnable runnable, boolean daemon); static Thread daemonThread(String name, Runnable runnable); static void croak(String message); static byte[] readBytes(ByteBuffer buffer, int offset, int length); static byte[] readBytes(ByteBuffer buffer); static String readFileAsString(String path, Charset charset); static String readFileAsString(String path); static ByteBuffer ensureCapacity(ByteBuffer existingBuffer, int newLength); @SafeVarargs static Set<T> mkSet(T... elems); @SafeVarargs static List<T> mkList(T... elems); static void delete(final File file); static List<T> safe(List<T> other); static ClassLoader getKafkaClassLoader(); static ClassLoader getContextOrKafkaClassLoader(); static void atomicMoveWithFallback(Path source, Path target); static void closeAll(Closeable... closeables); static void closeQuietly(AutoCloseable closeable, String name); static int toPositive(int number); static int longHashcode(long value); static ByteBuffer sizeDelimited(ByteBuffer buffer, int start); static void readFullyOrFail(FileChannel channel, ByteBuffer destinationBuffer, long position,
String description); static void readFully(FileChannel channel, ByteBuffer destinationBuffer, long position); static final void readFully(InputStream inputStream, ByteBuffer destinationBuffer); static void writeFully(FileChannel channel, ByteBuffer sourceBuffer); static void writeTo(DataOutput out, ByteBuffer buffer, int length); static List<T> toList(Iterator<T> iterator); static final String NL; }
|
@Test public void testGetHost() { assertEquals("127.0.0.1", getHost("127.0.0.1:8000")); assertEquals("mydomain.com", getHost("PLAINTEXT://mydomain.com:8080")); assertEquals("MyDomain.com", getHost("PLAINTEXT://MyDomain.com:8080")); assertEquals("My_Domain.com", getHost("PLAINTEXT://My_Domain.com:8080")); assertEquals("::1", getHost("[::1]:1234")); assertEquals("2001:db8:85a3:8d3:1319:8a2e:370:7348", getHost("PLAINTEXT://[2001:db8:85a3:8d3:1319:8a2e:370:7348]:5678")); assertEquals("2001:DB8:85A3:8D3:1319:8A2E:370:7348", getHost("PLAINTEXT://[2001:DB8:85A3:8D3:1319:8A2E:370:7348]:5678")); assertEquals("fe80::b1da:69ca:57f7:63d8%3", getHost("PLAINTEXT://[fe80::b1da:69ca:57f7:63d8%3]:5678")); }
|
Utils { public static Integer getPort(String address) { Matcher matcher = HOST_PORT_PATTERN.matcher(address); return matcher.matches() ? Integer.parseInt(matcher.group(2)) : null; } static List<T> sorted(Collection<T> collection); static String utf8(byte[] bytes); static String utf8(ByteBuffer buffer, int length); static String utf8(ByteBuffer buffer, int offset, int length); static byte[] utf8(String string); static int abs(int n); static long min(long first, long ... rest); static long max(long first, long ... rest); static short min(short first, short second); static int utf8Length(CharSequence s); static byte[] toArray(ByteBuffer buffer); static byte[] toArray(ByteBuffer buffer, int size); static byte[] toNullableArray(ByteBuffer buffer); static ByteBuffer wrapNullable(byte[] array); static byte[] toArray(ByteBuffer buffer, int offset, int size); static T notNull(T t); static void sleep(long ms); static T newInstance(Class<T> c); static T newInstance(String klass, Class<T> base); static int murmur2(final byte[] data); static String getHost(String address); static Integer getPort(String address); static String formatAddress(String host, Integer port); static String join(T[] strs, String separator); static String join(Collection<T> list, String separator); static String mkString(Map<K, V> map, String begin, String end,
String keyValueSeparator, String elementSeparator); static Properties loadProps(String filename); static Map<String, String> propsToStringMap(Properties props); static String stackTrace(Throwable e); static Thread newThread(String name, Runnable runnable, boolean daemon); static Thread daemonThread(String name, Runnable runnable); static void croak(String message); static byte[] readBytes(ByteBuffer buffer, int offset, int length); static byte[] readBytes(ByteBuffer buffer); static String readFileAsString(String path, Charset charset); static String readFileAsString(String path); static ByteBuffer ensureCapacity(ByteBuffer existingBuffer, int newLength); @SafeVarargs static Set<T> mkSet(T... elems); @SafeVarargs static List<T> mkList(T... elems); static void delete(final File file); static List<T> safe(List<T> other); static ClassLoader getKafkaClassLoader(); static ClassLoader getContextOrKafkaClassLoader(); static void atomicMoveWithFallback(Path source, Path target); static void closeAll(Closeable... closeables); static void closeQuietly(AutoCloseable closeable, String name); static int toPositive(int number); static int longHashcode(long value); static ByteBuffer sizeDelimited(ByteBuffer buffer, int start); static void readFullyOrFail(FileChannel channel, ByteBuffer destinationBuffer, long position,
String description); static void readFully(FileChannel channel, ByteBuffer destinationBuffer, long position); static final void readFully(InputStream inputStream, ByteBuffer destinationBuffer); static void writeFully(FileChannel channel, ByteBuffer sourceBuffer); static void writeTo(DataOutput out, ByteBuffer buffer, int length); static List<T> toList(Iterator<T> iterator); static final String NL; }
|
@Test public void testGetPort() { assertEquals(8000, getPort("127.0.0.1:8000").intValue()); assertEquals(8080, getPort("mydomain.com:8080").intValue()); assertEquals(8080, getPort("MyDomain.com:8080").intValue()); assertEquals(1234, getPort("[::1]:1234").intValue()); assertEquals(5678, getPort("[2001:db8:85a3:8d3:1319:8a2e:370:7348]:5678").intValue()); assertEquals(5678, getPort("[2001:DB8:85A3:8D3:1319:8A2E:370:7348]:5678").intValue()); assertEquals(5678, getPort("[fe80::b1da:69ca:57f7:63d8%3]:5678").intValue()); }
|
Utils { public static String formatAddress(String host, Integer port) { return host.contains(":") ? "[" + host + "]:" + port : host + ":" + port; } static List<T> sorted(Collection<T> collection); static String utf8(byte[] bytes); static String utf8(ByteBuffer buffer, int length); static String utf8(ByteBuffer buffer, int offset, int length); static byte[] utf8(String string); static int abs(int n); static long min(long first, long ... rest); static long max(long first, long ... rest); static short min(short first, short second); static int utf8Length(CharSequence s); static byte[] toArray(ByteBuffer buffer); static byte[] toArray(ByteBuffer buffer, int size); static byte[] toNullableArray(ByteBuffer buffer); static ByteBuffer wrapNullable(byte[] array); static byte[] toArray(ByteBuffer buffer, int offset, int size); static T notNull(T t); static void sleep(long ms); static T newInstance(Class<T> c); static T newInstance(String klass, Class<T> base); static int murmur2(final byte[] data); static String getHost(String address); static Integer getPort(String address); static String formatAddress(String host, Integer port); static String join(T[] strs, String separator); static String join(Collection<T> list, String separator); static String mkString(Map<K, V> map, String begin, String end,
String keyValueSeparator, String elementSeparator); static Properties loadProps(String filename); static Map<String, String> propsToStringMap(Properties props); static String stackTrace(Throwable e); static Thread newThread(String name, Runnable runnable, boolean daemon); static Thread daemonThread(String name, Runnable runnable); static void croak(String message); static byte[] readBytes(ByteBuffer buffer, int offset, int length); static byte[] readBytes(ByteBuffer buffer); static String readFileAsString(String path, Charset charset); static String readFileAsString(String path); static ByteBuffer ensureCapacity(ByteBuffer existingBuffer, int newLength); @SafeVarargs static Set<T> mkSet(T... elems); @SafeVarargs static List<T> mkList(T... elems); static void delete(final File file); static List<T> safe(List<T> other); static ClassLoader getKafkaClassLoader(); static ClassLoader getContextOrKafkaClassLoader(); static void atomicMoveWithFallback(Path source, Path target); static void closeAll(Closeable... closeables); static void closeQuietly(AutoCloseable closeable, String name); static int toPositive(int number); static int longHashcode(long value); static ByteBuffer sizeDelimited(ByteBuffer buffer, int start); static void readFullyOrFail(FileChannel channel, ByteBuffer destinationBuffer, long position,
String description); static void readFully(FileChannel channel, ByteBuffer destinationBuffer, long position); static final void readFully(InputStream inputStream, ByteBuffer destinationBuffer); static void writeFully(FileChannel channel, ByteBuffer sourceBuffer); static void writeTo(DataOutput out, ByteBuffer buffer, int length); static List<T> toList(Iterator<T> iterator); static final String NL; }
|
@Test public void testFormatAddress() { assertEquals("127.0.0.1:8000", formatAddress("127.0.0.1", 8000)); assertEquals("mydomain.com:8080", formatAddress("mydomain.com", 8080)); assertEquals("[::1]:1234", formatAddress("::1", 1234)); assertEquals("[2001:db8:85a3:8d3:1319:8a2e:370:7348]:5678", formatAddress("2001:db8:85a3:8d3:1319:8a2e:370:7348", 5678)); }
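Since getHost, getPort, and formatAddress are designed to round-trip, a short combined sketch may help; the class name is hypothetical and the static imports assume the org.apache.kafka.common.utils package.

import static org.apache.kafka.common.utils.Utils.formatAddress;
import static org.apache.kafka.common.utils.Utils.getHost;
import static org.apache.kafka.common.utils.Utils.getPort;

public class AddressRoundTrip {
    public static void main(String[] args) {
        // getHost strips the brackets from an IPv6 literal, and
        // formatAddress restores them whenever the host contains a colon.
        String address = "[2001:db8::1]:9092";
        String host = getHost(address);   // "2001:db8::1"
        Integer port = getPort(address);  // 9092
        System.out.println(formatAddress(host, port)); // "[2001:db8::1]:9092"
    }
}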
|
Utils { public static <T> String join(T[] strs, String separator) { return join(Arrays.asList(strs), separator); } static List<T> sorted(Collection<T> collection); static String utf8(byte[] bytes); static String utf8(ByteBuffer buffer, int length); static String utf8(ByteBuffer buffer, int offset, int length); static byte[] utf8(String string); static int abs(int n); static long min(long first, long ... rest); static long max(long first, long ... rest); static short min(short first, short second); static int utf8Length(CharSequence s); static byte[] toArray(ByteBuffer buffer); static byte[] toArray(ByteBuffer buffer, int size); static byte[] toNullableArray(ByteBuffer buffer); static ByteBuffer wrapNullable(byte[] array); static byte[] toArray(ByteBuffer buffer, int offset, int size); static T notNull(T t); static void sleep(long ms); static T newInstance(Class<T> c); static T newInstance(String klass, Class<T> base); static int murmur2(final byte[] data); static String getHost(String address); static Integer getPort(String address); static String formatAddress(String host, Integer port); static String join(T[] strs, String separator); static String join(Collection<T> list, String separator); static String mkString(Map<K, V> map, String begin, String end,
String keyValueSeparator, String elementSeparator); static Properties loadProps(String filename); static Map<String, String> propsToStringMap(Properties props); static String stackTrace(Throwable e); static Thread newThread(String name, Runnable runnable, boolean daemon); static Thread daemonThread(String name, Runnable runnable); static void croak(String message); static byte[] readBytes(ByteBuffer buffer, int offset, int length); static byte[] readBytes(ByteBuffer buffer); static String readFileAsString(String path, Charset charset); static String readFileAsString(String path); static ByteBuffer ensureCapacity(ByteBuffer existingBuffer, int newLength); @SafeVarargs static Set<T> mkSet(T... elems); @SafeVarargs static List<T> mkList(T... elems); static void delete(final File file); static List<T> safe(List<T> other); static ClassLoader getKafkaClassLoader(); static ClassLoader getContextOrKafkaClassLoader(); static void atomicMoveWithFallback(Path source, Path target); static void closeAll(Closeable... closeables); static void closeQuietly(AutoCloseable closeable, String name); static int toPositive(int number); static int longHashcode(long value); static ByteBuffer sizeDelimited(ByteBuffer buffer, int start); static void readFullyOrFail(FileChannel channel, ByteBuffer destinationBuffer, long position,
String description); static void readFully(FileChannel channel, ByteBuffer destinationBuffer, long position); static final void readFully(InputStream inputStream, ByteBuffer destinationBuffer); static void writeFully(FileChannel channel, ByteBuffer sourceBuffer); static void writeTo(DataOutput out, ByteBuffer buffer, int length); static List<T> toList(Iterator<T> iterator); static final String NL; }
|
@Test public void testJoin() { assertEquals("", Utils.join(Collections.emptyList(), ",")); assertEquals("1", Utils.join(Arrays.asList("1"), ",")); assertEquals("1,2,3", Utils.join(Arrays.asList(1, 2, 3), ",")); }
|
Utils { public static int abs(int n) { return (n == Integer.MIN_VALUE) ? 0 : Math.abs(n); } static List<T> sorted(Collection<T> collection); static String utf8(byte[] bytes); static String utf8(ByteBuffer buffer, int length); static String utf8(ByteBuffer buffer, int offset, int length); static byte[] utf8(String string); static int abs(int n); static long min(long first, long ... rest); static long max(long first, long ... rest); static short min(short first, short second); static int utf8Length(CharSequence s); static byte[] toArray(ByteBuffer buffer); static byte[] toArray(ByteBuffer buffer, int size); static byte[] toNullableArray(ByteBuffer buffer); static ByteBuffer wrapNullable(byte[] array); static byte[] toArray(ByteBuffer buffer, int offset, int size); static T notNull(T t); static void sleep(long ms); static T newInstance(Class<T> c); static T newInstance(String klass, Class<T> base); static int murmur2(final byte[] data); static String getHost(String address); static Integer getPort(String address); static String formatAddress(String host, Integer port); static String join(T[] strs, String separator); static String join(Collection<T> list, String separator); static String mkString(Map<K, V> map, String begin, String end,
String keyValueSeparator, String elementSeparator); static Properties loadProps(String filename); static Map<String, String> propsToStringMap(Properties props); static String stackTrace(Throwable e); static Thread newThread(String name, Runnable runnable, boolean daemon); static Thread daemonThread(String name, Runnable runnable); static void croak(String message); static byte[] readBytes(ByteBuffer buffer, int offset, int length); static byte[] readBytes(ByteBuffer buffer); static String readFileAsString(String path, Charset charset); static String readFileAsString(String path); static ByteBuffer ensureCapacity(ByteBuffer existingBuffer, int newLength); @SafeVarargs static Set<T> mkSet(T... elems); @SafeVarargs static List<T> mkList(T... elems); static void delete(final File file); static List<T> safe(List<T> other); static ClassLoader getKafkaClassLoader(); static ClassLoader getContextOrKafkaClassLoader(); static void atomicMoveWithFallback(Path source, Path target); static void closeAll(Closeable... closeables); static void closeQuietly(AutoCloseable closeable, String name); static int toPositive(int number); static int longHashcode(long value); static ByteBuffer sizeDelimited(ByteBuffer buffer, int start); static void readFullyOrFail(FileChannel channel, ByteBuffer destinationBuffer, long position,
String description); static void readFully(FileChannel channel, ByteBuffer destinationBuffer, long position); static final void readFully(InputStream inputStream, ByteBuffer destinationBuffer); static void writeFully(FileChannel channel, ByteBuffer sourceBuffer); static void writeTo(DataOutput out, ByteBuffer buffer, int length); static List<T> toList(Iterator<T> iterator); static final String NL; }
|
@Test public void testAbs() { assertEquals(0, Utils.abs(Integer.MIN_VALUE)); assertEquals(10, Utils.abs(-10)); assertEquals(10, Utils.abs(10)); assertEquals(0, Utils.abs(0)); assertEquals(1, Utils.abs(-1)); }
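The Integer.MIN_VALUE special case matters because Math.abs overflows there; a quick demonstration (class name hypothetical):

import org.apache.kafka.common.utils.Utils;

public class AbsDemo {
    public static void main(String[] args) {
        // +2^31 is not representable as an int, so Math.abs wraps
        // around and stays negative.
        System.out.println(Math.abs(Integer.MIN_VALUE)); // -2147483648
        // Utils.abs pins that one case to 0, keeping the result safe
        // to use as a non-negative index or hash seed.
        System.out.println(Utils.abs(Integer.MIN_VALUE)); // 0
    }
}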
|
Utils { public static byte[] toArray(ByteBuffer buffer) { return toArray(buffer, 0, buffer.remaining()); } static List<T> sorted(Collection<T> collection); static String utf8(byte[] bytes); static String utf8(ByteBuffer buffer, int length); static String utf8(ByteBuffer buffer, int offset, int length); static byte[] utf8(String string); static int abs(int n); static long min(long first, long ... rest); static long max(long first, long ... rest); static short min(short first, short second); static int utf8Length(CharSequence s); static byte[] toArray(ByteBuffer buffer); static byte[] toArray(ByteBuffer buffer, int size); static byte[] toNullableArray(ByteBuffer buffer); static ByteBuffer wrapNullable(byte[] array); static byte[] toArray(ByteBuffer buffer, int offset, int size); static T notNull(T t); static void sleep(long ms); static T newInstance(Class<T> c); static T newInstance(String klass, Class<T> base); static int murmur2(final byte[] data); static String getHost(String address); static Integer getPort(String address); static String formatAddress(String host, Integer port); static String join(T[] strs, String separator); static String join(Collection<T> list, String separator); static String mkString(Map<K, V> map, String begin, String end,
String keyValueSeparator, String elementSeparator); static Properties loadProps(String filename); static Map<String, String> propsToStringMap(Properties props); static String stackTrace(Throwable e); static Thread newThread(String name, Runnable runnable, boolean daemon); static Thread daemonThread(String name, Runnable runnable); static void croak(String message); static byte[] readBytes(ByteBuffer buffer, int offset, int length); static byte[] readBytes(ByteBuffer buffer); static String readFileAsString(String path, Charset charset); static String readFileAsString(String path); static ByteBuffer ensureCapacity(ByteBuffer existingBuffer, int newLength); @SafeVarargs static Set<T> mkSet(T... elems); @SafeVarargs static List<T> mkList(T... elems); static void delete(final File file); static List<T> safe(List<T> other); static ClassLoader getKafkaClassLoader(); static ClassLoader getContextOrKafkaClassLoader(); static void atomicMoveWithFallback(Path source, Path target); static void closeAll(Closeable... closeables); static void closeQuietly(AutoCloseable closeable, String name); static int toPositive(int number); static int longHashcode(long value); static ByteBuffer sizeDelimited(ByteBuffer buffer, int start); static void readFullyOrFail(FileChannel channel, ByteBuffer destinationBuffer, long position,
String description); static void readFully(FileChannel channel, ByteBuffer destinationBuffer, long position); static final void readFully(InputStream inputStream, ByteBuffer destinationBuffer); static void writeFully(FileChannel channel, ByteBuffer sourceBuffer); static void writeTo(DataOutput out, ByteBuffer buffer, int length); static List<T> toList(Iterator<T> iterator); static final String NL; }
|
@Test public void toArray() { byte[] input = {0, 1, 2, 3, 4}; ByteBuffer buffer = ByteBuffer.wrap(input); assertArrayEquals(input, Utils.toArray(buffer)); assertEquals(0, buffer.position()); assertArrayEquals(new byte[] {1, 2}, Utils.toArray(buffer, 1, 2)); assertEquals(0, buffer.position()); buffer.position(2); assertArrayEquals(new byte[] {2, 3, 4}, Utils.toArray(buffer)); assertEquals(2, buffer.position()); }
@Test public void toArrayDirectByteBuffer() { byte[] input = {0, 1, 2, 3, 4}; ByteBuffer buffer = ByteBuffer.allocateDirect(5); buffer.put(input); buffer.rewind(); assertArrayEquals(input, Utils.toArray(buffer)); assertEquals(0, buffer.position()); assertArrayEquals(new byte[] {1, 2}, Utils.toArray(buffer, 1, 2)); assertEquals(0, buffer.position()); buffer.position(2); assertArrayEquals(new byte[] {2, 3, 4}, Utils.toArray(buffer)); assertEquals(2, buffer.position()); }
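As the direct-buffer test shows, toArray copies without relying on a backing array and leaves the buffer's position untouched; a compact sketch (class name hypothetical):

import java.nio.ByteBuffer;
import org.apache.kafka.common.utils.Utils;

public class ToArrayDemo {
    public static void main(String[] args) {
        // Direct buffers expose no backing array, so toArray has to
        // copy through the buffer API while restoring the position.
        ByteBuffer direct = (ByteBuffer) ByteBuffer.allocateDirect(4)
                .put(new byte[] {1, 2, 3, 4})
                .flip();
        byte[] copy = Utils.toArray(direct);
        System.out.println(copy.length + " bytes, position=" + direct.position()); // 4 bytes, position=0
    }
}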
|
Utils { public static byte[] readBytes(ByteBuffer buffer, int offset, int length) { byte[] dest = new byte[length]; if (buffer.hasArray()) { System.arraycopy(buffer.array(), buffer.arrayOffset() + offset, dest, 0, length); } else { buffer.mark(); buffer.position(offset); buffer.get(dest, 0, length); buffer.reset(); } return dest; } static List<T> sorted(Collection<T> collection); static String utf8(byte[] bytes); static String utf8(ByteBuffer buffer, int length); static String utf8(ByteBuffer buffer, int offset, int length); static byte[] utf8(String string); static int abs(int n); static long min(long first, long ... rest); static long max(long first, long ... rest); static short min(short first, short second); static int utf8Length(CharSequence s); static byte[] toArray(ByteBuffer buffer); static byte[] toArray(ByteBuffer buffer, int size); static byte[] toNullableArray(ByteBuffer buffer); static ByteBuffer wrapNullable(byte[] array); static byte[] toArray(ByteBuffer buffer, int offset, int size); static T notNull(T t); static void sleep(long ms); static T newInstance(Class<T> c); static T newInstance(String klass, Class<T> base); static int murmur2(final byte[] data); static String getHost(String address); static Integer getPort(String address); static String formatAddress(String host, Integer port); static String join(T[] strs, String separator); static String join(Collection<T> list, String separator); static String mkString(Map<K, V> map, String begin, String end,
String keyValueSeparator, String elementSeparator); static Properties loadProps(String filename); static Map<String, String> propsToStringMap(Properties props); static String stackTrace(Throwable e); static Thread newThread(String name, Runnable runnable, boolean daemon); static Thread daemonThread(String name, Runnable runnable); static void croak(String message); static byte[] readBytes(ByteBuffer buffer, int offset, int length); static byte[] readBytes(ByteBuffer buffer); static String readFileAsString(String path, Charset charset); static String readFileAsString(String path); static ByteBuffer ensureCapacity(ByteBuffer existingBuffer, int newLength); @SafeVarargs static Set<T> mkSet(T... elems); @SafeVarargs static List<T> mkList(T... elems); static void delete(final File file); static List<T> safe(List<T> other); static ClassLoader getKafkaClassLoader(); static ClassLoader getContextOrKafkaClassLoader(); static void atomicMoveWithFallback(Path source, Path target); static void closeAll(Closeable... closeables); static void closeQuietly(AutoCloseable closeable, String name); static int toPositive(int number); static int longHashcode(long value); static ByteBuffer sizeDelimited(ByteBuffer buffer, int start); static void readFullyOrFail(FileChannel channel, ByteBuffer destinationBuffer, long position,
String description); static void readFully(FileChannel channel, ByteBuffer destinationBuffer, long position); static final void readFully(InputStream inputStream, ByteBuffer destinationBuffer); static void writeFully(FileChannel channel, ByteBuffer sourceBuffer); static void writeTo(DataOutput out, ByteBuffer buffer, int length); static List<T> toList(Iterator<T> iterator); static final String NL; }
|
@Test public void testReadBytes() { byte[] myvar = "Any String you want".getBytes(); ByteBuffer buffer = ByteBuffer.allocate(myvar.length); buffer.put(myvar); buffer.rewind(); this.subTest(buffer); buffer = ByteBuffer.wrap(myvar).asReadOnlyBuffer(); this.subTest(buffer); }
|
Utils { public static long min(long first, long ... rest) { long min = first; for (long r : rest) { if (r < min) min = r; } return min; } static List<T> sorted(Collection<T> collection); static String utf8(byte[] bytes); static String utf8(ByteBuffer buffer, int length); static String utf8(ByteBuffer buffer, int offset, int length); static byte[] utf8(String string); static int abs(int n); static long min(long first, long ... rest); static long max(long first, long ... rest); static short min(short first, short second); static int utf8Length(CharSequence s); static byte[] toArray(ByteBuffer buffer); static byte[] toArray(ByteBuffer buffer, int size); static byte[] toNullableArray(ByteBuffer buffer); static ByteBuffer wrapNullable(byte[] array); static byte[] toArray(ByteBuffer buffer, int offset, int size); static T notNull(T t); static void sleep(long ms); static T newInstance(Class<T> c); static T newInstance(String klass, Class<T> base); static int murmur2(final byte[] data); static String getHost(String address); static Integer getPort(String address); static String formatAddress(String host, Integer port); static String join(T[] strs, String separator); static String join(Collection<T> list, String separator); static String mkString(Map<K, V> map, String begin, String end,
String keyValueSeparator, String elementSeparator); static Properties loadProps(String filename); static Map<String, String> propsToStringMap(Properties props); static String stackTrace(Throwable e); static Thread newThread(String name, Runnable runnable, boolean daemon); static Thread daemonThread(String name, Runnable runnable); static void croak(String message); static byte[] readBytes(ByteBuffer buffer, int offset, int length); static byte[] readBytes(ByteBuffer buffer); static String readFileAsString(String path, Charset charset); static String readFileAsString(String path); static ByteBuffer ensureCapacity(ByteBuffer existingBuffer, int newLength); @SafeVarargs static Set<T> mkSet(T... elems); @SafeVarargs static List<T> mkList(T... elems); static void delete(final File file); static List<T> safe(List<T> other); static ClassLoader getKafkaClassLoader(); static ClassLoader getContextOrKafkaClassLoader(); static void atomicMoveWithFallback(Path source, Path target); static void closeAll(Closeable... closeables); static void closeQuietly(AutoCloseable closeable, String name); static int toPositive(int number); static int longHashcode(long value); static ByteBuffer sizeDelimited(ByteBuffer buffer, int start); static void readFullyOrFail(FileChannel channel, ByteBuffer destinationBuffer, long position,
String description); static void readFully(FileChannel channel, ByteBuffer destinationBuffer, long position); static final void readFully(InputStream inputStream, ByteBuffer destinationBuffer); static void writeFully(FileChannel channel, ByteBuffer sourceBuffer); static void writeTo(DataOutput out, ByteBuffer buffer, int length); static List<T> toList(Iterator<T> iterator); static final String NL; }
|
@Test public void testMin() { assertEquals(1, Utils.min(1)); assertEquals(1, Utils.min(1, 2, 3)); assertEquals(1, Utils.min(2, 1, 3)); assertEquals(1, Utils.min(2, 3, 1)); }
|
Utils { public static void closeAll(Closeable... closeables) throws IOException { IOException exception = null; for (Closeable closeable : closeables) { try { closeable.close(); } catch (IOException e) { if (exception != null) exception.addSuppressed(e); else exception = e; } } if (exception != null) throw exception; } static List<T> sorted(Collection<T> collection); static String utf8(byte[] bytes); static String utf8(ByteBuffer buffer, int length); static String utf8(ByteBuffer buffer, int offset, int length); static byte[] utf8(String string); static int abs(int n); static long min(long first, long ... rest); static long max(long first, long ... rest); static short min(short first, short second); static int utf8Length(CharSequence s); static byte[] toArray(ByteBuffer buffer); static byte[] toArray(ByteBuffer buffer, int size); static byte[] toNullableArray(ByteBuffer buffer); static ByteBuffer wrapNullable(byte[] array); static byte[] toArray(ByteBuffer buffer, int offset, int size); static T notNull(T t); static void sleep(long ms); static T newInstance(Class<T> c); static T newInstance(String klass, Class<T> base); static int murmur2(final byte[] data); static String getHost(String address); static Integer getPort(String address); static String formatAddress(String host, Integer port); static String join(T[] strs, String separator); static String join(Collection<T> list, String separator); static String mkString(Map<K, V> map, String begin, String end,
String keyValueSeparator, String elementSeparator); static Properties loadProps(String filename); static Map<String, String> propsToStringMap(Properties props); static String stackTrace(Throwable e); static Thread newThread(String name, Runnable runnable, boolean daemon); static Thread daemonThread(String name, Runnable runnable); static void croak(String message); static byte[] readBytes(ByteBuffer buffer, int offset, int length); static byte[] readBytes(ByteBuffer buffer); static String readFileAsString(String path, Charset charset); static String readFileAsString(String path); static ByteBuffer ensureCapacity(ByteBuffer existingBuffer, int newLength); @SafeVarargs static Set<T> mkSet(T... elems); @SafeVarargs static List<T> mkList(T... elems); static void delete(final File file); static List<T> safe(List<T> other); static ClassLoader getKafkaClassLoader(); static ClassLoader getContextOrKafkaClassLoader(); static void atomicMoveWithFallback(Path source, Path target); static void closeAll(Closeable... closeables); static void closeQuietly(AutoCloseable closeable, String name); static int toPositive(int number); static int longHashcode(long value); static ByteBuffer sizeDelimited(ByteBuffer buffer, int start); static void readFullyOrFail(FileChannel channel, ByteBuffer destinationBuffer, long position,
String description); static void readFully(FileChannel channel, ByteBuffer destinationBuffer, long position); static final void readFully(InputStream inputStream, ByteBuffer destinationBuffer); static void writeFully(FileChannel channel, ByteBuffer sourceBuffer); static void writeTo(DataOutput out, ByteBuffer buffer, int length); static List<T> toList(Iterator<T> iterator); static final String NL; }
|
@Test public void testCloseAll() { TestCloseable[] closeablesWithoutException = TestCloseable.createCloseables(false, false, false); try { Utils.closeAll(closeablesWithoutException); TestCloseable.checkClosed(closeablesWithoutException); } catch (IOException e) { fail("Unexpected exception: " + e); } TestCloseable[] closeablesWithException = TestCloseable.createCloseables(true, true, true); try { Utils.closeAll(closeablesWithException); fail("Expected exception not thrown"); } catch (IOException e) { TestCloseable.checkClosed(closeablesWithException); TestCloseable.checkException(e, closeablesWithException); } TestCloseable[] singleExceptionCloseables = TestCloseable.createCloseables(false, true, false); try { Utils.closeAll(singleExceptionCloseables); fail("Expected exception not thrown"); } catch (IOException e) { TestCloseable.checkClosed(singleExceptionCloseables); TestCloseable.checkException(e, singleExceptionCloseables[1]); } TestCloseable[] mixedCloseables = TestCloseable.createCloseables(false, true, false, true, true); try { Utils.closeAll(mixedCloseables); fail("Expected exception not thrown"); } catch (IOException e) { TestCloseable.checkClosed(mixedCloseables); TestCloseable.checkException(e, mixedCloseables[1], mixedCloseables[3], mixedCloseables[4]); } }
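The aggregation behavior the test checks is that the first IOException becomes the primary exception and later failures are attached as suppressed ones, while every closeable still gets its close() call. A self-contained sketch, with a hypothetical class name and a helper that always fails:

import java.io.Closeable;
import java.io.IOException;
import org.apache.kafka.common.utils.Utils;

public class CloseAllDemo {
    // A Closeable that always fails, so we can observe how closeAll
    // aggregates failures.
    static Closeable failing(String name) {
        return () -> { throw new IOException(name); };
    }

    public static void main(String[] args) {
        try {
            Utils.closeAll(failing("first"), failing("second"), failing("third"));
        } catch (IOException e) {
            System.out.println("primary: " + e.getMessage()); // first
            for (Throwable t : e.getSuppressed())
                System.out.println("suppressed: " + t.getMessage()); // second, third
        }
    }
}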
|
Utils { public static void readFullyOrFail(FileChannel channel, ByteBuffer destinationBuffer, long position, String description) throws IOException { if (position < 0) { throw new IllegalArgumentException("The file channel position cannot be negative, but it is " + position); } int expectedReadBytes = destinationBuffer.remaining(); readFully(channel, destinationBuffer, position); if (destinationBuffer.hasRemaining()) { throw new EOFException(String.format("Failed to read `%s` from file channel `%s`. Expected to read %d bytes, " + "but reached end of file after reading %d bytes. Started read from position %d.", description, channel, expectedReadBytes, expectedReadBytes - destinationBuffer.remaining(), position)); } } static List<T> sorted(Collection<T> collection); static String utf8(byte[] bytes); static String utf8(ByteBuffer buffer, int length); static String utf8(ByteBuffer buffer, int offset, int length); static byte[] utf8(String string); static int abs(int n); static long min(long first, long ... rest); static long max(long first, long ... rest); static short min(short first, short second); static int utf8Length(CharSequence s); static byte[] toArray(ByteBuffer buffer); static byte[] toArray(ByteBuffer buffer, int size); static byte[] toNullableArray(ByteBuffer buffer); static ByteBuffer wrapNullable(byte[] array); static byte[] toArray(ByteBuffer buffer, int offset, int size); static T notNull(T t); static void sleep(long ms); static T newInstance(Class<T> c); static T newInstance(String klass, Class<T> base); static int murmur2(final byte[] data); static String getHost(String address); static Integer getPort(String address); static String formatAddress(String host, Integer port); static String join(T[] strs, String separator); static String join(Collection<T> list, String separator); static String mkString(Map<K, V> map, String begin, String end,
String keyValueSeparator, String elementSeparator); static Properties loadProps(String filename); static Map<String, String> propsToStringMap(Properties props); static String stackTrace(Throwable e); static Thread newThread(String name, Runnable runnable, boolean daemon); static Thread daemonThread(String name, Runnable runnable); static void croak(String message); static byte[] readBytes(ByteBuffer buffer, int offset, int length); static byte[] readBytes(ByteBuffer buffer); static String readFileAsString(String path, Charset charset); static String readFileAsString(String path); static ByteBuffer ensureCapacity(ByteBuffer existingBuffer, int newLength); @SafeVarargs static Set<T> mkSet(T... elems); @SafeVarargs static List<T> mkList(T... elems); static void delete(final File file); static List<T> safe(List<T> other); static ClassLoader getKafkaClassLoader(); static ClassLoader getContextOrKafkaClassLoader(); static void atomicMoveWithFallback(Path source, Path target); static void closeAll(Closeable... closeables); static void closeQuietly(AutoCloseable closeable, String name); static int toPositive(int number); static int longHashcode(long value); static ByteBuffer sizeDelimited(ByteBuffer buffer, int start); static void readFullyOrFail(FileChannel channel, ByteBuffer destinationBuffer, long position,
String description); static void readFully(FileChannel channel, ByteBuffer destinationBuffer, long position); static final void readFully(InputStream inputStream, ByteBuffer destinationBuffer); static void writeFully(FileChannel channel, ByteBuffer sourceBuffer); static void writeTo(DataOutput out, ByteBuffer buffer, int length); static List<T> toList(Iterator<T> iterator); static final String NL; }
|
@Test public void testReadFullyOrFailWithRealFile() throws IOException { try (FileChannel channel = FileChannel.open(TestUtils.tempFile().toPath(), StandardOpenOption.READ, StandardOpenOption.WRITE)) { String msg = "hello, world"; channel.write(ByteBuffer.wrap(msg.getBytes()), 0); channel.force(true); assertEquals("Message should be written to the file channel", msg.length(), channel.size()); ByteBuffer perfectBuffer = ByteBuffer.allocate(msg.length()); ByteBuffer smallBuffer = ByteBuffer.allocate(5); ByteBuffer largeBuffer = ByteBuffer.allocate(msg.length() + 1); Utils.readFullyOrFail(channel, perfectBuffer, 0, "perfect"); assertFalse("Buffer should be filled up", perfectBuffer.hasRemaining()); assertEquals("Buffer should be populated correctly", msg, new String(perfectBuffer.array())); Utils.readFullyOrFail(channel, smallBuffer, 0, "small"); assertFalse("Buffer should be filled", smallBuffer.hasRemaining()); assertEquals("Buffer should be populated correctly", "hello", new String(smallBuffer.array())); smallBuffer.clear(); Utils.readFullyOrFail(channel, smallBuffer, 7, "small"); assertFalse("Buffer should be filled", smallBuffer.hasRemaining()); assertEquals("Buffer should be populated correctly", "world", new String(smallBuffer.array())); try { Utils.readFullyOrFail(channel, largeBuffer, 0, "large"); fail("Expected EOFException to be raised"); } catch (EOFException e) { /* expected: the buffer is one byte larger than the file */ } } }
@Test public void testReadFullyOrFailWithPartialFileChannelReads() throws IOException { FileChannel channelMock = EasyMock.createMock(FileChannel.class); final int bufferSize = 100; ByteBuffer buffer = ByteBuffer.allocate(bufferSize); StringBuilder expectedBufferContent = new StringBuilder(); fileChannelMockExpectReadWithRandomBytes(channelMock, expectedBufferContent, bufferSize); EasyMock.replay(channelMock); Utils.readFullyOrFail(channelMock, buffer, 0L, "test"); assertEquals("The buffer should be populated correctly", expectedBufferContent.toString(), new String(buffer.array())); assertFalse("The buffer should be filled", buffer.hasRemaining()); EasyMock.verify(channelMock); }
|
Utils { public static void readFully(FileChannel channel, ByteBuffer destinationBuffer, long position) throws IOException { if (position < 0) { throw new IllegalArgumentException("The file channel position cannot be negative, but it is " + position); } long currentPosition = position; int bytesRead; do { bytesRead = channel.read(destinationBuffer, currentPosition); currentPosition += bytesRead; } while (bytesRead != -1 && destinationBuffer.hasRemaining()); } static List<T> sorted(Collection<T> collection); static String utf8(byte[] bytes); static String utf8(ByteBuffer buffer, int length); static String utf8(ByteBuffer buffer, int offset, int length); static byte[] utf8(String string); static int abs(int n); static long min(long first, long ... rest); static long max(long first, long ... rest); static short min(short first, short second); static int utf8Length(CharSequence s); static byte[] toArray(ByteBuffer buffer); static byte[] toArray(ByteBuffer buffer, int size); static byte[] toNullableArray(ByteBuffer buffer); static ByteBuffer wrapNullable(byte[] array); static byte[] toArray(ByteBuffer buffer, int offset, int size); static T notNull(T t); static void sleep(long ms); static T newInstance(Class<T> c); static T newInstance(String klass, Class<T> base); static int murmur2(final byte[] data); static String getHost(String address); static Integer getPort(String address); static String formatAddress(String host, Integer port); static String join(T[] strs, String separator); static String join(Collection<T> list, String separator); static String mkString(Map<K, V> map, String begin, String end,
String keyValueSeparator, String elementSeparator); static Properties loadProps(String filename); static Map<String, String> propsToStringMap(Properties props); static String stackTrace(Throwable e); static Thread newThread(String name, Runnable runnable, boolean daemon); static Thread daemonThread(String name, Runnable runnable); static void croak(String message); static byte[] readBytes(ByteBuffer buffer, int offset, int length); static byte[] readBytes(ByteBuffer buffer); static String readFileAsString(String path, Charset charset); static String readFileAsString(String path); static ByteBuffer ensureCapacity(ByteBuffer existingBuffer, int newLength); @SafeVarargs static Set<T> mkSet(T... elems); @SafeVarargs static List<T> mkList(T... elems); static void delete(final File file); static List<T> safe(List<T> other); static ClassLoader getKafkaClassLoader(); static ClassLoader getContextOrKafkaClassLoader(); static void atomicMoveWithFallback(Path source, Path target); static void closeAll(Closeable... closeables); static void closeQuietly(AutoCloseable closeable, String name); static int toPositive(int number); static int longHashcode(long value); static ByteBuffer sizeDelimited(ByteBuffer buffer, int start); static void readFullyOrFail(FileChannel channel, ByteBuffer destinationBuffer, long position,
String description); static void readFully(FileChannel channel, ByteBuffer destinationBuffer, long position); static final void readFully(InputStream inputStream, ByteBuffer destinationBuffer); static void writeFully(FileChannel channel, ByteBuffer sourceBuffer); static void writeTo(DataOutput out, ByteBuffer buffer, int length); static List<T> toList(Iterator<T> iterator); static final String NL; }
|
@Test public void testReadFullyWithPartialFileChannelReads() throws IOException { FileChannel channelMock = EasyMock.createMock(FileChannel.class); final int bufferSize = 100; StringBuilder expectedBufferContent = new StringBuilder(); fileChannelMockExpectReadWithRandomBytes(channelMock, expectedBufferContent, bufferSize); EasyMock.replay(channelMock); ByteBuffer buffer = ByteBuffer.allocate(bufferSize); Utils.readFully(channelMock, buffer, 0L); assertEquals("The buffer should be populated correctly.", expectedBufferContent.toString(), new String(buffer.array())); assertFalse("The buffer should be filled", buffer.hasRemaining()); EasyMock.verify(channelMock); }
@Test public void testReadFullyIfEofIsReached() throws IOException { final FileChannel channelMock = EasyMock.createMock(FileChannel.class); final int bufferSize = 100; final String fileChannelContent = "abcdefghkl"; ByteBuffer buffer = ByteBuffer.allocate(bufferSize); EasyMock.expect(channelMock.size()).andReturn((long) fileChannelContent.length()); EasyMock.expect(channelMock.read(EasyMock.anyObject(ByteBuffer.class), EasyMock.anyInt())).andAnswer(new IAnswer<Integer>() { @Override public Integer answer() throws Throwable { ByteBuffer buffer = (ByteBuffer) EasyMock.getCurrentArguments()[0]; buffer.put(fileChannelContent.getBytes()); return -1; } }); EasyMock.replay(channelMock); Utils.readFully(channelMock, buffer, 0L); assertEquals("abcdefghkl", new String(buffer.array(), 0, buffer.position())); assertEquals(buffer.position(), channelMock.size()); assertTrue(buffer.hasRemaining()); EasyMock.verify(channelMock); }
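The two methods differ only at end-of-file: readFully returns quietly with a partially filled buffer, while readFullyOrFail converts the short read into an EOFException. A mock-free sketch against a real temp file (class name hypothetical):

import java.io.EOFException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import org.apache.kafka.common.utils.Utils;

public class ReadFullyDemo {
    public static void main(String[] args) throws Exception {
        Path file = Files.createTempFile("readfully", ".bin");
        Files.write(file, "hello".getBytes());
        try (FileChannel channel = FileChannel.open(file, StandardOpenOption.READ)) {
            // readFully stops quietly at end-of-file...
            ByteBuffer big = ByteBuffer.allocate(16);
            Utils.readFully(channel, big, 0L);
            System.out.println("read " + big.position() + " of 16 bytes"); // read 5 of 16 bytes
            // ...while readFullyOrFail turns the short read into an EOFException.
            big.clear();
            try {
                Utils.readFullyOrFail(channel, big, 0L, "demo");
            } catch (EOFException e) {
                System.out.println("EOF: " + e.getMessage());
            }
        } finally {
            Files.deleteIfExists(file);
        }
    }
}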
|
Utils { public static void delete(final File file) throws IOException { if (file == null) return; Files.walkFileTree(file.toPath(), new SimpleFileVisitor<Path>() { @Override public FileVisitResult visitFileFailed(Path path, IOException exc) throws IOException { if (exc instanceof NoSuchFileException && path.toFile().equals(file)) return FileVisitResult.TERMINATE; throw exc; } @Override public FileVisitResult visitFile(Path path, BasicFileAttributes attrs) throws IOException { Files.delete(path); return FileVisitResult.CONTINUE; } @Override public FileVisitResult postVisitDirectory(Path path, IOException exc) throws IOException { Files.delete(path); return FileVisitResult.CONTINUE; } }); } static List<T> sorted(Collection<T> collection); static String utf8(byte[] bytes); static String utf8(ByteBuffer buffer, int length); static String utf8(ByteBuffer buffer, int offset, int length); static byte[] utf8(String string); static int abs(int n); static long min(long first, long ... rest); static long max(long first, long ... rest); static short min(short first, short second); static int utf8Length(CharSequence s); static byte[] toArray(ByteBuffer buffer); static byte[] toArray(ByteBuffer buffer, int size); static byte[] toNullableArray(ByteBuffer buffer); static ByteBuffer wrapNullable(byte[] array); static byte[] toArray(ByteBuffer buffer, int offset, int size); static T notNull(T t); static void sleep(long ms); static T newInstance(Class<T> c); static T newInstance(String klass, Class<T> base); static int murmur2(final byte[] data); static String getHost(String address); static Integer getPort(String address); static String formatAddress(String host, Integer port); static String join(T[] strs, String separator); static String join(Collection<T> list, String separator); static String mkString(Map<K, V> map, String begin, String end,
String keyValueSeparator, String elementSeparator); static Properties loadProps(String filename); static Map<String, String> propsToStringMap(Properties props); static String stackTrace(Throwable e); static Thread newThread(String name, Runnable runnable, boolean daemon); static Thread daemonThread(String name, Runnable runnable); static void croak(String message); static byte[] readBytes(ByteBuffer buffer, int offset, int length); static byte[] readBytes(ByteBuffer buffer); static String readFileAsString(String path, Charset charset); static String readFileAsString(String path); static ByteBuffer ensureCapacity(ByteBuffer existingBuffer, int newLength); @SafeVarargs static Set<T> mkSet(T... elems); @SafeVarargs static List<T> mkList(T... elems); static void delete(final File file); static List<T> safe(List<T> other); static ClassLoader getKafkaClassLoader(); static ClassLoader getContextOrKafkaClassLoader(); static void atomicMoveWithFallback(Path source, Path target); static void closeAll(Closeable... closeables); static void closeQuietly(AutoCloseable closeable, String name); static int toPositive(int number); static int longHashcode(long value); static ByteBuffer sizeDelimited(ByteBuffer buffer, int start); static void readFullyOrFail(FileChannel channel, ByteBuffer destinationBuffer, long position,
String description); static void readFully(FileChannel channel, ByteBuffer destinationBuffer, long position); static final void readFully(InputStream inputStream, ByteBuffer destinationBuffer); static void writeFully(FileChannel channel, ByteBuffer sourceBuffer); static void writeTo(DataOutput out, ByteBuffer buffer, int length); static List<T> toList(Iterator<T> iterator); static final String NL; }
|
@Test(timeout = 120000) public void testRecursiveDelete() throws IOException { Utils.delete(null); File tempFile = TestUtils.tempFile(); Utils.delete(tempFile); assertFalse(Files.exists(tempFile.toPath())); File tempDir = TestUtils.tempDirectory(); File tempDir2 = TestUtils.tempDirectory(tempDir.toPath(), "a"); TestUtils.tempDirectory(tempDir.toPath(), "b"); TestUtils.tempDirectory(tempDir2.toPath(), "c"); Utils.delete(tempDir); assertFalse(Files.exists(tempDir.toPath())); assertFalse(Files.exists(tempDir2.toPath())); Utils.delete(tempDir); assertFalse(Files.exists(tempDir.toPath())); }
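The visitor-based delete walks the tree depth-first and, per the visitFileFailed override, tolerates a root that has already vanished; it also accepts null. A small sketch mirroring what the test exercises (class name hypothetical):

import java.io.File;
import java.nio.file.Files;
import org.apache.kafka.common.utils.Utils;

public class DeleteDemo {
    public static void main(String[] args) throws Exception {
        // Build a small tree: root/child/grandchild.txt
        File root = Files.createTempDirectory("delete-demo").toFile();
        File child = new File(root, "child");
        child.mkdirs();
        Files.write(new File(child, "grandchild.txt").toPath(), "x".getBytes());

        Utils.delete(root);  // removes files before their parent directories
        Utils.delete(root);  // second call is a no-op, not an error
        Utils.delete(null);  // null is tolerated
        System.out.println("exists: " + root.exists()); // exists: false
    }
}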
|
Shell { public static String execCommand(String ... cmd) throws IOException { return execCommand(cmd, -1); } Shell(long timeout); int exitCode(); Process process(); static String execCommand(String ... cmd); static String execCommand(String[] cmd, long timeout); }
|
@Test public void testEchoHello() throws Exception { assumeTrue(!OperatingSystem.IS_WINDOWS); String output = Shell.execCommand("echo", "hello"); assertEquals("hello\n", output); }
@Test public void testHeadDevZero() throws Exception { assumeTrue(!OperatingSystem.IS_WINDOWS); final int length = 100000; String output = Shell.execCommand("head", "-c", Integer.toString(length), "/dev/zero"); assertEquals(length, output.length()); }
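A usage sketch for Shell.execCommand, assuming it lives in org.apache.kafka.common.utils as the rest of these utilities do; the class name is hypothetical. The varargs overload delegates with a timeout of -1, which the snippet suggests means no timeout.

import org.apache.kafka.common.utils.Shell;

public class ShellDemo {
    public static void main(String[] args) throws Exception {
        // execCommand runs the process to completion and returns its stdout.
        String output = Shell.execCommand("echo", "hello");
        System.out.print(output); // "hello\n", as the test asserts

        // The array overload takes an explicit timeout in milliseconds.
        System.out.print(Shell.execCommand(new String[] {"uname", "-s"}, 5000L));
    }
}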
|
LegacyRecord { public long checksum() { return ByteUtils.readUnsignedInt(buffer, CRC_OFFSET); } LegacyRecord(ByteBuffer buffer); LegacyRecord(ByteBuffer buffer, Long wrapperRecordTimestamp, TimestampType wrapperRecordTimestampType); long computeChecksum(); long checksum(); boolean isValid(); Long wrapperRecordTimestamp(); TimestampType wrapperRecordTimestampType(); void ensureValid(); int sizeInBytes(); int keySize(); boolean hasKey(); int valueSize(); boolean hasNullValue(); byte magic(); byte attributes(); long timestamp(); TimestampType timestampType(); CompressionType compressionType(); ByteBuffer value(); ByteBuffer key(); ByteBuffer buffer(); String toString(); boolean equals(Object other); int hashCode(); static LegacyRecord create(byte magic,
long timestamp,
byte[] key,
byte[] value,
CompressionType compressionType,
TimestampType timestampType); static LegacyRecord create(byte magic, long timestamp, byte[] key, byte[] value); static void writeCompressedRecordHeader(ByteBuffer buffer,
byte magic,
int recordSize,
long timestamp,
CompressionType compressionType,
TimestampType timestampType); static long write(DataOutputStream out,
byte magic,
long timestamp,
byte[] key,
byte[] value,
CompressionType compressionType,
TimestampType timestampType); static long write(DataOutputStream out,
byte magic,
long timestamp,
ByteBuffer key,
ByteBuffer value,
CompressionType compressionType,
TimestampType timestampType); static void write(DataOutputStream out,
byte magic,
long crc,
byte attributes,
long timestamp,
byte[] key,
byte[] value); static int recordSize(byte magic, int keySize, int valueSize); static byte computeAttributes(byte magic, CompressionType type, TimestampType timestampType); static long computeChecksum(byte magic, byte attributes, long timestamp, byte[] key, byte[] value); static TimestampType timestampType(byte magic, TimestampType wrapperRecordTimestampType, byte attributes); static final int CRC_OFFSET; static final int CRC_LENGTH; static final int MAGIC_OFFSET; static final int MAGIC_LENGTH; static final int ATTRIBUTES_OFFSET; static final int ATTRIBUTES_LENGTH; static final int TIMESTAMP_OFFSET; static final int TIMESTAMP_LENGTH; static final int KEY_SIZE_OFFSET_V0; static final int KEY_SIZE_OFFSET_V1; static final int KEY_SIZE_LENGTH; static final int KEY_OFFSET_V0; static final int KEY_OFFSET_V1; static final int VALUE_SIZE_LENGTH; static final int HEADER_SIZE_V0; static final int HEADER_SIZE_V1; static final int RECORD_OVERHEAD_V0; static final int RECORD_OVERHEAD_V1; static final long NO_TIMESTAMP; }
|
@Test public void testChecksum() { assertEquals(record.checksum(), record.computeChecksum()); byte attributes = LegacyRecord.computeAttributes(magic, this.compression, TimestampType.CREATE_TIME); assertEquals(record.checksum(), LegacyRecord.computeChecksum( magic, attributes, this.timestamp, this.key == null ? null : this.key.array(), this.value == null ? null : this.value.array() )); assertTrue(record.isValid()); for (int i = LegacyRecord.CRC_OFFSET + LegacyRecord.CRC_LENGTH; i < record.sizeInBytes(); i++) { LegacyRecord copy = copyOf(record); copy.buffer().put(i, (byte) 69); assertFalse(copy.isValid()); try { copy.ensureValid(); fail("ensureValid should throw for a corrupted record"); } catch (InvalidRecordException e) { /* expected */ } } }
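A standalone corruption-detection sketch using only the LegacyRecord API listed above; the class name and the magic value (1, i.e. the v1 format) are assumptions for illustration.

import org.apache.kafka.common.record.LegacyRecord;

public class ChecksumDemo {
    public static void main(String[] args) {
        // create() computes the CRC over the record body and stores it
        // in the CRC field at the front of the buffer.
        LegacyRecord record = LegacyRecord.create((byte) 1, 123L, "k".getBytes(), "v".getBytes());
        System.out.println(record.isValid()); // true

        // Corrupt any byte after the CRC field: the stored checksum no
        // longer matches the recomputed one.
        record.buffer().put(LegacyRecord.CRC_OFFSET + LegacyRecord.CRC_LENGTH, (byte) 0x7f);
        System.out.println(record.isValid()); // false
    }
}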
|
FileLogInputStream implements LogInputStream<FileLogInputStream.FileChannelRecordBatch> { @Override public FileChannelRecordBatch nextBatch() throws IOException { if (position + HEADER_SIZE_UP_TO_MAGIC >= end) return null; logHeaderBuffer.rewind(); Utils.readFullyOrFail(channel, logHeaderBuffer, position, "log header"); logHeaderBuffer.rewind(); long offset = logHeaderBuffer.getLong(OFFSET_OFFSET); int size = logHeaderBuffer.getInt(SIZE_OFFSET); if (size < LegacyRecord.RECORD_OVERHEAD_V0) throw new CorruptRecordException(String.format("Record size is smaller than minimum record overhead (%d).", LegacyRecord.RECORD_OVERHEAD_V0)); if (position + LOG_OVERHEAD + size > end) return null; byte magic = logHeaderBuffer.get(MAGIC_OFFSET); final FileChannelRecordBatch batch; if (magic < RecordBatch.MAGIC_VALUE_V2) batch = new LegacyFileChannelRecordBatch(offset, magic, channel, position, size); else batch = new DefaultFileChannelRecordBatch(offset, magic, channel, position, size); position += batch.sizeInBytes(); return batch; } FileLogInputStream(FileChannel channel,
int start,
int end); @Override FileChannelRecordBatch nextBatch(); }
|
@Test public void testWriteTo() throws IOException { try (FileRecords fileRecords = FileRecords.open(tempFile())) { fileRecords.append(MemoryRecords.withRecords(magic, compression, new SimpleRecord("foo".getBytes()))); fileRecords.flush(); FileLogInputStream logInputStream = new FileLogInputStream(fileRecords.channel(), 0, fileRecords.sizeInBytes()); FileChannelRecordBatch batch = logInputStream.nextBatch(); assertNotNull(batch); assertEquals(magic, batch.magic()); ByteBuffer buffer = ByteBuffer.allocate(128); batch.writeTo(buffer); buffer.flip(); MemoryRecords memRecords = MemoryRecords.readableRecords(buffer); List<Record> records = Utils.toList(memRecords.records().iterator()); assertEquals(1, records.size()); Record record0 = records.get(0); assertTrue(record0.hasMagic(magic)); assertEquals("foo", Utils.utf8(record0.value(), record0.valueSize())); } }
@Test public void testSimpleBatchIteration() throws IOException { try (FileRecords fileRecords = FileRecords.open(tempFile())) { SimpleRecord firstBatchRecord = new SimpleRecord(3241324L, "a".getBytes(), "foo".getBytes()); SimpleRecord secondBatchRecord = new SimpleRecord(234280L, "b".getBytes(), "bar".getBytes()); fileRecords.append(MemoryRecords.withRecords(magic, 0L, compression, CREATE_TIME, firstBatchRecord)); fileRecords.append(MemoryRecords.withRecords(magic, 1L, compression, CREATE_TIME, secondBatchRecord)); fileRecords.flush(); FileLogInputStream logInputStream = new FileLogInputStream(fileRecords.channel(), 0, fileRecords.sizeInBytes()); FileChannelRecordBatch firstBatch = logInputStream.nextBatch(); assertGenericRecordBatchData(firstBatch, 0L, 3241324L, firstBatchRecord); assertNoProducerData(firstBatch); FileChannelRecordBatch secondBatch = logInputStream.nextBatch(); assertGenericRecordBatchData(secondBatch, 1L, 234280L, secondBatchRecord); assertNoProducerData(secondBatch); assertNull(logInputStream.nextBatch()); } }
@Test public void testBatchIterationWithMultipleRecordsPerBatch() throws IOException { if (magic < MAGIC_VALUE_V2 && compression == CompressionType.NONE) return; try (FileRecords fileRecords = FileRecords.open(tempFile())) { SimpleRecord[] firstBatchRecords = new SimpleRecord[]{ new SimpleRecord(3241324L, "a".getBytes(), "1".getBytes()), new SimpleRecord(234280L, "b".getBytes(), "2".getBytes()) }; SimpleRecord[] secondBatchRecords = new SimpleRecord[]{ new SimpleRecord(238423489L, "c".getBytes(), "3".getBytes()), new SimpleRecord(897839L, null, "4".getBytes()), new SimpleRecord(8234020L, "e".getBytes(), null) }; fileRecords.append(MemoryRecords.withRecords(magic, 0L, compression, CREATE_TIME, firstBatchRecords)); fileRecords.append(MemoryRecords.withRecords(magic, 1L, compression, CREATE_TIME, secondBatchRecords)); fileRecords.flush(); FileLogInputStream logInputStream = new FileLogInputStream(fileRecords.channel(), 0, fileRecords.sizeInBytes()); FileChannelRecordBatch firstBatch = logInputStream.nextBatch(); assertNoProducerData(firstBatch); assertGenericRecordBatchData(firstBatch, 0L, 3241324L, firstBatchRecords); FileChannelRecordBatch secondBatch = logInputStream.nextBatch(); assertNoProducerData(secondBatch); assertGenericRecordBatchData(secondBatch, 1L, 238423489L, secondBatchRecords); assertNull(logInputStream.nextBatch()); } }
@Test public void testBatchIterationV2() throws IOException { if (magic != MAGIC_VALUE_V2) return; try (FileRecords fileRecords = FileRecords.open(tempFile())) { long producerId = 83843L; short producerEpoch = 15; int baseSequence = 234; int partitionLeaderEpoch = 9832; SimpleRecord[] firstBatchRecords = new SimpleRecord[]{ new SimpleRecord(3241324L, "a".getBytes(), "1".getBytes()), new SimpleRecord(234280L, "b".getBytes(), "2".getBytes()) }; SimpleRecord[] secondBatchRecords = new SimpleRecord[]{ new SimpleRecord(238423489L, "c".getBytes(), "3".getBytes()), new SimpleRecord(897839L, null, "4".getBytes()), new SimpleRecord(8234020L, "e".getBytes(), null) }; fileRecords.append(MemoryRecords.withIdempotentRecords(magic, 15L, compression, producerId, producerEpoch, baseSequence, partitionLeaderEpoch, firstBatchRecords)); fileRecords.append(MemoryRecords.withTransactionalRecords(magic, 27L, compression, producerId, producerEpoch, baseSequence + firstBatchRecords.length, partitionLeaderEpoch, secondBatchRecords)); fileRecords.flush(); FileLogInputStream logInputStream = new FileLogInputStream(fileRecords.channel(), 0, fileRecords.sizeInBytes()); FileChannelRecordBatch firstBatch = logInputStream.nextBatch(); assertProducerData(firstBatch, producerId, producerEpoch, baseSequence, false, firstBatchRecords); assertGenericRecordBatchData(firstBatch, 15L, 3241324L, firstBatchRecords); assertEquals(partitionLeaderEpoch, firstBatch.partitionLeaderEpoch()); FileChannelRecordBatch secondBatch = logInputStream.nextBatch(); assertProducerData(secondBatch, producerId, producerEpoch, baseSequence + firstBatchRecords.length, true, secondBatchRecords); assertGenericRecordBatchData(secondBatch, 27L, 238423489L, secondBatchRecords); assertEquals(partitionLeaderEpoch, secondBatch.partitionLeaderEpoch()); assertNull(logInputStream.nextBatch()); } }
@Test public void testBatchIterationIncompleteBatch() throws IOException { try (FileRecords fileRecords = FileRecords.open(tempFile())) { SimpleRecord firstBatchRecord = new SimpleRecord(100L, "foo".getBytes()); SimpleRecord secondBatchRecord = new SimpleRecord(200L, "bar".getBytes()); fileRecords.append(MemoryRecords.withRecords(magic, 0L, compression, CREATE_TIME, firstBatchRecord)); fileRecords.append(MemoryRecords.withRecords(magic, 1L, compression, CREATE_TIME, secondBatchRecord)); fileRecords.flush(); fileRecords.truncateTo(fileRecords.sizeInBytes() - 13); FileLogInputStream logInputStream = new FileLogInputStream(fileRecords.channel(), 0, fileRecords.sizeInBytes()); FileChannelRecordBatch firstBatch = logInputStream.nextBatch(); assertNoProducerData(firstBatch); assertGenericRecordBatchData(firstBatch, 0L, 100L, firstBatchRecord); assertNull(logInputStream.nextBatch()); } }
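A scanning sketch that ties the tests above together; the class name is hypothetical, and the constructor call matches the one the tests use.

import java.io.IOException;
import org.apache.kafka.common.record.FileLogInputStream;
import org.apache.kafka.common.record.FileLogInputStream.FileChannelRecordBatch;
import org.apache.kafka.common.record.FileRecords;

public class BatchScan {
    // Drains every complete batch from a log file. nextBatch() returns
    // null both at a clean end-of-log and when the trailing batch is
    // truncated, which is why the incomplete-batch test above expects
    // null rather than an exception.
    static void scan(FileRecords fileRecords) throws IOException {
        FileLogInputStream in = new FileLogInputStream(
                fileRecords.channel(), 0, fileRecords.sizeInBytes());
        for (FileChannelRecordBatch batch = in.nextBatch(); batch != null; batch = in.nextBatch())
            System.out.println("magic=" + batch.magic() + " size=" + batch.sizeInBytes());
    }
}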
|
DefaultRecord implements Record { public static DefaultRecord readFrom(DataInput input, long baseOffset, long baseTimestamp, int baseSequence, Long logAppendTime) throws IOException { int sizeOfBodyInBytes = ByteUtils.readVarint(input); ByteBuffer recordBuffer = ByteBuffer.allocate(sizeOfBodyInBytes); input.readFully(recordBuffer.array(), 0, sizeOfBodyInBytes); int totalSizeInBytes = ByteUtils.sizeOfVarint(sizeOfBodyInBytes) + sizeOfBodyInBytes; return readFrom(recordBuffer, totalSizeInBytes, sizeOfBodyInBytes, baseOffset, baseTimestamp, baseSequence, logAppendTime); } private DefaultRecord(int sizeInBytes,
byte attributes,
long offset,
long timestamp,
int sequence,
ByteBuffer key,
ByteBuffer value,
Header[] headers); @Override long offset(); @Override int sequence(); @Override int sizeInBytes(); @Override long timestamp(); byte attributes(); @Override Long checksumOrNull(); @Override boolean isValid(); @Override void ensureValid(); @Override int keySize(); @Override int valueSize(); @Override boolean hasKey(); @Override ByteBuffer key(); @Override boolean hasValue(); @Override ByteBuffer value(); @Override Header[] headers(); static int writeTo(DataOutputStream out,
int offsetDelta,
long timestampDelta,
ByteBuffer key,
ByteBuffer value,
Header[] headers); @Override boolean hasMagic(byte magic); @Override boolean isCompressed(); @Override boolean hasTimestampType(TimestampType timestampType); @Override String toString(); @Override boolean equals(Object o); @Override int hashCode(); static DefaultRecord readFrom(DataInput input,
long baseOffset,
long baseTimestamp,
int baseSequence,
Long logAppendTime); static DefaultRecord readFrom(ByteBuffer buffer,
long baseOffset,
long baseTimestamp,
int baseSequence,
Long logAppendTime); static int sizeInBytes(int offsetDelta,
long timestampDelta,
ByteBuffer key,
ByteBuffer value,
Header[] headers); static int sizeInBytes(int offsetDelta,
long timestampDelta,
int keySize,
int valueSize,
Header[] headers); static long computePartialChecksum(long timestamp, int serializedKeySize, int serializedValueSize); static final int MAX_RECORD_OVERHEAD; }
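A round-trip sketch of the focal readFrom path, using only the writeTo/readFrom signatures shown above; the base values and deltas are illustrative. It shows how the absolute offset, timestamp, and sequence are reconstructed as base plus delta:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.kafka.common.record.DefaultRecord;
import org.apache.kafka.common.record.Record;

public class DefaultRecordRoundTripSketch {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        DefaultRecord.writeTo(out, 2 /* offsetDelta */, 10L /* timestampDelta */,
                ByteBuffer.wrap("k".getBytes()), ByteBuffer.wrap("v".getBytes()), Record.EMPTY_HEADERS);
        out.flush();

        DefaultRecord record = DefaultRecord.readFrom(ByteBuffer.wrap(bytes.toByteArray()),
                100L /* baseOffset */, 1000L /* baseTimestamp */, 0 /* baseSequence */, null);
        System.out.println(record.offset());    // expected 102 = baseOffset + offsetDelta
        System.out.println(record.timestamp()); // expected 1010 = baseTimestamp + timestampDelta
        System.out.println(record.sequence());  // expected 2 = baseSequence + offsetDelta
    }
}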
|
@Test(expected = InvalidRecordException.class) public void testInvalidKeySize() { byte attributes = 0; long timestampDelta = 2; int offsetDelta = 1; int sizeOfBodyInBytes = 100; int keySize = 105; ByteBuffer buf = ByteBuffer.allocate(sizeOfBodyInBytes + ByteUtils.sizeOfVarint(sizeOfBodyInBytes)); ByteUtils.writeVarint(sizeOfBodyInBytes, buf); buf.put(attributes); ByteUtils.writeVarlong(timestampDelta, buf); ByteUtils.writeVarint(offsetDelta, buf); ByteUtils.writeVarint(keySize, buf); buf.position(buf.limit()); buf.flip(); DefaultRecord.readFrom(buf, 0L, 0L, RecordBatch.NO_SEQUENCE, null); }
@Test(expected = InvalidRecordException.class) public void testInvalidValueSize() throws IOException { byte attributes = 0; long timestampDelta = 2; int offsetDelta = 1; int sizeOfBodyInBytes = 100; int valueSize = 105; ByteBuffer buf = ByteBuffer.allocate(sizeOfBodyInBytes + ByteUtils.sizeOfVarint(sizeOfBodyInBytes)); ByteUtils.writeVarint(sizeOfBodyInBytes, buf); buf.put(attributes); ByteUtils.writeVarlong(timestampDelta, buf); ByteUtils.writeVarint(offsetDelta, buf); ByteUtils.writeVarint(-1, buf); ByteUtils.writeVarint(valueSize, buf); buf.position(buf.limit()); buf.flip(); DefaultRecord.readFrom(buf, 0L, 0L, RecordBatch.NO_SEQUENCE, null); }
@Test(expected = InvalidRecordException.class) public void testUnderflowReadingTimestamp() { byte attributes = 0; int sizeOfBodyInBytes = 1; ByteBuffer buf = ByteBuffer.allocate(sizeOfBodyInBytes + ByteUtils.sizeOfVarint(sizeOfBodyInBytes)); ByteUtils.writeVarint(sizeOfBodyInBytes, buf); buf.put(attributes); buf.flip(); DefaultRecord.readFrom(buf, 0L, 0L, RecordBatch.NO_SEQUENCE, null); }
@Test(expected = InvalidRecordException.class) public void testUnderflowReadingVarlong() { byte attributes = 0; int sizeOfBodyInBytes = 2; ByteBuffer buf = ByteBuffer.allocate(sizeOfBodyInBytes + ByteUtils.sizeOfVarint(sizeOfBodyInBytes) + 1); ByteUtils.writeVarint(sizeOfBodyInBytes, buf); buf.put(attributes); ByteUtils.writeVarlong(156, buf); buf.position(buf.limit() - 1); buf.flip(); DefaultRecord.readFrom(buf, 0L, 0L, RecordBatch.NO_SEQUENCE, null); }
@Test(expected = InvalidRecordException.class) public void testInvalidVarlong() { byte attributes = 0; int sizeOfBodyInBytes = 11; ByteBuffer buf = ByteBuffer.allocate(sizeOfBodyInBytes + ByteUtils.sizeOfVarint(sizeOfBodyInBytes) + 1); ByteUtils.writeVarint(sizeOfBodyInBytes, buf); int recordStartPosition = buf.position(); buf.put(attributes); ByteUtils.writeVarlong(Long.MAX_VALUE, buf); buf.put(recordStartPosition + 10, Byte.MIN_VALUE); buf.flip(); DefaultRecord.readFrom(buf, 0L, 0L, RecordBatch.NO_SEQUENCE, null); }
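These negative tests hand-craft record bodies and corrupt one field at a time. For reference, a sketch of a well-formed body in the layout the tests assume (length varint, then attributes byte, timestamp-delta varlong, offset-delta varint, key length/bytes, value length/bytes, header count); this is the v2 record layout as I read it from the tests, here with a null key (-1) and no headers, using the same imports as the tests above:

byte[] value = "v".getBytes();
int sizeOfBodyInBytes = 1                                       // attributes
        + ByteUtils.sizeOfVarlong(2L)                           // timestampDelta
        + ByteUtils.sizeOfVarint(1)                             // offsetDelta
        + ByteUtils.sizeOfVarint(-1)                            // null key
        + ByteUtils.sizeOfVarint(value.length) + value.length   // value
        + ByteUtils.sizeOfVarint(0);                            // header count
ByteBuffer buf = ByteBuffer.allocate(sizeOfBodyInBytes + ByteUtils.sizeOfVarint(sizeOfBodyInBytes));
ByteUtils.writeVarint(sizeOfBodyInBytes, buf);
buf.put((byte) 0);                  // attributes
ByteUtils.writeVarlong(2L, buf);    // timestampDelta
ByteUtils.writeVarint(1, buf);      // offsetDelta
ByteUtils.writeVarint(-1, buf);     // keySize: null key
ByteUtils.writeVarint(value.length, buf);
buf.put(value);
ByteUtils.writeVarint(0, buf);      // no headers
buf.flip();
DefaultRecord record = DefaultRecord.readFrom(buf, 0L, 0L, RecordBatch.NO_SEQUENCE, null);
// parses cleanly: record.hasKey() == false, record.offset() == 1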
|
MemoryRecords extends AbstractRecords { @Override public Iterable<MutableRecordBatch> batches() { return batches; } private MemoryRecords(ByteBuffer buffer); @Override int sizeInBytes(); @Override long writeTo(GatheringByteChannel channel, long position, int length); int writeFullyTo(GatheringByteChannel channel); int validBytes(); @Override MemoryRecords downConvert(byte toMagic, long firstOffset); FilterResult filterTo(TopicPartition partition, RecordFilter filter, ByteBuffer destinationBuffer,
int maxRecordBatchSize, BufferSupplier decompressionBufferSupplier); ByteBuffer buffer(); @Override Iterable<MutableRecordBatch> batches(); @Override String toString(); @Override boolean equals(Object o); @Override int hashCode(); static MemoryRecords readableRecords(ByteBuffer buffer); static MemoryRecordsBuilder builder(ByteBuffer buffer,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset); static MemoryRecordsBuilder idempotentBuilder(ByteBuffer buffer,
CompressionType compressionType,
long baseOffset,
long producerId,
short producerEpoch,
int baseSequence); static MemoryRecordsBuilder builder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime); static MemoryRecordsBuilder builder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset); static MemoryRecordsBuilder builder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime,
int partitionLeaderEpoch); static MemoryRecordsBuilder builder(ByteBuffer buffer,
CompressionType compressionType,
long baseOffset,
long producerId,
short producerEpoch,
int baseSequence,
boolean isTransactional); static MemoryRecordsBuilder builder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime,
long producerId,
short producerEpoch,
int baseSequence); static MemoryRecordsBuilder builder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime,
long producerId,
short producerEpoch,
int baseSequence,
boolean isTransactional,
int partitionLeaderEpoch); static MemoryRecordsBuilder builder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime,
long producerId,
short producerEpoch,
int baseSequence,
boolean isTransactional,
boolean isControlBatch,
int partitionLeaderEpoch); static MemoryRecords withRecords(CompressionType compressionType, SimpleRecord... records); static MemoryRecords withRecords(CompressionType compressionType, int partitionLeaderEpoch, SimpleRecord... records); static MemoryRecords withRecords(byte magic, CompressionType compressionType, SimpleRecord... records); static MemoryRecords withRecords(long initialOffset, CompressionType compressionType, SimpleRecord... records); static MemoryRecords withRecords(long initialOffset, CompressionType compressionType, Integer partitionLeaderEpoch, SimpleRecord... records); static MemoryRecords withIdempotentRecords(CompressionType compressionType, long producerId,
short producerEpoch, int baseSequence, SimpleRecord... records); static MemoryRecords withIdempotentRecords(byte magic, long initialOffset, CompressionType compressionType,
long producerId, short producerEpoch, int baseSequence,
int partitionLeaderEpoch, SimpleRecord... records); static MemoryRecords withIdempotentRecords(long initialOffset, CompressionType compressionType, long producerId,
short producerEpoch, int baseSequence, int partitionLeaderEpoch,
SimpleRecord... records); static MemoryRecords withTransactionalRecords(CompressionType compressionType, long producerId,
short producerEpoch, int baseSequence, SimpleRecord... records); static MemoryRecords withTransactionalRecords(byte magic, long initialOffset, CompressionType compressionType,
long producerId, short producerEpoch, int baseSequence,
int partitionLeaderEpoch, SimpleRecord... records); static MemoryRecords withTransactionalRecords(long initialOffset, CompressionType compressionType, long producerId,
short producerEpoch, int baseSequence, int partitionLeaderEpoch,
SimpleRecord... records); static MemoryRecords withRecords(byte magic, long initialOffset, CompressionType compressionType,
TimestampType timestampType, SimpleRecord... records); static MemoryRecords withRecords(byte magic, long initialOffset, CompressionType compressionType,
TimestampType timestampType, long producerId, short producerEpoch,
int baseSequence, int partitionLeaderEpoch, boolean isTransactional,
SimpleRecord ... records); static MemoryRecords withEndTransactionMarker(long producerId, short producerEpoch, EndTransactionMarker marker); static MemoryRecords withEndTransactionMarker(long timestamp, long producerId, short producerEpoch,
EndTransactionMarker marker); static MemoryRecords withEndTransactionMarker(long initialOffset, long timestamp, int partitionLeaderEpoch,
long producerId, short producerEpoch,
EndTransactionMarker marker); static void writeEndTransactionalMarker(ByteBuffer buffer, long initialOffset, long timestamp,
int partitionLeaderEpoch, long producerId, short producerEpoch,
EndTransactionMarker marker); static final MemoryRecords EMPTY; }
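The focal batches() returns a view backed by the underlying buffer, so it can be traversed repeatedly (the iterator test below runs two passes over the same instance). A minimal usage sketch, assuming the usual org.apache.kafka.common.record imports; records(), inherited from AbstractRecords, flattens the same data into individual records:

MemoryRecords records = MemoryRecords.withRecords(CompressionType.NONE,
        new SimpleRecord("a".getBytes()), new SimpleRecord("b".getBytes()));
for (RecordBatch batch : records.batches())
    System.out.println("batch " + batch.baseOffset() + ".." + batch.lastOffset());
for (Record record : records.records())
    System.out.println("record @" + record.offset());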
|
@Test public void testIterator() { ByteBuffer buffer = ByteBuffer.allocate(1024); MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, magic, compression, TimestampType.CREATE_TIME, firstOffset, logAppendTime, pid, epoch, firstSequence, false, false, partitionLeaderEpoch, buffer.limit()); SimpleRecord[] records = new SimpleRecord[] { new SimpleRecord(1L, "a".getBytes(), "1".getBytes()), new SimpleRecord(2L, "b".getBytes(), "2".getBytes()), new SimpleRecord(3L, "c".getBytes(), "3".getBytes()), new SimpleRecord(4L, null, "4".getBytes()), new SimpleRecord(5L, "d".getBytes(), null), new SimpleRecord(6L, (byte[]) null, null) }; for (SimpleRecord record : records) builder.append(record); MemoryRecords memoryRecords = builder.build(); for (int iteration = 0; iteration < 2; iteration++) { int total = 0; for (RecordBatch batch : memoryRecords.batches()) { assertTrue(batch.isValid()); assertEquals(compression, batch.compressionType()); assertEquals(firstOffset + total, batch.baseOffset()); if (magic >= RecordBatch.MAGIC_VALUE_V2) { assertEquals(pid, batch.producerId()); assertEquals(epoch, batch.producerEpoch()); assertEquals(firstSequence + total, batch.baseSequence()); assertEquals(partitionLeaderEpoch, batch.partitionLeaderEpoch()); assertEquals(records.length, batch.countOrNull().intValue()); assertEquals(TimestampType.CREATE_TIME, batch.timestampType()); assertEquals(records[records.length - 1].timestamp(), batch.maxTimestamp()); } else { assertEquals(RecordBatch.NO_PRODUCER_ID, batch.producerId()); assertEquals(RecordBatch.NO_PRODUCER_EPOCH, batch.producerEpoch()); assertEquals(RecordBatch.NO_SEQUENCE, batch.baseSequence()); assertEquals(RecordBatch.NO_PARTITION_LEADER_EPOCH, batch.partitionLeaderEpoch()); assertNull(batch.countOrNull()); if (magic == RecordBatch.MAGIC_VALUE_V0) assertEquals(TimestampType.NO_TIMESTAMP_TYPE, batch.timestampType()); else assertEquals(TimestampType.CREATE_TIME, batch.timestampType()); } int recordCount = 0; for (Record record : batch) { assertTrue(record.isValid()); assertTrue(record.hasMagic(batch.magic())); assertFalse(record.isCompressed()); assertEquals(firstOffset + total, record.offset()); assertEquals(records[total].key(), record.key()); assertEquals(records[total].value(), record.value()); if (magic >= RecordBatch.MAGIC_VALUE_V2) assertEquals(firstSequence + total, record.sequence()); assertFalse(record.hasTimestampType(TimestampType.LOG_APPEND_TIME)); if (magic == RecordBatch.MAGIC_VALUE_V0) { assertEquals(RecordBatch.NO_TIMESTAMP, record.timestamp()); assertFalse(record.hasTimestampType(TimestampType.CREATE_TIME)); assertTrue(record.hasTimestampType(TimestampType.NO_TIMESTAMP_TYPE)); } else { assertEquals(records[total].timestamp(), record.timestamp()); assertFalse(record.hasTimestampType(TimestampType.NO_TIMESTAMP_TYPE)); if (magic < RecordBatch.MAGIC_VALUE_V2) assertTrue(record.hasTimestampType(TimestampType.CREATE_TIME)); else assertFalse(record.hasTimestampType(TimestampType.CREATE_TIME)); } total++; recordCount++; } assertEquals(batch.baseOffset() + recordCount - 1, batch.lastOffset()); } } }
|
MemoryRecords extends AbstractRecords { public static MemoryRecordsBuilder builder(ByteBuffer buffer, CompressionType compressionType, TimestampType timestampType, long baseOffset) { return builder(buffer, RecordBatch.CURRENT_MAGIC_VALUE, compressionType, timestampType, baseOffset); } private MemoryRecords(ByteBuffer buffer); @Override int sizeInBytes(); @Override long writeTo(GatheringByteChannel channel, long position, int length); int writeFullyTo(GatheringByteChannel channel); int validBytes(); @Override MemoryRecords downConvert(byte toMagic, long firstOffset); FilterResult filterTo(TopicPartition partition, RecordFilter filter, ByteBuffer destinationBuffer,
int maxRecordBatchSize, BufferSupplier decompressionBufferSupplier); ByteBuffer buffer(); @Override Iterable<MutableRecordBatch> batches(); @Override String toString(); @Override boolean equals(Object o); @Override int hashCode(); static MemoryRecords readableRecords(ByteBuffer buffer); static MemoryRecordsBuilder builder(ByteBuffer buffer,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset); static MemoryRecordsBuilder idempotentBuilder(ByteBuffer buffer,
CompressionType compressionType,
long baseOffset,
long producerId,
short producerEpoch,
int baseSequence); static MemoryRecordsBuilder builder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime); static MemoryRecordsBuilder builder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset); static MemoryRecordsBuilder builder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime,
int partitionLeaderEpoch); static MemoryRecordsBuilder builder(ByteBuffer buffer,
CompressionType compressionType,
long baseOffset,
long producerId,
short producerEpoch,
int baseSequence,
boolean isTransactional); static MemoryRecordsBuilder builder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime,
long producerId,
short producerEpoch,
int baseSequence); static MemoryRecordsBuilder builder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime,
long producerId,
short producerEpoch,
int baseSequence,
boolean isTransactional,
int partitionLeaderEpoch); static MemoryRecordsBuilder builder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime,
long producerId,
short producerEpoch,
int baseSequence,
boolean isTransactional,
boolean isControlBatch,
int partitionLeaderEpoch); static MemoryRecords withRecords(CompressionType compressionType, SimpleRecord... records); static MemoryRecords withRecords(CompressionType compressionType, int partitionLeaderEpoch, SimpleRecord... records); static MemoryRecords withRecords(byte magic, CompressionType compressionType, SimpleRecord... records); static MemoryRecords withRecords(long initialOffset, CompressionType compressionType, SimpleRecord... records); static MemoryRecords withRecords(long initialOffset, CompressionType compressionType, Integer partitionLeaderEpoch, SimpleRecord... records); static MemoryRecords withIdempotentRecords(CompressionType compressionType, long producerId,
short producerEpoch, int baseSequence, SimpleRecord... records); static MemoryRecords withIdempotentRecords(byte magic, long initialOffset, CompressionType compressionType,
long producerId, short producerEpoch, int baseSequence,
int partitionLeaderEpoch, SimpleRecord... records); static MemoryRecords withIdempotentRecords(long initialOffset, CompressionType compressionType, long producerId,
short producerEpoch, int baseSequence, int partitionLeaderEpoch,
SimpleRecord... records); static MemoryRecords withTransactionalRecords(CompressionType compressionType, long producerId,
short producerEpoch, int baseSequence, SimpleRecord... records); static MemoryRecords withTransactionalRecords(byte magic, long initialOffset, CompressionType compressionType,
long producerId, short producerEpoch, int baseSequence,
int partitionLeaderEpoch, SimpleRecord... records); static MemoryRecords withTransactionalRecords(long initialOffset, CompressionType compressionType, long producerId,
short producerEpoch, int baseSequence, int partitionLeaderEpoch,
SimpleRecord... records); static MemoryRecords withRecords(byte magic, long initialOffset, CompressionType compressionType,
TimestampType timestampType, SimpleRecord... records); static MemoryRecords withRecords(byte magic, long initialOffset, CompressionType compressionType,
TimestampType timestampType, long producerId, short producerEpoch,
int baseSequence, int partitionLeaderEpoch, boolean isTransactional,
SimpleRecord ... records); static MemoryRecords withEndTransactionMarker(long producerId, short producerEpoch, EndTransactionMarker marker); static MemoryRecords withEndTransactionMarker(long timestamp, long producerId, short producerEpoch,
EndTransactionMarker marker); static MemoryRecords withEndTransactionMarker(long initialOffset, long timestamp, int partitionLeaderEpoch,
long producerId, short producerEpoch,
EndTransactionMarker marker); static void writeEndTransactionalMarker(ByteBuffer buffer, long initialOffset, long timestamp,
int partitionLeaderEpoch, long producerId, short producerEpoch,
EndTransactionMarker marker); static final MemoryRecords EMPTY; }
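The focal overload simply delegates with RecordBatch.CURRENT_MAGIC_VALUE. A minimal builder sketch under that default (imports as in the tests below; sizes illustrative):

ByteBuffer buffer = ByteBuffer.allocate(1024);
MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, CompressionType.NONE,
        TimestampType.CREATE_TIME, 0L); // current-magic batch starting at offset 0
builder.append(System.currentTimeMillis(), "key".getBytes(), "value".getBytes());
MemoryRecords records = builder.build(); // closes the batch and wraps the written bytes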
|
@Test public void testHasRoomForMethod() { MemoryRecordsBuilder builder = MemoryRecords.builder(ByteBuffer.allocate(1024), magic, compression, TimestampType.CREATE_TIME, 0L); builder.append(0L, "a".getBytes(), "1".getBytes()); assertTrue(builder.hasRoomFor(1L, "b".getBytes(), "2".getBytes(), Record.EMPTY_HEADERS)); builder.close(); assertFalse(builder.hasRoomFor(1L, "b".getBytes(), "2".getBytes(), Record.EMPTY_HEADERS)); }
@Test public void testHasRoomForMethodWithHeaders() { if (magic >= RecordBatch.MAGIC_VALUE_V2) { MemoryRecordsBuilder builder = MemoryRecords.builder(ByteBuffer.allocate(100), magic, compression, TimestampType.CREATE_TIME, 0L); RecordHeaders headers = new RecordHeaders(); headers.add("hello", "world.world".getBytes()); headers.add("hello", "world.world".getBytes()); headers.add("hello", "world.world".getBytes()); headers.add("hello", "world.world".getBytes()); headers.add("hello", "world.world".getBytes()); builder.append(logAppendTime, "key".getBytes(), "value".getBytes()); assertTrue(builder.hasRoomFor(logAppendTime, "key".getBytes(), "value".getBytes(), Record.EMPTY_HEADERS)); assertFalse(builder.hasRoomFor(logAppendTime, "key".getBytes(), "value".getBytes(), headers.toArray())); } }
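A common pattern implied by these tests: probe hasRoomFor before each append so a fixed-size buffer is packed without overflowing, then build. A sketch with illustrative sizes (the builder admits at least one record even if it alone exceeds the limit):

MemoryRecordsBuilder builder = MemoryRecords.builder(ByteBuffer.allocate(256),
        CompressionType.NONE, TimestampType.CREATE_TIME, 0L);
long timestamp = System.currentTimeMillis();
byte[] key = "key".getBytes();
byte[] value = "value".getBytes();
while (builder.hasRoomFor(timestamp, key, value, Record.EMPTY_HEADERS))
    builder.append(timestamp, key, value);
MemoryRecords records = builder.build();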
|
MemoryRecords extends AbstractRecords { public FilterResult filterTo(TopicPartition partition, RecordFilter filter, ByteBuffer destinationBuffer, int maxRecordBatchSize, BufferSupplier decompressionBufferSupplier) { return filterTo(partition, batches(), filter, destinationBuffer, maxRecordBatchSize, decompressionBufferSupplier); } private MemoryRecords(ByteBuffer buffer); @Override int sizeInBytes(); @Override long writeTo(GatheringByteChannel channel, long position, int length); int writeFullyTo(GatheringByteChannel channel); int validBytes(); @Override MemoryRecords downConvert(byte toMagic, long firstOffset); FilterResult filterTo(TopicPartition partition, RecordFilter filter, ByteBuffer destinationBuffer,
int maxRecordBatchSize, BufferSupplier decompressionBufferSupplier); ByteBuffer buffer(); @Override Iterable<MutableRecordBatch> batches(); @Override String toString(); @Override boolean equals(Object o); @Override int hashCode(); static MemoryRecords readableRecords(ByteBuffer buffer); static MemoryRecordsBuilder builder(ByteBuffer buffer,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset); static MemoryRecordsBuilder idempotentBuilder(ByteBuffer buffer,
CompressionType compressionType,
long baseOffset,
long producerId,
short producerEpoch,
int baseSequence); static MemoryRecordsBuilder builder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime); static MemoryRecordsBuilder builder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset); static MemoryRecordsBuilder builder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime,
int partitionLeaderEpoch); static MemoryRecordsBuilder builder(ByteBuffer buffer,
CompressionType compressionType,
long baseOffset,
long producerId,
short producerEpoch,
int baseSequence,
boolean isTransactional); static MemoryRecordsBuilder builder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime,
long producerId,
short producerEpoch,
int baseSequence); static MemoryRecordsBuilder builder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime,
long producerId,
short producerEpoch,
int baseSequence,
boolean isTransactional,
int partitionLeaderEpoch); static MemoryRecordsBuilder builder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime,
long producerId,
short producerEpoch,
int baseSequence,
boolean isTransactional,
boolean isControlBatch,
int partitionLeaderEpoch); static MemoryRecords withRecords(CompressionType compressionType, SimpleRecord... records); static MemoryRecords withRecords(CompressionType compressionType, int partitionLeaderEpoch, SimpleRecord... records); static MemoryRecords withRecords(byte magic, CompressionType compressionType, SimpleRecord... records); static MemoryRecords withRecords(long initialOffset, CompressionType compressionType, SimpleRecord... records); static MemoryRecords withRecords(long initialOffset, CompressionType compressionType, Integer partitionLeaderEpoch, SimpleRecord... records); static MemoryRecords withIdempotentRecords(CompressionType compressionType, long producerId,
short producerEpoch, int baseSequence, SimpleRecord... records); static MemoryRecords withIdempotentRecords(byte magic, long initialOffset, CompressionType compressionType,
long producerId, short producerEpoch, int baseSequence,
int partitionLeaderEpoch, SimpleRecord... records); static MemoryRecords withIdempotentRecords(long initialOffset, CompressionType compressionType, long producerId,
short producerEpoch, int baseSequence, int partitionLeaderEpoch,
SimpleRecord... records); static MemoryRecords withTransactionalRecords(CompressionType compressionType, long producerId,
short producerEpoch, int baseSequence, SimpleRecord... records); static MemoryRecords withTransactionalRecords(byte magic, long initialOffset, CompressionType compressionType,
long producerId, short producerEpoch, int baseSequence,
int partitionLeaderEpoch, SimpleRecord... records); static MemoryRecords withTransactionalRecords(long initialOffset, CompressionType compressionType, long producerId,
short producerEpoch, int baseSequence, int partitionLeaderEpoch,
SimpleRecord... records); static MemoryRecords withRecords(byte magic, long initialOffset, CompressionType compressionType,
TimestampType timestampType, SimpleRecord... records); static MemoryRecords withRecords(byte magic, long initialOffset, CompressionType compressionType,
TimestampType timestampType, long producerId, short producerEpoch,
int baseSequence, int partitionLeaderEpoch, boolean isTransactional,
SimpleRecord ... records); static MemoryRecords withEndTransactionMarker(long producerId, short producerEpoch, EndTransactionMarker marker); static MemoryRecords withEndTransactionMarker(long timestamp, long producerId, short producerEpoch,
EndTransactionMarker marker); static MemoryRecords withEndTransactionMarker(long initialOffset, long timestamp, int partitionLeaderEpoch,
long producerId, short producerEpoch,
EndTransactionMarker marker); static void writeEndTransactionalMarker(ByteBuffer buffer, long initialOffset, long timestamp,
int partitionLeaderEpoch, long producerId, short producerEpoch,
EndTransactionMarker marker); static final MemoryRecords EMPTY; }
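A hedged usage sketch for the focal filterTo. The RecordFilter shape used here (checkBatchRetention / shouldRetainRecord, mirroring the test suite's RetainNonNullKeysFilter helper) is my assumption about this version's contract; buffer sizes are illustrative:

MemoryRecords source = MemoryRecords.withRecords(CompressionType.NONE,
        new SimpleRecord(1L, "k".getBytes(), "keep".getBytes()),
        new SimpleRecord(2L, (byte[]) null, "drop".getBytes()));
ByteBuffer filtered = ByteBuffer.allocate(source.sizeInBytes());
MemoryRecords.FilterResult result = source.filterTo(new TopicPartition("foo", 0),
        new MemoryRecords.RecordFilter() { // assumed contract, as in RetainNonNullKeysFilter
            @Override
            public BatchRetention checkBatchRetention(RecordBatch batch) {
                return BatchRetention.DELETE_EMPTY;
            }
            @Override
            public boolean shouldRetainRecord(RecordBatch batch, Record record) {
                return record.hasKey();
            }
        }, filtered, Integer.MAX_VALUE, BufferSupplier.NO_CACHING);
filtered.flip();
MemoryRecords retained = MemoryRecords.readableRecords(filtered);
// result.messagesRead, result.messagesRetained, result.bytesRead, and result.bytesRetained
// summarize the pass, as asserted in testFilterTo below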
|
@Test public void testFilterTo() { ByteBuffer buffer = ByteBuffer.allocate(2048); MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, magic, compression, TimestampType.CREATE_TIME, 0L); builder.append(10L, null, "a".getBytes()); builder.close(); builder = MemoryRecords.builder(buffer, magic, compression, TimestampType.CREATE_TIME, 1L); builder.append(11L, "1".getBytes(), "b".getBytes()); builder.append(12L, null, "c".getBytes()); builder.close(); builder = MemoryRecords.builder(buffer, magic, compression, TimestampType.CREATE_TIME, 3L); builder.append(13L, null, "d".getBytes()); builder.append(20L, "4".getBytes(), "e".getBytes()); builder.append(15L, "5".getBytes(), "f".getBytes()); builder.close(); builder = MemoryRecords.builder(buffer, magic, compression, TimestampType.CREATE_TIME, 6L); builder.append(16L, "6".getBytes(), "g".getBytes()); builder.close(); buffer.flip(); ByteBuffer filtered = ByteBuffer.allocate(2048); MemoryRecords.FilterResult result = MemoryRecords.readableRecords(buffer).filterTo( new TopicPartition("foo", 0), new RetainNonNullKeysFilter(), filtered, Integer.MAX_VALUE, BufferSupplier.NO_CACHING); filtered.flip(); assertEquals(7, result.messagesRead); assertEquals(4, result.messagesRetained); assertEquals(buffer.limit(), result.bytesRead); assertEquals(filtered.limit(), result.bytesRetained); if (magic > RecordBatch.MAGIC_VALUE_V0) { assertEquals(20L, result.maxTimestamp); if (compression == CompressionType.NONE && magic < RecordBatch.MAGIC_VALUE_V2) assertEquals(4L, result.shallowOffsetOfMaxTimestamp); else assertEquals(5L, result.shallowOffsetOfMaxTimestamp); } MemoryRecords filteredRecords = MemoryRecords.readableRecords(filtered); List<MutableRecordBatch> batches = TestUtils.toList(filteredRecords.batches()); final List<Long> expectedEndOffsets; final List<Long> expectedStartOffsets; final List<Long> expectedMaxTimestamps; if (magic < RecordBatch.MAGIC_VALUE_V2 && compression == CompressionType.NONE) { expectedEndOffsets = asList(1L, 4L, 5L, 6L); expectedStartOffsets = asList(1L, 4L, 5L, 6L); expectedMaxTimestamps = asList(11L, 20L, 15L, 16L); } else if (magic < RecordBatch.MAGIC_VALUE_V2) { expectedEndOffsets = asList(1L, 5L, 6L); expectedStartOffsets = asList(1L, 4L, 6L); expectedMaxTimestamps = asList(11L, 20L, 16L); } else { expectedEndOffsets = asList(2L, 5L, 6L); expectedStartOffsets = asList(1L, 3L, 6L); expectedMaxTimestamps = asList(11L, 20L, 16L); } assertEquals(expectedEndOffsets.size(), batches.size()); for (int i = 0; i < expectedEndOffsets.size(); i++) { RecordBatch batch = batches.get(i); assertEquals(expectedStartOffsets.get(i).longValue(), batch.baseOffset()); assertEquals(expectedEndOffsets.get(i).longValue(), batch.lastOffset()); assertEquals(magic, batch.magic()); assertEquals(compression, batch.compressionType()); if (magic >= RecordBatch.MAGIC_VALUE_V1) { assertEquals(expectedMaxTimestamps.get(i).longValue(), batch.maxTimestamp()); assertEquals(TimestampType.CREATE_TIME, batch.timestampType()); } else { assertEquals(RecordBatch.NO_TIMESTAMP, batch.maxTimestamp()); assertEquals(TimestampType.NO_TIMESTAMP_TYPE, batch.timestampType()); } } List<Record> records = TestUtils.toList(filteredRecords.records()); assertEquals(4, records.size()); Record first = records.get(0); assertEquals(1L, first.offset()); if (magic > RecordBatch.MAGIC_VALUE_V0) assertEquals(11L, first.timestamp()); assertEquals("1", Utils.utf8(first.key(), first.keySize())); assertEquals("b", Utils.utf8(first.value(), first.valueSize())); Record second = records.get(1); assertEquals(4L, second.offset()); if (magic > RecordBatch.MAGIC_VALUE_V0) assertEquals(20L, second.timestamp()); assertEquals("4", Utils.utf8(second.key(), second.keySize())); assertEquals("e", Utils.utf8(second.value(), second.valueSize())); Record third = records.get(2); assertEquals(5L, third.offset()); if (magic > RecordBatch.MAGIC_VALUE_V0) assertEquals(15L, third.timestamp()); assertEquals("5", Utils.utf8(third.key(), third.keySize())); assertEquals("f", Utils.utf8(third.value(), third.valueSize())); Record fourth = records.get(3); assertEquals(6L, fourth.offset()); if (magic > RecordBatch.MAGIC_VALUE_V0) assertEquals(16L, fourth.timestamp()); assertEquals("6", Utils.utf8(fourth.key(), fourth.keySize())); assertEquals("g", Utils.utf8(fourth.value(), fourth.valueSize())); }
|
ByteBufferLogInputStream implements LogInputStream<MutableRecordBatch> { public MutableRecordBatch nextBatch() throws IOException { int remaining = buffer.remaining(); if (remaining < LOG_OVERHEAD) return null; int recordSize = buffer.getInt(buffer.position() + SIZE_OFFSET); if (recordSize < LegacyRecord.RECORD_OVERHEAD_V0) throw new CorruptRecordException(String.format("Record size is less than the minimum record overhead (%d)", LegacyRecord.RECORD_OVERHEAD_V0)); if (recordSize > maxMessageSize) throw new CorruptRecordException(String.format("Record size exceeds the largest allowable message size (%d).", maxMessageSize)); int batchSize = recordSize + LOG_OVERHEAD; if (remaining < batchSize) return null; byte magic = buffer.get(buffer.position() + MAGIC_OFFSET); ByteBuffer batchSlice = buffer.slice(); batchSlice.limit(batchSize); buffer.position(buffer.position() + batchSize); if (magic < 0 || magic > RecordBatch.CURRENT_MAGIC_VALUE) throw new CorruptRecordException("Invalid magic found in record: " + magic); if (magic > RecordBatch.MAGIC_VALUE_V1) return new DefaultRecordBatch(batchSlice); else return new AbstractLegacyRecordBatch.ByteBufferLegacyRecordBatch(batchSlice); } ByteBufferLogInputStream(ByteBuffer buffer, int maxMessageSize); MutableRecordBatch nextBatch(); }
|
@Test(expected = CorruptRecordException.class) public void iteratorRaisesOnTooSmallRecords() throws IOException { ByteBuffer buffer = ByteBuffer.allocate(1024); MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, CompressionType.NONE, TimestampType.CREATE_TIME, 0L); builder.append(15L, "a".getBytes(), "1".getBytes()); builder.append(20L, "b".getBytes(), "2".getBytes()); builder.close(); int position = buffer.position(); builder = MemoryRecords.builder(buffer, CompressionType.NONE, TimestampType.CREATE_TIME, 2L); builder.append(30L, "c".getBytes(), "3".getBytes()); builder.append(40L, "d".getBytes(), "4".getBytes()); builder.close(); buffer.flip(); buffer.putInt(position + DefaultRecordBatch.LENGTH_OFFSET, 9); ByteBufferLogInputStream logInputStream = new ByteBufferLogInputStream(buffer, Integer.MAX_VALUE); assertNotNull(logInputStream.nextBatch()); logInputStream.nextBatch(); }
@Test(expected = CorruptRecordException.class) public void iteratorRaisesOnInvalidMagic() throws IOException { ByteBuffer buffer = ByteBuffer.allocate(1024); MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, CompressionType.NONE, TimestampType.CREATE_TIME, 0L); builder.append(15L, "a".getBytes(), "1".getBytes()); builder.append(20L, "b".getBytes(), "2".getBytes()); builder.close(); int position = buffer.position(); builder = MemoryRecords.builder(buffer, CompressionType.NONE, TimestampType.CREATE_TIME, 2L); builder.append(30L, "c".getBytes(), "3".getBytes()); builder.append(40L, "d".getBytes(), "4".getBytes()); builder.close(); buffer.flip(); buffer.put(position + DefaultRecordBatch.MAGIC_OFFSET, (byte) 37); ByteBufferLogInputStream logInputStream = new ByteBufferLogInputStream(buffer, Integer.MAX_VALUE); assertNotNull(logInputStream.nextBatch()); logInputStream.nextBatch(); }
@Test(expected = CorruptRecordException.class) public void iteratorRaisesOnTooLargeRecords() throws IOException { ByteBuffer buffer = ByteBuffer.allocate(1024); MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, CompressionType.NONE, TimestampType.CREATE_TIME, 0L); builder.append(15L, "a".getBytes(), "1".getBytes()); builder.append(20L, "b".getBytes(), "2".getBytes()); builder.close(); builder = MemoryRecords.builder(buffer, CompressionType.NONE, TimestampType.CREATE_TIME, 2L); builder.append(30L, "c".getBytes(), "3".getBytes()); builder.append(40L, "d".getBytes(), "4".getBytes()); builder.close(); buffer.flip(); ByteBufferLogInputStream logInputStream = new ByteBufferLogInputStream(buffer, 25); assertNotNull(logInputStream.nextBatch()); logInputStream.nextBatch(); }
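Callers normally hit this class indirectly: MemoryRecords.readableRecords(buffer).batches() drives a ByteBufferLogInputStream underneath, so the corruption checks above surface lazily while iterating. A sketch, with the buffer contents assumed to be log data as built in the tests above:

ByteBuffer data = buffer.duplicate(); // e.g. bytes read back from disk or the network
try {
    for (MutableRecordBatch batch : MemoryRecords.readableRecords(data).batches())
        batch.ensureValid(); // CRC check; framing/magic problems throw during iteration itself
} catch (CorruptRecordException e) {
    // handle a torn or corrupt batch
}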
|
DefaultRecordBatch extends AbstractRecordBatch implements MutableRecordBatch { public static void writeEmptyHeader(ByteBuffer buffer, byte magic, long producerId, short producerEpoch, int baseSequence, long baseOffset, long lastOffset, int partitionLeaderEpoch, TimestampType timestampType, long timestamp, boolean isTransactional, boolean isControlRecord) { int offsetDelta = (int) (lastOffset - baseOffset); writeHeader(buffer, baseOffset, offsetDelta, DefaultRecordBatch.RECORD_BATCH_OVERHEAD, magic, CompressionType.NONE, timestampType, timestamp, timestamp, producerId, producerEpoch, baseSequence, isTransactional, isControlRecord, partitionLeaderEpoch, 0); } DefaultRecordBatch(ByteBuffer buffer); @Override byte magic(); @Override void ensureValid(); long baseTimestamp(); @Override long maxTimestamp(); @Override TimestampType timestampType(); @Override long baseOffset(); @Override long lastOffset(); @Override long producerId(); @Override short producerEpoch(); @Override int baseSequence(); @Override int lastSequence(); @Override CompressionType compressionType(); @Override int sizeInBytes(); @Override Integer countOrNull(); @Override void writeTo(ByteBuffer buffer); @Override void writeTo(ByteBufferOutputStream outputStream); @Override boolean isTransactional(); @Override boolean isControlBatch(); @Override int partitionLeaderEpoch(); @Override Iterator<Record> iterator(); @Override CloseableIterator<Record> streamingIterator(BufferSupplier bufferSupplier); @Override void setLastOffset(long offset); @Override void setMaxTimestamp(TimestampType timestampType, long maxTimestamp); @Override void setPartitionLeaderEpoch(int epoch); @Override long checksum(); boolean isValid(); @Override boolean equals(Object o); @Override int hashCode(); static void writeEmptyHeader(ByteBuffer buffer,
byte magic,
long producerId,
short producerEpoch,
int baseSequence,
long baseOffset,
long lastOffset,
int partitionLeaderEpoch,
TimestampType timestampType,
long timestamp,
boolean isTransactional,
boolean isControlRecord); @Override String toString(); static int sizeInBytes(long baseOffset, Iterable<Record> records); static int sizeInBytes(Iterable<SimpleRecord> records); static final int RECORD_BATCH_OVERHEAD; }
|
@Test public void testWriteEmptyHeader() { long producerId = 23423L; short producerEpoch = 145; int baseSequence = 983; long baseOffset = 15L; long lastOffset = 37; int partitionLeaderEpoch = 15; long timestamp = System.currentTimeMillis(); for (TimestampType timestampType : Arrays.asList(TimestampType.CREATE_TIME, TimestampType.LOG_APPEND_TIME)) { for (boolean isTransactional : Arrays.asList(true, false)) { for (boolean isControlBatch : Arrays.asList(true, false)) { ByteBuffer buffer = ByteBuffer.allocate(2048); DefaultRecordBatch.writeEmptyHeader(buffer, RecordBatch.CURRENT_MAGIC_VALUE, producerId, producerEpoch, baseSequence, baseOffset, lastOffset, partitionLeaderEpoch, timestampType, timestamp, isTransactional, isControlBatch); buffer.flip(); DefaultRecordBatch batch = new DefaultRecordBatch(buffer); assertEquals(producerId, batch.producerId()); assertEquals(producerEpoch, batch.producerEpoch()); assertEquals(baseSequence, batch.baseSequence()); assertEquals(baseSequence + ((int) (lastOffset - baseOffset)), batch.lastSequence()); assertEquals(baseOffset, batch.baseOffset()); assertEquals(lastOffset, batch.lastOffset()); assertEquals(partitionLeaderEpoch, batch.partitionLeaderEpoch()); assertEquals(isTransactional, batch.isTransactional()); assertEquals(timestampType, batch.timestampType()); assertEquals(timestamp, batch.maxTimestamp()); assertEquals(isControlBatch, batch.isControlBatch()); } } } }
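writeEmptyHeader produces a records-free batch header (RECORD_BATCH_OVERHEAD bytes) that still carries producer state and offset bounds; Kafka uses this shape, for example, to retain an empty batch after log cleaning so producer sequences survive. A compact sketch of the write/read cycle the test performs exhaustively (values illustrative):

ByteBuffer buffer = ByteBuffer.allocate(DefaultRecordBatch.RECORD_BATCH_OVERHEAD);
DefaultRecordBatch.writeEmptyHeader(buffer, RecordBatch.CURRENT_MAGIC_VALUE, 1234L, (short) 5,
        100, 50L, 54L, 7, TimestampType.CREATE_TIME, System.currentTimeMillis(), false, false);
buffer.flip();
DefaultRecordBatch batch = new DefaultRecordBatch(buffer);
// batch.baseOffset() == 50, batch.lastOffset() == 54,
// batch.lastSequence() == 100 + (54 - 50) == 104, and there are no records to iterate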
|
DefaultRecordBatch extends AbstractRecordBatch implements MutableRecordBatch { @Override public int sizeInBytes() { return LOG_OVERHEAD + buffer.getInt(LENGTH_OFFSET); } DefaultRecordBatch(ByteBuffer buffer); @Override byte magic(); @Override void ensureValid(); long baseTimestamp(); @Override long maxTimestamp(); @Override TimestampType timestampType(); @Override long baseOffset(); @Override long lastOffset(); @Override long producerId(); @Override short producerEpoch(); @Override int baseSequence(); @Override int lastSequence(); @Override CompressionType compressionType(); @Override int sizeInBytes(); @Override Integer countOrNull(); @Override void writeTo(ByteBuffer buffer); @Override void writeTo(ByteBufferOutputStream outputStream); @Override boolean isTransactional(); @Override boolean isControlBatch(); @Override int partitionLeaderEpoch(); @Override Iterator<Record> iterator(); @Override CloseableIterator<Record> streamingIterator(BufferSupplier bufferSupplier); @Override void setLastOffset(long offset); @Override void setMaxTimestamp(TimestampType timestampType, long maxTimestamp); @Override void setPartitionLeaderEpoch(int epoch); @Override long checksum(); boolean isValid(); @Override boolean equals(Object o); @Override int hashCode(); static void writeEmptyHeader(ByteBuffer buffer,
byte magic,
long producerId,
short producerEpoch,
int baseSequence,
long baseOffset,
long lastOffset,
int partitionLeaderEpoch,
TimestampType timestampType,
long timestamp,
boolean isTransactional,
boolean isControlRecord); @Override String toString(); static int sizeInBytes(long baseOffset, Iterable<Record> records); static int sizeInBytes(Iterable<SimpleRecord> records); static final int RECORD_BATCH_OVERHEAD; }
|
@Test public void testSizeInBytes() { Header[] headers = new Header[] { new RecordHeader("foo", "value".getBytes()), new RecordHeader("bar", (byte[]) null) }; long timestamp = System.currentTimeMillis(); SimpleRecord[] records = new SimpleRecord[] { new SimpleRecord(timestamp, "key".getBytes(), "value".getBytes()), new SimpleRecord(timestamp + 30000, null, "value".getBytes()), new SimpleRecord(timestamp + 60000, "key".getBytes(), null), new SimpleRecord(timestamp + 60000, "key".getBytes(), "value".getBytes(), headers) }; int actualSize = MemoryRecords.withRecords(CompressionType.NONE, records).sizeInBytes(); assertEquals(actualSize, DefaultRecordBatch.sizeInBytes(Arrays.asList(records))); }
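As the test shows, the static sizeInBytes matches what an uncompressed current-magic build actually produces, which makes it useful for pre-sizing buffers. A sketch (imports as in the test above):

SimpleRecord record = new SimpleRecord(System.currentTimeMillis(), "key".getBytes(), "value".getBytes());
int needed = DefaultRecordBatch.sizeInBytes(Arrays.asList(record));
ByteBuffer buffer = ByteBuffer.allocate(needed); // exactly enough for one uncompressed batch
MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, CompressionType.NONE,
        TimestampType.CREATE_TIME, 0L);
builder.append(record);
// builder.build().sizeInBytes() == needed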
|
DefaultRecordBatch extends AbstractRecordBatch implements MutableRecordBatch { public boolean isValid() { return sizeInBytes() >= RECORD_BATCH_OVERHEAD && checksum() == computeChecksum(); } DefaultRecordBatch(ByteBuffer buffer); @Override byte magic(); @Override void ensureValid(); long baseTimestamp(); @Override long maxTimestamp(); @Override TimestampType timestampType(); @Override long baseOffset(); @Override long lastOffset(); @Override long producerId(); @Override short producerEpoch(); @Override int baseSequence(); @Override int lastSequence(); @Override CompressionType compressionType(); @Override int sizeInBytes(); @Override Integer countOrNull(); @Override void writeTo(ByteBuffer buffer); @Override void writeTo(ByteBufferOutputStream outputStream); @Override boolean isTransactional(); @Override boolean isControlBatch(); @Override int partitionLeaderEpoch(); @Override Iterator<Record> iterator(); @Override CloseableIterator<Record> streamingIterator(BufferSupplier bufferSupplier); @Override void setLastOffset(long offset); @Override void setMaxTimestamp(TimestampType timestampType, long maxTimestamp); @Override void setPartitionLeaderEpoch(int epoch); @Override long checksum(); boolean isValid(); @Override boolean equals(Object o); @Override int hashCode(); static void writeEmptyHeader(ByteBuffer buffer,
byte magic,
long producerId,
short producerEpoch,
int baseSequence,
long baseOffset,
long lastOffset,
int partitionLeaderEpoch,
TimestampType timestampType,
long timestamp,
boolean isTransactional,
boolean isControlRecord); @Override String toString(); static int sizeInBytes(long baseOffset, Iterable<Record> records); static int sizeInBytes(Iterable<SimpleRecord> records); static final int RECORD_BATCH_OVERHEAD; }
|
@Test(expected = InvalidRecordException.class) public void testInvalidRecordCountTooManyNonCompressedV2() { long now = System.currentTimeMillis(); DefaultRecordBatch batch = recordsWithInvalidRecordCount(RecordBatch.MAGIC_VALUE_V2, now, CompressionType.NONE, 5); for (Record record: batch) { record.isValid(); } }
@Test(expected = InvalidRecordException.class) public void testInvalidRecordCountTooLittleNonCompressedV2() { long now = System.currentTimeMillis(); DefaultRecordBatch batch = recordsWithInvalidRecordCount(RecordBatch.MAGIC_VALUE_V2, now, CompressionType.NONE, 2); for (Record record: batch) { record.isValid(); } }
@Test(expected = InvalidRecordException.class) public void testInvalidRecordCountTooManyCompressedV2() { long now = System.currentTimeMillis(); DefaultRecordBatch batch = recordsWithInvalidRecordCount(RecordBatch.MAGIC_VALUE_V2, now, CompressionType.GZIP, 5); for (Record record: batch) { record.isValid(); } }
@Test(expected = InvalidRecordException.class) public void testInvalidRecordCountTooLittleCompressedV2() { long now = System.currentTimeMillis(); DefaultRecordBatch batch = recordsWithInvalidRecordCount(RecordBatch.MAGIC_VALUE_V2, now, CompressionType.GZIP, 2); for (Record record: batch) { record.isValid(); } }
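isValid ties the batch CRC to the payload, so any post-hoc mutation of record data is detectable. A small sketch that flips one payload bit (imports as in the tests; the last byte of the buffer falls inside the CRC-covered region):

MemoryRecords records = MemoryRecords.withRecords(CompressionType.NONE,
        new SimpleRecord(1L, "key".getBytes(), "value".getBytes()));
ByteBuffer buffer = records.buffer();
DefaultRecordBatch batch = new DefaultRecordBatch(buffer);
// freshly built: batch.isValid() == true
int last = buffer.limit() - 1;
buffer.put(last, (byte) (buffer.get(last) ^ 1)); // corrupt one bit of record data
// now batch.isValid() == false and batch.ensureValid() throws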
|
DefaultRecordBatch extends AbstractRecordBatch implements MutableRecordBatch { @Override public void setLastOffset(long offset) { buffer.putLong(BASE_OFFSET_OFFSET, offset - lastOffsetDelta()); } DefaultRecordBatch(ByteBuffer buffer); @Override byte magic(); @Override void ensureValid(); long baseTimestamp(); @Override long maxTimestamp(); @Override TimestampType timestampType(); @Override long baseOffset(); @Override long lastOffset(); @Override long producerId(); @Override short producerEpoch(); @Override int baseSequence(); @Override int lastSequence(); @Override CompressionType compressionType(); @Override int sizeInBytes(); @Override Integer countOrNull(); @Override void writeTo(ByteBuffer buffer); @Override void writeTo(ByteBufferOutputStream outputStream); @Override boolean isTransactional(); @Override boolean isControlBatch(); @Override int partitionLeaderEpoch(); @Override Iterator<Record> iterator(); @Override CloseableIterator<Record> streamingIterator(BufferSupplier bufferSupplier); @Override void setLastOffset(long offset); @Override void setMaxTimestamp(TimestampType timestampType, long maxTimestamp); @Override void setPartitionLeaderEpoch(int epoch); @Override long checksum(); boolean isValid(); @Override boolean equals(Object o); @Override int hashCode(); static void writeEmptyHeader(ByteBuffer buffer,
byte magic,
long producerId,
short producerEpoch,
int baseSequence,
long baseOffset,
long lastOffset,
int partitionLeaderEpoch,
TimestampType timestampType,
long timestamp,
boolean isTransactional,
boolean isControlRecord); @Override String toString(); static int sizeInBytes(long baseOffset, Iterable<Record> records); static int sizeInBytes(Iterable<SimpleRecord> records); static final int RECORD_BATCH_OVERHEAD; }
|
@Test public void testSetLastOffset() { SimpleRecord[] simpleRecords = new SimpleRecord[] { new SimpleRecord(1L, "a".getBytes(), "1".getBytes()), new SimpleRecord(2L, "b".getBytes(), "2".getBytes()), new SimpleRecord(3L, "c".getBytes(), "3".getBytes()) }; MemoryRecords records = MemoryRecords.withRecords(RecordBatch.MAGIC_VALUE_V2, 0L, CompressionType.NONE, TimestampType.CREATE_TIME, simpleRecords); long lastOffset = 500L; long firstOffset = lastOffset - simpleRecords.length + 1; DefaultRecordBatch batch = new DefaultRecordBatch(records.buffer()); batch.setLastOffset(lastOffset); assertEquals(lastOffset, batch.lastOffset()); assertEquals(firstOffset, batch.baseOffset()); assertTrue(batch.isValid()); List<MutableRecordBatch> recordBatches = Utils.toList(records.batches().iterator()); assertEquals(1, recordBatches.size()); assertEquals(lastOffset, recordBatches.get(0).lastOffset()); long offset = firstOffset; for (Record record : records.records()) assertEquals(offset++, record.offset()); }
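setLastOffset works because v2 stores per-record offsets as deltas from the base: rewriting the 8-byte base offset relocates every record without touching the record bytes, and since the CRC does not cover the base offset field the batch stays valid, as the test asserts. A minimal sketch:

MemoryRecords records = MemoryRecords.withRecords(RecordBatch.MAGIC_VALUE_V2, 0L, CompressionType.NONE,
        TimestampType.CREATE_TIME, new SimpleRecord(1L, "a".getBytes(), "1".getBytes()));
DefaultRecordBatch batch = new DefaultRecordBatch(records.buffer());
batch.setLastOffset(41L); // a single record, so the base offset becomes 41 as well
// records.records().iterator().next().offset() == 41 and batch.isValid() == true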
|
DefaultRecordBatch extends AbstractRecordBatch implements MutableRecordBatch { @Override public void setPartitionLeaderEpoch(int epoch) { buffer.putInt(PARTITION_LEADER_EPOCH_OFFSET, epoch); } DefaultRecordBatch(ByteBuffer buffer); @Override byte magic(); @Override void ensureValid(); long baseTimestamp(); @Override long maxTimestamp(); @Override TimestampType timestampType(); @Override long baseOffset(); @Override long lastOffset(); @Override long producerId(); @Override short producerEpoch(); @Override int baseSequence(); @Override int lastSequence(); @Override CompressionType compressionType(); @Override int sizeInBytes(); @Override Integer countOrNull(); @Override void writeTo(ByteBuffer buffer); @Override void writeTo(ByteBufferOutputStream outputStream); @Override boolean isTransactional(); @Override boolean isControlBatch(); @Override int partitionLeaderEpoch(); @Override Iterator<Record> iterator(); @Override CloseableIterator<Record> streamingIterator(BufferSupplier bufferSupplier); @Override void setLastOffset(long offset); @Override void setMaxTimestamp(TimestampType timestampType, long maxTimestamp); @Override void setPartitionLeaderEpoch(int epoch); @Override long checksum(); boolean isValid(); @Override boolean equals(Object o); @Override int hashCode(); static void writeEmptyHeader(ByteBuffer buffer,
byte magic,
long producerId,
short producerEpoch,
int baseSequence,
long baseOffset,
long lastOffset,
int partitionLeaderEpoch,
TimestampType timestampType,
long timestamp,
boolean isTransactional,
boolean isControlRecord); @Override String toString(); static int sizeInBytes(long baseOffset, Iterable<Record> records); static int sizeInBytes(Iterable<SimpleRecord> records); static final int RECORD_BATCH_OVERHEAD; }
|
@Test public void testSetPartitionLeaderEpoch() { MemoryRecords records = MemoryRecords.withRecords(RecordBatch.MAGIC_VALUE_V2, 0L, CompressionType.NONE, TimestampType.CREATE_TIME, new SimpleRecord(1L, "a".getBytes(), "1".getBytes()), new SimpleRecord(2L, "b".getBytes(), "2".getBytes()), new SimpleRecord(3L, "c".getBytes(), "3".getBytes())); int leaderEpoch = 500; DefaultRecordBatch batch = new DefaultRecordBatch(records.buffer()); batch.setPartitionLeaderEpoch(leaderEpoch); assertEquals(leaderEpoch, batch.partitionLeaderEpoch()); assertTrue(batch.isValid()); List<MutableRecordBatch> recordBatches = Utils.toList(records.batches().iterator()); assertEquals(1, recordBatches.size()); assertEquals(leaderEpoch, recordBatches.get(0).partitionLeaderEpoch()); }
|
DefaultRecordBatch extends AbstractRecordBatch implements MutableRecordBatch { @Override public void setMaxTimestamp(TimestampType timestampType, long maxTimestamp) { long currentMaxTimestamp = maxTimestamp(); if (timestampType() == timestampType && currentMaxTimestamp == maxTimestamp) return; byte attributes = computeAttributes(compressionType(), timestampType, isTransactional(), isControlBatch()); buffer.putShort(ATTRIBUTES_OFFSET, attributes); buffer.putLong(MAX_TIMESTAMP_OFFSET, maxTimestamp); long crc = computeChecksum(); ByteUtils.writeUnsignedInt(buffer, CRC_OFFSET, crc); } DefaultRecordBatch(ByteBuffer buffer); @Override byte magic(); @Override void ensureValid(); long baseTimestamp(); @Override long maxTimestamp(); @Override TimestampType timestampType(); @Override long baseOffset(); @Override long lastOffset(); @Override long producerId(); @Override short producerEpoch(); @Override int baseSequence(); @Override int lastSequence(); @Override CompressionType compressionType(); @Override int sizeInBytes(); @Override Integer countOrNull(); @Override void writeTo(ByteBuffer buffer); @Override void writeTo(ByteBufferOutputStream outputStream); @Override boolean isTransactional(); @Override boolean isControlBatch(); @Override int partitionLeaderEpoch(); @Override Iterator<Record> iterator(); @Override CloseableIterator<Record> streamingIterator(BufferSupplier bufferSupplier); @Override void setLastOffset(long offset); @Override void setMaxTimestamp(TimestampType timestampType, long maxTimestamp); @Override void setPartitionLeaderEpoch(int epoch); @Override long checksum(); boolean isValid(); @Override boolean equals(Object o); @Override int hashCode(); static void writeEmptyHeader(ByteBuffer buffer,
byte magic,
long producerId,
short producerEpoch,
int baseSequence,
long baseOffset,
long lastOffset,
int partitionLeaderEpoch,
TimestampType timestampType,
long timestamp,
boolean isTransactional,
boolean isControlRecord); @Override String toString(); static int sizeInBytes(long baseOffset, Iterable<Record> records); static int sizeInBytes(Iterable<SimpleRecord> records); static final int RECORD_BATCH_OVERHEAD; }
|
@Test(expected = IllegalArgumentException.class) public void testSetNoTimestampTypeNotAllowed() { MemoryRecords records = MemoryRecords.withRecords(RecordBatch.MAGIC_VALUE_V2, 0L, CompressionType.NONE, TimestampType.CREATE_TIME, new SimpleRecord(1L, "a".getBytes(), "1".getBytes()), new SimpleRecord(2L, "b".getBytes(), "2".getBytes()), new SimpleRecord(3L, "c".getBytes(), "3".getBytes())); DefaultRecordBatch batch = new DefaultRecordBatch(records.buffer()); batch.setMaxTimestamp(TimestampType.NO_TIMESTAMP_TYPE, RecordBatch.NO_TIMESTAMP); }
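Only NO_TIMESTAMP_TYPE is rejected (above); the intended use of the focal method is re-stamping, e.g. overwriting the max timestamp with a log-append time. A sketch:

MemoryRecords records = MemoryRecords.withRecords(RecordBatch.MAGIC_VALUE_V2, 0L, CompressionType.NONE,
        TimestampType.CREATE_TIME, new SimpleRecord(1L, "a".getBytes(), "1".getBytes()));
DefaultRecordBatch batch = new DefaultRecordBatch(records.buffer());
long appendTime = System.currentTimeMillis();
batch.setMaxTimestamp(TimestampType.LOG_APPEND_TIME, appendTime);
// attributes, max timestamp, and CRC are rewritten in place, so:
// batch.timestampType() == LOG_APPEND_TIME, batch.maxTimestamp() == appendTime, batch.isValid() == true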
|
DefaultRecordBatch extends AbstractRecordBatch implements MutableRecordBatch { @Override public boolean isControlBatch() { return (attributes() & CONTROL_FLAG_MASK) > 0; } DefaultRecordBatch(ByteBuffer buffer); @Override byte magic(); @Override void ensureValid(); long baseTimestamp(); @Override long maxTimestamp(); @Override TimestampType timestampType(); @Override long baseOffset(); @Override long lastOffset(); @Override long producerId(); @Override short producerEpoch(); @Override int baseSequence(); @Override int lastSequence(); @Override CompressionType compressionType(); @Override int sizeInBytes(); @Override Integer countOrNull(); @Override void writeTo(ByteBuffer buffer); @Override void writeTo(ByteBufferOutputStream outputStream); @Override boolean isTransactional(); @Override boolean isControlBatch(); @Override int partitionLeaderEpoch(); @Override Iterator<Record> iterator(); @Override CloseableIterator<Record> streamingIterator(BufferSupplier bufferSupplier); @Override void setLastOffset(long offset); @Override void setMaxTimestamp(TimestampType timestampType, long maxTimestamp); @Override void setPartitionLeaderEpoch(int epoch); @Override long checksum(); boolean isValid(); @Override boolean equals(Object o); @Override int hashCode(); static void writeEmptyHeader(ByteBuffer buffer,
byte magic,
long producerId,
short producerEpoch,
int baseSequence,
long baseOffset,
long lastOffset,
int partitionLeaderEpoch,
TimestampType timestampType,
long timestamp,
boolean isTransactional,
boolean isControlRecord); @Override String toString(); static int sizeInBytes(long baseOffset, Iterable<Record> records); static int sizeInBytes(Iterable<SimpleRecord> records); static final int RECORD_BATCH_OVERHEAD; }
|
@Test public void testReadAndWriteControlBatch() { long producerId = 1L; short producerEpoch = 0; int coordinatorEpoch = 15; ByteBuffer buffer = ByteBuffer.allocate(128); MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, RecordBatch.CURRENT_MAGIC_VALUE, CompressionType.NONE, TimestampType.CREATE_TIME, 0L, RecordBatch.NO_TIMESTAMP, producerId, producerEpoch, RecordBatch.NO_SEQUENCE, true, true, RecordBatch.NO_PARTITION_LEADER_EPOCH, buffer.remaining()); EndTransactionMarker marker = new EndTransactionMarker(ControlRecordType.COMMIT, coordinatorEpoch); builder.appendEndTxnMarker(System.currentTimeMillis(), marker); MemoryRecords records = builder.build(); List<MutableRecordBatch> batches = TestUtils.toList(records.batches()); assertEquals(1, batches.size()); MutableRecordBatch batch = batches.get(0); assertTrue(batch.isControlBatch()); List<Record> logRecords = TestUtils.toList(records.records()); assertEquals(1, logRecords.size()); Record commitRecord = logRecords.get(0); assertEquals(marker, EndTransactionMarker.deserialize(commitRecord)); }
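Control batches are normally produced through the MemoryRecords helpers rather than a hand-configured MemoryRecordsBuilder; a shorter equivalent of the test's write path:

EndTransactionMarker marker = new EndTransactionMarker(ControlRecordType.COMMIT, 15 /* coordinatorEpoch */);
MemoryRecords records = MemoryRecords.withEndTransactionMarker(1L /* producerId */, (short) 0, marker);
MutableRecordBatch batch = records.batches().iterator().next();
// batch.isControlBatch() == true, and the single record deserializes back to the marker:
// EndTransactionMarker.deserialize(records.records().iterator().next()).equals(marker)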
|
DefaultRecordBatch extends AbstractRecordBatch implements MutableRecordBatch { static int incrementSequence(int baseSequence, int increment) { if (baseSequence > Integer.MAX_VALUE - increment) return increment - (Integer.MAX_VALUE - baseSequence) - 1; return baseSequence + increment; } DefaultRecordBatch(ByteBuffer buffer); @Override byte magic(); @Override void ensureValid(); long baseTimestamp(); @Override long maxTimestamp(); @Override TimestampType timestampType(); @Override long baseOffset(); @Override long lastOffset(); @Override long producerId(); @Override short producerEpoch(); @Override int baseSequence(); @Override int lastSequence(); @Override CompressionType compressionType(); @Override int sizeInBytes(); @Override Integer countOrNull(); @Override void writeTo(ByteBuffer buffer); @Override void writeTo(ByteBufferOutputStream outputStream); @Override boolean isTransactional(); @Override boolean isControlBatch(); @Override int partitionLeaderEpoch(); @Override Iterator<Record> iterator(); @Override CloseableIterator<Record> streamingIterator(BufferSupplier bufferSupplier); @Override void setLastOffset(long offset); @Override void setMaxTimestamp(TimestampType timestampType, long maxTimestamp); @Override void setPartitionLeaderEpoch(int epoch); @Override long checksum(); boolean isValid(); @Override boolean equals(Object o); @Override int hashCode(); static void writeEmptyHeader(ByteBuffer buffer,
byte magic,
long producerId,
short producerEpoch,
int baseSequence,
long baseOffset,
long lastOffset,
int partitionLeaderEpoch,
TimestampType timestampType,
long timestamp,
boolean isTransactional,
boolean isControlRecord); @Override String toString(); static int sizeInBytes(long baseOffset, Iterable<Record> records); static int sizeInBytes(Iterable<SimpleRecord> records); static final int RECORD_BATCH_OVERHEAD; }
|
@Test public void testIncrementSequence() { assertEquals(10, DefaultRecordBatch.incrementSequence(5, 5)); assertEquals(0, DefaultRecordBatch.incrementSequence(Integer.MAX_VALUE, 1)); assertEquals(4, DefaultRecordBatch.incrementSequence(Integer.MAX_VALUE - 5, 10)); }
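The wrap-around branch: when baseSequence + increment would exceed Integer.MAX_VALUE, sequencing restarts at 0, so the result is increment - (Integer.MAX_VALUE - baseSequence) - 1. Worked through the last assertion above:

int base = Integer.MAX_VALUE - 5;
int increment = 10;
// 5 steps reach Integer.MAX_VALUE, the 6th wraps to 0, and the remaining 4 land on 4:
int wrapped = increment - (Integer.MAX_VALUE - base) - 1; // 10 - 5 - 1 = 4
// wrapped == DefaultRecordBatch.incrementSequence(base, increment)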
|
FileRecords extends AbstractRecords implements Closeable { public FileChannel channel() { return channel; } FileRecords(File file,
FileChannel channel,
int start,
int end,
boolean isSlice); @Override int sizeInBytes(); File file(); FileChannel channel(); ByteBuffer readInto(ByteBuffer buffer, int position); FileRecords read(int position, int size); int append(MemoryRecords records); void flush(); void close(); boolean delete(); void trim(); void setFile(File file); void renameTo(File f); int truncateTo(int targetSize); @Override Records downConvert(byte toMagic, long firstOffset); @Override long writeTo(GatheringByteChannel destChannel, long offset, int length); LogOffsetPosition searchForOffsetWithSize(long targetOffset, int startingPosition); TimestampAndOffset searchForTimestamp(long targetTimestamp, int startingPosition, long startingOffset); TimestampAndOffset largestTimestampAfter(int startingPosition); @Override Iterable<FileChannelRecordBatch> batches(); static FileRecords open(File file,
boolean mutable,
boolean fileAlreadyExists,
int initFileSize,
boolean preallocate); static FileRecords open(File file,
boolean fileAlreadyExists,
int initFileSize,
boolean preallocate); static FileRecords open(File file, boolean mutable); static FileRecords open(File file); }
|
@Test public void testIterationDoesntChangePosition() throws IOException { long position = fileRecords.channel().position(); Iterator<Record> records = fileRecords.records().iterator(); for (byte[] value : values) { assertTrue(records.hasNext()); assertEquals(records.next().value(), ByteBuffer.wrap(value)); } assertEquals(position, fileRecords.channel().position()); }
|
FileRecords extends AbstractRecords implements Closeable { public FileRecords read(int position, int size) throws IOException { if (position < 0) throw new IllegalArgumentException("Invalid position: " + position); if (size < 0) throw new IllegalArgumentException("Invalid size: " + size); final int end; if (this.start + position + size < 0) end = sizeInBytes(); else end = Math.min(this.start + position + size, sizeInBytes()); return new FileRecords(file, channel, this.start + position, end, true); } FileRecords(File file,
FileChannel channel,
int start,
int end,
boolean isSlice); @Override int sizeInBytes(); File file(); FileChannel channel(); ByteBuffer readInto(ByteBuffer buffer, int position); FileRecords read(int position, int size); int append(MemoryRecords records); void flush(); void close(); boolean delete(); void trim(); void setFile(File file); void renameTo(File f); int truncateTo(int targetSize); @Override Records downConvert(byte toMagic, long firstOffset); @Override long writeTo(GatheringByteChannel destChannel, long offset, int length); LogOffsetPosition searchForOffsetWithSize(long targetOffset, int startingPosition); TimestampAndOffset searchForTimestamp(long targetTimestamp, int startingPosition, long startingOffset); TimestampAndOffset largestTimestampAfter(int startingPosition); @Override Iterable<FileChannelRecordBatch> batches(); static FileRecords open(File file,
boolean mutable,
boolean fileAlreadyExists,
int initFileSize,
boolean preallocate); static FileRecords open(File file,
boolean fileAlreadyExists,
int initFileSize,
boolean preallocate); static FileRecords open(File file, boolean mutable); static FileRecords open(File file); }
|
@Test public void testRead() throws IOException { FileRecords read = fileRecords.read(0, fileRecords.sizeInBytes()); TestUtils.checkEquals(fileRecords.batches(), read.batches()); List<RecordBatch> items = batches(read); RecordBatch second = items.get(1); read = fileRecords.read(second.sizeInBytes(), fileRecords.sizeInBytes()); assertEquals("Try a read starting from the second message", items.subList(1, 3), batches(read)); read = fileRecords.read(second.sizeInBytes(), second.sizeInBytes()); assertEquals("Try a read of a single message starting from the second message", Collections.singletonList(second), batches(read)); }
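The slice bounds in read(position, size) guard against int overflow: start + position + size can wrap negative, in which case the slice runs to the end of the file. A standalone sketch of just that bound computation (method name is illustrative):

// Computes the exclusive end bound of a slice, clamped to the file size.
static int sliceEnd(int start, int position, int size, int sizeInBytes) {
    int end = start + position + size;
    if (end < 0) // int overflow: the requested slice covers everything available
        return sizeInBytes;
    return Math.min(end, sizeInBytes);
}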
|
FileRecords extends AbstractRecords implements Closeable { public int truncateTo(int targetSize) throws IOException { int originalSize = sizeInBytes(); if (targetSize > originalSize || targetSize < 0) throw new KafkaException("Attempt to truncate log segment to " + targetSize + " bytes failed, " + " size of this log segment is " + originalSize + " bytes."); if (targetSize < (int) channel.size()) { channel.truncate(targetSize); size.set(targetSize); } return originalSize - targetSize; } FileRecords(File file,
FileChannel channel,
int start,
int end,
boolean isSlice); @Override int sizeInBytes(); File file(); FileChannel channel(); ByteBuffer readInto(ByteBuffer buffer, int position); FileRecords read(int position, int size); int append(MemoryRecords records); void flush(); void close(); boolean delete(); void trim(); void setFile(File file); void renameTo(File f); int truncateTo(int targetSize); @Override Records downConvert(byte toMagic, long firstOffset); @Override long writeTo(GatheringByteChannel destChannel, long offset, int length); LogOffsetPosition searchForOffsetWithSize(long targetOffset, int startingPosition); TimestampAndOffset searchForTimestamp(long targetTimestamp, int startingPosition, long startingOffset); TimestampAndOffset largestTimestampAfter(int startingPosition); @Override Iterable<FileChannelRecordBatch> batches(); static FileRecords open(File file,
boolean mutable,
boolean fileAlreadyExists,
int initFileSize,
boolean preallocate); static FileRecords open(File file,
boolean fileAlreadyExists,
int initFileSize,
boolean preallocate); static FileRecords open(File file, boolean mutable); static FileRecords open(File file); }
|
@Test public void testTruncateNotCalledIfSizeIsSameAsTargetSize() throws IOException { FileChannel channelMock = EasyMock.createMock(FileChannel.class); EasyMock.expect(channelMock.size()).andReturn(42L).atLeastOnce(); EasyMock.expect(channelMock.position(42L)).andReturn(null); EasyMock.replay(channelMock); FileRecords fileRecords = new FileRecords(tempFile(), channelMock, 0, Integer.MAX_VALUE, false); fileRecords.truncateTo(42); EasyMock.verify(channelMock); }
@Test public void testTruncateNotCalledIfSizeIsBiggerThanTargetSize() throws IOException { FileChannel channelMock = EasyMock.createMock(FileChannel.class); EasyMock.expect(channelMock.size()).andReturn(42L).atLeastOnce(); EasyMock.expect(channelMock.position(42L)).andReturn(null); EasyMock.replay(channelMock); FileRecords fileRecords = new FileRecords(tempFile(), channelMock, 0, Integer.MAX_VALUE, false); try { fileRecords.truncateTo(43); fail("Should throw KafkaException"); } catch (KafkaException e) { } EasyMock.verify(channelMock); }
@Test public void testTruncateIfSizeIsDifferentToTargetSize() throws IOException { FileChannel channelMock = EasyMock.createMock(FileChannel.class); EasyMock.expect(channelMock.size()).andReturn(42L).atLeastOnce(); EasyMock.expect(channelMock.position(42L)).andReturn(null).once(); EasyMock.expect(channelMock.truncate(23L)).andReturn(null).once(); EasyMock.replay(channelMock); FileRecords fileRecords = new FileRecords(tempFile(), channelMock, 0, Integer.MAX_VALUE, false); fileRecords.truncateTo(23); EasyMock.verify(channelMock); }
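The three tests pin down the guard in truncateTo: the channel is only touched when the target is strictly smaller than the current channel size, and a target larger than the logical size fails fast. A hedged standalone sketch of that control flow (not the class's actual fields; a stock exception stands in for KafkaException to keep the sketch self-contained):

import java.io.IOException;
import java.nio.channels.FileChannel;
import java.util.concurrent.atomic.AtomicInteger;

final class TruncateSketch {
    // Returns the number of bytes removed; a no-op when targetSize == channel.size().
    static int truncateTo(FileChannel channel, AtomicInteger size, int targetSize) throws IOException {
        int originalSize = size.get();
        if (targetSize > originalSize || targetSize < 0)
            throw new IllegalArgumentException("Attempt to truncate log segment to " + targetSize
                + " bytes failed, size of this log segment is " + originalSize + " bytes.");
        if (targetSize < (int) channel.size()) { // skip the syscall when sizes already match
            channel.truncate(targetSize);
            size.set(targetSize);
        }
        return originalSize - targetSize;
    }
}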
|
MemoryRecordsBuilder { public void close() { if (aborted) throw new IllegalStateException("Cannot close MemoryRecordsBuilder as it has already been aborted"); if (builtRecords != null) return; validateProducerState(); closeForRecordAppends(); if (numRecords == 0L) { buffer().position(initialPosition); builtRecords = MemoryRecords.EMPTY; } else { if (magic > RecordBatch.MAGIC_VALUE_V1) this.actualCompressionRatio = (float) writeDefaultBatchHeader() / this.writtenUncompressed; else if (compressionType != CompressionType.NONE) this.actualCompressionRatio = (float) writeLegacyCompressedWrapperHeader() / this.writtenUncompressed; ByteBuffer buffer = buffer().duplicate(); buffer.flip(); buffer.position(initialPosition); builtRecords = MemoryRecords.readableRecords(buffer.slice()); } } MemoryRecordsBuilder(ByteBufferOutputStream bufferStream,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime,
long producerId,
short producerEpoch,
int baseSequence,
boolean isTransactional,
boolean isControlBatch,
int partitionLeaderEpoch,
int writeLimit); MemoryRecordsBuilder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime,
long producerId,
short producerEpoch,
int baseSequence,
boolean isTransactional,
boolean isControlBatch,
int partitionLeaderEpoch,
int writeLimit); ByteBuffer buffer(); int initialCapacity(); double compressionRatio(); CompressionType compressionType(); boolean isControlBatch(); boolean isTransactional(); MemoryRecords build(); RecordsInfo info(); void setProducerState(long producerId, short producerEpoch, int baseSequence, boolean isTransactional); void overrideLastOffset(long lastOffset); void closeForRecordAppends(); void abort(); void close(); Long appendWithOffset(long offset, long timestamp, byte[] key, byte[] value, Header[] headers); Long appendWithOffset(long offset, long timestamp, ByteBuffer key, ByteBuffer value, Header[] headers); Long appendWithOffset(long offset, long timestamp, byte[] key, byte[] value); Long appendWithOffset(long offset, long timestamp, ByteBuffer key, ByteBuffer value); Long appendWithOffset(long offset, SimpleRecord record); Long append(long timestamp, ByteBuffer key, ByteBuffer value); Long append(long timestamp, ByteBuffer key, ByteBuffer value, Header[] headers); Long append(long timestamp, byte[] key, byte[] value); Long append(long timestamp, byte[] key, byte[] value, Header[] headers); Long append(SimpleRecord record); Long appendEndTxnMarker(long timestamp, EndTransactionMarker marker); void appendUncheckedWithOffset(long offset, LegacyRecord record); void append(Record record); void appendWithOffset(long offset, Record record); void appendWithOffset(long offset, LegacyRecord record); void append(LegacyRecord record); void setEstimatedCompressionRatio(float estimatedCompressionRatio); boolean hasRoomFor(long timestamp, byte[] key, byte[] value, Header[] headers); boolean hasRoomFor(long timestamp, ByteBuffer key, ByteBuffer value, Header[] headers); boolean isClosed(); boolean isFull(); int sizeInBytes(); byte magic(); long producerId(); short producerEpoch(); }
|
@Test(expected = IllegalArgumentException.class) public void testWriteTransactionalWithInvalidPID() { ByteBuffer buffer = ByteBuffer.allocate(128); buffer.position(bufferOffset); long pid = RecordBatch.NO_PRODUCER_ID; short epoch = 15; int sequence = 2342; MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, RecordBatch.CURRENT_MAGIC_VALUE, compressionType, TimestampType.CREATE_TIME, 0L, 0L, pid, epoch, sequence, true, false, RecordBatch.NO_PARTITION_LEADER_EPOCH, buffer.capacity()); builder.close(); }
@Test(expected = IllegalArgumentException.class) public void testWriteIdempotentWithInvalidEpoch() { ByteBuffer buffer = ByteBuffer.allocate(128); buffer.position(bufferOffset); long pid = 9809; short epoch = RecordBatch.NO_PRODUCER_EPOCH; int sequence = 2342; MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, RecordBatch.CURRENT_MAGIC_VALUE, compressionType, TimestampType.CREATE_TIME, 0L, 0L, pid, epoch, sequence, true, false, RecordBatch.NO_PARTITION_LEADER_EPOCH, buffer.capacity()); builder.close(); }
@Test(expected = IllegalArgumentException.class) public void testWriteIdempotentWithInvalidBaseSequence() { ByteBuffer buffer = ByteBuffer.allocate(128); buffer.position(bufferOffset); long pid = 9809; short epoch = 15; int sequence = RecordBatch.NO_SEQUENCE; MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, RecordBatch.CURRENT_MAGIC_VALUE, compressionType, TimestampType.CREATE_TIME, 0L, 0L, pid, epoch, sequence, true, false, RecordBatch.NO_PARTITION_LEADER_EPOCH, buffer.capacity()); builder.close(); }
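All three failures come from the producer-state validation that close() invokes via validateProducerState(). A hedged restatement of those invariants, assuming the usual Kafka sentinels NO_PRODUCER_ID = -1, NO_PRODUCER_EPOCH = -1 and NO_SEQUENCE = -1 (the method name is illustrative, not the builder's private API):

static void validateProducerState(long producerId, short producerEpoch,
                                  int baseSequence, boolean isTransactional) {
    if (isTransactional && producerId == -1L) // NO_PRODUCER_ID
        throw new IllegalArgumentException("Cannot write transactional messages without a valid producer ID");
    if (producerId != -1L) {
        if (producerEpoch == -1)  // NO_PRODUCER_EPOCH
            throw new IllegalArgumentException("Invalid negative producer epoch");
        if (baseSequence == -1)   // NO_SEQUENCE
            throw new IllegalArgumentException("Invalid negative sequence number");
    }
}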
|
MemoryRecordsBuilder { public Long appendEndTxnMarker(long timestamp, EndTransactionMarker marker) { if (producerId == RecordBatch.NO_PRODUCER_ID) throw new IllegalArgumentException("End transaction marker requires a valid producerId"); if (!isTransactional) throw new IllegalArgumentException("End transaction marker depends on batch transactional flag being enabled"); ByteBuffer value = marker.serializeValue(); return appendControlRecord(timestamp, marker.controlType(), value); } MemoryRecordsBuilder(ByteBufferOutputStream bufferStream,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime,
long producerId,
short producerEpoch,
int baseSequence,
boolean isTransactional,
boolean isControlBatch,
int partitionLeaderEpoch,
int writeLimit); MemoryRecordsBuilder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime,
long producerId,
short producerEpoch,
int baseSequence,
boolean isTransactional,
boolean isControlBatch,
int partitionLeaderEpoch,
int writeLimit); ByteBuffer buffer(); int initialCapacity(); double compressionRatio(); CompressionType compressionType(); boolean isControlBatch(); boolean isTransactional(); MemoryRecords build(); RecordsInfo info(); void setProducerState(long producerId, short producerEpoch, int baseSequence, boolean isTransactional); void overrideLastOffset(long lastOffset); void closeForRecordAppends(); void abort(); void close(); Long appendWithOffset(long offset, long timestamp, byte[] key, byte[] value, Header[] headers); Long appendWithOffset(long offset, long timestamp, ByteBuffer key, ByteBuffer value, Header[] headers); Long appendWithOffset(long offset, long timestamp, byte[] key, byte[] value); Long appendWithOffset(long offset, long timestamp, ByteBuffer key, ByteBuffer value); Long appendWithOffset(long offset, SimpleRecord record); Long append(long timestamp, ByteBuffer key, ByteBuffer value); Long append(long timestamp, ByteBuffer key, ByteBuffer value, Header[] headers); Long append(long timestamp, byte[] key, byte[] value); Long append(long timestamp, byte[] key, byte[] value, Header[] headers); Long append(SimpleRecord record); Long appendEndTxnMarker(long timestamp, EndTransactionMarker marker); void appendUncheckedWithOffset(long offset, LegacyRecord record); void append(Record record); void appendWithOffset(long offset, Record record); void appendWithOffset(long offset, LegacyRecord record); void append(LegacyRecord record); void setEstimatedCompressionRatio(float estimatedCompressionRatio); boolean hasRoomFor(long timestamp, byte[] key, byte[] value, Header[] headers); boolean hasRoomFor(long timestamp, ByteBuffer key, ByteBuffer value, Header[] headers); boolean isClosed(); boolean isFull(); int sizeInBytes(); byte magic(); long producerId(); short producerEpoch(); }
|
@Test(expected = IllegalArgumentException.class) public void testWriteEndTxnMarkerNonTransactionalBatch() { ByteBuffer buffer = ByteBuffer.allocate(128); buffer.position(bufferOffset); long pid = 9809; short epoch = 15; int sequence = RecordBatch.NO_SEQUENCE; MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, RecordBatch.CURRENT_MAGIC_VALUE, compressionType, TimestampType.CREATE_TIME, 0L, 0L, pid, epoch, sequence, false, true, RecordBatch.NO_PARTITION_LEADER_EPOCH, buffer.capacity()); builder.appendEndTxnMarker(RecordBatch.NO_TIMESTAMP, new EndTransactionMarker(ControlRecordType.ABORT, 0)); }
@Test(expected = IllegalArgumentException.class) public void testWriteEndTxnMarkerNonControlBatch() { ByteBuffer buffer = ByteBuffer.allocate(128); buffer.position(bufferOffset); long pid = 9809; short epoch = 15; int sequence = RecordBatch.NO_SEQUENCE; MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, RecordBatch.CURRENT_MAGIC_VALUE, compressionType, TimestampType.CREATE_TIME, 0L, 0L, pid, epoch, sequence, true, false, RecordBatch.NO_PARTITION_LEADER_EPOCH, buffer.capacity()); builder.appendEndTxnMarker(RecordBatch.NO_TIMESTAMP, new EndTransactionMarker(ControlRecordType.ABORT, 0)); }
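The preconditions behind the first test can be read off appendEndTxnMarker directly; the non-control-batch case fails one level deeper, in the private appendWithOffset shown below ("Control records can only be appended to control batches"). A standalone restatement of the two explicit checks:

final class EndTxnMarkerPreconditions {
    static void check(long producerId, boolean isTransactional) {
        if (producerId == -1L) // RecordBatch.NO_PRODUCER_ID
            throw new IllegalArgumentException("End transaction marker requires a valid producerId");
        if (!isTransactional)
            throw new IllegalArgumentException("End transaction marker depends on batch transactional flag being enabled");
    }
}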
|
MemoryRecordsBuilder { private Long appendWithOffset(long offset, boolean isControlRecord, long timestamp, ByteBuffer key, ByteBuffer value, Header[] headers) { try { if (isControlRecord != isControlBatch) throw new IllegalArgumentException("Control records can only be appended to control batches"); if (lastOffset != null && offset <= lastOffset) throw new IllegalArgumentException(String.format("Illegal offset %s following previous offset %s " + "(Offsets must increase monotonically).", offset, lastOffset)); if (timestamp < 0 && timestamp != RecordBatch.NO_TIMESTAMP) throw new IllegalArgumentException("Invalid negative timestamp " + timestamp); if (magic < RecordBatch.MAGIC_VALUE_V2 && headers != null && headers.length > 0) throw new IllegalArgumentException("Magic v" + magic + " does not support record headers"); if (baseTimestamp == null) baseTimestamp = timestamp; if (magic > RecordBatch.MAGIC_VALUE_V1) { appendDefaultRecord(offset, timestamp, key, value, headers); return null; } else { return appendLegacyRecord(offset, timestamp, key, value); } } catch (IOException e) { throw new KafkaException("I/O exception when writing to the append stream, closing", e); } } MemoryRecordsBuilder(ByteBufferOutputStream bufferStream,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime,
long producerId,
short producerEpoch,
int baseSequence,
boolean isTransactional,
boolean isControlBatch,
int partitionLeaderEpoch,
int writeLimit); MemoryRecordsBuilder(ByteBuffer buffer,
byte magic,
CompressionType compressionType,
TimestampType timestampType,
long baseOffset,
long logAppendTime,
long producerId,
short producerEpoch,
int baseSequence,
boolean isTransactional,
boolean isControlBatch,
int partitionLeaderEpoch,
int writeLimit); ByteBuffer buffer(); int initialCapacity(); double compressionRatio(); CompressionType compressionType(); boolean isControlBatch(); boolean isTransactional(); MemoryRecords build(); RecordsInfo info(); void setProducerState(long producerId, short producerEpoch, int baseSequence, boolean isTransactional); void overrideLastOffset(long lastOffset); void closeForRecordAppends(); void abort(); void close(); Long appendWithOffset(long offset, long timestamp, byte[] key, byte[] value, Header[] headers); Long appendWithOffset(long offset, long timestamp, ByteBuffer key, ByteBuffer value, Header[] headers); Long appendWithOffset(long offset, long timestamp, byte[] key, byte[] value); Long appendWithOffset(long offset, long timestamp, ByteBuffer key, ByteBuffer value); Long appendWithOffset(long offset, SimpleRecord record); Long append(long timestamp, ByteBuffer key, ByteBuffer value); Long append(long timestamp, ByteBuffer key, ByteBuffer value, Header[] headers); Long append(long timestamp, byte[] key, byte[] value); Long append(long timestamp, byte[] key, byte[] value, Header[] headers); Long append(SimpleRecord record); Long appendEndTxnMarker(long timestamp, EndTransactionMarker marker); void appendUncheckedWithOffset(long offset, LegacyRecord record); void append(Record record); void appendWithOffset(long offset, Record record); void appendWithOffset(long offset, LegacyRecord record); void append(LegacyRecord record); void setEstimatedCompressionRatio(float estimatedCompressionRatio); boolean hasRoomFor(long timestamp, byte[] key, byte[] value, Header[] headers); boolean hasRoomFor(long timestamp, ByteBuffer key, ByteBuffer value, Header[] headers); boolean isClosed(); boolean isFull(); int sizeInBytes(); byte magic(); long producerId(); short producerEpoch(); }
|
@Test(expected = IllegalArgumentException.class) public void testAppendAtInvalidOffset() { ByteBuffer buffer = ByteBuffer.allocate(1024); buffer.position(bufferOffset); long logAppendTime = System.currentTimeMillis(); MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, RecordBatch.MAGIC_VALUE_V1, compressionType, TimestampType.CREATE_TIME, 0L, logAppendTime, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, RecordBatch.NO_SEQUENCE, false, false, RecordBatch.NO_PARTITION_LEADER_EPOCH, buffer.capacity()); builder.appendWithOffset(0L, System.currentTimeMillis(), "a".getBytes(), null); builder.appendWithOffset(0L, System.currentTimeMillis(), "b".getBytes(), null); }
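The monotonic-offset rule is why the second appendWithOffset(0L, ...) throws. A minimal restatement of the guard from the method above (class name illustrative):

final class OffsetGuard {
    private Long lastOffset = null;

    // Rejects any offset that does not strictly exceed the previous one.
    void check(long offset) {
        if (lastOffset != null && offset <= lastOffset)
            throw new IllegalArgumentException(String.format(
                "Illegal offset %s following previous offset %s (Offsets must increase monotonically).",
                offset, lastOffset));
        lastOffset = offset;
    }
}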
|
ProduceRequest extends AbstractRequest { public boolean isTransactional() { return transactional; } private ProduceRequest(short version, short acks, int timeout, Map<TopicPartition, MemoryRecords> partitionRecords, String transactionalId); ProduceRequest(Struct struct, short version); @Override Struct toStruct(); @Override String toString(boolean verbose); @Override ProduceResponse getErrorResponse(int throttleTimeMs, Throwable e); short acks(); int timeout(); String transactionalId(); boolean isTransactional(); boolean isIdempotent(); Map<TopicPartition, MemoryRecords> partitionRecordsOrFail(); void clearPartitionRecords(); static ProduceRequest parse(ByteBuffer buffer, short version); static byte requiredMagicForVersion(short produceRequestVersion); }
|
@Test public void shouldBeFlaggedAsTransactionalWhenTransactionalRecords() throws Exception { final MemoryRecords memoryRecords = MemoryRecords.withTransactionalRecords(0, CompressionType.NONE, 1L, (short) 1, 1, 1, simpleRecord); final ProduceRequest request = new ProduceRequest.Builder(RecordBatch.CURRENT_MAGIC_VALUE, (short) -1, 10, Collections.singletonMap( new TopicPartition("topic", 1), memoryRecords)).build(); assertTrue(request.isTransactional()); }
@Test public void shouldNotBeFlaggedAsTransactionalWhenNoRecords() throws Exception { final ProduceRequest request = createNonIdempotentNonTransactionalRecords(); assertFalse(request.isTransactional()); }
@Test public void shouldNotBeFlaggedAsIdempotentWhenRecordsNotIdempotent() throws Exception { final ProduceRequest request = createNonIdempotentNonTransactionalRecords(); assertFalse(request.isIdempotent()); }
|
ProduceRequest extends AbstractRequest { public boolean isIdempotent() { return idempotent; } private ProduceRequest(short version, short acks, int timeout, Map<TopicPartition, MemoryRecords> partitionRecords, String transactionalId); ProduceRequest(Struct struct, short version); @Override Struct toStruct(); @Override String toString(boolean verbose); @Override ProduceResponse getErrorResponse(int throttleTimeMs, Throwable e); short acks(); int timeout(); String transactionalId(); boolean isTransactional(); boolean isIdempotent(); Map<TopicPartition, MemoryRecords> partitionRecordsOrFail(); void clearPartitionRecords(); static ProduceRequest parse(ByteBuffer buffer, short version); static byte requiredMagicForVersion(short produceRequestVersion); }
|
@Test public void shouldBeFlaggedAsIdempotentWhenIdempotentRecords() throws Exception { final MemoryRecords memoryRecords = MemoryRecords.withIdempotentRecords(1, CompressionType.NONE, 1L, (short) 1, 1, 1, simpleRecord); final ProduceRequest request = new ProduceRequest.Builder(RecordBatch.CURRENT_MAGIC_VALUE, (short) -1, 10, Collections.singletonMap( new TopicPartition("topic", 1), memoryRecords)).build(); assertTrue(request.isIdempotent()); }
|
StreamsConfig extends AbstractConfig { public Map<String, Object> getProducerConfigs(final String clientId) { final Map<String, Object> clientProvidedProps = getClientPropsWithPrefix(PRODUCER_PREFIX, ProducerConfig.configNames()); if (eosEnabled) { if (clientProvidedProps.containsKey(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG)) { throw new ConfigException("Unexpected user-specified producer config " + ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG + "; because " + PROCESSING_GUARANTEE_CONFIG + " is set to '" + EXACTLY_ONCE + "' producer will always have idempotency enabled."); } if (clientProvidedProps.containsKey(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION)) { throw new ConfigException("Unexpected user-specified producer config " + ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION + "; because " + PROCESSING_GUARANTEE_CONFIG + " is set to '" + EXACTLY_ONCE + "' producer will always have only one in-flight request per connection."); } } final Map<String, Object> props = new HashMap<>(eosEnabled ? PRODUCER_EOS_OVERRIDES : PRODUCER_DEFAULT_OVERRIDES); props.putAll(clientProvidedProps); props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, originals().get(BOOTSTRAP_SERVERS_CONFIG)); props.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId + "-producer"); return props; } StreamsConfig(final Map<?, ?> props); static String consumerPrefix(final String consumerProp); static String producerPrefix(final String producerProp); static ConfigDef configDef(); Map<String, Object> getConsumerConfigs(final StreamThread streamThread,
final String groupId,
final String clientId); Map<String, Object> getRestoreConsumerConfigs(final String clientId); Map<String, Object> getProducerConfigs(final String clientId); @Deprecated Serde keySerde(); Serde defaultKeySerde(); @Deprecated Serde valueSerde(); Serde defaultValueSerde(); TimestampExtractor defaultTimestampExtractor(); static void main(final String[] args); static final String CONSUMER_PREFIX; static final String PRODUCER_PREFIX; static final String AT_LEAST_ONCE; static final String EXACTLY_ONCE; static final String APPLICATION_ID_CONFIG; static final String APPLICATION_SERVER_CONFIG; static final String BOOTSTRAP_SERVERS_CONFIG; static final String BUFFERED_RECORDS_PER_PARTITION_CONFIG; static final String CACHE_MAX_BYTES_BUFFERING_CONFIG; static final String CLIENT_ID_CONFIG; static final String COMMIT_INTERVAL_MS_CONFIG; static final String CONNECTIONS_MAX_IDLE_MS_CONFIG; static final String DEFAULT_KEY_SERDE_CLASS_CONFIG; static final String DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG; static final String DEFAULT_VALUE_SERDE_CLASS_CONFIG; @Deprecated
static final String KEY_SERDE_CLASS_CONFIG; static final String METADATA_MAX_AGE_CONFIG; static final String METRICS_NUM_SAMPLES_CONFIG; static final String METRICS_RECORDING_LEVEL_CONFIG; static final String METRIC_REPORTER_CLASSES_CONFIG; static final String METRICS_SAMPLE_WINDOW_MS_CONFIG; static final String NUM_STANDBY_REPLICAS_CONFIG; static final String NUM_STREAM_THREADS_CONFIG; static final String PARTITION_GROUPER_CLASS_CONFIG; static final String POLL_MS_CONFIG; static final String PROCESSING_GUARANTEE_CONFIG; static final String RECEIVE_BUFFER_CONFIG; static final String RECONNECT_BACKOFF_MS_CONFIG; static final String RECONNECT_BACKOFF_MAX_MS_CONFIG; static final String REPLICATION_FACTOR_CONFIG; static final String REQUEST_TIMEOUT_MS_CONFIG; static final String RETRY_BACKOFF_MS_CONFIG; static final String ROCKSDB_CONFIG_SETTER_CLASS_CONFIG; static final String SECURITY_PROTOCOL_CONFIG; static final String SEND_BUFFER_CONFIG; static final String STATE_CLEANUP_DELAY_MS_CONFIG; static final String STATE_DIR_CONFIG; @Deprecated
static final String TIMESTAMP_EXTRACTOR_CLASS_CONFIG; @Deprecated
static final String VALUE_SERDE_CLASS_CONFIG; static final String WINDOW_STORE_CHANGE_LOG_ADDITIONAL_RETENTION_MS_CONFIG; @Deprecated
static final String ZOOKEEPER_CONNECT_CONFIG; }
|
@Test public void testGetProducerConfigs() throws Exception { final String clientId = "client"; final Map<String, Object> returnedProps = streamsConfig.getProducerConfigs(clientId); assertEquals(returnedProps.get(ProducerConfig.CLIENT_ID_CONFIG), clientId + "-producer"); assertEquals(returnedProps.get(ProducerConfig.LINGER_MS_CONFIG), "100"); assertNull(returnedProps.get("DUMMY")); }
@Test public void shouldSupportNonPrefixedProducerConfigs() throws Exception { props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 10); props.put(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG, 1); final StreamsConfig streamsConfig = new StreamsConfig(props); final Map<String, Object> configs = streamsConfig.getProducerConfigs("clientId"); assertEquals(10, configs.get(ProducerConfig.BUFFER_MEMORY_CONFIG)); assertEquals(1, configs.get(ProducerConfig.METRICS_NUM_SAMPLES_CONFIG)); }
@Test(expected = ConfigException.class) public void shouldThrowExceptionIfProducerEnableIdempotenceIsOverriddenIfEosEnabled() { props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, EXACTLY_ONCE); props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "anyValue"); final StreamsConfig streamsConfig = new StreamsConfig(props); streamsConfig.getProducerConfigs("clientId"); }
@Test public void shouldAllowSettingProducerEnableIdempotenceIfEosDisabled() { props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true); final StreamsConfig streamsConfig = new StreamsConfig(props); streamsConfig.getProducerConfigs("clientId"); }
@Test(expected = ConfigException.class) public void shouldThrowExceptionIfProducerMaxInFlightRequestPerConnectionsIsOverriddenIfEosEnabled() { props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, EXACTLY_ONCE); props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "anyValue"); final StreamsConfig streamsConfig = new StreamsConfig(props); streamsConfig.getProducerConfigs("clientId"); }
@Test public void shouldAllowSettingProducerMaxInFlightRequestPerConnectionsWhenEosDisabled() { props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "anyValue"); final StreamsConfig streamsConfig = new StreamsConfig(props); streamsConfig.getProducerConfigs("clientId"); }
@Test public void shouldNotOverrideUserConfigRetriesIfExactlyOnceEnabled() { final int numberOfRetries = 42; props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, EXACTLY_ONCE); props.put(ProducerConfig.RETRIES_CONFIG, numberOfRetries); final StreamsConfig streamsConfig = new StreamsConfig(props); final Map<String, Object> producerConfigs = streamsConfig.getProducerConfigs("clientId"); assertThat((Integer) producerConfigs.get(ProducerConfig.RETRIES_CONFIG), equalTo(numberOfRetries)); }
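Overrides reach getProducerConfigs either under the raw ProducerConfig key or under the producer. prefix via producerPrefix. A hedged usage sketch; the application id and bootstrap servers are placeholder values:

import java.util.Map;
import java.util.Properties;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.streams.StreamsConfig;

final class ProducerConfigExample {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "example-app");        // placeholder
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");  // placeholder
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 10);                   // non-prefixed override
        props.put(StreamsConfig.producerPrefix(ProducerConfig.LINGER_MS_CONFIG), "100"); // prefixed override

        Map<String, Object> producerConfigs = new StreamsConfig(props).getProducerConfigs("clientId");
        System.out.println(producerConfigs.get(ProducerConfig.LINGER_MS_CONFIG)); // "100"
    }
}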
|
StreamsConfig extends AbstractConfig { public Map<String, Object> getConsumerConfigs(final StreamThread streamThread, final String groupId, final String clientId) throws ConfigException { final Map<String, Object> consumerProps = getCommonConsumerConfigs(); consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, groupId); consumerProps.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId + "-consumer"); consumerProps.put(InternalConfig.STREAM_THREAD_INSTANCE, streamThread); consumerProps.put(REPLICATION_FACTOR_CONFIG, getInt(REPLICATION_FACTOR_CONFIG)); consumerProps.put(NUM_STANDBY_REPLICAS_CONFIG, getInt(NUM_STANDBY_REPLICAS_CONFIG)); consumerProps.put(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, StreamPartitionAssignor.class.getName()); consumerProps.put(WINDOW_STORE_CHANGE_LOG_ADDITIONAL_RETENTION_MS_CONFIG, getLong(WINDOW_STORE_CHANGE_LOG_ADDITIONAL_RETENTION_MS_CONFIG)); consumerProps.put(APPLICATION_SERVER_CONFIG, getString(APPLICATION_SERVER_CONFIG)); return consumerProps; } StreamsConfig(final Map<?, ?> props); static String consumerPrefix(final String consumerProp); static String producerPrefix(final String producerProp); static ConfigDef configDef(); Map<String, Object> getConsumerConfigs(final StreamThread streamThread,
final String groupId,
final String clientId); Map<String, Object> getRestoreConsumerConfigs(final String clientId); Map<String, Object> getProducerConfigs(final String clientId); @Deprecated Serde keySerde(); Serde defaultKeySerde(); @Deprecated Serde valueSerde(); Serde defaultValueSerde(); TimestampExtractor defaultTimestampExtractor(); static void main(final String[] args); static final String CONSUMER_PREFIX; static final String PRODUCER_PREFIX; static final String AT_LEAST_ONCE; static final String EXACTLY_ONCE; static final String APPLICATION_ID_CONFIG; static final String APPLICATION_SERVER_CONFIG; static final String BOOTSTRAP_SERVERS_CONFIG; static final String BUFFERED_RECORDS_PER_PARTITION_CONFIG; static final String CACHE_MAX_BYTES_BUFFERING_CONFIG; static final String CLIENT_ID_CONFIG; static final String COMMIT_INTERVAL_MS_CONFIG; static final String CONNECTIONS_MAX_IDLE_MS_CONFIG; static final String DEFAULT_KEY_SERDE_CLASS_CONFIG; static final String DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG; static final String DEFAULT_VALUE_SERDE_CLASS_CONFIG; @Deprecated
static final String KEY_SERDE_CLASS_CONFIG; static final String METADATA_MAX_AGE_CONFIG; static final String METRICS_NUM_SAMPLES_CONFIG; static final String METRICS_RECORDING_LEVEL_CONFIG; static final String METRIC_REPORTER_CLASSES_CONFIG; static final String METRICS_SAMPLE_WINDOW_MS_CONFIG; static final String NUM_STANDBY_REPLICAS_CONFIG; static final String NUM_STREAM_THREADS_CONFIG; static final String PARTITION_GROUPER_CLASS_CONFIG; static final String POLL_MS_CONFIG; static final String PROCESSING_GUARANTEE_CONFIG; static final String RECEIVE_BUFFER_CONFIG; static final String RECONNECT_BACKOFF_MS_CONFIG; static final String RECONNECT_BACKOFF_MAX_MS_CONFIG; static final String REPLICATION_FACTOR_CONFIG; static final String REQUEST_TIMEOUT_MS_CONFIG; static final String RETRY_BACKOFF_MS_CONFIG; static final String ROCKSDB_CONFIG_SETTER_CLASS_CONFIG; static final String SECURITY_PROTOCOL_CONFIG; static final String SEND_BUFFER_CONFIG; static final String STATE_CLEANUP_DELAY_MS_CONFIG; static final String STATE_DIR_CONFIG; @Deprecated
static final String TIMESTAMP_EXTRACTOR_CLASS_CONFIG; @Deprecated
static final String VALUE_SERDE_CLASS_CONFIG; static final String WINDOW_STORE_CHANGE_LOG_ADDITIONAL_RETENTION_MS_CONFIG; @Deprecated
static final String ZOOKEEPER_CONNECT_CONFIG; }
|
@Test public void testGetConsumerConfigs() throws Exception { final String groupId = "example-application"; final String clientId = "client"; final Map<String, Object> returnedProps = streamsConfig.getConsumerConfigs(null, groupId, clientId); assertEquals(returnedProps.get(ConsumerConfig.CLIENT_ID_CONFIG), clientId + "-consumer"); assertEquals(returnedProps.get(ConsumerConfig.GROUP_ID_CONFIG), groupId); assertEquals(returnedProps.get(ConsumerConfig.MAX_POLL_RECORDS_CONFIG), "1000"); assertNull(returnedProps.get("DUMMY")); }
@Test public void shouldSupportNonPrefixedConsumerConfigs() throws Exception { props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.put(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG, 1); final StreamsConfig streamsConfig = new StreamsConfig(props); final Map<String, Object> consumerConfigs = streamsConfig.getConsumerConfigs(null, "groupId", "clientId"); assertEquals("earliest", consumerConfigs.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)); assertEquals(1, consumerConfigs.get(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG)); }
@Test public void shouldSetInternalLeaveGroupOnCloseConfigToFalseInConsumer() throws Exception { final StreamsConfig streamsConfig = new StreamsConfig(props); final Map<String, Object> consumerConfigs = streamsConfig.getConsumerConfigs(null, "groupId", "clientId"); assertThat(consumerConfigs.get("internal.leave.group.on.close"), CoreMatchers.<Object>equalTo(false)); }
@Test(expected = ConfigException.class) public void shouldThrowExceptionIfConsumerIsolationLevelIsOverriddenIfEosEnabled() { props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, EXACTLY_ONCE); props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "anyValue"); final StreamsConfig streamsConfig = new StreamsConfig(props); streamsConfig.getConsumerConfigs(null, "groupId", "clientId"); }
@Test public void shouldAllowSettingConsumerIsolationLevelIfEosDisabled() { props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, READ_UNCOMMITTED.name().toLowerCase(Locale.ROOT)); final StreamsConfig streamsConfig = new StreamsConfig(props); streamsConfig.getConsumerConfigs(null, "groupId", "clientId"); }
|
StreamsConfig extends AbstractConfig { public Map<String, Object> getRestoreConsumerConfigs(final String clientId) throws ConfigException { final Map<String, Object> consumerProps = getCommonConsumerConfigs(); consumerProps.remove(ConsumerConfig.GROUP_ID_CONFIG); consumerProps.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId + "-restore-consumer"); return consumerProps; } StreamsConfig(final Map<?, ?> props); static String consumerPrefix(final String consumerProp); static String producerPrefix(final String producerProp); static ConfigDef configDef(); Map<String, Object> getConsumerConfigs(final StreamThread streamThread,
final String groupId,
final String clientId); Map<String, Object> getRestoreConsumerConfigs(final String clientId); Map<String, Object> getProducerConfigs(final String clientId); @Deprecated Serde keySerde(); Serde defaultKeySerde(); @Deprecated Serde valueSerde(); Serde defaultValueSerde(); TimestampExtractor defaultTimestampExtractor(); static void main(final String[] args); static final String CONSUMER_PREFIX; static final String PRODUCER_PREFIX; static final String AT_LEAST_ONCE; static final String EXACTLY_ONCE; static final String APPLICATION_ID_CONFIG; static final String APPLICATION_SERVER_CONFIG; static final String BOOTSTRAP_SERVERS_CONFIG; static final String BUFFERED_RECORDS_PER_PARTITION_CONFIG; static final String CACHE_MAX_BYTES_BUFFERING_CONFIG; static final String CLIENT_ID_CONFIG; static final String COMMIT_INTERVAL_MS_CONFIG; static final String CONNECTIONS_MAX_IDLE_MS_CONFIG; static final String DEFAULT_KEY_SERDE_CLASS_CONFIG; static final String DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG; static final String DEFAULT_VALUE_SERDE_CLASS_CONFIG; @Deprecated
static final String KEY_SERDE_CLASS_CONFIG; static final String METADATA_MAX_AGE_CONFIG; static final String METRICS_NUM_SAMPLES_CONFIG; static final String METRICS_RECORDING_LEVEL_CONFIG; static final String METRIC_REPORTER_CLASSES_CONFIG; static final String METRICS_SAMPLE_WINDOW_MS_CONFIG; static final String NUM_STANDBY_REPLICAS_CONFIG; static final String NUM_STREAM_THREADS_CONFIG; static final String PARTITION_GROUPER_CLASS_CONFIG; static final String POLL_MS_CONFIG; static final String PROCESSING_GUARANTEE_CONFIG; static final String RECEIVE_BUFFER_CONFIG; static final String RECONNECT_BACKOFF_MS_CONFIG; static final String RECONNECT_BACKOFF_MAX_MS_CONFIG; static final String REPLICATION_FACTOR_CONFIG; static final String REQUEST_TIMEOUT_MS_CONFIG; static final String RETRY_BACKOFF_MS_CONFIG; static final String ROCKSDB_CONFIG_SETTER_CLASS_CONFIG; static final String SECURITY_PROTOCOL_CONFIG; static final String SEND_BUFFER_CONFIG; static final String STATE_CLEANUP_DELAY_MS_CONFIG; static final String STATE_DIR_CONFIG; @Deprecated
static final String TIMESTAMP_EXTRACTOR_CLASS_CONFIG; @Deprecated
static final String VALUE_SERDE_CLASS_CONFIG; static final String WINDOW_STORE_CHANGE_LOG_ADDITIONAL_RETENTION_MS_CONFIG; @Deprecated
static final String ZOOKEEPER_CONNECT_CONFIG; }
|
@Test public void testGetRestoreConsumerConfigs() throws Exception { final String clientId = "client"; final Map<String, Object> returnedProps = streamsConfig.getRestoreConsumerConfigs(clientId); assertEquals(returnedProps.get(ConsumerConfig.CLIENT_ID_CONFIG), clientId + "-restore-consumer"); assertNull(returnedProps.get(ConsumerConfig.GROUP_ID_CONFIG)); assertNull(returnedProps.get("DUMMY")); }
@Test public void shouldSupportNonPrefixedRestoreConsumerConfigs() throws Exception { props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.put(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG, 1); final StreamsConfig streamsConfig = new StreamsConfig(props); final Map<String, Object> consumerConfigs = streamsConfig.getRestoreConsumerConfigs("clientId"); assertEquals("earliest", consumerConfigs.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)); assertEquals(1, consumerConfigs.get(ConsumerConfig.METRICS_NUM_SAMPLES_CONFIG)); }
|
StreamsConfig extends AbstractConfig { public Serde defaultKeySerde() { try { Serde<?> serde = getConfiguredInstance(DEFAULT_KEY_SERDE_CLASS_CONFIG, Serde.class); serde.configure(originals(), true); return serde; } catch (final Exception e) { throw new StreamsException(String.format("Failed to configure key serde %s", get(DEFAULT_KEY_SERDE_CLASS_CONFIG)), e); } } StreamsConfig(final Map<?, ?> props); static String consumerPrefix(final String consumerProp); static String producerPrefix(final String producerProp); static ConfigDef configDef(); Map<String, Object> getConsumerConfigs(final StreamThread streamThread,
final String groupId,
final String clientId); Map<String, Object> getRestoreConsumerConfigs(final String clientId); Map<String, Object> getProducerConfigs(final String clientId); @Deprecated Serde keySerde(); Serde defaultKeySerde(); @Deprecated Serde valueSerde(); Serde defaultValueSerde(); TimestampExtractor defaultTimestampExtractor(); static void main(final String[] args); static final String CONSUMER_PREFIX; static final String PRODUCER_PREFIX; static final String AT_LEAST_ONCE; static final String EXACTLY_ONCE; static final String APPLICATION_ID_CONFIG; static final String APPLICATION_SERVER_CONFIG; static final String BOOTSTRAP_SERVERS_CONFIG; static final String BUFFERED_RECORDS_PER_PARTITION_CONFIG; static final String CACHE_MAX_BYTES_BUFFERING_CONFIG; static final String CLIENT_ID_CONFIG; static final String COMMIT_INTERVAL_MS_CONFIG; static final String CONNECTIONS_MAX_IDLE_MS_CONFIG; static final String DEFAULT_KEY_SERDE_CLASS_CONFIG; static final String DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG; static final String DEFAULT_VALUE_SERDE_CLASS_CONFIG; @Deprecated
static final String KEY_SERDE_CLASS_CONFIG; static final String METADATA_MAX_AGE_CONFIG; static final String METRICS_NUM_SAMPLES_CONFIG; static final String METRICS_RECORDING_LEVEL_CONFIG; static final String METRIC_REPORTER_CLASSES_CONFIG; static final String METRICS_SAMPLE_WINDOW_MS_CONFIG; static final String NUM_STANDBY_REPLICAS_CONFIG; static final String NUM_STREAM_THREADS_CONFIG; static final String PARTITION_GROUPER_CLASS_CONFIG; static final String POLL_MS_CONFIG; static final String PROCESSING_GUARANTEE_CONFIG; static final String RECEIVE_BUFFER_CONFIG; static final String RECONNECT_BACKOFF_MS_CONFIG; static final String RECONNECT_BACKOFF_MAX_MS_CONFIG; static final String REPLICATION_FACTOR_CONFIG; static final String REQUEST_TIMEOUT_MS_CONFIG; static final String RETRY_BACKOFF_MS_CONFIG; static final String ROCKSDB_CONFIG_SETTER_CLASS_CONFIG; static final String SECURITY_PROTOCOL_CONFIG; static final String SEND_BUFFER_CONFIG; static final String STATE_CLEANUP_DELAY_MS_CONFIG; static final String STATE_DIR_CONFIG; @Deprecated
static final String TIMESTAMP_EXTRACTOR_CLASS_CONFIG; @Deprecated
static final String VALUE_SERDE_CLASS_CONFIG; static final String WINDOW_STORE_CHANGE_LOG_ADDITIONAL_RETENTION_MS_CONFIG; @Deprecated
static final String ZOOKEEPER_CONNECT_CONFIG; }
|
@Test(expected = StreamsException.class) public void shouldThrowStreamsExceptionIfKeySerdeConfigFails() throws Exception { props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, MisconfiguredSerde.class); final StreamsConfig streamsConfig = new StreamsConfig(props); streamsConfig.defaultKeySerde(); }
|
StreamsConfig extends AbstractConfig { public Serde defaultValueSerde() { try { Serde<?> serde = getConfiguredInstance(DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serde.class); serde.configure(originals(), false); return serde; } catch (final Exception e) { throw new StreamsException(String.format("Failed to configure value serde %s", get(DEFAULT_VALUE_SERDE_CLASS_CONFIG)), e); } } StreamsConfig(final Map<?, ?> props); static String consumerPrefix(final String consumerProp); static String producerPrefix(final String producerProp); static ConfigDef configDef(); Map<String, Object> getConsumerConfigs(final StreamThread streamThread,
final String groupId,
final String clientId); Map<String, Object> getRestoreConsumerConfigs(final String clientId); Map<String, Object> getProducerConfigs(final String clientId); @Deprecated Serde keySerde(); Serde defaultKeySerde(); @Deprecated Serde valueSerde(); Serde defaultValueSerde(); TimestampExtractor defaultTimestampExtractor(); static void main(final String[] args); static final String CONSUMER_PREFIX; static final String PRODUCER_PREFIX; static final String AT_LEAST_ONCE; static final String EXACTLY_ONCE; static final String APPLICATION_ID_CONFIG; static final String APPLICATION_SERVER_CONFIG; static final String BOOTSTRAP_SERVERS_CONFIG; static final String BUFFERED_RECORDS_PER_PARTITION_CONFIG; static final String CACHE_MAX_BYTES_BUFFERING_CONFIG; static final String CLIENT_ID_CONFIG; static final String COMMIT_INTERVAL_MS_CONFIG; static final String CONNECTIONS_MAX_IDLE_MS_CONFIG; static final String DEFAULT_KEY_SERDE_CLASS_CONFIG; static final String DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG; static final String DEFAULT_VALUE_SERDE_CLASS_CONFIG; @Deprecated
static final String KEY_SERDE_CLASS_CONFIG; static final String METADATA_MAX_AGE_CONFIG; static final String METRICS_NUM_SAMPLES_CONFIG; static final String METRICS_RECORDING_LEVEL_CONFIG; static final String METRIC_REPORTER_CLASSES_CONFIG; static final String METRICS_SAMPLE_WINDOW_MS_CONFIG; static final String NUM_STANDBY_REPLICAS_CONFIG; static final String NUM_STREAM_THREADS_CONFIG; static final String PARTITION_GROUPER_CLASS_CONFIG; static final String POLL_MS_CONFIG; static final String PROCESSING_GUARANTEE_CONFIG; static final String RECEIVE_BUFFER_CONFIG; static final String RECONNECT_BACKOFF_MS_CONFIG; static final String RECONNECT_BACKOFF_MAX_MS_CONFIG; static final String REPLICATION_FACTOR_CONFIG; static final String REQUEST_TIMEOUT_MS_CONFIG; static final String RETRY_BACKOFF_MS_CONFIG; static final String ROCKSDB_CONFIG_SETTER_CLASS_CONFIG; static final String SECURITY_PROTOCOL_CONFIG; static final String SEND_BUFFER_CONFIG; static final String STATE_CLEANUP_DELAY_MS_CONFIG; static final String STATE_DIR_CONFIG; @Deprecated
static final String TIMESTAMP_EXTRACTOR_CLASS_CONFIG; @Deprecated
static final String VALUE_SERDE_CLASS_CONFIG; static final String WINDOW_STORE_CHANGE_LOG_ADDITIONAL_RETENTION_MS_CONFIG; @Deprecated
static final String ZOOKEEPER_CONNECT_CONFIG; }
|
@Test(expected = StreamsException.class) public void shouldThrowStreamsExceptionIfValueSerdeConfigFails() throws Exception { props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, MisconfiguredSerde.class); final StreamsConfig streamsConfig = new StreamsConfig(props); streamsConfig.defaultValueSerde(); }
|
JsonConverter implements Converter { @Override public void configure(Map<String, ?> configs, boolean isKey) { Object enableConfigsVal = configs.get(SCHEMAS_ENABLE_CONFIG); if (enableConfigsVal != null) enableSchemas = enableConfigsVal.toString().equals("true"); serializer.configure(configs, isKey); deserializer.configure(configs, isKey); Object cacheSizeVal = configs.get(SCHEMAS_CACHE_SIZE_CONFIG); if (cacheSizeVal != null) cacheSize = Integer.parseInt((String) cacheSizeVal); fromConnectSchemaCache = new SynchronizedCache<>(new LRUCache<Schema, ObjectNode>(cacheSize)); toConnectSchemaCache = new SynchronizedCache<>(new LRUCache<JsonNode, Schema>(cacheSize)); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
@Test public void testJsonSchemaCacheSizeFromConfigFile() throws URISyntaxException, IOException { URL url = getClass().getResource("/connect-test.properties"); File propFile = new File(url.toURI()); String workerPropsFile = propFile.getAbsolutePath(); Map<String, String> workerProps = !workerPropsFile.isEmpty() ? Utils.propsToStringMap(Utils.loadProps(workerPropsFile)) : Collections.<String, String>emptyMap(); JsonConverter rc = new JsonConverter(); rc.configure(workerProps, false); }
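The same configuration can be supplied directly as a map instead of a properties file; "schemas.enable" and "schemas.cache.size" are assumed here to be the literal keys behind SCHEMAS_ENABLE_CONFIG and SCHEMAS_CACHE_SIZE_CONFIG read by configure() above. A minimal sketch:

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.connect.json.JsonConverter;

final class JsonConverterExample {
    public static void main(String[] args) {
        JsonConverter converter = new JsonConverter();
        Map<String, String> config = new HashMap<>();
        config.put("schemas.enable", "false");    // disable schema envelopes
        config.put("schemas.cache.size", "100");  // bound the two schema caches
        converter.configure(config, false);       // false => configure as a value converter
    }
}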
|
Stores { public static StoreFactory create(final String name) { return new StoreFactory() { @Override public <K> ValueFactory<K> withKeys(final Serde<K> keySerde) { return new ValueFactory<K>() { @Override public <V> KeyValueFactory<K, V> withValues(final Serde<V> valueSerde) { return new KeyValueFactory<K, V>() { @Override public InMemoryKeyValueFactory<K, V> inMemory() { return new InMemoryKeyValueFactory<K, V>() { private int capacity = Integer.MAX_VALUE; private final Map<String, String> logConfig = new HashMap<>(); private boolean logged = true; @Override public InMemoryKeyValueFactory<K, V> maxEntries(int capacity) { if (capacity < 1) throw new IllegalArgumentException("The capacity must be positive"); this.capacity = capacity; return this; } @Override public InMemoryKeyValueFactory<K, V> enableLogging(final Map<String, String> config) { logged = true; logConfig.putAll(config); return this; } @Override public InMemoryKeyValueFactory<K, V> disableLogging() { logged = false; logConfig.clear(); return this; } @Override public StateStoreSupplier build() { log.trace("Creating InMemory Store name={} capacity={} logged={}", name, capacity, logged); if (capacity < Integer.MAX_VALUE) { return new InMemoryLRUCacheStoreSupplier<>(name, capacity, keySerde, valueSerde, logged, logConfig); } return new InMemoryKeyValueStoreSupplier<>(name, keySerde, valueSerde, logged, logConfig); } }; } @Override public PersistentKeyValueFactory<K, V> persistent() { return new PersistentKeyValueFactory<K, V>() { public boolean cachingEnabled; private long windowSize; private final Map<String, String> logConfig = new HashMap<>(); private int numSegments = 0; private long retentionPeriod = 0L; private boolean retainDuplicates = false; private boolean sessionWindows; private boolean logged = true; @Override public PersistentKeyValueFactory<K, V> windowed(final long windowSize, final long retentionPeriod, final int numSegments, final boolean retainDuplicates) { this.windowSize = windowSize; this.numSegments = numSegments; this.retentionPeriod = retentionPeriod; this.retainDuplicates = retainDuplicates; this.sessionWindows = false; return this; } @Override public PersistentKeyValueFactory<K, V> sessionWindowed(final long retentionPeriod) { this.sessionWindows = true; this.retentionPeriod = retentionPeriod; return this; } @Override public PersistentKeyValueFactory<K, V> enableLogging(final Map<String, String> config) { logged = true; logConfig.putAll(config); return this; } @Override public PersistentKeyValueFactory<K, V> disableLogging() { logged = false; logConfig.clear(); return this; } @Override public PersistentKeyValueFactory<K, V> enableCaching() { cachingEnabled = true; return this; } @Override public StateStoreSupplier build() { log.trace("Creating RocksDb Store name={} numSegments={} logged={}", name, numSegments, logged); if (sessionWindows) { return new RocksDBSessionStoreSupplier<>(name, retentionPeriod, keySerde, valueSerde, logged, logConfig, cachingEnabled); } else if (numSegments > 0) { return new RocksDBWindowStoreSupplier<>(name, retentionPeriod, numSegments, retainDuplicates, keySerde, valueSerde, windowSize, logged, logConfig, cachingEnabled); } return new RocksDBKeyValueStoreSupplier<>(name, keySerde, valueSerde, logged, logConfig, cachingEnabled); } }; } }; } }; } }; } static StoreFactory create(final String name); }
|
@Test public void shouldCreateInMemoryStoreSupplierWithLoggedConfig() throws Exception { final StateStoreSupplier supplier = Stores.create("store") .withKeys(Serdes.String()) .withValues(Serdes.String()) .inMemory() .enableLogging(Collections.singletonMap("retention.ms", "1000")) .build(); final Map<String, String> config = supplier.logConfig(); assertTrue(supplier.loggingEnabled()); assertEquals("1000", config.get("retention.ms")); }
@Test public void shouldCreateInMemoryStoreSupplierNotLogged() throws Exception { final StateStoreSupplier supplier = Stores.create("store") .withKeys(Serdes.String()) .withValues(Serdes.String()) .inMemory() .disableLogging() .build(); assertFalse(supplier.loggingEnabled()); }
@Test public void shouldCreatePersistentStoreSupplierWithLoggedConfig() throws Exception { final StateStoreSupplier supplier = Stores.create("store") .withKeys(Serdes.String()) .withValues(Serdes.String()) .persistent() .enableLogging(Collections.singletonMap("retention.ms", "1000")) .build(); final Map<String, String> config = supplier.logConfig(); assertTrue(supplier.loggingEnabled()); assertEquals("1000", config.get("retention.ms")); }
@Test public void shouldCreatePersistentStoreSupplierNotLogged() throws Exception { final StateStoreSupplier supplier = Stores.create("store") .withKeys(Serdes.String()) .withValues(Serdes.String()) .persistent() .disableLogging() .build(); assertFalse(supplier.loggingEnabled()); }
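The same fluent chain builds windowed and session stores through the persistent() branch shown above; the numeric arguments below are illustrative, and the import paths are assumed for this Kafka version:

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.processor.StateStoreSupplier;
import org.apache.kafka.streams.state.Stores;

final class StoreExamples {
    static StateStoreSupplier windowedStore() {
        // windowSize, retentionPeriod, numSegments, retainDuplicates
        return Stores.create("windowed-store")
            .withKeys(Serdes.String())
            .withValues(Serdes.Long())
            .persistent()
            .windowed(60000L, 3600000L, 3, false)
            .enableCaching()
            .build();
    }

    static StateStoreSupplier sessionStore() {
        // session-windowed stores take only a retention period
        return Stores.create("session-store")
            .withKeys(Serdes.String())
            .withValues(Serdes.Long())
            .persistent()
            .sessionWindowed(3600000L)
            .build();
    }
}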
|
SerializedKeyValueIterator implements KeyValueIterator<K, V> { @Override public KeyValue<K, V> next() { if (!hasNext()) { throw new NoSuchElementException(); } final KeyValue<Bytes, byte[]> next = bytesIterator.next(); return KeyValue.pair(serdes.keyFrom(next.key.get()), serdes.valueFrom(next.value)); } SerializedKeyValueIterator(final KeyValueIterator<Bytes, byte[]> bytesIterator,
final StateSerdes<K, V> serdes); @Override void close(); @Override K peekNextKey(); @Override boolean hasNext(); @Override KeyValue<K, V> next(); @Override void remove(); }
|
@Test public void shouldReturnNextValueWhenItExists() throws Exception { assertThat(serializedKeyValueIterator.next(), equalTo(KeyValue.pair("hi", "there"))); assertThat(serializedKeyValueIterator.next(), equalTo(KeyValue.pair("hello", "world"))); }
@Test public void shouldThrowNoSuchElementOnNextWhenIteratorExhausted() throws Exception { advanceIteratorToEnd(); try { serializedKeyValueIterator.next(); fail("Expected NoSuchElementException on exhausted iterator"); } catch (final NoSuchElementException nse) { } }
|
SerializedKeyValueIterator implements KeyValueIterator<K, V> { @Override public boolean hasNext() { return bytesIterator.hasNext(); } SerializedKeyValueIterator(final KeyValueIterator<Bytes, byte[]> bytesIterator,
final StateSerdes<K, V> serdes); @Override void close(); @Override K peekNextKey(); @Override boolean hasNext(); @Override KeyValue<K, V> next(); @Override void remove(); }
|
@Test public void shouldReturnFalseOnHasNextWhenNoMoreResults() throws Exception { advanceIteratorToEnd(); assertFalse(serializedKeyValueIterator.hasNext()); }
@Test public void shouldReturnTrueOnHasNextWhenMoreResults() { assertTrue(serializedKeyValueIterator.hasNext()); }
|
SerializedKeyValueIterator implements KeyValueIterator<K, V> { @Override public void remove() { throw new UnsupportedOperationException("remove() is not supported in " + getClass().getName()); } SerializedKeyValueIterator(final KeyValueIterator<Bytes, byte[]> bytesIterator,
final StateSerdes<K, V> serdes); @Override void close(); @Override K peekNextKey(); @Override boolean hasNext(); @Override KeyValue<K, V> next(); @Override void remove(); }
|
@Test(expected = UnsupportedOperationException.class) public void shouldThrowUnsupportedOperationOnRemove() throws Exception { serializedKeyValueIterator.remove(); }
|
SessionKeySchema implements SegmentedBytesStore.KeySchema { @Override public HasNextCondition hasNextCondition(final Bytes binaryKeyFrom, final Bytes binaryKeyTo, final long from, final long to) { return new HasNextCondition() { @Override public boolean hasNext(final KeyValueIterator<Bytes, ?> iterator) { while (iterator.hasNext()) { final Bytes bytes = iterator.peekNextKey(); final Windowed<Bytes> windowedKey = SessionKeySerde.fromBytes(bytes); if (windowedKey.key().compareTo(binaryKeyFrom) >= 0 && windowedKey.key().compareTo(binaryKeyTo) <= 0 && windowedKey.window().end() >= from && windowedKey.window().start() <= to) { return true; } iterator.next(); } return false; } }; } @Override void init(final String topic); @Override Bytes upperRangeFixedSize(final Bytes key, final long to); @Override Bytes lowerRangeFixedSize(final Bytes key, final long from); @Override Bytes upperRange(Bytes key, long to); @Override Bytes lowerRange(Bytes key, long from); @Override long segmentTimestamp(final Bytes key); @Override HasNextCondition hasNextCondition(final Bytes binaryKeyFrom, final Bytes binaryKeyTo, final long from, final long to); @Override List<Segment> segmentsToSearch(final Segments segments, final long from, final long to); }
|
@Test public void shouldFetchExactKeysSkippingLongerKeys() throws Exception { final Bytes key = Bytes.wrap(new byte[]{0}); final List<Integer> result = getValues(sessionKeySchema.hasNextCondition(key, key, 0, Long.MAX_VALUE)); assertThat(result, equalTo(Arrays.asList(2, 4))); }
@Test public void shouldFetchExactKeySkippingShorterKeys() throws Exception { final Bytes key = Bytes.wrap(new byte[]{0, 0}); final HasNextCondition hasNextCondition = sessionKeySchema.hasNextCondition(key, key, 0, Long.MAX_VALUE); final List<Integer> results = getValues(hasNextCondition); assertThat(results, equalTo(Arrays.asList(1, 5))); }
|
SessionKeySchema implements SegmentedBytesStore.KeySchema { @Override public Bytes upperRange(Bytes key, long to) { final byte[] maxSuffix = ByteBuffer.allocate(SUFFIX_SIZE) .putLong(to) .putLong(to) .array(); return OrderedBytes.upperRange(key, maxSuffix); } @Override void init(final String topic); @Override Bytes upperRangeFixedSize(final Bytes key, final long to); @Override Bytes lowerRangeFixedSize(final Bytes key, final long from); @Override Bytes upperRange(Bytes key, long to); @Override Bytes lowerRange(Bytes key, long from); @Override long segmentTimestamp(final Bytes key); @Override HasNextCondition hasNextCondition(final Bytes binaryKeyFrom, final Bytes binaryKeyTo, final long from, final long to); @Override List<Segment> segmentsToSearch(final Segments segments, final long from, final long to); }
|
@Test public void testUpperBoundWithLargeTimestamps() throws Exception { Bytes upper = sessionKeySchema.upperRange(Bytes.wrap(new byte[]{0xA, 0xB, 0xC}), Long.MAX_VALUE); assertThat( "shorter key with max timestamp should be in range", upper.compareTo( SessionKeySerde.bytesToBinary( new Windowed<>( Bytes.wrap(new byte[]{0xA}), new SessionWindow(Long.MAX_VALUE, Long.MAX_VALUE)) ) ) >= 0 ); assertThat( "shorter key with max timestamp should be in range", upper.compareTo( SessionKeySerde.bytesToBinary( new Windowed<>( Bytes.wrap(new byte[]{0xA, 0xB}), new SessionWindow(Long.MAX_VALUE, Long.MAX_VALUE)) ) ) >= 0 ); assertThat(upper, equalTo(SessionKeySerde.bytesToBinary( new Windowed<>(Bytes.wrap(new byte[]{0xA}), new SessionWindow(Long.MAX_VALUE, Long.MAX_VALUE)))) ); }
@Test public void testUpperBoundWithKeyBytesLargerThanFirstTimestampByte() throws Exception { Bytes upper = sessionKeySchema.upperRange(Bytes.wrap(new byte[]{0xA, (byte) 0x8F, (byte) 0x9F}), Long.MAX_VALUE); assertThat( "shorter key with max timestamp should be in range", upper.compareTo( SessionKeySerde.bytesToBinary( new Windowed<>( Bytes.wrap(new byte[]{0xA, (byte) 0x8F}), new SessionWindow(Long.MAX_VALUE, Long.MAX_VALUE)) ) ) >= 0 ); assertThat(upper, equalTo(SessionKeySerde.bytesToBinary( new Windowed<>(Bytes.wrap(new byte[]{0xA, (byte) 0x8F, (byte) 0x9F}), new SessionWindow(Long.MAX_VALUE, Long.MAX_VALUE)))) ); }
@Test public void testUpperBoundWithZeroTimestamp() throws Exception { Bytes upper = sessionKeySchema.upperRange(Bytes.wrap(new byte[]{0xA, 0xB, 0xC}), 0); assertThat(upper, equalTo(SessionKeySerde.bytesToBinary( new Windowed<>(Bytes.wrap(new byte[]{0xA, 0xB, 0xC}), new SessionWindow(0, 0)))) ); }
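Taken together, these three tests pin down the upper-bound construction: leading key bytes are kept only while they are unsigned-greater-or-equal to the first byte of the max suffix (at least one byte is always kept), after which the suffix is appended, so a large timestamp can shorten the key prefix of the bound. The sketch below reconstructs that behaviour from the test expectations; it is an inference, not the actual OrderedBytes implementation.

import java.nio.ByteBuffer;

// Inferred reconstruction of upperRange(key, maxSuffix): keep leading key bytes while
// they are unsigned-greater-or-equal to the first suffix byte (always at least one),
// then append the max suffix. Sketch only, not the real OrderedBytes code.
public class UpperRangeSketch {
    static byte[] upperRange(final byte[] key, final byte[] maxSuffix) {
        final ByteBuffer rangeEnd = ByteBuffer.allocate(key.length + maxSuffix.length);
        int i = 0;
        while (i < key.length && (i == 0 || (key[i] & 0xFF) >= (maxSuffix[0] & 0xFF))) {
            rangeEnd.put(key[i++]);
        }
        rangeEnd.put(maxSuffix);
        rangeEnd.flip();
        final byte[] result = new byte[rangeEnd.remaining()];
        rangeEnd.get(result);
        return result;
    }

    public static void main(final String[] args) {
        final byte[] maxSuffix = ByteBuffer.allocate(8).putLong(Long.MAX_VALUE).array();
        // First key byte 0xA < 0x7F, so only one key byte survives -- matching
        // testUpperBoundWithLargeTimestamps, where the bound uses the key {0xA}.
        System.out.println(upperRange(new byte[]{0xA, 0xB, 0xC}, maxSuffix).length); // 9
    }
}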
|
SessionKeySchema implements SegmentedBytesStore.KeySchema { @Override public Bytes lowerRange(Bytes key, long from) { return OrderedBytes.lowerRange(key, MIN_SUFFIX); } @Override void init(final String topic); @Override Bytes upperRangeFixedSize(final Bytes key, final long to); @Override Bytes lowerRangeFixedSize(final Bytes key, final long from); @Override Bytes upperRange(Bytes key, long to); @Override Bytes lowerRange(Bytes key, long from); @Override long segmentTimestamp(final Bytes key); @Override HasNextCondition hasNextCondition(final Bytes binaryKeyFrom, final Bytes binaryKeyTo, final long from, final long to); @Override List<Segment> segmentsToSearch(final Segments segments, final long from, final long to); }
|
@Test public void testLowerBoundWithZeroTimestamp() throws Exception { Bytes lower = sessionKeySchema.lowerRange(Bytes.wrap(new byte[]{0xA, 0xB, 0xC}), 0); assertThat(lower, equalTo(SessionKeySerde.bytesToBinary(new Windowed<>(Bytes.wrap(new byte[]{0xA, 0xB, 0xC}), new SessionWindow(0, 0))))); }
@Test public void testLowerBoundMatchesTrailingZeros() throws Exception { Bytes lower = sessionKeySchema.lowerRange(Bytes.wrap(new byte[]{0xA, 0xB, 0xC}), Long.MAX_VALUE); assertThat( "appending zeros to key should still be in range", lower.compareTo( SessionKeySerde.bytesToBinary( new Windowed<>( Bytes.wrap(new byte[]{0xA, 0xB, 0xC, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}), new SessionWindow(Long.MAX_VALUE, Long.MAX_VALUE)) ) ) < 0 ); assertThat(lower, equalTo(SessionKeySerde.bytesToBinary(new Windowed<>(Bytes.wrap(new byte[]{0xA, 0xB, 0xC}), new SessionWindow(0, 0))))); }
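The lower bound is simpler: the tests are consistent with the key being padded with a zeroed MIN_SUFFIX (presumably 16 zero bytes, mirroring the two timestamps in the upper-range suffix), so any longer key that only appends zero bytes still sorts at or after it. A sketch under that assumption:

import java.nio.ByteBuffer;
import java.util.Arrays;

// Sketch of lowerRange(key, minSuffix): the lower bound is just the key followed by a
// zeroed suffix, which is why testLowerBoundMatchesTrailingZeros sorts the zero-padded
// key after it. Inferred from the tests; not the actual OrderedBytes code.
public class LowerRangeSketch {
    static byte[] lowerRange(final byte[] key, final byte[] minSuffix) {
        return ByteBuffer.allocate(key.length + minSuffix.length)
            .put(key)
            .put(minSuffix)
            .array();
    }

    public static void main(final String[] args) {
        final byte[] minSuffix = new byte[16]; // two zeroed long timestamps (assumption)
        final byte[] lower = lowerRange(new byte[]{0xA, 0xB, 0xC}, minSuffix);
        System.out.println(Arrays.toString(lower)); // [10, 11, 12, 0, 0, ..., 0]
    }
}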
|
CompositeReadOnlySessionStore implements ReadOnlySessionStore<K, V> { private KeyValueIterator<Windowed<K>, V> fetch(Fetcher<K, V> fetcher) { final List<ReadOnlySessionStore<K, V>> stores = storeProvider.stores(storeName, queryableStoreType); for (final ReadOnlySessionStore<K, V> store : stores) { try { final KeyValueIterator<Windowed<K>, V> result = fetcher.fetch(store); if (!result.hasNext()) { result.close(); } else { return result; } } catch (final InvalidStateStoreException ise) { throw new InvalidStateStoreException("State store [" + storeName + "] is not available anymore" + " and may have been migrated to another instance; " + "please re-discover its location from the state metadata."); } } return KeyValueIterators.emptyIterator(); } CompositeReadOnlySessionStore(final StateStoreProvider storeProvider,
final QueryableStoreType<ReadOnlySessionStore<K, V>> queryableStoreType,
final String storeName); @Override KeyValueIterator<Windowed<K>, V> fetch(final K key); @Override KeyValueIterator<Windowed<K>, V> fetch(final K from, final K to); }
|
@Test public void shouldFetchResultsFromUnderlyingSessionStore() throws Exception { underlyingSessionStore.put(new Windowed<>("a", new SessionWindow(0, 0)), 1L); underlyingSessionStore.put(new Windowed<>("a", new SessionWindow(10, 10)), 2L); final List<KeyValue<Windowed<String>, Long>> results = toList(sessionStore.fetch("a")); assertEquals(Arrays.asList(KeyValue.pair(new Windowed<>("a", new SessionWindow(0, 0)), 1L), KeyValue.pair(new Windowed<>("a", new SessionWindow(10, 10)), 2L)), results); }
@Test public void shouldReturnEmptyIteratorIfNoData() throws Exception { final KeyValueIterator<Windowed<String>, Long> result = sessionStore.fetch("b"); assertFalse(result.hasNext()); }
@Test public void shouldFindValueForKeyWhenMultiStores() throws Exception { final ReadOnlySessionStoreStub<String, Long> secondUnderlying = new ReadOnlySessionStoreStub<>(); stubProviderTwo.addStore(storeName, secondUnderlying); final Windowed<String> keyOne = new Windowed<>("key-one", new SessionWindow(0, 0)); final Windowed<String> keyTwo = new Windowed<>("key-two", new SessionWindow(0, 0)); underlyingSessionStore.put(keyOne, 0L); secondUnderlying.put(keyTwo, 10L); final List<KeyValue<Windowed<String>, Long>> keyOneResults = toList(sessionStore.fetch("key-one")); final List<KeyValue<Windowed<String>, Long>> keyTwoResults = toList(sessionStore.fetch("key-two")); assertEquals(Collections.singletonList(KeyValue.pair(keyOne, 0L)), keyOneResults); assertEquals(Collections.singletonList(KeyValue.pair(keyTwo, 10L)), keyTwoResults); }
@Test public void shouldNotGetValueFromOtherStores() throws Exception { final Windowed<String> expectedKey = new Windowed<>("foo", new SessionWindow(0, 0)); otherUnderlyingStore.put(new Windowed<>("foo", new SessionWindow(10, 10)), 10L); underlyingSessionStore.put(expectedKey, 1L); final KeyValueIterator<Windowed<String>, Long> result = sessionStore.fetch("foo"); assertEquals(KeyValue.pair(expectedKey, 1L), result.next()); assertFalse(result.hasNext()); }
@Test(expected = InvalidStateStoreException.class) public void shouldThrowInvalidStateStoreExceptionOnRebalance() throws Exception { final CompositeReadOnlySessionStore<String, String> store = new CompositeReadOnlySessionStore<>(new StateStoreProviderStub(true), QueryableStoreTypes.<String, String>sessionStore(), "whateva"); store.fetch("a"); }
@Test(expected = InvalidStateStoreException.class) public void shouldThrowInvalidStateStoreExceptionIfFetchThrows() throws Exception { underlyingSessionStore.setOpen(false); underlyingSessionStore.fetch("key"); }
|
NamedCache { synchronized void evict() { if (tail == null) { return; } final LRUNode eldest = tail; currentSizeBytes -= eldest.size(); remove(eldest); cache.remove(eldest.key); if (eldest.entry.isDirty()) { flush(eldest); } } NamedCache(final String name, final StreamsMetrics metrics); long size(); }
|
@Test public void shouldNotThrowNullPointerWhenCacheIsEmptyAndEvictionCalled() throws Exception { cache.evict(); }
|
NamedCache { synchronized void put(final Bytes key, final LRUCacheEntry value) { if (!value.isDirty() && dirtyKeys.contains(key)) { throw new IllegalStateException(String.format("Attempting to put a clean entry for key [%s] " + "into NamedCache [%s] when it already contains " + "a dirty entry for the same key", key, name)); } LRUNode node = cache.get(key); if (node != null) { numOverwrites++; currentSizeBytes -= node.size(); node.update(value); updateLRU(node); } else { node = new LRUNode(key, value); putHead(node); cache.put(key, node); } if (value.isDirty()) { dirtyKeys.remove(key); dirtyKeys.add(key); } currentSizeBytes += node.size(); } NamedCache(final String name, final StreamsMetrics metrics); long size(); }
|
@Test(expected = IllegalStateException.class) public void shouldThrowIllegalStateExceptionWhenTryingToOverwriteDirtyEntryWithCleanEntry() throws Exception { cache.put(Bytes.wrap(new byte[]{0}), new LRUCacheEntry(new byte[]{10}, true, 0, 0, 0, "")); cache.put(Bytes.wrap(new byte[]{0}), new LRUCacheEntry(new byte[]{10}, false, 0, 0, 0, "")); }
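The remove-then-add dance on dirtyKeys in put() only matters if the set preserves insertion order. Assuming dirtyKeys is a LinkedHashSet (the field type is not shown here), re-inserting moves the key to the end of the iteration order, so flush order follows the most recent dirty write. A minimal demonstration of that collection behaviour:

import java.util.LinkedHashSet;
import java.util.Set;

// Demonstrates why NamedCache.put() removes a key before re-adding it: assuming
// dirtyKeys is a LinkedHashSet, remove+add moves the key to the end of the iteration
// order, while a plain add() on an existing element leaves it where it was.
public class DirtyKeyOrderDemo {
    public static void main(final String[] args) {
        final Set<String> dirtyKeys = new LinkedHashSet<>();
        dirtyKeys.add("a");
        dirtyKeys.add("b");

        dirtyKeys.add("a");              // no-op: order stays [a, b]
        System.out.println(dirtyKeys);   // [a, b]

        dirtyKeys.remove("a");           // remove-then-add moves "a" to the end
        dirtyKeys.add("a");
        System.out.println(dirtyKeys);   // [b, a]
    }
}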
|
NamedCache { synchronized LRUCacheEntry get(final Bytes key) { if (key == null) { return null; } final LRUNode node = getInternal(key); if (node == null) { return null; } updateLRU(node); return node.entry; } NamedCache(final String name, final StreamsMetrics metrics); long size(); }
|
@Test public void shouldReturnNullIfKeyIsNull() throws Exception { assertNull(cache.get(null)); }
|
CachingWindowStore extends WrappedStateStore.AbstractStateStore implements WindowStore<K, V>, CachedStateStore<Windowed<K>, V> { @Override public synchronized WindowStoreIterator<V> fetch(final K key, final long timeFrom, final long timeTo) { validateStoreOpen(); final Bytes keyBytes = Bytes.wrap(serdes.rawKey(key)); final WindowStoreIterator<byte[]> underlyingIterator = underlying.fetch(keyBytes, timeFrom, timeTo); final Bytes cacheKeyFrom = cacheFunction.cacheKey(keySchema.lowerRangeFixedSize(keyBytes, timeFrom)); final Bytes cacheKeyTo = cacheFunction.cacheKey(keySchema.upperRangeFixedSize(keyBytes, timeTo)); final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.range(name, cacheKeyFrom, cacheKeyTo); final HasNextCondition hasNextCondition = keySchema.hasNextCondition(keyBytes, keyBytes, timeFrom, timeTo); final PeekingKeyValueIterator<Bytes, LRUCacheEntry> filteredCacheIterator = new FilteredCacheIterator( cacheIterator, hasNextCondition, cacheFunction ); return new MergedSortedCacheWindowStoreIterator<>(filteredCacheIterator, underlyingIterator, new StateSerdes<>(serdes.topic(), Serdes.Long(), serdes.valueSerde())); } CachingWindowStore(final WindowStore<Bytes, byte[]> underlying,
final Serde<K> keySerde,
final Serde<V> valueSerde,
final long windowSize,
final long segmentInterval); @SuppressWarnings("unchecked") @Override void init(final ProcessorContext context, final StateStore root); void setFlushListener(CacheFlushListener<Windowed<K>, V> flushListener); @Override synchronized void flush(); @Override void close(); @Override synchronized void put(final K key, final V value); @Override synchronized void put(final K key, final V value, final long timestamp); @Override synchronized WindowStoreIterator<V> fetch(final K key, final long timeFrom, final long timeTo); @Override KeyValueIterator<Windowed<K>, V> fetch(final K from, final K to, final long timeFrom, final long timeTo); }
|
@Test public void shouldFlushEvictedItemsIntoUnderlyingStore() throws Exception { int added = addItemsToCache(); final KeyValueIterator<Bytes, byte[]> iter = underlying.fetch(Bytes.wrap("0".getBytes()), DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP); final KeyValue<Bytes, byte[]> next = iter.next(); assertEquals(DEFAULT_TIMESTAMP, keySchema.segmentTimestamp(next.key)); assertArrayEquals("0".getBytes(), next.value); assertFalse(iter.hasNext()); assertEquals(added - 1, cache.size()); }
|
CachingSessionStore extends WrappedStateStore.AbstractStateStore implements SessionStore<K, AGG>, CachedStateStore<Windowed<K>, AGG> { @Override public KeyValueIterator<Windowed<K>, AGG> fetch(final K key) { return findSessions(key, 0, Long.MAX_VALUE); } CachingSessionStore(final SessionStore<Bytes, byte[]> bytesStore,
final Serde<K> keySerde,
final Serde<AGG> aggSerde,
final long segmentInterval); void init(final ProcessorContext context, final StateStore root); KeyValueIterator<Windowed<K>, AGG> findSessions(final K key,
final long earliestSessionEndTime,
final long latestSessionStartTime); @Override KeyValueIterator<Windowed<K>, AGG> findSessions(K keyFrom,
K keyTo,
long earliestSessionEndTime,
long latestSessionStartTime); @Override void remove(final Windowed<K> sessionKey); @Override void put(final Windowed<K> key, AGG value); @Override KeyValueIterator<Windowed<K>, AGG> fetch(final K key); @Override KeyValueIterator<Windowed<K>, AGG> fetch(K from, K to); void flush(); void close(); void setFlushListener(CacheFlushListener<Windowed<K>, AGG> flushListener); }
|
@Test public void shouldFlushItemsToStoreOnEviction() throws Exception { final List<KeyValue<Windowed<String>, Long>> added = addSessionsUntilOverflow("a", "b", "c", "d"); assertEquals(added.size() - 1, cache.size()); final KeyValueIterator<Bytes, byte[]> iterator = underlying.fetch(Bytes.wrap(added.get(0).key.key().getBytes()), 0, 0); final KeyValue<Bytes, byte[]> next = iterator.next(); assertEquals(added.get(0).key, SessionKeySerde.from(next.key.get(), Serdes.String().deserializer(), "dummy")); assertArrayEquals(serdes.rawValue(added.get(0).value), next.value); }
|
CachingSessionStore extends WrappedStateStore.AbstractStateStore implements SessionStore<K, AGG>, CachedStateStore<Windowed<K>, AGG> { public KeyValueIterator<Windowed<K>, AGG> findSessions(final K key, final long earliestSessionEndTime, final long latestSessionStartTime) { validateStoreOpen(); final Bytes binarySessionId = Bytes.wrap(serdes.rawKey(key)); final Bytes cacheKeyFrom = cacheFunction.cacheKey(keySchema.lowerRangeFixedSize(binarySessionId, earliestSessionEndTime)); final Bytes cacheKeyTo = cacheFunction.cacheKey(keySchema.upperRangeFixedSize(binarySessionId, latestSessionStartTime)); final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.range(cacheName, cacheKeyFrom, cacheKeyTo); final KeyValueIterator<Windowed<Bytes>, byte[]> storeIterator = bytesStore.findSessions( binarySessionId, earliestSessionEndTime, latestSessionStartTime ); final HasNextCondition hasNextCondition = keySchema.hasNextCondition(binarySessionId, binarySessionId, earliestSessionEndTime, latestSessionStartTime); final PeekingKeyValueIterator<Bytes, LRUCacheEntry> filteredCacheIterator = new FilteredCacheIterator(cacheIterator, hasNextCondition, cacheFunction); return new MergedSortedCacheSessionStoreIterator<>(filteredCacheIterator, storeIterator, serdes, cacheFunction); } CachingSessionStore(final SessionStore<Bytes, byte[]> bytesStore,
final Serde<K> keySerde,
final Serde<AGG> aggSerde,
final long segmentInterval); void init(final ProcessorContext context, final StateStore root); KeyValueIterator<Windowed<K>, AGG> findSessions(final K key,
final long earliestSessionEndTime,
final long latestSessionStartTime); @Override KeyValueIterator<Windowed<K>, AGG> findSessions(K keyFrom,
K keyTo,
long earliestSessionEndTime,
long latestSessionStartTime); @Override void remove(final Windowed<K> sessionKey); @Override void put(final Windowed<K> key, AGG value); @Override KeyValueIterator<Windowed<K>, AGG> fetch(final K key); @Override KeyValueIterator<Windowed<K>, AGG> fetch(K from, K to); void flush(); void close(); void setFlushListener(CacheFlushListener<Windowed<K>, AGG> flushListener); }
|
@Test public void shouldQueryItemsInCacheAndStore() throws Exception { final List<KeyValue<Windowed<String>, Long>> added = addSessionsUntilOverflow("a"); final KeyValueIterator<Windowed<String>, Long> iterator = cachingStore.findSessions("a", 0, added.size() * 10); final List<KeyValue<Windowed<String>, Long>> actual = toList(iterator); assertEquals(added, actual); }
|
SegmentedCacheFunction implements CacheFunction { @Override public Bytes key(Bytes cacheKey) { return Bytes.wrap(bytesFromCacheKey(cacheKey)); } SegmentedCacheFunction(KeySchema keySchema, long segmentInterval); @Override Bytes key(Bytes cacheKey); @Override Bytes cacheKey(Bytes key); long segmentId(Bytes key); }
|
@Test public void key() throws Exception { assertThat( cacheFunction.key(THE_CACHE_KEY), equalTo(THE_KEY) ); }
|
SegmentedCacheFunction implements CacheFunction { @Override public Bytes cacheKey(Bytes key) { final byte[] keyBytes = key.get(); ByteBuffer buf = ByteBuffer.allocate(SEGMENT_ID_BYTES + keyBytes.length); buf.putLong(segmentId(key)).put(keyBytes); return Bytes.wrap(buf.array()); } SegmentedCacheFunction(KeySchema keySchema, long segmentInterval); @Override Bytes key(Bytes cacheKey); @Override Bytes cacheKey(Bytes key); long segmentId(Bytes key); }
|
@Test public void cacheKey() throws Exception { final long segmentId = TIMESTAMP / SEGMENT_INTERVAL; final Bytes actualCacheKey = cacheFunction.cacheKey(THE_KEY); final ByteBuffer buffer = ByteBuffer.wrap(actualCacheKey.get()); assertThat(buffer.getLong(), equalTo(segmentId)); byte[] actualKey = new byte[buffer.remaining()]; buffer.get(actualKey); assertThat(Bytes.wrap(actualKey), equalTo(THE_KEY)); }
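cacheKey() prefixes the raw store key with an 8-byte big-endian segment id, so cache entries group by segment while keeping lexicographic key order within a segment; key() strips the prefix again. A standalone round-trip under an illustrative segment interval (the fixture's constants are not shown here, so the values below are examples only):

import java.nio.ByteBuffer;

// Round-trip of the segmented cache-key layout shown above: an 8-byte big-endian
// segment id followed by the raw store key. SEGMENT_INTERVAL and the timestamp are
// illustrative values, not the constants used in the test fixture.
public class SegmentedCacheKeyDemo {
    static final int SEGMENT_ID_BYTES = 8;
    static final long SEGMENT_INTERVAL = 60_000L;

    static byte[] cacheKey(final byte[] key, final long timestamp) {
        final long segmentId = timestamp / SEGMENT_INTERVAL;
        return ByteBuffer.allocate(SEGMENT_ID_BYTES + key.length)
            .putLong(segmentId)
            .put(key)
            .array();
    }

    public static void main(final String[] args) {
        final byte[] cacheKey = cacheKey(new byte[]{0xA, 0xB}, 130_000L);
        final ByteBuffer buf = ByteBuffer.wrap(cacheKey);
        System.out.println(buf.getLong());   // 2 -- 130000 / 60000
        System.out.println(buf.remaining()); // 2 -- the original key bytes follow
    }
}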
|
SegmentedCacheFunction implements CacheFunction { int compareSegmentedKeys(Bytes cacheKey, Bytes storeKey) { long storeSegmentId = segmentId(storeKey); long cacheSegmentId = ByteBuffer.wrap(cacheKey.get()).getLong(); final int segmentCompare = Long.compare(cacheSegmentId, storeSegmentId); if (segmentCompare == 0) { byte[] cacheKeyBytes = cacheKey.get(); byte[] storeKeyBytes = storeKey.get(); return Bytes.BYTES_LEXICO_COMPARATOR.compare( cacheKeyBytes, SEGMENT_ID_BYTES, cacheKeyBytes.length - SEGMENT_ID_BYTES, storeKeyBytes, 0, storeKeyBytes.length ); } else { return segmentCompare; } } SegmentedCacheFunction(KeySchema keySchema, long segmentInterval); @Override Bytes key(Bytes cacheKey); @Override Bytes cacheKey(Bytes key); long segmentId(Bytes key); }
|
@Test public void compareSegmentedKeys() throws Exception { assertThat( "same key in same segment should be ranked the same", cacheFunction.compareSegmentedKeys( cacheFunction.cacheKey(THE_KEY), THE_KEY ) == 0 ); final Bytes sameKeyInPriorSegment = WindowStoreUtils.toBinaryKey(new byte[]{0xA, 0xB, 0xC}, 1234, 42); assertThat( "same keys in different segments should be ordered according to segment", cacheFunction.compareSegmentedKeys( cacheFunction.cacheKey(sameKeyInPriorSegment), THE_KEY ) < 0 ); assertThat( "same keys in different segments should be ordered according to segment", cacheFunction.compareSegmentedKeys( cacheFunction.cacheKey(THE_KEY), sameKeyInPriorSegment ) > 0 ); final Bytes lowerKeyInSameSegment = WindowStoreUtils.toBinaryKey(new byte[]{0xA, 0xB, 0xB}, TIMESTAMP - 1, 0); assertThat( "different keys in same segments should be ordered according to key", cacheFunction.compareSegmentedKeys( cacheFunction.cacheKey(THE_KEY), lowerKeyInSameSegment ) > 0 ); assertThat( "different keys in same segments should be ordered according to key", cacheFunction.compareSegmentedKeys( cacheFunction.cacheKey(lowerKeyInSameSegment), THE_KEY ) < 0 ); }
|
ThreadCache { public LRUCacheEntry delete(final String namespace, final Bytes key) { final NamedCache cache = getCache(namespace); if (cache == null) { return null; } return cache.delete(key); } ThreadCache(final String name, long maxCacheSizeBytes, final StreamsMetrics metrics); long puts(); long gets(); long evicts(); long flushes(); void addDirtyEntryFlushListener(final String namespace, DirtyEntryFlushListener listener); void flush(final String namespace); LRUCacheEntry get(final String namespace, Bytes key); void put(final String namespace, Bytes key, LRUCacheEntry value); LRUCacheEntry putIfAbsent(final String namespace, Bytes key, LRUCacheEntry value); void putAll(final String namespace, final List<KeyValue<Bytes, LRUCacheEntry>> entries); LRUCacheEntry delete(final String namespace, final Bytes key); MemoryLRUCacheBytesIterator range(final String namespace, final Bytes from, final Bytes to); MemoryLRUCacheBytesIterator all(final String namespace); long size(); }
|
@Test public void shouldNotBlowUpOnNonExistentNamespaceWhenDeleting() throws Exception { final ThreadCache cache = new ThreadCache("testCache", 10000L, new MockStreamsMetrics(new Metrics())); assertNull(cache.delete("name", Bytes.wrap(new byte[]{1}))); }
|
ThreadCache { public MemoryLRUCacheBytesIterator range(final String namespace, final Bytes from, final Bytes to) { final NamedCache cache = getCache(namespace); if (cache == null) { return new MemoryLRUCacheBytesIterator(Collections.<Bytes>emptyIterator(), new NamedCache(namespace, this.metrics)); } return new MemoryLRUCacheBytesIterator(cache.keyRange(from, to), cache); } ThreadCache(final String name, long maxCacheSizeBytes, final StreamsMetrics metrics); long puts(); long gets(); long evicts(); long flushes(); void addDirtyEntryFlushListener(final String namespace, DirtyEntryFlushListener listener); void flush(final String namespace); LRUCacheEntry get(final String namespace, Bytes key); void put(final String namespace, Bytes key, LRUCacheEntry value); LRUCacheEntry putIfAbsent(final String namespace, Bytes key, LRUCacheEntry value); void putAll(final String namespace, final List<KeyValue<Bytes, LRUCacheEntry>> entries); LRUCacheEntry delete(final String namespace, final Bytes key); MemoryLRUCacheBytesIterator range(final String namespace, final Bytes from, final Bytes to); MemoryLRUCacheBytesIterator all(final String namespace); long size(); }
|
@Test(expected = NoSuchElementException.class) public void shouldThrowIfNoPeekNextKey() throws Exception { final ThreadCache cache = new ThreadCache("testCache", 10000L, new MockStreamsMetrics(new Metrics())); final ThreadCache.MemoryLRUCacheBytesIterator iterator = cache.range("", Bytes.wrap(new byte[]{0}), Bytes.wrap(new byte[]{1})); iterator.peekNextKey(); }
@Test public void shouldReturnFalseIfNoNextKey() throws Exception { final ThreadCache cache = new ThreadCache("testCache", 10000L, new MockStreamsMetrics(new Metrics())); final ThreadCache.MemoryLRUCacheBytesIterator iterator = cache.range("", Bytes.wrap(new byte[]{0}), Bytes.wrap(new byte[]{1})); assertFalse(iterator.hasNext()); }
|
Segments { long segmentId(long timestamp) { return timestamp / segmentInterval; } Segments(final String name, final long retentionPeriod, final int numSegments); void close(); }
|
@Test public void shouldGetSegmentIdsFromTimestamp() throws Exception { assertEquals(0, segments.segmentId(0)); assertEquals(1, segments.segmentId(60000)); assertEquals(2, segments.segmentId(120000)); assertEquals(3, segments.segmentId(180000)); }
@Test public void shouldBaseSegmentIntervalOnRetentionAndNumSegments() throws Exception { final Segments segments = new Segments("test", 8 * 60 * 1000, 5); assertEquals(0, segments.segmentId(0)); assertEquals(0, segments.segmentId(60000)); assertEquals(1, segments.segmentId(120000)); }
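The two tests jointly determine the interval arithmetic: the default fixture steps every 60 000 ms, while the explicit store (480 000 ms retention, five segments) steps every 120 000 ms, which is consistent with segmentInterval = retentionPeriod / (numSegments - 1). The check below works through that inference; the formula is deduced from the expected ids, not quoted from the Segments source.

// Worked check of the segment-interval arithmetic implied by the tests above.
// segmentInterval = retentionPeriod / (numSegments - 1) is an inference from the
// expected ids, not a quote of the Segments implementation.
public class SegmentIntervalCheck {
    static long segmentId(final long timestamp, final long retentionPeriod, final int numSegments) {
        final long segmentInterval = retentionPeriod / (numSegments - 1);
        return timestamp / segmentInterval;
    }

    public static void main(final String[] args) {
        final long retention = 8 * 60 * 1000; // 480 000 ms, as in the second test
        System.out.println(segmentId(0, retention, 5));       // 0
        System.out.println(segmentId(60_000, retention, 5));  // 0
        System.out.println(segmentId(120_000, retention, 5)); // 1
    }
}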
|
Segments { String segmentName(long segmentId) { return name + "-" + formatter.format(new Date(segmentId * segmentInterval)); } Segments(final String name, final long retentionPeriod, final int numSegments); void close(); }
|
@Test public void shouldGetSegmentNameFromId() throws Exception { assertEquals("test-197001010000", segments.segmentName(0)); assertEquals("test-197001010001", segments.segmentName(1)); assertEquals("test-197001010002", segments.segmentName(2)); }
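The asserted names match formatting segmentId * segmentInterval as a UTC timestamp with a yyyyMMddHHmm pattern: with a 60 000 ms interval, segment 1 lands at 00:01 on 1970-01-01. A sketch under those assumptions (both the pattern and the interval are inferred from the expected strings, not quoted from the implementation):

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

// Reproduces the segment names asserted above, assuming a "yyyyMMddHHmm" UTC
// formatter and a 60 000 ms segment interval -- both inferred from the expected
// strings, not taken from the Segments source.
public class SegmentNameSketch {
    public static void main(final String[] args) {
        final SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMddHHmm");
        formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
        final long segmentInterval = 60_000L;
        for (long segmentId = 0; segmentId < 3; segmentId++) {
            System.out.println("test-" + formatter.format(new Date(segmentId * segmentInterval)));
        }
        // test-197001010000, test-197001010001, test-197001010002
    }
}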
|
Segments { Segment getOrCreateSegment(final long segmentId, final ProcessorContext context) { if (segmentId > maxSegmentId - numSegments) { final long key = segmentId % numSegments; final Segment segment = segments.get(key); if (!isSegment(segment, segmentId)) { cleanup(segmentId); } Segment newSegment = new Segment(segmentName(segmentId), name, segmentId); Segment previousSegment = segments.putIfAbsent(key, newSegment); if (previousSegment == null) { newSegment.openDB(context); maxSegmentId = segmentId > maxSegmentId ? segmentId : maxSegmentId; if (minSegmentId == Long.MAX_VALUE) { minSegmentId = maxSegmentId; } } return previousSegment == null ? newSegment : previousSegment; } else { return null; } } Segments(final String name, final long retentionPeriod, final int numSegments); void close(); }
|
@Test public void shouldCreateSegments() throws Exception { final Segment segment1 = segments.getOrCreateSegment(0, context); final Segment segment2 = segments.getOrCreateSegment(1, context); final Segment segment3 = segments.getOrCreateSegment(2, context); assertTrue(new File(context.stateDir(), "test/test-197001010000").isDirectory()); assertTrue(new File(context.stateDir(), "test/test-197001010001").isDirectory()); assertTrue(new File(context.stateDir(), "test/test-197001010002").isDirectory()); assertEquals(true, segment1.isOpen()); assertEquals(true, segment2.isOpen()); assertEquals(true, segment3.isOpen()); }
@Test public void shouldNotCreateSegmentThatIsAlreadyExpired() throws Exception { segments.getOrCreateSegment(7, context); assertNull(segments.getOrCreateSegment(0, context)); assertFalse(new File(context.stateDir(), "test/test-197001010000").exists()); }
@Test public void shouldCleanupSegmentsThatHaveExpired() throws Exception { final Segment segment1 = segments.getOrCreateSegment(0, context); final Segment segment2 = segments.getOrCreateSegment(0, context); final Segment segment3 = segments.getOrCreateSegment(7, context); assertFalse(segment1.isOpen()); assertFalse(segment2.isOpen()); assertTrue(segment3.isOpen()); assertFalse(new File(context.stateDir(), "test/test-197001010000").exists()); assertFalse(new File(context.stateDir(), "test/test-197001010001").exists()); assertTrue(new File(context.stateDir(), "test/test-197001010007").exists()); }
@Test public void shouldRollSegments() throws Exception { segments.getOrCreateSegment(0, context); verifyCorrectSegments(0, 1); segments.getOrCreateSegment(1, context); verifyCorrectSegments(0, 2); segments.getOrCreateSegment(2, context); verifyCorrectSegments(0, 3); segments.getOrCreateSegment(3, context); verifyCorrectSegments(0, 4); segments.getOrCreateSegment(4, context); verifyCorrectSegments(0, 5); segments.getOrCreateSegment(5, context); verifyCorrectSegments(1, 5); segments.getOrCreateSegment(6, context); verifyCorrectSegments(2, 5); }
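getOrCreateSegment keeps a sliding window of live segments: a segment is created only when segmentId > maxSegmentId - numSegments, and it occupies slot segmentId % numSegments, evicting whatever older segment held that slot. The check below works through the expiry condition with numSegments = 5, the value the rolling test appears to use for this fixture:

// Worked check of the expiry condition in getOrCreateSegment: with numSegments
// apparently 5 in the fixture (inferred from shouldRollSegments), creating segment 7
// makes any id <= 2 expired, which is why getOrCreateSegment(0) returns null in
// shouldNotCreateSegmentThatIsAlreadyExpired.
public class SegmentExpiryCheck {
    public static void main(final String[] args) {
        final int numSegments = 5;
        final long maxSegmentId = 7;
        for (long segmentId = 0; segmentId <= maxSegmentId; segmentId++) {
            final boolean live = segmentId > maxSegmentId - numSegments;
            System.out.println("segment " + segmentId + " (slot " + (segmentId % numSegments)
                + "): " + (live ? "live" : "expired"));
        }
        // segments 0..2 print "expired"; 3..7 print "live"
    }
}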
|
CachingKeyValueStore extends WrappedStateStore.AbstractStateStore implements KeyValueStore<K, V>, CachedStateStore<K, V> { @Override public KeyValueIterator<K, V> all() { validateStoreOpen(); final KeyValueIterator<Bytes, byte[]> storeIterator = new DelegatingPeekingKeyValueIterator<>(this.name(), underlying.all()); final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.all(cacheName); return new MergedSortedCacheKeyValueStoreIterator<>(cacheIterator, storeIterator, serdes); } CachingKeyValueStore(final KeyValueStore<Bytes, byte[]> underlying,
final Serde<K> keySerde,
final Serde<V> valueSerde); @SuppressWarnings("unchecked") @Override void init(final ProcessorContext context, final StateStore root); void setFlushListener(final CacheFlushListener<K, V> flushListener); @Override synchronized void flush(); @Override void close(); @Override boolean persistent(); @Override boolean isOpen(); @Override synchronized V get(final K key); @Override KeyValueIterator<K, V> range(final K from, final K to); @Override KeyValueIterator<K, V> all(); @Override synchronized long approximateNumEntries(); @Override synchronized void put(final K key, final V value); @Override synchronized V putIfAbsent(final K key, final V value); @Override synchronized void putAll(final List<KeyValue<K, V>> entries); @Override synchronized V delete(final K key); @Override StateStore inner(); }
|
@Test public void shouldIterateAllStoredItems() throws Exception { int items = addItemsToCache(); final KeyValueIterator<String, String> all = store.all(); final List<String> results = new ArrayList<>(); while (all.hasNext()) { results.add(all.next().key); } assertEquals(items, results.size()); }
|
CachingKeyValueStore extends WrappedStateStore.AbstractStateStore implements KeyValueStore<K, V>, CachedStateStore<K, V> { @Override public KeyValueIterator<K, V> range(final K from, final K to) { validateStoreOpen(); final Bytes origFrom = Bytes.wrap(serdes.rawKey(from)); final Bytes origTo = Bytes.wrap(serdes.rawKey(to)); final KeyValueIterator<Bytes, byte[]> storeIterator = underlying.range(origFrom, origTo); final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.range(cacheName, origFrom, origTo); return new MergedSortedCacheKeyValueStoreIterator<>(cacheIterator, storeIterator, serdes); } CachingKeyValueStore(final KeyValueStore<Bytes, byte[]> underlying,
final Serde<K> keySerde,
final Serde<V> valueSerde); @SuppressWarnings("unchecked") @Override void init(final ProcessorContext context, final StateStore root); void setFlushListener(final CacheFlushListener<K, V> flushListener); @Override synchronized void flush(); @Override void close(); @Override boolean persistent(); @Override boolean isOpen(); @Override synchronized V get(final K key); @Override KeyValueIterator<K, V> range(final K from, final K to); @Override KeyValueIterator<K, V> all(); @Override synchronized long approximateNumEntries(); @Override synchronized void put(final K key, final V value); @Override synchronized V putIfAbsent(final K key, final V value); @Override synchronized void putAll(final List<KeyValue<K, V>> entries); @Override synchronized V delete(final K key); @Override StateStore inner(); }
|
@Test public void shouldIterateOverRange() throws Exception { int items = addItemsToCache(); final KeyValueIterator<String, String> range = store.range(String.valueOf(0), String.valueOf(items)); final List<String> results = new ArrayList<>(); while (range.hasNext()) { results.add(range.next().key); } assertEquals(items, results.size()); }
|
CachingKeyValueStore extends WrappedStateStore.AbstractStateStore implements KeyValueStore<K, V>, CachedStateStore<K, V> { @Override public synchronized V get(final K key) { validateStoreOpen(); if (key == null) { return null; } final byte[] rawKey = serdes.rawKey(key); return get(rawKey); } CachingKeyValueStore(final KeyValueStore<Bytes, byte[]> underlying,
final Serde<K> keySerde,
final Serde<V> valueSerde); @SuppressWarnings("unchecked") @Override void init(final ProcessorContext context, final StateStore root); void setFlushListener(final CacheFlushListener<K, V> flushListener); @Override synchronized void flush(); @Override void close(); @Override boolean persistent(); @Override boolean isOpen(); @Override synchronized V get(final K key); @Override KeyValueIterator<K, V> range(final K from, final K to); @Override KeyValueIterator<K, V> all(); @Override synchronized long approximateNumEntries(); @Override synchronized void put(final K key, final V value); @Override synchronized V putIfAbsent(final K key, final V value); @Override synchronized void putAll(final List<KeyValue<K, V>> entries); @Override synchronized V delete(final K key); @Override StateStore inner(); }
|
@Test public void shouldReturnNullIfKeyIsNull() throws Exception { assertNull(store.get(null)); }
|
SegmentIterator implements KeyValueIterator<Bytes, byte[]> { @Override public Bytes peekNextKey() { if (!hasNext()) { throw new NoSuchElementException(); } return currentIterator.peekNextKey(); } SegmentIterator(final Iterator<Segment> segments,
final HasNextCondition hasNextCondition,
final Bytes from,
final Bytes to); void close(); @Override Bytes peekNextKey(); @Override boolean hasNext(); KeyValue<Bytes, byte[]> next(); void remove(); }
|
@Test(expected = NoSuchElementException.class) public void shouldThrowNoSuchElementOnPeekNextKeyIfNoNext() throws Exception { iterator = new SegmentIterator(Arrays.asList(segmentOne, segmentTwo).iterator(), hasNextCondition, Bytes.wrap("f".getBytes()), Bytes.wrap("h".getBytes())); iterator.peekNextKey(); }
|
SegmentIterator implements KeyValueIterator<Bytes, byte[]> { public KeyValue<Bytes, byte[]> next() { if (!hasNext()) { throw new NoSuchElementException(); } return currentIterator.next(); } SegmentIterator(final Iterator<Segment> segments,
final HasNextCondition hasNextCondition,
final Bytes from,
final Bytes to); void close(); @Override Bytes peekNextKey(); @Override boolean hasNext(); KeyValue<Bytes, byte[]> next(); void remove(); }
|
@Test(expected = NoSuchElementException.class) public void shouldThrowNoSuchElementOnNextIfNoNext() throws Exception { iterator = new SegmentIterator(Arrays.asList(segmentOne, segmentTwo).iterator(), hasNextCondition, Bytes.wrap("f".getBytes()), Bytes.wrap("h".getBytes())); iterator.next(); }
|
CompositeReadOnlyWindowStore implements ReadOnlyWindowStore<K, V> { public <IteratorType extends KeyValueIterator<?, V>> IteratorType fetch(Fetcher<K, V, IteratorType> fetcher) { final List<ReadOnlyWindowStore<K, V>> stores = provider.stores(storeName, windowStoreType); for (ReadOnlyWindowStore<K, V> windowStore : stores) { try { final IteratorType result = fetcher.fetch(windowStore); if (!result.hasNext()) { result.close(); } else { return result; } } catch (InvalidStateStoreException e) { throw new InvalidStateStoreException( "State store is not available anymore and may have been migrated to another instance; " + "please re-discover its location from the state metadata."); } } return fetcher.empty(); } CompositeReadOnlyWindowStore(final StateStoreProvider provider,
final QueryableStoreType<ReadOnlyWindowStore<K, V>> windowStoreType,
final String storeName); IteratorType fetch(Fetcher<K, V, IteratorType> fetcher); @Override WindowStoreIterator<V> fetch(final K key, final long timeFrom, final long timeTo); @Override KeyValueIterator<Windowed<K>, V> fetch(final K from, final K to, final long timeFrom, final long timeTo); }
|
@Test public void shouldFetchValuesFromWindowStore() throws Exception { underlyingWindowStore.put("my-key", "my-value", 0L); underlyingWindowStore.put("my-key", "my-later-value", 10L); final WindowStoreIterator<String> iterator = windowStore.fetch("my-key", 0L, 25L); final List<KeyValue<Long, String>> results = StreamsTestUtils.toList(iterator); assertEquals(asList(new KeyValue<>(0L, "my-value"), new KeyValue<>(10L, "my-later-value")), results); }
@Test public void shouldReturnEmptyIteratorIfNoData() throws Exception { final WindowStoreIterator<String> iterator = windowStore.fetch("my-key", 0L, 25L); assertEquals(false, iterator.hasNext()); }
@Test public void shouldFindValueForKeyWhenMultiStores() throws Exception { final ReadOnlyWindowStoreStub<String, String> secondUnderlying = new ReadOnlyWindowStoreStub<>(WINDOW_SIZE); stubProviderTwo.addStore(storeName, secondUnderlying); underlyingWindowStore.put("key-one", "value-one", 0L); secondUnderlying.put("key-two", "value-two", 10L); final List<KeyValue<Long, String>> keyOneResults = StreamsTestUtils.toList(windowStore.fetch("key-one", 0L, 1L)); final List<KeyValue<Long, String>> keyTwoResults = StreamsTestUtils.toList(windowStore.fetch("key-two", 10L, 11L)); assertEquals(Collections.singletonList(KeyValue.pair(0L, "value-one")), keyOneResults); assertEquals(Collections.singletonList(KeyValue.pair(10L, "value-two")), keyTwoResults); }
@Test public void shouldNotGetValuesFromOtherStores() throws Exception { otherUnderlyingStore.put("some-key", "some-value", 0L); underlyingWindowStore.put("some-key", "my-value", 1L); final List<KeyValue<Long, String>> results = StreamsTestUtils.toList(windowStore.fetch("some-key", 0L, 2L)); assertEquals(Collections.singletonList(new KeyValue<>(1L, "my-value")), results); }
@Test(expected = InvalidStateStoreException.class) public void shouldThrowInvalidStateStoreExceptionOnRebalance() throws Exception { final CompositeReadOnlyWindowStore<Object, Object> store = new CompositeReadOnlyWindowStore<>(new StateStoreProviderStub(true), QueryableStoreTypes.windowStore(), "foo"); store.fetch("key", 1, 10); }
@Test public void shouldThrowInvalidStateStoreExceptionIfFetchThrows() throws Exception { underlyingWindowStore.setOpen(false); final CompositeReadOnlyWindowStore<Object, Object> store = new CompositeReadOnlyWindowStore<>(stubProviderOne, QueryableStoreTypes.windowStore(), "window-store"); try { store.fetch("key", 1, 10); Assert.fail("InvalidStateStoreException was expected"); } catch (InvalidStateStoreException e) { Assert.assertEquals("State store is not available anymore and may have been migrated to another instance; " + "please re-discover its location from the state metadata.", e.getMessage()); } }
@Test public void emptyIteratorAlwaysReturnsFalse() throws Exception { final CompositeReadOnlyWindowStore<Object, Object> store = new CompositeReadOnlyWindowStore<>(new StateStoreProviderStub(false), QueryableStoreTypes.windowStore(), "foo"); final WindowStoreIterator<Object> windowStoreIterator = store.fetch("key", 1, 10); Assert.assertFalse(windowStoreIterator.hasNext()); }
@Test public void emptyIteratorPeekNextKeyShouldThrowNoSuchElementException() throws Exception { final CompositeReadOnlyWindowStore<Object, Object> store = new CompositeReadOnlyWindowStore<>(new StateStoreProviderStub(false), QueryableStoreTypes.windowStore(), "foo"); final WindowStoreIterator<Object> windowStoreIterator = store.fetch("key", 1, 10); windowStoreIteratorException.expect(NoSuchElementException.class); windowStoreIterator.peekNextKey(); }
@Test public void emptyIteratorNextShouldThrowNoSuchElementException() throws Exception { final CompositeReadOnlyWindowStore<Object, Object> store = new CompositeReadOnlyWindowStore<>(new StateStoreProviderStub(false), QueryableStoreTypes.windowStore(), "foo"); final WindowStoreIterator<Object> windowStoreIterator = store.fetch("key", 1, 10); windowStoreIteratorException.expect(NoSuchElementException.class); windowStoreIterator.next(); }
|
FilteredCacheIterator implements PeekingKeyValueIterator<Bytes, LRUCacheEntry> { @Override public void remove() { throw new UnsupportedOperationException(); } FilteredCacheIterator(final PeekingKeyValueIterator<Bytes, LRUCacheEntry> cacheIterator,
final HasNextCondition hasNextCondition,
final CacheFunction cacheFunction); @Override void close(); @Override Bytes peekNextKey(); @Override boolean hasNext(); @Override KeyValue<Bytes, LRUCacheEntry> next(); @Override void remove(); @Override KeyValue<Bytes, LRUCacheEntry> peekNext(); }
|
@Test(expected = UnsupportedOperationException.class) public void shouldThrowUnsupportedOperationExceptionOnRemove() throws Exception { allIterator.remove(); }
|
RocksDBWindowStore extends WrappedStateStore.AbstractStateStore implements WindowStore<K, V> { @Override public WindowStoreIterator<V> fetch(K key, long timeFrom, long timeTo) { final KeyValueIterator<Bytes, byte[]> bytesIterator = bytesStore.fetch(Bytes.wrap(serdes.rawKey(key)), timeFrom, timeTo); return new WindowStoreIteratorWrapper<>(bytesIterator, serdes, windowSize).valuesIterator(); } RocksDBWindowStore(final SegmentedBytesStore bytesStore,
final Serde<K> keySerde,
final Serde<V> valueSerde,
final boolean retainDuplicates,
final long windowSize); @Override @SuppressWarnings("unchecked") void init(final ProcessorContext context, final StateStore root); @Override void put(K key, V value); @Override void put(K key, V value, long timestamp); @Override WindowStoreIterator<V> fetch(K key, long timeFrom, long timeTo); @Override KeyValueIterator<Windowed<K>, V> fetch(K from, K to, long timeFrom, long timeTo); }
|
@SuppressWarnings("unchecked") @Test public void testPutAndFetch() throws IOException { windowStore = createWindowStore(context, false, true); long startTime = segmentSize - 4L; putFirstBatch(windowStore, startTime, context); assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, startTime + 0L - WINDOW_SIZE, startTime + 0L + WINDOW_SIZE))); assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, startTime + 1L - WINDOW_SIZE, startTime + 1L + WINDOW_SIZE))); assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, startTime + 2L - WINDOW_SIZE, startTime + 2L + WINDOW_SIZE))); assertEquals(Utils.mkList(), toList(windowStore.fetch(3, startTime + 3L - WINDOW_SIZE, startTime + 3L + WINDOW_SIZE))); assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, startTime + 4L - WINDOW_SIZE, startTime + 4L + WINDOW_SIZE))); assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, startTime + 5L - WINDOW_SIZE, startTime + 5L + WINDOW_SIZE))); putSecondBatch(windowStore, startTime, context); assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime - 2L - WINDOW_SIZE, startTime - 2L + WINDOW_SIZE))); assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, startTime - 1L - WINDOW_SIZE, startTime - 1L + WINDOW_SIZE))); assertEquals(Utils.mkList("two", "two+1"), toList(windowStore.fetch(2, startTime - WINDOW_SIZE, startTime + WINDOW_SIZE))); assertEquals(Utils.mkList("two", "two+1", "two+2"), toList(windowStore.fetch(2, startTime + 1L - WINDOW_SIZE, startTime + 1L + WINDOW_SIZE))); assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3"), toList(windowStore.fetch(2, startTime + 2L - WINDOW_SIZE, startTime + 2L + WINDOW_SIZE))); assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3", "two+4"), toList(windowStore.fetch(2, startTime + 3L - WINDOW_SIZE, startTime + 3L + WINDOW_SIZE))); assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3", "two+4", "two+5"), toList(windowStore.fetch(2, startTime + 4L - WINDOW_SIZE, startTime + 4L + WINDOW_SIZE))); assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, startTime + 5L - WINDOW_SIZE, startTime + 5L + WINDOW_SIZE))); assertEquals(Utils.mkList("two+1", "two+2", "two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, startTime + 6L - WINDOW_SIZE, startTime + 6L + WINDOW_SIZE))); assertEquals(Utils.mkList("two+2", "two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, startTime + 7L - WINDOW_SIZE, startTime + 7L + WINDOW_SIZE))); assertEquals(Utils.mkList("two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, startTime + 8L - WINDOW_SIZE, startTime + 8L + WINDOW_SIZE))); assertEquals(Utils.mkList("two+4", "two+5", "two+6"), toList(windowStore.fetch(2, startTime + 9L - WINDOW_SIZE, startTime + 9L + WINDOW_SIZE))); assertEquals(Utils.mkList("two+5", "two+6"), toList(windowStore.fetch(2, startTime + 10L - WINDOW_SIZE, startTime + 10L + WINDOW_SIZE))); assertEquals(Utils.mkList("two+6"), toList(windowStore.fetch(2, startTime + 11L - WINDOW_SIZE, startTime + 11L + WINDOW_SIZE))); assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + 12L - WINDOW_SIZE, startTime + 12L + WINDOW_SIZE))); windowStore.flush(); Map<Integer, Set<String>> entriesByKey = entriesByKey(changeLog, startTime); assertEquals(Utils.mkSet("zero@0"), entriesByKey.get(0)); assertEquals(Utils.mkSet("one@1"), entriesByKey.get(1)); assertEquals(Utils.mkSet("two@2", "two+1@3", "two+2@4", "two+3@5", "two+4@6", "two+5@7", "two+6@8"), 
entriesByKey.get(2)); assertNull(entriesByKey.get(3)); assertEquals(Utils.mkSet("four@4"), entriesByKey.get(4)); assertEquals(Utils.mkSet("five@5"), entriesByKey.get(5)); assertNull(entriesByKey.get(6)); }
@SuppressWarnings("unchecked") @Test public void testFetchRange() throws IOException { windowStore = createWindowStore(context, false, true); long startTime = segmentSize - 4L; putFirstBatch(windowStore, startTime, context); final KeyValue<Windowed<Integer>, String> zero = windowedPair(0, "zero", startTime + 0); final KeyValue<Windowed<Integer>, String> one = windowedPair(1, "one", startTime + 1); final KeyValue<Windowed<Integer>, String> two = windowedPair(2, "two", startTime + 2); final KeyValue<Windowed<Integer>, String> four = windowedPair(4, "four", startTime + 4); final KeyValue<Windowed<Integer>, String> five = windowedPair(5, "five", startTime + 5); assertEquals( Utils.mkList(zero, one), StreamsTestUtils.toList(windowStore.fetch(0, 1, startTime + 0L - WINDOW_SIZE, startTime + 0L + WINDOW_SIZE)) ); assertEquals( Utils.mkList(one), StreamsTestUtils.toList(windowStore.fetch(1, 1, startTime + 0L - WINDOW_SIZE, startTime + 0L + WINDOW_SIZE)) ); assertEquals( Utils.mkList(one, two), StreamsTestUtils.toList(windowStore.fetch(1, 3, startTime + 0L - WINDOW_SIZE, startTime + 0L + WINDOW_SIZE)) ); assertEquals( Utils.mkList(zero, one, two), StreamsTestUtils.toList(windowStore.fetch(0, 5, startTime + 0L - WINDOW_SIZE, startTime + 0L + WINDOW_SIZE)) ); assertEquals( Utils.mkList(zero, one, two, four, five), StreamsTestUtils.toList(windowStore.fetch(0, 5, startTime + 0L - WINDOW_SIZE, startTime + 0L + WINDOW_SIZE + 5L)) ); assertEquals( Utils.mkList(two, four, five), StreamsTestUtils.toList(windowStore.fetch(0, 5, startTime + 2L, startTime + 0L + WINDOW_SIZE + 5L)) ); assertEquals( Utils.mkList(), StreamsTestUtils.toList(windowStore.fetch(4, 5, startTime + 2L, startTime + WINDOW_SIZE)) ); assertEquals( Utils.mkList(), StreamsTestUtils.toList(windowStore.fetch(0, 3, startTime + 3L, startTime + WINDOW_SIZE + 5)) ); }
@SuppressWarnings("unchecked") @Test public void testPutAndFetchBefore() throws IOException { windowStore = createWindowStore(context, false, true); long startTime = segmentSize - 4L; putFirstBatch(windowStore, startTime, context); assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, startTime + 0L - WINDOW_SIZE, startTime + 0L))); assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, startTime + 1L - WINDOW_SIZE, startTime + 1L))); assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, startTime + 2L - WINDOW_SIZE, startTime + 2L))); assertEquals(Utils.mkList(), toList(windowStore.fetch(3, startTime + 3L - WINDOW_SIZE, startTime + 3L))); assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, startTime + 4L - WINDOW_SIZE, startTime + 4L))); assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, startTime + 5L - WINDOW_SIZE, startTime + 5L))); putSecondBatch(windowStore, startTime, context); assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime - 1L - WINDOW_SIZE, startTime - 1L))); assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + 0L - WINDOW_SIZE, startTime + 0L))); assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + 1L - WINDOW_SIZE, startTime + 1L))); assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, startTime + 2L - WINDOW_SIZE, startTime + 2L))); assertEquals(Utils.mkList("two", "two+1"), toList(windowStore.fetch(2, startTime + 3L - WINDOW_SIZE, startTime + 3L))); assertEquals(Utils.mkList("two", "two+1", "two+2"), toList(windowStore.fetch(2, startTime + 4L - WINDOW_SIZE, startTime + 4L))); assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3"), toList(windowStore.fetch(2, startTime + 5L - WINDOW_SIZE, startTime + 5L))); assertEquals(Utils.mkList("two+1", "two+2", "two+3", "two+4"), toList(windowStore.fetch(2, startTime + 6L - WINDOW_SIZE, startTime + 6L))); assertEquals(Utils.mkList("two+2", "two+3", "two+4", "two+5"), toList(windowStore.fetch(2, startTime + 7L - WINDOW_SIZE, startTime + 7L))); assertEquals(Utils.mkList("two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, startTime + 8L - WINDOW_SIZE, startTime + 8L))); assertEquals(Utils.mkList("two+4", "two+5", "two+6"), toList(windowStore.fetch(2, startTime + 9L - WINDOW_SIZE, startTime + 9L))); assertEquals(Utils.mkList("two+5", "two+6"), toList(windowStore.fetch(2, startTime + 10L - WINDOW_SIZE, startTime + 10L))); assertEquals(Utils.mkList("two+6"), toList(windowStore.fetch(2, startTime + 11L - WINDOW_SIZE, startTime + 11L))); assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + 12L - WINDOW_SIZE, startTime + 12L))); assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + 13L - WINDOW_SIZE, startTime + 13L))); windowStore.flush(); Map<Integer, Set<String>> entriesByKey = entriesByKey(changeLog, startTime); assertEquals(Utils.mkSet("zero@0"), entriesByKey.get(0)); assertEquals(Utils.mkSet("one@1"), entriesByKey.get(1)); assertEquals(Utils.mkSet("two@2", "two+1@3", "two+2@4", "two+3@5", "two+4@6", "two+5@7", "two+6@8"), entriesByKey.get(2)); assertNull(entriesByKey.get(3)); assertEquals(Utils.mkSet("four@4"), entriesByKey.get(4)); assertEquals(Utils.mkSet("five@5"), entriesByKey.get(5)); assertNull(entriesByKey.get(6)); }
@SuppressWarnings("unchecked") @Test public void testPutAndFetchAfter() throws IOException { windowStore = createWindowStore(context, false, true); long startTime = segmentSize - 4L; putFirstBatch(windowStore, startTime, context); assertEquals(Utils.mkList("zero"), toList(windowStore.fetch(0, startTime + 0L, startTime + 0L + WINDOW_SIZE))); assertEquals(Utils.mkList("one"), toList(windowStore.fetch(1, startTime + 1L, startTime + 1L + WINDOW_SIZE))); assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, startTime + 2L, startTime + 2L + WINDOW_SIZE))); assertEquals(Utils.mkList(), toList(windowStore.fetch(3, startTime + 3L, startTime + 3L + WINDOW_SIZE))); assertEquals(Utils.mkList("four"), toList(windowStore.fetch(4, startTime + 4L, startTime + 4L + WINDOW_SIZE))); assertEquals(Utils.mkList("five"), toList(windowStore.fetch(5, startTime + 5L, startTime + 5L + WINDOW_SIZE))); putSecondBatch(windowStore, startTime, context); assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime - 2L, startTime - 2L + WINDOW_SIZE))); assertEquals(Utils.mkList("two"), toList(windowStore.fetch(2, startTime - 1L, startTime - 1L + WINDOW_SIZE))); assertEquals(Utils.mkList("two", "two+1"), toList(windowStore.fetch(2, startTime, startTime + WINDOW_SIZE))); assertEquals(Utils.mkList("two", "two+1", "two+2"), toList(windowStore.fetch(2, startTime + 1L, startTime + 1L + WINDOW_SIZE))); assertEquals(Utils.mkList("two", "two+1", "two+2", "two+3"), toList(windowStore.fetch(2, startTime + 2L, startTime + 2L + WINDOW_SIZE))); assertEquals(Utils.mkList("two+1", "two+2", "two+3", "two+4"), toList(windowStore.fetch(2, startTime + 3L, startTime + 3L + WINDOW_SIZE))); assertEquals(Utils.mkList("two+2", "two+3", "two+4", "two+5"), toList(windowStore.fetch(2, startTime + 4L, startTime + 4L + WINDOW_SIZE))); assertEquals(Utils.mkList("two+3", "two+4", "two+5", "two+6"), toList(windowStore.fetch(2, startTime + 5L, startTime + 5L + WINDOW_SIZE))); assertEquals(Utils.mkList("two+4", "two+5", "two+6"), toList(windowStore.fetch(2, startTime + 6L, startTime + 6L + WINDOW_SIZE))); assertEquals(Utils.mkList("two+5", "two+6"), toList(windowStore.fetch(2, startTime + 7L, startTime + 7L + WINDOW_SIZE))); assertEquals(Utils.mkList("two+6"), toList(windowStore.fetch(2, startTime + 8L, startTime + 8L + WINDOW_SIZE))); assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + 9L, startTime + 9L + WINDOW_SIZE))); assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + 10L, startTime + 10L + WINDOW_SIZE))); assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + 11L, startTime + 11L + WINDOW_SIZE))); assertEquals(Utils.mkList(), toList(windowStore.fetch(2, startTime + 12L, startTime + 12L + WINDOW_SIZE))); windowStore.flush(); Map<Integer, Set<String>> entriesByKey = entriesByKey(changeLog, startTime); assertEquals(Utils.mkSet("zero@0"), entriesByKey.get(0)); assertEquals(Utils.mkSet("one@1"), entriesByKey.get(1)); assertEquals(Utils.mkSet("two@2", "two+1@3", "two+2@4", "two+3@5", "two+4@6", "two+5@7", "two+6@8"), entriesByKey.get(2)); assertNull(entriesByKey.get(3)); assertEquals(Utils.mkSet("four@4"), entriesByKey.get(4)); assertEquals(Utils.mkSet("five@5"), entriesByKey.get(5)); assertNull(entriesByKey.get(6)); }
@SuppressWarnings("unchecked") @Test public void testInitialLoading() throws IOException { File storeDir = new File(baseDir, windowName); windowStore = createWindowStore(context, false, true); new File(storeDir, segments.segmentName(0L)).mkdir(); new File(storeDir, segments.segmentName(1L)).mkdir(); new File(storeDir, segments.segmentName(2L)).mkdir(); new File(storeDir, segments.segmentName(3L)).mkdir(); new File(storeDir, segments.segmentName(4L)).mkdir(); new File(storeDir, segments.segmentName(5L)).mkdir(); new File(storeDir, segments.segmentName(6L)).mkdir(); windowStore.close(); windowStore = createWindowStore(context, false, true); assertEquals( Utils.mkSet(segments.segmentName(4L), segments.segmentName(5L), segments.segmentName(6L)), segmentDirs(baseDir) ); try (WindowStoreIterator iter = windowStore.fetch(0, 0L, 1000000L)) { while (iter.hasNext()) { iter.next(); } } assertEquals( Utils.mkSet(segments.segmentName(4L), segments.segmentName(5L), segments.segmentName(6L)), segmentDirs(baseDir) ); }
|
ChangeLoggingSegmentedBytesStore extends WrappedStateStore.AbstractStateStore implements SegmentedBytesStore { @Override public KeyValueIterator<Bytes, byte[]> fetch(final Bytes key, final long from, final long to) { return bytesStore.fetch(key, from, to); } ChangeLoggingSegmentedBytesStore(final SegmentedBytesStore bytesStore); @Override KeyValueIterator<Bytes, byte[]> fetch(final Bytes key, final long from, final long to); @Override KeyValueIterator<Bytes, byte[]> fetch(Bytes keyFrom, Bytes keyTo, long from, long to); @Override void remove(final Bytes key); @Override void put(final Bytes key, final byte[] value); @Override byte[] get(final Bytes key); @Override @SuppressWarnings("unchecked") void init(final ProcessorContext context, final StateStore root); }
|
@Test public void shouldDelegateToUnderlyingStoreWhenFetching() throws Exception { store.fetch(Bytes.wrap(new byte[0]), 1, 1); assertTrue(bytesStore.fetchCalled); }
|
WrappingStoreProvider implements StateStoreProvider { public <T> List<T> stores(final String storeName, QueryableStoreType<T> type) { final List<T> allStores = new ArrayList<>(); for (StateStoreProvider provider : storeProviders) { final List<T> stores = provider.stores(storeName, type); allStores.addAll(stores); } if (allStores.isEmpty()) { throw new InvalidStateStoreException("the state store, " + storeName + ", may have migrated to another instance."); } return allStores; } WrappingStoreProvider(final List<StateStoreProvider> storeProviders); List<T> stores(final String storeName, QueryableStoreType<T> type); }
|
@Test public void shouldFindKeyValueStores() throws Exception { List<ReadOnlyKeyValueStore<String, String>> results = wrappingStoreProvider.stores("kv", QueryableStoreTypes.<String, String>keyValueStore()); assertEquals(2, results.size()); }
@Test public void shouldFindWindowStores() throws Exception { final List<ReadOnlyWindowStore<Object, Object>> windowStores = wrappingStoreProvider.stores("window", windowStore()); assertEquals(2, windowStores.size()); }
@Test(expected = InvalidStateStoreException.class) public void shouldThrowInvalidStoreExceptionIfNoStoreOfTypeFound() throws Exception { wrappingStoreProvider.stores("doesn't exist", QueryableStoreTypes.keyValueStore()); }
|
ChangeLoggingKeyValueBytesStore extends WrappedStateStore.AbstractStateStore implements KeyValueStore<Bytes, byte[]> { @Override public byte[] putIfAbsent(final Bytes key, final byte[] value) { final byte[] previous = get(key); if (previous == null) { put(key, value); } return previous; } ChangeLoggingKeyValueBytesStore(final KeyValueStore<Bytes, byte[]> inner); @Override void init(final ProcessorContext context, final StateStore root); @Override long approximateNumEntries(); @Override void put(final Bytes key, final byte[] value); @Override byte[] putIfAbsent(final Bytes key, final byte[] value); @Override void putAll(final List<KeyValue<Bytes, byte[]>> entries); @Override byte[] delete(final Bytes key); @Override byte[] get(final Bytes key); @Override KeyValueIterator<Bytes, byte[]> range(final Bytes from, final Bytes to); @Override KeyValueIterator<Bytes, byte[]> all(); }
|
@Test public void shouldReturnNullOnPutIfAbsentWhenNoPreviousValue() throws Exception { assertThat(store.putIfAbsent(hi, there), is(nullValue())); }
|
ChangeLoggingKeyValueBytesStore extends WrappedStateStore.AbstractStateStore implements KeyValueStore<Bytes, byte[]> { @Override public byte[] get(final Bytes key) { return inner.get(key); } ChangeLoggingKeyValueBytesStore(final KeyValueStore<Bytes, byte[]> inner); @Override void init(final ProcessorContext context, final StateStore root); @Override long approximateNumEntries(); @Override void put(final Bytes key, final byte[] value); @Override byte[] putIfAbsent(final Bytes key, final byte[] value); @Override void putAll(final List<KeyValue<Bytes, byte[]>> entries); @Override byte[] delete(final Bytes key); @Override byte[] get(final Bytes key); @Override KeyValueIterator<Bytes, byte[]> range(final Bytes from, final Bytes to); @Override KeyValueIterator<Bytes, byte[]> all(); }
|
@Test public void shouldReturnNullOnGetWhenDoesntExist() throws Exception { assertThat(store.get(hello), is(nullValue())); }
|
WindowKeySchema implements RocksDBSegmentedBytesStore.KeySchema { @Override public Bytes upperRange(final Bytes key, final long to) { final byte[] maxSuffix = ByteBuffer.allocate(SUFFIX_SIZE) .putLong(to) .putInt(Integer.MAX_VALUE) .array(); return OrderedBytes.upperRange(key, maxSuffix); } @Override void init(final String topic); @Override Bytes upperRange(final Bytes key, final long to); @Override Bytes lowerRange(final Bytes key, final long from); @Override Bytes lowerRangeFixedSize(final Bytes key, final long from); @Override Bytes upperRangeFixedSize(final Bytes key, final long to); @Override long segmentTimestamp(final Bytes key); @Override HasNextCondition hasNextCondition(final Bytes binaryKeyFrom, final Bytes binaryKeyTo, final long from, final long to); @Override List<Segment> segmentsToSearch(final Segments segments, final long from, final long to); }
|
@Test public void testUpperBoundWithLargeTimestamps() throws Exception { Bytes upper = windowKeySchema.upperRange(Bytes.wrap(new byte[]{0xA, 0xB, 0xC}), Long.MAX_VALUE); assertThat( "shorter key with max timestamp should be in range", upper.compareTo( WindowStoreUtils.toBinaryKey( new byte[]{0xA}, Long.MAX_VALUE, Integer.MAX_VALUE ) ) >= 0 ); assertThat( "shorter key with max timestamp should be in range", upper.compareTo( WindowStoreUtils.toBinaryKey( new byte[]{0xA, 0xB}, Long.MAX_VALUE, Integer.MAX_VALUE ) ) >= 0 ); assertThat(upper, equalTo(WindowStoreUtils.toBinaryKey(new byte[]{0xA}, Long.MAX_VALUE, Integer.MAX_VALUE))); }
@Test public void testUpperBoundWithKeyBytesLargerThanFirstTimestampByte() throws Exception { Bytes upper = windowKeySchema.upperRange(Bytes.wrap(new byte[]{0xA, (byte) 0x8F, (byte) 0x9F}), Long.MAX_VALUE); assertThat( "shorter key with max timestamp should be in range", upper.compareTo( WindowStoreUtils.toBinaryKey( new byte[]{0xA, (byte) 0x8F}, Long.MAX_VALUE, Integer.MAX_VALUE ) ) >= 0 ); assertThat(upper, equalTo(WindowStoreUtils.toBinaryKey(new byte[]{0xA, (byte) 0x8F, (byte) 0x9F}, Long.MAX_VALUE, Integer.MAX_VALUE))); }
@Test public void testUpperBoundWithKeyBytesLargerAndSmallerThanFirstTimestampByte() throws Exception { Bytes upper = windowKeySchema.upperRange(Bytes.wrap(new byte[]{0xC, 0xC, 0x9}), 0x0AffffffffffffffL); assertThat( "shorter key with max timestamp should be in range", upper.compareTo( WindowStoreUtils.toBinaryKey( new byte[]{0xC, 0xC}, 0x0AffffffffffffffL, Integer.MAX_VALUE ) ) >= 0 ); assertThat(upper, equalTo(WindowStoreUtils.toBinaryKey(new byte[]{0xC, 0xC}, 0x0AffffffffffffffL, Integer.MAX_VALUE))); }
@Test public void testUpperBoundWithZeroTimestamp() throws Exception { Bytes upper = windowKeySchema.upperRange(Bytes.wrap(new byte[]{0xA, 0xB, 0xC}), 0); assertThat(upper, equalTo(WindowStoreUtils.toBinaryKey(new byte[]{0xA, 0xB, 0xC}, 0, Integer.MAX_VALUE))); }
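The mixed case in testUpperBoundWithKeyBytesLargerAndSmallerThanFirstTimestampByte is the same prefix-trimming behaviour sketched earlier for the session schema, now with a 12-byte suffix (8-byte timestamp plus the 4-byte Integer.MAX_VALUE sequence number shown in upperRange above). A quick worked check of which key bytes survive against a suffix whose first byte is 0x0A:

import java.nio.ByteBuffer;

// Worked check of the mixed upper-bound case: the suffix's first byte is 0x0A, so the
// two 0xC key bytes are kept but the trailing 0x9 is trimmed, matching the test's
// expected bound over the key {0xC, 0xC}. Uses the trimming rule inferred earlier.
public class WindowUpperRangeCheck {
    public static void main(final String[] args) {
        final byte[] maxSuffix = ByteBuffer.allocate(12)
            .putLong(0x0AffffffffffffffL)
            .putInt(Integer.MAX_VALUE)
            .array();
        System.out.println((0xC & 0xFF) >= (maxSuffix[0] & 0xFF)); // true  -> kept
        System.out.println((0x9 & 0xFF) >= (maxSuffix[0] & 0xFF)); // false -> trimmed
    }
}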
|