index int64 0 0 | repo_id stringlengths 26 205 | file_path stringlengths 51 246 | content stringlengths 8 433k | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/combine/HollowCombinerOrdinalRemapper.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.combine;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
/**
* Used by the {@link HollowCombiner} to track the mapping between ordinals in the input state and ordinals in the output state. Not intended for external consumption.
*
* @author dkoszewnik
*
*/
public class HollowCombinerOrdinalRemapper implements OrdinalRemapper {
    private final HollowCombiner combiner;
    private final Map<String, int[]> typeMappings;

    public HollowCombinerOrdinalRemapper(HollowCombiner combiner, HollowReadStateEngine inputStateEngine) {
        this.combiner = combiner;
        this.typeMappings = initializeTypeMappings(inputStateEngine);
    }

    /**
     * Returns the output-state ordinal for the given input-state ordinal, lazily copying the
     * record into the output (via the combiner) the first time it is requested.
     * Unknown types have no mapping table, and their ordinals pass through unchanged.
     */
    @Override
    public int getMappedOrdinal(String type, int originalOrdinal) {
        int[] mapping = typeMappings.get(type);
        if (mapping == null)
            return originalOrdinal;
        int mapped = mapping[originalOrdinal];
        if (mapped == -1) {
            mapped = combiner.copyOrdinal(type, originalOrdinal);
            mapping[originalOrdinal] = mapped;
        }
        return mapped;
    }

    /** Records that {@code originalOrdinal} of {@code type} maps to {@code mappedOrdinal} in the output. */
    @Override
    public void remapOrdinal(String type, int originalOrdinal, int mappedOrdinal) {
        typeMappings.get(type)[originalOrdinal] = mappedOrdinal;
    }

    /** @return true if the given input-state ordinal has already been assigned an output-state ordinal. */
    @Override
    public boolean ordinalIsMapped(String type, int originalOrdinal) {
        return typeMappings.get(type)[originalOrdinal] != -1;
    }

    /** Builds one mapping array per input type, sized maxOrdinal+1 and initialized to -1 (unmapped). */
    private Map<String, int[]> initializeTypeMappings(HollowReadStateEngine inputStateEngine) {
        Map<String, int[]> mappings = new HashMap<>();
        for (HollowTypeReadState typeState : inputStateEngine.getTypeStates()) {
            int[] ordinalMapping = new int[typeState.maxOrdinal() + 1];
            Arrays.fill(ordinalMapping, -1);
            mappings.put(typeState.getSchema().getName(), ordinalMapping);
        }
        return mappings;
    }
}
| 8,900 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/combine/HollowCombinerPrimaryKeyOrdinalRemapper.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.combine;
import com.netflix.hollow.core.index.HollowPrimaryKeyIndex;
import java.util.Map;
/**
* Used by the {@link HollowCombiner} to deduplicate records in the output based on primary keys. Not intended for external consumption.
*
* @author dkoszewnik
*
*/
class HollowCombinerPrimaryKeyOrdinalRemapper implements OrdinalRemapper {
    private final Map<String, HollowPrimaryKeyIndex[]> primaryKeyIndexes;
    private final OrdinalRemapper[] baseRemappers;
    private final int stateEngineIdx;

    public HollowCombinerPrimaryKeyOrdinalRemapper(OrdinalRemapper[] baseRemappers, Map<String, HollowPrimaryKeyIndex[]> primaryKeyIndexes, int stateEngineIdx) {
        this.primaryKeyIndexes = primaryKeyIndexes;
        this.baseRemappers = baseRemappers;
        this.stateEngineIdx = stateEngineIdx;
    }

    /** Delegates lookup to this input state's base remapper. */
    @Override
    public int getMappedOrdinal(String type, int originalOrdinal) {
        return baseRemappers[stateEngineIdx].getMappedOrdinal(type, originalOrdinal);
    }

    /**
     * Records the mapping in this input state's base remapper, then propagates the same output
     * ordinal to any record with an identical primary key in each of the other input states,
     * so duplicate records across inputs collapse to a single output record.
     */
    @Override
    public void remapOrdinal(String type, int originalOrdinal, int mappedOrdinal) {
        baseRemappers[stateEngineIdx].remapOrdinal(type, originalOrdinal, mappedOrdinal);

        HollowPrimaryKeyIndex[] typeKeyIndexes = primaryKeyIndexes.get(type);
        if (typeKeyIndexes == null)
            return;  // type has no primary key: nothing to deduplicate

        Object[] recordKey = typeKeyIndexes[stateEngineIdx].getRecordKey(originalOrdinal);
        for (int idx = 0; idx < baseRemappers.length; idx++) {
            if (idx == stateEngineIdx || typeKeyIndexes[idx] == null)
                continue;
            int duplicateOrdinal = typeKeyIndexes[idx].getMatchingOrdinal(recordKey);
            if (duplicateOrdinal != -1)
                baseRemappers[idx].remapOrdinal(type, duplicateOrdinal, mappedOrdinal);
        }
    }

    /** Delegates to this input state's base remapper. */
    @Override
    public boolean ordinalIsMapped(String type, int originalOrdinal) {
        return baseRemappers[stateEngineIdx].ordinalIsMapped(type, originalOrdinal);
    }
}
| 8,901 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/util/SearchUtils.java | package com.netflix.hollow.tools.util;
import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE;
import com.netflix.hollow.core.index.HollowPrimaryKeyIndex;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.read.HollowReadFieldUtils;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.HollowTypeStateListener;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import java.util.BitSet;
/**
 * Static helpers used by Hollow tooling to locate records by primary key.
 */
public class SearchUtils {
    public static final String MULTI_FIELD_KEY_DELIMITER = ":";
    public static final String REGEX_MATCH_DELIMITER = "\\:";
    public static final String ESCAPED_MULTI_FIELD_KEY_DELIMITER = "\\\\:";

    // Utility class: all members are static, so prevent instantiation.
    private SearchUtils() {
    }

    /**
     * Parse a colon-separated string into a primary key based on a delimiter (e.g. ':'), and throw an
     * exception if the format is unexpected -- e.g. if the primary key was expecting an integer but
     * keyString didn't contain a parse-able integer at the right spot.
     *
     * If the value of a field itself contains the delimiter character, the value can be escaped using
     * a backslash in order to perform search.
     *
     * @throws IllegalArgumentException if the primary key contains a BYTES field
     * @throws NumberFormatException if a numeric key field cannot be parsed
     */
    public static Object[] parseKey(HollowReadStateEngine readStateEngine, PrimaryKey primaryKey, String keyString) {
        // Split limited to the number of fields of the primary key. This ensures correct extraction of
        // an empty value for the last field. The negative lookbehind skips delimiters escaped with a
        // backslash.
        String fields[] = keyString.split("(?<!\\\\)" + MULTI_FIELD_KEY_DELIMITER, primaryKey.numFields());
        Object key[] = new Object[fields.length];
        for(int i=0;i<fields.length;i++) {
            switch(primaryKey.getFieldType(readStateEngine, i)) {
                case BOOLEAN:
                    key[i] = Boolean.parseBoolean(fields[i]);
                    break;
                case STRING:
                    // un-escape any "\:" sequences back to ":"
                    key[i] = fields[i].replaceAll(ESCAPED_MULTI_FIELD_KEY_DELIMITER, MULTI_FIELD_KEY_DELIMITER);
                    break;
                case INT:
                case REFERENCE:
                    key[i] = Integer.parseInt(fields[i]);
                    break;
                case LONG:
                    key[i] = Long.parseLong(fields[i]);
                    break;
                case DOUBLE:
                    key[i] = Double.parseDouble(fields[i]);
                    break;
                case FLOAT:
                    key[i] = Float.parseFloat(fields[i]);
                    break;
                case BYTES:
                    throw new IllegalArgumentException("Primary key contains a field of type BYTES");
            }
        }
        return key;
    }

    /**
     * Return field index in object schema for each field comprising primary key, or null when
     * {@code primaryKey} is null.
     */
    public static int[][] getFieldPathIndexes(HollowReadStateEngine readStateEngine, PrimaryKey primaryKey) {
        if(primaryKey != null) {
            int fieldPathIndexes[][] = new int[primaryKey.numFields()][];
            for(int i=0;i<primaryKey.numFields();i++) {
                fieldPathIndexes[i] = primaryKey.getFieldPathIndex(readStateEngine, i);
            }
            return fieldPathIndexes;
        }
        return null;
    }

    /**
     * Returns a primary key index for the given type if one is registered as a listener on the type
     * state for the type's declared primary key; otherwise null.
     */
    public static HollowPrimaryKeyIndex findPrimaryKeyIndex(HollowTypeReadState typeState) {
        PrimaryKey pkey = getPrimaryKey(typeState.getSchema());
        if(pkey == null)
            return null;
        for(HollowTypeStateListener listener : typeState.getListeners()) {
            if(listener instanceof HollowPrimaryKeyIndex) {
                // only use an index built for exactly this primary key definition
                if(((HollowPrimaryKeyIndex) listener).getPrimaryKey().equals(pkey))
                    return (HollowPrimaryKeyIndex) listener;
            }
        }
        return null;
    }

    /**
     * Get the primary key for an object schema; null for non-OBJECT schemas or when none is declared.
     */
    public static PrimaryKey getPrimaryKey(HollowSchema schema) {
        if(schema.getSchemaType() == HollowSchema.SchemaType.OBJECT)
            return ((HollowObjectSchema)schema).getPrimaryKey();
        return null;
    }

    /**
     * Returns the ordinal corresponding to the search result of searching by primary key, or
     * ORDINAL_NONE when no record matches.
     */
    public static Integer getOrdinalToDisplay(HollowReadStateEngine readStateEngine, String query, Object[] parsedKey,
            int ordinal, BitSet selectedOrdinals, int[][] fieldPathIndexes, HollowTypeReadState keyTypeState) {
        if ("".equals(query) && ordinal != ORDINAL_NONE) { // trust ordinal if query is empty
            return ordinal;
        } else if (!"".equals(query)) {
            // verify ordinal key matches parsed key
            if (ordinal != ORDINAL_NONE && selectedOrdinals.get(ordinal)
                    && recordKeyEquals(keyTypeState, ordinal, parsedKey, fieldPathIndexes)) {
                return ordinal;
            } else {
                HollowPrimaryKeyIndex idx = findPrimaryKeyIndex(keyTypeState);
                if (idx != null) {
                    // N.B. - getMatchingOrdinal can return ORDINAL_NONE, the caller deals with it
                    return idx.getMatchingOrdinal(parsedKey);
                } else {
                    // no index, scan through records
                    ordinal = selectedOrdinals.nextSetBit(0);
                    while (ordinal != ORDINAL_NONE) {
                        if (recordKeyEquals(keyTypeState, ordinal, parsedKey, fieldPathIndexes)) {
                            return ordinal;
                        }
                        ordinal = selectedOrdinals.nextSetBit(ordinal + 1);
                    }
                }
            }
        }
        return ORDINAL_NONE;
    }

    /**
     * Compares the record at {@code ordinal} against the parsed key: for each key field, walks the
     * field path's REFERENCE hops, then compares the terminal field value.
     */
    private static boolean recordKeyEquals(HollowTypeReadState typeState, int ordinal, Object[] key, int[][] fieldPathIndexes) {
        HollowObjectTypeReadState objState = (HollowObjectTypeReadState)typeState;
        for(int i=0;i<fieldPathIndexes.length;i++) {
            int curOrdinal = ordinal;
            HollowObjectTypeReadState curState = objState;
            // follow all but the last element of the field path through referenced types
            for(int j=0;j<fieldPathIndexes[i].length - 1;j++) {
                curOrdinal = curState.readOrdinal(curOrdinal, fieldPathIndexes[i][j]);
                curState = (HollowObjectTypeReadState) curState.getSchema().getReferencedTypeState(fieldPathIndexes[i][j]);
            }
            if(!HollowReadFieldUtils.fieldValueEquals(curState, curOrdinal, fieldPathIndexes[i][fieldPathIndexes[i].length - 1], key[i]))
                return false;
        }
        return true;
    }
}
| 8,902 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/util/ObjectInternPool.java | package com.netflix.hollow.tools.util;
import com.netflix.hollow.core.memory.ByteArrayOrdinalMap;
import com.netflix.hollow.core.memory.ByteData;
import com.netflix.hollow.core.memory.ByteDataArray;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Optional;
// This class memoizes types by returning references to existing objects, or storing
// Objects if they are not currently in the pool
public class ObjectInternPool {
    private final ByteArrayOrdinalMap ordinalMap;
    // false whenever a write has occurred since the last prepareForRead()
    private boolean isReadyToRead = false;
    // ordinals interned since the last prepareForRead()
    HashSet<Integer> ordinalsInCycle;

    public ObjectInternPool() {
        this.ordinalMap = new ByteArrayOrdinalMap(1024);
        this.ordinalsInCycle = new HashSet<>();
    }

    /**
     * Flips the pool into read mode and begins a new cycle by clearing the set of ordinals
     * interned this cycle.
     */
    public void prepareForRead() {
        if(!isReadyToRead) {
            // NOTE(review): calls prepareForWrite() on the underlying map before reads begin --
            // confirm against ByteArrayOrdinalMap's contract that this is the intended call.
            ordinalMap.prepareForWrite();
        }
        ordinalsInCycle.clear();
        isReadyToRead = true;
    }

    /** @return true if the given ordinal was interned during the current cycle. */
    public boolean ordinalInCurrentCycle(int ordinal) {
        return ordinalsInCycle.contains(ordinal);
    }

    /**
     * Reads back the value stored at {@code ordinal}, decoded as the given field type.
     *
     * @throws IllegalArgumentException for unsupported field types
     */
    public Object getObject(int ordinal, FieldType type) {
        long pointer = ordinalMap.getPointerForData(ordinal);
        switch (type) {
            case BOOLEAN:
                return getBoolean(pointer);
            case FLOAT:
                return getFloat(pointer);
            case DOUBLE:
                return getDouble(pointer);
            case INT:
                return getInt(pointer);
            case LONG:
                return getLong(pointer);
            case STRING:
                return getString(pointer);
            default:
                throw new IllegalArgumentException("Unknown type " + type);
        }
    }

    public boolean getBoolean(long pointer) {
        ByteData byteData = ordinalMap.getByteData().getUnderlyingArray();
        return byteData.get(pointer) == 1;
    }

    public float getFloat(long pointer) {
        ByteData byteData = ordinalMap.getByteData().getUnderlyingArray();
        int intBytes = VarInt.readVInt(byteData, pointer);
        return Float.intBitsToFloat(intBytes);
    }

    public double getDouble(long pointer) {
        ByteData byteData = ordinalMap.getByteData().getUnderlyingArray();
        long longBytes = VarInt.readVLong(byteData, pointer);
        return Double.longBitsToDouble(longBytes);
    }

    public int getInt(long pointer) {
        ByteData byteData = ordinalMap.getByteData().getUnderlyingArray();
        return VarInt.readVInt(byteData, pointer);
    }

    public long getLong(long pointer) {
        ByteData byteData = ordinalMap.getByteData().getUnderlyingArray();
        return VarInt.readVLong(byteData, pointer);
    }

    /**
     * Reads back an interned String. The stored layout is a VarInt byte-length prefix followed by
     * that many UTF-8 bytes (see writeAndGetOrdinal).
     */
    public String getString(long pointer) {
        ByteData byteData = ordinalMap.getByteData().getUnderlyingArray();
        int length = VarInt.readVInt(byteData, pointer);
        // Skip past the VarInt length prefix: one byte per 7 bits of the value (minimum one byte).
        // The previous code assumed a single-byte prefix, which was wrong for strings > 127 bytes.
        long dataStart = pointer + 1;
        for (int remaining = length >>> 7; remaining != 0; remaining >>>= 7) {
            dataStart++;
        }
        byte[] bytes = new byte[length];
        for(int i=0;i<length;i++) {
            bytes[i] = byteData.get(dataStart + i);
        }
        // Decode with the same charset used when writing, so results don't depend on the platform default.
        return new String(bytes, StandardCharsets.UTF_8);
    }

    /**
     * Interns the given value, returning a stable ordinal: values with identical serialized form
     * always yield the same ordinal. Supported types: Boolean, Integer, Long, Float, Double, String.
     *
     * @throws IllegalArgumentException for null or unsupported argument types
     */
    public int writeAndGetOrdinal(Object objectToIntern) {
        if(objectToIntern==null) {
            throw new IllegalArgumentException("Cannot intern null objects");
        }
        isReadyToRead = false;
        ByteDataArray buf = new ByteDataArray();
        if(objectToIntern instanceof Float) {
            int intBits = Float.floatToIntBits((Float) objectToIntern);
            VarInt.writeVInt(buf, intBits);
        } else if(objectToIntern instanceof Double) {
            long longBits = Double.doubleToLongBits((Double) objectToIntern);
            VarInt.writeVLong(buf, longBits);
        } else if(objectToIntern instanceof Integer) {
            int intBits = (int) objectToIntern;
            VarInt.writeVInt(buf, intBits);
        } else if(objectToIntern instanceof Long) {
            long longBits = (long) objectToIntern;
            VarInt.writeVLong(buf, longBits);
        } else if(objectToIntern instanceof String) {
            // Store the UTF-8 byte count (NOT the char count) so getString can read back exactly the
            // right number of bytes -- the two differ for non-ASCII strings, which the previous code
            // (char-count prefix + platform-charset bytes) read back incorrectly.
            byte[] utf8 = ((String) objectToIntern).getBytes(StandardCharsets.UTF_8);
            VarInt.writeVInt(buf, utf8.length);
            for (byte b : utf8) {
                buf.write(b);
            }
        } else if(objectToIntern instanceof Boolean) {
            int valToWrite = (boolean) objectToIntern ? 1 : 0;
            VarInt.writeVInt(buf, valToWrite);
        } else {
            String className = objectToIntern.getClass().getName();
            throw new IllegalArgumentException("Cannot intern object of type " + className);
        }
        int ordinal = ordinalMap.getOrAssignOrdinal(buf);
        ordinalsInCycle.add(ordinal);
        return ordinal;
    }
} | 8,903 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/HollowDiff.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.util.SimultaneousExecutor;
import com.netflix.hollow.tools.diff.exact.DiffEqualityMapping;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.logging.Logger;
/**
* Calculate a detailed accounting for the differences between two data states.
* <p>
* The differences between two states are broken down by specified types. Records for each evaluated type are matched based on primary keys.
* The matched records are traversed in tandem to determine which individual fields/branches in their hierarchies differ.
* The difference between matched records is measured per field in the hierarchy as an integer value indicating the number
* of unmatched values in either the <i>from</i> or the <i>to</i> state.
* <p>
* Unmatched records are accounted for separately -- for the purposes of the diff, it is sufficient to mark these records as unmatched.
*
*/
public class HollowDiff {
    // Field types eligible to act as an implicit single-field primary key when a type declares none.
    // Made static final: this is a shared constant, not per-instance state.
    private static final EnumSet<FieldType> SINGLE_FIELD_SUPPORTED_TYPES = EnumSet.of(FieldType.INT, FieldType.LONG, FieldType.DOUBLE, FieldType.STRING, FieldType.FLOAT, FieldType.BOOLEAN);
    private static final Logger log = Logger.getLogger(HollowDiff.class.getName());

    private final HollowReadStateEngine fromStateEngine;
    private final HollowReadStateEngine toStateEngine;
    private final DiffEqualityMapping equalityMapping;
    // LinkedHashMap so type diffs are reported in configuration/discovery order
    private final Map<String, HollowTypeDiff> typeDiffs = new LinkedHashMap<>();

    /**
     * Instantiate a HollowDiff. By default, all OBJECT types with a defined PrimaryKey will be
     * configured to be diffed.
     * <p>
     * To calculate the diff, call calculateDiffs().
     *
     * @param from the "from" state
     * @param to the "to" state
     */
    public HollowDiff(HollowReadStateEngine from, HollowReadStateEngine to) {
        this(from, to, true, false);
    }

    /**
     * Instantiate a HollowDiff.
     * <p>
     * To calculate the diff, call calculateDiffs().
     *
     * @param from the "from" state
     * @param to the "to" state
     * @param isAutoDiscoverTypeDiff If true, all OBJECT types with a defined PrimaryKey will be configured to be diffed.
     */
    public HollowDiff(HollowReadStateEngine from, HollowReadStateEngine to, boolean isAutoDiscoverTypeDiff) {
        this(from, to, isAutoDiscoverTypeDiff, false);
    }

    /**
     * Instantiate a HollowDiff.
     * <p>
     * To calculate the diff, call calculateDiffs().
     *
     * @param from the "from" state
     * @param to the "to" state
     * @param isAutoDiscoverTypeDiff If true, all OBJECT types with a defined PrimaryKey will be configured to be diffed.
     * @param isIncludeNonPrimaryKeyTypes If true, all OBJECT types without PrimaryKey will also be configured to be diffed.
     */
    public HollowDiff(HollowReadStateEngine from, HollowReadStateEngine to, boolean isAutoDiscoverTypeDiff, boolean isIncludeNonPrimaryKeyTypes) {
        this.fromStateEngine = from;
        this.toStateEngine = to;
        this.equalityMapping = new DiffEqualityMapping(from, to);
        if (isAutoDiscoverTypeDiff) { // Auto Discover TypeDiff from both from and to StateEngine
            List<HollowSchema> schemas = new ArrayList<>();
            schemas.addAll(fromStateEngine.getSchemas());
            schemas.addAll(toStateEngine.getSchemas());
            for (HollowSchema schema : schemas) {
                if (schema instanceof HollowObjectSchema) {
                    HollowObjectSchema objectSchema = ((HollowObjectSchema) schema);
                    PrimaryKey pKey = objectSchema.getPrimaryKey();
                    if (pKey==null && !isIncludeNonPrimaryKeyTypes) continue;
                    // Support basic single-field types: treat the lone field as an implicit key
                    if (pKey==null && objectSchema.numFields()==1 && SINGLE_FIELD_SUPPORTED_TYPES.contains(objectSchema.getFieldType(0))) {
                        pKey = new PrimaryKey(schema.getName(), objectSchema.getFieldName(0));
                    }
                    addTypeDiff(schema.getName(), pKey==null? null : pKey.getFieldPaths());
                }
            }
        }
    }

    /**
     * Add a type to be included in the diff report
     *
     * @param type the type name
     * @param primaryKeyPaths the path(s) to the field(s) which comprise the type's primary key
     * @return the diff type
     */
    public HollowTypeDiff addTypeDiff(String type, String... primaryKeyPaths) {
        HollowTypeDiff typeDiff = new HollowTypeDiff(this, type, primaryKeyPaths);
        // only track types that actually exist in at least one of the two states
        if(typeDiff.hasAnyData())
            typeDiffs.put(type, typeDiff);
        return typeDiff;
    }

    public List<HollowTypeDiff> getTypeDiffs() {
        return new ArrayList<>(typeDiffs.values());
    }

    /**
     * Retrieve a diff report for a specific type in order to inspect the calculated differences
     * @param type the type name
     * @return the diff type
     */
    public HollowTypeDiff getTypeDiff(String type) {
        return typeDiffs.get(type);
    }

    public HollowReadStateEngine getFromStateEngine() {
        return fromStateEngine;
    }

    public HollowReadStateEngine getToStateEngine() {
        return toStateEngine;
    }

    /**
     * Run the diff: prepares equality mappings and per-type matches, then calculates each
     * configured type diff.
     */
    public void calculateDiffs() {
        long startTime = System.currentTimeMillis();
        prepareForDiffCalculation();
        long endTime = System.currentTimeMillis();
        log.info("PREPARED IN " + (endTime - startTime) + "ms");
        for(HollowTypeDiff typeDiff : typeDiffs.values()) {
            typeDiff.calculateDiffs();
        }
    }

    public DiffEqualityMapping getEqualityMapping() {
        return equalityMapping;
    }

    // Builds equality ordinal maps and primary-key matches concurrently: one task warms the
    // equality maps for all types, plus one match-calculation task per type.
    private void prepareForDiffCalculation() {
        SimultaneousExecutor executor = new SimultaneousExecutor(1 + typeDiffs.size(), getClass(), "prepare");
        executor.execute(() -> {
            for(HollowTypeDiff typeDiff : typeDiffs.values()) {
                equalityMapping.getEqualOrdinalMap(typeDiff.getTypeName());
            }
        });
        for(final HollowTypeDiff typeDiff : typeDiffs.values()) {
            executor.execute(typeDiff::calculateMatches);
        }
        try {
            executor.awaitSuccessfulCompletion();
        } catch (InterruptedException | ExecutionException e) {
            throw new RuntimeException(e);
        }
        equalityMapping.markPrepared();
    }
}
| 8,904 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/HollowTypeDiff.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.core.util.LongList;
import com.netflix.hollow.core.util.SimultaneousExecutor;
import com.netflix.hollow.tools.diff.count.HollowDiffCountingNode;
import com.netflix.hollow.tools.diff.count.HollowDiffObjectCountingNode;
import com.netflix.hollow.tools.diff.count.HollowFieldDiff;
import com.netflix.hollow.tools.diff.exact.DiffEqualOrdinalMap;
import com.netflix.hollow.tools.diff.exact.DiffEqualityMapping;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
/**
* Obtained via a {@link HollowDiff}, this is a report of the differences in a specific type between two data states.
*/
public class HollowTypeDiff {
    private final HollowDiff rootDiff;
    // from/to may be null when the type exists in only one of the two states (see getTotalItems*)
    private final HollowObjectTypeReadState from;
    private final HollowObjectTypeReadState to;
    private final HollowDiffMatcher matcher;
    private final String type;
    // type names at which diff traversal is cut short (detail traded for calculation speed)
    private final Set<String> shortcutTypes;
    // populated by calculateDiffs(); null before then
    private List<HollowFieldDiff> calculatedFieldDiffs;

    HollowTypeDiff(HollowDiff rootDiff, String type, String... matchPaths) {
        this.rootDiff = rootDiff;
        this.type = type;
        this.from = (HollowObjectTypeReadState) rootDiff.getFromStateEngine().getTypeState(type);
        this.to = (HollowObjectTypeReadState) rootDiff.getToStateEngine().getTypeState(type);
        this.matcher = new HollowDiffMatcher(this.from, this.to);
        this.shortcutTypes = new HashSet<>();
        // Allow basic diffing of types that do not have PrimaryKey/MatchPaths
        if (matchPaths!=null && matchPaths.length>0) {
            for (String matchPath : matchPaths) {
                addMatchPath(matchPath);
            }
        }
    }

    /**
     * @return The type name for this type diff
     */
    public String getTypeName() {
        return type;
    }

    /**
     * Indicate whether match paths (primary key components) are defined.
     * @return true if at least one match path has been added
     */
    public boolean hasMatchPaths() {
        return !matcher.getMatchPaths().isEmpty();
    }

    /**
     * Add a field path to a component of the primary key
     * @param path the field path
     */
    public void addMatchPath(String path) {
        matcher.addMatchPath(path);
    }

    /**
     * Shortcut the diff detail when encountering a specific type. This can be done to improve the performance
     * of diff calculation -- at the expense of some detail.
     *
     * @param type the type name
     */
    public void addShortcutType(String type) {
        shortcutTypes.add(type);
    }

    /**
     * @param type the type name
     * @return whether or not this type diff will shortcut at the specified type.
     */
    public boolean isShortcutType(String type) {
        return shortcutTypes.contains(type);
    }

    /**
     * Get the differences broken down by specific field paths.
     * Only populated after calculateDiffs() has run.
     *
     * @return the field differences
     */
    public List<HollowFieldDiff> getFieldDiffs() {
        return calculatedFieldDiffs;
    }

    /**
     * @return the total number of matched records (based on primary key)
     */
    public int getTotalNumberOfMatches() {
        return matcher.getMatchedOrdinals().size();
    }

    /**
     * @return A list of the record ordinals in the from state which did not have a corresponding match (based on primary key) in the to state.
     */
    public IntList getUnmatchedOrdinalsInFrom() {
        return matcher.getExtraInFrom();
    }

    /**
     * @return A list of the record ordinals in the to state which did not have a corresponding match (based on primary key) in the from state.
     */
    public IntList getUnmatchedOrdinalsInTo() {
        return matcher.getExtraInTo();
    }

    /**
     * @return The total 'diff score' (sum over all field diffs), useful as a very broad measure of the magnitude of the diff.
     */
    public long getTotalDiffScore() {
        long totalDiffScore = 0;
        for(HollowFieldDiff diff : calculatedFieldDiffs) {
            totalDiffScore += diff.getTotalDiffScore();
        }
        return totalDiffScore;
    }

    /**
     * @return The total number of records for this type in the from state.
     */
    public int getTotalItemsInFromState() {
        if (from == null) return 0;
        return from.getPopulatedOrdinals().cardinality();
    }

    /**
     * @return The total number of records for this type in the to state.
     */
    public int getTotalItemsInToState() {
        if (to == null) return 0;
        return to.getPopulatedOrdinals().cardinality();
    }

    /** @return true if this type exists in at least one of the two states. */
    public boolean hasAnyData() {
        return from != null || to != null;
    }

    public HollowObjectTypeReadState getFromTypeState() {
        return from;
    }

    public HollowObjectTypeReadState getToTypeState() {
        return to;
    }

    public HollowDiffMatcher getMatcher() {
        return matcher;
    }

    // Computes primary-key matches between the from and to states; invoked by HollowDiff during preparation.
    void calculateMatches() {
        matcher.calculateMatches();
    }

    // Traverses every matched record pair, accumulating per-field diff counts across worker threads.
    @SuppressWarnings("unchecked")
    void calculateDiffs() {
        final HollowDiffNodeIdentifier rootId = new HollowDiffNodeIdentifier(type);
        SimultaneousExecutor executor = new SimultaneousExecutor(getClass(), "calculate");
        final int numThreads = executor.getCorePoolSize();
        // one result list per worker thread; merged by combineResults() after all workers finish
        final List<HollowFieldDiff>results[] = new List[numThreads];
        for(int i=0;i<numThreads;i++) {
            final int threadId = i;
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    DiffEqualityMapping equalityMapping = rootDiff.getEqualityMapping();
                    HollowDiffCountingNode rootNode = new HollowDiffObjectCountingNode(rootDiff, HollowTypeDiff.this, rootId, from, to);
                    DiffEqualOrdinalMap rootNodeOrdinalMap = equalityMapping.getEqualOrdinalMap(type);
                    boolean requiresMissingFieldTraversal = equalityMapping.requiresMissingFieldTraversal(type);
                    LongList matches = matcher.getMatchedOrdinals();
                    // Each matched pair is packed into one long: from-ordinal in the high 32 bits,
                    // to-ordinal in the low 32. Threads stripe across the match list: thread t
                    // handles indexes t, t+numThreads, t+2*numThreads, ...
                    for(int i=threadId;i<matches.size();i+=numThreads) {
                        int fromOrdinal = (int)(matches.get(i) >> 32);
                        int toOrdinal = (int)matches.get(i);
                        // Full traversal only when the equality mapping doesn't already know both
                        // sides are identical; otherwise at most traverse missing fields.
                        if(rootNodeOrdinalMap.getIdentityFromOrdinal(fromOrdinal) == -1
                                || rootNodeOrdinalMap.getIdentityFromOrdinal(fromOrdinal) != rootNodeOrdinalMap.getIdentityToOrdinal(toOrdinal)) {
                            rootNode.prepare(fromOrdinal, toOrdinal);
                            rootNode.traverseDiffs(fromIntList(fromOrdinal), toIntList(toOrdinal));
                        } else if(requiresMissingFieldTraversal) {
                            rootNode.prepare(fromOrdinal, toOrdinal);
                            rootNode.traverseMissingFields(fromIntList(fromOrdinal), toIntList(toOrdinal));
                        }
                    }
                    results[threadId] = rootNode.getFieldDiffs();
                }

                // per-thread scratch lists, reused across iterations to avoid allocation
                private final IntList fromIntList = new IntList(1);
                private final IntList toIntList = new IntList(1);

                private IntList fromIntList(int ordinal) {
                    fromIntList.clear();
                    fromIntList.add(ordinal);
                    return fromIntList;
                }

                private IntList toIntList(int ordinal) {
                    toIntList.clear();
                    toIntList.add(ordinal);
                    return toIntList;
                }
            });
        }
        try {
            executor.awaitSuccessfulCompletion();
        } catch (InterruptedException | ExecutionException e) {
            throw new RuntimeException(e);
        }
        this.calculatedFieldDiffs = combineResults(results);
    }

    // Merges the per-thread field diffs, summing results for field identifiers seen by multiple threads.
    private List<HollowFieldDiff> combineResults(List<HollowFieldDiff> shardedResults[]) {
        Map<HollowDiffNodeIdentifier, HollowFieldDiff> combinedResultsMap = new HashMap<HollowDiffNodeIdentifier, HollowFieldDiff>();
        for(List<HollowFieldDiff> shardResult : shardedResults) {
            for(HollowFieldDiff fieldDiff : shardResult) {
                HollowFieldDiff combinedResult = combinedResultsMap.get(fieldDiff.getFieldIdentifier());
                if(combinedResult != null)
                    combinedResult.addResults(fieldDiff);
                else
                    combinedResultsMap.put(fieldDiff.getFieldIdentifier(), fieldDiff);
            }
        }
        List<HollowFieldDiff> combinedResults = new ArrayList<HollowFieldDiff>();
        combinedResults.addAll(combinedResultsMap.values());
        return combinedResults;
    }
}
| 8,905 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/HollowDiffRecordFieldExtractor.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff;
import com.netflix.hollow.core.read.dataaccess.HollowCollectionTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowMapTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import com.netflix.hollow.core.util.IntList;
import java.util.ArrayList;
import java.util.List;
/**
* A utility to extract the values of a field identified by a {@link HollowDiffNodeIdentifier} for a specific record in a {@link HollowDataAccess}.
*/
public class HollowDiffRecordFieldExtractor {
/**
 * Extracts the value(s) of the field identified by {@code fieldIdentifier} for the record at
 * {@code ordinal}, traversing the identifier's parent chain from the root type.
 */
public List<Object> extractValues(HollowDataAccess dataAccess, HollowDiffNodeIdentifier fieldIdentifier, int ordinal) {
    IntList ordinalList = new IntList(1);
    ordinalList.add(ordinal);
    // getType(...) resolves the root type name for the identifier -- defined elsewhere in this class
    return traverse(dataAccess.getTypeDataAccess(getType(fieldIdentifier)), ordinalList, fieldIdentifier, 0);
}
/**
 * Recursively walks one level of the field path per call, collecting all ordinals reachable at
 * each level (fanning out through collections and maps), until the type containing the terminal
 * field is reached, at which point the terminal field values are extracted.
 *
 * @param level index into {@code fieldIdentifier.getParents()} for the current traversal depth
 */
private List<Object> traverse(HollowTypeDataAccess typeDataAccess, IntList ordinals, HollowDiffNodeIdentifier fieldIdentifier, int level) {
    if(level == fieldIdentifier.getParents().size() - 1) {
        // reached the parent of the target field: read the values from the collected ordinals
        return extractValues(typeDataAccess, ordinals, fieldIdentifier);
    } else {
        HollowTypeDataAccess childDataAccess = null;
        IntList childOrdinals = new IntList();
        if(typeDataAccess instanceof HollowObjectTypeDataAccess) {
            // OBJECT: follow the named reference field for each current ordinal
            HollowObjectTypeDataAccess objectAccess = (HollowObjectTypeDataAccess)typeDataAccess;
            int fieldIdx = objectAccess.getSchema().getPosition(fieldIdentifier.getParents().get(level+1).getViaFieldName());
            childDataAccess = typeDataAccess.getDataAccess().getTypeDataAccess(objectAccess.getSchema().getReferencedType(fieldIdx));
            for(int i=0;i<ordinals.size();i++)
                childOrdinals.add(objectAccess.readOrdinal(ordinals.get(i), fieldIdx));
        } else if(typeDataAccess instanceof HollowCollectionTypeDataAccess) {
            // LIST/SET: fan out to every element ordinal of each current ordinal
            HollowCollectionTypeDataAccess collectionAccess = (HollowCollectionTypeDataAccess)typeDataAccess;
            childDataAccess = typeDataAccess.getDataAccess().getTypeDataAccess(collectionAccess.getSchema().getElementType());
            for(int i=0;i<ordinals.size();i++) {
                HollowOrdinalIterator iter = collectionAccess.ordinalIterator(ordinals.get(i));
                int childOrdinal = iter.next();
                while(childOrdinal != HollowOrdinalIterator.NO_MORE_ORDINALS) {
                    childOrdinals.add(childOrdinal);
                    childOrdinal = iter.next();
                }
            }
        } else if(typeDataAccess instanceof HollowMapTypeDataAccess) {
            // MAP: descend into either the keys or the values, chosen by the next path segment's name
            HollowMapTypeDataAccess mapAccess = (HollowMapTypeDataAccess)typeDataAccess;
            boolean isValue = fieldIdentifier.getParents().get(level + 1).getViaFieldName().equals("value");
            String childType = isValue ? mapAccess.getSchema().getValueType() : mapAccess.getSchema().getKeyType();
            childDataAccess = typeDataAccess.getDataAccess().getTypeDataAccess(childType);
            for(int i=0;i<ordinals.size();i++) {
                HollowMapEntryOrdinalIterator iter = mapAccess.ordinalIterator(ordinals.get(i));
                while(iter.next()) {
                    childOrdinals.add(isValue ? iter.getValue() : iter.getKey());
                }
            }
        }
        // NOTE(review): if typeDataAccess matches none of the branches above, childDataAccess stays
        // null and the recursion will fail downstream -- presumably unreachable for valid identifiers
        return traverse(childDataAccess, childOrdinals, fieldIdentifier, level + 1);
    }
}
private List<Object> extractValues(HollowTypeDataAccess typeDataAccess, IntList ordinals, HollowDiffNodeIdentifier fieldIdentifier) {
List<Object> values = new ArrayList<Object>();
HollowObjectTypeDataAccess objectAccess = (HollowObjectTypeDataAccess)typeDataAccess;
int fieldIdx = objectAccess.getSchema().getPosition(fieldIdentifier.getViaFieldName());
for(int i=0;i<ordinals.size();i++) {
switch(objectAccess.getSchema().getFieldType(fieldIdx)) {
case BOOLEAN:
values.add(objectAccess.readBoolean(ordinals.get(i), fieldIdx));
break;
case BYTES:
values.add(objectAccess.readBytes(ordinals.get(i), fieldIdx));
break;
case DOUBLE:
values.add(objectAccess.readDouble(ordinals.get(i), fieldIdx));
break;
case FLOAT:
values.add(objectAccess.readFloat(ordinals.get(i), fieldIdx));
break;
case INT:
values.add(objectAccess.readInt(ordinals.get(i), fieldIdx));
break;
case LONG:
values.add(objectAccess.readLong(ordinals.get(i), fieldIdx));
break;
case STRING:
values.add(objectAccess.readString(ordinals.get(i), fieldIdx));
break;
case REFERENCE:
throw new IllegalArgumentException();
}
}
return values;
}
private final String getType(HollowDiffNodeIdentifier nodeId) {
return nodeId.getParents().get(0).getNodeName();
}
}
| 8,906 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/HollowDiffNodeIdentifier.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* A breadcrumbs-like unique identifier for a field's location within a type hierarchy, used in the {@link HollowDiff}.
*
* Calling toString() returns a human-readable representation of the field within the type hierarchy.
*/
public class HollowDiffNodeIdentifier {
private final List<HollowDiffNodeIdentifier> parents;
private final String viaFieldName;
private final String nodeName;
public HollowDiffNodeIdentifier(String typeName) {
this(null, null, typeName);
}
public HollowDiffNodeIdentifier(HollowDiffNodeIdentifier parent, String viaFieldName, String typeName) {
this.parents = parent == null ?
Collections.<HollowDiffNodeIdentifier>emptyList()
: buildParentsList(parent);
this.viaFieldName = viaFieldName;
this.nodeName = typeName;
}
public List<HollowDiffNodeIdentifier> getParents() {
return parents;
}
public String getViaFieldName() {
return viaFieldName;
}
public String getNodeName() {
return nodeName;
}
private List<HollowDiffNodeIdentifier> buildParentsList(HollowDiffNodeIdentifier immediateParent) {
List<HollowDiffNodeIdentifier> parents = new ArrayList<HollowDiffNodeIdentifier>(immediateParent.getParents().size() + 1);
parents.addAll(immediateParent.getParents());
parents.add(immediateParent);
return parents;
}
public int hashCode() {
int hashCode = 0;
for(int i=0;i<parents.size();i++) {
String parentViaFieldName = parents.get(i).getViaFieldName();
if(parentViaFieldName != null)
hashCode = 31 * hashCode + parentViaFieldName.hashCode();
hashCode = 31 * hashCode + parents.get(i).getNodeName().hashCode();
}
if(viaFieldName != null)
hashCode = 31 * hashCode + viaFieldName.hashCode();
hashCode = 31 * hashCode + nodeName.hashCode();
return hashCode;
}
public boolean equals(Object other) {
if(this == other)
return true;
if(other instanceof HollowDiffNodeIdentifier) {
HollowDiffNodeIdentifier otherId = (HollowDiffNodeIdentifier)other;
if(otherId.getParents().size() == parents.size()) {
for(int i=parents.size() - 1;i >= 0;i--) {
HollowDiffNodeIdentifier myParent = parents.get(i);
HollowDiffNodeIdentifier otherParent = otherId.getParents().get(i);
if(!myParent.shallowEquals(otherParent))
return false;
}
return shallowEquals(otherId);
}
}
return false;
}
/**
* @return a human-readable representation of this field location
*/
public String toString() {
StringBuilder builder = new StringBuilder();
if(parents.size() > 0) {
builder.append(parents.get(0).getNodeName());
}
for(int i=1;i<parents.size();i++) {
builder.append('.').append(parents.get(i).getViaFieldName());
}
builder.append('.').append(viaFieldName);
builder.append(" (").append(nodeName).append(")");
return builder.toString();
}
private boolean shallowEquals(HollowDiffNodeIdentifier other) {
if(viaFieldName == null ?
other.getViaFieldName() == null
: viaFieldName.equals(other.getViaFieldName()))
return nodeName.equals(other.getNodeName());
return false;
}
}
| 8,907 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/HollowDiffMatcher.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff;
import com.netflix.hollow.core.HollowConstants;
import com.netflix.hollow.core.index.HollowPrimaryKeyIndex;
import com.netflix.hollow.core.read.engine.PopulatedOrdinalListener;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.core.util.LongList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.List;
/**
* Intended for use in the context of a HollowDiff.
*
* This class will match records of a specific type from two data states based on a user defined primary key.
*/
public class HollowDiffMatcher {

    private final List<String> matchPaths;
    private final HollowObjectTypeReadState fromTypeState;
    private final HollowObjectTypeReadState toTypeState;

    /** Each entry packs a matched pair: (fromOrdinal &lt;&lt; 32) | toOrdinal. */
    private final LongList matchedOrdinals;
    /** Ordinals populated in the from state with no counterpart in the to state. */
    private final IntList extraInFrom;
    /** Ordinals populated in the to state with no counterpart in the from state. */
    private final IntList extraInTo;

    // Built lazily by calculateMatches(); remain null when no primary key is configured.
    private HollowPrimaryKeyIndex fromIdx;
    private HollowPrimaryKeyIndex toIdx;

    /**
     * @param fromTypeState the type's state in the from snapshot; may be null if the type is absent there
     * @param toTypeState the type's state in the to snapshot; may be null if the type is absent there
     */
    public HollowDiffMatcher(HollowObjectTypeReadState fromTypeState, HollowObjectTypeReadState toTypeState) {
        this.matchPaths = new ArrayList<>();
        this.fromTypeState = fromTypeState;
        this.toTypeState = toTypeState;
        this.matchedOrdinals = new LongList();
        this.extraInFrom = new IntList();
        this.extraInTo = new IntList();
    }

    /**
     * Add a primary key field path used to pair up records across the two states.
     *
     * @param path the field path
     */
    public void addMatchPath(String path) {
        matchPaths.add(path);
    }

    public List<String> getMatchPaths() {
        return matchPaths;
    }

    /**
     * Pair up records across the two states by primary key.  After this call,
     * {@link #getMatchedOrdinals()}, {@link #getExtraInFrom()} and {@link #getExtraInTo()}
     * reflect the results.
     */
    public void calculateMatches() {
        if (fromTypeState==null) {
            // Type exists only in the to state: every populated record there is "extra".
            toTypeState.getPopulatedOrdinals().stream().forEach(i -> extraInTo.add(i));
            return;
        }
        if (toTypeState==null) {
            // Type exists only in the from state: every populated record there is "extra".
            fromTypeState.getPopulatedOrdinals().stream().forEach(i -> extraInFrom.add(i));
            return;
        }

        // No primary key, so no matching will be done: everything on both sides is "extra".
        if (matchPaths==null || matchPaths.isEmpty()) {
            toTypeState.getPopulatedOrdinals().stream().forEach(i -> extraInTo.add(i));
            fromTypeState.getPopulatedOrdinals().stream().forEach(i -> extraInFrom.add(i));
            return;
        }

        fromIdx = new HollowPrimaryKeyIndex(fromTypeState.getStateEngine(), fromTypeState.getSchema().getName(), matchPaths.toArray(new String[matchPaths.size()]));
        toIdx = new HollowPrimaryKeyIndex(toTypeState.getStateEngine(), toTypeState.getSchema().getName(), matchPaths.toArray(new String[matchPaths.size()]));

        BitSet fromPopulatedOrdinals = fromTypeState.getListener(PopulatedOrdinalListener.class).getPopulatedOrdinals();
        // Start with every from-side ordinal presumed unmatched; clear bits as matches are found.
        BitSet fromUnmatchedOrdinals = new BitSet(fromPopulatedOrdinals.length());
        fromUnmatchedOrdinals.or(fromPopulatedOrdinals);

        BitSet toPopulatedOrdinals = toTypeState.getListener(PopulatedOrdinalListener.class).getPopulatedOrdinals();

        int candidateToMatchOrdinal = toPopulatedOrdinals.nextSetBit(0);
        while(candidateToMatchOrdinal != -1) {
            Object key[] = toIdx.getRecordKey(candidateToMatchOrdinal);

            int matchedOrdinal = HollowConstants.ORDINAL_NONE;
            try {
                matchedOrdinal = fromIdx.getMatchingOrdinal(key);
            } catch(NullPointerException ex) {
                throw new RuntimeException("Error fetching matching ordinal for null type " + toTypeState.getSchema().getName()
                        + " with key field values " + Arrays.asList(key) + " at ordinal : " + candidateToMatchOrdinal
                        + " with stack trace ", ex);
            }

            if(matchedOrdinal != HollowConstants.ORDINAL_NONE) {
                // Pack the pair: from ordinal in the high 32 bits, to ordinal in the low 32.
                matchedOrdinals.add(((long)matchedOrdinal << 32) | candidateToMatchOrdinal);
                fromUnmatchedOrdinals.clear(matchedOrdinal);
            } else {
                extraInTo.add(candidateToMatchOrdinal);
            }

            candidateToMatchOrdinal = toPopulatedOrdinals.nextSetBit(candidateToMatchOrdinal + 1);
        }

        // Whatever remains unmatched on the from side is "extra" there.
        int unmatchedFromOrdinal = fromUnmatchedOrdinals.nextSetBit(0);
        while(unmatchedFromOrdinal != -1) {
            extraInFrom.add(unmatchedFromOrdinal);
            unmatchedFromOrdinal = fromUnmatchedOrdinals.nextSetBit(unmatchedFromOrdinal + 1);
        }
    }

    public LongList getMatchedOrdinals() {
        return matchedOrdinals;
    }

    public IntList getExtraInFrom() {
        return extraInFrom;
    }

    public IntList getExtraInTo() {
        return extraInTo;
    }

    /**
     * @return a display string of the record's primary key values, or {@code ORDINAL:n} when
     *         no primary key index is available (similar to Hollow Explorer's display).
     */
    public String getKeyDisplayString(HollowObjectTypeReadState state, int ordinal) {
        Object[] key = null;
        if(state == fromTypeState && fromIdx!=null) {
            key = fromIdx.getRecordKey(ordinal);
        } else if(state == toTypeState && toIdx!=null) {
            key = toIdx.getRecordKey(ordinal);
        }
        // Show display similar to Hollow Explorer when there is no primary key
        if(key == null)
            return "ORDINAL:" + ordinal;
        return keyDisplayString(key);
    }

    // Space-separated concatenation of the key field values.
    private String keyDisplayString(Object[] key) {
        StringBuilder sb = new StringBuilder(key[0].toString());
        for(int i=1;i<key.length;i++) {
            sb.append(" ");
            sb.append(key[i].toString());
        }
        return sb.toString();
    }
}
| 8,908 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/specific/HollowSpecificDiff.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff.specific;
import com.netflix.hollow.core.index.traversal.HollowIndexerValueTraverser;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.util.SimultaneousExecutor;
import com.netflix.hollow.tools.diff.HollowDiffMatcher;
import java.util.Arrays;
import java.util.BitSet;
import java.util.concurrent.atomic.AtomicLong;
/**
* The HollowSpecificDiff allows for the investigation of diffs among specific fields for matched records in two states.
*/
public class HollowSpecificDiff {

    private final HollowReadStateEngine from;
    private final HollowReadStateEngine to;
    private final HollowDiffMatcher matcher;
    private final String type;

    // Optional element-level key paths: when set, element tuples are matched by key first,
    // then compared on the remaining (non-key) paths to distinguish "modified" from "unmatched".
    private BitSet elementKeyPaths;
    private BitSet elementNonKeyPaths;
    private String elementPaths[];

    // Aggregates are AtomicLongs because calculate() accumulates them from multiple threads.
    private final AtomicLong totalUnmatchedFromElements;
    private final AtomicLong totalUnmatchedToElements;
    private final AtomicLong totalModifiedElements;
    private final AtomicLong totalMatchedEqualElements;

    /**
     * @param from the from state
     * @param to the to state
     * @param type the type to diff
     */
    public HollowSpecificDiff(HollowReadStateEngine from, HollowReadStateEngine to, String type) {
        this.from = from;
        this.to = to;
        this.matcher = new HollowDiffMatcher((HollowObjectTypeReadState)from.getTypeState(type), (HollowObjectTypeReadState)to.getTypeState(type));
        this.type = type;
        this.totalUnmatchedFromElements = new AtomicLong();
        this.totalUnmatchedToElements = new AtomicLong();
        this.totalMatchedEqualElements = new AtomicLong();
        this.totalModifiedElements = new AtomicLong();
    }

    /**
     * Set the primary key paths which will be used to find matching records across the two states
     *
     * @param paths the key paths
     */
    public void setRecordMatchPaths(String... paths) {
        for(String path : paths)
            matcher.addMatchPath(path);
    }

    /**
     * Set the paths for which we will inspect differences across the two states.
     * Resets any previously-calculated results and clears any element key paths.
     *
     * @param paths the paths for inspection
     */
    public void setElementMatchPaths(String... paths) {
        resetResults();
        this.elementPaths = paths;
        this.elementKeyPaths = null;
        this.elementNonKeyPaths = null;
    }

    /**
     * Optionally specify paths for which we will match records within an individual type's hierarchy.
     * Each key path must already have been supplied via {@link #setElementMatchPaths(String...)}.
     *
     * @param paths the paths for matching
     */
    public void setElementKeyPaths(String... paths) {
        resetResults();
        elementKeyPaths = new BitSet(elementPaths.length);
        for(int i=0;i<paths.length;i++) {
            int elementPathIdx = getElementPathIdx(paths[i]);
            if(elementPathIdx == -1)
                throw new IllegalArgumentException("Key path must have been specified as an element match path. Offending path: " + paths[i]);
            elementKeyPaths.set(elementPathIdx);
        }
        // Non-key paths are simply the complement of the key paths.
        elementNonKeyPaths = new BitSet(elementPaths.length);
        elementNonKeyPaths.set(0, elementPaths.length);
        elementNonKeyPaths.andNot(elementKeyPaths);
    }

    /**
     * @return the index of {@code path} within the configured element match paths, or -1 if absent
     */
    public int getElementPathIdx(String path) {
        for(int i=0;i<elementPaths.length;i++) {
            if(elementPaths[i].equals(path))
                return i;
        }
        return -1;
    }

    /**
     * Find the matching records (based on primary keys) across states
     */
    public void prepareMatches() {
        matcher.calculateMatches();
    }

    /**
     * Calculate the differences
     */
    public void calculate() {
        resetResults();

        SimultaneousExecutor executor = new SimultaneousExecutor(getClass(), "calculate");
        final int numThreads = executor.getCorePoolSize();

        for(int i=0;i<numThreads;i++) {
            final int threadNumber = i;
            executor.execute(new Runnable() {
                public void run() {
                    // Each thread builds its own traversers (they hold per-traversal state)
                    // and processes a strided partition (i += numThreads) of the work lists.
                    HollowIndexerValueTraverser fromTraverser = new HollowIndexerValueTraverser(from, type, elementPaths);
                    HollowIndexerValueTraverser toTraverser = new HollowIndexerValueTraverser(to, type, elementPaths);

                    int hashedResults[] = new int[16];

                    for(int i=threadNumber;i<matcher.getMatchedOrdinals().size();i += numThreads) {
                        long ordinalPair = matcher.getMatchedOrdinals().get(i);
                        // Matched pairs are packed as (fromOrdinal << 32) | toOrdinal.
                        int fromOrdinal = (int)(ordinalPair >>> 32);
                        int toOrdinal = (int)ordinalPair;

                        fromTraverser.traverse(fromOrdinal);
                        toTraverser.traverse(toOrdinal);

                        // Keep the open-addressed table at most half full, growing as needed.
                        if(fromTraverser.getNumMatches() * 2 > hashedResults.length)
                            hashedResults = new int[hashTableSize(fromTraverser.getNumMatches())];

                        populateHashTable(fromTraverser, hashedResults);
                        countMatches(fromTraverser, toTraverser, hashedResults);
                    }

                    // Records present on only one side contribute all of their element tuples
                    // to the corresponding unmatched totals.
                    for(int i=threadNumber;i<matcher.getExtraInFrom().size();i+=numThreads) {
                        fromTraverser.traverse(matcher.getExtraInFrom().get(i));
                        totalUnmatchedFromElements.addAndGet(fromTraverser.getNumMatches());
                    }

                    for(int i=threadNumber;i<matcher.getExtraInTo().size();i+=numThreads) {
                        toTraverser.traverse(matcher.getExtraInTo().get(i));
                        totalUnmatchedToElements.addAndGet(toTraverser.getNumMatches());
                    }
                }
            });
        }

        try {
            executor.awaitSuccessfulCompletion();
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    // Counts, for one matched record pair, how many element value tuples are equal, modified
    // (equal key paths, different non-key paths), or left over on either side.  The from-side
    // tuples were placed into hashedResults by populateHashTable().
    private void countMatches(HollowIndexerValueTraverser fromTraverser, HollowIndexerValueTraverser toTraverser, int[] hashedResults) {
        int numMatchedEqualElements = 0;
        int numModifiedElements = 0;

        int hashMask = hashedResults.length - 1;

        for(int j=0;j<toTraverser.getNumMatches();j++) {
            int hash = elementKeyPaths == null ? toTraverser.getMatchHash(j) : toTraverser.getMatchHash(j, elementKeyPaths);
            int bucket = hash & hashMask;

            // Linear-probe until an empty (-1) slot terminates the search.
            while(hashedResults[bucket] != -1) {
                if(elementKeyPaths == null) {
                    if(fromTraverser.isMatchEqual(hashedResults[bucket], toTraverser, j)) {
                        numMatchedEqualElements++;
                        break;
                    }
                } else {
                    if(fromTraverser.isMatchEqual(hashedResults[bucket], toTraverser, j, elementKeyPaths)) {
                        if(fromTraverser.isMatchEqual(hashedResults[bucket], toTraverser, j, elementNonKeyPaths))
                            numMatchedEqualElements++;
                        else
                            numModifiedElements++;
                        break;
                    }
                }

                bucket++;
                bucket &= hashMask;
            }
        }

        // NOTE(review): matched from-side entries are not removed from the table, so duplicate
        // to-side tuples may pair with the same from-side entry more than once — confirm intended.
        int numCommonMatches = numMatchedEqualElements + numModifiedElements;

        totalMatchedEqualElements.addAndGet(numMatchedEqualElements);
        totalModifiedElements.addAndGet(numModifiedElements);
        totalUnmatchedFromElements.addAndGet(fromTraverser.getNumMatches() - numCommonMatches);
        totalUnmatchedToElements.addAndGet(toTraverser.getNumMatches() - numCommonMatches);
    }

    // Fills the open-addressed table with the from-side tuple indices; -1 marks empty slots.
    private void populateHashTable(HollowIndexerValueTraverser fromTraverser, int[] hashedResults) {
        Arrays.fill(hashedResults, -1);

        int hashMask = hashedResults.length - 1;

        for(int j=0;j<fromTraverser.getNumMatches();j++) {
            int hash = elementKeyPaths == null ? fromTraverser.getMatchHash(j) : fromTraverser.getMatchHash(j, elementKeyPaths);
            int bucket = hash & hashMask;
            while(hashedResults[bucket] != -1) {
                bucket++;
                bucket &= hashMask;
            }
            hashedResults[bucket] = j;
        }
    }

    // Smallest power of two >= 2 * numMatches (table sized for <= 50% load factor).
    private int hashTableSize(int numMatches) {
        return 1 << (32 - Integer.numberOfLeadingZeros((numMatches * 2) - 1));
    }

    private void resetResults() {
        totalUnmatchedFromElements.set(0);
        totalUnmatchedToElements.set(0);
        totalMatchedEqualElements.set(0);
        totalModifiedElements.set(0);
    }

    public long getTotalUnmatchedFromElements() {
        return totalUnmatchedFromElements.get();
    }

    public long getTotalUnmatchedToElements() {
        return totalUnmatchedToElements.get();
    }

    public long getTotalMatchedEqualElements() {
        return totalMatchedEqualElements.get();
    }

    public long getTotalModifiedElements() {
        return totalModifiedElements.get();
    }
}
| 8,909 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/count/HollowDiffCollectionCountingNode.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff.count;
import com.netflix.hollow.core.read.engine.HollowCollectionTypeReadState;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.tools.diff.HollowDiff;
import com.netflix.hollow.tools.diff.HollowDiffNodeIdentifier;
import com.netflix.hollow.tools.diff.HollowTypeDiff;
import com.netflix.hollow.tools.diff.exact.DiffEqualOrdinalFilter;
import java.util.List;
/**
* Counting nodes are used by the HollowDiff to count and aggregate changes for specific record types in a data model.
*
* This type of counting node is applicable to collection types.
*
* Not intended for external consumption.
*/
public class HollowDiffCollectionCountingNode extends HollowDiffCountingNode {

    private final HollowCollectionTypeReadState fromState;
    private final HollowCollectionTypeReadState toState;
    private final HollowDiffCountingNode elementNode;
    private final DiffEqualOrdinalFilter referenceFilter;
    private final boolean requiresTraversalForMissingFields;

    // Reusable scratch lists holding the element ordinals fanned out from the current
    // collection ordinals on each side.
    private final IntList elementFromOrdinals = new IntList();
    private final IntList elementToOrdinals = new IntList();

    public HollowDiffCollectionCountingNode(HollowDiff diff, HollowTypeDiff topLevelTypeDiff, HollowDiffNodeIdentifier nodeId, HollowCollectionTypeReadState fromState, HollowCollectionTypeReadState toState) {
        super(diff, topLevelTypeDiff, nodeId);
        this.fromState = fromState;
        this.toState = toState;

        HollowTypeReadState fromElementState = (fromState == null) ? null : fromState.getSchema().getElementTypeState();
        HollowTypeReadState toElementState = (toState == null) ? null : toState.getSchema().getElementTypeState();
        String elementType = (fromState != null) ? fromState.getSchema().getElementType() : toState.getSchema().getElementType();

        this.elementNode = getHollowDiffCountingNode(fromElementState, toElementState, "element");
        this.referenceFilter = new DiffEqualOrdinalFilter(equalityMapping.getEqualOrdinalMap(elementType));
        this.requiresTraversalForMissingFields = equalityMapping.requiresMissingFieldTraversal(elementType);
    }

    @Override
    public void prepare(int topLevelFromOrdinal, int topLevelToOrdinal) {
        elementNode.prepare(topLevelFromOrdinal, topLevelToOrdinal);
    }

    @Override
    public int traverseDiffs(IntList fromOrdinals, IntList toOrdinals) {
        gatherElementOrdinals(fromOrdinals, toOrdinals);
        referenceFilter.filter(elementFromOrdinals, elementToOrdinals);

        IntList unmatchedFrom = referenceFilter.getUnmatchedFromOrdinals();
        IntList unmatchedTo = referenceFilter.getUnmatchedToOrdinals();

        int score = 0;

        if(unmatchedFrom.size() != 0 || unmatchedTo.size() != 0)
            score += elementNode.traverseDiffs(unmatchedFrom, unmatchedTo);

        if(requiresTraversalForMissingFields) {
            IntList matchedFrom = referenceFilter.getMatchedFromOrdinals();
            IntList matchedTo = referenceFilter.getMatchedToOrdinals();
            if(matchedFrom.size() != 0 || matchedTo.size() != 0)
                score += elementNode.traverseMissingFields(matchedFrom, matchedTo);
        }

        return score;
    }

    @Override
    public int traverseMissingFields(IntList fromOrdinals, IntList toOrdinals) {
        gatherElementOrdinals(fromOrdinals, toOrdinals);
        return elementNode.traverseMissingFields(elementFromOrdinals, elementToOrdinals);
    }

    @Override
    public List<HollowFieldDiff> getFieldDiffs() {
        return elementNode.getFieldDiffs();
    }

    // Clears and refills the reusable element ordinal lists with every element ordinal
    // referenced by the supplied collection ordinals on each side.
    private void gatherElementOrdinals(IntList fromOrdinals, IntList toOrdinals) {
        elementFromOrdinals.clear();
        elementToOrdinals.clear();

        if(fromState != null)
            for(int i=0; i<fromOrdinals.size(); i++)
                appendElementOrdinals(fromState, fromOrdinals.get(i), elementFromOrdinals);

        if(toState != null)
            for(int i=0; i<toOrdinals.size(); i++)
                appendElementOrdinals(toState, toOrdinals.get(i), elementToOrdinals);
    }

    // Appends each element ordinal of the collection at the given ordinal to the target list.
    private void appendElementOrdinals(HollowCollectionTypeReadState typeState, int ordinal, IntList target) {
        HollowOrdinalIterator iter = typeState.ordinalIterator(ordinal);
        for(int elementOrdinal = iter.next(); elementOrdinal != HollowOrdinalIterator.NO_MORE_ORDINALS; elementOrdinal = iter.next())
            target.add(elementOrdinal);
    }
}
| 8,910 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/count/HollowDiffCountingNode.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff.count;
import com.netflix.hollow.core.read.engine.HollowCollectionTypeReadState;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.map.HollowMapTypeReadState;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.tools.diff.HollowDiff;
import com.netflix.hollow.tools.diff.HollowDiffNodeIdentifier;
import com.netflix.hollow.tools.diff.HollowTypeDiff;
import com.netflix.hollow.tools.diff.exact.DiffEqualityMapping;
import java.util.List;
/**
* Counting nodes are used by the HollowDiff to count and aggregate changes for specific record types in a data model.
*
* Not intended for external consumption.
*
*/
public abstract class HollowDiffCountingNode {

    protected static final IntList EMPTY_ORDINAL_LIST = new IntList(0);

    private final HollowDiff diff;
    private final HollowTypeDiff topLevelTypeDiff;

    protected final DiffEqualityMapping equalityMapping;
    protected final HollowDiffNodeIdentifier nodeId;

    public HollowDiffCountingNode(HollowDiff diff, HollowTypeDiff topLevelTypeDiff, HollowDiffNodeIdentifier nodeId) {
        this.diff = diff;
        this.topLevelTypeDiff = topLevelTypeDiff;
        this.equalityMapping = diff.getEqualityMapping();
        this.nodeId = nodeId;
    }

    public abstract void prepare(int topLevelFromOrdinal, int topLevelToOrdinal);

    public abstract int traverseDiffs(IntList fromOrdinals, IntList toOrdinals);

    public abstract int traverseMissingFields(IntList fromOrdinals, IntList toOrdinals);

    public abstract List<HollowFieldDiff> getFieldDiffs();

    /**
     * Builds the child counting node for the referenced type reached via {@code viaFieldName},
     * choosing the implementation from the referenced type's schema.
     */
    protected HollowDiffCountingNode getHollowDiffCountingNode(HollowTypeReadState refFromState, HollowTypeReadState refToState, String viaFieldName) {
        // Type absent on both sides: nothing to count.
        if(refFromState == null && refToState == null)
            return HollowDiffMissingCountingNode.INSTANCE;

        HollowSchema schema = (refFromState != null) ? refFromState.getSchema() : refToState.getSchema();
        HollowDiffNodeIdentifier childId = new HollowDiffNodeIdentifier(nodeId, viaFieldName, schema.getName());

        // Shortcut types are scored without descending into their fields.
        if(topLevelTypeDiff.isShortcutType(schema.getName()))
            return new HollowDiffShortcutTypeCountingNode(diff, topLevelTypeDiff, childId);

        switch(schema.getSchemaType()) {
            case OBJECT:
                return new HollowDiffObjectCountingNode(diff, topLevelTypeDiff, childId, (HollowObjectTypeReadState)refFromState, (HollowObjectTypeReadState)refToState);
            case LIST:
            case SET:
                return new HollowDiffCollectionCountingNode(diff, topLevelTypeDiff, childId, (HollowCollectionTypeReadState)refFromState, (HollowCollectionTypeReadState)refToState);
            case MAP:
                return new HollowDiffMapCountingNode(diff, topLevelTypeDiff, childId, (HollowMapTypeReadState)refFromState, (HollowMapTypeReadState)refToState);
        }

        throw new IllegalArgumentException("I don't know how to create a HollowDiffCountingNode for a " + schema.getSchemaType());
    }
}
| 8,911 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/count/HollowDiffShortcutTypeCountingNode.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff.count;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.tools.diff.HollowDiff;
import com.netflix.hollow.tools.diff.HollowDiffNodeIdentifier;
import com.netflix.hollow.tools.diff.HollowTypeDiff;
import java.util.Collections;
import java.util.List;
public class HollowDiffShortcutTypeCountingNode extends HollowDiffCountingNode {

    private final HollowFieldDiff fieldDiff;

    // The top-level record pair currently being scored, supplied via prepare().
    private int topLevelFrom;
    private int topLevelTo;

    public HollowDiffShortcutTypeCountingNode(HollowDiff diff, HollowTypeDiff topLevelTypeDiff, HollowDiffNodeIdentifier nodeId) {
        super(diff, topLevelTypeDiff, nodeId);
        this.fieldDiff = new HollowFieldDiff(nodeId);
    }

    @Override
    public void prepare(int topLevelFromOrdinal, int topLevelToOrdinal) {
        topLevelFrom = topLevelFromOrdinal;
        topLevelTo = topLevelToOrdinal;
    }

    @Override
    public int traverseDiffs(IntList fromOrdinals, IntList toOrdinals) {
        return recordDiff(fromOrdinals, toOrdinals);
    }

    @Override
    public int traverseMissingFields(IntList fromOrdinals, IntList toOrdinals) {
        return recordDiff(fromOrdinals, toOrdinals);
    }

    // Scores the diff as the total ordinal count on both sides; any nonzero score is recorded
    // against the currently-prepared top-level record pair.
    private int recordDiff(IntList fromOrdinals, IntList toOrdinals) {
        int score = fromOrdinals.size() + toOrdinals.size();
        if(score > 0)
            fieldDiff.addDiff(topLevelFrom, topLevelTo, score);
        return score;
    }

    @Override
    public List<HollowFieldDiff> getFieldDiffs() {
        return fieldDiff.getTotalDiffScore() > 0
                ? Collections.singletonList(fieldDiff)
                : Collections.<HollowFieldDiff>emptyList();
    }
}
| 8,912 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/count/HollowDiffFieldCountingNode.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff.count;
import static com.netflix.hollow.core.read.HollowReadFieldUtils.fieldHashCode;
import static com.netflix.hollow.core.read.HollowReadFieldUtils.fieldsAreEqual;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.tools.diff.HollowDiff;
import com.netflix.hollow.tools.diff.HollowDiffNodeIdentifier;
import com.netflix.hollow.tools.diff.HollowTypeDiff;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
* Counting nodes are used by the HollowDiff to count and aggregate changes for specific record types in a data model.
*
* This type of counting node is applicable to specific fields in object types.
*
* Not intended for external consumption.
*/
public class HollowDiffFieldCountingNode extends HollowDiffCountingNode {
// Type states being compared; either may be null when the type is absent on one side.
private final HollowObjectTypeReadState fromState;
private final HollowObjectTypeReadState toState;
// Position of this field in each side's schema, or -1 when that schema does not declare it.
private final int fromFieldIndex;
private final int toFieldIndex;
// Open-addressed hash table (linear probing) over the field values of the "from" ordinals.
// hashedOrdinals holds one representative ordinal per distinct field value (-1 = empty slot),
// ordinalHashCodes the value's hash code, and ordinalHashCounts the occurrence count.
private int[] hashedOrdinals;
private int[] ordinalHashCodes;
private int[] ordinalHashCounts;
// Load threshold (~70% of capacity) at which the table is grown.
private int hashSizeBeforeGrow;
// Number of distinct values currently indexed.
private int hashSize;
// Top-level record pair currently being scored; set via prepare().
private int currentTopLevelFromOrdinal;
private int currentTopLevelToOrdinal;
// Count of "to" values that found no remaining "from" counterpart during comparison.
private int unmatchedToFields;
private final HollowFieldDiff fieldDiff;
public HollowDiffFieldCountingNode(HollowDiff diff, HollowTypeDiff topLevelTypeDiff, HollowDiffNodeIdentifier nodeId, HollowObjectTypeReadState fromState, HollowObjectTypeReadState toState, HollowObjectSchema unionSchema, int unionFieldIndex) {
super(diff, topLevelTypeDiff, nodeId);
this.fromState = fromState;
this.toState = toState;
String fieldName = unionSchema.getFieldName(unionFieldIndex);
// getPosition returns -1 when the named field is missing from that schema.
this.fromFieldIndex = fromState == null ? -1 : fromState.getSchema().getPosition(fieldName);
this.toFieldIndex = toState == null ? -1 : toState.getSchema().getPosition(fieldName);
this.fieldDiff = new HollowFieldDiff(nodeId);
this.hashedOrdinals = new int[16];
this.ordinalHashCodes = new int[16];
this.ordinalHashCounts = new int[16];
// 11 is ~70% of the initial capacity of 16; kept in sync by growHashTable().
this.hashSizeBeforeGrow = 11;
Arrays.fill(hashedOrdinals, -1);
}
// Records which top-level record pair subsequent traverse* scores are attributed to.
public void prepare(int topLevelFromOrdinal, int topLevelToOrdinal) {
this.currentTopLevelFromOrdinal = topLevelFromOrdinal;
this.currentTopLevelToOrdinal = topLevelToOrdinal;
}
/**
 * Scores the difference between the two sets of field values: indexes all "from"
 * values in the hash table, then cancels matching "to" values against them.
 * The score is the count of values left unmatched on either side.
 */
@Override
public int traverseDiffs(IntList fromOrdinals, IntList toOrdinals) {
if(fromFieldIndex == -1 || toFieldIndex == -1) {
return traverseMissingFields(fromOrdinals, toOrdinals);
}
clearHashTable();
for(int i=0;i<fromOrdinals.size();i++) {
indexFromOrdinal(fromOrdinals.get(i));
}
for(int i=0;i<toOrdinals.size();i++) {
compareToOrdinal(toOrdinals.get(i));
}
// Leftover counts in the table are "from" values never matched by a "to" value.
int score = unmatchedToFields;
for(int i=0;i<ordinalHashCounts.length;i++) {
score += ordinalHashCounts[i];
}
if(score != 0) {
fieldDiff.addDiff(currentTopLevelFromOrdinal, currentTopLevelToOrdinal, score);
}
return score;
}
/**
 * When the field exists on only one side, every record on the side that has the
 * field counts as a diff; when both sides declare it, nothing is scored here.
 */
@Override
public int traverseMissingFields(IntList fromOrdinals, IntList toOrdinals) {
if(fromFieldIndex == -1) {
fieldDiff.addDiff(currentTopLevelFromOrdinal, currentTopLevelToOrdinal, toOrdinals.size());
return toOrdinals.size();
}
if(toFieldIndex == -1) {
fieldDiff.addDiff(currentTopLevelFromOrdinal, currentTopLevelToOrdinal, fromOrdinals.size());
return fromOrdinals.size();
}
return 0;
}
// Resets the table for reuse; capacity is retained across invocations.
private void clearHashTable() {
Arrays.fill(hashedOrdinals, -1);
Arrays.fill(ordinalHashCounts, 0);
unmatchedToFields = 0;
hashSize = 0;
}
// Adds one "from" ordinal's field value to the table, growing first if at threshold.
private void indexFromOrdinal(int ordinal) {
if(hashSize == hashSizeBeforeGrow) {
growHashTable();
}
int hashCode = fieldHashCode(fromState, ordinal, fromFieldIndex);
// hashIntoArray returns true only when a brand-new distinct value was inserted.
if(hashIntoArray(ordinal, hashCode, 1, hashedOrdinals, ordinalHashCodes, ordinalHashCounts))
hashSize++;
}
// Attempts to cancel one "to" value against an indexed "from" value with equal field content.
private void compareToOrdinal(int ordinal) {
int hashCode = fieldHashCode(toState, ordinal, toFieldIndex);
int bucket = hashCode & (hashedOrdinals.length - 1);
while(hashedOrdinals[bucket] != -1) {
/// check to see if this is an equal value.
if(fieldsAreEqual(fromState, hashedOrdinals[bucket], fromFieldIndex, toState, ordinal, toFieldIndex)) {
if(ordinalHashCounts[bucket] > 0) {
ordinalHashCounts[bucket]--;
} else {
// The value exists on the "from" side but its occurrences were already consumed.
unmatchedToFields++;
}
return;
}
bucket = (bucket + 1) & (hashedOrdinals.length - 1);
}
unmatchedToFields++;
}
// Doubles capacity and rehashes all existing entries, preserving their counts.
private void growHashTable() {
int newHashedOrdinals[] = new int[hashedOrdinals.length * 2];
int newOrdinalHashCodes[] = new int[ordinalHashCodes.length * 2];
int newOrdinalHashCodeCounts[] = new int[ordinalHashCounts.length * 2];
Arrays.fill(newHashedOrdinals, -1);
long ordinalsAndHashCodes[] = ordinalsAndHashCodes();
for(int i=0;i<ordinalsAndHashCodes.length;i++) {
// High 32 bits: ordinal; low 32 bits: hash code (see ordinalsAndHashCodes()).
int hashOrdinal = (int)(ordinalsAndHashCodes[i] >> 32);
int hashCode = (int)ordinalsAndHashCodes[i];
int hashCount = findOrdinalCount(hashOrdinal, hashCode);
hashIntoArray(hashOrdinal, hashCode, hashCount, newHashedOrdinals, newOrdinalHashCodes, newOrdinalHashCodeCounts);
}
hashedOrdinals = newHashedOrdinals;
ordinalHashCodes = newOrdinalHashCodes;
ordinalHashCounts = newOrdinalHashCodeCounts;
// Maintain the ~70% load-factor threshold for the doubled capacity.
hashSizeBeforeGrow = newHashedOrdinals.length * 7 / 10;
}
// Snapshots all occupied slots as (ordinal << 32 | hashCode) longs, sorted by ordinal.
private long[] ordinalsAndHashCodes() {
long ordinalsAndHashCodes[] = new long[hashSize];
int count = 0;
for(int i=0;i<hashedOrdinals.length;i++) {
if(hashedOrdinals[i] != -1)
ordinalsAndHashCodes[count++] = ((long)hashedOrdinals[i] << 32) | (ordinalHashCodes[i] & 0xFFFFFFFFL);
}
Arrays.sort(ordinalsAndHashCodes);
return ordinalsAndHashCodes;
}
// Looks up the occurrence count for an entry known to be present in the (old) table.
private int findOrdinalCount(int ordinal, int hashCode) {
int bucket = hashCode & (hashedOrdinals.length - 1);
while(hashedOrdinals[bucket] != ordinal)
bucket = (bucket + 1) & (hashedOrdinals.length - 1);
return ordinalHashCounts[bucket];
}
/**
 * Inserts (ordinal, hashCode, count) into the supplied table arrays using linear
 * probing. If an entry with an equal field value already exists, its count is
 * incremented instead and false is returned; true means a new slot was filled.
 */
private boolean hashIntoArray(int ordinal, int hashCode, int count, int hashedOrdinals[], int ordinalHashCodes[], int ordinalHashCounts[]) {
int bucket = hashCode & (hashedOrdinals.length - 1);
while(hashedOrdinals[bucket] != -1) {
/// check to see if this is an equal value.
if(fieldsAreEqual(fromState, hashedOrdinals[bucket], fromFieldIndex, fromState, ordinal, fromFieldIndex)) {
ordinalHashCounts[bucket]++;
return false;
}
bucket = (bucket + 1) & (hashedOrdinals.length - 1);
}
hashedOrdinals[bucket] = ordinal;
ordinalHashCodes[bucket] = hashCode;
ordinalHashCounts[bucket] = count;
return true;
}
// Returns the accumulated field diff, or an empty list when nothing differed.
@Override
public List<HollowFieldDiff> getFieldDiffs() {
if(fieldDiff.getTotalDiffScore() > 0)
return Collections.singletonList(fieldDiff);
return Collections.emptyList();
}
}
| 8,913 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/count/HollowDiffMissingCountingNode.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff.count;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.tools.diff.HollowDiff;
import com.netflix.hollow.tools.diff.HollowDiffNodeIdentifier;
import com.netflix.hollow.tools.diff.HollowTypeDiff;
import java.util.Collections;
import java.util.List;
/**
* Counting nodes are used by the HollowDiff to count and aggregate changes for specific record types in a data model.
*
* This type of counting node is applicable to types which are missing.
*
* Not intended for external consumption.
*/
public class HollowDiffMissingCountingNode extends HollowDiffCountingNode {

    /** Shared reusable instance; safe because this node holds no per-traversal state. */
    public static final HollowDiffMissingCountingNode INSTANCE = new HollowDiffMissingCountingNode(null, null, null);

    public HollowDiffMissingCountingNode(HollowDiff diff, HollowTypeDiff topLevelTypeDiff, HollowDiffNodeIdentifier nodeId) {
        super(diff, topLevelTypeDiff, nodeId);
    }

    @Override
    public void prepare(int topLevelFromOrdinal, int topLevelToOrdinal) {
        // nothing to prepare: a missing type never contributes a diff
    }

    @Override
    public int traverseDiffs(IntList fromOrdinals, IntList toOrdinals) {
        return 0;
    }

    @Override
    public int traverseMissingFields(IntList fromOrdinals, IntList toOrdinals) {
        return 0;
    }

    @Override
    public List<HollowFieldDiff> getFieldDiffs() {
        return Collections.emptyList();
    }
}
| 8,914 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/count/HollowDiffObjectCountingNode.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff.count;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.tools.diff.HollowDiff;
import com.netflix.hollow.tools.diff.HollowDiffNodeIdentifier;
import com.netflix.hollow.tools.diff.HollowTypeDiff;
import com.netflix.hollow.tools.diff.exact.DiffEqualOrdinalFilter;
import java.util.ArrayList;
import java.util.List;
/**
* Counting nodes are used by the HollowDiff to count and aggregate changes for specific record types in a data model.
*
* This type of counting node is applicable to object types.
*
* Not intended for external consumption.
*/
public class HollowDiffObjectCountingNode extends HollowDiffCountingNode {
// Object type states being compared; either may be null when the type is absent on one side.
private final HollowObjectTypeReadState fromState;
private final HollowObjectTypeReadState toState;
// Schemas for each side; an empty schema stands in when one side is missing.
private final HollowObjectSchema fromSchema;
private final HollowObjectSchema toSchema;
// Union of the two schemas; every per-field array below is indexed by union field position.
private final HollowObjectSchema unionSchema;
// Union field index -> field position in the individual schema, or -1 when absent there.
private final int[] fromFieldMapping;
private final int[] toFieldMapping;
// One child counting node per union field.
private final HollowDiffCountingNode fieldNodes[];
// True where the referenced type must also be traversed for fields missing on one side.
private final boolean fieldRequiresMissingFieldTraversal[];
// REFERENCE fields only: filters out ordinal pairs already known to be exactly equal.
private final DiffEqualOrdinalFilter fieldEqualOrdinalFilters[];
public HollowDiffObjectCountingNode(HollowDiff diff, HollowTypeDiff topLevelTypeDiff, HollowDiffNodeIdentifier nodeId, HollowObjectTypeReadState fromState, HollowObjectTypeReadState toState) {
super(diff, topLevelTypeDiff, nodeId);
this.fromState = fromState;
this.toState = toState;
this.fromSchema = fromState == null ? emptySchema(toState.getSchema()) : fromState.getSchema();
this.toSchema = toState == null ? emptySchema(fromState.getSchema()) : toState.getSchema();
if(!fromSchema.getName().equals(toSchema.getName()))
throw new IllegalArgumentException("Cannot diff between two schemas with different names: from '" + fromSchema.getName() + "' to '" + toSchema.getName() + "'");
this.unionSchema = fromSchema.findUnionSchema(toSchema);
this.fieldNodes = new HollowDiffCountingNode[unionSchema.numFields()];
this.fromFieldMapping = createFieldMapping(unionSchema, fromSchema);
this.toFieldMapping = createFieldMapping(unionSchema, toSchema);
this.fieldRequiresMissingFieldTraversal = new boolean[unionSchema.numFields()];
this.fieldEqualOrdinalFilters = new DiffEqualOrdinalFilter[unionSchema.numFields()];
for(int i=0;i<unionSchema.numFields();i++) {
int fromFieldIndex = fromSchema.getPosition(unionSchema.getFieldName(i));
int toFieldIndex = toSchema.getPosition(unionSchema.getFieldName(i));
if(unionSchema.getFieldType(i) == FieldType.REFERENCE) {
// Reference fields get a child node for the referenced type plus an equality filter.
HollowTypeReadState refFromState = fromFieldIndex == -1 ? null : fromSchema.getReferencedTypeState(fromFieldIndex);
HollowTypeReadState refToState = toFieldIndex == -1 ? null : toSchema.getReferencedTypeState(toFieldIndex);
fieldNodes[i] = getHollowDiffCountingNode(refFromState, refToState, unionSchema.getFieldName(i));
// NOTE: equalityMapping is inherited from HollowDiffCountingNode (declared outside this file).
fieldEqualOrdinalFilters[i] = new DiffEqualOrdinalFilter(equalityMapping.getEqualOrdinalMap(unionSchema.getReferencedType(i)));
if(refFromState == null || refToState == null || equalityMapping.requiresMissingFieldTraversal(unionSchema.getReferencedType(i)))
fieldRequiresMissingFieldTraversal[i] = true;
} else {
// Non-reference (value) fields are scored directly by a field counting node.
HollowDiffNodeIdentifier childNodeId = new HollowDiffNodeIdentifier(nodeId, unionSchema.getFieldName(i), unionSchema.getFieldType(i).toString());
fieldNodes[i] = new HollowDiffFieldCountingNode(diff, topLevelTypeDiff, childNodeId, fromState, toState, unionSchema, i);
}
}
}
// Placeholder schema (same name, zero fields) for a side on which the type is missing.
private HollowObjectSchema emptySchema(HollowObjectSchema other) {
return new HollowObjectSchema(other.getName(), 0);
}
// Propagates the current top-level record pair to every child field node.
public void prepare(int topLevelFromOrdinal, int topLevelToOrdinal) {
for(int i=0;i<fieldNodes.length;i++) {
fieldNodes[i].prepare(topLevelFromOrdinal, topLevelToOrdinal);
}
}
// Scratch lists reused across traversals to avoid per-call allocation.
private final IntList traversalFromOrdinals = new IntList();
private final IntList traversalToOrdinals = new IntList();
/**
 * Scores all union fields across the given from/to ordinal sets. Reference fields
 * dereference into the child type, filter out exactly-equal pairs, and recurse;
 * value fields are scored directly.
 */
@Override
public int traverseDiffs(IntList fromOrdinals, IntList toOrdinals) {
int score = 0;
for(int i=0;i<fieldNodes.length;i++) {
int fromFieldIdx = fromFieldMapping[i];
int toFieldIdx = toFieldMapping[i];
if(unionSchema.getFieldType(i) == FieldType.REFERENCE) {
traversalFromOrdinals.clear();
traversalToOrdinals.clear();
// Collect the referenced ordinals on each side, skipping null (-1) references.
if(fromFieldIdx != -1) {
for(int j=0;j<fromOrdinals.size();j++) {
int fromOrdinal = fromOrdinals.get(j);
int refOrdinal = fromState.readOrdinal(fromOrdinal, fromFieldIdx);
if(refOrdinal != -1)
traversalFromOrdinals.add(refOrdinal);
}
}
if(toFieldIdx != -1) {
for(int j=0;j<toOrdinals.size();j++) {
int toOrdinal = toOrdinals.get(j);
int refOrdinal = toState.readOrdinal(toOrdinal, toFieldIdx);
if(refOrdinal != -1)
traversalToOrdinals.add(refOrdinal);
}
}
if(traversalFromOrdinals.size() != 0 || traversalToOrdinals.size() != 0) {
// Split ordinals into matched (exactly equal) and unmatched sets.
fieldEqualOrdinalFilters[i].filter(traversalFromOrdinals, traversalToOrdinals);
if(fieldEqualOrdinalFilters[i].getUnmatchedFromOrdinals().size() != 0 || fieldEqualOrdinalFilters[i].getUnmatchedToOrdinals().size() != 0)
score += fieldNodes[i].traverseDiffs(fieldEqualOrdinalFilters[i].getUnmatchedFromOrdinals(), fieldEqualOrdinalFilters[i].getUnmatchedToOrdinals());
// Even equal pairs may hide fields present on only one side of the schema.
if(fieldRequiresMissingFieldTraversal[i])
if(fieldEqualOrdinalFilters[i].getMatchedFromOrdinals().size() != 0 || fieldEqualOrdinalFilters[i].getMatchedToOrdinals().size() != 0)
score += fieldNodes[i].traverseMissingFields(fieldEqualOrdinalFilters[i].getMatchedFromOrdinals(), fieldEqualOrdinalFilters[i].getMatchedToOrdinals());
}
} else {
// Value field: pass an empty list for whichever side lacks the field.
if(fromFieldIdx == -1)
score += fieldNodes[i].traverseDiffs(EMPTY_ORDINAL_LIST, toOrdinals);
else if(toFieldIdx == -1)
score += fieldNodes[i].traverseDiffs(fromOrdinals, EMPTY_ORDINAL_LIST);
else
score += fieldNodes[i].traverseDiffs(fromOrdinals, toOrdinals);
}
}
return score;
}
// Traverses only fields that can surface missing-field diffs beneath equal record pairs.
public int traverseMissingFields(IntList fromOrdinals, IntList toOrdinals) {
int score = 0;
for(int i=0;i<fieldNodes.length;i++) {
if(fieldRequiresMissingFieldTraversal[i]) {
traversalFromOrdinals.clear();
traversalToOrdinals.clear();
if(fromFieldMapping[i] != -1) {
for(int j=0;j<fromOrdinals.size();j++) {
int fromOrdinal = fromState.readOrdinal(fromOrdinals.get(j), fromFieldMapping[i]);
if(fromOrdinal != -1)
traversalFromOrdinals.add(fromOrdinal);
}
}
if(toFieldMapping[i] != -1) {
for(int j=0;j<toOrdinals.size();j++) {
int toOrdinal = toState.readOrdinal(toOrdinals.get(j), toFieldMapping[i]);
if(toOrdinal != -1)
traversalToOrdinals.add(toOrdinal);
}
}
score += fieldNodes[i].traverseMissingFields(traversalFromOrdinals, traversalToOrdinals);
} else if(fieldNodes[i] instanceof HollowDiffFieldCountingNode) {
score += fieldNodes[i].traverseMissingFields(fromOrdinals, toOrdinals);
}
}
return score;
}
// Builds union-field-index -> individual-schema-position mapping (-1 where absent).
private int[] createFieldMapping(HollowObjectSchema unionSchema, HollowObjectSchema individualSchema) {
int mapping[] = new int[unionSchema.numFields()];
for(int i=0;i<unionSchema.numFields();i++) {
String fieldName = unionSchema.getFieldName(i);
mapping[i] = individualSchema.getPosition(fieldName);
}
return mapping;
}
// Aggregates the field diffs collected by all child nodes.
@Override
public List<HollowFieldDiff> getFieldDiffs() {
List<HollowFieldDiff> list = new ArrayList<HollowFieldDiff>();
for(HollowDiffCountingNode node : fieldNodes) {
list.addAll(node.getFieldDiffs());
}
return list;
}
}
| 8,915 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/count/HollowFieldDiff.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff.count;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.tools.diff.HollowDiff;
import com.netflix.hollow.tools.diff.HollowDiffNodeIdentifier;
import com.netflix.hollow.tools.diff.HollowTypeDiff;
/**
* Obtained via a {@link HollowTypeDiff}, this is a report of the differences in a specific field between two data states.
*
*/
public class HollowFieldDiff implements Comparable<HollowFieldDiff> {

    private final HollowDiffNodeIdentifier fieldIdentifier;

    // Parallel lists: entry i describes the i-th (fromOrdinal, toOrdinal) record pair
    // that differed on this field, together with that pair's accumulated score.
    private final IntList diffFromOrdinals;
    private final IntList diffToOrdinals;
    private final IntList diffPairScores;

    // Sum of all recorded pair scores; used for relative magnitude and ordering.
    private long totalDiffScore;

    public HollowFieldDiff(HollowDiffNodeIdentifier fieldIdentifier) {
        this.diffFromOrdinals = new IntList();
        this.diffToOrdinals = new IntList();
        this.diffPairScores = new IntList();
        this.fieldIdentifier = fieldIdentifier;
    }

    /**
     * Should be called exclusively from the {@link HollowDiff} -- not intended for external consumption.
     *
     * Consecutive calls for the same (fromOrdinal, toOrdinal) pair are coalesced into a
     * single entry by accumulating the score.
     *
     * @param fromOrdinal the from ordinal
     * @param toOrdinal the to ordinal
     * @param score the score
     */
    public void addDiff(int fromOrdinal, int toOrdinal, int score) {
        if(isSameDiffAsLastAdd(fromOrdinal, toOrdinal)) {
            int scoreIdx = diffPairScores.size() - 1;
            diffPairScores.set(scoreIdx, diffPairScores.get(scoreIdx) + score);
        } else {
            diffFromOrdinals.add(fromOrdinal);
            diffToOrdinals.add(toOrdinal);
            diffPairScores.add(score);
        }
        totalDiffScore += score;
    }

    // True when the most recently recorded pair matches (fromOrdinal, toOrdinal).
    private boolean isSameDiffAsLastAdd(int fromOrdinal, int toOrdinal) {
        return diffFromOrdinals.size() > 0
                && diffFromOrdinals.get(diffFromOrdinals.size() - 1) == fromOrdinal
                && diffToOrdinals.get(diffToOrdinals.size() - 1) == toOrdinal;
    }

    /**
     * @return The identifier for the field on which this diff reports.
     */
    public HollowDiffNodeIdentifier getFieldIdentifier() {
        return fieldIdentifier;
    }

    /**
     * @return the total score, used to judge relative magnitude of the diff.
     */
    public long getTotalDiffScore() {
        return totalDiffScore;
    }

    /**
     * @return the number of records which had at least one diff for this field.
     */
    public int getNumDiffs() {
        return diffToOrdinals.size();
    }

    /**
     * @param diffPairIdx a number from 0-n, where n is the value returned from numDiffs
     * @return the from ordinal for the (diffPairIdx)th record pair in which there were differences for this field.
     */
    public int getFromOrdinal(int diffPairIdx) {
        return diffFromOrdinals.get(diffPairIdx);
    }

    /**
     * @param diffPairIdx a number from 0-n, where n is the value returned from numDiffs
     * @return the to ordinal for the (diffPairIdx)th record pair in which there were differences for this field.
     */
    public int getToOrdinal(int diffPairIdx) {
        return diffToOrdinals.get(diffPairIdx);
    }

    /**
     * @param diffPairIdx a number from 0-n, where n is the value returned from numDiffs
     * @return the score of the diff for this field in the (diffPairIdx)th record pair in which there were differences for this field.
     */
    public int getPairScore(int diffPairIdx) {
        return diffPairScores.get(diffPairIdx);
    }

    /**
     * This should be called exclusively from the {@link HollowDiff}. Not for external consumption.
     * @param otherFieldDiff the field diff to add
     */
    public void addResults(HollowFieldDiff otherFieldDiff) {
        for(int i=0;i<otherFieldDiff.getNumDiffs();i++) {
            addDiff(otherFieldDiff.getFromOrdinal(i), otherFieldDiff.getToOrdinal(i), otherFieldDiff.getPairScore(i));
        }
    }

    /**
     * Orders field diffs by descending total diff score.
     *
     * NOTE: this ordering is not consistent with equals() (which is not overridden),
     * so distinct diffs with equal scores compare as 0.
     */
    @Override
    public int compareTo(HollowFieldDiff o) {
        // Long.compare replaces the previous hand-rolled comparison; identical ordering.
        return Long.compare(o.totalDiffScore, totalDiffScore);
    }
}
| 8,916 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/count/HollowDiffMapCountingNode.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff.count;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.map.HollowMapTypeReadState;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.tools.diff.HollowDiff;
import com.netflix.hollow.tools.diff.HollowDiffNodeIdentifier;
import com.netflix.hollow.tools.diff.HollowTypeDiff;
import com.netflix.hollow.tools.diff.exact.DiffEqualOrdinalFilter;
import java.util.ArrayList;
import java.util.List;
/**
* Counting nodes are used by the HollowDiff to count and aggregate changes for specific record types in a data model.
*
* This type of counting node is applicable to map types.
*
* Not intended for external consumption.
*/
public class HollowDiffMapCountingNode extends HollowDiffCountingNode {
// Map type states being compared; either may be null when the type is absent on one side.
private final HollowMapTypeReadState fromState;
private final HollowMapTypeReadState toState;
// Child counting nodes for the map's key and value types.
private final HollowDiffCountingNode keyNode;
private final HollowDiffCountingNode valueNode;
// Filters which remove key/value ordinal pairs already known to be exactly equal.
private final DiffEqualOrdinalFilter keyFilter;
private final DiffEqualOrdinalFilter valueFilter;
// True when matched (equal) ordinals must still be traversed for one-sided fields.
private final boolean keyRequiresTraversalForMissingFields;
private final boolean valueRequiresTraversalForMissingFields;
public HollowDiffMapCountingNode(HollowDiff diff, HollowTypeDiff topLevelTypeDiff, HollowDiffNodeIdentifier nodeId, HollowMapTypeReadState fromState, HollowMapTypeReadState toState) {
super(diff, topLevelTypeDiff, nodeId);
this.fromState = fromState;
this.toState = toState;
HollowTypeReadState keyFromState = fromState == null ? null : fromState.getSchema().getKeyTypeState();
HollowTypeReadState keyToState = toState == null ? null : toState.getSchema().getKeyTypeState();
this.keyNode = getHollowDiffCountingNode(keyFromState, keyToState, "key");
HollowTypeReadState valueFromState = fromState == null ? null : fromState.getSchema().getValueTypeState();
HollowTypeReadState valueToState = toState == null ? null : toState.getSchema().getValueTypeState();
this.valueNode = getHollowDiffCountingNode(valueFromState, valueToState, "value");
// NOTE(review): assumes at least one of fromState/toState is non-null -- confirm with callers.
String keyType = fromState != null ? fromState.getSchema().getKeyType() : toState.getSchema().getKeyType();
String valueType = fromState != null ? fromState.getSchema().getValueType() : toState.getSchema().getValueType();
this.keyFilter = new DiffEqualOrdinalFilter(equalityMapping.getEqualOrdinalMap(keyType));
this.valueFilter = new DiffEqualOrdinalFilter(equalityMapping.getEqualOrdinalMap(valueType));
this.keyRequiresTraversalForMissingFields = equalityMapping.requiresMissingFieldTraversal(keyType);
this.valueRequiresTraversalForMissingFields = equalityMapping.requiresMissingFieldTraversal(valueType);
}
// Propagates the current top-level record pair to the key and value child nodes.
@Override
public void prepare(int topLevelFromOrdinal, int topLevelToOrdinal) {
keyNode.prepare(topLevelFromOrdinal, topLevelToOrdinal);
valueNode.prepare(topLevelFromOrdinal, topLevelToOrdinal);
}
// Scratch lists reused across traversals to avoid per-call allocation.
private final IntList traversalFromKeyOrdinals = new IntList();
private final IntList traversalToKeyOrdinals = new IntList();
private final IntList traversalFromValueOrdinals = new IntList();
private final IntList traversalToValueOrdinals = new IntList();
/**
 * Scores key and value differences across all map entries of the given from/to
 * ordinals: collects referenced ordinals, filters out exactly-equal pairs, recurses
 * into the unmatched sets, and (when required) traverses matched sets for
 * missing-field diffs.
 */
@Override
public int traverseDiffs(IntList fromOrdinals, IntList toOrdinals) {
fillTraversalLists(fromOrdinals, toOrdinals);
keyFilter.filter(traversalFromKeyOrdinals, traversalToKeyOrdinals);
valueFilter.filter(traversalFromValueOrdinals, traversalToValueOrdinals);
int score = 0;
if(keyFilter.getUnmatchedFromOrdinals().size() != 0 || keyFilter.getUnmatchedToOrdinals().size() != 0)
score += keyNode.traverseDiffs(keyFilter.getUnmatchedFromOrdinals(), keyFilter.getUnmatchedToOrdinals());
if(keyRequiresTraversalForMissingFields)
if(keyFilter.getMatchedFromOrdinals().size() != 0 || keyFilter.getMatchedToOrdinals().size() != 0)
score += keyNode.traverseMissingFields(keyFilter.getMatchedFromOrdinals(), keyFilter.getMatchedToOrdinals());
if(valueFilter.getUnmatchedFromOrdinals().size() != 0 || valueFilter.getUnmatchedToOrdinals().size() != 0)
score += valueNode.traverseDiffs(valueFilter.getUnmatchedFromOrdinals(), valueFilter.getUnmatchedToOrdinals());
if(valueRequiresTraversalForMissingFields)
if(valueFilter.getMatchedFromOrdinals().size() != 0 || valueFilter.getMatchedToOrdinals().size() != 0)
score += valueNode.traverseMissingFields(valueFilter.getMatchedFromOrdinals(), valueFilter.getMatchedToOrdinals());
return score;
}
// Missing-field traversal recurses into all referenced key/value ordinals unfiltered.
@Override
public int traverseMissingFields(IntList fromOrdinals, IntList toOrdinals) {
fillTraversalLists(fromOrdinals, toOrdinals);
int score = 0;
score += keyNode.traverseMissingFields(traversalFromKeyOrdinals, traversalToKeyOrdinals);
score += valueNode.traverseMissingFields(traversalFromValueOrdinals, traversalToValueOrdinals);
return score;
}
// Aggregates the field diffs collected by the key and value child nodes.
@Override
public List<HollowFieldDiff> getFieldDiffs() {
List<HollowFieldDiff> list = new ArrayList<HollowFieldDiff>();
list.addAll(keyNode.getFieldDiffs());
list.addAll(valueNode.getFieldDiffs());
return list;
}
// Refills the four scratch lists with the key/value ordinals referenced by each side.
private void fillTraversalLists(IntList fromOrdinals, IntList toOrdinals) {
traversalFromKeyOrdinals.clear();
traversalToKeyOrdinals.clear();
traversalFromValueOrdinals.clear();
traversalToValueOrdinals.clear();
if(fromState != null) {
for(int i=0;i<fromOrdinals.size();i++) {
fillListsWithReferencedOrdinals(fromState, fromOrdinals.get(i), traversalFromKeyOrdinals, traversalFromValueOrdinals);
}
}
if(toState != null) {
for(int i=0;i<toOrdinals.size();i++) {
fillListsWithReferencedOrdinals(toState, toOrdinals.get(i), traversalToKeyOrdinals, traversalToValueOrdinals);
}
}
}
// Appends every entry's key and value ordinal for one map record to the given lists.
private void fillListsWithReferencedOrdinals(HollowMapTypeReadState typeState, int ordinal, IntList fillKeyList, IntList fillValueList) {
HollowMapEntryOrdinalIterator iter = typeState.ordinalIterator(ordinal);
while(iter.next()) {
fillKeyList.add(iter.getKey());
fillValueList.add(iter.getValue());
}
}
}
| 8,917 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/exact/DiffEqualOrdinalMap.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff.exact;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.util.IntList;
import java.util.Arrays;
/**
* Not intended for external consumption.
*/
public class DiffEqualOrdinalMap {
// Sentinel map with no matches; all lookups on it miss.
public static final DiffEqualOrdinalMap EMPTY_MAP = new DiffEqualOrdinalMap(0);
// Open-addressed table of (toOrdinal-or-clusterIndex << 32 | fromOrdinal) entries; -1L = empty slot.
private final long fromOrdinalsMap[];
// Flattened clusters of to-ordinals for from-ordinals with multiple matches; the last
// element of each cluster is tagged with Integer.MIN_VALUE as an end marker.
private final IntList pivotedToOrdinalClusters;
// Open-addressed table of (identityOrdinal << 32 | toOrdinal) entries; -1L = empty slot.
private final long toOrdinalsIdentityMap[];
public DiffEqualOrdinalMap(int numMatches) {
// Smallest power of two >= 2 * numMatches (keeps load factor <= 50%).
// For numMatches == 0 the shift amount is 32, which Java masks to 0, yielding
// a size-1 table -- still valid for the EMPTY_MAP sentinel (lookups always miss).
int hashTableSize = 1 << (32 - Integer.numberOfLeadingZeros(numMatches * 2 - 1));
fromOrdinalsMap = new long[hashTableSize];
toOrdinalsIdentityMap = new long[hashTableSize];
pivotedToOrdinalClusters = new IntList();
Arrays.fill(fromOrdinalsMap, -1L);
Arrays.fill(toOrdinalsIdentityMap, -1L);
}
/**
 * Records a one-to-one equality match: fromOrdinal's single equal counterpart
 * is toOrdinal. The entry packs the to ordinal in the high 32 bits and the
 * from ordinal in the low 32 bits, inserted via linear probing.
 */
public void putEqualOrdinal(int fromOrdinal, int toOrdinal) {
    final int mask = fromOrdinalsMap.length - 1;
    int bucket = HashCodes.hashInt(fromOrdinal) & mask;
    while (fromOrdinalsMap[bucket] != -1) {
        bucket = (bucket + 1) & mask;
    }
    fromOrdinalsMap[bucket] = ((long) toOrdinal << 32) | fromOrdinal;
}
/**
 * Records that fromOrdinal is equal to one or more to ordinals.
 * Single match: the to ordinal is packed directly into the entry's high 32 bits.
 * Multiple matches: the entry's sign bit (Long.MIN_VALUE) is set and the high 32
 * bits instead hold an index into pivotedToOrdinalClusters, where the cluster's
 * final element is tagged with Integer.MIN_VALUE as an end-of-cluster marker.
 */
public void putEqualOrdinals(int fromOrdinal, IntList toOrdinals) {
long fromOrdinalMapEntry = (long)toOrdinals.get(0) << 32 | fromOrdinal;
if(toOrdinals.size() > 1) {
fromOrdinalMapEntry = Long.MIN_VALUE | (long)pivotedToOrdinalClusters.size() << 32 | fromOrdinal;
for(int i=0;i<toOrdinals.size();i++) {
int valueToAdd = toOrdinals.get(i);
if(i == toOrdinals.size() - 1)
valueToAdd |= Integer.MIN_VALUE;
pivotedToOrdinalClusters.add(valueToAdd);
}
}
// Linear-probe insert keyed on the from ordinal.
int hashCode = HashCodes.hashInt(fromOrdinal);
int bucket = hashCode & (fromOrdinalsMap.length - 1);
while(fromOrdinalsMap[bucket] != -1)
bucket = (bucket + 1) & (fromOrdinalsMap.length - 1);
fromOrdinalsMap[bucket] = fromOrdinalMapEntry;
}
/**
 * Populates toOrdinalsIdentityMap so each to ordinal resolves to a canonical
 * "identity" ordinal: itself for single matches, and the first to ordinal of
 * its cluster for multi-match clusters.
 */
public void buildToOrdinalIdentityMapping() {
// Single-match entries (sign bit clear): the to ordinal is its own identity.
for(int i=0;i<fromOrdinalsMap.length;i++) {
if(fromOrdinalsMap[i] >= 0) {
int toOrdinal = (int)(fromOrdinalsMap[i] >> 32);
addToOrdinalIdentity(toOrdinal, toOrdinal);
}
}
// Clustered entries: walk the flattened clusters; an element tagged with
// Integer.MIN_VALUE ends the current cluster, so the next element starts a
// new one and becomes that cluster's identity.
boolean newCluster = true;
int currentIdentity = 0;
for(int i=0;i<pivotedToOrdinalClusters.size();i++) {
if(newCluster)
currentIdentity = pivotedToOrdinalClusters.get(i);
addToOrdinalIdentity(pivotedToOrdinalClusters.get(i) & Integer.MAX_VALUE, currentIdentity);
newCluster = (pivotedToOrdinalClusters.get(i) & Integer.MIN_VALUE) != 0;
}
}
/**
 * Linear-probe insert into the to-ordinal identity table; the entry packs the
 * identity ordinal in the high 32 bits and the to ordinal in the low 32 bits.
 */
private void addToOrdinalIdentity(int toOrdinal, int identity) {
    final int mask = toOrdinalsIdentityMap.length - 1;
    int slot = HashCodes.hashInt(toOrdinal) & mask;
    while (toOrdinalsIdentityMap[slot] != -1) {
        slot = (slot + 1) & mask;
    }
    toOrdinalsIdentityMap[slot] = ((long) identity << 32) | toOrdinal;
}
/**
 * Returns an iterator over all to ordinals equal to the given from ordinal:
 * a single-element iterator for direct matches, a cluster iterator for
 * multi-match entries (sign bit set), or an empty iterator on a miss.
 */
public MatchIterator getEqualOrdinals(int fromOrdinal) {
int hashCode = HashCodes.hashInt(fromOrdinal);
int bucket = hashCode & (fromOrdinalsMap.length - 1);
while(fromOrdinalsMap[bucket] != -1L) {
// Low 32 bits of each entry hold the from ordinal key.
if((int)fromOrdinalsMap[bucket] == fromOrdinal) {
// Sign bit set: high 31 bits index into the pivoted cluster list.
if((fromOrdinalsMap[bucket] & Long.MIN_VALUE) != 0L)
return new PivotedMatchIterator((int)((fromOrdinalsMap[bucket] & Long.MAX_VALUE) >> 32));
return new SingleMatchIterator((int)(fromOrdinalsMap[bucket] >> 32));
}
bucket = (bucket + 1) & (fromOrdinalsMap.length - 1);
}
return EmptyMatchIterator.INSTANCE;
}
/**
 * Resolves a from ordinal to the identity ordinal of its match: the single
 * matched to ordinal, or the first to ordinal of its cluster for multi-match
 * entries. Returns -1 when the from ordinal has no recorded match.
 */
public int getIdentityFromOrdinal(int fromOrdinal) {
int hashCode = HashCodes.hashInt(fromOrdinal);
int bucket = hashCode & (fromOrdinalsMap.length - 1);
while(fromOrdinalsMap[bucket] != -1L) {
if((int)fromOrdinalsMap[bucket] == fromOrdinal) {
// Clustered entry: the cluster's first element is its identity.
if((fromOrdinalsMap[bucket] & Long.MIN_VALUE) != 0L)
return pivotedToOrdinalClusters.get((int)((fromOrdinalsMap[bucket] & Long.MAX_VALUE) >> 32));
return (int)(fromOrdinalsMap[bucket] >> 32);
}
bucket = (bucket + 1) & (fromOrdinalsMap.length - 1);
}
return -1;
}
/**
 * Resolves a to ordinal to its canonical identity ordinal (see
 * buildToOrdinalIdentityMapping), or -1 when the to ordinal is unknown.
 */
public int getIdentityToOrdinal(int toOrdinal) {
    final int mask = toOrdinalsIdentityMap.length - 1;
    int slot = HashCodes.hashInt(toOrdinal) & mask;
    long entry = toOrdinalsIdentityMap[slot];
    while (entry != -1L) {
        if ((int) entry == toOrdinal) {
            return (int) (entry >> 32);
        }
        slot = (slot + 1) & mask;
        entry = toOrdinalsIdentityMap[slot];
    }
    return -1;
}
public static interface OrdinalIdentityTranslator {
public int getIdentityOrdinal(int ordinal);
}
private final OrdinalIdentityTranslator fromIdentityTranslator = new OrdinalIdentityTranslator() {
public int getIdentityOrdinal(int ordinal) {
return getIdentityFromOrdinal(ordinal);
}
};
private final OrdinalIdentityTranslator toIdentityTranslator = new OrdinalIdentityTranslator() {
public int getIdentityOrdinal(int ordinal) {
return getIdentityToOrdinal(ordinal);
}
};
public OrdinalIdentityTranslator getFromOrdinalIdentityTranslator() {
return fromIdentityTranslator;
}
public OrdinalIdentityTranslator getToOrdinalIdentityTranslator() {
return toIdentityTranslator;
}
    /**
     * An iterator over the to-ordinals determined to be exactly equal to some
     * from-ordinal.  next() is only meaningful while hasNext() returns true.
     */
    public static interface MatchIterator {
        public boolean hasNext();
        public int next();
    }
public static class EmptyMatchIterator implements MatchIterator {
static EmptyMatchIterator INSTANCE = new EmptyMatchIterator();
public boolean hasNext() { return false; }
public int next() { return -1; }
}
public static class SingleMatchIterator implements MatchIterator {
private final int singleMatch;
private boolean exhausted;
public SingleMatchIterator(int singleMatch) {
this.singleMatch = singleMatch;
}
public boolean hasNext() {
return !exhausted;
}
public int next() {
exhausted = true;
return singleMatch;
}
}
public class PivotedMatchIterator implements MatchIterator {
private int currentMatchListPosition;
private boolean exhausted;
public PivotedMatchIterator(int matchListPosition) {
this.currentMatchListPosition = matchListPosition;
}
public boolean hasNext() {
return !exhausted;
}
public int next() {
int nextVal = pivotedToOrdinalClusters.get(currentMatchListPosition++);
exhausted = (nextVal & Integer.MIN_VALUE) != 0;
return nextVal & Integer.MAX_VALUE;
}
}
}
| 8,918 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/exact/DiffEqualityMapping.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff.exact;
import com.netflix.hollow.core.read.engine.HollowCollectionTypeReadState;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.list.HollowListTypeReadState;
import com.netflix.hollow.core.read.engine.map.HollowMapTypeReadState;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.tools.diff.HollowDiff;
import com.netflix.hollow.tools.diff.exact.mapper.DiffEqualityCollectionMapper;
import com.netflix.hollow.tools.diff.exact.mapper.DiffEqualityMapMapper;
import com.netflix.hollow.tools.diff.exact.mapper.DiffEqualityObjectMapper;
import com.netflix.hollow.tools.diff.exact.mapper.DiffEqualityOrderedListMapper;
import com.netflix.hollow.tools.diff.exact.mapper.DiffEqualityTypeMapper;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;
/**
* The {@link HollowDiff} uses this class to expedite diff processing -- this class determines pairs of records which are exactly equal.
*
* This calculation is relatively easy, and these record pairs can be entirely excluded while diffing a hierarchy.
*
* Not intended for external consumption.
*
*/
public class DiffEqualityMapping {
    // Loggers are conventionally static final: one per class, not per instance.
    private static final Logger log = Logger.getLogger(DiffEqualityMapping.class.getName());
    private final HollowReadStateEngine fromState;
    private final HollowReadStateEngine toState;
    // When true, each to-ordinal is matched with at most one from-ordinal.
    private final boolean oneToOne;
    // When true, LIST records are only equal if elements appear in the same order.
    private final boolean listOrderingIsImportant;
    // Per-type maps of exactly-equal record pairs, built lazily by buildMap().
    private final Map<String, DiffEqualOrdinalMap> map = new HashMap<String, DiffEqualOrdinalMap>();
    // Types whose equality maps are incomplete, requiring field-by-field traversal.
    private final Set<String> typesWhichRequireMissingFieldTraversal = new HashSet<String>();
    // Once prepared, missing maps are treated as empty rather than built on demand.
    private boolean isPrepared;
    public DiffEqualityMapping(HollowReadStateEngine fromState, HollowReadStateEngine toState) {
        this(fromState, toState, false, true);
    }
    /**
     * @param oneToOne if true, each to-ordinal is matched with at most one from-ordinal
     * @param listOrderingIsImportant if true, LIST records are only considered equal
     *        when their elements appear in the same order
     */
    public DiffEqualityMapping(HollowReadStateEngine fromState, HollowReadStateEngine toState, boolean oneToOne, boolean listOrderingIsImportant) {
        this.fromState = fromState;
        this.toState = toState;
        this.oneToOne = oneToOne;
        this.listOrderingIsImportant = listOrderingIsImportant;
    }
    /** @return true if records of this type must still be traversed field-by-field when diffing */
    public boolean requiresMissingFieldTraversal(String type) {
        return typesWhichRequireMissingFieldTraversal.contains(type);
    }
    /**
     * Returns the equality map for the given type, building it on first request.
     * After markPrepared(), unknown types yield the empty map instead of triggering a build.
     */
    public DiffEqualOrdinalMap getEqualOrdinalMap(String type) {
        DiffEqualOrdinalMap ordinalMap = map.get(type);
        if(ordinalMap != null)
            return ordinalMap;
        return isPrepared ? DiffEqualOrdinalMap.EMPTY_MAP : buildMap(type);
    }
    public void markPrepared() {
        this.isPrepared = true;
    }
    private DiffEqualOrdinalMap buildMap(String type) {
        HollowTypeReadState fromTypeState = fromState.getTypeState(type);
        HollowTypeReadState toTypeState = toState.getTypeState(type);
        // A type absent from either state has no equal pairs.
        if(fromTypeState == null || toTypeState == null)
            return DiffEqualOrdinalMap.EMPTY_MAP;
        log.info("starting to build equality map for " + type);
        // Renamed from "map" to avoid shadowing the field of the same name.
        DiffEqualOrdinalMap equalOrdinalMap = buildMap(fromTypeState, toTypeState);
        log.info("finished building equality map for " + type);
        return equalOrdinalMap;
    }
    private DiffEqualOrdinalMap buildMap(HollowTypeReadState fromTypeState, HollowTypeReadState toTypeState) {
        String typeName = fromTypeState.getSchema().getName();
        DiffEqualityTypeMapper mapper = getTypeMapper(fromTypeState, toTypeState);
        DiffEqualOrdinalMap equalOrdinalMap = mapper.mapEqualObjects();
        if(mapper.requiresTraversalForMissingFields())
            typesWhichRequireMissingFieldTraversal.add(typeName);
        equalOrdinalMap.buildToOrdinalIdentityMapping();
        map.put(typeName, equalOrdinalMap);
        return equalOrdinalMap;
    }
    /** Selects the mapper implementation matching the type's schema kind. */
    private DiffEqualityTypeMapper getTypeMapper(HollowTypeReadState fromState, HollowTypeReadState toState) {
        if(fromState instanceof HollowObjectTypeReadState)
            return new DiffEqualityObjectMapper(this, (HollowObjectTypeReadState)fromState, (HollowObjectTypeReadState)toState, oneToOne);
        if(listOrderingIsImportant && fromState instanceof HollowListTypeReadState)
            return new DiffEqualityOrderedListMapper(this, (HollowListTypeReadState)fromState, (HollowListTypeReadState)toState, oneToOne);
        if(fromState instanceof HollowCollectionTypeReadState)
            return new DiffEqualityCollectionMapper(this, (HollowCollectionTypeReadState)fromState, (HollowCollectionTypeReadState)toState, oneToOne);
        if(fromState instanceof HollowMapTypeReadState)
            return new DiffEqualityMapMapper(this, (HollowMapTypeReadState)fromState, (HollowMapTypeReadState)toState, oneToOne);
        throw new IllegalArgumentException("I don't know how to map equality for a " + fromState.getClass().getName());
    }
}
| 8,919 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/exact/DiffEqualOrdinalFilter.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff.exact;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.util.IntList;
import java.util.Arrays;
/**
* Not intended for external consumption.
*/
public class DiffEqualOrdinalFilter {
    // Identity lookups for the referenced type: exactly-equal records share an identity ordinal.
    private final DiffEqualOrdinalMap equalOrdinalMap;
    // Outputs of the most recent filter() call.
    private final IntList matchedFromOrdinals;
    private final IntList matchedToOrdinals;
    private final IntList unmatchedFromOrdinals;
    private final IntList unmatchedToOrdinals;
    // Scratch open-addressed (linear probing) table, reused and regrown across filter() calls.
    // hashedIdentityOrdinals holds identity ordinals; -1 marks an empty bucket.
    private int hashedIdentityOrdinals[];
    // Parallel array: how many from-ordinals carry the identity stored in this bucket.
    private int hashedIdentityOrdinalsCounts[];
    // Parallel array: how many to-ordinals have been matched against this identity so far.
    private int matchedOrdinalsCounts[];
    public DiffEqualOrdinalFilter(DiffEqualOrdinalMap equalityMapping) {
        this.equalOrdinalMap = equalityMapping;
        this.matchedFromOrdinals = new IntList();
        this.matchedToOrdinals = new IntList();
        this.unmatchedFromOrdinals = new IntList();
        this.unmatchedToOrdinals = new IntList();
        this.hashedIdentityOrdinals = new int[0];
        this.hashedIdentityOrdinalsCounts = new int[0];
        this.matchedOrdinalsCounts = new int[0];
    }
    /**
     * Partitions the supplied from- and to-ordinal lists into matched and unmatched
     * subsets, pairing ordinals by equality identity.  Duplicate identities are handled
     * by count: at most as many to-ordinals match an identity as there are from-ordinals
     * carrying it.  Results are available from the getters until the next call; this
     * object is not thread-safe.
     */
    public void filter(IntList fromOrdinals, IntList toOrdinals) {
        matchedFromOrdinals.clear();
        matchedToOrdinals.clear();
        unmatchedFromOrdinals.clear();
        unmatchedToOrdinals.clear();
        // Next power of two >= 2x the from count, keeping the table at most half full.
        int hashSize = 1 << (32 - Integer.numberOfLeadingZeros((fromOrdinals.size() * 2) - 1));
        if(hashedIdentityOrdinals.length < hashSize) {
            hashedIdentityOrdinals = new int[hashSize];
            hashedIdentityOrdinalsCounts = new int[hashSize];
            matchedOrdinalsCounts = new int[hashSize];
        }
        Arrays.fill(hashedIdentityOrdinals, -1);
        Arrays.fill(hashedIdentityOrdinalsCounts, 0);
        Arrays.fill(matchedOrdinalsCounts, 0);
        // Pass 1: tally each from-ordinal's identity into the table.
        for(int i=0;i<fromOrdinals.size();i++) {
            int identity = equalOrdinalMap.getIdentityFromOrdinal(fromOrdinals.get(i));
            if(identity != -1) {
                int hashCode = HashCodes.hashInt(identity);
                int bucket = hashCode & (hashedIdentityOrdinals.length - 1);
                while(hashedIdentityOrdinals[bucket] != -1 && hashedIdentityOrdinals[bucket] != identity) {
                    bucket = (bucket + 1) & (hashedIdentityOrdinals.length - 1);
                }
                hashedIdentityOrdinals[bucket] = identity;
                hashedIdentityOrdinalsCounts[bucket]++;
            }
        }
        // Pass 2: each to-ordinal consumes one available from-count for its identity;
        // to-ordinals with no identity, or whose identity is exhausted, are unmatched.
        for(int i=0;i<toOrdinals.size();i++) {
            int identity = equalOrdinalMap.getIdentityToOrdinal(toOrdinals.get(i));
            if(identity != -1) {
                int hashCode = HashCodes.hashInt(identity);
                int bucket = hashCode & (hashedIdentityOrdinals.length - 1);
                while(hashedIdentityOrdinals[bucket] != -1 && hashedIdentityOrdinals[bucket] != identity) {
                    bucket = (bucket + 1) & (hashedIdentityOrdinals.length - 1);
                }
                if(hashedIdentityOrdinals[bucket] == identity && matchedOrdinalsCounts[bucket] < hashedIdentityOrdinalsCounts[bucket]) {
                    matchedOrdinalsCounts[bucket]++;
                    matchedToOrdinals.add(toOrdinals.get(i));
                } else {
                    unmatchedToOrdinals.add(toOrdinals.get(i));
                }
            } else {
                unmatchedToOrdinals.add(toOrdinals.get(i));
            }
        }
        // Pass 3: replay the from-ordinals, consuming the match counts claimed in pass 2
        // so matched from/to lists pair up count-for-count per identity.
        for(int i=0;i<fromOrdinals.size();i++) {
            int identity = equalOrdinalMap.getIdentityFromOrdinal(fromOrdinals.get(i));
            if(identity != -1) {
                int hashCode = HashCodes.hashInt(identity);
                int bucket = hashCode & (hashedIdentityOrdinals.length - 1);
                // The identity is guaranteed present: it was inserted in pass 1.
                while(hashedIdentityOrdinals[bucket] != identity) {
                    bucket = (bucket + 1) & (hashedIdentityOrdinals.length - 1);
                }
                if(matchedOrdinalsCounts[bucket] > 0) {
                    matchedOrdinalsCounts[bucket]--;
                    matchedFromOrdinals.add(fromOrdinals.get(i));
                } else {
                    unmatchedFromOrdinals.add(fromOrdinals.get(i));
                }
            } else {
                unmatchedFromOrdinals.add(fromOrdinals.get(i));
            }
        }
    }
    // The following accessors expose the results of the most recent filter() call.
    public IntList getMatchedFromOrdinals() {
        return matchedFromOrdinals;
    }
    public IntList getMatchedToOrdinals() {
        return matchedToOrdinals;
    }
    public IntList getUnmatchedFromOrdinals() {
        return unmatchedFromOrdinals;
    }
    public IntList getUnmatchedToOrdinals() {
        return unmatchedToOrdinals;
    }
}
| 8,920 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/exact/CombinedMatchPairResultsIterator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff.exact;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.core.util.LongList;
/**
* Not intended for external consumption.
*/
public class CombinedMatchPairResultsIterator {
    // Per-thread result lists; each entry packs (fromOrdinal << 32) | toOrdinal,
    // with all entries for a given fromOrdinal stored contiguously within a shard.
    private final LongList[] shardedResults;
    // Cursor: which shard we are reading, and the position within it.
    private int shardIndex;
    private int positionInShard;
    // State of the current group.
    private int currentFromOrdinal;
    private final IntList currentToOrdinals;
    public CombinedMatchPairResultsIterator(LongList[] shardedResults) {
        this.shardedResults = shardedResults;
        this.currentToOrdinals = new IntList();
    }
    /**
     * Advances to the next group of match pairs sharing a from-ordinal.
     * @return false once every shard is exhausted
     */
    public boolean next() {
        currentToOrdinals.clear();
        while(shardIndex < shardedResults.length) {
            LongList shard = shardedResults[shardIndex];
            if(positionInShard >= shard.size()) {
                // Current shard exhausted: restart at the beginning of the next one.
                shardIndex++;
                positionInShard = 0;
                continue;
            }
            currentFromOrdinal = (int)(shard.get(positionInShard) >> 32);
            // Collect every consecutive entry carrying the same from-ordinal.
            do {
                currentToOrdinals.add((int)shard.get(positionInShard));
                positionInShard++;
            } while(positionInShard < shard.size()
                    && (int)(shard.get(positionInShard) >> 32) == currentFromOrdinal);
            return true;
        }
        return false;
    }
    /** The from-ordinal of the current group (valid after next() returned true). */
    public int fromOrdinal() {
        return currentFromOrdinal;
    }
    /** The to-ordinals of the current group; the returned list is reused between calls. */
    public IntList toOrdinals() {
        return currentToOrdinals;
    }
}
| 8,921 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/exact | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/exact/mapper/DiffEqualityTypeMapper.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff.exact.mapper;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.PopulatedOrdinalListener;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.core.util.LongList;
import com.netflix.hollow.core.util.SimultaneousExecutor;
import com.netflix.hollow.tools.diff.exact.CombinedMatchPairResultsIterator;
import com.netflix.hollow.tools.diff.exact.DiffEqualOrdinalMap;
import java.util.BitSet;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicIntegerArray;
/**
* Not intended for external consumption.
*/
public abstract class DiffEqualityTypeMapper {
    // The two type states being compared.  Subclasses supply per-type hashing and equality.
    protected final HollowTypeReadState fromState;
    protected final HollowTypeReadState toState;
    // When true, each to-ordinal is paired with at most one from-ordinal.
    private final boolean oneToOne;
    protected DiffEqualityTypeMapper(HollowTypeReadState fromState, HollowTypeReadState toState, boolean oneToOne) {
        this.fromState = fromState;
        this.toState = toState;
        this.oneToOne = oneToOne;
    }
    /**
     * Computes the map of exactly-equal record pairs between the from and to states:
     * first hashes every to-record, then probes with every from-record.
     */
    public DiffEqualOrdinalMap mapEqualObjects() {
        int toOrdinalsHashed[] = hashToOrdinals();
        return mapMatchingFromOrdinals(toOrdinalsHashed);
    }
    /**
     * Hashes every populated to-state ordinal into an open-addressed (linear probing)
     * table, built in parallel with lock-free CAS inserts.  Empty buckets hold -1.
     */
    protected int[] hashToOrdinals() {
        PopulatedOrdinalListener listener = toState.getListener(PopulatedOrdinalListener.class);
        final BitSet toPopulatedOrdinals = listener.getPopulatedOrdinals();
        final int ordinalSpaceLength = toPopulatedOrdinals.length();
        // Next power of two >= 2x cardinality, keeping the table at most half full.
        int hashedOrdinalsLength = 1 << (32 - Integer.numberOfLeadingZeros((toPopulatedOrdinals.cardinality() * 2) - 1));
        final AtomicIntegerArray hashedToOrdinals = new AtomicIntegerArray(hashedOrdinalsLength);
        // Initialize all buckets to the empty sentinel.
        for(int i=0;i<hashedOrdinalsLength;i++)
            hashedToOrdinals.set(i, -1);
        SimultaneousExecutor executor = new SimultaneousExecutor(1.5d, getClass(), "hash-to-ordinals");
        final int numThreads = executor.getCorePoolSize();
        for(int i=0;i<numThreads;i++) {
            final int threadNumber = i;
            executor.execute(() -> {
                // Each thread handles the ordinals congruent to its number (mod numThreads).
                for(int t=threadNumber;t<ordinalSpaceLength;t+=numThreads) {
                    if(toPopulatedOrdinals.get(t)) {
                        int hashCode = toRecordHashCode(t);
                        // -1 means the record cannot be hashed (e.g. a referenced record
                        // lacks an equality identity); such records are skipped.
                        if(hashCode != -1) {
                            int bucket = hashCode & (hashedToOrdinals.length() - 1);
                            // Lock-free insert: CAS claims an empty (-1) bucket, else probe onward.
                            while(!hashedToOrdinals.compareAndSet(bucket, -1, t)) {
                                bucket = (bucket + 1) & (hashedToOrdinals.length() - 1);
                            }
                        }
                    }
                }
            });
        }
        try {
            executor.awaitSuccessfulCompletion();
        } catch (InterruptedException | ExecutionException e) {
            throw new RuntimeException(e);
        }
        // Copy into a plain int[] for cheap probing in the match phase.
        int arr[] = new int[hashedToOrdinals.length()];
        for(int i=0;i<arr.length;i++) {
            arr[i] = hashedToOrdinals.get(i);
        }
        return arr;
    }
    /**
     * Probes the to-ordinal hash table with every populated from-state ordinal, in
     * parallel, recording exactly-equal pairs packed as (fromOrdinal << 32) | toOrdinal
     * in per-thread result lists, then merges them into a DiffEqualOrdinalMap.
     */
    protected DiffEqualOrdinalMap mapMatchingFromOrdinals(final int[] hashedToOrdinals) {
        PopulatedOrdinalListener listener = fromState.getListener(PopulatedOrdinalListener.class);
        final BitSet fromPopulatedOrdinals = listener.getPopulatedOrdinals();
        final int ordinalSpaceLength = fromPopulatedOrdinals.length();
        SimultaneousExecutor executor = new SimultaneousExecutor(1.5d, getClass(), "map-matching-from-ordinals");
        final int numThreads = executor.getCorePoolSize();
        // One result list per thread avoids synchronization on the hot path.
        final LongList[] matchPairResults = new LongList[numThreads];
        for(int i=0;i<numThreads;i++) {
            final int threadNumber = i;
            matchPairResults[threadNumber] = new LongList();
            executor.execute(() -> {
                EqualityDeterminer equalityDeterminer = getEqualityDeterminer();
                for(int t=threadNumber;t <ordinalSpaceLength;t+=numThreads) {
                    if(fromPopulatedOrdinals.get(t)) {
                        int hashCode = fromRecordHashCode(t);
                        if(hashCode != -1) {
                            int bucket = hashCode & (hashedToOrdinals.length - 1);
                            // Probe the run of occupied buckets; hash collisions are resolved
                            // by the subclass-supplied full equality check.
                            while(hashedToOrdinals[bucket] != -1) {
                                if(equalityDeterminer.recordsAreEqual(t, hashedToOrdinals[bucket])) {
                                    matchPairResults[threadNumber].add(((long) t << 32) | hashedToOrdinals[bucket]);
                                }
                                bucket = (bucket + 1) & (hashedToOrdinals.length - 1);
                            }
                        }
                    }
                }
            });
        }
        try {
            executor.awaitSuccessfulCompletion();
        } catch (InterruptedException | ExecutionException e) {
            throw new RuntimeException(e);
        }
        int numMatches = 0;
        for(int i=0;i<matchPairResults.length;i++) {
            numMatches += matchPairResults[i].size();
        }
        CombinedMatchPairResultsIterator resultsIterator = new CombinedMatchPairResultsIterator(matchPairResults);
        DiffEqualOrdinalMap ordinalMap = new DiffEqualOrdinalMap(numMatches);
        if(oneToOne) {
            // Assign each to-ordinal to at most one from-ordinal (first claim wins).
            BitSet alreadyMappedToOrdinals = new BitSet(toState.maxOrdinal() + 1);
            while(resultsIterator.next()) {
                int fromOrdinal = resultsIterator.fromOrdinal();
                IntList toOrdinals = resultsIterator.toOrdinals();
                for(int i=0;i<toOrdinals.size();i++) {
                    if(!alreadyMappedToOrdinals.get(toOrdinals.get(i))) {
                        alreadyMappedToOrdinals.set(toOrdinals.get(i));
                        ordinalMap.putEqualOrdinal(fromOrdinal, toOrdinals.get(i));
                        break;
                    }
                }
            }
        } else {
            while(resultsIterator.next()) {
                ordinalMap.putEqualOrdinals(resultsIterator.fromOrdinal(), resultsIterator.toOrdinals());
            }
        }
        return ordinalMap;
    }
    /** @return true if this type's equality map is incomplete and field-level traversal is still required */
    public abstract boolean requiresTraversalForMissingFields();
    /** @return the hash code of the from-state record at the given ordinal, or -1 if it cannot be hashed */
    protected abstract int fromRecordHashCode(int ordinal);
    /** @return the hash code of the to-state record at the given ordinal, or -1 if it cannot be hashed */
    protected abstract int toRecordHashCode(int ordinal);
    /** @return a record-equality comparator; one instance is created per worker thread */
    protected abstract EqualityDeterminer getEqualityDeterminer();
    protected interface EqualityDeterminer {
        /** @return true if the from-state and to-state records are exactly equal */
        public boolean recordsAreEqual(int fromOrdinal, int toOrdinal);
    }
}
| 8,922 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/exact | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/exact/mapper/DiffEqualityMapMapper.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff.exact.mapper;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.read.engine.map.HollowMapTypeReadState;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.tools.diff.exact.DiffEqualOrdinalMap;
import com.netflix.hollow.tools.diff.exact.DiffEqualOrdinalMap.OrdinalIdentityTranslator;
import com.netflix.hollow.tools.diff.exact.DiffEqualityMapping;
/**
* Not intended for external consumption.
*/
public class DiffEqualityMapMapper extends DiffEqualityTypeMapper {
    // Equality identities for the referenced key and value types.
    private final DiffEqualOrdinalMap keyEqualOrdinalMap;
    private final DiffEqualOrdinalMap valueEqualOrdinalMap;
    // True if either referenced type's equality map is incomplete.
    private final boolean requiresTraversalForMissingFields;
    public DiffEqualityMapMapper(DiffEqualityMapping mapping, HollowMapTypeReadState fromState, HollowMapTypeReadState toState, boolean oneToOne) {
        super(fromState, toState, oneToOne);
        HollowMapSchema schema = fromState.getSchema();
        this.keyEqualOrdinalMap = mapping.getEqualOrdinalMap(schema.getKeyType());
        this.valueEqualOrdinalMap = mapping.getEqualOrdinalMap(schema.getValueType());
        this.requiresTraversalForMissingFields =
                mapping.requiresMissingFieldTraversal(schema.getKeyType())
             || mapping.requiresMissingFieldTraversal(schema.getValueType());
    }
    @Override
    public boolean requiresTraversalForMissingFields() {
        return requiresTraversalForMissingFields;
    }
    @Override
    protected int fromRecordHashCode(int ordinal) {
        return recordHashCode(fromState(), ordinal, keyEqualOrdinalMap.getFromOrdinalIdentityTranslator(), valueEqualOrdinalMap.getFromOrdinalIdentityTranslator());
    }
    @Override
    protected int toRecordHashCode(int ordinal) {
        return recordHashCode(toState(), ordinal, keyEqualOrdinalMap.getToOrdinalIdentityTranslator(), valueEqualOrdinalMap.getToOrdinalIdentityTranslator());
    }
    /**
     * Order-independent (XOR-combined) hash of a map record over the identity ordinals
     * of its entries.  Returns -1 when any non-null key or value reference lacks an
     * identity, marking the record unhashable for exact-equality purposes.
     */
    private int recordHashCode(HollowMapTypeReadState typeState, int ordinal, OrdinalIdentityTranslator keyTranslator, OrdinalIdentityTranslator valueTranslator) {
        HollowMapEntryOrdinalIterator iter = typeState.ordinalIterator(ordinal);
        int hashCode = 0;
        while(iter.next()) {
            int keyIdentityOrdinal = keyTranslator.getIdentityOrdinal(iter.getKey());
            int valueIdentityOrdinal = valueTranslator.getIdentityOrdinal(iter.getValue());
            // A null reference (-1) is permitted; any other un-identified reference
            // makes the whole record unhashable.
            if(keyIdentityOrdinal == -1 && iter.getKey() != -1)
                return -1;
            if(valueIdentityOrdinal == -1 && iter.getValue() != -1)
                return -1;
            // XOR keeps the hash independent of entry iteration order.
            hashCode ^= HashCodes.hashInt(keyIdentityOrdinal + (31 * valueIdentityOrdinal));
        }
        return hashCode;
    }
    @Override
    protected EqualityDeterminer getEqualityDeterminer() {
        return new EqualityDeterminer() {
            // Scratch lists reused across comparisons; each determiner instance is
            // used by a single worker thread.
            private final IntList fromKeysIntList = new IntList();
            private final IntList fromValuesIntList = new IntList();
            private final IntList toKeysIntList = new IntList();
            private final IntList toValuesIntList = new IntList();
            @Override
            public boolean recordsAreEqual(int fromOrdinal, int toOrdinal) {
                if(!populateIntLists(fromKeysIntList, fromValuesIntList, fromState().ordinalIterator(fromOrdinal), keyEqualOrdinalMap.getFromOrdinalIdentityTranslator(), valueEqualOrdinalMap.getFromOrdinalIdentityTranslator()))
                    return false;
                if(!populateIntLists(toKeysIntList, toValuesIntList, toState().ordinalIterator(toOrdinal), keyEqualOrdinalMap.getToOrdinalIdentityTranslator(), valueEqualOrdinalMap.getToOrdinalIdentityTranslator()))
                    return false;
                // NOTE(review): keys and values are sorted and compared independently, so
                // two maps with identical key and value multisets but different key->value
                // pairings would compare equal here -- confirm this is intended.
                return fromKeysIntList.equals(toKeysIntList) && fromValuesIntList.equals(toValuesIntList);
            }
            /**
             * Translates each entry's key and value to identity ordinals, collecting
             * them into the supplied lists (sorted on success).  Returns false when
             * any non-null reference lacks an identity.
             */
            private boolean populateIntLists(IntList keysList, IntList valuesList, HollowMapEntryOrdinalIterator iter, OrdinalIdentityTranslator keyTranslator, OrdinalIdentityTranslator valueTranslator) {
                keysList.clear();
                valuesList.clear();
                while(iter.next()) {
                    int keyIdentity = keyTranslator.getIdentityOrdinal(iter.getKey());
                    int valueIdentity = valueTranslator.getIdentityOrdinal(iter.getValue());
                    if(keyIdentity == -1 && iter.getKey() != -1)
                        return false;
                    if(valueIdentity == -1 && iter.getValue() != -1)
                        return false;
                    // Reuse the identities computed above instead of re-translating them.
                    keysList.add(keyIdentity);
                    valuesList.add(valueIdentity);
                }
                keysList.sort();
                valuesList.sort();
                return true;
            }
        };
    }
    private HollowMapTypeReadState fromState() {
        return (HollowMapTypeReadState) fromState;
    }
    private HollowMapTypeReadState toState() {
        return (HollowMapTypeReadState) toState;
    }
}
| 8,923 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/exact | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/exact/mapper/DiffEqualityCollectionMapper.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff.exact.mapper;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.read.engine.HollowCollectionTypeReadState;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import com.netflix.hollow.core.schema.HollowCollectionSchema;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.tools.diff.exact.DiffEqualOrdinalMap;
import com.netflix.hollow.tools.diff.exact.DiffEqualOrdinalMap.OrdinalIdentityTranslator;
import com.netflix.hollow.tools.diff.exact.DiffEqualityMapping;
/**
* Not intended for external consumption.
*/
public class DiffEqualityCollectionMapper extends DiffEqualityTypeMapper {
    // True if the element type's equality map is incomplete, so records of this type
    // must still be traversed field-by-field when diffing.
    private final boolean requiresTraversalForMissingFields;
    // Equality identities for the referenced element type.
    private final DiffEqualOrdinalMap referencedTypeEqualOrdinalMap;
    // When true, elements must appear in the same order (ordered LIST semantics);
    // otherwise identity lists are sorted before comparison (SET semantics).
    private final boolean orderingIsImportant;
    public DiffEqualityCollectionMapper(DiffEqualityMapping mapping, HollowCollectionTypeReadState fromState, HollowCollectionTypeReadState toState, boolean oneToOne) {
        this(mapping, fromState, toState, oneToOne, false);
    }
    public DiffEqualityCollectionMapper(DiffEqualityMapping mapping, HollowCollectionTypeReadState fromState, HollowCollectionTypeReadState toState, boolean oneToOne, boolean orderingIsImportant) {
        super(fromState, toState, oneToOne);
        HollowCollectionSchema schema = fromState.getSchema();
        this.referencedTypeEqualOrdinalMap = mapping.getEqualOrdinalMap(schema.getElementType());
        this.requiresTraversalForMissingFields = mapping.requiresMissingFieldTraversal(schema.getElementType());
        this.orderingIsImportant = orderingIsImportant;
    }
    public boolean requiresTraversalForMissingFields() {
        return requiresTraversalForMissingFields;
    }
    protected EqualityDeterminer getEqualityDeterminer() {
        return new EqualityDeterminer() {
            // Scratch lists reused across comparisons; each determiner instance is
            // used by a single worker thread.
            private final IntList fromIntList = new IntList();
            private final IntList toIntList = new IntList();
            public boolean recordsAreEqual(int fromOrdinal, int toOrdinal) {
                if(!populateIntList(fromIntList, fromState().ordinalIterator(fromOrdinal), referencedTypeEqualOrdinalMap.getFromOrdinalIdentityTranslator()))
                    return false;
                if(!populateIntList(toIntList, toState().ordinalIterator(toOrdinal), referencedTypeEqualOrdinalMap.getToOrdinalIdentityTranslator()))
                    return false;
                return fromIntList.equals(toIntList);
            }
            // Translates each element to its identity ordinal; returns false when any
            // non-null element lacks an identity (equality cannot be established).
            private boolean populateIntList(IntList list, HollowOrdinalIterator iter, OrdinalIdentityTranslator identityTranslator) {
                list.clear();
                int nextOrdinal = iter.next();
                while(nextOrdinal != HollowOrdinalIterator.NO_MORE_ORDINALS) {
                    int identityOrdinal = identityTranslator.getIdentityOrdinal(nextOrdinal);
                    if(identityOrdinal == -1 && nextOrdinal != -1)
                        return false;
                    list.add(identityOrdinal);
                    nextOrdinal = iter.next();
                }
                if(!orderingIsImportant)
                    list.sort();
                return true;
            }
        };
    }
    protected int fromRecordHashCode(int ordinal) {
        return recordHashCode(fromState(), ordinal, referencedTypeEqualOrdinalMap.getFromOrdinalIdentityTranslator());
    }
    protected int toRecordHashCode(int ordinal) {
        return recordHashCode(toState(), ordinal, referencedTypeEqualOrdinalMap.getToOrdinalIdentityTranslator());
    }
    // Order-independent (XOR) hash over element identity ordinals; returns -1 when
    // any non-null element lacks an identity.  Overridden by
    // DiffEqualityOrderedListMapper with an order-sensitive variant.
    protected int recordHashCode(HollowCollectionTypeReadState typeState, int ordinal, OrdinalIdentityTranslator identityTranslator) {
        HollowOrdinalIterator iter = typeState.ordinalIterator(ordinal);
        int elementOrdinal = iter.next();
        int hashCode = 0;
        while(elementOrdinal != HollowOrdinalIterator.NO_MORE_ORDINALS) {
            int identityElementOrdinal = identityTranslator.getIdentityOrdinal(elementOrdinal);
            if(identityElementOrdinal == -1 && elementOrdinal != -1)
                return -1;
            hashCode ^= HashCodes.hashInt(identityElementOrdinal);
            // NOTE(review): when the running XOR lands on 0, XORing the same value again
            // restores the prior running value rather than preventing a zero final hash
            // in general -- confirm this is the intended behavior.
            if(hashCode == 0)
                hashCode ^= HashCodes.hashInt(identityElementOrdinal);
            elementOrdinal = iter.next();
        }
        return hashCode;
    }
    private HollowCollectionTypeReadState fromState() {
        return (HollowCollectionTypeReadState)fromState;
    }
    private HollowCollectionTypeReadState toState() {
        return (HollowCollectionTypeReadState)toState;
    }
}
| 8,924 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/exact | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/exact/mapper/DiffEqualityOrderedListMapper.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff.exact.mapper;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.read.engine.HollowCollectionTypeReadState;
import com.netflix.hollow.core.read.engine.list.HollowListTypeReadState;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import com.netflix.hollow.tools.diff.exact.DiffEqualOrdinalMap.OrdinalIdentityTranslator;
import com.netflix.hollow.tools.diff.exact.DiffEqualityMapping;
/**
* Not intended for external consumption.
*/
public class DiffEqualityOrderedListMapper extends DiffEqualityCollectionMapper {
    public DiffEqualityOrderedListMapper(DiffEqualityMapping mapping, HollowListTypeReadState fromState, HollowListTypeReadState toState, boolean oneToOne) {
        // orderingIsImportant=true: list elements must match positionally.
        super(mapping, fromState, toState, oneToOne, true);
    }
    /**
     * Order-sensitive polynomial rolling hash (base 7919) over the element identity
     * ordinals, finalized through HashCodes.hashInt.  Returns -1 when any non-null
     * element lacks an equality identity.
     */
    @Override
    protected int recordHashCode(HollowCollectionTypeReadState typeState, int ordinal, OrdinalIdentityTranslator identityTranslator) {
        int rollingHash = 0;
        HollowOrdinalIterator iter = typeState.ordinalIterator(ordinal);
        for(int elementOrdinal = iter.next(); elementOrdinal != HollowOrdinalIterator.NO_MORE_ORDINALS; elementOrdinal = iter.next()) {
            int identityOrdinal = identityTranslator.getIdentityOrdinal(elementOrdinal);
            if(identityOrdinal == -1 && elementOrdinal != -1)
                return -1;
            rollingHash = (rollingHash * 7919) + identityOrdinal;
        }
        return HashCodes.hashInt(rollingHash);
    }
}
| 8,925 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/exact | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/diff/exact/mapper/DiffEqualityObjectMapper.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.diff.exact.mapper;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.read.HollowReadFieldUtils;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.tools.diff.exact.DiffEqualOrdinalMap;
import com.netflix.hollow.tools.diff.exact.DiffEqualityMapping;
/**
 * Not intended for external consumption.
 * <p>
 * Matches OBJECT-typed records between a "from" and a "to" state by hashing and comparing
 * only the fields present in both states' schemas (the "common schema"). REFERENCE fields
 * are compared via the identity ordinals already established for the referenced types.
 */
public class DiffEqualityObjectMapper extends DiffEqualityTypeMapper{

    // Intersection of the from-state and to-state schemas; only these fields participate in matching.
    private final HollowObjectSchema commonSchema;
    // For each common-schema field index, the corresponding field position in the to-state schema.
    private final int[] toSchemaCommonFieldMapping;
    // For each common-schema field index, the corresponding field position in the from-state schema.
    private final int[] fromSchemaCommonFieldMapping;
    // True when either schema has fields outside the common schema, or a referenced type needs traversal.
    private final boolean requiresTraversalForMissingFields;
    // Per common-schema field: the equal-ordinal map for REFERENCE fields, null for value fields.
    private final DiffEqualOrdinalMap[] commonReferenceFieldEqualOrdinalMaps;

    public DiffEqualityObjectMapper(DiffEqualityMapping mapping, HollowObjectTypeReadState fromState, HollowObjectTypeReadState toState, boolean oneToOne) {
        super(fromState, toState, oneToOne);

        this.commonSchema = fromState.getSchema().findCommonSchema(toState.getSchema());
        this.commonReferenceFieldEqualOrdinalMaps = new DiffEqualOrdinalMap[commonSchema.numFields()];
        for(int i=0;i<commonReferenceFieldEqualOrdinalMaps.length;i++) {
            if(commonSchema.getFieldType(i) == FieldType.REFERENCE)
                this.commonReferenceFieldEqualOrdinalMaps[i] = mapping.getEqualOrdinalMap(commonSchema.getReferencedType(i));
        }

        this.fromSchemaCommonFieldMapping = buildCommonSchemaFieldMapping(fromState);
        this.toSchemaCommonFieldMapping = buildCommonSchemaFieldMapping(toState);

        // Local deliberately shadows the field: it accumulates the answer before the final assignment.
        boolean requiresTraversalForMissingFields =
                fromState().getSchema().numFields() != commonSchema.numFields()
                || toState().getSchema().numFields() != commonSchema.numFields();

        for(int i=0;i<commonSchema.numFields();i++) {
            if(commonSchema.getFieldType(i) == FieldType.REFERENCE
                    && mapping.requiresMissingFieldTraversal(commonSchema.getReferencedType(i))) {
                requiresTraversalForMissingFields = true;
                break;
            }
        }

        this.requiresTraversalForMissingFields = requiresTraversalForMissingFields;
    }

    /**
     * Maps each common-schema field name to its position in the given state's own schema.
     */
    private int[] buildCommonSchemaFieldMapping(HollowObjectTypeReadState state) {
        int[] commonFieldMapping = new int[commonSchema.numFields()];

        for(int i=0;i<commonFieldMapping.length;i++) {
            String fieldName = commonSchema.getFieldName(i);
            commonFieldMapping[i] = state.getSchema().getPosition(fieldName);
        }

        return commonFieldMapping;
    }

    public boolean requiresTraversalForMissingFields() {
        return requiresTraversalForMissingFields;
    }

    protected int fromRecordHashCode(int ordinal) {
        return recordHashCode(fromState(), ordinal, fromSchemaCommonFieldMapping, true);
    }

    protected int toRecordHashCode(int ordinal) {
        return recordHashCode(toState(), ordinal, toSchemaCommonFieldMapping, false);
    }

    /**
     * Hashes a record over its common-schema fields only. REFERENCE fields contribute their
     * identity ordinal; returns -1 when a non-null reference has no identity mapping yet,
     * meaning the record cannot currently be matched.
     */
    private int recordHashCode(HollowObjectTypeReadState typeState, int ordinal, int[] commonSchemaFieldMapping, boolean fromState) {
        int hashCode = 0;

        for(int i=0;i<commonSchemaFieldMapping.length;i++) {
            int typeStateFieldIndex = commonSchemaFieldMapping[i];

            if(commonSchema.getFieldType(i) == FieldType.REFERENCE) {
                int referencedOrdinal = typeState.readOrdinal(ordinal, typeStateFieldIndex);
                int ordinalIdentity = fromState ?
                        commonReferenceFieldEqualOrdinalMaps[i].getIdentityFromOrdinal(referencedOrdinal)
                        : commonReferenceFieldEqualOrdinalMaps[i].getIdentityToOrdinal(referencedOrdinal);

                if(ordinalIdentity == -1 && referencedOrdinal != -1)
                    return -1;

                hashCode = hashCode * 31 ^ HashCodes.hashInt(ordinalIdentity);
            } else {
                hashCode = hashCode * 31 ^ HashCodes.hashInt(HollowReadFieldUtils.fieldHashCode(typeState, ordinal, typeStateFieldIndex));
            }
        }

        return hashCode;
    }

    /**
     * Returns a determiner which declares two records equal when every common-schema field
     * matches: value fields by direct comparison, REFERENCE fields by identity ordinal.
     */
    public EqualityDeterminer getEqualityDeterminer() {
        return new EqualityDeterminer() {
            public boolean recordsAreEqual(int fromOrdinal, int toOrdinal) {
                for(int i=0;i<fromSchemaCommonFieldMapping.length;i++) {
                    if(commonSchema.getFieldType(i) == FieldType.REFERENCE) {
                        int fromReferenceOrdinal = fromState().readOrdinal(fromOrdinal, fromSchemaCommonFieldMapping[i]);
                        int toReferenceOrdinal = toState().readOrdinal(toOrdinal, toSchemaCommonFieldMapping[i]);

                        int fromIdentityOrdinal = commonReferenceFieldEqualOrdinalMaps[i].getIdentityFromOrdinal(fromReferenceOrdinal);
                        int toIdentityOrdinal = commonReferenceFieldEqualOrdinalMaps[i].getIdentityToOrdinal(toReferenceOrdinal);

                        // Unequal when either non-null reference is unmapped, or the identities differ.
                        if((fromIdentityOrdinal == -1 && fromReferenceOrdinal != -1)
                                || (toIdentityOrdinal == -1 && toReferenceOrdinal != -1)
                                || (fromIdentityOrdinal != toIdentityOrdinal))
                            return false;
                    } else if(!HollowReadFieldUtils.fieldsAreEqual(fromState(), fromOrdinal, fromSchemaCommonFieldMapping[i], toState(), toOrdinal, toSchemaCommonFieldMapping[i])) {
                        return false;
                    }
                }
                return true;
            }
        };
    }

    /** Narrows the inherited {@code fromState} field to its object-typed view. */
    private HollowObjectTypeReadState fromState() {
        return (HollowObjectTypeReadState) fromState;
    }

    /** Narrows the inherited {@code toState} field to its object-typed view. */
    private HollowObjectTypeReadState toState() {
        return (HollowObjectTypeReadState) toState;
    }
}
| 8,926 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/stringifier/HollowRecordJsonStringifier.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.stringifier;
import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE;
import com.netflix.hollow.api.objects.HollowRecord;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowListTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowMapTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowSetTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import com.netflix.hollow.core.schema.HollowListSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSetSchema;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
/**
 * Produces JSON String representations of Hollow records.
 * <p>
 * Output may optionally be pretty-printed, and single-field object types may be
 * "collapsed": rendered as the bare field value instead of a one-entry JSON object.
 * Specific types can be excluded entirely (rendered as JSON {@code null}) via
 * {@link #addExcludeObjectTypes(String...)}.
 */
public class HollowRecordJsonStringifier implements HollowStringifier<HollowRecordJsonStringifier> {
    // Single-field object types rendered as their bare field value (when not collapsing all).
    private final Set<String> collapseObjectTypes;
    // Types rendered as JSON null regardless of content.
    private final Set<String> excludeObjectTypes = new HashSet<String>();
    private final boolean collapseAllSingleFieldObjects;
    private final boolean prettyPrint;

    /** Creates a stringifier which pretty-prints and collapses all single-field objects. */
    public HollowRecordJsonStringifier() {
        this(true, true);
    }

    /**
     * @param prettyPrint whether to emit newlines and indentation
     * @param collapseAllSingleFieldObjects whether every single-field object type is collapsed
     */
    public HollowRecordJsonStringifier(boolean prettyPrint, boolean collapseAllSingleFieldObjects) {
        this.prettyPrint = prettyPrint;
        this.collapseAllSingleFieldObjects = collapseAllSingleFieldObjects;
        this.collapseObjectTypes = Collections.emptySet();
    }

    /**
     * @param indent whether to emit newlines and indentation
     * @param collapseObjectTypes the specific single-field object types to collapse
     */
    public HollowRecordJsonStringifier(boolean indent, String... collapseObjectTypes) {
        this.prettyPrint = indent;
        this.collapseAllSingleFieldObjects = false;
        this.collapseObjectTypes = new HashSet<String>();
        for (String collapseObjectType : collapseObjectTypes) {
            this.collapseObjectTypes.add(collapseObjectType);
        }
    }

    @Override
    public HollowRecordJsonStringifier addExcludeObjectTypes(String... types) {
        for (String type : types) {
            this.excludeObjectTypes.add(type);
        }
        return this;
    }

    @Override
    public String stringify(HollowRecord record) {
        return stringify(record.getTypeDataAccess().getDataAccess(), record.getSchema().getName(), record.getOrdinal());
    }

    @Override
    public void stringify(Writer writer, HollowRecord record) throws IOException {
        stringify(writer, record.getTypeDataAccess().getDataAccess(), record.getSchema().getName(), record.getOrdinal());
    }

    @Override
    public void stringify(Writer writer, Iterable<HollowRecord> records) throws IOException {
        writer.write("[");
        Iterator<HollowRecord> iterator = records.iterator();
        while (iterator.hasNext()) {
            stringify(writer, iterator.next());
            if (iterator.hasNext()) {
                writer.write(",");
            }
        }
        writer.write("]");
    }

    @Override
    public String stringify(HollowDataAccess dataAccess, String type, int ordinal) {
        try {
            StringWriter writer = new StringWriter();
            appendStringify(writer, dataAccess, type, ordinal, 0);
            return writer.toString();
        } catch (IOException e) {
            // StringWriter does not throw IOException in practice; rewrap to keep the API unchecked.
            throw new RuntimeException("Error using StringWriter", e);
        }
    }

    @Override
    public void stringify(Writer writer, HollowDataAccess dataAccess, String type, int ordinal) throws IOException {
        appendStringify(writer, dataAccess, type, ordinal, 0);
    }

    /**
     * Dispatches to the appropriate renderer based on the type's data access kind.
     * Excluded types and absent ordinals render as {@code null}; a missing type renders
     * as an empty object.
     */
    private void appendStringify(Writer writer, HollowDataAccess dataAccess, String type, int ordinal, int indentation) throws IOException {
        if (excludeObjectTypes.contains(type)) {
            writer.append("null");
            return;
        }

        HollowTypeDataAccess typeDataAccess = dataAccess.getTypeDataAccess(type);

        if (typeDataAccess == null) {
            writer.append("{ }");
        } else if (ordinal == ORDINAL_NONE) {
            writer.append("null");
        } else {
            if (typeDataAccess instanceof HollowObjectTypeDataAccess) {
                appendObjectStringify(writer, dataAccess, (HollowObjectTypeDataAccess)typeDataAccess, ordinal, indentation);
            } else if (typeDataAccess instanceof HollowListTypeDataAccess) {
                appendListStringify(writer, dataAccess, (HollowListTypeDataAccess)typeDataAccess, ordinal, indentation);
            } else if (typeDataAccess instanceof HollowSetTypeDataAccess) {
                appendSetStringify(writer, dataAccess, (HollowSetTypeDataAccess)typeDataAccess, ordinal, indentation);
            } else if (typeDataAccess instanceof HollowMapTypeDataAccess) {
                appendMapStringify(writer, dataAccess, (HollowMapTypeDataAccess)typeDataAccess, ordinal, indentation);
            }
        }
    }

    /**
     * Renders a MAP record. Maps whose key schema has a single field become a JSON object
     * keyed by that field's value; otherwise a JSON array of {"key":..., "value":...} entries.
     */
    private void appendMapStringify(Writer writer, HollowDataAccess dataAccess, HollowMapTypeDataAccess typeDataAccess, int ordinal, int indentation) throws IOException {
        HollowMapSchema schema = typeDataAccess.getSchema();

        indentation++;

        int size = typeDataAccess.size(ordinal);

        if (size == 0) {
            writer.append("{ }");
        } else {
            String keyType = schema.getKeyType();
            String valueType = schema.getValueType();

            HollowObjectTypeDataAccess keyTypeDataAccess = (HollowObjectTypeDataAccess) dataAccess.getTypeDataAccess(keyType);
            HollowObjectSchema keySchema = keyTypeDataAccess.getSchema();

            HollowMapEntryOrdinalIterator ordinalIterator = typeDataAccess.ordinalIterator(ordinal);

            if (keySchema.numFields() == 1) {
                writer.append("{");
                if (prettyPrint) {
                    writer.append(NEWLINE);
                }
                boolean firstEntry = true;

                while(ordinalIterator.next()) {
                    if (firstEntry) {
                        firstEntry = false;
                    } else {
                        writer.append(",");
                        if (prettyPrint) {
                            writer.append(NEWLINE);
                        }
                    }
                    if (prettyPrint) {
                        appendIndentation(writer, indentation);
                    }
                    // JSON object keys must be strings; quote non-string key fields
                    // (STRING fields are already quoted by appendFieldStringify).
                    boolean needToQuoteKey = keySchema.getFieldType(0) != FieldType.STRING;

                    if (needToQuoteKey)
                        writer.append("\"");

                    int keyOrdinal = ordinalIterator.getKey();
                    appendFieldStringify(writer, dataAccess, indentation, keySchema, keyTypeDataAccess, keyOrdinal, 0);

                    if (needToQuoteKey)
                        writer.append("\"");

                    writer.append(": ");
                    appendStringify(writer, dataAccess, valueType, ordinalIterator.getValue(), indentation);
                }
                if (prettyPrint && !firstEntry) {
                    writer.append(NEWLINE);
                    appendIndentation(writer, indentation - 1);
                }
                writer.append("}");
            } else {
                writer.append("[");
                if (prettyPrint)
                    writer.append(NEWLINE);
                boolean firstEntry = true;

                while(ordinalIterator.next()) {
                    if (firstEntry) {
                        firstEntry = false;
                    } else {
                        writer.append(",");
                        if (prettyPrint)
                            writer.append(NEWLINE);
                    }

                    if (prettyPrint) {
                        appendIndentation(writer, indentation - 1);
                    }
                    writer.append("{");
                    if (prettyPrint) {
                        writer.append(NEWLINE);
                        appendIndentation(writer, indentation);
                    }
                    writer.append("\"key\":");
                    appendStringify(writer, dataAccess, keyType, ordinalIterator.getKey(), indentation + 1);
                    writer.append(",");
                    if (prettyPrint) {
                        writer.append(NEWLINE);
                        appendIndentation(writer, indentation);
                    }
                    writer.append("\"value\":");
                    appendStringify(writer, dataAccess, valueType, ordinalIterator.getValue(), indentation + 1);
                    if (prettyPrint) {
                        writer.append(NEWLINE);
                        appendIndentation(writer, indentation - 1);
                    }
                    writer.append("}");
                }
                writer.append("]");
            }
        }
    }

    /** Renders a SET record as a JSON array of its elements. */
    private void appendSetStringify(Writer writer, HollowDataAccess dataAccess, HollowSetTypeDataAccess typeDataAccess, int ordinal, int indentation) throws IOException {
        HollowSetSchema schema = typeDataAccess.getSchema();

        indentation++;

        String elementType = schema.getElementType();

        HollowOrdinalIterator iter = typeDataAccess.ordinalIterator(ordinal);

        int elementOrdinal = iter.next();

        if (elementOrdinal == HollowOrdinalIterator.NO_MORE_ORDINALS) {
            writer.append("[]");
        } else {
            boolean firstElement = true;
            writer.append("[");
            if (prettyPrint)
                writer.append(NEWLINE);

            while(elementOrdinal != HollowOrdinalIterator.NO_MORE_ORDINALS) {
                if (firstElement)
                    firstElement = false;
                else
                    writer.append(",");

                if (prettyPrint)
                    appendIndentation(writer, indentation);

                appendStringify(writer, dataAccess, elementType, elementOrdinal, indentation);

                elementOrdinal = iter.next();
            }

            if (prettyPrint) {
                writer.append(NEWLINE);
                appendIndentation(writer, indentation - 1);
            }
            writer.append("]");
        }
    }

    /** Renders a LIST record as a JSON array of its elements, in list order. */
    private void appendListStringify(Writer writer, HollowDataAccess dataAccess, HollowListTypeDataAccess typeDataAccess, int ordinal, int indentation) throws IOException {
        HollowListSchema schema = typeDataAccess.getSchema();

        indentation++;

        int size = typeDataAccess.size(ordinal);

        if (size == 0) {
            writer.append("[]");
        } else {
            writer.append("[");
            if (prettyPrint) {
                writer.append(NEWLINE);
            }

            String elementType = schema.getElementType();

            for(int i=0;i<size;i++) {
                int elementOrdinal = typeDataAccess.getElementOrdinal(ordinal, i);

                if (prettyPrint) {
                    appendIndentation(writer, indentation);
                }
                appendStringify(writer, dataAccess, elementType, elementOrdinal, indentation);
                if (i < size - 1) {
                    writer.append(",");
                    if (prettyPrint) {
                        writer.append(NEWLINE);
                    }
                }
            }

            if (prettyPrint) {
                writer.append(NEWLINE);
                appendIndentation(writer, indentation - 1);
            }
            writer.append("]");
        }
    }

    /**
     * Renders an OBJECT record as a JSON object of its non-null fields, or — when the type
     * is collapsed and has exactly one field — as the bare field value.
     */
    private void appendObjectStringify(Writer writer, HollowDataAccess dataAccess, HollowObjectTypeDataAccess typeDataAccess, int ordinal, int indentation) throws IOException {
        HollowObjectSchema schema = typeDataAccess.getSchema();

        if (schema.numFields() == 1 && (collapseAllSingleFieldObjects || collapseObjectTypes.contains(schema.getName()))) {
            appendFieldStringify(writer, dataAccess, indentation, schema, typeDataAccess, ordinal, 0);
        } else {
            writer.append("{");

            boolean firstField = true;
            indentation++;

            for(int i=0;i<schema.numFields();i++) {
                String fieldName = schema.getFieldName(i);
                if (!typeDataAccess.isNull(ordinal, i)) {
                    if (firstField)
                        firstField = false;
                    else
                        writer.append(",");

                    if (prettyPrint) {
                        writer.append(NEWLINE);
                        appendIndentation(writer, indentation);
                    }
                    writer.append("\"").append(fieldName).append("\": ");
                    appendFieldStringify(writer, dataAccess, indentation, schema, typeDataAccess, ordinal, i);
                }
            }

            if (prettyPrint && !firstField) {
                writer.append(NEWLINE);
                appendIndentation(writer, indentation - 1);
            }
            writer.append("}");
        }
    }

    /** Renders a single field value according to its schema field type. */
    private void appendFieldStringify(Writer writer, HollowDataAccess dataAccess, int indentation, HollowObjectSchema schema, HollowObjectTypeDataAccess typeDataAccess, int ordinal, int fieldIdx) throws IOException {
        switch(schema.getFieldType(fieldIdx)) {
            case BOOLEAN:
                writer.append(typeDataAccess.readBoolean(ordinal, fieldIdx).booleanValue() ? "true" : "false");
                return;
            case BYTES:
                writer.append(Arrays.toString(typeDataAccess.readBytes(ordinal, fieldIdx)));
                return;
            case DOUBLE:
                writer.append(String.valueOf(typeDataAccess.readDouble(ordinal, fieldIdx)));
                return;
            case FLOAT:
                writer.append(String.valueOf(typeDataAccess.readFloat(ordinal, fieldIdx)));
                return;
            case INT:
                writer.append(String.valueOf(typeDataAccess.readInt(ordinal, fieldIdx)));
                return;
            case LONG:
                writer.append(String.valueOf(typeDataAccess.readLong(ordinal, fieldIdx)));
                return;
            case STRING:
                writer.append("\"").append(escapeString(typeDataAccess.readString(ordinal, fieldIdx))).append("\"");
                return;
            case REFERENCE:
                int refOrdinal = typeDataAccess.readOrdinal(ordinal, fieldIdx);
                appendStringify(writer, dataAccess, schema.getReferencedType(fieldIdx), refOrdinal, indentation);
                return;
        }
    }

    /**
     * Escapes a String for inclusion in a JSON document: backslash, double quote, and all
     * control characters below U+0020, which RFC 8259 requires to be escaped. Previously
     * only backslash and quote were escaped, so strings containing e.g. newlines or tabs
     * produced invalid JSON.
     */
    private String escapeString(String str) {
        boolean needsEscaping = false;
        for (int i = 0; i < str.length(); i++) {
            char c = str.charAt(i);
            if (c == '\\' || c == '\"' || c < 0x20) {
                needsEscaping = true;
                break;
            }
        }
        // Fast path: return the original String when no escaping is required.
        if (!needsEscaping)
            return str;

        StringBuilder builder = new StringBuilder(str.length() + 16);
        for (int i = 0; i < str.length(); i++) {
            char c = str.charAt(i);
            switch (c) {
                case '\\': builder.append("\\\\"); break;
                case '\"': builder.append("\\\""); break;
                case '\b': builder.append("\\b");  break;
                case '\f': builder.append("\\f");  break;
                case '\n': builder.append("\\n");  break;
                case '\r': builder.append("\\r");  break;
                case '\t': builder.append("\\t");  break;
                default:
                    if (c < 0x20) {
                        // Remaining control characters use the \ u0000 form.
                        builder.append(String.format("\\u%04x", (int) c));
                    } else {
                        builder.append(c);
                    }
                    break;
            }
        }
        return builder.toString();
    }

    /** Writes {@code indentation} copies of INDENT; unrolled for the common shallow cases. */
    private void appendIndentation(Writer writer, int indentation) throws IOException {
        switch(indentation) {
            case 0:
                return;
            case 1:
                writer.append(INDENT);
                return;
            case 2:
                writer.append(INDENT + INDENT);
                return;
            case 3:
                writer.append(INDENT + INDENT + INDENT);
                return;
            case 4:
                writer.append(INDENT + INDENT + INDENT + INDENT);
                return;
            case 5:
                writer.append(INDENT + INDENT + INDENT + INDENT + INDENT);
                return;
            case 6:
                writer.append(INDENT + INDENT + INDENT + INDENT + INDENT + INDENT);
                return;
            case 7:
                writer.append(INDENT + INDENT + INDENT + INDENT + INDENT + INDENT + INDENT);
                return;
            case 8:
                writer.append(INDENT + INDENT + INDENT + INDENT + INDENT + INDENT + INDENT + INDENT);
                return;
            case 9:
                writer.append(INDENT + INDENT + INDENT + INDENT + INDENT + INDENT + INDENT + INDENT + INDENT);
                return;
            default:
                for(int i=0;i<indentation;i++) {
                    writer.append(INDENT);
                }
        }
    }
}
| 8,927 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/stringifier/HollowRecordStringifier.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.stringifier;
import static com.netflix.hollow.core.read.iterator.HollowOrdinalIterator.NO_MORE_ORDINALS;
import com.netflix.hollow.api.objects.HollowRecord;
import com.netflix.hollow.api.objects.generic.GenericHollowObject;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowListTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowMapTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowSetTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import com.netflix.hollow.core.schema.HollowListSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSetSchema;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
/**
 * Produces human-readable String representations of Hollow records.
 * <p>
 * Output can optionally be annotated with record type names and ordinals, and
 * single-field objects can be collapsed down to their bare field value.
 */
public class HollowRecordStringifier implements HollowStringifier<HollowRecordStringifier> {

    // Types rendered as "null" regardless of content.
    private final Set<String> excludeObjectTypes = new HashSet<String>();
    private final boolean showOrdinals;
    private final boolean showTypes;
    private final boolean collapseSingleFieldObjects;

    /** Creates a stringifier which hides ordinals and types and collapses single-field objects. */
    public HollowRecordStringifier() {
        this(false, false, true);
    }

    /**
     * @param showOrdinals whether to annotate records with their ordinals
     * @param showTypes whether to annotate records with their type names
     * @param collapseSingleFieldObjects whether single-field objects render as their bare field value
     */
    public HollowRecordStringifier(boolean showOrdinals, boolean showTypes, boolean collapseSingleFieldObjects) {
        this.showOrdinals = showOrdinals;
        this.showTypes = showTypes;
        this.collapseSingleFieldObjects = collapseSingleFieldObjects;
    }

    @Override
    public HollowRecordStringifier addExcludeObjectTypes(String... types) {
        for (String type : types) {
            this.excludeObjectTypes.add(type);
        }
        return this;
    }

    @Override
    public String stringify(HollowRecord record) {
        return stringify(record.getTypeDataAccess().getDataAccess(),
                record.getSchema().getName(), record.getOrdinal());
    }

    @Override
    public void stringify(Writer writer, HollowRecord record) throws IOException {
        stringify(writer, record.getTypeDataAccess().getDataAccess(), record.getSchema().getName(),
                record.getOrdinal());
    }

    @Override
    public void stringify(Writer writer, Iterable<HollowRecord> records) throws IOException {
        writer.write("[");
        Iterator<HollowRecord> iterator = records.iterator();
        while (iterator.hasNext()) {
            stringify(writer, iterator.next());
            if (iterator.hasNext()) {
                writer.write(",");
            }
        }
        writer.write("\n]");
    }

    @Override
    public String stringify(HollowDataAccess dataAccess, String type, int ordinal) {
        try {
            StringWriter writer = new StringWriter();
            stringify(writer, dataAccess, type, ordinal);
            return writer.toString();
        } catch (IOException e) {
            // StringWriter does not throw IOException in practice; rewrap to keep the API unchecked.
            throw new RuntimeException("Unexpected exception using StringWriter", e);
        }
    }

    @Override
    public void stringify(Writer writer, HollowDataAccess dataAccess, String type, int ordinal) throws IOException {
        appendStringify(writer, dataAccess, type, ordinal, 0);
    }

    /**
     * Dispatches to the appropriate renderer based on the type's data access kind.
     * Excluded types and absent ordinals render as "null".
     */
    private void appendStringify(Writer writer, HollowDataAccess dataAccess, String type, int ordinal,
            int indentation) throws IOException {
        if (excludeObjectTypes.contains(type)) {
            writer.append("null");
            return;
        }

        HollowTypeDataAccess typeDataAccess = dataAccess.getTypeDataAccess(type);

        if(typeDataAccess == null) {
            writer.append("[missing type " + type + "]");
        } else if (ordinal == -1) {
            writer.append("null");
        } else {
            if(typeDataAccess instanceof HollowObjectTypeDataAccess) {
                appendObjectStringify(writer, dataAccess, (HollowObjectTypeDataAccess)typeDataAccess, ordinal, indentation);
            } else if(typeDataAccess instanceof HollowListTypeDataAccess) {
                appendListStringify(writer, dataAccess, (HollowListTypeDataAccess)typeDataAccess, ordinal, indentation);
            } else if(typeDataAccess instanceof HollowSetTypeDataAccess) {
                appendSetStringify(writer, dataAccess, (HollowSetTypeDataAccess)typeDataAccess, ordinal, indentation);
            } else if(typeDataAccess instanceof HollowMapTypeDataAccess) {
                appendMapStringify(writer, dataAccess, (HollowMapTypeDataAccess)typeDataAccess, ordinal, indentation);
            }
        }
    }

    /** Renders a MAP record as alternating "k:" / "v:" lines, one pair per entry. */
    private void appendMapStringify(Writer writer, HollowDataAccess dataAccess,
            HollowMapTypeDataAccess typeDataAccess, int ordinal, int indentation) throws IOException {
        HollowMapSchema schema = typeDataAccess.getSchema();

        if(showTypes)
            writer.append("(").append(schema.getName()).append(")");
        if(showOrdinals)
            writer.append(" (ordinal ").append(Integer.toString(ordinal)).append(")");

        indentation++;

        String keyType = schema.getKeyType();
        String valueType = schema.getValueType();

        HollowMapEntryOrdinalIterator iter = typeDataAccess.ordinalIterator(ordinal);

        while(iter.next()) {
            writer.append(NEWLINE);
            appendIndentation(writer, indentation);
            writer.append("k: ");
            appendStringify(writer, dataAccess, keyType, iter.getKey(), indentation);
            writer.append(NEWLINE);
            appendIndentation(writer, indentation);
            writer.append("v: ");
            appendStringify(writer, dataAccess, valueType, iter.getValue(), indentation);
        }
    }

    /** Renders a SET record as "e:" lines, one per element. */
    private void appendSetStringify(Writer writer, HollowDataAccess dataAccess,
            HollowSetTypeDataAccess typeDataAccess, int ordinal, int indentation) throws IOException {
        HollowSetSchema schema = typeDataAccess.getSchema();

        if(showTypes)
            writer.append("(").append(schema.getName()).append(")");
        if(showOrdinals)
            writer.append(" (ordinal ").append(Integer.toString(ordinal)).append(")");

        indentation++;

        String elementType = schema.getElementType();
        HollowOrdinalIterator iter = typeDataAccess.ordinalIterator(ordinal);

        int elementOrdinal = iter.next();
        while(elementOrdinal != NO_MORE_ORDINALS) {
            writer.append(NEWLINE);
            appendIndentation(writer, indentation);
            writer.append("e: ");
            appendStringify(writer, dataAccess, elementType, elementOrdinal, indentation);

            elementOrdinal = iter.next();
        }
    }

    /** Renders a LIST record as indexed "e0:", "e1:", ... lines, in list order. */
    private void appendListStringify(Writer writer, HollowDataAccess dataAccess,
            HollowListTypeDataAccess typeDataAccess, int ordinal, int indentation) throws IOException {
        HollowListSchema schema = typeDataAccess.getSchema();

        if(showTypes)
            writer.append("(").append(schema.getName()).append(")");
        if(showOrdinals)
            writer.append(" (ordinal ").append(Integer.toString(ordinal)).append(")");

        indentation++;

        int size = typeDataAccess.size(ordinal);
        String elementType = schema.getElementType();

        for(int i=0;i<size;i++) {
            writer.append(NEWLINE);
            int elementOrdinal = typeDataAccess.getElementOrdinal(ordinal, i);
            appendIndentation(writer, indentation);
            writer.append("e" + i + ": ");
            appendStringify(writer, dataAccess, elementType, elementOrdinal, indentation);
        }
    }

    /**
     * Renders an OBJECT record as "fieldName: value" lines, or — when the type has exactly
     * one field and collapsing is enabled — as the bare field value.
     */
    private void appendObjectStringify(Writer writer, HollowDataAccess dataAccess,
            HollowObjectTypeDataAccess typeDataAccess, int ordinal, int indentation) throws IOException {
        HollowObjectSchema schema = typeDataAccess.getSchema();
        GenericHollowObject obj = new GenericHollowObject(typeDataAccess, ordinal);

        if(collapseSingleFieldObjects && typeDataAccess.getSchema().numFields() == 1) {
            appendFieldStringify(writer, dataAccess, indentation, schema, obj, 0, schema.getFieldName(0));
        } else {
            if(showTypes)
                writer.append("(").append(schema.getName()).append(")");
            if(showOrdinals)
                writer.append(" (ordinal ").append(Integer.toString(ordinal)).append(")");

            indentation++;

            for(int i=0;i<schema.numFields();i++) {
                writer.append(NEWLINE);
                String fieldName = schema.getFieldName(i);
                appendIndentation(writer, indentation);
                writer.append(fieldName).append(": ");

                appendFieldStringify(writer, dataAccess, indentation, schema, obj, i, fieldName);
            }
        }
    }

    /** Renders a single field value according to its schema field type; null fields render as "null". */
    private void appendFieldStringify(Writer writer, HollowDataAccess dataAccess, int indentation,
            HollowObjectSchema schema, GenericHollowObject obj, int i, String fieldName) throws IOException {
        if(obj.isNull(fieldName)) {
            writer.append("null");
        } else {
            switch(schema.getFieldType(i)) {
                case BOOLEAN:
                    writer.append(Boolean.toString(obj.getBoolean(fieldName)));
                    break;
                case BYTES:
                    writer.append(Arrays.toString(obj.getBytes(fieldName)));
                    break;
                case DOUBLE:
                    writer.append(Double.toString(obj.getDouble(fieldName)));
                    break;
                case FLOAT:
                    writer.append(Float.toString(obj.getFloat(fieldName)));
                    break;
                case INT:
                    writer.append(Integer.toString(obj.getInt(fieldName)));
                    break;
                case LONG:
                    writer.append(Long.toString(obj.getLong(fieldName)));
                    break;
                case STRING:
                    writer.append(obj.getString(fieldName));
                    break;
                case REFERENCE:
                    int refOrdinal = obj.getOrdinal(fieldName);
                    appendStringify(writer, dataAccess, schema.getReferencedType(i), refOrdinal, indentation);
                    break;
            }
        }
    }

    /** Writes {@code indentation} copies of INDENT. */
    private void appendIndentation(Writer writer, int indentation) throws IOException {
        for(int i=0;i<indentation;i++) {
            writer.append(INDENT);
        }
    }
}
| 8,928 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/stringifier/HollowStringifier.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.stringifier;
import com.netflix.hollow.api.objects.HollowRecord;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import java.io.IOException;
import java.io.Writer;
/**
 * Converts Hollow records to String representations.
 * <p>
 * Implementations decide the output format (e.g. JSON or a human-readable layout);
 * the parameterized type {@code T} enables fluent configuration via
 * {@link #addExcludeObjectTypes(String...)}.
 */
@SuppressWarnings("rawtypes")
public interface HollowStringifier<T extends HollowStringifier> {
    // Line separator used by implementations when pretty-printing.
    String NEWLINE = "\n";
    // One level of indentation used by implementations when pretty-printing.
    String INDENT = "  ";

    /**
     * Exclude specified object types (replace output with null).
     *
     * @param types the object type names
     * @return this stringifier
     */
    T addExcludeObjectTypes(String... types);

    /**
     * Create a String representation of the specified {@link HollowRecord}.
     *
     * @param record the record
     * @return the string representation
     */
    String stringify(HollowRecord record);

    /**
     * Writes a String representation of the specified {@link HollowRecord} to the provided Writer.
     *
     * @param writer the writer
     * @param record the record
     * @throws IOException thrown if there is an error writing to the Writer
     */
    void stringify(Writer writer, HollowRecord record) throws IOException;

    /**
     * Writes a String representation of the specified collection of {@link HollowRecord} to the provided Writer.
     *
     * @param writer the writer
     * @param records the records
     * @throws IOException thrown if there is an error writing to the Writer
     */
    default void stringify(Writer writer, Iterable<HollowRecord> records) throws IOException {
        throw new UnsupportedOperationException("not implemented");
    }

    /**
     * Create a String representation of the record in the provided dataset, of the given type, with the specified ordinal.
     *
     * @param dataAccess the data access
     * @param type the type name
     * @param ordinal the ordinal
     * @return the string representation
     */
    String stringify(HollowDataAccess dataAccess, String type, int ordinal);

    /**
     * Writes a String representation of the record in the provided dataset, of the given type, with the specified ordinal.
     *
     * @param writer the writer
     * @param dataAccess the data access
     * @param type the type name
     * @param ordinal the ordinal
     * @throws IOException thrown if there is an error writing to the Writer
     */
    void stringify(Writer writer, HollowDataAccess dataAccess, String type, int ordinal)
        throws IOException;
}
| 8,929 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/patch | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/patch/delta/HollowStateDeltaPatcher.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.patch.delta;
import com.netflix.hollow.core.memory.ThreadSafeBitSet;
import com.netflix.hollow.core.read.HollowReadFieldUtils;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.PopulatedOrdinalListener;
import com.netflix.hollow.core.read.engine.list.HollowListTypeReadState;
import com.netflix.hollow.core.read.engine.map.HollowMapTypeReadState;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.read.engine.set.HollowSetTypeReadState;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSchema.SchemaType;
import com.netflix.hollow.core.schema.HollowSchemaSorter;
import com.netflix.hollow.core.schema.HollowSetSchema;
import com.netflix.hollow.core.util.HollowWriteStateCreator;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.core.util.IntMap;
import com.netflix.hollow.core.util.LongList;
import com.netflix.hollow.core.util.SimultaneousExecutor;
import com.netflix.hollow.core.write.HollowTypeWriteState;
import com.netflix.hollow.core.write.HollowWriteRecord;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.core.write.copy.HollowRecordCopier;
import com.netflix.hollow.tools.combine.IdentityOrdinalRemapper;
import com.netflix.hollow.tools.traverse.TransitiveSetTraverser;
import java.util.BitSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* The HollowStateDeltaPatcher will create delta blobs which patch between two arbitrary states in a contiguous delta state chain.
* <p>
* This tool can be used in the following scenarios:
* <ol>
* <li>If a delta is lost (either published incorrectly or accidentally deleted from a file store), and
* a replacement must be created.</li>
* <li>If a long chain of deltas must be followed by clients, and it is desirable to create a shortcut to skip over many states</li>
* </ol>
*
* <p>
* The HollowStateDeltaPatcher must create <b>two</b> deltas in order to map from one state to another. The intermediate state
* will move all of the records in ordinals which are shared by different records between two non-adjacent states out of the way.
* This way, we maintain the guarantee that ghost records will continue to be accessible in adjacent states.
*/
public class HollowStateDeltaPatcher {

    private final HollowReadStateEngine from;   // the earlier state
    private final HollowReadStateEngine to;     // the later state

    // Write engine from which both deltas (and reverse deltas) are produced.
    private final HollowWriteStateEngine writeEngine;

    // Schemas common to both states, sorted so that referenced types precede referencing types.
    private final List<HollowSchema> schemas;

    // Per type: ordinals which are populated in BOTH states but contain different records,
    // expanded transitively to include any records which reference such a record
    // (see discoverChangedOrdinalsBetweenStates()).
    private Map<String, BitSet> changedOrdinalsBetweenStates;

    /**
     * Create a delta patcher which will patch between the states contained in the two state engines.
     *
     * @param from The earlier state
     * @param to The later state.
     */
    public HollowStateDeltaPatcher(HollowReadStateEngine from, HollowReadStateEngine to) {
        this.from = from;
        this.to = to;
        this.schemas = HollowSchemaSorter.dependencyOrderedSchemaList(getCommonSchemas(from, to));
        this.writeEngine = HollowWriteStateCreator.createWithSchemas(schemas);
        this.changedOrdinalsBetweenStates = discoverChangedOrdinalsBetweenStates();
    }

    /**
     * Returns the HollowWriteStateEngine containing the state, use this to write the deltas and reverse deltas.
     * @return the HollowWriteStateEngine containing the state
     */
    public HollowWriteStateEngine getStateEngine() {
        return writeEngine;
    }

    /**
     * Call this method first. After this returns, you can write a delta/reversedelta from/to the earlier state to/from the intermediate state.
     */
    public void prepareInitialTransition() {
        // The intermediate state must be a delta-compatible successor of the FROM state.
        writeEngine.overridePreviousStateRandomizedTag(from.getCurrentRandomizedTag());
        writeEngine.overridePreviousHeaderTags(from.getHeaderTags());

        copyUnchangedDataToIntermediateState();
        // Changed records are relocated to ordinals unused by either state, so that the
        // destination ordinals remain free of conflicting ghost records.
        remapTheChangedDataToUnusedOrdinals();
    }

    /**
     * Call this method second. After this returns, you can write a delta/reversedelta from/to the intermediate state to/from the later state.
     */
    public void prepareFinalTransition() {
        writeEngine.prepareForNextCycle();
        // The final state must carry the TO state's identity and header tags.
        writeEngine.overrideNextStateRandomizedTag(to.getCurrentRandomizedTag());
        writeEngine.addHeaderTags(to.getHeaderTags());

        copyUnchangedDataToDestinationState();
        remapTheChangedDataToDestinationOrdinals();
    }

    // Copies every record of the FROM state into the intermediate state at its original ordinal.
    // Records whose ordinal holds a different record in the TO state are added as ghosts
    // (markCurrentCycle == false) so they are dropped on the next cycle.
    private void copyUnchangedDataToIntermediateState() {
        SimultaneousExecutor executor = new SimultaneousExecutor(getClass(), "copy-unchanged");

        for(final HollowSchema schema : schemas) {
            executor.execute(new Runnable() {
                public void run() {
                    HollowTypeReadState fromTypeState = from.getTypeState(schema.getName());
                    HollowTypeWriteState writeTypeState = writeEngine.getTypeState(schema.getName());
                    BitSet changedOrdinals = changedOrdinalsBetweenStates.get(schema.getName());

                    // Identity remapping: ordinals are preserved for this copy.
                    HollowRecordCopier copier = HollowRecordCopier.createCopier(fromTypeState, schema, IdentityOrdinalRemapper.INSTANCE, true);

                    BitSet fromOrdinals = fromTypeState.getListener(PopulatedOrdinalListener.class).getPopulatedOrdinals();

                    int ordinal = fromOrdinals.nextSetBit(0);
                    while(ordinal != -1) {
                        // Only unchanged records survive into the next cycle.
                        boolean markCurrentCycle = !changedOrdinals.get(ordinal);
                        HollowWriteRecord rec = copier.copy(ordinal);
                        writeTypeState.mapOrdinal(rec, ordinal, true, markCurrentCycle);
                        ordinal = fromOrdinals.nextSetBit(ordinal + 1);
                    }

                    writeTypeState.recalculateFreeOrdinals();
                }
            });
        }

        try {
            executor.awaitSuccessfulCompletion();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    // Re-adds each changed FROM record at a brand-new ordinal (beyond the max ordinal of either
    // state), recording the old->new mapping so records referencing it are rewritten consistently.
    // NOTE: must run single-threaded in schema-dependency order, since the remapper for a
    // referenced type must be complete before referencing types are copied.
    private void remapTheChangedDataToUnusedOrdinals() {
        PartialOrdinalRemapper remapper = new PartialOrdinalRemapper();

        for(HollowSchema schema : schemas) {
            BitSet ordinalsToRemap = changedOrdinalsBetweenStates.get(schema.getName());
            HollowTypeReadState fromTypeState = from.getTypeState(schema.getName());
            HollowTypeReadState toTypeState = to.getTypeState(schema.getName());
            HollowTypeWriteState typeWriteState = writeEngine.getTypeState(schema.getName());

            IntMap ordinalRemapping = new IntMap(ordinalsToRemap.cardinality());
            // First ordinal guaranteed unused by both the FROM and TO states.
            int nextFreeOrdinal = Math.max(fromTypeState.maxOrdinal(), toTypeState.maxOrdinal()) + 1;

            boolean preserveHashPositions = shouldPreserveHashPositions(schema);

            HollowRecordCopier copier = HollowRecordCopier.createCopier(fromTypeState, schema, remapper, preserveHashPositions);

            int ordinal = ordinalsToRemap.nextSetBit(0);
            while(ordinal != -1 && ordinal <= fromTypeState.maxOrdinal()) {
                HollowWriteRecord copy = copier.copy(ordinal);
                // markPreviousCycle == false: these records did not exist in the FROM state
                // at this ordinal; they are new in the intermediate state.
                typeWriteState.mapOrdinal(copy, nextFreeOrdinal, false, true);
                ordinalRemapping.put(ordinal, nextFreeOrdinal++);
                ordinal = ordinalsToRemap.nextSetBit(ordinal + 1);
            }

            remapper.addOrdinalRemapping(schema.getName(), ordinalRemapping);
            typeWriteState.recalculateFreeOrdinals();
        }
    }

    // Carries forward (from the intermediate state) each record which is identical in the
    // FROM and TO states at the same ordinal.
    private void copyUnchangedDataToDestinationState() {
        for(HollowSchema schema : schemas) {
            HollowTypeWriteState writeTypeState = writeEngine.getTypeState(schema.getName());
            HollowTypeReadState toTypeState = to.getTypeState(schema.getName());
            HollowTypeReadState fromTypeState = from.getTypeState(schema.getName());

            BitSet toOrdinals = toTypeState.getListener(PopulatedOrdinalListener.class).getPopulatedOrdinals();
            BitSet fromOrdinals = fromTypeState.getListener(PopulatedOrdinalListener.class).getPopulatedOrdinals();
            BitSet changedOrdinals = changedOrdinalsBetweenStates.get(schema.getName());

            int ordinal = toOrdinals.nextSetBit(0);
            while(ordinal != -1) {
                // Unchanged and populated in both states: reuse the record from the previous cycle.
                if(!changedOrdinals.get(ordinal) && fromOrdinals.get(ordinal))
                    writeTypeState.addOrdinalFromPreviousCycle(ordinal);
                ordinal = toOrdinals.nextSetBit(ordinal + 1);
            }
        }
    }

    // Writes each TO record which is new, or whose ordinal held a different record in the FROM
    // state, into the destination state at its TO-state ordinal.
    private void remapTheChangedDataToDestinationOrdinals() {
        for(HollowSchema schema : schemas) {
            BitSet changedOrdinals = changedOrdinalsBetweenStates.get(schema.getName());
            HollowTypeWriteState typeWriteState = writeEngine.getTypeState(schema.getName());
            HollowTypeReadState toReadState = to.getTypeState(schema.getName());
            HollowTypeReadState fromReadState = from.getTypeState(schema.getName());

            BitSet toOrdinals = toReadState.getListener(PopulatedOrdinalListener.class).getPopulatedOrdinals();
            BitSet fromOrdinals = fromReadState.getListener(PopulatedOrdinalListener.class).getPopulatedOrdinals();

            // Ordinals are already correct in the TO state, so no remapping is required here.
            HollowRecordCopier copier = HollowRecordCopier.createCopier(toReadState, schema, IdentityOrdinalRemapper.INSTANCE, true);

            int ordinal = toOrdinals.nextSetBit(0);
            while(ordinal != -1) {
                if(!fromOrdinals.get(ordinal) || changedOrdinals.get(ordinal)) {
                    HollowWriteRecord copy = copier.copy(ordinal);
                    typeWriteState.mapOrdinal(copy, ordinal, false, true);
                }
                ordinal = toOrdinals.nextSetBit(ordinal + 1);
            }

            typeWriteState.recalculateFreeOrdinals();
        }
    }

    // Determines, per type, which ordinals contain different records in the two states, then
    // expands the sets so that any record (transitively) referencing a changed record is also
    // considered changed -- its serialized form would differ after remapping.
    private Map<String, BitSet> discoverChangedOrdinalsBetweenStates() {
        SimultaneousExecutor executor = new SimultaneousExecutor(getClass(), "discover-changed");
        Map<String, BitSet> excludeOrdinalsFromCopy = new HashMap<String, BitSet>();

        for(HollowSchema schema : schemas) {
            BitSet recordsToExclude = findOrdinalsPopulatedWithDifferentRecords(schema.getName(), executor);
            excludeOrdinalsFromCopy.put(schema.getName(), recordsToExclude);
        }

        TransitiveSetTraverser.addReferencingOutsideClosure(from, excludeOrdinalsFromCopy);

        return excludeOrdinalsFromCopy;
    }

    // Compares, in parallel, each ordinal populated in both states; returns the set of ordinals
    // whose FROM and TO records are not equal under the schema-type-specific equality below.
    private BitSet findOrdinalsPopulatedWithDifferentRecords(String typeName, SimultaneousExecutor executor) {
        final HollowTypeReadState fromTypeState = from.getTypeState(typeName);
        final HollowTypeReadState toTypeState = to.getTypeState(typeName);

        // OBJECT types are compared field-by-field over the common schema, so their schemas may
        // differ; all other types require identical schemas.
        if(fromTypeState.getSchema().getSchemaType() != SchemaType.OBJECT)
            ensureEqualSchemas(fromTypeState, toTypeState);

        final BitSet fromOrdinals = fromTypeState.getListener(PopulatedOrdinalListener.class).getPopulatedOrdinals();
        final BitSet toOrdinals = toTypeState.getListener(PopulatedOrdinalListener.class).getPopulatedOrdinals();

        final int maxSharedOrdinal = Math.min(fromTypeState.maxOrdinal(), toTypeState.maxOrdinal());

        final ThreadSafeBitSet populatedOrdinalsWithDifferentRecords = new ThreadSafeBitSet();

        final int numThreads = executor.getCorePoolSize();
        for(int i=0;i<numThreads;i++) {
            final int threadNum = i;
            executor.execute(new Runnable() {
                public void run() {
                    // Each thread builds its own EqualityCondition: the set/map conditions hold
                    // mutable scratch state and are not thread-safe.
                    EqualityCondition equalityCondition = null;

                    switch(fromTypeState.getSchema().getSchemaType()) {
                    case OBJECT:
                        equalityCondition = objectRecordEquality(fromTypeState, toTypeState);
                        break;
                    case LIST:
                        equalityCondition = listRecordEquality(fromTypeState, toTypeState);
                        break;
                    case SET:
                        equalityCondition = setRecordEquality(fromTypeState, toTypeState);
                        break;
                    case MAP:
                        equalityCondition = mapRecordEquality(fromTypeState, toTypeState);
                        break;
                    }

                    // Strided partitioning: thread t handles ordinals t, t+numThreads, ...
                    for(int i=threadNum;i<=maxSharedOrdinal;i+=numThreads) {
                        if(fromOrdinals.get(i) && toOrdinals.get(i)) {
                            if(!equalityCondition.recordsAreEqual(i)) {
                                populatedOrdinalsWithDifferentRecords.set(i);
                            }
                        }
                    }
                }
            });
        }

        try {
            executor.awaitSuccessfulCompletionOfCurrentTasks();
        } catch(Exception e) {
            throw new RuntimeException(e);
        }

        return toBitSet(populatedOrdinalsWithDifferentRecords);
    }

    // Snapshots a ThreadSafeBitSet into a plain java.util.BitSet.
    private BitSet toBitSet(ThreadSafeBitSet tsbs) {
        BitSet bs = new BitSet(tsbs.currentCapacity());
        int bit = tsbs.nextSetBit(0);
        while(bit != -1) {
            bs.set(bit);
            bit = tsbs.nextSetBit(bit+1);
        }
        return bs;
    }

    // Schema-type-specific record equality at a shared ordinal.
    private static interface EqualityCondition {
        boolean recordsAreEqual(int ordinal);
    }

    // OBJECT records are equal when every field of the common schema matches; reference fields
    // compare by ordinal, all other fields by value.
    private EqualityCondition objectRecordEquality(HollowTypeReadState fromState, HollowTypeReadState toState) {
        final HollowObjectTypeReadState fromObjectState = (HollowObjectTypeReadState)fromState;
        final HollowObjectTypeReadState toObjectState = (HollowObjectTypeReadState)toState;

        final HollowObjectSchema commonSchema = fromObjectState.getSchema().findCommonSchema(toObjectState.getSchema());

        return new EqualityCondition() {
            public boolean recordsAreEqual(int ordinal) {
                for(int i=0;i<commonSchema.numFields();i++) {
                    // Field positions may differ between the two schemas; resolve by name.
                    int fromFieldPos = fromObjectState.getSchema().getPosition(commonSchema.getFieldName(i));
                    int toFieldPos = toObjectState.getSchema().getPosition(commonSchema.getFieldName(i));

                    if(commonSchema.getFieldType(i) == FieldType.REFERENCE) {
                        if(fromObjectState.readOrdinal(ordinal, fromFieldPos) != toObjectState.readOrdinal(ordinal, toFieldPos))
                            return false;
                    } else if(!HollowReadFieldUtils.fieldsAreEqual(fromObjectState, ordinal, fromFieldPos, toObjectState, ordinal, toFieldPos)) {
                        return false;
                    }
                }
                return true;
            }
        };
    }

    // LIST records are equal when they have the same size and the same element ordinal at
    // every position (order matters).
    private EqualityCondition listRecordEquality(HollowTypeReadState fromState, HollowTypeReadState toState) {
        final HollowListTypeReadState fromListState = (HollowListTypeReadState)fromState;
        final HollowListTypeReadState toListState = (HollowListTypeReadState)toState;

        return new EqualityCondition() {
            public boolean recordsAreEqual(int ordinal) {
                int size = fromListState.size(ordinal);
                if(toListState.size(ordinal) != size)
                    return false;

                for(int i=0;i<size;i++) {
                    if(fromListState.getElementOrdinal(ordinal, i) != toListState.getElementOrdinal(ordinal, i))
                        return false;
                }
                return true;
            }
        };
    }

    // SET records are equal when they contain the same element ordinals, irrespective of
    // iteration order: both sides are collected and sorted before comparison.
    private EqualityCondition setRecordEquality(HollowTypeReadState fromState, HollowTypeReadState toState) {
        final HollowSetTypeReadState fromSetState = (HollowSetTypeReadState)fromState;
        final HollowSetTypeReadState toSetState = (HollowSetTypeReadState)toState;

        return new EqualityCondition() {
            // Reusable scratch buffers; this condition is therefore NOT thread-safe.
            final IntList fromScratch = new IntList();
            final IntList toScratch = new IntList();

            public boolean recordsAreEqual(int ordinal) {
                int size = fromSetState.size(ordinal);
                if(toSetState.size(ordinal) != size)
                    return false;

                fromScratch.clear();
                toScratch.clear();

                HollowOrdinalIterator iter = fromSetState.ordinalIterator(ordinal);
                int next = iter.next();
                while(next != HollowOrdinalIterator.NO_MORE_ORDINALS) {
                    fromScratch.add(next);
                    next = iter.next();
                }

                iter = toSetState.ordinalIterator(ordinal);
                next = iter.next();
                while(next != HollowOrdinalIterator.NO_MORE_ORDINALS) {
                    toScratch.add(next);
                    next = iter.next();
                }

                fromScratch.sort();
                toScratch.sort();

                return fromScratch.equals(toScratch);
            }
        };
    }

    // MAP records are equal when they contain the same (key ordinal, value ordinal) pairs,
    // irrespective of iteration order.
    private EqualityCondition mapRecordEquality(HollowTypeReadState fromState, HollowTypeReadState toState) {
        final HollowMapTypeReadState fromMapState = (HollowMapTypeReadState) fromState;
        final HollowMapTypeReadState toMapState = (HollowMapTypeReadState) toState;

        return new EqualityCondition() {
            // Reusable scratch buffers; this condition is therefore NOT thread-safe.
            final LongList fromScratch = new LongList();
            final LongList toScratch = new LongList();

            public boolean recordsAreEqual(int ordinal) {
                int size = fromMapState.size(ordinal);
                if(toMapState.size(ordinal) != size)
                    return false;

                fromScratch.clear();
                toScratch.clear();

                // Pack each entry's (key, value) ordinals into a single long.
                // NOTE(review): a negative getValue() would sign-extend and corrupt the key
                // bits; presumably ordinals are always non-negative here -- verify.
                HollowMapEntryOrdinalIterator iter = fromMapState.ordinalIterator(ordinal);
                while(iter.next())
                    fromScratch.add(((long)iter.getKey() << 32) | iter.getValue());

                iter = toMapState.ordinalIterator(ordinal);
                while(iter.next())
                    toScratch.add(((long)iter.getKey() << 32) | iter.getValue());

                fromScratch.sort();
                toScratch.sort();

                return fromScratch.equals(toScratch);
            }
        };
    }

    // Non-OBJECT types cannot be compared field-by-field, so their schemas must be identical.
    private void ensureEqualSchemas(HollowTypeReadState fromState, HollowTypeReadState toState) {
        if(!fromState.getSchema().equals(toState.getSchema()))
            throw new IllegalStateException("FROM and TO schemas were not the same: " + fromState.getSchema().getName());
    }

    // Collects the schemas present in both states; for OBJECT types, uses the common
    // (intersected) schema so only shared fields are compared and copied.
    private Set<HollowSchema> getCommonSchemas(HollowReadStateEngine from, HollowReadStateEngine to) {
        Set<HollowSchema> schemas = new HashSet<HollowSchema>();

        for(HollowSchema fromSchema : from.getSchemas()) {
            // NOTE(review): to.getTypeState(...) may return null when the type is absent from
            // the TO state, which would NPE before the null check below reaches toSchema --
            // confirm whether callers guarantee type presence.
            HollowSchema toSchema = to.getTypeState(fromSchema.getName()).getSchema();
            if(toSchema != null) {
                if(fromSchema.getSchemaType() == SchemaType.OBJECT) {
                    HollowObjectSchema commonSchema = ((HollowObjectSchema)fromSchema).findCommonSchema((HollowObjectSchema)toSchema);
                    schemas.add(commonSchema);
                } else {
                    schemas.add(toSchema);
                }
            }
        }

        return schemas;
    }

    // Hash positions must be preserved for SET/MAP types whose element/key type has
    // user-defined hash codes, since those positions cannot be recomputed by the copier.
    private boolean shouldPreserveHashPositions(HollowSchema schema) {
        switch(schema.getSchemaType()) {
        case MAP:
            return from.getTypesWithDefinedHashCodes().contains(((HollowMapSchema)schema).getKeyType());
        case SET:
            return from.getTypesWithDefinedHashCodes().contains(((HollowSetSchema)schema).getElementType());
        default:
            return false;
        }
    }
}
| 8,930 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/patch | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/patch/delta/PartialOrdinalRemapper.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.patch.delta;
import com.netflix.hollow.core.util.IntMap;
import com.netflix.hollow.tools.combine.OrdinalRemapper;
import com.netflix.hollow.tools.compact.HollowCompactor;
import java.util.HashMap;
import java.util.Map;
/**
* An {@link OrdinalRemapper} used by the {@link HollowCompactor} and {@link HollowStateDeltaPatcher}.
*
* Not intended for external consumption.
*/
public class PartialOrdinalRemapper implements OrdinalRemapper {

    // Per-type ordinal remappings; types (or ordinals) without an entry map to themselves.
    private final Map<String, IntMap> ordinalMappings;

    public PartialOrdinalRemapper() {
        this.ordinalMappings = new HashMap<String, IntMap>();
    }

    /**
     * Registers the ordinal remapping for the given type, replacing any prior mapping.
     *
     * @param typeName the type name
     * @param mapping map of original ordinal to remapped ordinal
     */
    public void addOrdinalRemapping(String typeName, IntMap mapping) {
        ordinalMappings.put(typeName, mapping);
    }

    /**
     * @param typeName the type name
     * @return the registered mapping for the type, or null if none was registered
     */
    public IntMap getOrdinalRemapping(String typeName) {
        return ordinalMappings.get(typeName);
    }

    /**
     * {@inheritDoc}
     *
     * <p>Returns the remapped ordinal if one was registered for this type and ordinal;
     * otherwise returns the original ordinal unchanged (hence "partial" remapper).
     */
    @Override
    public int getMappedOrdinal(String type, int originalOrdinal) {
        IntMap mapping = ordinalMappings.get(type);
        if(mapping != null) {
            int remappedOrdinal = mapping.get(originalOrdinal);
            if(remappedOrdinal != -1)   // IntMap returns -1 for absent keys
                return remappedOrdinal;
        }
        return originalOrdinal;
    }

    /**
     * {@inheritDoc}
     *
     * <p>Always true: every ordinal is considered mapped, falling back to identity.
     */
    @Override
    public boolean ordinalIsMapped(String type, int originalOrdinal) {
        return true;
    }

    /**
     * Unsupported: mappings may only be registered via {@link #addOrdinalRemapping(String, IntMap)}.
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public void remapOrdinal(String type, int originalOrdinal, int mappedOrdinal) {
        // Fixed: the message previously named the wrong class (IntMapOrdinalRemapper).
        throw new UnsupportedOperationException("Cannot explicitly remap an ordinal in a PartialOrdinalRemapper");
    }
}
| 8,931 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/patch | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/patch/record/TypeMatchSpec.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.patch.record;
import java.util.ArrayList;
import java.util.List;
/**
 * Specifies a set of records of a single type to match, identified by one or more
 * field paths (the "key") and the key value combinations which select records.
 */
public class TypeMatchSpec {

    private final String typeName;
    private final String[] keyPaths;           // idiomatic Java array declaration (was C-style)
    private final List<Object[]> keyMatchingValues;

    /**
     * @param typeName the name of the type whose records are matched
     * @param keyPaths the field paths forming the matching key
     */
    public TypeMatchSpec(String typeName, String... keyPaths) {
        this.typeName = typeName;
        this.keyPaths = keyPaths;
        this.keyMatchingValues = new ArrayList<Object[]>();
    }

    /**
     * Adds one combination of key values identifying records to match.
     * The values must be positionally aligned with the key paths supplied at construction.
     *
     * @param matchValues one value per key path
     */
    public void addMatchingValue(Object... matchValues) {
        this.keyMatchingValues.add(matchValues);
    }

    /** @return the name of the matched type */
    public String getTypeName() {
        return typeName;
    }

    /**
     * @return the key field paths. Note: the internal array is returned directly
     *         (not a copy), preserving the original behavior.
     */
    public String[] getKeyPaths() {
        return keyPaths;
    }

    /** @return the live list of registered key value combinations (not a copy) */
    public List<Object[]> getKeyMatchingValues() {
        return keyMatchingValues;
    }
}
| 8,932 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/patch | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/patch/record/HollowStateEngineRecordPatcher.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.patch.record;
import com.netflix.hollow.core.index.traversal.HollowIndexerValueTraverser;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.PopulatedOrdinalListener;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.tools.combine.HollowCombiner;
import com.netflix.hollow.tools.combine.HollowCombinerCopyDirector;
import com.netflix.hollow.tools.traverse.TransitiveSetTraverser;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* This tool can be used to create a state with most records coming from a "base" state, but selected records
* coming from a "patch" state.
*/
public class HollowStateEngineRecordPatcher {
private final HollowReadStateEngine base;
private final HollowReadStateEngine patchFrom;
private final List<TypeMatchSpec> matchKeyPaths;
private String ignoredTypes[] = new String[0];
public HollowStateEngineRecordPatcher(HollowReadStateEngine base, HollowReadStateEngine patchFrom) {
this(base, patchFrom, true);
}
public HollowStateEngineRecordPatcher(HollowReadStateEngine base, HollowReadStateEngine patchFrom, boolean removeDetachedTransitiveReferences) {
this.base = base;
this.patchFrom = patchFrom;
this.matchKeyPaths = new ArrayList<TypeMatchSpec>();
}
public void addTypeMatchSpec(TypeMatchSpec matchSpec) {
this.matchKeyPaths.add(matchSpec);
}
public void setIgnoredTypes(String... ignoredTypes) {
this.ignoredTypes = ignoredTypes;
}
public HollowWriteStateEngine patch() {
Map<String, BitSet> baseMatches = findMatches(base);
TransitiveSetTraverser.addTransitiveMatches(base, baseMatches);
TransitiveSetTraverser.removeReferencedOutsideClosure(base, baseMatches);
Map<String, BitSet> patchFromMatches = findMatches(patchFrom);
HollowCombinerCopyDirector combineDirector = new HollowPatcherCombinerCopyDirector(base, baseMatches, patchFrom, patchFromMatches);
HollowCombiner combiner = new HollowCombiner(combineDirector, base, patchFrom);
combiner.addIgnoredTypes(ignoredTypes);
combiner.combine();
return combiner.getCombinedStateEngine();
}
private Map<String, BitSet> findMatches(HollowReadStateEngine stateEngine) {
Map<String, BitSet> matches = new HashMap<String, BitSet>();
for(TypeMatchSpec spec : matchKeyPaths) {
HollowTypeReadState typeState = stateEngine.getTypeState(spec.getTypeName());
BitSet foundMatches = getOrCreateBitSet(matches, spec.getTypeName(), typeState.maxOrdinal());
if(typeState != null) {
BitSet ordinals = getPopulatedOrdinals(typeState);
HollowIndexerValueTraverser traverser = new HollowIndexerValueTraverser(stateEngine, spec.getTypeName(), spec.getKeyPaths());
int ordinal = ordinals.nextSetBit(0);
while(ordinal != -1) {
traverser.traverse(ordinal);
for(int i=0;i<traverser.getNumMatches();i++) {
boolean foundMatch = false;
for(int j=0;j<spec.getKeyMatchingValues().size();j++) {
boolean matched = true;
for(int k=0;k<traverser.getNumFieldPaths();k++) {
if(!traverser.isMatchedValueEqual(i, k, spec.getKeyMatchingValues().get(j)[k])) {
matched = false;
break;
}
}
if(matched) {
foundMatch = true;
break;
}
}
if(foundMatch) {
foundMatches.set(ordinal);
break;
}
}
ordinal = ordinals.nextSetBit(ordinal + 1);
}
if(foundMatches.size() > 0)
matches.put(spec.getTypeName(), foundMatches);
}
}
return matches;
}
private BitSet getOrCreateBitSet(Map<String, BitSet> bitSets, String typeName, int numBitsRequired) {
if(numBitsRequired < 0)
return new BitSet(0);
BitSet bs = bitSets.get(typeName);
if(bs == null) {
bs = new BitSet(numBitsRequired);
bitSets.put(typeName, bs);
}
return bs;
}
private BitSet getPopulatedOrdinals(HollowTypeReadState typeState) {
return typeState.getListener(PopulatedOrdinalListener.class).getPopulatedOrdinals();
}
} | 8,933 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/patch | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/patch/record/HollowPatcherCombinerCopyDirector.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.patch.record;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.tools.combine.HollowCombinerCopyDirector;
import java.util.BitSet;
import java.util.Map;
/**
 * Directs the {@link com.netflix.hollow.tools.combine.HollowCombiner} during a record patch:
 * copies everything from the base state EXCEPT records in the replaced closure, and from the
 * patch state ONLY the records in the matched closure.
 */
public class HollowPatcherCombinerCopyDirector implements HollowCombinerCopyDirector {

    // Made final for consistency with the closure maps below; they are never reassigned.
    private final HollowReadStateEngine base;
    private final HollowReadStateEngine patchFrom;

    private final Map<String, BitSet> baseMatchesClosure;
    private final Map<String, BitSet> patchFromMatchesClosure;

    /**
     * @param base the state supplying most records
     * @param baseMatchesClosure per type, the base ordinals being replaced (not copied)
     * @param patchFrom the state supplying the patched-in records
     * @param patchFromMatchesClosure per type, the patch ordinals to copy
     */
    public HollowPatcherCombinerCopyDirector(HollowReadStateEngine base, Map<String, BitSet> baseMatchesClosure, HollowReadStateEngine patchFrom, Map<String, BitSet> patchFromMatchesClosure) {
        this.base = base;
        this.patchFrom = patchFrom;
        this.baseMatchesClosure = baseMatchesClosure;
        this.patchFromMatchesClosure = patchFromMatchesClosure;
    }

    /**
     * {@inheritDoc}
     *
     * <p>Identifies the source engine by reference: base records copy unless excluded,
     * patch records copy only if included; records from any other engine never copy.
     */
    @Override
    public boolean shouldCopy(HollowTypeReadState typeState, int ordinal) {
        if(typeState.getStateEngine() == base) {
            BitSet bitSet = baseMatchesClosure.get(typeState.getSchema().getName());
            if(bitSet == null)
                return true;    // no replacements for this type: copy everything
            return !bitSet.get(ordinal);
        } else if(typeState.getStateEngine() == patchFrom){
            BitSet bitSet = patchFromMatchesClosure.get(typeState.getSchema().getName());
            if(bitSet == null)
                return false;   // no matches for this type: copy nothing from the patch
            return bitSet.get(ordinal);
        }

        return false;
    }
}
| 8,934 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history/IntMapOrdinalRemapper.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.history;
import com.netflix.hollow.core.util.IntMap;
import com.netflix.hollow.tools.combine.OrdinalRemapper;
import java.util.HashMap;
import java.util.Map;
/**
* An {@link OrdinalRemapper} which is used to explicitly remap ordinals.
* <p>
* Not intended for external consumption.
*
*/
public class IntMapOrdinalRemapper implements OrdinalRemapper {

    // One IntMap of (original ordinal -> mapped ordinal) per type name.
    private final Map<String, IntMap> ordinalMappings;

    public IntMapOrdinalRemapper() {
        this.ordinalMappings = new HashMap<String, IntMap>();
    }

    /** Registers the ordinal remapping for the given type, replacing any prior one. */
    public void addOrdinalRemapping(String typeName, IntMap mapping) {
        ordinalMappings.put(typeName, mapping);
    }

    /** @return the mapping registered for the type, or null if none exists */
    public IntMap getOrdinalRemapping(String typeName) {
        return ordinalMappings.get(typeName);
    }

    /**
     * {@inheritDoc}
     *
     * <p>Returns -1 when no mapping exists for the type, or when the ordinal is
     * absent from the type's mapping (IntMap returns -1 for missing keys).
     */
    @Override
    public int getMappedOrdinal(String type, int originalOrdinal) {
        IntMap typeMapping = ordinalMappings.get(type);
        return typeMapping == null ? -1 : typeMapping.get(originalOrdinal);
    }

    /**
     * {@inheritDoc}
     *
     * <p>An ordinal is considered mapped exactly when a mapped value exists for it.
     */
    @Override
    public boolean ordinalIsMapped(String type, int originalOrdinal) {
        return getMappedOrdinal(type, originalOrdinal) != -1;
    }

    /**
     * Unsupported: mappings are registered wholesale via {@link #addOrdinalRemapping(String, IntMap)}.
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public void remapOrdinal(String type, int originalOrdinal, int mappedOrdinal) {
        throw new UnsupportedOperationException("Cannot explicitly remap an ordinal in an IntMapOrdinalRemapper");
    }
}
| 8,935 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history/HistoricalPrimaryKeyMatcher.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.history;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import java.util.Arrays;
public class HistoricalPrimaryKeyMatcher {
private final HollowObjectTypeDataAccess keyTypeAccess;
private final int fieldPathIndexes[][];
private final FieldType[] fieldTypes;
public HistoricalPrimaryKeyMatcher(HollowDataAccess dataAccess, PrimaryKey primaryKey) {
this.fieldPathIndexes = new int[primaryKey.numFields()][];
this.fieldTypes = new FieldType[primaryKey.numFields()];
for(int i=0;i<primaryKey.numFields();i++) {
fieldPathIndexes[i] = primaryKey.getFieldPathIndex(dataAccess, i);
fieldTypes[i] = primaryKey.getFieldType(dataAccess, i);
}
this.keyTypeAccess = (HollowObjectTypeDataAccess) dataAccess.getTypeDataAccess(primaryKey.getType());
}
public boolean keyMatches(int ordinal, Object... keys) {
if(keys.length != fieldPathIndexes.length)
return false;
for(int i=0;i<keys.length;i++) {
if(!keyMatches(keys[i], ordinal, i))
return false;
}
return true;
}
public boolean keyMatches(Object key, int ordinal, int fieldIdx) {
HollowObjectTypeDataAccess dataAccess = keyTypeAccess;
HollowObjectSchema schema = dataAccess.getSchema();
int lastFieldPath = fieldPathIndexes[fieldIdx].length - 1;
for(int i=0;i<lastFieldPath;i++) {
int fieldPosition = fieldPathIndexes[fieldIdx][i];
ordinal = dataAccess.readOrdinal(ordinal, fieldPosition);
dataAccess = (HollowObjectTypeDataAccess) dataAccess.getDataAccess().getTypeDataAccess(schema.getReferencedType(fieldPosition), ordinal);
schema = dataAccess.getSchema();
}
int lastFieldIdx = fieldPathIndexes[fieldIdx][lastFieldPath];
switch(fieldTypes[fieldIdx]) {
case BOOLEAN:
Boolean b = dataAccess.readBoolean(ordinal, lastFieldIdx);
if(b == key)
return true;
if(b == null || key == null)
return false;
return b.booleanValue() == ((Boolean)key).booleanValue();
case BYTES:
return Arrays.equals(dataAccess.readBytes(ordinal, lastFieldIdx), (byte[])key);
case DOUBLE:
return dataAccess.readDouble(ordinal, lastFieldIdx) == ((Double)key).doubleValue();
case FLOAT:
return dataAccess.readFloat(ordinal, lastFieldIdx) == ((Float)key).floatValue();
case INT:
return dataAccess.readInt(ordinal, lastFieldIdx) == ((Integer)key).intValue();
case LONG:
return dataAccess.readLong(ordinal, lastFieldIdx) == ((Long)key).longValue();
case REFERENCE:
return dataAccess.readOrdinal(ordinal, lastFieldIdx) == ((Integer)key).intValue();
case STRING:
return dataAccess.isStringFieldEqual(ordinal, lastFieldIdx, (String)key);
}
throw new IllegalArgumentException("I don't know how to compare a " + fieldTypes[fieldIdx]);
}
/**
 * @return the {@link FieldType} of each primary key field, indexed by key field position
 */
public FieldType[] getFieldTypes() {
return fieldTypes;
}
}
| 8,936 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history/HollowHistoricalSetDataAccess.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.history;
import com.netflix.hollow.api.sampling.DisabledSamplingDirector;
import com.netflix.hollow.api.sampling.HollowSetSampler;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.read.dataaccess.HollowSetTypeDataAccess;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.SetMapKeyHasher;
import com.netflix.hollow.core.read.engine.set.HollowSetTypeReadState;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import com.netflix.hollow.core.schema.HollowSetSchema;
/**
 * SET-type data access for a single historical state.
 * <p>
 * Records which were removed going forward from this state are served from a locally
 * retained copy ({@code removedRecords}, via a remapped ordinal); every other ordinal
 * is delegated to the data access resolved through the owning
 * {@link HollowHistoricalStateDataAccess}.
 */
public class HollowHistoricalSetDataAccess extends HollowHistoricalTypeDataAccess implements HollowSetTypeDataAccess {
// Matches hash key fields against element records; null when the schema declares no hash key.
private HistoricalPrimaryKeyMatcher keyMatcher;
public HollowHistoricalSetDataAccess(HollowHistoricalStateDataAccess dataAccess, HollowTypeReadState typeState) {
super(dataAccess, typeState, new HollowSetSampler(typeState.getSchema().getName(), DisabledSamplingDirector.INSTANCE));
}
@Override
public HollowSetSchema getSchema() {
return removedRecords().getSchema();
}
@Override
public int size(int ordinal) {
sampler().recordSize();
recordStackTrace();
// Ordinal not retained here: delegate to the state which still holds the record.
if(!ordinalIsPresent(ordinal))
return ((HollowSetTypeDataAccess)dataAccess.getTypeDataAccess(getSchema().getName(), ordinal)).size(ordinal);
return removedRecords().size(getMappedOrdinal(ordinal));
}
@Override
public boolean contains(int ordinal, int value) {
sampler().recordGet();
recordStackTrace();
if(!ordinalIsPresent(ordinal))
return ((HollowSetTypeDataAccess)dataAccess.getTypeDataAccess(getSchema().getName(), ordinal)).contains(ordinal, value);
return removedRecords().contains(getMappedOrdinal(ordinal), value);
}
/**
 * Finds the element of the set at {@code ordinal} matching the supplied hash key,
 * probing the set's bucket table with open addressing (linear probing).
 *
 * @return the matching element's ordinal, or -1 if absent or no hash key is defined
 */
@Override
public int findElement(int ordinal, Object... hashKey) {
sampler().recordGet();
recordStackTrace();
// Without a declared hash key there is nothing to match against.
if(keyMatcher == null)
return -1;
if(!ordinalIsPresent(ordinal))
return ((HollowSetTypeDataAccess)dataAccess.getTypeDataAccess(getSchema().getName(), ordinal)).findElement(ordinal, hashKey);
ordinal = ordinalRemap.get(ordinal);
HollowSetTypeReadState removedRecords = (HollowSetTypeReadState)getRemovedRecords();
// hashTableSize is a power of two, so (hash & (size-1)) selects the bucket.
int hashTableSize = HashCodes.hashTableSize(removedRecords.size(ordinal));
int hash = SetMapKeyHasher.hash(hashKey, keyMatcher.getFieldTypes());
int bucket = hash & (hashTableSize - 1);
int bucketOrdinal = removedRecords.relativeBucketValue(ordinal, bucket);
// Linear probe until a matching element or an empty bucket (-1) is found.
while(bucketOrdinal != -1) {
if(keyMatcher.keyMatches(bucketOrdinal, hashKey))
return bucketOrdinal;
bucket++;
bucket &= (hashTableSize - 1);
bucketOrdinal = removedRecords.relativeBucketValue(ordinal, bucket);
}
return -1;
}
@Override
public boolean contains(int ordinal, int value, int hashCode) {
sampler().recordGet();
recordStackTrace();
if(!ordinalIsPresent(ordinal))
return ((HollowSetTypeDataAccess)dataAccess.getTypeDataAccess(getSchema().getName(), ordinal)).contains(ordinal, value, hashCode);
return removedRecords().contains(getMappedOrdinal(ordinal), value, hashCode);
}
@Override
public int relativeBucketValue(int ordinal, int bucketIndex) {
recordStackTrace();
if(!ordinalIsPresent(ordinal))
return ((HollowSetTypeDataAccess)dataAccess.getTypeDataAccess(getSchema().getName(), ordinal)).relativeBucketValue(ordinal, bucketIndex);
return removedRecords().relativeBucketValue(getMappedOrdinal(ordinal), bucketIndex);
}
@Override
public HollowOrdinalIterator potentialMatchOrdinalIterator(int ordinal, int hashCode) {
sampler().recordIterator();
recordStackTrace();
if(!ordinalIsPresent(ordinal))
return ((HollowSetTypeDataAccess)dataAccess.getTypeDataAccess(getSchema().getName(), ordinal)).potentialMatchOrdinalIterator(ordinal, hashCode);
return removedRecords().potentialMatchOrdinalIterator(getMappedOrdinal(ordinal), hashCode);
}
@Override
public HollowOrdinalIterator ordinalIterator(int ordinal) {
sampler().recordIterator();
recordStackTrace();
if(!ordinalIsPresent(ordinal))
return ((HollowSetTypeDataAccess)dataAccess.getTypeDataAccess(getSchema().getName(), ordinal)).ordinalIterator(ordinal);
return removedRecords().ordinalIterator(getMappedOrdinal(ordinal));
}
// Covariant view of the inherited removed-records type state.
private HollowSetTypeReadState removedRecords() {
return (HollowSetTypeReadState) removedRecords;
}
// Covariant view of the inherited sampler.
private HollowSetSampler sampler() {
return (HollowSetSampler) sampler;
}
// Called after all historical type data accesses exist, so the key matcher can
// resolve field paths through the complete historical data access.
void buildKeyMatcher() {
PrimaryKey hashKey = getSchema().getHashKey();
if(hashKey != null)
this.keyMatcher = new HistoricalPrimaryKeyMatcher(getDataAccess(), hashKey);
}
}
| 8,937 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history/HollowHistoricalObjectDataAccess.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.history;
import com.netflix.hollow.api.sampling.DisabledSamplingDirector;
import com.netflix.hollow.api.sampling.HollowObjectSampler;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.schema.HollowObjectSchema;
/**
 * OBJECT-type data access for a single historical state.
 * <p>
 * Each read records sampling and stack-trace bookkeeping, then either serves the value
 * from the locally retained removed-record copy (when this state holds the ordinal) or
 * delegates to the data access resolved through the owning
 * {@link HollowHistoricalStateDataAccess}.
 */
public class HollowHistoricalObjectDataAccess extends HollowHistoricalTypeDataAccess implements HollowObjectTypeDataAccess {

    public HollowHistoricalObjectDataAccess(HollowHistoricalStateDataAccess dataAccess, HollowTypeReadState removedRecords) {
        super(dataAccess, removedRecords, new HollowObjectSampler((HollowObjectSchema) removedRecords.getSchema(), DisabledSamplingDirector.INSTANCE));
    }

    @Override
    public HollowObjectSchema getSchema() {
        return removedRecords().getSchema();
    }

    @Override
    public boolean isNull(int ordinal, int fieldIndex) {
        recordAccess(fieldIndex);
        if (ordinalIsPresent(ordinal))
            return removedRecords().isNull(getMappedOrdinal(ordinal), fieldIndex);
        return delegate(ordinal).isNull(ordinal, fieldIndex);
    }

    @Override
    public int readOrdinal(int ordinal, int fieldIndex) {
        recordAccess(fieldIndex);
        if (ordinalIsPresent(ordinal))
            return removedRecords().readOrdinal(getMappedOrdinal(ordinal), fieldIndex);
        return delegate(ordinal).readOrdinal(ordinal, fieldIndex);
    }

    @Override
    public int readInt(int ordinal, int fieldIndex) {
        recordAccess(fieldIndex);
        if (ordinalIsPresent(ordinal))
            return removedRecords().readInt(getMappedOrdinal(ordinal), fieldIndex);
        return delegate(ordinal).readInt(ordinal, fieldIndex);
    }

    @Override
    public float readFloat(int ordinal, int fieldIndex) {
        recordAccess(fieldIndex);
        if (ordinalIsPresent(ordinal))
            return removedRecords().readFloat(getMappedOrdinal(ordinal), fieldIndex);
        return delegate(ordinal).readFloat(ordinal, fieldIndex);
    }

    @Override
    public double readDouble(int ordinal, int fieldIndex) {
        recordAccess(fieldIndex);
        if (ordinalIsPresent(ordinal))
            return removedRecords().readDouble(getMappedOrdinal(ordinal), fieldIndex);
        return delegate(ordinal).readDouble(ordinal, fieldIndex);
    }

    @Override
    public long readLong(int ordinal, int fieldIndex) {
        recordAccess(fieldIndex);
        if (ordinalIsPresent(ordinal))
            return removedRecords().readLong(getMappedOrdinal(ordinal), fieldIndex);
        return delegate(ordinal).readLong(ordinal, fieldIndex);
    }

    @Override
    public Boolean readBoolean(int ordinal, int fieldIndex) {
        recordAccess(fieldIndex);
        if (ordinalIsPresent(ordinal))
            return removedRecords().readBoolean(getMappedOrdinal(ordinal), fieldIndex);
        return delegate(ordinal).readBoolean(ordinal, fieldIndex);
    }

    @Override
    public byte[] readBytes(int ordinal, int fieldIndex) {
        recordAccess(fieldIndex);
        if (ordinalIsPresent(ordinal))
            return removedRecords().readBytes(getMappedOrdinal(ordinal), fieldIndex);
        return delegate(ordinal).readBytes(ordinal, fieldIndex);
    }

    @Override
    public String readString(int ordinal, int fieldIndex) {
        recordAccess(fieldIndex);
        if (ordinalIsPresent(ordinal))
            return removedRecords().readString(getMappedOrdinal(ordinal), fieldIndex);
        return delegate(ordinal).readString(ordinal, fieldIndex);
    }

    @Override
    public boolean isStringFieldEqual(int ordinal, int fieldIndex, String testValue) {
        recordAccess(fieldIndex);
        if (ordinalIsPresent(ordinal))
            return removedRecords().isStringFieldEqual(getMappedOrdinal(ordinal), fieldIndex, testValue);
        return delegate(ordinal).isStringFieldEqual(ordinal, fieldIndex, testValue);
    }

    @Override
    public int findVarLengthFieldHashCode(int ordinal, int fieldIndex) {
        recordAccess(fieldIndex);
        if (ordinalIsPresent(ordinal))
            return removedRecords().findVarLengthFieldHashCode(getMappedOrdinal(ordinal), fieldIndex);
        return delegate(ordinal).findVarLengthFieldHashCode(ordinal, fieldIndex);
    }

    // Sampling + stack-trace bookkeeping performed before every field read.
    private void recordAccess(int fieldIndex) {
        sampler().recordFieldAccess(fieldIndex);
        recordStackTrace();
    }

    // Resolves the data access which still holds this (non-retained) ordinal.
    private HollowObjectTypeDataAccess delegate(int ordinal) {
        return (HollowObjectTypeDataAccess) dataAccess.getTypeDataAccess(getSchema().getName(), ordinal);
    }

    // Covariant view of the inherited removed-records type state.
    private HollowObjectTypeReadState removedRecords() {
        return (HollowObjectTypeReadState) removedRecords;
    }

    // Covariant view of the inherited sampler.
    private HollowObjectSampler sampler() {
        return (HollowObjectSampler) sampler;
    }
}
| 8,938 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history/HollowHistoricalStateDataAccess.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.history;
import com.netflix.hollow.api.client.StackTraceRecorder;
import com.netflix.hollow.api.error.SchemaNotFoundException;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.missing.MissingDataHandler;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.util.HollowObjectHashCodeFinder;
import com.netflix.hollow.tools.combine.OrdinalRemapper;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * A {@link HollowDataAccess} over a single historical state. Holds copies of only
 * the records removed going forward from this state; other records are resolved by
 * walking the chain of newer states via {@link #getNextState()}.
 */
public class HollowHistoricalStateDataAccess implements HollowDataAccess {
private final HollowHistory totalHistory;
private final long version;
// Maps original ordinals to the ordinals of the locally retained removed-record copies.
private final OrdinalRemapper removedCopyOrdinalMapping;
private final Map<String, HollowHistoricalSchemaChange> schemaChanges;
private final Map<String, HollowHistoricalTypeDataAccess> typeDataAccessMap;
private final HollowObjectHashCodeFinder hashCodeFinder;
private final MissingDataHandler missingDataHandler;
// The next (more recent) state in the history chain; terminates at a live state engine.
private HollowDataAccess nextState;
private StackTraceRecorder stackTraceRecorder;
public HollowHistoricalStateDataAccess(HollowHistory totalHistory, long version, HollowReadStateEngine removedRecordCopies, OrdinalRemapper removedCopyOrdinalMappings, Map<String, HollowHistoricalSchemaChange> schemaChanges) {
this(totalHistory, version, removedRecordCopies, removedRecordCopies.getTypeStates(), removedCopyOrdinalMappings, schemaChanges);
}
public HollowHistoricalStateDataAccess(HollowHistory totalHistory, long version, HollowReadStateEngine removedRecordCopies, Collection<HollowTypeReadState> typeStates, OrdinalRemapper removedCopyOrdinalMappings, Map<String, HollowHistoricalSchemaChange> schemaChanges) {
this.totalHistory = totalHistory;
this.version = version;
this.hashCodeFinder = removedRecordCopies.getHashCodeFinder();
this.missingDataHandler = removedRecordCopies.getMissingDataHandler();
this.removedCopyOrdinalMapping = removedCopyOrdinalMappings;
this.schemaChanges = schemaChanges;
// Wrap each removed-record type state in the matching historical data access flavor.
Map<String, HollowHistoricalTypeDataAccess> typeDataAccessMap = new HashMap<String, HollowHistoricalTypeDataAccess>();
for(HollowTypeReadState typeState : typeStates) {
String typeName = typeState.getSchema().getName();
switch(typeState.getSchema().getSchemaType()) {
case OBJECT:
typeDataAccessMap.put(typeName, new HollowHistoricalObjectDataAccess(this, typeState));
break;
case LIST:
typeDataAccessMap.put(typeName, new HollowHistoricalListDataAccess(this, typeState));
break;
case SET:
typeDataAccessMap.put(typeName, new HollowHistoricalSetDataAccess(this, typeState));
break;
case MAP:
typeDataAccessMap.put(typeName, new HollowHistoricalMapDataAccess(this, typeState));
break;
}
}
this.typeDataAccessMap = typeDataAccessMap;
// Key matchers are built in a second pass, once every type data access exists,
// because matching may traverse references into other types.
for(Map.Entry<String, HollowHistoricalTypeDataAccess> entry : typeDataAccessMap.entrySet()) {
HollowHistoricalTypeDataAccess typeDataAccess = entry.getValue();
switch(typeDataAccess.getSchema().getSchemaType()) {
case MAP:
((HollowHistoricalMapDataAccess)typeDataAccess).buildKeyMatcher();
break;
case SET:
((HollowHistoricalSetDataAccess)typeDataAccess).buildKeyMatcher();
break;
default:
}
}
}
public HollowHistory getTotalHistory() {
return totalHistory;
}
public long getVersion() {
return version;
}
public void setNextState(HollowDataAccess nextState) {
this.nextState = nextState;
}
public HollowDataAccess getNextState() {
return nextState;
}
public OrdinalRemapper getOrdinalMapping() {
return removedCopyOrdinalMapping;
}
public Map<String, HollowHistoricalSchemaChange> getSchemaChanges() {
return schemaChanges;
}
Map<String, HollowHistoricalTypeDataAccess> getTypeDataAccessMap() {
return typeDataAccessMap;
}
/**
 * Returns the type data access for {@code typeName}, preferring this state, then
 * walking forward through newer historical states, finally falling through to the
 * live (non-historical) data access at the end of the chain.
 */
@Override
public HollowTypeDataAccess getTypeDataAccess(String typeName) {
HollowDataAccess state = this;
HollowTypeDataAccess typeDataAccess = typeDataAccessMap.get(typeName);
if(typeDataAccess != null)
return typeDataAccess;
while(state instanceof HollowHistoricalStateDataAccess) {
HollowHistoricalStateDataAccess historicalState = (HollowHistoricalStateDataAccess)state;
typeDataAccess = historicalState.typeDataAccessMap.get(typeName);
if(typeDataAccess != null)
return typeDataAccess;
state = historicalState.getNextState();
}
return state.getTypeDataAccess(typeName);
}
@Override
public Collection<String> getAllTypes() {
return typeDataAccessMap.keySet();
}
/**
 * Returns the type data access for the state which retains the record at
 * {@code ordinal}: the first historical state whose ordinal mapping covers the
 * ordinal, otherwise the live data access at the end of the chain.
 */
@Override
public HollowTypeDataAccess getTypeDataAccess(String typeName, int ordinal) {
HollowDataAccess state = this;
while(state instanceof HollowHistoricalStateDataAccess) {
HollowHistoricalStateDataAccess historicalState = (HollowHistoricalStateDataAccess)state;
if(historicalState.getOrdinalMapping().ordinalIsMapped(typeName, ordinal))
return state.getTypeDataAccess(typeName);
state = historicalState.getNextState();
}
return state.getTypeDataAccess(typeName);
}
@Override
public HollowObjectHashCodeFinder getHashCodeFinder() {
return hashCodeFinder;
}
@Override
public MissingDataHandler getMissingDataHandler() {
return missingDataHandler;
}
@Override
public void resetSampling() {
for(Map.Entry<String, HollowHistoricalTypeDataAccess> entry : typeDataAccessMap.entrySet())
entry.getValue().getSampler().reset();
}
@Override
public boolean hasSampleResults() {
for(Map.Entry<String, HollowHistoricalTypeDataAccess> entry : typeDataAccessMap.entrySet())
if(entry.getValue().getSampler().hasSampleResults())
return true;
return false;
}
public void setStackTraceRecorder(StackTraceRecorder recorder) {
this.stackTraceRecorder = recorder;
}
StackTraceRecorder getStackTraceRecorder() {
return stackTraceRecorder;
}
/** @return the schemas of all types retained by this historical state */
public List<HollowSchema> getSchemas() {
List<HollowSchema> schemas = new ArrayList<HollowSchema>(typeDataAccessMap.size());
for(Map.Entry<String, HollowHistoricalTypeDataAccess> entry : typeDataAccessMap.entrySet())
schemas.add(entry.getValue().getSchema());
return schemas;
}
@Override
public HollowSchema getSchema(String name) {
return getTypeDataAccess(name).getSchema();
}
@Override
public HollowSchema getNonNullSchema(String name) {
HollowSchema schema = getSchema(name);
if (schema == null) {
throw new SchemaNotFoundException(name, getAllTypes());
}
return schema;
}
}
| 8,939 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history/HollowHistoricalStateCreator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.history;
import com.netflix.hollow.core.read.HollowBlobInput;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.engine.HollowBlobReader;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.PopulatedOrdinalListener;
import com.netflix.hollow.core.read.engine.list.HollowListDeltaHistoricalStateCreator;
import com.netflix.hollow.core.read.engine.list.HollowListTypeReadState;
import com.netflix.hollow.core.read.engine.map.HollowMapDeltaHistoricalStateCreator;
import com.netflix.hollow.core.read.engine.map.HollowMapTypeReadState;
import com.netflix.hollow.core.read.engine.object.HollowObjectDeltaHistoricalStateCreator;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.read.engine.set.HollowSetDeltaHistoricalStateCreator;
import com.netflix.hollow.core.read.engine.set.HollowSetTypeReadState;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSchemaSorter;
import com.netflix.hollow.core.util.HollowWriteStateCreator;
import com.netflix.hollow.core.util.IntMap;
import com.netflix.hollow.core.util.IntMap.IntMapEntryIterator;
import com.netflix.hollow.core.util.SimultaneousExecutor;
import com.netflix.hollow.core.write.HollowBlobWriter;
import com.netflix.hollow.core.write.HollowTypeWriteState;
import com.netflix.hollow.core.write.HollowWriteRecord;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.core.write.copy.HollowRecordCopier;
import com.netflix.hollow.tools.combine.IdentityOrdinalRemapper;
import com.netflix.hollow.tools.combine.OrdinalRemapper;
import com.netflix.hollow.tools.diff.exact.DiffEqualOrdinalMap;
import com.netflix.hollow.tools.diff.exact.DiffEqualityMapping;
import java.io.BufferedOutputStream;
import java.io.Closeable;
import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
/**
* Used to create a historical {@link HollowDataAccess}, even in the absence of a {@link HollowHistory}.
*
*/
public class HollowHistoricalStateCreator {
private final HollowHistory totalHistory;
/**
 * Creates a state creator which is not attached to any {@link HollowHistory}.
 */
public HollowHistoricalStateCreator() {
this(null);
}
/**
 * Creates a state creator attached to the supplied {@link HollowHistory}; produced
 * historical states reference it as their total history. May be null.
 */
public HollowHistoricalStateCreator(HollowHistory totalHistory) {
this.totalHistory = totalHistory;
}
/**
 * Create a {@link HollowDataAccess} for the prior state of the supplied {@link HollowReadStateEngine} after a delta
 * has been applied. Equivalent to the three-argument overload with {@code reverse=false}.
 *
 * @param version The state's version
 * @param stateEngine The current {@link HollowReadStateEngine} to which a delta has been applied.
 * @return a data access for history
 */
public HollowHistoricalStateDataAccess createBasedOnNewDelta(long version, HollowReadStateEngine stateEngine) {
return createBasedOnNewDelta(version, stateEngine, false);
}
/**
 * Create a {@link HollowDataAccess} for the prior state of the supplied
 * {@link HollowReadStateEngine} after a delta has been applied.
 *
 * @param version the historical state's version
 * @param stateEngine the current state engine to which a delta has been applied
 * @param reverse whether the delta was traversed in reverse
 * @return a data access for history, chained to {@code stateEngine} as its next state
 */
public HollowHistoricalStateDataAccess createBasedOnNewDelta(long version, HollowReadStateEngine stateEngine, boolean reverse) {
    IntMapOrdinalRemapper removedOrdinalMappings = new IntMapOrdinalRemapper();
    List<HollowTypeReadState> historicalStates = new ArrayList<HollowTypeReadState>(stateEngine.getTypeStates().size());

    // Build a historical copy (removed records only) for every type in the engine.
    for (HollowTypeReadState typeState : stateEngine.getTypeStates())
        createDeltaHistoricalTypeState(removedOrdinalMappings, historicalStates, typeState, reverse);

    HollowHistoricalStateDataAccess historicalDataAccess = new HollowHistoricalStateDataAccess(
            totalHistory, version, stateEngine, historicalStates, removedOrdinalMappings,
            Collections.<String, HollowHistoricalSchemaChange>emptyMap());
    historicalDataAccess.setNextState(stateEngine);
    return historicalDataAccess;
}
/**
 * Builds the historical (removed-records) type state for a single type, dispatching on
 * the concrete read-state class. Each branch follows the same protocol: populate the
 * history, register the ordinal mapping, collect the historical read state, then drop
 * the creator's reference to the live type state so it can be garbage collected.
 * <p>
 * The four creator classes share no common supertype visible here, hence the
 * parallel branches rather than a single generic path.
 */
private void createDeltaHistoricalTypeState(IntMapOrdinalRemapper typeRemovedOrdinalMapping, List<HollowTypeReadState> historicalTypeStates, HollowTypeReadState typeState, boolean reverse) {
if(typeState instanceof HollowObjectTypeReadState) {
HollowObjectDeltaHistoricalStateCreator deltaHistoryCreator = new HollowObjectDeltaHistoricalStateCreator((HollowObjectTypeReadState)typeState, reverse);
deltaHistoryCreator.populateHistory();
typeRemovedOrdinalMapping.addOrdinalRemapping(typeState.getSchema().getName(), deltaHistoryCreator.getOrdinalMapping());
historicalTypeStates.add(deltaHistoryCreator.createHistoricalTypeReadState());
// drop references into typeState to allow it to be GC'ed as soon as all historical states have been constructed
deltaHistoryCreator.dereferenceTypeState();
} else if(typeState instanceof HollowListTypeReadState) {
HollowListDeltaHistoricalStateCreator deltaHistoryCreator = new HollowListDeltaHistoricalStateCreator((HollowListTypeReadState)typeState, reverse);
deltaHistoryCreator.populateHistory();
typeRemovedOrdinalMapping.addOrdinalRemapping(typeState.getSchema().getName(), deltaHistoryCreator.getOrdinalMapping());
historicalTypeStates.add(deltaHistoryCreator.createHistoricalTypeReadState());
deltaHistoryCreator.dereferenceTypeState();
} else if(typeState instanceof HollowSetTypeReadState) {
HollowSetDeltaHistoricalStateCreator deltaHistoryCreator = new HollowSetDeltaHistoricalStateCreator((HollowSetTypeReadState)typeState, reverse);
deltaHistoryCreator.populateHistory();
typeRemovedOrdinalMapping.addOrdinalRemapping(typeState.getSchema().getName(), deltaHistoryCreator.getOrdinalMapping());
historicalTypeStates.add(deltaHistoryCreator.createHistoricalTypeReadState());
deltaHistoryCreator.dereferenceTypeState();
} else if(typeState instanceof HollowMapTypeReadState) {
HollowMapDeltaHistoricalStateCreator deltaHistoryCreator = new HollowMapDeltaHistoricalStateCreator((HollowMapTypeReadState)typeState, reverse);
deltaHistoryCreator.populateHistory();
typeRemovedOrdinalMapping.addOrdinalRemapping(typeState.getSchema().getName(), deltaHistoryCreator.getOrdinalMapping());
historicalTypeStates.add(deltaHistoryCreator.createHistoricalTypeReadState());
deltaHistoryCreator.dereferenceTypeState();
}
}
/**
 * Create a {@link HollowDataAccess} for a {@link HollowHistory}. Remap ordinal spaces for all prior historical
 * versions in the {@link HollowHistory} for consistency.
 * <p>
 * The previous state engine is retained whole, so an identity ordinal mapping suffices
 * and no schema changes are recorded.
 *
 * @param version the version
 * @param previous the prior read state
 * @return the data access for a history
 */
public HollowHistoricalStateDataAccess createConsistentOrdinalHistoricalStateFromDoubleSnapshot(long version, HollowReadStateEngine previous) {
return new HollowHistoricalStateDataAccess(totalHistory, version, previous, IdentityOrdinalRemapper.INSTANCE, Collections.<String, HollowHistoricalSchemaChange>emptyMap());
}
/**
 * Create a {@link HollowDataAccess} for a historical state after a double snapshot occurs, without a {@link HollowHistory}.
 *
 * @param version the version
 * @param previous the previous state
 * @param current the current state
 * @param ordinalRemapper the ordinal remapper
 * @return the data access for a history
 */
public HollowHistoricalStateDataAccess createHistoricalStateFromDoubleSnapshot(long version, HollowReadStateEngine previous, HollowReadStateEngine current, DiffEqualityMappingOrdinalRemapper ordinalRemapper) {
// Write engine retains only record data; keys are stripped from the schemas.
HollowWriteStateEngine writeEngine = HollowWriteStateCreator.createWithSchemas(schemasWithoutKeys(previous.getSchemas()));
IntMapOrdinalRemapper typeRemovedOrdinalLookupMaps = new IntMapOrdinalRemapper();
// Dependency order guarantees referenced types are processed before referencing types.
for(HollowSchema previousSchema : HollowSchemaSorter.dependencyOrderedSchemaList(previous)) {
HollowTypeReadState previousTypeState = previous.getTypeState(previousSchema.getName());
String typeName = previousTypeState.getSchema().getName();
IntMap ordinalLookupMap;
if(current.getTypeState(typeName) == null) {
// Type disappeared entirely: every previous record must be retained.
ordinalLookupMap = copyAllRecords(previousTypeState, ordinalRemapper, writeEngine);
} else {
// Type still exists: retain only records without an equality match in the current state.
HollowTypeReadState currentTypeState = current.getTypeState(typeName);
BitSet currentlyPopulatedOrdinals = currentTypeState.getListener(PopulatedOrdinalListener.class).getPopulatedOrdinals();
ordinalLookupMap = copyUnmatchedRecords(previousTypeState, ordinalRemapper, currentlyPopulatedOrdinals, writeEngine);
}
typeRemovedOrdinalLookupMaps.addOrdinalRemapping(typeName, ordinalLookupMap);
}
Map<String, HollowHistoricalSchemaChange> schemaChanges = calculateSchemaChanges(previous, current, ordinalRemapper.getDiffEqualityMapping());
return new HollowHistoricalStateDataAccess(totalHistory, version, roundTripStateEngine(writeEngine), typeRemovedOrdinalLookupMaps, schemaChanges);
}
/**
 * Computes the per-type schema changes between two states: types removed from
 * {@code previous}, types whose diff requires missing-field traversal (treated as
 * modified), and types newly added in {@code current}.
 *
 * @return a map keyed by type name; types with no change are absent
 */
private Map<String, HollowHistoricalSchemaChange> calculateSchemaChanges(HollowReadStateEngine previous, HollowReadStateEngine current, DiffEqualityMapping equalityMapping) {
    Map<String, HollowHistoricalSchemaChange> changes = new HashMap<String, HollowHistoricalSchemaChange>();

    // Types present previously: record removals and field-level modifications.
    for (HollowTypeReadState prevTypeState : previous.getTypeStates()) {
        String typeName = prevTypeState.getSchema().getName();
        HollowTypeReadState curTypeState = current.getTypeState(typeName);

        if (curTypeState == null)
            changes.put(typeName, new HollowHistoricalSchemaChange(prevTypeState.getSchema(), null));
        else if (equalityMapping.requiresMissingFieldTraversal(typeName))
            changes.put(typeName, new HollowHistoricalSchemaChange(prevTypeState.getSchema(), curTypeState.getSchema()));
    }

    // Types only present in the current state: record additions.
    for (HollowTypeReadState curTypeState : current.getTypeStates()) {
        String typeName = curTypeState.getSchema().getName();
        if (previous.getTypeState(typeName) == null)
            changes.put(typeName, new HollowHistoricalSchemaChange(null, curTypeState.getSchema()));
    }

    return changes;
}
/**
 * Copies into {@code writeEngine} the previous state's records which have no equality
 * match in the current state (or all records, when missing-field traversal forces it),
 * and registers the ordinal remappings that keep historical ordinal spaces consistent.
 * <p>
 * Unmatched previous records are reassigned fresh ordinals starting at
 * {@code ordinalSpaceLength} (above both states' ordinal spaces), so they can never
 * collide with a current-state ordinal.
 *
 * @return a lookup from remapped historical ordinal to the copied record's ordinal in
 *         {@code writeEngine}
 */
private IntMap copyUnmatchedRecords(HollowTypeReadState previousTypeState, DiffEqualityMappingOrdinalRemapper ordinalRemapper, BitSet currentlyPopulatedOrdinals, HollowWriteStateEngine writeEngine) {
String typeName = previousTypeState.getSchema().getName();
PopulatedOrdinalListener previousListener = previousTypeState.getListener(PopulatedOrdinalListener.class);
HollowRecordCopier recordCopier = HollowRecordCopier.createCopier(previousTypeState, ordinalRemapper, false); ///NOTE: This will invalidate custom hash codes
DiffEqualOrdinalMap equalityMap = ordinalRemapper.getDiffEqualityMapping().getEqualOrdinalMap(typeName);
boolean shouldCopyAllRecords = ordinalRemapper.getDiffEqualityMapping().requiresMissingFieldTraversal(typeName);
BitSet previouslyPopulatedOrdinals = previousListener.getPopulatedOrdinals();
// Combined ordinal-space length across both states; fresh ordinals start here.
int ordinalSpaceLength = Math.max(currentlyPopulatedOrdinals.length(), previouslyPopulatedOrdinals.length());
int unmatchedOrdinalCount = ordinalSpaceLength - countMatchedRecords(previouslyPopulatedOrdinals, equalityMap);
int unmatchedRecordCount = countUnmatchedRecords(previouslyPopulatedOrdinals, equalityMap);
int nextFreeOrdinal = ordinalSpaceLength;
// NOTE(review): doubled presumably because each unmatched ordinal gets two remap
// entries (from->fresh and fresh->to) below — confirm against IntMapOrdinalRemapper sizing.
ordinalRemapper.hintUnmatchedOrdinalCount(typeName, unmatchedOrdinalCount * 2);
IntMap ordinalLookupMap = new IntMap(shouldCopyAllRecords ? previouslyPopulatedOrdinals.cardinality() : unmatchedRecordCount);
BitSet mappedFromOrdinals = new BitSet(ordinalSpaceLength);
BitSet mappedToOrdinals = new BitSet(ordinalSpaceLength);
// Pass 1: mark every matched (previous, current) ordinal pair. When schema changes
// require missing-field traversal, matched records are still copied and made
// addressable via their matched current-state ordinal.
int fromOrdinal = previouslyPopulatedOrdinals.nextSetBit(0);
while(fromOrdinal != -1) {
int matchedToOrdinal = equalityMap.getIdentityFromOrdinal(fromOrdinal);
if(matchedToOrdinal != -1) {
mappedFromOrdinals.set(fromOrdinal);
mappedToOrdinals.set(matchedToOrdinal);
if(shouldCopyAllRecords) {
HollowWriteRecord rec = recordCopier.copy(fromOrdinal);
int removedMappedOrdinal = writeEngine.add(typeName, rec);
ordinalLookupMap.put(matchedToOrdinal, removedMappedOrdinal);
}
}
fromOrdinal = previouslyPopulatedOrdinals.nextSetBit(fromOrdinal + 1);
}
// Pass 2: walk the unmatched positions of both ordinal spaces in lock-step, assigning
// each pair a shared fresh ordinal (from->fresh, fresh->to) and copying any unmatched
// previous record under its fresh ordinal.
fromOrdinal = mappedFromOrdinals.nextClearBit(0);
int toOrdinal = mappedToOrdinals.nextClearBit(0);
while(fromOrdinal < ordinalSpaceLength) {
ordinalRemapper.remapOrdinal(typeName, fromOrdinal, nextFreeOrdinal);
ordinalRemapper.remapOrdinal(typeName, nextFreeOrdinal, toOrdinal);
if(previouslyPopulatedOrdinals.get(fromOrdinal)) {
HollowWriteRecord rec = recordCopier.copy(fromOrdinal);
int removedMappedOrdinal = writeEngine.add(typeName, rec);
ordinalLookupMap.put(nextFreeOrdinal, removedMappedOrdinal);
}
fromOrdinal = mappedFromOrdinals.nextClearBit(fromOrdinal + 1);
toOrdinal = mappedToOrdinals.nextClearBit(toOrdinal + 1);
nextFreeOrdinal++;
}
return ordinalLookupMap;
}
/**
 * Counts how many populated ordinals have an equality match in the current state.
 *
 * @param populatedOrdinals the previous state's populated ordinals
 * @param equalityMap match lookup; -1 means no match
 * @return the number of matched records
 */
private int countMatchedRecords(BitSet populatedOrdinals, DiffEqualOrdinalMap equalityMap) {
    int matched = 0;
    for (int ordinal = populatedOrdinals.nextSetBit(0); ordinal != -1; ordinal = populatedOrdinals.nextSetBit(ordinal + 1)) {
        if (equalityMap.getIdentityFromOrdinal(ordinal) != -1)
            matched++;
    }
    return matched;
}
/**
 * Counts the populated ordinals which have no identity match in the supplied
 * equality map, i.e. records which changed (or disappeared) across the transition.
 *
 * @param populatedOrdinals the set of populated ordinals in the "from" state
 * @param equalityMap the equality mapping between the two states
 * @return the number of populated ordinals without an equality match
 */
private int countUnmatchedRecords(BitSet populatedOrdinals, DiffEqualOrdinalMap equalityMap) {
    // BitSet.stream() yields the set bits in ascending order, equivalent to a
    // nextSetBit() walk over the populated ordinals.
    return (int) populatedOrdinals.stream()
            .filter(ordinal -> equalityMap.getIdentityFromOrdinal(ordinal) == -1)
            .count();
}
/**
 * Copies every populated record of the given type into the write engine, remapping
 * referenced ordinals via the supplied remapper, and returns a mapping from each
 * record's original ordinal to the ordinal assigned by the write engine.
 *
 * @param typeState the type state whose populated records are copied
 * @param ordinalRemapper remaps referenced ordinals while copying
 * @param writeEngine the destination write state engine
 * @return a map from original ordinal to write-engine ordinal
 */
private IntMap copyAllRecords(HollowTypeReadState typeState, DiffEqualityMappingOrdinalRemapper ordinalRemapper, HollowWriteStateEngine writeEngine) {
    String typeName = typeState.getSchema().getName();
    HollowRecordCopier recordCopier = HollowRecordCopier.createCopier(typeState, ordinalRemapper, false); ///NOTE: This will invalidate custom hash codes
    PopulatedOrdinalListener listener = typeState.getListener(PopulatedOrdinalListener.class);
    // Hoisted: the populated-ordinal BitSet is invariant for the duration of the loop.
    BitSet populatedOrdinals = listener.getPopulatedOrdinals();
    IntMap ordinalLookupMap = new IntMap(populatedOrdinals.cardinality());
    int ordinal = populatedOrdinals.nextSetBit(0);
    while(ordinal != -1) {
        HollowWriteRecord rec = recordCopier.copy(ordinal);
        int mappedOrdinal = writeEngine.add(typeName, rec);
        ordinalLookupMap.put(ordinal, mappedOrdinal);
        ordinal = populatedOrdinals.nextSetBit(ordinal + 1);
    }
    return ordinalLookupMap;
}
/**
 * Produces a copy of the supplied historical state in which every ordinal has been
 * rewritten through the given remapper. The removed records of each type are
 * re-copied into a fresh write engine (built from key-stripped schemas), round-tripped
 * to a read state engine, and paired with a remapped per-type ordinal lookup.
 *
 * @param previous the historical state data access to copy
 * @param ordinalRemapper the remapping to apply to every ordinal
 * @return a new historical state data access equivalent to {@code previous} with remapped ordinals
 */
public HollowHistoricalStateDataAccess copyButRemapOrdinals(HollowHistoricalStateDataAccess previous, OrdinalRemapper ordinalRemapper) {
    HollowWriteStateEngine writeStateEngine = HollowWriteStateCreator.createWithSchemas(schemasWithoutKeys(previous.getSchemas()));
    IntMapOrdinalRemapper removedOrdinalRemapping = new IntMapOrdinalRemapper();
    for(String type : previous.getAllTypes()) {
        HollowHistoricalTypeDataAccess typeAccess = (HollowHistoricalTypeDataAccess) previous.getTypeDataAccess(type);
        copyRemappedRecords(typeAccess.getRemovedRecords(), ordinalRemapper, writeStateEngine);
        IntMap lookupMap = remapPreviousOrdinalMapping(typeAccess.getOrdinalRemap(), type, ordinalRemapper);
        removedOrdinalRemapping.addOrdinalRemapping(type, lookupMap);
    }
    return new HollowHistoricalStateDataAccess(totalHistory, previous.getVersion(), roundTripStateEngine(writeStateEngine), removedOrdinalRemapping, previous.getSchemaChanges());
}
/**
 * Copies every record (ordinals {@code 0..maxOrdinal}) of the given read state into
 * the matching type state of the write engine, remapping referenced ordinals.
 *
 * @param readTypeState the source of the records to copy
 * @param ordinalRemapper remaps referenced ordinals while copying
 * @param writeEngine the destination write state engine
 */
private void copyRemappedRecords(HollowTypeReadState readTypeState, OrdinalRemapper ordinalRemapper, HollowWriteStateEngine writeEngine) {
    HollowRecordCopier copier = HollowRecordCopier.createCopier(readTypeState, ordinalRemapper, false); ///NOTE: This will invalidate custom hash codes
    HollowTypeWriteState writeTypeState = writeEngine.getTypeState(readTypeState.getSchema().getName());
    int maxOrdinal = readTypeState.maxOrdinal();
    for(int ordinal = 0; ordinal <= maxOrdinal; ordinal++) {
        writeTypeState.add(copier.copy(ordinal));
    }
}
/**
 * Rewrites the keys of a previous ordinal mapping through the supplied remapper,
 * preserving each entry's value.
 *
 * @param previousOrdinalMapping the mapping whose keys are to be remapped
 * @param typeName the type the ordinals belong to
 * @param ordinalRemapper the remapping to apply to each key
 * @return a new IntMap with remapped keys and the original values
 */
private IntMap remapPreviousOrdinalMapping(IntMap previousOrdinalMapping, String typeName, OrdinalRemapper ordinalRemapper) {
    IntMap remapped = new IntMap(previousOrdinalMapping.size());
    IntMapEntryIterator iter = previousOrdinalMapping.iterator();
    while(iter.next()) {
        int remappedKey = ordinalRemapper.getMappedOrdinal(typeName, iter.getKey());
        remapped.put(remappedKey, iter.getValue());
    }
    return remapped;
}
/**
 * Serializes the given write engine to a snapshot blob and immediately deserializes
 * it into a fresh {@link HollowReadStateEngine}. Write and read run concurrently over
 * an in-process pipe, so no temporary file or full in-memory copy of the blob is needed.
 *
 * @param writeEngine the write state engine to round-trip
 * @return a read state engine populated from the snapshot of {@code writeEngine}
 * @throws RuntimeException wrapping any failure on either side of the pipe
 */
private static HollowReadStateEngine roundTripStateEngine(HollowWriteStateEngine writeEngine) {
    HollowBlobWriter writer = new HollowBlobWriter(writeEngine);
    HollowReadStateEngine removedRecordCopies = new HollowReadStateEngine();
    HollowBlobReader reader = new HollowBlobReader(removedRecordCopies);
    // Use a pipe to write and read concurrently to avoid writing
    // to temporary files or allocating memory
    // @@@ for small states it's more efficient to sequentially write to
    // and read from a byte array but it is tricky to estimate the size
    SimultaneousExecutor executor = new SimultaneousExecutor(1, HollowHistoricalStateCreator.class, "round-trip");
    Exception pipeException = null;
    // Ensure read-side is closed after completion of read
    try (PipedInputStream in = new PipedInputStream(1 << 15)) {
        BufferedOutputStream out = new BufferedOutputStream(new PipedOutputStream(in));
        // Writer runs on the executor thread; the reader below consumes the pipe
        // on this thread concurrently.
        executor.execute(() -> {
            // Ensure write-side is closed after completion of write
            try (Closeable ac = out) {
                writer.writeSnapshot(out);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        });
        HollowBlobInput hbi = HollowBlobInput.serial(in);
        reader.readSnapshot(hbi);
    } catch (Exception e) {
        // Defer: the read side failing (e.g. broken pipe) may be a symptom of a
        // writer-side failure, which is surfaced below after the writer completes.
        pipeException = e;
    }
    // Ensure no underlying writer exception is lost due to broken pipe
    try {
        executor.awaitSuccessfulCompletion();
    } catch (InterruptedException | ExecutionException e) {
        // NOTE(review): on InterruptedException the thread's interrupt status is not
        // re-asserted here — confirm this is intentional.
        if (pipeException == null) {
            throw new RuntimeException(e);
        }
        // Keep the read-side failure primary; attach the writer-side failure to it.
        pipeException.addSuppressed(e);
    }
    if (pipeException != null)
        throw new RuntimeException(pipeException);
    return removedRecordCopies;
}
/**
 * Returns copies of the supplied schemas with their key definitions removed
 * (via {@link HollowSchema#withoutKeys}).
 *
 * @param schemas the schemas to strip
 * @return new schema instances without key definitions, in the same order
 */
private List<HollowSchema> schemasWithoutKeys(List<HollowSchema> schemas) {
    // Presize to the known result size to avoid intermediate array growth.
    List<HollowSchema> baldSchemas = new ArrayList<>(schemas.size());
    for(HollowSchema prevSchema : schemas)
        baldSchemas.add(HollowSchema.withoutKeys(prevSchema));
    return baldSchemas;
}
}
| 8,940 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history/HollowHistoricalListDataAccess.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.history;
import com.netflix.hollow.api.sampling.DisabledSamplingDirector;
import com.netflix.hollow.api.sampling.HollowListSampler;
import com.netflix.hollow.core.read.dataaccess.HollowListTypeDataAccess;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.list.HollowListTypeReadState;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import com.netflix.hollow.core.schema.HollowListSchema;
public class HollowHistoricalListDataAccess extends HollowHistoricalTypeDataAccess implements HollowListTypeDataAccess {
public HollowHistoricalListDataAccess(HollowHistoricalStateDataAccess dataAccess, HollowTypeReadState typeState) {
super(dataAccess, typeState, new HollowListSampler(typeState.getSchema().getName(), DisabledSamplingDirector.INSTANCE));
}
@Override
public HollowListSchema getSchema() {
return removedRecords().getSchema();
}
@Override
public int getElementOrdinal(int ordinal, int listIndex) {
sampler().recordGet();
recordStackTrace();
if(!ordinalIsPresent(ordinal))
return ((HollowListTypeDataAccess)dataAccess.getTypeDataAccess(getSchema().getName(), ordinal)).getElementOrdinal(ordinal, listIndex);
return removedRecords().getElementOrdinal(getMappedOrdinal(ordinal), listIndex);
}
@Override
public int size(int ordinal) {
sampler().recordSize();
recordStackTrace();
if(!ordinalIsPresent(ordinal))
return ((HollowListTypeDataAccess)dataAccess.getTypeDataAccess(getSchema().getName(), ordinal)).size(ordinal);
return removedRecords().size(getMappedOrdinal(ordinal));
}
@Override
public HollowOrdinalIterator ordinalIterator(int ordinal) {
sampler().recordIterator();
recordStackTrace();
if(!ordinalIsPresent(ordinal))
return ((HollowListTypeDataAccess)dataAccess.getTypeDataAccess(getSchema().getName(), ordinal)).ordinalIterator(ordinal);
return removedRecords().ordinalIterator(getMappedOrdinal(ordinal));
}
private HollowListTypeReadState removedRecords() {
return (HollowListTypeReadState) removedRecords;
}
private HollowListSampler sampler() {
return (HollowListSampler) sampler;
}
}
| 8,941 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history/DiffEqualityMappingOrdinalRemapper.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.history;
import com.netflix.hollow.core.util.IntMap;
import com.netflix.hollow.tools.combine.OrdinalRemapper;
import com.netflix.hollow.tools.diff.exact.DiffEqualityMapping;
import java.util.HashMap;
public class DiffEqualityMappingOrdinalRemapper implements OrdinalRemapper {
private final HashMap<String, IntMap> unmatchedOrdinalRemapping;
private final DiffEqualityMapping equalityMapping;
DiffEqualityMappingOrdinalRemapper(DiffEqualityMapping mapping) {
this.equalityMapping = mapping;
this.unmatchedOrdinalRemapping = new HashMap<String, IntMap>();
}
@Override
public int getMappedOrdinal(String type, int originalOrdinal) {
IntMap remapping = unmatchedOrdinalRemapping.get(type);
if(remapping != null) {
int remappedOrdinal = remapping.get(originalOrdinal);
if(remappedOrdinal != -1)
return remappedOrdinal;
}
int matchedOrdinal = equalityMapping.getEqualOrdinalMap(type).getIdentityFromOrdinal(originalOrdinal);
return matchedOrdinal == -1 ? originalOrdinal : matchedOrdinal;
}
public void hintUnmatchedOrdinalCount(String type, int numOrdinals) {
unmatchedOrdinalRemapping.put(type, new IntMap(numOrdinals));
}
@Override
public void remapOrdinal(String type, int originalOrdinal, int mappedOrdinal) {
IntMap remap = unmatchedOrdinalRemapping.get(type);
if(remap == null)
throw new IllegalStateException("Must call hintUnmatchedOrdinalCount for type " + type + " before attempting to remap unmatched ordinals");
remap.put(originalOrdinal, mappedOrdinal);
}
@Override
public boolean ordinalIsMapped(String type, int originalOrdinal) {
throw new UnsupportedOperationException();
}
public DiffEqualityMapping getDiffEqualityMapping() {
return equalityMapping;
}
public IntMap getUnmatchedOrdinalMapping(String type) {
return unmatchedOrdinalRemapping.get(type);
}
}
| 8,942 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history/HollowHistoricalState.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.history;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.tools.history.keyindex.HollowHistoricalStateKeyOrdinalMapping;
import com.netflix.hollow.tools.history.keyindex.HollowHistoryKeyIndex;
import java.util.Map;
/**
* A data state from the past, represented as just the changes which happened on the subsequent transition.
* Contains links to all subsequent deltas which happened in the interim between this state
* and the now current state.
*/
public class HollowHistoricalState {
private final long version;
private final HollowHistoricalStateKeyOrdinalMapping keyOrdinalMapping;
private final HollowHistoricalStateDataAccess dataAccess;
private final Map<String, String> headerEntries;
private HollowHistoricalState nextState;
public HollowHistoricalState(long version, HollowHistoricalStateKeyOrdinalMapping keyOrdinalMapping, HollowHistoricalStateDataAccess dataAccess, Map<String, String> headerEntries) {
this.version = version;
this.dataAccess = dataAccess;
this.keyOrdinalMapping = keyOrdinalMapping;
this.headerEntries = headerEntries;
}
/**
* @return The version of this state
*/
public long getVersion() {
return version;
}
/**
* @return A {@link HollowDataAccess} which can be used to retrieve the data from this state. For example,
* you can use this with a generated Hollow API or the generic hollow object API.
*/
public HollowHistoricalStateDataAccess getDataAccess() {
return dataAccess;
}
/**
* To find a specific historical record which changed
* in this state:
* <ul>
* <li>Use the {@link HollowHistoryKeyIndex} from the {@link HollowHistory} to look up a <i>key ordinal</i> by an indexed primary key.</li>
* <li>Use the retrieved <i>key ordinal</i> with the {@link HollowHistoricalStateKeyOrdinalMapping} in this state to find the record's ordinal in this state.</li>
* </ul>
* <p>
* If a change isn't found for the key ordinal in this state, you can try walking the chain of states up to
* the present using successive calls to {@link #getNextState()}
*
* @return the historical state key ordinal mapping
*/
public HollowHistoricalStateKeyOrdinalMapping getKeyOrdinalMapping() {
return keyOrdinalMapping;
}
/**
* @return The subsequent historical state which occurred after this one
*/
public HollowHistoricalState getNextState() {
return nextState;
}
/**
* @return The blob header entries from this state.
*/
public Map<String, String> getHeaderEntries() {
return headerEntries;
}
void setNextState(HollowHistoricalState nextState) {
this.nextState = nextState;
}
public long getApproximateHeapFootprintInBytes() {
long total = 0L;
for (HollowHistoricalTypeDataAccess typeDataAccess : dataAccess.getTypeDataAccessMap().values()) {
total += typeDataAccess.removedRecords.getApproximateHeapFootprintInBytes();
}
return total;
}
}
| 8,943 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history/HollowHistoricalMapDataAccess.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.history;
import com.netflix.hollow.api.sampling.DisabledSamplingDirector;
import com.netflix.hollow.api.sampling.HollowMapSampler;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.read.dataaccess.HollowMapTypeDataAccess;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.SetMapKeyHasher;
import com.netflix.hollow.core.read.engine.map.HollowMapTypeReadState;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator;
import com.netflix.hollow.core.schema.HollowMapSchema;
public class HollowHistoricalMapDataAccess extends HollowHistoricalTypeDataAccess implements HollowMapTypeDataAccess {
private HistoricalPrimaryKeyMatcher keyMatcher;
public HollowHistoricalMapDataAccess(HollowHistoricalStateDataAccess dataAccess, HollowTypeReadState typeState) {
super(dataAccess, typeState, new HollowMapSampler(typeState.getSchema().getName(), DisabledSamplingDirector.INSTANCE));
}
@Override
public HollowMapSchema getSchema() {
return (HollowMapSchema) removedRecords.getSchema();
}
@Override
public int size(int ordinal) {
sampler().recordSize();
recordStackTrace();
if(!ordinalIsPresent(ordinal))
return ((HollowMapTypeDataAccess)dataAccess.getTypeDataAccess(getSchema().getName(), ordinal)).size(ordinal);
return removedRecords().size(getMappedOrdinal(ordinal));
}
@Override
public int get(int ordinal, int keyOrdinal) {
sampler().recordGet();
recordStackTrace();
if(!ordinalIsPresent(ordinal))
return ((HollowMapTypeDataAccess)dataAccess.getTypeDataAccess(getSchema().getName(), ordinal)).get(ordinal, keyOrdinal);
return removedRecords().get(getMappedOrdinal(ordinal), keyOrdinal);
}
@Override
public int get(int ordinal, int keyOrdinal, int hashCode) {
sampler().recordGet();
recordStackTrace();
if(!ordinalIsPresent(ordinal))
return ((HollowMapTypeDataAccess)dataAccess.getTypeDataAccess(getSchema().getName(), ordinal)).get(ordinal, keyOrdinal, hashCode);
return removedRecords().get(getMappedOrdinal(ordinal), keyOrdinal, hashCode);
}
@Override
public int findKey(int ordinal, Object... hashKey) {
return (int)(findEntry(ordinal, hashKey) >> 32);
}
@Override
public int findValue(int ordinal, Object... hashKey) {
return (int)findEntry(ordinal, hashKey);
}
@Override
public long findEntry(int ordinal, Object... hashKey) {
sampler().recordGet();
recordStackTrace();
if(keyMatcher == null)
return -1L;
if(!ordinalIsPresent(ordinal))
return ((HollowMapTypeDataAccess)dataAccess.getTypeDataAccess(getSchema().getName(), ordinal)).findEntry(ordinal, hashKey);
ordinal = ordinalRemap.get(ordinal);
HollowMapTypeReadState removedRecords = (HollowMapTypeReadState)getRemovedRecords();
int hashTableSize = HashCodes.hashTableSize(removedRecords.size(ordinal));
int hash = SetMapKeyHasher.hash(hashKey, keyMatcher.getFieldTypes());
int bucket = hash & (hashTableSize - 1);
long bucketOrdinals = removedRecords.relativeBucket(ordinal, bucket);
while(bucketOrdinals != -1L) {
if(keyMatcher.keyMatches((int)(bucketOrdinals >> 32), hashKey))
return bucketOrdinals;
bucket++;
bucket &= (hashTableSize - 1);
bucketOrdinals = removedRecords.relativeBucket(ordinal, bucket);
}
return -1L;
}
@Override
public HollowMapEntryOrdinalIterator potentialMatchOrdinalIterator(int ordinal, int hashCode) {
sampler().recordIterator();
recordStackTrace();
if(!ordinalIsPresent(ordinal))
return ((HollowMapTypeDataAccess)dataAccess.getTypeDataAccess(getSchema().getName(), ordinal)).potentialMatchOrdinalIterator(ordinal, hashCode);
return removedRecords().potentialMatchOrdinalIterator(getMappedOrdinal(ordinal), hashCode);
}
@Override
public HollowMapEntryOrdinalIterator ordinalIterator(int ordinal) {
sampler().recordIterator();
recordStackTrace();
if(!ordinalIsPresent(ordinal))
return ((HollowMapTypeDataAccess)dataAccess.getTypeDataAccess(getSchema().getName(), ordinal)).ordinalIterator(ordinal);
return removedRecords().ordinalIterator(getMappedOrdinal(ordinal));
}
@Override
public long relativeBucket(int ordinal, int bucketIndex) {
sampler().recordBucketRetrieval();
recordStackTrace();
if(!ordinalIsPresent(ordinal))
return ((HollowMapTypeDataAccess)dataAccess.getTypeDataAccess(getSchema().getName(), ordinal)).relativeBucket(ordinal, bucketIndex);
return removedRecords().relativeBucket(getMappedOrdinal(ordinal), bucketIndex);
}
private HollowMapTypeReadState removedRecords() {
return (HollowMapTypeReadState) removedRecords;
}
private HollowMapSampler sampler() {
return (HollowMapSampler) sampler;
}
void buildKeyMatcher() {
PrimaryKey hashKey = getSchema().getHashKey();
if(hashKey != null)
this.keyMatcher = new HistoricalPrimaryKeyMatcher(getDataAccess(), hashKey);
}
}
| 8,944 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history/HollowHistoricalTypeDataAccess.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.history;
import com.netflix.hollow.api.client.StackTraceRecorder;
import com.netflix.hollow.api.sampling.HollowSampler;
import com.netflix.hollow.api.sampling.HollowSamplingDirector;
import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.filter.HollowFilterConfig;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.util.IntMap;
public abstract class HollowHistoricalTypeDataAccess implements HollowTypeDataAccess {
protected final HollowHistoricalStateDataAccess dataAccess;
protected final HollowTypeReadState removedRecords;
protected final IntMap ordinalRemap;
protected final HollowSampler sampler;
public HollowHistoricalTypeDataAccess(HollowHistoricalStateDataAccess dataAccess, HollowTypeReadState removedRecords, HollowSampler sampler) {
IntMap ordinalRemap = null;
if(dataAccess.getOrdinalMapping() instanceof IntMapOrdinalRemapper) {
ordinalRemap = ((IntMapOrdinalRemapper)dataAccess.getOrdinalMapping()).getOrdinalRemapping(removedRecords.getSchema().getName());
}
this.dataAccess = dataAccess;
this.ordinalRemap = ordinalRemap;
this.removedRecords = removedRecords;
this.sampler = sampler;
}
@Override
public HollowHistoricalStateDataAccess getDataAccess() {
return dataAccess;
}
@Override
public HollowSchema getSchema() {
return removedRecords.getSchema();
}
protected boolean ordinalIsPresent(int ordinal) {
return ordinalRemap == null || ordinalRemap.get(ordinal) != -1;
}
protected int getMappedOrdinal(int ordinal) {
return ordinalRemap == null ? ordinal : ordinalRemap.get(ordinal);
}
@Override
public HollowTypeReadState getTypeState() {
throw new UnsupportedOperationException();
}
@Override
public void setSamplingDirector(HollowSamplingDirector director) {
sampler.setSamplingDirector(director);
}
@Override
public void setFieldSpecificSamplingDirector(HollowFilterConfig fieldSpec, HollowSamplingDirector director) {
sampler.setFieldSpecificSamplingDirector(fieldSpec, director);
}
@Override
public void ignoreUpdateThreadForSampling(Thread t) {
sampler.setUpdateThread(t);
}
@Override
public HollowSampler getSampler() {
return sampler;
}
protected void recordStackTrace() {
StackTraceRecorder recorder = dataAccess.getStackTraceRecorder();
if(recorder != null)
recorder.recordStackTrace(2);
}
HollowTypeReadState getRemovedRecords() {
return removedRecords;
}
IntMap getOrdinalRemap() {
return ordinalRemap;
}
}
| 8,945 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history/HollowHistoricalSchemaChange.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.history;
import com.netflix.hollow.core.schema.HollowSchema;
public class HollowHistoricalSchemaChange {
private final HollowSchema beforeSchema;
private final HollowSchema afterSchema;
public HollowHistoricalSchemaChange(HollowSchema beforeSchema, HollowSchema afterSchema) {
this.beforeSchema = beforeSchema;
this.afterSchema = afterSchema;
}
public HollowSchema getBeforeSchema() {
return beforeSchema;
}
public HollowSchema getAfterSchema() {
return afterSchema;
}
}
| 8,946 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history/HollowHistory.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.history;
import static com.netflix.hollow.core.HollowConstants.VERSION_NONE;
import static java.util.Objects.requireNonNull;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.PopulatedOrdinalListener;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.util.RemovedOrdinalIterator;
import com.netflix.hollow.core.util.SimultaneousExecutor;
import com.netflix.hollow.tools.diff.exact.DiffEqualOrdinalMap;
import com.netflix.hollow.tools.diff.exact.DiffEqualOrdinalMap.OrdinalIdentityTranslator;
import com.netflix.hollow.tools.diff.exact.DiffEqualityMapping;
import com.netflix.hollow.tools.history.keyindex.HollowHistoricalStateKeyOrdinalMapping;
import com.netflix.hollow.tools.history.keyindex.HollowHistoricalStateTypeKeyOrdinalMapping;
import com.netflix.hollow.tools.history.keyindex.HollowHistoryKeyIndex;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
/**
* Retains, in memory, the changes in a dataset over many states. Indexes data for efficient retrieval from any
* point in time.
* <p>
* This historical data is maintained by retaining and indexing all of the changes for the delta chain in memory.
* Because only changes over time are retained, rather than complete states, a great length of history can often
* be held in memory. Additionally, because the data is held indexed in memory, it can be accessed very quickly,
* so that changes to specific records over time can be precisely investigated quickly.
* <p>
* Each retained state is accessible via a {@link HollowHistoricalState}, from which a {@link HollowDataAccess} can
* be obtained and used interchangeably with a (current) {@link HollowReadStateEngine} for many operations.
*
* This class is not thread safe.
*
*/
public class HollowHistory {
private final HollowHistoryKeyIndex keyIndex;
private final HollowHistoricalStateCreator creator;
private final int maxHistoricalStatesToKeep;
private final long fwdInitialVersion;
/**
* A list of historical states in decreasing order of version i.e. index 0 holds the highest version
* {@code historicalStates} ordered like: V3 -> V2 -> V1 (as displayed to user)
* however internally the states are linked like:
* V1.nextState = V2; V2.nextState = V3; etc. whether building using fwd or rev deltas
*/
private final List<HollowHistoricalState> historicalStates;
// A map of version to HollowHistoricalState for quick retrieval
private final Map<Long, HollowHistoricalState> historicalStateLookupMap;
// StateEngines and versions corresponding to the latest and oldest read states. Two are required when building
// history in fwd and rev directions simultaneously, then once there is sufficient history built
// oldestHollowReadStateEngine can be dropped.
// For history v1->v2->v3,
// latestHollowReadStateEngine will be at v3, and
// oldestHollowReadStateEngine will be at v0 (since the v1 historical state represents the v0->v1 diff)
private HollowReadStateEngine latestHollowReadStateEngine;
private long latestVersion = VERSION_NONE;
// reverse facing read state is optional at initialization
private HollowReadStateEngine oldestHollowReadStateEngine;
private long oldestVersion = VERSION_NONE;
private Map<String, String> latestHeaderEntries;
private boolean ignoreListOrderingOnDoubleSnapshot = false;
/**
* @param initialHollowStateEngine The HollowReadStateEngine at an initial (earliest) state.
* @param initialVersion The initial version of the HollowReadStateEngine
* @param maxHistoricalStatesToKeep The number of historical states to keep in memory
*/
public HollowHistory(HollowReadStateEngine initialHollowStateEngine, long initialVersion, int maxHistoricalStatesToKeep) {
this(initialHollowStateEngine, initialVersion, maxHistoricalStatesToKeep, true);
}
/**
* @param initialHollowStateEngine The HollowReadStateEngine at an initial (earliest) state.
* @param initialVersion The initial version of the HollowReadStateEngine
* @param maxHistoricalStatesToKeep The number of historical states to keep in memory
* @param isAutoDiscoverTypeIndex true if scheme types are auto-discovered from the initiate state engine
*/
public HollowHistory(HollowReadStateEngine initialHollowStateEngine, long initialVersion, int maxHistoricalStatesToKeep, boolean isAutoDiscoverTypeIndex) {
this(initialHollowStateEngine, null, initialVersion, VERSION_NONE, maxHistoricalStatesToKeep, isAutoDiscoverTypeIndex);
}
/**
* When building history bi-directionally, 2 state engines moving in opposite directions need to be maintained. They
* must be at the same version, or {@code revMovingHollowReadStateEngine} can be null now but later initialized with
* a snapshot for the same version as {@code fwdMovingHollowReadStateEngine} passed here before any reverse deltas are applied.
*
* @param fwdMovingHollowReadStateEngine The HollowReadStateEngine that will incur application of fwd deltas
* @param revMovingHollowReadStateEngine The HollowReadStateEngine that will incur application of reverse deltas, or null
* @param fwdInitialVersion The version of {@code fwdMovingHollowReadStateEngine}
* @param revInitialVersion The version of {@code revMovingHollowReadStateEngine}, pass VERSION_NONE if revMovingHollowReadStateEngine is null
* @param maxHistoricalStatesToKeep The number of historical states to keep in memory
*/
public HollowHistory(HollowReadStateEngine fwdMovingHollowReadStateEngine,
HollowReadStateEngine revMovingHollowReadStateEngine,
long fwdInitialVersion,
long revInitialVersion,
int maxHistoricalStatesToKeep) {
this(fwdMovingHollowReadStateEngine, revMovingHollowReadStateEngine, fwdInitialVersion, revInitialVersion,
maxHistoricalStatesToKeep, true);
}
/**
* When building history bi-directionally, 2 state engines moving in opposite directions need to be maintained. They
* must start at the same version for contiguous history. {@code revMovingHollowReadStateEngine} can be null now and
* initialized later by calling {@code initializeReverseStateEngine} with the same version as
* {@code fwdMovingHollowReadStateEngine} passed here.
*
* @param fwdMovingHollowReadStateEngine The HollowReadStateEngine that will incur application of fwd deltas.
* This is required to be initialized before calling this constructor.
* @param revMovingHollowReadStateEngine The HollowReadStateEngine that will incur application of reverse deltas.
* This can optionally be initialized before calling this constructor, or
* anytime before applying the first reverse delta.
* @param fwdInitialVersion The version of {@code fwdMovingHollowReadStateEngine}
* @param revInitialVersion The version of {@code revMovingHollowReadStateEngine}
* @param maxHistoricalStatesToKeep The number of historical states to keep in memory
* @param isAutoDiscoverTypeIndex true if scheme types are auto-discovered from the initiate state engine
*/
public HollowHistory(HollowReadStateEngine fwdMovingHollowReadStateEngine,
HollowReadStateEngine revMovingHollowReadStateEngine,
long fwdInitialVersion,
long revInitialVersion,
int maxHistoricalStatesToKeep,
boolean isAutoDiscoverTypeIndex) {
this.keyIndex = new HollowHistoryKeyIndex(this);
this.creator = new HollowHistoricalStateCreator(this);
this.historicalStates = new ArrayList<>();
this.historicalStateLookupMap = new HashMap<>();
this.maxHistoricalStatesToKeep = maxHistoricalStatesToKeep;
// validate fwd moving state initialization
requireNonNull(fwdMovingHollowReadStateEngine, "Fwd direction read state engine should be initialized");
if (fwdInitialVersion == VERSION_NONE) {
throw new IllegalArgumentException("Valid version corresponding to fwdMovingHollowReadStateEngine should be specified" +
"during HollowHistory initialization");
}
this.latestHollowReadStateEngine = fwdMovingHollowReadStateEngine;
this.fwdInitialVersion = fwdInitialVersion;
this.latestVersion = fwdInitialVersion;
this.latestHeaderEntries = latestHollowReadStateEngine.getHeaderTags();
// rev moving state, may or may not be specified at initialization
if (revMovingHollowReadStateEngine != null || revInitialVersion != VERSION_NONE) {
initializeReverseStateEngine(revMovingHollowReadStateEngine, revInitialVersion);
}
if (isAutoDiscoverTypeIndex) {
for (HollowSchema schema : fwdMovingHollowReadStateEngine.getSchemas()) {
if (schema instanceof HollowObjectSchema) {
PrimaryKey pKey = ((HollowObjectSchema) schema).getPrimaryKey();
if (pKey == null) continue;
keyIndex.addTypeIndex(pKey);
keyIndex.indexTypeField(pKey);
}
}
}
}
public void initializeReverseStateEngine(HollowReadStateEngine revReadStateEngine, long version) {
requireNonNull(revReadStateEngine, "Non-null revReadStateEngine required");
if (version == VERSION_NONE) {
throw new IllegalArgumentException("Valid version corresponding to revReadStateEngine required");
}
if (version != fwdInitialVersion) {
throw new IllegalStateException("Reverse state engine version should correspond to the version that fwd state engine" +
"initialized to for a contiguous history chain");
}
if (latestHollowReadStateEngine == null) {
// so that history key index is initialized to latestReadStateEngine, the one we're going to retain forever
throw new IllegalStateException("Initialize fwd direction read state engine before initializing rev direction read state engine");
}
if (oldestHollowReadStateEngine != null || oldestVersion != VERSION_NONE) {
throw new IllegalStateException("oldestHollowReadStateEngine has already been initialized");
}
this.oldestHollowReadStateEngine = revReadStateEngine;
this.oldestVersion = version;
}
    /**
     * Call this method to indicate that list ordering changes should be identified as modified records when
     * a double snapshot occurs. By default, these will not be identified as updates.
     */
    public void ignoreListOrderingOnDoubleSnapshot() {
        // consulted when building the DiffEqualityMapping in doubleSnapshotOccurred()
        this.ignoreListOrderingOnDoubleSnapshot = true;
    }
    /**
     * @return The {@link HollowHistoryKeyIndex}, responsible for identifying keyOrdinals.
     */
    public HollowHistoryKeyIndex getKeyIndex() {
        return keyIndex;
    }
    /**
     * @return The {@link HollowReadStateEngine} backing the latest state.
     */
    public HollowReadStateEngine getLatestState() {
        return latestHollowReadStateEngine;
    }
    /**
     * @return The {@link HollowReadStateEngine} backing the oldest state.
     */
    public HollowReadStateEngine getOldestState() {
        return oldestHollowReadStateEngine;
    }
    /**
     * @return the version of the latest (fwd-moving) state.
     */
    public long getLatestVersion() {
        return latestVersion;
    }
    /**
     * @return the version of the oldest (rev-moving) state; VERSION_NONE until the reverse state
     *         engine has been initialized.
     */
    public long getOldestVersion() {
        return oldestVersion;
    }
    /**
     * @return An array of each historical state.
     */
    public HollowHistoricalState[] getHistoricalStates() {
        // historicalStates is ordered newest-first (see addHistoricalState)
        return historicalStates.toArray(new HollowHistoricalState[historicalStates.size()]);
    }
    /**
     * @return the number of historical states
     */
    public int getNumberOfHistoricalStates() {
        return historicalStates.size();
    }
    /**
     * @param version A version in the past
     * @return The {@link HollowHistoricalState} for the specified version, if it exists.
     */
    public HollowHistoricalState getHistoricalState(long version) {
        if(latestVersion == version)
            // the latest version maps to the most recently added historical state (index 0)
            // NOTE(review): assumes at least one historical state exists when queried with the latest version — confirm
            return historicalStates.get(0);
        return historicalStateLookupMap.get(version);
    }
    /**
     * Call this method after each time a delta occurs in the backing {@link HollowReadStateEngine}. This
     * is how the HollowHistory knows how to create a new {@link HollowHistoricalState}.
     *
     * @param newVersion The version of the new state
     */
    public void deltaOccurred(long newVersion) {
        // When invoked in a listener the delta update has been already applied to latestHollowReadStateEngine, but
        // {@code latestVersion} is still the version from before the delta transition. {@code latestVersion} is
        // updated in this method.
        // Update the state stored in keyIndex (in its member readStateEngine) with the passed read state engine.
        // The readStateEngine within keyIndex stores an ever-growing state of all keys ever seen by this HollowHistory
        // instance i.e. all keys seen in initial load or a successive double-snapshot and all keys added/removed in
        // deltas and reverse deltas. It doesn't store a copy of the keyed records, instead just the primary key values
        // for each type that has a primary key defined (in schema or custom via history helpers).
        keyIndex.update(latestHollowReadStateEngine, true);
        // A HollowHistoricalStateDataAccess is used to save data that won't exist in future states so it needs to
        // be stashed away for making those records accessible in the history view. It achieves this by copying over
        // data corresponding to ghost records in the "to" state in any state transition (i.e. records corresponding to
        // ordinals were populated in the "from" state but are not populated in the "to" state) into a new state engine
        // where it assigns new ordinals serially(0, 1, 2, etc.) to each such record. A mapping of original ordinal
        // in the read state to its new ordinal position in the historical state data access for all such records in
        // each type is stored in the member typeRemovedOrdinalMapping.
        // Note: the historical data access is created under the pre-delta version (latestVersion at entry).
        HollowHistoricalStateDataAccess historicalDataAccess = creator.createBasedOnNewDelta(latestVersion, latestHollowReadStateEngine);
        // chain the historical data access in front of the live read state engine
        historicalDataAccess.setNextState(latestHollowReadStateEngine);
        // keyOrdinalMapping tracks, for each primary key in each type, ordinals corresponding to added and removed records
        // in the latest read state engine. Used together with the mapping for original ordinal in the read state engine to
        // assigned ordinal in historic state, this helps power the UI view where given a historic version it lists all
        // keys that were added/removed/modified in that version, and can then retrieve the data in those historic states
        // corresponding to those keys
        HollowHistoricalStateKeyOrdinalMapping keyOrdinalMapping = createKeyOrdinalMappingFromDelta(latestHollowReadStateEngine, false);
        HollowHistoricalState historicalState = new HollowHistoricalState(newVersion, keyOrdinalMapping, historicalDataAccess, latestHeaderEntries);
        addHistoricalState(historicalState);
        // advance to the post-delta version and its header tags
        this.latestVersion = newVersion;
        this.latestHeaderEntries = latestHollowReadStateEngine.getHeaderTags();
    }
    /**
     * Call this method after each time a reverse delta occurs in the backing {@link HollowReadStateEngine}. This
     * is how the HollowHistory knows how to create a new {@link HollowHistoricalState}.
     *
     * @param newVersion The version of the new state
     */
    public void reverseDeltaOccurred(long newVersion) {
        if (oldestHollowReadStateEngine == null) {
            throw new IllegalStateException("Read state engine for reverse direction history computation isn't initialized. " +
                    "This can occur if the required hollow history init sequence isn't followed or if oldestHollowReadStateEngine " +
                    "was discarded after history was initialized to max old versions");
        }
        if(historicalStates.size() >= maxHistoricalStatesToKeep) {
            throw new IllegalStateException("No. of history states reached max states capacity. HollowHistory does not " +
                    "support reaching this state when building history in reverse because older states would be evicted " +
                    "and history past here wouldn't be of contiguous versions");
        }
        // keyIndex is an ever-growing state that maintains all primary key values ever seen, and when a reverse delta
        // update occurs we add any newly seen keys to it
        keyIndex.update(oldestHollowReadStateEngine, true);
        // Applying reverse delta from v2->v1 builds a historical data access state for v2
        //
        // The behavior for building history using reverse deltas is logically flipped i.e. it needs to track data
        // corresponding to ordinals that were added in reverse delta transition instead of removed in the delta direction,
        // because those will not be present in the latestReadStateEngine (oldestReadStateEngine is discarded eventually). So,
        // When building history using fwd deltas for e.g. v1->v2
        //      look up ordinals removed in v2, copy over data at those ordinals from v2's read state into a new data access
        //      under ordinals 0,1,2,etc. - this comprises the historical state corresponding to v2
        // When building history using rev delta for e.g. v2->v1:
        //      look up ordinals added in v1, copy over data at those ordinals from v1 read state a new data access under
        //      ordinals 0,1,2,etc. - this comprises the historical state corresponding to v2
        HollowHistoricalStateDataAccess historicalDataAccess = creator.createBasedOnNewDelta(oldestVersion, oldestHollowReadStateEngine, true);
        // Depending on directionality (delta or reverse delta) the significance of additions and removal is flipped
        HollowHistoricalStateKeyOrdinalMapping keyOrdinalMapping = createKeyOrdinalMappingFromDelta(oldestHollowReadStateEngine, true);
        HollowHistoricalState historicalState = new HollowHistoricalState(oldestVersion, keyOrdinalMapping, historicalDataAccess,
                oldestHollowReadStateEngine.getHeaderTags());
        addReverseHistoricalState(historicalState);
        // the reverse-moving engine has now moved (backwards) to newVersion
        this.oldestVersion = newVersion;
    }
    /**
     * Call this method after each time a double snapshot occurs that advances the latest version. Note that building
     * history using double snapshot in the reverse direction is not supported.
     * <p>
     * This method will replace the previous backing {@link HollowReadStateEngine} with the newly
     * supplied one, stitch together all of the existing history with the new state currently in the
     * new {@link HollowReadStateEngine}, and create a new {@link HollowHistoricalState} to represent
     * the transition.
     *
     * @param newHollowStateEngine the new state engine
     * @param newVersion the new version
     */
    public void doubleSnapshotOccurred(HollowReadStateEngine newHollowStateEngine, long newVersion) {
        if (newVersion <= latestVersion) {
            throw new UnsupportedOperationException("Double snapshot only supports advancing the latest version");
        }
        // make sure keys from both the outgoing and incoming snapshots are in the key index
        if(!keyIndex.isInitialized())
            keyIndex.update(latestHollowReadStateEngine, false);
        keyIndex.update(newHollowStateEngine, false);
        HollowHistoricalStateDataAccess historicalDataAccess;
        // identify records that are equal across the two snapshots so only genuinely changed
        // records are surfaced as added/removed/modified
        DiffEqualityMapping mapping = new DiffEqualityMapping(latestHollowReadStateEngine, newHollowStateEngine, true, !ignoreListOrderingOnDoubleSnapshot);
        DiffEqualityMappingOrdinalRemapper remapper = new DiffEqualityMappingOrdinalRemapper(mapping);
        historicalDataAccess = creator.createHistoricalStateFromDoubleSnapshot(latestVersion, latestHollowReadStateEngine, newHollowStateEngine, remapper);
        // rebuild every existing historical state in terms of the new snapshot's ordinal space
        HollowHistoricalStateDataAccess nextRemappedDataAccess = historicalDataAccess;
        HollowHistoricalState nextRemappedState = null;
        HollowHistoricalStateDataAccess[] remappedDataAccesses = new HollowHistoricalStateDataAccess[historicalStates.size()];
        HollowHistoricalStateKeyOrdinalMapping[] remappedKeyOrdinalMappings = new HollowHistoricalStateKeyOrdinalMapping[historicalStates.size()];
        remapHistoricalStateOrdinals(remapper, remappedDataAccesses, remappedKeyOrdinalMappings);
        // historicalStates is ordered newest-first; walk it linking each remapped state to the
        // next-newer one, starting from the newly created historical data access
        for(int i=0;i<historicalStates.size();i++) {
            HollowHistoricalState historicalStateToRemap = historicalStates.get(i);
            HollowHistoricalStateDataAccess remappedDataAccess = remappedDataAccesses[i];
            HollowHistoricalStateKeyOrdinalMapping remappedKeyOrdinalMapping = remappedKeyOrdinalMappings[i];
            remappedDataAccess.setNextState(nextRemappedDataAccess);
            nextRemappedDataAccess = remappedDataAccess;
            HollowHistoricalState remappedState = new HollowHistoricalState(historicalStateToRemap.getVersion(), remappedKeyOrdinalMapping, remappedDataAccess, historicalStateToRemap.getHeaderEntries());
            remappedState.setNextState(nextRemappedState);
            nextRemappedState = remappedState;
            historicalStates.set(i, remappedState);
            historicalStateLookupMap.put(remappedState.getVersion(), remappedState);
        }
        // the newest historical data access now feeds into the new live read state engine
        historicalDataAccess.setNextState(newHollowStateEngine);
        HollowHistoricalStateKeyOrdinalMapping keyOrdinalMapping = createKeyOrdinalMappingFromDoubleSnapshot(newHollowStateEngine, remapper);
        HollowHistoricalState historicalState = new HollowHistoricalState(newVersion, keyOrdinalMapping, historicalDataAccess, latestHeaderEntries);
        addHistoricalState(historicalState);
        this.latestVersion = newVersion;
        this.latestHollowReadStateEngine = newHollowStateEngine;
        this.latestHeaderEntries = latestHollowReadStateEngine.getHeaderTags();
    }
// only called when doing a double snapshot
private void remapHistoricalStateOrdinals(final DiffEqualityMappingOrdinalRemapper remapper, final HollowHistoricalStateDataAccess[] remappedDataAccesses, final HollowHistoricalStateKeyOrdinalMapping[] remappedKeyOrdinalMappings) {
SimultaneousExecutor executor = new SimultaneousExecutor(getClass(), "remap");
final int numThreads = executor.getCorePoolSize();
for(int i=0;i<executor.getCorePoolSize();i++) {
final int threadNumber = i;
executor.execute(() -> {
for(int t=threadNumber;t<historicalStates.size();t+=numThreads) {
HollowHistoricalState historicalStateToRemap = historicalStates.get(t);
remappedDataAccesses[t] = creator.copyButRemapOrdinals(historicalStateToRemap.getDataAccess(), remapper);
remappedKeyOrdinalMappings[t] = historicalStateToRemap.getKeyOrdinalMapping().remap(remapper);
}
});
}
try {
executor.awaitSuccessfulCompletion();
} catch (InterruptedException | ExecutionException e) {
throw new RuntimeException(e);
}
}
    /**
     * Builds the per-type mapping of key ordinals added and removed in the most recent transition
     * applied to {@code readStateEngine}.
     *
     * @param readStateEngine the engine whose populated/previous ordinal sets describe the transition
     * @param reverse true when the transition was a reverse delta; flips which side of the
     *                transition is treated as "added" vs "removed" (see reverseDeltaOccurred)
     * @return the key ordinal mapping for the transition
     */
    private HollowHistoricalStateKeyOrdinalMapping createKeyOrdinalMappingFromDelta(HollowReadStateEngine readStateEngine, boolean reverse) {
        HollowHistoricalStateKeyOrdinalMapping keyOrdinalMapping = new HollowHistoricalStateKeyOrdinalMapping(keyIndex);
        for(String keyType : keyIndex.getTypeKeyIndexes().keySet()) {
            HollowHistoricalStateTypeKeyOrdinalMapping typeMapping = keyOrdinalMapping.getTypeMapping(keyType);
            HollowObjectTypeReadState typeState = (HollowObjectTypeReadState) readStateEngine.getTypeState(keyType);
            if (typeState==null) {
                // The type is present in the history's primary key index but is not present
                // in the latest read state; ensure the mapping is initialized to the default state
                typeMapping.prepare(0, 0);
                typeMapping.finish();
                continue;
            }
            PopulatedOrdinalListener listener = typeState.getListener(PopulatedOrdinalListener.class);
            RemovedOrdinalIterator additionsIterator, removalIterator;
            // directionality flip: the meaning of additions and removals is swapped for a reverse
            // transition (see reverseDeltaOccurred for the rationale)
            if (reverse) {
                removalIterator = new RemovedOrdinalIterator(listener.getPopulatedOrdinals(), listener.getPreviousOrdinals());
                additionsIterator = new RemovedOrdinalIterator(listener);
            } else {
                removalIterator = new RemovedOrdinalIterator(listener);
                additionsIterator = new RemovedOrdinalIterator(listener.getPopulatedOrdinals(), listener.getPreviousOrdinals());
            }
            typeMapping.prepare(additionsIterator.countTotal(), removalIterator.countTotal());
            int removedOrdinal = removalIterator.next();
            while(removedOrdinal != -1) {
                typeMapping.removed(typeState, removedOrdinal);
                removedOrdinal = removalIterator.next();
            }
            int addedOrdinal = additionsIterator.next();
            while(addedOrdinal != -1) {
                typeMapping.added(typeState, addedOrdinal);
                addedOrdinal = additionsIterator.next();
            }
            typeMapping.finish();
        }
        return keyOrdinalMapping;
    }
    /**
     * Builds the per-type mapping of key ordinals added and removed across a double snapshot, i.e.
     * between {@code latestHollowReadStateEngine} (outgoing) and {@code newStateEngine} (incoming).
     *
     * @param newStateEngine the incoming snapshot's read state engine
     * @param ordinalRemapper remapper backed by the diff equality mapping between the two snapshots
     * @return the key ordinal mapping for the transition
     */
    private HollowHistoricalStateKeyOrdinalMapping createKeyOrdinalMappingFromDoubleSnapshot(HollowReadStateEngine newStateEngine, DiffEqualityMappingOrdinalRemapper ordinalRemapper) {
        HollowHistoricalStateKeyOrdinalMapping keyOrdinalMapping = new HollowHistoricalStateKeyOrdinalMapping(keyIndex);
        DiffEqualityMapping mapping = ordinalRemapper.getDiffEqualityMapping();
        for(String keyType : keyIndex.getTypeKeyIndexes().keySet()) {
            HollowHistoricalStateTypeKeyOrdinalMapping typeMapping = keyOrdinalMapping.getTypeMapping(keyType);
            HollowObjectTypeReadState fromTypeState = (HollowObjectTypeReadState) latestHollowReadStateEngine.getTypeState(keyType);
            HollowObjectTypeReadState toTypeState = (HollowObjectTypeReadState) newStateEngine.getTypeState(keyType);
            DiffEqualOrdinalMap equalOrdinalMap = mapping.getEqualOrdinalMap(keyType);
            // a type may be absent on either side; treat the absent side as having no ordinals
            BitSet fromOrdinals = fromTypeState == null ? new BitSet() : fromTypeState.getListener(PopulatedOrdinalListener.class).getPopulatedOrdinals();
            BitSet toOrdinals = toTypeState == null ? new BitSet() : toTypeState.getListener(PopulatedOrdinalListener.class).getPopulatedOrdinals();
            int removedOrdinalsCount = countUnmatchedOrdinals(fromOrdinals, equalOrdinalMap.getFromOrdinalIdentityTranslator());
            int addedOrdinalsCount = countUnmatchedOrdinals(toOrdinals, equalOrdinalMap.getToOrdinalIdentityTranslator());
            typeMapping.prepare(addedOrdinalsCount, removedOrdinalsCount);
            // from-side records with no identity match in the incoming snapshot were removed
            int fromOrdinal = fromOrdinals.nextSetBit(0);
            while(fromOrdinal != -1) {
                if(equalOrdinalMap.getIdentityFromOrdinal(fromOrdinal) == -1)
                    typeMapping.removed(fromTypeState, fromOrdinal, ordinalRemapper.getMappedOrdinal(keyType, fromOrdinal));
                fromOrdinal = fromOrdinals.nextSetBit(fromOrdinal + 1);
            }
            // to-side records with no identity match in the outgoing snapshot were added
            int toOrdinal = toOrdinals.nextSetBit(0);
            while(toOrdinal != -1) {
                if(equalOrdinalMap.getIdentityToOrdinal(toOrdinal) == -1)
                    typeMapping.added(toTypeState, toOrdinal);
                toOrdinal = toOrdinals.nextSetBit(toOrdinal + 1);
            }
            typeMapping.finish();
        }
        return keyOrdinalMapping;
    }
private int countUnmatchedOrdinals(BitSet ordinals, OrdinalIdentityTranslator translator) {
int count = 0;
int ordinal = ordinals.nextSetBit(0);
while(ordinal != -1) {
if(translator.getIdentityOrdinal(ordinal) == -1)
count++;
ordinal = ordinals.nextSetBit(ordinal + 1);
}
return count;
}
    // place historicalState at the beginning of historicalStates
    //
    // historicalStates is ordered like: V3 -> V2 -> V1
    // however internally the states are linked like: V1.nextState = V2; V2.nextState = V3; etc.
    private void addHistoricalState(HollowHistoricalState historicalState) {
        if(historicalStates.size() > 0) {
            // link the previously-newest state (index 0) forward to the incoming state
            historicalStates.get(0).getDataAccess().setNextState(historicalState.getDataAccess());
            historicalStates.get(0).setNextState(historicalState);
        }
        historicalStates.add(0, historicalState);
        historicalStateLookupMap.put(historicalState.getVersion(), historicalState);
        // evict the single oldest state once capacity is exceeded
        if(historicalStates.size() > maxHistoricalStatesToKeep) {
            removeHistoricalStates(1);
        }
    }
    // place historicalState at the end of historicalStates
    //
    // historicalStates is ordered like: V3 -> V2 -> V1
    // however internally the states are linked like: V1.nextState = V2; V2.nextState = V3; etc.
    private void addReverseHistoricalState(HollowHistoricalState historicalState) {
        if (historicalStates.size() > 0) {
            // link the incoming (oldest) state forward to the previously-oldest state
            historicalState.getDataAccess().setNextState(historicalStates.get(historicalStates.size()-1).getDataAccess());
            historicalState.setNextState(historicalStates.get(historicalStates.size()-1));
        } else { // if reverse delta occurs before any fwd deltas
            historicalState.getDataAccess().setNextState(latestHollowReadStateEngine);
        }
        historicalStates.add(historicalState);
        historicalStateLookupMap.put(historicalState.getVersion(), historicalState);
        if (historicalStates.size() >= maxHistoricalStatesToKeep) {
            // drop old read state because we won't be building history in reverse after we get here
            oldestHollowReadStateEngine = null;
        }
    }
/**
* Removes the oldest {@code n} historical states.
*
* @param n the number of historical states to remove
* @throws IllegalArgumentException if the {@code n} is less than {@code 0} or
* greater than the {@link #getNumberOfHistoricalStates() number} of historical
* states.
*/
public void removeHistoricalStates(int n) {
if (n < 0) {
throw new IllegalArgumentException(String.format(
"Number of states to remove is negative: %d", n));
}
if (n > historicalStates.size()) {
throw new IllegalArgumentException(String.format(
"Number of states to remove, %d, is greater than the number of states. %d",
n, historicalStates.size()));
}
// drop oldest HollowReadStateEngine if it hasn't already been
oldestHollowReadStateEngine = null;
while (n-- > 0) {
HollowHistoricalState removedState;
removedState = historicalStates.remove(historicalStates.size() - 1);
historicalStateLookupMap.remove(removedState.getVersion());
}
}
}
| 8,947 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history/keyindex/HollowOrdinalMapper.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.history.keyindex;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.read.HollowReadFieldUtils;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.tools.util.ObjectInternPool;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE;
import java.util.Arrays;
public class HollowOrdinalMapper {
private int size = 0;
private static final double LOAD_FACTOR = 0.7;
private static final int STARTING_SIZE = 2069;
/*
* hashToAssignedOrdinal: OA/LP record hash -> assigned ordinal
* fieldHashToObjectOrdinal: field index -> OA/LP record hash -> object ordinal
* fieldHashToAssignedOrdinal: field index -> OA/LP record hash -> assigned ordinal
* assignedOrdinalToIndex: assigned ordinal -> index
*
* NOTE: hashToAssignedOrdinal and fieldHashToObjectOrdinal are parallel arrays.
* This is why fieldHashToObjectOrdinal is always used in conjunction with fieldHashToAssignedOrdinal.
* */
private int[] hashToAssignedOrdinal;
private int[][] fieldHashToObjectOrdinal;
private IntList[][] fieldHashToAssignedOrdinal;
private int[] assignedOrdinalToIndex;
private final PrimaryKey primaryKey;
private final int[][] keyFieldIndices;
private final boolean[] keyFieldIsIndexed;
private final FieldType[] keyFieldTypes;
private final ObjectInternPool memoizedPool;
public HollowOrdinalMapper(PrimaryKey primaryKey, boolean[] keyFieldIsIndexed, int[][] keyFieldIndices, FieldType[] keyFieldTypes) {
this.hashToAssignedOrdinal = new int[STARTING_SIZE];
this.fieldHashToObjectOrdinal = new int[primaryKey.numFields()][STARTING_SIZE];
this.fieldHashToAssignedOrdinal = new IntList[primaryKey.numFields()][STARTING_SIZE];
this.assignedOrdinalToIndex = new int[STARTING_SIZE];
Arrays.fill(this.hashToAssignedOrdinal, ORDINAL_NONE);
for(int field=0;field<primaryKey.numFields();field++) {
Arrays.fill(this.fieldHashToObjectOrdinal[field], ORDINAL_NONE);
}
Arrays.fill(this.assignedOrdinalToIndex, ORDINAL_NONE);
this.primaryKey = primaryKey;
this.keyFieldIndices = keyFieldIndices;
this.keyFieldIsIndexed = keyFieldIsIndexed;
this.keyFieldTypes = keyFieldTypes;
this.memoizedPool = new ObjectInternPool();
}
public void addMatches(int hashCode, Object objectToMatch, int field, FieldType type, IntList results) {
IntList[] fieldHashes = fieldHashToAssignedOrdinal[field];
int scanIndex = indexFromHash(hashCode, fieldHashes.length);
if (fieldHashes[scanIndex] == null)
return;
for(int i=0;i<fieldHashes[scanIndex].size();i++) {
int assignedOrdinal = fieldHashes[scanIndex].get(i);
Object object = getFieldObject(assignedOrdinal, field, type);
if(object.equals(objectToMatch))
results.add(assignedOrdinal);
}
}
public void writeKeyFieldHash(Object fieldObject, int assignedOrdinal, int fieldIdx) {
if (!keyFieldIsIndexed[fieldIdx])
return;
IntList[] fieldHashes = fieldHashToAssignedOrdinal[fieldIdx];
int fieldHash = hashObject(fieldObject);
int newIndex = indexFromHash(fieldHash, fieldHashes.length);
if(fieldHashes[newIndex]==null) {
fieldHashes[newIndex] = new IntList();
}
fieldHashes[newIndex].add(assignedOrdinal);
}
public void prepareForRead() {
memoizedPool.prepareForRead();
}
public int findAssignedOrdinal(HollowObjectTypeReadState typeState, int keyOrdinal) {
int hashedRecord = hashKeyRecord(typeState, keyOrdinal);
int scanIndex = indexFromHash(hashedRecord, hashToAssignedOrdinal.length);
while (hashToAssignedOrdinal[scanIndex]!=ORDINAL_NONE) {
if(recordsAreEqual(typeState, keyOrdinal, scanIndex))
return hashToAssignedOrdinal[scanIndex];
scanIndex = (scanIndex + 1) % hashToAssignedOrdinal.length;
}
return ORDINAL_NONE;
}
private boolean recordsAreEqual(HollowObjectTypeReadState typeState, int keyOrdinal, int index) {
for(int fieldIdx=0;fieldIdx<primaryKey.numFields();fieldIdx++) {
if(!keyFieldIsIndexed[fieldIdx])
continue;
Object newFieldValue = readValueInState(typeState, keyOrdinal, fieldIdx);
int existingFieldOrdinalValue = fieldHashToObjectOrdinal[fieldIdx][index];
//Assuming two records in the same cycle cannot be equal
if(memoizedPool.ordinalInCurrentCycle(existingFieldOrdinalValue)) {
return false;
}
Object existingFieldObjectValue = memoizedPool.getObject(existingFieldOrdinalValue, keyFieldTypes[fieldIdx]);
if (!newFieldValue.equals(existingFieldObjectValue)) {
return false;
}
}
return true;
}
public boolean storeNewRecord(HollowObjectTypeReadState typeState, int ordinal, int assignedOrdinal) {
int hashedRecord = hashKeyRecord(typeState, ordinal);
if ((double) size / hashToAssignedOrdinal.length > LOAD_FACTOR) {
expandAndRehashTable();
}
int newIndex = indexFromHash(hashedRecord, hashToAssignedOrdinal.length);
// Linear probing
while (hashToAssignedOrdinal[newIndex] != ORDINAL_NONE) {
if(recordsAreEqual(typeState, ordinal, newIndex)) {
assignedOrdinalToIndex[assignedOrdinal]=newIndex;
return false;
}
newIndex = (newIndex + 1) % hashToAssignedOrdinal.length;
}
for (int i = 0; i < primaryKey.numFields(); i++) {
Object objectToHash = readValueInState(typeState, ordinal, i);
writeKeyFieldHash(objectToHash, assignedOrdinal, i);
}
storeFieldObjects(typeState, ordinal, newIndex);
hashToAssignedOrdinal[newIndex] = assignedOrdinal;
assignedOrdinalToIndex[assignedOrdinal]=newIndex;
size++;
return true;
}
private void storeFieldObjects(HollowObjectTypeReadState typeState, int ordinal, int index) {
for(int i=0;i<primaryKey.numFields();i++) {
if(!keyFieldIsIndexed[i])
continue;
Object objectToStore = readValueInState(typeState, ordinal, i);
int objectOrdinal = memoizedPool.writeAndGetOrdinal(objectToStore);
fieldHashToObjectOrdinal[i][index] = objectOrdinal;
}
}
private int[] getFieldOrdinals(int index) {
int[] fieldObjects = new int[primaryKey.numFields()];
for(int fieldIdx=0;fieldIdx< primaryKey.numFields();fieldIdx++) {
fieldObjects[fieldIdx] = fieldHashToObjectOrdinal[fieldIdx][index];
}
return fieldObjects;
}
private int hashFromIndex(int index) {
int[] fieldOrdinals = getFieldOrdinals(index);
Object[] fieldObjects = new Object[primaryKey.numFields()];
for(int fieldIdx=0;fieldIdx< primaryKey.numFields();fieldIdx++) {
fieldObjects[fieldIdx] = memoizedPool.getObject(fieldOrdinals[fieldIdx], keyFieldTypes[fieldIdx]);
}
return hashKeyRecord(fieldObjects);
}
private void expandAndRehashTable() {
prepareForRead();
int[] newTable = new int[hashToAssignedOrdinal.length*2];
Arrays.fill(newTable, ORDINAL_NONE);
int[][] newFieldMappings = new int[primaryKey.numFields()][hashToAssignedOrdinal.length*2];
IntList[][] newFieldHashToOrdinal = new IntList[primaryKey.numFields()][hashToAssignedOrdinal.length*2];
assignedOrdinalToIndex = Arrays.copyOf(assignedOrdinalToIndex, hashToAssignedOrdinal.length*2);
for(int fieldIdx=0;fieldIdx<primaryKey.numFields();fieldIdx++) {
IntList[] hashToOrdinal = fieldHashToAssignedOrdinal[fieldIdx];
for (IntList ordinalList : hashToOrdinal) {
if(ordinalList==null || ordinalList.size()==0)
continue;
// Recompute original hash, based on the fact that all objects in this IntList have the same hash
Object originalFieldObject = getFieldObject(ordinalList.get(0), fieldIdx, keyFieldTypes[fieldIdx]);
int originalHash = hashObject(originalFieldObject);
int newIndex = indexFromHash(originalHash, newTable.length);
newFieldHashToOrdinal[fieldIdx][newIndex]=ordinalList;
}
}
for(int i=0;i<hashToAssignedOrdinal.length;i++) {
if(hashToAssignedOrdinal[i]==ORDINAL_NONE)
continue;
// Recompute original hash
int firstHash = hashFromIndex(i);
int newIndex = rehashExistingRecord(newTable, firstHash, hashToAssignedOrdinal[i]);
for(int fieldIdx=0;fieldIdx<primaryKey.numFields();fieldIdx++) {
newFieldMappings[fieldIdx][newIndex] = fieldHashToObjectOrdinal[fieldIdx][i];
}
// Store new index in old table, so we can remap assignedOrdinalToIndex
hashToAssignedOrdinal[i]=newIndex;
}
for (int assignedOrdinal=0;assignedOrdinal<assignedOrdinalToIndex.length;assignedOrdinal++) {
int previousIndex = assignedOrdinalToIndex[assignedOrdinal];
if (previousIndex==ORDINAL_NONE)
//linear, so we can break
break;
int newIndex = hashToAssignedOrdinal[previousIndex];
assignedOrdinalToIndex[assignedOrdinal]=newIndex;
}
this.hashToAssignedOrdinal = newTable;
this.fieldHashToObjectOrdinal = newFieldMappings;
this.fieldHashToAssignedOrdinal = newFieldHashToOrdinal;
}
private int rehashExistingRecord(int[] newTable, int originalHash, int assignedOrdinal) {
int newIndex = indexFromHash(originalHash, newTable.length);
while (newTable[newIndex]!=ORDINAL_NONE)
newIndex = (newIndex + 1) % newTable.length;
newTable[newIndex] = assignedOrdinal;
return newIndex;
}
public Object getFieldObject(int assignedOrdinal, int fieldIndex, FieldType type) {
int index = assignedOrdinalToIndex[assignedOrdinal];
int fieldOrdinal = fieldHashToObjectOrdinal[fieldIndex][index];
return memoizedPool.getObject(fieldOrdinal, type);
}
private int hashKeyRecord(HollowObjectTypeReadState typeState, int ordinal) {
int hashCode = 0;
for (int i = 0; i < primaryKey.numFields(); i++) {
Object fieldObjectToHash = readValueInState(typeState, ordinal, i);
int fieldHashCode = HollowReadFieldUtils.hashObject(fieldObjectToHash);
hashCode = (hashCode * 31) ^ fieldHashCode;
}
return HashCodes.hashInt(hashCode);
}
private int hashKeyRecord(Object[] objects) {
int hashCode = 0;
for (Object fieldObject : objects) {
int fieldHashCode = HollowReadFieldUtils.hashObject(fieldObject);
hashCode = (hashCode * 31) ^ fieldHashCode;
}
return HashCodes.hashInt(hashCode);
}
//taken and modified from HollowPrimaryKeyValueDeriver
public Object readValueInState(HollowObjectTypeReadState typeState, int ordinal, int fieldIdx) {
HollowObjectSchema schema = typeState.getSchema();
int lastFieldPath = keyFieldIndices[fieldIdx].length - 1;
for (int i = 0; i < lastFieldPath; i++) {
int fieldPosition = keyFieldIndices[fieldIdx][i];
ordinal = typeState.readOrdinal(ordinal, fieldPosition);
typeState = (HollowObjectTypeReadState) schema.getReferencedTypeState(fieldPosition);
schema = typeState.getSchema();
}
return HollowReadFieldUtils.fieldValueObject(typeState, ordinal, keyFieldIndices[fieldIdx][lastFieldPath]);
}
// Java modulo is more like a remainder, indices can't be negative
private static int indexFromHash(int hashedValue, int length) {
int modulus = hashedValue % length;
return modulus < 0 ? modulus + length : modulus;
}
/**
 * Hashes an arbitrary field value object, applying the same final mixing step
 * used when hashing whole key records.
 */
private static int hashObject(Object object) {
    int rawHash = HollowReadFieldUtils.hashObject(object);
    return HashCodes.hashInt(rawHash);
}
} | 8,948 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history/keyindex/HollowHistoryTypeKeyIndex.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.history.keyindex;
import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE;
import static com.netflix.hollow.tools.util.SearchUtils.MULTI_FIELD_KEY_DELIMITER;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.read.HollowReadFieldUtils;
import com.netflix.hollow.core.read.engine.PopulatedOrdinalListener;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.core.util.RemovedOrdinalIterator;
import java.util.Arrays;
import java.util.BitSet;
/**
 * Per-type key index for {@link com.netflix.hollow.tools.history.HollowHistory}:
 * assigns a stable, state-independent ordinal to every unique primary-key record
 * seen across states, backed by a {@link HollowOrdinalMapper}.
 *
 * Not thread-safe for concurrent updates; {@code update(...)} for different types
 * is parallelized at the {@code HollowHistoryKeyIndex} level, one type per task.
 */
public class HollowHistoryTypeKeyIndex {
// The primary key definition (type name + field paths) this index is built over.
private final PrimaryKey primaryKey;
// Leaf field type of each key field, resolved lazily in initializeKeySchema().
private final FieldType[] fieldTypes;
// Fully expanded field path name parts for each key field.
private final String[][] keyFieldNames;
// Fully expanded field path positions for each key field.
private final int[][] keyFieldIndices;
// Whether each key field participates in query matching (see addFieldIndex()).
private final boolean[] keyFieldIsIndexed;
private boolean isInitialized = false;
// Next ordinal to hand out; doubles as the count of unique indexed keys.
private int maxIndexedOrdinal = 0;
// Stores/deduplicates key records and memoizes their field values.
private final HollowOrdinalMapper ordinalMapping;
public HollowHistoryTypeKeyIndex(PrimaryKey primaryKey, HollowDataset dataModel) {
this.primaryKey = primaryKey;
this.fieldTypes = new FieldType[primaryKey.numFields()];
this.keyFieldNames = new String[primaryKey.numFields()][];
this.keyFieldIndices = new int[primaryKey.numFields()][];
this.keyFieldIsIndexed = new boolean[primaryKey.numFields()];
initializeKeyParts(dataModel);
this.ordinalMapping = new HollowOrdinalMapper(primaryKey, keyFieldIsIndexed, keyFieldIndices, fieldTypes);
}
public boolean isInitialized() {
return isInitialized;
}
/** Returns the stable key ordinal for the record at {@code ordinal}, or its mapper's not-found sentinel. */
public int findKeyIndexOrdinal(HollowObjectTypeReadState typeState, int ordinal) {
return ordinalMapping.findAssignedOrdinal(typeState, ordinal);
}
public int getMaxIndexedOrdinal() {
return maxIndexedOrdinal;
}
public String[] getKeyFields() {
return primaryKey.getFieldPaths();
}
/** Marks the key field matching {@code fieldName} (compared by expanded path) as queryable. */
public void addFieldIndex(String fieldName, HollowDataset dataModel) {
String[] fieldPathParts = PrimaryKey.getCompleteFieldPathParts(dataModel, primaryKey.getType(), fieldName);
for (int i = 0; i < primaryKey.numFields(); i++) {
String[] pkFieldPathParts = PrimaryKey.getCompleteFieldPathParts(dataModel, primaryKey.getType(), primaryKey.getFieldPath(i));
if (Arrays.equals(pkFieldPathParts, fieldPathParts)) {
keyFieldIsIndexed[i] = true;
break;
}
}
}
/** Resolves each key field's leaf FieldType from the schema; idempotent after first success. */
public void initializeKeySchema(HollowObjectTypeReadState initialTypeState) {
if (isInitialized) return;
HollowObjectSchema schema = initialTypeState.getSchema();
for (int i= 0; i < keyFieldNames.length; i ++) {
String[] keyFieldPart = keyFieldNames[i];
fieldTypes[i] = addSchemaField(schema, keyFieldPart, 0);
}
isInitialized = true;
}
// Recursively follows the field path through referenced schemas to the leaf field type.
private FieldType addSchemaField(HollowObjectSchema schema, String[] keyFieldNames, int keyFieldPartPosition) {
int schemaPosition = schema.getPosition(keyFieldNames[keyFieldPartPosition]);
if (keyFieldPartPosition < keyFieldNames.length - 1) {
HollowObjectSchema nextPartSchema = (HollowObjectSchema) schema.getReferencedTypeState(schemaPosition).getSchema();
return addSchemaField(nextPartSchema, keyFieldNames, keyFieldPartPosition + 1);
}
return schema.getFieldType(schemaPosition);
}
public boolean[] getKeyFieldIsIndexed() {
return keyFieldIsIndexed;
}
// Expands each primary-key field path to its name parts and schema positions once, up front.
private void initializeKeyParts(HollowDataset dataModel) {
for (int i = 0; i < primaryKey.numFields(); i++) {
keyFieldNames[i] = PrimaryKey.getCompleteFieldPathParts(dataModel, primaryKey.getType(), primaryKey.getFieldPath(i));
keyFieldIndices[i] = PrimaryKey.getFieldPathIndex(dataModel, primaryKey.getType(), primaryKey.getFieldPath(i));
}
}
/**
 * Indexes keys from the latest state. For an initialized delta only newly added
 * records are indexed; otherwise the ordinal counter is reset and every record
 * (current and previous) is (re)indexed. Always finishes by preparing the
 * mapper for reads.
 */
public void update(HollowObjectTypeReadState latestTypeState, boolean isDeltaAndIndexInitialized) {
if (latestTypeState == null) return;
if (isDeltaAndIndexInitialized) {
populateNewCurrentRecordKeysIntoIndex(latestTypeState);
} else {
maxIndexedOrdinal = 0;
populateAllCurrentRecordKeysIntoIndex(latestTypeState);
}
ordinalMapping.prepareForRead();
}
// Delta path: index only records present now but absent in the previous state.
private void populateNewCurrentRecordKeysIntoIndex(HollowObjectTypeReadState typeState) {
PopulatedOrdinalListener listener = typeState.getListener(PopulatedOrdinalListener.class);
BitSet populatedOrdinals = listener.getPopulatedOrdinals();
BitSet previousOrdinals = listener.getPreviousOrdinals();
// Note: arguments are (populated, previous), so this iterates ordinals newly ADDED in this state.
RemovedOrdinalIterator iter = new RemovedOrdinalIterator(populatedOrdinals, previousOrdinals);
int ordinal = iter.next();
while (ordinal != ORDINAL_NONE) {
writeKeyObject(typeState, ordinal);
ordinal = iter.next();
}
}
// Snapshot path: index every ordinal populated in either the current or previous state.
private void populateAllCurrentRecordKeysIntoIndex(HollowObjectTypeReadState typeState) {
PopulatedOrdinalListener listener = typeState.getListener(PopulatedOrdinalListener.class);
BitSet previousOrdinals = listener.getPreviousOrdinals();
BitSet populatedOrdinals = listener.getPopulatedOrdinals();
final int maxLength = Math.max(previousOrdinals.length(), populatedOrdinals.length());
for (int i = 0; i < maxLength; i++) {
if (populatedOrdinals.get(i) || previousOrdinals.get(i))
writeKeyObject(typeState, i);
}
}
// Offers the record's key to the mapper under the next free ordinal; only
// consumes the ordinal when the key was not already stored.
private void writeKeyObject(HollowObjectTypeReadState typeState, int ordinal) {
int assignedOrdinal = maxIndexedOrdinal;
boolean storedUniqueRecord = ordinalMapping.storeNewRecord(typeState, ordinal, assignedOrdinal);
// Identical record already in memory, no need to store fields
if(!storedUniqueRecord)
return;
maxIndexedOrdinal+=1;
}
/** Renders the key at {@code keyOrdinal} as delimiter-joined field values, for display. */
public String getKeyDisplayString(int keyOrdinal) {
StringBuilder builder = new StringBuilder();
for (int i = 0; i < primaryKey.numFields(); i++) {
Object valueAtField = ordinalMapping.getFieldObject(keyOrdinal, i, fieldTypes[i]);
builder.append(valueAtField);
if (i < primaryKey.numFields() - 1)
builder.append(MULTI_FIELD_KEY_DELIMITER);
}
return builder.toString();
}
/**
 * Collects key ordinals whose indexed fields match {@code query}, parsing the
 * query per field type. Numeric parse failures skip that field silently.
 */
public IntList queryIndexedFields(final String query) {
IntList matchingKeys = new IntList();
if (!isInitialized) {
return matchingKeys;
}
for (int i = 0; i < primaryKey.numFields(); i++) {
int hashCode = 0;
Object objectToFind = null;
try {
switch (fieldTypes[i]) {
case INT:
final int queryInt = Integer.parseInt(query);
hashCode = HollowReadFieldUtils.intHashCode(queryInt);
objectToFind = queryInt;
break;
case LONG:
final long queryLong = Long.parseLong(query);
hashCode = HollowReadFieldUtils.longHashCode(queryLong);
objectToFind = queryLong;
break;
case STRING:
hashCode = HashCodes.hashCode(query);
objectToFind = query;
break;
case DOUBLE:
final double queryDouble = Double.parseDouble(query);
hashCode = HollowReadFieldUtils.doubleHashCode(queryDouble);
objectToFind = queryDouble;
break;
case FLOAT:
final float queryFloat = Float.parseFloat(query);
hashCode = HollowReadFieldUtils.floatHashCode(queryFloat);
objectToFind = queryFloat;
break;
default:
// NOTE(review): unsupported field types reach addMatches with hashCode 0 and
// a null objectToFind — presumably a no-op in the mapper; TODO confirm.
}
ordinalMapping.addMatches(HashCodes.hashInt(hashCode), objectToFind, i, fieldTypes[i], matchingKeys);
} catch(NumberFormatException ignore) {}
}
return matchingKeys;
}
/** Returns the stored value of key field {@code keyFieldIdx} for the key at {@code keyOrdinal}. */
public Object getKeyFieldValue(int keyFieldIdx, int keyOrdinal) {
return ordinalMapping.getFieldObject(keyOrdinal, keyFieldIdx, fieldTypes[keyFieldIdx]);
}
}
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history/keyindex/HollowHistoryKeyIndex.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.history.keyindex;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.util.SimultaneousExecutor;
import com.netflix.hollow.tools.history.HollowHistory;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutionException;
/**
* A {@code HollowHistoryKeyIndex} index is used to track all records seen in all known states.
* It achieves this by maintaining a growing readStateEngine. A delta transition applies incoming keys to this
* readStateEngine, and a snapshot transition applies all the existing keys in readStateEngine and new keys
* in the incoming snapshot into a new readStateEngine that is used moving forward.
*/
public class HollowHistoryKeyIndex {
private final HollowHistory history;
// One per-type key index per primary-key type; keyed by type name.
private final Map<String, HollowHistoryTypeKeyIndex> typeKeyIndexes;
private boolean isInitialized;
public HollowHistoryKeyIndex(HollowHistory history) {
this.history = history;
this.typeKeyIndexes = new HashMap<>();
}
/** Number of unique keys indexed for {@code type}. Throws NPE if the type was never registered. */
public int numUniqueKeys(String type) {
return typeKeyIndexes.get(type).getMaxIndexedOrdinal();
}
/** Human-readable rendering of the key at {@code keyOrdinal} for {@code type}. */
public String getKeyDisplayString(String type, int keyOrdinal) {
return typeKeyIndexes.get(type).getKeyDisplayString(keyOrdinal);
}
/** Maps a state-engine ordinal to this index's stable key ordinal. */
public int getRecordKeyOrdinal(HollowObjectTypeReadState typeState, int ordinal) {
return typeKeyIndexes.get(typeState.getSchema().getName()).findKeyIndexOrdinal(typeState, ordinal);
}
public void addTypeIndex(String type, String... keyFieldPaths) {
addTypeIndex(new PrimaryKey(type, keyFieldPaths));
}
public void addTypeIndex(PrimaryKey primaryKey) {
addTypeIndex(primaryKey, history.getLatestState());
}
/**
 * Registers (or replaces) the key index for {@code primaryKey}'s type,
 * carrying over which fields were marked queryable on any prior index.
 */
public HollowHistoryTypeKeyIndex addTypeIndex(PrimaryKey primaryKey, HollowDataset dataModel) {
HollowHistoryTypeKeyIndex prevKeyIdx = typeKeyIndexes.get(primaryKey.getType());
HollowHistoryTypeKeyIndex keyIdx = new HollowHistoryTypeKeyIndex(primaryKey, dataModel);
// retain any previous indexed fields
if (prevKeyIdx != null) {
for (int i = 0; i < prevKeyIdx.getKeyFields().length; i++) {
if (prevKeyIdx.getKeyFieldIsIndexed()[i]) {
keyIdx.addFieldIndex(prevKeyIdx.getKeyFields()[i], dataModel);
}
}
}
typeKeyIndexes.put(primaryKey.getType(), keyIdx);
return keyIdx;
}
public void indexTypeField(String type, String keyFieldPath) {
typeKeyIndexes.get(type).addFieldIndex(keyFieldPath, history.getLatestState());
}
public void indexTypeField(PrimaryKey primaryKey) {
indexTypeField(primaryKey, history.getLatestState());
}
/** Ensures a type index exists for the key's type, then marks all its field paths queryable. */
public void indexTypeField(PrimaryKey primaryKey, HollowDataset dataModel) {
String type = primaryKey.getType();
HollowHistoryTypeKeyIndex typeIndex = typeKeyIndexes.get(type);
if (typeIndex==null) {
typeIndex = addTypeIndex(primaryKey, dataModel);
}
for (String fieldPath : primaryKey.getFieldPaths()) {
typeIndex.addFieldIndex(fieldPath, dataModel);
}
}
public Map<String, HollowHistoryTypeKeyIndex> getTypeKeyIndexes() {
return typeKeyIndexes;
}
/**
 * Indexes the latest state into all registered type indexes.
 * {@code isDelta} is only honored as a delta once this index has been
 * initialized by a prior update; the very first update is always treated
 * as a full (snapshot-style) indexing pass.
 */
public void update(HollowReadStateEngine latestStateEngine, boolean isDelta) {
boolean isInitialUpdate = !isInitialized();
// For all the types in the key index make sure a {@code HollowHistoryTypeKeyIndex} index is initialized (and
// has a writeable write state engine)
// The type index stores ordinals (in a sequence independent of how they existed in the read state) and the
// value of the primary keys.
initializeTypeIndexes(latestStateEngine);
// This call updates the type key indexes of all types in this history key index.
updateTypeIndexes(latestStateEngine, isDelta && !isInitialUpdate);
isInitialized = true;
}
public boolean isInitialized() {
return isInitialized;
}
// Resolves each type's key field schema once; skips types absent from the state engine.
private void initializeTypeIndexes(HollowReadStateEngine latestStateEngine) {
for(Map.Entry<String, HollowHistoryTypeKeyIndex> entry : typeKeyIndexes.entrySet()) {
String type = entry.getKey();
HollowHistoryTypeKeyIndex index = entry.getValue();
if (index.isInitialized()) continue;
HollowObjectTypeReadState typeState = (HollowObjectTypeReadState) latestStateEngine.getTypeState(type);
if (typeState == null) continue;
index.initializeKeySchema(typeState);
}
}
// Updates each type index on its own task; each task touches a distinct
// HollowHistoryTypeKeyIndex, and the shared map is only read here.
private void updateTypeIndexes(final HollowReadStateEngine latestStateEngine, final boolean isDelta) {
SimultaneousExecutor executor = new SimultaneousExecutor(getClass(), "update-type-indexes");
for(final Map.Entry<String, HollowHistoryTypeKeyIndex> entry : typeKeyIndexes.entrySet()) {
executor.execute(() -> {
HollowObjectTypeReadState typeState = (HollowObjectTypeReadState) latestStateEngine.getTypeState(entry.getKey());
entry.getValue().update(typeState, isDelta);
});
}
try {
executor.awaitSuccessfulCompletion();
} catch (InterruptedException | ExecutionException e) {
throw new RuntimeException(e);
}
}
}
| 8,950 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history/keyindex/HollowHistoricalStateTypeKeyOrdinalMapping.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.history.keyindex;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.util.IntMap;
import com.netflix.hollow.core.util.IntMap.IntMapEntryIterator;
import com.netflix.hollow.tools.combine.OrdinalRemapper;
/**
 * Tracks, for one type in one historical state transition, which key ordinals
 * were added and which were removed (a key present in both maps counts as
 * modified). Lifecycle: {@code prepare(...)}, then {@code added(...)}/
 * {@code removed(...)} calls, then {@code finish()} to compute the counts.
 */
public class HollowHistoricalStateTypeKeyOrdinalMapping {
private final String typeName;
private final HollowHistoryTypeKeyIndex keyIndex;
// key ordinal -> state-engine ordinal of the added record
private IntMap addedOrdinalMap;
// key ordinal -> (possibly remapped) ordinal of the removed record
private IntMap removedOrdinalMap;
private int numberOfNewRecords;
private int numberOfRemovedRecords;
private int numberOfModifiedRecords;
public HollowHistoricalStateTypeKeyOrdinalMapping(String typeName, HollowHistoryTypeKeyIndex keyIndex) {
this.typeName = typeName;
this.keyIndex = keyIndex;
}
// this is only invoked for double snapshots
private HollowHistoricalStateTypeKeyOrdinalMapping(String typeName, HollowHistoryTypeKeyIndex keyIndex, IntMap addedOrdinalMap, IntMap removedOrdinalMap) {
this.typeName = typeName;
this.keyIndex = keyIndex;
this.addedOrdinalMap = addedOrdinalMap;
this.removedOrdinalMap = removedOrdinalMap;
finish();
}
/** Sizes the add/remove maps for the expected number of entries. */
public void prepare(int numAdditions, int numRemovals) {
this.addedOrdinalMap = new IntMap(numAdditions);
this.removedOrdinalMap = new IntMap(numRemovals);
}
/** Records that the record at {@code ordinal} was added in this transition. */
public void added(HollowTypeReadState typeState, int ordinal) {
int recordKeyOrdinal = keyIndex.findKeyIndexOrdinal((HollowObjectTypeReadState)typeState, ordinal);
addedOrdinalMap.put(recordKeyOrdinal, ordinal);
}
public void removed(HollowTypeReadState typeState, int ordinal) {
removed(typeState, ordinal, ordinal);
}
/**
 * Records a removal; {@code mappedOrdinal} is the ordinal to remember (it may
 * differ from the state-engine ordinal when the record was remapped into a
 * historical state).
 */
public void removed(HollowTypeReadState typeState, int stateEngineOrdinal, int mappedOrdinal) {
int recordKeyOrdinal = keyIndex.findKeyIndexOrdinal((HollowObjectTypeReadState)typeState, stateEngineOrdinal);
removedOrdinalMap.put(recordKeyOrdinal, mappedOrdinal);
}
// this is only invoked for double snapshots
public HollowHistoricalStateTypeKeyOrdinalMapping remap(OrdinalRemapper remapper) {
IntMap newAddedOrdinalMap = new IntMap(addedOrdinalMap.size());
IntMapEntryIterator addedIter = addedOrdinalMap.iterator();
while(addedIter.next())
newAddedOrdinalMap.put(addedIter.getKey(), remapper.getMappedOrdinal(typeName, addedIter.getValue()));
IntMap newRemovedOrdinalMap = new IntMap(removedOrdinalMap.size());
IntMapEntryIterator removedIter = removedOrdinalMap.iterator();
while(removedIter.next())
newRemovedOrdinalMap.put(removedIter.getKey(), remapper.getMappedOrdinal(typeName, removedIter.getValue()));
return new HollowHistoricalStateTypeKeyOrdinalMapping(typeName, keyIndex, newAddedOrdinalMap, newRemovedOrdinalMap);
}
/**
 * Derives the new/removed/modified counts. A key present in both maps is a
 * modification, so it is subtracted from both the add and remove totals.
 */
public void finish() {
IntMapEntryIterator iter = addedOrdinalMap.iterator();
while(iter.next()) {
// -1 is IntMap's "not present" sentinel — presumably the same value as
// ORDINAL_NONE; TODO confirm against IntMap's contract.
if(removedOrdinalMap.get(iter.getKey()) != -1)
numberOfModifiedRecords++;
}
numberOfNewRecords = addedOrdinalMap.size() - numberOfModifiedRecords;
numberOfRemovedRecords = removedOrdinalMap.size() - numberOfModifiedRecords;
}
public IntMapEntryIterator removedOrdinalMappingIterator() {
return removedOrdinalMap.iterator();
}
public IntMapEntryIterator addedOrdinalMappingIterator() {
return addedOrdinalMap.iterator();
}
/** Returns the removed record's ordinal for {@code keyOrdinal}, or -1 if not removed. */
public int findRemovedOrdinal(int keyOrdinal) {
return removedOrdinalMap.get(keyOrdinal);
}
/** Returns the added record's ordinal for {@code keyOrdinal}, or -1 if not added. */
public int findAddedOrdinal(int keyOrdinal) {
return addedOrdinalMap.get(keyOrdinal);
}
public HollowHistoryTypeKeyIndex getKeyIndex() {
return keyIndex;
}
public int getNumberOfNewRecords() {
return numberOfNewRecords;
}
public int getNumberOfRemovedRecords() {
return numberOfRemovedRecords;
}
public int getNumberOfModifiedRecords() {
return numberOfModifiedRecords;
}
}
| 8,951 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/history/keyindex/HollowHistoricalStateKeyOrdinalMapping.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.history.keyindex;
import com.netflix.hollow.tools.combine.OrdinalRemapper;
import java.util.HashMap;
import java.util.Map;
/**
 * Aggregates one {@link HollowHistoricalStateTypeKeyOrdinalMapping} per type in
 * a history key index, and can produce a remapped copy of the whole set (used
 * for double-snapshot transitions).
 */
public class HollowHistoricalStateKeyOrdinalMapping {
    private final Map<String, HollowHistoricalStateTypeKeyOrdinalMapping> typeMappings;

    public HollowHistoricalStateKeyOrdinalMapping(HollowHistoryKeyIndex keyIndex) {
        this.typeMappings = new HashMap<>();
        // Create one empty per-type mapping for every type the key index tracks.
        keyIndex.getTypeKeyIndexes().forEach((typeName, typeKeyIndex) ->
                typeMappings.put(typeName, new HollowHistoricalStateTypeKeyOrdinalMapping(typeName, typeKeyIndex)));
    }

    private HollowHistoricalStateKeyOrdinalMapping(Map<String, HollowHistoricalStateTypeKeyOrdinalMapping> typeMappings) {
        this.typeMappings = typeMappings;
    }

    /** Returns a new aggregate whose per-type mappings have all been remapped. */
    public HollowHistoricalStateKeyOrdinalMapping remap(OrdinalRemapper remapper) {
        Map<String, HollowHistoricalStateTypeKeyOrdinalMapping> remapped = new HashMap<>();
        for (Map.Entry<String, HollowHistoricalStateTypeKeyOrdinalMapping> entry : this.typeMappings.entrySet()) {
            remapped.put(entry.getKey(), entry.getValue().remap(remapper));
        }
        return new HollowHistoricalStateKeyOrdinalMapping(remapped);
    }

    public HollowHistoricalStateTypeKeyOrdinalMapping getTypeMapping(String typeName) {
        return typeMappings.get(typeName);
    }

    public Map<String, HollowHistoricalStateTypeKeyOrdinalMapping> getTypeMappings() {
        return typeMappings;
    }
}
| 8,952 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/traverse/TransitiveSetTraverser.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.traverse;
import static com.netflix.hollow.tools.traverse.TransitiveSetTraverser.TransitiveSetTraverserAction.ADD_REFERENCING_OUTSIDE_CLOSURE;
import static com.netflix.hollow.tools.traverse.TransitiveSetTraverser.TransitiveSetTraverserAction.REMOVE_REFERENCED_OUTSIDE_CLOSURE;
import com.netflix.hollow.core.read.engine.HollowCollectionTypeReadState;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.PopulatedOrdinalListener;
import com.netflix.hollow.core.read.engine.map.HollowMapTypeReadState;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import com.netflix.hollow.core.schema.HollowCollectionSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSchemaSorter;
import java.util.BitSet;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* The TransitiveSetTraverser can be used to find children and parent references for a selected set of records.
* A selection is represented with a Map<String, BitSet>, where each key in the map represents a type, and the corresponding BitSet
* represents the ordinals of the selected records.
* Entries in this Map will indicate a type, plus the ordinals of the selected records:
* <pre>
* {@code
* Map<String, BitSet> selection = new HashMap<String, BitSet>();
*
* /// select the movies with ordinals 1 and 4.
* BitSet selectedMovies = new BitSet();
* selectedMovies.set(1);
* selectedMovies.set(4);
*
* selection.put("Movie", movies);
* }
* </pre>
* <p>
* We can add the references, and the <i>transitive</i> references, of our selection.
* After the following call returns, our selection will be augmented with these matches:
* <pre>
* {@code TransitiveSetTraverser.addTransitiveMatches(readEngine, selection);}
* </pre>
* <p>
* Given a selection, we can also add any records which reference anything in the selection.
* This is essentially the opposite operation as above; it can be said that addTransitiveMatches
* traverses down, while addReferencingOutsideClosure traverses up. After the following call returns,
* our selection will be augmented with this selection:
*
*/
public class TransitiveSetTraverser {
private static final Logger log = Logger.getLogger(TransitiveSetTraverser.class.getName());
/**
 * Augment the given selection by adding the references, and the <i>transitive</i> references, of our selection.
 * @param stateEngine the state engine
 * @param matches the map to which matches are placed
 */
public static void addTransitiveMatches(HollowReadStateEngine stateEngine, Map<String, BitSet> matches) {
    // Visit types in reverse dependency order, so ordinals added to a referenced
    // type's selection are themselves expanded when that type is reached later.
    List<HollowSchema> reverseDependencyOrder = HollowSchemaSorter.dependencyOrderedSchemaList(stateEngine);
    Collections.reverse(reverseDependencyOrder);
    for (HollowSchema schema : reverseDependencyOrder) {
        if (matches.get(schema.getName()) != null)
            addTransitiveMatches(stateEngine, schema.getName(), matches);
    }
}
/**
 * Remove any records from the given selection which are referenced by other records not in the selection.
 * @param stateEngine the state engine
 * @param matches the matches
 */
public static void removeReferencedOutsideClosure(HollowReadStateEngine stateEngine, Map<String, BitSet> matches) {
List<HollowSchema> orderedSchemas = HollowSchemaSorter.dependencyOrderedSchemaList(stateEngine);
// Reversed dependency order: referencer types come before the types they reference.
Collections.reverse(orderedSchemas);
for(HollowSchema referencedSchema : orderedSchemas) {
if(matches.containsKey(referencedSchema.getName())) {
for(HollowSchema referencerSchema : orderedSchemas) {
// Only types strictly earlier in the reversed list (potential referencers)
// are considered; stop when reaching the referenced type itself.
if(referencerSchema == referencedSchema)
break;
// Skip once the referenced type's selection is empty — nothing left to remove.
if(matches.containsKey(referencedSchema.getName()) && matches.get(referencedSchema.getName()).cardinality() > 0)
traverseReferencesOutsideClosure(stateEngine, referencerSchema.getName(), referencedSchema.getName(), matches, REMOVE_REFERENCED_OUTSIDE_CLOSURE);
}
}
}
}
/**
 * Augment the given selection with any records outside the selection which reference
 * (or transitively reference) any records in the selection.
 * @param stateEngine the state engine
 * @param matches the matches
 */
public static void addReferencingOutsideClosure(HollowReadStateEngine stateEngine, Map<String, BitSet> matches) {
// Dependency order: referenced types come before their referencers, so newly
// added referencers are themselves considered as referenced types later on.
List<HollowSchema> orderedSchemas = HollowSchemaSorter.dependencyOrderedSchemaList(stateEngine);
for(HollowSchema referencerSchema : orderedSchemas) {
for(HollowSchema referencedSchema : orderedSchemas) {
// Only types strictly earlier in dependency order can be referenced by this type.
if(referencedSchema == referencerSchema)
break;
if(matches.containsKey(referencedSchema.getName()) && matches.get(referencedSchema.getName()).cardinality() > 0)
traverseReferencesOutsideClosure(stateEngine, referencerSchema.getName(), referencedSchema.getName(), matches, ADD_REFERENCING_OUTSIDE_CLOSURE);
}
}
}
/**
 * Dispatches transitive-match expansion for a single type to the handler
 * matching its schema type (object, collection, or map).
 */
private static void addTransitiveMatches(HollowReadStateEngine stateEngine, String type, Map<String, BitSet> matches) {
    HollowTypeReadState typeState = stateEngine.getTypeState(type);
    HollowSchema.SchemaType schemaType = typeState.getSchema().getSchemaType();
    switch (schemaType) {
        case OBJECT:
            addTransitiveMatches(stateEngine, (HollowObjectTypeReadState) typeState, matches);
            break;
        case LIST:
        case SET:
            // Lists and sets share the collection traversal.
            addTransitiveMatches(stateEngine, (HollowCollectionTypeReadState) typeState, matches);
            break;
        case MAP:
            addTransitiveMatches(stateEngine, (HollowMapTypeReadState) typeState, matches);
            break;
    }
}
/**
 * Expands the selection for an OBJECT type: for every selected ordinal, marks
 * the ordinals it references through each REFERENCE field in that child type's
 * selection bitset.
 */
private static void addTransitiveMatches(HollowReadStateEngine stateEngine, HollowObjectTypeReadState typeState, Map<String, BitSet> matches) {
HollowObjectSchema schema = typeState.getSchema();
BitSet matchingOrdinals = getOrCreateBitSet(matches, schema.getName(), typeState.maxOrdinal());
// One target bitset per field; null for non-reference fields or empty/absent child types.
BitSet childOrdinals[] = new BitSet[schema.numFields()];
for(int i=0;i<schema.numFields();i++) {
if(schema.getFieldType(i) == FieldType.REFERENCE) {
HollowTypeReadState childTypeState = stateEngine.getTypeState(schema.getReferencedType(i));
if(childTypeState != null && childTypeState.maxOrdinal() >= 0)
childOrdinals[i] = getOrCreateBitSet(matches, schema.getReferencedType(i), childTypeState.maxOrdinal());
}
}
// Iterate selected ordinals and mark each non-null reference target.
int ordinal = matchingOrdinals.nextSetBit(0);
while(ordinal != -1) {
for(int i=0;i<childOrdinals.length;i++) {
if(childOrdinals[i] != null) {
int childOrdinal = typeState.readOrdinal(ordinal, i);
// -1 means a null reference.
if(childOrdinal != -1) {
childOrdinals[i].set(childOrdinal);
}
}
}
ordinal = matchingOrdinals.nextSetBit(ordinal + 1);
}
}
/**
 * Expands the selection for a LIST/SET type: marks every element ordinal of
 * every selected collection in the element type's selection bitset.
 */
private static void addTransitiveMatches(HollowReadStateEngine stateEngine, HollowCollectionTypeReadState typeState, Map<String, BitSet> matches) {
HollowCollectionSchema schema = typeState.getSchema();
BitSet matchingOrdinals = getOrCreateBitSet(matches, schema.getName(), typeState.maxOrdinal());
HollowTypeReadState childTypeState = stateEngine.getTypeState(schema.getElementType());
if(childTypeState != null && childTypeState.maxOrdinal() >= 0) {
BitSet childOrdinals = getOrCreateBitSet(matches, schema.getElementType(), childTypeState.maxOrdinal());
int ordinal = matchingOrdinals.nextSetBit(0);
while(ordinal != -1) {
try {
HollowOrdinalIterator iter = typeState.ordinalIterator(ordinal);
int elementOrdinal = iter.next();
while(elementOrdinal != HollowOrdinalIterator.NO_MORE_ORDINALS) {
childOrdinals.set(elementOrdinal);
elementOrdinal = iter.next();
}
} catch(Exception e) {
// Deliberately best-effort: a failure iterating one collection is logged
// and the remaining selected ordinals are still processed.
log.log(Level.SEVERE, "Add transitive matches failed", e);
}
ordinal = matchingOrdinals.nextSetBit(ordinal + 1);
}
// NOTE(review): getOrCreateBitSet presumably already registered childOrdinals
// in matches, which would make this put redundant — confirm before removing.
if(!childOrdinals.isEmpty()) {
matches.put(schema.getElementType(), childOrdinals);
}
}
}
/**
 * Expands the selection for a MAP type: marks the key and value ordinals of
 * every entry of every selected map in the respective types' selection bitsets.
 */
private static void addTransitiveMatches(HollowReadStateEngine stateEngine, HollowMapTypeReadState typeState, Map<String, BitSet> matches) {
HollowMapSchema schema = typeState.getSchema();
BitSet matchingOrdinals = getOrCreateBitSet(matches, schema.getName(), typeState.maxOrdinal());
HollowTypeReadState keyTypeState = stateEngine.getTypeState(schema.getKeyType());
HollowTypeReadState valueTypeState = stateEngine.getTypeState(schema.getValueType());
// Null when the key/value type is absent or empty — skipped during marking below.
BitSet keyOrdinals = keyTypeState == null || keyTypeState.maxOrdinal() < 0 ? null : getOrCreateBitSet(matches, schema.getKeyType(), keyTypeState.maxOrdinal());
BitSet valueOrdinals = valueTypeState == null || valueTypeState.maxOrdinal() < 0 ? null : getOrCreateBitSet(matches, schema.getValueType(), valueTypeState.maxOrdinal());
int ordinal = matchingOrdinals.nextSetBit(0);
while(ordinal != -1) {
HollowMapEntryOrdinalIterator iter = typeState.ordinalIterator(ordinal);
while(iter.next()) {
if(keyOrdinals != null)
keyOrdinals.set(iter.getKey());
if(valueOrdinals != null)
valueOrdinals.set(iter.getValue());
}
ordinal = matchingOrdinals.nextSetBit(ordinal + 1);
}
}
/**
 * Dispatches closure traversal from one referencer type toward one referenced
 * type, selecting the handler for the referencer's schema type.
 */
private static void traverseReferencesOutsideClosure(HollowReadStateEngine stateEngine, String referencerType, String referencedType, Map<String, BitSet> matches, TransitiveSetTraverserAction action) {
    HollowTypeReadState referencerTypeState = stateEngine.getTypeState(referencerType);
    HollowSchema.SchemaType referencerSchemaType = referencerTypeState.getSchema().getSchemaType();
    switch (referencerSchemaType) {
        case OBJECT:
            traverseReferencesOutsideClosure(stateEngine, (HollowObjectTypeReadState) referencerTypeState, referencedType, matches, action);
            break;
        case LIST:
        case SET:
            // Lists and sets share the collection traversal.
            traverseReferencesOutsideClosure(stateEngine, (HollowCollectionTypeReadState) referencerTypeState, referencedType, matches, action);
            break;
        case MAP:
            traverseReferencesOutsideClosure(stateEngine, (HollowMapTypeReadState) referencerTypeState, referencedType, matches, action);
            break;
    }
}
/**
 * For an OBJECT referencer type: scans every populated referencer ordinal that
 * is NOT itself in the closure, and when one of its REFERENCE fields points at
 * a selected ordinal of {@code referencedType}, applies {@code action}
 * (add the referencer, or remove the referenced record, per the action).
 */
private static void traverseReferencesOutsideClosure(HollowReadStateEngine stateEngine, HollowObjectTypeReadState referencerTypeState, String referencedType, Map<String, BitSet> closureMatches, TransitiveSetTraverserAction action) {
HollowObjectSchema schema = referencerTypeState.getSchema();
BitSet referencedClosureMatches = getOrCreateBitSet(closureMatches, referencedType, stateEngine.getTypeState(referencedType).maxOrdinal());
BitSet referencerClosureMatches = getOrCreateBitSet(closureMatches, schema.getName(), referencerTypeState.maxOrdinal());
for(int i=0;i<schema.numFields();i++) {
// Only fields that reference the target type are relevant.
if(schema.getFieldType(i) == FieldType.REFERENCE && referencedType.equals(schema.getReferencedType(i))) {
BitSet allReferencerOrdinals = getPopulatedOrdinals(referencerTypeState);
int ordinal = allReferencerOrdinals.nextSetBit(0);
while(ordinal != -1) {
// Referencers already inside the closure are not "outside" references.
if(!referencerClosureMatches.get(ordinal)) {
int refOrdinal = referencerTypeState.readOrdinal(ordinal, i);
// -1 means a null reference.
if(refOrdinal != -1) {
if(referencedClosureMatches.get(refOrdinal)) {
action.foundReference(referencerClosureMatches, ordinal, referencedClosureMatches, refOrdinal);
}
}
}
ordinal = allReferencerOrdinals.nextSetBit(ordinal + 1);
}
}
}
}
/**
 * Collection (LIST/SET) traversal: if the collection's element type matches
 * {@code referencedType}, scans each populated collection record outside the closure and reports
 * every element it contains which is inside the closure.
 */
private static void traverseReferencesOutsideClosure(HollowReadStateEngine stateEngine, HollowCollectionTypeReadState referencerTypeState, String referencedType, Map<String, BitSet> closureMatches, TransitiveSetTraverserAction action) {
    HollowCollectionSchema schema = referencerTypeState.getSchema();
    if(!referencedType.equals(schema.getElementType()))
        return;

    BitSet referencedMatches = getOrCreateBitSet(closureMatches, referencedType, stateEngine.getTypeState(referencedType).maxOrdinal());
    BitSet referencerMatches = getOrCreateBitSet(closureMatches, schema.getName(), referencerTypeState.maxOrdinal());

    BitSet populatedOrdinals = getPopulatedOrdinals(referencerTypeState);
    for(int ordinal = populatedOrdinals.nextSetBit(0); ordinal != -1; ordinal = populatedOrdinals.nextSetBit(ordinal + 1)) {
        if(referencerMatches.get(ordinal))
            continue;  // only collections outside the closure are of interest

        HollowOrdinalIterator elementIter = referencerTypeState.ordinalIterator(ordinal);
        for(int refOrdinal = elementIter.next(); refOrdinal != HollowOrdinalIterator.NO_MORE_ORDINALS; refOrdinal = elementIter.next()) {
            if(referencedMatches.get(refOrdinal))
                action.foundReference(referencerMatches, ordinal, referencedMatches, refOrdinal);
        }
    }
}
/**
 * Map traversal: if the map's key and/or value type matches {@code referencedType}, scans each
 * populated map record outside the closure and reports every matching key/value entry ordinal
 * which is inside the closure.
 */
private static void traverseReferencesOutsideClosure(HollowReadStateEngine stateEngine, HollowMapTypeReadState referencerTypeState, String referencedType, Map<String, BitSet> closureMatches, TransitiveSetTraverserAction action) {
    HollowMapSchema schema = referencerTypeState.getSchema();

    // NOTE: these bitsets are created (added to closureMatches) even if neither the key nor the
    // value type matches, preserving the original side effect on the closureMatches map.
    BitSet referencedMatches = getOrCreateBitSet(closureMatches, referencedType, stateEngine.getTypeState(referencedType).maxOrdinal());
    BitSet referencerMatches = getOrCreateBitSet(closureMatches, schema.getName(), referencerTypeState.maxOrdinal());
    BitSet populatedOrdinals = getPopulatedOrdinals(referencerTypeState);

    boolean checkKeys = referencedType.equals(schema.getKeyType());
    boolean checkValues = referencedType.equals(schema.getValueType());
    if(!checkKeys && !checkValues)
        return;

    for(int ordinal = populatedOrdinals.nextSetBit(0); ordinal != -1; ordinal = populatedOrdinals.nextSetBit(ordinal + 1)) {
        if(referencerMatches.get(ordinal))
            continue;  // only maps outside the closure are of interest

        HollowMapEntryOrdinalIterator entryIter = referencerTypeState.ordinalIterator(ordinal);
        while(entryIter.next()) {
            if(checkKeys) {
                int keyOrdinal = entryIter.getKey();
                if(referencedMatches.get(keyOrdinal))
                    action.foundReference(referencerMatches, ordinal, referencedMatches, keyOrdinal);
            }
            if(checkValues) {
                int valueOrdinal = entryIter.getValue();
                if(referencedMatches.get(valueOrdinal))
                    action.foundReference(referencerMatches, ordinal, referencedMatches, valueOrdinal);
            }
        }
    }
}
/**
 * Returns the BitSet of currently-populated ordinals for the given type, as tracked by its
 * attached {@code PopulatedOrdinalListener}.
 */
private static BitSet getPopulatedOrdinals(HollowTypeReadState typeState) {
    PopulatedOrdinalListener listener = typeState.getListener(PopulatedOrdinalListener.class);
    return listener.getPopulatedOrdinals();
}
/**
 * Returns the BitSet registered for {@code typeName}, creating and registering a new one (sized
 * for {@code numBitsRequired} bits, treated as 0 when negative) on first access.
 */
private static BitSet getOrCreateBitSet(Map<String, BitSet> bitSets, String typeName, int numBitsRequired) {
    BitSet existing = bitSets.get(typeName);
    if(existing != null)
        return existing;

    BitSet created = new BitSet(Math.max(numBitsRequired, 0));
    bitSets.put(typeName, created);
    return created;
}
/**
 * Callback invoked by the traversal methods whenever a record OUTSIDE the closure is found to
 * reference a record INSIDE the closure.  The two stock implementations below either shrink the
 * closure (drop the referenced record) or grow it (pull in the referencer).
 */
public static interface TransitiveSetTraverserAction {
    /**
     * Called once per discovered outside-to-inside reference.
     *
     * @param referencerClosureMatches closure bitset for the referencing type
     * @param referencerOrdinal        ordinal of the referencing record (not in the closure)
     * @param referencedClosureMatches closure bitset for the referenced type
     * @param referencedOrdinal        ordinal of the referenced record (in the closure)
     */
    public void foundReference(BitSet referencerClosureMatches, int referencerOrdinal, BitSet referencedClosureMatches, int referencedOrdinal);

    /** Removes the referenced record from the closure when it is referenced from outside. */
    public static final TransitiveSetTraverserAction REMOVE_REFERENCED_OUTSIDE_CLOSURE = new TransitiveSetTraverserAction() {
        @Override
        public void foundReference(BitSet referencerClosureMatches, int referencerOrdinal, BitSet referencedClosureMatches, int referencedOrdinal) {
            referencedClosureMatches.clear(referencedOrdinal);
        }
    };

    /** Adds the outside referencer to the closure when it references a record inside it. */
    public static final TransitiveSetTraverserAction ADD_REFERENCING_OUTSIDE_CLOSURE = new TransitiveSetTraverserAction() {
        @Override
        public void foundReference(BitSet referencerClosureMatches, int referencerOrdinal, BitSet referencedClosureMatches, int referencedOrdinal) {
            referencerClosureMatches.set(referencerOrdinal);
        }
    };
}
}
| 8,953 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/filter/FixedLengthArrayWriter.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.filter;
import com.netflix.hollow.core.memory.encoding.FixedLengthElementArray;
/**
 * Appends successive fixed-length bit fields into a {@link FixedLengthElementArray}, maintaining
 * an internal bit cursor so callers never have to track offsets themselves.
 */
class FixedLengthArrayWriter {

    private final FixedLengthElementArray target;
    private long nextWriteBit;  // bit offset at which the next field will be written

    public FixedLengthArrayWriter(FixedLengthElementArray arr) {
        this.target = arr;
    }

    /**
     * Writes {@code value} as a {@code numBits}-wide field at the current cursor position, then
     * advances the cursor past the field.
     */
    public void writeField(long value, int numBits) {
        long position = nextWriteBit;
        nextWriteBit = position + numBits;
        target.setElementValue(position, numBits, value);
    }
}
| 8,954 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/filter/FilteredHollowBlobWriter.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.filter;
import static com.netflix.hollow.core.util.IOUtils.copySegmentedLongArray;
import static com.netflix.hollow.core.util.IOUtils.copyVInt;
import static com.netflix.hollow.core.util.IOUtils.copyVLong;
import static com.netflix.hollow.tools.filter.FilteredHollowBlobWriterStreamAndFilter.streamsOnly;
import com.netflix.hollow.core.HollowBlobHeader;
import com.netflix.hollow.core.memory.encoding.FixedLengthElementArray;
import com.netflix.hollow.core.memory.encoding.GapEncodedVariableLengthIntegerReader;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.memory.pool.ArraySegmentRecycler;
import com.netflix.hollow.core.memory.pool.WastefulRecycler;
import com.netflix.hollow.core.read.HollowBlobInput;
import com.netflix.hollow.core.read.engine.HollowBlobHeaderReader;
import com.netflix.hollow.core.read.engine.list.HollowListTypeReadState;
import com.netflix.hollow.core.read.engine.map.HollowMapTypeReadState;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.read.engine.set.HollowSetTypeReadState;
import com.netflix.hollow.core.read.filter.HollowFilterConfig;
import com.netflix.hollow.core.read.filter.HollowFilterConfig.ObjectFilterConfig;
import com.netflix.hollow.core.schema.HollowListSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSchema.SchemaType;
import com.netflix.hollow.core.schema.HollowSetSchema;
import com.netflix.hollow.core.util.IOUtils;
import com.netflix.hollow.core.write.HollowBlobHeaderWriter;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* The FilteredHollowBlobWriter can be used to pre-filter data from serialized blobs before disseminating to
* specific clients.
* <p>
* Note that filter functionality is more commonly applied at the consumer at load-time. Pre-filtering at the producer
* prior to disseminating to clients is unlikely to be important for performance reasons, but may be desirable e.g. for
* security reasons.
*
*/
public class FilteredHollowBlobWriter {

    private final HollowFilterConfig configs[];
    private final HollowBlobHeaderReader headerReader;
    private final HollowBlobHeaderWriter headerWriter;
    private final ArraySegmentRecycler memoryRecycler;
    // NOTE(review): populated in the constructor but never read elsewhere in this class
    private final Set<String> expectedTypes;

    /**
     * A FilteredHollowBlobWriter should be configured with one or more configs.
     *
     * Specifying multiple configs will allow for the writing of multiple filtered blobs in parallel.
     *
     * @param configs the filter configurations
     */
    public FilteredHollowBlobWriter(HollowFilterConfig... configs) {
        this.configs = configs;
        this.headerReader = new HollowBlobHeaderReader();
        this.headerWriter = new HollowBlobHeaderWriter();
        this.memoryRecycler = WastefulRecycler.DEFAULT_INSTANCE;
        this.expectedTypes = new HashSet<String>();
        for(HollowFilterConfig config : configs)
            expectedTypes.addAll(config.getSpecifiedTypes());
    }

    /**
     * Filter a snapshot, provided via the InputStream, to each of the OutputStreams.
     *
     * The number of provided OutputStreams should be equal to the number of provided HollowFilterConfigs at instantiation.
     *
     * @param in the input stream to the snapshot
     * @param out the output streams to write the filtered snapshot
     * @throws IOException if the snapshot cannot be filtered
     */
    public void filterSnapshot(InputStream in, OutputStream... out) throws IOException {
        filter(false, in, out);
    }

    /**
     * Filter a delta (or reversedelta), provided via the InputStream, to each of the OutputStreams.
     *
     * The number of provided OutputStreams should be equal to the number of provided HollowFilterConfigs at instantiation.
     *
     * @param in the input stream to the delta
     * @param out the output streams to write the filtered delta
     * @throws IOException if the delta cannot be filtered
     */
    public void filterDelta(InputStream in, OutputStream... out) throws IOException {
        filter(true, in, out);
    }

    /**
     * Filters a serialized blob to each of the OutputStreams, one per filter config supplied at
     * instantiation.  Each output receives the blob header (with its schema list reduced to the
     * config's types/fields) followed by the filtered per-type data.
     *
     * @param delta true when the input is a delta or reverse delta, false for a snapshot
     * @param is the input stream to the unfiltered blob
     * @param out one output stream per filter config
     * @throws IOException if the blob cannot be read or the filtered output cannot be written
     */
    public void filter(boolean delta, InputStream is, OutputStream... out) throws IOException {
        HollowBlobInput in = HollowBlobInput.serial(is);
        FilteredHollowBlobWriterStreamAndFilter allStreamAndFilters[] = FilteredHollowBlobWriterStreamAndFilter.combine(out, configs);

        // write each output's header with its own filtered schema list
        HollowBlobHeader header = headerReader.readHeader(in);
        List<HollowSchema> unfilteredSchemaList = header.getSchemas();
        for(FilteredHollowBlobWriterStreamAndFilter streamAndFilter : allStreamAndFilters) {
            List<HollowSchema> filteredSchemaList = getFilteredSchemaList(unfilteredSchemaList, streamAndFilter.getConfig());
            header.setSchemas(filteredSchemaList);
            headerWriter.writeHeader(header, streamAndFilter.getStream());
            VarInt.writeVInt(streamAndFilter.getStream(), filteredSchemaList.size());
        }

        int numStates = VarInt.readVInt(in);

        // NOTE(review): encounteredTypes is collected here but not consumed afterwards
        Set<String> encounteredTypes = new HashSet<String>();

        for(int i=0;i<numStates;i++) {
            HollowSchema schema = HollowSchema.readFrom(in);
            encounteredTypes.add(schema.getName());

            int numShards = readNumShards(in);

            // only the outputs whose config includes this type receive its data
            FilteredHollowBlobWriterStreamAndFilter[] streamsWithType = FilteredHollowBlobWriterStreamAndFilter.withType(schema.getName(), allStreamAndFilters);

            if(schema instanceof HollowObjectSchema) {
                if(streamsWithType.length == 0)
                    HollowObjectTypeReadState.discardType(in, (HollowObjectSchema)schema, numShards, delta);
                else
                    copyFilteredObjectState(delta, in, streamsWithType, (HollowObjectSchema)schema, numShards);
            } else {
                for(int j=0;j<streamsWithType.length;j++) {
                    schema.writeTo(streamsWithType[j].getStream());
                    VarInt.writeVInt(streamsWithType[j].getStream(), 1 + VarInt.sizeOfVInt(numShards));
                    VarInt.writeVInt(streamsWithType[j].getStream(), 0); /// forwards compatibility
                    VarInt.writeVInt(streamsWithType[j].getStream(), numShards);
                }

                // non-object types cannot be field-filtered: either copy whole or discard whole
                if (schema instanceof HollowListSchema) {
                    if(streamsWithType.length == 0)
                        HollowListTypeReadState.discardType(in, numShards, delta);
                    else
                        copyListState(delta, in, streamsOnly(streamsWithType), numShards);
                } else if(schema instanceof HollowSetSchema) {
                    if(streamsWithType.length == 0)
                        HollowSetTypeReadState.discardType(in, numShards, delta);
                    else
                        copySetState(delta, in, streamsOnly(streamsWithType), numShards);
                } else if(schema instanceof HollowMapSchema) {
                    if(streamsWithType.length == 0)
                        HollowMapTypeReadState.discardType(in, numShards, delta);
                    else
                        copyMapState(delta, in, streamsOnly(streamsWithType), numShards);
                }
            }
        }
    }

    /**
     * Reads the per-type shard count header.  Blobs produced before Hollow 2.1.0 carry a zero
     * marker and always have exactly one shard.
     */
    private int readNumShards(HollowBlobInput in) throws IOException {
        int backwardsCompatibilityBytes = VarInt.readVInt(in);
        if(backwardsCompatibilityBytes == 0)
            return 1; /// produced by a version of hollow prior to 2.1.0, always only 1 shard.

        skipForwardsCompatibilityBytes(in);

        return VarInt.readVInt(in);
    }

    /** Skips over the variable-length forwards-compatibility section of the type header. */
    private void skipForwardsCompatibilityBytes(HollowBlobInput in) throws IOException {
        int bytesToSkip = VarInt.readVInt(in);
        while(bytesToSkip > 0) {
            int skippedBytes = (int)in.skipBytes(bytesToSkip);
            if(skippedBytes < 0)
                throw new EOFException();
            bytesToSkip -= skippedBytes;
        }
    }

    /**
     * Copies an OBJECT type's serialized state to each output, retaining only the fields each
     * output's filter config includes.  Fixed-length field data is re-packed into per-output
     * arrays; var-length field data is copied only to outputs which include that field.
     */
    @SuppressWarnings("unchecked")
    private void copyFilteredObjectState(boolean delta, HollowBlobInput in, FilteredHollowBlobWriterStreamAndFilter[] streamAndFilters, HollowObjectSchema schema, int numShards) throws IOException {
        DataOutputStream[] os = streamsOnly(streamAndFilters);
        HollowObjectSchema[] filteredObjectSchemas = new HollowObjectSchema[os.length];

        for(int i=0;i<streamAndFilters.length;i++) {
            HollowObjectSchema filteredObjectSchema = getFilteredObjectSchema(schema, streamAndFilters[i].getConfig());
            filteredObjectSchemas[i] = filteredObjectSchema;
            filteredObjectSchema.writeTo(streamAndFilters[i].getStream());
            VarInt.writeVInt(streamAndFilters[i].getStream(), 1 + VarInt.sizeOfVInt(numShards));
            VarInt.writeVInt(streamAndFilters[i].getStream(), 0); /// forwards compatibility
            VarInt.writeVInt(streamAndFilters[i].getStream(), numShards);
        }

        if(numShards > 1)
            copyVInt(in, os);

        for(int shard=0;shard<numShards;shard++) {
            int maxShardOrdinal = copyVInt(in, os);
            int numRecordsToCopy = maxShardOrdinal + 1;

            if(delta) {
                // removal ordinals pass through unchanged; added ordinals determine record count
                GapEncodedVariableLengthIntegerReader.copyEncodedDeltaOrdinals(in, os);
                GapEncodedVariableLengthIntegerReader addedOrdinals = GapEncodedVariableLengthIntegerReader.readEncodedDeltaOrdinals(in, memoryRecycler);
                numRecordsToCopy = addedOrdinals.remainingElements();
                for(DataOutputStream stream : os)
                    addedOrdinals.writeTo(stream);
            }

            /// SETUP ///
            int bitsPerField[] = new int[schema.numFields()];
            for(int i=0;i<schema.numFields();i++)
                bitsPerField[i] = VarInt.readVInt(in);

            FixedLengthElementArray fixedLengthArraysPerStream[] = new FixedLengthElementArray[os.length];
            long bitsRequiredPerStream[] = new long[os.length];
            // for each source field index, the writers of every output which retains that field
            List<FixedLengthArrayWriter> fixedLengthArraysPerField[] = (List<FixedLengthArrayWriter>[])new List[schema.numFields()];
            for(int i=0;i<fixedLengthArraysPerField.length;i++)
                fixedLengthArraysPerField[i] = new ArrayList<FixedLengthArrayWriter>();

            for(int i=0;i<streamAndFilters.length;i++) {
                long bitsPerRecord = writeBitsPerField(schema, bitsPerField, filteredObjectSchemas[i], streamAndFilters[i].getStream());
                bitsRequiredPerStream[i] = bitsPerRecord * numRecordsToCopy;
                fixedLengthArraysPerStream[i] = new FixedLengthElementArray(memoryRecycler, bitsRequiredPerStream[i]);

                FixedLengthArrayWriter filteredArrayWriter = new FixedLengthArrayWriter(fixedLengthArraysPerStream[i]);

                for(int j=0;j<schema.numFields();j++) {
                    if(filteredObjectSchemas[i].getPosition(schema.getFieldName(j)) != -1) {
                        fixedLengthArraysPerField[j].add(filteredArrayWriter);
                    }
                }
            }
            /// END SETUP ///

            /// read the unfiltered long array into memory
            FixedLengthElementArray unfilteredFixedLengthFields = FixedLengthElementArray.newFrom(in, memoryRecycler);

            /// populate the filtered arrays (each field just gets written to all FixedLengthArrayWriters assigned to its field index)
            long bitsPerRecord = 0;
            for(int fieldBits : bitsPerField)
                bitsPerRecord += fieldBits;

            long stopBit = bitsPerRecord * numRecordsToCopy;
            long bitCursor = 0;
            int fieldCursor = 0;

            while(bitCursor < stopBit) {
                if(!fixedLengthArraysPerField[fieldCursor].isEmpty()) {
                    // fields wider than 56 bits need the large-element read path
                    long fieldValue = bitsPerField[fieldCursor] > 56 ?
                            unfilteredFixedLengthFields.getLargeElementValue(bitCursor, bitsPerField[fieldCursor])
                            : unfilteredFixedLengthFields.getElementValue(bitCursor, bitsPerField[fieldCursor]);

                    for(int i=0;i<fixedLengthArraysPerField[fieldCursor].size();i++)
                        fixedLengthArraysPerField[fieldCursor].get(i).writeField(fieldValue, bitsPerField[fieldCursor]);
                }

                bitCursor += bitsPerField[fieldCursor];
                if(++fieldCursor == schema.numFields())
                    fieldCursor = 0;
            }

            /// write the filtered arrays
            for(int i=0;i<os.length;i++) {
                long numLongsRequired = bitsRequiredPerStream[i] == 0 ? 0 : ((bitsRequiredPerStream[i] - 1) / 64) + 1;
                fixedLengthArraysPerStream[i].writeTo(os[i], numLongsRequired);
            }

            /// copy the var length arrays for populated fields
            for(int i=0;i<schema.numFields();i++) {
                List<DataOutputStream> streamsWithFieldList = new ArrayList<DataOutputStream>();

                for(int j=0;j<streamAndFilters.length;j++) {
                    ObjectFilterConfig objectTypeConfig = streamAndFilters[j].getConfig().getObjectTypeConfig(schema.getName());
                    if(objectTypeConfig.includesField(schema.getFieldName(i)))
                        streamsWithFieldList.add(streamAndFilters[j].getStream());
                }

                DataOutputStream streamsWithField[] = new DataOutputStream[streamsWithFieldList.size()];
                streamsWithField = streamsWithFieldList.toArray(streamsWithField);

                long numBytesInVarLengthData = IOUtils.copyVLong(in, streamsWithField);
                IOUtils.copyBytes(in, streamsWithField, numBytesInVarLengthData);
            }
        }

        if(!delta)
            copySnapshotPopulatedOrdinals(in, os);
    }

    /**
     * Writes the bit width of each retained field to the output and returns the total bits per
     * record after filtering.
     */
    private long writeBitsPerField(HollowObjectSchema unfilteredSchema, int bitsPerField[], HollowObjectSchema filteredSchema, DataOutputStream os) throws IOException {
        long bitsPerRecord = 0;

        for(int i=0;i<unfilteredSchema.numFields();i++) {
            if(filteredSchema.getPosition(unfilteredSchema.getFieldName(i)) != -1) {
                VarInt.writeVInt(os, bitsPerField[i]);
                bitsPerRecord += bitsPerField[i];
            }
        }

        return bitsPerRecord;
    }

    /** Returns the subset of schemas included by the given config, with object schemas field-filtered. */
    private List<HollowSchema> getFilteredSchemaList(List<HollowSchema> schemaList, HollowFilterConfig filterConfig) {
        List<HollowSchema> filteredList = new ArrayList<HollowSchema>();

        for(HollowSchema schema : schemaList) {
            HollowSchema filteredSchema = getFilteredSchema(schema, filterConfig);
            if(filteredSchema != null)
                filteredList.add(filteredSchema);
        }

        return filteredList;
    }

    /**
     * Returns the (possibly field-filtered) schema for an included type, or null when the config
     * excludes the type entirely.
     */
    private HollowSchema getFilteredSchema(HollowSchema schema, HollowFilterConfig filterConfig) {
        if(filterConfig.doesIncludeType(schema.getName())) {
            if(schema.getSchemaType() == SchemaType.OBJECT)
                return getFilteredObjectSchema((HollowObjectSchema) schema, filterConfig);
            return schema;
        }
        return null;
    }

    /**
     * Builds an object schema containing only the fields the config includes; returns the
     * original schema unchanged when all fields are included.
     */
    private HollowObjectSchema getFilteredObjectSchema(HollowObjectSchema schema, HollowFilterConfig filterConfig) {
        ObjectFilterConfig typeConfig = filterConfig.getObjectTypeConfig(schema.getName());

        int numIncludedFields = 0;

        for(int i=0;i<schema.numFields();i++) {
            if(typeConfig.includesField(schema.getFieldName(i)))
                numIncludedFields++;
        }

        if(numIncludedFields == schema.numFields())
            return schema;

        HollowObjectSchema filteredSchema = new HollowObjectSchema(schema.getName(), numIncludedFields, schema.getPrimaryKey());

        for(int i=0;i<schema.numFields();i++) {
            if(typeConfig.includesField(schema.getFieldName(i)))
                filteredSchema.addField(schema.getFieldName(i), schema.getFieldType(i), schema.getReferencedType(i));
        }

        return filteredSchema;
    }

    /** Copies a LIST type's serialized state, unmodified, to every output. */
    private void copyListState(boolean delta, HollowBlobInput in, DataOutputStream[] os, int numShards) throws IOException {
        if(numShards > 1)
            copyVInt(in, os);

        for(int shard=0;shard<numShards;shard++) {
            copyVInt(in, os);  /// maxOrdinal

            if(delta) {
                GapEncodedVariableLengthIntegerReader.copyEncodedDeltaOrdinals(in, os);
                GapEncodedVariableLengthIntegerReader.copyEncodedDeltaOrdinals(in, os);
            }

            copyVInt(in, os);  /// bitsPerListPointer
            copyVInt(in, os);  /// bitsPerElement
            copyVLong(in, os); /// totalNumberOfElements

            copySegmentedLongArray(in, os);
            copySegmentedLongArray(in, os);
        }

        if(!delta)
            copySnapshotPopulatedOrdinals(in, os);
    }

    /** Copies a SET type's serialized state, unmodified, to every output. */
    private void copySetState(boolean delta, HollowBlobInput in, DataOutputStream[] os, int numShards) throws IOException {
        if(numShards > 1)
            copyVInt(in, os);

        for(int shard=0;shard<numShards;shard++) {
            copyVInt(in, os);  /// max ordinal

            if(delta) {
                GapEncodedVariableLengthIntegerReader.copyEncodedDeltaOrdinals(in, os);
                GapEncodedVariableLengthIntegerReader.copyEncodedDeltaOrdinals(in, os);
            }

            copyVInt(in, os);  /// bitsPerSetPointer
            copyVInt(in, os);  /// bitsPerSetSizeValue
            copyVInt(in, os);  /// bitsPerElement
            copyVLong(in, os); /// totalNumberOfBuckets

            copySegmentedLongArray(in, os);
            copySegmentedLongArray(in, os);
        }

        if(!delta)
            copySnapshotPopulatedOrdinals(in, os);
    }

    /** Copies a MAP type's serialized state, unmodified, to every output. */
    private void copyMapState(boolean delta, HollowBlobInput in, DataOutputStream[] os, int numShards) throws IOException {
        if(numShards > 1)
            copyVInt(in, os);

        for(int shard=0;shard<numShards;shard++) {
            copyVInt(in, os);  /// max ordinal

            if(delta) {
                GapEncodedVariableLengthIntegerReader.copyEncodedDeltaOrdinals(in, os);
                GapEncodedVariableLengthIntegerReader.copyEncodedDeltaOrdinals(in, os);
            }

            copyVInt(in, os);  /// bitsPerMapPointer
            copyVInt(in, os);  /// bitsPerMapSizeValue
            copyVInt(in, os);  /// bitsPerKeyElement
            copyVInt(in, os);  /// bitsPerValueElement
            copyVLong(in, os); /// totalNumberOfBuckets

            copySegmentedLongArray(in, os);
            copySegmentedLongArray(in, os);
        }

        if(!delta)
            copySnapshotPopulatedOrdinals(in, os);
    }

    /** Copies a snapshot's trailing populated-ordinals long array (length-prefixed) to every output. */
    private void copySnapshotPopulatedOrdinals(HollowBlobInput in, DataOutputStream[] os) throws IOException {
        int numLongs = in.readInt();

        for(int i=0;i<os.length;i++)
            os[i].writeInt(numLongs);

        IOUtils.copyBytes(in, os, numLongs * 8);
    }
}
| 8,955 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/filter/FilteredHollowBlobWriterStreamAndFilter.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.filter;
import com.netflix.hollow.core.read.filter.HollowFilterConfig;
import java.io.DataOutputStream;
import java.io.OutputStream;
class FilteredHollowBlobWriterStreamAndFilter {
private final DataOutputStream dos;
private final HollowFilterConfig config;
FilteredHollowBlobWriterStreamAndFilter(DataOutputStream dos, HollowFilterConfig config) {
this.dos = dos;
this.config = config;
}
public DataOutputStream getStream() {
return dos;
}
public HollowFilterConfig getConfig() {
return config;
}
public static DataOutputStream[] streamsOnly(FilteredHollowBlobWriterStreamAndFilter[] streamAndFilters) {
DataOutputStream streams[] = new DataOutputStream[streamAndFilters.length];
for(int i=0;i<streams.length;i++)
streams[i] = streamAndFilters[i].getStream();
return streams;
}
public static FilteredHollowBlobWriterStreamAndFilter[] combine(OutputStream streams[], HollowFilterConfig configs[]) {
if(streams.length != configs.length)
throw new IllegalArgumentException("Must provide exactly the same number of streams as configs");
FilteredHollowBlobWriterStreamAndFilter streamAndFilters[] = new FilteredHollowBlobWriterStreamAndFilter[streams.length];
for(int i=0;i<streams.length;i++)
streamAndFilters[i] = new FilteredHollowBlobWriterStreamAndFilter(new DataOutputStream(streams[i]), configs[i]);
return streamAndFilters;
}
public static FilteredHollowBlobWriterStreamAndFilter[] withType(String typeName, FilteredHollowBlobWriterStreamAndFilter[] allStreamAndFilters) {
int countConfigsWithType = 0;
for(int i=0;i<allStreamAndFilters.length;i++) {
if(allStreamAndFilters[i].getConfig().doesIncludeType(typeName))
countConfigsWithType++;
}
FilteredHollowBlobWriterStreamAndFilter[] streamAndFiltersWithType = new FilteredHollowBlobWriterStreamAndFilter[countConfigsWithType];
int withTypeCounter = 0;
for(int i=0;i<allStreamAndFilters.length;i++) {
if(allStreamAndFilters[i].getConfig().doesIncludeType(typeName))
streamAndFiltersWithType[withTypeCounter++] = allStreamAndFilters[i];
}
return streamAndFiltersWithType;
}
} | 8,956 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/checksum/HollowChecksum.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.checksum;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.util.SimultaneousExecutor;
import java.util.Collections;
import java.util.Vector;
/**
* Can be used to generate checksums for data contained in a {@link HollowReadStateEngine}.
* <p>
* Note that the checksums here incorporate the positions of data in sets and maps, which may vary based on hash collisions.
*/
public class HollowChecksum {

    // running checksum; every apply* mixes a new value in and re-hashes the accumulator
    private int currentChecksum = 0;

    public HollowChecksum() { }

    /** Mixes the given int value into the running checksum. */
    public void applyInt(int value) {
        currentChecksum ^= HashCodes.hashInt(value);
        currentChecksum = HashCodes.hashInt(currentChecksum);
    }

    /** Mixes the given long value into the running checksum. */
    public void applyLong(long value) {
        currentChecksum ^= HashCodes.hashLong(value);
        currentChecksum = HashCodes.hashInt(currentChecksum);
    }

    /** Returns the current checksum value. */
    public int intValue() {
        return currentChecksum;
    }

    @Override
    public boolean equals(Object other) {
        if(other instanceof HollowChecksum)
            return ((HollowChecksum) other).currentChecksum == currentChecksum;
        return false;
    }

    @Override
    public int hashCode() {
        return currentChecksum;
    }

    @Override  // added: overrides Object.toString, consistent with equals/hashCode above
    public String toString() {
        return Integer.toHexString(currentChecksum);
    }

    /** Computes a checksum over all types in the given state engine. */
    public static HollowChecksum forStateEngine(HollowReadStateEngine stateEngine) {
        return forStateEngineWithCommonSchemas(stateEngine, stateEngine);
    }

    /**
     * Computes a checksum over the types of {@code stateEngine} which are also present in
     * {@code commonSchemasWithState}, checksumming each type with the schema common to both.
     * Types are checksummed in parallel, then combined in sorted type-name order so the result
     * is deterministic regardless of thread completion order.
     */
    public static HollowChecksum forStateEngineWithCommonSchemas(HollowReadStateEngine stateEngine, HollowReadStateEngine commonSchemasWithState) {
        // Vector: worker threads add concurrently, so a synchronized collection is required
        final Vector<TypeChecksum> typeChecksums = new Vector<TypeChecksum>();

        SimultaneousExecutor executor = new SimultaneousExecutor(HollowChecksum.class, "checksum-common-schemas");

        for(final HollowTypeReadState typeState : stateEngine.getTypeStates()) {
            HollowTypeReadState commonSchemasWithType = commonSchemasWithState.getTypeState(typeState.getSchema().getName());

            if(commonSchemasWithType != null) {
                final HollowSchema commonSchemasWith = commonSchemasWithType.getSchema();

                executor.execute(new Runnable() {
                    public void run() {
                        HollowChecksum cksum = typeState.getChecksum(commonSchemasWith);
                        typeChecksums.addElement(new TypeChecksum(typeState.getSchema().getName(), cksum));
                    }
                });
            }
        }

        try {
            executor.awaitSuccessfulCompletion();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }

        Collections.sort(typeChecksums);

        HollowChecksum totalChecksum = new HollowChecksum();

        for(TypeChecksum cksum : typeChecksums) {
            totalChecksum.applyInt(cksum.getChecksum());
        }

        return totalChecksum;
    }

    /** Pairs a type name with its checksum; ordered by type name for deterministic combination. */
    private static class TypeChecksum implements Comparable<TypeChecksum>{
        private final String type;
        private final int checksum;

        public TypeChecksum(String type, HollowChecksum cksum) {
            this.type = type;
            this.checksum = cksum.intValue();
        }

        public int getChecksum() {
            return checksum;
        }

        @Override
        public int compareTo(TypeChecksum other) {
            return type.compareTo(other.type);
        }
    }
}
| 8,957 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/tools/query/HollowFieldMatchQuery.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.tools.query;
import com.netflix.hollow.core.read.HollowReadFieldUtils;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSchema.SchemaType;
import com.netflix.hollow.tools.traverse.TransitiveSetTraverser;
import java.util.BitSet;
import java.util.HashMap;
import java.util.Map;
/**
* A HollowFieldMatchQuery can be used to scan through all records in a dataset to match specific field name/value combinations.
* <p>
* Results are returned in the form of a Map<String, BitSet>. Each type for which any records matched will have an entry in the
* returned Map, keyed by type name. The corresponding value is a BitSet which is set at the positions of the ordinals of
* the matched records.
* <p>
* <b>Hint:</b> The returned Map<String, BitSet> may be in turn passed to the {@link TransitiveSetTraverser} to be augmented with any records
* which reference matched records. For example, we can imagine a data model for which the following code would provide a
* selection which includes the Actor record for "Tom Hanks", plus any Movie records in which he stars:
* <pre>
* {@code
* HollowFieldMatchQuery query = new HollowFieldMatchQuery(myStateEngine);
* Map<String, BitSet> selection = query.findMatchingRecords("actorName", "Tom Hanks");
*
* TransitiveSetTraverser.addReferencingOutsideClosure(myStateEngine, selection);
* }
* </pre>
*/
public class HollowFieldMatchQuery {
private final HollowReadStateEngine readEngine;
/**
 * @param readEngine the state engine whose records will be scanned by subsequent queries
 */
public HollowFieldMatchQuery(HollowReadStateEngine readEngine) {
    this.readEngine = readEngine;
}
/**
* Match any records which include a field with the provided fieldName and value.
*
* @param fieldName the field name
* @param fieldValue the field value as a string that will be parsed as the type of the field to match.
* @return the matching records
*/
public Map<String, BitSet> findMatchingRecords(String fieldName, String fieldValue) {
    Map<String, BitSet> results = new HashMap<String, BitSet>();

    // scan every type in the dataset for fields with the requested name/value
    for(HollowTypeReadState typeState : readEngine.getTypeStates())
        augmentMatchingRecords(typeState, fieldName, fieldValue, results);

    return results;
}
/**
* Match any records of the specified type, which have the specified field set to the specified value.
*
* @param typeName the type name
* @param fieldName the field name
* @param fieldValue the field value as a string that will be parsed as the type of the field to match.
* @return the matching records
*/
public Map<String, BitSet> findMatchingRecords(String typeName, String fieldName, String fieldValue) {
Map<String, BitSet> matches = new HashMap<String, BitSet>();
HollowTypeReadState typeState = readEngine.getTypeState(typeName);
if(typeState != null)
augmentMatchingRecords(typeState, fieldName, fieldValue, matches);
return matches;
}
private void augmentMatchingRecords(HollowTypeReadState typeState, String fieldName, String fieldValue, Map<String, BitSet> matches) {
if(typeState.getSchema().getSchemaType() == SchemaType.OBJECT) {
HollowObjectSchema schema = (HollowObjectSchema)typeState.getSchema();
for(int i=0;i<schema.numFields();i++) {
if(schema.getFieldName(i).equals(fieldName)) {
HollowObjectTypeReadState objState = (HollowObjectTypeReadState)typeState;
BitSet typeQueryMatches = null;
if(schema.getFieldType(i) == FieldType.REFERENCE) {
typeQueryMatches = attemptReferenceTraversalQuery(objState, i, fieldValue);
} else {
Object queryValue = castQueryValue(fieldValue, schema.getFieldType(i));
if(queryValue != null) {
typeQueryMatches = queryBasedOnValueMatches(objState, i, queryValue);
}
}
if(typeQueryMatches != null && typeQueryMatches.cardinality() > 0)
matches.put(typeState.getSchema().getName(), typeQueryMatches);
}
}
}
}
private BitSet attemptReferenceTraversalQuery(HollowObjectTypeReadState typeState, int fieldIdx, String fieldValue) {
HollowTypeReadState referencedTypeState = typeState.getSchema().getReferencedTypeState(fieldIdx);
if(referencedTypeState.getSchema().getSchemaType() == SchemaType.OBJECT) {
HollowObjectTypeReadState refObjTypeState = (HollowObjectTypeReadState)referencedTypeState;
HollowObjectSchema refSchema = refObjTypeState.getSchema();
if(refSchema.numFields() == 1) {
if(refSchema.getFieldType(0) == FieldType.REFERENCE) {
BitSet refQueryMatches = attemptReferenceTraversalQuery(refObjTypeState, 0, fieldValue);
if(refQueryMatches != null)
return queryBasedOnMatchedReferences(typeState, fieldIdx, refQueryMatches);
} else {
Object queryValue = castQueryValue(fieldValue, refSchema.getFieldType(0));
if(queryValue != null) {
BitSet refQueryMatches = queryBasedOnValueMatches(refObjTypeState, 0, queryValue);
if(refQueryMatches.cardinality() > 0)
return queryBasedOnMatchedReferences(typeState, fieldIdx, refQueryMatches);
}
}
}
}
return null;
}
private BitSet queryBasedOnMatchedReferences(HollowObjectTypeReadState typeState, int referenceFieldPosition, BitSet matchedReferences) {
BitSet populatedOrdinals = typeState.getPopulatedOrdinals();
BitSet typeQueryMatches = new BitSet(populatedOrdinals.length());
int ordinal = populatedOrdinals.nextSetBit(0);
while(ordinal != -1) {
int refOrdinal = typeState.readOrdinal(ordinal, referenceFieldPosition);
if(refOrdinal != -1 && matchedReferences.get(refOrdinal))
typeQueryMatches.set(ordinal);
ordinal = populatedOrdinals.nextSetBit(ordinal+1);
}
return typeQueryMatches;
}
private BitSet queryBasedOnValueMatches(HollowObjectTypeReadState typeState, int fieldPosition, Object queryValue) {
BitSet populatedOrdinals = typeState.getPopulatedOrdinals();
BitSet typeQueryMatches = new BitSet(populatedOrdinals.length());
int ordinal = populatedOrdinals.nextSetBit(0);
while(ordinal != -1) {
if(HollowReadFieldUtils.fieldValueEquals(typeState, ordinal, fieldPosition, queryValue))
typeQueryMatches.set(ordinal);
ordinal = populatedOrdinals.nextSetBit(ordinal+1);
}
return typeQueryMatches;
}
private Object castQueryValue(String fieldValue, FieldType fieldType) {
try {
switch(fieldType) {
case BOOLEAN:
return Boolean.valueOf(fieldValue);
case DOUBLE:
return Double.parseDouble(fieldValue);
case FLOAT:
return Float.parseFloat(fieldValue);
case INT:
return Integer.parseInt(fieldValue);
case LONG:
return Long.parseLong(fieldValue);
case STRING:
return fieldValue;
default:
return null;
}
} catch(Exception e) {
return null;
}
}
}
| 8,958 |
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core;
import com.netflix.hollow.api.error.SchemaNotFoundException;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import java.util.List;
import java.util.Map;
/**
*
* A dataset changes over time. A core concept in Hollow is that the timeline for a changing dataset can be broken
* down into discrete "states", each of which is a complete snapshot of the data at a particular point in time.
*
* A state engine holds a complete copy of a hollow dataset, and is generally the root handle to the data.
* The state engine can be transitioned between states.
*
* Depending on whether a dataset is being produced or consumed, a HollowStateEngine will be either a {@link HollowWriteStateEngine}
* or a {@link HollowReadStateEngine}, respectively.
*
* @author dkoszewnik
*
*/
public interface HollowStateEngine extends HollowDataset {
    /**
     * A header tag indicating that the schema has changed from that of the prior version.
     * <p>
     * If the header tag is present in the state engine and the value is "true" (ignoring case)
     * then the schema has changed from that of the prior version.
     */
    String HEADER_TAG_SCHEMA_CHANGE = "hollow.schema.changedFromPriorVersion";
    /**
     * A header tag containing the hash of serialized hollow schema.
     */
    String HEADER_TAG_SCHEMA_HASH = "hollow.schema.hash";
    /**
     * A header tag indicating the timestamp in milliseconds of when the producer cycle started
     * for this state engine.
     */
    String HEADER_TAG_METRIC_CYCLE_START = "hollow.metric.cycle.start";
    /**
     * A header tag indicating the timestamp in milliseconds to mark the start of announcement of a version.
     * */
    String HEADER_TAG_METRIC_ANNOUNCEMENT = "hollow.metric.announcement";
    /**
     * A header tag indicating which version is this blob produce to.
     */
    String HEADER_TAG_PRODUCER_TO_VERSION = "hollow.blob.to.version";
    @Override
    List<HollowSchema> getSchemas();
    @Override
    HollowSchema getSchema(String typeName);
    @Override
    HollowSchema getNonNullSchema(String typeName) throws SchemaNotFoundException;
    /**
     * @return all header tags attached to this state, keyed by tag name
     */
    Map<String, String> getHeaderTags();
    /**
     * @param name the header tag name
     * @return the value of the named header tag, if present
     */
    String getHeaderTag(String name);
}
| 8,959 |
/*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core;
import com.netflix.hollow.core.schema.HollowSchema;
import java.util.Collections;
import java.util.List;
/**
 * Represents the header of a serialized optional blob part.  Mirrors {@link HollowBlobHeader}
 * but is scoped to a single named part of the blob.
 */
public class HollowBlobOptionalPartHeader {
    /** The 32-bit format version identifier for optional blob part headers. */
    public static final int HOLLOW_BLOB_PART_VERSION_HEADER = 1031;
    /** The name of the optional part this header describes; fixed at construction. */
    private final String partName;
    /** The schemas of the types contained in this optional part. */
    private List<HollowSchema> schemas = Collections.emptyList();
    /** Randomized tag of the data state this part originates from. */
    private long originRandomizedTag;
    /** Randomized tag of the data state this part transitions to. */
    private long destinationRandomizedTag;
    public HollowBlobOptionalPartHeader(String partName) {
        this.partName = partName;
    }
    public List<HollowSchema> getSchemas() {
        return schemas;
    }
    public void setSchemas(List<HollowSchema> schemas) {
        this.schemas = schemas;
    }
    public long getOriginRandomizedTag() {
        return originRandomizedTag;
    }
    public void setOriginRandomizedTag(long originRandomizedTag) {
        this.originRandomizedTag = originRandomizedTag;
    }
    public long getDestinationRandomizedTag() {
        return destinationRandomizedTag;
    }
    public void setDestinationRandomizedTag(long destinationRandomizedTag) {
        this.destinationRandomizedTag = destinationRandomizedTag;
    }
    public String getPartName() {
        return partName;
    }
}
| 8,960 |
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core;
import com.netflix.hollow.core.schema.HollowSchema;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
* Represents the header of a serialized blob. A blob header contains the following elements:
*
* <dl>
* <dt>Header Tags</dt>
* <dd>A set of user-specified key-value pairs -- typically used to communicate overall context about the blob's data.</dd>
*
* <dt>Randomized Tags</dt>
* <dd>A single randomized 64-bit value is randomly generated per data state. This tag is tracked at the client
* as a safety mechanism to ensure that delta transitions are never applied to an incorrect state.</dd>
*
* <dt>Blob Format Version</dt>
* <dd>A 32-bit value used to identify the format of the hollow blob.</dd>
*
* </dl>
*
*/
public class HollowBlobHeader {

    public static final int HOLLOW_BLOB_VERSION_HEADER = 1030;

    /** User-specified key-value pairs communicating overall context about the blob's data. */
    private Map<String, String> headerTags = new HashMap<String, String>();
    /** The schemas of the types serialized in the blob. */
    private List<HollowSchema> schemas = new ArrayList<HollowSchema>();
    /** Randomized tag of the data state this blob originates from. */
    private long originRandomizedTag;
    /** Randomized tag of the data state this blob transitions to. */
    private long destinationRandomizedTag;
    /** The 32-bit value identifying the format of the hollow blob. */
    private int blobFormatVersion = HOLLOW_BLOB_VERSION_HEADER;

    public Map<String, String> getHeaderTags() {
        return headerTags;
    }

    public void setSchemas(List<HollowSchema> schemas) {
        this.schemas = schemas;
    }

    public List<HollowSchema> getSchemas() {
        return schemas;
    }

    public void setHeaderTags(Map<String, String> headerTags) {
        this.headerTags = headerTags;
    }

    public long getOriginRandomizedTag() {
        return originRandomizedTag;
    }

    public void setOriginRandomizedTag(long originRandomizedTag) {
        this.originRandomizedTag = originRandomizedTag;
    }

    public long getDestinationRandomizedTag() {
        return destinationRandomizedTag;
    }

    public void setDestinationRandomizedTag(long destinationRandomizedTag) {
        this.destinationRandomizedTag = destinationRandomizedTag;
    }

    public void setBlobFormatVersion(int blobFormatVersion) {
        this.blobFormatVersion = blobFormatVersion;
    }

    public int getBlobFormatVersion() {
        return blobFormatVersion;
    }

    /**
     * Equality is based on the blob format version, header tags, and the origin/destination
     * randomized tags. {@code schemas} is not part of equality (unchanged behavior).
     */
    @Override
    public boolean equals(Object other) {
        if(other instanceof HollowBlobHeader) {
            HollowBlobHeader oh = (HollowBlobHeader)other;
            return blobFormatVersion == oh.blobFormatVersion
                    // Objects.equals: null-safe, since headerTags may be reassigned via the setter
                    && Objects.equals(headerTags, oh.headerTags)
                    && originRandomizedTag == oh.originRandomizedTag
                    && destinationRandomizedTag == oh.destinationRandomizedTag;
        }
        return false;
    }

    /**
     * Hashes exactly the fields compared by {@link #equals(Object)}. The prior implementation
     * redundantly folded {@code blobFormatVersion} into the result twice; this is simplified
     * while preserving the equals/hashCode contract.
     */
    @Override
    public int hashCode() {
        return Objects.hash(headerTags,
                originRandomizedTag,
                destinationRandomizedTag,
                blobFormatVersion);
    }
}
| 8,961 |
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core;
import com.netflix.hollow.api.error.SchemaNotFoundException;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.schema.HollowSchema;
import java.util.List;
/**
* A Hollow dataset contains a set of strongly typed schemas.
*
* This is the superinterface for {@link HollowStateEngine} and {@link HollowDataAccess}
*
*/
public interface HollowDataset {
    /**
     * @return the schemas for all types in this dataset.
     */
    List<HollowSchema> getSchemas();
    /**
     * @param typeName the type name
     * @return the schema for the specified type in this dataset.
     */
    HollowSchema getSchema(String typeName);
    /**
     * @param typeName the type name
     * @return the schema for the specified type in this dataset.
     * @throws SchemaNotFoundException if the schema is not found
     */
    HollowSchema getNonNullSchema(String typeName) throws SchemaNotFoundException;
    /**
     * Determines whether another dataset declares exactly the same set of schemas as this one.
     *
     * @param other another HollowDataset
     * @return true iff the other HollowDataset has an identical set of schemas.
     */
    default boolean hasIdenticalSchemas(HollowDataset other) {
        List<HollowSchema> mySchemas = getSchemas();
        // Equal counts plus "each of mine has an equal counterpart over there" implies
        // the schema sets are identical, since schemas are looked up by name.
        if(mySchemas.size() != other.getSchemas().size())
            return false;
        return mySchemas.stream().allMatch(mine -> {
            HollowSchema theirs = other.getSchema(mine.getName());
            return theirs != null && mine.equals(theirs);
        });
    }
}
| 8,962 |
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core;
/**
* An interface to gather various sentinel constants used across hollow.
*/
public interface HollowConstants {
    /**
     * A version of VERSION_LATEST signifies "latest version".
     */
    long VERSION_LATEST = Long.MAX_VALUE;
    /**
     * A version of VERSION_NONE signifies "no version".
     */
    long VERSION_NONE = Long.MIN_VALUE;
    /**
     * An ordinal of ORDINAL_NONE signifies "null reference" or "no ordinal".
     */
    int ORDINAL_NONE = -1;
    /**
     * The maximum number of buckets allowed in a Hollow hash table. Empty space is reserved (based on 70% load factor),
     * otherwise performance approaches O(n). The value is 70% of 2^30 buckets.
     */
    int HASH_TABLE_MAX_SIZE = (int)((1L << 30) * 7 / 10);
}
| 8,963 |
/*
* Copyright 2016-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
/**
 * Accumulates per-field size statistics for an object type, then derives the packed
 * fixed-length layout: the bit width, bit offset, and null sentinel of every field,
 * and the total bits required per record.
 */
public class FieldStatistics {

    private final HollowObjectSchema schema;

    /** Per field: widest fixed-length representation observed (or derived, for var-length fields). */
    private final int[] maxBitsForField;
    /** Per field: the all-ones sentinel value representing null at that field's bit width. */
    private final long[] nullValueForField;
    /** Per field: accumulated byte count of var-length (STRING/BYTES) data. */
    private final long[] totalSizeOfVarLengthField;
    /** Per field: bit offset of the field within a packed record; valid after completeCalculations(). */
    private final int[] bitOffsetForField;
    /** Total bits per packed record; valid after completeCalculations(). */
    private int numBitsPerRecord;

    public FieldStatistics(HollowObjectSchema schema) {
        int fieldCount = schema.numFields();
        this.schema = schema;
        this.maxBitsForField = new int[fieldCount];
        this.nullValueForField = new long[fieldCount];
        this.totalSizeOfVarLengthField = new long[fieldCount];
        this.bitOffsetForField = new int[fieldCount];
    }

    public int getNumBitsPerRecord() {
        return numBitsPerRecord;
    }

    public int getFieldBitOffset(int fieldIndex) {
        return bitOffsetForField[fieldIndex];
    }

    public int getMaxBitsForField(int fieldIndex) {
        return maxBitsForField[fieldIndex];
    }

    public long getNullValueForField(int fieldIndex) {
        return nullValueForField[fieldIndex];
    }

    /** Records that some value of the field requires the given number of bits. */
    public void addFixedLengthFieldRequiredBits(int fieldIndex, int numberOfBits) {
        maxBitsForField[fieldIndex] = Math.max(maxBitsForField[fieldIndex], numberOfBits);
    }

    /** Accumulates the byte size of one var-length field value. */
    public void addVarLengthFieldSize(int fieldIndex, int fieldSize) {
        totalSizeOfVarLengthField[fieldIndex] += fieldSize;
    }

    /**
     * Finalizes the layout: derives var-length field widths from their accumulated totals,
     * computes each field's null sentinel and bit offset, and sums the record width.
     */
    public void completeCalculations() {
        for(int fieldIndex = 0; fieldIndex < schema.numFields(); fieldIndex++) {
            FieldType fieldType = schema.getFieldType(fieldIndex);
            if(fieldType == FieldType.STRING || fieldType == FieldType.BYTES) {
                // Var-length fields store an offset into the var-length data pool; one extra bit flags null.
                maxBitsForField[fieldIndex] = bitsToRepresent(totalSizeOfVarLengthField[fieldIndex]) + 1;
            }
            int fieldBits = maxBitsForField[fieldIndex];
            nullValueForField[fieldIndex] = fieldBits == 64 ? -1L : (1L << fieldBits) - 1;
            bitOffsetForField[fieldIndex] = numBitsPerRecord;
            numBitsPerRecord += fieldBits;
        }
    }

    /** @return the sum of accumulated var-length data sizes across all fields. */
    public long getTotalSizeOfAllVarLengthData() {
        long total = 0;
        for(long fieldTotal : totalSizeOfVarLengthField)
            total += fieldTotal;
        return total;
    }

    // Number of bits needed to represent values in [0, value], leaving the all-ones pattern free.
    private int bitsToRepresent(long value) {
        return 64 - Long.numberOfLeadingZeros(value + 1);
    }
}
| 8,964 |
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write;
import com.netflix.hollow.core.memory.ByteDataArray;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.util.IntList;
/**
 * A reusable write record representing a LIST: an ordered sequence of element ordinals.
 */
public class HollowListWriteRecord implements HollowWriteRecord {

    /** Ordinals of the list's elements, in the order they were added. */
    private final IntList elementOrdinals = new IntList();

    public HollowListWriteRecord() {
    }

    /**
     * Appends an element to the list.
     *
     * @param ordinal the element record's ordinal
     */
    public void addElement(int ordinal) {
        elementOrdinals.add(ordinal);
    }

    /**
     * Serializes this list as a vint element count followed by one vint per element ordinal.
     */
    @Override
    public void writeDataTo(ByteDataArray buf) {
        int count = elementOrdinals.size();
        VarInt.writeVInt(buf, count);
        for(int idx = 0; idx < count; idx++)
            VarInt.writeVInt(buf, elementOrdinals.get(idx));
    }

    /** Clears the element ordinals so this record instance can be reused. */
    @Override
    public void reset() {
        elementOrdinals.clear();
    }
}
| 8,965 |
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.memory.ByteArrayOrdinalMap;
import com.netflix.hollow.core.memory.SegmentedByteArray;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.memory.encoding.ZigZag;
import com.netflix.hollow.core.schema.HollowObjectSchema;
/**
 * Computes hash codes for records in a {@link HollowWriteStateEngine} based on the fields of a
 * {@link PrimaryKey}.  Field values are read directly out of the serialized record bytes held
 * by each type's {@link ByteArrayOrdinalMap}, following REFERENCE fields along each key field path.
 */
class HollowWriteStateEnginePrimaryKeyHasher {
    // typeStates[f][s] is the object type write state at step s of key field f's path.
    private final HollowObjectTypeWriteState typeStates[][];
    // fieldPathIndexes[f][s] is the field position to follow/read at step s of key field f's path.
    private final int[][] fieldPathIndexes;
    public HollowWriteStateEnginePrimaryKeyHasher(PrimaryKey primaryKey, HollowWriteStateEngine writeEngine) {
        HollowWriteStateEngine stateEngine = writeEngine;
        HollowObjectTypeWriteState rootTypeWriteState = (HollowObjectTypeWriteState)writeEngine.getTypeState(primaryKey.getType());
        this.fieldPathIndexes = new int[primaryKey.numFields()][];
        this.typeStates = new HollowObjectTypeWriteState[primaryKey.numFields()][];
        for(int i=0;i<primaryKey.numFields();i++) {
            // Resolve each key field's path; every path starts at the key's root type.
            fieldPathIndexes[i] = primaryKey.getFieldPathIndex(stateEngine, i);
            typeStates[i] = new HollowObjectTypeWriteState[fieldPathIndexes[i].length];
            typeStates[i][0] = rootTypeWriteState;
            for(int j=1;j<typeStates[i].length;j++) {
                // Each subsequent step's type state is the type referenced by the prior step's field.
                String referencedType = typeStates[i][j-1].getSchema().getReferencedType(fieldPathIndexes[i][j-1]);
                typeStates[i][j] = (HollowObjectTypeWriteState) stateEngine.getTypeState(referencedType);
            }
        }
    }
    /**
     * @param ordinal the ordinal of a record of the primary key's root type
     * @return a hash code combining the hashes of each of the key's field values for that record
     */
    public int getRecordHash(int ordinal) {
        int hash = 0;
        for (int i = 0; i < fieldPathIndexes.length; i++) {
            hash *= 31;
            hash ^= hashValue(ordinal, i);
        }
        return hash;
    }
    // Follows key field fieldIdx's path from the given root-type ordinal, dereferencing one
    // field per step, then hashes the value of the final field along the path.
    private int hashValue(int ordinal, int fieldIdx) {
        int lastFieldPath = fieldPathIndexes[fieldIdx].length - 1;
        for (int i = 0; i < lastFieldPath; i++) {
            int fieldPosition = fieldPathIndexes[fieldIdx][i];
            ByteArrayOrdinalMap ordinalMap = typeStates[fieldIdx][i].ordinalMap;
            long offset = ordinalMap.getPointerForData(ordinal);
            SegmentedByteArray recordDataArray = ordinalMap.getByteData().getUnderlyingArray();
            offset = navigateToField(typeStates[fieldIdx][i].getSchema(), fieldPosition, recordDataArray, offset);
            // Intermediate path steps: the vint stored at the field is read as the next record's ordinal.
            ordinal = VarInt.readVInt(recordDataArray, offset);
        }
        int fieldPosition = fieldPathIndexes[fieldIdx][lastFieldPath];
        ByteArrayOrdinalMap ordinalMap = typeStates[fieldIdx][lastFieldPath].ordinalMap;
        long offset = ordinalMap.getPointerForData(ordinal);
        SegmentedByteArray recordDataArray = ordinalMap.getByteData().getUnderlyingArray();
        HollowObjectSchema schema = typeStates[fieldIdx][lastFieldPath].getSchema();
        offset = navigateToField(schema, fieldPosition, recordDataArray, offset);
        return HashCodes.hashInt(fieldHashCode(schema, fieldPosition, recordDataArray, offset));
    }
    // Advances past the serialized representations of fields 0..fieldIdx-1, returning the
    // offset at which field fieldIdx begins.  Widths mirror the serialized record layout:
    // vint/vlong for INT/LONG/REFERENCE, length-prefixed bytes for BYTES/STRING, and fixed
    // widths for BOOLEAN (1 byte), DOUBLE (8), FLOAT (4).
    private long navigateToField(HollowObjectSchema schema, int fieldIdx, SegmentedByteArray data, long offset) {
        for(int i=0;i<fieldIdx;i++) {
            switch(schema.getFieldType(i)) {
            case INT:
            case LONG:
            case REFERENCE:
                offset += VarInt.nextVLongSize(data, offset);
                break;
            case BYTES:
            case STRING:
                int fieldLength = VarInt.readVInt(data, offset);
                offset += VarInt.sizeOfVInt(fieldLength);
                offset += fieldLength;
                break;
            case BOOLEAN:
                offset++;
                break;
            case DOUBLE:
                offset += 8;
                break;
            case FLOAT:
                offset += 4;
                break;
            }
        }
        return offset;
    }
    // Hashes the field value starting at 'offset' according to its type.  Null INT/LONG/BOOLEAN
    // values hash to 0; LONG and DOUBLE fold their high and low 32 bits together.
    private int fieldHashCode(HollowObjectSchema schema, int fieldIdx, SegmentedByteArray data, long offset) {
        switch(schema.getFieldType(fieldIdx)) {
        case INT:
            if(VarInt.readVNull(data, offset))
                return 0;
            int intVal = VarInt.readVInt(data, offset);
            intVal = ZigZag.decodeInt(intVal);
            return intVal;
        case LONG:
            if(VarInt.readVNull(data, offset))
                return 0;
            long longVal = VarInt.readVLong(data, offset);
            longVal = ZigZag.decodeLong(longVal);
            return (int)(longVal ^ (longVal >>> 32));
        case REFERENCE:
            return VarInt.readVInt(data, offset);
        case BYTES:
            int byteLen = VarInt.readVInt(data, offset);
            offset += VarInt.sizeOfVInt(byteLen);
            return HashCodes.hashCode(data, offset, byteLen);
        case STRING:
            int strByteLen = VarInt.readVInt(data, offset);
            offset += VarInt.sizeOfVInt(strByteLen);
            return getNaturalStringHashCode(data, offset, strByteLen);
        case BOOLEAN:
            // 1231/1237 match Boolean.hashCode() for true/false.
            if(VarInt.readVNull(data, offset))
                return 0;
            return data.get(offset) == 1 ? 1231 : 1237;
        case DOUBLE:
            long longBits = data.readLongBits(offset);
            return (int)(longBits ^ (longBits >>> 32));
        case FLOAT:
            return data.readIntBits(offset);
        default:
            throw new IllegalArgumentException("Schema "+schema.getName()+" has unknown field type for field " + schema.getFieldName(fieldIdx) + ": " + schema.getFieldType(fieldIdx));
        }
    }
    // Computes hash = 31*hash + ch over the vint-decoded characters, without materializing
    // a String -- the same recurrence String.hashCode() uses.
    private int getNaturalStringHashCode(SegmentedByteArray data, long offset, int len) {
        int hashCode = 0;
        long endOffset = len + offset;
        while(offset < endOffset) {
            int ch = VarInt.readVInt(data, offset);
            hashCode = hashCode * 31 + ch;
            offset += VarInt.sizeOfVInt(ch);
        }
        return hashCode;
    }
}
| 8,966 |
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write;
import com.netflix.hollow.core.memory.ByteData;
import com.netflix.hollow.core.memory.ByteDataArray;
import com.netflix.hollow.core.memory.ThreadSafeBitSet;
import com.netflix.hollow.core.memory.encoding.FixedLengthElementArray;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.memory.pool.WastefulRecycler;
import com.netflix.hollow.core.schema.HollowSetSchema;
import java.io.DataOutputStream;
import java.io.IOException;
public class HollowSetTypeWriteState extends HollowTypeWriteState {
    /// statistics required for writing fixed length set data
    private int bitsPerSetPointer;      // bits per end-of-buckets pointer; sized to the largest shard's bucket total
    private int bitsPerElement;         // bits per hash bucket entry; sized to max element ordinal (all-ones = empty bucket)
    private int bitsPerSetSizeValue;    // bits per set size value; sized to the largest set
    private long totalOfSetBuckets[];   // per shard: total hash buckets across all sets
    /// data required for writing snapshot or delta
    private int maxOrdinal;
    private int maxShardOrdinal[];      // per shard: highest shard-local ordinal
    private FixedLengthElementArray setPointersAndSizesArray[];  // per shard: packed (end bucket pointer, set size) per set
    private FixedLengthElementArray elementArray[];              // per shard: packed hash buckets of element ordinals
    /// additional data required for writing delta
    // NOTE(review): the delta fields below are not referenced in the visible portion of this
    // class -- presumably populated/consumed by the delta-writing methods further down.
    private int numSetsInDelta[];
    private long numBucketsInDelta[];
    private ByteDataArray deltaAddedOrdinals[];
    private ByteDataArray deltaRemovedOrdinals[];
    public HollowSetTypeWriteState(HollowSetSchema schema) {
        this(schema, -1);  // -1: shard count is computed from the data size at write time
    }
    /**
     * @param schema the set schema for this type
     * @param numShards the number of shards to split the type's data into, or -1 to auto-size
     */
    public HollowSetTypeWriteState(HollowSetSchema schema, int numShards) {
        super(schema, numShards);
    }
    // Covariant override: this type state always holds a HollowSetSchema.
    @Override
    public HollowSetSchema getSchema() {
        return (HollowSetSchema)schema;
    }
public void prepareForWrite() {
super.prepareForWrite();
gatherStatistics();
}
    // Scans every record populated in the current or previous cycle to derive the layout
    // statistics: max shard-local ordinal per shard, total hash buckets per shard, and the
    // bit widths for element ordinals, set sizes, and bucket pointers.
    private void gatherStatistics() {
        if(numShards == -1)
            calculateNumShards();
        int maxElementOrdinal = 0;
        int maxOrdinal = ordinalMap.maxOrdinal();
        maxShardOrdinal = new int[numShards];
        // Records are distributed round-robin (ordinal & (numShards-1)); the first
        // (maxOrdinal+1) % numShards shards receive one extra record.
        int minRecordLocationsPerShard = (maxOrdinal + 1) / numShards;
        for(int i=0;i<numShards;i++)
            maxShardOrdinal[i] = (i < ((maxOrdinal + 1) & (numShards - 1))) ? minRecordLocationsPerShard : minRecordLocationsPerShard - 1;
        int maxSetSize = 0;
        ByteData data = ordinalMap.getByteData().getUnderlyingArray();
        totalOfSetBuckets = new long[numShards];
        for(int i=0;i<=maxOrdinal;i++) {
            if(currentCyclePopulated.get(i) || previousCyclePopulated.get(i)) {
                // Serialized set layout: vint size, then per element a vint ordinal delta
                // followed by a vint hashed bucket.
                long pointer = ordinalMap.getPointerForData(i);
                int size = VarInt.readVInt(data, pointer);
                int numBuckets = HashCodes.hashTableSize(size);
                if(size > maxSetSize)
                    maxSetSize = size;
                pointer += VarInt.sizeOfVInt(size);
                int elementOrdinal = 0;
                for(int j=0;j<size;j++) {
                    int elementOrdinalDelta = VarInt.readVInt(data, pointer);
                    elementOrdinal += elementOrdinalDelta;
                    if(elementOrdinal > maxElementOrdinal)
                        maxElementOrdinal = elementOrdinal;
                    pointer += VarInt.sizeOfVInt(elementOrdinalDelta);
                    pointer += VarInt.nextVLongSize(data, pointer); /// discard hashed bucket
                }
                totalOfSetBuckets[i & (numShards-1)] += numBuckets;
            }
        }
        // Pointer width must accommodate the busiest shard's total bucket count.
        long maxShardTotalOfSetBuckets = 0;
        for(int i=0;i<numShards;i++) {
            if(totalOfSetBuckets[i] > maxShardTotalOfSetBuckets)
                maxShardTotalOfSetBuckets = totalOfSetBuckets[i];
        }
        // +1 on the element width reserves the all-ones pattern as the empty-bucket sentinel.
        bitsPerElement = 64 - Long.numberOfLeadingZeros(maxElementOrdinal + 1);
        bitsPerSetSizeValue = 64 - Long.numberOfLeadingZeros(maxSetSize);
        bitsPerSetPointer = 64 - Long.numberOfLeadingZeros(maxShardTotalOfSetBuckets);
    }
    // Auto-sizes the shard count: projects the serialized size of this type (fixed-length
    // portion plus bucket data) from the records populated in the current cycle, then
    // doubles the shard count until each shard fits under the engine's target shard size.
    // Shard counts are always powers of two, so (ordinal & (numShards-1)) can select a shard.
    private void calculateNumShards() {
        int maxOrdinal = ordinalMap.maxOrdinal();
        int maxSetSize = 0;
        int maxElementOrdinal = 0;
        ByteData data = ordinalMap.getByteData().getUnderlyingArray();
        long totalOfSetBuckets = 0;
        for(int i=0;i<=maxOrdinal;i++) {
            if(currentCyclePopulated.get(i)) {
                long pointer = ordinalMap.getPointerForData(i);
                int size = VarInt.readVInt(data, pointer);
                int numBuckets = HashCodes.hashTableSize(size);
                if(size > maxSetSize)
                    maxSetSize = size;
                pointer += VarInt.sizeOfVInt(size);
                int elementOrdinal = 0;
                for(int j=0;j<size;j++) {
                    int elementOrdinalDelta = VarInt.readVInt(data, pointer);
                    elementOrdinal += elementOrdinalDelta;
                    if(elementOrdinal > maxElementOrdinal)
                        maxElementOrdinal = elementOrdinal;
                    pointer += VarInt.sizeOfVInt(elementOrdinalDelta);
                    pointer += VarInt.nextVLongSize(data, pointer); /// discard hashed bucket
                }
                totalOfSetBuckets += numBuckets;
            }
        }
        // Same bit-width derivations as gatherStatistics(), but over a single (unsharded) projection.
        long bitsPerElement = 64 - Long.numberOfLeadingZeros(maxElementOrdinal + 1);
        long bitsPerSetSizeValue = 64 - Long.numberOfLeadingZeros(maxSetSize);
        long bitsPerSetPointer = 64 - Long.numberOfLeadingZeros(totalOfSetBuckets);
        long projectedSizeOfType = (bitsPerSetSizeValue + bitsPerSetPointer) * (maxOrdinal + 1) / 8;
        projectedSizeOfType += (bitsPerElement * totalOfSetBuckets) / 8;
        numShards = 1;
        while(stateEngine.getTargetMaxTypeShardSize() * numShards < projectedSizeOfType)
            numShards *= 2;
    }
    // Builds the snapshot encoding: per shard, a fixed-length array of (end bucket pointer,
    // set size) pairs and a fixed-length array of open-addressed hash buckets holding element
    // ordinals.  The all-ones bit pattern marks an empty bucket; collisions are resolved by
    // linear probing.
    @Override
    public void calculateSnapshot() {
        maxOrdinal = ordinalMap.maxOrdinal();
        int bitsPerSetFixedLengthPortion = bitsPerSetSizeValue + bitsPerSetPointer;
        setPointersAndSizesArray = new FixedLengthElementArray[numShards];
        elementArray = new FixedLengthElementArray[numShards];
        for(int i=0;i<numShards;i++) {
            setPointersAndSizesArray[i] = new FixedLengthElementArray(WastefulRecycler.DEFAULT_INSTANCE, (long)bitsPerSetFixedLengthPortion * (maxShardOrdinal[i] + 1));
            elementArray[i] = new FixedLengthElementArray(WastefulRecycler.DEFAULT_INSTANCE, (long)bitsPerElement * totalOfSetBuckets[i]);
        }
        ByteData data = ordinalMap.getByteData().getUnderlyingArray();
        int bucketCounter[] = new int[numShards];
        int shardMask = numShards - 1;
        // When the schema declares a hash key, bucket placement is recomputed from the key's
        // hash rather than taken from the serialized record.
        HollowWriteStateEnginePrimaryKeyHasher primaryKeyHasher = null;
        if(getSchema().getHashKey() != null)
            primaryKeyHasher = new HollowWriteStateEnginePrimaryKeyHasher(getSchema().getHashKey(), getStateEngine());
        for(int ordinal=0;ordinal<=maxOrdinal;ordinal++) {
            int shardNumber = ordinal & shardMask;
            int shardOrdinal = ordinal / numShards;
            if(currentCyclePopulated.get(ordinal)) {
                long readPointer = ordinalMap.getPointerForData(ordinal);
                int size = VarInt.readVInt(data, readPointer);
                readPointer += VarInt.sizeOfVInt(size);
                int numBuckets = HashCodes.hashTableSize(size);
                setPointersAndSizesArray[shardNumber].setElementValue(((long)bitsPerSetFixedLengthPortion * shardOrdinal) + bitsPerSetPointer, bitsPerSetSizeValue, size);
                int elementOrdinal = 0;
                // Initialize every bucket of this set's table to the empty sentinel (all ones).
                for(int j=0;j<numBuckets;j++) {
                    elementArray[shardNumber].setElementValue((long)bitsPerElement * (bucketCounter[shardNumber] + j), bitsPerElement, (1L << bitsPerElement) - 1);
                }
                for(int j=0;j<size;j++) {
                    int elementOrdinalDelta = VarInt.readVInt(data, readPointer);
                    readPointer += VarInt.sizeOfVInt(elementOrdinalDelta);
                    int hashedBucket = VarInt.readVInt(data, readPointer);
                    readPointer += VarInt.sizeOfVInt(hashedBucket);
                    elementOrdinal += elementOrdinalDelta;
                    if(primaryKeyHasher != null)
                        hashedBucket = primaryKeyHasher.getRecordHash(elementOrdinal) & (numBuckets - 1);
                    // Linear-probe forward until an empty bucket (sentinel value) is found.
                    while(elementArray[shardNumber].getElementValue((long)bitsPerElement * (bucketCounter[shardNumber] + hashedBucket), bitsPerElement) != ((1L << bitsPerElement) - 1)) {
                        hashedBucket++;
                        hashedBucket &= (numBuckets - 1);
                    }
                    elementArray[shardNumber].clearElementValue((long)bitsPerElement * (bucketCounter[shardNumber] + hashedBucket), bitsPerElement);
                    elementArray[shardNumber].setElementValue((long)bitsPerElement * (bucketCounter[shardNumber] + hashedBucket), bitsPerElement, elementOrdinal);
                }
                bucketCounter[shardNumber] += numBuckets;
            }
            // Every shard ordinal (populated or not) records the running bucket count as its
            // end pointer, so readers can derive each set's bucket range.
            setPointersAndSizesArray[shardNumber].setElementValue((long)bitsPerSetFixedLengthPortion * shardOrdinal, bitsPerSetPointer, bucketCounter[shardNumber]);
        }
    }
@Override
public void writeSnapshot(DataOutputStream os) throws IOException {
    // Single-shard blobs keep the legacy layout (no overall max ordinal) so
    // that clients older than v2.1.0, which predate sharding, can read them.
    if (numShards > 1) {
        VarInt.writeVInt(os, maxOrdinal);              // overall max ordinal
        for (int shard = 0; shard < numShards; shard++)
            writeSnapshotShard(os, shard);
    } else {
        writeSnapshotShard(os, 0);
    }
    // Bit set of ordinals populated in the current cycle.
    currentCyclePopulated.serializeBitsTo(os);
    // Release the large temporary arrays built by calculateSnapshot().
    setPointersAndSizesArray = null;
    elementArray = null;
}
private void writeSnapshotShard(DataOutputStream os, int shardNumber) throws IOException {
/// Serializes one shard of the snapshot in the section order listed below.
int bitsPerSetFixedLengthPortion = bitsPerSetSizeValue + bitsPerSetPointer;
/// 1) max ordinal
VarInt.writeVInt(os, maxShardOrdinal[shardNumber]);
/// 2) statistics
VarInt.writeVInt(os, bitsPerSetPointer);
VarInt.writeVInt(os, bitsPerSetSizeValue);
VarInt.writeVInt(os, bitsPerElement);
VarInt.writeVLong(os, totalOfSetBuckets[shardNumber]);
/// 3) set pointer array
/// number of 64-bit words needed to hold (maxShardOrdinal+1) fixed-length entries
int numSetFixedLengthLongs = maxShardOrdinal[shardNumber] == -1 ? 0 : (int)((((long)(maxShardOrdinal[shardNumber] + 1) * bitsPerSetFixedLengthPortion) - 1) / 64) + 1;
VarInt.writeVInt(os, numSetFixedLengthLongs);
for(int i=0;i<numSetFixedLengthLongs;i++) {
os.writeLong(setPointersAndSizesArray[shardNumber].get(i));
}
/// 4) element array
int numElementLongs = totalOfSetBuckets[shardNumber] == 0 ? 0 : (int)(((totalOfSetBuckets[shardNumber] * bitsPerElement) - 1) / 64) + 1;
VarInt.writeVInt(os, numElementLongs);
for(int i=0;i<numElementLongs;i++) {
os.writeLong(elementArray[shardNumber].get(i));
}
}
@Override
public void calculateDelta() {
/// forward delta: additions are records populated now but not previously
calculateDelta(previousCyclePopulated, currentCyclePopulated);
}
@Override
public void writeDelta(DataOutputStream dos) throws IOException {
writeCalculatedDelta(dos);
}
@Override
public void calculateReverseDelta() {
/// reverse delta: identical calculation with the populated bit sets swapped
calculateDelta(currentCyclePopulated, previousCyclePopulated);
}
@Override
public void writeReverseDelta(DataOutputStream dos) throws IOException {
writeCalculatedDelta(dos);
}
public void calculateDelta(ThreadSafeBitSet fromCyclePopulated, ThreadSafeBitSet toCyclePopulated) {
/// Builds the per-shard delta representation: hash tables for every set added
/// between the two cycles, plus vint-delta-encoded lists of added and removed
/// shard ordinals.
maxOrdinal = ordinalMap.maxOrdinal();
int bitsPerSetFixedLengthPortion = bitsPerSetSizeValue + bitsPerSetPointer;
numSetsInDelta = new int[numShards];
numBucketsInDelta = new long[numShards];
setPointersAndSizesArray = new FixedLengthElementArray[numShards];
elementArray = new FixedLengthElementArray[numShards];
deltaAddedOrdinals = new ByteDataArray[numShards];
deltaRemovedOrdinals = new ByteDataArray[numShards];
ThreadSafeBitSet deltaAdditions = toCyclePopulated.andNot(fromCyclePopulated);
int shardMask = numShards - 1;
/// first pass: count added sets and hash buckets per shard so the
/// fixed-length arrays can be sized up front
int addedOrdinal = deltaAdditions.nextSetBit(0);
while(addedOrdinal != -1) {
numSetsInDelta[addedOrdinal & shardMask]++;
long readPointer = ordinalMap.getPointerForData(addedOrdinal);
int size = VarInt.readVInt(ordinalMap.getByteData().getUnderlyingArray(), readPointer);
numBucketsInDelta[addedOrdinal & shardMask] += HashCodes.hashTableSize(size);
addedOrdinal = deltaAdditions.nextSetBit(addedOrdinal + 1);
}
for(int i=0;i<numShards;i++) {
setPointersAndSizesArray[i] = new FixedLengthElementArray(WastefulRecycler.DEFAULT_INSTANCE, (long)numSetsInDelta[i] * bitsPerSetFixedLengthPortion);
elementArray[i] = new FixedLengthElementArray(WastefulRecycler.DEFAULT_INSTANCE, (long)numBucketsInDelta[i] * bitsPerElement);
deltaAddedOrdinals[i] = new ByteDataArray(WastefulRecycler.DEFAULT_INSTANCE);
deltaRemovedOrdinals[i] = new ByteDataArray(WastefulRecycler.DEFAULT_INSTANCE);
}
ByteData data = ordinalMap.getByteData().getUnderlyingArray();
int setCounter[] = new int[numShards];
long bucketCounter[] = new long[numShards];
int previousRemovedOrdinal[] = new int[numShards];
int previousAddedOrdinal[] = new int[numShards];
HollowWriteStateEnginePrimaryKeyHasher primaryKeyHasher = null;
if(getSchema().getHashKey() != null)
primaryKeyHasher = new HollowWriteStateEnginePrimaryKeyHasher(getSchema().getHashKey(), getStateEngine());
/// second pass: populate the hash table of each added set, and record
/// added/removed shard ordinals as deltas from the previous one seen
for(int ordinal=0;ordinal<=maxOrdinal;ordinal++) {
int shardNumber = ordinal & shardMask;
if(deltaAdditions.get(ordinal)) {
long readPointer = ordinalMap.getPointerForData(ordinal);
int size = VarInt.readVInt(data, readPointer);
readPointer += VarInt.sizeOfVInt(size);
int numBuckets = HashCodes.hashTableSize(size);
long endBucketPosition = bucketCounter[shardNumber] + numBuckets;
setPointersAndSizesArray[shardNumber].setElementValue((long)bitsPerSetFixedLengthPortion * setCounter[shardNumber], bitsPerSetPointer, endBucketPosition);
setPointersAndSizesArray[shardNumber].setElementValue(((long)bitsPerSetFixedLengthPortion * setCounter[shardNumber]) + bitsPerSetPointer, bitsPerSetSizeValue, size);
int elementOrdinal = 0;
/// initialize every bucket to the all-ones "empty" sentinel value
for(int j=0;j<numBuckets;j++) {
elementArray[shardNumber].setElementValue((long)bitsPerElement * (bucketCounter[shardNumber] + j), bitsPerElement, (1L << bitsPerElement) - 1);
}
for(int j=0;j<size;j++) {
int elementOrdinalDelta = VarInt.readVInt(data, readPointer);
readPointer += VarInt.sizeOfVInt(elementOrdinalDelta);
int hashedBucket = VarInt.readVInt(data, readPointer);
readPointer += VarInt.sizeOfVInt(hashedBucket);
elementOrdinal += elementOrdinalDelta;
/// when the schema declares a hash key, rehash by primary key instead of
/// using the serialized bucket
if(primaryKeyHasher != null)
hashedBucket = primaryKeyHasher.getRecordHash(elementOrdinal) & (numBuckets - 1);
/// linear-probe forward to the next empty (all-ones) bucket
while(elementArray[shardNumber].getElementValue((long)bitsPerElement * (bucketCounter[shardNumber] + hashedBucket), bitsPerElement) != ((1L << bitsPerElement) - 1)) {
hashedBucket++;
hashedBucket &= (numBuckets - 1);
}
elementArray[shardNumber].clearElementValue((long)bitsPerElement * (bucketCounter[shardNumber] + hashedBucket), bitsPerElement);
elementArray[shardNumber].setElementValue((long)bitsPerElement * (bucketCounter[shardNumber] + hashedBucket), bitsPerElement, elementOrdinal);
}
bucketCounter[shardNumber] += numBuckets;
setCounter[shardNumber]++;
int shardOrdinal = ordinal / numShards;
VarInt.writeVInt(deltaAddedOrdinals[shardNumber], shardOrdinal - previousAddedOrdinal[shardNumber]);
previousAddedOrdinal[shardNumber] = shardOrdinal;
} else if(fromCyclePopulated.get(ordinal) && !toCyclePopulated.get(ordinal)) {
int shardOrdinal = ordinal / numShards;
VarInt.writeVInt(deltaRemovedOrdinals[shardNumber], shardOrdinal - previousRemovedOrdinal[shardNumber]);
previousRemovedOrdinal[shardNumber] = shardOrdinal;
}
}
}
private void writeCalculatedDelta(DataOutputStream os) throws IOException {
    // Unsharded blobs keep the legacy (pre v2.1.0) layout: no overall max ordinal.
    if (numShards > 1) {
        VarInt.writeVInt(os, maxOrdinal);              // overall max ordinal
        for (int shard = 0; shard < numShards; shard++)
            writeCalculatedDeltaShard(os, shard);
    } else {
        writeCalculatedDeltaShard(os, 0);
    }
    // Drop the per-shard scratch structures built by calculateDelta().
    setPointersAndSizesArray = null;
    elementArray = null;
    deltaAddedOrdinals = null;
    deltaRemovedOrdinals = null;
}
private void writeCalculatedDeltaShard(DataOutputStream os, int shardNumber) throws IOException {
/// Serializes one shard of the calculated delta in the section order below.
int bitsPerSetFixedLengthPortion = bitsPerSetSizeValue + bitsPerSetPointer;
/// 1) max ordinal
VarInt.writeVInt(os, maxShardOrdinal[shardNumber]);
/// 2) removal / addition ordinals.
VarInt.writeVLong(os, deltaRemovedOrdinals[shardNumber].length());
deltaRemovedOrdinals[shardNumber].getUnderlyingArray().writeTo(os, 0, deltaRemovedOrdinals[shardNumber].length());
VarInt.writeVLong(os, deltaAddedOrdinals[shardNumber].length());
deltaAddedOrdinals[shardNumber].getUnderlyingArray().writeTo(os, 0, deltaAddedOrdinals[shardNumber].length());
/// 3) statistics
VarInt.writeVInt(os, bitsPerSetPointer);
VarInt.writeVInt(os, bitsPerSetSizeValue);
VarInt.writeVInt(os, bitsPerElement);
VarInt.writeVLong(os, totalOfSetBuckets[shardNumber]);
/// 4) set pointer array
/// number of 64-bit words needed to hold numSetsInDelta fixed-length entries
int numSetFixedLengthLongs = numSetsInDelta[shardNumber] == 0 ? 0 : (int)((((long)numSetsInDelta[shardNumber] * bitsPerSetFixedLengthPortion) - 1) / 64) + 1;
VarInt.writeVInt(os, numSetFixedLengthLongs);
for(int i=0;i<numSetFixedLengthLongs;i++) {
os.writeLong(setPointersAndSizesArray[shardNumber].get(i));
}
/// 5) element array
int numElementLongs = numBucketsInDelta[shardNumber] == 0 ? 0 : (int)(((numBucketsInDelta[shardNumber] * bitsPerElement) - 1) / 64) + 1;
VarInt.writeVInt(os, numElementLongs);
for(int i=0;i<numElementLongs;i++) {
os.writeLong(elementArray[shardNumber].get(i));
}
}
}
| 8,967 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/HollowListTypeWriteState.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write;
import com.netflix.hollow.core.memory.ByteData;
import com.netflix.hollow.core.memory.ByteDataArray;
import com.netflix.hollow.core.memory.ThreadSafeBitSet;
import com.netflix.hollow.core.memory.encoding.FixedLengthElementArray;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.memory.pool.WastefulRecycler;
import com.netflix.hollow.core.schema.HollowListSchema;
import java.io.DataOutputStream;
import java.io.IOException;
public class HollowListTypeWriteState extends HollowTypeWriteState {
/// statistics required for writing fixed length list data
private int bitsPerListPointer;
private int bitsPerElement;
private long totalOfListSizes[];
/// data required for writing snapshot or delta
private int maxOrdinal;
private int maxShardOrdinal[];
private FixedLengthElementArray listPointerArray[];
private FixedLengthElementArray elementArray[];
/// additional data required for writing delta
private int numListsInDelta[];
private long numElementsInDelta[];
private ByteDataArray deltaAddedOrdinals[];
private ByteDataArray deltaRemovedOrdinals[];
/// Convenience constructor: -1 shards means the shard count is computed
/// automatically (see calculateNumShards) before the first write.
public HollowListTypeWriteState(HollowListSchema schema) {
this(schema, -1);
}
public HollowListTypeWriteState(HollowListSchema schema, int numShards) {
super(schema, numShards);
}
@Override
public HollowListSchema getSchema() {
return (HollowListSchema)schema;
}
@Override
public void prepareForWrite() {
super.prepareForWrite();
/// compute bit widths, per-shard max ordinals, and element totals before writing
gatherStatistics();
}
private void gatherStatistics() {
/// Computes the encoding statistics needed to write this cycle's data:
/// per-shard max ordinals, per-shard element totals, and the bit widths
/// used for element ordinals and list pointers.
if(numShards == -1)
calculateNumShards();
int maxOrdinal = ordinalMap.maxOrdinal();
int maxElementOrdinal = 0;
maxShardOrdinal = new int[numShards];
/// records map to shard (ordinal & (numShards-1)), so the first
/// ((maxOrdinal+1) mod numShards) shards hold one extra record
int minRecordLocationsPerShard = (maxOrdinal + 1) / numShards;
for(int i=0;i<numShards;i++)
maxShardOrdinal[i] = (i < ((maxOrdinal + 1) & (numShards - 1))) ? minRecordLocationsPerShard : minRecordLocationsPerShard - 1;
ByteData data = ordinalMap.getByteData().getUnderlyingArray();
totalOfListSizes = new long[numShards];
/// records populated in either cycle are counted, since snapshot, delta
/// and reverse delta are all written from these statistics
for(int i=0;i<=maxOrdinal;i++) {
if(currentCyclePopulated.get(i) || previousCyclePopulated.get(i)) {
long pointer = ordinalMap.getPointerForData(i);
int size = VarInt.readVInt(data, pointer);
pointer += VarInt.sizeOfVInt(size);
for(int j=0;j<size;j++) {
int elementOrdinal = VarInt.readVInt(data, pointer);
if(elementOrdinal > maxElementOrdinal)
maxElementOrdinal = elementOrdinal;
pointer += VarInt.sizeOfVInt(elementOrdinal);
}
totalOfListSizes[i & (numShards-1)] += size;
}
}
long maxShardTotalOfListSizes = 0;
for(int i=0;i<numShards;i++) {
if(totalOfListSizes[i] > maxShardTotalOfListSizes)
maxShardTotalOfListSizes = totalOfListSizes[i];
}
/// minimum bit widths able to represent the largest element ordinal and the
/// largest per-shard end-of-list pointer (at least 1 bit each)
bitsPerElement = maxElementOrdinal == 0 ? 1 : 64 - Long.numberOfLeadingZeros(maxElementOrdinal);
bitsPerListPointer = maxShardTotalOfListSizes == 0 ? 1 : 64 - Long.numberOfLeadingZeros(maxShardTotalOfListSizes);
}
/**
 * Automatically chooses this type's shard count when none was configured
 * (numShards == -1).
 *
 * Scans every record populated in the current cycle to find the maximum
 * element ordinal and the total number of list elements, projects the
 * fixed-length encoded size of the type in bytes from those statistics, and
 * doubles the shard count until each shard's share of the projected size
 * fits within the state engine's target max type shard size.
 */
private void calculateNumShards() {
    int maxOrdinal = ordinalMap.maxOrdinal();
    ByteData data = ordinalMap.getByteData().getUnderlyingArray();

    long maxElementOrdinal = 0;
    long totalOfListSizes = 0;

    for(int i=0;i<=maxOrdinal;i++) {
        if(currentCyclePopulated.get(i)) {
            /// serialized form: vint list size followed by one vint element ordinal per entry
            long pointer = ordinalMap.getPointerForData(i);
            int size = VarInt.readVInt(data, pointer);
            pointer += VarInt.sizeOfVInt(size);
            for(int j=0;j<size;j++) {
                int elementOrdinal = VarInt.readVInt(data, pointer);
                if(elementOrdinal > maxElementOrdinal)
                    maxElementOrdinal = elementOrdinal;
                pointer += VarInt.sizeOfVInt(elementOrdinal);
            }
            totalOfListSizes += size;
        }
    }

    long bitsPerElement = maxElementOrdinal == 0 ? 1 : 64 - Long.numberOfLeadingZeros(maxElementOrdinal);
    long bitsPerListPointer = totalOfListSizes == 0 ? 1 : 64 - Long.numberOfLeadingZeros(totalOfListSizes);

    long projectedSizeOfType = (bitsPerElement * totalOfListSizes) / 8;
    /// one list pointer per record, and there are (maxOrdinal + 1) records.
    /// BUG FIX: was (bitsPerListPointer * maxOrdinal + 1) / 8, which by
    /// operator precedence is ((bitsPerListPointer * maxOrdinal) + 1) / 8 and
    /// undercounts nearly one record's worth of pointer bits (compare the
    /// parenthesized (maxOrdinal + 1) in the Set type's projection).
    projectedSizeOfType += (bitsPerListPointer * (maxOrdinal + 1)) / 8;

    numShards = 1;
    while(stateEngine.getTargetMaxTypeShardSize() * numShards < projectedSizeOfType)
        numShards *= 2;
}
@Override
public void calculateSnapshot() {
/// Builds the per-shard fixed-length snapshot representation: a cumulative
/// end-of-list pointer per ordinal plus the packed element ordinals.
maxOrdinal = ordinalMap.maxOrdinal();
listPointerArray = new FixedLengthElementArray[numShards];
elementArray = new FixedLengthElementArray[numShards];
for(int i=0;i<numShards;i++) {
listPointerArray[i] = new FixedLengthElementArray(WastefulRecycler.DEFAULT_INSTANCE, (long)bitsPerListPointer * (maxShardOrdinal[i] + 1));
elementArray[i] = new FixedLengthElementArray(WastefulRecycler.DEFAULT_INSTANCE, (long)bitsPerElement * totalOfListSizes[i]);
}
ByteData data = ordinalMap.getByteData().getUnderlyingArray();
long elementCounter[] = new long[numShards];
/// numShards is a power of 2, so (ordinal & shardMask) selects the shard
int shardMask = numShards - 1;
for(int ordinal=0;ordinal<=maxOrdinal;ordinal++) {
int shardNumber = ordinal & shardMask;
int shardOrdinal = ordinal / numShards;
if(currentCyclePopulated.get(ordinal)) {
/// serialized form: vint list size followed by one vint element ordinal per entry
long readPointer = ordinalMap.getPointerForData(ordinal);
int size = VarInt.readVInt(data, readPointer);
readPointer += VarInt.sizeOfVInt(size);
for(int j=0;j<size;j++) {
int elementOrdinal = VarInt.readVInt(data, readPointer);
readPointer += VarInt.sizeOfVInt(elementOrdinal);
elementArray[shardNumber].setElementValue((long)bitsPerElement * elementCounter[shardNumber], bitsPerElement, elementOrdinal);
elementCounter[shardNumber]++;
}
}
/// the cumulative end pointer is recorded for every ordinal, populated or not
listPointerArray[shardNumber].setElementValue((long)bitsPerListPointer * shardOrdinal, bitsPerListPointer, elementCounter[shardNumber]);
}
}
@Override
public void writeSnapshot(DataOutputStream os) throws IOException {
    // Single-shard blobs keep the legacy layout (no overall max ordinal) so
    // that clients older than v2.1.0, which predate sharding, can read them.
    if (numShards > 1) {
        VarInt.writeVInt(os, maxOrdinal);              // overall max ordinal
        for (int shard = 0; shard < numShards; shard++)
            writeSnapshotShard(os, shard);
    } else {
        writeSnapshotShard(os, 0);
    }
    // Bit set of ordinals populated in the current cycle.
    currentCyclePopulated.serializeBitsTo(os);
    // Release the large temporary arrays built by calculateSnapshot().
    listPointerArray = null;
    elementArray = null;
}
private void writeSnapshotShard(DataOutputStream os, int shardNumber) throws IOException {
/// Serializes one shard of the snapshot in the section order listed below.
/// 1) shard max ordinal
VarInt.writeVInt(os, maxShardOrdinal[shardNumber]);
/// 2) statistics
VarInt.writeVInt(os, bitsPerListPointer);
VarInt.writeVInt(os, bitsPerElement);
VarInt.writeVLong(os, totalOfListSizes[shardNumber]);
/// 3) list pointer array
/// number of 64-bit words needed to hold (maxShardOrdinal+1) pointers
int numListPointerLongs = maxShardOrdinal[shardNumber] == -1 ? 0 : (int)((((long)(maxShardOrdinal[shardNumber] + 1) * bitsPerListPointer) - 1) / 64) + 1;
VarInt.writeVInt(os, numListPointerLongs);
for(int i=0;i<numListPointerLongs;i++) {
os.writeLong(listPointerArray[shardNumber].get(i));
}
/// 4) element array
int numElementLongs = totalOfListSizes[shardNumber] == 0 ? 0 : (int)(((totalOfListSizes[shardNumber] * bitsPerElement) - 1) / 64) + 1;
VarInt.writeVInt(os, numElementLongs);
for(int i=0;i<numElementLongs;i++) {
os.writeLong(elementArray[shardNumber].get(i));
}
}
@Override
public void calculateDelta() {
/// forward delta: additions are records populated now but not previously
calculateDelta(previousCyclePopulated, currentCyclePopulated);
}
@Override
public void writeDelta(DataOutputStream dos) throws IOException {
writeCalculatedDelta(dos);
}
@Override
public void calculateReverseDelta() {
/// reverse delta: identical calculation with the populated bit sets swapped
calculateDelta(currentCyclePopulated, previousCyclePopulated);
}
@Override
public void writeReverseDelta(DataOutputStream dos) throws IOException {
writeCalculatedDelta(dos);
}
private void calculateDelta(ThreadSafeBitSet fromCyclePopulated, ThreadSafeBitSet toCyclePopulated) {
/// Builds the per-shard delta representation: list pointers and packed
/// elements for every record added between the two cycles, plus
/// vint-delta-encoded lists of added and removed shard ordinals.
maxOrdinal = ordinalMap.maxOrdinal();
numListsInDelta = new int[numShards];
numElementsInDelta = new long[numShards];
listPointerArray = new FixedLengthElementArray[numShards];
elementArray = new FixedLengthElementArray[numShards];
deltaAddedOrdinals = new ByteDataArray[numShards];
deltaRemovedOrdinals = new ByteDataArray[numShards];
ThreadSafeBitSet deltaAdditions = toCyclePopulated.andNot(fromCyclePopulated);
int shardMask = numShards - 1;
/// first pass: count added lists and elements per shard so the
/// fixed-length arrays can be sized up front
int addedOrdinal = deltaAdditions.nextSetBit(0);
while(addedOrdinal != -1) {
numListsInDelta[addedOrdinal & shardMask]++;
long readPointer = ordinalMap.getPointerForData(addedOrdinal);
numElementsInDelta[addedOrdinal & shardMask] += VarInt.readVInt(ordinalMap.getByteData().getUnderlyingArray(), readPointer);
addedOrdinal = deltaAdditions.nextSetBit(addedOrdinal + 1);
}
for(int i=0;i<numShards;i++) {
listPointerArray[i] = new FixedLengthElementArray(WastefulRecycler.DEFAULT_INSTANCE, (long)numListsInDelta[i] * bitsPerListPointer);
elementArray[i] = new FixedLengthElementArray(WastefulRecycler.DEFAULT_INSTANCE, numElementsInDelta[i] * bitsPerElement);
deltaAddedOrdinals[i] = new ByteDataArray(WastefulRecycler.DEFAULT_INSTANCE);
deltaRemovedOrdinals[i] = new ByteDataArray(WastefulRecycler.DEFAULT_INSTANCE);
}
ByteData data = ordinalMap.getByteData().getUnderlyingArray();
int listCounter[] = new int[numShards];
long elementCounter[] = new long[numShards];
int previousRemovedOrdinal[] = new int[numShards];
int previousAddedOrdinal[] = new int[numShards];
/// second pass: copy each added record's elements and record added/removed
/// shard ordinals as deltas from the previous one seen
for(int ordinal=0;ordinal<=maxOrdinal;ordinal++) {
int shardNumber = ordinal & shardMask;
if(deltaAdditions.get(ordinal)) {
long readPointer = ordinalMap.getPointerForData(ordinal);
int size = VarInt.readVInt(data, readPointer);
readPointer += VarInt.sizeOfVInt(size);
listPointerArray[shardNumber].setElementValue((long)bitsPerListPointer * listCounter[shardNumber], bitsPerListPointer, elementCounter[shardNumber] + size);
for(int j=0;j<size;j++) {
int elementOrdinal = VarInt.readVInt(data, readPointer);
readPointer += VarInt.sizeOfVInt(elementOrdinal);
elementArray[shardNumber].setElementValue((long)bitsPerElement * elementCounter[shardNumber], bitsPerElement, elementOrdinal);
elementCounter[shardNumber]++;
}
listCounter[shardNumber]++;
int shardOrdinal = ordinal / numShards;
VarInt.writeVInt(deltaAddedOrdinals[shardNumber], shardOrdinal - previousAddedOrdinal[shardNumber]);
previousAddedOrdinal[shardNumber] = shardOrdinal;
} else if(fromCyclePopulated.get(ordinal) && !toCyclePopulated.get(ordinal)) {
int shardOrdinal = ordinal / numShards;
VarInt.writeVInt(deltaRemovedOrdinals[shardNumber], shardOrdinal - previousRemovedOrdinal[shardNumber]);
previousRemovedOrdinal[shardNumber] = shardOrdinal;
}
}
}
private void writeCalculatedDelta(DataOutputStream os) throws IOException {
    // Unsharded blobs keep the legacy (pre v2.1.0) layout: no overall max ordinal.
    if (numShards > 1) {
        VarInt.writeVInt(os, maxOrdinal);              // overall max ordinal
        for (int shard = 0; shard < numShards; shard++)
            writeCalculatedDeltaShard(os, shard);
    } else {
        writeCalculatedDeltaShard(os, 0);
    }
    // Drop the per-shard scratch structures built by calculateDelta().
    listPointerArray = null;
    elementArray = null;
    deltaAddedOrdinals = null;
    deltaRemovedOrdinals = null;
}
private void writeCalculatedDeltaShard(DataOutputStream os, int shardNumber) throws IOException {
/// Serializes one shard of the calculated delta in the section order below.
/// 1) max shard ordinal
VarInt.writeVInt(os, maxShardOrdinal[shardNumber]);
/// 2) removal / addition ordinals.
VarInt.writeVLong(os, deltaRemovedOrdinals[shardNumber].length());
deltaRemovedOrdinals[shardNumber].getUnderlyingArray().writeTo(os, 0, deltaRemovedOrdinals[shardNumber].length());
VarInt.writeVLong(os, deltaAddedOrdinals[shardNumber].length());
deltaAddedOrdinals[shardNumber].getUnderlyingArray().writeTo(os, 0, deltaAddedOrdinals[shardNumber].length());
/// 3) statistics
VarInt.writeVInt(os, bitsPerListPointer);
VarInt.writeVInt(os, bitsPerElement);
VarInt.writeVLong(os, totalOfListSizes[shardNumber]);
/// 4) list pointer array
/// number of 64-bit words needed to hold numListsInDelta pointers
int numListPointerLongs = numListsInDelta[shardNumber] == 0 ? 0 : (int)((((long)numListsInDelta[shardNumber] * bitsPerListPointer) - 1) / 64) + 1;
VarInt.writeVInt(os, numListPointerLongs);
for(int i=0;i<numListPointerLongs;i++) {
os.writeLong(listPointerArray[shardNumber].get(i));
}
/// 5) element array
int numElementLongs = numElementsInDelta[shardNumber] == 0 ? 0 : (int)(((numElementsInDelta[shardNumber] * bitsPerElement) - 1) / 64) + 1;
VarInt.writeVInt(os, numElementLongs);
for(int i=0;i<numElementLongs;i++) {
os.writeLong(elementArray[shardNumber].get(i));
}
}
}
| 8,968 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/HollowWriteRecord.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write;
import com.netflix.hollow.core.memory.ByteDataArray;
/**
 * A reusable builder for a single serialized Hollow record.  Implementations
 * accumulate field data, serialize it via {@link #writeDataTo(ByteDataArray)},
 * and are reset between records with {@link #reset()}.
 */
public interface HollowWriteRecord {

    /**
     * Appends this record's serialized representation to the supplied buffer.
     *
     * @param buf the buffer to which the serialized record is appended
     */
    void writeDataTo(ByteDataArray buf);

    /** Clears this record's accumulated state so the instance can be reused. */
    void reset();
}
| 8,969 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/HollowTypeWriteState.java | /*
* Copyright 2016-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write;
import static com.netflix.hollow.core.write.HollowHashableWriteRecord.HashBehavior.IGNORED_HASHES;
import static com.netflix.hollow.core.write.HollowHashableWriteRecord.HashBehavior.UNMIXED_HASHES;
import com.netflix.hollow.core.memory.ByteArrayOrdinalMap;
import com.netflix.hollow.core.memory.ByteDataArray;
import com.netflix.hollow.core.memory.ThreadSafeBitSet;
import com.netflix.hollow.core.memory.pool.WastefulRecycler;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.PopulatedOrdinalListener;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.write.HollowHashableWriteRecord.HashBehavior;
import com.netflix.hollow.core.write.copy.HollowRecordCopier;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.BitSet;
/**
* The {@link HollowTypeWriteState} contains and is the root handle to all of the records of a specific type in
* a {@link HollowWriteStateEngine}.
*/
public abstract class HollowTypeWriteState {
protected final HollowSchema schema;
protected final ByteArrayOrdinalMap ordinalMap;
protected int numShards;
protected HollowSchema restoredSchema;
protected ByteArrayOrdinalMap restoredMap;
protected HollowTypeReadState restoredReadState;
protected ThreadSafeBitSet currentCyclePopulated;
protected ThreadSafeBitSet previousCyclePopulated;
private final ThreadLocal<ByteDataArray> serializedScratchSpace;
protected HollowWriteStateEngine stateEngine;
private boolean wroteData = false;
public HollowTypeWriteState(HollowSchema schema, int numShards) {
this.schema = schema;
this.ordinalMap = new ByteArrayOrdinalMap();
this.serializedScratchSpace = new ThreadLocal<ByteDataArray>();
this.currentCyclePopulated = new ThreadSafeBitSet();
this.previousCyclePopulated = new ThreadSafeBitSet();
this.numShards = numShards;
/// -1 means "choose automatically"; any explicit count must be a positive power of 2
if(numShards != -1 && ((numShards & (numShards - 1)) != 0 || numShards <= 0))
throw new IllegalArgumentException("Number of shards must be a power of 2! Check configuration for type " + schema.getName());
}
/**
 * Add an object to this state. We will create a serialized representation of this object, then
 * assign or retrieve the ordinal for this serialized representation in our {@link ByteArrayOrdinalMap}.
 * @param rec the record to add to this state
 * @return the ordinal of the added record
 */
public int add(HollowWriteRecord rec) {
if(!ordinalMap.isReadyForAddingObjects())
throw new RuntimeException("The HollowWriteStateEngine is not ready to add more Objects. Did you remember to call stateEngine.prepareForNextCycle()?")
;
int ordinal;
/// when restoring from a prior state, try to reuse the record's old ordinal
if(restoredMap == null) {
ordinal = assignOrdinal(rec);
} else {
ordinal = reuseOrdinalFromRestoredState(rec);
}
currentCyclePopulated.set(ordinal);
return ordinal;
}
/// Serializes the record into a scratch buffer (presumably the thread-local
/// serializedScratchSpace -- scratch() is defined elsewhere in this class) and
/// assigns or retrieves its ordinal.
private int assignOrdinal(HollowWriteRecord rec) {
ByteDataArray scratch = scratch();
rec.writeDataTo(scratch);
int ordinal = ordinalMap.getOrAssignOrdinal(scratch);
scratch.reset();
return ordinal;
}
/// Looks the record up in the restored state's ordinal map (serializing with
/// the restored schema / with hashes ignored so the bytes match the restored
/// form), then uses that ordinal as the preferred assignment in the current map.
private int reuseOrdinalFromRestoredState(HollowWriteRecord rec) {
ByteDataArray scratch = scratch();
int ordinal;
if(restoredSchema instanceof HollowObjectSchema) {
((HollowObjectWriteRecord)rec).writeDataTo(scratch, (HollowObjectSchema)restoredSchema);
int preferredOrdinal = restoredMap.get(scratch);
scratch.reset();
rec.writeDataTo(scratch);
ordinal = ordinalMap.getOrAssignOrdinal(scratch, preferredOrdinal);
} else {
if(rec instanceof HollowHashableWriteRecord) {
((HollowHashableWriteRecord) rec).writeDataTo(scratch, IGNORED_HASHES);
int preferredOrdinal = restoredMap.get(scratch);
scratch.reset();
rec.writeDataTo(scratch);
ordinal = ordinalMap.getOrAssignOrdinal(scratch, preferredOrdinal);
} else {
rec.writeDataTo(scratch);
int preferredOrdinal = restoredMap.get(scratch);
ordinal = ordinalMap.getOrAssignOrdinal(scratch, preferredOrdinal);
}
}
scratch.reset();
return ordinal;
}
/**
 * Resets this write state to empty (i.e. as if prepareForNextCycle() had just been called)
 */
public void resetToLastPrepareForNextCycle() {
if(restoredReadState == null) {
currentCyclePopulated.clearAll();
ordinalMap.compact(previousCyclePopulated, numShards, stateEngine.isFocusHoleFillInFewestShards());
} else {
/// this state engine began the cycle as a restored state engine
currentCyclePopulated.clearAll();
previousCyclePopulated.clearAll();
ordinalMap.compact(previousCyclePopulated, numShards, stateEngine.isFocusHoleFillInFewestShards());
restoreFrom(restoredReadState);
wroteData = false;
}
}
/// Carries every record populated in the previous cycle forward into the current cycle.
public void addAllObjectsFromPreviousCycle() {
if(!ordinalMap.isReadyForAddingObjects())
throw new RuntimeException("The HollowWriteStateEngine is not ready to add more Objects. Did you remember to call stateEngine.prepareForNextCycle()?");
currentCyclePopulated = ThreadSafeBitSet.orAll(previousCyclePopulated, currentCyclePopulated);
}
/// Carries a single previous-cycle record forward; the ordinal must have been
/// populated in the previous cycle.
public void addOrdinalFromPreviousCycle(int ordinal) {
if(!ordinalMap.isReadyForAddingObjects())
throw new RuntimeException("The HollowWriteStateEngine is not ready to add more Objects. Did you remember to call stateEngine.prepareForNextCycle()?");
if(!previousCyclePopulated.get(ordinal))
throw new IllegalArgumentException("Ordinal " + ordinal + " was not present in the previous cycle");
currentCyclePopulated.set(ordinal);
}
/// Unmarks a single ordinal from the current cycle's populated set.
public void removeOrdinalFromThisCycle(int ordinalToRemove) {
if(!ordinalMap.isReadyForAddingObjects())
throw new RuntimeException("The HollowWriteStateEngine is not ready to add more Objects. Did you remember to call stateEngine.prepareForNextCycle()?");
currentCyclePopulated.clear(ordinalToRemove);
}
/// Unmarks every ordinal from the current cycle's populated set.
public void removeAllOrdinalsFromThisCycle() {
if(!ordinalMap.isReadyForAddingObjects())
throw new RuntimeException("The HollowWriteStateEngine is not ready to add more Objects. Did you remember to call stateEngine.prepareForNextCycle()?");
currentCyclePopulated.clearAll();
}
/**
 * Put an object in this state with a specific ordinal, and update the currentCyclePopulated bitset.
 *
 * WARNING: This method is not thread safe.
 * WARNING: This method may result in duplicate records getting added into the state engine. Do not add
 * records using this method which have already been added to this write state in the current cycle.
 * WARNING: This method will not automatically update the ByteArrayOrdinalMap's free ordinals. This will corrupt
 * the state unless all remapped ordinals are *also* removed from the free ordinal list using recalculateFreeOrdinals()
 * after all calls to mapOrdinal() are complete.
 *
 * @param rec the record
 * @param newOrdinal the new ordinal
 * @param markPreviousCycle true if the previous populated cycle should be updated
 * @param markCurrentCycle true if the current populated cycle should be updated
 */
public void mapOrdinal(HollowWriteRecord rec, int newOrdinal, boolean markPreviousCycle, boolean markCurrentCycle) {
if(!ordinalMap.isReadyForAddingObjects())
throw new RuntimeException("The HollowWriteStateEngine is not ready to add more Objects. Did you remember to call stateEngine.prepareForNextCycle()?");
ByteDataArray scratch = scratch();
rec.writeDataTo(scratch);
ordinalMap.put(scratch, newOrdinal);
if(markPreviousCycle)
previousCyclePopulated.set(newOrdinal);
if(markCurrentCycle)
currentCyclePopulated.set(newOrdinal);
scratch.reset();
}
/**
 * Correct the free ordinal list after using mapOrdinal()
 */
public void recalculateFreeOrdinals() {
ordinalMap.recalculateFreeOrdinals();
}
public ThreadSafeBitSet getPopulatedBitSet() {
return currentCyclePopulated;
}
public ThreadSafeBitSet getPreviousCyclePopulatedBitSet() {
return previousCyclePopulated;
}
public HollowSchema getSchema() {
return schema;
}
int getNumShards() {
return numShards;
}
public void setNumShards(int numShards) {
if(this.numShards == -1) {
this.numShards = numShards;
} else if(this.numShards != numShards) {
throw new IllegalStateException("The number of shards for type " + schema.getName() + " is already fixed to " + this.numShards + ". Cannot reset to " + numShards + ".");
}
}
public void resizeOrdinalMap(int size) {
ordinalMap.resize(size);
}
/**
 * Called to perform a state transition.<p>
 *
 * Precondition: We are writing the previously added objects to a FastBlob.<br>
 * Postcondition: We are ready to add objects to this state engine for the next server cycle.
 */
public void prepareForNextCycle() {
    // compact away records no longer referenced by the current cycle
    ordinalMap.compact(currentCyclePopulated, numShards, stateEngine.isFocusHoleFillInFewestShards());

    // swap the cycle bitsets: current becomes previous, and the old previous
    // bitset is cleared for reuse as the new current cycle's bitset
    ThreadSafeBitSet temp = previousCyclePopulated;
    previousCyclePopulated = currentCyclePopulated;
    currentCyclePopulated = temp;

    currentCyclePopulated.clearAll();

    // restore bookkeeping is only relevant for the first cycle after a restore
    restoredMap = null;
    restoredSchema = null;
    restoredReadState = null;
}
/**
 * Transition this type state into write mode.
 * If this state was restored and has not yet written data, the previous cycle's
 * unused records are re-serialized into the ordinal map (without marking them as
 * populated in the current cycle) so that a reverse delta can reference them.
 */
public void prepareForWrite() {
    /// write all of the unused objects to the current ordinalMap, without updating the current cycle bitset,
    /// this way we can do a reverse delta.
    if(isRestored() && !wroteData) {
        HollowRecordCopier copier = HollowRecordCopier.createCopier(restoredReadState, schema);

        BitSet unusedPreviousOrdinals = ordinalMap.getUnusedPreviousOrdinals();

        int ordinal = unusedPreviousOrdinals.nextSetBit(0);
        while(ordinal != -1) {
            restoreOrdinal(ordinal, copier, ordinalMap, UNMIXED_HASHES);
            ordinal = unusedPreviousOrdinals.nextSetBit(ordinal + 1);
        }
    }

    ordinalMap.prepareForWrite();
    wroteData = true;
}
/** @return true if the populated ordinals differ between the current and previous cycles */
public boolean hasChangedSinceLastCycle() {
    return !currentCyclePopulated.equals(previousCyclePopulated);
}
/** @return true if this state was restored from a read state (unused previous ordinals are tracked) */
public boolean isRestored() {
    return ordinalMap.getUnusedPreviousOrdinals() != null;
}
/** Pre-compute the data structures required to write a snapshot blob. */
public abstract void calculateSnapshot();
/** Write the previously calculated snapshot for this type to the stream. */
public abstract void writeSnapshot(DataOutputStream dos) throws IOException;
/** Pre-compute the data structures required to write a (forward) delta blob. */
public abstract void calculateDelta();
/** Write the previously calculated delta for this type to the stream. */
public abstract void writeDelta(DataOutputStream dos) throws IOException;
/** Pre-compute the data structures required to write a reverse delta blob. */
public abstract void calculateReverseDelta();
/** Write the previously calculated reverse delta for this type to the stream. */
public abstract void writeReverseDelta(DataOutputStream dos) throws IOException;
/**
 * Restore this (empty) write state from a read state: every populated ordinal in the
 * read state is copied into a dedicated restore map and marked as populated in the
 * previous cycle, so the next write can produce a delta from the restored state.
 *
 * @param readState the read state to restore from
 * @throws IllegalStateException if this write state already contains records
 */
protected void restoreFrom(HollowTypeReadState readState) {
    if(previousCyclePopulated.cardinality() != 0 || currentCyclePopulated.cardinality() != 0)
        throw new IllegalStateException("Attempting to restore into a non-empty state (type " + schema.getName() + ")");

    PopulatedOrdinalListener listener = readState.getListener(PopulatedOrdinalListener.class);
    BitSet populatedOrdinals = listener.getPopulatedOrdinals();

    restoredReadState = readState;
    // for object schemas, restore only the fields common to both schemas
    if(schema instanceof HollowObjectSchema)
        restoredSchema = ((HollowObjectSchema)schema).findCommonSchema((HollowObjectSchema)readState.getSchema());
    else
        restoredSchema = readState.getSchema();

    HollowRecordCopier copier = HollowRecordCopier.createCopier(restoredReadState, restoredSchema);

    // Size the restore ordinal map to avoid resizing when adding ordinals
    int size = populatedOrdinals.cardinality();
    restoredMap = new ByteArrayOrdinalMap(size);

    int ordinal = populatedOrdinals.nextSetBit(0);
    while(ordinal != -1) {
        previousCyclePopulated.set(ordinal);
        restoreOrdinal(ordinal, copier, restoredMap, IGNORED_HASHES);
        ordinal = populatedOrdinals.nextSetBit(ordinal + 1);
    }

    // Resize the ordinal map to avoid resizing when populating
    ordinalMap.resize(size);
    ordinalMap.reservePreviouslyPopulatedOrdinals(populatedOrdinals);
}
/**
 * Copy a single record out of a restored read state and store its serialized form
 * in the destination map at the same ordinal.
 *
 * @param ordinal the record's ordinal in the read state (reused in the destination)
 * @param copier copies records from the restored read state
 * @param destinationMap where the serialized record is stored
 * @param hashBehavior how element hashes are written for hashable records
 */
protected void restoreOrdinal(int ordinal, HollowRecordCopier copier, ByteArrayOrdinalMap destinationMap, HashBehavior hashBehavior) {
    HollowWriteRecord copiedRec = copier.copy(ordinal);

    ByteDataArray buf = scratch();
    if(copiedRec instanceof HollowHashableWriteRecord) {
        ((HollowHashableWriteRecord) copiedRec).writeDataTo(buf, hashBehavior);
    } else {
        copiedRec.writeDataTo(buf);
    }

    destinationMap.put(buf, ordinal);
    buf.reset();
}
/**
 * Get or lazily create this thread's scratch byte array. Serialization scratch
 * space cannot be shared across threads, so each thread gets its own instance
 * via a ThreadLocal.
 *
 * @return the calling thread's scratch byte array
 */
protected ByteDataArray scratch() {
    ByteDataArray buf = serializedScratchSpace.get();
    if(buf != null)
        return buf;

    buf = new ByteDataArray(WastefulRecycler.DEFAULT_INSTANCE);
    serializedScratchSpace.set(buf);
    return buf;
}
/** Associate this type state with its owning write state engine (package-private wiring). */
void setStateEngine(HollowWriteStateEngine writeEngine) {
    this.stateEngine = writeEngine;
}
/** @return the write state engine this type state belongs to */
public HollowWriteStateEngine getStateEngine() {
    return stateEngine;
}
}
| 8,970 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/HollowObjectWriteRecord.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write;
import com.netflix.hollow.core.memory.ByteDataArray;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.memory.encoding.ZigZag;
import com.netflix.hollow.core.memory.pool.WastefulRecycler;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
/**
 * A write record for OBJECT type records. Each field is serialized into its own
 * per-field buffer as it is set; writeDataTo() concatenates the fields in schema
 * order to produce the record's serialized representation.
 * <p>
 * Not thread-safe; intended to be reset and reused by a single thread.
 */
public class HollowObjectWriteRecord implements HollowWriteRecord {

    private final HollowObjectSchema schema;

    // one serialization buffer per schema field; isNonNull[i] tracks whether field i was set
    private final ByteDataArray fieldData[];
    private final boolean isNonNull[];

    public HollowObjectWriteRecord(HollowObjectSchema schema) {
        this.schema = schema;
        this.fieldData = new ByteDataArray[schema.numFields()];
        this.isNonNull = new boolean[schema.numFields()];
        for (int i = 0; i < fieldData.length; i++) {
            fieldData[i] = new ByteDataArray(WastefulRecycler.SMALL_ARRAY_RECYCLER);
        }
    }

    public HollowObjectSchema getSchema() {
        return schema;
    }

    /**
     * Concatenates all fields, in order, to the ByteDataBuffer supplied. This concatenation is the
     * verbatim serialized representation in the FastBlob.
     *
     * @param buf the data buffer to write data to
     */
    public void writeDataTo(ByteDataArray buf) {
        for (int i = 0; i < fieldData.length; i++) {
            writeField(buf, i);
        }
    }

    /**
     * Write this record's data in the field order of a different (translation) schema.
     * Fields present in the translation schema but absent from this record's schema
     * are written as nulls.
     *
     * @param buf the data buffer to write data to
     * @param translate the schema whose field order/content to write
     */
    public void writeDataTo(ByteDataArray buf, HollowObjectSchema translate) {
        for(int i=0; i < translate.numFields(); i++) {
            int fieldIndex = schema.getPosition(translate.getFieldName(i));
            if(fieldIndex != -1) {
                writeField(buf, fieldIndex);
            } else {
                writeNull(buf, translate.getFieldType(i));
            }
        }
    }

    // write one field: variable-length fields are prefixed with their byte length
    private void writeField(ByteDataArray buf, int fieldIndex) {
        if (isNonNull[fieldIndex]) {
            if (getSchema().getFieldType(fieldIndex).isVariableLength())
                VarInt.writeVInt(buf, (int)fieldData[fieldIndex].length());
            fieldData[fieldIndex].copyTo(buf);
        } else {
            writeNull(buf, schema.getFieldType(fieldIndex));
        }
    }

    /**
     * Reset the ByteDataBuffers for each field.
     */
    public void reset() {
        // buffers themselves are lazily reset in getFieldBuffer(); clearing the
        // null flags is sufficient to mark all fields unset
        for (int i = 0; i < fieldData.length; i++) {
            isNonNull[i] = false;
        }
    }

    public void setNull(String fieldName) {
        int fieldIndex = getSchema().getPosition(fieldName);
        ByteDataArray fieldBuffer = getFieldBuffer(fieldIndex);
        FieldType fieldType = getSchema().getFieldType(fieldIndex);
        writeNull(fieldBuffer, fieldType);
    }

    // NOTE: Integer.MIN_VALUE is the null sentinel for INT fields and cannot be stored
    public void setInt(String fieldName, int value) {
        if(value == Integer.MIN_VALUE) {
            setNull(fieldName);
        } else {
            int fieldIndex = getSchema().getPosition(fieldName);
            validateFieldType(fieldIndex, fieldName, FieldType.INT);
            ByteDataArray buf = getFieldBuffer(fieldIndex);
            // zig zag encoding
            VarInt.writeVInt(buf, ZigZag.encodeInt(value));
        }
    }

    // NOTE: Long.MIN_VALUE is the null sentinel for LONG fields and cannot be stored
    public void setLong(String fieldName, long value) {
        if(value == Long.MIN_VALUE) {
            setNull(fieldName);
        } else {
            int fieldIndex = getSchema().getPosition(fieldName);
            validateFieldType(fieldIndex, fieldName, FieldType.LONG);
            ByteDataArray buf = getFieldBuffer(fieldIndex);
            // zig zag encoding
            VarInt.writeVLong(buf, ZigZag.encodeLong(value));
        }
    }

    public void setFloat(String fieldName, float value) {
        int fieldIndex = getSchema().getPosition(fieldName);
        validateFieldType(fieldIndex, fieldName, FieldType.FLOAT);
        ByteDataArray buf = getFieldBuffer(fieldIndex);
        // floats are stored as their fixed-length 4-byte IEEE 754 bit pattern
        int intBits = Float.floatToIntBits(value);
        writeFixedLengthInt(buf, intBits);
    }

    public void setDouble(String fieldName, double value) {
        int fieldIndex = getSchema().getPosition(fieldName);
        validateFieldType(fieldIndex, fieldName, FieldType.DOUBLE);
        ByteDataArray buf = getFieldBuffer(fieldIndex);
        // doubles are stored as their fixed-length 8-byte IEEE 754 bit pattern
        long longBits = Double.doubleToLongBits(value);
        writeFixedLengthLong(buf, longBits);
    }

    public void setBoolean(String fieldName, boolean value) {
        int fieldIndex = getSchema().getPosition(fieldName);
        validateFieldType(fieldIndex, fieldName, FieldType.BOOLEAN);
        ByteDataArray buf = getFieldBuffer(fieldIndex);
        buf.write(value ? (byte) 1 : (byte) 0);
    }

    // a null argument leaves the field unset, so it serializes as null
    public void setBytes(String fieldName, byte[] value) {
        if(value == null) return;
        int fieldIndex = getSchema().getPosition(fieldName);
        validateFieldType(fieldIndex, fieldName, FieldType.BYTES);
        ByteDataArray buf = getFieldBuffer(fieldIndex);
        for (int i = 0; i < value.length; i++) {
            buf.write(value[i]);
        }
    }

    // a null argument leaves the field unset, so it serializes as null;
    // characters are written as individual varints
    public void setString(String fieldName, String value) {
        if(value == null) return;
        int fieldIndex = getSchema().getPosition(fieldName);
        validateFieldType(fieldIndex, fieldName, FieldType.STRING);
        ByteDataArray buf = getFieldBuffer(fieldIndex);
        for(int i=0;i<value.length();i++) {
            VarInt.writeVInt(buf, value.charAt(i));
        }
    }

    public void setReference(String fieldName, int ordinal) {
        int fieldIndex = getSchema().getPosition(fieldName);
        validateFieldType(fieldIndex, fieldName, FieldType.REFERENCE);
        ByteDataArray buf = getFieldBuffer(fieldIndex);
        VarInt.writeVInt(buf, ordinal);
    }

    // FLOAT/DOUBLE nulls use reserved fixed-length bit patterns; all other types use the varint null marker
    private void writeNull(ByteDataArray buf, FieldType fieldType) {
        if(fieldType == FieldType.FLOAT) {
            writeNullFloat(buf);
        } else if(fieldType == FieldType.DOUBLE) {
            writeNullDouble(buf);
        } else {
            VarInt.writeVNull(buf);
        }
    }

    /**
     * Returns the buffer which should be used to serialize the data for the field at the given position in the schema,
     * resetting it and marking the field as set.<p>
     *
     * This is used by the FastBlobFrameworkSerializer when writing the data for a specific field.
     *
     * @param fieldPosition the field's position in the schema
     * @return the (reset) buffer for the field
     */
    private ByteDataArray getFieldBuffer(int fieldPosition) {
        isNonNull[fieldPosition] = true;
        fieldData[fieldPosition].reset();
        return fieldData[fieldPosition];
    }

    // reserved bit patterns (NaN bits + 1) used as null sentinels for FLOAT/DOUBLE fields
    public static final int NULL_FLOAT_BITS = Float.floatToIntBits(Float.NaN) + 1;
    public static final long NULL_DOUBLE_BITS = Double.doubleToLongBits(Double.NaN) + 1;

    /**
     * Serialize a special 4-byte long sequence indicating a null Float value.
     */
    private static void writeNullFloat(final ByteDataArray fieldBuffer) {
        writeFixedLengthInt(fieldBuffer, NULL_FLOAT_BITS);
    }

    /**
     * Write 4 consecutive bytes (big-endian)
     */
    private static void writeFixedLengthInt(ByteDataArray fieldBuffer, int intBits) {
        fieldBuffer.write((byte) (intBits >>> 24));
        fieldBuffer.write((byte) (intBits >>> 16));
        fieldBuffer.write((byte) (intBits >>> 8));
        fieldBuffer.write((byte) (intBits));
    }

    /**
     * Serialize a special 8-byte long sequence indicating a null Double value.
     */
    private static void writeNullDouble(ByteDataArray fieldBuffer) {
        writeFixedLengthLong(fieldBuffer, NULL_DOUBLE_BITS);
    }

    /**
     * Write 8 consecutive bytes (big-endian)
     */
    private static void writeFixedLengthLong(ByteDataArray fieldBuffer, long intBits) {
        fieldBuffer.write((byte) (intBits >>> 56));
        fieldBuffer.write((byte) (intBits >>> 48));
        fieldBuffer.write((byte) (intBits >>> 40));
        fieldBuffer.write((byte) (intBits >>> 32));
        fieldBuffer.write((byte) (intBits >>> 24));
        fieldBuffer.write((byte) (intBits >>> 16));
        fieldBuffer.write((byte) (intBits >>> 8));
        fieldBuffer.write((byte) (intBits));
    }

    // guards against writing a value whose type does not match the schema's declared field type
    private void validateFieldType(int fieldIndex, String fieldName, FieldType attemptedFieldType) {
        if(getSchema().getFieldType(fieldIndex) != attemptedFieldType) {
            throw new IllegalArgumentException("Attempting to serialize " + attemptedFieldType + " in field " + fieldName + ". Carefully check your schema for type " + getSchema().getName() + ".");
        }
    }
}
| 8,971 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/HollowSetWriteRecord.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write;
import static com.netflix.hollow.core.write.HollowHashableWriteRecord.HashBehavior.IGNORED_HASHES;
import static com.netflix.hollow.core.write.HollowHashableWriteRecord.HashBehavior.MIXED_HASHES;
import com.netflix.hollow.core.memory.ByteDataArray;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.util.LongList;
/**
 * A write record for SET type records. Elements are collected along with their
 * hash codes; serialization writes the element count, then the (delta-encoded)
 * sorted element ordinals, each optionally followed by its hash bucket.
 */
public class HollowSetWriteRecord implements HollowHashableWriteRecord {

    // each entry packs (ordinal << 32) | hashCode into one long
    private final LongList elementsAndHashes;
    private final HashBehavior defaultHashBehavior;

    public HollowSetWriteRecord() {
        this(HashBehavior.MIXED_HASHES);
    }

    public HollowSetWriteRecord(HashBehavior defaultHashBehavior) {
        this.elementsAndHashes = new LongList();
        this.defaultHashBehavior = defaultHashBehavior;
    }

    /** Add an element whose hash code is its own ordinal. */
    public void addElement(int ordinal) {
        addElement(ordinal, ordinal);
    }

    /** Add an element with an explicit hash code. */
    public void addElement(int ordinal, int hashCode) {
        elementsAndHashes.add((long)ordinal << 32 | (hashCode & 0xFFFFFFFFL));
    }

    @Override
    public void writeDataTo(ByteDataArray buf) {
        writeDataTo(buf, defaultHashBehavior);
    }

    @Override
    public void writeDataTo(ByteDataArray buf, HashBehavior hashBehavior) {
        elementsAndHashes.sort();

        int numElements = elementsAndHashes.size();
        int bucketMask = HashCodes.hashTableSize(numElements) - 1; /// table size is a power of 2

        VarInt.writeVInt(buf, numElements);

        int prevOrdinal = 0;
        for(int i=0;i<numElements;i++) {
            long entry = elementsAndHashes.get(i);
            int ordinal = (int)(entry >>> 32);

            /// ordinals are sorted, so they are stored as deltas from the previous ordinal
            VarInt.writeVInt(buf, ordinal - prevOrdinal);
            prevOrdinal = ordinal;

            if(hashBehavior == IGNORED_HASHES)
                continue;

            int hash = (int)entry;
            if(hashBehavior == MIXED_HASHES)
                hash = HashCodes.hashInt(hash);
            VarInt.writeVInt(buf, hash & bucketMask);
        }
    }

    @Override
    public void reset() {
        elementsAndHashes.clear();
    }
}
| 8,972 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/HollowObjectTypeWriteState.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write;
import com.netflix.hollow.core.memory.ByteData;
import com.netflix.hollow.core.memory.ByteDataArray;
import com.netflix.hollow.core.memory.ThreadSafeBitSet;
import com.netflix.hollow.core.memory.encoding.FixedLengthElementArray;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.memory.pool.WastefulRecycler;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import java.io.DataOutputStream;
import java.io.IOException;
/**
 * The {@link HollowTypeWriteState} implementation for OBJECT type records.
 * Packs each record's fields into a fixed-length bit-aligned layout (with
 * side buffers for variable-length STRING/BYTES data), optionally split
 * across multiple shards, and writes snapshot/delta/reverse-delta blobs.
 */
public class HollowObjectTypeWriteState extends HollowTypeWriteState {

    /// statistics required for writing fixed length set data
    private FieldStatistics fieldStats;

    /// data required for writing snapshot or delta
    private int maxOrdinal;
    private int maxShardOrdinal[];
    private FixedLengthElementArray fixedLengthLongArray[];   // per-shard packed fixed-length field data
    private ByteDataArray varLengthByteArrays[][];            // per-shard, per-field variable-length data
    private long recordBitOffset[];                           // per-shard current write position, in bits

    /// additional data required for writing delta
    private ByteDataArray deltaAddedOrdinals[];
    private ByteDataArray deltaRemovedOrdinals[];

    public HollowObjectTypeWriteState(HollowObjectSchema schema) {
        this(schema, -1);
    }

    public HollowObjectTypeWriteState(HollowObjectSchema schema, int numShards) {
        super(schema, numShards);
    }

    @Override
    public HollowObjectSchema getSchema() {
        return (HollowObjectSchema)schema;
    }

    /**
     * Called to perform a state transition.<p>
     *
     * Precondition: We are adding objects to this state engine.<br>
     * Postcondition: We are writing the previously added objects to a FastBlob.
     *
     */
    @Override
    public void prepareForWrite() {
        super.prepareForWrite();

        // gather the per-field bit widths / var-length sizes across all records
        fieldStats = new FieldStatistics(getSchema());

        int maxOrdinal = ordinalMap.maxOrdinal();

        for(int i=0;i<=maxOrdinal;i++) {
            discoverObjectFieldStatisticsForRecord(fieldStats, i);
        }

        fieldStats.completeCalculations();

        // if the shard count isn't fixed yet, pick the smallest power of 2 such that
        // each shard's projected size stays under the engine's target max shard size
        if(numShards == -1) {
            long projectedSizeOfType = ((long)fieldStats.getNumBitsPerRecord() * (maxOrdinal + 1)) / 8;
            projectedSizeOfType += fieldStats.getTotalSizeOfAllVarLengthData();

            numShards = 1;
            while(stateEngine.getTargetMaxTypeShardSize() * numShards < projectedSizeOfType)
                numShards *= 2;
        }

        // ordinals are assigned round-robin to shards (ordinal & (numShards-1)),
        // so the first (maxOrdinal+1) % numShards shards hold one extra record
        maxShardOrdinal = new int[numShards];
        int minRecordLocationsPerShard = (maxOrdinal + 1) / numShards;
        for(int i=0;i<numShards;i++)
            maxShardOrdinal[i] = (i < ((maxOrdinal + 1) & (numShards - 1))) ? minRecordLocationsPerShard : minRecordLocationsPerShard - 1;
    }

    // accumulate field statistics for one record, if populated in either cycle
    private void discoverObjectFieldStatisticsForRecord(FieldStatistics fieldStats, int ordinal) {
        if(currentCyclePopulated.get(ordinal) || previousCyclePopulated.get(ordinal)) {
            long pointer = ordinalMap.getPointerForData(ordinal);

            for(int fieldIndex=0; fieldIndex<((HollowObjectSchema)schema).numFields(); fieldIndex++) {
                pointer = discoverObjectFieldStatisticsForField(fieldStats, pointer, fieldIndex);
            }
        }
    }

    // walk one serialized field at 'pointer', record its stats, and return the pointer past it
    private long discoverObjectFieldStatisticsForField(FieldStatistics fieldStats, long pointer, int fieldIndex) {
        ByteData data = ordinalMap.getByteData().getUnderlyingArray();

        switch(getSchema().getFieldType(fieldIndex)) {
        case BOOLEAN:
            addFixedLengthFieldRequiredBits(fieldStats, fieldIndex, 2);
            pointer += 1;
            break;
        case FLOAT:
            addFixedLengthFieldRequiredBits(fieldStats, fieldIndex, 32);
            pointer += 4;
            break;
        case DOUBLE:
            addFixedLengthFieldRequiredBits(fieldStats, fieldIndex, 64);
            pointer += 8;
            break;
        case LONG:
        case INT:
        case REFERENCE:
            if(VarInt.readVNull(data, pointer)) {
                addFixedLengthFieldRequiredBits(fieldStats, fieldIndex, 1);
                pointer += 1;
            } else {
                long vLong = VarInt.readVLong(data, pointer);
                // bits needed to represent (value + 1) -- leaving room for a null
                // sentinel (see getNullValueForField usage in addRecordField)
                int requiredBitsForFieldValue = 64 - Long.numberOfLeadingZeros(vLong + 1);
                addFixedLengthFieldRequiredBits(fieldStats, fieldIndex, requiredBitsForFieldValue);
                pointer += VarInt.sizeOfVLong(vLong);
            }
            break;
        case BYTES:
        case STRING:
            if(VarInt.readVNull(data, pointer)) {
                addFixedLengthFieldRequiredBits(fieldStats, fieldIndex, 1);
                pointer += 1;
            } else {
                int length = VarInt.readVInt(data, pointer);
                addVarLengthFieldSizeInBytes(fieldStats, fieldIndex, length);
                pointer += length + VarInt.sizeOfVInt(length);
            }
            break;
        }

        return pointer;
    }

    private void addFixedLengthFieldRequiredBits(FieldStatistics fieldStats, int fieldIndex, int numBits) {
        fieldStats.addFixedLengthFieldRequiredBits(fieldIndex, numBits);
    }

    private void addVarLengthFieldSizeInBytes(FieldStatistics fieldStats, int fieldIndex, int numBytes) {
        fieldStats.addVarLengthFieldSize(fieldIndex, numBytes);
    }

    @Override
    public void prepareForNextCycle() {
        super.prepareForNextCycle();

        fieldStats = null;
    }

    @Override
    public void calculateSnapshot() {
        maxOrdinal = ordinalMap.maxOrdinal();
        int numBitsPerRecord = fieldStats.getNumBitsPerRecord();

        fixedLengthLongArray = new FixedLengthElementArray[numShards];
        varLengthByteArrays = new ByteDataArray[numShards][];
        recordBitOffset = new long[numShards];

        for(int i=0;i<numShards;i++) {
            fixedLengthLongArray[i] = new FixedLengthElementArray(WastefulRecycler.DEFAULT_INSTANCE, (long)numBitsPerRecord * (maxShardOrdinal[i] + 1));
            varLengthByteArrays[i] = new ByteDataArray[getSchema().numFields()];
        }

        int shardMask = numShards - 1;

        // every ordinal slot gets a record; unpopulated ordinals get a null record
        // so that var-length offsets stay consistent for subsequent records
        for(int i=0;i<=maxOrdinal;i++) {
            int shardNumber = i & shardMask;
            if(currentCyclePopulated.get(i)) {
                addRecord(i, recordBitOffset[shardNumber], fixedLengthLongArray[shardNumber], varLengthByteArrays[shardNumber]);
            } else {
                addNullRecord(i, recordBitOffset[shardNumber], fixedLengthLongArray[shardNumber], varLengthByteArrays[shardNumber]);
            }
            recordBitOffset[shardNumber] += numBitsPerRecord;
        }
    }

    @Override
    public void writeSnapshot(DataOutputStream os) throws IOException {
        /// for unsharded blobs, support pre v2.1.0 clients
        if(numShards == 1) {
            writeSnapshotShard(os, 0);
        } else {
            /// overall max ordinal
            VarInt.writeVInt(os, maxOrdinal);

            for(int i=0;i<numShards;i++) {
                writeSnapshotShard(os, i);
            }
        }

        /// Populated bits
        currentCyclePopulated.serializeBitsTo(os);

        // release the calculation buffers
        fixedLengthLongArray = null;
        varLengthByteArrays = null;
        recordBitOffset = null;
    }

    private void writeSnapshotShard(DataOutputStream os, int shardNumber) throws IOException {
        /// 1) shard max ordinal
        VarInt.writeVInt(os, maxShardOrdinal[shardNumber]);

        /// 2) FixedLength field sizes
        for(int i=0;i<getSchema().numFields();i++) {
            VarInt.writeVInt(os, fieldStats.getMaxBitsForField(i));
        }

        /// 3) FixedLength data
        long numBitsRequired = recordBitOffset[shardNumber];
        long numLongsRequired = recordBitOffset[shardNumber] == 0 ? 0 : ((numBitsRequired - 1) / 64) + 1;
        fixedLengthLongArray[shardNumber].writeTo(os, numLongsRequired);

        /// 4) VarLength data
        for(int i=0;i<varLengthByteArrays[shardNumber].length;i++) {
            if(varLengthByteArrays[shardNumber][i] == null) {
                VarInt.writeVLong(os, 0);
            } else {
                VarInt.writeVLong(os, varLengthByteArrays[shardNumber][i].length());
                varLengthByteArrays[shardNumber][i].getUnderlyingArray().writeTo(os, 0, varLengthByteArrays[shardNumber][i].length());
            }
        }
    }

    @Override
    public void calculateDelta() {
        calculateDelta(previousCyclePopulated, currentCyclePopulated);
    }

    @Override
    public void writeDelta(DataOutputStream dos) throws IOException {
        writeCalculatedDelta(dos);
    }

    @Override
    public void calculateReverseDelta() {
        // a reverse delta is just a delta with the cycle roles swapped
        calculateDelta(currentCyclePopulated, previousCyclePopulated);
    }

    @Override
    public void writeReverseDelta(DataOutputStream dos) throws IOException {
        writeCalculatedDelta(dos);
    }

    // build the per-shard added-record data and added/removed ordinal lists for
    // the transition fromCyclePopulated -> toCyclePopulated
    private void calculateDelta(ThreadSafeBitSet fromCyclePopulated, ThreadSafeBitSet toCyclePopulated) {
        maxOrdinal = ordinalMap.maxOrdinal();
        int numBitsPerRecord = fieldStats.getNumBitsPerRecord();

        ThreadSafeBitSet deltaAdditions = toCyclePopulated.andNot(fromCyclePopulated);

        fixedLengthLongArray = new FixedLengthElementArray[numShards];
        deltaAddedOrdinals = new ByteDataArray[numShards];
        deltaRemovedOrdinals = new ByteDataArray[numShards];
        varLengthByteArrays = new ByteDataArray[numShards][];
        recordBitOffset = new long[numShards];
        int numAddedRecordsInShard[] = new int[numShards];

        int shardMask = numShards - 1;

        // first pass: count additions per shard so each shard's fixed-length array can be exactly sized
        int addedOrdinal = deltaAdditions.nextSetBit(0);
        while(addedOrdinal != -1) {
            numAddedRecordsInShard[addedOrdinal & shardMask]++;
            addedOrdinal = deltaAdditions.nextSetBit(addedOrdinal + 1);
        }

        for(int i=0;i<numShards;i++) {
            fixedLengthLongArray[i] = new FixedLengthElementArray(WastefulRecycler.DEFAULT_INSTANCE, (long)numAddedRecordsInShard[i] * numBitsPerRecord);
            deltaAddedOrdinals[i] = new ByteDataArray(WastefulRecycler.DEFAULT_INSTANCE);
            deltaRemovedOrdinals[i] = new ByteDataArray(WastefulRecycler.DEFAULT_INSTANCE);
            varLengthByteArrays[i] = new ByteDataArray[getSchema().numFields()];
        }

        // second pass: serialize added records and write delta-encoded shard-local
        // ordinals to the added/removed lists
        int previousRemovedOrdinal[] = new int[numShards];
        int previousAddedOrdinal[] = new int[numShards];

        for(int i=0;i<=maxOrdinal;i++) {
            int shardNumber = i & shardMask;
            if(deltaAdditions.get(i)) {
                addRecord(i, recordBitOffset[shardNumber], fixedLengthLongArray[shardNumber], varLengthByteArrays[shardNumber]);
                recordBitOffset[shardNumber] += numBitsPerRecord;
                int shardOrdinal = i / numShards;
                VarInt.writeVInt(deltaAddedOrdinals[shardNumber], shardOrdinal - previousAddedOrdinal[shardNumber]);
                previousAddedOrdinal[shardNumber] = shardOrdinal;
            } else if(fromCyclePopulated.get(i) && !toCyclePopulated.get(i)) {
                int shardOrdinal = i / numShards;
                VarInt.writeVInt(deltaRemovedOrdinals[shardNumber], shardOrdinal - previousRemovedOrdinal[shardNumber]);
                previousRemovedOrdinal[shardNumber] = shardOrdinal;
            }
        }
    }

    private void writeCalculatedDelta(DataOutputStream os) throws IOException {
        /// for unsharded blobs, support pre v2.1.0 clients
        if(numShards == 1) {
            writeCalculatedDeltaShard(os, 0);
        } else {
            /// overall max ordinal
            VarInt.writeVInt(os, maxOrdinal);

            for(int i=0;i<numShards;i++) {
                writeCalculatedDeltaShard(os, i);
            }
        }

        // release the calculation buffers
        fixedLengthLongArray = null;
        varLengthByteArrays = null;
        deltaAddedOrdinals = null;
        deltaRemovedOrdinals = null;
        recordBitOffset = null;
    }

    private void writeCalculatedDeltaShard(DataOutputStream os, int shardNumber) throws IOException {

        /// 1) max ordinal
        VarInt.writeVInt(os, maxShardOrdinal[shardNumber]);

        /// 2) removal / addition ordinals.
        VarInt.writeVLong(os, deltaRemovedOrdinals[shardNumber].length());
        deltaRemovedOrdinals[shardNumber].getUnderlyingArray().writeTo(os, 0, deltaRemovedOrdinals[shardNumber].length());
        VarInt.writeVLong(os, deltaAddedOrdinals[shardNumber].length());
        deltaAddedOrdinals[shardNumber].getUnderlyingArray().writeTo(os, 0, deltaAddedOrdinals[shardNumber].length());

        /// 3) FixedLength field sizes
        for(int i=0;i<getSchema().numFields();i++) {
            VarInt.writeVInt(os, fieldStats.getMaxBitsForField(i));
        }

        /// 4) FixedLength data
        long numBitsRequired = recordBitOffset[shardNumber];
        long numLongsRequired = numBitsRequired == 0 ? 0 : ((numBitsRequired - 1) / 64) + 1;
        fixedLengthLongArray[shardNumber].writeTo(os, numLongsRequired);

        /// 5) VarLength data
        for(int i=0;i<varLengthByteArrays[shardNumber].length;i++) {
            if(varLengthByteArrays[shardNumber][i] == null) {
                VarInt.writeVLong(os, 0);
            } else {
                VarInt.writeVLong(os, varLengthByteArrays[shardNumber][i].length());
                varLengthByteArrays[shardNumber][i].getUnderlyingArray().writeTo(os, 0, varLengthByteArrays[shardNumber][i].length());
            }
        }
    }

    /// here we need to add the offsets for the variable-length field endings, as they will be read as the start position for the following record.
    private void addNullRecord(int ordinal, long recordBitOffset, FixedLengthElementArray fixedLengthLongArray, ByteDataArray varLengthByteArrays[]) {
        for(int fieldIndex=0; fieldIndex < getSchema().numFields(); fieldIndex++) {
            if(getSchema().getFieldType(fieldIndex) == FieldType.STRING || getSchema().getFieldType(fieldIndex) == FieldType.BYTES) {
                long fieldBitOffset = recordBitOffset + fieldStats.getFieldBitOffset(fieldIndex);
                int bitsPerElement = fieldStats.getMaxBitsForField(fieldIndex);
                long currentPointer = varLengthByteArrays[fieldIndex] == null ? 0 : varLengthByteArrays[fieldIndex].length();
                fixedLengthLongArray.setElementValue(fieldBitOffset, bitsPerElement, currentPointer);
            }
        }
    }

    // pack one record's fields from the ordinal map into the shard's arrays
    private void addRecord(int ordinal, long recordBitOffset, FixedLengthElementArray fixedLengthLongArray, ByteDataArray varLengthByteArrays[]) {
        long pointer = ordinalMap.getPointerForData(ordinal);

        for(int fieldIndex=0; fieldIndex < getSchema().numFields(); fieldIndex++) {
            pointer = addRecordField(pointer, recordBitOffset, fieldIndex, fixedLengthLongArray, varLengthByteArrays);
        }
    }

    // read one serialized field at 'readPointer', pack it, and return the pointer past it
    private long addRecordField(long readPointer, long recordBitOffset, int fieldIndex, FixedLengthElementArray fixedLengthLongArray, ByteDataArray varLengthByteArrays[]) {
        FieldType fieldType = getSchema().getFieldType(fieldIndex);
        long fieldBitOffset = recordBitOffset + fieldStats.getFieldBitOffset(fieldIndex);
        int bitsPerElement = fieldStats.getMaxBitsForField(fieldIndex);
        ByteData data = ordinalMap.getByteData().getUnderlyingArray();

        switch(fieldType) {
            case BOOLEAN:
                // 2-bit encoding: 3 = null, otherwise the stored 0/1 byte value
                if(VarInt.readVNull(data, readPointer)) {
                    fixedLengthLongArray.setElementValue(fieldBitOffset, 2, 3);
                } else {
                    fixedLengthLongArray.setElementValue(fieldBitOffset, 2, data.get(readPointer));
                }
                readPointer += 1;
                break;
            case FLOAT:
                long intValue = data.readIntBits(readPointer) & 0xFFFFFFFFL;
                fixedLengthLongArray.setElementValue(fieldBitOffset, 32, intValue);
                readPointer += 4;
                break;
            case DOUBLE:
                long longValue = data.readLongBits(readPointer);
                fixedLengthLongArray.setElementValue(fieldBitOffset, 64, longValue);
                readPointer += 8;
                break;
            case LONG:
            case INT:
            case REFERENCE:
                if(VarInt.readVNull(data, readPointer)) {
                    fixedLengthLongArray.setElementValue(fieldBitOffset, bitsPerElement, fieldStats.getNullValueForField(fieldIndex));
                    readPointer += 1;
                } else {
                    long vLong = VarInt.readVLong(data, readPointer);
                    fixedLengthLongArray.setElementValue(fieldBitOffset, bitsPerElement, vLong);
                    readPointer += VarInt.sizeOfVLong(vLong);
                }
                break;
            case BYTES:
            case STRING:
                // the fixed-length slot stores the end offset of this record's data
                // within the field's var-length buffer
                ByteDataArray varLengthBuf = getByteArray(varLengthByteArrays, fieldIndex);

                if(VarInt.readVNull(data, readPointer)) {
                    long offset = varLengthBuf.length();
                    fixedLengthLongArray.setElementValue(fieldBitOffset, bitsPerElement, offset | (1L << (bitsPerElement - 1))); // write offset with set null bit
                    readPointer += 1;
                } else {
                    int length = VarInt.readVInt(data, readPointer);
                    readPointer += VarInt.sizeOfVInt(length);
                    varLengthBuf.copyFrom(data, readPointer, length);
                    long offset = varLengthBuf.length();
                    fixedLengthLongArray.setElementValue(fieldBitOffset, bitsPerElement, offset);
                    readPointer += length;
                }
                break;
        }

        return readPointer;
    }

    // lazily create the per-field var-length buffer
    private ByteDataArray getByteArray(ByteDataArray buffers[], int index) {
        if(buffers[index] == null) {
            buffers[index] = new ByteDataArray(WastefulRecycler.DEFAULT_INSTANCE);
        }
        return buffers[index];
    }

}
| 8,973 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/HollowBlobHeaderWriter.java | /*
* Copyright 2016-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write;
import com.netflix.hollow.core.HollowBlobHeader;
import com.netflix.hollow.core.HollowBlobOptionalPartHeader;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.schema.HollowSchema;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Map;
/**
 * Serializes blob headers (and optional-part headers) to an output stream.
 */
public class HollowBlobHeaderWriter {

    /**
     * Write the header to the data output stream
     * @param header the blob header
     * @param dos the data output stream to write the blov header
     * @throws IOException if the blob header could not be written
     */
    public void writeHeader(HollowBlobHeader header, DataOutputStream dos) throws IOException {
        /// save 4 bytes to indicate FastBlob version header. This will be changed to indicate backwards incompatibility.
        dos.writeInt(HollowBlobHeader.HOLLOW_BLOB_VERSION_HEADER);

        /// Write randomized tag data -- every state gets a random 64-bit tag.
        /// When attempting to apply a delta, the originating state's random 64-bit tag is compared against the current 64-bit tag.
        /// This prevents deltas from being applied to incorrect states.
        dos.writeLong(header.getOriginRandomizedTag());
        dos.writeLong(header.getDestinationRandomizedTag());

        /// serialize the schemas into a side buffer so their total length can be
        /// written first (the pre v2.2.0 backwards compatibility envelope)
        ByteArrayOutputStream schemaBytes = new ByteArrayOutputStream();
        VarInt.writeVInt(schemaBytes, header.getSchemas().size());
        for(HollowSchema schema : header.getSchemas())
            schema.writeTo(schemaBytes);

        byte[] serializedSchemas = schemaBytes.toByteArray();
        VarInt.writeVInt(dos, serializedSchemas.length + 1); // plus one byte for new backwards compatibility envelope.
        dos.write(serializedSchemas);

        ///backwards compatibility -- new data can be added here by first indicating number of bytes used, will be skipped by existing readers.
        VarInt.writeVInt(dos, 0);

        /// write the header tags -- intended to include input source data versions
        Map<String, String> headerTags = header.getHeaderTags();
        dos.writeShort(headerTags.size());
        for (Map.Entry<String, String> tag : headerTags.entrySet()) {
            dos.writeUTF(tag.getKey());
            dos.writeUTF(tag.getValue());
        }
    }

    /**
     * Write an optional-part header: version marker, part name, randomized tags,
     * schemas, and an empty backwards-compatibility envelope.
     *
     * @param header the optional part header
     * @param dos the stream to write to
     * @throws IOException if the part header could not be written
     */
    public void writePartHeader(HollowBlobOptionalPartHeader header, DataOutputStream dos) throws IOException {
        dos.writeInt(HollowBlobOptionalPartHeader.HOLLOW_BLOB_PART_VERSION_HEADER);
        dos.writeUTF(header.getPartName());
        dos.writeLong(header.getOriginRandomizedTag());
        dos.writeLong(header.getDestinationRandomizedTag());

        VarInt.writeVInt(dos, header.getSchemas().size());
        for(HollowSchema schema : header.getSchemas())
            schema.writeTo(dos);

        ///backwards compatibility -- new data can be added here by first indicating number of bytes used, will be skipped by existing readers.
        VarInt.writeVInt(dos, 0);
    }
}
| 8,974 |
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write;
import com.netflix.hollow.core.memory.ByteData;
import com.netflix.hollow.core.memory.ByteDataArray;
import com.netflix.hollow.core.memory.ThreadSafeBitSet;
import com.netflix.hollow.core.memory.encoding.FixedLengthElementArray;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.memory.pool.WastefulRecycler;
import com.netflix.hollow.core.schema.HollowMapSchema;
import java.io.DataOutputStream;
import java.io.IOException;
/**
 * Write state for a MAP type. Gathers per-shard statistics over the records held in the
 * ordinal map, then encodes snapshot or delta data as fixed-length bit-packed arrays:
 * one array of (end-bucket pointer, size) pairs per map record, and one array of
 * (key ordinal, value ordinal) hash-table buckets per shard.
 * <p>
 * Records are assigned to shards by {@code ordinal & (numShards - 1)} (numShards is a
 * power of two), and a record's ordinal within its shard is {@code ordinal / numShards}.
 */
public class HollowMapTypeWriteState extends HollowTypeWriteState {
    /// statistics required for writing fixed length set data
    private int bitsPerMapPointer;       // bits to encode a shard-local end-of-buckets pointer
    private int bitsPerMapSizeValue;     // bits to encode a map's entry count
    private int bitsPerKeyElement;       // bits to encode a key ordinal (+1 reserved for the empty-bucket sentinel)
    private int bitsPerValueElement;     // bits to encode a value ordinal
    private long totalOfMapBuckets[];    // per shard: total hash buckets across all maps
    /// data required for writing snapshot or delta
    private int maxOrdinal;
    private int maxShardOrdinal[];       // per shard: max shard-local ordinal
    private FixedLengthElementArray mapPointersAndSizesArray[];  // per shard: packed (pointer, size) per map
    private FixedLengthElementArray entryData[];                 // per shard: packed (key, value) per bucket
    /// additional data required for writing delta
    private int numMapsInDelta[];
    private long numBucketsInDelta[];
    private ByteDataArray deltaAddedOrdinals[];    // per shard: gap-encoded added shard ordinals
    private ByteDataArray deltaRemovedOrdinals[];  // per shard: gap-encoded removed shard ordinals
    public HollowMapTypeWriteState(HollowMapSchema schema) {
        this(schema, -1);  // -1: shard count is calculated from data size at write time
    }
    public HollowMapTypeWriteState(HollowMapSchema schema, int numShards) {
        super(schema, numShards);
    }
    @Override
    public HollowMapSchema getSchema() {
        return (HollowMapSchema)schema;
    }
    @Override
    public void prepareForWrite() {
        super.prepareForWrite();
        gatherStatistics();
    }
    /**
     * Scan all records populated in either the current or previous cycle to determine the
     * per-shard bucket totals and the minimum bit widths needed for each encoded field.
     */
    private void gatherStatistics() {
        if(numShards == -1)
            calculateNumShards();
        int maxKeyOrdinal = 0;
        int maxValueOrdinal = 0;
        int maxOrdinal = ordinalMap.maxOrdinal();
        maxShardOrdinal = new int[numShards];
        // Distribute maxOrdinal+1 record slots round-robin; the first (maxOrdinal+1) mod numShards
        // shards receive one extra slot.
        int minRecordLocationsPerShard = (maxOrdinal + 1) / numShards;
        for(int i=0;i<numShards;i++)
            maxShardOrdinal[i] = (i < ((maxOrdinal + 1) & (numShards - 1))) ? minRecordLocationsPerShard : minRecordLocationsPerShard - 1;
        int maxMapSize = 0;
        ByteData data = ordinalMap.getByteData().getUnderlyingArray();
        totalOfMapBuckets = new long[numShards];
        for(int i=0;i<=maxOrdinal;i++) {
            if(currentCyclePopulated.get(i) || previousCyclePopulated.get(i)) {
                // Record layout in the ordinal map: vint size, then per entry
                // (vint key-ordinal delta, vint value ordinal, vint hashed bucket).
                long pointer = ordinalMap.getPointerForData(i);
                int size = VarInt.readVInt(data, pointer);
                int numBuckets = HashCodes.hashTableSize(size);
                if(size > maxMapSize)
                    maxMapSize = size;
                pointer += VarInt.sizeOfVInt(size);
                int keyOrdinal = 0;
                for(int j=0;j<size;j++) {
                    int keyOrdinalDelta = VarInt.readVInt(data, pointer);
                    pointer += VarInt.sizeOfVInt(keyOrdinalDelta);
                    int valueOrdinal = VarInt.readVInt(data, pointer);
                    pointer += VarInt.sizeOfVInt(valueOrdinal);
                    keyOrdinal += keyOrdinalDelta;
                    if(keyOrdinal > maxKeyOrdinal)
                        maxKeyOrdinal = keyOrdinal;
                    if(valueOrdinal > maxValueOrdinal)
                        maxValueOrdinal = valueOrdinal;
                    pointer += VarInt.nextVLongSize(data, pointer); /// discard hashed bucket
                }
                totalOfMapBuckets[i & (numShards-1)] += numBuckets;
            }
        }
        // Pointer width must accommodate the largest shard's bucket total.
        long maxShardTotalOfMapBuckets = 0;
        for(int i=0;i<numShards;i++) {
            if(totalOfMapBuckets[i] > maxShardTotalOfMapBuckets)
                maxShardTotalOfMapBuckets = totalOfMapBuckets[i];
        }
        // Key width uses maxKeyOrdinal + 1 so the all-ones bit pattern stays free as the
        // empty-bucket sentinel (see calculateSnapshot / calculateDelta).
        bitsPerKeyElement = 64 - Long.numberOfLeadingZeros(maxKeyOrdinal + 1);
        bitsPerValueElement = 64 - Long.numberOfLeadingZeros(maxValueOrdinal);
        bitsPerMapSizeValue = 64 - Long.numberOfLeadingZeros(maxMapSize);
        bitsPerMapPointer = 64 - Long.numberOfLeadingZeros(maxShardTotalOfMapBuckets);
    }
    /**
     * Estimate the unsharded encoded size of this type from the records populated in the
     * current cycle, then choose the smallest power-of-two shard count that keeps each
     * shard under the state engine's target max shard size.
     */
    private void calculateNumShards() {
        int maxKeyOrdinal = 0;
        int maxValueOrdinal = 0;
        int maxOrdinal = ordinalMap.maxOrdinal();
        int maxMapSize = 0;
        ByteData data = ordinalMap.getByteData().getUnderlyingArray();
        long totalOfMapBuckets = 0;
        for(int i=0;i<=maxOrdinal;i++) {
            if(currentCyclePopulated.get(i)) {
                long pointer = ordinalMap.getPointerForData(i);
                int size = VarInt.readVInt(data, pointer);
                int numBuckets = HashCodes.hashTableSize(size);
                if(size > maxMapSize)
                    maxMapSize = size;
                pointer += VarInt.sizeOfVInt(size);
                int keyOrdinal = 0;
                for(int j=0;j<size;j++) {
                    int keyOrdinalDelta = VarInt.readVInt(data, pointer);
                    pointer += VarInt.sizeOfVInt(keyOrdinalDelta);
                    int valueOrdinal = VarInt.readVInt(data, pointer);
                    pointer += VarInt.sizeOfVInt(valueOrdinal);
                    keyOrdinal += keyOrdinalDelta;
                    if(keyOrdinal > maxKeyOrdinal)
                        maxKeyOrdinal = keyOrdinal;
                    if(valueOrdinal > maxValueOrdinal)
                        maxValueOrdinal = valueOrdinal;
                    pointer += VarInt.nextVLongSize(data, pointer); /// discard hashed bucket
                }
                totalOfMapBuckets += numBuckets;
            }
        }
        long bitsPerKeyElement = 64 - Long.numberOfLeadingZeros(maxKeyOrdinal + 1);
        long bitsPerValueElement = 64 - Long.numberOfLeadingZeros(maxValueOrdinal);
        long bitsPerMapSizeValue = 64 - Long.numberOfLeadingZeros(maxMapSize);
        long bitsPerMapPointer = 64 - Long.numberOfLeadingZeros(totalOfMapBuckets);
        // Projected bytes = fixed-length (size+pointer) portion plus all bucket entries.
        long projectedSizeOfType = (bitsPerMapSizeValue + bitsPerMapPointer) * (maxOrdinal + 1) / 8;
        projectedSizeOfType += ((bitsPerKeyElement + bitsPerValueElement) * totalOfMapBuckets) / 8;
        numShards = 1;
        while(stateEngine.getTargetMaxTypeShardSize() * numShards < projectedSizeOfType)
            numShards *= 2;
    }
    /**
     * Build the bit-packed snapshot representation for every record populated in the
     * current cycle. Each map's buckets form an open-addressed (linear probing) hash table.
     */
    @Override
    public void calculateSnapshot() {
        maxOrdinal = ordinalMap.maxOrdinal();
        int bitsPerMapFixedLengthPortion = bitsPerMapSizeValue + bitsPerMapPointer;
        int bitsPerMapEntry = bitsPerKeyElement + bitsPerValueElement;
        mapPointersAndSizesArray = new FixedLengthElementArray[numShards];
        entryData = new FixedLengthElementArray[numShards];
        for(int i=0;i<numShards;i++) {
            mapPointersAndSizesArray[i] = new FixedLengthElementArray(WastefulRecycler.DEFAULT_INSTANCE, (long)bitsPerMapFixedLengthPortion * (maxShardOrdinal[i] + 1));
            entryData[i] = new FixedLengthElementArray(WastefulRecycler.DEFAULT_INSTANCE, (long)bitsPerMapEntry * totalOfMapBuckets[i]);
        }
        ByteData data = ordinalMap.getByteData().getUnderlyingArray();
        int bucketCounter[] = new int[numShards];
        int shardMask = numShards - 1;
        // If the schema declares a hash key, buckets are re-derived from the key record's
        // primary-key hash instead of the hash stored with the serialized record.
        HollowWriteStateEnginePrimaryKeyHasher primaryKeyHasher = null;
        if(getSchema().getHashKey() != null)
            primaryKeyHasher = new HollowWriteStateEnginePrimaryKeyHasher(getSchema().getHashKey(), getStateEngine());
        for(int ordinal=0;ordinal<=maxOrdinal;ordinal++) {
            int shardNumber = ordinal & shardMask;
            int shardOrdinal = ordinal / numShards;
            if(currentCyclePopulated.get(ordinal)) {
                long readPointer = ordinalMap.getPointerForData(ordinal);
                int size = VarInt.readVInt(data, readPointer);
                readPointer += VarInt.sizeOfVInt(size);
                int numBuckets = HashCodes.hashTableSize(size);
                mapPointersAndSizesArray[shardNumber].setElementValue(((long)bitsPerMapFixedLengthPortion * shardOrdinal) + bitsPerMapPointer, bitsPerMapSizeValue, size);
                int keyElementOrdinal = 0;
                // Initialize every bucket's key field to the all-ones sentinel (= empty).
                for(int j=0;j<numBuckets;j++) {
                    entryData[shardNumber].setElementValue((long)bitsPerMapEntry * (bucketCounter[shardNumber] + j), bitsPerKeyElement, (1L << bitsPerKeyElement) - 1);
                }
                for(int j=0;j<size;j++) {
                    int keyElementOrdinalDelta = VarInt.readVInt(data, readPointer);
                    readPointer += VarInt.sizeOfVInt(keyElementOrdinalDelta);
                    int valueElementOrdinal = VarInt.readVInt(data, readPointer);
                    readPointer += VarInt.sizeOfVInt(valueElementOrdinal);
                    int hashedBucket = VarInt.readVInt(data, readPointer);
                    readPointer += VarInt.sizeOfVInt(hashedBucket);
                    keyElementOrdinal += keyElementOrdinalDelta;
                    if(primaryKeyHasher != null)
                        hashedBucket = primaryKeyHasher.getRecordHash(keyElementOrdinal) & (numBuckets - 1);
                    // Linear probing: advance until an empty (sentinel) bucket is found.
                    while(entryData[shardNumber].getElementValue((long)bitsPerMapEntry * (bucketCounter[shardNumber] + hashedBucket), bitsPerKeyElement) != ((1L << bitsPerKeyElement) - 1)) {
                        hashedBucket++;
                        hashedBucket &= (numBuckets - 1);
                    }
                    long mapEntryBitOffset = (long)bitsPerMapEntry * (bucketCounter[shardNumber] + hashedBucket);
                    entryData[shardNumber].clearElementValue(mapEntryBitOffset, bitsPerMapEntry);
                    entryData[shardNumber].setElementValue(mapEntryBitOffset, bitsPerKeyElement, keyElementOrdinal);
                    entryData[shardNumber].setElementValue(mapEntryBitOffset + bitsPerKeyElement, bitsPerValueElement, valueElementOrdinal);
                }
                bucketCounter[shardNumber] += numBuckets;
            }
            // The pointer field records the end-bucket position; written for every shard
            // ordinal (populated or not) so readers can derive each map's bucket range.
            mapPointersAndSizesArray[shardNumber].setElementValue((long)bitsPerMapFixedLengthPortion * shardOrdinal, bitsPerMapPointer, bucketCounter[shardNumber]);
        }
    }
    @Override
    public void writeSnapshot(DataOutputStream os) throws IOException {
        /// for unsharded blobs, support pre v2.1.0 clients
        if(numShards == 1) {
            writeSnapshotShard(os, 0);
        } else {
            /// overall max ordinal
            VarInt.writeVInt(os, maxOrdinal);
            for(int i=0;i<numShards;i++) {
                writeSnapshotShard(os, i);
            }
        }
        /// Populated bits
        currentCyclePopulated.serializeBitsTo(os);
        // Release the packed arrays built by calculateSnapshot().
        mapPointersAndSizesArray = null;
        entryData = null;
    }
    /**
     * Write one shard of the snapshot: max ordinal, bit-width statistics, the packed
     * pointer/size array, then the packed bucket-entry array (as raw 64-bit longs).
     */
    private void writeSnapshotShard(DataOutputStream os, int shardNumber) throws IOException {
        int bitsPerMapFixedLengthPortion = bitsPerMapSizeValue + bitsPerMapPointer;
        int bitsPerMapEntry = bitsPerKeyElement + bitsPerValueElement;
        /// 1) max ordinal
        VarInt.writeVInt(os, maxShardOrdinal[shardNumber]);
        /// 2) statistics
        VarInt.writeVInt(os, bitsPerMapPointer);
        VarInt.writeVInt(os, bitsPerMapSizeValue);
        VarInt.writeVInt(os, bitsPerKeyElement);
        VarInt.writeVInt(os, bitsPerValueElement);
        VarInt.writeVLong(os, totalOfMapBuckets[shardNumber]);
        /// 3) list pointer array
        // Number of 64-bit words needed to hold the packed bits (rounded up).
        int numMapFixedLengthLongs = maxShardOrdinal[shardNumber] == -1 ? 0 : (int)((((long)(maxShardOrdinal[shardNumber] + 1) * bitsPerMapFixedLengthPortion) - 1) / 64) + 1;
        VarInt.writeVInt(os, numMapFixedLengthLongs);
        for(int i=0;i<numMapFixedLengthLongs;i++) {
            os.writeLong(mapPointersAndSizesArray[shardNumber].get(i));
        }
        /// 4) element array
        int numElementLongs = totalOfMapBuckets[shardNumber] == 0 ? 0 : (int)(((totalOfMapBuckets[shardNumber] * bitsPerMapEntry) - 1) / 64) + 1;
        VarInt.writeVInt(os, numElementLongs);
        for(int i=0;i<numElementLongs;i++) {
            os.writeLong(entryData[shardNumber].get(i));
        }
    }
    @Override
    public void calculateDelta() {
        calculateDelta(previousCyclePopulated, currentCyclePopulated);
    }
    @Override
    public void writeDelta(DataOutputStream dos) throws IOException {
        writeCalculatedDelta(dos);
    }
    @Override
    public void calculateReverseDelta() {
        // A reverse delta is the same calculation with the from/to cycles swapped.
        calculateDelta(currentCyclePopulated, previousCyclePopulated);
    }
    @Override
    public void writeReverseDelta(DataOutputStream dos) throws IOException {
        writeCalculatedDelta(dos);
    }
    /**
     * Build the delta representation: for each shard, the gap-encoded added/removed
     * shard ordinals plus the packed data for the added records only.
     */
    private void calculateDelta(ThreadSafeBitSet fromCyclePopulated, ThreadSafeBitSet toCyclePopulated) {
        maxOrdinal = ordinalMap.maxOrdinal();
        int bitsPerMapFixedLengthPortion = bitsPerMapSizeValue + bitsPerMapPointer;
        int bitsPerMapEntry = bitsPerKeyElement + bitsPerValueElement;
        numMapsInDelta = new int[numShards];
        numBucketsInDelta = new long[numShards];
        mapPointersAndSizesArray = new FixedLengthElementArray[numShards];
        entryData = new FixedLengthElementArray[numShards];
        deltaAddedOrdinals = new ByteDataArray[numShards];
        deltaRemovedOrdinals = new ByteDataArray[numShards];
        // Additions: populated in "to" but not in "from".
        ThreadSafeBitSet deltaAdditions = toCyclePopulated.andNot(fromCyclePopulated);
        int shardMask = numShards - 1;
        // First pass: count maps and buckets per shard to size the packed arrays.
        int addedOrdinal = deltaAdditions.nextSetBit(0);
        while(addedOrdinal != -1) {
            numMapsInDelta[addedOrdinal & shardMask]++;
            long readPointer = ordinalMap.getPointerForData(addedOrdinal);
            int size = VarInt.readVInt(ordinalMap.getByteData().getUnderlyingArray(), readPointer);
            numBucketsInDelta[addedOrdinal & shardMask] += HashCodes.hashTableSize(size);
            addedOrdinal = deltaAdditions.nextSetBit(addedOrdinal + 1);
        }
        for(int i=0;i<numShards;i++) {
            mapPointersAndSizesArray[i] = new FixedLengthElementArray(WastefulRecycler.DEFAULT_INSTANCE, (long)numMapsInDelta[i] * bitsPerMapFixedLengthPortion);
            entryData[i] = new FixedLengthElementArray(WastefulRecycler.DEFAULT_INSTANCE, numBucketsInDelta[i] * bitsPerMapEntry);
            deltaAddedOrdinals[i] = new ByteDataArray(WastefulRecycler.DEFAULT_INSTANCE);
            deltaRemovedOrdinals[i] = new ByteDataArray(WastefulRecycler.DEFAULT_INSTANCE);
        }
        ByteData data = ordinalMap.getByteData().getUnderlyingArray();
        int mapCounter[] = new int[numShards];
        long bucketCounter[] = new long[numShards];
        int previousRemovedOrdinal[] = new int[numShards];
        int previousAddedOrdinal[] = new int[numShards];
        HollowWriteStateEnginePrimaryKeyHasher primaryKeyHasher = null;
        if(getSchema().getHashKey() != null)
            primaryKeyHasher = new HollowWriteStateEnginePrimaryKeyHasher(getSchema().getHashKey(), getStateEngine());
        // Second pass: pack added records (same hash-table construction as calculateSnapshot)
        // and gap-encode added/removed shard ordinals.
        for(int ordinal=0;ordinal<=maxOrdinal;ordinal++) {
            int shardNumber = ordinal & shardMask;
            if(deltaAdditions.get(ordinal)) {
                long readPointer = ordinalMap.getPointerForData(ordinal);
                int size = VarInt.readVInt(data, readPointer);
                readPointer += VarInt.sizeOfVInt(size);
                int numBuckets = HashCodes.hashTableSize(size);
                long endBucketPosition = bucketCounter[shardNumber] + numBuckets;
                mapPointersAndSizesArray[shardNumber].setElementValue((long)bitsPerMapFixedLengthPortion * mapCounter[shardNumber], bitsPerMapPointer, endBucketPosition);
                mapPointersAndSizesArray[shardNumber].setElementValue(((long)bitsPerMapFixedLengthPortion * mapCounter[shardNumber]) + bitsPerMapPointer, bitsPerMapSizeValue, size);
                int keyElementOrdinal = 0;
                // Initialize all buckets to the empty sentinel (all-ones key field).
                for(int j=0;j<numBuckets;j++) {
                    entryData[shardNumber].setElementValue((long)bitsPerMapEntry * (bucketCounter[shardNumber] + j), bitsPerKeyElement, (1L << bitsPerKeyElement) - 1);
                }
                for(int j=0;j<size;j++) {
                    int keyElementOrdinalDelta = VarInt.readVInt(data, readPointer);
                    readPointer += VarInt.sizeOfVInt(keyElementOrdinalDelta);
                    int valueElementOrdinal = VarInt.readVInt(data, readPointer);
                    readPointer += VarInt.sizeOfVInt(valueElementOrdinal);
                    int hashedBucket = VarInt.readVInt(data, readPointer);
                    readPointer += VarInt.sizeOfVInt(hashedBucket);
                    keyElementOrdinal += keyElementOrdinalDelta;
                    if(primaryKeyHasher != null)
                        hashedBucket = primaryKeyHasher.getRecordHash(keyElementOrdinal) & (numBuckets - 1);
                    // Linear probing to the first empty bucket.
                    while(entryData[shardNumber].getElementValue((long)bitsPerMapEntry * (bucketCounter[shardNumber] + hashedBucket), bitsPerKeyElement) != ((1L << bitsPerKeyElement) - 1)) {
                        hashedBucket++;
                        hashedBucket &= (numBuckets - 1);
                    }
                    long mapEntryBitOffset = (long)bitsPerMapEntry * (bucketCounter[shardNumber] + hashedBucket);
                    entryData[shardNumber].clearElementValue(mapEntryBitOffset, bitsPerMapEntry);
                    entryData[shardNumber].setElementValue(mapEntryBitOffset, bitsPerKeyElement, keyElementOrdinal);
                    entryData[shardNumber].setElementValue(mapEntryBitOffset + bitsPerKeyElement, bitsPerValueElement, valueElementOrdinal);
                }
                bucketCounter[shardNumber] += numBuckets;
                mapCounter[shardNumber]++;
                // Gap-encode the added shard ordinal relative to the previous addition.
                int shardOrdinal = ordinal / numShards;
                VarInt.writeVInt(deltaAddedOrdinals[shardNumber], shardOrdinal - previousAddedOrdinal[shardNumber]);
                previousAddedOrdinal[shardNumber] = shardOrdinal;
            } else if(fromCyclePopulated.get(ordinal) && !toCyclePopulated.get(ordinal)) {
                // Removal: gap-encode relative to the previous removal in this shard.
                int shardOrdinal = ordinal / numShards;
                VarInt.writeVInt(deltaRemovedOrdinals[shardNumber], shardOrdinal - previousRemovedOrdinal[shardNumber]);
                previousRemovedOrdinal[shardNumber] = shardOrdinal;
            }
        }
    }
    private void writeCalculatedDelta(DataOutputStream os) throws IOException {
        /// for unsharded blobs, support pre v2.1.0 clients
        if(numShards == 1) {
            writeCalculatedDeltaShard(os, 0);
        } else {
            /// overall max ordinal
            VarInt.writeVInt(os, maxOrdinal);
            for(int i=0;i<numShards;i++) {
                writeCalculatedDeltaShard(os, i);
            }
        }
        // Release the per-shard structures built by calculateDelta().
        mapPointersAndSizesArray = null;
        entryData = null;
        deltaAddedOrdinals = null;
        deltaRemovedOrdinals = null;
    }
    /**
     * Write one shard of the delta: max ordinal, removed/added ordinal streams,
     * bit-width statistics, then the packed pointer/size and bucket-entry arrays.
     */
    private void writeCalculatedDeltaShard(DataOutputStream os, int shardNumber) throws IOException {
        int bitsPerMapFixedLengthPortion = bitsPerMapSizeValue + bitsPerMapPointer;
        int bitsPerMapEntry = bitsPerKeyElement + bitsPerValueElement;
        /// 1) max ordinal
        VarInt.writeVInt(os, maxShardOrdinal[shardNumber]);
        /// 2) removal / addition ordinals.
        VarInt.writeVLong(os, deltaRemovedOrdinals[shardNumber].length());
        deltaRemovedOrdinals[shardNumber].getUnderlyingArray().writeTo(os, 0, deltaRemovedOrdinals[shardNumber].length());
        VarInt.writeVLong(os, deltaAddedOrdinals[shardNumber].length());
        deltaAddedOrdinals[shardNumber].getUnderlyingArray().writeTo(os, 0, deltaAddedOrdinals[shardNumber].length());
        /// 3) statistics
        VarInt.writeVInt(os, bitsPerMapPointer);
        VarInt.writeVInt(os, bitsPerMapSizeValue);
        VarInt.writeVInt(os, bitsPerKeyElement);
        VarInt.writeVInt(os, bitsPerValueElement);
        VarInt.writeVLong(os, totalOfMapBuckets[shardNumber]);
        /// 4) pointer array
        int numMapFixedLengthLongs = numMapsInDelta[shardNumber] == 0 ? 0 : (int)((((long)numMapsInDelta[shardNumber] * bitsPerMapFixedLengthPortion) - 1) / 64) + 1;
        VarInt.writeVInt(os, numMapFixedLengthLongs);
        for(int i=0;i<numMapFixedLengthLongs;i++) {
            os.writeLong(mapPointersAndSizesArray[shardNumber].get(i));
        }
        /// 5) element array
        int numElementLongs = numBucketsInDelta[shardNumber] == 0 ? 0 : (int)(((numBucketsInDelta[shardNumber] * bitsPerMapEntry) - 1) / 64) + 1;
        VarInt.writeVInt(os, numElementLongs);
        for(int i=0;i<numElementLongs;i++) {
            os.writeLong(entryData[shardNumber].get(i));
        }
    }
}
| 8,975 |
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write;
import static com.netflix.hollow.core.write.HollowHashableWriteRecord.HashBehavior.IGNORED_HASHES;
import static com.netflix.hollow.core.write.HollowHashableWriteRecord.HashBehavior.MIXED_HASHES;
import com.netflix.hollow.core.memory.ByteDataArray;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.memory.encoding.VarInt;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
 * A write record representing a single MAP: a list of (key ordinal, value ordinal, hash code)
 * entries which is serialized as a vint-encoded stream of (key-ordinal delta, value ordinal,
 * hashed bucket) triples, sorted by key ordinal.
 */
public class HollowMapWriteRecord implements HollowHashableWriteRecord {

    // Orders entries by key ordinal. Integer.compare avoids the overflow hazard of the
    // subtraction idiom (o1 - o2) for extreme int values.
    private static final Comparator<HollowMapEntry> MAP_ENTRY_COMPARATOR = new Comparator<HollowMapEntry>() {
        public int compare(HollowMapEntry o1, HollowMapEntry o2) {
            return Integer.compare(o1.getKeyOrdinal(), o2.getKeyOrdinal());
        }
    };

    private final List<HollowMapEntry> entryList;
    private final HashBehavior defaultHashBehavior;

    public HollowMapWriteRecord() {
        this(HashBehavior.MIXED_HASHES);
    }

    public HollowMapWriteRecord(HashBehavior defaultHashBehavior) {
        this.entryList = new ArrayList<HollowMapEntry>();
        this.defaultHashBehavior = defaultHashBehavior;
    }

    /**
     * Add an entry, using the key ordinal itself as the hash code.
     *
     * @param keyOrdinal the key record's ordinal
     * @param valueOrdinal the value record's ordinal
     */
    public void addEntry(int keyOrdinal, int valueOrdinal) {
        addEntry(keyOrdinal, valueOrdinal, keyOrdinal);
    }

    /**
     * Add an entry with an explicit hash code used to derive the entry's hash bucket.
     *
     * @param keyOrdinal the key record's ordinal
     * @param valueOrdinal the value record's ordinal
     * @param hashCode the hash code for bucket assignment
     */
    public void addEntry(int keyOrdinal, int valueOrdinal, int hashCode) {
        entryList.add(new HollowMapEntry(keyOrdinal, valueOrdinal, hashCode));
    }

    @Override
    public void writeDataTo(ByteDataArray buf) {
        writeDataTo(buf, defaultHashBehavior);
    }

    /**
     * Serialize the entries to {@code buf}: vint entry count, then per entry the
     * key-ordinal delta (entries sorted by key), the value ordinal, and -- unless hashes
     * are ignored -- the hashed bucket index.
     */
    @Override
    public void writeDataTo(ByteDataArray buf, HashBehavior hashBehavior) {
        Collections.sort(entryList, MAP_ENTRY_COMPARATOR);

        VarInt.writeVInt(buf, entryList.size());

        int hashTableSize = HashCodes.hashTableSize(entryList.size());
        int bucketMask = hashTableSize - 1; /// hashTableSize is a power of 2.

        // Delta-encode key ordinals against the previous (sorted) entry's key ordinal.
        int previousKeyOrdinal = 0;

        for(int i=0;i<entryList.size();i++) {
            HollowMapEntry entry = entryList.get(i);
            VarInt.writeVInt(buf, entry.getKeyOrdinal() - previousKeyOrdinal);
            VarInt.writeVInt(buf, entry.getValueOrdinal());

            if(hashBehavior != IGNORED_HASHES) {
                int hashCode = entry.getHashCode();
                if(hashBehavior == MIXED_HASHES)
                    hashCode = HashCodes.hashInt(hashCode);
                int bucketToHashTo = hashCode & bucketMask;
                VarInt.writeVInt(buf, bucketToHashTo);
            }

            previousKeyOrdinal = entry.getKeyOrdinal();
        }
    }

    @Override
    public void reset() {
        entryList.clear();
    }

    /** Immutable (key, value, hash) triple held until serialization. */
    private static class HollowMapEntry {
        private final int keyOrdinal;
        private final int valueOrdinal;
        private final int hashCode;

        public HollowMapEntry(int keyOrdinal, int valueOrdinal, int hashCode) {
            this.keyOrdinal = keyOrdinal;
            this.valueOrdinal = valueOrdinal;
            this.hashCode = hashCode;
        }

        public int getKeyOrdinal() {
            return keyOrdinal;
        }

        public int getValueOrdinal() {
            return valueOrdinal;
        }

        public int getHashCode() {
            return hashCode;
        }
    }
}
| 8,976 |
/*
* Copyright 2016-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write;
import com.netflix.hollow.api.error.HollowWriteStateException;
import com.netflix.hollow.api.error.SchemaNotFoundException;
import com.netflix.hollow.core.HollowStateEngine;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.util.DefaultHashCodeFinder;
import com.netflix.hollow.core.util.HollowObjectHashCodeFinder;
import com.netflix.hollow.core.util.HollowWriteStateCreator;
import com.netflix.hollow.core.util.SimultaneousExecutor;
import com.netflix.hollow.core.write.objectmapper.HollowObjectMapper;
import com.netflix.hollow.core.write.objectmapper.HollowTypeMapper;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Logger;
/**
* A {@link HollowWriteStateEngine} is our main handle to a Hollow dataset as a data producer.
* <p>
* A dataset changes over time. A core concept in Hollow is that the timeline for a changing dataset can be
* broken down into discrete data states, each of which is a complete snapshot of the data at a particular point in time.
* Data producers handle data states with a HollowWriteStateEngine.
* <p>
* A HollowWriteStateEngine cycles back and forth between two-phases:
* <ol>
* <li>Adding records</li>
* <li>Writing the state</li>
* </ol>
* <p>
* During the "adding records" phase, all of the records comprising the dataset are added to the state engine. During the
* "writing" phase, we can write snapshot blobs to initialize and/or delta blobs to keep up-to-date consumers of the dataset.
* <p>
* Each cycle between the phases will produce a state. During each cycle, all of the current records in the dataset should
* be re-added to the write state engine.
*/
public class HollowWriteStateEngine implements HollowStateEngine {
private final Logger log = Logger.getLogger(HollowWriteStateEngine.class.getName());
// Type name -> write state; mirrored by hollowSchemas (name -> schema) and
// orderedTypeStates (registration order).
private final Map<String, HollowTypeWriteState> writeStates;
private final Map<String, HollowSchema> hollowSchemas;
private final List<HollowTypeWriteState> orderedTypeStates;
// Header tags for the current and previous cycle's states.
private final Map<String,String> headerTags = new ConcurrentHashMap<>();
private final Map<String,String> previousHeaderTags = new ConcurrentHashMap<>();
private final HollowObjectHashCodeFinder hashCodeFinder;
//// target a maximum shard size to reduce excess memory pool requirement
private long targetMaxTypeShardSize = Long.MAX_VALUE;
//// focus filling ordinal holes in as few shards as possible to make delta application more efficient for consumers
private boolean focusHoleFillInFewestShards = false;
// Non-null iff this engine was restored from a read state (names of restored types).
private List<String> restoredStates;
// Phase flag: true while in the "adding records" phase, false after prepareForWrite().
private boolean preparedForNextCycle = true;
// Randomized 64-bit state tags used to guard delta application (see HollowBlobHeaderWriter).
private long previousStateRandomizedTag = -1L;
private long nextStateRandomizedTag;
public HollowWriteStateEngine() {
    this(new DefaultHashCodeFinder());
}
/**
 * @param hasher the hash code finder to use
 * @deprecated custom hash code finders are deprecated; use the no-arg constructor.
 */
@Deprecated
public HollowWriteStateEngine(HollowObjectHashCodeFinder hasher) {
    this.writeStates = new HashMap<String, HollowTypeWriteState>();
    this.hollowSchemas = new HashMap<String, HollowSchema>();
    this.orderedTypeStates = new ArrayList<HollowTypeWriteState>();
    this.hashCodeFinder = hasher;
    // Each state gets a fresh random tag; used to validate delta application.
    this.nextStateRandomizedTag = mintNewRandomizedStateTag();
}
/**
 * Add a record to the state.
 *
 * @param type the type name, which must have been registered via {@link #addTypeState(HollowTypeWriteState)}
 * @param rec the record
 * @return the ordinal of the added record
 * @throws IllegalArgumentException if no type state exists for {@code type}
 */
public int add(String type, HollowWriteRecord rec) {
    HollowTypeWriteState typeState = writeStates.get(type);
    if(typeState == null) {
        throw new IllegalArgumentException("Type " + type + " does not exist!");
    }
    return typeState.add(rec);
}
/**
 * Add a type to the dataset. Should be called during the first cycle, before writing the
 * first state.
 *
 * @param writeState the write state to add
 * @throws IllegalStateException if a state for this type was already added
 */
public synchronized void addTypeState(HollowTypeWriteState writeState) {
    HollowSchema schema = writeState.getSchema();
    String typeName = schema.getName();

    if(writeStates.containsKey(typeName)) {
        throw new IllegalStateException("The state for type " + typeName + " has already been added!");
    }

    // Register in all three views: by name (schema + state) and in registration order.
    hollowSchemas.put(typeName, schema);
    writeStates.put(typeName, writeState);
    orderedTypeStates.add(writeState);

    writeState.setStateEngine(this);
}
/**
 * Restore from the data state contained in the provided {@link HollowReadStateEngine}. This is used to continue
 * a delta chain after a producer is restarted.
 * <p>
 * Before calling this method, the data model should be pre-initialized. This can be accomplished by:
 * <ul>
 * <li>using the {@link HollowWriteStateCreator}</li>
 * <li>calling {@link HollowObjectMapper#initializeTypeState(Class)} with each of the top-level classes in the data model</li>
 * <li>adding the types via {@link #addTypeState(HollowTypeWriteState)}</li>
 * </ul>
 *
 * @param readStateEngine the read state to restore from; must be listening for all populated ordinals
 * @throws IllegalStateException if the read state is not listening for all populated ordinals,
 *         or if a type's explicitly configured shard count differs from the read state's
 * @throws HollowWriteStateException if any type fails to restore
 */
public void restoreFrom(HollowReadStateEngine readStateEngine) {
    if(!readStateEngine.isListenToAllPopulatedOrdinals())
        throw new IllegalStateException("The specified HollowReadStateEngine must be listening for all populated ordinals!");
    // First pass: reconcile shard counts. Unconfigured (-1) write states adopt the read
    // state's shard count; explicitly configured counts must match.
    for(HollowTypeReadState readState : readStateEngine.getTypeStates()) {
        String typeName = readState.getSchema().getName();
        HollowTypeWriteState writeState = writeStates.get(typeName);
        if(writeState != null) {
            if(writeState.getNumShards() == -1)
                writeState.numShards = readState.numShards();
            else if(writeState.getNumShards() != readState.numShards())
                throw new IllegalStateException("Attempting to restore from a HollowReadStateEngine which does not have the same number of shards as explicitly configured for type " + typeName);
        }
    }
    // Second pass: restore each known type in parallel. All read-state type names are
    // recorded in restoredStates, even those with no matching write state
    // (see ensureAllNecessaryStatesRestored()).
    restoredStates = new ArrayList<String>();
    SimultaneousExecutor executor = new SimultaneousExecutor(getClass(), "restore");
    for(final HollowTypeReadState readState : readStateEngine.getTypeStates()) {
        final String typeName = readState.getSchema().getName();
        final HollowTypeWriteState writeState = writeStates.get(typeName);
        restoredStates.add(typeName);
        if(writeState != null) {
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    log.info("RESTORE: " + typeName);
                    writeState.restoreFrom(readState);
                }
            });
        }
    }
    // Continue the delta chain: the restored state's tag becomes our "previous" tag.
    previousStateRandomizedTag = readStateEngine.getCurrentRandomizedTag();
    nextStateRandomizedTag = mintNewRandomizedStateTag();
    overridePreviousHeaderTags(readStateEngine.getHeaderTags());
    try {
        executor.awaitSuccessfulCompletion();
    } catch(Exception e){
        throw new HollowWriteStateException("Unable to restore write state from read state engine", e);
    }
}
/**
 * Transition from the "adding records" phase of a cycle to the "writing" phase of a cycle.
 * Idempotent: a no-op if already in the writing phase.
 *
 * @throws HollowWriteStateException if any type state fails to prepare
 */
public void prepareForWrite() {
    if(!preparedForNextCycle) // this call should be a no-op if we are already prepared for write
        return;

    addTypeNamesWithDefinedHashCodesToHeader();

    try {
        // Prepare all type states in parallel.
        SimultaneousExecutor executor = new SimultaneousExecutor(getClass(), "prepare-for-write");

        for(final Map.Entry<String, HollowTypeWriteState> typeStateEntry : writeStates.entrySet()) {
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    typeStateEntry.getValue().prepareForWrite();
                }
            });
        }

        executor.awaitSuccessfulCompletion();
    } catch(Exception ex) {
        throw new HollowWriteStateException("Failed to prepare for write", ex);
    }

    preparedForNextCycle = false;
}
/**
 * Transition from the "writing" phase of a cycle to the "adding records" phase of the next cycle.
 * Idempotent: a no-op if already in the adding-records phase.
 *
 * @throws HollowWriteStateException if any type state fails to prepare
 */
public void prepareForNextCycle() {
    if(preparedForNextCycle) // this call should be a no-op if we are already prepared for the next cycle
        return;

    // Roll the state tags and header tags forward: current becomes previous.
    previousStateRandomizedTag = nextStateRandomizedTag;
    nextStateRandomizedTag = mintNewRandomizedStateTag();
    overridePreviousHeaderTags(headerTags);

    try {
        // Prepare all type states in parallel.
        SimultaneousExecutor executor = new SimultaneousExecutor(getClass(), "prepare-for-next-cycle");

        for(final Map.Entry<String, HollowTypeWriteState> typeStateEntry : writeStates.entrySet()) {
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    typeStateEntry.getValue().prepareForNextCycle();
                }
            });
        }

        executor.awaitSuccessfulCompletion();
    } catch(Exception ex) {
        throw new HollowWriteStateException("Failed to prepare for next cycle", ex);
    }

    preparedForNextCycle = true;
    // A restored engine is only considered "restored" until its first full cycle completes.
    restoredStates = null;
}
/**
 * Add all of the objects from the previous cycle, exactly as they were in the previous cycle.
 * Types are processed in registration order.
 */
public void addAllObjectsFromPreviousCycle() {
    for(int i=0;i<orderedTypeStates.size();i++) {
        orderedTypeStates.get(i).addAllObjectsFromPreviousCycle();
    }
}
/**
 * If a state was partially constructed after the last call to prepareForNextCycle(), this call
 * will remove all of those objects from the state engine and reset to the state it was in at the
 * last prepareForNextCycle() call.
 * <p>
 * This method can be called at any time, and will leave the state engine in the same state it was in immediately
 * after the last call to {@link #prepareForNextCycle()}
 *
 * @throws HollowWriteStateException if any type state fails to reset
 */
public void resetToLastPrepareForNextCycle() {
    // Reset all type states in parallel.
    SimultaneousExecutor executor = new SimultaneousExecutor(getClass(), "reset-to-last-prepare-for-next-cycle");

    for(final Map.Entry<String, HollowTypeWriteState> typeStateEntry : writeStates.entrySet()) {
        executor.execute(new Runnable() {
            public void run() {
                typeStateEntry.getValue().resetToLastPrepareForNextCycle();
            }
        });
    }

    try {
        executor.awaitSuccessfulCompletion();
    } catch(Exception ex) {
        throw new HollowWriteStateException("Unable to reset to the prior version of the write state", ex);
    }

    /// recreate a new randomized tag, to avoid any potential conflict with aborted versions
    nextStateRandomizedTag = mintNewRandomizedStateTag();
    preparedForNextCycle = true;
}
/**
* @return whether or not there are differences between the current cycle and the previous cycle.
*/
public boolean hasChangedSinceLastCycle() {
for(Map.Entry<String, HollowTypeWriteState> typeStateEntry : writeStates.entrySet()) {
if(typeStateEntry.getValue().hasChangedSinceLastCycle())
return true;
}
return false;
}
    /**
     * @return true if this write state engine was populated by restoring from an existing state
     *         (i.e. restored type names have been recorded), false otherwise.
     */
    public boolean isRestored() {
        return restoredStates != null;
    }
void ensureAllNecessaryStatesRestored() {
if(!isRestored())
return;
List<String> unrestoredStates = new ArrayList<String>();
for(HollowTypeWriteState typeState : orderedTypeStates) {
if(restoredStates.contains(typeState.getSchema().getName())) {
if(!typeState.isRestored())
unrestoredStates.add(typeState.getSchema().getName());
}
}
if(!unrestoredStates.isEmpty()) {
throw new IllegalStateException(String.format(
"Current state was restored but contains unrestored state for top-level types %s. " +
"Those types need to be registered with the producer (see HollowProducer.initializeDataModel)",
unrestoredStates));
}
}
    /**
     * @return all type states held by this engine, in their canonical ordering.
     *         NOTE(review): this returns the internal list directly (no defensive copy) —
     *         callers are presumably expected not to mutate it; confirm before changing.
     */
    public List<HollowTypeWriteState> getOrderedTypeStates() {
        return orderedTypeStates;
    }
    /**
     * @param typeName the type name
     * @return the specified {@link HollowTypeWriteState}, or null if no type with that name exists
     */
    public HollowTypeWriteState getTypeState(String typeName) {
        return writeStates.get(typeName);
    }
@Override
public List<HollowSchema> getSchemas() {
List<HollowSchema> schemas = new ArrayList<HollowSchema>();
for(HollowTypeWriteState typeState : orderedTypeStates) {
schemas.add(typeState.getSchema());
}
return schemas;
}
    /**
     * @param schemaName the schema name
     * @return the schema with the given name, or null if this engine has no such schema
     */
    @Override
    public HollowSchema getSchema(String schemaName) {
        return hollowSchemas.get(schemaName);
    }
@Override
public HollowSchema getNonNullSchema(String schemaName) {
HollowSchema schema = getSchema(schemaName);
if (schema == null) {
List<String> schemas = new ArrayList<>();
for (HollowSchema s : getSchemas()) {
schemas.add(s.getName());
}
throw new SchemaNotFoundException(schemaName, schemas);
}
return schema;
}
    /**
     * @return the header tags for the current (next) state.
     *         NOTE(review): returns the internal map directly — mutations affect this engine.
     */
    @Override
    public Map<String, String> getHeaderTags() {
        return headerTags;
    }
    /**
     * Add (or overwrite) a single header tag on the current state.
     *
     * @param name the tag name
     * @param value the tag value
     */
    public void addHeaderTag(String name, String value) {
        headerTags.put(name, value);
    }
    /**
     * Add (or overwrite) all of the supplied header tags on the current state.
     *
     * @param headerTags the tags to merge into the current header tags
     */
    public void addHeaderTags(Map<String,String> headerTags) {
        this.headerTags.putAll(headerTags);
    }
    /**
     * @return the header tags recorded for the previous cycle's state.
     */
    public Map<String, String> getPreviousHeaderTags() {
        return previousHeaderTags;
    }
    /**
     * @param name the tag name
     * @return the current state's value for the given header tag, or null if not set
     */
    @Override
    public String getHeaderTag(String name) {
        return headerTags.get(name);
    }
    /**
     * @return the legacy hash code finder configured for this engine.
     * @deprecated custom hash code finders are a legacy mechanism
     */
    @Deprecated
    public HollowObjectHashCodeFinder getHashCodeFinder() {
        return hashCodeFinder;
    }
    /**
     * @return the randomized tag identifying the previous cycle's state.
     */
    public long getPreviousStateRandomizedTag() {
        return previousStateRandomizedTag;
    }
    /**
     * Force the previous state's randomized tag (e.g. when aligning with an externally-known state).
     *
     * @param previousStateRandomizedTag the tag to use as the previous state's identity
     */
    public void overridePreviousStateRandomizedTag(long previousStateRandomizedTag) {
        this.previousStateRandomizedTag = previousStateRandomizedTag;
    }
    /**
     * Replace the recorded previous-cycle header tags with the supplied map (contents are copied).
     *
     * @param previousHeaderTags the header tags to record for the previous state
     */
    public void overridePreviousHeaderTags(Map<String, String> previousHeaderTags) {
        this.previousHeaderTags.clear();
        this.previousHeaderTags.putAll(previousHeaderTags);
    }
    /**
     * @return the randomized tag identifying the state currently being produced.
     */
    public long getNextStateRandomizedTag() {
        return nextStateRandomizedTag;
    }
    /**
     * Force the next state's randomized tag (e.g. when aligning with an externally-known state).
     *
     * @param nextStateRandomizedTag the tag to use as the next state's identity
     */
    public void overrideNextStateRandomizedTag(long nextStateRandomizedTag) {
        this.nextStateRandomizedTag = nextStateRandomizedTag;
    }
    /**
     * Setting a target max type shard size (specified in bytes) will limit the excess memory pool required to perform delta transitions.
     *
     * This value defaults to (16 * 1024 * 1024).
     *
     * @param targetMaxTypeShardSize the target max type shard size, in bytes
     */
    public void setTargetMaxTypeShardSize(long targetMaxTypeShardSize) {
        this.targetMaxTypeShardSize = targetMaxTypeShardSize;
    }
    /**
     * @return the configured target max type shard size, in bytes
     */
    long getTargetMaxTypeShardSize() {
        return targetMaxTypeShardSize;
    }
    /**
     * Experimental: Setting this will focus the holes returned by the FreeOrdinalTracker for each state into as few shards as possible.
     *
     * This can be used by the consumers to reduce the work necessary to apply a delta, by skipping recreation of shards where no records are added.
     */
    public void setFocusHoleFillInFewestShards(boolean focusHoleFillInFewestShards) {
        this.focusHoleFillInFewestShards = focusHoleFillInFewestShards;
    }
    /**
     * @return whether hole-filling is focused into as few shards as possible (experimental)
     */
    boolean isFocusHoleFillInFewestShards() {
        return focusHoleFillInFewestShards;
    }
private long mintNewRandomizedStateTag() {
Random rand = new Random();
long newTag = rand.nextLong();
while((newTag & HollowTypeMapper.ASSIGNED_ORDINAL_CYCLE_MASK) == 0 ||
(newTag & HollowTypeMapper.ASSIGNED_ORDINAL_CYCLE_MASK) == HollowTypeMapper.ASSIGNED_ORDINAL_CYCLE_MASK ||
(newTag & HollowTypeMapper.ASSIGNED_ORDINAL_CYCLE_MASK) == (previousStateRandomizedTag & HollowTypeMapper.ASSIGNED_ORDINAL_CYCLE_MASK))
newTag = rand.nextLong();
return newTag;
}
private void addTypeNamesWithDefinedHashCodesToHeader() {
Set<String> typeNames = hashCodeFinder.getTypesWithDefinedHashCodes();
if(typeNames != null && !typeNames.isEmpty()) {
StringBuilder typeNamesBuilder = new StringBuilder();
int counter = 0;
// Sort to be consistent between cycle
Set<String> sortedNames = new TreeSet<String>(typeNames);
for (String typeName : sortedNames) {
if(counter++ != 0)
typeNamesBuilder.append(",");
typeNamesBuilder.append(typeName);
}
addHeaderTag(HollowObjectHashCodeFinder.DEFINED_HASH_CODES_HEADER_NAME, typeNamesBuilder.toString());
}
}
}
| 8,977 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/HollowHashableWriteRecord.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write;
import com.netflix.hollow.core.memory.ByteDataArray;
/**
 * A {@link HollowWriteRecord} whose serialized form may include element hash information.
 */
public interface HollowHashableWriteRecord extends HollowWriteRecord {
    /**
     * Serialize this record's data into the supplied buffer using the requested hash treatment.
     *
     * @param buf the buffer to write the record's data into
     * @param hashBehavior how element hashes should be represented in the serialized form
     */
    void writeDataTo(ByteDataArray buf, HashBehavior hashBehavior);
    /**
     * The ways in which element hashes may be represented when a record is serialized.
     */
    enum HashBehavior {
        IGNORED_HASHES,
        UNMIXED_HASHES,
        MIXED_HASHES
    }
}
| 8,978 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/HollowBlobWriter.java | /*
* Copyright 2016-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write;
import com.netflix.hollow.api.producer.ProducerOptionalBlobPartConfig;
import com.netflix.hollow.api.producer.ProducerOptionalBlobPartConfig.ConfiguredOutputStream;
import com.netflix.hollow.core.HollowBlobHeader;
import com.netflix.hollow.core.HollowBlobOptionalPartHeader;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.util.SimultaneousExecutor;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* A {@link HollowBlobWriter} is used to serialize snapshot, delta, and reverse delta blobs based on the data state
* contained in a {@link HollowWriteStateEngine}.
*/
public class HollowBlobWriter {
    private final HollowWriteStateEngine stateEngine;
    private final HollowBlobHeaderWriter headerWriter;
    /**
     * @param stateEngine the write state engine whose data states this writer serializes
     */
    public HollowBlobWriter(HollowWriteStateEngine stateEngine) {
        this.stateEngine = stateEngine;
        this.headerWriter = new HollowBlobHeaderWriter();
    }
    /**
     * Write the current state as a snapshot blob.
     * @param os the output stream to write the snapshot blob
     * @throws IOException if the snapshot blob could not be written
     */
    public void writeSnapshot(OutputStream os) throws IOException {
        writeSnapshot(os, null);
    }
    /**
     * Write only the blob header(s) for the current state -- no type data.
     *
     * @param os the output stream for the main blob header
     * @param partStreams optional-part output streams; when non-null, a part header is also
     *                    written to each configured part stream. May be null.
     * @throws IOException if a header could not be written
     */
    public void writeHeader(OutputStream os, ProducerOptionalBlobPartConfig.OptionalBlobPartOutputStreams partStreams) throws IOException {
        stateEngine.prepareForWrite();
        DataOutputStream dos = new DataOutputStream(os);
        HollowBlobHeaderWrapper hollowBlobHeaderWrapper = buildHeader(partStreams, stateEngine.getSchemas(), false);
        writeHeaders(dos, partStreams, false, hollowBlobHeaderWrapper);
        os.flush();
        if(partStreams != null)
            partStreams.flush();
    }
    /**
     * Write the current state as a snapshot blob, optionally routing configured types to
     * separate optional-part streams.
     *
     * @param os the output stream for the main snapshot blob
     * @param partStreams optional-part output streams; may be null
     * @throws IOException if the snapshot blob could not be written
     */
    public void writeSnapshot(OutputStream os, ProducerOptionalBlobPartConfig.OptionalBlobPartOutputStreams partStreams) throws IOException {
        Map<String, DataOutputStream> partStreamsByType = Collections.emptyMap();
        if(partStreams != null)
            partStreamsByType = partStreams.getStreamsByType();
        stateEngine.prepareForWrite();
        DataOutputStream dos = new DataOutputStream(os);
        HollowBlobHeaderWrapper hollowBlobHeaderWrapper = buildHeader(partStreams, stateEngine.getSchemas(), false);
        writeHeaders(dos, partStreams, false, hollowBlobHeaderWrapper);
        // calculate every type's snapshot in parallel before any serialization happens
        SimultaneousExecutor executor = new SimultaneousExecutor(getClass(), "write-snapshot");
        for(final HollowTypeWriteState typeState : stateEngine.getOrderedTypeStates()) {
            executor.execute(new Runnable() {
                public void run() {
                    typeState.calculateSnapshot();
                }
            });
        }
        try {
            executor.awaitSuccessfulCompletion();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        // serialize sequentially; each type goes to its configured part stream, or the main stream
        for(HollowTypeWriteState typeState : stateEngine.getOrderedTypeStates()) {
            DataOutputStream partStream = partStreamsByType.get(typeState.getSchema().getName());
            if(partStream == null)
                partStream = dos;
            HollowSchema schema = typeState.getSchema();
            schema.writeTo(partStream);
            writeNumShards(partStream, typeState.getNumShards());
            typeState.writeSnapshot(partStream);
        }
        os.flush();
        if(partStreams != null)
            partStreams.flush();
    }
    /**
     * Serialize the changes necessary to transition a consumer from the previous state
     * to the current state as a delta blob.
     *
     * @param os the output stream to write the delta blob
     * @throws IOException if the delta blob could not be written
     * @throws IllegalStateException if the current state is restored from the previous state
     * and current state contains unrestored state for one or more types. This indicates those
     * types have not been declared to the producer as part it's initialized data model.
     * @see com.netflix.hollow.api.producer.HollowProducer#initializeDataModel(Class[])
     */
    public void writeDelta(OutputStream os) throws IOException {
        writeDelta(os, null);
    }
    /**
     * Serialize the delta blob, optionally routing configured types to separate optional-part
     * streams. Only types which changed since the last cycle are written.
     *
     * @param os the output stream for the main delta blob
     * @param partStreams optional-part output streams; may be null
     * @throws IOException if the delta blob could not be written
     */
    public void writeDelta(OutputStream os, ProducerOptionalBlobPartConfig.OptionalBlobPartOutputStreams partStreams) throws IOException {
        Map<String, DataOutputStream> partStreamsByType = Collections.emptyMap();
        if(partStreams != null)
            partStreamsByType = partStreams.getStreamsByType();
        stateEngine.prepareForWrite();
        if(stateEngine.isRestored())
            stateEngine.ensureAllNecessaryStatesRestored();
        List<HollowSchema> changedTypes = changedTypes();
        DataOutputStream dos = new DataOutputStream(os);
        HollowBlobHeaderWrapper hollowBlobHeaderWrapper = buildHeader(partStreams, changedTypes, false);
        writeHeaders(dos, partStreams, false, hollowBlobHeaderWrapper);
        // calculate deltas for all changed types in parallel
        SimultaneousExecutor executor = new SimultaneousExecutor(getClass(), "write-delta");
        for(final HollowTypeWriteState typeState : stateEngine.getOrderedTypeStates()) {
            executor.execute(new Runnable() {
                public void run() {
                    if(typeState.hasChangedSinceLastCycle())
                        typeState.calculateDelta();
                }
            });
        }
        try {
            executor.awaitSuccessfulCompletion();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        // serialize only the changed types, each to its part stream or the main stream
        for(HollowTypeWriteState typeState : stateEngine.getOrderedTypeStates()) {
            if(typeState.hasChangedSinceLastCycle()) {
                DataOutputStream partStream = partStreamsByType.get(typeState.getSchema().getName());
                if(partStream == null)
                    partStream = dos;
                HollowSchema schema = typeState.getSchema();
                schema.writeTo(partStream);
                writeNumShards(partStream, typeState.getNumShards());
                typeState.writeDelta(partStream);
            }
        }
        os.flush();
        if(partStreams != null)
            partStreams.flush();
    }
    /**
     * Serialize the changes necessary to transition a consumer from the current state to the
     * previous state as a delta blob.
     *
     * @param os the output stream to write the reverse delta blob
     * @throws IOException if the reverse delta blob could not be written
     * @throws IllegalStateException if the current state is restored from the previous state
     * and current state contains unrestored state for one or more types. This indicates those
     * types have not been declared to the producer as part it's initialized data model.
     * @see com.netflix.hollow.api.producer.HollowProducer#initializeDataModel(Class[])
     */
    public void writeReverseDelta(OutputStream os) throws IOException {
        writeReverseDelta(os, null);
    }
    /**
     * Serialize the reverse delta blob, optionally routing configured types to separate
     * optional-part streams. Only types which changed since the last cycle are written.
     *
     * @param os the output stream for the main reverse delta blob
     * @param partStreams optional-part output streams; may be null
     * @throws IOException if the reverse delta blob could not be written
     */
    public void writeReverseDelta(OutputStream os, ProducerOptionalBlobPartConfig.OptionalBlobPartOutputStreams partStreams) throws IOException {
        Map<String, DataOutputStream> partStreamsByType = Collections.emptyMap();
        if(partStreams != null)
            partStreamsByType = partStreams.getStreamsByType();
        stateEngine.prepareForWrite();
        if(stateEngine.isRestored())
            stateEngine.ensureAllNecessaryStatesRestored();
        List<HollowSchema> changedTypes = changedTypes();
        DataOutputStream dos = new DataOutputStream(os);
        HollowBlobHeaderWrapper hollowBlobHeaderWrapper = buildHeader(partStreams, changedTypes, true);
        writeHeaders(dos, partStreams, true, hollowBlobHeaderWrapper);
        // calculate reverse deltas for all changed types in parallel
        SimultaneousExecutor executor = new SimultaneousExecutor(getClass(), "write-reverse-delta");
        for(final HollowTypeWriteState typeState : stateEngine.getOrderedTypeStates()) {
            executor.execute(new Runnable() {
                public void run() {
                    if(typeState.hasChangedSinceLastCycle())
                        typeState.calculateReverseDelta();
                }
            });
        }
        try {
            executor.awaitSuccessfulCompletion();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        // serialize only the changed types, each to its part stream or the main stream
        for(HollowTypeWriteState typeState : stateEngine.getOrderedTypeStates()) {
            if(typeState.hasChangedSinceLastCycle()) {
                DataOutputStream partStream = partStreamsByType.get(typeState.getSchema().getName());
                if(partStream == null)
                    partStream = dos;
                HollowSchema schema = typeState.getSchema();
                schema.writeTo(partStream);
                writeNumShards(partStream, typeState.getNumShards());
                typeState.writeReverseDelta(partStream);
            }
        }
        os.flush();
        if(partStreams != null)
            partStreams.flush();
    }
    /**
     * @return the schemas of only those types which changed since the last cycle, in canonical order
     */
    private List<HollowSchema> changedTypes() {
        List<HollowSchema> changedTypes = new ArrayList<HollowSchema>();
        List<HollowTypeWriteState> orderedTypeStates = stateEngine.getOrderedTypeStates();
        for(int i=0;i<orderedTypeStates.size();i++) {
            HollowTypeWriteState writeState = orderedTypeStates.get(i);
            if(writeState.hasChangedSinceLastCycle())
                changedTypes.add(writeState.getSchema());
        }
        return changedTypes;
    }
    /**
     * Write the per-type shard count inside a length-prefixed section so that readers which
     * predate this field can skip over it.
     */
    private void writeNumShards(DataOutputStream dos, int numShards) throws IOException {
        VarInt.writeVInt(dos, 1 + VarInt.sizeOfVInt(numShards)); /// pre 2.1.0 forwards compatibility:
        /// skip new forwards-compatibility and num shards
        VarInt.writeVInt(dos, 0); /// 2.1.0 forwards-compatibility, can write number of bytes for older readers to skip here.
        VarInt.writeVInt(dos, numShards);
    }
    /**
     * Build the main blob header and bucket the supplied schemas by optional-part name.
     *
     * @param partStreams optional-part configuration; may be null, in which case all schemas stay
     *                    in the main header
     * @param schemasToInclude the schemas to distribute between the main header and the parts
     * @param isReverseDelta when true, origin/destination tags (and header tags) describe the
     *                       backwards transition: origin is the next state, destination the previous
     * @return the main header paired with the per-part schema buckets
     */
    public HollowBlobHeaderWrapper buildHeader(ProducerOptionalBlobPartConfig.OptionalBlobPartOutputStreams partStreams, List<HollowSchema> schemasToInclude, boolean isReverseDelta) {
        HollowBlobHeader header = new HollowBlobHeader();
        /// bucket schemas by part
        List<HollowSchema> mainSchemas = schemasToInclude;
        Map<String, List<HollowSchema>> schemasByPartName = Collections.emptyMap();
        if(partStreams != null) {
            mainSchemas = new ArrayList<>();
            Map<String, String> partNameByType = partStreams.getPartNameByType();
            schemasByPartName = new HashMap<>();
            for(HollowSchema schema : schemasToInclude) {
                String partName = partNameByType.get(schema.getName());
                if(partName == null) {
                    mainSchemas.add(schema);
                } else {
                    List<HollowSchema> partSchemas = schemasByPartName.computeIfAbsent(partName, n -> new ArrayList<>());
                    partSchemas.add(schema);
                }
            }
        }
        /// write main header
        if(isReverseDelta) {
            header.setHeaderTags(stateEngine.getPreviousHeaderTags()); // header tags corresponding to destination state
            header.setOriginRandomizedTag(stateEngine.getNextStateRandomizedTag());
            header.setDestinationRandomizedTag(stateEngine.getPreviousStateRandomizedTag());
        } else {
            header.setHeaderTags(stateEngine.getHeaderTags());
            header.setOriginRandomizedTag(stateEngine.getPreviousStateRandomizedTag());
            header.setDestinationRandomizedTag(stateEngine.getNextStateRandomizedTag());
        }
        header.setSchemas(mainSchemas);
        return new HollowBlobHeaderWrapper(header, schemasByPartName);
    }
    /**
     * Write the main header (followed by its schema count) to the main stream and, when optional
     * parts are configured, a part header (followed by its schema count) to every part stream.
     */
    private void writeHeaders(DataOutputStream os, ProducerOptionalBlobPartConfig.OptionalBlobPartOutputStreams partStreams, boolean isReverseDelta, HollowBlobHeaderWrapper hollowBlobHeaderWrapper) throws IOException {
        headerWriter.writeHeader(hollowBlobHeaderWrapper.header, os);
        VarInt.writeVInt(os, hollowBlobHeaderWrapper.header.getSchemas().size());
        if(partStreams != null) {
            /// write part headers
            for(Map.Entry<String, ConfiguredOutputStream> entry : partStreams.getPartStreams().entrySet()) {
                String partName = entry.getKey();
                HollowBlobOptionalPartHeader partHeader = new HollowBlobOptionalPartHeader(partName);
                if(isReverseDelta) {
                    partHeader.setOriginRandomizedTag(stateEngine.getNextStateRandomizedTag());
                    partHeader.setDestinationRandomizedTag(stateEngine.getPreviousStateRandomizedTag());
                } else {
                    partHeader.setOriginRandomizedTag(stateEngine.getPreviousStateRandomizedTag());
                    partHeader.setDestinationRandomizedTag(stateEngine.getNextStateRandomizedTag());
                }
                List<HollowSchema> partSchemas = hollowBlobHeaderWrapper.schemasByPartName.get(partName);
                if(partSchemas == null)
                    partSchemas = Collections.emptyList();
                partHeader.setSchemas(partSchemas);
                headerWriter.writePartHeader(partHeader, entry.getValue().getStream());
                VarInt.writeVInt(entry.getValue().getStream(), partSchemas.size());
            }
        }
    }
    /** Pairs the main blob header with the schemas destined for each optional part. */
    private static class HollowBlobHeaderWrapper {
        private final HollowBlobHeader header;
        private final Map<String, List<HollowSchema>> schemasByPartName;
        HollowBlobHeaderWrapper(HollowBlobHeader header, Map<String, List<HollowSchema>> schemasByPartName) {
            this.header = header;
            this.schemasByPartName = schemasByPartName;
        }
    }
}
| 8,979 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/copy/HollowMapCopier.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.copy;
import static com.netflix.hollow.core.write.HollowHashableWriteRecord.HashBehavior.MIXED_HASHES;
import static com.netflix.hollow.core.write.HollowHashableWriteRecord.HashBehavior.UNMIXED_HASHES;
import com.netflix.hollow.core.read.engine.map.HollowMapTypeReadState;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIteratorImpl;
import com.netflix.hollow.core.write.HollowMapWriteRecord;
import com.netflix.hollow.core.write.HollowWriteRecord;
import com.netflix.hollow.tools.combine.OrdinalRemapper;
/**
 * Copies MAP records from a read state into a write state, remapping key and value ordinals and
 * either preserving or recomputing each entry's hash bucket position.
 */
public class HollowMapCopier extends HollowRecordCopier {
    public HollowMapCopier(HollowMapTypeReadState readTypeState, OrdinalRemapper ordinalRemapper, boolean preserveHashPositions) {
        super(readTypeState, new HollowMapWriteRecord(preserveHashPositions ? UNMIXED_HASHES : MIXED_HASHES), ordinalRemapper, preserveHashPositions);
    }
    @Override
    public HollowWriteRecord copy(int ordinal) {
        HollowMapWriteRecord writeRec = rec();
        writeRec.reset();
        String keyType = readState().getSchema().getKeyType();
        String valueType = readState().getSchema().getValueType();
        HollowMapEntryOrdinalIterator entries = readState().ordinalIterator(ordinal);
        while(entries.next()) {
            int mappedKey = ordinalRemapper.getMappedOrdinal(keyType, entries.getKey());
            int mappedValue = ordinalRemapper.getMappedOrdinal(valueType, entries.getValue());
            // either keep the source record's bucket position, or hash by the remapped key ordinal
            int hash = preserveHashPositions
                    ? ((HollowMapEntryOrdinalIteratorImpl) entries).getCurrentBucket()
                    : mappedKey;
            writeRec.addEntry(mappedKey, mappedValue, hash);
        }
        return writeRec;
    }
    private HollowMapTypeReadState readState() {
        return (HollowMapTypeReadState) readTypeState;
    }
    private HollowMapWriteRecord rec() {
        return (HollowMapWriteRecord) writeRecord;
    }
}
| 8,980 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/copy/HollowSetCopier.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.copy;
import static com.netflix.hollow.core.write.HollowHashableWriteRecord.HashBehavior.MIXED_HASHES;
import static com.netflix.hollow.core.write.HollowHashableWriteRecord.HashBehavior.UNMIXED_HASHES;
import com.netflix.hollow.core.read.engine.set.HollowSetTypeReadState;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import com.netflix.hollow.core.read.iterator.HollowSetOrdinalIterator;
import com.netflix.hollow.core.write.HollowSetWriteRecord;
import com.netflix.hollow.core.write.HollowWriteRecord;
import com.netflix.hollow.tools.combine.OrdinalRemapper;
/**
 * Copies SET records from a read state into a write state, remapping element ordinals and either
 * preserving or recomputing each element's hash bucket position.
 */
public class HollowSetCopier extends HollowRecordCopier {
    public HollowSetCopier(HollowSetTypeReadState typeState, OrdinalRemapper ordinalRemapper, boolean preserveHashPositions) {
        super(typeState, new HollowSetWriteRecord(preserveHashPositions ? UNMIXED_HASHES : MIXED_HASHES), ordinalRemapper, preserveHashPositions);
    }
    @Override
    public HollowWriteRecord copy(int ordinal) {
        HollowSetWriteRecord writeRec = rec();
        writeRec.reset();
        String elementType = readState().getSchema().getElementType();
        HollowOrdinalIterator elements = readState().ordinalIterator(ordinal);
        for(int element = elements.next(); element != HollowOrdinalIterator.NO_MORE_ORDINALS; element = elements.next()) {
            int mappedElement = ordinalRemapper.getMappedOrdinal(elementType, element);
            // either keep the source record's bucket position, or hash by the remapped ordinal
            int hash = preserveHashPositions
                    ? ((HollowSetOrdinalIterator) elements).getCurrentBucket()
                    : mappedElement;
            writeRec.addElement(mappedElement, hash);
        }
        return writeRec;
    }
    private HollowSetTypeReadState readState() {
        return (HollowSetTypeReadState) readTypeState;
    }
    private HollowSetWriteRecord rec() {
        return (HollowSetWriteRecord) writeRecord;
    }
}
| 8,981 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/copy/HollowObjectCopier.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.copy;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.write.HollowObjectWriteRecord;
import com.netflix.hollow.core.write.HollowWriteRecord;
import com.netflix.hollow.tools.combine.OrdinalRemapper;
public class HollowObjectCopier extends HollowRecordCopier {
    // fieldIndexMapping[i] = position of the destination schema's i-th field within the source
    // (read state) schema, or -1 when the source schema has no field with that name.
    private final int fieldIndexMapping[];
    /**
     * @param readTypeState the OBJECT type state to copy records from
     * @param destinationSchema the schema the copied write records will conform to
     * @param ordinalRemapper maps ordinals referenced by copied records into the destination state
     */
    public HollowObjectCopier(HollowObjectTypeReadState readTypeState, HollowObjectSchema destinationSchema, OrdinalRemapper ordinalRemapper) {
        super(readTypeState, new HollowObjectWriteRecord(destinationSchema), ordinalRemapper, false);
        fieldIndexMapping = new int[destinationSchema.numFields()];
        for(int i=0;i<fieldIndexMapping.length;i++) {
            String fieldName = destinationSchema.getFieldName(i);
            fieldIndexMapping[i] = readState().getSchema().getPosition(fieldName);
        }
    }
    /**
     * Copy the record at the given ordinal into a reusable write record, field by field.
     * Fields absent from the source schema (mapping of -1) are skipped, as are values this copier
     * does not set on the write record: null BOOLEAN/BYTES/STRING, NaN DOUBLE/FLOAT,
     * Integer.MIN_VALUE INT, Long.MIN_VALUE LONG, and negative REFERENCE ordinals.
     */
    @Override
    public HollowWriteRecord copy(int ordinal) {
        HollowObjectTypeReadState objectReadState = readState();
        HollowObjectWriteRecord rec = rec();
        rec.reset();
        for(int i=0;i<rec.getSchema().numFields();i++) {
            String fieldName = rec.getSchema().getFieldName(i);
            int readFieldIndex = fieldIndexMapping[i];
            if(readFieldIndex != -1) {
                switch(rec().getSchema().getFieldType(i)) {
                case BOOLEAN:
                    Boolean bool = objectReadState.readBoolean(ordinal, readFieldIndex);
                    if(bool != null)
                        rec.setBoolean(fieldName, bool.booleanValue());
                    break;
                case BYTES:
                    byte[] bytes = objectReadState.readBytes(ordinal, readFieldIndex);
                    if(bytes != null)
                        rec.setBytes(fieldName, bytes);
                    break;
                case STRING:
                    String str = objectReadState.readString(ordinal, readFieldIndex);
                    if(str != null)
                        rec.setString(fieldName, str);
                    break;
                case DOUBLE:
                    double doubleVal = objectReadState.readDouble(ordinal, readFieldIndex);
                    if(!Double.isNaN(doubleVal))
                        rec.setDouble(fieldName, doubleVal);
                    break;
                case FLOAT:
                    float floatVal = (float)objectReadState.readFloat(ordinal, readFieldIndex);
                    if(!Float.isNaN(floatVal))
                        rec.setFloat(fieldName, floatVal);
                    break;
                case INT:
                    int intVal = objectReadState.readInt(ordinal, readFieldIndex);
                    if(intVal != Integer.MIN_VALUE)
                        rec.setInt(fieldName, intVal);
                    break;
                case LONG:
                    long longVal = objectReadState.readLong(ordinal, readFieldIndex);
                    if(longVal != Long.MIN_VALUE)
                        rec.setLong(fieldName, longVal);
                    break;
                case REFERENCE:
                    int ordinalVal = objectReadState.readOrdinal(ordinal, readFieldIndex);
                    if(ordinalVal >= 0) {
                        // remap the referenced ordinal into the destination state's ordinal space
                        int remappedOrdinalVal = ordinalRemapper.getMappedOrdinal(readState().getSchema().getReferencedType(readFieldIndex), ordinalVal);
                        rec.setReference(fieldName, remappedOrdinalVal);
                    }
                    break;
                }
            }
        }
        return rec;
    }
    private HollowObjectTypeReadState readState() {
        return (HollowObjectTypeReadState)readTypeState;
    }
    private HollowObjectWriteRecord rec() {
        return (HollowObjectWriteRecord)writeRecord;
    }
}
| 8,982 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/copy/HollowRecordCopier.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.copy;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.list.HollowListTypeReadState;
import com.netflix.hollow.core.read.engine.map.HollowMapTypeReadState;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.read.engine.set.HollowSetTypeReadState;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.write.HollowWriteRecord;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.tools.combine.IdentityOrdinalRemapper;
import com.netflix.hollow.tools.combine.OrdinalRemapper;
/**
* A HollowRecordCopier is used to copy records from a {@link HollowReadStateEngine} into a {@link HollowWriteStateEngine}
*
*/
public abstract class HollowRecordCopier {
    protected final HollowTypeReadState readTypeState;
    protected final HollowWriteRecord writeRecord;
    protected final OrdinalRemapper ordinalRemapper;
    protected final boolean preserveHashPositions;
    protected HollowRecordCopier(HollowTypeReadState readTypeState, HollowWriteRecord writeRecord, OrdinalRemapper ordinalRemapper, boolean preserveHashPositions) {
        this.readTypeState = readTypeState;
        this.writeRecord = writeRecord;
        this.ordinalRemapper = ordinalRemapper;
        this.preserveHashPositions = preserveHashPositions;
    }
    /** @return the read state this copier sources records from */
    public HollowTypeReadState getReadTypeState() {
        return readTypeState;
    }
    /**
     * Copy the record at the given ordinal into a reusable write record, remapping any
     * referenced ordinals along the way.
     *
     * @param ordinal the ordinal of the record in the read state
     * @return a write record populated with the copied data (reused across calls)
     */
    public abstract HollowWriteRecord copy(int ordinal);
    public static HollowRecordCopier createCopier(HollowTypeReadState typeState) {
        return createCopier(typeState, typeState.getSchema());
    }
    public static HollowRecordCopier createCopier(HollowTypeReadState typeState, HollowSchema destinationSchema) {
        return createCopier(typeState, destinationSchema, IdentityOrdinalRemapper.INSTANCE, true);
    }
    public static HollowRecordCopier createCopier(HollowTypeReadState typeState, OrdinalRemapper remapper, boolean preserveHashPositions) {
        return createCopier(typeState, typeState.getSchema(), remapper, preserveHashPositions);
    }
    /**
     * Create a copier appropriate for the concrete collection type of the supplied read state.
     */
    public static HollowRecordCopier createCopier(HollowTypeReadState typeState, HollowSchema destinationSchema, OrdinalRemapper ordinalRemapper, boolean preserveHashPositions) {
        if(typeState instanceof HollowObjectTypeReadState) {
            return new HollowObjectCopier((HollowObjectTypeReadState) typeState, (HollowObjectSchema) destinationSchema, ordinalRemapper);
        } else if(typeState instanceof HollowListTypeReadState) {
            return new HollowListCopier((HollowListTypeReadState) typeState, ordinalRemapper);
        } else if(typeState instanceof HollowSetTypeReadState) {
            return new HollowSetCopier((HollowSetTypeReadState) typeState, ordinalRemapper, preserveHashPositions);
        } else if(typeState instanceof HollowMapTypeReadState) {
            return new HollowMapCopier((HollowMapTypeReadState) typeState, ordinalRemapper, preserveHashPositions);
        }
        throw new UnsupportedOperationException("I don't know how to create a copier for a " + typeState.getClass().getSimpleName());
    }
}
| 8,983 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/copy/HollowListCopier.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.copy;
import com.netflix.hollow.core.read.engine.list.HollowListTypeReadState;
import com.netflix.hollow.core.write.HollowListWriteRecord;
import com.netflix.hollow.core.write.HollowWriteRecord;
import com.netflix.hollow.tools.combine.OrdinalRemapper;
/**
 * Copies LIST records from a {@link HollowListTypeReadState} into a
 * {@link HollowListWriteRecord}, remapping each element ordinal through the
 * supplied {@link OrdinalRemapper}.
 */
public class HollowListCopier extends HollowRecordCopier {

    public HollowListCopier(HollowListTypeReadState readTypeState, OrdinalRemapper ordinalRemapper) {
        // Lists have no hash positions, so preserveHashPositions is always false.
        super(readTypeState, new HollowListWriteRecord(), ordinalRemapper, false);
    }

    /**
     * Copies the list at the given ordinal, returning the shared, reset write record
     * populated with the remapped element ordinals in their original order.
     */
    @Override
    public HollowWriteRecord copy(int ordinal) {
        HollowListTypeReadState listState = (HollowListTypeReadState) readTypeState;
        HollowListWriteRecord listRec = (HollowListWriteRecord) writeRecord;

        listRec.reset();

        String elementType = listState.getSchema().getElementType();
        int numElements = listState.size(ordinal);

        for (int idx = 0; idx < numElements; idx++) {
            int sourceElementOrdinal = listState.getElementOrdinal(ordinal, idx);
            listRec.addElement(ordinalRemapper.getMappedOrdinal(elementType, sourceElementOrdinal));
        }

        return listRec;
    }
}
| 8,984 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowObjectMapper.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.objectmapper;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecord;
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordReader;
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordWriter;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Maps plain Java objects (POJOs) into hollow records in a {@link HollowWriteStateEngine}.
 * <p>
 * The first time a type is encountered (via {@link #add(Object)} or
 * {@link #initializeTypeState(Class)}) a {@link HollowTypeMapper} is created for it --
 * deriving its hollow schema reflectively -- and registered with the state engine;
 * subsequent adds reuse the cached mapper. The caches are {@link ConcurrentHashMap}s
 * and registration races are settled with putIfAbsent, so a single instance is
 * intended to be shared by multiple producer threads.
 */
public class HollowObjectMapper {

    private final HollowWriteStateEngine stateEngine;
    /** Derived hollow type name, cached per reflective {@link Type}. */
    private final ConcurrentHashMap<Type, String> typeNameMappers;
    /** Registered mapper per hollow type name; single source of truth for known types. */
    private final ConcurrentHashMap<String, HollowTypeMapper> typeMappers;

    /** Monotonic counter backing {@link #nextUnassignedTypeId()}. */
    private AtomicInteger unassignedTypeCounter = new AtomicInteger(0);

    // Flags consulted when a type mapper is created; mappers created before a flag
    // change are unaffected (they are cached above).
    private boolean ignoreListOrdering = false;
    private boolean useDefaultHashKeys = true;

    public HollowObjectMapper(HollowWriteStateEngine stateEngine) {
        this.stateEngine = stateEngine;
        this.typeNameMappers = new ConcurrentHashMap<>();
        this.typeMappers = new ConcurrentHashMap<>();
    }

    /** Causes LIST type mappers created after this call to ignore element ordering. */
    public void ignoreListOrdering() {
        this.ignoreListOrdering = true;
    }

    /** Enables derivation of default hash keys for SET/MAP types (the default). */
    public void useDefaultHashKeys() {
        this.useDefaultHashKeys = true;
    }

    /** Disables derivation of default hash keys for SET/MAP types. */
    public void doNotUseDefaultHashKeys() {
        this.useDefaultHashKeys = false;
    }

    /**
     * Adds the specified POJO to the state engine.
     * <p>
     * Unless previously initialized with {@link #initializeTypeState(Class)},
     * the first time an instance of a particular type is added
     * its schema is derived and added to the data model.
     *
     * @param o the POJO to add
     * @return the ordinal assigned to the newly added object
     */
    public int add(Object o) {
        HollowTypeMapper typeMapper = getTypeMapper(o.getClass(), null, null);
        return typeMapper.write(o);
    }

    /**
     * Serializes the POJO (and its referenced objects) into the supplied flat-record
     * writer instead of the shared write state.
     *
     * @param o the POJO to serialize
     * @param flatRecordWriter the destination for the flat record
     */
    public void writeFlat(Object o, FlatRecordWriter flatRecordWriter) {
        HollowTypeMapper typeMapper = getTypeMapper(o.getClass(), null, null);
        typeMapper.writeFlat(o, flatRecordWriter);
    }

    /**
     * Reconstructs a POJO from a {@link FlatRecord}.
     * <p>
     * Each parsed object is stored under its ordinal so later records can resolve
     * references to it; the object parsed for the final ordinal is returned as the
     * result. Records whose type has no registered mapper are skipped.
     * <p>
     * NOTE(review): if the final record's type has no registered mapper this returns
     * null -- confirm callers guard against partial type models.
     *
     * @param record the flat record to parse
     * @param <T> the expected runtime type of the root object (unchecked)
     * @return the reconstructed root object
     */
    public <T> T readFlat(FlatRecord record) {
        FlatRecordReader recordReader = new FlatRecordReader(record);
        int ordinal = 0;
        Map<Integer, Object> parsedObjects = new HashMap<>();
        while(recordReader.hasMore()) {
            HollowSchema schema = recordReader.readSchema();
            HollowTypeMapper mapper = typeMappers.get(schema.getName());
            if (mapper == null) {
                recordReader.skipSchema(schema);
            } else {
                Object obj = mapper.parseFlatRecord(schema, recordReader, parsedObjects);
                parsedObjects.put(ordinal, obj);
            }
            ordinal++;
        }
        // Unchecked cast: the caller asserts the runtime type of the root record.
        return (T) parsedObjects.get(ordinal - 1);
    }

    /**
     * Extracts the primary key from the specified POJO.
     *
     * @param o the POJO
     * @return the primary key
     * @throws IllegalArgumentException if the POJO does not have primary key defined
     */
    public RecordPrimaryKey extractPrimaryKey(Object o) {
        HollowObjectTypeMapper typeMapper = (HollowObjectTypeMapper) getTypeMapper(o.getClass(), null, null);
        return new RecordPrimaryKey(typeMapper.getTypeName(), typeMapper.extractPrimaryKey(o));
    }

    /**
     * @param o the object to add
     * @return the ordinal assigned to the newly added object
     * @deprecated use {@link #add(Object)} instead.
     */
    @Deprecated
    public int addObject(Object o) {
        return add(o);
    }

    /**
     * Initializes the schema for the specified type in the data model.
     * <p>
     * The schema will be derived from the field and type names in
     * <code>clazz</code> and added to the state engine's data model;
     * schemas of types referenced from <code>clazz</code> will also be added.
     * This can be used to add a type's schema to the state engine
     * without having to add any data for that type.
     *
     * @param clazz type whose schema to derive and add to the data model
     * @see #add(Object)
     */
    public void initializeTypeState(Class<?> clazz) {
        Objects.requireNonNull(clazz);
        getTypeMapper(clazz, null, null);
    }

    /** Convenience overload: default shard count (-1) and a fresh cycle-detection set. */
    HollowTypeMapper getTypeMapper(Type type, String declaredName, String[] hashKeyFieldPaths) {
        return getTypeMapper(type, declaredName, hashKeyFieldPaths, -1, null);
    }

    /**
     * Returns the mapper for the given type, creating and registering it if necessary.
     *
     * @param type the reflective type to map
     * @param declaredName overrides the derived hollow type name when non-null
     * @param hashKeyFieldPaths explicit hash key field paths for SET/MAP types, or null
     * @param numShards shard count for a newly created type state; -1 for the default
     * @param visited types already seen on this derivation path (circular-reference
     *                detection); may be null at the entry point
     */
    HollowTypeMapper getTypeMapper(
            Type type, String declaredName, String[] hashKeyFieldPaths, int numShards, Set<Type> visited) {
        // Compute the type name
        String typeName = declaredName != null
                ? declaredName
                : findTypeName(type);

        HollowTypeMapper typeMapper = typeMappers.get(typeName);
        if (typeMapper == null) {
            if (visited == null) {
                // Used to detect circular references in the model
                // See HollowObjectTypeMapper and MappedField
                visited = new HashSet<>();
            }

            // Parameterized List/Set/Map types get collection mappers; everything else
            // (including other parameterized types) is mapped as an OBJECT type.
            if (type instanceof ParameterizedType) {
                ParameterizedType parameterizedType = (ParameterizedType) type;
                Class<?> clazz = (Class<?>) parameterizedType.getRawType();
                if (List.class.isAssignableFrom(clazz)) {
                    typeMapper = new HollowListTypeMapper(this, parameterizedType, typeName, numShards,
                            ignoreListOrdering, visited);
                } else if (Set.class.isAssignableFrom(clazz)) {
                    typeMapper = new HollowSetTypeMapper(this, parameterizedType, typeName, hashKeyFieldPaths,
                            numShards, stateEngine, useDefaultHashKeys, visited);
                } else if (Map.class.isAssignableFrom(clazz)) {
                    typeMapper = new HollowMapTypeMapper(this, parameterizedType, typeName, hashKeyFieldPaths,
                            numShards, stateEngine, useDefaultHashKeys, visited);
                } else {
                    typeMapper = new HollowObjectTypeMapper(this, clazz, typeName, visited);
                }
            } else {
                typeMapper = new HollowObjectTypeMapper(this, (Class<?>) type, typeName, visited);
            }

            // Only the thread that wins the putIfAbsent race registers its mapper's
            // type state with the engine; losers discard their mapper and adopt the winner's.
            HollowTypeMapper existing = typeMappers.putIfAbsent(typeName, typeMapper);
            if (existing != null) {
                typeMapper = existing;
            } else {
                typeMapper.addTypeState(stateEngine);
            }
        }
        return typeMapper;
    }

    /** Derives (and caches) the default hollow type name for the given reflective type. */
    private String findTypeName(Type type) {
        String typeName = typeNameMappers.get(type);
        if(typeName == null) {
            typeName = HollowObjectTypeMapper.getDefaultTypeName(type);
            // Benign race: the locally derived name is returned even if another thread's
            // putIfAbsent wins -- assumes getDefaultTypeName is deterministic (TODO confirm).
            typeNameMappers.putIfAbsent(type, typeName);
        }
        return typeName;
    }

    /** Returns the next unique id from this mapper's counter. */
    int nextUnassignedTypeId() {
        return unassignedTypeCounter.getAndIncrement();
    }

    public HollowWriteStateEngine getStateEngine() {
        return stateEngine;
    }
}
| 8,985 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/RecordPrimaryKey.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.objectmapper;
import java.util.Arrays;
public class RecordPrimaryKey {
private final String type;
private final Object[] key;
public RecordPrimaryKey(String type, Object[] key) {
this.type = type;
this.key = key;
}
public String getType() {
return type;
}
public Object[] getKey() {
return key;
}
@Override
public int hashCode() {
return 31 * type.hashCode() + Arrays.hashCode(key);
}
@Override
public boolean equals(Object obj) {
if(obj instanceof RecordPrimaryKey) {
return type.equals(((RecordPrimaryKey) obj).type)
&& Arrays.equals(key, ((RecordPrimaryKey) obj).key);
}
return false;
}
@Override
public String toString() {
return type + ": " + Arrays.toString(key);
}
}
| 8,986 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/MemoizedMap.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.objectmapper;
import java.util.HashMap;
import java.util.Map;
/**
 * A {@link HashMap} whose hollow ordinal assignment is memoized within a single
 * producer cycle.
 * <p>
 * When a HollowObjectMapper writes a MemoizedMap into the HollowWriteStateEngine it
 * records the assigned ordinal on the instance. If the same instance is written again
 * during the same cycle, serialization is short-circuited and the previously
 * memoized ordinal is returned instead.
 */
public class MemoizedMap<K, V> extends HashMap<K, V> {

    private static final long serialVersionUID = -7952842518944521839L;

    // Ordinal assigned during the current producer cycle (cycle-tagged in the high
    // bits); -1 means "not yet assigned". Transient: never serialized with the map.
    transient long __assigned_ordinal = -1L;

    public MemoizedMap() {
        super();
    }

    public MemoizedMap(int initialCapacity) {
        super(initialCapacity);
    }

    public MemoizedMap(int initialCapacity, float loadFactor) {
        super(initialCapacity, loadFactor);
    }

    public MemoizedMap(Map<? extends K, ? extends V> m) {
        super(m);
    }
}
| 8,987 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowMapTypeMapper.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.objectmapper;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.util.HollowObjectHashCodeFinder;
import com.netflix.hollow.core.write.HollowMapTypeWriteState;
import com.netflix.hollow.core.write.HollowMapWriteRecord;
import com.netflix.hollow.core.write.HollowTypeWriteState;
import com.netflix.hollow.core.write.HollowWriteRecord;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordReader;
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordWriter;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
 * Maps {@link Map} POJO instances into hollow MAP records.
 * <p>
 * Key and value type mappers are resolved (and created if necessary) from the parent
 * {@link HollowObjectMapper} when this mapper is constructed. Maps may not contain
 * null keys or values.
 */
public class HollowMapTypeMapper extends HollowTypeMapper {

    private static final String NULL_KEY_MESSAGE =
            "Null key contained in instance of a Map with schema \"%s\". Maps cannot contain null keys or values";
    private static final String NULL_VALUE_MESSAGE =
            "Null value contained in instance of a Map with schema \"%s\". Maps cannot contain null keys or values";

    private final HollowMapSchema schema;
    private final HollowMapTypeWriteState writeState;

    /** Supplies the hash code recorded with each entry for hashed lookup. */
    private final HollowObjectHashCodeFinder hashCodeFinder;

    private HollowTypeMapper keyMapper;
    private HollowTypeMapper valueMapper;

    public HollowMapTypeMapper(HollowObjectMapper parentMapper, ParameterizedType type, String declaredName, String[] hashKeyFieldPaths, int numShards, HollowWriteStateEngine stateEngine, boolean useDefaultHashKeys, Set<Type> visited) {
        // Resolve mappers for the K and V type arguments; `visited` propagates
        // circular-reference detection through the derivation.
        this.keyMapper = parentMapper.getTypeMapper(type.getActualTypeArguments()[0], null, null, -1, visited);
        this.valueMapper = parentMapper.getTypeMapper(type.getActualTypeArguments()[1], null, null, -1, visited);
        String typeName = declaredName != null ? declaredName : getDefaultTypeName(type);
        // With no explicit hash key, fall back to the key type's default element hash
        // key when default hash keys are enabled and the key is an OBJECT type.
        if(hashKeyFieldPaths == null && useDefaultHashKeys && (keyMapper instanceof HollowObjectTypeMapper))
            hashKeyFieldPaths = ((HollowObjectTypeMapper)keyMapper).getDefaultElementHashKey();
        this.schema = new HollowMapSchema(typeName, keyMapper.getTypeName(), valueMapper.getTypeName(), hashKeyFieldPaths);
        this.hashCodeFinder = stateEngine.getHashCodeFinder();
        // Reuse the existing write state if this type was already registered.
        HollowMapTypeWriteState typeState = (HollowMapTypeWriteState) parentMapper.getStateEngine().getTypeState(typeName);
        this.writeState = typeState != null ? typeState : new HollowMapTypeWriteState(schema, numShards);
    }

    @Override
    protected String getTypeName() {
        return schema.getName();
    }

    /**
     * Writes the given Map into the write state and returns its assigned ordinal.
     * {@link MemoizedMap} instances already written during the current cycle
     * short-circuit to their memoized ordinal.
     */
    @Override
    protected int write(Object obj) {
        if(obj instanceof MemoizedMap) {
            long assignedOrdinal = ((MemoizedMap<?, ?>)obj).__assigned_ordinal;
            // High bits tag the cycle in which the ordinal was assigned; a match means
            // this instance was already written this cycle -- reuse the low 31 bits.
            if((assignedOrdinal & ASSIGNED_ORDINAL_CYCLE_MASK) == cycleSpecificAssignedOrdinalBits())
                return (int)assignedOrdinal & Integer.MAX_VALUE;
        }

        Map<?, ?> m = (Map<?, ?>)obj;
        HollowMapWriteRecord rec = copyToWriteRecord(m, null);
        int assignedOrdinal = writeState.add(rec);

        if(obj instanceof MemoizedMap) {
            // Memoize the new ordinal tagged with the current cycle's bits.
            ((MemoizedMap<?, ?>)obj).__assigned_ordinal = (long)assignedOrdinal | cycleSpecificAssignedOrdinalBits();
        }
        return assignedOrdinal;
    }

    /** Serializes the Map (and its entries) through the flat-record writer. */
    @Override
    protected int writeFlat(Object obj, FlatRecordWriter flatRecordWriter) {
        HollowMapWriteRecord rec = copyToWriteRecord((Map<?,?>)obj, flatRecordWriter);
        return flatRecordWriter.write(schema, rec);
    }

    /**
     * Copies the map's entries into a {@link HollowMapWriteRecord}, writing each key
     * and value through its own mapper first.
     *
     * @param m the source map
     * @param flatRecordWriter when non-null, entries are written as flat records
     *                         instead of into the shared write state
     * @throws NullPointerException if the map contains a null key or value
     */
    private HollowMapWriteRecord copyToWriteRecord(Map<?, ?> m, FlatRecordWriter flatRecordWriter) {
        HollowMapWriteRecord rec = (HollowMapWriteRecord) writeRecord();
        for (Map.Entry<?, ?> entry : m.entrySet()) {
            Object key = entry.getKey();
            if (key == null) {
                throw new NullPointerException(String.format(NULL_KEY_MESSAGE, schema));
            }
            Object value = entry.getValue();
            if (value == null) {
                throw new NullPointerException(String.format(NULL_VALUE_MESSAGE, schema));
            }
            int keyOrdinal, valueOrdinal;
            if (flatRecordWriter == null) {
                keyOrdinal = keyMapper.write(key);
                valueOrdinal = valueMapper.write(value);
            } else {
                keyOrdinal = keyMapper.writeFlat(key, flatRecordWriter);
                valueOrdinal = valueMapper.writeFlat(value, flatRecordWriter);
            }
            int hashCode = hashCodeFinder.hashCode(keyMapper.getTypeName(), keyOrdinal, key);
            rec.addEntry(keyOrdinal, valueOrdinal, hashCode);
        }
        return rec;
    }

    /**
     * Reassembles a {@link HashMap} from a flat MAP record, resolving entry ordinals
     * against already-parsed objects. Key ordinals are accumulated as deltas from the
     * reader; value ordinals are used as read.
     */
    @Override
    protected Object parseFlatRecord(HollowSchema recordSchema, FlatRecordReader reader, Map<Integer, Object> parsedObjects) {
        Map<Object, Object> collection = new HashMap<>();
        int size = reader.readCollectionSize();
        int keyOrdinal = 0;
        for (int i = 0; i < size; i++) {
            int keyOrdinalDelta = reader.readOrdinal();
            int valueOrdinal = reader.readOrdinal();
            keyOrdinal += keyOrdinalDelta;
            Object key = parsedObjects.get(keyOrdinal);
            Object value = parsedObjects.get(valueOrdinal);
            collection.put(key, value);
        }
        return collection;
    }

    @Override
    protected HollowWriteRecord newWriteRecord() {
        return new HollowMapWriteRecord();
    }

    @Override
    protected HollowTypeWriteState getTypeWriteState() {
        return writeState;
    }
}
| 8,988 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowTypeName.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.objectmapper;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Indicates the hollow type name of a POJO or the hollow field name of a field
 * declared by a POJO.
 * <p>
 * This annotation may be used to override the hollow class name derived from the
 * name of a {@code Class} of a POJO, or override the hollow field name derived
 * from the name of a {@code Field} declared by a POJO.
 * <p>
 * Retained at runtime so that schema derivation can read it reflectively.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target( {ElementType.TYPE, ElementType.FIELD})
public @interface HollowTypeName {
    /**
     * Returns the hollow type name or hollow field name.
     *
     * @return the hollow type name or hollow field name
     */
    String name();
}
| 8,989 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowPrimaryKey.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.objectmapper;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Indicates that a hollow object type (a POJO) has a primary key.
 * <p>
 * The primary key value of such a hollow object is the sequence of
 * values obtained by resolving the {@link #fields field} paths (in order)
 * given that hollow object. There must be only one such hollow object, of
 * a particular type, for a given primary key. Therefore, a hollow object
 * may be looked up given its primary key value
 * (see
 * {@link com.netflix.hollow.api.consumer.index.UniqueKeyIndex UniqueKeyIndex}
 * and
 * {@link com.netflix.hollow.core.index.HollowPrimaryKeyIndex HollowPrimaryKeyIndex}
 * ).
 * <p>
 * Retained at runtime so that schema derivation can read it reflectively.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface HollowPrimaryKey {
    /**
     * Returns the field paths of the primary key, in the order in which the key's
     * values are resolved and compared.
     *
     * @return the field paths of the primary key
     */
    String[] fields();
}
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowObjectTypeMapper.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.objectmapper;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.memory.HollowUnsafeHandle;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.write.HollowObjectTypeWriteState;
import com.netflix.hollow.core.write.HollowObjectWriteRecord;
import com.netflix.hollow.core.write.HollowTypeWriteState;
import com.netflix.hollow.core.write.HollowWriteRecord;
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordReader;
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordWriter;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import sun.misc.Unsafe;
@SuppressWarnings("restriction")
public class HollowObjectTypeMapper extends HollowTypeMapper {
private static Set<Class<?>> BOXED_WRAPPERS = new HashSet<>(Arrays.asList(Boolean.class, Integer.class, Short.class, Byte.class, Character.class, Long.class, Float.class, Double.class, String.class, byte[].class, Date.class));
private static final Unsafe unsafe = HollowUnsafeHandle.getUnsafe();
private final HollowObjectMapper parentMapper;
private final String typeName;
private final Class<?> clazz;
private final HollowObjectSchema schema;
private final HollowObjectTypeWriteState writeState;
private final boolean hasAssignedOrdinalField;
private final long assignedOrdinalFieldOffset;
private final List<MappedField> mappedFields;
private volatile int primaryKeyFieldPathIdx[][];
public HollowObjectTypeMapper(HollowObjectMapper parentMapper, Class<?> clazz, String declaredTypeName, Set<Type> visited) {
this.parentMapper = parentMapper;
this.clazz = clazz;
this.typeName = declaredTypeName != null ? declaredTypeName : getDefaultTypeName(clazz);
this.mappedFields = new ArrayList<MappedField>();
boolean hasAssignedOrdinalField = false;
long assignedOrdinalFieldOffset = -1;
if(clazz == String.class) {
try {
mappedFields.add(new MappedField(clazz.getDeclaredField("value")));
} catch (Exception e) {
throw new RuntimeException(e);
}
} else if(clazz == Date.class) {
try {
mappedFields.add(new MappedField(MappedFieldType.DATE_TIME));
} catch(Exception e) {
throw new RuntimeException(e);
}
} else {
/// gather fields from type hierarchy
Class<?> currentClass = clazz;
while(currentClass != Object.class && currentClass != Enum.class) {
if(currentClass.isInterface()) {
throw new IllegalArgumentException("Unexpected interface " + currentClass.getSimpleName() + " passed as field.");
} if (currentClass.isArray()) {
throw new IllegalArgumentException("Unexpected array " + currentClass.getSimpleName() + " passed as field. Consider using collections or marking as transient.");
}
Field[] declaredFields = currentClass.getDeclaredFields();
for(int i=0;i<declaredFields.length;i++) {
Field declaredField = declaredFields[i];
int modifiers = declaredField.getModifiers();
if(!Modifier.isTransient(modifiers) && !Modifier.isStatic(modifiers) &&
!"__assigned_ordinal".equals(declaredField.getName()) &&
!declaredField.isAnnotationPresent(HollowTransient.class)) {
mappedFields.add(new MappedField(declaredField, visited));
} else if("__assigned_ordinal".equals(declaredField.getName()) &&
currentClass == clazz) {
// If there is a field of name __assigned_ordinal on clazz
if(declaredField.getType() == long.class) {
assignedOrdinalFieldOffset = unsafe.objectFieldOffset(declaredField);
hasAssignedOrdinalField = true;;
}
}
}
if(currentClass.isEnum())
mappedFields.add(new MappedField(MappedFieldType.ENUM_NAME));
currentClass = currentClass.getSuperclass();
}
}
this.schema = new HollowObjectSchema(typeName, mappedFields.size(), getKeyFieldPaths(clazz));
for(MappedField field : mappedFields) {
if(field.getFieldType() == MappedFieldType.REFERENCE) {
schema.addField(field.getFieldName(), field.getFieldType().getSchemaFieldType(), field.getReferencedTypeName());
} else {
schema.addField(field.getFieldName(), field.getFieldType().getSchemaFieldType());
}
}
HollowObjectTypeWriteState existingWriteState = (HollowObjectTypeWriteState) parentMapper.getStateEngine().getTypeState(typeName);
this.writeState = existingWriteState != null ? existingWriteState : new HollowObjectTypeWriteState(schema, getNumShards(clazz));
this.assignedOrdinalFieldOffset = assignedOrdinalFieldOffset;
this.hasAssignedOrdinalField = hasAssignedOrdinalField;
}
private static String[] getKeyFieldPaths(Class<?> clazz) {
HollowPrimaryKey primaryKey = clazz.getAnnotation(HollowPrimaryKey.class);
while(primaryKey == null && clazz != Object.class && clazz.isInterface()) {
clazz = clazz.getSuperclass();
primaryKey = clazz.getAnnotation(HollowPrimaryKey.class);
}
return primaryKey == null ? null : primaryKey.fields();
}
private static int getNumShards(Class<?> clazz) {
HollowShardLargeType numShardsAnnotation = clazz.getAnnotation(HollowShardLargeType.class);
if(numShardsAnnotation != null)
return numShardsAnnotation.numShards();
return -1;
}
@Override
public String getTypeName() {
return typeName;
}
@Override
public int write(Object obj) {
if (hasAssignedOrdinalField) {
long assignedOrdinal = unsafe.getLong(obj, assignedOrdinalFieldOffset);
if((assignedOrdinal & ASSIGNED_ORDINAL_CYCLE_MASK) == cycleSpecificAssignedOrdinalBits())
return (int)assignedOrdinal & Integer.MAX_VALUE;
}
HollowObjectWriteRecord rec = copyToWriteRecord(obj, null);
int assignedOrdinal = writeState.add(rec);
if (hasAssignedOrdinalField) {
unsafe.putLong(obj, assignedOrdinalFieldOffset, (long)assignedOrdinal | cycleSpecificAssignedOrdinalBits());
}
return assignedOrdinal;
}
@Override
public int writeFlat(Object obj, FlatRecordWriter flatRecordWriter) {
HollowObjectWriteRecord rec = copyToWriteRecord(obj, flatRecordWriter);
return flatRecordWriter.write(schema, rec);
}
private HollowObjectWriteRecord copyToWriteRecord(Object obj, FlatRecordWriter flatRecordWriter) {
if (obj.getClass() != clazz && !clazz.isAssignableFrom(obj.getClass()))
throw new IllegalArgumentException("Attempting to write unexpected class! Expected " + clazz + " but object was " + obj.getClass());
HollowObjectWriteRecord rec = (HollowObjectWriteRecord) writeRecord();
for (int i = 0; i < mappedFields.size(); i++) {
mappedFields.get(i).copy(obj, rec, flatRecordWriter);
}
return rec;
}
@Override
protected Object parseFlatRecord(HollowSchema recordSchema, FlatRecordReader reader, Map<Integer, Object> parsedObjects) {
try {
HollowObjectSchema recordObjectSchema = (HollowObjectSchema) recordSchema;
Object obj = null;
if (BOXED_WRAPPERS.contains(clazz)) {
// if `clazz` is a BoxedWrapper then by definition its OBJECT schema will have a single primitive
// field so find it in the FlatRecord and ignore all other fields.
for (int i = 0; i < recordObjectSchema.numFields(); i++) {
int posInPojoSchema = schema.getPosition(recordObjectSchema.getFieldName(i));
if (posInPojoSchema != -1) {
obj = mappedFields.get(posInPojoSchema).parseBoxedWrapper(reader);
} else {
reader.skipField(recordObjectSchema.getFieldType(i));
}
}
} else if (clazz.isEnum()) {
// if `clazz` is an enum, then we should expect to find a field called `_name` in the FlatRecord.
// There may be other fields if the producer enum contained custom properties, we ignore them
// here assuming the enum constructor will set them if needed.
for (int i = 0; i < recordObjectSchema.numFields(); i++) {
String fieldName = recordObjectSchema.getFieldName(i);
int posInPojoSchema = schema.getPosition(fieldName);
if (fieldName.equals(MappedFieldType.ENUM_NAME.getSpecialFieldName()) && posInPojoSchema != -1) {
obj = mappedFields.get(posInPojoSchema).parseBoxedWrapper(reader);
} else {
reader.skipField(recordObjectSchema.getFieldType(i));
}
}
} else {
obj = unsafe.allocateInstance(clazz);
for (int i = 0; i < recordObjectSchema.numFields(); i++) {
int posInPojoSchema = schema.getPosition(recordObjectSchema.getFieldName(i));
if (posInPojoSchema != -1) {
mappedFields.get(posInPojoSchema).parse(obj, reader, parsedObjects);
} else {
reader.skipField(recordObjectSchema.getFieldType(i));
}
}
}
return obj;
} catch(Exception ex) {
throw new RuntimeException(ex);
}
}
Object[] extractPrimaryKey(Object obj) {
int[][] primaryKeyFieldPathIdx = this.primaryKeyFieldPathIdx;
if(primaryKeyFieldPathIdx == null) {
primaryKeyFieldPathIdx = calculatePrimaryKeyFieldPathIdx(primaryKeyFieldPathIdx);
this.primaryKeyFieldPathIdx = primaryKeyFieldPathIdx;
}
Object key[] = new Object[primaryKeyFieldPathIdx.length];
for(int i=0;i<key.length;i++) {
key[i] = retrieveFieldValue(obj, primaryKeyFieldPathIdx[i], 0);
}
return key;
}
private int[][] calculatePrimaryKeyFieldPathIdx(int[][] primaryKeyFieldPathIdx) {
if(schema.getPrimaryKey() == null)
throw new IllegalArgumentException("Type " + typeName + " does not have a primary key defined");
primaryKeyFieldPathIdx = new int[schema.getPrimaryKey().numFields()][];
for(int i=0;i<primaryKeyFieldPathIdx.length;i++)
primaryKeyFieldPathIdx[i] = schema.getPrimaryKey().getFieldPathIndex(parentMapper.getStateEngine(), i);
return primaryKeyFieldPathIdx;
}
String[] getDefaultElementHashKey() {
PrimaryKey pKey = schema.getPrimaryKey();
if (pKey != null) return pKey.getFieldPaths();
if(mappedFields.size() == 1) {
MappedField singleField = mappedFields.get(0);
if(singleField.getFieldType() != MappedFieldType.REFERENCE)
return new String[] { singleField.getFieldName() };
}
return null;
}
@Override
protected HollowWriteRecord newWriteRecord() {
return new HollowObjectWriteRecord(schema);
}
@Override
protected HollowTypeWriteState getTypeWriteState() {
return writeState;
}
private Object retrieveFieldValue(Object obj, int[] fieldPathIdx, int idx) {
return mappedFields.get(fieldPathIdx[idx]).retrieveFieldValue(obj, fieldPathIdx, idx);
}
/**
 * Binds one POJO field to a field of the hollow object schema.
 * <p>
 * Field values are read and written via {@code sun.misc.Unsafe} using the field's
 * memory offset, avoiding per-record reflection overhead. A MappedField is also used
 * for "special" synthetic fields (enum name, Date value) which have no backing offset.
 */
private class MappedField {

    private final String fieldName;
    // Unsafe offset of the field within the POJO; -1 for synthetic special fields.
    private final long fieldOffset;
    private final Type type;
    private final MappedFieldType fieldType;
    // Non-null only for REFERENCE fields: mapper for the referenced type.
    private final HollowTypeMapper subTypeMapper;
    private final HollowTypeName typeNameAnnotation;
    private final HollowHashKey hashKeyAnnotation;
    private final HollowShardLargeType numShardsAnnotation;
    // True when the field is annotated with @HollowInline.
    private final boolean isInlinedField;

    private MappedField(Field f) {
        this(f, new HashSet<Type>());
    }

    /**
     * Maps a reflected field, classifying it into a {@link MappedFieldType} based on its
     * declared type and annotations. {@code visitedTypes} guards against circular
     * type references, which are not supported.
     */
    @SuppressWarnings("deprecation")
    private MappedField(Field f, Set<Type> visitedTypes) {
        this.fieldOffset = unsafe.objectFieldOffset(f);
        this.fieldName = f.getName();
        this.type = f.getGenericType();
        this.typeNameAnnotation = f.getAnnotation(HollowTypeName.class);
        this.hashKeyAnnotation = f.getAnnotation(HollowHashKey.class);
        this.numShardsAnnotation = f.getAnnotation(HollowShardLargeType.class);
        this.isInlinedField = f.isAnnotationPresent(HollowInline.class);

        HollowTypeMapper subTypeMapper = null;

        if(type == int.class) {
            fieldType = MappedFieldType.INT;
        } else if(type == short.class) {
            fieldType = MappedFieldType.SHORT;
        } else if(type == byte.class) {
            fieldType = MappedFieldType.BYTE;
        } else if(type == char.class) {
            fieldType = MappedFieldType.CHAR;
        } else if(type == long.class) {
            fieldType = MappedFieldType.LONG;
        } else if(type == boolean.class) {
            fieldType = MappedFieldType.BOOLEAN;
        } else if(type == float.class) {
            fieldType = MappedFieldType.FLOAT;
        } else if(type == double.class) {
            fieldType = MappedFieldType.DOUBLE;
        } else if (type == byte[].class && clazz == String.class) {
            // Special case: the mapped POJO type itself is String, whose value field is a
            // byte[] on JVMs with compact strings.
            fieldType = MappedFieldType.STRING;
        } else if(type == byte[].class) {
            fieldType = MappedFieldType.BYTES;
        } else if(type == char[].class) {
            fieldType = MappedFieldType.STRING;
        } else if(isInlinedField && type == Integer.class) {
            fieldType = MappedFieldType.INLINED_INT;
        } else if(isInlinedField && type == Short.class) {
            fieldType = MappedFieldType.INLINED_SHORT;
        } else if(isInlinedField && type == Byte.class) {
            fieldType = MappedFieldType.INLINED_BYTE;
        } else if(isInlinedField && type == Character.class) {
            fieldType = MappedFieldType.INLINED_CHAR;
        } else if(isInlinedField && type == Long.class) {
            fieldType = MappedFieldType.INLINED_LONG;
        } else if(isInlinedField && type == Boolean.class) {
            fieldType = MappedFieldType.INLINED_BOOLEAN;
        } else if(isInlinedField && type == Float.class) {
            fieldType = MappedFieldType.INLINED_FLOAT;
        } else if(isInlinedField && type == Double.class) {
            fieldType = MappedFieldType.INLINED_DOUBLE;
        } else if(isInlinedField && type == String.class) {
            fieldType = MappedFieldType.INLINED_STRING;
        } else if(type == NullablePrimitiveBoolean.class) {
            fieldType = MappedFieldType.NULLABLE_PRIMITIVE_BOOLEAN;
        } else {
            // Anything else is a reference to another mapped hollow type.
            if(isInlinedField)
                throw new IllegalStateException("@HollowInline annotation defined on field " + f + ", which is not either a String or boxed primitive.");
            fieldType = MappedFieldType.REFERENCE;

            if(visitedTypes.contains(this.type)){
                throw new IllegalStateException("circular reference detected on field " + f + "; this type of relationship is not supported");
            }
            // guard recursion here: record this type before descending into its mapper
            visitedTypes.add(this.type);

            subTypeMapper = parentMapper.getTypeMapper(type,
                    typeNameAnnotation != null ? typeNameAnnotation.name() : null,
                    hashKeyAnnotation != null ? hashKeyAnnotation.fields() : null,
                    numShardsAnnotation != null ? numShardsAnnotation.numShards() : -1,
                    visitedTypes);

            // once we've safely returned from a leaf node in recursion, we can remove this MappedField's type
            visitedTypes.remove(this.type);
        }

        this.subTypeMapper = subTypeMapper;
    }

    /**
     * Constructor for synthetic special fields (ENUM_NAME, DATE_TIME), which have no
     * backing Java field: the value is derived from the object itself.
     */
    private MappedField(MappedFieldType specialField) {
        this.fieldOffset = -1;
        this.type = null;
        this.typeNameAnnotation = null;
        this.hashKeyAnnotation = null;
        this.numShardsAnnotation = null;
        this.fieldName = specialField.getSpecialFieldName();
        this.fieldType = specialField;
        this.subTypeMapper = null;
        this.isInlinedField = false;
    }

    public String getFieldName() {
        return fieldName;
    }

    public MappedFieldType getFieldType() {
        return fieldType;
    }

    public String getReferencedTypeName() {
        // An explicit @HollowTypeName on the field overrides the referenced mapper's name.
        if(typeNameAnnotation != null)
            return typeNameAnnotation.name();
        return subTypeMapper.getTypeName();
    }

    /**
     * Copies this field's value from {@code obj} into the write record.
     * <p>
     * Null boxed values are skipped (left unset in the record). For float/double,
     * NaN is treated as null. REFERENCE fields recursively write the referenced
     * object first, via the flat-record writer when one is supplied.
     */
    @SuppressWarnings("deprecation")
    public void copy(Object obj, HollowObjectWriteRecord rec, FlatRecordWriter flatRecordWriter) {
        Object fieldObject;
        switch(fieldType) {
            case BOOLEAN:
                rec.setBoolean(fieldName, unsafe.getBoolean(obj, fieldOffset));
                break;
            case INT:
                rec.setInt(fieldName, unsafe.getInt(obj, fieldOffset));
                break;
            case SHORT:
                rec.setInt(fieldName, unsafe.getShort(obj, fieldOffset));
                break;
            case BYTE:
                rec.setInt(fieldName, unsafe.getByte(obj, fieldOffset));
                break;
            case CHAR:
                rec.setInt(fieldName, unsafe.getChar(obj, fieldOffset));
                break;
            case LONG:
                rec.setLong(fieldName, unsafe.getLong(obj, fieldOffset));
                break;
            case DOUBLE:
                // NaN encodes "null" for primitive doubles.
                double d = unsafe.getDouble(obj, fieldOffset);
                if(!Double.isNaN(d))
                    rec.setDouble(fieldName, d);
                break;
            case FLOAT:
                // NaN encodes "null" for primitive floats.
                float f = unsafe.getFloat(obj, fieldOffset);
                if(!Float.isNaN(f))
                    rec.setFloat(fieldName, f);
                break;
            case STRING:
                fieldObject = unsafe.getObject(obj, fieldOffset);
                if(fieldObject != null)
                    rec.setString(fieldName, getStringFromField(obj, fieldObject));
                break;
            case BYTES:
                fieldObject = unsafe.getObject(obj, fieldOffset);
                if(fieldObject != null)
                    rec.setBytes(fieldName, (byte[])fieldObject);
                break;
            case INLINED_BOOLEAN:
                fieldObject = unsafe.getObject(obj, fieldOffset);
                if(fieldObject != null)
                    rec.setBoolean(fieldName, ((Boolean)fieldObject).booleanValue());
                break;
            case INLINED_INT:
                fieldObject = unsafe.getObject(obj, fieldOffset);
                if(fieldObject != null)
                    rec.setInt(fieldName, ((Integer)fieldObject).intValue());
                break;
            case INLINED_SHORT:
                fieldObject = unsafe.getObject(obj, fieldOffset);
                if(fieldObject != null)
                    rec.setInt(fieldName, ((Short)fieldObject).intValue());
                break;
            case INLINED_BYTE:
                fieldObject = unsafe.getObject(obj, fieldOffset);
                if(fieldObject != null)
                    rec.setInt(fieldName, ((Byte)fieldObject).intValue());
                break;
            case INLINED_CHAR:
                fieldObject = unsafe.getObject(obj, fieldOffset);
                if(fieldObject != null)
                    rec.setInt(fieldName, (int)((Character)fieldObject).charValue());
                break;
            case INLINED_LONG:
                fieldObject = unsafe.getObject(obj, fieldOffset);
                if(fieldObject != null)
                    rec.setLong(fieldName, ((Long)fieldObject).longValue());
                break;
            case INLINED_DOUBLE:
                fieldObject = unsafe.getObject(obj, fieldOffset);
                if(fieldObject != null)
                    rec.setDouble(fieldName, ((Double)fieldObject).doubleValue());
                break;
            case INLINED_FLOAT:
                fieldObject = unsafe.getObject(obj, fieldOffset);
                if(fieldObject != null)
                    rec.setFloat(fieldName, ((Float)fieldObject).floatValue());
                break;
            case INLINED_STRING:
                fieldObject = unsafe.getObject(obj, fieldOffset);
                if(fieldObject != null)
                    rec.setString(fieldName, (String)fieldObject);
                break;
            case NULLABLE_PRIMITIVE_BOOLEAN:
                fieldObject = unsafe.getObject(obj, fieldOffset);
                if(fieldObject != null)
                    rec.setBoolean(fieldName, ((NullablePrimitiveBoolean)fieldObject).getBooleanValue());
                break;
            case DATE_TIME:
                // Synthetic field: the mapped object itself is the Date.
                rec.setLong(fieldName, ((Date)obj).getTime());
                break;
            case ENUM_NAME:
                // Synthetic field: the mapped object itself is the Enum.
                rec.setString(fieldName, ((Enum<?>)obj).name());
                break;
            case REFERENCE:
                fieldObject = unsafe.getObject(obj, fieldOffset);
                if(fieldObject != null) {
                    if(flatRecordWriter == null)
                        rec.setReference(fieldName, subTypeMapper.write(fieldObject));
                    else
                        rec.setReference(fieldName, subTypeMapper.writeFlat(fieldObject, flatRecordWriter));
                }
                break;
        }
    }

    /**
     * Reads this field's next value from a flat record and returns it boxed, or null
     * when the stored sentinel (Integer.MIN_VALUE / Long.MIN_VALUE / NaN / null)
     * indicates an absent value. Field types not listed here fall through to null.
     */
    private Object parseBoxedWrapper(FlatRecordReader reader) {
        switch (fieldType) {
            case BOOLEAN: {
                return reader.readBoolean();
            }
            case INT: {
                int value = reader.readInt();
                if (value != Integer.MIN_VALUE) {
                    return Integer.valueOf(value);
                }
                break;
            }
            case SHORT: {
                int value = reader.readInt();
                if (value != Integer.MIN_VALUE) {
                    return Short.valueOf((short) value);
                }
                break;
            }
            case BYTE: {
                int value = reader.readInt();
                if (value != Integer.MIN_VALUE) {
                    return Byte.valueOf((byte) value);
                }
                break;
            }
            case CHAR: {
                int value = reader.readInt();
                if (value != Integer.MIN_VALUE) {
                    return Character.valueOf((char) value);
                }
                break;
            }
            case LONG: {
                long value = reader.readLong();
                if (value != Long.MIN_VALUE) {
                    return Long.valueOf(value);
                }
                break;
            }
            case FLOAT: {
                float value = reader.readFloat();
                if (!Float.isNaN(value)) {
                    return Float.valueOf(value);
                }
                break;
            }
            case DOUBLE: {
                double value = reader.readDouble();
                if (!Double.isNaN(value)) {
                    return Double.valueOf(value);
                }
                break;
            }
            case STRING: {
                return reader.readString();
            }
            case BYTES: {
                return reader.readBytes();
            }
            case ENUM_NAME: {
                String enumName = reader.readString();
                if (enumName != null) {
                    return Enum.valueOf((Class<Enum>) clazz, enumName);
                }
                break;
            }
            case DATE_TIME: {
                long value = reader.readLong();
                if (value != Long.MIN_VALUE) {
                    return new Date(value);
                }
                break;
            }
        }
        return null;
    }

    /**
     * Reads this field's next value from a flat record and writes it directly into
     * {@code obj} via Unsafe. Sentinel values (see parseBoxedWrapper) leave the field
     * untouched. REFERENCE fields are resolved against {@code parsedRecords}, keyed
     * by the ordinal embedded in the flat record.
     */
    private void parse(Object obj, FlatRecordReader reader, Map<Integer, Object> parsedRecords) {
        switch(fieldType) {
            case BOOLEAN: {
                Boolean value = reader.readBoolean();
                if (value != null) {
                    unsafe.putBoolean(obj, fieldOffset, value == Boolean.TRUE);
                }
                break;
            }
            case INT: {
                int value = reader.readInt();
                if (value != Integer.MIN_VALUE) {
                    unsafe.putInt(obj, fieldOffset, value);
                }
                break;
            }
            case SHORT: {
                int value = reader.readInt();
                if (value != Integer.MIN_VALUE) {
                    unsafe.putShort(obj, fieldOffset, (short) value);
                }
                break;
            }
            case BYTE: {
                int value = reader.readInt();
                if (value != Integer.MIN_VALUE) {
                    unsafe.putByte(obj, fieldOffset, (byte) value);
                }
                break;
            }
            case CHAR: {
                int value = reader.readInt();
                if (value != Integer.MIN_VALUE) {
                    unsafe.putChar(obj, fieldOffset, (char) value);
                }
                break;
            }
            case LONG: {
                long value = reader.readLong();
                if (value != Long.MIN_VALUE) {
                    unsafe.putLong(obj, fieldOffset, value);
                }
                break;
            }
            case FLOAT: {
                float value = reader.readFloat();
                if (!Float.isNaN(value)) {
                    unsafe.putFloat(obj, fieldOffset, value);
                }
                break;
            }
            case DOUBLE: {
                double value = reader.readDouble();
                if (!Double.isNaN(value)) {
                    unsafe.putDouble(obj, fieldOffset, value);
                }
                break;
            }
            case STRING: {
                String value = reader.readString();
                if (value != null) {
                    unsafe.putObject(obj, fieldOffset, value);
                }
                break;
            }
            case BYTES: {
                byte[] value = reader.readBytes();
                if (value != null) {
                    unsafe.putObject(obj, fieldOffset, value);
                }
                break;
            }
            case INLINED_BOOLEAN: {
                Boolean value = reader.readBoolean();
                if (value != null) {
                    unsafe.putObject(obj, fieldOffset, value);
                }
                break;
            }
            case INLINED_INT: {
                int value = reader.readInt();
                if (value != Integer.MIN_VALUE) {
                    unsafe.putObject(obj, fieldOffset, Integer.valueOf(value));
                }
                break;
            }
            case INLINED_SHORT: {
                int value = reader.readInt();
                if (value != Integer.MIN_VALUE) {
                    unsafe.putObject(obj, fieldOffset, Short.valueOf((short) value));
                }
                break;
            }
            case INLINED_BYTE: {
                int value = reader.readInt();
                if (value != Integer.MIN_VALUE) {
                    unsafe.putObject(obj, fieldOffset, Byte.valueOf((byte) value));
                }
                break;
            }
            case INLINED_CHAR: {
                int value = reader.readInt();
                if (value != Integer.MIN_VALUE) {
                    unsafe.putObject(obj, fieldOffset, Character.valueOf((char) value));
                }
                break;
            }
            case INLINED_LONG: {
                long value = reader.readLong();
                if (value != Long.MIN_VALUE) {
                    unsafe.putObject(obj, fieldOffset, Long.valueOf(value));
                }
                break;
            }
            case INLINED_FLOAT: {
                float value = reader.readFloat();
                if (!Float.isNaN(value)) {
                    unsafe.putObject(obj, fieldOffset, Float.valueOf(value));
                }
                break;
            }
            case INLINED_DOUBLE: {
                double value = reader.readDouble();
                if (!Double.isNaN(value)) {
                    unsafe.putObject(obj, fieldOffset, Double.valueOf(value));
                }
                break;
            }
            case INLINED_STRING: {
                String value = reader.readString();
                if (value != null) {
                    unsafe.putObject(obj, fieldOffset, value);
                }
                break;
            }
            case DATE_TIME: {
                long value = reader.readLong();
                if (value != Long.MIN_VALUE) {
                    unsafe.putObject(obj, fieldOffset, new Date(value));
                }
                break;
            }
            case ENUM_NAME: {
                String value = reader.readString();
                if (value != null) {
                    unsafe.putObject(obj, fieldOffset, Enum.valueOf((Class) type, value));
                }
                break;
            }
            case REFERENCE: {
                int ordinal = reader.readOrdinal();
                if (ordinal != -1) {
                    unsafe.putObject(obj, fieldOffset, parsedRecords.get(ordinal));
                }
                break;
            }
            // NOTE(review): NULLABLE_PRIMITIVE_BOOLEAN has no case here, so parsing such a
            // field falls into the default and throws — confirm whether flat-record parsing
            // is ever expected to encounter that field type.
            default:
                throw new IllegalArgumentException("Unknown field type: " + fieldType);
        }
    }

    /**
     * Resolves a primary-key field path against {@code obj}. While the path has more
     * segments, this field must be a REFERENCE and resolution recurses into the
     * referenced object; at the final segment the (boxed) field value is returned,
     * with NaN float/double values reported as null.
     */
    public Object retrieveFieldValue(Object obj, int[] fieldPathIdx, int idx) {
        Object fieldObject;
        if(idx < fieldPathIdx.length - 1) {
            if(fieldType != MappedFieldType.REFERENCE)
                throw new IllegalArgumentException("Expected REFERENCE mapped field type but found " + fieldType);

            fieldObject = unsafe.getObject(obj, fieldOffset);
            if(fieldObject == null)
                return null;
            return ((HollowObjectTypeMapper)subTypeMapper).retrieveFieldValue(fieldObject, fieldPathIdx, idx+1);
        }

        switch(fieldType) {
            case BOOLEAN:
                return unsafe.getBoolean(obj, fieldOffset);
            case INT:
                return Integer.valueOf(unsafe.getInt(obj, fieldOffset));
            case SHORT:
                return Integer.valueOf(unsafe.getShort(obj, fieldOffset));
            case BYTE:
                return Integer.valueOf(unsafe.getByte(obj, fieldOffset));
            case CHAR:
                return Integer.valueOf(unsafe.getChar(obj, fieldOffset));
            case LONG:
                return Long.valueOf(unsafe.getLong(obj, fieldOffset));
            case DOUBLE:
                double d = unsafe.getDouble(obj, fieldOffset);
                if(Double.isNaN(d))
                    return null;
                return Double.valueOf(d);
            case FLOAT:
                float f = unsafe.getFloat(obj, fieldOffset);
                if(Float.isNaN(f))
                    return null;
                return Float.valueOf(f);
            case STRING:
                fieldObject = unsafe.getObject(obj, fieldOffset);
                return fieldObject == null ? null : getStringFromField(obj, fieldObject);
            case BYTES:
                fieldObject = unsafe.getObject(obj, fieldOffset);
                return fieldObject == null ? null : (byte[])fieldObject;
            case INLINED_BOOLEAN:
            case INLINED_INT:
            case INLINED_LONG:
            case INLINED_DOUBLE:
            case INLINED_FLOAT:
            case INLINED_STRING:
                return unsafe.getObject(obj, fieldOffset);
            case INLINED_SHORT:
                // Short/Byte/Char key values are normalized to Integer, matching the
                // schema field type (INT) they are written as.
                fieldObject = unsafe.getObject(obj, fieldOffset);
                return fieldObject == null ? null : Integer.valueOf((Short)fieldObject);
            case INLINED_BYTE:
                fieldObject = unsafe.getObject(obj, fieldOffset);
                return fieldObject == null ? null : Integer.valueOf((Byte)fieldObject);
            case INLINED_CHAR:
                fieldObject = unsafe.getObject(obj, fieldOffset);
                return fieldObject == null ? null : Integer.valueOf((Character)fieldObject);
            case NULLABLE_PRIMITIVE_BOOLEAN:
                fieldObject = unsafe.getObject(obj, fieldOffset);
                return fieldObject == null ? null : Boolean.valueOf(((NullablePrimitiveBoolean)fieldObject).getBooleanValue());
            case DATE_TIME:
                return Long.valueOf(((Date)obj).getTime());
            case ENUM_NAME:
                return String.valueOf(((Enum<?>)obj).name());
            default:
                throw new IllegalArgumentException("Cannot extract POJO primary key from a " + fieldType + " mapped field type");
        }
    }

    /**
     * Extracts the String value for a STRING field. When the mapped POJO is itself a
     * String (byte[]-backed value field), {@code obj} is that String; otherwise the
     * field must hold a char[] which is converted directly.
     */
    private String getStringFromField(Object obj, Object fieldObject) {
        if (obj instanceof String) {
            return (String) obj;
        } else if (fieldObject instanceof char[]) {
            return new String((char[]) fieldObject);
        }
        throw new IllegalArgumentException("Expected char[] or String value container for STRING.");
    }
}
/**
 * The kinds of POJO fields the mapper understands, each paired with the hollow schema
 * {@link FieldType} it is written as. SHORT/BYTE/CHAR (and their INLINED variants)
 * all serialize as schema INT; ENUM_NAME and DATE_TIME are synthetic fields with
 * fixed special names ("_name" and "value" respectively).
 */
private static enum MappedFieldType {
    BOOLEAN(FieldType.BOOLEAN),
    NULLABLE_PRIMITIVE_BOOLEAN(FieldType.BOOLEAN),
    BYTES(FieldType.BYTES),
    DOUBLE(FieldType.DOUBLE),
    FLOAT(FieldType.FLOAT),
    INT(FieldType.INT),
    SHORT(FieldType.INT),
    BYTE(FieldType.INT),
    CHAR(FieldType.INT),
    LONG(FieldType.LONG),
    STRING(FieldType.STRING),
    INLINED_BOOLEAN(FieldType.BOOLEAN),
    INLINED_DOUBLE(FieldType.DOUBLE),
    INLINED_FLOAT(FieldType.FLOAT),
    INLINED_INT(FieldType.INT),
    INLINED_SHORT(FieldType.INT),
    INLINED_BYTE(FieldType.INT),
    INLINED_CHAR(FieldType.INT),
    INLINED_LONG(FieldType.LONG),
    INLINED_STRING(FieldType.STRING),
    REFERENCE(FieldType.REFERENCE),
    ENUM_NAME(FieldType.STRING, "_name"),
    DATE_TIME(FieldType.LONG, "value");

    // Schema-level field type this mapped kind serializes as.
    private final FieldType schemaFieldType;
    // Fixed field name for synthetic special fields; null for ordinary fields.
    private final String specialFieldName;

    private MappedFieldType(FieldType schemaFieldType) {
        this.specialFieldName = null;
        this.schemaFieldType = schemaFieldType;
    }

    private MappedFieldType(FieldType schemaFieldType, String specialFieldName) {
        this.schemaFieldType = schemaFieldType;
        this.specialFieldName = specialFieldName;
    }

    public String getSpecialFieldName() {
        return specialFieldName;
    }

    public FieldType getSchemaFieldType() {
        return schemaFieldType;
    }
}
}
| 8,991 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowTypeMapper.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.objectmapper;
import com.netflix.hollow.core.memory.ByteDataArray;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.write.HollowTypeWriteState;
import com.netflix.hollow.core.write.HollowWriteRecord;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordReader;
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordWriter;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Base class for mappers that translate POJOs of one hollow type into write records.
 * Subclasses supply the type name, write logic, and the backing write state.
 */
public abstract class HollowTypeMapper {

    // Upper 32 bits of a memoized ordinal tag the cycle on which it was assigned.
    public static final long ASSIGNED_ORDINAL_CYCLE_MASK = 0xFFFFFFFF00000000L;

    // Per-thread scratch instances, lazily created on first use so each writer thread
    // reuses its own record/buffer without synchronization. withInitial() replaces the
    // previous manual get()/null-check/set() lazy-init idiom; the supplier is only
    // invoked on first get(), after construction completes.
    private final ThreadLocal<HollowWriteRecord> writeRec = ThreadLocal.withInitial(this::newWriteRecord);
    private final ThreadLocal<ByteDataArray> flatRecBuffer = ThreadLocal.withInitial(ByteDataArray::new);

    protected abstract String getTypeName();

    /** Writes {@code obj} into this type's write state, returning its assigned ordinal. */
    protected abstract int write(Object obj);

    /** Writes {@code obj} as a flat record, returning its ordinal within the flat record. */
    protected abstract int writeFlat(Object obj, FlatRecordWriter flatRecordWriter);

    protected abstract Object parseFlatRecord(HollowSchema schema, FlatRecordReader reader, Map<Integer, Object> parsedObjects);

    protected abstract HollowWriteRecord newWriteRecord();

    protected abstract HollowTypeWriteState getTypeWriteState();

    /** Registers this type's write state with the state engine if not already present. */
    protected void addTypeState(HollowWriteStateEngine stateEngine) {
        if(stateEngine.getTypeState(getTypeName()) == null)
            stateEngine.addTypeState(getTypeWriteState());
    }

    /** Returns this thread's scratch write record, reset and ready for reuse. */
    protected HollowWriteRecord writeRecord() {
        HollowWriteRecord rec = writeRec.get();
        rec.reset();
        return rec;
    }

    /** Returns this thread's scratch flat-record buffer, reset and ready for reuse. */
    protected ByteDataArray flatRecBuffer() {
        ByteDataArray buf = flatRecBuffer.get();
        buf.reset();
        return buf;
    }

    /**
     * Calculates the type name from a given type.
     * <p>
     * If the type is annotated with {@link HollowTypeName} then the type name
     * is the value of the {@code HollowTypeName.name} attribute. Otherwise
     * the type name is derived from the type itself.
     * If the type is a {@code Class} then the type name is the simple name of
     * that class.
     * If the type is a parameterized type and is assignable to a class of {@code List},
     * {@code Set}, or {@code Map} then the type name begins with the simple class name of
     * the parameterized type's raw type, followed by "Of", followed by the result of
     * calling this method with the associated parameterized types (in order, in-fixed by "To").
     * Otherwise, the type name is the simple class name of the parameterized type's raw type.
     * <p>
     * The translation from type to type name is lossy since the simple class name of a class
     * is used. This means that no two types, from different packages, but with the same simple
     * name can be utilized.
     *
     * @param type the type
     * @return the type name.
     */
    public static String getDefaultTypeName(Type type) {
        if(type instanceof Class) {
            Class<?> clazz = (Class<?>)type;
            HollowTypeName explicitTypeName = clazz.getAnnotation(HollowTypeName.class);
            if(explicitTypeName != null)
                return explicitTypeName.name();
            return clazz.getSimpleName();
        }

        ParameterizedType parameterizedType = (ParameterizedType)type;
        Class<?> clazz = (Class<?>)parameterizedType.getRawType();

        if(List.class.isAssignableFrom(clazz))
            return "ListOf" + getDefaultTypeName(parameterizedType.getActualTypeArguments()[0]);
        if(Set.class.isAssignableFrom(clazz))
            return "SetOf" + getDefaultTypeName(parameterizedType.getActualTypeArguments()[0]);
        if(Map.class.isAssignableFrom(clazz))
            return "MapOf" + getDefaultTypeName(parameterizedType.getActualTypeArguments()[0]) + "To" + getDefaultTypeName(parameterizedType.getActualTypeArguments()[1]);

        return clazz.getSimpleName();
    }

    /** The cycle tag (upper 32 bits) to stamp onto ordinals memoized during this cycle. */
    protected long cycleSpecificAssignedOrdinalBits() {
        return getTypeWriteState().getStateEngine().getNextStateRandomizedTag() & ASSIGNED_ORDINAL_CYCLE_MASK;
    }
}
| 8,992 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowHashKey.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.objectmapper;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Indicates that a field, declared by a hollow object type (a POJO),
* of type {@code Set} or {@code Map} has a hash key defining how the
* hollow objects that are elements in a {@code Set} or are keys in a
* {@code Map} are hashed.
* <p>
* A hash is derived from the sequence of values obtained by resolving
* the {@link #fields field} paths (in order) given an hollow object that is
* the element or key.
* Such hashes are used to distribute the hollow objects encoded within a
* hollow set or map.
* <p>
* By default if this annotation is not declared on a field of type {@code Set} or {@code Map},
* referred to as the hash key field, then a hash key is derived from the element or key type
* as follows.
* If the type is annotated with {@link HollowPrimaryKey} then it's as if the
* hash key field is annotated with {@code HollowHashKey} with the same field paths as
* declared by the {@code HollowPrimaryKey}.
* Otherwise, if the type declares exactly one field whose type is a primitive type then
* it's as if the hash key field is annotated with {@code HollowHashKey} with a single
* field path that is the name of that one field.
* Otherwise, it's as if the field is annotated with {@code HollowHashKey} with an empty
* field paths array (indicating the ordinal of an element or key is used as the hash).
* This annotation with an empty array may be utilized to enforce the latter case,
* overriding one of the other prior cases, if applicable.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface HollowHashKey {
    /**
     * Returns the field paths of the hash key.
     * <p>
     * An empty array indicates that the ordinal of an element in a set
     * or a key in a map is used as the hash.
     * <p>
     * Example: {@code @HollowHashKey(fields = {"id", "country.value"})} hashes
     * each element/key by the values reached via those two paths, in order.
     *
     * @return the field paths of the hash key
     */
    String[] fields();
}
| 8,993 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/MemoizedSet.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.objectmapper;
import java.util.Collection;
import java.util.HashSet;
/**
* A MemoizedSet is a java.util.HashSet which is expected to be memoized during a producer cycle.
*
* When a HollowObjectMapper adds a MemoizedSet to the HollowWriteStateEngine, it will tag it with the ordinal
* which the corresponding record is assigned. If the same MemoizedSet instance is encountered during the same cycle,
* then it will short-circuit the process of serializing the set -- returning the previously memoized ordinal.
*/
/**
 * A {@link HashSet} whose serialized ordinal is memoized during a producer cycle.
 *
 * <p>When a HollowObjectMapper adds a MemoizedSet to the HollowWriteStateEngine it tags
 * the instance with the ordinal assigned to the corresponding record. If the same
 * instance is encountered again within the same cycle, serialization is short-circuited
 * and the previously memoized ordinal is returned.
 */
public class MemoizedSet<E> extends HashSet<E> {

    private static final long serialVersionUID = -3603271528350592970L;

    // Memoized ordinal, tagged with cycle-specific bits by the mapper; -1 = unassigned.
    // Transient: purely a serialization-time cache, never part of the set's own state.
    transient long __assigned_ordinal = -1;

    public MemoizedSet() {
    }

    public MemoizedSet(int initialCapacity) {
        super(initialCapacity);
    }

    public MemoizedSet(int initialCapacity, float loadFactor) {
        super(initialCapacity, loadFactor);
    }

    public MemoizedSet(Collection<? extends E> source) {
        super(source);
    }
}
| 8,994 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowListTypeMapper.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.objectmapper;
import com.netflix.hollow.core.schema.HollowListSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.util.IntList;
import com.netflix.hollow.core.write.HollowListTypeWriteState;
import com.netflix.hollow.core.write.HollowListWriteRecord;
import com.netflix.hollow.core.write.HollowTypeWriteState;
import com.netflix.hollow.core.write.HollowWriteRecord;
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordReader;
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordWriter;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Maps {@code java.util.List} POJO values into a hollow LIST type.
 * <p>
 * Element ordering is preserved unless {@code ignoreListOrdering} is set, in which case
 * element ordinals are sorted before being recorded. MemoizedList instances are
 * short-circuited when they were already written during the current cycle.
 */
public class HollowListTypeMapper extends HollowTypeMapper {

    private static final String NULL_ELEMENT_MESSAGE =
            "Null element contained in instance of a List with schema \"%s\". Lists cannot contain null elements";

    private final HollowListSchema schema;
    private final HollowListTypeWriteState writeState;

    // Per-thread scratch list used to collect and sort element ordinals when
    // ignoreListOrdering is enabled.
    private final ThreadLocal<IntList> intList = new ThreadLocal<IntList>();
    private final boolean ignoreListOrdering;

    private final HollowTypeMapper elementMapper;

    public HollowListTypeMapper(HollowObjectMapper parentMapper, ParameterizedType type, String declaredName, int numShards, boolean ignoreListOrdering, Set<Type> visited) {
        this.elementMapper = parentMapper.getTypeMapper(type.getActualTypeArguments()[0], null, null, -1, visited);
        String typeName = declaredName != null ? declaredName : getDefaultTypeName(type);
        this.schema = new HollowListSchema(typeName, elementMapper.getTypeName());
        this.ignoreListOrdering = ignoreListOrdering;
        // Reuse an already-registered write state for this type name if one exists.
        HollowListTypeWriteState existingTypeState = (HollowListTypeWriteState)parentMapper.getStateEngine().getTypeState(typeName);
        this.writeState = existingTypeState != null ? existingTypeState : new HollowListTypeWriteState(schema, numShards);
    }

    @Override
    public String getTypeName() {
        return schema.getName();
    }

    @Override
    public int write(Object obj) {
        if(obj instanceof MemoizedList) {
            // The upper 32 bits of the memoized value tag the cycle on which the ordinal
            // was assigned; if it matches the current cycle, skip re-serialization.
            long assignedOrdinal = ((MemoizedList<?>)obj).__assigned_ordinal;
            if((assignedOrdinal & ASSIGNED_ORDINAL_CYCLE_MASK) == cycleSpecificAssignedOrdinalBits())
                return (int)assignedOrdinal & Integer.MAX_VALUE;  // low 31 bits hold the ordinal
        }

        List<?> l = (List<?>)obj;

        HollowListWriteRecord rec = copyToWriteRecord(l, null);

        int assignedOrdinal = writeState.add(rec);

        if(obj instanceof MemoizedList) {
            // Memoize: ordinal in the low bits, current cycle tag in the high bits.
            ((MemoizedList<?>)obj).__assigned_ordinal = (long)assignedOrdinal | cycleSpecificAssignedOrdinalBits();
        }

        return assignedOrdinal;
    }

    public int writeFlat(Object obj, FlatRecordWriter flatRecordWriter) {
        HollowListWriteRecord rec = copyToWriteRecord((List<?>)obj, flatRecordWriter);
        return flatRecordWriter.write(schema, rec);
    }

    /**
     * Writes each element via the element mapper and records the resulting ordinals,
     * sorted when ignoreListOrdering is set. Null elements are rejected.
     */
    private HollowListWriteRecord copyToWriteRecord(List<?> l, FlatRecordWriter flatRecordWriter) {
        HollowListWriteRecord rec = (HollowListWriteRecord) writeRecord();
        if (ignoreListOrdering) {
            IntList ordinalList = getIntList();
            for (Object o : l) {
                if (o == null) {
                    throw new NullPointerException(String.format(NULL_ELEMENT_MESSAGE, schema));
                }
                int ordinal = flatRecordWriter == null ? elementMapper.write(o) : elementMapper.writeFlat(o, flatRecordWriter);
                ordinalList.add(ordinal);
            }
            ordinalList.sort();
            for (int i = 0; i < ordinalList.size(); i++)
                rec.addElement(ordinalList.get(i));
        } else {
            for (Object o : l) {
                if (o == null) {
                    throw new NullPointerException(String.format(NULL_ELEMENT_MESSAGE, schema));
                }
                int ordinal = flatRecordWriter == null ? elementMapper.write(o) : elementMapper.writeFlat(o, flatRecordWriter);
                rec.addElement(ordinal);
            }
        }
        return rec;
    }

    @Override
    protected Object parseFlatRecord(HollowSchema recordSchema, FlatRecordReader reader, Map<Integer, Object> parsedObjects) {
        // Reassemble the list from previously parsed element objects, looked up by ordinal.
        List<Object> collection = new ArrayList<>();
        int size = reader.readCollectionSize();
        for (int i = 0; i < size; i++) {
            int ordinal = reader.readOrdinal();
            Object element = parsedObjects.get(ordinal);
            collection.add(element);
        }
        return collection;
    }

    @Override
    protected HollowWriteRecord newWriteRecord() {
        return new HollowListWriteRecord();
    }

    // Returns this thread's scratch IntList, cleared and ready for reuse.
    private IntList getIntList() {
        IntList list = intList.get();
        if(list == null) {
            list = new IntList();
            intList.set(list);
        }
        list.clear();
        return list;
    }

    @Override
    protected HollowTypeWriteState getTypeWriteState() {
        return writeState;
    }
}
| 8,995 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowTransient.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.objectmapper;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Indicates that a field of a POJO is to be ignored.
* <p>
* This annotation should be used when the {@code transient} field
* modifier cannot be declared (for example if a JVM compatible
* language is used to represent the POJOs and there are limitations
* in expressing corresponding transient fields in Java byte code).
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface HollowTransient {
    // Marker annotation: no attributes. Its presence alone causes the annotated
    // POJO field to be ignored (see class javadoc above).
}
| 8,996 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowSetTypeMapper.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.objectmapper;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSetSchema;
import com.netflix.hollow.core.util.HollowObjectHashCodeFinder;
import com.netflix.hollow.core.write.HollowSetTypeWriteState;
import com.netflix.hollow.core.write.HollowSetWriteRecord;
import com.netflix.hollow.core.write.HollowTypeWriteState;
import com.netflix.hollow.core.write.HollowWriteRecord;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordReader;
import com.netflix.hollow.core.write.objectmapper.flatrecords.FlatRecordWriter;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
 * Maps Java {@link Set} instances to hollow SET records during a write cycle.
 * One instance exists per distinct Set type discovered by the {@link HollowObjectMapper};
 * it owns the type's {@link HollowSetSchema} and shared {@link HollowSetTypeWriteState}.
 */
public class HollowSetTypeMapper extends HollowTypeMapper {
    // Format string for the NPE thrown on null elements; %s is filled with the set's schema.
    private static final String NULL_ELEMENT_MESSAGE =
            "Null element contained in instance of a Set with schema \"%s\". Sets cannot contain null elements";

    private final HollowSetSchema schema;                    // schema for this set type: name, element type, optional hash key
    private final HollowSetTypeWriteState writeState;        // write state shared by all instances of this type
    private final HollowObjectHashCodeFinder hashCodeFinder; // supplies per-element hash codes used for set bucketing
    private final HollowTypeMapper elementMapper;            // mapper for the set's element type

    /**
     * Builds the mapper for one Set type.
     *
     * @param parentMapper       the owning object mapper, used to resolve the element mapper and existing type state
     * @param type               the parameterized Set type; its first type argument is the element type
     * @param declaredName       explicit type name, or null to derive a default from {@code type}
     * @param hashKeyFieldPaths  field paths forming the set's hash key, or null
     * @param numShards          shard count for a newly created write state (ignored if one already exists)
     * @param stateEngine        source of the hash code finder
     * @param useDefaultHashKeys when true and no explicit key is given, derive a default key from the element type
     * @param visited            types already seen on this resolution path; guards against infinite recursion — TODO confirm
     */
    public HollowSetTypeMapper(HollowObjectMapper parentMapper, ParameterizedType type, String declaredName, String[] hashKeyFieldPaths, int numShards, HollowWriteStateEngine stateEngine, boolean useDefaultHashKeys, Set<Type> visited) {
        // Resolve the mapper for the element type (first type argument of Set<E>).
        this.elementMapper = parentMapper.getTypeMapper(type.getActualTypeArguments()[0], null, null, -1, visited);

        String typeName = declaredName != null ? declaredName : getDefaultTypeName(type);

        // No explicit hash key: when enabled, fall back to the element type's default key
        // (only possible when the element is an object type).
        if(hashKeyFieldPaths == null && useDefaultHashKeys && (elementMapper instanceof HollowObjectTypeMapper))
            hashKeyFieldPaths = ((HollowObjectTypeMapper)elementMapper).getDefaultElementHashKey();

        this.schema = new HollowSetSchema(typeName, elementMapper.getTypeName(), hashKeyFieldPaths);
        this.hashCodeFinder = stateEngine.getHashCodeFinder();

        // Reuse an already-registered write state for this type name so multiple mappers
        // share one state; otherwise create a fresh one with the requested shard count.
        HollowSetTypeWriteState existingTypeState = (HollowSetTypeWriteState) parentMapper.getStateEngine().getTypeState(typeName);
        this.writeState = existingTypeState != null ? existingTypeState : new HollowSetTypeWriteState(schema, numShards);
    }

    @Override
    protected String getTypeName() {
        return schema.getName();
    }

    /**
     * Serializes one Set instance into the write state and returns its assigned ordinal.
     * MemoizedSet instances cache their ordinal tagged with cycle-specific bits so repeat
     * writes within the same cycle are no-ops.
     */
    @Override
    protected int write(Object obj) {
        if(obj instanceof MemoizedSet) {
            long assignedOrdinal = ((MemoizedSet<?>)obj).__assigned_ordinal;
            // Fast path: the memo is only valid if its upper bits match the current
            // cycle's marker; a stale memo from a prior cycle is ignored.
            if((assignedOrdinal & ASSIGNED_ORDINAL_CYCLE_MASK) == cycleSpecificAssignedOrdinalBits())
                return (int)assignedOrdinal & Integer.MAX_VALUE; // strip the cycle bits, keep the ordinal
        }

        Set<?> s = (Set<?>)obj;

        HollowSetWriteRecord rec = copyToWriteRecord(s, null);

        int assignedOrdinal = writeState.add(rec);

        if(obj instanceof MemoizedSet) {
            // Memoize: ordinal in the low bits, current cycle marker in the high bits.
            ((MemoizedSet<?>)obj).__assigned_ordinal = (long)assignedOrdinal | cycleSpecificAssignedOrdinalBits();
        }

        return assignedOrdinal;
    }

    /**
     * Serializes one Set instance into a flat record (no shared write state, no memoization).
     */
    @Override
    protected int writeFlat(Object obj, FlatRecordWriter flatRecordWriter) {
        HollowSetWriteRecord rec = copyToWriteRecord((Set<?>)obj, flatRecordWriter);
        return flatRecordWriter.write(schema, rec);
    }

    /**
     * Copies the set's elements into a write record, delegating element serialization to
     * the element mapper (flat-record path when {@code flatRecordWriter} is non-null).
     * Rejects null elements, which hollow sets cannot represent.
     */
    private HollowSetWriteRecord copyToWriteRecord(Set<?> s, FlatRecordWriter flatRecordWriter) {
        HollowSetWriteRecord rec = (HollowSetWriteRecord)writeRecord();
        for(Object o : s) {
            if(o == null) {
                throw new NullPointerException(String.format(NULL_ELEMENT_MESSAGE, schema));
            }
            int ordinal = flatRecordWriter == null ? elementMapper.write(o) : elementMapper.writeFlat(o, flatRecordWriter);
            int hashCode = hashCodeFinder.hashCode(elementMapper.getTypeName(), ordinal, o);
            rec.addElement(ordinal, hashCode);
        }
        return rec;
    }

    /**
     * Reconstructs a HashSet from a flat record. Element ordinals are delta-encoded:
     * each stored value is added to the running ordinal before looking up the
     * already-parsed element object. {@code recordSchema} is unused here.
     */
    @Override
    protected Object parseFlatRecord(HollowSchema recordSchema, FlatRecordReader reader, Map<Integer, Object> parsedObjects) {
        Set<Object> collection = new HashSet<>();

        int size = reader.readCollectionSize();
        int ordinal = 0;
        for (int i = 0; i < size; i++) {
            int ordinalDelta = reader.readOrdinal();
            ordinal += ordinalDelta; // deltas accumulate; ordinals are stored in ascending order — TODO confirm
            Object element = parsedObjects.get(ordinal);
            collection.add(element);
        }

        return collection;
    }

    @Override
    protected HollowWriteRecord newWriteRecord() {
        return new HollowSetWriteRecord();
    }

    @Override
    protected HollowTypeWriteState getTypeWriteState() {
        return writeState;
    }
}
| 8,997 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/HollowShardLargeType.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.objectmapper;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Fixes the shard count used to encode a hollow record type, overriding the
 * automatic calculation.
 * <p>
 * Place this on a POJO type, or on a POJO field of type {@code List},
 * {@code Set}, or {@code Map}, when the shard count derived automatically from
 * the target shard size is not appropriate for that type.
 * <p>
 * With {@code P} shards and {@code N} records of the type (each record holding
 * a unique ordinal {@code O}, {@code 0 <= O < N}), a record lands in shard
 * {@code O & (P - 1)}.
 * <p>
 * Without this annotation the shard count is computed from the configured
 * target shard size in bytes
 * (see
 * {@link com.netflix.hollow.api.producer.HollowProducer.Builder#withTargetMaxTypeShardSize(long)
 * HollowProducer.Builder.withTargetMaxTypeShardSize}
 * and
 * {@link com.netflix.hollow.core.write.HollowWriteStateEngine#setTargetMaxTypeShardSize(long)
 * HollowWriteStateEngine.setTargetMaxTypeShardSize}
 * )
 * together with the projected byte size of the type's records: e.g. a
 * {@code 25MB} target and a {@code 50MB} projection yield {@code 2} shards.
 *
 * @see <a href="https://hollow.how/advanced-topics/#type-sharding">Type-sharding documentation</a>
 */
@Retention(RetentionPolicy.RUNTIME)
@Target( {ElementType.TYPE, ElementType.FIELD})
public @interface HollowShardLargeType {
    /**
     * The fixed shard count for the annotated hollow record type; must be a
     * power of 2.
     *
     * @return the number of shards to partition a hollow record
     */
    int numShards();
}
| 8,998 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/write/objectmapper/NullablePrimitiveBoolean.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.write.objectmapper;
/**
 * Tri-state stand-in for a primitive boolean, where absence is modeled by a
 * null reference to this enum.
 *
 * @deprecated Use the @HollowInline annotation with a Boolean field instead.
 */
@Deprecated
public enum NullablePrimitiveBoolean {
    TRUE,
    FALSE;

    /** Converts this constant to its primitive boolean value. */
    public boolean getBooleanValue() {
        switch (this) {
            case TRUE:
                return true;
            default:
                return false;
        }
    }
}
| 8,999 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.