index
int64
0
0
repo_id
stringlengths
26
205
file_path
stringlengths
51
246
content
stringlengths
8
433k
__index_level_0__
int64
0
10k
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine/object/HollowObjectTypeReadStateShard.java
/*
 *  Copyright 2016-2019 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.hollow.core.read.engine.object;

import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE;

import com.netflix.hollow.core.memory.ByteData;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.tools.checksum.HollowChecksum;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Collections;
import java.util.List;

/**
 * One shard of an OBJECT-type read state. A shard owns a single
 * {@link HollowObjectTypeDataElements} holding the fixed-length record data and the
 * per-field variable-length byte pools, and translates (shard-local ordinal, field index)
 * pairs into bit offsets within that data.
 *
 * <p>Fixed-length field values are packed at {@code bitsPerRecord}-bit strides; each field
 * occupies {@code bitsPerField[fieldIndex]} bits at {@code bitOffsetPerField[fieldIndex]}
 * within the record (see {@link #fieldOffset}).
 *
 * <p>Variable-length fields (STRING/BYTES) store an end-byte pointer per record into the
 * field's var-length pool; a record's start byte is the previous record's end byte (or 0
 * for the first record). The highest bit of the stored pointer (bit
 * {@code numBitsForField - 1}) flags a null value — see the mask checks in
 * {@link #readBytes}, {@link #readString}, {@link #isStringFieldEqual} and
 * {@link #findVarLengthFieldHashCode}.
 */
class HollowObjectTypeReadStateShard {

    // Data elements backing this shard; package-visible so HollowObjectTypeReadState can
    // re-validate optimistic reads and perform resharding against it.
    final HollowObjectTypeDataElements dataElements;

    // Right-shift applied to a type-wide ordinal to obtain this shard's local ordinal.
    final int shardOrdinalShift;

    private final HollowObjectSchema schema;

    HollowObjectTypeReadStateShard(HollowObjectSchema schema, HollowObjectTypeDataElements dataElements, int shardOrdinalShift) {
        this.schema = schema;
        this.shardOrdinalShift = shardOrdinalShift;
        this.dataElements = dataElements;
    }

    /**
     * Reads the raw fixed-length value of a field for a shard-local ordinal.
     * Values wider than 56 bits may straddle a long boundary, so they take the
     * "large element" read path.
     */
    public long readValue(int ordinal, int fieldIndex) {
        long bitOffset = fieldOffset(ordinal, fieldIndex);
        int numBitsForField = dataElements.bitsPerField[fieldIndex];
        return numBitsForField <= 56 ?
                dataElements.fixedLengthData.getElementValue(bitOffset, numBitsForField)
                : dataElements.fixedLengthData.getLargeElementValue(bitOffset, numBitsForField);
    }

    /** Reads a REFERENCE field's raw ordinal value (null sentinel is resolved by the caller). */
    public long readOrdinal(int ordinal, int fieldIndex) {
        return readFixedLengthFieldValue(ordinal, fieldIndex);
    }

    /** Reads an INT field's raw (zig-zag encoded) value; the caller decodes and handles null. */
    public long readInt(int ordinal, int fieldIndex) {
        return readFixedLengthFieldValue(ordinal, fieldIndex);
    }

    /** Reads a FLOAT field's raw IEEE-754 int bits; the caller converts via Float.intBitsToFloat. */
    public int readFloat(int ordinal, int fieldIndex) {
        return (int)readFixedLengthFieldValue(ordinal, fieldIndex);
    }

    /** Reads a DOUBLE field's raw 64 IEEE-754 long bits; the caller converts via Double.longBitsToDouble. */
    public long readDouble(int ordinal, int fieldIndex) {
        long bitOffset = fieldOffset(ordinal, fieldIndex);
        return dataElements.fixedLengthData.getLargeElementValue(bitOffset, 64, -1L);
    }

    /** Reads a LONG field's raw (zig-zag encoded) value; may exceed 56 bits, hence the large-element path. */
    public long readLong(int ordinal, int fieldIndex) {
        long bitOffset = fieldOffset(ordinal, fieldIndex);
        int numBitsForField = dataElements.bitsPerField[fieldIndex];
        return dataElements.fixedLengthData.getLargeElementValue(bitOffset, numBitsForField);
    }

    /** Reads a BOOLEAN field's raw value (1 = true; null sentinel resolved by the caller). */
    public long readBoolean(int ordinal, int fieldIndex) {
        return readFixedLengthFieldValue(ordinal, fieldIndex);
    }

    // Common fixed-length read path for fields guaranteed to fit the non-large read
    // (ordinals, ints, floats, booleans).
    private long readFixedLengthFieldValue(int ordinal, int fieldIndex) {
        long bitOffset = fieldOffset(ordinal, fieldIndex);
        int numBitsForField = dataElements.bitsPerField[fieldIndex];

        long value = dataElements.fixedLengthData.getElementValue(bitOffset, numBitsForField);

        return value;
    }

    /**
     * Copies a BYTES field's payload out of the var-length pool.
     *
     * @param startByte previous record's raw end pointer (0 for the shard's first record)
     * @param endByte   this record's raw end pointer; high bit set means null
     * @return the bytes, or null if the null bit is set in {@code endByte}
     */
    public byte[] readBytes(long startByte, long endByte, int numBitsForField, int fieldIndex) {
        byte[] result;

        // Note: '-' binds tighter than '<<', so this tests bit (numBitsForField - 1),
        // the null-flag bit of the stored pointer.
        if((endByte & (1L << numBitsForField - 1)) != 0)
            return null;

        // Clear the null-flag bit from the previous record's pointer to get the true start offset.
        startByte &= (1L << numBitsForField - 1) - 1;

        int length = (int)(endByte - startByte);
        result = new byte[length];
        for(int i=0;i<length;i++)
            result[i] = dataElements.varLengthData[fieldIndex].get(startByte + i);

        return result;
    }

    /**
     * Reads a STRING field from the var-length pool.
     *
     * @return the decoded String, or null if the null bit is set in {@code endByte}
     */
    public String readString(long startByte, long endByte, int numBitsForField, int fieldIndex) {
        if((endByte & (1L << numBitsForField - 1)) != 0)
            return null;

        startByte &= (1L << numBitsForField - 1) - 1;

        int length = (int)(endByte - startByte);

        return readString(dataElements.varLengthData[fieldIndex], startByte, length);
    }

    /**
     * Compares a stored STRING field against {@code testValue} without materializing the
     * stored String. A stored null matches only a null {@code testValue}.
     */
    public boolean isStringFieldEqual(long startByte, long endByte, int numBitsForField, int fieldIndex, String testValue) {
        if((endByte & (1L << numBitsForField - 1)) != 0)
            return testValue == null;
        if(testValue == null)
            return false;

        startByte &= (1L << numBitsForField - 1) - 1;

        int length = (int)(endByte - startByte);

        return testStringEquality(dataElements.varLengthData[fieldIndex], startByte, length, testValue);
    }

    /**
     * Hashes a var-length field's raw bytes.
     *
     * @return the hash code, or -1 if the stored value is null
     */
    public int findVarLengthFieldHashCode(long startByte, long endByte, int numBitsForField, int fieldIndex) {
        if((endByte & (1L << numBitsForField - 1)) != 0)
            return -1;

        startByte &= (1L << numBitsForField - 1) - 1;

        int length = (int)(endByte - startByte);

        return HashCodes.hashCode(dataElements.varLengthData[fieldIndex], startByte, length);
    }

    /**
     * Warning: Not thread-safe. Should only be called within the update thread.
     *
     * @return the bit width of the named field in this shard, or 0 if the field is not in the schema
     */
    public int bitsRequiredForField(String fieldName) {
        int fieldIndex = schema.getPosition(fieldName);
        return fieldIndex == -1 ? 0 : dataElements.bitsPerField[fieldIndex];
    }

    // Absolute bit offset of a field within the packed fixed-length data:
    // record stride times shard-local ordinal, plus the field's offset within the record.
    long fieldOffset(int ordinal, int fieldIndex) {
        return ((long)dataElements.bitsPerRecord * ordinal) + dataElements.bitOffsetPerField[fieldIndex];
    }

    /**
     * Decode a String as a series of VarInts, one per character.<p>
     */
    // Reusable per-thread decode buffer; grown (without being retained) when a value
    // exceeds the default 100 chars.
    private static final ThreadLocal<char[]> chararr = ThreadLocal.withInitial(() -> new char[100]);

    private String readString(ByteData data, long position, int length) {
        char[] chararr = HollowObjectTypeReadStateShard.chararr.get();
        if (length > chararr.length) {
            chararr = new char[length];
        } else {
            Arrays.fill(chararr, 0, length, '\0');
        }

        int count = VarInt.readVIntsInto(data, position, length, chararr);

        // The number of chars may be fewer than the number of bytes in the serialized data
        return new String(chararr, 0, count);
    }

    // Streaming equality check: walks the VarInt-encoded chars and compares against
    // testValue without allocating; requires both the byte span and char count to be
    // fully consumed for a match.
    private boolean testStringEquality(ByteData data, long position, int length, String testValue) {
        if(length < testValue.length()) // can't check exact length here; the length argument is in bytes, which is equal to or greater than the number of characters.
            return false;

        long endPosition = position + length;
        int count = 0;

        while(position < endPosition && count < testValue.length()) {
            int c = VarInt.readVInt(data, position);
            if(testValue.charAt(count++) != (char)c)
                return false;
            position += VarInt.sizeOfVInt(c);
        }

        // The number of chars may be fewer than the number of bytes in the serialized data
        return position == endPosition && count == testValue.length();
    }

    /**
     * Folds this shard's records into {@code checksum}, restricted to the fields common to
     * {@code withSchema} (field names sorted for a deterministic order) and to the populated
     * ordinals that map to this shard ({@code (ordinal & shardNumberMask) == shardNumber}).
     * Null fixed-length values contribute Integer.MAX_VALUE; var-length values contribute
     * their hash code.
     *
     * @throws IllegalArgumentException if {@code withSchema} is not a HollowObjectSchema
     */
    protected void applyShardToChecksum(HollowChecksum checksum, HollowSchema withSchema, BitSet populatedOrdinals, int shardNumber, int shardNumberMask) {
        int numBitsForField;
        long bitOffset;
        long endByte;
        long startByte;

        if(!(withSchema instanceof HollowObjectSchema))
            throw new IllegalArgumentException("HollowObjectTypeReadState can only calculate checksum with a HollowObjectSchema: " + schema.getName());

        HollowObjectSchema commonSchema = schema.findCommonSchema((HollowObjectSchema)withSchema);

        List<String> commonFieldNames = new ArrayList<String>();
        for(int i=0;i<commonSchema.numFields();i++)
            commonFieldNames.add(commonSchema.getFieldName(i));
        Collections.sort(commonFieldNames);

        int fieldIndexes[] = new int[commonFieldNames.size()];
        for(int i=0;i<commonFieldNames.size();i++) {
            fieldIndexes[i] = schema.getPosition(commonFieldNames.get(i));
        }

        int ordinal = populatedOrdinals.nextSetBit(0);
        while(ordinal != ORDINAL_NONE) {
            if((ordinal & shardNumberMask) == shardNumber) {
                // Type-wide ordinal maps to this shard's local ordinal via the shard shift.
                int shardOrdinal = ordinal >> shardOrdinalShift;
                checksum.applyInt(ordinal);
                for(int i=0;i<fieldIndexes.length;i++) {
                    int fieldIdx = fieldIndexes[i];
                    bitOffset = fieldOffset(shardOrdinal, fieldIdx);
                    numBitsForField = dataElements.bitsPerField[fieldIdx];
                    if(!schema.getFieldType(fieldIdx).isVariableLength()) {
                        long fixedLengthValue = numBitsForField <= 56 ?
                                dataElements.fixedLengthData.getElementValue(bitOffset, numBitsForField)
                                : dataElements.fixedLengthData.getLargeElementValue(bitOffset, numBitsForField);
                        if(fixedLengthValue == dataElements.nullValueForField[fieldIdx])
                            checksum.applyInt(Integer.MAX_VALUE);
                        else
                            checksum.applyLong(fixedLengthValue);
                    } else {
                        // Var-length: start pointer is the previous record's end pointer
                        // (0 for the shard's first record).
                        endByte = dataElements.fixedLengthData.getElementValue(bitOffset, numBitsForField);
                        startByte = shardOrdinal != 0 ? dataElements.fixedLengthData.getElementValue(bitOffset - dataElements.bitsPerRecord, numBitsForField) : 0;
                        checksum.applyInt(findVarLengthFieldHashCode(startByte, endByte, numBitsForField, fieldIdx));
                    }
                }
            }
            ordinal = populatedOrdinals.nextSetBit(ordinal + 1);
        }
    }

    /**
     * Approximate heap cost of this shard in bytes: the packed fixed-length data
     * (bitsPerRecord * record count, in bytes) plus the size of each non-null
     * var-length pool.
     */
    public long getApproximateHeapFootprintInBytes() {
        long bitsPerFixedLengthData = (long)dataElements.bitsPerRecord * (dataElements.maxOrdinal + 1);

        long requiredBytes = bitsPerFixedLengthData / 8;

        for(int i=0;i<dataElements.varLengthData.length;i++) {
            if(dataElements.varLengthData[i] != null)
                requiredBytes += dataElements.varLengthData[i].size();
        }

        return requiredBytes;
    }

    /**
     * Approximate bytes wasted on unpopulated ordinals ("holes") that map to this shard:
     * each hole still occupies a full fixed-length record slot.
     */
    public long getApproximateHoleCostInBytes(BitSet populatedOrdinals, int shardNumber, int numShards) {
        long holeBits = 0;

        int holeOrdinal = populatedOrdinals.nextClearBit(0);
        while(holeOrdinal <= dataElements.maxOrdinal) {
            if((holeOrdinal & (numShards - 1)) == shardNumber)
                holeBits += dataElements.bitsPerRecord;

            holeOrdinal = populatedOrdinals.nextClearBit(holeOrdinal + 1);
        }

        return holeBits / 8;
    }
}
9,100
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine/object/HollowObjectTypeReadState.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.read.engine.object; import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE; import com.netflix.hollow.api.sampling.DisabledSamplingDirector; import com.netflix.hollow.api.sampling.HollowObjectSampler; import com.netflix.hollow.api.sampling.HollowSampler; import com.netflix.hollow.api.sampling.HollowSamplingDirector; import com.netflix.hollow.core.memory.HollowUnsafeHandle; import com.netflix.hollow.core.memory.MemoryMode; import com.netflix.hollow.core.memory.encoding.GapEncodedVariableLengthIntegerReader; import com.netflix.hollow.core.memory.encoding.VarInt; import com.netflix.hollow.core.memory.encoding.ZigZag; import com.netflix.hollow.core.memory.pool.ArraySegmentRecycler; import com.netflix.hollow.core.read.HollowBlobInput; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.read.engine.HollowReadStateEngine; import com.netflix.hollow.core.read.engine.HollowTypeReadState; import com.netflix.hollow.core.read.engine.SnapshotPopulatedOrdinalsReader; import com.netflix.hollow.core.read.filter.HollowFilterConfig; import com.netflix.hollow.core.schema.HollowObjectSchema; import com.netflix.hollow.core.schema.HollowSchema; import com.netflix.hollow.core.write.HollowObjectWriteRecord; import com.netflix.hollow.tools.checksum.HollowChecksum; import java.io.IOException; import java.util.Arrays; 
import java.util.BitSet; /** * A {@link HollowTypeReadState} for OBJECT type records. */ public class HollowObjectTypeReadState extends HollowTypeReadState implements HollowObjectTypeDataAccess { private final HollowObjectSchema unfilteredSchema; private final HollowObjectSampler sampler; private int maxOrdinal; volatile ShardsHolder shardsVolatile; static class ShardsHolder { final HollowObjectTypeReadStateShard shards[]; final int shardNumberMask; private ShardsHolder(HollowObjectTypeReadStateShard[] fromShards) { this.shards = fromShards; this.shardNumberMask = fromShards.length - 1; } private ShardsHolder(HollowObjectTypeReadStateShard[] oldShards, HollowObjectTypeReadStateShard newShard, int newShardIndex) { int numShards = oldShards.length; HollowObjectTypeReadStateShard[] shards = new HollowObjectTypeReadStateShard[numShards]; for (int i=0; i<numShards; i++) { if (i == newShardIndex) { shards[i] = newShard; } else { shards[i] = oldShards[i]; } } this.shards = shards; this.shardNumberMask = numShards - 1; } } public HollowObjectTypeReadState(HollowReadStateEngine fileEngine, MemoryMode memoryMode, HollowObjectSchema schema, HollowObjectSchema unfilteredSchema) { super(fileEngine, memoryMode, schema); this.sampler = new HollowObjectSampler(schema, DisabledSamplingDirector.INSTANCE); this.unfilteredSchema = unfilteredSchema; this.shardsVolatile = null; } public HollowObjectTypeReadState(HollowObjectSchema schema, HollowObjectTypeDataElements dataElements) { super(null, MemoryMode.ON_HEAP, schema); this.sampler = new HollowObjectSampler(schema, DisabledSamplingDirector.INSTANCE); this.unfilteredSchema = schema; HollowObjectTypeReadStateShard newShard = new HollowObjectTypeReadStateShard(schema, dataElements, 0); this.shardsVolatile = new ShardsHolder(new HollowObjectTypeReadStateShard[] {newShard}); this.maxOrdinal = dataElements.maxOrdinal; } @Override public HollowObjectSchema getSchema() { return (HollowObjectSchema)schema; } @Override public int maxOrdinal() 
{ return maxOrdinal; } @Override public void readSnapshot(HollowBlobInput in, ArraySegmentRecycler memoryRecycler) throws IOException { throw new IllegalStateException("Object type read state requires numShards when reading snapshot"); } @Override public void readSnapshot(HollowBlobInput in, ArraySegmentRecycler memoryRecycler, int numShards) throws IOException { if(numShards > 1) maxOrdinal = VarInt.readVInt(in); HollowObjectTypeReadStateShard[] newShards = new HollowObjectTypeReadStateShard[numShards]; int shardOrdinalShift = 31 - Integer.numberOfLeadingZeros(numShards); for(int i=0; i<numShards; i++) { HollowObjectTypeDataElements shardDataElements = new HollowObjectTypeDataElements(getSchema(), memoryMode, memoryRecycler); shardDataElements.readSnapshot(in, unfilteredSchema); newShards[i] = new HollowObjectTypeReadStateShard(getSchema(), shardDataElements, shardOrdinalShift); } shardsVolatile = new ShardsHolder(newShards); if(shardsVolatile.shards.length == 1) maxOrdinal = shardsVolatile.shards[0].dataElements.maxOrdinal; SnapshotPopulatedOrdinalsReader.readOrdinals(in, stateListeners); } @Override public void applyDelta(HollowBlobInput in, HollowSchema deltaSchema, ArraySegmentRecycler memoryRecycler, int deltaNumShards) throws IOException { if (shouldReshard(shardsVolatile.shards.length, deltaNumShards)) { reshard(deltaNumShards); } if(shardsVolatile.shards.length > 1) maxOrdinal = VarInt.readVInt(in); for(int i=0; i<shardsVolatile.shards.length; i++) { HollowObjectTypeDataElements deltaData = new HollowObjectTypeDataElements((HollowObjectSchema)deltaSchema, memoryMode, memoryRecycler); deltaData.readDelta(in); if(stateEngine.isSkipTypeShardUpdateWithNoAdditions() && deltaData.encodedAdditions.isEmpty()) { if(!deltaData.encodedRemovals.isEmpty()) notifyListenerAboutDeltaChanges(deltaData.encodedRemovals, deltaData.encodedAdditions, i, shardsVolatile.shards.length); HollowObjectTypeDataElements currentData = shardsVolatile.shards[i].dataElements; 
GapEncodedVariableLengthIntegerReader oldRemovals = currentData.encodedRemovals == null ? GapEncodedVariableLengthIntegerReader.EMPTY_READER : currentData.encodedRemovals; if(oldRemovals.isEmpty()) { currentData.encodedRemovals = deltaData.encodedRemovals; oldRemovals.destroy(); } else { if(!deltaData.encodedRemovals.isEmpty()) { currentData.encodedRemovals = GapEncodedVariableLengthIntegerReader.combine(oldRemovals, deltaData.encodedRemovals, memoryRecycler); oldRemovals.destroy(); } deltaData.encodedRemovals.destroy(); } deltaData.encodedAdditions.destroy(); } else { HollowObjectTypeDataElements nextData = new HollowObjectTypeDataElements(getSchema(), memoryMode, memoryRecycler); HollowObjectTypeDataElements oldData = shardsVolatile.shards[i].dataElements; nextData.applyDelta(oldData, deltaData); HollowObjectTypeReadStateShard newShard = new HollowObjectTypeReadStateShard(getSchema(), nextData, shardsVolatile.shards[i].shardOrdinalShift); shardsVolatile = new ShardsHolder(shardsVolatile.shards, newShard, i); notifyListenerAboutDeltaChanges(deltaData.encodedRemovals, deltaData.encodedAdditions, i, shardsVolatile.shards.length); deltaData.encodedAdditions.destroy(); oldData.destroy(); } deltaData.destroy(); stateEngine.getMemoryRecycler().swap(); } if(shardsVolatile.shards.length == 1) maxOrdinal = shardsVolatile.shards[0].dataElements.maxOrdinal; } /** * Given old and new numShards, this method returns the shard resizing multiplier. */ static int shardingFactor(int oldNumShards, int newNumShards) { if (newNumShards <= 0 || oldNumShards <= 0 || newNumShards == oldNumShards) { throw new IllegalStateException("Invalid shard resizing, oldNumShards=" + oldNumShards + ", newNumShards=" + newNumShards); } boolean isNewGreater = newNumShards > oldNumShards; int dividend = isNewGreater ? newNumShards : oldNumShards; int divisor = isNewGreater ? 
oldNumShards : newNumShards; if (dividend % divisor != 0) { throw new IllegalStateException("Invalid shard resizing, oldNumShards=" + oldNumShards + ", newNumShards=" + newNumShards); } return dividend / divisor; } /** * Reshards this type state to the desired shard count using O(shard size) space while supporting concurrent reads * into the underlying data elements. * * @param newNumShards The desired number of shards */ void reshard(int newNumShards) { int prevNumShards = shardsVolatile.shards.length; int shardingFactor = shardingFactor(prevNumShards, newNumShards); HollowObjectTypeDataElements[] newDataElements; int[] shardOrdinalShifts; if (newNumShards>prevNumShards) { // split existing shards // Step 1: Grow the number of shards. Each original shard will result in N child shards where N is the sharding factor. // The child shards will reference into the existing data elements as-is, and reuse existing shardOrdinalShift. // However since the shards array is resized, a read will map into the new shard index, as a result a subset of // ordinals in each shard will be accessed. In the next "splitting" step, the data elements in these new shards // will be filtered to only retain the subset of ordinals that are actually accessed. // // This is an atomic update to shardsVolatile: full construction happens-before the store to shardsVolatile, // in other words a fully constructed object as visible to this thread will be visible to other threads that // load the new shardsVolatile. shardsVolatile = expandWithOriginalDataElements(shardsVolatile, shardingFactor); // Step 2: Split each original data element into N child data elements where N is the sharding factor. // Then update each of the N child shards with the respective split of data element, this will be // sufficient to serve all reads into this shard. Once all child shards for a pre-split parent // shard have been assigned the split data elements, the parent data elements can be discarded. 
for(int i=0; i<prevNumShards; i++) { HollowObjectTypeDataElements originalDataElements = shardsVolatile.shards[i].dataElements; shardsVolatile = splitDataElementsForOneShard(shardsVolatile, i, prevNumShards, shardingFactor); destroyOriginalDataElements(originalDataElements); } // Re-sharding done. // shardsVolatile now contains newNumShards shards where each shard contains // a split of original data elements. } else { // join existing shards // Step 1: Join N data elements to create one, where N is the sharding factor. Then update each of the // N shards to reference the joined result, but with a new shardOrdinalShift. // Reads will continue to reference the same shard index as before, but the new shardOrdinalShift // will help these reads land at the right ordinal in the joined shard. When all N old shards // corresponding to one new shard have been updated, the N pre-join data elements can be destroyed. for (int i=0; i<newNumShards; i++) { HollowObjectTypeDataElements destroyCandidates[] = joinCandidates(shardsVolatile.shards, i, shardingFactor); shardsVolatile = joinDataElementsForOneShard(shardsVolatile, i, shardingFactor); // atomic update to shardsVolatile for (int j = 0; j < shardingFactor; j ++) { destroyOriginalDataElements(destroyCandidates[j]); }; } // Step 2: Resize the shards array to only keep the first newNumShards shards. newDataElements = new HollowObjectTypeDataElements[shardsVolatile.shards.length]; shardOrdinalShifts = new int[shardsVolatile.shards.length]; copyShardElements(shardsVolatile, newDataElements, shardOrdinalShifts); shardsVolatile = new ShardsHolder(Arrays.copyOfRange(shardsVolatile.shards, 0, newNumShards)); // Re-sharding done. // shardsVolatile now contains newNumShards shards where each shard contains // a join of original data elements. 
} } private void copyShardElements(ShardsHolder from, HollowObjectTypeDataElements[] newDataElements, int[] shardOrdinalShifts) { for (int i=0; i<from.shards.length; i++) { newDataElements[i] = from.shards[i].dataElements; shardOrdinalShifts[i] = from.shards[i].shardOrdinalShift; } } private HollowObjectTypeDataElements[] joinCandidates(HollowObjectTypeReadStateShard[] shards, int indexIntoShards, int shardingFactor) { HollowObjectTypeDataElements[] result = new HollowObjectTypeDataElements[shardingFactor]; int newNumShards = shards.length / shardingFactor; for (int i=0; i<shardingFactor; i++) { result[i] = shards[indexIntoShards + (newNumShards*i)].dataElements; }; return result; } ShardsHolder joinDataElementsForOneShard(ShardsHolder shardsHolder, int currentIndex, int shardingFactor) { int newNumShards = shardsHolder.shards.length / shardingFactor; int newShardOrdinalShift = 31 - Integer.numberOfLeadingZeros(newNumShards); HollowObjectTypeDataElementsJoiner joiner = new HollowObjectTypeDataElementsJoiner(); HollowObjectTypeDataElements[] joinCandidates = joinCandidates(shardsHolder.shards, currentIndex, shardingFactor); HollowObjectTypeDataElements joined = joiner.join(joinCandidates); HollowObjectTypeReadStateShard[] newShards = Arrays.copyOf(shardsHolder.shards, shardsHolder.shards.length); for (int i=0; i<shardingFactor; i++) { newShards[currentIndex + (newNumShards*i)] = new HollowObjectTypeReadStateShard(getSchema(), joined, newShardOrdinalShift); } return new ShardsHolder(newShards); } ShardsHolder expandWithOriginalDataElements(ShardsHolder shardsHolder, int shardingFactor) { int prevNumShards = shardsHolder.shards.length; int newNumShards = prevNumShards * shardingFactor; HollowObjectTypeReadStateShard[] newShards = new HollowObjectTypeReadStateShard[newNumShards]; for(int i=0; i<prevNumShards; i++) { for (int j=0; j<shardingFactor; j++) { newShards[i+(prevNumShards*j)] = shardsHolder.shards[i]; } } return new ShardsHolder(newShards); } ShardsHolder 
splitDataElementsForOneShard(ShardsHolder shardsHolder, int currentIndex, int prevNumShards, int shardingFactor) { int newNumShards = shardsHolder.shards.length; int newShardOrdinalShift = 31 - Integer.numberOfLeadingZeros(newNumShards); HollowObjectTypeDataElementsSplitter splitter = new HollowObjectTypeDataElementsSplitter(); HollowObjectTypeDataElements dataElementsToSplit = shardsHolder.shards[currentIndex].dataElements; HollowObjectTypeDataElements[] splits = splitter.split(dataElementsToSplit, shardingFactor); HollowObjectTypeReadStateShard[] newShards = Arrays.copyOf(shardsHolder.shards, shardsHolder.shards.length); for (int i = 0; i < shardingFactor; i ++) { newShards[currentIndex + (prevNumShards*i)] = new HollowObjectTypeReadStateShard(getSchema(), splits[i], newShardOrdinalShift); } return new ShardsHolder(newShards); } private void destroyOriginalDataElements(HollowObjectTypeDataElements dataElements) { dataElements.destroy(); if (dataElements.encodedRemovals != null) { dataElements.encodedRemovals.destroy(); } } public static void discardSnapshot(HollowBlobInput in, HollowObjectSchema schema, int numShards) throws IOException { discardType(in, schema, numShards, false); } public static void discardDelta(HollowBlobInput in, HollowObjectSchema schema, int numShards) throws IOException { discardType(in, schema, numShards, true); } public static void discardType(HollowBlobInput in, HollowObjectSchema schema, int numShards, boolean delta) throws IOException { HollowObjectTypeDataElements.discardFromInput(in, schema, numShards, delta); if(!delta) SnapshotPopulatedOrdinalsReader.discardOrdinals(in); } @Override public boolean isNull(int ordinal, int fieldIndex) { sampler.recordFieldAccess(fieldIndex); HollowObjectTypeReadState.ShardsHolder shardsHolder; HollowObjectTypeReadStateShard shard; long fixedLengthValue; do { shardsHolder = this.shardsVolatile; shard = shardsHolder.shards[ordinal & shardsHolder.shardNumberMask]; fixedLengthValue = 
shard.readValue(ordinal >> shard.shardOrdinalShift, fieldIndex); } while(readWasUnsafe(shardsHolder, ordinal, shard)); switch(((HollowObjectSchema) schema).getFieldType(fieldIndex)) { case BYTES: case STRING: int numBits = shard.dataElements.bitsPerField[fieldIndex]; return (fixedLengthValue & (1L << (numBits - 1))) != 0; case FLOAT: return (int)fixedLengthValue == HollowObjectWriteRecord.NULL_FLOAT_BITS; case DOUBLE: return fixedLengthValue == HollowObjectWriteRecord.NULL_DOUBLE_BITS; default: return fixedLengthValue == shard.dataElements.nullValueForField[fieldIndex]; } } @Override public int readOrdinal(int ordinal, int fieldIndex) { sampler.recordFieldAccess(fieldIndex); HollowObjectTypeReadState.ShardsHolder shardsHolder; HollowObjectTypeReadStateShard shard; long refOrdinal; do { shardsHolder = this.shardsVolatile; shard = shardsHolder.shards[ordinal & shardsHolder.shardNumberMask]; refOrdinal = shard.readOrdinal(ordinal >> shard.shardOrdinalShift, fieldIndex); } while(readWasUnsafe(shardsHolder, ordinal, shard)); if(refOrdinal == shard.dataElements.nullValueForField[fieldIndex]) return ORDINAL_NONE; return (int)refOrdinal; } @Override public int readInt(int ordinal, int fieldIndex) { sampler.recordFieldAccess(fieldIndex); HollowObjectTypeReadState.ShardsHolder shardsHolder; HollowObjectTypeReadStateShard shard; long value; do { shardsHolder = this.shardsVolatile; shard = shardsHolder.shards[ordinal & shardsHolder.shardNumberMask]; value = shard.readInt(ordinal >> shard.shardOrdinalShift, fieldIndex); } while(readWasUnsafe(shardsHolder, ordinal, shard)); if(value == shard.dataElements.nullValueForField[fieldIndex]) return Integer.MIN_VALUE; return ZigZag.decodeInt((int)value); } @Override public float readFloat(int ordinal, int fieldIndex) { sampler.recordFieldAccess(fieldIndex); HollowObjectTypeReadState.ShardsHolder shardsHolder; HollowObjectTypeReadStateShard shard; int value; do { shardsHolder = this.shardsVolatile; shard = shardsHolder.shards[ordinal & 
shardsHolder.shardNumberMask]; value = shard.readFloat(ordinal >> shard.shardOrdinalShift, fieldIndex); } while(readWasUnsafe(shardsHolder, ordinal, shard)); if(value == HollowObjectWriteRecord.NULL_FLOAT_BITS) return Float.NaN; return Float.intBitsToFloat(value); } @Override public double readDouble(int ordinal, int fieldIndex) { sampler.recordFieldAccess(fieldIndex); HollowObjectTypeReadState.ShardsHolder shardsHolder; HollowObjectTypeReadStateShard shard; long value; do { shardsHolder = this.shardsVolatile; shard = shardsHolder.shards[ordinal & shardsHolder.shardNumberMask]; value = shard.readDouble(ordinal >> shard.shardOrdinalShift, fieldIndex); } while(readWasUnsafe(shardsHolder, ordinal, shard)); if(value == HollowObjectWriteRecord.NULL_DOUBLE_BITS) return Double.NaN; return Double.longBitsToDouble(value); } @Override public long readLong(int ordinal, int fieldIndex) { sampler.recordFieldAccess(fieldIndex); HollowObjectTypeReadState.ShardsHolder shardsHolder; HollowObjectTypeReadStateShard shard; long value; do { shardsHolder = this.shardsVolatile; shard = shardsHolder.shards[ordinal & shardsHolder.shardNumberMask]; value = shard.readLong(ordinal >> shard.shardOrdinalShift, fieldIndex); } while(readWasUnsafe(shardsHolder, ordinal, shard)); if(value == shard.dataElements.nullValueForField[fieldIndex]) return Long.MIN_VALUE; return ZigZag.decodeLong(value); } @Override public Boolean readBoolean(int ordinal, int fieldIndex) { sampler.recordFieldAccess(fieldIndex); HollowObjectTypeReadState.ShardsHolder shardsHolder; HollowObjectTypeReadStateShard shard; long value; do { shardsHolder = this.shardsVolatile; shard = shardsHolder.shards[ordinal & shardsHolder.shardNumberMask]; value = shard.readBoolean(ordinal >> shard.shardOrdinalShift, fieldIndex); } while(readWasUnsafe(shardsHolder, ordinal, shard)); if(value == shard.dataElements.nullValueForField[fieldIndex]) return null; return value == 1 ? 
Boolean.TRUE : Boolean.FALSE; } @Override public byte[] readBytes(int ordinal, int fieldIndex) { sampler.recordFieldAccess(fieldIndex); HollowObjectTypeReadState.ShardsHolder shardsHolder; HollowObjectTypeReadStateShard shard; byte[] result; int numBitsForField; long currentBitOffset; long endByte; long startByte; int shardOrdinal; do { do { shardsHolder = this.shardsVolatile; shard = shardsHolder.shards[ordinal & shardsHolder.shardNumberMask]; shardOrdinal = ordinal >> shard.shardOrdinalShift; numBitsForField = shard.dataElements.bitsPerField[fieldIndex]; currentBitOffset = shard.fieldOffset(shardOrdinal, fieldIndex); endByte = shard.dataElements.fixedLengthData.getElementValue(currentBitOffset, numBitsForField); startByte = shardOrdinal != 0 ? shard.dataElements.fixedLengthData.getElementValue(currentBitOffset - shard.dataElements.bitsPerRecord, numBitsForField) : 0; } while (readWasUnsafe(shardsHolder, ordinal, shard)); result = shard.readBytes(startByte, endByte, numBitsForField, fieldIndex); } while (readWasUnsafe(shardsHolder, ordinal, shard)); return result; } @Override public String readString(int ordinal, int fieldIndex) { sampler.recordFieldAccess(fieldIndex); HollowObjectTypeReadState.ShardsHolder shardsHolder; HollowObjectTypeReadStateShard shard; String result; int numBitsForField; long currentBitOffset; long endByte; long startByte; int shardOrdinal; do { do { shardsHolder = this.shardsVolatile; shard = shardsHolder.shards[ordinal & shardsHolder.shardNumberMask]; shardOrdinal = ordinal >> shard.shardOrdinalShift; numBitsForField = shard.dataElements.bitsPerField[fieldIndex]; currentBitOffset = shard.fieldOffset(shardOrdinal, fieldIndex); endByte = shard.dataElements.fixedLengthData.getElementValue(currentBitOffset, numBitsForField); startByte = shardOrdinal != 0 ? 
shard.dataElements.fixedLengthData.getElementValue(currentBitOffset - shard.dataElements.bitsPerRecord, numBitsForField) : 0; } while(readWasUnsafe(shardsHolder, ordinal, shard)); result = shard.readString(startByte, endByte, numBitsForField, fieldIndex); } while(readWasUnsafe(shardsHolder, ordinal, shard)); return result; } @Override public boolean isStringFieldEqual(int ordinal, int fieldIndex, String testValue) { sampler.recordFieldAccess(fieldIndex); HollowObjectTypeReadState.ShardsHolder shardsHolder; HollowObjectTypeReadStateShard shard; boolean result; int numBitsForField; long currentBitOffset; long endByte; long startByte; int shardOrdinal; do { do { shardsHolder = this.shardsVolatile; shard = shardsHolder.shards[ordinal & shardsHolder.shardNumberMask]; shardOrdinal = ordinal >> shard.shardOrdinalShift; numBitsForField = shard.dataElements.bitsPerField[fieldIndex]; currentBitOffset = shard.fieldOffset(shardOrdinal, fieldIndex); endByte = shard.dataElements.fixedLengthData.getElementValue(currentBitOffset, numBitsForField); startByte = shardOrdinal != 0 ? 
shard.dataElements.fixedLengthData.getElementValue(currentBitOffset - shard.dataElements.bitsPerRecord, numBitsForField) : 0; } while(readWasUnsafe(shardsHolder, ordinal, shard)); result = shard.isStringFieldEqual(startByte, endByte, numBitsForField, fieldIndex, testValue); } while(readWasUnsafe(shardsHolder, ordinal, shard)); return result; } @Override public int findVarLengthFieldHashCode(int ordinal, int fieldIndex) { sampler.recordFieldAccess(fieldIndex); HollowObjectTypeReadState.ShardsHolder shardsHolder; HollowObjectTypeReadStateShard shard; int hashCode; int numBitsForField; long currentBitOffset; long endByte; long startByte; int shardOrdinal; do { do { shardsHolder = this.shardsVolatile; shard = shardsHolder.shards[ordinal & shardsHolder.shardNumberMask]; shardOrdinal = ordinal >> shard.shardOrdinalShift; numBitsForField = shard.dataElements.bitsPerField[fieldIndex]; currentBitOffset = shard.fieldOffset(shardOrdinal, fieldIndex); endByte = shard.dataElements.fixedLengthData.getElementValue(currentBitOffset, numBitsForField); startByte = shardOrdinal != 0 ? shard.dataElements.fixedLengthData.getElementValue(currentBitOffset - shard.dataElements.bitsPerRecord, numBitsForField) : 0; } while(readWasUnsafe(shardsHolder, ordinal, shard)); hashCode = shard.findVarLengthFieldHashCode(startByte, endByte, numBitsForField, fieldIndex); } while(readWasUnsafe(shardsHolder, ordinal, shard)); return hashCode; } private boolean readWasUnsafe(ShardsHolder shardsHolder, int ordinal, HollowObjectTypeReadStateShard shard) { // Use a load (acquire) fence to constrain the compiler reordering prior plain loads so // that they cannot "float down" below the volatile load of shardsVolatile. // This ensures data is checked against current shard holder *after* optimistic calculations // have been performed on data. 
// // Note: the Java Memory Model allows for the reordering of plain loads and stores // before a volatile load (those plain loads and stores can "float down" below the // volatile load), but forbids the reordering of plain loads after a volatile load // (those plain loads are not allowed to "float above" the volatile load). // Similar reordering also applies to plain loads and stores and volatile stores. // In effect the ordering of volatile loads and stores is retained and plain loads // and stores can be shuffled around and grouped together, which increases // optimization opportunities. // This is why locks can be coarsened; plain loads and stores may enter the lock region // from above (float down the acquire) or below (float above the release) but existing // loads and stores may not exit (a "lock roach motel" and why there is almost universal // misunderstanding of, and many misguided attempts to optimize, the infamous double // checked locking idiom). // // Note: the fence provides stronger ordering guarantees than a corresponding non-plain // load or store since the former affects all prior or subsequent loads and stores, // whereas the latter is scoped to the particular load or store. // // For more details see http://gee.cs.oswego.edu/dl/html/j9mm.html // [Comment credit: Paul Sandoz] // HollowUnsafeHandle.getUnsafe().loadFence(); ShardsHolder currShardsHolder = shardsVolatile; // Validate against the underlying shard so that, during a delta application that involves re-sharding the worst // case no. of times a read will be invalidated is 3: when shards are expanded or truncated, when a shard is affected // by a split or join, and finally when delta is applied to a shard. If only shardsHolder was checked here, the // worst-case scenario could lead to read invalidation (numShards+2) times: once for shards expansion/truncation, o // nce for split/join on any shard, and then once when delta is applied. 
return shardsHolder != currShardsHolder && (shard != currShardsHolder.shards[ordinal & currShardsHolder.shardNumberMask]); } /** * Warning: Not thread-safe. Should only be called within the update thread. * @param fieldName the field name * @return the number of bits required for the field */ public int bitsRequiredForField(String fieldName) { final HollowObjectTypeReadStateShard[] shards = this.shardsVolatile.shards; int maxBitsRequiredForField = shards[0].bitsRequiredForField(fieldName); for(int i=1;i<shards.length;i++) { int shardRequiredBits = shards[i].bitsRequiredForField(fieldName); if(shardRequiredBits > maxBitsRequiredForField) maxBitsRequiredForField = shardRequiredBits; } return maxBitsRequiredForField; } @Override public HollowSampler getSampler() { return sampler; } @Override protected void invalidate() { stateListeners = EMPTY_LISTENERS; HollowObjectTypeReadStateShard[] shards = this.shardsVolatile.shards; int numShards = shards.length; HollowObjectTypeReadStateShard[] newShards = new HollowObjectTypeReadStateShard[numShards]; for (int i=0;i<numShards;i++) { newShards[i] = new HollowObjectTypeReadStateShard(getSchema(), null, shards[i].shardOrdinalShift); } this.shardsVolatile = new ShardsHolder(newShards); } @Override public void setSamplingDirector(HollowSamplingDirector director) { sampler.setSamplingDirector(director); } @Override public void setFieldSpecificSamplingDirector(HollowFilterConfig fieldSpec, HollowSamplingDirector director) { sampler.setFieldSpecificSamplingDirector(fieldSpec, director); } @Override public void ignoreUpdateThreadForSampling(Thread t) { sampler.setUpdateThread(t); } HollowObjectTypeDataElements[] currentDataElements() { final HollowObjectTypeReadStateShard[] shards = this.shardsVolatile.shards; HollowObjectTypeDataElements[] elements = new HollowObjectTypeDataElements[shards.length]; for (int i=0;i<shards.length;i++) { elements[i] = shards[i].dataElements; } return elements; } @Override protected void 
applyToChecksum(HollowChecksum checksum, HollowSchema withSchema) { final ShardsHolder shardsHolder = this.shardsVolatile; final HollowObjectTypeReadStateShard[] shards = shardsHolder.shards; int shardNumberMask = shardsHolder.shardNumberMask; if(!(withSchema instanceof HollowObjectSchema)) throw new IllegalArgumentException("HollowObjectTypeReadState can only calculate checksum with a HollowObjectSchema: " + getSchema().getName()); BitSet populatedOrdinals = getPopulatedOrdinals(); for(int i=0;i<shards.length;i++) { shards[i].applyShardToChecksum(checksum, withSchema, populatedOrdinals, i, shardNumberMask); } } @Override public long getApproximateHeapFootprintInBytes() { final HollowObjectTypeReadStateShard[] shards = this.shardsVolatile.shards; long totalApproximateHeapFootprintInBytes = 0; for(int i=0;i<shards.length;i++) totalApproximateHeapFootprintInBytes += shards[i].getApproximateHeapFootprintInBytes(); return totalApproximateHeapFootprintInBytes; } @Override public long getApproximateHoleCostInBytes() { final HollowObjectTypeReadStateShard[] shards = this.shardsVolatile.shards; long totalApproximateHoleCostInBytes = 0; BitSet populatedOrdinals = getPopulatedOrdinals(); for(int i=0;i<shards.length;i++) totalApproximateHoleCostInBytes += shards[i].getApproximateHoleCostInBytes(populatedOrdinals, i, shards.length); return totalApproximateHoleCostInBytes; } @Override public int numShards() { return this.shardsVolatile.shards.length; } }
9,101
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine/object/HollowObjectTypeDataElementsJoiner.java
package com.netflix.hollow.core.read.engine.object;

import static com.netflix.hollow.core.read.engine.object.HollowObjectTypeDataElements.copyRecord;
import static com.netflix.hollow.core.read.engine.object.HollowObjectTypeDataElements.varLengthSize;
import static com.netflix.hollow.core.read.engine.object.HollowObjectTypeDataElements.writeNullField;

import com.netflix.hollow.core.memory.FixedLengthDataFactory;
import com.netflix.hollow.core.memory.VariableLengthDataFactory;
import com.netflix.hollow.core.memory.encoding.GapEncodedVariableLengthIntegerReader;

/**
 * Join multiple {@code HollowObjectTypeDataElements}s into 1 {@code HollowObjectTypeDataElements}.
 * Ordinals are remapped and corresponding data is copied over.
 * The original data elements are not destroyed.
 * The no. of passed data elements must be a power of 2.
 *
 * <p>Ordinal mapping: a joined ordinal {@code o} maps back to shard
 * {@code o & (from.length-1)} at shard-local ordinal {@code o >> log2(from.length)} —
 * the inverse of the split mapping used by {@code HollowObjectTypeDataElementsSplitter}.</p>
 */
class HollowObjectTypeDataElementsJoiner {

    /**
     * Joins the given data elements into one. Two passes: {@link #populateStats} sizes the
     * target (max ordinal, bits per field, var-length buffers), then records are copied in
     * joined-ordinal order so var-length data is written sequentially.
     *
     * @param from shards to join; length must be a power of 2
     * @return a newly-allocated joined data elements
     * @throws IllegalStateException if {@code from.length} is not a power of 2, or if any
     *         input carries {@code encodedAdditions} (only delta data elements have those)
     */
    HollowObjectTypeDataElements join(HollowObjectTypeDataElements[] from) {
        final int fromMask = from.length - 1;
        final int fromOrdinalShift = 31 - Integer.numberOfLeadingZeros(from.length);   // log2(from.length)
        long[] currentWriteVarLengthDataPointers;

        // power-of-2 check: a power of 2 has exactly one bit set, so n & (n-1) == 0
        if (from.length<=0 || !((from.length&(from.length-1))==0)) {
            throw new IllegalStateException("No. of DataElements to be joined must be a power of 2");
        }

        // schema/memory mode are taken from the first shard; presumably uniform across shards — TODO confirm at call sites
        HollowObjectTypeDataElements to = new HollowObjectTypeDataElements(from[0].schema, from[0].memoryMode, from[0].memoryRecycler);
        currentWriteVarLengthDataPointers = new long[from[0].schema.numFields()];   // per-field sequential write cursor into var-length data

        populateStats(to, from);

        // removal ordinals carry over to the joined elements (remapped by the join)
        GapEncodedVariableLengthIntegerReader[] fromRemovals = new GapEncodedVariableLengthIntegerReader[from.length];
        for (int i=0;i<from.length;i++) {
            fromRemovals[i] = from[i].encodedRemovals;
        }
        to.encodedRemovals = GapEncodedVariableLengthIntegerReader.join(fromRemovals);

        for (HollowObjectTypeDataElements elements : from) {
            if (elements.encodedAdditions != null) {
                throw new IllegalStateException("Encountered encodedAdditions in data elements joiner- this is not expected " +
                        "since encodedAdditions only exist on delta data elements and they dont carry over to target data elements, " +
                        "delta data elements are never split/joined");
            }
        }

        // copy records in joined-ordinal order so each field's var-length data is appended sequentially
        for(int ordinal=0;ordinal<=to.maxOrdinal;ordinal++) {
            int fromIndex = ordinal & fromMask;
            int fromOrdinal = ordinal >> fromOrdinalShift;
            if (fromOrdinal <= from[fromIndex].maxOrdinal) {
                copyRecord(to, ordinal, from[fromIndex], fromOrdinal, currentWriteVarLengthDataPointers);
            } else {
                // lopsided shards could result for consumers that skip type shards with no additions
                writeNullRecord(to, ordinal, currentWriteVarLengthDataPointers);
            }
        }

        return to;
    }

    /** Writes a null value for every field of the record at {@code toOrdinal}. */
    private void writeNullRecord(HollowObjectTypeDataElements to, int toOrdinal, long[] currentWriteVarLengthDataPointers) {
        for(int fieldIndex=0;fieldIndex<to.schema.numFields();fieldIndex++) {
            long currentWriteFixedLengthStartBit = ((long)toOrdinal * to.bitsPerRecord) + to.bitOffsetPerField[fieldIndex];
            writeNullField(to, fieldIndex, currentWriteFixedLengthStartBit, currentWriteVarLengthDataPointers);
        }
    }

    /**
     * Sizing pass: computes the joined max ordinal, per-field bit widths, null sentinels,
     * field bit offsets, and allocates the fixed-length storage (and var-length buffers
     * for any field that has var-length data in at least one input shard).
     */
    void populateStats(HollowObjectTypeDataElements to, HollowObjectTypeDataElements[] from) {
        long[] varLengthSizes = new long[to.schema.numFields()];

        to.maxOrdinal = -1;
        for(int fromIndex=0;fromIndex<from.length;fromIndex++) {
            // accumulate total var-length bytes per field across all shards; determines pointer field width
            for(int ordinal=0;ordinal<=from[fromIndex].maxOrdinal;ordinal++) {
                for(int fieldIdx=0;fieldIdx<to.schema.numFields();fieldIdx++) {
                    if(from[fromIndex].varLengthData[fieldIdx] != null) {
                        varLengthSizes[fieldIdx] += varLengthSize(from[fromIndex], ordinal, fieldIdx);
                    }
                }
            }
            // highest joined ordinal contributed by this shard (inverse of the split mapping)
            int mappedMaxOrdinal = from[fromIndex].maxOrdinal == -1 ? -1 : (from[fromIndex].maxOrdinal * from.length) + fromIndex;
            to.maxOrdinal = Math.max(to.maxOrdinal, mappedMaxOrdinal);
        }

        for(int fieldIdx=0;fieldIdx<to.schema.numFields();fieldIdx++) {
            for(int i=0;i<from.length;i++) {
                if(from[i].varLengthData[fieldIdx] != null) {
                    // if any of the join candidates have var len data set for this field
                    to.varLengthData[fieldIdx] = VariableLengthDataFactory.get(to.memoryMode, to.memoryRecycler);
                    break;
                }
            }
        }

        for(int fieldIdx=0;fieldIdx<to.schema.numFields();fieldIdx++) {
            if(to.varLengthData[fieldIdx] == null) {
                // fixed-length field: widest width among shards wins; do not assume bitsPerField will be uniform
                for(int fromIndex=0;fromIndex<from.length;fromIndex++) {
                    to.bitsPerField[fieldIdx] = Math.max(to.bitsPerField[fieldIdx], from[fromIndex].bitsPerField[fieldIdx]);
                }
            } else {
                // var-length field stores an end-byte pointer plus a null-indicator top bit,
                // so width = bits-to-hold(totalSize) + 1
                to.bitsPerField[fieldIdx] = (64 - Long.numberOfLeadingZeros(varLengthSizes[fieldIdx] + 1)) + 1;
            }
            // null sentinel is the all-ones value for the field width
            to.nullValueForField[fieldIdx] = to.bitsPerField[fieldIdx] == 64 ? -1L : (1L << to.bitsPerField[fieldIdx]) - 1;
            to.bitOffsetPerField[fieldIdx] = to.bitsPerRecord;
            to.bitsPerRecord += to.bitsPerField[fieldIdx];
        }

        to.fixedLengthData = FixedLengthDataFactory.get((long)to.bitsPerRecord * (to.maxOrdinal + 1), to.memoryMode, to.memoryRecycler);

        // unused
        //  to.bitsPerUnfilteredField
        //  to.unfilteredFieldIsIncluded
    }
}
9,102
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine/object/HollowObjectTypeDataElements.java
/*
 *  Copyright 2016-2020 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.hollow.core.read.engine.object;

import com.netflix.hollow.core.memory.FixedLengthData;
import com.netflix.hollow.core.memory.FixedLengthDataFactory;
import com.netflix.hollow.core.memory.MemoryMode;
import com.netflix.hollow.core.memory.VariableLengthData;
import com.netflix.hollow.core.memory.VariableLengthDataFactory;
import com.netflix.hollow.core.memory.encoding.FixedLengthElementArray;
import com.netflix.hollow.core.memory.encoding.GapEncodedVariableLengthIntegerReader;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.memory.pool.ArraySegmentRecycler;
import com.netflix.hollow.core.read.HollowBlobInput;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import java.io.IOException;

/**
 * This class holds the data for a {@link HollowObjectTypeReadState}.
 *
 * During a delta, the HollowObjectTypeReadState will create a new HollowObjectTypeDataElements and atomically swap
 * with the existing one to make sure a consistent view of the data is always available.
 *
 * <p>Record encoding: each record occupies {@code bitsPerRecord} bits in {@code fixedLengthData};
 * field {@code i} of record {@code ordinal} lives at bit offset
 * {@code ordinal * bitsPerRecord + bitOffsetPerField[i]}, {@code bitsPerField[i]} bits wide.
 * Var-length fields store an end-byte pointer into {@code varLengthData[i]} in the low bits,
 * with the field's top bit reserved as a null indicator (see {@code writeNullVarLengthField});
 * the start byte is the previous record's end byte (0 for ordinal 0).</p>
 */
public class HollowObjectTypeDataElements {

    final HollowObjectSchema schema;

    int maxOrdinal;                        // highest populated ordinal; -1 if empty

    FixedLengthData fixedLengthData;       // packed per-record field values / var-length pointers
    final VariableLengthData varLengthData[];   // per-field byte storage; null for fixed-length fields

    GapEncodedVariableLengthIntegerReader encodedAdditions;   // only present on delta data elements
    GapEncodedVariableLengthIntegerReader encodedRemovals;    // only present on delta data elements

    final int bitsPerField[];
    final int bitOffsetPerField[];
    final long nullValueForField[];        // all-ones sentinel for each field's width
    int bitsPerRecord;

    // statistics for the unfiltered (on-disk) schema, used to drop excluded fields on load
    private int bitsPerUnfilteredField[];
    private boolean unfilteredFieldIsIncluded[];

    final ArraySegmentRecycler memoryRecycler;
    final MemoryMode memoryMode;

    public HollowObjectTypeDataElements(HollowObjectSchema schema, ArraySegmentRecycler memoryRecycler) {
        this(schema, MemoryMode.ON_HEAP, memoryRecycler);
    }

    public HollowObjectTypeDataElements(HollowObjectSchema schema, MemoryMode memoryMode, ArraySegmentRecycler memoryRecycler) {
        varLengthData = new VariableLengthData[schema.numFields()];
        bitsPerField = new int[schema.numFields()];
        bitOffsetPerField = new int[schema.numFields()];
        nullValueForField = new long[schema.numFields()];
        this.schema = schema;
        this.memoryMode = memoryMode;
        this.memoryRecycler = memoryRecycler;
    }

    /** Reads a snapshot blob; {@code unfilteredSchema} is the full on-disk schema before filtering. */
    void readSnapshot(HollowBlobInput in, HollowObjectSchema unfilteredSchema) throws IOException {
        readFromInput(in, false, unfilteredSchema);
    }

    /** Reads a delta blob (always carries the filtered schema, plus addition/removal ordinals). */
    void readDelta(HollowBlobInput in) throws IOException {
        readFromInput(in, true, schema);
    }

    /**
     * Shared snapshot/delta load path: header stats, fixed-length data, then var-length data.
     * Fields excluded by the filtered schema are stripped from the fixed-length data after load.
     */
    void readFromInput(HollowBlobInput in, boolean isDelta, HollowObjectSchema unfilteredSchema) throws IOException {
        maxOrdinal = VarInt.readVInt(in);

        if(isDelta) {
            encodedRemovals = GapEncodedVariableLengthIntegerReader.readEncodedDeltaOrdinals(in, memoryRecycler);
            encodedAdditions = GapEncodedVariableLengthIntegerReader.readEncodedDeltaOrdinals(in, memoryRecycler);
        }

        readFieldStatistics(in, unfilteredSchema);

        fixedLengthData = FixedLengthDataFactory.get(in, memoryMode, memoryRecycler);
        removeExcludedFieldsFromFixedLengthData();

        readVarLengthData(in, unfilteredSchema);
    }

    /**
     * If the filtered schema has fewer fields than the on-disk schema, repacks the fixed-length
     * data keeping only included fields. Reads/writes proceed record by record in bit order.
     */
    private void removeExcludedFieldsFromFixedLengthData() {
        if(bitsPerField.length < bitsPerUnfilteredField.length) {
            long numBitsRequired = (long)bitsPerRecord * (maxOrdinal + 1);
            FixedLengthElementArray filteredData = new FixedLengthElementArray(memoryRecycler, numBitsRequired);

            long currentReadBit = 0;
            long currentWriteBit = 0;

            for(int i=0;i<=maxOrdinal;i++) {
                for(int j=0;j<bitsPerUnfilteredField.length;j++) {
                    if(unfilteredFieldIsIncluded[j]) {
                        // getElementValue handles widths up to 56 bits; wider values need the "large" path
                        long value = bitsPerUnfilteredField[j] < 56 ?
                                fixedLengthData.getElementValue(currentReadBit, bitsPerUnfilteredField[j])
                              : fixedLengthData.getLargeElementValue(currentReadBit, bitsPerUnfilteredField[j]);
                        filteredData.setElementValue(currentWriteBit, bitsPerUnfilteredField[j], value);
                        currentWriteBit += bitsPerUnfilteredField[j];
                    }
                    currentReadBit += bitsPerUnfilteredField[j];
                }
            }

            FixedLengthDataFactory.destroy(fixedLengthData, memoryRecycler);
            memoryRecycler.swap();
            fixedLengthData = filteredData;
        }
    }

    /**
     * Reads per-field bit widths for the unfiltered schema, and derives the filtered layout
     * (bitsPerField, nullValueForField, bitOffsetPerField, bitsPerRecord) for included fields.
     */
    private void readFieldStatistics(HollowBlobInput in, HollowObjectSchema unfilteredSchema) throws IOException {
        bitsPerRecord = 0;

        bitsPerUnfilteredField = new int[unfilteredSchema.numFields()];
        unfilteredFieldIsIncluded = new boolean[unfilteredSchema.numFields()];

        int filteredFieldIdx = 0;

        for(int i=0;i<unfilteredSchema.numFields();i++) {
            int readBitsPerField = VarInt.readVInt(in);

            bitsPerUnfilteredField[i] = readBitsPerField;
            unfilteredFieldIsIncluded[i] = schema.getPosition(unfilteredSchema.getFieldName(i)) != -1;

            if(unfilteredFieldIsIncluded[i]) {
                bitsPerField[filteredFieldIdx] = readBitsPerField;
                // null sentinel is all ones for the field width (shift by 64 would be a no-op, hence the special case)
                nullValueForField[filteredFieldIdx] = bitsPerField[filteredFieldIdx] == 64 ? -1L : (1L << bitsPerField[filteredFieldIdx]) - 1;
                bitOffsetPerField[filteredFieldIdx] = bitsPerRecord;
                bitsPerRecord += bitsPerField[filteredFieldIdx];
                filteredFieldIdx++;
            }
        }
    }

    /** Loads var-length byte data for included fields; skips the bytes of excluded fields. */
    private void readVarLengthData(HollowBlobInput in, HollowObjectSchema unfilteredSchema) throws IOException {
        int filteredFieldIdx = 0;

        for(int i=0;i<unfilteredSchema.numFields();i++) {
            long numBytesInVarLengthData = VarInt.readVLong(in);

            if(schema.getPosition(unfilteredSchema.getFieldName(i)) != -1) {
                if(numBytesInVarLengthData != 0) {
                    varLengthData[filteredFieldIdx] = VariableLengthDataFactory.get(memoryMode, memoryRecycler);
                    varLengthData[filteredFieldIdx].loadFrom(in, numBytesInVarLengthData);
                }
                filteredFieldIdx++;
            } else {
                // skipBytes may skip fewer bytes than requested; loop until done
                while(numBytesInVarLengthData > 0) {
                    numBytesInVarLengthData -= in.skipBytes(numBytesInVarLengthData);
                }
            }
        }
    }

    /** Skips over this type's serialized data (all shards) without retaining anything. */
    static void discardFromInput(HollowBlobInput in, HollowObjectSchema schema, int numShards, boolean isDelta) throws IOException {
        if(numShards > 1)
            VarInt.readVInt(in); /// max ordinal

        for(int i=0;i<numShards;i++) {
            VarInt.readVInt(in); /// max ordinal

            if(isDelta) {
                /// addition/removal ordinals
                GapEncodedVariableLengthIntegerReader.discardEncodedDeltaOrdinals(in);
                GapEncodedVariableLengthIntegerReader.discardEncodedDeltaOrdinals(in);
            }

            /// field statistics
            for(int j=0;j<schema.numFields();j++) {
                VarInt.readVInt(in);
            }

            /// fixed length data
            FixedLengthData.discardFrom(in);

            /// variable length data
            for(int j=0;j<schema.numFields();j++) {
                long numBytesInVarLengthData = VarInt.readVLong(in);
                while(numBytesInVarLengthData > 0) {
                    numBytesInVarLengthData -= in.skipBytes(numBytesInVarLengthData);
                }
            }
        }
    }

    /** Populates this instance as the result of applying {@code deltaData} to {@code fromData}. */
    void applyDelta(HollowObjectTypeDataElements fromData, HollowObjectTypeDataElements deltaData) {
        new HollowObjectDeltaApplicator(fromData, deltaData, this).applyDelta();
    }

    /** Releases fixed- and var-length storage back to the recycler. */
    public void destroy() {
        FixedLengthDataFactory.destroy(fixedLengthData, memoryRecycler);
        for(int i=0;i<varLengthData.length;i++) {
            if(varLengthData[i] != null)
                VariableLengthDataFactory.destroy(varLengthData[i]);
        }
    }

    /**
     * Start byte of a var-length value = previous record's end pointer (0 for ordinal 0).
     * The mask strips the top (null-indicator) bit from the stored pointer.
     */
    static long varLengthStartByte(HollowObjectTypeDataElements from, int ordinal, int fieldIdx) {
        if(ordinal == 0)
            return 0;

        int numBitsForField = from.bitsPerField[fieldIdx];
        long currentBitOffset = ((long)from.bitsPerRecord * ordinal) + from.bitOffsetPerField[fieldIdx];
        long startByte = from.fixedLengthData.getElementValue(currentBitOffset - from.bitsPerRecord, numBitsForField) & (1L << (numBitsForField - 1)) - 1;

        return startByte;
    }

    /** End byte of a var-length value, with the null-indicator top bit masked off. */
    static long varLengthEndByte(HollowObjectTypeDataElements from, int ordinal, int fieldIdx) {
        int numBitsForField = from.bitsPerField[fieldIdx];
        long currentBitOffset = ((long)from.bitsPerRecord * ordinal) + from.bitOffsetPerField[fieldIdx];
        long endByte = from.fixedLengthData.getElementValue(currentBitOffset, numBitsForField) & (1L << (numBitsForField - 1)) - 1;

        return endByte;
    }

    /** Size in bytes of a var-length value: end pointer minus the previous record's end pointer. */
    static long varLengthSize(HollowObjectTypeDataElements from, int ordinal, int fieldIdx) {
        int numBitsForField = from.bitsPerField[fieldIdx];
        long fromBitOffset = ((long)from.bitsPerRecord*ordinal) + from.bitOffsetPerField[fieldIdx];
        long fromEndByte = from.fixedLengthData.getElementValue(fromBitOffset, numBitsForField) & (1L << (numBitsForField - 1)) - 1;
        long fromStartByte = ordinal != 0 ? from.fixedLengthData.getElementValue(fromBitOffset - from.bitsPerRecord, numBitsForField) & (1L << (numBitsForField - 1)) - 1 : 0;

        return fromEndByte - fromStartByte;
    }

    /**
     * Copies one record from {@code from} to {@code to}. Fixed-length values are copied as-is;
     * var-length values are appended at each field's write cursor (which is advanced), and the
     * new end pointer is stored in the target's fixed-length data.
     *
     * <p>NOTE(review): the fixed-length branch copies the raw value into a possibly different
     * field width; presumably target widths are sized (by populateStats) to fit — confirm at call sites.</p>
     */
    static void copyRecord(HollowObjectTypeDataElements to, int toOrdinal, HollowObjectTypeDataElements from, int fromOrdinal, long[] currentWriteVarLengthDataPointers) {
        for(int fieldIndex=0;fieldIndex<to.schema.numFields();fieldIndex++) {
            if(to.varLengthData[fieldIndex] == null) {
                long value = from.fixedLengthData.getLargeElementValue(((long)fromOrdinal * from.bitsPerRecord) + from.bitOffsetPerField[fieldIndex], from.bitsPerField[fieldIndex]);
                to.fixedLengthData.setElementValue(((long)toOrdinal * to.bitsPerRecord) + to.bitOffsetPerField[fieldIndex], to.bitsPerField[fieldIndex], value);
            } else {
                long fromStartByte = varLengthStartByte(from, fromOrdinal, fieldIndex);
                long fromEndByte = varLengthEndByte(from, fromOrdinal, fieldIndex);
                long size = fromEndByte - fromStartByte;

                to.fixedLengthData.setElementValue(((long)toOrdinal * to.bitsPerRecord) + to.bitOffsetPerField[fieldIndex], to.bitsPerField[fieldIndex], currentWriteVarLengthDataPointers[fieldIndex] + size);
                to.varLengthData[fieldIndex].copy(from.varLengthData[fieldIndex], fromStartByte, currentWriteVarLengthDataPointers[fieldIndex], size);

                currentWriteVarLengthDataPointers[fieldIndex] += size;
            }
        }
    }

    /** Writes a null for the given field, dispatching on whether the field is var-length. */
    static void writeNullField(HollowObjectTypeDataElements target, int fieldIndex, long currentWriteFixedLengthStartBit, long[] currentWriteVarLengthDataPointers) {
        if(target.varLengthData[fieldIndex] != null) {
            writeNullVarLengthField(target, fieldIndex, currentWriteFixedLengthStartBit, currentWriteVarLengthDataPointers);
        } else {
            writeNullFixedLengthField(target, fieldIndex, currentWriteFixedLengthStartBit);
        }
    }

    /**
     * Null for a var-length field: current write pointer with the field's top bit set as the
     * null indicator (no bytes are appended, so the pointer chain stays consistent).
     */
    static void writeNullVarLengthField(HollowObjectTypeDataElements target, int fieldIndex, long currentWriteFixedLengthStartBit, long[] currentWriteVarLengthDataPointers) {
        long writeValue = (1L << (target.bitsPerField[fieldIndex] - 1)) | currentWriteVarLengthDataPointers[fieldIndex];
        target.fixedLengthData.setElementValue(currentWriteFixedLengthStartBit, target.bitsPerField[fieldIndex], writeValue);
    }

    /** Null for a fixed-length field: the precomputed all-ones sentinel. */
    static void writeNullFixedLengthField(HollowObjectTypeDataElements target, int fieldIndex, long currentWriteFixedLengthStartBit) {
        target.fixedLengthData.setElementValue(currentWriteFixedLengthStartBit, target.bitsPerField[fieldIndex], target.nullValueForField[fieldIndex]);
    }
}
9,103
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine/object/HollowObjectTypeDataElementsSplitter.java
package com.netflix.hollow.core.read.engine.object;

import static com.netflix.hollow.core.read.engine.object.HollowObjectTypeDataElements.copyRecord;
import static com.netflix.hollow.core.read.engine.object.HollowObjectTypeDataElements.varLengthSize;

import com.netflix.hollow.core.memory.FixedLengthDataFactory;
import com.netflix.hollow.core.memory.VariableLengthDataFactory;
import com.netflix.hollow.core.memory.encoding.GapEncodedVariableLengthIntegerReader;

/**
 * Split a {@code HollowObjectTypeDataElements} into multiple {@code HollowObjectTypeDataElements}s.
 * Ordinals are remapped and corresponding data is copied over.
 * The original data elements are not destroyed.
 * {@code numSplits} must be a power of 2.
 *
 * <p>Ordinal mapping: source ordinal {@code o} goes to split {@code o & (numSplits-1)} at
 * split-local ordinal {@code o >> log2(numSplits)} — the inverse of the join mapping used by
 * {@code HollowObjectTypeDataElementsJoiner}.</p>
 */
public class HollowObjectTypeDataElementsSplitter {

    /**
     * Splits {@code from} into {@code numSplits} data elements. Two passes:
     * {@link #populateStats} sizes each target, then records are copied in source-ordinal
     * order so each target's var-length data is written sequentially.
     *
     * @param from the data elements to split
     * @param numSplits number of targets; must be a power of 2
     * @return newly-allocated split data elements
     * @throws IllegalStateException if {@code numSplits} is not a power of 2, or if
     *         {@code from} carries {@code encodedAdditions}
     */
    HollowObjectTypeDataElements[] split(HollowObjectTypeDataElements from, int numSplits) {
        final int toMask = numSplits - 1;
        final int toOrdinalShift = 31 - Integer.numberOfLeadingZeros(numSplits);   // log2(numSplits)
        final long[][] currentWriteVarLengthDataPointers;   // [split][field] sequential write cursors

        // power-of-2 check: a power of 2 has exactly one bit set, so n & (n-1) == 0
        if (numSplits<=0 || !((numSplits&(numSplits-1))==0)) {
            throw new IllegalStateException("Must split by power of 2");
        }

        HollowObjectTypeDataElements[] to = new HollowObjectTypeDataElements[numSplits];
        for(int i=0;i<to.length;i++) {
            to[i] = new HollowObjectTypeDataElements(from.schema, from.memoryMode, from.memoryRecycler);
            to[i].maxOrdinal = -1;   // empty until populateStats assigns ordinals
        }
        currentWriteVarLengthDataPointers = new long[numSplits][from.schema.numFields()];

        populateStats(to, from, toMask, toOrdinalShift);

        // removal ordinals are partitioned across the splits with the same mapping
        if (from.encodedRemovals != null) {
            GapEncodedVariableLengthIntegerReader[] splitRemovals = from.encodedRemovals.split(numSplits);
            for(int i=0;i<to.length;i++) {
                to[i].encodedRemovals = splitRemovals[i];
            }
        }
        if (from.encodedAdditions != null) {
            throw new IllegalStateException("Encountered encodedAdditions in data elements splitter- this is not expected " +
                    "since encodedAdditions only exist on delta data elements and they dont carry over to target data elements, " +
                    "delta data elements are never split/joined");
        }

        // allocate per-split storage now that populateStats has computed sizes
        for(int i=0;i<to.length;i++) {
            to[i].fixedLengthData = FixedLengthDataFactory.get((long)to[i].bitsPerRecord * (to[i].maxOrdinal + 1), to[i].memoryMode, to[i].memoryRecycler);
            for(int fieldIdx=0;fieldIdx<from.schema.numFields();fieldIdx++) {
                if(from.varLengthData[fieldIdx] != null) {
                    to[i].varLengthData[fieldIdx] = VariableLengthDataFactory.get(from.memoryMode, from.memoryRecycler);
                }
            }
        }

        // copy in source-ordinal order so each split's var-length data is appended sequentially
        for(int i=0;i<=from.maxOrdinal;i++) {
            int toIndex = i & toMask;
            int toOrdinal = i >> toOrdinalShift;
            copyRecord(to[toIndex], toOrdinal, from, i, currentWriteVarLengthDataPointers[toIndex]);
        }

        return to;
    }

    /**
     * Sizing pass: computes each split's max ordinal, per-field bit widths (var-length pointer
     * widths sized per split), null sentinels, field bit offsets and bits per record.
     */
    private void populateStats(HollowObjectTypeDataElements[] to, HollowObjectTypeDataElements from, int toMask, int toOrdinalShift) {
        long[][] varLengthSizes = new long[to.length][from.schema.numFields()];

        for(int ordinal=0;ordinal<=from.maxOrdinal;ordinal++) {
            int toIndex = ordinal & toMask;
            int toOrdinal = ordinal >> toOrdinalShift;
            to[toIndex].maxOrdinal = toOrdinal;   // ordinals visited in increasing order, so last write is the max
            for(int fieldIdx=0;fieldIdx<from.schema.numFields();fieldIdx++) {
                if(from.varLengthData[fieldIdx] != null) {
                    varLengthSizes[toIndex][fieldIdx] += varLengthSize(from, ordinal, fieldIdx);
                }
            }
        }

        for(int toIndex=0;toIndex<to.length;toIndex++) {
            for(int fieldIdx=0;fieldIdx<from.schema.numFields();fieldIdx++) {
                if(from.varLengthData[fieldIdx] == null) {
                    to[toIndex].bitsPerField[fieldIdx] = from.bitsPerField[fieldIdx];
                } else {
                    // var-length pointer width: bits-to-hold(split's total size) + 1 null-indicator bit
                    to[toIndex].bitsPerField[fieldIdx] = (64 - Long.numberOfLeadingZeros(varLengthSizes[toIndex][fieldIdx] + 1)) + 1;
                }
                // null sentinel is the all-ones value for the field width
                to[toIndex].nullValueForField[fieldIdx] = (to[toIndex].bitsPerField[fieldIdx] == 64) ? -1L : (1L << to[toIndex].bitsPerField[fieldIdx]) - 1;
                to[toIndex].bitOffsetPerField[fieldIdx] = to[toIndex].bitsPerRecord;
                to[toIndex].bitsPerRecord += to[toIndex].bitsPerField[fieldIdx];

                // unused
                //  to[toIndex].bitsPerUnfilteredField = from.bitsPerUnfilteredField;
                //  to[toIndex].unfilteredFieldIsIncluded = from.unfilteredFieldIsIncluded;
            }
        }
    }
}
9,104
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine/object/HollowObjectDeltaHistoricalStateCreator.java
/*
 *  Copyright 2016-2019 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.hollow.core.read.engine.object;

import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE;
import static com.netflix.hollow.core.read.engine.object.HollowObjectTypeDataElements.copyRecord;
import static com.netflix.hollow.core.read.engine.object.HollowObjectTypeDataElements.varLengthSize;

import com.netflix.hollow.core.memory.SegmentedByteArray;
import com.netflix.hollow.core.memory.encoding.FixedLengthElementArray;
import com.netflix.hollow.core.memory.pool.WastefulRecycler;
import com.netflix.hollow.core.read.engine.PopulatedOrdinalListener;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.util.IntMap;
import com.netflix.hollow.core.util.RemovedOrdinalIterator;
import java.util.Arrays;

/**
 * This class contains the logic for extracting the removed records from an OBJECT type state
 * to produce a historical type state.
 *
 * Not intended for external consumption.
 *
 * <p>Usage protocol (in order): construct, {@link #populateHistory()},
 * {@link #createHistoricalTypeReadState()} / {@link #getOrdinalMapping()}, then optionally
 * {@link #dereferenceTypeState()} to allow the original read state to be GC'ed.</p>
 */
public class HollowObjectDeltaHistoricalStateCreator {

    private final HollowObjectTypeDataElements historicalDataElements;

    private HollowObjectTypeReadState typeState;
    private HollowObjectTypeReadState.ShardsHolder shardsHolder;   // snapshot of the sharding at construction time
    private RemovedOrdinalIterator iter;                           // iterates ordinals removed by the delta
    private IntMap ordinalMapping;                                 // removed ordinal -> historical ordinal
    private int nextOrdinal;                                       // next historical ordinal to assign (starts at 0)
    private final long currentWriteVarLengthDataPointers[];        // per-field sequential write cursors

    public HollowObjectDeltaHistoricalStateCreator(HollowObjectTypeReadState typeState, boolean reverse) {
        this.typeState = typeState;
        this.historicalDataElements = new HollowObjectTypeDataElements(typeState.getSchema(), WastefulRecycler.DEFAULT_INSTANCE);
        this.iter = new RemovedOrdinalIterator(typeState.getListener(PopulatedOrdinalListener.class), reverse);
        this.currentWriteVarLengthDataPointers = new long[typeState.getSchema().numFields()];
        this.shardsHolder = typeState.shardsVolatile;
    }

    /**
     * Copies every removed record into the historical data elements, assigning compact
     * historical ordinals (0..n-1) and recording the mapping from original ordinals.
     * First {@link #populateStats()} sizes the storage, then records are copied in
     * iteration order so var-length data is written sequentially.
     */
    public void populateHistory() {
        populateStats();

        historicalDataElements.fixedLengthData = new FixedLengthElementArray(historicalDataElements.memoryRecycler, (long)historicalDataElements.bitsPerRecord * (historicalDataElements.maxOrdinal + 1));
        for(int i=0;i<historicalDataElements.schema.numFields();i++) {
            if(isVarLengthField(typeState.getSchema().getFieldType(i))) {
                historicalDataElements.varLengthData[i] = new SegmentedByteArray(historicalDataElements.memoryRecycler);
            }
        }

        iter.reset();
        int ordinal = iter.next();
        while(ordinal != ORDINAL_NONE) {
            ordinalMapping.put(ordinal, nextOrdinal);
            // locate the record within the sharded read state (shard by low bits, shard-local ordinal by shift)
            int whichShard = ordinal & shardsHolder.shardNumberMask;
            int shardOrdinal = ordinal >> shardsHolder.shards[whichShard].shardOrdinalShift;
            copyRecord(historicalDataElements, nextOrdinal, shardsHolder.shards[whichShard].dataElements, shardOrdinal, currentWriteVarLengthDataPointers);

            nextOrdinal++;
            ordinal = iter.next();
        }
    }

    /**
     * Once a historical state has been created, the references into the original read state can be released so that
     * the original read state can be GC'ed.
     *
     * <p>NOTE(review): after this call, {@link #createHistoricalTypeReadState()} would NPE
     * (it reads {@code typeState}); call it before dereferencing.</p>
     */
    public void dereferenceTypeState() {
        this.typeState = null;
        this.shardsHolder = null;
        this.iter = null;
    }

    /** @return the mapping from removed original ordinals to compact historical ordinals */
    public IntMap getOrdinalMapping() {
        return ordinalMapping;
    }

    /** Builds the historical read state over the populated data elements. */
    public HollowObjectTypeReadState createHistoricalTypeReadState() {
        HollowObjectTypeReadState historicalTypeState = new HollowObjectTypeReadState(typeState.getSchema(), historicalDataElements);

        return historicalTypeState;
    }

    /**
     * Sizing pass: counts removed records, totals var-length bytes per field, and derives
     * the historical layout (bits per field, null sentinels, offsets, bits per record).
     */
    private void populateStats() {
        iter.reset();
        int removedEntryCount = 0;
        long totalVarLengthSizes[] = new long[typeState.getSchema().numFields()];

        int ordinal = iter.next();

        while(ordinal != ORDINAL_NONE) {
            removedEntryCount++;

            for(int i=0;i<typeState.getSchema().numFields();i++) {
                if(isVarLengthField(typeState.getSchema().getFieldType(i))) {
                    int whichShard = ordinal & shardsHolder.shardNumberMask;
                    int shardOrdinal = ordinal >> shardsHolder.shards[whichShard].shardOrdinalShift;
                    totalVarLengthSizes[i] += varLengthSize(shardsHolder.shards[whichShard].dataElements, shardOrdinal, i);
                }
            }

            ordinal = iter.next();
        }

        historicalDataElements.maxOrdinal = removedEntryCount - 1;

        for(int i=0;i<typeState.getSchema().numFields();i++) {
            if(!isVarLengthField(typeState.getSchema().getFieldType(i))) {
                // fixed-length field: widest width among source shards wins
                final int fieldIdx = i;
                historicalDataElements.bitsPerField[i] = Arrays.stream(shardsHolder.shards)
                        .map(shard -> shard.dataElements.bitsPerField[fieldIdx])
                        .max(Integer::compare).get();
            } else {
                // var-length pointer width: bits-to-hold(total size) + 1 null-indicator bit
                historicalDataElements.bitsPerField[i] = (64 - Long.numberOfLeadingZeros(totalVarLengthSizes[i] + 1)) + 1;
            }
            // null sentinel is the all-ones value for the field width
            historicalDataElements.nullValueForField[i] = historicalDataElements.bitsPerField[i] == 64 ? -1L : (1L << historicalDataElements.bitsPerField[i]) - 1;
            historicalDataElements.bitOffsetPerField[i] = historicalDataElements.bitsPerRecord;
            historicalDataElements.bitsPerRecord += historicalDataElements.bitsPerField[i];
        }

        ordinalMapping = new IntMap(removedEntryCount);
    }

    /** STRING and BYTES are the only var-length field types in the object schema. */
    private boolean isVarLengthField(HollowObjectSchema.FieldType fieldType) {
        return fieldType == HollowObjectSchema.FieldType.STRING || fieldType == HollowObjectSchema.FieldType.BYTES;
    }
}
9,105
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine/object/HollowObjectDeltaApplicator.java
/*
 *  Copyright 2016-2019 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.hollow.core.read.engine.object;

import static com.netflix.hollow.core.read.engine.object.HollowObjectTypeDataElements.writeNullField;
import static com.netflix.hollow.core.read.engine.object.HollowObjectTypeDataElements.writeNullFixedLengthField;
import static com.netflix.hollow.core.read.engine.object.HollowObjectTypeDataElements.writeNullVarLengthField;

import com.netflix.hollow.core.memory.SegmentedByteArray;
import com.netflix.hollow.core.memory.encoding.FixedLengthElementArray;
import com.netflix.hollow.core.memory.encoding.GapEncodedVariableLengthIntegerReader;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;

/**
 * This class contains the logic for applying a delta to a current OBJECT type state
 * to produce the next OBJECT type state.
 *
 * Not intended for external consumption.
 */
class HollowObjectDeltaApplicator {

    private final HollowObjectTypeDataElements from;   // current state's data
    private final HollowObjectTypeDataElements delta;  // delta to apply
    private final HollowObjectTypeDataElements target; // next state, populated by applyDelta()

    // Bit cursors into the fixed-length data of each of the three states; each
    // advances monotonically as records are merged in ordinal order.
    long currentDeltaStateReadFixedLengthStartBit = 0;
    long currentFromStateReadFixedLengthStartBit = 0;
    long currentWriteFixedLengthStartBit = 0;

    // Per-field byte cursors into the var-length (STRING/BYTES) data of each state.
    long currentDeltaReadVarLengthDataPointers[];
    long currentFromStateReadVarLengthDataPointers[];
    long currentWriteVarLengthDataPointers[];

    // Maps target field index -> delta field index (-1 when the delta lacks the field).
    int deltaFieldIndexMapping[];

    GapEncodedVariableLengthIntegerReader removalsReader;
    GapEncodedVariableLengthIntegerReader additionsReader;

    // One past the last field index with a non-zero width; fields beyond this need no merging.
    int numMergeFields = 0;

    public HollowObjectDeltaApplicator(HollowObjectTypeDataElements from, HollowObjectTypeDataElements delta, HollowObjectTypeDataElements target) {
        this.from = from;
        this.delta = delta;
        this.target = target;
    }

    /**
     * Merges {@code from} + {@code delta} into {@code target}.  Chooses a bulk-copy
     * "fast" path when all field widths are unchanged, otherwise re-encodes each
     * record field-by-field.
     */
    void applyDelta() {
        removalsReader = from.encodedRemovals == null ? GapEncodedVariableLengthIntegerReader.EMPTY_READER : from.encodedRemovals;
        additionsReader = delta.encodedAdditions;
        removalsReader.reset();
        additionsReader.reset();

        // The delta's removals become the next state's pending removals.
        target.encodedRemovals = delta.encodedRemovals;

        target.maxOrdinal = delta.maxOrdinal;

        // Map each target field to its position in the delta's schema (-1 if absent).
        deltaFieldIndexMapping = new int[target.bitsPerField.length];
        for(int i=0;i<target.bitsPerField.length;i++) {
            deltaFieldIndexMapping[i] = delta.schema.getPosition(target.schema.getFieldName(i));
        }

        // Lay out the target record: field widths come from the delta when present,
        // otherwise carry over from the current state.
        for(int i=0;i<target.bitsPerField.length;i++) {
            target.bitsPerField[i] = deltaFieldIndexMapping[i] == -1 ? from.bitsPerField[i] : delta.bitsPerField[deltaFieldIndexMapping[i]];
            // All-ones pattern of the field's width marks null (special-cased at 64 to avoid shift overflow).
            target.nullValueForField[i] = target.bitsPerField[i] == 64 ? -1L : (1L << target.bitsPerField[i]) - 1;
            target.bitOffsetPerField[i] = target.bitsPerRecord;
            target.bitsPerRecord += target.bitsPerField[i];
            if(target.bitsPerField[i] != 0)
                numMergeFields = i+1;
        }

        target.fixedLengthData = new FixedLengthElementArray(target.memoryRecycler, (long)target.bitsPerRecord * (target.maxOrdinal + 1));

        // Allocate var-length storage only for STRING/BYTES fields.
        for(int i=0;i<target.schema.numFields();i++) {
            if(target.schema.getFieldType(i) == FieldType.STRING || target.schema.getFieldType(i) == FieldType.BYTES) {
                target.varLengthData[i] = new SegmentedByteArray(target.memoryRecycler);
            }
        }

        currentDeltaReadVarLengthDataPointers = new long[target.varLengthData.length];
        currentFromStateReadVarLengthDataPointers = new long[target.varLengthData.length];
        currentWriteVarLengthDataPointers = new long[target.varLengthData.length];

        if(canDoFastDelta())
            fastDelta();
        else
            slowDelta();

        from.encodedRemovals = null;
        removalsReader.destroy();
    }

    /** The fast (bulk-copy) path requires identical field widths between from and target. */
    private boolean canDoFastDelta() {
        for(int i=0;i<target.bitsPerField.length;i++) {
            if(target.bitsPerField[i] != from.bitsPerField[i])
                return false;
        }
        return true;
    }

    /**
     * Walks ordinals, bulk-copying unchanged runs of records straight from the
     * from-state and merging individually only ordinals touched by the delta
     * (or beyond the from-state's max ordinal).
     */
    private void fastDelta() {
        int i = 0;
        int bulkCopyEndOrdinal = Math.min(from.maxOrdinal, target.maxOrdinal);

        while(i <= target.maxOrdinal) {
            int nextElementDiff = Math.min(additionsReader.nextElement(), removalsReader.nextElement());

            if(nextElementDiff == i || i > bulkCopyEndOrdinal) {
                mergeOrdinal(i++);
            } else {
                int recordsToCopy = nextElementDiff - i;
                if(nextElementDiff > bulkCopyEndOrdinal)
                    recordsToCopy = bulkCopyEndOrdinal - i + 1;

                fastCopyRecords(recordsToCopy);

                i += recordsToCopy;
            }
        }
    }

    /**
     * Bulk-copies {@code recordsToCopy} consecutive records' fixed-length bits, then,
     * for each var-length field, copies the referenced bytes and rebases the copied
     * end-offsets by the difference between read and write var-length positions.
     */
    private void fastCopyRecords(int recordsToCopy) {
        long fixedLengthBitsToCopy = (long)from.bitsPerRecord * recordsToCopy;

        target.fixedLengthData.copyBits(from.fixedLengthData, currentFromStateReadFixedLengthStartBit, currentWriteFixedLengthStartBit, fixedLengthBitsToCopy);

        currentFromStateReadFixedLengthStartBit += fixedLengthBitsToCopy;

        for(int i=0;i<from.schema.numFields();i++) {
            if(target.varLengthData[i] != null) {
                // End offset of the last copied record's var-length data; mask off the
                // top (null-indicator) bit to get the raw byte offset.
                long fromEndByte = from.fixedLengthData.getElementValue(currentFromStateReadFixedLengthStartBit - from.bitsPerRecord + from.bitOffsetPerField[i], from.bitsPerField[i]);
                fromEndByte &= (from.nullValueForField[i] >>> 1);
                long varLengthToCopy = fromEndByte - currentFromStateReadVarLengthDataPointers[i];
                long varLengthDiff = currentWriteVarLengthDataPointers[i] - currentFromStateReadVarLengthDataPointers[i];
                target.varLengthData[i].orderedCopy(from.varLengthData[i], currentFromStateReadVarLengthDataPointers[i], currentWriteVarLengthDataPointers[i], varLengthToCopy);
                // Rebase every copied record's end-offset in one pass.
                target.fixedLengthData.incrementMany(currentWriteFixedLengthStartBit + from.bitOffsetPerField[i], varLengthDiff, from.bitsPerRecord, recordsToCopy);
                currentFromStateReadVarLengthDataPointers[i] += varLengthToCopy;
                currentWriteVarLengthDataPointers[i] += varLengthToCopy;
            }
        }

        currentWriteFixedLengthStartBit += fixedLengthBitsToCopy;
    }

    /** Field widths changed: every record must be merged individually. */
    private void slowDelta() {
        for(int i=0;i<=target.maxOrdinal;i++) {
            mergeOrdinal(i);
        }
    }

    /**
     * Merges a single ordinal into the target: either takes its data from the
     * delta (if added) or copies it from the from-state, advancing all cursors.
     */
    private void mergeOrdinal(int i) {
        boolean addFromDelta = additionsReader.nextElement() == i;
        boolean removeData = removalsReader.nextElement() == i;

        for(int fieldIndex=0;fieldIndex<numMergeFields;fieldIndex++) {
            int deltaFieldIndex = deltaFieldIndexMapping[fieldIndex];

            if(addFromDelta) {
                addFromDelta(removeData, fieldIndex, deltaFieldIndex);
            } else {
                if(i <= from.maxOrdinal) {
                    long readStartBit = currentFromStateReadFixedLengthStartBit + from.bitOffsetPerField[fieldIndex];
                    copyRecordField(fieldIndex, fieldIndex, from, readStartBit, currentWriteFixedLengthStartBit, currentFromStateReadVarLengthDataPointers, currentWriteVarLengthDataPointers, removeData);
                } else if(target.varLengthData[fieldIndex] != null) {
                    // Ordinal beyond the from-state: nothing to copy, write null.
                    writeNullVarLengthField(target, fieldIndex, currentWriteFixedLengthStartBit, currentWriteVarLengthDataPointers);
                }
            }

            currentWriteFixedLengthStartBit += target.bitsPerField[fieldIndex];
        }

        if(addFromDelta) {
            currentDeltaStateReadFixedLengthStartBit += delta.bitsPerRecord;
            additionsReader.advance();
        }

        // The from-state read cursor advances for every ordinal, even ones replaced by the delta.
        currentFromStateReadFixedLengthStartBit += from.bitsPerRecord;

        if(removeData)
            removalsReader.advance();
    }

    /**
     * Takes the field value from the delta (or writes null when the delta lacks
     * the field), and skips over the from-state's var-length bytes of the
     * superseded record so the from-side read pointer stays consistent.
     */
    private void addFromDelta(boolean removeData, int fieldIndex, int deltaFieldIndex) {
        if(deltaFieldIndex == -1) {
            writeNullField(target, fieldIndex, currentWriteFixedLengthStartBit, currentWriteVarLengthDataPointers);
        } else {
            long readStartBit = currentDeltaStateReadFixedLengthStartBit + delta.bitOffsetPerField[deltaFieldIndex];
            copyRecordField(fieldIndex, deltaFieldIndex, delta, readStartBit, currentWriteFixedLengthStartBit, currentDeltaReadVarLengthDataPointers, currentWriteVarLengthDataPointers, false);
        }

        /// skip over var length data in from state, if removed.
        if(removeData && target.varLengthData[fieldIndex] != null) {
            long readValue = from.fixedLengthData.getElementValue(currentFromStateReadFixedLengthStartBit + from.bitOffsetPerField[fieldIndex], from.bitsPerField[fieldIndex]);
            // Top bit clear means non-null: readValue is the record's var-length end offset.
            if((readValue & (1L << (from.bitsPerField[fieldIndex] - 1))) == 0)
                currentFromStateReadVarLengthDataPointers[fieldIndex] = readValue;
        }
    }

    /**
     * Copies one field of one record from {@code copyFromData} into the target.
     * For var-length fields the fixed-length value holds the end offset of the
     * record's bytes (top bit set = null); the actual bytes are copied and a new
     * end offset written.  When {@code removeData} is true the var-length read
     * pointer is still advanced but nothing is written.
     */
    private void copyRecordField(int fieldIndex, int fromFieldIndex, HollowObjectTypeDataElements copyFromData, long currentReadFixedLengthStartBit, long currentWriteFixedLengthStartBit, long[] currentReadVarLengthDataPointers, long[] currentWriteVarLengthDataPointers, boolean removeData) {
        // Fields wider than 56 bits may straddle a long boundary and need the large-value read.
        long readValue = copyFromData.bitsPerField[fromFieldIndex] > 56 ?
                copyFromData.fixedLengthData.getLargeElementValue(currentReadFixedLengthStartBit, copyFromData.bitsPerField[fromFieldIndex])
                : copyFromData.fixedLengthData.getElementValue(currentReadFixedLengthStartBit, copyFromData.bitsPerField[fromFieldIndex]);

        if(target.varLengthData[fieldIndex] != null) {
            if((readValue & (1L << (copyFromData.bitsPerField[fromFieldIndex] - 1))) != 0) {
                // Top bit set marks a null var-length value.
                writeNullVarLengthField(target, fieldIndex, currentWriteFixedLengthStartBit, currentWriteVarLengthDataPointers);
            } else {
                long readStart = currentReadVarLengthDataPointers[fieldIndex];
                long length = readValue - readStart;
                if(!removeData) {
                    long writeStart = currentWriteVarLengthDataPointers[fieldIndex];
                    target.varLengthData[fieldIndex].orderedCopy(copyFromData.varLengthData[fromFieldIndex], readStart, writeStart, length);
                    currentWriteVarLengthDataPointers[fieldIndex] += length;
                }
                target.fixedLengthData.setElementValue(currentWriteFixedLengthStartBit, target.bitsPerField[fieldIndex], currentWriteVarLengthDataPointers[fieldIndex]);
                currentReadVarLengthDataPointers[fieldIndex] = readValue;
            }
        } else if(!removeData) {
            if(readValue == copyFromData.nullValueForField[fromFieldIndex])
                writeNullFixedLengthField(target, fieldIndex, currentWriteFixedLengthStartBit);
            else
                target.fixedLengthData.setElementValue(currentWriteFixedLengthStartBit, target.bitsPerField[fieldIndex], readValue);
        }
    }
}
9,106
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine/list/HollowListTypeReadState.java
/*
 *  Copyright 2016-2019 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.hollow.core.read.engine.list;

import com.netflix.hollow.api.sampling.DisabledSamplingDirector;
import com.netflix.hollow.api.sampling.HollowListSampler;
import com.netflix.hollow.api.sampling.HollowSampler;
import com.netflix.hollow.api.sampling.HollowSamplingDirector;
import com.netflix.hollow.core.memory.MemoryMode;
import com.netflix.hollow.core.memory.encoding.GapEncodedVariableLengthIntegerReader;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.memory.pool.ArraySegmentRecycler;
import com.netflix.hollow.core.read.HollowBlobInput;
import com.netflix.hollow.core.read.dataaccess.HollowListTypeDataAccess;
import com.netflix.hollow.core.read.engine.HollowCollectionTypeReadState;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.PopulatedOrdinalListener;
import com.netflix.hollow.core.read.engine.SnapshotPopulatedOrdinalsReader;
import com.netflix.hollow.core.read.filter.HollowFilterConfig;
import com.netflix.hollow.core.read.iterator.HollowListOrdinalIterator;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import com.netflix.hollow.core.schema.HollowListSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.tools.checksum.HollowChecksum;
import java.io.IOException;
import java.util.BitSet;

/**
 * A {@link HollowTypeReadState} for LIST type records.
 *
 * Records are distributed across a power-of-two number of shards: a record's
 * shard is {@code ordinal & shardNumberMask} and its ordinal within that shard
 * is {@code ordinal >> shardOrdinalShift}.
 */
public class HollowListTypeReadState extends HollowCollectionTypeReadState implements HollowListTypeDataAccess {

    private final HollowListSampler sampler;

    // ordinal & shardNumberMask selects the shard; ordinal >> shardOrdinalShift is the in-shard ordinal.
    private final int shardNumberMask;
    private final int shardOrdinalShift;
    private final HollowListTypeReadStateShard shards[];

    private int maxOrdinal;

    public HollowListTypeReadState(HollowReadStateEngine stateEngine, HollowListSchema schema, int numShards) {
        this(stateEngine, MemoryMode.ON_HEAP, schema, numShards);
    }

    public HollowListTypeReadState(HollowReadStateEngine stateEngine, MemoryMode memoryMode, HollowListSchema schema, int numShards) {
        super(stateEngine, memoryMode, schema);
        this.sampler = new HollowListSampler(schema.getName(), DisabledSamplingDirector.INSTANCE);
        this.shardNumberMask = numShards - 1;
        this.shardOrdinalShift = 31 - Integer.numberOfLeadingZeros(numShards);

        if(numShards < 1 || 1 << shardOrdinalShift != numShards)
            throw new IllegalArgumentException("Number of shards must be a power of 2!");

        HollowListTypeReadStateShard shards[] = new HollowListTypeReadStateShard[numShards];
        for(int i=0;i<shards.length;i++)
            shards[i] = new HollowListTypeReadStateShard();

        this.shards = shards;
    }

    /** Reading a snapshot with an explicit shard count is not supported for LIST types. */
    @Override
    public void readSnapshot(HollowBlobInput in, ArraySegmentRecycler memoryRecycler, int numShards) throws IOException {
        throw new UnsupportedOperationException("This type does not yet support numShards specification when reading snapshot");
    }

    /**
     * Reads a full snapshot for this type: one data-elements block per shard,
     * followed by the populated-ordinals bitset.
     */
    @Override
    public void readSnapshot(HollowBlobInput in, ArraySegmentRecycler memoryRecycler) throws IOException {
        // Multi-shard snapshots carry an explicit type-wide max ordinal before the shard data.
        if(shards.length > 1)
            maxOrdinal = VarInt.readVInt(in);

        for(int i=0;i<shards.length;i++) {
            HollowListTypeDataElements snapshotData = new HollowListTypeDataElements(memoryMode, memoryRecycler);
            snapshotData.readSnapshot(in);
            shards[i].setCurrentData(snapshotData);
        }

        if(shards.length == 1)
            maxOrdinal = shards[0].currentDataElements().maxOrdinal;

        SnapshotPopulatedOrdinalsReader.readOrdinals(in, stateListeners);
    }

    /**
     * Applies a delta to each shard.  When the engine allows skipping shard
     * updates with no additions, a removal-only delta is folded into the
     * existing shard data (merging encoded removals) instead of rebuilding it.
     *
     * @throws UnsupportedOperationException if the delta requires a different shard count
     */
    @Override
    public void applyDelta(HollowBlobInput in, HollowSchema schema, ArraySegmentRecycler memoryRecycler, int deltaNumShards) throws IOException {
        if (shouldReshard(shards.length, deltaNumShards)) {
            throw new UnsupportedOperationException("Dynamic type sharding not supported for " + schema.getName()
                    + ". Current numShards=" + shards.length + ", delta numShards=" + deltaNumShards);
        }
        if(shards.length > 1)
            maxOrdinal = VarInt.readVInt(in);

        for(int i=0; i<shards.length; i++) {
            HollowListTypeDataElements deltaData = new HollowListTypeDataElements(memoryMode, memoryRecycler);
            deltaData.readDelta(in);
            if(stateEngine.isSkipTypeShardUpdateWithNoAdditions() && deltaData.encodedAdditions.isEmpty()) {
                // No additions for this shard: keep the current data and only
                // accumulate the delta's removals into it.
                if(!deltaData.encodedRemovals.isEmpty())
                    notifyListenerAboutDeltaChanges(deltaData.encodedRemovals, deltaData.encodedAdditions, i, shards.length);

                HollowListTypeDataElements currentData = shards[i].currentDataElements();
                GapEncodedVariableLengthIntegerReader oldRemovals = currentData.encodedRemovals == null ? GapEncodedVariableLengthIntegerReader.EMPTY_READER : currentData.encodedRemovals;
                if(oldRemovals.isEmpty()) {
                    currentData.encodedRemovals = deltaData.encodedRemovals;
                    oldRemovals.destroy();
                } else {
                    if(!deltaData.encodedRemovals.isEmpty()) {
                        currentData.encodedRemovals = GapEncodedVariableLengthIntegerReader.combine(oldRemovals, deltaData.encodedRemovals, memoryRecycler);
                        oldRemovals.destroy();
                    }
                    deltaData.encodedRemovals.destroy();
                }
                deltaData.encodedAdditions.destroy();
            } else {
                // Build the next shard state by merging current data with the delta,
                // then atomically swap it in and release the old data.
                HollowListTypeDataElements nextData = new HollowListTypeDataElements(memoryMode, memoryRecycler);
                HollowListTypeDataElements oldData = shards[i].currentDataElements();
                nextData.applyDelta(oldData, deltaData);
                shards[i].setCurrentData(nextData);
                notifyListenerAboutDeltaChanges(deltaData.encodedRemovals, deltaData.encodedAdditions, i, shards.length);
                deltaData.encodedAdditions.destroy();
                oldData.destroy();
            }
            deltaData.destroy();
            stateEngine.getMemoryRecycler().swap();
        }

        if(shards.length == 1)
            maxOrdinal = shards[0].currentDataElements().maxOrdinal;
    }

    /** Skips past a serialized snapshot for this type without materializing it. */
    public static void discardSnapshot(HollowBlobInput in, int numShards) throws IOException {
        discardType(in, numShards, false);
    }

    /** Skips past a serialized delta for this type without materializing it. */
    public static void discardDelta(HollowBlobInput in, int numShards) throws IOException {
        discardType(in, numShards, true);
    }

    public static void discardType(HollowBlobInput in, int numShards, boolean delta) throws IOException {
        HollowListTypeDataElements.discardFromStream(in, numShards, delta);
        if(!delta)
            SnapshotPopulatedOrdinalsReader.discardOrdinals(in);
    }

    @Override
    public HollowListSchema getSchema() {
        return (HollowListSchema) schema;
    }

    @Override
    public int maxOrdinal() {
        return maxOrdinal;
    }

    /** Returns the ordinal of the element at {@code listIndex} within the list record {@code ordinal}. */
    @Override
    public int getElementOrdinal(int ordinal, int listIndex) {
        sampler.recordGet();
        return shards[ordinal & shardNumberMask].getElementOrdinal(ordinal >> shardOrdinalShift, listIndex);
    }

    /** Returns the number of elements in the list record {@code ordinal}. */
    @Override
    public int size(int ordinal) {
        sampler.recordSize();
        return shards[ordinal & shardNumberMask].size(ordinal >> shardOrdinalShift);
    }

    @Override
    public HollowOrdinalIterator ordinalIterator(int ordinal) {
        sampler.recordIterator();
        return new HollowListOrdinalIterator(ordinal, this);
    }

    @Override
    public HollowSampler getSampler() {
        return sampler;
    }

    @Override
    public void setSamplingDirector(HollowSamplingDirector director) {
        sampler.setSamplingDirector(director);
    }

    @Override
    public void setFieldSpecificSamplingDirector(HollowFilterConfig fieldSpec, HollowSamplingDirector director) {
        sampler.setFieldSpecificSamplingDirector(fieldSpec, director);
    }

    @Override
    public void ignoreUpdateThreadForSampling(Thread t) {
        sampler.setUpdateThread(t);
    }

    /** Drops all shard data and listeners; the state is unusable afterwards. */
    @Override
    protected void invalidate() {
        stateListeners = EMPTY_LISTENERS;
        for(int i=0;i<shards.length;i++)
            shards[i].invalidate();
    }

    /** Returns the current data elements of every shard, indexed by shard number. */
    HollowListTypeDataElements[] currentDataElements() {
        HollowListTypeDataElements currentDataElements[] = new HollowListTypeDataElements[shards.length];
        for(int i=0; i<shards.length; i++)
            currentDataElements[i] = shards[i].currentDataElements();
        return currentDataElements;
    }

    /** Directly installs data; only valid for single-shard states. */
    void setCurrentData(HollowListTypeDataElements data) {
        if(shards.length > 1)
            throw new UnsupportedOperationException("Cannot directly set data on sharded type state");
        shards[0].setCurrentData(data);
        maxOrdinal = data.maxOrdinal;
    }

    @Override
    protected void applyToChecksum(HollowChecksum checksum, HollowSchema withSchema) {
        if(!getSchema().equals(withSchema))
            throw new IllegalArgumentException("HollowListTypeReadState cannot calculate checksum with unequal schemas: " + getSchema().getName());

        BitSet populatedOrdinals = getListener(PopulatedOrdinalListener.class).getPopulatedOrdinals();

        for(int i=0; i<shards.length; i++)
            shards[i].applyToChecksum(checksum, populatedOrdinals, i, shards.length);
    }

    @Override
    public long getApproximateHeapFootprintInBytes() {
        long totalApproximateHeapFootprintInBytes = 0;

        for(int i=0; i<shards.length; i++)
            totalApproximateHeapFootprintInBytes += shards[i].getApproximateHeapFootprintInBytes();

        return totalApproximateHeapFootprintInBytes;
    }

    @Override
    public long getApproximateHoleCostInBytes() {
        long totalApproximateHoleCostInBytes = 0;

        BitSet populatedOrdinals = getPopulatedOrdinals();

        for(int i=0; i<shards.length; i++)
            totalApproximateHoleCostInBytes += shards[i].getApproximateHoleCostInBytes(populatedOrdinals, i, shards.length);

        return totalApproximateHoleCostInBytes;
    }

    @Override
    public int numShards() {
        return shards.length;
    }
}
9,107
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine/list/HollowListTypeDataElements.java
/*
 *  Copyright 2016-2019 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.hollow.core.read.engine.list;

import com.netflix.hollow.core.memory.FixedLengthData;
import com.netflix.hollow.core.memory.FixedLengthDataFactory;
import com.netflix.hollow.core.memory.MemoryMode;
import com.netflix.hollow.core.memory.encoding.GapEncodedVariableLengthIntegerReader;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.memory.pool.ArraySegmentRecycler;
import com.netflix.hollow.core.read.HollowBlobInput;
import java.io.IOException;

/**
 * This class holds the data for a {@link HollowListTypeReadState}.
 *
 * During a delta, the HollowListTypeReadState will create a new HollowListTypeDataElements and atomically swap
 * with the existing one to make sure a consistent view of the data is always available.
 */
public class HollowListTypeDataElements {

    int maxOrdinal;

    // End-of-list offsets into elementData, one per ordinal, bitsPerListPointer wide each.
    FixedLengthData listPointerData;
    // Flattened element ordinals of all lists, bitsPerElement wide each.
    FixedLengthData elementData;

    // Delta bookkeeping: ordinals added/removed by the delta that produced this state.
    GapEncodedVariableLengthIntegerReader encodedAdditions;
    GapEncodedVariableLengthIntegerReader encodedRemovals;

    int bitsPerListPointer;
    int bitsPerElement;
    long totalNumberOfElements = 0;

    final ArraySegmentRecycler memoryRecycler;
    final MemoryMode memoryMode;

    public HollowListTypeDataElements(ArraySegmentRecycler memoryRecycler) {
        this(MemoryMode.ON_HEAP, memoryRecycler);
    }

    public HollowListTypeDataElements(MemoryMode memoryMode, ArraySegmentRecycler memoryRecycler) {
        this.memoryMode = memoryMode;
        this.memoryRecycler = memoryRecycler;
    }

    void readSnapshot(HollowBlobInput in) throws IOException {
        readFromInput(in,false);
    }

    void readDelta(HollowBlobInput in) throws IOException {
        readFromInput(in,true);
    }

    /**
     * Reads one shard's serialized data.  The read order defines the wire
     * format: max ordinal, (delta only) removal/addition ordinals, pointer and
     * element bit widths, total element count, then the two fixed-length arrays.
     */
    private void readFromInput(HollowBlobInput in, boolean isDelta) throws IOException {
        maxOrdinal = VarInt.readVInt(in);

        if(isDelta) {
            encodedRemovals = GapEncodedVariableLengthIntegerReader.readEncodedDeltaOrdinals(in, memoryRecycler);
            encodedAdditions = GapEncodedVariableLengthIntegerReader.readEncodedDeltaOrdinals(in, memoryRecycler);
        }

        bitsPerListPointer = VarInt.readVInt(in);
        bitsPerElement = VarInt.readVInt(in);
        totalNumberOfElements = VarInt.readVLong(in);

        listPointerData = FixedLengthDataFactory.get(in, memoryMode, memoryRecycler);
        elementData = FixedLengthDataFactory.get(in, memoryMode, memoryRecycler);
    }

    /**
     * Skips one type's serialized data ({@code numShards} shard blocks) without
     * materializing it; mirrors the read order of {@link #readFromInput}.
     */
    static void discardFromStream(HollowBlobInput in, int numShards, boolean isDelta) throws IOException {
        if(numShards > 1)
            VarInt.readVInt(in); /// max ordinal

        for(int i=0;i<numShards;i++) {
            VarInt.readVInt(in); /// max ordinal

            if(isDelta) {
                /// addition/removal ordinals
                GapEncodedVariableLengthIntegerReader.discardEncodedDeltaOrdinals(in);
                GapEncodedVariableLengthIntegerReader.discardEncodedDeltaOrdinals(in);
            }

            /// statistics
            VarInt.readVInt(in);
            VarInt.readVInt(in);
            VarInt.readVLong(in);

            /// fixed-length data
            FixedLengthData.discardFrom(in);
            FixedLengthData.discardFrom(in);
        }
    }

    /** Populates this instance as the result of applying {@code deltaData} to {@code fromData}. */
    public void applyDelta(HollowListTypeDataElements fromData, HollowListTypeDataElements deltaData) {
        new HollowListDeltaApplicator(fromData, deltaData, this).applyDelta();
    }

    /** Releases the fixed-length arrays back to the recycler. */
    public void destroy() {
        FixedLengthDataFactory.destroy(listPointerData, memoryRecycler);
        FixedLengthDataFactory.destroy(elementData, memoryRecycler);
    }
}
9,108
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine/list/HollowListDeltaHistoricalStateCreator.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.read.engine.list; import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE; import com.netflix.hollow.core.memory.encoding.FixedLengthElementArray; import com.netflix.hollow.core.memory.pool.WastefulRecycler; import com.netflix.hollow.core.read.engine.PopulatedOrdinalListener; import com.netflix.hollow.core.util.IntMap; import com.netflix.hollow.core.util.RemovedOrdinalIterator; /** * This class contains the logic for extracting the removed records from a LIST type state * to produce a historical type state. * * Not intended for external consumption. 
*/ public class HollowListDeltaHistoricalStateCreator { private final HollowListTypeDataElements historicalDataElements; private final int shardNumberMask; private final int shardOrdinalShift; private HollowListTypeReadState typeState; private HollowListTypeDataElements stateEngineDataElements[]; private RemovedOrdinalIterator iter; private IntMap ordinalMapping; private int nextOrdinal = 0; private long nextStartElement = 0; public HollowListDeltaHistoricalStateCreator(HollowListTypeReadState typeState, boolean reverse) { this.typeState = typeState; this.stateEngineDataElements = typeState.currentDataElements(); this.historicalDataElements = new HollowListTypeDataElements(WastefulRecycler.DEFAULT_INSTANCE); this.iter = new RemovedOrdinalIterator(typeState.getListener(PopulatedOrdinalListener.class), reverse); this.shardNumberMask = stateEngineDataElements.length - 1; this.shardOrdinalShift = 31 - Integer.numberOfLeadingZeros(stateEngineDataElements.length); } public void populateHistory() { populateStats(); historicalDataElements.listPointerData = new FixedLengthElementArray(historicalDataElements.memoryRecycler, ((long)historicalDataElements.maxOrdinal + 1) * historicalDataElements.bitsPerListPointer); historicalDataElements.elementData = new FixedLengthElementArray(historicalDataElements.memoryRecycler, historicalDataElements.totalNumberOfElements * historicalDataElements.bitsPerElement); iter.reset(); int ordinal = iter.next(); while(ordinal != ORDINAL_NONE) { ordinalMapping.put(ordinal, nextOrdinal); copyRecord(ordinal); ordinal = iter.next(); } } public void dereferenceTypeState() { this.typeState = null; this.stateEngineDataElements = null; this.iter = null; } public IntMap getOrdinalMapping() { return ordinalMapping; } public HollowListTypeReadState createHistoricalTypeReadState() { HollowListTypeReadState historicalTypeState = new HollowListTypeReadState(null, typeState.getSchema(), 1); historicalTypeState.setCurrentData(historicalDataElements); return 
historicalTypeState; } private void populateStats() { iter.reset(); int removedEntryCount = 0; long totalElementCount = 0; int ordinal = iter.next(); while(ordinal != ORDINAL_NONE) { removedEntryCount++; totalElementCount += typeState.size(ordinal); ordinal = iter.next(); } historicalDataElements.maxOrdinal = removedEntryCount - 1; historicalDataElements.totalNumberOfElements = totalElementCount; historicalDataElements.bitsPerListPointer = totalElementCount == 0 ? 1 : 64 - Long.numberOfLeadingZeros(totalElementCount); historicalDataElements.bitsPerElement = stateEngineDataElements[0].bitsPerElement; ordinalMapping = new IntMap(removedEntryCount); } private void copyRecord(int ordinal) { int shard = ordinal & shardNumberMask; int shardOrdinal = ordinal >> shardOrdinalShift; long bitsPerElement = stateEngineDataElements[shard].bitsPerElement; long fromStartElement = shardOrdinal == 0 ? 0 : stateEngineDataElements[shard].listPointerData.getElementValue((long)(shardOrdinal - 1) * stateEngineDataElements[shard].bitsPerListPointer, stateEngineDataElements[shard].bitsPerListPointer); long fromEndElement = stateEngineDataElements[shard].listPointerData.getElementValue((long)shardOrdinal * stateEngineDataElements[shard].bitsPerListPointer, stateEngineDataElements[shard].bitsPerListPointer); long size = fromEndElement - fromStartElement; historicalDataElements.elementData.copyBits(stateEngineDataElements[shard].elementData, fromStartElement * bitsPerElement, nextStartElement * bitsPerElement, size * bitsPerElement); historicalDataElements.listPointerData.setElementValue((long)nextOrdinal * historicalDataElements.bitsPerListPointer, historicalDataElements.bitsPerListPointer, nextStartElement + size); ordinalMapping.put(ordinal, nextOrdinal); nextOrdinal++; nextStartElement += size; } }
9,109
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine/list/HollowListTypeReadStateShard.java
/*
 *  Copyright 2016-2019 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.hollow.core.read.engine.list;

import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE;

import com.netflix.hollow.core.memory.HollowUnsafeHandle;
import com.netflix.hollow.tools.checksum.HollowChecksum;
import java.util.BitSet;

/**
 * One shard of a {@link HollowListTypeReadState}.
 *
 * Reads are lock-free: each accessor re-reads {@code currentDataVolatile} and
 * retries if the data elements were swapped mid-read (see readWasUnsafe), so a
 * consistent view is returned even while a delta is being applied.
 */
class HollowListTypeReadStateShard {

    // Swapped atomically when a delta produces new data elements.
    private volatile HollowListTypeDataElements currentDataVolatile;

    /**
     * Returns the ordinal of the element at {@code listIndex} within the list
     * record {@code ordinal} (ordinal is shard-local).
     *
     * @throws ArrayIndexOutOfBoundsException if listIndex is past the end of the list
     */
    public int getElementOrdinal(int ordinal, int listIndex) {
        HollowListTypeDataElements currentData;
        int elementOrdinal;

        do {
            long startElement;
            long endElement;

            // Inner retry loop: get a consistent [startElement, endElement) range.
            do {
                currentData = this.currentDataVolatile;

                if (ordinal == 0) {
                    startElement = 0;
                    endElement = currentData.listPointerData.getElementValue(0, currentData.bitsPerListPointer);
                } else {
                    // Pointer at ordinal N is the end offset; the start is ordinal N-1's end.
                    long endFixedLengthOffset = (long)ordinal * currentData.bitsPerListPointer;
                    long startFixedLengthOffset = endFixedLengthOffset - currentData.bitsPerListPointer;
                    startElement = currentData.listPointerData.getElementValue(startFixedLengthOffset, currentData.bitsPerListPointer);
                    endElement = currentData.listPointerData.getElementValue(endFixedLengthOffset, currentData.bitsPerListPointer);
                }
            } while(readWasUnsafe(currentData));

            long elementIndex = startElement + listIndex;

            if(elementIndex >= endElement)
                throw new ArrayIndexOutOfBoundsException("Array index out of bounds: " + listIndex + ", list size: " + (endElement - startElement));

            elementOrdinal = (int)currentData.elementData.getElementValue(elementIndex * currentData.bitsPerElement, currentData.bitsPerElement);
        } while(readWasUnsafe(currentData));

        return elementOrdinal;
    }

    /** Returns the number of elements in the list record {@code ordinal} (shard-local). */
    public int size(int ordinal) {
        HollowListTypeDataElements currentData;
        int size;

        do {
            currentData = this.currentDataVolatile;

            long startElement;
            long endElement;
            if (ordinal == 0) {
                startElement = 0;
                endElement = currentData.listPointerData.getElementValue(0, currentData.bitsPerListPointer);
            } else {
                long endFixedLengthOffset = (long)ordinal * currentData.bitsPerListPointer;
                long startFixedLengthOffset = endFixedLengthOffset - currentData.bitsPerListPointer;
                startElement = currentData.listPointerData.getElementValue(startFixedLengthOffset, currentData.bitsPerListPointer);
                endElement = currentData.listPointerData.getElementValue(endFixedLengthOffset, currentData.bitsPerListPointer);
            }

            size = (int)(endElement - startElement);
        } while(readWasUnsafe(currentData));

        return size;
    }

    void invalidate() {
        setCurrentData(null);
    }

    HollowListTypeDataElements currentDataElements() {
        return currentDataVolatile;
    }

    /**
     * Returns true if the data elements were swapped while {@code data} was
     * being read.  The load fence orders the preceding data reads before the
     * re-check of the volatile reference.
     */
    private boolean readWasUnsafe(HollowListTypeDataElements data) {
        HollowUnsafeHandle.getUnsafe().loadFence();
        return data != currentDataVolatile;
    }

    void setCurrentData(HollowListTypeDataElements data) {
        this.currentDataVolatile = data;
    }

    /**
     * Folds this shard's records into the checksum.  Only ordinals belonging to
     * this shard ({@code ordinal % numShards == shardNumber}) are applied; other
     * populated ordinals are skipped by rounding up to this shard's next slot.
     */
    protected void applyToChecksum(HollowChecksum checksum, BitSet populatedOrdinals, int shardNumber, int numShards) {
        int ordinal = populatedOrdinals.nextSetBit(shardNumber);
        while(ordinal != ORDINAL_NONE) {
            if((ordinal & (numShards - 1)) == shardNumber) {
                int shardOrdinal = ordinal / numShards;
                int size = size(shardOrdinal);

                checksum.applyInt(ordinal);
                for(int i=0;i<size;i++)
                    checksum.applyInt(getElementOrdinal(shardOrdinal, i));

                ordinal = ordinal + numShards;
            } else {
                // Round up ordinal
                int r = (ordinal & -numShards) + shardNumber;
                ordinal = (r <= ordinal) ? r + numShards : r;
            }
            ordinal = populatedOrdinals.nextSetBit(ordinal);
        }
    }

    /** Approximate heap usage: the bits backing list pointers plus element data. */
    public long getApproximateHeapFootprintInBytes() {
        HollowListTypeDataElements currentData = currentDataVolatile;
        long requiredListPointerBits = ((long)currentData.maxOrdinal + 1) * currentData.bitsPerListPointer;
        long requiredElementBits = currentData.totalNumberOfElements * currentData.bitsPerElement;
        long requiredBits = requiredListPointerBits + requiredElementBits;
        return requiredBits / 8;
    }

    /** Approximate bytes wasted on list-pointer slots for unpopulated ordinals in this shard. */
    public long getApproximateHoleCostInBytes(BitSet populatedOrdinals, int shardNumber, int numShards) {
        HollowListTypeDataElements currentData = currentDataVolatile;
        long holeBits = 0;

        int holeOrdinal = populatedOrdinals.nextClearBit(0);
        while(holeOrdinal <= currentData.maxOrdinal) {
            if((holeOrdinal & (numShards - 1)) == shardNumber)
                holeBits += currentData.bitsPerListPointer;

            holeOrdinal = populatedOrdinals.nextClearBit(holeOrdinal + 1);
        }

        return holeBits / 8;
    }
}
9,110
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine/list/HollowListDeltaApplicator.java
/*
 *  Copyright 2016-2019 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.hollow.core.read.engine.list;

import com.netflix.hollow.core.memory.encoding.FixedLengthElementArray;
import com.netflix.hollow.core.memory.encoding.GapEncodedVariableLengthIntegerReader;

/**
 * This class contains the logic for applying a delta to a current LIST type state
 * to produce the next LIST type state.
 *
 * Not intended for external consumption.
 */
class HollowListDeltaApplicator {

    private final HollowListTypeDataElements from;
    private final HollowListTypeDataElements delta;
    private final HollowListTypeDataElements target;

    // Bit cursors into the fixed-length list-pointer arrays of the from/delta/target states.
    // These advance monotonically as records are merged; the exact update order in
    // mergeOrdinal()/fastCopyRecords() is load-bearing.
    private long currentFromStateCopyStartBit = 0;
    private long currentDeltaCopyStartBit = 0;
    private long currentWriteStartBit = 0;

    // Element-index cursors into the element (ordinal) arrays of the three states.
    private long currentFromStateStartElement = 0;
    private long currentDeltaStartElement = 0;
    private long currentWriteStartElement = 0;

    // Gap-encoded iterators over the ordinals removed from `from` and added by `delta`.
    private GapEncodedVariableLengthIntegerReader removalsReader;
    private GapEncodedVariableLengthIntegerReader additionsReader;

    HollowListDeltaApplicator(HollowListTypeDataElements from, HollowListTypeDataElements delta, HollowListTypeDataElements target) {
        this.from = from;
        this.delta = delta;
        this.target = target;
    }

    /**
     * Populates {@code target} by combining {@code from} with {@code delta}.
     * Sizing metadata (maxOrdinal, bit widths, element counts) is taken from the delta;
     * record data is merged ordinal-by-ordinal. When the from/target bit widths match,
     * unchanged runs of records are bulk-copied via the fast path.
     */
    public void applyDelta() {
        removalsReader = from.encodedRemovals == null ? GapEncodedVariableLengthIntegerReader.EMPTY_READER : from.encodedRemovals;
        additionsReader = delta.encodedAdditions;
        removalsReader.reset();
        additionsReader.reset();

        // The delta's removal set becomes the removal set to apply against *this* target
        // on the next delta transition.
        target.encodedRemovals = delta.encodedRemovals;

        target.maxOrdinal = delta.maxOrdinal;
        target.totalNumberOfElements = delta.totalNumberOfElements;
        target.bitsPerListPointer = delta.bitsPerListPointer;
        target.bitsPerElement = delta.bitsPerElement;

        target.listPointerData = new FixedLengthElementArray(target.memoryRecycler, ((long)target.maxOrdinal + 1) * target.bitsPerListPointer);
        target.elementData = new FixedLengthElementArray(target.memoryRecycler, target.totalNumberOfElements * target.bitsPerElement);

        // Bulk copying is only valid when from and target use identical bit widths.
        if(target.bitsPerListPointer == from.bitsPerListPointer
                && target.bitsPerElement == from.bitsPerElement)
            fastDelta();
        else
            slowDelta();

        from.encodedRemovals = null;
        removalsReader.destroy();
    }

    // Fallback path: merge every ordinal individually (handles differing bit widths).
    private void slowDelta() {
        for(int i=0;i<=target.maxOrdinal;i++) {
            mergeOrdinal(i);
        }
    }

    // Fast path: bulk-copy runs of records untouched by the delta, merging individually
    // only at ordinals that were added or removed (or beyond the shared ordinal range).
    private void fastDelta() {
        int i = 0;
        int bulkCopyEndOrdinal = Math.min(from.maxOrdinal, target.maxOrdinal);

        while(i <= target.maxOrdinal) {
            // Next ordinal at which either an addition or a removal occurs.
            int nextElementDiff = Math.min(additionsReader.nextElement(), removalsReader.nextElement());

            if(nextElementDiff == i || i > bulkCopyEndOrdinal) {
                mergeOrdinal(i++);
            } else {
                int recordsToCopy = nextElementDiff - i;
                if(nextElementDiff > bulkCopyEndOrdinal)
                    recordsToCopy = bulkCopyEndOrdinal - i + 1;

                fastCopyRecords(recordsToCopy);

                i += recordsToCopy;
            }
        }
    }

    // Bulk-copies `recordsToCopy` consecutive records' list pointers and elements from
    // `from` to `target`, then shifts the copied pointers by the difference between the
    // write and read element cursors so they address the target's element array.
    private void fastCopyRecords(int recordsToCopy) {
        long listPointerBitsToCopy = (long)recordsToCopy * target.bitsPerListPointer;
        long eachListPointerDifference = currentWriteStartElement - currentFromStateStartElement;

        target.listPointerData.copyBits(from.listPointerData, currentFromStateCopyStartBit, currentWriteStartBit, listPointerBitsToCopy);
        target.listPointerData.incrementMany(currentWriteStartBit, eachListPointerDifference, target.bitsPerListPointer, recordsToCopy);

        currentFromStateCopyStartBit += listPointerBitsToCopy;
        currentWriteStartBit += listPointerBitsToCopy;

        // The last copied pointer (read back at the new cursor minus one pointer) gives the
        // exclusive end of the element range belonging to the copied records.
        long fromDataEndElement = from.listPointerData.getElementValue(currentFromStateCopyStartBit - from.bitsPerListPointer, from.bitsPerListPointer);
        long elementsToCopy = fromDataEndElement - currentFromStateStartElement;
        long bitsToCopy = elementsToCopy * from.bitsPerElement;

        target.elementData.copyBits(from.elementData, currentFromStateStartElement * from.bitsPerElement, currentWriteStartElement * from.bitsPerElement, bitsToCopy);

        currentFromStateStartElement += elementsToCopy;
        currentWriteStartElement += elementsToCopy;
    }

    // Merges a single ordinal: takes its data from the delta if added, from `from` if
    // retained, or skips it if removed; always writes the target's end-of-list pointer.
    private void mergeOrdinal(int i) {
        boolean addFromDelta = additionsReader.nextElement() == i;
        boolean removeData = removalsReader.nextElement() == i;

        if(addFromDelta) {
            addFromDelta(additionsReader);
        }

        if(i <= from.maxOrdinal) {
            long fromDataEndElement = from.listPointerData.getElementValue(currentFromStateCopyStartBit, from.bitsPerListPointer);
            if(!removeData) {
                // Retained record: copy its elements one at a time (bit widths may differ).
                for(long elementIdx=currentFromStateStartElement; elementIdx<fromDataEndElement; elementIdx++) {
                    long elementOrdinal = from.elementData.getElementValue(elementIdx * from.bitsPerElement, from.bitsPerElement);
                    target.elementData.setElementValue(currentWriteStartElement * target.bitsPerElement, target.bitsPerElement, elementOrdinal);
                    currentWriteStartElement++;
                }
            } else {
                removalsReader.advance();
            }

            // Whether copied or skipped, the from-state cursors advance past this record.
            currentFromStateStartElement = fromDataEndElement;
            currentFromStateCopyStartBit += from.bitsPerListPointer;
        }

        target.listPointerData.setElementValue(currentWriteStartBit, target.bitsPerListPointer, currentWriteStartElement);
        currentWriteStartBit += target.bitsPerListPointer;
    }

    // Copies the elements of the next added record from the delta into the target and
    // advances the additions iterator.
    private void addFromDelta(GapEncodedVariableLengthIntegerReader additionsReader) {
        long deltaDataEndElement = delta.listPointerData.getElementValue(currentDeltaCopyStartBit, delta.bitsPerListPointer);

        for(long elementIdx=currentDeltaStartElement; elementIdx<deltaDataEndElement; elementIdx++) {
            long elementOrdinal = delta.elementData.getElementValue(elementIdx * delta.bitsPerElement, delta.bitsPerElement);
            target.elementData.setElementValue(currentWriteStartElement * target.bitsPerElement, target.bitsPerElement, elementOrdinal);
            currentWriteStartElement++;
        }

        currentDeltaStartElement = deltaDataEndElement;
        currentDeltaCopyStartBit += delta.bitsPerListPointer;

        additionsReader.advance();
    }

}
9,111
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine/set/HollowSetTypeDataElements.java
/*
 *  Copyright 2016-2019 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.hollow.core.read.engine.set;

import com.netflix.hollow.core.memory.FixedLengthData;
import com.netflix.hollow.core.memory.FixedLengthDataFactory;
import com.netflix.hollow.core.memory.MemoryMode;
import com.netflix.hollow.core.memory.encoding.GapEncodedVariableLengthIntegerReader;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.memory.pool.ArraySegmentRecycler;
import com.netflix.hollow.core.read.HollowBlobInput;
import java.io.IOException;

/**
 * This class holds the data for a {@link HollowSetTypeReadState}.
 *
 * During a delta, the HollowSetTypeReadState will create a new HollowSetTypeDataElements and atomically swap
 * with the existing one to make sure a consistent view of the data is always available.
 */
public class HollowSetTypeDataElements {

    // Highest populated ordinal in this type state (inclusive).
    int maxOrdinal;

    // Per-ordinal fixed-length records: hash-bucket end pointer followed by set size.
    FixedLengthData setPointerAndSizeData;
    // Flat array of hash buckets; each bucket holds an element ordinal or emptyBucketValue.
    FixedLengthData elementData;

    // Delta metadata: ordinals removed from the previous state / added by this delta.
    // Only populated when this instance was read as a delta (see readDelta()).
    GapEncodedVariableLengthIntegerReader encodedRemovals;
    GapEncodedVariableLengthIntegerReader encodedAdditions;

    int bitsPerSetPointer;
    int bitsPerSetSizeValue;
    // Width of one record in setPointerAndSizeData: bitsPerSetPointer + bitsPerSetSizeValue.
    int bitsPerFixedLengthSetPortion;
    int bitsPerElement;
    // Sentinel stored in unused buckets: all bits set for the element width.
    int emptyBucketValue;
    long totalNumberOfBuckets;

    final ArraySegmentRecycler memoryRecycler;
    final MemoryMode memoryMode;

    public HollowSetTypeDataElements(ArraySegmentRecycler memoryRecycler) {
        this(MemoryMode.ON_HEAP, memoryRecycler);
    }

    public HollowSetTypeDataElements(MemoryMode memoryMode, ArraySegmentRecycler memoryRecycler) {
        this.memoryMode = memoryMode;
        this.memoryRecycler = memoryRecycler;
    }

    void readSnapshot(HollowBlobInput in) throws IOException {
        readFromInput(in,false);
    }

    void readDelta(HollowBlobInput in) throws IOException {
        readFromInput(in,true);
    }

    /**
     * Deserializes one shard of SET type data from the blob input. The read order here
     * mirrors the serialized layout and must not change: max ordinal, (delta-only)
     * removal/addition ordinals, bit widths, bucket count, then the two fixed-length arrays.
     */
    private void readFromInput(HollowBlobInput in, boolean isDelta) throws IOException {
        maxOrdinal = VarInt.readVInt(in);

        if(isDelta) {
            encodedRemovals = GapEncodedVariableLengthIntegerReader.readEncodedDeltaOrdinals(in, memoryRecycler);
            encodedAdditions = GapEncodedVariableLengthIntegerReader.readEncodedDeltaOrdinals(in, memoryRecycler);
        }

        bitsPerSetPointer = VarInt.readVInt(in);
        bitsPerSetSizeValue = VarInt.readVInt(in);
        bitsPerElement = VarInt.readVInt(in);
        bitsPerFixedLengthSetPortion = bitsPerSetPointer + bitsPerSetSizeValue;
        emptyBucketValue = (1 << bitsPerElement) - 1;
        totalNumberOfBuckets = VarInt.readVLong(in);

        setPointerAndSizeData = FixedLengthDataFactory.get(in, memoryMode, memoryRecycler);
        elementData = FixedLengthDataFactory.get(in, memoryMode, memoryRecycler);
    }

    /**
     * Skips over this type's serialized data without materializing it, e.g. when the
     * type is excluded by a filter config. Must consume exactly the same bytes, in the
     * same order, as readFromInput() for each shard.
     */
    static void discardFromStream(HollowBlobInput in, int numShards, boolean isDelta) throws IOException {
        if(numShards > 1)
            VarInt.readVInt(in); // max ordinal

        for(int i=0;i<numShards;i++) {
            VarInt.readVInt(in); // max ordinal

            if(isDelta) {
                /// addition/removal ordinals
                GapEncodedVariableLengthIntegerReader.discardEncodedDeltaOrdinals(in);
                GapEncodedVariableLengthIntegerReader.discardEncodedDeltaOrdinals(in);
            }

            /// statistics
            VarInt.readVInt(in);
            VarInt.readVInt(in);
            VarInt.readVInt(in);
            VarInt.readVLong(in);

            /// fixed-length data
            FixedLengthData.discardFrom(in);
            FixedLengthData.discardFrom(in);
        }
    }

    // Populates this instance as fromData + deltaData; see HollowSetDeltaApplicator.
    public void applyDelta(HollowSetTypeDataElements fromData, HollowSetTypeDataElements deltaData) {
        new HollowSetDeltaApplicator(fromData, deltaData, this).applyDelta();
    }

    // Releases the backing fixed-length arrays to the recycler.
    public void destroy() {
        FixedLengthDataFactory.destroy(setPointerAndSizeData, memoryRecycler);
        FixedLengthDataFactory.destroy(elementData, memoryRecycler);
    }

}
9,112
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine/set/PotentialMatchHollowSetOrdinalIterator.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.read.engine.set; import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE; import com.netflix.hollow.core.memory.encoding.HashCodes; import com.netflix.hollow.core.read.dataaccess.HollowSetTypeDataAccess; import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator; /** * This is a {@link HollowOrdinalIterator} which only will iterate over the potential matches for a specific hash code. * * Once this iterator encounters an empty bucket, the iteration ends. */ public class PotentialMatchHollowSetOrdinalIterator implements HollowOrdinalIterator { private final int setOrdinal; private final HollowSetTypeDataAccess dataAccess; private final int numBuckets; private int currentBucket; public PotentialMatchHollowSetOrdinalIterator(int setOrdinal, HollowSetTypeDataAccess dataAccess, int hashCode) { this.setOrdinal = setOrdinal; this.dataAccess = dataAccess; this.numBuckets = HashCodes.hashTableSize(dataAccess.size(setOrdinal)); this.currentBucket = HashCodes.hashInt(hashCode) & (numBuckets - 1); } @Override public int next() { int currentBucketValue; currentBucketValue = dataAccess.relativeBucketValue(setOrdinal, currentBucket); if(currentBucketValue == ORDINAL_NONE) { return NO_MORE_ORDINALS; } currentBucket++; currentBucket &= (numBuckets - 1); return currentBucketValue; } }
9,113
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine/set/HollowSetDeltaHistoricalStateCreator.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.read.engine.set; import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE; import com.netflix.hollow.core.memory.encoding.FixedLengthElementArray; import com.netflix.hollow.core.memory.encoding.HashCodes; import com.netflix.hollow.core.memory.pool.WastefulRecycler; import com.netflix.hollow.core.read.engine.PopulatedOrdinalListener; import com.netflix.hollow.core.util.IntMap; import com.netflix.hollow.core.util.RemovedOrdinalIterator; /** * This class contains the logic for extracting the removed records from an SET type state * to produce a historical type state. * * Not intended for external consumption. 
*/ public class HollowSetDeltaHistoricalStateCreator { private final HollowSetTypeDataElements historicalDataElements; private final int shardNumberMask; private final int shardOrdinalShift; private HollowSetTypeReadState typeState; private HollowSetTypeDataElements stateEngineDataElements[]; private RemovedOrdinalIterator iter; private IntMap ordinalMapping; private int nextOrdinal; private long nextStartBucket; public HollowSetDeltaHistoricalStateCreator(HollowSetTypeReadState typeState, boolean reverse) { this.typeState = typeState; this.stateEngineDataElements = typeState.currentDataElements(); this.historicalDataElements = new HollowSetTypeDataElements(WastefulRecycler.DEFAULT_INSTANCE); this.iter = new RemovedOrdinalIterator(typeState.getListener(PopulatedOrdinalListener.class), reverse); this.shardNumberMask = stateEngineDataElements.length - 1; this.shardOrdinalShift = 31 - Integer.numberOfLeadingZeros(stateEngineDataElements.length); } public void populateHistory() { populateStats(); historicalDataElements.setPointerAndSizeData = new FixedLengthElementArray(historicalDataElements.memoryRecycler, ((long)historicalDataElements.maxOrdinal + 1) * historicalDataElements.bitsPerFixedLengthSetPortion); historicalDataElements.elementData = new FixedLengthElementArray(historicalDataElements.memoryRecycler, historicalDataElements.totalNumberOfBuckets * historicalDataElements.bitsPerElement); iter.reset(); int ordinal = iter.next(); while(ordinal != ORDINAL_NONE) { ordinalMapping.put(ordinal, nextOrdinal); copyRecord(ordinal); ordinal = iter.next(); } } public void dereferenceTypeState() { this.typeState = null; this.stateEngineDataElements = null; this.iter = null; } public IntMap getOrdinalMapping() { return ordinalMapping; } public HollowSetTypeReadState createHistoricalTypeReadState() { HollowSetTypeReadState historicalTypeState = new HollowSetTypeReadState(null, typeState.getSchema(), 1); historicalTypeState.setCurrentData(historicalDataElements); return 
historicalTypeState; } private void populateStats() { iter.reset(); int removedEntryCount = 0; int maxSize = 0; long totalBucketCount = 0; int ordinal = iter.next(); while(ordinal != ORDINAL_NONE) { removedEntryCount++; int size = typeState.size(ordinal); if(size > maxSize) maxSize = size; totalBucketCount += HashCodes.hashTableSize(size); ordinal = iter.next(); } historicalDataElements.maxOrdinal = removedEntryCount - 1; historicalDataElements.bitsPerSetPointer = 64 - Long.numberOfLeadingZeros(totalBucketCount); historicalDataElements.bitsPerSetSizeValue = 64 - Long.numberOfLeadingZeros(maxSize); historicalDataElements.bitsPerFixedLengthSetPortion = historicalDataElements.bitsPerSetPointer + historicalDataElements.bitsPerSetSizeValue; historicalDataElements.bitsPerElement = stateEngineDataElements[0].bitsPerElement; historicalDataElements.emptyBucketValue = stateEngineDataElements[0].emptyBucketValue; historicalDataElements.totalNumberOfBuckets = totalBucketCount; ordinalMapping = new IntMap(removedEntryCount); } private void copyRecord(int ordinal) { int shard = ordinal & shardNumberMask; int shardOrdinal = ordinal >> shardOrdinalShift; long bitsPerBucket = historicalDataElements.bitsPerElement; long size = typeState.size(ordinal); long fromStartBucket = shardOrdinal == 0 ? 
0 : stateEngineDataElements[shard].setPointerAndSizeData.getElementValue((long)(shardOrdinal - 1) * stateEngineDataElements[shard].bitsPerFixedLengthSetPortion, stateEngineDataElements[shard].bitsPerSetPointer); long fromEndBucket = stateEngineDataElements[shard].setPointerAndSizeData.getElementValue((long)shardOrdinal * stateEngineDataElements[shard].bitsPerFixedLengthSetPortion, stateEngineDataElements[shard].bitsPerSetPointer); long numBuckets = fromEndBucket - fromStartBucket; historicalDataElements.setPointerAndSizeData.setElementValue((long)nextOrdinal * historicalDataElements.bitsPerFixedLengthSetPortion, historicalDataElements.bitsPerSetPointer, nextStartBucket + numBuckets); historicalDataElements.setPointerAndSizeData.setElementValue((long)(nextOrdinal * historicalDataElements.bitsPerFixedLengthSetPortion) + historicalDataElements.bitsPerSetPointer, historicalDataElements.bitsPerSetSizeValue, size); historicalDataElements.elementData.copyBits(stateEngineDataElements[shard].elementData, fromStartBucket * bitsPerBucket, nextStartBucket * bitsPerBucket, numBuckets * bitsPerBucket); nextOrdinal++; nextStartBucket += numBuckets; } }
9,114
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine/set/HollowSetTypeReadStateShard.java
/*
 *  Copyright 2016-2019 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.hollow.core.read.engine.set;

import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE;

import com.netflix.hollow.core.index.key.HollowPrimaryKeyValueDeriver;
import com.netflix.hollow.core.memory.HollowUnsafeHandle;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.read.engine.SetMapKeyHasher;
import com.netflix.hollow.tools.checksum.HollowChecksum;
import java.util.BitSet;

/**
 * One shard of a SET type read state. Reads are lock-free: each read loops, re-reading
 * the volatile data elements reference until a read completes without the reference
 * having been swapped (see readWasUnsafe()). This tolerates a concurrent delta
 * transition replacing currentDataVolatile mid-read.
 */
class HollowSetTypeReadStateShard {

    private volatile HollowSetTypeDataElements currentDataVolatile;

    private HollowPrimaryKeyValueDeriver keyDeriver;

    /** Returns the number of elements in the set at the given (shard-local) ordinal. */
    public int size(int ordinal) {
        HollowSetTypeDataElements currentData;
        int size;

        do {
            currentData = this.currentDataVolatile;
            // Size is stored after the set pointer within the record's fixed-length portion.
            size = (int)currentData.setPointerAndSizeData.getElementValue(((long)ordinal * currentData.bitsPerFixedLengthSetPortion) + currentData.bitsPerSetPointer, currentData.bitsPerSetSizeValue);
        } while(readWasUnsafe(currentData));

        return size;
    }

    /**
     * Returns whether the set at {@code ordinal} contains element ordinal {@code value},
     * probing the set's hash table linearly from the bucket {@code hashCode} maps to.
     */
    public boolean contains(int ordinal, int value, int hashCode) {
        HollowSetTypeDataElements currentData;
        boolean foundData;

        threadsafe:
        do {
            long startBucket;
            long endBucket;
            do {
                currentData = this.currentDataVolatile;

                startBucket = getAbsoluteBucketStart(currentData, ordinal);
                endBucket = currentData.setPointerAndSizeData.getElementValue((long)ordinal * currentData.bitsPerFixedLengthSetPortion, currentData.bitsPerSetPointer);
            } while(readWasUnsafe(currentData));

            hashCode = HashCodes.hashInt(hashCode);
            // Bucket count (endBucket - startBucket) is a power of two, so masking works.
            long bucket = startBucket + (hashCode & (endBucket - startBucket - 1));
            int bucketOrdinal = absoluteBucketValue(currentData, bucket);

            while(bucketOrdinal != currentData.emptyBucketValue) {
                if(bucketOrdinal == value) {
                    foundData = true;
                    // Jump to the outer loop's readWasUnsafe() check before returning.
                    continue threadsafe;
                }
                bucket++;
                if(bucket == endBucket)
                    bucket = startBucket;
                bucketOrdinal = absoluteBucketValue(currentData, bucket);
            }

            foundData = false;
        } while(readWasUnsafe(currentData));

        return foundData;
    }

    /**
     * Finds the element whose primary key fields match {@code hashKey} in the set at
     * {@code ordinal}, using the configured key deriver; returns ORDINAL_NONE if absent.
     */
    public int findElement(int ordinal, Object... hashKey) {
        int hashCode = SetMapKeyHasher.hash(hashKey, keyDeriver.getFieldTypes());

        HollowSetTypeDataElements currentData;

        threadsafe:
        do {
            long startBucket;
            long endBucket;
            do {
                currentData = this.currentDataVolatile;

                startBucket = getAbsoluteBucketStart(currentData, ordinal);
                endBucket = currentData.setPointerAndSizeData.getElementValue((long)ordinal * currentData.bitsPerFixedLengthSetPortion, currentData.bitsPerSetPointer);
            } while(readWasUnsafe(currentData));

            long bucket = startBucket + (hashCode & (endBucket - startBucket - 1));
            int bucketOrdinal = absoluteBucketValue(currentData, bucket);

            while(bucketOrdinal != currentData.emptyBucketValue) {
                // keyMatches() dereferences other type states, so re-check safety each probe.
                if(readWasUnsafe(currentData))
                    continue threadsafe;

                if(keyDeriver.keyMatches(bucketOrdinal, hashKey))
                    return bucketOrdinal;

                bucket++;
                if(bucket == endBucket)
                    bucket = startBucket;
                bucketOrdinal = absoluteBucketValue(currentData, bucket);
            }
        } while(readWasUnsafe(currentData));

        return ORDINAL_NONE;
    }

    /**
     * Returns the element ordinal stored in the given bucket (relative to the set's
     * first bucket), or ORDINAL_NONE if that bucket is empty.
     */
    public int relativeBucketValue(int setOrdinal, int bucketIndex) {
        HollowSetTypeDataElements currentData;
        int value;

        do {
            long startBucket;
            do {
                currentData = this.currentDataVolatile;

                startBucket = getAbsoluteBucketStart(currentData, setOrdinal);
            } while(readWasUnsafe(currentData));

            value = absoluteBucketValue(currentData, startBucket + bucketIndex);
            if(value == currentData.emptyBucketValue)
                value = ORDINAL_NONE;
        } while(readWasUnsafe(currentData));

        return value;
    }

    // First bucket of the set at `ordinal`: the previous record's end pointer (0 for ordinal 0).
    private long getAbsoluteBucketStart(HollowSetTypeDataElements currentData, int ordinal) {
        return ordinal == 0 ? 0 : currentData.setPointerAndSizeData.getElementValue((long)(ordinal - 1) * currentData.bitsPerFixedLengthSetPortion, currentData.bitsPerSetPointer);
    }

    // Raw value stored at an absolute bucket index in the element array.
    private int absoluteBucketValue(HollowSetTypeDataElements currentData, long absoluteBucketIndex) {
        return (int)currentData.elementData.getElementValue(absoluteBucketIndex * currentData.bitsPerElement, currentData.bitsPerElement);
    }

    void invalidate() {
        setCurrentData(null);
    }

    HollowSetTypeDataElements currentDataElements() {
        return currentDataVolatile;
    }

    // Returns true if the data elements reference was swapped while `data` was being read;
    // the loadFence orders the data reads before the reference re-read.
    private boolean readWasUnsafe(HollowSetTypeDataElements data) {
        HollowUnsafeHandle.getUnsafe().loadFence();
        return data != currentDataVolatile;
    }

    void setCurrentData(HollowSetTypeDataElements data) {
        this.currentDataVolatile = data;
    }

    /**
     * Folds this shard's populated records (ordinal plus non-empty bucket positions and
     * values) into the checksum. Only ordinals belonging to this shard are visited.
     */
    protected void applyToChecksum(HollowChecksum checksum, BitSet populatedOrdinals, int shardNumber, int numShards) {
        HollowSetTypeDataElements currentData = currentDataVolatile;
        int ordinal = populatedOrdinals.nextSetBit(shardNumber);
        while(ordinal != ORDINAL_NONE) {
            if((ordinal & (numShards - 1)) == shardNumber) {
                int shardOrdinal = ordinal / numShards;
                int numBuckets = HashCodes.hashTableSize(size(shardOrdinal));
                long offset = getAbsoluteBucketStart(currentData, shardOrdinal);

                checksum.applyInt(ordinal);
                for(int i=0;i<numBuckets;i++) {
                    int bucketValue = absoluteBucketValue(currentData, offset + i);
                    if(bucketValue != currentData.emptyBucketValue) {
                        checksum.applyInt(i);
                        checksum.applyInt(bucketValue);
                    }
                }
                ordinal = ordinal + numShards;
            } else {
                // Round up ordinal to the next ordinal belonging to this shard.
                int r = (ordinal & -numShards) + shardNumber;
                ordinal = (r <= ordinal) ? r + numShards : r;
            }
            ordinal = populatedOrdinals.nextSetBit(ordinal);
        }
    }

    // Approximate heap usage: fixed-length set-pointer/size records plus bucket data, in bytes.
    public long getApproximateHeapFootprintInBytes() {
        HollowSetTypeDataElements currentData = currentDataVolatile;
        long requiredBitsForSetPointers = ((long)currentData.maxOrdinal + 1) * currentData.bitsPerFixedLengthSetPortion;
        long requiredBitsForBuckets = currentData.totalNumberOfBuckets * currentData.bitsPerElement;
        long requiredBits = requiredBitsForSetPointers + requiredBitsForBuckets;
        return requiredBits / 8;
    }

    // Approximate bytes wasted on fixed-length slots for unpopulated (hole) ordinals in this shard.
    public long getApproximateHoleCostInBytes(BitSet populatedOrdinals, int shardNumber, int numShards) {
        HollowSetTypeDataElements currentData = currentDataVolatile;
        long holeBits = 0;

        int holeOrdinal = populatedOrdinals.nextClearBit(0);
        while(holeOrdinal <= currentData.maxOrdinal) {
            if((holeOrdinal & (numShards - 1)) == shardNumber)
                holeBits += currentData.bitsPerFixedLengthSetPortion;

            holeOrdinal = populatedOrdinals.nextClearBit(holeOrdinal + 1);
        }

        return holeBits / 8;
    }

    public void setKeyDeriver(HollowPrimaryKeyValueDeriver keyDeriver) {
        this.keyDeriver = keyDeriver;
    }

}
9,115
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine/set/HollowSetDeltaApplicator.java
/*
 *  Copyright 2016-2019 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.hollow.core.read.engine.set;

import com.netflix.hollow.core.memory.encoding.FixedLengthElementArray;
import com.netflix.hollow.core.memory.encoding.GapEncodedVariableLengthIntegerReader;

/**
 * This class contains the logic for applying a delta to a current SET type state
 * to produce the next SET type state.
 *
 * Not intended for external consumption.
 */
class HollowSetDeltaApplicator {

    private final HollowSetTypeDataElements from;
    private final HollowSetTypeDataElements delta;
    private final HollowSetTypeDataElements target;

    // Bit cursors into the fixed-length pointer-and-size arrays of the three states;
    // the update order in mergeOrdinal()/fastCopyRecords() is load-bearing.
    private long currentFromStateCopyStartBit = 0;
    private long currentDeltaCopyStartBit = 0;
    private long currentWriteStartBit = 0;

    // Bucket-index cursors into the element (hash bucket) arrays of the three states.
    private long currentFromStateStartBucket = 0;
    private long currentDeltaStartBucket = 0;
    private long currentWriteStartBucket = 0;

    // Gap-encoded iterators over the ordinals removed from `from` and added by `delta`.
    private GapEncodedVariableLengthIntegerReader removalsReader;
    private GapEncodedVariableLengthIntegerReader additionsReader;

    HollowSetDeltaApplicator(HollowSetTypeDataElements from, HollowSetTypeDataElements delta, HollowSetTypeDataElements target) {
        this.from = from;
        this.delta = delta;
        this.target = target;
    }

    /**
     * Populates {@code target} by combining {@code from} with {@code delta}. Sizing
     * metadata is taken from the delta; records are merged ordinal-by-ordinal, with a
     * bulk-copy fast path when all from/target bit widths match.
     */
    public void applyDelta() {
        removalsReader = from.encodedRemovals == null ? GapEncodedVariableLengthIntegerReader.EMPTY_READER : from.encodedRemovals;
        additionsReader = delta.encodedAdditions;
        removalsReader.reset();
        additionsReader.reset();

        // The delta's removal set is the removal set to apply against this new target next time.
        target.encodedRemovals = delta.encodedRemovals;

        target.maxOrdinal = delta.maxOrdinal;
        target.bitsPerSetPointer = delta.bitsPerSetPointer;
        target.bitsPerSetSizeValue = delta.bitsPerSetSizeValue;
        target.bitsPerFixedLengthSetPortion = delta.bitsPerFixedLengthSetPortion;
        target.bitsPerElement = delta.bitsPerElement;
        target.emptyBucketValue = delta.emptyBucketValue;
        target.totalNumberOfBuckets = delta.totalNumberOfBuckets;

        target.setPointerAndSizeData = new FixedLengthElementArray(target.memoryRecycler, ((long)target.maxOrdinal + 1) * target.bitsPerFixedLengthSetPortion);
        target.elementData = new FixedLengthElementArray(target.memoryRecycler, target.totalNumberOfBuckets * target.bitsPerElement);

        // Bulk copying is only valid when from and target use identical bit widths.
        if(target.bitsPerSetPointer == from.bitsPerSetPointer
                && target.bitsPerSetSizeValue == from.bitsPerSetSizeValue
                && target.bitsPerElement == from.bitsPerElement)
            fastDelta();
        else
            slowDelta();

        from.encodedRemovals = null;
        removalsReader.destroy();
    }

    // Fallback path: merge every ordinal individually (handles differing bit widths).
    private void slowDelta() {
        for(int i=0; i<=target.maxOrdinal; i++) {
            mergeOrdinal(i);
        }
    }

    // Fast path: bulk-copy runs of records untouched by the delta, merging individually
    // only at ordinals that were added or removed (or beyond the shared ordinal range).
    private void fastDelta() {
        int i=0;
        int bulkCopyEndOrdinal = Math.min(from.maxOrdinal, target.maxOrdinal);

        while(i <= target.maxOrdinal) {
            // Next ordinal at which either an addition or a removal occurs.
            int nextElementDiff = Math.min(additionsReader.nextElement(), removalsReader.nextElement());

            if(nextElementDiff == i || i > bulkCopyEndOrdinal) {
                mergeOrdinal(i++);
            } else {
                int recordsToCopy = nextElementDiff - i;
                if(nextElementDiff > bulkCopyEndOrdinal)
                    recordsToCopy = bulkCopyEndOrdinal - i + 1;

                fastCopyRecords(recordsToCopy);

                i += recordsToCopy;
            }
        }
    }

    // Bulk-copies `recordsToCopy` consecutive records' pointer/size data and buckets from
    // `from` to `target`, then shifts only the set pointers (not the sizes) by the
    // difference between the write and read bucket cursors.
    private void fastCopyRecords(int recordsToCopy) {
        long setPointerAndSizeBitsToCopy = (long)recordsToCopy * target.bitsPerFixedLengthSetPortion;
        long eachSetPointerDifference = currentWriteStartBucket - currentFromStateStartBucket;

        target.setPointerAndSizeData.copyBits(from.setPointerAndSizeData, currentFromStateCopyStartBit, currentWriteStartBit, setPointerAndSizeBitsToCopy);
        target.setPointerAndSizeData.incrementMany(currentWriteStartBit, eachSetPointerDifference, target.bitsPerFixedLengthSetPortion, recordsToCopy);

        currentFromStateCopyStartBit += setPointerAndSizeBitsToCopy;
        currentWriteStartBit += setPointerAndSizeBitsToCopy;

        // The last copied record's end pointer gives the exclusive end of the bucket
        // range belonging to the copied records.
        long fromDataEndElement = from.setPointerAndSizeData.getElementValue(currentFromStateCopyStartBit - from.bitsPerFixedLengthSetPortion, from.bitsPerSetPointer);
        long bucketsToCopy = fromDataEndElement - currentFromStateStartBucket;
        long bitsToCopy = bucketsToCopy * from.bitsPerElement;

        target.elementData.copyBits(from.elementData, currentFromStateStartBucket * from.bitsPerElement, currentWriteStartBucket * from.bitsPerElement, bitsToCopy);

        currentFromStateStartBucket += bucketsToCopy;
        currentWriteStartBucket += bucketsToCopy;
    }

    // Merges a single ordinal: takes its data from the delta if added, from `from` if
    // retained, or skips it if removed; always writes the target's end pointer.
    private void mergeOrdinal(int i) {
        boolean addFromDelta = additionsReader.nextElement() == i;
        boolean removeData = removalsReader.nextElement() == i;

        if(addFromDelta) {
            addFromDelta(additionsReader);
        }

        if(i <= from.maxOrdinal) {
            long fromDataEndBucket = from.setPointerAndSizeData.getElementValue(currentFromStateCopyStartBit, from.bitsPerSetPointer);
            if(!removeData) {
                // Retained record: copy buckets one at a time (bit widths may differ),
                // translating the empty-bucket sentinel between the two widths.
                for(long bucketIdx=currentFromStateStartBucket; bucketIdx<fromDataEndBucket; bucketIdx++) {
                    long bucketValue = from.elementData.getElementValue(bucketIdx * from.bitsPerElement, from.bitsPerElement);
                    if(bucketValue == from.emptyBucketValue)
                        bucketValue = target.emptyBucketValue;
                    target.elementData.setElementValue(currentWriteStartBucket * target.bitsPerElement, target.bitsPerElement, bucketValue);
                    currentWriteStartBucket++;
                }

                long fromDataSize = from.setPointerAndSizeData.getElementValue(currentFromStateCopyStartBit + from.bitsPerSetPointer, from.bitsPerSetSizeValue);
                target.setPointerAndSizeData.setElementValue(currentWriteStartBit + target.bitsPerSetPointer, target.bitsPerSetSizeValue, fromDataSize);
            } else {
                removalsReader.advance();
            }

            // Whether copied or skipped, the from-state cursors advance past this record.
            currentFromStateStartBucket = fromDataEndBucket;
            currentFromStateCopyStartBit += from.bitsPerFixedLengthSetPortion;
        }

        target.setPointerAndSizeData.setElementValue(currentWriteStartBit, target.bitsPerSetPointer, currentWriteStartBucket);
        currentWriteStartBit += target.bitsPerFixedLengthSetPortion;
    }

    // Copies the buckets and size of the next added record from the delta into the
    // target and advances the additions iterator.
    private void addFromDelta(GapEncodedVariableLengthIntegerReader additionsReader) {
        long deltaDataEndBucket = delta.setPointerAndSizeData.getElementValue(currentDeltaCopyStartBit, delta.bitsPerSetPointer);

        for(long bucketIdx=currentDeltaStartBucket; bucketIdx<deltaDataEndBucket; bucketIdx++) {
            long bucketValue = delta.elementData.getElementValue(bucketIdx * delta.bitsPerElement, delta.bitsPerElement);
            target.elementData.setElementValue(currentWriteStartBucket * target.bitsPerElement, target.bitsPerElement, bucketValue);
            currentWriteStartBucket++;
        }

        long deltaDataSize = delta.setPointerAndSizeData.getElementValue(currentDeltaCopyStartBit + delta.bitsPerSetPointer, delta.bitsPerSetSizeValue);
        target.setPointerAndSizeData.setElementValue(currentWriteStartBit + target.bitsPerSetPointer, target.bitsPerSetSizeValue, deltaDataSize);

        currentDeltaStartBucket = deltaDataEndBucket;
        currentDeltaCopyStartBit += delta.bitsPerSetPointer;

        additionsReader.advance();
    }

}
9,116
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/read/engine/set/HollowSetTypeReadState.java
/*
 *  Copyright 2016-2019 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.hollow.core.read.engine.set;

import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE;

import com.netflix.hollow.api.sampling.DisabledSamplingDirector;
import com.netflix.hollow.api.sampling.HollowSampler;
import com.netflix.hollow.api.sampling.HollowSamplingDirector;
import com.netflix.hollow.api.sampling.HollowSetSampler;
import com.netflix.hollow.core.index.key.HollowPrimaryKeyValueDeriver;
import com.netflix.hollow.core.memory.MemoryMode;
import com.netflix.hollow.core.memory.encoding.GapEncodedVariableLengthIntegerReader;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.memory.pool.ArraySegmentRecycler;
import com.netflix.hollow.core.read.HollowBlobInput;
import com.netflix.hollow.core.read.dataaccess.HollowSetTypeDataAccess;
import com.netflix.hollow.core.read.engine.HollowCollectionTypeReadState;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.engine.PopulatedOrdinalListener;
import com.netflix.hollow.core.read.engine.SnapshotPopulatedOrdinalsReader;
import com.netflix.hollow.core.read.filter.HollowFilterConfig;
import com.netflix.hollow.core.read.iterator.EmptyOrdinalIterator;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import com.netflix.hollow.core.read.iterator.HollowSetOrdinalIterator;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSetSchema;
import com.netflix.hollow.tools.checksum.HollowChecksum;
import java.io.IOException;
import java.util.BitSet;

/**
 * A {@link HollowTypeReadState} for SET type records.
 * <p>
 * Records are distributed over a power-of-two number of shards: the low bits of an ordinal
 * ({@code ordinal & shardNumberMask}) select the shard, and the remaining high bits
 * ({@code ordinal >> shardOrdinalShift}) are the ordinal within that shard.
 */
public class HollowSetTypeReadState extends HollowCollectionTypeReadState implements HollowSetTypeDataAccess {

    // Records access/iteration statistics for this type; disabled by default.
    private final HollowSetSampler sampler;

    // numShards - 1; valid as a mask because numShards is enforced to be a power of two.
    private final int shardNumberMask;
    // log2(numShards); shifts away the shard-selection bits of an ordinal.
    private final int shardOrdinalShift;
    private final HollowSetTypeReadStateShard shards[];

    // Derives hash-key values for findElement(); null until buildKeyDeriver() runs (or if no hash key).
    private HollowPrimaryKeyValueDeriver keyDeriver;

    private int maxOrdinal;

    public HollowSetTypeReadState(HollowReadStateEngine stateEngine, HollowSetSchema schema, int numShards) {
        this(stateEngine, MemoryMode.ON_HEAP, schema, numShards);
    }

    /**
     * @throws IllegalArgumentException if {@code numShards} is not a positive power of two
     */
    public HollowSetTypeReadState(HollowReadStateEngine stateEngine, MemoryMode memoryMode, HollowSetSchema schema, int numShards) {
        super(stateEngine, memoryMode, schema);
        this.sampler = new HollowSetSampler(schema.getName(), DisabledSamplingDirector.INSTANCE);
        this.shardNumberMask = numShards - 1;
        this.shardOrdinalShift = 31 - Integer.numberOfLeadingZeros(numShards);

        if(numShards < 1 || 1 << shardOrdinalShift != numShards)
            throw new IllegalArgumentException("Number of shards must be a power of 2!");

        HollowSetTypeReadStateShard shards[] = new HollowSetTypeReadStateShard[numShards];
        for(int i=0;i<shards.length;i++)
            shards[i] = new HollowSetTypeReadStateShard();

        this.shards = shards;
    }

    @Override
    public void readSnapshot(HollowBlobInput in, ArraySegmentRecycler memoryRecycler, int numShards) throws IOException {
        throw new UnsupportedOperationException("This type does not yet support numShards specification when reading snapshot");
    }

    /**
     * Reads one snapshot data-elements section per shard from the blob, then the populated-ordinals
     * bitset.  With multiple shards maxOrdinal is serialized explicitly; with a single shard it is
     * taken from the shard's own data.
     */
    @Override
    public void readSnapshot(HollowBlobInput in, ArraySegmentRecycler memoryRecycler) throws IOException {
        if(shards.length > 1)
            maxOrdinal = VarInt.readVInt(in);

        for(int i=0;i<shards.length;i++) {
            HollowSetTypeDataElements snapshotData = new HollowSetTypeDataElements(memoryMode, memoryRecycler);
            snapshotData.readSnapshot(in);
            shards[i].setCurrentData(snapshotData);
        }

        if(shards.length == 1)
            maxOrdinal = shards[0].currentDataElements().maxOrdinal;

        SnapshotPopulatedOrdinalsReader.readOrdinals(in, stateListeners);
    }

    /**
     * Applies a delta to each shard in turn.  When the engine is configured to skip shard updates
     * that contain no additions, only the removal metadata is merged into the current data elements;
     * otherwise a new set of data elements is produced from (old + delta) and swapped in.
     *
     * @throws UnsupportedOperationException if the delta requires a different shard count (resharding
     *         is not supported for this type)
     */
    @Override
    public void applyDelta(HollowBlobInput in, HollowSchema schema, ArraySegmentRecycler memoryRecycler, int deltaNumShards) throws IOException {
        if (shouldReshard(shards.length, deltaNumShards)) {
            throw new UnsupportedOperationException("Dynamic type sharding not supported for " + schema.getName()
                    + ". Current numShards=" + shards.length + ", delta numShards=" + deltaNumShards);
        }
        if(shards.length > 1)
            maxOrdinal = VarInt.readVInt(in);

        for(int i=0;i<shards.length;i++) {
            HollowSetTypeDataElements deltaData = new HollowSetTypeDataElements(memoryMode, memoryRecycler);
            deltaData.readDelta(in);
            if(stateEngine.isSkipTypeShardUpdateWithNoAdditions() && deltaData.encodedAdditions.isEmpty()) {
                // Nothing added in this shard: keep current data, but fold the delta's removals into
                // the existing removal reader so listeners and future deltas still see them.
                if(!deltaData.encodedRemovals.isEmpty())
                    notifyListenerAboutDeltaChanges(deltaData.encodedRemovals, deltaData.encodedAdditions, i, shards.length);

                HollowSetTypeDataElements currentData = shards[i].currentDataElements();
                GapEncodedVariableLengthIntegerReader oldRemovals = currentData.encodedRemovals == null ? GapEncodedVariableLengthIntegerReader.EMPTY_READER : currentData.encodedRemovals;
                if(oldRemovals.isEmpty()) {
                    currentData.encodedRemovals = deltaData.encodedRemovals;
                    oldRemovals.destroy();
                } else {
                    if(!deltaData.encodedRemovals.isEmpty()) {
                        currentData.encodedRemovals = GapEncodedVariableLengthIntegerReader.combine(oldRemovals, deltaData.encodedRemovals, memoryRecycler);
                        oldRemovals.destroy();
                    }
                    deltaData.encodedRemovals.destroy();
                }

                deltaData.encodedAdditions.destroy();
            } else {
                // Normal path: build the next data elements from the old data plus the delta.
                HollowSetTypeDataElements nextData = new HollowSetTypeDataElements(memoryMode, memoryRecycler);
                HollowSetTypeDataElements oldData = shards[i].currentDataElements();
                nextData.applyDelta(oldData, deltaData);
                shards[i].setCurrentData(nextData);
                notifyListenerAboutDeltaChanges(deltaData.encodedRemovals, deltaData.encodedAdditions, i, shards.length);
                deltaData.encodedAdditions.destroy();
                oldData.destroy();
            }
            deltaData.destroy();
            stateEngine.getMemoryRecycler().swap();
        }

        if(shards.length == 1)
            maxOrdinal = shards[0].currentDataElements().maxOrdinal;
    }

    public static void discardSnapshot(HollowBlobInput in, int numShards) throws IOException {
        discardType(in, numShards, false);
    }

    public static void discardDelta(HollowBlobInput in, int numShards) throws IOException {
        discardType(in, numShards, true);
    }

    /** Skips over this type's section of the blob without materializing it. */
    public static void discardType(HollowBlobInput in, int numShards, boolean delta) throws IOException {
        HollowSetTypeDataElements.discardFromStream(in, numShards, delta);
        if(!delta)
            SnapshotPopulatedOrdinalsReader.discardOrdinals(in);
    }

    @Override
    public int maxOrdinal() {
        return maxOrdinal;
    }

    @Override
    public int size(int ordinal) {
        sampler.recordSize();
        return shards[ordinal & shardNumberMask].size(ordinal >> shardOrdinalShift);
    }

    @Override
    public boolean contains(int ordinal, int value) {
        // No explicit hash code supplied: the element's ordinal doubles as its hash.
        return contains(ordinal, value, value);
    }

    @Override
    public boolean contains(int ordinal, int value, int hashCode) {
        sampler.recordGet();
        return shards[ordinal & shardNumberMask].contains(ordinal >> shardOrdinalShift, value, hashCode);
    }

    /**
     * Looks up an element by its hash key.  Returns {@code ORDINAL_NONE} when no hash key is
     * configured for this type or the supplied key has the wrong arity.
     */
    @Override
    public int findElement(int ordinal, Object... hashKey) {
        sampler.recordGet();
        if(keyDeriver == null)
            return ORDINAL_NONE;
        FieldType[] fieldTypes = keyDeriver.getFieldTypes();
        if(hashKey.length != fieldTypes.length)
            return ORDINAL_NONE;

        return shards[ordinal & shardNumberMask].findElement(ordinal >> shardOrdinalShift, hashKey);
    }

    @Override
    public int relativeBucketValue(int setOrdinal, int bucketIndex) {
        return shards[setOrdinal & shardNumberMask].relativeBucketValue(setOrdinal >> shardOrdinalShift, bucketIndex);
    }

    @Override
    public HollowOrdinalIterator potentialMatchOrdinalIterator(int ordinal, int hashCode) {
        sampler.recordGet();
        if(size(ordinal) == 0)
            return EmptyOrdinalIterator.INSTANCE;
        return new PotentialMatchHollowSetOrdinalIterator(ordinal, this, hashCode);
    }

    @Override
    public HollowOrdinalIterator ordinalIterator(int ordinal) {
        sampler.recordIterator();
        if(size(ordinal) == 0)
            return EmptyOrdinalIterator.INSTANCE;
        return new HollowSetOrdinalIterator(ordinal, this);
    }

    @Override
    public HollowSetSchema getSchema() {
        return (HollowSetSchema)schema;
    }

    @Override
    public HollowSampler getSampler() {
        return sampler;
    }

    @Override
    public void setSamplingDirector(HollowSamplingDirector director) {
        sampler.setSamplingDirector(director);
    }

    @Override
    public void setFieldSpecificSamplingDirector(HollowFilterConfig fieldSpec, HollowSamplingDirector director) {
        sampler.setFieldSpecificSamplingDirector(fieldSpec, director);
    }

    @Override
    public void ignoreUpdateThreadForSampling(Thread t) {
        sampler.setUpdateThread(t);
    }

    @Override
    protected void invalidate() {
        stateListeners = EMPTY_LISTENERS;
        for(int i=0;i<shards.length;i++)
            shards[i].invalidate();
    }

    HollowSetTypeDataElements[] currentDataElements() {
        HollowSetTypeDataElements currentDataElements[] = new HollowSetTypeDataElements[shards.length];
        for(int i=0;i<shards.length;i++)
            currentDataElements[i] = shards[i].currentDataElements();
        return currentDataElements;
    }

    // Test/bootstrap hook; only valid for single-shard states.
    void setCurrentData(HollowSetTypeDataElements data) {
        if(shards.length > 1)
            throw new UnsupportedOperationException("Cannot directly set data on sharded type state");
        shards[0].setCurrentData(data);
        maxOrdinal = data.maxOrdinal;
    }

    @Override
    protected void applyToChecksum(HollowChecksum checksum, HollowSchema withSchema) {
        if(!getSchema().equals(withSchema))
            throw new IllegalArgumentException("HollowSetTypeReadState cannot calculate checksum with unequal schemas: " + getSchema().getName());

        BitSet populatedOrdinals = getListener(PopulatedOrdinalListener.class).getPopulatedOrdinals();
        for(int i=0;i<shards.length;i++)
            shards[i].applyToChecksum(checksum, populatedOrdinals, i, shards.length);
    }

    @Override
    public long getApproximateHeapFootprintInBytes() {
        long totalApproximateHeapFootprintInBytes = 0;
        for(int i=0;i<shards.length;i++)
            totalApproximateHeapFootprintInBytes += shards[i].getApproximateHeapFootprintInBytes();
        return totalApproximateHeapFootprintInBytes;
    }

    @Override
    public long getApproximateHoleCostInBytes() {
        long totalApproximateHoleCostInBytes = 0;
        BitSet populatedOrdinals = getPopulatedOrdinals();
        for(int i=0;i<shards.length;i++)
            totalApproximateHoleCostInBytes += shards[i].getApproximateHoleCostInBytes(populatedOrdinals, i, shards.length);
        return totalApproximateHoleCostInBytes;
    }

    public HollowPrimaryKeyValueDeriver getKeyDeriver() {
        return keyDeriver;
    }

    // Must be called after the state engine is populated so the deriver can resolve field paths.
    public void buildKeyDeriver() {
        if(getSchema().getHashKey() != null)
            this.keyDeriver = new HollowPrimaryKeyValueDeriver(getSchema().getHashKey(), getStateEngine());

        for(int i=0;i<shards.length;i++)
            shards[i].setKeyDeriver(keyDeriver);
    }

    @Override
    public int numShards() {
        return shards.length;
    }
}
9,117
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/schema/HollowSchema.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.schema; import com.netflix.hollow.core.memory.encoding.VarInt; import com.netflix.hollow.core.read.HollowBlobInput; import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; /** * A HollowSchema defines the structure of a hollow data model. * <p> * Each HollowSchema corresponds to a single named record type, and defines the structure of those records. A schema * will be one of: * * <dl> * <dt>HollowObjectSchema</dt> * <dd>Defines a fixed set of strongly typed fields. See {@link FieldType} for a complete list of allowable types.</dd> * <dt>HollowListSchema</dt> * <dd>Defines an ordered collection of records of a specific element record type.</dd> * <dt>HollowSetSchema</dt> * <dd>Defines an unordered collection of records of a specific element record type, without duplicates.</dd> * <dt>HollowMapSchema</dt> * <dd>Defines a key/value pair mapping between a specific key record type and specific value record type.</dd> * * </dl> * * @author dkoszewnik * */ public abstract class HollowSchema { private final String name; public HollowSchema(String name) { if (name == null || name.isEmpty()) { throw new IllegalArgumentException("Type name in Hollow Schema was " + (name == null ? 
"null" : "an empty string")); } this.name = name; } public String getName() { return name; } public abstract SchemaType getSchemaType(); public abstract void writeTo(OutputStream os) throws IOException; public static HollowSchema withoutKeys(HollowSchema schema) { switch(schema.getSchemaType()) { case SET: HollowSetSchema setSchema = (HollowSetSchema)schema; if(setSchema.getHashKey() != null) setSchema = new HollowSetSchema(setSchema.getName(), setSchema.getElementType()); return setSchema; case MAP: HollowMapSchema mapSchema = (HollowMapSchema)schema; if(mapSchema.getHashKey() != null) mapSchema = new HollowMapSchema(mapSchema.getName(), mapSchema.getKeyType(), mapSchema.getValueType()); return mapSchema; default: return schema; } } public static HollowSchema readFrom(InputStream is) throws IOException { HollowBlobInput hbi = HollowBlobInput.serial(is); return readFrom(hbi); } public static HollowSchema readFrom(HollowBlobInput in) throws IOException { int schemaTypeId = in.read(); String schemaName = in.readUTF(); switch(SchemaType.fromTypeId(schemaTypeId)) { case OBJECT: return readObjectSchemaFrom(in, schemaName, SchemaType.hasKey(schemaTypeId)); case LIST: return readListSchemaFrom(in, schemaName); case SET: return readSetSchemaFrom(in, schemaName, SchemaType.hasKey(schemaTypeId)); case MAP: return readMapSchemaFrom(in, schemaName, SchemaType.hasKey(schemaTypeId)); } throw new IOException(); } private static HollowObjectSchema readObjectSchemaFrom(HollowBlobInput in, String schemaName, boolean hasPrimaryKey) throws IOException { String[] keyFieldPaths = null; if (hasPrimaryKey) { int numFields = VarInt.readVInt(in); keyFieldPaths = new String[numFields]; for(int i=0;i<numFields;i++) { keyFieldPaths[i] = in.readUTF(); } } int numFields = in.readShort(); HollowObjectSchema schema = new HollowObjectSchema(schemaName, numFields, keyFieldPaths); for(int i=0;i<numFields;i++) { String fieldName = in.readUTF(); FieldType fieldType = FieldType.valueOf(in.readUTF()); 
String referencedType = fieldType == FieldType.REFERENCE ? in.readUTF() : null; schema.addField(fieldName, fieldType, referencedType); } return schema; } private static HollowSetSchema readSetSchemaFrom(HollowBlobInput in, String schemaName, boolean hasHashKey) throws IOException { String elementType = in.readUTF(); String hashKeyFields[] = null; if(hasHashKey) { int numFields = VarInt.readVInt(in); hashKeyFields = new String[numFields]; for(int i=0;i<numFields;i++) { hashKeyFields[i] = in.readUTF(); } } return new HollowSetSchema(schemaName, elementType, hashKeyFields); } private static HollowListSchema readListSchemaFrom(HollowBlobInput in, String schemaName) throws IOException { String elementType = in.readUTF(); return new HollowListSchema(schemaName, elementType); } private static HollowMapSchema readMapSchemaFrom(HollowBlobInput in, String schemaName, boolean hasHashKey) throws IOException { String keyType = in.readUTF(); String valueType = in.readUTF(); String hashKeyFields[] = null; if(hasHashKey) { int numFields = VarInt.readVInt(in); hashKeyFields = new String[numFields]; for(int i=0;i<numFields;i++) { hashKeyFields[i] = in.readUTF(); } } return new HollowMapSchema(schemaName, keyType, valueType, hashKeyFields); } protected static <T> boolean isNullableObjectEquals(T o1, T o2) { if (o1==o2) return true; if (o1==null && o2==null) return true; if (o1!=null && o1.equals(o2)) return true; return false; } public static enum SchemaType { OBJECT(0, 6), SET(1, 4), LIST(2, -1), MAP(3, 5); private final int typeId; private final int typeIdWithPrimaryKey; private SchemaType(int typeId, int typeIdWithPrimaryKey) { this.typeId = typeId; this.typeIdWithPrimaryKey = typeIdWithPrimaryKey; } public int getTypeId() { return typeId; } public int getTypeIdWithPrimaryKey() { return typeIdWithPrimaryKey; } public static SchemaType fromTypeId(int id) { switch(id) { case 0: case 6: return OBJECT; case 1: case 4: return SET; case 2: return LIST; case 3: case 5: return MAP; } 
throw new IllegalArgumentException("Cannot recognize HollowSchema type id " + id); } public static boolean hasKey(int typeId) { return typeId == 4 || typeId == 5 || typeId == 6; } public static class UnrecognizedSchemaTypeException extends IllegalStateException { public UnrecognizedSchemaTypeException(String name, SchemaType type) { super("unrecognized schema type; name=" + name + " type=" + type); } } } }
9,118
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/schema/HollowCollectionSchema.java
/*
 *  Copyright 2016-2019 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.hollow.core.schema;

import com.netflix.hollow.core.read.engine.HollowTypeReadState;

/**
 * A schema for a Collection record type -- parent class of both {@link HollowListSchema} or a {@link HollowSetSchema}
 *
 * @see HollowSchema
 *
 * @author dkoszewnik
 */
public abstract class HollowCollectionSchema extends HollowSchema {

    public HollowCollectionSchema(String name) {
        super(name);
    }

    /** The name of the record type contained in this collection. */
    public abstract String getElementType();

    /** The read state for the element type; populated during deserialization, may be null before then. */
    public abstract HollowTypeReadState getElementTypeState();

}
9,119
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/schema/SimpleHollowDataset.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.schema; import com.netflix.hollow.api.error.SchemaNotFoundException; import com.netflix.hollow.core.HollowDataset; import com.netflix.hollow.core.write.HollowWriteStateEngine; import com.netflix.hollow.core.write.objectmapper.HollowObjectMapper; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * A HollowDataset implementation which only describes a set of schemas comprising a dataset. */ public class SimpleHollowDataset implements HollowDataset { private final Map<String, HollowSchema> schemas; public SimpleHollowDataset(Map<String, HollowSchema> schemas) { this.schemas = schemas; } public SimpleHollowDataset(List<HollowSchema> schemas) { Map<String, HollowSchema> schemaMap = new HashMap<>(schemas.size()); for(HollowSchema schema : schemas) { schemaMap.put(schema.getName(), schema); } this.schemas = schemaMap; } @Override public List<HollowSchema> getSchemas() { return new ArrayList<>(schemas.values()); } @Override public HollowSchema getSchema(String typeName) { return schemas.get(typeName); } @Override public HollowSchema getNonNullSchema(String typeName) throws SchemaNotFoundException { HollowSchema schema = getSchema(typeName); if(schema == null) throw new SchemaNotFoundException(typeName, schemas.keySet()); return schema; } public static SimpleHollowDataset fromClassDefinitions(Class<?>... 
classes) { HollowWriteStateEngine stateEngine = new HollowWriteStateEngine(); HollowObjectMapper mapper = new HollowObjectMapper(stateEngine); for(Class<?> clazz : classes) { mapper.initializeTypeState(clazz); } return new SimpleHollowDataset(stateEngine.getSchemas()); } }
9,120
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/schema/HollowListSchema.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.schema; import com.netflix.hollow.core.read.engine.HollowTypeReadState; import java.io.DataOutputStream; import java.io.IOException; import java.io.OutputStream; /** * A schema for a List record type. * * @see HollowSchema * * @author dkoszewnik * */ public class HollowListSchema extends HollowCollectionSchema { private final String elementType; private HollowTypeReadState elementTypeState; public HollowListSchema(String schemaName, String elementType) { super(schemaName); this.elementType = elementType; } @Override public String getElementType() { return elementType; } public void setElementTypeState(HollowTypeReadState typeState) { this.elementTypeState = typeState; } @Override public HollowTypeReadState getElementTypeState() { return elementTypeState; } @Override public SchemaType getSchemaType() { return SchemaType.LIST; } @Override public boolean equals(Object other) { if (this == other) return true; if(!(other instanceof HollowListSchema)) return false; HollowListSchema otherSchema = (HollowListSchema)other; if(!getName().equals(otherSchema.getName())) return false; return getElementType().equals(otherSchema.getElementType()); } @Override public int hashCode() { int result = getName().hashCode(); result = 31 * result + getSchemaType().hashCode(); result = 31 * result + elementType.hashCode(); return result; } @Override public String toString() { return 
getName() + " List<" + getElementType() + ">;"; } @Override public void writeTo(OutputStream os) throws IOException { DataOutputStream dos = new DataOutputStream(os); dos.write(SchemaType.LIST.getTypeId()); dos.writeUTF(getName()); dos.writeUTF(getElementType()); } }
9,121
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/schema/HollowObjectSchema.java
/*
 *  Copyright 2016-2019 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.hollow.core.schema;

import com.netflix.hollow.api.error.IncompatibleSchemaException;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.read.filter.HollowFilterConfig;
import com.netflix.hollow.core.read.filter.TypeFilter;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/**
 * A schema for an Object record type.
 *
 * @see HollowSchema
 *
 * @author dkoszewnik
 */
public class HollowObjectSchema extends HollowSchema {

    // fieldNames / fieldTypes / referencedTypes are parallel arrays indexed by field position.
    private final Map<String, Integer> nameFieldIndexLookup;
    private final String fieldNames[];
    private final FieldType fieldTypes[];
    protected final String referencedTypes[];
    private final HollowTypeReadState referencedFieldTypeStates[]; /// populated during deserialization
    private final PrimaryKey primaryKey;

    // Number of fields added so far; the arrays are sized for numFields up front and filled via addField().
    private int size;

    public HollowObjectSchema(String schemaName, int numFields, String... keyFieldPaths) {
        this(schemaName, numFields, keyFieldPaths == null || keyFieldPaths.length == 0 ? null : new PrimaryKey(schemaName, keyFieldPaths));
    }

    public HollowObjectSchema(String schemaName, int numFields, PrimaryKey primaryKey) {
        super(schemaName);

        this.nameFieldIndexLookup = new HashMap<>(numFields);
        this.fieldNames = new String[numFields];
        this.fieldTypes = new FieldType[numFields];
        this.referencedTypes = new String[numFields];
        this.referencedFieldTypeStates = new HollowTypeReadState[numFields];
        this.primaryKey = primaryKey;
    }

    public int numFields() {
        return size;
    }

    public PrimaryKey getPrimaryKey() {
        return primaryKey;
    }

    public int addField(String fieldName, FieldType fieldType) {
        return addField(fieldName, fieldType, null);
    }

    /**
     * Appends a field to the schema and returns its position.
     *
     * @throws RuntimeException if {@code fieldType} is REFERENCE and no referenced type is supplied
     */
    public int addField(String fieldName, FieldType fieldType, String referencedType) {
        if (fieldType == FieldType.REFERENCE && referencedType == null) {
            throw new RuntimeException("When adding a REFERENCE field to a schema, the referenced type must be provided.  Check type: " + getName() + " field: " + fieldName);
        }

        fieldNames[size] = fieldName;
        fieldTypes[size] = fieldType;
        referencedTypes[size] = referencedType;
        nameFieldIndexLookup.put(fieldName, size);

        size++;

        return size - 1;
    }

    /**
     * @deprecated This method ignores the provided {@code HollowTypeReadState} - you should use
     * {@link #addField(String, FieldType, String)} and then call
     * {@link #setReferencedTypeState(int, HollowTypeReadState)} with the returned integer.  This
     * method will be removed in a future release.
     */
    @Deprecated
    public int addField(String fieldName, FieldType fieldType, String referencedType, HollowTypeReadState referencedTypeState) {
        return addField(fieldName, fieldType, referencedType);
    }

    /**
     * Returns the position of a field previously added to the map, or -1 if the field has not been added to the map.
     *
     * The positions of the fields are hashed into the <code>hashedPositionArray</code> by the hashCode of the fieldName.
     *
     * @param fieldName the field name
     * @return the position
     */
    public int getPosition(String fieldName) {
        Integer index = nameFieldIndexLookup.get(fieldName);
        if (index == null) {
            return -1;
        }
        return index;
    }

    public String getFieldName(int fieldPosition) {
        return fieldNames[fieldPosition];
    }

    /** Returns the field's type, or null if no field with that name exists. */
    public FieldType getFieldType(String fieldName) {
        int fieldPosition = getPosition(fieldName);
        if(fieldPosition == -1)
            return null;
        return getFieldType(fieldPosition);
    }

    public FieldType getFieldType(int fieldPosition) {
        return fieldTypes[fieldPosition];
    }

    /** Returns the referenced type name, or null if the field is absent or not a REFERENCE. */
    public String getReferencedType(String fieldName) {
        int fieldPosition = getPosition(fieldName);
        if(fieldPosition == -1)
            return null;
        return getReferencedType(fieldPosition);
    }

    public String getReferencedType(int fieldPosition) {
        return referencedTypes[fieldPosition];
    }

    public void setReferencedTypeState(int fieldPosition, HollowTypeReadState state) {
        referencedFieldTypeStates[fieldPosition] = state;
    }

    public HollowTypeReadState getReferencedTypeState(int fieldPosition) {
        return referencedFieldTypeStates[fieldPosition];
    }

    /**
     * Returns a schema containing only the fields present in both this schema and {@code otherSchema}.
     * The primary key is retained only when both schemas agree on it.
     *
     * @throws IllegalArgumentException if the schemas have different names
     * @throws IncompatibleSchemaException if a shared field has conflicting types
     */
    public HollowObjectSchema findCommonSchema(HollowObjectSchema otherSchema) {
        if(!getName().equals(otherSchema.getName())) {
            throw new IllegalArgumentException("Cannot find common schema of two schemas with different names!");
        }

        int commonFields = 0;

        for (String fieldName : fieldNames) {
            if(otherSchema.getPosition(fieldName) != -1) {
                commonFields++;
            }
        }

        PrimaryKey primaryKey = isNullableObjectEquals(this.primaryKey, otherSchema.getPrimaryKey()) ? this.primaryKey : null;
        HollowObjectSchema commonSchema = new HollowObjectSchema(getName(), commonFields, primaryKey);

        for (int i = 0; i < fieldNames.length; i++) {
            int otherFieldIndex = otherSchema.getPosition(fieldNames[i]);
            if (otherFieldIndex != -1) {
                if (fieldTypes[i] != otherSchema.getFieldType(otherFieldIndex)
                        || !referencedTypesEqual(referencedTypes[i], otherSchema.getReferencedType(otherFieldIndex))) {
                    // Render each conflicting type as either the referenced type name or the lowercase primitive name.
                    String fieldType = fieldTypes[i] == FieldType.REFERENCE ? referencedTypes[i] : fieldTypes[i].toString().toLowerCase();
                    String otherFieldType = otherSchema.getFieldType(otherFieldIndex) == FieldType.REFERENCE ? otherSchema.getReferencedType(otherFieldIndex) : otherSchema.getFieldType(otherFieldIndex).toString().toLowerCase();
                    throw new IncompatibleSchemaException(getName(), fieldNames[i], fieldType, otherFieldType);
                }

                commonSchema.addField(fieldNames[i], fieldTypes[i], referencedTypes[i]);
            }
        }

        return commonSchema;
    }

    /**
     * Returns a schema containing the union of the fields in this schema and {@code otherSchema}.
     * This schema's fields come first, then {@code otherSchema}'s fields not already present.
     * The primary key is retained only when both schemas agree on it.
     *
     * @throws IllegalArgumentException if the schemas have different names
     */
    public HollowObjectSchema findUnionSchema(HollowObjectSchema otherSchema) {
        if(!getName().equals(otherSchema.getName())) {
            throw new IllegalArgumentException("Cannot find common schema of two schemas with different names!");
        }

        int totalFields = otherSchema.numFields();

        for (String fieldName : fieldNames) {
            if(otherSchema.getPosition(fieldName) == -1)
                totalFields++;
        }

        PrimaryKey primaryKey = isNullableObjectEquals(this.primaryKey, otherSchema.getPrimaryKey()) ? this.primaryKey : null;
        HollowObjectSchema unionSchema = new HollowObjectSchema(getName(), totalFields, primaryKey);

        for(int i=0;i<fieldNames.length;i++) {
            unionSchema.addField(fieldNames[i], fieldTypes[i], referencedTypes[i]);
        }

        for(int i=0;i<otherSchema.numFields();i++) {
            if(getPosition(otherSchema.getFieldName(i)) == -1) {
                unionSchema.addField(otherSchema.getFieldName(i), otherSchema.getFieldType(i), otherSchema.getReferencedType(i));
            }
        }

        return unionSchema;
    }

    public HollowObjectSchema filterSchema(HollowFilterConfig config) {
        /*
         * This method is preserved for binary compat from before TypeFilter was introduced.
         */
        return filterSchema((TypeFilter)config);
    }

    /** Returns a schema containing only the fields the supplied filter includes for this type. */
    public HollowObjectSchema filterSchema(TypeFilter filter) {
        String type = getName();
        int includedFields = 0;

        for(int i=0;i<numFields();i++) {
            String field = getFieldName(i);
            if(filter.includes(type, field))
                includedFields++;
        }

        HollowObjectSchema filteredSchema = new HollowObjectSchema(getName(), includedFields, primaryKey);

        for(int i=0;i<numFields();i++) {
            String field = getFieldName(i);
            if(filter.includes(type, field))
                filteredSchema.addField(field, getFieldType(i), getReferencedType(i));
        }

        return filteredSchema;
    }

    private boolean referencedTypesEqual(String type1, String type2) {
        if(type1 == null)
            return type2 == null;
        return type1.equals(type2);
    }

    @Override
    public SchemaType getSchemaType() {
        return SchemaType.OBJECT;
    }

    /**
     * Equality requires the same name, primary key, and an identical field list (same names, types,
     * referenced types, in the same order).
     */
    @Override
    public boolean equals(Object other) {
        if (this == other)
            return true;
        if(!(other instanceof HollowObjectSchema))
            return false;

        HollowObjectSchema otherSchema = (HollowObjectSchema) other;
        if(!getName().equals(otherSchema.getName()))
            return false;
        if(otherSchema.numFields() != numFields())
            return false;

        if (!isNullableObjectEquals(primaryKey, otherSchema.getPrimaryKey()))
            return false;

        for(int i=0;i<numFields();i++) {
            if(getFieldType(i) != otherSchema.getFieldType(i))
                return false;
            if(getFieldType(i) == FieldType.REFERENCE && !getReferencedType(i).equals(otherSchema.getReferencedType(i)))
                return false;
            if(!getFieldName(i).equals(otherSchema.getFieldName(i)))
                return false;
        }

        return true;
    }

    @Override
    public int hashCode() {
        int result = getName().hashCode();
        result = 31 * result + getSchemaType().hashCode();
        result = 31 * result + Objects.hash(primaryKey);
        result = 31 * result + Arrays.hashCode(fieldNames);
        result = 31 * result + Arrays.hashCode(fieldTypes);
        return result;
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();

        builder.append(getName());

        if (primaryKey != null) {
            builder.append(" @PrimaryKey(");
            if (primaryKey.numFields() > 0) {
                builder.append(primaryKey.getFieldPath(0));
                for (int i = 1; i < primaryKey.numFields(); i++) {
                    builder.append(", ").append(primaryKey.getFieldPath(i));
                }
            }
            builder.append(")");
        }

        builder.append(" {\n");
        for(int i=0;i<numFields();i++) {
            builder.append("\t");
            if(getFieldType(i) == FieldType.REFERENCE) {
                builder.append(getReferencedType(i));
            } else {
                builder.append(getFieldType(i).toString().toLowerCase());
            }
            builder.append(" ").append(getFieldName(i)).append(";\n");
        }
        builder.append("}");

        return builder.toString();
    }

    /**
     * Serializes this schema: type id (with or without primary key), name, optional key field paths,
     * field count, then each field's name, type, and (for REFERENCE fields) referenced type.
     */
    @Override
    public void writeTo(OutputStream os) throws IOException {
        DataOutputStream dos = new DataOutputStream(os);

        if (primaryKey != null)
            dos.write(SchemaType.OBJECT.getTypeIdWithPrimaryKey());
        else
            dos.write(SchemaType.OBJECT.getTypeId());

        dos.writeUTF(getName());

        if (primaryKey != null) {
            VarInt.writeVInt(dos, primaryKey.numFields());
            for (int i = 0; i < primaryKey.numFields(); i++) {
                dos.writeUTF(primaryKey.getFieldPath(i));
            }
        }

        dos.writeShort(size);

        for(int i=0;i<size;i++) {
            dos.writeUTF(fieldNames[i]);
            dos.writeUTF(fieldTypes[i].name());
            if(fieldTypes[i] == FieldType.REFERENCE)
                dos.writeUTF(referencedTypes[i]);
        }
    }

    /**
     * All allowable field types.
     *
     */
    public enum FieldType {
        /**
         * A reference to another field.  References are typed, and are fixed-length fields are encoded as the ordinal of the referenced record.
         */
        REFERENCE(-1, false),
        /**
         * An integer value up to 32 bits.  Integers are fixed-length fields encoded with zig-zag encoding.
         * The value Integer.MIN_VALUE is reserved for a sentinel value indicating null.
         */
        INT(-1, false),
        /**
         * An integer value up to 64 bits.  Longs are fixed-length fields encoded with zig-zag encoding.
         * The value Long.MIN_VALUE is reserved for a sentinel value indicating null.
         */
        LONG(-1, false),
        /**
         * A boolean value.  Booleans are encoded as fields requiring two bits each.  Two bits are required
         * because boolean fields can carry any of the three values:  true, false, or null.
         */
        BOOLEAN(1, false),
        /**
         * A floating-point number.  Floats are encoded as fixed-length fields four bytes long.
         */
        FLOAT(4, false),
        /**
         * A double-precision floating point number.  Doubles are encoded as fixed-length fields eight bytes long.
         */
        DOUBLE(8, false),
        /**
         * A String of characters.  All Strings for all records containing a given field are encoded in a packed array
         * of variable-length characters.  The values are ordered by the ordinal of the record to which they belong.
         * Each individual record contains a fixed-length field which holds an integer which points to the end of the
         * array range containing the value for the specific record.  The beginning of the range is determined by
         * reading the pointer from the previous record.
         */
        STRING(-1, true),
        /**
         * A byte array.  All byte arrays for all records containing a given field are encoded in a packed array
         * of bytes.  The values are ordered by the ordinal of the record to which they belong.
         * Each individual record contains a fixed-length field which holds an integer which points to the end of the
         * array range containing the value for the specific record.  The beginning of the range is determined by
         * reading the pointer from the previous record.
         */
        BYTES(-1, true);

        // Fixed byte length of the encoded value, or -1 when not applicable (variable-length or bit-packed).
        private final int fixedLength;
        // True for types whose per-record length is var-int encoded in a packed value array (STRING, BYTES).
        private final boolean varIntEncodesLength;

        FieldType(int fixedLength, boolean varIntEncodesLength) {
            this.fixedLength = fixedLength;
            this.varIntEncodesLength = varIntEncodesLength;
        }

        public int getFixedLength() {
            return fixedLength;
        }

        public boolean isVariableLength() {
            return varIntEncodesLength;
        }
    }

}
9,122
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/schema/HollowSchemaSorter.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.schema;

import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.HollowStateEngine;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Orders {@link HollowSchema}s so that any type appears after all of the types it references
 * (i.e. dependency types come before dependent types).
 */
public class HollowSchemaSorter {

    /**
     * Dependency types come before dependent types.
     *
     * @param dataset the data set
     * @return the schemas, ordered so that referenced types precede referencing types
     */
    public static List<HollowSchema> dependencyOrderedSchemaList(HollowDataset dataset) {
        return dependencyOrderedSchemaList(dataset.getSchemas());
    }

    /**
     * Dependency types come before dependent types.
     * <p>
     * NOTE(review): if the provided schemas contain a reference cycle, the types participating
     * in the cycle cannot be topologically ordered and are silently omitted from the result.
     *
     * @param schemas the schemas
     * @return the schemas, ordered so that referenced types precede referencing types
     */
    public static List<HollowSchema> dependencyOrderedSchemaList(Collection<HollowSchema> schemas) {
        DependencyIndex idx = new DependencyIndex();

        Map<String, HollowSchema> schemaMap = new HashMap<>();
        for (HollowSchema schema : schemas) {
            schemaMap.put(schema.getName(), schema);
            idx.indexSchema(schema, schemas);
        }

        List<HollowSchema> orderedSchemas = new ArrayList<>();
        while (idx.hasMoreTypes())
            orderedSchemas.add(schemaMap.get(idx.getNextType()));

        return orderedSchemas;
    }

    /** Tracks, per type, which types it still depends on and which types depend on it. */
    private static class DependencyIndex {

        /** type name -> names of not-yet-emitted types it depends on */
        private final Map<String, Set<String>> dependencyIndex;
        /** type name -> names of types which depend on it */
        private final Map<String, Set<String>> reverseDependencyIndex;

        public DependencyIndex() {
            this.dependencyIndex = new HashMap<>();
            this.reverseDependencyIndex = new HashMap<>();
        }

        /** @return true iff at least one remaining type has no outstanding dependencies */
        public boolean hasMoreTypes() {
            for (Set<String> remainingDependencies : dependencyIndex.values()) {
                if (remainingDependencies.isEmpty())
                    return true;
            }
            return false;
        }

        /**
         * Removes and returns the lexicographically first type with no outstanding
         * dependencies.  Lexicographic selection keeps the ordering deterministic
         * regardless of map iteration order.
         */
        public String getNextType() {
            // single pass: track the minimum available type instead of collecting then scanning
            String nextType = null;
            for (Map.Entry<String, Set<String>> entry : dependencyIndex.entrySet()) {
                if (entry.getValue().isEmpty()
                        && (nextType == null || entry.getKey().compareTo(nextType) < 0)) {
                    nextType = entry.getKey();
                }
            }

            removeType(nextType);
            return nextType;
        }

        /** Records the dependencies of the given schema in both indexes. */
        private void indexSchema(HollowSchema schema, Collection<HollowSchema> allSchemas) {
            if (schema instanceof HollowCollectionSchema) {
                String elementType = ((HollowCollectionSchema) schema).getElementType();
                addDependency(schema.getName(), elementType, allSchemas);
            } else if (schema instanceof HollowMapSchema) {
                HollowMapSchema mapSchema = (HollowMapSchema) schema;
                addDependency(schema.getName(), mapSchema.getKeyType(), allSchemas);
                addDependency(schema.getName(), mapSchema.getValueType(), allSchemas);
            } else if (schema instanceof HollowObjectSchema) {
                HollowObjectSchema objectSchema = (HollowObjectSchema) schema;
                for (int i = 0; i < objectSchema.numFields(); i++) {
                    if (objectSchema.getFieldType(i) == FieldType.REFERENCE)
                        addDependency(schema.getName(), objectSchema.getReferencedType(i), allSchemas);
                }
            }

            // ensure every indexed type has an entry in both maps, even if it has no dependencies
            dependenciesOf(schema.getName(), dependencyIndex);
            dependenciesOf(schema.getName(), reverseDependencyIndex);
        }

        /** Removes an emitted type, releasing it as a pending dependency of its dependents. */
        private void removeType(String type) {
            Set<String> dependents = reverseDependencyIndex.remove(type);
            for (String dependent : dependents)
                dependencyIndex.get(dependent).remove(type);
            dependencyIndex.remove(type);
        }

        /**
         * Records that {@code dependent} references {@code dependency}.  Dependencies on
         * types absent from {@code allSchemas} are ignored so they cannot block ordering.
         */
        private void addDependency(String dependent, String dependency, Collection<HollowSchema> allSchemas) {
            if (schemaExists(dependency, allSchemas)) {
                dependenciesOf(dependent, dependencyIndex).add(dependency);
                dependenciesOf(dependency, reverseDependencyIndex).add(dependent);
            }
        }

        private boolean schemaExists(String schemaName, Collection<HollowSchema> allSchemas) {
            for (HollowSchema schema : allSchemas) {
                if (schema.getName().equals(schemaName))
                    return true;
            }
            return false;
        }

        /** Returns the (possibly newly created) dependency set for {@code key}. */
        private Set<String> dependenciesOf(String key, Map<String, Set<String>> index) {
            return index.computeIfAbsent(key, k -> new HashSet<>());
        }
    }

    /**
     * @param stateEngine the state engine
     * @param dependentType the dependent type name
     * @param dependencyType the dependency type name
     * @return whether the dependencyType is equal to, referenced by, or transitively referenced by the dependentType
     */
    public static boolean typeIsTransitivelyDependent(HollowStateEngine stateEngine, String dependentType, String dependencyType) {
        if (dependentType.equals(dependencyType))
            return true;

        HollowSchema dependentTypeSchema = stateEngine.getSchema(dependentType);
        if (dependentTypeSchema == null)
            return false;

        switch (dependentTypeSchema.getSchemaType()) {
            case OBJECT:
                HollowObjectSchema objectSchema = (HollowObjectSchema) dependentTypeSchema;
                for (int i = 0; i < objectSchema.numFields(); i++) {
                    if (objectSchema.getFieldType(i) == FieldType.REFERENCE
                            && typeIsTransitivelyDependent(stateEngine, objectSchema.getReferencedType(i), dependencyType))
                        return true;
                }
                break;
            case LIST:
            case SET:
                return typeIsTransitivelyDependent(stateEngine, ((HollowCollectionSchema) dependentTypeSchema).getElementType(), dependencyType);
            case MAP:
                return typeIsTransitivelyDependent(stateEngine, ((HollowMapSchema) dependentTypeSchema).getKeyType(), dependencyType)
                        || typeIsTransitivelyDependent(stateEngine, ((HollowMapSchema) dependentTypeSchema).getValueType(), dependencyType);
        }

        return false;
    }
}
9,123
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/schema/HollowSchemaParser.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.schema;

import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import java.io.IOException;
import java.io.Reader;
import java.io.StreamTokenizer;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;

/**
 * Parses text representations of {@link HollowSchema}.
 * <p>
 * The text representations are the same format obtained via toString() on a HollowSchema.
 */
public class HollowSchemaParser {

    private static final Logger log = Logger.getLogger(HollowSchemaParser.class.getName());

    /**
     * Parse a collection of {@link HollowSchema}s from the provided Reader.
     *
     * @param reader the reader
     * @return the list of schema
     * @throws IOException if the schema cannot be parsed
     */
    public static List<HollowSchema> parseCollectionOfSchemas(Reader reader) throws IOException {
        StreamTokenizer tokenizer = new StreamTokenizer(reader);
        configureTokenizer(tokenizer);

        List<HollowSchema> schemaList = new ArrayList<HollowSchema>();

        HollowSchema schema = parseSchema(tokenizer);
        while (schema != null) {
            schemaList.add(schema);
            schema = parseSchema(tokenizer);
        }

        return schemaList;
    }

    /**
     * Parse a collection of {@link HollowSchema}s from the provided String.
     *
     * @param schemas the schemas as a string
     * @return the list of schema
     * @throws IOException if the schema cannot be parsed
     */
    public static List<HollowSchema> parseCollectionOfSchemas(String schemas) throws IOException {
        return parseCollectionOfSchemas(new StringReader(schemas));
    }

    /**
     * Parse a single {@link HollowSchema} from the provided String.
     *
     * @param schema the schema as a string
     * @return the schema
     * @throws IOException if the schema cannot be parsed
     */
    public static HollowSchema parseSchema(String schema) throws IOException {
        StreamTokenizer tokenizer = new StreamTokenizer(new StringReader(schema));
        configureTokenizer(tokenizer);
        return parseSchema(tokenizer);
    }

    private static HollowSchema parseSchema(StreamTokenizer tokenizer) throws IOException {
        // skip ahead until the next word token: the type name
        int tok = tokenizer.nextToken();
        while (tok != StreamTokenizer.TT_WORD) {
            if (tok == StreamTokenizer.TT_EOF)
                return null; // no more schemas in the stream
            tok = tokenizer.nextToken();
        }

        String typeName = tokenizer.sval;

        tok = tokenizer.nextToken();
        if (tok == StreamTokenizer.TT_WORD) {
            // a word here distinguishes collection schemas from object schemas
            if ("List".equals(tokenizer.sval)) {
                return parseListSchema(typeName, tokenizer);
            } else if ("Set".equals(tokenizer.sval)) {
                return parseSetSchema(typeName, tokenizer);
            } else if ("Map".equals(tokenizer.sval)) {
                return parseMapSchema(typeName, tokenizer);
            } else {
                throw new IOException("Invalid syntax: expected one of '{', 'List', 'Set', or 'Map' after type declaration for '" + typeName + "'");
            }
        }

        return parseObjectSchema(typeName, tokenizer);
    }

    private static HollowObjectSchema parseObjectSchema(String typeName, StreamTokenizer tokenizer) throws IOException {
        String keyFieldPaths[] = parsePrimaryKey(tokenizer);
        if (tokenizer.ttype != '{') {
            throw new IOException("Invalid syntax: expecting '{' for '" + typeName + "'");
        }

        tokenizer.nextToken();

        // flat list of alternating (fieldType, fieldName) tokens
        List<String> tokens = new ArrayList<String>();
        while (tokenizer.ttype != '}') {
            // BUGFIX: these checks previously tested a stale 'tok' variable which was only
            // assigned before the loop (the in-loop nextToken() results were discarded), so
            // malformed declarations after the first field were not reported.  Check the
            // tokenizer's current ttype, which nextToken() always updates.
            if (tokenizer.ttype != StreamTokenizer.TT_WORD)
                throw new IOException("Invalid syntax, expected field type: " + typeName);
            tokens.add(tokenizer.sval);

            tokenizer.nextToken();
            if (tokenizer.ttype != StreamTokenizer.TT_WORD)
                throw new IOException("Invalid syntax, expected field name: " + typeName);
            String fieldName = tokenizer.sval;
            tokens.add(fieldName);

            tokenizer.nextToken();
            if (tokenizer.ttype != ';')
                throw new IOException("Invalid syntax, expected semicolon: " + typeName + "." + fieldName);

            tokenizer.nextToken();
        }

        HollowObjectSchema schema = new HollowObjectSchema(typeName, tokens.size() / 2, keyFieldPaths);

        for (int i = 0; i < tokens.size(); i += 2) {
            String fieldType = tokens.get(i);
            String fieldName = tokens.get(i + 1);
            if ("int".equals(fieldType)) {
                schema.addField(fieldName, FieldType.INT);
            } else if ("long".equals(fieldType)) {
                schema.addField(fieldName, FieldType.LONG);
            } else if ("float".equals(fieldType)) {
                schema.addField(fieldName, FieldType.FLOAT);
            } else if ("double".equals(fieldType)) {
                schema.addField(fieldName, FieldType.DOUBLE);
            } else if ("boolean".equals(fieldType)) {
                schema.addField(fieldName, FieldType.BOOLEAN);
            } else if ("string".equals(fieldType)) {
                schema.addField(fieldName, FieldType.STRING);
            } else if ("bytes".equals(fieldType)) {
                schema.addField(fieldName, FieldType.BYTES);
            } else {
                // any unrecognized type name is treated as a reference to another type
                schema.addField(fieldName, FieldType.REFERENCE, fieldType);
            }
        }

        return schema;
    }

    private static HollowListSchema parseListSchema(String typeName, StreamTokenizer tokenizer) throws IOException {
        tokenizer.nextToken();
        if (tokenizer.ttype != '<')
            throw new IOException("Invalid Syntax: Expected '<' after 'List' for type " + typeName);

        if (tokenizer.nextToken() != StreamTokenizer.TT_WORD) {
            log.warning("Invalid Syntax: Expected element type declaration: " + typeName);
        }
        String elementType = tokenizer.sval;

        tokenizer.nextToken();
        if (tokenizer.ttype != '>')
            throw new IOException("Invalid Syntax: Expected '>' element type declaration: " + typeName);

        tokenizer.nextToken();
        if (tokenizer.ttype != ';')
            throw new IOException("Invalid Syntax: Expected semicolon after List schema declaration: " + typeName);

        return new HollowListSchema(typeName, elementType);
    }

    private static HollowSetSchema parseSetSchema(String typeName, StreamTokenizer tokenizer) throws IOException {
        tokenizer.nextToken();
        if (tokenizer.ttype != '<')
            throw new IOException("Invalid Syntax: Expected '<' after 'Set' for type " + typeName);

        if (tokenizer.nextToken() != StreamTokenizer.TT_WORD) {
            log.warning("Invalid Syntax: Expected element type declaration: " + typeName);
        }
        String elementType = tokenizer.sval;

        tokenizer.nextToken();
        if (tokenizer.ttype != '>')
            throw new IOException("Invalid Syntax: Expected '>' element type declaration: " + typeName);

        tokenizer.nextToken();
        String hashKeyPaths[] = parseHashKey(tokenizer);

        if (tokenizer.ttype != ';')
            throw new IOException("Invalid Syntax: Expected semicolon after Set schema declaration: " + typeName);

        return new HollowSetSchema(typeName, elementType, hashKeyPaths);
    }

    private static HollowMapSchema parseMapSchema(String typeName, StreamTokenizer tokenizer) throws IOException {
        tokenizer.nextToken();
        if (tokenizer.ttype != '<')
            throw new IOException("Invalid Syntax: Expected '<' after 'Map' for type " + typeName);

        if (tokenizer.nextToken() != StreamTokenizer.TT_WORD) {
            log.warning("Invalid Syntax: Expected element type declaration: " + typeName);
        }
        String keyType = tokenizer.sval;

        tokenizer.nextToken();
        if (tokenizer.ttype != ',')
            throw new IOException("Invalid Syntax: Expected ',' after key type declaration: " + typeName);

        if (tokenizer.nextToken() != StreamTokenizer.TT_WORD) {
            log.warning("Invalid Syntax: Expected value type declaration: " + typeName);
        }
        String valueType = tokenizer.sval;

        tokenizer.nextToken();
        if (tokenizer.ttype != '>')
            throw new IOException("Invalid Syntax: Expected '>' after value type declaration: " + typeName);

        tokenizer.nextToken();
        String hashKeyPaths[] = parseHashKey(tokenizer);

        if (tokenizer.ttype != ';')
            throw new IOException("Invalid Syntax: Expected semicolon after Map schema declaration: " + typeName);

        return new HollowMapSchema(typeName, keyType, valueType, hashKeyPaths);
    }

    private static String[] parseHashKey(StreamTokenizer tokenizer) throws IOException {
        return parseKeyFieldPaths(tokenizer, "HashKey");
    }

    private static String[] parsePrimaryKey(StreamTokenizer tokenizer) throws IOException {
        return parseKeyFieldPaths(tokenizer, "PrimaryKey");
    }

    /**
     * Parses an optional {@code @AnnotationName(path, path, ...)} clause; returns an empty
     * array when the current token is not '@'.  Leaves the tokenizer positioned on the
     * token following the closing parenthesis.
     */
    private static String[] parseKeyFieldPaths(StreamTokenizer tokenizer, String annotationName) throws IOException {
        if (tokenizer.ttype != '@')
            return new String[0];

        List<String> fieldPaths = new ArrayList<String>();

        int tok = tokenizer.nextToken();
        if (tok != StreamTokenizer.TT_WORD || !annotationName.equals(tokenizer.sval)) {
            throw new IOException("Invalid Syntax: Invalid @" + tokenizer.sval + " annotation, expecting @" + annotationName + " declaration");
        }

        tokenizer.nextToken();
        if (tokenizer.ttype != '(')
            throw new IOException("Expected open parenthesis '(' after @" + annotationName + " declaration");

        tok = tokenizer.nextToken();
        while (tokenizer.ttype != ')') {
            if (tok != StreamTokenizer.TT_WORD)
                throw new IOException("Invalid field declaration inside @" + annotationName + " spec");
            fieldPaths.add(tokenizer.sval);

            tok = tokenizer.nextToken();
            if (tokenizer.ttype == ',') {
                tok = tokenizer.nextToken();
            } else if (tokenizer.ttype != ')') {
                throw new IOException("Invalid char inside @" + annotationName + " spec");
            }
        }

        // advance past the closing parenthesis
        tokenizer.nextToken();

        return fieldPaths.toArray(new String[fieldPaths.size()]);
    }

    private static void configureTokenizer(StreamTokenizer tokenizer) {
        tokenizer.wordChars('_', '_');      // underscores are legal in type/field names
        tokenizer.slashSlashComments(true); // allow // comments in schema text
        tokenizer.slashStarComments(true);  // allow /* */ comments in schema text
    }
}
9,124
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/schema/HollowSchemaHash.java
package com.netflix.hollow.core.schema;

import com.netflix.hollow.core.HollowStateEngine;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;

/**
 * A deterministic hash over a collection of {@link HollowSchema}s, derived from their
 * text representations in name order.  Two schema sets with identical definitions
 * produce equal hashes regardless of iteration order.
 */
public class HollowSchemaHash {

    private final String hash;

    public HollowSchemaHash(String hash) {
        this.hash = hash;
    }

    public HollowSchemaHash(HollowStateEngine stateEngine) {
        this(stateEngine.getSchemas());
    }

    public HollowSchemaHash(Collection<HollowSchema> schemas) {
        // sort the schemas by name so the hash is independent of input ordering
        Map<String, HollowSchema> orderedSchemas = new TreeMap<>();
        for (HollowSchema schema : schemas)
            orderedSchemas.put(schema.getName(), schema);

        // concatenate the text form of each schema, in name order
        StringBuilder serialized = new StringBuilder();
        for (HollowSchema schema : orderedSchemas.values())
            serialized.append(schema);

        this.hash = calculateHash(serialized.toString());
    }

    private String calculateHash(String schemaString) {
        return String.valueOf(HashCodes.hashCode(schemaString));
    }

    public String getHash() {
        return hash;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;
        return Objects.equals(hash, ((HollowSchemaHash) o).hash);
    }

    @Override
    public int hashCode() {
        return Objects.hash(hash);
    }

    @Override
    public String toString() {
        return getHash();
    }
}
9,125
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/schema/HollowMapSchema.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.schema;

import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Objects;

/**
 * A schema for a Map record type
 *
 * @see HollowSchema
 *
 * @author dkoszewnik
 *
 */
public class HollowMapSchema extends HollowSchema {

    private final String keyType;
    private final String valueType;
    private final PrimaryKey hashKey;

    private HollowTypeReadState keyTypeState;
    private HollowTypeReadState valueTypeState;

    public HollowMapSchema(String schemaName, String keyType, String valueType, String... hashKeyFieldPaths) {
        super(schemaName);

        this.keyType = keyType;
        this.valueType = valueType;
        // no hash key paths means entries hash by the key record's default hash
        if (hashKeyFieldPaths == null || hashKeyFieldPaths.length == 0) {
            this.hashKey = null;
        } else {
            this.hashKey = new PrimaryKey(keyType, hashKeyFieldPaths);
        }
    }

    public String getKeyType() {
        return keyType;
    }

    public String getValueType() {
        return valueType;
    }

    public PrimaryKey getHashKey() {
        return hashKey;
    }

    public HollowTypeReadState getKeyTypeState() {
        return keyTypeState;
    }

    public void setKeyTypeState(HollowTypeReadState keyTypeState) {
        this.keyTypeState = keyTypeState;
    }

    public HollowTypeReadState getValueTypeState() {
        return valueTypeState;
    }

    public void setValueTypeState(HollowTypeReadState valueTypeState) {
        this.valueTypeState = valueTypeState;
    }

    @Override
    public SchemaType getSchemaType() {
        return SchemaType.MAP;
    }

    @Override
    public boolean equals(Object other) {
        if (this == other)
            return true;
        if (!(other instanceof HollowMapSchema))
            return false;

        HollowMapSchema that = (HollowMapSchema) other;
        return getName().equals(that.getName())
                && getKeyType().equals(that.getKeyType())
                && getValueType().equals(that.getValueType())
                && isNullableObjectEquals(hashKey, that.getHashKey());
    }

    @Override
    public int hashCode() {
        int result = getName().hashCode();
        result = 31 * result + getSchemaType().hashCode();
        result = 31 * result + keyType.hashCode();
        result = 31 * result + valueType.hashCode();
        result = 31 * result + Objects.hashCode(hashKey);
        return result;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder(getName());
        sb.append(" Map<").append(getKeyType()).append(",").append(getValueType()).append(">");
        appendHashKey(sb);
        sb.append(";");
        return sb.toString();
    }

    // appends " @HashKey(path0, path1, ...)" when a hash key is defined
    private void appendHashKey(StringBuilder sb) {
        if (hashKey == null)
            return;

        sb.append(" @HashKey(");
        for (int i = 0; i < hashKey.numFields(); i++) {
            if (i > 0)
                sb.append(", ");
            sb.append(hashKey.getFieldPath(i));
        }
        sb.append(")");
    }

    @Override
    public void writeTo(OutputStream os) throws IOException {
        DataOutputStream dos = new DataOutputStream(os);

        // a distinct type id signals the presence of hash key field paths in the stream
        dos.write(getHashKey() != null ? SchemaType.MAP.getTypeIdWithPrimaryKey() : SchemaType.MAP.getTypeId());

        dos.writeUTF(getName());
        dos.writeUTF(getKeyType());
        dos.writeUTF(getValueType());

        if (getHashKey() != null) {
            VarInt.writeVInt(dos, getHashKey().numFields());
            for (int i = 0; i < getHashKey().numFields(); i++) {
                dos.writeUTF(getHashKey().getFieldPath(i));
            }
        }
    }
}
9,126
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/schema/HollowSetSchema.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.schema;

import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.memory.encoding.VarInt;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Objects;

/**
 * A schema for a Set record type.
 *
 * @see HollowSchema
 *
 * @author dkoszewnik
 *
 */
public class HollowSetSchema extends HollowCollectionSchema {

    private final String elementType;
    private final PrimaryKey hashKey;

    private HollowTypeReadState elementTypeState;

    public HollowSetSchema(String schemaName, String elementType, String... hashKeyFieldPaths) {
        super(schemaName);

        this.elementType = elementType;
        // no hash key paths means elements hash by the element record's default hash
        if (hashKeyFieldPaths == null || hashKeyFieldPaths.length == 0) {
            this.hashKey = null;
        } else {
            this.hashKey = new PrimaryKey(elementType, hashKeyFieldPaths);
        }
    }

    @Override
    public String getElementType() {
        return elementType;
    }

    public PrimaryKey getHashKey() {
        return hashKey;
    }

    public void setElementTypeState(HollowTypeReadState elementTypeState) {
        this.elementTypeState = elementTypeState;
    }

    @Override
    public HollowTypeReadState getElementTypeState() {
        return elementTypeState;
    }

    @Override
    public SchemaType getSchemaType() {
        return SchemaType.SET;
    }

    @Override
    public boolean equals(Object other) {
        if (this == other)
            return true;
        if (!(other instanceof HollowSetSchema))
            return false;

        HollowSetSchema that = (HollowSetSchema) other;
        return getName().equals(that.getName())
                && getElementType().equals(that.getElementType())
                && isNullableObjectEquals(hashKey, that.getHashKey());
    }

    @Override
    public int hashCode() {
        int result = getName().hashCode();
        result = 31 * result + getSchemaType().hashCode();
        result = 31 * result + elementType.hashCode();
        result = 31 * result + Objects.hashCode(hashKey);
        return result;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder(getName());
        sb.append(" Set<").append(getElementType()).append(">");
        appendHashKey(sb);
        sb.append(";");
        return sb.toString();
    }

    // appends " @HashKey(path0, path1, ...)" when a hash key is defined
    private void appendHashKey(StringBuilder sb) {
        if (hashKey == null)
            return;

        sb.append(" @HashKey(");
        for (int i = 0; i < hashKey.numFields(); i++) {
            if (i > 0)
                sb.append(", ");
            sb.append(hashKey.getFieldPath(i));
        }
        sb.append(")");
    }

    @Override
    public void writeTo(OutputStream os) throws IOException {
        DataOutputStream dos = new DataOutputStream(os);

        // a distinct type id signals the presence of hash key field paths in the stream
        dos.write(getHashKey() != null ? SchemaType.SET.getTypeIdWithPrimaryKey() : SchemaType.SET.getTypeId());

        dos.writeUTF(getName());
        dos.writeUTF(getElementType());

        if (getHashKey() != null) {
            VarInt.writeVInt(dos, getHashKey().numFields());
            for (int i = 0; i < getHashKey().numFields(); i++) {
                dos.writeUTF(getHashKey().getFieldPath(i));
            }
        }
    }
}
9,127
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/FloatHollowFactory.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type; import com.netflix.hollow.api.custom.HollowTypeAPI; import com.netflix.hollow.api.objects.provider.HollowFactory; import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess; import com.netflix.hollow.core.type.delegate.FloatDelegateCachedImpl; public class FloatHollowFactory extends HollowFactory<HFloat> { @Override public HFloat newHollowObject(HollowTypeDataAccess dataAccess, HollowTypeAPI typeAPI, int ordinal) { return new HFloat(((FloatTypeAPI)typeAPI).getDelegateLookupImpl(), ordinal); } @Override public HFloat newCachedHollowObject(HollowTypeDataAccess dataAccess, HollowTypeAPI typeAPI, int ordinal) { return new HFloat(new FloatDelegateCachedImpl((FloatTypeAPI)typeAPI, ordinal), ordinal); } }
9,128
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/HFloat.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.type;

import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.objects.HollowObject;
import com.netflix.hollow.core.type.delegate.FloatDelegate;

/**
 * Hollow object wrapper for the Float type, exposing its single value field.
 */
public class HFloat extends HollowObject {

    public HFloat(FloatDelegate delegate, int ordinal) {
        super(delegate, ordinal);
    }

    /** @return the primitive float value of this record */
    public float getValue() {
        FloatDelegate floatDelegate = delegate();
        return floatDelegate.getValue(ordinal);
    }

    /** @return the boxed value of this record, possibly null */
    public Float getValueBoxed() {
        FloatDelegate floatDelegate = delegate();
        return floatDelegate.getValueBoxed(ordinal);
    }

    /** @return the top-level API this record was retrieved from */
    public HollowAPI api() {
        return typeApi().getAPI();
    }

    /** @return the type API for the Float type */
    public FloatTypeAPI typeApi() {
        return delegate().getTypeAPI();
    }

    protected FloatDelegate delegate() {
        return (FloatDelegate) delegate;
    }
}
9,129
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/BooleanHollowFactory.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type; import com.netflix.hollow.api.custom.HollowTypeAPI; import com.netflix.hollow.api.objects.provider.HollowFactory; import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess; import com.netflix.hollow.core.type.delegate.BooleanDelegateCachedImpl; public class BooleanHollowFactory extends HollowFactory<HBoolean> { @Override public HBoolean newHollowObject(HollowTypeDataAccess dataAccess, HollowTypeAPI typeAPI, int ordinal) { return new HBoolean(((BooleanTypeAPI)typeAPI).getDelegateLookupImpl(), ordinal); } @Override public HBoolean newCachedHollowObject(HollowTypeDataAccess dataAccess, HollowTypeAPI typeAPI, int ordinal) { return new HBoolean(new BooleanDelegateCachedImpl((BooleanTypeAPI)typeAPI, ordinal), ordinal); } }
9,130
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/LongHollowFactory.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type; import com.netflix.hollow.api.custom.HollowTypeAPI; import com.netflix.hollow.api.objects.provider.HollowFactory; import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess; import com.netflix.hollow.core.type.delegate.LongDelegateCachedImpl; public class LongHollowFactory extends HollowFactory<HLong> { @Override public HLong newHollowObject(HollowTypeDataAccess dataAccess, HollowTypeAPI typeAPI, int ordinal) { return new HLong(((LongTypeAPI)typeAPI).getDelegateLookupImpl(), ordinal); } @Override public HLong newCachedHollowObject(HollowTypeDataAccess dataAccess, HollowTypeAPI typeAPI, int ordinal) { return new HLong(new LongDelegateCachedImpl((LongTypeAPI)typeAPI, ordinal), ordinal); } }
9,131
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/FloatTypeAPI.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type; import com.netflix.hollow.api.custom.HollowAPI; import com.netflix.hollow.api.custom.HollowObjectTypeAPI; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.type.delegate.FloatDelegateLookupImpl; public class FloatTypeAPI extends HollowObjectTypeAPI { private final FloatDelegateLookupImpl delegateLookupImpl; public FloatTypeAPI(HollowAPI api, HollowObjectTypeDataAccess typeDataAccess) { super(api, typeDataAccess, new String[] { "value" }); this.delegateLookupImpl = new FloatDelegateLookupImpl(this); } public float getValue(int ordinal) { if(fieldIndex[0] == -1) return missingDataHandler().handleFloat("Float", ordinal, "value"); return getTypeDataAccess().readFloat(ordinal, fieldIndex[0]); } public Float getValueBoxed(int ordinal) { float f; if(fieldIndex[0] == -1) { f = missingDataHandler().handleFloat("Float", ordinal, "value"); } else { boxedFieldAccessSampler.recordFieldAccess(fieldIndex[0]); f = getTypeDataAccess().readFloat(ordinal, fieldIndex[0]); } return Float.isNaN(f) ? null : Float.valueOf(f); } public FloatDelegateLookupImpl getDelegateLookupImpl() { return delegateLookupImpl; } }
9,132
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/HLong.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy
 * of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package com.netflix.hollow.core.type;

import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.objects.HollowObject;
import com.netflix.hollow.core.type.delegate.LongDelegate;

/**
 * Hollow record wrapper exposing a single {@code long} "value" field,
 * read through a {@link LongDelegate} at this record's ordinal.
 */
public class HLong extends HollowObject {

    public HLong(LongDelegate delegate, int ordinal) {
        super(delegate, ordinal);
    }

    /** Returns the primitive long value of this record. */
    public long getValue() {
        return delegate().getValue(ordinal);
    }

    /** Returns the boxed value; may be {@code null} per the delegate's null encoding. */
    public Long getValueBoxed() {
        return delegate().getValueBoxed(ordinal);
    }

    /** The root API this record belongs to. */
    public HollowAPI api() {
        return typeApi().getAPI();
    }

    /** The type-specific API for Long records. */
    public LongTypeAPI typeApi() {
        return delegate().getTypeAPI();
    }

    protected LongDelegate delegate() {
        return (LongDelegate) delegate;
    }
}
9,133
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/HInteger.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy
 * of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package com.netflix.hollow.core.type;

import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.objects.HollowObject;
import com.netflix.hollow.core.type.delegate.IntegerDelegate;

/**
 * Hollow record wrapper exposing a single {@code int} "value" field,
 * read through an {@link IntegerDelegate} at this record's ordinal.
 */
public class HInteger extends HollowObject {

    public HInteger(IntegerDelegate delegate, int ordinal) {
        super(delegate, ordinal);
    }

    /** Returns the primitive int value of this record. */
    public int getValue() {
        return delegate().getValue(ordinal);
    }

    /** Returns the boxed value; may be {@code null} per the delegate's null encoding. */
    public Integer getValueBoxed() {
        return delegate().getValueBoxed(ordinal);
    }

    /** The root API this record belongs to. */
    public HollowAPI api() {
        return typeApi().getAPI();
    }

    /** The type-specific API for Integer records. */
    public IntegerTypeAPI typeApi() {
        return delegate().getTypeAPI();
    }

    protected IntegerDelegate delegate() {
        return (IntegerDelegate) delegate;
    }
}
9,134
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/IntegerHollowFactory.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type; import com.netflix.hollow.api.custom.HollowTypeAPI; import com.netflix.hollow.api.objects.provider.HollowFactory; import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess; import com.netflix.hollow.core.type.delegate.IntegerDelegateCachedImpl; public class IntegerHollowFactory extends HollowFactory<HInteger> { @Override public HInteger newHollowObject(HollowTypeDataAccess dataAccess, HollowTypeAPI typeAPI, int ordinal) { return new HInteger(((IntegerTypeAPI)typeAPI).getDelegateLookupImpl(), ordinal); } @Override public HInteger newCachedHollowObject(HollowTypeDataAccess dataAccess, HollowTypeAPI typeAPI, int ordinal) { return new HInteger(new IntegerDelegateCachedImpl((IntegerTypeAPI)typeAPI, ordinal), ordinal); } }
9,135
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/DoubleHollowFactory.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type; import com.netflix.hollow.api.custom.HollowTypeAPI; import com.netflix.hollow.api.objects.provider.HollowFactory; import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess; import com.netflix.hollow.core.type.delegate.DoubleDelegateCachedImpl; public class DoubleHollowFactory extends HollowFactory<HDouble> { @Override public HDouble newHollowObject(HollowTypeDataAccess dataAccess, HollowTypeAPI typeAPI, int ordinal) { return new HDouble(((DoubleTypeAPI)typeAPI).getDelegateLookupImpl(), ordinal); } @Override public HDouble newCachedHollowObject(HollowTypeDataAccess dataAccess, HollowTypeAPI typeAPI, int ordinal) { return new HDouble(new DoubleDelegateCachedImpl((DoubleTypeAPI)typeAPI, ordinal), ordinal); } }
9,136
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/HDouble.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy
 * of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package com.netflix.hollow.core.type;

import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.objects.HollowObject;
import com.netflix.hollow.core.type.delegate.DoubleDelegate;

/**
 * Hollow record wrapper exposing a single {@code double} "value" field,
 * read through a {@link DoubleDelegate} at this record's ordinal.
 */
public class HDouble extends HollowObject {

    public HDouble(DoubleDelegate delegate, int ordinal) {
        super(delegate, ordinal);
    }

    /** Returns the primitive double value of this record. */
    public double getValue() {
        return delegate().getValue(ordinal);
    }

    /** Returns the boxed value; may be {@code null} per the delegate's null encoding. */
    public Double getValueBoxed() {
        return delegate().getValueBoxed(ordinal);
    }

    /** The root API this record belongs to. */
    public HollowAPI api() {
        return typeApi().getAPI();
    }

    /** The type-specific API for Double records. */
    public DoubleTypeAPI typeApi() {
        return delegate().getTypeAPI();
    }

    protected DoubleDelegate delegate() {
        return (DoubleDelegate) delegate;
    }
}
9,137
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/BooleanTypeAPI.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type; import com.netflix.hollow.api.custom.HollowAPI; import com.netflix.hollow.api.custom.HollowObjectTypeAPI; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.type.delegate.BooleanDelegateLookupImpl; public class BooleanTypeAPI extends HollowObjectTypeAPI { private final BooleanDelegateLookupImpl delegateLookupImpl; public BooleanTypeAPI(HollowAPI api, HollowObjectTypeDataAccess typeDataAccess) { super(api, typeDataAccess, new String[] { "value" }); this.delegateLookupImpl = new BooleanDelegateLookupImpl(this); } public boolean getValue(int ordinal) { if(fieldIndex[0] == -1) return missingDataHandler().handleBoolean("Boolean", ordinal, "value") == Boolean.TRUE; return getTypeDataAccess().readBoolean(ordinal, fieldIndex[0]) == Boolean.TRUE; } public Boolean getValueBoxed(int ordinal) { if(fieldIndex[0] == -1) return missingDataHandler().handleBoolean("Boolean", ordinal, "value"); return getTypeDataAccess().readBoolean(ordinal, fieldIndex[0]); } public BooleanDelegateLookupImpl getDelegateLookupImpl() { return delegateLookupImpl; } }
9,138
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/HBoolean.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy
 * of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package com.netflix.hollow.core.type;

import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.objects.HollowObject;
import com.netflix.hollow.core.type.delegate.BooleanDelegate;

/**
 * Hollow record wrapper exposing a single {@code boolean} "value" field,
 * read through a {@link BooleanDelegate} at this record's ordinal.
 */
public class HBoolean extends HollowObject {

    public HBoolean(BooleanDelegate delegate, int ordinal) {
        super(delegate, ordinal);
    }

    /** Returns the primitive boolean value of this record. */
    public boolean getValue() {
        return delegate().getValue(ordinal);
    }

    /** Returns the boxed value; may be {@code null} per the delegate's null encoding. */
    public Boolean getValueBoxed() {
        return delegate().getValueBoxed(ordinal);
    }

    /** The root API this record belongs to. */
    public HollowAPI api() {
        return typeApi().getAPI();
    }

    /** The type-specific API for Boolean records. */
    public BooleanTypeAPI typeApi() {
        return delegate().getTypeAPI();
    }

    protected BooleanDelegate delegate() {
        return (BooleanDelegate) delegate;
    }
}
9,139
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/HString.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy
 * of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package com.netflix.hollow.core.type;

import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.objects.HollowObject;
import com.netflix.hollow.core.type.delegate.StringDelegate;

/**
 * Hollow record wrapper exposing a single {@code String} "value" field,
 * read through a {@link StringDelegate} at this record's ordinal.
 */
public class HString extends HollowObject {

    public HString(StringDelegate delegate, int ordinal) {
        super(delegate, ordinal);
    }

    /** Returns the String value of this record. */
    public String getValue() {
        return delegate().getValue(ordinal);
    }

    /** Compares this record's value against {@code testValue} without materializing it. */
    public boolean isValueEqual(String testValue) {
        return delegate().isValueEqual(ordinal, testValue);
    }

    /** The root API this record belongs to. */
    public HollowAPI api() {
        return typeApi().getAPI();
    }

    /** The type-specific API for String records. */
    public StringTypeAPI typeApi() {
        return delegate().getTypeAPI();
    }

    protected StringDelegate delegate() {
        return (StringDelegate) delegate;
    }
}
9,140
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/StringHollowFactory.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type; import com.netflix.hollow.api.custom.HollowTypeAPI; import com.netflix.hollow.api.objects.provider.HollowFactory; import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess; import com.netflix.hollow.core.type.delegate.StringDelegateCachedImpl; public class StringHollowFactory extends HollowFactory<HString> { @Override public HString newHollowObject(HollowTypeDataAccess dataAccess, HollowTypeAPI typeAPI, int ordinal) { return new HString(((StringTypeAPI)typeAPI).getDelegateLookupImpl(), ordinal); } @Override public HString newCachedHollowObject(HollowTypeDataAccess dataAccess, HollowTypeAPI typeAPI, int ordinal) { return new HString(new StringDelegateCachedImpl((StringTypeAPI)typeAPI, ordinal), ordinal); } }
9,141
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/IntegerTypeAPI.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type; import com.netflix.hollow.api.custom.HollowAPI; import com.netflix.hollow.api.custom.HollowObjectTypeAPI; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.type.delegate.IntegerDelegateLookupImpl; public class IntegerTypeAPI extends HollowObjectTypeAPI { private final IntegerDelegateLookupImpl delegateLookupImpl; public IntegerTypeAPI(HollowAPI api, HollowObjectTypeDataAccess typeDataAccess) { super(api, typeDataAccess, new String[] { "value" }); this.delegateLookupImpl = new IntegerDelegateLookupImpl(this); } public int getValue(int ordinal) { if(fieldIndex[0] == -1) return missingDataHandler().handleInt("Integer", ordinal, "value"); return getTypeDataAccess().readInt(ordinal, fieldIndex[0]); } public Integer getValueBoxed(int ordinal) { int i; if(fieldIndex[0] == -1) { i = missingDataHandler().handleInt("Integer", ordinal, "value"); } else { boxedFieldAccessSampler.recordFieldAccess(fieldIndex[0]); i = getTypeDataAccess().readInt(ordinal, fieldIndex[0]); } if(i == Integer.MIN_VALUE) return null; return Integer.valueOf(i); } public IntegerDelegateLookupImpl getDelegateLookupImpl() { return delegateLookupImpl; } }
9,142
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/LongTypeAPI.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type; import com.netflix.hollow.api.custom.HollowAPI; import com.netflix.hollow.api.custom.HollowObjectTypeAPI; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.type.delegate.LongDelegateLookupImpl; public class LongTypeAPI extends HollowObjectTypeAPI { private final LongDelegateLookupImpl delegateLookupImpl; public LongTypeAPI(HollowAPI api, HollowObjectTypeDataAccess typeDataAccess) { super(api, typeDataAccess, new String[] { "value" }); this.delegateLookupImpl = new LongDelegateLookupImpl(this); } public long getValue(int ordinal) { if(fieldIndex[0] == -1) return missingDataHandler().handleLong("Long", ordinal, "value"); return getTypeDataAccess().readLong(ordinal, fieldIndex[0]); } public Long getValueBoxed(int ordinal) { long l; if(fieldIndex[0] == -1) { l = missingDataHandler().handleLong("Long", ordinal, "value"); } else { boxedFieldAccessSampler.recordFieldAccess(fieldIndex[0]); l = getTypeDataAccess().readLong(ordinal, fieldIndex[0]); } if(l == Long.MIN_VALUE) return null; return Long.valueOf(l); } public LongDelegateLookupImpl getDelegateLookupImpl() { return delegateLookupImpl; } }
9,143
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/DoubleTypeAPI.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type; import com.netflix.hollow.api.custom.HollowAPI; import com.netflix.hollow.api.custom.HollowObjectTypeAPI; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.type.delegate.DoubleDelegateLookupImpl; public class DoubleTypeAPI extends HollowObjectTypeAPI { private final DoubleDelegateLookupImpl delegateLookupImpl; public DoubleTypeAPI(HollowAPI api, HollowObjectTypeDataAccess typeDataAccess) { super(api, typeDataAccess, new String[] { "value" }); this.delegateLookupImpl = new DoubleDelegateLookupImpl(this); } public double getValue(int ordinal) { if(fieldIndex[0] == -1) return missingDataHandler().handleDouble("Double", ordinal, "value"); return getTypeDataAccess().readDouble(ordinal, fieldIndex[0]); } public Double getValueBoxed(int ordinal) { double d; if(fieldIndex[0] == -1) { d = missingDataHandler().handleDouble("Double", ordinal, "value"); } else { boxedFieldAccessSampler.recordFieldAccess(fieldIndex[0]); d = getTypeDataAccess().readDouble(ordinal, fieldIndex[0]); } return Double.isNaN(d) ? null : Double.valueOf(d); } public DoubleDelegateLookupImpl getDelegateLookupImpl() { return delegateLookupImpl; } }
9,144
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/StringTypeAPI.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type; import com.netflix.hollow.api.custom.HollowAPI; import com.netflix.hollow.api.custom.HollowObjectTypeAPI; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.type.delegate.StringDelegateLookupImpl; public class StringTypeAPI extends HollowObjectTypeAPI { private final StringDelegateLookupImpl delegateLookupImpl; public StringTypeAPI(HollowAPI api, HollowObjectTypeDataAccess typeDataAccess) { super(api, typeDataAccess, new String[] { "value" }); this.delegateLookupImpl = new StringDelegateLookupImpl(this); } public String getValue(int ordinal) { if(fieldIndex[0] == -1) return missingDataHandler().handleString("String", ordinal, "value"); boxedFieldAccessSampler.recordFieldAccess(fieldIndex[0]); return getTypeDataAccess().readString(ordinal, fieldIndex[0]); } public boolean isValueEqual(int ordinal, String testValue) { if(fieldIndex[0] == -1) return missingDataHandler().handleStringEquals("String", ordinal, "value", testValue); return getTypeDataAccess().isStringFieldEqual(ordinal, fieldIndex[0], testValue); } public StringDelegateLookupImpl getDelegateLookupImpl() { return delegateLookupImpl; } }
9,145
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/accessor/DoubleDataAccessor.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type.accessor; import com.netflix.hollow.api.consumer.HollowConsumer; import com.netflix.hollow.api.consumer.HollowConsumerAPI; import com.netflix.hollow.api.consumer.data.AbstractHollowDataAccessor; import com.netflix.hollow.core.read.engine.HollowReadStateEngine; import com.netflix.hollow.core.type.HDouble; public class DoubleDataAccessor extends AbstractHollowDataAccessor<Double> { public static final String TYPE = "Double"; private HollowConsumerAPI.DoubleRetriever api; public DoubleDataAccessor(HollowConsumer consumer) { this(consumer.getStateEngine(), (HollowConsumerAPI.DoubleRetriever)consumer.getAPI()); } public DoubleDataAccessor(HollowReadStateEngine rStateEngine, HollowConsumerAPI.DoubleRetriever api) { super(rStateEngine, TYPE, "value"); this.api = api; } @Override public Double getRecord(int ordinal){ HDouble val = api.getHDouble(ordinal); return val == null ? null : val.getValueBoxed(); } }
9,146
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/accessor/FloatDataAccessor.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type.accessor; import com.netflix.hollow.api.consumer.HollowConsumer; import com.netflix.hollow.api.consumer.HollowConsumerAPI; import com.netflix.hollow.api.consumer.data.AbstractHollowDataAccessor; import com.netflix.hollow.core.read.engine.HollowReadStateEngine; import com.netflix.hollow.core.type.HFloat; public class FloatDataAccessor extends AbstractHollowDataAccessor<Float> { public static final String TYPE = "Float"; private HollowConsumerAPI.FloatRetriever api; public FloatDataAccessor(HollowConsumer consumer) { this(consumer.getStateEngine(), (HollowConsumerAPI.FloatRetriever)consumer.getAPI()); } public FloatDataAccessor(HollowReadStateEngine rStateEngine, HollowConsumerAPI.FloatRetriever api) { super(rStateEngine, TYPE, "value"); this.api = api; } @Override public Float getRecord(int ordinal){ HFloat val = api.getHFloat(ordinal); return val == null ? null : val.getValueBoxed(); } }
9,147
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/accessor/BooleanDataAccessor.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type.accessor; import com.netflix.hollow.api.consumer.HollowConsumer; import com.netflix.hollow.api.consumer.HollowConsumerAPI; import com.netflix.hollow.api.consumer.data.AbstractHollowDataAccessor; import com.netflix.hollow.core.read.engine.HollowReadStateEngine; import com.netflix.hollow.core.type.HBoolean; public class BooleanDataAccessor extends AbstractHollowDataAccessor<Boolean> { public static final String TYPE = "Boolean"; private HollowConsumerAPI.BooleanRetriever api; public BooleanDataAccessor(HollowConsumer consumer) { this(consumer.getStateEngine(), (HollowConsumerAPI.BooleanRetriever)consumer.getAPI()); } public BooleanDataAccessor(HollowReadStateEngine rStateEngine, HollowConsumerAPI.BooleanRetriever api) { super(rStateEngine, TYPE, "value"); this.api = api; } @Override public Boolean getRecord(int ordinal){ HBoolean val = api.getHBoolean(ordinal); return val == null ? null : val.getValueBoxed(); } }
9,148
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/accessor/IntegerDataAccessor.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type.accessor; import com.netflix.hollow.api.consumer.HollowConsumer; import com.netflix.hollow.api.consumer.HollowConsumerAPI; import com.netflix.hollow.api.consumer.data.AbstractHollowDataAccessor; import com.netflix.hollow.core.read.engine.HollowReadStateEngine; import com.netflix.hollow.core.type.HInteger; public class IntegerDataAccessor extends AbstractHollowDataAccessor<Integer> { public static final String TYPE = "Integer"; private HollowConsumerAPI.IntegerRetriever api; public IntegerDataAccessor(HollowConsumer consumer) { this(consumer.getStateEngine(), (HollowConsumerAPI.IntegerRetriever)consumer.getAPI()); } public IntegerDataAccessor(HollowReadStateEngine rStateEngine, HollowConsumerAPI.IntegerRetriever api) { super(rStateEngine, TYPE, "value"); this.api = api; } @Override public Integer getRecord(int ordinal){ HInteger val = api.getHInteger(ordinal); return val == null ? null : val.getValueBoxed(); } }
9,149
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/accessor/StringDataAccessor.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type.accessor; import com.netflix.hollow.api.consumer.HollowConsumer; import com.netflix.hollow.api.consumer.HollowConsumerAPI; import com.netflix.hollow.api.consumer.data.AbstractHollowDataAccessor; import com.netflix.hollow.core.read.engine.HollowReadStateEngine; import com.netflix.hollow.core.type.HString; public class StringDataAccessor extends AbstractHollowDataAccessor<String> { public static final String TYPE = "String"; private HollowConsumerAPI.StringRetriever api; public StringDataAccessor(HollowConsumer consumer) { this(consumer.getStateEngine(), (HollowConsumerAPI.StringRetriever)consumer.getAPI()); } public StringDataAccessor(HollowReadStateEngine rStateEngine, HollowConsumerAPI.StringRetriever api) { super(rStateEngine, TYPE, "value"); this.api = api; } @Override public String getRecord(int ordinal){ HString val = api.getHString(ordinal); return val == null ? null : val.getValue(); } }
9,150
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/accessor/LongDataAccessor.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type.accessor; import com.netflix.hollow.api.consumer.HollowConsumer; import com.netflix.hollow.api.consumer.HollowConsumerAPI; import com.netflix.hollow.api.consumer.data.AbstractHollowDataAccessor; import com.netflix.hollow.core.read.engine.HollowReadStateEngine; import com.netflix.hollow.core.type.HLong; public class LongDataAccessor extends AbstractHollowDataAccessor<Long> { public static final String TYPE = "Long"; private HollowConsumerAPI.LongRetriever api; public LongDataAccessor(HollowConsumer consumer) { this(consumer.getStateEngine(), (HollowConsumerAPI.LongRetriever)consumer.getAPI()); } public LongDataAccessor(HollowReadStateEngine rStateEngine, HollowConsumerAPI.LongRetriever api) { super(rStateEngine, TYPE, "value"); this.api = api; } @Override public Long getRecord(int ordinal){ HLong val = api.getHLong(ordinal); return val==null ? null : val.getValueBoxed(); } }
9,151
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/delegate/IntegerDelegate.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.hollow.core.type.delegate;

import com.netflix.hollow.api.objects.delegate.HollowObjectDelegate;
import com.netflix.hollow.core.type.IntegerTypeAPI;

/**
 * Delegate interface for the built-in {@code Integer} type: exposes the record's
 * {@code value} field in primitive and boxed form.
 */
public interface IntegerDelegate extends HollowObjectDelegate {

    /** Primitive {@code value} field of the record at {@code ordinal}. */
    public int getValue(int ordinal);

    /** Boxed {@code value} field of the record at {@code ordinal}. */
    public Integer getValueBoxed(int ordinal);

    @Override
    public IntegerTypeAPI getTypeAPI();

}
9,152
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/delegate/LongDelegate.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.hollow.core.type.delegate;

import com.netflix.hollow.api.objects.delegate.HollowObjectDelegate;
import com.netflix.hollow.core.type.LongTypeAPI;

/**
 * Delegate interface for the built-in {@code Long} type: exposes the record's
 * {@code value} field in primitive and boxed form.
 *
 * NOTE(review): the blanket {@code @SuppressWarnings("all")} present here was removed —
 * no sibling delegate interface (Integer/Float/Double/Boolean/String) carries it, and
 * suppressing every warning category on an interface hides real problems.
 */
public interface LongDelegate extends HollowObjectDelegate {

    /** Primitive {@code value} field of the record at {@code ordinal}. */
    public long getValue(int ordinal);

    /** Boxed {@code value} field of the record at {@code ordinal}. */
    public Long getValueBoxed(int ordinal);

    @Override
    public LongTypeAPI getTypeAPI();

}
9,153
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/delegate/FloatDelegateCachedImpl.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type.delegate; import com.netflix.hollow.api.custom.HollowTypeAPI; import com.netflix.hollow.api.objects.delegate.HollowCachedDelegate; import com.netflix.hollow.api.objects.delegate.HollowObjectAbstractDelegate; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.schema.HollowObjectSchema; import com.netflix.hollow.core.type.FloatTypeAPI; public class FloatDelegateCachedImpl extends HollowObjectAbstractDelegate implements HollowCachedDelegate, FloatDelegate { private final Float value; private FloatTypeAPI typeAPI; public FloatDelegateCachedImpl(FloatTypeAPI typeAPI, int ordinal) { this.value = typeAPI.getValueBoxed(ordinal); this.typeAPI = typeAPI; } @Override public float getValue(int ordinal) { if(value == null) return Float.NaN; return value.floatValue(); } @Override public Float getValueBoxed(int ordinal) { return value; } @Override public HollowObjectSchema getSchema() { return typeAPI.getTypeDataAccess().getSchema(); } @Override public HollowObjectTypeDataAccess getTypeDataAccess() { return typeAPI.getTypeDataAccess(); } @Override public FloatTypeAPI getTypeAPI() { return typeAPI; } @Override public void updateTypeAPI(HollowTypeAPI typeAPI) { this.typeAPI = (FloatTypeAPI) typeAPI; } }
9,154
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/delegate/BooleanDelegateLookupImpl.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type.delegate; import com.netflix.hollow.api.objects.delegate.HollowObjectAbstractDelegate; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.schema.HollowObjectSchema; import com.netflix.hollow.core.type.BooleanTypeAPI; public class BooleanDelegateLookupImpl extends HollowObjectAbstractDelegate implements BooleanDelegate { private final BooleanTypeAPI typeAPI; public BooleanDelegateLookupImpl(BooleanTypeAPI typeAPI) { this.typeAPI = typeAPI; } @Override public boolean getValue(int ordinal) { return typeAPI.getValue(ordinal); } @Override public Boolean getValueBoxed(int ordinal) { return typeAPI.getValueBoxed(ordinal); } @Override public BooleanTypeAPI getTypeAPI() { return typeAPI; } @Override public HollowObjectSchema getSchema() { return typeAPI.getTypeDataAccess().getSchema(); } @Override public HollowObjectTypeDataAccess getTypeDataAccess() { return typeAPI.getTypeDataAccess(); } }
9,155
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/delegate/StringDelegateCachedImpl.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type.delegate; import com.netflix.hollow.api.custom.HollowTypeAPI; import com.netflix.hollow.api.objects.delegate.HollowCachedDelegate; import com.netflix.hollow.api.objects.delegate.HollowObjectAbstractDelegate; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.schema.HollowObjectSchema; import com.netflix.hollow.core.type.StringTypeAPI; public class StringDelegateCachedImpl extends HollowObjectAbstractDelegate implements HollowCachedDelegate, StringDelegate { private final String value; private StringTypeAPI typeAPI; public StringDelegateCachedImpl(StringTypeAPI typeAPI, int ordinal) { this.value = typeAPI.getValue(ordinal); this.typeAPI = typeAPI; } @Override public String getValue(int ordinal) { return value; } @Override public boolean isValueEqual(int ordinal, String testValue) { if(testValue == null) return value == null; return testValue.equals(value); } @Override public HollowObjectSchema getSchema() { return typeAPI.getTypeDataAccess().getSchema(); } @Override public HollowObjectTypeDataAccess getTypeDataAccess() { return typeAPI.getTypeDataAccess(); } @Override public StringTypeAPI getTypeAPI() { return typeAPI; } @Override public void updateTypeAPI(HollowTypeAPI typeAPI) { this.typeAPI = (StringTypeAPI) typeAPI; } }
9,156
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/delegate/LongDelegateCachedImpl.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type.delegate; import com.netflix.hollow.api.custom.HollowTypeAPI; import com.netflix.hollow.api.objects.delegate.HollowCachedDelegate; import com.netflix.hollow.api.objects.delegate.HollowObjectAbstractDelegate; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.schema.HollowObjectSchema; import com.netflix.hollow.core.type.LongTypeAPI; public class LongDelegateCachedImpl extends HollowObjectAbstractDelegate implements HollowCachedDelegate, LongDelegate { private final Long value; private LongTypeAPI typeAPI; public LongDelegateCachedImpl(LongTypeAPI typeAPI, int ordinal) { this.value = typeAPI.getValueBoxed(ordinal); this.typeAPI = typeAPI; } @Override public long getValue(int ordinal) { if(value == null) return Long.MIN_VALUE; return value.longValue(); } @Override public Long getValueBoxed(int ordinal) { return value; } @Override public HollowObjectSchema getSchema() { return typeAPI.getTypeDataAccess().getSchema(); } @Override public HollowObjectTypeDataAccess getTypeDataAccess() { return typeAPI.getTypeDataAccess(); } @Override public LongTypeAPI getTypeAPI() { return typeAPI; } @Override public void updateTypeAPI(HollowTypeAPI typeAPI) { this.typeAPI = (LongTypeAPI) typeAPI; } }
9,157
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/delegate/DoubleDelegateCachedImpl.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type.delegate; import com.netflix.hollow.api.custom.HollowTypeAPI; import com.netflix.hollow.api.objects.delegate.HollowCachedDelegate; import com.netflix.hollow.api.objects.delegate.HollowObjectAbstractDelegate; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.schema.HollowObjectSchema; import com.netflix.hollow.core.type.DoubleTypeAPI; public class DoubleDelegateCachedImpl extends HollowObjectAbstractDelegate implements HollowCachedDelegate, DoubleDelegate { private final Double value; private DoubleTypeAPI typeAPI; public DoubleDelegateCachedImpl(DoubleTypeAPI typeAPI, int ordinal) { this.value = typeAPI.getValueBoxed(ordinal); this.typeAPI = typeAPI; } @Override public double getValue(int ordinal) { if(value == null) return Double.NaN; return value.doubleValue(); } @Override public Double getValueBoxed(int ordinal) { return value; } @Override public HollowObjectSchema getSchema() { return typeAPI.getTypeDataAccess().getSchema(); } @Override public HollowObjectTypeDataAccess getTypeDataAccess() { return typeAPI.getTypeDataAccess(); } @Override public DoubleTypeAPI getTypeAPI() { return typeAPI; } @Override public void updateTypeAPI(HollowTypeAPI typeAPI) { this.typeAPI = (DoubleTypeAPI) typeAPI; } }
9,158
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/delegate/FloatDelegateLookupImpl.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type.delegate; import com.netflix.hollow.api.objects.delegate.HollowObjectAbstractDelegate; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.schema.HollowObjectSchema; import com.netflix.hollow.core.type.FloatTypeAPI; public class FloatDelegateLookupImpl extends HollowObjectAbstractDelegate implements FloatDelegate { private final FloatTypeAPI typeAPI; public FloatDelegateLookupImpl(FloatTypeAPI typeAPI) { this.typeAPI = typeAPI; } @Override public float getValue(int ordinal) { return typeAPI.getValue(ordinal); } @Override public Float getValueBoxed(int ordinal) { return typeAPI.getValueBoxed(ordinal); } @Override public FloatTypeAPI getTypeAPI() { return typeAPI; } @Override public HollowObjectSchema getSchema() { return typeAPI.getTypeDataAccess().getSchema(); } @Override public HollowObjectTypeDataAccess getTypeDataAccess() { return typeAPI.getTypeDataAccess(); } }
9,159
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/delegate/StringDelegateLookupImpl.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type.delegate; import com.netflix.hollow.api.objects.delegate.HollowObjectAbstractDelegate; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.schema.HollowObjectSchema; import com.netflix.hollow.core.type.StringTypeAPI; public class StringDelegateLookupImpl extends HollowObjectAbstractDelegate implements StringDelegate { private final StringTypeAPI typeAPI; public StringDelegateLookupImpl(StringTypeAPI typeAPI) { this.typeAPI = typeAPI; } @Override public String getValue(int ordinal) { return typeAPI.getValue(ordinal); } @Override public boolean isValueEqual(int ordinal, String testValue) { return typeAPI.isValueEqual(ordinal, testValue); } @Override public StringTypeAPI getTypeAPI() { return typeAPI; } @Override public HollowObjectSchema getSchema() { return typeAPI.getTypeDataAccess().getSchema(); } @Override public HollowObjectTypeDataAccess getTypeDataAccess() { return typeAPI.getTypeDataAccess(); } }
9,160
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/delegate/BooleanDelegateCachedImpl.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type.delegate; import com.netflix.hollow.api.custom.HollowTypeAPI; import com.netflix.hollow.api.objects.delegate.HollowCachedDelegate; import com.netflix.hollow.api.objects.delegate.HollowObjectAbstractDelegate; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.schema.HollowObjectSchema; import com.netflix.hollow.core.type.BooleanTypeAPI; public class BooleanDelegateCachedImpl extends HollowObjectAbstractDelegate implements HollowCachedDelegate, BooleanDelegate { private final Boolean value; private BooleanTypeAPI typeAPI; public BooleanDelegateCachedImpl(BooleanTypeAPI typeAPI, int ordinal) { this.value = typeAPI.getValueBoxed(ordinal); this.typeAPI = typeAPI; } @Override public boolean getValue(int ordinal) { if(value == null) return false; return value.booleanValue(); } @Override public Boolean getValueBoxed(int ordinal) { return value; } @Override public HollowObjectSchema getSchema() { return typeAPI.getTypeDataAccess().getSchema(); } @Override public HollowObjectTypeDataAccess getTypeDataAccess() { return typeAPI.getTypeDataAccess(); } @Override public BooleanTypeAPI getTypeAPI() { return typeAPI; } @Override public void updateTypeAPI(HollowTypeAPI typeAPI) { this.typeAPI = (BooleanTypeAPI) typeAPI; } }
9,161
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/delegate/LongDelegateLookupImpl.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type.delegate; import com.netflix.hollow.api.objects.delegate.HollowObjectAbstractDelegate; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.schema.HollowObjectSchema; import com.netflix.hollow.core.type.LongTypeAPI; public class LongDelegateLookupImpl extends HollowObjectAbstractDelegate implements LongDelegate { private final LongTypeAPI typeAPI; public LongDelegateLookupImpl(LongTypeAPI typeAPI) { this.typeAPI = typeAPI; } @Override public long getValue(int ordinal) { return typeAPI.getValue(ordinal); } @Override public Long getValueBoxed(int ordinal) { return typeAPI.getValueBoxed(ordinal); } @Override public LongTypeAPI getTypeAPI() { return typeAPI; } @Override public HollowObjectSchema getSchema() { return typeAPI.getTypeDataAccess().getSchema(); } @Override public HollowObjectTypeDataAccess getTypeDataAccess() { return typeAPI.getTypeDataAccess(); } }
9,162
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/delegate/StringDelegate.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.hollow.core.type.delegate;

import com.netflix.hollow.api.objects.delegate.HollowObjectDelegate;
import com.netflix.hollow.core.type.StringTypeAPI;

/**
 * Delegate interface for the built-in {@code String} type: exposes the record's
 * {@code value} field and an equality check against it.
 */
public interface StringDelegate extends HollowObjectDelegate {

    /** {@code value} field of the record at {@code ordinal}. */
    public String getValue(int ordinal);

    /** Whether the {@code value} field at {@code ordinal} equals {@code testValue}. */
    public boolean isValueEqual(int ordinal, String testValue);

    @Override
    public StringTypeAPI getTypeAPI();

}
9,163
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/delegate/DoubleDelegateLookupImpl.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type.delegate; import com.netflix.hollow.api.objects.delegate.HollowObjectAbstractDelegate; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.schema.HollowObjectSchema; import com.netflix.hollow.core.type.DoubleTypeAPI; public class DoubleDelegateLookupImpl extends HollowObjectAbstractDelegate implements DoubleDelegate { private final DoubleTypeAPI typeAPI; public DoubleDelegateLookupImpl(DoubleTypeAPI typeAPI) { this.typeAPI = typeAPI; } @Override public double getValue(int ordinal) { return typeAPI.getValue(ordinal); } @Override public Double getValueBoxed(int ordinal) { return typeAPI.getValueBoxed(ordinal); } @Override public DoubleTypeAPI getTypeAPI() { return typeAPI; } @Override public HollowObjectSchema getSchema() { return typeAPI.getTypeDataAccess().getSchema(); } @Override public HollowObjectTypeDataAccess getTypeDataAccess() { return typeAPI.getTypeDataAccess(); } }
9,164
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/delegate/IntegerDelegateCachedImpl.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type.delegate; import com.netflix.hollow.api.custom.HollowTypeAPI; import com.netflix.hollow.api.objects.delegate.HollowCachedDelegate; import com.netflix.hollow.api.objects.delegate.HollowObjectAbstractDelegate; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.schema.HollowObjectSchema; import com.netflix.hollow.core.type.IntegerTypeAPI; public class IntegerDelegateCachedImpl extends HollowObjectAbstractDelegate implements HollowCachedDelegate, IntegerDelegate { private final Integer value; private IntegerTypeAPI typeAPI; public IntegerDelegateCachedImpl(IntegerTypeAPI typeAPI, int ordinal) { this.value = typeAPI.getValueBoxed(ordinal); this.typeAPI = typeAPI; } @Override public int getValue(int ordinal) { if(value == null) return Integer.MIN_VALUE; return value.intValue(); } @Override public Integer getValueBoxed(int ordinal) { return value; } @Override public HollowObjectSchema getSchema() { return typeAPI.getTypeDataAccess().getSchema(); } @Override public HollowObjectTypeDataAccess getTypeDataAccess() { return typeAPI.getTypeDataAccess(); } @Override public IntegerTypeAPI getTypeAPI() { return typeAPI; } @Override public void updateTypeAPI(HollowTypeAPI typeAPI) { this.typeAPI = (IntegerTypeAPI) typeAPI; } }
9,165
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/delegate/FloatDelegate.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.hollow.core.type.delegate;

import com.netflix.hollow.api.objects.delegate.HollowObjectDelegate;
import com.netflix.hollow.core.type.FloatTypeAPI;

/**
 * Delegate interface for the built-in {@code Float} type: exposes the record's
 * {@code value} field in primitive and boxed form.
 */
public interface FloatDelegate extends HollowObjectDelegate {

    /** Primitive {@code value} field of the record at {@code ordinal}. */
    public float getValue(int ordinal);

    /** Boxed {@code value} field of the record at {@code ordinal}. */
    public Float getValueBoxed(int ordinal);

    @Override
    public FloatTypeAPI getTypeAPI();

}
9,166
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/delegate/IntegerDelegateLookupImpl.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.type.delegate; import com.netflix.hollow.api.objects.delegate.HollowObjectAbstractDelegate; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.schema.HollowObjectSchema; import com.netflix.hollow.core.type.IntegerTypeAPI; public class IntegerDelegateLookupImpl extends HollowObjectAbstractDelegate implements IntegerDelegate { private final IntegerTypeAPI typeAPI; public IntegerDelegateLookupImpl(IntegerTypeAPI typeAPI) { this.typeAPI = typeAPI; } @Override public int getValue(int ordinal) { return typeAPI.getValue(ordinal); } @Override public Integer getValueBoxed(int ordinal) { return typeAPI.getValueBoxed(ordinal); } @Override public IntegerTypeAPI getTypeAPI() { return typeAPI; } @Override public HollowObjectSchema getSchema() { return typeAPI.getTypeDataAccess().getSchema(); } @Override public HollowObjectTypeDataAccess getTypeDataAccess() { return typeAPI.getTypeDataAccess(); } }
9,167
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/delegate/BooleanDelegate.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.hollow.core.type.delegate;

import com.netflix.hollow.api.objects.delegate.HollowObjectDelegate;
import com.netflix.hollow.core.type.BooleanTypeAPI;

/**
 * Delegate interface for the built-in {@code Boolean} type: exposes the record's
 * {@code value} field in primitive and boxed form.
 */
public interface BooleanDelegate extends HollowObjectDelegate {

    /** Primitive {@code value} field of the record at {@code ordinal}. */
    public boolean getValue(int ordinal);

    /** Boxed {@code value} field of the record at {@code ordinal}. */
    public Boolean getValueBoxed(int ordinal);

    @Override
    public BooleanTypeAPI getTypeAPI();

}
9,168
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/type/delegate/DoubleDelegate.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.hollow.core.type.delegate;

import com.netflix.hollow.api.objects.delegate.HollowObjectDelegate;
import com.netflix.hollow.core.type.DoubleTypeAPI;

/**
 * Delegate interface for the built-in {@code Double} type: exposes the record's
 * {@code value} field in primitive and boxed form.
 */
public interface DoubleDelegate extends HollowObjectDelegate {

    /** Primitive {@code value} field of the record at {@code ordinal}. */
    public double getValue(int ordinal);

    /** Boxed {@code value} field of the record at {@code ordinal}. */
    public Double getValueBoxed(int ordinal);

    @Override
    public DoubleTypeAPI getTypeAPI();

}
9,169
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/HollowHashIndexField.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.hollow.core.index;

import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;

/**
 * Describes one field participating in a hash index: the index of the field at
 * the base iterator plus the remaining field-position path to traverse from there.
 */
// Should this class be package private? It appears to be internal
public class HollowHashIndexField {

    // Field index at the base record, where path traversal starts.
    private final int baseIteratorFieldIdx;
    // Remaining path segments after the base iterator field.
    private final FieldPathSegment[] schemaFieldPositionPath;
    // Data access for the base type of the path.
    private final HollowTypeDataAccess baseDataAccess;
    // Type of the terminal field at the end of the path.
    private final FieldType fieldType;

    public HollowHashIndexField(int baseIteratorFieldIdx, FieldPathSegment[] remainingPath, HollowTypeDataAccess baseDataAccess, FieldType fieldType) {
        this.baseIteratorFieldIdx = baseIteratorFieldIdx;
        this.schemaFieldPositionPath = remainingPath;
        this.baseDataAccess = baseDataAccess;
        this.fieldType = fieldType;
    }

    public HollowTypeDataAccess getBaseDataAccess() {
        return baseDataAccess;
    }

    public int getBaseIteratorFieldIdx() {
        return baseIteratorFieldIdx;
    }

    public FieldPathSegment[] getSchemaFieldPositionPath() {
        return schemaFieldPositionPath;
    }

    /** Terminal segment of the path — the field whose value the index stores. */
    FieldPathSegment getLastFieldPositionPathElement() {
        return schemaFieldPositionPath[schemaFieldPositionPath.length - 1];
    }

    public FieldType getFieldType() {
        return fieldType;
    }

    /**
     * One step in a field path: a field position paired with the type data access
     * of the record that owns that field.
     */
    static class FieldPathSegment {
        /**
         * Field position for this segment of the path. For path {@code actor.name},
         * {@code actor} is 0 and {@code name} is 1.
         */
        private final int fieldPosition;

        /**
         * For the path {@code actor.name}, position 0 is {@code actor} and the data access is
         * {@code ThingThatReferencesActorDataAccess}. For {@code name}, position is 1 and data access
         * is {@code ActorTypeDataAccess}.
         */
        private final HollowObjectTypeDataAccess objectTypeDataAccess;

        FieldPathSegment(int fieldPosition, HollowObjectTypeDataAccess objectTypeDataAccess) {
            this.fieldPosition = fieldPosition;
            this.objectTypeDataAccess = objectTypeDataAccess;
        }

        /**
         * @param ordinal ordinal of record containing the desired field.
         * @return ordinal of the record referenced by the field
         */
        int getOrdinalForField(int ordinal) {
            return this.objectTypeDataAccess.readOrdinal(ordinal, fieldPosition);
        }

        int getSegmentFieldPosition() {
            return fieldPosition;
        }

        HollowObjectTypeDataAccess getObjectTypeDataAccess() {
            return objectTypeDataAccess;
        }
    }
}
9,170
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/GrowingSegmentedLongArray.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.index;

import com.netflix.hollow.core.memory.pool.ArraySegmentRecycler;
import java.util.Arrays;

/**
 * A sparse, growable array of longs backed by fixed-size segments obtained from an
 * {@link ArraySegmentRecycler}. The segment table doubles as needed and segments
 * are allocated lazily on first write; indexes that were never set read as 0.
 */
public class GrowingSegmentedLongArray {

    private long[][] segments;
    private final int log2OfSegmentSize;
    private final int bitmask;
    private final ArraySegmentRecycler memoryRecycler;

    public GrowingSegmentedLongArray(ArraySegmentRecycler memoryRecycler) {
        this.memoryRecycler = memoryRecycler;
        this.log2OfSegmentSize = memoryRecycler.getLog2OfLongSegmentSize();
        this.bitmask = (1 << log2OfSegmentSize) - 1;
        this.segments = new long[64][];
    }

    /**
     * Set the long at the given index to the specified value, growing the segment
     * table and allocating the backing segment on demand.
     *
     * @param index the index
     * @param value the long value
     */
    public void set(long index, long value) {
        int segmentIndex = (int)(index >> log2OfSegmentSize);

        if(segmentIndex >= segments.length) {
            // grow the segment table to the next power of two that can hold segmentIndex
            int nextPowerOfTwo = 1 << (32 - Integer.numberOfLeadingZeros(segmentIndex));
            segments = Arrays.copyOf(segments, nextPowerOfTwo);
        }

        if(segments[segmentIndex] == null) {
            segments[segmentIndex] = memoryRecycler.getLongArray();
        }

        int longInSegment = (int)(index & bitmask);
        segments[segmentIndex][longInSegment] = value;
    }

    /**
     * Get the value of the long at the specified index. Indexes that were never
     * set (or whose segment was never allocated) return 0.
     *
     * @param index the index
     * @return the long value
     */
    public long get(long index) {
        int segmentIndex = (int)(index >> log2OfSegmentSize);
        if(segmentIndex >= segments.length || segments[segmentIndex] == null)
            return 0;

        int longInSegment = (int)(index & bitmask);
        return segments[segmentIndex][longInSegment];
    }

    /**
     * Return all allocated segments to the recycler.
     * NOTE(review): the segment references are not cleared here, so reads/writes
     * after destroy() would touch recycled arrays — the instance is presumably
     * expected to be discarded after this call; confirm with callers.
     */
    public void destroy() {
        for(int i=0;i<segments.length;i++) {
            if(segments[i] != null)
                memoryRecycler.recycleLongArray(segments[i]);
        }
    }
}
9,171
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/HollowUniqueKeyIndex.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.index;

import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE;
import static java.util.Arrays.stream;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;

import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.core.index.HollowHashIndexField.FieldPathSegment;
import com.netflix.hollow.core.index.HollowPrimaryKeyIndex.PrimaryKeyIndexHashTable;
import com.netflix.hollow.core.index.key.HollowPrimaryKeyValueDeriver;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.memory.encoding.FixedLengthElementArray;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.memory.pool.ArraySegmentRecycler;
import com.netflix.hollow.core.memory.pool.WastefulRecycler;
import com.netflix.hollow.core.read.HollowReadFieldUtils;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.read.engine.HollowTypeStateListener;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * A HollowUniqueKeyIndex is a helper class for indexing and querying data in a Hollow Blob.
 * <p>
 * A primary key index can be used to index and query a type by a {@link PrimaryKey}. The provided {@link PrimaryKey} does
 * not have to be the same as declared as the default in the data model.
 * <p>
 * This class differs from {@link HollowPrimaryKeyIndex} in that it supports object longevity. HollowPrimaryKeyIndex can
 * be created when object longevity is enabled, but the index will become unusable after 2 or more deltas.
 * <p>
 * HollowUniqueKeyIndex serves the same purpose as HollowPrimaryKeyIndex, but will remain valid beyond 2 or more deltas
 * as long as object longevity is enabled. The primary difference between these two classes is the use of data accessors
 * vs. state engines.
 *
 * <b>WARNING: The HollowUniqueKeyIndex must be created from the <i>current</i> version. It cannot be created from
 * a data accessor of an outdated version. This limitation is caused by the getPopulatedOrdinals method that does not
 * work against old versions.</b>
 */
@SuppressWarnings("override")
public class HollowUniqueKeyIndex implements HollowTypeStateListener, TestableUniqueKeyIndex {
    private static final Logger LOG = Logger.getLogger(HollowUniqueKeyIndex.class.getName());

    /**
     * Data access object for the type being indexed. This must be a {@link HollowDataAccess} rather than
     * a {@link com.netflix.hollow.core.HollowStateEngine} to participate in object longevity.
     */
    private final HollowObjectTypeDataAccess objectTypeDataAccess;
    /**
     * Each field that is part of this index.
     */
    private final HollowHashIndexField[] fields;
    private final PrimaryKey primaryKey;
    private final ArraySegmentRecycler memoryRecycler;
    // When non-null, only these ordinals are indexed (and delta listening is disallowed).
    private final BitSet specificOrdinalsToIndex;

    // Swapped atomically on reindex/delta-update; readers retry until a consistent read.
    private volatile PrimaryKeyIndexHashTable hashTableVolatile;

    /**
     * <b>To support object longevity, the {@code HollowDataAccess} object must come from {@link HollowAPI#getDataAccess()}.</b>
     * <p>
     * If the index is being built from a {@link HollowConsumer.RefreshListener} method, be sure to use passed in HollowAPI's accessor
     * rather than the state engine. The state engine will change over time rendering this index invalid unless
     * {@link #listenForDeltaUpdates()} is enabled.
     *
     * @param hollowDataAccess hollow data access. <b>For object longevity, this must be from {@link HollowAPI#getDataAccess()}</b>
     * @param type type of object being indexed
     * @param fieldPaths paths to fields being indexed
     */
    public HollowUniqueKeyIndex(HollowDataAccess hollowDataAccess, String type, String... fieldPaths) {
        this(hollowDataAccess, WastefulRecycler.DEFAULT_INSTANCE, type, fieldPaths);
    }

    /**
     * <b>To support object longevity, the {@code HollowDataAccess} object must come from {@link HollowAPI#getDataAccess()}.</b>
     * <p>
     * If the index is being built from a {@link HollowConsumer.RefreshListener} method, be sure to use passed in HollowAPI's accessor
     * rather than the state engine. The state engine will change over time rendering this index invalid unless
     * {@link #listenForDeltaUpdates()} is enabled.
     *
     * @param hollowDataAccess hollow data access. <b>For object longevity, this must be from {@link HollowAPI#getDataAccess()}</b>
     * @param primaryKey primary key definition. This does not have to match the primary key defined in the type.
     */
    public HollowUniqueKeyIndex(HollowDataAccess hollowDataAccess, PrimaryKey primaryKey) {
        this(hollowDataAccess, primaryKey, WastefulRecycler.DEFAULT_INSTANCE);
    }

    /**
     * <b>To support object longevity, the {@code HollowDataAccess} object must come from {@link HollowAPI#getDataAccess()}.</b>
     * <p>
     * If the index is being built from a {@link HollowConsumer.RefreshListener} method, be sure to use passed in HollowAPI's accessor
     * rather than the state engine. The state engine will change over time rendering this index invalid unless
     * {@link #listenForDeltaUpdates()} is enabled.
     *
     * @param hollowDataAccess hollow data access. <b>For object longevity, this must be from {@link HollowAPI#getDataAccess()}</b>
     * @param type type of object being indexed
     * @param fieldPaths paths to fields being indexed
     * @param memoryRecycler memory recycler implementation
     */
    public HollowUniqueKeyIndex(HollowDataAccess hollowDataAccess, ArraySegmentRecycler memoryRecycler, String type, String... fieldPaths) {
        this(hollowDataAccess, PrimaryKey.create(hollowDataAccess, type, fieldPaths), memoryRecycler);
    }

    /**
     * <b>To support object longevity, the {@code HollowDataAccess} object must come from {@link HollowAPI#getDataAccess()}.</b>
     * <p>
     * If the index is being built from a {@link HollowConsumer.RefreshListener} method, be sure to use passed in HollowAPI's accessor
     * rather than the state engine. The state engine will change over time rendering this index invalid unless
     * {@link #listenForDeltaUpdates()} is enabled.
     *
     * @param hollowDataAccess hollow data access. <b>For object longevity, this must be from {@link HollowAPI#getDataAccess()}</b>
     * @param primaryKey primary key definition. This does not have to match the primary key defined in the type.
     * @param memoryRecycler memory recycler implementation
     */
    public HollowUniqueKeyIndex(HollowDataAccess hollowDataAccess, PrimaryKey primaryKey, ArraySegmentRecycler memoryRecycler) {
        this(hollowDataAccess, primaryKey, memoryRecycler, null);
    }

    /**
     * This initializer can be used to create a HollowUniqueKeyIndex which will only index a subset of the records in the specified type.
     *
     * @param hollowDataAccess the read state engine
     * @param primaryKey the primary key
     * @param memoryRecycler the memory recycler
     * @param specificOrdinalsToIndex the bit set
     */
    public HollowUniqueKeyIndex(HollowDataAccess hollowDataAccess, PrimaryKey primaryKey, ArraySegmentRecycler memoryRecycler, BitSet specificOrdinalsToIndex) {
        requireNonNull(primaryKey, "Hollow Primary Key Index creation failed because primaryKey was null");
        requireNonNull(hollowDataAccess, "Hollow Primary Key Index creation for type [" + primaryKey.getType() + "] failed because read state wasn't initialized");

        this.primaryKey = primaryKey;
        //Obviously, the type we're indexing must be an object... no point in indexing primitives, etc.
        this.objectTypeDataAccess = (HollowObjectTypeDataAccess) hollowDataAccess.getTypeDataAccess(primaryKey.getType());
        this.fields = new HollowHashIndexField[primaryKey.numFields()];
        this.memoryRecycler = memoryRecycler;

        for (int fieldIdx = 0; fieldIdx < primaryKey.numFields(); fieldIdx++) {
            //This is the field type of the final item on the path.
            FieldType fieldType = primaryKey.getFieldType(hollowDataAccess, fieldIdx);
            //This always starts at the "root" object that's being indexed
            HollowObjectTypeDataAccess currentDataAccess = this.objectTypeDataAccess;

            //For each segment of the path, this returns the field position relative within the containing object.
            //The 0th position is relative to primaryKey.getType().
            int[] fieldPathPositions = primaryKey.getFieldPathIndex(hollowDataAccess, fieldIdx);
            FieldPathSegment[] fieldPathElements = new FieldPathSegment[fieldPathPositions.length];
            for (int posIdx = 0; posIdx < fieldPathPositions.length; posIdx++) {
                if (currentDataAccess == null) {
                    //This gets set to null if the previous segment was a non-reference type (i.e. we can't traverse a primitive, etc).
                    throw new IllegalArgumentException("Path " + primaryKey.getFieldPath(fieldIdx) + " traverses a non-reference type. Non-reference types must be the last element of the path.");
                }
                int fieldPosition = fieldPathPositions[posIdx];
                fieldPathElements[posIdx] = new FieldPathSegment(fieldPosition, currentDataAccess);

                //Using schema.getReferencedTypeState(...) will always use the *current* version and not necessarily
                //the version associated with the hollowDataAccess used to create this. This will break object longevity.
                //As such, we have to do this indirect lookup that reaches data access objects through
                //hollowDataAccess. There is a non-zero cost to performing these lookups so we do them here rather than
                //at the place where we access the data.
                String referencedType = currentDataAccess.getSchema().getReferencedType(fieldPosition);
                if (referencedType != null) {
                    //This is instead of currentDataAccess.getSchema().getReferencedTypeState()
                    currentDataAccess = (HollowObjectTypeDataAccess) hollowDataAccess.getTypeDataAccess(referencedType);
                } else {
                    currentDataAccess = null;
                }
            }
            fields[fieldIdx] = new HollowHashIndexField(fieldIdx, fieldPathElements, currentDataAccess, fieldType);
        }

        this.specificOrdinalsToIndex = specificOrdinalsToIndex;

        reindex();
    }

    /**
     * Once called, this HollowUniqueKeyIndex will be kept up-to-date when deltas are applied to the indexed state engine.
     * <p>
     * This method should be called <b>before</b> any subsequent deltas occur after the index is created.
     * <p>
     * In order to prevent memory leaks, if this method is called and the index is no longer needed, call detachFromDeltaUpdates() before
     * discarding the index.
     */
    public void listenForDeltaUpdates() {
        if (specificOrdinalsToIndex != null)
            throw new IllegalStateException("Cannot listen for delta updates when indexing only specified ordinals!");
        if (!(objectTypeDataAccess instanceof HollowObjectTypeReadState))
            throw new IllegalStateException("Cannot listen for delta updates when objectTypeDataAccess is a "
                    + objectTypeDataAccess.getClass().getSimpleName()
                    + ". You may have created this index from a Data Access instance that has object longevity enabled.");

        ((HollowObjectTypeReadState) objectTypeDataAccess).addListener(this);
    }

    /**
     * Once called, this HollowUniqueKeyIndex will no longer be kept up-to-date when deltas are applied to the indexed state engine.
     * <p>
     * Call this method before discarding indexes which are currently listening for delta updates.
     */
    public void detachFromDeltaUpdates() {
        //We won't throw here. Just silently fail since it's unlikely this class was ever successfully added as a listener.
        if (objectTypeDataAccess instanceof HollowObjectTypeReadState)
            ((HollowObjectTypeReadState) objectTypeDataAccess).removeListener(this);
    }

    public HollowObjectTypeDataAccess getObjectTypeDataAccess() {
        return objectTypeDataAccess;
    }

    public PrimaryKey getPrimaryKey() {
        return primaryKey;
    }

    /** @return the field type of each indexed field, in index order */
    public List<FieldType> getFieldTypes() {
        return stream(fields).map(HollowHashIndexField::getFieldType).collect(toList());
    }

    /**
     * Query an index with a single specified field. The returned value with be the ordinal of the matching record.
     * <p>
     * Use a generated API or the Generic Object API to use the returned ordinal.
     *
     * @param key the field key
     * @return the matching ordinal for the key, otherwise -1 if the key is not present
     */
    public int getMatchingOrdinal(Object key) {
        if (isProvidedKeyCountNotEqualToIndexedFieldsCount(1))
            return ORDINAL_NONE;
        return getMatchingOrdinalImpl(key, null, null);
    }

    /**
     * Query an index with two specified fields. The returned value with be the ordinal of the matching record.
     * <p>
     * Use a generated API or the Generic Object API to use the returned ordinal.
     *
     * @param key0 the first field key
     * @param key1 the second field key
     * @return the matching ordinal for the two keys, otherwise -1 if the key is not present
     */
    public int getMatchingOrdinal(Object key0, Object key1) {
        if (isProvidedKeyCountNotEqualToIndexedFieldsCount(2))
            return ORDINAL_NONE;
        return getMatchingOrdinalImpl(key0, key1, null);
    }

    /**
     * Query an index with three specified fields. The returned value with be the ordinal of the matching record.
     * <p>
     * Use a generated API or the Generic Object API to use the returned ordinal.
     *
     * @param key0 the first field key
     * @param key1 the second field key
     * @param key2 the third field key
     * @return the matching ordinal for the three keys, otherwise -1 if the key is not present
     */
    public int getMatchingOrdinal(Object key0, Object key1, Object key2) {
        if (isProvidedKeyCountNotEqualToIndexedFieldsCount(3))
            return ORDINAL_NONE;
        return getMatchingOrdinalImpl(key0, key1, key2);
    }

    /**
     * Single implementation for up to 3 fields. There is a very tiny null array length check to determine whether to
     * use fields 1 and 2.
     *
     * @param key0 key for field 0
     * @param key1 key for field 1
     * @param key2 key for field 2
     * @return ordinal or {@link com.netflix.hollow.core.HollowConstants#ORDINAL_NONE}
     */
    private int getMatchingOrdinalImpl(
            Object key0,
            Object key1,
            Object key2) {
        int fieldCount = fields.length;

        int hashCode = generateKeyHashCode(key0, fields[0].getFieldType());
        if (fieldCount >= 2) {
            hashCode ^= generateKeyHashCode(key1, fields[1].getFieldType());
            if (fieldCount == 3) {
                hashCode ^= generateKeyHashCode(key2, fields[2].getFieldType());
            }
        }

        PrimaryKeyIndexHashTable hashTable;
        int ordinal;
        //Retry until the table reference is stable across the probe (lock-free read vs. concurrent reindex).
        do {
            hashTable = this.hashTableVolatile;
            int bucket = hashCode & hashTable.hashMask;
            ordinal = readOrdinal(hashTable, bucket);
            while (ordinal != ORDINAL_NONE) {
                if (keyMatches(key0, ordinal, 0)
                        && (fieldCount < 2 || keyMatches(key1, ordinal, 1))
                        && (fieldCount < 3 || keyMatches(key2, ordinal, 2))) {
                    //This is a match. Break and return the ordinal.
                    break;
                }
                bucket++;
                bucket &= hashTable.hashMask;
                ordinal = readOrdinal(hashTable, bucket);
            }
        } while (this.hashTableVolatile != hashTable);

        return ordinal;
    }

    /**
     * Query an index with four or more specified fields. The returned value with be the ordinal of the matching record.
     * <p>
     * Use a generated API or the Generic Object API to use the returned ordinal.
     *
     * @param keys the field keys
     * @return the matching ordinal for the keys, otherwise -1 if the key is not present
     */
    public int getMatchingOrdinal(Object... keys) {
        if (isProvidedKeyCountNotEqualToIndexedFieldsCount(keys.length))
            return ORDINAL_NONE;

        int hashCode = 0;
        for (int fieldIdx = 0; fieldIdx < keys.length; fieldIdx++)
            hashCode ^= generateKeyHashCode(keys[fieldIdx], fields[fieldIdx].getFieldType());

        PrimaryKeyIndexHashTable hashTable;
        int ordinal;
        do {
            hashTable = this.hashTableVolatile;
            int bucket = hashCode & hashTable.hashMask;
            ordinal = readOrdinal(hashTable, bucket);
            while (ordinal != -1) {
                if (keysAllMatch(ordinal, keys))
                    break;

                bucket++;
                bucket &= hashTable.hashMask;
                ordinal = readOrdinal(hashTable, bucket);
            }
        } while (hashTableVolatile != hashTable);

        return ordinal;
    }

    private boolean isProvidedKeyCountNotEqualToIndexedFieldsCount(int keyCount) {
        // mismatched number of fields or the table is empty
        return this.fields.length != keyCount
                || this.hashTableVolatile.bitsPerElement == 0;
    }

    // Buckets store ordinal+1 so that 0 means "empty"; subtracting 1 yields ORDINAL_NONE for empty.
    private int readOrdinal(PrimaryKeyIndexHashTable hashTable, int bucket) {
        return (int) hashTable.hashTable.getElementValue((long) hashTable.bitsPerElement * (long) bucket, hashTable.bitsPerElement) - 1;
    }

    // Hashes a query key the same way generateFieldHash hashes a stored record's field value.
    @SuppressWarnings("UnnecessaryUnboxing")
    private static int generateKeyHashCode(Object key, FieldType fieldType) {
        switch (fieldType) {
            case BOOLEAN:
                return HashCodes.hashInt(HollowReadFieldUtils.booleanHashCode((Boolean) key));
            case DOUBLE:
                return HashCodes.hashInt(HollowReadFieldUtils.doubleHashCode(((Double) key).doubleValue()));
            case FLOAT:
                return HashCodes.hashInt(HollowReadFieldUtils.floatHashCode(((Float) key).floatValue()));
            case INT:
                return HashCodes.hashInt(HollowReadFieldUtils.intHashCode(((Integer) key).intValue()));
            case LONG:
                return HashCodes.hashInt(HollowReadFieldUtils.longHashCode(((Long) key).longValue()));
            case REFERENCE:
                return HashCodes.hashInt(((Integer) key).intValue());
            case BYTES:
                return HashCodes.hashCode((byte[]) key);
            case STRING:
                return HashCodes.hashCode((String) key);
        }
        throw new IllegalArgumentException("I don't know how to hash a " + fieldType);
    }

    private void setHashTable(PrimaryKeyIndexHashTable hashTable) {
        this.hashTableVolatile = hashTable;
    }

    /**
     * @return whether this index contains duplicate records (two or more records mapping to a single primary key).
     */
    public boolean containsDuplicates() {
        return !getDuplicateKeys().isEmpty();
    }

    public synchronized Collection<Object[]> getDuplicateKeys() {
        PrimaryKeyIndexHashTable hashTable = hashTableVolatile;
        if (hashTable.bitsPerElement == 0)
            return Collections.emptyList();

        List<Object[]> duplicateKeys = new ArrayList<>();

        for (int i = 0; i < hashTable.hashTableSize; i++) {
            int ordinal = (int) hashTable.hashTable.getElementValue((long) i * (long) hashTable.bitsPerElement, hashTable.bitsPerElement) - 1;

            if (ordinal != -1) {
                //Compare against every entry in the probe run following this bucket.
                int compareBucket = (i + 1) & hashTable.hashMask;
                int compareOrdinal = (int) hashTable.hashTable.getElementValue((long) compareBucket * (long) hashTable.bitsPerElement, hashTable.bitsPerElement) - 1;
                while (compareOrdinal != -1) {
                    if (recordsHaveEqualKeys(ordinal, compareOrdinal))
                        duplicateKeys.add(getRecordKey(ordinal));

                    compareBucket = (compareBucket + 1) & hashTable.hashMask;
                    compareOrdinal = (int) hashTable.hashTable.getElementValue((long) compareBucket * (long) hashTable.bitsPerElement, hashTable.bitsPerElement) - 1;
                }
            }
        }

        return duplicateKeys;
    }

    @Override
    public void beginUpdate() {
    }

    @Override
    public void addedOrdinal(int ordinal) {
    }

    @Override
    public void removedOrdinal(int ordinal) {
    }

    // Delta updates of the index are disabled by default; see the comment in endUpdate().
    private static final boolean ALLOW_DELTA_UPDATE =
            Boolean.getBoolean("com.netflix.hollow.core.index.HollowUniqueKeyIndex.allowDeltaUpdate");

    @Override
    public synchronized void endUpdate() {
        HollowObjectTypeReadState typeState = (HollowObjectTypeReadState) this.objectTypeDataAccess.getTypeState();
        //This doesn't affect compatibility with object longevity since this only gets invoked
        //when the index is being updated.
        BitSet ordinals = typeState.getPopulatedOrdinals();

        int hashTableSize = HashCodes.hashTableSize(ordinals.cardinality());
        int bitsPerElement = (32 - Integer.numberOfLeadingZeros(typeState.maxOrdinal() + 1));

        PrimaryKeyIndexHashTable hashTable = hashTableVolatile;
        if (ALLOW_DELTA_UPDATE
                && hashTableSize == hashTable.hashTableSize
                && bitsPerElement == hashTable.bitsPerElement
                && shouldPerformDeltaUpdate()) {
            try {
                deltaUpdate(hashTableSize, bitsPerElement);
            } catch (OrdinalNotFoundException e) {
                /*
                It has been observed that delta updates can result in CPU spinning attempting to find
                a previous ordinal to remove.  It's not clear what the cause of the issue is but it
                does not appear to be data related (since the failure is not consistent when multiple
                instances update to the same version) nor concurrency related (since an update occurs
                in a synchronized block).  A rare possibility is it might be a C2 compiler issue.
                Changing the code shape may well fix that.  Attempts to reproduce this locally has so
                far failed.

                Given the importance of indexing a full reindex is performed on such a failure.
                This, however, will make it more difficult to detect such issues.
                This approach does not protect against the case where the index is corrupt and
                not yet detected, until a further update.  In such cases it may be possible for
                clients, in the interim of a forced reindex, to operate on a corrupt index:
                queries may incorrectly return no match.
                As such delta update of the index have been disabled by default.
                */
                LOG.log(Level.SEVERE, "Delta update of index failed. Performing a full reindex", e);

                reindex();
            }
        } else {
            reindex();
        }
    }

    public void destroy() {
        PrimaryKeyIndexHashTable hashTable = hashTableVolatile;
        if (hashTable != null)
            hashTable.hashTable.destroy(memoryRecycler);
    }

    // Builds a fresh open-addressed (linear probing) hash table over all populated ordinals.
    private synchronized void reindex() {
        PrimaryKeyIndexHashTable hashTable = hashTableVolatile;
        // Could be null on first reindex
        if (hashTable != null) {
            hashTable.hashTable.destroy(memoryRecycler);
        }

        HollowObjectTypeReadState typeState = (HollowObjectTypeReadState) this.objectTypeDataAccess.getTypeState();
        BitSet ordinals = specificOrdinalsToIndex;

        if (ordinals == null) {
            //Note: this call is what makes it impossible to create an index against a non-current client.
            //This works even when object longevity it turned on *ONLY* if it is created against the
            //current version. Otherwise, this will return a bit set that does not match the HollowAPI's
            //historic version. (This method gets called upon construction)
            ordinals = typeState.getPopulatedOrdinals();
        }

        int hashTableSize = HashCodes.hashTableSize(ordinals.cardinality());
        int bitsPerElement = (32 - Integer.numberOfLeadingZeros(typeState.maxOrdinal() + 1));

        FixedLengthElementArray hashedArray = new FixedLengthElementArray(memoryRecycler, (long) hashTableSize * (long) bitsPerElement);

        int hashMask = hashTableSize - 1;

        int ordinal = ordinals.nextSetBit(0);
        while (ordinal != ORDINAL_NONE) {
            int hashCode = generateRecordHash(ordinal);
            int bucket = hashCode & hashMask;

            while (hashedArray.getElementValue((long) bucket * (long) bitsPerElement, bitsPerElement) != 0)
                bucket = (bucket + 1) & hashMask;

            hashedArray.setElementValue((long) bucket * (long) bitsPerElement, bitsPerElement, ordinal + 1);

            ordinal = ordinals.nextSetBit(ordinal + 1);
        }

        setHashTable(new PrimaryKeyIndexHashTable(hashedArray, hashTableSize, hashMask, bitsPerElement));

        memoryRecycler.swap();
    }

    // Incrementally removes ordinals absent from the new state and adds newly-populated ones,
    // compacting probe runs after each removal so linear probing stays correct.
    private void deltaUpdate(int hashTableSize, int bitsPerElement) {
        // For a delta update hashTableVolatile cannot be null
        PrimaryKeyIndexHashTable hashTable = hashTableVolatile;
        hashTable.hashTable.destroy(memoryRecycler);

        HollowObjectTypeReadState typeState = (HollowObjectTypeReadState) this.objectTypeDataAccess.getTypeState();
        //This doesn't affect compatibility with object longevity since this only gets invoked
        //when the index is being updated.
        BitSet prevOrdinals = typeState.getPreviousOrdinals();
        BitSet ordinals = typeState.getPopulatedOrdinals();

        long totalBitsInHashTable = (long) hashTableSize * (long) bitsPerElement;
        FixedLengthElementArray hashedArray = new FixedLengthElementArray(memoryRecycler, totalBitsInHashTable);
        hashedArray.copyBits(hashTable.hashTable, 0, 0, totalBitsInHashTable);

        int hashMask = hashTableSize - 1;

        int prevOrdinal = prevOrdinals.nextSetBit(0);
        while (prevOrdinal != ORDINAL_NONE) {
            if (!ordinals.get(prevOrdinal)) {
                /// find and remove this ordinal
                int hashCode = generateRecordHash(prevOrdinal);
                int bucket = findOrdinalBucket(bitsPerElement, hashedArray, hashCode, hashMask, prevOrdinal);

                hashedArray.clearElementValue((long) bucket * (long) bitsPerElement, bitsPerElement);
                int emptyBucket = bucket;
                bucket = (bucket + 1) & hashMask;
                int moveOrdinal = (int) hashedArray.getElementValue((long) bucket * (long) bitsPerElement, bitsPerElement) - 1;

                //Shift subsequent probe-run entries back if their natural bucket falls outside
                //the (emptyBucket, bucket] range, keeping the run reachable.
                while (moveOrdinal != ORDINAL_NONE) {
                    int naturalHash = generateRecordHash(moveOrdinal);
                    int naturalBucket = naturalHash & hashMask;

                    if (!bucketInRange(emptyBucket, bucket, naturalBucket)) {
                        hashedArray.setElementValue((long) emptyBucket * (long) bitsPerElement, bitsPerElement, moveOrdinal + 1);
                        hashedArray.clearElementValue((long) bucket * (long) bitsPerElement, bitsPerElement);
                        emptyBucket = bucket;
                    }

                    bucket = (bucket + 1) & hashMask;
                    moveOrdinal = (int) hashedArray.getElementValue((long) bucket * (long) bitsPerElement, bitsPerElement) - 1;
                }
            }

            prevOrdinal = prevOrdinals.nextSetBit(prevOrdinal + 1);
        }

        int ordinal = ordinals.nextSetBit(0);
        while (ordinal != ORDINAL_NONE) {
            if (!prevOrdinals.get(ordinal)) {
                int hashCode = generateRecordHash(ordinal);
                int bucket = hashCode & hashMask;

                while (hashedArray.getElementValue((long) bucket * (long) bitsPerElement, bitsPerElement) != 0) {
                    bucket = (bucket + 1) & hashMask;
                }

                hashedArray.setElementValue((long) bucket * (long) bitsPerElement, bitsPerElement, ordinal + 1);
            }

            ordinal = ordinals.nextSetBit(ordinal + 1);
        }

        setHashTable(new PrimaryKeyIndexHashTable(hashedArray, hashTableSize, hashMask, bitsPerElement));

        memoryRecycler.swap();
    }

    // Linear-probes from the ordinal's natural bucket until the ordinal is found;
    // throws OrdinalNotFoundException on an empty slot or a full wrap-around.
    private int findOrdinalBucket(int bitsPerElement, FixedLengthElementArray hashedArray, int hashCode, int hashMask, int prevOrdinal) {
        int startBucket = hashCode & hashMask;
        int bucket = startBucket;
        long value;
        do {
            value = hashedArray.getElementValue((long) bucket * (long) bitsPerElement, bitsPerElement);
            if (prevOrdinal + 1 == value) {
                return bucket;
            }
            bucket = (bucket + 1) & hashMask;
        } while (value != 0 && bucket != startBucket);

        if (value == 0) {
            throw new OrdinalNotFoundException(String.format("Ordinal not found (found empty entry): "
                    + "ordinal=%d startBucket=%d", prevOrdinal, startBucket));
        } else {
            throw new OrdinalNotFoundException(String.format("Ordinal not found (wrapped around table): "
                    + "ordinal=%d startBucket=%d", prevOrdinal, startBucket));
        }
    }

    // True if testBucket lies in the circular range (fromBucket, toBucket].
    private boolean bucketInRange(int fromBucket, int toBucket, int testBucket) {
        if (toBucket > fromBucket) {
            return testBucket > fromBucket && testBucket <= toBucket;
        } else {
            return testBucket > fromBucket || testBucket <= toBucket;
        }
    }

    // XOR of the per-field hashes of the record at the given ordinal.
    private int generateRecordHash(int ordinal) {
        int hashCode = 0;
        for (int i = 0; i < fields.length; i++) {
            hashCode ^= generateFieldHash(ordinal, i);
        }
        return hashCode;
    }

    private int generateFieldHash(int ordinal, int fieldIdx) {
        //It is super important that all references to data accessors originated
        //from a HollowAPI to maintain support for object longevity. Do not get an accessor
        //from a Schema.
        HollowHashIndexField field = fields[fieldIdx];
        int lastPathIdx = field.getSchemaFieldPositionPath().length - 1;
        for (int pathIdx = 0; pathIdx < lastPathIdx; pathIdx++) {
            FieldPathSegment pathElement = field.getSchemaFieldPositionPath()[pathIdx];
            ordinal = pathElement.getOrdinalForField(ordinal);
        }

        //When the loop finishes, we should have the ordinal of the object containing the last field.
        FieldPathSegment lastPathElement = field.getLastFieldPositionPathElement();
        int hashCode = HollowReadFieldUtils.fieldHashCode(lastPathElement.getObjectTypeDataAccess(), ordinal, lastPathElement.getSegmentFieldPosition());

        switch (field.getFieldType()) {
            case STRING:
            case BYTES:
                return hashCode;
            default:
                return HashCodes.hashInt(hashCode);
        }
    }

    /** @return the key values of the record at the given ordinal, one entry per indexed field */
    public Object[] getRecordKey(int ordinal) {
        Object[] results = new Object[fields.length];

        for (int i = 0; i < fields.length; i++) {
            HollowHashIndexField field = fields[i];
            int lastPathOrdinal = getOrdinalForFieldPath(field, ordinal);
            FieldPathSegment lastElement = field.getLastFieldPositionPathElement();
            results[i] = HollowReadFieldUtils.fieldValueObject(lastElement.getObjectTypeDataAccess(), lastPathOrdinal, lastElement.getSegmentFieldPosition());
        }

        return results;
    }

    /**
     * @param ordinal ordinal of root object
     * @param field field to traverse
     * @return the ordinal of the second-to-last element. This ordinal can be used with the last path element
     * to retrieve the final ordinal
     */
    private int getOrdinalForFieldPath(HollowHashIndexField field, int ordinal) {
        //It is super important that all references to data accessors originated
        //from a HollowAPI to maintain support for object longevity. Do not get an accessor
        //from a Schema.
        FieldPathSegment[] pathElements = field.getSchemaFieldPositionPath();
        for (int posIdx = 0; posIdx < pathElements.length - 1; posIdx++) {
            FieldPathSegment fieldPathElement = pathElements[posIdx];
            ordinal = fieldPathElement.getOrdinalForField(ordinal);
        }
        return ordinal;
    }

    /**
     * This method is similar to {@link HollowPrimaryKeyValueDeriver#keyMatches(int, Object...)}.
     *
     * @param ordinal ordinal of record to match
     * @param keys keys to match against
     * @return true if object's keys matches the specified keys
     */
    private boolean keysAllMatch(int ordinal, Object... keys) {
        for (int i = 0; i < keys.length; i++) {
            if (!keyMatches(keys[i], ordinal, i))
                return false;
        }

        return true;
    }

    /**
     * This method is similar to {@link HollowPrimaryKeyValueDeriver#keyMatches(Object, int, int)}
     *
     * @param key key to match field against
     * @param recordOrdinal ordinal of record to match
     * @param fieldIdx index of field to match against
     * @return true if the object's field matches the specified key
     */
    private boolean keyMatches(Object key, int recordOrdinal, int fieldIdx) {
        //It is super important that all references to data accessors originated
        //from a HollowAPI to maintain support for object longevity. Do not get an accessor
        //from a Schema.
        HollowHashIndexField field = fields[fieldIdx];
        //ordinal of the last element of the path, starting from the recordOrdinal.
        int lastElementOrdinal = getOrdinalForFieldPath(field, recordOrdinal);
        FieldPathSegment lastPathElement = field.getLastFieldPositionPathElement();
        int lastPathPosition = lastPathElement.getSegmentFieldPosition();
        HollowObjectTypeDataAccess typeDataAccess = lastPathElement.getObjectTypeDataAccess();

        return HollowPrimaryKeyValueDeriver.keyMatches(key, field.getFieldType(), lastPathPosition, lastElementOrdinal, typeDataAccess);
    }

    private boolean recordsHaveEqualKeys(int ordinal1, int ordinal2) {
        for (int fieldIdx = 0; fieldIdx < fields.length; fieldIdx++) {
            if (!fieldsAreEqual(ordinal1, ordinal2, fieldIdx))
                return false;
        }
        return true;
    }

    private boolean fieldsAreEqual(int ordinal1, int ordinal2, int fieldIdx) {
        //It is super important that all references to data accessors originated
        //from a HollowAPI to maintain support for object longevity. Do not get an accessor
        //from a Schema.
        HollowHashIndexField field = fields[fieldIdx];

        FieldPathSegment[] fieldPathElements = field.getSchemaFieldPositionPath();
        for (int posIdx = 0; posIdx < fieldPathElements.length - 1; posIdx++) {
            FieldPathSegment pathElement = fieldPathElements[posIdx];
            ordinal1 = pathElement.getOrdinalForField(ordinal1);
            ordinal2 = pathElement.getOrdinalForField(ordinal2);
        }

        //Ordinals now reference the record that contains the last field value.
        //For a path with only one element, ordinal is unchanged. For a path with two elements,
        //ordinal will refer to the record for the first element. Using that ordinal, you can
        //then invoke lastPathElement.getOrdinal(ordinal) to get the final element.
        if (field.getFieldType() == FieldType.REFERENCE)
            return ordinal1 == ordinal2;

        FieldPathSegment lastPathElement = field.getLastFieldPositionPathElement();
        return HollowReadFieldUtils.fieldsAreEqual(
                lastPathElement.getObjectTypeDataAccess(), ordinal1, lastPathElement.getSegmentFieldPosition(),
                lastPathElement.getObjectTypeDataAccess(), ordinal2, lastPathElement.getSegmentFieldPosition());
    }

    // Heuristic: skip the delta path when more than 10% of previous records were removed.
    private boolean shouldPerformDeltaUpdate() {
        HollowObjectTypeReadState typeState = (HollowObjectTypeReadState) this.objectTypeDataAccess.getTypeState();
        //This doesn't affect compatibility with object longevity since this only gets invoked
        //when the index is being updated.
        BitSet previousOrdinals = typeState.getPreviousOrdinals();
        BitSet ordinals = typeState.getPopulatedOrdinals();

        int prevCardinality = 0;
        int removedRecords = 0;

        int prevOrdinal = previousOrdinals.nextSetBit(0);
        while (prevOrdinal != ORDINAL_NONE) {
            prevCardinality++;
            if (!ordinals.get(prevOrdinal))
                removedRecords++;

            prevOrdinal = previousOrdinals.nextSetBit(prevOrdinal + 1);
        }

        return !(removedRecords > prevCardinality * 0.1d);
    }

    // Thrown by findOrdinalBucket when a to-be-removed ordinal cannot be located during delta update.
    private static class OrdinalNotFoundException extends IllegalStateException {
        public OrdinalNotFoundException(String message) {
            super(message);
        }
    }
}
9,172
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/HollowPrefixIndex.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.index;

import static java.util.Objects.requireNonNull;

import com.netflix.hollow.core.memory.pool.ArraySegmentRecycler;
import com.netflix.hollow.core.memory.pool.WastefulRecycler;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeStateListener;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import java.util.BitSet;
import java.util.List;
import java.util.logging.Logger;

/**
 * This class builds a prefix index. A prefix index can be used to build applications like auto-complete or spell checker.
 * The current prefix index implementation is backed by a TST (Ternary Search Tree) that is capable of indexing multiple
 * elements per tree node.
 * <p>
 * Although TSTs are typically more space efficient than tries for prefix search, there are some important
 * considerations when using this implementation that can impact memory usage and query performance:
 * <p><ul>
 * <li> Memory utilization will be efficient with more duplicates in the indexed keys. This is because by default
 * the underlying TST reserves space for one key reference per node but as it encounters duplicate keys in the data it
 * dynamically resizes each node in the tree to hold multiple references. This leads to un-utilized space at nodes
 * corresponding to which there is less duplication in keys and memory churn from resize when building the index. If
 * measure of duplication of values is known upfront it can be specified during index initialization to minimize resizing.
 * <li> Future: bits per key defaults to 16 (for utf-16), but if the input character set can be represented in fewer bits
 * (for e.g. 1 for binary strings and 4 for hex strings) then the implementation could support a constructor that accepts
 * custom bitsPerKey to allocate less space per node.
 * </ul><p>
 * Includes methods for getting stats on memory usage and query performance.
 */
public class HollowPrefixIndex implements HollowTypeStateListener {
    private static final Logger LOG = Logger.getLogger(HollowPrefixIndex.class.getName());

    private final FieldPath fieldPath;
    private final HollowReadStateEngine readStateEngine;
    private final String type;
    private final int estimatedMaxStringDuplicates;
    private final boolean caseSensitive;

    // Queries race against rebuilds: readers snapshot this reference and re-check it
    // after reading, retrying if a rebuild swapped in a new tree mid-query.
    private volatile TST prefixIndexVolatile;
    private ArraySegmentRecycler memoryRecycle;

    // Sizing inputs for the backing TST, recomputed on every (re)build.
    private int totalWords;
    private int averageWordLen;
    private int maxOrdinalOfType;
    // Set when delta updates touch this type; cleared once the index is rebuilt.
    private boolean buildIndexOnUpdate;

    /**
     * Initializes a new prefix index that is case in-sensitive.
     *
     * This constructor defaults the estimatedMaxStringDuplicates to 1, however while building the index it is observed
     * that an indexed key references more than one records in the type then the prefix index dynamically resizes each node
     * to accommodate the multiple references. Note that this has an adverse impact on memory usage, both in terms of
     * memory footprint of prefix index and memory churn when building the index. If the expected number of duplicate
     * strings across the type are specified upfront (see other constructor) then the memory churn due to resizing can be avoided.
     *
     * @param readStateEngine state engine to read data from
     * @param type            type in the read state engine. Ordinals for this type
     *                        will be returned when queried for a prefix.
     * @param fieldPath       fieldPath should ultimately lead to a string field.
     *                        The fields in the path could reference another Object,
     *                        List, Set or a Map. The fields should be separated by ".".
     */
    public HollowPrefixIndex(HollowReadStateEngine readStateEngine, String type, String fieldPath) {
        this(readStateEngine, type, fieldPath, 1, false);
    }

    /**
     * Initializes a new prefix index.
     *
     * @param readStateEngine              state engine to read data from
     * @param type                         type in the read state engine. Ordinals for this type
     *                                     will be returned when queried for a prefix.
     * @param fieldPath                    fieldPath should ultimately lead to a string field.
     *                                     The fields in the path could reference another Object,
     *                                     List, Set or a Map. The fields should be separated by ".".
     * @param estimatedMaxStringDuplicates The estimated number of strings that are duplicated
     *                                     across instances of your type. Note that this means an
     *                                     exactly matching string, not a prefix match. A higher value will mean
     *                                     the prefix tree will reserve more memory to reference several elements per node.
     * @param caseSensitive                Specify the case sensitivity for indexing and querying
     */
    public HollowPrefixIndex(HollowReadStateEngine readStateEngine, String type, String fieldPath, int estimatedMaxStringDuplicates, boolean caseSensitive) {
        requireNonNull(type, "Hollow Prefix Key Index creation failed because type was null");
        requireNonNull(readStateEngine, "Hollow Prefix Key Index creation for type [" + type
                + "] failed because read state wasn't initialized");
        if (fieldPath == null || fieldPath.isEmpty())
            throw new IllegalArgumentException("fieldPath cannot be null or empty");
        if (estimatedMaxStringDuplicates < 1) {
            throw new IllegalArgumentException("estimatedMaxStringDuplicates cannot be < 1");
        }

        this.readStateEngine = readStateEngine;
        this.type = type;
        this.estimatedMaxStringDuplicates = estimatedMaxStringDuplicates;
        this.caseSensitive = caseSensitive;
        this.fieldPath = new FieldPath(readStateEngine, type, fieldPath);
        if (!this.fieldPath.getLastFieldType().equals(HollowObjectSchema.FieldType.STRING))
            throw new IllegalArgumentException("Field path should lead to a string type");

        // create memory recycle for using shared memory pools.
        memoryRecycle = WastefulRecycler.DEFAULT_INSTANCE;
        buildIndexOnUpdate = true;
        initialize();
    }

    // initialize field positions and field paths.
    private void initialize() {
        String lastRefType = this.fieldPath.getLastRefTypeInPath();

        // get all cardinality to estimate size of array bits needed.
        totalWords = readStateEngine.getTypeState(lastRefType).getPopulatedOrdinals().cardinality();
        averageWordLen = 0;
        double avg = 0;
        // Average string length over all populated ordinals of the key type.
        HollowObjectTypeReadState objectTypeReadState = (HollowObjectTypeReadState) readStateEngine.getTypeState(lastRefType);
        BitSet keyBitSet = objectTypeReadState.getPopulatedOrdinals();
        int ordinal = keyBitSet.nextSetBit(0);
        while (ordinal != -1) {
            avg += ((double) objectTypeReadState.readString(ordinal, 0).length()) / ((double) totalWords);
            ordinal = keyBitSet.nextSetBit(ordinal + 1);
        }
        averageWordLen = (int) Math.ceil(avg);

        HollowObjectTypeReadState valueState = (HollowObjectTypeReadState) readStateEngine.getTypeDataAccess(type);
        maxOrdinalOfType = valueState.maxOrdinal();

        // initialize the prefix index.
        build();
    }

    private void build() {
        if (!buildIndexOnUpdate) return;
        // tell memory recycler to use current tst's long arrays next time when long array is requested.
        // note reuse only happens once swap is called and bits are reset
        TST current = prefixIndexVolatile;
        if (current != null) current.recycleMemory(memoryRecycle);

        // This is a hard limit, and currently assumes worst case unbalanced tree i.e. the total length of all words
        long estimatedMaxNodes = estimateNumNodes(totalWords, averageWordLen);
        TST tst = new TST(estimatedMaxNodes, estimatedMaxStringDuplicates, maxOrdinalOfType,
                caseSensitive, memoryRecycle);
        BitSet ordinals = readStateEngine.getTypeState(type).getPopulatedOrdinals();
        int ordinal = ordinals.nextSetBit(0);
        while (ordinal != -1) {
            for (String key : getKeys(ordinal, caseSensitive)) {
                tst.insert(key, ordinal);
            }
            ordinal = ordinals.nextSetBit(ordinal + 1);
        }
        // Publish the freshly-built tree; readers pick it up via the volatile read.
        prefixIndexVolatile = tst;
        // safe to return previous long arrays on next request for long array.
        memoryRecycle.swap();
        buildIndexOnUpdate = false;

        Stats stats = usageStats();
        LOG.info("Prefix index built with stats= [" + stats + "]");
    }

    /**
     * Estimates the total number of nodes that will be required to create the index.
     * Override this method if lower/higher estimate is needed as compared to the default implementation, but note that
     * this imposes an underlying hard limit until the backing implementation starts supporting resizing dynamically.
     *
     * @param totalWords     the total number of words
     * @param averageWordLen the average word length
     * @return the estimated total number of nodes
     */
    @SuppressWarnings("WeakerAccess")
    protected long estimateNumNodes(long totalWords, long averageWordLen) {
        return totalWords * averageWordLen;
    }

    /**
     * Return the key to index in prefix index. Override this method to support tokens for the key. Note that care must
     * be taken to not return null or empty string tokens as the prefix index does not support indexing nulls or empty string.
     * <pre>{@code
     *     String[] keys = super.getKey(ordinal, false);
     *     String[] tokens = keys[0].split(" ")
     *     return tokens;
     * }</pre>
     *
     * @param ordinal       ordinal of the parent type.
     * @param caseSensitive controls whether to maintain casing when indexing.
     * @return keys to index.
     */
    protected String[] getKeys(int ordinal, boolean caseSensitive) {
        Object[] values = fieldPath.findValues(ordinal);
        String[] stringValues = new String[values.length];
        for (int i = 0; i < values.length; i++) {
            if (caseSensitive) {
                stringValues[i] = (String) values[i];
            } else {
                stringValues[i] = ((String) values[i]).toLowerCase();
            }
        }
        return stringValues;
    }

    /**
     * @deprecated see getKeys(int ordinal, boolean caseSensitive)
     */
    @Deprecated
    protected String[] getKeys(int ordinal) {
        return getKeys(ordinal, false);
    }

    /**
     * Query the index to find all the ordinals that match the given prefix. Example -
     * <pre>{@code
     *     HollowOrdinalIterator iterator = index.findKeysWithPrefix("a");
     *     int ordinal = iterator.next();
     *     while(ordinal != HollowOrdinalIterator.NO_MORE_ORDINAL) {
     *         // print the result using API
     *     }
     * }</pre>
     * <p>
     * For larger data sets, querying shorter prefixes will return more results than longer prefixes. Passing a prefix of
     * empty String "" will return all ordinals indexed.
     *
     * @param prefix findKeysWithPrefix prefix.
     * @return An instance of HollowOrdinalIterator to iterate over ordinals that match the given findKeysWithPrefix.
     */
    public HollowOrdinalIterator findKeysWithPrefix(String prefix) {
        TST current;
        HollowOrdinalIterator it;
        // Retry until the tree reference is stable across the query, so a concurrent
        // rebuild cannot hand back results from a recycled tree.
        do {
            current = prefixIndexVolatile;
            it = current.findKeysWithPrefix(prefix);
        } while (current != this.prefixIndexVolatile);
        return it;
    }

    /**
     * Query the index to find the longest matching prefix of key that was indexed. Note that this matches against full
     * tokens indexed in prefix index, and not against substrings of tokens for e.g. if "abc" and "abcd" were indexed
     * then findLongestMatch("abce") will return a list containing only ordinal corresponding to "abc" and
     * findLongestMatch("ab") will return no matches (in the form of an empty list).
     * If the tokens indexed in the prefix index reference unique values then the result will contain upto one ordinal.
     * In other words, the returned list contains multiple elements only if duplicate tokens were indexed.
     *
     * <pre>{@code
     *     List<Integer> matches = index.findLongestMatch("matrix");
     *     // if each token indexed in the prefix index points to a unique value then
     *     for (Integer ordinal : matches) {
     *         // print the result using API for e.g. api.getMovie(ordinal)
     *     }
     * }</pre>
     * <p>
     *
     * @param key a string for which the longest indexed substring needs to be found
     * @return A list of ordinals corresponding to the longest matching prefix
     */
    public List<Integer> findLongestMatch(String key) {
        TST current;
        List<Integer> ordinals;
        // Same consistent-snapshot retry loop as findKeysWithPrefix.
        do {
            current = prefixIndexVolatile;
            long nodeIndex = current.findLongestMatch(key);
            ordinals = current.getOrdinals(nodeIndex);
        } while (current != this.prefixIndexVolatile);
        return ordinals;
    }

    /**
     * Check if the given key exists in the index.
     *
     * @param key the key
     * @return {@code true} if the key exists, otherwise {@code false}
     */
    public boolean contains(String key) {
        if (key == null)
            throw new IllegalArgumentException("key cannot be null");
        TST current;
        boolean result;
        // Same consistent-snapshot retry loop as findKeysWithPrefix.
        do {
            current = prefixIndexVolatile;
            result = current.contains(key);
        } while (current != this.prefixIndexVolatile);
        return result;
    }

    /**
     * Use this method to keep the index updated with delta changes on the read state engine.
     * Remember to call detachFromDeltaUpdates to stop the delta changes.
     * NOTE: Each delta updates creates a new prefix index and swaps the new with current.
     */
    @SuppressWarnings("WeakerAccess")
    public void listenForDeltaUpdates() {
        readStateEngine.getTypeState(type).addListener(this);
    }

    /**
     * Stop delta updates for this index.
     */
    @SuppressWarnings("WeakerAccess")
    public void detachFromDeltaUpdates() {
        readStateEngine.getTypeState(type).removeListener(this);
    }

    @Override
    public void beginUpdate() {
        // before delta is applied -> no action to be taken
    }

    @Override
    public void addedOrdinal(int ordinal) {
        buildIndexOnUpdate = true;
    }

    @Override
    public void removedOrdinal(int ordinal) {
        buildIndexOnUpdate = true;
    }

    @Override
    public void endUpdate() {
        // pass 1 for delta support - rebuild the tree and swap the new tree with the one that is serving the queries.
        // next pass - improve the index build time or add support for remove method.
        initialize();
    }

    /**
     * Returns memory usage stats for the prefix index. Not thread-safe with concurrent updates to index.
     *
     * @return approx heap footprint in bytes
     */
    public Stats usageStats() {
        Stats stats = new Stats();
        stats.nodesCapacity = prefixIndexVolatile.getMaxNodes();
        stats.nodesUsed = prefixIndexVolatile.getNumNodes();
        stats.nodesEmpty = prefixIndexVolatile.getEmptyNodes();
        stats.worstCaseLookups = prefixIndexVolatile.getMaxDepth();
        stats.maxValuesPerNode = prefixIndexVolatile.getMaxElementsPerNode();
        stats.approxHeapFootprintInBytes = prefixIndexVolatile.approxHeapFootprintInBytes();
        return stats;
    }

    // Snapshot of the backing tree's size/shape metrics; see usageStats().
    public static class Stats {
        long nodesCapacity;    // allocated capacity in underlying tree
        long nodesUsed;    // utilized nodes
        long nodesEmpty;    // un-utilized nodes (capacity - utilized)
        long worstCaseLookups;  // no. of nodes looked up for serving worst case query
        int maxValuesPerNode;   // a single tree node reserves capacity to reference upto these many records
        long approxHeapFootprintInBytes;    // approx heap footprint of tree

        @Override
        public String toString() {
            return "nodesCapacity=" + nodesCapacity + ", "
                    + "nodesUsed=" + nodesUsed + ", "
                    + "nodesEmpty=" + nodesEmpty + ", "
                    + "worstCaseLookups=" + worstCaseLookups + ", "
                    + "maxValuesPerNode=" + maxValuesPerNode + ", "
                    + "approxHeapFootprintInBytes=" + approxHeapFootprintInBytes;
        }
    }
}
9,173
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/HollowPreindexer.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.index;

import static java.util.stream.Collectors.joining;

import com.netflix.hollow.core.index.HollowHashIndexField.FieldPathSegment;
import com.netflix.hollow.core.index.traversal.HollowIndexerValueTraverser;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess;
import com.netflix.hollow.core.schema.HollowCollectionSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSchema;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Should this class be package private? It appears to be internal
// Resolves the select field and match fields of a hash index into field path
// specifications and a value traverser, prior to building the index itself.
public class HollowPreindexer {

    private final HollowDataAccess stateEngine;
    private final String type;
    private final String selectField;
    private final String[] matchFields;

    // Populated by buildFieldSpecifications(); read via the getters below.
    private HollowTypeDataAccess typeState;
    private HollowHashIndexField[] matchFieldSpecs;
    private int numMatchTraverserFields;
    private HollowHashIndexField selectFieldSpec;
    private HollowIndexerValueTraverser traverser;

    public HollowPreindexer(HollowDataAccess stateEngine, String type, String selectField, String... matchFields) {
        this.stateEngine = stateEngine;
        this.type = type;
        this.selectField = selectField;
        this.matchFields = matchFields;
    }

    // Resolves all match-field and select-field paths, assigns each distinct base
    // path an index, and constructs the traverser over those base fields.
    public void buildFieldSpecifications() {
        Map<String, Integer> baseFieldToIndexMap = new HashMap<>();

        this.typeState = stateEngine.getTypeDataAccess(type);

        matchFieldSpecs = new HollowHashIndexField[matchFields.length];
        for(int i=0;i<matchFields.length;i++) {
            matchFieldSpecs[i] = getHollowHashIndexField(typeState, matchFields[i], baseFieldToIndexMap, true);
        }

        // Only base paths discovered while resolving match fields count here;
        // the select field is resolved afterwards.
        numMatchTraverserFields = baseFieldToIndexMap.size();

        selectFieldSpec = getHollowHashIndexField(typeState, selectField, baseFieldToIndexMap, false);

        // Invert the map: position each base path at its assigned index.
        String[] baseFields = new String[baseFieldToIndexMap.size()];
        for(Map.Entry<String, Integer> entry : baseFieldToIndexMap.entrySet()) {
            baseFields[entry.getValue()] = entry.getKey();
        }

        traverser = new HollowIndexerValueTraverser(stateEngine, type, baseFields);
    }

    /**
     * Resolves a dotted field path into a {@link HollowHashIndexField}, registering its
     * base path (the portion up to the last collection/map hop, or the full path when
     * {@code truncate} is false) in {@code baseFieldToIndexMap}.
     *
     * @param originalDataAccess  data access for the root type of the path
     * @param selectField         the dotted field path to resolve
     * @param baseFieldToIndexMap accumulates base path -> index assignments
     * @param truncate            when true (match fields) the base path stops before the
     *                            trailing object segments; when false (select field) it
     *                            extends through them
     */
    private HollowHashIndexField getHollowHashIndexField(HollowTypeDataAccess originalDataAccess, String selectField,
            Map<String, Integer> baseFieldToIndexMap, boolean truncate) {
        FieldPaths.FieldPath<FieldPaths.FieldSegment> path =
                FieldPaths.createFieldPathForHashIndex(stateEngine, type, selectField);

        HollowTypeDataAccess baseTypeState = originalDataAccess;
        int baseFieldPathIdx = 0;
        List<FieldPaths.FieldSegment> segments = path.getSegments();
        FieldPathSegment[] fieldPathIndexes = new FieldPathSegment[segments.size()];
        FieldType fieldType = FieldType.REFERENCE;
        for (int i = 0; i < segments.size(); i++) {
            FieldPaths.FieldSegment segment = segments.get(i);
            HollowSchema schema = segment.enclosingSchema;
            switch (schema.getSchemaType()) {
                case OBJECT:
                    FieldPaths.ObjectFieldSegment objectSegment = (FieldPaths.ObjectFieldSegment) segment;
                    fieldType = objectSegment.getType();
                    int fieldPosition = objectSegment.getIndex();
                    HollowTypeDataAccess typeDataAccess = originalDataAccess.getDataAccess().getTypeDataAccess(objectSegment.getEnclosingSchema().getName());
                    fieldPathIndexes[i] = new FieldPathSegment(fieldPosition, (HollowObjectTypeDataAccess) typeDataAccess);
                    if(!truncate) baseFieldPathIdx = i + 1;
                    break;
                case SET:
                case LIST:
                    // Collection hops reset the base type to the element type.
                    fieldType = FieldType.REFERENCE;
                    HollowCollectionSchema collectionSchema = (HollowCollectionSchema) schema;
                    baseTypeState = originalDataAccess.getDataAccess().getTypeDataAccess(collectionSchema.getElementType());
                    baseFieldPathIdx = i + 1;
                    break;
                case MAP:
                    // Map hops reset the base type to the key or value type,
                    // depending on which side the path traverses.
                    fieldType = FieldType.REFERENCE;
                    HollowMapSchema mapSchema = (HollowMapSchema) schema;
                    boolean isKey = "key".equals(segment.getName());
                    String elementType = isKey ? mapSchema.getKeyType() : mapSchema.getValueType();
                    baseTypeState = originalDataAccess.getDataAccess().getTypeDataAccess(elementType);
                    baseFieldPathIdx = i + 1;
                    break;
            }
        }

        String basePath = segments.stream().limit(baseFieldPathIdx)
                .map(FieldPaths.FieldSegment::getName)
                .collect(joining("."));
        // Assign the next index to a newly-seen base path.
        int basePathIdx = baseFieldToIndexMap.computeIfAbsent(basePath, k -> baseFieldToIndexMap.size());

        return new HollowHashIndexField(basePathIdx,
                Arrays.copyOfRange(fieldPathIndexes, baseFieldPathIdx, fieldPathIndexes.length),
                baseTypeState, fieldType);
    }

    public HollowTypeDataAccess getHollowTypeDataAccess() {
        return typeState;
    }

    public HollowHashIndexField[] getMatchFieldSpecs() {
        return matchFieldSpecs;
    }

    public int getNumMatchTraverserFields() {
        return numMatchTraverserFields;
    }

    public HollowHashIndexField getSelectFieldSpec() {
        return selectFieldSpec;
    }

    public HollowIndexerValueTraverser getTraverser() {
        return traverser;
    }
}
9,174
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/HollowHashIndexResult.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.index;

import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import java.util.Spliterator;
import java.util.function.IntConsumer;
import java.util.stream.IntStream;
import java.util.stream.StreamSupport;

/**
 * A HollowHashIndexResult contains the matches for a query to a {@link HollowHashIndex}.
 *
 */
public class HollowHashIndexResult {

    private final HollowHashIndex.HollowHashIndexState hashIndexState;
    // Matches live in a contiguous run of hash buckets in the select hash array,
    // starting at this pointer. Stored values are ordinal+1, so 0 means "empty".
    private final long selectTableStartPointer;
    private final int selectTableSize;
    private final int selectTableBuckets;
    private final int selectBucketMask;

    HollowHashIndexResult(HollowHashIndex.HollowHashIndexState hashIndexState, long selectTableStartPointer, int selectTableSize) {
        this.hashIndexState = hashIndexState;
        this.selectTableStartPointer = selectTableStartPointer;
        this.selectTableSize = selectTableSize;
        this.selectTableBuckets = HashCodes.hashTableSize(selectTableSize);
        this.selectBucketMask = selectTableBuckets - 1;
    }

    /**
     * @return the number of matched records
     */
    public int numResults() {
        return selectTableSize;
    }

    /**
     * @param value the ordinal
     * @return {@code true} if the ordinal is matched, otherwise {@code false}
     */
    public boolean contains(int value) {
        int hash = HashCodes.hashInt(value);
        int bucket = hash & selectBucketMask;
        // Linear probing: -1 (a stored 0) marks an empty bucket and ends the probe.
        int selectOrdinal = (int) hashIndexState.getSelectHashArray().getElementValue((selectTableStartPointer + bucket) * hashIndexState.getBitsPerSelectHashEntry(), hashIndexState.getBitsPerSelectHashEntry()) - 1;
        while(selectOrdinal != -1) {
            if(selectOrdinal == value)
                return true;
            bucket = (bucket + 1) & selectBucketMask;
            selectOrdinal = (int) hashIndexState.getSelectHashArray().getElementValue((selectTableStartPointer + bucket) * hashIndexState.getBitsPerSelectHashEntry(), hashIndexState.getBitsPerSelectHashEntry()) - 1;
        }
        return false;
    }

    /**
     * @return A {@link HollowOrdinalIterator} over the matched ordinals.  The ordinals may be used with a generated API or the Generic Object API to inspect
     * the matched records.
     */
    public HollowOrdinalIterator iterator() {
        return new HollowOrdinalIterator() {
            final long endBucket = selectTableStartPointer + selectTableBuckets;
            long currentBucket = selectTableStartPointer;

            @Override
            public int next() {
                // Scan forward, skipping empty buckets (stored value 0 -> -1).
                while(currentBucket < endBucket) {
                    int selectOrdinal = (int) hashIndexState.getSelectHashArray().getElementValue((currentBucket++) * hashIndexState.getBitsPerSelectHashEntry(), hashIndexState.getBitsPerSelectHashEntry()) - 1;
                    if(selectOrdinal != -1)
                        return selectOrdinal;
                }
                return NO_MORE_ORDINALS;
            }
        };
    }

    /**
     * Returns a stream of matching ordinals.
     * <p>
     * The ordinals may be used with a generated API or the Generic Object API to inspect
     * the matched records.
     *
     * @return an {@code IntStream} of matching ordinals
     */
    public IntStream stream() {
        Spliterator.OfInt si = new Spliterator.OfInt() {
            final long endBucket = selectTableStartPointer + selectTableBuckets;
            long currentBucket = selectTableStartPointer;

            @Override
            public OfInt trySplit() {
                // @@@ Supporting splitting and therefore enable parallelism
                return null;
            }

            @Override
            public boolean tryAdvance(IntConsumer action) {
                while (currentBucket < endBucket) {
                    int selectOrdinal = (int) hashIndexState.getSelectHashArray().getElementValue(
                            (currentBucket++) * hashIndexState.getBitsPerSelectHashEntry(),
                            hashIndexState.getBitsPerSelectHashEntry()) - 1;
                    if (selectOrdinal != -1) {
                        action.accept(selectOrdinal);
                        return true;
                    }
                }
                return false;
            }

            @Override
            public long estimateSize() {
                // @@@
                return 0;
            }

            @Override
            public int characteristics() {
                // @@@ ordinals are distinct?
                return 0;
            }
        };
        return StreamSupport.intStream(si, false);
    }
}
9,175
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/TST.java
package com.netflix.hollow.core.index;

import com.netflix.hollow.core.memory.encoding.FixedLengthElementArray;
import com.netflix.hollow.core.memory.encoding.FixedLengthMultipleOccurrenceElementArray;
import com.netflix.hollow.core.memory.pool.ArraySegmentRecycler;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import java.util.ArrayDeque;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Queue;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * Ternary Search Tree implementation. Insertion order of elements in TST controls the balancing factor of the tree and
 * in turn space utilization and query performance. The tree would be least balanced with worst performance if the keys
 * are inserted in sorted order, and it would be somewhat balanced with good performance if keys are inserted in random order.
 * <p>
 * This implementation supports duplicate element references from a single tree node. As duplicate elements are encountered,
 * the per-node capacity of referencing elements is dynamically resized. A best effort guess of the measure of duplication
 * in inserted keys is helpful to avoid expensive resize operations.
 * <p>
 * The total node capacity of the tree i.e. max no. of nodes in the tree is pre-allocated at the time of initialization
 * and can not be dynamically resized.
 */
class TST { // ternary search tree

    private enum NodeType {
        Left, Right, Middle
    }

    // Bit-packed node layout: [16-bit char key][left ptr][middle ptr][right ptr][1-bit end-of-key flag]
    private final int bitsPerNode;
    private final int bitsPerKey;
    private final int bitsForChildPointer;
    private final int bitsPerOrdinal;

    // bit offsets of each section within a node, relative to the node's start bit
    private final long leftChildOffset;
    private final long middleChildOffset;
    private final long rightChildOffset;
    private final long isEndFlagOffset; // flag bit marking the final character of an indexed key

    private final long maxNodes;        // hard limit on node count, fixed at construction
    private final boolean caseSensitive;

    // pre-allocated array holding every node of the TST:
    // each node contains a data key, links to 3 child nodes, and one bit indicating end of an indexed value
    private final FixedLengthElementArray nodes;
    // dynamically resized array storing the ordinal(s) referenced by each end-of-key node
    private final FixedLengthMultipleOccurrenceElementArray ordinalSet;

    private long indexTracker; // index of the next unused node slot (== no. of populated nodes)
    private long maxDepth;     // deepest insertion path observed so far

    /**
     * Create a new ternary search tree backing a prefix index.
     *
     * @param estimatedMaxNodes estimated number of max nodes that will be created; this is a hard limit
     * @param estimatedMaxStringDuplicates estimated number of string duplicates across all nodes
     * @param maxOrdinalValue max ordinal that can be referenced
     * @param caseSensitive controls whether indexing and querying should be case sensitive
     * @param memoryRecycler pool used to allocate/reuse the underlying long arrays
     */
    TST(long estimatedMaxNodes, int estimatedMaxStringDuplicates, int maxOrdinalValue,
            boolean caseSensitive, ArraySegmentRecycler memoryRecycler) {
        // best guess, hard limit
        maxNodes = estimatedMaxNodes;
        this.caseSensitive = caseSensitive;

        // bits for pointers in a single node:
        bitsPerKey = 16; // one UTF-16 code unit per node
        bitsForChildPointer = 64 - Long.numberOfLeadingZeros(maxNodes); // minimum bits that can address maxNodes slots
        bitsPerOrdinal = maxOrdinalValue == 0 ? 1 : 32 - Integer.numberOfLeadingZeros(maxOrdinalValue);

        // bits to represent one node: key + 3 child pointers + end-of-key flag
        bitsPerNode = bitsPerKey + (3 * bitsForChildPointer) + 1;

        nodes = new FixedLengthElementArray(memoryRecycler, bitsPerNode * maxNodes);
        ordinalSet = new FixedLengthMultipleOccurrenceElementArray(memoryRecycler,
                maxNodes, bitsPerOrdinal, estimatedMaxStringDuplicates);
        indexTracker = 0;
        maxDepth = 0;

        // initialize offsets
        leftChildOffset = bitsPerKey; // left child pointer immediately follows the 16-bit key
        middleChildOffset = leftChildOffset + bitsForChildPointer;
        rightChildOffset = middleChildOffset + bitsForChildPointer;
        isEndFlagOffset = rightChildOffset + bitsForChildPointer;
    }

    // tell memory recycler to reuse these long arrays on the next long array request ONLY AFTER swap is called on memory recycler
    void recycleMemory(ArraySegmentRecycler memoryRecycler) {
        nodes.destroy(memoryRecycler);
        ordinalSet.destroy();
    }

    // maps a child direction to the bit offset of that pointer inside a node
    private long getChildOffset(NodeType nodeType) {
        // enum identity comparison (==) is the idiomatic, null-safe form
        if (nodeType == NodeType.Left) return leftChildOffset;
        if (nodeType == NodeType.Middle) return middleChildOffset;
        return rightChildOffset;
    }

    private long getChildIndex(long currentNode, NodeType nodeType) {
        long offset = getChildOffset(nodeType);
        return nodes.getElementValue((currentNode * bitsPerNode) + offset, bitsForChildPointer);
    }

    private void setChildIndex(long currentNode, NodeType nodeType, long indexForNode) {
        long offset = getChildOffset(nodeType);
        nodes.setElementValue((currentNode * bitsPerNode) + offset, bitsForChildPointer, indexForNode);
    }

    private void setKey(long index, char ch) {
        nodes.setElementValue(index * bitsPerNode, bitsPerKey, ch);
    }

    private long getKey(long nodeIndex) {
        return nodes.getElementValue(nodeIndex * bitsPerNode, bitsPerKey);
    }

    // true when the node marks the last character of an inserted key
    private boolean isEndNode(long nodeIndex) {
        return nodes.getElementValue((nodeIndex * bitsPerNode) + isEndFlagOffset, 1) == 1;
    }

    // records an ordinal against a node and marks the node as end-of-key
    private void addOrdinal(long nodeIndex, long ordinal) {
        ordinalSet.addElement(nodeIndex, ordinal);
        nodes.setElementValue((nodeIndex * bitsPerNode) + isEndFlagOffset, 1, 1);
    }

    /**
     * Returns the ordinals referenced from the given node, or an empty list for a negative (no-match) index.
     */
    List<Integer> getOrdinals(long nodeIndex) {
        if (nodeIndex < 0) {
            return Collections.emptyList(); // type-safe replacement for the raw Collections.EMPTY_LIST
        }
        return ordinalSet.getElements(nodeIndex).stream()
                .map(Long::intValue).collect(Collectors.toList());
    }

    /**
     * Insert into ternary search tree for the given key and ordinal.
     * Case sensitivity is specified at the time of index initialization. nulls and empty strings are not supported.
     *
     * @throws IllegalArgumentException for null or empty keys
     * @throws IllegalStateException when the pre-allocated node capacity is exhausted
     */
    void insert(String key, int ordinal) {
        if (key == null) throw new IllegalArgumentException("Null key cannot be indexed");
        if (key.length() == 0) throw new IllegalArgumentException("Empty string cannot be indexed");

        long currentNodeIndex = 0;
        int keyIndex = 0;
        int depth = 0;
        if (!caseSensitive) {
            key = key.toLowerCase();
        }
        while (keyIndex < key.length()) {
            char ch = key.charAt(keyIndex);
            if (getKey(currentNodeIndex) == 0) {
                // empty slot: claim it for this character, consuming one node from the pool
                setKey(currentNodeIndex, ch);
                indexTracker++;
                if (indexTracker >= maxNodes)
                    throw new IllegalStateException("Index Tracker reached max capacity. Try with larger estimate of number of nodes");
            }

            long keyAtCurrentNode = getKey(currentNodeIndex);
            if (ch < keyAtCurrentNode) {
                long leftIndex = getChildIndex(currentNodeIndex, NodeType.Left);
                if (leftIndex == 0) leftIndex = indexTracker; // lazily allocate the next free slot
                setChildIndex(currentNodeIndex, NodeType.Left, leftIndex);
                currentNodeIndex = leftIndex;
            } else if (ch > keyAtCurrentNode) {
                long rightIndex = getChildIndex(currentNodeIndex, NodeType.Right);
                if (rightIndex == 0) rightIndex = indexTracker;
                setChildIndex(currentNodeIndex, NodeType.Right, rightIndex);
                currentNodeIndex = rightIndex;
            } else {
                // matched this character; descend through the middle child for the next one
                keyIndex++;
                if (keyIndex < key.length()) {
                    long midIndex = getChildIndex(currentNodeIndex, NodeType.Middle);
                    if (midIndex == 0) midIndex = indexTracker;
                    setChildIndex(currentNodeIndex, NodeType.Middle, midIndex);
                    currentNodeIndex = midIndex;
                }
            }
            depth++;
        }
        addOrdinal(currentNodeIndex, ordinal);
        if (depth > maxDepth) {
            maxDepth = depth;
        }
    }

    /**
     * Note that it will match the longest substring in {@code prefix} that was inserted as a key into the tree, and not
     * match partial prefix with partial key. Case sensitivity of matches is specified at the time of index initialization.
     * Null values and empty strings are not indexed so those return -1.
     *
     * @return index of the node corresponding to longest match with a given prefix, -1 if no match or input was null or empty string
     */
    long findLongestMatch(String prefix) {
        long nodeIndex = -1;
        if (prefix == null || prefix.length() == 0) {
            return nodeIndex;
        }
        if (!caseSensitive) {
            prefix = prefix.toLowerCase();
        }
        boolean atRoot = true;
        long currentNodeIndex = 0;
        int keyIndex = 0;
        while (true) {
            // child pointer 0 means "no child" everywhere except the root itself
            if (currentNodeIndex == 0 && !atRoot) break;
            long currentValue = getKey(currentNodeIndex);
            char ch = prefix.charAt(keyIndex);
            if (ch < currentValue) {
                currentNodeIndex = getChildIndex(currentNodeIndex, NodeType.Left);
            } else if (ch > currentValue) {
                currentNodeIndex = getChildIndex(currentNodeIndex, NodeType.Right);
            } else {
                if (isEndNode(currentNodeIndex)) {
                    nodeIndex = currentNodeIndex; // update longest prefix match
                }
                if (keyIndex == (prefix.length() - 1)) {
                    break;
                }
                currentNodeIndex = getChildIndex(currentNodeIndex, NodeType.Middle);
                keyIndex++;
            }
            if (atRoot) atRoot = false;
        }
        return nodeIndex;
    }

    /**
     * This functions checks if the given key exists in the TST. Case sensitivity of matches is specified at the time of
     * index initialization.
     *
     * @return index of the node corresponding to the last character of the key, or -1 if not found or input was null or empty string.
     */
    long findNodeWithKey(String key) {
        long index = -1;
        if (key == null || key.length() == 0) {
            return index;
        }
        if (!caseSensitive) {
            key = key.toLowerCase();
        }
        boolean atRoot = true;
        long currentNodeIndex = 0;
        int keyIndex = 0;
        while (true) {
            if (currentNodeIndex == 0 && !atRoot) break;
            long currentValue = getKey(currentNodeIndex);
            char ch = key.charAt(keyIndex);
            if (ch < currentValue) currentNodeIndex = getChildIndex(currentNodeIndex, NodeType.Left);
            else if (ch > currentValue) currentNodeIndex = getChildIndex(currentNodeIndex, NodeType.Right);
            else {
                if (keyIndex == (key.length() - 1)) {
                    index = currentNodeIndex;
                    break;
                }
                currentNodeIndex = getChildIndex(currentNodeIndex, NodeType.Middle);
                keyIndex++;
            }
            if (atRoot) atRoot = false;
        }
        return index;
    }

    /** @return true only when the key's full path exists AND its last node is flagged end-of-key */
    boolean contains(String key) {
        long nodeIndex = findNodeWithKey(key);
        return nodeIndex >= 0 && isEndNode(nodeIndex);
    }

    /**
     * Find all the ordinals that match the given prefix.
     * Case sensitivity of matches is specified at the time of index initialization. A prefix of empty string "" will
     * return all ordinals indexed in the tree.
     *
     * @throws IllegalArgumentException if prefix is null
     */
    HollowOrdinalIterator findKeysWithPrefix(String prefix) {
        if (prefix == null) {
            throw new IllegalArgumentException("Cannot findKeysWithPrefix null prefix");
        }
        if (!caseSensitive) {
            prefix = prefix.toLowerCase();
        }
        final Set<Integer> ordinals = new HashSet<>();
        long currentNodeIndex;
        if (prefix.length() == 0) {
            currentNodeIndex = 0;
        } else {
            currentNodeIndex = findNodeWithKey(prefix);
        }
        if (currentNodeIndex >= 0) {
            if (isEndNode(currentNodeIndex))
                ordinals.addAll(getOrdinals(currentNodeIndex));

            // BFS from the middle subtree of the matched node (or the root for an empty prefix),
            // collecting ordinals from every end-of-key node encountered
            Queue<Long> queue = new ArrayDeque<>();
            if (prefix.length() == 0) {
                queue.add(0L); // root node index
            } else {
                long subTree = getChildIndex(currentNodeIndex, NodeType.Middle);
                if (subTree != 0) {
                    queue.add(subTree);
                }
            }
            while (!queue.isEmpty()) {
                long nodeIndex = queue.remove();
                long left = getChildIndex(nodeIndex, NodeType.Left);
                long mid = getChildIndex(nodeIndex, NodeType.Middle);
                long right = getChildIndex(nodeIndex, NodeType.Right);
                if (isEndNode(nodeIndex)) ordinals.addAll(getOrdinals(nodeIndex));
                if (left != 0) queue.add(left);
                if (mid != 0) queue.add(mid);
                if (right != 0) queue.add(right);
            }
        }
        return new HollowOrdinalIterator() {
            private Iterator<Integer> it = ordinals.iterator();

            @Override
            public int next() {
                if (it.hasNext()) return it.next();
                return NO_MORE_ORDINALS;
            }
        };
    }

    /**
     * Returns the max depth of the prefix tree. The depth depends on insertion order of elements and effects the
     * worst case no. of hops for search. For e.g., a more balanced tree depth closer to log n (n being the no. of nodes)
     * will yield closer to O(log n) search time complexity whereas a tree with depth closer to n would mean
     * upto O(n) search time complexity.
     * @return the max depth of the tree
     */
    long getMaxDepth() {
        return maxDepth;
    }

    /**
     * Returns the no. of empty nodes (capacity minus populated) as a measure of how much pre-allocated space is
     * under utilized.
     * @return no. of empty nodes in prefix tree
     */
    long getEmptyNodes() {
        return maxNodes - indexTracker;
    }

    /**
     * Returns the no. of populated nodes (out of the entire node capacity) in the underlying prefix tree. The no. of
     * nodes required to index a set of records can vary depending on underlying prefix tree implementation and
     * insertion order of records.
     * @return no. of populated nodes in prefix tree
     */
    long getNumNodes() {
        return indexTracker;
    }

    /**
     * Returns the max node capacity of the underlying prefix tree, used as a measure of in-memory space efficiency.
     * An attempt to insert no. of nodes greater than this value will result in failure. The no. of nodes required to
     * index over a set of records can vary depending on underlying prefix tree implementation and insertion order
     * of records.
     * @return max node capacity of underlying prefix tree
     */
    long getMaxNodes() {
        return maxNodes;
    }

    /**
     * This is a measure of duplication in the indexed field. Duplication has an adverse effect on memory efficiency, since
     * each node of the tree reserves space to reference multiple records.
     * @return no. of elements that can be referenced from a single node of the tree
     */
    int getMaxElementsPerNode() {
        return ordinalSet.getMaxElementsPerNode();
    }

    /**
     * Returns the approx heap footprint of the prefix tree
     * @return approx heap footprint in bytes
     */
    long approxHeapFootprintInBytes() {
        return nodes.approxHeapFootprintInBytes() + ordinalSet.approxHeapFootprintInBytes();
    }
}
9,176
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/MultiLinkedElementArray.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.index;

import com.netflix.hollow.core.memory.pool.ArraySegmentRecycler;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;

/**
 * A compact store of many small int lists, addressed by list index.
 * <p>
 * Each list occupies one 64-bit word in {@code listPointersAndSizes}, which uses two encodings:
 * <ul>
 *   <li><b>Pivoted</b> (sign bit {@code Long.MIN_VALUE} set): up to two values stored inline —
 *       the first in bits 32-62, an optional second in bits 0-30 with bit 31 as a presence flag.</li>
 *   <li><b>Linked</b> (sign bit clear, word non-zero): bits 32-63 point at the head entry in
 *       {@code linkedElements} and bits 0-30 hold the list size. Each linked entry packs
 *       [value (high 32) | next-entry index (low 32)]; the tail entry is the original pivoted
 *       word (negative), whose sign bit terminates iteration.</li>
 * </ul>
 * New values are prepended, so iteration yields values in reverse insertion order.
 * NOTE(review): encoding description above is inferred from the bit operations below; confirm
 * against GrowingSegmentedLongArray semantics (assumed zero-initialized).
 */
public class MultiLinkedElementArray {

    // one word per list: pivoted inline values, or (pointer << 32 | size) into linkedElements
    private final GrowingSegmentedLongArray listPointersAndSizes;
    // shared pool of linked-list entries, each packing (value << 32 | next index)
    private final GrowingSegmentedLongArray linkedElements;

    private int nextNewPointer = 0;     // next unallocated list index
    private long nextLinkedElement = 0; // next free slot in linkedElements

    public MultiLinkedElementArray(ArraySegmentRecycler memoryRecycler) {
        this.listPointersAndSizes = new GrowingSegmentedLongArray(memoryRecycler);
        this.linkedElements = new GrowingSegmentedLongArray(memoryRecycler);
    }

    /**
     * Returns an iterator over the values of the given list; the sign bit of the list word
     * selects the pivoted (inline) vs. linked representation.
     */
    public HollowOrdinalIterator iterator(int listIdx) {
        if((listPointersAndSizes.get(listIdx) & Long.MIN_VALUE) != 0)
            return new PivotedElementIterator(listIdx);
        return new LinkedElementIterator(listIdx);
    }

    /**
     * Appends a value to the given list, upgrading the representation as it grows:
     * empty -> 1 inline value -> 2 inline values -> linked list.
     */
    public void add(int listIdx, int value) {
        long listPtr = listPointersAndSizes.get(listIdx);

        if(listPtr == 0) {
            // empty list: store first value inline (high 32 bits) and set the pivoted flag
            listPointersAndSizes.set(listIdx, Long.MIN_VALUE | (long)value << 32);
            return;
        }

        if((listPtr & 0xFFFFFFFFL) == 0) {
            // pivoted with one value: store second value in low bits, bit 31 marks its presence
            listPointersAndSizes.set(listIdx, listPtr | 0x80000000L | value);
            return;
        }

        if((listPtr & Long.MIN_VALUE) != 0) {
            // pivoted with two values: migrate to linked form.
            // The old pivoted word becomes the (negative) tail entry; a new head entry
            // carries the third value and points back at the tail. Size becomes 3.
            linkedElements.set(nextLinkedElement, listPtr);
            long newLink = (long)value << 32 | nextLinkedElement;
            linkedElements.set(++nextLinkedElement, newLink);
            listPtr = (long)(nextLinkedElement++) << 32 | 3;
            listPointersAndSizes.set(listIdx, listPtr);
        } else {
            // already linked: prepend a new head entry and bump the stored size
            long linkedElement = listPtr >> 32;
            long size = listPtr & Integer.MAX_VALUE;
            long newLink = (long)value << 32 | linkedElement;
            linkedElements.set(nextLinkedElement, newLink);
            listPtr = (long)(nextLinkedElement++) << 32 | (size + 1);
            listPointersAndSizes.set(listIdx, listPtr);
        }
    }

    /** @return the number of lists allocated so far */
    public int numLists() {
        return nextNewPointer;
    }

    /** Allocates a new (empty) list and returns its index. */
    public int newList() {
        return nextNewPointer++;
    }

    /** @return the number of values in the given list (derived from the encoding, not stored for pivoted lists) */
    public int listSize(int listIdx) {
        long listPtr = listPointersAndSizes.get(listIdx);
        if(listPtr == 0)
            return 0;
        if((listPtr & Long.MIN_VALUE) != 0)
            return (listPtr & 0xFFFFFFFFL) == 0 ? 1 : 2; // pivoted: low word empty means one inline value
        return (int)(listPtr & Integer.MAX_VALUE);        // linked: size is stored in the low 31 bits
    }

    /** Releases the backing arrays. */
    public void destroy() {
        listPointersAndSizes.destroy();
        linkedElements.destroy();
    }

    /**
     * Iterates a linked-form list: walks head-to-tail through linkedElements, then emits
     * the two values packed in the (negative) tail entry.
     */
    public class LinkedElementIterator implements HollowOrdinalIterator {

        private int currentElement;  // index of the next entry to read in linkedElements
        private boolean lastElement; // true once the negative tail (old pivoted word) is reached
        private boolean finished;

        private LinkedElementIterator(int listIdx) {
            // head entry index lives in the high 32 bits of the list word
            this.currentElement = (int)(listPointersAndSizes.get(listIdx) >> 32);
        }

        @Override
        public int next() {
            if(finished)
                return NO_MORE_ORDINALS;

            if(lastElement) {
                // final value: the first-inserted value, in the high bits of the tail entry
                int value = (int)(linkedElements.get(currentElement) >>> 32) & Integer.MAX_VALUE;
                finished = true;
                return value;
            } else {
                long element = linkedElements.get(currentElement);
                if(element < 0) {
                    // reached the tail (old pivoted word): emit its low-bits value, one value remains
                    lastElement = true;
                    return (int)element & Integer.MAX_VALUE;
                } else {
                    // normal entry: low bits are the next index, high bits the value
                    currentElement = (int)element;
                    return (int)(element >> 32);
                }
            }
        }
    }

    /**
     * Iterates a pivoted (inline) list of one or two values: the optional second-added value
     * (low bits) first, then the first-added value (high bits).
     */
    public class PivotedElementIterator implements HollowOrdinalIterator {

        private int listIdx;
        private int currentElement; // advances past 1 once both values have been emitted

        private PivotedElementIterator(int listIdx) {
            this.listIdx = listIdx;
        }

        @Override
        public int next() {
            if(currentElement > 1)
                return NO_MORE_ORDINALS;

            long element = listPointersAndSizes.get(listIdx);

            if(currentElement++ == 0) {
                // low word non-zero (bit 31 presence flag) means a second value exists; emit it first
                if((element & 0xFFFFFFFFL) != 0)
                    return (int)element & Integer.MAX_VALUE;
            }

            currentElement++;
            // first-added value from the high bits (mask strips the pivoted sign bit)
            return (int)(element >>> 32) & Integer.MAX_VALUE;
        }
    }
}
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/FieldPath.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.index;

import static com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import static com.netflix.hollow.core.schema.HollowSchema.SchemaType;

import com.netflix.hollow.core.read.dataaccess.HollowCollectionTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowMapTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import com.netflix.hollow.core.schema.HollowCollectionSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * This class is used to represent a field path. A field path is a "." separated list of fields that are used for
 * traversal when looking up values for a type and ordinal of that type. It is used to read field values for a given
 * type and its ordinal. It is convenient when the path is deeply nested.
 * <p>
 * Upon initializing an instance of this class with given parameters, it does the following checks
 * <ul>
 * <li>validates the path by traversing it and making sure all types are present in the HollowDataAccess</li>
 * <li>auto-expands the path for collections (appending ".element" if missing), and a reference type (e.g String/Integer appending ".value" if missing) with single field. (except for Map schema)</li>
 * <li>for map schema type, the class looks for "key" to iterate over key types and "value" to iterate over value types.</li>
 * </ul>
 * <p>
 * This class has a convenience method to find values following the field path and given a start field position.
 */
class FieldPath {

    private final String fieldPath;                // raw "." separated path, e.g. "movie.title.value"
    private final HollowDataAccess hollowDataAccess;
    private final String type;                     // root type from which traversal begins

    // parallel arrays, one entry per (possibly auto-expanded) path segment
    private String[] fields;
    private int[] fieldPositions;                  // field index inside an OBJECT schema; 0 for collection/map hops
    private FieldType[] fieldTypes;                // REFERENCE for every non-leaf hop
    private String lastRefTypeInPath;
    private boolean autoExpand;

    /**
     * Create new FieldPath with auto-expand feature.
     *
     * @param hollowDataAccess hollow data access
     * @param type parent type of the field path
     * @param fieldPath "." separated fields
     */
    FieldPath(HollowDataAccess hollowDataAccess, String type, String fieldPath) {
        this(hollowDataAccess, type, fieldPath, true);
    }

    /**
     * Create new FieldPath.
     *
     * @param hollowDataAccess hollow data access
     * @param type parent type of the field path
     * @param fieldPath "." separated fields
     * @param autoExpand if the field path should auto-expand collections and references with one field
     */
    FieldPath(HollowDataAccess hollowDataAccess, String type, String fieldPath, boolean autoExpand) {
        this.fieldPath = fieldPath;
        this.hollowDataAccess = hollowDataAccess;
        this.type = type;
        this.autoExpand = autoExpand;
        initialize();
    }

    // Validates/expands the path once up front and flattens the segments into parallel arrays
    // so per-lookup traversal avoids re-resolving schemas.
    private void initialize() {
        FieldPaths.FieldPath<FieldPaths.FieldSegment> path =
                FieldPaths.createFieldPathForPrefixIndex(hollowDataAccess, type, fieldPath, autoExpand);

        List<String> fields = new ArrayList<>();
        List<Integer> fieldPositions = new ArrayList<>();
        List<FieldType> fieldTypes = new ArrayList<>();
        String lastRefType = type;
        for (FieldPaths.FieldSegment segment : path.getSegments()) {
            fields.add(segment.getName());
            if (segment.getEnclosingSchema().getSchemaType() == SchemaType.OBJECT) {
                assert segment instanceof FieldPaths.ObjectFieldSegment;
                FieldPaths.ObjectFieldSegment oSegment = (FieldPaths.ObjectFieldSegment) segment;
                fieldPositions.add(oSegment.getIndex());
                fieldTypes.add(oSegment.getType());
            } else {
                // collection/map hops carry no field position; they are traversed via iterators
                fieldPositions.add(0);
                fieldTypes.add(FieldType.REFERENCE);
            }
            String refType = segment.getTypeName();
            if (refType != null) {
                lastRefType = refType;
            }
        }
        this.fields = fields.toArray(new String[0]);
        this.fieldPositions = fieldPositions.stream().mapToInt(i -> i).toArray();
        this.fieldTypes = fieldTypes.toArray(new FieldType[0]);
        this.lastRefTypeInPath = lastRefType;
    }

    String getLastRefTypeInPath() {
        return lastRefTypeInPath;
    }

    FieldType getLastFieldType() {
        return fieldTypes[this.fields.length - 1];
    }

    /**
     * Recursively find all the values following the field path.
     *
     * @param ordinal ordinal record for the given type in field path
     * @return Array of values found at the field path for the given ordinal record in the type.
     */
    Object[] findValues(int ordinal) {
        return getAllValues(ordinal, type, 0);
    }

    /**
     * Recursively find a value following the path. If the path contains a collection, then the first value is picked.
     *
     * @param ordinal the ordinal used to find a value
     * @return A value found at the field path for the given ordinal record in the type, or null if absent.
     */
    Object findValue(int ordinal) {
        return getValue(ordinal, type, 0);
    }

    // Single-value traversal: follows the path one hop at a time, taking the first entry of any
    // collection/map encountered. Returns null if any hop yields no record.
    private Object getValue(int ordinal, String type, int fieldIndex) {
        Object value = null;

        HollowTypeDataAccess typeDataAccess = hollowDataAccess.getTypeDataAccess(type);
        SchemaType schemaType = hollowDataAccess.getSchema(type).getSchemaType();
        HollowSchema schema = hollowDataAccess.getSchema(type);

        // enums are compared with == (identity), the idiomatic null-safe form
        if (schemaType == SchemaType.LIST || schemaType == SchemaType.SET) {
            HollowCollectionTypeDataAccess collectionTypeDataAccess = (HollowCollectionTypeDataAccess) typeDataAccess;
            HollowCollectionSchema collectionSchema = (HollowCollectionSchema) schema;
            String elementType = collectionSchema.getElementType();

            HollowOrdinalIterator it = collectionTypeDataAccess.ordinalIterator(ordinal);
            int refOrdinal = it.next();
            if (refOrdinal != HollowOrdinalIterator.NO_MORE_ORDINALS) {
                value = getValue(refOrdinal, elementType, fieldIndex + 1);
            }
            return value;

        } else if (schemaType == SchemaType.MAP) {
            // Map type
            HollowMapTypeDataAccess mapTypeDataAccess = (HollowMapTypeDataAccess) typeDataAccess;
            HollowMapSchema mapSchema = (HollowMapSchema) schema;

            // "key" in the path walks map keys; anything else walks map values
            boolean iterateThroughKeys = fields[fieldIndex].equals("key");
            String keyOrValueType = iterateThroughKeys ? mapSchema.getKeyType() : mapSchema.getValueType();

            HollowMapEntryOrdinalIterator mapEntryIterator = mapTypeDataAccess.ordinalIterator(ordinal);
            if (mapEntryIterator.next()) {
                int keyOrValueOrdinal = iterateThroughKeys ? mapEntryIterator.getKey() : mapEntryIterator.getValue();
                value = getValue(keyOrValueOrdinal, keyOrValueType, fieldIndex + 1);
            }
            return value;

        } else {
            // Object type: either follow a reference or read a leaf value
            HollowObjectSchema objectSchema = (HollowObjectSchema) schema;
            HollowObjectTypeDataAccess objectTypeDataAccess = (HollowObjectTypeDataAccess) typeDataAccess;

            if (fieldTypes[fieldIndex] == FieldType.REFERENCE) {
                int refOrdinal = objectTypeDataAccess.readOrdinal(ordinal, fieldPositions[fieldIndex]);
                if (refOrdinal >= 0) {
                    String refType = objectSchema.getReferencedType(fieldPositions[fieldIndex]);
                    value = getValue(refOrdinal, refType, fieldIndex + 1);
                }
            } else {
                value = readFromObject(objectTypeDataAccess, ordinal, fieldTypes[fieldIndex], fieldPositions[fieldIndex]);
            }
        }
        return value;
    }

    // Multi-value traversal: like getValue but fans out over every element of any collection/map
    // encountered, returning all leaf values reachable along the path.
    private Object[] getAllValues(int ordinal, String type, int fieldIndex) {
        Object[] values;

        HollowTypeDataAccess typeDataAccess = hollowDataAccess.getTypeDataAccess(type);
        SchemaType schemaType = hollowDataAccess.getSchema(type).getSchemaType();
        HollowSchema schema = hollowDataAccess.getSchema(type);

        if (schemaType == SchemaType.LIST || schemaType == SchemaType.SET) {
            HollowCollectionTypeDataAccess collectionTypeDataAccess = (HollowCollectionTypeDataAccess) typeDataAccess;
            HollowCollectionSchema collectionSchema = (HollowCollectionSchema) schema;
            String elementType = collectionSchema.getElementType();

            HollowOrdinalIterator it = collectionTypeDataAccess.ordinalIterator(ordinal);
            List<Object> valueList = new ArrayList<>();
            int refOrdinal = it.next();
            while (refOrdinal != HollowOrdinalIterator.NO_MORE_ORDINALS) {
                Object[] refValues = getAllValues(refOrdinal, elementType, fieldIndex + 1);
                Collections.addAll(valueList, refValues);
                refOrdinal = it.next();
            }
            values = new Object[valueList.size()];
            valueList.toArray(values);

        } else if (schemaType == SchemaType.MAP) {
            // Map type
            HollowMapTypeDataAccess mapTypeDataAccess = (HollowMapTypeDataAccess) typeDataAccess;
            HollowMapSchema mapSchema = (HollowMapSchema) schema;

            // "key" in the path walks map keys; anything else walks map values
            boolean iterateThroughKeys = fields[fieldIndex].equals("key");
            String keyOrValueType = iterateThroughKeys ? mapSchema.getKeyType() : mapSchema.getValueType();

            HollowMapEntryOrdinalIterator mapEntryIterator = mapTypeDataAccess.ordinalIterator(ordinal);
            List<Object> valueList = new ArrayList<>();
            while (mapEntryIterator.next()) {
                int keyOrValueOrdinal = iterateThroughKeys ? mapEntryIterator.getKey() : mapEntryIterator.getValue();
                Object[] refValues = getAllValues(keyOrValueOrdinal, keyOrValueType, fieldIndex + 1);
                Collections.addAll(valueList, refValues);
            }
            values = new Object[valueList.size()];
            valueList.toArray(values);

        } else {
            // Object type: either follow a reference or read a single leaf value
            HollowObjectSchema objectSchema = (HollowObjectSchema) schema;
            HollowObjectTypeDataAccess objectTypeDataAccess = (HollowObjectTypeDataAccess) typeDataAccess;

            if (fieldTypes[fieldIndex] == FieldType.REFERENCE) {
                int refOrdinal = objectTypeDataAccess.readOrdinal(ordinal, fieldPositions[fieldIndex]);
                if (refOrdinal >= 0) {
                    String refType = objectSchema.getReferencedType(fieldPositions[fieldIndex]);
                    return getAllValues(refOrdinal, refType, fieldIndex + 1);
                }
                return new Object[]{};
            } else {
                return new Object[]{readFromObject(objectTypeDataAccess, ordinal, fieldTypes[fieldIndex], fieldPositions[fieldIndex])};
            }
        }
        return values;
    }

    // Reads a primitive/leaf field value from an OBJECT record.
    // @throws IllegalStateException for field types with no value representation (e.g. BYTES)
    private Object readFromObject(HollowObjectTypeDataAccess objectTypeDataAccess, int ordinal,
            FieldType fieldType, int fieldPosition) {
        Object value;
        switch (fieldType) {
            case INT:
                value = objectTypeDataAccess.readInt(ordinal, fieldPosition);
                break;
            case LONG:
                value = objectTypeDataAccess.readLong(ordinal, fieldPosition);
                break;
            case DOUBLE:
                value = objectTypeDataAccess.readDouble(ordinal, fieldPosition);
                break;
            case FLOAT:
                value = objectTypeDataAccess.readFloat(ordinal, fieldPosition);
                break;
            case BOOLEAN:
                value = objectTypeDataAccess.readBoolean(ordinal, fieldPosition);
                break;
            case STRING:
                value = objectTypeDataAccess.readString(ordinal, fieldPosition);
                break;
            default:
                throw new IllegalStateException("Invalid field type :" + fieldType + " cannot read values for this type");
        }
        return value;
    }
}
9,178
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/HollowPrimaryKeyIndex.java
/*
 *  Copyright 2016-2019 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.hollow.core.index;

import static com.netflix.hollow.core.HollowConstants.ORDINAL_NONE;
import static java.util.Objects.requireNonNull;

import com.netflix.hollow.core.index.key.HollowPrimaryKeyValueDeriver;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.memory.encoding.FixedLengthElementArray;
import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.memory.pool.ArraySegmentRecycler;
import com.netflix.hollow.core.memory.pool.WastefulRecycler;
import com.netflix.hollow.core.read.HollowReadFieldUtils;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeStateListener;
import com.netflix.hollow.core.read.engine.PopulatedOrdinalListener;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * A HollowPrimaryKeyIndex is the go-to mechanism for indexing and querying data in a Hollow blob.
 * <p>
 * A primary key index can be used to index and query a type by a {@link PrimaryKey}. The provided {@link PrimaryKey} does
 * not have to be the same as declared as the default in the data model.
 *
 * <b>This class is not safe to use with object longevity if the index is not being updated for each delta. The internal
 * implementation of this class uses the type state retrieved through the schema. That results in certain operations
 * always being performed against the current version. As such, this class is only valid for up to 2 updates.</b>
 *
 * If you need an index that will survive 2 or more deltas (without being updated), then use {@link HollowUniqueKeyIndex}
 * or {@link HollowHashIndex}.
 */
@SuppressWarnings("override")
public class HollowPrimaryKeyIndex implements HollowTypeStateListener, TestableUniqueKeyIndex {
    private static final Logger LOG = Logger.getLogger(HollowPrimaryKeyIndex.class.getName());

    // Type state being indexed; all field reads and ordinal lookups go through it.
    private final HollowObjectTypeReadState typeState;
    // For each key field: the resolved path of field positions from the indexed type to the leaf field.
    private final int[][] fieldPathIndexes;
    // Leaf field type of each key field, parallel to fieldPathIndexes.
    private final FieldType[] fieldTypes;
    private final PrimaryKey primaryKey;
    // Derives the key Object[] for a record ordinal, and matches candidate keys during probing.
    private final HollowPrimaryKeyValueDeriver keyDeriver;
    private final ArraySegmentRecycler memoryRecycler;
    // When non-null, only these ordinals are indexed and delta listening is disallowed.
    private final BitSet specificOrdinalsToIndex;

    // Current open-addressing hash table. Readers re-read this after probing to detect a
    // concurrent swap by reindex()/deltaUpdate() and retry (see the do/while loops below).
    private volatile PrimaryKeyIndexHashTable hashTableVolatile;

    public HollowPrimaryKeyIndex(HollowReadStateEngine stateEngine, String type, String... fieldPaths) {
        this(stateEngine, WastefulRecycler.DEFAULT_INSTANCE, type, fieldPaths);
    }

    public HollowPrimaryKeyIndex(HollowReadStateEngine stateEngine, PrimaryKey primaryKey) {
        this(stateEngine, primaryKey, WastefulRecycler.DEFAULT_INSTANCE);
    }

    public HollowPrimaryKeyIndex(HollowReadStateEngine stateEngine, ArraySegmentRecycler memoryRecycler, String type, String... fieldPaths) {
        this(stateEngine, PrimaryKey.create(stateEngine, type, fieldPaths), memoryRecycler);
    }

    public HollowPrimaryKeyIndex(HollowReadStateEngine stateEngine, PrimaryKey primaryKey, ArraySegmentRecycler memoryRecycler) {
        this(stateEngine, primaryKey, memoryRecycler, null);
    }

    /**
     * This initializer can be used to create a HollowPrimaryKeyIndex which will only index a subset of the records in the specified type.
     *
     * @param stateEngine the read state engine
     * @param primaryKey the primary key
     * @param memoryRecycler the memory recycler
     * @param specificOrdinalsToIndex the bit set of ordinals to index; {@code null} indexes all populated ordinals
     */
    public HollowPrimaryKeyIndex(HollowReadStateEngine stateEngine, PrimaryKey primaryKey, ArraySegmentRecycler memoryRecycler, BitSet specificOrdinalsToIndex) {
        requireNonNull(primaryKey, "Hollow Primary Key Index creation failed because primaryKey was null");
        requireNonNull(stateEngine, "Hollow Primary Key Index creation for type [" + primaryKey.getType()
                + "] failed because read state wasn't initialized");

        this.primaryKey = primaryKey;
        this.typeState = (HollowObjectTypeReadState) stateEngine.getTypeState(primaryKey.getType());
        this.fieldPathIndexes = new int[primaryKey.numFields()][];
        this.fieldTypes = new FieldType[primaryKey.numFields()];
        this.memoryRecycler = memoryRecycler;

        // Resolve each key field's path and leaf type once, up front.
        for(int i=0;i<primaryKey.numFields();i++) {
            fieldPathIndexes[i] = primaryKey.getFieldPathIndex(stateEngine, i);
            fieldTypes[i] = primaryKey.getFieldType(stateEngine, i);
        }

        this.keyDeriver = new HollowPrimaryKeyValueDeriver(typeState, fieldPathIndexes, fieldTypes);
        this.specificOrdinalsToIndex = specificOrdinalsToIndex;

        reindex();
    }

    /**
     * Once called, this HollowPrimaryKeyIndex will be kept up-to-date when deltas are applied to the indexed state engine.
     * <p>
     * This method should be called <b>before</b> any subsequent deltas occur after the index is created.
     * <p>
     * In order to prevent memory leaks, if this method is called and the index is no longer needed, call detachFromDeltaUpdates() before
     * discarding the index.
     */
    public void listenForDeltaUpdates() {
        if(specificOrdinalsToIndex != null)
            throw new IllegalStateException("Cannot listen for delta updates when indexing only specified ordinals!");

        typeState.addListener(this);
    }

    /**
     * Once called, this HollowPrimaryKeyIndex will no longer be kept up-to-date when deltas are applied to the indexed state engine.
     * <p>
     * Call this method before discarding indexes which are currently listening for delta updates.
     */
    public void detachFromDeltaUpdates() {
        typeState.removeListener(this);
    }

    public HollowObjectTypeReadState getTypeState() {
        return typeState;
    }

    public PrimaryKey getPrimaryKey() {
        return primaryKey;
    }

    public List<FieldType> getFieldTypes() {
        return Arrays.asList(fieldTypes);
    }

    /**
     * Query an index with a single specified field. The returned value will be the ordinal of the matching record.
     * <p>
     * Use a generated API or the Generic Object API to use the returned ordinal.
     *
     * @param key the field key
     * @return the matching ordinal for the key, otherwise -1 if the key is not present
     */
    public int getMatchingOrdinal(Object key) {
        PrimaryKeyIndexHashTable hashTable = hashTableVolatile;
        // bitsPerElement == 0 means the table is empty (no records were indexed).
        if(fieldPathIndexes.length != 1 || hashTable.bitsPerElement == 0)
            return -1;

        int hashCode = keyHashCode(key, 0);

        int ordinal = -1;

        // Retry if the table was swapped (by a concurrent reindex) while we were probing.
        do {
            hashTable = this.hashTableVolatile;
            int bucket = hashCode & hashTable.hashMask;
            ordinal = readOrdinal(hashTable, bucket);
            // Linear probing: advance until an empty bucket (-1) or a key match.
            while(ordinal != -1) {
                if(keyDeriver.keyMatches(key, ordinal, 0))
                    break;

                bucket++;
                bucket &= hashTable.hashMask;
                ordinal = readOrdinal(hashTable, bucket);
            }
        } while(hashTableVolatile != hashTable);

        return ordinal;
    }

    /**
     * Query an index with two specified fields. The returned value will be the ordinal of the matching record.
     * <p>
     * Use a generated API or the Generic Object API to use the returned ordinal.
     *
     * @param key1 the first field key
     * @param key2 the second field key
     * @return the matching ordinal for the two keys, otherwise -1 if the key is not present
     */
    public int getMatchingOrdinal(Object key1, Object key2) {
        PrimaryKeyIndexHashTable hashTable = hashTableVolatile;
        if(fieldPathIndexes.length != 2 || hashTable.bitsPerElement == 0)
            return -1;

        // Combined record hash is the XOR of the per-field hashes (must mirror recordHash()).
        int hashCode = keyHashCode(key1, 0);
        hashCode ^= keyHashCode(key2, 1);

        int ordinal = -1;

        do {
            hashTable = this.hashTableVolatile;
            int bucket = hashCode & hashTable.hashMask;
            ordinal = readOrdinal(hashTable, bucket);
            while(ordinal != -1) {
                if(keyDeriver.keyMatches(key1, ordinal, 0) && keyDeriver.keyMatches(key2, ordinal, 1))
                    break;

                bucket++;
                bucket &= hashTable.hashMask;
                ordinal = readOrdinal(hashTable, bucket);
            }
        } while(hashTableVolatile != hashTable);

        return ordinal;
    }

    /**
     * Query an index with three specified fields. The returned value will be the ordinal of the matching record.
     * <p>
     * Use a generated API or the Generic Object API to use the returned ordinal.
     *
     * @param key1 the first field key
     * @param key2 the second field key
     * @param key3 the third field key
     * @return the matching ordinal for the three keys, otherwise -1 if the key is not present
     */
    public int getMatchingOrdinal(Object key1, Object key2, Object key3) {
        PrimaryKeyIndexHashTable hashTable = hashTableVolatile;
        if(fieldPathIndexes.length != 3 || hashTable.bitsPerElement == 0)
            return -1;

        int hashCode = keyHashCode(key1, 0);
        hashCode ^= keyHashCode(key2, 1);
        hashCode ^= keyHashCode(key3, 2);

        int ordinal = -1;

        do {
            hashTable = this.hashTableVolatile;
            int bucket = hashCode & hashTable.hashMask;
            ordinal = readOrdinal(hashTable, bucket);
            while(ordinal != -1) {
                if(keyDeriver.keyMatches(key1, ordinal, 0) && keyDeriver.keyMatches(key2, ordinal, 1) && keyDeriver.keyMatches(key3, ordinal, 2))
                    break;

                bucket++;
                bucket &= hashTable.hashMask;
                ordinal = readOrdinal(hashTable, bucket);
            }
        } while(hashTableVolatile != hashTable);

        return ordinal;
    }

    /**
     * Query an index with four or more specified fields. The returned value will be the ordinal of the matching record.
     * <p>
     * Use a generated API or the Generic Object API to use the returned ordinal.
     *
     * @param keys the field keys
     * @return the matching ordinal for the keys, otherwise -1 if the key is not present
     */
    public int getMatchingOrdinal(Object... keys) {
        PrimaryKeyIndexHashTable hashTable = hashTableVolatile;
        if(fieldPathIndexes.length != keys.length || hashTable.bitsPerElement == 0)
            return -1;

        int hashCode = 0;
        for(int i=0;i<keys.length;i++)
            hashCode ^= keyHashCode(keys[i], i);

        int ordinal = -1;

        do {
            hashTable = this.hashTableVolatile;
            int bucket = hashCode & hashTable.hashMask;
            ordinal = readOrdinal(hashTable, bucket);
            while(ordinal != -1) {
                if(keyDeriver.keyMatches(ordinal, keys))
                    break;

                bucket++;
                bucket &= hashTable.hashMask;
                ordinal = readOrdinal(hashTable, bucket);
            }
        } while(hashTableVolatile != hashTable);

        return ordinal;
    }

    // Buckets store ordinal + 1 so that 0 can mean "empty"; subtract 1 to recover the ordinal (-1 == empty).
    private int readOrdinal(PrimaryKeyIndexHashTable hashTable, int bucket) {
        return (int)hashTable.hashTable.getElementValue((long)hashTable.bitsPerElement * (long)bucket, hashTable.bitsPerElement) - 1;
    }

    // Hashes a user-supplied key value for field fieldIdx; must agree with fieldHash() on indexed records.
    private int keyHashCode(Object key, int fieldIdx) {
        switch(fieldTypes[fieldIdx]) {
        case BOOLEAN:
            return HashCodes.hashInt(HollowReadFieldUtils.booleanHashCode((Boolean)key));
        case DOUBLE:
            return HashCodes.hashInt(HollowReadFieldUtils.doubleHashCode(((Double)key).doubleValue()));
        case FLOAT:
            return HashCodes.hashInt(HollowReadFieldUtils.floatHashCode(((Float)key).floatValue()));
        case INT:
            return HashCodes.hashInt(HollowReadFieldUtils.intHashCode(((Integer)key).intValue()));
        case LONG:
            return HashCodes.hashInt(HollowReadFieldUtils.longHashCode(((Long)key).longValue()));
        case REFERENCE:
            return HashCodes.hashInt(((Integer)key).intValue());
        case BYTES:
            return HashCodes.hashCode((byte[])key);
        case STRING:
            return HashCodes.hashCode((String)key);
        }

        throw new IllegalArgumentException("I don't know how to hash a " + fieldTypes[fieldIdx]);
    }

    private void setHashTable(PrimaryKeyIndexHashTable hashTable) {
        this.hashTableVolatile = hashTable;
    }

    /**
     * @return whether or not this index contains duplicate records (two or more records mapping to a single primary key).
     */
    public boolean containsDuplicates() {
        return !getDuplicateKeys().isEmpty();
    }

    /**
     * @return any keys which are mapped to two or more records.
     */
    public synchronized Collection<Object[]> getDuplicateKeys() {
        PrimaryKeyIndexHashTable hashTable = hashTableVolatile;
        if(hashTable.bitsPerElement == 0)
            return Collections.emptyList();

        List<Object[]> duplicateKeys = new ArrayList<Object[]>();

        // Duplicates necessarily collide, so comparing each entry only against the entries in its
        // probe run (the contiguous non-empty buckets that follow it) is sufficient.
        for(int i=0;i<hashTable.hashTableSize;i++) {
            int ordinal = (int)hashTable.hashTable.getElementValue((long)i * (long)hashTable.bitsPerElement, hashTable.bitsPerElement) - 1;

            if(ordinal != -1) {
                int compareBucket = (i+1) & hashTable.hashMask;
                int compareOrdinal = (int)hashTable.hashTable.getElementValue((long)compareBucket * (long)hashTable.bitsPerElement, hashTable.bitsPerElement) - 1;
                while(compareOrdinal != -1) {
                    if(recordsHaveEqualKeys(ordinal, compareOrdinal))
                        duplicateKeys.add(keyDeriver.getRecordKey(ordinal));

                    compareBucket = (compareBucket + 1) & hashTable.hashMask;
                    compareOrdinal = (int)hashTable.hashTable.getElementValue((long)compareBucket * (long)hashTable.bitsPerElement, hashTable.bitsPerElement) - 1;
                }
            }
        }

        return duplicateKeys;
    }

    @Override
    public void beginUpdate() { }

    @Override
    public void addedOrdinal(int ordinal) { }

    @Override
    public void removedOrdinal(int ordinal) { }

    // Incremental (delta) maintenance is opt-in via system property; see the comment in endUpdate().
    private static final boolean ALLOW_DELTA_UPDATE =
            Boolean.getBoolean("com.netflix.hollow.core.index.HollowPrimaryKeyIndex.allowDeltaUpdate");

    @Override
    public synchronized void endUpdate() {
        BitSet ordinals = typeState.getListener(PopulatedOrdinalListener.class).getPopulatedOrdinals();
        int hashTableSize = HashCodes.hashTableSize(ordinals.cardinality());
        // Bits needed to store (maxOrdinal + 1), i.e. any ordinal offset by the +1 empty-bucket sentinel.
        int bitsPerElement = (32 - Integer.numberOfLeadingZeros(typeState.maxOrdinal() + 1));

        PrimaryKeyIndexHashTable hashTable = hashTableVolatile;
        // A delta update is only possible when the table geometry is unchanged; otherwise rebuild.
        if(ALLOW_DELTA_UPDATE
                && hashTableSize == hashTable.hashTableSize
                && bitsPerElement == hashTable.bitsPerElement
                && shouldPerformDeltaUpdate()) {
            try {
                deltaUpdate(hashTableSize, bitsPerElement);
            } catch (OrdinalNotFoundException e) {
                /*
                It has been observed that delta updates can result in CPU spinning attempting to find
                a previous ordinal to remove.
                It's not clear what the cause of the issue is but it does not appear to be data related
                (since the failure is not consistent when multiple instances update to the same version)
                nor concurrency related (since an update occurs in a synchronized block).
                A rare possibility is it might be a C2 compiler issue.  Changing the code shape may well
                fix that.  Attempts to reproduce this locally has so far failed.
                Given the importance of indexing a full reindex is performed on such a failure.
                This, however, will make it more difficult to detect such issues.
                This approach does not protect against the case where the index is corrupt and
                not yet detected, until a further update.  In such cases it may be possible for
                clients, in the interim of a forced reindex, to operate on a corrupt index:
                queries may incorrectly return no match.
                As such delta update of the index have been disabled by default.
                 */
                LOG.log(Level.SEVERE, "Delta update of index failed. Performing a full reindex", e);

                reindex();
            }
        } else {
            reindex();
        }
    }

    // Thrown by findOrdinalBucket() when a to-be-removed ordinal cannot be located in the table.
    private static class OrdinalNotFoundException extends IllegalStateException {
        OrdinalNotFoundException(String s) {
            super(s);
        }
    }

    public void destroy() {
        PrimaryKeyIndexHashTable hashTable = hashTableVolatile;
        if(hashTable != null)
            hashTable.hashTable.destroy(memoryRecycler);
    }

    // Rebuilds the entire hash table from scratch and swaps it in.
    private synchronized void reindex() {
        PrimaryKeyIndexHashTable hashTable = hashTableVolatile;
        // Could be null on first reindex
        if(hashTable != null) {
            hashTable.hashTable.destroy(memoryRecycler);
        }

        BitSet ordinals = specificOrdinalsToIndex;

        if(ordinals == null) {
            PopulatedOrdinalListener listener = typeState.getListener(PopulatedOrdinalListener.class);
            ordinals = listener.getPopulatedOrdinals();
        }

        int hashTableSize = HashCodes.hashTableSize(ordinals.cardinality());
        int bitsPerElement = (32 - Integer.numberOfLeadingZeros(typeState.maxOrdinal() + 1));

        FixedLengthElementArray hashedArray = new FixedLengthElementArray(memoryRecycler, (long)hashTableSize * (long)bitsPerElement);

        // hashTableSize is a power of two (see HashCodes.hashTableSize), so masking implements modulo.
        int hashMask = hashTableSize - 1;

        int ordinal = ordinals.nextSetBit(0);
        while(ordinal != ORDINAL_NONE) {
            int hashCode = recordHash(ordinal);
            int bucket = hashCode & hashMask;

            // Linear probe to the first empty bucket; store ordinal + 1 (0 is the empty sentinel).
            while(hashedArray.getElementValue((long)bucket * (long)bitsPerElement, bitsPerElement) != 0)
                bucket = (bucket + 1) & hashMask;

            hashedArray.setElementValue((long)bucket * (long)bitsPerElement, bitsPerElement, ordinal + 1);

            ordinal = ordinals.nextSetBit(ordinal + 1);
        }

        setHashTable(new PrimaryKeyIndexHashTable(hashedArray, hashTableSize, hashMask, bitsPerElement));

        memoryRecycler.swap();
    }

    // Applies a delta in place (logically): copies the previous table, removes ordinals that
    // disappeared (back-shifting displaced entries), then inserts newly-populated ordinals.
    private void deltaUpdate(int hashTableSize, int bitsPerElement) {
        // For a delta update hashTableVolatile cannot be null
        PrimaryKeyIndexHashTable hashTable = hashTableVolatile;
        hashTable.hashTable.destroy(memoryRecycler);

        PopulatedOrdinalListener listener = typeState.getListener(PopulatedOrdinalListener.class);
        BitSet prevOrdinals = listener.getPreviousOrdinals();
        BitSet ordinals = listener.getPopulatedOrdinals();

        long totalBitsInHashTable = (long)hashTableSize * (long)bitsPerElement;
        FixedLengthElementArray hashedArray = new FixedLengthElementArray(memoryRecycler, totalBitsInHashTable);
        hashedArray.copyBits(hashTable.hashTable, 0, 0, totalBitsInHashTable);

        int hashMask = hashTableSize - 1;

        int prevOrdinal = prevOrdinals.nextSetBit(0);
        while(prevOrdinal != ORDINAL_NONE) {
            if(!ordinals.get(prevOrdinal)) {
                /// find and remove this ordinal
                int hashCode = recordHash(prevOrdinal);
                int bucket = findOrdinalBucket(bitsPerElement, hashedArray, hashCode, hashMask, prevOrdinal);

                hashedArray.clearElementValue((long)bucket * (long)bitsPerElement, bitsPerElement);

                // Removal from an open-addressed table: walk the rest of the probe run and
                // back-shift any entry whose natural bucket is outside (emptyBucket, bucket].
                int emptyBucket = bucket;
                bucket = (bucket + 1) & hashMask;
                int moveOrdinal = (int)hashedArray.getElementValue((long)bucket * (long)bitsPerElement, bitsPerElement) - 1;

                while(moveOrdinal != ORDINAL_NONE) {
                    int naturalHash = recordHash(moveOrdinal);
                    int naturalBucket = naturalHash & hashMask;

                    if(!bucketInRange(emptyBucket, bucket, naturalBucket)) {
                        hashedArray.setElementValue((long)emptyBucket * (long)bitsPerElement, bitsPerElement, moveOrdinal + 1);
                        hashedArray.clearElementValue((long)bucket * (long)bitsPerElement, bitsPerElement);
                        emptyBucket = bucket;
                    }

                    bucket = (bucket + 1) & hashMask;
                    moveOrdinal = (int)hashedArray.getElementValue((long)bucket * (long)bitsPerElement, bitsPerElement) - 1;
                }
            }

            prevOrdinal = prevOrdinals.nextSetBit(prevOrdinal + 1);
        }

        int ordinal = ordinals.nextSetBit(0);
        while(ordinal != ORDINAL_NONE) {
            if(!prevOrdinals.get(ordinal)) {
                int hashCode = recordHash(ordinal);
                int bucket = hashCode & hashMask;

                while(hashedArray.getElementValue((long)bucket * (long)bitsPerElement, bitsPerElement) != 0) {
                    bucket = (bucket + 1) & hashMask;
                }

                hashedArray.setElementValue((long)bucket * (long)bitsPerElement, bitsPerElement, ordinal + 1);
            }

            ordinal = ordinals.nextSetBit(ordinal + 1);
        }

        setHashTable(new PrimaryKeyIndexHashTable(hashedArray, hashTableSize, hashMask, bitsPerElement));

        memoryRecycler.swap();
    }

    // Locates the bucket holding prevOrdinal, probing from its natural bucket. Throws
    // OrdinalNotFoundException (rather than spinning forever) if an empty bucket is hit
    // or the probe wraps fully around the table.
    private int findOrdinalBucket(int bitsPerElement, FixedLengthElementArray hashedArray, int hashCode, int hashMask, int prevOrdinal) {
        int startBucket = hashCode & hashMask;
        int bucket = startBucket;
        long value;
        do {
            value = hashedArray.getElementValue((long)bucket * (long)bitsPerElement, bitsPerElement);
            if (prevOrdinal + 1 == value) {
                return bucket;
            }
            bucket = (bucket + 1) & hashMask;
        } while (value != 0 && bucket != startBucket);

        if (value == 0) {
            throw new OrdinalNotFoundException(String.format("Ordinal not found (found empty entry): "
                    + "ordinal=%d startBucket=%d", prevOrdinal, startBucket));
        } else {
            throw new OrdinalNotFoundException(String.format("Ordinal not found (wrapped around table): "
                    + "ordinal=%d startBucket=%d", prevOrdinal, startBucket));
        }
    }

    // True when testBucket lies in the circular range (fromBucket, toBucket].
    private boolean bucketInRange(int fromBucket, int toBucket, int testBucket) {
        if(toBucket > fromBucket) {
            return testBucket > fromBucket && testBucket <= toBucket;
        } else {
            // The range wraps around the end of the table.
            return testBucket > fromBucket || testBucket <= toBucket;
        }
    }

    // XOR of the per-field hashes; must mirror how the getMatchingOrdinal() overloads combine keyHashCode().
    private int recordHash(int ordinal) {
        int hashCode = 0;
        for(int i=0;i<fieldPathIndexes.length;i++) {
            hashCode ^= fieldHash(ordinal, i);
            // hashCode ^= HashCodes.hashInt(hashCode);
        }
        return hashCode;
    }

    // Hashes the value of key field fieldIdx for a record, following the field path
    // through referenced types down to the leaf field.
    private int fieldHash(int ordinal, int fieldIdx) {
        HollowObjectTypeReadState typeState = this.typeState;
        HollowObjectSchema schema = typeState.getSchema();

        int lastFieldPath = fieldPathIndexes[fieldIdx].length - 1;
        for(int i=0;i<lastFieldPath;i++) {
            int fieldPosition = fieldPathIndexes[fieldIdx][i];
            ordinal = typeState.readOrdinal(ordinal, fieldPosition);
            typeState = (HollowObjectTypeReadState) schema.getReferencedTypeState(fieldPosition); //This causes an incompatibility with object longevity.
            schema = typeState.getSchema();
        }

        int hashCode = HollowReadFieldUtils.fieldHashCode(typeState, ordinal, fieldPathIndexes[fieldIdx][lastFieldPath]);

        // STRING/BYTES hashes are already mixed; other types get an extra mixing step,
        // matching keyHashCode() above.
        switch(fieldTypes[fieldIdx]) {
        case STRING:
        case BYTES:
            return hashCode;
        default:
            return HashCodes.hashInt(hashCode);
        }
    }

    public Object[] getRecordKey(int ordinal) {
        return keyDeriver.getRecordKey(ordinal);
    }

    private boolean recordsHaveEqualKeys(int ordinal1, int ordinal2) {
        for(int i=0;i<fieldPathIndexes.length;i++) {
            if(!fieldsAreEqual(ordinal1, ordinal2, i))
                return false;
        }
        return true;
    }

    // Compares key field fieldIdx of two records, walking both records' field paths in lockstep.
    private boolean fieldsAreEqual(int ordinal1, int ordinal2, int fieldIdx) {
        HollowObjectTypeReadState typeState = this.typeState;
        HollowObjectSchema schema = typeState.getSchema();

        int lastFieldPath = fieldPathIndexes[fieldIdx].length - 1;
        for(int i=0;i<lastFieldPath;i++) {
            int fieldPosition = fieldPathIndexes[fieldIdx][i];
            ordinal1 = typeState.readOrdinal(ordinal1, fieldPosition);
            ordinal2 = typeState.readOrdinal(ordinal2, fieldPosition);
            typeState = (HollowObjectTypeReadState) schema.getReferencedTypeState(fieldPosition); //This causes an incompatibility with object longevity.
            schema = typeState.getSchema();
        }

        // REFERENCE fields compare by ordinal identity rather than by value.
        if(fieldTypes[fieldIdx] == FieldType.REFERENCE)
            return typeState.readOrdinal(ordinal1, fieldPathIndexes[fieldIdx][lastFieldPath]) ==
                    typeState.readOrdinal(ordinal2, fieldPathIndexes[fieldIdx][lastFieldPath]);

        return HollowReadFieldUtils.fieldsAreEqual(typeState, ordinal1, fieldPathIndexes[fieldIdx][lastFieldPath],
                typeState, ordinal2, fieldPathIndexes[fieldIdx][lastFieldPath]);
    }

    // A delta update is only worthwhile when fewer than 10% of previous records were removed;
    // beyond that, a full reindex is cheaper than repeated removal back-shifts.
    private boolean shouldPerformDeltaUpdate() {
        BitSet previousOrdinals = typeState.getListener(PopulatedOrdinalListener.class).getPreviousOrdinals();
        BitSet ordinals = typeState.getListener(PopulatedOrdinalListener.class).getPopulatedOrdinals();

        int prevCardinality = 0;
        int removedRecords = 0;

        int prevOrdinal = previousOrdinals.nextSetBit(0);
        while(prevOrdinal != ORDINAL_NONE) {
            prevCardinality++;
            if(!ordinals.get(prevOrdinal))
                removedRecords++;

            prevOrdinal = previousOrdinals.nextSetBit(prevOrdinal + 1);
        }

        if(removedRecords > prevCardinality * 0.1d)
            return false;
        return true;
    }

    // Immutable snapshot of the open-addressing hash table; swapped atomically via hashTableVolatile.
    static class PrimaryKeyIndexHashTable {
        final FixedLengthElementArray hashTable;
        final int hashTableSize;
        final int hashMask;          // hashTableSize - 1 (table size is a power of two)
        final int bitsPerElement;    // bits per bucket entry; entries store ordinal + 1, 0 == empty

        public PrimaryKeyIndexHashTable(FixedLengthElementArray hashTable, int hashTableSize, int hashMask, int bitsPerElement) {
            this.hashTable = hashTable;
            this.hashTableSize = hashTableSize;
            this.hashMask = hashMask;
            this.bitsPerElement = bitsPerElement;
        }
    }

}
9,179
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/TestableUniqueKeyIndex.java
package com.netflix.hollow.core.index;

import java.util.Collection;

/**
 * This package is for internal use. Do not depend on it.
 *
 * This interface allows us to re-use tests for two very similar classes. If we
 * merge {@link HollowPrimaryKeyIndex} and {@link HollowUniqueKeyIndex}, then this
 * interface won't be necessary.
 */
@SuppressWarnings({"DeprecatedIsStillUsed", "override"})
@Deprecated
interface TestableUniqueKeyIndex {

    /** Begins keeping the index up-to-date as deltas are applied to the state engine. */
    void listenForDeltaUpdates();

    /** Returns the matching ordinal for a single-field key, or -1 if absent. */
    int getMatchingOrdinal(Object key);

    /** Returns the matching ordinal for a two-field key, or -1 if absent. */
    int getMatchingOrdinal(Object key1, Object key2);

    /** Returns the matching ordinal for a three-field key, or -1 if absent. */
    int getMatchingOrdinal(Object key1, Object key2, Object key3);

    /** Returns the key field values for the record at the given ordinal. */
    Object[] getRecordKey(int ordinal);

    /** Returns whether two or more records map to a single primary key. */
    boolean containsDuplicates();

    /** Returns any keys which are mapped to two or more records. */
    Collection<Object[]> getDuplicateKeys();
}
9,180
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/HollowSparseIntegerSet.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.index; import com.netflix.hollow.core.read.engine.HollowReadStateEngine; import com.netflix.hollow.core.read.engine.HollowTypeStateListener; import java.util.BitSet; import java.util.HashSet; import java.util.Set; import java.util.concurrent.atomic.AtomicReferenceArray; /** * Create hollow integer set for sparse non-negative {@literal &} unique integer values referenced by fieldPath in a type based on a predicate. */ public class HollowSparseIntegerSet implements HollowTypeStateListener { private final HollowReadStateEngine readStateEngine; private final String type; private final FieldPath fieldPath; private final IndexPredicate predicate; protected volatile SparseBitSet sparseBitSetVolatile; private Set<Integer> valuesToSet; private Set<Integer> valuesToClear; private int maxValueToSet; public interface IndexPredicate { boolean shouldIndex(int ordinal); } private final static IndexPredicate DEFAULT_PREDICATE = new IndexPredicate() { @Override public boolean shouldIndex(int ordinal) { return true; } }; /** * Create a index for integer values pointed by the given field path. 
* * @param readStateEngine the read state * @param type the type name * @param fieldPath the field path */ public HollowSparseIntegerSet(HollowReadStateEngine readStateEngine, String type, String fieldPath) { this(readStateEngine, type, fieldPath, DEFAULT_PREDICATE); } /** * Create a index for integer values based on the given IndexPredicate. * * @param readStateEngine HollowReadStateEngine to read data set. * @param type type which contains the path to integer values for indexing. * @param fieldPath path to the integer values * @param predicate implementation of IndexPredicate, indicating if the record passes the condition for indexing. */ public HollowSparseIntegerSet(HollowReadStateEngine readStateEngine, String type, String fieldPath, IndexPredicate predicate) { // check arguments if (readStateEngine == null) throw new IllegalArgumentException("Read state engine cannot be null"); if (type == null) throw new IllegalArgumentException("type cannot be null"); if (fieldPath == null || fieldPath.isEmpty()) throw new IllegalArgumentException("fieldPath cannot be null or empty"); this.readStateEngine = readStateEngine; this.type = type; this.fieldPath = new FieldPath(readStateEngine, type, fieldPath); this.predicate = predicate; this.valuesToSet = new HashSet<>(); this.valuesToClear = new HashSet<>(); build(); } protected void build() { // initialize an instance of SparseBitSet initSet(Integer.MAX_VALUE); // iterate through all populated ordinals for the type to set the values based on predicate BitSet typeBitSet = readStateEngine.getTypeState(type).getPopulatedOrdinals(); int ordinal = typeBitSet.nextSetBit(0); while (ordinal != -1) { set(ordinal); ordinal = typeBitSet.nextSetBit(ordinal + 1); } // run compaction compact(); } protected void initSet(int maxValue) { sparseBitSetVolatile = new SparseBitSet(maxValue); } protected void set(int ordinal) { if (predicate.shouldIndex(ordinal)) { Object[] values = fieldPath.findValues(ordinal); if (values != null && 
values.length > 0) { SparseBitSet bitSet = sparseBitSetVolatile; for (Object value : values) { bitSet.set((int) value); } } } } protected void compact() { SparseBitSet current = sparseBitSetVolatile; SparseBitSet compactedSet = SparseBitSet.compact(current); sparseBitSetVolatile = compactedSet; } /** * Check if the given value is contained in the set (or if the given value satisfies the predicate condition.) * * @param i the integer value * @return {@code true} if the value is present */ public boolean get(int i) { SparseBitSet current; boolean result; do { current = sparseBitSetVolatile; result = current.get(i); } while (current != sparseBitSetVolatile); return result; } /** * Estimate the total number of bits used to represent the integer set. * * @return Calculates the total number of bits used by longs in underlying data structure. */ public long size() { SparseBitSet current; long size; do { current = sparseBitSetVolatile; size = current.estimateBitsUsed(); } while (current != sparseBitSetVolatile); return size; } /** * @return the total number of integers added to the set. */ public int cardinality() { SparseBitSet current; int cardinality; do { current = sparseBitSetVolatile; cardinality = current.cardinality(); } while (current != sparseBitSetVolatile); return cardinality; } /** * Use this method to keep the index updated with delta changes on the read state engine. * Remember to call detachFromDeltaUpdates to stop the delta changes. * NOTE: Each delta updates creates a new prefix index and swaps the new with current. */ public void listenForDeltaUpdates() { readStateEngine.getTypeState(type).addListener(this); } /** * Stop delta updates for this index. 
*/ public void detachFromDeltaUpdates() { readStateEngine.getTypeState(type).removeListener(this); } @Override public void beginUpdate() { valuesToSet.clear(); valuesToClear.clear(); maxValueToSet = -1; } @Override public void addedOrdinal(int ordinal) { if (predicate.shouldIndex(ordinal)) { Object[] values = fieldPath.findValues(ordinal); for (Object value : values) { valuesToSet.add((int) value); if (maxValueToSet < (int) value) maxValueToSet = (int) value; } } } @Override public void removedOrdinal(int ordinal) { Object[] values = fieldPath.findValues(ordinal); for (Object value : values) valuesToClear.add((int) value); } @Override public void endUpdate() { boolean didSomeWork = false; SparseBitSet updated = sparseBitSetVolatile; // first check if the max value among the new values to be added is more than the max value of the existing sparse bit set. if (valuesToSet.size() > 0 && maxValueToSet > updated.findMaxValue()) { updated = SparseBitSet.resize(updated, maxValueToSet); didSomeWork = true; } // when applying delta, check for duplicates, increment counts if duplicate values are found else set them for (int value : valuesToSet) { updated.set(value); } // first clear all the values that are meant to be cleared for (int value : valuesToClear) { updated.clear(value); } if (didSomeWork) { sparseBitSetVolatile = updated; } } /** * This implementation is motivated from several ideas to get a compact sparse set. * When using a a bucket of BitSet, problems * - smaller sizes of BitSet are not useful, since null references are themselves 64/32 bit references. * - larger sizes of BitSet for truly sparse integers, has overhead of too many zeroes in one BitSet. * <p> * The idea is to only store longs in bb that have non-zero values where bucket sizes are longs. Bucket size of 64 longs are convenient when using mod operations. * <p> * Each bit in long value in indices array, indicates if a long value is initialized. 64 bits would point to 64 long values ( 1 bucket ). 
* Each bucket could contain 1-64 longs, we only hold non-zero long values in bucket. */ static class SparseBitSet { // shift used to determine which bucket private static final int BUCKET_SHIFT = 12; // shift used to determine which Long value to use in bucket. private static final int LONG_SHIFT = 6; private final int maxValue; private final AtomicReferenceArray<Bucket> buckets; private static class Bucket { private long idx; private long[] longs; private Bucket(long idx, long[] longs) { this.idx = idx; this.longs = longs; } } SparseBitSet(int maxValue) { int totalBuckets = maxValue >>> BUCKET_SHIFT; this.maxValue = maxValue; this.buckets = new AtomicReferenceArray<>(totalBuckets + 1); } private SparseBitSet(int maxValue, AtomicReferenceArray<Bucket> buckets) { this.maxValue = maxValue; this.buckets = buckets; } private static int getIndex(int i) { // logical right shift return i >>> BUCKET_SHIFT; } /** * This method returns the number of Longs initialized from LSB to the given bitInIndex. 
* For example longAtIndex (64 bits) = 00...1001 and bitInIndex (3rd bit is set) 000...100 * then this method will return 1 since only one bit is set in longAtIndex to the right of bitInIndex * * @param longAtIndex * @param bitInIndex * @return */ private static int getOffset(long longAtIndex, long bitInIndex) { // set all bits to one before the bit that is set in bitInIndex // example : 000...0100 will become 000...011 long setAllOnesBeforeBitInIndex = bitInIndex - 1; long offset = (longAtIndex & setAllOnesBeforeBitInIndex); return Long.bitCount(offset); } boolean get(int i) { if (i > maxValue || i < 0) return false; int index = getIndex(i); Bucket currentBucket = buckets.get(index); if (currentBucket == null) return false; long currentLongAtIndex = currentBucket.idx; long[] longs = currentBucket.longs; // find which bit in index will point to the long in bb long whichLong = i >>> LONG_SHIFT; long bitInIndex = 1L << whichLong;// whichLong % 64 long isLongInitialized = (currentLongAtIndex & bitInIndex); if (isLongInitialized == 0) return false; int offset = getOffset(currentLongAtIndex, bitInIndex); long value = longs[offset]; long whichBitInLong = 1L << i; return (value & whichBitInLong) != 0; } // thread-safe void set(int i) { if (i > maxValue) throw new IllegalArgumentException("Max value initialized is " + maxValue + " given value is " + i); if (i < 0) throw new IllegalArgumentException("Cannot index negative numbers"); // find which bucket int index = getIndex(i); // find which bit in index will point to the long in bb long whichLong = i >>> LONG_SHIFT; long bitInIndex = 1L << whichLong;// whichLong % 64 long whichBitInLong = 1L << i;// i % 64 while (true) { long longAtIndex = 0; long[] longs = null; Bucket currentBucket = buckets.get(index); if (currentBucket != null) { longAtIndex = currentBucket.idx; longs = currentBucket.longs.clone(); } boolean isLongInitialized = (longAtIndex & bitInIndex) != 0; if (isLongInitialized) { // if a long value is set, the find 
the correct offset to determine which long in longs to use. int offset = getOffset(longAtIndex, bitInIndex); longs[offset] |= whichBitInLong;// or preserves previous set operations in this long. } else if (longAtIndex == 0) { // first set that bit in idx for that bucket, and assign a new long[] longAtIndex = bitInIndex; longs = new long[]{whichBitInLong}; } else { // update long value at index longAtIndex |= bitInIndex; // find offset int offset = getOffset(longAtIndex, bitInIndex); int oldLongsLen = longs.length; long[] newLongs = new long[oldLongsLen + 1]; // if offset is 2 means 3 longs are needed starting from 0 // if current longs length is 2 (0,1) then append third long at end // if current longs length is greater than offset, then insert long 0 -> (offset - 1), new long, offset to (length -1) if (offset >= oldLongsLen) { // append new long at end int it; for (it = 0; it < oldLongsLen; it++) newLongs[it] = longs[it]; newLongs[it] = whichBitInLong; } else { // insert new long in between int it; for (it = 0; it < offset; it++) newLongs[it] = longs[it]; newLongs[offset] = whichBitInLong; for (it = offset; it < oldLongsLen; it++) newLongs[it + 1] = longs[it]; } longs = newLongs; } Bucket newBucket = new Bucket(longAtIndex, longs); if (buckets.compareAndSet(index, currentBucket, newBucket)) break; } } // thread-safe void clear(int i) { if (i > maxValue || i < 0) return; int index = getIndex(i); while (true) { Bucket currentBucket = buckets.get(index); if (currentBucket == null) return; long longAtIndex = currentBucket.idx; long[] longs = currentBucket.longs.clone(); // find which bit in index will point to the long in bb long whichLong = i >>> LONG_SHIFT; long bitInIndex = 1L << whichLong;// whichLong % 64 long whichBitInLong = 1L << i;// i % 64 long isLongInitialized = (longAtIndex & bitInIndex); if (isLongInitialized == 0) return; int offset = getOffset(longAtIndex, bitInIndex); long value = longs[offset]; // unset whichBitInIndex in value // to clear 3rd bit 
(00100 whichBitInLong) in 00101(value), & with 11011 to get 00001 long updatedValue = value & ~whichBitInLong; boolean isBucketEmpty = false; if (updatedValue != 0) { longs[offset] = updatedValue; } else { // if updatedValue is 0, then update the bucket removing that long int oldLongsLen = longs.length; // if only one long was initialized in the bucket, then make the reference null, indexAtLong 0 if (oldLongsLen == 1) { longs = null; longAtIndex = 0; isBucketEmpty = true; } else { // copy everything over, except the long at the given offset, long[] newLongs = new long[oldLongsLen - 1]; int it; for (it = 0; it < offset; it++) newLongs[it] = longs[it]; it++; while (it < oldLongsLen) { newLongs[it - 1] = longs[it]; it++; } longs = newLongs; longAtIndex &= ~bitInIndex; } } Bucket updatedBucket = null; if (!isBucketEmpty) updatedBucket = new Bucket(longAtIndex, longs); if (buckets.compareAndSet(index, currentBucket, updatedBucket)) break; } } int findMaxValue() { // find the last index that is initialized int index = buckets.length() - 1; while (index >= 0) { if (buckets.get(index) != null) break; index--; } // if no buckets are initialized, then return -1 ( meaning set is empty) if (index < 0) return -1; // find the highest bit in indexAtLong to see which is last long init in bucket int highestBitSetInIndexAtLong = 63 - Long.numberOfLeadingZeros(Long.highestOneBit(buckets.get(index).idx)); long[] longs = buckets.get(index).longs; long value = longs[longs.length - 1]; long highestBitSetInLong = 63 - Long.numberOfLeadingZeros(Long.highestOneBit(value)); return (int) ((index << BUCKET_SHIFT) + (highestBitSetInIndexAtLong << 6) + highestBitSetInLong); } int cardinality() { int cardinality = 0; int index = 0; while (index < buckets.length()) { if (buckets.get(index) != null) { long[] longs = buckets.get(index).longs; for (long value : longs) cardinality += Long.bitCount(value); } index++; } return cardinality; } long estimateBitsUsed() { long longsUsed = 0; long idxCounts = 
0; int index = 0; while (index < buckets.length()) { if (buckets.get(index) != null) { idxCounts++; longsUsed += buckets.get(index).longs.length; } index++; } // total bits used long bitsUsedByArrayPointers = buckets.length() * 64; long bitsUsedByIdx = idxCounts * 64; long bitsUsedByLongs = longsUsed * 64; return bitsUsedByArrayPointers + bitsUsedByIdx + bitsUsedByLongs; } /** * * Use this method to compact an existing SparseBitSet. Note any attempts to add a new value greater than the max value will result in exception. * * @param sparseBitSet * @return new SparseBitSet that is compact, does not hold null references beyond the max int value added in the given input. */ static SparseBitSet compact(SparseBitSet sparseBitSet) { int maxValueAdded = sparseBitSet.findMaxValue(); // if the given set is empty then compact the sparseBitSet to have only 1 bucket i.e. 64 longs if (maxValueAdded < 0) { maxValueAdded = (1 << BUCKET_SHIFT) - 1; } int indexForMaxValueAdded = getIndex(maxValueAdded); int newLength = indexForMaxValueAdded + 1; return copyWithNewLength(sparseBitSet, newLength, newLength, maxValueAdded); } static SparseBitSet resize(SparseBitSet sparseBitSet, int newMaxValue) { if (sparseBitSet.findMaxValue() < newMaxValue) { int indexForNewMaxValue = getIndex(newMaxValue); int newLength = indexForNewMaxValue + 1; return copyWithNewLength(sparseBitSet, newLength, sparseBitSet.buckets.length(), newMaxValue); } return sparseBitSet; } private static SparseBitSet copyWithNewLength(SparseBitSet sparseBitSet, int newLength, int lengthToClone, int newMaxValue) { AtomicReferenceArray<Bucket> compactBuckets = new AtomicReferenceArray<Bucket>(newLength); for (int i = 0; i < lengthToClone; i++) { if (sparseBitSet.buckets.get(i) != null) compactBuckets.set(i, sparseBitSet.buckets.get(i)); } return new SparseBitSet(newMaxValue, compactBuckets); } } }
9,181
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/HollowHashIndexBuilder.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.index; import static com.netflix.hollow.core.memory.FixedLengthData.bitsRequiredToRepresentValue; import com.netflix.hollow.core.HollowConstants; import com.netflix.hollow.core.index.HollowHashIndexField.FieldPathSegment; import com.netflix.hollow.core.index.traversal.HollowIndexerValueTraverser; import com.netflix.hollow.core.memory.encoding.FixedLengthElementArray; import com.netflix.hollow.core.memory.encoding.HashCodes; import com.netflix.hollow.core.memory.pool.ArraySegmentRecycler; import com.netflix.hollow.core.memory.pool.WastefulRecycler; import com.netflix.hollow.core.read.HollowReadFieldUtils; import com.netflix.hollow.core.read.dataaccess.HollowDataAccess; import com.netflix.hollow.core.read.engine.HollowReadStateEngine; import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator; import java.util.BitSet; public class HollowHashIndexBuilder { private final HollowPreindexer preindexer; private final int[] bitsPerTraverserField; private final int[] offsetPerTraverserField; private final int bitsPerMatchHashKey; final int bitsPerSelectHashEntry; private final ArraySegmentRecycler memoryRecycler; private FixedLengthElementArray finalMatchHashTable; FixedLengthElementArray finalSelectHashArray; private long finalMatchHashMask; private int finalBitsPerMatchHashEntry; private int finalBitsPerSelectTableSize; private int 
finalBitsPerSelectTablePointer; private GrowingSegmentedLongArray matchIndexHashAndSizeArray; private FixedLengthElementArray intermediateMatchHashTable; private int intermediateMatchHashTableSize; private int bitsPerIntermediateListIdentifier; private int bitsPerIntermediateMatchHashEntry; private int intermediateMatchHashMask; private int intermediateMatchHashTableSizeBeforeGrow; private int matchCount; /** * This constructor is for binary-compatibility for code compiled against * older builds. * * @param stateEngine state engine * @param type The query starts with the specified type * @param selectField The query will select records at this field (specify "" to select the specified type). * The selectField may span collection elements and/or map keys or values, which can result in multiple matches per record of the specified start type. * @param matchFields The query will match on the specified match fields. The match fields may span collection elements and/or map keys or values. * @deprecated Use {@link #HollowHashIndexBuilder(HollowDataAccess, String, String, String...)} */ @Deprecated public HollowHashIndexBuilder(HollowReadStateEngine stateEngine, String type, String selectField, String... matchFields) { this((HollowDataAccess) stateEngine, type, selectField, matchFields); } ///TODO: Optimization, make the matchFields[].schemaFieldPositionPath as short as possible, to reduce iteration /// this means merging the common roots of path from the same base field, and pushing all unique base fields down /// to the leaves. public HollowHashIndexBuilder(HollowDataAccess stateEngine, String type, String selectField, String... 
matchFields) { this.preindexer = new HollowPreindexer(stateEngine, type, selectField, matchFields); preindexer.buildFieldSpecifications(); this.memoryRecycler = WastefulRecycler.DEFAULT_INSTANCE; HollowIndexerValueTraverser traverser = preindexer.getTraverser(); this.bitsPerTraverserField = new int[traverser.getNumFieldPaths()]; this.offsetPerTraverserField = new int[traverser.getNumFieldPaths()]; int bitsPerMatchHashKey = 0; for(int i=0;i<traverser.getNumFieldPaths();i++) { int maxOrdinalForTypeState = traverser.getFieldTypeDataAccess(i).getTypeState().maxOrdinal(); bitsPerTraverserField[i] = bitsRequiredToRepresentValue(maxOrdinalForTypeState + 1); offsetPerTraverserField[i] = bitsPerMatchHashKey; if(i < preindexer.getNumMatchTraverserFields()) bitsPerMatchHashKey += bitsPerTraverserField[i]; } this.bitsPerMatchHashKey = bitsPerMatchHashKey; this.bitsPerSelectHashEntry = bitsPerTraverserField[preindexer.getSelectFieldSpec().getBaseIteratorFieldIdx()]; } public void buildIndex() { matchIndexHashAndSizeArray = new GrowingSegmentedLongArray(memoryRecycler); BitSet populatedOrdinals = preindexer.getHollowTypeDataAccess().getTypeState().getPopulatedOrdinals(); /// an initial guess at how big this table might be -- one match per top-level element. 
int guessNumberOfMatches = populatedOrdinals.cardinality(); intermediateMatchHashTableSize = HashCodes.hashTableSize(guessNumberOfMatches); bitsPerIntermediateListIdentifier = bitsRequiredToRepresentValue(intermediateMatchHashTableSize - 1); bitsPerIntermediateMatchHashEntry = bitsPerMatchHashKey + bitsPerIntermediateListIdentifier; intermediateMatchHashMask = intermediateMatchHashTableSize - 1; intermediateMatchHashTableSizeBeforeGrow = intermediateMatchHashTableSize * 7 / 10; matchCount = 0; /// a data structure which keeps canonical matches for comparison (the matchHashTable) intermediateMatchHashTable = new FixedLengthElementArray(memoryRecycler, (long)intermediateMatchHashTableSize * bitsPerIntermediateMatchHashEntry); /// a data structure which tracks lists of matches under canonical matches. MultiLinkedElementArray intermediateSelectLists = new MultiLinkedElementArray(memoryRecycler); HollowIndexerValueTraverser traverser = preindexer.getTraverser(); int ordinal = populatedOrdinals.nextSetBit(0); while(ordinal != HollowConstants.ORDINAL_NONE) { traverser.traverse(ordinal); for(int i=0;i<traverser.getNumMatches();i++) { int matchHash = getMatchHash(i); long bucket = matchHash & intermediateMatchHashMask; long hashBucketBit = bucket * bitsPerIntermediateMatchHashEntry; boolean bucketIsEmpty = intermediateMatchHashTable.getElementValue(hashBucketBit, bitsPerTraverserField[0]) == 0; long bucketMatchListIdx = intermediateMatchHashTable.getElementValue(hashBucketBit + bitsPerMatchHashKey, bitsPerIntermediateListIdentifier); int bucketMatchHashCode = (int)matchIndexHashAndSizeArray.get(bucketMatchListIdx); while(!bucketIsEmpty && (bucketMatchHashCode != (matchHash & Integer.MAX_VALUE) || !intermediateMatchIsEqual(i, hashBucketBit))) { bucket = (bucket + 1) & intermediateMatchHashMask; hashBucketBit = bucket * bitsPerIntermediateMatchHashEntry; bucketIsEmpty = intermediateMatchHashTable.getElementValue(hashBucketBit, bitsPerTraverserField[0]) == 0; 
bucketMatchListIdx = intermediateMatchHashTable.getElementValue(hashBucketBit + bitsPerMatchHashKey, bitsPerIntermediateListIdentifier); bucketMatchHashCode = (int)matchIndexHashAndSizeArray.get(bucketMatchListIdx); } int matchListIdx; if(bucketIsEmpty) { matchListIdx = intermediateSelectLists.newList(); for(int j=0;j<preindexer.getNumMatchTraverserFields();j++) intermediateMatchHashTable.setElementValue(hashBucketBit + offsetPerTraverserField[j], bitsPerTraverserField[j], traverser.getMatchOrdinal(i, j) + 1); intermediateMatchHashTable.setElementValue(hashBucketBit + bitsPerMatchHashKey, bitsPerIntermediateListIdentifier, matchListIdx); matchIndexHashAndSizeArray.set(matchListIdx, matchHash & Integer.MAX_VALUE); matchCount++; /// GROW IF NECESSARY! if(matchCount > intermediateMatchHashTableSizeBeforeGrow) { growIntermediateHashTable(); } } else { matchListIdx = (int)intermediateMatchHashTable.getElementValue(hashBucketBit + bitsPerMatchHashKey, bitsPerIntermediateListIdentifier); } intermediateSelectLists.add(matchListIdx, traverser.getMatchOrdinal(i, preindexer.getSelectFieldSpec().getBaseIteratorFieldIdx())); } ordinal = populatedOrdinals.nextSetBit(ordinal + 1); } /// turn those data structures into a compact one optimized for hash lookup long totalNumberOfSelectBucketsAndBitsRequiredForSelectTableSize = calculateDedupedSizesAndTotalNumberOfSelectBuckets(intermediateSelectLists, matchIndexHashAndSizeArray); long totalNumberOfSelectBuckets = totalNumberOfSelectBucketsAndBitsRequiredForSelectTableSize & 0xFFFFFFFFFFFFFFL; long totalNumberOfMatchBuckets = HashCodes.hashTableSize(matchCount); int bitsPerFinalSelectBucketPointer = bitsRequiredToRepresentValue(totalNumberOfSelectBuckets); int bitsPerSelectTableSize = (int)(totalNumberOfSelectBucketsAndBitsRequiredForSelectTableSize >>> 56); int finalBitsPerMatchHashEntry = bitsPerMatchHashKey + bitsPerSelectTableSize + bitsPerFinalSelectBucketPointer; FixedLengthElementArray finalMatchArray = new 
FixedLengthElementArray(memoryRecycler, totalNumberOfMatchBuckets * finalBitsPerMatchHashEntry); FixedLengthElementArray finalSelectArray = new FixedLengthElementArray(memoryRecycler, totalNumberOfSelectBuckets * bitsPerSelectHashEntry); long finalMatchHashMask = totalNumberOfMatchBuckets - 1; long currentSelectArrayBucket = 0; for(int i=0;i<matchCount;i++) { long matchIndexHashAndSize = matchIndexHashAndSizeArray.get(i); int matchIndexSize = (int)(matchIndexHashAndSize >> 32); int matchIndexTableSize = HashCodes.hashTableSize(matchIndexSize); int matchIndexBucketMask = matchIndexTableSize - 1; HollowOrdinalIterator selectOrdinalIter = intermediateSelectLists.iterator(i); int selectOrdinal = selectOrdinalIter.next(); while(selectOrdinal != HollowOrdinalIterator.NO_MORE_ORDINALS) { int selectBucket = HashCodes.hashInt(selectOrdinal) & matchIndexBucketMask; int bucketOrdinal = (int)finalSelectArray.getElementValue((currentSelectArrayBucket + selectBucket) * bitsPerSelectHashEntry, bitsPerSelectHashEntry) - 1; while(bucketOrdinal != HollowConstants.ORDINAL_NONE && bucketOrdinal != selectOrdinal) { ///TODO: If select field type is not REFERENCE, then we should dedup -- unless we are reference counting for delta application ///ordinals here with the same value for the specified field. 
selectBucket = (selectBucket + 1) & matchIndexBucketMask; bucketOrdinal = (int)finalSelectArray.getElementValue((currentSelectArrayBucket + selectBucket) * bitsPerSelectHashEntry, bitsPerSelectHashEntry) - 1; } if(bucketOrdinal == HollowConstants.ORDINAL_NONE) finalSelectArray.setElementValue((currentSelectArrayBucket + selectBucket) * bitsPerSelectHashEntry, bitsPerSelectHashEntry, selectOrdinal + 1); selectOrdinal = selectOrdinalIter.next(); } long finalMatchIndexBucket = matchIndexHashAndSize & finalMatchHashMask; long finalMatchIndexBucketBit = finalMatchIndexBucket * finalBitsPerMatchHashEntry; while(finalMatchArray.getElementValue(finalMatchIndexBucketBit, bitsPerTraverserField[0]) != 0) { finalMatchIndexBucket = (finalMatchIndexBucket + 1) & finalMatchHashMask; finalMatchIndexBucketBit = finalMatchIndexBucket * finalBitsPerMatchHashEntry; } long intermediateMatchHashBucket = matchIndexHashAndSize & intermediateMatchHashMask; long intermediateMatchIndexBucketBit = intermediateMatchHashBucket * bitsPerIntermediateMatchHashEntry; while(intermediateMatchHashTable.getElementValue(intermediateMatchIndexBucketBit + bitsPerMatchHashKey, bitsPerIntermediateListIdentifier) != i) { intermediateMatchHashBucket = (intermediateMatchHashBucket + 1) & intermediateMatchHashMask; intermediateMatchIndexBucketBit = intermediateMatchHashBucket * bitsPerIntermediateMatchHashEntry; } if(bitsPerMatchHashKey < 56) { long matchHashKey = intermediateMatchHashTable.getElementValue(intermediateMatchIndexBucketBit, bitsPerMatchHashKey); finalMatchArray.setElementValue(finalMatchIndexBucketBit, bitsPerMatchHashKey, matchHashKey); } else { finalMatchArray.copyBits(intermediateMatchHashTable, intermediateMatchIndexBucketBit, finalMatchIndexBucketBit, bitsPerMatchHashKey); } finalMatchArray.setElementValue(finalMatchIndexBucketBit + bitsPerMatchHashKey, bitsPerSelectTableSize, matchIndexSize); finalMatchArray.setElementValue(finalMatchIndexBucketBit + bitsPerMatchHashKey + 
bitsPerSelectTableSize, bitsPerFinalSelectBucketPointer, currentSelectArrayBucket); currentSelectArrayBucket += matchIndexTableSize; } this.finalMatchHashTable = finalMatchArray; this.finalSelectHashArray = finalSelectArray; this.finalBitsPerMatchHashEntry = finalBitsPerMatchHashEntry; this.finalBitsPerSelectTablePointer = bitsPerFinalSelectBucketPointer; this.finalBitsPerSelectTableSize = bitsPerSelectTableSize; this.finalMatchHashMask = finalMatchHashMask; } private void growIntermediateHashTable() { int newMatchHashTableSize = intermediateMatchHashTableSize * 2; int newMatchHashMask = newMatchHashTableSize - 1; int newBitsForListIdentifier = bitsRequiredToRepresentValue(newMatchHashTableSize - 1); int newBitsPerMatchHashEntry = bitsPerMatchHashKey + newBitsForListIdentifier; FixedLengthElementArray newMatchHashTable = new FixedLengthElementArray(memoryRecycler, (long)newMatchHashTableSize * newBitsPerMatchHashEntry); for(int j=0;j<matchCount;j++) { int rehashCode = (int)matchIndexHashAndSizeArray.get(j); long oldHashBucket = rehashCode & intermediateMatchHashMask; long oldHashBucketBit = oldHashBucket * bitsPerIntermediateMatchHashEntry; while(intermediateMatchHashTable.getElementValue(oldHashBucketBit + bitsPerMatchHashKey, bitsPerIntermediateListIdentifier) != j) { oldHashBucket = (oldHashBucket+1) & intermediateMatchHashMask; oldHashBucketBit = oldHashBucket * bitsPerIntermediateMatchHashEntry; } long rehashBucket = rehashCode & newMatchHashMask; long rehashBucketBit = rehashBucket * newBitsPerMatchHashEntry; boolean rehashBucketIsEmpty = newMatchHashTable.getElementValue(rehashBucketBit, bitsPerTraverserField[0]) == 0; while(!rehashBucketIsEmpty) { rehashBucket = (rehashBucket + 1) & newMatchHashMask; rehashBucketBit = rehashBucket * newBitsPerMatchHashEntry; rehashBucketIsEmpty = newMatchHashTable.getElementValue(rehashBucketBit, bitsPerTraverserField[0]) == 0; } if(bitsPerMatchHashKey < 56) { newMatchHashTable.setElementValue(rehashBucketBit, 
bitsPerMatchHashKey, intermediateMatchHashTable.getElementValue(oldHashBucketBit, bitsPerMatchHashKey)); } else { newMatchHashTable.copyBits(intermediateMatchHashTable, oldHashBucketBit, rehashBucketBit, bitsPerMatchHashKey); } int listIdx = (int)intermediateMatchHashTable.getElementValue(oldHashBucketBit + bitsPerMatchHashKey, bitsPerIntermediateListIdentifier); newMatchHashTable.setElementValue(rehashBucketBit + bitsPerMatchHashKey, bitsPerIntermediateListIdentifier, listIdx); } intermediateMatchHashTable.destroy(memoryRecycler); memoryRecycler.swap(); intermediateMatchHashTable = newMatchHashTable; intermediateMatchHashTableSize = newMatchHashTableSize; intermediateMatchHashTableSizeBeforeGrow = intermediateMatchHashTableSize * 7 / 10; bitsPerIntermediateListIdentifier = newBitsForListIdentifier; bitsPerIntermediateMatchHashEntry = newBitsPerMatchHashEntry; intermediateMatchHashMask = newMatchHashMask; } /** * Called after initial pass. * Returns the sum total number of select buckets in the low 7 bytes, and the bits required for the max set size in the high 1 byte. 
*/ private long calculateDedupedSizesAndTotalNumberOfSelectBuckets(MultiLinkedElementArray elementArray, GrowingSegmentedLongArray matchIndexHashAndSizeArray) { long totalBuckets = 0; long maxSize = 0; int[] selectArray = new int[8]; for(int i=0;i<elementArray.numLists();i++) { int listSize = elementArray.listSize(i); int setSize = 0; int predictedBuckets = HashCodes.hashTableSize(listSize); int hashMask = predictedBuckets - 1; if(predictedBuckets > selectArray.length) selectArray = new int[predictedBuckets]; for(int j=0;j<predictedBuckets;j++) selectArray[j] = -1; HollowOrdinalIterator iter = elementArray.iterator(i); int selectOrdinal = iter.next(); while(selectOrdinal != HollowOrdinalIterator.NO_MORE_ORDINALS) { int hash = HashCodes.hashInt(selectOrdinal); int bucket = hash & hashMask; while(true) { if(selectArray[bucket] == selectOrdinal) break; if(selectArray[bucket] == -1) { selectArray[bucket] = selectOrdinal; setSize++; break; } bucket = (bucket+1) & hashMask; } selectOrdinal = iter.next(); } long matchIndexHashAndSize = matchIndexHashAndSizeArray.get(i); matchIndexHashAndSize |= (long)setSize << 32; matchIndexHashAndSizeArray.set(i, matchIndexHashAndSize); totalBuckets += HashCodes.hashTableSize(setSize); if(setSize > maxSize) maxSize = setSize; } return totalBuckets | (long)bitsRequiredToRepresentValue(maxSize) << 56; } private boolean intermediateMatchIsEqual(int matchIdx, long hashBucketBit) { for(int i=0;i<preindexer.getMatchFieldSpecs().length;i++) { HollowHashIndexField field = preindexer.getMatchFieldSpecs()[i]; int matchOrdinal = preindexer.getTraverser().getMatchOrdinal(matchIdx, field.getBaseIteratorFieldIdx()); int hashOrdinal = (int)intermediateMatchHashTable.getElementValue(hashBucketBit + offsetPerTraverserField[field.getBaseIteratorFieldIdx()], bitsPerTraverserField[field.getBaseIteratorFieldIdx()]) - 1; FieldPathSegment[] fieldPath = field.getSchemaFieldPositionPath(); if(fieldPath.length == 0) { if(matchOrdinal != hashOrdinal) return 
false; } else { for(int j=0;j<fieldPath.length - 1;j++) { FieldPathSegment fieldPathSegment = fieldPath[j]; if(matchOrdinal != HollowConstants.ORDINAL_NONE) { matchOrdinal = fieldPathSegment.getOrdinalForField(matchOrdinal); } if(hashOrdinal != HollowConstants.ORDINAL_NONE) { hashOrdinal = fieldPathSegment.getOrdinalForField(hashOrdinal); } } if(matchOrdinal != hashOrdinal) { FieldPathSegment lastPathElement = fieldPath[fieldPath.length - 1]; if(isAnyFieldNull(matchOrdinal, hashOrdinal) || !HollowReadFieldUtils.fieldsAreEqual( lastPathElement.getObjectTypeDataAccess(), matchOrdinal, lastPathElement.getSegmentFieldPosition(), lastPathElement.getObjectTypeDataAccess(), hashOrdinal, lastPathElement.getSegmentFieldPosition())) return false; } } } return true; } private boolean isAnyFieldNull(int matchOrdinal, int hashOrdinal) { return matchOrdinal == HollowConstants.ORDINAL_NONE || hashOrdinal == HollowConstants.ORDINAL_NONE; } private int getMatchHash(int matchIdx) { int matchHash = 0; for(int i=0;i<preindexer.getMatchFieldSpecs().length;i++) { HollowHashIndexField field = preindexer.getMatchFieldSpecs()[i]; int ordinal = preindexer.getTraverser().getMatchOrdinal(matchIdx, field.getBaseIteratorFieldIdx()); FieldPathSegment[] fieldPath = field.getSchemaFieldPositionPath(); if(fieldPath.length == 0) { matchHash ^= HashCodes.hashInt(ordinal); } else { for(int j=0;j<fieldPath.length-1;j++) { ordinal = fieldPath[j].getOrdinalForField(ordinal); // Cannot find nested ordinal for null parent if(ordinal == HollowConstants.ORDINAL_NONE) { break; } } FieldPathSegment lastPathElement = field.getLastFieldPositionPathElement(); int fieldHashCode = ordinal == HollowConstants.ORDINAL_NONE ? 
HollowConstants.ORDINAL_NONE : HollowReadFieldUtils.fieldHashCode(lastPathElement.getObjectTypeDataAccess(), ordinal, lastPathElement.getSegmentFieldPosition()); matchHash ^= HashCodes.hashInt(fieldHashCode); } } return matchHash; } public int getBitsPerMatchHashKey() { return bitsPerMatchHashKey; } public FixedLengthElementArray getFinalMatchHashTable() { return finalMatchHashTable; } public long getFinalMatchHashMask() { return finalMatchHashMask; } public int getFinalBitsPerMatchHashEntry() { return finalBitsPerMatchHashEntry; } public int getFinalBitsPerSelectTableSize() { return finalBitsPerSelectTableSize; } public int getFinalBitsPerSelectTablePointer() { return finalBitsPerSelectTablePointer; } public FixedLengthElementArray getFinalSelectHashArray() { return finalSelectHashArray; } public HollowHashIndexField getSelectField() { return preindexer.getSelectFieldSpec(); } public HollowHashIndexField[] getMatchFields() { return preindexer.getMatchFieldSpecs(); } public int[] getBitsPerTraverserField() { return bitsPerTraverserField; } public int[] getOffsetPerTraverserField() { return offsetPerTraverserField; } public int getBitsPerSelectHashEntry() { return bitsPerSelectHashEntry; } }
9,182
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/FieldPaths.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.index; import static java.util.stream.Collectors.joining; import com.netflix.hollow.core.HollowDataset; import com.netflix.hollow.core.index.key.PrimaryKey; import com.netflix.hollow.core.schema.HollowCollectionSchema; import com.netflix.hollow.core.schema.HollowMapSchema; import com.netflix.hollow.core.schema.HollowObjectSchema; import com.netflix.hollow.core.schema.HollowSchema; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Objects; /** * Functionality for processing field paths. */ public final class FieldPaths { /** * Creates an object-based field path given a data set and the field path in symbolic form conforming to paths * associated with a primary key. * * @param dataset the data set * @param type the type name from which to bind the field path * @param path the symbolic field path * @return the field path * @throws IllegalArgumentException if the symbolic field path is ill-formed and cannot be bound */ public static FieldPath<ObjectFieldSegment> createFieldPathForPrimaryKey( HollowDataset dataset, String type, String path) { boolean autoExpand = !path.endsWith("!"); path = autoExpand ? 
path : path.substring(0, path.length() - 1); FieldPath<FieldSegment> fp = createFieldPath(dataset, type, path, autoExpand, false, false); // Erasure trick to avoid copying when it is known the list only contains // instances of ObjectFieldSegment assert fp.segments.stream().allMatch(o -> o instanceof ObjectFieldSegment); @SuppressWarnings( {"unchecked", "raw"}) FieldPath<ObjectFieldSegment> result = (FieldPath<ObjectFieldSegment>) (FieldPath) fp; return result; } /** * Creates a field path given a data set and the field path in symbolic form conforming to paths * associated with a hash index. * * @param dataset the data set * @param type the type name from which to bind the field path * @param path the symbolic field path * @return the field path * @throws IllegalArgumentException if the symbolic field path is ill-formed and cannot be bound */ public static FieldPath<FieldSegment> createFieldPathForHashIndex(HollowDataset dataset, String type, String path) { return createFieldPath(dataset, type, path, false, false, true); } /** * Creates a field path given a data set and the field path in symbolic form conforming to paths * associated with a prefix index. 
* * @param dataset the data set * @param type the type name from which to bind the field path * @param path the symbolic field path * @param autoExpand {@code true} if the field path should be expanded (if needed) to a full path referencing * an object field whose type is a non-reference type, otherwise the field path is not expanded and must be * a full path * @return the field path * @throws IllegalArgumentException if the symbolic field path is ill-formed and cannot be bound */ public static FieldPath<FieldSegment> createFieldPathForPrefixIndex( HollowDataset dataset, String type, String path, boolean autoExpand) { // If autoExpand is false then requireFullPath must be true boolean requireFullPath = !autoExpand; return createFieldPath(dataset, type, path, autoExpand, requireFullPath, true); } /** * Creates a field path given a data set and the field path in symbolic form. * * @param dataset the data set * @param type the type name from which to bind the field path * @param path the symbolic field path * @param autoExpand {@code true} if the field path should be expanded (if needed) to a full path referencing * an object field whose type is a non-reference type, otherwise the field path is not expanded * @param requireFullPath {@code true} if a full path is required when {@code autoExpand} is {@code false}. * Ignored if {@code autoExpand} is {@code true}. * @param traverseSequences {@code true} if lists, sets and maps are traversed, otherwise an * {@code IllegalArgumentException} will be thrown * @return the field path * @throws IllegalArgumentException if the symbolic field path is ill-formed and cannot be bound */ static FieldPath<FieldSegment> createFieldPath( HollowDataset dataset, String type, String path, boolean autoExpand, boolean requireFullPath, boolean traverseSequences) { Objects.requireNonNull(dataset); Objects.requireNonNull(type); Objects.requireNonNull(path); String[] segments = path.isEmpty() ? 
new String[0] : path.split("\\."); List<FieldSegment> fieldSegments = new ArrayList<>(); String segmentType = type; for (int i = 0; i < segments.length; i++) { HollowSchema schema = dataset.getSchema(segmentType); // @@@ Can this only occur for anything other than the root `type`? if (schema == null) { throw new FieldPathException(FieldPathException.ErrorKind.NOT_BINDABLE, dataset, type, segments, fieldSegments, null, i); } String segment = segments[i]; HollowSchema.SchemaType schemaType = schema.getSchemaType(); if (schemaType == HollowSchema.SchemaType.OBJECT) { HollowObjectSchema objectSchema = (HollowObjectSchema) schema; int index = objectSchema.getPosition(segment); if (index == -1) { throw new FieldPathException(FieldPathException.ErrorKind.NOT_FOUND, dataset, type, segments, fieldSegments, schema, i); } segmentType = objectSchema.getReferencedType(index); fieldSegments.add(new ObjectFieldSegment(objectSchema, segment, segmentType, index)); } else if (traverseSequences && (schemaType == HollowSchema.SchemaType.SET || schemaType == HollowSchema.SchemaType.LIST)) { HollowCollectionSchema collectionSchema = (HollowCollectionSchema) schema; if (!segment.equals("element")) { throw new FieldPathException(FieldPathException.ErrorKind.NOT_FOUND, dataset, type, segments, fieldSegments, schema, i); } segmentType = collectionSchema.getElementType(); fieldSegments.add(new FieldSegment(collectionSchema, segment, segmentType)); } else if (traverseSequences && schemaType == HollowSchema.SchemaType.MAP) { HollowMapSchema mapSchema = (HollowMapSchema) schema; if (segment.equals("key")) { segmentType = mapSchema.getKeyType(); } else if (segment.equals("value")) { segmentType = mapSchema.getValueType(); } else { throw new FieldPathException(FieldPathException.ErrorKind.NOT_FOUND, dataset, type, segments, fieldSegments, schema, i); } fieldSegments.add(new FieldSegment(mapSchema, segment, segmentType)); } else if (!traverseSequences) { throw new 
FieldPathException(FieldPathException.ErrorKind.NOT_TRAVERSABLE, dataset, type, segments, fieldSegments, schema, i); } if (i < segments.length - 1 && segmentType == null) { throw new FieldPathException(FieldPathException.ErrorKind.NOT_TRAVERSABLE, dataset, type, segments, fieldSegments, schema, i); } } if (autoExpand) { while (segmentType != null) { HollowSchema schema = dataset.getSchema(segmentType); if (schema.getSchemaType() == HollowSchema.SchemaType.OBJECT) { HollowObjectSchema objectSchema = (HollowObjectSchema) schema; if (objectSchema.numFields() == 1) { segmentType = objectSchema.getReferencedType(0); fieldSegments.add( new ObjectFieldSegment(objectSchema, objectSchema.getFieldName(0), segmentType, 0)); } else if (objectSchema.getPrimaryKey() != null && objectSchema.getPrimaryKey().numFields() == 1) { PrimaryKey key = objectSchema.getPrimaryKey(); FieldPath<ObjectFieldSegment> expandedFieldSegments; try { expandedFieldSegments = createFieldPathForPrimaryKey(dataset, key.getType(), key.getFieldPaths()[0]); } catch (FieldPathException cause) { FieldPathException e = new FieldPathException(FieldPathException.ErrorKind.NOT_EXPANDABLE, dataset, type, segments, fieldSegments, objectSchema); e.initCause(cause); throw e; } fieldSegments.addAll(expandedFieldSegments.segments); break; } else { throw new FieldPathException(FieldPathException.ErrorKind.NOT_EXPANDABLE, dataset, type, segments, fieldSegments, objectSchema); } } else { throw new FieldPathException(FieldPathException.ErrorKind.NOT_EXPANDABLE, dataset, type, segments, fieldSegments, schema); } } } else if (requireFullPath && segmentType != null) { throw new FieldPathException(FieldPathException.ErrorKind.NOT_FULL, dataset, type, segments, fieldSegments); } return new FieldPath<>(type, fieldSegments, !autoExpand); } /** * An exception contain structured information when a field path cannot be bound. 
*/
    static final class FieldPathException extends IllegalArgumentException {
        /** The category of field path binding failure. */
        enum ErrorKind {
            NOT_BINDABLE,
            NOT_FOUND,
            NOT_FULL,
            NOT_TRAVERSABLE,
            NOT_EXPANDABLE
        }

        final ErrorKind error;
        final String rootType;
        final String[] segments;
        // Prior paths
        final List<FieldSegment> fieldSegments;
        // Partial state
        final HollowSchema enclosingSchema;
        final int segmentIndex;

        FieldPathException(
                ErrorKind error, HollowDataset dataset, String rootType, String[] segments,
                List<FieldSegment> fieldSegments) {
            this(error, dataset, rootType, segments, fieldSegments, null, segments.length);
        }

        FieldPathException(
                ErrorKind error, HollowDataset dataset, String rootType, String[] segments,
                List<FieldSegment> fieldSegments, HollowSchema enclosingSchema) {
            this(error, dataset, rootType, segments, fieldSegments, enclosingSchema, segments.length);
        }

        FieldPathException(
                ErrorKind error, HollowDataset dataset, String rootType, String[] segments,
                List<FieldSegment> fieldSegments, HollowSchema enclosingSchema, int segmentIndex) {
            super(message(error, dataset, rootType, segments, fieldSegments, enclosingSchema, segmentIndex));
            this.error = error;
            this.rootType = rootType;
            this.segments = segments;
            this.fieldSegments = Collections.unmodifiableList(fieldSegments);
            this.enclosingSchema = enclosingSchema;
            this.segmentIndex = segmentIndex;
        }

        /**
         * Builds the human-readable exception message for the given failure kind.
         * The message always echoes the full requested path plus the prefix (or partially
         * expanded path) that was successfully bound before the failure occurred.
         */
        static String message(
                ErrorKind error, HollowDataset dataset, String rootType, String[] segments,
                List<FieldSegment> fieldSegments, HollowSchema enclosingSchema, int segmentIndex) {
            switch (error) {
                case NOT_BINDABLE:
                    return String.format("Field path \"%s\" cannot be bound to data set %s. " +
                                    "A schema of type named \"%s\" cannot be found for the last segment of the path prefix \"%s\".",
                            toPathString(segments),
                            dataset,
                            getLastTypeName(rootType, fieldSegments),
                            toPathString(segments, segmentIndex + 1));
                case NOT_FOUND:
                    return String.format("Field path \"%s\" not found in data set %s. " +
                                    "A schema of type named \"%s\" does not contain a field for the last segment of the path prefix \"%s\".",
                            toPathString(segments),
                            dataset,
                            enclosingSchema.getName(),
                            toPathString(segments, segmentIndex + 1));
                case NOT_TRAVERSABLE: {
                    if (enclosingSchema.getSchemaType() != HollowSchema.SchemaType.OBJECT) {
                        return String.format("Field path \"%s\" is not traversable in data set %s. " +
                                        "A non-object schema of type named \"%s\" and of schema type %s cannot be traversed for the last segment of the path prefix \"%s\".",
                                toPathString(segments),
                                dataset,
                                enclosingSchema.getName(),
                                enclosingSchema.getSchemaType(),
                                toPathString(segments, segmentIndex + 1));
                    } else {
                        return String.format("Field path \"%s\" is not traversable in data set %s. " +
                                        "An object schema of type named \"%s\" cannot be traversed for the last segment of the path prefix \"%s\". " +
                                        "The last segment of the path prefix refers to a value (non-reference) field.",
                                toPathString(segments),
                                dataset,
                                enclosingSchema.getName(),
                                toPathString(segments, segmentIndex + 1));
                    }
                }
                case NOT_FULL:
                    return String.format("Field path \"%s\" is not a full path in data set %s. " +
                                    "The last segment of the path is not a value (non-reference) field and refers to a reference field whose schema is of type named \"%s\"",
                            toPathString(segments),
                            dataset,
                            fieldSegments.get(fieldSegments.size() - 1).getTypeName());
                case NOT_EXPANDABLE: {
                    if (enclosingSchema.getSchemaType() == HollowSchema.SchemaType.OBJECT) {
                        HollowObjectSchema objectSchema = (HollowObjectSchema) enclosingSchema;
                        if (objectSchema.numFields() != 1
                                || objectSchema.getPrimaryKey() == null
                                || objectSchema.getPrimaryKey().numFields() != 1) {
                            return String.format("Field path \"%s\" is not expandable in data set %s. " +
                                            "An object schema of type named \"%s\" cannot be traversed for the last segment of the partially expanded path \"%s\". " +
                                            "The schema contains more than one field, or has no primary key, or has a primary key with more than one field path.",
                                    toPathString(segments),
                                    dataset,
                                    enclosingSchema.getName(),
                                    toPathString(fieldSegments));
                        }
                    }
                    // Reached for non-object schemas, and for object schemas whose expansion
                    // failed for a nested reason (see the NOT_EXPANDABLE with initCause path).
                    return String.format("Field path \"%s\" is not expandable in data set %s. " +
                                    "A non-object schema of type named \"%s\" and of schema type %s cannot be traversed for the last segment of the partially expanded path \"%s\".",
                            toPathString(segments),
                            dataset,
                            enclosingSchema.getName(),
                            enclosingSchema.getSchemaType(),
                            toPathString(fieldSegments));
                }
                default:
                    throw new InternalError("Cannot reach here");
            }
        }

        // Type name of the deepest successfully bound segment, or the root type when none bound.
        static String getLastTypeName(String rootType, List<FieldSegment> fieldSegments) {
            return fieldSegments.isEmpty() ? rootType : fieldSegments.get(fieldSegments.size() - 1).typeName;
        }

        static String toPathString(List<FieldSegment> segments) {
            return segments.stream().map(FieldSegment::getName).collect(joining("."));
        }

        static String toPathString(String[] segments) {
            return toPathString(segments, segments.length);
        }

        // Dot-joined rendering of the first l segments.
        static String toPathString(String[] segments, int l) {
            return Arrays.stream(segments).limit(l).collect(joining("."));
        }
    }

    /**
     * A structured representation of a field path containing field segments.
     *
     * @param <T> the field segment type
     */
    public static final class FieldPath<T extends FieldSegment> {
        final String rootType;
        final List<T> segments;
        final boolean noAutoExpand;

        FieldPath(String rootType, List<T> segments, boolean noAutoExpand) {
            this.rootType = rootType;
            this.segments = Collections.unmodifiableList(segments);
            this.noAutoExpand = noAutoExpand;
        }

        /**
         * Returns the root type from which this field path is bound.
         *
         * @return the root type
         */
        public String getRootType() {
            return rootType;
        }

        /**
         * Returns the field segments.
         *
         * @return the field segments. The returned list is unmodifiable.
         */
        public List<T> getSegments() {
            return segments;
        }

        /**
         * Returns the field path in nominal form.
         *
         * @return the field path in nominal form
         */
        public String toString() {
            String path = segments.stream().map(FieldPaths.FieldSegment::getName)
                    .collect(joining("."));
            // A trailing "!" marks a path that was bound without auto-expansion.
            return noAutoExpand ? path + "!" : path;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            FieldPath<?> fieldPath = (FieldPath<?>) o;
            return noAutoExpand == fieldPath.noAutoExpand
                    && rootType.equals(fieldPath.rootType)
                    && segments.equals(fieldPath.segments);
        }

        @Override
        public int hashCode() {
            return Objects.hash(rootType, segments, noAutoExpand);
        }
    }

    /**
     * A structured representation of a field segment.
     */
    public static class FieldSegment {
        final HollowSchema enclosingSchema;
        final String name;
        final String typeName;

        FieldSegment(HollowSchema enclosingSchema, String name, String typeName) {
            this.name = name;
            this.typeName = typeName;
            this.enclosingSchema = enclosingSchema;
        }

        /**
         * Returns the enclosing schema that declares a field corresponding to this segment.
         *
         * @return the enclosing schema
         */
        public HollowSchema getEnclosingSchema() {
            return enclosingSchema;
        }

        /**
         * Returns the segment name.
         *
         * @return the segment name
         */
        public String getName() {
            return name;
        }

        /**
         * Returns the schema type name associated with this segment.
         *
         * @return the schema type name.
         */
        public String getTypeName() {
            return typeName;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            FieldSegment that = (FieldSegment) o;
            return Objects.equals(enclosingSchema, that.enclosingSchema)
                    && Objects.equals(name, that.name)
                    && Objects.equals(typeName, that.typeName);
        }

        @Override
        public int hashCode() {
            return Objects.hash(enclosingSchema, name, typeName);
        }
    }

    /**
     * A structured representation of a field segment corresponding to a field in an object schema.
     */
    public static final class ObjectFieldSegment extends FieldSegment {
        final int index;
        final HollowObjectSchema.FieldType type;

        ObjectFieldSegment(
                HollowObjectSchema enclosingSchema, String name, String typeName, int index) {
            super(enclosingSchema, name, typeName);
            this.index = index;
            this.type = enclosingSchema.getFieldType(index);
        }

        /**
         * {@inheritDoc}
         */
        public HollowObjectSchema getEnclosingSchema() {
            return (HollowObjectSchema) super.getEnclosingSchema();
        }

        /**
         * The field index in the enclosing object schema.
         *
         * @return the field index
         */
        public int getIndex() {
            return index;
        }

        /**
         * The field type of the field in the enclosing object schema.
         *
         * @return the field type
         */
        public HollowObjectSchema.FieldType getType() {
            return type;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            if (!super.equals(o)) {
                return false;
            }
            ObjectFieldSegment that = (ObjectFieldSegment) o;
            return index == that.index && type == that.type;
        }

        @Override
        public int hashCode() {
            return Objects.hash(super.hashCode(), index, type);
        }
    }
}
9,183
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/HollowHashIndex.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.index; import static java.util.Objects.requireNonNull; import com.netflix.hollow.core.HollowConstants; import com.netflix.hollow.core.index.HollowHashIndexField.FieldPathSegment; import com.netflix.hollow.core.memory.encoding.FixedLengthElementArray; import com.netflix.hollow.core.memory.encoding.HashCodes; import com.netflix.hollow.core.read.HollowReadFieldUtils; import com.netflix.hollow.core.read.dataaccess.HollowDataAccess; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.read.engine.HollowReadStateEngine; import com.netflix.hollow.core.read.engine.HollowTypeStateListener; import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState; /** * A HollowHashIndex is used for indexing non-primary-key data. This type of index can map multiple keys to a single matching record, and/or * multiple records to a single key. * <p> * The field definitions in a hash key may be hierarchical (traverse multiple record types) via dot-notation. For example, * the field definition <i>actors.element.actorId</i> may be used to traverse a child <b>LIST</b> or <b>SET</b> type record referenced by the field * <i>actors</i>, each elements contained therein, and finally each actors <i>actorId</i> field. 
*/ public class HollowHashIndex implements HollowTypeStateListener { private volatile HollowHashIndexState hashStateVolatile; private final HollowDataAccess hollowDataAccess; private final HollowObjectTypeDataAccess typeState; private final String type; private final String selectField; private final String[] matchFields; /** * This constructor is for binary-compatibility for code compiled against * older builds. * * @param stateEngine The state engine to index * @param type The query starts with the specified type * @param selectField The query will select records at this field (specify "" to select the specified type). * The selectField may span collection elements and/or map keys or values, which can result in multiple matches per record of the specified start type. * @param matchFields The query will match on the specified match fields. The match fields may span collection elements and/or map keys or values. */ public HollowHashIndex(HollowReadStateEngine stateEngine, String type, String selectField, String... matchFields) { this((HollowDataAccess) stateEngine, type, selectField, matchFields); } /** * Define a {@link HollowHashIndex}. * * @param hollowDataAccess The state engine to index * @param type The query starts with the specified type * @param selectField The query will select records at this field (specify "" to select the specified type). * The selectField may span collection elements and/or map keys or values, which can result in multiple matches per record of the specified start type. * @param matchFields The query will match on the specified match fields. The match fields may span collection elements and/or map keys or values. */ public HollowHashIndex(HollowDataAccess hollowDataAccess, String type, String selectField, String... 
matchFields) { requireNonNull(type, "Hollow Hash Index creation failed because type was null"); requireNonNull(hollowDataAccess, "Hollow Hash Index creation on type [" + type + "] failed because read state wasn't initialized"); this.hollowDataAccess = hollowDataAccess; this.type = type; this.typeState = (HollowObjectTypeDataAccess) hollowDataAccess.getTypeDataAccess(type); this.selectField = selectField; this.matchFields = matchFields; reindexHashIndex(); } /** * Recreate the hash index entirely */ private void reindexHashIndex() { HollowHashIndexBuilder builder = new HollowHashIndexBuilder(hollowDataAccess, type, selectField, matchFields); builder.buildIndex(); this.hashStateVolatile = new HollowHashIndexState(builder); } /** * Query the index. * * @param query the query * @return the hash index result to gather the matched ordinals. A {@code null} value indicated no matches were * found. */ public HollowHashIndexResult findMatches(Object... query) { int hashCode = 0; for(int i=0;i<query.length;i++) { if(query[i] == null) throw new IllegalArgumentException("querying by null unsupported; i=" + i); hashCode ^= HashCodes.hashInt(keyHashCode(query[i], i)); } HollowHashIndexResult result; HollowHashIndexState hashState; do { result = null; hashState = hashStateVolatile; long bucket = hashCode & hashState.getMatchHashMask(); long hashBucketBit = bucket * hashState.getBitsPerMatchHashEntry(); boolean bucketIsEmpty = hashState.getMatchHashTable().getElementValue(hashBucketBit, hashState.getBitsPerTraverserField()[0]) == 0; while (!bucketIsEmpty) { if (matchIsEqual(hashState.getMatchHashTable(), hashBucketBit, query)) { int selectSize = (int) hashState.getMatchHashTable().getElementValue(hashBucketBit + hashState.getBitsPerMatchHashKey(), hashState.getBitsPerSelectTableSize()); long selectBucketPointer = hashState.getMatchHashTable().getElementValue(hashBucketBit + hashState.getBitsPerMatchHashKey() + hashState.getBitsPerSelectTableSize(), 
hashState.getBitsPerSelectTablePointer()); result = new HollowHashIndexResult(hashState, selectBucketPointer, selectSize); break; } bucket = (bucket + 1) & hashState.getMatchHashMask(); hashBucketBit = bucket * hashState.getBitsPerMatchHashEntry(); bucketIsEmpty = hashState.getMatchHashTable().getElementValue(hashBucketBit, hashState.getBitsPerTraverserField()[0]) == 0; } } while (hashState != hashStateVolatile); return result; } private int keyHashCode(Object key, int fieldIdx) { HollowHashIndexState hashState = hashStateVolatile; switch(hashState.getMatchFields()[fieldIdx].getFieldType()) { case BOOLEAN: return HollowReadFieldUtils.booleanHashCode((Boolean)key); case DOUBLE: return HollowReadFieldUtils.doubleHashCode((Double) key); case FLOAT: return HollowReadFieldUtils.floatHashCode((Float) key); case INT: return HollowReadFieldUtils.intHashCode((Integer) key); case LONG: return HollowReadFieldUtils.longHashCode((Long) key); case REFERENCE: return (Integer) key; case BYTES: return HashCodes.hashCode((byte[])key); case STRING: return HashCodes.hashCode((String)key); } throw new IllegalArgumentException("I don't know how to hash a " + hashState.getMatchFields()[fieldIdx].getFieldType()); } private boolean matchIsEqual(FixedLengthElementArray matchHashTable, long hashBucketBit, Object[] query) { HollowHashIndexState hashState = hashStateVolatile; for(int i = 0; i< hashState.getMatchFields().length; i++) { HollowHashIndexField field = hashState.getMatchFields()[i]; int hashOrdinal = (int)matchHashTable.getElementValue(hashBucketBit + hashState.getOffsetPerTraverserField()[field.getBaseIteratorFieldIdx()], hashState.getBitsPerTraverserField()[field.getBaseIteratorFieldIdx()]) - 1; FieldPathSegment[] fieldPath = field.getSchemaFieldPositionPath(); if(fieldPath.length == 0) { if (!query[i].equals(hashOrdinal)) return false; } else { for(int j=0;j<fieldPath.length - 1;j++) { hashOrdinal = fieldPath[j].getOrdinalForField(hashOrdinal); // Cannot find nested ordinal for 
null parent if(hashOrdinal == HollowConstants.ORDINAL_NONE) { break; } } FieldPathSegment lastPathElement = fieldPath[fieldPath.length - 1]; if(hashOrdinal == HollowConstants.ORDINAL_NONE || !HollowReadFieldUtils.fieldValueEquals(lastPathElement.getObjectTypeDataAccess(), hashOrdinal, lastPathElement.getSegmentFieldPosition(), query[i])) { return false; } } } return true; } /** * Once called, this HollowHashIndex will be kept up-to-date when deltas are applied to the indexed state engine. * <p> * This method should be called <b>before</b> any subsequent deltas occur after the index is created. * <p> * In order to prevent memory leaks, if this method is called and the index is no longer needed, call detachFromDeltaUpdates() before * discarding the index. */ public void listenForDeltaUpdates() { if (!(typeState instanceof HollowObjectTypeReadState)) throw new IllegalStateException("Cannot listen for delta updates when objectTypeDataAccess is a " + typeState.getClass().getSimpleName() + ". Is this index participating in object longevity?"); ((HollowObjectTypeReadState) typeState).addListener(this); } /** * Once called, this HollowHashIndex will no longer be kept up-to-date when deltas are applied to the indexed state engine. * <p> * Call this method before discarding indexes which are currently listening for delta updates. */ public void detachFromDeltaUpdates() { if ((typeState instanceof HollowObjectTypeReadState)) ((HollowObjectTypeReadState) typeState).removeListener(this); } @Override public void beginUpdate() { } @Override public void addedOrdinal(int ordinal) { } @Override public void removedOrdinal(int ordinal) { } @Override public void endUpdate() { reindexHashIndex(); } /** * @return state engine. * @throws ClassCastException thrown if the underlying hollowDataAccess is not a state engine. This occurs if the * index was created from a consumer with hollow object longevity enabled. 
*/ public HollowReadStateEngine getStateEngine() { return (HollowReadStateEngine) hollowDataAccess; } public HollowDataAccess getHollowDataAccess() { return hollowDataAccess; } public String getType() { return type; } public String getSelectField() { return selectField; } public String[] getMatchFields() { return matchFields; } protected static class HollowHashIndexState { final FixedLengthElementArray selectHashArray; final int bitsPerSelectHashEntry; private final FixedLengthElementArray matchHashTable; private final HollowHashIndexField[] matchFields; private final int matchHashMask; private final int bitsPerMatchHashKey; private final int bitsPerMatchHashEntry; private final int[] bitsPerTraverserField; private final int[] offsetPerTraverserField; private final int bitsPerSelectTableSize; private final int bitsPerSelectTablePointer; public HollowHashIndexState(HollowHashIndexBuilder builder) { matchHashTable = builder.getFinalMatchHashTable(); selectHashArray = builder.getFinalSelectHashArray(); matchFields = builder.getMatchFields(); matchHashMask = (int) builder.getFinalMatchHashMask(); bitsPerMatchHashKey = builder.getBitsPerMatchHashKey(); bitsPerMatchHashEntry = builder.getFinalBitsPerMatchHashEntry(); bitsPerTraverserField = builder.getBitsPerTraverserField(); offsetPerTraverserField = builder.getOffsetPerTraverserField(); bitsPerSelectTableSize = builder.getFinalBitsPerSelectTableSize(); bitsPerSelectTablePointer = builder.getFinalBitsPerSelectTablePointer(); bitsPerSelectHashEntry = builder.getBitsPerSelectHashEntry(); } public FixedLengthElementArray getSelectHashArray() { return selectHashArray; } public int getBitsPerSelectHashEntry() { return bitsPerSelectHashEntry; } public FixedLengthElementArray getMatchHashTable() { return matchHashTable; } public HollowHashIndexField[] getMatchFields() { return matchFields; } public int getMatchHashMask() { return matchHashMask; } public int getBitsPerMatchHashKey() { return bitsPerMatchHashKey; } public int 
getBitsPerMatchHashEntry() { return bitsPerMatchHashEntry; } public int[] getBitsPerTraverserField() { return bitsPerTraverserField; } public int[] getOffsetPerTraverserField() { return offsetPerTraverserField; } public int getBitsPerSelectTableSize() { return bitsPerSelectTableSize; } public int getBitsPerSelectTablePointer() { return bitsPerSelectTablePointer; } } }
9,184
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/key/PrimaryKey.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.index.key;

import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.index.FieldPaths;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSchema;
import java.util.Arrays;

/**
 * A PrimaryKey defines a set of one or more field(s) which should be unique for each record of a specific type.
 * <p>
 * The field definitions in a primary key may be hierarchical (traverse multiple record types) via dot-notation. For example,
 * the field definition <i>movie.country.id</i> may be used to traverse a child record referenced by the field <i>movie</i>, its child
 * record referenced by the field <i>country</i>, and finally the country's field <i>id</i>.
 */
public class PrimaryKey {

    /**
     * Creates a primary key instance from the type and field paths. If no fields are specified, then this uses
     * the schema attached to the HollowDataAccess to generate a primary key instance based on the
     * {@link com.netflix.hollow.core.write.objectmapper.HollowPrimaryKey} defined for the type.
     *
     * This method is typically used for building indexes.
     *
     * @param hollowDataAccess hollow data access or state engine (can be null if fieldPaths are provided)
     * @param type hollow type
     * @param fieldPaths field paths for fields that make up the primary key. If no fields are passed in, then create using schema definition of primary key
     * @return populated primary key
     */
    public static PrimaryKey create(HollowDataAccess hollowDataAccess, String type, String... fieldPaths) {
        if (fieldPaths != null && fieldPaths.length != 0) {
            return new PrimaryKey(type, fieldPaths);
        }

        // No explicit paths: fall back to the @HollowPrimaryKey declared on the type's schema.
        HollowSchema schema = hollowDataAccess.getSchema(type);
        if (schema instanceof HollowObjectSchema) {
            return ((HollowObjectSchema) schema).getPrimaryKey();
        }
        return null;
    }

    private final String type;
    private final String[] fieldPaths;

    /**
     * Define a PrimaryKey, which specifies a set of one or more field(s) which should be unique for each record of the given type.
     *
     * @param type the type name
     * @param fieldPaths The field definitions in a primary key may be hierarchical (traverse multiple record types) via dot-notation.
     *                   For example, the field definition <i>movie.country.id</i> may be used to traverse a child record referenced by the field <i>movie</i>,
     *                   its child record referenced by the field <i>country</i>, and finally the country's field <i>id</i>.
     */
    public PrimaryKey(String type, String... fieldPaths) {
        if (fieldPaths == null || fieldPaths.length == 0) {
            throw new IllegalArgumentException("fieldPaths cannot be null or empty");
        }

        this.type = type;
        // Defensive copy: the caller's array must not be able to mutate this key.
        this.fieldPaths = fieldPaths.clone();
    }

    public String getType() {
        return type;
    }

    public int numFields() {
        return fieldPaths.length;
    }

    public String getFieldPath(int idx) {
        return fieldPaths[idx];
    }

    // NOTE: returns the internal array; callers must not mutate it.
    public String[] getFieldPaths() {
        return fieldPaths;
    }

    public FieldType getFieldType(HollowDataset dataset, int fieldPathIdx) {
        return getFieldType(dataset, type, fieldPaths[fieldPathIdx]);
    }

    public HollowObjectSchema getFieldSchema(HollowDataset dataset, int fieldPathIdx) {
        return getFieldSchema(dataset, type, fieldPaths[fieldPathIdx]);
    }

    /**
     * The field path index is the object schemas' field positions for a particular field path.
     *
     * @param dataset the data set
     * @param fieldPathIdx the index to a field path string
     * @return the field path index
     */
    public int[] getFieldPathIndex(HollowDataset dataset, int fieldPathIdx) {
        return getFieldPathIndex(dataset, type, fieldPaths[fieldPathIdx]);
    }

    /**
     * Returns the ultimate field type of the specified type/field path in the provided dataset.
     *
     * @param dataAccess the data set
     * @param type the type name
     * @param fieldPath the field path
     * @return the field type
     */
    public static FieldType getFieldType(HollowDataset dataAccess, String type, String fieldPath) {
        HollowObjectSchema schema = (HollowObjectSchema) dataAccess.getSchema(type);
        int[] pathIndexes = getFieldPathIndex(dataAccess, type, fieldPath);

        // Walk every reference segment except the last, which names the value field itself.
        for (int i = 0; i < pathIndexes.length - 1; i++)
            schema = (HollowObjectSchema) dataAccess.getSchema(schema.getReferencedType(pathIndexes[i]));

        return schema.getFieldType(pathIndexes[pathIndexes.length - 1]);
    }

    /**
     * Returns the ultimate field Schema of the specified type/field path in the provided dataset.
     *
     * @param dataAccess the data set
     * @param type the type name
     * @param fieldPath the field path
     * @return the field schema
     */
    public static HollowObjectSchema getFieldSchema(HollowDataset dataAccess, String type, String fieldPath) {
        HollowObjectSchema schema = (HollowObjectSchema) dataAccess.getSchema(type);
        int[] pathIndexes = getFieldPathIndex(dataAccess, type, fieldPath);

        for (int i = 0; i < pathIndexes.length; i++)
            schema = (HollowObjectSchema) dataAccess.getSchema(schema.getReferencedType(pathIndexes[i]));

        return schema;
    }

    /**
     * Returns a separated field path, which has been auto-expanded if necessary based on the provided primary key field
     * path.
     *
     * @param dataset the data access
     * @param type the type name
     * @param fieldPath the field path
     * @return the separated field path
     */
    public static String[] getCompleteFieldPathParts(HollowDataset dataset, String type, String fieldPath) {
        int[] fieldPathIdx = getFieldPathIndex(dataset, type, fieldPath);
        String[] fieldPathParts = new String[fieldPathIdx.length];
        HollowObjectSchema schema = (HollowObjectSchema) dataset.getSchema(type);

        for (int i = 0; i < fieldPathParts.length; i++) {
            fieldPathParts[i] = schema.getFieldName(fieldPathIdx[i]);
            schema = (HollowObjectSchema) dataset.getSchema(schema.getReferencedType(fieldPathIdx[i]));
        }

        return fieldPathParts;
    }

    /**
     * The field path index is the object schemas' field positions for a particular field path.
     *
     * @param dataset the data set
     * @param type the type name
     * @param fieldPath the field path string
     * @return the field path index
     */
    public static int[] getFieldPathIndex(HollowDataset dataset, String type, String fieldPath) {
        return FieldPaths.createFieldPathForPrimaryKey(dataset, type, fieldPath).getSegments().stream()
                .mapToInt(FieldPaths.ObjectFieldSegment::getIndex)
                .toArray();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + Arrays.hashCode(fieldPaths);
        result = prime * result + ((type == null) ? 0 : type.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        PrimaryKey other = (PrimaryKey) obj;
        if (!Arrays.equals(fieldPaths, other.fieldPaths))
            return false;
        if (type == null) {
            if (other.type != null)
                return false;
        } else if (!type.equals(other.type))
            return false;
        return true;
    }

    @Override
    public String toString() {
        return "PrimaryKey [type=" + type + ", fieldPaths=" + Arrays.toString(fieldPaths) + "]";
    }
}
9,185
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/key/HollowPrimaryKeyValueDeriver.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.index.key;

import com.netflix.hollow.core.read.HollowReadFieldUtils;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import java.util.Arrays;

/**
 * Used to retrieve and test equality of PrimaryKey values for records.
 */
public class HollowPrimaryKeyValueDeriver {

    private final HollowObjectTypeReadState typeState;
    // fieldPathIndexes[i] is the resolved per-schema field-position path for key field i;
    // fieldTypes[i] is the terminal value type of that path.
    private final int[][] fieldPathIndexes;
    private final FieldType[] fieldTypes;

    /**
     * Create a new deriver.
     *
     * @param primaryKey The primary key spec
     * @param stateEngine The state engine to retrieve data from
     */
    public HollowPrimaryKeyValueDeriver(PrimaryKey primaryKey, HollowReadStateEngine stateEngine) {
        this.fieldPathIndexes = new int[primaryKey.numFields()][];
        this.fieldTypes = new FieldType[primaryKey.numFields()];

        for (int i = 0; i < primaryKey.numFields(); i++) {
            fieldPathIndexes[i] = primaryKey.getFieldPathIndex(stateEngine, i);
            fieldTypes[i] = primaryKey.getFieldType(stateEngine, i);
        }

        this.typeState = (HollowObjectTypeReadState) stateEngine.getTypeState(primaryKey.getType());
    }

    public HollowPrimaryKeyValueDeriver(HollowObjectTypeReadState typeState, int[][] fieldPathIndexes,
            FieldType[] fieldTypes) {
        this.typeState = typeState;
        this.fieldPathIndexes = fieldPathIndexes;
        this.fieldTypes = fieldTypes;
    }

    /**
     * Determine whether or not the specified ordinal contains the provided primary key value.
     *
     * @param ordinal the ordinal
     * @param keys the primary keys
     * @return true if the ordinal contains the primary keys
     */
    public boolean keyMatches(int ordinal, Object... keys) {
        if (keys.length != fieldPathIndexes.length)
            return false;

        for (int i = 0; i < keys.length; i++) {
            if (!keyMatches(keys[i], ordinal, i))
                return false;
        }

        return true;
    }

    public boolean keyMatches(Object key, int ordinal, int fieldIdx) {
        HollowObjectTypeReadState typeState = this.typeState;
        HollowObjectSchema schema = typeState.getSchema();

        // Follow every reference segment of the path; the last segment is the value field.
        int lastFieldPath = fieldPathIndexes[fieldIdx].length - 1;
        for (int i = 0; i < lastFieldPath; i++) {
            int fieldPosition = fieldPathIndexes[fieldIdx][i];
            ordinal = typeState.readOrdinal(ordinal, fieldPosition);
            typeState = (HollowObjectTypeReadState) schema.getReferencedTypeState(fieldPosition);
            schema = typeState.getSchema();
        }

        int lastFieldIdx = fieldPathIndexes[fieldIdx][lastFieldPath];

        return keyMatches(key, fieldTypes[fieldIdx], lastFieldIdx, ordinal, typeState);
    }

    @SuppressWarnings("UnnecessaryUnboxing")
    public static boolean keyMatches(Object key, FieldType fieldType, int lastFieldIdx, int ordinal,
            HollowObjectTypeDataAccess dataAccess) {
        switch (fieldType) {
            case BOOLEAN:
                Boolean b = dataAccess.readBoolean(ordinal, lastFieldIdx);
                // Reference-equality fast path also covers the both-null case.
                if (b == key)
                    return true;
                if (b == null || key == null)
                    return false;
                return b.booleanValue() == ((Boolean) key).booleanValue();
            case BYTES:
                return Arrays.equals(dataAccess.readBytes(ordinal, lastFieldIdx), (byte[]) key);
            case DOUBLE:
                return dataAccess.readDouble(ordinal, lastFieldIdx) == ((Double) key).doubleValue();
            case FLOAT:
                return dataAccess.readFloat(ordinal, lastFieldIdx) == ((Float) key).floatValue();
            case INT:
                return dataAccess.readInt(ordinal, lastFieldIdx) == ((Integer) key).intValue();
            case LONG:
                return dataAccess.readLong(ordinal, lastFieldIdx) == ((Long) key).longValue();
            case REFERENCE:
                return dataAccess.readOrdinal(ordinal, lastFieldIdx) == ((Integer) key).intValue();
            case STRING:
                return dataAccess.isStringFieldEqual(ordinal, lastFieldIdx, (String) key);
        }

        throw new IllegalArgumentException("I don't know how to compare a " + fieldType);
    }

    /**
     * Retrieve the primary key value for the specified ordinal.
     *
     * @param ordinal the ordinal
     * @return the primary keys
     */
    public Object[] getRecordKey(int ordinal) {
        Object[] results = new Object[fieldPathIndexes.length];

        for (int i = 0; i < fieldPathIndexes.length; i++) {
            results[i] = readValue(ordinal, i);
        }

        return results;
    }

    // Resolves one key field: walks the reference path, then reads the terminal value.
    private Object readValue(int ordinal, int fieldIdx) {
        HollowObjectTypeReadState typeState = this.typeState;
        HollowObjectSchema schema = typeState.getSchema();

        int lastFieldPath = fieldPathIndexes[fieldIdx].length - 1;
        for (int i = 0; i < lastFieldPath; i++) {
            int fieldPosition = fieldPathIndexes[fieldIdx][i];
            ordinal = typeState.readOrdinal(ordinal, fieldPosition);
            typeState = (HollowObjectTypeReadState) schema.getReferencedTypeState(fieldPosition);
            schema = typeState.getSchema();
        }

        return HollowReadFieldUtils.fieldValueObject(typeState, ordinal, fieldPathIndexes[fieldIdx][lastFieldPath]);
    }

    public int[][] getFieldPathIndexes() {
        return fieldPathIndexes;
    }

    public FieldType[] getFieldTypes() {
        return fieldTypes;
    }
}
9,186
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/key/PrimaryKeyValueHasher.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.index.key;

import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import java.util.Arrays;

/**
 * Used to hash records by their PrimaryKey.
 */
public class PrimaryKeyValueHasher {

    /** Field type of each key element, resolved once from the key spec at construction. */
    private final FieldType[] fieldTypes;

    /**
     * Create a new hasher.
     *
     * @param key The primary key spec
     * @param dataset The dataset to retrieve data from
     */
    public PrimaryKeyValueHasher(PrimaryKey key, HollowDataset dataset) {
        int numFields = key.numFields();
        this.fieldTypes = new FieldType[numFields];
        for(int fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
            fieldTypes[fieldIdx] = key.getFieldType(dataset, fieldIdx);
        }
    }

    /**
     * Hash a single-field key value against field position 0.
     *
     * @param key the key value
     * @return the hash of the value, typed according to field 0
     */
    public int hash(Object key) {
        return hashElement(key, 0);
    }

    /**
     * Hash a composite key.  Each element is hashed according to its field type and
     * folded into the accumulator with a multiply-by-31 / XOR scheme.
     *
     * @param keys the key values, one per key field, in key-spec order
     * @return the combined hash
     */
    public int hash(Object... keys) {
        int result = 0;
        for(int idx = 0; idx < keys.length; idx++) {
            result = (result * 31) ^ hashElement(keys[idx], idx);
        }
        return result;
    }

    /**
     * Hash one key element according to the field type at the specified position.
     * <p>
     * Note: only BOOLEAN and BYTES tolerate a {@code null} key (hashing to 0); the
     * remaining types will throw a NullPointerException on a null key.
     *
     * @param key the key element value
     * @param fieldTypeIdx the position of the field within the key spec
     * @return the hash of the element
     * @throws IllegalArgumentException if the field type is not recognized
     */
    public int hashElement(Object key, int fieldTypeIdx) {
        FieldType type = fieldTypes[fieldTypeIdx];
        switch(type) {
            case BOOLEAN:
                if(key == null)
                    return 0;
                // Boolean.hashCode() constants (1231 / 1237).
                return ((Boolean) key).booleanValue() ? 1231 : 1237;
            case BYTES:
                if(key == null)
                    return 0;
                return Arrays.hashCode((byte[]) key);
            case DOUBLE:
                // Raw bits (not canonicalized), folded to 32 bits like Long.hashCode.
                long doubleBits = Double.doubleToRawLongBits(((Double) key).doubleValue());
                return (int) (doubleBits ^ (doubleBits >>> 32));
            case FLOAT:
                return Float.floatToRawIntBits(((Float) key).floatValue());
            case LONG:
                long longValue = ((Long) key).longValue();
                return (int) (longValue ^ (longValue >>> 32));
            case INT:
            case REFERENCE:
                // REFERENCE keys are boxed ordinals; hashed identically to INT.
                return ((Integer) key).intValue();
            case STRING:
                return key.hashCode();
            default:
                throw new IllegalArgumentException("Unknown field type: " + type);
        }
    }
}
9,187
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/traversal/HollowIndexerTraversalNode.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.index.traversal;

import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess;
import com.netflix.hollow.core.util.IntList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

/**
 * One node in the indexer's traversal tree.  Each node traverses records of a single type,
 * recording matched ordinals into the shared per-field match lists, and — when multiple child
 * branches can each produce multiple matches — multiplying sibling branch results into their
 * cross product so the match lists stay row-aligned.
 *
 * Not intended for external consumption.
 */
abstract class HollowIndexerTraversalNode {

    /** Data access for the type this node traverses. */
    protected final HollowTypeDataAccess dataAccess;
    /** Match lists shared across the whole tree, one per indexed field path. */
    protected final IntList[] fieldMatches;
    /** Index into fieldMatches this node records its own ordinal under; -1 if this node is not itself indexed. */
    protected int indexedFieldPosition = -1;
    /** Children keyed by the path element (field name, "element", "key", "value") leading to them. */
    protected final Map<String, HollowIndexerTraversalNode> children;

    // Multiplication state, computed once in setUpMultiplication() and consumed by
    // prepareMultiply()/doMultiply() on every traversal.
    /** Whether sibling branch results must be cross-multiplied (more than one multi-match branch). */
    private boolean shouldMultiplyBranchResults;
    /** Per-child stride: product of the match counts of all earlier children in iteration order. */
    private int childrenRepeatCounts[];
    /** Per-child number of matches produced during the current multiply round. */
    private int childrenMatchCounts[];
    /** Maps a field-path index to the child (by position) whose branch contains it; -1 if none. */
    private int fieldChildMap[];
    /** For each child (by position), the first field-path index found in that child's branch. */
    private int childFirstFieldMap[];
    /** Size of the first child's first match list when the current multiply round began. */
    private int currentMultiplyFieldMatchListPosition;

    public HollowIndexerTraversalNode(HollowTypeDataAccess dataAccess, IntList[] fieldMatches) {
        this.dataAccess = dataAccess;
        this.fieldMatches = fieldMatches;
        this.children = new HashMap<String, HollowIndexerTraversalNode>();
    }

    /** Marks this node as directly indexed under the given field-path position. */
    public void setIndexedFieldPosition(int indexedFieldPosition) {
        this.indexedFieldPosition = indexedFieldPosition;
    }

    public int getIndexedFieldPosition() {
        return indexedFieldPosition;
    }

    /**
     * Recursively precomputes the multiplication bookkeeping for this subtree.
     *
     * @return the transitive child branch field positions
     */
    public IntList setUpMultiplication() {
        this.shouldMultiplyBranchResults = shouldMultiplyBranchResults();
        this.childrenRepeatCounts = new int[children.size()];
        this.childrenMatchCounts = new int[children.size()];
        this.fieldChildMap = new int[fieldMatches.length];
        this.childFirstFieldMap = new int[children.size()];
        Arrays.fill(fieldChildMap, -1);
        IntList branchFieldPositions = new IntList();
        // This node's own indexed field (if any) is part of its branch's field set.
        if(indexedFieldPosition != -1)
            branchFieldPositions.add(indexedFieldPosition);
        int childCounter = 0;
        for(Map.Entry<String, HollowIndexerTraversalNode> entry : children.entrySet()) {
            IntList childBranchFieldPositions = entry.getValue().setUpMultiplication();
            // First field of each child branch is used as the representative list for counting matches.
            this.childFirstFieldMap[childCounter] = childBranchFieldPositions.get(0);
            for(int i=0;i<childBranchFieldPositions.size();i++) {
                this.fieldChildMap[childBranchFieldPositions.get(i)] = childCounter;
                branchFieldPositions.add(childBranchFieldPositions.get(i));
            }
            childCounter++;
        }
        return branchFieldPositions;
    }

    /**
     * Traverses the record with the given ordinal, recording this node's ordinal once per
     * combination of matches produced by its children (or once, for a leaf).
     */
    public void traverse(int ordinal) {
        if(childFirstFieldMap.length == 0) {
            // Leaf: the traversal result count is irrelevant; record this ordinal once if indexed.
            doTraversal(ordinal);
            if(indexedFieldPosition != -1)
                fieldMatches[indexedFieldPosition].add(ordinal);
        } else {
            // Interior node: repeat this ordinal once per child match row, keeping lists aligned.
            int childMatchSize = doTraversal(ordinal);
            if(indexedFieldPosition != -1) {
                for(int i=0;i<childMatchSize;i++)
                    fieldMatches[indexedFieldPosition].add(ordinal);
            }
        }
    }

    /**
     * Snapshots the match-list position at the start of a multiply round; doMultiply() later
     * measures each child's contribution relative to this position.
     */
    public void prepareMultiply() {
        if(childFirstFieldMap.length > 0)
            this.currentMultiplyFieldMatchListPosition = fieldMatches[childFirstFieldMap[0]].size();
    }

    /**
     * Combines the matches each child branch appended since prepareMultiply().
     * When cross-multiplication is required, every branch's matches are replicated in place so
     * the lists contain all combinations, row-aligned; the stride of branch i is the product of
     * the match counts of branches 0..i-1.
     *
     * @return the number of combined match rows produced this round
     */
    public int doMultiply() {
        if(shouldMultiplyBranchResults) {
            int nextRepeatCount = 1;
            for(int i=0;i<childrenMatchCounts.length;i++) {
                childrenMatchCounts[i] = fieldMatches[childFirstFieldMap[i]].size() - currentMultiplyFieldMatchListPosition;
                childrenRepeatCounts[i] = nextRepeatCount;
                nextRepeatCount *= childrenMatchCounts[i];
            }
            if(nextRepeatCount == 0) {
                // Some branch matched nothing: the cross product is empty, so roll every
                // branch's list back to where this round started.
                for(int i=0;i<childrenMatchCounts.length;i++) {
                    fieldMatches[childFirstFieldMap[i]].expandTo(currentMultiplyFieldMatchListPosition);
                }
                return 0;
            }
            int newFieldMatchListPosition = currentMultiplyFieldMatchListPosition + nextRepeatCount;
            for(int i=0;i<fieldMatches.length;i++) {
                if(fieldChildMap[i] != -1) {
                    // Grow this field's list to the full combination count, then fill it
                    // backwards: each original value is repeated 'repeatCount' times, cycling
                    // through the branch's original values from last to first.
                    fieldMatches[i].expandTo(newFieldMatchListPosition);
                    int currentCopyToIdx = newFieldMatchListPosition - 1;
                    int startCopyFromIdx = currentMultiplyFieldMatchListPosition + childrenMatchCounts[fieldChildMap[i]] - 1;
                    int currentCopyFromIdx = startCopyFromIdx;
                    while(currentCopyToIdx > currentMultiplyFieldMatchListPosition) {
                        for(int j=0;j<childrenRepeatCounts[fieldChildMap[i]];j++) {
                            fieldMatches[i].set(currentCopyToIdx, fieldMatches[i].get(currentCopyFromIdx));
                            currentCopyToIdx--;
                        }
                        currentCopyFromIdx--;
                        // Wrap back to the branch's last original value once exhausted.
                        if(currentCopyFromIdx < currentMultiplyFieldMatchListPosition)
                            currentCopyFromIdx = startCopyFromIdx;
                    }
                }
            }
            return nextRepeatCount;
        }
        // No cross-multiplication: the row count is simply what the first branch contributed.
        if(childFirstFieldMap.length != 0)
            return fieldMatches[childFirstFieldMap[0]].size() - currentMultiplyFieldMatchListPosition;
        return 1;
    }

    /**
     * Type-specific traversal of the record with the given ordinal.
     *
     * @return the number of match rows produced by this node's children
     */
    public abstract int doTraversal(int ordinal);

    /** @return this node's data access, narrowed to the concrete type it traverses. */
    protected abstract HollowTypeDataAccess dataAccess();

    /**
     * Called at the end of creation of the indexer tree.
     *
     * Implementations of this method should set up data structures necessary for fast traversal of children.
     */
    protected abstract void setUpChildren();

    /**
     * @return true if a single record of this type can yield multiple child traversals
     *         (e.g. collections and maps), so a populated branch below it may multiply results.
     */
    protected abstract boolean followingChildrenMultipliesTraversal();

    HollowIndexerTraversalNode getChild(String name) {
        return children.get(name);
    }

    void addChild(String name, HollowIndexerTraversalNode child) {
        children.put(name, child);
    }

    /** Cross-multiplication is only needed when 2+ children exist and at least one branch can multi-match. */
    private boolean shouldMultiplyBranchResults() {
        if(children.size() > 1) {
            for(Map.Entry<String, HollowIndexerTraversalNode> entry : children.entrySet()) {
                if(entry.getValue().branchMayProduceMoreThanOneMatch()) return true;
            }
        }
        return false;
    }

    /** True if any node in this branch can fan a single record out into multiple matches. */
    private boolean branchMayProduceMoreThanOneMatch() {
        if(!children.isEmpty() && followingChildrenMultipliesTraversal()) return true;
        for(Map.Entry<String, HollowIndexerTraversalNode> entry : children.entrySet()) {
            if(entry.getValue().branchMayProduceMoreThanOneMatch()) return true;
        }
        return false;
    }
}
9,188
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/traversal/HollowIndexerListTraversalNode.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.index.traversal; import com.netflix.hollow.core.read.dataaccess.HollowListTypeDataAccess; import com.netflix.hollow.core.util.IntList; /** * Not intended for external consumption. */ class HollowIndexerListTraversalNode extends HollowIndexerCollectionTraversalNode { public HollowIndexerListTraversalNode(HollowListTypeDataAccess dataAccess, IntList[] fieldMatches) { super(dataAccess, fieldMatches); } @Override public int doTraversal(int ordinal) { if(child == null) return 1; int size = dataAccess().size(ordinal); int numMatches = 0; for(int i=0;i<size;i++) { prepareMultiply(); child.traverse(dataAccess().getElementOrdinal(ordinal, i)); numMatches += doMultiply(); } return numMatches; } @Override protected HollowListTypeDataAccess dataAccess() { return (HollowListTypeDataAccess) dataAccess; } }
9,189
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/traversal/HollowIndexerValueTraverser.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.index.traversal;

import com.netflix.hollow.core.memory.encoding.HashCodes;
import com.netflix.hollow.core.read.HollowReadFieldUtils;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess;
import com.netflix.hollow.core.util.IntList;
import java.util.BitSet;

/**
 * Used by the HollowHashIndex to traverse the possible value combinations of individual records given
 * a set of field paths.
 *
 * After traverse(ordinal) is called, the traverser exposes a table of matches: each "match" index
 * is one combination of values (one ordinal per field path), row-aligned across all field paths.
 */
public class HollowIndexerValueTraverser {

    /** The field paths requested at construction, in order. */
    private final String fieldPaths[];
    /** Root of the traversal tree built for these paths. */
    private final HollowIndexerTraversalNode rootNode;
    /** Per-field-path lists of matched ordinals; all lists have equal size after traverse(). */
    private final IntList fieldMatchLists[];
    /** Per-field-path data access used to resolve matched ordinals into values. */
    private final HollowTypeDataAccess fieldTypeDataAccess[];
    /** Per-field-path schema position of the terminal field; -1 when the terminal element is a reference/collection. */
    private final int fieldSchemaPosition[];

    public HollowIndexerValueTraverser(HollowDataAccess dataAccess, String type, String... fieldPaths) {
        this.fieldPaths = fieldPaths;
        TraversalTreeBuilder builder = new TraversalTreeBuilder(dataAccess, type, fieldPaths);
        this.rootNode = builder.buildTree();
        this.fieldMatchLists = builder.getFieldMatchLists();
        this.fieldTypeDataAccess = builder.getFieldTypeDataAccesses();
        this.fieldSchemaPosition = builder.getFieldSchemaPositions();
    }

    /**
     * Traverses the record with the specified ordinal, replacing any previously-recorded matches.
     */
    public void traverse(int ordinal) {
        for(int i=0;i<fieldMatchLists.length;i++)
            fieldMatchLists[i].clear();
        rootNode.traverse(ordinal);
    }

    public int getNumFieldPaths() {
        return fieldPaths.length;
    }

    public String getFieldPath(int idx) {
        return fieldPaths[idx];
    }

    /** @return the number of match rows produced by the last traverse() call. */
    public int getNumMatches() {
        // All match lists are row-aligned, so the first list's size is the row count.
        return fieldMatchLists[0].size();
    }

    /** Resolves the matched ordinal at (matchIdx, fieldIdx) into its boxed field value. */
    public Object getMatchedValue(int matchIdx, int fieldIdx) {
        int matchedOrdinal = fieldMatchLists[fieldIdx].get(matchIdx);
        return HollowReadFieldUtils.fieldValueObject((HollowObjectTypeDataAccess)fieldTypeDataAccess[fieldIdx], matchedOrdinal, fieldSchemaPosition[fieldIdx]);
    }

    /** Compares the matched value at (matchIdx, fieldIdx) against the given value without boxing the match. */
    public boolean isMatchedValueEqual(int matchIdx, int fieldIdx, Object value) {
        int matchedOrdinal = fieldMatchLists[fieldIdx].get(matchIdx);
        return HollowReadFieldUtils.fieldValueEquals((HollowObjectTypeDataAccess)fieldTypeDataAccess[fieldIdx], matchedOrdinal, fieldSchemaPosition[fieldIdx], value);
    }

    /** Computes a combined hash over all field values of the specified match row. */
    public int getMatchHash(int matchIdx) {
        int hashCode = 0;
        for(int i=0;i<getNumFieldPaths();i++) {
            hashCode ^= HashCodes.hashInt(HollowReadFieldUtils.fieldHashCode((HollowObjectTypeDataAccess)fieldTypeDataAccess[i], fieldMatchLists[i].get(matchIdx), fieldSchemaPosition[i]));
            // Extra mixing step: folds the running hash into itself after each field.
            hashCode ^= HashCodes.hashInt(hashCode);
        }
        return hashCode;
    }

    /** Same as {@link #getMatchHash(int)}, but only over the field paths whose bits are set. */
    public int getMatchHash(int matchIdx, BitSet fields) {
        int hashCode = 0;
        for(int i=0;i<getNumFieldPaths();i++) {
            if(fields.get(i)) {
                hashCode ^= HashCodes.hashInt(HollowReadFieldUtils.fieldHashCode((HollowObjectTypeDataAccess)fieldTypeDataAccess[i], fieldMatchLists[i].get(matchIdx), fieldSchemaPosition[i]));
                hashCode ^= HashCodes.hashInt(hashCode);
            }
        }
        return hashCode;
    }

    /**
     * This method assumes the other traverser has the same match fields specified in the same order.
     *
     * @param matchIdx the match index
     * @param otherTraverser the other traverser
     * @param otherMatchIdx the other match index
     * @return true if this and the other traverser are equal
     */
    public boolean isMatchEqual(int matchIdx, HollowIndexerValueTraverser otherTraverser, int otherMatchIdx) {
        for(int i=0;i<getNumFieldPaths();i++) {
            if(!HollowReadFieldUtils.fieldsAreEqual((HollowObjectTypeDataAccess)fieldTypeDataAccess[i], fieldMatchLists[i].get(matchIdx), fieldSchemaPosition[i],
                    (HollowObjectTypeDataAccess)otherTraverser.fieldTypeDataAccess[i], otherTraverser.fieldMatchLists[i].get(otherMatchIdx), otherTraverser.fieldSchemaPosition[i]))
                return false;
        }
        return true;
    }

    /** Same as {@link #isMatchEqual(int, HollowIndexerValueTraverser, int)}, restricted to the field paths whose bits are set. */
    public boolean isMatchEqual(int matchIdx, HollowIndexerValueTraverser otherTraverser, int otherMatchIdx, BitSet fields) {
        for(int i=0;i<getNumFieldPaths();i++) {
            if(fields.get(i)) {
                if(!HollowReadFieldUtils.fieldsAreEqual((HollowObjectTypeDataAccess)fieldTypeDataAccess[i], fieldMatchLists[i].get(matchIdx), fieldSchemaPosition[i],
                        (HollowObjectTypeDataAccess)otherTraverser.fieldTypeDataAccess[i], otherTraverser.fieldMatchLists[i].get(otherMatchIdx), otherTraverser.fieldSchemaPosition[i]))
                    return false;
            }
        }
        return true;
    }

    /** @return the raw matched ordinal at (matchIdx, fieldIdx), without resolving it to a value. */
    public int getMatchOrdinal(int matchIdx, int fieldIdx) {
        return fieldMatchLists[fieldIdx].get(matchIdx);
    }

    public HollowTypeDataAccess getFieldTypeDataAccess(int fieldIdx) {
        return fieldTypeDataAccess[fieldIdx];
    }
}
9,190
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/traversal/HollowIndexerObjectFieldTraversalNode.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.index.traversal; import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess; import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess; import com.netflix.hollow.core.util.IntList; /** * Not intended for external consumption. */ class HollowIndexerObjectFieldTraversalNode extends HollowIndexerTraversalNode { public HollowIndexerObjectFieldTraversalNode(HollowTypeDataAccess dataAccess, IntList[] fieldMatches) { super(dataAccess, fieldMatches); } @Override protected void setUpChildren() { } @Override public int doTraversal(int ordinal) { return 1; } @Override protected HollowTypeDataAccess dataAccess() { return (HollowObjectTypeDataAccess) dataAccess; } @Override protected boolean followingChildrenMultipliesTraversal() { return false; } }
9,191
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/traversal/HollowIndexerObjectTraversalNode.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.index.traversal;

import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.util.IntList;
import java.util.Map;

/**
 * Traversal node for OBJECT types: visits each configured child, either directly
 * (for field children) or through the field's referenced ordinal.
 *
 * Not intended for external consumption.
 */
class HollowIndexerObjectTraversalNode extends HollowIndexerTraversalNode {

    /** Child nodes in a fixed array, parallel to childOrdinalFieldPositions. */
    protected HollowIndexerTraversalNode children[];
    /** Schema position of the field leading to each child, for reading referenced ordinals. */
    protected int childOrdinalFieldPositions[];

    public HollowIndexerObjectTraversalNode(HollowObjectTypeDataAccess dataAccess, IntList fieldMatches[]) {
        super(dataAccess, fieldMatches);
    }

    @Override
    protected void setUpChildren() {
        // Flatten the child map into parallel arrays for fast iteration during traversal.
        int childCount = super.children.size();
        children = new HollowIndexerTraversalNode[childCount];
        childOrdinalFieldPositions = new int[childCount];
        int slot = 0;
        for(Map.Entry<String, HollowIndexerTraversalNode> entry : super.children.entrySet()) {
            childOrdinalFieldPositions[slot] = dataAccess().getSchema().getPosition(entry.getKey());
            children[slot] = entry.getValue();
            slot++;
        }
    }

    @Override
    public int doTraversal(int ordinal) {
        prepareMultiply();
        for(int childIdx = 0; childIdx < children.length; childIdx++) {
            HollowIndexerTraversalNode childNode = children[childIdx];
            if(childNode instanceof HollowIndexerObjectFieldTraversalNode) {
                // Scalar field children traverse the record itself.
                childNode.traverse(ordinal);
                continue;
            }
            // Reference children traverse the referenced record, if present.
            int referencedOrdinal = dataAccess().readOrdinal(ordinal, childOrdinalFieldPositions[childIdx]);
            if(referencedOrdinal != -1) {
                childNode.traverse(referencedOrdinal);
            }
        }
        return doMultiply();
    }

    @Override
    protected HollowObjectTypeDataAccess dataAccess() {
        return (HollowObjectTypeDataAccess) dataAccess;
    }

    @Override
    protected boolean followingChildrenMultipliesTraversal() {
        // Each field of an object yields at most one child traversal.
        return false;
    }
}
9,192
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/traversal/TraversalTreeBuilder.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.core.index.traversal;

import com.netflix.hollow.core.read.dataaccess.HollowCollectionTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowListTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowMapTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowSetTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess;
import com.netflix.hollow.core.schema.HollowCollectionSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.util.IntList;
import java.util.ArrayList;
import java.util.List;

/**
 * Builds the tree of {@link HollowIndexerTraversalNode}s for a root type and a set of dot-delimited
 * field paths, sharing tree prefixes between paths.  Also records, per field path, which data access
 * and schema position resolve the terminal field's value.
 *
 * Not intended for external consumption.
 */
class TraversalTreeBuilder {

    private final HollowDataAccess dataAccess;
    /** Name of the root type the tree is rooted at. */
    private final String type;
    /** The dot-delimited field paths to index, in order. */
    private final String[] fieldPaths;
    /** Per-path match lists, shared by every node in the tree. */
    private final IntList[] fieldMatchLists;
    /** Per-path data access used to resolve terminal values. */
    private final HollowTypeDataAccess[] fieldTypeDataAccess;
    /** Per-path schema position of the terminal field; -1 when the terminal element is a whole record. */
    private final int[] fieldSchemaPositions;

    public TraversalTreeBuilder(HollowDataAccess dataAccess, String type, String[] fieldPaths) {
        this.dataAccess = dataAccess;
        this.type = type;
        this.fieldPaths = fieldPaths;
        this.fieldMatchLists = new IntList[fieldPaths.length];
        for(int i=0;i<fieldPaths.length;i++)
            fieldMatchLists[i] = new IntList();
        this.fieldTypeDataAccess = new HollowTypeDataAccess[fieldPaths.length];
        this.fieldSchemaPositions = new int[fieldPaths.length];
    }

    public IntList[] getFieldMatchLists() {
        return fieldMatchLists;
    }

    public HollowTypeDataAccess[] getFieldTypeDataAccesses() {
        return fieldTypeDataAccess;
    }

    public int[] getFieldSchemaPositions() {
        return fieldSchemaPositions;
    }

    /**
     * Builds and wires up the traversal tree for all configured field paths.
     *
     * @return the root node of the tree
     */
    public HollowIndexerTraversalNode buildTree() {
        HollowTypeDataAccess rootTypeDataAccess = dataAccess.getTypeDataAccess(type);
        HollowIndexerTraversalNode rootNode = createTypeNode(rootTypeDataAccess);
        List<HollowIndexerTraversalNode> allNodes = new ArrayList<HollowIndexerTraversalNode>();
        allNodes.add(rootNode);
        for(int i=0;i<fieldPaths.length;i++) {
            String fieldPath = fieldPaths[i];
            String pathElements[] = "".equals(fieldPath) ? new String[0] : fieldPath.split("\\.");
            if(pathElements.length == 0) {
                // Empty path: the root record itself is the indexed value.
                rootNode.setIndexedFieldPosition(i);
                fieldTypeDataAccess[i] = rootTypeDataAccess;
            } else {
                // Walk the path from the root, creating nodes only where no earlier path created them.
                HollowTypeDataAccess typeDataAccess = rootTypeDataAccess;
                HollowIndexerTraversalNode currentNode = rootNode;
                for(int j=0;j<pathElements.length;j++) {
                    String pathElement = pathElements[j];
                    HollowIndexerTraversalNode child = currentNode.getChild(pathElement);
                    if(child == null) {
                        child = createChildNode(typeDataAccess, pathElement);
                        currentNode.addChild(pathElement, child);
                        allNodes.add(child);
                    }
                    currentNode = child;
                    if(j == pathElements.length - 1) {
                        // Terminal path element: record where/how the indexed value is resolved.
                        currentNode.setIndexedFieldPosition(i);
                        fieldTypeDataAccess[i] = typeDataAccess;
                        if(typeDataAccess instanceof HollowObjectTypeDataAccess) {
                            HollowObjectSchema schema = (HollowObjectSchema)typeDataAccess.getSchema();
                            if(schema.getFieldType(pathElement) == FieldType.REFERENCE) {
                                // Terminal REFERENCE field: the value is the referenced record itself.
                                fieldSchemaPositions[i] = -1;
                                fieldTypeDataAccess[i] = getChildDataAccess(typeDataAccess, pathElement);
                            } else {
                                // Terminal scalar field: resolved via its schema position.
                                fieldSchemaPositions[i] = schema.getPosition(pathElement);
                                fieldTypeDataAccess[i] = typeDataAccess;
                            }
                        } else if (typeDataAccess instanceof HollowMapTypeDataAccess) {
                            // Terminal map key/value: the value is the referenced record.
                            fieldTypeDataAccess[i] = getChildDataAccess(typeDataAccess, pathElement);
                            fieldSchemaPositions[i] = -1;
                        } else if (typeDataAccess instanceof HollowCollectionTypeDataAccess) {
                            // Terminal collection element: the value is the element record.
                            fieldTypeDataAccess[i] = getChildDataAccess(typeDataAccess, pathElement);
                            fieldSchemaPositions[i] = -1;
                        }
                    } else {
                        // Interior path element: descend into the referenced/element/entry type.
                        typeDataAccess = getChildDataAccess(typeDataAccess, pathElement);
                    }
                }
            }
        }
        // Finalize every node after the full tree shape is known.
        for(HollowIndexerTraversalNode node : allNodes) {
            node.setUpMultiplication();
            node.setUpChildren();
        }
        return rootNode;
    }

    /**
     * Creates the traversal node reached from the given type via the named path element.
     * Reference fields and collection/map elements produce a node for the referenced type;
     * scalar object fields produce a leaf field node.
     *
     * @throws IllegalArgumentException if the parent type cannot have children
     */
    private HollowIndexerTraversalNode createChildNode(HollowTypeDataAccess typeDataAccess, String childName) {
        if(typeDataAccess instanceof HollowObjectTypeDataAccess) {
            HollowObjectTypeDataAccess objectAccess = (HollowObjectTypeDataAccess) typeDataAccess;
            HollowObjectSchema schema = objectAccess.getSchema();
            int fieldIdx = schema.getPosition(childName);
            if(schema.getFieldType(fieldIdx) == FieldType.REFERENCE) {
                String childType = schema.getReferencedType(fieldIdx);
                HollowTypeDataAccess childTypeAccess = dataAccess.getTypeDataAccess(childType);
                return createTypeNode(childTypeAccess);
            } else {
                return new HollowIndexerObjectFieldTraversalNode(objectAccess, fieldMatchLists);
            }
        } else if(typeDataAccess instanceof HollowCollectionTypeDataAccess) {
            HollowCollectionSchema schema = (HollowCollectionSchema) typeDataAccess.getSchema();
            HollowTypeDataAccess childTypeAccess = dataAccess.getTypeDataAccess(schema.getElementType());
            return createTypeNode(childTypeAccess);
        } else if(typeDataAccess instanceof HollowMapTypeDataAccess) {
            HollowMapSchema schema = (HollowMapSchema) typeDataAccess.getSchema();
            // Map children are addressed as "key" or "value".
            String childType = "key".equals(childName) ? schema.getKeyType() : schema.getValueType();
            HollowTypeDataAccess childTypeAccess = dataAccess.getTypeDataAccess(childType);
            return createTypeNode(childTypeAccess);
        }
        throw new IllegalArgumentException("I can't create a child node for a " + typeDataAccess.getClass());
    }

    /**
     * Resolves the data access of the type reached from the given type via the named path element.
     *
     * @throws IllegalArgumentException if the parent type cannot have children
     */
    private HollowTypeDataAccess getChildDataAccess(HollowTypeDataAccess typeDataAccess, String childName) {
        if(typeDataAccess instanceof HollowObjectTypeDataAccess) {
            HollowObjectSchema schema = (HollowObjectSchema) typeDataAccess.getSchema();
            int fieldIdx = schema.getPosition(childName);
            String childType = schema.getReferencedType(fieldIdx);
            return dataAccess.getTypeDataAccess(childType);
        } else if(typeDataAccess instanceof HollowCollectionTypeDataAccess) {
            HollowCollectionSchema schema = (HollowCollectionSchema) typeDataAccess.getSchema();
            return dataAccess.getTypeDataAccess(schema.getElementType());
        } else if(typeDataAccess instanceof HollowMapTypeDataAccess) {
            HollowMapSchema schema = (HollowMapSchema) typeDataAccess.getSchema();
            String childType = "key".equals(childName) ? schema.getKeyType() : schema.getValueType();
            return dataAccess.getTypeDataAccess(childType);
        }
        throw new IllegalArgumentException("I can't create a child node for a " + typeDataAccess.getClass());
    }

    /**
     * Creates the traversal node variant matching a type's data access (object/list/set/map).
     *
     * @throws IllegalArgumentException for unrecognized data access types
     */
    private HollowIndexerTraversalNode createTypeNode(HollowTypeDataAccess typeDataAccess) {
        if(typeDataAccess instanceof HollowObjectTypeDataAccess)
            return new HollowIndexerObjectTraversalNode((HollowObjectTypeDataAccess) typeDataAccess, fieldMatchLists);
        else if(typeDataAccess instanceof HollowListTypeDataAccess)
            return new HollowIndexerListTraversalNode((HollowListTypeDataAccess) typeDataAccess, fieldMatchLists);
        else if(typeDataAccess instanceof HollowSetTypeDataAccess)
            return new HollowIndexerCollectionTraversalNode(typeDataAccess, fieldMatchLists);
        else if(typeDataAccess instanceof HollowMapTypeDataAccess)
            return new HollowIndexerMapTraversalNode(typeDataAccess, fieldMatchLists);
        throw new IllegalArgumentException("I can't create a type node for a " + typeDataAccess.getClass());
    }
}
9,193
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/traversal/HollowIndexerMapTraversalNode.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.index.traversal; import com.netflix.hollow.core.read.dataaccess.HollowMapTypeDataAccess; import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess; import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator; import com.netflix.hollow.core.util.IntList; /** * Not intended for external consumption. */ class HollowIndexerMapTraversalNode extends HollowIndexerTraversalNode { private HollowIndexerTraversalNode keyNode; private HollowIndexerTraversalNode valueNode; public HollowIndexerMapTraversalNode(HollowTypeDataAccess dataAccess, IntList[] fieldMatches) { super(dataAccess, fieldMatches); } @Override protected void setUpChildren() { keyNode = children.get("key"); valueNode = children.get("value"); } @Override public int doTraversal(int ordinal) { int numMatches = 0; HollowMapEntryOrdinalIterator ordinalIterator = dataAccess().ordinalIterator(ordinal); while(ordinalIterator.next()) { prepareMultiply(); if(keyNode != null) keyNode.traverse(ordinalIterator.getKey()); if(valueNode != null) valueNode.traverse(ordinalIterator.getValue()); numMatches += doMultiply(); } return numMatches; } @Override protected HollowMapTypeDataAccess dataAccess() { return (HollowMapTypeDataAccess) dataAccess; } @Override protected boolean followingChildrenMultipliesTraversal() { return true; } }
9,194
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/core/index/traversal/HollowIndexerCollectionTraversalNode.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.index.traversal; import com.netflix.hollow.core.read.dataaccess.HollowCollectionTypeDataAccess; import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess; import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator; import com.netflix.hollow.core.util.IntList; /** * Not intended for external consumption. */ class HollowIndexerCollectionTraversalNode extends HollowIndexerTraversalNode { protected HollowIndexerTraversalNode child; public HollowIndexerCollectionTraversalNode(HollowTypeDataAccess dataAccess, IntList[] fieldMatches) { super(dataAccess, fieldMatches); } @Override protected void setUpChildren() { child = children.get("element"); } @Override public int doTraversal(int ordinal) { if(child == null) return 1; HollowOrdinalIterator iter = dataAccess().ordinalIterator(ordinal); int numMatches = 0; int elementOrdinal = iter.next(); while(elementOrdinal != HollowOrdinalIterator.NO_MORE_ORDINALS) { prepareMultiply(); child.traverse(elementOrdinal); numMatches += doMultiply(); elementOrdinal = iter.next(); } return numMatches; } @Override protected HollowCollectionTypeDataAccess dataAccess() { return (HollowCollectionTypeDataAccess) dataAccess; } @Override protected boolean followingChildrenMultipliesTraversal() { return true; } }
9,195
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/metrics/HollowMetrics.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.api.metrics;

import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import java.util.Collection;
import java.util.HashMap;

/**
 * Base holder for Hollow metrics shared by producers and consumers: the current
 * data version, plus per-type and aggregate heap footprint and populated-ordinal
 * counts derived from a {@link HollowReadStateEngine}.
 */
public abstract class HollowMetrics {

    // Per-type metrics, keyed by schema type name.
    private HashMap<String, Long> typeHeapFootprint = new HashMap<>();
    private HashMap<String, Integer> typePopulatedOrdinals = new HashMap<>();

    private long currentVersion;
    private long totalHeapFootprint = 0L;
    private int totalPopulatedOrdinals = 0;

    /**
     * Records the supplied data version without recomputing any type metrics.
     *
     * @param version the data version to record
     */
    protected void update(long version) {
        setCurrentVersion(version);
    }

    /**
     * Records the supplied data version and recomputes the per-type and total
     * metrics from the given state engine.
     *
     * @param hollowReadStateEngine the state engine to measure
     * @param version the data version to record
     */
    protected void update(HollowReadStateEngine hollowReadStateEngine, long version) {
        setCurrentVersion(version);
        calculateTypeMetrics(hollowReadStateEngine);
    }

    /**
     * Calculates the memory heap footprint and populated ordinals per type and
     * the totals across all types. A no-op if the engine reports no type states.
     *
     * @param hollowReadStateEngine the state engine to measure
     */
    void calculateTypeMetrics(HollowReadStateEngine hollowReadStateEngine) {
        Collection<HollowTypeReadState> typeStates = hollowReadStateEngine.getTypeStates();
        if (typeStates == null) {
            return;
        }

        totalHeapFootprint = 0L;
        totalPopulatedOrdinals = 0;

        for (HollowTypeReadState typeState : typeStates) {
            long heapCost = typeState.getApproximateHeapFootprintInBytes();
            int populatedOrdinals = typeState.getPopulatedOrdinals().cardinality();

            totalHeapFootprint += heapCost;
            totalPopulatedOrdinals += populatedOrdinals;

            String typeName = typeState.getSchema().getName();
            typeHeapFootprint.put(typeName, heapCost);
            typePopulatedOrdinals.put(typeName, populatedOrdinals);
        }
    }

    public HashMap<String, Long> getTypeHeapFootprint() {
        return typeHeapFootprint;
    }

    public HashMap<String, Integer> getTypePopulatedOrdinals() {
        return typePopulatedOrdinals;
    }

    public long getCurrentVersion() {
        return currentVersion;
    }

    public long getTotalHeapFootprint() {
        return totalHeapFootprint;
    }

    public long getTotalPopulatedOrdinals() {
        return totalPopulatedOrdinals;
    }

    public void setCurrentVersion(long version) {
        currentVersion = version;
    }
}
9,196
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/metrics/HollowMetricsCollector.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.api.metrics;

/**
 * Extension point for publishing Hollow metrics to an external monitoring
 * system. Implementations receive a metrics instance via {@link #collect}.
 *
 * @param <T> the concrete metrics type this collector handles
 */
public abstract class HollowMetricsCollector<T extends HollowMetrics> {

    private T metrics;

    /**
     * @return the metrics instance last set on this collector, or {@code null}
     *         if none has been set
     */
    public T getMetrics() {
        return metrics;
    }

    /**
     * Associates a metrics instance with this collector.
     *
     * @param metrics the metrics instance to hold
     */
    public void setMetrics(T metrics) {
        this.metrics = metrics;
    }

    /**
     * Publishes the given metrics; implementations decide where and how.
     *
     * @param metrics the metrics to publish
     */
    public abstract void collect(T metrics);
}
9,197
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/metrics/HollowConsumerMetrics.java
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.api.metrics; import com.netflix.hollow.core.read.engine.HollowReadStateEngine; public class HollowConsumerMetrics extends HollowMetrics { private int refreshFailed; // TODO: Move these metrics over to com.netflix.hollow.api.consumer.metrics.AbstractRefreshMetricsListener private int refreshSucceeded; /** * Updates the consumer metrics: * refresh succeeded, version and type's footprint and ordinals. * @param hollowReadStateEngine the state engine * @param version the version */ public void updateTypeStateMetrics(HollowReadStateEngine hollowReadStateEngine, long version) { this.refreshSucceeded++; super.update(hollowReadStateEngine, version); } public void updateRefreshFailed() { this.refreshFailed++; } public int getRefreshFailed() { return this.refreshFailed; } public int getRefreshSucceded() { return this.refreshSucceeded; } }
9,198
0
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api
Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/metrics/HollowProducerMetrics.java
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.api.metrics;

import com.netflix.hollow.api.producer.HollowProducer;
import com.netflix.hollow.api.producer.HollowProducerListener;
import com.netflix.hollow.api.producer.Status;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Metrics tracked for a Hollow producer: cycle outcomes and per-blob-type
 * publish outcomes, plus the version/footprint/ordinal metrics inherited
 * from {@link HollowMetrics}.
 */
public class HollowProducerMetrics extends HollowMetrics {

    private int cyclesCompleted = 0;
    private int cyclesSucceeded = 0;
    private int cycleFailed = 0;
    // Snapshots can be published asynchronously resulting concurrent
    // access to the snapshot metrics
    private AtomicInteger snapshotsCompleted = new AtomicInteger();
    private AtomicInteger snapshotsFailed = new AtomicInteger();
    private int deltasCompleted = 0;
    private int deltasFailed = 0;
    private int reverseDeltasCompleted = 0;
    private int reverseDeltasFailed = 0;

    /**
     * Updates the producer metrics:
     * cycles completed, version and type's footprint and ordinals.
     * @param producerStatus the producer status
     */
    public void updateCycleMetrics(HollowProducerListener.ProducerStatus producerStatus) {
        Status.StatusType statusType;
        if (producerStatus.getStatus() == HollowProducerListener.Status.SUCCESS) {
            statusType = Status.StatusType.SUCCESS;
        } else {
            statusType = Status.StatusType.FAIL;
        }
        updateCycleMetrics(new Status(statusType, producerStatus.getCause()),
                producerStatus.getReadState(), producerStatus.getVersion());
    }

    /**
     * Updates the producer metrics:
     * cycles completed, version and type's footprint and ordinals.
     * @param status the status
     * @param readState the read state
     * @param version the version
     */
    public void updateCycleMetrics(Status status, HollowProducer.ReadState readState, long version) {
        cyclesCompleted++;

        if (status.getType() == Status.StatusType.FAIL) {
            cycleFailed++;
            return;
        }

        cyclesSucceeded++;
        // Only recompute type metrics when a read state is available.
        if (readState == null) {
            super.update(version);
        } else {
            HollowReadStateEngine stateEngine = readState.getStateEngine();
            super.update(stateEngine, version);
        }
    }

    /**
     * Records the publish outcome of a blob based on a listener publish status.
     * @param publishStatus the publish status
     */
    public void updateBlobTypeMetrics(HollowProducerListener.PublishStatus publishStatus) {
        Status.StatusType statusType = publishStatus.getStatus() == HollowProducerListener.Status.SUCCESS
                ? Status.StatusType.SUCCESS
                : Status.StatusType.FAIL;
        updateBlobTypeMetrics(new Status(statusType, publishStatus.getCause()), publishStatus.getBlob());
    }

    /**
     * Records the publish outcome of a blob, bucketed by the blob's type.
     * @param status the publish status
     * @param blob the published blob
     */
    public void updateBlobTypeMetrics(Status status, HollowProducer.Blob blob) {
        boolean succeeded = status.getType() == Status.StatusType.SUCCESS;
        switch (blob.getType()) {
            case SNAPSHOT:
                // Atomic counters: snapshot publishes may occur concurrently.
                if (succeeded) {
                    snapshotsCompleted.incrementAndGet();
                } else {
                    snapshotsFailed.incrementAndGet();
                }
                break;
            case DELTA:
                if (succeeded) {
                    deltasCompleted++;
                } else {
                    deltasFailed++;
                }
                break;
            case REVERSE_DELTA:
                if (succeeded) {
                    reverseDeltasCompleted++;
                } else {
                    reverseDeltasFailed++;
                }
                break;
        }
    }

    public int getCyclesCompleted() {
        return cyclesCompleted;
    }

    public int getCyclesSucceeded() {
        return cyclesSucceeded;
    }

    public int getCycleFailed() {
        return cycleFailed;
    }

    public int getSnapshotsCompleted() {
        return snapshotsCompleted.get();
    }

    public int getSnapshotsFailed() {
        return snapshotsFailed.get();
    }

    public int getDeltasCompleted() {
        return deltasCompleted;
    }

    public int getDeltasFailed() {
        return deltasFailed;
    }

    public int getReverseDeltasCompleted() {
        return reverseDeltasCompleted;
    }

    public int getReverseDeltasFailed() {
        return reverseDeltasFailed;
    }
}
9,199