Unnamed: 0
int64 0
6.7k
| func
stringlengths 12
89.6k
| target
bool 2
classes | project
stringlengths 45
151
|
|---|---|---|---|
1,100
|
{
@Override
public Iterator<NodeLabelUpdate> iterator()
{
return new PrefetchingIterator<NodeLabelUpdate>()
{
private final long[] NO_LABELS = new long[0];
private final NodeStore nodeStore = neoStoreProvider.evaluate().getNodeStore();
private final long highId = nodeStore.getHighestPossibleIdInUse();
private long current;
@Override
protected NodeLabelUpdate fetchNextOrNull()
{
while ( current <= highId )
{
NodeRecord node = nodeStore.forceGetRecord( current++ );
if ( node.inUse() )
{
long[] labels = NodeLabelsField.parseLabelsField( node ).get( nodeStore );
if ( labels.length > 0 )
{
return NodeLabelUpdate.labelChanges( node.getId(), NO_LABELS, labels );
}
}
}
return null;
}
};
}
@Override
public PrimitiveLongIterator labelIds()
{
final Token[] labels = neoStoreProvider.evaluate().getLabelTokenStore().getTokens( MAX_VALUE );
return new AbstractPrimitiveLongIterator()
{
int index;
{
computeNext();
}
@Override
protected void computeNext()
{
if ( index <= labels.length )
{
next( labels[index++].id() );
}
else
{
endReached();
}
}
};
}
@Override
public long highestNodeId()
{
return neoStoreProvider.evaluate().getNodeStore().getHighestPossibleIdInUse();
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_api_scan_LabelScanStoreProvider.java
|
1,101
|
{
@Override
public <T> T select( Class<T> type, Iterable<T> candidates )
throws IllegalArgumentException
{
List<Comparable> all = (List<Comparable>) addToCollection( candidates, new ArrayList<T>() );
if ( all.isEmpty() )
{
throw new IllegalArgumentException( "No label scan store provider " +
LabelScanStoreProvider.class.getName() + " found. " + servicesClassPathEntryInformation() );
}
Collections.sort( all );
return (T) all.get( all.size()-1 );
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_api_scan_LabelScanStoreProvider.java
|
1,102
|
/**
 * Lifecycle wrapper around a {@link LabelScanStore}, carrying a priority used to pick the
 * preferred provider when several implementations are present on the class path.
 */
public class LabelScanStoreProvider extends LifecycleAdapter implements Comparable<LabelScanStoreProvider>
{
    /**
     * SelectionStrategy for {@link KernelExtensions kernel extensions loading} where the one with highest
     * {@link #priority} will be selected. If there are no such stores then an
     * {@link IllegalArgumentException} will be thrown.
     */
    public static SelectionStrategy HIGHEST_PRIORITIZED =
            new SelectionStrategy()
            {
                @Override
                public <T> T select( Class<T> type, Iterable<T> candidates )
                        throws IllegalArgumentException
                {
                    List<Comparable> all = (List<Comparable>) addToCollection( candidates, new ArrayList<T>() );
                    if ( all.isEmpty() )
                    {
                        throw new IllegalArgumentException( "No label scan store provider " +
                                LabelScanStoreProvider.class.getName() + " found. " + servicesClassPathEntryInformation() );
                    }
                    // Providers sort ascending by priority (see compareTo), so the last element wins.
                    Collections.sort( all );
                    return (T) all.get( all.size() - 1 );
                }
            };

    private final LabelScanStore labelScanStore;
    private final int priority;

    public LabelScanStoreProvider( LabelScanStore labelScanStore, int priority )
    {
        this.labelScanStore = labelScanStore;
        this.priority = priority;
    }

    public LabelScanStore getLabelScanStore()
    {
        return labelScanStore;
    }

    @Override
    public int compareTo( LabelScanStoreProvider o )
    {
        // Integer.compare avoids the overflow that plain subtraction can suffer for extreme priorities.
        return Integer.compare( priority, o.priority );
    }

    @Override
    public String toString()
    {
        return getClass().getSimpleName() + "[" + labelScanStore + ", prio:" + priority + "]";
    }

    /**
     * Everything needed to (re)build a label scan store from the node store: the full stream of
     * node label updates, the set of existing label ids, and the highest node id in use.
     */
    public interface FullStoreChangeStream extends Iterable<NodeLabelUpdate>
    {
        PrimitiveLongIterator labelIds();

        long highestNodeId();
    }

    public static FullStoreChangeStream fullStoreLabelUpdateStream( final NeoStoreProvider neoStoreProvider )
    {
        return new FullStoreChangeStream()
        {
            @Override
            public Iterator<NodeLabelUpdate> iterator()
            {
                return new PrefetchingIterator<NodeLabelUpdate>()
                {
                    private final long[] NO_LABELS = new long[0];
                    private final NodeStore nodeStore = neoStoreProvider.evaluate().getNodeStore();
                    private final long highId = nodeStore.getHighestPossibleIdInUse();
                    private long current;

                    @Override
                    protected NodeLabelUpdate fetchNextOrNull()
                    {
                        // Scan forward to the next in-use node that carries at least one label.
                        while ( current <= highId )
                        {
                            NodeRecord node = nodeStore.forceGetRecord( current++ );
                            if ( node.inUse() )
                            {
                                long[] labels = NodeLabelsField.parseLabelsField( node ).get( nodeStore );
                                if ( labels.length > 0 )
                                {
                                    return NodeLabelUpdate.labelChanges( node.getId(), NO_LABELS, labels );
                                }
                            }
                        }
                        return null;
                    }
                };
            }

            @Override
            public PrimitiveLongIterator labelIds()
            {
                final Token[] labels = neoStoreProvider.evaluate().getLabelTokenStore().getTokens( MAX_VALUE );
                return new AbstractPrimitiveLongIterator()
                {
                    int index;

                    {
                        computeNext();
                    }

                    @Override
                    protected void computeNext()
                    {
                        // Must be a strict '<': with '<=' the final call would read
                        // labels[labels.length] and throw ArrayIndexOutOfBoundsException
                        // instead of signalling the end of the iteration.
                        if ( index < labels.length )
                        {
                            next( labels[index++].id() );
                        }
                        else
                        {
                            endReached();
                        }
                    }
                };
            }

            @Override
            public long highestNodeId()
            {
                return neoStoreProvider.evaluate().getNodeStore().getHighestPossibleIdInUse();
            }
        };
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_api_scan_LabelScanStoreProvider.java
|
1,103
|
/**
 * Kernel extension factory contributing the in-memory label scan store used in tests.
 * Registered under the service key "in-memory"; has no dependencies.
 */
@Service.Implementation( KernelExtensionFactory.class )
public class InMemoryLabelScanStoreExtension extends KernelExtensionFactory<InMemoryLabelScanStoreExtension.NoDependencies>
{
    public interface NoDependencies
    { // No dependencies
    }

    public InMemoryLabelScanStoreExtension()
    {
        super( "in-memory" );
    }

    @Override
    public LabelScanStoreProvider newKernelExtension( NoDependencies dependencies ) throws Throwable
    {
        // Priority 2 — presumably chosen to outrank other test providers; confirm against siblings.
        return new LabelScanStoreProvider( new InMemoryLabelScanStore(), 2 );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_scan_InMemoryLabelScanStoreExtension.java
|
1,104
|
{
@Override
public void write( NodeLabelUpdate update ) throws IOException
{
// Split up into added/removed from before/after
long[] added = new long[update.getLabelsAfter().length]; // pessimistic length
long[] removed = new long[update.getLabelsBefore().length]; // pessimistic length
int addedIndex = 0, removedIndex = 0;
for ( long labelAfter : update.getLabelsAfter() )
{
if ( binarySearch( update.getLabelsBefore(), labelAfter ) < 0 )
{
added[addedIndex++] = labelAfter;
}
}
for ( long labelBefore : update.getLabelsBefore() )
{
if ( binarySearch( update.getLabelsAfter(), labelBefore ) < 0 )
{
removed[removedIndex++] = labelBefore;
}
}
// Update the internal map with those changes
for ( int i = 0; i < addedIndex; i++ )
{
nodeSetForAdding( added[i] ).add( update.getNodeId() );
}
for ( int i = 0; i < removedIndex; i++ )
{
nodeSetForRemoving( removed[i] ).remove( update.getNodeId() );
}
}
@Override
public void close() throws IOException
{
}
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_scan_InMemoryLabelScanStore.java
|
1,105
|
{
@Override
public int id()
{
return 0;
}
@Override
public long[] nodes()
{
return toLongArray( nodesToLabels.keySet() );
}
@Override
public long[] labels( long nodeId )
{
return toLongArray( nodesToLabels.get( nodeId ) );
}
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_scan_InMemoryLabelScanStore.java
|
1,106
|
{
@Override
public long maxCount()
{
return 0;
}
@Override
public void close() throws IOException
{
}
@Override
public Iterator<NodeLabelRange> iterator()
{
NodeLabelRange range = new NodeLabelRange()
{
@Override
public int id()
{
return 0;
}
@Override
public long[] nodes()
{
return toLongArray( nodesToLabels.keySet() );
}
@Override
public long[] labels( long nodeId )
{
return toLongArray( nodesToLabels.get( nodeId ) );
}
};
return singletonList( range ).iterator();
}
private long[] toLongArray( Set<Long> longs )
{
long[] array = new long[longs.size()];
int position = 0;
for ( Long entry : longs )
{
array[position++] = entry;
}
return array;
}
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_scan_InMemoryLabelScanStore.java
|
1,107
|
{
@Override
public long next()
{
return nodesIterator.next();
}
@Override
public boolean hasNext()
{
return nodesIterator.hasNext();
}
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_scan_InMemoryLabelScanStore.java
|
1,108
|
{
@Override
public PrimitiveLongIterator nodesWithLabel( int labelId )
{
Set<Long> nodes = data.get( (long) labelId );
if ( null == nodes )
{
return PrimitiveLongIteratorForArray.EMPTY;
}
final Iterator<Long> nodesIterator = nodes.iterator();
return new PrimitiveLongIterator()
{
@Override
public long next()
{
return nodesIterator.next();
}
@Override
public boolean hasNext()
{
return nodesIterator.hasNext();
}
};
}
@Override
public void close()
{ // Nothing to close
}
@Override
public Iterator<Long> labelsForNode( long nodeId )
{
List<Long> nodes = new ArrayList<>();
for ( Map.Entry<Long, Set<Long>> entry : data.entrySet() )
{
if ( entry.getValue().contains( nodeId ) )
{
nodes.add( entry.getKey() );
}
}
return nodes.iterator();
}
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_scan_InMemoryLabelScanStore.java
|
1,109
|
/**
 * In-memory {@link LabelScanStore} test double, backed by a map from label id to the set of
 * node ids carrying that label. Lifecycle methods are all no-ops; no thread-safety is provided.
 */
public class InMemoryLabelScanStore implements LabelScanStore
{
    // LabelId --> Set<NodeId>
    private final Map<Long, Set<Long>> data = new HashMap<>();

    // Node set for the label, or an immutable empty set when the label is unknown, so a
    // subsequent remove() of an absent element is a harmless no-op.
    private Set<Long> nodeSetForRemoving( long labelId )
    {
        Set<Long> nodes = data.get( labelId );
        return nodes != null ? nodes : Collections.<Long>emptySet();
    }

    // Node set for the label, lazily created and registered on first use.
    private Set<Long> nodeSetForAdding( long labelId )
    {
        Set<Long> nodes = data.get( labelId );
        if ( nodes == null )
        {
            nodes = new HashSet<>();
            data.put( labelId, nodes );
        }
        return nodes;
    }

    @Override
    public void recover( Iterator<NodeLabelUpdate> updates ) throws IOException
    {
        // Recovery is just a replay of the updates through a normal writer.
        try(LabelScanWriter writer = newWriter()) {
            while ( updates.hasNext() )
            {
                writer.write( updates.next() );
            }
        }
    }

    @Override
    public LabelScanReader newReader()
    {
        // The reader views the live map directly, so it observes writes made after creation.
        return new LabelScanReader()
        {
            @Override
            public PrimitiveLongIterator nodesWithLabel( int labelId )
            {
                Set<Long> nodes = data.get( (long) labelId );
                if ( null == nodes )
                {
                    return PrimitiveLongIteratorForArray.EMPTY;
                }
                final Iterator<Long> nodesIterator = nodes.iterator();
                // Adapt the boxed Iterator<Long> to the primitive iterator interface.
                return new PrimitiveLongIterator()
                {
                    @Override
                    public long next()
                    {
                        return nodesIterator.next();
                    }

                    @Override
                    public boolean hasNext()
                    {
                        return nodesIterator.hasNext();
                    }
                };
            }

            @Override
            public void close()
            { // Nothing to close
            }

            @Override
            public Iterator<Long> labelsForNode( long nodeId )
            {
                // Reverse lookup: scan every label's node set for this node id.
                List<Long> nodes = new ArrayList<>();
                for ( Map.Entry<Long, Set<Long>> entry : data.entrySet() )
                {
                    if ( entry.getValue().contains( nodeId ) )
                    {
                        nodes.add( entry.getKey() );
                    }
                }
                return nodes.iterator();
            }
        };
    }

    @Override
    public AllEntriesLabelScanReader newAllEntriesReader()
    {
        // Invert label->nodes into node->labels, snapshotting the state at call time.
        final Map<Long, Set<Long>> nodesToLabels = new HashMap<>();
        for ( Map.Entry<Long, Set<Long>> labelToNodes : data.entrySet() )
        {
            for ( Long nodeId : labelToNodes.getValue() )
            {
                if ( ! nodesToLabels.containsKey( nodeId ))
                {
                    nodesToLabels.put( nodeId, new HashSet<Long>( ) );
                }
                nodesToLabels.get( nodeId ).add( labelToNodes.getKey() );
            }
        }
        return new AllEntriesLabelScanReader()
        {
            @Override
            public long maxCount()
            {
                // NOTE(review): always 0 — presumably callers treat this as "unknown"; confirm.
                return 0;
            }

            @Override
            public void close() throws IOException
            {
            }

            @Override
            public Iterator<NodeLabelRange> iterator()
            {
                // All entries are exposed as a single range with id 0.
                NodeLabelRange range = new NodeLabelRange()
                {
                    @Override
                    public int id()
                    {
                        return 0;
                    }

                    @Override
                    public long[] nodes()
                    {
                        return toLongArray( nodesToLabels.keySet() );
                    }

                    @Override
                    public long[] labels( long nodeId )
                    {
                        return toLongArray( nodesToLabels.get( nodeId ) );
                    }
                };
                return singletonList( range ).iterator();
            }

            // Unboxes a set of Longs into a primitive array, in the set's iteration order.
            private long[] toLongArray( Set<Long> longs )
            {
                long[] array = new long[longs.size()];
                int position = 0;
                for ( Long entry : longs )
                {
                    array[position++] = entry;
                }
                return array;
            }
        };
    }

    @Override
    public ResourceIterator<File> snapshotStoreFiles()
    {
        // Nothing on disk to snapshot for an in-memory store.
        return emptyIterator();
    }

    @Override
    public void init()
    { // Nothing to init
    }

    @Override
    public void start()
    { // Nothing to start
    }

    @Override
    public void stop()
    { // Nothing to stop
    }

    @Override
    public void shutdown()
    { // Nothing to shutdown
    }

    @Override
    public LabelScanWriter newWriter()
    {
        return new LabelScanWriter()
        {
            @Override
            public void write( NodeLabelUpdate update ) throws IOException
            {
                // Split up into added/removed from before/after
                // NOTE(review): binarySearch assumes both label arrays are sorted — confirm with callers.
                long[] added = new long[update.getLabelsAfter().length]; // pessimistic length
                long[] removed = new long[update.getLabelsBefore().length]; // pessimistic length
                int addedIndex = 0, removedIndex = 0;
                for ( long labelAfter : update.getLabelsAfter() )
                {
                    if ( binarySearch( update.getLabelsBefore(), labelAfter ) < 0 )
                    {
                        added[addedIndex++] = labelAfter;
                    }
                }
                for ( long labelBefore : update.getLabelsBefore() )
                {
                    if ( binarySearch( update.getLabelsAfter(), labelBefore ) < 0 )
                    {
                        removed[removedIndex++] = labelBefore;
                    }
                }
                // Update the internal map with those changes
                for ( int i = 0; i < addedIndex; i++ )
                {
                    nodeSetForAdding( added[i] ).add( update.getNodeId() );
                }
                for ( int i = 0; i < removedIndex; i++ )
                {
                    nodeSetForRemoving( removed[i] ).remove( update.getNodeId() );
                }
            }

            @Override
            public void close() throws IOException
            {
            }
        };
    }

    @Override
    public void force()
    { // Nothing to force
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_scan_InMemoryLabelScanStore.java
|
1,110
|
{
boolean closed;
@Override
public void release()
{
if ( closed )
{
throw new IllegalStateException();
}
released++;
closed = true;
}
@Override
public void registerWithTransaction()
{
if ( closed )
{
throw new IllegalStateException();
}
txBound++;
closed = true;
}
@Override
public void close()
{
if ( !closed )
{
registerWithTransaction();
}
}
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_operations_ConstraintEnforcingEntityOperationsTest.java
|
1,111
|
/**
 * Mockito {@link Answer} that hands out {@link ReleasableLock}s while counting acquisitions
 * and how each lock was disposed of: explicitly released vs. registered with the transaction.
 */
private class LockAnswer implements Answer<ReleasableLock>
{
    public int acquired, txBound, released;

    @Override
    public ReleasableLock answer( InvocationOnMock invocation ) throws Throwable
    {
        acquired++;
        return new ReleasableLock()
        {
            boolean closed; // guards against double release/registration

            @Override
            public void release()
            {
                if ( closed )
                {
                    throw new IllegalStateException();
                }
                released++;
                closed = true;
            }

            @Override
            public void registerWithTransaction()
            {
                if ( closed )
                {
                    throw new IllegalStateException();
                }
                txBound++;
                closed = true;
            }

            @Override
            public void close()
            {
                // Default disposal: a lock not explicitly released ends up transaction-bound.
                if ( !closed )
                {
                    registerWithTransaction();
                }
            }
        };
    }

    // Number of locks still held (tx-bound); fails if any lock was neither released nor bound.
    public int held()
    {
        assertEquals( "locking must be balanced", acquired, txBound + released );
        return txBound;
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_operations_ConstraintEnforcingEntityOperationsTest.java
|
1,112
|
/**
 * Verifies which index-entry locks (read vs. write) a unique index lookup takes and keeps,
 * depending on whether the looked-up node exists, does not exist, or is created concurrently.
 */
public class ConstraintEnforcingEntityOperationsTest
{
    private final int labelId = 1;
    private final int propertyKeyId = 2;
    private final String value = "value";
    private final IndexDescriptor indexDescriptor = new IndexDescriptor( labelId, propertyKeyId );
    private EntityReadOperations readOps;
    private SchemaReadOperations schemaOps;
    private KernelStatement state;
    private LockHolder locks;
    private ConstraintEnforcingEntityOperations ops;

    @Before
    public void given_ConstraintEnforcingEntityOperations_with_OnlineIndex() throws Exception
    {
        this.readOps = mock( EntityReadOperations.class );
        this.schemaOps = mock( SchemaReadOperations.class );
        this.state = mock( KernelStatement.class );
        when( schemaOps.indexGetState( state, indexDescriptor ) ).thenReturn( InternalIndexState.ONLINE );
        this.locks = mock( LockHolder.class );
        when( state.locks() ).thenReturn( locks );
        this.ops = new ConstraintEnforcingEntityOperations( null, readOps, schemaOps );
    }

    @Test
    public void shouldHoldIndexReadLockIfNodeIsExists() throws Exception
    {
        // given: the index lookup finds the node on the first attempt
        long expectedNodeId = 15;
        when( readOps.nodeGetUniqueFromIndexLookup( state, indexDescriptor, value ) ).thenReturn( expectedNodeId );
        LockAnswer readLocks = new LockAnswer(), writeLocks = new LockAnswer();
        when( locks.getReleasableIndexEntryReadLock( labelId, propertyKeyId, value ) ).then( readLocks );
        when( locks.getReleasableIndexEntryWriteLock( labelId, propertyKeyId, value ) ).then( writeLocks );

        // when
        long nodeId = ops.nodeGetUniqueFromIndexLookup( state, indexDescriptor, value );

        // then: only a read lock is kept
        assertEquals( expectedNodeId, nodeId );
        assertEquals( 1, readLocks.held() );
        assertEquals( 0, writeLocks.held() );
    }

    @Test
    public void shouldHoldIndexWriteLockIfNodeDoesNotExist() throws Exception
    {
        // given: the index lookup never finds the node
        when( readOps.nodeGetUniqueFromIndexLookup( state, indexDescriptor, value ) ).thenReturn( NO_SUCH_NODE );
        LockAnswer readLocks = new LockAnswer(), writeLocks = new LockAnswer();
        when( locks.getReleasableIndexEntryReadLock( labelId, propertyKeyId, value ) ).then( readLocks );
        when( locks.getReleasableIndexEntryWriteLock( labelId, propertyKeyId, value ) ).then( writeLocks );

        // when
        long nodeId = ops.nodeGetUniqueFromIndexLookup( state, indexDescriptor, value );

        // then: only a write lock is kept (guards the entry for a potential insert)
        assertEquals( NO_SUCH_NODE, nodeId );
        assertEquals( 0, readLocks.held() );
        assertEquals( 1, writeLocks.held() );
    }

    @Test
    public void shouldHoldIndexReadLockIfNodeIsConcurrentlyCreated() throws Exception
    {
        // given: first lookup misses, second (after acquiring the write lock) hits —
        // simulating a concurrent creation of the node
        long expectedNodeId = 15;
        when( readOps.nodeGetUniqueFromIndexLookup( state, indexDescriptor, value ) )
                .thenReturn( NO_SUCH_NODE )
                .thenReturn( expectedNodeId );
        LockAnswer readLocks = new LockAnswer(), writeLocks = new LockAnswer();
        when( locks.getReleasableIndexEntryReadLock( labelId, propertyKeyId, value ) ).then( readLocks );
        when( locks.getReleasableIndexEntryWriteLock( labelId, propertyKeyId, value ) ).then( writeLocks );

        // when
        long nodeId = ops.nodeGetUniqueFromIndexLookup( state, indexDescriptor, value );

        // then: the write lock is downgraded away, leaving a single read lock
        assertEquals( expectedNodeId, nodeId );
        assertEquals( 1, readLocks.held() );
        assertEquals( 0, writeLocks.held() );
    }

    /**
     * Mockito {@link Answer} producing {@link ReleasableLock}s that count how they are disposed:
     * explicitly released vs. registered with the transaction.
     */
    private class LockAnswer implements Answer<ReleasableLock>
    {
        public int acquired, txBound, released;

        @Override
        public ReleasableLock answer( InvocationOnMock invocation ) throws Throwable
        {
            acquired++;
            return new ReleasableLock()
            {
                boolean closed; // guards against double release/registration

                @Override
                public void release()
                {
                    if ( closed )
                    {
                        throw new IllegalStateException();
                    }
                    released++;
                    closed = true;
                }

                @Override
                public void registerWithTransaction()
                {
                    if ( closed )
                    {
                        throw new IllegalStateException();
                    }
                    txBound++;
                    closed = true;
                }

                @Override
                public void close()
                {
                    // A lock not explicitly released ends up transaction-bound.
                    if ( !closed )
                    {
                        registerWithTransaction();
                    }
                }
            };
        }

        // Locks still held (tx-bound); fails if any lock was neither released nor bound.
        public int held()
        {
            assertEquals( "locking must be balanced", acquired, txBound + released );
            return txBound;
        }
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_operations_ConstraintEnforcingEntityOperationsTest.java
|
1,113
|
/**
 * Transaction state for the graph entity itself (graph-wide properties).
 */
public class GraphState extends PropertyContainerState
{
    public GraphState()
    {
        super( -1 ); // -1 sentinel id — presumably because the single graph has no real entity id; confirm
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_api_state_GraphState.java
|
1,114
|
/**
 * Verifies that index lookups performed through the transaction context correctly merge store
 * results with transaction-local changes: deleted/created nodes, added/removed labels, and
 * added/removed properties — for both regular and unique index lookups.
 */
public class IndexQueryTransactionStateTest
{
    @Test
    public void shouldExcludeRemovedNodesFromIndexQuery() throws Exception
    {
        // Given: the store index returns 1,2,3 but node 2 is deleted in this tx
        when( store.nodesGetFromIndexLookup( state, indexDescriptor, value ) )
                .then( answerAsPrimitiveLongIteratorFrom( asList( 1l, 2l, 3l ) ) );
        when( oldTxState.getNodesWithChangedProperty( propertyKeyId, value ) ).thenReturn( new DiffSets<Long>() );
        when( oldTxState.hasChanges() ).thenReturn( true );
        txContext.nodeDelete( state, 2l );

        // When
        PrimitiveLongIterator result = txContext.nodesGetFromIndexLookup( state, indexDescriptor, value );

        // Then
        assertThat( asSet( result ), equalTo( asSet( 1l, 3l ) ) );
    }

    @Test
    public void shouldExcludeRemovedNodeFromUniqueIndexQuery() throws Exception
    {
        // Given: the store finds node 1 but it is deleted in this tx
        when( store.nodeGetUniqueFromIndexLookup( state, indexDescriptor, value ) ).thenReturn( asPrimitiveIterator( 1l ) );
        when( oldTxState.getNodesWithChangedProperty( propertyKeyId, value ) ).thenReturn( new DiffSets<Long>() );
        when( oldTxState.hasChanges() ).thenReturn( true );
        txContext.nodeDelete( state, 1l );

        // When
        long result = txContext.nodeGetUniqueFromIndexLookup( state, indexDescriptor, value );

        // Then
        assertNoSuchNode( result );
    }

    @Test
    public void shouldExcludeChangedNodesWithMissingLabelFromIndexQuery() throws Exception
    {
        // Given: node 1's property changed in this tx, but it does not carry the indexed label
        when( store.nodesGetFromIndexLookup( state, indexDescriptor, value ) )
                .then( answerAsPrimitiveLongIteratorFrom( asList( 2l, 3l ) ) );
        when( store.nodeHasLabel( state, 1l, labelId ) ).thenReturn( false );
        when( oldTxState.getNodesWithChangedProperty( propertyKeyId, value ) ).thenReturn(
                new DiffSets<>( asSet( 1l ), Collections.<Long>emptySet() ) );
        when( oldTxState.hasChanges() ).thenReturn( true );

        // When
        PrimitiveLongIterator result = txContext.nodesGetFromIndexLookup( state, indexDescriptor, value );

        // Then
        assertThat( asSet( result ), equalTo( asSet( 2l, 3l ) ) );
    }

    @Test
    public void shouldExcludeChangedNodeWithMissingLabelFromUniqueIndexQuery() throws Exception
    {
        // Given: same as above, but through the unique lookup path
        when( store.nodeGetUniqueFromIndexLookup( state, indexDescriptor, value ) ).thenReturn(
                emptyPrimitiveLongIterator() );
        when( store.nodeHasLabel( state, 1l, labelId ) ).thenReturn( false );
        when( oldTxState.getNodesWithChangedProperty( propertyKeyId, value ) ).thenReturn(
                new DiffSets<>( asSet( 1l ), Collections.<Long>emptySet() ) );
        when( oldTxState.hasChanges() ).thenReturn( true );

        // When
        long result = txContext.nodeGetUniqueFromIndexLookup( state, indexDescriptor, value );

        // Then
        assertNoSuchNode( result );
    }

    @Test
    public void shouldIncludeCreatedNodesWithCorrectLabelAndProperty() throws Exception
    {
        // Given: node 1 gains both the property (via tx state) and the label (added below)
        when( store.nodesGetFromIndexLookup( state, indexDescriptor, value ) )
                .then( answerAsPrimitiveLongIteratorFrom( asList( 2l, 3l ) ) );
        when( store.nodeGetProperty( eq( state ), anyLong(), eq( propertyKeyId ) ) ).thenReturn( Property
                .noNodeProperty( 1, propertyKeyId ) );
        when( store.nodeGetAllProperties( eq( state ), anyLong() ) ).thenReturn( IteratorUtil
                .<DefinedProperty>emptyIterator() );
        when( store.nodeHasLabel( state, 1l, labelId ) ).thenReturn( false );
        when( oldTxState.getNodesWithChangedProperty( propertyKeyId, value ) ).thenReturn(
                new DiffSets<>( asSet( 1l ), Collections.<Long>emptySet() ) );

        // When
        txContext.nodeAddLabel( state, 1l, labelId );
        PrimitiveLongIterator result = txContext.nodesGetFromIndexLookup( state, indexDescriptor, value );

        // Then
        assertThat( asSet( result ), equalTo( asSet( 1l, 2l, 3l ) ) );
    }

    @Test
    public void shouldIncludeUniqueCreatedNodeWithCorrectLabelAndProperty() throws Exception
    {
        // Given: unique-lookup variant of the previous test
        when( store.nodeGetUniqueFromIndexLookup( state, indexDescriptor, value ) ).thenReturn(
                emptyPrimitiveLongIterator() );
        when( store.nodeGetProperty( eq( state ), anyLong(), eq( propertyKeyId ) ) ).thenReturn( Property
                .noNodeProperty( 1, propertyKeyId ) );
        when( store.nodeGetAllProperties( eq( state ), anyLong() ) ).thenReturn( IteratorUtil
                .<DefinedProperty>emptyIterator() );
        when( store.nodeHasLabel( state, 1l, labelId ) ).thenReturn( false );
        when( oldTxState.getNodesWithChangedProperty( propertyKeyId, value ) ).thenReturn(
                new DiffSets<>( asSet( 1l ), Collections.<Long>emptySet() ) );

        // When
        txContext.nodeAddLabel( state, 1l, labelId );
        long result = txContext.nodeGetUniqueFromIndexLookup( state, indexDescriptor, value );

        // Then
        assertThat( result, equalTo( 1l ) );
    }

    @Test
    public void shouldIncludeExistingNodesWithCorrectPropertyAfterAddingLabel() throws Exception
    {
        // Given: node 1 already has the matching property; the label is added in this tx
        when( store.nodesGetFromIndexLookup( state, indexDescriptor, value ) )
                .then( answerAsPrimitiveLongIteratorFrom( asList( 2l, 3l ) ) );
        when( store.nodeHasLabel( state, 1l, labelId ) ).thenReturn( false );
        DefinedProperty stringProperty = Property.stringProperty( propertyKeyId, value );
        when( store.nodeGetProperty( state, 1l, propertyKeyId ) ).thenReturn( stringProperty );
        when( store.nodeGetAllProperties( eq( state ), anyLong() ) ).thenReturn( iterator( stringProperty ) );
        when( oldTxState.getNodesWithChangedProperty( propertyKeyId, value ) ).thenReturn( new DiffSets<Long>() );
        txContext.nodeAddLabel( state, 1l, labelId );

        // When
        PrimitiveLongIterator result = txContext.nodesGetFromIndexLookup( state, indexDescriptor, value );

        // Then
        assertThat( asSet( result ), equalTo( asSet( 1l, 2l, 3l ) ) );
    }

    @Test
    public void shouldIncludeExistingUniqueNodeWithCorrectPropertyAfterAddingLabel() throws Exception
    {
        // Given: unique-lookup variant, using node 2
        when( store.nodeGetUniqueFromIndexLookup( state, indexDescriptor, value ) ).thenReturn(
                emptyPrimitiveLongIterator() );
        when( store.nodeHasLabel( state, 2l, labelId ) ).thenReturn( false );
        DefinedProperty stringProperty = Property.stringProperty( propertyKeyId, value );
        when( store.nodeGetProperty( state, 2l, propertyKeyId ) ).thenReturn( stringProperty );
        when( store.nodeGetAllProperties( eq( state ), anyLong() ) ).thenReturn( iterator( stringProperty ) );
        when( oldTxState.getNodesWithChangedProperty( propertyKeyId, value ) ).thenReturn( new DiffSets<Long>() );
        txContext.nodeAddLabel( state, 2l, labelId );

        // When
        long result = txContext.nodeGetUniqueFromIndexLookup( state, indexDescriptor, value );

        // Then
        assertThat( result, equalTo( 2l ) );
    }

    @Test
    public void shouldExcludeExistingNodesWithCorrectPropertyAfterRemovingLabel() throws Exception
    {
        // Given: node 1 matches in the store, but its label is removed in this tx
        when( store.nodesGetFromIndexLookup( state, indexDescriptor, value ) )
                .then( answerAsPrimitiveLongIteratorFrom( asList( 1l, 2l, 3l ) ) );
        when( store.nodeHasLabel( state, 1l, labelId ) ).thenReturn( true );
        DefinedProperty stringProperty = Property.stringProperty( propertyKeyId, value );
        when( store.nodeGetProperty( state, 1l, propertyKeyId ) ).thenReturn( stringProperty );
        when( store.nodeGetAllProperties( eq( state ), anyLong() ) ).thenReturn( iterator( stringProperty ) );
        when( oldTxState.getNodesWithChangedProperty( propertyKeyId, value ) ).thenReturn( new DiffSets<Long>() );
        txContext.nodeRemoveLabel( state, 1l, labelId );

        // When
        PrimitiveLongIterator result = txContext.nodesGetFromIndexLookup( state, indexDescriptor, value );

        // Then
        assertThat( asSet( result ), equalTo( asSet( 2l, 3l ) ) );
    }

    @Test
    public void shouldExcludeExistingUniqueNodeWithCorrectPropertyAfterRemovingLabel() throws Exception
    {
        // Given: unique-lookup variant of the previous test
        when( store.nodeGetUniqueFromIndexLookup( state, indexDescriptor, value ) ).thenReturn(
                asPrimitiveIterator( 1l ) );
        when( store.nodeHasLabel( state, 1l, labelId ) ).thenReturn( true );
        DefinedProperty stringProperty = Property.stringProperty( propertyKeyId, value );
        when( store.nodeGetProperty( state, 1l, propertyKeyId ) ).thenReturn( stringProperty );
        when( store.nodeGetAllProperties( eq( state ), anyLong() ) ).thenReturn( iterator( stringProperty ) );
        when( oldTxState.getNodesWithChangedProperty( propertyKeyId, value ) ).thenReturn( new DiffSets<Long>() );
        txContext.nodeRemoveLabel( state, 1l, labelId );

        // When
        long result = txContext.nodeGetUniqueFromIndexLookup( state, indexDescriptor, value );

        // Then
        assertNoSuchNode( result );
    }

    @Test
    public void shouldExcludeNodesWithRemovedProperty() throws Exception
    {
        // Given: node 1's matching property is removed in this tx
        when( store.nodesGetFromIndexLookup( state, indexDescriptor, value ) )
                .then( answerAsPrimitiveLongIteratorFrom( asList( 2l, 3l ) ) );
        when( store.nodeHasLabel( state, 1l, labelId ) ).thenReturn( true );
        when( oldTxState.getNodesWithChangedProperty( propertyKeyId, value ) ).thenReturn(
                new DiffSets<>( Collections.<Long>emptySet(), asSet( 1l ) ) );
        txContext.nodeAddLabel( state, 1l, labelId );

        // When
        PrimitiveLongIterator result = txContext.nodesGetFromIndexLookup( state, indexDescriptor, value );

        // Then
        assertThat( asSet( result ), equalTo( asSet( 2l, 3l ) ) );
    }

    @Test
    public void shouldExcludeUniqueNodeWithRemovedProperty() throws Exception
    {
        // Given: unique-lookup variant of the previous test
        when( store.nodeGetUniqueFromIndexLookup( state, indexDescriptor, value ) ).thenReturn(
                asPrimitiveIterator( 1l ) );
        when( store.nodeHasLabel( state, 1l, labelId ) ).thenReturn( true );
        when( oldTxState.getNodesWithChangedProperty( propertyKeyId, value ) ).thenReturn(
                new DiffSets<>( Collections.<Long>emptySet(), asSet( 1l ) ) );
        when( oldTxState.hasChanges() ).thenReturn( true );

        // When
        long result = txContext.nodeGetUniqueFromIndexLookup( state, indexDescriptor, value );

        // Then
        assertNoSuchNode( result );
    }

    // exists
    int labelId = 2;
    int propertyKeyId = 3;
    String value = "My Value";
    IndexDescriptor indexDescriptor = new IndexDescriptor( labelId, propertyKeyId );
    private StoreReadLayer store;
    private OldTxStateBridge oldTxState;
    private EntityOperations txContext;
    private KernelStatement state;

    @Before
    public void before() throws Exception
    {
        // Wire a real TxStateImpl (over mocked legacy bridges) into mocked statement/store layers.
        oldTxState = mock( OldTxStateBridge.class );
        TxState txState = new TxStateImpl( oldTxState, mock( PersistenceManager.class ),
                mock( TxState.IdGeneration.class ) );
        state = StatementOperationsTestHelper.mockedState( txState );
        int labelId1 = 10, labelId2 = 12;
        store = mock( StoreReadLayer.class );
        when( store.indexGetState( state, indexDescriptor )).thenReturn( InternalIndexState.ONLINE );
        when( store.indexesGetForLabel( state, labelId1 ) ).then( answerAsIteratorFrom( Collections
                .<IndexDescriptor>emptyList() ) );
        when( store.indexesGetForLabel( state, labelId2 ) ).then( answerAsIteratorFrom( Collections
                .<IndexDescriptor>emptyList() ) );
        when( store.indexesGetAll( state ) ).then( answerAsIteratorFrom( Collections.<IndexDescriptor>emptyList() ) );
        when( store.constraintsGetForLabel( state, labelId ) ).thenReturn( Collections.<UniquenessConstraint>emptyIterator() );
        StateHandlingStatementOperations stateHandlingOperations = new StateHandlingStatementOperations(
                store,
                mock( LegacyPropertyTrackers.class ),
                mock( ConstraintIndexCreator.class ) );
        txContext = new ConstraintEnforcingEntityOperations(
                stateHandlingOperations, stateHandlingOperations, stateHandlingOperations );
    }

    // Asserts that the lookup reported "no such node".
    private void assertNoSuchNode( long node )
    {
        assertThat( node, equalTo( NO_SUCH_NODE ) );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_state_IndexQueryTransactionStateTest.java
|
1,115
|
/**
 * Per-label transaction state: node membership changes for the label, together with
 * index, constraint-index and uniqueness-constraint changes scoped to it.
 */
public final class LabelState extends EntityState
{
    private final DiffSets<Long> nodeDiffSets = new DiffSets<>();
    private final DiffSets<IndexDescriptor> indexChanges = new DiffSets<>();
    private final DiffSets<IndexDescriptor> constraintIndexChanges = new DiffSets<>();
    private final DiffSets<UniquenessConstraint> constraintsChanges = new DiffSets<>();

    public LabelState( long id )
    {
        super( id );
    }

    /** Nodes added to / removed from this label in the current transaction. */
    public DiffSets<Long> getNodeDiffSets()
    {
        return nodeDiffSets;
    }

    /** Indexes created or dropped for this label in the current transaction. */
    public DiffSets<IndexDescriptor> indexChanges()
    {
        return indexChanges;
    }

    /** Constraint-backing indexes created or dropped for this label. */
    public DiffSets<IndexDescriptor> constraintIndexChanges()
    {
        return constraintIndexChanges;
    }

    /** Uniqueness constraints added or removed for this label. */
    public DiffSets<UniquenessConstraint> constraintsChanges()
    {
        return constraintsChanges;
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_api_state_LabelState.java
|
1,116
|
/**
 * Test fixture pairing a node id with the label ids associated with it.
 */
private static class Labels
{
    private final long nodeId;
    // Boxed Integer varargs so call sites can pass literals directly.
    private final Integer[] labelIds;

    Labels( long nodeId, Integer... labelIds )
    {
        this.nodeId = nodeId;
        this.labelIds = labelIds;
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_state_SchemaTransactionStateTest.java
|
1,117
|
{
@Override
public NodeState newState( long id )
{
return new NodeState( id );
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_api_state_TxStateImpl.java
|
1,118
|
{
@Override
public LabelState newState( long id )
{
return new LabelState( id );
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_api_state_TxStateImpl.java
|
1,119
|
/**
 * Accumulates the changes made within a single transaction: node labels and properties,
 * relationship properties, graph properties, and schema changes (indexes and uniqueness
 * constraints), all expressed as {@code DiffSets} relative to the committed store state.
 *
 * Every per-entity map and diff set is allocated lazily on first write so a read-only
 * transaction carries no allocation overhead; the {@code has*()} helpers test for
 * allocation without triggering it. Most mutators also forward the change to the legacy
 * state bridge / persistence manager and flip {@link #hasChanges}.
 */
public final class TxStateImpl implements TxState
{
    // Factories handed to getState(...) so per-id state holders are created only on demand.
    private static final StateCreator<LabelState> LABEL_STATE_CREATOR = new StateCreator<LabelState>()
    {
        @Override
        public LabelState newState( long id )
        {
            return new LabelState( id );
        }
    };

    private static final StateCreator<NodeState> NODE_STATE_CREATOR = new StateCreator<NodeState>()
    {
        @Override
        public NodeState newState( long id )
        {
            return new NodeState( id );
        }
    };

    private static final StateCreator<RelationshipState> RELATIONSHIP_STATE_CREATOR =
            new StateCreator<RelationshipState>()
            {
                @Override
                public RelationshipState newState( long id )
                {
                    return new RelationshipState( id );
                }
            };

    // All of the following are lazily initialized -- null means "untouched in this tx".
    private Map<Long/*Node ID*/, NodeState> nodeStatesMap;
    private Map<Long/*Relationship ID*/, RelationshipState> relationshipStatesMap;
    private Map<Long/*Label ID*/, LabelState> labelStatesMap;
    private GraphState graphState;
    private DiffSets<IndexDescriptor> indexChanges;
    private DiffSets<IndexDescriptor> constraintIndexChanges;
    private DiffSets<UniquenessConstraint> constraintsChanges;
    private DiffSets<Long> deletedNodes;
    private DiffSets<Long> deletedRelationships;
    // Maps each constraint created in this tx to the id of the index that backs it.
    private Map<UniquenessConstraint, Long> createdConstraintIndexesByConstraint;

    private final OldTxStateBridge legacyState;
    private final PersistenceManager persistenceManager; // should go away dammit!
    private final IdGeneration idGeneration; // needed when we move createNode() and createRelationship() to here...

    // Set by any mutator; note getState(...) also sets it when it creates new state.
    private boolean hasChanges;

    /**
     * @param legacyState bridge into the pre-kernel-API transaction state, which still owns
     *                    property writes and node/relationship creation and deletion
     * @param legacyTransaction legacy persistence layer, still used for label add/remove
     * @param idGeneration id source, currently unused here (see field comment)
     */
    public TxStateImpl( OldTxStateBridge legacyState,
                        PersistenceManager legacyTransaction,
                        IdGeneration idGeneration )
    {
        this.legacyState = legacyState;
        this.persistenceManager = legacyTransaction;
        this.idGeneration = idGeneration;
    }

    /**
     * Pushes all accumulated changes to the visitor. Only categories that were actually
     * touched (allocated) are visited, in the order: node label changes, index changes,
     * constraint-index changes, constraint changes.
     */
    @Override
    public void accept( final Visitor visitor )
    {
        if ( hasNodeStatesMap() && !nodeStatesMap().isEmpty() )
        {
            for ( NodeState node : nodeStates() )
            {
                DiffSets<Integer> labelDiff = node.labelDiffSets();
                visitor.visitNodeLabelChanges( node.getId(), labelDiff.getAdded(), labelDiff.getRemoved() );
            }
        }
        if ( hasIndexChangesDiffSets() && !indexChanges().isEmpty() )
        {
            indexChanges().accept( indexVisitor( visitor, false ) );
        }
        if ( hasConstraintIndexChangesDiffSets() && !constraintIndexChanges().isEmpty() )
        {
            constraintIndexChanges().accept( indexVisitor( visitor, true ) );
        }
        if ( hasConstraintsChangesDiffSets() && !constraintsChanges().isEmpty() )
        {
            constraintsChanges().accept( new DiffSets.Visitor<UniquenessConstraint>()
            {
                @Override
                public void visitAdded( UniquenessConstraint element )
                {
                    visitor.visitAddedConstraint( element );
                }

                @Override
                public void visitRemoved( UniquenessConstraint element )
                {
                    visitor.visitRemovedConstraint( element );
                }
            } );
        }
    }

    // Adapts a TxState.Visitor to a DiffSets visitor for index descriptors; forConstraint
    // distinguishes plain indexes from constraint-backing ones.
    private static DiffSets.Visitor<IndexDescriptor> indexVisitor( final Visitor visitor, final boolean forConstraint )
    {
        return new DiffSets.Visitor<IndexDescriptor>()
        {
            @Override
            public void visitAdded( IndexDescriptor element )
            {
                visitor.visitAddedIndex( element, forConstraint );
            }

            @Override
            public void visitRemoved( IndexDescriptor element )
            {
                visitor.visitRemovedIndex( element, forConstraint );
            }
        };
    }

    @Override
    public boolean hasChanges()
    {
        // Legacy state tracks its own changes (properties, creations) separately.
        return hasChanges || legacyState.hasChanges();
    }

    @Override
    public Iterable<NodeState> nodeStates()
    {
        return hasNodeStatesMap() ? nodeStatesMap().values() : Iterables.<NodeState>empty();
    }

    @Override
    public DiffSets<Long> labelStateNodeDiffSets( int labelId )
    {
        return getOrCreateLabelState( labelId ).getNodeDiffSets();
    }

    @Override
    public DiffSets<Integer> nodeStateLabelDiffSets( long nodeId )
    {
        return getOrCreateNodeState( nodeId ).labelDiffSets();
    }

    @Override
    public DiffSets<DefinedProperty> nodePropertyDiffSets( long nodeId )
    {
        return getOrCreateNodeState( nodeId ).propertyDiffSets();
    }

    @Override
    public DiffSets<DefinedProperty> relationshipPropertyDiffSets( long relationshipId )
    {
        return getOrCreateRelationshipState( relationshipId ).propertyDiffSets();
    }

    @Override
    public DiffSets<DefinedProperty> graphPropertyDiffSets()
    {
        return getOrCreateGraphState().propertyDiffSets();
    }

    @Override
    public boolean nodeIsAddedInThisTx( long nodeId )
    {
        // Node creation still lives in the legacy state.
        return legacyState.nodeIsAddedInThisTx( nodeId );
    }

    @Override
    public boolean relationshipIsAddedInThisTx( long relationshipId )
    {
        return legacyState.relationshipIsAddedInThisTx( relationshipId );
    }

    @Override
    public void nodeDoDelete( long nodeId )
    {
        legacyState.deleteNode( nodeId );
        // DiffSets.remove marks the id as removed (deleted) in this tx.
        nodesDeletedInTx().remove( nodeId );
        hasChanges = true;
    }

    @Override
    public boolean nodeIsDeletedInThisTx( long nodeId )
    {
        return hasDeletedNodesDiffSets() && nodesDeletedInTx().isRemoved( nodeId );
    }

    @Override
    public void relationshipDoDelete( long relationshipId )
    {
        legacyState.deleteRelationship( relationshipId );
        deletedRelationships().remove( relationshipId );
        hasChanges = true;
    }

    @Override
    public boolean relationshipIsDeletedInThisTx( long relationshipId )
    {
        return hasDeletedRelationshipsDiffSets() && deletedRelationships().isRemoved( relationshipId );
    }

    /**
     * Records a node property write. If the property already had a value the diff is a
     * replace, otherwise a plain add. No-op when newProperty is undefined.
     */
    @Override
    public void nodeDoReplaceProperty( long nodeId, Property replacedProperty, DefinedProperty newProperty )
    {
        if ( newProperty.isDefined() )
        {
            DiffSets<DefinedProperty> diffSets = nodePropertyDiffSets( nodeId );
            if ( replacedProperty.isDefined() )
            {
                diffSets.replace( (DefinedProperty)replacedProperty, newProperty );
            }
            else
            {
                diffSets.add( newProperty );
            }
            legacyState.nodeSetProperty( nodeId, newProperty );
            hasChanges = true;
        }
    }

    // Same replace-or-add pattern as nodeDoReplaceProperty, for relationships.
    @Override
    public void relationshipDoReplaceProperty( long relationshipId, Property replacedProperty, DefinedProperty newProperty )
    {
        if ( newProperty.isDefined() )
        {
            DiffSets<DefinedProperty> diffSets = relationshipPropertyDiffSets( relationshipId );
            if ( replacedProperty.isDefined() )
            {
                diffSets.replace( (DefinedProperty)replacedProperty, newProperty );
            }
            else
            {
                diffSets.add( newProperty );
            }
            legacyState.relationshipSetProperty( relationshipId, newProperty );
            hasChanges = true;
        }
    }

    // Same replace-or-add pattern, for the graph-wide property container.
    @Override
    public void graphDoReplaceProperty( Property replacedProperty, DefinedProperty newProperty )
    {
        if ( newProperty.isDefined() )
        {
            DiffSets<DefinedProperty> diffSets = graphPropertyDiffSets();
            if ( replacedProperty.isDefined() )
            {
                diffSets.replace( (DefinedProperty)replacedProperty, newProperty );
            }
            else
            {
                diffSets.add( newProperty );
            }
            legacyState.graphSetProperty( newProperty );
            hasChanges = true;
        }
    }

    @Override
    public void nodeDoRemoveProperty( long nodeId, Property removedProperty )
    {
        // Removing an undefined property is a no-op by design.
        if ( removedProperty.isDefined() )
        {
            nodePropertyDiffSets( nodeId ).remove( (DefinedProperty)removedProperty );
            legacyState.nodeRemoveProperty( nodeId, (DefinedProperty)removedProperty );
            hasChanges = true;
        }
    }

    @Override
    public void relationshipDoRemoveProperty( long relationshipId, Property removedProperty )
    {
        if ( removedProperty.isDefined() )
        {
            relationshipPropertyDiffSets( relationshipId ).remove( (DefinedProperty)removedProperty );
            legacyState.relationshipRemoveProperty( relationshipId, (DefinedProperty)removedProperty );
            hasChanges = true;
        }
    }

    @Override
    public void graphDoRemoveProperty( Property removedProperty )
    {
        if ( removedProperty.isDefined() )
        {
            graphPropertyDiffSets().remove( (DefinedProperty)removedProperty );
            legacyState.graphRemoveProperty( (DefinedProperty)removedProperty );
            hasChanges = true;
        }
    }

    // Label changes are recorded from both directions (label -> nodes, node -> labels)
    // so either lookup sees the change.
    @Override
    public void nodeDoAddLabel( int labelId, long nodeId )
    {
        labelStateNodeDiffSets( labelId ).add( nodeId );
        nodeStateLabelDiffSets( nodeId ).add( labelId );
        persistenceManager.addLabelToNode( labelId, nodeId );
        hasChanges = true;
    }

    @Override
    public void nodeDoRemoveLabel( int labelId, long nodeId )
    {
        labelStateNodeDiffSets( labelId ).remove( nodeId );
        nodeStateLabelDiffSets( nodeId ).remove( labelId );
        persistenceManager.removeLabelFromNode( labelId, nodeId );
        hasChanges = true;
    }

    /**
     * Returns whether this tx ADDED or REMOVED the label on the node, or UNTOUCHED if
     * the node's label set was not changed in this tx (read-only lookup, creates no state).
     */
    @Override
    public UpdateTriState labelState( long nodeId, int labelId )
    {
        NodeState nodeState = getState( nodeStatesMap(), nodeId, null );
        if ( nodeState != null )
        {
            DiffSets<Integer> labelDiff = nodeState.labelDiffSets();
            if ( labelDiff.isAdded( labelId ) )
            {
                return UpdateTriState.ADDED;
            }
            if ( labelDiff.isRemoved( labelId ) )
            {
                return UpdateTriState.REMOVED;
            }
        }
        return UpdateTriState.UNTOUCHED;
    }

    @Override
    public Set<Long> nodesWithLabelAdded( int labelId )
    {
        if ( hasLabelStatesMap() )
        {
            LabelState state = getState( labelStatesMap, labelId, null );
            if ( null != state )
            {
                return state.getNodeDiffSets().getAdded();
            }
        }
        return Collections.emptySet();
    }

    @Override
    public DiffSets<Long> nodesWithLabelChanged( int labelId )
    {
        if ( hasLabelStatesMap() )
        {
            LabelState state = getState( labelStatesMap, labelId, null );
            if ( null != state )
            {
                return state.getNodeDiffSets();
            }
        }
        return DiffSets.emptyDiffSets();
    }

    /**
     * Records an index creation. If the same descriptor was dropped earlier in this tx the
     * drop is cancelled (unRemove) instead of recording a fresh add.
     */
    @Override
    public void indexRuleDoAdd( IndexDescriptor descriptor )
    {
        DiffSets<IndexDescriptor> diff = indexChanges();
        if ( diff.unRemove( descriptor ) )
        {
            getOrCreateLabelState( descriptor.getLabelId() ).indexChanges().unRemove( descriptor );
        }
        else
        {
            indexChanges().add( descriptor );
            getOrCreateLabelState( descriptor.getLabelId() ).indexChanges().add( descriptor );
        }
        hasChanges = true;
    }

    @Override
    public void constraintIndexRuleDoAdd( IndexDescriptor descriptor )
    {
        constraintIndexChanges().add( descriptor );
        getOrCreateLabelState( descriptor.getLabelId() ).constraintIndexChanges().add( descriptor );
        hasChanges = true;
    }

    @Override
    public void indexDoDrop( IndexDescriptor descriptor )
    {
        indexChanges().remove( descriptor );
        getOrCreateLabelState( descriptor.getLabelId() ).indexChanges().remove( descriptor );
        hasChanges = true;
    }

    @Override
    public void constraintIndexDoDrop( IndexDescriptor descriptor )
    {
        constraintIndexChanges().remove( descriptor );
        getOrCreateLabelState( descriptor.getLabelId() ).constraintIndexChanges().remove( descriptor );
        hasChanges = true;
    }

    @Override
    public DiffSets<IndexDescriptor> indexDiffSetsByLabel( int labelId )
    {
        if ( hasLabelStatesMap() )
        {
            LabelState labelState = getState( labelStatesMap, labelId, null );
            if ( null != labelState )
            {
                return labelState.indexChanges();
            }
        }
        return DiffSets.emptyDiffSets();
    }

    @Override
    public DiffSets<IndexDescriptor> constraintIndexDiffSetsByLabel( int labelId )
    {
        if ( hasLabelStatesMap() )
        {
            LabelState labelState = getState( labelStatesMap(), labelId, null );
            if (labelState != null)
            {
                return labelState.constraintIndexChanges();
            }
        }
        return DiffSets.emptyDiffSets();
    }

    // Lazy accessor: allocates the diff set on first use.
    @Override
    public DiffSets<IndexDescriptor> indexChanges()
    {
        if ( !hasIndexChangesDiffSets() )
        {
            indexChanges = new DiffSets<>();
        }
        return indexChanges;
    }

    private boolean hasIndexChangesDiffSets()
    {
        return indexChanges != null;
    }

    @Override
    public DiffSets<IndexDescriptor> constraintIndexChanges()
    {
        if ( !hasConstraintIndexChangesDiffSets() )
        {
            constraintIndexChanges = new DiffSets<>();
        }
        return constraintIndexChanges;
    }

    private boolean hasConstraintIndexChangesDiffSets()
    {
        return constraintIndexChanges != null;
    }

    @Override
    public DiffSets<Long> nodesWithChangedProperty( int propertyKeyId, Object value )
    {
        // Property change tracking still delegates to the legacy bridge.
        return legacyState.getNodesWithChangedProperty( propertyKeyId, value );
    }

    @Override
    public Map<Long, Object> nodesWithChangedProperty( int propertyKeyId )
    {
        return legacyState.getNodesWithChangedProperty( propertyKeyId );
    }

    @Override
    public DiffSets<Long> nodesDeletedInTx()
    {
        if ( !hasDeletedNodesDiffSets() )
        {
            deletedNodes = new DiffSets<>();
        }
        return deletedNodes;
    }

    private boolean hasDeletedNodesDiffSets()
    {
        return deletedNodes != null;
    }

    public DiffSets<Long> deletedRelationships()
    {
        if ( !hasDeletedRelationshipsDiffSets() )
        {
            deletedRelationships = new DiffSets<>();
        }
        return deletedRelationships;
    }

    private boolean hasDeletedRelationshipsDiffSets()
    {
        return deletedRelationships != null;
    }

    private LabelState getOrCreateLabelState( int labelId )
    {
        return getState( labelStatesMap(), labelId, LABEL_STATE_CREATOR );
    }

    private NodeState getOrCreateNodeState( long nodeId )
    {
        return getState( nodeStatesMap(), nodeId, NODE_STATE_CREATOR );
    }

    private RelationshipState getOrCreateRelationshipState( long relationshipId )
    {
        return getState( relationshipStatesMap(), relationshipId, RELATIONSHIP_STATE_CREATOR );
    }

    private GraphState getOrCreateGraphState()
    {
        if ( graphState == null )
        {
            graphState = new GraphState();
        }
        return graphState;
    }

    // Factory abstraction so getState(...) can lazily create any flavor of per-id state.
    private interface StateCreator<STATE>
    {
        STATE newState( long id );
    }

    /**
     * Looks up per-id state in the given map. With a non-null creator, missing state is
     * created, cached and counted as a change; with a null creator this is a pure
     * read-only lookup that may return null.
     */
    private <STATE> STATE getState( Map<Long, STATE> states, long id, StateCreator<STATE> creator )
    {
        STATE result = states.get( id );
        if ( result != null )
        {
            return result;
        }

        if ( creator != null )
        {
            result = creator.newState( id );
            states.put( id, result );
            hasChanges = true;
        }
        return result;
    }

    /**
     * Records a new uniqueness constraint together with the id of the index created to
     * back it (needed if the tx rolls back and the index must be dropped again).
     */
    @Override
    public void constraintDoAdd( UniquenessConstraint constraint, long indexId )
    {
        constraintsChanges().add( constraint );
        createdConstraintIndexesByConstraint().put( constraint, indexId );
        getOrCreateLabelState( constraint.label() ).constraintsChanges().add( constraint );
        hasChanges = true;
    }

    @Override
    public DiffSets<UniquenessConstraint> constraintsChangesForLabelAndProperty( int labelId, final int propertyKey )
    {
        // Narrow the label's constraint diff to the requested property key.
        return getOrCreateLabelState( labelId ).constraintsChanges().filterAdded( new Predicate<UniquenessConstraint>()
        {
            @Override
            public boolean accept( UniquenessConstraint item )
            {
                return item.propertyKeyId() == propertyKey;
            }
        } );
    }

    @Override
    public DiffSets<UniquenessConstraint> constraintsChangesForLabel( int labelId )
    {
        return getOrCreateLabelState( labelId ).constraintsChanges();
    }

    @Override
    public DiffSets<UniquenessConstraint> constraintsChanges()
    {
        if ( !hasConstraintsChangesDiffSets() )
        {
            constraintsChanges = new DiffSets<>();
        }
        return constraintsChanges;
    }

    private boolean hasConstraintsChangesDiffSets()
    {
        return constraintsChanges != null;
    }

    @Override
    public void constraintDoDrop( UniquenessConstraint constraint )
    {
        constraintsChanges().remove( constraint );

        // Dropping the constraint also drops its backing index.
        constraintIndexDoDrop( new IndexDescriptor( constraint.label(), constraint.propertyKeyId() ));
        constraintsChangesForLabel( constraint.label() ).remove( constraint );
        hasChanges = true;
    }

    // Cancels a constraint drop made earlier in this tx; returns true if there was one.
    @Override
    public boolean constraintDoUnRemove( UniquenessConstraint constraint )
    {
        if ( constraintsChanges().unRemove( constraint ) )
        {
            constraintsChangesForLabel( constraint.label() ).unRemove( constraint );
            return true;
        }
        return false;
    }

    @Override
    public boolean constraintIndexDoUnRemove( IndexDescriptor index )
    {
        if ( constraintIndexChanges().unRemove( index ) )
        {
            constraintIndexDiffSetsByLabel( index.getLabelId() ).unRemove( index );
            return true;
        }
        return false;
    }

    /**
     * All constraint-backing indexes created by this tx, derived from the
     * constraint-to-index-id map. Empty when nothing was created.
     */
    @Override
    public Iterable<IndexDescriptor> constraintIndexesCreatedInTx()
    {
        if ( hasCreatedConstraintIndexesMap() )
        {
            Map<UniquenessConstraint, Long> constraintMap = createdConstraintIndexesByConstraint();
            if ( !constraintMap.isEmpty() )
            {
                return map( new Function<UniquenessConstraint, IndexDescriptor>()
                {
                    @Override
                    public IndexDescriptor apply( UniquenessConstraint constraint )
                    {
                        return new IndexDescriptor( constraint.label(), constraint.propertyKeyId() );
                    }
                }, constraintMap.keySet() );
            }
        }

        return Iterables.empty();
    }

    // Returns null if this tx did not create an index for the constraint.
    public Long indexCreatedForConstraint( UniquenessConstraint constraint )
    {
        return createdConstraintIndexesByConstraint == null ? null :
                createdConstraintIndexesByConstraint.get( constraint );
    }

    private Map<UniquenessConstraint, Long> createdConstraintIndexesByConstraint()
    {
        if ( !hasCreatedConstraintIndexesMap() )
        {
            createdConstraintIndexesByConstraint = new HashMap<>();
        }
        return createdConstraintIndexesByConstraint;
    }

    private boolean hasCreatedConstraintIndexesMap()
    {
        return null != createdConstraintIndexesByConstraint;
    }

    private Map<Long, NodeState> nodeStatesMap()
    {
        if ( !hasNodeStatesMap() )
        {
            nodeStatesMap = new HashMap<>();
        }
        return nodeStatesMap;
    }

    private boolean hasNodeStatesMap()
    {
        return null != nodeStatesMap;
    }

    private Map<Long, RelationshipState> relationshipStatesMap()
    {
        if ( !hasRelationshipsStatesMap() )
        {
            relationshipStatesMap = new HashMap<>();
        }
        return relationshipStatesMap;
    }

    private boolean hasRelationshipsStatesMap()
    {
        return null != relationshipStatesMap;
    }

    private Map<Long, LabelState> labelStatesMap()
    {
        if ( !hasLabelStatesMap() )
        {
            labelStatesMap = new HashMap<>();
        }
        return labelStatesMap;
    }

    private boolean hasLabelStatesMap()
    {
        return null != labelStatesMap;
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_api_state_TxStateImpl.java
|
1,120
|
// The tx neither added nor removed the label; converting to a boolean is meaningless,
// so isAdded() deliberately throws rather than guess.
UNTOUCHED
{
    @Override
    public boolean isTouched()
    {
        return false;
    }

    @Override
    public boolean isAdded()
    {
        throw new UnsupportedOperationException( "Cannot convert an UNTOUCHED UpdateTriState to a boolean" );
    }
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_api_state_TxState.java
|
1,121
|
// The tx removed the label: touched, but not added.
REMOVED
{
    @Override
    public boolean isTouched()
    {
        return true;
    }

    @Override
    public boolean isAdded()
    {
        return false;
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_api_state_TxState.java
|
1,122
|
// The tx added the label: touched and added.
ADDED
{
    @Override
    public boolean isTouched()
    {
        return true;
    }

    @Override
    public boolean isAdded()
    {
        return true;
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_api_state_TxState.java
|
1,123
|
// Mockito Answer that yields a fresh Iterator over the captured values on every call --
// required because an Iterator is single-use and the stub may be invoked repeatedly.
{
    @Override
    public Iterator<T> answer( InvocationOnMock invocation ) throws Throwable
    {
        return values.iterator();
    }
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_state_StateHandlingStatementOperationsTest.java
|
1,124
|
/**
 * Tests for {@code StateHandlingStatementOperations} covering behavior common to all kinds
 * of state: writes must never be delegated to the store layer, and reads must merge
 * transaction-local schema changes with what the mocked store reports.
 */
public class StateHandlingStatementOperationsTest
{
    // Note: Most of the behavior of this class is tested in separate classes,
    // based on the category of state being
    // tested. This contains general tests or things that are common to all
    // types of state.

    // Mocked store layer; write tests verify it receives no write calls.
    StoreReadLayer inner = mock( StoreReadLayer.class );

    @Test
    public void shouldNeverDelegateWrites() throws Exception
    {
        KernelStatement state = mockedState();
        StateHandlingStatementOperations ctx = newTxStateOps( inner );

        // When
        ctx.indexCreate( state, 0, 0 );
        ctx.nodeAddLabel( state, 0, 0 );
        ctx.indexDrop( state, new IndexDescriptor( 0, 0 ) );
        ctx.nodeRemoveLabel( state, 0, 0 );

        // These are kind of in between.. property key ids are created in
        // micro-transactions, so these methods
        // circumvent the normal state of affairs. We may want to rub the
        // genius-bumps over this at some point.
        // ctx.getOrCreateLabelId("0");
        // ctx.getOrCreatePropertyKeyId("0");

        // nodeAddLabel/nodeRemoveLabel each need one read to decide, but nothing else
        // may reach the store.
        verify( inner, times( 2 ) ).nodeHasLabel( state, 0, 0 );
        verifyNoMoreInteractions( inner );
    }

    @Test
    public void shouldNotAddConstraintAlreadyExistsInTheStore() throws Exception
    {
        // given: the store already holds an identical constraint
        UniquenessConstraint constraint = new UniquenessConstraint( 10, 66 );
        TxState txState = mock( TxState.class );
        when( txState.nodesWithLabelChanged( anyInt() ) ).thenReturn( DiffSets.<Long>emptyDiffSets() );
        when( txState.nodesWithChangedProperty( anyInt() ) ).thenReturn( Collections.<Long, Object>emptyMap() );
        KernelStatement state = mockedState( txState );
        when( inner.constraintsGetForLabelAndPropertyKey( state, 10, 66 ) )
                .thenAnswer( asAnswer( asList( constraint ) ) );
        StateHandlingStatementOperations context = newTxStateOps( inner );

        // when
        context.uniquenessConstraintCreate( state, 10, 66 );

        // then: no new constraint is recorded; any pending index drop is cancelled instead
        verify( txState ).constraintIndexDoUnRemove( any( IndexDescriptor.class ) );
    }

    @Test
    public void shouldGetConstraintsByLabelAndProperty() throws Exception
    {
        // given: a constraint created within this tx only (store reports none)
        UniquenessConstraint constraint = new UniquenessConstraint( 10, 66 );
        TxState txState = new TxStateImpl( mock( OldTxStateBridge.class ), mock( PersistenceManager.class ),
                mock( IdGeneration.class ) );
        KernelStatement state = mockedState( txState );
        when( inner.constraintsGetForLabelAndPropertyKey( state, 10, 66 ) )
                .thenAnswer( asAnswer( Collections.emptyList() ) );
        StateHandlingStatementOperations context = newTxStateOps( inner );
        context.uniquenessConstraintCreate( state, 10, 66 );

        // when
        Set<UniquenessConstraint> result = asSet(
                asIterable( context.constraintsGetForLabelAndPropertyKey( state, 10, 66 ) ) );

        // then: the tx-local constraint is visible
        assertEquals( asSet( constraint ), result );
    }

    @Test
    public void shouldGetConstraintsByLabel() throws Exception
    {
        // given: constraint1 exists in the store for label 11, constraint2 is tx-local
        UniquenessConstraint constraint1 = new UniquenessConstraint( 11, 66 );
        UniquenessConstraint constraint2 = new UniquenessConstraint( 11, 99 );

        TxState txState = new TxStateImpl( mock( OldTxStateBridge.class ), mock( PersistenceManager.class ),
                mock( IdGeneration.class ) );
        KernelStatement state = mockedState( txState );
        when( inner.constraintsGetForLabelAndPropertyKey( state, 10, 66 ) )
                .thenAnswer( asAnswer( Collections.emptyList() ) );
        when( inner.constraintsGetForLabelAndPropertyKey( state, 11, 99 ) )
                .thenAnswer( asAnswer( Collections.emptyList() ) );
        when( inner.constraintsGetForLabel( state, 10 ) )
                .thenAnswer( asAnswer( Collections.emptyList() ) );
        when( inner.constraintsGetForLabel( state, 11 ) )
                .thenAnswer( asAnswer( asIterable( constraint1 ) ) );
        StateHandlingStatementOperations context = newTxStateOps( inner );
        context.uniquenessConstraintCreate( state, 10, 66 );
        context.uniquenessConstraintCreate( state, 11, 99 );

        // when
        Set<UniquenessConstraint> result = asSet( asIterable( context.constraintsGetForLabel( state, 11 ) ) );

        // then: store constraints and tx-local constraints for label 11 are merged
        assertEquals( asSet( constraint1, constraint2 ), result );
    }

    @Test
    public void shouldGetAllConstraints() throws Exception
    {
        // given: constraint2 comes from the store, constraint1 is created in this tx
        UniquenessConstraint constraint1 = new UniquenessConstraint( 10, 66 );
        UniquenessConstraint constraint2 = new UniquenessConstraint( 11, 99 );

        TxState txState = new TxStateImpl( mock( OldTxStateBridge.class ), mock( PersistenceManager.class ),
                mock( IdGeneration.class ) );
        KernelStatement state = mockedState( txState );
        when( inner.constraintsGetForLabelAndPropertyKey( state, 10, 66 ) )
                .thenAnswer( asAnswer( Collections.emptyList() ) );
        when( inner.constraintsGetForLabelAndPropertyKey( state, 11, 99 ) )
                .thenAnswer( asAnswer( Collections.emptyList() ) );
        when( inner.constraintsGetAll( state ) ).thenAnswer( asAnswer( asIterable( constraint2 ) ) );
        StateHandlingStatementOperations context = newTxStateOps( inner );
        context.uniquenessConstraintCreate( state, 10, 66 );

        // when
        Set<UniquenessConstraint> result = asSet( asIterable( context.constraintsGetAll( state ) ) );

        // then
        assertEquals( asSet( constraint1, constraint2 ), result );
    }

    // Yields a fresh Iterator per stub invocation, since iterators are single-use.
    private static <T> Answer<Iterator<T>> asAnswer( final Iterable<T> values )
    {
        return new Answer<Iterator<T>>()
        {
            @Override
            public Iterator<T> answer( InvocationOnMock invocation ) throws Throwable
            {
                return values.iterator();
            }
        };
    }

    // Builds the unit under test around the given (mocked) store layer.
    private StateHandlingStatementOperations newTxStateOps( StoreReadLayer delegate )
    {
        return new StateHandlingStatementOperations( delegate,
                mock( LegacyPropertyTrackers.class ), mock( ConstraintIndexCreator.class ) );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_state_StateHandlingStatementOperationsTest.java
|
1,125
|
// Mockito Answer producing a fresh Iterator over the captured values each time the stub
// is invoked; a single Iterator would be exhausted after the first use.
{
    @Override
    public Iterator<T> answer( InvocationOnMock invocation ) throws Throwable
    {
        return values.iterator();
    }
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_state_SchemaTransactionStateTest.java
|
1,126
|
public class LabelTransactionStateTest
{
@Test
public void addOnlyLabelShouldBeVisibleInTx() throws Exception
{
// GIVEN
commitNoLabels();
// WHEN
txContext.nodeAddLabel( state, nodeId, labelId1 );
// THEN
assertLabels( labelId1 );
}
@Test
public void addAdditionalLabelShouldBeReflectedWithinTx() throws Exception
{
// GIVEN
commitLabels( labelId1 );
// WHEN
txContext.nodeAddLabel( state, nodeId, labelId2 );
// THEN
assertLabels( labelId1, labelId2 );
}
@Test
public void addAlreadyExistingLabelShouldBeReflectedWithinTx() throws Exception
{
// GIVEN
commitLabels( labelId1 );
// WHEN
txContext.nodeAddLabel( state, nodeId, labelId1 );
// THEN
assertLabels( labelId1 );
}
@Test
public void removeCommittedLabelShouldBeReflectedWithinTx() throws Exception
{
// GIVEN
commitLabels( labelId1, labelId2 );
// WHEN
txContext.nodeRemoveLabel( state, nodeId, labelId1 );
// THEN
assertLabels( labelId2 );
}
@Test
public void removeAddedLabelInTxShouldBeReflectedWithinTx() throws Exception
{
// GIVEN
commitLabels( labelId1 );
// WHEN
txContext.nodeAddLabel( state, nodeId, labelId2 );
txContext.nodeRemoveLabel( state, nodeId, labelId2 );
// THEN
assertLabels( labelId1 );
}
@Test
public void addRemovedLabelInTxShouldBeReflectedWithinTx() throws Exception
{
// GIVEN
commitLabels( labelId1 );
// WHEN
txContext.nodeRemoveLabel( state, nodeId, labelId1 );
txContext.nodeAddLabel( state, nodeId, labelId1 );
// THEN
assertLabels( labelId1 );
}
@Test
public void addedLabelsShouldBeReflectedWhenGettingNodesForLabel() throws Exception
{
// GIVEN
commitLabels(
labels( 0, 1, 2 ),
labels( 1, 2, 3 ),
labels( 2, 1, 3 ) );
// WHEN
txContext.nodeAddLabel( state, 2, 2 );
// THEN
assertEquals( asSet( 0L, 1L, 2L ), asSet( txContext.nodesGetForLabel( state, 2 ) ) );
}
@Test
public void removedLabelsShouldBeReflectedWhenGettingNodesForLabel() throws Exception
{
// GIVEN
commitLabels(
labels( 0, 1, 2 ),
labels( 1, 2, 3 ),
labels( 2, 1, 3 ) );
// WHEN
txContext.nodeRemoveLabel( state, 1, 2 );
// THEN
assertEquals( asSet( 0L ), asSet( txContext.nodesGetForLabel( state, 2 ) ) );
}
@Test
public void addingNewLabelToNodeShouldRespondTrue() throws Exception
{
// GIVEN
commitNoLabels();
// WHEN
boolean added = txContext.nodeAddLabel( state, nodeId, labelId1 );
// THEN
assertTrue( "Should have been added now", added );
}
@Test
public void addingExistingLabelToNodeShouldRespondFalse() throws Exception
{
// GIVEN
commitLabels( labelId1 );
// WHEN
boolean added = txContext.nodeAddLabel( state, nodeId, labelId1 );
// THEN
assertFalse( "Shouldn't have been added now", added );
}
@Test
public void removingExistingLabelFromNodeShouldRespondTrue() throws Exception
{
// GIVEN
commitLabels( labelId1 );
// WHEN
boolean removed = txContext.nodeRemoveLabel( state, nodeId, labelId1 );
// THEN
assertTrue( "Should have been removed now", removed );
}
@Test
public void removingNonExistentLabelFromNodeShouldRespondFalse() throws Exception
{
// GIVEN
commitNoLabels();
// WHEN
txContext.nodeAddLabel( state, nodeId, labelId1 );
// THEN
assertLabels( labelId1 );
}
@Test
public void should_return_true_when_adding_new_label() throws Exception
{
// GIVEN
when( store.nodeHasLabel( state, 1337, 12 ) ).thenReturn( false );
// WHEN and THEN
assertTrue( "Label should have been added", txContext.nodeAddLabel( state, 1337, 12 ) );
}
@Test
public void should_return_false_when_adding_existing_label() throws Exception
{
// GIVEN
when( store.nodeHasLabel( state, 1337, 12 ) ).thenReturn( true );
// WHEN and THEN
assertFalse( "Label should have been added", txContext.nodeAddLabel( state, 1337, 12 ) );
}
@Test
public void should_return_true_when_removing_existing_label() throws Exception
{
// GIVEN
when( store.nodeHasLabel( state, 1337, 12 ) ).thenReturn( true );
// WHEN and THEN
assertTrue( "Label should have been removed", txContext.nodeRemoveLabel( state, 1337, 12 ) );
}
@Test
public void should_return_true_when_removing_non_existant_label() throws Exception
{
// GIVEN
when( store.nodeHasLabel( state, 1337, 12 ) ).thenReturn( false );
// WHEN and THEN
assertFalse( "Label should have been removed", txContext.nodeRemoveLabel( state, 1337, 12 ) );
}
// exists
private final int labelId1 = 10, labelId2 = 12;
private final long nodeId = 20;
private StoreReadLayer store;
private OldTxStateBridge oldTxState;
private TxState txState;
private StateHandlingStatementOperations txContext;
private KernelStatement state;
@Before
public void before() throws Exception
{
store = mock( StoreReadLayer.class );
when( store.indexesGetForLabel( state, labelId1 ) ).then( answerAsIteratorFrom( Collections
.<IndexDescriptor>emptyList() ) );
when( store.indexesGetForLabel( state, labelId2 ) ).then( answerAsIteratorFrom( Collections
.<IndexDescriptor>emptyList() ) );
when( store.indexesGetAll( state ) ).then( answerAsIteratorFrom( Collections.<IndexDescriptor>emptyList() ) );
oldTxState = mock( OldTxStateBridge.class );
txState = new TxStateImpl( oldTxState, mock( PersistenceManager.class ),
mock( TxState.IdGeneration.class ) );
state = StatementOperationsTestHelper.mockedState( txState );
txContext = new StateHandlingStatementOperations( store, mock( LegacyPropertyTrackers.class ),
mock( ConstraintIndexCreator.class ) );
}
private static class Labels
{
private final long nodeId;
private final Integer[] labelIds;
Labels( long nodeId, Integer... labelIds )
{
this.nodeId = nodeId;
this.labelIds = labelIds;
}
}
private static Labels labels( long nodeId, Integer... labelIds )
{
return new Labels( nodeId, labelIds );
}
private void commitLabels( Labels... labels ) throws Exception
{
Map<Integer, Collection<Long>> allLabels = new HashMap<>();
for ( Labels nodeLabels : labels )
{
when( store.nodeGetLabels( state, nodeLabels.nodeId ) )
.then( answerAsPrimitiveIntIteratorFrom( Arrays.<Integer>asList( nodeLabels.labelIds ) ) );
for ( int label : nodeLabels.labelIds )
{
when( store.nodeHasLabel( state, nodeLabels.nodeId, label ) ).thenReturn( true );
Collection<Long> nodes = allLabels.get( label );
if ( nodes == null )
{
nodes = new ArrayList<>();
allLabels.put( label, nodes );
}
nodes.add( nodeLabels.nodeId );
}
}
for ( Map.Entry<Integer, Collection<Long>> entry : allLabels.entrySet() )
{
when( store.nodesGetForLabel( state, entry.getKey() ) ).then( answerAsPrimitiveLongIteratorFrom( entry
.getValue() ) );
}
}
private void commitNoLabels() throws Exception
{
commitLabels( new Integer[0] );
}
private void commitLabels( Integer... labels ) throws Exception
{
commitLabels( labels( nodeId, labels ) );
}
private void assertLabels( Integer... labels ) throws EntityNotFoundException
{
assertEquals( asSet( labels ), asSet( txContext.nodeGetLabels( state, nodeId ) ) );
for ( int label : labels )
{
assertTrue( "Expected labels not found on node", txContext.nodeHasLabel( state, nodeId, label ) );
}
}
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_state_LabelTransactionStateTest.java
|
1,127
|
// Defers the indexesGetForLabelAndPropertyKey lookup so the caller (assertException)
// can invoke it and assert that SchemaRuleNotFoundException is thrown.
{
    @Override
    public void call() throws SchemaRuleNotFoundException
    {
        txContext.indexesGetForLabelAndPropertyKey( state, labelId1, key1 );
    }
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_state_SchemaTransactionStateTest.java
|
1,128
|
public class SchemaTransactionStateTest
{
@Test
public void addedRuleShouldBeVisibleInTx() throws Exception
{
// GIVEN
commitNoLabels();
// WHEN
IndexDescriptor rule = txContext.indexCreate( state, labelId1, key1 );
// THEN
assertEquals( asSet( rule ), IteratorUtil.asSet( txContext.indexesGetForLabel( state, labelId1 ) ) );
verify( store ).indexesGetForLabel( state, labelId1 );
assertEquals( asSet( rule ), IteratorUtil.asSet( txContext.indexesGetAll( state ) ) );
verify( store ).indexesGetAll( state );
verifyNoMoreInteractions( store );
}
@Test
public void addedRulesShouldBeVisibleInTx() throws Exception
{
// GIVEN
commitNoLabels();
// WHEN
IndexDescriptor rule1 = txContext.indexCreate( state, labelId1, key1 );
IndexDescriptor rule2 = txContext.indexCreate( state, labelId2, key2 );
// THEN
assertEquals( asSet( rule1 ), IteratorUtil.asSet( txContext.indexesGetForLabel( state, labelId1 ) ) );
verify( store ).indexesGetForLabel( state, labelId1 );
assertEquals( asSet( rule2 ), IteratorUtil.asSet( txContext.indexesGetForLabel( state, labelId2 ) ) );
verify( store ).indexesGetForLabel( state, labelId2 );
assertEquals( asSet( rule1, rule2 ), IteratorUtil.asSet( txContext.indexesGetAll( state ) ) );
verify( store ).indexesGetAll( state );
verifyNoMoreInteractions( store );
}
@Test
// NOTE(review): the @Test annotation for this method sits above the visible chunk.
public void addedAdditionalRuleShouldBeVisibleInTx() throws Exception
{
    // GIVEN a store with no committed labels
    commitNoLabels();

    // WHEN two indexes are created on the same label in this transaction
    IndexDescriptor rule1 = txContext.indexCreate( state, labelId1, key1 );
    IndexDescriptor rule2 = txContext.indexCreate( state, labelId1, key2 );

    // THEN both uncommitted rules are visible when listing indexes for the label
    assertEquals( asSet( rule1, rule2 ), IteratorUtil.asSet( txContext.indexesGetForLabel( state, labelId1 ) ) );
}

@Test
public void creatingAnIndexShouldBePopulatingStateWithinTX() throws Exception
{
    // GIVEN a node with labelId1 committed to the store
    commitLabels( labelId1 );
    IndexDescriptor rule = txContext.indexCreate( state, labelId1, key1 );

    // THEN an index created in this tx reports POPULATING before commit
    assertEquals( InternalIndexState.POPULATING, txContext.indexGetState( state, rule ) );
}

@Test
public void shouldReturnNonExistentRuleAddedInTransaction() throws Exception
{
    // GIVEN
    // -- non-existent rule added in the transaction
    txContext.indexCreate( state, labelId1, key1 );

    // WHEN looking the rule up both by (label, key) and by label
    IndexDescriptor rule = txContext.indexesGetForLabelAndPropertyKey( state, labelId1, key1 );
    Iterator<IndexDescriptor> labelRules = txContext.indexesGetForLabel( state, labelId1 );

    // THEN the uncommitted rule is found through both lookup paths
    IndexDescriptor expectedRule = new IndexDescriptor( labelId1, key1 );
    assertEquals( expectedRule, rule );
    assertEquals( asSet( expectedRule ), asSet( labelRules ) );
}

@Test
public void shouldNotReturnExistentRuleDroppedInTransaction() throws Exception
{
    // GIVEN
    // -- a rule that exists in the store
    IndexDescriptor rule = new IndexDescriptor( labelId1, key1 );
    when( store.indexesGetForLabel( state, labelId1 ) ).thenReturn( option( rule ).iterator() );
    // -- that same rule dropped in the transaction
    txContext.indexDrop( state, rule );

    // WHEN the dropped rule is looked up again
    assertException( getIndexRule(), SchemaRuleNotFoundException.class );
    Iterator<IndexDescriptor> rulesByLabel = txContext.indexesGetForLabel( state, labelId1 );

    // THEN the transaction-local drop hides the committed rule
    assertEquals( emptySetOf( IndexDescriptor.class ), asSet( rulesByLabel ) );
}

// Wraps the (label, key) index lookup so assertException can run it and check
// that SchemaRuleNotFoundException is thrown.
private ExceptionExpectingFunction<SchemaRuleNotFoundException> getIndexRule()
{
    return new ExceptionExpectingFunction<SchemaRuleNotFoundException>()
    {
        @Override
        public void call() throws SchemaRuleNotFoundException
        {
            txContext.indexesGetForLabelAndPropertyKey( state, labelId1, key1 );
        }
    };
}

// A no-argument action that may throw a checked exception of type E.
private interface ExceptionExpectingFunction<E extends Exception>
{
    void call() throws E;
}
/**
 * Runs {@code function} and asserts that it throws an exception of (a subtype
 * of) the given type. Completing normally fails the test; any other exception
 * is rethrown (laundered).
 *
 * @param function the action expected to throw
 * @param exception the expected exception type (subtypes accepted)
 */
private <E extends Exception> void assertException( ExceptionExpectingFunction<E> function,
        Class<? extends E> exception )
{
    try
    {
        function.call();
        // BUG FIX: previously printed exception.getClass().getName(), which is
        // always "java.lang.Class" -- we want the expected exception's name.
        fail( "Should have thrown " + exception.getName() + " exception" );
    }
    catch ( Exception e )
    {
        if ( !exception.isAssignableFrom( e.getClass() ) )
        {
            throw launderedException( e );
        }
    }
}
// Token ids that the fixtures below make look committed ("exists" in the store).
// exists
private final int labelId1 = 10, labelId2 = 12, key1 = 45, key2 = 46;
private final long nodeId = 20;

// Mocked store layer plus the transaction-aware context under test.
private StoreReadLayer store;
private OldTxStateBridge oldTxState;
private TxState txState;
private StateHandlingStatementOperations txContext;
private KernelStatement state;

@Before
public void before() throws Exception
{
    // Real TxState backed by mocks; the store is stubbed to return no indexes
    // by default so each test only sees what it explicitly sets up.
    oldTxState = mock( OldTxStateBridge.class );
    txState = new TxStateImpl( oldTxState, mock( PersistenceManager.class ),
            mock( TxState.IdGeneration.class ) );
    state = StatementOperationsTestHelper.mockedState( txState );
    store = mock( StoreReadLayer.class );
    when( store.indexesGetForLabel( state, labelId1 ) ).then( asAnswer( Collections.<IndexDescriptor>emptyList() ) );
    when( store.indexesGetForLabel( state, labelId2 ) ).then( asAnswer( Collections.<IndexDescriptor>emptyList() ) );
    when( store.indexesGetAll( state ) ).then( asAnswer( Collections.<IndexDescriptor>emptyList() ) );
    txContext = new StateHandlingStatementOperations( store, mock( LegacyPropertyTrackers.class ),
            mock( ConstraintIndexCreator.class ));
}

// Mockito Answer producing a fresh iterator per invocation, so a stubbed call
// can be consumed more than once within a test.
private static <T> Answer<Iterator<T>> asAnswer( final Iterable<T> values )
{
    return new Answer<Iterator<T>>()
    {
        @Override
        public Iterator<T> answer( InvocationOnMock invocation ) throws Throwable
        {
            return values.iterator();
        }
    };
}

// Value object pairing a node id with the label ids it should appear to have.
private static class Labels
{
    private final long nodeId;
    private final Integer[] labelIds;

    Labels( long nodeId, Integer... labelIds )
    {
        this.nodeId = nodeId;
        this.labelIds = labelIds;
    }
}

private static Labels labels( long nodeId, Integer... labelIds )
{
    return new Labels( nodeId, labelIds );
}

// Stubs the store so the given node/label combinations look committed:
// per-node label lists, per-(node,label) membership, and per-label node lists.
private void commitLabels( Labels... labels ) throws Exception
{
    Map<Integer, Collection<Long>> allLabels = new HashMap<>();
    for ( Labels nodeLabels : labels )
    {
        when( store.nodeGetLabels( state, nodeLabels.nodeId ) ).then(
                asAnswer( Arrays.<Integer>asList( nodeLabels.labelIds ) ) );
        for ( int label : nodeLabels.labelIds )
        {
            when( store.nodeHasLabel( state, nodeLabels.nodeId, label ) ).thenReturn( true );

            // Invert the mapping: label -> all nodes carrying it.
            Collection<Long> nodes = allLabels.get( label );
            if ( nodes == null )
            {
                nodes = new ArrayList<>();
                allLabels.put( label, nodes );
            }
            nodes.add( nodeLabels.nodeId );
        }
    }
    for ( Map.Entry<Integer, Collection<Long>> entry : allLabels.entrySet() )
    {
        when( store.nodesGetForLabel( state, entry.getKey() ) ).then( asAnswer( entry.getValue() ) );
    }
}

private void commitNoLabels() throws Exception
{
    commitLabels( new Integer[0] );
}

// Convenience: commit the given labels on the default test node (nodeId).
private void commitLabels( Integer... labels ) throws Exception
{
    commitLabels( labels( nodeId, labels ) );
}
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_state_SchemaTransactionStateTest.java
|
1,129
|
/**
 * Transaction-local state for a single relationship. Currently adds nothing
 * beyond the property changes inherited from {@link PropertyContainerState};
 * exists to give relationships their own concrete state type.
 */
public final class RelationshipState extends PropertyContainerState
{
    public RelationshipState( long id )
    {
        super( id );
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_api_state_RelationshipState.java
|
1,130
|
/**
 * Transaction-local state for an entity that can carry properties.
 * The property diff set is allocated lazily, on first access.
 */
public class PropertyContainerState extends EntityState
{
    private DiffSets<DefinedProperty> propertyDiffSets;

    public PropertyContainerState( long id )
    {
        super( id );
    }

    /**
     * Returns the diff set of property changes for this entity, creating it
     * on first use. Never returns null.
     */
    public DiffSets<DefinedProperty> propertyDiffSets()
    {
        DiffSets<DefinedProperty> result = propertyDiffSets;
        if ( result == null )
        {
            result = new DiffSets<>();
            propertyDiffSets = result;
        }
        return result;
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_api_state_PropertyContainerState.java
|
1,131
|
/**
 * Tests that {@link OldTxStateBridgeImpl} correctly translates the legacy
 * copy-on-write property add/remove maps into added/removed diff sets.
 */
public class OldTxStateBridgeImplTest
{
    @Test
    public void shouldListNodesWithPropertyAdded() throws Exception
    {
        // Given a legacy transaction state and a bridge over it
        long nodeId = 1l;
        int propertyKey = 2;
        int value = 1337;
        WritableTransactionState state = new WritableTransactionState( null, null, new DevNullLoggingService(), null,
                null, null );
        OldTxStateBridge bridge = new OldTxStateBridgeImpl( null, state );
        NodeImpl node = new NodeImpl( nodeId );

        // And Given that I've added a relevant property
        state.getOrCreateCowPropertyAddMap( node ).put( 2, intProperty( propertyKey, value ) );

        // When
        DiffSets<Long> nodes = bridge.getNodesWithChangedProperty( propertyKey, value );

        // Then the node is reported as gaining (key, value)
        assertEquals( asSet( nodeId ), nodes.getAdded() );
        assertEquals( emptySetOf( Long.class ), nodes.getRemoved() );
    }

    @Test
    public void shouldListNodesWithPropertyRemoved() throws Exception
    {
        // Given a legacy transaction state and a bridge over it
        long nodeId = 1l;
        int propertyKey = 2;
        int value = 1337;
        WritableTransactionState state = new WritableTransactionState( null, null, new DevNullLoggingService(), null,
                null, null );
        OldTxStateBridge bridge = new OldTxStateBridgeImpl( null, state );
        NodeImpl node = new NodeImpl( nodeId );

        // And Given that I've added a relevant property
        // (to the COW *remove* map, i.e. the property was removed in this tx)
        state.getOrCreateCowPropertyRemoveMap( node ).put( 2, intProperty( propertyKey, value ) );

        // When
        DiffSets<Long> nodes = bridge.getNodesWithChangedProperty( propertyKey, value );

        // Then the node is reported as losing (key, value)
        assertEquals( emptySetOf( Long.class ), nodes.getAdded() );
        assertEquals( asSet( nodeId ), nodes.getRemoved() );
    }

    @Test
    public void shouldListNodesWithPropertyChanged() throws Exception
    {
        // Given a legacy transaction state and a bridge over it
        long nodeId = 1l;
        int propertyKey = 2;
        int value = 1337;
        WritableTransactionState state = new WritableTransactionState( null, null, new DevNullLoggingService(), null,
                null, null );
        OldTxStateBridge bridge = new OldTxStateBridgeImpl( null, state );
        NodeImpl node = new NodeImpl( nodeId );

        // And Given that I've added a relevant property
        // with a DIFFERENT value than the one being queried for below
        state.getOrCreateCowPropertyAddMap( node ).put( 2, intProperty( propertyKey, /*other value*/7331 ) );

        // When
        DiffSets<Long> nodes = bridge.getNodesWithChangedProperty( propertyKey, value );

        // Then, relative to the queried value, the node counts as removed
        assertEquals( emptySetOf( Long.class ), nodes.getAdded() );
        assertEquals( asSet( nodeId ), nodes.getRemoved() );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_state_OldTxStateBridgeImplTest.java
|
1,132
|
/**
 * Bridges the legacy {@link TransactionState} (copy-on-write property maps
 * kept per entity) to the newer transaction-state API: reads translate the
 * COW add/remove maps into diff sets, and writes keep the two maps mutually
 * consistent while delegating entity deletion to {@link NodeManager}.
 */
public class OldTxStateBridgeImpl implements OldTxStateBridge
{
    private final NodeManager nodeManager;
    private final TransactionState state;

    public OldTxStateBridgeImpl( NodeManager nodeManager, TransactionState transactionState )
    {
        this.nodeManager = nodeManager;
        this.state = transactionState;
    }

    /**
     * Diff of nodes whose property {@code propertyKey} changed in this tx,
     * relative to {@code value}: nodes now holding the value are "added";
     * nodes that lost it (removed outright, or overwritten with a different
     * value) are "removed".
     */
    @Override
    public DiffSets<Long> getNodesWithChangedProperty( int propertyKey, Object value )
    {
        DiffSets<Long> diff = new DiffSets<>();
        for ( WritableTransactionState.CowNodeElement changedNode : state.getChangedNodes() )
        {
            // All nodes where the property has been removed altogether
            DefinedProperty removed = propertyChange( changedNode.getPropertyRemoveMap( false ), propertyKey );
            if ( removed != null && removed.value().equals( value ) )
            {
                diff.remove( changedNode.getId() );
            }

            // All nodes where property has been added or changed
            if ( !changedNode.isDeleted() )
            {
                DefinedProperty added = propertyChange( changedNode.getPropertyAddMap( false ), propertyKey );
                if ( added != null )
                {
                    // NOTE(review): the remove branch above compares via
                    // value().equals() while this branch uses valueEquals();
                    // presumably equivalent, but worth confirming for
                    // array/numeric property values.
                    if ( added.valueEquals( value ) )
                    {
                        diff.add( changedNode.getId() );
                    }
                    else
                    {
                        diff.remove( changedNode.getId() );
                    }
                }
            }
        }
        return diff;
    }

    /**
     * Maps node id to its new value for {@code propertyKeyId}, for every node
     * changed in this tx. Deleted nodes and removed properties map to a fresh
     * {@code new Object()} sentinel (a value that cannot equal any real one).
     */
    @Override
    public Map<Long, Object> getNodesWithChangedProperty( int propertyKeyId )
    {
        HashMap<Long, Object> result = new HashMap<>();
        for ( WritableTransactionState.CowNodeElement changedNode : state.getChangedNodes() )
        {
            if ( changedNode.isDeleted() )
            {
                result.put( changedNode.getId(), new Object() );
                continue;
            }
            DefinedProperty added = propertyChange( changedNode.getPropertyAddMap( false ), propertyKeyId );
            if ( added != null )
            {
                result.put( changedNode.getId(), added.value() );
            }
            else if ( null != propertyChange( changedNode.getPropertyRemoveMap( false ), propertyKeyId ) )
            {
                result.put( changedNode.getId(), new Object() );
            }
        }
        return result;
    }

    // Looks up the change for one property key in a (possibly null) COW map.
    private static DefinedProperty propertyChange( ArrayMap<Integer, DefinedProperty> propertyDataMap, long propertyKeyId )
    {
        return propertyDataMap == null ? null : propertyDataMap.get( (int) propertyKeyId );
    }

    /**
     * Deletes the node via NodeManager and records its removed properties in
     * the COW remove map; marks the tx rollback-only if anything fails.
     */
    @Override
    public void deleteNode( long nodeId )
    {
        NodeImpl node = nodeManager.getNodeForProxy( nodeId, null );
        boolean success = false;
        try
        {
            ArrayMap<Integer, DefinedProperty> skipMap = state.getOrCreateCowPropertyRemoveMap( node );
            ArrayMap<Integer, DefinedProperty> removedProps = nodeManager.deleteNode( node, state );
            if ( removedProps.size() > 0 )
            {
                for ( Integer index : removedProps.keySet() )
                {
                    skipMap.put( index, removedProps.get( index ) );
                }
            }
            success = true;
        }
        finally
        {
            if ( !success )
            {
                nodeManager.setRollbackOnly();
            }
        }
    }

    @Override
    public boolean nodeIsAddedInThisTx( long nodeId )
    {
        return state.getCreatedNodes().contains( nodeId );
    }

    /**
     * Relationship counterpart of {@link #deleteNode(long)}: deletes via
     * NodeManager, records removed properties, rollback-only on failure.
     */
    @Override
    public void deleteRelationship( long relationshipId )
    {
        RelationshipImpl relationship = nodeManager.getRelationshipForProxy( relationshipId );
        boolean success = false;
        try
        {
            ArrayMap<Integer, DefinedProperty> skipMap = state.getOrCreateCowPropertyRemoveMap( relationship );
            ArrayMap<Integer, DefinedProperty> removedProps = nodeManager.deleteRelationship( relationship, state );
            if ( removedProps.size() > 0 )
            {
                for ( Integer index : removedProps.keySet() )
                {
                    skipMap.put( index, removedProps.get( index ) );
                }
            }
            success = true;
        }
        finally
        {
            if ( !success )
            {
                nodeManager.setRollbackOnly();
            }
        }
    }

    @Override
    public boolean relationshipIsAddedInThisTx( long relationshipId )
    {
        return state.getCreatedRelationships().contains( relationshipId );
    }

    @Override
    public boolean hasChanges()
    {
        return state.hasChanges();
    }

    // The six mutators below all follow the same pattern: record the change in
    // one COW map and clear any stale entry for the same key in the other map,
    // so a property is never simultaneously "added" and "removed".

    @Override
    public void nodeSetProperty( long nodeId, DefinedProperty property )
    {
        NodeImpl node = nodeManager.getNodeForProxy( nodeId, null );
        state.getOrCreateCowPropertyAddMap( node ).put( property.propertyKeyId(), property );
        ArrayMap<Integer, DefinedProperty> removed = state.getCowPropertyRemoveMap( node );
        if ( removed != null )
        {
            removed.remove( property.propertyKeyId() );
        }
    }

    @Override
    public void relationshipSetProperty( long relationshipId, DefinedProperty property )
    {
        RelationshipImpl relationship = nodeManager.getRelationshipForProxy( relationshipId );
        state.getOrCreateCowPropertyAddMap( relationship ).put( property.propertyKeyId(), property );
        ArrayMap<Integer, DefinedProperty> removed = state.getCowPropertyRemoveMap( relationship );
        if ( removed != null )
        {
            removed.remove( property.propertyKeyId() );
        }
    }

    @Override
    public void graphSetProperty( DefinedProperty property )
    {
        GraphPropertiesImpl properties = nodeManager.getGraphProperties();
        state.getOrCreateCowPropertyAddMap( properties ).put( property.propertyKeyId(), property );
        ArrayMap<Integer, DefinedProperty> removed = state.getCowPropertyRemoveMap( properties );
        if ( removed != null )
        {
            removed.remove( property.propertyKeyId() );
        }
    }

    @Override
    public void nodeRemoveProperty( long nodeId, DefinedProperty property )
    {
        NodeImpl node = nodeManager.getNodeForProxy( nodeId, null );
        state.getOrCreateCowPropertyRemoveMap( node ).put( property.propertyKeyId(), property );
        ArrayMap<Integer, DefinedProperty> added = state.getCowPropertyAddMap( node );
        if ( added != null )
        {
            added.remove( property.propertyKeyId() );
        }
    }

    @Override
    public void relationshipRemoveProperty( long relationshipId, DefinedProperty property )
    {
        RelationshipImpl relationship = nodeManager.getRelationshipForProxy( relationshipId );
        state.getOrCreateCowPropertyRemoveMap( relationship ).put( property.propertyKeyId(), property );
        ArrayMap<Integer, DefinedProperty> added = state.getCowPropertyAddMap( relationship );
        if ( added != null )
        {
            added.remove( property.propertyKeyId() );
        }
    }

    @Override
    public void graphRemoveProperty( DefinedProperty property )
    {
        GraphPropertiesImpl properties = nodeManager.getGraphProperties();
        state.getOrCreateCowPropertyRemoveMap( properties ).put( property.propertyKeyId(), property );
        ArrayMap<Integer, DefinedProperty> added = state.getCowPropertyAddMap( properties );
        if ( added != null )
        {
            added.remove( property.propertyKeyId() );
        }
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_api_state_OldTxStateBridgeImpl.java
|
1,133
|
/**
 * Transaction-local state for a single node: label changes, in addition to
 * the property changes inherited from {@link PropertyContainerState}.
 */
public final class NodeState extends PropertyContainerState
{
    private DiffSets<Integer> labelDiffSets;

    public NodeState( long id )
    {
        super( id );
    }

    /**
     * Returns the diff set of label changes for this node, creating it on
     * first use. Never returns null.
     */
    public DiffSets<Integer> labelDiffSets()
    {
        DiffSets<Integer> result = labelDiffSets;
        if ( result == null )
        {
            result = new DiffSets<>();
            labelDiffSets = result;
        }
        return result;
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_api_state_NodeState.java
|
1,134
|
// Value object pairing a node id with the label ids it should appear to have
// in the committed store (used by the commitLabels fixtures).
private static class Labels
{
    private final long nodeId;
    private final Integer[] labelIds;

    Labels( long nodeId, Integer... labelIds )
    {
        this.nodeId = nodeId;
        this.labelIds = labelIds;
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_api_state_LabelTransactionStateTest.java
|
1,135
|
{
@Override
public void run()
{
try
{
tx( new Runnable()
{
@Override
public void run()
{
for ( @SuppressWarnings("unused")String key : root.getPropertyKeys() )
precondition.set( true );
offenderSetUp.countDown();
root.setProperty( "tx", "offender" );
}
} );
}
finally
{
done.countDown();
}
}
} );
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_core_TestRaceOnMultipleNodeImpl.java
|
1,136
|
{
@Override
public void run()
{
tx( task );
}
} );
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_core_TestRaceOnMultipleNodeImpl.java
|
1,137
|
/**
 * Static utility for interpreting the label field of a {@link NodeRecord}.
 *
 * The field either holds the labels inline or points to a chain of dynamic
 * records: bit {@code 0x8000000000} (bit 39) is the "dynamic" marker, and the
 * low 36 bits ({@code 0xFFFFFFFFF}) carry the payload -- the inlined labels,
 * or the id of the first dynamic record.
 */
public class NodeLabelsField
{
    private NodeLabelsField()
    {
        // Utility class with only static methods; not to be instantiated.
    }

    /**
     * @return a {@link NodeLabels} view over the node's label field, choosing
     * the dynamic-record or inline representation based on the marker bit.
     */
    public static NodeLabels parseLabelsField( NodeRecord node )
    {
        long labelField = node.getLabelField();
        if ( fieldPointsToDynamicRecordOfLabels( labelField ) )
        {
            return new DynamicNodeLabels( labelField, node );
        }
        else
        {
            return new InlineNodeLabels( labelField, node );
        }
    }

    /**
     * @return the low 36 payload bits of the given label field.
     */
    public static long parseLabelsBody( long labelsField )
    {
        return labelsField & 0xFFFFFFFFFL;
    }

    /**
     * @return true if the marker bit is set, i.e. the field points to a chain
     * of dynamic label records rather than holding the labels inline.
     */
    public static boolean fieldPointsToDynamicRecordOfLabels( long labelField )
    {
        return (labelField & 0x8000000000L) != 0;
    }

    /**
     * @see NodeRecord
     *
     * @param labelField label field value from a node record
     * @return the id of the dynamic record this label field points to or null if it is an inline label field
     */
    public static Long fieldDynamicLabelRecordId( long labelField )
    {
        if ( fieldPointsToDynamicRecordOfLabels( labelField ) )
        {
            return parseLabelsBody( labelField );
        }
        else
        {
            return null;
        }
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_labels_NodeLabelsField.java
|
1,138
|
ARRAY( 10 )
{
@Override
public DefinedProperty readProperty( int propertyKeyId, final PropertyBlock block, final Provider<PropertyStore> store )
{
return Property.lazyArrayProperty(propertyKeyId, new Callable<Object>()
{
@Override
public Object call() throws Exception
{
return getValue( block, store.instance() );
}
});
}
@Override
public Object getValue( PropertyBlock block, PropertyStore store )
{
if ( store == null )
{
return null;
}
return store.getArrayFor( block );
}
@Override
byte[] readDynamicRecordHeader( byte[] recordBytes )
{
byte itemType = recordBytes[0];
if ( itemType == STRING.byteValue() )
{
return headOf( recordBytes, DynamicArrayStore.STRING_HEADER_SIZE );
}
else if ( itemType <= DOUBLE.byteValue() )
{
return headOf( recordBytes, DynamicArrayStore.NUMBER_HEADER_SIZE );
}
throw new IllegalArgumentException( "Unknown array type " + itemType );
}
private byte[] headOf( byte[] bytes, int length )
{
return Arrays.copyOf( bytes, length );
}
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyType.java
|
1,139
|
// Configuration namespace for this store; adds nothing beyond the settings
// inherited from the common AbstractStore.Configuration.
public static abstract class Configuration extends AbstractStore.Configuration
{
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyStore.java
|
1,140
|
public class PropertyStore extends AbstractRecordStore<PropertyRecord> implements Store
{
public static abstract class Configuration extends AbstractStore.Configuration
{
}
public static final int DEFAULT_DATA_BLOCK_SIZE = 120;
public static final int DEFAULT_PAYLOAD_SIZE = 32;
public static final String TYPE_DESCRIPTOR = "PropertyStore";
public static final int RECORD_SIZE = 1/*next and prev high bits*/
+ 4/*next*/
+ 4/*prev*/
+ DEFAULT_PAYLOAD_SIZE /*property blocks*/;
// = 41
private DynamicStringStore stringPropertyStore;
private PropertyKeyTokenStore propertyKeyTokenStore;
private DynamicArrayStore arrayPropertyStore;
private final PropertyPhysicalToLogicalConverter physicalToLogicalConverter;
public PropertyStore(File fileName, Config configuration,
IdGeneratorFactory idGeneratorFactory, WindowPoolFactory windowPoolFactory,
FileSystemAbstraction fileSystemAbstraction, StringLogger stringLogger,
DynamicStringStore stringPropertyStore, PropertyKeyTokenStore propertyKeyTokenStore,
DynamicArrayStore arrayPropertyStore)
{
super( fileName, configuration, IdType.PROPERTY, idGeneratorFactory, windowPoolFactory,
fileSystemAbstraction, stringLogger );
this.stringPropertyStore = stringPropertyStore;
this.propertyKeyTokenStore = propertyKeyTokenStore;
this.arrayPropertyStore = arrayPropertyStore;
this.physicalToLogicalConverter = new PropertyPhysicalToLogicalConverter( this );
}
@Override
public <FAILURE extends Exception> void accept( RecordStore.Processor<FAILURE> processor, PropertyRecord record )
throws FAILURE
{
processor.processProperty( this, record );
}
public DynamicStringStore getStringStore()
{
return stringPropertyStore;
}
public DynamicArrayStore getArrayStore()
{
return arrayPropertyStore;
}
@Override
protected void setRecovered()
{
super.setRecovered();
stringPropertyStore.setRecovered();
propertyKeyTokenStore.setRecovered();
arrayPropertyStore.setRecovered();
}
@Override
protected void unsetRecovered()
{
super.unsetRecovered();
stringPropertyStore.unsetRecovered();
propertyKeyTokenStore.unsetRecovered();
arrayPropertyStore.unsetRecovered();
}
@Override
protected void closeStorage()
{
if ( stringPropertyStore != null )
{
stringPropertyStore.close();
stringPropertyStore = null;
}
if ( propertyKeyTokenStore != null )
{
propertyKeyTokenStore.close();
propertyKeyTokenStore = null;
}
if ( arrayPropertyStore != null )
{
arrayPropertyStore.close();
arrayPropertyStore = null;
}
}
@Override
public void flushAll()
{
stringPropertyStore.flushAll();
propertyKeyTokenStore.flushAll();
arrayPropertyStore.flushAll();
super.flushAll();
}
@Override
public String getTypeDescriptor()
{
return TYPE_DESCRIPTOR;
}
@Override
public int getRecordSize()
{
return RECORD_SIZE;
}
@Override
public int getRecordHeaderSize()
{
return RECORD_SIZE - DEFAULT_PAYLOAD_SIZE;
}
public void freeStringBlockId( long blockId )
{
stringPropertyStore.freeId( blockId );
}
public void freeArrayBlockId( long blockId )
{
arrayPropertyStore.freeId( blockId );
}
public PropertyKeyTokenStore getPropertyKeyTokenStore()
{
return propertyKeyTokenStore;
}
@Override
public void updateRecord( PropertyRecord record )
{
PersistenceWindow window = acquireWindow( record.getId(),
OperationType.WRITE );
try
{
updateRecord( record, window );
}
finally
{
releaseWindow( window );
}
}
@Override
public void forceUpdateRecord( PropertyRecord record )
{
updateRecord( record ); // TODO: should we do something special for property records?
}
private void updateRecord( PropertyRecord record, PersistenceWindow window )
{
long id = record.getId();
registerIdFromUpdateRecord( id );
Buffer buffer = window.getOffsettedBuffer( id );
if ( record.inUse() )
{
// Set up the record header
short prevModifier = record.getPrevProp() == Record.NO_NEXT_RELATIONSHIP.intValue() ? 0
: (short) ( ( record.getPrevProp() & 0xF00000000L ) >> 28 );
short nextModifier = record.getNextProp() == Record.NO_NEXT_RELATIONSHIP.intValue() ? 0
: (short) ( ( record.getNextProp() & 0xF00000000L ) >> 32 );
byte modifiers = (byte) ( prevModifier | nextModifier );
/*
* [pppp,nnnn] previous, next high bits
*/
buffer.put( modifiers );
buffer.putInt( (int) record.getPrevProp() ).putInt(
(int) record.getNextProp() );
// Then go through the blocks
int longsAppended = 0; // For marking the end of blocks
for ( PropertyBlock block : record.getPropertyBlocks() )
{
long[] propBlockValues = block.getValueBlocks();
for ( long propBlockValue : propBlockValues )
{
buffer.putLong( propBlockValue );
}
longsAppended += propBlockValues.length;
/*
* For each block we need to update its dynamic record chain if
* it is just created. Deleted dynamic records are in the property
* record and dynamic records are never modified. Also, they are
* assigned as a whole, so just checking the first should be enough.
*/
if ( !block.isLight()
&& block.getValueRecords().get( 0 ).isCreated() )
{
updateDynamicRecords( block.getValueRecords() );
}
}
if ( longsAppended < PropertyType.getPayloadSizeLongs() )
{
buffer.putLong( 0 );
}
}
else
{
if ( !isInRecoveryMode() )
{
freeId( id );
}
// skip over the record header, nothing useful there
buffer.setOffset( buffer.getOffset() + 9 );
buffer.putLong( 0 );
}
updateDynamicRecords( record.getDeletedRecords() );
}
private void updateDynamicRecords( List<DynamicRecord> records )
{
for ( DynamicRecord valueRecord : records )
{
if ( valueRecord.getType() == PropertyType.STRING.intValue() )
{
stringPropertyStore.updateRecord( valueRecord );
}
else if ( valueRecord.getType() == PropertyType.ARRAY.intValue() )
{
arrayPropertyStore.updateRecord( valueRecord );
}
else
{
throw new InvalidRecordException( "Unknown dynamic record"
+ valueRecord );
}
}
}
public PropertyRecord getLightRecord( long id )
{
PersistenceWindow window = acquireWindow( id, OperationType.READ );
try
{
return getRecord( id, window, RecordLoad.NORMAL );
}
finally
{
releaseWindow( window );
}
}
public void ensureHeavy( PropertyBlock block )
{
if ( block.getType() == PropertyType.STRING )
{
if ( block.isLight() )
{
Collection<DynamicRecord> stringRecords = stringPropertyStore.getLightRecords( block.getSingleValueLong() );
for ( DynamicRecord stringRecord : stringRecords )
{
stringRecord.setType( PropertyType.STRING.intValue() );
block.addValueRecord( stringRecord );
}
}
for ( DynamicRecord stringRecord : block.getValueRecords() )
{
stringPropertyStore.ensureHeavy( stringRecord );
}
}
else if ( block.getType() == PropertyType.ARRAY )
{
if ( block.isLight() )
{
Collection<DynamicRecord> arrayRecords = arrayPropertyStore.getLightRecords( block.getSingleValueLong() );
for ( DynamicRecord arrayRecord : arrayRecords )
{
arrayRecord.setType( PropertyType.ARRAY.intValue() );
block.addValueRecord( arrayRecord );
}
}
for ( DynamicRecord arrayRecord : block.getValueRecords() )
{
arrayPropertyStore.ensureHeavy( arrayRecord );
}
}
}
@Override
public PropertyRecord getRecord( long id )
{
PropertyRecord record;
PersistenceWindow window = acquireWindow( id, OperationType.READ );
try
{
record = getRecord( id, window, RecordLoad.NORMAL );
}
finally
{
releaseWindow( window );
}
return record;
}
@Override
public PropertyRecord forceGetRecord( long id )
{
PersistenceWindow window;
try
{
window = acquireWindow( id, OperationType.READ );
}
catch ( InvalidRecordException e )
{
return new PropertyRecord( id );
}
try
{
return getRecord( id, window, RecordLoad.FORCE );
}
finally
{
releaseWindow( window );
}
}
@Override
public PropertyRecord forceGetRaw( PropertyRecord record )
{
return record;
}
@Override
public PropertyRecord forceGetRaw( long id )
{
return forceGetRecord( id );
}
private PropertyRecord getRecordFromBuffer( long id, Buffer buffer )
{
int offsetAtBeggining = buffer.getOffset();
PropertyRecord record = new PropertyRecord( id );
/*
* [pppp,nnnn] previous, next high bits
*/
byte modifiers = buffer.get();
long prevMod = ( modifiers & 0xF0L ) << 28;
long nextMod = ( modifiers & 0x0FL ) << 32;
long prevProp = buffer.getUnsignedInt();
long nextProp = buffer.getUnsignedInt();
record.setPrevProp( longFromIntAndMod( prevProp, prevMod ) );
record.setNextProp( longFromIntAndMod( nextProp, nextMod ) );
while ( buffer.getOffset() - offsetAtBeggining < RECORD_SIZE )
{
PropertyBlock newBlock = getPropertyBlock( buffer );
if ( newBlock != null )
{
record.addPropertyBlock( newBlock );
record.setInUse( true );
}
else
{
// We assume that storage is defragged
break;
}
}
return record;
}
private PropertyRecord getRecord( long id, PersistenceWindow window, RecordLoad load )
{
Buffer buffer = window.getOffsettedBuffer( id );
PropertyRecord toReturn = getRecordFromBuffer( id, buffer );
if ( !toReturn.inUse() && load != RecordLoad.FORCE )
{
throw new InvalidRecordException( "PropertyRecord[" + id + "] not in use" );
}
return toReturn;
}
/*
* It is assumed that the argument does hold a property block - all zeros is
* a valid (not in use) block, so even if the Bits object has been exhausted a
* result is returned, that has inUse() return false. Also, the argument is not
* touched.
*/
private PropertyBlock getPropertyBlock( Buffer buffer )
{
long header = buffer.getLong();
PropertyType type = PropertyType.getPropertyType( header, true );
if ( type == null )
{
return null;
}
PropertyBlock toReturn = new PropertyBlock();
// toReturn.setInUse( true );
int numBlocks = type.calculateNumberOfBlocksUsed( header );
long[] blockData = new long[numBlocks];
blockData[0] = header; // we already have that
for ( int i = 1; i < numBlocks; i++ )
{
blockData[i] = buffer.getLong();
}
toReturn.setValueBlocks( blockData );
return toReturn;
}
public Object getValue( PropertyBlock propertyBlock )
{
return propertyBlock.getType().getValue( propertyBlock, this );
}
public void makeHeavyIfLight( PropertyBlock record )
{
if ( record.isLight() )
{
/*
* This will add the value records without checking if they are already
* in the block - so we only call this after checking isLight() or
* else we will end up with duplicates.
*/
if ( record.getType() == PropertyType.STRING )
{
Collection<DynamicRecord> stringRecords = stringPropertyStore.getLightRecords( record.getSingleValueLong() );
for ( DynamicRecord stringRecord : stringRecords )
{
stringRecord.setType( PropertyType.STRING.intValue() );
record.addValueRecord( stringRecord );
}
}
else if ( record.getType() == PropertyType.ARRAY )
{
Collection<DynamicRecord> arrayRecords = arrayPropertyStore.getLightRecords( record.getSingleValueLong() );
for ( DynamicRecord arrayRecord : arrayRecords )
{
arrayRecord.setType( PropertyType.ARRAY.intValue() );
record.addValueRecord( arrayRecord );
}
}
}
}
@Override
public void makeStoreOk()
{
propertyKeyTokenStore.makeStoreOk();
stringPropertyStore.makeStoreOk();
arrayPropertyStore.makeStoreOk();
super.makeStoreOk();
}
@Override
public void rebuildIdGenerators()
{
propertyKeyTokenStore.rebuildIdGenerators();
stringPropertyStore.rebuildIdGenerators();
arrayPropertyStore.rebuildIdGenerators();
super.rebuildIdGenerators();
}
public void updateIdGenerators()
{
propertyKeyTokenStore.updateIdGenerators();
stringPropertyStore.updateHighId();
arrayPropertyStore.updateHighId();
this.updateHighId();
}
private Collection<DynamicRecord> allocateStringRecords( byte[] chars )
{
return stringPropertyStore.allocateRecordsFromBytes( chars );
}
private Collection<DynamicRecord> allocateArrayRecords( Object array )
{
return arrayPropertyStore.allocateRecords( array );
}
public void encodeValue( PropertyBlock block, int keyId, Object value )
{
if ( value instanceof String )
{ // Try short string first, i.e. inlined in the property block
String string = (String) value;
if ( LongerShortString.encode( keyId, string, block, PropertyType.getPayloadSize() ) )
{
return;
}
// Fall back to dynamic string store
byte[] encodedString = encodeString( string );
Collection<DynamicRecord> valueRecords = allocateStringRecords( encodedString );
setSingleBlockValue( block, keyId, PropertyType.STRING, first( valueRecords ).getId() );
for ( DynamicRecord valueRecord : valueRecords )
{
valueRecord.setType( PropertyType.STRING.intValue() );
block.addValueRecord( valueRecord );
}
}
else if ( value instanceof Integer )
{
setSingleBlockValue( block, keyId, PropertyType.INT, ((Integer) value).longValue() );
}
else if ( value instanceof Boolean )
{
setSingleBlockValue( block, keyId, PropertyType.BOOL, ((Boolean) value ? 1L : 0L) );
}
else if ( value instanceof Float )
{
setSingleBlockValue( block, keyId, PropertyType.FLOAT, Float.floatToRawIntBits( (Float) value ) );
}
else if ( value instanceof Long )
{
long keyAndType = keyId | (((long) PropertyType.LONG.intValue()) << 24);
if ( ShortArray.LONG.getRequiredBits( (Long) value ) <= 35 )
{ // We only need one block for this value, special layout compared to, say, an integer
block.setSingleBlock( keyAndType | (1L << 28) | ((Long) value << 29) );
}
else
{ // We need two blocks for this value
block.setValueBlocks( new long[]{keyAndType, (Long) value} );
}
}
else if ( value instanceof Double )
{
block.setValueBlocks( new long[]{
keyId | (((long) PropertyType.DOUBLE.intValue()) << 24),
Double.doubleToRawLongBits( (Double) value )} );
}
else if ( value instanceof Byte )
{
setSingleBlockValue( block, keyId, PropertyType.BYTE, ((Byte) value).longValue() );
}
else if ( value instanceof Character )
{
setSingleBlockValue( block, keyId, PropertyType.CHAR, (Character) value );
}
else if ( value instanceof Short )
{
setSingleBlockValue( block, keyId, PropertyType.SHORT, ((Short) value).longValue() );
}
else if ( value.getClass().isArray() )
{ // Try short array first, i.e. inlined in the property block
if ( ShortArray.encode( keyId, value, block, PropertyType.getPayloadSize() ) )
{
return;
}
// Fall back to dynamic array store
Collection<DynamicRecord> arrayRecords = allocateArrayRecords( value );
setSingleBlockValue( block, keyId, PropertyType.ARRAY, first( arrayRecords ).getId() );
for ( DynamicRecord valueRecord : arrayRecords )
{
valueRecord.setType( PropertyType.ARRAY.intValue() );
block.addValueRecord( valueRecord );
}
}
else
{
throw new IllegalArgumentException( "Unknown property type on: " + value + ", " + value.getClass() );
}
}
private void setSingleBlockValue( PropertyBlock block, int keyId, PropertyType type, long longValue )
{
block.setSingleBlock( keyId | (((long) type.intValue()) << 24)
| (longValue << 28) );
}
public static byte[] encodeString( String string )
{
return UTF8.encode( string );
}
public static String decodeString( byte[] byteArray )
{
return UTF8.decode( byteArray );
}
public String getStringFor( PropertyBlock propertyBlock )
{
ensureHeavy( propertyBlock );
return getStringFor( propertyBlock.getValueRecords() );
}
public String getStringFor( Collection<DynamicRecord> dynamicRecords )
{
Pair<byte[], byte[]> source = stringPropertyStore.readFullByteArray( dynamicRecords, PropertyType.STRING );
// A string doesn't have a header in the data array
return decodeString( source.other() );
}
public Object getArrayFor( PropertyBlock propertyBlock )
{
ensureHeavy( propertyBlock );
return getArrayFor( propertyBlock.getValueRecords() );
}
public Object getArrayFor( Iterable<DynamicRecord> records )
{
return getRightArray( arrayPropertyStore.readFullByteArray( records, PropertyType.ARRAY ) );
}
@Override
public List<WindowPoolStats> getAllWindowPoolStats()
{
List<WindowPoolStats> list = new ArrayList<>();
list.add( stringPropertyStore.getWindowPoolStats() );
list.add( arrayPropertyStore.getWindowPoolStats() );
list.add( getWindowPoolStats() );
return list;
}
@Override
public void logAllWindowPoolStats( StringLogger.LineLogger logger )
{
super.logAllWindowPoolStats( logger );
propertyKeyTokenStore.logAllWindowPoolStats( logger );
logger.logLine( stringPropertyStore.getWindowPoolStats().toString() );
logger.logLine( arrayPropertyStore.getWindowPoolStats().toString() );
}
public int getStringBlockSize()
{
return stringPropertyStore.getBlockSize();
}
public int getArrayBlockSize()
{
return arrayPropertyStore.getBlockSize();
}
@Override
protected boolean isRecordInUse( ByteBuffer buffer )
{
// TODO: The next line is an ugly hack, but works.
Buffer fromByteBuffer = new Buffer( null, buffer );
return buffer.limit() >= RECORD_SIZE && getRecordFromBuffer( 0, fromByteBuffer ).inUse();
}
/**
 * Logs the store version of this store and each of its sub-stores.
 */
@Override
public void logVersions(StringLogger.LineLogger logger )
{
    super.logVersions( logger );
    propertyKeyTokenStore.logVersions( logger );
    stringPropertyStore.logVersions( logger );
    arrayPropertyStore.logVersions(logger );
}
/**
 * Logs id usage (in-use/free id counts) of this store and each of its sub-stores.
 */
@Override
public void logIdUsage(StringLogger.LineLogger logger )
{
    super.logIdUsage(logger);
    propertyKeyTokenStore.logIdUsage( logger );
    stringPropertyStore.logIdUsage( logger );
    arrayPropertyStore.logIdUsage( logger );
}
/**
 * Adds the per-record payload capacity (in longs) to the default store description.
 */
@Override
public String toString()
{
    return super.toString() + "[blocksPerRecord:" + PropertyType.getPayloadSizeLongs() + "]";
}
/**
 * Loads the complete chain of property records starting at the given record id,
 * following each record's next-pointer until the chain terminates.
 *
 * @param firstRecordId id of the first property record in the chain.
 * @return the records of the chain, in chain order.
 */
public Collection<PropertyRecord> getPropertyRecordChain( long firstRecordId )
{
    List<PropertyRecord> chain = new LinkedList<>();
    for ( long recordId = firstRecordId; recordId != Record.NO_NEXT_PROPERTY.intValue(); )
    {
        PropertyRecord record = getLightRecord( recordId );
        chain.add( record );
        recordId = record.getNextProp();
    }
    return chain;
}
/**
 * Converts physical property record changes into logical node property updates,
 * appending the result to {@code target}. Delegates to the configured
 * physical-to-logical converter.
 */
public void toLogicalUpdates( Collection<NodePropertyUpdate> target,
        Iterable<PropertyRecordChange> changes, long[] nodeLabelsBefore, long[] nodeLabelsAfter )
{
    physicalToLogicalConverter.apply( target, changes, nodeLabelsBefore, nodeLabelsAfter );
}
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyStore.java
|
1,141
|
public class PropertyRecordTest
{
    @Test
    public void addingDuplicatePropertyBlockShouldOverwriteExisting()
    {
        // Given a record and two blocks that share the same key index id...
        PropertyRecord record = new PropertyRecord( 1 );
        PropertyBlock original = new PropertyBlock();
        original.setValueBlocks( new long[1] );
        original.setKeyIndexId( 2 );
        PropertyBlock duplicate = new PropertyBlock();
        duplicate.setValueBlocks( new long[1] );
        duplicate.setKeyIndexId( 2 ); // same key index, i.e. a duplicate
        // ...when both are set on the record, one after the other...
        record.setPropertyBlock( original );
        record.setPropertyBlock( duplicate );
        // ...then the later block replaces the earlier one, leaving a single entry.
        List<PropertyBlock> propertyBlocks = record.getPropertyBlocks();
        assertThat( propertyBlocks, hasItem( duplicate ) );
        assertThat( propertyBlocks, hasSize( 1 ) );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_store_PropertyRecordTest.java
|
1,142
|
/**
 * In-memory representation of a record in the property store. A property
 * record belongs to either a node or a relationship (see {@link #setNodeId(long)}
 * and {@link #setRelId(long)}), holds a number of {@link PropertyBlock}s up to
 * the store's payload capacity, and links to neighbouring records in the
 * owner's property chain via prev/next pointers.
 */
public class PropertyRecord extends Abstract64BitRecord
{
    private long nextProp = Record.NO_NEXT_PROPERTY.intValue();
    private long prevProp = Record.NO_PREVIOUS_PROPERTY.intValue();
    // Records typically hold few blocks, so a small initial capacity suffices.
    private final List<PropertyBlock> blockRecords = new ArrayList<>( 4 );
    private long entityId = -1;
    // Tri-state owner flag: null = unknown, TRUE = node, FALSE = relationship.
    private Boolean nodeIdSet;
    private boolean isChanged;
    private final List<DynamicRecord> deletedRecords = new LinkedList<>();

    public PropertyRecord( long id )
    {
        super( id );
    }

    /**
     * Creates a record marked as newly created and owned by the given primitive.
     */
    public PropertyRecord( long id, PrimitiveRecord primitive )
    {
        super( id );
        setCreated();
        primitive.setIdTo( this );
    }

    public void setNodeId( long nodeId )
    {
        nodeIdSet = true;
        entityId = nodeId;
    }

    public void setRelId( long relId )
    {
        nodeIdSet = false;
        entityId = relId;
    }

    public boolean isNodeSet()
    {
        return Boolean.TRUE.equals( nodeIdSet );
    }

    public boolean isRelSet()
    {
        return Boolean.FALSE.equals( nodeIdSet );
    }

    /**
     * @return the owning node id, or -1 if this record is not owned by a node.
     */
    public long getNodeId()
    {
        if ( isNodeSet() )
        {
            return entityId;
        }
        return -1;
    }

    /**
     * @return the owning relationship id, or -1 if this record is not owned by
     * a relationship.
     */
    public long getRelId()
    {
        if ( isRelSet() )
        {
            return entityId;
        }
        return -1;
    }

    /**
     * Gets the sum of the sizes of the blocks in this record, in bytes.
     */
    public int size()
    {
        int result = 0;
        for ( PropertyBlock blockRecord : blockRecords )
        {
            result += blockRecord.getSize();
        }
        return result;
    }

    public List<PropertyBlock> getPropertyBlocks()
    {
        return blockRecords;
    }

    public List<DynamicRecord> getDeletedRecords()
    {
        return deletedRecords;
    }

    public void addDeletedRecord( DynamicRecord record )
    {
        assert !record.inUse();
        deletedRecords.add( record );
    }

    /**
     * Adds a block without checking for duplicate keys; asserts that the
     * record's payload capacity is not exceeded.
     */
    public void addPropertyBlock( PropertyBlock block )
    {
        // Fixed assert message: previously ran the size and the block together
        // without a separator ("...reported as 4The added block was...").
        assert size() + block.getSize() <= PropertyType.getPayloadSize() :
                "Exceeded capacity of property record " + this
                + ". My current size is reported as " + size() + ". The added block was " + block +
                " (note that size is " + block.getSize() + ")";
        blockRecords.add( block );
    }

    /**
     * Sets a block, replacing any existing block with the same key index id.
     */
    public void setPropertyBlock( PropertyBlock block )
    {
        removePropertyBlock( block.getKeyIndexId() );
        addPropertyBlock( block );
    }

    /**
     * @return the block for the given key index id, or null if absent.
     */
    public PropertyBlock getPropertyBlock( int keyIndex )
    {
        for ( PropertyBlock block : blockRecords )
        {
            if ( block.getKeyIndexId() == keyIndex )
            {
                return block;
            }
        }
        return null;
    }

    /**
     * Removes and returns the block for the given key index id, or null if absent.
     */
    public PropertyBlock removePropertyBlock( int keyIndex )
    {
        for ( int i = 0; i < blockRecords.size(); i++ )
        {
            if ( blockRecords.get( i ).getKeyIndexId() == keyIndex )
            {
                return blockRecords.remove( i );
            }
        }
        return null;
    }

    public long getNextProp()
    {
        return nextProp;
    }

    public void setNextProp( long nextProp )
    {
        this.nextProp = nextProp;
    }

    @Override
    public String toString()
    {
        StringBuilder buf = new StringBuilder();
        buf.append( "Property[" ).append( getId() ).append( ",used=" ).append( inUse() ).append( ",prev=" ).append(
                prevProp ).append( ",next=" ).append( nextProp );
        if ( entityId != -1 )
        {
            buf.append( nodeIdSet ? ",node=" : ",rel=" ).append( entityId );
        }
        for ( PropertyBlock block : blockRecords )
        {
            buf.append( ',' ).append( block );
        }
        for ( DynamicRecord dyn : deletedRecords )
        {
            buf.append( ",del:" ).append( dyn );
        }
        buf.append( "]" );
        return buf.toString();
    }

    public boolean isChanged()
    {
        return isChanged;
    }

    /**
     * Marks this record as changed and assigns ownership to the given primitive.
     */
    public void setChanged( PrimitiveRecord primitive )
    {
        isChanged = true;
        primitive.setIdTo( this );
    }

    public long getPrevProp()
    {
        return prevProp;
    }

    public void setPrevProp( long prev )
    {
        prevProp = prev;
    }

    /**
     * Deep copy: blocks and deleted records are cloned so the copy can be
     * mutated independently of this record.
     */
    @Override
    public PropertyRecord clone()
    {
        PropertyRecord result = new PropertyRecord( getLongId() );
        result.setInUse( inUse() );
        result.nextProp = nextProp;
        result.prevProp = prevProp;
        result.entityId = entityId;
        result.nodeIdSet = nodeIdSet;
        result.isChanged = isChanged;
        for ( PropertyBlock block : blockRecords )
        {
            result.blockRecords.add( block.clone() );
        }
        for ( DynamicRecord deletedRecord : deletedRecords )
        {
            result.deletedRecords.add( deletedRecord.clone() );
        }
        return result;
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyRecord.java
|
1,143
|
/**
 * Configuration for the property key token store; currently adds nothing
 * beyond the inherited token store settings.
 */
public static abstract class Configuration
        extends TokenStore.Configuration
{
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyKeyTokenStore.java
|
1,144
|
/**
 * Store for property key tokens. Each record maps a key id to a property-use
 * count and a pointer into the dynamic name store holding the key's name.
 */
public class PropertyKeyTokenStore extends TokenStore<PropertyKeyTokenRecord>
{
    public static abstract class Configuration
            extends TokenStore.Configuration
    {
    }

    // Historical type descriptor, should be called PropertyKeyTokenStore
    public static final String TYPE_DESCRIPTOR = "PropertyIndexStore";

    // Fixed-size record layout; field order must match readRecord/writeRecord.
    private static final int RECORD_SIZE = 1/*inUse*/ + 4/*prop count*/ + 4/*nameId*/;

    public PropertyKeyTokenStore( File fileName, Config config,
                                  IdGeneratorFactory idGeneratorFactory, WindowPoolFactory windowPoolFactory,
                                  FileSystemAbstraction fileSystemAbstraction, StringLogger stringLogger,
                                  DynamicStringStore nameStore )
    {
        super(fileName, config, IdType.PROPERTY_KEY_TOKEN, idGeneratorFactory, windowPoolFactory,
                fileSystemAbstraction, stringLogger, nameStore);
    }

    @Override
    public <FAILURE extends Exception> void accept( RecordStore.Processor<FAILURE> processor, PropertyKeyTokenRecord record ) throws FAILURE
    {
        processor.processPropertyKeyToken( this, record );
    }

    @Override
    protected PropertyKeyTokenRecord newRecord( int id )
    {
        return new PropertyKeyTokenRecord( id );
    }

    // Deserializes the token-specific fields: property count, then name id.
    @Override
    protected void readRecord( PropertyKeyTokenRecord record, Buffer buffer )
    {
        record.setPropertyCount( buffer.getInt() );
        record.setNameId( buffer.getInt() );
    }

    // Serializes in the same order readRecord expects them back.
    @Override
    protected void writeRecord( PropertyKeyTokenRecord record, Buffer buffer )
    {
        buffer.putInt( record.getPropertyCount() );
        buffer.putInt( record.getNameId() );
    }

    @Override
    public int getRecordSize()
    {
        return RECORD_SIZE;
    }

    @Override
    public String getTypeDescriptor()
    {
        return TYPE_DESCRIPTOR;
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyKeyTokenStore.java
|
1,145
|
/**
 * Record for a property key token. In addition to the inherited token fields
 * it tracks how many properties currently use this key.
 */
public class PropertyKeyTokenRecord extends TokenRecord
{
    private int propertyCount;

    public PropertyKeyTokenRecord( int id )
    {
        super( id );
    }

    @Override
    protected String simpleName()
    {
        return "PropertyKey";
    }

    public int getPropertyCount()
    {
        return propertyCount;
    }

    public void setPropertyCount( int count )
    {
        propertyCount = count;
    }

    @Override
    protected void additionalToString( StringBuilder buf )
    {
        buf.append( ",propCount=" ).append( propertyCount );
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyKeyTokenRecord.java
|
1,146
|
/**
 * A single key/value entry within a {@link PropertyRecord}. The first value
 * block encodes the key index id (low 24 bits), the property type and,
 * depending on the type, either an inlined value or a pointer into a dynamic
 * store; long string/array values additionally carry their dynamic records.
 */
public class PropertyBlock implements Cloneable
{
    // Low 24 bits of the first value block hold the key index id.
    private static final long KEY_BITMASK = 0xFFFFFFL;
    // Cap on how many array elements toString renders before eliding.
    private static final int MAX_ARRAY_TOSTRING_SIZE = 4;
    private final List<DynamicRecord> valueRecords = new LinkedList<>();
    private long[] valueBlocks;

    public PropertyType getType()
    {
        return getType( false );
    }

    // Resolves the type even for not-in-use/invalid blocks (no exception).
    public PropertyType forceGetType()
    {
        return getType( true );
    }

    private PropertyType getType( boolean force )
    {
        return valueBlocks == null ? null : PropertyType.getPropertyType( valueBlocks[0], force );
    }

    public int getKeyIndexId()
    {
        // [][][][][][kkkk,kkkk][kkkk,kkkk][kkkk,kkkk]
        return (int) (valueBlocks[0] & KEY_BITMASK);
    }

    public void setKeyIndexId( int key )
    {
        // Clear the old key bits, then OR in the new key.
        valueBlocks[0] &= ~KEY_BITMASK;
        valueBlocks[0] |= key;
    }

    /**
     * Replaces the value with a single inlined block and drops any dynamic records.
     */
    public void setSingleBlock( long value )
    {
        valueBlocks = new long[1];
        valueBlocks[0] = value;
        valueRecords.clear();
    }

    public void addValueRecord( DynamicRecord record )
    {
        valueRecords.add( record );
    }

    public List<DynamicRecord> getValueRecords()
    {
        return valueRecords;
    }

    public long getSingleValueBlock()
    {
        return valueBlocks[0];
    }

    /**
     * use this for references to the dynamic stores
     */
    public long getSingleValueLong()
    {
        // Value payload starts at bit 28 (after 24 key bits + 4 type bits).
        return (valueBlocks[0] & 0xFFFFFFFFF0000000L) >>> 28;
    }

    public int getSingleValueInt()
    {
        return (int)((valueBlocks[0] & 0x0FFFFFFFF0000000L) >>> 28);
    }

    public short getSingleValueShort()
    {
        return (short)((valueBlocks[0] & 0x00000FFFF0000000L) >>> 28);
    }

    public byte getSingleValueByte()
    {
        return (byte)((valueBlocks[0] & 0x0000000FF0000000L) >>> 28);
    }

    public long[] getValueBlocks()
    {
        return valueBlocks;
    }

    /**
     * @return true when no dynamic value records are attached, i.e. the value
     * (or a pointer to it) lives entirely in the value blocks.
     */
    public boolean isLight()
    {
        return valueRecords.isEmpty();
    }

    public void setValueBlocks( long[] blocks )
    {
        int expectedPayloadSize = PropertyType.getPayloadSizeLongs();
        assert ( blocks == null || blocks.length <= expectedPayloadSize) : (
                "I was given an array of size " + blocks.length +", but I wanted it to be " + expectedPayloadSize );
        this.valueBlocks = blocks;
        valueRecords.clear();
    }

    /**
     * A property block can take a variable size of bytes in a property record.
     * This method returns the size of this block in bytes, including the header
     * size.
     *
     * @return The size of this block in bytes, including the header.
     */
    public int getSize()
    {
        // Currently each block is a multiple of 8 in size
        return valueBlocks == null ? 0 : valueBlocks.length * 8;
    }

    @Override
    public String toString()
    {
        StringBuilder result = new StringBuilder("PropertyBlock[");
        PropertyType type = getType();
        if ( valueBlocks != null )
        {
            result.append( "blocks=" ).append( valueBlocks.length ).append( "," );
        }
        result.append( type == null ? "<unknown type>" : type.name() ).append( ',' );
        result.append( "key=" ).append( valueBlocks == null ? "?" : Integer.toString( getKeyIndexId() ) );
        if ( type != null )
        {
            switch ( type )
            {
            case STRING:
            case ARRAY:
                // Heavy types: show the pointer to the first dynamic record.
                result.append( ",firstDynamic=" ).append( getSingleValueLong() );
                break;
            default:
                // Inlined types: render the value; arrays are truncated after
                // MAX_ARRAY_TOSTRING_SIZE elements.
                Object value = type.getValue( this, null );
                if ( value != null && value.getClass().isArray() )
                {
                    int length = Array.getLength( value );
                    StringBuilder buf = new StringBuilder( value.getClass().getComponentType().getSimpleName() ).append( "[" );
                    for ( int i = 0; i < length && i <= MAX_ARRAY_TOSTRING_SIZE; i++ )
                    {
                        if ( i != 0 )
                        {
                            buf.append( "," );
                        }
                        buf.append( Array.get( value, i ) );
                    }
                    if ( length > MAX_ARRAY_TOSTRING_SIZE )
                    {
                        buf.append( ",..." );
                    }
                    value = buf.append( "]" );
                }
                result.append( ",value=" ).append( value );
                break;
            }
        }
        if ( !isLight() )
        {
            result.append( ",ValueRecords[" );
            Iterator<DynamicRecord> recIt = valueRecords.iterator();
            while ( recIt.hasNext() )
            {
                result.append( recIt.next() );
                if ( recIt.hasNext() )
                {
                    result.append( ',' );
                }
            }
            result.append( ']' );
        }
        result.append( ']' );
        return result.toString();
    }

    /**
     * Deep copy: value blocks and dynamic records are cloned.
     */
    @Override
    public PropertyBlock clone()
    {
        PropertyBlock result = new PropertyBlock();
        if ( valueBlocks != null )
        {
            result.valueBlocks = valueBlocks.clone();
        }
        for ( DynamicRecord valueRecord : valueRecords )
        {
            result.valueRecords.add( valueRecord.clone() );
        }
        return result;
    }

    public boolean hasSameContentsAs( PropertyBlock other )
    {
        // Assumption (which happens to be true) that if a heavy (long string/array) property
        // changes it will get another id, making the valueBlocks values differ.
        return Arrays.equals( valueBlocks, other.valueBlocks );
    }

    public DefinedProperty newPropertyData( PropertyStore propertyStore )
    {
        return getType().readProperty( getKeyIndexId(), this, singletonProvider(propertyStore) );
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyBlock.java
|
1,147
|
/**
 * Test utility: starts a database, writes a small amount of data and then
 * terminates the JVM without shutting the database down, leaving the store
 * files in an unclean (unrecovered) state for recovery tests to consume.
 *
 * args[0] - store directory;
 * args[1] - (optional) whether to also set a graph-level property, default false.
 */
public class ProduceUncleanStore
{
    public static void main( String[] args ) throws Exception
    {
        String storeDir = args[0];
        // Simplified from a redundant "cond ? parse : false" ternary.
        boolean setGraphProperty = args.length > 1 && Boolean.parseBoolean( args[1] );
        GraphDatabaseService db = new EmbeddedGraphDatabase(
                storeDir,
                stringMap(),
                new DefaultGraphDatabaseDependencies( DevNullLoggingService.DEV_NULL ) );
        try ( Transaction tx = db.beginTx() )
        {
            Node node = db.createNode();
            node.setProperty( "name", "Something" );
            if ( setGraphProperty )
            {
                //noinspection deprecation
                ((GraphDatabaseAPI) db).getDependencyResolver().resolveDependency( NodeManager.class )
                        .getGraphProperties().setProperty( "prop", "Some value" );
            }
            tx.success();
        }
        // Deliberately exit without db.shutdown() so the store is left unclean.
        System.exit( 0 );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_store_ProduceUncleanStore.java
|
1,148
|
/**
 * Base class for records that own a chain of property records, referenced via
 * a "first property" pointer. Keeps both the current pointer and the value as
 * loaded from the store, so chain changes can be detected.
 */
public abstract class PrimitiveRecord extends Abstract64BitRecord
{
    private long nextProp;
    private final long committedNextProp;

    public PrimitiveRecord( long id, long nextProp )
    {
        super( id );
        // Fixed: a redundant "this.nextProp = nextProp;" preceded this
        // combined assignment, writing the same value twice.
        this.committedNextProp = this.nextProp = nextProp;
    }

    public long getNextProp()
    {
        return nextProp;
    }

    public void setNextProp( long nextProp )
    {
        this.nextProp = nextProp;
    }

    /**
     * @return the committed first-property pointer; a freshly created record
     * has no committed chain, so NO_NEXT_PROPERTY is reported.
     */
    public long getCommittedNextProp()
    {
        return isCreated() ? Record.NO_NEXT_PROPERTY.intValue() : committedNextProp;
    }

    /**
     * Writes this record's id into the given property record as its owner.
     */
    public abstract void setIdTo( PropertyRecord property );
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PrimitiveRecord.java
|
1,149
|
/**
 * {@link DynamicRecordAllocator} for tests where all dynamic records are
 * allocated up front: it never creates new records, it only hands out the
 * next record from the supplied iterator.
 */
public class PreAllocatedRecords implements DynamicRecordAllocator
{
    private final int recordDataSize;

    public PreAllocatedRecords( int dataSize )
    {
        this.recordDataSize = dataSize;
    }

    @Override
    public int dataSize()
    {
        return recordDataSize;
    }

    @Override
    public DynamicRecord nextUsedRecordOrNew( Iterator<DynamicRecord> recordsToUseFirst )
    {
        // Everything is pre-allocated, so simply consume the next record.
        return recordsToUseFirst.next();
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_store_PreAllocatedRecords.java
|
1,150
|
/**
 * Persistence window backed by a plain heap {@link ByteBuffer} (as opposed to
 * a memory-mapped buffer); used when memory mapping is unavailable or disabled.
 */
class PlainPersistenceWindow extends AbstractPersistenceWindow
{
    PlainPersistenceWindow( long position, int recordSize, int totalSize,
        StoreChannel channel )
    {
        super( position, recordSize, totalSize, channel,
            ByteBuffer.allocate( totalSize ) );
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PlainPersistenceWindow.java
|
1,151
|
// Anonymous Answer body (declaration outside this fragment): returns null on
// the first invocation and the prepared window on every later one, simulating
// "row not yet loaded" followed by "another thread has loaded it".
{
    int invocations = 0;
    @Override
    public PersistenceRow answer( InvocationOnMock invocationOnMock ) throws Throwable
    {
        if(invocations++ == 0)
        {
            return null;
        }
        return window;
    }
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_store_PersistenceWindowPoolTest.java
|
1,152
|
// Anonymous BrickElement body (declaration outside this fragment): counts
// lock/unLock calls so a test can assert they balance; only brick index 0 is
// expected to be touched.
{
    @Override
    synchronized void lock()
    {
        assertEquals( 0, index );
        super.lock();
        lockedCount.incrementAndGet();
    }
    @Override
    void unLock()
    {
        assertEquals( 0, index );
        super.unLock();
        unlockedCount.incrementAndGet();
    }
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_store_PersistenceWindowPoolTest.java
|
1,153
|
// Anonymous BrickElementFactory body (declaration outside this fragment):
// produces BrickElements that count their lock/unLock calls for verification.
{
    @Override
    public BrickElement create( final int index )
    {
        return new BrickElement( index )
        {
            @Override
            synchronized void lock()
            {
                assertEquals( 0, index );
                super.lock();
                lockedCount.incrementAndGet();
            }
            @Override
            void unLock()
            {
                assertEquals( 0, index );
                super.unLock();
                unlockedCount.incrementAndGet();
            }
        };
    }
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_store_PersistenceWindowPoolTest.java
|
1,154
|
// Anonymous WorkerCommand body (declaration outside this fragment): acquires
// the same window position from another thread, verifies it is the handed-over
// row with the expected contents, and returns any failure instead of throwing.
{
    @Override
    public Throwable doWork( Void state )
    {
        PersistenceWindow t2Row = pool.acquire( 0, OperationType.READ ); // Will block until t1Row is released.
        try
        {
            assertTrue( t1Row == t2Row );
            assertBufferContents( blockSize, t2Row );
            return null;
        }
        catch ( Throwable t )
        {
            return t;
        }
        finally
        {
            pool.release( t2Row );
        }
    }
} );
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_store_PersistenceWindowPoolTest.java
|
1,155
|
/**
 * Tests for {@link PersistenceWindowPool}: window re-acquisition, dirty-row
 * hand-over between threads, unlock-on-exception, lock-free row loading races
 * and brick lock/unlock balancing.
 */
public class PersistenceWindowPoolTest
{
    // Fixed copy-paste bug: previously named after MappedPersistenceWindowTest,
    // which made this test share that test's target directory.
    private static final TargetDirectory target = TargetDirectory.forTest( PersistenceWindowPoolTest.class );
    @Rule
    public final ResourceCollection resources = new ResourceCollection();
    @Rule
    public final TargetDirectory.TestDirectory directory = target.testDirectory();
    @Rule
    public final ExpectedException expectedUnderlyingException = ExpectedException.none();

    @Test
    public void shouldBeAbleToReAcquireReleasedWindow() throws Exception
    {
        // given
        String filename = new File( directory.directory(), "mapped.file" ).getAbsolutePath();
        RandomAccessFile file = resources.add( new RandomAccessFile( filename, "rw" ) );
        StoreChannel channel = new StoreFileChannel( file.getChannel() );
        PersistenceWindowPool pool = new PersistenceWindowPool( new File("test.store"), 8,
                channel, 0, false, false, new ConcurrentHashMap<Long, PersistenceRow>(),
                BrickElementFactory.DEFAULT, StringLogger.DEV_NULL );
        PersistenceWindow initialWindow = pool.acquire( 0, OperationType.READ );
        pool.release( initialWindow );
        // when
        PersistenceWindow window = pool.acquire( 0, OperationType.READ );
        // then
        assertNotSame( initialWindow, window );
        pool.close();
        file.close();
    }

    @Test
    public void handOverDirtyPersistenceRowToReaderShouldWriteWhenClosing() throws Exception
    {
        String filename = new File( target.makeGraphDbDir(), "dirty" ).getAbsolutePath();
        RandomAccessFile file = resources.add( new RandomAccessFile( filename, "rw" ) );
        final int blockSize = 8;
        StoreChannel channel = new StoreFileChannel( file.getChannel() );
        final PersistenceWindowPool pool = new PersistenceWindowPool( new File("test.store"), blockSize,
                channel, 0, false, false, new ConcurrentHashMap<Long, PersistenceRow>(),
                BrickElementFactory.DEFAULT, StringLogger.DEV_NULL );
        // The gist:
        // T1 acquires position 0 as WRITE
        // T2 would like to acquire position 0 as READ, marks it and goes to wait in lock()
        // T1 writes stuff to the buffer and releases it
        // T2 gets the PR handed over from T1, reads and verifies that it got the changes made by T1
        // T2 releases it
        // Verify that what T1 wrote is on disk
        final PersistenceWindow t1Row = pool.acquire( 0, OperationType.WRITE );
        OtherThreadExecutor<Void> otherThread = new OtherThreadExecutor<>( "other thread", null );
        Future<Throwable> future = otherThread.executeDontWait( new WorkerCommand<Void, Throwable>()
        {
            @Override
            public Throwable doWork( Void state )
            {
                PersistenceWindow t2Row = pool.acquire( 0, OperationType.READ ); // Will block until t1Row is released.
                try
                {
                    assertTrue( t1Row == t2Row );
                    assertBufferContents( blockSize, t2Row );
                    return null;
                }
                catch ( Throwable t )
                {
                    return t;
                }
                finally
                {
                    pool.release( t2Row );
                }
            }
        } );
        try
        {
            writeBufferContents( blockSize, t1Row );
            otherThread.waitUntilWaiting();
        }
        finally
        {
            pool.release( t1Row );
        }
        Throwable failure = future.get();
        if ( failure != null )
        {
            throw launderedException( failure );
        }
        PersistenceWindow row = pool.acquire( 0, OperationType.READ );
        assertFalse( t1Row == row );
        assertBufferContents( blockSize, row );
        pool.close();
        otherThread.close();
        file.close();
    }

    @Test
    public void releaseShouldUnlockWindowEvenIfExceptionIsThrown() throws Exception
    {
        String filename = new File( directory.directory(), "mapped.file" ).getAbsolutePath();
        RandomAccessFile file = resources.add( new RandomAccessFile( filename, "rw" ) );
        StoreChannel channel = new StoreFileChannel( file.getChannel() );
        PersistenceWindowPool pool = new PersistenceWindowPool( new File("test.store"), 8, channel, 0,
                false, false, new ConcurrentHashMap<Long, PersistenceRow>(), BrickElementFactory.DEFAULT,
                StringLogger.DEV_NULL );
        PersistenceRow row = mock( PersistenceRow.class );
        when( row.writeOutAndCloseIfFree( false ) ).thenThrow(
                new UnderlyingStorageException ("Unable to write record" ) );
        expectedUnderlyingException.expect( UnderlyingStorageException.class );
        try
        {
            pool.release( row );
        }
        finally
        {
            // The window must be unlocked even though writing it out failed.
            verify( row ).unLock();
        }
        pool.close();
        file.close();
    }

    @Test
    public void brickSizeZeroShouldNotCauseNPEWhenOtherThreadLoadsPersistenceRow() throws Exception
    {
        // Given
        String filename = new File( directory.directory(), "mapped.file" ).getAbsolutePath();
        RandomAccessFile file = resources.add( new RandomAccessFile( filename, "rw" ) );
        StoreChannel channel = new StoreFileChannel( file.getChannel() );
        PersistenceRow window = new PersistenceRow( 0L, 10, channel );
        //noinspection unchecked
        ConcurrentMap<Long, PersistenceRow> map = mock(ConcurrentMap.class);
        // On the first lookup, pretend the row is not in memory, this makes the current thread decide to load
        // the row itself. The second time this method is called will be when the acquire routine realizes another
        // thread has loaded the window, and goes off to get that window.
        when(map.get( 0L )).then(returnNullFirstTimeButAWindowSecondTime(window));
        // TWIST! When the thread has loaded the row, it will try to insert it into the map, except now we pretend
        // another thread has already put it in there, triggering a branch where our original thread will undo any
        // locks it's grabbed as well as any memory it has allocated.
        when( map.putIfAbsent( eq( 0L ), any( PersistenceRow.class ) ) ).thenReturn( window );
        PersistenceWindowPool pool = new PersistenceWindowPool( new File("test.store"), 8, channel, 0,
                false, false, map, BrickElementFactory.DEFAULT, StringLogger.DEV_NULL );
        // When
        PersistenceWindow acquiredWindow = pool.acquire( 0L, OperationType.READ );
        // Then
        assertEquals(window, acquiredWindow);
        pool.close();
        file.close();
    }

    @Test
    public void shouldSeeEqualNumberBrickLockAndUnlock() throws Exception
    {
        // GIVEN
        // -- a store file that has some records in it already
        String filename = new File( directory.directory(), "mapped.file" ).getAbsolutePath();
        RandomAccessFile file = resources.add( new RandomAccessFile( filename, "rw" ) );
        StoreChannel channel = new StoreFileChannel( file.getChannel() );
        file.setLength( 8*10 );
        // -- a pool with a brick factory that tracks calls to lock/unlock
        final AtomicInteger lockedCount = new AtomicInteger(), unlockedCount = new AtomicInteger();
        BrickElementFactory brickFactory = new BrickElementFactory()
        {
            @Override
            public BrickElement create( final int index )
            {
                return new BrickElement( index )
                {
                    @Override
                    synchronized void lock()
                    {
                        assertEquals( 0, index );
                        super.lock();
                        lockedCount.incrementAndGet();
                    }
                    @Override
                    void unLock()
                    {
                        assertEquals( 0, index );
                        super.unLock();
                        unlockedCount.incrementAndGet();
                    }
                };
            }
        };
        PersistenceWindowPool pool = new PersistenceWindowPool( new File("test.store"), 8,
                channel, 10000, false, false, new ConcurrentHashMap<Long, PersistenceRow>(),
                brickFactory, StringLogger.DEV_NULL );
        try
        {
            // WHEN
            // -- we acquire/release a window for position 0 (which have not been mapped
            // and will therefore be of type PersistenceRow
            pool.release( pool.acquire( 0, OperationType.READ ) );
            // THEN
            // -- there should have been
            assertEquals( 1, lockedCount.get() );
            assertEquals( 1, unlockedCount.get() );
        }
        finally
        {
            pool.close();
        }
    }

    private Answer<PersistenceRow> returnNullFirstTimeButAWindowSecondTime(final PersistenceRow window)
    {
        return new Answer<PersistenceRow>()
        {
            int invocations = 0;
            @Override
            public PersistenceRow answer( InvocationOnMock invocationOnMock ) throws Throwable
            {
                if(invocations++ == 0)
                {
                    return null;
                }
                return window;
            }
        };
    }

    private void writeBufferContents( final int blockSize, final PersistenceWindow t1Row )
    {
        Buffer buffer = t1Row.getBuffer();
        for ( int i = 0; i < blockSize; i++ )
        {
            buffer.put( (byte) i );
        }
    }

    private void assertBufferContents( final int blockSize, PersistenceWindow row )
    {
        Buffer buffer = row.getBuffer();
        for ( int i = 0; i < blockSize; i++ )
        {
            assertEquals( (byte)i, buffer.get() );
        }
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_store_PersistenceWindowPoolTest.java
|
1,156
|
// Anonymous Comparator body (declaration outside this fragment): orders brick
// elements by their hit-count snapshot.
{
    @Override
    public int compare( BrickElement o1, BrickElement o2 )
    {
        // Integer.compare avoids the overflow risk of subtracting the counts.
        return Integer.compare( o1.getHitCountSnapshot(), o2.getHitCountSnapshot() );
    }
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PersistenceWindowPool.java
|
1,157
|
public class PersistenceWindowPool implements WindowPool
{
private static final int MAX_BRICK_COUNT = 100000;
private final File storeName;
// == recordSize
private final int blockSize;
private StoreChannel fileChannel;
private final ConcurrentMap<Long,PersistenceRow> activeRowWindows;
private long availableMem = 0;
private long memUsed = 0;
private int brickCount = 0;
private int brickSize = 0;
private BrickElement brickArray[] = new BrickElement[0];
private int brickMiss = 0;
static final int REFRESH_BRICK_COUNT = 50000;
private final FileChannel.MapMode mapMode;
private int hit = 0;
private int miss = 0;
private int switches = 0;
private int ooe = 0;
private boolean useMemoryMapped = true;
private final boolean readOnly;
private final AtomicBoolean refreshing = new AtomicBoolean();
private final AtomicInteger avertedRefreshes = new AtomicInteger();
private final AtomicLong refreshTime = new AtomicLong();
private final AtomicInteger refreshes = new AtomicInteger();
private final StringLogger log;
private final BrickElementFactory brickFactory;
/**
 * Create new pool for a store.
 *
 * @param storeName
 *            Name of store that use this pool
 * @param blockSize
 *            The size of each record/block in the store
 * @param fileChannel
 *            A fileChannel to the store
 * @param mappedMem
 *            Number of bytes dedicated to memory mapped windows
 * @param useMemoryMappedBuffers
 *            Whether to use memory-mapped windows at all
 * @param readOnly
 *            Whether the store is opened read-only (windows map read-only and
 *            flushing is skipped)
 * @param activeRowWindows
 *            Data structure for storing active "row windows", generally just provide a concurrent hash map.
 * @param brickFactory
 *            Factory for the brick elements that partition the file
 * @param log
 *            Logger for pool statistics and warnings
 * @throws UnderlyingStorageException If unable to create pool
 */
public PersistenceWindowPool( File storeName, int blockSize,
                              StoreChannel fileChannel, long mappedMem,
                              boolean useMemoryMappedBuffers, boolean readOnly,
                              ConcurrentMap<Long, PersistenceRow> activeRowWindows,
                              BrickElementFactory brickFactory,
                              StringLogger log )
{
    this.storeName = storeName;
    this.blockSize = blockSize;
    this.fileChannel = fileChannel;
    this.availableMem = mappedMem;
    this.useMemoryMapped = useMemoryMappedBuffers;
    this.readOnly = readOnly;
    this.activeRowWindows = activeRowWindows;
    this.brickFactory = brickFactory;
    // Read-only pools must never map the file writable.
    this.mapMode = readOnly ? MapMode.READ_ONLY : MapMode.READ_WRITE;
    this.log = log;
    setupBricks();
    dumpStatus();
}
/**
 * Acquires a window for {@code position} and {@code operationType},
 * locking the window to prevent other threads from using it.
 *
 * @param position
 *            The position that needs to be encapsulated by the window
 * @param operationType
 *            The type of operation (READ or WRITE)
 * @return A locked window encapsulating the position
 */
@Override
public PersistenceWindow acquire( long position, OperationType operationType )
{
    LockableWindow window = null;
    if ( brickMiss >= REFRESH_BRICK_COUNT )
    {
        refreshBricks();
    }
    BrickElement theBrick = null;
    while ( window == null )
    {
        if ( brickSize > 0 )
        {
            int brickIndex = positionToBrickIndex( position );
            if ( brickIndex >= brickArray.length )
            {
                expandBricks( brickIndex + 1 );
            }
            theBrick = brickArray[brickIndex];
            window = theBrick.getAndMarkWindow();
        }
        if ( window == null )
        {
            // There was no mapped window for this brick. Go for active window instead.
            // Should be AtomicIntegers, but it's completely OK to miss some
            // updates for these statistics, right?
            miss++;
            brickMiss++;
            // Lock-free implementation of instantiating an active window for this position
            // See if there's already an active window for this position
            PersistenceRow dpw = activeRowWindows.get( position );
            if ( dpw != null && dpw.markAsInUse() )
            { // ... there was and we managed to mark it as in use
                window = dpw;
                break;
            }
            // Either there was no active window for this position or it got
            // closed right before we managed to mark it as in use.
            // Either way instantiate a new active window for this position
            dpw = new PersistenceRow( position, blockSize, fileChannel );
            PersistenceRow existing = activeRowWindows.putIfAbsent( position, dpw );
            if ( existing == null )
            {
                // No other thread managed to create an active window for
                // this position before us.
                window = dpw;
            }
            else
            {
                // Someone else put it there before us. Close this row
                // which was unnecessarily opened. The next go in this loop
                // will get that one instead.
                dpw.close(); // fixed: removed stray empty statement (";;")
                if(theBrick != null)
                {
                    // theBrick may be null here if brick size is 0.
                    theBrick.unLock();
                }
            }
        }
        else
        {
            hit++;
        }
    }
    window.lock( operationType );
    return window;
}
// Maps a record position to the index of the brick covering its byte offset.
private int positionToBrickIndex( long position )
{
    return (int) (position * blockSize / brickSize);
}
// Inverse of positionToBrickIndex: first record position covered by the brick.
private long brickIndexToPosition( int brickIndex )
{
    return (long) brickIndex * brickSize / blockSize;
}
// Logs accumulated window statistics (hits, misses, switches, out-of-memory events).
void dumpStatistics()
{
    log.info( storeName + " hit=" + hit + " miss=" + miss + " switches="
              + switches + " ooe=" + ooe );
}
/**
 * Releases a window used for an operation back to the pool and unlocks it
 * so other threads may use it.
 *
 * @param window
 *            The window to be released
 */
@Override
public void release( PersistenceWindow window )
{
    try
    {
        if ( window instanceof PersistenceRow )
        {
            PersistenceRow dpw = (PersistenceRow) window;
            try
            {
                // If the corresponding window has been instantiated while we had
                // this active row we need to hand over the changes to that
                // window if the window isn't memory mapped.
                if ( brickSize > 0 && dpw.isDirty() )
                {
                    applyChangesToWindowIfNecessary( dpw );
                }
                if ( dpw.writeOutAndCloseIfFree( readOnly ) )
                {
                    // Row is no longer in use by anyone: drop it from the map
                    // (only if it is still the mapped instance).
                    activeRowWindows.remove( dpw.position(), dpw );
                }
                else
                {
                    // Still in use by another thread: keep it but reset state.
                    dpw.reset();
                }
            }
            finally
            {
                // Balance the brick mark taken in acquire().
                if ( brickSize > 0 )
                {
                    int brickIndex = positionToBrickIndex( dpw.position() );
                    BrickElement theBrick = brickArray[brickIndex];
                    theBrick.unLock();
                }
            }
        }
    }
    finally
    {
        // Always release the per-window lock, even if write-out failed.
        ((LockableWindow) window).unLock();
    }
}
/**
 * If a non-memory-mapped brick window now covers this dirty row's position,
 * copies the row's changes into that window so they are not lost when the
 * row is discarded. Memory-mapped windows read straight from the file, so
 * they need no hand-over.
 */
private void applyChangesToWindowIfNecessary( PersistenceRow dpw )
{
    int brickIndex = positionToBrickIndex( dpw.position() );
    LockableWindow existingBrickWindow = brickIndex < brickArray.length ?
            brickArray[brickIndex].getWindow() : null;
    if ( existingBrickWindow != null && !(existingBrickWindow instanceof MappedPersistenceWindow) &&
            existingBrickWindow.markAsInUse() )
    {
        // There is a non-mapped brick window here, let's have it
        // know about my changes.
        existingBrickWindow.lock( OperationType.WRITE );
        try
        {
            existingBrickWindow.acceptContents( dpw );
        }
        finally
        {
            existingBrickWindow.unLock();
        }
    }
}
/**
 * Flushes everything to disk, closes all brick windows, detaches the file
 * channel and clears the active-row map. The pool is unusable afterwards.
 */
@Override
public synchronized void close()
{
    flushAll();
    for ( BrickElement element : brickArray )
    {
        if ( element.getWindow() != null )
        {
            element.getWindow().close();
            element.setWindow( null );
        }
    }
    fileChannel = null;
    activeRowWindows.clear();
    dumpStatistics();
}
/**
 * Forces all mapped windows and the underlying file channel to disk.
 * No-op for read-only pools.
 */
@Override
public void flushAll()
{
    if ( readOnly )
    {
        return;
    }
    for ( BrickElement element : brickArray )
    {
        PersistenceWindow window = element.getWindow();
        if ( window != null )
        {
            window.force();
        }
    }
    try
    {
        fileChannel.force( false );
    }
    catch ( IOException e )
    {
        throw new UnderlyingStorageException(
                "Failed to flush file channel " + storeName, e );
    }
}
/**
 * Initial setup of bricks based on the size of the given channel and
 * available memory to map.
 *
 * Picks a brick size that is a multiple of the block size and a brick count
 * so that the mapped memory budget is respected; disables memory mapping
 * entirely when the budget is too small to be useful.
 */
private void setupBricks()
{
    long fileSize = -1;
    try
    {
        fileSize = fileChannel.size();
    }
    catch ( IOException e )
    {
        throw new UnderlyingStorageException(
                "Unable to get file size for " + storeName, e );
    }
    if ( blockSize == 0 )
    {
        return;
    }
    // If we can't fit even 10 blocks in available memory don't even try
    // to use available memory.
    if(availableMem > 0 && availableMem < blockSize * 10l )
    {
        logWarn( "Unable to use " + availableMem
                 + "b as memory mapped windows, need at least " + blockSize * 10
                 + "b (block size * 10)" );
        logWarn( "Memory mapped windows have been turned off" );
        availableMem = 0;
        brickCount = 0;
        brickSize = 0;
        return;
    }
    if ( availableMem > 0 && fileSize > 0 )
    {
        // Budget covers the whole file: aim for ~1000 bricks of the budget.
        double ratio = (availableMem + 0.0d) / fileSize;
        if ( ratio >= 1 )
        {
            brickSize = (int) (availableMem / 1000);
            if ( brickSize < 0 )
            {
                // int overflow guard: clamp, then round down to block multiple.
                brickSize = Integer.MAX_VALUE;
            }
            brickSize = (brickSize / blockSize) * blockSize;
            brickCount = (int) (fileSize / brickSize);
        }
        else
        {
            // Budget covers only part of the file: scale brick count up so
            // that roughly "ratio" of bricks can be mapped at once.
            brickCount = (int) (1000.0d / ratio);
            if ( brickCount > MAX_BRICK_COUNT )
            {
                brickCount = MAX_BRICK_COUNT;
            }
            if ( fileSize / brickCount > availableMem )
            {
                logWarn( "Unable to use " + (availableMem / 1024)
                         + "kb as memory mapped windows, need at least "
                         + (fileSize / brickCount / 1024) + "kb" );
                logWarn( "Memory mapped windows have been turned off" );
                availableMem = 0;
                brickCount = 0;
                brickSize = 0;
                return;
            }
            brickSize = (int) (fileSize / brickCount);
            if ( brickSize < 0 )
            {
                brickSize = Integer.MAX_VALUE;
                brickSize = (brickSize / blockSize) * blockSize;
                brickCount = (int) (fileSize / brickSize);
            }
            else
            {
                brickSize = (brickSize / blockSize) * blockSize;
            }
            assert brickSize > blockSize;
        }
    }
    else if ( availableMem > 0 )
    {
        // Empty file but a memory budget: derive brick size from budget alone.
        brickSize = (int) (availableMem / 100);
        if ( brickSize < 0 )
        {
            brickSize = Integer.MAX_VALUE;
        }
        brickSize = (brickSize / blockSize) * blockSize;
    }
    brickArray = new BrickElement[brickCount];
    for ( int i = 0; i < brickCount; i++ )
    {
        BrickElement element = brickFactory.create( i );
        brickArray[i] = element;
    }
}
/**
 * Called during expanding of bricks where we see that we use too much
 * memory and need to release some windows.
 *
 * Releases up to {@code nr} of the least-requested mapped windows; a window
 * still in use is skipped rather than forced out.
 *
 * @param nr the number of windows to free.
 */
private void freeWindows( int nr )
{
    // Only called from expandBricks, so we're under a lock here
    if ( brickSize <= 0 )
    {
        // memory mapped turned off
        return;
    }
    ArrayList<BrickElement> mappedBricks = new ArrayList<BrickElement>();
    for ( int i = 0; i < brickCount; i++ )
    {
        BrickElement be = brickArray[i];
        if ( be.getWindow() != null )
        {
            be.snapshotHitCount();
            mappedBricks.add( be );
        }
    }
    // Least-hit bricks sort first, so those are the ones released.
    Collections.sort( mappedBricks, BRICK_SORTER );
    for ( int i = 0; i < nr && i < mappedBricks.size(); i++ )
    {
        BrickElement mappedBrick = mappedBricks.get( i );
        LockableWindow window = mappedBrick.getWindow();
        if ( window.writeOutAndCloseIfFree( readOnly ) )
        {
            mappedBrick.setWindow( null );
            memUsed -= brickSize;
        }
    }
}
/**
 * Go through the bricks and see if they are optimally placed, and change
 * accordingly. This happens whenever we see that there has been a certain
 * amount of brick misses since the last refresh.
 *
 * Only one thread performs the refresh at a time (guarded by the
 * {@code refreshing} CAS flag); concurrent callers record an averted
 * refresh and continue without blocking.
 */
private void refreshBricks()
{
    if ( brickMiss < REFRESH_BRICK_COUNT || brickSize <= 0 )
    {
        return;
    }
    if ( refreshing.compareAndSet( false, true ) )
    {
        // No one is doing refresh right now, go ahead and do it
        try
        {
            long t = System.currentTimeMillis();
            doRefreshBricks();
            refreshes.incrementAndGet();
            refreshTime.addAndGet( System.currentTimeMillis()-t );
        }
        finally
        {
            refreshing.set( false );
        }
    }
    else
    {
        // Another thread is doing refresh right now, trust it to refresh the bricks
        // and just go about my business.
        avertedRefreshes.incrementAndGet();
    }
}
/**
 * Performs the actual refresh: first maps as many of the most-requested
 * unmapped bricks as available memory allows, then swaps the least-requested
 * mapped bricks for more-requested unmapped ones. Only entered via
 * {@link #refreshBricks()} under its CAS guard; synchronized against other
 * structural operations.
 */
private synchronized void doRefreshBricks()
{
    brickMiss = 0;
    Pair<List<BrickElement>, List<BrickElement>> currentMappings = gatherMappedVersusUnmappedWindows();
    List<BrickElement> mappedBricks = currentMappings.first();
    List<BrickElement> unmappedBricks = currentMappings.other();
    // Fill up unused memory, i.e. map unmapped bricks as much as available memory allows
    // and request patterns signals. Start the loop from the end of the array where the
    // bricks with highest hit ratio are.
    int unmappedIndex = unmappedBricks.size() - 1;
    while ( memUsed + brickSize <= availableMem && unmappedIndex >= 0 )
    {
        BrickElement unmappedBrick = unmappedBricks.get( unmappedIndex-- );
        if ( unmappedBrick.getHit() == 0 )
        {
            // We have more memory available, but no more windows have actually
            // been requested so don't map unused random windows.
            return;
        }
        allocateNewWindow( unmappedBrick );
    }
    // Switch bad/unused mappings. Start iterating over mapped bricks
    // from the beginning (those with lowest hit ratio) and unmapped from the end
    // (or rather where the fill-up-unused-memory loop above left off) where we've
    // got the unmapped bricks with highest hit ratio.
    int mappedIndex = 0;
    while ( unmappedIndex >= 0 && mappedIndex < mappedBricks.size() )
    {
        BrickElement mappedBrick = mappedBricks.get( mappedIndex++ );
        BrickElement unmappedBrick = unmappedBricks.get( unmappedIndex-- );
        if ( mappedBrick.getHit() >= unmappedBrick.getHit() )
        {
            // We've passed a point where we don't have any unmapped brick
            // with a higher hit ratio then the lowest mapped brick. We're done.
            break;
        }
        LockableWindow window = mappedBrick.getWindow();
        if (window.writeOutAndCloseIfFree( readOnly ) )
        {
            mappedBrick.setWindow( null );
            memUsed -= brickSize;
            if ( allocateNewWindow( unmappedBrick ) )
            {
                switches++;
            }
        }
    }
}
/**
 * Goes through all bricks in this pool and divides them between mapped and unmapped,
 * i.e. those with a mapped persistence window assigned to it and those without.
 *
 * The two {@link List lists} coming back are also sorted where the first element
 * has got the lowest {@link BrickElement#getHit()} ratio, and the last the highest.
 *
 * As a side effect each brick's hit count is snapshotted (for sorting) and
 * then refreshed for the next period.
 *
 * @return all bricks in this pool divided into mapped and unmapped.
 */
private Pair<List<BrickElement>, List<BrickElement>> gatherMappedVersusUnmappedWindows()
{
    List<BrickElement> mappedBricks = new ArrayList<BrickElement>();
    List<BrickElement> unmappedBricks = new ArrayList<BrickElement>();
    for ( int i = 0; i < brickCount; i++ )
    {
        BrickElement be = brickArray[i];
        be.snapshotHitCount();
        if ( be.getWindow() != null )
        {
            mappedBricks.add( be );
        }
        else
        {
            unmappedBricks.add( be );
        }
        be.refresh();
    }
    Collections.sort( unmappedBricks, BRICK_SORTER );
    Collections.sort( mappedBricks, BRICK_SORTER );
    return Pair.of( mappedBricks, unmappedBricks );
}
/**
 * Called every time we request a brick that has a greater index than
 * the current brick count. This happens as the underlying file channel
 * grows as new blocks/records are added to it.
 *
 * New bricks are eagerly given windows while memory allows; one existing
 * window may be freed first to make room.
 *
 * @param newBrickCount the size to expand the brick count to.
 */
private synchronized void expandBricks( int newBrickCount )
{
    if ( newBrickCount > brickCount )
    {
        BrickElement tmpArray[] = new BrickElement[newBrickCount];
        System.arraycopy( brickArray, 0, tmpArray, 0, brickArray.length );
        if ( memUsed + brickSize >= availableMem )
        {
            freeWindows( 1 );
        }
        for ( int i = brickArray.length; i < tmpArray.length; i++ )
        {
            BrickElement be = brickFactory.create( i );
            tmpArray[i] = be;
            if ( memUsed + brickSize <= availableMem )
            {
                allocateNewWindow( be );
            }
        }
        brickArray = tmpArray;
        brickCount = tmpArray.length;
    }
}
/**
 * Allocates a new persistence window for the {@code brick}. Such an
 * allocation may fail with memory problems and such an error will be
 * caught and logged as well as a counter incremented. It's OK if
 * a memory mapping fails, because we can fall back on temporary
 * {@link PersistenceRow persistence rows}.
 *
 * @param brick the {@link BrickElement} to allocate a new window for.
 * @return {@code true} if the window was successfully allocated,
 * otherwise {@code false}.
 */
boolean allocateNewWindow( BrickElement brick )
{
    try
    {
        LockableWindow window = null;
        if ( useMemoryMapped )
        {
            window = new MappedPersistenceWindow(
                brickIndexToPosition( brick.index() ), blockSize,
                brickSize, fileChannel, mapMode );
        }
        else
        {
            // Non-mapped fallback: read the whole region into an in-heap buffer.
            PlainPersistenceWindow dpw =
                new PlainPersistenceWindow(
                    brickIndexToPosition( brick.index() ),
                    blockSize, brickSize, fileChannel );
            dpw.readFullWindow();
            window = dpw;
        }
        while ( true )
        {
            /*
             * This is a busy wait, given that rows are kept for a very short time. What we do is lock the brick so
             * no rows can be mapped over it, then we wait for every row mapped over it to be released (which can
             * happen because releasing the row does not acquire a lock). When that happens, we can go ahead and
             * map the window here. If a thread was waiting for this lock, after it acquires it, it will discover
             * the window in place, so it will never allocate a row.
             */
            synchronized ( brick )
            {
                if ( brick.lockCount.get() == 0 )
                {
                    brick.setWindow( window );
                    break;
                }
            }
        }
        // Accounted for even for mapped windows; used by free/refresh decisions.
        memUsed += brickSize;
        return true;
    }
    catch ( MappedMemException e )
    {
        ooe++;
        logWarn( "Unable to memory map", e );
    }
    catch ( OutOfMemoryError e )
    {
        ooe++;
        logWarn( "Unable to allocate direct buffer", e );
    }
    return false;
}
/**
 * Logs a one-line summary of this pool's memory-mapping configuration:
 * brick count, brick size, mapped memory budget and current store file size.
 *
 * @throws UnderlyingStorageException if the file size cannot be read.
 */
private void dumpStatus()
{
    try
    {
        StringBuilder status = new StringBuilder( "[" );
        status.append( storeName ).append( "] brickCount=" ).append( brickCount );
        status.append( " brickSize=" ).append( brickSize ).append( "b mappedMem=" );
        status.append( availableMem ).append( "b (storeSize=" );
        status.append( fileChannel.size() ).append( "b)" );
        log.info( status.toString() );
    }
    catch ( IOException e )
    {
        throw new UnderlyingStorageException(
            "Unable to get file size for " + storeName, e );
    }
}
/** Logs a warning prefixed with this pool's store name. */
private void logWarn( String logMessage )
{
    log.warn( "[" + storeName + "] " + logMessage );
}

/** Logs a warning with a cause, prefixed with this pool's store name. */
private void logWarn( String logMessage, Throwable cause )
{
    log.warn( "[" + storeName + "] " + logMessage, cause );
}
/**
 * Returns a snapshot of this pool's statistics: memory configuration and
 * usage, hit/miss/OOM counters, window switches and refresh timings.
 * Guards against division by zero when no refresh has happened yet.
 */
@Override
public WindowPoolStats getStats()
{
    int avgRefreshTime = refreshes.get() == 0 ? 0 : (int)(refreshTime.get()/refreshes.get());
    return new WindowPoolStats( storeName, availableMem, memUsed, brickCount,
        brickSize, hit, miss, ooe, switches, avgRefreshTime, refreshes.get(), avertedRefreshes.get() );
}
/**
 * Sorts {@link BrickElement} by their {@link BrickElement#getHit()} ratio.
 * Lowest hit ratio will make that brick end up at a lower index in list,
 * so the least requested will be at the beginning and the most requested at the end.
 */
private static final Comparator<BrickElement> BRICK_SORTER = new Comparator<BrickElement>()
{
    @Override
    public int compare( BrickElement o1, BrickElement o2 )
    {
        // Integer.compare avoids the overflow that plain subtraction
        // (a - b) suffers when the two snapshots differ by more than
        // Integer.MAX_VALUE, which would invert the ordering.
        return Integer.compare( o1.getHitCountSnapshot(), o2.getHitCountSnapshot() );
    }
};
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PersistenceWindowPool.java
|
1,158
|
// Boolean property: the value is inlined in the lowest bit of the single value block.
BOOL( 1 )
{
    @Override
    public DefinedProperty readProperty( int propertyKeyId, PropertyBlock block, Provider<PropertyStore> store )
    {
        return Property.booleanProperty( propertyKeyId, getValue( block.getSingleValueLong() ) );
    }

    @Override
    public Object getValue( PropertyBlock block, PropertyStore store )
    {
        return getValue( block.getSingleValueLong() );
    }

    // Lowest bit of the value block encodes the boolean.
    private boolean getValue( long propBlock )
    {
        return ( propBlock & 0x1 ) == 1;
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyType.java
|
1,159
|
{
@Override
public Object call() throws Exception
{
return getValue( block, store.instance() );
}
});
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyType.java
|
1,160
|
/**
 * Unit tests for {@link PersistenceRow}: interaction between buffer offset
 * changes and flushing, and the dirty/clean life cycle around write locks
 * and {@code force()}.
 */
public class PersistenceRowTest
{
    private static final Random RANDOM = new Random();
    private static final int RECORD_SIZE = 7;
    private PersistenceRow window;
    private StoreFileChannel realChannel;

    /** Creates a fresh file-backed row, write-locked, for each test. */
    @Before
    public void before() throws Exception
    {
        File directory = new File( "target/test-data" );
        directory.mkdirs();
        String filename = new File( directory, UUID.randomUUID().toString() ).getAbsolutePath();
        RandomAccessFile file = new RandomAccessFile( filename, "rw" );
        realChannel = new StoreFileChannel( file.getChannel() );
        window = new PersistenceRow( 0, RECORD_SIZE, realChannel );
        window.lock( OperationType.WRITE );
    }

    @Test
    public void shouldNotLetChangesToOffsetInterfereWithFlushing() throws Exception
    {
        // Hammer the buffer offset from a background thread while this thread
        // flushes repeatedly. The loop is intentionally endless, so the thread
        // is marked as a daemon — otherwise it would keep the JVM alive (and a
        // core spinning) after the test method returns.
        Thread offsetMangler = new Thread( new Runnable()
        {
            @Override
            public void run()
            {
                while ( true )
                {
                    // modify buffer's position "because we can" - this is used in several places,
                    // including Buffer.getOffsettedBuffer which in turn is also used in several places
                    window.getBuffer().setOffset( RANDOM.nextInt( window.getBuffer().getBuffer().limit() ) );
                }
            }
        } );
        offsetMangler.setDaemon( true );
        offsetMangler.start();
        try
        {
            for ( int i = 1; i < 10000; i++ )
            {
                window.force();
            }
        }
        catch ( BufferOverflowException e )
        {
            fail( "Changing the state of the buffer's flags should not affect flushing" );
        }
    }

    @Test
    public void shouldNotLetFlushingInterfereWithReads() throws Exception
    {
        window.getBuffer().get();
        window.getBuffer().get();
        window.getBuffer().get();
        window.getBuffer().get();
        // ad infinitum, or at least up to RECORD_SIZE
        assertThat( window.getBuffer().getBuffer().position(), is( 4 ) );
        // then a flush comes along...
        window.force();
        // ...and the read position must be preserved.
        assertThat( window.getBuffer().getBuffer().position(), is( 4 ) );
    }

    @Test
    public void grabbingWriteLockShouldMarkRowAsDirty() throws Exception
    {
        // GIVEN a channel and a row over it
        StoreFileChannel channel = spy( realChannel );
        PersistenceRow row = new PersistenceRow( 0, 1, channel );
        // WHEN you grab a write lock
        row.lock( OperationType.WRITE );
        // THEN it should be dirty
        assertThat( "Dirty before force", row.isDirty(), is( true ) );
    }

    @Test
    public void forcingARowShouldMarkItAsClean() throws Exception
    {
        // GIVEN a channel and a row over it
        StoreFileChannel channel = spy( realChannel );
        PersistenceRow row = new PersistenceRow( 0, 1, channel );
        // WHEN you grab a write lock and force
        row.lock( OperationType.WRITE );
        row.force();
        // THEN the row should be marked clean and a call made to write to the buffer
        assertThat( "Dirty after force", row.isDirty(), is( false ) );
        verify( channel, times( 1 ) ).write( any( ByteBuffer.class), anyLong() );
        // WHEN you subsequently force again
        row.force();
        // THEN no writes should go through and the status should remain clean
        verify( channel, times( 1 ) ).write( any( ByteBuffer.class), anyLong() );
        assertThat( "Dirty after non-flushing force", row.isDirty(), is( false ) );
    }

    @Test
    public void explicitlyMarkingAsCleanShouldDoSo() throws Exception
    {
        // GIVEN a channel and a row over it
        StoreFileChannel channel = spy( realChannel );
        PersistenceRow row = new PersistenceRow( 0, 1, channel );
        // WHEN you grab a write lock, marking as dirty
        row.lock( OperationType.WRITE );
        // and then manually mark is as clean
        row.setClean();
        // THEN it should be non-dirty
        assertThat( "Dirty after setting clean", row.isDirty(), is( false ) );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_store_PersistenceRowTest.java
|
1,161
|
// String short enough to be encoded inline in the property record's value
// blocks via LongerShortString — no dynamic string record needed.
SHORT_STRING( 11 )
{
    @Override
    public DefinedProperty readProperty( int propertyKeyId, PropertyBlock block, Provider<PropertyStore> store )
    {
        return Property.stringProperty( propertyKeyId, LongerShortString.decode( block ) );
    }

    @Override
    public Object getValue( PropertyBlock block, PropertyStore store )
    {
        return LongerShortString.decode( block );
    }

    @Override
    public int calculateNumberOfBlocksUsed( long firstBlock )
    {
        // Block usage depends on the encoded string length; delegated to the codec.
        return LongerShortString.calculateNumberOfBlocksUsed( firstBlock );
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyType.java
|
1,162
|
{
@Override
protected R underlyingObjectToObject( Long id )
{
return store.forceGetRecord( id );
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_RecordStore.java
|
1,163
|
{
final PrimitiveLongIterator ids = new StoreIdIterator( store );
@Override
protected R fetchNextOrNull()
{
scan: while ( ids.hasNext() && continueScanning )
{
R record = getRecord( store, ids.next() );
for ( Predicate<? super R> filter : filters )
{
if ( !filter.accept( record ) ) continue scan;
}
return record;
}
return null;
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_RecordStore.java
|
1,164
|
{
@Override
public Iterator<R> iterator()
{
return new PrefetchingIterator<R>()
{
final PrimitiveLongIterator ids = new StoreIdIterator( store );
@Override
protected R fetchNextOrNull()
{
scan: while ( ids.hasNext() && continueScanning )
{
R record = getRecord( store, ids.next() );
for ( Predicate<? super R> filter : filters )
{
if ( !filter.accept( record ) ) continue scan;
}
return record;
}
return null;
}
};
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_RecordStore.java
|
1,165
|
/**
 * Visitor-style processor over record stores. Subclasses override the
 * {@code process*} methods for the record types they care about; by default
 * every method funnels into {@link #processRecord(Class, RecordStore,
 * AbstractBaseRecord)}, which throws {@link UnsupportedOperationException}
 * for record types the concrete processor does not handle.
 *
 * @param <FAILURE> the checked exception type a concrete processor may throw.
 */
@SuppressWarnings("unchecked")
abstract class Processor<FAILURE extends Exception>
{
    // Have it volatile so that it can be stopped from a different thread.
    private volatile boolean continueScanning = true;

    /** Asks any ongoing {@link #scan(RecordStore, Predicate...)} iteration to stop. */
    public void stopScanning()
    {
        continueScanning = false;
    }

    public void processSchema( RecordStore<DynamicRecord> store, DynamicRecord schema ) throws FAILURE
    {
        processRecord( DynamicRecord.class, store, schema );
    }

    public void processNode( RecordStore<NodeRecord> store, NodeRecord node ) throws FAILURE
    {
        processRecord( NodeRecord.class, store, node );
    }

    public void processRelationship( RecordStore<RelationshipRecord> store, RelationshipRecord rel ) throws FAILURE
    {
        processRecord( RelationshipRecord.class, store, rel );
    }

    public void processProperty( RecordStore<PropertyRecord> store, PropertyRecord property ) throws FAILURE
    {
        processRecord( PropertyRecord.class, store, property );
    }

    // idType is unused by the default implementation; kept for subclasses.
    public void processString( RecordStore<DynamicRecord> store, DynamicRecord string,
            @SuppressWarnings( "deprecation") IdType idType ) throws FAILURE
    {
        processDynamic( store, string );
    }

    public void processArray( RecordStore<DynamicRecord> store, DynamicRecord array ) throws FAILURE
    {
        processDynamic( store, array );
    }

    public void processLabelArrayWithOwner( RecordStore<DynamicRecord> store, DynamicRecord labelArray )
            throws FAILURE
    {
        processDynamic( store, labelArray );
    }

    protected void processDynamic( RecordStore<DynamicRecord> store, DynamicRecord record ) throws FAILURE
    {
        processRecord( DynamicRecord.class, store, record );
    }

    public void processRelationshipTypeToken( RecordStore<RelationshipTypeTokenRecord> store,
            RelationshipTypeTokenRecord record ) throws FAILURE
    {
        processRecord( RelationshipTypeTokenRecord.class, store, record );
    }

    public void processPropertyKeyToken( RecordStore<PropertyKeyTokenRecord> store, PropertyKeyTokenRecord record ) throws FAILURE
    {
        processRecord( PropertyKeyTokenRecord.class, store, record );
    }

    public void processLabelToken( RecordStore<LabelTokenRecord> store, LabelTokenRecord record ) throws FAILURE
    {
        processRecord(LabelTokenRecord.class, store, record);
    }

    /**
     * Fallback for record types the concrete processor does not override;
     * always throws to surface the unexpected record type.
     */
    @SuppressWarnings("UnusedParameters")
    protected <R extends AbstractBaseRecord> void processRecord( Class<R> type, RecordStore<R> store, R record ) throws FAILURE
    {
        throw new UnsupportedOperationException( this + " does not process "
                + type.getSimpleName().replace( "Record", "" ) + " records" );
    }

    /**
     * Lazily iterates all records in {@code store} that pass every filter.
     * Iteration honors {@link #stopScanning()} between records.
     */
    @SafeVarargs
    public final <R extends AbstractBaseRecord> Iterable<R> scan( final RecordStore<R> store,
            final Predicate<? super R>... filters )
    {
        return new Iterable<R>()
        {
            @Override
            public Iterator<R> iterator()
            {
                return new PrefetchingIterator<R>()
                {
                    final PrimitiveLongIterator ids = new StoreIdIterator( store );

                    @Override
                    protected R fetchNextOrNull()
                    {
                        scan: while ( ids.hasNext() && continueScanning )
                        {
                            R record = getRecord( store, ids.next() );
                            for ( Predicate<? super R> filter : filters )
                            {
                                if ( !filter.accept( record ) ) continue scan;
                            }
                            return record;
                        }
                        return null;
                    }
                };
            }
        };
    }

    // forceGetRecord: reads the record even if it is not in use.
    protected <R extends AbstractBaseRecord> R getRecord( RecordStore<R> store, long id )
    {
        return store.forceGetRecord( id );
    }

    /** Lazily maps the given ids to their (force-read) records. */
    public static <R extends AbstractBaseRecord> Iterable<R> scanById( final RecordStore<R> store,
            Iterable<Long> ids )
    {
        return new IterableWrapper<R, Long>( ids )
        {
            @Override
            protected R underlyingObjectToObject( Long id )
            {
                return store.forceGetRecord( id );
            }
        };
    }

    /** Dispatches this processor over the records with the given ids. */
    public <R extends AbstractBaseRecord> void applyById( RecordStore<R> store, Iterable<Long> ids ) throws FAILURE
    {
        for ( R record : scanById( store, ids ) )
            store.accept( this, record );
    }

    public <R extends AbstractBaseRecord> void applyFiltered( RecordStore<R> store, Predicate<? super R>... filters ) throws FAILURE
    {
        apply( store, ProgressListener.NONE, filters );
    }

    public <R extends AbstractBaseRecord> void applyFiltered( RecordStore<R> store, ProgressListener progressListener,
            Predicate<? super R>... filters ) throws FAILURE
    {
        apply( store, progressListener, filters );
    }

    /** Scans the store and dispatches each matching record, reporting progress. */
    private <R extends AbstractBaseRecord> void apply( RecordStore<R> store, ProgressListener progressListener,
            Predicate<? super R>... filters ) throws FAILURE
    {
        for ( R record : scan( store, filters ) )
        {
            store.accept( this, record );
            progressListener.set( record.getLongId() );
        }
        progressListener.done();
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_RecordStore.java
|
1,166
|
{
@Override
public boolean accept( AbstractBaseRecord item )
{
return item.inUse();
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_RecordStore.java
|
1,167
|
/**
 * Immutable value holder describing one row of the generated table:
 * store file name, record size in bytes, and a human-readable description.
 */
private static class Store
{
    final String simpleFileName;
    final int recordSize;
    final String contentsDescription;

    Store( String simpleFileName, int recordSize, String contentsDescription )
    {
        this.simpleFileName = simpleFileName;
        this.recordSize = recordSize;
        this.contentsDescription = contentsDescription;
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_store_RecordSizesDocTest.java
|
1,168
|
/**
 * Generates the AsciiDoc table of store file record sizes that is included
 * in the "ops" section of the documentation.
 */
public class RecordSizesDocTest
{
    public final @Rule DocsIncludeFile writer = DocsIncludeFile.inSection( "ops" );

    @Test
    public void record_sizes_table() throws Exception
    {
        writer.println( "[options=\"header\",cols=\"<45,>20m,<35\", width=\"80%\"]" );
        writer.println( "|======================================" );
        writer.println( "| Store file | Record size | Contents" );
        for ( Store store : asList(
            store( NODE_STORE_NAME, NodeStore.RECORD_SIZE, "Nodes" ),
            store( RELATIONSHIP_STORE_NAME, RelationshipStore.RECORD_SIZE, "Relationships" ),
            store( PROPERTY_STORE_NAME, PropertyStore.RECORD_SIZE, "Properties for nodes and relationships" ),
            dynamicStore( PROPERTY_STRINGS_STORE_NAME, string_block_size, "Values of string properties" ),
            dynamicStore( PROPERTY_ARRAYS_STORE_NAME, array_block_size, "Values of array properties" )
        ) )
        {
            writer.printf( "| %s | %d B | %s%n", store.simpleFileName, store.recordSize, store.contentsDescription );
        }
        writer.println( "|======================================" );
        writer.println();
    }

    /** Row for a dynamic store whose record size is its configured block size plus header. */
    private static Store dynamicStore( String storeFileName, Setting<Integer> blockSizeSetting, String contentsDescription )
    {
        return store( storeFileName, defaultDynamicSize( blockSizeSetting ), contentsDescription );
    }

    private static Store store( String storeFileName, int recordSize, String contentsDescription )
    {
        return new Store( NeoStore.DEFAULT_NAME + storeFileName, recordSize, contentsDescription );
    }

    // Dynamic record size = block header + the setting's default block size.
    private static int defaultDynamicSize( Setting<Integer> setting )
    {
        return AbstractDynamicStore.BLOCK_HEADER_SIZE + Integer.parseInt( setting.getDefaultValue() );
    }

    /**
     * Immutable value holder describing one row of the generated table:
     * store file name, record size in bytes, and a human-readable description.
     */
    private static class Store
    {
        final String simpleFileName;
        final int recordSize;
        final String contentsDescription;

        Store( String simpleFileName, int recordSize, String contentsDescription )
        {
            this.simpleFileName = simpleFileName;
            this.recordSize = recordSize;
            this.contentsDescription = contentsDescription;
        }
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_store_RecordSizesDocTest.java
|
1,169
|
/**
 * Accumulates {@link RecordSerializable} items and serializes them into a
 * single byte array, back to back, in append order.
 */
public class RecordSerializer
{
    private final List<RecordSerializable> serializables = new ArrayList<RecordSerializable>();

    /**
     * Adds an item to be included in the next {@link #serialize()} call.
     *
     * @return this serializer, for chaining.
     */
    public RecordSerializer append( RecordSerializable serializable )
    {
        this.serializables.add( serializable );
        return this;
    }

    /**
     * @return a byte array containing every appended item serialized in order.
     */
    public byte[] serialize()
    {
        // First pass: sum the lengths so the target array can be sized exactly.
        // (The original also stored each length in an array that was never read.)
        int totalLength = 0;
        for ( RecordSerializable serializable : serializables )
        {
            totalLength += serializable.length();
        }
        byte[] array = new byte[totalLength];
        ByteBuffer target = ByteBuffer.wrap( array );
        // Second pass: write each item back to back into the shared buffer.
        for ( RecordSerializable serializable : serializables )
        {
            serializable.serialize( target );
        }
        return array;
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_RecordSerializer.java
|
1,170
|
/**
 * {@code IdGenerator} for read-only databases: exposes the current high id
 * and id counts, while every mutating operation throws
 * {@code ReadOnlyDbException}.
 */
public class ReadOnlyIdGenerator implements IdGenerator
{
    private final String fileName;
    private final long highId;

    public ReadOnlyIdGenerator( String fileName, long highId )
    {
        this.fileName = fileName;
        this.highId = highId;
    }

    /** Not supported: ids cannot be allocated in read-only mode. */
    public long nextId()
    {
        throw new ReadOnlyDbException();
    }

    /** Not supported: ids cannot be allocated in read-only mode. */
    public IdRange nextIdBatch( int size )
    {
        throw new ReadOnlyDbException();
    }

    /** Not supported: the high id is fixed in read-only mode. */
    public void setHighId( long id )
    {
        throw new ReadOnlyDbException();
    }

    public long getHighId()
    {
        return highId;
    }

    /** Not supported: ids cannot be freed in read-only mode. */
    public void freeId( long id )
    {
        throw new ReadOnlyDbException();
    }

    // Closing is a no-op; there is no state to flush.
    @Override
    public void close()
    {
    }

    public String getFileName()
    {
        return this.fileName;
    }

    // NOTE(review): reports highId as the in-use count, i.e. it ignores any
    // defragmented/free ids — consistent with getDefragCount() returning 0.
    public long getNumberOfIdsInUse()
    {
        return highId;
    }

    public long getDefragCount()
    {
        return 0;
    }

    /** Not supported: the id file cannot be deleted in read-only mode. */
    @Override
    public void delete()
    {
        throw new ReadOnlyDbException();
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_ReadOnlyIdGenerator.java
|
1,171
|
{
@Override
public String call() throws Exception
{
return getValue( block, store.instance() );
}
});
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyType.java
|
1,172
|
// String stored in the dynamic string store. Reading is lazy: the dynamic
// records are only fetched when the property value is actually requested.
STRING( 9 )
{
    @Override
    public DefinedProperty readProperty( int propertyKeyId, final PropertyBlock block,
                                         final Provider<PropertyStore> store )
    {
        return Property.lazyStringProperty(propertyKeyId, new Callable<String>()
        {
            @Override
            public String call() throws Exception
            {
                return getValue( block, store.instance() );
            }
        });
    }

    @Override
    public String getValue( PropertyBlock block, PropertyStore store )
    {
        // Without a store there is nothing to resolve the dynamic value against.
        if ( store == null )
        {
            return null;
        }
        return store.getStringFor( block );
    }

    @Override
    byte[] readDynamicRecordHeader( byte[] recordBytes )
    {
        return EMPTY_BYTE_ARRAY;
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyType.java
|
1,173
|
// Double property: the raw IEEE-754 bits occupy the second value block,
// so a double always uses two blocks.
DOUBLE( 8 )
{
    @Override
    public DefinedProperty readProperty( int propertyKeyId, PropertyBlock block, Provider<PropertyStore> store )
    {
        return Property.doubleProperty( propertyKeyId, Double.longBitsToDouble( block.getValueBlocks()[1] ) );
    }

    @Override
    public Object getValue( PropertyBlock block, PropertyStore store )
    {
        return Double.valueOf( getValue( block.getValueBlocks()[1] ) );
    }

    private double getValue( long propBlock )
    {
        return Double.longBitsToDouble( propBlock );
    }

    @Override
    public int calculateNumberOfBlocksUsed( long firstBlock )
    {
        return 2;
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyType.java
|
1,174
|
// Float property: the raw 32-bit IEEE-754 bits fit inline in a single block.
FLOAT( 7 )
{
    @Override
    public DefinedProperty readProperty( int propertyKeyId, PropertyBlock block, Provider<PropertyStore> store )
    {
        return Property.floatProperty( propertyKeyId, Float.intBitsToFloat( block.getSingleValueInt() ) );
    }

    @Override
    public Object getValue( PropertyBlock block, PropertyStore store )
    {
        return Float.valueOf( getValue( block.getSingleValueInt() ) );
    }

    private float getValue( int propBlock )
    {
        return Float.intBitsToFloat( propBlock );
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyType.java
|
1,175
|
// Long property: small values are inlined in the first block (flag bit set,
// value shifted left by one); larger values spill into a second block.
LONG( 6 )
{
    @Override
    public DefinedProperty readProperty( int propertyKeyId, PropertyBlock block, Provider<PropertyStore> store )
    {
        long firstBlock = block.getSingleValueBlock();
        // Inlined value carries the flag in its lowest bit, hence the shift.
        long value = valueIsInlined( firstBlock ) ? (block.getSingleValueLong() >>> 1) : block.getValueBlocks()[1];
        return Property.longProperty( propertyKeyId, value );
    }

    @Override
    public Object getValue( PropertyBlock block, PropertyStore store )
    {
        return Long.valueOf( getLongValue( block ) );
    }

    private long getLongValue( PropertyBlock block )
    {
        long firstBlock = block.getSingleValueBlock();
        return valueIsInlined( firstBlock ) ? (block.getSingleValueLong() >>> 1) :
            block.getValueBlocks()[1];
    }

    private boolean valueIsInlined( long firstBlock )
    {
        // [][][][][ i,tttt][kkkk,kkkk][kkkk,kkkk][kkkk,kkkk]
        return (firstBlock & 0x10000000L) > 0;
    }

    @Override
    public int calculateNumberOfBlocksUsed( long firstBlock )
    {
        return valueIsInlined( firstBlock ) ? 1 : 2;
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyType.java
|
1,176
|
// Int property: value fits inline in a single block.
INT( 5 )
{
    @Override
    public DefinedProperty readProperty( int propertyKeyId, PropertyBlock block, Provider<PropertyStore> store )
    {
        return Property.intProperty( propertyKeyId, block.getSingleValueInt() );
    }

    @Override
    public Object getValue( PropertyBlock block, PropertyStore store )
    {
        return Integer.valueOf( block.getSingleValueInt() );
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyType.java
|
1,177
|
// Char property: stored as a 16-bit short inline in a single block.
CHAR( 4 )
{
    @Override
    public DefinedProperty readProperty( int propertyKeyId, PropertyBlock block, Provider<PropertyStore> store )
    {
        return Property.charProperty( propertyKeyId, (char) block.getSingleValueShort() );
    }

    @Override
    public Object getValue( PropertyBlock block, PropertyStore store )
    {
        return Character.valueOf( (char) block.getSingleValueShort() );
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyType.java
|
1,178
|
// Short property: value fits inline in a single block.
SHORT( 3 )
{
    @Override
    public DefinedProperty readProperty( int propertyKeyId, PropertyBlock block, Provider<PropertyStore> store )
    {
        return Property.shortProperty( propertyKeyId, block.getSingleValueShort() );
    }

    @Override
    public Object getValue( PropertyBlock block, PropertyStore store )
    {
        return Short.valueOf( block.getSingleValueShort() );
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyType.java
|
1,179
|
// Byte property: value fits inline in a single block.
BYTE( 2 )
{
    @Override
    public DefinedProperty readProperty( int propertyKeyId, PropertyBlock block, Provider<PropertyStore> store )
    {
        return Property.byteProperty( propertyKeyId, block.getSingleValueByte() );
    }

    @Override
    public Object getValue( PropertyBlock block, PropertyStore store )
    {
        return Byte.valueOf( block.getSingleValueByte() );
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyType.java
|
1,180
|
// Array small enough to be encoded inline in the property record's value
// blocks via ShortArray — no dynamic array record needed.
SHORT_ARRAY( 12 )
{
    @Override
    public DefinedProperty readProperty( int propertyKeyId, PropertyBlock block, Provider<PropertyStore> store )
    {
        // TODO: Specialize per type
        return Property.property( propertyKeyId, ShortArray.decode(block) );
    }

    @Override
    public Object getValue( PropertyBlock block, PropertyStore store )
    {
        return ShortArray.decode( block );
    }

    @Override
    public int calculateNumberOfBlocksUsed( long firstBlock )
    {
        // Block usage depends on the encoded array; delegated to the codec.
        return ShortArray.calculateNumberOfBlocksUsed( firstBlock );
    }
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_PropertyType.java
|
1,181
|
{
@Override
public void run()
{
while ( true )
{
// modify buffer's position "because we can" - this is used in several places,
// including Buffer.getOffsettedBuffer which in turn is also used in several places
window.getBuffer().setOffset( RANDOM.nextInt( window.getBuffer().getBuffer().limit() ) );
}
}
} ).start();
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_store_PersistenceRowTest.java
|
1,182
|
{
@Override
public void run()
{
try
{
// Wait for theEvilOne to grab the lock on the row
theBreakingOneHasLockedTheRow.await();
/*
* Because of the setup theTriggeringOne did, this will do a refreshBricks() and read in a
* LockableWindow instead of a PersistenceRow.
*/
LockableWindow window = (LockableWindow) pool.acquire( 1, OperationType.WRITE );
// Write the new stuff in - that will be overwritten by the flush when theEvilOne releases
window.getOffsettedBuffer( 1 ).put( new byte[]{5, 6, 7, 8} );
// Release the lock - not really necessary, just good form
pool.release( window );
// Allow theEvilOne to continue
theOverwrittenOneHasWrittenItsChanges.countDown();
}
catch ( Exception e )
{
throw new RuntimeException( e );
}
}
});
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_store_PersistenceRowAndWindowDirtyWriteIT.java
|
1,183
|
{ // setup: create the node with the property that we will remove
@Override
public Node call() throws Exception
{
Node node = graphdb.createNode();
node.setProperty( "key", "original" );
return node;
}
} );
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_core_TestRaceOnMultipleNodeImpl.java
|
1,184
|
/**
 * Thrown when memory mapping a region of a store file fails.
 */
class MappedMemException extends RuntimeException
{
    /**
     * @param message description of the failed mapping attempt.
     * @param cause the underlying failure.
     */
    MappedMemException( String message, Throwable cause )
    {
        super( message, cause );
    }

    /**
     * @param cause the underlying failure.
     */
    MappedMemException( Throwable cause )
    {
        super( cause );
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_MappedMemException.java
|
1,185
|
// 6-bit encoding covering space, upper- and lower-case letters and a set of
// punctuation characters (decoded via decPunctuation).
ALPHASYM( 8, 6 )
{
    @Override
    char decTranslate( byte codePoint )
    {
        // Layout: 0x0 space, 0x1-0x1A 'A'-'Z', 0x1B-0x1F punctuation,
        // 0x20 ';', 0x21-0x3A 'a'-'z', rest punctuation.
        if ( codePoint == 0x0 ) return ' ';
        if ( codePoint <= 0x1A ) return (char)('A' + codePoint - 0x1);
        if ( codePoint <= 0x1F ) return decPunctuation( codePoint - 0x1B + 1 );
        if ( codePoint == 0x20 ) return ';';
        if ( codePoint <= 0x3A ) return (char)('a' + codePoint - 0x21);
        return decPunctuation( codePoint - 0x3B + 9 );
    }

    @Override
    int encTranslate( byte b )
    {
        // Punctuation is in the same places as European
        if ( b < 0x20 ) return encPunctuation( b ); // Punctuation
        // But the rest is transposed by 0x40
        // return EUROPEAN.encTranslate( b ) - 0x40;
        return b - 0x40;
    }

    @Override
    int encPunctuation( byte b )
    {
        switch ( b )
        {
        case 0x0: return 0x0;
        case 0x1: return 0x1B;
        case 0x2: return 0x1C;
        case 0x3: return 0x1D;
        case 0x4: return 0x1E;
        case 0x5: return 0x1F;
        case 0x6: return 0x3B;
        case 0x7: return 0x3C;
        case 0x8: return 0x3D;
        case 0x9: return 0x3E;
        case 0xA: return 0x3F;
        case 0xB: return 0x20;
        default: throw cannotEncode( b );
        }
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_LongerShortString.java
|
1,186
|
// 6-bit encoding for letters plus space/underscore; decoding delegates to the
// EUROPEAN table shifted by 0x40.
ALPHANUM( 7, 6 )
{
    @Override
    char decTranslate( byte codePoint )
    {
        return EUROPEAN.decTranslate( (byte) ( codePoint + 0x40 ) );
    }
    @Override
    int encTranslate( byte b )
    {
        // Punctuation is in the same places as European
        if ( b < 0x20 ) return encPunctuation( b ); // Punctuation
        // But the rest is transposed by 0x40
        return EUROPEAN.encTranslate( b ) - 0x40;
    }
    @Override
    int encPunctuation( byte b )
    {
        // Only space and underscore are representable in this table.
        switch ( b )
        {
            case 0:
                return 0x00; // SPACE
            case 1:
                return 0x20; // UNDERSCORE
            default:
                throw cannotEncode( b );
        }
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_LongerShortString.java
|
1,187
|
// 6-bit encoding for URI-like strings: lower-case letters, digits and a set
// of symbols; all mapping is done in encTranslate directly.
URI( 6, 6 )
{
    @Override
    int encTranslate( byte b )
    {
        if ( b == 0 ) return 0; // space
        if ( b >= 0x61 && b <= 0x7A ) return b - 0x60; // lower-case letters
        if ( b >= 0x30 && b <= 0x39 ) return b - 0x10; // digits
        if ( b >= 0x1 && b <= 0x16 ) return b + 0x29; // symbols
        throw cannotEncode( b );
    }
    @Override
    int encPunctuation( byte b )
    {
        // Handled by encTranslate
        throw cannotEncode( b );
    }
    @Override
    char decTranslate( byte codePoint )
    {
        if ( codePoint == 0 ) return ' ';
        // 0x01-0x1A -> 'a'-'z'
        if ( codePoint <= 0x1A ) return (char) ( codePoint + 'a' - 1 );
        // 0x20-0x29 -> '0'-'9'
        if ( codePoint <= 0x29 ) return (char) (codePoint - 0x20 + '0');
        if ( codePoint <= 0x2E ) return decPunctuation( codePoint - 0x29 );
        return decPunctuation( codePoint - 0x2F + 9);
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_LongerShortString.java
|
1,188
|
// 5-bit encoding for email-like strings: lower-case letters plus a handful of
// punctuation; note that code point 0 decodes to ',' (not space) here.
EMAIL( 5, 5 )
{
    @Override
    int encTranslate( byte b )
    {
        // Reuse the superclass mapping, shifted down into 5-bit range.
        return super.encTranslate(b) - 0x60;
    }
    @Override
    int encPunctuation( byte b )
    {
        int encOffset = 0x60;
        if ( b == 7 ) return encOffset;
        int offset = encOffset + 0x1B;
        switch ( b )
        {
        case 1: return 0 + offset;
        case 2: return 1 + offset;
        case 3: return 2 + offset;
        case 6: return 3 + offset;
        case 9: return 4 + offset;
        default: throw cannotEncode( b );
        }
    }
    @Override
    char decTranslate( byte codePoint )
    {
        if ( codePoint == 0 ) return ',';
        // 0x01-0x1A -> 'a'-'z'
        if ( codePoint <= 0x1A ) return (char) ( codePoint + 'a' - 1 );
        switch ( codePoint )
        {
        case 0x1E: return '+';
        case 0x1F: return '@';
        default: return decPunctuation( codePoint - 0x1A );
        }
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_LongerShortString.java
|
1,189
|
// 5-bit encoding for lower-case letters plus punctuation.
LOWER( 4, 5 )
{
    @Override
    int encTranslate( byte b )
    {
        // Reuse the superclass mapping, shifted down into 5-bit range.
        return super.encTranslate(b) - 0x60;
    }
    @Override
    int encPunctuation( byte b )
    {
        return b == 0 ? 0x60 : b + 0x7a;
    }
    @Override
    char decTranslate( byte codePoint )
    {
        if ( codePoint == 0 ) return ' ';
        // 0x01-0x1A -> 'a'-'z'
        if ( codePoint <= 0x1A ) return (char) ( codePoint + 'a' - 1 );
        return decPunctuation( codePoint - 0x1A );
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_LongerShortString.java
|
1,190
|
// 5-bit encoding for upper-case letters plus punctuation; mirrors LOWER but
// shifted by 0x40 on the encoding side.
UPPER( 3, 5 )
{
    @Override
    int encTranslate( byte b )
    {
        return super.encTranslate(b) - 0x40;
    }
    @Override
    int encPunctuation( byte b )
    {
        return b == 0 ? 0x40 : b + 0x5a;
    }
    @Override
    char decTranslate( byte codePoint )
    {
        if ( codePoint == 0 ) return ' ';
        // 0x01-0x1A -> 'A'-'Z'
        if ( codePoint <= 0x1A ) return (char) ( codePoint + 'A' - 1 );
        return decPunctuation( codePoint - 0x1A );
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_LongerShortString.java
|
1,191
|
// 4-bit encoding for date-like strings: digits map directly to 0x0-0x9 and
// the remaining six code points cover separators (space - : / + ,).
DATE( 2, 4 )
{
    @Override
    int encTranslate( byte b )
    {
        if ( b >= '0' && b <= '9' ) return b - '0';
        // Remaining cases map intermediate punctuation codes.
        switch ( b )
        {
        case 0: return 0xA;
        case 3: return 0xB;
        case 4: return 0xC;
        case 5: return 0xD;
        case 6: return 0xE;
        case 7: return 0xF;
        default: throw cannotEncode( b );
        }
    }
    @Override
    int encPunctuation( byte b )
    {
        // Punctuation is handled inline by encTranslate.
        throw cannotEncode( b );
    }
    @Override
    char decTranslate( byte codePoint )
    {
        if ( codePoint < 0xA ) return (char) ( codePoint + '0' );
        switch ( codePoint )
        {
        case 0xA: return ' ';
        case 0xB: return '-';
        case 0xC: return ':';
        case 0xD: return '/';
        case 0xE: return '+';
        default: return ',';
        }
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_LongerShortString.java
|
1,192
|
// 4-bit encoding for upper-case hexadecimal strings (0-9, A-F).
UPPERHEX( 12, 4 )
{
    @Override
    int encTranslate( byte b )
    {
        if ( b >= '0' && b <= '9' ) return b - '0';
        if ( b >= 'A' && b <= 'F' ) return b - 'A' + 10;
        throw cannotEncode( b );
    }
    @Override
    int encPunctuation( byte b )
    {
        // No punctuation in hex strings.
        throw cannotEncode( b );
    }
    @Override
    char decTranslate( byte codePoint )
    {
        if ( codePoint < 10 ) return (char) ( codePoint + '0' );
        return (char) ( codePoint + 'A' - 10 );
    }
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_LongerShortString.java
|
1,193
|
// 4-bit encoding for lower-case hexadecimal strings (0-9, a-f).
LOWERHEX( 11, 4 )
{
    @Override
    int encTranslate( byte b )
    {
        if ( b >= '0' && b <= '9' ) return b - '0';
        if ( b >= 'a' && b <= 'f' ) return b - 'a' + 10;
        throw cannotEncode( b );
    }
    @Override
    int encPunctuation( byte b )
    {
        // No punctuation in hex strings.
        throw cannotEncode( b );
    }
    @Override
    char decTranslate( byte codePoint )
    {
        if ( codePoint < 10 ) return (char) ( codePoint + '0' );
        return (char) ( codePoint + 'a' - 10 );
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_LongerShortString.java
|
1,194
|
// 4-bit encoding for numerical strings: digits map directly, plus six
// punctuation code points decoded via decPunctuation.
NUMERICAL( 1, 4 )
{
    @Override
    int encTranslate( byte b )
    {
        if ( b >= '0' && b <= '9' ) return b - '0';
        switch ( b )
        {
        // interm. encoded
        case 0: return 0xA;
        case 2: return 0xB;
        case 3: return 0xC;
        case 6: return 0xD;
        case 7: return 0xE;
        case 8: return 0xF;
        default: throw cannotEncode( b );
        }
    }
    @Override
    int encPunctuation( byte b )
    {
        // Punctuation is handled inline by encTranslate.
        throw cannotEncode( b );
    }
    @Override
    char decTranslate( byte codePoint )
    {
        if ( codePoint < 10 ) return (char) ( codePoint + '0' );
        return decPunctuation( codePoint - 10 + 6 );
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_LongerShortString.java
|
1,195
|
/**
 * Entry in the queue of threads waiting for a window's lock. The movedOn
 * flag is set once the owning thread has acquired the lock, letting the
 * unlocking code skip stale entries when waking waiters.
 */
private static class LockElement
{
    private final Thread thread;
    private boolean movedOn = false;

    LockElement( Thread thread )
    {
        this.thread = thread;
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_LockableWindow.java
|
1,196
|
/**
 * Base class for persistence windows adding a mark/lock life cycle on top of
 * a {@link StoreChannel}: a thread first marks the window in use
 * ({@link #markAsInUse()}), then acquires the exclusive lock
 * ({@link #lock(OperationType)}), operates on the window and finally releases
 * it ({@link #unLock()}). Waiters blocked in lock() are woken by being
 * interrupted from unLock(). All state is guarded by this object's monitor.
 */
public abstract class LockableWindow implements PersistenceWindow
{
    private final StoreChannel fileChannel;
    // Thread currently holding the lock, or null when unlocked.
    private Thread lockingThread = null;
    // Threads blocked in lock(), most recently queued first; woken via interrupt.
    private final LinkedList<LockElement> waitingThreadList =
        new LinkedList<LockElement>();
    private boolean locked;
    // Number of outstanding markAsInUse() calls not yet consumed by lock().
    private int marked = 0;
    protected boolean closed;
    // Set when this window is locked for a WRITE operation; cleared by setClean().
    private boolean isDirty = false;

    LockableWindow( StoreChannel fileChannel )
    {
        this.fileChannel = fileChannel;
    }

    // Whether the given absolute position falls inside this window's range.
    boolean encapsulates( long position )
    {
        return position() <= position && position < position() + size();
    }

    StoreChannel getFileChannel()
    {
        return fileChannel;
    }

    /**
     * Writes out any changes to the underlying {@link StoreChannel} and is then
     * considered unusable.
     */
    protected final void writeOutAndClose()
    {
        force();
        close();
    }

    /**
     * @return {@code true} if marked, or {@code false} if this window has been
     * closed and couldn't be marked.
     */
    synchronized boolean markAsInUse()
    {
        if ( closed )
            return false;
        this.marked++;
        return true;
    }

    /**
     * Entry in the queue of threads waiting for this window's lock; movedOn
     * is set once the thread has acquired the lock so unLock() can skip it.
     */
    private static class LockElement
    {
        private final Thread thread;
        private boolean movedOn = false;

        LockElement( Thread thread )
        {
            this.thread = thread;
        }
    }

    /**
     * Acquires the exclusive lock on this window (reentrant for the current
     * owner), consuming one prior markAsInUse(). A WRITE operation also flags
     * the window as dirty.
     */
    synchronized void lock( OperationType operationType )
    {
        Thread currentThread = Thread.currentThread();
        LockElement le = new LockElement( currentThread );
        while ( locked && lockingThread != currentThread )
        {
            // NOTE(review): le is re-added on every retry of this loop, so the
            // waiting list may hold duplicates for the same thread; unLock()
            // skips entries whose movedOn flag is set, which appears to make
            // this benign - verify.
            waitingThreadList.addFirst( le );
            try
            {
                wait();
            }
            catch ( InterruptedException e )
            {
                // Interrupt is how unLock() wakes waiters: clear the interrupt
                // flag and re-check the lock state.
                Thread.interrupted();
            }
        }
        locked = true;
        lockingThread = currentThread;
        le.movedOn = true;
        marked--;
        if ( operationType == OperationType.WRITE )
        {
            isDirty = true;
        }
    }

    synchronized boolean isDirty()
    {
        return isDirty;
    }

    synchronized void setClean()
    {
        isDirty = false;
    }

    /**
     * Releases the lock and wakes (by interrupting) the most recently queued
     * waiter that hasn't already moved on.
     */
    synchronized void unLock()
    {
        Thread currentThread = Thread.currentThread();
        if ( !locked )
        {
            throw new LockException( currentThread
                + " doesn't have window lock on " + this );
        }
        locked = false;
        lockingThread = null;
        while ( !waitingThreadList.isEmpty() )
        {
            LockElement le = waitingThreadList.removeLast();
            if ( !le.movedOn )
            {
                le.thread.interrupt();
                break;
            }
        }
    }

    // A window is free when no marks are outstanding (and, unless the caller
    // is the owner assumed to have unlocked it, it also mustn't be locked).
    private boolean isFree( boolean assumingOwnerUnlockedIt )
    {
        return assumingOwnerUnlockedIt ?
            marked == 0 : // excluding myself (the owner) no other must have marked this window
            marked == 0 && !locked; // no one must have this marked and it mustn't be locked
    }

    /**
     * Flushes and closes this window if nothing else is using it.
     *
     * @param readOnly when {@code true} the window is considered closed
     * without writing anything out.
     * @return whether the window was free (and hence closed).
     */
    synchronized boolean writeOutAndCloseIfFree( boolean readOnly )
    {
        if ( isFree( lockingThread == Thread.currentThread() ) )
        {
            if ( !readOnly )
                writeOutAndClose();
            return true;
        }
        return false;
    }

    /**
     * Accepts and applies contents from a {@link PersistenceRow}.
     * Subclasses that support this override it; the default rejects the call.
     * @param dpw the {@link PersistenceRow} to accept changes from.
     */
    void acceptContents( PersistenceRow dpw )
    {
        throw new UnsupportedOperationException( "Should not be called on " + this + " which is a " + getClass() );
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_LockableWindow.java
|
1,197
|
/**
 * Stress test that repeatedly stores large random byte arrays as node
 * properties, originally written in an attempt to reproduce an observed bug.
 */
@Ignore( "Written as a reaction to an observed bug, but it doesn't seem to trigger it though" )
public class LargeByteArraysIT
{
    private static final Random RANDOM = new Random();

    @Test
    public void largeByteArrays() throws Exception
    {
        GraphDatabaseService db = new GraphDatabaseFactory().newEmbeddedDatabase(
                forTest( getClass() ).cleanDirectory( "bytearrays" ).getAbsolutePath() );
        try
        {
            for ( int iteration = 0; iteration < 100000; iteration++ )
            {
                storeNodeWithLargePayload( db );
                // Simple progress indicator, printed every hundred nodes.
                if ( iteration > 0 && iteration % 100 == 0 )
                {
                    System.out.println( iteration );
                }
            }
        }
        finally
        {
            db.shutdown();
        }
    }

    // Creates a single node carrying one large random byte array property,
    // committed in its own transaction.
    private void storeNodeWithLargePayload( GraphDatabaseService db )
    {
        Transaction tx = db.beginTx();
        try
        {
            Node node = db.createNode();
            node.setProperty( "prop", randomLargePayload() );
            tx.success();
        }
        finally
        {
            tx.finish();
        }
    }

    // Produces a byte array between 248 bytes and just under 248KB, with each
    // element derived from the wall-clock time at the moment it is written.
    private byte[] randomLargePayload()
    {
        int length = max( 248, RANDOM.nextInt( 248 * 1024 ) );
        byte[] payload = new byte[length];
        for ( int i = 0; i < payload.length; i++ )
        {
            payload[i] = (byte) ( currentTimeMillis() % 255 );
        }
        return payload;
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_store_LargeByteArraysIT.java
|
1,198
|
/**
 * Configuration hook extending TokenStore.Configuration; currently adds no
 * settings of its own.
 */
public static abstract class Configuration
    extends TokenStore.Configuration
{
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_LabelTokenStore.java
|
1,199
|
/**
 * Store for label tokens. Each record is a fixed-size entry consisting of an
 * in-use flag plus an id pointing into the dynamic string store holding the
 * token name.
 */
public class LabelTokenStore extends TokenStore<LabelTokenRecord>
{
    /** Configuration hook; adds nothing beyond TokenStore.Configuration. */
    public static abstract class Configuration
            extends TokenStore.Configuration
    {
    }

    public static final String TYPE_DESCRIPTOR = "LabelTokenStore";

    // 1 byte in-use flag + 4 byte name id into the name store.
    private static final int RECORD_SIZE = 1 + 4;

    public LabelTokenStore( File storeFile, Config config,
            IdGeneratorFactory idGeneratorFactory, WindowPoolFactory windowPoolFactory,
            FileSystemAbstraction fileSystem, StringLogger logger,
            DynamicStringStore nameStore )
    {
        super( storeFile, config, IdType.LABEL_TOKEN, idGeneratorFactory, windowPoolFactory,
                fileSystem, logger, nameStore );
    }

    @Override
    public String getTypeDescriptor()
    {
        return TYPE_DESCRIPTOR;
    }

    @Override
    public int getRecordSize()
    {
        return RECORD_SIZE;
    }

    @Override
    protected LabelTokenRecord newRecord( int id )
    {
        return new LabelTokenRecord( id );
    }

    // Reads the name id for a record from the given buffer.
    @Override
    protected void readRecord( LabelTokenRecord record, Buffer buffer )
    {
        record.setNameId( buffer.getInt() );
    }

    // Writes the record's name id into the given buffer.
    @Override
    protected void writeRecord( LabelTokenRecord record, Buffer buffer )
    {
        buffer.putInt( record.getNameId() );
    }

    // Visitor dispatch for store processors.
    @Override
    public <FAILURE extends Exception> void accept( Processor<FAILURE> processor, LabelTokenRecord record ) throws FAILURE
    {
        processor.processLabelToken( this, record );
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_store_LabelTokenStore.java
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.