Unnamed: 0
int64 0
6.7k
| func
stringlengths 12
89.6k
| target
bool 2
classes | project
stringlengths 45
151
|
|---|---|---|---|
700
|
/**
 * Debugging utilities: truncated stack trace printing, predicates over
 * {@link StackTraceElement}s, and counters for unique stack traces and calls.
 */
public class DebugUtil
{
    /**
     * Prints at most {@code maxNumberOfStackLines} stack lines of {@code cause}
     * to {@link System#out}. The +1 accounts for the exception header line.
     */
    public static void printShortStackTrace( Throwable cause, int maxNumberOfStackLines )
    {
        System.out.println( firstLinesOf( stringify( cause ), maxNumberOfStackLines + 1 ) );
    }

    /**
     * Returns at most the first {@code maxNumberOfLines} lines of {@code string},
     * each terminated by the platform line separator.
     */
    public static String firstLinesOf( String string, int maxNumberOfLines )
    {
        // Totally verbose implementation of this functionality :)
        StringWriter stringWriter = new StringWriter();
        PrintWriter writer = new PrintWriter( stringWriter );
        try
        {
            BufferedReader reader = new BufferedReader( new StringReader( string ) );
            String line = null;
            for ( int count = 0; ( line = reader.readLine() ) != null && count < maxNumberOfLines;
                    count++ )
            {
                writer.println( line );
            }
            writer.close();
            return stringWriter.getBuffer().toString();
        }
        catch ( IOException e )
        {
            // Reading from an in-memory StringReader cannot actually fail.
            throw new RuntimeException( "Can't happen", e );
        }
    }

    /**
     * @return {@code true} if any frame of the current thread's stack trace
     * matches the given predicate.
     */
    public static boolean currentStackTraceContains( Predicate<StackTraceElement> predicate )
    {
        for ( StackTraceElement element : Thread.currentThread().getStackTrace() )
        {
            if ( predicate.accept( element ) )
            {
                return true;
            }
        }
        return false;
    }

    /** Predicate matching frames whose class name equals {@code className}. */
    public static Predicate<StackTraceElement> classNameIs( final String className )
    {
        return new Predicate<StackTraceElement>()
        {
            @Override
            public boolean accept( StackTraceElement item )
            {
                return item.getClassName().equals( className );
            }
        };
    }

    /** Predicate matching frames whose class name contains {@code classNamePart}. */
    public static Predicate<StackTraceElement> classNameContains( final String classNamePart )
    {
        return new Predicate<StackTraceElement>()
        {
            @Override
            public boolean accept( StackTraceElement item )
            {
                return item.getClassName().contains( classNamePart );
            }
        };
    }

    /** Predicate matching frames declared in exactly the class {@code cls}. */
    public static Predicate<StackTraceElement> classIs( final Class<?> cls )
    {
        return new Predicate<StackTraceElement>()
        {
            @Override
            public boolean accept( StackTraceElement item )
            {
                return item.getClassName().equals( cls.getName() );
            }
        };
    }

    /** Predicate matching frames with the given class name and method name. */
    public static Predicate<StackTraceElement> classNameAndMethodAre( final String className,
            final String methodName )
    {
        return new Predicate<StackTraceElement>()
        {
            @Override
            public boolean accept( StackTraceElement item )
            {
                return item.getClassName().equals( className ) && item.getMethodName().equals( methodName );
            }
        };
    }

    /** Predicate matching frames declared in class {@code cls} with the given method name. */
    public static Predicate<StackTraceElement> classAndMethodAre( final Class<?> cls, final String methodName )
    {
        return new Predicate<StackTraceElement>()
        {
            @Override
            public boolean accept( StackTraceElement item )
            {
                return item.getClassName().equals( cls.getName() ) && item.getMethodName().equals( methodName );
            }
        };
    }

    /**
     * Counts occurrences of unique stack traces. Not thread-safe: the backing
     * {@link HashMap} is mutated without synchronization in {@link #add(Throwable)}.
     */
    public static class StackTracer
    {
        private final Map<Stack, AtomicInteger> uniqueStackTraces = new HashMap<>();
        private boolean considerMessages = true;

        /** Records one occurrence of the stack trace of {@code t}. */
        public void add( Throwable t )
        {
            Stack key = new Stack( t, considerMessages );
            AtomicInteger count = uniqueStackTraces.get( key );
            if ( count == null )
            {
                count = new AtomicInteger();
                uniqueStackTraces.put( key, count );
            }
            count.incrementAndGet();
        }

        /**
         * Prints all stack traces seen at least {@code interestThreshold} times,
         * followed by the total count, to {@code out}.
         */
        public void print( PrintStream out, int interestThreshold )
        {
            // Fixed: header previously went to System.out while the rest of the
            // report went to the supplied stream, splitting the output.
            out.println( "Printing stack trace counts:" );
            long total = 0;
            for ( Map.Entry<Stack, AtomicInteger> entry : uniqueStackTraces.entrySet() )
            {
                if ( entry.getValue().get() >= interestThreshold )
                {
                    out.println( entry.getValue() + " times:" );
                    entry.getKey().stackTrace.printStackTrace( out );
                }
                total += entry.getValue().get();
            }
            out.println( "------" );
            out.println( "Total:" + total );
        }

        /** Registers a JVM shutdown hook that prints this tracer's report. */
        public StackTracer printAtShutdown( final PrintStream out, final int interestThreshold )
        {
            Runtime.getRuntime().addShutdownHook( new Thread()
            {
                @Override
                public void run()
                {
                    print( out, interestThreshold );
                }
            } );
            return this;
        }

        /** Makes subsequent {@link #add(Throwable)} calls ignore exception messages. */
        public StackTracer ignoreMessages()
        {
            considerMessages = false;
            return this;
        }
    }

    /**
     * Hash/equality key wrapping a {@link Throwable}'s stack trace, optionally
     * including its message.
     */
    private static class Stack
    {
        private final Throwable stackTrace;
        private final StackTraceElement[] elements;
        private final boolean considerMessage;

        Stack( Throwable stackTrace, boolean considerMessage )
        {
            this.stackTrace = stackTrace;
            this.considerMessage = considerMessage;
            this.elements = stackTrace.getStackTrace();
        }

        @Override
        public int hashCode()
        {
            int hashCode = stackTrace.getMessage() == null || !considerMessage ? 31 :
                    stackTrace.getMessage().hashCode();
            // Use the cached elements array so hashCode and equals are computed
            // over the same frames (getStackTrace() clones on every call).
            for ( StackTraceElement element : elements )
            {
                hashCode = hashCode * 9 + element.hashCode();
            }
            return hashCode;
        }

        @Override
        public boolean equals( Object obj )
        {
            if ( !( obj instanceof Stack) )
            {
                return false;
            }
            Stack o = (Stack) obj;
            if ( considerMessage )
            {
                if ( stackTrace.getMessage() == null )
                {
                    if ( o.stackTrace.getMessage() != null )
                    {
                        return false;
                    }
                }
                else if ( !stackTrace.getMessage().equals( o.stackTrace.getMessage() ) )
                {
                    return false;
                }
            }
            if ( elements.length != o.elements.length )
            {
                return false;
            }
            for ( int i = 0; i < elements.length; i++ )
            {
                if ( !elements[i].equals( o.elements[i] ) )
                {
                    return false;
                }
            }
            return true;
        }
    }

    /**
     * Counts calls keyed by an arbitrary value. Not thread-safe: the backing
     * {@link HashMap} is mutated without synchronization in {@link #inc(Object)}.
     */
    public static class CallCounter<T>
    {
        private final Map<T, AtomicInteger> calls = new HashMap<>();
        private final String name;

        public CallCounter( String name )
        {
            this.name = name;
        }

        /** Registers a JVM shutdown hook that prints this counter's report. */
        public CallCounter<T> printAtShutdown( final PrintStream out )
        {
            Runtime.getRuntime().addShutdownHook( new Thread()
            {
                @Override
                public void run()
                {
                    print( out );
                }
            } );
            return this;
        }

        /** Increments the count for {@code key}, starting at zero if unseen. */
        public void inc( T key )
        {
            AtomicInteger count = calls.get( key );
            if ( count == null )
            {
                count = new AtomicInteger();
                calls.put( key, count );
            }
            count.incrementAndGet();
        }

        private void print( PrintStream out )
        {
            out.println( "Calls made regarding " + name + ":" );
            for ( Map.Entry<T, AtomicInteger> entry : calls.entrySet() )
            {
                out.println( "\t" + entry.getKey() + ": " + entry.getValue() );
            }
        }
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_DebugUtil.java
|
701
|
/**
 * Read-only decorator around an {@link Iterator}: traversal is delegated to
 * the wrapped iterator, while {@link #remove()} always fails.
 */
private static class UnsupportedRemoveIterator<T> implements Iterator<T>
{
    // Underlying iterator; all traversal calls are forwarded to it.
    private final Iterator<T> actual;

    UnsupportedRemoveIterator( Iterator<T> actual )
    {
        this.actual = actual;
    }

    @Override
    public boolean hasNext()
    {
        // Pure delegation — no state of our own.
        return this.actual.hasNext();
    }

    @Override
    public T next()
    {
        return this.actual.next();
    }

    /** Removal is deliberately unsupported on this view. */
    @Override
    public void remove()
    {
        throw new UnsupportedOperationException();
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_CopyOnWriteHashMap.java
|
702
|
{
@Override
public K getKey()
{
return actualNext.getKey();
}
@Override
public V getValue()
{
return actualNext.getValue();
}
@Override
public V setValue( V value )
{
throw new UnsupportedOperationException();
}
@Override
public boolean equals( Object obj )
{
return actualNext.equals( obj );
}
@Override
public int hashCode()
{
return actualNext.hashCode();
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_CopyOnWriteHashMap.java
|
703
|
/**
 * Tracks a diff — a set of added elements and a set of removed elements — to be
 * applied on top of some base collection. Both sets are created lazily; an
 * element is never in both sets at once (add/remove keep them disjoint).
 */
public class DiffSets<T>
{
    /** Callback receiving each added and removed element, see {@link #accept(Visitor)}. */
    public interface Visitor<T>
    {
        void visitAdded( T element );

        void visitRemoved( T element );
    }

    // Shared immutable empty instance; apply() is overridden to pass the source
    // iterator through untouched since there is nothing to filter or append.
    @SuppressWarnings(
            {"rawtypes", "unchecked"})
    private static final DiffSets EMPTY = new DiffSets( Collections.emptySet(), Collections.emptySet() )
    {
        @Override
        public Iterator apply( Iterator source )
        {
            return source;
        }
    };

    @SuppressWarnings("unchecked")
    public static <T> DiffSets<T> emptyDiffSets()
    {
        return EMPTY;
    }

    // Both sets are null until first written to — see added(boolean)/removed(boolean).
    private Set<T> addedElements;
    private Set<T> removedElements;
    // Lazily created predicate used by apply(); cached because it closes over this.
    private Predicate<T> filter;

    public DiffSets()
    {
        this( null, null );
    }

    public DiffSets( Set<T> addedElements, Set<T> removedElements )
    {
        this.addedElements = addedElements;
        this.removedElements = removedElements;
    }

    /** Feeds every added element, then every removed element, to {@code visitor}. */
    public void accept( Visitor<T> visitor )
    {
        for ( T element : added( false ) )
        {
            visitor.visitAdded( element );
        }
        for ( T element : removed( false ) )
        {
            visitor.visitRemoved( element );
        }
    }

    /**
     * Records {@code elem} as added, undoing any earlier removal of it.
     * @return whether the added-set changed.
     */
    public boolean add( T elem )
    {
        boolean result = added( true ).add( elem );
        // Keep the two sets disjoint: an element cannot be both added and removed.
        removed( false ).remove( elem );
        return result;
    }

    /**
     * Atomically (from the diff's point of view) swaps {@code toRemove} for
     * {@code toAdd}. If {@code toRemove} was itself an earlier addition it is
     * simply dropped from the added-set; otherwise it is recorded as removed.
     */
    public void replace( T toRemove, T toAdd )
    {
        Set<T> added = added( true ); // we're doing both add and remove on it, so pass in true
        boolean removedFromAdded = added.remove( toRemove );
        removed( false ).remove( toAdd );
        added.add( toAdd );
        if ( !removedFromAdded )
        {
            removed( true ).add( toRemove );
        }
    }

    /**
     * Records {@code elem} as removed. If it was an earlier addition, that
     * addition is cancelled instead of recording a removal.
     */
    public boolean remove( T elem )
    {
        boolean removedFromAddedElements = added( false ).remove( elem );
        // Add to the removedElements only if it was removed from the addedElements.
        return removedFromAddedElements || removed( true ).add( elem );
    }

    public void addAll( Iterator<T> elems )
    {
        while ( elems.hasNext() )
        {
            add( elems.next() );
        }
    }

    public void removeAll( Iterator<T> elems )
    {
        while ( elems.hasNext() )
        {
            remove( elems.next() );
        }
    }

    public boolean isAdded( T elem )
    {
        return added( false ).contains( elem );
    }

    public boolean isRemoved( T elem )
    {
        return removed( false ).contains( elem );
    }

    /** @return an unmodifiable view of the added elements (empty if none). */
    public Set<T> getAdded()
    {
        return resultSet( addedElements );
    }

    /** @return an unmodifiable view of the removed elements (empty if none). */
    public Set<T> getRemoved()
    {
        return resultSet( removedElements );
    }

    public boolean isEmpty()
    {
        return added( false ).isEmpty() && removed( false ).isEmpty();
    }

    /**
     * Applies this diff to {@code source}: removed elements are filtered out,
     * added elements are appended. The filter also drops elements present in
     * the added-set so they are not seen twice (once from the source, once
     * from the appended iterator).
     */
    public Iterator<T> apply( Iterator<T> source )
    {
        Iterator<T> result = source;
        if ( ( removedElements != null && !removedElements.isEmpty() ) ||
                ( addedElements != null && !addedElements.isEmpty() ) )
        {
            ensureFilterHasBeenCreated();
            result = Iterables.filter( filter, result );
        }
        if ( addedElements != null && !addedElements.isEmpty() )
        {
            result = concat( result, addedElements.iterator() );
        }
        return result;
    }

    public PrimitiveLongIterator applyPrimitiveLongIterator( final PrimitiveLongIterator source )
    {
        return new DiffApplyingPrimitiveLongIterator( source, added( false ), removed( false ) );
    }

    public PrimitiveIntIterator applyPrimitiveIntIterator( final PrimitiveIntIterator source )
    {
        return new DiffApplyingPrimitiveIntIterator( source, added( false ), removed( false ) );
    }

    /** @return a new DiffSets whose added-set is filtered; the removed-set is copied as-is. */
    public DiffSets<T> filterAdded( Predicate<T> addedFilter )
    {
        return new DiffSets<>(
                asSet( Iterables.filter( addedFilter, added( false ) ) ),
                asSet( removed( false ) ) );
    }

    /** @return a new DiffSets with both sets filtered by {@code filter}. */
    public DiffSets<T> filter( Predicate<T> filter )
    {
        return new DiffSets<>(
                asSet( Iterables.filter( filter, added( false ) ) ),
                asSet( Iterables.filter( filter, removed( false ) ) ) );
    }

    // Lazy accessor: returns the live added-set, creating it only when
    // create == true; otherwise an immutable empty set stands in for null.
    private Set<T> added( boolean create )
    {
        if ( addedElements == null )
        {
            if ( !create )
            {
                return Collections.emptySet();
            }
            addedElements = newSet();
        }
        return addedElements;
    }

    // Lazy accessor for the removed-set, mirror of added(boolean).
    private Set<T> removed( boolean create )
    {
        if ( removedElements == null )
        {
            if ( !create )
            {
                return Collections.emptySet();
            }
            removedElements = newSet();
        }
        return removedElements;
    }

    private void ensureFilterHasBeenCreated()
    {
        if ( filter == null )
        {
            filter = new Predicate<T>()
            {
                @Override
                public boolean accept( T item )
                {
                    // Excludes removed elements AND added ones: added elements
                    // are appended separately by apply(), so letting them pass
                    // here would yield duplicates.
                    return !removed( false ).contains( item ) && !added( false ).contains( item );
                }
            };
        }
    }

    /** Net size change this diff causes: additions minus removals. */
    public int delta()
    {
        return added( false ).size() - removed( false ).size();
    }

    private Set<T> newSet()
    {
        return new CopyOnWriteAfterIteratorHashSet<>();
    }

    private Set<T> resultSet( Set<T> coll )
    {
        return coll == null ? Collections.<T>emptySet() : Collections.unmodifiableSet( coll );
    }

    /**
     * Cancels a recorded removal of {@code item}, if any.
     * @return whether a removal was actually cancelled.
     */
    public boolean unRemove( T item )
    {
        return removed( false ).remove( item );
    }

    @Override
    public String toString()
    {
        return format( "{+%s, -%s}", added( false ), removed( false ) );
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_DiffSets.java
|
704
|
{
@Override
public Iterator apply( Iterator source )
{
return source;
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_DiffSets.java
|
705
|
{
@Override
public boolean accept( T item )
{
return !removed( false ).contains( item ) && !added( false ).contains( item );
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_DiffSets.java
|
706
|
/**
 * Helpers for reading/writing primitives, strings and maps over byte channels.
 * All {@code readXxx} methods return {@code null} when the channel cannot
 * supply enough bytes, letting callers detect truncated input.
 */
public abstract class IoPrimitiveUtils
{
    /**
     * Reads a 4-byte length followed by that many chars.
     * @return the string, or {@code null} on a short read.
     */
    public static String readLengthAndString( ReadableByteChannel channel,
            ByteBuffer buffer ) throws IOException
    {
        Integer length = readInt( channel, buffer );
        return length != null ? readString( channel, buffer, length ) : null;
    }

    /**
     * Reads {@code length} chars from the channel.
     * @return the string, or {@code null} on a short read.
     */
    public static String readString( ReadableByteChannel channel, ByteBuffer buffer,
            int length ) throws IOException
    {
        char[] chars = new char[length];
        chars = readCharArray( channel, buffer, chars );
        return chars == null ? null : new String( chars );
    }

    /** Writes the string length as 3 bytes (low short + high byte), then the chars. */
    public static void write3bLengthAndString( LogBuffer buffer, String string ) throws IOException
    {
        char[] chars = string.toCharArray();
        // 3 bytes to represent the length (4 is a bit overkill)... maybe
        // this space optimization is a bit overkill also :)
        buffer.putShort( (short)chars.length );
        buffer.put( (byte)(chars.length >> 16) );
        buffer.put( chars );
    }

    /** Counterpart of {@link #write3bLengthAndString(LogBuffer, String)}. */
    public static String read3bLengthAndString( ReadableByteChannel channel, ByteBuffer buffer ) throws IOException
    {
        Short lengthShort = readShort( channel, buffer );
        Byte lengthByte = readByte( channel, buffer );
        if ( lengthShort == null || lengthByte == null )
        {
            return null;
        }
        // Mask the short to avoid sign extension: lengths >= 0x8000 are stored
        // as a negative short by write3bLengthAndString and would otherwise
        // corrupt the reassembled 24-bit length.
        int length = (lengthByte << 16) | (lengthShort & 0xFFFF);
        return readString( channel, buffer, length );
    }

    /** Writes the string length as 2 bytes, then the chars. */
    public static void write2bLengthAndString( LogBuffer buffer, String string ) throws IOException
    {
        char[] chars = string.toCharArray();
        buffer.putShort( (short)chars.length );
        buffer.put( chars );
    }

    /** Counterpart of {@link #write2bLengthAndString(LogBuffer, String)}. */
    public static String read2bLengthAndString( ReadableByteChannel channel, ByteBuffer buffer ) throws IOException
    {
        Short length = readShort( channel, buffer );
        // Mask to treat the stored short as unsigned (supports lengths up to 65535).
        return length == null ? null : readString( channel, buffer, length & 0xFFFF );
    }

    /**
     * Fills {@code charArray} from the channel in buffer-sized chunks.
     * @return the array, or {@code null} if the channel ran dry.
     */
    private static char[] readCharArray( ReadableByteChannel channel,
            ByteBuffer buffer, char[] charArray ) throws IOException
    {
        buffer.clear();
        int charsLeft = charArray.length;
        int maxSize = buffer.capacity() / 2;
        int offset = 0; // offset in chars
        while ( charsLeft > 0 )
        {
            if ( charsLeft > maxSize )
            {
                buffer.limit( maxSize * 2 );
                charsLeft -= maxSize;
            }
            else
            {
                buffer.limit( charsLeft * 2 );
                charsLeft = 0;
            }
            if ( channel.read( buffer ) != buffer.limit() )
            {
                return null;
            }
            buffer.flip();
            int length = buffer.limit() / 2;
            buffer.asCharBuffer().get( charArray, offset, length );
            offset += length;
            buffer.clear();
        }
        return charArray;
    }

    /**
     * Reads exactly {@code bytes} bytes into {@code buffer} and flips it for reading.
     * @return {@code false} if fewer bytes were available.
     */
    public static boolean readAndFlip( ReadableByteChannel channel, ByteBuffer buffer, int bytes )
            throws IOException
    {
        buffer.clear();
        buffer.limit( bytes );
        int read = channel.read( buffer );
        if ( read < bytes )
        {
            return false;
        }
        buffer.flip();
        return true;
    }

    public static Byte readByte( ReadableByteChannel channel, ByteBuffer buffer ) throws IOException
    {
        return readAndFlip( channel, buffer, 1 ) ? buffer.get() : null;
    }

    public static Short readShort( ReadableByteChannel channel, ByteBuffer buffer ) throws IOException
    {
        return readAndFlip( channel, buffer, 2 ) ? buffer.getShort() : null;
    }

    public static Integer readInt( ReadableByteChannel channel, ByteBuffer buffer ) throws IOException
    {
        return readAndFlip( channel, buffer, 4 ) ? buffer.getInt() : null;
    }

    public static Long readLong( ReadableByteChannel channel, ByteBuffer buffer ) throws IOException
    {
        return readAndFlip( channel, buffer, 8 ) ? buffer.getLong() : null;
    }

    public static Float readFloat( ReadableByteChannel channel, ByteBuffer buffer ) throws IOException
    {
        return readAndFlip( channel, buffer, 4 ) ? buffer.getFloat() : null;
    }

    public static Double readDouble( ReadableByteChannel channel, ByteBuffer buffer ) throws IOException
    {
        return readAndFlip( channel, buffer, 8 ) ? buffer.getDouble() : null;
    }

    public static byte[] readBytes( ReadableByteChannel channel, byte[] array ) throws IOException
    {
        return readBytes( channel, array, array.length );
    }

    public static byte[] readBytes( ReadableByteChannel channel, byte[] array, int bytes ) throws IOException
    {
        return readAndFlip( channel, ByteBuffer.wrap( array ), bytes ) ? array : null;
    }

    /**
     * Reads a 4-byte entry count followed by that many (key, value) string pairs.
     * @return the map, or {@code null} on a short read.
     */
    public static Map<String, String> readMap( ReadableByteChannel channel, ByteBuffer buffer ) throws IOException
    {
        Integer size = readInt( channel, buffer );
        if ( size == null )
        {
            // Fixed: previously auto-unboxed a possibly-null Integer, throwing
            // NullPointerException on a truncated channel instead of returning
            // null like read2bMap does.
            return null;
        }
        Map<String, String> map = new HashMap<>();
        for ( int i = 0; i < size; i++ )
        {
            String key = readLengthAndString( channel, buffer );
            String value = readLengthAndString( channel, buffer );
            if ( key == null || value == null )
            {
                return null;
            }
            map.put( key, value );
        }
        return map;
    }

    /**
     * Reads a 2-byte entry count followed by that many 2b-length string pairs.
     * @return the map, or {@code null} on a short read.
     */
    public static Map<String, String> read2bMap( ReadableByteChannel channel, ByteBuffer buffer ) throws IOException
    {
        Short size = readShort( channel, buffer );
        if ( size == null )
        {
            return null;
        }
        Map<String, String> map = new HashMap<>();
        // Mask so entry counts >= 0x8000 are treated as unsigned.
        int entries = size & 0xFFFF;
        for ( int i = 0; i < entries; i++ )
        {
            String key = read2bLengthAndString( channel, buffer );
            String value = read2bLengthAndString( channel, buffer );
            if ( key == null || value == null )
            {
                return null;
            }
            map.put( key, value );
        }
        return map;
    }

    /** Writes a 4-byte length followed by the string's chars. */
    public static void writeLengthAndString( StoreChannel channel, ByteBuffer buffer, String value )
            throws IOException
    {
        char[] chars = value.toCharArray();
        int length = chars.length;
        writeInt( channel, buffer, length );
        writeChars( channel, buffer, chars );
    }

    // Writes the char array in buffer-sized chunks.
    private static void writeChars( StoreChannel channel, ByteBuffer buffer, char[] chars )
            throws IOException
    {
        int position = 0;
        do
        {
            buffer.clear();
            int leftToWrite = chars.length - position;
            if ( leftToWrite * 2 < buffer.capacity() )
            {
                buffer.asCharBuffer().put( chars, position, leftToWrite );
                buffer.limit( leftToWrite * 2);
                channel.write( buffer );
                position += leftToWrite;
            }
            else
            {
                int length = buffer.capacity() / 2;
                buffer.asCharBuffer().put( chars, position, length );
                buffer.limit( length * 2 );
                channel.write( buffer );
                position += length;
            }
        } while ( position < chars.length );
    }

    public static void writeInt( StoreChannel channel, ByteBuffer buffer, int value )
            throws IOException
    {
        buffer.clear();
        buffer.putInt( value );
        buffer.flip();
        channel.write( buffer );
    }

    /**
     * Wraps {@code propertyValue} in an Object[]: array values are unpacked
     * element by element, scalars become a singleton array.
     */
    public static Object[] asArray( Object propertyValue )
    {
        if ( propertyValue.getClass().isArray() )
        {
            int length = Array.getLength( propertyValue );
            Object[] result = new Object[ length ];
            for ( int i = 0; i < length; i++ )
            {
                result[ i ] = Array.get( propertyValue, i );
            }
            return result;
        }
        else
        {
            return new Object[] { propertyValue };
        }
    }

    /** Converts a (possibly primitive) array to a Collection via reflection. */
    public static Collection<Object> arrayAsCollection( Object arrayValue )
    {
        assert arrayValue.getClass().isArray();

        Collection<Object> result = new ArrayList<>();
        int length = Array.getLength( arrayValue );
        for ( int i = 0; i < length; i++ )
        {
            result.add( Array.get( arrayValue, i ) );
        }
        return result;
    }

    /**
     * Narrows a long to an int, rejecting values that would not fit.
     * NOTE(review): also rejects exactly Integer.MAX_VALUE and accepts any
     * negative value — kept as-is since callers may rely on it; confirm intent.
     */
    public static int safeCastLongToInt( long value )
    {
        if ( value >= Integer.MAX_VALUE )
        {
            throw new IllegalArgumentException( "Casting long value " + value + " to an int would wrap around" );
        }
        return (int) value;
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_IoPrimitiveUtils.java
|
707
|
{
@Override
public Void doWork( Void state ) throws Exception
{
ref.invalidate();
return null;
}
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_util_LazySingleReferenceTest.java
|
708
|
{
@Override
public Integer doWork( Void state ) throws Exception
{
return ref.evaluate();
}
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_util_LazySingleReferenceTest.java
|
709
|
{
@Override
protected Integer create()
{
return 1;
}
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_util_LazySingleReferenceTest.java
|
710
|
{
@Override
protected Integer create()
{
return initCalls.incrementAndGet();
}
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_util_LazySingleReferenceTest.java
|
711
|
{
@Override
protected Integer create()
{
awaitLatch( latch );
return initCalls.incrementAndGet();
}
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_util_LazySingleReferenceTest.java
|
712
|
{
@Override
protected Integer create()
{
awaitLatch( latch );
return initCalls.incrementAndGet();
}
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_util_LazySingleReferenceTest.java
|
713
|
/**
 * Tests for {@link LazySingleReference}: single initialization under
 * contention, mutual exclusion between evaluate/invalidate, re-initialization
 * after invalidation, and the isCreated() lifecycle. Thread orchestration uses
 * two OtherThreadExecutor workers plus a latch held open inside create().
 */
public class LazySingleReferenceTest
{
    @Test
    public void shouldOnlyAllowSingleThreadToInitialize() throws Exception
    {
        // GIVEN
        // create() blocks on the latch, so the first evaluator parks inside
        // initialization while holding the monitor.
        final CountDownLatch latch = new CountDownLatch( 1 );
        final AtomicInteger initCalls = new AtomicInteger();
        LazySingleReference<Integer> ref = new LazySingleReference<Integer>()
        {
            @Override
            protected Integer create()
            {
                awaitLatch( latch );
                return initCalls.incrementAndGet();
            }
        };
        Future<Integer> t1Evaluate = t1.executeDontWait( evaluate( ref ) );
        t1.waitUntilWaiting();

        // WHEN
        // Second evaluator must block on the monitor rather than run create() too.
        Future<Integer> t2Evaluate = t2.executeDontWait( evaluate( ref ) );
        t2.waitUntilBlocked();
        latch.countDown();
        int e1 = t1Evaluate.get();
        int e2 = t2Evaluate.get();

        // THEN
        // Both threads observe the single value produced by the one create() call.
        assertEquals( "T1 evaluation", 1, e1 );
        assertEquals( "T2 evaluation", 1, e2 );
    }

    @Test
    public void shouldMutexAccessBetweenInvalidateAndEvaluate() throws Exception
    {
        // GIVEN
        final CountDownLatch latch = new CountDownLatch( 1 );
        final AtomicInteger initCalls = new AtomicInteger();
        LazySingleReference<Integer> ref = new LazySingleReference<Integer>()
        {
            @Override
            protected Integer create()
            {
                awaitLatch( latch );
                return initCalls.incrementAndGet();
            }
        };
        Future<Integer> t1Evaluate = t1.executeDontWait( evaluate( ref ) );
        t1.waitUntilWaiting();

        // WHEN
        // invalidate() is synchronized, so it must wait for the in-flight
        // evaluate() to finish initializing before it can run.
        Future<Void> t2Invalidate = t2.executeDontWait( invalidate( ref ) );
        t2.waitUntilBlocked();
        latch.countDown();
        int e = t1Evaluate.get();
        t2Invalidate.get();

        // THEN
        assertEquals( "Evaluation", 1, e );
    }

    @Test
    public void shouldInitializeAgainAfterInvalidated() throws Exception
    {
        // GIVEN
        final AtomicInteger initCalls = new AtomicInteger();
        LazySingleReference<Integer> ref = new LazySingleReference<Integer>()
        {
            @Override
            protected Integer create()
            {
                return initCalls.incrementAndGet();
            }
        };
        assertEquals( "First evaluation", 1, ref.evaluate().intValue() );

        // WHEN
        // Invalidation clears the cached value, forcing a second create() call.
        ref.invalidate();
        int e2 = ref.evaluate();

        // THEN
        assertEquals( "Second evaluation", 2, e2 );
    }

    @Test
    public void shouldRespondToIsInitialized() throws Exception
    {
        // GIVEN
        LazySingleReference<Integer> ref = new LazySingleReference<Integer>()
        {
            @Override
            protected Integer create()
            {
                return 1;
            }
        };

        // WHEN
        // Sample isCreated() around each lifecycle transition.
        boolean firstResult = ref.isCreated();
        ref.evaluate();
        boolean secondResult = ref.isCreated();
        ref.invalidate();
        boolean thirdResult = ref.isCreated();
        ref.evaluate();
        boolean fourthResult = ref.isCreated();

        // THEN
        assertFalse( "Should not start off as initialized", firstResult );
        assertTrue( "Should be initialized after an evaluation", secondResult );
        assertFalse( "Should not be initialized after invalidated", thirdResult );
        assertTrue( "Should be initialized after a re-evaluation", fourthResult );
    }

    // Worker threads used to drive concurrent evaluate/invalidate calls.
    private OtherThreadExecutor<Void> t1, t2;

    @Before
    public void before()
    {
        t1 = new OtherThreadExecutor<>( "T1", null );
        t2 = new OtherThreadExecutor<>( "T2", null );
    }

    @After
    public void after()
    {
        t2.close();
        t1.close();
    }

    // Command that evaluates the reference on a worker thread.
    private WorkerCommand<Void,Integer> evaluate( final LazySingleReference<Integer> ref )
    {
        return new WorkerCommand<Void,Integer>()
        {
            @Override
            public Integer doWork( Void state ) throws Exception
            {
                return ref.evaluate();
            }
        };
    }

    // Command that invalidates the reference on a worker thread.
    private WorkerCommand<Void,Void> invalidate( final LazySingleReference<Integer> ref )
    {
        return new WorkerCommand<Void,Void>()
        {
            @Override
            public Void doWork( Void state ) throws Exception
            {
                ref.invalidate();
                return null;
            }
        };
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_util_LazySingleReferenceTest.java
|
714
|
/**
 * Lazily creates and caches a single reference using double-checked locking:
 * reads are lock-free once initialized; {@link #create()} runs at most once
 * per initialization cycle.
 */
public abstract class LazySingleReference<T> implements Thunk<T>
{
    // volatile is essential to the double-checked locking in evaluate():
    // it guarantees the unsynchronized first read sees a fully published value.
    private volatile T reference;

    /**
     * @return whether or not the managed reference has been initialized, i.e {@link #evaluate() evaluated}
     * for the first time, or after {@link #invalidate() invalidated}.
     */
    public boolean isCreated()
    {
        return reference != null;
    }

    /**
     * Returns the reference, initializing it if need be.
     */
    @Override
    public T evaluate()
    {
        // Double-checked locking: fast path reads the volatile field once;
        // only if null do we lock and re-check before calling create().
        T result;
        if ( (result = reference) == null )
        {
            synchronized ( this )
            {
                if ( (result = reference) == null )
                {
                    result = reference = create();
                }
            }
        }
        return result;
    }

    /**
     * Invalidates any initialized reference. A future call to {@link #evaluate()} will have it initialized again.
     */
    public synchronized void invalidate()
    {
        reference = null;
    }

    /**
     * Provides a reference to manage.
     */
    protected abstract T create();
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_LazySingleReference.java
|
715
|
{
@Override
public void line( String line )
{
target.println( line );
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_FileUtils.java
|
716
|
/**
 * Fixed-capacity int-keyed map with copy-on-write semantics: reads are
 * lock-free against a volatile array snapshot; writes copy, mutate and
 * republish the array under the instance monitor.
 */
public class DirectArrayMap<V>
{
    // Published snapshot; volatile so get() always sees a complete array.
    private volatile V[] array;

    @SuppressWarnings("unchecked")
    public DirectArrayMap( int maxSize )
    {
        array = (V[])new Object[maxSize];
    }

    /**
     * Associates {@code value} with {@code key}.
     * Synchronized to fix a lost-update race: previously only copyArray() was
     * locked, so two concurrent writers could copy the same snapshot and one
     * mutation would be silently dropped on republish.
     */
    public synchronized void put( int key, V value )
    {
        V[] newArray = copyArray();
        newArray[key] = value;
        array = newArray;
    }

    // Snapshot copy; callers hold the monitor (synchronized is reentrant).
    @SuppressWarnings("unchecked")
    private synchronized V[] copyArray()
    {
        V[] newArray = (V[])new Object[array.length];
        arraycopy( array, 0, newArray, 0, array.length );
        return newArray;
    }

    /** Clears the mapping for {@code key}. Synchronized for the same reason as put(). */
    public synchronized void remove( int key )
    {
        V[] newArray = copyArray();
        newArray[key] = null;
        array = newArray;
    }

    /** Lock-free read of the current snapshot; null if no mapping. */
    public V get( int key )
    {
        return array[key];
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_DirectArrayMap.java
|
717
|
/**
 * File-system helpers: recursive delete/copy/move with Windows-friendly
 * retries, truncation, path utilities and simple text-file I/O.
 */
public class FileUtils
{
    // Retries accommodate transient Windows file locking; final since nothing mutates it.
    private static final int WINDOWS_RETRY_COUNT = 3;

    /** Deletes {@code directory} and everything below it, iteratively (no recursion). */
    public static void deleteRecursively( File directory ) throws IOException
    {
        Stack<File> stack = new Stack<>();
        List<File> temp = new LinkedList<>();
        stack.push( directory.getAbsoluteFile() );
        while ( !stack.isEmpty() )
        {
            File top = stack.pop();
            File[] files = top.listFiles();
            if ( files != null )
            {
                for ( File child : files )
                {
                    if ( child.isFile() )
                    {
                        if ( !deleteFile( child ) )
                        {
                            throw new IOException( "Failed to delete " + child.getCanonicalPath() );
                        }
                    }
                    else
                    {
                        temp.add( child );
                    }
                }
            }
            // Re-list: if the directory is now empty it can be deleted,
            // otherwise push it back along with its sub-directories.
            files = top.listFiles();
            if ( files == null || files.length == 0 )
            {
                if ( !deleteFile( top ) )
                {
                    throw new IOException( "Failed to delete " + top.getCanonicalPath() );
                }
            }
            else
            {
                stack.push( top );
                for ( File f : temp )
                {
                    stack.push( f );
                }
            }
            temp.clear();
        }
    }

    /**
     * Deletes {@code file}, retrying a few times with a pause to work around
     * transient locks (mostly a Windows issue).
     * @return whether the file is gone afterwards.
     */
    public static boolean deleteFile( File file )
    {
        if ( !file.exists() )
        {
            return true;
        }
        int count = 0;
        boolean deleted;
        do
        {
            deleted = file.delete();
            if ( !deleted )
            {
                count++;
                waitSome();
            }
        }
        while ( !deleted && count <= WINDOWS_RETRY_COUNT );
        return deleted;
    }

    /**
     * Deletes every file in {@code directory} whose name matches {@code regexPattern}.
     * @return the deleted files.
     */
    public static File[] deleteFiles( File directory, String regexPattern )
            throws IOException
    {
        Pattern pattern = Pattern.compile( regexPattern );
        Collection<File> deletedFiles = new ArrayList<>();
        File[] files = directory.listFiles();
        if ( files == null )
        {
            throw new IllegalArgumentException( directory + " is not a directory" );
        }
        for ( File file : files )
        {
            if ( pattern.matcher( file.getName() ).find() )
            {
                if ( !file.delete() )
                {
                    throw new IOException( "Couldn't delete file '" + file.getAbsolutePath() + "'" );
                }
                deletedFiles.add( file );
            }
        }
        return deletedFiles.toArray( new File[deletedFiles.size()] );
    }

    /**
     * Utility method that moves a file from its current location to the
     * new target location. If rename fails (for example if the target is
     * another disk) a copy/delete will be performed instead. This is not a rename,
     * use {@link #renameFile(File, File)} instead.
     *
     * @param toMove The File object to move.
     * @param target Target file to move to.
     * @throws IOException
     */
    public static void moveFile( File toMove, File target ) throws IOException
    {
        if ( !toMove.exists() )
        {
            throw new NotFoundException( "Source file[" + toMove.getName()
                    + "] not found" );
        }
        if ( target.exists() )
        {
            throw new NotFoundException( "Target file[" + target.getName()
                    + "] already exists" );
        }

        if ( toMove.renameTo( target ) )
        {
            return;
        }

        // Rename failed (e.g. cross-device move) — fall back to copy + delete.
        if ( toMove.isDirectory() )
        {
            target.mkdirs();
            copyRecursively( toMove, target );
            deleteRecursively( toMove );
        }
        else
        {
            copyFile( toMove, target );
            deleteFile( toMove );
        }
    }

    /**
     * Utility method that moves a file from its current location to the
     * provided target directory. If rename fails (for example if the target is
     * another disk) a copy/delete will be performed instead. This is not a rename,
     * use {@link #renameFile(File, File)} instead.
     *
     * @param toMove The File object to move.
     * @param targetDirectory the destination directory
     * @return the new file, null iff the move was unsuccessful
     * @throws IOException
     */
    public static File moveFileToDirectory( File toMove, File targetDirectory ) throws IOException
    {
        if ( !targetDirectory.isDirectory() )
        {
            throw new IllegalArgumentException(
                    "Move target must be a directory, not " + targetDirectory );
        }
        File target = new File( targetDirectory, toMove.getName() );
        moveFile( toMove, target );
        return target;
    }

    /**
     * Renames {@code srcFile} to {@code renameToFile}, retrying on failure.
     * @return whether the rename eventually succeeded.
     */
    public static boolean renameFile( File srcFile, File renameToFile )
    {
        if ( !srcFile.exists() )
        {
            throw new NotFoundException( "Source file[" + srcFile.getName() + "] not found" );
        }
        if ( renameToFile.exists() )
        {
            throw new NotFoundException( "Target file[" + renameToFile.getName() + "] already exists" );
        }
        if ( !renameToFile.getParentFile().isDirectory() )
        {
            throw new NotFoundException( "Target directory[" + renameToFile.getParent() + "] does not exists" );
        }
        int count = 0;
        boolean renamed;
        do
        {
            renamed = srcFile.renameTo( renameToFile );
            if ( !renamed )
            {
                count++;
                waitSome();
            }
        }
        while ( !renamed && count <= WINDOWS_RETRY_COUNT );
        return renamed;
    }

    /**
     * Truncates the channel to {@code position}, retrying on IOException;
     * rethrows the last failure if all attempts fail.
     */
    public static void truncateFile( SeekableByteChannel fileChannel, long position )
            throws IOException
    {
        int count = 0;
        boolean success = false;
        IOException cause = null;
        do
        {
            count++;
            try
            {
                fileChannel.truncate( position );
                success = true;
            }
            catch ( IOException e )
            {
                cause = e;
            }
        }
        while ( !success && count <= WINDOWS_RETRY_COUNT );
        if ( !success )
        {
            throw cause;
        }
    }

    /** Truncates {@code file} to {@code position} bytes. */
    public static void truncateFile( File file, long position ) throws IOException
    {
        try ( RandomAccessFile access = new RandomAccessFile( file, "rw" ) )
        {
            truncateFile( access.getChannel(), position );
        }
    }

    // Pause between retries; the gc() call encourages release of stale file
    // handles which can hold Windows locks.
    private static void waitSome()
    {
        try
        {
            Thread.sleep( 500 );
        }
        catch ( InterruptedException ee )
        {
            // Restore the interrupt flag for callers. The previous code called
            // Thread.interrupted(), which CLEARS the flag and loses the signal.
            Thread.currentThread().interrupt();
        } // ok
        System.gc();
    }

    /** Normalizes '/' vs '\\' in {@code path} to match the platform separator. */
    public static String fixSeparatorsInPath( String path )
    {
        String fileSeparator = System.getProperty( "file.separator" );
        if ( "\\".equals( fileSeparator ) )
        {
            path = path.replace( '/', '\\' );
        }
        else if ( "/".equals( fileSeparator ) )
        {
            path = path.replace( '\\', '/' );
        }
        return path;
    }

    /**
     * Copies {@code srcFile} to {@code dstFile}, creating parent directories
     * as needed. IOExceptions are rewrapped to name both files.
     */
    public static void copyFile( File srcFile, File dstFile ) throws IOException
    {
        //noinspection ResultOfMethodCallIgnored
        dstFile.getParentFile().mkdirs();
        // try-with-resources closes both streams even if one close() throws —
        // the previous manual finally block leaked the output stream whenever
        // closing the input stream failed.
        try ( FileInputStream input = new FileInputStream( srcFile );
              FileOutputStream output = new FileOutputStream( dstFile ) )
        {
            int bufferSize = 1024;
            byte[] buffer = new byte[bufferSize];
            int bytesRead;
            while ( (bytesRead = input.read( buffer )) != -1 )
            {
                output.write( buffer, 0, bytesRead );
            }
        }
        catch ( IOException e )
        {
            // Because the message from this cause may not mention which file it's about
            throw new IOException( "Could not copy '" + srcFile + "' to '" + dstFile + "'", e );
        }
    }

    /** Recursively copies the contents of one directory into another. */
    public static void copyRecursively( File fromDirectory, File toDirectory ) throws IOException
    {
        copyRecursively( fromDirectory, toDirectory, null );
    }

    /** Recursively copies directory contents, including only files accepted by {@code filter}. */
    public static void copyRecursively( File fromDirectory, File toDirectory, FileFilter filter) throws IOException
    {
        for ( File fromFile : fromDirectory.listFiles( filter ) )
        {
            File toFile = new File( toDirectory, fromFile.getName() );
            if ( fromFile.isDirectory() )
            {
                toFile.mkdir();
                copyRecursively( fromFile, toFile, filter );
            }
            else
            {
                copyFile( fromFile, toFile );
            }
        }
    }

    /** Writes (or appends) {@code text} to {@code target} as UTF-8, creating it if absent. */
    public static void writeToFile( File target, String text, boolean append ) throws IOException
    {
        if ( !target.exists() )
        {
            target.getParentFile().mkdirs();
            target.createNewFile();
        }
        try ( Writer out = new OutputStreamWriter( new FileOutputStream( target, append ), "UTF-8" ) )
        {
            out.write( text );
        }
    }

    public static BufferedReader newBufferedFileReader( File file, Charset charset ) throws FileNotFoundException
    {
        return new BufferedReader( new InputStreamReader( new FileInputStream( file ), charset) );
    }

    /** NOTE(review): opens the file in append mode ({@code true}) — confirm callers expect that. */
    public static PrintWriter newFilePrintWriter( File file, Charset charset ) throws FileNotFoundException
    {
        return new PrintWriter( new OutputStreamWriter( new FileOutputStream( file, true ), charset) );
    }

    /** Builds a File from a root path plus path components. */
    public static File path( String root, String... path )
    {
        return path( new File( root ), path );
    }

    public static File path( File root, String... path )
    {
        for ( String part : path )
        {
            root = new File( root, part );
        }
        return root;
    }

    /**
     * Move the contents of one directory into another directory. Allows moving the contents of a directory into a
     * sub-directory of itself.
     */
    public static void moveDirectoryContents( File baseDir, File targetDir ) throws IOException
    {
        if(!baseDir.isDirectory())
        {
            throw new IllegalArgumentException( baseDir.getAbsolutePath() + " must be a directory." );
        }
        if(!targetDir.exists())
        {
            targetDir.mkdirs();
        }

        for ( File file : baseDir.listFiles() )
        {
            // Skip targetDir itself, which is what permits moving into a sub-directory.
            if(!file.equals( targetDir ))
            {
                moveFileToDirectory( file, targetDir );
            }
        }
    }

    /** Gives the recursive size of all files in a directory. */
    public static long directorySize( File directory )
    {
        long length = 0;
        for (File file : directory.listFiles())
        {
            length += file.isFile() ? file.length() : directorySize( file );
        }
        return length;
    }

    /** Callback receiving one line of text at a time, see {@link #readTextFile(File, LineListener)}. */
    public interface LineListener
    {
        void line( String line );
    }

    /** A LineListener that echoes each line to {@code target}. */
    public static LineListener echo( final PrintStream target )
    {
        return new LineListener()
        {
            @Override
            public void line( String line )
            {
                target.println( line );
            }
        };
    }

    /**
     * Streams {@code file} line by line to {@code listener}.
     * NOTE(review): uses FileReader, i.e. the platform default charset — confirm
     * callers never read UTF-8 files on non-UTF-8 platforms.
     */
    public static void readTextFile( File file, LineListener listener ) throws IOException
    {
        BufferedReader reader = new BufferedReader( new FileReader( file ) );
        try
        {
            String line = null;
            while ( (line = reader.readLine()) != null )
            {
                listener.line( line );
            }
        }
        finally
        {
            reader.close();
        }
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_FileUtils.java
|
718
|
public class ExceptionCauseSetter {
public static void setCause(Throwable original, Throwable cause)
{
try {
Field field = Throwable.class.getDeclaredField("cause");
field.setAccessible(true);
field.set(original, cause);
} catch (Exception e) {
// Failed to set cause. Don't throw an exception from this
// as we are most likely already recovering from an exception,
// and being unable to set the cause is most likely a JVM issue
// that the user can't help anyway.
// Print an exception from this though, including the cause exception
// to help debugging.
// TODO: Use proper logging.
Exception error = new Exception("Unable to set cause of exception (see nested), will print stacktrace of the exception causing all this below.",e);
error.printStackTrace();
cause.printStackTrace();
}
}
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_ExceptionCauseSetter.java
|
719
|
private static class FilePrinter implements Printer
{
private File directory;
private PrintStream out;
@Override
public PrintStream getFor( String file ) throws FileNotFoundException
{
File absoluteFile = new File( file ).getAbsoluteFile();
File dir = absoluteFile.isDirectory() ? absoluteFile : absoluteFile.getParentFile();
if ( !dir.equals( directory ) )
{
safeClose();
File dumpFile = new File( dir, "dump-logical-log.txt" );
System.out.println( "Redirecting the output to " + dumpFile.getPath() );
out = new PrintStream( dumpFile );
directory = dir;
}
return out;
}
private void safeClose()
{
if ( out != null )
{
out.close();
}
}
@Override
public void close()
{
safeClose();
}
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_DumpLogicalLog.java
|
720
|
public static class CommandFactory extends XaCommandFactory
{
@Override
public XaCommand readCommand( ReadableByteChannel byteChannel,
ByteBuffer buffer ) throws IOException
{
return Command.readCommand( null, null, byteChannel, buffer );
}
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_DumpLogicalLog.java
|
721
|
{
@Override
public int compare( String o1, String o2 )
{
return versionOf( o1 ).compareTo( versionOf( o2 ) );
}
private Integer versionOf( String string )
{
String toFind = ".v";
int index = string.indexOf( toFind );
if ( index == -1 )
{
return Integer.MAX_VALUE;
}
return Integer.valueOf( string.substring( index + toFind.length() ) );
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_DumpLogicalLog.java
|
722
|
{
@Override
public boolean accept( File dir, String name )
{
return name.contains( prefix ) && !name.contains( "active" );
}
} );
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_DumpLogicalLog.java
|
723
|
{
@Override
public PrintStream getFor( String file )
{
return System.out;
}
@Override
public void close()
{ // Don't close System.out
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_DumpLogicalLog.java
|
724
|
public class DumpLogicalLog
{
private final FileSystemAbstraction fileSystem;
public DumpLogicalLog( FileSystemAbstraction fileSystem )
{
this.fileSystem = fileSystem;
}
public int dump( String filenameOrDirectory, PrintStream out, TimeZone timeZone ) throws IOException
{
int logsFound = 0;
for ( String fileName : filenamesOf( filenameOrDirectory, getLogPrefix() ) )
{
logsFound++;
out.println( "=== " + fileName + " ===" );
StoreChannel fileChannel = fileSystem.open( new File( fileName ), "r" );
ByteBuffer buffer = ByteBuffer.allocateDirect( 9 + Xid.MAXGTRIDSIZE
+ Xid.MAXBQUALSIZE * 10 );
long logVersion, prevLastCommittedTx;
try
{
long[] header = LogIoUtils.readLogHeader( buffer, fileChannel, true );
logVersion = header[0];
prevLastCommittedTx = header[1];
}
catch ( IOException ex )
{
out.println( "Unable to read timestamp information, "
+ "no records in logical log." );
out.println( ex.getMessage() );
fileChannel.close();
throw ex;
}
out.println( "Logical log version: " + logVersion + " with prev committed tx[" +
prevLastCommittedTx + "]" );
XaCommandFactory cf = instantiateCommandFactory();
while ( readAndPrintEntry( fileChannel, buffer, cf, out, timeZone ) )
{
;
}
fileChannel.close();
}
return logsFound;
}
protected static boolean isAGraphDatabaseDirectory( String fileName )
{
File file = new File( fileName );
return file.isDirectory() && new File( file, NeoStore.DEFAULT_NAME ).exists();
}
protected boolean readAndPrintEntry( StoreChannel fileChannel, ByteBuffer buffer, XaCommandFactory cf,
PrintStream out, TimeZone timeZone ) throws IOException
{
LogEntry entry = LogIoUtils.readEntry( buffer, fileChannel, cf );
if ( entry != null )
{
out.println( entry.toString( timeZone ) );
return true;
}
return false;
}
protected XaCommandFactory instantiateCommandFactory()
{
return new CommandFactory();
}
protected String getLogPrefix()
{
return "nioneo_logical.log";
}
public static void main( String args[] ) throws IOException
{
Args arguments = new Args( args );
TimeZone timeZone = parseTimeZoneConfig( arguments );
try ( Printer printer = getPrinter( arguments ) )
{
for ( String fileAsString : arguments.orphans() )
{
new DumpLogicalLog( new DefaultFileSystemAbstraction() ).dump(
fileAsString, printer.getFor( fileAsString ), timeZone );
}
}
}
public static Printer getPrinter( Args args )
{
boolean toFile = args.getBoolean( "tofile", false, true ).booleanValue();
return toFile ? new FilePrinter() : SYSTEM_OUT_PRINTER;
}
public interface Printer extends AutoCloseable
{
PrintStream getFor( String file ) throws FileNotFoundException;
@Override
void close();
}
private static final Printer SYSTEM_OUT_PRINTER = new Printer()
{
@Override
public PrintStream getFor( String file )
{
return System.out;
}
@Override
public void close()
{ // Don't close System.out
}
};
private static class FilePrinter implements Printer
{
private File directory;
private PrintStream out;
@Override
public PrintStream getFor( String file ) throws FileNotFoundException
{
File absoluteFile = new File( file ).getAbsoluteFile();
File dir = absoluteFile.isDirectory() ? absoluteFile : absoluteFile.getParentFile();
if ( !dir.equals( directory ) )
{
safeClose();
File dumpFile = new File( dir, "dump-logical-log.txt" );
System.out.println( "Redirecting the output to " + dumpFile.getPath() );
out = new PrintStream( dumpFile );
directory = dir;
}
return out;
}
private void safeClose()
{
if ( out != null )
{
out.close();
}
}
@Override
public void close()
{
safeClose();
}
}
public static TimeZone parseTimeZoneConfig( Args arguments )
{
return getTimeZone( arguments.get( "timezone", DEFAULT_TIME_ZONE.getID() ) );
}
protected static String[] filenamesOf( String filenameOrDirectory, final String prefix )
{
File file = new File( filenameOrDirectory );
if ( file.isDirectory() )
{
File[] files = file.listFiles( new FilenameFilter()
{
@Override
public boolean accept( File dir, String name )
{
return name.contains( prefix ) && !name.contains( "active" );
}
} );
Collection<String> result = new TreeSet<String>( sequentialComparator() );
for ( int i = 0; i < files.length; i++ )
{
result.add( files[i].getPath() );
}
return result.toArray( new String[result.size()] );
}
else
{
return new String[] { filenameOrDirectory };
}
}
private static Comparator<? super String> sequentialComparator()
{
return new Comparator<String>()
{
@Override
public int compare( String o1, String o2 )
{
return versionOf( o1 ).compareTo( versionOf( o2 ) );
}
private Integer versionOf( String string )
{
String toFind = ".v";
int index = string.indexOf( toFind );
if ( index == -1 )
{
return Integer.MAX_VALUE;
}
return Integer.valueOf( string.substring( index + toFind.length() ) );
}
};
}
public static class CommandFactory extends XaCommandFactory
{
@Override
public XaCommand readCommand( ReadableByteChannel byteChannel,
ByteBuffer buffer ) throws IOException
{
return Command.readCommand( null, null, byteChannel, buffer );
}
}
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_DumpLogicalLog.java
|
725
|
{
@Override
public Entry<K, V> next()
{
final Entry<K, V> actualNext = super.next();
return new Entry<K,V>()
{
@Override
public K getKey()
{
return actualNext.getKey();
}
@Override
public V getValue()
{
return actualNext.getValue();
}
@Override
public V setValue( V value )
{
throw new UnsupportedOperationException();
}
@Override
public boolean equals( Object obj )
{
return actualNext.equals( obj );
}
@Override
public int hashCode()
{
return actualNext.hashCode();
}
};
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_CopyOnWriteHashMap.java
|
726
|
{
@Override
public Iterator<V> iterator()
{
return new UnsupportedRemoveIterator<V>( actual.values().iterator() );
}
@Override
public int size()
{
return actual.size();
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_CopyOnWriteHashMap.java
|
727
|
{
@Override
public Iterator<Relationship> iterator()
{
return new PrefetchingIterator<Relationship>()
{
private TraversalBranch branch = TraversalBranchImpl.this;
@Override
protected Relationship fetchNextOrNull()
{
try
{
return branch != null ? branch.lastRelationship() : null;
}
finally
{
branch = branch != null ? branch.parent() : null;
}
}
};
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_traversal_TraversalBranchImpl.java
|
728
|
{
@Override
public boolean remove( Object o )
{
return CopyOnWriteHashMap.this.remove( o ) != null;
}
@Override
public Iterator<K> iterator()
{
return new UnsupportedRemoveIterator<K>( actual.keySet().iterator() );
}
@Override
public int size()
{
return actual.size();
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_CopyOnWriteHashMap.java
|
729
|
BYTE( Byte.SIZE )
{
@Override
public void put( ByteBuffer buffer, Number value )
{
buffer.put( value.byteValue() );
}
@Override
public void put( LogBuffer buffer, Number value ) throws IOException
{
buffer.put( value.byteValue() );
}
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_BufferNumberPutter.java
|
730
|
public final class Bits implements Cloneable
{
// 3: ...
// 2: [ 23 ][ 22 ][ 21 ][ 20 ][ 19 ][ 18 ][ 17 ][ 16 ] <--\
// |
// /---------------------------------------------------------------------------------------------/
// |
// 1: \-[ 15 ][ 14 ][ 13 ][ 12 ][ 11 ][ 10 ][ 9 ][ 8 ] <--\
// |
// /---------------------------------------------------------------------------------------------/
// |
// 0: \-[ 7 ][ 6 ][ 5 ][ 4 ][ 3 ][ 2 ][ 1 ][ 0 ] <---- START
private final long[] longs;
private final int numberOfBytes;
private int writePosition;
private int readPosition;
/*
* Calculate all the right overflow masks
*/
private static final long[] RIGHT_OVERFLOW_MASKS;
static
{
RIGHT_OVERFLOW_MASKS = new long[Long.SIZE];
long mask = 1L;
for ( int i = 0; i < RIGHT_OVERFLOW_MASKS.length; i++ )
{
RIGHT_OVERFLOW_MASKS[i] = mask;
mask <<= 1;
mask |= 0x1L;
}
}
public static Bits bits( int numberOfBytes )
{
int requiredLongs = requiredLongs(numberOfBytes);
return new Bits( new long[requiredLongs], numberOfBytes );
}
public static int requiredLongs(int numberOfBytes) {
return ((numberOfBytes-1)>>3)+1; // /8
}
public static Bits bitsFromLongs( long[] longs )
{
return new Bits( longs, longs.length<<3 ); // *8
}
public static Bits bitsFromBytes( byte[] bytes )
{
return bitsFromBytes( bytes, 0 );
}
public static Bits bitsFromBytes( byte[] bytes, int startIndex )
{
final int count = bytes.length;
Bits bits = bits( count - startIndex );
for ( int i = startIndex; i < count; i++ ) bits.put( bytes[i] );
return bits;
}
private Bits( long[] longs, int numberOfBytes )
{
this.longs = longs;
this.numberOfBytes = numberOfBytes;
}
/**
* A mask which has the {@code steps} most significant bits set to 1, all others 0.
* It's used to carry bits over between carriers (longs) when shifting left.
* @param steps the number of most significant bits to have set to 1 in the mask.
* @return the created mask.
*/
public static long leftOverflowMask( int steps )
{
long mask = 0L;
for ( int i = 0; i < steps; i++ )
{
mask >>= 1;
mask |= 0x8000000000000000L;
}
return mask;
}
/**
* A mask which has the {@code steps} least significant bits set to 1, all others 0.
* It's used to carry bits over between carriers (longs) when shifting right.
* @param steps the number of least significant bits to have set to 1 in the mask.
* @return the created mask.
*/
public static long rightOverflowMask( int steps )
{
return RIGHT_OVERFLOW_MASKS[steps-1];
}
/**
* Returns the underlying long values that has got all the bits applied.
* The first item in the array has got the most significant bits.
* @return the underlying long values that has got all the bits applied.
*/
@SuppressWarnings( "EI_EXPOSE_REP" )
public long[] getLongs()
{
return longs;
}
public byte[] asBytes()
{
int readPositionBefore = readPosition;
readPosition = 0;
try
{
byte[] result = new byte[numberOfBytes];
final int count = result.length;
for ( int i = 0; i < count; i++ )
{
result[i] = getByte();
}
return result;
}
finally
{
readPosition = readPositionBefore;
}
}
/**
* Writes all bits to {@code buffer}.
* @param buffer the {@link Buffer} to write to.
* @return this instance.
*/
public Bits apply( Buffer buffer )
{
int readPositionBefore = readPosition;
readPosition = 0;
try
{
// TODO byte for byte?
int rest = numberOfBytes;
while ( rest-- > 0 )
{
buffer.put( getByte() );
}
return this;
}
finally
{
readPosition = readPositionBefore;
}
}
/**
* Reads from {@code buffer} and fills up all the bits.
* @param buffer the {@link Buffer} to read from.
* @return this instance.
*/
public Bits read( Buffer buffer )
{
// TODO byte for byte?
int rest = numberOfBytes;
while ( rest > 0 )
{
byte value = buffer.get();
put( value );
rest--;
}
return this;
}
/**
* A very nice toString, showing each bit, divided into groups of bytes and
* lines of 8 bytes.
*/
@Override
public String toString()
{
StringBuilder builder = new StringBuilder();
for ( int longIndex = longs.length-1; longIndex >= 0; longIndex-- )
{
long value = longs[longIndex];
if ( builder.length() > 0 ) builder.append( "\n" );
builder.append( longIndex );
builder.append( ':' );
numberToString( builder, value, 8 );
if ( longIndex == 0 ) builder.append( " <-- START" );
}
return builder.toString();
}
public static StringBuilder numberToString( StringBuilder builder, long value, int numberOfBytes )
{
builder.append( "[" );
for ( int i = 8*numberOfBytes-1; i >= 0; i-- )
{
if ( i > 0 && i % 8 == 0 ) builder.append( "," );
boolean isSet = (value & (1L << i)) != 0;
builder.append( isSet ? "1" : "0" );
}
builder.append( "]" );
return builder;
}
public static String numbersToBitString( byte[] values )
{
StringBuilder builder = new StringBuilder();
for ( byte value : values ) numberToString( builder, value, 1 );
return builder.toString();
}
public static String numbersToBitString( short[] values )
{
StringBuilder builder = new StringBuilder();
for ( short value : values ) numberToString( builder, value, 2 );
return builder.toString();
}
public static String numbersToBitString( int[] values )
{
StringBuilder builder = new StringBuilder();
for ( int value : values ) numberToString( builder, value, 4 );
return builder.toString();
}
public static String numbersToBitString( long[] values )
{
StringBuilder builder = new StringBuilder();
for ( long value : values ) numberToString( builder, value, 8 );
return builder.toString();
}
@Override
public Bits clone()
{
return new Bits( Arrays.copyOf( longs, longs.length ), numberOfBytes );
}
public Bits put( byte value )
{
return put( value, Byte.SIZE );
}
public Bits put( byte value, int steps )
{
return put( (long)value, steps );
}
public Bits put( short value )
{
return put( value, Short.SIZE );
}
public Bits put( short value, int steps )
{
return put( (long)value, steps );
}
public Bits put( int value )
{
return put( value, Integer.SIZE );
}
public Bits put( int value, int steps )
{
return put( (long)value, steps );
}
public Bits put( long value )
{
return put( value, Long.SIZE );
}
public Bits put( long value, int steps )
{
int lowLongIndex = writePosition >> 6; // /64
int lowBitInLong = writePosition%64;
int lowBitsAvailable = 64-lowBitInLong;
long lowValueMask = rightOverflowMask( Math.min( lowBitsAvailable, steps ) );
longs[lowLongIndex] |= ((((long)value)&lowValueMask) << lowBitInLong);
if ( steps > lowBitsAvailable )
{ // High bits
long highValueMask = rightOverflowMask( steps-lowBitsAvailable );
longs[lowLongIndex+1] |= (((long)value) >>> lowBitsAvailable)&highValueMask;
}
writePosition += steps;
return this;
}
public boolean available()
{
return readPosition < writePosition;
}
public byte getByte()
{
return getByte( Byte.SIZE );
}
public byte getByte( int steps )
{
return (byte) getLong( steps );
}
public short getShort()
{
return getShort( Short.SIZE );
}
public short getShort( int steps )
{
return (short) getLong( steps );
}
public int getInt()
{
return getInt( Integer.SIZE );
}
public int getInt( int steps )
{
return (int) getLong( steps );
}
public long getUnsignedInt()
{
return getInt( Integer.SIZE ) & 0xFFFFFFFFL;
}
public long getLong()
{
return getLong( Long.SIZE );
}
public long getLong( int steps )
{
int lowLongIndex = readPosition >> 6; // 64
int lowBitInLong = readPosition%64;
int lowBitsAvailable = 64-lowBitInLong;
long lowLongMask = rightOverflowMask( Math.min( lowBitsAvailable, steps ) ) << lowBitInLong;
long lowValue = longs[lowLongIndex] & lowLongMask;
long result = lowValue >>> lowBitInLong;
if ( steps > lowBitsAvailable )
{ // High bits
long highLongMask = rightOverflowMask( steps-lowBitsAvailable );
result |= ((longs[lowLongIndex+1] & highLongMask) << lowBitsAvailable);
}
readPosition += steps;
return result;
}
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_Bits.java
|
731
|
static class ArrayEntry<K,V> implements Entry<K,V>
{
private final K key;
private V value;
ArrayEntry( K key, V value )
{
this.key = key;
this.value = value;
}
@Override
public K getKey()
{
return key;
}
@Override
public V getValue()
{
return value;
}
void setNewValue( V value )
{
this.value = value;
}
@Override
public V setValue( V value )
{
V oldValue = value;
this.value = value;
return oldValue;
}
@Override
public String toString()
{
return key + "=" + value;
}
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_ArrayMap.java
|
732
|
{
@Override
public void put( Object key, Object value )
{
throw new IllegalStateException( "Immutable" );
}
@Override
public Object remove( Object key )
{
throw new IllegalStateException( "Immutable" );
}
@Override
public void clear()
{
throw new IllegalStateException( "Immutable" );
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_ArrayMap.java
|
733
|
public class ArrayMap<K,V>
{
@SuppressWarnings( "rawtypes" )
private static ArrayMap EMPTY = new ArrayMap()
{
@Override
public void put( Object key, Object value )
{
throw new IllegalStateException( "Immutable" );
}
@Override
public Object remove( Object key )
{
throw new IllegalStateException( "Immutable" );
}
@Override
public void clear()
{
throw new IllegalStateException( "Immutable" );
}
};
@SuppressWarnings( "unchecked" )
public static <K,V> ArrayMap<K,V> empty()
{
return EMPTY;
}
private Object data;
private volatile byte arrayCount;
private byte toMapThreshold = 5;
private final boolean useThreadSafeMap;
private final boolean switchBackToArray;
public ArrayMap()
{
switchBackToArray = false;
useThreadSafeMap = false;
data = new ArrayEntry[toMapThreshold];
}
public ArrayMap( byte mapThreshold, boolean threadSafe, boolean shrinkToArray )
{
this.toMapThreshold = mapThreshold;
this.useThreadSafeMap = threadSafe;
this.switchBackToArray = shrinkToArray;
data = new ArrayEntry[toMapThreshold];
}
@Override
public String toString()
{
final byte size;
final Object snapshot;
if ( useThreadSafeMap )
{
synchronized ( this )
{
size = arrayCount;
snapshot = this.data;
}
}
else
{
size = arrayCount;
snapshot = this.data;
}
if ( size != -1 )
{
StringBuilder result = new StringBuilder();
String sep = "[";
for ( int i = 0; i < size; i++ )
{
result.append( sep ).append( ( (ArrayEntry[]) snapshot )[i] );
sep = ", ";
}
return result.append( "]" ).toString();
}
else
{
return snapshot.toString();
}
}
public void put( K key, V value )
{
if ( useThreadSafeMap )
{
synchronizedPut( key, value );
return;
}
for ( int i = 0; i < arrayCount; i++ )
{
if ( ((ArrayEntry[])data)[i].getKey().equals( key ) )
{
((ArrayEntry[])data)[i].setNewValue( value );
return;
}
}
if ( arrayCount != -1 )
{
if ( arrayCount < ((ArrayEntry[])data).length )
{
((ArrayEntry[])data)[arrayCount++] = new ArrayEntry<K,V>( key, value );
}
else
{
Map propertyMap = new HashMap<K,V>( ((ArrayEntry[])data).length * 2 );
for ( int i = 0; i < arrayCount; i++ )
{
propertyMap.put( ((ArrayEntry[])data)[i].getKey(), ((ArrayEntry[])data)[i].getValue() );
}
data = propertyMap;
arrayCount = -1;
propertyMap.put( key, value );
}
}
else
{
((Map)data).put( key, value );
}
}
private synchronized void synchronizedPut( K key, V value )
{
for ( int i = 0; i < arrayCount; i++ )
{
if ( ((ArrayEntry[])data)[i].getKey().equals( key ) )
{
((ArrayEntry[])data)[i].setNewValue( value );
return;
}
}
if ( arrayCount != -1 )
{
if ( arrayCount < ((ArrayEntry[])data).length )
{
((ArrayEntry[])data)[arrayCount++] = new ArrayEntry<K,V>( key, value );
}
else
{
Map propertyMap = new HashMap<K,V>( ((ArrayEntry[])data).length * 2 );
for ( int i = 0; i < arrayCount; i++ )
{
propertyMap.put( ((ArrayEntry[])data)[i].getKey(), ((ArrayEntry[])data)[i].getValue() );
}
data = propertyMap;
arrayCount = -1;
propertyMap.put( key, value );
}
}
else
{
((Map)data).put( key, value );
}
}
public V get( K key )
{
if ( key == null )
{
return null;
}
if ( useThreadSafeMap )
{
return synchronizedGet( key );
}
int count = arrayCount;
for ( int i = 0; i < count; i++ )
{
ArrayEntry<K, V> entry = ((ArrayEntry[])data)[i];
if ( entry != null && key.equals( entry.getKey() ) )
{
return entry.getValue();
}
}
if ( arrayCount == -1 )
{
return (V) ((Map)data).get( key );
}
return null;
}
private synchronized V synchronizedGet( K key )
{
int count = arrayCount;
for ( int i = 0; i < count; i++ )
{
ArrayEntry<K, V> entry = ((ArrayEntry[])data)[i];
if ( entry != null && key.equals( entry.getKey() ) )
{
return entry.getValue();
}
}
if ( arrayCount == -1 )
{
return (V) ((Map)data).get( key );
}
return null;
}
private synchronized V synchronizedRemove( K key )
{
for ( int i = 0; i < arrayCount; i++ )
{
if ( ((ArrayEntry[])data)[i].getKey().equals( key ) )
{
V removedProperty = (V) ((ArrayEntry[])data)[i].getValue();
arrayCount--;
System.arraycopy( data, i + 1, data, i, arrayCount - i );
((ArrayEntry[])data)[arrayCount] = null;
return removedProperty;
}
}
if ( arrayCount == -1 )
{
V value = (V) ((Map)data).remove( key );
if ( switchBackToArray && ((Map)data).size() < toMapThreshold )
{
ArrayEntry[] arrayEntries = new ArrayEntry[toMapThreshold];
int tmpCount = 0;
for ( Object entryObject : ((Map)data).entrySet() )
{
Entry entry = (Entry) entryObject;
arrayEntries[tmpCount++] = new ArrayEntry( entry.getKey(), entry.getValue() );
}
data = arrayEntries;
arrayCount = (byte) tmpCount;
}
return value;
}
return null;
}
public V remove( K key )
{
if ( useThreadSafeMap )
{
return synchronizedRemove( key );
}
for ( int i = 0; i < arrayCount; i++ )
{
if ( ((ArrayEntry[])data)[i].getKey().equals( key ) )
{
V removedProperty = (V) ((ArrayEntry[])data)[i].getValue();
arrayCount--;
System.arraycopy( data, i + 1, data, i, arrayCount - i );
((ArrayEntry[])data)[arrayCount] = null;
return removedProperty;
}
}
if ( arrayCount == -1 )
{
V value = (V) ((Map)data).remove( key );
if ( switchBackToArray && ((Map)data).size() < toMapThreshold )
{
ArrayEntry[] arrayEntries = new ArrayEntry[toMapThreshold];
int tmpCount = 0;
for ( Object entryObject : ((Map)data).entrySet() )
{
Entry entry = (Entry) entryObject;
arrayEntries[tmpCount++] = new ArrayEntry( entry.getKey(), entry.getValue() );
}
data = arrayEntries;
arrayCount = (byte) tmpCount;
}
return value;
}
return null;
}
static class ArrayEntry<K,V> implements Entry<K,V>
{
private final K key;
private V value;
ArrayEntry( K key, V value )
{
this.key = key;
this.value = value;
}
@Override
public K getKey()
{
return key;
}
@Override
public V getValue()
{
return value;
}
void setNewValue( V value )
{
this.value = value;
}
@Override
public V setValue( V value )
{
V oldValue = value;
this.value = value;
return oldValue;
}
@Override
public String toString()
{
return key + "=" + value;
}
}
public Iterable<K> keySet()
{
if ( arrayCount == -1 )
{
return ((Map)data).keySet();
}
List<K> keys = new LinkedList<K>();
for ( int i = 0; i < arrayCount; i++ )
{
keys.add( (K) ((ArrayEntry[])data)[i].getKey() );
}
return keys;
}
public Iterable<V> values()
{
if ( arrayCount == -1 )
{
return ((Map)data).values();
}
List<V> values = new LinkedList<V>();
for ( int i = 0; i < arrayCount; i++ )
{
values.add( (V) ((ArrayEntry[])data)[i].getValue() );
}
return values;
}
public Set<Entry<K,V>> entrySet()
{
if ( arrayCount == -1 )
{
return ((Map)data).entrySet();
}
Set<Entry<K,V>> entries = new HashSet<Entry<K,V>>();
for ( int i = 0; i < arrayCount; i++ )
{
entries.add( ((ArrayEntry[])data)[i] );
}
return entries;
}
public int size()
{
if ( useThreadSafeMap )
{
return synchronizedSize();
}
if ( arrayCount != -1 )
{
return arrayCount;
}
return ((Map)data).size();
}
private synchronized int synchronizedSize()
{
if ( arrayCount != -1 )
{
return arrayCount;
}
return ((Map)data).size();
}
public void clear()
{
if ( useThreadSafeMap )
{
synchronizedClear();
return;
}
if ( arrayCount != -1 )
{
Arrays.fill( ((ArrayEntry[])data), null );
arrayCount = 0;
}
else
{
((Map)data).clear();
}
}
private synchronized void synchronizedClear()
{
if ( arrayCount != -1 )
{
Arrays.fill( ((ArrayEntry[])data), null );
arrayCount = 0;
}
else
{
((Map)data).clear();
}
}
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_ArrayMap.java
|
734
|
private static class ArrayIntIterator implements Iterator<Integer>,
Iterable<Integer>
{
private int[] intArray;
private int pos = -1;
private int arrayCount;
ArrayIntIterator( int[] array, int count )
{
this.intArray = array;
this.arrayCount = count;
}
public boolean hasNext()
{
return pos + 1 < arrayCount;
}
public Integer next()
{
return intArray[++pos];
}
public void remove()
{
throw new UnsupportedOperationException();
}
public Iterator<Integer> iterator()
{
return this;
}
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_ArrayIntSet.java
|
735
|
public class ArrayIntSet
{
private int maxRelSize = 256;
private int[] rels = new int[2];
// TODO: figure out if we need volatile here?
private int arrayCount = 0;
private Set<Integer> relationshipSet = null;
public boolean add( int id )
{
for ( int i = 0; i < arrayCount; i++ )
{
if ( rels[i] == id )
{
return false;
}
}
if ( arrayCount == rels.length && rels.length * 2 <= maxRelSize )
{
int newRels[] = new int[rels.length * 2];
System.arraycopy( rels, 0, newRels, 0, rels.length );
rels = newRels;
}
if ( arrayCount != -1 )
{
if ( arrayCount < rels.length )
{
rels[arrayCount++] = id;
return true;
}
relationshipSet = new HashSet<Integer>();
for ( int i = 0; i < arrayCount; i++ )
{
relationshipSet.add( rels[i] );
}
arrayCount = -1;
}
return relationshipSet.add( id );
}
public Iterator<Integer> iterator()
{
if ( arrayCount == -1 )
{
return relationshipSet.iterator();
}
return new ArrayIntIterator( rels, arrayCount );
}
public boolean remove( int id )
{
for ( int i = 0; i < arrayCount; i++ )
{
if ( rels[i] == id )
{
int[] dest = rels;
if ( arrayCount - 1 < rels.length / 3 )
{
dest = new int[rels.length / 2];
System.arraycopy( rels, 0, dest, 0, arrayCount );
}
if ( i + 1 < dest.length && (arrayCount - i - 1) > 0 )
{
System.arraycopy( rels, i + 1, dest, i, arrayCount - i - 1 );
rels = dest;
}
arrayCount--;
return true;
}
}
if ( arrayCount == -1 )
{
return relationshipSet.remove( id );
}
return false;
}
public Iterable<Integer> values()
{
if ( arrayCount == -1 )
{
return relationshipSet;
}
return new ArrayIntIterator( rels, arrayCount );
}
private static class ArrayIntIterator implements Iterator<Integer>,
Iterable<Integer>
{
private int[] intArray;
private int pos = -1;
private int arrayCount;
ArrayIntIterator( int[] array, int count )
{
this.intArray = array;
this.arrayCount = count;
}
public boolean hasNext()
{
return pos + 1 < arrayCount;
}
public Integer next()
{
return intArray[++pos];
}
public void remove()
{
throw new UnsupportedOperationException();
}
public Iterator<Integer> iterator()
{
return this;
}
}
public boolean contains( int id )
{
for ( int i = 0; i < arrayCount; i++ )
{
if ( rels[i] == id )
{
return true;
}
}
if ( arrayCount == -1 )
{
return relationshipSet.contains( id );
}
return false;
}
public int size()
{
if ( arrayCount != -1 )
{
return arrayCount;
}
return relationshipSet.size();
}
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_ArrayIntSet.java
|
736
|
public abstract class AbstractPrimitiveLongIterator implements PrimitiveLongIterator
{
private boolean hasNext;
private long nextValue;
@Override
public boolean hasNext()
{
return hasNext;
}
@Override
public long next()
{
if ( hasNext )
{
long result = nextValue;
computeNext();
return result;
}
throw new NoSuchElementException();
}
/**
* Computes the next item in this iterator. Implementations must call either {@link #next(long)}
* with the computed value, or {@link #endReached()} if there are no more items in this iterator.
*/
protected abstract void computeNext();
protected void endReached()
{
hasNext = false;
}
protected void next( long value )
{
nextValue = value;
hasNext = true;
}
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_AbstractPrimitiveLongIterator.java
|
737
|
public abstract class AbstractPrimitiveIntIterator implements PrimitiveIntIterator
{
private boolean hasNext;
private int nextValue;
@Override
public boolean hasNext()
{
return hasNext;
}
@Override
public int next()
{
if ( hasNext )
{
int result = nextValue;
computeNext();
return result;
}
throw new NoSuchElementException();
}
/**
* Computes the next item in this iterator. Implementations must call either {@link #next(int)}
* with the computed value, or {@link #endReached()} if there are no more items in this iterator.
*/
protected abstract void computeNext();
protected void endReached()
{
hasNext = false;
}
protected void next( int value )
{
nextValue = value;
hasNext = true;
}
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_AbstractPrimitiveIntIterator.java
|
738
|
/**
 * Traversal tests over a fixed tree rooted at node "1": the root has three
 * children ("2", "3", "4"), each of which has three children of its own,
 * giving 13 nodes and 12 relationships in total.
 */
public class TreeGraphTest extends TraversalTestBase
{
    /*
     *                     (1)
     *               ------ | ------
     *             /        |        \
     *           (2)       (3)       (4)
     *          / | \     / | \     / | \
     *        (5)(6)(7) (8)(9)(A) (B)(C)(D)
     */
    private static final String[] THE_WORLD_AS_WE_KNOWS_IT = new String[] {
            "1 TO 2", "1 TO 3", "1 TO 4", "2 TO 5", "2 TO 6", "2 TO 7",
            "3 TO 8", "3 TO 9", "3 TO A", "4 TO B", "4 TO C", "4 TO D", };

    @Before
    public void setupGraph()
    {
        createGraph( THE_WORLD_AS_WE_KNOWS_IT );
    }

    @Test
    public void nodesIteratorReturnAllNodes() throws Exception
    {
        Transaction transaction = beginTx();
        try
        {
            Traverser traverser = traversal().traverse( node( "1" ) );
            int count = 0;
            for ( Node node : traverser.nodes() )
            {
                assertNotNull( "returned nodes should not be null. node #"
                               + count, node );
                count++;
            }
            assertEquals( 13, count );
        }
        finally
        {
            transaction.finish();
        }
    }

    @Test
    public void relationshipsIteratorReturnAllNodes() throws Exception
    {
        Transaction transaction = beginTx();
        try
        {
            Traverser traverser = traversal().traverse( node( "1" ) );
            int count = 0;
            for ( Relationship relationship : traverser.relationships() )
            {
                // NOTE(review): message text looks truncated ("should not be") --
                // left unchanged to avoid altering unrelated runtime strings.
                assertNotNull(
                        "returned relationships should not be. relationship #"
                        + count, relationship );
                count++;
            }
            assertEquals( 12, count );
        }
        finally
        {
            transaction.finish();
        }
    }

    @Test
    public void pathsIteratorReturnAllNodes() throws Exception
    {
        Transaction transaction = beginTx();
        try
        {
            Traverser traverser = traversal().traverse( node( "1" ) );
            int count = 0;
            for ( Path path : traverser )
            {
                assertNotNull( "returned paths should not be null. path #"
                               + count, path );
                count++;
            }
            assertEquals( 13, count );
        }
        finally
        {
            transaction.finish();
        }
    }

    @Test
    public void testBreadthFirst() throws Exception
    {
        Traverser traverser = traversal().breadthFirst().traverse( node( "1" ) );
        // Pushed deepest level first so that assertLevels() pops shallow-first.
        Stack<Set<String>> levels = new Stack<Set<String>>();
        levels.push( new HashSet<String>( asList( "5", "6", "7", "8",
                "9", "A", "B", "C", "D" ) ) );
        levels.push( new HashSet<String>( asList( "2", "3", "4" ) ) );
        levels.push( new HashSet<String>( asList( "1" ) ) );
        Transaction tx = beginTx();
        try
        {
            assertLevels( traverser, levels );
            tx.success();
        }
        finally
        {
            tx.finish();
        }
    }

    @Test
    public void testDepthFirstTraversalReturnsNodesOnCorrectDepths()
            throws Exception
    {
        Transaction transaction = beginTx();
        try
        {
            Traverser traverser = traversal().depthFirst().traverse( node( "1" ) );
            int i = 0;
            for ( Path pos : traverser )
            {
                assertEquals( expectedDepth( i++ ), pos.length() );
            }
            assertEquals( 13, i );
        }
        finally
        {
            transaction.finish();
        }
    }

    @Test
    public void testPostorderDepthFirstReturnsDeeperNodesFirst()
    {
        Traverser traverser = traversal().order( postorderDepthFirst() ).traverse( node( "1" ) );
        int i = 0;
        List<String> encounteredNodes = new ArrayList<String>();
        // Fixed: the transaction is now closed in a finally block, so a failing
        // assertion no longer leaks an open transaction (matches the other tests).
        Transaction tx = beginTx();
        try
        {
            for ( Path pos : traverser )
            {
                encounteredNodes.add( (String) pos.endNode().getProperty( "name" ) );
                // Post-order yields the depth sequence of pre-order, reversed.
                assertEquals( expectedDepth( ( 12 - i++ ) ), pos.length() );
            }
            tx.success();
        }
        finally
        {
            tx.finish();
        }
        assertEquals( 13, i );
        // Post-order: every child must have been emitted before its parent.
        assertTrue( encounteredNodes.indexOf( "5" ) < encounteredNodes.indexOf( "2" ) );
        assertTrue( encounteredNodes.indexOf( "6" ) < encounteredNodes.indexOf( "2" ) );
        assertTrue( encounteredNodes.indexOf( "7" ) < encounteredNodes.indexOf( "2" ) );
        assertTrue( encounteredNodes.indexOf( "8" ) < encounteredNodes.indexOf( "3" ) );
        assertTrue( encounteredNodes.indexOf( "9" ) < encounteredNodes.indexOf( "3" ) );
        assertTrue( encounteredNodes.indexOf( "A" ) < encounteredNodes.indexOf( "3" ) );
        assertTrue( encounteredNodes.indexOf( "B" ) < encounteredNodes.indexOf( "4" ) );
        assertTrue( encounteredNodes.indexOf( "C" ) < encounteredNodes.indexOf( "4" ) );
        assertTrue( encounteredNodes.indexOf( "D" ) < encounteredNodes.indexOf( "4" ) );
        assertTrue( encounteredNodes.indexOf( "2" ) < encounteredNodes.indexOf( "1" ) );
        assertTrue( encounteredNodes.indexOf( "3" ) < encounteredNodes.indexOf( "1" ) );
        assertTrue( encounteredNodes.indexOf( "4" ) < encounteredNodes.indexOf( "1" ) );
    }

    @Test
    public void testPostorderBreadthFirstReturnsDeeperNodesFirst()
    {
        Traverser traverser = traversal().order( postorderBreadthFirst() ).traverse( node( "1" ) );
        // Reverse of testBreadthFirst(): root level is expected last.
        Stack<Set<String>> levels = new Stack<Set<String>>();
        levels.push( new HashSet<String>( asList( "1" ) ) );
        levels.push( new HashSet<String>( asList( "2", "3", "4" ) ) );
        levels.push( new HashSet<String>( asList( "5", "6", "7", "8",
                "9", "A", "B", "C", "D" ) ) );
        Transaction tx = beginTx();
        try
        {
            assertLevels( traverser, levels );
            tx.success();
        }
        finally
        {
            tx.finish();
        }
    }

    /**
     * Depth of the i:th path in a pre-order depth-first traversal of this tree:
     * 0 for the root, then repeating groups of one depth-1 node followed by
     * three depth-2 nodes.
     */
    private int expectedDepth( int i )
    {
        assertTrue( i < 13 );
        if ( i == 0 )
        {
            return 0;
        }
        else if ( ( i - 1 ) % 4 == 0 )
        {
            return 1;
        }
        else
        {
            return 2;
        }
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_traversal_TreeGraphTest.java
|
739
|
/**
 * Renders a relationship as {@code "<start> <relationship> <end>"},
 * delegating the rendering of the two end nodes and of the relationship
 * itself to the supplied {@link Representation}s.
 */
protected static final class RelationshipRepresentation implements
        Representation<Relationship>
{
    private final Representation<? super Node> nodeRepresentation;
    private final Representation<? super Relationship> relationshipRepresentation;

    public RelationshipRepresentation( Representation<? super Node> nodes )
    {
        // Default to rendering the relationship as its type name.
        this( nodes, RELATIONSHIP_TYPE_REPRESENTATION );
    }

    public RelationshipRepresentation( Representation<? super Node> nodes,
            Representation<? super Relationship> rel )
    {
        this.nodeRepresentation = nodes;
        this.relationshipRepresentation = rel;
    }

    public String represent( Relationship item )
    {
        StringBuilder result = new StringBuilder();
        result.append( nodeRepresentation.represent( item.getStartNode() ) );
        result.append( ' ' );
        result.append( relationshipRepresentation.represent( item ) );
        result.append( ' ' );
        result.append( nodeRepresentation.represent( item.getEndNode() ) );
        return result.toString();
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_traversal_TraversalTestBase.java
|
740
|
/**
 * Renders a property container as the (string) value of a single property,
 * selected by the key given at construction time.
 */
protected static final class PropertyRepresentation implements
        Representation<PropertyContainer>
{
    private final String key;

    public PropertyRepresentation( String key )
    {
        this.key = key;
    }

    public String represent( PropertyContainer item )
    {
        // Property is assumed to exist and be a String for the tested graphs.
        return (String) item.getProperty( key );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_traversal_TraversalTestBase.java
|
741
|
/**
 * Renders a path as the comma-separated renderings of its nodes, in path
 * order, using the supplied node {@link Representation}.
 */
protected static final class NodePathRepresentation implements
        Representation<Path>
{
    private final Representation<? super Node> nodes;

    public NodePathRepresentation( Representation<? super Node> nodes )
    {
        this.nodes = nodes;
    }

    public String represent( Path item )
    {
        StringBuilder result = new StringBuilder();
        String separator = "";
        for ( Node node : item.nodes() )
        {
            result.append( separator ).append( nodes.represent( node ) );
            separator = ",";
        }
        return result.toString();
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_traversal_TraversalTestBase.java
|
742
|
{
public String represent( Relationship item )
{
return item.getType().name();
}
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_traversal_TraversalTestBase.java
|
743
|
/**
 * Base class for traversal tests: builds graphs from textual descriptions,
 * looks up created nodes by name, and provides assertion helpers that compare
 * traversal results against expected string representations.
 */
public abstract class TraversalTestBase extends AbstractNeo4jTestCase
{
    // Nodes created by createGraph(), keyed by their "name" property.
    private Map<String, Node> nodes;

    @Override
    protected boolean restartGraphDbBetweenTests()
    {
        return true;
    }

    /** Returns the node created under the given name by {@link #createGraph(String...)}. */
    protected Node node( String name )
    {
        return nodes.get( name );
    }

    protected Node getNode( long id )
    {
        return getGraphDb().getNodeById( id );
    }

    protected Transaction beginTx()
    {
        return getGraphDb().beginTx();
    }

    /**
     * Creates a graph from a description of the form {@code "A TO B"} (one
     * relationship per string) and remembers the created nodes by name.
     */
    protected void createGraph( String... description )
    {
        nodes = createGraph( GraphDescription.create( description ) );
    }

    private Map<String, Node> createGraph( GraphDefinition graph )
    {
        Transaction tx = beginTx();
        try
        {
            Map<String, Node> result = graph.create( getGraphDb() );
            tx.success();
            return result;
        }
        finally
        {
            tx.finish();
        }
    }

    /** Linear scan over all nodes for one whose "name" property matches, or null. */
    protected Node getNodeWithName( String name )
    {
        for ( Node node : GlobalGraphOperations.at( getGraphDb() ).getAllNodes() )
        {
            String nodeName = (String) node.getProperty( "name", null );
            if ( nodeName != null && nodeName.equals( name ) )
            {
                return node;
            }
        }
        return null;
    }

    /**
     * Asserts that the traverser returns every node of one level (by "name"
     * property) before any node of the next level. The levels stack is popped
     * top-down and must be fully drained by the traversal.
     */
    protected void assertLevels( Traverser traverser, Stack<Set<String>> levels )
    {
        Set<String> current = levels.pop();
        for ( Path position : traverser )
        {
            String nodeName = (String) position.endNode().getProperty( "name" );
            if ( current.isEmpty() )
            {
                current = levels.pop();
            }
            assertTrue( "Should not contain node (" + nodeName
                        + ") at level " + (3 - levels.size()),
                    current.remove( nodeName ) );
        }
        assertTrue( "Should have no more levels", levels.isEmpty() );
        assertTrue( "Should be empty", current.isEmpty() );
    }

    protected static final Representation<PropertyContainer> NAME_PROPERTY_REPRESENTATION = new PropertyRepresentation( "name" );

    protected static final Representation<Relationship> RELATIONSHIP_TYPE_REPRESENTATION = new Representation<Relationship>()
    {
        public String represent( Relationship item )
        {
            return item.getType().name();
        }
    };

    /** Strategy for rendering an item as a string for comparison in tests. */
    protected interface Representation<T>
    {
        String represent( T item );
    }

    /** Renders a property container as the string value of one property. */
    protected static final class PropertyRepresentation implements
            Representation<PropertyContainer>
    {
        public PropertyRepresentation( String key )
        {
            this.key = key;
        }

        private final String key;

        public String represent( PropertyContainer item )
        {
            return (String) item.getProperty( key );
        }
    }

    /** Renders a relationship as "&lt;start&gt; &lt;relationship&gt; &lt;end&gt;". */
    protected static final class RelationshipRepresentation implements
            Representation<Relationship>
    {
        private final Representation<? super Node> nodes;
        private final Representation<? super Relationship> rel;

        public RelationshipRepresentation( Representation<? super Node> nodes )
        {
            this( nodes, RELATIONSHIP_TYPE_REPRESENTATION );
        }

        public RelationshipRepresentation( Representation<? super Node> nodes,
                Representation<? super Relationship> rel )
        {
            this.nodes = nodes;
            this.rel = rel;
        }

        public String represent( Relationship item )
        {
            return nodes.represent( item.getStartNode() ) + " "
                   + rel.represent( item ) + " "
                   + nodes.represent( item.getEndNode() );
        }
    }

    /** Renders a path as the comma-separated renderings of its nodes. */
    protected static final class NodePathRepresentation implements
            Representation<Path>
    {
        private final Representation<? super Node> nodes;

        public NodePathRepresentation( Representation<? super Node> nodes )
        {
            this.nodes = nodes;
        }

        public String represent( Path item )
        {
            StringBuilder builder = new StringBuilder();
            for ( Node node : item.nodes() )
            {
                builder.append( builder.length() > 0 ? "," : "" );
                builder.append( nodes.represent( node ) );
            }
            return builder.toString();
        }
    }

    protected <T> void expect( Iterable<? extends T> items,
            Representation<T> representation, String... expected )
    {
        expect( items, representation, new HashSet<String>(
                Arrays.asList( expected ) ) );
    }

    /**
     * Asserts that the rendered items are exactly the expected set, in any
     * order. The expected set is consumed in the process.
     */
    protected <T> void expect( Iterable<? extends T> items,
            Representation<T> representation, Set<String> expected )
    {
        Transaction tx = beginTx();
        Collection<String> encounteredItems = new ArrayList<String>();
        try
        {
            for ( T item : items )
            {
                String repr = representation.represent( item );
                assertTrue( repr + " not expected ", expected.remove( repr ) );
                encounteredItems.add( repr );
            }
            tx.success();
        }
        finally
        {
            tx.finish();
        }
        if ( !expected.isEmpty() )
        {
            // Fixed typo in failure message: "exepected" -> "expected".
            fail( "The expected elements " + expected + " were not returned. Returned were: " + encounteredItems );
        }
    }

    protected void expectNodes( Traverser traverser, String... nodes )
    {
        expect( traverser.nodes(), NAME_PROPERTY_REPRESENTATION, nodes );
    }

    protected void expectRelationships( Traverser traverser,
            String... relationships )
    {
        expect( traverser.relationships(), new RelationshipRepresentation(
                NAME_PROPERTY_REPRESENTATION ), relationships );
    }

    protected void expectPaths( Traverser traverser, String... paths )
    {
        expectPaths( traverser, new HashSet<String>( Arrays.asList( paths ) ) );
    }

    protected void expectPaths( Traverser traverser, Set<String> expected )
    {
        expect( traverser, new NodePathRepresentation(
                NAME_PROPERTY_REPRESENTATION ), expected );
    }

    /** Asserts that {@code actual} contains exactly the expected elements, any order. */
    public static <E> void assertContains( Iterator<E> actual, E... expected )
    {
        assertContains( IteratorUtil.asIterable( actual ), expected );
    }

    public static <E> void assertContains( Iterable<E> actual, E... expected )
    {
        Set<E> expectation = new HashSet<E>( Arrays.asList( expected ) );
        for ( E element : actual )
        {
            if ( !expectation.remove( element ) )
            {
                fail( "unexpected element <" + element + ">" );
            }
        }
        if ( !expectation.isEmpty() )
        {
            fail( "the expected elements <" + expectation
                  + "> were not contained" );
        }
    }

    /** Asserts that {@code collection} equals the expected items, in order. */
    public static <T> void assertContainsInOrder( Collection<T> collection,
            T... expectedItems )
    {
        String collectionString = join( ", ", collection.toArray() );
        assertEquals( collectionString, expectedItems.length, collection.size() );
        Iterator<T> itr = collection.iterator();
        for ( int i = 0; itr.hasNext(); i++ )
        {
            assertEquals( expectedItems[i], itr.next() );
        }
    }

    public static <T> void assertContainsInOrder( Iterable<T> collection,
            T... expectedItems )
    {
        assertContainsInOrder( IteratorUtil.asCollection( collection ), expectedItems );
    }

    /** Joins the string forms of {@code items} with {@code delimiter}. */
    public static <T> String join( String delimiter, T... items )
    {
        StringBuilder buffer = new StringBuilder();
        for ( T item : items )
        {
            if ( buffer.length() > 0 )
            {
                buffer.append( delimiter );
            }
            buffer.append( item.toString() );
        }
        return buffer.toString();
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_traversal_TraversalTestBase.java
|
744
|
/**
 * A {@link TraversalBranchImpl} that additionally carries traversal state,
 * implementing {@link BranchState} so that expanders and evaluators can read
 * and update it.
 *
 * Each branch keeps two references: {@code stateForMe} -- the state this
 * branch was created with, exposed via {@link #getState()}/{@link #state()}
 * and never changed -- and {@code stateForChildren}, which is what child
 * branches expanded after this point will inherit and which can be replaced
 * via {@link #setState(Object)}.
 */
public class TraversalBranchWithState extends TraversalBranchImpl implements BranchState
{
    protected final Object stateForMe;
    protected Object stateForChildren;

    public TraversalBranchWithState( TraversalBranch parent, int depth, Node source, Relationship toHere, Object inheritedState )
    {
        super( parent, depth, source, toHere );
        // Children inherit this branch's state until setState() overrides it.
        this.stateForMe = this.stateForChildren = inheritedState;
    }

    public TraversalBranchWithState( TraversalBranch parent, Node source, InitialBranchState initialState )
    {
        super( parent, source );
        this.stateForMe = this.stateForChildren = initialState.initialState( this );
    }

    @Override
    public void setState( Object state )
    {
        // Affects only branches expanded after this call; this branch's own
        // state (stateForMe) stays fixed.
        this.stateForChildren = state;
    }

    @Override
    public Object getState()
    {
        return this.stateForMe;
    }

    @Override
    protected TraversalBranch newNextBranch( Node node, Relationship relationship )
    {
        // Child branches receive the (possibly updated) stateForChildren.
        return new TraversalBranchWithState( this, length() + 1, node, relationship, stateForChildren );
    }

    @Override
    protected Iterator<Relationship> expandRelationshipsWithoutChecks( PathExpander expander )
    {
        // This branch doubles as the BranchState handed to the expander.
        Iterable<Relationship> iterable = expander.expand( this, this );
        return iterable.iterator();
    }

    @Override
    public Object state()
    {
        return this.stateForMe;
    }

    @Override
    protected void evaluate( TraversalContext context )
    {
        // Likewise, this branch is passed as the BranchState for evaluation.
        setEvaluation( context.evaluate( this, this ) );
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_traversal_TraversalBranchWithState.java
|
745
|
{
private TraversalBranch branch = TraversalBranchImpl.this;
@Override
protected Node fetchNextOrNull()
{
try
{
return branch.length() >= 0 ? branch.endNode() : null;
}
finally
{
branch = branch.parent();
}
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_traversal_TraversalBranchImpl.java
|
746
|
{
@Override
public Iterator<Node> iterator()
{
return new PrefetchingIterator<Node>()
{
private TraversalBranch branch = TraversalBranchImpl.this;
@Override
protected Node fetchNextOrNull()
{
try
{
return branch.length() >= 0 ? branch.endNode() : null;
}
finally
{
branch = branch.parent();
}
}
};
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_traversal_TraversalBranchImpl.java
|
747
|
{
private TraversalBranch branch = TraversalBranchImpl.this;
@Override
protected Relationship fetchNextOrNull()
{
try
{
return branch != null ? branch.lastRelationship() : null;
}
finally
{
branch = branch != null ? branch.parent() : null;
}
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_traversal_TraversalBranchImpl.java
|
748
|
// 16-bit value. Only ByteBuffer output is supported for shorts; presumably
// LogBuffer offers no putShort primitive -- TODO confirm against LogBuffer API.
SHORT( Short.SIZE )
{
    @Override
    public void put( ByteBuffer buffer, Number value )
    {
        buffer.putShort( value.shortValue() );
    }

    @Override
    public void put( LogBuffer buffer, Number value )
    {
        throw new UnsupportedOperationException();
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_BufferNumberPutter.java
|
749
|
// 32-bit value, written via putInt on either buffer type.
INT( Integer.SIZE )
{
    @Override
    public void put( ByteBuffer buffer, Number value )
    {
        buffer.putInt( value.intValue() );
    }

    @Override
    public void put( LogBuffer buffer, Number value ) throws IOException
    {
        buffer.putInt( value.intValue() );
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_BufferNumberPutter.java
|
750
|
// 64-bit value, written via putLong on either buffer type.
LONG( Long.SIZE )
{
    @Override
    public void put( ByteBuffer buffer, Number value )
    {
        buffer.putLong( value.longValue() );
    }

    @Override
    public void put( LogBuffer buffer, Number value ) throws IOException
    {
        buffer.putLong( value.longValue() );
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_BufferNumberPutter.java
|
751
|
/**
 * Tests for {@link CappedOperation}: verifies that the count switch triggers
 * on the first event and then once per N further events, and that combining
 * it with a different-items switch also triggers whenever the event changes.
 */
public class CappedOperationTest
{
    @Test
    public void shouldTriggerOnSingleSwitch() throws Exception
    {
        // GIVEN
        AtomicInteger triggerCount = new AtomicInteger();
        CappedOperation<String> operation = countingCappedOperations( triggerCount, count( 2 ) );

        // WHEN/THEN
        // First event always triggers; thereafter every second event does.
        operation.event( "test" );
        assertEquals( 1, triggerCount.get() );
        operation.event( "test" );
        assertEquals( 1, triggerCount.get() );
        operation.event( "test" );
        assertEquals( 2, triggerCount.get() );
        operation.event( "test" );
        assertEquals( 2, triggerCount.get() );
    }

    @Test
    public void shouldTriggerOnDifferentItemsEvenIfCountSwitch() throws Exception
    {
        // GIVEN
        AtomicInteger triggerCount = new AtomicInteger();
        CappedOperation<String> operation = countingCappedOperations( triggerCount,
                count( 2 ), differentItems() );

        // WHEN/THEN
        // A changed event triggers immediately even before the count is reached.
        operation.event( "test" );
        assertEquals( 1, triggerCount.get() );
        operation.event( "OTHER" );
        assertEquals( 2, triggerCount.get() );
        operation.event( "OTHER" );
        assertEquals( 2, triggerCount.get() );
        operation.event( "OTHER" );
        assertEquals( 3, triggerCount.get() );
    }

    // Builds a CappedOperation whose triggered() merely bumps the counter.
    private CappedOperation<String> countingCappedOperations( final AtomicInteger triggerCount,
            Switch... openers )
    {
        return new CappedOperation<String>( openers )
        {
            @Override
            protected void triggered( String event )
            {
                triggerCount.incrementAndGet();
            }
        };
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_util_CappedOperationTest.java
|
752
|
/**
 * A {@link Map} where every mutating operation replaces the backing
 * {@link HashMap} with a modified copy, published through a volatile field.
 * Reads are lock-free and see a consistent snapshot; mutators synchronize
 * against each other.
 *
 * The collection views ({@link #keySet()}, {@link #values()},
 * {@link #entrySet()}) re-read the current map on each method call, so an
 * iterator obtained from a view walks the snapshot that was current when the
 * iterator was created. View iterators do not support removal.
 */
public class CopyOnWriteHashMap<K, V> implements Map<K, V>
{
    // Volatile so readers always observe the most recently published copy.
    private volatile Map<K, V> actual = new HashMap<K, V>();

    @Override
    public int size()
    {
        return actual.size();
    }

    @Override
    public boolean isEmpty()
    {
        return actual.isEmpty();
    }

    @Override
    public boolean containsKey( Object key )
    {
        return actual.containsKey( key );
    }

    @Override
    public boolean containsValue( Object value )
    {
        return actual.containsValue( value );
    }

    @Override
    public V get( Object key )
    {
        return actual.get( key );
    }

    // Takes a private, mutable copy of the current map for a mutator to work on.
    private Map<K, V> copy()
    {
        return new HashMap<K, V>( actual );
    }

    @Override
    public synchronized V put( K key, V value )
    {
        Map<K, V> copy = copy();
        V previous = copy.put( key, value );
        actual = copy;
        return previous;
    }

    @Override
    public synchronized V remove( Object key )
    {
        Map<K, V> copy = copy();
        V previous = copy.remove( key );
        actual = copy;
        return previous;
    }

    @Override
    public synchronized void putAll( Map<? extends K, ? extends V> m )
    {
        Map<K, V> copy = copy();
        copy.putAll( m );
        actual = copy;
    }

    @Override
    public synchronized void clear()
    {
        // No copy needed: simply publish a fresh empty map.
        actual = new HashMap<K, V>();
    }

    /** Iterator wrapper that forbids {@link Iterator#remove()}. */
    private static class UnsupportedRemoveIterator<T> implements Iterator<T>
    {
        private final Iterator<T> actual;

        UnsupportedRemoveIterator( Iterator<T> actual )
        {
            this.actual = actual;
        }

        @Override
        public boolean hasNext()
        {
            return actual.hasNext();
        }

        @Override
        public T next()
        {
            return actual.next();
        }

        @Override
        public void remove()
        {
            throw new UnsupportedOperationException();
        }
    }

    @Override
    public Set<K> keySet()
    {
        return new AbstractSet<K>()
        {
            // Removal through the view goes through the copy-on-write remove above.
            @Override
            public boolean remove( Object o )
            {
                return CopyOnWriteHashMap.this.remove( o ) != null;
            }

            @Override
            public Iterator<K> iterator()
            {
                return new UnsupportedRemoveIterator<K>( actual.keySet().iterator() );
            }

            @Override
            public int size()
            {
                return actual.size();
            }
        };
    }

    @Override
    public Collection<V> values()
    {
        return new AbstractCollection<V>()
        {
            @Override
            public Iterator<V> iterator()
            {
                return new UnsupportedRemoveIterator<V>( actual.values().iterator() );
            }

            @Override
            public int size()
            {
                return actual.size();
            }
        };
    }

    @Override
    public Set<Entry<K, V>> entrySet()
    {
        return new AbstractSet<Entry<K,V>>()
        {
            @Override
            public boolean remove( Object o )
            {
                throw new UnsupportedOperationException();
            }

            @Override
            public Iterator<Entry<K, V>> iterator()
            {
                // Entries are wrapped so setValue() cannot bypass the
                // copy-on-write discipline by mutating a backing map in place.
                return new UnsupportedRemoveIterator<Entry<K,V>>( actual.entrySet().iterator() )
                {
                    @Override
                    public Entry<K, V> next()
                    {
                        final Entry<K, V> actualNext = super.next();
                        return new Entry<K,V>()
                        {
                            @Override
                            public K getKey()
                            {
                                return actualNext.getKey();
                            }

                            @Override
                            public V getValue()
                            {
                                return actualNext.getValue();
                            }

                            @Override
                            public V setValue( V value )
                            {
                                throw new UnsupportedOperationException();
                            }

                            // equals/hashCode delegate to the underlying entry.
                            @Override
                            public boolean equals( Object obj )
                            {
                                return actualNext.equals( obj );
                            }

                            @Override
                            public int hashCode()
                            {
                                return actualNext.hashCode();
                            }
                        };
                    }
                };
            }

            @Override
            public int size()
            {
                return actual.size();
            }
        };
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_CopyOnWriteHashMap.java
|
753
|
/** Captures an iterator so its contents can later be materialized as a set. */
private class Snapshot<E> {
    private final Iterator<E> input;

    private Snapshot( Iterator<E> input )
    {
        this.input = input;
    }

    // Drains the captured iterator; iterators are single-use, so call at most once.
    public Set<E> toSet()
    {
        return IteratorUtil.asSet( input );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_util_CopyOnWriteAfterIteratorHashSetTest.java
|
754
|
/**
 * Tests for {@link CopyOnWriteAfterIteratorHashSet}: a snapshot taken by
 * obtaining an iterator must remain stable across every kind of subsequent
 * mutation (add, remove, clear, retainAll, removeAll), while fresh iterators
 * observe the mutated contents.
 */
public class CopyOnWriteAfterIteratorHashSetTest {
    @Test
    public void should_not_change_iterated_snapshot_by_adding() {
        // given
        CopyOnWriteAfterIteratorHashSet<String> set = newCOWSet( "hullo" );
        Snapshot<String> initialSnapshot = snapshot( set );
        // when
        set.add( "wurld" );
        // then
        assertEquals( asSet( "hullo" ), initialSnapshot.toSet());
        assertEquals( asSet( "hullo", "wurld" ), snapshot( set ).toSet() );
    }

    @Test
    public void should_not_change_iterated_snapshot_by_removing() {
        // given
        CopyOnWriteAfterIteratorHashSet<String> set = newCOWSet( "hullo", "wurld" );
        Snapshot<String> initialSnapshot = snapshot( set );
        // when
        set.remove( "wurld" );
        // then
        assertEquals( asSet( "hullo", "wurld" ), initialSnapshot.toSet() );
        assertEquals( asSet( "hullo" ), snapshot( set ).toSet() );
    }

    @Test
    public void should_not_change_iterated_snapshot_by_clearing() {
        // given
        CopyOnWriteAfterIteratorHashSet<String> set = newCOWSet( "hullo", "wurld" );
        Snapshot<String> initialSnapshot = snapshot( set );
        // when
        set.clear();
        // then
        assertEquals( asSet( "hullo", "wurld" ), initialSnapshot.toSet() );
        assertEquals( IteratorUtil.<String>asSet(), snapshot( set ).toSet() );
    }

    @Test
    public void should_support_multiple_stable_snapshots() {
        // given
        CopyOnWriteAfterIteratorHashSet<String> set = newCOWSet();
        set.add( "hullo" );
        // when
        // Interleave snapshots with mutations; each must stay independent.
        Snapshot<String> snapshot1 = snapshot( set );
        set.add( "wurld" );
        Snapshot<String> snapshot2 = snapshot( set );
        set.add( "!" );
        set.remove( "wurld" );
        Snapshot<String> snapshot3 = snapshot( set );
        set.clear();
        // then
        assertEquals( asSet( "hullo" ), snapshot1.toSet() );
        assertEquals( asSet( "hullo", "wurld" ), snapshot2.toSet() );
        assertEquals( asSet( "hullo", "!" ), snapshot3.toSet() );
        assertEquals( IteratorUtil.<String>asSet(), snapshot( set ).toSet() );
    }

    @Test
    public void should_not_change_iterated_snapshot_by_retaining_all() {
        // given
        CopyOnWriteAfterIteratorHashSet<String> set = newCOWSet( "hullo", "wurld", "!" );
        Snapshot<String> initialSnapshot = snapshot( set );
        // when
        set.retainAll( asList( "!", "!" ) );
        // then
        assertEquals( asSet( "hullo", "wurld", "!" ), initialSnapshot.toSet());
        assertEquals( asSet( "!" ), snapshot( set ).toSet() );
    }

    @Test
    public void should_not_change_iterated_snapshot_by_removing_all() {
        // given
        CopyOnWriteAfterIteratorHashSet<String> set = newCOWSet( "hullo", "wurld", "!" );
        Snapshot<String> initialSnapshot = snapshot( set );
        // when
        set.removeAll(asList("!", "!"));
        // then
        assertEquals (asSet( "hullo", "wurld", "!" ), initialSnapshot.toSet());
        assertEquals( asSet( "hullo", "wurld" ), snapshot( set ).toSet() );
    }

    // Convenience factory: a set pre-populated with the given elements.
    private CopyOnWriteAfterIteratorHashSet<String> newCOWSet( String... elements ) {
        CopyOnWriteAfterIteratorHashSet<String> result = new CopyOnWriteAfterIteratorHashSet<>();
        Collections.addAll( result, elements );
        return result;
    }

    // Obtaining the iterator is what freezes the current backing map.
    private Snapshot<String> snapshot( CopyOnWriteAfterIteratorHashSet<String> set ) {
        return new Snapshot<String>(set.iterator());
    }

    /** Captures an iterator so its contents can later be materialized as a set. */
    private class Snapshot<E> {
        private final Iterator<E> input;

        private Snapshot( Iterator<E> input )
        {
            this.input = input;
        }

        public Set<E> toSet()
        {
            return IteratorUtil.asSet( input );
        }
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_util_CopyOnWriteAfterIteratorHashSetTest.java
|
755
|
/**
 * A {@link Set} backed by a {@link HashMap} that copies on write only after an
 * iterator has been handed out: {@link #iterator()} flips a {@code readOnly}
 * flag, and the next mutation replaces the backing map with a copy, leaving
 * the iterated snapshot untouched. Before any iteration, mutations happen in
 * place. Not synchronized -- single-threaded use assumed (TODO confirm with
 * callers).
 */
class CopyOnWriteAfterIteratorHashSet<E> implements Set<E>
{
    // Lazily created: null means the empty set.
    private transient HashMap<E, Object> map;
    // True while an iterator over the current map may be live; mutators must
    // then copy the map instead of modifying it in place.
    private boolean readOnly = false;
    /** Marker to signal that the value is present in the backing map. */
    private static final Object PRESENT = new Object();

    @Override
    public Iterator<E> iterator()
    {
        if ( map == null || map.isEmpty() )
        {
            return IteratorUtil.emptyIterator();
        }
        // Freeze the current map for the benefit of this iterator.
        readOnly = true;
        return map.keySet().iterator();
    }

    @Override
    public String toString()
    {
        return map == null ? "[]" : map.keySet().toString();
    }

    @Override
    public Object[] toArray()
    {
        return map == null ? new Object[0] : map.keySet().toArray();
    }

    @Override
    @SuppressWarnings("SuspiciousToArrayCall")
    public <T> T[] toArray( T[] a )
    {
        return map == null ? a : map.keySet().toArray( a );
    }

    @Override
    public int size()
    {
        return map == null ? 0 : map.size();
    }

    @Override
    public boolean isEmpty()
    {
        return map == null || map.isEmpty();
    }

    @Override
    @SuppressWarnings("SuspiciousMethodCalls")
    public boolean contains( Object o )
    {
        return map != null && map.containsKey( o );
    }

    @Override
    public boolean add( E e )
    {
        if ( map == null )
        {
            map = new HashMap<>();
        }
        if ( readOnly )
        {
            // No-op adds don't need a copy; the frozen snapshot stays shared.
            if ( map.containsKey( e ) )
            {
                return false;
            }
            map = new HashMap<>( map );
            readOnly = false;
        }
        return map.put( e, PRESENT ) == null;
    }

    @Override
    @SuppressWarnings("SuspiciousMethodCalls")
    public boolean remove( Object o )
    {
        if ( map == null )
        {
            return false;
        }
        if ( readOnly )
        {
            // No-op removes don't need a copy; the frozen snapshot stays shared.
            if ( !map.containsKey( o ) )
            {
                return false;
            }
            map = new HashMap<>( map );
            readOnly = false;
        }
        return map.remove( o ) == PRESENT;
    }

    @Override
    public boolean containsAll( Collection<?> c )
    {
        return map == null ? c.isEmpty() : map.keySet().containsAll( c );
    }

    @Override
    public boolean addAll( Collection<? extends E> c )
    {
        boolean modified = false;
        for ( E e : c )
        {
            if ( add( e ) )
            {
                modified = true;
            }
        }
        return modified;
    }

    @Override
    public void clear()
    {
        // Dropping the reference leaves any frozen snapshot with its iterators.
        readOnly = false;
        map = null;
    }

    @Override
    public boolean removeAll( Collection<?> c )
    {
        // remove == true: drop elements contained in c.
        return conditionalRemove( c, true );
    }

    @Override
    public boolean retainAll( Collection<?> c )
    {
        // remove == false: drop elements NOT contained in c.
        return conditionalRemove( c, false );
    }

    /**
     * Removes every element whose membership in {@code c} equals
     * {@code remove}. When read-only (an iterator is live), removals go into a
     * lazily created copy which is published at the end; otherwise they happen
     * in place via the iterator.
     */
    private boolean conditionalRemove( Collection<?> c, boolean remove )
    {
        if ( map == null || map.isEmpty() )
        {
            return false;
        }
        boolean modified = false;
        Iterator<E> it = map.keySet().iterator();
        // In read-only mode target starts as null and is copied on first hit;
        // otherwise we mutate the current map directly.
        HashMap<E, Object> target = readOnly ? null : map;
        while ( it.hasNext() )
        {
            E item = it.next();
            if ( c.contains( item ) == remove )
            {
                if ( readOnly )
                {
                    if ( target == null )
                    {
                        target = new HashMap<>( map );
                    }
                    target.remove( item );
                }
                else
                {
                    it.remove();
                }
                modified = true;
            }
        }
        // Publish the copy only if one was actually made.
        if ( target != null && target != map )
        {
            map = target;
            readOnly = false;
        }
        return modified;
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_CopyOnWriteAfterIteratorHashSet.java
|
756
|
/**
 * Command-line utility that walks two transaction log streams in parallel and
 * reports where they diverge (branch) -- detected via differing master id or
 * tx checksum -- and where they converge again.
 *
 * Usage: CompareTxStreams &lt;log-file-1&gt; &lt;log-file-2&gt;
 */
public class CompareTxStreams
{
    public static void main( String[] args ) throws IOException
    {
        DefaultFileSystemAbstraction fileSystem = new DefaultFileSystemAbstraction();
        Monitors monitors = new Monitors();
        compareLogStreams(
                LogExtractor.from( fileSystem, new File( args[0] ),
                        monitors.newMonitor( ByteCounterMonitor.class, CompareTxStreams.class, "logExtractor1" ) ),
                // Fixed copy-paste bug: the second extractor's monitor is now
                // registered as "logExtractor2" so the two byte counters can be
                // told apart (previously both were named "logExtractor1").
                LogExtractor.from( fileSystem, new File( args[1] ),
                        monitors.newMonitor( ByteCounterMonitor.class, CompareTxStreams.class, "logExtractor2" ) ) );
    }

    /**
     * Steps both extractors one transaction at a time until either stream ends,
     * printing branch/merge transitions along the way. Throws if the two
     * streams ever yield different transaction ids. Both extractors are closed
     * before returning.
     */
    protected static void compareLogStreams( LogExtractor extractor1, LogExtractor extractor2 ) throws IOException
    {
        try
        {
            boolean branchingDetected = false;
            long lastTx = 1;
            while ( true )
            {
                long tx1 = extractor1.extractNext( new InMemoryLogBuffer() );
                long tx2 = extractor2.extractNext( new InMemoryLogBuffer() );
                if ( tx1 != tx2 )
                {
                    throw new RuntimeException( "Differing tx " + tx1 + " and " + tx2 );
                }
                if ( tx1 == -1 || tx2 == -1 )
                {
                    // -1 marks end of stream; tx1 == tx2 here, so both ended.
                    break;
                }
                lastTx = tx1;
                if ( !branchingDetected )
                { // Try to detect branching
                    if ( extractor1.getLastStartEntry().getMasterId() != extractor2.getLastStartEntry().getMasterId() ||
                         extractor1.getLastTxChecksum() != extractor2.getLastTxChecksum() )
                    {
                        branchingDetected = true;
                        System.out.println( "Branch at " + tx1 + ": masters:" + extractor1.getLastStartEntry().getMasterId() + "," + extractor2.getLastStartEntry().getMasterId() +
                                            " checksums:" + extractor1.getLastTxChecksum() + "," + extractor2.getLastTxChecksum() );
                    }
                }
                else
                { // Try to detect merging of branch
                    if ( extractor1.getLastStartEntry().getMasterId() == extractor2.getLastStartEntry().getMasterId() &&
                         extractor1.getLastTxChecksum() == extractor2.getLastTxChecksum() )
                    {
                        branchingDetected = false;
                        System.out.println( "Merged again at " + tx1 );
                    }
                    else
                    {
                        System.out.println( "Still branched at " + tx1 + ": masters:" + extractor1.getLastStartEntry().getMasterId() + "," + extractor2.getLastStartEntry().getMasterId() +
                                            " checksums:" + extractor1.getLastTxChecksum() + "," + extractor2.getLastTxChecksum() );
                    }
                }
            }
            System.out.println( "Last tx " + lastTx );
        }
        finally
        {
            closeExtractor( extractor1 );
            closeExtractor( extractor2 );
        }
    }

    // Null-safe close helper.
    private static void closeExtractor( LogExtractor extractor )
    {
        if ( extractor != null )
        {
            extractor.close();
        }
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_CompareTxStreams.java
|
757
|
/**
 * A {@link RelIdIterator} over the concatenation of a source id array and an
 * array of added relationship ids, filtering out ids present in the removed
 * collection. Iterates the source first, then the additions.
 *
 * Uses -1 internally as the "exhausted" sentinel (assumes -1 is never a valid
 * relationship id -- TODO confirm).
 */
public class CombinedRelIdIterator implements RelIdIterator
{
    private RelIdIterator srcIterator;
    private final RelIdIterator addIterator;
    // Points at srcIterator first, then switches to addIterator.
    private RelIdIterator currentIterator;
    private final Collection<Long> removed;
    private final int type;
    // Look-ahead state: valid only while nextElementDetermined is true.
    private boolean nextElementDetermined;
    private long nextElement;

    public CombinedRelIdIterator( int type, DirectionWrapper direction, RelIdArray src,
            RelIdArray add, Collection<Long> remove )
    {
        this.type = type;
        // Null arrays are treated as empty rather than rejected.
        this.srcIterator = src != null ? src.iterator( direction ) : RelIdArray.EMPTY.iterator( direction );
        this.addIterator = add != null ? add.iterator( direction ) : RelIdArray.EMPTY.iterator( direction );
        this.currentIterator = srcIterator;
        this.removed = remove;
    }

    @Override
    public int getType()
    {
        return type;
    }

    @Override
    public RelIdIterator updateSource( RelIdArray newSource, DirectionWrapper direction )
    {
        srcIterator = srcIterator.updateSource( newSource, direction );
        return this;
    }

    @Override
    public boolean hasNext()
    {
        if ( nextElementDetermined )
        {
            return nextElement != -1;
        }
        // Scan for the next id not in the removed set, falling through from
        // the source iterator to the additions iterator.
        while ( currentIterator.hasNext() || currentIterator != addIterator )
        {
            while ( currentIterator.hasNext() )
            {
                long value = currentIterator.next();
                if ( removed == null || !removed.contains( value ) )
                {
                    nextElement = value;
                    nextElementDetermined = true;
                    return true;
                }
            }
            currentIterator = addIterator;
        }
        // Both iterators exhausted: remember that via the -1 sentinel.
        nextElementDetermined = true;
        nextElement = -1;
        return false;
    }

    @Override
    public void doAnotherRound()
    {
        // Restart both underlying iterators and the look-ahead from scratch.
        srcIterator.doAnotherRound();
        addIterator.doAnotherRound();
        nextElementDetermined = false;
        currentIterator = srcIterator;
    }

    @Override
    public long next()
    {
        if ( !hasNext() )
        {
            throw new NoSuchElementException();
        }
        // Consume the looked-ahead element.
        nextElementDetermined = false;
        return nextElement;
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_CombinedRelIdIterator.java
|
758
|
/**
 * Charset constants for use where {@code java.nio.charset.StandardCharsets}
 * (added in Java 7) is not available.
 */
public class Charsets
{
    /** The UTF-8 charset; guaranteed available on every Java platform. */
    public static final Charset UTF_8 = Charset.forName( "UTF-8" );

    /** Constants holder -- not meant to be instantiated. */
    private Charsets()
    {
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_Charsets.java
|
759
|
{
@Override
protected void triggered( String event )
{
triggerCount.incrementAndGet();
}
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_util_CappedOperationTest.java
|
760
|
{
private Class lastSeenItemClass;
@Override
public boolean accept( T item )
{
boolean accepted = lastSeenItemClass == null || !lastSeenItemClass.equals( item.getClass() );
lastSeenItemClass = item.getClass();
return accepted;
}
@Override
public void reset()
{ // Don't reset
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_CappedOperation.java
|
761
|
// 32-bit IEEE-754 value, written via putFloat on either buffer type.
FLOAT( Float.SIZE )
{
    @Override
    public void put( ByteBuffer buffer, Number value )
    {
        buffer.putFloat( value.floatValue() );
    }

    @Override
    public void put( LogBuffer buffer, Number value ) throws IOException
    {
        buffer.putFloat( value.floatValue() );
    }
},
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_BufferNumberPutter.java
|
762
|
{
// Previously accepted item; null until the first call.
private T lastSeenItem;
@Override
public boolean accept( T item )
{
// Accept the first item, and any item not equal to the previous one.
boolean accepted = lastSeenItem == null || !lastSeenItem.equals( item );
lastSeenItem = item;
return accepted;
}
@Override
public void reset()
{ // Don't reset
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_CappedOperation.java
|
763
|
{
// Number of accept() calls since the last reset.
private long count;
@Override
public boolean accept( T item )
{
// Opens once maxCount items have been seen since the last reset.
return ++count >= maxCount;
}
@Override
public void reset()
{
count = 0;
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_CappedOperation.java
|
764
|
{
// Clock time of the last reset; 0 means never triggered yet.
private long lastSeen;
@Override
public boolean accept( T item )
{
// Accept immediately the first time, then at most once per timeMillis.
return lastSeen == 0 || clock.currentTimeMillis()-lastSeen >= timeMillis;
}
@Override
public void reset()
{
lastSeen = clock.currentTimeMillis();
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_CappedOperation.java
|
765
|
{
// True until the first accept() call: the very first item always passes.
private boolean firstTime = true;
@SuppressWarnings( "unchecked" )
@Override
public synchronized boolean accept( T item )
{
boolean accepted = firstTime;
firstTime = false;
// Pass it through all since they are probably stateful
for ( Switch<T> filter : filters )
{
if ( filter.accept( item ) )
{
accepted = true;
}
}
// After an accepted item, start a new cycle on every switch.
if ( accepted )
{
reset();
}
return accepted;
}
@SuppressWarnings( "unchecked" )
@Override
public synchronized void reset()
{
for ( Switch<T> filter : filters )
{
filter.reset();
}
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_CappedOperation.java
|
766
|
/**
 * Executes an operation at a capped rate: events are fed through
 * {@link #event(Object)} and {@link #triggered(Object)} is invoked only when
 * the configured {@link Switch} opens. Switches can be based on elapsed time,
 * event count, or differing items/classes, and several can be combined —
 * the operation triggers when any of them opens.
 *
 * @param <T> the event type
 */
public abstract class CappedOperation<T>
{
    /**
     * A stateful predicate deciding whether an event triggers the operation.
     * {@link #reset()} is called after a trigger so the switch can start a new cycle.
     */
    public interface Switch<T> extends Predicate<T>
    {
        void reset();
    }

    private final Switch<T> opener;

    /**
     * @param openers one or more switches; the operation triggers when any of them opens.
     */
    public CappedOperation( Switch... openers )
    {
        this.opener = firstOccurrenceOf( openers );
    }

    /**
     * Feeds an event through the switches, invoking {@link #triggered(Object)} if accepted.
     */
    public void event( T event )
    {
        if ( opener.accept( event ) )
        {
            triggered( event );
        }
    }

    /** Invoked when an event passes the switches. */
    protected abstract void triggered( T event );

    /**
     * Combines switches: the very first event is always accepted, after that an
     * event is accepted when at least one switch accepts it. Every switch is
     * always consulted (they are stateful), and all are reset after an accepted event.
     */
    @SuppressWarnings( "rawtypes" )
    private static <T> Switch<T> firstOccurrenceOf( final Switch... filters )
    {
        return new Switch<T>()
        {
            private boolean firstTime = true;

            @SuppressWarnings( "unchecked" )
            @Override
            public synchronized boolean accept( T item )
            {
                boolean accepted = firstTime;
                firstTime = false;
                // Pass it through all since they are probably stateful
                for ( Switch<T> filter : filters )
                {
                    if ( filter.accept( item ) )
                    {
                        accepted = true;
                    }
                }
                if ( accepted )
                {
                    reset();
                }
                return accepted;
            }

            @SuppressWarnings( "unchecked" )
            @Override
            public synchronized void reset()
            {
                for ( Switch<T> filter : filters )
                {
                    filter.reset();
                }
            }
        };
    }

    /** Switch that opens at most once per given wall-clock interval. */
    public static <T> Switch<T> time( final long time, TimeUnit unit )
    {
        return time( Clock.SYSTEM_CLOCK, time, unit );
    }

    /** Switch that opens at most once per given interval, measured on the supplied clock. */
    public static <T> Switch<T> time( final Clock clock, long time, TimeUnit unit )
    {
        final long timeMillis = unit.toMillis( time );
        return new Switch<T>()
        {
            // Clock time of the last reset; 0 means never triggered yet.
            private long lastSeen;

            @Override
            public boolean accept( T item )
            {
                return lastSeen == 0 || clock.currentTimeMillis() - lastSeen >= timeMillis;
            }

            @Override
            public void reset()
            {
                lastSeen = clock.currentTimeMillis();
            }
        };
    }

    /** Switch that opens once every maxCount events (counted since the last trigger). */
    public static <T> Switch<T> count( final long maxCount )
    {
        return new Switch<T>()
        {
            private long count;

            @Override
            public boolean accept( T item )
            {
                return ++count >= maxCount;
            }

            @Override
            public void reset()
            {
                count = 0;
            }
        };
    }

    /** Switch that opens whenever the event differs (by equals) from the previous event. */
    public static <T> Switch<T> differentItems()
    {
        return new Switch<T>()
        {
            private T lastSeenItem;

            @Override
            public boolean accept( T item )
            {
                boolean accepted = lastSeenItem == null || !lastSeenItem.equals( item );
                lastSeenItem = item;
                return accepted;
            }

            @Override
            public void reset()
            { // Don't reset
            }
        };
    }

    /** Switch that opens whenever the event's class differs from the previous event's class. */
    public static <T> Switch<T> differentItemClasses()
    {
        return new Switch<T>()
        {
            // Class<?> instead of the raw Class the original used.
            private Class<?> lastSeenItemClass;

            @Override
            public boolean accept( T item )
            {
                boolean accepted = lastSeenItemClass == null || !lastSeenItemClass.equals( item.getClass() );
                lastSeenItemClass = item.getClass();
                return accepted;
            }

            @Override
            public void reset()
            { // Don't reset
            }
        };
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_CappedOperation.java
|
767
|
/**
 * Utility for rendering bytes as human-readable, space-grouped hex output:
 * a space after every byte, a wider gap after every 8 bytes and a newline
 * after every 32 bytes.
 */
public class BytePrinter
{
    /**
     * Print a full byte array as nicely formatted groups of hex numbers.
     * Output looks like:
     *
     * 01 02 03 04 05 06 07 08    01 02 03 04 05 06 07 08 ...
     */
    public static void print( byte[] bytes, PrintStream out )
    {
        // ByteBuffer.wrap qualified explicitly (the original relied on a static import).
        print( ByteBuffer.wrap( bytes ), out, 0, bytes.length );
    }

    /**
     * Print a full byte buffer (position 0 up to its limit) as nicely formatted
     * groups of hex numbers.
     *
     * @param bytes the buffer to print
     * @param out the stream to print to
     */
    public static void print( ByteBuffer bytes, PrintStream out )
    {
        print( bytes, out, 0, bytes.limit() );
    }

    /**
     * Print a subsection of a byte buffer as nicely formatted groups of hex numbers.
     * A separator is emitted after every byte: newline per 32, wide gap per 8,
     * single space otherwise (so the output ends with a separator).
     *
     * @param bytes the buffer to print
     * @param out the stream to print to
     * @param offset index of the first byte to print
     * @param length number of bytes to print
     */
    public static void print( ByteBuffer bytes, PrintStream out, int offset, int length )
    {
        for ( int i = offset; i < offset + length; i++ )
        {
            print( bytes.get( i ), out );
            if ( (i - offset + 1) % 32 == 0 )
            {
                out.println();
            }
            else if ( (i - offset + 1) % 8 == 0 )
            {
                out.print( "    " );
            }
            else
            {
                out.print( " " );
            }
        }
    }

    /**
     * Print a single byte as a hex number. The number will always be two characters wide.
     *
     * @param b the byte to print
     * @param out the stream to print to
     */
    public static void print( byte b, PrintStream out )
    {
        out.print( hex( b ) );
    }

    /**
     * This should not be in this class, move to a dedicated ascii-art class when appropriate.
     *
     * Left-justifies and space-pads the string on the right to the given column width.
     *
     * @param str string to justify
     * @param columnWidth total width of the resulting string
     * @return the padded string
     */
    public static String ljust( String str, int columnWidth )
    {
        return String.format( "%-" + columnWidth + "s", str );
    }

    /**
     * This should not be in this class, move to a dedicated ascii-art class when appropriate.
     *
     * Right-justifies and space-pads the string on the left to the given column width.
     *
     * @param str string to justify
     * @param columnWidth total width of the resulting string
     * @return the padded string
     */
    public static String rjust( String str, int columnWidth )
    {
        return String.format( "%" + columnWidth + "s", str );
    }

    /**
     * Convert a single byte to a human-readable hex number, always two characters wide.
     *
     * @param b the byte to convert
     * @return the two-character hex representation
     */
    public static String hex( byte b )
    {
        return String.format( "%02x", b );
    }

    /**
     * Convert a subsection of a byte buffer to a human readable string of nicely
     * formatted hex numbers (same layout as the print methods).
     *
     * @param bytes the buffer to convert
     * @param offset index of the first byte to convert
     * @param length number of bytes to convert
     * @return the formatted hex string
     */
    public static String hex( ByteBuffer bytes, int offset, int length )
    {
        try
        {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            PrintStream ps = new PrintStream( baos, true, "UTF-8" );
            print( bytes, ps, offset, length );
            return baos.toString( "UTF-8" );
        }
        catch ( UnsupportedEncodingException e )
        {
            // UTF-8 is mandated by the JVM spec, so this cannot happen.
            throw new RuntimeException( e );
        }
    }

    /**
     * Convert a full byte buffer to a human readable string of nicely formatted hex numbers.
     *
     * @param bytes the buffer to convert
     * @return the formatted hex string
     */
    public static String hex( ByteBuffer bytes )
    {
        // Fixed: use limit() for consistency with print(ByteBuffer, PrintStream);
        // the original used capacity(), which could include bytes beyond the
        // buffer's readable portion.
        return hex( bytes, 0, bytes.limit() );
    }

    /**
     * Convert a full byte array to a human readable string of nicely formatted hex numbers.
     *
     * @param bytes the array to convert
     * @return the formatted hex string
     */
    public static String hex( byte[] bytes )
    {
        return hex( ByteBuffer.wrap( bytes ) );
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_BytePrinter.java
|
768
|
/**
 * StoreChannel wrapper that reads through an 8 KB intermediary byte buffer
 * and reports bytes read to a ByteCounterMonitor. Positioning, size,
 * truncate, force and close are delegated to the wrapped source channel.
 */
public class BufferedFileChannel extends AbstractStoreChannel
{
private final StoreChannel source;
private final ByteCounterMonitor monitor;
// 8 KB staging buffer between the source channel and callers.
private final byte[] intermediaryBuffer = new byte[1024*8];
// Number of valid bytes currently in the staging buffer.
private int intermediaryBufferSize;
// Read cursor within the staging buffer.
private int intermediaryBufferPosition;
public BufferedFileChannel( StoreChannel source, ByteCounterMonitor monitor ) throws IOException
{
this.source = source;
this.monitor = monitor;
// Prime the staging buffer so position() arithmetic is valid from the start.
fillUpIntermediaryBuffer();
}
@Override
public int read( ByteBuffer dst ) throws IOException
{
// NOTE(review): the loop bound uses dst.limit(), which equals the number of
// requested bytes only when dst.position() == 0 on entry — confirm callers.
int read = 0;
while ( read < dst.limit() )
{
read += readAsMuchAsPossibleFromIntermediaryBuffer( dst );
if ( read < dst.limit() && fillUpIntermediaryBuffer() == -1 )
{
break;
}
}
// Return -1 at end-of-stream, mirroring ReadableByteChannel semantics.
return read == 0 && dst.limit() > 0 ? -1 : read;
}
// Copies as many buffered bytes as fit into dst; returns how many were copied.
private int readAsMuchAsPossibleFromIntermediaryBuffer( ByteBuffer dst )
{
int howMuchToRead = Math.min( dst.remaining(), remainingInIntermediaryBuffer() );
dst.put( intermediaryBuffer, intermediaryBufferPosition, howMuchToRead );
intermediaryBufferPosition += howMuchToRead;
return howMuchToRead;
}
private int remainingInIntermediaryBuffer()
{
return intermediaryBufferSize-intermediaryBufferPosition;
}
// Refills the staging buffer from the source; returns bytes read, or -1 at EOF.
private int fillUpIntermediaryBuffer() throws IOException
{
int result = source.read( ByteBuffer.wrap( intermediaryBuffer ) );
// NOTE(review): an EOF result (-1) is also passed to the monitor — confirm intended.
monitor.bytesRead( result );
intermediaryBufferPosition = 0;
intermediaryBufferSize = result == -1 ? 0 : result;
return result;
}
@Override
public long position() throws IOException
{
// Logical position = source position minus the still-unread buffered bytes.
return source.position() - intermediaryBufferSize + intermediaryBufferPosition;
}
@Override
public BufferedFileChannel position( long newPosition ) throws IOException
{
long bufferEndPosition = source.position();
long bufferStartPosition = bufferEndPosition - intermediaryBufferSize;
// If the target lies within the currently buffered range, just move the cursor.
if ( newPosition >= bufferStartPosition && newPosition <= bufferEndPosition )
{
// Only an optimization
long diff = newPosition-position();
intermediaryBufferPosition += diff;
}
else
{
source.position( newPosition );
fillUpIntermediaryBuffer();
}
return this;
}
@Override
public long size() throws IOException
{
return source.size();
}
@Override
public BufferedFileChannel truncate( long size ) throws IOException
{
source.truncate( size );
return this;
}
@Override
public void force( boolean metaData ) throws IOException
{
source.force( metaData );
}
/** The wrapped, unbuffered channel. */
public StoreChannel getSource()
{
return source;
}
@Override
public void close() throws IOException
{
source.close();
}
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_BufferedFileChannel.java
|
769
|
// Writes the value as an 8-byte double; the constructor argument is the
// size in bits (Double.SIZE).
DOUBLE( Double.SIZE )
{
@Override
public void put( ByteBuffer buffer, Number value )
{
buffer.putDouble( value.doubleValue() );
}
@Override
public void put( LogBuffer buffer, Number value ) throws IOException
{
buffer.putDouble( value.doubleValue() );
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_BufferNumberPutter.java
|
770
|
{
// Attaches a deserialized dynamic name record to its token record.
@Override
public void add( PropertyKeyTokenRecord target, DynamicRecord record )
{
target.addNameRecord( record );
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_xa_Command.java
|
771
|
/**
 * Tests basic lifecycle of a minimal AbstractStore subclass: creation,
 * recovery after truncation ("sticky" store) and closing.
 */
public class TestStore
{
// NOTE(review): these public static fields could be final — confirm no reassignment.
public static IdGeneratorFactory ID_GENERATOR_FACTORY =
new DefaultIdGeneratorFactory();
public static WindowPoolFactory WINDOW_POOL_FACTORY =
new DefaultWindowPoolFactory();
public static FileSystemAbstraction FILE_SYSTEM =
new DefaultFileSystemAbstraction();
// Directory for the test store; created on demand.
private File path()
{
String path = AbstractNeo4jTestCase.getStorePath( "teststore" );
File file = new File( path );
file.mkdirs();
return file;
}
private File file( String name )
{
return new File( path() , name);
}
private File storeFile()
{
return file( "testStore.db" );
}
private File storeIdFile()
{
return file( "testStore.db.id" );
}
@Test
public void testCreateStore() throws IOException
{
try
{
try
{
// Null file name must be rejected.
Store.createStore( null );
fail( "Null fileName should throw exception" );
}
catch ( IllegalArgumentException e )
{ // good
}
Store store = Store.createStore( storeFile() );
try
{
// Creating a store on top of an existing one must be rejected.
Store.createStore( storeFile() );
fail( "Creating existing store should throw exception" );
}
catch ( IllegalStateException e )
{ // good
}
store.close();
}
finally
{
deleteBothFiles();
}
}
// Removes the store and its id file so tests don't leak state into each other.
private void deleteBothFiles()
{
File file = storeFile();
if ( file.exists() )
{
assertTrue( file.delete() );
}
file = storeIdFile();
if ( file.exists() )
{
assertTrue( file.delete() );
}
}
@Test
public void testStickyStore() throws IOException
{
try
{
// Simulate an unclean shutdown by truncating the store file, then
// verify the store can be opened and made OK again.
Store.createStore( storeFile() ).close();
java.nio.channels.FileChannel fileChannel = new java.io.RandomAccessFile(
storeFile(), "rw" ).getChannel();
fileChannel.truncate( fileChannel.size() - 2 );
fileChannel.close();
Store store = new Store( storeFile() );
store.makeStoreOk();
store.close();
}
finally
{
deleteBothFiles();
}
}
@Test
public void testClose() throws IOException
{
try
{
Store store = Store.createStore( storeFile() );
store.close();
}
finally
{
deleteBothFiles();
}
}
// Minimal concrete AbstractStore with a one-byte record size.
private static class Store extends AbstractStore
{
public static final String TYPE_DESCRIPTOR = "TestVersion";
private static final int RECORD_SIZE = 1;
public Store( File fileName ) throws IOException
{
super( fileName, new Config( MapUtil.stringMap( "store_dir", "target/var/teststore" ),
GraphDatabaseSettings.class ),
IdType.NODE, ID_GENERATOR_FACTORY, WINDOW_POOL_FACTORY, FILE_SYSTEM, StringLogger.DEV_NULL );
}
public int getRecordSize()
{
return RECORD_SIZE;
}
public String getTypeDescriptor()
{
return TYPE_DESCRIPTOR;
}
// Creates the empty store file on disk, then opens it.
public static Store createStore( File fileName ) throws IOException
{
new StoreFactory( new Config( Collections.<String, String>emptyMap(), GraphDatabaseSettings.class ),
ID_GENERATOR_FACTORY, new DefaultWindowPoolFactory(),
FILE_SYSTEM, StringLogger.DEV_NULL, null ).
createEmptyStore( fileName, buildTypeDescriptorAndVersion( TYPE_DESCRIPTOR ) );
return new Store( fileName );
}
protected void rebuildIdGenerator()
{
}
@Override
public List<WindowPoolStats> getAllWindowPoolStats()
{
// TODO Auto-generated method stub
return null;
}
}
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_store_TestStore.java
|
772
|
public abstract class Command extends XaCommand
{
private final int keyHash;
private final long key;
private final Mode mode;
/*
* TODO: This is techdebt
* This is used to control the order of how commands are applied, which is done because
* we don't take read locks, and so the order or how we change things lowers the risk
* of reading invalid state. This should be removed once eg. MVCC or read locks has been
* implemented.
*/
/**
 * How a command applies its record: freshly created, updated in place,
 * or deleted. Derivable from a record's created/inUse flags.
 */
public enum Mode
{
    CREATE,
    UPDATE,
    DELETE;

    /**
     * Not in use means DELETE; otherwise created means CREATE and
     * anything else is an UPDATE.
     */
    public static Mode fromRecordState( boolean created, boolean inUse )
    {
        if ( !inUse )
        {
            return DELETE;
        }
        return created ? CREATE : UPDATE;
    }

    /** Convenience overload reading the flags from the record itself. */
    public static Mode fromRecordState( AbstractBaseRecord record )
    {
        return fromRecordState( record.isCreated(), record.inUse() );
    }
}
/**
 * @param key the id of the record this command applies to
 * @param mode whether the command creates, updates or deletes its record
 */
Command( long key, Mode mode )
{
this.mode = mode;
// Fold the 64-bit key into 32 bits by xor-ing its halves (same as Long.hashCode).
this.keyHash = (int) (( key >>> 32 ) ^ key );
this.key = key;
}
/** Dispatches this command's record(s) to the given visitor. */
public abstract void accept( CommandRecordVisitor visitor );
@Override
public int hashCode()
{
// Precomputed from the key in the constructor.
return keyHash;
}
// Force implementors to implement toString
@Override
public abstract String toString();
/** The id of the primary record this command operates on. */
long getKey()
{
return key;
}
/** Whether this command creates, updates or deletes its record. */
Mode getMode()
{
return mode;
}
@Override
public boolean equals( Object o )
{
// Equal when the exact same command class targets the same key.
return o != null && o.getClass().equals( getClass() ) && getKey() == ((Command) o).getKey();
}
/**
 * Serializes a property block: size(byte), then size/8 value-block longs,
 * then either an int 0 (light block, no dynamic records) or the dynamic
 * record chain via writeDynamicRecords.
 */
private static void writePropertyBlock( LogBuffer buffer,
PropertyBlock block ) throws IOException
{
byte blockSize = (byte) block.getSize();
assert blockSize > 0 : blockSize + " is not a valid block size value";
buffer.put( blockSize ); // 1
long[] propBlockValues = block.getValueBlocks();
for ( long propBlockValue : propBlockValues )
{
buffer.putLong( propBlockValue );
}
/*
* For each block we need to keep its dynamic record chain if
* it is just created. Deleted dynamic records are in the property
* record and dynamic records are never modified. Also, they are
* assigned as a whole, so just checking the first should be enough.
*/
if ( block.isLight() )
{
/*
* This has to be int. If this record is not light
* then we have the number of DynamicRecords that follow,
* which is an int. We do not currently want/have a flag bit so
* we simplify by putting an int here always
*/
buffer.putInt( 0 ); // 4 or
}
else
{
writeDynamicRecords( buffer, block.getValueRecords() );
}
}
/** Writes the record count (int) followed by each dynamic record. */
static void writeDynamicRecords( LogBuffer buffer, Collection<DynamicRecord> records ) throws IOException
{
buffer.putInt( records.size() ); // 4
for ( DynamicRecord record : records )
{
writeDynamicRecord( buffer, record );
}
}
/**
 * Serializes one dynamic record. In-use records carry length and data;
 * not-in-use records stop after the in-use flag. The FIRST_IN_CHAIN bit
 * is or-ed into the in-use byte for chain-start records.
 */
static void writeDynamicRecord( LogBuffer buffer, DynamicRecord record )
throws IOException
{
// id+type+in_use(byte)+nr_of_bytes(int)+next_block(long)
if ( record.inUse() )
{
byte inUse = Record.IN_USE.byteValue();
if ( record.isStartRecord() )
{
inUse |= Record.FIRST_IN_CHAIN.byteValue();
}
buffer.putLong( record.getId() ).putInt( record.getType() ).put(
inUse ).putInt( record.getLength() ).putLong(
record.getNextBlock() );
byte[] data = record.getData();
assert data != null;
buffer.put( data );
}
else
{
byte inUse = Record.NOT_IN_USE.byteValue();
buffer.putLong( record.getId() ).putInt( record.getType() ).put(
inUse );
}
}
/**
 * Deserializes a property block as written by writePropertyBlock.
 * Returns null whenever the channel runs out of bytes mid-record
 * (a half-written log entry).
 */
static PropertyBlock readPropertyBlock( ReadableByteChannel byteChannel,
ByteBuffer buffer ) throws IOException
{
PropertyBlock toReturn = new PropertyBlock();
if ( !readAndFlip( byteChannel, buffer, 1 ) )
{
return null;
}
byte blockSize = buffer.get(); // the size is stored in bytes // 1
assert blockSize > 0 && blockSize % 8 == 0 : blockSize
+ " is not a valid block size value";
// Read in blocks
if ( !readAndFlip( byteChannel, buffer, blockSize ) )
{
return null;
}
long[] blocks = readLongs( buffer, blockSize / 8 );
assert blocks.length == blockSize / 8 : blocks.length
+ " longs were read in while i asked for what corresponds to "
+ blockSize;
assert PropertyType.getPropertyType( blocks[0], false ).calculateNumberOfBlocksUsed(
blocks[0] ) == blocks.length : blocks.length
+ " is not a valid number of blocks for type "
+ PropertyType.getPropertyType(
blocks[0], false );
/*
* Ok, now we may be ready to return, if there are no DynamicRecords. So
* we start building the Object
*/
toReturn.setValueBlocks( blocks );
/*
* Read in existence of DynamicRecords. Remember, this has already been
* read in the buffer with the blocks, above.
*/
if ( !readDynamicRecords( byteChannel, buffer, toReturn, PROPERTY_BLOCK_DYNAMIC_RECORD_ADDER ) )
{
return null;
}
return toReturn;
}
// Adder that attaches deserialized dynamic value records to a property block,
// marking each as created.
private static final DynamicRecordAdder<PropertyBlock> PROPERTY_BLOCK_DYNAMIC_RECORD_ADDER =
new DynamicRecordAdder<PropertyBlock>()
{
@Override
public void add( PropertyBlock target, DynamicRecord record )
{
record.setCreated();
target.addValueRecord( record );
}
};
/**
 * Reads a count-prefixed sequence of dynamic records (as written by
 * writeDynamicRecords) and hands each to the adder. Returns false when the
 * channel runs out of bytes mid-sequence.
 */
static <T> boolean readDynamicRecords( ReadableByteChannel byteChannel, ByteBuffer buffer,
T target, DynamicRecordAdder<T> adder ) throws IOException
{
if ( !readAndFlip( byteChannel, buffer, 4 ) )
{
return false;
}
int numberOfRecords = buffer.getInt();
assert numberOfRecords >= 0;
while ( numberOfRecords-- > 0 )
{
DynamicRecord read = readDynamicRecord( byteChannel, buffer );
if ( read == null )
{
return false;
}
adder.add( target, read );
}
return true;
}
// Strategy for attaching a deserialized dynamic record to its owning target.
private interface DynamicRecordAdder<T>
{
void add( T target, DynamicRecord record );
}
/**
 * Deserializes one dynamic record as written by writeDynamicRecord:
 * id(long)+type(int)+in_use(byte), then for in-use records
 * nr_of_bytes(int)+next_block(long)+data. Returns null when the channel
 * runs out of bytes mid-record (a half-written log entry).
 */
static DynamicRecord readDynamicRecord( ReadableByteChannel byteChannel,
    ByteBuffer buffer ) throws IOException
{
    // id+type+in_use(byte)+nr_of_bytes(int)+next_block(long)
    if ( !readAndFlip( byteChannel, buffer, 13 ) )
    {
        return null;
    }
    long id = buffer.getLong();
    assert id >= 0 && id <= ( 1l << 36 ) - 1 : id
            + " is not a valid dynamic record id";
    int type = buffer.getInt();
    byte inUseFlag = buffer.get();
    boolean inUse = ( inUseFlag & Record.IN_USE.byteValue() ) != 0;
    DynamicRecord record = new DynamicRecord( id );
    record.setInUse( inUse, type );
    if ( inUse )
    {
        record.setStartRecord( ( inUseFlag & Record.FIRST_IN_CHAIN.byteValue() ) != 0 );
        if ( !readAndFlip( byteChannel, buffer, 12 ) )
        {
            return null;
        }
        int nrOfBytes = buffer.getInt();
        assert nrOfBytes >= 0 && nrOfBytes < ( ( 1 << 24 ) - 1 ) : nrOfBytes
                + " is not valid for a number of bytes field of a dynamic record";
        long nextBlock = buffer.getLong();
        // Fixed operator precedence: the original wrote (1l << 36 - 1), which
        // parses as 1l << 35 and allowed only half the valid id range. The
        // intended bound, matching the id assertion above, is (1l << 36) - 1.
        assert ( nextBlock >= 0 && nextBlock <= ( 1l << 36 ) - 1 )
                || ( nextBlock == Record.NO_NEXT_BLOCK.intValue() ) : nextBlock
                + " is not valid for a next record field of a dynamic record";
        record.setNextBlock( nextBlock );
        if ( !readAndFlip( byteChannel, buffer, nrOfBytes ) )
        {
            return null;
        }
        byte[] data = new byte[nrOfBytes];
        buffer.get( data );
        record.setData( data );
    }
    return record;
}
/** Reads count longs from the buffer's current position. */
private static long[] readLongs( ByteBuffer buffer, int count )
{
    long[] values = new long[count];
    int index = 0;
    while ( index < count )
    {
        values[index++] = buffer.getLong();
    }
    return values;
}
// One-byte discriminators identifying each command type in the log. NONE
// means the first byte of the command record was only written but second
// (saying what type) did not get written but the file still got expanded
private static final byte NONE = (byte) 0;
private static final byte NODE_COMMAND = (byte) 1;
private static final byte PROP_COMMAND = (byte) 2;
private static final byte REL_COMMAND = (byte) 3;
private static final byte REL_TYPE_COMMAND = (byte) 4;
private static final byte PROP_INDEX_COMMAND = (byte) 5;
private static final byte NEOSTORE_COMMAND = (byte) 6;
private static final byte SCHEMA_RULE_COMMAND = (byte) 7;
private static final byte LABEL_KEY_COMMAND = (byte) 8;
/** Removes the entities affected by this command from the cache. */
abstract void removeFromCache( CacheAccessBackDoor cacheAccess );
/**
 * Command carrying a node record's before and after images. Serializes as
 * NODE_COMMAND + id + before-record + after-record; executing writes the
 * after image (and its dynamic label records) to the node store.
 */
static class NodeCommand extends Command
{
private final NodeStore store;
private final NodeRecord before;
private final NodeRecord after;
NodeCommand( NodeStore store, NodeRecord before, NodeRecord after )
{
super( after.getId(), Mode.fromRecordState( after ) );
this.store = store;
this.before = before;
this.after = after;
}
@Override
public void accept( CommandRecordVisitor visitor )
{
visitor.visitNode( after );
}
@Override
public String toString()
{
return beforeAndAfterToString( before, after );
}
@Override
void removeFromCache( CacheAccessBackDoor cacheAccess )
{
cacheAccess.removeNodeFromCache( getKey() );
}
@Override
public void execute()
{
store.updateRecord( after );
// Dynamic Label Records
Collection<DynamicRecord> toUpdate = new ArrayList<>( after.getDynamicLabelRecords() );
addRemoved( toUpdate );
store.updateDynamicLabelRecords( toUpdate );
}
private void addRemoved( Collection<DynamicRecord> toUpdate )
{
// the dynamic label records that exist in before, but not in after should be deleted.
Set<Long> idsToRemove = new HashSet<>();
for ( DynamicRecord record : before.getDynamicLabelRecords() )
{
idsToRemove.add( record.getId() );
}
for ( DynamicRecord record : after.getDynamicLabelRecords() )
{
idsToRemove.remove( record.getId() );
}
for ( long id : idsToRemove )
{
toUpdate.add( new DynamicRecord( id ) );
}
}
@Override
public void writeToFile( LogBuffer buffer ) throws IOException
{
buffer.put( NODE_COMMAND );
buffer.putLong( after.getId() );
writeNodeRecord( buffer, before );
writeNodeRecord( buffer, after );
}
// in_use(byte)[+next_rel(long)+next_prop(long)+label_field(long)+dynamic label records]
private void writeNodeRecord( LogBuffer buffer, NodeRecord record ) throws IOException
{
byte inUse = record.inUse() ? Record.IN_USE.byteValue()
: Record.NOT_IN_USE.byteValue();
buffer.put( inUse );
if ( record.inUse() )
{
buffer.putLong( record.getNextRel() ).putLong( record.getNextProp() );
// labels
buffer.putLong( record.getLabelField() );
writeDynamicRecords( buffer, record.getDynamicLabelRecords() );
}
}
/** Deserializes a NodeCommand; returns null on a half-written entry. */
public static Command readFromFile( NeoStore neoStore, ReadableByteChannel byteChannel, ByteBuffer buffer )
throws IOException
{
if ( !readAndFlip( byteChannel, buffer, 8 ) )
{
return null;
}
long id = buffer.getLong();
NodeRecord before = readNodeRecord( id, byteChannel, buffer );
if ( before == null )
{
return null;
}
NodeRecord after = readNodeRecord( id, byteChannel, buffer );
if ( after == null )
{
return null;
}
// Not in use before but in use after means the record was created here.
if ( !before.inUse() && after.inUse() )
{
after.setCreated();
}
return new NodeCommand( neoStore == null ? null : neoStore.getNodeStore(), before, after );
}
private static NodeRecord readNodeRecord( long id, ReadableByteChannel byteChannel, ByteBuffer buffer )
throws IOException
{
if ( !readAndFlip( byteChannel, buffer, 1 ) )
{
return null;
}
byte inUseFlag = buffer.get();
boolean inUse = false;
if ( inUseFlag == Record.IN_USE.byteValue() )
{
inUse = true;
}
else if ( inUseFlag != Record.NOT_IN_USE.byteValue() )
{
throw new IOException( "Illegal in use flag: " + inUseFlag );
}
NodeRecord record;
if ( inUse )
{
if ( !readAndFlip( byteChannel, buffer, 8*3 ) )
{
return null;
}
record = new NodeRecord( id, buffer.getLong(), buffer.getLong() );
// labels
long labelField = buffer.getLong();
Collection<DynamicRecord> dynamicLabelRecords = new ArrayList<>();
readDynamicRecords( byteChannel, buffer, dynamicLabelRecords, COLLECTION_DYNAMIC_RECORD_ADDER );
record.setLabelField( labelField, dynamicLabelRecords );
}
else
{
record = new NodeRecord( id, Record.NO_NEXT_RELATIONSHIP.intValue(),
Record.NO_NEXT_PROPERTY.intValue() );
}
record.setInUse( inUse );
return record;
}
public NodeRecord getBefore()
{
return before;
}
public NodeRecord getAfter()
{
return after;
}
}
/**
 * Command carrying a relationship record. Serializes as REL_COMMAND + id +
 * in_use, plus the full record fields when in use; executing writes the
 * record to the relationship store.
 */
static class RelationshipCommand extends Command
{
private final RelationshipRecord record;
// before update stores the record as it looked before the command is executed
private RelationshipRecord beforeUpdate;
private final RelationshipStore store;
RelationshipCommand( RelationshipStore store, RelationshipRecord record )
{
super( record.getId(), Mode.fromRecordState( record ) );
this.record = record;
// the default (common) case is that the record to be written is complete and not from recovery or HA
this.beforeUpdate = record;
this.store = store;
}
@Override
public void accept( CommandRecordVisitor visitor )
{
visitor.visitRelationship( record );
}
@Override
public String toString()
{
return record.toString();
}
@Override
void removeFromCache( CacheAccessBackDoor cacheAccess )
{
cacheAccess.removeRelationshipFromCache( getKey() );
/*
* If isRecovered() then beforeUpdate is the correct one UNLESS this is the second time this command
* is executed, where it might have been actually written out to disk so the fields are already -1. So
* we still need to check.
* If !isRecovered() then beforeUpdate is the same as record, so we are still ok.
* We don't check for !inUse() though because that is implicit in the call of this method.
* The above is a hand waiving proof that the conditions that lead to the patchDeletedRelationshipNodes()
* in the if below are the same as in RelationshipCommand.execute() so it should be safe.
*/
if ( beforeUpdate.getFirstNode() != -1 || beforeUpdate.getSecondNode() != -1 )
{
cacheAccess.patchDeletedRelationshipNodes( getKey(), beforeUpdate.getFirstNode(),
beforeUpdate.getFirstNextRel(), beforeUpdate.getSecondNode(), beforeUpdate.getSecondNextRel() );
}
if ( record.getFirstNode() != -1 || record.getSecondNode() != -1 )
{
cacheAccess.removeNodeFromCache( record.getFirstNode() );
cacheAccess.removeNodeFromCache( record.getSecondNode() );
}
}
@Override
public void execute()
{
if ( isRecovered() && !record.inUse() )
{
/*
* If read from a log (either on recovery or HA) then all the fields but for the Id are -1. If the
* record is deleted, then we'll need to invalidate the cache and patch the node's relationship chains.
* Therefore, we need to read the record from the store. This is not too expensive, since the window
* will be either in memory or will soon be anyway and we are just saving the write the trouble.
*/
beforeUpdate = store.forceGetRaw( record.getId() );
}
store.updateRecord( record );
}
@Override
public void writeToFile( LogBuffer buffer ) throws IOException
{
byte inUse = record.inUse() ? Record.IN_USE.byteValue()
: Record.NOT_IN_USE.byteValue();
buffer.put( REL_COMMAND );
buffer.putLong( record.getId() );
buffer.put( inUse );
if ( record.inUse() )
{
buffer.putLong( record.getFirstNode() )
.putLong( record.getSecondNode() )
.putInt( record.getType() )
.putLong( record.getFirstPrevRel() )
.putLong( record.getFirstNextRel() )
.putLong( record.getSecondPrevRel() )
.putLong( record.getSecondNextRel() )
.putLong( record.getNextProp() )
;
}
}
/** Deserializes a RelationshipCommand; returns null on a half-written entry. */
public static Command readFromFile( NeoStore neoStore,
ReadableByteChannel byteChannel, ByteBuffer buffer )
throws IOException
{
if ( !readAndFlip( byteChannel, buffer, 9 ) )
{
return null;
}
long id = buffer.getLong();
byte inUseFlag = buffer.get();
boolean inUse = false;
if ( (inUseFlag & Record.IN_USE.byteValue()) == Record.IN_USE
.byteValue() )
{
inUse = true;
}
else if ( (inUseFlag & Record.IN_USE.byteValue()) != Record.NOT_IN_USE
.byteValue() )
{
throw new IOException( "Illegal in use flag: " + inUseFlag );
}
RelationshipRecord record;
if ( inUse )
{
// first(long)+second(long)+type(int)+4 rel pointers(long)+next_prop(long) = 60 bytes
if ( !readAndFlip( byteChannel, buffer, 60 ) )
{
return null;
}
record = new RelationshipRecord( id, buffer.getLong(), buffer
.getLong(), buffer.getInt() );
record.setInUse( inUse );
record.setFirstPrevRel( buffer.getLong() );
record.setFirstNextRel( buffer.getLong() );
record.setSecondPrevRel( buffer.getLong() );
record.setSecondNextRel( buffer.getLong() );
record.setNextProp( buffer.getLong() );
}
else
{
// Deleted: only the id is known; all other fields are -1 placeholders.
record = new RelationshipRecord( id, -1, -1, -1 );
record.setInUse( false );
}
return new RelationshipCommand( neoStore == null ? null : neoStore.getRelationshipStore(),
record );
}
}
/**
 * Command updating the graph-global next-property pointer kept in the neo
 * store record. Serializes as NEOSTORE_COMMAND + nextProp(long).
 */
static class NeoStoreCommand extends Command
{
private final NeoStoreRecord record;
private final NeoStore neoStore;
NeoStoreCommand( NeoStore neoStore, NeoStoreRecord record )
{
super( record.getId(), Mode.fromRecordState( record ) );
this.neoStore = neoStore;
this.record = record;
}
@Override
public void execute()
{
neoStore.setGraphNextProp( record.getNextProp() );
}
@Override
public void accept( CommandRecordVisitor visitor )
{
visitor.visitNeoStore( record );
}
@Override
public String toString()
{
return record.toString();
}
@Override
void removeFromCache( CacheAccessBackDoor cacheAccess )
{
// no-op
}
@Override
public void writeToFile( LogBuffer buffer ) throws IOException
{
buffer.put( NEOSTORE_COMMAND ).putLong( record.getNextProp() );
}
/** Deserializes a NeoStoreCommand; returns null on a half-written entry. */
public static Command readFromFile( NeoStore neoStore,
ReadableByteChannel byteChannel, ByteBuffer buffer )
throws IOException
{
if ( !readAndFlip( byteChannel, buffer, 8 ) )
{
return null;
}
long nextProp = buffer.getLong();
NeoStoreRecord record = new NeoStoreRecord();
record.setNextProp( nextProp );
return new NeoStoreCommand( neoStore, record );
}
}
/**
 * Command carrying a property-key token record. Serializes as
 * PROP_INDEX_COMMAND + id(int) + in_use(byte) + count(int) + nameId(int),
 * followed by the dynamic name records (or an int 0 for light records).
 */
static class PropertyKeyTokenCommand extends Command
{
private final PropertyKeyTokenRecord record;
private final PropertyKeyTokenStore store;
PropertyKeyTokenCommand( PropertyKeyTokenStore store,
PropertyKeyTokenRecord record )
{
super( record.getId(), Mode.fromRecordState( record ) );
this.record = record;
this.store = store;
}
@Override
public void accept( CommandRecordVisitor visitor )
{
visitor.visitPropertyKeyToken( record );
}
@Override
public String toString()
{
return record.toString();
}
@Override
void removeFromCache( CacheAccessBackDoor cacheAccess )
{
// no-op
}
@Override
public void execute()
{
store.updateRecord( record );
}
@Override
public void writeToFile( LogBuffer buffer ) throws IOException
{
// id+in_use(byte)+count(int)+key_blockId(int)+nr_key_records(int)
byte inUse = record.inUse() ? Record.IN_USE.byteValue()
: Record.NOT_IN_USE.byteValue();
buffer.put( PROP_INDEX_COMMAND );
buffer.putInt( record.getId() );
buffer.put( inUse );
buffer.putInt( record.getPropertyCount() ).putInt( record.getNameId() );
if ( record.isLight() )
{
buffer.putInt( 0 );
}
else
{
writeDynamicRecords( buffer, record.getNameRecords() );
}
}
/** Deserializes a PropertyKeyTokenCommand; returns null on a half-written entry. */
public static Command readFromFile( NeoStore neoStore, ReadableByteChannel byteChannel,
ByteBuffer buffer ) throws IOException
{
// id+in_use(byte)+count(int)+key_blockId(int)
if ( !readAndFlip( byteChannel, buffer, 13 ) )
{
return null;
}
int id = buffer.getInt();
byte inUseFlag = buffer.get();
boolean inUse = false;
if ( (inUseFlag & Record.IN_USE.byteValue()) == Record.IN_USE
.byteValue() )
{
inUse = true;
}
else if ( inUseFlag != Record.NOT_IN_USE.byteValue() )
{
throw new IOException( "Illegal in use flag: " + inUseFlag );
}
PropertyKeyTokenRecord record = new PropertyKeyTokenRecord( id );
record.setInUse( inUse );
record.setPropertyCount( buffer.getInt() );
record.setNameId( buffer.getInt() );
if ( !readDynamicRecords( byteChannel, buffer, record, PROPERTY_INDEX_DYNAMIC_RECORD_ADDER ) )
{
return null;
}
return new PropertyKeyTokenCommand( neoStore == null ? null : neoStore.getPropertyStore()
.getPropertyKeyTokenStore(), record );
}
}
// Attaches deserialized dynamic name records to a property key token record.
private static final DynamicRecordAdder<PropertyKeyTokenRecord> PROPERTY_INDEX_DYNAMIC_RECORD_ADDER =
new DynamicRecordAdder<PropertyKeyTokenRecord>()
{
@Override
public void add( PropertyKeyTokenRecord target, DynamicRecord record )
{
target.addNameRecord( record );
}
};
/**
 * Command representing a change to one property record, carrying both the
 * before and after images so the change can be applied and inspected.
 */
static class PropertyCommand extends Command implements PropertyRecordChange
{
private final PropertyStore store;
private final PropertyRecord before;
private final PropertyRecord after;
// TODO as optimization the deserialized key/values could be passed in here
// so that the cost of deserializing them only applies in recovery/HA
PropertyCommand( PropertyStore store, PropertyRecord before, PropertyRecord after )
{
super( after.getId(), Mode.fromRecordState( after ) );
this.store = store;
this.before = before;
this.after = after;
}
@Override
public void accept( CommandRecordVisitor visitor )
{
visitor.visitProperty( after );
}
@Override
public String toString()
{
return beforeAndAfterToString( before, after );
}
@Override
void removeFromCache( CacheAccessBackDoor cacheAccess )
{
// Evict the owning entity (node or relationship, if any) so it reloads
// its property chain on next access.
long nodeId = this.getNodeId();
long relId = this.getRelId();
if ( nodeId != -1 )
{
cacheAccess.removeNodeFromCache( nodeId );
}
else if ( relId != -1 )
{
cacheAccess.removeRelationshipFromCache( relId );
}
}
@Override
public PropertyRecord getBefore()
{
return before;
}
@Override
public PropertyRecord getAfter()
{
return after;
}
@Override
public void execute()
{
// Only the after-image is persisted; the before-image is for inspection/undo.
store.updateRecord( after );
}
// Owning node id, or -1 if this property is not owned by a node.
public long getNodeId()
{
return after.getNodeId();
}
// Owning relationship id, or -1 if this property is not owned by a relationship.
public long getRelId()
{
return after.getRelId();
}
@Override
public void writeToFile( LogBuffer buffer ) throws IOException
{
// COMMAND + ID
buffer.put( PROP_COMMAND );
buffer.putLong( getKey() ); // 8
// BEFORE
writeToFile( buffer, before );
// AFTER
writeToFile( buffer, after );
}
// Serializes one property record image; the byte-size comments track the format.
private void writeToFile( LogBuffer buffer, PropertyRecord record ) throws IOException
{
byte inUse = record.inUse() ? Record.IN_USE.byteValue()
: Record.NOT_IN_USE.byteValue();
if ( record.getRelId() != -1 )
{
// Here we add 2, i.e. set the second lsb.
inUse += Record.REL_PROPERTY.byteValue();
}
buffer.put( inUse ); // 1
buffer.putLong( record.getNextProp() ).putLong(
record.getPrevProp() ); // 8 + 8
long nodeId = record.getNodeId();
long relId = record.getRelId();
if ( nodeId != -1 )
{
buffer.putLong( nodeId ); // 8 or
}
else if ( relId != -1 )
{
buffer.putLong( relId ); // 8 or
}
else
{
// means this records value has not changed, only place in
// prop chain
buffer.putLong( -1 ); // 8
}
buffer.put( (byte) record.getPropertyBlocks().size() ); // 1
for ( int i = 0; i < record.getPropertyBlocks().size(); i++ )
{
PropertyBlock block = record.getPropertyBlocks().get( i );
assert block.getSize() > 0 : record + " seems kinda broken";
writePropertyBlock( buffer, block );
}
writeDynamicRecords( buffer, record.getDeletedRecords() );
}
// Deserializes a PropertyCommand (id + before image + after image);
// returns null on a truncated entry.
public static Command readFromFile( NeoStore neoStore,
ReadableByteChannel byteChannel, ByteBuffer buffer )
throws IOException
{
// ID
if ( !readAndFlip( byteChannel, buffer, 8 ) )
{
return null;
}
long id = buffer.getLong(); // 8
// BEFORE
PropertyRecord before = readPropertyRecord( id, byteChannel, buffer );
if ( before == null )
{
return null;
}
// AFTER
PropertyRecord after = readPropertyRecord( id, byteChannel, buffer );
if ( after == null )
{
return null;
}
return new PropertyCommand( neoStore == null ? null
: neoStore.getPropertyStore(), before, after );
}
private static PropertyRecord readPropertyRecord( long id, ReadableByteChannel byteChannel, ByteBuffer buffer )
throws IOException
{
// in_use(byte)+type(int)+key_indexId(int)+prop_blockId(long)+
// prev_prop_id(long)+next_prop_id(long)
if ( !readAndFlip( byteChannel, buffer, 1 + 8 + 8 + 8 ) )
{
return null;
}
PropertyRecord record = new PropertyRecord( id );
byte inUseFlag = buffer.get(); // 1
long nextProp = buffer.getLong(); // 8
long prevProp = buffer.getLong(); // 8
record.setNextProp( nextProp );
record.setPrevProp( prevProp );
boolean inUse = false;
if ( ( inUseFlag & Record.IN_USE.byteValue() ) == Record.IN_USE.byteValue() )
{
inUse = true;
}
// Second lsb distinguishes relationship-owned from node-owned properties.
boolean nodeProperty = true;
if ( ( inUseFlag & Record.REL_PROPERTY.byteValue() ) == Record.REL_PROPERTY.byteValue() )
{
nodeProperty = false;
}
long primitiveId = buffer.getLong(); // 8
if ( primitiveId != -1 && nodeProperty )
{
record.setNodeId( primitiveId );
}
else if ( primitiveId != -1 )
{
record.setRelId( primitiveId );
}
if ( !readAndFlip( byteChannel, buffer, 1 ) )
{
return null;
}
int nrPropBlocks = buffer.get();
assert nrPropBlocks >= 0;
if ( nrPropBlocks > 0 )
{
record.setInUse( true );
}
while ( nrPropBlocks-- > 0 )
{
PropertyBlock block = readPropertyBlock( byteChannel, buffer );
if ( block == null )
{
return null;
}
record.addPropertyBlock( block );
}
if ( !readDynamicRecords( byteChannel, buffer, record, PROPERTY_DELETED_DYNAMIC_RECORD_ADDER ) )
{
return null;
}
// NOTE(review): deleted dynamic records appear to be consumed twice --
// once via readDynamicRecords above and again via the explicit loop below
// after re-flipping the buffer. Confirm the on-disk format really contains
// both runs; otherwise the loop below reads stale buffer content.
buffer.flip();
int deletedRecords = buffer.getInt(); // 4
assert deletedRecords >= 0;
while ( deletedRecords-- > 0 )
{
DynamicRecord read = readDynamicRecord( byteChannel, buffer );
if ( read == null )
{
return null;
}
record.addDeletedRecord( read );
}
// Sanity check: the in-use flag byte must agree with the reconstructed record.
if ( ( inUse && !record.inUse() ) || ( !inUse && record.inUse() ) )
{
throw new IllegalStateException( "Weird, inUse was read in as "
+ inUse
+ " but the record is "
+ record );
}
return record;
}
}
// Collects dynamic records that were deleted as part of a property change;
// such records must no longer be in use.
private static final DynamicRecordAdder<PropertyRecord> PROPERTY_DELETED_DYNAMIC_RECORD_ADDER =
new DynamicRecordAdder<PropertyRecord>()
{
@Override
public void add( PropertyRecord target, DynamicRecord record )
{
assert !record.inUse() : record + " is kinda weird";
target.addDeletedRecord( record );
}
};
/**
 * Command for creating/updating a relationship type token and its dynamic
 * name record(s).
 */
static class RelationshipTypeTokenCommand extends Command
{
private final RelationshipTypeTokenRecord record;
private final RelationshipTypeTokenStore store;
RelationshipTypeTokenCommand( RelationshipTypeTokenStore store,
RelationshipTypeTokenRecord record )
{
super( record.getId(), Mode.fromRecordState( record ) );
this.record = record;
this.store = store;
}
@Override
public void accept( CommandRecordVisitor visitor )
{
visitor.visitRelationshipTypeToken( record );
}
@Override
public String toString()
{
return record.toString();
}
@Override
void removeFromCache( CacheAccessBackDoor cacheAccess )
{
// no-op: token records are not evicted through the cache back door
}
@Override
public void execute()
{
// Persist the token record in its store.
store.updateRecord( record );
}
@Override
public void writeToFile( LogBuffer buffer ) throws IOException
{
// id+in_use(byte)+type_blockId(int)+nr_type_records(int)
byte inUse = record.inUse() ? Record.IN_USE.byteValue()
: Record.NOT_IN_USE.byteValue();
buffer.put( REL_TYPE_COMMAND );
buffer.putInt( record.getId() ).put( inUse ).putInt( record.getNameId() );
writeDynamicRecords( buffer, record.getNameRecords() );
}
// Deserializes a RelationshipTypeTokenCommand; returns null on a truncated entry.
public static Command readFromFile( NeoStore neoStore,
ReadableByteChannel byteChannel, ByteBuffer buffer )
throws IOException
{
// id+in_use(byte)+type_blockId(int)+nr_type_records(int)
if ( !readAndFlip( byteChannel, buffer, 13 ) )
{
return null;
}
int id = buffer.getInt();
byte inUseFlag = buffer.get();
boolean inUse = false;
if ( (inUseFlag & Record.IN_USE.byteValue()) ==
Record.IN_USE.byteValue() )
{
inUse = true;
}
else if ( inUseFlag != Record.NOT_IN_USE.byteValue() )
{
throw new IOException( "Illegal in use flag: " + inUseFlag );
}
RelationshipTypeTokenRecord record = new RelationshipTypeTokenRecord( id );
record.setInUse( inUse );
record.setNameId( buffer.getInt() );
int nrTypeRecords = buffer.getInt();
for ( int i = 0; i < nrTypeRecords; i++ )
{
DynamicRecord dr = readDynamicRecord( byteChannel, buffer );
if ( dr == null )
{
return null;
}
record.addNameRecord( dr );
}
// neoStore may be null when commands are deserialized without a store.
return new RelationshipTypeTokenCommand(
neoStore == null ? null : neoStore.getRelationshipTypeStore(), record );
}
}
/**
 * Command for creating/updating a label token and its dynamic name record(s).
 * Mirrors RelationshipTypeTokenCommand, differing only in command byte and store.
 */
static class LabelTokenCommand extends Command
{
private final LabelTokenRecord record;
private final LabelTokenStore store;
LabelTokenCommand( LabelTokenStore store,
LabelTokenRecord record )
{
super( record.getId(), Mode.fromRecordState( record ) );
this.record = record;
this.store = store;
}
@Override
public void accept( CommandRecordVisitor visitor )
{
visitor.visitLabelToken( record );
}
@Override
public String toString()
{
return record.toString();
}
@Override
void removeFromCache( CacheAccessBackDoor cacheAccess )
{
// no-op: token records are not evicted through the cache back door
}
@Override
public void execute()
{
// Persist the token record in its store.
store.updateRecord( record );
}
@Override
public void writeToFile( LogBuffer buffer ) throws IOException
{
// id+in_use(byte)+type_blockId(int)+nr_type_records(int)
byte inUse = record.inUse() ? Record.IN_USE.byteValue()
: Record.NOT_IN_USE.byteValue();
buffer.put( LABEL_KEY_COMMAND );
buffer.putInt( record.getId() ).put( inUse ).putInt( record.getNameId() );
writeDynamicRecords( buffer, record.getNameRecords() );
}
// Deserializes a LabelTokenCommand; returns null on a truncated entry.
public static Command readFromFile( NeoStore neoStore,
ReadableByteChannel byteChannel, ByteBuffer buffer )
throws IOException
{
// id+in_use(byte)+type_blockId(int)+nr_type_records(int)
if ( !readAndFlip( byteChannel, buffer, 13 ) )
{
return null;
}
int id = buffer.getInt();
byte inUseFlag = buffer.get();
boolean inUse = false;
if ( (inUseFlag & Record.IN_USE.byteValue()) ==
Record.IN_USE.byteValue() )
{
inUse = true;
}
else if ( inUseFlag != Record.NOT_IN_USE.byteValue() )
{
throw new IOException( "Illegal in use flag: " + inUseFlag );
}
LabelTokenRecord record = new LabelTokenRecord( id );
record.setInUse( inUse );
record.setNameId( buffer.getInt() );
int nrTypeRecords = buffer.getInt();
for ( int i = 0; i < nrTypeRecords; i++ )
{
DynamicRecord dr = readDynamicRecord( byteChannel, buffer );
if ( dr == null )
{
return null;
}
record.addNameRecord( dr );
}
// neoStore may be null when commands are deserialized without a store.
return new LabelTokenCommand(
neoStore == null ? null : neoStore.getLabelTokenStore(), record );
}
}
/**
 * Command covering creation/update/deletion of a schema rule (index rules and
 * uniqueness constraint rules), stored as before/after sets of dynamic records.
 */
static class SchemaRuleCommand extends Command
{
private final NeoStore neoStore;
private final IndexingService indexes;
private final SchemaStore store;
private final Collection<DynamicRecord> recordsBefore;
private final Collection<DynamicRecord> recordsAfter;
private final SchemaRule schemaRule;
// Transaction id this command belongs to; mutable since it may be set late (see setTxId).
private long txId;
SchemaRuleCommand( NeoStore neoStore, SchemaStore store, IndexingService indexes,
Collection<DynamicRecord> recordsBefore, Collection<DynamicRecord> recordsAfter,
SchemaRule schemaRule, long txId )
{
super( first( recordsAfter ).getId(), Mode.fromRecordState( first( recordsAfter ) ) );
this.neoStore = neoStore;
this.indexes = indexes;
this.store = store;
this.recordsBefore = recordsBefore;
this.recordsAfter = recordsAfter;
this.schemaRule = schemaRule;
this.txId = txId;
}
@Override
public void accept( CommandRecordVisitor visitor )
{
visitor.visitSchemaRule( recordsAfter );
}
@Override
public String toString()
{
if ( schemaRule != null )
{
return getMode() + ":" + schemaRule.toString();
}
return "SchemaRule" + recordsAfter;
}
@Override
void removeFromCache( CacheAccessBackDoor cacheAccess )
{
cacheAccess.removeSchemaRuleFromCache( getKey() );
}
Collection<DynamicRecord> getRecordsAfter()
{
// Read-only view; callers must not mutate the command's state.
return unmodifiableCollection( recordsAfter );
}
@Override
public void execute()
{
// First persist the record changes, then mirror the change in the
// indexing service / neostore metadata.
for ( DynamicRecord record : recordsAfter )
{
store.updateRecord( record );
}
if ( schemaRule instanceof IndexRule )
{
switch ( getMode() )
{
case UPDATE:
// Shouldn't we be more clear about that we are waiting for an index to come online here?
// right now we just assume that an update to index records means wait for it to be online.
if ( ((IndexRule) schemaRule).isConstraintIndex() )
{
try
{
indexes.activateIndex( schemaRule.getId() );
}
catch ( IndexNotFoundKernelException | IndexActivationFailedKernelException |
IndexPopulationFailedKernelException e )
{
throw new IllegalStateException( "Unable to enable constraint, backing index is not online.", e );
}
}
break;
case CREATE:
indexes.createIndex( (IndexRule) schemaRule );
break;
case DELETE:
indexes.dropIndex( (IndexRule)schemaRule );
break;
default:
throw new IllegalStateException( getMode().name() );
}
}
if( schemaRule instanceof UniquenessConstraintRule )
{
switch ( getMode() )
{
case UPDATE:
case CREATE:
// Record which tx last introduced a constraint.
neoStore.setLatestConstraintIntroducingTx( txId );
break;
case DELETE:
break;
default:
throw new IllegalStateException( getMode().name() );
}
}
}
@Override
public void writeToFile( LogBuffer buffer ) throws IOException
{
// Format: command byte, before-records, after-records, isCreated flag, txId.
buffer.put( SCHEMA_RULE_COMMAND );
writeDynamicRecords( buffer, recordsBefore );
writeDynamicRecords( buffer, recordsAfter );
buffer.put( first( recordsAfter ).isCreated() ? (byte) 1 : 0);
buffer.putLong( txId );
}
public SchemaRule getSchemaRule()
{
return schemaRule;
}
public long getTxId()
{
return txId;
}
public void setTxId( long txId )
{
this.txId = txId;
}
// Deserializes a SchemaRuleCommand; throws IllegalStateException (rather than
// returning null) on truncation of the trailing flag/txId fields.
static Command readFromFile( NeoStore neoStore, IndexingService indexes, ReadableByteChannel byteChannel,
ByteBuffer buffer ) throws IOException
{
Collection<DynamicRecord> recordsBefore = new ArrayList<>();
readDynamicRecords( byteChannel, buffer, recordsBefore, COLLECTION_DYNAMIC_RECORD_ADDER );
Collection<DynamicRecord> recordsAfter = new ArrayList<>();
readDynamicRecords( byteChannel, buffer, recordsAfter, COLLECTION_DYNAMIC_RECORD_ADDER );
if ( !readAndFlip( byteChannel, buffer, 1 ) )
{
throw new IllegalStateException( "Missing SchemaRule.isCreated flag in deserialization" );
}
byte isCreated = buffer.get();
if ( 1 == isCreated )
{
for ( DynamicRecord record : recordsAfter )
{
record.setCreated();
}
}
if ( !readAndFlip( byteChannel, buffer, 8 ) )
{
throw new IllegalStateException( "Missing SchemaRule.txId in deserialization" );
}
long txId = buffer.getLong();
// For deletions the after-records are not in use, so deserialize the rule
// from the before-records instead.
SchemaRule rule = first( recordsAfter ).inUse() ?
readSchemaRule( recordsAfter ) :
readSchemaRule( recordsBefore );
return new SchemaRuleCommand( neoStore, neoStore != null ? neoStore.getSchemaStore() : null,
indexes, recordsBefore, recordsAfter, rule, txId );
}
// Reassembles a schema rule from the payload spread across its dynamic records.
private static SchemaRule readSchemaRule( Collection<DynamicRecord> recordsBefore )
{
assert first(recordsBefore).inUse() : "Asked to deserialize schema records that were not in use.";
SchemaRule rule;
ByteBuffer deserialized = AbstractDynamicStore.concatData( recordsBefore, new byte[100] );
try
{
rule = SchemaRule.Kind.deserialize( first( recordsBefore ).getId(), deserialized );
}
catch ( MalformedSchemaRuleException e )
{
// TODO This is bad. We should probably just shut down if that happens
throw launderedException( e );
}
return rule;
}
}
// Generic adder that simply collects deserialized dynamic records into a collection.
private static final DynamicRecordAdder<Collection<DynamicRecord>> COLLECTION_DYNAMIC_RECORD_ADDER =
new DynamicRecordAdder<Collection<DynamicRecord>>()
{
@Override
public void add( Collection<DynamicRecord> target, DynamicRecord record )
{
target.add( record );
}
};
/**
 * Reads the next command from the log channel, dispatching on the one-byte
 * command type. Returns null when the channel is exhausted or a NONE marker
 * is encountered; throws IOException on an unrecognized command type.
 */
public static Command readCommand( NeoStore neoStore, IndexingService indexes, ReadableByteChannel byteChannel,
ByteBuffer buffer ) throws IOException
{
if ( !readAndFlip( byteChannel, buffer, 1 ) )
{
return null;
}
byte commandType = buffer.get();
switch ( commandType )
{
case NODE_COMMAND:
return NodeCommand.readFromFile( neoStore, byteChannel, buffer );
case PROP_COMMAND:
return PropertyCommand.readFromFile( neoStore, byteChannel, buffer );
case PROP_INDEX_COMMAND:
return PropertyKeyTokenCommand.readFromFile( neoStore, byteChannel, buffer );
case REL_COMMAND:
return RelationshipCommand.readFromFile( neoStore, byteChannel, buffer );
case REL_TYPE_COMMAND:
return RelationshipTypeTokenCommand.readFromFile( neoStore, byteChannel, buffer );
case LABEL_KEY_COMMAND:
return LabelTokenCommand.readFromFile( neoStore, byteChannel, buffer );
case NEOSTORE_COMMAND:
return NeoStoreCommand.readFromFile( neoStore, byteChannel, buffer );
case SCHEMA_RULE_COMMAND:
return SchemaRuleCommand.readFromFile( neoStore, indexes, byteChannel, buffer );
case NONE: return null;
default:
throw new IOException( "Unknown command type[" + commandType + "]" );
}
}
/**
 * Renders a diff-style, two-line string: the before image prefixed with '-'
 * and the after image prefixed with '+'.
 */
static String beforeAndAfterToString( AbstractBaseRecord before, AbstractBaseRecord after )
{
    String diffTemplate = "%n -%s%n +%s";
    return format( diffTemplate, before, after );
}
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_xa_Command.java
|
773
|
public class IteratingPropertyReceiverTest
{
    @Test
    public void shouldAcceptAndThenIterateOverProperties()
    {
        // GIVEN a receiver fed a fixed number of int properties
        IteratingPropertyReceiver receiver = new IteratingPropertyReceiver();
        int expectedCount = 100;
        for ( int value = 0; value < expectedCount; value++ )
        {
            receiver.receive( Property.intProperty( 1, value ), 5 );
        }

        // THEN they come back out in order, exactly once each
        int seen = 0;
        for ( ; receiver.hasNext(); seen++ )
        {
            DefinedProperty next = receiver.next();
            assertEquals( seen, ((Integer) next.value()).intValue() );
        }
        assertFalse( receiver.hasNext() );
        assertEquals( expectedCount, seen );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_core_IteratingPropertyReceiverTest.java
|
774
|
/**
 * Base class for token creators that create the token in an isolated, nested
 * transaction: the caller's transaction is suspended, the token is created and
 * committed on its own, and the caller's transaction is then resumed. This way
 * the token survives even if the outer transaction later rolls back.
 */
public abstract class IsolatedTransactionTokenCreator implements TokenCreator
{
    private final StringLogger logger;

    public IsolatedTransactionTokenCreator( Logging logging )
    {
        this.logger = logging.getMessagesLog( getClass() );
    }

    /**
     * Creates the named token in its own transaction and returns its id.
     * Synchronized so that concurrent callers cannot race on token creation.
     *
     * @throws TransactionFailureException if the token could not be created,
     *         or if the surrounding transaction could not be suspended/resumed.
     */
    @Override
    public synchronized int getOrCreate( final AbstractTransactionManager txManager,
                                         final EntityIdGenerator idGenerator,
                                         final PersistenceManager persistence,
                                         final String name )
    {
        try
        {
            // Suspend the caller's transaction so the token is committed independently.
            Transaction runningTransaction = txManager.suspend();
            try
            {
                txManager.begin( ForceMode.unforced );
                int id = createKey( idGenerator, persistence, name );
                txManager.commit();
                return id;
            }
            catch ( Throwable t )
            {
                logger.error( "Unable to create key '" + name + "'", t );
                try
                {
                    txManager.rollback();
                }
                catch ( Throwable rollbackFailure )
                {
                    // Log the rollback failure itself; the original failure is
                    // already logged above and propagated below. (Previously the
                    // original exception was logged here too, hiding this one.)
                    logger.error( "Unable to rollback after failure to create key '" + name + "'",
                            rollbackFailure );
                }
                throw new TransactionFailureException( "Unable to create key '" + name + "'", t );
            }
            finally
            {
                if ( runningTransaction != null )
                {
                    txManager.resume( runningTransaction );
                }
            }
        }
        catch ( SystemException | InvalidTransactionException e )
        {
            throw new TransactionFailureException( "Unable to resume or suspend running transaction", e );
        }
    }

    // Allocates an id and persists the token; implemented per token type.
    protected abstract int createKey( EntityIdGenerator idGenerator, PersistenceManager persistence, String name );
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_core_IsolatedTransactionTokenCreator.java
|
775
|
public class IndexLock
{
private final String index;
private final String key;
public IndexLock( String index, String key )
{
this.index = index;
this.key = key;
}
public String getIndex()
{
return index;
}
public String getKey()
{
return key;
}
@Override
public int hashCode()
{ // Auto-generated
final int prime = 31;
int result = 1;
result = prime * result + ((index == null) ? 0 : index.hashCode());
result = prime * result + ((key == null) ? 0 : key.hashCode());
return result;
}
@Override
public boolean equals( Object obj )
{ // Auto-generated
if ( this == obj )
{
return true;
}
if ( obj == null )
{
return false;
}
if ( getClass() != obj.getClass() )
{
return false;
}
IndexLock other = (IndexLock) obj;
if ( index == null )
{
if ( other.index != null )
{
return false;
}
}
else if ( !index.equals( other.index ) )
{
return false;
}
if ( key == null )
{
if ( other.key != null )
{
return false;
}
}
else if ( !key.equals( other.key ) )
{
return false;
}
return true;
}
@Override
public String toString()
{
return "IndexLock[" + index + ":" + key + "]";
}
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_core_IndexLock.java
|
776
|
public class GraphPropertiesImpl extends Primitive implements GraphProperties
{
private final NodeManager nodeManager;
private Map<Integer, DefinedProperty> properties;
private final ThreadToStatementContextBridge statementContextProvider;
GraphPropertiesImpl( NodeManager nodeManager, ThreadToStatementContextBridge statementContextProvider )
{
super( false );
this.nodeManager = nodeManager;
this.statementContextProvider = statementContextProvider;
}
@Override
public NodeManager getNodeManager()
{
return nodeManager;
}
@Override
public GraphDatabaseService getGraphDatabase()
{
return this.nodeManager.getGraphDbService();
}
@Override
public int sizeOfObjectInBytesIncludingOverhead()
{
return 0;
}
@Override
protected boolean hasLoadedProperties()
{
return properties != null;
}
@Override
protected Iterator<DefinedProperty> loadProperties( NodeManager nodeManager )
{
return nodeManager.loadGraphProperties( false );
}
@Override
public boolean hasProperty( String key )
{
if ( null == key )
{
return false;
}
try ( Statement statement = statementContextProvider.instance() )
{
int propertyId = statement.readOperations().propertyKeyGetForName( key );
return statement.readOperations().graphGetProperty( propertyId ).isDefined();
}
}
@Override
public Object getProperty( String key )
{
if ( null == key )
{
throw new IllegalArgumentException( "(null) property key is not allowed" );
}
try ( Statement statement = statementContextProvider.instance() )
{
try
{
int propertyId = statement.readOperations().propertyKeyGetForName( key );
if ( propertyId == KeyReadOperations.NO_SUCH_PROPERTY_KEY )
{
throw new NotFoundException( String.format( "No such property, '%s'.", key ) );
}
return statement.readOperations().graphGetProperty( propertyId ).value();
}
catch ( PropertyNotFoundException e )
{
throw new NotFoundException(
e.getUserMessage( new StatementTokenNameLookup( statement.readOperations() ) ), e );
}
}
}
@Override
public Object getProperty( String key, Object defaultValue )
{
if ( null == key )
{
throw new IllegalArgumentException( "(null) property key is not allowed" );
}
try ( Statement statement = statementContextProvider.instance() )
{
int propertyId = statement.readOperations().propertyKeyGetForName( key );
if ( propertyId == KeyReadOperations.NO_SUCH_PROPERTY_KEY )
{
return false;
}
return statement.readOperations().graphGetProperty( propertyId ).value( defaultValue );
}
}
@Override
public void setProperty( String key, Object value )
{
boolean success = false;
try ( Statement statement = statementContextProvider.instance() )
{
int propertyKeyId = statement.tokenWriteOperations().propertyKeyGetOrCreateForName( key );
statement.dataWriteOperations().graphSetProperty( property( propertyKeyId, value ) );
success = true;
}
catch ( IllegalTokenNameException e )
{
// TODO: Maybe throw more context-specific error than just IllegalArgument
throw new IllegalArgumentException( e );
}
catch ( InvalidTransactionTypeKernelException e )
{
throw new ConstraintViolationException( e.getMessage(), e );
}
catch ( ReadOnlyDatabaseKernelException e )
{
throw new ReadOnlyDbException();
}
finally
{
if ( !success )
{
nodeManager.setRollbackOnly();
}
}
}
@Override
public Object removeProperty( String key )
{
try ( Statement statement = statementContextProvider.instance() )
{
int propertyId = statement.tokenWriteOperations().propertyKeyGetOrCreateForName( key );
return statement.dataWriteOperations().graphRemoveProperty( propertyId ).value( null );
}
catch ( IllegalTokenNameException e )
{
// TODO: Maybe throw more context-specific error than just IllegalArgument
throw new IllegalArgumentException( e );
}
catch ( InvalidTransactionTypeKernelException e )
{
throw new ConstraintViolationException( e.getMessage(), e );
}
catch ( ReadOnlyDatabaseKernelException e )
{
throw new ReadOnlyDbException();
}
}
@Override
public Iterable<String> getPropertyKeys()
{
try ( Statement statement = statementContextProvider.instance() )
{
List<String> keys = new ArrayList<>();
Iterator<DefinedProperty> properties = statement.readOperations().graphGetAllProperties();
while ( properties.hasNext() )
{
keys.add( statement.readOperations().propertyKeyGetName( properties.next().propertyKeyId() ) );
}
return keys;
}
catch ( PropertyKeyIdNotFoundKernelException e )
{
throw new ThisShouldNotHappenError( "Jake", "Property key retrieved through kernel API should exist." );
}
}
@Override
public String toString()
{
return getClass().getSimpleName();
}
@Override
public boolean equals( Object obj )
{
return obj instanceof GraphProperties && ((GraphProperties) obj).getNodeManager().equals( nodeManager );
}
@Override
public int hashCode()
{
return nodeManager.hashCode();
}
@Override
public long getId()
{
return -1;
}
@Override
protected void setEmptyProperties()
{
properties = new HashMap<>();
}
@Override
protected Iterator<DefinedProperty> getCachedProperties()
{
return properties.values().iterator();
}
@Override
protected PrimitiveLongIterator getCachedPropertyKeys()
{
return new PropertyKeyIdIterator( getCachedProperties() );
}
@Override
protected Property getCachedProperty( int key )
{
Property property = properties.get( key );
return property != null ? property : Property.noGraphProperty( key );
}
@Override
@SuppressWarnings("deprecation")
protected DefinedProperty getPropertyForIndex( int keyId )
{
DefinedProperty property = properties.get( keyId );
return property != null ? property : null;
}
@Override
protected void setProperties( Iterator<DefinedProperty> loadedProperties )
{
if ( loadedProperties != null && loadedProperties.hasNext() )
{
Map<Integer, DefinedProperty> newProperties = new HashMap<>();
while ( loadedProperties.hasNext() )
{
DefinedProperty property = loadedProperties.next();
newProperties.put( property.propertyKeyId(), property );
}
properties = newProperties;
}
else
{
properties = new HashMap<>();
}
}
@Override
public CowEntityElement getEntityElement( PrimitiveElement element, boolean create )
{
return element.graphElement( create );
}
@Override
PropertyContainer asProxy( NodeManager nm )
{
return this;
}
@Override
protected void commitPropertyMaps( ArrayMap<Integer, DefinedProperty> cowPropertyAddMap,
ArrayMap<Integer, DefinedProperty> cowPropertyRemoveMap, long firstProp )
{
if ( cowPropertyAddMap != null )
{
for ( Map.Entry<Integer, DefinedProperty> entry : cowPropertyAddMap.entrySet() )
{
properties.put( entry.getKey(), Property.property( entry.getKey(), entry.getValue().value() ) );
}
}
if ( cowPropertyRemoveMap != null )
{
for ( Map.Entry<Integer, DefinedProperty> entry : cowPropertyRemoveMap.entrySet() )
{
properties.remove( entry.getKey() );
}
}
}
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_core_GraphPropertiesImpl.java
|
777
|
/**
 * Token creator that allocates a new relationship type id and persists the
 * type name under it, inside the isolated transaction run by the superclass.
 */
public class DefaultRelationshipTypeCreator extends IsolatedTransactionTokenCreator
{
    public DefaultRelationshipTypeCreator( Logging logging )
    {
        super( logging );
    }

    @Override
    protected int createKey( EntityIdGenerator idGenerator, PersistenceManager persistence, String name )
    {
        // Reserve the next relationship-type id, then persist the name under it.
        int typeId = (int) idGenerator.nextId( RelationshipType.class );
        persistence.createRelationshipType( typeId, name );
        return typeId;
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_core_DefaultRelationshipTypeCreator.java
|
778
|
/**
 * Token creator that allocates a new property key token id and persists the
 * key name under it, inside the isolated transaction run by the superclass.
 */
public class DefaultPropertyTokenCreator extends IsolatedTransactionTokenCreator
{
    public DefaultPropertyTokenCreator( Logging logging )
    {
        super( logging );
    }

    @Override
    protected int createKey( EntityIdGenerator idGenerator, PersistenceManager persistence, String name )
    {
        // Reserve the next property-key id, then persist the name under it.
        int keyId = (int) idGenerator.nextId( PropertyKeyTokenRecord.class );
        persistence.createPropertyKeyToken( name, keyId );
        return keyId;
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_core_DefaultPropertyTokenCreator.java
|
779
|
/**
 * Token creator that allocates a new label id and persists the label name
 * under it, inside the isolated transaction run by the superclass.
 */
public class DefaultLabelIdCreator extends IsolatedTransactionTokenCreator
{
    public DefaultLabelIdCreator( Logging logging )
    {
        super( logging );
    }

    @Override
    protected int createKey( EntityIdGenerator idGenerator, PersistenceManager persistence, String name )
    {
        // Reserve the next label id, then persist the name under it.
        int labelId = (int) idGenerator.nextId( Label.class );
        persistence.createLabelId( name, labelId );
        return labelId;
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_core_DefaultLabelIdCreator.java
|
780
|
/**
 * Default {@link Caches} implementation that defers creation of node and
 * relationship caches to the {@link CacheProvider} supplied via configure().
 */
public class DefaultCaches implements Caches
{
    private CacheProvider cacheProvider;
    private Config config;
    private final StringLogger logger;
    private final Monitors monitors;

    public DefaultCaches( StringLogger logger, Monitors monitors )
    {
        this.logger = logger;
        this.monitors = monitors;
    }

    @Override
    public void configure( CacheProvider provider, Config config )
    {
        this.cacheProvider = provider;
        this.config = config;
    }

    @Override
    public Cache<NodeImpl> node()
    {
        // A fresh cache instance, built by the configured provider.
        return cacheProvider.newNodeCache( logger, config, monitors );
    }

    @Override
    public Cache<RelationshipImpl> relationship()
    {
        // A fresh cache instance, built by the configured provider.
        return cacheProvider.newRelationshipCache( logger, config, monitors );
    }

    @Override
    public void invalidate()
    {
        // Nothing held here to invalidate; caches are created on demand.
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_core_DefaultCaches.java
|
781
|
// Endless transactional worker loop for stress testing. Each iteration runs one
// unit of work inside its own transaction. Failures are printed and deliberately
// swallowed so the worker keeps hammering the database.
private abstract class Worker implements Runnable
{
protected final Random random = new Random();
@Override
public void run()
{
while ( true )
{
Transaction tx = db.beginTx();
try
{
doSomething();
tx.success();
}
catch ( Throwable t )
{
// Deliberate swallow: keep the worker alive, only report the failure.
t.printStackTrace(System.err);
System.err.flush();
// throw Exceptions.launderedException( t );
}
finally
{
tx.finish();
}
}
}
// One unit of work, executed inside the surrounding transaction.
protected abstract void doSomething() throws Throwable;
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_core_ConcurrentPropertyAccessIT.java
|
782
|
private class SetPropertyWorker extends Worker
{
    @Override
    protected void doSomething() throws Throwable
    {
        // Pick a random live node (leaving it in the pool) and set a random property on it.
        Node target = getNode( random, false ).other();
        target.setProperty( randomPropertyKey( random ), randomLongPropertyValue( random.nextInt( 8 ) + 2, random ) );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_core_ConcurrentPropertyAccessIT.java
|
783
|
private class ReplaceNodeWorker extends Worker
{
    @Override
    protected void doSomething() throws Throwable
    {
        // Take a node out of the pool, delete it, and put a fresh node in its slot.
        Pair<Integer, Node> slot = getNode( random, true );
        Node victim = slot.other();
        victim.delete();
        setNode( slot.first(), db.createNode() );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_core_ConcurrentPropertyAccessIT.java
|
784
|
private class RemovePropertyWorker extends Worker
{
    @Override
    protected void doSomething() throws Throwable
    {
        // Remove a random property from a random node, leaving the node in the pool.
        Node target = getNode( random, false ).other();
        target.removeProperty( randomPropertyKey( random ) );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_core_ConcurrentPropertyAccessIT.java
|
785
|
private class GetPropertyWorker extends Worker
{
    @Override
    protected void doSomething() throws Throwable
    {
        // Read a random property (defaulting to null) off a random node.
        Node target = getNode( random, false ).other();
        target.getProperty( randomPropertyKey( random ), null );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_core_ConcurrentPropertyAccessIT.java
|
786
|
@Ignore( "Good for driving out problems with loading/heaviness of property records" )
public class ConcurrentPropertyAccessIT
{
@Test
public void tryTriggerIssueWithConcurrentlySettingAndReadingProperties() throws Exception
{
// GIVEN one worker of each kind hammering the database concurrently, forever.
ExecutorService executor = newCachedThreadPool();
executor.submit( new SetPropertyWorker() );
executor.submit( new RemovePropertyWorker() );
executor.submit( new GetPropertyWorker() );
executor.submit( new ReplaceNodeWorker() );
// This test never finishes by itself; failures are observed on stderr.
waitIndefinitely();
// THEN
}
// Blocks the calling thread forever, re-sleeping after any interrupt.
private void waitIndefinitely()
{
while ( true )
{
try
{
sleep( MAX_VALUE );
}
catch ( InterruptedException e )
{
// Clear the interrupt flag and go back to sleeping.
interrupted();
// meh
}
}
}
// Database shared by all worker threads.
private GraphDatabaseService db;
// Pool of nodes operated on concurrently; all access is synchronized on the array.
private Node[] nodes;
// Picks a random non-null node from the pool; busy-waits (while holding the
// array lock) until a non-null slot is hit. When takeOut is true the slot is
// cleared, giving the caller exclusive ownership of the node.
protected Pair<Integer, Node> getNode( Random random, boolean takeOut )
{
synchronized ( nodes )
{
while ( true )
{
int index = random.nextInt( nodes.length );
Node node = nodes[index];
if ( null != node )
{
if ( takeOut )
nodes[index] = null;
return Pair.of( index, node );
}
}
}
}
protected void setNode( int i, Node node )
{
synchronized ( nodes )
{
nodes[i] = node;
}
}
private abstract class Worker implements Runnable
{
protected final Random random = new Random();
@Override
public void run()
{
while ( true )
{
Transaction tx = db.beginTx();
try
{
doSomething();
tx.success();
}
catch ( Throwable t )
{
t.printStackTrace(System.err);
System.err.flush();
// throw Exceptions.launderedException( t );
}
finally
{
tx.finish();
}
}
}
protected abstract void doSomething() throws Throwable;
}
private class SetPropertyWorker extends Worker
{
@Override
protected void doSomething() throws Throwable
{
Pair<Integer, Node> pair = getNode( random, false );
Node node = pair.other();
node.setProperty( randomPropertyKey( random ), randomLongPropertyValue( random.nextInt( 8 ) + 2, random ) );
}
}
private class GetPropertyWorker extends Worker
{
@Override
protected void doSomething() throws Throwable
{
Pair<Integer, Node> pair = getNode( random, false );
Node node = pair.other();
node.getProperty( randomPropertyKey( random ), null );
}
}
private class RemovePropertyWorker extends Worker
{
@Override
protected void doSomething() throws Throwable
{
Pair<Integer, Node> pair = getNode( random, false );
Node node = pair.other();
node.removeProperty( randomPropertyKey( random ) );
}
}
private class ReplaceNodeWorker extends Worker
{
@Override
protected void doSomething() throws Throwable
{
Pair<Integer, Node> pair = getNode( random, true );
int index = pair.first();
Node node = pair.other();
node.delete();
setNode( index, db.createNode() );
}
}
private Object randomLongPropertyValue( int length, Random random )
{
String[] parts = new String[] { "bozo", "bimbo", "basil", "bongo" };
StringBuilder result = new StringBuilder( 4 * length );
for ( int i = 0; i < length; i ++ )
{
result.append( parts[ random.nextInt( parts.length )] );
}
return result.toString();
}
private String randomPropertyKey( Random random )
{
return random.nextBoolean() ? "name" : "animals";
}
@Before
public void before() throws Exception
{
db =
new GraphDatabaseFactory().newEmbeddedDatabase( forTest( getClass() ).makeGraphDbDir().getAbsolutePath() );
nodes = createInitialNodes();
}
private Node[] createInitialNodes()
{
Node[] nodes = new Node[100];
Transaction tx = db.beginTx();
try
{
for ( int i = 0; i < nodes.length; i++ )
{
nodes[i] = db.createNode();
}
tx.success();
}
finally
{
tx.finish();
}
return nodes;
}
@After
public void after() throws Exception
{
db.shutdown();
}
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_core_ConcurrentPropertyAccessIT.java
|
787
|
/**
 * Worker thread: waits for the start signal, then loops — counting the
 * parent node's outgoing relationships and creating a new one each
 * iteration — until either a failure has been recorded or the stop signal
 * is set. Only the first failure is kept in {@code failure}.
 */
private static class Worker extends Thread
{
    private final GraphDatabaseService db;
    private final CountDownLatch startSignal;
    private final AtomicReference<Exception> failure;
    private final Node parentNode;
    private final AtomicBoolean stopSignal;

    public Worker( GraphDatabaseService db, CountDownLatch startSignal, AtomicBoolean stopSignal,
            AtomicReference<Exception> failure, Node parentNode )
    {
        this.db = db;
        this.startSignal = startSignal;
        this.stopSignal = stopSignal;
        this.failure = failure;
        this.parentNode = parentNode;
    }

    @Override
    public void run()
    {
        awaitStartSignal();
        while ( failure.get() == null && !stopSignal.get() )
        {
            Transaction tx = db.beginTx();
            try
            {
                // ArrayIndexOutOfBoundsException happens here
                count( parentNode.getRelationships( RELTYPE, OUTGOING ) );
                parentNode.createRelationshipTo( db.createNode(), RELTYPE );
                tx.success();
            }
            catch ( Exception e )
            {
                // Record only the first failure; later ones are dropped.
                failure.compareAndSet( null, e );
            }
            finally
            {
                tx.finish();
            }
        }
    }

    // Waits up to 10 seconds for the start signal before the work loop begins.
    private void awaitStartSignal()
    {
        try
        {
            startSignal.await( 10, SECONDS );
        }
        catch ( InterruptedException e )
        {
            // Restore the interrupt flag before bailing out so the thread's
            // interrupted status stays observable (the original swallowed it).
            Thread.currentThread().interrupt();
            throw new RuntimeException( e );
        }
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_core_ConcurrentCreateAndGetRelationshipsIT.java
|
788
|
public class ConcurrentCreateAndGetRelationshipsIT
{
    /**
     * Regression test: two threads concurrently read the relationships of a
     * shared parent node while also creating new ones, which used to throw
     * ArrayIndexOutOfBoundsException (see the comment in Worker.run()). The
     * test fails if any worker recorded an exception.
     */
    @Test
    public void tryToReproduceTheIssue() throws Exception
    {
        // GIVEN
        GraphDatabaseService db = dbRule.getGraphDatabaseService();
        CountDownLatch startSignal = new CountDownLatch( 1 );
        AtomicBoolean stopSignal = new AtomicBoolean();
        AtomicReference<Exception> failure = new AtomicReference<Exception>();
        Node parentNode = createNode( db );
        Collection<Worker> workers = createWorkers( db, startSignal, stopSignal, failure, parentNode );

        // WHEN
        startSignal.countDown();
        sleep( 500 );
        stopSignal.set( true );
        awaitWorkersToEnd( workers );

        // THEN
        if ( failure.get() != null )
        {
            throw new Exception( "A worker failed", failure.get() );
        }
    }

    private void awaitWorkersToEnd( Collection<Worker> workers ) throws InterruptedException
    {
        for ( Worker worker : workers )
        {
            worker.join();
        }
    }

    // Creates and starts two workers hammering the same parent node.
    private Collection<Worker> createWorkers( GraphDatabaseService db, CountDownLatch startSignal,
            AtomicBoolean stopSignal, AtomicReference<Exception> failure, Node parentNode )
    {
        Collection<Worker> workers = new ArrayList<Worker>();
        for ( int i = 0; i < 2; i++ )
        {
            workers.add( newWorker( db, startSignal, stopSignal, failure, parentNode ) );
        }
        return workers;
    }

    private Worker newWorker( GraphDatabaseService db, CountDownLatch startSignal, AtomicBoolean stopSignal,
            AtomicReference<Exception> failure, Node parentNode )
    {
        Worker worker = new Worker( db, startSignal, stopSignal, failure, parentNode );
        worker.start();
        return worker;
    }

    // Creates the single parent node that all workers share.
    private Node createNode( GraphDatabaseService db )
    {
        Transaction tx = db.beginTx();
        try
        {
            Node node = db.createNode();
            tx.success();
            return node;
        }
        finally
        {
            tx.finish();
        }
    }

    public final @Rule ImpermanentDatabaseRule dbRule = new ImpermanentDatabaseRule();
    private static final RelationshipType RELTYPE = MyRelTypes.TEST;

    /**
     * Worker thread: waits for the start signal, then loops — counting the
     * parent node's outgoing relationships and creating a new one — until a
     * failure is recorded or the stop signal is set.
     */
    private static class Worker extends Thread
    {
        private final GraphDatabaseService db;
        private final CountDownLatch startSignal;
        private final AtomicReference<Exception> failure;
        private final Node parentNode;
        private final AtomicBoolean stopSignal;

        public Worker( GraphDatabaseService db, CountDownLatch startSignal, AtomicBoolean stopSignal,
                AtomicReference<Exception> failure, Node parentNode )
        {
            this.db = db;
            this.startSignal = startSignal;
            this.stopSignal = stopSignal;
            this.failure = failure;
            this.parentNode = parentNode;
        }

        @Override
        public void run()
        {
            awaitStartSignal();
            while ( failure.get() == null && !stopSignal.get() )
            {
                Transaction tx = db.beginTx();
                try
                {
                    // ArrayIndexOutOfBoundsException happens here
                    count( parentNode.getRelationships( RELTYPE, OUTGOING ) );
                    parentNode.createRelationshipTo( db.createNode(), RELTYPE );
                    tx.success();
                }
                catch ( Exception e )
                {
                    // Only the first failure is kept.
                    failure.compareAndSet( null, e );
                }
                finally
                {
                    tx.finish();
                }
            }
        }

        private void awaitStartSignal()
        {
            try
            {
                // NOTE(review): a 10s timeout elapsing is silently ignored and
                // the interrupt status is not restored on InterruptedException —
                // acceptable in this test harness, but worth confirming.
                startSignal.await( 10, SECONDS );
            }
            catch ( InterruptedException e )
            {
                throw new RuntimeException( e );
            }
        }
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_core_ConcurrentCreateAndGetRelationshipsIT.java
|
789
|
{
@Override
public String getMethodName()
{
return BigStoreIT.this.getClass().getSimpleName() + "#" + super.getMethodName();
}
};
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_core_BigStoreIT.java
|
790
|
/**
 * Tests creating and reading back nodes/relationships/properties whose ids
 * lie around and above the 32- and 33-bit boundaries, by force-setting the
 * high ids of the relevant id generators before creating data. The class
 * implements RelationshipType so it can be used as its own type ("BIG_TYPE").
 */
public class BigStoreIT implements RelationshipType
{
    private static final RelationshipType OTHER_TYPE = DynamicRelationshipType.withName( "OTHER" );
    private static final String PATH = "target/var/big";
    private GraphDatabaseService db;

    // Test name prefixed with the class name, used in the skip messages below.
    public @Rule
    TestName testName = new TestName()
    {
        @Override
        public String getMethodName()
        {
            return BigStoreIT.this.getClass().getSimpleName() + "#" + super.getMethodName();
        }
    };

    @Before
    public void doBefore()
    {
        // Delete before just to be sure
        deleteFileOrDirectory( new File( PATH ) );
        db = new GraphDatabaseFactory().newEmbeddedDatabase( PATH );
    }

    @After
    public void doAfter()
    {
        db.shutdown();
        // Delete after because it's so darn big
        deleteFileOrDirectory( new File( PATH ) );
    }

    // RelationshipType implementation: the type this test creates.
    @Override
    public String name()
    {
        return "BIG_TYPE";
    }

    @Test
    public void create4BPlusStuff() throws Exception
    {
        testHighIds( (long) pow( 2, 32 ), 2, 400 );
    }

    @Test
    public void create8BPlusStuff() throws Exception
    {
        testHighIds( (long) pow( 2, 33 ), 1, 1000 );
    }

    @Test
    public void createAndVerify32BitGraph() throws Exception
    {
        createAndVerifyGraphStartingWithId( (long) pow( 2, 32 ), 400 );
    }

    @Test
    public void createAndVerify33BitGraph() throws Exception
    {
        createAndVerifyGraphStartingWithId( (long) pow( 2, 33 ), 1000 );
    }

    @Ignore("Blows up with a FileTooLarge error")
    @Test
    public void createAndVerify34BitGraph() throws Exception
    {
        createAndVerifyGraphStartingWithId( (long) pow( 2, 34 ), 1600 );
    }

    /**
     * Creates 10000 (refNode)-->(node)-->(highNode) chains with ids starting
     * just below startId, restarts the database and verifies all data.
     *
     * @param startId the id region to exercise; high ids are set to startId-1000.
     * @param requiredHeapMb minimum heap for this test; skipped via assumeTrue
     *                       when the machine doesn't qualify.
     */
    private void createAndVerifyGraphStartingWithId( long startId, int requiredHeapMb ) throws Exception
    {
        assumeTrue( machineIsOkToRunThisTest( testName.getMethodName(), requiredHeapMb ) );

        /*
         * Will create a layout like this:
         *
         * (refNode) --> (node) --> (highNode)
         *           ...
         *           ...
         *
         * Each node/relationship will have a bunch of different properties on them.
         */
        Node refNode = createReferenceNode( db );
        setHighIds( startId-1000 );

        byte[] bytes = new byte[45];
        bytes[2] = 5;
        bytes[10] = 42;
        Map<String, Object> properties = map( "number", 11, "short string", "test",
                "long string", "This is a long value, long enough", "array", bytes );
        Transaction tx = db.beginTx();
        int count = 10000;
        for ( int i = 0; i < count; i++ )
        {
            Node node = db.createNode();
            setProperties( node, properties );
            Relationship rel1 = refNode.createRelationshipTo( node, this );
            setProperties( rel1, properties );
            Node highNode = db.createNode();
            Relationship rel2 = node.createRelationshipTo( highNode, OTHER_TYPE );
            setProperties( rel2, properties );
            setProperties( highNode, properties );
            if ( i % 100 == 0 && i > 0 )
            {
                // Commit in batches of 100 to keep each transaction small.
                tx.success();
                tx.finish();
                tx = db.beginTx();
            }
        }
        tx.success();
        tx.finish();

        // Restart so verification reads from the store, not just the cache.
        db.shutdown();
        db = new GraphDatabaseFactory().newEmbeddedDatabase( PATH );

        // Verify the data
        int verified = 0;
        try ( Transaction transaction = db.beginTx() )
        {
            refNode = db.getNodeById( refNode.getId() );
            for ( Relationship rel : refNode.getRelationships( Direction.OUTGOING ) )
            {
                Node node = rel.getEndNode();
                assertProperties( properties, node );
                assertProperties( properties, rel );
                Node highNode = node.getSingleRelationship( OTHER_TYPE, Direction.OUTGOING ).getEndNode();
                assertProperties( properties, highNode );
                verified++;
            }
            transaction.success();
        }
        assertEquals( count, verified );
    }

    private static final Label REFERENCE = DynamicLabel.label( "Reference" );

    private Node createReferenceNode( GraphDatabaseService db )
    {
        try ( Transaction tx = db.beginTx() )
        {
            Node node = db.createNode( REFERENCE );
            tx.success();
            return node;
        }
    }

    /**
     * Checks OS and heap requirements, printing why and returning false when
     * the test should be skipped on this machine.
     */
    public static boolean machineIsOkToRunThisTest( String testName, int requiredHeapMb )
    {
        if ( Settings.osIsWindows() )
        {
            System.out.println( testName + ": This test cannot be run on Windows because it can't handle files of this size in a timely manner" );
            return false;
        }
        if ( Settings.osIsMacOS() )
        {
            System.out.println( testName + ": This test cannot be run on Mac OS X because Mac OS X doesn't support sparse files" );
            return false;
        }

        long heapMb = Runtime.getRuntime().maxMemory() / (1000*1000); // Not 1024, matches better wanted result with -Xmx
        if ( heapMb < requiredHeapMb )
        {
            System.out.println( testName + ": This test requires a heap of size " + requiredHeapMb + ", this heap has only " + heapMb );
            return false;
        }
        return true;
    }

    // Asserts the entity carries exactly the given properties — no more, no less.
    public static void assertProperties( Map<String, Object> properties, PropertyContainer entity )
    {
        int count = 0;
        for ( String key : entity.getPropertyKeys() )
        {
            Object expectedValue = properties.get( key );
            Object entityValue = entity.getProperty( key );
            if ( expectedValue.getClass().isArray() )
            {
                assertTrue( Arrays.equals( (byte[]) expectedValue, (byte[]) entityValue ) );
            }
            else
            {
                assertEquals( expectedValue, entityValue );
            }
            count++;
        }
        assertEquals( properties.size(), count );
    }

    private void setProperties( PropertyContainer entity, Map<String, Object> properties )
    {
        for ( Map.Entry<String, Object> property : properties.entrySet() )
        {
            entity.setProperty( property.getKey(), property.getValue() );
        }
    }

    /**
     * Creates one node/relationship just below highMark and one at/above it,
     * with int, string and array properties, then verifies ids, properties
     * and relationship lookups both before and after a database restart.
     */
    private void testHighIds( long highMark, int minus, int requiredHeapMb )
    {
        if ( !machineIsOkToRunThisTest( testName.getMethodName(), requiredHeapMb ) )
        {
            return;
        }
        long idBelow = highMark-minus;
        setHighIds( idBelow );
        String propertyKey = "name";
        int intPropertyValue = 123;
        String stringPropertyValue = "Long string, longer than would fit in shortstring";
        long[] arrayPropertyValue = new long[] { 1021L, 321L, 343212L };

        Transaction tx = db.beginTx();
        Node nodeBelowTheLine = db.createNode();
        nodeBelowTheLine.setProperty( propertyKey, intPropertyValue );
        assertEquals( idBelow, nodeBelowTheLine.getId() );
        Node nodeAboveTheLine = db.createNode();
        nodeAboveTheLine.setProperty( propertyKey, stringPropertyValue );
        Relationship relBelowTheLine = nodeBelowTheLine.createRelationshipTo( nodeAboveTheLine, this );
        relBelowTheLine.setProperty( propertyKey, arrayPropertyValue );
        assertEquals( idBelow, relBelowTheLine.getId() );
        Relationship relAboveTheLine = nodeAboveTheLine.createRelationshipTo( nodeBelowTheLine, this );
        assertEquals( highMark, relAboveTheLine.getId() );
        assertEquals( highMark, nodeAboveTheLine.getId() );
        assertEquals( intPropertyValue, nodeBelowTheLine.getProperty( propertyKey ) );
        assertEquals( stringPropertyValue, nodeAboveTheLine.getProperty( propertyKey ) );
        assertTrue( Arrays.equals( arrayPropertyValue, (long[]) relBelowTheLine.getProperty( propertyKey ) ) );
        tx.success();
        tx.finish();

        // Verify twice: once against the live db, once after a restart.
        for ( int i = 0; i < 2; i++ )
        {
            try ( Transaction transaction = db.beginTx() )
            {
                assertEquals( nodeAboveTheLine, db.getNodeById( highMark ) );
                assertEquals( idBelow, nodeBelowTheLine.getId() );
                assertEquals( highMark, nodeAboveTheLine.getId() );
                assertEquals( idBelow, relBelowTheLine.getId() );
                assertEquals( highMark, relAboveTheLine.getId() );
                assertEquals( relBelowTheLine,
                        db.getNodeById( idBelow ).getSingleRelationship( this, Direction.OUTGOING ) );
                assertEquals( relAboveTheLine,
                        db.getNodeById( idBelow ).getSingleRelationship( this, Direction.INCOMING ) );
                assertEquals( idBelow, relBelowTheLine.getId() );
                assertEquals( highMark, relAboveTheLine.getId() );
                assertEquals( asSet( asList( relBelowTheLine, relAboveTheLine ) ),
                        asSet( asCollection( db.getNodeById( idBelow ).getRelationships() ) ) );
                transaction.success();
            }
            if ( i == 0 )
            {
                db.shutdown();
                db = new GraphDatabaseFactory().newEmbeddedDatabase( PATH );
            }
        }
    }

    // Forces the next-allocated ids of all relevant stores to the given value.
    private void setHighIds( long id )
    {
        setHighId( IdType.NODE, id );
        setHighId( IdType.RELATIONSHIP, id );
        setHighId( IdType.PROPERTY, id );
        setHighId( IdType.ARRAY_BLOCK, id );
        setHighId( IdType.STRING_BLOCK, id );
    }

    private static <T> Collection<T> asSet( Collection<T> collection )
    {
        return new HashSet<T>( collection );
    }

    private void setHighId( IdType type, long highId )
    {
        ((GraphDatabaseAPI)db).getDependencyResolver().resolveDependency( IdGeneratorFactory.class ).get( type ).setHighId( highId );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_core_BigStoreIT.java
|
791
|
/**
 * Graph database variant wired with jumping id generators and a jumping
 * file system, both sized by SIZE_PER_JUMP.
 *
 * NOTE(review): the constructor calls run(), which means the overridden
 * factory methods below execute before the subclass is fully constructed —
 * existing behavior, left as-is.
 */
private static class TestDatabase extends InternalAbstractGraphDatabase
{
    protected TestDatabase( String storeDir, Map<String, String> params )
    {
        super( storeDir, params, new DefaultGraphDatabaseDependencies() );
        run();
    }

    @Override
    protected IdGeneratorFactory createIdGeneratorFactory()
    {
        return new JumpingIdGeneratorFactory( SIZE_PER_JUMP );
    }

    @Override
    protected FileSystemAbstraction createFileSystemAbstraction()
    {
        // Registered with the database lifecycle so it is shut down with it.
        return life.add( new JumpingFileSystemAbstraction( SIZE_PER_JUMP ) );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_core_BigJumpingStoreIT.java
|
792
|
/**
 * Exercises CRUD around very high record ids using jumping id generators and
 * a jumping file system, with memory mapping turned off so all access goes
 * through the (jumping) file channels.
 */
public class BigJumpingStoreIT
{
    /** Database wired with the jumping id generator and file system. */
    private static class TestDatabase extends InternalAbstractGraphDatabase
    {
        protected TestDatabase( String storeDir, Map<String, String> params )
        {
            super( storeDir, params, new DefaultGraphDatabaseDependencies() );
            run();
        }

        @Override
        protected IdGeneratorFactory createIdGeneratorFactory()
        {
            return new JumpingIdGeneratorFactory( SIZE_PER_JUMP );
        }

        @Override
        protected FileSystemAbstraction createFileSystemAbstraction()
        {
            return life.add( new JumpingFileSystemAbstraction( SIZE_PER_JUMP ) );
        }
    }

    private static final int SIZE_PER_JUMP = 1000;
    private static final String PATH = "target/var/bigjump";
    private static final RelationshipType TYPE = DynamicRelationshipType.withName( "KNOWS" );
    private static final RelationshipType TYPE2 = DynamicRelationshipType.withName( "DROP_KICKS" );
    private InternalAbstractGraphDatabase db;

    @Before
    public void doBefore()
    {
        deleteFileOrDirectory( PATH );
        db = new TestDatabase( PATH, configForNoMemoryMapping() );
    }

    // Disables memory mapping for all stores.
    private Map<String, String> configForNoMemoryMapping()
    {
        return stringMap(
                use_memory_mapped_buffers.name(), "false",
                "neostore.nodestore.db.mapped_memory", "0M",
                "neostore.relationshipstore.db.mapped_memory", "0M",
                "neostore.propertystore.db.mapped_memory", "0M",
                "neostore.propertystore.db.strings.mapped_memory", "0M",
                "neostore.propertystore.db.arrays.mapped_memory", "0M" );
    }

    @After
    public void doAfter()
    {
        if ( db != null )
        {
            db.shutdown();
        }
        db = null;
    }

    /**
     * Creates nodes/relationships with properties, verifies them (also after
     * clearing the cache), mutates/removes a mix of properties and
     * relationships keyed on i % 6, then verifies the derived expected state.
     */
    @Test
    public void crudOnHighIds() throws Exception
    {
        // Create stuff
        List<Node> nodes = new ArrayList<>();
        Transaction tx = db.beginTx();
        int numberOfNodes = SIZE_PER_JUMP * 3;
        String stringValue = "a longer string than short";
        byte[] arrayValue = new byte[]{3, 7};
        for ( int i = 0; i < numberOfNodes; i++ )
        {
            Node node = db.createNode();
            node.setProperty( "number", i );
            node.setProperty( "string", stringValue );
            node.setProperty( "array", arrayValue );
            nodes.add( node );
        }

        int numberOfRels = numberOfNodes - 100;
        for ( int i = 0; i < numberOfRels; i++ )
        {
            // The first nodes become hubs with up to 100 outgoing relationships.
            Node node1 = nodes.get( i / 100 );
            Node node2 = nodes.get( i + 1 );
            node1.createRelationshipTo( node2, TYPE );
        }
        tx.success();
        //noinspection deprecation
        tx.finish();

        // Verify
        tx = db.beginTx();
        int relCount = 0;
        // Two passes: the second runs against a cleared cache.
        for ( int t = 0; t < 2; t++ )
        {
            int nodeCount = 0;
            relCount = 0;
            for ( Node node : nodes )
            {
                node = db.getNodeById( node.getId() );
                assertProperties( map( "number", nodeCount++, "string", stringValue, "array", arrayValue ), node );
                relCount += count( node.getRelationships( Direction.OUTGOING ) );
            }
            nodeManager().clearCache();
        }
        assertEquals( numberOfRels, relCount );
        //noinspection deprecation
        tx.finish();

        // Remove stuff
        tx = db.beginTx();
        for ( int i = 0; i < nodes.size(); i++ )
        {
            Node node = nodes.get( i );
            // Vary the mutation per node so the verification below can derive
            // the expected state from i % 6.
            switch ( i % 6 )
            {
            case 0:
                node.removeProperty( "number" );
                break;
            case 1:
                node.removeProperty( "string" );
                break;
            case 2:
                node.removeProperty( "array" );
                break;
            case 3:
                node.removeProperty( "number" );
                node.removeProperty( "string" );
                node.removeProperty( "array" );
                break;
            case 4:
                node.setProperty( "new", 34 );
                break;
            case 5:
                // Overwrite and restore, ending at the original value.
                Object oldValue = node.getProperty( "string", null );
                if ( oldValue != null )
                {
                    node.setProperty( "string", "asjdkasdjkasjdkasjdkasdjkasdj" );
                    node.setProperty( "string", stringValue );
                }
            }

            if ( count( node.getRelationships() ) > 50 )
            {
                if ( i % 2 == 0 )
                {
                    deleteIfNotNull( firstOrNull( node.getRelationships() ) );
                    deleteIfNotNull( lastOrNull( node.getRelationships() ) );
                }
                else
                {
                    deleteEveryOther( node.getRelationships() );
                }
                setPropertyOnAll( node.getRelationships( Direction.OUTGOING ), "relprop", "rel value" );
            }
            else if ( i % 20 == 0 )
            {
                Node otherNode = nodes.get( nodes.size() - i - 1 );
                Relationship rel = node.createRelationshipTo( otherNode, TYPE2 );
                rel.setProperty( "other relprop", 1010 );
            }
        }
        tx.success();
        //noinspection deprecation
        tx.finish();

        // Verify again
        tx = db.beginTx();
        for ( int t = 0; t < 2; t++ )
        {
            int nodeCount = 0;
            for ( Node node : nodes )
            {
                node = db.getNodeById( node.getId() );
                // Expected property sets mirror the mutations done per i % 6 above.
                switch ( nodeCount % 6 )
                {
                case 0:
                    assertProperties( map( "string", stringValue, "array", arrayValue ), node );
                    break;
                case 1:
                    assertProperties( map( "number", nodeCount, "array", arrayValue ), node );
                    break;
                case 2:
                    assertProperties( map( "number", nodeCount, "string", stringValue ), node );
                    break;
                case 3:
                    assertEquals( 0, count( node.getPropertyKeys() ) );
                    break;
                case 4:
                    assertProperties( map( "number", nodeCount, "string", stringValue, "array", arrayValue,
                            "new", 34 ), node );
                    break;
                case 5:
                    assertProperties( map( "number", nodeCount, "string", stringValue, "array", arrayValue ),
                            node );
                    break;
                default:
                }
                for ( Relationship rel : node.getRelationships( Direction.OUTGOING ) )
                {
                    if ( rel.isType( TYPE ) )
                    {
                        assertProperties( map( "relprop", "rel value" ), rel );
                    }
                    else if ( rel.isType( TYPE2 ) )
                    {
                        assertProperties( map( "other relprop", 1010 ), rel );
                    }
                    else
                    {
                        fail( "Invalid type " + rel.getType() + " for " + rel );
                    }
                }
                nodeCount++;
            }
            nodeManager().clearCache();
        }
        //noinspection deprecation
        tx.finish();
    }

    private NodeManager nodeManager()
    {
        return db.getDependencyResolver().resolveDependency( NodeManager.class );
    }

    private void setPropertyOnAll( Iterable<Relationship> relationships, String key,
            Object value )
    {
        for ( Relationship rel : relationships )
        {
            rel.setProperty( key, value );
        }
    }

    // Deletes every other relationship in iteration order (first, third, ...).
    private void deleteEveryOther( Iterable<Relationship> relationships )
    {
        int relCounter = 0;
        for ( Relationship rel : relationships )
        {
            if ( relCounter++ % 2 == 0 )
            {
                rel.delete();
            }
        }
    }

    private void deleteIfNotNull( Relationship relationship )
    {
        if ( relationship != null )
        {
            relationship.delete();
        }
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_core_BigJumpingStoreIT.java
|
793
|
/**
 * Collects properties pushed through the {@code receive} callback into a
 * growable array and exposes them as a prefetching iterator.
 * Not thread-safe: receive all properties first, then iterate.
 */
public class IteratingPropertyReceiver extends PrefetchingIterator<DefinedProperty> implements PropertyReceiver
{
    private DefinedProperty[] properties = new DefinedProperty[9];
    // Next free slot to write into; also the count of received properties.
    private int writeCursor;
    // Next slot to hand out from fetchNextOrNull().
    private int readCursor;

    @Override
    public void receive( DefinedProperty property, long propertyRecordId )
    {
        if ( writeCursor >= properties.length )
        {
            // Double the capacity when full.
            properties = Arrays.copyOf( properties, properties.length*2 );
        }
        properties[writeCursor++] = property;
    }

    @Override
    protected DefinedProperty fetchNextOrNull()
    {
        // Stop at the number of properties actually received rather than at
        // the (larger) array capacity, so the null padding slots beyond
        // writeCursor are never handed out. The original bounded by
        // properties.length and relied on those nulls to terminate iteration.
        if ( readCursor >= writeCursor )
        {
            return null;
        }
        return properties[readCursor++];
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_core_IteratingPropertyReceiver.java
|
794
|
/**
 * File system abstraction used for testing very high record ids: the record
 * store files are wrapped in a {@link JumpingFileChannel} that translates
 * positions so that records clustered around multiples of 2^32 are stored
 * compactly. All actual storage is delegated to an in-memory
 * {@link EphemeralFileSystemAbstraction}.
 */
public class JumpingFileSystemAbstraction extends LifecycleAdapter implements FileSystemAbstraction
{
    // Number of records kept in the window around each jump point.
    private final int sizePerJump;
    private final EphemeralFileSystemAbstraction actualFileSystem = new EphemeralFileSystemAbstraction();

    public JumpingFileSystemAbstraction( int sizePerJump )
    {
        this.sizePerJump = sizePerJump;
    }

    @Override
    public StoreChannel open( File fileName, String mode ) throws IOException
    {
        StoreFileChannel channel = (StoreFileChannel) actualFileSystem.open( fileName, mode );
        // Only the record stores get the jumping translation; any other file
        // is returned as a plain channel.
        if (
                fileName.getName().equals( "neostore.nodestore.db" ) ||
                fileName.getName().equals( "neostore.nodestore.db.labels" ) ||
                fileName.getName().equals( "neostore.relationshipstore.db" ) ||
                fileName.getName().equals( "neostore.propertystore.db" ) ||
                fileName.getName().equals( "neostore.propertystore.db.strings" ) ||
                fileName.getName().equals( "neostore.propertystore.db.arrays" ) )
        {
            return new JumpingFileChannel( channel, recordSizeFor( fileName ) );
        }
        return channel;
    }

    @Override
    public OutputStream openAsOutputStream( File fileName, boolean append ) throws IOException
    {
        return new ChannelOutputStream( open( fileName, "rw" ), append );
    }

    @Override
    public InputStream openAsInputStream( File fileName ) throws IOException
    {
        return new ChannelInputStream( open( fileName, "r" ) );
    }

    @Override
    public Reader openAsReader( File fileName, String encoding ) throws IOException
    {
        return new InputStreamReader( openAsInputStream( fileName ), encoding );
    }

    @Override
    public Writer openAsWriter( File fileName, String encoding, boolean append ) throws IOException
    {
        return new OutputStreamWriter( openAsOutputStream( fileName, append ), encoding );
    }

    @Override
    public StoreChannel create( File fileName ) throws IOException
    {
        return open( fileName, "rw" );
    }

    // The methods below delegate straight to the in-memory file system.

    @Override
    public boolean fileExists( File fileName )
    {
        return actualFileSystem.fileExists( fileName );
    }

    @Override
    public long getFileSize( File fileName )
    {
        return actualFileSystem.getFileSize( fileName );
    }

    @Override
    public boolean deleteFile( File fileName )
    {
        return actualFileSystem.deleteFile( fileName );
    }

    @Override
    public void deleteRecursively( File directory ) throws IOException
    {
        actualFileSystem.deleteRecursively( directory );
    }

    @Override
    public boolean mkdir( File fileName )
    {
        return actualFileSystem.mkdir( fileName );
    }

    @Override
    public void mkdirs( File fileName )
    {
        actualFileSystem.mkdirs( fileName );
    }

    @Override
    public boolean renameFile( File from, File to ) throws IOException
    {
        return actualFileSystem.renameFile( from, to );
    }

    @Override
    public FileLock tryLock( File fileName, StoreChannel channel ) throws IOException
    {
        return actualFileSystem.tryLock( fileName, channel );
    }

    @Override
    public File[] listFiles( File directory )
    {
        return actualFileSystem.listFiles( directory );
    }

    @Override
    public boolean isDirectory( File file )
    {
        return actualFileSystem.isDirectory( file );
    }

    @Override
    public void moveToDirectory( File file, File toDirectory ) throws IOException
    {
        actualFileSystem.moveToDirectory( file, toDirectory );
    }

    @Override
    public void copyFile( File from, File to ) throws IOException
    {
        actualFileSystem.copyFile( from, to );
    }

    @Override
    public void copyRecursively( File fromDirectory, File toDirectory ) throws IOException
    {
        actualFileSystem.copyRecursively( fromDirectory, toDirectory );
    }

    // Maps a store file to its record size, used to convert byte positions
    // into record numbers for the jump translation.
    private int recordSizeFor( File fileName )
    {
        if ( fileName.getName().endsWith( "nodestore.db" ) )
        {
            return NodeStore.RECORD_SIZE;
        }
        else if ( fileName.getName().endsWith( "relationshipstore.db" ) )
        {
            return RelationshipStore.RECORD_SIZE;
        }
        else if ( fileName.getName().endsWith( "propertystore.db.strings" ) ||
                fileName.getName().endsWith( "propertystore.db.arrays" ) )
        {
            return AbstractDynamicStore.getRecordSize( PropertyStore.DEFAULT_DATA_BLOCK_SIZE );
        }
        else if ( fileName.getName().endsWith( "propertystore.db" ) )
        {
            return PropertyStore.RECORD_SIZE;
        }
        else if ( fileName.getName().endsWith( "nodestore.db.labels" ) )
        {
            return Integer.parseInt( GraphDatabaseSettings.label_block_size.getDefaultValue() );
        }
        else if ( fileName.getName().endsWith( "schemastore.db" ) )
        {
            return SchemaStore.getRecordSize( SchemaStore.BLOCK_SIZE );
        }
        throw new IllegalArgumentException( fileName.getPath() );
    }

    /**
     * Channel that translates between the sparse "jumping" id space (records
     * around multiples of 0x100000000L) and a compact on-disk layout where
     * each jump occupies a window of sizePerJump records.
     */
    public class JumpingFileChannel extends StoreFileChannel
    {
        private final int recordSize;

        public JumpingFileChannel( StoreFileChannel actual, int recordSize )
        {
            super( actual );
            this.recordSize = recordSize;
        }

        private long translateIncoming( long position )
        {
            return translateIncoming( position, false );
        }

        // Sparse position -> compact position. Records within the first half
        // window pass through unchanged; others are folded into the window
        // belonging to their nearest 2^32 jump point.
        private long translateIncoming( long position, boolean allowFix )
        {
            long actualRecord = position/recordSize;
            if ( actualRecord < sizePerJump/2 )
            {
                return position;
            }
            else
            {
                long jumpIndex = (actualRecord+sizePerJump)/0x100000000L;
                long diff = actualRecord - jumpIndex * 0x100000000L;
                diff = assertWithinDiff( diff, allowFix );
                long offsettedRecord = jumpIndex*sizePerJump + diff;
                return offsettedRecord*recordSize;
            }
        }

        // Compact position -> sparse position (inverse of translateIncoming).
        private long translateOutgoing( long offsettedPosition )
        {
            long offsettedRecord = offsettedPosition/recordSize;
            if ( offsettedRecord < sizePerJump/2 )
            {
                return offsettedPosition;
            }
            else
            {
                long jumpIndex = (offsettedRecord-sizePerJump/2) / sizePerJump + 1;
                long diff = ((offsettedRecord-sizePerJump/2) % sizePerJump) - sizePerJump/2;
                assertWithinDiff( diff, false );
                long actualRecord = jumpIndex*0x100000000L - sizePerJump/2 + diff;
                return actualRecord*recordSize;
            }
        }

        // Validates that a record lies within +-sizePerJump/2 of its jump
        // point; optionally clamps instead of throwing.
        private long assertWithinDiff( long diff, boolean allowFix )
        {
            if ( diff < -sizePerJump/2 || diff > sizePerJump/2 )
            {
                if ( allowFix )
                {
                    // This is needed for shutdown() to work, PropertyStore
                    // gives an invalid offset for truncate.
                    if ( diff < -sizePerJump / 2 )
                    {
                        return -sizePerJump / 2;
                    }
                    else
                    {
                        return sizePerJump / 2;
                    }
                }
                throw new IllegalArgumentException( "" + diff );
            }
            return diff;
        }

        @Override
        public long position() throws IOException
        {
            return translateOutgoing( super.position() );
        }

        @Override
        public JumpingFileChannel position( long newPosition ) throws IOException
        {
            super.position( translateIncoming( newPosition ) );
            return this;
        }

        @Override
        public long size() throws IOException
        {
            return translateOutgoing( super.size() );
        }

        @Override
        public JumpingFileChannel truncate( long size ) throws IOException
        {
            super.truncate( translateIncoming( size, true ) );
            return this;
        }

        @Override
        public int read( ByteBuffer dst, long position ) throws IOException
        {
            return super.read( dst, translateIncoming( position ) );
        }

        @Override
        public int write( ByteBuffer src, long position ) throws IOException
        {
            return super.write( src, translateIncoming( position ) );
        }
    }

    @Override
    public <K extends ThirdPartyFileSystem> K getOrCreateThirdPartyFileSystem(
            Class<K> clazz, Function<Class<K>, K> creator )
    {
        return actualFileSystem.getOrCreateThirdPartyFileSystem( clazz, creator );
    }

    @Override
    public void shutdown()
    {
        actualFileSystem.shutdown();
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_core_JumpingFileSystemAbstraction.java
|
795
|
{
private final Property[] localProperties = properties;
private int i;
@Override
public long next()
{
if ( !hasNext() )
{
throw new NoSuchElementException();
}
return localProperties[i++].propertyKeyId();
}
@Override
public boolean hasNext()
{
return i < localProperties.length;
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_core_ArrayBasedPrimitive.java
|
796
|
/**
 * Channel that translates between a sparse "jumping" id space (records
 * clustered around multiples of 0x100000000L) and a compact on-disk layout
 * where each jump occupies a window of sizePerJump records.
 */
public class JumpingFileChannel extends StoreFileChannel
{
    private final int recordSize;

    public JumpingFileChannel( StoreFileChannel actual, int recordSize )
    {
        super( actual );
        this.recordSize = recordSize;
    }

    private long translateIncoming( long position )
    {
        return translateIncoming( position, false );
    }

    // Sparse position -> compact position. Records within the first half
    // window pass through unchanged; others are folded into the window
    // belonging to their nearest 2^32 jump point.
    private long translateIncoming( long position, boolean allowFix )
    {
        long actualRecord = position/recordSize;
        if ( actualRecord < sizePerJump/2 )
        {
            return position;
        }
        else
        {
            long jumpIndex = (actualRecord+sizePerJump)/0x100000000L;
            long diff = actualRecord - jumpIndex * 0x100000000L;
            diff = assertWithinDiff( diff, allowFix );
            long offsettedRecord = jumpIndex*sizePerJump + diff;
            return offsettedRecord*recordSize;
        }
    }

    // Compact position -> sparse position (inverse of translateIncoming).
    private long translateOutgoing( long offsettedPosition )
    {
        long offsettedRecord = offsettedPosition/recordSize;
        if ( offsettedRecord < sizePerJump/2 )
        {
            return offsettedPosition;
        }
        else
        {
            long jumpIndex = (offsettedRecord-sizePerJump/2) / sizePerJump + 1;
            long diff = ((offsettedRecord-sizePerJump/2) % sizePerJump) - sizePerJump/2;
            assertWithinDiff( diff, false );
            long actualRecord = jumpIndex*0x100000000L - sizePerJump/2 + diff;
            return actualRecord*recordSize;
        }
    }

    // Validates that a record lies within +-sizePerJump/2 of its jump point;
    // optionally clamps to the window edge instead of throwing.
    private long assertWithinDiff( long diff, boolean allowFix )
    {
        if ( diff < -sizePerJump/2 || diff > sizePerJump/2 )
        {
            if ( allowFix )
            {
                // This is needed for shutdown() to work, PropertyStore
                // gives an invalid offset for truncate.
                if ( diff < -sizePerJump / 2 )
                {
                    return -sizePerJump / 2;
                }
                else
                {
                    return sizePerJump / 2;
                }
            }
            throw new IllegalArgumentException( "" + diff );
        }
        return diff;
    }

    @Override
    public long position() throws IOException
    {
        return translateOutgoing( super.position() );
    }

    @Override
    public JumpingFileChannel position( long newPosition ) throws IOException
    {
        super.position( translateIncoming( newPosition ) );
        return this;
    }

    @Override
    public long size() throws IOException
    {
        return translateOutgoing( super.size() );
    }

    @Override
    public JumpingFileChannel truncate( long size ) throws IOException
    {
        // allowFix: clamp out-of-window truncate offsets (see comment above).
        super.truncate( translateIncoming( size, true ) );
        return this;
    }

    @Override
    public int read( ByteBuffer dst, long position ) throws IOException
    {
        return super.read( dst, translateIncoming( position ) );
    }

    @Override
    public int write( ByteBuffer src, long position ) throws IOException
    {
        return super.write( src, translateIncoming( position ) );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_kernel_impl_core_JumpingFileSystemAbstraction.java
|
797
|
{
@Override
public int compare( RelIdArray o1, RelIdArray o2 )
{
return o1.getType() - o2.getType();
}
};
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_core_NodeImpl.java
|
798
|
public class NodeImpl extends ArrayBasedPrimitive
{
    /* relationships[] being null means: not even tried to load any relationships - go ahead and load
     * relationships[] being NO_RELATIONSHIPS means: don't bother loading relationships since there aren't any */
    private static final RelIdArray[] NO_RELATIONSHIPS = new RelIdArray[0];

    // One RelIdArray per relationship type, kept sorted by type id (see sort())
    // so getRelIdArray can binary-search it.
    private volatile RelIdArray[] relationships;

    // Sorted array of label ids; null until loaded lazily in getLabels().
    private volatile int[] labels;

    /*
     * This is the id of the next relationship to load from disk.
     */
    private volatile long relChainPosition = Record.NO_NEXT_RELATIONSHIP.intValue();

    private final long id;

    public NodeImpl( long id )
    {
        this( id, false );
    }

    // newNode will only be true for NodeManager.createNode
    public NodeImpl( long id, boolean newNode )
    {
        /* TODO firstRel/firstProp isn't used yet due to some unresolved issue with clearing
         * of cache and keeping those first ids in the node instead of loading on demand.
         */
        super( newNode );
        this.id = id;
        if ( newNode )
        {
            // A freshly created node has no relationships on disk yet, so mark
            // the map as loaded-and-empty to skip the initial load.
            relationships = NO_RELATIONSHIPS;
        }
    }

    // Loads this node's properties via the NodeManager.
    // NOTE(review): the meaning of the boolean argument isn't visible here -
    // confirm against NodeManager.loadProperties.
    @Override
    protected Iterator<DefinedProperty> loadProperties( NodeManager nodeManager )
    {
        return nodeManager.loadProperties( this, false );
    }

    @Override
    public long getId()
    {
        return id;
    }
    /**
     * Estimates this object's heap footprint for cache accounting, including
     * the per-type relationship arrays and the labels array when present.
     */
    @Override
    public int sizeOfObjectInBytesIncludingOverhead()
    {
        int size = super.sizeOfObjectInBytesIncludingOverhead() +
                REFERENCE_SIZE/*relationships reference*/ +
                8/*relChainPosition*/ + 8/*id*/ +
                REFERENCE_SIZE/*labels reference*/;
        if ( relationships != null && relationships.length > 0 )
        {
            size = withArrayOverheadIncludingReferences( size, relationships.length );
            for ( RelIdArray array : relationships )
            {
                size += array.sizeOfObjectInBytesIncludingOverhead();
            }
        }
        if ( labels != null && labels.length > 0 )
        {
            size += sizeOfArray( labels );
        }
        return size;
    }

    // Hash on the node id, folding the high 32 bits into the low 32.
    @Override
    public int hashCode()
    {
        long id = getId();
        return (int) ((id >>> 32) ^ id);
    }

    // Two NodeImpls are equal iff they represent the same node id.
    @Override
    public boolean equals( Object obj )
    {
        return this == obj || (obj instanceof NodeImpl && ((NodeImpl) obj).getId() == getId());
    }
    /**
     * Returns all relationships of this node in the given direction, merging
     * the loaded RelIdArrays with any uncommitted transaction state (added
     * and removed relationships).
     */
    Iterable<Relationship> getAllRelationships( NodeManager nodeManager, DirectionWrapper direction )
    {
        ensureRelationshipMapNotNull( nodeManager );

        // We need to check if there are more relationships to load before grabbing
        // the references to the RelIdArrays since otherwise there could be
        // another concurrent thread exhausting the chain position in between the point
        // where we got an empty iterator for a type that the other thread loaded and
        // the point where we check whether or not there are more relationships to load.
        boolean hasMore = hasMoreRelationshipsToLoad();
        RelIdArray[] localRelationships = relationships;

        RelIdIterator[] result = new RelIdIterator[localRelationships.length];
        TransactionState tx = nodeManager.getTransactionState();
        ArrayMap<Integer, RelIdArray> addMap = null;
        ArrayMap<Integer, Collection<Long>> skipMap = null;
        if ( tx.hasChanges() )
        {
            addMap = tx.getCowRelationshipAddMap( this );
            skipMap = tx.getCowRelationshipRemoveMap( this );
        }
        for ( int i = 0; i < localRelationships.length; i++ )
        {
            RelIdArray src = localRelationships[i];
            int type = src.getType();
            RelIdIterator iterator;
            if ( addMap != null || skipMap != null )
            {
                // Overlay transaction-local additions/removals on top of the
                // loaded ids for this type.
                iterator = new CombinedRelIdIterator( type, direction, src,
                        addMap != null ? addMap.get( type ) : null,
                        skipMap != null ? skipMap.get( type ) : null );
            }
            else
            {
                iterator = src.iterator( direction );
            }
            result[i] = iterator;
        }
        // New relationship types for this node which hasn't been committed yet,
        // but exists only as transactional state.
        if ( addMap != null )
        {
            RelIdIterator[] additional = new RelIdIterator[addMap.size() /*worst case size*/];
            int additionalSize = 0;
            for ( int type : addMap.keySet() )
            {
                if ( getRelIdArray( type ) == null )
                {
                    RelIdArray add = addMap.get( type );
                    additional[additionalSize++] = new CombinedRelIdIterator( type, direction, null, add,
                            skipMap != null ? skipMap.get( type ) : null );
                }
            }
            RelIdIterator[] newResult = new RelIdIterator[result.length + additionalSize];
            arraycopy( result, 0, newResult, 0, result.length );
            arraycopy( additional, 0, newResult, result.length, additionalSize );
            result = newResult;
        }

        if ( result.length == 0 )
        {
            return Collections.emptyList();
        }
        return new RelationshipIterator( result, this, direction, nodeManager, hasMore, true );
    }
    /**
     * Returns this node's relationships restricted to the given types and
     * direction, merging loaded state with uncommitted transaction state.
     * Unknown types (no token id yet) are silently skipped.
     */
    Iterable<Relationship> getAllRelationshipsOfType( NodeManager nodeManager,
            DirectionWrapper direction, RelationshipType... types )
    {
        types = deduplicate( types );
        ensureRelationshipMapNotNull( nodeManager );

        // We need to check if there are more relationships to load before grabbing
        // the references to the RelIdArrays. Otherwise there could be
        // another concurrent thread exhausting the chain position in between the point
        // where we got an empty iterator for a type that the other thread loaded and
        // the point where we check if there are more relationships to load.
        boolean hasMore = hasMoreRelationshipsToLoad();

        RelIdIterator[] result = new RelIdIterator[types.length];
        TransactionState tx = nodeManager.getTransactionState();
        ArrayMap<Integer, RelIdArray> addMap = null;
        ArrayMap<Integer, Collection<Long>> skipMap = null;
        if ( tx.hasChanges() )
        {
            addMap = tx.getCowRelationshipAddMap( this );
            skipMap = tx.getCowRelationshipRemoveMap( this );
        }
        int actualLength = 0;
        for ( RelationshipType type : types )
        {
            int typeId = nodeManager.getRelationshipTypeIdFor( type );
            if ( typeId == TokenHolder.NO_ID )
            {
                // A type with no token can't have any relationships anywhere.
                continue;
            }
            result[actualLength++] = getRelationshipsIterator( direction,
                    addMap != null ? addMap.get( typeId ) : null,
                    skipMap != null ? skipMap.get( typeId ) : null, typeId );
        }
        if ( actualLength < result.length )
        {
            // Shrink away slots left empty by skipped unknown types.
            RelIdIterator[] compacted = new RelIdIterator[actualLength];
            arraycopy( result, 0, compacted, 0, actualLength );
            result = compacted;
        }
        if ( result.length == 0 )
        {
            return Collections.emptyList();
        }
        return new RelationshipIterator( result, this, direction, nodeManager, hasMore, false );
    }
private static RelationshipType[] deduplicate( RelationshipType[] types )
{
int unique = 0;
for ( int i = 0; i < types.length; i++ )
{
String name = types[i].name();
for ( int j = 0; j < unique; j++ )
{
if ( name.equals( types[j].name() ) )
{
name = null; // signal that this relationship is not unique
break; // we will not find more than one conflict
}
}
if ( name != null )
{ // this has to be done outside the inner loop, otherwise we'd never accept a single one...
types[unique++] = types[i];
}
}
if ( unique < types.length )
{
types = Arrays.copyOf( types, unique );
}
return types;
}
    /**
     * Builds the id iterator for a single type: a plain iterator over the
     * loaded ids when there is no transaction state for the type, otherwise a
     * combined view overlaying the tx additions/removals (src may be null for
     * a type that so far exists only in the transaction).
     */
    private RelIdIterator getRelationshipsIterator( DirectionWrapper direction, RelIdArray add,
            Collection<Long> remove, int type )
    {
        RelIdArray src = getRelIdArray( type );
        if ( add != null || remove != null )
        {
            return new CombinedRelIdIterator( type, direction, src, add, remove );
        }
        return src != null ? src.iterator( direction ) : empty( type ).iterator( direction );
    }
    // The getRelationships overloads below are thin convenience wrappers that
    // delegate to getAllRelationships/getAllRelationshipsOfType with the
    // direction wrapped and/or BOTH as the default.

    public Iterable<Relationship> getRelationships( NodeManager nodeManager )
    {
        return getAllRelationships( nodeManager, DirectionWrapper.BOTH );
    }

    public Iterable<Relationship> getRelationships( NodeManager nodeManager, Direction dir )
    {
        return getAllRelationships( nodeManager, wrap( dir ) );
    }

    public Iterable<Relationship> getRelationships( NodeManager nodeManager, RelationshipType type )
    {
        return getAllRelationshipsOfType( nodeManager, DirectionWrapper.BOTH, type );
    }

    public Iterable<Relationship> getRelationships( NodeManager nodeManager,
            RelationshipType... types )
    {
        return getAllRelationshipsOfType( nodeManager, DirectionWrapper.BOTH, types );
    }

    public Iterable<Relationship> getRelationships( NodeManager nodeManager,
            Direction direction, RelationshipType... types )
    {
        return getAllRelationshipsOfType( nodeManager, wrap( direction ), types );
    }

    /**
     * Returns the single relationship of the given type and direction, or
     * null if there is none. Throws NotFoundException if more than one
     * distinct relationship matches; repeated occurrences of the same
     * relationship in the iterator are tolerated.
     */
    public Relationship getSingleRelationship( NodeManager nodeManager, RelationshipType type,
            Direction dir )
    {
        Iterator<Relationship> rels = getAllRelationshipsOfType( nodeManager, wrap( dir ),
                new RelationshipType[]{type} ).iterator();
        if ( !rels.hasNext() )
        {
            return null;
        }
        Relationship rel = rels.next();
        while ( rels.hasNext() )
        {
            Relationship other = rels.next();
            if ( !other.equals( rel ) )
            {
                throw new NotFoundException( "More than one relationship[" +
                        type + ", " + dir + "] found for " + this );
            }
        }
        return rel;
    }

    public Iterable<Relationship> getRelationships( NodeManager nodeManager, RelationshipType type,
            Direction dir )
    {
        return getAllRelationshipsOfType( nodeManager, wrap( dir ), type );
    }

    /**
     * Returns this node's string representation.
     *
     * @return the string representation of this node
     */
    @Override
    public String toString()
    {
        return "NodeImpl#" + this.getId();
    }
    // Triggers the first relationship load if it hasn't happened yet
    // (relationships == null means "never tried to load").
    private void ensureRelationshipMapNotNull( NodeManager nodeManager )
    {
        if ( relationships == null )
        {
            loadInitialRelationships( nodeManager );
        }
    }

    /**
     * Performs the initial relationship load using double-checked locking on
     * the volatile relationships field: only one thread does the disk load,
     * and the relationship-cache population happens outside the lock.
     */
    private void loadInitialRelationships( NodeManager nodeManager )
    {
        Triplet<ArrayMap<Integer, RelIdArray>, List<RelationshipImpl>, Long> rels = null;
        synchronized ( this )
        {
            if ( relationships == null )
            {
                try
                {
                    relChainPosition = nodeManager.getRelationshipChainPosition( this );
                }
                catch ( InvalidRecordException e )
                {
                    throw new NotFoundException( asProxy( nodeManager ) +
                            " concurrently deleted while loading its relationships?", e );
                }

                ArrayMap<Integer, RelIdArray> tmpRelMap = new ArrayMap<>();
                rels = getMoreRelationships( nodeManager, tmpRelMap );
                this.relationships = toRelIdArray( tmpRelMap );
                if ( rels != null )
                {
                    setRelChainPosition( rels.third() );
                }
                updateSize( nodeManager );
            }
        }
        if ( rels != null )
        {
            // Done outside the lock: populate the NodeManager's relationship
            // cache with the freshly loaded RelationshipImpls.
            nodeManager.putAllInRelCache( rels.second() );
        }
    }

    // Re-registers this object's current size with the cache.
    protected void updateSize( NodeManager nodeManager )
    {
        nodeManager.updateCacheSize( this, sizeOfObjectInBytesIncludingOverhead() );
    }

    // Flattens the per-type map into the sorted-by-type array representation
    // used by the relationships field.
    private RelIdArray[] toRelIdArray( ArrayMap<Integer, RelIdArray> tmpRelMap )
    {
        RelIdArray[] result = new RelIdArray[tmpRelMap.size()];
        int i = 0;
        for ( RelIdArray array : tmpRelMap.values() )
        {
            result[i++] = array;
        }
        sort( result );
        return result;
    }
private static final Comparator<RelIdArray> RELATIONSHIP_TYPE_COMPARATOR_FOR_SORTING = new Comparator<RelIdArray>()
{
@Override
public int compare( RelIdArray o1, RelIdArray o2 )
{
return o1.getType() - o2.getType();
}
};
/* This is essentially a deliberate misuse of Comparator, knowing details about Arrays#binarySearch.
* The signature is binarySearch( T[] array, T key, Comparator<T> ), but in this case we're
* comparing RelIdArray[] to an int as key. To avoid having to create a new object for
* the key for each call we create a single Comparator taking the RelIdArray as first
* argument and the key as the second, as #binarySearch does internally. Although the int
* here will be boxed I imagine it to be slightly better, with Integer caching for low
* integers. */
@SuppressWarnings("rawtypes")
private static final Comparator RELATIONSHIP_TYPE_COMPARATOR_FOR_BINARY_SEARCH = new Comparator()
{
@Override
public int compare( Object o1, Object o2 )
{
return ((RelIdArray) o1).getType() - (Integer) o2;
}
};
    // Sorts a per-type array by type id; required before binary-searching it
    // in getRelIdArray.
    private static void sort( RelIdArray[] array )
    {
        Arrays.sort( array, RELATIONSHIP_TYPE_COMPARATOR_FOR_SORTING );
    }
    /**
     * Loads the next batch of relationships from disk and merges them into
     * tmpRelMap (used during the initial load). Returns the loaded triplet
     * (per-type ids, RelationshipImpls for the cache, next chain position),
     * or null when there was nothing (more) to load.
     */
    private Triplet<ArrayMap<Integer, RelIdArray>, List<RelationshipImpl>, Long> getMoreRelationships(
            NodeManager nodeManager, ArrayMap<Integer, RelIdArray> tmpRelMap )
    {
        if ( !hasMoreRelationshipsToLoad() )
        {
            return null;
        }

        Triplet<ArrayMap<Integer, RelIdArray>, List<RelationshipImpl>, Long> rels;
        rels = loadMoreRelationshipsFromNodeManager( nodeManager );

        ArrayMap<Integer, RelIdArray> addMap = rels.first();
        if ( addMap.size() == 0 )
        {
            return null;
        }
        for ( Integer type : addMap.keySet() )
        {
            RelIdArray addRels = addMap.get( type );
            RelIdArray srcRels = tmpRelMap.get( type );
            if ( srcRels == null )
            {
                tmpRelMap.put( type, addRels );
            }
            else
            {
                RelIdArray newSrcRels = srcRels.addAll( addRels );
                // This can happen if srcRels gets upgraded to a RelIdArrayWithLoops
                if ( newSrcRels != srcRels )
                {
                    tmpRelMap.put( type, newSrcRels );
                }
            }
        }
        return rels;
    }

    // True while there are still relationships to page in from disk.
    boolean hasMoreRelationshipsToLoad()
    {
        return getRelChainPosition() != Record.NO_NEXT_RELATIONSHIP.intValue();
    }
    /**
     * Outcome of a getMoreRelationships call: whether anything was loaded
     * and whether more remains on disk afterwards.
     */
    static enum LoadStatus
    {
        NOTHING( false, false ),
        LOADED_END( true, false ),
        LOADED_MORE( true, true );

        private final boolean loaded;
        private final boolean more;

        private LoadStatus( boolean loaded, boolean more )
        {
            this.loaded = loaded;
            this.more = more;
        }

        // Whether this call actually loaded any relationships.
        public boolean loaded()
        {
            return this.loaded;
        }

        // Whether further relationships remain to be loaded.
        public boolean hasMoreToLoad()
        {
            return this.more;
        }
    }
    /**
     * Loads the next batch of relationships from disk into this node's
     * relationships array. The double hasMoreRelationshipsToLoad() check
     * (outside and inside the lock) avoids taking the lock when another
     * thread already exhausted the chain. Cache population again happens
     * outside the lock.
     */
    LoadStatus getMoreRelationships( NodeManager nodeManager )
    {
        Triplet<ArrayMap<Integer, RelIdArray>, List<RelationshipImpl>, Long> rels;
        if ( !hasMoreRelationshipsToLoad() )
        {
            return LoadStatus.NOTHING;
        }
        boolean more;
        synchronized ( this )
        {
            if ( !hasMoreRelationshipsToLoad() )
            {
                return LoadStatus.NOTHING;
            }
            rels = loadMoreRelationshipsFromNodeManager( nodeManager );
            ArrayMap<Integer, RelIdArray> addMap = rels.first();
            if ( addMap.size() == 0 )
            {
                return LoadStatus.NOTHING;
            }
            for ( int type : addMap.keySet() )
            {
                RelIdArray addRels = addMap.get( type );
                RelIdArray srcRels = getRelIdArray( type );
                if ( srcRels == null )
                {
                    putRelIdArray( addRels );
                }
                else
                {
                    RelIdArray newSrcRels = srcRels.addAll( addRels );
                    // This can happen if srcRels gets upgraded to a RelIdArrayWithLoops
                    if ( newSrcRels != srcRels )
                    {
                        putRelIdArray( newSrcRels );
                    }
                }
            }
            setRelChainPosition( rels.third() );
            more = hasMoreRelationshipsToLoad();
            updateSize( nodeManager );
        }
        nodeManager.putAllInRelCache( rels.second() );
        return more ? LoadStatus.LOADED_MORE : LoadStatus.LOADED_END;
    }
    // Delegates the actual disk load to the NodeManager, translating a
    // concurrent-delete InvalidRecordException into a retryable NotFound.
    private Triplet<ArrayMap<Integer, RelIdArray>, List<RelationshipImpl>, Long>
            loadMoreRelationshipsFromNodeManager( NodeManager nodeManager )
    {
        try
        {
            return nodeManager.getMoreRelationships( this );
        }
        catch ( InvalidRecordException e )
        {
            throw new NotFoundException( "Unable to load one or more relationships from " + asProxy( nodeManager ) +
                    ". This usually happens when relationships are deleted by someone else just as we are about to " +
                    "load them. Please try again.", e );
        }
    }

    // Binary-searches the sorted per-type array for the given type id;
    // null when this node has no loaded ids of that type. The unchecked
    // suppression covers the deliberate raw-Comparator binarySearch misuse
    // documented at RELATIONSHIP_TYPE_COMPARATOR_FOR_BINARY_SEARCH.
    @SuppressWarnings("unchecked")
    private RelIdArray getRelIdArray( int type )
    {
        RelIdArray[] localRelationships = relationships;
        int index = Arrays.binarySearch( localRelationships, type, RELATIONSHIP_TYPE_COMPARATOR_FOR_BINARY_SEARCH );
        return index < 0 ? null : localRelationships[index];
    }
    /**
     * Stores a per-type id array, overwriting the existing slot for that type
     * in place or, for a new type, publishing a grown and re-sorted copy via
     * the volatile relationships field.
     */
    private void putRelIdArray( RelIdArray addRels )
    {
        // we don't do size update here, instead performed
        // when calling commitRelationshipMaps and in getMoreRelationships

        // precondition: called under synchronization

        // make a local reference to the array to avoid multiple read barrier hits
        RelIdArray[] array = relationships;
        // Try to overwrite it if it's already set
        int expectedType = addRels.getType();
        for ( int i = 0; i < array.length; i++ )
        {
            if ( array[i].getType() == expectedType )
            {
                array[i] = addRels;
                return;
            }
        }
        // no previous entry of the given type - extend the array
        array = Arrays.copyOf( array, array.length + 1 );
        array[array.length - 1] = addRels;
        sort( array );
        relationships = array;
    }
    // The hasRelationship overloads simply probe the corresponding
    // getRelationships iterator for at least one element.

    public boolean hasRelationship( NodeManager nodeManager )
    {
        return getRelationships( nodeManager ).iterator().hasNext();
    }

    public boolean hasRelationship( NodeManager nodeManager, RelationshipType... types )
    {
        return getRelationships( nodeManager, types ).iterator().hasNext();
    }

    public boolean hasRelationship( NodeManager nodeManager, Direction direction,
            RelationshipType... types )
    {
        return getRelationships( nodeManager, direction, types ).iterator().hasNext();
    }

    public boolean hasRelationship( NodeManager nodeManager, Direction dir )
    {
        return getRelationships( nodeManager, dir ).iterator().hasNext();
    }

    public boolean hasRelationship( NodeManager nodeManager, RelationshipType type, Direction dir )
    {
        return getRelationships( nodeManager, type, dir ).iterator().hasNext();
    }
    /**
     * Applies a transaction's copy-on-write add/remove maps to this node's
     * cached relationship state at commit time. If the relationships were
     * never loaded there is nothing to merge - a later full load will pick
     * the committed state up from disk.
     */
    protected void commitRelationshipMaps(
            ArrayMap<Integer, RelIdArray> cowRelationshipAddMap,
            ArrayMap<Integer, Collection<Long>> cowRelationshipRemoveMap )
    {
        if ( relationships == null )
        {
            // we will load full in some other tx
            return;
        }

        synchronized ( this )
        {
            if ( cowRelationshipAddMap != null )
            {
                for ( int type : cowRelationshipAddMap.keySet() )
                {
                    RelIdArray add = cowRelationshipAddMap.get( type );
                    Collection<Long> remove = null;
                    if ( cowRelationshipRemoveMap != null )
                    {
                        remove = cowRelationshipRemoveMap.get( type );
                    }
                    RelIdArray src = getRelIdArray( type );
                    // Merge adds (and any removes for the same type) in one go.
                    putRelIdArray( RelIdArray.from( src, add, remove ) );
                }
            }
            if ( cowRelationshipRemoveMap != null )
            {
                for ( int type : cowRelationshipRemoveMap.keySet() )
                {
                    if ( cowRelationshipAddMap != null &&
                            cowRelationshipAddMap.get( type ) != null )
                    {
                        // Already handled together with the adds above.
                        continue;
                    }
                    RelIdArray src = getRelIdArray( type );
                    if ( src != null )
                    {
                        Collection<Long> remove = cowRelationshipRemoveMap.get( type );
                        putRelIdArray( RelIdArray.from( src, null, remove ) );
                    }
                }
            }
        }
    }
    long getRelChainPosition()
    {
        return relChainPosition;
    }

    /**
     * Updates the next-to-load chain position. When the chain is exhausted
     * the per-type arrays are shrunk in place to their minimal footprint.
     */
    void setRelChainPosition( long position )
    {   // precondition: must be called under synchronization
        relChainPosition = position;

        // use local reference to avoid multiple read barriers
        RelIdArray[] array = relationships;
        if ( !hasMoreRelationshipsToLoad() && array != null )
        {
            // Done loading - Shrink arrays
            for ( int i = 0; i < array.length; i++ )
            {
                array[i] = array[i].shrink();
            }
        }
    }

    // Loaded relationship ids for one type, or null if none loaded.
    RelIdArray getRelationshipIds( int type )
    {
        return getRelIdArray( type );
    }

    // The raw per-type id arrays (may be null if never loaded).
    RelIdArray[] getRelationshipIds()
    {
        return relationships;
    }
    @Override
    public CowEntityElement getEntityElement( PrimitiveElement element, boolean create )
    {
        return element.nodeElement( getId(), create );
    }

    // A proxy Node for this id, suitable for user-facing messages/returns.
    @Override
    PropertyContainer asProxy( NodeManager nm )
    {
        return nm.newNodeProxyById( getId() );
    }

    /**
     * Returns this node's label ids, loading them lazily on first access with
     * double-checked locking on the volatile labels field.
     */
    public int[] getLabels( KernelStatement state, CacheLoader<int[]> loader ) throws EntityNotFoundException
    {
        if ( labels == null )
        {
            synchronized ( this )
            {
                if ( labels == null )
                {
                    labels = loader.load( getId() );
                }
            }
        }
        return labels;
    }

    // Binary search works because labels is kept sorted (see field comment).
    public boolean hasLabel( KernelStatement state, int labelId, CacheLoader<int[]> loader ) throws EntityNotFoundException
    {
        int[] labels = getLabels( state, loader );
        return binarySearch( labels, labelId ) >= 0;
    }

    // Replaces the cached label set wholesale at commit time.
    public synchronized void commitLabels( int[] labels )
    {
        this.labels = labels;
    }

    @Override
    protected Property noProperty( int key )
    {
        return Property.noNodeProperty( getId(), key );
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_core_NodeImpl.java
|
799
|
/**
 * Null Object implementation of TransactionState used when no transaction is
 * active: read-style queries return null/empty/false, while operations that
 * would mutate state or require a live transaction throw
 * NotInTransactionException.
 */
public class NoTransactionState implements TransactionState
{
    // --- Locking: not possible outside a transaction ---

    @Override
    public LockElement acquireWriteLock( Object resource )
    {
        throw new NotInTransactionException();
    }

    @Override
    public LockElement acquireReadLock( Object resource )
    {
        throw new NotInTransactionException();
    }

    // --- Copy-on-write relationship state: nothing pending ---

    @Override
    public ArrayMap<Integer, Collection<Long>> getCowRelationshipRemoveMap( NodeImpl node )
    {
        return null;
    }

    @Override
    public Collection<Long> getOrCreateCowRelationshipRemoveMap( NodeImpl node, int type )
    {
        throw new NotInTransactionException();
    }

    @Override
    public void setFirstIds( long nodeId, long firstRel, long firstProp )
    {
        // Intentionally a no-op outside a transaction.
    }

    @Override
    public ArrayMap<Integer, RelIdArray> getCowRelationshipAddMap( NodeImpl node )
    {
        return null;
    }

    @Override
    public RelIdArray getOrCreateCowRelationshipAddMap( NodeImpl node, int type )
    {
        throw new NotInTransactionException();
    }

    // --- Lifecycle: nothing to commit or roll back ---

    @Override
    public void commit()
    {
    }

    @Override
    public void commitCows()
    {
    }

    @Override
    public void rollback()
    {
    }

    @Override
    public boolean hasLocks()
    {
        return false;
    }

    // --- Copy-on-write property state: nothing pending ---

    @Override
    public ArrayMap<Integer, DefinedProperty> getCowPropertyRemoveMap( Primitive primitive )
    {
        return null;
    }

    @Override
    public ArrayMap<Integer, DefinedProperty> getCowPropertyAddMap( Primitive primitive )
    {
        return null;
    }

    @Override
    public ArrayMap<Integer, DefinedProperty> getOrCreateCowPropertyAddMap( Primitive primitive )
    {
        throw new NotInTransactionException();
    }

    @Override
    public ArrayMap<Integer, DefinedProperty> getOrCreateCowPropertyRemoveMap( Primitive primitive )
    {
        throw new NotInTransactionException();
    }

    // --- Entity creation/deletion: requires a transaction ---

    @Override
    public void deleteNode( long id )
    {
        throw new NotInTransactionException();
    }

    @Override
    public void deleteRelationship( long id )
    {
        throw new NotInTransactionException();
    }

    @Override
    public void createNode( long id )
    {
        throw new NotInTransactionException();
    }

    @Override
    public void createRelationship( long id )
    {
        throw new NotInTransactionException();
    }

    @Override
    public TransactionData getTransactionData()
    {
        throw new NotInTransactionException();
    }

    // --- Read-only queries: nothing is deleted/changed/created here ---

    @Override
    public boolean nodeIsDeleted( long nodeId )
    {
        return false;
    }

    @Override
    public boolean relationshipIsDeleted( long relationshipId )
    {
        return false;
    }

    @Override
    public boolean hasChanges()
    {
        return false;
    }

    @Override
    public RemoteTxHook getTxHook()
    {
        return null;
    }

    @Override
    public TxIdGenerator getTxIdGenerator()
    {
        return null;
    }

    @Override
    public Set<Long> getCreatedNodes()
    {
        return emptySet();
    }

    @Override
    public Set<Long> getCreatedRelationships()
    {
        return emptySet();
    }

    @Override
    public Iterable<WritableTransactionState.CowNodeElement> getChangedNodes()
    {
        return Iterables.empty();
    }

    @Override
    public boolean isRemotelyInitialized()
    {
        return false;
    }

    @Override
    public void markAsRemotelyInitialized()
    {
        // Intentionally a no-op outside a transaction.
    }

    @Override
    public ResourceHolder getNeoStoreTransaction()
    {
        return null;
    }

    @Override
    public void setNeoStoreTransaction( ResourceHolder neoStoreTransaction )
    {
        // Intentionally a no-op outside a transaction.
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_kernel_impl_core_NoTransactionState.java
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.