Unnamed: 0
int64 0
6.7k
| func
stringlengths 12
89.6k
| target
bool 2
classes | project
stringlengths 45
151
|
|---|---|---|---|
1,900
|
/**
 * Style parameter that sets a default font (name and size) for all
 * relationships in the rendered graph.
 */
final class RelationshipFont implements StyleParameter
{
    private final String fontName;
    private final int fontSize;

    /**
     * Configure the font of relationships.
     * @param fontName
     *            the name of the relationship font.
     * @param fontSize
     *            the size of the relationship font.
     */
    public RelationshipFont( String fontName, int fontSize )
    {
        this.fontName = fontName;
        this.fontSize = fontSize;
    }

    public final void configure( StyleConfiguration configuration )
    {
        // Register both font properties as relationship-wide defaults.
        configuration.setDefaultRelationshipProperty( "fontname", fontName );
        configuration.setDefaultRelationshipProperty( "fontsize", String.valueOf( fontSize ) );
    }
}
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_graphviz_StyleParameter.java
|
1,901
|
{
public String getParameterValue( Relationship relationship,
String key )
{
if ( key.equals( "color" ) )
{
return getColor( relationship );
}
else
{
return getFontColor( relationship );
}
}
};
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_graphviz_StyleParameter.java
|
1,902
|
/**
 * Style parameter that colors relationships. Subclasses decide the line
 * color; the font color defaults to the line color but may be overridden.
 */
abstract class RelationshipColor implements StyleParameter
{
    public final void configure( StyleConfiguration configuration )
    {
        // One getter serves both the "color" and "fontcolor" parameters,
        // dispatching on the requested key.
        ParameterGetter<Relationship> colorSource = new ParameterGetter<Relationship>()
        {
            public String getParameterValue( Relationship relationship,
                    String key )
            {
                return key.equals( "color" ) ? getColor( relationship )
                        : getFontColor( relationship );
            }
        };
        configuration.setRelationshipParameterGetter( "color", colorSource );
        configuration.setRelationshipParameterGetter( "fontcolor", colorSource );
    }

    /**
     * Get the font color for the given relationship. Defaults to the same
     * color as the relationship line.
     * @param relationship
     *            the relationship to get the font color for.
     * @return the name of the font color for the given relationship.
     */
    protected String getFontColor( Relationship relationship )
    {
        return getColor( relationship );
    }

    /**
     * Get the color for a given relationship.
     * @param relationship
     *            the relationship to get the color for.
     * @return the name of the color for the given relationship.
     */
    protected abstract String getColor( Relationship relationship );
}
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_graphviz_StyleParameter.java
|
1,903
|
/**
 * Node title source that reads the title from a single node property.
 */
final class NodeTitleProperty extends NodeTitle
{
    private final String propertyKey;

    /**
     * Get node title from a property.
     * @param key
     *            the property key to use as title.
     */
    public NodeTitleProperty( String key )
    {
        this.propertyKey = key;
    }

    public String getTitle( Node node )
    {
        // The property value is expected to already be a String.
        return ( String ) node.getProperty( propertyKey );
    }
}
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_graphviz_StyleParameter.java
|
1,904
|
/**
 * Base class for style parameters that supply node titles. Subclasses
 * implement {@code TitleGetter<Node>} to produce the actual title.
 */
abstract class NodeTitle implements StyleParameter, TitleGetter<Node>
{
    // Registers this instance as the configuration's node title source.
    public final void configure( StyleConfiguration configuration )
    {
        configuration.setNodeTitleGetter( this );
    }
}
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_graphviz_StyleParameter.java
|
1,905
|
/**
 * Base class for style parameters that control how node properties are
 * formatted in the output.
 */
abstract class NodePropertyFormat implements StyleParameter,
        PropertyFormatter
{
    // Registers this instance as the node property formatter.
    // NOTE(review): "setNodePropertyFomatter" is a typo in the external
    // StyleConfiguration API name; it cannot be corrected here.
    public final void configure( StyleConfiguration configuration )
    {
        configuration.setNodePropertyFomatter( this );
    }
}
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_graphviz_StyleParameter.java
|
1,906
|
/**
 * Base class for style parameters that decide which node properties are
 * included in the output.
 */
abstract class NodePropertyFilter implements StyleParameter, PropertyFilter
{
    // Registers this instance as the node property filter.
    public final void configure( StyleConfiguration configuration )
    {
        configuration.setNodePropertyFilter( this );
    }
}
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_graphviz_StyleParameter.java
|
1,907
|
/**
 * Style parameter that sets a default font (name and size) for all nodes
 * in the rendered graph.
 */
final class NodeFont implements StyleParameter
{
    private final String fontName;
    private final int fontSize;

    /**
     * Configure the font of nodes.
     * @param fontName
     *            the name of the node font.
     * @param fontSize
     *            the size of the node font.
     */
    public NodeFont( String fontName, int fontSize )
    {
        this.fontName = fontName;
        this.fontSize = fontSize;
    }

    public final void configure( StyleConfiguration configuration )
    {
        // Register both font properties as node-wide defaults.
        configuration.setDefaultNodeProperty( "fontname", fontName );
        configuration.setDefaultNodeProperty( "fontsize", String.valueOf( fontSize ) );
    }
}
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_graphviz_StyleParameter.java
|
1,908
|
{
public String getParameterValue( Node node, String key )
{
if ( key.equals( "color" ) )
{
return getColor( node );
}
else if ( key.equals( "fontcolor" ) )
{
return getFontColor( node );
}
else
{
return getFillColor( node );
}
}
};
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_graphviz_StyleParameter.java
|
1,909
|
/**
 * Style parameter that colors nodes. Subclasses decide the border color;
 * font and fill colors have overridable defaults.
 */
abstract class NodeColor implements StyleParameter
{
    public final void configure( StyleConfiguration configuration )
    {
        // One getter serves "color", "fillcolor" and "fontcolor",
        // dispatching on the requested key.
        ParameterGetter<Node> colorSource = new ParameterGetter<Node>()
        {
            public String getParameterValue( Node node, String key )
            {
                return key.equals( "color" ) ? getColor( node )
                        : key.equals( "fontcolor" ) ? getFontColor( node )
                                : getFillColor( node );
            }
        };
        // Nodes must be styled "filled" for the fill color to be visible.
        configuration.setDefaultNodeProperty( "style", "filled" );
        configuration.setNodeParameterGetter( "color", colorSource );
        configuration.setNodeParameterGetter( "fillcolor", colorSource );
        configuration.setNodeParameterGetter( "fontcolor", colorSource );
    }

    /**
     * Get the font color for the given node. Defaults to the border color.
     * @param node
     *            the node to get the font color for.
     * @return the name of the font color for the given node.
     */
    protected String getFontColor( Node node )
    {
        return getColor( node );
    }

    /**
     * Return the default color for the node. This is the color of the
     * borders of the node.
     * @param node
     *            the node to get the color for.
     * @return the name of the color for the node.
     */
    protected abstract String getColor( Node node );

    /**
     * Return the fill color for the node. This is the color of the interior
     * of the node. Defaults to {@code null} (no explicit fill color).
     * @param node
     *            the node to get the color for.
     * @return the name of the color for the node.
     */
    protected String getFillColor( Node node )
    {
        return null;
    }
}
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_graphviz_StyleParameter.java
|
1,910
|
/**
 * Style parameter that sets the label of the whole graph.
 */
final class GraphLabel implements StyleParameter
{
    private final String text;

    public GraphLabel( String label )
    {
        this.text = label;
    }

    public void configure( StyleConfiguration configuration )
    {
        // Escape the label so special characters survive graphviz output.
        String escaped = configuration.escapeLabel( text );
        configuration.setGraphProperty( "label", escaped );
    }
}
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_graphviz_StyleParameter.java
|
1,911
|
/**
 * Style parameter that registers this instance as the value source for a
 * caller-supplied set of relationship parameters.
 */
abstract class GenericRelationshipParameters implements StyleParameter,
        ParameterGetter<Relationship>
{
    private final String[] parameterKeys;

    /**
     * Add custom generic parameters to relationships.
     * @param keys
     *            the parameters to add.
     */
    protected GenericRelationshipParameters( String... keys )
    {
        this.parameterKeys = keys;
    }

    public final void configure( StyleConfiguration configuration )
    {
        // This object itself answers lookups for each registered key.
        for ( String parameterKey : parameterKeys )
        {
            configuration.setRelationshipParameterGetter( parameterKey, this );
        }
    }
}
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_graphviz_StyleParameter.java
|
1,912
|
/**
 * Style parameter that registers this instance as the value source for a
 * caller-supplied set of node parameters.
 */
abstract class GenericNodeParameters implements StyleParameter,
        ParameterGetter<Node>
{
    private final Iterable<String> parameterKeys;

    /**
     * Add custom generic parameters to nodes.
     * @param keys
     *            the parameters to add.
     */
    protected GenericNodeParameters( String... keys )
    {
        this.parameterKeys = Arrays.asList( keys );
    }

    public final void configure( StyleConfiguration configuration )
    {
        // This object itself answers lookups for each registered key.
        for ( String parameterKey : parameterKeys )
        {
            configuration.setNodeParameterGetter( parameterKey, this );
        }
    }
}
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_graphviz_StyleParameter.java
|
1,913
|
/**
 * Style parameter that adds one fixed key/value pair to the general
 * relationship configuration.
 */
final class DefaultRelationshipProperty implements StyleParameter
{
    private final String key;
    private final String propertyValue;

    /**
     * Add a property to the general relationship configuration.
     * @param property
     *            the property key.
     * @param value
     *            the property value.
     */
    public DefaultRelationshipProperty( String property, String value )
    {
        this.key = property;
        this.propertyValue = value;
    }

    public final void configure( StyleConfiguration configuration )
    {
        configuration.setDefaultRelationshipProperty( key, propertyValue );
    }
}
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_graphviz_StyleParameter.java
|
1,914
|
{
public String formatString( String string )
{
string = string.replace( "\\n", "\\\\n" );
string = string.replace( "\\", "\\\\" );
string = string.replace( "\"", "\\\"" );
string = string.replace( "'", "\\\\'" );
string = string.replace( "\n", "\\\\n" );
string = string.replace( "<", "\\<" );
string = string.replace( ">", "\\>" );
string = string.replace( "[", "\\[" );
string = string.replace( "]", "\\]" );
string = string.replace( "{", "\\{" );
string = string.replace( "}", "\\}" );
string = string.replace( "|", "\\|" );
return "'" + string + "'";
}
public String formatStringArray( String[] value )
{
boolean comma = false;
StringBuilder result = new StringBuilder( "[" );
for ( String string : value )
{
if ( comma )
{
result.append( ", " );
}
result.append( formatString( string ) );
comma = true;
}
result.append( "]" );
return result.toString();
}
public String formatBoxedPrimitive( PropertyType type, Object primitive )
{
return primitive.toString();
}
public String formatBoxedPrimitiveArray( PropertyType elementType,
Object[] array )
{
return Arrays.toString( array );
}
public String formatPrimitiveArray( PropertyType type, Object array )
{
switch ( type )
{
case INT:
return Arrays.toString( ( int[] ) array );
case LONG:
return Arrays.toString( ( long[] ) array );
case BOOLEAN:
return Arrays.toString( ( boolean[] ) array );
case SHORT:
return Arrays.toString( ( short[] ) array );
case CHAR:
return Arrays.toString( ( char[] ) array );
case BYTE:
return Arrays.toString( ( byte[] ) array );
case FLOAT:
return Arrays.toString( ( float[] ) array );
case DOUBLE:
return Arrays.toString( ( double[] ) array );
default:
throw new IllegalArgumentException();
}
}
public String formatUnknownObject( Object value )
{
return value.toString();
}
};
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_ValueFormatter.java
|
1,915
|
UNDEFINED( null, "Object" )
{
@Override
<T> T apply( ValueFormatter<T> formatter, Object value )
{
return formatter.formatUnknownObject( value );
}
};
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_PropertyType.java
|
1,916
|
/**
 * Base class for style parameters that supply relationship titles.
 * Subclasses implement {@code TitleGetter<Relationship>} to produce the
 * actual title.
 */
abstract class RelationshipTitle implements StyleParameter,
        TitleGetter<Relationship>
{
    // Registers this instance as the configuration's relationship title source.
    public final void configure( StyleConfiguration configuration )
    {
        configuration.setRelationshipTitleGetter( this );
    }
}
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_graphviz_StyleParameter.java
|
1,917
|
/**
 * Minimal dependency resolver handed to kernel extensions started by the
 * batch inserter. It can satisfy requests for the file system, config,
 * logging, and a lazy NeoStore provider; anything else is rejected.
 */
private class DependencyResolverImpl extends DependencyResolver.Adapter
{
    @Override
    public <T> T resolveDependency( Class<T> type, SelectionStrategy selector ) throws IllegalArgumentException
    {
        // The checks run in a fixed order; a request whose type matches
        // several candidates gets the first match, so do not reorder.
        if ( type.isInstance( fileSystem ) )
        {
            return type.cast( fileSystem );
        }
        if ( type.isInstance( config ) )
        {
            return type.cast( config );
        }
        if ( type.isInstance( logging ) )
        {
            return type.cast( logging );
        }
        if ( NeoStoreProvider.class.isAssignableFrom( type ) )
        {
            // Return a provider rather than the store itself so callers
            // always observe the current neoStore field when they evaluate.
            return type.cast( new NeoStoreProvider()
            {
                @Override
                public NeoStore evaluate()
                {
                    return neoStore;
                }
            } );
        }
        throw new IllegalArgumentException( "Unknown dependency " + type );
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchInserterImpl.java
|
1,918
|
{
@Override
public Iterator<Label> iterator()
{
NodeStore nodeStore = neoStore.getNodeStore();
long[] labels = parseLabelsField( nodeStore.getRecord( node ) ).get( getNodeStore() );
return map( labelIdToLabelFunction, asPrimitiveIterator( labels ) );
}
};
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchInserterImpl.java
|
1,919
|
{
@Override
public boolean visit( NodePropertyUpdate update ) throws IOException
{
// Do a lookup from which property has changed to a list of indexes worried about that property.
int propertyKeyInQuestion = update.getPropertyKeyId();
for ( int i = 0; i < propertyKeyIds.length; i++ )
{
if ( propertyKeyIds[i] == propertyKeyInQuestion )
{
if ( update.forLabel( labelIds[i] ) )
{
try
{
populators[i].add( update.getNodeId(), update.getValueAfter() );
}
catch ( IndexEntryConflictException conflict )
{
throw conflict.notAllowed( rules[i].getLabel(), rules[i].getPropertyKey() );
}
}
}
}
return true;
}
};
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchInserterImpl.java
|
1,920
|
{
@Override
public Label apply( long from )
{
return label( labelTokens.nameOf( safeCastLongToInt( from ) ) );
}
};
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchInserterImpl.java
|
1,921
|
public class BatchInserterImpl implements BatchInserter
{
// Upper bound for explicitly requested node ids (see createNode(long, ...)).
private static final long MAX_NODE_ID = IdType.NODE.getMaxValue();
// Lifecycle manager for components needing start/stop (index store, extensions, ...).
private final LifeSupport life;
private final NeoStore neoStore;
private final IndexStore indexStore;
private final File storeDir;
// Token caches so name->id lookups avoid hitting the store on every call.
private final BatchTokenHolder propertyKeyTokens;
private final BatchTokenHolder relationshipTypeTokens;
private final BatchTokenHolder labelTokens;
private final IdGeneratorFactory idGeneratorFactory;
private final SchemaIndexProviderMap schemaIndexProviders;
private final LabelScanStore labelScanStore;
// TODO use Logging instead
private final StringLogger msgLog;
private final Logging logging;
private final FileSystemAbstraction fileSystem;
private final SchemaCache schemaCache;
private final Config config;
private final BatchInserterImpl.BatchSchemaActions actions;
// Prevents other processes from opening the same store concurrently.
private final StoreLocker storeLocker;
// Set whenever labels or schema rules change; gates index repopulation.
private boolean labelsTouched;
// Maps a raw label id to its Label via the cached label tokens.
private final FunctionFromPrimitiveLong<Label> labelIdToLabelFunction = new FunctionFromPrimitiveLong<Label>()
{
    @Override
    public Label apply( long from )
    {
        return label( labelTokens.nameOf( safeCastLongToInt( from ) ) );
    }
};
private boolean isShutdown = false;
// Helper structure for setNodeProperty: collects property records touched in
// one call so each is written out exactly once.
private final Set<PropertyRecord> updatedPropertyRecords = new HashSet<PropertyRecord>();
/**
 * Create a batch inserter over the given store directory using the default
 * file system and no kernel extensions.
 * @param storeDir path to the store directory.
 * @param stringParams configuration overrides.
 */
BatchInserterImpl( String storeDir,
        Map<String, String> stringParams )
{
    this( storeDir,
            new DefaultFileSystemAbstraction(),
            stringParams,
            Collections.<KernelExtensionFactory<?>>emptyList()
    );
}
/**
 * Create a batch inserter: locks the store directory, opens the neostore,
 * loads token caches, starts kernel extensions and resolves the schema
 * index / label scan providers. The statement order is load-bearing
 * (lock before config, store open before token loading, life.start()
 * before resolving extension-provided dependencies).
 * @param storeDir path to the store directory.
 * @param fileSystem file system abstraction to operate through.
 * @param stringParams configuration overrides.
 * @param kernelExtensions extensions to start alongside the inserter.
 */
BatchInserterImpl( String storeDir, FileSystemAbstraction fileSystem,
        Map<String, String> stringParams, Iterable<KernelExtensionFactory<?>> kernelExtensions )
{
    life = new LifeSupport();
    this.fileSystem = fileSystem;
    this.storeDir = new File( FileUtils.fixSeparatorsInPath( storeDir ) );
    // The batch inserter must never trigger a store format upgrade.
    rejectAutoUpgrade( stringParams );
    msgLog = StringLogger.loggerDirectory( fileSystem, this.storeDir );
    logging = new SingleLoggingService( msgLog );
    // User-supplied params win over the defaults put in first.
    Map<String, String> params = getDefaultParams();
    params.put( GraphDatabaseSettings.use_memory_mapped_buffers.name(), Settings.FALSE );
    params.put( InternalAbstractGraphDatabase.Configuration.store_dir.name(), storeDir );
    params.putAll( stringParams );
    // Take the store lock before touching any store files.
    storeLocker = new StoreLocker( fileSystem );
    storeLocker.checkLock( this.storeDir );
    config = new Config( params, GraphDatabaseSettings.class );
    boolean dump = config.get( GraphDatabaseSettings.dump_configuration );
    this.idGeneratorFactory = new DefaultIdGeneratorFactory();
    StoreFactory sf = new StoreFactory( config, idGeneratorFactory, new DefaultWindowPoolFactory(), fileSystem,
            msgLog, null );
    File store = fixPath( this.storeDir, sf );
    if ( dump )
    {
        dumpConfiguration( params );
    }
    msgLog.logMessage( Thread.currentThread() + " Starting BatchInserter(" + this + ")" );
    neoStore = sf.newNeoStore( store );
    // Refuse to operate on a store that was not cleanly shut down.
    if ( !neoStore.isStoreOk() )
    {
        throw new IllegalStateException( storeDir + " store is not cleanly shutdown." );
    }
    neoStore.makeStoreOk();
    // Preload token caches for fast name -> id resolution.
    Token[] indexes = getPropertyKeyTokenStore().getTokens( 10000 );
    propertyKeyTokens = new BatchTokenHolder( indexes );
    labelTokens = new BatchTokenHolder( neoStore.getLabelTokenStore().getTokens( Integer.MAX_VALUE ) );
    Token[] types = getRelationshipTypeStore().getTokens( Integer.MAX_VALUE );
    relationshipTypeTokens = new BatchTokenHolder( types );
    indexStore = life.add( new IndexStore( this.storeDir, fileSystem ) );
    schemaCache = new SchemaCache( neoStore.getSchemaStore() );
    KernelExtensions extensions = life
            .add( new KernelExtensions( kernelExtensions, config, new DependencyResolverImpl(),
                    UnsatisfiedDependencyStrategies.ignore() ) );
    // Extensions must be started before their provided services can be resolved.
    life.start();
    SchemaIndexProvider provider = extensions.resolveDependency( SchemaIndexProvider.class,
            SchemaIndexProvider.HIGHEST_PRIORITIZED_OR_NONE );
    schemaIndexProviders = new DefaultSchemaIndexProviderMap( provider );
    labelScanStore = life.add( extensions.resolveDependency( LabelScanStoreProvider.class,
            LabelScanStoreProvider.HIGHEST_PRIORITIZED ).getLabelScanStore() );
    actions = new BatchSchemaActions();
}
/**
 * Default memory-mapping sizes for the individual store files, used as the
 * base configuration before user-supplied parameters are applied.
 * @return a mutable map of default configuration entries.
 */
private Map<String, String> getDefaultParams()
{
    Map<String, String> defaults = new HashMap<>();
    defaults.put( "neostore.nodestore.db.mapped_memory", "20M" );
    defaults.put( "neostore.propertystore.db.mapped_memory", "90M" );
    defaults.put( "neostore.propertystore.db.index.mapped_memory", "1M" );
    defaults.put( "neostore.propertystore.db.index.keys.mapped_memory", "1M" );
    defaults.put( "neostore.propertystore.db.strings.mapped_memory", "130M" );
    defaults.put( "neostore.propertystore.db.arrays.mapped_memory", "130M" );
    defaults.put( "neostore.relationshipstore.db.mapped_memory", "50M" );
    return defaults;
}
/** @return whether the given node has a property with the given name. */
@Override
public boolean nodeHasProperty( long node, String propertyName )
{
    return primitiveHasProperty( getNodeRecord( node ), propertyName );
}

/** @return whether the given relationship has a property with the given name. */
@Override
public boolean relationshipHasProperty( long relationship,
        String propertyName )
{
    return primitiveHasProperty( getRelationshipRecord( relationship ),
            propertyName );
}

/**
 * Set (add or overwrite) a property on a node. The node record is only
 * rewritten when setPrimitiveProperty reports that its property chain
 * head changed.
 */
@Override
public void setNodeProperty( long node, String propertyName,
        Object newValue )
{
    NodeRecord nodeRec = getNodeRecord( node );
    if ( setPrimitiveProperty( nodeRec, propertyName, newValue ) )
    {
        getNodeStore().updateRecord( nodeRec );
    }
}

/**
 * Set (add or overwrite) a property on a relationship; mirrors
 * setNodeProperty.
 */
@Override
public void setRelationshipProperty( long relationship,
        String propertyName, Object propertyValue )
{
    RelationshipRecord relRec = getRelationshipRecord( relationship );
    if ( setPrimitiveProperty( relRec, propertyName, propertyValue ) )
    {
        getRelationshipStore().updateRecord( relRec );
    }
}

/**
 * Remove a property from a node. The node record is only rewritten when
 * removePrimitiveProperty reports that its property chain head changed.
 */
@Override
public void removeNodeProperty( long node, String propertyName )
{
    NodeRecord nodeRec = getNodeRecord( node );
    if ( removePrimitiveProperty( nodeRec, propertyName ) )
    {
        getNodeStore().updateRecord( nodeRec );
    }
}

/** Remove a property from a relationship; mirrors removeNodeProperty. */
@Override
public void removeRelationshipProperty( long relationship,
        String propertyName )
{
    RelationshipRecord relationshipRec = getRelationshipRecord( relationship );
    if ( removePrimitiveProperty( relationshipRec, propertyName ) )
    {
        getRelationshipStore().updateRecord( relationshipRec );
    }
}
/**
 * Start defining a schema index for the given label. The index is only
 * actually populated when the inserter shuts down.
 */
@Override
public IndexCreator createDeferredSchemaIndex( Label label )
{
    return new IndexCreatorImpl( actions, label );
}
/**
 * Write a new index rule for the given label/property pair into the schema
 * store and register it in the schema cache.
 * @param label the label to index.
 * @param propertyKey the property key to index.
 */
private void createIndexRule( Label label, String propertyKey )
{
    // TODO: Do not create duplicate index
    SchemaStore schemaStore = getSchemaStore();
    IndexRule schemaRule = IndexRule.indexRule( schemaStore.nextId(), getOrCreateLabelId( label.name() ),
            getOrCreatePropertyKeyId( propertyKey ),
            this.schemaIndexProviders.getDefaultProvider()
                    .getProviderDescriptor() );
    // A rule may span several dynamic records; persist them all.
    for ( DynamicRecord record : schemaStore.allocateFrom( schemaRule ) )
    {
        schemaStore.updateRecord( record );
    }
    schemaCache.addSchemaRule( schemaRule );
    // Mark schema as changed so indexes get (re)populated on shutdown.
    labelsTouched = true;
}
/**
 * Scan the node store once and feed every index needing population, plus
 * the label scan store, with the data encountered. No-op unless labels or
 * schema rules were touched during this session.
 * @throws IOException on failure reading the store or writing an index.
 */
private void repopulateAllIndexes() throws IOException
{
    if ( !labelsTouched )
    {
        return;
    }
    final IndexRule[] rules = getIndexesNeedingPopulation();
    final IndexPopulator[] populators = new IndexPopulator[rules.length];
    // the store is uncontended at this point, so creating a local LockService is safe.
    LockService locks = new ReentrantLockService();
    IndexStoreView storeView = new NeoStoreIndexStoreView( locks, neoStore );
    // Parallel arrays: labelIds[i]/propertyKeyIds[i]/populators[i] all
    // describe rules[i].
    final int[] labelIds = new int[rules.length];
    final int[] propertyKeyIds = new int[rules.length];
    for ( int i = 0; i < labelIds.length; i++ )
    {
        IndexRule rule = rules[i];
        int labelId = rule.getLabel();
        int propertyKeyId = rule.getPropertyKey();
        labelIds[i] = labelId;
        propertyKeyIds[i] = propertyKeyId;
        IndexDescriptor descriptor = new IndexDescriptor( labelId, propertyKeyId );
        populators[i] = schemaIndexProviders.apply( rule.getProviderDescriptor() ).getPopulator(
                rule.getId(), descriptor, new IndexConfiguration( rule.isConstraintIndex() ) );
        populators[i].create();
    }
    // NOTE(review): if the scan below throws, created populators are never
    // closed — verify whether populator cleanup on failure is needed here.
    Visitor<NodePropertyUpdate, IOException> propertyUpdateVisitor = new Visitor<NodePropertyUpdate, IOException>()
    {
        @Override
        public boolean visit( NodePropertyUpdate update ) throws IOException
        {
            // Do a lookup from which property has changed to a list of indexes worried about that property.
            int propertyKeyInQuestion = update.getPropertyKeyId();
            for ( int i = 0; i < propertyKeyIds.length; i++ )
            {
                if ( propertyKeyIds[i] == propertyKeyInQuestion )
                {
                    if ( update.forLabel( labelIds[i] ) )
                    {
                        try
                        {
                            populators[i].add( update.getNodeId(), update.getValueAfter() );
                        }
                        catch ( IndexEntryConflictException conflict )
                        {
                            // Surface uniqueness violations with label/property context.
                            throw conflict.notAllowed( rules[i].getLabel(), rules[i].getPropertyKey() );
                        }
                    }
                }
            }
            return true;
        }
    };
    InitialNodeLabelCreationVisitor labelUpdateVisitor = new InitialNodeLabelCreationVisitor();
    // A single store scan drives both index population and label scan store updates.
    StoreScan<IOException> storeScan = storeView.visitNodes( labelIds, propertyKeyIds,
            propertyUpdateVisitor, labelUpdateVisitor );
    storeScan.run();
    for ( IndexPopulator populator : populators )
    {
        populator.close( true );
    }
    labelUpdateVisitor.close();
}
/**
 * Streams node label updates from the initial store scan into the label
 * scan store. The writer is opened eagerly at construction and must be
 * released via {@link #close()}.
 */
private class InitialNodeLabelCreationVisitor implements Visitor<NodeLabelUpdate, IOException>
{
    LabelScanWriter writer = labelScanStore.newWriter();

    @Override
    public boolean visit( NodeLabelUpdate update ) throws IOException
    {
        writer.write( update );
        // Always continue the scan.
        return true;
    }

    public void close() throws IOException
    {
        writer.close();
    }
}
/**
 * Collect every index rule from the schema cache whose provider does not
 * report it as FAILED, i.e. every index that should be (re)populated.
 * @return the index rules needing population.
 */
private IndexRule[] getIndexesNeedingPopulation()
{
    List<IndexRule> result = new ArrayList<>();
    for ( SchemaRule rule : schemaCache.schemaRules() )
    {
        if ( !rule.getKind().isIndex() )
        {
            continue;
        }
        IndexRule indexRule = (IndexRule) rule;
        SchemaIndexProvider provider =
                schemaIndexProviders.apply( indexRule.getProviderDescriptor() );
        // FAILED indexes are skipped; repopulating them would fail again.
        if ( provider.getInitialState( indexRule.getId() ) != InternalIndexState.FAILED )
        {
            result.add( indexRule );
        }
    }
    return result.toArray( new IndexRule[result.size()] );
}
/**
 * Start defining a uniqueness constraint for the given label. The
 * constraint is only materialized when the inserter shuts down.
 */
@Override
public ConstraintCreator createDeferredConstraint( Label label )
{
    // Reuse the shared actions instance, consistent with
    // createDeferredSchemaIndex, instead of allocating a fresh
    // BatchSchemaActions on every call.
    return new BaseConstraintCreator( actions, label );
}
/**
 * Persist a uniqueness constraint: a constraint-index rule and the
 * constraint rule itself, cross-referencing each other's ids. The
 * constraint rule is written first so the index rule never points at a
 * not-yet-persisted owner.
 * @param constraint the uniqueness constraint to store.
 */
private void createConstraintRule( UniquenessConstraint constraint )
{
    // TODO: Do not create duplicate index
    SchemaStore schemaStore = getSchemaStore();
    long indexRuleId = schemaStore.nextId();
    long constraintRuleId = schemaStore.nextId();
    IndexRule indexRule = IndexRule.constraintIndexRule(
            indexRuleId, constraint.label(), constraint.propertyKeyId(),
            this.schemaIndexProviders.getDefaultProvider().getProviderDescriptor(),
            constraintRuleId );
    // BUG FIX: the constraint rule must be created with the reserved
    // constraintRuleId — the id the index rule above records as its owner.
    // Previously a third schemaStore.nextId() was used here, so the index
    // rule's owning-constraint pointer referenced an id no rule carried.
    UniquenessConstraintRule constraintRule = UniquenessConstraintRule.uniquenessConstraintRule(
            constraintRuleId, constraint.label(), constraint.propertyKeyId(), indexRuleId );
    for ( DynamicRecord record : schemaStore.allocateFrom( constraintRule ) )
    {
        schemaStore.updateRecord( record );
    }
    schemaCache.addSchemaRule( constraintRule );
    for ( DynamicRecord record : schemaStore.allocateFrom( indexRule ) )
    {
        schemaStore.updateRecord( record );
    }
    schemaCache.addSchemaRule( indexRule );
    labelsTouched = true;
}
/**
 * Remove the named property from a node or relationship by walking its
 * property record chain. If removing the block empties its record, the
 * record is unlinked from the chain.
 * @param primitive the owning node/relationship record.
 * @param property the property key name.
 * @return true if the primitive record itself changed (its chain head
 *         moved) and must be rewritten by the caller.
 */
private boolean removePrimitiveProperty( PrimitiveRecord primitive,
        String property )
{
    PropertyRecord current = null;
    PropertyBlock target;
    long nextProp = primitive.getNextProp();
    int propIndex = propertyKeyTokens.idOf( property );
    if ( nextProp == Record.NO_NEXT_PROPERTY.intValue() || propIndex == -1 )
    {
        // No properties or no one has that property, nothing changed
        return false;
    }
    // Walk the chain until the record holding the block is found.
    while ( nextProp != Record.NO_NEXT_PROPERTY.intValue() )
    {
        current = getPropertyStore().getRecord( nextProp );
        if ( (target = current.removePropertyBlock( propIndex )) != null )
        {
            // Free any dynamic (string/array) value records of the block.
            getPropertyStore().ensureHeavy( target );
            for ( DynamicRecord dynRec : target.getValueRecords() )
            {
                dynRec.setInUse( false );
                current.addDeletedRecord( dynRec );
            }
            break;
        }
        nextProp = current.getNextProp();
    }
    assert current != null : "the if statement above prevents it";
    if ( current.size() > 0 )
    {
        // Record still holds other blocks: rewrite it in place, chain unchanged.
        getPropertyStore().updateRecord( current );
        return false;
    }
    else
    {
        // Record is now empty: drop it and splice it out of the chain.
        current.setInUse( false );
        return unlinkPropertyRecord( current, primitive );
    }
}
/**
 * Splice an empty property record out of its owner's doubly-linked chain,
 * repairing the prev/next pointers of its neighbors and, if it was the
 * chain head, the owner's next-prop pointer.
 * @param propRecord the (empty) record to unlink.
 * @param primitive the owning node/relationship record.
 * @return true if the primitive's chain head changed and it must be
 *         rewritten by the caller.
 */
private boolean unlinkPropertyRecord( PropertyRecord propRecord,
        PrimitiveRecord primitive )
{
    assert propRecord.size() == 0;
    boolean primitiveChanged = false;
    long prevProp = propRecord.getPrevProp();
    long nextProp = propRecord.getNextProp();
    if ( primitive.getNextProp() == propRecord.getId() )
    {
        // Unlinking the chain head: it must have no predecessor.
        assert propRecord.getPrevProp() == Record.NO_PREVIOUS_PROPERTY.intValue() : propRecord
                                                                                    + " for "
                                                                                    + primitive;
        primitive.setNextProp( nextProp );
        primitiveChanged = true;
    }
    if ( prevProp != Record.NO_PREVIOUS_PROPERTY.intValue() )
    {
        PropertyRecord prevPropRecord = getPropertyStore().getRecord(
                prevProp );
        assert prevPropRecord.inUse() : prevPropRecord + "->" + propRecord
                                        + " for " + primitive;
        prevPropRecord.setNextProp( nextProp );
        getPropertyStore().updateRecord( prevPropRecord );
    }
    if ( nextProp != Record.NO_NEXT_PROPERTY.intValue() )
    {
        PropertyRecord nextPropRecord = getPropertyStore().getRecord(
                nextProp );
        assert nextPropRecord.inUse() : propRecord + "->" + nextPropRecord
                                        + " for " + primitive;
        nextPropRecord.setPrevProp( prevProp );
        getPropertyStore().updateRecord( nextPropRecord );
    }
    propRecord.setInUse( false );
    /*
     * The following two are not needed - the above line does all the work (PropertyStore
     * does not write out the prev/next for !inUse records). It is nice to set this
     * however to check for consistency when assertPropertyChain().
     */
    propRecord.setPrevProp( Record.NO_PREVIOUS_PROPERTY.intValue() );
    propRecord.setNextProp( Record.NO_NEXT_PROPERTY.intValue() );
    getPropertyStore().updateRecord( propRecord );
    return primitiveChanged;
}
/**
 * Add or overwrite one property on a node or relationship. Encodes the
 * value, removes any existing block for the same key, then places the new
 * block in the earliest chain record with room (or a newly prepended
 * record).
 * @param primitive the owning node/relationship record.
 * @param name the property key name.
 * @param value the property value to encode and store.
 * @return true if the passed primitive needs updating in the store.
 */
private boolean setPrimitiveProperty( PrimitiveRecord primitive,
                                      String name,
                                      Object value )
{
    boolean result = false;
    long nextProp = primitive.getNextProp();
    int index = getOrCreatePropertyKeyId( name );
    PropertyBlock block = new PropertyBlock();
    getPropertyStore().encodeValue( block, index, value );
    int size = block.getSize();
    /*
     * current is the current record traversed
     * thatFits is the earliest record that can host the block
     * thatHas is the record that already has a block for this index
     */
    PropertyRecord current, thatFits = null, thatHas = null;
    // Shared scratch set; cleared per call so each touched record is
    // written exactly once at the end.
    updatedPropertyRecords.clear();
    /*
     * We keep going while there are records or until we both found the
     * property if it exists and the place to put it, if exists.
     */
    while ( !(nextProp == Record.NO_NEXT_PROPERTY.intValue() || (thatHas != null && thatFits != null)) )
    {
        current = getPropertyStore().getRecord( nextProp );
        /*
         * current.getPropertyBlock() is cheap but not free. If we already
         * have found thatHas, then we can skip this lookup.
         */
        if ( thatHas == null && current.getPropertyBlock( index ) != null )
        {
            thatHas = current;
            // Evict the old block and free its dynamic value records.
            PropertyBlock removed = thatHas.removePropertyBlock( index );
            getPropertyStore().makeHeavyIfLight( removed );
            for ( DynamicRecord dynRec : removed.getValueRecords() )
            {
                dynRec.setInUse( false );
                thatHas.addDeletedRecord( dynRec );
            }
            updatedPropertyRecords.add( thatHas );
        }
        /*
         * We check the size after we remove - potentially we can put in the same record.
         *
         * current.size() is cheap but not free. If we already found somewhere
         * where it fits, no need to look again.
         */
        if ( thatFits == null && (PropertyType.getPayloadSize() - current.size() >= size) )
        {
            thatFits = current;
        }
        nextProp = current.getNextProp();
    }
    /*
     * thatHas is of no importance here. We know that the block is definitely not there.
     * However, we can be sure that if the property existed, thatHas is not null and does
     * not contain the block.
     *
     * thatFits is interesting. If null, we need to create a new record and link, otherwise
     * just add the block there.
     */
    if ( thatFits == null )
    {
        // Prepend a fresh record as the new chain head.
        thatFits = new PropertyRecord( getPropertyStore().nextId() );
        thatFits.setInUse( true );
        result = true;
        if ( primitive.getNextProp() != Record.NO_NEXT_PROPERTY.intValue() )
        {
            PropertyRecord first = getPropertyStore().getRecord( primitive.getNextProp() );
            thatFits.setNextProp( first.getId() );
            first.setPrevProp( thatFits.getId() );
            updatedPropertyRecords.add( first );
        }
        primitive.setNextProp( thatFits.getId() );
    }
    thatFits.addPropertyBlock( block );
    updatedPropertyRecords.add( thatFits );
    // This ensures that a particular record is not updated twice in this method
    // It could lead to freeId being called multiple times for same id
    for ( PropertyRecord updatedRecord : updatedPropertyRecords )
    {
        getPropertyStore().updateRecord( updatedRecord );
    }
    return result;
}
/**
 * Resolve the id of a property key token, creating the token first if it
 * does not exist yet.
 */
private int getOrCreatePropertyKeyId( String name )
{
    int existing = getPropertyKeyId( name );
    return existing != -1 ? existing : createNewPropertyKeyId( name );
}

/** @return the id of the property key token, or -1 if unknown. */
private int getPropertyKeyId( String name )
{
    return propertyKeyTokens.idOf( name );
}

/**
 * Resolve the id of a label token, creating the token first if it does
 * not exist yet.
 */
private int getOrCreateLabelId( String name )
{
    int existing = getLabelId( name );
    return existing != -1 ? existing : createNewLabelId( name );
}

/** @return the id of the label token, or -1 if unknown. */
private int getLabelId( String name )
{
    return labelTokens.idOf( name );
}
/**
 * Check whether a node/relationship record carries the named property by
 * scanning its property record chain.
 * @param record the owning node/relationship record.
 * @param propertyName the property key name.
 * @return true if a block for that key exists anywhere in the chain.
 */
private boolean primitiveHasProperty( PrimitiveRecord record,
        String propertyName )
{
    long chainPointer = record.getNextProp();
    int keyId = propertyKeyTokens.idOf( propertyName );
    // Unknown key or empty chain: the property cannot be present.
    if ( chainPointer == Record.NO_NEXT_PROPERTY.intValue() || keyId == -1 )
    {
        return false;
    }
    while ( chainPointer != Record.NO_NEXT_PROPERTY.intValue() )
    {
        PropertyRecord propertyRecord = getPropertyStore().getRecord( chainPointer );
        if ( propertyRecord.getPropertyBlock( keyId ) != null )
        {
            return true;
        }
        chainPointer = propertyRecord.getNextProp();
    }
    return false;
}
/**
 * Reject configurations that request a store upgrade: the batch inserter
 * must never upgrade a store format.
 * @param params the user-supplied configuration.
 * @throws IllegalArgumentException if allow_store_upgrade is enabled.
 */
private void rejectAutoUpgrade( Map<String, String> params )
{
    if ( parseBoolean( params.get( GraphDatabaseSettings.allow_store_upgrade.name() ) ) )
    {
        throw new IllegalArgumentException( "Batch inserter is not allowed to do upgrade of a store" +
                ", use " + EmbeddedGraphDatabase.class.getSimpleName() + " instead" );
    }
}
/**
 * Create a node with a store-assigned id.
 * @return the id of the created node.
 */
@Override
public long createNode( Map<String, Object> properties, Label... labels )
{
    return internalCreateNode( getNodeStore().nextId(), properties, labels );
}
/**
 * Create and persist a node record with the given id, its property chain,
 * and (when present) its labels.
 * @param nodeId the id to create the node under.
 * @param properties initial properties, may be null/empty.
 * @param labels initial labels.
 * @return the node id.
 */
private long internalCreateNode( long nodeId, Map<String, Object> properties, Label... labels )
{
    NodeRecord nodeRecord = new NodeRecord( nodeId, Record.NO_NEXT_RELATIONSHIP.intValue(),
            Record.NO_NEXT_PROPERTY.intValue() );
    nodeRecord.setInUse( true );
    nodeRecord.setCreated();
    nodeRecord.setNextProp( createPropertyChain( properties ) );
    if ( labels.length > 0 )
    {
        setNodeLabels( nodeRecord, labels );
    }
    getNodeStore().updateRecord( nodeRecord );
    return nodeId;
}
/**
 * Replace the labels stored on a node record, updating any dynamic label
 * records, and flag the schema as touched.
 */
private void setNodeLabels( NodeRecord nodeRecord, Label... labels )
{
    NodeLabels nodeLabels = parseLabelsField( nodeRecord );
    getNodeStore().updateDynamicLabelRecords( nodeLabels.put( getOrCreateLabelIds( labels ), getNodeStore() ) );
    // Label changes require index/label-scan repopulation on shutdown.
    labelsTouched = true;
}

/**
 * Resolve (creating where needed) the token ids for the given labels.
 * @return the label ids, positionally matching the input array.
 */
private long[] getOrCreateLabelIds( Label[] labels )
{
    long[] ids = new long[labels.length];
    for ( int i = 0; i < ids.length; i++ )
    {
        ids[i] = getOrCreateLabelId( labels[i].name() );
    }
    return ids;
}
/**
 * Create a node under a caller-chosen id. Validates the id range, rejects
 * the reserved integer -1 id, rejects ids already in use, and bumps the
 * store's high id when necessary so subsequent auto-assigned ids do not
 * collide.
 * @param id the node id to create.
 * @param properties initial properties, may be null/empty.
 * @param labels initial labels.
 * @throws IllegalArgumentException on an out-of-range, reserved, or
 *         already-used id.
 */
@Override
public void createNode( long id, Map<String, Object> properties, Label... labels )
{
    if ( id < 0 || id > MAX_NODE_ID )
    {
        throw new IllegalArgumentException( "id=" + id );
    }
    if ( id == IdGeneratorImpl.INTEGER_MINUS_ONE )
    {
        throw new IllegalArgumentException( "id " + id + " is reserved for internal use" );
    }
    NodeStore nodeStore = neoStore.getNodeStore();
    // Use the local reference instead of re-fetching the store.
    if ( nodeStore.loadLightNode( id ) != null )
    {
        throw new IllegalArgumentException( "id=" + id + " already in use" );
    }
    long highId = nodeStore.getHighId();
    if ( highId <= id )
    {
        nodeStore.setHighId( id + 1 );
    }
    internalCreateNode( id, properties, labels );
}
/** Replace all labels on the given node and persist the node record. */
@Override
public void setNodeLabels( long node, Label... labels )
{
    NodeRecord record = getNodeRecord( node );
    setNodeLabels( record, labels );
    getNodeStore().updateRecord( record );
}

/**
 * Lazily read the labels of a node. The store is consulted on each
 * iteration, so the view reflects the state at iteration time.
 */
@Override
public Iterable<Label> getNodeLabels( final long node )
{
    return new Iterable<Label>()
    {
        @Override
        public Iterator<Label> iterator()
        {
            // NOTE(review): mixes the local nodeStore with getNodeStore() —
            // presumably both resolve to the same store; verify.
            NodeStore nodeStore = neoStore.getNodeStore();
            long[] labels = parseLabelsField( nodeStore.getRecord( node ) ).get( getNodeStore() );
            return map( labelIdToLabelFunction, asPrimitiveIterator( labels ) );
        }
    };
}
@Override
public boolean nodeHasLabel( long node, Label label )
{
int labelId = getLabelId( label.name() );
return labelId != -1 && nodeHasLabel( node, labelId );
}
private boolean nodeHasLabel( long node, int labelId )
{
NodeStore nodeStore = neoStore.getNodeStore();
for ( long label : parseLabelsField( nodeStore.getRecord( node ) ).get( getNodeStore() ) )
{
if ( label == labelId )
{
return true;
}
}
return false;
}
/**
 * Creates a relationship between two existing nodes, creating the type
 * token on first use, splicing the record into both nodes' relationship
 * chains, and writing its property chain.
 *
 * @return the id of the new relationship.
 */
@Override
public long createRelationship( long node1, long node2, RelationshipType
        type, Map<String, Object> properties )
{
    NodeRecord firstNode = getNodeRecord( node1 );
    NodeRecord secondNode = getNodeRecord( node2 );
    int typeId = relationshipTypeTokens.idOf( type.name() );
    if ( typeId == -1 )
    {
        typeId = createNewRelationshipType( type.name() );
    }
    long id = getRelationshipStore().nextId();
    RelationshipRecord record = new RelationshipRecord( id, node1, node2, typeId );
    record.setInUse( true );
    record.setCreated();
    // Splice into both chains before the node records are written back.
    connectRelationship( firstNode, secondNode, record );
    getNodeStore().updateRecord( firstNode );
    getNodeStore().updateRecord( secondNode );
    record.setNextProp( createPropertyChain( properties ) );
    getRelationshipStore().updateRecord( record );
    return id;
}

/**
 * Inserts rel at the head of both nodes' relationship chains: rel points at
 * each node's former head, the former heads are back-linked via connect(),
 * and both nodes then point at rel.
 */
private void connectRelationship( NodeRecord firstNode,
        NodeRecord secondNode, RelationshipRecord rel )
{
    assert firstNode.getNextRel() != rel.getId();
    assert secondNode.getNextRel() != rel.getId();
    rel.setFirstNextRel( firstNode.getNextRel() );
    rel.setSecondNextRel( secondNode.getNextRel() );
    connect( firstNode, rel );
    connect( secondNode, rel );
    firstNode.setNextRel( rel.getId() );
    secondNode.setNextRel( rel.getId() );
}
/**
 * Back-links the node's current first relationship (if any) to rel, which
 * is about to become the new head of the node's relationship chain.
 *
 * @throws InvalidRecordException if the current chain head references the
 *         node on neither side, i.e. the chain is corrupt.
 */
private void connect( NodeRecord node, RelationshipRecord rel )
{
    if ( node.getNextRel() != Record.NO_NEXT_RELATIONSHIP.intValue() )
    {
        RelationshipRecord nextRel = getRelationshipStore().getRecord( node.getNextRel() );
        boolean changed = false;
        if ( nextRel.getFirstNode() == node.getId() )
        {
            nextRel.setFirstPrevRel( rel.getId() );
            changed = true;
        }
        if ( nextRel.getSecondNode() == node.getId() )
        {
            nextRel.setSecondPrevRel( rel.getId() );
            changed = true;
        }
        if ( !changed )
        {
            // Fixed typo in the error message ("dont" -> "don't").
            throw new InvalidRecordException( node + " don't match " + nextRel );
        }
        getRelationshipStore().updateRecord( nextRel );
    }
}
/**
 * Replaces all properties on the node: any existing property chain is
 * deleted first, then a fresh chain is written and linked in.
 */
@Override
public void setNodeProperties( long node, Map<String, Object> properties )
{
    NodeRecord record = getNodeRecord( node );
    if ( record.getNextProp() != Record.NO_NEXT_PROPERTY.intValue() )
    {
        deletePropertyChain( record.getNextProp() );
        /*
         * Batch inserter does not make any attempt to maintain the store's
         * integrity. It makes sense however to keep some things intact where
         * the cost is relatively low. So here, when we delete the property
         * chain we first make sure that the node record (or the relationship
         * record below) does not point anymore to the deleted properties. This
         * way, if during creation, something goes wrong, it will not have the properties
         * expected instead of throwing invalid record exceptions.
         */
        record.setNextProp( Record.NO_NEXT_PROPERTY.intValue() );
        getNodeStore().updateRecord( record );
    }
    record.setNextProp( createPropertyChain( properties ) );
    getNodeStore().updateRecord( record );
}

/**
 * Replaces all properties on the relationship; mirrors setNodeProperties.
 */
@Override
public void setRelationshipProperties( long rel,
        Map<String, Object> properties )
{
    RelationshipRecord record = getRelationshipRecord( rel );
    if ( record.getNextProp() != Record.NO_NEXT_PROPERTY.intValue() )
    {
        deletePropertyChain( record.getNextProp() );
        /*
         * See setNodeProperties above for an explanation of what goes on
         * here
         */
        record.setNextProp( Record.NO_NEXT_PROPERTY.intValue() );
        getRelationshipStore().updateRecord( record );
    }
    record.setNextProp( createPropertyChain( properties ) );
    getRelationshipStore().updateRecord( record );
}
/** Returns whether a node record with the given id is in use. */
@Override
public boolean nodeExists( long nodeId )
{
    NodeRecord lightNode = neoStore.getNodeStore().loadLightNode( nodeId );
    return lightNode != null;
}

/**
 * Reads all properties stored on the node.
 *
 * @return the node's properties, or an empty map if it has none.
 */
@Override
public Map<String, Object> getNodeProperties( long nodeId )
{
    NodeRecord record = getNodeRecord( nodeId );
    long firstProp = record.getNextProp();
    if ( firstProp == Record.NO_NEXT_PROPERTY.intValue() )
    {
        return Collections.emptyMap();
    }
    return getPropertyChain( firstProp );
}
/**
 * Collects the ids of every relationship attached to the node by walking
 * the node's doubly-linked relationship chain.
 * NOTE(review): duplicates the chain walk in getRelationships below — a
 * candidate for a shared private traversal helper.
 *
 * @throws InvalidRecordException if a chain record references neither side
 *         of the node, i.e. the chain is corrupt.
 */
@Override
public Iterable<Long> getRelationshipIds( long nodeId )
{
    NodeRecord nodeRecord = getNodeRecord( nodeId );
    long nextRel = nodeRecord.getNextRel();
    List<Long> ids = new ArrayList<>();
    while ( nextRel != Record.NO_NEXT_RELATIONSHIP.intValue() )
    {
        RelationshipRecord relRecord = getRelationshipRecord( nextRel );
        ids.add( relRecord.getId() );
        long firstNode = relRecord.getFirstNode();
        long secondNode = relRecord.getSecondNode();
        // Follow the chain pointer belonging to this node's side.
        if ( firstNode == nodeId )
        {
            nextRel = relRecord.getFirstNextRel();
        }
        else if ( secondNode == nodeId )
        {
            nextRel = relRecord.getSecondNextRel();
        }
        else
        {
            throw new InvalidRecordException( "Node[" + nodeId +
                    "] not part of firstNode[" + firstNode +
                    "] or secondNode[" + secondNode + "]" );
        }
    }
    return ids;
}

/**
 * Materializes every relationship attached to the node, walking the same
 * chain as getRelationshipIds but building BatchRelationship values.
 */
@Override
public Iterable<BatchRelationship> getRelationships( long nodeId )
{
    NodeRecord nodeRecord = getNodeRecord( nodeId );
    long nextRel = nodeRecord.getNextRel();
    List<BatchRelationship> rels = new ArrayList<>();
    while ( nextRel != Record.NO_NEXT_RELATIONSHIP.intValue() )
    {
        RelationshipRecord relRecord = getRelationshipRecord( nextRel );
        RelationshipType type = new RelationshipTypeImpl(
                relationshipTypeTokens.nameOf( relRecord.getType() ) );
        rels.add( new BatchRelationship( relRecord.getId(),
                relRecord.getFirstNode(), relRecord.getSecondNode(), type ) );
        long firstNode = relRecord.getFirstNode();
        long secondNode = relRecord.getSecondNode();
        if ( firstNode == nodeId )
        {
            nextRel = relRecord.getFirstNextRel();
        }
        else if ( secondNode == nodeId )
        {
            nextRel = relRecord.getSecondNextRel();
        }
        else
        {
            throw new InvalidRecordException( "Node[" + nodeId +
                    "] not part of firstNode[" + firstNode +
                    "] or secondNode[" + secondNode + "]" );
        }
    }
    return rels;
}

/** Reads a single relationship by id, resolving its type token to a name. */
@Override
public BatchRelationship getRelationshipById( long relId )
{
    RelationshipRecord record = getRelationshipRecord( relId );
    RelationshipType type = new RelationshipTypeImpl(
            relationshipTypeTokens.nameOf( record.getType() ) );
    return new BatchRelationship( record.getId(), record.getFirstNode(),
            record.getSecondNode(), type );
}

/**
 * Reads all properties stored on the relationship.
 *
 * @return the relationship's properties, or an empty map if it has none.
 */
@Override
public Map<String, Object> getRelationshipProperties( long relId )
{
    RelationshipRecord record = getRelationshipRecord( relId );
    if ( record.getNextProp() != Record.NO_NEXT_PROPERTY.intValue() )
    {
        return getPropertyChain( record.getNextProp() );
    }
    return Collections.emptyMap();
}
/**
 * Shuts the inserter down: repopulates indexes, closes the store, releases
 * the store lock and tears down the lifecycle. Not idempotent — a second
 * call throws IllegalStateException.
 */
@Override
public void shutdown()
{
    if ( isShutdown )
    {
        throw new IllegalStateException( "Batch inserter already has shutdown" );
    }
    isShutdown = true;
    try
    {
        repopulateAllIndexes();
    }
    catch ( IOException e )
    {
        throw new RuntimeException( e );
    }
    neoStore.close();
    try
    {
        storeLocker.release();
    }
    catch ( IOException e )
    {
        throw new UnderlyingStorageException( "Could not release store lock", e );
    }
    msgLog.logMessage( Thread.currentThread() + " Clean shutdown on BatchInserter(" + this + ")", true );
    msgLog.close();
    life.shutdown();
}

// NOTE(review): prints "EmbeddedBatchInserter" although this class appears
// to be BatchInserterImpl — confirm the name is intentional (also appears
// in the shutdown log message above).
@Override
public String toString()
{
    return "EmbeddedBatchInserter[" + storeDir + "]";
}
/** Minimal RelationshipType backed by a plain string name. */
private static class RelationshipTypeImpl implements RelationshipType
{
    private final String typeName;

    RelationshipTypeImpl( String name )
    {
        this.typeName = name;
    }

    @Override
    public String name()
    {
        return typeName;
    }
}
/**
 * Encodes the given properties into a linked chain of property records,
 * writing them to the property store.
 *
 * @return the id of the first record in the chain, or NO_NEXT_PROPERTY if
 *         the map is null or empty.
 */
private long createPropertyChain( Map<String, Object> properties )
{
    if ( properties == null || properties.isEmpty() )
    {
        return Record.NO_NEXT_PROPERTY.intValue();
    }
    PropertyStore propStore = getPropertyStore();
    List<PropertyRecord> propRecords = new ArrayList<>();
    PropertyRecord currentRecord = new PropertyRecord( propStore.nextId() );
    currentRecord.setInUse( true );
    currentRecord.setCreated();
    propRecords.add( currentRecord );
    for ( Entry<String, Object> entry : properties.entrySet() )
    {
        // Resolve (or create) the property key token before encoding.
        int keyId = propertyKeyTokens.idOf( entry.getKey() );
        if ( keyId == -1 )
        {
            keyId = createNewPropertyKeyId( entry.getKey() );
        }
        PropertyBlock block = new PropertyBlock();
        propStore.encodeValue( block, keyId, entry.getValue() );
        if ( currentRecord.size() + block.getSize() > PropertyType.getPayloadSize() )
        {
            // Here it means the current block is done for
            PropertyRecord prevRecord = currentRecord;
            // Create new record
            long propertyId = propStore.nextId();
            currentRecord = new PropertyRecord( propertyId );
            currentRecord.setInUse( true );
            currentRecord.setCreated();
            // Set up links
            prevRecord.setNextProp( propertyId );
            currentRecord.setPrevProp( prevRecord.getId() );
            propRecords.add( currentRecord );
            // Now current is ready to start picking up blocks
        }
        currentRecord.addPropertyBlock( block );
    }
    /*
     * Add the property records in reverse order, which means largest
     * id first. That is to make sure we expand the property store file
     * only once.
     */
    for ( int i = propRecords.size() - 1; i >= 0; i-- )
    {
        propStore.updateRecord( propRecords.get( i ) );
    }
    /*
     * 0 will always exist, if the map was empty we wouldn't be here
     * and even one property will create at least one record.
     */
    return propRecords.get( 0 ).getId();
}

/**
 * Marks every record in the property chain starting at nextProp as not in
 * use, including any dynamic value records hanging off the blocks.
 */
private void deletePropertyChain( long nextProp )
{
    PropertyStore propStore = getPropertyStore();
    while ( nextProp != Record.NO_NEXT_PROPERTY.intValue() )
    {
        PropertyRecord propRecord = propStore.getRecord( nextProp );
        /*
         * The only reason to loop over the blocks is to handle the dynamic
         * records that possibly hang under them. Otherwise, we could just
         * set the property record not in use and be done with it. The
         * residue of the convenience is that we do not remove individual
         * property blocks - we just mark the whole record as !inUse.
         */
        for ( PropertyBlock propBlock : propRecord.getPropertyBlocks() )
        {
            propStore.ensureHeavy( propBlock );
            for ( DynamicRecord rec : propBlock.getValueRecords() )
            {
                rec.setInUse( false );
                propRecord.addDeletedRecord( rec );
            }
        }
        propRecord.setInUse( false );
        nextProp = propRecord.getNextProp();
        propStore.updateRecord( propRecord );
    }
}

/**
 * Decodes the property chain starting at nextProp into a key -> value map,
 * resolving key tokens to names.
 */
private Map<String, Object> getPropertyChain( long nextProp )
{
    PropertyStore propStore = getPropertyStore();
    Map<String, Object> properties = new HashMap<>();
    while ( nextProp != Record.NO_NEXT_PROPERTY.intValue() )
    {
        PropertyRecord propRecord = propStore.getRecord( nextProp );
        for ( PropertyBlock propBlock : propRecord.getPropertyBlocks() )
        {
            String key = propertyKeyTokens.nameOf( propBlock.getKeyIndexId() );
            DefinedProperty propertyData = propBlock.newPropertyData( propStore );
            // Fall back to decoding the block directly when the property
            // data carries no inlined value.
            Object value = propertyData.value() != null ? propertyData.value() :
                    propBlock.getType().getValue( propBlock, getPropertyStore() );
            properties.put( key, value );
        }
        nextProp = propRecord.getNextProp();
    }
    return properties;
}
/**
 * Allocates and persists a new property key token for the given name and
 * registers it in the in-memory token cache.
 * NOTE(review): this method, createNewLabelId and createNewRelationshipType
 * share the same allocate/encode/link/persist structure — candidates for a
 * shared helper.
 */
private int createNewPropertyKeyId( String stringKey )
{
    PropertyKeyTokenStore idxStore = getPropertyKeyTokenStore();
    int keyId = (int) idxStore.nextId();
    PropertyKeyTokenRecord record = new PropertyKeyTokenRecord( keyId );
    record.setInUse( true );
    record.setCreated();
    Collection<DynamicRecord> keyRecords =
            idxStore.allocateNameRecords( encodeString( stringKey ) );
    record.setNameId( (int) first( keyRecords ).getId() );
    record.addNameRecords( keyRecords );
    idxStore.updateRecord( record );
    propertyKeyTokens.addToken( stringKey, keyId );
    return keyId;
}

/** Allocates and persists a new label token; see createNewPropertyKeyId. */
private int createNewLabelId( String stringKey )
{
    LabelTokenStore labelTokenStore = neoStore.getLabelTokenStore();
    int keyId = (int) labelTokenStore.nextId();
    LabelTokenRecord record = new LabelTokenRecord( keyId );
    record.setInUse( true );
    record.setCreated();
    Collection<DynamicRecord> keyRecords =
            labelTokenStore.allocateNameRecords( encodeString( stringKey ) );
    record.setNameId( (int) first( keyRecords ).getId() );
    record.addNameRecords( keyRecords );
    labelTokenStore.updateRecord( record );
    labelTokens.addToken( stringKey, keyId );
    return keyId;
}

/** Allocates and persists a new relationship type token; same pattern. */
private int createNewRelationshipType( String name )
{
    RelationshipTypeTokenStore typeStore = getRelationshipTypeStore();
    int id = (int) typeStore.nextId();
    RelationshipTypeTokenRecord record = new RelationshipTypeTokenRecord( id );
    record.setInUse( true );
    record.setCreated();
    Collection<DynamicRecord> nameRecords = typeStore.allocateNameRecords( encodeString( name ) );
    record.setNameId( (int) first( nameRecords ).getId() );
    record.addNameRecords( nameRecords );
    typeStore.updateRecord( record );
    relationshipTypeTokens.addToken( name, id );
    return id;
}
// Convenience accessors for the individual stores inside neoStore.

private NodeStore getNodeStore()
{
    return neoStore.getNodeStore();
}

private PropertyStore getPropertyStore()
{
    return neoStore.getPropertyStore();
}

private PropertyKeyTokenStore getPropertyKeyTokenStore()
{
    return getPropertyStore().getPropertyKeyTokenStore();
}

private RelationshipStore getRelationshipStore()
{
    return neoStore.getRelationshipStore();
}

private RelationshipTypeTokenStore getRelationshipTypeStore()
{
    return neoStore.getRelationshipTypeStore();
}

private SchemaStore getSchemaStore()
{
    return neoStore.getSchemaStore();
}
/**
 * Loads a node record by id.
 *
 * @throws NotFoundException if the id is negative or at/above the store's high id.
 */
private NodeRecord getNodeRecord( long id )
{
    NodeStore store = getNodeStore();
    if ( id < 0 || id >= store.getHighId() )
    {
        throw new NotFoundException( "id=" + id );
    }
    return store.getRecord( id );
}

/**
 * Loads a relationship record by id.
 *
 * @throws NotFoundException if the id is negative or at/above the store's high id.
 */
private RelationshipRecord getRelationshipRecord( long id )
{
    RelationshipStore store = getRelationshipStore();
    if ( id < 0 || id >= store.getHighId() )
    {
        throw new NotFoundException( "id=" + id );
    }
    return store.getRecord( id );
}
/**
 * Ensures the store directory exists and that a neostore file is present,
 * creating an empty one via the store factory if needed.
 *
 * @return the path to the neostore file.
 * @throws UnderlyingStorageException if the directory cannot be created.
 */
private File fixPath( File dir, StoreFactory sf )
{
    try
    {
        fileSystem.mkdirs( dir );
    }
    catch ( IOException e )
    {
        // Fixed: preserve the original IOException as the cause instead of
        // discarding it.
        throw new UnderlyingStorageException(
                "Unable to create directory path["
                        + storeDir + "] for Neo4j kernel store.", e );
    }
    File store = new File( dir, NeoStore.DEFAULT_NAME );
    if ( !fileSystem.fileExists( store ) )
    {
        sf.createNeoStore( store ).close();
    }
    return store;
}
/** Returns the path of the directory this inserter writes its store to. */
@Override
public String getStoreDir()
{
    return storeDir.getPath();
}

// needed by lucene-index
public IndexStore getIndexStore()
{
    return this.indexStore;
}

/** Exposes the id generator factory backing this inserter's stores. */
public IdGeneratorFactory getIdGeneratorFactory()
{
    return idGeneratorFactory;
}
/**
 * Prints every non-null configuration entry as "key=value" to stdout.
 */
private void dumpConfiguration( Map<String, String> config )
{
    // Fixed: iterate entries instead of keySet + get (avoids a second
    // lookup per key) and type the value as String, which is what the
    // map actually holds.
    for ( Map.Entry<String, String> entry : config.entrySet() )
    {
        String value = entry.getValue();
        if ( value != null )
        {
            System.out.println( entry.getKey() + "=" + value );
        }
    }
}
/**
 * Schema DSL backend for batch insertion. Only index and uniqueness
 * constraint creation are supported; drop operations and user messages
 * throw UnsupportedOperationException.
 */
private class BatchSchemaActions implements InternalSchemaActions
{
    @Override
    public IndexDefinition createIndexDefinition( Label label, String propertyKey )
    {
        createIndexRule( label, propertyKey );
        return new IndexDefinitionImpl( this, label, propertyKey, false );
    }

    @Override
    public void dropIndexDefinitions( Label label, String propertyKey )
    {
        throw unsupportedException();
    }

    @Override
    public ConstraintDefinition createPropertyUniquenessConstraint( Label label, String propertyKey )
    {
        // Tokens are created up front so the constraint rule can reference ids.
        int labelId = getOrCreateLabelId( label.name() );
        int propertyKeyId = getOrCreatePropertyKeyId( propertyKey );
        createConstraintRule( new UniquenessConstraint( labelId, propertyKeyId ) );
        return new PropertyUniqueConstraintDefinition( this, label, propertyKey );
    }

    @Override
    public void dropPropertyUniquenessConstraint( Label label, String propertyKey )
    {
        throw unsupportedException();
    }

    @Override
    public String getUserMessage( KernelException e )
    {
        throw unsupportedException();
    }

    @Override
    public void assertInTransaction()
    {
        // BatchInserterImpl always is expected to be running in one big single "transaction"
    }

    private UnsupportedOperationException unsupportedException()
    {
        return new UnsupportedOperationException( "Batch inserter doesn't support this" );
    }
}
/**
 * Minimal dependency resolver for the batch inserter: resolves only the
 * file system, configuration, logging, and a NeoStoreProvider wrapper.
 * Any other requested type is rejected.
 */
private class DependencyResolverImpl extends DependencyResolver.Adapter
{
    @Override
    public <T> T resolveDependency( Class<T> type, SelectionStrategy selector ) throws IllegalArgumentException
    {
        if ( type.isInstance( fileSystem ) )
        {
            return type.cast( fileSystem );
        }
        if ( type.isInstance( config ) )
        {
            return type.cast( config );
        }
        if ( type.isInstance( logging ) )
        {
            return type.cast( logging );
        }
        if ( NeoStoreProvider.class.isAssignableFrom( type ) )
        {
            // Wrap the live neoStore so callers always see the current instance.
            return type.cast( new NeoStoreProvider()
            {
                @Override
                public NeoStore evaluate()
                {
                    return neoStore;
                }
            } );
        }
        throw new IllegalArgumentException( "Unknown dependency " + type );
    }
}
}
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchInserterImpl.java
|
1,922
|
/**
 * Relationship adapter over a BatchRelationship. Property writes touch only
 * the in-memory map; the owning cache flushes it back to the inserter.
 * Deletion is not supported in batch mode.
 */
private static class RelationshipBatchImpl implements Relationship
{
    private final BatchRelationship rel;
    private final BatchGraphDatabaseImpl db;
    private final Map<String, Object> props;

    RelationshipBatchImpl( BatchRelationship rel,
            BatchGraphDatabaseImpl graphDbService, Map<String, Object> properties )
    {
        this.rel = rel;
        this.db = graphDbService;
        this.props = properties;
    }

    @Override
    public GraphDatabaseService getGraphDatabase()
    {
        return db;
    }

    /** Exposes the backing property map so the cache can flush it. */
    Map<String, Object> getProperties()
    {
        return props;
    }

    @Override
    public void delete()
    {
        throw unsupportedOperation();
    }

    @Override
    public Node getEndNode()
    {
        return db.getNodeById( rel.getEndNode() );
    }

    @Override
    public long getId()
    {
        return rel.getId();
    }

    @Override
    public Node[] getNodes()
    {
        Node start = getStartNode();
        Node end = getEndNode();
        return new Node[]{start, end};
    }

    @Override
    public Node getOtherNode( Node node )
    {
        Node start = getStartNode();
        Node end = getEndNode();
        // End is checked first so a self-loop resolves to the start node.
        if ( node.equals( end ) )
        {
            return start;
        }
        if ( node.equals( start ) )
        {
            return end;
        }
        throw new IllegalArgumentException( "" + node );
    }

    @Override
    public Node getStartNode()
    {
        return db.getNodeById( rel.getStartNode() );
    }

    @Override
    public RelationshipType getType()
    {
        return rel.getType();
    }

    @Override
    public boolean isType( RelationshipType type )
    {
        return rel.getType().name().equals( type.name() );
    }

    @Override
    public Object getProperty( String key )
    {
        Object value = props.get( key );
        if ( value != null )
        {
            return value;
        }
        throw new NotFoundException( key );
    }

    @Override
    public Object getProperty( String key, Object defaultValue )
    {
        Object value = props.get( key );
        return value != null ? value : defaultValue;
    }

    @Override
    public Iterable<String> getPropertyKeys()
    {
        return props.keySet();
    }

    @Override
    public boolean hasProperty( String key )
    {
        return props.containsKey( key );
    }

    @Override
    public Object removeProperty( String key )
    {
        Object removed = props.remove( key );
        if ( removed != null )
        {
            return removed;
        }
        throw new NotFoundException( "Property " + key );
    }

    @Override
    public void setProperty( String key, Object value )
    {
        props.put( key, value );
    }

    @Override
    public boolean equals( Object o )
    {
        if ( o instanceof Relationship )
        {
            return this.getId() == ((Relationship) o).getId();
        }
        return false;
    }

    @Override
    public int hashCode()
    {
        long id = rel.getId();
        return (int) (id ^ (id >>> 32));
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchGraphDatabaseImpl.java
|
1,923
|
/**
 * Iterator over a node's relationships, filtered by direction and an
 * optional set of relationship types. Also implements Iterable so it can be
 * handed out from the Node API; iterator() starts a fresh pass over the
 * same ids.
 */
static class RelIterator implements
        Iterable<Relationship>, Iterator<Relationship>
{
    private final BatchGraphDatabaseImpl graphDbService;
    private final Iterable<Long> relIds;
    private final Iterator<Long> relItr;
    private final long nodeId;
    private final Direction dir;
    private final RelationshipType[] types;
    // Look-ahead element: filled by getNextElement(), consumed by next().
    private Relationship nextElement;

    RelIterator( BatchGraphDatabaseImpl graphDbService, Iterable<Long> relIds,
            long nodeId, Direction dir, RelationshipType[] types )
    {
        this.graphDbService = graphDbService;
        this.relIds = relIds;
        this.relItr = relIds.iterator();
        this.nodeId = nodeId;
        this.dir = dir;
        this.types = types;
    }

    @Override
    public Iterator<Relationship> iterator()
    {
        return new RelIterator( graphDbService, relIds, nodeId, dir, types );
    }

    @Override
    public boolean hasNext()
    {
        // Fixed: return the boolean directly instead of if/return true/false.
        getNextElement();
        return nextElement != null;
    }

    @Override
    public Relationship next()
    {
        getNextElement();
        if ( nextElement == null )
        {
            throw new NoSuchElementException();
        }
        Relationship returnVal = nextElement;
        nextElement = null;
        return returnVal;
    }

    /**
     * Advances to the next relationship passing the direction/type filter,
     * leaving it in nextElement, or leaves nextElement null when exhausted.
     */
    private void getNextElement()
    {
        while ( nextElement == null && relItr.hasNext() )
        {
            Relationship possibleRel =
                    graphDbService.getRelationshipById( relItr.next() );
            // Direction filter. Note: a loop (start == end == nodeId) is
            // skipped for both OUTGOING and INCOMING, as in the original.
            if ( dir == Direction.OUTGOING &&
                    possibleRel.getEndNode().getId() == nodeId )
            {
                continue;
            }
            if ( dir == Direction.INCOMING &&
                    possibleRel.getStartNode().getId() == nodeId )
            {
                continue;
            }
            if ( types != null )
            {
                // Accept the relationship only if its type name matches one
                // of the requested types.
                for ( RelationshipType type : types )
                {
                    if ( type.name().equals(
                            possibleRel.getType().name() ) )
                    {
                        nextElement = possibleRel;
                        break;
                    }
                }
            }
            else
            {
                nextElement = possibleRel;
            }
        }
    }

    @Override
    public void remove()
    {
        throw new UnsupportedOperationException();
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchGraphDatabaseImpl.java
|
1,924
|
/**
 * Node adapter over the batch inserter. Properties live in an in-memory map
 * flushed by the owning cache; relationship reads go through the inserter;
 * label operations are read-modify-write against the inserter. Deletion and
 * the legacy traverse API are not supported in batch mode.
 */
private static class NodeBatchImpl implements Node
{
    private final BatchGraphDatabaseImpl graphDbService;
    private final long id;
    private final Map<String, Object> properties;

    NodeBatchImpl( long id, BatchGraphDatabaseImpl graphDbService,
            Map<String, Object> properties )
    {
        this.id = id;
        this.graphDbService = graphDbService;
        this.properties = properties;
    }

    @Override
    public GraphDatabaseService getGraphDatabase()
    {
        return graphDbService;
    }

    /**
     * Creates the relationship through the inserter with no properties and
     * caches a wrapper so later property writes can be flushed.
     */
    @Override
    public Relationship createRelationshipTo( Node otherNode,
            RelationshipType type )
    {
        long relId = graphDbService.getBatchInserter().createRelationship( id,
                otherNode.getId(), type, null );
        RelationshipBatchImpl rel = new RelationshipBatchImpl(
                new BatchRelationship( relId, id, otherNode.getId(), type ),
                graphDbService, emptyProps() );
        graphDbService.addRelationshipToCache( relId, rel );
        return rel;
    }

    // Exposes the backing property map so the cache can flush it.
    Map<String, Object> getProperties()
    {
        return properties;
    }

    @Override
    public void delete()
    {
        throw unsupportedOperation();
    }

    @Override
    public long getId()
    {
        return id;
    }

    // All relationship accessors funnel through this: the inserter supplies
    // the ids and RelIterator applies the direction/type filter.
    private RelIterator newRelIterator( Direction dir,
            RelationshipType[] types )
    {
        Iterable<Long> relIds =
                graphDbService.getBatchInserter().getRelationshipIds( id );
        return new RelIterator( graphDbService, relIds, id, dir, types );
    }

    @Override
    public Iterable<Relationship> getRelationships()
    {
        return newRelIterator( Direction.BOTH, null );
    }

    @Override
    public Iterable<Relationship> getRelationships(
            RelationshipType... types )
    {
        return newRelIterator( Direction.BOTH, types );
    }

    @Override
    public Iterable<Relationship> getRelationships( Direction direction,
            RelationshipType... types )
    {
        return newRelIterator( direction, types );
    }

    @Override
    public Iterable<Relationship> getRelationships( Direction dir )
    {
        return newRelIterator( dir, null );
    }

    @Override
    public Iterable<Relationship> getRelationships( RelationshipType type,
            Direction dir )
    {
        return newRelIterator( dir, new RelationshipType[]{type} );
    }

    /**
     * Returns the single relationship of the given type/direction, null if
     * there is none, and throws NotFoundException if there is more than one.
     */
    @Override
    public Relationship getSingleRelationship( RelationshipType type,
            Direction dir )
    {
        Iterator<Relationship> relItr =
                newRelIterator( dir, new RelationshipType[]{type} );
        if ( relItr.hasNext() )
        {
            Relationship rel = relItr.next();
            if ( relItr.hasNext() )
            {
                throw new NotFoundException( "More than one relationship[" +
                        type + ", " + dir + "] found for " + this );
            }
            return rel;
        }
        return null;
    }

    @Override
    public boolean hasRelationship()
    {
        return newRelIterator( Direction.BOTH, null ).hasNext();
    }

    @Override
    public boolean hasRelationship( RelationshipType... types )
    {
        return newRelIterator( Direction.BOTH, types ).hasNext();
    }

    @Override
    public boolean hasRelationship( Direction direction, RelationshipType... types )
    {
        return newRelIterator( direction, types ).hasNext();
    }

    @Override
    public boolean hasRelationship( Direction dir )
    {
        Iterator<Relationship> relItr =
                newRelIterator( dir, null );
        return relItr.hasNext();
    }

    @Override
    public boolean hasRelationship( RelationshipType type, Direction dir )
    {
        return newRelIterator( dir, new RelationshipType[]{type} ).hasNext();
    }

    /* Tentative expansion API
    public Expansion<Relationship> expandAll()
    {
        return Traversal.expanderForAllTypes().expand( this );
    }

    public Expansion<Relationship> expand( RelationshipType type )
    {
        return expand( type, Direction.BOTH );
    }

    public Expansion<Relationship> expand( RelationshipType type,
            Direction direction )
    {
        return Traversal.expanderForTypes( type, direction ).expand(
                this );
    }

    public Expansion<Relationship> expand( Direction direction )
    {
        return Traversal.expanderForAllTypes( direction ).expand(
                this );
    }

    public Expansion<Relationship> expand( RelationshipExpander expander )
    {
        return Traversal.expander( expander ).expand( this );
    }
    */

    @Override
    public Traverser traverse( Order traversalOrder,
            StopEvaluator stopEvaluator,
            ReturnableEvaluator returnableEvaluator,
            RelationshipType relationshipType, Direction direction )
    {
        throw unsupportedOperation();
    }

    @Override
    public Traverser traverse( Order traversalOrder,
            StopEvaluator stopEvaluator,
            ReturnableEvaluator returnableEvaluator,
            RelationshipType firstRelationshipType, Direction firstDirection,
            RelationshipType secondRelationshipType, Direction secondDirection )
    {
        throw unsupportedOperation();
    }

    @Override
    public Traverser traverse( Order traversalOrder,
            StopEvaluator stopEvaluator,
            ReturnableEvaluator returnableEvaluator,
            Object... relationshipTypesAndDirections )
    {
        throw unsupportedOperation();
    }

    /**
     * Read-modify-write: reads the current label set from the inserter,
     * adds the label, and writes the whole set back. Not atomic, but the
     * batch inserter is not used concurrently.
     */
    @Override
    public void addLabel( Label label )
    {
        Set<Label> labelSet = asSet( graphDbService.batchInserter.getNodeLabels( getId() ) );
        labelSet.add( label );
        Label[] labelIds = new Label[ labelSet.size() ];
        graphDbService.batchInserter.setNodeLabels( getId(), labelSet.toArray( labelIds ) );
    }

    /** Read-modify-write removal; mirrors addLabel. */
    @Override
    public void removeLabel( Label label )
    {
        Set<Label> labelSet = asSet( graphDbService.batchInserter.getNodeLabels( getId() ) );
        labelSet.remove( label );
        Label[] labelIds = new Label[ labelSet.size() ];
        graphDbService.batchInserter.setNodeLabels( getId(), labelSet.toArray( labelIds ) );
    }

    @Override
    public boolean hasLabel( Label label )
    {
        return graphDbService.batchInserter.nodeHasLabel( getId(), label );
    }

    @Override
    public ResourceIterable<Label> getLabels()
    {
        final Iterable<Label> labels = graphDbService.batchInserter.getNodeLabels( getId() );
        return asResourceIterable( labels );
    }

    // Note: a property stored with a null value is indistinguishable from a
    // missing property here, same as in RelationshipBatchImpl.
    @Override
    public Object getProperty( String key )
    {
        Object val = properties.get( key );
        if ( val == null )
        {
            throw new NotFoundException( key );
        }
        return val;
    }

    @Override
    public Object getProperty( String key, Object defaultValue )
    {
        Object val = properties.get( key );
        if ( val == null )
        {
            return defaultValue;
        }
        return val;
    }

    @Override
    public Iterable<String> getPropertyKeys()
    {
        return properties.keySet();
    }

    @Override
    public boolean hasProperty( String key )
    {
        return properties.containsKey( key );
    }

    @Override
    public Object removeProperty( String key )
    {
        Object val = properties.remove( key );
        if ( val == null )
        {
            throw new NotFoundException( "Property " + key );
        }
        return val;
    }

    @Override
    public void setProperty( String key, Object value )
    {
        properties.put( key, value );
    }

    // Identity is the node id, consistent across cached instances.
    @Override
    public boolean equals( Object o )
    {
        if ( !(o instanceof Node) )
        {
            return false;
        }
        return this.getId() == ((Node) o).getId();
    }

    @Override
    public int hashCode()
    {
        return (int) (id ^ (id >>> 32));
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchGraphDatabaseImpl.java
|
1,925
|
/**
 * No-op Transaction handed out by beginTx(): the batch inserter writes
 * directly to the store, so there is nothing to commit or roll back.
 * failure() throws, since a rollback cannot be honored.
 */
static class FakeTransaction implements Transaction
{
    @Override
    public void failure()
    {
        throw new NotInTransactionException( "Batch insert mode, " +
                "failure is not an option." );
    }

    @Override
    public void finish()
    {
        // No-op: nothing to commit.
    }

    @Override
    public void close()
    {
        // No-op.
    }

    @Override
    public void success()
    {
        // No-op.
    }

    @Override
    public Lock acquireWriteLock( PropertyContainer entity )
    {
        // Returns the shared no-op lock; batch mode takes no real locks.
        return PlaceboTransaction.NO_LOCK;
    }

    @Override
    public Lock acquireReadLock( PropertyContainer entity )
    {
        return PlaceboTransaction.NO_LOCK;
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchGraphDatabaseImpl.java
|
1,926
|
// Anonymous LruCache eviction callback (fragment): when a cached
// relationship wrapper is evicted, flush its in-memory property map back
// to the batch inserter so pending property writes are not lost.
{
    @Override
    public void elementCleaned( RelationshipBatchImpl rel )
    {
        Map<String, Object> properties = rel.getProperties();
        if ( properties != null )
        {
            batchInserter.setRelationshipProperties( rel.getId(),
                    properties );
        }
    }
};
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchGraphDatabaseImpl.java
|
1,927
|
// Anonymous LruCache eviction callback (fragment): when a cached node
// wrapper is evicted, flush its in-memory property map back to the batch
// inserter so pending property writes are not lost.
{
    @Override
    public void elementCleaned( NodeBatchImpl node )
    {
        Map<String, Object> properties = node.getProperties();
        if ( properties != null )
        {
            batchInserter.setNodeProperties( node.getId(), properties );
        }
    }
};
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchGraphDatabaseImpl.java
|
1,928
|
class BatchGraphDatabaseImpl implements GraphDatabaseService
{
final BatchInserter batchInserter;
private final LruCache<Long, NodeBatchImpl> nodes =
new LruCache<Long, NodeBatchImpl>( "NodeCache", 10000 )
{
@Override
public void elementCleaned( NodeBatchImpl node )
{
Map<String, Object> properties = node.getProperties();
if ( properties != null )
{
batchInserter.setNodeProperties( node.getId(), properties );
}
}
};
private final LruCache<Long, RelationshipBatchImpl> rels =
new LruCache<Long, RelationshipBatchImpl>( "RelCache", 10000 )
{
@Override
public void elementCleaned( RelationshipBatchImpl rel )
{
Map<String, Object> properties = rel.getProperties();
if ( properties != null )
{
batchInserter.setRelationshipProperties( rel.getId(),
properties );
}
}
};
BatchGraphDatabaseImpl( String storeDir, FileSystemAbstraction fileSystem,
Map<String, String> stringParams, Iterable<KernelExtensionFactory<?>> kernelExtensions )
{
this.batchInserter = new BatchInserterImpl( storeDir, fileSystem, stringParams, kernelExtensions );
}
/**
* Intended for use in tests only.
*/
BatchGraphDatabaseImpl( BatchInserter batchInserter )
{
this.batchInserter = batchInserter;
}
BatchInserter getBatchInserter()
{
return batchInserter;
}
@Override
public Transaction beginTx()
{
return new FakeTransaction();
}
@Override
public Node createNode()
{
long id = batchInserter.createNode( null );
NodeBatchImpl node = new NodeBatchImpl( id, this, emptyProps() );
nodes.put( id, node );
return node;
}
@Override
public Node createNode( Label... labels )
{
long id = batchInserter.createNode( null, labels );
NodeBatchImpl node = new NodeBatchImpl( id, this, emptyProps() );
nodes.put( id, node );
return node;
}
private static UnsupportedOperationException unsupportedOperation()
{
return new UnsupportedOperationException( "Batch inserter mode" );
}
static Map<String,Object> emptyProps()
{
return new HashMap<>();
}
@Override
public Iterable<Node> getAllNodes()
{
throw unsupportedOperation();
}
public Iterable<Relationship> getAllRelationships()
{
throw unsupportedOperation();
}
@Override
public Node getNodeById( long id )
{
NodeBatchImpl node = nodes.get( id );
if ( node == null )
{
try
{
node = new NodeBatchImpl( id, this, mutableCopyOf( batchInserter.getNodeProperties( id ) ) );
nodes.put( id, node );
}
catch ( InvalidRecordException e )
{
throw new NotFoundException( e );
}
}
return node;
}
private Map<String, Object> mutableCopyOf( Map<String, Object> source )
{
return new HashMap<>( source );
}
@Override
public Relationship getRelationshipById( long id )
{
RelationshipBatchImpl rel = rels.get( id );
if ( rel == null )
{
try
{
BatchRelationship simpleRel =
batchInserter.getRelationshipById( id );
Map<String, Object> props =
batchInserter.getRelationshipProperties( id );
rel = new RelationshipBatchImpl( simpleRel, this, mutableCopyOf( props ) );
rels.put( id, rel );
}
catch ( InvalidRecordException e )
{
throw new NotFoundException( e );
}
}
return rel;
}
@Override
public Iterable<RelationshipType> getRelationshipTypes()
{
throw unsupportedOperation();
}
@Override
public boolean isAvailable( long timeout )
{
return true;
}
@Override
public void shutdown()
{
clearCaches();
batchInserter.shutdown();
}
/**
 * No-op {@link Transaction}: batch insertion runs as one big implicit
 * transaction, so success/finish/close do nothing, locks are placebos and
 * rollback is impossible.
 */
static class FakeTransaction implements Transaction
{
    @Override
    public void success()
    {
    }

    @Override
    public void failure()
    {
        // Records are written directly to the store; nothing can be rolled back.
        throw new NotInTransactionException( "Batch insert mode, failure is not an option." );
    }

    @Override
    public void finish()
    {
    }

    @Override
    public void close()
    {
    }

    @Override
    public Lock acquireWriteLock( PropertyContainer entity )
    {
        // Locking is meaningless in single-threaded batch mode.
        return PlaceboTransaction.NO_LOCK;
    }

    @Override
    public Lock acquireReadLock( PropertyContainer entity )
    {
        return PlaceboTransaction.NO_LOCK;
    }
}
/**
 * Batch-mode {@link Node} view. Properties live in an in-memory map owned by
 * this wrapper; relationship queries are answered by filtering the id list
 * from the underlying batch inserter. Deletion and the legacy traversal
 * framework are unsupported in batch mode. Equality is by node id only.
 */
private static class NodeBatchImpl implements Node
{
    private final BatchGraphDatabaseImpl graphDbService;
    private final long id;
    // Mutable working copy of this node's properties.
    private final Map<String, Object> properties;
    NodeBatchImpl( long id, BatchGraphDatabaseImpl graphDbService,
            Map<String, Object> properties )
    {
        this.id = id;
        this.graphDbService = graphDbService;
        this.properties = properties;
    }
    @Override
    public GraphDatabaseService getGraphDatabase()
    {
        return graphDbService;
    }
    @Override
    public Relationship createRelationshipTo( Node otherNode,
            RelationshipType type )
    {
        // Write the relationship record, then wrap it with an empty property
        // map and register the wrapper in the database-level cache.
        long relId = graphDbService.getBatchInserter().createRelationship( id,
                otherNode.getId(), type, null );
        RelationshipBatchImpl rel = new RelationshipBatchImpl(
                new BatchRelationship( relId, id, otherNode.getId(), type ),
                graphDbService, emptyProps() );
        graphDbService.addRelationshipToCache( relId, rel );
        return rel;
    }
    // Exposes the backing map itself (not a copy).
    Map<String, Object> getProperties()
    {
        return properties;
    }
    @Override
    public void delete()
    {
        throw unsupportedOperation();
    }
    @Override
    public long getId()
    {
        return id;
    }
    // Builds an iterator over ALL of this node's relationship ids; direction
    // and type filtering happens lazily inside RelIterator.
    private RelIterator newRelIterator( Direction dir,
            RelationshipType[] types )
    {
        Iterable<Long> relIds =
                graphDbService.getBatchInserter().getRelationshipIds( id );
        return new RelIterator( graphDbService, relIds, id, dir, types );
    }
    @Override
    public Iterable<Relationship> getRelationships()
    {
        return newRelIterator( Direction.BOTH, null );
    }
    @Override
    public Iterable<Relationship> getRelationships(
            RelationshipType... types )
    {
        return newRelIterator( Direction.BOTH, types );
    }
    @Override
    public Iterable<Relationship> getRelationships( Direction direction,
            RelationshipType... types )
    {
        return newRelIterator( direction, types );
    }
    @Override
    public Iterable<Relationship> getRelationships( Direction dir )
    {
        return newRelIterator( dir, null );
    }
    @Override
    public Iterable<Relationship> getRelationships( RelationshipType type,
            Direction dir )
    {
        return newRelIterator( dir, new RelationshipType[]{type} );
    }
    @Override
    public Relationship getSingleRelationship( RelationshipType type,
            Direction dir )
    {
        // Returns null when there is no match; throws when there is more
        // than one match.
        Iterator<Relationship> relItr =
                newRelIterator( dir, new RelationshipType[]{type} );
        if ( relItr.hasNext() )
        {
            Relationship rel = relItr.next();
            if ( relItr.hasNext() )
            {
                throw new NotFoundException( "More than one relationship[" +
                        type + ", " + dir + "] found for " + this );
            }
            return rel;
        }
        return null;
    }
    @Override
    public boolean hasRelationship()
    {
        return newRelIterator( Direction.BOTH, null ).hasNext();
    }
    @Override
    public boolean hasRelationship( RelationshipType... types )
    {
        return newRelIterator( Direction.BOTH, types ).hasNext();
    }
    @Override
    public boolean hasRelationship( Direction direction, RelationshipType... types )
    {
        return newRelIterator( direction, types ).hasNext();
    }
    @Override
    public boolean hasRelationship( Direction dir )
    {
        Iterator<Relationship> relItr =
                newRelIterator( dir, null );
        return relItr.hasNext();
    }
    @Override
    public boolean hasRelationship( RelationshipType type, Direction dir )
    {
        return newRelIterator( dir, new RelationshipType[]{type} ).hasNext();
    }
    /* Tentative expansion API
    public Expansion<Relationship> expandAll()
    {
        return Traversal.expanderForAllTypes().expand( this );
    }
    public Expansion<Relationship> expand( RelationshipType type )
    {
        return expand( type, Direction.BOTH );
    }
    public Expansion<Relationship> expand( RelationshipType type,
            Direction direction )
    {
        return Traversal.expanderForTypes( type, direction ).expand(
                this );
    }
    public Expansion<Relationship> expand( Direction direction )
    {
        return Traversal.expanderForAllTypes( direction ).expand(
                this );
    }
    public Expansion<Relationship> expand( RelationshipExpander expander )
    {
        return Traversal.expander( expander ).expand( this );
    }
    */
    @Override
    public Traverser traverse( Order traversalOrder,
            StopEvaluator stopEvaluator,
            ReturnableEvaluator returnableEvaluator,
            RelationshipType relationshipType, Direction direction )
    {
        // Legacy traversal framework is unsupported in batch mode.
        throw unsupportedOperation();
    }
    @Override
    public Traverser traverse( Order traversalOrder,
            StopEvaluator stopEvaluator,
            ReturnableEvaluator returnableEvaluator,
            RelationshipType firstRelationshipType, Direction firstDirection,
            RelationshipType secondRelationshipType, Direction secondDirection )
    {
        throw unsupportedOperation();
    }
    @Override
    public Traverser traverse( Order traversalOrder,
            StopEvaluator stopEvaluator,
            ReturnableEvaluator returnableEvaluator,
            Object... relationshipTypesAndDirections )
    {
        throw unsupportedOperation();
    }
    @Override
    public void addLabel( Label label )
    {
        // Read-modify-write of the full label set; duplicates are avoided
        // by going through a Set.
        Set<Label> labelSet = asSet( graphDbService.batchInserter.getNodeLabels( getId() ) );
        labelSet.add( label );
        Label[] labelIds = new Label[ labelSet.size() ];
        graphDbService.batchInserter.setNodeLabels( getId(), labelSet.toArray( labelIds ) );
    }
    @Override
    public void removeLabel( Label label )
    {
        Set<Label> labelSet = asSet( graphDbService.batchInserter.getNodeLabels( getId() ) );
        labelSet.remove( label );
        Label[] labelIds = new Label[ labelSet.size() ];
        graphDbService.batchInserter.setNodeLabels( getId(), labelSet.toArray( labelIds ) );
    }
    @Override
    public boolean hasLabel( Label label )
    {
        return graphDbService.batchInserter.nodeHasLabel( getId(), label );
    }
    @Override
    public ResourceIterable<Label> getLabels()
    {
        final Iterable<Label> labels = graphDbService.batchInserter.getNodeLabels( getId() );
        return asResourceIterable( labels );
    }
    @Override
    public Object getProperty( String key )
    {
        // Null values are treated as "absent" — the map never stores nulls
        // for valid Neo4j properties.
        Object val = properties.get( key );
        if ( val == null )
        {
            throw new NotFoundException( key );
        }
        return val;
    }
    @Override
    public Object getProperty( String key, Object defaultValue )
    {
        Object val = properties.get( key );
        if ( val == null )
        {
            return defaultValue;
        }
        return val;
    }
    @Override
    public Iterable<String> getPropertyKeys()
    {
        return properties.keySet();
    }
    @Override
    public boolean hasProperty( String key )
    {
        return properties.containsKey( key );
    }
    @Override
    public Object removeProperty( String key )
    {
        Object val = properties.remove( key );
        if ( val == null )
        {
            throw new NotFoundException( "Property " + key );
        }
        return val;
    }
    @Override
    public void setProperty( String key, Object value )
    {
        // Only updates the in-memory map; persisting is handled elsewhere.
        properties.put( key, value );
    }
    @Override
    public boolean equals( Object o )
    {
        // Equality by id against any Node implementation.
        if ( !(o instanceof Node) )
        {
            return false;
        }
        return this.getId() == ((Node) o).getId();
    }
    @Override
    public int hashCode()
    {
        // Same folding as Long.hashCode( id ).
        return (int) (id ^ (id >>> 32));
    }
}
/**
 * Batch-mode {@link Relationship} view over a {@link BatchRelationship}
 * record. Properties live in an in-memory map owned by this wrapper;
 * deletion is unsupported in batch mode. Equality is by relationship id.
 */
private static class RelationshipBatchImpl implements Relationship
{
    private final BatchRelationship rel;
    private final BatchGraphDatabaseImpl graphDbService;
    // Mutable working copy of this relationship's properties.
    private final Map<String, Object> properties;
    RelationshipBatchImpl( BatchRelationship rel,
            BatchGraphDatabaseImpl graphDbService, Map<String, Object> properties )
    {
        this.rel = rel;
        this.graphDbService = graphDbService;
        this.properties = properties;
    }
    @Override
    public GraphDatabaseService getGraphDatabase()
    {
        return graphDbService;
    }
    // Exposes the backing map itself (not a copy).
    Map<String, Object> getProperties()
    {
        return properties;
    }
    @Override
    public void delete()
    {
        throw unsupportedOperation();
    }
    @Override
    public Node getEndNode()
    {
        return graphDbService.getNodeById( rel.getEndNode() );
    }
    @Override
    public long getId()
    {
        return rel.getId();
    }
    @Override
    public Node[] getNodes()
    {
        return new Node[]{getStartNode(), getEndNode()};
    }
    @Override
    public Node getOtherNode( Node node )
    {
        // End node is checked first; for loops either check succeeds.
        Node startNode = getStartNode();
        Node endNode = getEndNode();
        if ( node.equals( endNode ) )
        {
            return startNode;
        }
        if ( node.equals( startNode ) )
        {
            return endNode;
        }
        throw new IllegalArgumentException( "" + node );
    }
    @Override
    public Node getStartNode()
    {
        return graphDbService.getNodeById( rel.getStartNode() );
    }
    @Override
    public RelationshipType getType()
    {
        return rel.getType();
    }
    @Override
    public boolean isType( RelationshipType type )
    {
        // Types are compared by name, not by instance.
        return rel.getType().name().equals( type.name() );
    }
    @Override
    public Object getProperty( String key )
    {
        // Null values are treated as "absent".
        Object val = properties.get( key );
        if ( val == null )
        {
            throw new NotFoundException( key );
        }
        return val;
    }
    @Override
    public Object getProperty( String key, Object defaultValue )
    {
        Object val = properties.get( key );
        if ( val == null )
        {
            return defaultValue;
        }
        return val;
    }
    @Override
    public Iterable<String> getPropertyKeys()
    {
        return properties.keySet();
    }
    @Override
    public boolean hasProperty( String key )
    {
        return properties.containsKey( key );
    }
    @Override
    public Object removeProperty( String key )
    {
        Object val = properties.remove( key );
        if ( val == null )
        {
            throw new NotFoundException( "Property " + key );
        }
        return val;
    }
    @Override
    public void setProperty( String key, Object value )
    {
        // Only updates the in-memory map; persisting is handled elsewhere.
        properties.put( key, value );
    }
    @Override
    public boolean equals( Object o )
    {
        // Equality by id against any Relationship implementation.
        if ( !(o instanceof Relationship) )
        {
            return false;
        }
        return this.getId() == ((Relationship) o).getId();
    }
    @Override
    public int hashCode()
    {
        // Same folding as Long.hashCode( rel.getId() ).
        return (int) (rel.getId() ^ (rel.getId() >>> 32));
    }
}
// Registers a freshly created relationship wrapper in the id -> wrapper cache.
void addRelationshipToCache( long id, RelationshipBatchImpl rel )
{
    rels.put( id, rel );
}
/**
 * Lazily filters a node's relationship ids by direction and (optionally)
 * relationship type. Acts both as the {@link Iterable} (each call to
 * {@link #iterator()} restarts from the full id list) and as the
 * {@link Iterator} itself. {@link #remove()} is unsupported.
 */
static class RelIterator implements
    Iterable<Relationship>, Iterator<Relationship>
{
    private final BatchGraphDatabaseImpl graphDbService;
    private final Iterable<Long> relIds;
    private final Iterator<Long> relItr;
    private final long nodeId;
    private final Direction dir;
    // null means "match every type".
    private final RelationshipType[] types;
    // Look-ahead slot: filled by getNextElement(), drained by next().
    private Relationship nextElement;
    RelIterator( BatchGraphDatabaseImpl graphDbService, Iterable<Long> relIds,
            long nodeId, Direction dir, RelationshipType[] types )
    {
        this.graphDbService = graphDbService;
        this.relIds = relIds;
        this.relItr = relIds.iterator();
        this.nodeId = nodeId;
        this.dir = dir;
        this.types = types;
    }
    @Override
    public Iterator<Relationship> iterator()
    {
        // Fresh iterator so the Iterable can be traversed repeatedly.
        return new RelIterator( graphDbService, relIds, nodeId, dir, types );
    }
    @Override
    public boolean hasNext()
    {
        getNextElement();
        return nextElement != null;
    }
    @Override
    public Relationship next()
    {
        getNextElement();
        if ( nextElement == null )
        {
            throw new NoSuchElementException();
        }
        Relationship returnVal = nextElement;
        nextElement = null;
        return returnVal;
    }
    // Advances the underlying id iterator until a relationship passes the
    // direction and type filters, leaving it in nextElement (or leaving
    // nextElement null when the ids are exhausted).
    private void getNextElement()
    {
        while ( nextElement == null && relItr.hasNext() )
        {
            Relationship possibleRel =
                    graphDbService.getRelationshipById( relItr.next() );
            // NOTE(review): a loop relationship (start == end == nodeId) is
            // filtered out for both OUTGOING and INCOMING here — confirm
            // that is the intended semantics before changing it.
            if ( dir == Direction.OUTGOING &&
                    possibleRel.getEndNode().getId() == nodeId )
            {
                continue;
            }
            if ( dir == Direction.INCOMING &&
                    possibleRel.getStartNode().getId() == nodeId )
            {
                continue;
            }
            if ( types == null || matchesAnyType( possibleRel ) )
            {
                nextElement = possibleRel;
            }
        }
    }
    // True if the relationship's type name equals any of the requested types.
    private boolean matchesAnyType( Relationship candidate )
    {
        for ( RelationshipType type : types )
        {
            if ( type.name().equals( candidate.getType().name() ) )
            {
                return true;
            }
        }
        return false;
    }
    @Override
    public void remove()
    {
        throw new UnsupportedOperationException();
    }
}
// Drops every cached node and relationship wrapper (used on shutdown).
void clearCaches()
{
    nodes.clear();
    rels.clear();
}
@Override
public KernelEventHandler registerKernelEventHandler(
        KernelEventHandler handler )
{
    // Event handlers are not supported in batch insert mode.
    throw new UnsupportedOperationException();
}
@Override
public <T> TransactionEventHandler<T> registerTransactionEventHandler(
        TransactionEventHandler<T> handler )
{
    // There are no real transactions in batch mode, hence no handlers.
    throw new UnsupportedOperationException();
}
@Override
public KernelEventHandler unregisterKernelEventHandler(
        KernelEventHandler handler )
{
    // Event handlers are not supported in batch insert mode.
    throw new UnsupportedOperationException();
}
@Override
public Schema schema()
{
    // Schema changes must be made through the BatchInserter API directly.
    throw new UnsupportedOperationException(
            "Schema modification is currently not available through the BatchDatabase API." );
}
@Override
public <T> TransactionEventHandler<T> unregisterTransactionEventHandler(
        TransactionEventHandler<T> handler )
{
    // There are no real transactions in batch mode, hence no handlers.
    throw new UnsupportedOperationException();
}
@Override
public IndexManager index()
{
    // Legacy index access is not supported in batch insert mode.
    throw new UnsupportedOperationException();
}
@Override
public TraversalDescription traversalDescription()
{
    // The traversal framework is not supported in batch insert mode.
    throw new UnsupportedOperationException();
}
@Override
public BidirectionalTraversalDescription bidirectionalTraversalDescription()
{
    // The traversal framework is not supported in batch insert mode.
    throw new UnsupportedOperationException();
}
@Override
public ResourceIterable<Node> findNodesByLabelAndProperty( Label myLabel, String key, Object value )
{
    // Index-backed lookups are not supported in batch insert mode.
    throw new UnsupportedOperationException();
}
}
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchGraphDatabaseImpl.java
|
1,929
|
public class GlobalGraphOperationsIT
{
@Rule public ImpermanentDatabaseRule dbRule = new ImpermanentDatabaseRule();
@Test
public void shouldListAllPropertyKeys() throws Exception
{
// Given
GraphDatabaseService db = dbRule.getGraphDatabaseAPI();
try( Transaction tx = db.beginTx() )
{
db.createNode().setProperty( "myProperty", 12);
tx.success();
}
GlobalGraphOperations gg = GlobalGraphOperations.at( db );
// When
try( Transaction _ = db.beginTx() )
{
assertThat( toList( gg.getAllPropertyKeys() ), equalTo( asList( "myProperty" ) ) );
}
}
}
| false
|
community_kernel_src_test_java_org_neo4j_tooling_GlobalGraphOperationsIT.java
|
1,930
|
{
@Override
public Node apply( long nodeId )
{
return nodeManager.getNodeById( nodeId );
}
}, nodeIds ) );
| false
|
community_kernel_src_main_java_org_neo4j_tooling_GlobalGraphOperations.java
|
1,931
|
{
@Override
public ResourceIterator<Node> iterator()
{
return allNodesWithLabel( label.name() );
}
};
| false
|
community_kernel_src_main_java_org_neo4j_tooling_GlobalGraphOperations.java
|
1,932
|
return ResourceClosingIterator.newResourceIterator( statement, map( new Function<Token, String>() {
@Override
public String apply( Token propertyToken )
{
return propertyToken.name();
}
}, statement.readOperations().propertyKeyGetAllTokens() ) );
| false
|
community_kernel_src_main_java_org_neo4j_tooling_GlobalGraphOperations.java
|
1,933
|
{
@Override
public ResourceIterator<String> iterator()
{
Statement statement = statementCtxProvider.instance();
return ResourceClosingIterator.newResourceIterator( statement, map( new Function<Token, String>() {
@Override
public String apply( Token propertyToken )
{
return propertyToken.name();
}
}, statement.readOperations().propertyKeyGetAllTokens() ) );
}
};
| false
|
community_kernel_src_main_java_org_neo4j_tooling_GlobalGraphOperations.java
|
1,934
|
{
@Override
public Label apply( Token labelToken )
{
return label( labelToken.name() );
}
}, statement.readOperations().labelsGetAllTokens() ) );
| false
|
community_kernel_src_main_java_org_neo4j_tooling_GlobalGraphOperations.java
|
1,935
|
{
@Override
public ResourceIterator<Label> iterator()
{
Statement statement = statementCtxProvider.instance();
return ResourceClosingIterator.newResourceIterator( statement, map( new Function<Token, Label>()
{
@Override
public Label apply( Token labelToken )
{
return label( labelToken.name() );
}
}, statement.readOperations().labelsGetAllTokens() ) );
}
};
| false
|
community_kernel_src_main_java_org_neo4j_tooling_GlobalGraphOperations.java
|
1,936
|
{
@Override
public Iterator<Relationship> iterator()
{
return nodeManager.getAllRelationships();
}
};
| false
|
community_kernel_src_main_java_org_neo4j_tooling_GlobalGraphOperations.java
|
1,937
|
/**
 * Schema actions usable while batch inserting: index and uniqueness
 * constraint CREATION are supported (writing the underlying rules directly);
 * dropping definitions and user-facing error messages are not.
 */
private class BatchSchemaActions implements InternalSchemaActions
{
    @Override
    public IndexDefinition createIndexDefinition( Label label, String propertyKey )
    {
        // Writes the index rule immediately; population happens at startup
        // of a regular database.
        createIndexRule( label, propertyKey );
        return new IndexDefinitionImpl( this, label, propertyKey, false );
    }
    @Override
    public void dropIndexDefinitions( Label label, String propertyKey )
    {
        throw unsupportedException();
    }
    @Override
    public ConstraintDefinition createPropertyUniquenessConstraint( Label label, String propertyKey )
    {
        // Tokens are created on demand before the constraint rule is written.
        int labelId = getOrCreateLabelId( label.name() );
        int propertyKeyId = getOrCreatePropertyKeyId( propertyKey );
        createConstraintRule( new UniquenessConstraint( labelId, propertyKeyId ) );
        return new PropertyUniqueConstraintDefinition( this, label, propertyKey );
    }
    @Override
    public void dropPropertyUniquenessConstraint( Label label, String propertyKey )
    {
        throw unsupportedException();
    }
    @Override
    public String getUserMessage( KernelException e )
    {
        throw unsupportedException();
    }
    @Override
    public void assertInTransaction()
    {
        // BatchInserterImpl always is expected to be running in one big single "transaction"
    }
    // Shared factory for the "not supported by the batch inserter" error.
    private UnsupportedOperationException unsupportedException()
    {
        return new UnsupportedOperationException( "Batch inserter doesn't support this" );
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchInserterImpl.java
|
1,938
|
{
@Override
public NeoStore evaluate()
{
return neoStore;
}
} );
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchInserterImpl.java
|
1,939
|
STRING_ARRAY( String.class, "String[]", String[].class )
{
@Override
<T> T apply( ValueFormatter<T> formatter, Object value )
{
return formatter.formatStringArray( ( String[] ) value );
}
},
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_PropertyType.java
|
1,940
|
/**
 * Streams node label updates into the label scan store during initial
 * population. The writer must be released via {@link #close()} when the
 * scan is done.
 */
private class InitialNodeLabelCreationVisitor implements Visitor<NodeLabelUpdate, IOException>
{
    LabelScanWriter writer = labelScanStore.newWriter();
    @Override
    public boolean visit( NodeLabelUpdate update ) throws IOException
    {
        writer.write( update );
        // Keep visiting; the scan is never aborted early.
        return true;
    }
    public void close() throws IOException
    {
        writer.close();
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchInserterImpl.java
|
1,941
|
STRING( null, "String", String.class )
{
@Override
<T> T apply( ValueFormatter<T> formatter, Object value )
{
return formatter.formatString( ( String ) value );
}
},
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_PropertyType.java
|
1,942
|
/**
 * Verifies that a schema index created through the batch inserter is backed
 * by the Lucene index provider and comes up ONLINE when the store is later
 * opened as a regular embedded database.
 */
public class TestLuceneSchemaBatchInsertIT
{
    @Test
    public void shouldLoadAndUseLuceneProvider() throws Exception
    {
        // GIVEN a store with a deferred schema index on :Person(name)
        String storeDir = forTest( getClass() ).makeGraphDbDir().getAbsolutePath();
        BatchInserter inserter = inserter( storeDir );
        inserter.createDeferredSchemaIndex( LABEL ).on( "name" ).create();
        // WHEN a labeled node is inserted and the inserter is shut down
        inserter.createNode( map( "name", "Mattias" ), LABEL );
        inserter.shutdown();
        // THEN reopening as a normal database resolves the Lucene provider
        GraphDatabaseFactory graphDatabaseFactory = new GraphDatabaseFactory();
        GraphDatabaseAPI db = (GraphDatabaseAPI) graphDatabaseFactory.newEmbeddedDatabase( storeDir );
        DependencyResolver dependencyResolver = db.getDependencyResolver();
        SchemaIndexProvider schemaIndexProvider = dependencyResolver.resolveDependency(
                SchemaIndexProvider.class,
                SchemaIndexProvider.HIGHEST_PRIORITIZED_OR_NONE );
        // assert the indexProvider is a Lucene one
        try ( Transaction ignore = db.beginTx() )
        {
            IndexDefinition indexDefinition = single( db.schema().getIndexes( LABEL ) );
            assertThat( db.schema().getIndexState( indexDefinition ), is( Schema.IndexState.ONLINE ) );
            assertThat( schemaIndexProvider, instanceOf( LuceneSchemaIndexProvider.class ) );
        }
        // CLEANUP
        db.shutdown();
    }
    private static final Label LABEL = label( "Person" );
}
| false
|
community_lucene-index_src_test_java_org_neo4j_unsafe_batchinsert_TestLuceneSchemaBatchInsertIT.java
|
1,943
|
/**
 * Immutable setting/value pair used to pass database configuration around
 * in tests.
 */
private static class ConfigurationParameter
{
    private final Setting<?> key;
    private final String value;
    public ConfigurationParameter( Setting<?> key, String value )
    {
        this.key = key;
        this.value = value;
    }
}
| false
|
community_lucene-index_src_test_java_org_neo4j_unsafe_batchinsert_TestLuceneBatchInsert.java
|
1,944
|
{
@Override
public boolean accept( KernelExtensionFactory extension )
{
if ( extension instanceof InMemoryLabelScanStoreExtension ||
extension instanceof InMemoryIndexProviderFactory )
{
return false;
}
return true;
}
};
| false
|
community_lucene-index_src_test_java_org_neo4j_unsafe_batchinsert_TestLuceneBatchInsert.java
|
1,945
|
public class TestLuceneBatchInsert
{
// Exercises the batch-inserter node index end to end: add entries, read
// them back via get/query, then verify the same index is visible and
// queryable after switching to a regular GraphDatabaseService.
@Test
public void testSome() throws Exception
{
    BatchInserterIndexProvider provider = new LuceneBatchInserterIndexProviderNewImpl( inserter );
    String indexName = "users";
    BatchInserterIndex index = provider.nodeIndex( indexName, EXACT_CONFIG );
    Map<Integer, Long> ids = new HashMap<>();
    int count = 5;
    for ( int i = 0; i < count; i++ )
    {
        long id = inserter.createNode( null );
        index.add( id, map( "name", "Joe" + i, "other", "Schmoe" ) );
        ids.put( i, id );
    }
    for ( int i = 0; i < count; i++ )
    {
        assertContains( index.get( "name", "Joe" + i ), ids.get( i ) );
    }
    assertContains( index.query( "name:Joe0 AND other:Schmoe" ), ids.get( 0 ) );
    assertContains( index.query( "name", "Joe*" ), ids.values().toArray( new Long[ids.size()] ) );
    provider.shutdown();
    // Reopen the same store as a normal database and query the index there.
    switchToGraphDatabaseService();
    try ( Transaction transaction = db.beginTx() )
    {
        IndexManager indexManager = db.index();
        assertFalse( indexManager.existsForRelationships( indexName ) );
        assertTrue( indexManager.existsForNodes( indexName ) );
        assertNotNull( indexManager.forNodes( indexName ) );
        Index<Node> dbIndex = db.index().forNodes( "users" );
        for ( int i = 0; i < count; i++ )
        {
            assertContains( dbIndex.get( "name", "Joe" + i ), db.getNodeById( ids.get( i ) ) );
        }
        Collection<Node> nodes = new ArrayList<>();
        for ( long id : ids.values() )
        {
            nodes.add( db.getNodeById( id ) );
        }
        assertContains( dbIndex.query( "name", "Joe*" ), nodes.toArray( new Node[nodes.size()] ) );
        assertContains( dbIndex.query( "name:Joe0 AND other:Schmoe" ), db.getNodeById( ids.get( 0 ) ) );
        transaction.success();
    }
}
// Verifies fulltext-configured batch indexes: case-insensitive and
// tokenized matching, wildcard queries, array values, and visibility of
// the index after switching to a regular database.
@Test
public void testFulltext()
{
    BatchInserterIndexProvider provider = new LuceneBatchInserterIndexProviderNewImpl( inserter );
    String name = "users";
    BatchInserterIndex index = provider.nodeIndex( name, stringMap( "type", "fulltext" ) );
    long id1 = inserter.createNode( null );
    index.add( id1, map( "name", "Mattias Persson", "email", "something@somewhere", "something", "bad" ) );
    long id2 = inserter.createNode( null );
    index.add( id2, map( "name", "Lars PerssoN" ) );
    index.flush();
    assertContains( index.get( "name", "Mattias Persson" ), id1 );
    assertContains( index.query( "name", "mattias" ), id1 );
    assertContains( index.query( "name", "bla" ) );
    assertContains( index.query( "name", "persson" ), id1, id2 );
    assertContains( index.query( "email", "*@*" ), id1 );
    assertContains( index.get( "something", "bad" ), id1 );
    // Array values index every element individually.
    long id3 = inserter.createNode( null );
    index.add( id3, map( "name", new String[] { "What Ever", "Anything" } ) );
    index.flush();
    assertContains( index.get( "name", "What Ever" ), id3 );
    assertContains( index.get( "name", "Anything" ), id3 );
    provider.shutdown();
    switchToGraphDatabaseService();
    try ( Transaction transaction = db.beginTx() )
    {
        Index<Node> dbIndex = db.index().forNodes( name );
        Node node1 = db.getNodeById( id1 );
        Node node2 = db.getNodeById( id2 );
        assertContains( dbIndex.query( "name", "persson" ), node1, node2 );
        transaction.success();
    }
}
// Verifies that relationship indexes work in batch mode: an indexed
// relationship can be retrieved back by its indexed key.
@Test
public void testCanIndexRelationships()
{
    BatchInserterIndexProvider indexProvider = new LuceneBatchInserterIndexProviderNewImpl( inserter );
    BatchInserterIndex edgesIndex = indexProvider.relationshipIndex(
            "edgeIndex", stringMap( IndexManager.PROVIDER, "lucene", "type", "exact" ) );
    long nodeId1 = inserter.createNode( map( "ID", "1" ) );
    long nodeId2 = inserter.createNode( map( "ID", "2" ) );
    long relationshipId = inserter.createRelationship( nodeId1, nodeId2,
            EdgeType.KNOWS, null );
    edgesIndex.add( relationshipId, map( "EDGE_TYPE", EdgeType.KNOWS.name() ) );
    edgesIndex.flush();
    assertEquals(
            String.format( "Should return relationship id" ),
            new Long( relationshipId ),
            edgesIndex.query( "EDGE_TYPE", EdgeType.KNOWS.name() ).getSingle() );
    indexProvider.shutdown();
}
// Regression test: reading from the index right after flush() used to
// trigger a NullPointerException.
@Test
public void triggerNPEAfterFlush()
{
    BatchInserterIndexProvider provider = new LuceneBatchInserterIndexProviderNewImpl( inserter );
    BatchInserterIndex index = provider.nodeIndex( "Node-exact", EXACT_CONFIG );
    Map<String, Object> map = map( "name", "Something" );
    long node = inserter.createNode( map );
    index.add( node, map );
    index.flush();
    assertContains( index.get( "name", "Something" ), node );
    provider.shutdown();
}
// Verifies numeric range queries on batch-indexed values, both in batch
// mode and after reopening the store as a regular database.
@Test
public void testNumericValues()
{
    BatchInserterIndexProvider provider = new LuceneBatchInserterIndexProviderNewImpl( inserter );
    BatchInserterIndex index = provider.nodeIndex( "mine", EXACT_CONFIG );
    long node1 = inserter.createNode( null );
    index.add( node1, map( "number", numeric( 45 ) ) );
    long node2 = inserter.createNode( null );
    index.add( node2, map( "number", numeric( 21 ) ) );
    assertContains( index.query( "number",
            newIntRange( "number", 21, 50, true, true ) ), node1, node2 );
    provider.shutdown();
    switchToGraphDatabaseService();
    try ( Transaction transaction = db.beginTx() )
    {
        Node n1 = db.getNodeById( node1 );
        db.getNodeById( node2 );
        Index<Node> idx = db.index().forNodes( "mine" );
        // Exclusive lower bound: 21 is excluded, only 45 matches.
        assertContains( idx.query( "number", newIntRange( "number", 21, 45, false, true ) ), n1 );
        transaction.success();
    }
}
// Verifies numeric range queries against ARRAY values: an entity matches a
// range if any element of its array falls inside it. Checked both in batch
// mode and after reopening as a regular database.
@Test
public void testNumericValueArrays()
{
    BatchInserterIndexProvider provider = new LuceneBatchInserterIndexProviderNewImpl( inserter );
    BatchInserterIndex batchIndex = provider.nodeIndex( "mine", EXACT_CONFIG );
    long nodeId1 = inserter.createNode( null );
    batchIndex.add( nodeId1, map( "number", new ValueContext[]{ numeric( 45 ), numeric( 98 ) } ) );
    long nodeId2 = inserter.createNode( null );
    batchIndex.add( nodeId2, map( "number", new ValueContext[]{ numeric( 47 ), numeric( 100 ) } ) );
    IndexHits<Long> batchIndexResult1 = batchIndex.query( "number", newIntRange( "number", 47, 98, true, true ) );
    assertThat( batchIndexResult1, contains(nodeId1, nodeId2));
    assertThat( batchIndexResult1.size(), is( 2 ));
    IndexHits<Long> batchIndexResult2 = batchIndex.query( "number", newIntRange( "number", 44, 46, true, true ) );
    assertThat( batchIndexResult2, contains(nodeId1));
    assertThat( batchIndexResult2.size(), is( 1 ) );
    IndexHits<Long> batchIndexResult3 = batchIndex.query( "number", newIntRange( "number", 99, 101, true, true ) );
    assertThat( batchIndexResult3, contains( nodeId2 ) );
    assertThat( batchIndexResult3.size(), is( 1 ) );
    // Exclusive bounds on both ends: neither 47 nor 98 matches.
    IndexHits<Long> batchIndexResult4 = batchIndex.query( "number", newIntRange( "number", 47, 98, false, false ) );
    assertThat( batchIndexResult4, isEmpty() );
    provider.shutdown();
    switchToGraphDatabaseService();
    try ( Transaction transaction = db.beginTx() )
    {
        Node node1 = db.getNodeById( nodeId1 );
        Node node2 = db.getNodeById( nodeId2 );
        Index<Node> index = db.index().forNodes( "mine" );
        IndexHits<Node> indexResult1 = index.query( "number", newIntRange( "number", 47, 98, true, true ) );
        assertThat(indexResult1, contains(node1, node2));
        assertThat( indexResult1.size(), is( 2 ));
        IndexHits<Node> indexResult2 = index.query( "number", newIntRange( "number", 44, 46, true, true ) );
        assertThat(indexResult2, contains(node1));
        assertThat( indexResult2.size(), is( 1 ) );
        IndexHits<Node> indexResult3 = index.query( "number", newIntRange( "number", 99, 101, true, true ) );
        assertThat( indexResult3, contains( node2 ) );
        assertThat( indexResult3.size(), is( 1 ) );
        IndexHits<Node> indexResult4 = index.query( "number", newIntRange( "number", 47, 98, false, false ) );
        assertThat( indexResult4, isEmpty() );
        transaction.success();
    }
}
// A numeric value added as a plain Long should be retrievable both by the
// number and by its string form.
@Test
public void indexNumbers() throws Exception
{
    BatchInserterIndexProvider provider = new LuceneBatchInserterIndexProviderNewImpl( inserter );
    BatchInserterIndex index = provider.nodeIndex( "mine", EXACT_CONFIG );
    long id = inserter.createNode( null );
    Map<String, Object> props = new HashMap<>();
    props.put( "key", 123L );
    index.add( id, props );
    index.flush();
    assertEquals( 1, index.get( "key", 123L ).size() );
    assertEquals( 1, index.get( "key", "123" ).size() );
    provider.shutdown();
}
// An index created in batch mode under the auto-index name should be picked
// up and extended by the auto-indexer once the store is opened embedded.
@Test
public void shouldCreateAutoIndexThatIsUsableInEmbedded() throws Exception
{
    BatchInserterIndexProvider provider = new LuceneBatchInserterIndexProviderNewImpl( inserter );
    BatchInserterIndex index = provider.nodeIndex( "node_auto_index", EXACT_CONFIG );
    long id = inserter.createNode( null );
    Map<String, Object> props = new HashMap<>();
    props.put( "name", "peter" );
    index.add( id, props );
    index.flush();
    provider.shutdown();
    shutdownInserter();
    // Reopen with auto-indexing switched on for the "name" key.
    switchToGraphDatabaseService( configure( GraphDatabaseSettings.node_keys_indexable, "name" ),
            configure( GraphDatabaseSettings.relationship_keys_indexable, "relProp1,relProp2" ),
            configure( GraphDatabaseSettings.node_auto_indexing, "true" ),
            configure( GraphDatabaseSettings.relationship_auto_indexing, "true" ) );
    try ( Transaction tx = db.beginTx() )
    {
        // Create the primitives
        Node node1 = db.createNode();
        // Add indexable and non-indexable properties
        node1.setProperty( "name", "bob" );
        // Make things persistent
        tx.success();
    }
    try ( Transaction tx = db.beginTx() )
    {
        // Both the batch-inserted and the auto-indexed entries are visible.
        assertTrue( db.index().getNodeAutoIndexer().getAutoIndex().get( "name", "peter" ).hasNext() );
        assertTrue( db.index().getNodeAutoIndexer().getAutoIndex().get( "name", "bob" ).hasNext() );
        assertFalse( db.index().getNodeAutoIndexer().getAutoIndex().get( "name", "joe" ).hasNext() );
        tx.success();
    }
}
// Pins down updateOrAdd() semantics around flush(): updates are only
// visible to get() after the next flush, and old values disappear then.
@Test
public void addOrUpdateFlushBehaviour() throws Exception
{
    BatchInserterIndexProvider provider = new LuceneBatchInserterIndexProviderNewImpl( inserter );
    BatchInserterIndex index = provider.nodeIndex( "update", EXACT_CONFIG );
    long id = inserter.createNode( null );
    Map<String, Object> props = new HashMap<>();
    props.put( "key", "value" );
    index.add( id, props );
    index.updateOrAdd( id, props );
    assertEquals( 1, index.get( "key", "value" ).size() );
    index.flush();
    props.put( "key", "value2" );
    index.updateOrAdd( id, props );
    index.flush();
    // After flushing the update, the old value is gone.
    assertEquals( 1, index.get( "key", "value2" ).size() );
    assertEquals( 0, index.get( "key", "value" ).size() );
    props.put( "key2", "value2" );
    props.put( "key", "value" );
    index.updateOrAdd( id, props );
    // Not yet flushed, so the new key is not visible.
    assertEquals( 0, index.get( "key2", "value2" ).size() );
    index.flush();
    assertEquals( 1, index.get( "key2", "value2" ).size() );
    assertEquals( 1, index.get( "key", "value" ).size() );
    long id2 = inserter.createNode( null );
    props = new HashMap<>();
    props.put("2key","value");
    index.updateOrAdd( id2, props );
    props.put("2key","value2");
    props.put("2key2","value3");
    index.updateOrAdd( id2, props );
    index.flush();
    assertEquals( 1, index.get( "2key", "value2" ).size() );
    provider.shutdown();
}
// A custom analyzer class can be configured by name; adding an entry must
// not fail.
@Test
public void useStandardAnalyzer() throws Exception
{
    BatchInserterIndexProvider provider = new LuceneBatchInserterIndexProviderNewImpl( inserter );
    BatchInserterIndex index = provider.nodeIndex( "myindex",
            stringMap( "analyzer", MyStandardAnalyzer.class.getName() ) );
    index.add( 0, map( "name", "Mattias" ) );
    provider.shutdown();
}
// Only keys with an explicitly configured cache capacity should have their
// caches populated on add(); other keys stay uncached — checked across two
// independently configured indexes.
@Test
public void cachesShouldBeFilledWhenAddToMultipleIndexesCreatedNow() throws Exception
{
    BatchInserterIndexProvider provider = new LuceneBatchInserterIndexProvider( inserter );
    BatchInserterIndex index = provider.nodeIndex( "index1", LuceneIndexImplementation.EXACT_CONFIG );
    index.setCacheCapacity( "name", 100000 );
    String nameKey = "name";
    String titleKey = "title";
    assertCacheIsEmpty( index, nameKey, titleKey );
    index.add( 0, map( "name", "Neo", "title", "Matrix" ) );
    assertCacheContainsSomething( index, nameKey );
    assertCacheIsEmpty( index, titleKey );
    BatchInserterIndex index2 = provider.nodeIndex( "index2", LuceneIndexImplementation.EXACT_CONFIG );
    index2.setCacheCapacity( "title", 100000 );
    assertCacheIsEmpty( index2, nameKey, titleKey );
    index2.add( 0, map( "name", "Neo", "title", "Matrix" ) );
    assertCacheContainsSomething( index2, titleKey );
    assertCacheIsEmpty( index2, nameKey );
    provider.shutdown();
}
// When an index already exists on disk, add() must not populate the cache
// right away (doing so could serve results inconsistent with the on-disk
// state), yet lookups must still find the newly added entry.
@Test
public void cachesDoesntGetFilledWhenAddingForAnExistingIndex() throws Exception
{
    // Prepare the test case, i.e. create a store with a populated index.
    BatchInserterIndexProvider provider = new LuceneBatchInserterIndexProvider( inserter );
    String indexName = "index";
    BatchInserterIndex index = provider.nodeIndex( indexName, LuceneIndexImplementation.EXACT_CONFIG );
    String key = "name";
    index.add( 0, map( key, "Mattias" ) );
    provider.shutdown();
    shutdownInserter();
    // Test so that the next run doesn't start caching inserted stuff right away,
    // because that would lead to invalid results being returned.
    startInserter();
    provider = new LuceneBatchInserterIndexProvider( inserter );
    index = provider.nodeIndex( indexName, LuceneIndexImplementation.EXACT_CONFIG );
    index.setCacheCapacity( key, 100000 );
    assertCacheIsEmpty( index, key );
    index.add( 1, map( key, "Persson" ) );
    assertCacheIsEmpty( index, key );
    assertEquals( 1, index.get( key, "Persson" ).getSingle().intValue() );
    provider.shutdown();
}
/**
 * Verifies that an index created in one batch-insertion session is not dropped
 * by a later session that never touches it: both indexes must still exist when
 * the store is opened as a regular graph database.
 */
@Test
public void shouldKeepAroundUnusedIndexesAfterConsecutiveInsertion() throws Exception
{
    // GIVEN -- a batch insertion creating two indexes
    String indexName1 = "first", indexName2 = "second", key = "name";
    {
        BatchInserterIndexProvider provider = new LuceneBatchInserterIndexProvider( inserter );
        BatchInserterIndex index1 = provider.nodeIndex( indexName1, LuceneIndexImplementation.EXACT_CONFIG );
        index1.add( 0, map( key, "Mattias" ) );
        // BUGFIX: this used to request indexName1 again, so the "second" index
        // was never created in the GIVEN phase, contradicting the setup comment
        // above and weakening what the test exercises.
        BatchInserterIndex index2 = provider.nodeIndex( indexName2, LuceneIndexImplementation.EXACT_CONFIG );
        index2.add( 0, map( key, "Mattias" ) );
        provider.shutdown();
        shutdownInserter();
    }
    // WHEN -- doing a second insertion, only adding to the second index
    {
        startInserter();
        BatchInserterIndexProvider provider = new LuceneBatchInserterIndexProvider( inserter );
        BatchInserterIndex index2 = provider.nodeIndex( indexName2, LuceneIndexImplementation.EXACT_CONFIG );
        index2.add( 1, map( key, "Mattias" ) );
        provider.shutdown();
        shutdownInserter();
    }
    // THEN -- both indexes should exist when starting up in "graph mode";
    // in particular the first index, untouched by the second insertion,
    // must not have been dropped.
    {
        switchToGraphDatabaseService();
        try ( Transaction transaction = db.beginTx() )
        {
            assertTrue( indexName1 + " should exist", db.index().existsForNodes( indexName1 ) );
            assertTrue( indexName2 + " should exist", db.index().existsForNodes( indexName2 ) );
            transaction.success();
        }
    }
}
/** Relationship type used by relationship-creating tests in this class. */
private enum EdgeType implements RelationshipType
{
    KNOWS
}

/**
 * Asserts that the index's internal result cache holds at least one entry
 * for each of the given keys, i.e. that recent operations populated it.
 */
private void assertCacheContainsSomething( BatchInserterIndex index, String... keys )
{
    Map<String, LruCache<String, Collection<Long>>> cache = getIndexCache( index );
    for ( String key : keys )
    {
        assertTrue( cache.get( key ).size() > 0 );
    }
}

/**
 * Asserts that the index's internal result cache holds nothing for the given
 * keys -- either no cache was created for the key, or it is empty.
 */
private void assertCacheIsEmpty( BatchInserterIndex index, String... keys )
{
    Map<String, LruCache<String, Collection<Long>>> cache = getIndexCache( index );
    for ( String key : keys )
    {
        LruCache<String, Collection<Long>> keyCache = cache.get( key );
        assertTrue( keyCache == null || keyCache.size() == 0 );
    }
}

/**
 * Reflectively grabs the private per-key result cache of the
 * {@link BatchInserterIndex} implementation, enabling white-box assertions
 * on caching behavior. Assumes the implementation has a field named "cache".
 */
@SuppressWarnings( "unchecked" )
private Map<String, LruCache<String, Collection<Long>>> getIndexCache( BatchInserterIndex index )
{
    try
    {
        Field field = index.getClass().getDeclaredField( "cache" );
        field.setAccessible( true );
        return (Map<String, LruCache<String, Collection<Long>>>) field.get( index );
    }
    catch ( Exception e )
    {
        throw launderedException( e );
    }
}

// Store directory shared by the batch inserter and the graph database.
private final String storeDir = TargetDirectory.forTest( getClass() ).makeGraphDbDir().getAbsolutePath();
// Active batch inserter, or null after shutdownInserter().
private BatchInserter inserter;
// Graph database started by switchToGraphDatabaseService(), or null.
private GraphDatabaseService db;
/**
 * Starts a fresh batch inserter on {@link #storeDir} before each test,
 * loading only the real Lucene kernel extensions (the in-memory test
 * providers are filtered out so Lucene is actually exercised).
 */
@SuppressWarnings( { "rawtypes", "unchecked" } )
@Before
public void startInserter() throws Exception
{
    Iterable filteredKernelExtensions = filter( onlyRealLuceneExtensions(),
            Service.load( KernelExtensionFactory.class ) );
    inserter = inserter( storeDir, new DefaultFileSystemAbstraction(), stringMap(),
            filteredKernelExtensions );
}

/**
 * Predicate excluding the in-memory label scan store and in-memory index
 * provider, leaving only the production (Lucene) extensions.
 */
@SuppressWarnings( "rawtypes" )
private Predicate<? super KernelExtensionFactory> onlyRealLuceneExtensions()
{
    return new Predicate<KernelExtensionFactory>()
    {
        @Override
        public boolean accept( KernelExtensionFactory extension )
        {
            if ( extension instanceof InMemoryLabelScanStoreExtension ||
                    extension instanceof InMemoryIndexProviderFactory )
            {
                return false;
            }
            return true;
        }
    };
}

/**
 * Shuts down the batch inserter and opens the same store as an embedded
 * graph database, applying the given configuration overrides.
 */
private void switchToGraphDatabaseService( ConfigurationParameter... config )
{
    shutdownInserter();
    GraphDatabaseBuilder builder = new GraphDatabaseFactory().newEmbeddedDatabaseBuilder( storeDir );
    for ( ConfigurationParameter configurationParameter : config )
    {
        builder = builder.setConfig( configurationParameter.key, configurationParameter.value );
    }
    db = builder.newGraphDatabase();
}

/** Factory for a single key/value configuration override. */
private static ConfigurationParameter configure( Setting<?> key, String value )
{
    return new ConfigurationParameter( key, value );
}

/** Immutable key/value pair used to configure the graph database builder. */
private static class ConfigurationParameter
{
    private final Setting<?> key;
    private final String value;

    public ConfigurationParameter( Setting<?> key, String value )
    {
        this.key = key;
        this.value = value;
    }
}

/** Shuts down the batch inserter if it is running; idempotent. */
private void shutdownInserter()
{
    if ( inserter != null )
    {
        inserter.shutdown();
        inserter = null;
    }
}

/** After-test cleanup: stops whichever of inserter/database is still running. */
@After
public void shutdown()
{
    shutdownInserter();
    if ( db != null )
    {
        db.shutdown();
        db = null;
    }
}
}
| false
|
community_lucene-index_src_test_java_org_neo4j_unsafe_batchinsert_TestLuceneBatchInsert.java
|
1,946
|
{
@Override
public void write( NodeLabelUpdate update ) throws IOException
{
addToCollection( Collections.singletonList( update ).iterator(), allUpdates );
}
@Override
public void close() throws IOException
{
}
};
| false
|
community_kernel_src_test_java_org_neo4j_unsafe_batchinsert_TestBatchInsert.java
|
1,947
|
/**
 * Test double for {@link LabelScanStore} that records every
 * {@link NodeLabelUpdate} written through its writers and counts how many
 * writers were created, so tests can assert on what the batch inserter
 * flushed. All lifecycle and reader methods are no-ops.
 */
private static class UpdateTrackingLabelScanStore implements LabelScanStore
{
    private final List<NodeLabelUpdate> allUpdates = new ArrayList<>();
    int writersCreated;

    // NOTE: the historical typo in this method name ("Recived") is kept on
    // purpose -- callers elsewhere in this file reference it by this name.
    public void assertRecivedUpdate( long node, long... labels )
    {
        for ( NodeLabelUpdate update : allUpdates )
        {
            if ( update.getNodeId() == node &&
                    Arrays.equals( update.getLabelsAfter(), labels ) )
            {
                return;
            }
        }
        // BUGFIX: close the bracket opened by "[nodeId:" so the failure
        // message is well-formed and readable.
        fail( "No update matching [nodeId:" + node + ", labels:" + Arrays.toString( labels ) + "] found among: " +
                allUpdates );
    }

    @Override
    public void recover( Iterator<NodeLabelUpdate> updates ) throws IOException
    {
    }

    @Override
    public void force() throws UnderlyingStorageException
    {
    }

    @Override
    public LabelScanReader newReader()
    {
        return null;
    }

    @Override
    public AllEntriesLabelScanReader newAllEntriesReader()
    {
        return null;
    }

    @Override
    public ResourceIterator<File> snapshotStoreFiles() throws IOException
    {
        return null;
    }

    @Override
    public void init() throws IOException
    {
    }

    @Override
    public void start() throws IOException
    {
    }

    @Override
    public void stop() throws IOException
    {
    }

    @Override
    public void shutdown() throws IOException
    {
    }

    @Override
    public LabelScanWriter newWriter()
    {
        writersCreated++;
        return new LabelScanWriter()
        {
            @Override
            public void write( NodeLabelUpdate update ) throws IOException
            {
                addToCollection( Collections.singletonList( update ).iterator(), allUpdates );
            }

            @Override
            public void close() throws IOException
            {
            }
        };
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_unsafe_batchinsert_TestBatchInsert.java
|
1,948
|
/**
 * Kernel extension factory that hands out a caller-supplied
 * {@link LabelScanStore}, letting tests inject a tracking/mock store into
 * the batch inserter's extension machinery.
 */
private static class ControlledLabelScanStore extends KernelExtensionFactory<InMemoryLabelScanStoreExtension.NoDependencies>
{
    private final LabelScanStore labelScanStore;

    public ControlledLabelScanStore( LabelScanStore labelScanStore )
    {
        super( "batch" );
        this.labelScanStore = labelScanStore;
    }

    @Override
    public Lifecycle newKernelExtension( InMemoryLabelScanStoreExtension.NoDependencies dependencies ) throws Throwable
    {
        // 100 is the provider priority -- assumed high enough to win over
        // other registered label scan store providers; TODO confirm.
        return new LabelScanStoreProvider( labelScanStore, 100 );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_unsafe_batchinsert_TestBatchInsert.java
|
1,949
|
{
@Override
public String apply( Label from )
{
return from.name();
}
}, nodeLabels );
| false
|
community_kernel_src_test_java_org_neo4j_unsafe_batchinsert_TestBatchInsert.java
|
1,950
|
public class TestBatchInsert
{
// Shared fixture: one sample property per supported value type (primitives,
// boxed values, String, and arrays thereof). Populated once in the static
// initializer below; made final since it is never reassigned.
private static final Map<String,Object> properties = new HashMap<>();

/** Relationship types used by the tests; BATCH_TEST plus five generic ones. */
private static enum RelTypes implements RelationshipType
{
    BATCH_TEST,
    REL_TYPE1,
    REL_TYPE2,
    REL_TYPE3,
    REL_TYPE4,
    REL_TYPE5
}

// Convenience array for round-robin selection of the five generic types.
private static final RelationshipType[] relTypeArray = {
    RelTypes.REL_TYPE1, RelTypes.REL_TYPE2, RelTypes.REL_TYPE3,
    RelTypes.REL_TYPE4, RelTypes.REL_TYPE5 };

static
{
    properties.put( "key0", "SDSDASSDLKSDSAKLSLDAKSLKDLSDAKLDSLA" );
    properties.put( "key1", 1 );
    properties.put( "key2", (short) 2 );
    properties.put( "key3", 3L );
    properties.put( "key4", 4.0f );
    properties.put( "key5", 5.0d );
    properties.put( "key6", (byte) 6 );
    properties.put( "key7", true );
    properties.put( "key8", (char) 8 );
    properties.put( "key10", new String[] {
        "SDSDASSDLKSDSAKLSLDAKSLKDLSDAKLDSLA", "dsasda", "dssadsad"
    } );
    properties.put( "key11", new int[] {1,2,3,4,5,6,7,8,9 } );
    properties.put( "key12", new short[] {1,2,3,4,5,6,7,8,9} );
    properties.put( "key13", new long[] {1,2,3,4,5,6,7,8,9 } );
    properties.put( "key14", new float[] {1,2,3,4,5,6,7,8,9} );
    properties.put( "key15", new double[] {1,2,3,4,5,6,7,8,9} );
    properties.put( "key16", new byte[] {1,2,3,4,5,6,7,8,9} );
    properties.put( "key17", new boolean[] {true,false,true,false} );
    properties.put( "key18", new char[] {1,2,3,4,5,6,7,8,9} );
}
// Ephemeral in-memory file system: each test gets a clean, disposable store.
@Rule public EphemeralFileSystemRule fs = new EphemeralFileSystemRule();

/** Creates a batch inserter on the ephemeral file system with default config. */
private BatchInserter newBatchInserter()
{
    return BatchInserters.inserter( "neo-batch-db", fs.get(), stringMap() );
}

/**
 * Creates a batch inserter whose schema index provider is the given factory,
 * paired with the in-memory label scan store.
 */
private BatchInserter newBatchInserterWithSchemaIndexProvider( KernelExtensionFactory<?> provider )
{
    List<KernelExtensionFactory<?>> extensions = Arrays.asList(
            provider, new InMemoryLabelScanStoreExtension() );
    return BatchInserters.inserter( "neo-batch-db", fs.get(), stringMap(), extensions );
}

/**
 * Creates a batch inserter whose label scan store is the given factory,
 * paired with the in-memory schema index provider.
 */
private BatchInserter newBatchInserterWithLabelScanStore( KernelExtensionFactory<?> provider )
{
    List<KernelExtensionFactory<?>> extensions = Arrays.asList(
            new InMemoryIndexProviderFactory(), provider );
    return BatchInserters.inserter( "neo-batch-db", fs.get(), stringMap(), extensions );
}

/** Opens the store through the GraphDatabaseService-flavored batch API. */
private GraphDatabaseService newBatchGraphDatabase()
{
    return BatchInserters.batchDatabase( "neo-batch-db", fs.get() );
}
/**
 * Regression test: repeatedly reading and rewriting a String[] property
 * (which is stored in dynamic records) must not corrupt the stored value.
 */
@Test
public void shouldUpdateStringArrayPropertiesOnNodesUsingBatchInserter1()
{
    // Given
    BatchInserter batchInserter = newBatchInserter();
    String[] array1 = { "1" };
    String[] array2 = { "a" };
    long id1 = batchInserter.createNode(map("array", array1));
    long id2 = batchInserter.createNode(map());
    // When -- interleave reads and overwrites of the same dynamic property
    batchInserter.getNodeProperties( id1 ).get( "array" );
    batchInserter.setNodeProperty( id1, "array", array1 );
    batchInserter.setNodeProperty( id2, "array", array2 );
    batchInserter.getNodeProperties( id1 ).get( "array" );
    batchInserter.setNodeProperty( id1, "array", array1 );
    batchInserter.setNodeProperty( id2, "array", array2 );
    // Then
    assertThat( (String[])batchInserter.getNodeProperties( id1 ).get( "array" ), equalTo(array1) );
    batchInserter.shutdown();
}

/** Smoke test: create two nodes and a relationship, verify its endpoints and type. */
@Test
public void testSimple()
{
    BatchInserter graphDb = newBatchInserter();
    long node1 = graphDb.createNode( null );
    long node2 = graphDb.createNode( null );
    long rel1 = graphDb.createRelationship( node1, node2, RelTypes.BATCH_TEST,
            null );
    BatchRelationship rel = graphDb.getRelationshipById( rel1 );
    assertEquals( rel.getStartNode(), node1 );
    assertEquals( rel.getEndNode(), node2 );
    assertEquals( RelTypes.BATCH_TEST.name(), rel.getType().name() );
    graphDb.shutdown();
}

/**
 * Properties set through the batch GraphDatabaseService facade must survive
 * a shutdown and be readable in a subsequent batch session.
 */
@Test
public void testPropertySetFromGraphDbIsPersisted()
{
    GraphDatabaseService gds = newBatchGraphDatabase();
    Node from = gds.createNode();
    long fromId = from.getId();
    Node to = gds.createNode();
    long toId = to.getId();
    Relationship rel = from.createRelationshipTo( to,
            DynamicRelationshipType.withName( "PROP_TEST" ) );
    long relId = rel.getId();
    from.setProperty( "1", "one" );
    to.setProperty( "2", "two" );
    rel.setProperty( "3", "three" );
    gds.shutdown();
    // Reopen and verify everything persisted.
    GraphDatabaseService db = newBatchGraphDatabase();
    from = db.getNodeById( fromId );
    assertEquals( "one", from.getProperty( "1" ) );
    to = db.getNodeById( toId );
    assertEquals( "two", to.getProperty( "2" ) );
    rel = db.getRelationshipById( relId );
    assertEquals( "three", rel.getProperty( "3" ) );
    db.shutdown();
}

/** Properties given at creation plus those set afterwards must all be visible. */
@Test
public void testSetAndAddNodeProperties()
{
    BatchInserter inserter = newBatchInserter();
    long tehNode = inserter.createNode( MapUtil.map( "one", "one" ,"two","two","three","three") );
    inserter.setNodeProperty( tehNode, "four", "four" );
    inserter.setNodeProperty( tehNode, "five", "five" );
    Map<String, Object> props = inserter.getNodeProperties( tehNode );
    assertEquals( 5, props.size() );
    assertEquals( "one", props.get( "one" ) );
    assertEquals( "five", props.get( "five" ) );
    inserter.shutdown();
}

/**
 * A single property set via the batch inserter must be visible when the
 * store is reopened as an embedded graph database.
 */
@Test
public void setSingleProperty() throws Exception
{
    BatchInserter inserter = newBatchInserter();
    long node = inserter.createNode( null );
    String value = "Something";
    String key = "name";
    inserter.setNodeProperty( node, key, value );
    GraphDatabaseService db = switchToEmbeddedGraphDatabaseService( inserter );
    assertThat( getNodeInTx( node, db ), inTx( db, hasProperty( key ).withValue( value ) ) );
    db.shutdown();
}
/**
 * Shuts down the given batch inserter and reopens the same store directory
 * as an impermanent embedded database on the shared ephemeral file system.
 */
private GraphDatabaseService switchToEmbeddedGraphDatabaseService( BatchInserter inserter )
{
    // BUGFIX: capture the store directory BEFORE shutting the inserter down,
    // instead of querying an already shut-down instance.
    String inserterStoreDir = inserter.getStoreDir();
    inserter.shutdown();
    TestGraphDatabaseFactory factory = new TestGraphDatabaseFactory();
    // Reuse the same ephemeral file system so the just-written store is visible.
    factory.setFileSystem( fs.get() );
    return factory.newImpermanentDatabase( inserterStoreDir );
}
/**
 * Node properties set across several batch sessions must persist, and
 * overwriting an existing key must replace (not duplicate) it.
 */
@Test
public void testSetAndKeepNodeProperty()
{
    BatchInserter inserter = newBatchInserter();
    long tehNode = inserter.createNode( MapUtil.map( "foo", "bar" ) );
    inserter.setNodeProperty( tehNode, "foo2", "bar2" );
    Map<String, Object> props = inserter.getNodeProperties( tehNode );
    assertEquals( 2, props.size() );
    assertEquals( "bar", props.get( "foo" ) );
    assertEquals( "bar2", props.get( "foo2" ) );
    inserter.shutdown();
    // Session 2: values survived; overwrite one of them.
    inserter = newBatchInserter();
    props = inserter.getNodeProperties( tehNode );
    assertEquals( 2, props.size() );
    assertEquals( "bar", props.get( "foo" ) );
    assertEquals( "bar2", props.get( "foo2" ) );
    inserter.setNodeProperty( tehNode, "foo", "bar3" );
    props = inserter.getNodeProperties( tehNode );
    assertEquals( "bar3", props.get( "foo" ) );
    assertEquals( 2, props.size() );
    assertEquals( "bar3", props.get( "foo" ) );
    assertEquals( "bar2", props.get( "foo2" ) );
    inserter.shutdown();
    // Session 3: the overwrite persisted too.
    inserter = newBatchInserter();
    props = inserter.getNodeProperties( tehNode );
    assertEquals( "bar3", props.get( "foo" ) );
    assertEquals( 2, props.size() );
    assertEquals( "bar3", props.get( "foo" ) );
    assertEquals( "bar2", props.get( "foo2" ) );
    inserter.shutdown();
}

/** Same persist-and-overwrite scenario as above, but for relationship properties. */
@Test
public void testSetAndKeepRelationshipProperty()
{
    BatchInserter inserter = newBatchInserter();
    long from = inserter.createNode( Collections.<String,Object>emptyMap() );
    long to = inserter.createNode( Collections.<String,Object>emptyMap() );
    long theRel = inserter.createRelationship( from, to,
            DynamicRelationshipType.withName( "TestingPropsHere" ),
            MapUtil.map( "foo", "bar" ) );
    inserter.setRelationshipProperty( theRel, "foo2", "bar2" );
    Map<String, Object> props = inserter.getRelationshipProperties( theRel );
    assertEquals( 2, props.size() );
    assertEquals( "bar", props.get( "foo" ) );
    assertEquals( "bar2", props.get( "foo2" ) );
    inserter.shutdown();
    inserter = newBatchInserter();
    props = inserter.getRelationshipProperties( theRel );
    assertEquals( 2, props.size() );
    assertEquals( "bar", props.get( "foo" ) );
    assertEquals( "bar2", props.get( "foo2" ) );
    inserter.setRelationshipProperty( theRel, "foo", "bar3" );
    props = inserter.getRelationshipProperties( theRel );
    assertEquals( "bar3", props.get( "foo" ) );
    assertEquals( 2, props.size() );
    assertEquals( "bar3", props.get( "foo" ) );
    assertEquals( "bar2", props.get( "foo2" ) );
    inserter.shutdown();
    inserter = newBatchInserter();
    props = inserter.getRelationshipProperties( theRel );
    assertEquals( "bar3", props.get( "foo" ) );
    assertEquals( 2, props.size() );
    assertEquals( "bar3", props.get( "foo" ) );
    assertEquals( "bar2", props.get( "foo2" ) );
    inserter.shutdown();
}

/**
 * hasProperty must answer true for every key in the shared fixture and
 * false for a slightly different key, on both nodes and relationships.
 */
@Test
public void testNodeHasProperty()
{
    BatchInserter inserter = newBatchInserter();
    long theNode = inserter.createNode( properties );
    long anotherNode = inserter.createNode( Collections.<String,Object>emptyMap() );
    long relationship = inserter.createRelationship( theNode, anotherNode,
            DynamicRelationshipType.withName( "foo" ), properties );
    for ( String key : properties.keySet() )
    {
        assertTrue( inserter.nodeHasProperty( theNode, key ) );
        assertFalse( inserter.nodeHasProperty( theNode, key + "-" ) );
        assertTrue( inserter.relationshipHasProperty( relationship, key ) );
        assertFalse( inserter.relationshipHasProperty( relationship, key
                + "-" ) );
    }
    inserter.shutdown();
}
/**
 * Removes one property from a node and a different one from a relationship,
 * then verifies the exact expected presence/absence of every fixture key --
 * both in the same session and after reopening the store.
 */
@Test
public void testRemoveProperties()
{
    BatchInserter inserter = newBatchInserter();
    long theNode = inserter.createNode( properties );
    long anotherNode = inserter.createNode( Collections.<String,Object>emptyMap() );
    long relationship = inserter.createRelationship( theNode, anotherNode,
            DynamicRelationshipType.withName( "foo" ), properties );
    inserter.removeNodeProperty( theNode, "key0" );
    inserter.removeRelationshipProperty( relationship, "key1" );
    assertPropertiesAfterRemoval( inserter, theNode, relationship );
    inserter.shutdown();
    // Reopen and verify the removals persisted.
    inserter = newBatchInserter();
    assertPropertiesAfterRemoval( inserter, theNode, relationship );
    inserter.shutdown();
}

/**
 * Asserts the expected state after removing "key0" from the node and "key1"
 * from the relationship: those two keys are gone from their respective
 * entities only, and every other fixture key is present on both.
 */
private void assertPropertiesAfterRemoval( BatchInserter inserter, long theNode, long relationship )
{
    for ( String key : properties.keySet() )
    {
        switch ( key )
        {
        case "key0":
            assertFalse( inserter.nodeHasProperty( theNode, key ) );
            assertTrue( inserter.relationshipHasProperty( relationship, key ) );
            break;
        case "key1":
            assertTrue( inserter.nodeHasProperty( theNode, key ) );
            assertFalse( inserter.relationshipHasProperty( relationship,
                    key ) );
            break;
        default:
            assertTrue( inserter.nodeHasProperty( theNode, key ) );
            assertTrue( inserter.relationshipHasProperty( relationship, key ) );
            break;
        }
    }
}
/**
 * Removing a dynamic-record (array) property must actually remove it.
 * The interesting failure mode only triggers with JVM assertions enabled.
 */
@Test
public void shouldBeAbleToRemoveDynamicProperty()
{
    // Only triggered if assertions are enabled
    // GIVEN
    BatchInserter batchInserter = newBatchInserter();
    String key = "tags";
    long nodeId = batchInserter.createNode( MapUtil.map( key, new String[] { "one", "two", "three" } ) );
    // WHEN
    batchInserter.removeNodeProperty( nodeId, key );
    // THEN
    assertFalse( batchInserter.getNodeProperties( nodeId ).containsKey( key ) );
    // FIX: shut the inserter down like every other test here, so its
    // resources are released even on the ephemeral file system.
    batchInserter.shutdown();
}

/**
 * Overwriting a dynamic-record (array) property must replace its value.
 * The interesting failure mode only triggers with JVM assertions enabled.
 */
@Test
public void shouldBeAbleToOverwriteDynamicProperty()
{
    // Only triggered if assertions are enabled
    // GIVEN
    BatchInserter batchInserter = newBatchInserter();
    String key = "tags";
    long nodeId = batchInserter.createNode( MapUtil.map( key, new String[] { "one", "two", "three" } ) );
    // WHEN
    String[] secondValue = new String[] { "four", "five", "six" };
    batchInserter.setNodeProperty( nodeId, key, secondValue );
    // THEN
    assertTrue( Arrays.equals( secondValue, (String[]) batchInserter.getNodeProperties( nodeId ).get( key ) ) );
    // FIX: release the inserter, matching the rest of this test class.
    batchInserter.shutdown();
}
/**
 * Creates a hub node with 25 relationships spread over five types and
 * verifies they are all returned with the correct start node.
 */
@Test
public void testMore()
{
    BatchInserter graphDb = newBatchInserter();
    long startNode = graphDb.createNode( properties );
    long endNodes[] = new long[25];
    Set<Long> rels = new HashSet<>();
    for ( int i = 0; i < 25; i++ )
    {
        endNodes[i] = graphDb.createNode( properties );
        rels.add( graphDb.createRelationship( startNode, endNodes[i],
                relTypeArray[i % 5], properties ) );
    }
    for ( BatchRelationship rel : graphDb.getRelationships( startNode ) )
    {
        assertTrue( rels.contains( rel.getId() ) );
        assertEquals( rel.getStartNode(), startNode );
    }
    graphDb.setNodeProperties( startNode, properties );
    graphDb.shutdown();
}

/**
 * Setting the full property map repeatedly must leave all values readable
 * each time (exercises property record reuse on reset).
 */
@Test
public void testPropSetAndReset()
{
    BatchInserter graphDb = newBatchInserter();
    BatchGraphDatabaseImpl gds = new BatchGraphDatabaseImpl( graphDb );
    long startNode = graphDb.createNode( properties );
    assertProperties( gds.getNodeById( startNode ) );
    graphDb.setNodeProperties( startNode, properties );
    assertProperties( gds.getNodeById( startNode ) );
    graphDb.setNodeProperties( startNode, properties );
    assertProperties( gds.getNodeById( startNode ) );
    gds.shutdown();
}

/**
 * A self-relationship (loop) created via batch insertion must have correct
 * endpoints both through the batch API and after reopening embedded.
 */
@Test
public void makeSureLoopsCanBeCreated()
{
    BatchInserter graphDb = newBatchInserter();
    long startNode = graphDb.createNode( properties );
    long otherNode = graphDb.createNode( properties );
    long selfRelationship = graphDb.createRelationship( startNode, startNode,
            relTypeArray[0], properties );
    long relationship = graphDb.createRelationship( startNode, otherNode,
            relTypeArray[0], properties );
    for ( BatchRelationship rel : graphDb.getRelationships( startNode ) )
    {
        if ( rel.getId() == selfRelationship )
        {
            assertEquals( startNode, rel.getStartNode() );
            assertEquals( startNode, rel.getEndNode() );
        }
        else if ( rel.getId() == relationship )
        {
            assertEquals( startNode, rel.getStartNode() );
            assertEquals( otherNode, rel.getEndNode() );
        }
        else
        {
            fail( "Unexpected relationship " + rel.getId() );
        }
    }
    GraphDatabaseService db = switchToEmbeddedGraphDatabaseService( graphDb );
    try ( Transaction ignored = db.beginTx() )
    {
        Node realStartNode = db.getNodeById( startNode );
        Relationship realSelfRelationship = db.getRelationshipById( selfRelationship );
        Relationship realRelationship = db.getRelationshipById( relationship );
        // The loop is the only INCOMING REL_TYPE1 relationship on the node.
        assertEquals( realSelfRelationship, realStartNode.getSingleRelationship( RelTypes.REL_TYPE1, Direction.INCOMING ) );
        assertEquals( asSet( realSelfRelationship, realRelationship ), asSet( realStartNode.getRelationships( Direction.OUTGOING ) ) );
        assertEquals( asSet( realSelfRelationship, realRelationship ), asSet( realStartNode.getRelationships() ) );
    }
    finally {
        db.shutdown();
    }
}

/** Fetches a node by id inside a (read-only) transaction. */
private Node getNodeInTx( long nodeId, GraphDatabaseService db )
{
    try ( Transaction ignored = db.beginTx() )
    {
        return db.getNodeById( nodeId );
    }
}
/** Copies every fixture property onto the given node. */
private void setProperties( Node node )
{
    for ( Map.Entry<String, Object> property : properties.entrySet() )
    {
        node.setProperty( property.getKey(), property.getValue() );
    }
}

/** Copies every fixture property onto the given relationship. */
private void setProperties( Relationship rel )
{
    for ( Map.Entry<String, Object> property : properties.entrySet() )
    {
        rel.setProperty( property.getKey(), property.getValue() );
    }
}
/**
 * Asserts that the node carries exactly the fixture properties: every
 * fixture key has an equal value on the node (arrays compared element-wise
 * per component type), and the node has no extra keys.
 */
private void assertProperties( Node node )
{
    for ( String key : properties.keySet() )
    {
        Object expected = properties.get( key );
        if ( expected.getClass().isArray() )
        {
            Class<?> component = expected.getClass().getComponentType();
            // FIX: the original re-tested component.isPrimitive() inside each
            // primitive branch -- always true there, so the dead checks are gone.
            if ( !component.isPrimitive() ) // then it is String, cast to Object[] is safe
            {
                assertTrue( Arrays.equals(
                        (Object[]) expected,
                        (Object[]) node.getProperty( key ) ) );
            }
            else if ( component == Integer.TYPE )
            {
                assertTrue( Arrays.equals(
                        (int[]) expected,
                        (int[]) node.getProperty( key ) ) );
            }
            else if ( component == Boolean.TYPE )
            {
                assertTrue( Arrays.equals(
                        (boolean[]) expected,
                        (boolean[]) node.getProperty( key ) ) );
            }
            else if ( component == Byte.TYPE )
            {
                assertTrue( Arrays.equals(
                        (byte[]) expected,
                        (byte[]) node.getProperty( key ) ) );
            }
            else if ( component == Character.TYPE )
            {
                assertTrue( Arrays.equals(
                        (char[]) expected,
                        (char[]) node.getProperty( key ) ) );
            }
            else if ( component == Long.TYPE )
            {
                assertTrue( Arrays.equals(
                        (long[]) expected,
                        (long[]) node.getProperty( key ) ) );
            }
            else if ( component == Float.TYPE )
            {
                assertTrue( Arrays.equals(
                        (float[]) expected,
                        (float[]) node.getProperty( key ) ) );
            }
            else if ( component == Double.TYPE )
            {
                assertTrue( Arrays.equals(
                        (double[]) expected,
                        (double[]) node.getProperty( key ) ) );
            }
            else if ( component == Short.TYPE )
            {
                assertTrue( Arrays.equals(
                        (short[]) expected,
                        (short[]) node.getProperty( key ) ) );
            }
        }
        else
        {
            assertEquals( expected, node.getProperty( key ) );
        }
    }
    // No unexpected keys on the node.
    for ( String stored : node.getPropertyKeys() )
    {
        assertTrue( properties.containsKey( stored ) );
    }
}
/**
 * Same hub-with-25-relationships scenario as testMore, but driven entirely
 * through the GraphDatabaseService-flavored batch API.
 */
@Test
public void testWithGraphDbService()
{
    GraphDatabaseService graphDb = newBatchGraphDatabase();
    Node startNode = graphDb.createNode();
    setProperties( startNode );
    Node endNodes[] = new Node[25];
    Set<Relationship> rels = new HashSet<>();
    for ( int i = 0; i < 25; i++ )
    {
        endNodes[i] = graphDb.createNode();
        setProperties( endNodes[i] );
        Relationship rel = startNode.createRelationshipTo( endNodes[i],
                relTypeArray[i % 5] );
        rels.add( rel );
        setProperties( rel );
    }
    for ( Relationship rel : startNode.getRelationships() )
    {
        assertTrue( rels.contains( rel ) );
        assertEquals( rel.getStartNode(), startNode );
    }
    setProperties( startNode );
    graphDb.shutdown();
}
/**
 * With exactly one relationship of each of the five types, every
 * getRelationships/getSingleRelationship flavor must return exactly one hit.
 */
@Test
public void testGraphDbServiceGetRelationships()
{
    GraphDatabaseService graphDb = newBatchGraphDatabase();
    Node startNode = graphDb.createNode();
    for ( int i = 0; i < 5; i++ )
    {
        Node endNode = graphDb.createNode();
        startNode.createRelationshipTo( endNode, relTypeArray[i] );
    }
    for ( int i = 0; i < 5; i++ )
    {
        assertTrue( startNode.getSingleRelationship(
                relTypeArray[i], Direction.OUTGOING ) != null );
    }
    for ( int i = 0; i < 5; i++ )
    {
        Iterator<Relationship> relItr =
                startNode.getRelationships( relTypeArray[i],
                        Direction.OUTGOING ).iterator();
        // next() without hasNext() is deliberate: it throws if the single
        // expected relationship is missing.
        relItr.next();
        // FIX: assertFalse reads clearer than assertTrue(!...) and is already
        // used elsewhere in this file.
        assertFalse( relItr.hasNext() );
    }
    for ( int i = 0; i < 5; i++ )
    {
        Iterator<Relationship> relItr =
                startNode.getRelationships( relTypeArray[i] ).iterator();
        relItr.next();
        assertFalse( relItr.hasNext() );
    }
    graphDb.shutdown();
}
/**
 * Nodes and relationships created by the batch inserter must be deletable
 * through a normal embedded-database transaction afterwards.
 */
@Test
public void createBatchNodeAndRelationshipsDeleteAllInEmbedded() throws Exception
{
    /*
     * ()--[REL_TYPE1]-->(node)--[BATCH_TEST]->()
     */
    BatchInserter inserter = newBatchInserter();
    long nodeId = inserter.createNode( null );
    inserter.createRelationship( nodeId, inserter.createNode( null ),
            RelTypes.BATCH_TEST, null );
    inserter.createRelationship( inserter.createNode( null ), nodeId,
            RelTypes.REL_TYPE1, null );
    // Delete node and all its relationships
    GraphDatabaseService db = switchToEmbeddedGraphDatabaseService( inserter );
    try ( Transaction tx = db.beginTx() )
    {
        Node node = db.getNodeById( nodeId );
        for ( Relationship relationship : node.getRelationships() )
        {
            relationship.delete();
        }
        node.delete();
        tx.success();
    }
    db.shutdown();
}
/**
 * A node persisted without any properties must accept a property when it is
 * loaded in a later batch session (exercises the no-property-chain path).
 */
@Test
public void batchDbShouldBeAbleToSetPropertyOnNodeWithNoProperties()
{
    // GIVEN
    GraphDatabaseService database = newBatchGraphDatabase();
    Node node = database.createNode();
    database.shutdown();
    database = newBatchGraphDatabase();
    node = database.getNodeById( node.getId() );
    // WHEN
    node.setProperty( "test", "test" );
    // THEN
    assertEquals( "test", node.getProperty( "test" ) );
    // FIX: close the second session too, matching the rest of the class.
    database.shutdown();
}

/**
 * Same as above but for a relationship persisted without properties.
 */
@Test
public void batchDbShouldBeAbleToSetPropertyOnRelationshipWithNoProperties()
{
    // GIVEN
    GraphDatabaseService database = newBatchGraphDatabase();
    Relationship relationship = database.createNode().createRelationshipTo(
            database.createNode(), MyRelTypes.TEST );
    database.shutdown();
    database = newBatchGraphDatabase();
    relationship = database.getRelationshipById( relationship.getId() );
    // WHEN
    relationship.setProperty( "test", "test" );
    // THEN
    assertEquals( "test", relationship.getProperty( "test" ) );
    // FIX: close the second session too, matching the rest of the class.
    database.shutdown();
}
/**
 * After shutdown the messages log must be closed -- proven by the file
 * being deletable on a real file system (delete fails on Windows while open).
 */
@Test
public void messagesLogGetsClosed() throws Exception
{
    String storeDir = TargetDirectory.forTest( getClass() ).makeGraphDbDir().getAbsolutePath();
    BatchInserter inserter = BatchInserters.inserter( storeDir, new DefaultFileSystemAbstraction(),
            stringMap() );
    inserter.shutdown();
    assertTrue( new File( storeDir, StringLogger.DEFAULT_NAME ).delete() );
}

/**
 * Creating nodes/relationships with an explicitly empty property map must
 * not blow up, and their (empty) properties must be readable back.
 */
@Test
public void createEntitiesWithEmptyPropertiesMap() throws Exception
{
    BatchInserter inserter = newBatchInserter();
    // Assert for node
    long nodeId = inserter.createNode( map() );
    inserter.getNodeProperties( nodeId );
    // Assert for relationship
    long anotherNodeId = inserter.createNode( null );
    long relId = inserter.createRelationship( nodeId, anotherNodeId, RelTypes.BATCH_TEST, map() );
    inserter.getRelationshipProperties( relId );
    inserter.shutdown();
}

/**
 * Round-trips property values large enough to require dynamic records:
 * a long string and a sizeable int array.
 */
@Test
public void createEntitiesWithDynamicPropertiesMap() throws Exception
{
    BatchInserter inserter = newBatchInserter();
    setAndGet( inserter, "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" );
    setAndGet( inserter, intArray( 20 ) );
    inserter.shutdown();
}
/** Labels passed to createNode must be on the node; others must not. */
@Test
public void shouldAddInitialLabelsToCreatedNode() throws Exception
{
    // GIVEN
    BatchInserter inserter = newBatchInserter();
    // WHEN
    long node = inserter.createNode( map(), Labels.FIRST, Labels.SECOND );
    // THEN
    assertTrue( inserter.nodeHasLabel( node, Labels.FIRST ) );
    assertTrue( inserter.nodeHasLabel( node, Labels.SECOND ) );
    assertFalse( inserter.nodeHasLabel( node, Labels.THIRD ) );
}

/** getNodeLabels must return exactly the labels given at creation. */
@Test
public void shouldGetNodeLabels() throws Exception
{
    // GIVEN
    BatchInserter inserter = newBatchInserter();
    long node = inserter.createNode( map(), Labels.FIRST, Labels.THIRD );
    // WHEN
    Iterable<String> labelNames = asNames( inserter.getNodeLabels( node ) );
    // THEN
    assertEquals( asSet( Labels.FIRST.name(), Labels.THIRD.name() ), asSet( labelNames ) );
}

/**
 * 200 labels do not fit inlined in the node record, forcing dynamic label
 * records -- all of them must still be readable back.
 */
@Test
public void shouldAddManyInitialLabelsAsDynamicRecords() throws Exception
{
    // GIVEN
    BatchInserter inserter = newBatchInserter();
    Pair<Label[], Set<String>> labels = manyLabels( 200 );
    long node = inserter.createNode( map(), labels.first() );
    // WHEN
    Iterable<String> labelNames = asNames( inserter.getNodeLabels( node ) );
    // THEN
    assertEquals( labels.other(), asSet( labelNames ) );
}

/** Growing a small inlined label set into a dynamic one must work. */
@Test
public void shouldReplaceExistingInlinedLabelsWithDynamic() throws Exception
{
    // GIVEN
    BatchInserter inserter = newBatchInserter();
    long node = inserter.createNode( map(), Labels.FIRST );
    // WHEN
    Pair<Label[], Set<String>> labels = manyLabels( 100 );
    inserter.setNodeLabels( node, labels.first() );
    // THEN
    Iterable<String> labelNames = asNames( inserter.getNodeLabels( node ) );
    assertEquals( labels.other(), asSet( labelNames ) );
}

/** Shrinking a dynamic label set back to a single inlined label must work. */
@Test
public void shouldReplaceExistingDynamicLabelsWithInlined() throws Exception
{
    // GIVEN
    BatchInserter inserter = newBatchInserter();
    long node = inserter.createNode( map(), manyLabels( 150 ).first() );
    // WHEN
    inserter.setNodeLabels( node, Labels.FIRST );
    // THEN
    Iterable<String> labelNames = asNames( inserter.getNodeLabels( node ) );
    assertEquals( asSet( Labels.FIRST.name() ), asSet( labelNames ) );
}

/** A deferred schema index can be declared on a completely empty store. */
@Test
public void shouldCreateDeferredSchemaIndexesInEmptyDatabase() throws Exception
{
    // GIVEN
    BatchInserter inserter = newBatchInserter();
    // WHEN
    IndexDefinition definition = inserter.createDeferredSchemaIndex( label( "Hacker" ) ).on( "handle" ).create();
    // THEN
    assertEquals( "Hacker", definition.getLabel().name() );
    assertEquals( asCollection( iterator( "handle" ) ), asCollection( definition.getPropertyKeys() ) );
}

/** A deferred uniqueness constraint can be declared on an empty store. */
@Test
public void shouldCreateDeferredUniquenessConstraintInEmptyDatabase() throws Exception
{
    // GIVEN
    BatchInserter inserter = newBatchInserter();
    // WHEN
    ConstraintDefinition definition =
            inserter.createDeferredConstraint( label( "Hacker" ) ).assertPropertyIsUnique( "handle" ).create();
    // THEN
    assertEquals( "Hacker", definition.getLabel().name() );
    assertEquals( ConstraintType.UNIQUENESS, definition.getConstraintType() );
    assertEquals( asSet( "handle" ), asSet( definition.getPropertyKeys() ) );
}
@Test
public void shouldRunIndexPopulationJobAtShutdown() throws Throwable
{
// GIVEN
IndexPopulator populator = mock( IndexPopulator.class );
SchemaIndexProvider provider = mock( SchemaIndexProvider.class );
when( provider.getProviderDescriptor() ).thenReturn( InMemoryIndexProviderFactory.PROVIDER_DESCRIPTOR );
when( provider.getPopulator( anyLong(), any( IndexDescriptor.class ), any( IndexConfiguration.class ) ) ).thenReturn( populator );
BatchInserter inserter = newBatchInserterWithSchemaIndexProvider(
singleInstanceSchemaIndexProviderFactory( InMemoryIndexProviderFactory.KEY, provider ) );
inserter.createDeferredSchemaIndex( label("Hacker") ).on( "handle" ).create();
long nodeId = inserter.createNode( map( "handle", "Jakewins" ), label( "Hacker" ) );
// WHEN
inserter.shutdown();
// THEN
verify( provider ).init();
verify( provider ).start();
verify( provider ).getPopulator( anyLong(), any( IndexDescriptor.class ), any( IndexConfiguration.class ) );
verify( populator ).create();
verify( populator ).add( nodeId, "Jakewins" );
verify( populator ).close( true );
verify( provider ).stop();
verify( provider ).shutdown();
verifyNoMoreInteractions( populator );
}
@Test
public void shouldRunConstraintPopulationJobAtShutdown() throws Throwable
{
    // GIVEN a mocked schema index provider, a deferred uniqueness constraint
    // and one node matching it
    IndexPopulator populator = mock( IndexPopulator.class );
    SchemaIndexProvider provider = mock( SchemaIndexProvider.class );
    when( provider.getProviderDescriptor() ).thenReturn( InMemoryIndexProviderFactory.PROVIDER_DESCRIPTOR );
    when( provider.getPopulator( anyLong(), any( IndexDescriptor.class ), any( IndexConfiguration.class ) ) ).thenReturn( populator );
    BatchInserter inserter = newBatchInserterWithSchemaIndexProvider(
            singleInstanceSchemaIndexProviderFactory( InMemoryIndexProviderFactory.KEY, provider ) );
    inserter.createDeferredConstraint( label("Hacker") ).assertPropertyIsUnique( "handle" ).create();
    long nodeId = inserter.createNode( map( "handle", "Jakewins" ), label( "Hacker" ) );
    // WHEN
    inserter.shutdown();
    // THEN the constraint's backing index was populated with the node's value
    verify( provider ).init();
    verify( provider ).start();
    verify( provider ).getPopulator( anyLong(), any( IndexDescriptor.class ), any( IndexConfiguration.class ) );
    verify( populator ).create();
    verify( populator ).add( nodeId, "Jakewins" );
    verify( populator ).close( true );
    verify( provider ).stop();
    verify( provider ).shutdown();
    verifyNoMoreInteractions( populator );
}
@Test
public void shouldRepopulatePreexistingIndexed() throws Throwable
{
    // GIVEN an existing store that already contains an index and one indexed node,
    // reopened with a fresh (mocked) index provider
    long jakewins = dbWithIndexAndSingleIndexedNode();
    IndexPopulator populator = mock( IndexPopulator.class );
    SchemaIndexProvider provider = mock( SchemaIndexProvider.class );
    when( provider.getProviderDescriptor() ).thenReturn( InMemoryIndexProviderFactory.PROVIDER_DESCRIPTOR );
    when( provider.getPopulator( anyLong(), any( IndexDescriptor.class ), any( IndexConfiguration.class ) ) ).thenReturn( populator );
    BatchInserter inserter = newBatchInserterWithSchemaIndexProvider(
            singleInstanceSchemaIndexProviderFactory( InMemoryIndexProviderFactory.KEY, provider ) );
    long boggle = inserter.createNode( map( "handle", "b0ggl3" ), label( "Hacker" ) );
    // WHEN
    inserter.shutdown();
    // THEN population at shutdown covered both the pre-existing and the new node
    verify( provider ).init();
    verify( provider ).start();
    verify( provider ).getPopulator( anyLong(), any( IndexDescriptor.class ), any( IndexConfiguration.class ) );
    verify( populator ).create();
    verify( populator ).add( jakewins, "Jakewins" );
    verify( populator ).add( boggle, "b0ggl3" );
    verify( populator ).close( true );
    verify( provider ).stop();
    verify( provider ).shutdown();
    verifyNoMoreInteractions( populator );
}
/**
 * Creates a store containing a deferred index on :Hacker(handle) and a single
 * indexed node, shuts the inserter down (flushing everything to the store),
 * and returns the created node's id.
 */
private long dbWithIndexAndSingleIndexedNode()
{
    IndexPopulator populator = mock( IndexPopulator.class );
    SchemaIndexProvider provider = mock( SchemaIndexProvider.class );
    when( provider.getProviderDescriptor() ).thenReturn( InMemoryIndexProviderFactory.PROVIDER_DESCRIPTOR );
    when( provider.getPopulator( anyLong(), any( IndexDescriptor.class ), any( IndexConfiguration.class ) ) ).thenReturn( populator );
    BatchInserter inserter = newBatchInserterWithSchemaIndexProvider(
            singleInstanceSchemaIndexProviderFactory( InMemoryIndexProviderFactory.KEY, provider ) );
    inserter.createDeferredSchemaIndex( label("Hacker") ).on( "handle" ).create();
    long nodeId = inserter.createNode( map( "handle", "Jakewins" ), label( "Hacker" ) );
    inserter.shutdown();
    return nodeId;
}
@Test
public void shouldCorrectlyJudgeRelationshipType()
{
    // GIVEN a relationship created with a known type
    GraphDatabaseService db = newBatchGraphDatabase();
    DynamicRelationshipType testType = DynamicRelationshipType.withName( "TEST" );
    long relationshipId = db.createNode().createRelationshipTo( db.createNode(), testType ).getId();

    // WHEN restarting, so the type is resolved through fresh internal instances
    db.shutdown();
    db = newBatchGraphDatabase();
    Relationship relationship = db.getRelationshipById( relationshipId );

    // THEN isType must still recognise the original type by name
    assertTrue( "Relationship#isType returned false for the correct type", relationship.isType( testType ) );
}
@Test
public void shouldPopulateLabelScanStoreOnShutdown() throws Exception
{
    // GIVEN
    // -- a database and a mocked label scan store
    UpdateTrackingLabelScanStore labelScanStore = new UpdateTrackingLabelScanStore();
    BatchInserter inserter = newBatchInserterWithLabelScanStore( new ControlledLabelScanStore( labelScanStore ) );
    // -- and some data that we insert
    long node1 = inserter.createNode( null, Labels.FIRST );
    long node2 = inserter.createNode( null, Labels.SECOND );
    long node3 = inserter.createNode( null, Labels.THIRD );
    long node4 = inserter.createNode( null, Labels.FIRST, Labels.SECOND );
    long node5 = inserter.createNode( null, Labels.FIRST, Labels.THIRD );
    // WHEN we shut down the batch inserter
    inserter.shutdown();
    // THEN the label scan store should receive all the updates.
    // Label ids are not known here; the test assumes they are assigned 0..2 in
    // first-use order (FIRST=0, SECOND=1, THIRD=2) -- acknowledged "bad boy" below.
    // of course, we don't know the label ids at this point, but we're assuming 0..2 (bad boy)
    labelScanStore.assertRecivedUpdate( node1, 0 );
    labelScanStore.assertRecivedUpdate( node2, 1 );
    labelScanStore.assertRecivedUpdate( node3, 2 );
    labelScanStore.assertRecivedUpdate( node4, 0, 1 );
    labelScanStore.assertRecivedUpdate( node5, 0, 2 );
}
@Test
public void shouldSkipStoreScanIfNoLabelsAdded() throws Exception
{
    // GIVEN a batch inserter backed by an observable label scan store
    UpdateTrackingLabelScanStore labelScanStore = new UpdateTrackingLabelScanStore();
    BatchInserter inserter = newBatchInserterWithLabelScanStore( new ControlledLabelScanStore( labelScanStore ) );

    // WHEN only label-less nodes are created
    inserter.createNode( null );
    inserter.createNode( null );
    inserter.shutdown();

    // THEN shutdown must not have opened a single label scan writer
    assertEquals( 0, labelScanStore.writersCreated );
}
/**
 * Properties that were overwritten or removed must be settable again afterwards.
 */
@Test
public void propertiesCanBeReSetUsingBatchInserter()
{
    // GIVEN a node whose properties have been both overwritten and removed
    BatchInserter inserter = newBatchInserter();
    Map<String, Object> initialProperties = new HashMap<>();
    initialProperties.put( "tags", new String[] { "one", "two" } );
    initialProperties.put( "name", "One" );
    initialProperties.put( "something", "something" );
    initialProperties.put( "count", 1 );
    long node = 1;
    inserter.createNode( node, initialProperties );
    inserter.setNodeProperty( node, "name", "NewOne" );
    inserter.removeNodeProperty( node, "count" );
    inserter.removeNodeProperty( node, "something" );

    // WHEN setting new properties
    inserter.setNodeProperty( node, "name", "YetAnotherOne" );
    inserter.setNodeProperty( node, "additional", "something" );

    // THEN there should be no problems doing so
    assertEquals( "YetAnotherOne", inserter.getNodeProperties( node ).get( "name" ) );
    assertEquals( "something", inserter.getNodeProperties( node ).get( "additional" ) );
    inserter.shutdown();
}
@Test
public void propertiesCanBeReSetUsingBatchInserter2()
{
    // GIVEN a node without any properties
    BatchInserter inserter = newBatchInserter();
    long id = inserter.createNode( new HashMap<String, Object>() );

    // WHEN replacing a long value with a shorter one under the same key
    inserter.setNodeProperty( id, "test", "looooooooooong test" );
    inserter.setNodeProperty( id, "test", "small test" );

    // THEN the latest value wins
    assertEquals( "small test", inserter.getNodeProperties( id ).get( "test" ) );
    inserter.shutdown();
}
/**
 * Replacing a small property value with a larger one must spill over into a new
 * property record without losing the value.
 */
@Test
public void replaceWithBiggerPropertySpillsOverIntoNewPropertyRecord()
{
    // GIVEN a node with a few mixed-type properties, one already overwritten
    BatchInserter inserter = newBatchInserter();
    Map<String, Object> properties = new HashMap<>();
    properties.put( "tags", new String[] { "one", "two" } );
    properties.put( "count", 1 );
    properties.put( "name", "One" );
    long id = inserter.createNode( properties );
    inserter.setNodeProperty( id, "name", "NewOne" );

    // WHEN replacing the int property with a (bigger) string value
    inserter.setNodeProperty( id, "count", "something" );

    // THEN the new value is readable back
    assertEquals( "something", inserter.getNodeProperties( id ).get( "count" ) );
    inserter.shutdown();
}
/**
 * {@link LabelScanStore} stub that records every {@link NodeLabelUpdate} written
 * through its writers, so tests can assert on the updates produced at batch
 * inserter shutdown. All other store operations are no-ops or return null.
 */
private static class UpdateTrackingLabelScanStore implements LabelScanStore
{
    private final List<NodeLabelUpdate> allUpdates = new ArrayList<>();
    // Number of writers handed out; used to assert whether a store scan happened.
    int writersCreated;

    // NOTE(review): method name carries a typo ("Recived"); kept as-is since
    // callers in this file use it.
    public void assertRecivedUpdate( long node, long... labels )
    {
        for ( NodeLabelUpdate update : allUpdates )
        {
            if ( update.getNodeId() == node &&
                    Arrays.equals( update.getLabelsAfter(), labels ) )
            {
                return;
            }
        }
        fail( "No update matching [nodeId:" + node + ", labels:" + Arrays.toString( labels ) + " found among: " +
                allUpdates );
    }
    @Override
    public void recover( Iterator<NodeLabelUpdate> updates ) throws IOException
    {
    }
    @Override
    public void force() throws UnderlyingStorageException
    {
    }
    @Override
    public LabelScanReader newReader()
    {
        return null;
    }
    @Override
    public AllEntriesLabelScanReader newAllEntriesReader()
    {
        return null;
    }
    @Override
    public ResourceIterator<File> snapshotStoreFiles() throws IOException
    {
        return null;
    }
    @Override
    public void init() throws IOException
    {
    }
    @Override
    public void start() throws IOException
    {
    }
    @Override
    public void stop() throws IOException
    {
    }
    @Override
    public void shutdown() throws IOException
    {
    }
    @Override
    public LabelScanWriter newWriter()
    {
        writersCreated++;
        return new LabelScanWriter()
        {
            @Override
            public void write( NodeLabelUpdate update ) throws IOException
            {
                // Copy each update into the shared list for later assertions.
                addToCollection( Collections.singletonList( update ).iterator(), allUpdates );
            }
            @Override
            public void close() throws IOException
            {
            }
        };
    }
}
/**
 * Kernel extension factory that supplies a caller-provided {@link LabelScanStore},
 * letting tests observe the label scan store used by the batch inserter.
 */
private static class ControlledLabelScanStore extends KernelExtensionFactory<InMemoryLabelScanStoreExtension.NoDependencies>
{
    private final LabelScanStore labelScanStore;

    public ControlledLabelScanStore( LabelScanStore labelScanStore )
    {
        super( "batch" );
        this.labelScanStore = labelScanStore;
    }

    @Override
    public Lifecycle newKernelExtension( InMemoryLabelScanStoreExtension.NoDependencies dependencies ) throws Throwable
    {
        // 100 presumably ranks this provider above the default one -- confirm
        // against LabelScanStoreProvider's constructor contract.
        return new LabelScanStoreProvider( labelScanStore, 100 );
    }
}
/**
 * Round-trips {@code value} through a node property and asserts that the value
 * read back equals what was written. Array values are compared element by
 * element; Arrays.deepEquals handles any array type (previously only int[]
 * worked -- any other array type caused a ClassCastException here).
 */
private void setAndGet( BatchInserter inserter, Object value )
{
    long nodeId = inserter.createNode( map( "key", value ) );
    Object readValue = inserter.getNodeProperties( nodeId ).get( "key" );
    if ( readValue.getClass().isArray() )
    {
        // Wrap in Object[] so deepEquals dispatches to the right primitive
        // Arrays.equals overload for int[], long[], String[], etc.
        assertTrue( Arrays.deepEquals( new Object[] { value }, new Object[] { readValue } ) );
    }
    else
    {
        assertEquals( value, readValue );
    }
}
/**
 * Builds an int[] of the given length with consecutive values starting at 2^30.
 */
private int[] intArray( int length )
{
    int[] result = new int[length];
    int base = (int) Math.pow( 2, 30 );
    for ( int i = 0; i < length; i++ )
    {
        result[i] = base + i;
    }
    return result;
}
private static enum Labels implements Label
{
FIRST,
SECOND,
THIRD
}
/**
 * Lazily maps each label to its name.
 */
private Iterable<String> asNames( Iterable<Label> nodeLabels )
{
    return map( new Function<Label,String>()
    {
        @Override
        public String apply( Label from )
        {
            return from.name();
        }
    }, nodeLabels );
}
/**
 * Creates {@code count} distinct labels together with the set of label names
 * expected to be read back for them.
 */
private Pair<Label[],Set<String>> manyLabels( int count )
{
    Set<String> expectedLabelNames = new HashSet<>();
    Label[] labels = new Label[count];
    for ( int i = 0; i < count; i++ )
    {
        String labelName = "bach label " + i;
        expectedLabelNames.add( labelName );
        labels[i] = label( labelName );
    }
    return Pair.of( labels, expectedLabelNames );
}
}
| false
|
community_kernel_src_test_java_org_neo4j_unsafe_batchinsert_TestBatchInsert.java
|
1,951
|
/**
 * Tests of the label operations on the batch-insertion
 * {@link GraphDatabaseService} returned by {@link BatchInserters#batchDatabase},
 * including that labels survive reopening the store as a real database.
 */
public class TestBatchDatabase
{
    private final String storeDir = "/tmp/dblala";
    // Fresh ephemeral file system per test; created in before(), torn down in after().
    private EphemeralFileSystemAbstraction fs;

    @Test
    public void shouldCreateLabeledNodes()
    {
        // given
        GraphDatabaseService gdb = BatchInserters.batchDatabase( storeDir, fs );
        Label luluLabel = label( "lulu" );
        // when
        long nodeId = gdb.createNode( luluLabel ).getId();
        // and -- reopen the same (ephemeral) store as a real database
        gdb = turnIntoRealGraphDatabase( gdb );
        // then
        Transaction transaction = gdb.beginTx();
        assertThat( gdb.getNodeById( nodeId ), hasLabels( luluLabel ) );
        // NOTE(review): finish() is the deprecated predecessor of close();
        // consider try-with-resources as used elsewhere in this code base.
        transaction.finish();
        gdb.shutdown();
    }

    @Test
    public void shouldCreateAndSeeLabeledNodes()
    {
        // given
        GraphDatabaseService gdb = BatchInserters.batchDatabase( storeDir, fs );
        Label luluLabel = label( "lulu" );
        // when
        long nodeId = gdb.createNode( luluLabel ).getId();
        // then
        assertThat( gdb.getNodeById( nodeId ), hasLabels( luluLabel ) );
    }

    @Test
    public void shouldCreateAndTestLabeledNodes()
    {
        // given
        GraphDatabaseService gdb = BatchInserters.batchDatabase( storeDir, fs );
        Label luluLabel = label( "lulu" );
        // when
        long nodeId = gdb.createNode( luluLabel ).getId();
        // then
        assertThat( gdb.getNodeById( nodeId ), hasLabels( luluLabel ) );
    }

    @Test
    public void shouldAddLabelToNode()
    {
        // given
        GraphDatabaseService gdb = BatchInserters.batchDatabase( storeDir, fs );
        Label luluLabel = label( "lulu" );
        Node node = gdb.createNode();
        // when
        node.addLabel( luluLabel );
        // then
        assertThat( node, hasLabels( luluLabel ) );
    }

    @Test
    public void shouldAddLabelTwiceToNode()
    {
        // given
        GraphDatabaseService gdb = BatchInserters.batchDatabase( storeDir, fs );
        Label luluLabel = label( "lulu" );
        Node node = gdb.createNode();
        // when -- adding the same label twice must be idempotent
        node.addLabel( luluLabel );
        node.addLabel( luluLabel );
        // then
        assertThat( node, hasLabels( luluLabel ) );
    }

    @Test
    public void removingNonExistantLabelFromNodeShouldBeNoOp()
    {
        // given
        GraphDatabaseService gdb = BatchInserters.batchDatabase( storeDir, fs );
        Label luluLabel = label( "lulu" );
        Node node = gdb.createNode();
        // when
        node.removeLabel( luluLabel );
        // then
        assertTrue( asSet( gdb.getNodeById( node.getId() ).getLabels() ).isEmpty() );
    }

    @Test
    public void shouldRemoveLabelFromNode()
    {
        // given
        GraphDatabaseService gdb = BatchInserters.batchDatabase( storeDir, fs );
        Label luluLabel = label( "lulu" );
        Label lalaLabel = label( "lala" );
        Node node = gdb.createNode();
        node.addLabel( lalaLabel );
        node.addLabel( luluLabel );
        // when
        node.removeLabel( luluLabel );
        // then -- only the removed label is gone
        assertThat( node, hasLabels( lalaLabel ) );
    }

    /**
     * Shuts down the batch database and reopens the same store directory as a
     * regular (impermanent) database on the shared ephemeral file system.
     */
    private GraphDatabaseService turnIntoRealGraphDatabase( GraphDatabaseService gdb )
    {
        gdb.shutdown();
        TestGraphDatabaseFactory factory = new TestGraphDatabaseFactory();
        factory.setFileSystem( fs );
        return factory.newImpermanentDatabase( storeDir );
    }

    @Before
    public void before()
    {
        fs = new EphemeralFileSystemAbstraction();
    }

    @After
    public void after()
    {
        fs.shutdown();
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_unsafe_batchinsert_TestBatchDatabase.java
|
1,952
|
{
    @Override
    public void write( NodeLabelUpdate update ) throws IOException
    {
        // do nothing -- this writer intentionally discards all label updates
    }
    @Override
    public void close() throws IOException
    {
        // nothing to close -- no underlying resources are held
    }
};
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_LabelScanWriter.java
|
1,953
|
{
    @Override
    public String getMethodName()
    {
        // Prefix with the class name so the reported test name is unambiguous.
        return BigBatchStoreIT.this.getClass().getSimpleName() + "#" + super.getMethodName();
    }
};
| false
|
community_kernel_src_test_java_org_neo4j_unsafe_batchinsert_BigBatchStoreIT.java
|
1,954
|
/**
 * Integration test verifying that the batch inserter handles record ids around
 * and above the 2^32 and 2^33 marks, and that such stores can be reopened both
 * by another inserter and by a regular embedded database.
 */
public class BigBatchStoreIT implements RelationshipType
{
    private static final String PATH = "target/var/bigb";
    @Rule public EphemeralFileSystemRule fs = new EphemeralFileSystemRule();
    private org.neo4j.unsafe.batchinsert.BatchInserter db;
    public @Rule
    TestName testName = new TestName()
    {
        @Override
        public String getMethodName()
        {
            // Qualify with the class name for unambiguous reporting.
            return BigBatchStoreIT.this.getClass().getSimpleName() + "#" + super.getMethodName();
        }
    };
    @Before
    public void doBefore()
    {
        db = BatchInserters.inserter( PATH, fs.get());
    }
    @After
    public void doAfter()
    {
        db.shutdown();
    }
    // The test class doubles as the relationship type used throughout.
    @Override
    public String name()
    {
        return "BIG_TYPE";
    }
    @Test
    public void create4BPlusStuff() throws Exception
    {
        testHighIds( (long) pow( 2, 32 ), 2, 1000 );
    }
    @Test
    public void create8BPlusStuff() throws Exception
    {
        testHighIds( (long) pow( 2, 33 ), 1, 1600 );
    }
    /**
     * Bumps all relevant id generators to just below {@code highMark}, creates
     * nodes/relationships/properties straddling the mark, and asserts they can
     * be read back: within the inserter, after reopening with a new inserter,
     * and through a regular embedded database.
     *
     * @param highMark the id boundary to straddle
     * @param minus how far below the mark the first created records land
     * @param requiredHeapMb heap needed; the test is skipped on weaker machines
     */
    private void testHighIds( long highMark, int minus, int requiredHeapMb )
    {
        assumeTrue( machineIsOkToRunThisTest( testName.getMethodName(), requiredHeapMb ) );
        long idBelow = highMark-minus;
        setHighId( IdType.NODE, idBelow );
        setHighId( IdType.RELATIONSHIP, idBelow );
        setHighId( IdType.PROPERTY, idBelow );
        setHighId( IdType.ARRAY_BLOCK, idBelow );
        setHighId( IdType.STRING_BLOCK, idBelow );
        String propertyKey = "name";
        int intPropertyValue = 123;
        String stringPropertyValue = "Long string, longer than would fit in shortstring";
        long[] arrayPropertyValue = new long[] { 1021L, 321L, 343212L };
        // First node/relationship receive ids just below the mark...
        long nodeBelowTheLine = db.createNode( map( propertyKey, intPropertyValue ) );
        assertEquals( idBelow, nodeBelowTheLine );
        long nodeAboveTheLine = db.createNode( map( propertyKey, stringPropertyValue ) );
        long relBelowTheLine = db.createRelationship( nodeBelowTheLine, nodeAboveTheLine, this, map( propertyKey, arrayPropertyValue ) );
        assertEquals( idBelow, relBelowTheLine );
        // ...subsequent ones end up at/above it.
        long relAboveTheLine = db.createRelationship( nodeAboveTheLine, nodeBelowTheLine, this, null );
        assertEquals( highMark, relAboveTheLine );
        assertEquals( highMark, nodeAboveTheLine );
        assertEquals( intPropertyValue, db.getNodeProperties( nodeBelowTheLine ).get( propertyKey ) );
        assertEquals( stringPropertyValue, db.getNodeProperties( nodeAboveTheLine ).get( propertyKey ) );
        assertTrue( Arrays.equals( arrayPropertyValue, (long[]) db.getRelationshipProperties( relBelowTheLine ).get( propertyKey ) ) );
        assertEquals( asSet( asList( relBelowTheLine, relAboveTheLine ) ), asIds( db.getRelationships( idBelow ) ) );
        // Reopen with a fresh inserter and verify the data survived.
        db.shutdown();
        db = BatchInserters.inserter( PATH, fs.get() );
        assertEquals( asSet( asList( relBelowTheLine, relAboveTheLine ) ), asIds( db.getRelationships( idBelow ) ) );
        db.shutdown();
        // Also verify through a regular embedded database on the same store.
        GraphDatabaseService edb = new TestGraphDatabaseFactory().setFileSystem( fs.get() ).newImpermanentDatabase( PATH );
        assertEquals( nodeAboveTheLine, edb.getNodeById( highMark ).getId() );
        assertEquals( relBelowTheLine, edb.getNodeById( idBelow ).getSingleRelationship( this, Direction.OUTGOING ).getId() );
        assertEquals( relAboveTheLine, edb.getNodeById( idBelow ).getSingleRelationship( this, Direction.INCOMING ).getId() );
        assertEquals( asSet( asList( edb.getRelationshipById( relBelowTheLine ), edb.getRelationshipById( relAboveTheLine ) ) ),
                asSet( asCollection( edb.getNodeById( idBelow ).getRelationships() ) ) );
        edb.shutdown();
        // Leave an open inserter behind for doAfter() to shut down.
        db = BatchInserters.inserter( PATH, fs.get() );
    }
    @Test( expected=IllegalArgumentException.class )
    public void makeSureCantCreateNodeWithMagicNumber()
    {
        // 2^32-1 must be rejected as a node id (reserved "magic" value).
        long id = (long) Math.pow( 2, 32 )-1;
        db.createNode( id, null );
    }
    /**
     * Collects the ids of the given relationships into a set.
     */
    private Collection<Long> asIds( Iterable<BatchRelationship> relationships )
    {
        Collection<Long> ids = new HashSet<Long>();
        for ( BatchRelationship rel : relationships )
        {
            ids.add( rel.getId() );
        }
        return ids;
    }
    private static <T> Collection<T> asSet( Collection<T> collection )
    {
        return new HashSet<T>( collection );
    }
    /**
     * Forces the given id generator to start handing out ids from {@code highId}.
     */
    private void setHighId( IdType type, long highId )
    {
        ((BatchInserterImpl) db).getIdGeneratorFactory().get( type ).setHighId( highId );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_unsafe_batchinsert_BigBatchStoreIT.java
|
1,955
|
/**
 * Bidirectional lookup between token names and token ids, used during batch
 * insertion. Lookups by name return -1 when absent; lookups by id throw
 * {@link NotFoundException} when absent.
 */
class BatchTokenHolder
{
    private final ArrayMap<String,Integer> idByName =
            new ArrayMap<String,Integer>( (byte)5, false, false);
    private final ArrayMap<Integer,String> nameById =
            new ArrayMap<Integer,String>( (byte)5, false, false);

    BatchTokenHolder( Token[] tokens )
    {
        for ( Token token : tokens )
        {
            idByName.put( token.name(), token.id() );
            nameById.put( token.id(), token.name() );
        }
    }

    /** Registers a new name/id pair in both directions. */
    void addToken( String stringKey, int keyId )
    {
        idByName.put( stringKey, keyId );
        nameById.put( keyId, stringKey );
    }

    /** @return the id for {@code stringKey}, or -1 if the name is unknown. */
    int idOf( String stringKey )
    {
        Integer id = idByName.get( stringKey );
        return id == null ? -1 : id;
    }

    /** @return the name for {@code id}; throws NotFoundException if unknown. */
    String nameOf( int id )
    {
        String name = nameById.get( id );
        if ( name == null )
        {
            throw new NotFoundException( "No token with id:" + id );
        }
        return name;
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchTokenHolder.java
|
1,956
|
/**
 * Immutable value object representing a relationship as seen by the batch
 * inserter: its id, the ids of its start and end nodes, and its type.
 * (Stale commented-out 32-bit masking remnants removed from the getters.)
 */
public final class BatchRelationship
{
    private final long id;
    private final long startNodeId;
    private final long endNodeId;
    private final RelationshipType type;

    BatchRelationship( long id, long startNodeId, long endNodeId,
            RelationshipType type )
    {
        this.id = id;
        this.startNodeId = startNodeId;
        this.endNodeId = endNodeId;
        this.type = type;
    }

    /** @return the id of this relationship. */
    public long getId()
    {
        return id;
    }

    /** @return the id of the node this relationship starts at. */
    public long getStartNode()
    {
        return startNodeId;
    }

    /** @return the id of the node this relationship ends at. */
    public long getEndNode()
    {
        return endNodeId;
    }

    /** @return the type of this relationship. */
    public RelationshipType getType()
    {
        return type;
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchRelationship.java
|
1,957
|
/**
 * Static factory methods for obtaining {@link BatchInserter} instances and
 * batch-mode {@link GraphDatabaseService} instances.
 */
public final class BatchInserters
{
    // Static utility class; not meant to be instantiated.
    private BatchInserters()
    {
    }
    /**
     * Get a {@link BatchInserter} given a store directory.
     *
     * @param storeDir the store directory
     * @return a new {@link BatchInserter}
     */
    public static BatchInserter inserter( String storeDir )
    {
        return inserter( storeDir, stringMap() );
    }
    /**
     * Get a {@link BatchInserter} given a store directory.
     *
     * @param storeDir the store directory
     * @param config configuration settings to use
     * @return a new {@link BatchInserter}
     */
    public static BatchInserter inserter( String storeDir, Map<String,String> config )
    {
        return inserter( storeDir, new DefaultFileSystemAbstraction(), config );
    }
    /**
     * Get a {@link BatchInserter} given a store directory.
     *
     * @param storeDir the store directory
     * @param fileSystem the file system abstraction to use
     * @return a new {@link BatchInserter}
     */
    public static BatchInserter inserter( String storeDir, FileSystemAbstraction fileSystem )
    {
        return inserter( storeDir, fileSystem, stringMap() );
    }
    /**
     * Get a {@link BatchInserter} given a store directory.
     *
     * @param storeDir the store directory
     * @param fileSystem the file system abstraction to use
     * @param config configuration settings to use
     * @return a new {@link BatchInserter}
     */
    @SuppressWarnings( { "unchecked", "rawtypes" } )
    public static BatchInserter inserter( String storeDir, FileSystemAbstraction fileSystem,
            Map<String,String> config )
    {
        return inserter( storeDir, fileSystem, config, (Iterable) Service.load( KernelExtensionFactory.class ) );
    }
    /**
     * Get a {@link BatchInserter} given a store directory.
     *
     * @param storeDir the store directory
     * @param fileSystem the file system abstraction to use
     * @param config configuration settings to use
     * @param kernelExtensions kernel extensions to load
     * @return a new {@link BatchInserter}
     */
    public static BatchInserter inserter( String storeDir, FileSystemAbstraction fileSystem,
            Map<String, String> config, Iterable<KernelExtensionFactory<?>> kernelExtensions )
    {
        return new BatchInserterImpl( storeDir, fileSystem, config, kernelExtensions );
    }
    /**
     * Get a {@link GraphDatabaseService} that does not support deletions and
     * transactions.
     *
     * @param storeDir the store directory
     * @return a {@link GraphDatabaseService} that does not support deletions
     * and transactions
     */
    public static GraphDatabaseService batchDatabase( String storeDir )
    {
        return batchDatabase( storeDir, stringMap() );
    }
    /**
     * Get a {@link GraphDatabaseService} that does not support deletions and
     * transactions.
     *
     * @param storeDir the store directory
     * @param config configuration settings to use
     * @return a {@link GraphDatabaseService} that does not support deletions
     * and transactions
     */
    public static GraphDatabaseService batchDatabase( String storeDir,
            Map<String, String> config )
    {
        return batchDatabase( storeDir, new DefaultFileSystemAbstraction(), config );
    }
    /**
     * Get a {@link GraphDatabaseService} that does not support deletions and
     * transactions.
     *
     * @param storeDir the store directory
     * @param fileSystem the file system abstraction to use
     * @return a {@link GraphDatabaseService} that does not support deletions
     * and transactions
     */
    public static GraphDatabaseService batchDatabase( String storeDir, FileSystemAbstraction fileSystem )
    {
        return batchDatabase( storeDir, fileSystem, stringMap() );
    }
    /**
     * Get a {@link GraphDatabaseService} that does not support deletions and
     * transactions.
     *
     * @param storeDir the store directory
     * @param fileSystem the file system abstraction to use
     * @param config configuration settings to use
     * @return a {@link GraphDatabaseService} that does not support deletions
     * and transactions
     */
    @SuppressWarnings( { "unchecked", "rawtypes" } )
    public static GraphDatabaseService batchDatabase( String storeDir,
            FileSystemAbstraction fileSystem, Map<String, String> config )
    {
        return batchDatabase( storeDir, fileSystem, config, (Iterable) Service.load( KernelExtensionFactory.class ) );
    }
    /**
     * Get a {@link GraphDatabaseService} that does not support deletions and
     * transactions.
     *
     * @param storeDir the store directory
     * @param fileSystem the file system abstraction to use
     * @param config configuration settings to use
     * @param kernelExtensions kernel extensions to load
     * @return a {@link GraphDatabaseService} that does not support deletions
     * and transactions
     */
    public static GraphDatabaseService batchDatabase( String storeDir, FileSystemAbstraction fileSystem,
            Map<String, String> config, Iterable<KernelExtensionFactory<?>> kernelExtensions )
    {
        return new BatchGraphDatabaseImpl( storeDir, fileSystem, config, kernelExtensions );
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchInserters.java
|
1,958
|
/**
 * Tests of {@link BatchInserterImpl} configuration handling (memory mapping)
 * and store-lock behavior.
 */
public class BatchInserterImplTest
{
    private void assumeNotWindows()
    {
        // Windows doesn't work well at all with memory mapping. The problem being that
        // in Java there's no way to unmap a memory mapping from a file, instead that
        // is handed over to GC and GC isn't deterministic. Well, actually there is a way
        // unmap if using reflection. Anyways Windows has problems with truncating a file
        // or similar if a memory mapped section of it is still open, i.e. hasn't yet
        // been GCed... which may happen from time to time.
        assumeTrue( !osIsWindows() );
    }
    @Test
    public void testHonorsPassedInParams() throws Exception
    {
        assumeNotWindows();
        Boolean memoryMappingConfig = createInserterAndGetMemoryMappingConfig( stringMap( GraphDatabaseSettings
                .use_memory_mapped_buffers.name(), "true" ) );
        assertTrue( "memory mapped config is active", memoryMappingConfig );
    }
    @Test
    public void testDefaultsToNoMemoryMapping() throws Exception
    {
        assumeNotWindows();
        Boolean memoryMappingConfig = createInserterAndGetMemoryMappingConfig( stringMap() );
        assertFalse( "memory mapped config is active", memoryMappingConfig );
    }
    @Test
    public void testCreatesStoreLockFile()
    {
        // Given
        File file = TargetDirectory.forTest( getClass() ).makeGraphDbDir();
        // When
        BatchInserter inserter = BatchInserters.inserter( file.getAbsolutePath() );
        // Then the lock file exists while the inserter holds the store
        assertThat( new File( file, StoreLocker.STORE_LOCK_FILENAME ).exists(), equalTo( true ) );
        inserter.shutdown();
    }
    @Test
    public void testFailsOnExistingStoreLockFile() throws IOException
    {
        // Given another holder already has the store lock
        File parent = TargetDirectory.forTest( getClass() ).makeGraphDbDir();
        StoreLocker lock = new StoreLocker( new DefaultFileSystemAbstraction() );
        lock.checkLock( parent );
        // When
        try
        {
            BatchInserters.inserter( parent.getAbsolutePath() );
            // Then
            fail();
        }
        catch ( StoreLockException e )
        {
            // OK -- refusing to open a locked store is the expected behavior
        }
        finally
        {
            lock.release();
        }
    }
    /**
     * Creates an inserter with the given config, then reflectively reads back
     * the effective memory-mapping setting from its NeoStore configuration.
     */
    private Boolean createInserterAndGetMemoryMappingConfig( Map<String, String> initialConfig ) throws Exception
    {
        BatchInserter inserter = BatchInserters.inserter(
                TargetDirectory.forTest( getClass() ).makeGraphDbDir().getAbsolutePath(), initialConfig );
        NeoStore neoStore = ReflectionUtil.getPrivateField( inserter, "neoStore", NeoStore.class );
        Config config = ReflectionUtil.getPrivateField( neoStore, "conf", Config.class );
        inserter.shutdown();
        return config.get( GraphDatabaseSettings.use_memory_mapped_buffers );
    }
}
| false
|
community_kernel_src_test_java_org_neo4j_unsafe_batchinsert_BatchInserterImplTest.java
|
1,959
|
/**
 * Minimal {@link RelationshipType} implementation backed by just a name.
 * NOTE(review): equality is identity-based here; type equivalence elsewhere is
 * presumably judged by name -- confirm against callers.
 */
private static class RelationshipTypeImpl implements RelationshipType
{
    private final String name;

    RelationshipTypeImpl( String name )
    {
        this.name = name;
    }

    @Override
    public String name()
    {
        return name;
    }
}
| false
|
community_kernel_src_main_java_org_neo4j_unsafe_batchinsert_BatchInserterImpl.java
|
1,960
|
{
    public String getParameterValue( Relationship container,
            String key )
    {
        // The value is always the tail label; the key argument is ignored.
        return getTailLabel( container );
    }
} );
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_graphviz_StyleParameter.java
|
1,961
|
/**
 * Relationship title taken from a property value.
 */
final class RelationshipTitleProperty extends RelationshipTitle
{
    private final String key;

    /**
     * Get relationship title from a property.
     * @param key
     *            the property key to use as title.
     */
    public RelationshipTitleProperty( String key )
    {
        this.key = key;
    }

    /**
     * Returns the configured property's value rendered as a string.
     * Uses String.valueOf so non-String property values (numbers, booleans)
     * are rendered instead of failing with a ClassCastException, as the
     * previous blind (String) cast did.
     */
    public String getTitle( Relationship relationship )
    {
        return String.valueOf( relationship.getProperty( key ) );
    }
}
| false
|
community_graphviz_src_main_java_org_neo4j_visualization_graphviz_StyleParameter.java
|
1,962
|
{
    @Override
    public void run()
    {
        // Shuts the database down when executed -- presumably registered as a
        // deferred/cleanup action; confirm against the enclosing code.
        db.shutdown();
    }
};
| false
|
community_kernel_src_test_java_org_neo4j_test_EphemeralFileSystemRule.java
|
1,963
|
/**
 * Graph database that lets a test inject its own {@link RecoveryVerifier}.
 */
private static class TestGraphDatabase extends InternalAbstractGraphDatabase
{
    private final RecoveryVerifier verifier;

    TestGraphDatabase( String dir, RecoveryVerifier recoveryVerifier )
    {
        super( dir, stringMap(), new DefaultGraphDatabaseDependencies() );
        // The field must be assigned before run(), since startup (and hence
        // createRecoveryVerifier()) is triggered from within this constructor.
        this.verifier = recoveryVerifier;
        run();
    }

    @Override
    protected RecoveryVerifier createRecoveryVerifier()
    {
        return this.verifier;
    }
}
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryVerification.java
|
1,964
|
{
    @Override
    public boolean isValid( TransactionInfo txInfo )
    {
        // Reject every recovered transaction, to provoke a verification failure.
        return false;
    }
};
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryVerification.java
|
1,965
|
/**
 * Tests that recovery runs recovered transactions through the configured
 * {@link RecoveryVerifier} and writes recovered 2PC records in rising txId order.
 */
public class TestRecoveryVerification
{
    /**
     * Graph database that lets a test inject its own {@link RecoveryVerifier}.
     */
    private static class TestGraphDatabase extends InternalAbstractGraphDatabase
    {
        private final RecoveryVerifier verifier;
        TestGraphDatabase( String dir, RecoveryVerifier recoveryVerifier )
        {
            super( dir, stringMap(), new DefaultGraphDatabaseDependencies() );
            // Assign before run(): startup calls createRecoveryVerifier() from run().
            this.verifier = recoveryVerifier;
            run();
        }
        @Override
        protected RecoveryVerifier createRecoveryVerifier()
        {
            return this.verifier;
        }
    }
    @Test
    public void recoveryVerificationShouldBeCalledForRecoveredTransactions() throws Exception
    {
        int count = 2;
        String dir = produceNonCleanDbWhichWillRecover2PCsOnStartup( "count", count );
        CountingRecoveryVerifier countingVerifier = new CountingRecoveryVerifier();
        // Opening the dirty store triggers recovery, which must consult the verifier.
        GraphDatabaseService db = new TestGraphDatabase( dir, countingVerifier );
        assertEquals( 2, countingVerifier.count2PC );
        db.shutdown();
    }
    @Test
    public void failingRecoveryVerificationShouldThrowCorrectException() throws Exception
    {
        String dir = produceNonCleanDbWhichWillRecover2PCsOnStartup( "fail", 2 );
        // Verifier that rejects every recovered transaction.
        RecoveryVerifier failingVerifier = new RecoveryVerifier()
        {
            @Override
            public boolean isValid( TransactionInfo txInfo )
            {
                return false;
            }
        };
        try
        {
            new TestGraphDatabase( dir, failingVerifier );
            fail( "Was expecting recovery exception" );
        }
        catch ( RuntimeException e )
        {
            // The verification failure surfaces as the cause of the startup failure.
            assertEquals( e.getMessage(), RecoveryVerificationException.class, e.getCause().getClass() );
        }
    }
    @Test
    public void recovered2PCRecordsShouldBeWrittenInRisingTxIdOrder() throws Exception
    {
        int count = 10;
        String dir = produceNonCleanDbWhichWillRecover2PCsOnStartup( "order", count );
        // Just make it recover
        new GraphDatabaseFactory().newEmbeddedDatabaseBuilder( dir ).setConfig( GraphDatabaseSettings
                .keep_logical_logs, Settings.TRUE ).newGraphDatabase().shutdown();
        verifyOrderedRecords( dir, count );
    }
    /**
     * Scans the .v0 logical log and asserts that it contains exactly
     * {@code expectedCount} 2PC records with consecutive, rising transaction ids.
     */
    private void verifyOrderedRecords( String storeDir, int expectedCount ) throws IOException
    {
        /* Look in the .v0 log for the 2PC records and that they are ordered by txId */
        RandomAccessFile file = new RandomAccessFile( new File( storeDir, "nioneo_logical.log.v0" ), "r" );
        CommandFactory cf = new CommandFactory();
        try
        {
            FileChannel channel = file.getChannel();
            ByteBuffer buffer = allocate( 10000 );
            readLogHeader( buffer, channel, true );
            long lastOne = -1;
            int counted = 0;
            for ( LogEntry entry; (entry = readEntry( buffer, channel, cf )) != null; )
            {
                if ( entry instanceof TwoPhaseCommit )
                {
                    long txId = ((TwoPhaseCommit) entry).getTxId();
                    if ( lastOne != -1 )
                    {
                        // Each 2PC record must directly follow its predecessor.
                        assertEquals( lastOne + 1, txId );
                    }
                    lastOne = txId;
                    counted++;
                }
            }
            assertEquals( expectedCount, counted );
        }
        finally
        {
            file.close();
        }
    }
    /**
     * Verifier that counts the two-phase-commit transactions it is asked about,
     * accepting all of them.
     */
    private static class CountingRecoveryVerifier implements RecoveryVerifier
    {
        private int count2PC;
        @Override
        public boolean isValid( TransactionInfo txInfo )
        {
            if ( !txInfo.isOnePhase() )
            {
                count2PC++;
            }
            return true;
        }
    }
}
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryVerification.java
|
1,966
|
public class TestRecoveryScenarios
{
@Test
public void shouldRecoverTransactionWhereNodeIsDeletedInTheFuture() throws Exception
{
    // GIVEN a node created before a log rotation, then modified and deleted after it
    Node node = createNodeWithProperty( "key", "value", label );
    rotateLog();
    setProperty( node, "other-key", 1 );
    deleteNode( node );
    flushAll();
    // WHEN crashing and recovering
    crashAndRestart( indexProvider );
    // THEN
    // -- really the problem was that recovery threw exception, so mostly assert that.
    try ( Transaction tx = db.beginTx() )
    {
        node = db.getNodeById( node.getId() );
        tx.success();
        fail( "Should not exist" );
    }
    catch ( NotFoundException e )
    {
        assertEquals( "Node " + node.getId() + " not found", e.getMessage() );
    }
}
@Test
public void shouldRecoverTransactionWherePropertyIsRemovedInTheFuture() throws Exception
{
    // GIVEN an indexed property added before a rotation and removed after an
    // index snapshot was taken, so recovery must re-apply the removal
    createIndex( label, "key" );
    Node node = createNodeWithProperty( "key", "value" );
    rotateLog();
    addLabel( node, label );
    InMemoryIndexProvider outdatedIndexProvider = indexProvider.snapshot();
    removeProperty( node, "key" );
    flushAll();
    // WHEN restarting with the outdated index snapshot
    crashAndRestart( outdatedIndexProvider );
    // THEN
    // -- really the problem was that recovery threw exception, so mostly assert that.
    try ( Transaction tx = db.beginTx() )
    {
        assertEquals( "Updates not propagated correctly during recovery", Collections.<Node>emptyList(),
                IteratorUtil.asList( db.findNodesByLabelAndProperty( label, "key", "value" ) ) );
        tx.success();
    }
}
@Test
public void shouldRecoverTransactionWhereManyLabelsAreRemovedInTheFuture() throws Exception
{
    // GIVEN a node with 16 extra labels (enough to exercise dynamic label
    // storage) plus the indexed label, created before a rotation
    createIndex( label, "key" );
    Label[] labels = new Label[16];
    for (int i = 0; i < labels.length; i++ )
    {
        labels[i] = DynamicLabel.label( "Label" + Integer.toHexString( i ) );
    }
    Node node;
    try ( Transaction tx = db.beginTx() )
    {
        node = db.createNode( labels );
        node.addLabel( label );
        tx.success();
    }
    rotateLog();
    InMemoryIndexProvider outdatedIndexProvider = indexProvider.snapshot();
    setProperty( node, "key", "value" );
    removeLabels( node, labels );
    flushAll();
    // WHEN restarting with the outdated index snapshot
    crashAndRestart( outdatedIndexProvider );
    // THEN
    // -- really the problem was that recovery threw exception, so mostly assert that.
    try ( Transaction tx = db.beginTx() )
    {
        assertEquals( node, single( db.findNodesByLabelAndProperty( label, "key", "value" ) ) );
        tx.success();
    }
}
/** Removes each of the given labels from the node in a single committed transaction. */
private void removeLabels( Node target, Label... labels )
{
    try ( Transaction tx = db.beginTx() )
    {
        for ( Label toRemove : labels )
        {
            target.removeLabel( toRemove );
        }
        tx.success();
    }
}
private void removeProperty( Node node, String key )
{
try ( Transaction tx = db.beginTx() )
{
node.removeProperty( key );
tx.success();
}
}
    /** Adds {@code label} to {@code node} inside its own write transaction. */
    private void addLabel( Node node, Label label )
    {
        try ( Transaction tx = db.beginTx() )
        {
            node.addLabel( label );
            tx.success();
        }
    }
    /**
     * Creates a node with the given labels and a single {@code key=value} property,
     * committed in its own transaction.
     * @return the created node.
     */
    private Node createNodeWithProperty( String key, String value, Label... labels )
    {
        try ( Transaction tx = db.beginTx() )
        {
            Node node = db.createNode( labels );
            node.setProperty( key, value );
            tx.success();
            return node;
        }
    }
    /**
     * Creates an index on {@code label:key} and waits (up to 10s) for it to come
     * online. Two transactions are needed: index creation must commit before
     * {@code awaitIndexesOnline} can observe the index population.
     */
    private void createIndex( Label label, String key )
    {
        try ( Transaction tx = db.beginTx() )
        {
            db.schema().indexFor( label ).on( key ).create();
            tx.success();
        }
        try ( Transaction tx = db.beginTx() )
        {
            db.schema().awaitIndexesOnline( 10, SECONDS );
            tx.success();
        }
    }
    // Ephemeral (in-memory) file system, so a crash can be simulated by snapshotting it.
    public final @Rule EphemeralFileSystemRule fsRule = new EphemeralFileSystemRule();
    // The single label used by all tests in this class.
    private final Label label = label( "label" );
    @SuppressWarnings("deprecation") private GraphDatabaseAPI db;
    // Index provider held outside the db so an outdated snapshot of it can be injected on restart.
    private final InMemoryIndexProvider indexProvider = new InMemoryIndexProvider( 100 );
    @SuppressWarnings("deprecation")
    @Before
    public void before()
    {
        // Fresh impermanent db on the ephemeral fs, wired with the in-memory index provider.
        db = (GraphDatabaseAPI) databaseFactory( fsRule.get(), indexProvider ).newImpermanentDatabase();
    }
    /** Builds a test db factory using the given file system and index provider. */
    private TestGraphDatabaseFactory databaseFactory( FileSystemAbstraction fs, InMemoryIndexProvider indexProvider )
    {
        return new TestGraphDatabaseFactory()
                .setFileSystem( fs ).addKernelExtension( new InMemoryIndexProviderFactory( indexProvider ) );
    }
    @After
    public void after()
    {
        // Clean shutdown of whatever db instance the test ended up with.
        db.shutdown();
    }
    /** Rotates all logical logs, moving committed transactions into a historical log file. */
    private void rotateLog()
    {
        db.getDependencyResolver().resolveDependency( XaDataSourceManager.class ).rotateLogicalLogs();
    }
    @SuppressWarnings("deprecation")
    // Forces the neostore to flush all pending changes to the (ephemeral) store files,
    // so the upcoming crash snapshot contains them.
    private void flushAll()
    {
        db.getDependencyResolver().resolveDependency(
                XaDataSourceManager.class ).getNeoStoreDataSource().getNeoStore().flushAll();
    }
private void deleteNode( Node node )
{
try ( Transaction tx = db.beginTx() )
{
node.delete();
tx.success();
}
}
private void setProperty( Node node, String key, Object value )
{
try ( Transaction tx = db.beginTx() )
{
node.setProperty( key, value );
tx.success();
}
}
    @SuppressWarnings("deprecation")
    // Simulates a crash: snapshot the file system as of an unclean shutdown, then
    // start a new db on that snapshot (triggering recovery) with the given,
    // possibly outdated, index provider.
    private void crashAndRestart( InMemoryIndexProvider indexProvider )
    {
        FileSystemAbstraction uncleanFs = fsRule.snapshot( shutdownDb( db ) );
        db = (GraphDatabaseAPI) databaseFactory( uncleanFs, indexProvider ).newImpermanentDatabase();
    }
}
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryScenarios.java
|
1,967
|
{
@Override
public boolean accept( LogEntry item )
{
return !(item instanceof LogEntry.Done) && !(item instanceof LogEntry.TwoPhaseCommit);
}
} );
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryNotHappening.java
|
1,968
|
public class TestRecoveryNotHappening
{
    private final File storeDirectory = TargetDirectory.forTest( getClass() ).makeGraphDbDir();
    private GraphDatabaseService db;
    /* So this test is here to assert that even if we have a scenario where
     * there's a transaction which is started and prepared, but lacking
     * commit or done, also unknown to the tx log, the store still recovers.
     *
     * The main problem was that first each data source recovered what it could,
     * but kept any 2PC transactions that it didn't know what to do with around
     * for the tx manager, later during the recovery process, telling them what
     * to do with. If the tx manager (the tx log it has) didn't have any
     * recollection of any such a transaction it wouldn't even notify that data
     * source. The result would be that the database started, but one or more
     * data sources would be in a state where it was awaiting recovery to complete.
     *
     * The particular case bringing this up was a transaction that failed to write
     * the 2PC log entry, due to the thread being interrupted, but the transaction
     * was successfully rolled back after that.
     */
    @Test
    public void uncompletedPreparedTransactionUnknownToTxLogWontPreventRecovery() throws Exception
    {
        // Given
        // * a log containing a START and PREPARE entry
        // * an empty tx log
        create2PCTransactionAndShutDownNonClean();
        modifyTransactionMakingItLookPreparedAndUncompleted();
        // When
        // * starting (and recovering) this store should not result in exception
        startDb();
        // Then
        // * it should have started and recovered properly and be able to handle requests
        createNodeWithNameProperty( db, "Everything is OK" );
    }
    // Runs main() in a forked JVM (see below) which commits a 2PC transaction and
    // then exits without a clean shutdown, leaving the store needing recovery.
    private void create2PCTransactionAndShutDownNonClean() throws Exception
    {
        assertEquals( 0, getRuntime().exec( new String[] { "java", "-cp", getProperty( "java.class.path" ),
                getClass().getName(), storeDirectory.getAbsolutePath() } ).waitFor() );
    }
    // Strips DONE and TWO_PHASE_COMMIT entries from the logical log (leaving
    // START + PREPARE) and empties the tx manager's log, producing exactly the
    // problematic state described in the class comment.
    private void modifyTransactionMakingItLookPreparedAndUncompleted() throws Exception
    {
        filterNeostoreLogicalLog( new DefaultFileSystemAbstraction(), storeDirectory.getAbsolutePath(), new LogHookAdapter<LogEntry>()
        {
            @Override
            public boolean accept( LogEntry item )
            {
                return !(item instanceof LogEntry.Done) && !(item instanceof LogEntry.TwoPhaseCommit);
            }
        } );
        truncateFile( new File( storeDirectory, "tm_tx_log.1" ), 0 );
    }
    // Entry point for the forked JVM: do one 2PC (graph + index) transaction, then
    // exit(0) without shutting the db down.
    public static void main( String[] args )
    {
        String storeDir = args[0];
        GraphDatabaseService db = new GraphDatabaseFactory().newEmbeddedDatabase( storeDir );
        createNodeWithNameProperty( db, "test" );
        System.exit( 0 );
    }
    private void startDb()
    {
        db = new GraphDatabaseFactory().newEmbeddedDatabase( storeDirectory.getAbsolutePath() );
    }
    // Creates a node with a name property and indexes it; touching both the graph
    // and the index makes the transaction two-phase.
    private static Node createNodeWithNameProperty( GraphDatabaseService db, String name )
    {
        try(Transaction tx = db.beginTx())
        {
            Node node = db.createNode();
            node.setProperty( "name", name );
            db.index().forNodes( "index" ).add( node, "name", name );
            tx.success();
            return node;
        }
    }
    @After
    public void doAfter()
    {
        if ( db != null )
            db.shutdown();
    }
}
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryNotHappening.java
|
1,969
|
public class TestRecoveryMultipleDataSources
{
    private static final String dir = forTest( TestRecoveryMultipleDataSources.class ).makeGraphDbDir().getAbsolutePath();
    /**
     * Tests an issue where loading all relationship types and property indexes after
     * the neostore data source had been started internally. The db would be in a
     * state where it would need recovery for the neostore data source, as well as some
     * other data source. This would fail since eventually TxManager#getTransaction()
     * would be called, which would fail since it hadn't as of yet recovered fully.
     * Whereas that failure would happen in a listener and merely be logged, one effect
     * of it would be that there would seem to be no relationship types in the database.
     */
    @Test
    public void recoverNeoAndIndexHavingAllRelationshipTypesAfterRecovery() throws Exception
    {
        // Given (create transactions and kill process, leaving it needing for recovery)
        deleteRecursively( new File( dir ) );
        // Fork a JVM running main() below, which writes to both neostore and index
        // data sources and exits uncleanly; exit code 0 means it got that far.
        assertEquals( 0, getRuntime().exec( new String[] { "java", "-cp", getProperty( "java.class.path" ),
                getClass().getName() } ).waitFor() );
        // When
        GraphDatabaseService db = new GraphDatabaseFactory().newEmbeddedDatabase( dir );
        // Then: the relationship type created before the crash must be visible.
        try(Transaction ignored = db.beginTx())
        {
            assertEquals( MyRelTypes.TEST.name(), GlobalGraphOperations.at( db ).getAllRelationshipTypes().iterator().next().name() );
        }
        finally
        {
            db.shutdown();
        }
    }
    // Forked-JVM entry point: commit a graph tx, rotate logs, commit an index tx,
    // then exit(0) WITHOUT shutting down — leaving both data sources needing recovery.
    public static void main( String[] args )
    {
        GraphDatabaseAPI db = (GraphDatabaseAPI) new GraphDatabaseFactory().newEmbeddedDatabase( dir );
        Transaction tx = db.beginTx();
        db.createNode().createRelationshipTo( db.createNode(), MyRelTypes.TEST );
        tx.success();
        tx.close();
        db.getDependencyResolver().resolveDependency( XaDataSourceManager.class ).rotateLogicalLogs();
        tx = db.beginTx();
        db.index().forNodes( "index" ).add( db.createNode(), dir, db.createNode() );
        tx.success();
        tx.close();
        exit( 0 );
    }
}
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryMultipleDataSources.java
|
1,970
|
private static class VerifyLastTxId implements Task
{
private long tx;
private String dataSource;
VerifyLastTxId( String dataSource, long tx )
{
this.dataSource = dataSource;
this.tx = tx;
}
@Override
@SuppressWarnings("deprecation")
public void run( GraphDatabaseAPI graphdb )
{
assertEquals( tx, dsManager( graphdb ).getXaDataSource( dataSource ).getLastCommittedTxId() );
}
}
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryLogTimingIssues.java
|
1,971
|
    /** Task that cleanly shuts down the subprocess database. */
    static class Shutdown implements Task
    {
        @Override
        public void run( @SuppressWarnings("deprecation") GraphDatabaseAPI graphdb )
        {
            graphdb.shutdown();
        }
    }
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryLogTimingIssues.java
|
1,972
|
static class RotateLogs implements Task
{
private final Set<String> dataSources = new HashSet<>();
RotateLogs( String... dataSources )
{
this.dataSources.addAll( Arrays.asList( dataSources ) );
}
@Override
@SuppressWarnings("deprecation")
public void run( GraphDatabaseAPI graphdb )
{
try
{
if ( dataSources.isEmpty() )
dsManager( graphdb ).getNeoStoreDataSource().rotateLogicalLog();
else
{
for ( XaDataSource ds : dsManager( graphdb ).getAllRegisteredDataSources() )
{
if ( dataSources.contains( ds.getName() ) )
ds.rotateLogicalLog();
}
}
}
catch ( IOException e )
{
e.printStackTrace();
throw new RuntimeException( e );
}
}
}
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryLogTimingIssues.java
|
1,973
|
    /**
     * Task verifying that all committed transactions up to {@code highestTxId}
     * can still be extracted from the logical logs after recovery.
     */
    static class GetCommittedTransactions implements Task
    {
        // Exclusive upper bound on historical log versions to open.
        private final long highestLogVersion;
        // Highest transaction id expected to be extractable.
        private final long highestTxId;
        public GetCommittedTransactions( long highestLogVersion, long highestTxId )
        {
            this.highestLogVersion = highestLogVersion;
            this.highestTxId = highestTxId;
        }
        @Override
        @SuppressWarnings("deprecation")
        public void run( GraphDatabaseAPI graphdb )
        {
            try
            {
                XaDataSource dataSource = dsManager( graphdb ).getNeoStoreDataSource();
                // Open (and immediately close) each historical log version —
                // presumably to verify each file exists and is readable; TODO confirm.
                for ( long logVersion = 0; logVersion < highestLogVersion; logVersion++ )
                {
                    dataSource.getLogicalLog( logVersion ).close();
                }
                // Extract txs 2..highestTxId into a null sink; the extraction itself
                // is the assertion (it is expected to fail if a tx is missing).
                LogExtractor extractor = dataSource.getLogExtractor( 2, highestTxId );
                try
                {
                    for ( long txId = 2; txId <= highestTxId; txId++ )
                    {
                        extractor.extractNext( NullLogBuffer.INSTANCE );
                    }
                }
                finally
                {
                    extractor.close();
                }
            }
            catch ( Exception e )
            {
                throw new RuntimeException( e );
            }
        }
    }
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryLogTimingIssues.java
|
1,974
|
    /**
     * Task committing a single graph-only transaction: one parent node with
     * ten children, each connected via a TYPE relationship.
     */
    static class DoSimpleTransaction implements Task
    {
        @Override
        public void run( @SuppressWarnings("deprecation") GraphDatabaseAPI graphdb )
        {
            try(Transaction tx = graphdb.beginTx())
            {
                Node parent = graphdb.createNode();
                for ( int i = 0; i < 10; i++ )
                {
                    Node child = graphdb.createNode();
                    parent.createRelationshipTo( child, TYPE );
                }
                tx.success();
            }
        }
    }
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryLogTimingIssues.java
|
1,975
|
    /**
     * Like DoSimpleTransaction, but also adds the parent node to an index —
     * touching both graph and index data sources in the same transaction.
     */
    static class DoGraphAndIndexTransaction implements Task
    {
        @Override
        public void run( @SuppressWarnings("deprecation") GraphDatabaseAPI graphdb )
        {
            try(Transaction tx = graphdb.beginTx())
            {
                Node parent = graphdb.createNode();
                graphdb.index().forNodes( "index" ).add( parent, "name", "test" );
                for ( int i = 0; i < 10; i++ )
                {
                    Node child = graphdb.createNode();
                    parent.createRelationshipTo( child, TYPE );
                }
                tx.success();
            }
        }
    }
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryLogTimingIssues.java
|
1,976
|
public class TestRecoveryLogTimingIssues extends AbstractSubProcessTestBase
{
private static final DynamicRelationshipType TYPE = withName( "TYPE" );
private final CountDownLatch breakpointNotification = new CountDownLatch( 1 );
private final BreakPoint SET_VERSION = BreakPoint.thatCrashesTheProcess( breakpointNotification, 0,
NeoStore.class, "setVersion", long.class );
private final BreakPoint RELEASE_CURRENT_LOG_FILE = BreakPoint.thatCrashesTheProcess( breakpointNotification, 0,
XaLogicalLog.class, "releaseCurrentLogFile" );
private final BreakPoint RENAME_LOG_FILE = BreakPoint.thatCrashesTheProcess( breakpointNotification, 0,
XaLogicalLog.class, "renameLogFileToRightVersion", File.class, long.class );
private final BreakPoint SET_VERSION_2 = BreakPoint.thatCrashesTheProcess( breakpointNotification, 1,
NeoStore.class, "setVersion", long.class );
private final BreakPoint RELEASE_CURRENT_LOG_FILE_2 = BreakPoint.thatCrashesTheProcess( breakpointNotification, 1,
XaLogicalLog.class, "releaseCurrentLogFile" );
private final BreakPoint RENAME_LOG_FILE_2 = BreakPoint.thatCrashesTheProcess( breakpointNotification, 1,
XaLogicalLog.class, "renameLogFileToRightVersion", File.class, long.class );
private final BreakPoint EXIT_RENAME_LOG_FILE_LUCENE = BreakPoint.thatCrashesTheProcess( Event.EXIT, breakpointNotification, 0,
BreakPoint.stackTraceMustContainClass( LuceneDataSource.class ), XaLogicalLog.class, "renameLogFileToRightVersion", File.class, long.class );
private final BreakPoint EXIT_RENAME_LOG_FILE_NIONEO = BreakPoint.thatCrashesTheProcess( Event.EXIT, breakpointNotification, 0,
BreakPoint.stackTraceMustNotContainClass( LuceneDataSource.class ), XaLogicalLog.class, "renameLogFileToRightVersion", File.class, long.class );
private final BreakPoint[] breakpoints = new BreakPoint[] {
SET_VERSION,
RELEASE_CURRENT_LOG_FILE,
RENAME_LOG_FILE,
SET_VERSION_2,
RELEASE_CURRENT_LOG_FILE_2,
RENAME_LOG_FILE_2,
EXIT_RENAME_LOG_FILE_LUCENE,
EXIT_RENAME_LOG_FILE_NIONEO };
@Override
protected BreakPoint[] breakpoints( int id )
{
return breakpoints;
}
private final Bootstrapper bootstrapper = killAwareBootstrapper( this, 0, stringMap(
GraphDatabaseSettings.keep_logical_logs.name(), Settings.TRUE ) );
@Override
protected Bootstrapper bootstrap( int id ) throws IOException
{
return bootstrapper;
}
static class DoSimpleTransaction implements Task
{
@Override
public void run( @SuppressWarnings("deprecation") GraphDatabaseAPI graphdb )
{
try(Transaction tx = graphdb.beginTx())
{
Node parent = graphdb.createNode();
for ( int i = 0; i < 10; i++ )
{
Node child = graphdb.createNode();
parent.createRelationshipTo( child, TYPE );
}
tx.success();
}
}
}
static class DoGraphAndIndexTransaction implements Task
{
@Override
public void run( @SuppressWarnings("deprecation") GraphDatabaseAPI graphdb )
{
try(Transaction tx = graphdb.beginTx())
{
Node parent = graphdb.createNode();
graphdb.index().forNodes( "index" ).add( parent, "name", "test" );
for ( int i = 0; i < 10; i++ )
{
Node child = graphdb.createNode();
parent.createRelationshipTo( child, TYPE );
}
tx.success();
}
}
}
static class RotateLogs implements Task
{
private final Set<String> dataSources = new HashSet<>();
RotateLogs( String... dataSources )
{
this.dataSources.addAll( Arrays.asList( dataSources ) );
}
@Override
@SuppressWarnings("deprecation")
public void run( GraphDatabaseAPI graphdb )
{
try
{
if ( dataSources.isEmpty() )
dsManager( graphdb ).getNeoStoreDataSource().rotateLogicalLog();
else
{
for ( XaDataSource ds : dsManager( graphdb ).getAllRegisteredDataSources() )
{
if ( dataSources.contains( ds.getName() ) )
ds.rotateLogicalLog();
}
}
}
catch ( IOException e )
{
e.printStackTrace();
throw new RuntimeException( e );
}
}
}
private static XaDataSourceManager dsManager( @SuppressWarnings("deprecation") GraphDatabaseAPI graphdb )
{
return graphdb.getDependencyResolver().resolveDependency( XaDataSourceManager.class );
}
static class GetCommittedTransactions implements Task
{
private final long highestLogVersion;
private final long highestTxId;
public GetCommittedTransactions( long highestLogVersion, long highestTxId )
{
this.highestLogVersion = highestLogVersion;
this.highestTxId = highestTxId;
}
@Override
@SuppressWarnings("deprecation")
public void run( GraphDatabaseAPI graphdb )
{
try
{
XaDataSource dataSource = dsManager( graphdb ).getNeoStoreDataSource();
for ( long logVersion = 0; logVersion < highestLogVersion; logVersion++ )
{
dataSource.getLogicalLog( logVersion ).close();
}
LogExtractor extractor = dataSource.getLogExtractor( 2, highestTxId );
try
{
for ( long txId = 2; txId <= highestTxId; txId++ )
{
extractor.extractNext( NullLogBuffer.INSTANCE );
}
}
finally
{
extractor.close();
}
}
catch ( Exception e )
{
throw new RuntimeException( e );
}
}
}
static class Shutdown implements Task
{
@Override
public void run( @SuppressWarnings("deprecation") GraphDatabaseAPI graphdb )
{
graphdb.shutdown();
}
}
private static class VerifyLastTxId implements Task
{
private long tx;
private String dataSource;
VerifyLastTxId( String dataSource, long tx )
{
this.dataSource = dataSource;
this.tx = tx;
}
@Override
@SuppressWarnings("deprecation")
public void run( GraphDatabaseAPI graphdb )
{
assertEquals( tx, dsManager( graphdb ).getXaDataSource( dataSource ).getLastCommittedTxId() );
}
}
private void crashDuringRotateAndVerify( long highestLogVersion, long highestTxId ) throws Exception
{
runInThread( new RotateLogs() );
breakpointNotification.await();
startSubprocesses();
run( new GetCommittedTransactions( highestLogVersion, highestTxId ) );
}
@Test
public void logsShouldContainAllTransactionsEvenIfCrashJustBeforeNeostoreSetVersion() throws Exception
{
SET_VERSION.enable();
run( new DoSimpleTransaction() );
// tx(2) is the first one, for creating the relationship type
// tx(3) is the second one, for doing the actual transaction in DoSimpleTransaction
crashDuringRotateAndVerify( 1, 3 );
}
@Test
public void logsShouldContainAllTransactionsEvenIfCrashJustBeforeReleaseCurrentLogFile() throws Exception
{
RELEASE_CURRENT_LOG_FILE.enable();
run( new DoSimpleTransaction() );
crashDuringRotateAndVerify( 1, 3 );
}
@Test
public void logsShouldContainAllTransactionsEvenIfCrashJustAfterSetActiveVersion() throws Exception
{
RENAME_LOG_FILE.enable();
run( new DoSimpleTransaction() );
crashDuringRotateAndVerify( 1, 3 );
}
@Test
public void logsShouldContainAllTransactionsEvenIfCrashJustBeforeNeostoreSetVersionTwoLogs() throws Exception
{
SET_VERSION_2.enable();
run( new DoSimpleTransaction() );
run( new RotateLogs() );
run( new DoSimpleTransaction() );
crashDuringRotateAndVerify( 2, 4 );
}
@Test
public void logsShouldContainAllTransactionsEvenIfCrashJustBeforeReleaseCurrentLogFileTwoLogs() throws Exception
{
RELEASE_CURRENT_LOG_FILE_2.enable();
run( new DoSimpleTransaction() );
run( new RotateLogs() );
run( new DoSimpleTransaction() );
crashDuringRotateAndVerify( 2, 4 );
}
@Test
public void logsShouldContainAllTransactionsEvenIfCrashJustAfterSetActiveVersionTwoLogs() throws Exception
{
RENAME_LOG_FILE_2.enable();
run( new DoSimpleTransaction() );
run( new RotateLogs() );
run( new DoSimpleTransaction() );
crashDuringRotateAndVerify( 2, 4 );
}
@Test
public void nextLogVersionAfterCrashBetweenActiveSetToCleanAndRename() throws Exception
{
RENAME_LOG_FILE.enable();
runInThread( new Shutdown() );
breakpointNotification.await();
startSubprocesses();
run( new Shutdown() );
}
@Test
public void nextLogVersionAfterCrashBetweenRenameAndIncrementVersionInCloseShouldBeTheNextOne() throws Exception
{
EXIT_RENAME_LOG_FILE_LUCENE.enable();
runInThread( new Shutdown() );
breakpointNotification.await();
startSubprocesses();
run( new Shutdown() );
assertLuceneLogVersionsExists( 0, 1 );
}
@Test
public void nextLogVersionAfterCrashBetweenRenameAndIncrementVersionInRotateShouldBeTheNextOne() throws Exception
{
EXIT_RENAME_LOG_FILE_NIONEO.enable();
run( new DoSimpleTransaction() );
runInThread( new RotateLogs( LuceneDataSource.DEFAULT_NAME ) );
breakpointNotification.await();
startSubprocesses();
run( new Shutdown() );
assertLuceneLogVersionsExists( 0, 1 );
}
@Test
public void lastLuceneTxAfterRecoveredCrashBetweenRenameAndIncrementVersionInCloseShouldBeCorrect() throws Exception
{
EXIT_RENAME_LOG_FILE_LUCENE.enable();
run( new DoGraphAndIndexTransaction() );
runInThread( new Shutdown() );
breakpointNotification.await();
startSubprocesses();
run( new VerifyLastTxId( LuceneDataSource.DEFAULT_NAME, 3 ) );
}
@Test
public void lastNeoTxAfterRecoveredCrashBetweenRenameAndIncrementVersionInCloseShouldBeCorrect() throws Exception
{
EXIT_RENAME_LOG_FILE_NIONEO.enable();
run( new DoSimpleTransaction() );
runInThread( new Shutdown() );
breakpointNotification.await();
startSubprocesses();
run( new VerifyLastTxId( NeoStoreXaDataSource.DEFAULT_DATA_SOURCE_NAME, 3 ) );
}
private void assertLuceneLogVersionsExists( int... versions ) throws Exception
{
Set<Integer> versionSet = new HashSet<>();
for ( int version : versions )
versionSet.add( version );
File path = new File( target.existingDirectory( "graphdb." + 0 ), "index" );
for ( File file : nonNull( path.listFiles() ) )
{
if ( file.getName().contains( ".log.v" ) )
{
int v = parseInt( file.getName().substring( file.getName().lastIndexOf( ".v" )+2 ) );
assertTrue( "Unexpected version found " + v, versionSet.remove( v ) );
}
}
assertTrue( "These versions weren't found " + versionSet, versionSet.isEmpty() );
}
}
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryLogTimingIssues.java
|
1,977
|
    /**
     * Task asserting that the node written by the write tasks survived recovery:
     * it must be findable through the "nodes" index and carry correct="yes".
     */
    static class Verification implements Task
    {
        @Override
        public void run( GraphDatabaseAPI graphdb )
        {
            assertNotNull( "No graph database", graphdb );
            Index<Node> index = graphdb.index().forNodes( "nodes" );
            assertNotNull( "No index", index );
            Node node = index.get( "name", "value" ).getSingle();
            assertNotNull( "could not get the node", node );
            assertEquals( "yes", node.getProperty( "correct" ) );
        }
    }
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryIssues.java
|
1,978
|
    /**
     * Task writing in TWO transactions: the first creates the node (graph only),
     * the second sets the property and indexes it (graph + index, i.e. 2PC).
     */
    static class TwoWriteTransactions implements Task
    {
        @Override
        public void run( GraphDatabaseAPI graphdb )
        {
            Node node;
            try(Transaction tx = graphdb.beginTx())
            {
                node = graphdb.createNode();
                tx.success();
            }
            try(Transaction tx = graphdb.beginTx())
            {
                node.setProperty( "correct", "yes" );
                graphdb.index().forNodes( "nodes" ).add( node, "name", "value" );
                tx.success();
            }
        }
    }
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryIssues.java
|
1,979
|
    /**
     * Task doing everything (create, property, index) in ONE transaction, so the
     * logical log contains only that single 2PC transaction.
     */
    static class SingleWriteTransaction implements Task
    {
        @Override
        public void run( GraphDatabaseAPI graphdb )
        {
            try(Transaction tx = graphdb.beginTx())
            {
                Node node = graphdb.createNode();
                node.setProperty( "correct", "yes" );
                graphdb.index().forNodes( "nodes" ).add( node, "name", "value" );
                tx.success();
            }
        }
    }
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryIssues.java
|
1,980
|
    /**
     * Task used only as a breakpoint target: a debugger breakpoint on run() kills
     * the subprocess before this body executes, so reaching the throw is a bug.
     */
    static class Crash implements Task
    {
        @Override
        public void run( GraphDatabaseAPI graphdb )
        {
            throw new AssertionError( "Should not reach here - the breakpoint should avoid it" );
        }
    }
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryIssues.java
|
1,981
|
{
@Override
protected void shutdown( GraphDatabaseService graphdb, boolean normal )
{
if ( normal ) super.shutdown( graphdb, normal );
};
};
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryIssues.java
|
1,982
|
{
@Override
protected void callback( DebugInterface debug ) throws KillSubProcess
{
afterCrash.countDown();
throw KillSubProcess.withExitCode( -1 );
}
}, };
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryIssues.java
|
1,983
|
    /**
     * RecoveryVerifier that accepts every transaction but counts how many of the
     * recovered ones were two-phase, for later assertion by the test.
     */
    private static class CountingRecoveryVerifier implements RecoveryVerifier
    {
        // Number of recovered transactions that were NOT one-phase.
        private int count2PC;
        @Override
        public boolean isValid( TransactionInfo txInfo )
        {
            if ( !txInfo.isOnePhase() )
            {
                count2PC++;
            }
            // Always valid — this verifier only observes, never rejects.
            return true;
        }
    }
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryVerification.java
|
1,984
|
public class TestInstanceJoin
{
    private final TargetDirectory dir = forTest( getClass() );
    @Test
    public void makeSureSlaveCanJoinEvenIfTooFarBackComparedToMaster() throws Exception
    {
        String key = "foo";
        String value = "bar";
        HighlyAvailableGraphDatabase master = null;
        HighlyAvailableGraphDatabase slave = null;
        try
        {
            // Master keeps only 1 logical log file, so rotated-away transactions disappear.
            master = start( dir.cleanDirectory( "master" ).getAbsolutePath(), 0, stringMap( keep_logical_logs.name(),
                    "1 files", ClusterSettings.initial_hosts.name(), "127.0.0.1:5001" ) );
            createNode( master, "something", "unimportant" );
            // Need to start and shutdown the slave so when we start it up later it verifies instead of copying
            slave = start( dir.cleanDirectory( "slave" ).getAbsolutePath(), 1,
                    stringMap( ClusterSettings.initial_hosts.name(), "127.0.0.1:5001,127.0.0.1:5002" ) );
            slave.shutdown();
            // This is the transaction the slave must later be able to pull.
            long nodeId = createNode( master, key, value );
            createNode( master, "something", "unimportant" );
            // Rotating, moving the above transactions away so they are removed on shutdown.
            rotateLogs( getXaDataSourceManager( master ), getKernelPanicGenerator( master ),
                    master.getDependencyResolver().resolveDependency( StringLogger.class ) );
            /*
             * We need to shutdown - rotating is not enough. The problem is that log positions are cached and they
             * are not removed from the cache until we run into the cache limit. This means that the information
             * contained in the log can actually be available even if the log is removed. So, to trigger the case
             * of the master information missing from the master we need to also flush the log entry cache - hence,
             * restart.
             */
            master.shutdown();
            master = start( dir.existingDirectory( "master" ).getAbsolutePath(), 0, stringMap( keep_logical_logs.name(),
                    "1 files", ClusterSettings.initial_hosts.name(), "127.0.0.1:5001" ) );
            slave = start( dir.existingDirectory( "slave" ).getAbsolutePath(), 1,
                    stringMap( ClusterSettings.initial_hosts.name(), "127.0.0.1:5001,127.0.0.1:5002" ) );
            // The slave is now "too far back": it must still manage to pull updates.
            slave.getDependencyResolver().resolveDependency( UpdatePuller.class ).pullUpdates();
            try ( Transaction ignore = slave.beginTx() )
            {
                assertEquals( "store contents differ", value, slave.getNodeById( nodeId ).getProperty( key ) );
            }
        }
        finally
        {
            if ( slave != null )
            {
                slave.shutdown();
            }
            if ( master != null )
            {
                master.shutdown();
            }
        }
    }
    private KernelPanicEventGenerator getKernelPanicGenerator( HighlyAvailableGraphDatabase database )
    {
        return database.getDependencyResolver().resolveDependency( KernelPanicEventGenerator.class );
    }
    private XaDataSourceManager getXaDataSourceManager( HighlyAvailableGraphDatabase database )
    {
        return database.getDependencyResolver().resolveDependency( XaDataSourceManager.class );
    }
    /** Creates a node with key=value in one transaction and returns its id. */
    private long createNode( HighlyAvailableGraphDatabase db, String key, String value )
    {
        try ( Transaction tx = db.beginTx() )
        {
            Node node = db.createNode();
            node.setProperty( key, value );
            tx.success();
            return node.getId();
        }
    }
    /** Starts an HA instance with ports derived from {@code i} and a 0ms pull interval. */
    private static HighlyAvailableGraphDatabase start( String storeDir, int i, Map<String, String> additionalConfig )
    {
        HighlyAvailableGraphDatabase db = (HighlyAvailableGraphDatabase) new HighlyAvailableGraphDatabaseFactory().
                newHighlyAvailableDatabaseBuilder( storeDir )
                .setConfig( ClusterSettings.cluster_server, "127.0.0.1:" + (5001 + i) )
                .setConfig( ClusterSettings.server_id, i + "" )
                .setConfig( HaSettings.ha_server, "127.0.0.1:" + (6666 + i) )
                .setConfig( HaSettings.pull_interval, "0ms" )
                .setConfig( additionalConfig )
                .newGraphDatabase();
        awaitStart( db );
        return db;
    }
    // Beginning a transaction blocks until the instance has joined the cluster;
    // the immediately-finished tx is just a synchronization point.
    private static void awaitStart( HighlyAvailableGraphDatabase db )
    {
        db.beginTx().finish();
    }
}
| false
|
enterprise_ha_src_test_java_slavetest_TestInstanceJoin.java
|
1,985
|
@SuppressWarnings( "serial" )
public class TestRecoveryIssues extends AbstractSubProcessTestBase
{
private static final byte[] NEOKERNL = { 'N', 'E', 'O', 'K', 'E', 'R', 'N', 'L', '\0' };
private final CountDownLatch afterWrite = new CountDownLatch( 1 ), afterCrash = new CountDownLatch( 1 );
//@Test
public void canRecoverPreparedTransactionByDirectionFromTxManagerAfterCrashInCommit() throws Exception
{
for ( BreakPoint bp : breakpoints )
bp.enable();
runInThread( new TwoWriteTransactions() );
afterWrite.await();
startSubprocesses();
run( new Verification() );
}
//@Test
public void canRecoverPreparedTransactionByDirectionFromTxManagerIfItIsTheOnlyTransactionInTheLogicalLog() throws Exception
{
for ( BreakPoint bp : breakpoints )
bp.enable();
runInThread( new SingleWriteTransaction() );
afterWrite.await();
startSubprocesses();
run( new Verification() );
}
//@Test
public void canRecoverPreparedTransactionByDirectionFromTxManagerIfCrashingTwice() throws Exception
{
stopSubprocesses();
startSubprocesses();
for ( BreakPoint bp : breakpoints )
bp.enable();
runInThread( new TwoWriteTransactions() );
afterWrite.await();
startSubprocesses();
runInThread( new Crash() );
afterCrash.await();
startSubprocesses();
run( new Verification() );
}
static class TwoWriteTransactions implements Task
{
@Override
public void run( GraphDatabaseAPI graphdb )
{
Node node;
try(Transaction tx = graphdb.beginTx())
{
node = graphdb.createNode();
tx.success();
}
try(Transaction tx = graphdb.beginTx())
{
node.setProperty( "correct", "yes" );
graphdb.index().forNodes( "nodes" ).add( node, "name", "value" );
tx.success();
}
}
}
static class SingleWriteTransaction implements Task
{
@Override
public void run( GraphDatabaseAPI graphdb )
{
try(Transaction tx = graphdb.beginTx())
{
Node node = graphdb.createNode();
node.setProperty( "correct", "yes" );
graphdb.index().forNodes( "nodes" ).add( node, "name", "value" );
tx.success();
}
}
}
static class Crash implements Task
{
@Override
public void run( GraphDatabaseAPI graphdb )
{
throw new AssertionError( "Should not reach here - the breakpoint should avoid it" );
}
}
static class Verification implements Task
{
@Override
public void run( GraphDatabaseAPI graphdb )
{
assertNotNull( "No graph database", graphdb );
Index<Node> index = graphdb.index().forNodes( "nodes" );
assertNotNull( "No index", index );
Node node = index.get( "name", "value" ).getSingle();
assertNotNull( "could not get the node", node );
assertEquals( "yes", node.getProperty( "correct" ) );
}
}
private final BreakPoint[] breakpoints = new BreakPoint[] {
new BreakPoint( XaResourceHelpImpl.class, "commit", Xid.class, boolean.class )
{
@Override
protected void callback( DebugInterface debug ) throws KillSubProcess
{
if ( twoPhaseCommitIn( debug.thread() ) )
{
debug.thread().suspend( null );
this.disable();
afterWrite.countDown();
throw KillSubProcess.withExitCode( -1 );
}
}
private boolean twoPhaseCommitIn( DebuggedThread thread )
{
return !Boolean.parseBoolean( thread.getLocal( 1, "onePhase" ) );
}
}, new BreakPoint( Crash.class, "run", InternalAbstractGraphDatabase.class )
{
@Override
protected void callback( DebugInterface debug ) throws KillSubProcess
{
afterCrash.countDown();
throw KillSubProcess.withExitCode( -1 );
}
}, };
@Override
protected BreakPoint[] breakpoints( int id )
{
return breakpoints;
}
private final Bootstrapper bootstrap = getBootstrapperInstance( this );
@Override
protected Bootstrapper bootstrap( int id ) throws IOException
{
return bootstrap;
}
private static Bootstrapper getBootstrapperInstance( TestRecoveryIssues test )
{
try
{
return new Bootstrapper( test, 0 )
{
@Override
protected void shutdown( GraphDatabaseService graphdb, boolean normal )
{
if ( normal ) super.shutdown( graphdb, normal );
};
};
}
catch ( IOException e )
{
throw new RuntimeException( e );
}
}
/**
* Create a log file that fixes a store that has been subject to this issue.
*
* Parameters: [filename] [globalId.time] [globalId.sequence]
*
* Example: TestDoubleRecovery tm_tx_log.1 661819753510181175 3826
*/
public static void main( String... args ) throws Exception
{
TxLog log = new TxLog( new File(args[0]), new DefaultFileSystemAbstraction(), new Monitors() );
byte globalId[] = new byte[NEOKERNL.length + 16];
System.arraycopy( NEOKERNL, 0, globalId, 0, NEOKERNL.length );
ByteBuffer byteBuf = ByteBuffer.wrap( globalId );
byteBuf.position( NEOKERNL.length );
byteBuf.putLong( Long.parseLong( args[1] ) ).putLong( Long.parseLong( args[2] ) );
log.txStart( globalId );
log.addBranch( globalId, UTF8.encode( "414141" ) );
log.addBranch( globalId, LuceneDataSource.DEFAULT_BRANCH_ID );
log.markAsCommitting( globalId, ForceMode.unforced );
log.force();
log.close();
}
}
| false
|
community_neo4j_src_test_java_recovery_TestRecoveryIssues.java
|
1,986
|
public class TestConcurrentRotation extends AbstractSubProcessTestBase
{
private final CountDownLatch barrier1 = new CountDownLatch( 1 ), barrier2 = new CountDownLatch( 1 );
private DebuggedThread thread;
private final BreakPoint commitIndexWriter = new BreakPoint( IndexWriter.class, "commit" )
{
private int counter = 0;
@Override
protected void callback( DebugInterface debug ) throws KillSubProcess
{
if ( counter++ > 0 ) return;
thread = debug.thread().suspend( this );
this.disable();
barrier1.countDown();
}
};
private final BreakPoint resumeFlushThread = new BreakPoint( TestConcurrentRotation.class, "resumeFlushThread" )
{
@Override
protected void callback( DebugInterface debug ) throws KillSubProcess
{
thread.resume();
this.disable();
}
};
private final BreakPoint done = new BreakPoint( TestConcurrentRotation.class, "rotateDone" )
{
@Override
protected void callback( DebugInterface debug ) throws KillSubProcess
{
this.disable();
barrier2.countDown();
}
};
static void resumeFlushThread()
{ // Activates breakpoint
}
static void rotateDone()
{ // Activate breakpoint
}
@Override
protected BreakPoint[] breakpoints( int id )
{
return new BreakPoint[] { commitIndexWriter, resumeFlushThread.enable(), done.enable() };
}
@Test
public void rotateLogAtTheSameTimeInitializeIndexWriters() throws Exception
{
run( new CreateInitialStateTask() );
restart();
commitIndexWriter.enable();
run( new LoadIndexesTask( 2, false ) );
RotateIndexLogTask rotateTask = new RotateIndexLogTask();
runInThread( rotateTask );
barrier1.await();
run( new LoadIndexesTask( 3, true ) );
resumeFlushThread();
barrier2.await();
run( new Verifier() );
}
private static class Verifier implements Task
{
@Override
public void run( GraphDatabaseAPI graphdb )
{
try(Transaction ignored = graphdb.beginTx())
{
// TODO: Pass a node reference around of assuming the id will be deterministically assigned,
// artifact of removing the reference node, upon which this test used to depend.
assertTrue( (Boolean) graphdb.getNodeById(3).getProperty( "success" ) );
}
}
}
private static class CreateInitialStateTask implements Task
{
@Override
public void run( GraphDatabaseAPI graphdb )
{
try(Transaction tx = graphdb.beginTx())
{
for ( int i = 0; i < 3; i++ ) graphdb.index().forNodes( "index" + i ).add( graphdb.createNode(), "name", "" + i );
tx.success();
}
}
}
private static class LoadIndexesTask implements Task
{
private final int count;
private final boolean resume;
public LoadIndexesTask( int count, boolean resume )
{
this.count = count;
this.resume = resume;
}
@Override
public void run( GraphDatabaseAPI graphdb )
{
try(Transaction ignored = graphdb.beginTx())
{
for ( int i = 0; i < count; i++ ) graphdb.index().forNodes( "index" + i ).get( "name", i ).getSingle();
}
if ( resume ) resumeFlushThread();
}
}
private static class RotateIndexLogTask implements Task
{
@Override
public void run( GraphDatabaseAPI graphdb )
{
try
{
graphdb.getDependencyResolver().resolveDependency( XaDataSourceManager.class )
.getXaDataSource( LuceneDataSource.DEFAULT_NAME ).rotateLogicalLog();
setSuccess( graphdb, true );
}
catch ( Exception e )
{
setSuccess( graphdb, false );
throw Exceptions.launderedException( e );
}
finally
{
rotateDone();
}
}
private void setSuccess( GraphDatabaseAPI graphdb, boolean success )
{
try(Transaction tx = graphdb.beginTx())
{
Node node = graphdb.createNode();
node.setProperty( "success", success );
tx.success();
}
}
}
}
| false
|
community_neo4j_src_test_java_synchronization_TestConcurrentRotation.java
|
1,987
|
private static class CreateData implements Task
{
@Override
public void run( GraphDatabaseAPI graphdb )
{
try(Transaction tx = graphdb.beginTx())
{
Node node = graphdb.createNode();
node.setProperty( "value", "present" );
graphdb.index().forNodes( "nodes" ).add( node, "value", "present" );
enableBreakPoints();
tx.success();
}
done();
}
}
| false
|
community_neo4j_src_test_java_visibility_TestPropertyReadOnNewEntityBeforeLockRelease.java
|
1,988
|
{
@Override
protected void callback( DebugInterface debug ) throws KillSubProcess
{
latch2.countDown();
this.disable();
}
};
| false
|
community_neo4j_src_test_java_visibility_TestPropertyReadOnNewEntityBeforeLockRelease.java
|
1,989
|
{
@Override
protected void callback( DebugInterface debug ) throws KillSubProcess
{
thread.resume();
this.disable();
}
}, done = new BreakPoint( TestPropertyReadOnNewEntityBeforeLockRelease.class, "done" )
| false
|
community_neo4j_src_test_java_visibility_TestPropertyReadOnNewEntityBeforeLockRelease.java
|
1,990
|
{
@Override
protected void callback( DebugInterface debug ) throws KillSubProcess
{
lockReleaserCommit.enable();
this.disable();
}
}, resumeThread = new BreakPoint( TestPropertyReadOnNewEntityBeforeLockRelease.class, "resumeThread" )
| false
|
community_neo4j_src_test_java_visibility_TestPropertyReadOnNewEntityBeforeLockRelease.java
|
1,991
|
class TaskRunner implements Runnable
{
private final Task task;
TaskRunner( Task task )
{
this.task = task;
}
@Override
public void run()
{
try
{
task.run( graphdb );
}
finally
{
completion.countDown();
}
}
}
| false
|
community_neo4j_src_test_java_visibility_TestPropertyReadOnNewEntityBeforeLockRelease.java
|
1,992
|
{
@Override
protected void callback( DebugInterface debug ) throws KillSubProcess
{
thread = debug.thread().suspend( this );
resumeThread.enable();
this.disable();
latch1.countDown();
}
}, enableBreakPoints = new BreakPoint( TestPropertyReadOnNewEntityBeforeLockRelease.class, "enableBreakPoints" )
| false
|
community_neo4j_src_test_java_visibility_TestPropertyReadOnNewEntityBeforeLockRelease.java
|
1,993
|
@SuppressWarnings( "serial" )
public class TestPropertyReadOnNewEntityBeforeLockRelease extends AbstractSubProcessTestBase
{
private final CountDownLatch latch1 = new CountDownLatch( 1 ), latch2 = new CountDownLatch( 1 );
@Test
public void shouldBeAbleToReadPropertiesFromNewNodeReturnedFromIndex() throws Exception
{
runInThread( new CreateData() );
latch1.await();
run( new ReadData() );
latch2.await();
}
private static class CreateData implements Task
{
@Override
public void run( GraphDatabaseAPI graphdb )
{
try(Transaction tx = graphdb.beginTx())
{
Node node = graphdb.createNode();
node.setProperty( "value", "present" );
graphdb.index().forNodes( "nodes" ).add( node, "value", "present" );
enableBreakPoints();
tx.success();
}
done();
}
}
static void enableBreakPoints()
{
// triggers breakpoint
}
static void done()
{
// triggers breakpoint
}
static void resumeThread()
{
// triggers breakpoint
}
private static class ReadData implements Task
{
@Override
public void run( GraphDatabaseAPI graphdb )
{
try(Transaction ignored = graphdb.beginTx())
{
Node node = graphdb.index().forNodes( "nodes" ).get( "value", "present" ).getSingle();
assertNotNull( "did not get the node from the index", node );
assertEquals( "present", node.getProperty( "value" ) );
}
resumeThread();
}
}
private volatile DebuggedThread thread;
private final BreakPoint lockReleaserCommit = new BreakPoint( WritableTransactionState.class, "commit" )
{
@Override
protected void callback( DebugInterface debug ) throws KillSubProcess
{
thread = debug.thread().suspend( this );
resumeThread.enable();
this.disable();
latch1.countDown();
}
}, enableBreakPoints = new BreakPoint( TestPropertyReadOnNewEntityBeforeLockRelease.class, "enableBreakPoints" )
{
@Override
protected void callback( DebugInterface debug ) throws KillSubProcess
{
lockReleaserCommit.enable();
this.disable();
}
}, resumeThread = new BreakPoint( TestPropertyReadOnNewEntityBeforeLockRelease.class, "resumeThread" )
{
@Override
protected void callback( DebugInterface debug ) throws KillSubProcess
{
thread.resume();
this.disable();
}
}, done = new BreakPoint( TestPropertyReadOnNewEntityBeforeLockRelease.class, "done" )
{
@Override
protected void callback( DebugInterface debug ) throws KillSubProcess
{
latch2.countDown();
this.disable();
}
};
@Override
protected BreakPoint[] breakpoints( int id )
{
return new BreakPoint[] { lockReleaserCommit, enableBreakPoints.enable(), resumeThread, done.enable() };
}
/**
* Version of the test case useful for manual debugging.
*/
public static void main( String... args ) throws Exception
{
final GraphDatabaseAPI graphdb = (GraphDatabaseAPI) new GraphDatabaseFactory().
newEmbeddedDatabase( "target/test-data/" + TestPropertyReadOnNewEntityBeforeLockRelease.class
.getName() + "/graphdb" );
final CountDownLatch completion = new CountDownLatch( 2 );
class TaskRunner implements Runnable
{
private final Task task;
TaskRunner( Task task )
{
this.task = task;
}
@Override
public void run()
{
try
{
task.run( graphdb );
}
finally
{
completion.countDown();
}
}
}
new Thread( new TaskRunner( new CreateData() ) ).start();
new Thread( new TaskRunner( new ReadData() ) ).start();
try
{
completion.await();
}
finally
{
graphdb.shutdown();
}
}
}
| false
|
community_neo4j_src_test_java_visibility_TestPropertyReadOnNewEntityBeforeLockRelease.java
|
1,994
|
{
@Override
public void run()
{
try(Transaction ignored = graphDatabaseService.beginTx())
{
assertThat( (Integer) commonNode.getProperty( PROPERTY_NAME ),
is( PROPERTY_VALUE ) );
}
catch ( Throwable t )
{
problems.add( new Exception( t ) );
}
}
} );
| false
|
community_neo4j_src_test_java_visibility_TestDatasourceCommitOrderDataVisibility.java
|
1,995
|
{
@Override
public void run()
{
try(Transaction ignored = graphDatabaseService.beginTx())
{
Node node = graphDatabaseService.index().forNodes( INDEX_NAME ).get( INDEX_KEY,
INDEX_VALUE ).getSingle();
assertThat( node, is( commonNode) );
}
catch ( Throwable t )
{
problems.add( new Exception( t ) );
}
}
} );
| false
|
community_neo4j_src_test_java_visibility_TestDatasourceCommitOrderDataVisibility.java
|
1,996
|
{
@Override
public void run()
{
try(Transaction ignored = graphDatabaseService.beginTx())
{
assertThat( commonNode.hasProperty( PROPERTY_NAME ), is( false ) );
}
catch ( Throwable t )
{
problems.add( new Exception( t ) );
}
}
} );
| false
|
community_neo4j_src_test_java_visibility_TestDatasourceCommitOrderDataVisibility.java
|
1,997
|
{
@Override
public void run()
{
try(Transaction ignored = graphDatabaseService.beginTx())
{
assertThat( graphDatabaseService.index().forNodes( INDEX_NAME ).get( INDEX_KEY,
INDEX_VALUE ).size(), is( 0 ) );
}
catch ( Throwable t )
{
problems.add( new Exception( t ) );
}
}
} );
| false
|
community_neo4j_src_test_java_visibility_TestDatasourceCommitOrderDataVisibility.java
|
1,998
|
public class TestDatasourceCommitOrderDataVisibility
{
private static final String INDEX_NAME = "foo";
private static final String INDEX_KEY = "bar";
private static final String INDEX_VALUE = "baz";
private static final String PROPERTY_NAME = "quux";
private static final int PROPERTY_VALUE = 42;
private GraphDatabaseService graphDatabaseService;
@Before
public void setUp() throws Exception
{
graphDatabaseService = new TestGraphDatabaseFactory().newImpermanentDatabase();
}
@Test
public void shouldNotMakeIndexWritesVisibleUntilCommit() throws Exception
{
Node commonNode;
try(Transaction tx = graphDatabaseService.beginTx())
{
commonNode = graphDatabaseService.createNode();
tx.success();
}
try(Transaction transaction = graphDatabaseService.beginTx())
{
// index write first so that that datastore is added first
graphDatabaseService.index().forNodes( INDEX_NAME ).add( commonNode, INDEX_KEY, INDEX_VALUE );
commonNode.setProperty( PROPERTY_NAME, PROPERTY_VALUE );
assertNodeIsNotIndexedOutsideThisTransaction();
assertNodeIsUnchangedOutsideThisTransaction(commonNode);
transaction.success();
assertNodeIsNotIndexedOutsideThisTransaction();
assertNodeIsUnchangedOutsideThisTransaction(commonNode);
}
assertNodeIsIndexed(commonNode);
assertNodeHasBeenUpdated( commonNode );
}
private void assertNodeIsNotIndexedOutsideThisTransaction() throws Exception
{
final Collection<Exception> problems = new HashSet<>();
Thread thread = new Thread( new Runnable()
{
@Override
public void run()
{
try(Transaction ignored = graphDatabaseService.beginTx())
{
assertThat( graphDatabaseService.index().forNodes( INDEX_NAME ).get( INDEX_KEY,
INDEX_VALUE ).size(), is( 0 ) );
}
catch ( Throwable t )
{
problems.add( new Exception( t ) );
}
}
} );
thread.start();
thread.join();
if ( problems.size() > 0 )
{
throw problems.iterator().next();
}
}
private void assertNodeIsUnchangedOutsideThisTransaction( final Node commonNode ) throws Exception
{
final Collection<Exception> problems = new HashSet<>();
Thread thread = new Thread( new Runnable()
{
@Override
public void run()
{
try(Transaction ignored = graphDatabaseService.beginTx())
{
assertThat( commonNode.hasProperty( PROPERTY_NAME ), is( false ) );
}
catch ( Throwable t )
{
problems.add( new Exception( t ) );
}
}
} );
thread.start();
thread.join();
if ( problems.size() > 0 )
{
throw problems.iterator().next();
}
}
private void assertNodeIsIndexed(final Node commonNode) throws Exception
{
final Collection<Exception> problems = new HashSet<>();
Thread thread = new Thread( new Runnable()
{
@Override
public void run()
{
try(Transaction ignored = graphDatabaseService.beginTx())
{
Node node = graphDatabaseService.index().forNodes( INDEX_NAME ).get( INDEX_KEY,
INDEX_VALUE ).getSingle();
assertThat( node, is( commonNode) );
}
catch ( Throwable t )
{
problems.add( new Exception( t ) );
}
}
} );
thread.start();
thread.join();
if ( problems.size() > 0 )
{
throw problems.iterator().next();
}
}
private void assertNodeHasBeenUpdated( final Node commonNode ) throws Exception
{
final Collection<Exception> problems = new HashSet<>();
Thread thread = new Thread( new Runnable()
{
@Override
public void run()
{
try(Transaction ignored = graphDatabaseService.beginTx())
{
assertThat( (Integer) commonNode.getProperty( PROPERTY_NAME ),
is( PROPERTY_VALUE ) );
}
catch ( Throwable t )
{
problems.add( new Exception( t ) );
}
}
} );
thread.start();
thread.join();
if ( problems.size() > 0 )
{
throw problems.iterator().next();
}
}
}
| false
|
community_neo4j_src_test_java_visibility_TestDatasourceCommitOrderDataVisibility.java
|
1,999
|
private static class Verifier implements Task
{
@Override
public void run( GraphDatabaseAPI graphdb )
{
try(Transaction ignored = graphdb.beginTx())
{
// TODO: Pass a node reference around of assuming the id will be deterministically assigned,
// artifact of removing the reference node, upon which this test used to depend.
assertTrue( (Boolean) graphdb.getNodeById(3).getProperty( "success" ) );
}
}
}
| false
|
community_neo4j_src_test_java_synchronization_TestConcurrentRotation.java
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.