proj_name
stringclasses 131
values | relative_path
stringlengths 30
228
| class_name
stringlengths 1
68
| func_name
stringlengths 1
48
| masked_class
stringlengths 78
9.82k
| func_body
stringlengths 46
9.61k
| len_input
int64 29
2.01k
| len_output
int64 14
1.94k
| total
int64 55
2.05k
| relevant_context
stringlengths 0
38.4k
|
|---|---|---|---|---|---|---|---|---|---|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/OpenedRocksDB.java
|
CFHandle
|
waitForDrop
|
class CFHandle implements AutoCloseable {
private final ColumnFamilyHandle handle;
private final AtomicInteger refs;
private final RocksDBIteratorPool iterPool;
public CFHandle(RocksDB rocksdb, ColumnFamilyHandle handle) {
E.checkNotNull(handle, "handle");
this.handle = handle;
this.refs = new AtomicInteger(1);
this.iterPool = new RocksDBIteratorPool(rocksdb, this.handle);
}
public synchronized ColumnFamilyHandle get() {
E.checkState(this.handle.isOwningHandle(), "It seems CF has been closed");
assert this.refs.get() >= 1;
return this.handle;
}
public synchronized ReusedRocksIterator newIterator() {
assert this.handle.isOwningHandle();
assert this.refs.get() >= 1;
return this.iterPool.newIterator();
}
public synchronized void open() {
this.refs.incrementAndGet();
}
@Override
public void close() {
if (this.refs.decrementAndGet() <= 0) {
this.iterPool.close();
this.handle.close();
}
}
public synchronized ColumnFamilyHandle waitForDrop() {<FILL_FUNCTION_BODY>}
public synchronized void destroy() {
this.close();
assert this.refs.get() == 0 && !this.handle.isOwningHandle();
}
}
|
assert this.refs.get() >= 1;
// When entering this method, the refs won't increase anymore
final long timeout = TimeUnit.MINUTES.toMillis(30L);
final long unit = 100L;
for (long i = 1; this.refs.get() > 1; i++) {
try {
Thread.sleep(unit);
} catch (InterruptedException ignored) {
// 30s rest api timeout may cause InterruptedException
}
if (i * unit > timeout) {
throw new BackendException("Timeout after %sms to drop CF", timeout);
}
}
assert this.refs.get() == 1;
return this.handle;
| 385
| 184
| 569
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBFeatures.java
|
RocksDBFeatures
|
supportsQueryWithContainsKey
|
class RocksDBFeatures implements BackendFeatures {
@Override
public boolean supportsSharedStorage() {
return false;
}
@Override
public boolean supportsSnapshot() {
return true;
}
@Override
public boolean supportsScanToken() {
return false;
}
@Override
public boolean supportsScanKeyPrefix() {
return true;
}
@Override
public boolean supportsScanKeyRange() {
return true;
}
@Override
public boolean supportsQuerySchemaByName() {
// No index in RocksDB
return false;
}
@Override
public boolean supportsQueryByLabel() {
// No index in RocksDB
return false;
}
@Override
public boolean supportsQueryWithInCondition() {
return false;
}
@Override
public boolean supportsQueryWithRangeCondition() {
return true;
}
@Override
public boolean supportsQueryWithOrderBy() {
return true;
}
@Override
public boolean supportsQueryWithContains() {
// TODO: Need to traversal all items
return false;
}
@Override
public boolean supportsQueryWithContainsKey() {<FILL_FUNCTION_BODY>}
@Override
public boolean supportsQueryByPage() {
return true;
}
@Override
public boolean supportsQuerySortByInputIds() {
return true;
}
@Override
public boolean supportsDeleteEdgeByLabel() {
// No index in RocksDB
return false;
}
@Override
public boolean supportsUpdateVertexProperty() {
// Vertex properties are stored in a cell(column value)
return false;
}
@Override
public boolean supportsMergeVertexProperty() {
return false;
}
@Override
public boolean supportsUpdateEdgeProperty() {
// Edge properties are stored in a cell(column value)
return false;
}
@Override
public boolean supportsTransaction() {
// Supports tx with WriteBatch
return true;
}
@Override
public boolean supportsNumberType() {
return false;
}
@Override
public boolean supportsAggregateProperty() {
return false;
}
@Override
public boolean supportsTtl() {
return false;
}
@Override
public boolean supportsOlapProperties() {
return true;
}
}
|
// TODO: Need to traversal all items
return false;
| 624
| 20
| 644
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBIngester.java
|
RocksDBIngester
|
ingest
|
class RocksDBIngester {
public static final String SST = ".sst";
private static final Logger LOG = Log.logger(RocksDBIngester.class);
private final RocksDB rocksdb;
private final IngestExternalFileOptions options;
public RocksDBIngester(RocksDB rocksdb) {
this.rocksdb = rocksdb;
this.options = new IngestExternalFileOptions();
this.options.setMoveFiles(true);
}
public List<String> ingest(Path path, ColumnFamilyHandle cf) throws RocksDBException {<FILL_FUNCTION_BODY>}
public void ingest(ColumnFamilyHandle cf, List<String> ssts) throws RocksDBException {
LOG.info("Ingest sst files to CF '{}': {}", RocksDBStdSessions.decode(cf.getName()), ssts);
if (!ssts.isEmpty()) {
this.rocksdb.ingestExternalFile(cf, ssts, this.options);
}
}
public static class SuffixFileVisitor extends SimpleFileVisitor<Path> {
private final List<Path> files = new ArrayList<>();
private final String suffix;
public SuffixFileVisitor(String suffix) {
this.suffix = suffix;
}
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
if (file.toString().endsWith(this.suffix)) {
this.files.add(file);
}
return FileVisitResult.CONTINUE;
}
public List<Path> files() {
return this.files;
}
}
}
|
SuffixFileVisitor visitor = new SuffixFileVisitor(SST);
try {
Files.walkFileTree(path, visitor);
} catch (IOException e) {
throw new BackendException("Failed to walk path '%s'", e, path);
}
List<Path> files = visitor.files();
List<String> ssts = new ArrayList<>(files.size());
for (Path file : files) {
File sst = file.toFile();
if (sst.exists() && sst.length() > 0L) {
ssts.add(sst.getPath());
}
}
this.ingest(cf, ssts);
return ssts;
| 430
| 185
| 615
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBIteratorPool.java
|
RocksDBIteratorPool
|
refreshIterator
|
class RocksDBIteratorPool implements AutoCloseable {
private static final Logger LOG = Log.logger(RocksDBIteratorPool.class);
private static final int ITERATOR_POOL_CAPACITY = CoreOptions.CPUS * 2;
private final Queue<RocksIterator> pool;
private final RocksDB rocksdb;
private final ColumnFamilyHandle cfh;
private final String cfName;
public RocksDBIteratorPool(RocksDB rocksdb, ColumnFamilyHandle cfh) {
this.pool = new ArrayBlockingQueue<>(ITERATOR_POOL_CAPACITY);
this.rocksdb = rocksdb;
this.cfh = cfh;
String cfName;
try {
cfName = StringEncoding.decode(this.cfh.getName());
} catch (RocksDBException e) {
LOG.warn("Can't get column family name", e);
cfName = "CF-" + cfh.getID();
}
this.cfName = cfName;
}
public ReusedRocksIterator newIterator() {
return new ReusedRocksIterator();
}
@Override
public void close() {
LOG.debug("Close IteratorPool with pool size {} ({})", this.pool.size(), this);
for (RocksIterator iter; (iter = this.pool.poll()) != null; ) {
this.closeIterator(iter);
}
assert this.pool.isEmpty();
}
@Override
public String toString() {
return "IteratorPool-" + this.cfName;
}
private RocksIterator allocIterator() {
/*
* NOTE: Seems there is a bug if share RocksIterator between threads
* RocksIterator iter = this.pool.poll();
*/
RocksIterator iter = this.pool.poll();
if (iter != null) {
if (this.refreshIterator(iter)) {
// Must refresh when an iterator is reused
return iter;
} else {
// Close it if can't fresh, and create a new one later
this.closeIterator(iter);
}
}
/*
* Create a new iterator if:
* - the pool is empty,
* - or the iterator obtained from the pool is closed,
* - or the iterator can't refresh.
*/
iter = this.createIterator();
try {
iter.status();
return iter;
} catch (RocksDBException e) {
this.closeIterator(iter);
throw new BackendException(e);
}
}
private void releaseIterator(RocksIterator iter) {
assert iter.isOwningHandle();
boolean added = this.pool.offer(iter);
if (!added) {
// Really close iterator if the pool is full
LOG.debug("Really close iterator {} since the pool is full({})",
iter, this.pool.size());
this.closeIterator(iter);
} else {
// Not sure whether it needs to refresh
assert this.refreshIterator(iter);
}
}
private boolean refreshIterator(RocksIterator iter) {<FILL_FUNCTION_BODY>}
private RocksIterator createIterator() {
RocksIterator iter = this.rocksdb.newIterator(this.cfh);
LOG.debug("Create iterator: {}", iter);
return iter;
}
private void closeIterator(RocksIterator iter) {
LOG.debug("Really close iterator {}", iter);
if (iter.isOwningHandle()) {
iter.close();
}
}
protected final class ReusedRocksIterator {
private static final boolean REUSING_ENABLED = false;
private final RocksIterator iterator;
private boolean closed;
public ReusedRocksIterator() {
this.closed = false;
if (REUSING_ENABLED) {
this.iterator = allocIterator();
} else {
this.iterator = createIterator();
}
}
public RocksIterator iterator() {
assert !this.closed;
return this.iterator;
}
public void close() {
if (this.closed) {
return;
}
this.closed = true;
if (REUSING_ENABLED) {
releaseIterator(this.iterator);
} else {
closeIterator(this.iterator);
}
}
}
}
|
if (iter.isOwningHandle()) {
try {
iter.refresh();
return true;
} catch (RocksDBException e) {
LOG.warn("Can't refresh RocksIterator: {}", e.getMessage(), e);
}
}
return false;
| 1,135
| 77
| 1,212
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBStoreProvider.java
|
RocksDBStoreProvider
|
driverVersion
|
class RocksDBStoreProvider extends AbstractBackendStoreProvider {
protected String database() {
return this.graph().toLowerCase();
}
@Override
protected BackendStore newSchemaStore(HugeConfig config, String store) {
return new RocksDBStore.RocksDBSchemaStore(this, this.database(), store);
}
@Override
protected BackendStore newGraphStore(HugeConfig config, String store) {
return new RocksDBStore.RocksDBGraphStore(this, this.database(), store);
}
@Override
public void onCloneConfig(HugeConfig config, String newGraph) {
super.onCloneConfig(config, newGraph);
// NOTE: rocksdb can't use same data path for different graph
String suffix = "_" + newGraph;
String dataPath = config.get(RocksDBOptions.DATA_PATH);
config.setProperty(RocksDBOptions.DATA_PATH.name(), dataPath + suffix);
String walPath = config.get(RocksDBOptions.WAL_PATH);
config.setProperty(RocksDBOptions.WAL_PATH.name(), walPath + suffix);
}
@Override
public void onDeleteConfig(HugeConfig config) {
super.onDeleteConfig(config);
String dataPath = config.get(RocksDBOptions.DATA_PATH);
String walPath = config.get(RocksDBOptions.WAL_PATH);
ConfigUtil.deleteFile(new File(dataPath));
ConfigUtil.deleteFile(new File(walPath));
}
@Override
protected BackendStore newSystemStore(HugeConfig config, String store) {
return new RocksDBStore.RocksDBSystemStore(this, this.database(), store);
}
@Override
public String type() {
return "rocksdb";
}
@Override
public String driverVersion() {<FILL_FUNCTION_BODY>}
}
|
/*
* Versions history:
* [1.0] HugeGraph-1328: supports backend table version checking
* [1.1] HugeGraph-1322: add support for full-text search
* [1.2] #296: support range sortKey feature
* [1.3] #270 & #398: support shard-index and vertex + sortkey prefix,
* also split range table to rangeInt, rangeFloat,
* rangeLong and rangeDouble
* [1.4] #633: support unique index
* [1.5] #680: update index element-id to bin format
* [1.6] #746: support userdata for indexlabel
* [1.7] #820: store vertex properties in one column
* [1.8] #894: encode label id in string index
* [1.9] #295: support ttl for vertex and edge
* [1.10] #1333: support read frequency for property key
* [1.11] #1506: support olap properties
* [1.11] #1533: add meta table in system store
*/
return "1.11";
| 494
| 318
| 812
|
<methods>public non-sealed void <init>() ,public void clear() throws org.apache.hugegraph.backend.BackendException,public void close() throws org.apache.hugegraph.backend.BackendException,public void createSnapshot() ,public java.lang.String graph() ,public void init() ,public boolean initialized() ,public void listen(EventListener) ,public org.apache.hugegraph.backend.store.BackendStore loadGraphStore(HugeConfig) ,public org.apache.hugegraph.backend.store.BackendStore loadSchemaStore(HugeConfig) ,public org.apache.hugegraph.backend.store.BackendStore loadSystemStore(HugeConfig) ,public void onCloneConfig(HugeConfig, java.lang.String) ,public void onDeleteConfig(HugeConfig) ,public void open(java.lang.String) ,public void resumeSnapshot() ,public EventHub storeEventHub() ,public java.lang.String storedVersion() ,public void truncate() ,public void unlisten(EventListener) ,public void waitReady(RpcServer) <variables>private static final Logger LOG,private java.lang.String graph,private final EventHub storeEventHub,protected Map<java.lang.String,org.apache.hugegraph.backend.store.BackendStore> stores
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBTables.java
|
RangeIndex
|
queryByCond
|
class RangeIndex extends IndexTable {
public RangeIndex(String database, String table) {
super(database, table);
}
@Override
protected BackendColumnIterator queryByCond(RocksDBSessions.Session session,
ConditionQuery query) {<FILL_FUNCTION_BODY>}
}
|
assert query.conditionsSize() > 0;
List<Condition> conds = query.syspropConditions(HugeKeys.ID);
E.checkArgument(!conds.isEmpty(), "Please specify the index conditions");
Id prefix = null;
Id min = null;
boolean minEq = false;
Id max = null;
boolean maxEq = false;
for (Condition c : conds) {
Relation r = (Relation) c;
switch (r.relation()) {
case PREFIX:
prefix = (Id) r.value();
break;
case GTE:
minEq = true;
min = (Id) r.value();
break;
case GT:
min = (Id) r.value();
break;
case LTE:
maxEq = true;
max = (Id) r.value();
break;
case LT:
max = (Id) r.value();
break;
default:
E.checkArgument(false, "Unsupported relation '%s'", r.relation());
}
}
E.checkArgumentNotNull(min, "Range index begin key is missing");
byte[] begin = min.asBytes();
if (!minEq) {
BinarySerializer.increaseOne(begin);
}
if (max == null) {
E.checkArgumentNotNull(prefix, "Range index prefix is missing");
return session.scan(this.table(), begin, prefix.asBytes(),
RocksDBSessions.Session.SCAN_PREFIX_END);
} else {
byte[] end = max.asBytes();
int type = maxEq ? RocksDBSessions.Session.SCAN_LTE_END
: RocksDBSessions.Session.SCAN_LT_END;
return session.scan(this.table(), begin, end, type);
}
| 81
| 469
| 550
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdbsst/RocksDBSstSessions.java
|
SstSession
|
commit
|
class SstSession extends Session {
private final Map<String, Changes> batch;
public SstSession() {
this.batch = new HashMap<>();
}
@Override
public void open() {
this.opened = true;
}
@Override
public void close() {
assert this.closeable();
this.opened = false;
}
/**
* Any change in the session
*/
@Override
public boolean hasChanges() {
return !this.batch.isEmpty();
}
/**
* Commit all updates(put/delete) to DB
*/
@Override
public Integer commit() {<FILL_FUNCTION_BODY>}
/**
* Rollback updates not committed in the session
*/
@Override
public void rollback() {
this.batch.clear();
}
@Override
public String dataPath() {
return RocksDBSstSessions.this.dataPath;
}
@Override
public String walPath() {
return RocksDBSstSessions.this.dataPath;
}
/**
* Get property value by name from specified table
*/
@Override
public String property(String table, String property) {
throw new NotSupportException("RocksDBSstStore property()");
}
@Override
public Pair<byte[], byte[]> keyRange(String table) {
return null;
}
@Override
public void compactRange(String table) {
throw new NotSupportException("RocksDBSstStore compactRange()");
}
/**
* Add a KV record to a table
*/
@Override
public void put(String table, byte[] key, byte[] value) {
Changes changes = this.batch.get(table);
if (changes == null) {
changes = new Changes();
this.batch.put(table, changes);
}
changes.add(Pair.of(key, value));
}
/**
* Merge a record to an existing key to a table
* For more details about merge-operator:
* <a href="https://github.com/facebook/rocksdb/wiki/merge-operator">...</a>
*/
@Override
public void merge(String table, byte[] key, byte[] value) {
throw new NotSupportException("RocksDBSstStore merge()");
}
/**
* Merge a record to an existing key to a table and commit immediately
*/
@Override
public void increase(String table, byte[] key, byte[] value) {
throw new NotSupportException("RocksDBSstStore increase()");
}
/**
* Delete a record by key from a table
*/
@Override
public void delete(String table, byte[] key) {
throw new NotSupportException("RocksDBSstStore delete()");
}
/**
* Delete the only one version of a record by key from a table
* NOTE: requires that the key exists and was not overwritten.
*/
@Override
public void deleteSingle(String table, byte[] key) {
throw new NotSupportException("RocksDBSstStore deleteSingle()");
}
/**
* Delete a record by key(or prefix with key) from a table
*/
@Override
public void deletePrefix(String table, byte[] key) {
throw new NotSupportException("RocksDBSstStore deletePrefix()");
}
/**
* Delete a range of keys from a table
*/
@Override
public void deleteRange(String table, byte[] keyFrom, byte[] keyTo) {
throw new NotSupportException("RocksDBSstStore deleteRange()");
}
/**
* Get a record by key from a table
*/
@Override
public byte[] get(String table, byte[] key) {
return null;
}
/**
* Get records by a list of keys from a table
*/
@Override
public BackendColumnIterator get(String table, List<byte[]> keys) {
assert !this.hasChanges();
return BackendColumnIterator.empty();
}
/**
* Scan all records from a table
*/
@Override
public BackendColumnIterator scan(String table) {
assert !this.hasChanges();
return BackendColumnIterator.empty();
}
/**
* Scan records by key prefix from a table
*/
@Override
public BackendColumnIterator scan(String table, byte[] prefix) {
assert !this.hasChanges();
return BackendColumnIterator.empty();
}
/**
* Scan records by key range from a table
*/
@Override
public BackendColumnIterator scan(String table, byte[] keyFrom,
byte[] keyTo, int scanType) {
assert !this.hasChanges();
return BackendColumnIterator.empty();
}
}
|
int count = this.batch.size();
if (count == 0) {
return 0;
}
try {
for (Entry<String, Changes> table : this.batch.entrySet()) {
if (table.getValue().isEmpty() ||
table.getKey().endsWith("i")) {
// Skip empty value table or index table
continue;
}
// TODO: limit individual SST file size
SstFileWriter sst = table(table.getKey());
for (Pair<byte[], byte[]> change : table.getValue()) {
sst.put(change.getKey(), change.getValue());
}
}
} catch (RocksDBException e) {
throw new BackendException("Failed to commit", e);
}
// Clear batch if write() successfully (retained if failed)
this.batch.clear();
return count;
| 1,260
| 228
| 1,488
|
<methods>public void <init>(HugeConfig, java.lang.String, java.lang.String) ,public abstract java.lang.String buildSnapshotPath(java.lang.String) ,public abstract void compactRange() ,public abstract org.apache.hugegraph.backend.store.rocksdb.RocksDBSessions copy(HugeConfig, java.lang.String, java.lang.String) ,public abstract void createSnapshot(java.lang.String) ,public transient abstract void createTable(java.lang.String[]) throws RocksDBException,public transient abstract void dropTable(java.lang.String[]) throws RocksDBException,public abstract boolean existsTable(java.lang.String) ,public abstract void forceCloseRocksDB() ,public abstract java.lang.String hardLinkSnapshot(java.lang.String) throws RocksDBException,public abstract Set<java.lang.String> openedTables() ,public abstract List<java.lang.String> property(java.lang.String) ,public abstract void reloadRocksDB() throws RocksDBException,public abstract void resumeSnapshot(java.lang.String) ,public abstract org.apache.hugegraph.backend.store.rocksdb.RocksDBSessions.Session session() <variables>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdbsst/RocksDBSstStore.java
|
RocksDBSstStore
|
openSessionPool
|
class RocksDBSstStore extends RocksDBStore {
public RocksDBSstStore(final BackendStoreProvider provider,
final String database, final String store) {
super(provider, database, store);
}
@Override
protected RocksDBSessions openSessionPool(HugeConfig config,
String dataPath, String walPath,
List<String> tableNames)
throws RocksDBException {<FILL_FUNCTION_BODY>}
/***************************** Store defines *****************************/
public static class RocksDBSstGraphStore extends RocksDBSstStore {
public RocksDBSstGraphStore(BackendStoreProvider provider,
String database, String store) {
super(provider, database, store);
registerTableManager(HugeType.VERTEX, new RocksDBTables.Vertex(database));
registerTableManager(HugeType.EDGE_OUT, RocksDBTables.Edge.out(database));
registerTableManager(HugeType.EDGE_IN, RocksDBTables.Edge.in(database));
registerTableManager(HugeType.SECONDARY_INDEX,
new RocksDBTables.SecondaryIndex(database));
registerTableManager(HugeType.VERTEX_LABEL_INDEX,
new RocksDBTables.VertexLabelIndex(database));
registerTableManager(HugeType.EDGE_LABEL_INDEX,
new RocksDBTables.EdgeLabelIndex(database));
registerTableManager(HugeType.RANGE_INT_INDEX,
new RocksDBTables.RangeIntIndex(database));
registerTableManager(HugeType.RANGE_FLOAT_INDEX,
new RocksDBTables.RangeFloatIndex(database));
registerTableManager(HugeType.RANGE_LONG_INDEX,
new RocksDBTables.RangeLongIndex(database));
registerTableManager(HugeType.RANGE_DOUBLE_INDEX,
new RocksDBTables.RangeDoubleIndex(database));
registerTableManager(HugeType.SEARCH_INDEX, new RocksDBTables.SearchIndex(database));
registerTableManager(HugeType.SHARD_INDEX, new RocksDBTables.ShardIndex(database));
registerTableManager(HugeType.UNIQUE_INDEX, new RocksDBTables.UniqueIndex(database));
}
@Override
public boolean isSchemaStore() {
return false;
}
@Override
public Id nextId(HugeType type) {
throw new UnsupportedOperationException("RocksDBSstGraphStore.nextId()");
}
@Override
public void increaseCounter(HugeType type, long increment) {
throw new UnsupportedOperationException("RocksDBSstGraphStore.increaseCounter()");
}
@Override
public long getCounter(HugeType type) {
throw new UnsupportedOperationException("RocksDBSstGraphStore.getCounter()");
}
}
}
|
if (tableNames == null) {
return new RocksDBSstSessions(config, this.database(), this.store(), dataPath);
} else {
return new RocksDBSstSessions(config, this.database(), this.store(),
dataPath, tableNames);
}
| 752
| 76
| 828
|
<methods>public void <init>(org.apache.hugegraph.backend.store.BackendStoreProvider, java.lang.String, java.lang.String) ,public void beginTx() ,public synchronized void clear(boolean) ,public void close() ,public void commitTx() ,public Map<java.lang.String,java.lang.String> createSnapshot(java.lang.String) ,public java.lang.String database() ,public org.apache.hugegraph.backend.store.BackendFeatures features() ,public synchronized void init() ,public boolean initialized() ,public void mutate(org.apache.hugegraph.backend.store.BackendMutation) ,public synchronized void open(HugeConfig) ,public boolean opened() ,public org.apache.hugegraph.backend.store.BackendStoreProvider provider() ,public Iterator<org.apache.hugegraph.backend.store.BackendEntry> query(org.apache.hugegraph.backend.query.Query) ,public java.lang.Number queryNumber(org.apache.hugegraph.backend.query.Query) ,public void resumeSnapshot(java.lang.String, boolean) ,public void rollbackTx() ,public java.lang.String store() ,public synchronized void truncate() <variables>private static final long DB_CLOSE_TIMEOUT,private static final java.lang.String DB_OPEN,private static final long DB_OPEN_TIMEOUT,private static final org.apache.hugegraph.backend.store.BackendFeatures FEATURES,private static final Logger LOG,private static final int OPEN_POOL_THREADS,private static final java.lang.String TABLE_GENERAL_KEY,private java.lang.String dataPath,private final non-sealed java.lang.String database,private final non-sealed ConcurrentMap<java.lang.String,org.apache.hugegraph.backend.store.rocksdb.RocksDBSessions> dbs,private boolean isGraphStore,private final non-sealed Map<java.lang.String,org.apache.hugegraph.backend.store.rocksdb.RocksDBTable> olapTables,private final non-sealed org.apache.hugegraph.backend.store.BackendStoreProvider provider,private org.apache.hugegraph.backend.store.rocksdb.RocksDBSessions sessions,private final non-sealed java.lang.String store,private final non-sealed 
java.util.concurrent.locks.ReadWriteLock storeLock,private final non-sealed Map<org.apache.hugegraph.type.HugeType,java.lang.String> tableDiskMapping,private final non-sealed Map<org.apache.hugegraph.type.HugeType,org.apache.hugegraph.backend.store.rocksdb.RocksDBTable> tables
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-server/hugegraph-scylladb/src/main/java/org/apache/hugegraph/backend/store/scylladb/ScyllaDBMetrics.java
|
ScyllaDBMetrics
|
appendExtraMetrics
|
class ScyllaDBMetrics extends CassandraMetrics {
public ScyllaDBMetrics(HugeConfig conf,
CassandraSessionPool sessions,
String keyspace) {
super(conf, sessions, keyspace);
}
@Override
protected void appendExtraMetrics(Map<String, Object> metrics,
NodeProbe probe) {<FILL_FUNCTION_BODY>}
}
|
// Table counter Metrics
appendCounterMetrics(metrics, probe, this.keyspace(), this.tables(),
"EstimatedPartitionCount");
appendCounterMetrics(metrics, probe, this.keyspace(), this.tables(),
"DroppedMutations");
appendCounterMetrics(metrics, probe, this.keyspace(), this.tables(),
"PendingFlushes");
//appendCounterMetrics(metrics, probe, this.keyspace(), this.tables(),
// "KeyCacheHitRate");
appendCounterMetrics(metrics, probe, this.keyspace(), this.tables(),
"BloomFilterFalseRatio");
// Table timer Metrics
//appendTimerMetrics(metrics, probe, this.keyspace(), "WriteLatency");
//appendTimerMetrics(metrics, probe, this.keyspace(), "ReadLatency");
//appendTimerMetrics(metrics, probe, null, "WriteLatency");
//appendTimerMetrics(metrics, probe, null, "ReadLatency");
// Cache Metrics
appendCacheMetrics(metrics, probe, "KeyCache", "Size");
appendCacheMetrics(metrics, probe, "KeyCache", "Entries");
appendCacheMetrics(metrics, probe, "RowCache", "Size");
appendCacheMetrics(metrics, probe, "RowCache", "Entries");
appendCacheMetrics(metrics, probe, "CounterCache", "Size");
appendCacheMetrics(metrics, probe, "CounterCache", "Entries");
// Compaction Metrics
appendCompactionMetrics(metrics, probe, "CompletedTasks");
appendCompactionMetrics(metrics, probe, "PendingTasks");
//appendCompactionMetrics(metrics, probe, "BytesCompacted");
| 99
| 423
| 522
|
<methods>public void <init>(HugeConfig, org.apache.hugegraph.backend.store.cassandra.CassandraSessionPool, java.lang.String) ,public Map<java.lang.String,java.lang.Object> compact() ,public Map<java.lang.String,java.lang.Object> metrics() <variables>private static final Logger LOG,private final non-sealed Cluster cluster,private final non-sealed java.lang.String keyspace,private final non-sealed java.lang.String password,private final non-sealed int port,private final non-sealed List<java.lang.String> tables,private final non-sealed java.lang.String username
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-server/hugegraph-scylladb/src/main/java/org/apache/hugegraph/backend/store/scylladb/ScyllaDBStoreProvider.java
|
ScyllaDBStoreProvider
|
loadGraphStore
|
class ScyllaDBStoreProvider extends CassandraStoreProvider {
private static final Logger LOG = Log.logger(CassandraStore.class);
private static final BackendFeatures FEATURES = new ScyllaDBFeatures();
@Override
public String type() {
return "scylladb";
}
@Override
public BackendStore loadSchemaStore(HugeConfig config) {
String name = SCHEMA_STORE;
LOG.debug("ScyllaDBStoreProvider load SchemaStore '{}'", name);
if (!this.stores.containsKey(name)) {
BackendStore s = new ScyllaDBSchemaStore(this, keyspace(), name);
this.stores.putIfAbsent(name, s);
}
BackendStore store = this.stores.get(name);
E.checkNotNull(store, "store");
E.checkState(store instanceof ScyllaDBSchemaStore,
"SchemaStore must be an instance of ScyllaDBSchemaStore");
return store;
}
@Override
public BackendStore loadGraphStore(HugeConfig config) {<FILL_FUNCTION_BODY>}
@Override
public BackendStore loadSystemStore(HugeConfig config) {
String name = SYSTEM_STORE;
LOG.debug("ScyllaDBStoreProvider load SystemStore '{}'", name);
if (!this.stores.containsKey(name)) {
BackendStore s = new ScyllaDBSystemStore(this, keyspace(), name);
this.stores.putIfAbsent(name, s);
}
BackendStore store = this.stores.get(name);
E.checkNotNull(store, "store");
E.checkState(store instanceof ScyllaDBSystemStore,
"SystemStore must be an instance of ScyllaDBSystemStore");
return store;
}
public static class ScyllaDBSchemaStore
extends CassandraStore.CassandraSchemaStore {
public ScyllaDBSchemaStore(BackendStoreProvider provider,
String keyspace, String store) {
super(provider, keyspace, store);
registerTableManager(HugeType.VERTEX_LABEL,
new ScyllaDBTablesWithMV.VertexLabel());
registerTableManager(HugeType.EDGE_LABEL,
new ScyllaDBTablesWithMV.EdgeLabel());
registerTableManager(HugeType.PROPERTY_KEY,
new ScyllaDBTablesWithMV.PropertyKey());
registerTableManager(HugeType.INDEX_LABEL,
new ScyllaDBTablesWithMV.IndexLabel());
}
@Override
public BackendFeatures features() {
return FEATURES;
}
@Override
protected CassandraMetrics createMetrics(HugeConfig conf,
CassandraSessionPool sessions,
String keyspace) {
return new ScyllaDBMetrics(conf, sessions, keyspace);
}
}
public static class ScyllaDBGraphStore
extends CassandraStore.CassandraGraphStore {
public ScyllaDBGraphStore(BackendStoreProvider provider,
String keyspace, String store) {
super(provider, keyspace, store);
registerTableManager(HugeType.VERTEX,
new ScyllaDBTablesWithMV.Vertex(store));
registerTableManager(HugeType.EDGE_OUT,
ScyllaDBTablesWithMV.Edge.out(store));
registerTableManager(HugeType.EDGE_IN,
ScyllaDBTablesWithMV.Edge.in(store));
}
@Override
public BackendFeatures features() {
return FEATURES;
}
@Override
protected CassandraMetrics createMetrics(HugeConfig conf,
CassandraSessionPool sessions,
String keyspace) {
return new ScyllaDBMetrics(conf, sessions, keyspace);
}
}
public static class ScyllaDBSystemStore
extends CassandraStore.CassandraSystemStore {
public ScyllaDBSystemStore(BackendStoreProvider provider,
String keyspace, String store) {
super(provider, keyspace, store);
}
@Override
public BackendFeatures features() {
return FEATURES;
}
@Override
protected CassandraMetrics createMetrics(HugeConfig conf,
CassandraSessionPool sessions,
String keyspace) {
return new ScyllaDBMetrics(conf, sessions, keyspace);
}
}
}
|
String name = GRAPH_STORE;
LOG.debug("ScyllaDBStoreProvider load GraphStore '{}'", name);
if (!this.stores.containsKey(name)) {
BackendStore s = new ScyllaDBGraphStore(this, keyspace(), name);
this.stores.putIfAbsent(name, s);
}
BackendStore store = this.stores.get(name);
E.checkNotNull(store, "store");
E.checkState(store instanceof ScyllaDBGraphStore,
"GraphStore must be an instance of ScyllaDBGraphStore");
return store;
| 1,155
| 161
| 1,316
|
<methods>public non-sealed void <init>() ,public java.lang.String driverVersion() ,public java.lang.String type() <variables>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/HgOwnerKey.java
|
HgOwnerKey
|
equals
|
class HgOwnerKey implements Serializable {
private final byte[] owner; // TODO: consider remove? since it seems to be useless
private int keyCode = 0;// TODO: Be here OK?
private byte[] key;
// Sequence number, used for batch queries to ensure the order of returned results
private int serialNo;
/**
* @param owner
* @param key
* @see HgOwnerKey:of(byte[] owner, byte[] key)
*/
@Deprecated
public HgOwnerKey(byte[] owner, byte[] key) {
if (owner == null) {
owner = EMPTY_BYTES;
}
if (key == null) {
key = EMPTY_BYTES;
}
this.owner = owner;
this.key = key;
}
public HgOwnerKey(int code, byte[] key) {
if (key == null) {
key = EMPTY_BYTES;
}
this.owner = EMPTY_BYTES;
this.key = key;
this.keyCode = code;
}
public static HgOwnerKey emptyOf() {
return EMPTY_OWNER_KEY;
}
public static HgOwnerKey newEmpty() {
return HgOwnerKey.of(EMPTY_BYTES, EMPTY_BYTES);
}
public static HgOwnerKey ownerOf(byte[] owner) {
return new HgOwnerKey(owner, EMPTY_BYTES);
}
public static HgOwnerKey codeOf(int code) {
return HgOwnerKey.of(EMPTY_BYTES, EMPTY_BYTES).setKeyCode(code);
}
public static HgOwnerKey of(byte[] owner, byte[] key) {
return new HgOwnerKey(owner, key);
}
public static HgOwnerKey of(int keyCode, byte[] key) {
return new HgOwnerKey(keyCode, key);
}
public byte[] getOwner() {
return owner;
}
public byte[] getKey() {
return key;
}
public int getKeyCode() {
return keyCode;
}
public HgOwnerKey setKeyCode(int keyCode) {
this.keyCode = keyCode;
return this;
}
public HgOwnerKey codeToKey(int keyCode) {
this.keyCode = keyCode;
this.key = HgStoreClientUtil.toIntBytes(keyCode);
return this;
}
public int getSerialNo() {
return this.serialNo;
}
public HgOwnerKey setSerialNo(int serialNo) {
this.serialNo = serialNo;
return this;
}
@Override
public boolean equals(Object o) {<FILL_FUNCTION_BODY>}
@Override
public int hashCode() {
int result = Arrays.hashCode(owner);
result = 31 * result + Arrays.hashCode(key);
return result;
}
@Override
public String toString() {
return "HgOwnerKey{" +
"owner=" + Arrays.toString(owner) +
", key=" + Arrays.toString(key) +
", code=" + keyCode +
'}';
}
}
|
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
HgOwnerKey that = (HgOwnerKey) o;
return Arrays.equals(owner, that.owner) && Arrays.equals(key, that.key);
| 867
| 89
| 956
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/HgStoreClient.java
|
HgStoreClient
|
setPdClient
|
class HgStoreClient {
// TODO: Holding more than one HgSessionManager is available,if you want to connect multi
private final HgSessionProvider sessionProvider;
private PDClient pdClient;
public HgStoreClient() {
this.sessionProvider = new HgStoreSessionProvider();
}
public HgStoreClient(PDConfig config) {
this.sessionProvider = new HgStoreSessionProvider();
pdClient = PDClient.create(config);
setPdClient(pdClient);
}
public HgStoreClient(PDClient pdClient) {
this.sessionProvider = new HgStoreSessionProvider();
setPdClient(pdClient);
}
public static HgStoreClient create(PDConfig config) {
return new HgStoreClient(config);
}
public static HgStoreClient create(PDClient pdClient) {
return new HgStoreClient(pdClient);
}
public static HgStoreClient create() {
return new HgStoreClient();
}
public void setPDConfig(PDConfig config) {
pdClient = PDClient.create(config);
setPdClient(pdClient);
}
/**
* Retrieve or create a HgStoreSession.
*
* @param graphName
* @return
*/
public HgStoreSession openSession(String graphName) {
return this.sessionProvider.createSession(graphName);
}
public PDClient getPdClient() {
return pdClient;
}
public void setPdClient(PDClient client) {<FILL_FUNCTION_BODY>}
}
|
this.pdClient = client;
HgStoreNodeManager nodeManager =
HgStoreNodeManager.getInstance();
HgStoreNodePartitionerImpl p = new HgStoreNodePartitionerImpl(pdClient, nodeManager);
nodeManager.setNodeProvider(p);
nodeManager.setNodePartitioner(p);
nodeManager.setNodeNotifier(p);
| 425
| 94
| 519
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/HgNodePartition.java
|
HgNodePartition
|
equals
|
class HgNodePartition {
private final Long nodeId;
//当前key的hashcode
private final Integer keyCode;
//分区的开始结束范围
private final Integer startKey;
private final Integer endKey;
private int hash = -1;
HgNodePartition(Long nodeId, Integer keyCode) {
this.nodeId = nodeId;
this.keyCode = keyCode;
this.startKey = this.endKey = keyCode;
}
HgNodePartition(Long nodeId, Integer keyCode, Integer startKey, Integer endKey) {
this.nodeId = nodeId;
this.keyCode = keyCode;
this.startKey = startKey;
this.endKey = endKey;
}
public static HgNodePartition of(Long nodeId, Integer keyCode) {
return new HgNodePartition(nodeId, keyCode);
}
public static HgNodePartition of(Long nodeId, Integer keyCode, Integer startKey,
Integer endKey) {
return new HgNodePartition(nodeId, keyCode, startKey, endKey);
}
public Long getNodeId() {
return nodeId;
}
public Integer getKeyCode() {
return keyCode;
}
public Integer getStartKey() {
return startKey;
}
public Integer getEndKey() {
return endKey;
}
@Override
public boolean equals(Object o) {<FILL_FUNCTION_BODY>}
@Override
public int hashCode() {
if (this.hash == -1) {
this.hash = Objects.hash(nodeId, keyCode);
}
return this.hash;
}
@Override
public String toString() {
return "HgNodePartition{" +
"nodeId=" + nodeId +
", partitionId=" + keyCode +
'}';
}
}
|
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
HgNodePartition that = (HgNodePartition) o;
return Objects.equals(nodeId, that.nodeId) && Objects.equals(keyCode, that.keyCode);
| 494
| 93
| 587
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/HgNodePartitionerBuilder.java
|
HgNodePartitionerBuilder
|
add
|
class HgNodePartitionerBuilder {
private Set<HgNodePartition> partitions = null;
static HgNodePartitionerBuilder resetAndGet() {
return new HgNodePartitionerBuilder();
}
/**
* @param nodeId
* @param keyCode
* @return
* @see HgNodePartitionerBuilder:setPartitions(Set<HgNodePartition> partitions)
*/
@Deprecated
public HgNodePartitionerBuilder add(Long nodeId, Integer keyCode) {<FILL_FUNCTION_BODY>}
Collection<HgNodePartition> getPartitions() {
return this.partitions;
}
public void setPartitions(Set<HgNodePartition> partitions) {
isFalse(partitions == null, "The argument is invalid: partitions");
this.partitions = partitions;
}
}
|
isFalse(nodeId == null, "The argument is invalid: nodeId");
isFalse(keyCode == null, "The argument is invalid: keyCode");
if (this.partitions == null) {
this.partitions = new HashSet<>(16, 1);
}
this.partitions.add(HgNodePartition.of(nodeId, keyCode));
return this;
| 219
| 103
| 322
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/HgStoreNodeManager.java
|
HgStoreNodeManager
|
addNode
|
class HgStoreNodeManager {
private final static Set<String> CLUSTER_ID_SET = new HashSet<>();
private final static HgStoreNodeManager instance = new HgStoreNodeManager();
private final String clusterId;
private final Map<String, HgStoreNode> addressMap = new ConcurrentHashMap<>();
private final Map<Long, HgStoreNode> nodeIdMap = new ConcurrentHashMap<>();
private final Map<String, List<HgStoreNode>> graphNodesMap = new ConcurrentHashMap<>();
private HgStoreNodeProvider nodeProvider;
private HgStoreNodePartitioner nodePartitioner;
private HgStoreNodeNotifier nodeNotifier;
private HgStoreNodeManager() {
this.clusterId = HgStoreClientConst.DEFAULT_NODE_CLUSTER_ID;
}
private HgStoreNodeManager(String clusterId) {
synchronized (CLUSTER_ID_SET) {
if (CLUSTER_ID_SET.contains(clusterId)) {
throw new RuntimeException("The cluster [" + clusterId + "] has been existing.");
}
CLUSTER_ID_SET.add(clusterId);
this.clusterId = clusterId;
}
}
public static HgStoreNodeManager getInstance() {
return instance;
}
/**
* Return the HgStoreNodeBuilder
*
* @return
*/
public HgStoreNodeBuilder getNodeBuilder() {
// TODO: Constructed by a provider that retrieved by SPI
return new GrpcStoreNodeBuilder(this, HgPrivate.getInstance());
}
/**
* Return an instance of HgStoreNode whose ID is matched to the argument.
*
* @param nodeId
* @return null when none of instance is matched to the argument,or argument is invalid.
*/
public HgStoreNode getStoreNode(Long nodeId) {
if (nodeId == null) {
return null;
}
return this.nodeIdMap.get(nodeId);
}
/**
* Apply a HgStoreNode instance with graph-name and node-id.
* <b>CAUTION:</b>
* <b>It won't work when user haven't set a HgStoreNodeProvider via setNodeProvider method.</b>
*
* @param graphName
* @param nodeId
* @return
*/
HgStoreNode applyNode(String graphName, Long nodeId) {
HgStoreNode node = this.nodeIdMap.get(nodeId);
if (node != null) {
return node;
}
if (this.nodeProvider == null) {
return null;
}
node = this.nodeProvider.apply(graphName, nodeId);
if (node == null) {
log.warn("Failed to apply a HgStoreNode instance form the nodeProvider [ "
+ this.nodeProvider.getClass().getName() + " ].");
notifying(graphName, nodeId, HgNodeStatus.NOT_EXIST);
return null;
}
this.addNode(graphName, node);
return node;
}
private void notifying(String graphName, Long nodeId, HgNodeStatus status) {
if (this.nodeNotifier != null) {
try {
this.nodeNotifier.notice(graphName, HgStoreNotice.of(nodeId, status));
} catch (Throwable t) {
log.error("Failed to invoke " + this.nodeNotifier.getClass().getSimpleName() +
":notice(" + nodeId + "," + status + ")", t);
}
}
}
/**
* @param graphName
* @param notice
* @return null: when there is no HgStoreNodeNotifier in the nodeManager;
* @throws HgStoreClientException
*/
public Integer notifying(String graphName, HgStoreNotice notice) {
if (this.nodeNotifier != null) {
synchronized (Thread.currentThread()) {
try {
return this.nodeNotifier.notice(graphName, notice);
} catch (Throwable t) {
String msg =
"Failed to invoke " + this.nodeNotifier.getClass().getSimpleName() +
", notice: [ " + notice + " ]";
log.error(msg, t);
throw new HgStoreClientException(msg);
}
}
}
return null;
}
/**
* Return a collection of HgStoreNode who is in charge of the graph passed in the argument.
*
* @param graphName
* @return null when none matched to argument or any argument is invalid.
*/
public List<HgStoreNode> getStoreNodes(String graphName) {
if (HgAssert.isInvalid(graphName)) {
return null;
}
return this.graphNodesMap.get(graphName);
}
/**
* Adding a new Store-Node, return the argument's value if the host+port was not existing,
* otherwise return the HgStoreNode-instance added early.
*
* @param storeNode
* @return
* @throws IllegalArgumentException when any argument is invalid.
*/
public HgStoreNode addNode(HgStoreNode storeNode) {
HgAssert.isFalse(storeNode == null, "the argument: storeNode is null.");
Long nodeId = storeNode.getNodeId();
HgStoreNode node = null;
synchronized (this.nodeIdMap) {
node = this.addressMap.get(nodeId);
if (node == null) {
node = storeNode;
this.nodeIdMap.put(nodeId, node);
this.addressMap.put(storeNode.getAddress(), node);
}
}
return node;
}
/**
* @param graphName
* @param storeNode
* @return
* @throws IllegalArgumentException when any argument is invalid.
*/
public HgStoreNode addNode(String graphName, HgStoreNode storeNode) {<FILL_FUNCTION_BODY>}
public HgStoreNodePartitioner getNodePartitioner() {
return nodePartitioner;
}
public HgStoreNodeManager setNodePartitioner(HgStoreNodePartitioner nodePartitioner) {
HgAssert.isFalse(nodePartitioner == null, "the argument is invalid: nodePartitioner");
this.nodePartitioner = nodePartitioner;
return this;
}
public HgStoreNodeNotifier getNodeNotifier() {
return nodeNotifier;
}
public HgStoreNodeManager setNodeNotifier(HgStoreNodeNotifier nodeNotifier) {
HgAssert.isFalse(nodeNotifier == null, "the argument is invalid: nodeNotifier");
this.nodeNotifier = nodeNotifier;
return this;
}
public HgStoreNodeManager setNodeProvider(HgStoreNodeProvider nodeProvider) {
this.nodeProvider = nodeProvider;
return this;
}
}
|
HgAssert.isFalse(HgAssert.isInvalid(graphName), "the argument is invalid: graphName");
HgStoreNode node = this.addNode(storeNode);
List<HgStoreNode> nodes = null;
synchronized (this.graphNodesMap) {
nodes = this.graphNodesMap.get(graphName);
if (nodes == null) {
nodes = new ArrayList<>();
this.graphNodesMap.put(graphName, nodes);
}
nodes.add(node);
}
return node;
| 1,800
| 141
| 1,941
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/HgStoreNodePartitionerImpl.java
|
HgStoreNodePartitionerImpl
|
partition
|
class HgStoreNodePartitionerImpl implements HgStoreNodePartitioner,
HgStoreNodeProvider,
HgStoreNodeNotifier {
private PDClient pdClient;
private HgStoreNodeManager nodeManager;
protected HgStoreNodePartitionerImpl() {
}
public HgStoreNodePartitionerImpl(PDClient pdClient, HgStoreNodeManager nodeManager) {
this.pdClient = pdClient;
this.nodeManager = nodeManager;
}
/**
* 查询分区信息,结果通过HgNodePartitionerBuilder返回
*/
@Override
public int partition(HgNodePartitionerBuilder builder, String graphName,
byte[] startKey, byte[] endKey) {
try {
HashSet<HgNodePartition> partitions = null;
if (HgStoreClientConst.ALL_PARTITION_OWNER == startKey) {
List<Metapb.Store> stores = pdClient.getActiveStores(graphName);
partitions = new HashSet<>(stores.size());
for (Metapb.Store store : stores) {
partitions.add(HgNodePartition.of(store.getId(), -1));
}
} else if (endKey == HgStoreClientConst.EMPTY_BYTES
|| startKey == endKey || Arrays.equals(startKey, endKey)) {
KVPair<Metapb.Partition, Metapb.Shard> partShard =
pdClient.getPartition(graphName, startKey);
Metapb.Shard leader = partShard.getValue();
partitions = new HashSet<>();
partitions.add(HgNodePartition.of(leader.getStoreId(),
pdClient.keyToCode(graphName, startKey)));
} else {
log.warn(
"StartOwnerkey is not equal to endOwnerkey, which is meaningless!!, It is" +
" a error!!");
List<Metapb.Store> stores = pdClient.getActiveStores(graphName);
partitions = new HashSet<>(stores.size());
for (Metapb.Store store : stores) {
partitions.add(HgNodePartition.of(store.getId(), -1));
}
}
builder.setPartitions(partitions);
} catch (PDException e) {
log.error("An error occurred while getting partition information :{}", e.getMessage());
throw new RuntimeException(e.getMessage(), e);
}
return 0;
}
@Override
public int partition(HgNodePartitionerBuilder builder, String graphName,
int startKey, int endKey) {
try {
HashSet<HgNodePartition> partitions = new HashSet<>();
Metapb.Partition partition = null;
while ((partition == null || partition.getEndKey() < endKey)
&& startKey < PartitionUtils.MAX_VALUE) {
KVPair<Metapb.Partition, Metapb.Shard> partShard =
pdClient.getPartitionByCode(graphName, startKey);
if (partShard != null) {
partition = partShard.getKey();
Metapb.Shard leader = partShard.getValue();
partitions.add(HgNodePartition.of(leader.getStoreId(), startKey,
(int) partition.getStartKey(),
(int) partition.getEndKey()));
startKey = (int) partition.getEndKey();
} else {
break;
}
}
builder.setPartitions(partitions);
} catch (PDException e) {
log.error("An error occurred while getting partition information :{}", e.getMessage());
throw new RuntimeException(e.getMessage(), e);
}
return 0;
}
@Override
public int partition(HgNodePartitionerBuilder builder, String graphName,
int partitionId) {<FILL_FUNCTION_BODY>}
/**
* 查询hgstore信息
*
* @return hgstore
*/
@Override
public HgStoreNode apply(String graphName, Long nodeId) {
try {
Metapb.Store store = pdClient.getStore(nodeId);
return nodeManager.getNodeBuilder().setNodeId(store.getId())
.setAddress(store.getAddress()).build();
} catch (PDException e) {
throw new RuntimeException(e.getMessage(), e);
}
}
/**
* 通知更新缓存
*/
@Override
public int notice(String graphName, HgStoreNotice storeNotice) {
log.warn(storeNotice.toString());
if (storeNotice.getPartitionLeaders() != null) {
storeNotice.getPartitionLeaders().forEach((partId, leader) -> {
pdClient.updatePartitionLeader(graphName, partId, leader);
log.warn("updatePartitionLeader:{}-{}-{}",
graphName, partId, leader);
});
}
if (storeNotice.getPartitionIds() != null) {
storeNotice.getPartitionIds().forEach(partId -> {
pdClient.invalidPartitionCache(graphName, partId);
});
}
if (!storeNotice.getNodeStatus().equals(
HgNodeStatus.PARTITION_COMMON_FAULT)
&& !storeNotice.getNodeStatus().equals(
HgNodeStatus.NOT_PARTITION_LEADER)) {
pdClient.invalidPartitionCache();
log.warn("invalidPartitionCache:{} ", storeNotice.getNodeStatus());
}
return 0;
}
public Metapb.Graph delGraph(String graphName) {
try {
return pdClient.delGraph(graphName);
} catch (PDException e) {
log.error("delGraph {} exception, {}", graphName, e.getMessage());
}
return null;
}
public void setNodeManager(HgStoreNodeManager nodeManager) {
this.nodeManager = nodeManager;
}
}
|
try {
HashSet<HgNodePartition> partitions = new HashSet<>();
Metapb.Partition partition = null;
KVPair<Metapb.Partition, Metapb.Shard> partShard =
pdClient.getPartitionById(graphName, partitionId);
if (partShard != null) {
partition = partShard.getKey();
Metapb.Shard leader = partShard.getValue();
partitions.add(
HgNodePartition.of(leader.getStoreId(), (int) partition.getStartKey()));
}
builder.setPartitions(partitions);
} catch (PDException e) {
log.error("An error occurred while getting partition information :{}", e.getMessage());
throw new RuntimeException(e.getMessage(), e);
}
return 0;
| 1,528
| 211
| 1,739
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/HgStoreNotice.java
|
HgStoreNotice
|
toString
|
class HgStoreNotice {
private final Long nodeId;
private final HgNodeStatus nodeStatus;
private final String message;
private Map<Integer, Long> partitionLeaders;
private List<Integer> partitionIds;
private HgStoreNotice(Long nodeId, HgNodeStatus nodeStatus, String message) {
this.nodeId = nodeId;
this.nodeStatus = nodeStatus;
this.message = message;
}
public static HgStoreNotice of(Long nodeId, HgNodeStatus nodeStatus) {
HgAssert.isArgumentNotNull(nodeId, "nodeId");
HgAssert.isArgumentNotNull(nodeStatus, "nodeStatus");
return new HgStoreNotice(nodeId, nodeStatus, "");
}
public static HgStoreNotice of(Long nodeId, HgNodeStatus nodeStatus, String message) {
HgAssert.isArgumentNotNull(nodeId, "nodeId");
HgAssert.isArgumentNotNull(nodeStatus, "nodeStatus");
HgAssert.isArgumentNotNull(message, "message");
return new HgStoreNotice(nodeId, nodeStatus, message);
}
public Long getNodeId() {
return nodeId;
}
public HgNodeStatus getNodeStatus() {
return nodeStatus;
}
public String getMessage() {
return message;
}
public Map<Integer, Long> getPartitionLeaders() {
return partitionLeaders;
}
public HgStoreNotice setPartitionLeaders(Map<Integer, Long> partitionLeaders) {
this.partitionLeaders = partitionLeaders;
return this;
}
public List<Integer> getPartitionIds() {
return partitionIds;
}
public HgStoreNotice setPartitionIds(List<Integer> partitionIds) {
this.partitionIds = partitionIds;
return this;
}
@Override
public String toString() {<FILL_FUNCTION_BODY>}
}
|
return "HgStoreNotice{" +
"nodeId=" + nodeId +
", nodeStatus=" + nodeStatus +
", message='" + message + '\'' +
", partitionLeaders=" + partitionLeaders +
", partitionIds=" + partitionIds +
'}';
| 500
| 76
| 576
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/HgTkvEntryImpl.java
|
HgTkvEntryImpl
|
equals
|
class HgTkvEntryImpl implements HgTkvEntry {
private final String table;
private final byte[] key;
private final byte[] value;
HgTkvEntryImpl(String table, byte[] key, byte[] value) {
this.table = table;
this.key = key;
this.value = value;
}
@Override
public String table() {
return this.table;
}
@Override
public byte[] key() {
return this.key;
}
@Override
public byte[] value() {
return this.value;
}
@Override
public boolean equals(Object o) {<FILL_FUNCTION_BODY>}
@Override
public int hashCode() {
int result = Objects.hash(table);
result = 31 * result + Arrays.hashCode(key);
result = 31 * result + Arrays.hashCode(value);
return result;
}
@Override
public String toString() {
return "HgTkvEntryImpl{" +
"table='" + table + '\'' +
", key=" + Arrays.toString(key) +
", value=" + Arrays.toString(value) +
'}';
}
}
|
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
HgTkvEntryImpl that = (HgTkvEntryImpl) o;
return Objects.equals(table, that.table) && Arrays.equals(key, that.key) &&
Arrays.equals(value, that.value);
| 331
| 109
| 440
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/HgTokvEntryImpl.java
|
HgTokvEntryImpl
|
equals
|
class HgTokvEntryImpl implements HgTokvEntry {
private final String table;
private final HgOwnerKey ownerKey;
private final byte[] value;
HgTokvEntryImpl(String table, HgOwnerKey ownerKey, byte[] value) {
this.table = table;
this.ownerKey = ownerKey;
this.value = value;
}
@Override
public String table() {
return this.table;
}
@Override
public HgOwnerKey ownerKey() {
return this.ownerKey;
}
@Override
public byte[] value() {
return this.value;
}
@Override
public boolean equals(Object o) {<FILL_FUNCTION_BODY>}
@Override
public int hashCode() {
int result = Objects.hash(table, ownerKey);
result = 31 * result + Arrays.hashCode(value);
return result;
}
@Override
public String toString() {
return "HgTokvEntryImpl{" +
"table='" + table + '\'' +
", okv=" + ownerKey +
", value=" + Arrays.toString(value) +
'}';
}
}
|
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
HgTokvEntryImpl that = (HgTokvEntryImpl) o;
return Objects.equals(table, that.table) && Objects.equals(ownerKey, that.ownerKey) &&
Arrays.equals(value, that.value);
| 321
| 109
| 430
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/MultiNodeSessionFactory.java
|
MultiNodeSessionFactory
|
buildProxy
|
class MultiNodeSessionFactory {
// TODO multi-instance ?
private final static MultiNodeSessionFactory INSTANCE = new MultiNodeSessionFactory();
// TODO multi-instance ?
private final HgStoreNodeManager nodeManager = HgStoreNodeManager.getInstance();
// TODO: to be a chain assigned to each graph
//private HgStoreNodeDispatcher storeNodeDispatcher;
private MultiNodeSessionFactory() {
}
static MultiNodeSessionFactory getInstance() {
return INSTANCE;
}
HgStoreSession createStoreSession(String graphName) {
return buildProxy(graphName);
}
private HgStoreSession buildProxy(String graphName) {<FILL_FUNCTION_BODY>}
}
|
//return new MultiNodeSessionProxy(graphName, nodeManager, storeNodeDispatcher);
//return new NodePartitionSessionProxy(graphName,nodeManager);
//return new NodeRetrySessionProxy(graphName,nodeManager);
return new NodeTxSessionProxy(graphName, nodeManager);
| 184
| 72
| 256
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/NodeTkv.java
|
NodeTkv
|
equals
|
class NodeTkv {
private final HgNodePartition nodePartition;
private final String table;
private final HgOwnerKey key;
private final HgOwnerKey endKey;
private HgStoreSession session;
NodeTkv(HgNodePartition nodePartition, String table, HgOwnerKey key) {
this.nodePartition = nodePartition;
this.table = table;
this.key = key;
this.endKey = key;
this.key.setKeyCode(this.nodePartition.getKeyCode());
}
NodeTkv(HgNodePartition nodePartition, String table, HgOwnerKey key, int keyCode) {
this.nodePartition = nodePartition;
this.table = table;
this.key = key;
this.endKey = key;
this.key.setKeyCode(keyCode);
}
NodeTkv(HgNodePartition nodePartition, String table, HgOwnerKey startKey,
HgOwnerKey endKey) {
this.nodePartition = nodePartition;
this.table = table;
this.key = startKey;
this.endKey = endKey;
this.key.setKeyCode(nodePartition.getStartKey());
this.endKey.setKeyCode(nodePartition.getEndKey());
}
public Long getNodeId() {
return this.nodePartition.getNodeId();
}
public String getTable() {
return table;
}
public HgOwnerKey getKey() {
return key;
}
public HgOwnerKey getEndKey() {
return endKey;
}
public NodeTkv setKeyCode(int code) {
this.key.setKeyCode(code);
return this;
}
@Override
public boolean equals(Object o) {<FILL_FUNCTION_BODY>}
@Override
public int hashCode() {
int result = Objects.hash(nodePartition, table, key, endKey);
return result;
}
@Override
public String toString() {
return "NptKv{" +
"nodePartition=" + nodePartition +
", table='" + table + '\'' +
", key=" + key +
", endKey=" + endKey +
'}';
}
public HgStoreSession getSession() {
return session;
}
public void setSession(HgStoreSession session) {
this.session = session;
}
}
|
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
NodeTkv nptKv = (NodeTkv) o;
return Objects.equals(nodePartition, nptKv.nodePartition) &&
Objects.equals(table, nptKv.table)
&& Objects.equals(key, nptKv.key)
&& Objects.equals(endKey, nptKv.endKey);
| 639
| 138
| 777
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/SequencedIterator.java
|
SequencedIterator
|
next
|
class SequencedIterator implements HgKvIterator {
private static final byte[] EMPTY_BYTES = new byte[0];
private final Queue<HgKvOrderedIterator> queue;
private final long limit;
private HgKvOrderedIterator<HgKvEntry> iterator;
private HgKvEntry entry;
private int count;
private byte[] position = EMPTY_BYTES;
private byte[] position4Seeking = EMPTY_BYTES;
SequencedIterator(List<HgKvOrderedIterator> iterators, long limit) {
Collections.sort(iterators);
this.queue = new LinkedList(iterators);
this.limit = limit <= 0 ? Integer.MAX_VALUE : limit;
}
private HgKvOrderedIterator getIterator() {
if (this.queue.isEmpty()) {
return null;
}
HgKvOrderedIterator buf;
while ((buf = this.queue.poll()) != null) {
buf.seek(this.position4Seeking);
if (buf.hasNext()) {
break;
}
}
return buf;
}
private void closeIterators() {
if (this.queue.isEmpty()) {
return;
}
HgKvOrderedIterator buf;
while ((buf = this.queue.poll()) != null) {
buf.close();
}
}
@Override
public byte[] key() {
if (this.entry != null) {
return this.entry.key();
}
return null;
}
@Override
public byte[] value() {
if (this.entry != null) {
return this.entry.value();
}
return null;
}
@Override
public byte[] position() {
return this.position;
}
@Override
public void seek(byte[] pos) {
if (pos != null) {
this.position4Seeking = pos;
}
}
@Override
public boolean hasNext() {
if (this.count >= this.limit) {
return false;
}
if (this.iterator == null) {
this.iterator = this.getIterator();
} else if (!this.iterator.hasNext()) {
this.iterator.close();
this.iterator = this.getIterator();
}
return this.iterator != null;
}
@Override
public Object next() {<FILL_FUNCTION_BODY>}
@Override
public void close() {
if (this.iterator != null) {
this.iterator.close();
}
this.closeIterators();
}
}
|
if (this.iterator == null) {
hasNext();
}
if (this.iterator == null) {
throw new NoSuchElementException();
}
this.entry = this.iterator.next();
this.position = this.iterator.position();
if (!this.iterator.hasNext()) {
this.iterator.close();
this.iterator = null;
}
this.count++;
return this.entry;
| 703
| 114
| 817
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/ShiftWorkIteratorProxy.java
|
ShiftWorkIteratorProxy
|
setIterator
|
class ShiftWorkIteratorProxy implements HgKvIterator {
private static final byte[] EMPTY_BYTES = new byte[0];
private final int limit;
private HgKvPagingIterator<HgKvEntry> iterator;
private Queue<HgKvPagingIterator> queue = new LinkedList<>();
private HgKvEntry entry;
private int count;
private int shiftCount;
ShiftWorkIteratorProxy(List<HgKvPagingIterator> iterators, int limit) {
this.queue = new LinkedList<>(iterators);
this.limit = limit <= 0 ? Integer.MAX_VALUE : limit;
}
private HgKvPagingIterator getIterator() {
if (this.queue.isEmpty()) {
return null;
}
HgKvPagingIterator buf = null;
while ((buf = this.queue.poll()) != null) {
if (buf.hasNext()) {
break;
}
}
if (buf == null) {
return null;
}
this.queue.add(buf);
return buf;
}
private void closeIterators() {
if (this.queue.isEmpty()) {
return;
}
HgKvPagingIterator buf;
while ((buf = this.queue.poll()) != null) {
buf.close();
}
}
private void setIterator() {<FILL_FUNCTION_BODY>}
private void doNext() {
}
@Override
public byte[] key() {
if (this.entry != null) {
return this.entry.key();
}
return null;
}
@Override
public byte[] value() {
if (this.entry != null) {
return this.entry.value();
}
return null;
}
@Override
public byte[] position() {
return this.iterator != null ? this.iterator.position() : EMPTY_BYTES;
}
@Override
public void seek(byte[] position) {
if (this.iterator != null) {
this.iterator.seek(position);
}
}
@Override
public boolean hasNext() {
if (this.count >= this.limit) {
return false;
}
if (this.iterator == null
|| !this.iterator.hasNext()) {
this.iterator = this.getIterator();
}
return this.iterator != null;
}
@Override
public Object next() {
if (this.iterator == null) {
hasNext();
}
if (this.iterator == null) {
throw new NoSuchElementException();
}
this.entry = this.iterator.next();
this.setIterator();
this.count++;
//log.info("next - > {}",this.entry);
return this.entry;
}
@Override
public void close() {
if (this.iterator != null) {
this.iterator.close();
}
this.closeIterators();
}
}
|
// if (++this.shiftCount >= this.iterator.getPageSize() / 2) {
if (++this.shiftCount >= this.iterator.getPageSize()) {
this.iterator = null;
this.shiftCount = 0;
}
| 809
| 69
| 878
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/TopWorkIteratorProxy.java
|
TopWorkIteratorProxy
|
key
|
// Iterator proxy that round-robins over several underlying HgKvIterator
// instances and stops after `limit` entries have been returned.
class TopWorkIteratorProxy implements HgKvIterator {
private static final byte[] EMPTY_BYTES = new byte[0];
// Rotating queue of source iterators; exhausted ones are dropped on poll.
private final Queue<HgKvIterator> queue;
// Maximum number of entries to emit; <=0 at construction means "unlimited".
private final long limit;
private HgKvIterator<HgKvEntry> iterator;
private HgKvEntry entry;
// result count
private int count;
TopWorkIteratorProxy(List<HgKvIterator> iterators, long limit) {
this.queue = new LinkedList<>(iterators);
this.limit = limit <= 0 ? Integer.MAX_VALUE : limit;
}
// Polls until an iterator with remaining data is found and re-queues it at
// the tail (round-robin). Returns null when every source is exhausted.
// NOTE(review): sources polled while empty are dropped without close() here;
// they are only closed via closeIterators()/close() — confirm intended.
private HgKvIterator getIterator() {
if (this.queue.isEmpty()) {
return null;
}
HgKvIterator buf = null;
while ((buf = this.queue.poll()) != null) {
if (buf.hasNext()) {
break;
}
}
if (buf == null) {
return null;
}
this.queue.add(buf);
return buf;
}
// Closes every iterator still waiting in the queue.
private void closeIterators() {
if (this.queue.isEmpty()) {
return;
}
HgKvIterator buf;
while ((buf = this.queue.poll()) != null) {
buf.close();
}
}
// Clears the current iterator so the next hasNext() picks a fresh source.
private void setIterator() {
this.iterator = null;
}
@Override
public byte[] key() {<FILL_FUNCTION_BODY>}
@Override
public byte[] value() {
// Value of the most recently returned entry, or null before first next().
if (this.entry != null) {
return this.entry.value();
}
return null;
}
@Override
public byte[] position() {
return this.iterator != null ? this.iterator.position() : EMPTY_BYTES;
}
@Override
public void seek(byte[] position) {
if (this.iterator != null) {
this.iterator.seek(position);
}
}
@Override
public boolean hasNext() {
// Stop once the requested limit has been reached.
if (this.count >= this.limit) {
return false;
}
if (this.iterator == null) {
this.iterator = this.getIterator();
}
return this.iterator != null;
}
@Override
public Object next() {
if (this.iterator == null) {
hasNext();
}
if (this.iterator == null) {
throw new NoSuchElementException();
}
this.entry = this.iterator.next();
// Force re-selection of a source on the next call (round-robin).
this.setIterator();
this.count++;
return this.entry;
}
@Override
public void close() {
if (this.iterator != null) {
this.iterator.close();
}
this.closeIterators();
}
}
|
if (this.entry != null) {
return this.entry.key();
}
return null;
| 730
| 32
| 762
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/AbstractGrpcClient.java
|
AbstractGrpcClient
|
getAsyncStub
|
// gRPC client base: caches ManagedChannel arrays and stub arrays per target
// address and rotates over them with a shared counter to spread load.
// NOTE(review): declares an abstract method but the class itself is not
// marked abstract — presumably fine in the original compilation context;
// confirm against the project source.
class AbstractGrpcClient {
private static Map<String, ManagedChannel[]> channels = new ConcurrentHashMap<>();
private static int n = 5;
// Channels/stubs kept per target: 1 << n.
private static int concurrency = 1 << n;
// Shared round-robin counter across all targets.
private static AtomicLong counter = new AtomicLong(0);
// Reset threshold so the shared counter never overflows.
private static long limit = Long.MAX_VALUE >> 1;
private static HgStoreClientConfig config = HgStoreClientConfig.of();
private Map<String, HgPair<ManagedChannel, AbstractBlockingStub>[]> blockingStubs =
new ConcurrentHashMap<>();
private Map<String, HgPair<ManagedChannel, AbstractAsyncStub>[]> asyncStubs =
new ConcurrentHashMap<>();
private ThreadPoolExecutor executor;
{
executor = ExecutorPool.createExecutor("common", 60, concurrency, concurrency);
}
public AbstractGrpcClient() {
}
// Returns the channel array for `target`, building all `concurrency`
// channels in parallel on first access (double-checked locking on the map).
public ManagedChannel[] getChannels(String target) {
ManagedChannel[] tc;
if ((tc = channels.get(target)) == null) {
synchronized (channels) {
if ((tc = channels.get(target)) == null) {
try {
ManagedChannel[] value = new ManagedChannel[concurrency];
CountDownLatch latch = new CountDownLatch(concurrency);
for (int i = 0; i < concurrency; i++) {
int fi = i;
executor.execute(() -> {
try {
value[fi] = getManagedChannel(target);
} catch (Exception e) {
throw new RuntimeException(e);
} finally {
latch.countDown();
}
});
}
latch.await();
channels.put(target, tc = value);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
}
return tc;
}
public abstract AbstractBlockingStub getBlockingStub(ManagedChannel channel);
// Returns a blocking stub for `target`, selected round-robin; the per-target
// stub array is created lazily under the `blockingStubs` lock.
public AbstractBlockingStub getBlockingStub(String target) {
ManagedChannel[] channels = getChannels(target);
HgPair<ManagedChannel, AbstractBlockingStub>[] pairs = blockingStubs.get(target);
long l = counter.getAndIncrement();
if (l >= limit) {
counter.set(0);
}
// concurrency is a power of two, so masking is equivalent to modulo.
int index = (int) (l & (concurrency - 1));
if (pairs == null) {
synchronized (blockingStubs) {
pairs = blockingStubs.get(target);
if (pairs == null) {
HgPair<ManagedChannel, AbstractBlockingStub>[] value = new HgPair[concurrency];
IntStream.range(0, concurrency).forEach(i -> {
// NOTE(review): channels[index] is used for every i, so all
// pairs share one channel; channels[i] looks intended — confirm.
ManagedChannel channel = channels[index];
AbstractBlockingStub stub = getBlockingStub(channel);
value[i] = new HgPair<>(channel, stub);
// log.info("create channel for {}",target);
});
blockingStubs.put(target, value);
AbstractBlockingStub stub = value[index].getValue();
return (AbstractBlockingStub) setBlockingStubOption(stub);
}
}
}
return (AbstractBlockingStub) setBlockingStubOption(pairs[index].getValue());
}
// Applies deadline and message-size limits from the client config.
private AbstractStub setBlockingStubOption(AbstractBlockingStub stub) {
return stub.withDeadlineAfter(config.getGrpcTimeoutSeconds(), TimeUnit.SECONDS)
.withMaxInboundMessageSize(
config.getGrpcMaxInboundMessageSize())
.withMaxOutboundMessageSize(
config.getGrpcMaxOutboundMessageSize());
}
// Default async stub factory; subclasses that need async calls override this.
public AbstractAsyncStub getAsyncStub(ManagedChannel channel) {
return null;
}
public AbstractAsyncStub getAsyncStub(String target) {<FILL_FUNCTION_BODY>}
// Applies message-size limits (no deadline) from the client config.
private AbstractStub setStubOption(AbstractStub value) {
return value.withMaxInboundMessageSize(
config.getGrpcMaxInboundMessageSize())
.withMaxOutboundMessageSize(
config.getGrpcMaxOutboundMessageSize());
}
private ManagedChannel getManagedChannel(String target) {
return ManagedChannelBuilder.forTarget(target).usePlaintext().build();
}
}
|
ManagedChannel[] channels = getChannels(target);
HgPair<ManagedChannel, AbstractAsyncStub>[] pairs = asyncStubs.get(target);
long l = counter.getAndIncrement();
if (l >= limit) {
counter.set(0);
}
int index = (int) (l & (concurrency - 1));
if (pairs == null) {
synchronized (asyncStubs) {
pairs = asyncStubs.get(target);
if (pairs == null) {
HgPair<ManagedChannel, AbstractAsyncStub>[] value = new HgPair[concurrency];
IntStream.range(0, concurrency).parallel().forEach(i -> {
ManagedChannel channel = channels[index];
AbstractAsyncStub stub = getAsyncStub(channel);
// stub.withMaxInboundMessageSize(config.getGrpcMaxInboundMessageSize())
// .withMaxOutboundMessageSize(config.getGrpcMaxOutboundMessageSize());
value[i] = new HgPair<>(channel, stub);
// log.info("create channel for {}",target);
});
asyncStubs.put(target, value);
AbstractAsyncStub stub =
(AbstractAsyncStub) setStubOption(value[index].getValue());
return stub;
}
}
}
return (AbstractAsyncStub) setStubOption(pairs[index].getValue());
| 1,114
| 363
| 1,477
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/GrpcKvEntryImpl.java
|
GrpcKvEntryImpl
|
equals
|
// Immutable key/value entry returned by gRPC scans; `code` carries the
// key's owner/partition code.
class GrpcKvEntryImpl implements HgKvEntry {
private final byte[] key;
private final byte[] value;
private final int code;
GrpcKvEntryImpl(byte[] k, byte[] v, int code) {
this.key = k;
this.value = v;
this.code = code;
}
@Override
public int code() {
return code;
}
@Override
public byte[] key() {
return key;
}
@Override
public byte[] value() {
return value;
}
@Override
public boolean equals(Object o) {<FILL_FUNCTION_BODY>}
@Override
public int hashCode() {
// Hash covers key and value only; `code` is excluded — keep this
// consistent with equals().
int result = Arrays.hashCode(key);
result = 31 * result + Arrays.hashCode(value);
return result;
}
@Override
public String toString() {
return "HgKvEntryImpl{" +
"key=" + Arrays.toString(key) +
", value=" + Arrays.toString(value) +
", code=" + code +
'}';
}
}
|
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
GrpcKvEntryImpl hgKvEntry = (GrpcKvEntryImpl) o;
return Arrays.equals(key, hgKvEntry.key) && Arrays.equals(value, hgKvEntry.value);
| 302
| 105
| 407
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/GrpcKvIteratorImpl.java
|
GrpcKvIteratorImpl
|
of
|
// Adapts a gRPC Kv stream (KvCloseableIterator<Kv>) to the HgKvIterator
// family, tracking the last-read element and exposing paging/ordering info.
class GrpcKvIteratorImpl implements HgKvPagingIterator<HgKvEntry>, HgKvOrderedIterator<HgKvEntry> {
private final byte[] emptyBytes = HgStoreClientConst.EMPTY_BYTES;
private final KvCloseableIterator<Kv> iterator;
private final HgPageSize pageLimiter;
private final HgStoreNodeSession session;
private HgKvEntry element;
private GrpcKvIteratorImpl(HgStoreNodeSession session, KvCloseableIterator<Kv> iterator,
HgPageSize pageLimiter) {
this.iterator = iterator;
this.pageLimiter = pageLimiter;
this.session = session;
}
// Wraps `iterator`; if it also implements HgPageSize its own page size is
// used, otherwise a page size of 1 is assumed.
public static HgKvIterator<HgKvEntry> of(HgStoreNodeSession nodeSession,
KvCloseableIterator<Kv> iterator) {
if (iterator instanceof HgPageSize) {
return of(nodeSession, iterator, (HgPageSize) iterator);
}
return new GrpcKvIteratorImpl(nodeSession, iterator, () -> 1);
}
public static HgKvIterator<HgKvEntry> of(HgStoreNodeSession nodeSession,
KvCloseableIterator<Kv> iterator,
HgPageSize pageLimiter) {
return new GrpcKvIteratorImpl(nodeSession, iterator, pageLimiter);
}
public static HgKvIterator<HgKvEntry> of(HgStoreNodeSession nodeSession, List<Kv> kvList) {<FILL_FUNCTION_BODY>}
@Override
public boolean hasNext() {
// if (log.isDebugEnabled()) {
// if (!this.iterator.hasNext() && !nodeSession.getGraphName().endsWith("/s")) {
// log.debug("[ANALYSIS GrpcKv hasNext-> FALSE] ");
// }
// }
return this.iterator.hasNext();
}
@Override
public HgKvEntry next() {
Kv kv = this.iterator.next();
this.element = new GrpcKvEntryImpl(kv.getKey().toByteArray(), kv.getValue().toByteArray(),
kv.getCode());
return this.element;
}
@Override
public byte[] key() {
if (this.element == null) {
return null;
}
return this.element.key();
}
@Override
public byte[] value() {
if (this.element == null) {
return null;
}
return this.element.value();
}
// Serialized resume position: upstream position + 4-byte key code + key.
// Returns an empty array when there is no current element or the underlying
// iterator cannot seek.
@Override
public byte[] position() {
if (this.element == null) {
return emptyBytes;
}
byte[] key = this.element.key();
if (key == null) {
return emptyBytes;
}
if (!(this.iterator instanceof HgSeekAble)) {
return emptyBytes;
}
byte[] upstream = ((HgSeekAble) this.iterator).position();
byte[] code = HgStoreClientUtil.toIntBytes(this.element.code());
byte[] result = new byte[upstream.length + Integer.BYTES + key.length];
System.arraycopy(upstream, 0, result, 0, upstream.length);
System.arraycopy(code, 0, result, upstream.length, Integer.BYTES);
System.arraycopy(key, 0, result, upstream.length + Integer.BYTES, key.length);
return result;
}
@Override
public void seek(byte[] position) {
if (this.iterator instanceof HgSeekAble) {
((HgSeekAble) this.iterator).seek(position);
}
}
@Override
public long getPageSize() {
return pageLimiter.getPageSize();
}
@Override
public boolean isPageEmpty() {
return !iterator.hasNext();
}
@Override
public int compareTo(HgKvOrderedIterator o) {
return Long.compare(this.getSequence(), o.getSequence());
}
// Ordering key: the id of the store node this iterator reads from.
@Override
public long getSequence() {
return this.session.getStoreNode().getNodeId().longValue();
}
@Override
public void close() {
this.iterator.close();
}
}
|
int pageSize = kvList.size();
return new GrpcKvIteratorImpl(nodeSession, new KvListIterator<Kv>(kvList), () -> pageSize);
| 1,139
| 49
| 1,188
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/GrpcNodeHealthyClient.java
|
GrpcNodeHealthyClient
|
isHealthy
|
// Health-check client. The channel/stub caches below are currently only
// referenced by the commented-out per-node check; isHealthy() builds its own
// ad-hoc channel.
class GrpcNodeHealthyClient {
private final static Map<String, ManagedChannel> CHANNEL_MAP = new ConcurrentHashMap<>();
private final static Map<String, HealthyGrpc.HealthyBlockingStub> STUB_MAP =
new ConcurrentHashMap<>();
// TODO: Forbid constructing out of the package.
public GrpcNodeHealthyClient() {
}
// Returns a cached plaintext channel for the target, creating it on demand.
// NOTE(review): get-then-put is not atomic; duplicate channels are possible
// under contention — confirm whether that is acceptable here.
private ManagedChannel getChannel(String target) {
ManagedChannel channel = CHANNEL_MAP.get(target);
if (channel == null) {
channel = ManagedChannelBuilder.forTarget(target).usePlaintext().build();
CHANNEL_MAP.put(target, channel);
}
return channel;
}
// Returns a cached blocking stub for the target, creating it on demand.
private HealthyGrpc.HealthyBlockingStub getStub(String target) {
HealthyGrpc.HealthyBlockingStub stub = STUB_MAP.get(target);
if (stub == null) {
stub = HealthyGrpc.newBlockingStub(getChannel(target));
STUB_MAP.put(target, stub);
}
return stub;
}
/* boolean isHealthy(GrpcStoreNodeImpl node) {
String target = node.getAddress();
HealthyOuterClass.StringReply response = getStub(target).isOk(Empty.newBuilder().build());
String res = response.getMessage();
if ("ok".equals(res)) {
return true;
} else {
System.out.printf("gRPC-res-msg: %s%n", res);
return false;
}
}*/
public boolean isHealthy() {<FILL_FUNCTION_BODY>}
}
|
String target = "localhost:9080";
ManagedChannel channel = ManagedChannelBuilder.forTarget(target).usePlaintext().build();
HealthyGrpc.HealthyBlockingStub stub = HealthyGrpc.newBlockingStub(channel);
HealthyOuterClass.StringReply response = stub.isOk(Empty.newBuilder().build());
String res = response.getMessage();
System.out.printf("gRPC response message:%s%n", res);
return "ok".equals(res);
| 438
| 134
| 572
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/GrpcStoreNodeBuilder.java
|
GrpcStoreNodeBuilder
|
setAddress
|
// Builder for gRPC-backed store nodes; shares one session client and one
// stream client across all built nodes.
class GrpcStoreNodeBuilder implements HgStoreNodeBuilder {
private static final GrpcStoreSessionClient sessionClient = new GrpcStoreSessionClient();
private static final GrpcStoreStreamClient streamClient = new GrpcStoreStreamClient();
// Source of fallback node ids (negative, counting down) when none is set.
private static final AtomicLong ids = new AtomicLong(0);
private final HgStoreNodeManager nodeManager;
private Long nodeId;
private String address;
public GrpcStoreNodeBuilder(HgStoreNodeManager nodeManager, HgPrivate hgPrivate) {
HgAssert.isArgumentNotNull(hgPrivate, "hgPrivate");
HgAssert.isArgumentNotNull(nodeManager, "nodeManager");
this.nodeManager = nodeManager;
}
@Override
public GrpcStoreNodeBuilder setAddress(String address) {<FILL_FUNCTION_BODY>}
@Override
public GrpcStoreNodeBuilder setNodeId(Long nodeId) {
HgAssert.isFalse(nodeId == null, "The argument is invalid: nodeId.");
this.nodeId = nodeId;
return this;
}
// Builds the node; address is mandatory, a synthetic negative nodeId is
// assigned when the caller did not provide one.
@Override
public HgStoreNode build() {
// TODO: delete
if (this.nodeId == null) {
this.nodeId = ids.addAndGet(-1L);
}
HgAssert.isFalse(this.nodeId == null, "nodeId can't to be null");
HgAssert.isFalse(this.address == null, "address can't to be null");
GrpcStoreNodeImpl node =
new GrpcStoreNodeImpl(this.nodeManager, sessionClient, streamClient);
node.setNodeId(this.nodeId);
node.setAddress(this.address);
return node;
}
}
|
HgAssert.isFalse(HgAssert.isInvalid(address), "The argument is invalid: address.");
this.address = address;
return this;
| 437
| 42
| 479
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/GrpcStoreNodeImpl.java
|
GrpcStoreNodeImpl
|
equals
|
// A store node reachable over gRPC; identity is (address, nodeId).
class GrpcStoreNodeImpl implements HgStoreNode {
private final GrpcStoreSessionClient sessionClient;
private final GrpcStoreStreamClient streamClient;
private final HgStoreNodeManager nodeManager;
private String address;
private Long nodeId;
GrpcStoreNodeImpl(HgStoreNodeManager nodeManager, GrpcStoreSessionClient sessionClient,
GrpcStoreStreamClient streamClient) {
this.nodeManager = nodeManager;
this.sessionClient = sessionClient;
this.streamClient = streamClient;
}
@Override
public Long getNodeId() {
return this.nodeId;
}
GrpcStoreNodeImpl setNodeId(Long nodeId) {
this.nodeId = nodeId;
return this;
}
@Override
public String getAddress() {
return this.address;
}
GrpcStoreNodeImpl setAddress(String address) {
this.address = address;
return this;
}
// Opens a session bound to this node for the given graph.
@Override
public HgStoreSession openSession(String graphName) {
// HgAssert.isFalse(HgAssert.isInvalid(graphName), "the argument: graphName is invalid.");
// return new GrpcStoreNodeSessionImpl2(this, graphName,this.nodeManager, this
// .sessionClient, this
// .streamClient);
return new GrpcStoreNodeSessionImpl(this, graphName, this.nodeManager, this.sessionClient,
this.streamClient);
}
@Override
public boolean equals(Object o) {<FILL_FUNCTION_BODY>}
@Override
public int hashCode() {
return Objects.hash(address, nodeId);
}
@Override
public String toString() {
return "storeNode: {" +
"address: \"" + address + "\"" +
", nodeId: " + nodeId +
"}";
}
}
|
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
GrpcStoreNodeImpl that = (GrpcStoreNodeImpl) o;
return Objects.equals(address, that.address) && Objects.equals(nodeId, that.nodeId);
| 482
| 93
| 575
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/GrpcStoreSessionClient.java
|
GrpcStoreSessionClient
|
doGet
|
// Blocking session-level RPCs (get/clean/batch/table/graph/count) against a
// store node; per-address stub caching comes from AbstractGrpcClient.
class GrpcStoreSessionClient extends AbstractGrpcClient {
@Override
public HgStoreSessionBlockingStub getBlockingStub(ManagedChannel channel) {
HgStoreSessionBlockingStub stub;
stub = HgStoreSessionGrpc.newBlockingStub(channel);
return stub;
}
// Resolves the stub via the node address of the given session.
private HgStoreSessionBlockingStub getBlockingStub(HgStoreNodeSession nodeSession) {
HgStoreSessionBlockingStub stub =
(HgStoreSessionBlockingStub) getBlockingStub(
nodeSession.getStoreNode().getAddress());
return stub;
}
FeedbackRes doGet(HgStoreNodeSession nodeSession, String table, HgOwnerKey ownerKey) {<FILL_FUNCTION_BODY>}
// Removes all data of the given partition on the session's node.
FeedbackRes doClean(HgStoreNodeSession nodeSession, int partId) {
return this.getBlockingStub(nodeSession)
.clean(CleanReq.newBuilder()
.setHeader(GrpcUtil.getHeader(nodeSession))
.setPartition(partId)
.build()
);
}
// Fetches several keys of one table in a single round trip.
FeedbackRes doBatchGet(HgStoreNodeSession nodeSession, String table, List<HgOwnerKey> keyList) {
BatchGetReq.Builder builder = BatchGetReq.newBuilder();
builder.setHeader(GrpcUtil.getHeader(nodeSession)).setTable(table);
for (HgOwnerKey key : keyList) {
builder.addKey(GrpcUtil.toKey(key));
}
if (log.isDebugEnabled()) {
log.debug("batchGet2: {}-{}-{}-{}", nodeSession, table, keyList, builder.build());
}
return this.getBlockingStub(nodeSession).batchGet2(builder.build());
}
// Sends a batch of write entries identified by batchId.
FeedbackRes doBatch(HgStoreNodeSession nodeSession, String batchId, List<BatchEntry> entries) {
BatchWriteReq.Builder writeReq = BatchWriteReq.newBuilder();
writeReq.addAllEntry(entries);
return this.getBlockingStub(nodeSession)
.batch(BatchReq.newBuilder()
.setHeader(GrpcUtil.getHeader(nodeSession))
.setWriteReq(writeReq)
.setBatchId(batchId)
.build()
);
}
// Table-level admin operation; the action is selected by `method`.
FeedbackRes doTable(HgStoreNodeSession nodeSession, String table, TableMethod method) {
return this.getBlockingStub(nodeSession)
.table(TableReq.newBuilder()
.setHeader(GrpcUtil.getHeader(nodeSession))
.setTableName(table)
.setMethod(method)
.build()
);
}
// Graph-level admin operation; the action is selected by `method`.
FeedbackRes doGraph(HgStoreNodeSession nodeSession, String graph, GraphMethod method) {
return this.getBlockingStub(nodeSession)
.graph(GraphReq.newBuilder()
.setHeader(GrpcUtil.getHeader(nodeSession))
.setGraphName(graph)
.setMethod(method)
.build()
);
}
// Counts all rows of a table; uses a 24h deadline since a full scan is slow.
public long count(HgStoreNodeSession nodeSession, String table) {
Agg agg = this.getBlockingStub(nodeSession).withDeadline(Deadline.after(24, TimeUnit.HOURS))
.count(ScanStreamReq.newBuilder()
.setHeader(getHeader(nodeSession))
.setTable(table)
.setMethod(ScanMethod.ALL)
.build()
);
return agg.getCount();
}
}
|
if (log.isDebugEnabled()) {
log.debug("doGet: {}-{}-{}-{}", nodeSession, table, ownerKey, GetReq.newBuilder()
.setHeader(
GrpcUtil.getHeader(
nodeSession))
.setTk(GrpcUtil.toTk(
table,
ownerKey))
.build());
}
return this.getBlockingStub(nodeSession)
.get2(GetReq.newBuilder()
.setHeader(GrpcUtil.getHeader(nodeSession))
.setTk(GrpcUtil.toTk(table, ownerKey))
.build()
);
| 913
| 176
| 1,089
|
<methods>public void <init>() ,public AbstractAsyncStub getAsyncStub(ManagedChannel) ,public AbstractAsyncStub getAsyncStub(java.lang.String) ,public abstract AbstractBlockingStub getBlockingStub(ManagedChannel) ,public AbstractBlockingStub getBlockingStub(java.lang.String) ,public ManagedChannel[] getChannels(java.lang.String) <variables>private Map<java.lang.String,HgPair<ManagedChannel,AbstractAsyncStub>[]> asyncStubs,private Map<java.lang.String,HgPair<ManagedChannel,AbstractBlockingStub>[]> blockingStubs,private static Map<java.lang.String,ManagedChannel[]> channels,private static int concurrency,private static org.apache.hugegraph.store.client.util.HgStoreClientConfig config,private static java.util.concurrent.atomic.AtomicLong counter,private java.util.concurrent.ThreadPoolExecutor executor,private static long limit,private static int n
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/GrpcStoreStateClient.java
|
GrpcStoreStateClient
|
getScanState
|
// Queries the scan state of every active store node, discovered through PD.
class GrpcStoreStateClient extends AbstractGrpcClient {
private final PDConfig pdConfig;
private final PDClient pdClient;
public GrpcStoreStateClient(PDConfig pdConfig) {
this.pdConfig = pdConfig;
pdClient = PDClient.create(this.pdConfig);
}
public Set<ScanState> getScanState() throws Exception {<FILL_FUNCTION_BODY>}
@Override
public AbstractBlockingStub getBlockingStub(ManagedChannel channel) {
HgStoreStateBlockingStub stub;
stub = HgStoreStateGrpc.newBlockingStub(channel);
return stub;
}
}
|
try {
List<Metapb.Store> activeStores = pdClient.getActiveStores();
Set<ScanState> states = activeStores.parallelStream().map(node -> {
String address = node.getAddress();
HgStoreStateBlockingStub stub = (HgStoreStateBlockingStub) getBlockingStub(address);
SubStateReq req = SubStateReq.newBuilder().build();
return stub.getScanState(req);
}).collect(Collectors.toSet());
return states;
} catch (Exception e) {
throw e;
}
| 178
| 159
| 337
|
<methods>public void <init>() ,public AbstractAsyncStub getAsyncStub(ManagedChannel) ,public AbstractAsyncStub getAsyncStub(java.lang.String) ,public abstract AbstractBlockingStub getBlockingStub(ManagedChannel) ,public AbstractBlockingStub getBlockingStub(java.lang.String) ,public ManagedChannel[] getChannels(java.lang.String) <variables>private Map<java.lang.String,HgPair<ManagedChannel,AbstractAsyncStub>[]> asyncStubs,private Map<java.lang.String,HgPair<ManagedChannel,AbstractBlockingStub>[]> blockingStubs,private static Map<java.lang.String,ManagedChannel[]> channels,private static int concurrency,private static org.apache.hugegraph.store.client.util.HgStoreClientConfig config,private static java.util.concurrent.atomic.AtomicLong counter,private java.util.concurrent.ThreadPoolExecutor executor,private static long limit,private static int n
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/GrpcStoreStreamClient.java
|
GrpcStoreStreamClient
|
doScan
|
// Streaming scan RPCs. "OneShot" variants fetch everything in one blocking
// call; the plain doScan variants page through results via KvPageScanner.
class GrpcStoreStreamClient extends AbstractGrpcClient {
// Async stream stub for the node the session is bound to.
public HgStoreStreamStub getStub(HgStoreNodeSession nodeSession) {
return (HgStoreStreamStub) getAsyncStub(nodeSession.getStoreNode().getAddress());
}
@Override
public AbstractAsyncStub getAsyncStub(ManagedChannel channel) {
return HgStoreStreamGrpc.newStub(channel);
}
private HgStoreStreamBlockingStub getBlockingStub(HgStoreNodeSession nodeSession) {
return (HgStoreStreamBlockingStub) getBlockingStub(nodeSession.getStoreNode().getAddress());
}
@Override
public AbstractBlockingStub getBlockingStub(ManagedChannel channel) {
return HgStoreStreamGrpc.newBlockingStub(channel);
}
// Full-table one-shot scan with an optional query filter.
KvCloseableIterator<Kv> doScanOneShot(HgStoreNodeSession nodeSession, String table, long limit,
byte[] query) {
return KvOneShotScanner.scanAll(nodeSession
, this.getBlockingStub(nodeSession)
, table
, limit
, query
);
}
KvCloseableIterator<Kv> doScanOneShot(HgStoreNodeSession nodeSession, String table,
long limit) {
return KvOneShotScanner.scanAll(nodeSession
, this.getBlockingStub(nodeSession)
, table
, limit
, null
);
}
// Prefix one-shot scan.
KvCloseableIterator<Kv> doScanOneShot(HgStoreNodeSession nodeSession, String table,
HgOwnerKey prefix, long limit) {
return KvOneShotScanner.scanPrefix(nodeSession
, this.getBlockingStub(nodeSession)
, table
, prefix
, limit
, null
);
}
KvCloseableIterator<Kv> doScanOneShot(HgStoreNodeSession nodeSession, String table,
HgOwnerKey prefix, long limit,
byte[] query) {
return KvOneShotScanner.scanPrefix(nodeSession
, this.getBlockingStub(nodeSession)
, table
, prefix
, limit
, query
);
}
// Range one-shot scan between startKey and endKey.
KvCloseableIterator<Kv> doScanOneShot(HgStoreNodeSession nodeSession, String table,
HgOwnerKey startKey,
HgOwnerKey endKey
, long limit
, int scanType
, byte[] query) {
return KvOneShotScanner.scanRange(nodeSession
, this.getBlockingStub(nodeSession)
, table
, startKey
, endKey
, limit
, scanType
, query
);
}
KvCloseableIterator<Kv> doScan(HgStoreNodeSession nodeSession
, String table
, long limit
, byte[] query) {<FILL_FUNCTION_BODY>}
// Paged full-table scan.
KvCloseableIterator<Kv> doScan(HgStoreNodeSession nodeSession
, String table
, long limit) {
return KvPageScanner.scanAll(nodeSession
, this.getStub(nodeSession)
, table
, limit
, null
);
}
// Paged prefix scan.
KvCloseableIterator<Kv> doScan(HgStoreNodeSession nodeSession
, String table
, HgOwnerKey prefix
, long limit) {
return KvPageScanner.scanPrefix(nodeSession
, this.getStub(nodeSession)
, table
, prefix
, limit
, null
);
}
KvCloseableIterator<Kv> doScan(HgStoreNodeSession nodeSession
, String table
, HgOwnerKey prefix
, long limit
, byte[] query) {
return KvPageScanner.scanPrefix(nodeSession
, this.getStub(nodeSession)
, table
, prefix
, limit
, query
);
}
// Paged range scan between startKey and endKey.
KvCloseableIterator<Kv> doScan(HgStoreNodeSession nodeSession
, String table
, HgOwnerKey startKey
, HgOwnerKey endKey
, long limit
, int scanType
, byte[] query) {
return KvPageScanner.scanRange(nodeSession
, this.getStub(nodeSession)
, table
, startKey
, endKey
, limit
, scanType
, query
);
}
KvCloseableIterator<Kv> doBatchScan(HgStoreNodeSession nodeSession, HgScanQuery scanQuery) {
return KvBatchScanner5.scan(nodeSession, this.getStub(nodeSession), scanQuery);
}
// Returns multiple small iterators, allowing the upper layer to process
// them in parallel.
KvCloseableIterator<HgKvIterator<HgKvEntry>> doBatchScan3(HgStoreNodeSession nodeSession,
HgScanQuery scanQuery,
KvCloseableIterator iterator) {
KvBatchScanner.scan(this.getStub(nodeSession), nodeSession.getGraphName(), scanQuery,
iterator);
return iterator;
}
KvCloseableIterator<Kv> doBatchScanOneShot(HgStoreNodeSession nodeSession,
HgScanQuery scanQuery) {
return KvBatchOneShotScanner.scan(nodeSession, this.getBlockingStub(nodeSession),
scanQuery);
}
}
|
return KvPageScanner.scanAll(nodeSession
, this.getStub(nodeSession)
, table
, limit
, query
);
| 1,403
| 45
| 1,448
|
<methods>public void <init>() ,public AbstractAsyncStub getAsyncStub(ManagedChannel) ,public AbstractAsyncStub getAsyncStub(java.lang.String) ,public abstract AbstractBlockingStub getBlockingStub(ManagedChannel) ,public AbstractBlockingStub getBlockingStub(java.lang.String) ,public ManagedChannel[] getChannels(java.lang.String) <variables>private Map<java.lang.String,HgPair<ManagedChannel,AbstractAsyncStub>[]> asyncStubs,private Map<java.lang.String,HgPair<ManagedChannel,AbstractBlockingStub>[]> blockingStubs,private static Map<java.lang.String,ManagedChannel[]> channels,private static int concurrency,private static org.apache.hugegraph.store.client.util.HgStoreClientConfig config,private static java.util.concurrent.atomic.AtomicLong counter,private java.util.concurrent.ThreadPoolExecutor executor,private static long limit,private static int n
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/GrpcUtil.java
|
GrpcUtil
|
getOwnerKeyBuilder
|
// Conversion helpers between client-side types (HgOwnerKey etc.) and the
// generated protobuf messages (Key, Tk, Tkv, Tp, Tse, ...).
class GrpcUtil {
// Per-thread reusable Key.Builder to cut allocation on hot paths.
private static final ThreadLocal<Key.Builder> keyBuilder = new ThreadLocal<Key.Builder>();
// Request header carrying the session's graph name.
static Header getHeader(HgStoreNodeSession nodeSession) {
return Header.newBuilder()
.setGraph(nodeSession.getGraphName())
.build();
}
static Tk toTk(String table, HgOwnerKey ownerKey) {
return Tk.newBuilder()
.setTable(table)
.setKey(ByteString.copyFrom(ownerKey.getKey()))
.setCode(ownerKey.getKeyCode())
.build();
}
static Key.Builder getOwnerKeyBuilder() {<FILL_FUNCTION_BODY>}
// Fills the supplied builder from ownerKey; returns null for a null key.
static Key toKey(HgOwnerKey ownerKey, Key.Builder builder) {
if (ownerKey == null) {
return null;
}
return builder
.setKey(ByteString.copyFrom(ownerKey.getKey()))
.setCode(ownerKey.getKeyCode())
.build();
}
static Key toKey(HgOwnerKey ownerKey) {
if (ownerKey == null) {
return null;
}
Key.Builder builder = keyBuilder.get();
if (builder == null) {
builder = Key.newBuilder();
// TODO: thread-level variable; find the right moment to remove it
keyBuilder.set(builder);
}
return builder
.setKey(ByteString.copyFrom(ownerKey.getKey()))
.setCode(ownerKey.getKeyCode())
.build();
}
static Tkv toTkv(String table, HgOwnerKey ownerKey, byte[] value) {
return Tkv.newBuilder()
.setTable(table)
.setKey(ByteString.copyFrom(ownerKey.getKey()))
.setValue(ByteString.copyFrom(value))
.setCode(ownerKey.getKeyCode())
.build();
}
static Tp toTp(String table, HgOwnerKey ownerKey) {
return Tp.newBuilder()
.setTable(table)
.setPrefix(ByteString.copyFrom(ownerKey.getKey()))
.setCode(ownerKey.getKeyCode())
.build();
}
static Tse toTse(String table, HgOwnerKey startKey, HgOwnerKey endKey) {
return Tse.newBuilder()
.setTable(table)
.setStart(toKey(startKey))
.setEnd(toKey(endKey))
.build();
}
// Converts a protobuf Kv list into client entries; empty input yields the
// shared immutable empty list.
static List<HgKvEntry> toList(List<Kv> kvList) {
if (kvList == null || kvList.isEmpty()) {
return HgStoreClientConst.EMPTY_LIST;
}
Iterator<Kv> iter = kvList.iterator();
List<HgKvEntry> resList = new ArrayList<>(kvList.size());
while (iter.hasNext()) {
Kv entry = iter.next();
resList.add(new GrpcKvEntryImpl(entry.getKey().toByteArray(),
entry.getValue().toByteArray(), entry.getCode()));
}
return resList;
}
static StatusRuntimeException toErr(String msg) {
return new StatusRuntimeException(Status.UNKNOWN.withDescription(msg));
}
// Null-safe byte[] -> ByteString conversion.
static ByteString toBs(byte[] bytes) {
return ByteString.copyFrom((bytes != null) ? bytes : HgStoreClientConst.EMPTY_BYTES);
}
}
|
Key.Builder builder = keyBuilder.get();
if (builder == null) {
builder = Key.newBuilder();
// TODO 线程级变量,寻找删除时机
keyBuilder.set(builder);
}
return builder;
| 911
| 66
| 977
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/KvBatchOneShotScanner.java
|
KvBatchOneShotScanner
|
next
|
// One-shot batch scan: issues a single blocking scanBatchOneShot RPC on
// first use and then iterates the returned list; paging and seek are
// effectively no-ops here.
class KvBatchOneShotScanner implements KvCloseableIterator<Kv>, HgPageSize, HgSeekAble {
private final HgStoreNodeSession nodeSession;
private final HgStoreStreamGrpc.HgStoreStreamBlockingStub stub;
private final HgScanQuery scanQuery;
private Iterator<Kv> iterator;
private List<Kv> list = null;
private KvBatchOneShotScanner(HgStoreNodeSession nodeSession,
HgStoreStreamGrpc.HgStoreStreamBlockingStub stub,
HgScanQuery scanQuery) {
this.nodeSession = nodeSession;
this.stub = stub;
this.scanQuery = scanQuery;
}
public static KvCloseableIterator scan(HgStoreNodeSession nodeSession,
HgStoreStreamGrpc.HgStoreStreamBlockingStub stub,
HgScanQuery scanQuery) {
return new KvBatchOneShotScanner(nodeSession, stub, scanQuery);
}
// Request covering the whole result set (limit = Integer.MAX_VALUE).
private ScanStreamBatchReq createReq() {
return ScanStreamBatchReq.newBuilder()
.setHeader(getHeader(this.nodeSession))
.setQueryRequest(createQueryReq(this.scanQuery, Integer.MAX_VALUE))
.build();
}
// Executes the RPC lazily and keeps the full response list in memory.
private Iterator<Kv> createIterator() {
this.list = this.stub.scanBatchOneShot(this.createReq()).getDataList();
return this.list.iterator();
}
/*** Iterator ***/
@Override
public boolean hasNext() {
if (this.iterator == null) {
this.iterator = this.createIterator();
}
return this.iterator.hasNext();
}
@Override
public Kv next() {<FILL_FUNCTION_BODY>}
@Override
public long getPageSize() {
return Integer.MAX_VALUE;
}
@Override
public boolean isPageEmpty() {
// NOTE(review): assumes hasNext()/next() ran first; NPE before that.
return !this.iterator.hasNext();
}
@Override
public byte[] position() {
//TODO: to implement
return EMPTY_POSITION;
}
@Override
public void seek(byte[] position) {
//TODO: to implement
}
@Override
public void close() {
//Nothing to do
}
}
|
if (this.iterator == null) {
this.iterator = this.createIterator();
}
return this.iterator.next();
| 609
| 37
| 646
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/KvBatchScanner.java
|
TaskSplitter
|
evaluateMaxTaskSize
|
// Splits a batch scan query into multiple gRPC requests, bounded by a
// dynamically evaluated maximum number of parallel tasks.
class TaskSplitter {
final HgScanQuery scanQuery;
// Handler that launches one gRPC scan for a (sub-)query; returns a status.
final BiFunction<HgScanQuery, KvCloseableIterator, Boolean> taskHandler;
private KvBatchScannerMerger notifier;
private Iterator<HgOwnerKey> prefixItr;
private int maxTaskSize = 0; // maximum number of parallel tasks
private int maxBatchSize = PropertyUtil.getInt("net.kv.scanner.batch.size", 1000);
// maximum number of keys (vertices) per batch
private volatile boolean finished = false;
private volatile boolean splitting = false;
// Serial numbers assigned to prefix keys, starting at 1.
private volatile int nextKeySerialNo = 1;
public TaskSplitter(HgScanQuery scanQuery,
BiFunction<HgScanQuery, KvCloseableIterator, Boolean> handler) {
this.scanQuery = scanQuery;
this.taskHandler = handler;
if (scanQuery.getScanMethod() == HgScanQuery.ScanMethod.PREFIX) {
if (scanQuery.getPrefixItr() != null) {
prefixItr = scanQuery.getPrefixItr();
} else {
prefixItr = scanQuery.getPrefixList().listIterator();
}
}
}
public void setNotifier(KvBatchScannerMerger notifier) {
this.notifier = notifier;
}
public boolean isFinished() {
return finished;
}
/**
* Evaluate the maximum number of parallel tasks.
*/
private void evaluateMaxTaskSize() {<FILL_FUNCTION_BODY>}
/**
* Split the work: the task is divided into multiple gRPC requests.
*/
public void splitTask() {
if (this.finished || this.splitting) {
return;
}
synchronized (this) {
if (this.finished) {
return;
}
this.splitting = true;
if (scanQuery.getScanMethod() == HgScanQuery.ScanMethod.PREFIX) {
if (prefixItr.hasNext() &&
(maxTaskSize == 0 || notifier.getScannerCount() < maxTaskSize)) {
// Take up to maxBatchSize prefix keys for one sub-request.
List<HgOwnerKey> keys = new ArrayList<>(maxBatchSize);
for (int i = 0; i < maxBatchSize && prefixItr.hasNext(); i++) {
keys.add(prefixItr.next().setSerialNo(nextKeySerialNo++));
}
taskHandler.apply(
HgScanQuery.prefixOf(scanQuery.getTable(), keys,
scanQuery.getOrderType()), this.notifier);
// Evaluate the max task count
evaluateMaxTaskSize();
if (this.notifier.getScannerCount() < this.maxTaskSize) {
splitTask(); // Max task count not reached yet, keep splitting
}
}
this.finished = !prefixItr.hasNext();
} else {
// Non-prefix scans are dispatched as a single request.
taskHandler.apply(scanQuery, this.notifier);
this.finished = true;
}
this.splitting = false;
}
}
public synchronized void close() {
finished = true;
}
}
|
if (maxTaskSize == 0) { // 根据第一批次任务,得到store数量,然后计算最大任务数
if (scanQuery.getOrderType() == ScanOrderType.ORDER_STRICT) {
maxTaskSize = 1; // 点排序,每台机器一个流, 所有store流结束后才能启动其他流
} else {
maxTaskSize = this.notifier.getScannerCount() * maxTaskSizePerStore;
}
maxBatchSize = this.notifier.getScannerCount() * maxBatchSize; // 每台机器最多1000条
/*
* Limit少于10000时启动一个流,节省网络带宽
*/
if (scanQuery.getLimit() < maxBatchSize * 30L) {
maxTaskSize = 1;
}
}
| 780
| 211
| 991
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/KvBatchScanner5.java
|
OrderConsumer
|
hasNext
|
class OrderConsumer implements KvCloseableIterator<Kv>, HgPageSize {
private final OrderBroker broker;
private final String consumerId;
private Iterator<Kv> dataIterator;
private long tookCount = 0;
OrderConsumer(OrderBroker broker) {
this.broker = broker;
consumerId = broker.brokerId;
}
private Iterator<Kv> getIterator() {
List<Kv> list = this.broker.oneMore();
if (log.isDebugEnabled()) {
if (list != null && list.isEmpty()) {
log.debug("[ANALYSIS EMPTY] [{}] , tookCount: {}", consumerId, tookCount);
}
}
if (list == null || list.isEmpty()) {
return null;
} else {
return list.iterator();
}
}
@Override
public void close() {
this.broker.finish(this.tookCount);
}
@Override
public long getPageSize() {
return PAGE_SIZE;
}
@Override
public boolean hasNext() {<FILL_FUNCTION_BODY>}
@Override
public Kv next() {
if (this.dataIterator == null) {
if (!this.hasNext()) {
throw new NoSuchElementException();
}
}
if (log.isDebugEnabled()) {
tookCount++;
if (tookCount % 10000 == 0) {
log.debug("[ANALYSIS NEXT] [{}] , tookCount: {}", consumerId, tookCount);
}
}
return this.dataIterator.next();
}
}
|
if (this.dataIterator == null) {
this.dataIterator = this.getIterator();
} else {
if (this.dataIterator.hasNext()) {
return true;
} else {
this.dataIterator = this.getIterator();
}
}
if (this.dataIterator == null) {
if (log.isDebugEnabled()) {
log.debug("[ANALYSIS NULL -> FALSE] [{}] , tookCount: {}", consumerId,
tookCount);
}
return false;
} else {
if (log.isDebugEnabled()) {
if (!this.dataIterator.hasNext()) {
log.debug("[ANALYSIS hasNext -> FALSE] [{}] , tookCount: {}", consumerId,
tookCount);
}
}
return this.dataIterator.hasNext();
}
| 441
| 223
| 664
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/KvBatchUtil.java
|
KvBatchUtil
|
addRangeCondition
|
class KvBatchUtil {
static final byte[] EMPTY_POSITION = HgStoreClientConst.EMPTY_BYTES;
static ScanStreamBatchReq.Builder getRequestBuilder(HgStoreNodeSession nodeSession) {
return ScanStreamBatchReq.newBuilder().setHeader(getHeader(nodeSession));
}
static ScanQueryRequest createQueryReq(HgScanQuery scanQuery, long pageSize) {
ScanQueryRequest.Builder qb = ScanQueryRequest.newBuilder();
ScanCondition.Builder cb = ScanCondition.newBuilder();
qb.setLimit(getLimit(scanQuery.getLimit()));
qb.setPerKeyLimit(getLimit(scanQuery.getPerKeyLimit()));
qb.setPerKeyMax(getLimit(scanQuery.getPerKeyMax()));
switch (scanQuery.getScanMethod()) {
case ALL:
qb.setMethod(ScanMethod.ALL);
break;
case PREFIX:
qb.setMethod(ScanMethod.PREFIX);
addPrefixCondition(scanQuery, qb, cb);
break;
case RANGE:
qb.setMethod(ScanMethod.RANGE);
addRangeCondition(scanQuery, qb, cb);
break;
default:
throw new RuntimeException("Unsupported ScanType: " + scanQuery.getScanMethod());
}
qb.setTable(scanQuery.getTable());
qb.setPageSize(pageSize);
qb.setQuery(toBs(scanQuery.getQuery()));
qb.setScanType(scanQuery.getScanType());
qb.setOrderType(scanQuery.getOrderType());
qb.setSkipDegree(scanQuery.getSkipDegree());
return qb.build();
}
static long getLimit(long limit) {
return limit <= HgStoreClientConst.NO_LIMIT ? Integer.MAX_VALUE : limit;
}
static Header getHeader(HgStoreNodeSession nodeSession) {
return Header.newBuilder().setGraph(nodeSession.getGraphName()).build();
}
static void addPrefixCondition(HgScanQuery scanQuery, ScanQueryRequest.Builder qb,
ScanCondition.Builder cb) {
List<HgOwnerKey> prefixList = scanQuery.getPrefixList();
if (prefixList == null || prefixList.isEmpty()) {
throw new RuntimeException(
"The start-list of ScanQuery shouldn't to be invalid in ScanMethod.PREFIX " +
"mode.");
}
prefixList.forEach((e) -> {
qb.addCondition(cb.clear()
.setPrefix(toBs(e.getKey()))
.setCode(e.getKeyCode())
.setSerialNo(e.getSerialNo())
.build()
);
});
}
static void addRangeCondition(HgScanQuery scanQuery, ScanQueryRequest.Builder qb,
ScanCondition.Builder cb) {<FILL_FUNCTION_BODY>}
static HgOwnerKey toOk(HgOwnerKey key) {
return key == null ? HgStoreClientConst.EMPTY_OWNER_KEY : key;
}
static ByteString toBs(byte[] bytes) {
return ByteString.copyFrom((bytes != null) ? bytes : HgStoreClientConst.EMPTY_BYTES);
}
}
|
List<HgOwnerKey> startList = scanQuery.getStartList();
List<HgOwnerKey> endList = scanQuery.getEndList();
if (startList == null || startList.isEmpty()) {
throw new RuntimeException(
"The start-list of ScanQuery shouldn't to be invalid in ScanMethod.RANGE mode" +
".");
}
if (endList == null || endList.isEmpty()) {
throw new RuntimeException(
"The end-list of ScanQuery shouldn't to be invalid in ScanMethod.RANGE mode.");
}
if (startList.size() != endList.size()) {
throw new RuntimeException("The size of start-list not equals end-list's.");
}
for (int i = 0, s = startList.size(); i < s; i++) {
HgOwnerKey start = startList.get(i);
HgOwnerKey end = endList.get(i);
qb.addCondition(cb.clear().setCode(start.getKeyCode())
.setStart(toBs(start.getKey()))
.setEnd(toBs(end.getKey()))
.setSerialNo(start.getSerialNo())
.build()
);
}
| 855
| 317
| 1,172
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/KvOneShotScanner.java
|
KvOneShotScanner
|
next
|
class KvOneShotScanner implements KvCloseableIterator<Kv>, HgPageSize, HgSeekAble {
private static final HgStoreClientConfig storeClientConfig = HgStoreClientConfig.of();
private final HgStoreNodeSession session;
private final HgStoreStreamBlockingStub stub;
private final ScanStreamReq.Builder reqBuilder = ScanStreamReq.newBuilder();
private final String table;
private final HgOwnerKey startKey;
private final HgOwnerKey endKey;
private final HgOwnerKey prefix;
private final ScanMethod scanMethod;
private final long limit;
private final int partition;
private final int scanType;
private final byte[] query;
private final int pageSize;
private ScanStreamReq req;
private Iterator<Kv> iterator;
private List<Kv> list = null;
private boolean in = true;
private byte[] nodePosition = HgStoreClientConst.EMPTY_BYTES;
private KvOneShotScanner(ScanMethod scanMethod, HgStoreNodeSession session,
HgStoreStreamBlockingStub stub,
String table, HgOwnerKey prefix, HgOwnerKey startKey,
HgOwnerKey endKey, long limit,
int partition, int scanType, byte[] query) {
this.scanMethod = scanMethod;
this.session = session;
this.stub = stub;
this.table = table;
this.startKey = toOk(startKey);
this.endKey = toOk(endKey);
this.prefix = toOk(prefix);
this.partition = partition;
this.scanType = scanType;
this.query = query != null ? query : HgStoreClientConst.EMPTY_BYTES;
this.limit = limit <= HgStoreClientConst.NO_LIMIT ? Integer.MAX_VALUE :
limit; // <=0 means no limit
this.pageSize = storeClientConfig.getNetKvScannerPageSize();
}
public static KvCloseableIterator<Kv> scanAll(HgStoreNodeSession session,
HgStoreStreamBlockingStub stub,
String table, long limit, byte[] query) {
return new KvOneShotScanner(ScanMethod.ALL, session, stub, table, null, null, null, limit,
-1, HgKvStore.SCAN_ANY,
query);
}
public static KvCloseableIterator<Kv> scanPrefix(HgStoreNodeSession session,
HgStoreStreamBlockingStub stub,
String table, HgOwnerKey prefix, long limit,
byte[] query) {
return new KvOneShotScanner(ScanMethod.PREFIX, session, stub, table, prefix, null, null,
limit,
prefix.getKeyCode(), HgKvStore.SCAN_PREFIX_BEGIN, query);
}
public static KvCloseableIterator<Kv> scanRange(HgStoreNodeSession nodeSession,
HgStoreStreamBlockingStub stub,
String table, HgOwnerKey startKey,
HgOwnerKey endKey, long limit,
int scanType, byte[] query) {
return new KvOneShotScanner(ScanMethod.RANGE, nodeSession, stub, table, null, startKey,
endKey, limit,
startKey.getKeyCode(), scanType, query);
}
static HgOwnerKey toOk(HgOwnerKey key) {
return key == null ? HgStoreClientConst.EMPTY_OWNER_KEY : key;
}
static ByteString toBs(byte[] bytes) {
return ByteString.copyFrom((bytes != null) ? bytes : HgStoreClientConst.EMPTY_BYTES);
}
private Header getHeader(HgStoreNodeSession nodeSession) {
return Header.newBuilder().setGraph(nodeSession.getGraphName()).build();
}
private void createReq() {
this.req = this.reqBuilder
.setHeader(this.getHeader(this.session))
.setMethod(this.scanMethod)
.setTable(this.table)
.setStart(toBs(this.startKey.getKey()))
.setEnd(toBs(this.endKey.getKey()))
.setLimit(this.limit)
.setPrefix(toBs(this.prefix.getKey()))
.setCode(this.partition)
.setScanType(this.scanType)
.setQuery(toBs(this.query))
.setPageSize(pageSize)
.setPosition(toBs(this.nodePosition))
.build();
}
private void init() {
if (this.iterator == null) {
this.createReq();
this.list = this.stub.scanOneShot(this.req).getDataList();
this.iterator = this.list.iterator();
}
}
@Override
public boolean hasNext() {
if (!this.in) {
return false;
}
if (this.iterator == null) {
this.init();
}
return this.iterator.hasNext();
}
@Override
public Kv next() {<FILL_FUNCTION_BODY>}
@Override
public long getPageSize() {
return this.limit;
}
@Override
public boolean isPageEmpty() {
return !this.iterator.hasNext();
}
@Override
public byte[] position() {
return HgStoreClientUtil.toBytes(this.session.getStoreNode().getNodeId().longValue());
}
@Override
public void seek(byte[] position) {
if (position == null || position.length < Long.BYTES) {
return;
}
byte[] nodeIdBytes = new byte[Long.BYTES];
System.arraycopy(position, 0, nodeIdBytes, 0, Long.BYTES);
long nodeId = this.session.getStoreNode().getNodeId().longValue();
long pId = HgStoreClientUtil.toLong(nodeIdBytes);
this.in = nodeId >= pId;
if (this.in && nodeId == pId) {
this.nodePosition = new byte[position.length - Long.BYTES];
System.arraycopy(position, Long.BYTES, this.nodePosition, 0, this.nodePosition.length);
} else {
this.nodePosition = HgStoreClientConst.EMPTY_BYTES;
}
}
@Override
public void close() {
//TODO: implements
}
}
|
if (this.iterator == null) {
this.init();
}
return this.iterator.next();
| 1,692
| 32
| 1,724
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/NotifyingExecutor.java
|
NotifyingExecutor
|
notifyErrConsumer
|
class NotifyingExecutor {
private final String graphName;
private final HgStoreNodeManager nodeManager;
private final HgStoreNodeSession nodeSession;
private Map<PartitionFaultType, Consumer<PartitionFaultResponse>> partitionFaultHandlers;
NotifyingExecutor(String graphName, HgStoreNodeManager nodeManager,
HgStoreNodeSession nodeSession) {
this.graphName = graphName;
this.nodeManager = nodeManager;
this.nodeSession = nodeSession;
}
private void initHandler() {
this.partitionFaultHandlers = new HashMap<>();
this.partitionFaultHandlers.put(
PartitionFaultType.PARTITION_FAULT_TYPE_NOT_LEADER, notifyPartitionLeaderConsumer()
);
}
<T> Optional<T> invoke(Supplier<FeedbackRes> supplier, Function<FeedbackRes, T> okFunction) {
FeedbackRes res = null;
try {
res = supplier.get();
} catch (Throwable t) {
log.error("Failed to invoke: " + supplier.toString() + ", caused " +
"by:", t);
handleErr(t);
throw err(t);
}
if (log.isDebugEnabled()) {
log.debug("gRPC [{}] status: {}"
, this.nodeSession.getStoreNode().getAddress(), res.getStatus().getCode());
}
Optional<T> option = null;
switch (res.getStatus().getCode()) {
case RES_CODE_OK:
option = Optional.of(okFunction.apply(res));
break;
case RES_CODE_FAIL:
handleFail(res);
break;
case RES_CODE_NOT_EXIST:
break;
case RES_CODE_EXCESS:
normalFail(res);
break;
default:
log.error("gRPC [{}] status-msg: {}"
, nodeSession.getStoreNode().getAddress(), res.getStatus().getMsg());
}
if (option == null) {
option = Optional.empty();
}
return option;
}
private void handleErr(Throwable t) {
try {
notifyErrConsumer(HgNodeStatus.NOT_WORK).accept(t);
} catch (Throwable tt) {
log.error("Failed to notify error to HgStoreNodeNotifier, cause:", tt);
}
}
private void handleFail(FeedbackRes feedbackRes) {
Supplier<HgStoreClientException> exSup;
if (
(exSup = handlePartitionFault(feedbackRes)) != null
// add more fault-handler here.
|| (exSup = defaultExceptionSupplier(feedbackRes)) != null
) {
throw exSup.get();
}
}
private void normalFail(FeedbackRes res) {
ResStatus status = res.getStatus();
HgStoreClientException ex;
try {
String msg = JsonFormat.printer().omittingInsignificantWhitespace()
.print(res);
ex = err(msg);
} catch (Exception e) {
ex = err(status.getCode() + ", " + status.getMsg());
}
throw ex;
}
private Supplier<HgStoreClientException> defaultExceptionSupplier(FeedbackRes feedbackRes) {
return () -> HgStoreClientException.of(err(feedbackRes.getStatus().getMsg()));
}
private Supplier<HgStoreClientException> handlePartitionFault(
FeedbackRes feedbackRes) {
PartitionFaultResponse res = feedbackRes.getPartitionFaultResponse();
if (res == null) {
return null;
}
if (this.partitionFaultHandlers == null) {
initHandler();
}
Consumer<PartitionFaultResponse> consumer =
this.partitionFaultHandlers.get(res.getFaultType());
if (consumer == null) {
consumer = notifyPartitionConsumer();
}
String msg = res.toString();
if (msg == null || msg.length() == 0) {
msg = feedbackRes.getStatus().getMsg();
}
consumer.accept(res);
String finalMsg = msg;
return () -> HgStoreClientException.of(
err(res.getFaultType() + ", " +
finalMsg));
}
private HgStoreClientException err(String msg) {
return err(msg, null);
}
private HgStoreClientException err(Throwable t) {
return err(t.getMessage(), t);
}
private HgStoreClientException err(String reason, Throwable t) {
StringBuilder builder = new StringBuilder().append(
"{sessionInfo: {" + this.nodeSession.toString() +
"}, reason: ");
if (reason.startsWith("{")) {
builder.append(reason);
} else {
builder.append("\"").append(reason).append("\"");
}
String msg = builder.append("}").toString();
if (t != null) {
return HgStoreClientException.of(msg, t);
}
return HgStoreClientException.of(msg);
}
private Consumer<PartitionFaultResponse> notifyPartitionLeaderConsumer() {
return res -> {
log.info("partitions' leader have changed: [partitionId - leaderId] ");
nodeManager.notifying(
this.graphName,
HgStoreNotice.of(this.nodeSession.getStoreNode().getNodeId(),
HgNodeStatus.NOT_PARTITION_LEADER)
.setPartitionLeaders(
res.getPartitionLeadersList()
.stream()
.peek((e) -> {
log.info("[{} - {}]", e.getPartitionId(),
e.getLeaderId());
}
)
.collect(
Collectors.toMap(
PartitionLeader::getPartitionId,
PartitionLeader::getLeaderId
)
)
)
);
};
}
private Consumer<PartitionFaultResponse> notifyPartitionConsumer() {
return notifyPartitionConsumer(HgNodeStatus.PARTITION_COMMON_FAULT);
}
private Consumer<PartitionFaultResponse> notifyPartitionConsumer(HgNodeStatus status) {
return res -> {
nodeManager.notifying(
this.graphName,
HgStoreNotice.of(this.nodeSession.getStoreNode().getNodeId(), status)
.setPartitionIds(res.getPartitionIdsList())
);
};
}
private Consumer<Throwable> notifyErrConsumer(HgNodeStatus status) {<FILL_FUNCTION_BODY>}
}
|
return t -> {
nodeManager.notifying(
this.graphName,
HgStoreNotice.of(this.nodeSession.getStoreNode().getNodeId(), status,
t.getMessage())
);
};
| 1,733
| 61
| 1,794
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/SeekAbleIterator.java
|
SeekAbleIterator
|
of
|
class SeekAbleIterator<E> implements Iterator, HgSeekAble {
private final Iterator<E> iterator;
private final Consumer<byte[]> seeker;
private final Supplier<byte[]> positioner;
private SeekAbleIterator(Iterator<E> iterator, Supplier<byte[]> positioner,
Consumer<byte[]> seeker) {
this.iterator = iterator;
this.positioner = positioner;
this.seeker = seeker;
}
public static <E> SeekAbleIterator of(Iterator<E> iterator, Supplier<byte[]> positioner,
Consumer<byte[]> seeker) {<FILL_FUNCTION_BODY>}
@Override
public byte[] position() {
return this.positioner.get();
}
@Override
public void seek(byte[] position) {
this.seeker.accept(position);
}
@Override
public boolean hasNext() {
return this.iterator.hasNext();
}
@Override
public E next() {
return this.iterator.next();
}
}
|
HgAssert.isArgumentNotNull(iterator, "iterator");
HgAssert.isArgumentNotNull(positioner, "positioner");
HgAssert.isArgumentNotNull(seeker, "seeker");
return new SeekAbleIterator(iterator, positioner, seeker);
| 293
| 70
| 363
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/SwitchingExecutor.java
|
SwitchingExecutor
|
invoke
|
/**
 * Chooses between two suppliers based on a boolean switch and wraps the result
 * in an {@link Optional}.
 */
class SwitchingExecutor {

    private SwitchingExecutor() {
    }

    static SwitchingExecutor of() {
        return new SwitchingExecutor();
    }

    /**
     * Run {@code trueSupplier} when the switcher yields true, otherwise
     * {@code falseSupplier}.
     *
     * @return the chosen supplier's result, or empty when it returns null.
     *         (The original's dead {@code option == null} check showed this
     *         intent; {@code Optional.of} would have thrown NPE instead.)
     */
    <T> Optional<T> invoke(Supplier<Boolean> switcher, Supplier<T> trueSupplier,
                           Supplier<T> falseSupplier) {
        Supplier<T> chosen = switcher.get() ? trueSupplier : falseSupplier;
        return Optional.ofNullable(chosen.get());
    }
}
| 93
| 86
| 179
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/util/Base58.java
|
Base58
|
divmod58
|
/**
 * Base58 encoder/decoder (Bitcoin alphabet, no checksum). Leading zero bytes
 * are represented as leading '1' characters.
 */
class Base58 {

    public static final char[] ALPHABET =
            "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz".toCharArray();
    // Maps an ASCII char to its alphabet index, or -1 for invalid chars.
    private static final int[] INDEXES = new int[128];

    static {
        for (int i = 0; i < INDEXES.length; i++) {
            INDEXES[i] = -1;
        }
        for (int i = 0; i < ALPHABET.length; i++) {
            INDEXES[ALPHABET[i]] = i;
        }
    }

    /**
     * Encodes the given bytes in base58. No checksum is appended.
     */
    public static String encode(byte[] input) {
        if (input.length == 0) {
            return "";
        }
        // Work on a copy: divmod58 mutates its argument in place.
        input = copyOfRange(input, 0, input.length);
        // Count leading zeroes.
        int zeroCount = 0;
        while (zeroCount < input.length && input[zeroCount] == 0) {
            ++zeroCount;
        }
        // The actual encoding: repeated division by 58, digits written backwards.
        byte[] temp = new byte[input.length * 2];
        int j = temp.length;
        int startAt = zeroCount;
        while (startAt < input.length) {
            byte mod = divmod58(input, startAt);
            if (input[startAt] == 0) {
                ++startAt;
            }
            temp[--j] = (byte) ALPHABET[mod];
        }
        // Strip extra '1' if there are some after decoding.
        while (j < temp.length && temp[j] == ALPHABET[0]) {
            ++j;
        }
        // Add as many leading '1' as there were leading zeros.
        while (--zeroCount >= 0) {
            temp[--j] = (byte) ALPHABET[0];
        }
        byte[] output = copyOfRange(temp, j, temp.length);
        return new String(output, StandardCharsets.US_ASCII);
    }

    /**
     * Decodes a base58 string back to bytes.
     *
     * @throws IllegalArgumentException on characters outside the alphabet
     */
    public static byte[] decode(String input) throws IllegalArgumentException {
        if (input.length() == 0) {
            return new byte[0];
        }
        byte[] input58 = new byte[input.length()];
        // Transform the String to a base58 byte sequence
        for (int i = 0; i < input.length(); ++i) {
            char c = input.charAt(i);
            int digit58 = -1;
            if (c >= 0 && c < 128) {
                digit58 = INDEXES[c];
            }
            if (digit58 < 0) {
                throw new IllegalArgumentException("Illegal character " + c + " at " + i);
            }
            input58[i] = (byte) digit58;
        }
        // Count leading zeroes
        int zeroCount = 0;
        while (zeroCount < input58.length && input58[zeroCount] == 0) {
            ++zeroCount;
        }
        // The decoding: repeated division by 256.
        byte[] temp = new byte[input.length()];
        int j = temp.length;
        int startAt = zeroCount;
        while (startAt < input58.length) {
            byte mod = divmod256(input58, startAt);
            if (input58[startAt] == 0) {
                ++startAt;
            }
            temp[--j] = mod;
        }
        // Do no add extra leading zeroes, move j to first non null byte.
        while (j < temp.length && temp[j] == 0) {
            ++j;
        }
        return copyOfRange(temp, j - zeroCount, temp.length);
    }

    public static BigInteger decodeToBigInteger(String input) throws IllegalArgumentException {
        return new BigInteger(1, decode(input));
    }

    //
    // number -> number / 58, returns number % 58
    // Treats number[startAt..] as a big-endian base-256 integer; divides it
    // in place by 58 and returns the remainder.
    //
    private static byte divmod58(byte[] number, int startAt) {
        int remainder = 0;
        for (int i = startAt; i < number.length; i++) {
            int digit256 = (int) number[i] & 0xFF;
            int temp = remainder * 256 + digit256;
            number[i] = (byte) (temp / 58);
            remainder = temp % 58;
        }
        return (byte) remainder;
    }

    //
    // number -> number / 256, returns number % 256
    // Same scheme as divmod58, but for a big-endian base-58 integer.
    //
    private static byte divmod256(byte[] number58, int startAt) {
        int remainder = 0;
        for (int i = startAt; i < number58.length; i++) {
            int digit58 = (int) number58[i] & 0xFF;
            int temp = remainder * 58 + digit58;
            number58[i] = (byte) (temp / 256);
            remainder = temp % 256;
        }
        return (byte) remainder;
    }

    private static byte[] copyOfRange(byte[] source, int from, int to) {
        byte[] range = new byte[to - from];
        System.arraycopy(source, from, range, 0, range.length);
        return range;
    }
}
| 1,275
| 109
| 1,384
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/util/ExecutorPool.java
|
ExecutorPool
|
createExecutor
|
class ExecutorPool {
public static ThreadFactory newThreadFactory(String namePrefix) {
HgAssert.isArgumentNotNull(namePrefix, "namePrefix");
return new DefaultThreadFactory(namePrefix);
}
public static ThreadPoolExecutor createExecutor(String name, long keepAliveTime,
int coreThreads, int maxThreads) {<FILL_FUNCTION_BODY>}
public static class DefaultThreadFactory implements ThreadFactory {
private final AtomicInteger threadNumber = new AtomicInteger(1);
private final String namePrefix;
public DefaultThreadFactory(String threadNamePrefix) {
this.namePrefix = threadNamePrefix + "-";
}
@Override
public Thread newThread(Runnable r) {
Thread t = new Thread(null, r, namePrefix + threadNumber.getAndIncrement(), 0);
t.setDaemon(true);
t.setPriority(Thread.NORM_PRIORITY);
return t;
}
}
}
|
return new ThreadPoolExecutor(coreThreads, maxThreads, keepAliveTime, TimeUnit.SECONDS,
new SynchronousQueue<>(),
newThreadFactory(name),
new ThreadPoolExecutor.CallerRunsPolicy()
);
| 253
| 63
| 316
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/util/HgAssert.java
|
HgAssert
|
isTrue
|
/**
 * Argument-validation helpers; failed checks throw IllegalArgumentException.
 */
class HgAssert {

    /**
     * Assert the expression is true.
     *
     * @throws IllegalArgumentException when the expression is false, or when
     *                                  the message itself is null
     */
    public static void isTrue(boolean expression, String message) {
        if (message == null) {
            throw new IllegalArgumentException("message is null");
        }
        if (!expression) {
            throw new IllegalArgumentException(message);
        }
    }

    // Lazy-message variant: the supplier is only invoked on failure.
    public static void isTrue(boolean expression, Supplier<String> msg) {
        if (msg == null) {
            throw new IllegalArgumentException("message supplier is null");
        }
        if (!expression) {
            throw new IllegalArgumentException(msg.get());
        }
    }

    public static void isFalse(boolean expression, String message) {
        isTrue(!expression, message);
    }

    public static void isFalse(boolean expression, Supplier<String> msg) {
        isTrue(!expression, msg);
    }

    public static void isArgumentValid(byte[] bytes, String parameter) {
        isFalse(isInvalid(bytes), () -> "The argument is invalid: " + parameter);
    }

    public static void isArgumentValid(String str, String parameter) {
        isFalse(isInvalid(str), () -> "The argument is invalid: " + parameter);
    }

    public static void isArgumentValid(Collection<?> collection, String parameter) {
        isFalse(isInvalid(collection), () -> "The argument is invalid: " + parameter);
    }

    public static void isArgumentNotNull(Object obj, String parameter) {
        isTrue(obj != null, () -> "The argument is null: " + parameter);
    }

    // NOTE(review): method name "istValid" looks like a typo of "isValid",
    // but renaming would break existing callers — kept as-is.
    public static void istValid(byte[] bytes, String msg) {
        isFalse(isInvalid(bytes), msg);
    }

    public static void isValid(String str, String msg) {
        isFalse(isInvalid(str), msg);
    }

    public static void isNotNull(Object obj, String msg) {
        isTrue(obj != null, msg);
    }

    /** True when obj is found in objs by equals(); false for null/empty input. */
    public static boolean isContains(Object[] objs, Object obj) {
        if (objs == null || objs.length == 0 || obj == null) {
            return false;
        }
        for (Object item : objs) {
            if (obj.equals(item)) {
                return true;
            }
        }
        return false;
    }

    /** True when the array is null/empty or any element is null/blank. */
    public static boolean isInvalid(String... strs) {
        if (strs == null || strs.length == 0) {
            return true;
        }
        for (String item : strs) {
            if (item == null || "".equals(item.trim())) {
                return true;
            }
        }
        return false;
    }

    public static boolean isInvalid(byte[] bytes) {
        return bytes == null || bytes.length == 0;
    }

    public static boolean isInvalid(Map<?, ?> map) {
        return map == null || map.isEmpty();
    }

    public static boolean isInvalid(Collection<?> list) {
        return list == null || list.isEmpty();
    }

    public static <T> boolean isContains(Collection<T> list, T item) {
        if (list == null || item == null) {
            return false;
        }
        return list.contains(item);
    }

    /** True when the varargs array itself is null or any element is null. */
    public static boolean isNull(Object... objs) {
        if (objs == null) {
            return true;
        }
        for (Object item : objs) {
            if (item == null) {
                return true;
            }
        }
        return false;
    }
}
| 828
| 45
| 873
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/util/HgBufferProxy.java
|
HgBufferProxy
|
receive
|
class HgBufferProxy<T> {
private final BlockingQueue<Supplier<T>> queue;
private final AtomicBoolean closed = new AtomicBoolean(false);
private final ReentrantLock lock = new ReentrantLock();
private final Runnable task;
private Throwable err;
private HgBufferProxy(Runnable task) {
this.task = task;
this.queue = new LinkedBlockingQueue<>();
}
public static HgBufferProxy of(Runnable task) {
HgAssert.isArgumentNotNull(task, "task");
return new HgBufferProxy(task);
}
public void send(T t) {
if (t == null) {
throw new IllegalArgumentException("the argument t is null");
}
if (this.closed.get()) {
return;
}
this.lock.lock();
try {
this.queue.offer(() -> t);
} finally {
lock.unlock();
}
}
private void apply() {
this.lock.lock();
try {
if (!this.closed.get()) {
this.task.run();
Thread.yield();
}
} finally {
this.lock.unlock();
}
}
/**
* return an item from the chan
*
* @return null when the chan has been closed
* @throws RuntimeException
*/
@CheckForNull
public T receive(int time, Consumer<Integer> callback) {<FILL_FUNCTION_BODY>}
public boolean isClosed() {
return this.closed.get();
}
/**
* @throws RuntimeException when fail to close the chan
*/
public void close() {
if (this.closed.get()) {
return;
}
lock.lock();
this.closed.set(true);
try {
this.queue.offer(() -> null);
} finally {
lock.unlock();
}
}
public void setError(Throwable streamErr) {
this.err = streamErr;
}
private void checkErr() {
if (this.err != null) {
throw HgStoreClientException.of(this.err);
}
}
}
|
Supplier<T> s;
if (this.closed.get()) {
s = this.queue.poll();
this.checkErr();
return s != null ? s.get() : null;
}
if (this.queue.size() <= 1) {
this.apply();
}
lock.lock();
try {
if (this.isClosed()) {
s = this.queue.poll();
this.checkErr();
return s != null ? s.get() : null;
}
} finally {
lock.unlock();
}
try {
s = this.queue.poll(time, TimeUnit.SECONDS);
} catch (Throwable t) {
log.error("failed to receive a item from channel, cause by: ", t);
throw HgStoreClientException.of(t);
}
if (s == null) {
if (this.closed.get()) {
s = this.queue.poll();
} else {
if (callback == null) {
throw new RuntimeException("timeout, max time: " + time + " seconds");
} else {
callback.accept(time);
}
}
}
this.checkErr();
return s != null ? s.get() : null;
| 587
| 331
| 918
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/util/HgStoreClientConfig.java
|
HgStoreClientConfig
|
overrideViaProperties
|
class HgStoreClientConfig {
private static final int GRPC_DEFAULT_TIMEOUT_SECONDS = 100;
private static final int GRPC_DEFAULT_MAX_INBOUND_MESSAGE_SIZE = 1024 * 1024 * 1024;
private static final int GRPC_DEFAULT_MAX_OUTBOUND_MESSAGE_SIZE = 1024 * 1024 * 1024;
private static final int NET_KV_SCANNER_PAGE_SIZE = 10_000;
private static final int NET_KV_SCANNER_HAVE_NEXT_TIMEOUT = 30 * 60;
private static final String fileName = "hg-store-client";
private static PropertyResourceBundle prb = null;
private static HgStoreClientConfig defaultInstance;
private Integer grpcTimeoutSeconds = GRPC_DEFAULT_TIMEOUT_SECONDS;
private Integer grpcMaxInboundMessageSize = GRPC_DEFAULT_MAX_INBOUND_MESSAGE_SIZE;
private Integer grpcMaxOutboundMessageSize = GRPC_DEFAULT_MAX_OUTBOUND_MESSAGE_SIZE;
private Integer netKvScannerPageSize = NET_KV_SCANNER_PAGE_SIZE;
private Integer netKvScannerHaveNextTimeout = NET_KV_SCANNER_HAVE_NEXT_TIMEOUT;
private HgStoreClientConfig() {
}
public synchronized static HgStoreClientConfig of() {
if (defaultInstance != null) {
return defaultInstance;
}
defaultInstance = new HgStoreClientConfig();
overrideViaProperties(defaultInstance);
return defaultInstance;
}
private static void overrideViaProperties(HgStoreClientConfig config) {<FILL_FUNCTION_BODY>}
public Integer getGrpcTimeoutSeconds() {
return grpcTimeoutSeconds;
}
public HgStoreClientConfig setGrpcTimeoutSeconds(Integer grpcTimeoutSeconds) {
this.grpcTimeoutSeconds = grpcTimeoutSeconds;
return this;
}
public Integer getGrpcMaxInboundMessageSize() {
return grpcMaxInboundMessageSize;
}
public HgStoreClientConfig setGrpcMaxInboundMessageSize(Integer grpcMaxInboundMessageSize) {
this.grpcMaxInboundMessageSize = grpcMaxInboundMessageSize;
return this;
}
public Integer getGrpcMaxOutboundMessageSize() {
return grpcMaxOutboundMessageSize;
}
public HgStoreClientConfig setGrpcMaxOutboundMessageSize(Integer grpcMaxOutboundMessageSize) {
this.grpcMaxOutboundMessageSize = grpcMaxOutboundMessageSize;
return this;
}
public Integer getNetKvScannerPageSize() {
return netKvScannerPageSize;
}
public HgStoreClientConfig setNetKvScannerPageSize(Integer netKvScannerPageSize) {
this.netKvScannerPageSize = netKvScannerPageSize;
return this;
}
public Integer getNetKvScannerHaveNextTimeout() {
return netKvScannerHaveNextTimeout;
}
public HgStoreClientConfig setNetKvScannerHaveNextTimeout(Integer netKvScannerHaveNextTimeout) {
this.netKvScannerHaveNextTimeout = netKvScannerHaveNextTimeout;
return this;
}
private static class PropertiesWrapper {
private final PropertyResourceBundle prb;
PropertiesWrapper(PropertyResourceBundle prb) {
this.prb = prb;
}
Integer getInt(String key, Integer defaultValue) {
String buf = this.getStr(key);
if (buf == null || buf.isEmpty()) {
return defaultValue;
}
Integer res = null;
try {
res = Integer.valueOf(buf);
} catch (Throwable t) {
log.error("Failed to parse a int value[ " + buf + " ] of the key[ " + key + " ].",
t);
}
return res;
}
String getStr(String key, String defaultValue) {
String res = getStr(key);
if (res == null && defaultValue != null) {
return defaultValue;
}
return res;
}
String getStr(String key) {
String value = null;
if (!prb.containsKey(key)) {
return null;
}
try {
value = prb.getString(key);
} catch (Exception e) {
log.warn("Failed to get value with key: [" + key + "]");
return null;
}
if (value != null) {
value = value.trim();
}
return value;
}
}
}
|
try {
prb = (PropertyResourceBundle) PropertyResourceBundle.getBundle(fileName);
} catch (Throwable t) {
log.warn("Failed to load " + fileName + ".properties.");
log.info("Default configuration was activated.");
return;
}
PropertiesWrapper wrapper = new PropertiesWrapper(prb);
log.info("grpc.timeout.seconds = "
+ (config.grpcTimeoutSeconds = wrapper.getInt("grpc.timeout.seconds"
, config.grpcTimeoutSeconds))
);
log.info("grpc.max.inbound.message.size = "
+ (config.grpcMaxInboundMessageSize = GRPC_DEFAULT_MAX_INBOUND_MESSAGE_SIZE)
);
log.info("grpc.max.outbound.message.size = "
+ (config.grpcMaxOutboundMessageSize = GRPC_DEFAULT_MAX_OUTBOUND_MESSAGE_SIZE)
);
log.info("net.kv.scanner.page.size = "
+ (config.netKvScannerPageSize = wrapper.getInt("net.kv.scanner.page.size"
, config.netKvScannerPageSize))
);
log.info("net.kv.scanner.have.next.timeout = {}", config.netKvScannerHaveNextTimeout);
| 1,250
| 343
| 1,593
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/util/HgStoreClientUtil.java
|
HgStoreClientUtil
|
toStr
|
class HgStoreClientUtil {
public static HgOwnerKey toOwnerKey(byte[] key) {
return new HgOwnerKey(HgStoreClientConst.EMPTY_BYTES, key);
}
public static HgOwnerKey toOwnerKey(String key) {
return new HgOwnerKey(HgStoreClientConst.EMPTY_BYTES, toBytes(key));
}
public static HgOwnerKey toAllNodeKey(String key) {
return new HgOwnerKey(HgStoreClientConst.ALL_PARTITION_OWNER, toBytes(key));
}
public static HgOwnerKey toOwnerKey(String owner, String key) {
return new HgOwnerKey(toBytes(owner), toBytes(key));
}
public static HgStoreClientException err(String msg) {
log.error(msg);
return HgStoreClientException.of(msg);
}
public static boolean isValid(HgOwnerKey key) {
if (key == null) {
return false;
}
if (key.getKey() == null) {
return false;
}
return key.getKey().length != 0;
}
public static String toStr(byte[] b) {
if (b == null) {
return "";
}
if (b.length == 0) {
return "";
}
return new String(b, StandardCharsets.UTF_8);
}
public static String toByteStr(byte[] b) {
if (b == null) {
return "";
}
if (b.length == 0) {
return "";
}
return Arrays.toString(b);
}
public static String toStr(HgOwnerKey ownerKey) {<FILL_FUNCTION_BODY>}
public static byte[] toBytes(String str) {
if (str == null) {
return null;
}
return str.getBytes(StandardCharsets.UTF_8);
}
public static byte[] toBytes(long l) {
ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES);
buffer.putLong(l);
return buffer.array();
}
public static byte[] toIntBytes(final int i) {
ByteBuffer buffer = ByteBuffer.allocate(Integer.BYTES);
buffer.putInt(i);
return buffer.array();
}
public static long toLong(byte[] bytes) {
ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES);
buffer.put(bytes);
buffer.flip();//need flip
return buffer.getLong();
}
public static int toInt(byte[] bytes) {
ByteBuffer buffer = ByteBuffer.allocate(Integer.BYTES);
buffer.put(bytes);
buffer.flip();//need flip
return buffer.getInt();
}
public static String getHostAddress() {
String res = null;
try {
res = InetAddress.getLocalHost().getHostAddress();
} catch (UnknownHostException e) {
e.printStackTrace();
res = "";
}
return res;
}
public static byte[] combine(byte[] first, byte[] second) {
if (first == null) {
first = HgStoreClientConst.EMPTY_BYTES;
}
if (second == null) {
second = HgStoreClientConst.EMPTY_BYTES;
}
byte[] result = new byte[first.length + second.length];
System.arraycopy(first, 0, result, 0, first.length);
System.arraycopy(second, 0, result, first.length, second.length);
return result;
}
public static void printCallStack(String txt, Throwable ex) {
StackTraceElement[] stackElements = ex.getStackTrace();
StringBuilder sb = new StringBuilder();
sb.append(txt).append(":\n");
if (stackElements != null) {
for (int i = 0; i < stackElements.length; i++) {
sb.append(stackElements[i].getClassName()).append(" : ")
.append(stackElements[i].getMethodName()).append(" [ ");
sb.append(stackElements[i].getLineNumber()).append(" ]\n");
}
sb.append(
"--------------------------------------------------------------------------------------\n");
}
log.error(sb.toString());
}
}
|
if (ownerKey == null) {
return "";
}
return "{ " +
"owner: " + Arrays.toString(ownerKey.getOwner()) +
", key: " + toStr(ownerKey.getKey()) +
" }";
| 1,143
| 68
| 1,211
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/util/HgUuid.java
|
HgUuid
|
encode
|
class HgUuid {
private static String encode(UUID uuid) {<FILL_FUNCTION_BODY>}
/**
* Get a UUID in Base58 FORM
*
* @return
*/
public static String newUUID() {
return encode(UUID.randomUUID());
}
}
|
ByteBuffer bb = ByteBuffer.wrap(new byte[16]);
bb.putLong(uuid.getMostSignificantBits());
bb.putLong(uuid.getLeastSignificantBits());
return Base58.encode(bb.array());
| 84
| 69
| 153
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/util/MetricX.java
|
MetricX
|
plusIteratorWait
|
class MetricX {
// Total working time consumed by Iterators
public static AtomicLong iteratorSum = new AtomicLong();
// Num of Iterators
public static AtomicLong iteratorCount = new AtomicLong();
// Max working time consumed by Iterators
public static AtomicLong iteratorMax = new AtomicLong();
public AtomicLong failureCount = new AtomicLong();
// Combined to be used to record a task's time
private long start;
private long end;
private MetricX(long start) {
this.start = start;
}
public static MetricX ofStart() {
return new MetricX(System.currentTimeMillis());
}
public static void plusIteratorWait(long nanoSeconds) {<FILL_FUNCTION_BODY>}
/**
* amount of waiting
*
* @return millisecond
*/
public static long getIteratorWait() {
return iteratorSum.get() / 1_000_000;
}
/**
* average of waiting
*
* @return millisecond
*/
public static long getIteratorWaitAvg() {
if (iteratorCount.get() == 0) {
return -1;
}
return getIteratorWait() / iteratorCount.get();
}
/**
* maximum of waiting
*
* @return millisecond
*/
public static long getIteratorWaitMax() {
return iteratorMax.get() / 1_000_000;
}
public static long getIteratorCount() {
return iteratorCount.get();
}
public long start() {
return this.start = System.currentTimeMillis();
}
public long end() {
return this.end = System.currentTimeMillis();
}
public long past() {
return this.end - this.start;
}
public void countFail() {
this.failureCount.getAndIncrement();
}
public long getFailureCount() {
return this.failureCount.get();
}
}
|
iteratorSum.addAndGet(nanoSeconds);
iteratorCount.getAndIncrement();
if (iteratorMax.get() < nanoSeconds) {
iteratorMax.set(nanoSeconds);
}
| 542
| 62
| 604
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/util/PropertyUtil.java
|
PropertyUtil
|
getInt
|
class PropertyUtil {
private static final Logger LOG = LoggerFactory.getLogger(PropertyUtil.class);
public static String get(String key) {
return get(key, null);
}
public static String get(final String key, String def) {
if (key == null) {
throw new NullPointerException("key");
}
if (key.isEmpty()) {
throw new IllegalArgumentException("key must not be empty.");
}
String value = null;
try {
if (System.getSecurityManager() == null) {
value = System.getProperty(key);
} else {
value = AccessController.doPrivileged(
(PrivilegedAction<String>) () -> System.getProperty(key));
}
} catch (Exception e) {
LOG.error("exception {}", e);
}
if (value == null) {
return def;
}
return value;
}
public static boolean getBoolean(String key, boolean def) {
String value = get(key, Boolean.toString(def));
value = value.trim().toLowerCase();
if (value.isEmpty()) {
return true;
}
if ("true".equals(value) || "yes".equals(value) || "1".equals(value)) {
return true;
}
if ("false".equals(value) || "no".equals(value) || "0".equals(value)) {
return false;
}
return def;
}
public static int getInt(String key, int def) {<FILL_FUNCTION_BODY>}
public static Object setProperty(String key, String value) {
return System.getProperties().setProperty(key, value);
}
}
|
String value = get(key);
if (value == null) {
return def;
}
value = value.trim().toLowerCase();
try {
return Integer.parseInt(value);
} catch (Exception e) {
LOG.warn("exception ", e);
}
return def;
| 449
| 83
| 532
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-common/src/main/java/org/apache/hugegraph/store/buffer/ByteBufferAllocator.java
|
ByteBufferAllocator
|
get
|
class ByteBufferAllocator {
// size of each Buffer
final int capacity;
// max num of Buffers
final int maxCount;
final BlockingQueue<ByteBuffer> freeQueue = new LinkedBlockingQueue<>();
// current num of Buffers in queue
AtomicInteger totalCount;
public ByteBufferAllocator(int cap, int count) {
this.capacity = cap;
this.maxCount = count;
this.totalCount = new AtomicInteger(0);
}
public ByteBuffer get() throws InterruptedException {<FILL_FUNCTION_BODY>}
public void release(ByteBuffer buffer) {
if (freeQueue.size() < maxCount) {
buffer.clear();
freeQueue.add(buffer);
}
}
}
|
ByteBuffer buffer = null;
while (buffer == null) {
if (freeQueue.size() > 0) {
buffer = freeQueue.poll();
} else if (totalCount.get() < maxCount) {
buffer = ByteBuffer.allocate(capacity);
totalCount.incrementAndGet();
} else {
buffer = freeQueue.poll(1, TimeUnit.SECONDS);
}
}
return buffer;
| 200
| 117
| 317
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-common/src/main/java/org/apache/hugegraph/store/buffer/KVByteBuffer.java
|
KVByteBuffer
|
getBytes
|
class KVByteBuffer {
ByteBuffer buffer;
public KVByteBuffer(int capacity) {
buffer = ByteBuffer.allocate(capacity);
}
public KVByteBuffer(byte[] buffer) {
this.buffer = ByteBuffer.wrap(buffer);
}
public KVByteBuffer(ByteBuffer buffer) {
this.buffer = buffer;
}
public void clear() {
this.buffer.clear();
}
public KVByteBuffer flip() {
buffer.flip();
return this;
}
public ByteBuffer getBuffer() {
return buffer;
}
public ByteBuffer copyBuffer() {
byte[] buf = new byte[buffer.position()];
System.arraycopy(buffer.array(), 0, buf, 0, buffer.position());
return ByteBuffer.wrap(buf);
}
public void put(byte data) {
buffer.put(data);
}
public void put(byte[] data) {
if (data != null) {
buffer.putInt(data.length);
buffer.put(data);
}
}
public byte[] getBytes() {<FILL_FUNCTION_BODY>}
public byte get() {
return buffer.get();
}
public void putInt(int data) {
buffer.putInt(data);
}
public int getInt() {
return buffer.getInt();
}
public byte[] array() {
return this.buffer.array();
}
public int position() {
return this.buffer.position();
}
public final boolean hasRemaining() {
return this.buffer.hasRemaining();
}
}
|
int len = buffer.getInt();
byte[] data = new byte[len];
buffer.get(data);
return data;
| 442
| 37
| 479
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-common/src/main/java/org/apache/hugegraph/store/term/Bits.java
|
Bits
|
getInt
|
class Bits {
/**
* 大头字节序写入short
*/
public static void putShort(byte[] buf, int offSet, int x) {
buf[offSet] = (byte) (x >> 8);
buf[offSet + 1] = (byte) (x);
}
public static void putInt(byte[] buf, int offSet, int x) {
buf[offSet] = (byte) (x >> 24);
buf[offSet + 1] = (byte) (x >> 16);
buf[offSet + 2] = (byte) (x >> 8);
buf[offSet + 3] = (byte) (x);
}
/**
* 大头字节序读取short
*/
public static int getShort(byte[] buf, int offSet) {
int x = buf[offSet] & 0xff;
x = (x << 8) + (buf[offSet + 1] & 0xff);
return x;
}
public static int getInt(byte[] buf, int offSet) {<FILL_FUNCTION_BODY>}
public static void put(byte[] buf, int offSet, byte[] srcBuf) {
System.arraycopy(srcBuf, 0, buf, offSet, srcBuf.length);
}
public static int toInt(byte[] bytes) {
ByteBuffer buffer = ByteBuffer.allocate(Integer.BYTES);
buffer.put(bytes);
buffer.flip();//need flip
return buffer.getInt();
}
}
|
int x = (buf[offSet] << 24)
+ ((buf[offSet + 1] & 0xff) << 16)
+ ((buf[offSet + 2] & 0xff) << 8)
+ (buf[offSet + 3] & 0xff);
return x;
| 402
| 81
| 483
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-common/src/main/java/org/apache/hugegraph/store/term/HgPair.java
|
HgPair
|
hashCode
|
class HgPair<K, V> implements Serializable {
/**
* Key of this <code>Pair</code>.
*/
private K key;
/**
* Value of this <code>Pair</code>.
*/
private V value;
public HgPair() {
}
/**
* Creates a new pair
*
* @param key The key for this pair
* @param value The value to use for this pair
*/
public HgPair(K key, V value) {
this.key = key;
this.value = value;
}
/**
* Gets the key for this pair.
*
* @return key for this pair
*/
public K getKey() {
return key;
}
public void setKey(K key) {
this.key = key;
}
/**
* Gets the value for this pair.
*
* @return value for this pair
*/
public V getValue() {
return value;
}
public void setValue(V value) {
this.value = value;
}
/**
* <p><code>String</code> representation of this
* <code>Pair</code>.</p>
*
* <p>The default name/value delimiter '=' is always used.</p>
*
* @return <code>String</code> representation of this <code>Pair</code>
*/
@Override
public String toString() {
return key + "=" + value;
}
/**
* <p>Generate a hash code for this <code>Pair</code>.</p>
*
* <p>The hash code is calculated using both the name and
* the value of the <code>Pair</code>.</p>
*
* @return hash code for this <code>Pair</code>
*/
@Override
public int hashCode() {<FILL_FUNCTION_BODY>}
/**
* <p>Test this <code>Pair</code> for equality with another
* <code>Object</code>.</p>
*
* <p>If the <code>Object</code> to be tested is not a
* <code>Pair</code> or is <code>null</code>, then this method
* returns <code>false</code>.</p>
*
* <p>Two <code>Pair</code>s are considered equal if and only if
* both the names and values are equal.</p>
*
* @param o the <code>Object</code> to test for
* equality with this <code>Pair</code>
* @return <code>true</code> if the given <code>Object</code> is
* equal to this <code>Pair</code> else <code>false</code>
*/
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o instanceof HgPair) {
HgPair pair = (HgPair) o;
if (!Objects.equals(key, pair.key)) {
return false;
}
return Objects.equals(value, pair.value);
}
return false;
}
}
|
// name's hashCode is multiplied by an arbitrary prime number (13)
// in order to make sure there is a difference in the hashCode between
// these two parameters:
// name: a value: aa
// name: aa value: a
return key.hashCode() * 13 + (value == null ? 0 : value.hashCode());
| 871
| 96
| 967
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-common/src/main/java/org/apache/hugegraph/store/term/HgTriple.java
|
HgTriple
|
hashCode
|
class HgTriple<X, Y, Z> {
private final X x;
private final Y y;
private final Z z;
private int hash = -1;
public HgTriple(X x, Y y, Z z) {
this.x = x;
this.y = y;
this.z = z;
}
public X getX() {
return x;
}
public Y getY() {
return y;
}
public Z getZ() {
return z;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
HgTriple<?, ?, ?> hgTriple = (HgTriple<?, ?, ?>) o;
return Objects.equals(x, hgTriple.x) && Objects.equals(y, hgTriple.y) &&
Objects.equals(z, hgTriple.z);
}
@Override
public int hashCode() {<FILL_FUNCTION_BODY>}
@Override
public String toString() {
return "HgTriple{" +
"x=" + x +
", y=" + y +
", z=" + z +
'}';
}
}
|
if (hash == -1) {
hash = Objects.hash(x, y, z);
}
return this.hash;
| 369
| 38
| 407
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-grpc/src/main/java/org/apache/hugegraph/store/grpc/GraphStoreGrpc.java
|
MethodHandlers
|
invoke
|
class MethodHandlers<Req, Resp> implements
io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
private final GraphStoreImplBase serviceImpl;
private final int methodId;
MethodHandlers(GraphStoreImplBase serviceImpl, int methodId) {
this.serviceImpl = serviceImpl;
this.methodId = methodId;
}
@java.lang.Override
@java.lang.SuppressWarnings("unchecked")
public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
switch (methodId) {
default:
throw new AssertionError();
}
}
@java.lang.Override
@java.lang.SuppressWarnings("unchecked")
public io.grpc.stub.StreamObserver<Req> invoke(
io.grpc.stub.StreamObserver<Resp> responseObserver) {<FILL_FUNCTION_BODY>}
}
|
switch (methodId) {
case METHODID_SCAN_PARTITION:
return (io.grpc.stub.StreamObserver<Req>) serviceImpl.scanPartition(
(io.grpc.stub.StreamObserver<org.apache.hugegraph.store.grpc.Graphpb.ScanResponse>) responseObserver);
default:
throw new AssertionError();
}
| 336
| 101
| 437
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-grpc/src/main/java/org/apache/hugegraph/store/grpc/session/BatchCommitReq.java
|
Builder
|
mergeFrom
|
class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:BatchCommitReq)
org.apache.hugegraph.store.grpc.session.BatchCommitReqOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hugegraph.store.grpc.session.HgStoreSessionProto.internal_static_BatchCommitReq_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hugegraph.store.grpc.session.HgStoreSessionProto.internal_static_BatchCommitReq_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hugegraph.store.grpc.session.BatchCommitReq.class, org.apache.hugegraph.store.grpc.session.BatchCommitReq.Builder.class);
}
// Construct using org.apache.hugegraph.store.grpc.session.BatchCommitReq.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hugegraph.store.grpc.session.HgStoreSessionProto.internal_static_BatchCommitReq_descriptor;
}
@java.lang.Override
public org.apache.hugegraph.store.grpc.session.BatchCommitReq getDefaultInstanceForType() {
return org.apache.hugegraph.store.grpc.session.BatchCommitReq.getDefaultInstance();
}
@java.lang.Override
public org.apache.hugegraph.store.grpc.session.BatchCommitReq build() {
org.apache.hugegraph.store.grpc.session.BatchCommitReq result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hugegraph.store.grpc.session.BatchCommitReq buildPartial() {
org.apache.hugegraph.store.grpc.session.BatchCommitReq result = new org.apache.hugegraph.store.grpc.session.BatchCommitReq(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hugegraph.store.grpc.session.BatchCommitReq) {
return mergeFrom((org.apache.hugegraph.store.grpc.session.BatchCommitReq)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hugegraph.store.grpc.session.BatchCommitReq other) {<FILL_FUNCTION_BODY>}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hugegraph.store.grpc.session.BatchCommitReq parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hugegraph.store.grpc.session.BatchCommitReq) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:BatchCommitReq)
}
|
if (other == org.apache.hugegraph.store.grpc.session.BatchCommitReq.getDefaultInstance()) return this;
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
| 1,577
| 60
| 1,637
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-grpc/src/main/java/org/apache/hugegraph/store/grpc/session/BatchRollbackReq.java
|
Builder
|
mergeFrom
|
class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:BatchRollbackReq)
org.apache.hugegraph.store.grpc.session.BatchRollbackReqOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hugegraph.store.grpc.session.HgStoreSessionProto.internal_static_BatchRollbackReq_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hugegraph.store.grpc.session.HgStoreSessionProto.internal_static_BatchRollbackReq_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hugegraph.store.grpc.session.BatchRollbackReq.class, org.apache.hugegraph.store.grpc.session.BatchRollbackReq.Builder.class);
}
// Construct using org.apache.hugegraph.store.grpc.session.BatchRollbackReq.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hugegraph.store.grpc.session.HgStoreSessionProto.internal_static_BatchRollbackReq_descriptor;
}
@java.lang.Override
public org.apache.hugegraph.store.grpc.session.BatchRollbackReq getDefaultInstanceForType() {
return org.apache.hugegraph.store.grpc.session.BatchRollbackReq.getDefaultInstance();
}
@java.lang.Override
public org.apache.hugegraph.store.grpc.session.BatchRollbackReq build() {
org.apache.hugegraph.store.grpc.session.BatchRollbackReq result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hugegraph.store.grpc.session.BatchRollbackReq buildPartial() {
org.apache.hugegraph.store.grpc.session.BatchRollbackReq result = new org.apache.hugegraph.store.grpc.session.BatchRollbackReq(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hugegraph.store.grpc.session.BatchRollbackReq) {
return mergeFrom((org.apache.hugegraph.store.grpc.session.BatchRollbackReq)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hugegraph.store.grpc.session.BatchRollbackReq other) {
if (other == org.apache.hugegraph.store.grpc.session.BatchRollbackReq.getDefaultInstance()) return this;
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {<FILL_FUNCTION_BODY>}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:BatchRollbackReq)
}
|
org.apache.hugegraph.store.grpc.session.BatchRollbackReq parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hugegraph.store.grpc.session.BatchRollbackReq) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
| 1,507
| 152
| 1,659
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-grpc/src/main/java/org/apache/hugegraph/store/grpc/stream/ScanPauseRequest.java
|
Builder
|
build
|
class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:ScanPauseRequest)
org.apache.hugegraph.store.grpc.stream.ScanPauseRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hugegraph.store.grpc.stream.HgStoreStreamMetaProto.internal_static_ScanPauseRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hugegraph.store.grpc.stream.HgStoreStreamMetaProto.internal_static_ScanPauseRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hugegraph.store.grpc.stream.ScanPauseRequest.class, org.apache.hugegraph.store.grpc.stream.ScanPauseRequest.Builder.class);
}
// Construct using org.apache.hugegraph.store.grpc.stream.ScanPauseRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hugegraph.store.grpc.stream.HgStoreStreamMetaProto.internal_static_ScanPauseRequest_descriptor;
}
@java.lang.Override
public org.apache.hugegraph.store.grpc.stream.ScanPauseRequest getDefaultInstanceForType() {
return org.apache.hugegraph.store.grpc.stream.ScanPauseRequest.getDefaultInstance();
}
@java.lang.Override
public org.apache.hugegraph.store.grpc.stream.ScanPauseRequest build() {<FILL_FUNCTION_BODY>}
@java.lang.Override
public org.apache.hugegraph.store.grpc.stream.ScanPauseRequest buildPartial() {
org.apache.hugegraph.store.grpc.stream.ScanPauseRequest result = new org.apache.hugegraph.store.grpc.stream.ScanPauseRequest(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hugegraph.store.grpc.stream.ScanPauseRequest) {
return mergeFrom((org.apache.hugegraph.store.grpc.stream.ScanPauseRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hugegraph.store.grpc.stream.ScanPauseRequest other) {
if (other == org.apache.hugegraph.store.grpc.stream.ScanPauseRequest.getDefaultInstance()) return this;
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hugegraph.store.grpc.stream.ScanPauseRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hugegraph.store.grpc.stream.ScanPauseRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:ScanPauseRequest)
}
|
org.apache.hugegraph.store.grpc.stream.ScanPauseRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
| 1,557
| 61
| 1,618
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-grpc/src/main/java/org/apache/hugegraph/store/grpc/stream/ScanReceiptRequest.java
|
Builder
|
mergeFrom
|
class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:ScanReceiptRequest)
org.apache.hugegraph.store.grpc.stream.ScanReceiptRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hugegraph.store.grpc.stream.HgStoreStreamMetaProto.internal_static_ScanReceiptRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hugegraph.store.grpc.stream.HgStoreStreamMetaProto.internal_static_ScanReceiptRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hugegraph.store.grpc.stream.ScanReceiptRequest.class, org.apache.hugegraph.store.grpc.stream.ScanReceiptRequest.Builder.class);
}
// Construct using org.apache.hugegraph.store.grpc.stream.ScanReceiptRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
times_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hugegraph.store.grpc.stream.HgStoreStreamMetaProto.internal_static_ScanReceiptRequest_descriptor;
}
@java.lang.Override
public org.apache.hugegraph.store.grpc.stream.ScanReceiptRequest getDefaultInstanceForType() {
return org.apache.hugegraph.store.grpc.stream.ScanReceiptRequest.getDefaultInstance();
}
@java.lang.Override
public org.apache.hugegraph.store.grpc.stream.ScanReceiptRequest build() {
org.apache.hugegraph.store.grpc.stream.ScanReceiptRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.apache.hugegraph.store.grpc.stream.ScanReceiptRequest buildPartial() {
org.apache.hugegraph.store.grpc.stream.ScanReceiptRequest result = new org.apache.hugegraph.store.grpc.stream.ScanReceiptRequest(this);
result.times_ = times_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hugegraph.store.grpc.stream.ScanReceiptRequest) {
return mergeFrom((org.apache.hugegraph.store.grpc.stream.ScanReceiptRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hugegraph.store.grpc.stream.ScanReceiptRequest other) {
if (other == org.apache.hugegraph.store.grpc.stream.ScanReceiptRequest.getDefaultInstance()) return this;
if (other.getTimes() != 0) {
setTimes(other.getTimes());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {<FILL_FUNCTION_BODY>}
private int times_ ;
/**
* <code>uint32 times = 1;</code>
* @return The times.
*/
@java.lang.Override
public int getTimes() {
return times_;
}
/**
* <code>uint32 times = 1;</code>
* @param value The times to set.
* @return This builder for chaining.
*/
public Builder setTimes(int value) {
times_ = value;
onChanged();
return this;
}
/**
* <code>uint32 times = 1;</code>
* @return This builder for chaining.
*/
public Builder clearTimes() {
times_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:ScanReceiptRequest)
}
|
org.apache.hugegraph.store.grpc.stream.ScanReceiptRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hugegraph.store.grpc.stream.ScanReceiptRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
| 1,739
| 150
| 1,889
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-rocksdb/src/main/java/org/apache/hugegraph/rocksdb/access/DBStoreException.java
|
DBStoreException
|
rootCause
|
class DBStoreException extends RuntimeException {
private static final long serialVersionUID = 5956983547131986887L;
public DBStoreException(String message) {
super(message);
}
public DBStoreException(String message, Throwable cause) {
super(message, cause);
}
public DBStoreException(String message, Object... args) {
super(String.format(message, args));
}
public DBStoreException(String message, Throwable cause, Object... args) {
super(String.format(message, args), cause);
}
public DBStoreException(Throwable cause) {
this("Exception in DBStore " + cause.getMessage(), cause);
}
public static Throwable rootCause(Throwable e) {<FILL_FUNCTION_BODY>}
public static boolean isInterrupted(Throwable e) {
Throwable rootCause = DBStoreException.rootCause(e);
return rootCause instanceof InterruptedException ||
rootCause instanceof InterruptedIOException;
}
public Throwable rootCause() {
return rootCause(this);
}
}
|
Throwable cause = e;
while (cause.getCause() != null) {
cause = cause.getCause();
}
return cause;
| 304
| 45
| 349
|
<methods>public void <init>() ,public void <init>(java.lang.String) ,public void <init>(java.lang.Throwable) ,public void <init>(java.lang.String, java.lang.Throwable) <variables>static final long serialVersionUID
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-rocksdb/src/main/java/org/apache/hugegraph/rocksdb/access/RocksDBScanIterator.java
|
RocksDBScanIterator
|
checkArguments
|
class RocksDBScanIterator<T> implements ScanIterator {
private static final byte[] EMPTY_VALUE = new byte[0];
private final RocksIterator rawIt;
private final byte[] keyBegin;
private final byte[] keyEnd;
private final int scanType;
private final AtomicBoolean closed = new AtomicBoolean(false);
private final RocksDBSession.RefCounter iterReference;
private byte[] key;
private boolean matched;
public RocksDBScanIterator(RocksIterator rawIt, byte[] keyBegin, byte[] keyEnd,
int scanType, RocksDBSession.RefCounter iterReference) {
this.rawIt = rawIt;
this.keyBegin = keyBegin;
this.keyEnd = keyEnd;
this.scanType = scanType;
this.key = keyBegin;
this.matched = false;
this.iterReference = iterReference;
this.seek();
}
private void checkArguments() {<FILL_FUNCTION_BODY>}
@Override
public boolean hasNext() {
if (this.closed.get()) {
//log.warn("Iterator has been closed");
return false;
}
this.matched = this.rawIt.isOwningHandle();
if (!this.matched) {
// Maybe closed
return this.matched;
}
this.matched = this.rawIt.isValid();
if (this.matched) {
// Update position for paging
this.key = this.rawIt.key();
this.matched = this.filter(this.key);
}
if (!this.matched) {
// The end
this.key = null;
// Free the iterator if finished
this.close();
}
return this.matched;
}
private void seek() {
if (this.closed.get()) {
log.warn("Iterator has been closed");
return;
}
if (this.keyBegin == null) {
// Seek to the first if no `keyBegin`
this.rawIt.seekToFirst();
} else {
/*
* Seek to `keyBegin`:
* if set SCAN_GT_BEGIN/SCAN_GTE_BEGIN (key > / >= 'xx')
* or if set SCAN_PREFIX_WITH_BEGIN (key prefix with 'xx')
*/
this.rawIt.seek(this.keyBegin);
// Skip `keyBegin` if set SCAN_GT_BEGIN (key > 'xx')
if (this.match(ScanIterator.Trait.SCAN_GT_BEGIN) &&
!this.match(ScanIterator.Trait.SCAN_GTE_BEGIN)) {
while (this.rawIt.isValid() &&
Bytes.equals(this.rawIt.key(), this.keyBegin)) {
this.rawIt.next();
}
}
}
}
@Override
public boolean isValid() {
return this.rawIt.isValid();
}
@Override
public BackendColumn next() {
if (this.closed.get()) {
log.warn("Iterator has been closed");
throw new NoSuchElementException();
}
if (!this.matched) {
if (!this.hasNext()) {
throw new NoSuchElementException();
}
}
BackendColumn col = BackendColumn.of(this.key,
this.match(Trait.SCAN_KEYONLY) ? EMPTY_VALUE :
this.rawIt.value());
this.rawIt.next();
this.matched = false;
return col;
}
@Override
public long count() {
long count = 0L;
while (this.hasNext()) {
this.rawIt.next();
count++;
}
return count;
}
@Override
public byte[] position() {
return this.key;
}
private boolean filter(byte[] v) {
if (this.match(ScanIterator.Trait.SCAN_PREFIX_BEGIN)) {
return Bytes.prefixWith(v, this.keyBegin);
} else if (this.match(ScanIterator.Trait.SCAN_PREFIX_END)) {
assert this.keyEnd != null;
return Bytes.prefixWith(v, this.keyEnd);
} else if (this.match(ScanIterator.Trait.SCAN_LT_END)) {
assert this.keyEnd != null;
if (this.match(ScanIterator.Trait.SCAN_LTE_END)) {
v = Arrays.copyOfRange(v, 0, this.keyEnd.length);
return Bytes.compare(v, this.keyEnd) <= 0;
} else {
return Bytes.compare(v, this.keyEnd) < 0;
}
} else {
return true;
}
}
@Override
public void close() {
if (!this.closed.getAndSet(true)) {
if (this.rawIt.isOwningHandle()) {
this.rawIt.close();
}
this.iterReference.release();
}
}
private boolean match(int expected) {
return (expected & this.scanType) == expected;
}
}
|
E.checkArgument(!(this.match(ScanIterator.Trait.SCAN_PREFIX_BEGIN) &&
this.match(ScanIterator.Trait.SCAN_PREFIX_END)),
"Can't set SCAN_PREFIX_WITH_BEGIN and " +
"SCAN_PREFIX_WITH_END at the same time");
E.checkArgument(!(this.match(ScanIterator.Trait.SCAN_PREFIX_BEGIN) &&
this.match(ScanIterator.Trait.SCAN_GT_BEGIN)),
"Can't set SCAN_PREFIX_WITH_BEGIN and " +
"SCAN_GT_BEGIN/SCAN_GTE_BEGIN at the same time");
E.checkArgument(!(this.match(ScanIterator.Trait.SCAN_PREFIX_END) &&
this.match(ScanIterator.Trait.SCAN_LT_END)),
"Can't set SCAN_PREFIX_WITH_END and " +
"SCAN_LT_END/SCAN_LTE_END at the same time");
if (this.match(ScanIterator.Trait.SCAN_PREFIX_BEGIN)) {
E.checkArgument(this.keyBegin != null,
"Parameter `keyBegin` can't be null " +
"if set SCAN_PREFIX_WITH_BEGIN");
E.checkArgument(this.keyEnd == null,
"Parameter `keyEnd` must be null " +
"if set SCAN_PREFIX_WITH_BEGIN");
}
if (this.match(ScanIterator.Trait.SCAN_PREFIX_END)) {
E.checkArgument(this.keyEnd != null,
"Parameter `keyEnd` can't be null " +
"if set SCAN_PREFIX_WITH_END");
}
if (this.match(ScanIterator.Trait.SCAN_GT_BEGIN)) {
E.checkArgument(this.keyBegin != null,
"Parameter `keyBegin` can't be null " +
"if set SCAN_GT_BEGIN or SCAN_GTE_BEGIN");
}
if (this.match(ScanIterator.Trait.SCAN_LT_END)) {
E.checkArgument(this.keyEnd != null,
"Parameter `keyEnd` can't be null " +
"if set SCAN_LT_END or SCAN_LTE_END");
}
| 1,358
| 600
| 1,958
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-rocksdb/src/main/java/org/apache/hugegraph/rocksdb/access/util/Asserts.java
|
Asserts
|
isTrue
|
class Asserts {
public static void isTrue(boolean expression, String message) {
if (message == null) {
throw new IllegalArgumentException("message is null");
}
if (!expression) {
throw new IllegalArgumentException(message);
}
}
public static void isTrue(boolean expression, Supplier<RuntimeException> s) {<FILL_FUNCTION_BODY>}
}
|
if (s == null) {
throw new IllegalArgumentException("Supplier<RuntimeException> is null");
}
if (!expression) {
throw s.get();
}
| 102
| 48
| 150
|
<no_super_class>
|
apache_incubator-hugegraph
|
incubator-hugegraph/hugegraph-store/hg-store-rocksdb/src/main/java/org/apache/hugegraph/rocksdb/access/util/ZipUtils.java
|
ZipUtils
|
compressDirectoryToZipFile
|
class ZipUtils {
public static void compress(final String rootDir, final String sourceDir,
final String outputFile, final Checksum checksum) throws
IOException {
try (final FileOutputStream fos = new FileOutputStream(outputFile);
final CheckedOutputStream cos = new CheckedOutputStream(fos, checksum);
final ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(cos))) {
ZipUtils.compressDirectoryToZipFile(rootDir, sourceDir, zos);
zos.flush();
fos.getFD().sync();
}
}
private static void compressDirectoryToZipFile(final String rootDir, final String sourceDir,
final ZipOutputStream zos) throws IOException {<FILL_FUNCTION_BODY>}
public static void decompress(final String sourceFile, final String outputDir,
final Checksum checksum) throws IOException {
try (final FileInputStream fis = new FileInputStream(sourceFile);
final CheckedInputStream cis = new CheckedInputStream(fis, checksum);
final ZipInputStream zis = new ZipInputStream(new BufferedInputStream(cis))) {
ZipEntry entry;
while ((entry = zis.getNextEntry()) != null) {
final String fileName = entry.getName();
final Path entryPath = Paths.get(outputDir).resolve(fileName).normalize();
if (!entryPath.startsWith(Paths.get(outputDir).normalize())) {
// The file path is not in the expected directory. There may be a Zip Slip
// vulnerability. Ignore it or handle it accordingly.
continue;
}
final File entryFile = entryPath.toFile();
FileUtils.forceMkdir(entryFile.getParentFile());
try (final FileOutputStream fos = new FileOutputStream(entryFile);
final BufferedOutputStream bos = new BufferedOutputStream(fos)) {
IOUtils.copy(zis, bos);
bos.flush();
fos.getFD().sync();
}
}
IOUtils.copy(cis, NullOutputStream.NULL_OUTPUT_STREAM);
}
}
}
|
final String dir = Paths.get(rootDir, sourceDir).toString();
final File[] files = new File(dir).listFiles();
for (final File file : files) {
final String child = Paths.get(sourceDir, file.getName()).toString();
if (file.isDirectory()) {
compressDirectoryToZipFile(rootDir, child, zos);
} else {
zos.putNextEntry(new ZipEntry(child));
try (final FileInputStream fis = new FileInputStream(file);
final BufferedInputStream bis = new BufferedInputStream(fis)) {
IOUtils.copy(bis, zos);
}
}
}
| 534
| 169
| 703
|
<no_super_class>
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-drl/src/main/java/org/optaplanner/constraint/drl/DrlScoreDirectorFactory.java
|
DrlScoreDirectorFactory
|
assertGlobalScoreHolderExists
|
class DrlScoreDirectorFactory<Solution_, Score_ extends Score<Score_>>
extends AbstractScoreDirectorFactory<Solution_, Score_> {
private final KieBase kieBase;
protected Map<Rule, Function<Solution_, Score_>> ruleToConstraintWeightExtractorMap;
/**
* @param solutionDescriptor never null
* @param kieBase never null
*/
public DrlScoreDirectorFactory(SolutionDescriptor<Solution_> solutionDescriptor, KieBase kieBase) {
super(solutionDescriptor);
this.kieBase = kieBase;
assertGlobalScoreHolderExists(kieBase);
createRuleToConstraintWeightExtractorMap(kieBase);
solutionDescriptor.assertProblemFactsExist();
}
protected void assertGlobalScoreHolderExists(KieBase kieBase) {<FILL_FUNCTION_BODY>}
protected void createRuleToConstraintWeightExtractorMap(KieBase kieBase) {
ConstraintConfigurationDescriptor<Solution_> constraintConfigurationDescriptor = solutionDescriptor
.getConstraintConfigurationDescriptor();
if (constraintConfigurationDescriptor == null) {
ruleToConstraintWeightExtractorMap = new LinkedHashMap<>(0);
return;
}
Collection<ConstraintWeightDescriptor<Solution_>> constraintWeightDescriptors = constraintConfigurationDescriptor
.getConstraintWeightDescriptors();
ruleToConstraintWeightExtractorMap = new LinkedHashMap<>(constraintWeightDescriptors.size());
for (ConstraintWeightDescriptor<Solution_> constraintWeightDescriptor : constraintWeightDescriptors) {
String constraintPackage = constraintWeightDescriptor.getConstraintPackage();
String constraintName = constraintWeightDescriptor.getConstraintName();
Rule rule = kieBase.getRule(constraintPackage, constraintName);
if (rule == null) {
Rule potentialRule = kieBase.getKiePackages().stream().flatMap(kiePackage -> kiePackage.getRules().stream())
.filter(selectedRule -> selectedRule.getName().equals(constraintName)).findFirst().orElse(null);
throw new IllegalStateException("The constraintConfigurationClass ("
+ constraintConfigurationDescriptor.getConstraintConfigurationClass()
+ ") has a @" + ConstraintWeight.class.getSimpleName()
+ " annotated member (" + constraintWeightDescriptor.getMemberAccessor()
+ ") with constraintPackage/rulePackage (" + constraintPackage
+ ") and constraintName/ruleName (" + constraintName
+ ") for which no Drools rule exist in the DRL.\n"
+ (potentialRule != null ? "Maybe the constraintPackage (" + constraintPackage + ") is wrong,"
+ " because there is a rule with the same ruleName (" + constraintName
+ "), but in a different rulePackage (" + potentialRule.getPackageName() + ")."
: "Maybe there is a typo in the constraintName (" + constraintName
+ ") so it not identical to the constraint's ruleName."));
}
Function<Solution_, Score_> constraintWeightExtractor =
(Function<Solution_, Score_>) constraintWeightDescriptor.createExtractor();
ruleToConstraintWeightExtractorMap.put(rule, constraintWeightExtractor);
}
}
public Map<Rule, Function<Solution_, Score_>> getRuleToConstraintWeightExtractorMap() {
return ruleToConstraintWeightExtractorMap;
}
// ************************************************************************
// Complex methods
// ************************************************************************
@Override
public DrlScoreDirector<Solution_, Score_> buildScoreDirector(
boolean lookUpEnabled, boolean constraintMatchEnabledPreference) {
return new DrlScoreDirector<>(this, lookUpEnabled, constraintMatchEnabledPreference);
}
public KieSession newKieSession() {
return kieBase.newKieSession();
}
}
|
boolean hasGlobalScoreHolder = false;
for (KiePackage kiePackage : kieBase.getKiePackages()) {
for (Global global : kiePackage.getGlobalVariables()) {
if (DrlScoreDirector.GLOBAL_SCORE_HOLDER_KEY.equals(global.getName())) {
hasGlobalScoreHolder = true;
// TODO Fail fast once global.getType() can be turned into a Class instead of a String
// if (!ScoreHolder.class.isAssignableFrom(global.getType())) {
// throw new IllegalStateException("The global with name (" + global.getName()
// + ") has a type (" + global.getType()
// + ") that does not implement " + ScoreHolder.class.getSimpleName() + ".");
// }
break;
}
}
}
if (!hasGlobalScoreHolder) {
throw new IllegalArgumentException("The kieBase with kiePackages (" + kieBase.getKiePackages()
+ ") has no global field called " + DrlScoreDirector.GLOBAL_SCORE_HOLDER_KEY + ".\n"
+ "Check if the rule files are found and if the global field is spelled correctly.");
}
| 940
| 314
| 1,254
|
<methods>public void <init>(SolutionDescriptor<Solution_>) ,public void assertScoreFromScratch(Solution_) ,public InnerScoreDirector<Solution_,Score_> buildScoreDirector() ,public InnerScoreDirectorFactory<Solution_,Score_> getAssertionScoreDirectorFactory() ,public org.optaplanner.core.impl.score.trend.InitializingScoreTrend getInitializingScoreTrend() ,public ScoreDefinition<Score_> getScoreDefinition() ,public SolutionDescriptor<Solution_> getSolutionDescriptor() ,public boolean isAssertClonedSolution() ,public void setAssertClonedSolution(boolean) ,public void setAssertionScoreDirectorFactory(InnerScoreDirectorFactory<Solution_,Score_>) ,public void setInitializingScoreTrend(org.optaplanner.core.impl.score.trend.InitializingScoreTrend) <variables>protected boolean assertClonedSolution,protected InnerScoreDirectorFactory<Solution_,Score_> assertionScoreDirectorFactory,protected org.optaplanner.core.impl.score.trend.InitializingScoreTrend initializingScoreTrend,protected final transient Logger logger,protected SolutionDescriptor<Solution_> solutionDescriptor
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-drl/src/main/java/org/optaplanner/constraint/drl/DrlScoreDirectorFactoryService.java
|
DrlScoreDirectorFactoryService
|
buildScoreDirectorFactory
|
class DrlScoreDirectorFactoryService<Solution_, Score_ extends Score<Score_>>
implements ScoreDirectorFactoryService<Solution_, Score_> {
private static final Logger LOGGER = LoggerFactory.getLogger(DrlScoreDirectorFactoryService.class);
private static final AtomicBoolean DRL_DEPRECATION_MESSAGE_SHOWN = new AtomicBoolean();
@Override
public ScoreDirectorType getSupportedScoreDirectorType() {
return ScoreDirectorType.DRL;
}
@Override
public Supplier<AbstractScoreDirectorFactory<Solution_, Score_>> buildScoreDirectorFactory(ClassLoader classLoader,
SolutionDescriptor<Solution_> solutionDescriptor, ScoreDirectorFactoryConfig config,
EnvironmentMode environmentMode) {
if (ConfigUtils.isEmptyCollection(config.getScoreDrlList())
&& ConfigUtils.isEmptyCollection(config.getScoreDrlFileList())) {
if (config.getKieBaseConfigurationProperties() != null) {
throw new IllegalArgumentException(
"If kieBaseConfigurationProperties (" + config.getKieBaseConfigurationProperties()
+ ") is not null, the scoreDrlList (" + config.getScoreDrlList()
+ ") or the scoreDrlFileList (" + config.getScoreDrlFileList() + ") must not be empty.");
}
return null;
}
if (!DRL_DEPRECATION_MESSAGE_SHOWN.getAndSet(true)) {
LOGGER.info("Score DRL is deprecated and will be removed in a future major version of OptaPlanner.\n" +
"Consider migrating to the Constraint Streams API.\n" +
"See migration recipe: https://www.optaplanner.org/download/upgradeRecipe/drl-to-constraint-streams-migration.html");
}
List<String> scoreDrlList = new ArrayList<>();
if (config.getGizmoKieBaseSupplier() == null) {
if (!ConfigUtils.isEmptyCollection(config.getScoreDrlList())) {
for (String scoreDrl : config.getScoreDrlList()) {
if (scoreDrl == null) {
throw new IllegalArgumentException("The scoreDrl (" + scoreDrl + ") cannot be null.");
}
scoreDrlList.add(scoreDrl);
}
}
}
return () -> buildScoreDirectorFactory(classLoader, solutionDescriptor, config, scoreDrlList);
}
private DrlScoreDirectorFactory<Solution_, Score_> buildScoreDirectorFactory(ClassLoader classLoader,
SolutionDescriptor<Solution_> solutionDescriptor, ScoreDirectorFactoryConfig config,
List<String> scoreDrlList) {<FILL_FUNCTION_BODY>}
private static KieBaseConfiguration buildKieBaseConfiguration(ScoreDirectorFactoryConfig config,
KieServices kieServices) {
KieBaseConfiguration kieBaseConfiguration = kieServices.newKieBaseConfiguration();
if (config.getKieBaseConfigurationProperties() != null) {
for (Map.Entry<String, String> entry : config.getKieBaseConfigurationProperties().entrySet()) {
kieBaseConfiguration.setProperty(entry.getKey(), entry.getValue());
}
}
return kieBaseConfiguration;
}
}
|
KieBase kieBase;
if (config.getGizmoKieBaseSupplier() != null) {
kieBase = ((Supplier<KieBase>) config.getGizmoKieBaseSupplier()).get();
} else {
KieHelper kieHelper = new KieHelper(PropertySpecificOption.ALLOWED)
.setClassLoader(classLoader);
scoreDrlList.forEach(scoreDrl -> kieHelper
.addResource(KieServices.get().getResources().newClassPathResource(scoreDrl, classLoader)));
if (!ConfigUtils.isEmptyCollection(config.getScoreDrlFileList())) {
for (File scoreDrlFile : config.getScoreDrlFileList()) {
kieHelper.addResource(KieServices.get().getResources().newFileSystemResource(scoreDrlFile));
}
}
KieBaseConfiguration kieBaseConfiguration = buildKieBaseConfiguration(config, KieServices.get());
kieBaseConfiguration.setOption(KieBaseMutabilityOption.DISABLED); // Performance improvement.
try {
kieBase = kieHelper.build(ExecutableModelProject.class, kieBaseConfiguration);
} catch (Exception ex) {
throw new IllegalStateException("There is an error in a scoreDrl or scoreDrlFile.", ex);
}
}
boolean isDroolsAlphaNetworkEnabled =
Objects.requireNonNullElse(config.isDroolsAlphaNetworkCompilationEnabled(), true);
if (isDroolsAlphaNetworkEnabled) {
KieBaseUpdaterANC.generateAndSetInMemoryANC(kieBase); // Enable Alpha Network Compiler for performance.
}
return new DrlScoreDirectorFactory<>(solutionDescriptor, kieBase);
| 830
| 445
| 1,275
|
<no_super_class>
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-drl/src/main/java/org/optaplanner/constraint/drl/holder/HardMediumSoftLongScoreHolderImpl.java
|
HardMediumSoftLongScoreHolderImpl
|
impactScore
|
class HardMediumSoftLongScoreHolderImpl extends AbstractScoreHolder<HardMediumSoftLongScore>
implements HardMediumSoftLongScoreHolder {
protected final Map<Rule, LongMatchExecutor> matchExecutorByNumberMap = new LinkedHashMap<>();
/** Slower than {@link #matchExecutorByNumberMap} */
protected final Map<Rule, ScoreMatchExecutor<HardMediumSoftLongScore>> matchExecutorByScoreMap = new LinkedHashMap<>();
protected long hardScore;
protected long mediumScore;
protected long softScore;
public HardMediumSoftLongScoreHolderImpl(boolean constraintMatchEnabled) {
super(constraintMatchEnabled);
}
public long getHardScore() {
return hardScore;
}
public long getMediumScore() {
return mediumScore;
}
public long getSoftScore() {
return softScore;
}
// ************************************************************************
// Setup methods
// ************************************************************************
@Override
public void configureConstraintWeight(Rule rule, HardMediumSoftLongScore constraintWeight) {
super.configureConstraintWeight(rule, constraintWeight);
LongMatchExecutor matchExecutor;
if (constraintWeight.isZero()) {
matchExecutor = (RuleContext kcontext, long matchWeight) -> {
};
} else if (constraintWeight.mediumScore() == 0 && constraintWeight.softScore() == 0) {
matchExecutor =
(RuleContext kcontext, long matchWeight) -> addHardConstraintMatch(kcontext,
constraintWeight.hardScore() * matchWeight);
} else if (constraintWeight.hardScore() == 0 && constraintWeight.softScore() == 0) {
matchExecutor =
(RuleContext kcontext, long matchWeight) -> addMediumConstraintMatch(kcontext,
constraintWeight.mediumScore() * matchWeight);
} else if (constraintWeight.hardScore() == 0 && constraintWeight.mediumScore() == 0) {
matchExecutor =
(RuleContext kcontext, long matchWeight) -> addSoftConstraintMatch(kcontext,
constraintWeight.softScore() * matchWeight);
} else {
matchExecutor =
(RuleContext kcontext, long matchWeight) -> addMultiConstraintMatch(kcontext,
constraintWeight.hardScore() * matchWeight, constraintWeight.mediumScore() * matchWeight,
constraintWeight.softScore() * matchWeight);
}
matchExecutorByNumberMap.put(rule, matchExecutor);
matchExecutorByScoreMap.put(rule, (RuleContext kcontext,
HardMediumSoftLongScore weightMultiplier) -> addMultiConstraintMatch(kcontext,
constraintWeight.hardScore() * weightMultiplier.hardScore(),
constraintWeight.mediumScore() * weightMultiplier.mediumScore(),
constraintWeight.softScore() * weightMultiplier.softScore()));
}
// ************************************************************************
// Penalize and reward methods
// ************************************************************************
@Override
public void penalize(RuleContext kcontext) {
impactScore(kcontext, -1L);
}
@Override
public void penalize(RuleContext kcontext, long weightMultiplier) {
impactScore(kcontext, -weightMultiplier);
}
@Override
public void penalize(RuleContext kcontext, long hardWeightMultiplier, long mediumWeightMultiplier,
long softWeightMultiplier) {
impactScore(kcontext, -hardWeightMultiplier, -mediumWeightMultiplier, -softWeightMultiplier);
}
@Override
public void reward(RuleContext kcontext) {
impactScore(kcontext, 1L);
}
@Override
public void reward(RuleContext kcontext, long weightMultiplier) {
impactScore(kcontext, weightMultiplier);
}
@Override
public void reward(RuleContext kcontext, long hardWeightMultiplier, long mediumWeightMultiplier,
long softWeightMultiplier) {
impactScore(kcontext, hardWeightMultiplier, mediumWeightMultiplier, softWeightMultiplier);
}
@Override
public void impactScore(RuleContext kcontext) {
impactScore(kcontext, 1L);
}
@Override
public void impactScore(RuleContext kcontext, int weightMultiplier) {
impactScore(kcontext, (long) weightMultiplier);
}
@Override
public void impactScore(RuleContext kcontext, long weightMultiplier) {
Rule rule = kcontext.getRule();
LongMatchExecutor matchExecutor = matchExecutorByNumberMap.get(rule);
if (matchExecutor == null) {
throw new IllegalStateException("The DRL rule (" + rule.getPackageName() + ":" + rule.getName()
+ ") does not match a @" + ConstraintWeight.class.getSimpleName() + " on the @"
+ ConstraintConfiguration.class.getSimpleName() + " annotated class.");
}
matchExecutor.accept(kcontext, weightMultiplier);
}
@Override
public void impactScore(RuleContext kcontext, BigDecimal weightMultiplier) {
throw new UnsupportedOperationException("In the rule (" + kcontext.getRule().getName()
+ "), the scoreHolder class (" + getClass()
+ ") does not support a BigDecimal weightMultiplier (" + weightMultiplier + ").\n"
+ "If you're using constraint streams, maybe switch from penalizeBigDecimal() to penalizeLong().");
}
private void impactScore(RuleContext kcontext, long hardWeightMultiplier, long mediumWeightMultiplier,
long softWeightMultiplier) {<FILL_FUNCTION_BODY>}
// ************************************************************************
// Other match methods
// ************************************************************************
@Override
public void addHardConstraintMatch(RuleContext kcontext, long hardWeight) {
hardScore += hardWeight;
registerConstraintMatch(kcontext, () -> hardScore -= hardWeight, () -> HardMediumSoftLongScore.ofHard(hardWeight));
}
@Override
public void addMediumConstraintMatch(RuleContext kcontext, long mediumWeight) {
mediumScore += mediumWeight;
registerConstraintMatch(kcontext, () -> mediumScore -= mediumWeight,
() -> HardMediumSoftLongScore.ofMedium(mediumWeight));
}
@Override
public void addSoftConstraintMatch(RuleContext kcontext, long softWeight) {
softScore += softWeight;
registerConstraintMatch(kcontext, () -> softScore -= softWeight, () -> HardMediumSoftLongScore.ofSoft(softWeight));
}
@Override
public void addMultiConstraintMatch(RuleContext kcontext, long hardWeight, long mediumWeight, long softWeight) {
hardScore += hardWeight;
mediumScore += mediumWeight;
softScore += softWeight;
registerConstraintMatch(kcontext,
() -> {
hardScore -= hardWeight;
mediumScore -= mediumWeight;
softScore -= softWeight;
},
() -> HardMediumSoftLongScore.of(hardWeight, mediumWeight, softWeight));
}
@Override
public HardMediumSoftLongScore extractScore(int initScore) {
return HardMediumSoftLongScore.ofUninitialized(initScore, hardScore, mediumScore, softScore);
}
}
|
Rule rule = kcontext.getRule();
ScoreMatchExecutor<HardMediumSoftLongScore> matchExecutor = matchExecutorByScoreMap.get(rule);
if (matchExecutor == null) {
throw new IllegalStateException("The DRL rule (" + rule.getPackageName() + ":" + rule.getName()
+ ") does not match a @" + ConstraintWeight.class.getSimpleName() + " on the @"
+ ConstraintConfiguration.class.getSimpleName() + " annotated class.");
}
matchExecutor.accept(kcontext,
HardMediumSoftLongScore.of(hardWeightMultiplier, mediumWeightMultiplier, softWeightMultiplier));
| 1,816
| 165
| 1,981
|
<methods>public static ScoreHolder_ buildScoreHolder(ScoreDefinition<Score_>, boolean) ,public void configureConstraintWeight(Rule, org.optaplanner.core.api.score.buildin.hardmediumsoftlong.HardMediumSoftLongScore) ,public abstract org.optaplanner.core.api.score.buildin.hardmediumsoftlong.HardMediumSoftLongScore extractScore(int) ,public Map<java.lang.String,ConstraintMatchTotal<org.optaplanner.core.api.score.buildin.hardmediumsoftlong.HardMediumSoftLongScore>> getConstraintMatchTotalMap() ,public Map<java.lang.Object,Indictment<org.optaplanner.core.api.score.buildin.hardmediumsoftlong.HardMediumSoftLongScore>> getIndictmentMap() ,public void impactScore(RuleContext) ,public abstract void impactScore(RuleContext, int) ,public abstract void impactScore(RuleContext, long) ,public abstract void impactScore(RuleContext, java.math.BigDecimal) ,public boolean isConstraintMatchEnabled() <variables>private static final java.lang.String CUSTOM_SCORE_HOLDER_CLASS_PROPERTY_NAME,protected final non-sealed boolean constraintMatchEnabled,protected final non-sealed Map<java.lang.String,ConstraintMatchTotal<org.optaplanner.core.api.score.buildin.hardmediumsoftlong.HardMediumSoftLongScore>> constraintMatchTotalMap,protected final non-sealed Map<java.lang.Object,Indictment<org.optaplanner.core.api.score.buildin.hardmediumsoftlong.HardMediumSoftLongScore>> indictmentMap
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-drl/src/main/java/org/optaplanner/constraint/drl/holder/HardSoftLongScoreHolderImpl.java
|
HardSoftLongScoreHolderImpl
|
addMultiConstraintMatch
|
class HardSoftLongScoreHolderImpl extends AbstractScoreHolder<HardSoftLongScore>
implements HardSoftLongScoreHolder {
protected final Map<Rule, LongMatchExecutor> matchExecutorByNumberMap = new LinkedHashMap<>();
/** Slower than {@link #matchExecutorByNumberMap} */
protected final Map<Rule, ScoreMatchExecutor<HardSoftLongScore>> matchExecutorByScoreMap = new LinkedHashMap<>();
protected long hardScore;
protected long softScore;
public HardSoftLongScoreHolderImpl(boolean constraintMatchEnabled) {
super(constraintMatchEnabled);
}
public long getHardScore() {
return hardScore;
}
public long getSoftScore() {
return softScore;
}
// ************************************************************************
// Setup methods
// ************************************************************************
@Override
public void configureConstraintWeight(Rule rule, HardSoftLongScore constraintWeight) {
super.configureConstraintWeight(rule, constraintWeight);
LongMatchExecutor matchExecutor;
if (constraintWeight.isZero()) {
matchExecutor = (RuleContext kcontext, long matchWeight) -> {
};
} else if (constraintWeight.softScore() == 0L) {
matchExecutor =
(RuleContext kcontext, long matchWeight) -> addHardConstraintMatch(kcontext,
constraintWeight.hardScore() * matchWeight);
} else if (constraintWeight.hardScore() == 0L) {
matchExecutor =
(RuleContext kcontext, long matchWeight) -> addSoftConstraintMatch(kcontext,
constraintWeight.softScore() * matchWeight);
} else {
matchExecutor =
(RuleContext kcontext, long matchWeight) -> addMultiConstraintMatch(kcontext,
constraintWeight.hardScore() * matchWeight, constraintWeight.softScore() * matchWeight);
}
matchExecutorByNumberMap.put(rule, matchExecutor);
matchExecutorByScoreMap.put(rule, (RuleContext kcontext,
HardSoftLongScore weightMultiplier) -> addMultiConstraintMatch(kcontext,
constraintWeight.hardScore() * weightMultiplier.hardScore(),
constraintWeight.softScore() * weightMultiplier.softScore()));
}
// ************************************************************************
// Penalize and reward methods
// ************************************************************************
@Override
public void penalize(RuleContext kcontext) {
impactScore(kcontext, -1L);
}
@Override
public void penalize(RuleContext kcontext, long weightMultiplier) {
impactScore(kcontext, -weightMultiplier);
}
@Override
public void penalize(RuleContext kcontext, long hardWeightMultiplier, long softWeightMultiplier) {
impactScore(kcontext, -hardWeightMultiplier, -softWeightMultiplier);
}
@Override
public void reward(RuleContext kcontext) {
impactScore(kcontext, 1L);
}
@Override
public void reward(RuleContext kcontext, long weightMultiplier) {
impactScore(kcontext, weightMultiplier);
}
@Override
public void reward(RuleContext kcontext, long hardWeightMultiplier, long softWeightMultiplier) {
impactScore(kcontext, hardWeightMultiplier, softWeightMultiplier);
}
@Override
public void impactScore(RuleContext kcontext) {
impactScore(kcontext, 1L);
}
@Override
public void impactScore(RuleContext kcontext, int weightMultiplier) {
impactScore(kcontext, (long) weightMultiplier);
}
@Override
public void impactScore(RuleContext kcontext, long weightMultiplier) {
Rule rule = kcontext.getRule();
LongMatchExecutor matchExecutor = matchExecutorByNumberMap.get(rule);
if (matchExecutor == null) {
throw new IllegalStateException("The DRL rule (" + rule.getPackageName() + ":" + rule.getName()
+ ") does not match a @" + ConstraintWeight.class.getSimpleName() + " on the @"
+ ConstraintConfiguration.class.getSimpleName() + " annotated class.");
}
matchExecutor.accept(kcontext, weightMultiplier);
}
@Override
public void impactScore(RuleContext kcontext, BigDecimal weightMultiplier) {
throw new UnsupportedOperationException("In the rule (" + kcontext.getRule().getName()
+ "), the scoreHolder class (" + getClass()
+ ") does not support a BigDecimal weightMultiplier (" + weightMultiplier + ").\n"
+ "If you're using constraint streams, maybe switch from penalizeBigDecimal() to penalizeLong().");
}
private void impactScore(RuleContext kcontext, long hardWeightMultiplier, long softWeightMultiplier) {
Rule rule = kcontext.getRule();
ScoreMatchExecutor<HardSoftLongScore> matchExecutor = matchExecutorByScoreMap.get(rule);
if (matchExecutor == null) {
throw new IllegalStateException("The DRL rule (" + rule.getPackageName() + ":" + rule.getName()
+ ") does not match a @" + ConstraintWeight.class.getSimpleName() + " on the @"
+ ConstraintConfiguration.class.getSimpleName() + " annotated class.");
}
matchExecutor.accept(kcontext, HardSoftLongScore.of(hardWeightMultiplier, softWeightMultiplier));
}
// ************************************************************************
// Other match methods
// ************************************************************************
@Override
public void addHardConstraintMatch(RuleContext kcontext, long hardWeight) {
hardScore += hardWeight;
registerConstraintMatch(kcontext, () -> hardScore -= hardWeight, () -> HardSoftLongScore.ofHard(hardWeight));
}
@Override
public void addSoftConstraintMatch(RuleContext kcontext, long softWeight) {
softScore += softWeight;
registerConstraintMatch(kcontext, () -> softScore -= softWeight, () -> HardSoftLongScore.ofSoft(softWeight));
}
@Override
public void addMultiConstraintMatch(RuleContext kcontext, long hardWeight, long softWeight) {<FILL_FUNCTION_BODY>}
@Override
public HardSoftLongScore extractScore(int initScore) {
return HardSoftLongScore.ofUninitialized(initScore, hardScore, softScore);
}
}
|
hardScore += hardWeight;
softScore += softWeight;
registerConstraintMatch(kcontext,
() -> {
hardScore -= hardWeight;
softScore -= softWeight;
},
() -> HardSoftLongScore.of(hardWeight, softWeight));
| 1,594
| 73
| 1,667
|
<methods>public static ScoreHolder_ buildScoreHolder(ScoreDefinition<Score_>, boolean) ,public void configureConstraintWeight(Rule, org.optaplanner.core.api.score.buildin.hardsoftlong.HardSoftLongScore) ,public abstract org.optaplanner.core.api.score.buildin.hardsoftlong.HardSoftLongScore extractScore(int) ,public Map<java.lang.String,ConstraintMatchTotal<org.optaplanner.core.api.score.buildin.hardsoftlong.HardSoftLongScore>> getConstraintMatchTotalMap() ,public Map<java.lang.Object,Indictment<org.optaplanner.core.api.score.buildin.hardsoftlong.HardSoftLongScore>> getIndictmentMap() ,public void impactScore(RuleContext) ,public abstract void impactScore(RuleContext, int) ,public abstract void impactScore(RuleContext, long) ,public abstract void impactScore(RuleContext, java.math.BigDecimal) ,public boolean isConstraintMatchEnabled() <variables>private static final java.lang.String CUSTOM_SCORE_HOLDER_CLASS_PROPERTY_NAME,protected final non-sealed boolean constraintMatchEnabled,protected final non-sealed Map<java.lang.String,ConstraintMatchTotal<org.optaplanner.core.api.score.buildin.hardsoftlong.HardSoftLongScore>> constraintMatchTotalMap,protected final non-sealed Map<java.lang.Object,Indictment<org.optaplanner.core.api.score.buildin.hardsoftlong.HardSoftLongScore>> indictmentMap
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-drl/src/main/java/org/optaplanner/constraint/drl/holder/HardSoftScoreHolderImpl.java
|
HardSoftScoreHolderImpl
|
configureConstraintWeight
|
class HardSoftScoreHolderImpl extends AbstractScoreHolder<HardSoftScore> implements HardSoftScoreHolder {
protected final Map<Rule, IntMatchExecutor> matchExecutorByNumberMap = new LinkedHashMap<>();
/** Slower than {@link #matchExecutorByNumberMap} */
protected final Map<Rule, ScoreMatchExecutor<HardSoftScore>> matchExecutorByScoreMap = new LinkedHashMap<>();
protected int hardScore;
protected int softScore;
public HardSoftScoreHolderImpl(boolean constraintMatchEnabled) {
super(constraintMatchEnabled);
}
public int getHardScore() {
return hardScore;
}
public int getSoftScore() {
return softScore;
}
// ************************************************************************
// Setup methods
// ************************************************************************
@Override
public void configureConstraintWeight(Rule rule, HardSoftScore constraintWeight) {<FILL_FUNCTION_BODY>}
// ************************************************************************
// Penalize and reward methods
// ************************************************************************
@Override
public void penalize(RuleContext kcontext) {
impactScore(kcontext, -1);
}
@Override
public void penalize(RuleContext kcontext, int weightMultiplier) {
impactScore(kcontext, -weightMultiplier);
}
@Override
public void penalize(RuleContext kcontext, int hardWeightMultiplier, int softWeightMultiplier) {
impactScore(kcontext, -hardWeightMultiplier, -softWeightMultiplier);
}
@Override
public void reward(RuleContext kcontext) {
impactScore(kcontext, 1);
}
@Override
public void reward(RuleContext kcontext, int weightMultiplier) {
impactScore(kcontext, weightMultiplier);
}
@Override
public void reward(RuleContext kcontext, int hardWeightMultiplier, int softWeightMultiplier) {
impactScore(kcontext, hardWeightMultiplier, softWeightMultiplier);
}
@Override
public void impactScore(RuleContext kcontext) {
impactScore(kcontext, 1);
}
@Override
public void impactScore(RuleContext kcontext, int weightMultiplier) {
Rule rule = kcontext.getRule();
IntMatchExecutor matchExecutor = matchExecutorByNumberMap.get(rule);
if (matchExecutor == null) {
throw new IllegalStateException("The DRL rule (" + rule.getPackageName() + ":" + rule.getName()
+ ") does not match a @" + ConstraintWeight.class.getSimpleName() + " on the @"
+ ConstraintConfiguration.class.getSimpleName() + " annotated class.");
}
matchExecutor.accept(kcontext, weightMultiplier);
}
@Override
public void impactScore(RuleContext kcontext, long weightMultiplier) {
throw new UnsupportedOperationException("In the rule (" + kcontext.getRule().getName()
+ "), the scoreHolder class (" + getClass()
+ ") does not support a long weightMultiplier (" + weightMultiplier + ").\n"
+ "If you're using constraint streams, maybe switch from penalizeLong() to penalize().");
}
@Override
public void impactScore(RuleContext kcontext, BigDecimal weightMultiplier) {
throw new UnsupportedOperationException("In the rule (" + kcontext.getRule().getName()
+ "), the scoreHolder class (" + getClass()
+ ") does not support a BigDecimal weightMultiplier (" + weightMultiplier + ").\n"
+ "If you're using constraint streams, maybe switch from penalizeBigDecimal() to penalize().");
}
private void impactScore(RuleContext kcontext, int hardWeightMultiplier, int softWeightMultiplier) {
Rule rule = kcontext.getRule();
ScoreMatchExecutor<HardSoftScore> matchExecutor = matchExecutorByScoreMap.get(rule);
if (matchExecutor == null) {
throw new IllegalStateException("The DRL rule (" + rule.getPackageName() + ":" + rule.getName()
+ ") does not match a @" + ConstraintWeight.class.getSimpleName() + " on the @"
+ ConstraintConfiguration.class.getSimpleName() + " annotated class.");
}
matchExecutor.accept(kcontext, HardSoftScore.of(hardWeightMultiplier, softWeightMultiplier));
}
// ************************************************************************
// Other match methods
// ************************************************************************
@Override
public void addHardConstraintMatch(RuleContext kcontext, int hardWeight) {
hardScore += hardWeight;
registerConstraintMatch(kcontext, () -> hardScore -= hardWeight, () -> HardSoftScore.ofHard(hardWeight));
}
@Override
public void addSoftConstraintMatch(RuleContext kcontext, int softWeight) {
softScore += softWeight;
registerConstraintMatch(kcontext, () -> softScore -= softWeight, () -> HardSoftScore.ofSoft(softWeight));
}
@Override
public void addMultiConstraintMatch(RuleContext kcontext, int hardWeight, int softWeight) {
hardScore += hardWeight;
softScore += softWeight;
registerConstraintMatch(kcontext,
() -> {
hardScore -= hardWeight;
softScore -= softWeight;
},
() -> HardSoftScore.of(hardWeight, softWeight));
}
@Override
public HardSoftScore extractScore(int initScore) {
return HardSoftScore.ofUninitialized(initScore, hardScore, softScore);
}
}
|
super.configureConstraintWeight(rule, constraintWeight);
IntMatchExecutor matchExecutor;
if (constraintWeight.isZero()) {
matchExecutor = (RuleContext kcontext, int matchWeight) -> {
};
} else if (constraintWeight.softScore() == 0) {
matchExecutor =
(RuleContext kcontext, int matchWeight) -> addHardConstraintMatch(kcontext,
constraintWeight.hardScore() * matchWeight);
} else if (constraintWeight.hardScore() == 0) {
matchExecutor =
(RuleContext kcontext, int matchWeight) -> addSoftConstraintMatch(kcontext,
constraintWeight.softScore() * matchWeight);
} else {
matchExecutor =
(RuleContext kcontext, int matchWeight) -> addMultiConstraintMatch(kcontext,
constraintWeight.hardScore() * matchWeight, constraintWeight.softScore() * matchWeight);
}
matchExecutorByNumberMap.put(rule, matchExecutor);
matchExecutorByScoreMap.put(rule,
(RuleContext kcontext, HardSoftScore weightMultiplier) -> addMultiConstraintMatch(
kcontext, constraintWeight.hardScore() * weightMultiplier.hardScore(),
constraintWeight.softScore() * weightMultiplier.softScore()));
| 1,404
| 310
| 1,714
|
<methods>public static ScoreHolder_ buildScoreHolder(ScoreDefinition<Score_>, boolean) ,public void configureConstraintWeight(Rule, org.optaplanner.core.api.score.buildin.hardsoft.HardSoftScore) ,public abstract org.optaplanner.core.api.score.buildin.hardsoft.HardSoftScore extractScore(int) ,public Map<java.lang.String,ConstraintMatchTotal<org.optaplanner.core.api.score.buildin.hardsoft.HardSoftScore>> getConstraintMatchTotalMap() ,public Map<java.lang.Object,Indictment<org.optaplanner.core.api.score.buildin.hardsoft.HardSoftScore>> getIndictmentMap() ,public void impactScore(RuleContext) ,public abstract void impactScore(RuleContext, int) ,public abstract void impactScore(RuleContext, long) ,public abstract void impactScore(RuleContext, java.math.BigDecimal) ,public boolean isConstraintMatchEnabled() <variables>private static final java.lang.String CUSTOM_SCORE_HOLDER_CLASS_PROPERTY_NAME,protected final non-sealed boolean constraintMatchEnabled,protected final non-sealed Map<java.lang.String,ConstraintMatchTotal<org.optaplanner.core.api.score.buildin.hardsoft.HardSoftScore>> constraintMatchTotalMap,protected final non-sealed Map<java.lang.Object,Indictment<org.optaplanner.core.api.score.buildin.hardsoft.HardSoftScore>> indictmentMap
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-drl/src/main/java/org/optaplanner/constraint/drl/holder/SimpleBigDecimalScoreHolderImpl.java
|
SimpleBigDecimalScoreHolderImpl
|
impactScore
|
class SimpleBigDecimalScoreHolderImpl extends AbstractScoreHolder<SimpleBigDecimalScore>
implements SimpleBigDecimalScoreHolder {
protected final Map<Rule, BigDecimalMatchExecutor> matchExecutorByNumberMap = new LinkedHashMap<>();
protected BigDecimal score = BigDecimal.ZERO;
public SimpleBigDecimalScoreHolderImpl(boolean constraintMatchEnabled) {
super(constraintMatchEnabled);
}
public BigDecimal getScore() {
return score;
}
// ************************************************************************
// Setup methods
// ************************************************************************
@Override
public void configureConstraintWeight(Rule rule, SimpleBigDecimalScore constraintWeight) {
super.configureConstraintWeight(rule, constraintWeight);
BigDecimalMatchExecutor matchExecutor;
if (constraintWeight.isZero()) {
matchExecutor = (RuleContext kcontext, BigDecimal matchWeight) -> {
};
} else {
matchExecutor =
(RuleContext kcontext, BigDecimal matchWeight) -> addConstraintMatch(kcontext,
constraintWeight.score().multiply(matchWeight));
}
matchExecutorByNumberMap.put(rule, matchExecutor);
}
// ************************************************************************
// Penalize and reward methods
// ************************************************************************
@Override
public void penalize(RuleContext kcontext) {
impactScore(kcontext, BigDecimal.ONE.negate());
}
@Override
public void penalize(RuleContext kcontext, BigDecimal weightMultiplier) {
impactScore(kcontext, weightMultiplier.negate());
}
@Override
public void reward(RuleContext kcontext) {
impactScore(kcontext, BigDecimal.ONE);
}
@Override
public void reward(RuleContext kcontext, BigDecimal weightMultiplier) {
impactScore(kcontext, weightMultiplier);
}
@Override
public void impactScore(RuleContext kcontext) {
impactScore(kcontext, BigDecimal.ONE);
}
@Override
public void impactScore(RuleContext kcontext, int weightMultiplier) {
impactScore(kcontext, BigDecimal.valueOf(weightMultiplier));
}
@Override
public void impactScore(RuleContext kcontext, long weightMultiplier) {
impactScore(kcontext, BigDecimal.valueOf(weightMultiplier));
}
@Override
public void impactScore(RuleContext kcontext, BigDecimal weightMultiplier) {<FILL_FUNCTION_BODY>}
// ************************************************************************
// Other match methods
// ************************************************************************
@Override
public void addConstraintMatch(RuleContext kcontext, BigDecimal weight) {
score = score.add(weight);
registerConstraintMatch(kcontext, () -> score = score.subtract(weight), () -> SimpleBigDecimalScore.of(weight));
}
@Override
public SimpleBigDecimalScore extractScore(int initScore) {
return SimpleBigDecimalScore.ofUninitialized(initScore, score);
}
}
|
Rule rule = kcontext.getRule();
BigDecimalMatchExecutor matchExecutor = matchExecutorByNumberMap.get(rule);
if (matchExecutor == null) {
throw new IllegalStateException("The DRL rule (" + rule.getPackageName() + ":" + rule.getName()
+ ") does not match a @" + ConstraintWeight.class.getSimpleName() + " on the @"
+ ConstraintConfiguration.class.getSimpleName() + " annotated class.");
}
matchExecutor.accept(kcontext, weightMultiplier);
| 781
| 134
| 915
|
<methods>public static ScoreHolder_ buildScoreHolder(ScoreDefinition<Score_>, boolean) ,public void configureConstraintWeight(Rule, org.optaplanner.core.api.score.buildin.simplebigdecimal.SimpleBigDecimalScore) ,public abstract org.optaplanner.core.api.score.buildin.simplebigdecimal.SimpleBigDecimalScore extractScore(int) ,public Map<java.lang.String,ConstraintMatchTotal<org.optaplanner.core.api.score.buildin.simplebigdecimal.SimpleBigDecimalScore>> getConstraintMatchTotalMap() ,public Map<java.lang.Object,Indictment<org.optaplanner.core.api.score.buildin.simplebigdecimal.SimpleBigDecimalScore>> getIndictmentMap() ,public void impactScore(RuleContext) ,public abstract void impactScore(RuleContext, int) ,public abstract void impactScore(RuleContext, long) ,public abstract void impactScore(RuleContext, java.math.BigDecimal) ,public boolean isConstraintMatchEnabled() <variables>private static final java.lang.String CUSTOM_SCORE_HOLDER_CLASS_PROPERTY_NAME,protected final non-sealed boolean constraintMatchEnabled,protected final non-sealed Map<java.lang.String,ConstraintMatchTotal<org.optaplanner.core.api.score.buildin.simplebigdecimal.SimpleBigDecimalScore>> constraintMatchTotalMap,protected final non-sealed Map<java.lang.Object,Indictment<org.optaplanner.core.api.score.buildin.simplebigdecimal.SimpleBigDecimalScore>> indictmentMap
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-drl/src/main/java/org/optaplanner/constraint/drl/holder/SimpleLongScoreHolderImpl.java
|
SimpleLongScoreHolderImpl
|
impactScore
|
class SimpleLongScoreHolderImpl extends AbstractScoreHolder<SimpleLongScore>
implements SimpleLongScoreHolder {
protected final Map<Rule, LongMatchExecutor> matchExecutorByNumberMap = new LinkedHashMap<>();
protected long score;
public SimpleLongScoreHolderImpl(boolean constraintMatchEnabled) {
super(constraintMatchEnabled);
}
public long getScore() {
return score;
}
// ************************************************************************
// Setup methods
// ************************************************************************
@Override
public void configureConstraintWeight(Rule rule, SimpleLongScore constraintWeight) {
super.configureConstraintWeight(rule, constraintWeight);
LongMatchExecutor matchExecutor;
if (constraintWeight.isZero()) {
matchExecutor = (RuleContext kcontext, long matchWeight) -> {
};
} else {
matchExecutor = (RuleContext kcontext, long matchWeight) -> addConstraintMatch(kcontext,
constraintWeight.score() * matchWeight);
}
matchExecutorByNumberMap.put(rule, matchExecutor);
}
// ************************************************************************
// Penalize and reward methods
// ************************************************************************
@Override
public void penalize(RuleContext kcontext) {
impactScore(kcontext, -1L);
}
@Override
public void penalize(RuleContext kcontext, long weightMultiplier) {
impactScore(kcontext, -weightMultiplier);
}
@Override
public void reward(RuleContext kcontext) {
impactScore(kcontext, 1L);
}
@Override
public void reward(RuleContext kcontext, long weightMultiplier) {
impactScore(kcontext, weightMultiplier);
}
@Override
public void impactScore(RuleContext kcontext) {
impactScore(kcontext, 1L);
}
@Override
public void impactScore(RuleContext kcontext, int weightMultiplier) {
impactScore(kcontext, (long) weightMultiplier);
}
@Override
public void impactScore(RuleContext kcontext, long weightMultiplier) {
Rule rule = kcontext.getRule();
LongMatchExecutor matchExecutor = matchExecutorByNumberMap.get(rule);
if (matchExecutor == null) {
throw new IllegalStateException("The DRL rule (" + rule.getPackageName() + ":" + rule.getName()
+ ") does not match a @" + ConstraintWeight.class.getSimpleName() + " on the @"
+ ConstraintConfiguration.class.getSimpleName() + " annotated class.");
}
matchExecutor.accept(kcontext, weightMultiplier);
}
@Override
public void impactScore(RuleContext kcontext, BigDecimal weightMultiplier) {<FILL_FUNCTION_BODY>}
// ************************************************************************
// Other match methods
// ************************************************************************
@Override
public void addConstraintMatch(RuleContext kcontext, long weight) {
score += weight;
registerConstraintMatch(kcontext,
() -> score -= weight,
() -> SimpleLongScore.of(weight));
}
@Override
public SimpleLongScore extractScore(int initScore) {
return SimpleLongScore.ofUninitialized(initScore, score);
}
}
|
throw new UnsupportedOperationException("In the rule (" + kcontext.getRule().getName()
+ "), the scoreHolder class (" + getClass()
+ ") does not support a BigDecimal weightMultiplier (" + weightMultiplier + ").\n"
+ "If you're using constraint streams, maybe switch from penalizeBigDecimal() to penalizeLong().");
| 823
| 93
| 916
|
<methods>public static ScoreHolder_ buildScoreHolder(ScoreDefinition<Score_>, boolean) ,public void configureConstraintWeight(Rule, org.optaplanner.core.api.score.buildin.simplelong.SimpleLongScore) ,public abstract org.optaplanner.core.api.score.buildin.simplelong.SimpleLongScore extractScore(int) ,public Map<java.lang.String,ConstraintMatchTotal<org.optaplanner.core.api.score.buildin.simplelong.SimpleLongScore>> getConstraintMatchTotalMap() ,public Map<java.lang.Object,Indictment<org.optaplanner.core.api.score.buildin.simplelong.SimpleLongScore>> getIndictmentMap() ,public void impactScore(RuleContext) ,public abstract void impactScore(RuleContext, int) ,public abstract void impactScore(RuleContext, long) ,public abstract void impactScore(RuleContext, java.math.BigDecimal) ,public boolean isConstraintMatchEnabled() <variables>private static final java.lang.String CUSTOM_SCORE_HOLDER_CLASS_PROPERTY_NAME,protected final non-sealed boolean constraintMatchEnabled,protected final non-sealed Map<java.lang.String,ConstraintMatchTotal<org.optaplanner.core.api.score.buildin.simplelong.SimpleLongScore>> constraintMatchTotalMap,protected final non-sealed Map<java.lang.Object,Indictment<org.optaplanner.core.api.score.buildin.simplelong.SimpleLongScore>> indictmentMap
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-drl/src/main/java/org/optaplanner/constraint/drl/holder/SimpleScoreHolderImpl.java
|
SimpleScoreHolderImpl
|
impactScore
|
class SimpleScoreHolderImpl extends AbstractScoreHolder<SimpleScore> implements SimpleScoreHolder {
protected final Map<Rule, IntMatchExecutor> matchExecutorByNumberMap = new LinkedHashMap<>();
protected int score;
public SimpleScoreHolderImpl(boolean constraintMatchEnabled) {
super(constraintMatchEnabled);
}
public int getScore() {
return score;
}
// ************************************************************************
// Setup methods
// ************************************************************************
@Override
public void configureConstraintWeight(Rule rule, SimpleScore constraintWeight) {
super.configureConstraintWeight(rule, constraintWeight);
IntMatchExecutor matchExecutor;
if (constraintWeight.isZero()) {
matchExecutor = (RuleContext kcontext, int matchWeight) -> {
};
} else {
matchExecutor = (RuleContext kcontext, int matchWeight) -> addConstraintMatch(kcontext,
constraintWeight.score() * matchWeight);
}
matchExecutorByNumberMap.put(rule, matchExecutor);
}
// ************************************************************************
// Penalize and reward methods
// ************************************************************************
@Override
public void penalize(RuleContext kcontext) {
impactScore(kcontext, -1);
}
@Override
public void penalize(RuleContext kcontext, int weightMultiplier) {
impactScore(kcontext, -weightMultiplier);
}
@Override
public void reward(RuleContext kcontext) {
impactScore(kcontext, 1);
}
@Override
public void reward(RuleContext kcontext, int weightMultiplier) {
impactScore(kcontext, weightMultiplier);
}
@Override
public void impactScore(RuleContext kcontext) {
impactScore(kcontext, 1);
}
@Override
public void impactScore(RuleContext kcontext, int weightMultiplier) {<FILL_FUNCTION_BODY>}
@Override
public void impactScore(RuleContext kcontext, long weightMultiplier) {
throw new UnsupportedOperationException("In the rule (" + kcontext.getRule().getName()
+ "), the scoreHolder class (" + getClass()
+ ") does not support a long weightMultiplier (" + weightMultiplier + ").\n"
+ "If you're using constraint streams, maybe switch from penalizeLong() to penalize().");
}
@Override
public void impactScore(RuleContext kcontext, BigDecimal weightMultiplier) {
throw new UnsupportedOperationException("In the rule (" + kcontext.getRule().getName()
+ "), the scoreHolder class (" + getClass()
+ ") does not support a BigDecimal weightMultiplier (" + weightMultiplier + ").\n"
+ "If you're using constraint streams, maybe switch from penalizeBigDecimal() to penalize().");
}
// ************************************************************************
// Other match methods
// ************************************************************************
@Override
public void addConstraintMatch(RuleContext kcontext, int weight) {
score += weight;
registerConstraintMatch(kcontext, () -> score -= weight, () -> SimpleScore.of(weight));
}
@Override
public SimpleScore extractScore(int initScore) {
return SimpleScore.ofUninitialized(initScore, score);
}
}
|
Rule rule = kcontext.getRule();
IntMatchExecutor matchExecutor = matchExecutorByNumberMap.get(rule);
if (matchExecutor == null) {
throw new IllegalStateException("The DRL rule (" + rule.getPackageName() + ":" + rule.getName()
+ ") does not match a @" + ConstraintWeight.class.getSimpleName() + " on the @"
+ ConstraintConfiguration.class.getSimpleName() + " annotated class.");
}
matchExecutor.accept(kcontext, weightMultiplier);
| 835
| 132
| 967
|
<methods>public static ScoreHolder_ buildScoreHolder(ScoreDefinition<Score_>, boolean) ,public void configureConstraintWeight(Rule, org.optaplanner.core.api.score.buildin.simple.SimpleScore) ,public abstract org.optaplanner.core.api.score.buildin.simple.SimpleScore extractScore(int) ,public Map<java.lang.String,ConstraintMatchTotal<org.optaplanner.core.api.score.buildin.simple.SimpleScore>> getConstraintMatchTotalMap() ,public Map<java.lang.Object,Indictment<org.optaplanner.core.api.score.buildin.simple.SimpleScore>> getIndictmentMap() ,public void impactScore(RuleContext) ,public abstract void impactScore(RuleContext, int) ,public abstract void impactScore(RuleContext, long) ,public abstract void impactScore(RuleContext, java.math.BigDecimal) ,public boolean isConstraintMatchEnabled() <variables>private static final java.lang.String CUSTOM_SCORE_HOLDER_CLASS_PROPERTY_NAME,protected final non-sealed boolean constraintMatchEnabled,protected final non-sealed Map<java.lang.String,ConstraintMatchTotal<org.optaplanner.core.api.score.buildin.simple.SimpleScore>> constraintMatchTotalMap,protected final non-sealed Map<java.lang.Object,Indictment<org.optaplanner.core.api.score.buildin.simple.SimpleScore>> indictmentMap
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/BavetConstraintFactory.java
|
BavetConstraintFactory
|
share
|
class BavetConstraintFactory<Solution_>
extends InnerConstraintFactory<Solution_, BavetConstraint<Solution_>> {
private final SolutionDescriptor<Solution_> solutionDescriptor;
private final EnvironmentMode environmentMode;
private final String defaultConstraintPackage;
private final Map<BavetAbstractConstraintStream<Solution_>, BavetAbstractConstraintStream<Solution_>> sharingStreamMap =
new HashMap<>(256);
public BavetConstraintFactory(SolutionDescriptor<Solution_> solutionDescriptor, EnvironmentMode environmentMode) {
this.solutionDescriptor = solutionDescriptor;
this.environmentMode = Objects.requireNonNull(environmentMode);
ConstraintConfigurationDescriptor<Solution_> configurationDescriptor = solutionDescriptor
.getConstraintConfigurationDescriptor();
if (configurationDescriptor == null) {
Package pack = solutionDescriptor.getSolutionClass().getPackage();
defaultConstraintPackage = (pack == null) ? "" : pack.getName();
} else {
defaultConstraintPackage = configurationDescriptor.getConstraintPackage();
}
}
public <Stream_ extends BavetAbstractConstraintStream<Solution_>> Stream_ share(Stream_ stream) {
return share(stream, t -> {
});
}
/**
* Enables node sharing.
* If a constraint already exists in this factory, it replaces it by the old copy.
* {@link BavetAbstractConstraintStream} implement equals/hashcode ignoring child streams.
* <p>
* {@link BavetConstraintSessionFactory#buildSession(boolean, Object)} relies on this occurring for all streams.
* <p>
* This must be called before the stream receives child streams.
*
* @param stream never null
* @param consumer never null
* @param <Stream_> the {@link BavetAbstractConstraintStream} subclass
* @return never null
*/
public <Stream_ extends BavetAbstractConstraintStream<Solution_>> Stream_ share(Stream_ stream,
Consumer<Stream_> consumer) {<FILL_FUNCTION_BODY>}
// ************************************************************************
// from
// ************************************************************************
@Override
public <A> UniConstraintStream<A> forEachIncludingNullVars(Class<A> sourceClass) {
assertValidFromType(sourceClass);
return share(new BavetForEachUniConstraintStream<>(this, sourceClass, RetrievalSemantics.STANDARD));
}
@Override
public <A> BavetAbstractUniConstraintStream<Solution_, A> fromUnfiltered(Class<A> fromClass) {
assertValidFromType(fromClass);
return share(new BavetForEachUniConstraintStream<>(this, fromClass, RetrievalSemantics.LEGACY));
}
// ************************************************************************
// Getters/setters
// ************************************************************************
@Override
public SolutionDescriptor<Solution_> getSolutionDescriptor() {
return solutionDescriptor;
}
public EnvironmentMode getEnvironmentMode() {
return environmentMode;
}
@Override
public String getDefaultConstraintPackage() {
return defaultConstraintPackage;
}
}
|
return (Stream_) sharingStreamMap.computeIfAbsent(stream, k -> {
consumer.accept(stream);
return stream;
});
| 795
| 40
| 835
|
<methods>public non-sealed void <init>() ,public void assertValidFromType(Class<A>) ,public List<BavetConstraint<Solution_>> buildConstraints(org.optaplanner.core.api.score.stream.ConstraintProvider) ,public UniConstraintStream<A> forEach(Class<A>) ,public transient BiConstraintStream<A,A> forEachUniquePair(Class<A>, BiJoiner<A,A>[]) ,public UniConstraintStream<A> from(Class<A>) ,public transient BiConstraintStream<A,A> fromUniquePair(Class<A>, BiJoiner<A,A>[]) ,public Predicate<A> getNullityFilter(Class<A>) ,public abstract SolutionDescriptor<Solution_> getSolutionDescriptor() <variables>
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/BavetConstraintSession.java
|
BavetConstraintSession
|
calculateScore
|
class BavetConstraintSession<Score_ extends Score<Score_>> {
private final AbstractScoreInliner<Score_> scoreInliner;
private final Map<Class<?>, ForEachUniNode<Object>> declaredClassToNodeMap;
private final AbstractNode[] nodes; // Indexed by nodeIndex
private final Map<Class<?>, ForEachUniNode<Object>[]> effectiveClassToNodeArrayMap;
public BavetConstraintSession(AbstractScoreInliner<Score_> scoreInliner,
Map<Class<?>, ForEachUniNode<Object>> declaredClassToNodeMap,
AbstractNode[] nodes) {
this.scoreInliner = scoreInliner;
this.declaredClassToNodeMap = declaredClassToNodeMap;
this.nodes = nodes;
this.effectiveClassToNodeArrayMap = new IdentityHashMap<>(declaredClassToNodeMap.size());
}
public void insert(Object fact) {
Class<?> factClass = fact.getClass();
for (ForEachUniNode<Object> node : findNodes(factClass)) {
node.insert(fact);
}
}
private ForEachUniNode<Object>[] findNodes(Class<?> factClass) {
// Map.computeIfAbsent() would have created lambdas on the hot path, this will not.
ForEachUniNode<Object>[] nodeArray = effectiveClassToNodeArrayMap.get(factClass);
if (nodeArray == null) {
nodeArray = declaredClassToNodeMap.entrySet()
.stream()
.filter(entry -> entry.getKey().isAssignableFrom(factClass))
.map(Map.Entry::getValue)
.toArray(ForEachUniNode[]::new);
effectiveClassToNodeArrayMap.put(factClass, nodeArray);
}
return nodeArray;
}
public void update(Object fact) {
Class<?> factClass = fact.getClass();
for (ForEachUniNode<Object> node : findNodes(factClass)) {
node.update(fact);
}
}
public void retract(Object fact) {
Class<?> factClass = fact.getClass();
for (ForEachUniNode<Object> node : findNodes(factClass)) {
node.retract(fact);
}
}
public Score_ calculateScore(int initScore) {<FILL_FUNCTION_BODY>}
public AbstractScoreInliner<Score_> getScoreInliner() {
return scoreInliner;
}
public Map<String, ConstraintMatchTotal<Score_>> getConstraintMatchTotalMap() {
return scoreInliner.getConstraintMatchTotalMap();
}
public Map<Object, Indictment<Score_>> getIndictmentMap() {
return scoreInliner.getIndictmentMap();
}
}
|
for (AbstractNode node : nodes) {
node.calculateScore();
}
return scoreInliner.extractScore(initScore);
| 732
| 40
| 772
|
<no_super_class>
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/BavetConstraintSessionFactory.java
|
BavetConstraintSessionFactory
|
buildSession
|
class BavetConstraintSessionFactory<Solution_, Score_ extends Score<Score_>> {
private final SolutionDescriptor<Solution_> solutionDescriptor;
private final List<BavetConstraint<Solution_>> constraintList;
public BavetConstraintSessionFactory(SolutionDescriptor<Solution_> solutionDescriptor,
List<BavetConstraint<Solution_>> constraintList) {
this.solutionDescriptor = solutionDescriptor;
this.constraintList = constraintList;
}
// ************************************************************************
// Node creation
// ************************************************************************
public BavetConstraintSession<Score_> buildSession(boolean constraintMatchEnabled,
Solution_ workingSolution) {<FILL_FUNCTION_BODY>}
}
|
ScoreDefinition<Score_> scoreDefinition = solutionDescriptor.getScoreDefinition();
AbstractScoreInliner<Score_> scoreInliner = AbstractScoreInliner.buildScoreInliner(scoreDefinition,
constraintMatchEnabled);
Score_ zeroScore = scoreDefinition.getZeroScore();
Set<BavetAbstractConstraintStream<Solution_>> constraintStreamSet = new LinkedHashSet<>();
Map<Constraint, Score_> constraintWeightMap = new HashMap<>(constraintList.size());
for (BavetConstraint<Solution_> constraint : constraintList) {
Score_ constraintWeight = constraint.extractConstraintWeight(workingSolution);
// Filter out nodes that only lead to constraints with zero weight.
// Note: Node sharing happens earlier, in BavetConstraintFactory#share(Stream_).
if (!constraintWeight.equals(zeroScore)) {
// Relies on BavetConstraintFactory#share(Stream_) occurring for all constraint stream instances
// to ensure there are no 2 equal ConstraintStream instances (with different child stream lists).
constraint.collectActiveConstraintStreams(constraintStreamSet);
constraintWeightMap.put(constraint, constraintWeight);
}
}
NodeBuildHelper<Score_> buildHelper = new NodeBuildHelper<>(constraintStreamSet, constraintWeightMap, scoreInliner);
// Build constraintStreamSet in reverse order to create downstream nodes first
// so every node only has final variables (some of which have downstream node method references).
List<BavetAbstractConstraintStream<Solution_>> reversedConstraintStreamList = new ArrayList<>(constraintStreamSet);
Collections.reverse(reversedConstraintStreamList);
for (BavetAbstractConstraintStream<Solution_> constraintStream : reversedConstraintStreamList) {
constraintStream.buildNode(buildHelper);
}
List<AbstractNode> nodeList = buildHelper.destroyAndGetNodeList();
Map<Class<?>, ForEachUniNode<Object>> declaredClassToNodeMap = new LinkedHashMap<>();
long nextNodeId = 0;
for (AbstractNode node : nodeList) {
node.setId(nextNodeId++);
if (node instanceof ForEachUniNode) {
ForEachUniNode<Object> forEachUniNode = (ForEachUniNode<Object>) node;
ForEachUniNode<Object> old = declaredClassToNodeMap.put(forEachUniNode.getForEachClass(), forEachUniNode);
if (old != null) {
throw new IllegalStateException("Impossible state: For class (" + forEachUniNode.getForEachClass()
+ ") there are 2 nodes (" + forEachUniNode + ", " + old + ").");
}
}
}
return new BavetConstraintSession<>(scoreInliner, declaredClassToNodeMap, nodeList.toArray(new AbstractNode[0]));
| 186
| 704
| 890
|
<no_super_class>
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/BavetConstraintStreamScoreDirector.java
|
BavetConstraintStreamScoreDirector
|
getIndictmentMap
|
class BavetConstraintStreamScoreDirector<Solution_, Score_ extends Score<Score_>>
extends AbstractScoreDirector<Solution_, Score_, BavetConstraintStreamScoreDirectorFactory<Solution_, Score_>> {
protected BavetConstraintSession<Score_> session;
public BavetConstraintStreamScoreDirector(
BavetConstraintStreamScoreDirectorFactory<Solution_, Score_> scoreDirectorFactory,
boolean lookUpEnabled, boolean constraintMatchEnabledPreference) {
super(scoreDirectorFactory, lookUpEnabled, constraintMatchEnabledPreference);
}
// ************************************************************************
// Complex methods
// ************************************************************************
@Override
public void setWorkingSolution(Solution_ workingSolution) {
super.setWorkingSolution(workingSolution);
resetConstraintStreamingSession();
}
private void resetConstraintStreamingSession() {
session = scoreDirectorFactory.newSession(constraintMatchEnabledPreference, workingSolution);
getSolutionDescriptor().visitAll(workingSolution, session::insert);
}
@Override
public Score_ calculateScore() {
variableListenerSupport.assertNotificationQueuesAreEmpty();
Score_ score = session.calculateScore(workingInitScore);
setCalculatedScore(score);
return score;
}
@Override
public boolean isConstraintMatchEnabled() {
return constraintMatchEnabledPreference;
}
@Override
public Map<String, ConstraintMatchTotal<Score_>> getConstraintMatchTotalMap() {
if (workingSolution == null) {
throw new IllegalStateException(
"The method setWorkingSolution() must be called before the method getConstraintMatchTotalMap().");
}
return session.getConstraintMatchTotalMap();
}
@Override
public Map<Object, Indictment<Score_>> getIndictmentMap() {<FILL_FUNCTION_BODY>}
@Override
public boolean requiresFlushing() {
return true; // Tuple refresh happens during score calculation.
}
@Override
public void close() {
super.close();
session = null;
}
// ************************************************************************
// Entity/variable add/change/remove methods
// ************************************************************************
// public void beforeEntityAdded(EntityDescriptor entityDescriptor, Object entity) // Do nothing
@Override
public void afterEntityAdded(EntityDescriptor<Solution_> entityDescriptor, Object entity) {
if (entity == null) {
throw new IllegalArgumentException("The entity (" + entity + ") cannot be added to the ScoreDirector.");
}
if (!getSolutionDescriptor().hasEntityDescriptor(entity.getClass())) {
throw new IllegalArgumentException("The entity (" + entity + ") of class (" + entity.getClass()
+ ") is not a configured @" + PlanningEntity.class.getSimpleName() + ".");
}
session.insert(entity);
super.afterEntityAdded(entityDescriptor, entity);
}
// public void beforeVariableChanged(VariableDescriptor variableDescriptor, Object entity) // Do nothing
@Override
public void afterVariableChanged(VariableDescriptor<Solution_> variableDescriptor, Object entity) {
session.update(entity);
super.afterVariableChanged(variableDescriptor, entity);
}
@Override
public void afterListVariableChanged(ListVariableDescriptor<Solution_> variableDescriptor, Object entity, int fromIndex,
int toIndex) {
session.update(entity);
super.afterListVariableChanged(variableDescriptor, entity, fromIndex, toIndex);
}
// public void beforeEntityRemoved(EntityDescriptor entityDescriptor, Object entity) // Do nothing
@Override
public void afterEntityRemoved(EntityDescriptor<Solution_> entityDescriptor, Object entity) {
session.retract(entity);
super.afterEntityRemoved(entityDescriptor, entity);
}
// ************************************************************************
// Problem fact add/change/remove methods
// ************************************************************************
// public void beforeProblemFactAdded(Object problemFact) // Do nothing
@Override
public void afterProblemFactAdded(Object problemFact) {
if (problemFact == null) {
throw new IllegalArgumentException("The problemFact (" + problemFact + ") cannot be added to the ScoreDirector.");
}
session.insert(problemFact);
super.afterProblemFactAdded(problemFact);
}
// public void beforeProblemPropertyChanged(Object problemFactOrEntity) // Do nothing
@Override
public void afterProblemPropertyChanged(Object problemFactOrEntity) {
session.update(problemFactOrEntity);
super.afterProblemPropertyChanged(problemFactOrEntity);
}
// public void beforeProblemFactRemoved(Object problemFact) // Do nothing
@Override
public void afterProblemFactRemoved(Object problemFact) {
session.retract(problemFact);
super.afterProblemFactRemoved(problemFact);
}
// ************************************************************************
// Getters/setters
// ************************************************************************
public BavetConstraintSession<Score_> getSession() {
return session;
}
}
|
if (workingSolution == null) {
throw new IllegalStateException(
"The method setWorkingSolution() must be called before the method getIndictmentMap().");
}
return session.getIndictmentMap();
| 1,282
| 60
| 1,342
|
<methods>public final void afterEntityAdded(java.lang.Object) ,public void afterEntityAdded(EntityDescriptor<Solution_>, java.lang.Object) ,public final void afterEntityRemoved(java.lang.Object) ,public void afterEntityRemoved(EntityDescriptor<Solution_>, java.lang.Object) ,public void afterListVariableChanged(java.lang.Object, java.lang.String, int, int) ,public void afterListVariableChanged(ListVariableDescriptor<Solution_>, java.lang.Object, int, int) ,public void afterListVariableElementAssigned(java.lang.Object, java.lang.String, java.lang.Object) ,public void afterListVariableElementAssigned(ListVariableDescriptor<Solution_>, java.lang.Object) ,public void afterListVariableElementUnassigned(java.lang.Object, java.lang.String, java.lang.Object) ,public void afterListVariableElementUnassigned(ListVariableDescriptor<Solution_>, java.lang.Object) ,public void afterProblemFactAdded(java.lang.Object) ,public void afterProblemFactRemoved(java.lang.Object) ,public void afterProblemPropertyChanged(java.lang.Object) ,public final void afterVariableChanged(java.lang.Object, java.lang.String) ,public void afterVariableChanged(VariableDescriptor<Solution_>, java.lang.Object) ,public void assertExpectedUndoMoveScore(Move<Solution_>, Score_) ,public void assertExpectedWorkingScore(Score_, java.lang.Object) ,public void assertNonNullPlanningIds() ,public void assertPredictedScoreFromScratch(Score_, java.lang.Object) ,public void assertShadowVariablesAreNotStale(Score_, java.lang.Object) ,public void assertWorkingScoreFromScratch(Score_, java.lang.Object) ,public final void beforeEntityAdded(java.lang.Object) ,public void beforeEntityAdded(EntityDescriptor<Solution_>, java.lang.Object) ,public final void beforeEntityRemoved(java.lang.Object) ,public void beforeEntityRemoved(EntityDescriptor<Solution_>, java.lang.Object) ,public void beforeListVariableChanged(java.lang.Object, java.lang.String, int, int) ,public void beforeListVariableChanged(ListVariableDescriptor<Solution_>, 
java.lang.Object, int, int) ,public void beforeListVariableElementAssigned(java.lang.Object, java.lang.String, java.lang.Object) ,public void beforeListVariableElementAssigned(ListVariableDescriptor<Solution_>, java.lang.Object) ,public void beforeListVariableElementUnassigned(java.lang.Object, java.lang.String, java.lang.Object) ,public void beforeListVariableElementUnassigned(ListVariableDescriptor<Solution_>, java.lang.Object) ,public void beforeProblemFactAdded(java.lang.Object) ,public void beforeProblemFactRemoved(java.lang.Object) ,public void beforeProblemPropertyChanged(java.lang.Object) ,public final void beforeVariableChanged(java.lang.Object, java.lang.String) ,public void beforeVariableChanged(VariableDescriptor<Solution_>, java.lang.Object) ,public void changeVariableFacade(VariableDescriptor<Solution_>, java.lang.Object, java.lang.Object) ,public AbstractScoreDirector<Solution_,Score_,BavetConstraintStreamScoreDirectorFactory<Solution_,Score_>> clone() ,public Solution_ cloneSolution(Solution_) ,public Solution_ cloneWorkingSolution() ,public void close() ,public InnerScoreDirector<Solution_,Score_> createChildThreadScoreDirector(org.optaplanner.core.impl.solver.thread.ChildThreadType) ,public Score_ doAndProcessMove(Move<Solution_>, boolean) ,public void doAndProcessMove(Move<Solution_>, boolean, Consumer<Score_>) ,public void forceTriggerVariableListeners() ,public long getCalculationCount() ,public ScoreDefinition<Score_> getScoreDefinition() ,public BavetConstraintStreamScoreDirectorFactory<Solution_,Score_> getScoreDirectorFactory() ,public SolutionDescriptor<Solution_> getSolutionDescriptor() ,public org.optaplanner.core.impl.domain.variable.supply.SupplyManager getSupplyManager() ,public long getWorkingEntityListRevision() ,public Solution_ getWorkingSolution() ,public void incrementCalculationCount() ,public boolean isWorkingEntityListDirty(long) ,public E lookUpWorkingObject(E) ,public E lookUpWorkingObjectOrReturnNull(E) ,public void 
overwriteConstraintMatchEnabledPreference(boolean) ,public void resetCalculationCount() ,public void setAllChangesWillBeUndoneBeforeStepEnds(boolean) ,public void setWorkingSolution(Solution_) ,public java.lang.String toString() ,public void triggerVariableListeners() <variables>protected boolean allChangesWillBeUndoneBeforeStepEnds,protected long calculationCount,protected boolean constraintMatchEnabledPreference,protected final transient Logger logger,protected final non-sealed boolean lookUpEnabled,protected final non-sealed org.optaplanner.core.impl.domain.lookup.LookUpManager lookUpManager,private final Map<Class#RAW,org.optaplanner.core.impl.domain.common.accessor.MemberAccessor> planningIdAccessorCacheMap,protected final non-sealed BavetConstraintStreamScoreDirectorFactory<Solution_,Score_> scoreDirectorFactory,protected final non-sealed VariableListenerSupport<Solution_> variableListenerSupport,protected long workingEntityListRevision,protected java.lang.Integer workingInitScore,protected Solution_ workingSolution
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/BavetConstraintStreamScoreDirectorFactory.java
|
BavetConstraintStreamScoreDirectorFactory
|
fireAndForget
|
class BavetConstraintStreamScoreDirectorFactory<Solution_, Score_ extends Score<Score_>>
extends AbstractConstraintStreamScoreDirectorFactory<Solution_, Score_> {
private final BavetConstraintSessionFactory<Solution_, Score_> constraintSessionFactory;
private final List<BavetConstraint<Solution_>> constraintList;
public BavetConstraintStreamScoreDirectorFactory(SolutionDescriptor<Solution_> solutionDescriptor,
ConstraintProvider constraintProvider, EnvironmentMode environmentMode) {
super(solutionDescriptor);
BavetConstraintFactory<Solution_> constraintFactory = new BavetConstraintFactory<>(solutionDescriptor, environmentMode);
constraintList = constraintFactory.buildConstraints(constraintProvider);
constraintSessionFactory = new BavetConstraintSessionFactory<>(solutionDescriptor, constraintList);
}
@Override
public BavetConstraintStreamScoreDirector<Solution_, Score_> buildScoreDirector(boolean lookUpEnabled,
boolean constraintMatchEnabledPreference) {
return new BavetConstraintStreamScoreDirector<>(this, lookUpEnabled, constraintMatchEnabledPreference);
}
public BavetConstraintSession<Score_> newSession(boolean constraintMatchEnabled, Solution_ workingSolution) {
return constraintSessionFactory.buildSession(constraintMatchEnabled, workingSolution);
}
@Override
public AbstractScoreInliner<Score_> fireAndForget(Object... facts) {<FILL_FUNCTION_BODY>}
@Override
public SolutionDescriptor<Solution_> getSolutionDescriptor() {
return solutionDescriptor;
}
@Override
public Constraint[] getConstraints() {
return constraintList.toArray(new Constraint[0]);
}
}
|
BavetConstraintSession<Score_> session = newSession(true, null);
Arrays.stream(facts).forEach(session::insert);
session.calculateScore(0);
return session.getScoreInliner();
| 432
| 60
| 492
|
<methods>public transient abstract AbstractScoreInliner<Score_> fireAndForget(java.lang.Object[]) ,public abstract org.optaplanner.core.api.score.stream.Constraint[] getConstraints() <variables>
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/bi/BavetFilterBiConstraintStream.java
|
BavetFilterBiConstraintStream
|
buildNode
|
class BavetFilterBiConstraintStream<Solution_, A, B> extends BavetAbstractBiConstraintStream<Solution_, A, B> {
private final BavetAbstractBiConstraintStream<Solution_, A, B> parent;
private final BiPredicate<A, B> predicate;
public BavetFilterBiConstraintStream(BavetConstraintFactory<Solution_> constraintFactory,
BavetAbstractBiConstraintStream<Solution_, A, B> parent,
BiPredicate<A, B> predicate) {
super(constraintFactory, parent.getRetrievalSemantics());
this.parent = parent;
this.predicate = predicate;
if (predicate == null) {
throw new IllegalArgumentException("The predicate (null) cannot be null.");
}
}
@Override
public boolean guaranteesDistinct() {
return parent.guaranteesDistinct();
}
// ************************************************************************
// Node creation
// ************************************************************************
@Override
public void collectActiveConstraintStreams(Set<BavetAbstractConstraintStream<Solution_>> constraintStreamSet) {
parent.collectActiveConstraintStreams(constraintStreamSet);
constraintStreamSet.add(this);
}
@Override
public BavetAbstractConstraintStream<Solution_> getTupleSource() {
return parent.getTupleSource();
}
@Override
public <Score_ extends Score<Score_>> void buildNode(NodeBuildHelper<Score_> buildHelper) {<FILL_FUNCTION_BODY>}
// ************************************************************************
// Equality for node sharing
// ************************************************************************
@Override
public int hashCode() {
return Objects.hash(parent, predicate);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
} else if (o instanceof BavetFilterBiConstraintStream) {
BavetFilterBiConstraintStream<?, ?, ?> other = (BavetFilterBiConstraintStream<?, ?, ?>) o;
return parent == other.parent
&& predicate == other.predicate;
} else {
return false;
}
}
@Override
public String toString() {
return "Filter() with " + childStreamList.size() + " children";
}
// ************************************************************************
// Getters/setters
// ************************************************************************
}
|
buildHelper.<BiTuple<A, B>> putInsertUpdateRetract(this, childStreamList,
tupleLifecycle -> new ConditionalBiTupleLifecycle<>(predicate, tupleLifecycle));
| 609
| 59
| 668
|
<methods>public void <init>(BavetConstraintFactory<Solution_>, org.optaplanner.constraint.streams.common.RetrievalSemantics) ,public BavetAbstractBiConstraintStream<Solution_,A,B> filter(BiPredicate<A,B>) ,public BiConstraintStream<A,ResultB_> flattenLast(Function<B,Iterable<ResultB_>>) ,public List<BavetAbstractBiConstraintStream<Solution_,A,B>> getChildStreamList() ,public UniConstraintStream<Result_> groupBy(BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public BiConstraintStream<ResultA_,ResultB_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>) ,public TriConstraintStream<ResultA_,ResultB_,ResultC_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>) ,public QuadConstraintStream<ResultA_,ResultB_,ResultC_,ResultD_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public UniConstraintStream<GroupKey_> groupBy(BiFunction<A,B,GroupKey_>) ,public TriConstraintStream<GroupKey_,ResultB_,ResultC_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>) ,public QuadConstraintStream<GroupKey_,ResultB_,ResultC_,ResultD_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public BiConstraintStream<GroupKey_,Result_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public BiConstraintStream<GroupKeyA_,GroupKeyB_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>) ,public 
TriConstraintStream<GroupKeyA_,GroupKeyB_,Result_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,ResultC_,ResultD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public TriConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_,ResultD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_,GroupKeyD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>, BiFunction<A,B,GroupKeyD_>) ,public final transient BiConstraintStream<A,B> ifExists(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifExists(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifExistsIncludingNullVars(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExists(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExists(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExistsIncludingNullVars(Class<C>, TriJoiner<A,B,C>[]) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, ToIntBiFunction<A,B>, org.optaplanner.constraint.streams.common.ScoreImpactType) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, ToLongBiFunction<A,B>, org.optaplanner.constraint.streams.common.ScoreImpactType) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, BiFunction<A,B,java.math.BigDecimal>, 
org.optaplanner.constraint.streams.common.ScoreImpactType) ,public final transient TriConstraintStream<A,B,C> join(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public UniConstraintStream<ResultA_> map(BiFunction<A,B,ResultA_>) ,public Stream_ shareAndAddChild(Stream_) <variables>protected final List<BavetAbstractBiConstraintStream<Solution_,A,B>> childStreamList
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/bi/BavetFlattenLastBridgeBiConstraintStream.java
|
BavetFlattenLastBridgeBiConstraintStream
|
buildNode
|
class BavetFlattenLastBridgeBiConstraintStream<Solution_, A, B, NewB>
extends BavetAbstractBiConstraintStream<Solution_, A, B> {
private final BavetAbstractBiConstraintStream<Solution_, A, B> parent;
private final Function<B, Iterable<NewB>> mappingFunction;
private BavetFlattenLastBiConstraintStream<Solution_, A, NewB> flattenLastStream;
public BavetFlattenLastBridgeBiConstraintStream(BavetConstraintFactory<Solution_> constraintFactory,
BavetAbstractBiConstraintStream<Solution_, A, B> parent,
Function<B, Iterable<NewB>> mappingFunction) {
super(constraintFactory, parent.getRetrievalSemantics());
this.parent = parent;
this.mappingFunction = mappingFunction;
}
@Override
public boolean guaranteesDistinct() {
return false;
}
public void setFlattenLastStream(BavetFlattenLastBiConstraintStream<Solution_, A, NewB> flattenLastStream) {
this.flattenLastStream = flattenLastStream;
}
// ************************************************************************
// Node creation
// ************************************************************************
@Override
public void collectActiveConstraintStreams(Set<BavetAbstractConstraintStream<Solution_>> constraintStreamSet) {
parent.collectActiveConstraintStreams(constraintStreamSet);
constraintStreamSet.add(this);
}
@Override
public BavetAbstractConstraintStream<Solution_> getTupleSource() {
return parent.getTupleSource();
}
@Override
public <Score_ extends Score<Score_>> void buildNode(NodeBuildHelper<Score_> buildHelper) {<FILL_FUNCTION_BODY>}
// ************************************************************************
// Equality for node sharing
// ************************************************************************
// TODO
// ************************************************************************
// Getters/setters
// ************************************************************************
}
|
if (!childStreamList.isEmpty()) {
throw new IllegalStateException("Impossible state: the stream (" + this
+ ") has an non-empty childStreamList (" + childStreamList + ") but it's a flattenLast bridge.");
}
int inputStoreIndex = buildHelper.reserveTupleStoreIndex(parent.getTupleSource());
int outputStoreSize = buildHelper.extractTupleStoreSize(flattenLastStream);
AbstractFlattenLastNode<BiTuple<A, B>, BiTuple<A, NewB>, B, NewB> node = new FlattenLastBiNode<>(
inputStoreIndex, mappingFunction,
buildHelper.getAggregatedTupleLifecycle(flattenLastStream.getChildStreamList()),
outputStoreSize);
buildHelper.addNode(node, this);
| 505
| 204
| 709
|
<methods>public void <init>(BavetConstraintFactory<Solution_>, org.optaplanner.constraint.streams.common.RetrievalSemantics) ,public BavetAbstractBiConstraintStream<Solution_,A,B> filter(BiPredicate<A,B>) ,public BiConstraintStream<A,ResultB_> flattenLast(Function<B,Iterable<ResultB_>>) ,public List<BavetAbstractBiConstraintStream<Solution_,A,B>> getChildStreamList() ,public UniConstraintStream<Result_> groupBy(BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public BiConstraintStream<ResultA_,ResultB_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>) ,public TriConstraintStream<ResultA_,ResultB_,ResultC_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>) ,public QuadConstraintStream<ResultA_,ResultB_,ResultC_,ResultD_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public UniConstraintStream<GroupKey_> groupBy(BiFunction<A,B,GroupKey_>) ,public TriConstraintStream<GroupKey_,ResultB_,ResultC_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>) ,public QuadConstraintStream<GroupKey_,ResultB_,ResultC_,ResultD_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public BiConstraintStream<GroupKey_,Result_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public BiConstraintStream<GroupKeyA_,GroupKeyB_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>) ,public 
TriConstraintStream<GroupKeyA_,GroupKeyB_,Result_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,ResultC_,ResultD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public TriConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_,ResultD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_,GroupKeyD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>, BiFunction<A,B,GroupKeyD_>) ,public final transient BiConstraintStream<A,B> ifExists(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifExists(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifExistsIncludingNullVars(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExists(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExists(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExistsIncludingNullVars(Class<C>, TriJoiner<A,B,C>[]) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, ToIntBiFunction<A,B>, org.optaplanner.constraint.streams.common.ScoreImpactType) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, ToLongBiFunction<A,B>, org.optaplanner.constraint.streams.common.ScoreImpactType) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, BiFunction<A,B,java.math.BigDecimal>, 
org.optaplanner.constraint.streams.common.ScoreImpactType) ,public final transient TriConstraintStream<A,B,C> join(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public UniConstraintStream<ResultA_> map(BiFunction<A,B,ResultA_>) ,public Stream_ shareAndAddChild(Stream_) <variables>protected final List<BavetAbstractBiConstraintStream<Solution_,A,B>> childStreamList
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/bi/BavetGroupBiConstraintStream.java
|
BavetGroupBiConstraintStream
|
toString
|
class BavetGroupBiConstraintStream<Solution_, A, B>
extends BavetAbstractBiConstraintStream<Solution_, A, B> {
private final BavetAbstractConstraintStream<Solution_> parent;
public BavetGroupBiConstraintStream(BavetConstraintFactory<Solution_> constraintFactory,
BavetAbstractConstraintStream<Solution_> parent) {
super(constraintFactory, parent.getRetrievalSemantics());
this.parent = parent;
}
@Override
public boolean guaranteesDistinct() {
return true;
}
// ************************************************************************
// Node creation
// ************************************************************************
@Override
public void collectActiveConstraintStreams(Set<BavetAbstractConstraintStream<Solution_>> constraintStreamSet) {
parent.collectActiveConstraintStreams(constraintStreamSet);
constraintStreamSet.add(this);
}
@Override
public <Score_ extends Score<Score_>> void buildNode(NodeBuildHelper<Score_> buildHelper) {
// Do nothing. BavetGroupBridgeUniConstraintStream, etc build everything.
}
// ************************************************************************
// Equality for node sharing
// ************************************************************************
// TODO
@Override
public String toString() {<FILL_FUNCTION_BODY>}
// ************************************************************************
// Getters/setters
// ************************************************************************
}
|
return "Group() with " + childStreamList.size() + " children";
| 360
| 22
| 382
|
<methods>public void <init>(BavetConstraintFactory<Solution_>, org.optaplanner.constraint.streams.common.RetrievalSemantics) ,public BavetAbstractBiConstraintStream<Solution_,A,B> filter(BiPredicate<A,B>) ,public BiConstraintStream<A,ResultB_> flattenLast(Function<B,Iterable<ResultB_>>) ,public List<BavetAbstractBiConstraintStream<Solution_,A,B>> getChildStreamList() ,public UniConstraintStream<Result_> groupBy(BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public BiConstraintStream<ResultA_,ResultB_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>) ,public TriConstraintStream<ResultA_,ResultB_,ResultC_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>) ,public QuadConstraintStream<ResultA_,ResultB_,ResultC_,ResultD_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public UniConstraintStream<GroupKey_> groupBy(BiFunction<A,B,GroupKey_>) ,public TriConstraintStream<GroupKey_,ResultB_,ResultC_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>) ,public QuadConstraintStream<GroupKey_,ResultB_,ResultC_,ResultD_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public BiConstraintStream<GroupKey_,Result_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public BiConstraintStream<GroupKeyA_,GroupKeyB_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>) ,public 
TriConstraintStream<GroupKeyA_,GroupKeyB_,Result_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,ResultC_,ResultD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public TriConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_,ResultD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_,GroupKeyD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>, BiFunction<A,B,GroupKeyD_>) ,public final transient BiConstraintStream<A,B> ifExists(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifExists(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifExistsIncludingNullVars(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExists(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExists(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExistsIncludingNullVars(Class<C>, TriJoiner<A,B,C>[]) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, ToIntBiFunction<A,B>, org.optaplanner.constraint.streams.common.ScoreImpactType) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, ToLongBiFunction<A,B>, org.optaplanner.constraint.streams.common.ScoreImpactType) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, BiFunction<A,B,java.math.BigDecimal>, 
org.optaplanner.constraint.streams.common.ScoreImpactType) ,public final transient TriConstraintStream<A,B,C> join(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public UniConstraintStream<ResultA_> map(BiFunction<A,B,ResultA_>) ,public Stream_ shareAndAddChild(Stream_) <variables>protected final List<BavetAbstractBiConstraintStream<Solution_,A,B>> childStreamList
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/bi/BavetIfExistsBiConstraintStream.java
|
BavetIfExistsBiConstraintStream
|
toString
|
class BavetIfExistsBiConstraintStream<Solution_, A, B, C>
extends BavetAbstractBiConstraintStream<Solution_, A, B> {
private final BavetAbstractBiConstraintStream<Solution_, A, B> parentAB;
private final BavetIfExistsBridgeUniConstraintStream<Solution_, C> parentBridgeC;
private final boolean shouldExist;
private final DefaultTriJoiner<A, B, C> joiner;
private final TriPredicate<A, B, C> filtering;
public BavetIfExistsBiConstraintStream(BavetConstraintFactory<Solution_> constraintFactory,
BavetAbstractBiConstraintStream<Solution_, A, B> parentAB,
BavetIfExistsBridgeUniConstraintStream<Solution_, C> parentBridgeC,
boolean shouldExist,
DefaultTriJoiner<A, B, C> joiner, TriPredicate<A, B, C> filtering) {
super(constraintFactory, parentAB.getRetrievalSemantics());
this.parentAB = parentAB;
this.parentBridgeC = parentBridgeC;
this.shouldExist = shouldExist;
this.joiner = joiner;
this.filtering = filtering;
}
@Override
public boolean guaranteesDistinct() {
return parentAB.guaranteesDistinct();
}
// ************************************************************************
// Node creation
// ************************************************************************
@Override
public void collectActiveConstraintStreams(Set<BavetAbstractConstraintStream<Solution_>> constraintStreamSet) {
parentAB.collectActiveConstraintStreams(constraintStreamSet);
parentBridgeC.collectActiveConstraintStreams(constraintStreamSet);
constraintStreamSet.add(this);
}
@Override
public BavetAbstractConstraintStream<Solution_> getTupleSource() {
return parentAB.getTupleSource();
}
@Override
public <Score_ extends Score<Score_>> void buildNode(NodeBuildHelper<Score_> buildHelper) {
TupleLifecycle<BiTuple<A, B>> downstream = buildHelper.getAggregatedTupleLifecycle(childStreamList);
IndexerFactory indexerFactory = new IndexerFactory(joiner);
AbstractIfExistsNode<BiTuple<A, B>, C> node = indexerFactory.hasJoiners()
? (filtering == null ? new IndexedIfExistsBiNode<>(shouldExist,
JoinerUtils.combineLeftMappings(joiner), JoinerUtils.combineRightMappings(joiner),
buildHelper.reserveTupleStoreIndex(parentAB.getTupleSource()),
buildHelper.reserveTupleStoreIndex(parentAB.getTupleSource()),
buildHelper.reserveTupleStoreIndex(parentBridgeC.getTupleSource()),
buildHelper.reserveTupleStoreIndex(parentBridgeC.getTupleSource()),
downstream, indexerFactory.buildIndexer(true), indexerFactory.buildIndexer(false))
: new IndexedIfExistsBiNode<>(shouldExist,
JoinerUtils.combineLeftMappings(joiner), JoinerUtils.combineRightMappings(joiner),
buildHelper.reserveTupleStoreIndex(parentAB.getTupleSource()),
buildHelper.reserveTupleStoreIndex(parentAB.getTupleSource()),
buildHelper.reserveTupleStoreIndex(parentAB.getTupleSource()),
buildHelper.reserveTupleStoreIndex(parentBridgeC.getTupleSource()),
buildHelper.reserveTupleStoreIndex(parentBridgeC.getTupleSource()),
buildHelper.reserveTupleStoreIndex(parentBridgeC.getTupleSource()),
downstream, indexerFactory.buildIndexer(true), indexerFactory.buildIndexer(false),
filtering))
: (filtering == null ? new UnindexedIfExistsBiNode<>(shouldExist,
buildHelper.reserveTupleStoreIndex(parentAB.getTupleSource()),
buildHelper.reserveTupleStoreIndex(parentBridgeC.getTupleSource()), downstream)
: new UnindexedIfExistsBiNode<>(shouldExist,
buildHelper.reserveTupleStoreIndex(parentAB.getTupleSource()),
buildHelper.reserveTupleStoreIndex(parentAB.getTupleSource()),
buildHelper.reserveTupleStoreIndex(parentBridgeC.getTupleSource()),
buildHelper.reserveTupleStoreIndex(parentBridgeC.getTupleSource()),
downstream, filtering));
buildHelper.addNode(node, this, parentBridgeC);
}
// ************************************************************************
// Equality for node sharing
// ************************************************************************
// TODO
@Override
public String toString() {<FILL_FUNCTION_BODY>}
// ************************************************************************
// Getters/setters
// ************************************************************************
}
|
return "IfExists() with " + childStreamList.size() + " children";
| 1,227
| 23
| 1,250
|
<methods>public void <init>(BavetConstraintFactory<Solution_>, org.optaplanner.constraint.streams.common.RetrievalSemantics) ,public BavetAbstractBiConstraintStream<Solution_,A,B> filter(BiPredicate<A,B>) ,public BiConstraintStream<A,ResultB_> flattenLast(Function<B,Iterable<ResultB_>>) ,public List<BavetAbstractBiConstraintStream<Solution_,A,B>> getChildStreamList() ,public UniConstraintStream<Result_> groupBy(BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public BiConstraintStream<ResultA_,ResultB_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>) ,public TriConstraintStream<ResultA_,ResultB_,ResultC_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>) ,public QuadConstraintStream<ResultA_,ResultB_,ResultC_,ResultD_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public UniConstraintStream<GroupKey_> groupBy(BiFunction<A,B,GroupKey_>) ,public TriConstraintStream<GroupKey_,ResultB_,ResultC_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>) ,public QuadConstraintStream<GroupKey_,ResultB_,ResultC_,ResultD_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public BiConstraintStream<GroupKey_,Result_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public BiConstraintStream<GroupKeyA_,GroupKeyB_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>) ,public 
TriConstraintStream<GroupKeyA_,GroupKeyB_,Result_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,ResultC_,ResultD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public TriConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_,ResultD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_,GroupKeyD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>, BiFunction<A,B,GroupKeyD_>) ,public final transient BiConstraintStream<A,B> ifExists(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifExists(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifExistsIncludingNullVars(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExists(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExists(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExistsIncludingNullVars(Class<C>, TriJoiner<A,B,C>[]) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, ToIntBiFunction<A,B>, org.optaplanner.constraint.streams.common.ScoreImpactType) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, ToLongBiFunction<A,B>, org.optaplanner.constraint.streams.common.ScoreImpactType) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, BiFunction<A,B,java.math.BigDecimal>, 
org.optaplanner.constraint.streams.common.ScoreImpactType) ,public final transient TriConstraintStream<A,B,C> join(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public UniConstraintStream<ResultA_> map(BiFunction<A,B,ResultA_>) ,public Stream_ shareAndAddChild(Stream_) <variables>protected final List<BavetAbstractBiConstraintStream<Solution_,A,B>> childStreamList
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/bi/BavetMapBridgeBiConstraintStream.java
|
BavetMapBridgeBiConstraintStream
|
buildNode
|
class BavetMapBridgeBiConstraintStream<Solution_, A, B, NewA>
extends BavetAbstractBiConstraintStream<Solution_, A, B> {
private final BavetAbstractBiConstraintStream<Solution_, A, B> parent;
private final BiFunction<A, B, NewA> mappingFunction;
private BavetMapUniConstraintStream<Solution_, NewA> mapStream;
public BavetMapBridgeBiConstraintStream(BavetConstraintFactory<Solution_> constraintFactory,
BavetAbstractBiConstraintStream<Solution_, A, B> parent, BiFunction<A, B, NewA> mappingFunction) {
super(constraintFactory, parent.getRetrievalSemantics());
this.parent = parent;
this.mappingFunction = mappingFunction;
}
@Override
public boolean guaranteesDistinct() {
return false;
}
public void setMapStream(BavetMapUniConstraintStream<Solution_, NewA> mapStream) {
this.mapStream = mapStream;
}
// ************************************************************************
// Node creation
// ************************************************************************
@Override
public void collectActiveConstraintStreams(Set<BavetAbstractConstraintStream<Solution_>> constraintStreamSet) {
parent.collectActiveConstraintStreams(constraintStreamSet);
constraintStreamSet.add(this);
}
@Override
public BavetAbstractConstraintStream<Solution_> getTupleSource() {
return parent.getTupleSource();
}
@Override
public <Score_ extends Score<Score_>> void buildNode(NodeBuildHelper<Score_> buildHelper) {<FILL_FUNCTION_BODY>}
// ************************************************************************
// Equality for node sharing
// ************************************************************************
// TODO
// ************************************************************************
// Getters/setters
// ************************************************************************
}
|
if (!childStreamList.isEmpty()) {
throw new IllegalStateException("Impossible state: the stream (" + this
+ ") has a non-empty childStreamList (" + childStreamList + ") but it's a flattenLast bridge.");
}
int inputStoreIndex = buildHelper.reserveTupleStoreIndex(parent.getTupleSource());
int outputStoreSize = buildHelper.extractTupleStoreSize(mapStream);
AbstractMapNode<BiTuple<A, B>, NewA> node = new MapBiNode<>(inputStoreIndex, mappingFunction,
buildHelper.getAggregatedTupleLifecycle(mapStream.getChildStreamList()), outputStoreSize);
buildHelper.addNode(node, this);
| 478
| 179
| 657
|
<methods>public void <init>(BavetConstraintFactory<Solution_>, org.optaplanner.constraint.streams.common.RetrievalSemantics) ,public BavetAbstractBiConstraintStream<Solution_,A,B> filter(BiPredicate<A,B>) ,public BiConstraintStream<A,ResultB_> flattenLast(Function<B,Iterable<ResultB_>>) ,public List<BavetAbstractBiConstraintStream<Solution_,A,B>> getChildStreamList() ,public UniConstraintStream<Result_> groupBy(BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public BiConstraintStream<ResultA_,ResultB_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>) ,public TriConstraintStream<ResultA_,ResultB_,ResultC_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>) ,public QuadConstraintStream<ResultA_,ResultB_,ResultC_,ResultD_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public UniConstraintStream<GroupKey_> groupBy(BiFunction<A,B,GroupKey_>) ,public TriConstraintStream<GroupKey_,ResultB_,ResultC_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>) ,public QuadConstraintStream<GroupKey_,ResultB_,ResultC_,ResultD_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public BiConstraintStream<GroupKey_,Result_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public BiConstraintStream<GroupKeyA_,GroupKeyB_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>) ,public 
TriConstraintStream<GroupKeyA_,GroupKeyB_,Result_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,ResultC_,ResultD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public TriConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_,ResultD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_,GroupKeyD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>, BiFunction<A,B,GroupKeyD_>) ,public final transient BiConstraintStream<A,B> ifExists(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifExists(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifExistsIncludingNullVars(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExists(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExists(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExistsIncludingNullVars(Class<C>, TriJoiner<A,B,C>[]) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, ToIntBiFunction<A,B>, org.optaplanner.constraint.streams.common.ScoreImpactType) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, ToLongBiFunction<A,B>, org.optaplanner.constraint.streams.common.ScoreImpactType) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, BiFunction<A,B,java.math.BigDecimal>, 
org.optaplanner.constraint.streams.common.ScoreImpactType) ,public final transient TriConstraintStream<A,B,C> join(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public UniConstraintStream<ResultA_> map(BiFunction<A,B,ResultA_>) ,public Stream_ shareAndAddChild(Stream_) <variables>protected final List<BavetAbstractBiConstraintStream<Solution_,A,B>> childStreamList
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/bi/BavetQuadGroupBridgeBiConstraintStream.java
|
BavetQuadGroupBridgeBiConstraintStream
|
buildNode
|
class BavetQuadGroupBridgeBiConstraintStream<Solution_, A, B, NewA, NewB, NewC, NewD>
extends BavetAbstractBiConstraintStream<Solution_, A, B> {
private final BavetAbstractBiConstraintStream<Solution_, A, B> parent;
private BavetGroupQuadConstraintStream<Solution_, NewA, NewB, NewC, NewD> groupStream;
private final GroupNodeConstructor<QuadTuple<NewA, NewB, NewC, NewD>> nodeConstructor;
public BavetQuadGroupBridgeBiConstraintStream(BavetConstraintFactory<Solution_> constraintFactory,
BavetAbstractBiConstraintStream<Solution_, A, B> parent,
GroupNodeConstructor<QuadTuple<NewA, NewB, NewC, NewD>> nodeConstructor) {
super(constraintFactory, parent.getRetrievalSemantics());
this.parent = parent;
this.nodeConstructor = nodeConstructor;
}
@Override
public boolean guaranteesDistinct() {
return true;
}
public void setGroupStream(BavetGroupQuadConstraintStream<Solution_, NewA, NewB, NewC, NewD> groupStream) {
this.groupStream = groupStream;
}
// ************************************************************************
// Node creation
// ************************************************************************
@Override
public void collectActiveConstraintStreams(Set<BavetAbstractConstraintStream<Solution_>> constraintStreamSet) {
parent.collectActiveConstraintStreams(constraintStreamSet);
constraintStreamSet.add(this);
}
@Override
public <Score_ extends Score<Score_>> void buildNode(NodeBuildHelper<Score_> buildHelper) {<FILL_FUNCTION_BODY>}
@Override
public BavetAbstractConstraintStream<Solution_> getTupleSource() {
return parent.getTupleSource();
}
}
|
List<? extends ConstraintStream> groupStreamChildList = groupStream.getChildStreamList();
nodeConstructor.build(buildHelper, parent.getTupleSource(), groupStream, groupStreamChildList, this, childStreamList,
constraintFactory.getEnvironmentMode());
| 482
| 66
| 548
|
<methods>public void <init>(BavetConstraintFactory<Solution_>, org.optaplanner.constraint.streams.common.RetrievalSemantics) ,public BavetAbstractBiConstraintStream<Solution_,A,B> filter(BiPredicate<A,B>) ,public BiConstraintStream<A,ResultB_> flattenLast(Function<B,Iterable<ResultB_>>) ,public List<BavetAbstractBiConstraintStream<Solution_,A,B>> getChildStreamList() ,public UniConstraintStream<Result_> groupBy(BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public BiConstraintStream<ResultA_,ResultB_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>) ,public TriConstraintStream<ResultA_,ResultB_,ResultC_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>) ,public QuadConstraintStream<ResultA_,ResultB_,ResultC_,ResultD_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public UniConstraintStream<GroupKey_> groupBy(BiFunction<A,B,GroupKey_>) ,public TriConstraintStream<GroupKey_,ResultB_,ResultC_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>) ,public QuadConstraintStream<GroupKey_,ResultB_,ResultC_,ResultD_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public BiConstraintStream<GroupKey_,Result_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public BiConstraintStream<GroupKeyA_,GroupKeyB_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>) ,public 
TriConstraintStream<GroupKeyA_,GroupKeyB_,Result_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,ResultC_,ResultD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public TriConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_,ResultD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_,GroupKeyD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>, BiFunction<A,B,GroupKeyD_>) ,public final transient BiConstraintStream<A,B> ifExists(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifExists(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifExistsIncludingNullVars(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExists(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExists(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExistsIncludingNullVars(Class<C>, TriJoiner<A,B,C>[]) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, ToIntBiFunction<A,B>, org.optaplanner.constraint.streams.common.ScoreImpactType) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, ToLongBiFunction<A,B>, org.optaplanner.constraint.streams.common.ScoreImpactType) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, BiFunction<A,B,java.math.BigDecimal>, 
org.optaplanner.constraint.streams.common.ScoreImpactType) ,public final transient TriConstraintStream<A,B,C> join(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public UniConstraintStream<ResultA_> map(BiFunction<A,B,ResultA_>) ,public Stream_ shareAndAddChild(Stream_) <variables>protected final List<BavetAbstractBiConstraintStream<Solution_,A,B>> childStreamList
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/bi/BavetTriGroupBridgeBiConstraintStream.java
|
BavetTriGroupBridgeBiConstraintStream
|
buildNode
|
class BavetTriGroupBridgeBiConstraintStream<Solution_, A, B, NewA, NewB, NewC>
extends BavetAbstractBiConstraintStream<Solution_, A, B> {
private final BavetAbstractBiConstraintStream<Solution_, A, B> parent;
private BavetGroupTriConstraintStream<Solution_, NewA, NewB, NewC> groupStream;
private final GroupNodeConstructor<TriTuple<NewA, NewB, NewC>> nodeConstructor;
public BavetTriGroupBridgeBiConstraintStream(BavetConstraintFactory<Solution_> constraintFactory,
BavetAbstractBiConstraintStream<Solution_, A, B> parent,
GroupNodeConstructor<TriTuple<NewA, NewB, NewC>> nodeConstructor) {
super(constraintFactory, parent.getRetrievalSemantics());
this.parent = parent;
this.nodeConstructor = nodeConstructor;
}
@Override
public boolean guaranteesDistinct() {
return true;
}
public void setGroupStream(BavetGroupTriConstraintStream<Solution_, NewA, NewB, NewC> groupStream) {
this.groupStream = groupStream;
}
// ************************************************************************
// Node creation
// ************************************************************************
@Override
public void collectActiveConstraintStreams(Set<BavetAbstractConstraintStream<Solution_>> constraintStreamSet) {
parent.collectActiveConstraintStreams(constraintStreamSet);
constraintStreamSet.add(this);
}
@Override
public <Score_ extends Score<Score_>> void buildNode(NodeBuildHelper<Score_> buildHelper) {<FILL_FUNCTION_BODY>}
@Override
public BavetAbstractConstraintStream<Solution_> getTupleSource() {
return parent.getTupleSource();
}
}
|
List<? extends ConstraintStream> groupStreamChildList = groupStream.getChildStreamList();
nodeConstructor.build(buildHelper, parent.getTupleSource(), groupStream, groupStreamChildList, this, childStreamList,
constraintFactory.getEnvironmentMode());
| 460
| 66
| 526
|
<methods>public void <init>(BavetConstraintFactory<Solution_>, org.optaplanner.constraint.streams.common.RetrievalSemantics) ,public BavetAbstractBiConstraintStream<Solution_,A,B> filter(BiPredicate<A,B>) ,public BiConstraintStream<A,ResultB_> flattenLast(Function<B,Iterable<ResultB_>>) ,public List<BavetAbstractBiConstraintStream<Solution_,A,B>> getChildStreamList() ,public UniConstraintStream<Result_> groupBy(BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public BiConstraintStream<ResultA_,ResultB_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>) ,public TriConstraintStream<ResultA_,ResultB_,ResultC_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>) ,public QuadConstraintStream<ResultA_,ResultB_,ResultC_,ResultD_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public UniConstraintStream<GroupKey_> groupBy(BiFunction<A,B,GroupKey_>) ,public TriConstraintStream<GroupKey_,ResultB_,ResultC_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>) ,public QuadConstraintStream<GroupKey_,ResultB_,ResultC_,ResultD_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public BiConstraintStream<GroupKey_,Result_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public BiConstraintStream<GroupKeyA_,GroupKeyB_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>) ,public 
TriConstraintStream<GroupKeyA_,GroupKeyB_,Result_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,ResultC_,ResultD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public TriConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_,ResultD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_,GroupKeyD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>, BiFunction<A,B,GroupKeyD_>) ,public final transient BiConstraintStream<A,B> ifExists(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifExists(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifExistsIncludingNullVars(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExists(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExists(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExistsIncludingNullVars(Class<C>, TriJoiner<A,B,C>[]) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, ToIntBiFunction<A,B>, org.optaplanner.constraint.streams.common.ScoreImpactType) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, ToLongBiFunction<A,B>, org.optaplanner.constraint.streams.common.ScoreImpactType) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, BiFunction<A,B,java.math.BigDecimal>, 
org.optaplanner.constraint.streams.common.ScoreImpactType) ,public final transient TriConstraintStream<A,B,C> join(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public UniConstraintStream<ResultA_> map(BiFunction<A,B,ResultA_>) ,public Stream_ shareAndAddChild(Stream_) <variables>protected final List<BavetAbstractBiConstraintStream<Solution_,A,B>> childStreamList
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/bi/BavetUniGroupBridgeBiConstraintStream.java
|
BavetUniGroupBridgeBiConstraintStream
|
buildNode
|
class BavetUniGroupBridgeBiConstraintStream<Solution_, A, B, NewA>
extends BavetAbstractBiConstraintStream<Solution_, A, B> {
private final BavetAbstractBiConstraintStream<Solution_, A, B> parent;
private BavetGroupUniConstraintStream<Solution_, NewA> groupStream;
private final GroupNodeConstructor<UniTuple<NewA>> nodeConstructor;
public BavetUniGroupBridgeBiConstraintStream(BavetConstraintFactory<Solution_> constraintFactory,
BavetAbstractBiConstraintStream<Solution_, A, B> parent,
GroupNodeConstructor<UniTuple<NewA>> nodeConstructor) {
super(constraintFactory, parent.getRetrievalSemantics());
this.parent = parent;
this.nodeConstructor = nodeConstructor;
}
@Override
public boolean guaranteesDistinct() {
return true;
}
public void setGroupStream(BavetGroupUniConstraintStream<Solution_, NewA> groupStream) {
this.groupStream = groupStream;
}
// ************************************************************************
// Node creation
// ************************************************************************
@Override
public void collectActiveConstraintStreams(Set<BavetAbstractConstraintStream<Solution_>> constraintStreamSet) {
parent.collectActiveConstraintStreams(constraintStreamSet);
constraintStreamSet.add(this);
}
@Override
public <Score_ extends Score<Score_>> void buildNode(NodeBuildHelper<Score_> buildHelper) {<FILL_FUNCTION_BODY>}
@Override
public BavetAbstractConstraintStream<Solution_> getTupleSource() {
return parent.getTupleSource();
}
}
|
List<? extends ConstraintStream> groupStreamChildList = groupStream.getChildStreamList();
nodeConstructor.build(buildHelper, parent.getTupleSource(), groupStream, groupStreamChildList, this, childStreamList,
constraintFactory.getEnvironmentMode());
| 437
| 66
| 503
|
<methods>public void <init>(BavetConstraintFactory<Solution_>, org.optaplanner.constraint.streams.common.RetrievalSemantics) ,public BavetAbstractBiConstraintStream<Solution_,A,B> filter(BiPredicate<A,B>) ,public BiConstraintStream<A,ResultB_> flattenLast(Function<B,Iterable<ResultB_>>) ,public List<BavetAbstractBiConstraintStream<Solution_,A,B>> getChildStreamList() ,public UniConstraintStream<Result_> groupBy(BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public BiConstraintStream<ResultA_,ResultB_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>) ,public TriConstraintStream<ResultA_,ResultB_,ResultC_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>) ,public QuadConstraintStream<ResultA_,ResultB_,ResultC_,ResultD_> groupBy(BiConstraintCollector<A,B,ResultContainerA_,ResultA_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public UniConstraintStream<GroupKey_> groupBy(BiFunction<A,B,GroupKey_>) ,public TriConstraintStream<GroupKey_,ResultB_,ResultC_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>) ,public QuadConstraintStream<GroupKey_,ResultB_,ResultC_,ResultD_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainerB_,ResultB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public BiConstraintStream<GroupKey_,Result_> groupBy(BiFunction<A,B,GroupKey_>, BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public BiConstraintStream<GroupKeyA_,GroupKeyB_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>) ,public 
TriConstraintStream<GroupKeyA_,GroupKeyB_,Result_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiConstraintCollector<A,B,ResultContainer_,Result_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,ResultC_,ResultD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiConstraintCollector<A,B,ResultContainerC_,ResultC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public TriConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_,ResultD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>, BiConstraintCollector<A,B,ResultContainerD_,ResultD_>) ,public QuadConstraintStream<GroupKeyA_,GroupKeyB_,GroupKeyC_,GroupKeyD_> groupBy(BiFunction<A,B,GroupKeyA_>, BiFunction<A,B,GroupKeyB_>, BiFunction<A,B,GroupKeyC_>, BiFunction<A,B,GroupKeyD_>) ,public final transient BiConstraintStream<A,B> ifExists(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifExists(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifExistsIncludingNullVars(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExists(Class<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExists(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public final transient BiConstraintStream<A,B> ifNotExistsIncludingNullVars(Class<C>, TriJoiner<A,B,C>[]) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, ToIntBiFunction<A,B>, org.optaplanner.constraint.streams.common.ScoreImpactType) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, ToLongBiFunction<A,B>, org.optaplanner.constraint.streams.common.ScoreImpactType) ,public BiConstraintBuilder<A,B,Score_> innerImpact(Score_, BiFunction<A,B,java.math.BigDecimal>, 
org.optaplanner.constraint.streams.common.ScoreImpactType) ,public final transient TriConstraintStream<A,B,C> join(UniConstraintStream<C>, TriJoiner<A,B,C>[]) ,public UniConstraintStream<ResultA_> map(BiFunction<A,B,ResultA_>) ,public Stream_ shareAndAddChild(Stream_) <variables>protected final List<BavetAbstractBiConstraintStream<Solution_,A,B>> childStreamList
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/bi/BiScorer.java
|
BiScorer
|
impact
|
class BiScorer<A, B> extends AbstractScorer<BiTuple<A, B>> {
private final BiFunction<A, B, UndoScoreImpacter> scoreImpacter;
public BiScorer(String constraintPackage, String constraintName, Score<?> constraintWeight,
BiFunction<A, B, UndoScoreImpacter> scoreImpacter, int inputStoreIndex) {
super(constraintPackage, constraintName, constraintWeight, inputStoreIndex);
this.scoreImpacter = scoreImpacter;
}
@Override
protected UndoScoreImpacter impact(BiTuple<A, B> tuple) {<FILL_FUNCTION_BODY>}
}
|
try {
return scoreImpacter.apply(tuple.getFactA(), tuple.getFactB());
} catch (Exception e) {
throw createExceptionOnImpact(tuple, e);
}
| 174
| 57
| 231
|
<methods>public final void insert(BiTuple<A,B>) ,public final void retract(BiTuple<A,B>) ,public final java.lang.String toString() ,public final void update(BiTuple<A,B>) <variables>private final non-sealed java.lang.String constraintId,private final non-sealed Score<?> constraintWeight,private final non-sealed int inputStoreIndex
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/bi/Group0Mapping2CollectorBiNode.java
|
Group0Mapping2CollectorBiNode
|
mergeCollectors
|
class Group0Mapping2CollectorBiNode<OldA, OldB, A, B, ResultContainerA_, ResultContainerB_>
extends AbstractGroupBiNode<OldA, OldB, BiTuple<A, B>, BiTupleImpl<A, B>, Void, Object, Pair<A, B>> {
private final int outputStoreSize;
public Group0Mapping2CollectorBiNode(int groupStoreIndex, int undoStoreIndex,
BiConstraintCollector<OldA, OldB, ResultContainerA_, A> collectorA,
BiConstraintCollector<OldA, OldB, ResultContainerB_, B> collectorB,
TupleLifecycle<BiTuple<A, B>> nextNodesTupleLifecycle, int outputStoreSize, EnvironmentMode environmentMode) {
super(groupStoreIndex, undoStoreIndex, null, mergeCollectors(collectorA, collectorB), nextNodesTupleLifecycle,
environmentMode);
this.outputStoreSize = outputStoreSize;
}
static <OldA, OldB, A, B, ResultContainerA_, ResultContainerB_>
BiConstraintCollector<OldA, OldB, Object, Pair<A, B>> mergeCollectors(
BiConstraintCollector<OldA, OldB, ResultContainerA_, A> collectorA,
BiConstraintCollector<OldA, OldB, ResultContainerB_, B> collectorB) {<FILL_FUNCTION_BODY>}
@Override
protected BiTupleImpl<A, B> createOutTuple(Void groupKey) {
return new BiTupleImpl<>(null, null, outputStoreSize);
}
@Override
protected void updateOutTupleToResult(BiTupleImpl<A, B> outTuple, Pair<A, B> result) {
outTuple.factA = result.getKey();
outTuple.factB = result.getValue();
}
}
|
return (BiConstraintCollector<OldA, OldB, Object, Pair<A, B>>) ConstraintCollectors.compose(collectorA, collectorB,
Pair::of);
| 474
| 50
| 524
|
<methods><variables>private final non-sealed TriFunction<java.lang.Object,OldA,OldB,java.lang.Runnable> accumulator
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/bi/Group0Mapping3CollectorBiNode.java
|
Group0Mapping3CollectorBiNode
|
mergeCollectors
|
class Group0Mapping3CollectorBiNode<OldA, OldB, A, B, C, ResultContainerA_, ResultContainerB_, ResultContainerC_>
extends AbstractGroupBiNode<OldA, OldB, TriTuple<A, B, C>, TriTupleImpl<A, B, C>, Void, Object, Triple<A, B, C>> {
private final int outputStoreSize;
public Group0Mapping3CollectorBiNode(int groupStoreIndex, int undoStoreIndex,
BiConstraintCollector<OldA, OldB, ResultContainerA_, A> collectorA,
BiConstraintCollector<OldA, OldB, ResultContainerB_, B> collectorB,
BiConstraintCollector<OldA, OldB, ResultContainerC_, C> collectorC,
TupleLifecycle<TriTuple<A, B, C>> nextNodesTupleLifecycle, int outputStoreSize, EnvironmentMode environmentMode) {
super(groupStoreIndex, undoStoreIndex, null, mergeCollectors(collectorA, collectorB, collectorC),
nextNodesTupleLifecycle, environmentMode);
this.outputStoreSize = outputStoreSize;
}
static <OldA, OldB, A, B, C, ResultContainerA_, ResultContainerB_, ResultContainerC_>
BiConstraintCollector<OldA, OldB, Object, Triple<A, B, C>> mergeCollectors(
BiConstraintCollector<OldA, OldB, ResultContainerA_, A> collectorA,
BiConstraintCollector<OldA, OldB, ResultContainerB_, B> collectorB,
BiConstraintCollector<OldA, OldB, ResultContainerC_, C> collectorC) {<FILL_FUNCTION_BODY>}
@Override
protected TriTupleImpl<A, B, C> createOutTuple(Void groupKey) {
return new TriTupleImpl<>(null, null, null, outputStoreSize);
}
@Override
protected void updateOutTupleToResult(TriTupleImpl<A, B, C> outTuple, Triple<A, B, C> result) {
outTuple.factA = result.getA();
outTuple.factB = result.getB();
outTuple.factC = result.getC();
}
}
|
return (BiConstraintCollector<OldA, OldB, Object, Triple<A, B, C>>) ConstraintCollectors.compose(collectorA, collectorB,
collectorC, Triple::of);
| 566
| 55
| 621
|
<methods><variables>private final non-sealed TriFunction<java.lang.Object,OldA,OldB,java.lang.Runnable> accumulator
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/bi/Group0Mapping4CollectorBiNode.java
|
Group0Mapping4CollectorBiNode
|
updateOutTupleToResult
|
class Group0Mapping4CollectorBiNode<OldA, OldB, A, B, C, D, ResultContainerA_, ResultContainerB_, ResultContainerC_, ResultContainerD_>
extends
AbstractGroupBiNode<OldA, OldB, QuadTuple<A, B, C, D>, QuadTupleImpl<A, B, C, D>, Void, Object, Quadruple<A, B, C, D>> {
private final int outputStoreSize;
public Group0Mapping4CollectorBiNode(int groupStoreIndex, int undoStoreIndex,
BiConstraintCollector<OldA, OldB, ResultContainerA_, A> collectorA,
BiConstraintCollector<OldA, OldB, ResultContainerB_, B> collectorB,
BiConstraintCollector<OldA, OldB, ResultContainerC_, C> collectorC,
BiConstraintCollector<OldA, OldB, ResultContainerD_, D> collectorD,
TupleLifecycle<QuadTuple<A, B, C, D>> nextNodesTupleLifecycle, int outputStoreSize,
EnvironmentMode environmentMode) {
super(groupStoreIndex, undoStoreIndex, null, mergeCollectors(collectorA, collectorB, collectorC, collectorD),
nextNodesTupleLifecycle, environmentMode);
this.outputStoreSize = outputStoreSize;
}
private static <OldA, OldB, A, B, C, D, ResultContainerA_, ResultContainerB_, ResultContainerC_, ResultContainerD_>
BiConstraintCollector<OldA, OldB, Object, Quadruple<A, B, C, D>> mergeCollectors(
BiConstraintCollector<OldA, OldB, ResultContainerA_, A> collectorA,
BiConstraintCollector<OldA, OldB, ResultContainerB_, B> collectorB,
BiConstraintCollector<OldA, OldB, ResultContainerC_, C> collectorC,
BiConstraintCollector<OldA, OldB, ResultContainerD_, D> collectorD) {
return (BiConstraintCollector<OldA, OldB, Object, Quadruple<A, B, C, D>>) ConstraintCollectors.compose(collectorA,
collectorB, collectorC, collectorD, Quadruple::of);
}
@Override
protected QuadTupleImpl<A, B, C, D> createOutTuple(Void groupKey) {
return new QuadTupleImpl<>(null, null, null, null, outputStoreSize);
}
@Override
protected void updateOutTupleToResult(QuadTupleImpl<A, B, C, D> outTuple, Quadruple<A, B, C, D> result) {<FILL_FUNCTION_BODY>}
}
|
outTuple.factA = result.getA();
outTuple.factB = result.getB();
outTuple.factC = result.getC();
outTuple.factD = result.getD();
| 679
| 59
| 738
|
<methods><variables>private final non-sealed TriFunction<java.lang.Object,OldA,OldB,java.lang.Runnable> accumulator
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/bi/Group1Mapping3CollectorBiNode.java
|
Group1Mapping3CollectorBiNode
|
updateOutTupleToResult
|
class Group1Mapping3CollectorBiNode<OldA, OldB, A, B, C, D, ResultContainerB_, ResultContainerC_, ResultContainerD_>
extends AbstractGroupBiNode<OldA, OldB, QuadTuple<A, B, C, D>, QuadTupleImpl<A, B, C, D>, A, Object, Triple<B, C, D>> {
private final int outputStoreSize;
public Group1Mapping3CollectorBiNode(BiFunction<OldA, OldB, A> groupKeyMapping, int groupStoreIndex, int undoStoreIndex,
BiConstraintCollector<OldA, OldB, ResultContainerB_, B> collectorB,
BiConstraintCollector<OldA, OldB, ResultContainerC_, C> collectorC,
BiConstraintCollector<OldA, OldB, ResultContainerD_, D> collectorD,
TupleLifecycle<QuadTuple<A, B, C, D>> nextNodesTupleLifecycle, int outputStoreSize,
EnvironmentMode environmentMode) {
super(groupStoreIndex, undoStoreIndex, tuple -> createGroupKey(groupKeyMapping, tuple),
mergeCollectors(collectorB, collectorC, collectorD), nextNodesTupleLifecycle, environmentMode);
this.outputStoreSize = outputStoreSize;
}
@Override
protected QuadTupleImpl<A, B, C, D> createOutTuple(A a) {
return new QuadTupleImpl<>(a, null, null, null, outputStoreSize);
}
@Override
protected void updateOutTupleToResult(QuadTupleImpl<A, B, C, D> outTuple, Triple<B, C, D> result) {<FILL_FUNCTION_BODY>}
}
|
outTuple.factB = result.getA();
outTuple.factC = result.getB();
outTuple.factD = result.getC();
| 441
| 45
| 486
|
<methods><variables>private final non-sealed TriFunction<java.lang.Object,OldA,OldB,java.lang.Runnable> accumulator
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/bi/Group2Mapping0CollectorBiNode.java
|
Group2Mapping0CollectorBiNode
|
createGroupKey
|
class Group2Mapping0CollectorBiNode<OldA, OldB, A, B>
extends AbstractGroupBiNode<OldA, OldB, BiTuple<A, B>, BiTupleImpl<A, B>, Pair<A, B>, Void, Void> {
private final int outputStoreSize;
public Group2Mapping0CollectorBiNode(BiFunction<OldA, OldB, A> groupKeyMappingA, BiFunction<OldA, OldB, B> groupKeyMappingB,
int groupStoreIndex, TupleLifecycle<BiTuple<A, B>> nextNodesTupleLifecycle, int outputStoreSize,
EnvironmentMode environmentMode) {
super(groupStoreIndex, tuple -> createGroupKey(groupKeyMappingA, groupKeyMappingB, tuple), nextNodesTupleLifecycle,
environmentMode);
this.outputStoreSize = outputStoreSize;
}
static <A, B, OldA, OldB> Pair<A, B> createGroupKey(BiFunction<OldA, OldB, A> groupKeyMappingA,
BiFunction<OldA, OldB, B> groupKeyMappingB, BiTuple<OldA, OldB> tuple) {<FILL_FUNCTION_BODY>}
@Override
protected BiTupleImpl<A, B> createOutTuple(Pair<A, B> groupKey) {
return new BiTupleImpl<>(groupKey.getKey(), groupKey.getValue(), outputStoreSize);
}
@Override
protected void updateOutTupleToResult(BiTupleImpl<A, B> outTuple, Void unused) {
throw new IllegalStateException("Impossible state: collector is null.");
}
}
|
OldA oldA = tuple.getFactA();
OldB oldB = tuple.getFactB();
A a = groupKeyMappingA.apply(oldA, oldB);
B b = groupKeyMappingB.apply(oldA, oldB);
return Pair.of(a, b);
| 425
| 77
| 502
|
<methods><variables>private final non-sealed TriFunction<java.lang.Void,OldA,OldB,java.lang.Runnable> accumulator
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/bi/Group3Mapping0CollectorBiNode.java
|
Group3Mapping0CollectorBiNode
|
createGroupKey
|
class Group3Mapping0CollectorBiNode<OldA, OldB, A, B, C>
extends AbstractGroupBiNode<OldA, OldB, TriTuple<A, B, C>, TriTupleImpl<A, B, C>, Triple<A, B, C>, Void, Void> {
private final int outputStoreSize;
public Group3Mapping0CollectorBiNode(BiFunction<OldA, OldB, A> groupKeyMappingA, BiFunction<OldA, OldB, B> groupKeyMappingB,
BiFunction<OldA, OldB, C> groupKeyMappingC, int groupStoreIndex,
TupleLifecycle<TriTuple<A, B, C>> nextNodesTupleLifecycle, int outputStoreSize, EnvironmentMode environmentMode) {
super(groupStoreIndex, tuple -> createGroupKey(groupKeyMappingA, groupKeyMappingB, groupKeyMappingC, tuple),
nextNodesTupleLifecycle, environmentMode);
this.outputStoreSize = outputStoreSize;
}
static <A, B, C, OldA, OldB> Triple<A, B, C> createGroupKey(BiFunction<OldA, OldB, A> groupKeyMappingA,
BiFunction<OldA, OldB, B> groupKeyMappingB, BiFunction<OldA, OldB, C> groupKeyMappingC,
BiTuple<OldA, OldB> tuple) {<FILL_FUNCTION_BODY>}
@Override
protected TriTupleImpl<A, B, C> createOutTuple(Triple<A, B, C> groupKey) {
return new TriTupleImpl<>(groupKey.getA(), groupKey.getB(), groupKey.getC(), outputStoreSize);
}
@Override
protected void updateOutTupleToResult(TriTupleImpl<A, B, C> outTuple, Void unused) {
throw new IllegalStateException("Impossible state: collector is null.");
}
}
|
OldA oldA = tuple.getFactA();
OldB oldB = tuple.getFactB();
A a = groupKeyMappingA.apply(oldA, oldB);
B b = groupKeyMappingB.apply(oldA, oldB);
C c = groupKeyMappingC.apply(oldA, oldB);
return Triple.of(a, b, c);
| 492
| 97
| 589
|
<methods><variables>private final non-sealed TriFunction<java.lang.Void,OldA,OldB,java.lang.Runnable> accumulator
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/common/AbstractFlattenLastNode.java
|
AbstractFlattenLastNode
|
calculateScore
|
class AbstractFlattenLastNode<InTuple_ extends Tuple, OutTuple_ extends Tuple, EffectiveItem_, FlattenedItem_>
extends AbstractNode
implements TupleLifecycle<InTuple_> {
private final int flattenLastStoreIndex;
private final Function<EffectiveItem_, Iterable<FlattenedItem_>> mappingFunction;
/**
* Calls for example {@link AbstractScorer#insert(Tuple)}, and/or ...
*/
private final TupleLifecycle<OutTuple_> nextNodesTupleLifecycle;
private final Queue<OutTuple_> dirtyTupleQueue = new ArrayDeque<>(1000);
protected AbstractFlattenLastNode(int flattenLastStoreIndex,
Function<EffectiveItem_, Iterable<FlattenedItem_>> mappingFunction,
TupleLifecycle<OutTuple_> nextNodesTupleLifecycle) {
this.flattenLastStoreIndex = flattenLastStoreIndex;
this.mappingFunction = Objects.requireNonNull(mappingFunction);
this.nextNodesTupleLifecycle = Objects.requireNonNull(nextNodesTupleLifecycle);
}
@Override
public void insert(InTuple_ tuple) {
if (tuple.getStore(flattenLastStoreIndex) != null) {
throw new IllegalStateException("Impossible state: the input for the tuple (" + tuple
+ ") was already added in the tupleStore.");
}
List<OutTuple_> outTupleList = new ArrayList<>();
for (FlattenedItem_ item : mappingFunction.apply(getEffectiveFactIn(tuple))) {
addTuple(tuple, item, outTupleList);
}
if (!outTupleList.isEmpty()) {
tuple.setStore(flattenLastStoreIndex, outTupleList);
}
}
private void addTuple(InTuple_ originalTuple, FlattenedItem_ item, List<OutTuple_> outTupleList) {
OutTuple_ tuple = createTuple(originalTuple, item);
outTupleList.add(tuple);
dirtyTupleQueue.add(tuple);
}
protected abstract OutTuple_ createTuple(InTuple_ originalTuple, FlattenedItem_ item);
@Override
public void update(InTuple_ tuple) {
List<OutTuple_> outTupleList = tuple.getStore(flattenLastStoreIndex);
if (outTupleList == null) {
// No fail fast if null because we don't track which tuples made it through the filter predicate(s).
insert(tuple);
return;
}
Iterator<FlattenedItem_> iterator = mappingFunction.apply(getEffectiveFactIn(tuple)).iterator();
if (!iterator.hasNext()) { // No need for incremental logic as everything will be removed.
retract(tuple);
return;
}
// Convert Iterable into something we can query.
List<FlattenedItem_> newFlattenedItemList = new ArrayList<>();
iterator.forEachRemaining(newFlattenedItemList::add);
// Remove all facts from the input that are already contained.
Iterator<OutTuple_> outTupleIterator = outTupleList.listIterator();
while (outTupleIterator.hasNext()) {
OutTuple_ outTuple = outTupleIterator.next();
FlattenedItem_ existingFlattenedItem = getEffectiveFactOut(outTuple);
// A fact can be present more than once and every iteration should only remove one instance.
boolean existsAlsoInNew = false;
Iterator<FlattenedItem_> newFlattenedItemIterator = newFlattenedItemList.listIterator();
while (newFlattenedItemIterator.hasNext()) {
FlattenedItem_ newFlattenedItem = newFlattenedItemIterator.next();
// We check for identity, not equality, to not introduce dependency on user equals().
if (newFlattenedItem == existingFlattenedItem) {
// Remove item from the list, as it means its tuple need not be added later.
newFlattenedItemIterator.remove();
existsAlsoInNew = true;
break;
}
}
if (!existsAlsoInNew) {
outTupleIterator.remove();
removeTuple(outTuple);
} else {
outTuple.setState(BavetTupleState.UPDATING);
dirtyTupleQueue.add(outTuple);
}
}
// Whatever is left in the input needs to be added.
for (FlattenedItem_ newFlattenedItem : newFlattenedItemList) {
addTuple(tuple, newFlattenedItem, outTupleList);
}
}
protected abstract EffectiveItem_ getEffectiveFactIn(InTuple_ tuple);
protected abstract FlattenedItem_ getEffectiveFactOut(OutTuple_ outTuple);
@Override
public void retract(InTuple_ tuple) {
List<OutTuple_> outTupleList = tuple.removeStore(flattenLastStoreIndex);
if (outTupleList == null) {
// No fail fast if null because we don't track which tuples made it through the filter predicate(s)
return;
}
for (OutTuple_ item : outTupleList) {
removeTuple(item);
}
}
private void removeTuple(OutTuple_ outTuple) {
switch (outTuple.getState()) {
case CREATING:
outTuple.setState(BavetTupleState.ABORTING);
break;
case UPDATING:
case OK:
outTuple.setState(BavetTupleState.DYING);
break;
default:
throw new IllegalStateException("Impossible state: The tuple (" + outTuple +
") is in an unexpected state (" + outTuple.getState() + ").");
}
dirtyTupleQueue.add(outTuple);
}
@Override
public void calculateScore() {<FILL_FUNCTION_BODY>}
}
|
for (OutTuple_ outTuple : dirtyTupleQueue) {
switch (outTuple.getState()) {
case CREATING:
nextNodesTupleLifecycle.insert(outTuple);
outTuple.setState(BavetTupleState.OK);
break;
case UPDATING:
nextNodesTupleLifecycle.update(outTuple);
outTuple.setState(BavetTupleState.OK);
break;
case DYING:
nextNodesTupleLifecycle.retract(outTuple);
outTuple.setState(BavetTupleState.DEAD);
break;
case ABORTING:
outTuple.setState(BavetTupleState.DEAD);
break;
default:
throw new IllegalStateException("Impossible state: The tuple (" + outTuple + ") in node (" +
this + ") is in an unexpected state (" + outTuple.getState() + ").");
}
}
dirtyTupleQueue.clear();
| 1,581
| 271
| 1,852
|
<methods>public non-sealed void <init>() ,public abstract void calculateScore() ,public void setId(long) ,public java.lang.String toString() <variables>private long id
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/common/AbstractMapNode.java
|
AbstractMapNode
|
insert
|
class AbstractMapNode<InTuple_ extends Tuple, Right_>
extends AbstractNode
implements TupleLifecycle<InTuple_> {
private final int inputStoreIndex;
/**
* Calls for example {@link AbstractScorer#insert(Tuple)} and/or ...
*/
private final TupleLifecycle<UniTuple<Right_>> nextNodesTupleLifecycle;
private final int outputStoreSize;
private final Queue<UniTuple<Right_>> dirtyTupleQueue;
protected AbstractMapNode(int inputStoreIndex, TupleLifecycle<UniTuple<Right_>> nextNodesTupleLifecycle,
int outputStoreSize) {
this.inputStoreIndex = inputStoreIndex;
this.nextNodesTupleLifecycle = nextNodesTupleLifecycle;
this.outputStoreSize = outputStoreSize;
dirtyTupleQueue = new ArrayDeque<>(1000);
}
@Override
public void insert(InTuple_ tuple) {<FILL_FUNCTION_BODY>}
protected abstract Right_ map(InTuple_ tuple);
@Override
public void update(InTuple_ tuple) {
UniTupleImpl<Right_> outTuple = tuple.getStore(inputStoreIndex);
if (outTuple == null) {
// No fail fast if null because we don't track which tuples made it through the filter predicate(s)
insert(tuple);
return;
}
Right_ oldMapped = outTuple.factA;
Right_ mapped = map(tuple);
// We check for identity, not equality, to not introduce dependency on user equals().
if (mapped != oldMapped) {
outTuple.factA = mapped;
outTuple.state = BavetTupleState.UPDATING;
dirtyTupleQueue.add(outTuple);
}
}
@Override
public void retract(InTuple_ tuple) {
UniTuple<Right_> outTuple = tuple.removeStore(inputStoreIndex);
if (outTuple == null) {
// No fail fast if null because we don't track which tuples made it through the filter predicate(s)
return;
}
outTuple.setState(BavetTupleState.DYING);
dirtyTupleQueue.add(outTuple);
}
@Override
public void calculateScore() {
for (UniTuple<Right_> tuple : dirtyTupleQueue) {
switch (tuple.getState()) {
case CREATING:
nextNodesTupleLifecycle.insert(tuple);
tuple.setState(BavetTupleState.OK);
break;
case UPDATING:
nextNodesTupleLifecycle.update(tuple);
tuple.setState(BavetTupleState.OK);
break;
case DYING:
nextNodesTupleLifecycle.retract(tuple);
tuple.setState(BavetTupleState.DEAD);
break;
case ABORTING:
tuple.setState(BavetTupleState.DEAD);
break;
case OK:
case DEAD:
default:
throw new IllegalStateException("Impossible state: The tuple (" + tuple + ") in node (" +
this + ") is in an unexpected state (" + tuple.getState() + ").");
}
}
dirtyTupleQueue.clear();
}
}
|
if (tuple.getStore(inputStoreIndex) != null) {
throw new IllegalStateException("Impossible state: the input for the tuple (" + tuple
+ ") was already added in the tupleStore.");
}
Right_ mapped = map(tuple);
UniTuple<Right_> outTuple = new UniTupleImpl<>(mapped, outputStoreSize);
tuple.setStore(inputStoreIndex, outTuple);
dirtyTupleQueue.add(outTuple);
| 900
| 125
| 1,025
|
<methods>public non-sealed void <init>() ,public abstract void calculateScore() ,public void setId(long) ,public java.lang.String toString() <variables>private long id
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/common/AbstractTuple.java
|
AbstractTuple
|
removeStore
|
class AbstractTuple implements Tuple {
/*
* We create a lot of tuples, many of them having store size of 1.
* If an array of size 1 was created for each such tuple, memory would be wasted and indirection created.
* This trade-off of increased memory efficiency for marginally slower access time is proven beneficial.
*/
private final boolean storeIsArray;
private Object store;
public BavetTupleState state = BavetTupleState.CREATING;
protected AbstractTuple(int storeSize) {
this.store = (storeSize < 2) ? null : new Object[storeSize];
this.storeIsArray = store != null;
}
@Override
public final BavetTupleState getState() {
return state;
}
@Override
public final void setState(BavetTupleState state) {
this.state = state;
}
@Override
public final <Value_> Value_ getStore(int index) {
if (storeIsArray) {
return (Value_) ((Object[]) store)[index];
}
return (Value_) store;
}
@Override
public final void setStore(int index, Object value) {
if (storeIsArray) {
((Object[]) store)[index] = value;
return;
}
store = value;
}
@Override
public <Value_> Value_ removeStore(int index) {<FILL_FUNCTION_BODY>}
}
|
Value_ value;
if (storeIsArray) {
Object[] array = (Object[]) store;
value = (Value_) array[index];
array[index] = null;
} else {
value = (Value_) store;
store = null;
}
return value;
| 386
| 78
| 464
|
<no_super_class>
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/common/AbstractUnindexedIfExistsNode.java
|
AbstractUnindexedIfExistsNode
|
retractRight
|
class AbstractUnindexedIfExistsNode<LeftTuple_ extends Tuple, Right_>
extends AbstractIfExistsNode<LeftTuple_, Right_>
implements LeftTupleLifecycle<LeftTuple_>, RightTupleLifecycle<UniTuple<Right_>> {
private final int inputStoreIndexLeftCounterEntry;
private final int inputStoreIndexRightEntry;
// Acts as a leftTupleList too
private final TupleList<ExistsCounter<LeftTuple_>> leftCounterList = new TupleList<>();
private final TupleList<UniTuple<Right_>> rightTupleList = new TupleList<>();
protected AbstractUnindexedIfExistsNode(boolean shouldExist,
int inputStoreIndexLeftCounterEntry, int inputStoreIndexLeftTrackerList,
int inputStoreIndexRightEntry, int inputStoreIndexRightTrackerList,
TupleLifecycle<LeftTuple_> nextNodesTupleLifecycle,
boolean isFiltering) {
super(shouldExist, inputStoreIndexLeftTrackerList, inputStoreIndexRightTrackerList,
nextNodesTupleLifecycle, isFiltering);
this.inputStoreIndexLeftCounterEntry = inputStoreIndexLeftCounterEntry;
this.inputStoreIndexRightEntry = inputStoreIndexRightEntry;
}
@Override
public final void insertLeft(LeftTuple_ leftTuple) {
if (leftTuple.getStore(inputStoreIndexLeftCounterEntry) != null) {
throw new IllegalStateException("Impossible state: the input for the tuple (" + leftTuple
+ ") was already added in the tupleStore.");
}
ExistsCounter<LeftTuple_> counter = new ExistsCounter<>(leftTuple);
TupleListEntry<ExistsCounter<LeftTuple_>> counterEntry = leftCounterList.add(counter);
leftTuple.setStore(inputStoreIndexLeftCounterEntry, counterEntry);
if (!isFiltering) {
counter.countRight = rightTupleList.size();
} else {
TupleList<FilteringTracker<LeftTuple_>> leftTrackerList = new TupleList<>();
rightTupleList.forEach(rightTuple -> updateCounterFromLeft(leftTuple, rightTuple, counter, leftTrackerList));
leftTuple.setStore(inputStoreIndexLeftTrackerList, leftTrackerList);
}
initCounterLeft(counter);
}
@Override
public final void updateLeft(LeftTuple_ leftTuple) {
TupleListEntry<ExistsCounter<LeftTuple_>> counterEntry = leftTuple.getStore(inputStoreIndexLeftCounterEntry);
if (counterEntry == null) {
// No fail fast if null because we don't track which tuples made it through the filter predicate(s)
insertLeft(leftTuple);
return;
}
ExistsCounter<LeftTuple_> counter = counterEntry.getElement();
// The indexers contain counters in the DEAD state, to track the rightCount.
if (!isFiltering) {
updateUnchangedCounterLeft(counter);
} else {
// Call filtering for the leftTuple and rightTuple combinations again
TupleList<FilteringTracker<LeftTuple_>> leftTrackerList = leftTuple.getStore(inputStoreIndexLeftTrackerList);
leftTrackerList.forEach(FilteringTracker::remove);
counter.countRight = 0;
rightTupleList.forEach(rightTuple -> updateCounterFromLeft(leftTuple, rightTuple, counter, leftTrackerList));
updateCounterLeft(counter);
}
}
@Override
public final void retractLeft(LeftTuple_ leftTuple) {
TupleListEntry<ExistsCounter<LeftTuple_>> counterEntry = leftTuple.removeStore(inputStoreIndexLeftCounterEntry);
if (counterEntry == null) {
// No fail fast if null because we don't track which tuples made it through the filter predicate(s)
return;
}
ExistsCounter<LeftTuple_> counter = counterEntry.getElement();
counterEntry.remove();
if (isFiltering) {
TupleList<FilteringTracker<LeftTuple_>> leftTrackerList = leftTuple.getStore(inputStoreIndexLeftTrackerList);
leftTrackerList.forEach(FilteringTracker::remove);
}
killCounterLeft(counter);
}
@Override
public final void insertRight(UniTuple<Right_> rightTuple) {
if (rightTuple.getStore(inputStoreIndexRightEntry) != null) {
throw new IllegalStateException("Impossible state: the input for the tuple (" + rightTuple
+ ") was already added in the tupleStore.");
}
TupleListEntry<UniTuple<Right_>> rightEntry = rightTupleList.add(rightTuple);
rightTuple.setStore(inputStoreIndexRightEntry, rightEntry);
if (!isFiltering) {
leftCounterList.forEach(this::incrementCounterRight);
} else {
TupleList<FilteringTracker<LeftTuple_>> rightTrackerList = new TupleList<>();
leftCounterList.forEach(counter -> updateCounterFromRight(rightTuple, counter, rightTrackerList));
rightTuple.setStore(inputStoreIndexRightTrackerList, rightTrackerList);
}
}
@Override
public final void updateRight(UniTuple<Right_> rightTuple) {
TupleListEntry<UniTuple<Right_>> rightEntry = rightTuple.getStore(inputStoreIndexRightEntry);
if (rightEntry == null) {
// No fail fast if null because we don't track which tuples made it through the filter predicate(s)
insertRight(rightTuple);
return;
}
if (isFiltering) {
TupleList<FilteringTracker<LeftTuple_>> rightTrackerList = updateRightTrackerList(rightTuple);
leftCounterList.forEach(counter -> updateCounterFromRight(rightTuple, counter, rightTrackerList));
}
}
@Override
public final void retractRight(UniTuple<Right_> rightTuple) {<FILL_FUNCTION_BODY>}
}
|
TupleListEntry<UniTuple<Right_>> rightEntry = rightTuple.removeStore(inputStoreIndexRightEntry);
if (rightEntry == null) {
// No fail fast if null because we don't track which tuples made it through the filter predicate(s)
return;
}
rightEntry.remove();
if (!isFiltering) {
leftCounterList.forEach(this::decrementCounterRight);
} else {
updateRightTrackerList(rightTuple);
}
| 1,560
| 130
| 1,690
|
<methods>public final void calculateScore() <variables>protected final non-sealed Queue<ExistsCounter<LeftTuple_>> dirtyCounterQueue,protected final non-sealed int inputStoreIndexLeftTrackerList,protected final non-sealed int inputStoreIndexRightTrackerList,protected final non-sealed boolean isFiltering,private final non-sealed TupleLifecycle<LeftTuple_> nextNodesTupleLifecycle,protected final non-sealed boolean shouldExist
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/common/AbstractUnindexedJoinNode.java
|
AbstractUnindexedJoinNode
|
insertLeft
|
class AbstractUnindexedJoinNode<LeftTuple_ extends Tuple, Right_, OutTuple_ extends Tuple, MutableOutTuple_ extends OutTuple_>
extends AbstractJoinNode<LeftTuple_, Right_, OutTuple_, MutableOutTuple_>
implements LeftTupleLifecycle<LeftTuple_>, RightTupleLifecycle<UniTuple<Right_>> {
private final int inputStoreIndexLeftEntry;
private final int inputStoreIndexRightEntry;
private final TupleList<LeftTuple_> leftTupleList = new TupleList<>();
private final TupleList<UniTuple<Right_>> rightTupleList = new TupleList<>();
protected AbstractUnindexedJoinNode(int inputStoreIndexLeftEntry, int inputStoreIndexLeftOutTupleList,
int inputStoreIndexRightEntry, int inputStoreIndexRightOutTupleList,
TupleLifecycle<OutTuple_> nextNodesTupleLifecycle, boolean isFiltering, int outputStoreIndexLeftOutEntry,
int outputStoreIndexRightOutEntry) {
super(inputStoreIndexLeftOutTupleList, inputStoreIndexRightOutTupleList, nextNodesTupleLifecycle, isFiltering,
outputStoreIndexLeftOutEntry, outputStoreIndexRightOutEntry);
this.inputStoreIndexLeftEntry = inputStoreIndexLeftEntry;
this.inputStoreIndexRightEntry = inputStoreIndexRightEntry;
}
@Override
public final void insertLeft(LeftTuple_ leftTuple) {<FILL_FUNCTION_BODY>}
@Override
public final void updateLeft(LeftTuple_ leftTuple) {
TupleListEntry<LeftTuple_> leftEntry = leftTuple.getStore(inputStoreIndexLeftEntry);
if (leftEntry == null) {
// No fail fast if null because we don't track which tuples made it through the filter predicate(s)
insertLeft(leftTuple);
return;
}
innerUpdateLeft(leftTuple, rightTupleList::forEach);
}
@Override
public final void retractLeft(LeftTuple_ leftTuple) {
TupleListEntry<LeftTuple_> leftEntry = leftTuple.removeStore(inputStoreIndexLeftEntry);
if (leftEntry == null) {
// No fail fast if null because we don't track which tuples made it through the filter predicate(s)
return;
}
TupleList<MutableOutTuple_> outTupleListLeft = leftTuple.removeStore(inputStoreIndexLeftOutTupleList);
leftEntry.remove();
outTupleListLeft.forEach(this::retractOutTuple);
}
@Override
public final void insertRight(UniTuple<Right_> rightTuple) {
if (rightTuple.getStore(inputStoreIndexRightEntry) != null) {
throw new IllegalStateException("Impossible state: the input for the tuple (" + rightTuple
+ ") was already added in the tupleStore.");
}
TupleListEntry<UniTuple<Right_>> rightEntry = rightTupleList.add(rightTuple);
rightTuple.setStore(inputStoreIndexRightEntry, rightEntry);
TupleList<MutableOutTuple_> outTupleListRight = new TupleList<>();
rightTuple.setStore(inputStoreIndexRightOutTupleList, outTupleListRight);
leftTupleList.forEach(leftTuple -> insertOutTupleFiltered(leftTuple, rightTuple));
}
@Override
public final void updateRight(UniTuple<Right_> rightTuple) {
TupleListEntry<UniTuple<Right_>> rightEntry = rightTuple.getStore(inputStoreIndexRightEntry);
if (rightEntry == null) {
// No fail fast if null because we don't track which tuples made it through the filter predicate(s)
insertRight(rightTuple);
return;
}
innerUpdateRight(rightTuple, leftTupleList::forEach);
}
@Override
public final void retractRight(UniTuple<Right_> rightTuple) {
TupleListEntry<UniTuple<Right_>> rightEntry = rightTuple.removeStore(inputStoreIndexRightEntry);
if (rightEntry == null) {
// No fail fast if null because we don't track which tuples made it through the filter predicate(s)
return;
}
TupleList<MutableOutTuple_> outTupleListRight = rightTuple.removeStore(inputStoreIndexRightOutTupleList);
rightEntry.remove();
outTupleListRight.forEach(this::retractOutTuple);
}
}
|
if (leftTuple.getStore(inputStoreIndexLeftEntry) != null) {
throw new IllegalStateException("Impossible state: the input for the tuple (" + leftTuple
+ ") was already added in the tupleStore.");
}
TupleListEntry<LeftTuple_> leftEntry = leftTupleList.add(leftTuple);
leftTuple.setStore(inputStoreIndexLeftEntry, leftEntry);
TupleList<MutableOutTuple_> outTupleListLeft = new TupleList<>();
leftTuple.setStore(inputStoreIndexLeftOutTupleList, outTupleListLeft);
rightTupleList.forEach(rightTuple -> insertOutTupleFiltered(leftTuple, rightTuple));
| 1,177
| 184
| 1,361
|
<methods>public final void calculateScore() <variables>protected final non-sealed Queue<OutTuple_> dirtyTupleQueue,protected final non-sealed int inputStoreIndexLeftOutTupleList,protected final non-sealed int inputStoreIndexRightOutTupleList,private final non-sealed boolean isFiltering,private final non-sealed TupleLifecycle<OutTuple_> nextNodesTupleLifecycle,private final non-sealed int outputStoreIndexLeftOutEntry,private final non-sealed int outputStoreIndexRightOutEntry
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/common/BavetAbstractConstraintStream.java
|
BavetAbstractConstraintStream
|
assertBavetUniConstraintStream
|
class BavetAbstractConstraintStream<Solution_> extends AbstractConstraintStream<Solution_> {
protected final BavetConstraintFactory<Solution_> constraintFactory;
public BavetAbstractConstraintStream(BavetConstraintFactory<Solution_> constraintFactory,
RetrievalSemantics retrievalSemantics) {
super(retrievalSemantics);
this.constraintFactory = constraintFactory;
}
// ************************************************************************
// Penalize/reward
// ************************************************************************
protected Constraint buildConstraint(String constraintPackage, String constraintName, Score<?> constraintWeight,
ScoreImpactType impactType, Object justificationFunction, Object indictedObjectsMapping,
BavetScoringConstraintStream<Solution_> stream) {
var resolvedConstraintPackage =
Objects.requireNonNullElseGet(constraintPackage, this.constraintFactory::getDefaultConstraintPackage);
var resolvedJustificationMapping =
Objects.requireNonNullElseGet(justificationFunction, this::getDefaultJustificationMapping);
var resolvedIndictedObjectsMapping =
Objects.requireNonNullElseGet(indictedObjectsMapping, this::getDefaultIndictedObjectsMapping);
var isConstraintWeightConfigurable = constraintWeight == null;
var constraintWeightExtractor = isConstraintWeightConfigurable
? buildConstraintWeightExtractor(resolvedConstraintPackage, constraintName)
: buildConstraintWeightExtractor(resolvedConstraintPackage, constraintName, constraintWeight);
var constraint =
new BavetConstraint<>(constraintFactory, resolvedConstraintPackage, constraintName, constraintWeightExtractor,
impactType, resolvedJustificationMapping, resolvedIndictedObjectsMapping,
isConstraintWeightConfigurable, stream);
stream.setConstraint(constraint);
return constraint;
}
// ************************************************************************
// Node creation
// ************************************************************************
public abstract void collectActiveConstraintStreams(Set<BavetAbstractConstraintStream<Solution_>> constraintStreamSet);
public BavetAbstractConstraintStream<Solution_> getTupleSource() {
return this;
}
public abstract <Score_ extends Score<Score_>> void buildNode(NodeBuildHelper<Score_> buildHelper);
// ************************************************************************
// Helper methods
// ************************************************************************
protected <A> BavetAbstractUniConstraintStream<Solution_, A> assertBavetUniConstraintStream(
UniConstraintStream<A> otherStream) {<FILL_FUNCTION_BODY>}
// ************************************************************************
// Getters/setters
// ************************************************************************
@Override
public BavetConstraintFactory<Solution_> getConstraintFactory() {
return constraintFactory;
}
}
|
if (!(otherStream instanceof BavetAbstractUniConstraintStream)) {
throw new IllegalStateException("The streams (" + this + ", " + otherStream
+ ") are not built from the same " + ConstraintFactory.class.getSimpleName() + ".");
}
BavetAbstractUniConstraintStream<Solution_, A> other = (BavetAbstractUniConstraintStream<Solution_, A>) otherStream;
if (constraintFactory != other.getConstraintFactory()) {
throw new IllegalStateException("The streams (" + this + ", " + other
+ ") are built from different constraintFactories (" + constraintFactory + ", "
+ other.getConstraintFactory()
+ ").");
}
return other;
| 663
| 179
| 842
|
<methods>public abstract InnerConstraintFactory<Solution_,?> getConstraintFactory() ,public org.optaplanner.constraint.streams.common.RetrievalSemantics getRetrievalSemantics() <variables>private final non-sealed org.optaplanner.constraint.streams.common.RetrievalSemantics retrievalSemantics
|
apache_incubator-kie-optaplanner
|
incubator-kie-optaplanner/core/optaplanner-constraint-streams-bavet/src/main/java/org/optaplanner/constraint/streams/bavet/common/GroupNodeConstructorWithoutAccumulate.java
|
GroupNodeConstructorWithoutAccumulate
|
build
|
class GroupNodeConstructorWithoutAccumulate<Tuple_ extends Tuple> implements GroupNodeConstructor<Tuple_> {
private final NodeConstructorWithoutAccumulate<Tuple_> nodeConstructorFunction;
public GroupNodeConstructorWithoutAccumulate(NodeConstructorWithoutAccumulate<Tuple_> nodeConstructorFunction) {
this.nodeConstructorFunction = nodeConstructorFunction;
}
@Override
public <Solution_, Score_ extends Score<Score_>> void build(NodeBuildHelper<Score_> buildHelper,
BavetAbstractConstraintStream<Solution_> parentTupleSource,
BavetAbstractConstraintStream<Solution_> groupStream, List<? extends ConstraintStream> groupStreamChildList,
BavetAbstractConstraintStream<Solution_> bridgeStream, List<? extends ConstraintStream> bridgeStreamChildList,
EnvironmentMode environmentMode) {<FILL_FUNCTION_BODY>}
}
|
if (!bridgeStreamChildList.isEmpty()) {
throw new IllegalStateException("Impossible state: the stream (" + bridgeStream
+ ") has an non-empty childStreamList (" + bridgeStreamChildList + ") but it's a groupBy bridge.");
}
int groupStoreIndex = buildHelper.reserveTupleStoreIndex(parentTupleSource);
TupleLifecycle<Tuple_> tupleLifecycle = buildHelper.getAggregatedTupleLifecycle(groupStreamChildList);
int outputStoreSize = buildHelper.extractTupleStoreSize(groupStream);
var node = nodeConstructorFunction.apply(groupStoreIndex, tupleLifecycle, outputStoreSize, environmentMode);
buildHelper.addNode(node, bridgeStream);
| 223
| 185
| 408
|
<no_super_class>
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.