repo
stringlengths
7
58
path
stringlengths
12
218
func_name
stringlengths
3
140
original_string
stringlengths
73
34.1k
language
stringclasses
1 value
code
stringlengths
73
34.1k
code_tokens
list
docstring
stringlengths
3
16k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
105
339
partition
stringclasses
1 value
elki-project/elki
elki-index/src/main/java/de/lmu/ifi/dbs/elki/index/tree/AbstractNode.java
AbstractNode.splitTo
/**
 * Redistribute entries according to the given sorting: entries before the
 * split point remain in this node, the rest are moved to {@code newNode}.
 *
 * @param newNode Node to split to (receives entries from splitPoint onward)
 * @param sorting Sorting to use
 * @param splitPoint Split point (index of the first entry for newNode)
 */
public final void splitTo(AbstractNode<E> newNode, List<E> sorting, int splitPoint) {
  // Splitting must not change the node kind (leaf vs. directory).
  assert (isLeaf() == newNode.isLeaf());
  // This node is rebuilt from scratch from the first part of the sorting.
  deleteAllEntries();
  // Debug buffer is only allocated when debug logging is enabled.
  StringBuilder msg = LoggingConfiguration.DEBUG ? new StringBuilder(1000) : null;
  // Entries [0, splitPoint) stay in this node.
  for(int i = 0; i < splitPoint; i++) {
    addEntry(sorting.get(i));
    if(msg != null) {
      msg.append("n_").append(getPageID()).append(' ').append(sorting.get(i)).append('\n');
    }
  }
  // Entries [splitPoint, size) go to the new node.
  for(int i = splitPoint; i < sorting.size(); i++) {
    newNode.addEntry(sorting.get(i));
    if(msg != null) {
      msg.append("n_").append(newNode.getPageID()).append(' ').append(sorting.get(i)).append('\n');
    }
  }
  if(msg != null) {
    Logging.getLogger(this.getClass().getName()).fine(msg.toString());
  }
}
java
public final void splitTo(AbstractNode<E> newNode, List<E> sorting, int splitPoint) { assert (isLeaf() == newNode.isLeaf()); deleteAllEntries(); StringBuilder msg = LoggingConfiguration.DEBUG ? new StringBuilder(1000) : null; for(int i = 0; i < splitPoint; i++) { addEntry(sorting.get(i)); if(msg != null) { msg.append("n_").append(getPageID()).append(' ').append(sorting.get(i)).append('\n'); } } for(int i = splitPoint; i < sorting.size(); i++) { newNode.addEntry(sorting.get(i)); if(msg != null) { msg.append("n_").append(newNode.getPageID()).append(' ').append(sorting.get(i)).append('\n'); } } if(msg != null) { Logging.getLogger(this.getClass().getName()).fine(msg.toString()); } }
[ "public", "final", "void", "splitTo", "(", "AbstractNode", "<", "E", ">", "newNode", ",", "List", "<", "E", ">", "sorting", ",", "int", "splitPoint", ")", "{", "assert", "(", "isLeaf", "(", ")", "==", "newNode", ".", "isLeaf", "(", ")", ")", ";", "...
Redistribute entries according to the given sorting. @param newNode Node to split to @param sorting Sorting to use @param splitPoint Split point
[ "Redistribute", "entries", "according", "to", "the", "given", "sorting", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-index/src/main/java/de/lmu/ifi/dbs/elki/index/tree/AbstractNode.java#L302-L323
train
elki-project/elki
elki/src/main/java/de/lmu/ifi/dbs/elki/evaluation/AutomaticEvaluation.java
AutomaticEvaluation.ensureClusteringResult
/**
 * Ensure that the result contains at least one Clustering; if none is found,
 * a label-derived (or all-in-one) clustering is computed and attached.
 *
 * @param db Database to process
 * @param result Result to inspect for existing clusterings
 */
public static void ensureClusteringResult(final Database db, final Result result) {
  Collection<Clustering<?>> clusterings = ResultUtil.filterResults(db.getHierarchy(), result, Clustering.class);
  if(clusterings.isEmpty()) {
    // Fall back to a clustering derived from object labels.
    ResultUtil.addChildResult(db, new ByLabelOrAllInOneClustering().run(db));
  }
}
java
public static void ensureClusteringResult(final Database db, final Result result) { Collection<Clustering<?>> clusterings = ResultUtil.filterResults(db.getHierarchy(), result, Clustering.class); if(clusterings.isEmpty()) { ResultUtil.addChildResult(db, new ByLabelOrAllInOneClustering().run(db)); } }
[ "public", "static", "void", "ensureClusteringResult", "(", "final", "Database", "db", ",", "final", "Result", "result", ")", "{", "Collection", "<", "Clustering", "<", "?", ">", ">", "clusterings", "=", "ResultUtil", ".", "filterResults", "(", "db", ".", "ge...
Ensure that the result contains at least one Clustering. @param db Database to process @param result result
[ "Ensure", "that", "the", "result", "contains", "at", "least", "one", "Clustering", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki/src/main/java/de/lmu/ifi/dbs/elki/evaluation/AutomaticEvaluation.java#L160-L165
train
elki-project/elki
elki-core-math/src/main/java/de/lmu/ifi/dbs/elki/math/statistics/distribution/estimator/meta/TrimmedEstimator.java
TrimmedEstimator.toPrimitiveDoubleArray
/**
 * Local copy, see ArrayLikeUtil.toPrimitiveDoubleArray.
 *
 * @param data Data
 * @param adapter Adapter
 * @return Copy of the data, as {@code double[]}
 */
public static <A> double[] toPrimitiveDoubleArray(A data, NumberArrayAdapter<?, A> adapter) {
  // Fast path: the data already is a double[], so clone it directly.
  if(adapter == DoubleArrayAdapter.STATIC) {
    return ((double[]) data).clone();
  }
  // Generic path: copy element by element through the adapter.
  final int size = adapter.size(data);
  final double[] copy = new double[size];
  for(int pos = 0; pos < size; pos++) {
    copy[pos] = adapter.getDouble(data, pos);
  }
  return copy;
}
java
public static <A> double[] toPrimitiveDoubleArray(A data, NumberArrayAdapter<?, A> adapter) { if(adapter == DoubleArrayAdapter.STATIC) { return ((double[]) data).clone(); } final int len = adapter.size(data); double[] x = new double[len]; for(int i = 0; i < len; i++) { x[i] = adapter.getDouble(data, i); } return x; }
[ "public", "static", "<", "A", ">", "double", "[", "]", "toPrimitiveDoubleArray", "(", "A", "data", ",", "NumberArrayAdapter", "<", "?", ",", "A", ">", "adapter", ")", "{", "if", "(", "adapter", "==", "DoubleArrayAdapter", ".", "STATIC", ")", "{", "return...
Local copy, see ArrayLikeUtil.toPrimitiveDoubleArray. @param data Data @param adapter Adapter @return Copy of the data, as {@code double[]}
[ "Local", "copy", "see", "ArrayLikeUtil", ".", "toPrimitiveDoubleArray", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-core-math/src/main/java/de/lmu/ifi/dbs/elki/math/statistics/distribution/estimator/meta/TrimmedEstimator.java#L101-L111
train
elki-project/elki
elki-logging/src/main/java/de/lmu/ifi/dbs/elki/logging/CLISmartHandler.java
CLISmartHandler.flush
/**
 * Flush both output streams, reporting (but not propagating) any failure.
 */
@Override
public void flush() {
  // Flush each writer independently, so a failure on one does not
  // prevent flushing the other.
  for(Writer writer : new Writer[] { out, err }) {
    try {
      writer.flush();
    }
    catch(Exception ex) {
      reportError(null, ex, ErrorManager.FLUSH_FAILURE);
    }
  }
}
java
@Override public void flush() { try { out.flush(); } catch(Exception ex) { reportError(null, ex, ErrorManager.FLUSH_FAILURE); } try { err.flush(); } catch(Exception ex) { reportError(null, ex, ErrorManager.FLUSH_FAILURE); } }
[ "@", "Override", "public", "void", "flush", "(", ")", "{", "try", "{", "out", ".", "flush", "(", ")", ";", "}", "catch", "(", "Exception", "ex", ")", "{", "reportError", "(", "null", ",", "ex", ",", "ErrorManager", ".", "FLUSH_FAILURE", ")", ";", "...
Flush output streams
[ "Flush", "output", "streams" ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-logging/src/main/java/de/lmu/ifi/dbs/elki/logging/CLISmartHandler.java#L110-L124
train
elki-project/elki
elki-logging/src/main/java/de/lmu/ifi/dbs/elki/logging/CLISmartHandler.java
CLISmartHandler.publish
/**
 * Publish a log record: warnings and errors go to the error stream, all
 * other records to the standard output stream. Progress records are
 * rendered through the progress tracker instead of a regular formatter.
 *
 * @param record Log record to publish
 */
@Override
public void publish(final LogRecord record) {
  // determine destination: WARNING and above go to the error writer.
  final Writer destination;
  if(record.getLevel().intValue() >= Level.WARNING.intValue()) {
    destination = this.err;
  }
  else {
    destination = this.out;
  }
  // format
  final String m;
  // Progress records are handled specially.
  if(record instanceof ProgressLogRecord) {
    ProgressLogRecord prec = (ProgressLogRecord) record;
    ptrack.addProgress(prec.getProgress());
    Collection<Progress> completed = ptrack.removeCompleted();
    Collection<Progress> progresses = ptrack.getProgresses();
    StringBuilder buf = new StringBuilder();
    // Completed progresses are finalized, one per line.
    if(!completed.isEmpty()) {
      buf.append(OutputStreamLogger.CARRIAGE_RETURN);
      for(Progress prog : completed) {
        // TODO: use formatter, somehow?
        prog.appendToBuffer(buf);
        buf.append(OutputStreamLogger.NEWLINE);
      }
    }
    // Pending progresses are redrawn on one carriage-returned line.
    if(!progresses.isEmpty()) {
      boolean first = true;
      buf.append(OutputStreamLogger.CARRIAGE_RETURN);
      for(Progress prog : progresses) {
        if(first) {
          first = false;
        }
        else {
          buf.append(' ');
        }
        // TODO: use formatter, somehow?
        prog.appendToBuffer(buf);
      }
    }
    m = buf.toString();
  }
  else {
    // choose an appropriate formatter
    final Formatter fmt;
    if(record.getLevel().intValue() >= Level.WARNING.intValue()) {
      // format errors using the error formatter
      fmt = errformat;
    }
    else if(record.getLevel().intValue() <= Level.FINE.intValue()) {
      // format debug statements using the debug formatter.
      fmt = debugformat;
    }
    else {
      // default to the message formatter.
      fmt = msgformat;
    }
    try {
      m = fmt.format(record);
    }
    catch(Exception ex) {
      // Formatting failures are reported to the ErrorManager, never thrown.
      reportError(null, ex, ErrorManager.FORMAT_FAILURE);
      return;
    }
  }
  // write
  try {
    destination.write(m);
    // always flush (although the streams should auto-flush already)
    destination.flush();
  }
  catch(Exception ex) {
    reportError(null, ex, ErrorManager.WRITE_FAILURE);
    return;
  }
}
java
@Override public void publish(final LogRecord record) { // determine destination final Writer destination; if(record.getLevel().intValue() >= Level.WARNING.intValue()) { destination = this.err; } else { destination = this.out; } // format final String m; // Progress records are handled specially. if(record instanceof ProgressLogRecord) { ProgressLogRecord prec = (ProgressLogRecord) record; ptrack.addProgress(prec.getProgress()); Collection<Progress> completed = ptrack.removeCompleted(); Collection<Progress> progresses = ptrack.getProgresses(); StringBuilder buf = new StringBuilder(); if(!completed.isEmpty()) { buf.append(OutputStreamLogger.CARRIAGE_RETURN); for(Progress prog : completed) { // TODO: use formatter, somehow? prog.appendToBuffer(buf); buf.append(OutputStreamLogger.NEWLINE); } } if(!progresses.isEmpty()) { boolean first = true; buf.append(OutputStreamLogger.CARRIAGE_RETURN); for(Progress prog : progresses) { if(first) { first = false; } else { buf.append(' '); } // TODO: use formatter, somehow? prog.appendToBuffer(buf); } } m = buf.toString(); } else { // choose an appropriate formatter final Formatter fmt; // always format progress messages using the progress formatter. if(record.getLevel().intValue() >= Level.WARNING.intValue()) { // format errors using the error formatter fmt = errformat; } else if(record.getLevel().intValue() <= Level.FINE.intValue()) { // format debug statements using the debug formatter. fmt = debugformat; } else { // default to the message formatter. fmt = msgformat; } try { m = fmt.format(record); } catch(Exception ex) { reportError(null, ex, ErrorManager.FORMAT_FAILURE); return; } } // write try { destination.write(m); // always flush (although the streams should auto-flush already) destination.flush(); } catch(Exception ex) { reportError(null, ex, ErrorManager.WRITE_FAILURE); return; } }
[ "@", "Override", "public", "void", "publish", "(", "final", "LogRecord", "record", ")", "{", "// determine destination", "final", "Writer", "destination", ";", "if", "(", "record", ".", "getLevel", "(", ")", ".", "intValue", "(", ")", ">=", "Level", ".", "...
Publish a log record.
[ "Publish", "a", "log", "record", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-logging/src/main/java/de/lmu/ifi/dbs/elki/logging/CLISmartHandler.java#L129-L210
train
elki-project/elki
elki/src/main/java/de/lmu/ifi/dbs/elki/application/greedyensemble/EvaluatePrecomputedOutlierScores.java
EvaluatePrecomputedOutlierScores.checkForNaNs
private boolean checkForNaNs(NumberVector vec) { for(int i = 0, d = vec.getDimensionality(); i < d; i++) { double v = vec.doubleValue(i); if(v != v) { // NaN! return true; } } return false; }
java
private boolean checkForNaNs(NumberVector vec) { for(int i = 0, d = vec.getDimensionality(); i < d; i++) { double v = vec.doubleValue(i); if(v != v) { // NaN! return true; } } return false; }
[ "private", "boolean", "checkForNaNs", "(", "NumberVector", "vec", ")", "{", "for", "(", "int", "i", "=", "0", ",", "d", "=", "vec", ".", "getDimensionality", "(", ")", ";", "i", "<", "d", ";", "i", "++", ")", "{", "double", "v", "=", "vec", ".", ...
Check for NaN values. @param vec Vector @return {@code true} if NaN values are present.
[ "Check", "for", "NaN", "values", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki/src/main/java/de/lmu/ifi/dbs/elki/application/greedyensemble/EvaluatePrecomputedOutlierScores.java#L273-L281
train
elki-project/elki
elki-core/src/main/java/de/lmu/ifi/dbs/elki/database/DatabaseUtil.java
DatabaseUtil.guessLabelRepresentation
/**
 * Guess a potentially label-like representation, preferring class labels,
 * then label lists, then plain string relations.
 *
 * @param database Database to inspect
 * @return a string view of the first label-like relation found
 * @throws NoSupportedDataTypeException when no label-like relation exists
 */
public static Relation<String> guessLabelRepresentation(Database database) throws NoSupportedDataTypeException {
  // First choice: class labels.
  try {
    Relation<? extends ClassLabel> classrep = database.getRelation(TypeUtil.CLASSLABEL);
    if(classrep != null) {
      return new ConvertToStringView(classrep);
    }
  }
  catch(NoSupportedDataTypeException e) {
    // retry with the next representation.
  }
  // Second choice: label lists.
  try {
    Relation<? extends LabelList> labelsrep = database.getRelation(TypeUtil.LABELLIST);
    if(labelsrep != null) {
      return new ConvertToStringView(labelsrep);
    }
  }
  catch(NoSupportedDataTypeException e) {
    // retry with the next representation.
  }
  // Last resort: plain string relations need no conversion.
  try {
    Relation<String> stringrep = database.getRelation(TypeUtil.STRING);
    if(stringrep != null) {
      return stringrep;
    }
  }
  catch(NoSupportedDataTypeException e) {
    // fall through to the final exception.
  }
  throw new NoSupportedDataTypeException("No label-like representation was found.");
}
java
public static Relation<String> guessLabelRepresentation(Database database) throws NoSupportedDataTypeException { try { Relation<? extends ClassLabel> classrep = database.getRelation(TypeUtil.CLASSLABEL); if(classrep != null) { return new ConvertToStringView(classrep); } } catch(NoSupportedDataTypeException e) { // retry. } try { Relation<? extends LabelList> labelsrep = database.getRelation(TypeUtil.LABELLIST); if(labelsrep != null) { return new ConvertToStringView(labelsrep); } } catch(NoSupportedDataTypeException e) { // retry. } try { Relation<String> stringrep = database.getRelation(TypeUtil.STRING); if(stringrep != null) { return stringrep; } } catch(NoSupportedDataTypeException e) { // retry. } throw new NoSupportedDataTypeException("No label-like representation was found."); }
[ "public", "static", "Relation", "<", "String", ">", "guessLabelRepresentation", "(", "Database", "database", ")", "throws", "NoSupportedDataTypeException", "{", "try", "{", "Relation", "<", "?", "extends", "ClassLabel", ">", "classrep", "=", "database", ".", "getR...
Guess a potentially label-like representation, preferring class labels. @param database @return string representation
[ "Guess", "a", "potentially", "label", "-", "like", "representation", "preferring", "class", "labels", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-core/src/main/java/de/lmu/ifi/dbs/elki/database/DatabaseUtil.java#L64-L93
train
elki-project/elki
elki-core/src/main/java/de/lmu/ifi/dbs/elki/database/DatabaseUtil.java
DatabaseUtil.getObjectsByLabelMatch
/**
 * Find objects by matching their labels against a pattern.
 *
 * @param database Database to search in
 * @param name_pattern Name to match against class or object label; a
 *        {@code null} pattern matches nothing
 * @return IDs of all objects whose label matches the pattern (empty when
 *         the pattern is {@code null})
 */
public static ArrayModifiableDBIDs getObjectsByLabelMatch(Database database, Pattern name_pattern) {
  // FIX: check the pattern first - the original resolved the label
  // representation before this guard, doing needless work (and possibly
  // throwing NoSupportedDataTypeException) when the pattern is null.
  if(name_pattern == null) {
    return DBIDUtil.newArray();
  }
  Relation<String> relation = guessLabelRepresentation(database);
  ArrayModifiableDBIDs ret = DBIDUtil.newArray();
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    // find(): a partial match anywhere in the label suffices.
    if(name_pattern.matcher(relation.get(iditer)).find()) {
      ret.add(iditer);
    }
  }
  return ret;
}
java
public static ArrayModifiableDBIDs getObjectsByLabelMatch(Database database, Pattern name_pattern) { Relation<String> relation = guessLabelRepresentation(database); if(name_pattern == null) { return DBIDUtil.newArray(); } ArrayModifiableDBIDs ret = DBIDUtil.newArray(); for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) { if(name_pattern.matcher(relation.get(iditer)).find()) { ret.add(iditer); } } return ret; }
[ "public", "static", "ArrayModifiableDBIDs", "getObjectsByLabelMatch", "(", "Database", "database", ",", "Pattern", "name_pattern", ")", "{", "Relation", "<", "String", ">", "relation", "=", "guessLabelRepresentation", "(", "database", ")", ";", "if", "(", "name_patt...
Find object by matching their labels. @param database Database to search in @param name_pattern Name to match against class or object label @return found cluster or it throws an exception.
[ "Find", "object", "by", "matching", "their", "labels", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-core/src/main/java/de/lmu/ifi/dbs/elki/database/DatabaseUtil.java#L166-L178
train
elki-project/elki
elki-index-mtree/src/main/java/de/lmu/ifi/dbs/elki/index/tree/metrical/mtreevariants/mktrees/mkcop/MkCoPDirectoryEntry.java
MkCoPDirectoryEntry.writeExternal
/**
 * Calls the super method and writes the conservative approximation of the
 * knn distances of this entry to the specified stream.
 *
 * @param out the stream to write the object to
 * @throws IOException Includes any I/O exceptions that may occur
 */
@Override
public void writeExternal(ObjectOutput out) throws IOException {
  super.writeExternal(out);
  out.writeObject(conservativeApproximation);
}
java
@Override public void writeExternal(ObjectOutput out) throws IOException { super.writeExternal(out); out.writeObject(conservativeApproximation); }
[ "@", "Override", "public", "void", "writeExternal", "(", "ObjectOutput", "out", ")", "throws", "IOException", "{", "super", ".", "writeExternal", "(", "out", ")", ";", "out", ".", "writeObject", "(", "conservativeApproximation", ")", ";", "}" ]
Calls the super method and writes the conservative approximation of the knn distances of this entry to the specified stream. @param out the stream to write the object to @throws java.io.IOException Includes any I/O exceptions that may occur
[ "Calls", "the", "super", "method", "and", "writes", "the", "conservative", "approximation", "of", "the", "knn", "distances", "of", "this", "entry", "to", "the", "specified", "stream", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-index-mtree/src/main/java/de/lmu/ifi/dbs/elki/index/tree/metrical/mtreevariants/mktrees/mkcop/MkCoPDirectoryEntry.java#L110-L114
train
elki-project/elki
elki-index-mtree/src/main/java/de/lmu/ifi/dbs/elki/index/tree/metrical/mtreevariants/mktrees/mkcop/MkCoPDirectoryEntry.java
MkCoPDirectoryEntry.readExternal
/**
 * Calls the super method and reads the conservative approximation of the
 * knn distances of this entry from the specified input stream. Must read
 * fields in the exact order {@link #writeExternal} wrote them.
 *
 * @param in the stream to read data from in order to restore the object
 * @throws IOException if I/O errors occur
 * @throws ClassNotFoundException If the class for an object being restored
 *         cannot be found.
 */
@Override
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
  super.readExternal(in);
  conservativeApproximation = (ApproximationLine) in.readObject();
}
java
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { super.readExternal(in); conservativeApproximation = (ApproximationLine) in.readObject(); }
[ "@", "Override", "public", "void", "readExternal", "(", "ObjectInput", "in", ")", "throws", "IOException", ",", "ClassNotFoundException", "{", "super", ".", "readExternal", "(", "in", ")", ";", "conservativeApproximation", "=", "(", "ApproximationLine", ")", "in",...
Calls the super method and reads the the conservative approximation of the knn distances of this entry from the specified input stream. @param in the stream to read data from in order to restore the object @throws java.io.IOException if I/O errors occur @throws ClassNotFoundException If the class for an object being restored cannot be found.
[ "Calls", "the", "super", "method", "and", "reads", "the", "the", "conservative", "approximation", "of", "the", "knn", "distances", "of", "this", "entry", "from", "the", "specified", "input", "stream", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-index-mtree/src/main/java/de/lmu/ifi/dbs/elki/index/tree/metrical/mtreevariants/mktrees/mkcop/MkCoPDirectoryEntry.java#L125-L129
train
elki-project/elki
elki-outlier/src/main/java/de/lmu/ifi/dbs/elki/algorithm/outlier/distance/ReferenceBasedOutlierDetection.java
ReferenceBasedOutlierDetection.updateDensities
/**
 * Update the density estimates for each object, keeping the minimum
 * density seen over all reference points.
 *
 * @param rbod_score Density storage
 * @param referenceDists Distances from current reference point
 */
protected void updateDensities(WritableDoubleDataStore rbod_score, DoubleDBIDList referenceDists) {
  DoubleDBIDListIter it = referenceDists.iter();
  for(int l = 0; l < referenceDists.size(); l++) {
    double density = computeDensity(referenceDists, it, l);
    // computeDensity modified the iterator, reset:
    it.seek(l);
    // NaN indicates the first run. The negated comparison is deliberately
    // true for NaN as well, so the first density (and every smaller
    // subsequent density) is stored.
    if(!(density > rbod_score.doubleValue(it))) {
      rbod_score.putDouble(it, density);
    }
  }
}
java
protected void updateDensities(WritableDoubleDataStore rbod_score, DoubleDBIDList referenceDists) { DoubleDBIDListIter it = referenceDists.iter(); for(int l = 0; l < referenceDists.size(); l++) { double density = computeDensity(referenceDists, it, l); // computeDensity modified the iterator, reset: it.seek(l); // NaN indicates the first run. if(!(density > rbod_score.doubleValue(it))) { rbod_score.putDouble(it, density); } } }
[ "protected", "void", "updateDensities", "(", "WritableDoubleDataStore", "rbod_score", ",", "DoubleDBIDList", "referenceDists", ")", "{", "DoubleDBIDListIter", "it", "=", "referenceDists", ".", "iter", "(", ")", ";", "for", "(", "int", "l", "=", "0", ";", "l", ...
Update the density estimates for each object. @param rbod_score Density storage @param referenceDists Distances from current reference point
[ "Update", "the", "density", "estimates", "for", "each", "object", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-outlier/src/main/java/de/lmu/ifi/dbs/elki/algorithm/outlier/distance/ReferenceBasedOutlierDetection.java#L197-L208
train
elki-project/elki
elki-clustering/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/kmeans/initialization/KMeansPlusPlusInitialMeans.java
KMeansPlusPlusInitialMeans.chooseRemaining
static void chooseRemaining(Relation<? extends NumberVector> relation, DBIDs ids, DistanceQuery<NumberVector> distQ, int k, List<NumberVector> means, WritableDoubleDataStore weights, double weightsum, Random random) { while(true) { if(weightsum > Double.MAX_VALUE) { throw new IllegalStateException("Could not choose a reasonable mean - too many data points, too large distance sum?"); } if(weightsum < Double.MIN_NORMAL) { LoggingUtil.warning("Could not choose a reasonable mean - to few data points?"); } double r = random.nextDouble() * weightsum; while(r <= 0 && weightsum > Double.MIN_NORMAL) { r = random.nextDouble() * weightsum; // Try harder to not choose 0. } DBIDIter it = ids.iter(); while(it.valid()) { if((r -= weights.doubleValue(it)) < 0) { break; } it.advance(); } if(!it.valid()) { // Rare case, but happens due to floating math weightsum -= r; // Decrease continue; // Retry } // Add new mean: final NumberVector newmean = relation.get(it); means.add(newmean); if(means.size() >= k) { break; } // Update weights: weights.putDouble(it, 0.); weightsum = updateWeights(weights, ids, newmean, distQ); } }
java
static void chooseRemaining(Relation<? extends NumberVector> relation, DBIDs ids, DistanceQuery<NumberVector> distQ, int k, List<NumberVector> means, WritableDoubleDataStore weights, double weightsum, Random random) { while(true) { if(weightsum > Double.MAX_VALUE) { throw new IllegalStateException("Could not choose a reasonable mean - too many data points, too large distance sum?"); } if(weightsum < Double.MIN_NORMAL) { LoggingUtil.warning("Could not choose a reasonable mean - to few data points?"); } double r = random.nextDouble() * weightsum; while(r <= 0 && weightsum > Double.MIN_NORMAL) { r = random.nextDouble() * weightsum; // Try harder to not choose 0. } DBIDIter it = ids.iter(); while(it.valid()) { if((r -= weights.doubleValue(it)) < 0) { break; } it.advance(); } if(!it.valid()) { // Rare case, but happens due to floating math weightsum -= r; // Decrease continue; // Retry } // Add new mean: final NumberVector newmean = relation.get(it); means.add(newmean); if(means.size() >= k) { break; } // Update weights: weights.putDouble(it, 0.); weightsum = updateWeights(weights, ids, newmean, distQ); } }
[ "static", "void", "chooseRemaining", "(", "Relation", "<", "?", "extends", "NumberVector", ">", "relation", ",", "DBIDs", "ids", ",", "DistanceQuery", "<", "NumberVector", ">", "distQ", ",", "int", "k", ",", "List", "<", "NumberVector", ">", "means", ",", ...
Choose remaining means, weighted by distance. @param relation Data relation @param ids IDs @param distQ Distance function @param k Number of means to choose @param means Means storage @param weights Weights (initialized!) @param weightsum Sum of weights @param random Random generator
[ "Choose", "remaining", "means", "weighted", "by", "distance", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-clustering/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/kmeans/initialization/KMeansPlusPlusInitialMeans.java#L162-L195
train
elki-project/elki
elki-input/src/main/java/de/lmu/ifi/dbs/elki/datasource/filter/normalization/columnwise/AttributeWiseMinMaxNormalization.java
AttributeWiseMinMaxNormalization.factor
/**
 * Returns a factor for normalization in a certain dimension: the
 * maximum-minimum span when the two differ, otherwise the maximum if it is
 * positive, otherwise 1.
 *
 * @param dimension the dimension to get a factor for normalization
 * @return a factor for normalization in a certain dimension
 */
private double factor(int dimension) {
  final double max = maxima[dimension], min = minima[dimension];
  if(max > min) {
    return max - min;
  }
  // Degenerate range: fall back to the maximum, or 1 when not positive.
  return max > 0 ? max : 1;
}
java
private double factor(int dimension) { return maxima[dimension] > minima[dimension] ? maxima[dimension] - minima[dimension] : maxima[dimension] > 0 ? maxima[dimension] : 1; }
[ "private", "double", "factor", "(", "int", "dimension", ")", "{", "return", "maxima", "[", "dimension", "]", ">", "minima", "[", "dimension", "]", "?", "maxima", "[", "dimension", "]", "-", "minima", "[", "dimension", "]", ":", "maxima", "[", "dimension"...
Returns a factor for normalization in a certain dimension. The provided factor is the maximum-minimum in the specified dimension, if these two values differ, otherwise it is the maximum if this value differs from 0, otherwise it is 1. @param dimension the dimension to get a factor for normalization @return a factor for normalization in a certain dimension
[ "Returns", "a", "factor", "for", "normalization", "in", "a", "certain", "dimension", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-input/src/main/java/de/lmu/ifi/dbs/elki/datasource/filter/normalization/columnwise/AttributeWiseMinMaxNormalization.java#L159-L161
train
elki-project/elki
elki-core-distance/src/main/java/de/lmu/ifi/dbs/elki/distance/distancefunction/timeseries/DerivativeDTWDistanceFunction.java
DerivativeDTWDistanceFunction.derivative
protected double derivative(int i, NumberVector v) { final int dim = v.getDimensionality(); if(dim == 1) { return 0.; } // Adjust for boundary conditions, as per the article: i = (i == 0) ? 1 : (i == dim - 1) ? dim - 2 : i; return (v.doubleValue(i) - v.doubleValue(i - 1) + (v.doubleValue(i + 1) - v.doubleValue(i - 1)) * .5) * .5; }
java
protected double derivative(int i, NumberVector v) { final int dim = v.getDimensionality(); if(dim == 1) { return 0.; } // Adjust for boundary conditions, as per the article: i = (i == 0) ? 1 : (i == dim - 1) ? dim - 2 : i; return (v.doubleValue(i) - v.doubleValue(i - 1) + (v.doubleValue(i + 1) - v.doubleValue(i - 1)) * .5) * .5; }
[ "protected", "double", "derivative", "(", "int", "i", ",", "NumberVector", "v", ")", "{", "final", "int", "dim", "=", "v", ".", "getDimensionality", "(", ")", ";", "if", "(", "dim", "==", "1", ")", "{", "return", "0.", ";", "}", "// Adjust for boundary...
Given a NumberVector and the position of an element, approximates the gradient of given element. @return Derivative as double
[ "Given", "a", "NumberVector", "and", "the", "position", "of", "an", "element", "approximates", "the", "gradient", "of", "given", "element", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-core-distance/src/main/java/de/lmu/ifi/dbs/elki/distance/distancefunction/timeseries/DerivativeDTWDistanceFunction.java#L143-L151
train
elki-project/elki
elki-index-mtree/src/main/java/de/lmu/ifi/dbs/elki/index/tree/metrical/mtreevariants/strategies/split/RandomSplit.java
RandomSplit.split
/**
 * Selects two random entries of the given node as promotion candidates,
 * computes the distance of every other entry to both, and delegates the
 * actual entry assignment to the distributor.
 * NOTE(review): earlier documentation described an m-RAD strategy; the
 * code visibly performs purely random promotion - confirm intent.
 *
 * @param tree Tree to use
 * @param node the node to be split
 */
@Override
public Assignments<E> split(AbstractMTree<?, N, E, ?> tree, N node) {
  final int n = node.getNumEntries();
  // pos2 is drawn from n-1 values and shifted past pos1, guaranteeing
  // two distinct positions.
  int pos1 = random.nextInt(n), pos2 = random.nextInt(n - 1);
  pos2 = pos2 >= pos1 ? pos2 + 1 : pos2;
  // Build distance arrays:
  double[] dis1 = new double[n], dis2 = new double[n];
  E e1 = node.getEntry(pos1), e2 = node.getEntry(pos2);
  for(int i = 0; i < n; i++) {
    // The promoted entries keep distance 0 to themselves.
    if(i == pos1 || i == pos2) {
      continue;
    }
    final E ej = node.getEntry(i);
    dis1[i] = tree.distance(e1, ej);
    dis2[i] = tree.distance(e2, ej);
  }
  return distributor.distribute(node, pos1, dis1, pos2, dis2);
}
java
@Override public Assignments<E> split(AbstractMTree<?, N, E, ?> tree, N node) { final int n = node.getNumEntries(); int pos1 = random.nextInt(n), pos2 = random.nextInt(n - 1); pos2 = pos2 >= pos1 ? pos2 + 1 : pos2; // Build distance arrays: double[] dis1 = new double[n], dis2 = new double[n]; E e1 = node.getEntry(pos1), e2 = node.getEntry(pos2); for(int i = 0; i < n; i++) { if(i == pos1 || i == pos2) { continue; } final E ej = node.getEntry(i); dis1[i] = tree.distance(e1, ej); dis2[i] = tree.distance(e2, ej); } return distributor.distribute(node, pos1, dis1, pos2, dis2); }
[ "@", "Override", "public", "Assignments", "<", "E", ">", "split", "(", "AbstractMTree", "<", "?", ",", "N", ",", "E", ",", "?", ">", "tree", ",", "N", "node", ")", "{", "final", "int", "n", "=", "node", ".", "getNumEntries", "(", ")", ";", "int",...
Selects two objects of the specified node to be promoted and stored into the parent node. The m-RAD strategy considers all possible pairs of objects and, after partitioning the set of entries, promotes the pair of objects for which the sum of covering radiuses is minimum. @param tree Tree to use @param node the node to be split
[ "Selects", "two", "objects", "of", "the", "specified", "node", "to", "be", "promoted", "and", "stored", "into", "the", "parent", "node", ".", "The", "m", "-", "RAD", "strategy", "considers", "all", "possible", "pairs", "of", "objects", "and", "after", "par...
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-index-mtree/src/main/java/de/lmu/ifi/dbs/elki/index/tree/metrical/mtreevariants/strategies/split/RandomSplit.java#L86-L104
train
elki-project/elki
addons/batikvis/src/main/java/de/lmu/ifi/dbs/elki/visualization/css/CSSClass.java
CSSClass.checkCSSStatements
/**
 * Validate a set of CSS statements.
 *
 * TODO: checks are currently not very extensive.
 *
 * @param statements Statements to check
 * @return true if all statements are valid
 */
public static boolean checkCSSStatements(Collection<Pair<String, String>> statements) {
  // Valid iff every key/value pair passes the single-statement check.
  return statements.stream() //
      .allMatch(stmt -> checkCSSStatement(stmt.getFirst(), stmt.getSecond()));
}
java
public static boolean checkCSSStatements(Collection<Pair<String,String>> statements) { for (Pair<String, String> pair : statements) { if (!checkCSSStatement(pair.getFirst(), pair.getSecond())) { return false; } } return true; }
[ "public", "static", "boolean", "checkCSSStatements", "(", "Collection", "<", "Pair", "<", "String", ",", "String", ">", ">", "statements", ")", "{", "for", "(", "Pair", "<", "String", ",", "String", ">", "pair", ":", "statements", ")", "{", "if", "(", ...
Validate a set of CSS statements. TODO: checks are currently not very extensive. @param statements Statements to check @return true if valid
[ "Validate", "a", "set", "of", "CSS", "statements", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/addons/batikvis/src/main/java/de/lmu/ifi/dbs/elki/visualization/css/CSSClass.java#L147-L154
train
elki-project/elki
addons/batikvis/src/main/java/de/lmu/ifi/dbs/elki/visualization/css/CSSClass.java
CSSClass.getStatement
/**
 * Get the current value of a particular CSS statement.
 *
 * @param key statement key.
 * @return current value or null.
 */
public String getStatement(String key) {
  // Linear scan for the first pair whose key matches.
  return statements.stream() //
      .filter(stmt -> stmt.getFirst().equals(key)) //
      .map(Pair::getSecond) //
      .findFirst().orElse(null);
}
java
public String getStatement(String key) { for (Pair<String, String> pair : statements) { if (pair.getFirst().equals(key)) { return pair.getSecond(); } } return null; }
[ "public", "String", "getStatement", "(", "String", "key", ")", "{", "for", "(", "Pair", "<", "String", ",", "String", ">", "pair", ":", "statements", ")", "{", "if", "(", "pair", ".", "getFirst", "(", ")", ".", "equals", "(", "key", ")", ")", "{", ...
Get the current value of a particular CSS statement. @param key statement key. @return current value or null.
[ "Get", "the", "current", "value", "of", "a", "particular", "CSS", "statement", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/addons/batikvis/src/main/java/de/lmu/ifi/dbs/elki/visualization/css/CSSClass.java#L189-L196
train
elki-project/elki
addons/batikvis/src/main/java/de/lmu/ifi/dbs/elki/visualization/css/CSSClass.java
CSSClass.setStatement
public void setStatement(String key, String value) { if (value != null && !checkCSSStatement(key, value)) { throw new InvalidCSS("Invalid CSS statement."); } for (Pair<String, String> pair : statements) { if (pair.getFirst().equals(key)) { if (value != null) { pair.setSecond(value); } else { statements.remove(pair); } return; } } if (value != null) { statements.add(new Pair<>(key, value)); } }
java
public void setStatement(String key, String value) { if (value != null && !checkCSSStatement(key, value)) { throw new InvalidCSS("Invalid CSS statement."); } for (Pair<String, String> pair : statements) { if (pair.getFirst().equals(key)) { if (value != null) { pair.setSecond(value); } else { statements.remove(pair); } return; } } if (value != null) { statements.add(new Pair<>(key, value)); } }
[ "public", "void", "setStatement", "(", "String", "key", ",", "String", "value", ")", "{", "if", "(", "value", "!=", "null", "&&", "!", "checkCSSStatement", "(", "key", ",", "value", ")", ")", "{", "throw", "new", "InvalidCSS", "(", "\"Invalid CSS statement...
Set a CSS statement. @param key Statement key. @param value Value or null (to unset)
[ "Set", "a", "CSS", "statement", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/addons/batikvis/src/main/java/de/lmu/ifi/dbs/elki/visualization/css/CSSClass.java#L213-L230
train
elki-project/elki
addons/batikvis/src/main/java/de/lmu/ifi/dbs/elki/visualization/css/CSSClass.java
CSSClass.appendCSSDefinition
public void appendCSSDefinition(StringBuilder buf) { buf.append("\n."); buf.append(name); buf.append('{'); for (Pair<String, String> pair : statements) { buf.append(pair.getFirst()); buf.append(':'); buf.append(pair.getSecond()); buf.append(";\n"); } buf.append("}\n"); }
java
public void appendCSSDefinition(StringBuilder buf) { buf.append("\n."); buf.append(name); buf.append('{'); for (Pair<String, String> pair : statements) { buf.append(pair.getFirst()); buf.append(':'); buf.append(pair.getSecond()); buf.append(";\n"); } buf.append("}\n"); }
[ "public", "void", "appendCSSDefinition", "(", "StringBuilder", "buf", ")", "{", "buf", ".", "append", "(", "\"\\n.\"", ")", ";", "buf", ".", "append", "(", "name", ")", ";", "buf", ".", "append", "(", "'", "'", ")", ";", "for", "(", "Pair", "<", "S...
Append CSS definition to a stream @param buf String buffer to append to.
[ "Append", "CSS", "definition", "to", "a", "stream" ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/addons/batikvis/src/main/java/de/lmu/ifi/dbs/elki/visualization/css/CSSClass.java#L266-L277
train
elki-project/elki
addons/batikvis/src/main/java/de/lmu/ifi/dbs/elki/visualization/css/CSSClass.java
CSSClass.inlineCSS
public String inlineCSS() { StringBuilder buf = new StringBuilder(); for (Pair<String, String> pair : statements) { buf.append(pair.getFirst()); buf.append(':'); buf.append(pair.getSecond()); buf.append(';'); } return buf.toString(); }
java
public String inlineCSS() { StringBuilder buf = new StringBuilder(); for (Pair<String, String> pair : statements) { buf.append(pair.getFirst()); buf.append(':'); buf.append(pair.getSecond()); buf.append(';'); } return buf.toString(); }
[ "public", "String", "inlineCSS", "(", ")", "{", "StringBuilder", "buf", "=", "new", "StringBuilder", "(", ")", ";", "for", "(", "Pair", "<", "String", ",", "String", ">", "pair", ":", "statements", ")", "{", "buf", ".", "append", "(", "pair", ".", "g...
Render CSS class to inline formatting @return string rendition of CSS for inline use
[ "Render", "CSS", "class", "to", "inline", "formatting" ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/addons/batikvis/src/main/java/de/lmu/ifi/dbs/elki/visualization/css/CSSClass.java#L303-L312
train
elki-project/elki
elki-input/src/main/java/de/lmu/ifi/dbs/elki/datasource/filter/normalization/columnwise/AttributeWiseCDFNormalization.java
AttributeWiseCDFNormalization.findBestFit
protected Distribution findBestFit(final List<V> col, Adapter adapter, int d, double[] test) { if(estimators.size() == 1) { return estimators.get(0).estimate(col, adapter); } Distribution best = null; double bestq = Double.POSITIVE_INFINITY; trials: for(DistributionEstimator<?> est : estimators) { try { Distribution dist = est.estimate(col, adapter); for(int i = 0; i < test.length; i++) { test[i] = dist.cdf(col.get(i).doubleValue(d)); if(Double.isNaN(test[i])) { LOG.warning("Got NaN after fitting " + est + ": " + dist); continue trials; } if(Double.isInfinite(test[i])) { LOG.warning("Got infinite value after fitting " + est + ": " + dist); continue trials; } } Arrays.sort(test); double q = KolmogorovSmirnovTest.simpleTest(test); if(LOG.isVeryVerbose()) { LOG.veryverbose("Estimator " + est + " (" + dist + ") has maximum deviation " + q + " for dimension " + d); } if(best == null || q < bestq) { best = dist; bestq = q; } } catch(ArithmeticException e) { if(LOG.isVeryVerbose()) { LOG.veryverbose("Fitting distribution " + est + " failed: " + e.getMessage()); } continue trials; } } if(LOG.isVerbose()) { LOG.verbose("Best fit for dimension " + d + ": " + best); } return best; }
java
protected Distribution findBestFit(final List<V> col, Adapter adapter, int d, double[] test) { if(estimators.size() == 1) { return estimators.get(0).estimate(col, adapter); } Distribution best = null; double bestq = Double.POSITIVE_INFINITY; trials: for(DistributionEstimator<?> est : estimators) { try { Distribution dist = est.estimate(col, adapter); for(int i = 0; i < test.length; i++) { test[i] = dist.cdf(col.get(i).doubleValue(d)); if(Double.isNaN(test[i])) { LOG.warning("Got NaN after fitting " + est + ": " + dist); continue trials; } if(Double.isInfinite(test[i])) { LOG.warning("Got infinite value after fitting " + est + ": " + dist); continue trials; } } Arrays.sort(test); double q = KolmogorovSmirnovTest.simpleTest(test); if(LOG.isVeryVerbose()) { LOG.veryverbose("Estimator " + est + " (" + dist + ") has maximum deviation " + q + " for dimension " + d); } if(best == null || q < bestq) { best = dist; bestq = q; } } catch(ArithmeticException e) { if(LOG.isVeryVerbose()) { LOG.veryverbose("Fitting distribution " + est + " failed: " + e.getMessage()); } continue trials; } } if(LOG.isVerbose()) { LOG.verbose("Best fit for dimension " + d + ": " + best); } return best; }
[ "protected", "Distribution", "findBestFit", "(", "final", "List", "<", "V", ">", "col", ",", "Adapter", "adapter", ",", "int", "d", ",", "double", "[", "]", "test", ")", "{", "if", "(", "estimators", ".", "size", "(", ")", "==", "1", ")", "{", "ret...
Find the best fitting distribution. @param col Column of table @param adapter Adapter for accessing the data @param d Dimension @param test Scatch space for testing goodness of fit @return Best fit distribution
[ "Find", "the", "best", "fitting", "distribution", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-input/src/main/java/de/lmu/ifi/dbs/elki/datasource/filter/normalization/columnwise/AttributeWiseCDFNormalization.java#L160-L201
train
elki-project/elki
elki-input/src/main/java/de/lmu/ifi/dbs/elki/datasource/filter/normalization/columnwise/AttributeWiseCDFNormalization.java
AttributeWiseCDFNormalization.constantZero
protected boolean constantZero(List<V> column, Adapter adapter) { for(int i = 0, s = adapter.size(column); i < s; i++) { if(adapter.get(column, i) != 0.) { return false; } } return true; }
java
protected boolean constantZero(List<V> column, Adapter adapter) { for(int i = 0, s = adapter.size(column); i < s; i++) { if(adapter.get(column, i) != 0.) { return false; } } return true; }
[ "protected", "boolean", "constantZero", "(", "List", "<", "V", ">", "column", ",", "Adapter", "adapter", ")", "{", "for", "(", "int", "i", "=", "0", ",", "s", "=", "adapter", ".", "size", "(", "column", ")", ";", "i", "<", "s", ";", "i", "++", ...
Test if an attribute is constant zero. @param column Column @param adapter Data accessor. @return {@code true} if all values are zero
[ "Test", "if", "an", "attribute", "is", "constant", "zero", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-input/src/main/java/de/lmu/ifi/dbs/elki/datasource/filter/normalization/columnwise/AttributeWiseCDFNormalization.java#L210-L217
train
elki-project/elki
elki-input/src/main/java/de/lmu/ifi/dbs/elki/datasource/parser/ArffParser.java
ArffParser.makeArffTokenizer
private StreamTokenizer makeArffTokenizer(BufferedReader br) { // Setup tokenizer StreamTokenizer tokenizer = new StreamTokenizer(br); { tokenizer.resetSyntax(); tokenizer.whitespaceChars(0, ' '); tokenizer.ordinaryChars('0', '9'); // Do not parse numbers tokenizer.ordinaryChar('-'); tokenizer.ordinaryChar('.'); tokenizer.wordChars(' ' + 1, '\u00FF'); tokenizer.whitespaceChars(',', ','); tokenizer.commentChar('%'); tokenizer.quoteChar('"'); tokenizer.quoteChar('\''); tokenizer.ordinaryChar('{'); tokenizer.ordinaryChar('}'); tokenizer.eolIsSignificant(true); } return tokenizer; }
java
private StreamTokenizer makeArffTokenizer(BufferedReader br) { // Setup tokenizer StreamTokenizer tokenizer = new StreamTokenizer(br); { tokenizer.resetSyntax(); tokenizer.whitespaceChars(0, ' '); tokenizer.ordinaryChars('0', '9'); // Do not parse numbers tokenizer.ordinaryChar('-'); tokenizer.ordinaryChar('.'); tokenizer.wordChars(' ' + 1, '\u00FF'); tokenizer.whitespaceChars(',', ','); tokenizer.commentChar('%'); tokenizer.quoteChar('"'); tokenizer.quoteChar('\''); tokenizer.ordinaryChar('{'); tokenizer.ordinaryChar('}'); tokenizer.eolIsSignificant(true); } return tokenizer; }
[ "private", "StreamTokenizer", "makeArffTokenizer", "(", "BufferedReader", "br", ")", "{", "// Setup tokenizer", "StreamTokenizer", "tokenizer", "=", "new", "StreamTokenizer", "(", "br", ")", ";", "{", "tokenizer", ".", "resetSyntax", "(", ")", ";", "tokenizer", "....
Make a StreamTokenizer for the ARFF format. @param br Buffered reader @return Tokenizer
[ "Make", "a", "StreamTokenizer", "for", "the", "ARFF", "format", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-input/src/main/java/de/lmu/ifi/dbs/elki/datasource/parser/ArffParser.java#L383-L402
train
elki-project/elki
elki-input/src/main/java/de/lmu/ifi/dbs/elki/datasource/parser/ArffParser.java
ArffParser.setupBundleHeaders
private void setupBundleHeaders(ArrayList<String> names, int[] targ, TypeInformation[] etyp, int[] dimsize, MultipleObjectsBundle bundle, boolean sparse) { for(int in = 0, out = 0; in < targ.length; out++) { int nin = in + 1; for(; nin < targ.length; nin++) { if(targ[nin] != targ[in]) { break; } } if(TypeUtil.NUMBER_VECTOR_FIELD.equals(etyp[out])) { String[] labels = new String[dimsize[out]]; // Collect labels: for(int i = 0; i < dimsize[out]; i++) { labels[i] = names.get(out + i); } if(!sparse) { VectorFieldTypeInformation<DoubleVector> type = new VectorFieldTypeInformation<>(DoubleVector.FACTORY, dimsize[out], labels); bundle.appendColumn(type, new ArrayList<DoubleVector>()); } else { VectorFieldTypeInformation<SparseDoubleVector> type = new VectorFieldTypeInformation<>(SparseDoubleVector.FACTORY, dimsize[out], labels); bundle.appendColumn(type, new ArrayList<SparseDoubleVector>()); } } else if(TypeUtil.LABELLIST.equals(etyp[out])) { StringBuilder label = new StringBuilder(names.get(out)); for(int i = 1; i < dimsize[out]; i++) { label.append(' ').append(names.get(out + i)); } bundle.appendColumn(new SimpleTypeInformation<>(LabelList.class, label.toString()), new ArrayList<LabelList>()); } else if(TypeUtil.EXTERNALID.equals(etyp[out])) { bundle.appendColumn(new SimpleTypeInformation<>(ExternalID.class, names.get(out)), new ArrayList<ExternalID>()); } else if(TypeUtil.CLASSLABEL.equals(etyp[out])) { bundle.appendColumn(new SimpleTypeInformation<>(ClassLabel.class, names.get(out)), new ArrayList<ClassLabel>()); } else { throw new AbortException("Unsupported type for column " + in + "->" + out + ": " + ((etyp[out] != null) ? etyp[out].toString() : "null")); } assert (out == bundle.metaLength() - 1); in = nin; } }
java
private void setupBundleHeaders(ArrayList<String> names, int[] targ, TypeInformation[] etyp, int[] dimsize, MultipleObjectsBundle bundle, boolean sparse) { for(int in = 0, out = 0; in < targ.length; out++) { int nin = in + 1; for(; nin < targ.length; nin++) { if(targ[nin] != targ[in]) { break; } } if(TypeUtil.NUMBER_VECTOR_FIELD.equals(etyp[out])) { String[] labels = new String[dimsize[out]]; // Collect labels: for(int i = 0; i < dimsize[out]; i++) { labels[i] = names.get(out + i); } if(!sparse) { VectorFieldTypeInformation<DoubleVector> type = new VectorFieldTypeInformation<>(DoubleVector.FACTORY, dimsize[out], labels); bundle.appendColumn(type, new ArrayList<DoubleVector>()); } else { VectorFieldTypeInformation<SparseDoubleVector> type = new VectorFieldTypeInformation<>(SparseDoubleVector.FACTORY, dimsize[out], labels); bundle.appendColumn(type, new ArrayList<SparseDoubleVector>()); } } else if(TypeUtil.LABELLIST.equals(etyp[out])) { StringBuilder label = new StringBuilder(names.get(out)); for(int i = 1; i < dimsize[out]; i++) { label.append(' ').append(names.get(out + i)); } bundle.appendColumn(new SimpleTypeInformation<>(LabelList.class, label.toString()), new ArrayList<LabelList>()); } else if(TypeUtil.EXTERNALID.equals(etyp[out])) { bundle.appendColumn(new SimpleTypeInformation<>(ExternalID.class, names.get(out)), new ArrayList<ExternalID>()); } else if(TypeUtil.CLASSLABEL.equals(etyp[out])) { bundle.appendColumn(new SimpleTypeInformation<>(ClassLabel.class, names.get(out)), new ArrayList<ClassLabel>()); } else { throw new AbortException("Unsupported type for column " + in + "->" + out + ": " + ((etyp[out] != null) ? etyp[out].toString() : "null")); } assert (out == bundle.metaLength() - 1); in = nin; } }
[ "private", "void", "setupBundleHeaders", "(", "ArrayList", "<", "String", ">", "names", ",", "int", "[", "]", "targ", ",", "TypeInformation", "[", "]", "etyp", ",", "int", "[", "]", "dimsize", ",", "MultipleObjectsBundle", "bundle", ",", "boolean", "sparse",...
Setup the headers for the object bundle. @param names Attribute names @param targ Target columns @param etyp ELKI type information @param dimsize Number of dimensions in the individual types @param bundle Output bundle @param sparse Flag to create sparse vectors
[ "Setup", "the", "headers", "for", "the", "object", "bundle", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-input/src/main/java/de/lmu/ifi/dbs/elki/datasource/parser/ArffParser.java#L414-L456
train
elki-project/elki
elki-input/src/main/java/de/lmu/ifi/dbs/elki/datasource/parser/ArffParser.java
ArffParser.readHeader
private void readHeader(BufferedReader br) throws IOException { String line; // Locate header line while(true) { line = br.readLine(); if(line == null) { throw new AbortException(ARFF_HEADER_RELATION + " not found in file."); } // Skip comments and empty lines if(ARFF_COMMENT.reset(line).matches() || EMPTY.reset(line).matches()) { continue; } // Break on relation statement if(ARFF_HEADER_RELATION.reset(line).matches()) { break; } throw new AbortException("Expected relation declaration: " + line); } }
java
private void readHeader(BufferedReader br) throws IOException { String line; // Locate header line while(true) { line = br.readLine(); if(line == null) { throw new AbortException(ARFF_HEADER_RELATION + " not found in file."); } // Skip comments and empty lines if(ARFF_COMMENT.reset(line).matches() || EMPTY.reset(line).matches()) { continue; } // Break on relation statement if(ARFF_HEADER_RELATION.reset(line).matches()) { break; } throw new AbortException("Expected relation declaration: " + line); } }
[ "private", "void", "readHeader", "(", "BufferedReader", "br", ")", "throws", "IOException", "{", "String", "line", ";", "// Locate header line", "while", "(", "true", ")", "{", "line", "=", "br", ".", "readLine", "(", ")", ";", "if", "(", "line", "==", "...
Read the dataset header part of the ARFF file, to ensure consistency. @param br Buffered Reader @throws IOException
[ "Read", "the", "dataset", "header", "part", "of", "the", "ARFF", "file", "to", "ensure", "consistency", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-input/src/main/java/de/lmu/ifi/dbs/elki/datasource/parser/ArffParser.java#L464-L482
train
elki-project/elki
elki-input/src/main/java/de/lmu/ifi/dbs/elki/datasource/parser/ArffParser.java
ArffParser.nextToken
private void nextToken(StreamTokenizer tokenizer) throws IOException { tokenizer.nextToken(); if((tokenizer.ttype == '\'') || (tokenizer.ttype == '"')) { tokenizer.ttype = StreamTokenizer.TT_WORD; } else if((tokenizer.ttype == StreamTokenizer.TT_WORD) && (tokenizer.sval.equals("?"))) { tokenizer.ttype = '?'; } if(LOG.isDebugging()) { if(tokenizer.ttype == StreamTokenizer.TT_NUMBER) { LOG.debug("token: " + tokenizer.nval); } else if(tokenizer.ttype == StreamTokenizer.TT_WORD) { LOG.debug("token: " + tokenizer.sval); } else if(tokenizer.ttype == StreamTokenizer.TT_EOF) { LOG.debug("token: EOF"); } else if(tokenizer.ttype == StreamTokenizer.TT_EOL) { LOG.debug("token: EOL"); } else { LOG.debug("token type: " + tokenizer.ttype); } } }
java
private void nextToken(StreamTokenizer tokenizer) throws IOException { tokenizer.nextToken(); if((tokenizer.ttype == '\'') || (tokenizer.ttype == '"')) { tokenizer.ttype = StreamTokenizer.TT_WORD; } else if((tokenizer.ttype == StreamTokenizer.TT_WORD) && (tokenizer.sval.equals("?"))) { tokenizer.ttype = '?'; } if(LOG.isDebugging()) { if(tokenizer.ttype == StreamTokenizer.TT_NUMBER) { LOG.debug("token: " + tokenizer.nval); } else if(tokenizer.ttype == StreamTokenizer.TT_WORD) { LOG.debug("token: " + tokenizer.sval); } else if(tokenizer.ttype == StreamTokenizer.TT_EOF) { LOG.debug("token: EOF"); } else if(tokenizer.ttype == StreamTokenizer.TT_EOL) { LOG.debug("token: EOL"); } else { LOG.debug("token type: " + tokenizer.ttype); } } }
[ "private", "void", "nextToken", "(", "StreamTokenizer", "tokenizer", ")", "throws", "IOException", "{", "tokenizer", ".", "nextToken", "(", ")", ";", "if", "(", "(", "tokenizer", ".", "ttype", "==", "'", "'", ")", "||", "(", "tokenizer", ".", "ttype", "=...
Helper function for token handling. @param tokenizer Tokenizer @throws IOException
[ "Helper", "function", "for", "token", "handling", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-input/src/main/java/de/lmu/ifi/dbs/elki/datasource/parser/ArffParser.java#L598-L623
train
elki-project/elki
addons/batikvis/src/main/java/de/lmu/ifi/dbs/elki/visualization/opticsplot/OPTICSCut.java
OPTICSCut.makeOPTICSCut
public static <E extends ClusterOrder> Clustering<Model> makeOPTICSCut(E co, double epsilon) { // Clustering model we are building Clustering<Model> clustering = new Clustering<>("OPTICS Cut Clustering", "optics-cut"); // Collects noise elements ModifiableDBIDs noise = DBIDUtil.newHashSet(); double lastDist = Double.MAX_VALUE; double actDist = Double.MAX_VALUE; // Current working set ModifiableDBIDs current = DBIDUtil.newHashSet(); // TODO: can we implement this more nicely with a 1-lookahead? DBIDVar prev = DBIDUtil.newVar(); for(DBIDIter it = co.iter(); it.valid(); prev.set(it), it.advance()) { lastDist = actDist; actDist = co.getReachability(it); if(actDist <= epsilon) { // the last element before the plot drops belongs to the cluster if(lastDist > epsilon && prev.isSet()) { // So un-noise it noise.remove(prev); // Add it to the cluster current.add(prev); } current.add(it); } else { // 'Finish' the previous cluster if(!current.isEmpty()) { // TODO: do we want a minpts restriction? // But we get have only core points guaranteed anyway. clustering.addToplevelCluster(new Cluster<Model>(current, ClusterModel.CLUSTER)); current = DBIDUtil.newHashSet(); } // Add to noise noise.add(it); } } // Any unfinished cluster will also be added if(!current.isEmpty()) { clustering.addToplevelCluster(new Cluster<Model>(current, ClusterModel.CLUSTER)); } // Add noise clustering.addToplevelCluster(new Cluster<Model>(noise, true, ClusterModel.CLUSTER)); return clustering; }
java
public static <E extends ClusterOrder> Clustering<Model> makeOPTICSCut(E co, double epsilon) { // Clustering model we are building Clustering<Model> clustering = new Clustering<>("OPTICS Cut Clustering", "optics-cut"); // Collects noise elements ModifiableDBIDs noise = DBIDUtil.newHashSet(); double lastDist = Double.MAX_VALUE; double actDist = Double.MAX_VALUE; // Current working set ModifiableDBIDs current = DBIDUtil.newHashSet(); // TODO: can we implement this more nicely with a 1-lookahead? DBIDVar prev = DBIDUtil.newVar(); for(DBIDIter it = co.iter(); it.valid(); prev.set(it), it.advance()) { lastDist = actDist; actDist = co.getReachability(it); if(actDist <= epsilon) { // the last element before the plot drops belongs to the cluster if(lastDist > epsilon && prev.isSet()) { // So un-noise it noise.remove(prev); // Add it to the cluster current.add(prev); } current.add(it); } else { // 'Finish' the previous cluster if(!current.isEmpty()) { // TODO: do we want a minpts restriction? // But we get have only core points guaranteed anyway. clustering.addToplevelCluster(new Cluster<Model>(current, ClusterModel.CLUSTER)); current = DBIDUtil.newHashSet(); } // Add to noise noise.add(it); } } // Any unfinished cluster will also be added if(!current.isEmpty()) { clustering.addToplevelCluster(new Cluster<Model>(current, ClusterModel.CLUSTER)); } // Add noise clustering.addToplevelCluster(new Cluster<Model>(noise, true, ClusterModel.CLUSTER)); return clustering; }
[ "public", "static", "<", "E", "extends", "ClusterOrder", ">", "Clustering", "<", "Model", ">", "makeOPTICSCut", "(", "E", "co", ",", "double", "epsilon", ")", "{", "// Clustering model we are building", "Clustering", "<", "Model", ">", "clustering", "=", "new", ...
Compute an OPTICS cut clustering @param co Cluster order result @param epsilon Epsilon value for cut @return New partitioning clustering
[ "Compute", "an", "OPTICS", "cut", "clustering" ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/addons/batikvis/src/main/java/de/lmu/ifi/dbs/elki/visualization/opticsplot/OPTICSCut.java#L58-L105
train
elki-project/elki
elki-core-data/src/main/java/de/lmu/ifi/dbs/elki/data/LabelList.java
LabelList.make
public static LabelList make(Collection<String> labels) { int size = labels.size(); if(size == 0) { return EMPTY_LABELS; } return new LabelList(labels.toArray(new String[size])); }
java
public static LabelList make(Collection<String> labels) { int size = labels.size(); if(size == 0) { return EMPTY_LABELS; } return new LabelList(labels.toArray(new String[size])); }
[ "public", "static", "LabelList", "make", "(", "Collection", "<", "String", ">", "labels", ")", "{", "int", "size", "=", "labels", ".", "size", "(", ")", ";", "if", "(", "size", "==", "0", ")", "{", "return", "EMPTY_LABELS", ";", "}", "return", "new",...
Constructor replacement. When the label list is empty, it will produce the same instance! @param labels Existing labels @return Label list instance.
[ "Constructor", "replacement", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-core-data/src/main/java/de/lmu/ifi/dbs/elki/data/LabelList.java#L83-L89
train
elki-project/elki
elki/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/DiSH.java
DiSH.run
public Clustering<SubspaceModel> run(Database db, Relation<V> relation) { if(mu >= relation.size()) { throw new AbortException("Parameter mu is chosen unreasonably large. This won't yield meaningful results."); } DiSHClusterOrder opticsResult = new Instance(db, relation).run(); if(LOG.isVerbose()) { LOG.verbose("Compute Clusters."); } return computeClusters(relation, opticsResult); }
java
public Clustering<SubspaceModel> run(Database db, Relation<V> relation) { if(mu >= relation.size()) { throw new AbortException("Parameter mu is chosen unreasonably large. This won't yield meaningful results."); } DiSHClusterOrder opticsResult = new Instance(db, relation).run(); if(LOG.isVerbose()) { LOG.verbose("Compute Clusters."); } return computeClusters(relation, opticsResult); }
[ "public", "Clustering", "<", "SubspaceModel", ">", "run", "(", "Database", "db", ",", "Relation", "<", "V", ">", "relation", ")", "{", "if", "(", "mu", ">=", "relation", ".", "size", "(", ")", ")", "{", "throw", "new", "AbortException", "(", "\"Paramet...
Performs the DiSH algorithm on the given database. @param relation Relation to process
[ "Performs", "the", "DiSH", "algorithm", "on", "the", "given", "database", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/DiSH.java#L148-L158
train
elki-project/elki
elki/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/DiSH.java
DiSH.computeClusters
private Clustering<SubspaceModel> computeClusters(Relation<V> database, DiSHClusterOrder clusterOrder) { final int dimensionality = RelationUtil.dimensionality(database); // extract clusters Object2ObjectOpenCustomHashMap<long[], List<ArrayModifiableDBIDs>> clustersMap = extractClusters(database, clusterOrder); logClusterSizes("Step 1: extract clusters", dimensionality, clustersMap); // check if there are clusters < minpts checkClusters(database, clustersMap); logClusterSizes("Step 2: check clusters", dimensionality, clustersMap); // sort the clusters List<Cluster<SubspaceModel>> clusters = sortClusters(database, clustersMap); if(LOG.isVerbose()) { StringBuilder msg = new StringBuilder("Step 3: sort clusters"); for(Cluster<SubspaceModel> c : clusters) { msg.append('\n').append(BitsUtil.toStringLow(c.getModel().getSubspace().getDimensions(), dimensionality)).append(" ids ").append(c.size()); } LOG.verbose(msg.toString()); } // build the hierarchy Clustering<SubspaceModel> clustering = new Clustering<>("DiSH clustering", "dish-clustering"); buildHierarchy(database, clustering, clusters, dimensionality); if(LOG.isVerbose()) { StringBuilder msg = new StringBuilder("Step 4: build hierarchy"); for(Cluster<SubspaceModel> c : clusters) { msg.append('\n').append(BitsUtil.toStringLow(c.getModel().getSubspace().getDimensions(), dimensionality)).append(" ids ").append(c.size()); for(It<Cluster<SubspaceModel>> iter = clustering.getClusterHierarchy().iterParents(c); iter.valid(); iter.advance()) { msg.append("\n parent ").append(iter.get()); } for(It<Cluster<SubspaceModel>> iter = clustering.getClusterHierarchy().iterChildren(c); iter.valid(); iter.advance()) { msg.append("\n child ").append(iter.get()); } } LOG.verbose(msg.toString()); } // build result for(Cluster<SubspaceModel> c : clusters) { if(clustering.getClusterHierarchy().numParents(c) == 0) { clustering.addToplevelCluster(c); } } return clustering; }
java
private Clustering<SubspaceModel> computeClusters(Relation<V> database, DiSHClusterOrder clusterOrder) { final int dimensionality = RelationUtil.dimensionality(database); // extract clusters Object2ObjectOpenCustomHashMap<long[], List<ArrayModifiableDBIDs>> clustersMap = extractClusters(database, clusterOrder); logClusterSizes("Step 1: extract clusters", dimensionality, clustersMap); // check if there are clusters < minpts checkClusters(database, clustersMap); logClusterSizes("Step 2: check clusters", dimensionality, clustersMap); // sort the clusters List<Cluster<SubspaceModel>> clusters = sortClusters(database, clustersMap); if(LOG.isVerbose()) { StringBuilder msg = new StringBuilder("Step 3: sort clusters"); for(Cluster<SubspaceModel> c : clusters) { msg.append('\n').append(BitsUtil.toStringLow(c.getModel().getSubspace().getDimensions(), dimensionality)).append(" ids ").append(c.size()); } LOG.verbose(msg.toString()); } // build the hierarchy Clustering<SubspaceModel> clustering = new Clustering<>("DiSH clustering", "dish-clustering"); buildHierarchy(database, clustering, clusters, dimensionality); if(LOG.isVerbose()) { StringBuilder msg = new StringBuilder("Step 4: build hierarchy"); for(Cluster<SubspaceModel> c : clusters) { msg.append('\n').append(BitsUtil.toStringLow(c.getModel().getSubspace().getDimensions(), dimensionality)).append(" ids ").append(c.size()); for(It<Cluster<SubspaceModel>> iter = clustering.getClusterHierarchy().iterParents(c); iter.valid(); iter.advance()) { msg.append("\n parent ").append(iter.get()); } for(It<Cluster<SubspaceModel>> iter = clustering.getClusterHierarchy().iterChildren(c); iter.valid(); iter.advance()) { msg.append("\n child ").append(iter.get()); } } LOG.verbose(msg.toString()); } // build result for(Cluster<SubspaceModel> c : clusters) { if(clustering.getClusterHierarchy().numParents(c) == 0) { clustering.addToplevelCluster(c); } } return clustering; }
[ "private", "Clustering", "<", "SubspaceModel", ">", "computeClusters", "(", "Relation", "<", "V", ">", "database", ",", "DiSHClusterOrder", "clusterOrder", ")", "{", "final", "int", "dimensionality", "=", "RelationUtil", ".", "dimensionality", "(", "database", ")"...
Computes the hierarchical clusters according to the cluster order. @param database the database holding the objects @param clusterOrder the cluster order
[ "Computes", "the", "hierarchical", "clusters", "according", "to", "the", "cluster", "order", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/DiSH.java#L166-L211
train
elki-project/elki
elki/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/DiSH.java
DiSH.logClusterSizes
private void logClusterSizes(String m, int dimensionality, Object2ObjectOpenCustomHashMap<long[], List<ArrayModifiableDBIDs>> clustersMap) { if(LOG.isVerbose()) { final StringBuilder msg = new StringBuilder(1000).append(m).append('\n'); for(ObjectIterator<Object2ObjectMap.Entry<long[], List<ArrayModifiableDBIDs>>> iter = clustersMap.object2ObjectEntrySet().fastIterator(); iter.hasNext();) { Object2ObjectMap.Entry<long[], List<ArrayModifiableDBIDs>> entry = iter.next(); msg.append(BitsUtil.toStringLow(entry.getKey(), dimensionality)).append(" sizes:"); for(ArrayModifiableDBIDs c : entry.getValue()) { msg.append(' ').append(c.size()); } msg.append('\n'); } LOG.verbose(msg.toString()); } }
java
private void logClusterSizes(String m, int dimensionality, Object2ObjectOpenCustomHashMap<long[], List<ArrayModifiableDBIDs>> clustersMap) { if(LOG.isVerbose()) { final StringBuilder msg = new StringBuilder(1000).append(m).append('\n'); for(ObjectIterator<Object2ObjectMap.Entry<long[], List<ArrayModifiableDBIDs>>> iter = clustersMap.object2ObjectEntrySet().fastIterator(); iter.hasNext();) { Object2ObjectMap.Entry<long[], List<ArrayModifiableDBIDs>> entry = iter.next(); msg.append(BitsUtil.toStringLow(entry.getKey(), dimensionality)).append(" sizes:"); for(ArrayModifiableDBIDs c : entry.getValue()) { msg.append(' ').append(c.size()); } msg.append('\n'); } LOG.verbose(msg.toString()); } }
[ "private", "void", "logClusterSizes", "(", "String", "m", ",", "int", "dimensionality", ",", "Object2ObjectOpenCustomHashMap", "<", "long", "[", "]", ",", "List", "<", "ArrayModifiableDBIDs", ">", ">", "clustersMap", ")", "{", "if", "(", "LOG", ".", "isVerbose...
Log cluster sizes in verbose mode. @param m Log message @param dimensionality Dimensionality @param clustersMap Cluster map
[ "Log", "cluster", "sizes", "in", "verbose", "mode", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/DiSH.java#L220-L233
train
elki-project/elki
elki/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/DiSH.java
DiSH.sortClusters
private List<Cluster<SubspaceModel>> sortClusters(Relation<V> relation, Object2ObjectMap<long[], List<ArrayModifiableDBIDs>> clustersMap) { final int db_dim = RelationUtil.dimensionality(relation); // int num = 1; List<Cluster<SubspaceModel>> clusters = new ArrayList<>(); for(long[] pv : clustersMap.keySet()) { List<ArrayModifiableDBIDs> parallelClusters = clustersMap.get(pv); for(int i = 0; i < parallelClusters.size(); i++) { ArrayModifiableDBIDs c = parallelClusters.get(i); Cluster<SubspaceModel> cluster = new Cluster<>(c); cluster.setModel(new SubspaceModel(new Subspace(pv), Centroid.make(relation, c).getArrayRef())); String subspace = BitsUtil.toStringLow(cluster.getModel().getSubspace().getDimensions(), db_dim); cluster.setName(parallelClusters.size() > 1 ? ("Cluster_" + subspace + "_" + i) : ("Cluster_" + subspace)); clusters.add(cluster); } } // sort the clusters w.r.t. lambda Comparator<Cluster<SubspaceModel>> comparator = new Comparator<Cluster<SubspaceModel>>() { @Override public int compare(Cluster<SubspaceModel> c1, Cluster<SubspaceModel> c2) { return c2.getModel().getSubspace().dimensionality() - c1.getModel().getSubspace().dimensionality(); } }; Collections.sort(clusters, comparator); return clusters; }
java
private List<Cluster<SubspaceModel>> sortClusters(Relation<V> relation, Object2ObjectMap<long[], List<ArrayModifiableDBIDs>> clustersMap) { final int db_dim = RelationUtil.dimensionality(relation); // int num = 1; List<Cluster<SubspaceModel>> clusters = new ArrayList<>(); for(long[] pv : clustersMap.keySet()) { List<ArrayModifiableDBIDs> parallelClusters = clustersMap.get(pv); for(int i = 0; i < parallelClusters.size(); i++) { ArrayModifiableDBIDs c = parallelClusters.get(i); Cluster<SubspaceModel> cluster = new Cluster<>(c); cluster.setModel(new SubspaceModel(new Subspace(pv), Centroid.make(relation, c).getArrayRef())); String subspace = BitsUtil.toStringLow(cluster.getModel().getSubspace().getDimensions(), db_dim); cluster.setName(parallelClusters.size() > 1 ? ("Cluster_" + subspace + "_" + i) : ("Cluster_" + subspace)); clusters.add(cluster); } } // sort the clusters w.r.t. lambda Comparator<Cluster<SubspaceModel>> comparator = new Comparator<Cluster<SubspaceModel>>() { @Override public int compare(Cluster<SubspaceModel> c1, Cluster<SubspaceModel> c2) { return c2.getModel().getSubspace().dimensionality() - c1.getModel().getSubspace().dimensionality(); } }; Collections.sort(clusters, comparator); return clusters; }
[ "private", "List", "<", "Cluster", "<", "SubspaceModel", ">", ">", "sortClusters", "(", "Relation", "<", "V", ">", "relation", ",", "Object2ObjectMap", "<", "long", "[", "]", ",", "List", "<", "ArrayModifiableDBIDs", ">", ">", "clustersMap", ")", "{", "fin...
Returns a sorted list of the clusters w.r.t. the subspace dimensionality in descending order. @param relation the database storing the objects @param clustersMap the mapping of bits sets to clusters @return a sorted list of the clusters
[ "Returns", "a", "sorted", "list", "of", "the", "clusters", "w", ".", "r", ".", "t", ".", "the", "subspace", "dimensionality", "in", "descending", "order", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/DiSH.java#L334-L359
train
elki-project/elki
elki/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/DiSH.java
DiSH.checkClusters
/**
 * Removes the clusters with size &lt; mu from the cluster map and assigns
 * their members to a qualifying parent cluster, or to noise if none is found.
 *
 * @param relation the relation storing the objects
 * @param clustersMap the map containing the clusters; modified in place
 */
private void checkClusters(Relation<V> relation, Object2ObjectMap<long[], List<ArrayModifiableDBIDs>> clustersMap) {
  final int dimensionality = RelationUtil.dimensionality(relation);
  // Collect clusters smaller than mu for later reassignment.
  List<Pair<long[], ArrayModifiableDBIDs>> notAssigned = new ArrayList<>();
  Object2ObjectMap<long[], List<ArrayModifiableDBIDs>> newClustersMap = new Object2ObjectOpenCustomHashMap<>(BitsUtil.FASTUTIL_HASH_STRATEGY);
  // Noise collector, keyed by the all-zero preference vector.
  Pair<long[], ArrayModifiableDBIDs> noise = new Pair<>(BitsUtil.zero(dimensionality), DBIDUtil.newArray());
  for(long[] pv : clustersMap.keySet()) {
    // noise: empty preference vector, merge all its members into the noise set
    if(BitsUtil.cardinality(pv) == 0) {
      List<ArrayModifiableDBIDs> parallelClusters = clustersMap.get(pv);
      for(ArrayModifiableDBIDs c : parallelClusters) {
        noise.second.addDBIDs(c);
      }
    }
    // clusters: keep those of size >= mu, queue the rest for reassignment
    else {
      List<ArrayModifiableDBIDs> parallelClusters = clustersMap.get(pv);
      List<ArrayModifiableDBIDs> newParallelClusters = new ArrayList<>(parallelClusters.size());
      for(ArrayModifiableDBIDs c : parallelClusters) {
        if(!BitsUtil.isZero(pv) && c.size() < mu) {
          notAssigned.add(new Pair<>(pv, c));
        }
        else {
          newParallelClusters.add(c);
        }
      }
      newClustersMap.put(pv, newParallelClusters);
    }
  }

  // Replace the map contents with the filtered clusters.
  clustersMap.clear();
  clustersMap.putAll(newClustersMap);

  // Attach each too-small cluster to a parent; fall back to noise.
  for(Pair<long[], ArrayModifiableDBIDs> c : notAssigned) {
    if(c.second.isEmpty()) {
      continue;
    }
    Pair<long[], ArrayModifiableDBIDs> parent = findParent(relation, c, clustersMap);
    (parent != null ? parent : noise).second.addDBIDs(c.second);
  }

  // Finally register the noise set under the all-zero preference vector.
  List<ArrayModifiableDBIDs> noiseList = new ArrayList<>(1);
  noiseList.add(noise.second);
  clustersMap.put(noise.first, noiseList);
}
java
private void checkClusters(Relation<V> relation, Object2ObjectMap<long[], List<ArrayModifiableDBIDs>> clustersMap) { final int dimensionality = RelationUtil.dimensionality(relation); // check if there are clusters < minpts // and add them to not assigned List<Pair<long[], ArrayModifiableDBIDs>> notAssigned = new ArrayList<>(); Object2ObjectMap<long[], List<ArrayModifiableDBIDs>> newClustersMap = new Object2ObjectOpenCustomHashMap<>(BitsUtil.FASTUTIL_HASH_STRATEGY); Pair<long[], ArrayModifiableDBIDs> noise = new Pair<>(BitsUtil.zero(dimensionality), DBIDUtil.newArray()); for(long[] pv : clustersMap.keySet()) { // noise if(BitsUtil.cardinality(pv) == 0) { List<ArrayModifiableDBIDs> parallelClusters = clustersMap.get(pv); for(ArrayModifiableDBIDs c : parallelClusters) { noise.second.addDBIDs(c); } } // clusters else { List<ArrayModifiableDBIDs> parallelClusters = clustersMap.get(pv); List<ArrayModifiableDBIDs> newParallelClusters = new ArrayList<>(parallelClusters.size()); for(ArrayModifiableDBIDs c : parallelClusters) { if(!BitsUtil.isZero(pv) && c.size() < mu) { notAssigned.add(new Pair<>(pv, c)); } else { newParallelClusters.add(c); } } newClustersMap.put(pv, newParallelClusters); } } clustersMap.clear(); clustersMap.putAll(newClustersMap); for(Pair<long[], ArrayModifiableDBIDs> c : notAssigned) { if(c.second.isEmpty()) { continue; } Pair<long[], ArrayModifiableDBIDs> parent = findParent(relation, c, clustersMap); (parent != null ? parent : noise).second.addDBIDs(c.second); } List<ArrayModifiableDBIDs> noiseList = new ArrayList<>(1); noiseList.add(noise.second); clustersMap.put(noise.first, noiseList); }
[ "private", "void", "checkClusters", "(", "Relation", "<", "V", ">", "relation", ",", "Object2ObjectMap", "<", "long", "[", "]", ",", "List", "<", "ArrayModifiableDBIDs", ">", ">", "clustersMap", ")", "{", "final", "int", "dimensionality", "=", "RelationUtil", ...
Removes the clusters with size < minpts from the cluster map and adds them to their parents. @param relation the relation storing the objects @param clustersMap the map containing the clusters
[ "Removes", "the", "clusters", "with", "size", "&lt", ";", "minpts", "from", "the", "cluster", "map", "and", "adds", "them", "to", "their", "parents", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/DiSH.java#L368-L413
train
elki-project/elki
elki/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/DiSH.java
DiSH.findParent
/**
 * Returns the parent of the specified cluster: a cluster whose preference
 * vector is a proper subset of the child's, whose centroid lies within
 * {@code 2 * epsilon} weighted distance of the child's centroid, preferring
 * the candidate with the highest preference-vector cardinality found.
 *
 * @param relation the relation storing the objects
 * @param child the child to search the parent for
 * @param clustersMap the map containing the clusters
 * @return the parent of the specified cluster, or {@code null} if none qualifies
 */
private Pair<long[], ArrayModifiableDBIDs> findParent(Relation<V> relation, Pair<long[], ArrayModifiableDBIDs> child, Object2ObjectMap<long[], List<ArrayModifiableDBIDs>> clustersMap) {
  Centroid child_centroid = ProjectedCentroid.make(child.first, relation, child.second);

  Pair<long[], ArrayModifiableDBIDs> result = null;
  int resultCardinality = -1; // Cardinality of the best candidate so far.

  long[] childPV = child.first;
  int childCardinality = BitsUtil.cardinality(childPV);
  for(long[] parentPV : clustersMap.keySet()) {
    int parentCardinality = BitsUtil.cardinality(parentPV);
    // A parent must have strictly fewer preference bits than the child;
    // skip candidates no better than the best one already found.
    if(parentCardinality >= childCardinality || (resultCardinality != -1 && parentCardinality <= resultCardinality)) {
      continue;
    }
    // Candidate only if its preference vector is a subset of the child's.
    long[] pv = BitsUtil.andCMin(childPV, parentPV);
    if(BitsUtil.equal(pv, parentPV)) {
      List<ArrayModifiableDBIDs> parentList = clustersMap.get(parentPV);
      for(ArrayModifiableDBIDs parent : parentList) {
        NumberVector parent_centroid = ProjectedCentroid.make(parentPV, relation, parent);
        // Accept if the centroids are close in the parent's subspace.
        double d = weightedDistance(child_centroid, parent_centroid, parentPV);
        if(d <= 2 * epsilon) {
          result = new Pair<>(parentPV, parent);
          resultCardinality = parentCardinality;
          break;
        }
      }
    }
  }
  return result;
}
java
private Pair<long[], ArrayModifiableDBIDs> findParent(Relation<V> relation, Pair<long[], ArrayModifiableDBIDs> child, Object2ObjectMap<long[], List<ArrayModifiableDBIDs>> clustersMap) { Centroid child_centroid = ProjectedCentroid.make(child.first, relation, child.second); Pair<long[], ArrayModifiableDBIDs> result = null; int resultCardinality = -1; long[] childPV = child.first; int childCardinality = BitsUtil.cardinality(childPV); for(long[] parentPV : clustersMap.keySet()) { int parentCardinality = BitsUtil.cardinality(parentPV); if(parentCardinality >= childCardinality || (resultCardinality != -1 && parentCardinality <= resultCardinality)) { continue; } long[] pv = BitsUtil.andCMin(childPV, parentPV); if(BitsUtil.equal(pv, parentPV)) { List<ArrayModifiableDBIDs> parentList = clustersMap.get(parentPV); for(ArrayModifiableDBIDs parent : parentList) { NumberVector parent_centroid = ProjectedCentroid.make(parentPV, relation, parent); double d = weightedDistance(child_centroid, parent_centroid, parentPV); if(d <= 2 * epsilon) { result = new Pair<>(parentPV, parent); resultCardinality = parentCardinality; break; } } } } return result; }
[ "private", "Pair", "<", "long", "[", "]", ",", "ArrayModifiableDBIDs", ">", "findParent", "(", "Relation", "<", "V", ">", "relation", ",", "Pair", "<", "long", "[", "]", ",", "ArrayModifiableDBIDs", ">", "child", ",", "Object2ObjectMap", "<", "long", "[", ...
Returns the parent of the specified cluster @param relation the relation storing the objects @param child the child to search the parent for @param clustersMap the map containing the clusters @return the parent of the specified cluster
[ "Returns", "the", "parent", "of", "the", "specified", "cluster" ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/DiSH.java#L423-L453
train
elki-project/elki
elki/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/DiSH.java
DiSH.subspaceDimensionality
private int subspaceDimensionality(NumberVector v1, NumberVector v2, long[] pv1, long[] pv2, long[] commonPreferenceVector) { // number of zero values in commonPreferenceVector int subspaceDim = v1.getDimensionality() - BitsUtil.cardinality(commonPreferenceVector); // special case: v1 and v2 are in parallel subspaces if(BitsUtil.equal(commonPreferenceVector, pv1) || BitsUtil.equal(commonPreferenceVector, pv2)) { double d = weightedDistance(v1, v2, commonPreferenceVector); if(d > 2 * epsilon) { subspaceDim++; } } return subspaceDim; }
java
private int subspaceDimensionality(NumberVector v1, NumberVector v2, long[] pv1, long[] pv2, long[] commonPreferenceVector) { // number of zero values in commonPreferenceVector int subspaceDim = v1.getDimensionality() - BitsUtil.cardinality(commonPreferenceVector); // special case: v1 and v2 are in parallel subspaces if(BitsUtil.equal(commonPreferenceVector, pv1) || BitsUtil.equal(commonPreferenceVector, pv2)) { double d = weightedDistance(v1, v2, commonPreferenceVector); if(d > 2 * epsilon) { subspaceDim++; } } return subspaceDim; }
[ "private", "int", "subspaceDimensionality", "(", "NumberVector", "v1", ",", "NumberVector", "v2", ",", "long", "[", "]", "pv1", ",", "long", "[", "]", "pv2", ",", "long", "[", "]", "commonPreferenceVector", ")", "{", "// number of zero values in commonPreferenceVe...
Compute the common subspace dimensionality of two vectors. @param v1 First vector @param v2 Second vector @param pv1 First preference @param pv2 Second preference @param commonPreferenceVector Common preference @return Usually, v1.dim - commonPreference.cardinality, unless either pv1 and pv2 are a subset of the other.
[ "Compute", "the", "common", "subspace", "dimensionality", "of", "two", "vectors", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/DiSH.java#L577-L589
train
elki-project/elki
elki/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/DiSH.java
DiSH.weightedDistance
/**
 * Computes the weighted (Euclidean) distance between the two specified
 * vectors, using only the dimensions set in the given preference vector.
 *
 * @param v1 the first vector
 * @param v2 the second vector
 * @param weightVector the preference vector selecting the dimensions
 * @return the weighted distance between the two vectors
 */
protected static double weightedDistance(NumberVector v1, NumberVector v2, long[] weightVector) {
  double agg = 0.;
  // Only accumulate over the dimensions flagged in the preference vector.
  for(int d = BitsUtil.nextSetBit(weightVector, 0); d >= 0; d = BitsUtil.nextSetBit(weightVector, d + 1)) {
    final double delta = v1.doubleValue(d) - v2.doubleValue(d);
    agg += delta * delta;
  }
  return FastMath.sqrt(agg);
}
java
protected static double weightedDistance(NumberVector v1, NumberVector v2, long[] weightVector) { double sqrDist = 0; for(int i = BitsUtil.nextSetBit(weightVector, 0); i >= 0; i = BitsUtil.nextSetBit(weightVector, i + 1)) { double manhattanI = v1.doubleValue(i) - v2.doubleValue(i); sqrDist += manhattanI * manhattanI; } return FastMath.sqrt(sqrDist); }
[ "protected", "static", "double", "weightedDistance", "(", "NumberVector", "v1", ",", "NumberVector", "v2", ",", "long", "[", "]", "weightVector", ")", "{", "double", "sqrDist", "=", "0", ";", "for", "(", "int", "i", "=", "BitsUtil", ".", "nextSetBit", "(",...
Computes the weighted distance between the two specified vectors according to the given preference vector. @param v1 the first vector @param v2 the second vector @param weightVector the preference vector @return the weighted distance between the two specified vectors according to the given preference vector
[ "Computes", "the", "weighted", "distance", "between", "the", "two", "specified", "vectors", "according", "to", "the", "given", "preference", "vector", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/DiSH.java#L601-L608
train
elki-project/elki
elki-core-math/src/main/java/de/lmu/ifi/dbs/elki/math/linearalgebra/pca/StandardCovarianceMatrixBuilder.java
StandardCovarianceMatrixBuilder.processIds
/**
 * Compute the covariance matrix for a collection of database IDs.
 *
 * @param ids a collection of ids
 * @param database the database used
 * @return covariance matrix (population version)
 */
@Override
public double[][] processIds(DBIDs ids, Relation<? extends NumberVector> database) {
  // NOTE(review): "destroy" presumably means the CovarianceMatrix instance is
  // consumed and must not be reused afterwards — confirm against its API.
  return CovarianceMatrix.make(database, ids).destroyToPopulationMatrix();
}
java
@Override public double[][] processIds(DBIDs ids, Relation<? extends NumberVector> database) { return CovarianceMatrix.make(database, ids).destroyToPopulationMatrix(); }
[ "@", "Override", "public", "double", "[", "]", "[", "]", "processIds", "(", "DBIDs", "ids", ",", "Relation", "<", "?", "extends", "NumberVector", ">", "database", ")", "{", "return", "CovarianceMatrix", ".", "make", "(", "database", ",", "ids", ")", ".",...
Compute Covariance Matrix for a collection of database IDs. @param ids a collection of ids @param database the database used @return Covariance Matrix
[ "Compute", "Covariance", "Matrix", "for", "a", "collection", "of", "database", "IDs", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-core-math/src/main/java/de/lmu/ifi/dbs/elki/math/linearalgebra/pca/StandardCovarianceMatrixBuilder.java#L46-L49
train
elki-project/elki
elki-input/src/main/java/de/lmu/ifi/dbs/elki/datasource/filter/cleaning/VectorDimensionalityFilter.java
VectorDimensionalityFilter.updateMeta
/**
 * Update the bundle metadata: locate the first variable-length number vector
 * column, and (when possible) replace its type with a fixed-dimensionality
 * vector field type of dimensionality {@code dim}. All other columns are
 * passed through unchanged. Side effects: sets {@code meta}, {@code column},
 * and possibly {@code dim}.
 */
private void updateMeta() {
  meta = new BundleMeta();
  BundleMeta origmeta = source.getMeta();
  for(int i = 0; i < origmeta.size(); i++) {
    SimpleTypeInformation<?> type = origmeta.get(i);
    if(column < 0) {
      // Test whether this type matches
      if(TypeUtil.NUMBER_VECTOR_VARIABLE_LENGTH.isAssignableFromType(type)) {
        if(type instanceof VectorFieldTypeInformation) {
          @SuppressWarnings("unchecked")
          final VectorFieldTypeInformation<V> castType = (VectorFieldTypeInformation<V>) type;
          // Sanity checks: the desired dimensionality must fall within the
          // field's dimensionality range, or every vector would be filtered.
          if(dim != -1 && castType.mindim() > dim) {
            throw new AbortException("Would filter all vectors: minimum dimensionality " + castType.mindim() + " > desired dimensionality " + dim);
          }
          if(dim != -1 && castType.maxdim() < dim) {
            throw new AbortException("Would filter all vectors: maximum dimensionality " + castType.maxdim() + " < desired dimensionality " + dim);
          }
          if(dim == -1) {
            // No target dimensionality configured: adopt the field minimum.
            dim = castType.mindim();
          }
          if(castType.mindim() == castType.maxdim()) {
            // Already a fixed-dimensionality field: keep the type as-is.
            meta.add(castType);
            column = i;
            continue;
          }
        }
        @SuppressWarnings("unchecked")
        final VectorTypeInformation<V> castType = (VectorTypeInformation<V>) type;
        if(dim != -1) {
          // Rewrite as a fixed-dimensionality vector field type.
          meta.add(new VectorFieldTypeInformation<>(FilterUtil.guessFactory(castType), dim, dim, castType.getSerializer()));
        }
        else {
          LOG.warning("No dimensionality yet for column " + i);
          meta.add(castType);
        }
        column = i;
        continue;
      }
    }
    // Not the vector column (or already found): pass through unchanged.
    meta.add(type);
  }
}
java
private void updateMeta() { meta = new BundleMeta(); BundleMeta origmeta = source.getMeta(); for(int i = 0; i < origmeta.size(); i++) { SimpleTypeInformation<?> type = origmeta.get(i); if(column < 0) { // Test whether this type matches if(TypeUtil.NUMBER_VECTOR_VARIABLE_LENGTH.isAssignableFromType(type)) { if(type instanceof VectorFieldTypeInformation) { @SuppressWarnings("unchecked") final VectorFieldTypeInformation<V> castType = (VectorFieldTypeInformation<V>) type; if(dim != -1 && castType.mindim() > dim) { throw new AbortException("Would filter all vectors: minimum dimensionality " + castType.mindim() + " > desired dimensionality " + dim); } if(dim != -1 && castType.maxdim() < dim) { throw new AbortException("Would filter all vectors: maximum dimensionality " + castType.maxdim() + " < desired dimensionality " + dim); } if(dim == -1) { dim = castType.mindim(); } if(castType.mindim() == castType.maxdim()) { meta.add(castType); column = i; continue; } } @SuppressWarnings("unchecked") final VectorTypeInformation<V> castType = (VectorTypeInformation<V>) type; if(dim != -1) { meta.add(new VectorFieldTypeInformation<>(FilterUtil.guessFactory(castType), dim, dim, castType.getSerializer())); } else { LOG.warning("No dimensionality yet for column " + i); meta.add(castType); } column = i; continue; } } meta.add(type); } }
[ "private", "void", "updateMeta", "(", ")", "{", "meta", "=", "new", "BundleMeta", "(", ")", ";", "BundleMeta", "origmeta", "=", "source", ".", "getMeta", "(", ")", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "origmeta", ".", "size", "(", ...
Update metadata.
[ "Update", "metadata", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-input/src/main/java/de/lmu/ifi/dbs/elki/datasource/filter/cleaning/VectorDimensionalityFilter.java#L136-L177
train
elki-project/elki
elki-core-math/src/main/java/de/lmu/ifi/dbs/elki/math/statistics/distribution/LogisticDistribution.java
LogisticDistribution.logquantile
/**
 * log Quantile function.
 *
 * TODO: untested.
 *
 * @param val Value (log of the probability)
 * @param loc Location
 * @param scale Scale
 * @return Quantile
 */
public static double logquantile(double val, double loc, double scale) {
  // val - log1mexp(-val) appears to compute logit(p) for val = log(p),
  // i.e. log(p) - log(1 - p) — TODO confirm log1mexp's sign convention.
  return loc + scale * (val - MathUtil.log1mexp(-val));
}
java
public static double logquantile(double val, double loc, double scale) { return loc + scale * (val - MathUtil.log1mexp(-val)); }
[ "public", "static", "double", "logquantile", "(", "double", "val", ",", "double", "loc", ",", "double", "scale", ")", "{", "return", "loc", "+", "scale", "*", "(", "val", "-", "MathUtil", ".", "log1mexp", "(", "-", "val", ")", ")", ";", "}" ]
log Quantile function. TODO: untested. @param val Value @param loc Location @param scale Scale @return Quantile
[ "log", "Quantile", "function", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-core-math/src/main/java/de/lmu/ifi/dbs/elki/math/statistics/distribution/LogisticDistribution.java#L199-L201
train
elki-project/elki
elki-clustering/src/main/java/de/lmu/ifi/dbs/elki/evaluation/clustering/LogClusterSizes.java
LogClusterSizes.logClusterSizes
/**
 * Log the cluster sizes of a clustering as statistics.
 *
 * @param c Clustering to analyze
 * @param <C> Model type
 */
public static <C extends Model> void logClusterSizes(Clustering<C> c) {
  if(!LOG.isStatistics()) {
    return; // Statistics logging disabled: nothing to do.
  }
  final List<Cluster<C>> clusters = c.getAllClusters();
  final int numc = clusters.size();
  LOG.statistics(new StringStatistic(PREFIX + "name", c.getLongName()));
  LOG.statistics(new LongStatistic(PREFIX + "clusters", numc));
  Hierarchy<Cluster<C>> h = c.getClusterHierarchy();
  int cnum = 0;
  for(Cluster<C> clu : clusters) {
    // One statistics key prefix per cluster, numbered in iteration order.
    final String p = PREFIX + "cluster-" + cnum + ".";
    if(clu.getName() != null) {
      LOG.statistics(new StringStatistic(p + "name", clu.getName()));
    }
    LOG.statistics(new LongStatistic(p + "size", clu.size()));
    if(clu.isNoise()) {
      LOG.statistics(new StringStatistic(p + "noise", "true"));
    }
    if(h.numChildren(clu) > 0) {
      // TODO: this only works if we have cluster names!
      StringBuilder buf = new StringBuilder();
      for(It<Cluster<C>> it = h.iterChildren(clu); it.valid(); it.advance()) {
        if(buf.length() > 0) {
          buf.append(", ");
        }
        buf.append(it.get().getName());
      }
      LOG.statistics(new StringStatistic(p + "children", buf.toString()));
    }
    // TODO: also log parents?
    ++cnum;
  }
}
java
public static <C extends Model> void logClusterSizes(Clustering<C> c) { if(!LOG.isStatistics()) { return; } final List<Cluster<C>> clusters = c.getAllClusters(); final int numc = clusters.size(); LOG.statistics(new StringStatistic(PREFIX + "name", c.getLongName())); LOG.statistics(new LongStatistic(PREFIX + "clusters", numc)); Hierarchy<Cluster<C>> h = c.getClusterHierarchy(); int cnum = 0; for(Cluster<C> clu : clusters) { final String p = PREFIX + "cluster-" + cnum + "."; if(clu.getName() != null) { LOG.statistics(new StringStatistic(p + "name", clu.getName())); } LOG.statistics(new LongStatistic(p + "size", clu.size())); if(clu.isNoise()) { LOG.statistics(new StringStatistic(p + "noise", "true")); } if(h.numChildren(clu) > 0) { // TODO: this only works if we have cluster names! StringBuilder buf = new StringBuilder(); for(It<Cluster<C>> it = h.iterChildren(clu); it.valid(); it.advance()) { if(buf.length() > 0) { buf.append(", "); } buf.append(it.get().getName()); } LOG.statistics(new StringStatistic(p + "children", buf.toString())); } // TODO: also log parents? ++cnum; } }
[ "public", "static", "<", "C", "extends", "Model", ">", "void", "logClusterSizes", "(", "Clustering", "<", "C", ">", "c", ")", "{", "if", "(", "!", "LOG", ".", "isStatistics", "(", ")", ")", "{", "return", ";", "}", "final", "List", "<", "Cluster", ...
Log the cluster sizes of a clustering. @param c Clustering ot analyze
[ "Log", "the", "cluster", "sizes", "of", "a", "clustering", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-clustering/src/main/java/de/lmu/ifi/dbs/elki/evaluation/clustering/LogClusterSizes.java#L62-L96
train
elki-project/elki
elki-clustering/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/clique/CLIQUESubspace.java
CLIQUESubspace.addDenseUnit
/**
 * Adds the specified dense unit to this subspace, merging the unit's
 * dimensions into this subspace's dimension set and accounting its
 * feature vectors to the coverage.
 *
 * @param unit the unit to be added
 */
public void addDenseUnit(CLIQUEUnit unit) {
  // Merge the unit's dimensions into this subspace's dimension bitmask.
  final long[] dims = getDimensions();
  final int d = unit.dimensionality();
  for(int j = 0; j < d; j++) {
    BitsUtil.setI(dims, unit.getDimension(j));
  }
  denseUnits.add(unit);
  coverage += unit.numberOfFeatureVectors();
}
java
public void addDenseUnit(CLIQUEUnit unit) { int numdim = unit.dimensionality(); for(int i = 0; i < numdim; i++) { BitsUtil.setI(getDimensions(), unit.getDimension(i)); } denseUnits.add(unit); coverage += unit.numberOfFeatureVectors(); }
[ "public", "void", "addDenseUnit", "(", "CLIQUEUnit", "unit", ")", "{", "int", "numdim", "=", "unit", ".", "dimensionality", "(", ")", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "numdim", ";", "i", "++", ")", "{", "BitsUtil", ".", "setI",...
Adds the specified dense unit to this subspace. @param unit the unit to be added.
[ "Adds", "the", "specified", "dense", "unit", "to", "this", "subspace", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-clustering/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/clique/CLIQUESubspace.java#L81-L89
train
elki-project/elki
elki-clustering/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/clique/CLIQUESubspace.java
CLIQUESubspace.determineClusters
/**
 * Determines all clusters in this subspace by a depth-first search over
 * connected dense units.
 *
 * @return the clusters in this subspace and the corresponding cluster models
 */
public List<Pair<Subspace, ModifiableDBIDs>> determineClusters() {
  List<Pair<Subspace, ModifiableDBIDs>> result = new ArrayList<>();
  for(CLIQUEUnit seed : denseUnits) {
    if(seed.isAssigned()) {
      continue; // Already part of a previously discovered cluster.
    }
    // Start a new cluster from this unassigned unit.
    ModifiableDBIDs ids = DBIDUtil.newHashSet();
    CLIQUESubspace model = new CLIQUESubspace(getDimensions());
    result.add(new Pair<Subspace, ModifiableDBIDs>(model, ids));
    dfs(seed, ids, model);
  }
  return result;
}
java
public List<Pair<Subspace, ModifiableDBIDs>> determineClusters() { List<Pair<Subspace, ModifiableDBIDs>> clusters = new ArrayList<>(); for(CLIQUEUnit unit : denseUnits) { if(!unit.isAssigned()) { ModifiableDBIDs cluster = DBIDUtil.newHashSet(); CLIQUESubspace model = new CLIQUESubspace(getDimensions()); clusters.add(new Pair<Subspace, ModifiableDBIDs>(model, cluster)); dfs(unit, cluster, model); } } return clusters; }
[ "public", "List", "<", "Pair", "<", "Subspace", ",", "ModifiableDBIDs", ">", ">", "determineClusters", "(", ")", "{", "List", "<", "Pair", "<", "Subspace", ",", "ModifiableDBIDs", ">", ">", "clusters", "=", "new", "ArrayList", "<>", "(", ")", ";", "for",...
Determines all clusters in this subspace by performing a depth-first search algorithm to find connected dense units. @return the clusters in this subspace and the corresponding cluster models
[ "Determines", "all", "clusters", "in", "this", "subspace", "by", "performing", "a", "depth", "-", "first", "search", "algorithm", "to", "find", "connected", "dense", "units", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-clustering/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/clique/CLIQUESubspace.java#L97-L109
train
elki-project/elki
elki-clustering/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/clique/CLIQUESubspace.java
CLIQUESubspace.dfs
/**
 * Depth-first search to collect the connected dense units forming one
 * cluster: assigns the given unit to the cluster, then recurses into its
 * unassigned left and right neighbors in every dimension of this subspace.
 *
 * @param unit the unit
 * @param cluster the IDs of the feature vectors of the current cluster
 * @param model the model of the cluster
 */
public void dfs(CLIQUEUnit unit, ModifiableDBIDs cluster, CLIQUESubspace model) {
  cluster.addDBIDs(unit.getIds());
  unit.markAsAssigned();
  model.addDenseUnit(unit);

  final long[] dims = getDimensions();
  for(int d = BitsUtil.nextSetBit(dims, 0); d >= 0; d = BitsUtil.nextSetBit(dims, d + 1)) {
    final CLIQUEUnit l = leftNeighbor(unit, d);
    if(l != null && !l.isAssigned()) {
      dfs(l, cluster, model);
    }
    final CLIQUEUnit r = rightNeighbor(unit, d);
    if(r != null && !r.isAssigned()) {
      dfs(r, cluster, model);
    }
  }
}
java
public void dfs(CLIQUEUnit unit, ModifiableDBIDs cluster, CLIQUESubspace model) { cluster.addDBIDs(unit.getIds()); unit.markAsAssigned(); model.addDenseUnit(unit); final long[] dims = getDimensions(); for(int dim = BitsUtil.nextSetBit(dims, 0); dim >= 0; dim = BitsUtil.nextSetBit(dims, dim + 1)) { CLIQUEUnit left = leftNeighbor(unit, dim); if(left != null && !left.isAssigned()) { dfs(left, cluster, model); } CLIQUEUnit right = rightNeighbor(unit, dim); if(right != null && !right.isAssigned()) { dfs(right, cluster, model); } } }
[ "public", "void", "dfs", "(", "CLIQUEUnit", "unit", ",", "ModifiableDBIDs", "cluster", ",", "CLIQUESubspace", "model", ")", "{", "cluster", ".", "addDBIDs", "(", "unit", ".", "getIds", "(", ")", ")", ";", "unit", ".", "markAsAssigned", "(", ")", ";", "mo...
Depth-first search algorithm to find connected dense units in this subspace that build a cluster. It starts with a unit, assigns it to a cluster and finds all units it is connected to. @param unit the unit @param cluster the IDs of the feature vectors of the current cluster @param model the model of the cluster
[ "Depth", "-", "first", "search", "algorithm", "to", "find", "connected", "dense", "units", "in", "this", "subspace", "that", "build", "a", "cluster", ".", "It", "starts", "with", "a", "unit", "assigns", "it", "to", "a", "cluster", "and", "finds", "all", ...
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-clustering/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/clique/CLIQUESubspace.java#L120-L137
train
elki-project/elki
elki-clustering/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/clique/CLIQUESubspace.java
CLIQUESubspace.leftNeighbor
/**
 * Returns the left neighbor of the given unit in the specified dimension.
 *
 * @param unit the unit to determine the left neighbor for
 * @param dim the dimension
 * @return the left neighbor, or {@code null} if there is none
 */
protected CLIQUEUnit leftNeighbor(CLIQUEUnit unit, int dim) {
  // Linear scan: dense units are not indexed by position.
  CLIQUEUnit found = null;
  for(CLIQUEUnit candidate : denseUnits) {
    if(candidate.containsLeftNeighbor(unit, dim)) {
      found = candidate;
      break;
    }
  }
  return found;
}
java
protected CLIQUEUnit leftNeighbor(CLIQUEUnit unit, int dim) { for(CLIQUEUnit u : denseUnits) { if(u.containsLeftNeighbor(unit, dim)) { return u; } } return null; }
[ "protected", "CLIQUEUnit", "leftNeighbor", "(", "CLIQUEUnit", "unit", ",", "int", "dim", ")", "{", "for", "(", "CLIQUEUnit", "u", ":", "denseUnits", ")", "{", "if", "(", "u", ".", "containsLeftNeighbor", "(", "unit", ",", "dim", ")", ")", "{", "return", ...
Returns the left neighbor of the given unit in the specified dimension. @param unit the unit to determine the left neighbor for @param dim the dimension @return the left neighbor of the given unit in the specified dimension
[ "Returns", "the", "left", "neighbor", "of", "the", "given", "unit", "in", "the", "specified", "dimension", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-clustering/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/clique/CLIQUESubspace.java#L146-L153
train
elki-project/elki
elki-clustering/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/clique/CLIQUESubspace.java
CLIQUESubspace.rightNeighbor
/**
 * Returns the right neighbor of the given unit in the specified dimension.
 *
 * @param unit the unit to determine the right neighbor for
 * @param dim the dimension
 * @return the right neighbor, or {@code null} if there is none
 */
protected CLIQUEUnit rightNeighbor(CLIQUEUnit unit, int dim) {
  // Linear scan: dense units are not indexed by position.
  CLIQUEUnit found = null;
  for(CLIQUEUnit candidate : denseUnits) {
    if(candidate.containsRightNeighbor(unit, dim)) {
      found = candidate;
      break;
    }
  }
  return found;
}
java
protected CLIQUEUnit rightNeighbor(CLIQUEUnit unit, int dim) { for(CLIQUEUnit u : denseUnits) { if(u.containsRightNeighbor(unit, dim)) { return u; } } return null; }
[ "protected", "CLIQUEUnit", "rightNeighbor", "(", "CLIQUEUnit", "unit", ",", "int", "dim", ")", "{", "for", "(", "CLIQUEUnit", "u", ":", "denseUnits", ")", "{", "if", "(", "u", ".", "containsRightNeighbor", "(", "unit", ",", "dim", ")", ")", "{", "return"...
Returns the right neighbor of the given unit in the specified dimension. @param unit the unit to determine the right neighbor for @param dim the dimension @return the right neighbor of the given unit in the specified dimension
[ "Returns", "the", "right", "neighbor", "of", "the", "given", "unit", "in", "the", "specified", "dimension", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-clustering/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/subspace/clique/CLIQUESubspace.java#L162-L169
train
elki-project/elki
addons/xtree/src/main/java/de/lmu/ifi/dbs/elki/index/tree/spatial/rstarvariants/xtree/util/XSplitter.java
XSplitter.getCommonSplitDimensions
private IntIterator getCommonSplitDimensions(N node) { Collection<SplitHistory> splitHistories = new ArrayList<>(node.getNumEntries()); for(int i = 0; i < node.getNumEntries(); i++) { SpatialEntry entry = node.getEntry(i); if(!(entry instanceof XTreeDirectoryEntry)) { throw new RuntimeException("Wrong entry type to derive split dimension from: " + entry.getClass().getName()); } splitHistories.add(((XTreeDirectoryEntry) entry).getSplitHistory()); } return SplitHistory.getCommonDimensions(splitHistories); }
java
private IntIterator getCommonSplitDimensions(N node) { Collection<SplitHistory> splitHistories = new ArrayList<>(node.getNumEntries()); for(int i = 0; i < node.getNumEntries(); i++) { SpatialEntry entry = node.getEntry(i); if(!(entry instanceof XTreeDirectoryEntry)) { throw new RuntimeException("Wrong entry type to derive split dimension from: " + entry.getClass().getName()); } splitHistories.add(((XTreeDirectoryEntry) entry).getSplitHistory()); } return SplitHistory.getCommonDimensions(splitHistories); }
[ "private", "IntIterator", "getCommonSplitDimensions", "(", "N", "node", ")", "{", "Collection", "<", "SplitHistory", ">", "splitHistories", "=", "new", "ArrayList", "<>", "(", "node", ".", "getNumEntries", "(", ")", ")", ";", "for", "(", "int", "i", "=", "...
Determine the common split dimensions from a list of entries. @param node node for which to determine the common split dimensions @return common split dimensions
[ "Determine", "the", "common", "split", "dimensions", "from", "a", "list", "of", "entries", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/addons/xtree/src/main/java/de/lmu/ifi/dbs/elki/index/tree/spatial/rstarvariants/xtree/util/XSplitter.java#L379-L389
train
elki-project/elki
addons/xtree/src/main/java/de/lmu/ifi/dbs/elki/index/tree/spatial/rstarvariants/xtree/util/XSplitter.java
XSplitter.mbr
private HyperBoundingBox mbr(final int[] entries, final int from, final int to) { SpatialEntry first = this.node.getEntry(entries[from]); ModifiableHyperBoundingBox mbr = new ModifiableHyperBoundingBox(first); for(int i = from + 1; i < to; i++) { mbr.extend(this.node.getEntry(entries[i])); } return mbr; }
java
private HyperBoundingBox mbr(final int[] entries, final int from, final int to) { SpatialEntry first = this.node.getEntry(entries[from]); ModifiableHyperBoundingBox mbr = new ModifiableHyperBoundingBox(first); for(int i = from + 1; i < to; i++) { mbr.extend(this.node.getEntry(entries[i])); } return mbr; }
[ "private", "HyperBoundingBox", "mbr", "(", "final", "int", "[", "]", "entries", ",", "final", "int", "from", ",", "final", "int", "to", ")", "{", "SpatialEntry", "first", "=", "this", ".", "node", ".", "getEntry", "(", "entries", "[", "from", "]", ")",...
Computes and returns the mbr of the specified nodes, only the nodes between from and to index are considered. @param entries the array of node indices @param from the start index @param to the end index @return the mbr of the specified nodes
[ "Computes", "and", "returns", "the", "mbr", "of", "the", "specified", "nodes", "only", "the", "nodes", "between", "from", "and", "to", "index", "are", "considered", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/addons/xtree/src/main/java/de/lmu/ifi/dbs/elki/index/tree/spatial/rstarvariants/xtree/util/XSplitter.java#L724-L731
train
elki-project/elki
elki-core-dbids-int/src/main/java/de/lmu/ifi/dbs/elki/database/ids/integer/ArrayModifiableIntegerDBIDs.java
ArrayModifiableIntegerDBIDs.ensureSize
private void ensureSize(int minsize) { if(minsize <= store.length) { return; } int asize = store.length; while(asize < minsize) { asize = (asize >>> 1) + asize; } final int[] prev = store; store = new int[asize]; System.arraycopy(prev, 0, store, 0, size); }
java
private void ensureSize(int minsize) { if(minsize <= store.length) { return; } int asize = store.length; while(asize < minsize) { asize = (asize >>> 1) + asize; } final int[] prev = store; store = new int[asize]; System.arraycopy(prev, 0, store, 0, size); }
[ "private", "void", "ensureSize", "(", "int", "minsize", ")", "{", "if", "(", "minsize", "<=", "store", ".", "length", ")", "{", "return", ";", "}", "int", "asize", "=", "store", ".", "length", ";", "while", "(", "asize", "<", "minsize", ")", "{", "...
Resize as desired. @param minsize Desired size
[ "Resize", "as", "desired", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-core-dbids-int/src/main/java/de/lmu/ifi/dbs/elki/database/ids/integer/ArrayModifiableIntegerDBIDs.java#L123-L134
train
elki-project/elki
elki-core-dbids-int/src/main/java/de/lmu/ifi/dbs/elki/database/ids/integer/ArrayModifiableIntegerDBIDs.java
ArrayModifiableIntegerDBIDs.grow
private void grow() { final int newsize = store.length + (store.length >>> 1); final int[] prev = store; store = new int[newsize]; System.arraycopy(prev, 0, store, 0, size); }
java
private void grow() { final int newsize = store.length + (store.length >>> 1); final int[] prev = store; store = new int[newsize]; System.arraycopy(prev, 0, store, 0, size); }
[ "private", "void", "grow", "(", ")", "{", "final", "int", "newsize", "=", "store", ".", "length", "+", "(", "store", ".", "length", ">>>", "1", ")", ";", "final", "int", "[", "]", "prev", "=", "store", ";", "store", "=", "new", "int", "[", "newsi...
Grow array by 50%.
[ "Grow", "array", "by", "50%", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-core-dbids-int/src/main/java/de/lmu/ifi/dbs/elki/database/ids/integer/ArrayModifiableIntegerDBIDs.java#L139-L144
train
elki-project/elki
elki-outlier/src/main/java/de/lmu/ifi/dbs/elki/algorithm/outlier/distance/SOS.java
SOS.sumOfProbabilities
public static double sumOfProbabilities(DBIDIter ignore, DBIDArrayIter di, double[] p) { double s = 0; for(di.seek(0); di.valid(); di.advance()) { if(DBIDUtil.equal(ignore, di)) { continue; } final double v = p[di.getOffset()]; if(!(v > 0)) { break; } s += v; } return s; }
java
public static double sumOfProbabilities(DBIDIter ignore, DBIDArrayIter di, double[] p) { double s = 0; for(di.seek(0); di.valid(); di.advance()) { if(DBIDUtil.equal(ignore, di)) { continue; } final double v = p[di.getOffset()]; if(!(v > 0)) { break; } s += v; } return s; }
[ "public", "static", "double", "sumOfProbabilities", "(", "DBIDIter", "ignore", ",", "DBIDArrayIter", "di", ",", "double", "[", "]", "p", ")", "{", "double", "s", "=", "0", ";", "for", "(", "di", ".", "seek", "(", "0", ")", ";", "di", ".", "valid", ...
Compute the sum of probabilities, stop at first 0, ignore query object. Note: while SOS ensures the 'ignore' object is not added in the first place, KNNSOS cannot do so efficiently (yet). @param ignore Object to ignore. @param di Object list @param p Probabilities @return Sum.
[ "Compute", "the", "sum", "of", "probabilities", "stop", "at", "first", "0", "ignore", "query", "object", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-outlier/src/main/java/de/lmu/ifi/dbs/elki/algorithm/outlier/distance/SOS.java#L164-L177
train
elki-project/elki
addons/batikvis/src/main/java/de/lmu/ifi/dbs/elki/visualization/visualizers/thumbs/ThumbnailThread.java
ThumbnailThread.queue
public synchronized static Task queue(Listener callback) { final Task task = new Task(callback); // TODO: synchronization? if(THREAD != null && THREAD.isAlive()) { THREAD.queue.add(task); return task; } THREAD = new ThumbnailThread(); THREAD.queue.add(task); THREAD.start(); return task; }
java
public synchronized static Task queue(Listener callback) { final Task task = new Task(callback); // TODO: synchronization? if(THREAD != null && THREAD.isAlive()) { THREAD.queue.add(task); return task; } THREAD = new ThumbnailThread(); THREAD.queue.add(task); THREAD.start(); return task; }
[ "public", "synchronized", "static", "Task", "queue", "(", "Listener", "callback", ")", "{", "final", "Task", "task", "=", "new", "Task", "(", "callback", ")", ";", "// TODO: synchronization?", "if", "(", "THREAD", "!=", "null", "&&", "THREAD", ".", "isAlive"...
Queue a thumbnail task in a global thumbnail thread. @param callback Callback
[ "Queue", "a", "thumbnail", "task", "in", "a", "global", "thumbnail", "thread", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/addons/batikvis/src/main/java/de/lmu/ifi/dbs/elki/visualization/visualizers/thumbs/ThumbnailThread.java#L55-L66
train
elki-project/elki
addons/batikvis/src/main/java/de/lmu/ifi/dbs/elki/visualization/visualizers/thumbs/ThumbnailThread.java
ThumbnailThread.unqueue
public static void unqueue(Task task) { if(THREAD != null) { synchronized(THREAD) { THREAD.queue.remove(task); } } }
java
public static void unqueue(Task task) { if(THREAD != null) { synchronized(THREAD) { THREAD.queue.remove(task); } } }
[ "public", "static", "void", "unqueue", "(", "Task", "task", ")", "{", "if", "(", "THREAD", "!=", "null", ")", "{", "synchronized", "(", "THREAD", ")", "{", "THREAD", ".", "queue", ".", "remove", "(", "task", ")", ";", "}", "}", "}" ]
Remove a pending task from the queue. @param task Task to remove.
[ "Remove", "a", "pending", "task", "from", "the", "queue", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/addons/batikvis/src/main/java/de/lmu/ifi/dbs/elki/visualization/visualizers/thumbs/ThumbnailThread.java#L73-L79
train
elki-project/elki
elki-logging/src/main/java/de/lmu/ifi/dbs/elki/logging/progress/StepProgress.java
StepProgress.beginStep
public void beginStep(int step, String stepTitle, Logging logger) { setProcessed(step - 1); this.stepTitle = stepTitle; logger.progress(this); }
java
public void beginStep(int step, String stepTitle, Logging logger) { setProcessed(step - 1); this.stepTitle = stepTitle; logger.progress(this); }
[ "public", "void", "beginStep", "(", "int", "step", ",", "String", "stepTitle", ",", "Logging", "logger", ")", "{", "setProcessed", "(", "step", "-", "1", ")", ";", "this", ".", "stepTitle", "=", "stepTitle", ";", "logger", ".", "progress", "(", "this", ...
Do a new step and log it @param step Step number @param stepTitle Step title @param logger Logger to report to.
[ "Do", "a", "new", "step", "and", "log", "it" ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-logging/src/main/java/de/lmu/ifi/dbs/elki/logging/progress/StepProgress.java#L80-L84
train
elki-project/elki
addons/uncertain/src/main/java/de/lmu/ifi/dbs/elki/data/uncertain/AbstractUncertainObject.java
AbstractUncertainObject.computeBounds
protected static HyperBoundingBox computeBounds(NumberVector[] samples) { assert(samples.length > 0) : "Cannot compute bounding box of empty set."; // Compute bounds: final int dimensions = samples[0].getDimensionality(); final double[] min = new double[dimensions]; final double[] max = new double[dimensions]; NumberVector first = samples[0]; for(int d = 0; d < dimensions; d++) { min[d] = max[d] = first.doubleValue(d); } for(int i = 1; i < samples.length; i++) { NumberVector v = samples[i]; for(int d = 0; d < dimensions; d++) { final double c = v.doubleValue(d); min[d] = c < min[d] ? c : min[d]; max[d] = c > max[d] ? c : max[d]; } } return new HyperBoundingBox(min, max); }
java
protected static HyperBoundingBox computeBounds(NumberVector[] samples) { assert(samples.length > 0) : "Cannot compute bounding box of empty set."; // Compute bounds: final int dimensions = samples[0].getDimensionality(); final double[] min = new double[dimensions]; final double[] max = new double[dimensions]; NumberVector first = samples[0]; for(int d = 0; d < dimensions; d++) { min[d] = max[d] = first.doubleValue(d); } for(int i = 1; i < samples.length; i++) { NumberVector v = samples[i]; for(int d = 0; d < dimensions; d++) { final double c = v.doubleValue(d); min[d] = c < min[d] ? c : min[d]; max[d] = c > max[d] ? c : max[d]; } } return new HyperBoundingBox(min, max); }
[ "protected", "static", "HyperBoundingBox", "computeBounds", "(", "NumberVector", "[", "]", "samples", ")", "{", "assert", "(", "samples", ".", "length", ">", "0", ")", ":", "\"Cannot compute bounding box of empty set.\"", ";", "// Compute bounds:", "final", "int", "...
Compute the bounding box for some samples. @param samples Samples @return Bounding box.
[ "Compute", "the", "bounding", "box", "for", "some", "samples", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/addons/uncertain/src/main/java/de/lmu/ifi/dbs/elki/data/uncertain/AbstractUncertainObject.java#L50-L69
train
elki-project/elki
elki-index-preprocessed/src/main/java/de/lmu/ifi/dbs/elki/index/preprocessed/knn/MaterializeKNNPreprocessor.java
MaterializeKNNPreprocessor.preprocess
@Override protected void preprocess() { final Logging log = getLogger(); // Could be subclass createStorage(); ArrayDBIDs ids = DBIDUtil.ensureArray(relation.getDBIDs()); if(log.isStatistics()) { log.statistics(new LongStatistic(this.getClass().getName() + ".k", k)); } Duration duration = log.isStatistics() ? log.newDuration(this.getClass().getName() + ".precomputation-time").begin() : null; FiniteProgress progress = getLogger().isVerbose() ? new FiniteProgress("Materializing k nearest neighbors (k=" + k + ")", ids.size(), getLogger()) : null; // Try bulk List<? extends KNNList> kNNList = null; if(usebulk) { kNNList = knnQuery.getKNNForBulkDBIDs(ids, k); if(kNNList != null) { int i = 0; for(DBIDIter id = ids.iter(); id.valid(); id.advance(), i++) { storage.put(id, kNNList.get(i)); log.incrementProcessed(progress); } } } else { final boolean ismetric = getDistanceQuery().getDistanceFunction().isMetric(); for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { if(ismetric && storage.get(iter) != null) { log.incrementProcessed(progress); continue; // Previously computed (duplicate point?) } KNNList knn = knnQuery.getKNNForDBID(iter, k); storage.put(iter, knn); if(ismetric) { for(DoubleDBIDListIter it = knn.iter(); it.valid() && it.doubleValue() == 0.; it.advance()) { storage.put(it, knn); // Reuse } } log.incrementProcessed(progress); } } log.ensureCompleted(progress); if(duration != null) { log.statistics(duration.end()); } }
java
@Override protected void preprocess() { final Logging log = getLogger(); // Could be subclass createStorage(); ArrayDBIDs ids = DBIDUtil.ensureArray(relation.getDBIDs()); if(log.isStatistics()) { log.statistics(new LongStatistic(this.getClass().getName() + ".k", k)); } Duration duration = log.isStatistics() ? log.newDuration(this.getClass().getName() + ".precomputation-time").begin() : null; FiniteProgress progress = getLogger().isVerbose() ? new FiniteProgress("Materializing k nearest neighbors (k=" + k + ")", ids.size(), getLogger()) : null; // Try bulk List<? extends KNNList> kNNList = null; if(usebulk) { kNNList = knnQuery.getKNNForBulkDBIDs(ids, k); if(kNNList != null) { int i = 0; for(DBIDIter id = ids.iter(); id.valid(); id.advance(), i++) { storage.put(id, kNNList.get(i)); log.incrementProcessed(progress); } } } else { final boolean ismetric = getDistanceQuery().getDistanceFunction().isMetric(); for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { if(ismetric && storage.get(iter) != null) { log.incrementProcessed(progress); continue; // Previously computed (duplicate point?) } KNNList knn = knnQuery.getKNNForDBID(iter, k); storage.put(iter, knn); if(ismetric) { for(DoubleDBIDListIter it = knn.iter(); it.valid() && it.doubleValue() == 0.; it.advance()) { storage.put(it, knn); // Reuse } } log.incrementProcessed(progress); } } log.ensureCompleted(progress); if(duration != null) { log.statistics(duration.end()); } }
[ "@", "Override", "protected", "void", "preprocess", "(", ")", "{", "final", "Logging", "log", "=", "getLogger", "(", ")", ";", "// Could be subclass", "createStorage", "(", ")", ";", "ArrayDBIDs", "ids", "=", "DBIDUtil", ".", "ensureArray", "(", "relation", ...
The actual preprocessing step.
[ "The", "actual", "preprocessing", "step", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-index-preprocessed/src/main/java/de/lmu/ifi/dbs/elki/index/preprocessed/knn/MaterializeKNNPreprocessor.java#L107-L152
train
elki-project/elki
elki-index-preprocessed/src/main/java/de/lmu/ifi/dbs/elki/index/preprocessed/knn/MaterializeKNNPreprocessor.java
MaterializeKNNPreprocessor.objectsInserted
protected void objectsInserted(DBIDs ids) { final Logging log = getLogger(); // Could be subclass StepProgress stepprog = log.isVerbose() ? new StepProgress(3) : null; ArrayDBIDs aids = DBIDUtil.ensureArray(ids); // materialize the new kNNs log.beginStep(stepprog, 1, "New insertions ocurred, materialize their new kNNs."); // Bulk-query kNNs List<? extends KNNList> kNNList = knnQuery.getKNNForBulkDBIDs(aids, k); // Store in storage DBIDIter iter = aids.iter(); for(int i = 0; i < aids.size(); i++, iter.advance()) { storage.put(iter, kNNList.get(i)); } // update the affected kNNs log.beginStep(stepprog, 2, "New insertions ocurred, update the affected kNNs."); ArrayDBIDs rkNN_ids = updateKNNsAfterInsertion(ids); // inform listener log.beginStep(stepprog, 3, "New insertions ocurred, inform listeners."); fireKNNsInserted(ids, rkNN_ids); log.setCompleted(stepprog); }
java
protected void objectsInserted(DBIDs ids) { final Logging log = getLogger(); // Could be subclass StepProgress stepprog = log.isVerbose() ? new StepProgress(3) : null; ArrayDBIDs aids = DBIDUtil.ensureArray(ids); // materialize the new kNNs log.beginStep(stepprog, 1, "New insertions ocurred, materialize their new kNNs."); // Bulk-query kNNs List<? extends KNNList> kNNList = knnQuery.getKNNForBulkDBIDs(aids, k); // Store in storage DBIDIter iter = aids.iter(); for(int i = 0; i < aids.size(); i++, iter.advance()) { storage.put(iter, kNNList.get(i)); } // update the affected kNNs log.beginStep(stepprog, 2, "New insertions ocurred, update the affected kNNs."); ArrayDBIDs rkNN_ids = updateKNNsAfterInsertion(ids); // inform listener log.beginStep(stepprog, 3, "New insertions ocurred, inform listeners."); fireKNNsInserted(ids, rkNN_ids); log.setCompleted(stepprog); }
[ "protected", "void", "objectsInserted", "(", "DBIDs", "ids", ")", "{", "final", "Logging", "log", "=", "getLogger", "(", ")", ";", "// Could be subclass", "StepProgress", "stepprog", "=", "log", ".", "isVerbose", "(", ")", "?", "new", "StepProgress", "(", "3...
Called after new objects have been inserted, updates the materialized neighborhood. @param ids the ids of the newly inserted objects
[ "Called", "after", "new", "objects", "have", "been", "inserted", "updates", "the", "materialized", "neighborhood", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-index-preprocessed/src/main/java/de/lmu/ifi/dbs/elki/index/preprocessed/knn/MaterializeKNNPreprocessor.java#L186-L210
train
elki-project/elki
elki-index-preprocessed/src/main/java/de/lmu/ifi/dbs/elki/index/preprocessed/knn/MaterializeKNNPreprocessor.java
MaterializeKNNPreprocessor.objectsRemoved
protected void objectsRemoved(DBIDs ids) { final Logging log = getLogger(); StepProgress stepprog = log.isVerbose() ? new StepProgress(3) : null; // delete the materialized (old) kNNs log.beginStep(stepprog, 1, "New deletions ocurred, remove their materialized kNNs."); for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { storage.delete(iter); } // update the affected kNNs log.beginStep(stepprog, 2, "New deletions ocurred, update the affected kNNs."); ArrayDBIDs rkNN_ids = updateKNNsAfterDeletion(ids); // inform listener log.beginStep(stepprog, 3, "New deletions ocurred, inform listeners."); fireKNNsRemoved(ids, rkNN_ids); log.ensureCompleted(stepprog); }
java
protected void objectsRemoved(DBIDs ids) { final Logging log = getLogger(); StepProgress stepprog = log.isVerbose() ? new StepProgress(3) : null; // delete the materialized (old) kNNs log.beginStep(stepprog, 1, "New deletions ocurred, remove their materialized kNNs."); for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { storage.delete(iter); } // update the affected kNNs log.beginStep(stepprog, 2, "New deletions ocurred, update the affected kNNs."); ArrayDBIDs rkNN_ids = updateKNNsAfterDeletion(ids); // inform listener log.beginStep(stepprog, 3, "New deletions ocurred, inform listeners."); fireKNNsRemoved(ids, rkNN_ids); log.ensureCompleted(stepprog); }
[ "protected", "void", "objectsRemoved", "(", "DBIDs", "ids", ")", "{", "final", "Logging", "log", "=", "getLogger", "(", ")", ";", "StepProgress", "stepprog", "=", "log", ".", "isVerbose", "(", ")", "?", "new", "StepProgress", "(", "3", ")", ":", "null", ...
Called after objects have been removed, updates the materialized neighborhood. @param ids the ids of the removed objects
[ "Called", "after", "objects", "have", "been", "removed", "updates", "the", "materialized", "neighborhood", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-index-preprocessed/src/main/java/de/lmu/ifi/dbs/elki/index/preprocessed/knn/MaterializeKNNPreprocessor.java#L279-L298
train
elki-project/elki
elki-index-preprocessed/src/main/java/de/lmu/ifi/dbs/elki/index/preprocessed/knn/MaterializeKNNPreprocessor.java
MaterializeKNNPreprocessor.fireKNNsInserted
protected void fireKNNsInserted(DBIDs insertions, DBIDs updates) { KNNChangeEvent e = new KNNChangeEvent(this, KNNChangeEvent.Type.INSERT, insertions, updates); Object[] listeners = listenerList.getListenerList(); for(int i = listeners.length - 2; i >= 0; i -= 2) { if(listeners[i] == KNNListener.class) { ((KNNListener) listeners[i + 1]).kNNsChanged(e); } } }
java
protected void fireKNNsInserted(DBIDs insertions, DBIDs updates) { KNNChangeEvent e = new KNNChangeEvent(this, KNNChangeEvent.Type.INSERT, insertions, updates); Object[] listeners = listenerList.getListenerList(); for(int i = listeners.length - 2; i >= 0; i -= 2) { if(listeners[i] == KNNListener.class) { ((KNNListener) listeners[i + 1]).kNNsChanged(e); } } }
[ "protected", "void", "fireKNNsInserted", "(", "DBIDs", "insertions", ",", "DBIDs", "updates", ")", "{", "KNNChangeEvent", "e", "=", "new", "KNNChangeEvent", "(", "this", ",", "KNNChangeEvent", ".", "Type", ".", "INSERT", ",", "insertions", ",", "updates", ")",...
Informs all registered KNNListener that new kNNs have been inserted and as a result some kNNs have been changed. @param insertions the ids of the newly inserted kNNs @param updates the ids of kNNs which have been changed due to the insertions @see KNNListener
[ "Informs", "all", "registered", "KNNListener", "that", "new", "kNNs", "have", "been", "inserted", "and", "as", "a", "result", "some", "kNNs", "have", "been", "changed", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-index-preprocessed/src/main/java/de/lmu/ifi/dbs/elki/index/preprocessed/knn/MaterializeKNNPreprocessor.java#L309-L317
train
elki-project/elki
elki-index-preprocessed/src/main/java/de/lmu/ifi/dbs/elki/index/preprocessed/knn/MaterializeKNNPreprocessor.java
MaterializeKNNPreprocessor.fireKNNsRemoved
protected void fireKNNsRemoved(DBIDs removals, DBIDs updates) { KNNChangeEvent e = new KNNChangeEvent(this, KNNChangeEvent.Type.DELETE, removals, updates); Object[] listeners = listenerList.getListenerList(); for(int i = listeners.length - 2; i >= 0; i -= 2) { if(listeners[i] == KNNListener.class) { ((KNNListener) listeners[i + 1]).kNNsChanged(e); } } }
java
protected void fireKNNsRemoved(DBIDs removals, DBIDs updates) { KNNChangeEvent e = new KNNChangeEvent(this, KNNChangeEvent.Type.DELETE, removals, updates); Object[] listeners = listenerList.getListenerList(); for(int i = listeners.length - 2; i >= 0; i -= 2) { if(listeners[i] == KNNListener.class) { ((KNNListener) listeners[i + 1]).kNNsChanged(e); } } }
[ "protected", "void", "fireKNNsRemoved", "(", "DBIDs", "removals", ",", "DBIDs", "updates", ")", "{", "KNNChangeEvent", "e", "=", "new", "KNNChangeEvent", "(", "this", ",", "KNNChangeEvent", ".", "Type", ".", "DELETE", ",", "removals", ",", "updates", ")", ";...
Informs all registered KNNListener that existing kNNs have been removed and as a result some kNNs have been changed. @param removals the ids of the removed kNNs @param updates the ids of kNNs which have been changed due to the removals @see KNNListener
[ "Informs", "all", "registered", "KNNListener", "that", "existing", "kNNs", "have", "been", "removed", "and", "as", "a", "result", "some", "kNNs", "have", "been", "changed", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-index-preprocessed/src/main/java/de/lmu/ifi/dbs/elki/index/preprocessed/knn/MaterializeKNNPreprocessor.java#L327-L335
train
elki-project/elki
elki-classification/src/main/java/de/lmu/ifi/dbs/elki/algorithm/classification/PriorProbabilityClassifier.java
PriorProbabilityClassifier.buildClassifier
@Override public void buildClassifier(Database database, Relation<? extends ClassLabel> labelrep) { Object2IntOpenHashMap<ClassLabel> count = new Object2IntOpenHashMap<>(); for(DBIDIter iter = labelrep.iterDBIDs(); iter.valid(); iter.advance()) { count.addTo(labelrep.get(iter), 1); } int max = Integer.MIN_VALUE; double size = labelrep.size(); distribution = new double[count.size()]; labels = new ArrayList<>(count.size()); ObjectIterator<Entry<ClassLabel>> iter = count.object2IntEntrySet().fastIterator(); for(int i = 0; iter.hasNext(); ++i) { Entry<ClassLabel> entry = iter.next(); distribution[i] = entry.getIntValue() / size; labels.add(entry.getKey()); if(entry.getIntValue() > max) { max = entry.getIntValue(); prediction = entry.getKey(); } } }
java
@Override public void buildClassifier(Database database, Relation<? extends ClassLabel> labelrep) { Object2IntOpenHashMap<ClassLabel> count = new Object2IntOpenHashMap<>(); for(DBIDIter iter = labelrep.iterDBIDs(); iter.valid(); iter.advance()) { count.addTo(labelrep.get(iter), 1); } int max = Integer.MIN_VALUE; double size = labelrep.size(); distribution = new double[count.size()]; labels = new ArrayList<>(count.size()); ObjectIterator<Entry<ClassLabel>> iter = count.object2IntEntrySet().fastIterator(); for(int i = 0; iter.hasNext(); ++i) { Entry<ClassLabel> entry = iter.next(); distribution[i] = entry.getIntValue() / size; labels.add(entry.getKey()); if(entry.getIntValue() > max) { max = entry.getIntValue(); prediction = entry.getKey(); } } }
[ "@", "Override", "public", "void", "buildClassifier", "(", "Database", "database", ",", "Relation", "<", "?", "extends", "ClassLabel", ">", "labelrep", ")", "{", "Object2IntOpenHashMap", "<", "ClassLabel", ">", "count", "=", "new", "Object2IntOpenHashMap", "<>", ...
Learns the prior probability for all classes.
[ "Learns", "the", "prior", "probability", "for", "all", "classes", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-classification/src/main/java/de/lmu/ifi/dbs/elki/algorithm/classification/PriorProbabilityClassifier.java#L79-L100
train
elki-project/elki
elki-index-mtree/src/main/java/de/lmu/ifi/dbs/elki/index/tree/metrical/mtreevariants/strategies/split/MMRadSplit.java
MMRadSplit.split
@Override public Assignments<E> split(AbstractMTree<?, N, E, ?> tree, N node) { final int n = node.getNumEntries(); double[][] distanceMatrix = computeDistanceMatrix(tree, node); double miSumCR = Double.POSITIVE_INFINITY; boolean leaf = node.isLeaf(); Assignments<E> bestAssignment = null; for(int i = 0; i < n; i++) { for(int j = i + 1; j < n; j++) { Assignments<E> currentAssignments = distributor.distribute(node, i, distanceMatrix[i], j, distanceMatrix[j]); double maxCR = Math.max(currentAssignments.computeFirstCover(leaf), currentAssignments.computeSecondCover(leaf)); if(maxCR < miSumCR) { miSumCR = maxCR; bestAssignment = currentAssignments; } } } return bestAssignment; }
java
@Override public Assignments<E> split(AbstractMTree<?, N, E, ?> tree, N node) { final int n = node.getNumEntries(); double[][] distanceMatrix = computeDistanceMatrix(tree, node); double miSumCR = Double.POSITIVE_INFINITY; boolean leaf = node.isLeaf(); Assignments<E> bestAssignment = null; for(int i = 0; i < n; i++) { for(int j = i + 1; j < n; j++) { Assignments<E> currentAssignments = distributor.distribute(node, i, distanceMatrix[i], j, distanceMatrix[j]); double maxCR = Math.max(currentAssignments.computeFirstCover(leaf), currentAssignments.computeSecondCover(leaf)); if(maxCR < miSumCR) { miSumCR = maxCR; bestAssignment = currentAssignments; } } } return bestAssignment; }
[ "@", "Override", "public", "Assignments", "<", "E", ">", "split", "(", "AbstractMTree", "<", "?", ",", "N", ",", "E", ",", "?", ">", "tree", ",", "N", "node", ")", "{", "final", "int", "n", "=", "node", ".", "getNumEntries", "(", ")", ";", "doubl...
Selects two objects of the specified node to be promoted and stored into the parent node. The mM-RAD strategy considers all possible pairs of objects and, after partitioning the set of entries, promotes the pair of objects for which the larger of the two covering radiuses is minimum. @param tree Tree to use @param node the node to be split
[ "Selects", "two", "objects", "of", "the", "specified", "node", "to", "be", "promoted", "and", "stored", "into", "the", "parent", "node", ".", "The", "mM", "-", "RAD", "strategy", "considers", "all", "possible", "pairs", "of", "objects", "and", "after", "pa...
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-index-mtree/src/main/java/de/lmu/ifi/dbs/elki/index/tree/metrical/mtreevariants/strategies/split/MMRadSplit.java#L70-L90
train
elki-project/elki
elki-clustering/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/kmeans/initialization/AbstractKMeansInitialization.java
AbstractKMeansInitialization.unboxVectors
public static double[][] unboxVectors(List<? extends NumberVector> means) { double[][] ret = new double[means.size()][]; for(int i = 0; i < ret.length; i++) { ret[i] = means.get(i).toArray(); } return ret; }
java
public static double[][] unboxVectors(List<? extends NumberVector> means) { double[][] ret = new double[means.size()][]; for(int i = 0; i < ret.length; i++) { ret[i] = means.get(i).toArray(); } return ret; }
[ "public", "static", "double", "[", "]", "[", "]", "unboxVectors", "(", "List", "<", "?", "extends", "NumberVector", ">", "means", ")", "{", "double", "[", "]", "[", "]", "ret", "=", "new", "double", "[", "means", ".", "size", "(", ")", "]", "[", ...
Unbox database means to primitive means. @param means Database means @return List of primitive {@code double[]} means
[ "Unbox", "database", "means", "to", "primitive", "means", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-clustering/src/main/java/de/lmu/ifi/dbs/elki/algorithm/clustering/kmeans/initialization/AbstractKMeansInitialization.java#L59-L65
train
elki-project/elki
elki-core-math/src/main/java/de/lmu/ifi/dbs/elki/math/PearsonCorrelation.java
PearsonCorrelation.put
public void put(double x, double y, double w) { if(w == 0.) { return; } if(sumWe <= 0.) { sumX = x * w; sumY = y * w; sumWe = w; return; } // Delta to previous mean final double deltaX = x * sumWe - sumX; final double deltaY = y * sumWe - sumY; final double oldWe = sumWe; // Incremental update sumWe += w; final double f = w / (sumWe * oldWe); // Update sumXX += f * deltaX * deltaX; sumYY += f * deltaY * deltaY; // should equal weight * deltaY * neltaX! sumXY += f * deltaX * deltaY; // Update means sumX += x * w; sumY += y * w; }
java
public void put(double x, double y, double w) { if(w == 0.) { return; } if(sumWe <= 0.) { sumX = x * w; sumY = y * w; sumWe = w; return; } // Delta to previous mean final double deltaX = x * sumWe - sumX; final double deltaY = y * sumWe - sumY; final double oldWe = sumWe; // Incremental update sumWe += w; final double f = w / (sumWe * oldWe); // Update sumXX += f * deltaX * deltaX; sumYY += f * deltaY * deltaY; // should equal weight * deltaY * neltaX! sumXY += f * deltaX * deltaY; // Update means sumX += x * w; sumY += y * w; }
[ "public", "void", "put", "(", "double", "x", ",", "double", "y", ",", "double", "w", ")", "{", "if", "(", "w", "==", "0.", ")", "{", "return", ";", "}", "if", "(", "sumWe", "<=", "0.", ")", "{", "sumX", "=", "x", "*", "w", ";", "sumY", "=",...
Put a single value into the correlation statistic. @param x Value in X @param y Value in Y @param w Weight
[ "Put", "a", "single", "value", "into", "the", "correlation", "statistic", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-core-math/src/main/java/de/lmu/ifi/dbs/elki/math/PearsonCorrelation.java#L72-L97
train
elki-project/elki
elki-core-math/src/main/java/de/lmu/ifi/dbs/elki/math/PearsonCorrelation.java
PearsonCorrelation.getCorrelation
public double getCorrelation() { if(!(sumXX > 0. && sumYY > 0.)) { return (sumXX == sumYY) ? 1. : 0.; } return sumXY / FastMath.sqrt(sumXX * sumYY); }
java
public double getCorrelation() { if(!(sumXX > 0. && sumYY > 0.)) { return (sumXX == sumYY) ? 1. : 0.; } return sumXY / FastMath.sqrt(sumXX * sumYY); }
[ "public", "double", "getCorrelation", "(", ")", "{", "if", "(", "!", "(", "sumXX", ">", "0.", "&&", "sumYY", ">", "0.", ")", ")", "{", "return", "(", "sumXX", "==", "sumYY", ")", "?", "1.", ":", "0.", ";", "}", "return", "sumXY", "/", "FastMath",...
Get the Pearson correlation value. @return Correlation value
[ "Get", "the", "Pearson", "correlation", "value", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-core-math/src/main/java/de/lmu/ifi/dbs/elki/math/PearsonCorrelation.java#L134-L139
train
elki-project/elki
elki-core-math/src/main/java/de/lmu/ifi/dbs/elki/math/PearsonCorrelation.java
PearsonCorrelation.coefficient
public static double coefficient(double[] x, double[] y) { final int xdim = x.length; final int ydim = y.length; if(xdim != ydim) { throw new IllegalArgumentException("Invalid arguments: arrays differ in length."); } if(xdim == 0) { throw new IllegalArgumentException("Empty vector."); } // Inlined computation of Pearson correlation, to avoid allocating objects! // This is a numerically stabilized version, avoiding sum-of-squares. double sumXX = 0., sumYY = 0., sumXY = 0.; double sumX = x[0], sumY = y[0]; int i = 1; while(i < xdim) { final double xv = x[i], yv = y[i]; // Delta to previous mean final double deltaX = xv * i - sumX; final double deltaY = yv * i - sumY; // Increment count first final double oldi = i; // Convert to double! ++i; final double f = 1. / (i * oldi); // Update sumXX += f * deltaX * deltaX; sumYY += f * deltaY * deltaY; // should equal deltaY * neltaX! sumXY += f * deltaX * deltaY; // Update sums sumX += xv; sumY += yv; } // One or both series were constant: if(!(sumXX > 0. && sumYY > 0.)) { return (sumXX == sumYY) ? 1. : 0.; } return sumXY / FastMath.sqrt(sumXX * sumYY); }
java
public static double coefficient(double[] x, double[] y) { final int xdim = x.length; final int ydim = y.length; if(xdim != ydim) { throw new IllegalArgumentException("Invalid arguments: arrays differ in length."); } if(xdim == 0) { throw new IllegalArgumentException("Empty vector."); } // Inlined computation of Pearson correlation, to avoid allocating objects! // This is a numerically stabilized version, avoiding sum-of-squares. double sumXX = 0., sumYY = 0., sumXY = 0.; double sumX = x[0], sumY = y[0]; int i = 1; while(i < xdim) { final double xv = x[i], yv = y[i]; // Delta to previous mean final double deltaX = xv * i - sumX; final double deltaY = yv * i - sumY; // Increment count first final double oldi = i; // Convert to double! ++i; final double f = 1. / (i * oldi); // Update sumXX += f * deltaX * deltaX; sumYY += f * deltaY * deltaY; // should equal deltaY * neltaX! sumXY += f * deltaX * deltaY; // Update sums sumX += xv; sumY += yv; } // One or both series were constant: if(!(sumXX > 0. && sumYY > 0.)) { return (sumXX == sumYY) ? 1. : 0.; } return sumXY / FastMath.sqrt(sumXX * sumYY); }
[ "public", "static", "double", "coefficient", "(", "double", "[", "]", "x", ",", "double", "[", "]", "y", ")", "{", "final", "int", "xdim", "=", "x", ".", "length", ";", "final", "int", "ydim", "=", "y", ".", "length", ";", "if", "(", "xdim", "!="...
Compute the Pearson product-moment correlation coefficient for two FeatureVectors. @param x first FeatureVector @param y second FeatureVector @return the Pearson product-moment correlation coefficient for x and y
[ "Compute", "the", "Pearson", "product", "-", "moment", "correlation", "coefficient", "for", "two", "FeatureVectors", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-core-math/src/main/java/de/lmu/ifi/dbs/elki/math/PearsonCorrelation.java#L284-L321
train
elki-project/elki
elki-core-math/src/main/java/de/lmu/ifi/dbs/elki/math/PearsonCorrelation.java
PearsonCorrelation.weightedCoefficient
public static double weightedCoefficient(NumberVector x, NumberVector y, double[] weights) { final int xdim = x.getDimensionality(); final int ydim = y.getDimensionality(); if(xdim != ydim) { throw new IllegalArgumentException("Invalid arguments: number vectors differ in dimensionality."); } if(xdim != weights.length) { throw new IllegalArgumentException("Dimensionality doesn't agree to weights."); } if(xdim == 0) { throw new IllegalArgumentException("Empty vector."); } // Inlined computation of Pearson correlation, to avoid allocating objects! // This is a numerically stabilized version, avoiding sum-of-squares. double sumXX = 0., sumYY = 0., sumXY = 0., sumWe = weights[0]; double sumX = x.doubleValue(0) * sumWe, sumY = y.doubleValue(0) * sumWe; for(int i = 1; i < xdim; ++i) { final double xv = x.doubleValue(i), yv = y.doubleValue(i), w = weights[i]; // Delta to previous mean final double deltaX = xv * sumWe - sumX; final double deltaY = yv * sumWe - sumY; // Increment count first final double oldWe = sumWe; // Convert to double! sumWe += w; final double f = w / (sumWe * oldWe); // Update sumXX += f * deltaX * deltaX; sumYY += f * deltaY * deltaY; // should equal deltaY * neltaX! sumXY += f * deltaX * deltaY; // Update sums sumX += xv * w; sumY += yv * w; } // One or both series were constant: if(!(sumXX > 0. && sumYY > 0.)) { return (sumXX == sumYY) ? 1. : 0.; } return sumXY / FastMath.sqrt(sumXX * sumYY); }
java
public static double weightedCoefficient(NumberVector x, NumberVector y, double[] weights) { final int xdim = x.getDimensionality(); final int ydim = y.getDimensionality(); if(xdim != ydim) { throw new IllegalArgumentException("Invalid arguments: number vectors differ in dimensionality."); } if(xdim != weights.length) { throw new IllegalArgumentException("Dimensionality doesn't agree to weights."); } if(xdim == 0) { throw new IllegalArgumentException("Empty vector."); } // Inlined computation of Pearson correlation, to avoid allocating objects! // This is a numerically stabilized version, avoiding sum-of-squares. double sumXX = 0., sumYY = 0., sumXY = 0., sumWe = weights[0]; double sumX = x.doubleValue(0) * sumWe, sumY = y.doubleValue(0) * sumWe; for(int i = 1; i < xdim; ++i) { final double xv = x.doubleValue(i), yv = y.doubleValue(i), w = weights[i]; // Delta to previous mean final double deltaX = xv * sumWe - sumX; final double deltaY = yv * sumWe - sumY; // Increment count first final double oldWe = sumWe; // Convert to double! sumWe += w; final double f = w / (sumWe * oldWe); // Update sumXX += f * deltaX * deltaX; sumYY += f * deltaY * deltaY; // should equal deltaY * neltaX! sumXY += f * deltaX * deltaY; // Update sums sumX += xv * w; sumY += yv * w; } // One or both series were constant: if(!(sumXX > 0. && sumYY > 0.)) { return (sumXX == sumYY) ? 1. : 0.; } return sumXY / FastMath.sqrt(sumXX * sumYY); }
[ "public", "static", "double", "weightedCoefficient", "(", "NumberVector", "x", ",", "NumberVector", "y", ",", "double", "[", "]", "weights", ")", "{", "final", "int", "xdim", "=", "x", ".", "getDimensionality", "(", ")", ";", "final", "int", "ydim", "=", ...
Compute the Pearson product-moment correlation coefficient for two NumberVectors. @param x first NumberVector @param y second NumberVector @param weights Weights @return the Pearson product-moment correlation coefficient for x and y
[ "Compute", "the", "Pearson", "product", "-", "moment", "correlation", "coefficient", "for", "two", "NumberVectors", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-core-math/src/main/java/de/lmu/ifi/dbs/elki/math/PearsonCorrelation.java#L429-L468
train
elki-project/elki
elki-core-util/src/main/java/de/lmu/ifi/dbs/elki/utilities/datastructures/arraylike/ExtendedArray.java
ExtendedArray.extend
@SuppressWarnings("unchecked") public static <T, A> ExtendedArray<T> extend(A array, ArrayAdapter<T, A> getter, T extra) { return new ExtendedArray<>(array, (ArrayAdapter<T, Object>) getter, extra); }
java
@SuppressWarnings("unchecked") public static <T, A> ExtendedArray<T> extend(A array, ArrayAdapter<T, A> getter, T extra) { return new ExtendedArray<>(array, (ArrayAdapter<T, Object>) getter, extra); }
[ "@", "SuppressWarnings", "(", "\"unchecked\"", ")", "public", "static", "<", "T", ",", "A", ">", "ExtendedArray", "<", "T", ">", "extend", "(", "A", "array", ",", "ArrayAdapter", "<", "T", ",", "A", ">", "getter", ",", "T", "extra", ")", "{", "return...
Static wrapper that has a nicer generics signature. @param array Array to extend @param getter Getter for array @param extra Extra element @return Extended array
[ "Static", "wrapper", "that", "has", "a", "nicer", "generics", "signature", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-core-util/src/main/java/de/lmu/ifi/dbs/elki/utilities/datastructures/arraylike/ExtendedArray.java#L90-L93
train
elki-project/elki
elki/src/main/java/de/lmu/ifi/dbs/elki/result/SelectionResult.java
SelectionResult.ensureSelectionResult
public static SelectionResult ensureSelectionResult(final Database db) { List<SelectionResult> selections = ResultUtil.filterResults(db.getHierarchy(), db, SelectionResult.class); if(!selections.isEmpty()) { return selections.get(0); } SelectionResult sel = new SelectionResult(); ResultUtil.addChildResult(db, sel); return sel; }
java
public static SelectionResult ensureSelectionResult(final Database db) { List<SelectionResult> selections = ResultUtil.filterResults(db.getHierarchy(), db, SelectionResult.class); if(!selections.isEmpty()) { return selections.get(0); } SelectionResult sel = new SelectionResult(); ResultUtil.addChildResult(db, sel); return sel; }
[ "public", "static", "SelectionResult", "ensureSelectionResult", "(", "final", "Database", "db", ")", "{", "List", "<", "SelectionResult", ">", "selections", "=", "ResultUtil", ".", "filterResults", "(", "db", ".", "getHierarchy", "(", ")", ",", "db", ",", "Sel...
Ensure that there also is a selection container object. @param db Database @return selection result
[ "Ensure", "that", "there", "also", "is", "a", "selection", "container", "object", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki/src/main/java/de/lmu/ifi/dbs/elki/result/SelectionResult.java#L81-L89
train
elki-project/elki
addons/3dpc/src/main/java/de/lmu/ifi/dbs/elki/visualization/parallel3d/util/Arcball1DOFAdapter.java
Arcball1DOFAdapter.debugRender
@SuppressWarnings("unused") public void debugRender(GL2 gl) { if (!DEBUG || (startcamera == null)) { return; } gl.glLineWidth(3f); gl.glColor4f(1.f, 0.f, 0.f, .66f); gl.glBegin(GL.GL_LINES); gl.glVertex3f(0.f, 0.f, 0.f); double rot = startangle - startcamera.getRotationZ(); gl.glVertex3f((float) FastMath.cos(rot) * 4.f, (float) -FastMath.sin(rot) * 4.f, 0.f); gl.glVertex3f((float) FastMath.cos(rot) * 1.f, (float) -FastMath.sin(rot) * 1.f, 0.f); gl.glVertex3f((float) FastMath.cos(rot) * 1.f, (float) -FastMath.sin(rot) * 1.f, 1.f); gl.glEnd(); }
java
@SuppressWarnings("unused") public void debugRender(GL2 gl) { if (!DEBUG || (startcamera == null)) { return; } gl.glLineWidth(3f); gl.glColor4f(1.f, 0.f, 0.f, .66f); gl.glBegin(GL.GL_LINES); gl.glVertex3f(0.f, 0.f, 0.f); double rot = startangle - startcamera.getRotationZ(); gl.glVertex3f((float) FastMath.cos(rot) * 4.f, (float) -FastMath.sin(rot) * 4.f, 0.f); gl.glVertex3f((float) FastMath.cos(rot) * 1.f, (float) -FastMath.sin(rot) * 1.f, 0.f); gl.glVertex3f((float) FastMath.cos(rot) * 1.f, (float) -FastMath.sin(rot) * 1.f, 1.f); gl.glEnd(); }
[ "@", "SuppressWarnings", "(", "\"unused\"", ")", "public", "void", "debugRender", "(", "GL2", "gl", ")", "{", "if", "(", "!", "DEBUG", "||", "(", "startcamera", "==", "null", ")", ")", "{", "return", ";", "}", "gl", ".", "glLineWidth", "(", "3f", ")"...
Render a debugging hint for the arcball tool. @param gl GL class for rendering-
[ "Render", "a", "debugging", "hint", "for", "the", "arcball", "tool", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/addons/3dpc/src/main/java/de/lmu/ifi/dbs/elki/visualization/parallel3d/util/Arcball1DOFAdapter.java#L150-L164
train
elki-project/elki
elki/src/main/java/de/lmu/ifi/dbs/elki/result/SettingsResult.java
SettingsResult.getSettingsResults
public static List<SettingsResult> getSettingsResults(Result r) { if(r instanceof SettingsResult) { List<SettingsResult> ors = new ArrayList<>(1); ors.add((SettingsResult) r); return ors; } if(r instanceof HierarchicalResult) { return ResultUtil.filterResults(((HierarchicalResult) r).getHierarchy(), r, SettingsResult.class); } return Collections.emptyList(); }
java
public static List<SettingsResult> getSettingsResults(Result r) { if(r instanceof SettingsResult) { List<SettingsResult> ors = new ArrayList<>(1); ors.add((SettingsResult) r); return ors; } if(r instanceof HierarchicalResult) { return ResultUtil.filterResults(((HierarchicalResult) r).getHierarchy(), r, SettingsResult.class); } return Collections.emptyList(); }
[ "public", "static", "List", "<", "SettingsResult", ">", "getSettingsResults", "(", "Result", "r", ")", "{", "if", "(", "r", "instanceof", "SettingsResult", ")", "{", "List", "<", "SettingsResult", ">", "ors", "=", "new", "ArrayList", "<>", "(", "1", ")", ...
Collect all settings results from a Result @param r Result @return List of settings results
[ "Collect", "all", "settings", "results", "from", "a", "Result" ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki/src/main/java/de/lmu/ifi/dbs/elki/result/SettingsResult.java#L68-L78
train
elki-project/elki
elki-core-api/src/main/java/de/lmu/ifi/dbs/elki/application/AbstractApplication.java
AbstractApplication.usage
public static String usage(Collection<TrackedParameter> options) { StringBuilder usage = new StringBuilder(10000); if(!REFERENCE_VERSION.equals(VERSION)) { usage.append("ELKI build: ").append(VERSION).append(NEWLINE).append(NEWLINE); } usage.append(REFERENCE); // Collect options OptionUtil.formatForConsole(usage.append(NEWLINE).append("Parameters:").append(NEWLINE), // FormatUtil.getConsoleWidth(), options); return usage.toString(); }
java
public static String usage(Collection<TrackedParameter> options) { StringBuilder usage = new StringBuilder(10000); if(!REFERENCE_VERSION.equals(VERSION)) { usage.append("ELKI build: ").append(VERSION).append(NEWLINE).append(NEWLINE); } usage.append(REFERENCE); // Collect options OptionUtil.formatForConsole(usage.append(NEWLINE).append("Parameters:").append(NEWLINE), // FormatUtil.getConsoleWidth(), options); return usage.toString(); }
[ "public", "static", "String", "usage", "(", "Collection", "<", "TrackedParameter", ">", "options", ")", "{", "StringBuilder", "usage", "=", "new", "StringBuilder", "(", "10000", ")", ";", "if", "(", "!", "REFERENCE_VERSION", ".", "equals", "(", "VERSION", ")...
Returns a usage message, explaining all known options @param options Options to show in usage. @return a usage message explaining all known options
[ "Returns", "a", "usage", "message", "explaining", "all", "known", "options" ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-core-api/src/main/java/de/lmu/ifi/dbs/elki/application/AbstractApplication.java#L195-L206
train
elki-project/elki
elki-core-api/src/main/java/de/lmu/ifi/dbs/elki/application/AbstractApplication.java
AbstractApplication.printErrorMessage
protected static void printErrorMessage(Exception e) { if(e instanceof AbortException) { // ensure we actually show the message: LoggingConfiguration.setVerbose(Level.VERBOSE); LOG.verbose(e.getMessage()); } else if(e instanceof UnspecifiedParameterException) { LOG.error(e.getMessage()); } else if(e instanceof ParameterException) { LOG.error(e.getMessage()); } else { LOG.exception(e); } }
java
protected static void printErrorMessage(Exception e) { if(e instanceof AbortException) { // ensure we actually show the message: LoggingConfiguration.setVerbose(Level.VERBOSE); LOG.verbose(e.getMessage()); } else if(e instanceof UnspecifiedParameterException) { LOG.error(e.getMessage()); } else if(e instanceof ParameterException) { LOG.error(e.getMessage()); } else { LOG.exception(e); } }
[ "protected", "static", "void", "printErrorMessage", "(", "Exception", "e", ")", "{", "if", "(", "e", "instanceof", "AbortException", ")", "{", "// ensure we actually show the message:", "LoggingConfiguration", ".", "setVerbose", "(", "Level", ".", "VERBOSE", ")", ";...
Print an error message for the given error. @param e Error Exception.
[ "Print", "an", "error", "message", "for", "the", "given", "error", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-core-api/src/main/java/de/lmu/ifi/dbs/elki/application/AbstractApplication.java#L213-L228
train
elki-project/elki
elki-core-api/src/main/java/de/lmu/ifi/dbs/elki/application/AbstractApplication.java
AbstractApplication.printDescription
private static void printDescription(Class<?> descriptionClass) { if(descriptionClass == null) { return; } try { LoggingConfiguration.setVerbose(Level.VERBOSE); LOG.verbose(OptionUtil.describeParameterizable(new StringBuilder(), descriptionClass, FormatUtil.getConsoleWidth(), "").toString()); } catch(Exception e) { LOG.exception("Error instantiating class to describe.", e.getCause()); } }
java
private static void printDescription(Class<?> descriptionClass) { if(descriptionClass == null) { return; } try { LoggingConfiguration.setVerbose(Level.VERBOSE); LOG.verbose(OptionUtil.describeParameterizable(new StringBuilder(), descriptionClass, FormatUtil.getConsoleWidth(), "").toString()); } catch(Exception e) { LOG.exception("Error instantiating class to describe.", e.getCause()); } }
[ "private", "static", "void", "printDescription", "(", "Class", "<", "?", ">", "descriptionClass", ")", "{", "if", "(", "descriptionClass", "==", "null", ")", "{", "return", ";", "}", "try", "{", "LoggingConfiguration", ".", "setVerbose", "(", "Level", ".", ...
Print the description for the given parameter
[ "Print", "the", "description", "for", "the", "given", "parameter" ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-core-api/src/main/java/de/lmu/ifi/dbs/elki/application/AbstractApplication.java#L233-L244
train
elki-project/elki
elki-core-math/src/main/java/de/lmu/ifi/dbs/elki/math/geometry/PrimsMinimumSpanningTree.java
PrimsMinimumSpanningTree.processDense
public static <T> void processDense(T data, Adapter<T> adapter, Collector collector) { // Number of nodes final int n = adapter.size(data); // Best distance for each node double[] best = new double[n]; Arrays.fill(best, Double.POSITIVE_INFINITY); // Best previous node int[] src = new int[n]; // Nodes already handled // byte[] uses more memory, but it will be faster. byte[] connected = new byte[n]; // We always start at "random" node 0 // Note: we use this below in the "j" loop! int current = 0; connected[current] = 1; best[current] = 0; // Search for(int i = n - 2; i >= 0; i--) { // Update best and src from current: int newbesti = -1; double newbestd = Double.POSITIVE_INFINITY; // Note: we assume we started with 0, and can thus skip it for(int j = 0; j < n; ++j) { if(connected[j] == 1) { continue; } final double dist = adapter.distance(data, current, j); if(dist < best[j]) { best[j] = dist; src[j] = current; } if(best[j] < newbestd || newbesti == -1) { newbestd = best[j]; newbesti = j; } } assert (newbesti >= 0); // Flag connected[newbesti] = 1; // Store edge collector.addEdge(newbestd, src[newbesti], newbesti); // Continue current = newbesti; } }
java
public static <T> void processDense(T data, Adapter<T> adapter, Collector collector) { // Number of nodes final int n = adapter.size(data); // Best distance for each node double[] best = new double[n]; Arrays.fill(best, Double.POSITIVE_INFINITY); // Best previous node int[] src = new int[n]; // Nodes already handled // byte[] uses more memory, but it will be faster. byte[] connected = new byte[n]; // We always start at "random" node 0 // Note: we use this below in the "j" loop! int current = 0; connected[current] = 1; best[current] = 0; // Search for(int i = n - 2; i >= 0; i--) { // Update best and src from current: int newbesti = -1; double newbestd = Double.POSITIVE_INFINITY; // Note: we assume we started with 0, and can thus skip it for(int j = 0; j < n; ++j) { if(connected[j] == 1) { continue; } final double dist = adapter.distance(data, current, j); if(dist < best[j]) { best[j] = dist; src[j] = current; } if(best[j] < newbestd || newbesti == -1) { newbestd = best[j]; newbesti = j; } } assert (newbesti >= 0); // Flag connected[newbesti] = 1; // Store edge collector.addEdge(newbestd, src[newbesti], newbesti); // Continue current = newbesti; } }
[ "public", "static", "<", "T", ">", "void", "processDense", "(", "T", "data", ",", "Adapter", "<", "T", ">", "adapter", ",", "Collector", "collector", ")", "{", "// Number of nodes", "final", "int", "n", "=", "adapter", ".", "size", "(", "data", ")", ";...
Run Prim's algorithm on a dense graph. @param data Data set @param adapter Adapter instance @param collector Edge collector
[ "Run", "Prim", "s", "algorithm", "on", "a", "dense", "graph", "." ]
b54673327e76198ecd4c8a2a901021f1a9174498
https://github.com/elki-project/elki/blob/b54673327e76198ecd4c8a2a901021f1a9174498/elki-core-math/src/main/java/de/lmu/ifi/dbs/elki/math/geometry/PrimsMinimumSpanningTree.java#L131-L177
train
SonarOpenCommunity/sonar-cxx
cxx-sensors/src/main/java/org/sonar/cxx/sensors/compiler/CxxCompilerSensor.java
CxxCompilerSensor.isInputValid
protected boolean isInputValid(String filename, String line, String id, String msg) { return !filename.isEmpty() || !line.isEmpty() || !id.isEmpty() || !msg.isEmpty(); }
java
protected boolean isInputValid(String filename, String line, String id, String msg) { return !filename.isEmpty() || !line.isEmpty() || !id.isEmpty() || !msg.isEmpty(); }
[ "protected", "boolean", "isInputValid", "(", "String", "filename", ",", "String", "line", ",", "String", "id", ",", "String", "msg", ")", "{", "return", "!", "filename", ".", "isEmpty", "(", ")", "||", "!", "line", ".", "isEmpty", "(", ")", "||", "!", ...
Derived classes can overload this method @param filename @param line @param id @param msg @return true, if valid
[ "Derived", "classes", "can", "overload", "this", "method" ]
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-sensors/src/main/java/org/sonar/cxx/sensors/compiler/CxxCompilerSensor.java#L116-L118
train
SonarOpenCommunity/sonar-cxx
cxx-squid/src/main/java/org/sonar/cxx/CxxVCppBuildLogParser.java
CxxVCppBuildLogParser.parseVCppLine
public void parseVCppLine(String line, String projectPath, String compilationFile) { this.parseVCppCompilerCLLine(line, projectPath, compilationFile); }
java
public void parseVCppLine(String line, String projectPath, String compilationFile) { this.parseVCppCompilerCLLine(line, projectPath, compilationFile); }
[ "public", "void", "parseVCppLine", "(", "String", "line", ",", "String", "projectPath", ",", "String", "compilationFile", ")", "{", "this", ".", "parseVCppCompilerCLLine", "(", "line", ",", "projectPath", ",", "compilationFile", ")", ";", "}" ]
Can be used to create a list of includes, defines and options for a single line If it follows the format of VC++ @param line @param projectPath @param compilationFile
[ "Can", "be", "used", "to", "create", "a", "list", "of", "includes", "defines", "and", "options", "for", "a", "single", "line", "If", "it", "follows", "the", "format", "of", "VC", "++" ]
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-squid/src/main/java/org/sonar/cxx/CxxVCppBuildLogParser.java#L111-L113
train
SonarOpenCommunity/sonar-cxx
cxx-sensors/src/main/java/org/sonar/cxx/sensors/valgrind/ValgrindReportParser.java
ValgrindReportParser.processReport
public Set<ValgrindError> processReport(File report) throws XMLStreamException { ValgrindReportStreamHandler streamHandler = new ValgrindReportStreamHandler(); new StaxParser(streamHandler).parse(report); return streamHandler.valgrindErrors; }
java
public Set<ValgrindError> processReport(File report) throws XMLStreamException { ValgrindReportStreamHandler streamHandler = new ValgrindReportStreamHandler(); new StaxParser(streamHandler).parse(report); return streamHandler.valgrindErrors; }
[ "public", "Set", "<", "ValgrindError", ">", "processReport", "(", "File", "report", ")", "throws", "XMLStreamException", "{", "ValgrindReportStreamHandler", "streamHandler", "=", "new", "ValgrindReportStreamHandler", "(", ")", ";", "new", "StaxParser", "(", "streamHan...
Parses given valgrind report @param report full path of XML report @return Set<ValgrindError> @exception XMLStreamException javax.xml.stream.XMLStreamException
[ "Parses", "given", "valgrind", "report" ]
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-sensors/src/main/java/org/sonar/cxx/sensors/valgrind/ValgrindReportParser.java#L46-L50
train
SonarOpenCommunity/sonar-cxx
cxx-checks/src/main/java/org/sonar/cxx/checks/metrics/TooManyStatementsPerLineCheck.java
TooManyStatementsPerLineCheck.isGeneratedNodeExcluded
private static boolean isGeneratedNodeExcluded(AstNode astNode) { AstNode prev = astNode.getPreviousAstNode(); return prev != null && prev.getTokenLine() == astNode.getTokenLine() && prev.isCopyBookOrGeneratedNode(); }
java
private static boolean isGeneratedNodeExcluded(AstNode astNode) { AstNode prev = astNode.getPreviousAstNode(); return prev != null && prev.getTokenLine() == astNode.getTokenLine() && prev.isCopyBookOrGeneratedNode(); }
[ "private", "static", "boolean", "isGeneratedNodeExcluded", "(", "AstNode", "astNode", ")", "{", "AstNode", "prev", "=", "astNode", ".", "getPreviousAstNode", "(", ")", ";", "return", "prev", "!=", "null", "&&", "prev", ".", "getTokenLine", "(", ")", "==", "a...
Exclude subsequent generated nodes, if they are consecutive and on the same line.
[ "Exclude", "subsequent", "generated", "nodes", "if", "they", "are", "consecutive", "and", "on", "the", "same", "line", "." ]
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-checks/src/main/java/org/sonar/cxx/checks/metrics/TooManyStatementsPerLineCheck.java#L60-L65
train
SonarOpenCommunity/sonar-cxx
cxx-checks/src/main/java/org/sonar/cxx/checks/metrics/TooManyStatementsPerLineCheck.java
TooManyStatementsPerLineCheck.isBreakStatementExcluded
private boolean isBreakStatementExcluded(AstNode astNode) { boolean exclude = false; if (excludeCaseBreak && astNode.getToken().getType().equals(CxxKeyword.BREAK)) { for (AstNode statement = astNode.getFirstAncestor(CxxGrammarImpl.statement); statement != null; statement = statement.getPreviousSibling()) { if (astNode.getTokenLine() != statement.getTokenLine()) { break; } TokenType type = statement.getToken().getType(); if (type.equals(CxxKeyword.CASE) || type.equals(CxxKeyword.DEFAULT)) { exclude = true; break; } } } return exclude; }
java
private boolean isBreakStatementExcluded(AstNode astNode) { boolean exclude = false; if (excludeCaseBreak && astNode.getToken().getType().equals(CxxKeyword.BREAK)) { for (AstNode statement = astNode.getFirstAncestor(CxxGrammarImpl.statement); statement != null; statement = statement.getPreviousSibling()) { if (astNode.getTokenLine() != statement.getTokenLine()) { break; } TokenType type = statement.getToken().getType(); if (type.equals(CxxKeyword.CASE) || type.equals(CxxKeyword.DEFAULT)) { exclude = true; break; } } } return exclude; }
[ "private", "boolean", "isBreakStatementExcluded", "(", "AstNode", "astNode", ")", "{", "boolean", "exclude", "=", "false", ";", "if", "(", "excludeCaseBreak", "&&", "astNode", ".", "getToken", "(", ")", ".", "getType", "(", ")", ".", "equals", "(", "CxxKeywo...
Exclude 'break' statement if it is on the same line as the switch label
[ "Exclude", "break", "statement", "if", "it", "is", "on", "the", "same", "line", "as", "the", "switch", "label" ]
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-checks/src/main/java/org/sonar/cxx/checks/metrics/TooManyStatementsPerLineCheck.java#L101-L118
train
SonarOpenCommunity/sonar-cxx
cxx-checks/src/main/java/org/sonar/cxx/checks/metrics/TooManyStatementsPerLineCheck.java
TooManyStatementsPerLineCheck.isEmptyExpressionStatement
private boolean isEmptyExpressionStatement(AstNode astNode) { if (astNode.is(CxxGrammarImpl.expressionStatement) && ";".equals(astNode.getToken().getValue())) { AstNode statement = astNode.getFirstAncestor(CxxGrammarImpl.selectionStatement); if (statement != null) { return astNode.getTokenLine() == statement.getTokenLine(); } return isGeneratedNodeExcluded(astNode); } return false; }
java
private boolean isEmptyExpressionStatement(AstNode astNode) { if (astNode.is(CxxGrammarImpl.expressionStatement) && ";".equals(astNode.getToken().getValue())) { AstNode statement = astNode.getFirstAncestor(CxxGrammarImpl.selectionStatement); if (statement != null) { return astNode.getTokenLine() == statement.getTokenLine(); } return isGeneratedNodeExcluded(astNode); } return false; }
[ "private", "boolean", "isEmptyExpressionStatement", "(", "AstNode", "astNode", ")", "{", "if", "(", "astNode", ".", "is", "(", "CxxGrammarImpl", ".", "expressionStatement", ")", "&&", "\";\"", ".", "equals", "(", "astNode", ".", "getToken", "(", ")", ".", "g...
Exclude empty expression statement
[ "Exclude", "empty", "expression", "statement" ]
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-checks/src/main/java/org/sonar/cxx/checks/metrics/TooManyStatementsPerLineCheck.java#L123-L133
train
SonarOpenCommunity/sonar-cxx
cxx-squid/src/main/java/org/sonar/cxx/preprocessor/CxxPreprocessor.java
CxxPreprocessor.parsePredefinedUnitMacros
private Map<String, Macro> parsePredefinedUnitMacros(Map<String, Macro> configuredMacros) { if (!ctorInProgress || (unitMacros != null)) { throw new IllegalStateException("Preconditions for initial fill-out of predefinedUnitMacros were violated"); } if (conf.getCompilationUnitSourceFiles().isEmpty() && (conf.getGlobalCompilationUnitSettings() == null)) { // configuration doesn't contain any settings for compilation units. // CxxPreprocessor will use fixedMacros only return Collections.emptyMap(); } unitMacros = new MapChain<>(); if (getMacros() != unitMacros) { throw new IllegalStateException("expected unitMacros as active macros map"); } try { getMacros().setHighPrio(true); getMacros().putAll(Macro.UNIT_MACROS); getMacros().putAll(configuredMacros); parseForcedIncludes(); final HashMap<String, Macro> result = new HashMap<>(unitMacros.getHighPrioMap()); return result; } finally { getMacros().setHighPrio(false); // just for the symmetry unitMacros = null; // remove unitMacros, switch getMacros() to fixedMacros } }
java
private Map<String, Macro> parsePredefinedUnitMacros(Map<String, Macro> configuredMacros) { if (!ctorInProgress || (unitMacros != null)) { throw new IllegalStateException("Preconditions for initial fill-out of predefinedUnitMacros were violated"); } if (conf.getCompilationUnitSourceFiles().isEmpty() && (conf.getGlobalCompilationUnitSettings() == null)) { // configuration doesn't contain any settings for compilation units. // CxxPreprocessor will use fixedMacros only return Collections.emptyMap(); } unitMacros = new MapChain<>(); if (getMacros() != unitMacros) { throw new IllegalStateException("expected unitMacros as active macros map"); } try { getMacros().setHighPrio(true); getMacros().putAll(Macro.UNIT_MACROS); getMacros().putAll(configuredMacros); parseForcedIncludes(); final HashMap<String, Macro> result = new HashMap<>(unitMacros.getHighPrioMap()); return result; } finally { getMacros().setHighPrio(false); // just for the symmetry unitMacros = null; // remove unitMacros, switch getMacros() to fixedMacros } }
[ "private", "Map", "<", "String", ",", "Macro", ">", "parsePredefinedUnitMacros", "(", "Map", "<", "String", ",", "Macro", ">", "configuredMacros", ")", "{", "if", "(", "!", "ctorInProgress", "||", "(", "unitMacros", "!=", "null", ")", ")", "{", "throw", ...
Create temporary unitMacros map; This map will be used as an active macros' storage while parsing of forced includes. After parsing was over extract resulting macros and destroy the unitMacros. fixedMacros will be set as active macros again.
[ "Create", "temporary", "unitMacros", "map", ";", "This", "map", "will", "be", "used", "as", "an", "active", "macros", "storage", "while", "parsing", "of", "forced", "includes", ".", "After", "parsing", "was", "over", "extract", "resulting", "macros", "and", ...
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-squid/src/main/java/org/sonar/cxx/preprocessor/CxxPreprocessor.java#L214-L241
train
SonarOpenCommunity/sonar-cxx
cxx-squid/src/main/java/org/sonar/cxx/CxxLanguage.java
CxxLanguage.getMetric
public <G extends Serializable> Metric<G> getMetric(CxxMetricsFactory.Key metricKey) { Metric<G> metric = (Metric<G>) this.langSpecificMetrics.get(metricKey); if (metric == null) { throw new IllegalStateException("Requested metric " + metricKey + " couldn't be found"); } return metric; }
java
public <G extends Serializable> Metric<G> getMetric(CxxMetricsFactory.Key metricKey) { Metric<G> metric = (Metric<G>) this.langSpecificMetrics.get(metricKey); if (metric == null) { throw new IllegalStateException("Requested metric " + metricKey + " couldn't be found"); } return metric; }
[ "public", "<", "G", "extends", "Serializable", ">", "Metric", "<", "G", ">", "getMetric", "(", "CxxMetricsFactory", ".", "Key", "metricKey", ")", "{", "Metric", "<", "G", ">", "metric", "=", "(", "Metric", "<", "G", ">", ")", "this", ".", "langSpecific...
Get language specific metric @throws IllegalStateException if metric was not registered
[ "Get", "language", "specific", "metric" ]
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-squid/src/main/java/org/sonar/cxx/CxxLanguage.java#L117-L123
train
SonarOpenCommunity/sonar-cxx
cxx-sensors/src/main/java/org/sonar/cxx/sensors/drmemory/DrMemoryParser.java
DrMemoryParser.getElements
public static List<String> getElements(File file, String charset) { List<String> list = new ArrayList<>(); try (BufferedReader br = new BufferedReader( new InputStreamReader(java.nio.file.Files.newInputStream(file.toPath()), charset))) { StringBuilder sb = new StringBuilder(4096); String line; int cnt = 0; final Pattern whitespacesOnly = Pattern.compile("^\\s*$"); while ((line = br.readLine()) != null) { if (cnt > (TOP_COUNT)) { if (whitespacesOnly.matcher(line).matches()) { list.add(sb.toString()); sb.setLength(0); } else { sb.append(line); sb.append('\n'); } } ++cnt; } if (sb.length() > 0) { list.add(sb.toString()); } } catch (IOException e) { String msg = new StringBuilder(512).append("Cannot feed the data into sonar, details: '") .append(e) .append("'").toString(); LOG.error(msg); } return list; }
java
public static List<String> getElements(File file, String charset) { List<String> list = new ArrayList<>(); try (BufferedReader br = new BufferedReader( new InputStreamReader(java.nio.file.Files.newInputStream(file.toPath()), charset))) { StringBuilder sb = new StringBuilder(4096); String line; int cnt = 0; final Pattern whitespacesOnly = Pattern.compile("^\\s*$"); while ((line = br.readLine()) != null) { if (cnt > (TOP_COUNT)) { if (whitespacesOnly.matcher(line).matches()) { list.add(sb.toString()); sb.setLength(0); } else { sb.append(line); sb.append('\n'); } } ++cnt; } if (sb.length() > 0) { list.add(sb.toString()); } } catch (IOException e) { String msg = new StringBuilder(512).append("Cannot feed the data into sonar, details: '") .append(e) .append("'").toString(); LOG.error(msg); } return list; }
[ "public", "static", "List", "<", "String", ">", "getElements", "(", "File", "file", ",", "String", "charset", ")", "{", "List", "<", "String", ">", "list", "=", "new", "ArrayList", "<>", "(", ")", ";", "try", "(", "BufferedReader", "br", "=", "new", ...
get all DrMemory elements from file @param file with findings @param charset file encoding character set @return list of elements from report file
[ "get", "all", "DrMemory", "elements", "from", "file" ]
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-sensors/src/main/java/org/sonar/cxx/sensors/drmemory/DrMemoryParser.java#L92-L125
train
SonarOpenCommunity/sonar-cxx
cxx-sensors/src/main/java/org/sonar/cxx/sensors/utils/CxxIssuesReportSensor.java
CxxIssuesReportSensor.saveUniqueViolation
public void saveUniqueViolation(SensorContext sensorContext, CxxReportIssue issue) { if (uniqueIssues.add(issue)) { saveViolation(sensorContext, issue); } }
java
public void saveUniqueViolation(SensorContext sensorContext, CxxReportIssue issue) { if (uniqueIssues.add(issue)) { saveViolation(sensorContext, issue); } }
[ "public", "void", "saveUniqueViolation", "(", "SensorContext", "sensorContext", ",", "CxxReportIssue", "issue", ")", "{", "if", "(", "uniqueIssues", ".", "add", "(", "issue", ")", ")", "{", "saveViolation", "(", "sensorContext", ",", "issue", ")", ";", "}", ...
Saves code violation only if it wasn't already saved @param sensorContext @param issue
[ "Saves", "code", "violation", "only", "if", "it", "wasn", "t", "already", "saved" ]
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-sensors/src/main/java/org/sonar/cxx/sensors/utils/CxxIssuesReportSensor.java#L130-L134
train
SonarOpenCommunity/sonar-cxx
cxx-squid/src/main/java/org/sonar/cxx/CxxAstScanner.java
CxxAstScanner.scanSingleFile
@SafeVarargs public static SourceFile scanSingleFile(InputFile file, SensorContext sensorContext, CxxLanguage language, SquidAstVisitor<Grammar>... visitors) { return scanSingleFileConfig(language, file, new CxxConfiguration(sensorContext.fileSystem().encoding()), visitors); }
java
@SafeVarargs public static SourceFile scanSingleFile(InputFile file, SensorContext sensorContext, CxxLanguage language, SquidAstVisitor<Grammar>... visitors) { return scanSingleFileConfig(language, file, new CxxConfiguration(sensorContext.fileSystem().encoding()), visitors); }
[ "@", "SafeVarargs", "public", "static", "SourceFile", "scanSingleFile", "(", "InputFile", "file", ",", "SensorContext", "sensorContext", ",", "CxxLanguage", "language", ",", "SquidAstVisitor", "<", "Grammar", ">", "...", "visitors", ")", "{", "return", "scanSingleFi...
Helper method for testing checks without having to deploy them on a Sonar instance. @param file is the file to be checked @param sensorContext SQ API batch side context @param visitors AST checks and visitors to use @param language CxxLanguage to use @return file checked with measures and issues
[ "Helper", "method", "for", "testing", "checks", "without", "having", "to", "deploy", "them", "on", "a", "Sonar", "instance", "." ]
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-squid/src/main/java/org/sonar/cxx/CxxAstScanner.java#L74-L79
train
SonarOpenCommunity/sonar-cxx
cxx-squid/src/main/java/org/sonar/cxx/CxxAstScanner.java
CxxAstScanner.scanSingleFileConfig
public static SourceFile scanSingleFileConfig(CxxLanguage language, InputFile file, CxxConfiguration cxxConfig, SquidAstVisitor<Grammar>... visitors) { if (!file.isFile()) { throw new IllegalArgumentException("File '" + file + "' not found."); } AstScanner<Grammar> scanner = create(language, cxxConfig, visitors); scanner.scanFile(file.file()); Collection<SourceCode> sources = scanner.getIndex().search(new QueryByType(SourceFile.class)); if (sources.size() != 1) { throw new IllegalStateException("Only one SourceFile was expected whereas " + sources.size() + " has been returned."); } return (SourceFile) sources.iterator().next(); }
java
public static SourceFile scanSingleFileConfig(CxxLanguage language, InputFile file, CxxConfiguration cxxConfig, SquidAstVisitor<Grammar>... visitors) { if (!file.isFile()) { throw new IllegalArgumentException("File '" + file + "' not found."); } AstScanner<Grammar> scanner = create(language, cxxConfig, visitors); scanner.scanFile(file.file()); Collection<SourceCode> sources = scanner.getIndex().search(new QueryByType(SourceFile.class)); if (sources.size() != 1) { throw new IllegalStateException("Only one SourceFile was expected whereas " + sources.size() + " has been returned."); } return (SourceFile) sources.iterator().next(); }
[ "public", "static", "SourceFile", "scanSingleFileConfig", "(", "CxxLanguage", "language", ",", "InputFile", "file", ",", "CxxConfiguration", "cxxConfig", ",", "SquidAstVisitor", "<", "Grammar", ">", "...", "visitors", ")", "{", "if", "(", "!", "file", ".", "isFi...
Helper method for scanning a single file @param file is the file to be checked @param cxxConfig the plugin configuration @param visitors AST checks and visitors to use @param language for sensor @return file checked with measures and issues
[ "Helper", "method", "for", "scanning", "a", "single", "file" ]
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-squid/src/main/java/org/sonar/cxx/CxxAstScanner.java#L90-L103
train
SonarOpenCommunity/sonar-cxx
cxx-sensors/src/main/java/org/sonar/cxx/sensors/coverage/CoberturaParser.java
CoberturaParser.join
public static String join(Path path1, Path path2) { if (path2.toString().isEmpty()) { return ""; } if (!path1.isAbsolute()) { path1 = Paths.get(".", path1.toString()); } if (!path2.isAbsolute()) { path2 = Paths.get(".", path2.toString()); } Path result = path1.resolve(path2).normalize(); if (!result.isAbsolute()) { result = Paths.get(".", result.toString()); } return result.toString(); }
java
public static String join(Path path1, Path path2) { if (path2.toString().isEmpty()) { return ""; } if (!path1.isAbsolute()) { path1 = Paths.get(".", path1.toString()); } if (!path2.isAbsolute()) { path2 = Paths.get(".", path2.toString()); } Path result = path1.resolve(path2).normalize(); if (!result.isAbsolute()) { result = Paths.get(".", result.toString()); } return result.toString(); }
[ "public", "static", "String", "join", "(", "Path", "path1", ",", "Path", "path2", ")", "{", "if", "(", "path2", ".", "toString", "(", ")", ".", "isEmpty", "(", ")", ")", "{", "return", "\"\"", ";", "}", "if", "(", "!", "path1", ".", "isAbsolute", ...
Join two paths path1 | path2 | result ---------|----------|------- empty | empty | empty empty | absolute | absolute path2 empty | relative | relative path2 absolute | empty | empty relative | empty | empty absolute | absolute | absolute path2 absolute | relative | absolute path1 + relative path2 relative | absolute | absolute path2 relative | relative | relative path1 + relative path2 @param path1 first path @param path2 second path to be joined to first path @return joined path as string
[ "Join", "two", "paths" ]
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-sensors/src/main/java/org/sonar/cxx/sensors/coverage/CoberturaParser.java#L138-L155
train
SonarOpenCommunity/sonar-cxx
cxx-sensors/src/main/java/org/sonar/cxx/sensors/utils/JsonCompilationDatabase.java
JsonCompilationDatabase.parse
public static void parse(CxxConfiguration config, File compileCommandsFile) throws IOException { LOG.debug("Parsing 'JSON Compilation Database' format"); ObjectMapper mapper = new ObjectMapper(); mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES); mapper.enable(DeserializationFeature.USE_JAVA_ARRAY_FOR_JSON_ARRAY); JsonCompilationDatabaseCommandObject[] commandObjects = mapper.readValue(compileCommandsFile, JsonCompilationDatabaseCommandObject[].class); for (JsonCompilationDatabaseCommandObject commandObject : commandObjects) { Path cwd = Paths.get("."); if (commandObject.getDirectory() != null) { cwd = Paths.get(commandObject.getDirectory()); } Path absPath = cwd.resolve(commandObject.getFile()); if ("__global__".equals(commandObject.getFile())) { CxxCompilationUnitSettings globalSettings = new CxxCompilationUnitSettings(); parseCommandObject(globalSettings, commandObject); config.setGlobalCompilationUnitSettings(globalSettings); } else { CxxCompilationUnitSettings settings = new CxxCompilationUnitSettings(); parseCommandObject(settings, commandObject); config.addCompilationUnitSettings(absPath.toAbsolutePath().normalize().toString(), settings); } } }
java
public static void parse(CxxConfiguration config, File compileCommandsFile) throws IOException { LOG.debug("Parsing 'JSON Compilation Database' format"); ObjectMapper mapper = new ObjectMapper(); mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES); mapper.enable(DeserializationFeature.USE_JAVA_ARRAY_FOR_JSON_ARRAY); JsonCompilationDatabaseCommandObject[] commandObjects = mapper.readValue(compileCommandsFile, JsonCompilationDatabaseCommandObject[].class); for (JsonCompilationDatabaseCommandObject commandObject : commandObjects) { Path cwd = Paths.get("."); if (commandObject.getDirectory() != null) { cwd = Paths.get(commandObject.getDirectory()); } Path absPath = cwd.resolve(commandObject.getFile()); if ("__global__".equals(commandObject.getFile())) { CxxCompilationUnitSettings globalSettings = new CxxCompilationUnitSettings(); parseCommandObject(globalSettings, commandObject); config.setGlobalCompilationUnitSettings(globalSettings); } else { CxxCompilationUnitSettings settings = new CxxCompilationUnitSettings(); parseCommandObject(settings, commandObject); config.addCompilationUnitSettings(absPath.toAbsolutePath().normalize().toString(), settings); } } }
[ "public", "static", "void", "parse", "(", "CxxConfiguration", "config", ",", "File", "compileCommandsFile", ")", "throws", "IOException", "{", "LOG", ".", "debug", "(", "\"Parsing 'JSON Compilation Database' format\"", ")", ";", "ObjectMapper", "mapper", "=", "new", ...
Set up the given CxxConfiguration from the JSON compilation database @param config @param compileCommandsFile @throws IOException
[ "Set", "up", "the", "given", "CxxConfiguration", "from", "the", "JSON", "compilation", "database" ]
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-sensors/src/main/java/org/sonar/cxx/sensors/utils/JsonCompilationDatabase.java#L54-L89
train
SonarOpenCommunity/sonar-cxx
cxx-squid/src/main/java/org/sonar/cxx/visitors/AbstractCxxPublicApiVisitor.java
AbstractCxxPublicApiVisitor.getOperatorId
private static String getOperatorId(AstNode operatorFunctionId) { StringBuilder builder = new StringBuilder( operatorFunctionId.getTokenValue()); AstNode operator = operatorFunctionId .getFirstDescendant(CxxGrammarImpl.overloadableOperator); if (operator != null) { AstNode opNode = operator.getFirstChild(); while (opNode != null) { builder.append(opNode.getTokenValue()); opNode = opNode.getNextSibling(); } } return builder.toString(); }
java
private static String getOperatorId(AstNode operatorFunctionId) { StringBuilder builder = new StringBuilder( operatorFunctionId.getTokenValue()); AstNode operator = operatorFunctionId .getFirstDescendant(CxxGrammarImpl.overloadableOperator); if (operator != null) { AstNode opNode = operator.getFirstChild(); while (opNode != null) { builder.append(opNode.getTokenValue()); opNode = opNode.getNextSibling(); } } return builder.toString(); }
[ "private", "static", "String", "getOperatorId", "(", "AstNode", "operatorFunctionId", ")", "{", "StringBuilder", "builder", "=", "new", "StringBuilder", "(", "operatorFunctionId", ".", "getTokenValue", "(", ")", ")", ";", "AstNode", "operator", "=", "operatorFunctio...
XXX may go to a utility class
[ "XXX", "may", "go", "to", "a", "utility", "class" ]
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-squid/src/main/java/org/sonar/cxx/visitors/AbstractCxxPublicApiVisitor.java#L210-L227
train
SonarOpenCommunity/sonar-cxx
cxx-squid/src/main/java/org/sonar/cxx/visitors/AbstractCxxPublicApiVisitor.java
AbstractCxxPublicApiVisitor.getInlineDocumentation
private static List<Token> getInlineDocumentation(Token token, int line) { List<Token> comments = new ArrayList<>(); for (Trivia trivia : token.getTrivia()) { if (trivia.isComment()) { Token triviaToken = trivia.getToken(); if ((triviaToken != null) && (triviaToken.getLine() == line) && (isDoxygenInlineComment(triviaToken.getValue()))) { comments.add(triviaToken); if (LOG.isTraceEnabled()) { LOG.trace("Inline doc: " + triviaToken.getValue()); } } } } return comments; }
java
private static List<Token> getInlineDocumentation(Token token, int line) { List<Token> comments = new ArrayList<>(); for (Trivia trivia : token.getTrivia()) { if (trivia.isComment()) { Token triviaToken = trivia.getToken(); if ((triviaToken != null) && (triviaToken.getLine() == line) && (isDoxygenInlineComment(triviaToken.getValue()))) { comments.add(triviaToken); if (LOG.isTraceEnabled()) { LOG.trace("Inline doc: " + triviaToken.getValue()); } } } } return comments; }
[ "private", "static", "List", "<", "Token", ">", "getInlineDocumentation", "(", "Token", "token", ",", "int", "line", ")", "{", "List", "<", "Token", ">", "comments", "=", "new", "ArrayList", "<>", "(", ")", ";", "for", "(", "Trivia", "trivia", ":", "to...
Check if inline Doxygen documentation is attached to the given token at specified line @param token the token to inspect @param line line of the inlined documentation @return true if documentation is found for specified line, false otherwise
[ "Check", "if", "inline", "Doxygen", "documentation", "is", "attached", "to", "the", "given", "token", "at", "specified", "line" ]
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-squid/src/main/java/org/sonar/cxx/visitors/AbstractCxxPublicApiVisitor.java#L319-L336
train
SonarOpenCommunity/sonar-cxx
cxx-sensors/src/main/java/org/sonar/cxx/sensors/utils/CxxReportSensor.java
CxxReportSensor.getContextStringProperty
public static String getContextStringProperty(SensorContext context, String name, String def) { String s = context.config().get(name).orElse(null); if (s == null || s.isEmpty()) { return def; } return s; }
java
public static String getContextStringProperty(SensorContext context, String name, String def) { String s = context.config().get(name).orElse(null); if (s == null || s.isEmpty()) { return def; } return s; }
[ "public", "static", "String", "getContextStringProperty", "(", "SensorContext", "context", ",", "String", "name", ",", "String", "def", ")", "{", "String", "s", "=", "context", ".", "config", "(", ")", ".", "get", "(", "name", ")", ".", "orElse", "(", "n...
Get string property from configuration. If the string is not set or empty, return the default value. @param context sensor context @param name Name of the property @param def Default value @return Value of the property if set and not empty, else default value.
[ "Get", "string", "property", "from", "configuration", ".", "If", "the", "string", "is", "not", "set", "or", "empty", "return", "the", "default", "value", "." ]
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-sensors/src/main/java/org/sonar/cxx/sensors/utils/CxxReportSensor.java#L71-L77
train
SonarOpenCommunity/sonar-cxx
cxx-sensors/src/main/java/org/sonar/cxx/sensors/utils/CxxReportSensor.java
CxxReportSensor.resolveFilename
@Nullable public static String resolveFilename(final String baseDir, @Nullable final String filename) { if (filename != null) { // Normalization can return null if path is null, is invalid, // or is a path with back-ticks outside known directory structure String normalizedPath = FilenameUtils.normalize(filename); if ((normalizedPath != null) && (new File(normalizedPath).isAbsolute())) { return normalizedPath; } // Prefix with absolute module base directory, attempt normalization again -- can still get null here normalizedPath = FilenameUtils.normalize(baseDir + File.separator + filename); if (normalizedPath != null) { return normalizedPath; } } return null; }
java
@Nullable public static String resolveFilename(final String baseDir, @Nullable final String filename) { if (filename != null) { // Normalization can return null if path is null, is invalid, // or is a path with back-ticks outside known directory structure String normalizedPath = FilenameUtils.normalize(filename); if ((normalizedPath != null) && (new File(normalizedPath).isAbsolute())) { return normalizedPath; } // Prefix with absolute module base directory, attempt normalization again -- can still get null here normalizedPath = FilenameUtils.normalize(baseDir + File.separator + filename); if (normalizedPath != null) { return normalizedPath; } } return null; }
[ "@", "Nullable", "public", "static", "String", "resolveFilename", "(", "final", "String", "baseDir", ",", "@", "Nullable", "final", "String", "filename", ")", "{", "if", "(", "filename", "!=", "null", ")", "{", "// Normalization can return null if path is null, is i...
resolveFilename normalizes the report full path @param baseDir of the project @param filename of the report @return String
[ "resolveFilename", "normalizes", "the", "report", "full", "path" ]
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-sensors/src/main/java/org/sonar/cxx/sensors/utils/CxxReportSensor.java#L86-L104
train
SonarOpenCommunity/sonar-cxx
cxx-squid/src/main/java/org/sonar/cxx/visitors/MultiLocatitionSquidCheck.java
MultiLocatitionSquidCheck.createMultiLocationViolation
protected void createMultiLocationViolation(CxxReportIssue message) { SourceFile sourceFile = getSourceFile(); Set<CxxReportIssue> messages = getMultiLocationCheckMessages(sourceFile); if (messages == null) { messages = new HashSet<>(); } messages.add(message); setMultiLocationViolation(sourceFile, messages); }
java
protected void createMultiLocationViolation(CxxReportIssue message) { SourceFile sourceFile = getSourceFile(); Set<CxxReportIssue> messages = getMultiLocationCheckMessages(sourceFile); if (messages == null) { messages = new HashSet<>(); } messages.add(message); setMultiLocationViolation(sourceFile, messages); }
[ "protected", "void", "createMultiLocationViolation", "(", "CxxReportIssue", "message", ")", "{", "SourceFile", "sourceFile", "=", "getSourceFile", "(", ")", ";", "Set", "<", "CxxReportIssue", ">", "messages", "=", "getMultiLocationCheckMessages", "(", "sourceFile", ")...
Add the given message to the current SourceFile object @see SquidAstVisitorContext<G extends Grammar>.createLineViolation() for simple violations
[ "Add", "the", "given", "message", "to", "the", "current", "SourceFile", "object" ]
7e7a3a44d6d86382a0434652a798f8235503c9b8
https://github.com/SonarOpenCommunity/sonar-cxx/blob/7e7a3a44d6d86382a0434652a798f8235503c9b8/cxx-squid/src/main/java/org/sonar/cxx/visitors/MultiLocatitionSquidCheck.java#L98-L106
train
gmessner/gitlab4j-api
src/main/java/org/gitlab4j/api/EpicsApi.java
EpicsApi.getEpics
public List<Epic> getEpics(Object groupIdOrPath, Integer authorId, String labels, EpicOrderBy orderBy, SortOrder sortOrder, String search, int page, int perPage) throws GitLabApiException { GitLabApiForm formData = new GitLabApiForm(page, perPage) .withParam("author_id", authorId) .withParam("labels", labels) .withParam("order_by", orderBy) .withParam("sort", sortOrder) .withParam("search", search); Response response = get(Response.Status.OK, formData.asMap(), "groups", getGroupIdOrPath(groupIdOrPath), "epics"); return (response.readEntity(new GenericType<List<Epic>>() { })); }
java
public List<Epic> getEpics(Object groupIdOrPath, Integer authorId, String labels, EpicOrderBy orderBy, SortOrder sortOrder, String search, int page, int perPage) throws GitLabApiException { GitLabApiForm formData = new GitLabApiForm(page, perPage) .withParam("author_id", authorId) .withParam("labels", labels) .withParam("order_by", orderBy) .withParam("sort", sortOrder) .withParam("search", search); Response response = get(Response.Status.OK, formData.asMap(), "groups", getGroupIdOrPath(groupIdOrPath), "epics"); return (response.readEntity(new GenericType<List<Epic>>() { })); }
[ "public", "List", "<", "Epic", ">", "getEpics", "(", "Object", "groupIdOrPath", ",", "Integer", "authorId", ",", "String", "labels", ",", "EpicOrderBy", "orderBy", ",", "SortOrder", "sortOrder", ",", "String", "search", ",", "int", "page", ",", "int", "perPa...
Gets all epics of the requested group and its subgroups using the specified page and per page setting. <pre><code>GitLab Endpoint: GET /groups/:id/epics</code></pre> @param groupIdOrPath the group ID, path of the group, or a Group instance holding the group ID or path @param authorId returns epics created by the given user id @param labels return epics matching a comma separated list of labels names Label names from the epic group or a parent group can be used @param orderBy return epics ordered by CREATED_AT or UPDATED_AT. Default is CREATED_AT @param sortOrder return epics sorted in ASC or DESC order. Default is DESC @param search search epics against their title and description @param page the page to get @param perPage the number of issues per page @return a list of matching epics of the requested group and its subgroups in the specified range @throws GitLabApiException if any exception occurs
[ "Gets", "all", "epics", "of", "the", "requested", "group", "and", "its", "subgroups", "using", "the", "specified", "page", "and", "per", "page", "setting", "." ]
ab045070abac0a8f4ccbf17b5ed9bfdef5723eed
https://github.com/gmessner/gitlab4j-api/blob/ab045070abac0a8f4ccbf17b5ed9bfdef5723eed/src/main/java/org/gitlab4j/api/EpicsApi.java#L124-L134
train
gmessner/gitlab4j-api
src/main/java/org/gitlab4j/api/EpicsApi.java
EpicsApi.getEpic
public Epic getEpic(Object groupIdOrPath, Integer epicIid) throws GitLabApiException { Response response = get(Response.Status.OK, null, "groups", getGroupIdOrPath(groupIdOrPath), "epics", epicIid); return (response.readEntity(Epic.class)); }
java
public Epic getEpic(Object groupIdOrPath, Integer epicIid) throws GitLabApiException { Response response = get(Response.Status.OK, null, "groups", getGroupIdOrPath(groupIdOrPath), "epics", epicIid); return (response.readEntity(Epic.class)); }
[ "public", "Epic", "getEpic", "(", "Object", "groupIdOrPath", ",", "Integer", "epicIid", ")", "throws", "GitLabApiException", "{", "Response", "response", "=", "get", "(", "Response", ".", "Status", ".", "OK", ",", "null", ",", "\"groups\"", ",", "getGroupIdOrP...
Get a single epic for the specified group. <pre><code>GitLab Endpoint: GET /groups/:id/epics/:epic_iid</code></pre> @param groupIdOrPath the group ID, path of the group, or a Group instance holding the group ID or path @param epicIid the IID of the epic to get @return an Epic instance for the specified Epic @throws GitLabApiException if any exception occurs
[ "Get", "a", "single", "epic", "for", "the", "specified", "group", "." ]
ab045070abac0a8f4ccbf17b5ed9bfdef5723eed
https://github.com/gmessner/gitlab4j-api/blob/ab045070abac0a8f4ccbf17b5ed9bfdef5723eed/src/main/java/org/gitlab4j/api/EpicsApi.java#L193-L196
train
gmessner/gitlab4j-api
src/main/java/org/gitlab4j/api/EpicsApi.java
EpicsApi.getOptionalEpic
public Optional<Epic> getOptionalEpic(Object groupIdOrPath, Integer epicIid) { try { return (Optional.ofNullable(getEpic(groupIdOrPath, epicIid))); } catch (GitLabApiException glae) { return (GitLabApi.createOptionalFromException(glae)); } }
java
public Optional<Epic> getOptionalEpic(Object groupIdOrPath, Integer epicIid) { try { return (Optional.ofNullable(getEpic(groupIdOrPath, epicIid))); } catch (GitLabApiException glae) { return (GitLabApi.createOptionalFromException(glae)); } }
[ "public", "Optional", "<", "Epic", ">", "getOptionalEpic", "(", "Object", "groupIdOrPath", ",", "Integer", "epicIid", ")", "{", "try", "{", "return", "(", "Optional", ".", "ofNullable", "(", "getEpic", "(", "groupIdOrPath", ",", "epicIid", ")", ")", ")", "...
Get an Optional instance with the value for the specific Epic. <pre><code>GitLab Endpoint: GET /groups/:id/epics/:epic_iid</code></pre> @param groupIdOrPath the group ID, path of the group, or a Group instance holding the group ID or path @param epicIid the IID of the epic to get @return an Optional instance with the specified Epic as a value
[ "Get", "an", "Optional", "instance", "with", "the", "value", "for", "the", "specific", "Epic", "." ]
ab045070abac0a8f4ccbf17b5ed9bfdef5723eed
https://github.com/gmessner/gitlab4j-api/blob/ab045070abac0a8f4ccbf17b5ed9bfdef5723eed/src/main/java/org/gitlab4j/api/EpicsApi.java#L207-L213
train
gmessner/gitlab4j-api
src/main/java/org/gitlab4j/api/EpicsApi.java
EpicsApi.createEpic
public Epic createEpic(Object groupIdOrPath, String title, String labels, String description, Date startDate, Date endDate) throws GitLabApiException { Form formData = new GitLabApiForm() .withParam("title", title, true) .withParam("labels", labels) .withParam("description", description) .withParam("start_date", startDate) .withParam("end_date", endDate); Response response = post(Response.Status.CREATED, formData.asMap(), "groups", getGroupIdOrPath(groupIdOrPath), "epics"); return (response.readEntity(Epic.class)); }
java
public Epic createEpic(Object groupIdOrPath, String title, String labels, String description, Date startDate, Date endDate) throws GitLabApiException { Form formData = new GitLabApiForm() .withParam("title", title, true) .withParam("labels", labels) .withParam("description", description) .withParam("start_date", startDate) .withParam("end_date", endDate); Response response = post(Response.Status.CREATED, formData.asMap(), "groups", getGroupIdOrPath(groupIdOrPath), "epics"); return (response.readEntity(Epic.class)); }
[ "public", "Epic", "createEpic", "(", "Object", "groupIdOrPath", ",", "String", "title", ",", "String", "labels", ",", "String", "description", ",", "Date", "startDate", ",", "Date", "endDate", ")", "throws", "GitLabApiException", "{", "Form", "formData", "=", ...
Creates a new epic. <pre><code>GitLab Endpoint: POST /groups/:id/epics</code></pre> @param groupIdOrPath the group ID, path of the group, or a Group instance holding the group ID or path @param title the title of the epic (required) @param labels comma separated list of labels (optional) @param description the description of the epic (optional) @param startDate the start date of the epic (optional) @param endDate the end date of the epic (optional) @return an Epic instance containing info on the newly created epic @throws GitLabApiException if any exception occurs
[ "Creates", "a", "new", "epic", "." ]
ab045070abac0a8f4ccbf17b5ed9bfdef5723eed
https://github.com/gmessner/gitlab4j-api/blob/ab045070abac0a8f4ccbf17b5ed9bfdef5723eed/src/main/java/org/gitlab4j/api/EpicsApi.java#L229-L240
train
gmessner/gitlab4j-api
src/main/java/org/gitlab4j/api/EpicsApi.java
EpicsApi.updateEpic
public Epic updateEpic(Object groupIdOrPath, Integer epicIid, String title, String labels, String description, Date startDate, Date endDate) throws GitLabApiException { Form formData = new GitLabApiForm() .withParam("title", title, true) .withParam("labels", labels) .withParam("description", description) .withParam("start_date", startDate) .withParam("end_date", endDate); Response response = put(Response.Status.OK, formData.asMap(), "groups", getGroupIdOrPath(groupIdOrPath), "epics", epicIid); return (response.readEntity(Epic.class)); }
java
public Epic updateEpic(Object groupIdOrPath, Integer epicIid, String title, String labels, String description, Date startDate, Date endDate) throws GitLabApiException { Form formData = new GitLabApiForm() .withParam("title", title, true) .withParam("labels", labels) .withParam("description", description) .withParam("start_date", startDate) .withParam("end_date", endDate); Response response = put(Response.Status.OK, formData.asMap(), "groups", getGroupIdOrPath(groupIdOrPath), "epics", epicIid); return (response.readEntity(Epic.class)); }
[ "public", "Epic", "updateEpic", "(", "Object", "groupIdOrPath", ",", "Integer", "epicIid", ",", "String", "title", ",", "String", "labels", ",", "String", "description", ",", "Date", "startDate", ",", "Date", "endDate", ")", "throws", "GitLabApiException", "{", ...
Updates an existing epic. <pre><code>GitLab Endpoint: PUT /groups/:id/epics/:epic_iid</code></pre> @param groupIdOrPath the group ID, path of the group, or a Group instance holding the group ID or path @param epicIid the IID of the epic to update @param title the title of the epic (optional) @param labels comma separated list of labels (optional) @param description the description of the epic (optional) @param startDate the start date of the epic (optional) @param endDate the end date of the epic (optional) @return an Epic instance containing info on the newly created epic @throws GitLabApiException if any exception occurs
[ "Updates", "an", "existing", "epic", "." ]
ab045070abac0a8f4ccbf17b5ed9bfdef5723eed
https://github.com/gmessner/gitlab4j-api/blob/ab045070abac0a8f4ccbf17b5ed9bfdef5723eed/src/main/java/org/gitlab4j/api/EpicsApi.java#L287-L298
train
gmessner/gitlab4j-api
src/main/java/org/gitlab4j/api/EpicsApi.java
EpicsApi.deleteEpic
public void deleteEpic(Object groupIdOrPath, Integer epicIid) throws GitLabApiException { delete(Response.Status.NO_CONTENT, null, "groups", getGroupIdOrPath(groupIdOrPath), "epics", epicIid); }
java
public void deleteEpic(Object groupIdOrPath, Integer epicIid) throws GitLabApiException { delete(Response.Status.NO_CONTENT, null, "groups", getGroupIdOrPath(groupIdOrPath), "epics", epicIid); }
[ "public", "void", "deleteEpic", "(", "Object", "groupIdOrPath", ",", "Integer", "epicIid", ")", "throws", "GitLabApiException", "{", "delete", "(", "Response", ".", "Status", ".", "NO_CONTENT", ",", "null", ",", "\"groups\"", ",", "getGroupIdOrPath", "(", "group...
Deletes an epic. <pre><code>GitLab Endpoint: DELETE /groups/:id/epics/:epic_iid</code></pre> @param groupIdOrPath the group ID, path of the group, or a Group instance holding the group ID or path @param epicIid the IID of the epic to delete @throws GitLabApiException if any exception occurs
[ "Deletes", "an", "epic", "." ]
ab045070abac0a8f4ccbf17b5ed9bfdef5723eed
https://github.com/gmessner/gitlab4j-api/blob/ab045070abac0a8f4ccbf17b5ed9bfdef5723eed/src/main/java/org/gitlab4j/api/EpicsApi.java#L339-L341
train
gmessner/gitlab4j-api
src/main/java/org/gitlab4j/api/EpicsApi.java
EpicsApi.getEpicIssues
public List<Epic> getEpicIssues(Object groupIdOrPath, Integer epicIid) throws GitLabApiException { return (getEpicIssues(groupIdOrPath, epicIid, getDefaultPerPage()).all()); }
java
public List<Epic> getEpicIssues(Object groupIdOrPath, Integer epicIid) throws GitLabApiException { return (getEpicIssues(groupIdOrPath, epicIid, getDefaultPerPage()).all()); }
[ "public", "List", "<", "Epic", ">", "getEpicIssues", "(", "Object", "groupIdOrPath", ",", "Integer", "epicIid", ")", "throws", "GitLabApiException", "{", "return", "(", "getEpicIssues", "(", "groupIdOrPath", ",", "epicIid", ",", "getDefaultPerPage", "(", ")", ")...
Gets all issues that are assigned to an epic and the authenticated user has access to. <pre><code>GitLab Endpoint: GET /groups/:id/epics/:epic_iid/issues</code></pre> @param groupIdOrPath the group ID, path of the group, or a Group instance holding the group ID or path @param epicIid the IID of the epic to get issues for @return a list of all epic issues belonging to the specified epic @throws GitLabApiException if any exception occurs
[ "Gets", "all", "issues", "that", "are", "assigned", "to", "an", "epic", "and", "the", "authenticated", "user", "has", "access", "to", "." ]
ab045070abac0a8f4ccbf17b5ed9bfdef5723eed
https://github.com/gmessner/gitlab4j-api/blob/ab045070abac0a8f4ccbf17b5ed9bfdef5723eed/src/main/java/org/gitlab4j/api/EpicsApi.java#L353-L355
train