| idx (int64, 0–165k) | question (string, lengths 73–4.15k) | target (string, lengths 5–918) | len_question (int64, 21–890) | len_target (int64, 3–255) |
|---|---|---|---|---|
| 157,800 | `@Override public Assignments<E> split(AbstractMTree<?, N, E, ?> tree, N node) { final int n = node.getNumEntries(); int pos1 = random.nextInt(n), pos2 = random.nextInt(n - 1); pos2 = pos2 >= pos1 ? pos2 + 1 : pos2; /* Build distance arrays: */ double[] dis1 = new double[n], dis2 = new double[n]; E e1 = node.getEntry(pos1), e2 = node.getEntry(pos2); for (int i = 0; i < n; i++) { if (i == pos1 \|\| i == pos2) { continue; } final E ej = node.getEntry(i); dis1[i] = tree.distance(e1, ej); dis2[i] = tree.distance(e2, ej); } return distributor.distribute(node, pos1, dis1, pos2, dis2); }` | Selects two objects of the specified node to be promoted and stored into the parent node. The m-RAD strategy considers all possible pairs of objects and, after partitioning the set of entries, promotes the pair of objects for which the sum of covering radii is minimum. | 236 | 53 |
| 157,801 | `public static boolean checkCSSStatements(Collection<Pair<String, String>> statements) { for (Pair<String, String> pair : statements) { if (!checkCSSStatement(pair.getFirst(), pair.getSecond())) { return false; } } return true; }` | Validate a set of CSS statements. | 65 | 8 |
| 157,802 | `public String getStatement(String key) { for (Pair<String, String> pair : statements) { if (pair.getFirst().equals(key)) { return pair.getSecond(); } } return null; }` | Get the current value of a particular CSS statement. | 51 | 10 |
| 157,803 | `public void setStatement(String key, String value) { if (value != null && !checkCSSStatement(key, value)) { throw new InvalidCSS("Invalid CSS statement."); } for (Pair<String, String> pair : statements) { if (pair.getFirst().equals(key)) { if (value != null) { pair.setSecond(value); } else { statements.remove(pair); } return; } } if (value != null) { statements.add(new Pair<>(key, value)); } }` | Set a CSS statement. | 124 | 5 |
| 157,804 | `public void appendCSSDefinition(StringBuilder buf) { buf.append("\n."); buf.append(name); buf.append(' '); for (Pair<String, String> pair : statements) { buf.append(pair.getFirst()); buf.append(' '); buf.append(pair.getSecond()); buf.append(";\n"); } buf.append("}\n"); }` | Append CSS definition to a stream | 103 | 7 |
| 157,805 | `public String inlineCSS() { StringBuilder buf = new StringBuilder(); for (Pair<String, String> pair : statements) { buf.append(pair.getFirst()); buf.append(' '); buf.append(pair.getSecond()); buf.append(' '); } return buf.toString(); }` | Render CSS class to inline formatting | 80 | 6 |
| 157,806 | `protected Distribution findBestFit(final List<V> col, Adapter adapter, int d, double[] test) { if (estimators.size() == 1) { return estimators.get(0).estimate(col, adapter); } Distribution best = null; double bestq = Double.POSITIVE_INFINITY; trials: for (DistributionEstimator<?> est : estimators) { try { Distribution dist = est.estimate(col, adapter); for (int i = 0; i < test.length; i++) { test[i] = dist.cdf(col.get(i).doubleValue(d)); if (Double.isNaN(test[i])) { LOG.warning("Got NaN after fitting " + est + ": " + dist); continue trials; } if (Double.isInfinite(test[i])) { LOG.warning("Got infinite value after fitting " + est + ": " + dist); continue trials; } } Arrays.sort(test); double q = KolmogorovSmirnovTest.simpleTest(test); if (LOG.isVeryVerbose()) { LOG.veryverbose("Estimator " + est + " (" + dist + ") has maximum deviation " + q + " for dimension " + d); } if (best == null \|\| q < bestq) { best = dist; bestq = q; } } catch (ArithmeticException e) { if (LOG.isVeryVerbose()) { LOG.veryverbose("Fitting distribution " + est + " failed: " + e.getMessage()); } continue trials; } } if (LOG.isVerbose()) { LOG.verbose("Best fit for dimension " + d + ": " + best); } return best; }` | Find the best fitting distribution. | 418 | 6 |
| 157,807 | `protected boolean constantZero(List<V> column, Adapter adapter) { for (int i = 0, s = adapter.size(column); i < s; i++) { if (adapter.get(column, i) != 0.) { return false; } } return true; }` | Test if an attribute is constant zero. | 63 | 8 |
| 157,808 | `private StreamTokenizer makeArffTokenizer(BufferedReader br) { /* Setup tokenizer */ StreamTokenizer tokenizer = new StreamTokenizer(br); { tokenizer.resetSyntax(); tokenizer.whitespaceChars(0, ' '); tokenizer.ordinaryChars(' ', ' '); /* Do not parse numbers */ tokenizer.ordinaryChar(' '); tokenizer.ordinaryChar(' '); tokenizer.wordChars(' ' + 1, ' '); tokenizer.whitespaceChars(' ', ' '); tokenizer.commentChar(' '); tokenizer.quoteChar(' '); tokenizer.quoteChar(' '); tokenizer.ordinaryChar(' '); tokenizer.ordinaryChar(' '); tokenizer.eolIsSignificant(true); } return tokenizer; }` | Make a StreamTokenizer for the ARFF format. | 196 | 11 |
| 157,809 | `private void setupBundleHeaders(ArrayList<String> names, int[] targ, TypeInformation[] etyp, int[] dimsize, MultipleObjectsBundle bundle, boolean sparse) { for (int in = 0, out = 0; in < targ.length; out++) { int nin = in + 1; for (; nin < targ.length; nin++) { if (targ[nin] != targ[in]) { break; } } if (TypeUtil.NUMBER_VECTOR_FIELD.equals(etyp[out])) { String[] labels = new String[dimsize[out]]; /* Collect labels: */ for (int i = 0; i < dimsize[out]; i++) { labels[i] = names.get(out + i); } if (!sparse) { VectorFieldTypeInformation<DoubleVector> type = new VectorFieldTypeInformation<>(DoubleVector.FACTORY, dimsize[out], labels); bundle.appendColumn(type, new ArrayList<DoubleVector>()); } else { VectorFieldTypeInformation<SparseDoubleVector> type = new VectorFieldTypeInformation<>(SparseDoubleVector.FACTORY, dimsize[out], labels); bundle.appendColumn(type, new ArrayList<SparseDoubleVector>()); } } else if (TypeUtil.LABELLIST.equals(etyp[out])) { StringBuilder label = new StringBuilder(names.get(out)); for (int i = 1; i < dimsize[out]; i++) { label.append(' ').append(names.get(out + i)); } bundle.appendColumn(new SimpleTypeInformation<>(LabelList.class, label.toString()), new ArrayList<LabelList>()); } else if (TypeUtil.EXTERNALID.equals(etyp[out])) { bundle.appendColumn(new SimpleTypeInformation<>(ExternalID.class, names.get(out)), new ArrayList<ExternalID>()); } else if (TypeUtil.CLASSLABEL.equals(etyp[out])) { bundle.appendColumn(new SimpleTypeInformation<>(ClassLabel.class, names.get(out)), new ArrayList<ClassLabel>()); } else { throw new AbortException("Unsupported type for column " + in + "->" + out + ": " + ((etyp[out] != null) ? etyp[out].toString() : "null")); } assert (out == bundle.metaLength() - 1); in = nin; } }` | Set up the headers for the object bundle. | 612 | 8 |
| 157,810 | `private void readHeader(BufferedReader br) throws IOException { String line; /* Locate header line */ while (true) { line = br.readLine(); if (line == null) { throw new AbortException(ARFF_HEADER_RELATION + " not found in file."); } /* Skip comments and empty lines */ if (ARFF_COMMENT.reset(line).matches() \|\| EMPTY.reset(line).matches()) { continue; } /* Break on relation statement */ if (ARFF_HEADER_RELATION.reset(line).matches()) { break; } throw new AbortException("Expected relation declaration: " + line); } }` | Read the dataset header part of the ARFF file to ensure consistency. | 154 | 14 |
| 157,811 | `private void nextToken(StreamTokenizer tokenizer) throws IOException { tokenizer.nextToken(); if ((tokenizer.ttype == ' ') \|\| (tokenizer.ttype == ' ')) { tokenizer.ttype = StreamTokenizer.TT_WORD; } else if ((tokenizer.ttype == StreamTokenizer.TT_WORD) && (tokenizer.sval.equals("?"))) { tokenizer.ttype = ' '; } if (LOG.isDebugging()) { if (tokenizer.ttype == StreamTokenizer.TT_NUMBER) { LOG.debug("token: " + tokenizer.nval); } else if (tokenizer.ttype == StreamTokenizer.TT_WORD) { LOG.debug("token: " + tokenizer.sval); } else if (tokenizer.ttype == StreamTokenizer.TT_EOF) { LOG.debug("token: EOF"); } else if (tokenizer.ttype == StreamTokenizer.TT_EOL) { LOG.debug("token: EOL"); } else { LOG.debug("token type: " + tokenizer.ttype); } } }` | Helper function for token handling. | 277 | 6 |
| 157,812 | `public static <E extends ClusterOrder> Clustering<Model> makeOPTICSCut(E co, double epsilon) { /* Clustering model we are building */ Clustering<Model> clustering = new Clustering<>("OPTICS Cut Clustering", "optics-cut"); /* Collects noise elements */ ModifiableDBIDs noise = DBIDUtil.newHashSet(); double lastDist = Double.MAX_VALUE; double actDist = Double.MAX_VALUE; /* Current working set */ ModifiableDBIDs current = DBIDUtil.newHashSet(); /* TODO: can we implement this more nicely with a 1-lookahead? */ DBIDVar prev = DBIDUtil.newVar(); for (DBIDIter it = co.iter(); it.valid(); prev.set(it), it.advance()) { lastDist = actDist; actDist = co.getReachability(it); if (actDist <= epsilon) { /* The last element before the plot drops belongs to the cluster */ if (lastDist > epsilon && prev.isSet()) { /* So un-noise it */ noise.remove(prev); /* Add it to the cluster */ current.add(prev); } current.add(it); } else { /* 'Finish' the previous cluster */ if (!current.isEmpty()) { /* TODO: do we want a minpts restriction? But we only get core points guaranteed anyway. */ clustering.addToplevelCluster(new Cluster<Model>(current, ClusterModel.CLUSTER)); current = DBIDUtil.newHashSet(); } /* Add to noise */ noise.add(it); } } /* Any unfinished cluster will also be added */ if (!current.isEmpty()) { clustering.addToplevelCluster(new Cluster<Model>(current, ClusterModel.CLUSTER)); } /* Add noise */ clustering.addToplevelCluster(new Cluster<Model>(noise, true, ClusterModel.CLUSTER)); return clustering; }` | Compute an OPTICS cut clustering | 476 | 8 |
| 157,813 | `public static LabelList make(Collection<String> labels) { int size = labels.size(); if (size == 0) { return EMPTY_LABELS; } return new LabelList(labels.toArray(new String[size])); }` | Constructor replacement. | 56 | 4 |
| 157,814 | `public Clustering<SubspaceModel> run(Database db, Relation<V> relation) { if (mu >= relation.size()) { throw new AbortException("Parameter mu is chosen unreasonably large. This won't yield meaningful results."); } DiSHClusterOrder opticsResult = new Instance(db, relation).run(); if (LOG.isVerbose()) { LOG.verbose("Compute Clusters."); } return computeClusters(relation, opticsResult); }` | Performs the DiSH algorithm on the given database. | 118 | 11 |
| 157,815 | `private Clustering<SubspaceModel> computeClusters(Relation<V> database, DiSHClusterOrder clusterOrder) { final int dimensionality = RelationUtil.dimensionality(database); /* Extract clusters */ Object2ObjectOpenCustomHashMap<long[], List<ArrayModifiableDBIDs>> clustersMap = extractClusters(database, clusterOrder); logClusterSizes("Step 1: extract clusters", dimensionality, clustersMap); /* Check if there are clusters < minpts */ checkClusters(database, clustersMap); logClusterSizes("Step 2: check clusters", dimensionality, clustersMap); /* Sort the clusters */ List<Cluster<SubspaceModel>> clusters = sortClusters(database, clustersMap); if (LOG.isVerbose()) { StringBuilder msg = new StringBuilder("Step 3: sort clusters"); for (Cluster<SubspaceModel> c : clusters) { msg.append(' ').append(BitsUtil.toStringLow(c.getModel().getSubspace().getDimensions(), dimensionality)).append(" ids ").append(c.size()); } LOG.verbose(msg.toString()); } /* Build the hierarchy */ Clustering<SubspaceModel> clustering = new Clustering<>("DiSH clustering", "dish-clustering"); buildHierarchy(database, clustering, clusters, dimensionality); if (LOG.isVerbose()) { StringBuilder msg = new StringBuilder("Step 4: build hierarchy"); for (Cluster<SubspaceModel> c : clusters) { msg.append(' ').append(BitsUtil.toStringLow(c.getModel().getSubspace().getDimensions(), dimensionality)).append(" ids ").append(c.size()); for (It<Cluster<SubspaceModel>> iter = clustering.getClusterHierarchy().iterParents(c); iter.valid(); iter.advance()) { msg.append("\n parent ").append(iter.get()); } for (It<Cluster<SubspaceModel>> iter = clustering.getClusterHierarchy().iterChildren(c); iter.valid(); iter.advance()) { msg.append("\n child ").append(iter.get()); } } LOG.verbose(msg.toString()); } /* Build result */ for (Cluster<SubspaceModel> c : clusters) { if (clustering.getClusterHierarchy().numParents(c) == 0) { clustering.addToplevelCluster(c); } } return clustering; }` | Computes the hierarchical clusters according to the cluster order. | 643 | 11 |
| 157,816 | `private void logClusterSizes(String m, int dimensionality, Object2ObjectOpenCustomHashMap<long[], List<ArrayModifiableDBIDs>> clustersMap) { if (LOG.isVerbose()) { final StringBuilder msg = new StringBuilder(1000).append(m).append(' '); for (ObjectIterator<Object2ObjectMap.Entry<long[], List<ArrayModifiableDBIDs>>> iter = clustersMap.object2ObjectEntrySet().fastIterator(); iter.hasNext();) { Object2ObjectMap.Entry<long[], List<ArrayModifiableDBIDs>> entry = iter.next(); msg.append(BitsUtil.toStringLow(entry.getKey(), dimensionality)).append(" sizes:"); for (ArrayModifiableDBIDs c : entry.getValue()) { msg.append(' ').append(c.size()); } msg.append(' '); } LOG.verbose(msg.toString()); } }` | Log cluster sizes in verbose mode. | 246 | 8 |
| 157,817 | `private List<Cluster<SubspaceModel>> sortClusters(Relation<V> relation, Object2ObjectMap<long[], List<ArrayModifiableDBIDs>> clustersMap) { final int db_dim = RelationUtil.dimensionality(relation); /* int num = 1; */ List<Cluster<SubspaceModel>> clusters = new ArrayList<>(); for (long[] pv : clustersMap.keySet()) { List<ArrayModifiableDBIDs> parallelClusters = clustersMap.get(pv); for (int i = 0; i < parallelClusters.size(); i++) { ArrayModifiableDBIDs c = parallelClusters.get(i); Cluster<SubspaceModel> cluster = new Cluster<>(c); cluster.setModel(new SubspaceModel(new Subspace(pv), Centroid.make(relation, c).getArrayRef())); String subspace = BitsUtil.toStringLow(cluster.getModel().getSubspace().getDimensions(), db_dim); cluster.setName(parallelClusters.size() > 1 ? ("Cluster_" + subspace + "_" + i) : ("Cluster_" + subspace)); clusters.add(cluster); } } /* Sort the clusters w.r.t. lambda */ Comparator<Cluster<SubspaceModel>> comparator = new Comparator<Cluster<SubspaceModel>>() { @Override public int compare(Cluster<SubspaceModel> c1, Cluster<SubspaceModel> c2) { return c2.getModel().getSubspace().dimensionality() - c1.getModel().getSubspace().dimensionality(); } }; Collections.sort(clusters, comparator); return clusters; }` | Returns a sorted list of the clusters w.r.t. the subspace dimensionality in descending order. | 420 | 22 |
| 157,818 | `private void checkClusters(Relation<V> relation, Object2ObjectMap<long[], List<ArrayModifiableDBIDs>> clustersMap) { final int dimensionality = RelationUtil.dimensionality(relation); /* Check if there are clusters < minpts, and add them to notAssigned */ List<Pair<long[], ArrayModifiableDBIDs>> notAssigned = new ArrayList<>(); Object2ObjectMap<long[], List<ArrayModifiableDBIDs>> newClustersMap = new Object2ObjectOpenCustomHashMap<>(BitsUtil.FASTUTIL_HASH_STRATEGY); Pair<long[], ArrayModifiableDBIDs> noise = new Pair<>(BitsUtil.zero(dimensionality), DBIDUtil.newArray()); for (long[] pv : clustersMap.keySet()) { if (BitsUtil.cardinality(pv) == 0) { /* noise */ List<ArrayModifiableDBIDs> parallelClusters = clustersMap.get(pv); for (ArrayModifiableDBIDs c : parallelClusters) { noise.second.addDBIDs(c); } } else { /* clusters */ List<ArrayModifiableDBIDs> parallelClusters = clustersMap.get(pv); List<ArrayModifiableDBIDs> newParallelClusters = new ArrayList<>(parallelClusters.size()); for (ArrayModifiableDBIDs c : parallelClusters) { if (!BitsUtil.isZero(pv) && c.size() < mu) { notAssigned.add(new Pair<>(pv, c)); } else { newParallelClusters.add(c); } } newClustersMap.put(pv, newParallelClusters); } } clustersMap.clear(); clustersMap.putAll(newClustersMap); for (Pair<long[], ArrayModifiableDBIDs> c : notAssigned) { if (c.second.isEmpty()) { continue; } Pair<long[], ArrayModifiableDBIDs> parent = findParent(relation, c, clustersMap); (parent != null ? parent : noise).second.addDBIDs(c.second); } List<ArrayModifiableDBIDs> noiseList = new ArrayList<>(1); noiseList.add(noise.second); clustersMap.put(noise.first, noiseList); }` | Removes the clusters with size < minpts from the cluster map and adds them to their parents. | 559 | 23 |
| 157,819 | `private Pair<long[], ArrayModifiableDBIDs> findParent(Relation<V> relation, Pair<long[], ArrayModifiableDBIDs> child, Object2ObjectMap<long[], List<ArrayModifiableDBIDs>> clustersMap) { Centroid child_centroid = ProjectedCentroid.make(child.first, relation, child.second); Pair<long[], ArrayModifiableDBIDs> result = null; int resultCardinality = -1; long[] childPV = child.first; int childCardinality = BitsUtil.cardinality(childPV); for (long[] parentPV : clustersMap.keySet()) { int parentCardinality = BitsUtil.cardinality(parentPV); if (parentCardinality >= childCardinality \|\| (resultCardinality != -1 && parentCardinality <= resultCardinality)) { continue; } long[] pv = BitsUtil.andCMin(childPV, parentPV); if (BitsUtil.equal(pv, parentPV)) { List<ArrayModifiableDBIDs> parentList = clustersMap.get(parentPV); for (ArrayModifiableDBIDs parent : parentList) { NumberVector parent_centroid = ProjectedCentroid.make(parentPV, relation, parent); double d = weightedDistance(child_centroid, parent_centroid, parentPV); if (d <= 2 * epsilon) { result = new Pair<>(parentPV, parent); resultCardinality = parentCardinality; break; } } } } return result; }` | Returns the parent of the specified cluster | 373 | 7 |
| 157,820 | `private int subspaceDimensionality(NumberVector v1, NumberVector v2, long[] pv1, long[] pv2, long[] commonPreferenceVector) { /* Number of zero values in commonPreferenceVector */ int subspaceDim = v1.getDimensionality() - BitsUtil.cardinality(commonPreferenceVector); /* Special case: v1 and v2 are in parallel subspaces */ if (BitsUtil.equal(commonPreferenceVector, pv1) \|\| BitsUtil.equal(commonPreferenceVector, pv2)) { double d = weightedDistance(v1, v2, commonPreferenceVector); if (d > 2 * epsilon) { subspaceDim++; } } return subspaceDim; }` | Compute the common subspace dimensionality of two vectors. | 171 | 12 |
| 157,821 | `protected static double weightedDistance(NumberVector v1, NumberVector v2, long[] weightVector) { double sqrDist = 0; for (int i = BitsUtil.nextSetBit(weightVector, 0); i >= 0; i = BitsUtil.nextSetBit(weightVector, i + 1)) { double manhattanI = v1.doubleValue(i) - v2.doubleValue(i); sqrDist += manhattanI * manhattanI; } return FastMath.sqrt(sqrDist); }` | Computes the weighted distance between the two specified vectors according to the given preference vector. | 121 | 17 |
| 157,822 | `@Override public double[][] processIds(DBIDs ids, Relation<? extends NumberVector> database) { return CovarianceMatrix.make(database, ids).destroyToPopulationMatrix(); }` | Compute Covariance Matrix for a collection of database IDs. | 51 | 13 |
| 157,823 | `private void updateMeta() { meta = new BundleMeta(); BundleMeta origmeta = source.getMeta(); for (int i = 0; i < origmeta.size(); i++) { SimpleTypeInformation<?> type = origmeta.get(i); if (column < 0) { /* Test whether this type matches */ if (TypeUtil.NUMBER_VECTOR_VARIABLE_LENGTH.isAssignableFromType(type)) { if (type instanceof VectorFieldTypeInformation) { @SuppressWarnings("unchecked") final VectorFieldTypeInformation<V> castType = (VectorFieldTypeInformation<V>) type; if (dim != -1 && castType.mindim() > dim) { throw new AbortException("Would filter all vectors: minimum dimensionality " + castType.mindim() + " > desired dimensionality " + dim); } if (dim != -1 && castType.maxdim() < dim) { throw new AbortException("Would filter all vectors: maximum dimensionality " + castType.maxdim() + " < desired dimensionality " + dim); } if (dim == -1) { dim = castType.mindim(); } if (castType.mindim() == castType.maxdim()) { meta.add(castType); column = i; continue; } } @SuppressWarnings("unchecked") final VectorTypeInformation<V> castType = (VectorTypeInformation<V>) type; if (dim != -1) { meta.add(new VectorFieldTypeInformation<>(FilterUtil.guessFactory(castType), dim, dim, castType.getSerializer())); } else { LOG.warning("No dimensionality yet for column " + i); meta.add(castType); } column = i; continue; } } meta.add(type); } }` | Update metadata. | 438 | 3 |
| 157,824 | `public static double logquantile(double val, double loc, double scale) { return loc + scale * (val - MathUtil.log1mexp(-val)); }` | Log quantile function. | 40 | 5 |
| 157,825 | `public static <C extends Model> void logClusterSizes(Clustering<C> c) { if (!LOG.isStatistics()) { return; } final List<Cluster<C>> clusters = c.getAllClusters(); final int numc = clusters.size(); LOG.statistics(new StringStatistic(PREFIX + "name", c.getLongName())); LOG.statistics(new LongStatistic(PREFIX + "clusters", numc)); Hierarchy<Cluster<C>> h = c.getClusterHierarchy(); int cnum = 0; for (Cluster<C> clu : clusters) { final String p = PREFIX + "cluster-" + cnum + "."; if (clu.getName() != null) { LOG.statistics(new StringStatistic(p + "name", clu.getName())); } LOG.statistics(new LongStatistic(p + "size", clu.size())); if (clu.isNoise()) { LOG.statistics(new StringStatistic(p + "noise", "true")); } if (h.numChildren(clu) > 0) { /* TODO: this only works if we have cluster names! */ StringBuilder buf = new StringBuilder(); for (It<Cluster<C>> it = h.iterChildren(clu); it.valid(); it.advance()) { if (buf.length() > 0) { buf.append(", "); } buf.append(it.get().getName()); } LOG.statistics(new StringStatistic(p + "children", buf.toString())); } /* TODO: also log parents? */ ++cnum; } }` | Log the cluster sizes of a clustering. | 416 | 9 |
| 157,826 | `public void addDenseUnit(CLIQUEUnit unit) { int numdim = unit.dimensionality(); for (int i = 0; i < numdim; i++) { BitsUtil.setI(getDimensions(), unit.getDimension(i)); } denseUnits.add(unit); coverage += unit.numberOfFeatureVectors(); }` | Adds the specified dense unit to this subspace. | 88 | 10 |
| 157,827 | `public List<Pair<Subspace, ModifiableDBIDs>> determineClusters() { List<Pair<Subspace, ModifiableDBIDs>> clusters = new ArrayList<>(); for (CLIQUEUnit unit : denseUnits) { if (!unit.isAssigned()) { ModifiableDBIDs cluster = DBIDUtil.newHashSet(); CLIQUESubspace model = new CLIQUESubspace(getDimensions()); clusters.add(new Pair<Subspace, ModifiableDBIDs>(model, cluster)); dfs(unit, cluster, model); } } return clusters; }` | Determines all clusters in this subspace by performing a depth-first search algorithm to find connected dense units. | 144 | 23 |
| 157,828 | `public void dfs(CLIQUEUnit unit, ModifiableDBIDs cluster, CLIQUESubspace model) { cluster.addDBIDs(unit.getIds()); unit.markAsAssigned(); model.addDenseUnit(unit); final long[] dims = getDimensions(); for (int dim = BitsUtil.nextSetBit(dims, 0); dim >= 0; dim = BitsUtil.nextSetBit(dims, dim + 1)) { CLIQUEUnit left = leftNeighbor(unit, dim); if (left != null && !left.isAssigned()) { dfs(left, cluster, model); } CLIQUEUnit right = rightNeighbor(unit, dim); if (right != null && !right.isAssigned()) { dfs(right, cluster, model); } } }` | Depth-first search algorithm to find connected dense units in this subspace that build a cluster. It starts with a unit, assigns it to a cluster, and finds all units it is connected to. | 200 | 38 |
| 157,829 | `protected CLIQUEUnit leftNeighbor(CLIQUEUnit unit, int dim) { for (CLIQUEUnit u : denseUnits) { if (u.containsLeftNeighbor(unit, dim)) { return u; } } return null; }` | Returns the left neighbor of the given unit in the specified dimension. | 56 | 13 |
| 157,830 | `protected CLIQUEUnit rightNeighbor(CLIQUEUnit unit, int dim) { for (CLIQUEUnit u : denseUnits) { if (u.containsRightNeighbor(unit, dim)) { return u; } } return null; }` | Returns the right neighbor of the given unit in the specified dimension. | 56 | 13 |
| 157,831 | `private IntIterator getCommonSplitDimensions(N node) { Collection<SplitHistory> splitHistories = new ArrayList<>(node.getNumEntries()); for (int i = 0; i < node.getNumEntries(); i++) { SpatialEntry entry = node.getEntry(i); if (!(entry instanceof XTreeDirectoryEntry)) { throw new RuntimeException("Wrong entry type to derive split dimension from: " + entry.getClass().getName()); } splitHistories.add(((XTreeDirectoryEntry) entry).getSplitHistory()); } return SplitHistory.getCommonDimensions(splitHistories); }` | Determine the common split dimensions from a list of entries. | 158 | 13 |
| 157,832 | `private HyperBoundingBox mbr(final int[] entries, final int from, final int to) { SpatialEntry first = this.node.getEntry(entries[from]); ModifiableHyperBoundingBox mbr = new ModifiableHyperBoundingBox(first); for (int i = from + 1; i < to; i++) { mbr.extend(this.node.getEntry(entries[i])); } return mbr; }` | Computes and returns the MBR of the specified nodes; only the nodes between the from and to index are considered. | 103 | 22 |
| 157,833 | `private void ensureSize(int minsize) { if (minsize <= store.length) { return; } int asize = store.length; while (asize < minsize) { asize = (asize >>> 1) + asize; } final int[] prev = store; store = new int[asize]; System.arraycopy(prev, 0, store, 0, size); }` | Resize as desired. | 88 | 5 |
| 157,834 | `private void grow() { final int newsize = store.length + (store.length >>> 1); final int[] prev = store; store = new int[newsize]; System.arraycopy(prev, 0, store, 0, size); }` | Grow array by 50%. | 57 | 7 |
| 157,835 | `public static double sumOfProbabilities(DBIDIter ignore, DBIDArrayIter di, double[] p) { double s = 0; for (di.seek(0); di.valid(); di.advance()) { if (DBIDUtil.equal(ignore, di)) { continue; } final double v = p[di.getOffset()]; if (!(v > 0)) { break; } s += v; } return s; }` | Compute the sum of probabilities; stop at the first 0, ignore the query object. | 107 | 14 |
| 157,836 | `public synchronized static Task queue(Listener callback) { final Task task = new Task(callback); /* TODO: synchronization? */ if (THREAD != null && THREAD.isAlive()) { THREAD.queue.add(task); return task; } THREAD = new ThumbnailThread(); THREAD.queue.add(task); THREAD.start(); return task; }` | Queue a thumbnail task in a global thumbnail thread. | 89 | 10 |
| 157,837 | `public static void unqueue(Task task) { if (THREAD != null) { synchronized (THREAD) { THREAD.queue.remove(task); } } }` | Remove a pending task from the queue. | 37 | 8 |
| 157,838 | `public void beginStep(int step, String stepTitle, Logging logger) { setProcessed(step - 1); this.stepTitle = stepTitle; logger.progress(this); }` | Do a new step and log it | 42 | 7 |
| 157,839 | `protected static HyperBoundingBox computeBounds(NumberVector[] samples) { assert (samples.length > 0) : "Cannot compute bounding box of empty set."; /* Compute bounds: */ final int dimensions = samples[0].getDimensionality(); final double[] min = new double[dimensions]; final double[] max = new double[dimensions]; NumberVector first = samples[0]; for (int d = 0; d < dimensions; d++) { min[d] = max[d] = first.doubleValue(d); } for (int i = 1; i < samples.length; i++) { NumberVector v = samples[i]; for (int d = 0; d < dimensions; d++) { final double c = v.doubleValue(d); min[d] = c < min[d] ? c : min[d]; max[d] = c > max[d] ? c : max[d]; } } return new HyperBoundingBox(min, max); }` | Compute the bounding box for some samples. | 232 | 10 |
| 157,840 | `@Override protected void preprocess() { final Logging log = getLogger(); /* Could be subclass */ createStorage(); ArrayDBIDs ids = DBIDUtil.ensureArray(relation.getDBIDs()); if (log.isStatistics()) { log.statistics(new LongStatistic(this.getClass().getName() + ".k", k)); } Duration duration = log.isStatistics() ? log.newDuration(this.getClass().getName() + ".precomputation-time").begin() : null; FiniteProgress progress = getLogger().isVerbose() ? new FiniteProgress("Materializing k nearest neighbors (k=" + k + ")", ids.size(), getLogger()) : null; /* Try bulk */ List<? extends KNNList> kNNList = null; if (usebulk) { kNNList = knnQuery.getKNNForBulkDBIDs(ids, k); if (kNNList != null) { int i = 0; for (DBIDIter id = ids.iter(); id.valid(); id.advance(), i++) { storage.put(id, kNNList.get(i)); log.incrementProcessed(progress); } } } else { final boolean ismetric = getDistanceQuery().getDistanceFunction().isMetric(); for (DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { if (ismetric && storage.get(iter) != null) { log.incrementProcessed(progress); continue; /* Previously computed (duplicate point?) */ } KNNList knn = knnQuery.getKNNForDBID(iter, k); storage.put(iter, knn); if (ismetric) { for (DoubleDBIDListIter it = knn.iter(); it.valid() && it.doubleValue() == 0.; it.advance()) { storage.put(it, knn); /* Reuse */ } } log.incrementProcessed(progress); } } log.ensureCompleted(progress); if (duration != null) { log.statistics(duration.end()); } }` | The actual preprocessing step. | 531 | 6 |
| 157,841 | `protected void objectsInserted(DBIDs ids) { final Logging log = getLogger(); /* Could be subclass */ StepProgress stepprog = log.isVerbose() ? new StepProgress(3) : null; ArrayDBIDs aids = DBIDUtil.ensureArray(ids); /* Materialize the new kNNs */ log.beginStep(stepprog, 1, "New insertions occurred, materialize their new kNNs."); /* Bulk-query kNNs */ List<? extends KNNList> kNNList = knnQuery.getKNNForBulkDBIDs(aids, k); /* Store in storage */ DBIDIter iter = aids.iter(); for (int i = 0; i < aids.size(); i++, iter.advance()) { storage.put(iter, kNNList.get(i)); } /* Update the affected kNNs */ log.beginStep(stepprog, 2, "New insertions occurred, update the affected kNNs."); ArrayDBIDs rkNN_ids = updateKNNsAfterInsertion(ids); /* Inform listeners */ log.beginStep(stepprog, 3, "New insertions occurred, inform listeners."); fireKNNsInserted(ids, rkNN_ids); log.setCompleted(stepprog); }` | Called after new objects have been inserted; updates the materialized neighborhood. | 310 | 14 |
| 157,842 | `protected void objectsRemoved(DBIDs ids) { final Logging log = getLogger(); StepProgress stepprog = log.isVerbose() ? new StepProgress(3) : null; /* Delete the materialized (old) kNNs */ log.beginStep(stepprog, 1, "New deletions occurred, remove their materialized kNNs."); for (DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { storage.delete(iter); } /* Update the affected kNNs */ log.beginStep(stepprog, 2, "New deletions occurred, update the affected kNNs."); ArrayDBIDs rkNN_ids = updateKNNsAfterDeletion(ids); /* Inform listeners */ log.beginStep(stepprog, 3, "New deletions occurred, inform listeners."); fireKNNsRemoved(ids, rkNN_ids); log.ensureCompleted(stepprog); }` | Called after objects have been removed; updates the materialized neighborhood. | 232 | 13 |
| 157,843 | `protected void fireKNNsInserted(DBIDs insertions, DBIDs updates) { KNNChangeEvent e = new KNNChangeEvent(this, KNNChangeEvent.Type.INSERT, insertions, updates); Object[] listeners = listenerList.getListenerList(); for (int i = listeners.length - 2; i >= 0; i -= 2) { if (listeners[i] == KNNListener.class) { ((KNNListener) listeners[i + 1]).kNNsChanged(e); } } }` | Informs all registered KNNListeners that new kNNs have been inserted and, as a result, some kNNs have been changed. | 122 | 27 |
| 157,844 | `protected void fireKNNsRemoved(DBIDs removals, DBIDs updates) { KNNChangeEvent e = new KNNChangeEvent(this, KNNChangeEvent.Type.DELETE, removals, updates); Object[] listeners = listenerList.getListenerList(); for (int i = listeners.length - 2; i >= 0; i -= 2) { if (listeners[i] == KNNListener.class) { ((KNNListener) listeners[i + 1]).kNNsChanged(e); } } }` | Informs all registered KNNListeners that existing kNNs have been removed and, as a result, some kNNs have been changed. | 124 | 27 |
| 157,845 | `@Override public void buildClassifier(Database database, Relation<? extends ClassLabel> labelrep) { Object2IntOpenHashMap<ClassLabel> count = new Object2IntOpenHashMap<>(); for (DBIDIter iter = labelrep.iterDBIDs(); iter.valid(); iter.advance()) { count.addTo(labelrep.get(iter), 1); } int max = Integer.MIN_VALUE; double size = labelrep.size(); distribution = new double[count.size()]; labels = new ArrayList<>(count.size()); ObjectIterator<Entry<ClassLabel>> iter = count.object2IntEntrySet().fastIterator(); for (int i = 0; iter.hasNext(); ++i) { Entry<ClassLabel> entry = iter.next(); distribution[i] = entry.getIntValue() / size; labels.add(entry.getKey()); if (entry.getIntValue() > max) { max = entry.getIntValue(); prediction = entry.getKey(); } } }` | Learns the prior probability for all classes. | 259 | 9 |
| 157,846 | `@Override public Assignments<E> split(AbstractMTree<?, N, E, ?> tree, N node) { final int n = node.getNumEntries(); double[][] distanceMatrix = computeDistanceMatrix(tree, node); double miSumCR = Double.POSITIVE_INFINITY; boolean leaf = node.isLeaf(); Assignments<E> bestAssignment = null; for (int i = 0; i < n; i++) { for (int j = i + 1; j < n; j++) { Assignments<E> currentAssignments = distributor.distribute(node, i, distanceMatrix[i], j, distanceMatrix[j]); double maxCR = Math.max(currentAssignments.computeFirstCover(leaf), currentAssignments.computeSecondCover(leaf)); if (maxCR < miSumCR) { miSumCR = maxCR; bestAssignment = currentAssignments; } } } return bestAssignment; }` | Selects two objects of the specified node to be promoted and stored into the parent node. The mM-RAD strategy considers all possible pairs of objects and, after partitioning the set of entries, promotes the pair of objects for which the larger of the two covering radii is minimum. | 232 | 55 |
| 157,847 | `public static double[][] unboxVectors(List<? extends NumberVector> means) { double[][] ret = new double[means.size()][]; for (int i = 0; i < ret.length; i++) { ret[i] = means.get(i).toArray(); } return ret; }` | Unbox database means to primitive means. | 81 | 8 |
| 157,848 | `public void put(double x, double y, double w) { if (w == 0.) { return; } if (sumWe <= 0.) { sumX = x * w; sumY = y * w; sumWe = w; return; } /* Delta to previous mean */ final double deltaX = x * sumWe - sumX; final double deltaY = y * sumWe - sumY; final double oldWe = sumWe; /* Incremental update */ sumWe += w; final double f = w / (sumWe * oldWe); /* Update */ sumXX += f * deltaX * deltaX; sumYY += f * deltaY * deltaY; /* should equal weight * deltaY * deltaX! */ sumXY += f * deltaX * deltaY; /* Update means */ sumX += x * w; sumY += y * w; }` | Put a single value into the correlation statistic. | 183 | 9 |
| 157,849 | `public double getCorrelation() { if (!(sumXX > 0. && sumYY > 0.)) { return (sumXX == sumYY) ? 1. : 0.; } return sumXY / FastMath.sqrt(sumXX * sumYY); }` | Get the Pearson correlation value. | 60 | 6 |
| 157,850 | `public static double coefficient(double[] x, double[] y) { final int xdim = x.length; final int ydim = y.length; if (xdim != ydim) { throw new IllegalArgumentException("Invalid arguments: arrays differ in length."); } if (xdim == 0) { throw new IllegalArgumentException("Empty vector."); } /* Inlined computation of Pearson correlation, to avoid allocating objects! This is a numerically stabilized version, avoiding sum-of-squares. */ double sumXX = 0., sumYY = 0., sumXY = 0.; double sumX = x[0], sumY = y[0]; int i = 1; while (i < xdim) { final double xv = x[i], yv = y[i]; /* Delta to previous mean */ final double deltaX = xv * i - sumX; final double deltaY = yv * i - sumY; /* Increment count first */ final double oldi = i; /* Convert to double! */ ++i; final double f = 1. / (i * oldi); /* Update */ sumXX += f * deltaX * deltaX; sumYY += f * deltaY * deltaY; /* should equal deltaY * deltaX! */ sumXY += f * deltaX * deltaY; /* Update sums */ sumX += xv; sumY += yv; } /* One or both series were constant: */ if (!(sumXX > 0. && sumYY > 0.)) { return (sumXX == sumYY) ? 1. : 0.; } return sumXY / FastMath.sqrt(sumXX * sumYY); }` | Compute the Pearson product-moment correlation coefficient for two FeatureVectors. | 368 | 16 |
| 157,851 | `public static double weightedCoefficient(NumberVector x, NumberVector y, double[] weights) { final int xdim = x.getDimensionality(); final int ydim = y.getDimensionality(); if (xdim != ydim) { throw new IllegalArgumentException("Invalid arguments: number vectors differ in dimensionality."); } if (xdim != weights.length) { throw new IllegalArgumentException("Dimensionality doesn't agree to weights."); } if (xdim == 0) { throw new IllegalArgumentException("Empty vector."); } /* Inlined computation of Pearson correlation, to avoid allocating objects! This is a numerically stabilized version, avoiding sum-of-squares. */ double sumXX = 0., sumYY = 0., sumXY = 0., sumWe = weights[0]; double sumX = x.doubleValue(0) * sumWe, sumY = y.doubleValue(0) * sumWe; for (int i = 1; i < xdim; ++i) { final double xv = x.doubleValue(i), yv = y.doubleValue(i), w = weights[i]; /* Delta to previous mean */ final double deltaX = xv * sumWe - sumX; final double deltaY = yv * sumWe - sumY; /* Increment count first */ final double oldWe = sumWe; sumWe += w; final double f = w / (sumWe * oldWe); /* Update */ sumXX += f * deltaX * deltaX; sumYY += f * deltaY * deltaY; /* should equal deltaY * deltaX! */ sumXY += f * deltaX * deltaY; /* Update sums */ sumX += xv * w; sumY += yv * w; } /* One or both series were constant: */ if (!(sumXX > 0. && sumYY > 0.)) { return (sumXX == sumYY) ? 1. : 0.; } return sumXY / FastMath.sqrt(sumXX * sumYY); }` | Compute the Pearson product-moment correlation coefficient for two NumberVectors. | 461 | 16 |
| 157,852 | `@SuppressWarnings("unchecked") public static <T, A> ExtendedArray<T> extend(A array, ArrayAdapter<T, A> getter, T extra) { return new ExtendedArray<>(array, (ArrayAdapter<T, Object>) getter, extra); }` | Static wrapper that has a nicer generics signature. | 68 | 10 |
| 157,853 | `public static SelectionResult ensureSelectionResult(final Database db) { List<SelectionResult> selections = ResultUtil.filterResults(db.getHierarchy(), db, SelectionResult.class); if (!selections.isEmpty()) { return selections.get(0); } SelectionResult sel = new SelectionResult(); ResultUtil.addChildResult(db, sel); return sel; }` | Ensure that there also is a selection container object. | 95 | 11 |
| 157,854 | `@SuppressWarnings("unused") public void debugRender(GL2 gl) { if (!DEBUG \|\| (startcamera == null)) { return; } gl.glLineWidth(3f); gl.glColor4f(1.f, 0.f, 0.f, .66f); gl.glBegin(GL.GL_LINES); gl.glVertex3f(0.f, 0.f, 0.f); double rot = startangle - startcamera.getRotationZ(); gl.glVertex3f((float) FastMath.cos(rot) * 4.f, (float) -FastMath.sin(rot) * 4.f, 0.f); gl.glVertex3f((float) FastMath.cos(rot) * 1.f, (float) -FastMath.sin(rot) * 1.f, 0.f); gl.glVertex3f((float) FastMath.cos(rot) * 1.f, (float) -FastMath.sin(rot) * 1.f, 1.f); gl.glEnd(); }` | Render a debugging hint for the arcball tool. | 262 | 10 |
| 157,855 | `public static List<SettingsResult> getSettingsResults(Result r) { if (r instanceof SettingsResult) { List<SettingsResult> ors = new ArrayList<>(1); ors.add((SettingsResult) r); return ors; } if (r instanceof HierarchicalResult) { return ResultUtil.filterResults(((HierarchicalResult) r).getHierarchy(), r, SettingsResult.class); } return Collections.emptyList(); }` | Collect all settings results from a Result | 112 | 7 |
| 157,856 | `public static String usage(Collection<TrackedParameter> options) { StringBuilder usage = new StringBuilder(10000); if (!REFERENCE_VERSION.equals(VERSION)) { usage.append("ELKI build: ").append(VERSION).append(NEWLINE).append(NEWLINE); } usage.append(REFERENCE); /* Collect options */ OptionUtil.formatForConsole(usage.append(NEWLINE).append("Parameters:").append(NEWLINE), FormatUtil.getConsoleWidth(), options); return usage.toString(); }` | Returns a usage message explaining all known options | 136 | 8 |
| 157,857 | `protected static void printErrorMessage(Exception e) { if (e instanceof AbortException) { /* Ensure we actually show the message: */ LoggingConfiguration.setVerbose(Level.VERBOSE); LOG.verbose(e.getMessage()); } else if (e instanceof UnspecifiedParameterException) { LOG.error(e.getMessage()); } else if (e instanceof ParameterException) { LOG.error(e.getMessage()); } else { LOG.exception(e); } }` | Print an error message for the given error. | 119 | 9 |
| 157,858 | `private static void printDescription(Class<?> descriptionClass) { if (descriptionClass == null) { return; } try { LoggingConfiguration.setVerbose(Level.VERBOSE); LOG.verbose(OptionUtil.describeParameterizable(new StringBuilder(), descriptionClass, FormatUtil.getConsoleWidth(), "").toString()); } catch (Exception e) { LOG.exception("Error instantiating class to describe.", e.getCause()); } }` | Print the description for the given parameter | 114 | 7 |
| 157,859 | `public static <T> void processDense(T data, Adapter<T> adapter, Collector collector) { /* Number of nodes */ final int n = adapter.size(data); /* Best distance for each node */ double[] best = new double[n]; Arrays.fill(best, Double.POSITIVE_INFINITY); /* Best previous node */ int[] src = new int[n]; /* Nodes already handled; byte[] uses more memory, but it will be faster. */ byte[] connected = new byte[n]; /* We always start at "random" node 0. Note: we use this below in the "j" loop! */ int current = 0; connected[current] = 1; best[current] = 0; /* Search */ for (int i = n - 2; i >= 0; i--) { /* Update best and src from current: */ int newbesti = -1; double newbestd = Double.POSITIVE_INFINITY; /* Note: we assume we started with 0, and can thus skip it */ for (int j = 0; j < n; ++j) { if (connected[j] == 1) { continue; } final double dist = adapter.distance(data, current, j); if (dist < best[j]) { best[j] = dist; src[j] = current; } if (best[j] < newbestd \|\| newbesti == -1) { newbestd = best[j]; newbesti = j; } } assert (newbesti >= 0); /* Flag */ connected[newbesti] = 1; /* Store edge */ collector.addEdge(newbestd, src[newbesti], newbesti); /* Continue */ current = newbesti; } }` | Run Prim's algorithm on a dense graph. | 383 | 9 |
| 157,860 | `protected boolean isInputValid(String filename, String line, String id, String msg) { return !filename.isEmpty() \|\| !line.isEmpty() \|\| !id.isEmpty() \|\| !msg.isEmpty(); }` | Derived classes can override this method | 53 | 7 |
| 157,861 | `public void parseVCppLine(String line, String projectPath, String compilationFile) { this.parseVCppCompilerCLLine(line, projectPath, compilationFile); }` | Can be used to create a list of includes, defines and options for a single line, if it follows the VC++ format. | 40 | 24 |
| 157,862 | `public Set<ValgrindError> processReport(File report) throws XMLStreamException { ValgrindReportStreamHandler streamHandler = new ValgrindReportStreamHandler(); new StaxParser(streamHandler).parse(report); return streamHandler.valgrindErrors; }` | Parses the given Valgrind report | 63 | 8 |
| 157,863 | `private static boolean isGeneratedNodeExcluded(AstNode astNode) { AstNode prev = astNode.getPreviousAstNode(); return prev != null && prev.getTokenLine() == astNode.getTokenLine() && prev.isCopyBookOrGeneratedNode(); }` | Exclude subsequent generated nodes if they are consecutive and on the same line. | 65 | 15 |
| 157,864 | `private boolean isBreakStatementExcluded(AstNode astNode) { boolean exclude = false; if (excludeCaseBreak && astNode.getToken().getType().equals(CxxKeyword.BREAK)) { for (AstNode statement = astNode.getFirstAncestor(CxxGrammarImpl.statement); statement != null; statement = statement.getPreviousSibling()) { if (astNode.getTokenLine() != statement.getTokenLine()) { break; } TokenType type = statement.getToken().getType(); if (type.equals(CxxKeyword.CASE) \|\| type.equals(CxxKeyword.DEFAULT)) { exclude = true; break; } } } return exclude; }` | Exclude break statement if it is on the same line as the switch label | 171 | 15 |
| 157,865 | `private boolean isEmptyExpressionStatement(AstNode astNode) { if (astNode.is(CxxGrammarImpl.expressionStatement) && ";".equals(astNode.getToken().getValue())) { AstNode statement = astNode.getFirstAncestor(CxxGrammarImpl.selectionStatement); if (statement != null) { return astNode.getTokenLine() == statement.getTokenLine(); } return isGeneratedNodeExcluded(astNode); } return false; }` | Exclude empty expression statement | 119 | 5 |
| 157,866 | `private Map<String, Macro> parsePredefinedUnitMacros(Map<String, Macro> configuredMacros) { if (!ctorInProgress \|\| (unitMacros != null)) { throw new IllegalStateException("Preconditions for initial fill-out of predefinedUnitMacros were violated"); } if (conf.getCompilationUnitSourceFiles().isEmpty() && (conf.getGlobalCompilationUnitSettings() == null)) { /* Configuration doesn't contain any settings for compilation units; CxxPreprocessor will use fixedMacros only */ return Collections.emptyMap(); } unitMacros = new MapChain<>(); if (getMacros() != unitMacros) { throw new IllegalStateException("expected unitMacros as active macros map"); } try { getMacros().setHighPrio(true); getMacros().putAll(Macro.UNIT_MACROS); getMacros().putAll(configuredMacros); parseForcedIncludes(); final HashMap<String, Macro> result = new HashMap<>(unitMacros.getHighPrioMap()); return result; } finally { getMacros().setHighPrio(false); /* just for the symmetry */ unitMacros = null; /* remove unitMacros, switch getMacros() to fixedMacros */ } }` | Create a temporary unitMacros map; this map will be used as the active macro storage while parsing forced includes. After parsing is over, extract the resulting macros and destroy the unitMacros; fixedMacros will be set as the active macros again. | 311 | 48 |
| 157,867 | `public <G extends Serializable> Metric<G> getMetric(CxxMetricsFactory.Key metricKey) { Metric<G> metric = (Metric<G>) this.langSpecificMetrics.get(metricKey); if (metric == null) { throw new IllegalStateException("Requested metric " + metricKey + " couldn't be found"); } return metric; }` | Get language specific metric | 89 | 4 |
| 157,868 | `public static List<String> getElements(File file, String charset) { List<String> list = new ArrayList<>(); try (BufferedReader br = new BufferedReader(new InputStreamReader(java.nio.file.Files.newInputStream(file.toPath()), charset))) { StringBuilder sb = new StringBuilder(4096); String line; int cnt = 0; final Pattern whitespacesOnly = Pattern.compile("^\\s*$"); while ((line = br.readLine()) != null) { if (cnt > (TOP_COUNT)) { if (whitespacesOnly.matcher(line).matches()) { list.add(sb.toString()); sb.setLength(0); } else { sb.append(line); sb.append(' '); } } ++cnt; } if (sb.length() > 0) { list.add(sb.toString()); } } catch (IOException e) { String msg = new StringBuilder(512).append("Cannot feed the data into sonar, details: '").append(e).append("'").toString(); LOG.error(msg); } return list; }` | Get all DrMemory elements from the file | 301 | 7 |
| 157,869 | `public void saveUniqueViolation(SensorContext sensorContext, CxxReportIssue issue) { if (uniqueIssues.add(issue)) { saveViolation(sensorContext, issue); } }` | Saves a code violation only if it wasn't already saved | 43 | 11 |
| 157,870 | `@SafeVarargs public static SourceFile scanSingleFile(InputFile file, SensorContext sensorContext, CxxLanguage language, SquidAstVisitor<Grammar>... visitors) { return scanSingleFileConfig(language, file, new CxxConfiguration(sensorContext.fileSystem().encoding()), visitors); }` | Helper method for testing checks without having to deploy them on a Sonar instance. | 70 | 16 |
| 157,871 | `public static SourceFile scanSingleFileConfig(CxxLanguage language, InputFile file, CxxConfiguration cxxConfig, SquidAstVisitor<Grammar>... visitors) { if (!file.isFile()) { throw new IllegalArgumentException("File '" + file + "' not found."); } AstScanner<Grammar> scanner = create(language, cxxConfig, visitors); scanner.scanFile(file.file()); Collection<SourceCode> sources = scanner.getIndex().search(new QueryByType(SourceFile.class)); if (sources.size() != 1) { throw new IllegalStateException("Only one SourceFile was expected whereas " + sources.size() + " has been returned."); } return (SourceFile) sources.iterator().next(); }` | Helper method for scanning a single file | 186 | 7 |
| 157,872 | `public static String join(Path path1, Path path2) { if (path2.toString().isEmpty()) { return ""; } if (!path1.isAbsolute()) { path1 = Paths.get(".", path1.toString()); } if (!path2.isAbsolute()) { path2 = Paths.get(".", path2.toString()); } Path result = path1.resolve(path2).normalize(); if (!result.isAbsolute()) { result = Paths.get(".", result.toString()); } return result.toString(); }` | Join two paths | 159 | 3 |
| 157,873 | `public static void parse(CxxConfiguration config, File compileCommandsFile) throws IOException { LOG.debug("Parsing 'JSON Compilation Database' format"); ObjectMapper mapper = new ObjectMapper(); mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES); mapper.enable(DeserializationFeature.USE_JAVA_ARRAY_FOR_JSON_ARRAY); JsonCompilationDatabaseCommandObject[] commandObjects = mapper.readValue(compileCommandsFile, JsonCompilationDatabaseCommandObject[].class); for (JsonCompilationDatabaseCommandObject commandObject : commandObjects) { Path cwd = Paths.get("."); if (commandObject.getDirectory() != null) { cwd = Paths.get(commandObject.getDirectory()); } Path absPath = cwd.resolve(commandObject.getFile()); if ("__global__".equals(commandObject.getFile())) { CxxCompilationUnitSettings globalSettings = new CxxCompilationUnitSettings(); parseCommandObject(globalSettings, commandObject); config.setGlobalCompilationUnitSettings(globalSettings); } else { CxxCompilationUnitSettings settings = new CxxCompilationUnitSettings(); parseCommandObject(settings, commandObject); config.addCompilationUnitSettings(absPath.toAbsolutePath().normalize().toString(), settings); } } }` | Set up the given CxxConfiguration from the JSON compilation database | 346 | 12 |
| 157,874 | `private static String getOperatorId(AstNode operatorFunctionId) { StringBuilder builder = new StringBuilder(operatorFunctionId.getTokenValue()); AstNode operator = operatorFunctionId.getFirstDescendant(CxxGrammarImpl.overloadableOperator); if (operator != null) { AstNode opNode = operator.getFirstChild(); while (opNode != null) { builder.append(opNode.getTokenValue()); opNode = opNode.getNextSibling(); } } return builder.toString(); }` | XXX may go to a utility class | 126 | 7 |
| 157,875 | `private static List<Token> getInlineDocumentation(Token token, int line) { List<Token> comments = new ArrayList<>(); for (Trivia trivia : token.getTrivia()) { if (trivia.isComment()) { Token triviaToken = trivia.getToken(); if ((triviaToken != null) && (triviaToken.getLine() == line) && (isDoxygenInlineComment(triviaToken.getValue()))) { comments.add(triviaToken); if (LOG.isTraceEnabled()) { LOG.trace("Inline doc: " + triviaToken.getValue()); } } } } return comments; }` | Check if inline Doxygen documentation is attached to the given token at the specified line | 157 | 16 |
| 157,876 | `public static String getContextStringProperty(SensorContext context, String name, String def) { String s = context.config().get(name).orElse(null); if (s == null \|\| s.isEmpty()) { return def; } return s; }` | Get string property from configuration. If the string is not set or empty, return the default value. | 61 | 19 |
157,877
|
@ Nullable public static String resolveFilename ( final String baseDir , @ Nullable final String filename ) { if ( filename != null ) { // Normalization can return null if path is null, is invalid, // or is a path with back-ticks outside known directory structure String normalizedPath = FilenameUtils . normalize ( filename ) ; if ( ( normalizedPath != null ) && ( new File ( normalizedPath ) . isAbsolute ( ) ) ) { return normalizedPath ; } // Prefix with absolute module base directory, attempt normalization again -- can still get null here normalizedPath = FilenameUtils . normalize ( baseDir + File . separator + filename ) ; if ( normalizedPath != null ) { return normalizedPath ; } } return null ; }
|
resolveFilename normalizes the full path of the report , resolving relative paths against the module base directory .
| 161
| 9
|
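The two-step resolution above (try the filename as an absolute path, then prefix it with baseDir) leans on FilenameUtils.normalize returning null for paths whose ".." segments escape the known directory structure. A hedged illustration with hypothetical paths; on Windows the separators in the output would differ:

import java.io.File;
import org.apache.commons.io.FilenameUtils;

public class ResolveSketch {
    public static void main(String[] args) {
        String baseDir = "/work/project"; // hypothetical module base directory
        // An already-absolute report path is normalized and used as-is.
        System.out.println(FilenameUtils.normalize("/abs/./report.xml"));   // /abs/report.xml
        // A '..' that backs out past the start of the path yields null ...
        System.out.println(FilenameUtils.normalize("../../report.xml"));    // null
        // ... so relative reports are prefixed with the base directory and normalized again.
        System.out.println(FilenameUtils.normalize(baseDir + File.separator + "build/../report.xml")); // /work/project/report.xml
    }
}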
157,878
|
protected void createMultiLocationViolation ( CxxReportIssue message ) { SourceFile sourceFile = getSourceFile ( ) ; Set < CxxReportIssue > messages = getMultiLocationCheckMessages ( sourceFile ) ; if ( messages == null ) { messages = new HashSet <> ( ) ; } messages . add ( message ) ; setMultiLocationViolation ( sourceFile , messages ) ; }
|
Add the given message to the current SourceFile object
| 83
| 10
|
157,879
|
public List < Epic > getEpics ( Object groupIdOrPath , Integer authorId , String labels , EpicOrderBy orderBy , SortOrder sortOrder , String search , int page , int perPage ) throws GitLabApiException { GitLabApiForm formData = new GitLabApiForm ( page , perPage ) . withParam ( "author_id" , authorId ) . withParam ( "labels" , labels ) . withParam ( "order_by" , orderBy ) . withParam ( "sort" , sortOrder ) . withParam ( "search" , search ) ; Response response = get ( Response . Status . OK , formData . asMap ( ) , "groups" , getGroupIdOrPath ( groupIdOrPath ) , "epics" ) ; return ( response . readEntity ( new GenericType < List < Epic > > ( ) { } ) ) ; }
|
Gets all epics of the requested group and its subgroups using the specified page and per page setting .
| 194
| 22
|
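A minimal usage sketch for the method above, assuming a gitlab4j-style client that exposes it through an EpicsApi accessor; host, token, and group path are placeholders, and null filters are simply omitted from the request:

import java.util.List;
import org.gitlab4j.api.GitLabApi;
import org.gitlab4j.api.GitLabApiException;
import org.gitlab4j.api.models.Epic;

public class EpicsExample {
    public static void main(String[] args) throws GitLabApiException {
        GitLabApi gitLabApi = new GitLabApi("https://gitlab.example.com", "MY_PRIVATE_TOKEN");
        // Page 1, 20 epics per page, filtered by label; null params are skipped.
        List<Epic> epics = gitLabApi.getEpicsApi().getEpics(
            "my-group", null, "backend", null, null, null, 1, 20);
        epics.forEach(e -> System.out.println(e.getIid() + ": " + e.getTitle()));
    }
}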
157,880
|
public Epic getEpic ( Object groupIdOrPath , Integer epicIid ) throws GitLabApiException { Response response = get ( Response . Status . OK , null , "groups" , getGroupIdOrPath ( groupIdOrPath ) , "epics" , epicIid ) ; return ( response . readEntity ( Epic . class ) ) ; }
|
Get a single epic for the specified group .
| 77
| 9
|
157,881
|
public Optional < Epic > getOptionalEpic ( Object groupIdOrPath , Integer epicIid ) { try { return ( Optional . ofNullable ( getEpic ( groupIdOrPath , epicIid ) ) ) ; } catch ( GitLabApiException glae ) { return ( GitLabApi . createOptionalFromException ( glae ) ) ; } }
|
Get an Optional instance with the value for the specified Epic .
| 79
| 12
|
157,882
|
public Epic createEpic ( Object groupIdOrPath , String title , String labels , String description , Date startDate , Date endDate ) throws GitLabApiException { Form formData = new GitLabApiForm ( ) . withParam ( "title" , title , true ) . withParam ( "labels" , labels ) . withParam ( "description" , description ) . withParam ( "start_date" , startDate ) . withParam ( "end_date" , endDate ) ; Response response = post ( Response . Status . CREATED , formData . asMap ( ) , "groups" , getGroupIdOrPath ( groupIdOrPath ) , "epics" ) ; return ( response . readEntity ( Epic . class ) ) ; }
|
Creates a new epic .
| 164
| 6
|
157,883
|
public Epic updateEpic ( Object groupIdOrPath , Integer epicIid , String title , String labels , String description , Date startDate , Date endDate ) throws GitLabApiException { Form formData = new GitLabApiForm ( ) . withParam ( "title" , title , true ) . withParam ( "labels" , labels ) . withParam ( "description" , description ) . withParam ( "start_date" , startDate ) . withParam ( "end_date" , endDate ) ; Response response = put ( Response . Status . OK , formData . asMap ( ) , "groups" , getGroupIdOrPath ( groupIdOrPath ) , "epics" , epicIid ) ; return ( response . readEntity ( Epic . class ) ) ; }
|
Updates an existing epic .
| 172
| 6
|
157,884
|
public void deleteEpic ( Object groupIdOrPath , Integer epicIid ) throws GitLabApiException { delete ( Response . Status . NO_CONTENT , null , "groups" , getGroupIdOrPath ( groupIdOrPath ) , "epics" , epicIid ) ; }
|
Deletes an epic .
| 64
| 5
|
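Taken together, createEpic, updateEpic, and deleteEpic cover the epic lifecycle. A hedged end-to-end sketch under the same client assumption, with placeholder values throughout; note that title is the only required field, so nulls leave the other fields unchanged:

import java.util.Date;
import org.gitlab4j.api.GitLabApi;
import org.gitlab4j.api.GitLabApiException;
import org.gitlab4j.api.models.Epic;

public class EpicLifecycle {
    public static void main(String[] args) throws GitLabApiException {
        GitLabApi gitLabApi = new GitLabApi("https://gitlab.example.com", "MY_PRIVATE_TOKEN");
        Epic epic = gitLabApi.getEpicsApi().createEpic(
            "my-group", "Rework storage layer", "backend", "Tracking epic", new Date(), null);
        // Only the title is mandatory; the remaining nulls are omitted from the form.
        epic = gitLabApi.getEpicsApi().updateEpic(
            "my-group", epic.getIid(), "Rework storage layer (v2)", null, null, null, null);
        gitLabApi.getEpicsApi().deleteEpic("my-group", epic.getIid());
    }
}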
157,885
|
public List < Epic > getEpicIssues ( Object groupIdOrPath , Integer epicIid ) throws GitLabApiException { return ( getEpicIssues ( groupIdOrPath , epicIid , getDefaultPerPage ( ) ) . all ( ) ) ; }
|
Gets all issues that are assigned to an epic and the authenticated user has access to .
| 60
| 18
|
157,886
|
public List < Epic > getEpicIssues ( Object groupIdOrPath , Integer epicIid , int page , int perPage ) throws GitLabApiException { Response response = get ( Response . Status . OK , getPageQueryParams ( page , perPage ) , "groups" , getGroupIdOrPath ( groupIdOrPath ) , "epics" , epicIid , "issues" ) ; return ( response . readEntity ( new GenericType < List < Epic > > ( ) { } ) ) ; }
|
Gets all issues that are assigned to an epic and the authenticated user has access to using the specified page and per page setting .
| 113
| 26
|
157,887
|
public Pager < Epic > getEpicIssues ( Object groupIdOrPath , Integer epicIid , int itemsPerPage ) throws GitLabApiException { return ( new Pager < Epic > ( this , Epic . class , itemsPerPage , null , "groups" , getGroupIdOrPath ( groupIdOrPath ) , "epics" , epicIid , "issues" ) ) ; }
|
Get a Pager of all issues that are assigned to an epic and the authenticated user has access to .
| 88
| 21
|
157,888
|
public Stream < Epic > getEpicIssuesStream ( Object groupIdOrPath , Integer epicIid ) throws GitLabApiException { return ( getEpicIssues ( groupIdOrPath , epicIid , getDefaultPerPage ( ) ) . stream ( ) ) ; }
|
Gets all issues that are assigned to an epic and the authenticated user has access to as a Stream .
| 61
| 21
|
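The getEpicIssues variants above trade eagerness for control: a plain List fetches everything at once, a Pager pages on demand, and a Stream defers consumption. A sketch contrasting the three styles, again assuming the hypothetical client and an existing epic iid:

import java.util.List;
import java.util.stream.Stream;
import org.gitlab4j.api.GitLabApi;
import org.gitlab4j.api.GitLabApiException;
import org.gitlab4j.api.Pager;
import org.gitlab4j.api.models.Epic;

public class EpicIssuesStyles {
    public static void main(String[] args) throws GitLabApiException {
        GitLabApi gitLabApi = new GitLabApi("https://gitlab.example.com", "MY_PRIVATE_TOKEN");
        List<Epic> all = gitLabApi.getEpicsApi().getEpicIssues("my-group", 7);        // everything, eagerly
        System.out.println(all.size() + " issues fetched eagerly");
        Pager<Epic> pager = gitLabApi.getEpicsApi().getEpicIssues("my-group", 7, 50); // 50 items per page
        System.out.println(pager.getTotalItems() + " issues available via pager");
        Stream<Epic> stream = gitLabApi.getEpicsApi().getEpicIssuesStream("my-group", 7);
        stream.limit(10).forEach(i -> System.out.println(i.getTitle()));
    }
}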
157,889
|
public EpicIssue assignIssue ( Object groupIdOrPath , Integer epicIid , Integer issueIid ) throws GitLabApiException { Response response = post ( Response . Status . CREATED , ( Form ) null , "groups" , getGroupIdOrPath ( groupIdOrPath ) , "epics" , epicIid , "issues" , issueIid ) ; return ( response . readEntity ( EpicIssue . class ) ) ; }
|
Creates an epic - issue association . If the issue in question belongs to another epic it is unassigned from that epic .
| 95
| 26
|
157,890
|
public EpicIssue removeIssue ( Object groupIdOrPath , Integer epicIid , Integer issueIid ) throws GitLabApiException { Response response = delete ( Response . Status . OK , null , "groups" , getGroupIdOrPath ( groupIdOrPath ) , "epics" , epicIid , "issues" , issueIid ) ; return ( response . readEntity ( EpicIssue . class ) ) ; }
|
Remove an epic - issue association .
| 91
| 7
|
157,891
|
public EpicIssue updateIssue ( Object groupIdOrPath , Integer epicIid , Integer issueIid , Integer moveBeforeId , Integer moveAfterId ) throws GitLabApiException { GitLabApiForm form = new GitLabApiForm ( ) . withParam ( "move_before_id" , moveBeforeId ) . withParam ( "move_after_id" , moveAfterId ) ; Response response = post ( Response . Status . OK , form , "groups" , getGroupIdOrPath ( groupIdOrPath ) , "epics" , epicIid , "issues" , issueIid ) ; return ( response . readEntity ( EpicIssue . class ) ) ; }
|
Updates an epic - issue association .
| 149
| 8
|
157,892
|
private String getHeaderValue ( Response response , String key ) throws GitLabApiException { String value = response . getHeaderString ( key ) ; value = ( value != null ? value . trim ( ) : null ) ; if ( value == null || value . length ( ) == 0 ) { return ( null ) ; } return ( value ) ; }
|
Get the specified header value from the Response instance .
| 74
| 10
|
157,893
|
private int getIntHeaderValue ( Response response , String key ) throws GitLabApiException { String value = getHeaderValue ( response , key ) ; if ( value == null ) { return - 1 ; } try { return ( Integer . parseInt ( value ) ) ; } catch ( NumberFormatException nfe ) { throw new GitLabApiException ( "Invalid '" + key + "' header value (" + value + ") from server" ) ; } }
|
Get the specified integer header value from the Response instance .
| 98
| 11
|
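These two helpers are typically pointed at GitLab's pagination headers (X-Page, X-Total-Pages, X-Next-Page). A simplified standalone variant that returns -1 on malformed values instead of throwing, to show the parsing flow:

import javax.ws.rs.core.Response;

public class PaginationHeaders {
    // Simplified variant of getIntHeaderValue(): returns -1 instead of throwing.
    static int intHeader(Response response, String key) {
        String value = response.getHeaderString(key);
        if (value == null || value.trim().isEmpty()) {
            return -1; // header absent or blank
        }
        try {
            return Integer.parseInt(value.trim());
        } catch (NumberFormatException nfe) {
            return -1; // malformed; the real helper raises GitLabApiException here
        }
    }
    // Usage: intHeader(response, "X-Total-Pages"), intHeader(response, "X-Next-Page")
}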
157,894
|
private void setPageParam ( int page ) { pageParam . set ( 0 , Integer . toString ( page ) ) ; queryParams . put ( PAGE_PARAM , pageParam ) ; }
|
Sets the page query parameter .
| 42
| 7
|
157,895
|
public List < T > page ( int pageNumber ) { if ( pageNumber > totalPages && pageNumber > kaminariNextPage ) { throw new NoSuchElementException ( ) ; } else if ( pageNumber < 1 ) { throw new NoSuchElementException ( ) ; } if ( currentPage == 0 && pageNumber == 1 ) { currentPage = 1 ; return ( currentItems ) ; } if ( currentPage == pageNumber ) { return ( currentItems ) ; } try { setPageParam ( pageNumber ) ; Response response = api . get ( Response . Status . OK , queryParams , pathArgs ) ; currentItems = mapper . readValue ( ( InputStream ) response . getEntity ( ) , javaType ) ; currentPage = pageNumber ; if ( kaminariNextPage > 0 ) { kaminariNextPage = getIntHeaderValue ( response , NEXT_PAGE_HEADER ) ; } return ( currentItems ) ; } catch ( GitLabApiException | IOException e ) { throw new RuntimeException ( e ) ; } }
|
Returns the specified page of items as a List .
| 225
| 7
|
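page(n) gives random access while hasNext()/next() walk forward from the current position; the kaminariNextPage check handles servers that omit total counts and only advertise a next page. A sketch of both access styles, assuming a Pager built as in the earlier example and 1-based page numbers:

import java.util.List;
import org.gitlab4j.api.GitLabApi;
import org.gitlab4j.api.GitLabApiException;
import org.gitlab4j.api.Pager;
import org.gitlab4j.api.models.Epic;

public class PagerWalk {
    public static void main(String[] args) throws GitLabApiException {
        GitLabApi gitLabApi = new GitLabApi("https://gitlab.example.com", "MY_PRIVATE_TOKEN");
        Pager<Epic> pager = gitLabApi.getEpicsApi().getEpicIssues("my-group", 7, 20);
        List<Epic> third = pager.page(3); // throws NoSuchElementException when out of range
        System.out.println("page 3 holds " + third.size() + " items");
        while (pager.hasNext()) {         // then walk the remaining pages in order
            for (Epic e : pager.next()) {
                System.out.println(e.getTitle());
            }
        }
    }
}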
157,896
|
public List < T > all ( ) throws GitLabApiException { // Make sure that current page is 0, this will ensure the whole list is fetched // regardless of what page the instance is currently on. currentPage = 0 ; List < T > allItems = new ArrayList <> ( totalItems ) ; // Iterate through the pages and append each page of items to the list while ( hasNext ( ) ) { allItems . addAll ( next ( ) ) ; } return ( allItems ) ; }
|
Gets all the items from each page as a single List instance .
| 108
| 14
|
157,897
|
public Stream < T > stream ( ) throws GitLabApiException , IllegalStateException { if ( pagerStream == null ) { synchronized ( this ) { if ( pagerStream == null ) { // Make sure that current page is 0, this will ensure the whole list is streamed // regardless of what page the instance is currently on. currentPage = 0 ; // Create a Stream.Builder to contain all the items. This is more efficient than // getting a List with all() and streaming that List Stream . Builder < T > streamBuilder = Stream . builder ( ) ; // Iterate through the pages and append each page of items to the stream builder while ( hasNext ( ) ) { next ( ) . forEach ( streamBuilder ) ; } pagerStream = streamBuilder . build ( ) ; return ( pagerStream ) ; } } } throw new IllegalStateException ( "Stream already issued" ) ; }
|
Builds and returns a Stream instance which is pre - populated with all items from all pages .
| 189
| 19
|
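stream() drains every page once into a Stream.Builder under double-checked locking, so a second call fails rather than re-fetching. A generic sketch of that pattern, decoupled from the GitLab types; note the field must be volatile for the double-check to be safe:

import java.util.stream.Stream;

class OneShotStream<T> {
    private volatile Stream<T> stream; // volatile is required for safe double-checked locking

    Stream<T> get(Iterable<? extends Iterable<T>> pages) {
        if (stream == null) {
            synchronized (this) {
                if (stream == null) {
                    Stream.Builder<T> builder = Stream.builder();
                    for (Iterable<T> page : pages) {
                        page.forEach(builder); // Stream.Builder implements Consumer<T>
                    }
                    stream = builder.build();
                    return stream;
                }
            }
        }
        throw new IllegalStateException("Stream already issued");
    }
}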
157,898
|
public Stream < T > lazyStream ( ) throws IllegalStateException { if ( pagerStream == null ) { synchronized ( this ) { if ( pagerStream == null ) { // Make sure that current page is 0, this will ensure the whole list is streamed // regardless of what page the instance is currently on. currentPage = 0 ; pagerStream = StreamSupport . stream ( new PagerSpliterator < T > ( this ) , false ) ; return ( pagerStream ) ; } } } throw new IllegalStateException ( "Stream already issued" ) ; }
|
Creates a Stream instance for lazily streaming items from the GitLab server .
| 118
| 16
|
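lazyStream() instead wraps a Spliterator, so pages are fetched only as the stream is consumed. A self-contained sketch of a page-by-page Spliterator in the spirit of PagerSpliterator (whose internals are not shown here):

import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.function.Consumer;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

class LazyPages {
    // Pulls the next page only when the stream consumes past the current one.
    static <T> Stream<T> lazyStream(Iterator<List<T>> pages) {
        Spliterator<T> sp = new Spliterators.AbstractSpliterator<T>(Long.MAX_VALUE, Spliterator.ORDERED) {
            private Iterator<T> current = Collections.emptyIterator();

            @Override
            public boolean tryAdvance(Consumer<? super T> action) {
                while (!current.hasNext()) {
                    if (!pages.hasNext()) {
                        return false; // no more pages on the server
                    }
                    current = pages.next().iterator(); // lazy page fetch happens here
                }
                action.accept(current.next());
                return true;
            }
        };
        return StreamSupport.stream(sp, false);
    }
}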
157,899
|
public void setMaskedHeaderNames ( final List < String > maskedHeaderNames ) { this . maskedHeaderNames . clear ( ) ; if ( maskedHeaderNames != null ) { maskedHeaderNames . forEach ( h -> { addMaskedHeaderName ( h ) ; } ) ; } }
|
Set the list of header names to mask values for . If null , the list of header names to mask is cleared .
| 61
| 21
|