idx
int64
0
165k
question
stringlengths
73
4.15k
target
stringlengths
5
918
len_question
int64
21
890
len_target
int64
3
255
140,600
@ Override public int getPropagationConditions ( int idx ) { return idx < bins . length ? IntEventType . all ( ) : IntEventType . BOUND . getMask ( ) + IntEventType . INSTANTIATE . getMask ( ) ; }
react on removal events on bins variables react on bound events on loads variables
60
14
140,601
private void computeSumItemSizes ( ) { for ( int d = 0 ; d < nbDims ; d ++ ) { long sum = 0 ; for ( int i = 0 ; i < iSizes [ d ] . length ; i ++ ) { sum += iSizes [ d ] [ i ] ; } this . sumISizes [ d ] = sum ; } }
Compute the sum of the item sizes for each dimension .
81
12
140,602
@ Override public void clearCached ( ICacheManager mgr ) { Map < String , CacheEntry > moduleBuilds ; synchronized ( this ) { moduleBuilds = _moduleBuilds ; _moduleBuilds = null ; } if ( moduleBuilds != null ) { for ( Map . Entry < String , CacheEntry > entry : moduleBuilds . entrySet ( ) ) { entry . getValue ( ) . delete ( mgr ) ; } moduleBuilds . clear ( ) ; } }
Asynchronously delete the set of cached files for this module .
105
13
140,603
public static SplittableElementSet < VM > newVMIndex ( Collection < VM > c , TIntIntHashMap idx ) { return new SplittableElementSet <> ( c , idx ) ; }
Make a new splittable set from a collection of VM . We consider the collection does not have duplicated elements .
46
24
140,604
public static SplittableElementSet < Node > newNodeIndex ( Collection < Node > c , TIntIntHashMap idx ) { return new SplittableElementSet <> ( c , idx ) ; }
Make a new splittable set from a collection of nodes . We consider the collection does not have duplicated elements .
46
24
140,605
public boolean forEachPartition ( IterateProcedure < E > p ) { int curIdx = index . get ( values . get ( 0 ) . id ( ) ) ; int from ; int to ; for ( from = 0 , to = 0 ; to < values . size ( ) ; to ++ ) { int cIdx = index . get ( values . get ( to ) . id ( ) ) ; if ( curIdx != cIdx ) { if ( ! p . extract ( this , curIdx , from , to ) ) { return false ; } from = to ; curIdx = cIdx ; } } return p . extract ( this , curIdx , from , to ) ; }
Execute a procedure on each partition . The partition is indicated by its bounds on the backend array .
153
20
140,606
public Set < E > getSubSet ( int k ) { int from = - 1 ; //TODO: very bad. Bounds should be memorized for ( int x = 0 ; x < values . size ( ) ; x ++ ) { int cIdx = index . get ( values . get ( x ) . id ( ) ) ; if ( cIdx == k && from == - 1 ) { from = x ; } if ( from >= 0 && cIdx > k ) { return new ElementSubSet <> ( this , k , from , x ) ; } } if ( from >= 0 ) { return new ElementSubSet <> ( this , k , from , values . size ( ) ) ; } return Collections . emptySet ( ) ; }
Get a subset for the given partition .
162
8
140,607
public List < ElementSubSet < E > > getPartitions ( ) { final List < ElementSubSet < E > > partitions = new ArrayList <> ( ) ; forEachPartition ( ( idx , key , from , to ) -> { partitions . add ( new ElementSubSet <> ( SplittableElementSet . this , key , from , to ) ) ; return true ; } ) ; return partitions ; }
Get all the partitions as subsets .
90
8
140,608
public ICacheKeyGenerator combine ( ICacheKeyGenerator otherKeyGen ) { if ( this . equals ( otherKeyGen ) ) { return this ; } @ SuppressWarnings ( "unchecked" ) AbstractCollectionCacheKeyGenerator < T > other = ( AbstractCollectionCacheKeyGenerator < T > ) otherKeyGen ; if ( isProvisional ( ) && other . isProvisional ( ) ) { // should never happen throw new IllegalStateException ( ) ; } // If either generator is provisional, return a provisional result if ( isProvisional ( ) ) { return other ; } else if ( other . isProvisional ( ) ) { return this ; } if ( getCollection ( ) == null ) { return this ; } if ( other . getCollection ( ) == null ) { return other ; } // See if one of the keygens encompasses the other. This is the most likely // case and is more performant than always creating a new keygen. int size = getCollection ( ) . size ( ) , otherSize = other . getCollection ( ) . size ( ) ; if ( size > otherSize && getCollection ( ) . containsAll ( other . getCollection ( ) ) ) { return this ; } if ( otherSize > size && other . getCollection ( ) . containsAll ( getCollection ( ) ) ) { return other ; } // Neither keygen encompasses the other, so create a new one that is a combination // of the both of them. Set < T > combined = new HashSet < T > ( ) ; combined . addAll ( getCollection ( ) ) ; combined . addAll ( other . getCollection ( ) ) ; return newKeyGen ( combined , false ) ; }
Returns a cache key generator that is the combination of this cache key generator and the specified cache key generator ( i . e . the cache keys generated by the returned object vary according to the conditions honored by this generator and the specified generator .
365
47
140,609
public static final void setReader ( String format , TreeReader reader ) { String key = format . toLowerCase ( ) ; readers . put ( key , reader ) ; if ( JSON . equals ( key ) ) { cachedJsonReader = reader ; } }
Binds the given TreeReader instance to the specified data format .
54
13
140,610
protected int resolveInclude ( ) throws XMLStreamException { // we are no more in fallback mode this . fallback = false ; this . depth ++ ; int eventType = - 1 ; // read attributes... String href = getAttributeValue ( null , "href" ) ; LOGGER . trace ( "Resolving xi:include to href {}" , href ) ; String xpointer = getAttributeValue ( null , "xpointer" ) ; // get the included resource... DataResource includeResource = this . resource . navigate ( href ) ; // and try to include it... boolean success = false ; if ( includeResource . isAvailable ( ) ) { // determine inclusion format type... String parse = getAttributeValue ( null , "parse" ) ; if ( ( parse == null ) || ( "xml" . equals ( parse ) ) ) { this . includeReader = new XIncludeStreamReader ( this . factory , includeResource , this ) ; if ( xpointer != null ) { // shorthand form: id // scheme-based form: e.g. element(/1/*) this . includeReader = new XPointerStreamReader ( this . includeReader , xpointer ) ; } eventType = this . includeReader . nextTag ( ) ; setParent ( this . includeReader ) ; // we ascend the XML until the initial include is closed. closeInitialInclude ( ) ; success = true ; } else if ( "text" . equals ( parse ) ) { String encoding = getAttributeValue ( null , "encoding" ) ; Charset charset ; if ( encoding == null ) { charset = Charset . defaultCharset ( ) ; } else { charset = Charset . forName ( encoding ) ; } InputStream textInputStream = includeResource . openStream ( ) ; Reader reader = new InputStreamReader ( textInputStream , charset ) ; this . includeText = read ( reader ) ; // we ascend the XML until the initial include is closed. closeInitialInclude ( ) ; return XMLStreamConstants . CHARACTERS ; } else { throw new XMLStreamException ( "Unsupported XInclude parse type:" + parse ) ; } } if ( ! success ) { // search for fallback do { eventType = super . next ( ) ; } while ( ( eventType != XMLStreamConstants . START_ELEMENT ) && ( eventType != XMLStreamConstants . 
END_ELEMENT ) ) ; if ( eventType == XMLStreamConstants . START_ELEMENT ) { if ( ( XmlUtil . NAMESPACE_URI_XINCLUDE . equals ( getNamespaceURI ( ) ) ) && ( "fallback" . equals ( getLocalName ( ) ) ) ) { // found fallback this . fallback = true ; return next ( ) ; } } // no fallback available, ignore include... closeInitialInclude ( ) ; return next ( ) ; } return eventType ; }
This method is called when an include tag of the XInclude namespace was started . It resolves the include and finds a fallback on failure .
630
29
140,611
protected void closeInitialInclude ( ) throws XMLStreamException { LOGGER . trace ( "Closing xi:include" ) ; int eventType = - 1 ; // we ascend the XML until the initial include is closed. while ( this . depth > 0 ) { eventType = this . mainReader . next ( ) ; if ( eventType == XMLStreamConstants . START_ELEMENT ) { LOGGER . trace ( "Closing loop: Start {}" , this . mainReader . getLocalName ( ) ) ; this . depth ++ ; } else if ( eventType == XMLStreamConstants . END_ELEMENT ) { LOGGER . trace ( "Closing loop: End {}" , this . mainReader . getLocalName ( ) ) ; this . depth -- ; } } LOGGER . trace ( "Closing xi:include complete" ) ; }
This method ascends the XML until the initial include is closed .
185
13
140,612
public boolean read ( ) { if ( state == State . READING_FRAME_SIZE ) { // try to read the frame size completely if ( ! internalRead ( frameSizeBuffer ) ) return false ; // if the frame size has been read completely, then prepare to read the // actual frame. if ( frameSizeBuffer . remaining ( ) == 0 ) { // pull out the frame size as an integer. int frameSize = frameSizeBuffer . getInt ( 0 ) ; if ( frameSize <= 0 ) { logger . error ( "Read an invalid frame size of " + frameSize + ". Are you using TFramedTransport on the client side?" ) ; return false ; } if ( frameSize > thriftFactories . maxFrameSizeInBytes ) { logger . error ( "Invalid frame size got (" + frameSize + "), maximum expected " + thriftFactories . maxFrameSizeInBytes ) ; return false ; } // reallocate to match frame size (if needed) reallocateDataBuffer ( frameSize ) ; frameSizeBuffer . clear ( ) ; // prepare it to the next round of reading (if any) state = State . READING_FRAME ; } else { // this skips the check of READING_FRAME state below, since we can't // possibly go on to that state if there's data left to be read at // this one. state = State . READY_TO_READ_FRAME_SIZE ; return true ; } } // it is possible to fall through from the READING_FRAME_SIZE section // to READING_FRAME if there's already some frame data available once // READING_FRAME_SIZE is complete. if ( state == State . READING_FRAME ) { if ( ! internalRead ( dataBuffer ) ) return false ; state = ( dataBuffer . remaining ( ) == 0 ) ? State . READ_FRAME_COMPLETE : State . READY_TO_READ_FRAME ; // Do not read until we finish processing request. if ( state == State . READ_FRAME_COMPLETE ) { switchMode ( State . READ_FRAME_COMPLETE ) ; } return true ; } // if we fall through to this point, then the state must be invalid. logger . error ( "Read was called but state is invalid (" + state + ")" ) ; return false ; }
Give this Message a chance to read . The selector loop should have received a read event for this Message .
501
21
140,613
public boolean write ( ) { assert state == State . WRITING ; boolean writeFailed = false ; try { if ( response . streamTo ( transport ) < 0 ) { writeFailed = true ; return false ; } else if ( ! response . isFullyStreamed ( ) ) { // if socket couldn't accommodate whole write buffer, // continue writing when we get next write signal. switchToWrite ( ) ; return true ; } } catch ( IOException e ) { logger . error ( "Got an IOException during write!" , e ) ; writeFailed = true ; return false ; } finally { if ( writeFailed || response . isFullyStreamed ( ) ) response . close ( ) ; } // we're done writing. Now we need to switch back to reading. switchToRead ( ) ; return true ; }
Give this Message a chance to write its output to the final client .
176
14
140,614
public void changeSelectInterests ( ) { switch ( state ) { case READY_TO_WRITE : // set the OP_WRITE interest state = State . WRITING ; break ; case READY_TO_READ_FRAME_SIZE : state = State . READING_FRAME_SIZE ; break ; case READY_TO_READ_FRAME : state = State . READING_FRAME ; break ; case AWAITING_CLOSE : close ( ) ; selectionKey . cancel ( ) ; break ; default : logger . error ( "changeSelectInterest was called, but state is invalid (" + state + ")" ) ; } }
Give this Message a chance to change its interests .
139
10
140,615
public void invoke ( ) { assert state == State . READ_FRAME_COMPLETE : "Invoke called in invalid state: " + state ; TTransport inTrans = getInputTransport ( ) ; TProtocol inProt = thriftFactories . inputProtocolFactory . getProtocol ( inTrans ) ; TProtocol outProt = thriftFactories . outputProtocolFactory . getProtocol ( getOutputTransport ( ) ) ; try { thriftFactories . processorFactory . getProcessor ( inTrans ) . process ( inProt , outProt ) ; responseReady ( ) ; return ; } catch ( TException te ) { logger . warn ( "Exception while invoking!" , te ) ; } catch ( Throwable t ) { logger . error ( "Unexpected throwable while invoking!" , t ) ; } // This will only be reached when there is a throwable. state = State . AWAITING_CLOSE ; changeSelectInterests ( ) ; }
Actually invoke the method signified by this Message .
208
10
140,616
private boolean internalRead ( Buffer buffer ) { try { return ! ( buffer . readFrom ( transport ) < 0 ) ; } catch ( IOException e ) { logger . warn ( "Got an IOException in internalRead!" , e ) ; return false ; } }
Perform a read into dataBuffer .
55
8
140,617
public void close ( ) { freeDataBuffer ( ) ; frameSizeBuffer . free ( ) ; transport . close ( ) ; if ( response != null ) response . close ( ) ; }
Shut the connection down .
39
5
140,618
public ArrayList < SameLengthMotifs > classifyMotifs ( double lengthThreshold , GrammarRules grammarRules ) { // reset vars ArrayList < SameLengthMotifs > allClassifiedMotifs = new ArrayList < SameLengthMotifs > ( ) ; // down to business ArrayList < SAXMotif > allMotifs = getAllMotifs ( grammarRules ) ; // is this one better? int currentIndex = 0 ; for ( SAXMotif tmpMotif : allMotifs ) { currentIndex ++ ; if ( tmpMotif . isClassified ( ) ) { // this breaks the loop flow, so it goes to //for (SAXMotif // tempMotif : allMotifs) { continue ; } SameLengthMotifs tmpSameLengthMotifs = new SameLengthMotifs ( ) ; int tmpMotifLen = tmpMotif . getPos ( ) . getEnd ( ) - tmpMotif . getPos ( ) . getStart ( ) + 1 ; int minLen = tmpMotifLen ; int maxLen = tmpMotifLen ; // TODO: assuming that this motif has not been processed, right? ArrayList < SAXMotif > newMotifClass = new ArrayList < SAXMotif > ( ) ; newMotifClass . add ( tmpMotif ) ; tmpMotif . setClassified ( true ) ; // TODO: this motif assumed to be the first one of it's class, // traverse the rest down for ( int i = currentIndex ; i < allMotifs . size ( ) ; i ++ ) { SAXMotif anotherMotif = allMotifs . get ( i ) ; // if the two motifs are similar or not. int anotherMotifLen = anotherMotif . getPos ( ) . getEnd ( ) - anotherMotif . getPos ( ) . getStart ( ) + 1 ; // if they have the similar length. if ( Math . abs ( anotherMotifLen - tmpMotifLen ) < ( tmpMotifLen * lengthThreshold ) ) { newMotifClass . add ( anotherMotif ) ; anotherMotif . setClassified ( true ) ; if ( anotherMotifLen > maxLen ) { maxLen = anotherMotifLen ; } else if ( anotherMotifLen < minLen ) { minLen = anotherMotifLen ; } } } tmpSameLengthMotifs . setSameLenMotifs ( newMotifClass ) ; tmpSameLengthMotifs . setMinMotifLen ( minLen ) ; tmpSameLengthMotifs . setMaxMotifLen ( maxLen ) ; allClassifiedMotifs . add ( tmpSameLengthMotifs ) ; } return allClassifiedMotifs ; // System.out.println(); }
Classify the motifs based on their length .
603
10
140,619
protected ArrayList < SAXMotif > getAllMotifs ( GrammarRules grammarRules ) { // result ArrayList < SAXMotif > allMotifs = new ArrayList < SAXMotif > ( ) ; int ruleNumber = grammarRules . size ( ) ; // iterate over all rules for ( int i = 0 ; i < ruleNumber ; i ++ ) { // iterate over all segments/motifs/sub-sequences which correspond // to the rule ArrayList < RuleInterval > arrPos = grammarRules . getRuleRecord ( i ) . getRuleIntervals ( ) ; for ( RuleInterval saxPos : arrPos ) { SAXMotif motif = new SAXMotif ( ) ; motif . setPos ( saxPos ) ; motif . setRuleIndex ( i ) ; motif . setClassified ( false ) ; allMotifs . add ( motif ) ; } } // ascending order Collections . sort ( allMotifs ) ; return allMotifs ; }
Stores all the sub - sequences that generated by Sequitur rules into an array list sorted by sub - sequence length in ascending order .
215
28
140,620
protected ArrayList < SameLengthMotifs > removeOverlappingInSimiliar ( ArrayList < SameLengthMotifs > allClassifiedMotifs , GrammarRules grammarRules , double [ ] ts , double thresouldCom ) { ArrayList < SAXMotif > motifsBeDeleted = new ArrayList < SAXMotif > ( ) ; SAXPointsNumber [ ] pointsNumberRemoveStrategy = countPointNumber ( grammarRules , ts ) ; for ( SameLengthMotifs sameLenMotifs : allClassifiedMotifs ) { outer : for ( int j = 0 ; j < sameLenMotifs . getSameLenMotifs ( ) . size ( ) ; j ++ ) { SAXMotif tempMotif = sameLenMotifs . getSameLenMotifs ( ) . get ( j ) ; int tempMotifLen = tempMotif . getPos ( ) . getEnd ( ) - tempMotif . getPos ( ) . getStart ( ) + 1 ; for ( int i = j + 1 ; i < sameLenMotifs . getSameLenMotifs ( ) . size ( ) ; i ++ ) { SAXMotif anotherMotif = sameLenMotifs . getSameLenMotifs ( ) . get ( i ) ; int anotherMotifLen = anotherMotif . getPos ( ) . getEnd ( ) - anotherMotif . getPos ( ) . getStart ( ) + 1 ; double minEndPos = Math . min ( tempMotif . getPos ( ) . getEnd ( ) , anotherMotif . getPos ( ) . getEnd ( ) ) ; double maxStartPos = Math . max ( tempMotif . getPos ( ) . getStart ( ) , anotherMotif . getPos ( ) . getStart ( ) ) ; // the length in common. double commonLen = minEndPos - maxStartPos + 1 ; // if they are overlapped motif, remove the shorter one if ( commonLen > ( tempMotifLen * thresouldCom ) ) { SAXMotif deletedMotif = new SAXMotif ( ) ; SAXMotif similarWith = new SAXMotif ( ) ; boolean isAnotherBetter ; if ( pointsNumberRemoveStrategy != null ) { isAnotherBetter = decideRemove ( anotherMotif , tempMotif , pointsNumberRemoveStrategy ) ; } else { isAnotherBetter = anotherMotifLen > tempMotifLen ; } if ( isAnotherBetter ) { deletedMotif = tempMotif ; similarWith = anotherMotif ; sameLenMotifs . getSameLenMotifs ( ) . remove ( j ) ; deletedMotif . setSimilarWith ( similarWith ) ; motifsBeDeleted . 
add ( deletedMotif ) ; j -- ; continue outer ; } else { deletedMotif = anotherMotif ; similarWith = tempMotif ; sameLenMotifs . getSameLenMotifs ( ) . remove ( i ) ; deletedMotif . setSimilarWith ( similarWith ) ; motifsBeDeleted . add ( deletedMotif ) ; i -- ; } } } } int minLength = sameLenMotifs . getSameLenMotifs ( ) . get ( 0 ) . getPos ( ) . endPos - sameLenMotifs . getSameLenMotifs ( ) . get ( 0 ) . getPos ( ) . startPos + 1 ; int sameLenMotifsSize = sameLenMotifs . getSameLenMotifs ( ) . size ( ) ; int maxLength = sameLenMotifs . getSameLenMotifs ( ) . get ( sameLenMotifsSize - 1 ) . getPos ( ) . endPos - sameLenMotifs . getSameLenMotifs ( ) . get ( sameLenMotifsSize - 1 ) . getPos ( ) . startPos + 1 ; sameLenMotifs . setMinMotifLen ( minLength ) ; sameLenMotifs . setMaxMotifLen ( maxLength ) ; } return allClassifiedMotifs ; }
Removes overlapping rules in similar rule set .
880
9
140,621
protected boolean decideRemove ( SAXMotif motif1 , SAXMotif motif2 , SAXPointsNumber [ ] pointsNumberRemoveStrategy ) { // motif1 details int motif1Start = motif1 . getPos ( ) . getStart ( ) ; int motif1End = motif1 . getPos ( ) . getEnd ( ) - 1 ; int length1 = motif1End - motif1Start ; // motif2 details int motif2Start = motif2 . getPos ( ) . getStart ( ) ; int motif2End = motif1 . getPos ( ) . getEnd ( ) - 1 ; int length2 = motif2End - motif2Start ; int countsMotif1 = 0 ; int countsMotif2 = 0 ; // compute the averageWeight double averageWeight = 1 ; int count = 0 ; for ( int i = 0 ; i < pointsNumberRemoveStrategy . length ; i ++ ) { count += pointsNumberRemoveStrategy [ i ] . getPointOccurenceNumber ( ) ; } averageWeight = ( double ) count / ( double ) pointsNumberRemoveStrategy . length ; // compute counts for motif 1 for ( int i = motif1Start ; i <= motif1End ; i ++ ) { countsMotif1 += pointsNumberRemoveStrategy [ i ] . getPointOccurenceNumber ( ) ; } // compute counts for motif 2 for ( int i = motif2Start ; i <= motif2End ; i ++ ) { countsMotif2 += pointsNumberRemoveStrategy [ i ] . getPointOccurenceNumber ( ) ; } // get weights double weight1 = countsMotif1 / ( averageWeight * length1 ) ; double weight2 = countsMotif2 / ( averageWeight * length2 ) ; if ( weight1 > weight2 ) { return true ; } return false ; }
Decide which one from overlapping subsequences should be removed . The decision rule is that each sub - sequence has a weight the one with the smaller weight should be removed .
385
34
140,622
protected ArrayList < SameLengthMotifs > refinePatternsByClustering ( GrammarRules grammarRules , double [ ] ts , ArrayList < SameLengthMotifs > allClassifiedMotifs , double fractionTopDist ) { DistanceComputation dc = new DistanceComputation ( ) ; double [ ] origTS = ts ; ArrayList < SameLengthMotifs > newAllClassifiedMotifs = new ArrayList < SameLengthMotifs > ( ) ; for ( SameLengthMotifs sameLenMotifs : allClassifiedMotifs ) { ArrayList < RuleInterval > arrPos = new ArrayList < RuleInterval > ( ) ; ArrayList < SAXMotif > subsequences = sameLenMotifs . getSameLenMotifs ( ) ; for ( SAXMotif ss : subsequences ) { arrPos . add ( ss . getPos ( ) ) ; } int patternNum = arrPos . size ( ) ; if ( patternNum < 2 ) { continue ; } double dt [ ] [ ] = new double [ patternNum ] [ patternNum ] ; // Build distance matrix. for ( int i = 0 ; i < patternNum ; i ++ ) { RuleInterval saxPos = arrPos . get ( i ) ; int start1 = saxPos . getStart ( ) ; int end1 = saxPos . getEnd ( ) ; double [ ] ts1 = Arrays . copyOfRange ( origTS , start1 , end1 ) ; for ( int j = 0 ; j < arrPos . size ( ) ; j ++ ) { RuleInterval saxPos2 = arrPos . get ( j ) ; if ( dt [ i ] [ j ] > 0 ) { continue ; } double d = 0 ; dt [ i ] [ j ] = d ; if ( i == j ) { continue ; } int start2 = saxPos2 . getStart ( ) ; int end2 = saxPos2 . getEnd ( ) ; double [ ] ts2 = Arrays . copyOfRange ( origTS , start2 , end2 ) ; if ( ts1 . length > ts2 . length ) d = dc . calcDistTSAndPattern ( ts1 , ts2 ) ; else d = dc . calcDistTSAndPattern ( ts2 , ts1 ) ; // DTW dtw = new DTW(ts1, ts2); // d = dtw.warpingDistance; dt [ i ] [ j ] = d ; } } String [ ] patternsName = new String [ patternNum ] ; for ( int i = 0 ; i < patternNum ; i ++ ) { patternsName [ i ] = String . valueOf ( i ) ; } ClusteringAlgorithm alg = new DefaultClusteringAlgorithm ( ) ; Cluster cluster = alg . 
performClustering ( dt , patternsName , new AverageLinkageStrategy ( ) ) ; // int minPatternPerCls = (int) (0.3 * patternNum); // minPatternPerCls = minPatternPerCls > 0 ? minPatternPerCls : 1; int minPatternPerCls = 1 ; if ( cluster . getDistance ( ) == null ) { // System.out.print(false); continue ; } // TODO: refine hard coded threshold // double cutDist = cluster.getDistance() * 0.67; double cutDist = cluster . getDistanceValue ( ) * fractionTopDist ; ArrayList < String [ ] > clusterTSIdx = findCluster ( cluster , cutDist , minPatternPerCls ) ; while ( clusterTSIdx . size ( ) <= 0 ) { cutDist += cutDist / 2 ; clusterTSIdx = findCluster ( cluster , cutDist , minPatternPerCls ) ; } newAllClassifiedMotifs . addAll ( SeparateMotifsByClustering ( clusterTSIdx , sameLenMotifs ) ) ; } return newAllClassifiedMotifs ; }
Refines patterns by clustering .
854
7
140,623
private ArrayList < String [ ] > findCluster ( Cluster cluster , double cutDist , int minPatternPerCls ) { ArrayList < String [ ] > clusterTSIdx = new ArrayList < String [ ] > ( ) ; if ( cluster . getDistance ( ) != null ) { // if (cluster.getDistance() > cutDist) { if ( cluster . getDistanceValue ( ) > cutDist ) { if ( cluster . getChildren ( ) . size ( ) > 0 ) { clusterTSIdx . addAll ( findCluster ( cluster . getChildren ( ) . get ( 0 ) , cutDist , minPatternPerCls ) ) ; clusterTSIdx . addAll ( findCluster ( cluster . getChildren ( ) . get ( 1 ) , cutDist , minPatternPerCls ) ) ; } } else { // String[] idxes = cluster.getName().split("&"); ArrayList < String > itemsInCluster = getNameInCluster ( cluster ) ; String [ ] idxes = itemsInCluster . toArray ( new String [ itemsInCluster . size ( ) ] ) ; if ( idxes . length > minPatternPerCls ) { clusterTSIdx . add ( idxes ) ; } } } return clusterTSIdx ; }
Finds clusters .
277
4
140,624
private ArrayList < String > getNameInCluster ( Cluster cluster ) { ArrayList < String > itemsInCluster = new ArrayList < String > ( ) ; String nodeName ; if ( cluster . isLeaf ( ) ) { nodeName = cluster . getName ( ) ; itemsInCluster . add ( nodeName ) ; } else { // String[] clusterName = cluster.getName().split("#"); // nodeName = clusterName[1]; } for ( Cluster child : cluster . getChildren ( ) ) { ArrayList < String > childrenNames = getNameInCluster ( child ) ; itemsInCluster . addAll ( childrenNames ) ; } return itemsInCluster ; }
Finds out cluster names .
149
6
140,625
private ArrayList < SameLengthMotifs > SeparateMotifsByClustering ( ArrayList < String [ ] > clusterTSIdx , SameLengthMotifs sameLenMotifs ) { ArrayList < SameLengthMotifs > newResult = new ArrayList < SameLengthMotifs > ( ) ; if ( clusterTSIdx . size ( ) > 1 ) { ArrayList < SAXMotif > subsequences = sameLenMotifs . getSameLenMotifs ( ) ; for ( String [ ] idxesInCluster : clusterTSIdx ) { SameLengthMotifs newIthSLM = new SameLengthMotifs ( ) ; ArrayList < SAXMotif > sameLenSS = new ArrayList < SAXMotif > ( ) ; int minL = sameLenMotifs . getMinMotifLen ( ) ; int maxL = sameLenMotifs . getMaxMotifLen ( ) ; for ( String i : idxesInCluster ) { SAXMotif ssI = subsequences . get ( Integer . parseInt ( i ) ) ; int len = ssI . getPos ( ) . getEnd ( ) - ssI . getPos ( ) . getStart ( ) ; if ( len < minL ) { minL = len ; } else if ( len > maxL ) { maxL = len ; } sameLenSS . add ( ssI ) ; } newIthSLM . setSameLenMotifs ( sameLenSS ) ; newIthSLM . setMaxMotifLen ( maxL ) ; newIthSLM . setMinMotifLen ( minL ) ; newResult . add ( newIthSLM ) ; } } else { newResult . add ( sameLenMotifs ) ; } return newResult ; }
Separates motifs via clustering .
392
9
140,626
@ Override public BtrpOperand go ( BtrPlaceTree parent ) { String cname = getText ( ) ; if ( catalog == null ) { return ignoreError ( "No constraints available" ) ; } SatConstraintBuilder b = catalog . getConstraint ( cname ) ; if ( b == null ) { ignoreError ( "Unknown constraint '" + cname + "'" ) ; } //Get the params int i = 0 ; boolean discrete = false ; if ( ">>" . equals ( getChild ( 0 ) . getText ( ) ) ) { i = 1 ; discrete = true ; } List < BtrpOperand > params = new ArrayList <> ( ) ; for ( ; i < getChildCount ( ) ; i ++ ) { params . add ( getChild ( i ) . go ( this ) ) ; } if ( b != null ) { List < ? extends SatConstraint > constraints = b . buildConstraint ( this , params ) ; for ( SatConstraint c : constraints ) { if ( c != null ) { if ( discrete ) { if ( ! c . setContinuous ( false ) ) { return ignoreError ( "Discrete restriction is not supported by constraint '" + cname + "'" ) ; } } else { //force the continuous mode, if available c . setContinuous ( true ) ; } script . addConstraint ( c ) ; } } } return IgnorableOperand . getInstance ( ) ; }
Build the constraint . The constraint is built if it exists in the catalog and if the parameters are compatible with the constraint signature .
318
25
140,627
public static Process runProcess ( String ... cmdParts ) throws IOException { return new ProcessBuilder ( buildCommandline ( cmdParts ) ) . inheritIO ( ) . start ( ) ; }
Execute a command with the following arguments .
39
9
140,628
public static String getXMLValidString ( final String input , final boolean replace , final char replacement ) { if ( input == null ) { return null ; } if ( "" . equals ( input ) ) { return "" ; } StringBuilder sb = new StringBuilder ( ) ; for ( char c : input . toCharArray ( ) ) { if ( XMLStringUtil . isXMLValid ( c ) ) { sb . append ( c ) ; } else if ( replace ) { sb . append ( replacement ) ; } } return sb . toString ( ) ; }
Remove or replace XML invalid chars from input .
123
9
140,629
public boolean release ( ) { if ( this . released ) { // already (marked as) released... return false ; } this . released = true ; if ( this . childCount == 0 ) { if ( this . parent == null ) { return true ; } else { assert ( this . parent . childCount > 0 ) ; this . parent . childCount -- ; if ( ( this . parent . childCount == 0 ) && ( this . parent . released ) ) { return true ; } } } return false ; }
This method marks this array to be released .
108
9
140,630
@ Nonnull public static < T > ApprovalBuilder < T > of ( Class < T > clazz ) { return new ApprovalBuilder < T > ( clazz ) ; }
Create a new approval builder that will be able to approve objects from the specified class type .
38
18
140,631
@ Nonnull public static Path getApprovalPath ( Path filePath ) { Pre . notNull ( filePath , "filePath" ) ; String s = filePath . toString ( ) ; int extensionIndex = s . lastIndexOf ( ' ' ) ; if ( extensionIndex == - 1 ) { return Paths . get ( s + FOR_APPROVAL_EXTENSION ) ; } int lastPartOfPath = s . lastIndexOf ( ' ' ) ; if ( lastPartOfPath != - 1 && lastPartOfPath > extensionIndex ) { //there was no extension and the directory contains dots. return Paths . get ( s + FOR_APPROVAL_EXTENSION ) ; } String firstPart = s . substring ( 0 , extensionIndex ) ; String extension = s . substring ( extensionIndex ) ; return Paths . get ( firstPart + FOR_APPROVAL_EXTENSION + extension ) ; }
Get the path for approval from the original file path .
201
11
140,632
@ Nonnull public static Point fromUniformlyDistributedRandomPoints ( @ Nonnull final Random randomGenerator ) { checkNonnull ( "randomGenerator" , randomGenerator ) ; // Calculate uniformly distributed 3D point on sphere (radius = 1.0): // http://mathproofs.blogspot.co.il/2005/04/uniform-random-distribution-on-sphere.html final double unitRand1 = randomGenerator . nextDouble ( ) ; final double unitRand2 = randomGenerator . nextDouble ( ) ; final double theta0 = ( 2.0 * Math . PI ) * unitRand1 ; final double theta1 = Math . acos ( 1.0 - ( 2.0 * unitRand2 ) ) ; final double x = Math . sin ( theta0 ) * Math . sin ( theta1 ) ; final double y = Math . cos ( theta0 ) * Math . sin ( theta1 ) ; final double z = Math . cos ( theta1 ) ; // Convert Carthesian 3D point into lat/lon (radius = 1.0): // http://stackoverflow.com/questions/1185408/converting-from-longitude-latitude-to-cartesian-coordinates final double latRad = Math . asin ( z ) ; final double lonRad = Math . atan2 ( y , x ) ; // Convert radians to degrees. assert ! Double . isNaN ( latRad ) ; assert ! Double . isNaN ( lonRad ) ; final double lat = latRad * ( 180.0 / Math . PI ) ; final double lon = lonRad * ( 180.0 / Math . PI ) ; return fromDeg ( lat , lon ) ; }
Create a random point uniformly distributed over the surface of the Earth .
384
13
140,633
public static double distanceInMeters ( @ Nonnull final Point p1 , @ Nonnull final Point p2 ) { checkNonnull ( "p1" , p1 ) ; checkNonnull ( "p2" , p2 ) ; final Point from ; final Point to ; if ( p1 . getLonDeg ( ) <= p2 . getLonDeg ( ) ) { from = p1 ; to = p2 ; } else { from = p2 ; to = p1 ; } // Calculate mid point of 2 latitudes. final double avgLat = ( from . getLatDeg ( ) + to . getLatDeg ( ) ) / 2.0 ; final double deltaLatDeg = Math . abs ( to . getLatDeg ( ) - from . getLatDeg ( ) ) ; final double deltaLonDeg360 = Math . abs ( to . getLonDeg ( ) - from . getLonDeg ( ) ) ; final double deltaLonDeg = ( ( deltaLonDeg360 <= 180.0 ) ? deltaLonDeg360 : ( 360.0 - deltaLonDeg360 ) ) ; // Meters per longitude is fixed; per latitude requires * cos(avg(lat)). final double deltaXMeters = degreesLonToMetersAtLat ( deltaLonDeg , avgLat ) ; final double deltaYMeters = degreesLatToMeters ( deltaLatDeg ) ; // Calculate length through Earth. This is an approximation, but works fine for short distances. return Math . sqrt ( ( deltaXMeters * deltaXMeters ) + ( deltaYMeters * deltaYMeters ) ) ; }
Calculate the distance between two points . This algorithm does not take the curvature of the Earth into account so it only works for small distance up to say 200 km and not too close to the poles .
367
42
140,634
@ Override public List < ? extends SatConstraint > buildConstraint ( BtrPlaceTree t , List < BtrpOperand > args ) { if ( ! checkConformance ( t , args ) ) { return Collections . emptyList ( ) ; } // Get the first parameter @ SuppressWarnings ( "unchecked" ) List < VM > s = ( List < VM > ) params [ 0 ] . transform ( this , t , args . get ( 0 ) ) ; if ( s == null ) { return Collections . emptyList ( ) ; } // Get param 'OneOf' Object obj = params [ 1 ] . transform ( this , t , args . get ( 1 ) ) ; if ( obj == null ) { return Collections . emptyList ( ) ; } if ( obj instanceof List ) { @ SuppressWarnings ( "unchecked" ) List < VM > s2 = ( List < VM > ) obj ; if ( s2 . isEmpty ( ) ) { t . ignoreError ( "Parameter '" + params [ 1 ] . getName ( ) + "' expects a non-empty list of VMs" ) ; return Collections . emptyList ( ) ; } return Precedence . newPrecedence ( s , s2 ) ; } else if ( obj instanceof String ) { String timestamp = ( String ) obj ; if ( "" . equals ( timestamp ) ) { t . ignoreError ( "Parameter '" + params [ 1 ] . getName ( ) + "' expects a non-empty string" ) ; return Collections . emptyList ( ) ; } return Deadline . newDeadline ( s , timestamp ) ; } else { return Collections . emptyList ( ) ; } }
Build a precedence constraint .
362
5
140,635
public void remove ( ) { BasicDoubleLinkedNode < V > next = getNext ( ) ; if ( next != null ) { next . previous = this . previous ; } if ( this . previous != null ) { this . previous . setNext ( next ) ; } }
This method removes this node from the double linked list .
58
11
140,636
@ Override public boolean applyAction ( Model i ) { Mapping c = i . getMapping ( ) ; return c . isRunning ( vm ) && c . getVMLocation ( vm ) . equals ( src ) && ! src . equals ( dst ) && c . addRunningVM ( vm , dst ) ; }
Make the VM running on the destination node in the given model .
68
13
140,637
public static void resetCaches ( int size ) { nodesCache = new LinkedHashMap < String , List < Node > > ( ) { @ Override protected boolean removeEldestEntry ( Map . Entry < String , List < Node > > foo ) { return size ( ) == size ; } } ; vmsCache = new LinkedHashMap < String , List < VM > > ( ) { @ Override protected boolean removeEldestEntry ( Map . Entry < String , List < VM > > foo ) { return size ( ) == size ; } } ; }
Reset the cache of element sets .
121
8
140,638
public static int requiredInt ( JSONObject o , String id ) throws JSONConverterException { checkKeys ( o , id ) ; try { return ( Integer ) o . get ( id ) ; } catch ( ClassCastException e ) { throw new JSONConverterException ( "Unable to read a int from string '" + id + "'" , e ) ; } }
Read an expected integer .
80
5
140,639
public static int optInt ( JSONObject o , String id , int def ) throws JSONConverterException { if ( o . containsKey ( id ) ) { try { return ( Integer ) o . get ( id ) ; } catch ( ClassCastException e ) { throw new JSONConverterException ( "Unable to read a int from string '" + id + "'" , e ) ; } } return def ; }
Read an optional integer .
90
5
140,640
public static void checkKeys ( JSONObject o , String ... keys ) throws JSONConverterException { for ( String k : keys ) { if ( ! o . containsKey ( k ) ) { throw new JSONConverterException ( "Missing key '" + k + "'" ) ; } } }
Check if some keys are present .
64
7
140,641
public static String requiredString ( JSONObject o , String id ) throws JSONConverterException { checkKeys ( o , id ) ; Object x = o . get ( id ) ; return x . toString ( ) ; }
Read an expected string .
47
5
140,642
public static double requiredDouble ( JSONObject o , String id ) throws JSONConverterException { checkKeys ( o , id ) ; Object x = o . get ( id ) ; if ( ! ( x instanceof Number ) ) { throw new JSONConverterException ( "Number expected at key '" + id + "' but was '" + x . getClass ( ) + "'." ) ; } return ( ( Number ) x ) . doubleValue ( ) ; }
Read an expected double .
99
5
140,643
public static boolean requiredBoolean ( JSONObject o , String id ) throws JSONConverterException { checkKeys ( o , id ) ; Object x = o . get ( id ) ; if ( ! ( x instanceof Boolean ) ) { throw new JSONConverterException ( "Boolean expected at key '" + id + "' but was '" + x . getClass ( ) + "'." ) ; } return ( Boolean ) x ; }
Read an expected boolean .
94
5
140,644
public static List < VM > vmsFromJSON ( Model mo , JSONArray a ) throws JSONConverterException { String json = a . toJSONString ( ) ; List < VM > s = vmsCache . get ( json ) ; if ( s != null ) { return s ; } s = new ArrayList <> ( a . size ( ) ) ; for ( Object o : a ) { s . add ( getVM ( mo , ( int ) o ) ) ; } vmsCache . put ( json , s ) ; return s ; }
Convert an array of VM identifiers to a set of VMs . This operation uses a cache of previously converted set of VMs .
117
27
140,645
public static List < Node > nodesFromJSON ( Model mo , JSONArray a ) throws JSONConverterException { String json = a . toJSONString ( ) ; List < Node > s = nodesCache . get ( json ) ; if ( s != null ) { return s ; } s = new ArrayList <> ( a . size ( ) ) ; for ( Object o : a ) { s . add ( getNode ( mo , ( int ) o ) ) ; } nodesCache . put ( json , s ) ; return s ; }
Convert an array of node identifiers to a set of nodes . This operation uses a cache of previously converted set of nodes .
114
26
140,646
public static JSONArray vmsToJSON ( Collection < VM > s ) { JSONArray a = new JSONArray ( ) ; for ( Element e : s ) { a . add ( e . id ( ) ) ; } return a ; }
Convert a collection of VMs to an array of VM identifiers .
50
14
140,647
public static JSONArray nodesToJSON ( Collection < Node > s ) { JSONArray a = new JSONArray ( ) ; for ( Element e : s ) { a . add ( e . id ( ) ) ; } return a ; }
Convert a collection nodes to an array of nodes identifiers .
49
12
140,648
public static List < VM > requiredVMs ( Model mo , JSONObject o , String id ) throws JSONConverterException { checkKeys ( o , id ) ; Object x = o . get ( id ) ; if ( ! ( x instanceof JSONArray ) ) { throw new JSONConverterException ( "integers expected at key '" + id + "'" ) ; } return vmsFromJSON ( mo , ( JSONArray ) x ) ; }
Read an expected list of VMs .
97
8
140,649
public static List < Node > requiredNodes ( Model mo , JSONObject o , String id ) throws JSONConverterException { checkKeys ( o , id ) ; Object x = o . get ( id ) ; if ( ! ( x instanceof JSONArray ) ) { throw new JSONConverterException ( "integers expected at key '" + id + "'" ) ; } return nodesFromJSON ( mo , ( JSONArray ) x ) ; }
Read an expected list of nodes .
96
7
140,650
public static Set < Collection < VM > > requiredVMPart ( Model mo , JSONObject o , String id ) throws JSONConverterException { Set < Collection < VM >> vms = new HashSet <> ( ) ; Object x = o . get ( id ) ; if ( ! ( x instanceof JSONArray ) ) { throw new JSONConverterException ( "Set of identifiers sets expected at key '" + id + "'" ) ; } for ( Object obj : ( JSONArray ) x ) { vms . add ( vmsFromJSON ( mo , ( JSONArray ) obj ) ) ; } return vms ; }
Read partitions of VMs .
134
6
140,651
public static Set < Collection < Node > > requiredNodePart ( Model mo , JSONObject o , String id ) throws JSONConverterException { Set < Collection < Node >> nodes = new HashSet <> ( ) ; Object x = o . get ( id ) ; if ( ! ( x instanceof JSONArray ) ) { throw new JSONConverterException ( "Set of identifiers sets expected at key '" + id + "'" ) ; } for ( Object obj : ( JSONArray ) x ) { nodes . add ( nodesFromJSON ( mo , ( JSONArray ) obj ) ) ; } return nodes ; }
Read partitions of nodes .
129
5
140,652
public static VM requiredVM ( Model mo , JSONObject o , String id ) throws JSONConverterException { checkKeys ( o , id ) ; try { return getVM ( mo , ( Integer ) o . get ( id ) ) ; } catch ( ClassCastException e ) { throw new JSONConverterException ( "Unable to read a VM identifier from string at key '" + id + "'" , e ) ; } }
Read an expected VM .
92
5
140,653
public static Node requiredNode ( Model mo , JSONObject o , String id ) throws JSONConverterException { checkKeys ( o , id ) ; try { return getNode ( mo , ( Integer ) o . get ( id ) ) ; } catch ( ClassCastException e ) { throw new JSONConverterException ( "Unable to read a Node identifier from string at key '" + id + "'" , e ) ; } }
Read an expected node .
92
5
140,654
public static VM getVM ( Model mo , int vmID ) throws JSONConverterException { VM vm = new VM ( vmID ) ; if ( ! mo . contains ( vm ) ) { throw new JSONConverterException ( "Undeclared vm '" + vmID + "'" ) ; } return vm ; }
Get a VM from its identifier . The VM is already a part of the model .
70
17
140,655
public static Node getNode ( Model mo , int nodeID ) throws JSONConverterException { Node n = new Node ( nodeID ) ; if ( ! mo . contains ( n ) ) { throw new JSONConverterException ( "Undeclared node '" + nodeID + "'" ) ; } return n ; }
Get a node from its identifier . The node is already a part of the model
70
16
140,656
@ Override public void postCostConstraints ( ) { if ( ! costActivated ) { costActivated = true ; rp . getLogger ( ) . debug ( "Post the cost-oriented constraints" ) ; List < IntVar > mttrs = Stream . concat ( rp . getVMActions ( ) . stream ( ) , rp . getNodeActions ( ) . stream ( ) ) . map ( Transition :: getEnd ) . collect ( Collectors . toList ( ) ) ; rp . getModel ( ) . post ( rp . getModel ( ) . sum ( mttrs . toArray ( new IntVar [ 0 ] ) , "=" , cost ) ) ; } }
Post the constraints related to the objective .
154
8
140,657
@ SuppressWarnings ( "PointlessArithmeticExpression" ) int getDataFirstRecord ( final int territoryNumber ) { assert ( 0 <= territoryNumber ) && ( territoryNumber <= Territory . AAA . getNumber ( ) ) ; return index [ territoryNumber + POS_INDEX_FIRST_RECORD ] ; }
Low - level routines for data access .
69
8
140,658
@ Override public List < Script > getScripts ( String name ) throws ScriptBuilderException { List < Script > scripts = new ArrayList <> ( ) ; if ( ! name . endsWith ( ".*" ) ) { String toSearch = name . replaceAll ( "\\." , File . separator ) + Script . EXTENSION ; for ( File path : paths ) { File f = new File ( path . getPath ( ) + File . separator + toSearch ) ; if ( f . exists ( ) ) { scripts . add ( builder . build ( f ) ) ; break ; } } } else { //We need to consolidate the errors in allEx and rethrow it at the end if necessary ScriptBuilderException allEx = null ; String base = name . substring ( 0 , name . length ( ) - 2 ) . replaceAll ( "\\." , File . separator ) ; for ( File path : paths ) { File f = new File ( path . getPath ( ) + File . separator + base ) ; File [ ] files = f . listFiles ( ) ; if ( f . isDirectory ( ) && files != null ) { for ( File sf : files ) { if ( sf . getName ( ) . endsWith ( Script . EXTENSION ) ) { try { scripts . add ( builder . build ( sf ) ) ; } catch ( ScriptBuilderException ex ) { if ( allEx == null ) { allEx = ex ; } else { allEx . getErrorReporter ( ) . getErrors ( ) . addAll ( ex . getErrorReporter ( ) . getErrors ( ) ) ; } } } } } } if ( allEx != null ) { throw allEx ; } } return scripts ; }
Get the script associated to a given identifier by browsing the given paths . The first script having a matching identifier is selected whatever the parsing process result will be
374
30
140,659
public VMConsumptionComparator append ( ShareableResource r , boolean asc ) { rcs . add ( r ) ; ascs . add ( asc ? 1 : - 1 ) ; return this ; }
Append a new resource to use to make the comparison
42
11
140,660
public void register ( RoutingConverter < ? extends Routing > r ) { java2json . put ( r . getSupportedRouting ( ) , r ) ; json2java . put ( r . getJSONId ( ) , r ) ; }
Register a routing converter .
54
5
140,661
public JSONObject switchToJSON ( Switch s ) { JSONObject o = new JSONObject ( ) ; o . put ( "id" , s . id ( ) ) ; o . put ( CAPACITY_LABEL , s . getCapacity ( ) ) ; return o ; }
Convert a Switch to a JSON object .
60
9
140,662
public JSONArray switchesToJSON ( Collection < Switch > c ) { JSONArray a = new JSONArray ( ) ; for ( Switch s : c ) { a . add ( switchToJSON ( s ) ) ; } return a ; }
Convert a collection of switches to an array of JSON switches objects .
49
14
140,663
public JSONObject physicalElementToJSON ( PhysicalElement pe ) { JSONObject o = new JSONObject ( ) ; if ( pe instanceof Node ) { o . put ( "type" , NODE_LABEL ) ; o . put ( "id" , ( ( Node ) pe ) . id ( ) ) ; } else if ( pe instanceof Switch ) { o . put ( "type" , SWITCH_LABEL ) ; o . put ( "id" , ( ( Switch ) pe ) . id ( ) ) ; } else { throw new IllegalArgumentException ( "Unsupported physical element '" + pe . getClass ( ) . toString ( ) + "'" ) ; } return o ; }
Convert a PhysicalElement to a JSON object .
150
10
140,664
public JSONObject linkToJSON ( Link s ) { JSONObject o = new JSONObject ( ) ; o . put ( "id" , s . id ( ) ) ; o . put ( CAPACITY_LABEL , s . getCapacity ( ) ) ; o . put ( SWITCH_LABEL , s . getSwitch ( ) . id ( ) ) ; o . put ( "physicalElement" , physicalElementToJSON ( s . getElement ( ) ) ) ; return o ; }
Convert a Link to a JSON object .
105
9
140,665
public JSONArray linksToJSON ( Collection < Link > c ) { JSONArray a = new JSONArray ( ) ; for ( Link l : c ) { a . add ( linkToJSON ( l ) ) ; } return a ; }
Convert a collection of links to an array of JSON links objects .
49
14
140,666
public Routing routingFromJSON ( Model mo , JSONObject o ) throws JSONConverterException { String type = requiredString ( o , "type" ) ; RoutingConverter < ? extends Routing > c = json2java . get ( type ) ; if ( c == null ) { throw new JSONConverterException ( "No converter available for a routing of type '" + type + "'" ) ; } return c . fromJSON ( mo , o ) ; }
Convert a JSON routing object into the corresponding java Routing implementation .
102
14
140,667
public Switch switchFromJSON ( JSONObject o ) throws JSONConverterException { return new Switch ( requiredInt ( o , "id" ) , readCapacity ( o ) ) ; }
Convert a JSON switch object to a Switch .
40
10
140,668
public void switchesFromJSON ( Network net , JSONArray a ) throws JSONConverterException { for ( Object o : a ) { net . newSwitch ( requiredInt ( ( JSONObject ) o , "id" ) , readCapacity ( ( JSONObject ) o ) ) ; } }
Convert a JSON array of switches to a Java List of switches .
61
14
140,669
public PhysicalElement physicalElementFromJSON ( Model mo , Network net , JSONObject o ) throws JSONConverterException { String type = requiredString ( o , "type" ) ; switch ( type ) { case NODE_LABEL : return requiredNode ( mo , o , "id" ) ; case SWITCH_LABEL : return getSwitch ( net , requiredInt ( o , "id" ) ) ; default : throw new JSONConverterException ( "type '" + type + "' is not a physical element" ) ; } }
Convert a JSON physical element object to a Java PhysicalElement object .
115
14
140,670
public void linkFromJSON ( Model mo , Network net , JSONObject o ) throws JSONConverterException { net . connect ( requiredInt ( o , "id" ) , readCapacity ( o ) , getSwitch ( net , requiredInt ( o , SWITCH_LABEL ) ) , physicalElementFromJSON ( mo , net , ( JSONObject ) o . get ( "physicalElement" ) ) ) ; }
Convert a JSON link object into a Java Link object .
88
12
140,671
public void linksFromJSON ( Model mo , Network net , JSONArray a ) throws JSONConverterException { for ( Object o : a ) { linkFromJSON ( mo , net , ( JSONObject ) o ) ; } }
Convert a JSON array of links to a Java List of links .
48
14
140,672
public List < SatConstraint > makeConstraints ( ) { List < SatConstraint > cstrs = new ArrayList <> ( ) ; //VM1 and VM2 must be running on distinct nodes cstrs . add ( new Spread ( new HashSet <> ( Arrays . asList ( vms . get ( 1 ) , vms . get ( 2 ) ) ) ) ) ; //VM0 must have at least 3 virtual CPU dedicated to it cstrs . add ( new Preserve ( vms . get ( 0 ) , "cpu" , 3 ) ) ; //N3 must be set offline cstrs . add ( new Offline ( nodes . get ( 3 ) ) ) ; //VM4 must be running, It asks for 3 cpu and 2 mem resources cstrs . add ( new Running ( vms . get ( 4 ) ) ) ; cstrs . add ( new Preserve ( vms . get ( 4 ) , "cpu" , 3 ) ) ; cstrs . add ( new Preserve ( vms . get ( 4 ) , "mem" , 2 ) ) ; //VM3 must be turned off, i.e. set back to the ready state cstrs . add ( new Ready ( vms . get ( 3 ) ) ) ; return cstrs ; }
Declare some constraints .
281
5
140,673
private boolean verifyWithOfColumns ( ColumnState [ ] columnStates , TextTableInfo tableInfo ) { int tableWidth = tableInfo . getWidth ( ) ; if ( tableWidth != TextColumnInfo . WIDTH_AUTO_ADJUST ) { int calculatedWidth = 0 ; for ( ColumnState columnState : columnStates ) { if ( columnState . width < 0 ) { throw new AssertionError ( "columnWidth=" + columnState . width ) ; // return false; } calculatedWidth = calculatedWidth + columnState . width + columnState . getColumnInfo ( ) . getBorderWidth ( ) ; } if ( calculatedWidth != tableWidth ) { throw new AssertionError ( "with=" + tableWidth + ", sum-of-columns=" + calculatedWidth ) ; // return false; } } return true ; }
This method verifies that the width of the columns are sane .
177
13
140,674
public Switch newSwitch ( int id , int capacity ) { Switch s = swBuilder . newSwitch ( id , capacity ) ; switches . add ( s ) ; return s ; }
Create a new switch with a specific identifier and a given maximal capacity
37
13
140,675
public List < Link > connect ( int bandwidth , Switch sw , Node ... nodes ) { List < Link > l = new ArrayList <> ( ) ; for ( Node n : nodes ) { l . add ( connect ( bandwidth , sw , n ) ) ; } return l ; }
Create connections between a single switch and multiple nodes
59
9
140,676
public List < Link > getConnectedLinks ( PhysicalElement pe ) { List < Link > myLinks = new ArrayList <> ( ) ; for ( Link l : this . links ) { if ( l . getElement ( ) . equals ( pe ) ) { myLinks . add ( l ) ; } else if ( l . getSwitch ( ) . equals ( pe ) ) { myLinks . add ( l ) ; } } return myLinks ; }
Get the list of links connected to a given physical element
95
11
140,677
public List < Node > getConnectedNodes ( ) { List < Node > nodes = new ArrayList <> ( ) ; for ( Link l : links ) { if ( l . getElement ( ) instanceof Node ) { nodes . add ( ( Node ) l . getElement ( ) ) ; } } return nodes ; }
Get the full list of nodes that have been connected into the network
69
13
140,678
public void addInclusion ( SVar < T > s1 , SVar < T > s2 ) { this . add ( new LtConstraint < SVar < T > , Set < T > > ( s1 , s2 ) ) ; }
Adds an inclusion constraints between two sets .
54
8
140,679
private int randomWithRankedValues ( IntVar x ) { TIntArrayList [ ] values = new TIntArrayList [ ranks . length ] ; DisposableValueIterator ite = x . getValueIterator ( true ) ; try { while ( ite . hasNext ( ) ) { int v = ite . next ( ) ; int i ; for ( i = 0 ; i < ranks . length ; i ++ ) { if ( ranks [ i ] . contains ( v ) ) { if ( values [ i ] == null ) { values [ i ] = new TIntArrayList ( ) ; } values [ i ] . add ( v ) ; } } } } finally { ite . dispose ( ) ; } //We pick a random value in the first rank that is not empty (aka null here) for ( TIntArrayList rank : values ) { if ( rank != null ) { int v = rnd . nextInt ( rank . size ( ) ) ; return rank . get ( v ) ; } } return - 1 ; }
Random value but that consider the rank of nodes . So values are picked up from the first rank possible .
219
21
140,680
private int randomValue ( IntVar x ) { int i = rnd . nextInt ( x . getDomainSize ( ) ) ; DisposableValueIterator ite = x . getValueIterator ( true ) ; int pos = - 1 ; try { while ( i >= 0 ) { pos = ite . next ( ) ; i -- ; } } finally { ite . dispose ( ) ; } return pos ; }
Pick a random value inside the variable domain .
88
9
140,681
public static SAXRule runSequitur ( String inputString ) throws Exception { LOGGER . trace ( "digesting the string " + inputString ) ; // clear global collections // SAXRule . numRules = new AtomicInteger ( 0 ) ; SAXRule . theRules . clear ( ) ; SAXSymbol . theDigrams . clear ( ) ; SAXSymbol . theSubstituteTable . clear ( ) ; // init the top-level rule // SAXRule resRule = new SAXRule ( ) ; // tokenize the input string // StringTokenizer st = new StringTokenizer ( inputString , " " ) ; // while there are tokens int currentPosition = 0 ; while ( st . hasMoreTokens ( ) ) { String token = st . nextToken ( ) ; // System.out.println(" processing the token " + token); // extract next token SAXTerminal symbol = new SAXTerminal ( token , currentPosition ) ; // append to the end of the current sequitur string // ... As each new input symbol is observed, append it to rule S.... resRule . last ( ) . insertAfter ( symbol ) ; // once appended, check if the resulting digram is new or recurrent // // ... Each time a link is made between two symbols if the new digram is repeated elsewhere // and the repetitions do not overlap, if the other occurrence is a complete rule, // replace the new digram with the non-terminal symbol that heads the rule, // otherwise,form a new rule and replace both digrams with the new non-terminal symbol // otherwise, insert the digram into the index... resRule . last ( ) . p . check ( ) ; currentPosition ++ ; // LOGGER.debug("Current grammar:\n" + SAXRule.getRules()); } return resRule ; }
Digests a string of terminals separated by a space .
389
11
140,682
public static GrammarRules series2SequiturRules ( double [ ] timeseries , int saxWindowSize , int saxPAASize , int saxAlphabetSize , NumerosityReductionStrategy numerosityReductionStrategy , double normalizationThreshold ) throws Exception , IOException { LOGGER . debug ( "Discretizing time series..." ) ; SAXRecords saxFrequencyData = sp . ts2saxViaWindow ( timeseries , saxWindowSize , saxPAASize , normalA . getCuts ( saxAlphabetSize ) , numerosityReductionStrategy , normalizationThreshold ) ; LOGGER . debug ( "Inferring the grammar..." ) ; // this is a string we are about to feed into Sequitur // String saxDisplayString = saxFrequencyData . getSAXString ( " " ) ; // reset the Sequitur data structures SAXRule . numRules = new AtomicInteger ( 0 ) ; SAXRule . theRules . clear ( ) ; SAXSymbol . theDigrams . clear ( ) ; // bootstrap the grammar SAXRule grammar = new SAXRule ( ) ; SAXRule . arrRuleRecords = new ArrayList < GrammarRuleRecord > ( ) ; // digest the string via the tokenizer and build the grammar StringTokenizer st = new StringTokenizer ( saxDisplayString , " " ) ; int currentPosition = 0 ; while ( st . hasMoreTokens ( ) ) { grammar . last ( ) . insertAfter ( new SAXTerminal ( st . nextToken ( ) , currentPosition ) ) ; grammar . last ( ) . p . check ( ) ; currentPosition ++ ; } // bw.close(); LOGGER . debug ( "Collecting the grammar rules statistics and expanding the rules..." ) ; GrammarRules rules = grammar . toGrammarRulesData ( ) ; LOGGER . debug ( "Mapping expanded rules to time-series intervals..." ) ; SequiturFactory . updateRuleIntervals ( rules , saxFrequencyData , true , timeseries , saxWindowSize , saxPAASize ) ; return rules ; }
Takes a time series and returns a grammar .
449
10
140,683
public static ArrayList < RuleInterval > getRulePositionsByRuleNum ( int ruleIdx , SAXRule grammar , SAXRecords saxFrequencyData , double [ ] originalTimeSeries , int saxWindowSize ) { // this will be the result ArrayList < RuleInterval > resultIntervals = new ArrayList < RuleInterval > ( ) ; // the rule container GrammarRuleRecord ruleContainer = grammar . getRuleRecords ( ) . get ( ruleIdx ) ; // the original indexes of all SAX words ArrayList < Integer > saxWordsIndexes = new ArrayList < Integer > ( saxFrequencyData . getAllIndices ( ) ) ; // debug printout LOGGER . trace ( "Expanded rule: \"" + ruleContainer . getExpandedRuleString ( ) + ' ' ) ; LOGGER . trace ( "Indexes: " + ruleContainer . getOccurrences ( ) ) ; // array of all words of this expanded rule String [ ] expandedRuleSplit = ruleContainer . getExpandedRuleString ( ) . trim ( ) . split ( " " ) ; for ( Integer currentIndex : ruleContainer . getOccurrences ( ) ) { String extractedStr = "" ; StringBuffer sb = new StringBuffer ( expandedRuleSplit . length ) ; for ( int i = 0 ; i < expandedRuleSplit . length ; i ++ ) { LOGGER . trace ( "currentIndex " + currentIndex + ", i: " + i ) ; extractedStr = extractedStr . concat ( " " ) . concat ( String . valueOf ( saxFrequencyData . getByIndex ( saxWordsIndexes . get ( currentIndex + i ) ) . getPayload ( ) ) ) ; sb . append ( saxWordsIndexes . get ( currentIndex + i ) ) . append ( " " ) ; } LOGGER . trace ( "Recovered string: " + extractedStr ) ; LOGGER . trace ( "Recovered positions: " + sb . toString ( ) ) ; int start = saxWordsIndexes . get ( currentIndex ) ; int end = - 1 ; // need to care about bouncing beyond the all SAX words index array if ( ( currentIndex + expandedRuleSplit . length ) >= saxWordsIndexes . size ( ) ) { // if we at the last index - then it's easy - end is the timeseries end end = originalTimeSeries . 
length - 1 ; } else { // if we OK with indexes, the Rule subsequence end is the start of the very next SAX word // after the kast in this expanded rule end = saxWordsIndexes . get ( currentIndex + expandedRuleSplit . length ) - 1 + saxWindowSize ; } // save it resultIntervals . add ( new RuleInterval ( start , end ) ) ; } return resultIntervals ; }
Recovers start and stop coordinates of a rule subsequences .
601
12
140,684
public static List < Preserve > newPreserve ( Collection < VM > vms , String r , int q ) { return vms . stream ( ) . map ( v -> new Preserve ( v , r , q ) ) . collect ( Collectors . toList ( ) ) ; }
Make multiple constraints
61
3
140,685
private Node < E > _link ( Node < E > node1 , Node < E > node2 ) { // maybe there is no real unification here if ( node1 == node2 ) return node2 ; // from now on, we know that we unify really disjoint trees if ( node1 . rank > node2 . rank ) { node2 . parent = node1 ; node1 . nbElems += node2 . nbElems ; return node1 ; } else { node1 . parent = node2 ; if ( node1 . rank == node2 . rank ) { node2 . rank ++ ; } node2 . nbElems += node1 . nbElems ; return node2 ; } }
Unifies the tree rooted in node1 with the tree rooted in node2 by making one of them the subtree of the other one ; returns the root of the new tree .
153
36
140,686
public static void reset ( ) { SAXRule . numRules = new AtomicInteger ( 0 ) ; SAXSymbol . theDigrams . clear ( ) ; SAXSymbol . theSubstituteTable . clear ( ) ; SAXRule . arrRuleRecords = new ArrayList < GrammarRuleRecord > ( ) ; }
Cleans up data structures .
71
6
140,687
protected void assignLevel ( ) { int lvl = Integer . MAX_VALUE ; SAXSymbol sym ; for ( sym = this . first ( ) ; ( ! sym . isGuard ( ) ) ; sym = sym . n ) { if ( sym . isNonTerminal ( ) ) { SAXRule referedTo = ( ( SAXNonTerminal ) sym ) . r ; lvl = Math . min ( referedTo . level + 1 , lvl ) ; } else { level = 1 ; return ; } } level = lvl ; }
This traces the rule level .
114
6
140,688
private static void expandRules ( ) { // long start = System.currentTimeMillis(); // iterate over all SAX containers // ArrayList<SAXMapEntry<Integer, Integer>> recs = new ArrayList<SAXMapEntry<Integer, Integer>>( // arrRuleRecords.size()); // // for (GrammarRuleRecord ruleRecord : arrRuleRecords) { // recs.add(new SAXMapEntry<Integer, Integer>(ruleRecord.getRuleLevel(), ruleRecord // .getRuleNumber())); // } // // Collections.sort(recs, new Comparator<SAXMapEntry<Integer, Integer>>() { // @Override // public int compare(SAXMapEntry<Integer, Integer> o1, SAXMapEntry<Integer, Integer> o2) { // return o1.getKey().compareTo(o2.getKey()); // } // }); // for (SAXMapEntry<Integer, Integer> entry : recs) { for ( GrammarRuleRecord ruleRecord : arrRuleRecords ) { if ( ruleRecord . getRuleNumber ( ) == 0 ) { continue ; } String curString = ruleRecord . getRuleString ( ) ; StringBuilder resultString = new StringBuilder ( 8192 ) ; String [ ] split = curString . split ( " " ) ; for ( String s : split ) { if ( s . startsWith ( "R" ) ) { resultString . append ( " " ) . append ( expandRule ( Integer . valueOf ( s . substring ( 1 , s . length ( ) ) ) ) ) ; } else { resultString . append ( " " ) . append ( s ) ; } } // need to trim space at the very end String rr = resultString . delete ( 0 , 1 ) . append ( " " ) . toString ( ) ; ruleRecord . setExpandedRuleString ( rr ) ; ruleRecord . setRuleYield ( countSpaces ( rr ) ) ; } StringBuilder resultString = new StringBuilder ( 8192 ) ; GrammarRuleRecord ruleRecord = arrRuleRecords . get ( 0 ) ; resultString . append ( ruleRecord . getRuleString ( ) ) ; int currentSearchStart = resultString . indexOf ( "R" ) ; while ( currentSearchStart >= 0 ) { int spaceIdx = resultString . indexOf ( " " , currentSearchStart ) ; String ruleName = resultString . substring ( currentSearchStart , spaceIdx + 1 ) ; Integer ruleId = Integer . valueOf ( ruleName . substring ( 1 , ruleName . length ( ) - 1 ) ) ; resultString . replace ( spaceIdx - ruleName . 
length ( ) + 1 , spaceIdx + 1 , arrRuleRecords . get ( ruleId ) . getExpandedRuleString ( ) ) ; currentSearchStart = resultString . indexOf ( "R" ) ; } ruleRecord . setExpandedRuleString ( resultString . toString ( ) . trim ( ) ) ; // ruleRecord.setRuleYield(countSpaces(resultString)); // long end = System.currentTimeMillis(); // System.out.println("Rules expanded in " + SAXFactory.timeToString(start, end)); }
Manfred s cool trick to get out all expanded rules . Expands the rule of each SAX container into SAX words string . Can be rewritten recursively though .
699
35
140,689
private int [ ] getIndexes ( ) { int [ ] res = new int [ this . indexes . size ( ) ] ; int i = 0 ; for ( Integer idx : this . indexes ) { res [ i ] = idx ; i ++ ; } return res ; }
Get all the rule occurrences .
59
6
140,690
public Action fromJSON ( JSONObject in ) throws JSONConverterException { String id = requiredString ( in , ACTION_ID_LABEL ) ; Action a ; switch ( id ) { case "bootVM" : a = bootVMFromJSON ( in ) ; break ; case "shutdownVM" : a = shutdownVMFromJSON ( in ) ; break ; case "shutdownNode" : a = shutdownNodeFromJSON ( in ) ; break ; case "bootNode" : a = bootNodeFromJSON ( in ) ; break ; case "forgeVM" : a = forgeVMFromJSON ( in ) ; break ; case "killVM" : a = killVMFromJSON ( in ) ; break ; case "migrateVM" : a = migrateVMFromJSON ( in ) ; break ; case "resumeVM" : a = resumeVMFromJSON ( in ) ; break ; case "suspendVM" : a = suspendVMFromJSON ( in ) ; break ; case RC_ALLOCATE_LABEL : a = allocateFromJSON ( in ) ; break ; default : throw new JSONConverterException ( "Unsupported action '" + id + "'" ) ; } attachEvents ( a , in ) ; return a ; }
decode a json - encoded action .
264
8
140,691
private void attachEvents ( Action a , JSONObject in ) throws JSONConverterException { if ( in . containsKey ( HOOK_LABEL ) ) { JSONObject hooks = ( JSONObject ) in . get ( HOOK_LABEL ) ; for ( Map . Entry < String , Object > e : hooks . entrySet ( ) ) { String k = e . getKey ( ) ; try { Action . Hook h = Action . Hook . valueOf ( k . toUpperCase ( ) ) ; for ( Object o : ( JSONArray ) e . getValue ( ) ) { a . addEvent ( h , eventFromJSON ( ( JSONObject ) o ) ) ; } } catch ( IllegalArgumentException ex ) { throw new JSONConverterException ( "Unsupported hook type '" + k + "'" , ex ) ; } } } }
Decorate the action with optional events .
182
8
140,692
private JSONObject makeActionSkeleton ( Action a ) { JSONObject o = new JSONObject ( ) ; o . put ( START_LABEL , a . getStart ( ) ) ; o . put ( END_LABEL , a . getEnd ( ) ) ; JSONObject hooks = new JSONObject ( ) ; for ( Action . Hook k : Action . Hook . values ( ) ) { JSONArray arr = new JSONArray ( ) ; for ( Event e : a . getEvents ( k ) ) { arr . add ( toJSON ( e ) ) ; } hooks . put ( k . toString ( ) , arr ) ; } o . put ( HOOK_LABEL , hooks ) ; return o ; }
Just create the JSONObject and set the consume and the end attribute .
150
14
140,693
public List < Action > listFromJSON ( JSONArray in ) throws JSONConverterException { List < Action > l = new ArrayList <> ( in . size ( ) ) ; for ( Object o : in ) { if ( ! ( o instanceof JSONObject ) ) { throw new JSONConverterException ( "Expected an array of JSONObject but got an array of " + o . getClass ( ) . getName ( ) ) ; } l . add ( fromJSON ( ( JSONObject ) o ) ) ; } return l ; }
Convert a list of json - encoded actions .
116
10
140,694
/**
 * Decodes a Unicode (UTF-16) mapcode string to its ASCII representation.
 * Package private for access by other modules.
 *
 * NOTE(review): several character literals in this method render as ' ' (a plain
 * space) in this copy of the source; from the surrounding logic they look
 * mis-encoded (e.g. the "normal ascii" upper bound and the not-found placeholder).
 * Verify each literal against the upstream repository before relying on them.
 *
 * @param mapcode the possibly-Unicode mapcode to convert
 * @return the ASCII form of the mapcode
 */
@Nonnull
static String decodeUTF16(@Nonnull final String mapcode) {
    String result;
    final StringBuilder asciiBuf = new StringBuilder();
    for (final char ch : mapcode.toCharArray()) {
        if (ch == ' ') {
            // Pass this character through unchanged.
            asciiBuf.append(ch);
        } else if ((ch >= 1) && (ch <= ' ')) { // normal ascii
            // NOTE(review): an upper bound of ' ' (0x20) would exclude most printable
            // ASCII, which contradicts the "normal ascii" comment — confirm upstream.
            asciiBuf.append(ch);
        } else {
            // Look the character up in the Unicode-to-ASCII conversion tables.
            boolean found = false;
            for (final Unicode2Ascii unicode2Ascii : UNICODE2ASCII) {
                if ((ch >= unicode2Ascii.min) && (ch <= unicode2Ascii.max)) {
                    final int pos = ((int) ch) - (int) unicode2Ascii.min;
                    asciiBuf.append(unicode2Ascii.convert.charAt(pos));
                    found = true;
                    break;
                }
            }
            if (!found) {
                // No table covers this character: append a placeholder and stop scanning.
                // NOTE(review): placeholder literal also renders as ' ' — confirm upstream.
                asciiBuf.append(' ');
                break;
            }
        }
    }
    result = asciiBuf.toString();
    // Repack if this was a Greek 'alpha' code. This will have been converted to a regular 'A' after one iteration.
    if (mapcode.startsWith(String.valueOf(GREEK_CAPITAL_ALPHA))) {
        final String unpacked = aeuUnpack(result);
        if (unpacked.isEmpty()) {
            throw new AssertionError("decodeUTF16: cannot decode " + mapcode);
        }
        result = Encoder.aeuPack(unpacked, false);
    }
    if (isAbjadScript(mapcode)) {
        // Abjad scripts need an extra conversion pass after the table lookup.
        return convertFromAbjad(result);
    } else {
        return result;
    }
}
This method decodes a Unicode string to ASCII . Package private for access by other modules .
402
18
140,695
/**
 * Decode a base-31 encoded string into an integer.
 * Returns a negative value in case of error (a character outside the alphabet).
 *
 * @param code the base-31 string to decode; scanning stops at the terminator character
 * @return the decoded non-negative value, or -1 on an invalid character
 */
private static int decodeBase31(@Nonnull final String code) {
    int result = 0;
    for (int i = 0; i < code.length(); i++) {
        final char ch = code.charAt(i);
        if (ch == ' ') {
            // Terminator reached: return what has been accumulated so far.
            return result;
        }
        final int digit = DECODE_CHARS[ch];
        if (digit < 0) {
            // Character is not part of the base-31 alphabet.
            return -1;
        }
        result = (result * 31) + digit;
    }
    return result;
}
returns negative in case of error
88
7
140,696
/**
 * Declare an immutable variable. The variable must not have been declared already.
 *
 * @param label the variable identifier
 * @param t     the operand bound to the variable
 * @return {@code true} iff the declaration succeeded
 */
public boolean declareImmutable(String label, BtrpOperand t) {
    if (isDeclared(label)) {
        // An existing symbol, mutable or not, cannot be shadowed by an immutable one.
        return false;
    }
    // A negative level marks the symbol as immutable.
    level.put(label, -1);
    type.put(label, t);
    return true;
}
Declare an immutable variable . The variable must not have been already declared .
54
15
140,697
/**
 * Remove a symbol from the table.
 *
 * @param label the identifier of the symbol to remove
 * @return {@code true} iff the symbol existed and was removed
 */
public boolean remove(String label) {
    boolean known = isDeclared(label);
    if (known) {
        // Drop both the scope level and the bound operand.
        level.remove(label);
        type.remove(label);
    }
    return known;
}
Remove a symbol from the table .
41
7
140,698
public final boolean declare ( String label , BtrpOperand t ) { if ( isDeclared ( label ) && level . get ( label ) < 0 ) { //Disallow immutable value return false ; } if ( ! isDeclared ( label ) ) { level . put ( label , currentLevel ) ; } type . put ( label , t ) ; return true ; }
Declare a new variable . The variable is inserted into the current script .
79
15
140,699
/**
 * Overridable method for getting the source modules to compile.
 *
 * Fix: the original closed the input stream only on the success path, leaking it
 * when {@code JSSourceFile.fromInputStream} (or any later call) threw; the stream
 * is now closed in a {@code finally} block.
 *
 * @param mid      the module identifier used to name the source file
 * @param resource the resource to read the JavaScript source from
 * @param request  the servlet request (unused here; available to overrides)
 * @param keyGens  the cache key generators (unused here; available to overrides)
 * @return a singleton list holding the source file built from the resource
 * @throws IOException if the resource cannot be read
 */
protected List<JSSourceFile> getJSSource(String mid, IResource resource, HttpServletRequest request, List<ICacheKeyGenerator> keyGens) throws IOException {
    List<JSSourceFile> result = new ArrayList<JSSourceFile>(1);
    InputStream in = resource.getInputStream();
    try {
        JSSourceFile sf = JSSourceFile.fromInputStream(mid, in);
        sf.setOriginalPath(resource.getURI().toString());
        result.add(sf);
    } finally {
        // Always release the stream, even if building the source file fails.
        in.close();
    }
    return result;
}
Overridable method for getting the source modules to compile
136
10