signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class druidGParser { /** * druidG . g : 145:1 : queryStmnt returns [ QueryMeta qMeta ] : SELECT ( ( WS selectItems [ qMeta ] ( ( WS ) ? ' , ' ( WS ) ? selectItems [ qMeta ] ) * ) | ( WS ' * ' ) ) ? WS FROM ( ( WS id = ID ) | ( WS LPARAN ( fromQuery = queryStmnt ) RPARAN ) ) ( WS WHERE WS whereClause [ qMeta ] ( ( WS BREAK WS BY WS gran = granularityClause ) ? ( WS GROUP WS BY WS ( id = ID ( ( WS ) ? ' , ' ( WS ) ? id = ID ) * ) ( WS HAVING WS h = havingClause ) ? ) ? ( WS ORDER WS BY WS ( id = ID ) ( WS dir = ( ASC | DESC ) ) ? ) ? ( WS LIMIT WS ( l = LONG ) ) ? ( WS THEN WS p = postAggItem ) ? ) ( WS WHICH WS CONTAINS ( WS ) ? LPARAN ( WS ) ? ( s1 = SINGLE _ QUOTE _ STRING ( ( WS ) ? ' , ' ( WS ) ? s2 = SINGLE _ QUOTE _ STRING ) * ) ( WS ) ? RPARAN WS SORT ( WS ) ? LPARAN ( WS ) ? ( s = SINGLE _ QUOTE _ STRING ) ( WS ) ? RPARAN ) ? ( WS HINT ( WS ) ? LPARAN ( WS ) ? s = SINGLE _ QUOTE _ STRING ( WS ) ? RPARAN ) ? ) ? ; */ public final QueryMeta queryStmnt ( ) throws RecognitionException { } }
QueryMeta qMeta = null ; Token id = null ; Token dir = null ; Token l = null ; Token s1 = null ; Token s2 = null ; Token s = null ; QueryMeta fromQuery = null ; Pair < Granularity , List < Pair < Integer , Integer > > > gran = null ; Having h = null ; PostAggItem p = null ; qMeta = GroupByQueryMeta . promote ( new QueryMeta ( ) ) ; ( ( BaseAggQueryMeta ) qMeta ) . aggregations = new ArrayList < > ( ) ; qMeta . intervals = new ArrayList < > ( ) ; try { // druidG . g : 150:2 : ( SELECT ( ( WS selectItems [ qMeta ] ( ( WS ) ? ' , ' ( WS ) ? selectItems [ qMeta ] ) * ) | ( WS ' * ' ) ) ? WS FROM ( ( WS id = ID ) | ( WS LPARAN ( fromQuery = queryStmnt ) RPARAN ) ) ( WS WHERE WS whereClause [ qMeta ] ( ( WS BREAK WS BY WS gran = granularityClause ) ? ( WS GROUP WS BY WS ( id = ID ( ( WS ) ? ' , ' ( WS ) ? id = ID ) * ) ( WS HAVING WS h = havingClause ) ? ) ? ( WS ORDER WS BY WS ( id = ID ) ( WS dir = ( ASC | DESC ) ) ? ) ? ( WS LIMIT WS ( l = LONG ) ) ? ( WS THEN WS p = postAggItem ) ? ) ( WS WHICH WS CONTAINS ( WS ) ? LPARAN ( WS ) ? ( s1 = SINGLE _ QUOTE _ STRING ( ( WS ) ? ' , ' ( WS ) ? s2 = SINGLE _ QUOTE _ STRING ) * ) ( WS ) ? RPARAN WS SORT ( WS ) ? LPARAN ( WS ) ? ( s = SINGLE _ QUOTE _ STRING ) ( WS ) ? RPARAN ) ? ( WS HINT ( WS ) ? LPARAN ( WS ) ? s = SINGLE _ QUOTE _ STRING ( WS ) ? RPARAN ) ? ) ? ) // druidG . g : 150:4 : SELECT ( ( WS selectItems [ qMeta ] ( ( WS ) ? ' , ' ( WS ) ? selectItems [ qMeta ] ) * ) | ( WS ' * ' ) ) ? WS FROM ( ( WS id = ID ) | ( WS LPARAN ( fromQuery = queryStmnt ) RPARAN ) ) ( WS WHERE WS whereClause [ qMeta ] ( ( WS BREAK WS BY WS gran = granularityClause ) ? ( WS GROUP WS BY WS ( id = ID ( ( WS ) ? ' , ' ( WS ) ? id = ID ) * ) ( WS HAVING WS h = havingClause ) ? ) ? ( WS ORDER WS BY WS ( id = ID ) ( WS dir = ( ASC | DESC ) ) ? ) ? ( WS LIMIT WS ( l = LONG ) ) ? ( WS THEN WS p = postAggItem ) ? ) ( WS WHICH WS CONTAINS ( WS ) ? LPARAN ( WS ) ? ( s1 = SINGLE _ QUOTE _ STRING ( ( WS ) ? ' , ' ( WS ) ? 
s2 = SINGLE _ QUOTE _ STRING ) * ) ( WS ) ? RPARAN WS SORT ( WS ) ? LPARAN ( WS ) ? ( s = SINGLE _ QUOTE _ STRING ) ( WS ) ? RPARAN ) ? ( WS HINT ( WS ) ? LPARAN ( WS ) ? s = SINGLE _ QUOTE _ STRING ( WS ) ? RPARAN ) ? ) ? { match ( input , SELECT , FOLLOW_SELECT_in_queryStmnt1180 ) ; // druidG . g : 151:7 : ( ( WS selectItems [ qMeta ] ( ( WS ) ? ' , ' ( WS ) ? selectItems [ qMeta ] ) * ) | ( WS ' * ' ) ) ? int alt101 = 3 ; int LA101_0 = input . LA ( 1 ) ; if ( ( LA101_0 == WS ) ) { int LA101_1 = input . LA ( 2 ) ; if ( ( LA101_1 == 90 ) ) { alt101 = 2 ; } else if ( ( LA101_1 == COUNT || LA101_1 == DOUBLE_SUM || ( LA101_1 >= HYPER_UNIQUE && LA101_1 <= ID ) || LA101_1 == JAVASCRIPT || LA101_1 == LONG_SUM || LA101_1 == MAX || LA101_1 == MIN || LA101_1 == UNIQUE ) ) { alt101 = 1 ; } } switch ( alt101 ) { case 1 : // druidG . g : 152:8 : ( WS selectItems [ qMeta ] ( ( WS ) ? ' , ' ( WS ) ? selectItems [ qMeta ] ) * ) { // druidG . g : 152:8 : ( WS selectItems [ qMeta ] ( ( WS ) ? ' , ' ( WS ) ? selectItems [ qMeta ] ) * ) // druidG . g : 153:9 : WS selectItems [ qMeta ] ( ( WS ) ? ' , ' ( WS ) ? selectItems [ qMeta ] ) * { match ( input , WS , FOLLOW_WS_in_queryStmnt1208 ) ; pushFollow ( FOLLOW_selectItems_in_queryStmnt1210 ) ; selectItems ( qMeta ) ; state . _fsp -- ; // druidG . g : 153:31 : ( ( WS ) ? ' , ' ( WS ) ? selectItems [ qMeta ] ) * loop100 : while ( true ) { int alt100 = 2 ; int LA100_0 = input . LA ( 1 ) ; if ( ( LA100_0 == WS ) ) { int LA100_1 = input . LA ( 2 ) ; if ( ( LA100_1 == 91 ) ) { alt100 = 1 ; } } else if ( ( LA100_0 == 91 ) ) { alt100 = 1 ; } switch ( alt100 ) { case 1 : // druidG . g : 153:32 : ( WS ) ? ' , ' ( WS ) ? selectItems [ qMeta ] { // druidG . g : 153:32 : ( WS ) ? int alt98 = 2 ; int LA98_0 = input . LA ( 1 ) ; if ( ( LA98_0 == WS ) ) { alt98 = 1 ; } switch ( alt98 ) { case 1 : // druidG . 
g : 153:32 : WS { match ( input , WS , FOLLOW_WS_in_queryStmnt1214 ) ; } break ; } match ( input , 91 , FOLLOW_91_in_queryStmnt1217 ) ; // druidG . g : 153:40 : ( WS ) ? int alt99 = 2 ; int LA99_0 = input . LA ( 1 ) ; if ( ( LA99_0 == WS ) ) { alt99 = 1 ; } switch ( alt99 ) { case 1 : // druidG . g : 153:40 : WS { match ( input , WS , FOLLOW_WS_in_queryStmnt1219 ) ; } break ; } pushFollow ( FOLLOW_selectItems_in_queryStmnt1222 ) ; selectItems ( qMeta ) ; state . _fsp -- ; } break ; default : break loop100 ; } } } } break ; case 2 : // druidG . g : 156:8 : ( WS ' * ' ) { // druidG . g : 156:8 : ( WS ' * ' ) // druidG . g : 156:9 : WS ' * ' { match ( input , WS , FOLLOW_WS_in_queryStmnt1253 ) ; match ( input , 90 , FOLLOW_90_in_queryStmnt1255 ) ; } } break ; } match ( input , WS , FOLLOW_WS_in_queryStmnt1270 ) ; match ( input , FROM , FOLLOW_FROM_in_queryStmnt1272 ) ; // druidG . g : 159:4 : ( ( WS id = ID ) | ( WS LPARAN ( fromQuery = queryStmnt ) RPARAN ) ) int alt102 = 2 ; int LA102_0 = input . LA ( 1 ) ; if ( ( LA102_0 == WS ) ) { int LA102_1 = input . LA ( 2 ) ; if ( ( LA102_1 == ID ) ) { alt102 = 1 ; } else if ( ( LA102_1 == LPARAN ) ) { alt102 = 2 ; } else { int nvaeMark = input . mark ( ) ; try { input . consume ( ) ; NoViableAltException nvae = new NoViableAltException ( "" , 102 , 1 , input ) ; throw nvae ; } finally { input . rewind ( nvaeMark ) ; } } } else { NoViableAltException nvae = new NoViableAltException ( "" , 102 , 0 , input ) ; throw nvae ; } switch ( alt102 ) { case 1 : // druidG . g : 160:13 : ( WS id = ID ) { // druidG . g : 160:13 : ( WS id = ID ) // druidG . g : 160:14 : WS id = ID { match ( input , WS , FOLLOW_WS_in_queryStmnt1293 ) ; id = ( Token ) match ( input , ID , FOLLOW_ID_in_queryStmnt1297 ) ; qMeta . dataSource = ( id != null ? id . getText ( ) : null ) ; } } break ; case 2 : // druidG . g : 162:11 : ( WS LPARAN ( fromQuery = queryStmnt ) RPARAN ) { // druidG . 
g : 162:11 : ( WS LPARAN ( fromQuery = queryStmnt ) RPARAN ) // druidG . g : 162:12 : WS LPARAN ( fromQuery = queryStmnt ) RPARAN { match ( input , WS , FOLLOW_WS_in_queryStmnt1334 ) ; match ( input , LPARAN , FOLLOW_LPARAN_in_queryStmnt1336 ) ; // druidG . g : 162:22 : ( fromQuery = queryStmnt ) // druidG . g : 162:23 : fromQuery = queryStmnt { pushFollow ( FOLLOW_queryStmnt_in_queryStmnt1341 ) ; fromQuery = queryStmnt ( ) ; state . _fsp -- ; } match ( input , RPARAN , FOLLOW_RPARAN_in_queryStmnt1344 ) ; qMeta . queryDataSource = fromQuery ; } } break ; } if ( ( ( BaseAggQueryMeta ) qMeta ) . aggregations . isEmpty ( ) ) { qMeta = SelectQueryMeta . promote ( qMeta ) ; } // druidG . g : 168:2 : ( WS WHERE WS whereClause [ qMeta ] ( ( WS BREAK WS BY WS gran = granularityClause ) ? ( WS GROUP WS BY WS ( id = ID ( ( WS ) ? ' , ' ( WS ) ? id = ID ) * ) ( WS HAVING WS h = havingClause ) ? ) ? ( WS ORDER WS BY WS ( id = ID ) ( WS dir = ( ASC | DESC ) ) ? ) ? ( WS LIMIT WS ( l = LONG ) ) ? ( WS THEN WS p = postAggItem ) ? ) ( WS WHICH WS CONTAINS ( WS ) ? LPARAN ( WS ) ? ( s1 = SINGLE _ QUOTE _ STRING ( ( WS ) ? ' , ' ( WS ) ? s2 = SINGLE _ QUOTE _ STRING ) * ) ( WS ) ? RPARAN WS SORT ( WS ) ? LPARAN ( WS ) ? ( s = SINGLE _ QUOTE _ STRING ) ( WS ) ? RPARAN ) ? ( WS HINT ( WS ) ? LPARAN ( WS ) ? s = SINGLE _ QUOTE _ STRING ( WS ) ? RPARAN ) ? ) ? int alt127 = 2 ; int LA127_0 = input . LA ( 1 ) ; if ( ( LA127_0 == WS ) ) { int LA127_1 = input . LA ( 2 ) ; if ( ( LA127_1 == WHERE ) ) { alt127 = 1 ; } } switch ( alt127 ) { case 1 : // druidG . g : 169:4 : WS WHERE WS whereClause [ qMeta ] ( ( WS BREAK WS BY WS gran = granularityClause ) ? ( WS GROUP WS BY WS ( id = ID ( ( WS ) ? ' , ' ( WS ) ? id = ID ) * ) ( WS HAVING WS h = havingClause ) ? ) ? ( WS ORDER WS BY WS ( id = ID ) ( WS dir = ( ASC | DESC ) ) ? ) ? ( WS LIMIT WS ( l = LONG ) ) ? ( WS THEN WS p = postAggItem ) ? ) ( WS WHICH WS CONTAINS ( WS ) ? LPARAN ( WS ) ? ( s1 = SINGLE _ QUOTE _ STRING ( ( WS ) ? 
' , ' ( WS ) ? s2 = SINGLE _ QUOTE _ STRING ) * ) ( WS ) ? RPARAN WS SORT ( WS ) ? LPARAN ( WS ) ? ( s = SINGLE _ QUOTE _ STRING ) ( WS ) ? RPARAN ) ? ( WS HINT ( WS ) ? LPARAN ( WS ) ? s = SINGLE _ QUOTE _ STRING ( WS ) ? RPARAN ) ? { match ( input , WS , FOLLOW_WS_in_queryStmnt1363 ) ; match ( input , WHERE , FOLLOW_WHERE_in_queryStmnt1365 ) ; match ( input , WS , FOLLOW_WS_in_queryStmnt1367 ) ; pushFollow ( FOLLOW_whereClause_in_queryStmnt1369 ) ; whereClause ( qMeta ) ; state . _fsp -- ; // druidG . g : 170:4 : ( ( WS BREAK WS BY WS gran = granularityClause ) ? ( WS GROUP WS BY WS ( id = ID ( ( WS ) ? ' , ' ( WS ) ? id = ID ) * ) ( WS HAVING WS h = havingClause ) ? ) ? ( WS ORDER WS BY WS ( id = ID ) ( WS dir = ( ASC | DESC ) ) ? ) ? ( WS LIMIT WS ( l = LONG ) ) ? ( WS THEN WS p = postAggItem ) ? ) // druidG . g : 171:5 : ( WS BREAK WS BY WS gran = granularityClause ) ? ( WS GROUP WS BY WS ( id = ID ( ( WS ) ? ' , ' ( WS ) ? id = ID ) * ) ( WS HAVING WS h = havingClause ) ? ) ? ( WS ORDER WS BY WS ( id = ID ) ( WS dir = ( ASC | DESC ) ) ? ) ? ( WS LIMIT WS ( l = LONG ) ) ? ( WS THEN WS p = postAggItem ) ? { // druidG . g : 171:5 : ( WS BREAK WS BY WS gran = granularityClause ) ? int alt103 = 2 ; int LA103_0 = input . LA ( 1 ) ; if ( ( LA103_0 == WS ) ) { int LA103_1 = input . LA ( 2 ) ; if ( ( LA103_1 == BREAK ) ) { alt103 = 1 ; } } switch ( alt103 ) { case 1 : // druidG . g : 171:6 : WS BREAK WS BY WS gran = granularityClause { match ( input , WS , FOLLOW_WS_in_queryStmnt1387 ) ; match ( input , BREAK , FOLLOW_BREAK_in_queryStmnt1389 ) ; match ( input , WS , FOLLOW_WS_in_queryStmnt1391 ) ; match ( input , BY , FOLLOW_BY_in_queryStmnt1393 ) ; match ( input , WS , FOLLOW_WS_in_queryStmnt1395 ) ; pushFollow ( FOLLOW_granularityClause_in_queryStmnt1399 ) ; gran = granularityClause ( ) ; state . _fsp -- ; qMeta . granularity = gran . a ; if ( gran . b != null ) { qMeta . microIntervals . addAll ( gran . b ) ; } } break ; } // druidG . 
g : 178:5 : ( WS GROUP WS BY WS ( id = ID ( ( WS ) ? ' , ' ( WS ) ? id = ID ) * ) ( WS HAVING WS h = havingClause ) ? ) ? int alt108 = 2 ; int LA108_0 = input . LA ( 1 ) ; if ( ( LA108_0 == WS ) ) { int LA108_1 = input . LA ( 2 ) ; if ( ( LA108_1 == GROUP ) ) { alt108 = 1 ; } } switch ( alt108 ) { case 1 : // druidG . g : 178:6 : WS GROUP WS BY WS ( id = ID ( ( WS ) ? ' , ' ( WS ) ? id = ID ) * ) ( WS HAVING WS h = havingClause ) ? { match ( input , WS , FOLLOW_WS_in_queryStmnt1416 ) ; match ( input , GROUP , FOLLOW_GROUP_in_queryStmnt1418 ) ; match ( input , WS , FOLLOW_WS_in_queryStmnt1420 ) ; match ( input , BY , FOLLOW_BY_in_queryStmnt1422 ) ; match ( input , WS , FOLLOW_WS_in_queryStmnt1424 ) ; qMeta = GroupByQueryMeta . promote ( qMeta ) ; if ( ( ( GroupByQueryMeta ) qMeta ) . fetchDimensions == null ) { System . err . println ( "No dimensions !! " ) ; } // druidG . g : 185:10 : ( id = ID ( ( WS ) ? ' , ' ( WS ) ? id = ID ) * ) // druidG . g : 185:11 : id = ID ( ( WS ) ? ' , ' ( WS ) ? id = ID ) * { id = ( Token ) match ( input , ID , FOLLOW_ID_in_queryStmnt1450 ) ; if ( ! ( ( GroupByQueryMeta ) qMeta ) . checkDimOrAlias ( ( id != null ? id . getText ( ) : null ) ) ) { System . err . println ( "Dimension/Alias " + ( id != null ? id . getText ( ) : null ) + " not valid.." ) ; } // druidG . g : 191:14 : ( ( WS ) ? ' , ' ( WS ) ? id = ID ) * loop106 : while ( true ) { int alt106 = 2 ; int LA106_0 = input . LA ( 1 ) ; if ( ( LA106_0 == WS ) ) { int LA106_1 = input . LA ( 2 ) ; if ( ( LA106_1 == 91 ) ) { alt106 = 1 ; } } else if ( ( LA106_0 == 91 ) ) { alt106 = 1 ; } switch ( alt106 ) { case 1 : // druidG . g : 191:15 : ( WS ) ? ' , ' ( WS ) ? id = ID { // druidG . g : 191:15 : ( WS ) ? int alt104 = 2 ; int LA104_0 = input . LA ( 1 ) ; if ( ( LA104_0 == WS ) ) { alt104 = 1 ; } switch ( alt104 ) { case 1 : // druidG . g : 191:15 : WS { match ( input , WS , FOLLOW_WS_in_queryStmnt1482 ) ; } break ; } match ( input , 91 , FOLLOW_91_in_queryStmnt1485 ) ; // druidG . 
g : 191:23 : ( WS ) ? int alt105 = 2 ; int LA105_0 = input . LA ( 1 ) ; if ( ( LA105_0 == WS ) ) { alt105 = 1 ; } switch ( alt105 ) { case 1 : // druidG . g : 191:23 : WS { match ( input , WS , FOLLOW_WS_in_queryStmnt1487 ) ; } break ; } id = ( Token ) match ( input , ID , FOLLOW_ID_in_queryStmnt1492 ) ; if ( ! ( ( GroupByQueryMeta ) qMeta ) . checkDimOrAlias ( ( id != null ? id . getText ( ) : null ) ) ) { System . err . println ( "Dimension/Alias " + ( id != null ? id . getText ( ) : null ) + " not valid.." ) ; } } break ; default : break loop106 ; } } } // druidG . g : 199:10 : ( WS HAVING WS h = havingClause ) ? int alt107 = 2 ; int LA107_0 = input . LA ( 1 ) ; if ( ( LA107_0 == WS ) ) { int LA107_1 = input . LA ( 2 ) ; if ( ( LA107_1 == HAVING ) ) { alt107 = 1 ; } } switch ( alt107 ) { case 1 : // druidG . g : 199:11 : WS HAVING WS h = havingClause { match ( input , WS , FOLLOW_WS_in_queryStmnt1549 ) ; match ( input , HAVING , FOLLOW_HAVING_in_queryStmnt1551 ) ; match ( input , WS , FOLLOW_WS_in_queryStmnt1553 ) ; pushFollow ( FOLLOW_havingClause_in_queryStmnt1557 ) ; h = havingClause ( ) ; state . _fsp -- ; ( ( GroupByQueryMeta ) qMeta ) . having = h ; } break ; } } break ; } qMeta = QueryUtils . checkAndPromoteToTimeSeries ( qMeta ) ; // druidG . g : 203:5 : ( WS ORDER WS BY WS ( id = ID ) ( WS dir = ( ASC | DESC ) ) ? ) ? int alt110 = 2 ; int LA110_0 = input . LA ( 1 ) ; if ( ( LA110_0 == WS ) ) { int LA110_1 = input . LA ( 2 ) ; if ( ( LA110_1 == ORDER ) ) { alt110 = 1 ; } } switch ( alt110 ) { case 1 : // druidG . g : 203:6 : WS ORDER WS BY WS ( id = ID ) ( WS dir = ( ASC | DESC ) ) ? { match ( input , WS , FOLLOW_WS_in_queryStmnt1601 ) ; match ( input , ORDER , FOLLOW_ORDER_in_queryStmnt1603 ) ; match ( input , WS , FOLLOW_WS_in_queryStmnt1605 ) ; match ( input , BY , FOLLOW_BY_in_queryStmnt1607 ) ; match ( input , WS , FOLLOW_WS_in_queryStmnt1609 ) ; // druidG . g : 203:24 : ( id = ID ) // druidG . 
g : 203:25 : id = ID { id = ( Token ) match ( input , ID , FOLLOW_ID_in_queryStmnt1614 ) ; } if ( ( ( PlainDimQueryMeta ) qMeta ) . fetchDimensions . size ( ) != 1 ) { ( ( GroupByQueryMeta ) qMeta ) . limitSpec = new LimitSpec ( ) ; } else { // If fetchDimensions = 1 then TopN is more optimal . qMeta = TopNQueryMeta . promote ( qMeta ) ; ( ( TopNQueryMeta ) qMeta ) . metric = ( id != null ? id . getText ( ) : null ) ; } // druidG . g : 214:9 : ( WS dir = ( ASC | DESC ) ) ? int alt109 = 2 ; int LA109_0 = input . LA ( 1 ) ; if ( ( LA109_0 == WS ) ) { int LA109_1 = input . LA ( 2 ) ; if ( ( LA109_1 == ASC || LA109_1 == DESC ) ) { alt109 = 1 ; } } switch ( alt109 ) { case 1 : // druidG . g : 214:10 : WS dir = ( ASC | DESC ) { match ( input , WS , FOLLOW_WS_in_queryStmnt1647 ) ; dir = input . LT ( 1 ) ; if ( input . LA ( 1 ) == ASC || input . LA ( 1 ) == DESC ) { input . consume ( ) ; state . errorRecovery = false ; } else { MismatchedSetException mse = new MismatchedSetException ( null , input ) ; throw mse ; } if ( qMeta instanceof GroupByQueryMeta && ( ( GroupByQueryMeta ) qMeta ) . limitSpec != null ) { if ( dir != null && ( dir != null ? dir . getText ( ) : null ) != null ) { ( ( GroupByQueryMeta ) qMeta ) . limitSpec . addColumn ( ( id != null ? id . getText ( ) : null ) , ( dir != null ? dir . getText ( ) : null ) ) ; } else { ( ( GroupByQueryMeta ) qMeta ) . limitSpec . addColumn ( ( id != null ? id . getText ( ) : null ) , "ASC" ) ; } } } break ; } // At this point if the qMeta is not TopN and is still GroupBy then do the following ( default is ascending sort ) . if ( qMeta instanceof GroupByQueryMeta && ( ( GroupByQueryMeta ) qMeta ) . limitSpec != null ) { if ( ! ( ( GroupByQueryMeta ) qMeta ) . limitSpec . columns . containsKey ( ( id != null ? id . getText ( ) : null ) ) ) { ( ( GroupByQueryMeta ) qMeta ) . limitSpec . addColumn ( ( id != null ? id . getText ( ) : null ) , "ASC" ) ; } } } break ; } // druidG . g : 235:5 : ( WS LIMIT WS ( l = LONG ) ) ? 
int alt111 = 2 ; int LA111_0 = input . LA ( 1 ) ; if ( ( LA111_0 == WS ) ) { int LA111_1 = input . LA ( 2 ) ; if ( ( LA111_1 == LIMIT ) ) { alt111 = 1 ; } } switch ( alt111 ) { case 1 : // druidG . g : 236:6 : WS LIMIT WS ( l = LONG ) { match ( input , WS , FOLLOW_WS_in_queryStmnt1701 ) ; match ( input , LIMIT , FOLLOW_LIMIT_in_queryStmnt1703 ) ; match ( input , WS , FOLLOW_WS_in_queryStmnt1705 ) ; // druidG . g : 236:18 : ( l = LONG ) // druidG . g : 236:19 : l = LONG { l = ( Token ) match ( input , LONG , FOLLOW_LONG_in_queryStmnt1710 ) ; } if ( qMeta instanceof SelectQueryMeta ) { ( ( SelectQueryMeta ) qMeta ) . pagingSpec . threshold = Integer . valueOf ( ( l != null ? l . getText ( ) : null ) ) ; } else if ( qMeta instanceof TopNQueryMeta ) { ( ( TopNQueryMeta ) qMeta ) . threshold = Integer . valueOf ( ( l != null ? l . getText ( ) : null ) ) ; } else if ( ( ( PlainDimQueryMeta ) qMeta ) . fetchDimensions . size ( ) != 1 ) { if ( ( ( GroupByQueryMeta ) qMeta ) . limitSpec != null ) { ( ( GroupByQueryMeta ) qMeta ) . limitSpec . limit = Long . valueOf ( ( l != null ? l . getText ( ) : null ) ) ; } } } break ; } // druidG . g : 250:7 : ( WS THEN WS p = postAggItem ) ? int alt112 = 2 ; int LA112_0 = input . LA ( 1 ) ; if ( ( LA112_0 == WS ) ) { int LA112_1 = input . LA ( 2 ) ; if ( ( LA112_1 == THEN ) ) { alt112 = 1 ; } } switch ( alt112 ) { case 1 : // druidG . g : 250:8 : WS THEN WS p = postAggItem { match ( input , WS , FOLLOW_WS_in_queryStmnt1748 ) ; match ( input , THEN , FOLLOW_THEN_in_queryStmnt1750 ) ; match ( input , WS , FOLLOW_WS_in_queryStmnt1752 ) ; pushFollow ( FOLLOW_postAggItem_in_queryStmnt1756 ) ; p = postAggItem ( ) ; state . _fsp -- ; QueryUtils . setPostAggregation ( qMeta , p ) ; } break ; } } // druidG . g : 252:4 : ( WS WHICH WS CONTAINS ( WS ) ? LPARAN ( WS ) ? ( s1 = SINGLE _ QUOTE _ STRING ( ( WS ) ? ' , ' ( WS ) ? s2 = SINGLE _ QUOTE _ STRING ) * ) ( WS ) ? RPARAN WS SORT ( WS ) ? LPARAN ( WS ) ? 
( s = SINGLE _ QUOTE _ STRING ) ( WS ) ? RPARAN ) ? int alt122 = 2 ; int LA122_0 = input . LA ( 1 ) ; if ( ( LA122_0 == WS ) ) { int LA122_1 = input . LA ( 2 ) ; if ( ( LA122_1 == WHICH ) ) { alt122 = 1 ; } } switch ( alt122 ) { case 1 : // druidG . g : 252:5 : WS WHICH WS CONTAINS ( WS ) ? LPARAN ( WS ) ? ( s1 = SINGLE _ QUOTE _ STRING ( ( WS ) ? ' , ' ( WS ) ? s2 = SINGLE _ QUOTE _ STRING ) * ) ( WS ) ? RPARAN WS SORT ( WS ) ? LPARAN ( WS ) ? ( s = SINGLE _ QUOTE _ STRING ) ( WS ) ? RPARAN { match ( input , WS , FOLLOW_WS_in_queryStmnt1771 ) ; match ( input , WHICH , FOLLOW_WHICH_in_queryStmnt1773 ) ; match ( input , WS , FOLLOW_WS_in_queryStmnt1775 ) ; match ( input , CONTAINS , FOLLOW_CONTAINS_in_queryStmnt1777 ) ; qMeta = SearchQueryMeta . promote ( qMeta ) ; // druidG . g : 252:68 : ( WS ) ? int alt113 = 2 ; int LA113_0 = input . LA ( 1 ) ; if ( ( LA113_0 == WS ) ) { alt113 = 1 ; } switch ( alt113 ) { case 1 : // druidG . g : 252:68 : WS { match ( input , WS , FOLLOW_WS_in_queryStmnt1781 ) ; } break ; } match ( input , LPARAN , FOLLOW_LPARAN_in_queryStmnt1784 ) ; // druidG . g : 252:79 : ( WS ) ? int alt114 = 2 ; int LA114_0 = input . LA ( 1 ) ; if ( ( LA114_0 == WS ) ) { alt114 = 1 ; } switch ( alt114 ) { case 1 : // druidG . g : 252:79 : WS { match ( input , WS , FOLLOW_WS_in_queryStmnt1786 ) ; } break ; } // druidG . g : 252:83 : ( s1 = SINGLE _ QUOTE _ STRING ( ( WS ) ? ' , ' ( WS ) ? s2 = SINGLE _ QUOTE _ STRING ) * ) // druidG . g : 252:84 : s1 = SINGLE _ QUOTE _ STRING ( ( WS ) ? ' , ' ( WS ) ? s2 = SINGLE _ QUOTE _ STRING ) * { s1 = ( Token ) match ( input , SINGLE_QUOTE_STRING , FOLLOW_SINGLE_QUOTE_STRING_in_queryStmnt1792 ) ; ( ( SearchQueryMeta ) qMeta ) . type = "insensitive_contains" ; ( ( SearchQueryMeta ) qMeta ) . addValue ( ( s1 != null ? s1 . getText ( ) : null ) ) ; // druidG . g : 252:208 : ( ( WS ) ? ' , ' ( WS ) ? s2 = SINGLE _ QUOTE _ STRING ) * loop117 : while ( true ) { int alt117 = 2 ; int LA117_0 = input . 
LA ( 1 ) ; if ( ( LA117_0 == WS ) ) { int LA117_1 = input . LA ( 2 ) ; if ( ( LA117_1 == 91 ) ) { alt117 = 1 ; } } else if ( ( LA117_0 == 91 ) ) { alt117 = 1 ; } switch ( alt117 ) { case 1 : // druidG . g : 252:209 : ( WS ) ? ' , ' ( WS ) ? s2 = SINGLE _ QUOTE _ STRING { // druidG . g : 252:209 : ( WS ) ? int alt115 = 2 ; int LA115_0 = input . LA ( 1 ) ; if ( ( LA115_0 == WS ) ) { alt115 = 1 ; } switch ( alt115 ) { case 1 : // druidG . g : 252:209 : WS { match ( input , WS , FOLLOW_WS_in_queryStmnt1796 ) ; } break ; } match ( input , 91 , FOLLOW_91_in_queryStmnt1799 ) ; // druidG . g : 252:217 : ( WS ) ? int alt116 = 2 ; int LA116_0 = input . LA ( 1 ) ; if ( ( LA116_0 == WS ) ) { alt116 = 1 ; } switch ( alt116 ) { case 1 : // druidG . g : 252:217 : WS { match ( input , WS , FOLLOW_WS_in_queryStmnt1801 ) ; } break ; } s2 = ( Token ) match ( input , SINGLE_QUOTE_STRING , FOLLOW_SINGLE_QUOTE_STRING_in_queryStmnt1806 ) ; ( ( SearchQueryMeta ) qMeta ) . type = "fragment" ; ( ( SearchQueryMeta ) qMeta ) . addValue ( ( s2 != null ? s2 . getText ( ) : null ) ) ; } break ; default : break loop117 ; } } } // druidG . g : 252:337 : ( WS ) ? int alt118 = 2 ; int LA118_0 = input . LA ( 1 ) ; if ( ( LA118_0 == WS ) ) { alt118 = 1 ; } switch ( alt118 ) { case 1 : // druidG . g : 252:337 : WS { match ( input , WS , FOLLOW_WS_in_queryStmnt1813 ) ; } break ; } match ( input , RPARAN , FOLLOW_RPARAN_in_queryStmnt1816 ) ; match ( input , WS , FOLLOW_WS_in_queryStmnt1822 ) ; match ( input , SORT , FOLLOW_SORT_in_queryStmnt1824 ) ; // druidG . g : 253:13 : ( WS ) ? int alt119 = 2 ; int LA119_0 = input . LA ( 1 ) ; if ( ( LA119_0 == WS ) ) { alt119 = 1 ; } switch ( alt119 ) { case 1 : // druidG . g : 253:13 : WS { match ( input , WS , FOLLOW_WS_in_queryStmnt1826 ) ; } break ; } match ( input , LPARAN , FOLLOW_LPARAN_in_queryStmnt1829 ) ; // druidG . g : 253:24 : ( WS ) ? int alt120 = 2 ; int LA120_0 = input . 
LA ( 1 ) ; if ( ( LA120_0 == WS ) ) { alt120 = 1 ; } switch ( alt120 ) { case 1 : // druidG . g : 253:24 : WS { match ( input , WS , FOLLOW_WS_in_queryStmnt1831 ) ; } break ; } // druidG . g : 253:28 : ( s = SINGLE _ QUOTE _ STRING ) // druidG . g : 253:29 : s = SINGLE _ QUOTE _ STRING { s = ( Token ) match ( input , SINGLE_QUOTE_STRING , FOLLOW_SINGLE_QUOTE_STRING_in_queryStmnt1837 ) ; } ( ( SearchQueryMeta ) qMeta ) . setSort ( ( s != null ? s . getText ( ) : null ) ) ; // druidG . g : 253:97 : ( WS ) ? int alt121 = 2 ; int LA121_0 = input . LA ( 1 ) ; if ( ( LA121_0 == WS ) ) { alt121 = 1 ; } switch ( alt121 ) { case 1 : // druidG . g : 253:97 : WS { match ( input , WS , FOLLOW_WS_in_queryStmnt1842 ) ; } break ; } match ( input , RPARAN , FOLLOW_RPARAN_in_queryStmnt1845 ) ; } break ; } // druidG . g : 255:4 : ( WS HINT ( WS ) ? LPARAN ( WS ) ? s = SINGLE _ QUOTE _ STRING ( WS ) ? RPARAN ) ? int alt126 = 2 ; int LA126_0 = input . LA ( 1 ) ; if ( ( LA126_0 == WS ) ) { int LA126_1 = input . LA ( 2 ) ; if ( ( LA126_1 == HINT ) ) { alt126 = 1 ; } } switch ( alt126 ) { case 1 : // druidG . g : 255:5 : WS HINT ( WS ) ? LPARAN ( WS ) ? s = SINGLE _ QUOTE _ STRING ( WS ) ? RPARAN { match ( input , WS , FOLLOW_WS_in_queryStmnt1857 ) ; match ( input , HINT , FOLLOW_HINT_in_queryStmnt1859 ) ; // druidG . g : 255:13 : ( WS ) ? int alt123 = 2 ; int LA123_0 = input . LA ( 1 ) ; if ( ( LA123_0 == WS ) ) { alt123 = 1 ; } switch ( alt123 ) { case 1 : // druidG . g : 255:13 : WS { match ( input , WS , FOLLOW_WS_in_queryStmnt1861 ) ; } break ; } match ( input , LPARAN , FOLLOW_LPARAN_in_queryStmnt1864 ) ; // druidG . g : 255:24 : ( WS ) ? int alt124 = 2 ; int LA124_0 = input . LA ( 1 ) ; if ( ( LA124_0 == WS ) ) { alt124 = 1 ; } switch ( alt124 ) { case 1 : // druidG . g : 255:24 : WS { match ( input , WS , FOLLOW_WS_in_queryStmnt1866 ) ; } break ; } s = ( Token ) match ( input , SINGLE_QUOTE_STRING , FOLLOW_SINGLE_QUOTE_STRING_in_queryStmnt1871 ) ; qMeta = HintProcessor . 
process ( qMeta , ( s != null ? s . getText ( ) : null ) ) ; // druidG . g : 255:99 : ( WS ) ? int alt125 = 2 ; int LA125_0 = input . LA ( 1 ) ; if ( ( LA125_0 == WS ) ) { alt125 = 1 ; } switch ( alt125 ) { case 1 : // druidG . g : 255:99 : WS { match ( input , WS , FOLLOW_WS_in_queryStmnt1875 ) ; } break ; } match ( input , RPARAN , FOLLOW_RPARAN_in_queryStmnt1878 ) ; } break ; } } break ; } if ( qMeta . intervals == null || qMeta . intervals . isEmpty ( ) ) { qMeta = TimeBoundaryQueryMeta . promote ( qMeta ) ; } } } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; } finally { // do for sure before leaving } return qMeta ;
public class DeviceAttribute_3DAODefaultImpl {

    /**
     * Initializes the attribute value object for the named attribute and
     * records the dimensions of its read value.
     *
     * @param name  the attribute name used to build the value object
     * @param dim_x the x dimension of the read value
     * @param dim_y the y dimension of the read value
     */
    public void init(final String name, final int dim_x, final int dim_y) {
        // Build/refresh the underlying attribute value container.
        buildAttributeValueObject(name);
        // Record the read-value dimensions.
        // NOTE(review): assumes buildAttributeValueObject() leaves `attrval`
        // (and attrval.r_dim) non-null — confirm against that helper.
        attrval.r_dim.dim_x = dim_x;
        attrval.r_dim.dim_y = dim_y;
    }
}
public class BinaryType { /** * Readable string value for a type ID . * @ param id The type ID . * @ return The type string . */ public static String asString ( byte id ) { } }
switch ( id ) { case STOP : return "stop(0)" ; case VOID : return "void(1)" ; case BOOL : return "bool(2)" ; case BYTE : return "byte(3)" ; case DOUBLE : return "double(4)" ; // case 5: case I16 : return "i16(6)" ; // case 7: case I32 : // ENUM is same as I32. return "i32(8)" ; // case 9: case I64 : return "i64(10)" ; case STRING : // BINARY is same as STRING . return "string(11)" ; case STRUCT : return "struct(12)" ; case MAP : return "map(13)" ; case SET : return "set(14)" ; case LIST : return "list(15)" ; default : return "unknown(" + id + ")" ; }
public class Registry { /** * Returns all of the policy variations that have been configured . */ public static Set < Policy > policy ( BasicSettings settings , String name ) { } }
Function < Config , Set < Policy > > factory = FACTORIES . get ( name . toLowerCase ( US ) ) ; checkNotNull ( factory , "%s not found" , name ) ; return factory . apply ( settings . config ( ) ) ;
public class AbstractStandardTransformationOperation { /** * Get a parameter from the parameter map . If abortOnError is true , then a TransformationOperationException is * thrown in case that the asked parameter does not exists . If this value is false , the default value will be taken * instead of throwing an exception . */ private String getParameter ( Map < String , String > parameters , String paramName , boolean abortOnError , String defaultValue ) throws TransformationOperationException { } }
String value = parameters . get ( paramName ) ; if ( value != null ) { return value ; } if ( abortOnError ) { String error = String . format ( "There is no parameter with the name %s present. The step will be ignored." , paramName ) ; throw new TransformationOperationException ( error ) ; } logger . debug ( "There is no parameter with the name {} present. The value {} will be taken instead." , paramName , defaultValue ) ; return defaultValue ;
public class IOUtil { /** * Reads data from < code > reader < / code > and writes it into an instanceof { @ link CharArrayWriter2 } . < br > * @ param reader reader from which data is read * @ param closeReader close reader or not * @ return the instance of { @ link CharArrayWriter2 } into which data is written * @ throws IOException if an I / O error occurs . */ public static CharArrayWriter2 pump ( Reader reader , boolean closeReader ) throws IOException { } }
return pump ( reader , new CharArrayWriter2 ( ) , closeReader , true ) ;
public class ObjectGraphDump {

    /**
     * Implementation of the tree walk.
     *
     * Leaf-like values (null, soft references, primitives, simple types) stop
     * the recursion; previously visited objects are turned into back-references
     * so cycles and shared objects are not expanded twice; everything else is
     * dispatched to a type-specific visitor.
     *
     * @param currentNode the node being visited.
     */
    private void visit(final ObjectGraphNode currentNode) {
        Object currentValue = currentNode.getValue();
        // Nothing to descend into for nulls, soft references, primitives or simple types.
        if (currentValue == null || (currentValue instanceof java.lang.ref.SoftReference)
                || currentNode.isPrimitive() || currentNode.isSimpleType()) {
            return;
        }
        // Seen before: link to the first occurrence instead of re-walking (handles cycles).
        if (isObjectVisited(currentNode)) {
            ObjectGraphNode ref = visitedNodes.get(currentValue);
            currentNode.setRefNode(ref);
            return;
        }
        markObjectVisited(currentNode);
        if (currentValue instanceof List) {
            // ArrayList's elementData is marked transient, and others may be as well, so we have to do this ourselves.
            visitList(currentNode);
        } else if (currentValue instanceof Map) {
            // HashMap's table is marked transient, and others may be as well, so we have to do this ourselves.
            visitMap(currentNode);
        } else if (currentValue instanceof ComponentModel) {
            // Special case for ComponentModel, so we can figure out if any fields are overridden
            visitComponentModel(currentNode);
        } else if (currentValue instanceof Field) {
            // Fields are walked as complex types and additionally summarised.
            visitComplexType(currentNode);
            summariseNode(currentNode);
        } else if (currentValue.getClass().isArray()) {
            visitArray(currentNode);
        } else {
            visitComplexType(currentNode);
        }
    }
}
public class RemoteFieldTable {

    /**
     * Add this record to this table.
     * Add this record to the table and set the current edit mode to NONE.
     *
     * @param record The record to add.
     * @throws DBException if the remote add fails.
     */
    public void doAdd(Rec record) throws DBException {
        try {
            // Remember the bookmark of the added record for later repositioning.
            m_objLastModBookmark = m_tableRemote.add(this.getDataSource(), record.getOpenMode());
        } catch (RemoteException ex) {
            // NOTE(review): wrapping only the message loses the original stack
            // trace — if DBException has a (String, Throwable) constructor,
            // chaining the cause would aid debugging.
            throw new DBException(ex.getMessage());
        }
    }
}
public class HtmlSerialFieldWriter { /** * Check to see if overview details should be printed . If * nocomment option set or if there is no text to be printed * for deprecation info , comment or tags , do not print overview details . * @ param field the field to check overview details for . * @ return true if overview details need to be printed */ public boolean shouldPrintOverview ( VariableElement field ) { } }
if ( ! configuration . nocomment ) { if ( ! utils . getFullBody ( field ) . isEmpty ( ) || writer . hasSerializationOverviewTags ( field ) ) return true ; } if ( utils . isDeprecated ( field ) ) return true ; return false ;
public class HttpSession2 { /** * get session attributes ' snapshot * @ return session attributes ' map object */ public Map < String , Object > snapshot ( ) { } }
Map < String , Object > snap = Maps . newHashMap ( ) ; snap . putAll ( sessionStore ) ; snap . putAll ( newAttributes ) ; for ( String name : deleteAttribute ) { snap . remove ( name ) ; } return snap ;
public class AbstractMapBasedWALDAO {

    /**
     * Mark an item as "deleted" without actually deleting it from the map.
     * This method only triggers the update action but does not alter the item.
     * Must only be invoked inside a write-lock.
     *
     * @param aItem            The item that was marked as "deleted"
     * @param bInvokeCallbacks <code>true</code> to invoke callbacks,
     *                         <code>false</code> to not do so.
     * @since 9.2.1
     */
    @MustBeLocked (ELockType.WRITE)
    protected final void internalMarkItemDeleted(@Nonnull final IMPLTYPE aItem, final boolean bInvokeCallbacks) {
        // Trigger save changes — the item is persisted as an UPDATE, not removed.
        super.markAsChanged(aItem, EDAOActionType.UPDATE);
        if (bInvokeCallbacks) {
            // Invoke callbacks
            m_aCallbacks.forEach(aCB -> aCB.onMarkItemDeleted(aItem));
        }
    }
}
public class Ifc2x3tc1PackageImpl {

    /**
     * Returns the {@link EEnum} for IfcTrimmingPreference, resolving it lazily
     * from the globally registered Ifc2x3tc1 package on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getIfcTrimmingPreference() {
        if (ifcTrimmingPreferenceEEnum == null) {
            // Lazily resolve the classifier from the registered package.
            // NOTE(review): index 921 is generator-assigned — never edit by hand.
            ifcTrimmingPreferenceEEnum = (EEnum) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(921);
        }
        return ifcTrimmingPreferenceEEnum;
    }
}
public class CmsWorkplace { /** * Generates the header for the extended report view . < p > * @ param cms the current users context * @ param encoding the encoding string * @ return html code */ public static String generatePageStartExtended ( CmsObject cms , String encoding ) { } }
StringBuffer result = new StringBuffer ( 128 ) ; result . append ( "<html>\n<head>\n" ) ; result . append ( "<meta HTTP-EQUIV='Content-Type' CONTENT='text/html; charset=" ) ; result . append ( encoding ) ; result . append ( "'>\n" ) ; result . append ( generateCssStyle ( cms ) ) ; result . append ( "</head>\n" ) ; result . append ( "<body style='overflow: auto;'>\n" ) ; result . append ( "<div class='main'>\n" ) ; return result . toString ( ) ;
public class TrackBox { /** * Gets the SampleTableBox at mdia / minf / stbl if existing . * @ return the SampleTableBox or < code > null < / code > */ public SampleTableBox getSampleTableBox ( ) { } }
if ( sampleTableBox != null ) { return sampleTableBox ; } MediaBox mdia = getMediaBox ( ) ; if ( mdia != null ) { MediaInformationBox minf = mdia . getMediaInformationBox ( ) ; if ( minf != null ) { sampleTableBox = minf . getSampleTableBox ( ) ; return sampleTableBox ; } } return null ;
public class ICPImpl {
    /**
     * Sets the YCOset value and emits a SET notification when listeners are
     * registered.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setYCOset(Integer newYCOset) {
        Integer oldYCOset = ycOset;
        ycOset = newYCOset;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.ICP__YC_OSET, oldYCOset, ycOset));
    }
}
public class OutHttpApp { /** * Writes a byte to the output . */ @ Override public void write ( int ch ) throws IOException { } }
if ( isClosed ( ) || isHead ( ) ) { return ; } int offset = _offset ; if ( SIZE <= offset ) { flushByteBuffer ( ) ; offset = _offset ; } _buffer [ offset ++ ] = ( byte ) ch ; _offset = offset ;
public class NGAExtensions { /** * Delete the Tile Scaling extensions for the table * @ param geoPackage * GeoPackage * @ param table * table name * @ since 2.0.2 */ public static void deleteTileScaling ( GeoPackageCore geoPackage , String table ) { } }
TileScalingDao tileScalingDao = geoPackage . getTileScalingDao ( ) ; ExtensionsDao extensionsDao = geoPackage . getExtensionsDao ( ) ; try { if ( tileScalingDao . isTableExists ( ) ) { tileScalingDao . deleteById ( table ) ; } if ( extensionsDao . isTableExists ( ) ) { extensionsDao . deleteByExtension ( TileTableScaling . EXTENSION_NAME , table ) ; } } catch ( SQLException e ) { throw new GeoPackageException ( "Failed to delete Tile Scaling. GeoPackage: " + geoPackage . getName ( ) + ", Table: " + table , e ) ; }
public class ServerConfiguration { /** * Deserialize , then output the given configuration . If two parameters are * given , the first one is the filename and the second is the properties * file to apply before re - serializing . */ public static void main ( String [ ] args ) throws Exception { } }
if ( args . length < 1 || args . length > 2 ) { throw new IOException ( "One or two arguments expected." ) ; } ServerConfiguration config = new ServerConfigurationParser ( new FileInputStream ( new File ( args [ 0 ] ) ) ) . parse ( ) ; if ( args . length == 2 ) { Properties props = new Properties ( ) ; props . load ( new FileInputStream ( new File ( args [ 1 ] ) ) ) ; config . applyProperties ( props ) ; } ByteArrayOutputStream out = new ByteArrayOutputStream ( ) ; config . serialize ( out ) ; String content = new String ( out . toByteArray ( ) , "UTF-8" ) ; System . out . println ( content ) ;
public class JSONResource { /** * Transforming the JSON on the fly */ protected Object unmarshal ( ) throws IOException , JSONException { } }
json = new JSONTokener ( new InputStreamReader ( inputStream , "UTF-8" ) ) . nextValue ( ) ; inputStream . close ( ) ; return json ;
public class SpekeKeyProvider { /** * Relates to SPEKE implementation . DRM system identifiers . DASH output groups support a max of two system ids . * Other group types support one system id . * @ param systemIds * Relates to SPEKE implementation . DRM system identifiers . DASH output groups support a max of two system * ids . Other group types support one system id . */ public void setSystemIds ( java . util . Collection < String > systemIds ) { } }
if ( systemIds == null ) { this . systemIds = null ; return ; } this . systemIds = new java . util . ArrayList < String > ( systemIds ) ;
public class AbstrCFMLExprTransformer {
    /**
     * Reads the arguments of a function call and checks whether the function
     * is defined in the FLD (Function Library Descriptor). If it exists there,
     * the call is validated against the library and a built-in-function (BIF)
     * element is generated; otherwise a regular function-call (UDF) element.
     * EBNF: <code>[impOp {"," impOp}];</code>
     *
     * @param data transformer state (source code, function libraries, factory)
     * @param name identifier of the function
     * @param checkLibrary whether the function should be looked up in the library
     * @return the generated function member (BIF or UDF)
     * @throws TemplateException on syntax errors or invalid argument counts
     */
    private FunctionMember getFunctionMember(Data data, final ExprString name, boolean checkLibrary) throws TemplateException {
        // get Function Library
        checkLibrary = checkLibrary && data.flibs != null;
        FunctionLibFunction flf = null;
        if (checkLibrary) {
            if (!(name instanceof Literal))
                throw new TemplateException(data.srcCode, "syntax error"); // should never happen!
            // Search all libraries for the first one defining this function
            for (int i = 0; i < data.flibs.length; i++) {
                flf = data.flibs[i].getFunction(((Literal) name).getString());
                if (flf != null) break;
            }
            if (flf == null) {
                // Not a library function after all: fall back to UDF handling
                checkLibrary = false;
            }
        }
        FunctionMember fm = null;
        // Loop so that a pre-evaluator may swap in a different library
        // function and restart parsing from the recorded position
        while (true) {
            int pos = data.srcCode.getPos();
            // Element Function
            if (checkLibrary) {
                BIF bif = new BIF(data.factory, data.settings, flf);
                // TODO data.ep.add(flf, bif, data.srcCode);
                bif.setArgType(flf.getArgType());
                try {
                    bif.setClassDefinition(flf.getFunctionClassDefinition());
                }
                catch (Throwable t) {
                    ExceptionUtil.rethrowIfNecessary(t);
                    throw new PageRuntimeException(t);
                }
                bif.setReturnType(flf.getReturnTypeAsString());
                fm = bif;
                // Dynamic-argument functions may carry default argument values
                if (flf.getArgType() == FunctionLibFunction.ARG_DYNAMIC && flf.hasDefaultValues()) {
                    ArrayList<FunctionLibFunctionArg> args = flf.getArg();
                    Iterator<FunctionLibFunctionArg> it = args.iterator();
                    FunctionLibFunctionArg arg;
                    while (it.hasNext()) {
                        arg = it.next();
                        if (arg.getDefaultValue() != null)
                            bif.addArgument(new NamedArgument(data.factory.createLitString(arg.getName()), data.factory.createLitString(arg.getDefaultValue()), arg.getTypeAsString(), false));
                    }
                }
            }
            else {
                fm = new UDF(name);
            }
            int count = getFunctionMemberAttrs(data, name, checkLibrary, fm, flf);
            if (checkLibrary) {
                // pre
                if (flf.hasTteClass()) {
                    FunctionLibFunction tmp = flf.getEvaluator().pre((BIF) fm, flf);
                    if (tmp != null && tmp != flf) {
                        // Evaluator substituted the function: rewind and retry
                        flf = tmp;
                        data.srcCode.setPos(pos);
                        continue;
                    }
                }
                // check max attributes
                {
                    boolean isDynamic = flf.getArgType() == FunctionLibFunction.ARG_DYNAMIC;
                    int max = flf.getArgMax();
                    // Dynamic
                    if (isDynamic) {
                        if (max != -1 && max < fm.getArguments().length)
                            throw new TemplateException(data.srcCode, "too many Attributes (" + max + ":" + fm.getArguments().length + ") in function [ " + ASMUtil.display(name) + " ]");
                    }
                    // Fix
                    else {
                        if (flf.getArg().size() < fm.getArguments().length) {
                            TemplateException te = new TemplateException(data.srcCode, "too many Attributes (" + flf.getArg().size() + ":" + fm.getArguments().length + ") in function call [" + ASMUtil.display(name) + "]");
                            UDFUtil.addFunctionDoc(te, flf);
                            throw te;
                        }
                    }
                }
                // check min attributes
                if (flf.getArgMin() > count) {
                    TemplateException te = new TemplateException(data.srcCode, "too few attributes in function [" + ASMUtil.display(name) + "]");
                    if (flf.getArgType() == FunctionLibFunction.ARG_FIX) UDFUtil.addFunctionDoc(te, flf);
                    throw te;
                }
                // evaluator
                if (flf.hasTteClass()) {
                    flf.getEvaluator().execute((BIF) fm, flf);
                }
            }
            comments(data);
            if (checkLibrary) data.ep.add(flf, (BIF) fm, data.srcCode);
            break;
        }
        return fm;
    }
}
public class PredictionsImpl { /** * Predict an image url and saves the result . * @ param projectId The project id * @ param predictImageUrlOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the ImagePrediction object */ public Observable < ServiceResponse < ImagePrediction > > predictImageUrlWithServiceResponseAsync ( UUID projectId , PredictImageUrlOptionalParameter predictImageUrlOptionalParameter ) { } }
if ( projectId == null ) { throw new IllegalArgumentException ( "Parameter projectId is required and cannot be null." ) ; } if ( this . client . apiKey ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiKey() is required and cannot be null." ) ; } final UUID iterationId = predictImageUrlOptionalParameter != null ? predictImageUrlOptionalParameter . iterationId ( ) : null ; final String application = predictImageUrlOptionalParameter != null ? predictImageUrlOptionalParameter . application ( ) : null ; final String url = predictImageUrlOptionalParameter != null ? predictImageUrlOptionalParameter . url ( ) : null ; return predictImageUrlWithServiceResponseAsync ( projectId , iterationId , application , url ) ;
public class ListApiKeysRequestMarshaller {
    /**
     * Marshall the given parameter object into the protocol marshaller.
     *
     * @param listApiKeysRequest the request to marshall; must not be null
     * @param protocolMarshaller the target marshaller
     */
    public void marshall(ListApiKeysRequest listApiKeysRequest, ProtocolMarshaller protocolMarshaller) {
        if (listApiKeysRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(listApiKeysRequest.getApiId(), APIID_BINDING);
            protocolMarshaller.marshall(listApiKeysRequest.getNextToken(), NEXTTOKEN_BINDING);
            protocolMarshaller.marshall(listApiKeysRequest.getMaxResults(), MAXRESULTS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class W3CDom { /** * Convert a jsoup Document to a W3C Document . * @ param in jsoup doc * @ return w3c doc */ public Document fromJsoup ( org . jsoup . nodes . Document in ) { } }
Validate . notNull ( in ) ; DocumentBuilder builder ; try { // set the factory to be namespace - aware factory . setNamespaceAware ( true ) ; builder = factory . newDocumentBuilder ( ) ; Document out = builder . newDocument ( ) ; convert ( in , out ) ; return out ; } catch ( ParserConfigurationException e ) { throw new IllegalStateException ( e ) ; }
public class FrameworkSerializer {
    /**
     * Serializes a primitive boolean by boxing it and delegating to the
     * object-based overload. Can be overridden to avoid boxing an int where
     * appropriate.
     */
    public void serializePrimitive(S rec, String fieldName, boolean value) {
        serializePrimitive(rec, fieldName, Boolean.valueOf(value));
    }
}
public class Clock { /** * Defines if the title of the clock will be drawn . * @ param VISIBLE */ public void setTitleVisible ( final boolean VISIBLE ) { } }
if ( null == titleVisible ) { _titleVisible = VISIBLE ; fireUpdateEvent ( VISIBILITY_EVENT ) ; } else { titleVisible . set ( VISIBLE ) ; }
public class Resources { /** * Retrieve a boolean from bundle . * @ param key the key of resource * @ return the resource boolean * @ throws MissingResourceException if the requested key is unknown */ public boolean getBoolean ( String key ) throws MissingResourceException { } }
ResourceBundle bundle = getBundle ( ) ; String value = bundle . getString ( key ) ; return "true" . equalsIgnoreCase ( value ) ;
public class Get { /** * Validates that the value from { @ code map } for the given { @ code key } is a * valid , populated string URL . Returns the value when valid ; otherwise , * throws an { @ code IllegalArgumentException } . * @ param map a map * @ param key a key * @ param < T > the type of value * @ return the string value * @ throws java . util . NoSuchElementException if the required value is not * present * @ throws java . lang . IllegalArgumentException if the value is in valid */ public static < T > String populatedUrl ( Map < String , T > map , String key ) { } }
String url = populatedStringOfType ( map , key , "URL" ) ; UrlValidator . http ( url ) . orThrow ( String . format ( "Invalid URL value \"%s\" for key \"%s\"" , url , key ) ) ; return url ;
public class Selector { /** * CollidableListener */ @ Override public void notifyCollided ( Collidable collidable , Collision collision ) { } }
if ( collidable . hasFeature ( Selectable . class ) ) { final Selectable selectable = collidable . getFeature ( Selectable . class ) ; selectable . onSelection ( true ) ; selected . add ( selectable ) ; }
public class JSettingsPanel {
    /**
     * GEN-LAST:event_jRadioButtonGraphAggregatedActionPerformed
     * Switches the graph model to detailed mode and refreshes the preview.
     */
    private void jRadioButtonGraphDetailedActionPerformed(java.awt.event.ActionEvent evt) // GEN-FIRST:event_jRadioButtonGraphDetailedActionPerformed
    {
        // GEN-HEADEREND:event_jRadioButtonGraphDetailedActionPerformed
        parent.switchModel(false);
        refreshGraphPreview();
    }
}
public class ManagementLocksInner {
    /**
     * Gets all the management locks for a resource or any level below resource.
     *
     * @param resourceGroupName The name of the resource group containing the locked resource. The name is case insensitive.
     * @param resourceProviderNamespace The namespace of the resource provider.
     * @param parentResourcePath The parent resource identity.
     * @param resourceType The resource type of the locked resource.
     * @param resourceName The name of the locked resource.
     * @param filter The filter to apply on the operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;ManagementLockObjectInner&gt; object
     */
    public Observable<ServiceResponse<Page<ManagementLockObjectInner>>> listAtResourceLevelWithServiceResponseAsync(final String resourceGroupName, final String resourceProviderNamespace, final String parentResourcePath, final String resourceType, final String resourceName, final String filter) {
        return listAtResourceLevelSinglePageAsync(resourceGroupName, resourceProviderNamespace, parentResourcePath, resourceType, resourceName, filter)
            .concatMap(new Func1<ServiceResponse<Page<ManagementLockObjectInner>>, Observable<ServiceResponse<Page<ManagementLockObjectInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<ManagementLockObjectInner>>> call(ServiceResponse<Page<ManagementLockObjectInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        // Last page: stop recursing
                        return Observable.just(page);
                    }
                    // Emit this page, then lazily fetch the remaining pages
                    return Observable.just(page).concatWith(listAtResourceLevelNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
}
public class TerminalParser { /** * A { @ link Parser } that succeeds only if the { @ link Token } objects in the { @ link List } are * adjacent . */ public static Parser < Token > adjacent ( Parser < List < Token > > parser , final Parser < ? > otherwise ) { } }
return parser . next ( tokens -> { if ( tokens . isEmpty ( ) ) return Parsers . always ( ) ; int offset = tokens . get ( 0 ) . index ( ) ; for ( Token token : tokens ) { if ( token . index ( ) != offset ) { return otherwise ; } offset += token . length ( ) ; } return Parsers . always ( ) ; } ) . atomic ( ) . source ( ) . token ( ) ;
public class IOUtils { /** * Deserializes the given byte array back into an { @ link Object } of the desired { @ link Class } type . * @ param < T > class type of the object deserialized from the given bytes . * @ param serializedObjectBytes an array containing the bytes of a serialized object . * @ return a { @ link java . io . Serializable } object from the array of bytes . * @ throws ClassNotFoundException if the class type of the serialized object cannot be resolved . * @ throws IOException if an I / O error occurs during the deserialization process . * @ throws NullPointerException if the serialized object byte array is null . * @ see # deserialize ( byte [ ] , ClassLoader ) * @ see # serialize ( Object ) * @ see java . io . ByteArrayInputStream * @ see java . io . ObjectInputStream * @ see java . io . Serializable */ @ SuppressWarnings ( "unchecked" ) public static < T > T deserialize ( byte [ ] serializedObjectBytes ) throws ClassNotFoundException , IOException { } }
ObjectInputStream in = null ; try { in = new ObjectInputStream ( new ByteArrayInputStream ( serializedObjectBytes ) ) ; return ( T ) in . readObject ( ) ; } finally { close ( in ) ; }
public class CommonMessagePrintTopology { /** * { @ inheritDoc } */ @ Override public void buildTopology ( ) throws Exception { } }
// Get setting from StormConfig Object List < String > kestrelHosts = StormConfigUtil . getStringListValue ( getConfig ( ) , KestrelJsonSpout . KESTREL_SERVERS ) ; String kestrelQueueName = StormConfigUtil . getStringValue ( getConfig ( ) , KestrelJsonSpout . KESTREL_QUEUE , "MessageQueue" ) ; int kestrelSpoutPara = StormConfigUtil . getIntValue ( getConfig ( ) , "kestrel.parallelism" , 1 ) ; int printPara = StormConfigUtil . getIntValue ( getConfig ( ) , "print.parallelism" , 1 ) ; // Topology Setting // Add Spout ( KestrelJsonSpout ) KestrelJsonSpout kestrelSpout = new KestrelJsonSpout ( kestrelHosts , kestrelQueueName , new StringScheme ( ) ) ; getBuilder ( ) . setSpout ( "KestrelJsonSpout" , kestrelSpout , kestrelSpoutPara ) ; // Add Bolt ( KestrelJsonSpout - > StreamMessagePrintBolt ) StreamMessagePrintBolt streamMessagePrintBolt = new StreamMessagePrintBolt ( ) ; getBuilder ( ) . setBolt ( "StreamMessagePrintBolt" , streamMessagePrintBolt , printPara ) . fieldsGrouping ( "KestrelJsonSpout" , new Fields ( FieldName . MESSAGE_KEY ) ) ; // Regist Serialize Setting . getConfig ( ) . registerSerialization ( StreamMessage . class ) ; getConfig ( ) . registerSerialization ( StreamMessageHeader . class ) ; getConfig ( ) . registerSerialization ( MessageEntity . class ) ;
public class CodecUtils { /** * Deserialize a given Base64 encoded data string into an object * @ param data The base64 encoded data string * @ param < T > Just for JavaDoc can be ignored * @ return The required object */ public static < T > T deserializeFromBase64 ( String data ) { } }
Objects . requireNonNull ( data , Required . DATA . toString ( ) ) ; byte [ ] bytes = base64Decoder . decode ( data ) ; return SerializationUtils . deserialize ( bytes ) ;
public class Circle { /** * Check if circle contains the line * @ param line Line to check against * @ return True if line inside circle */ private boolean contains ( Line line ) { } }
return contains ( line . getX1 ( ) , line . getY1 ( ) ) && contains ( line . getX2 ( ) , line . getY2 ( ) ) ;
public class CoinsuperMarketDataServiceRaw { /** * getCoinsuperTicker * @ return Object * @ throws IOException */ public Object getKlines ( CurrencyPair currencyPair ) throws IOException { } }
Map < String , String > data = new HashMap < String , String > ( ) ; data . put ( "symbol" , currencyPair . toString ( ) ) ; RestRequestParam parameters = RestApiRequestHandler . generateRequestParam ( data , this . apiKey , this . secretKey ) ; return coinsuper . getKlines ( parameters ) ;
public class EJSDeployedSupport { /** * d395666 - rewrote entire method . */ public final void setUncheckedLocalException ( Throwable ex ) throws EJBException { } }
ExceptionMappingStrategy exceptionStrategy = getExceptionMappingStrategy ( ) ; Throwable mappedException = exceptionStrategy . setUncheckedException ( this , ex ) ; if ( mappedException != null ) { if ( mappedException instanceof EJBException ) { throw ( EJBException ) mappedException ; } else if ( mappedException instanceof RuntimeException ) { throw ( RuntimeException ) mappedException ; } else if ( mappedException instanceof Error ) { throw ( Error ) mappedException ; } else { // Unless there is a defect in mapping strategy , this should // never happen . But if it does , we are going to // wrap what is returned with a EJBException . This is added // measure to ensure we do not break applications that // existed prior to EJB 3. if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "unexpected Throwable returned by exception mapping strategy" , new Object [ ] { mappedException , exceptionStrategy } ) ; } throw ExceptionUtil . EJBException ( mappedException ) ; } }
public class ST_MakeLine { /** * Returns true as soon as we know the collection contains at least two * points . Start counting from the initial number of points . * @ param points Collection of points * @ param initialNumberOfPoints The initial number of points * @ return True as soon as we know the collection contains at least two * points . */ private static boolean atLeastTwoPoints ( GeometryCollection points , int initialNumberOfPoints ) throws SQLException { } }
int numberOfPoints = initialNumberOfPoints ; for ( int i = 0 ; i < points . getNumGeometries ( ) ; i ++ ) { Geometry p = points . getGeometryN ( i ) ; if ( numberOfPoints >= REQUIRED_NUMBER_OF_POINTS ) { return true ; } numberOfPoints = numberOfPoints + countPoints ( p ) ; } return numberOfPoints >= REQUIRED_NUMBER_OF_POINTS ;
public class FtpUtil { /** * List files in a directory . Do not return sub - directories , just plain files * @ param muleContext * @ param endpointName * @ return a list of the names of the files in the directory * @ throws IOException */ static public List < String > listFilesInDirectory ( MuleContext muleContext , String endpointName ) { } }
logger . info ( "List endpoint: {}" , endpointName ) ; FTPClient ftpClient = null ; List < String > fileNames = new ArrayList < String > ( ) ; try { ftpClient = getFtpClient ( muleContext , endpointName ) ; EndpointURI endpointURI = getImmutableEndpoint ( muleContext , endpointName ) . getEndpointURI ( ) ; String path = endpointURI . getPath ( ) ; logger . info ( "List directory: {}" , path ) ; FTPFile [ ] ftpFiles = ftpClient . listFiles ( path ) ; logger . debug ( "Number of files and sub-folders found: {}" , ftpFiles . length ) ; for ( FTPFile ftpFile : ftpFiles ) { if ( ftpFile . getType ( ) == FTPFile . FILE_TYPE ) { String filename = path + "/" + ftpFile . getName ( ) ; fileNames . add ( filename ) ; logger . debug ( "Added file {}" , filename ) ; } } logger . debug ( "Found {} files in {}" , fileNames . size ( ) , path ) ; } catch ( Exception e ) { if ( logger . isErrorEnabled ( ) ) logger . error ( "Failed to list files in endpoint " + endpointName , e ) ; throw new RuntimeException ( e ) ; } finally { if ( ftpClient != null ) { try { ftpClient . disconnect ( ) ; } catch ( IOException e ) { } } } return fileNames ;
public class MapMatching { /** * Returns the path length plus a penalty if the starting / ending edge is unfavored . */ private double penalizedPathDistance ( Path path , Set < EdgeIteratorState > penalizedVirtualEdges ) { } }
double totalPenalty = 0 ; // Unfavored edges in the middle of the path should not be penalized because we are // only concerned about the direction at the start / end . final List < EdgeIteratorState > edges = path . calcEdges ( ) ; if ( ! edges . isEmpty ( ) ) { if ( penalizedVirtualEdges . contains ( edges . get ( 0 ) ) ) { totalPenalty += uTurnDistancePenalty ; } } if ( edges . size ( ) > 1 ) { if ( penalizedVirtualEdges . contains ( edges . get ( edges . size ( ) - 1 ) ) ) { totalPenalty += uTurnDistancePenalty ; } } return path . getDistance ( ) + totalPenalty ;
public class URLTemplateDescriptor { /** * Returns URL template name of the given type ( by key ) . * @ param refGroupName name of a group of templates from the config file . * @ param key type of the template * @ return template name */ public String getURLTemplateRef ( String refGroupName , String key ) { } }
String ref = null ; if ( _servletContext != null ) { URLTemplatesFactory urlTemplatesFactory = URLTemplatesFactory . getURLTemplatesFactory ( _servletContext ) ; if ( urlTemplatesFactory != null ) { ref = urlTemplatesFactory . getTemplateNameByRef ( refGroupName , key ) ; } } return ref ;
public class NagiosWriter { /** * Define if a value is in a critical , warning or ok state . */ protected String nagiosCheckValue ( String value , String composeRange ) { } }
List < String > simpleRange = Arrays . asList ( composeRange . split ( "," ) ) ; double doubleValue = Double . parseDouble ( value ) ; if ( composeRange . isEmpty ( ) ) { return "0" ; } if ( simpleRange . size ( ) == 1 ) { if ( composeRange . endsWith ( "," ) ) { if ( valueCheck ( doubleValue , simpleRange . get ( 0 ) ) ) { return "1" ; } else { return "0" ; } } else if ( valueCheck ( doubleValue , simpleRange . get ( 0 ) ) ) { return "2" ; } else { return "0" ; } } if ( valueCheck ( doubleValue , simpleRange . get ( 1 ) ) ) { return "2" ; } if ( valueCheck ( doubleValue , simpleRange . get ( 0 ) ) ) { return "1" ; } return "0" ;
public class WebGroup { /** * PK37449 synchronizing */ public synchronized void destroy ( ) { } }
if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) logger . logp ( Level . FINE , CLASS_NAME , "destroy" , "entry" ) ; /* destroying = true ; if ( nServicing > 0 ) { try { this . wait ( ) ; } catch ( InterruptedException e ) { / / TODO Auto - generated catch block */ super . destroy ( ) ; // parent . removeSubContainer ( name ) ; / / PK37449 this . requestMapper = null ; this . config = null ; this . webApp = null ; // PK25527 if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) logger . exiting ( CLASS_NAME , "destroy" ) ; // 569469
public class ServerMessageBlock2Request { /** * { @ inheritDoc } * @ see jcifs . internal . smb2 . ServerMessageBlock2 # encode ( byte [ ] , int ) */ @ Override public int encode ( byte [ ] dst , int dstIndex ) { } }
int len = super . encode ( dst , dstIndex ) ; int exp = size ( ) ; int actual = getLength ( ) ; if ( exp != actual ) { throw new IllegalStateException ( String . format ( "Wrong size calculation have %d expect %d" , exp , actual ) ) ; } return len ;
public class TermOfUsePanel { /** * Factory method for creating the new { @ link Component } for the cancellation . This method is * invoked in the constructor from the derived classes and can be overridden so users can * provide their own version of a new { @ link Component } for the cancellation . * @ param id * the id * @ param model * the model * @ return the new { @ link Component } for the cancellation */ protected Component newCancellationPanel ( final String id , final IModel < HeaderContentListModelBean > model ) { } }
return new CancellationPanel ( id , Model . of ( model . getObject ( ) ) ) ;
public class BaseMessagingEngineImpl {
    /**
     * Gets the instance of the MP associated with this ME.
     *
     * @deprecated
     * @param name unused; kept for interface compatibility
     * @return JsEngineComponent
     */
    @Deprecated
    public JsEngineComponent getMessageProcessor(String name) {
        // Trace entry and exit together: this is a trivial accessor
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            SibTr.entry(tc, "getMessageProcessor", this);
            SibTr.exit(tc, "getMessageProcessor", _messageProcessor);
        }
        return _messageProcessor;
    }
}
public class DeleteAliasRequestMarshaller {
    /**
     * Marshall the given parameter object into the protocol marshaller.
     *
     * @param deleteAliasRequest the request to marshall; must not be null
     * @param protocolMarshaller the target marshaller
     */
    public void marshall(DeleteAliasRequest deleteAliasRequest, ProtocolMarshaller protocolMarshaller) {
        if (deleteAliasRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(deleteAliasRequest.getOrganizationId(), ORGANIZATIONID_BINDING);
            protocolMarshaller.marshall(deleteAliasRequest.getEntityId(), ENTITYID_BINDING);
            protocolMarshaller.marshall(deleteAliasRequest.getAlias(), ALIAS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ValidatingStreamReader {
    /**
     * Method called by <code>finishDTD</code>, to locate the specified
     * external DTD subset. The subset may be obtained from a cache, if a
     * cached copy exists and is compatible; if not, it will be read from the
     * source identified by the public and/or system identifier passed.
     *
     * @param pubId public identifier of the external subset, may be null
     * @param sysId system identifier of the external subset; currently required
     * @param intSubset the internal DTD subset, used for cache compatibility checks
     * @return the resolved external DTD subset
     * @throws XMLStreamException if the subset cannot be resolved or parsed
     */
    private DTDSubset findDtdExtSubset(String pubId, String sysId, DTDSubset intSubset) throws XMLStreamException {
        boolean cache = hasConfigFlags(CFG_CACHE_DTDS);
        DTDId dtdId;
        try {
            dtdId = constructDtdId(pubId, sysId);
        } catch (IOException ioe) {
            throw constructFromIOE(ioe);
        }
        if (cache) {
            DTDSubset extSubset = findCachedSubset(dtdId, intSubset);
            if (extSubset != null) {
                return extSubset;
            }
        }
        // No useful cached copy? Need to read it then.
        /* For now, we do require a system identifier; otherwise we don't
         * know how to resolve DTDs by public id. In future we should
         * probably also have some simple catalog resolving facility?
         */
        if (sysId == null) {
            throwParseError("Can not resolve DTD with public id \"{0}\"; missing system identifier", mDtdPublicId, null);
        }
        WstxInputSource src = null;
        try {
            int xmlVersion = mDocXmlVersion;
            // 05-Feb-2006, TSa: If xmlVersion not explicitly known, defaults to 1.0
            if (xmlVersion == XmlConsts.XML_V_UNKNOWN) {
                xmlVersion = XmlConsts.XML_V_10;
            }
            /* null -> no explicit path context, use parent's
             * null -> not an entity expansion, no name.
             * Note, too, that we can NOT just pass mEntityResolver, since
             * that's the one used for general entities, whereas the ext
             * subset should be resolved by the param entity resolver.
             */
            src = DefaultInputResolver.resolveEntity(mInput, null, null, pubId, sysId, mConfig.getDtdResolver(), mConfig, xmlVersion);
        } catch (FileNotFoundException fex) {
            /* Let's catch and rethrow this just so we get a more meaningful
             * description (with input source position etc)
             */
            throwParseError("(was {0}) {1}", fex.getClass().getName(), fex.getMessage());
        } catch (IOException ioe) {
            throwFromIOE(ioe);
        }
        DTDSubset extSubset = FullDTDReader.readExternalSubset(src, mConfig, intSubset, hasConfigFlags(CFG_VALIDATE_AGAINST_DTD), mDocXmlVersion);
        if (cache) {
            /* Ok; can be cached, but only if it does NOT refer to
             * parameter entities defined in the internal subset (if it does,
             * there's no easy/efficient way to check if it could be used
             * later on, plus it's unlikely it could be)
             */
            if (extSubset.isCachable()) {
                mOwner.addCachedDTD(dtdId, extSubset);
            }
        }
        return extSubset;
    }
}
public class DFAs {
    /**
     * Minimizes the given DFA over the given alphabet. This method does not
     * modify the given DFA, but returns the minimized version as a new
     * instance. <b>Note:</b> the DFA must be completely specified.
     *
     * @param dfa the DFA to be minimized
     * @param alphabet the input alphabet to consider for minimization (this
     *        will also be the input alphabet of the resulting automaton)
     * @return a minimized version of the specified DFA
     */
    public static <I> CompactDFA<I> minimize(DFA<?, I> dfa, Alphabet<I> alphabet) {
        // Delegates to the Hopcroft minimization implementation
        return HopcroftMinimization.minimizeDFA(dfa, alphabet);
    }
}
public class AJP13Packet { public void addString ( String s ) throws IOException { } }
if ( s == null ) { addInt ( 0xFFFF ) ; return ; } if ( _byteWriter == null ) _byteWriter = new ByteArrayISO8859Writer ( _buf ) ; int p = _bytes + 2 ; _byteWriter . setLength ( p ) ; _byteWriter . write ( s ) ; int l = _byteWriter . size ( ) - p ; addInt ( l ) ; _bytes += l ; _buf [ _bytes ++ ] = ( byte ) 0 ;
public class GVRPose { /** * Get the world rotations for all the bones in this pose . * The world space rotations for each bone are copied into the * destination array as quaterions in the order of their bone index . * The array must be as large as the number of bones in the skeleton * ( which can be obtained by calling { @ link # getNumBones } ) . * All bones in the skeleton start out at the origin oriented along the bone axis ( usually 0,0,1 ) . * The pose orients and positions each bone in the skeleton with respect to this initial state . * The world bone matrix expresses the orientation and position of the bone relative * to the root of the skeleton . This function returns the world space bone rotations * as an array of quaternions . * @ param rotationsdestination array to get world space joint rotations . * @ see # setWorldRotations * @ see # getWorldRotation * @ see # getWorldMatrix * @ see # getNumBones * @ see GVRSkeleton # setBoneAxis */ public void getWorldRotations ( float [ ] rotations ) { } }
if ( rotations . length != mBones . length * 4 ) { throw new IllegalArgumentException ( "Destination array is the wrong size" ) ; } sync ( ) ; for ( int i = 0 ; i < mBones . length ; i ++ ) { Bone bone = mBones [ i ] ; int t = i * 4 ; bone . WorldMatrix . getUnnormalizedRotation ( mTempQuat ) ; mTempQuat . normalize ( ) ; rotations [ t ++ ] = mTempQuat . x ; rotations [ t ++ ] = mTempQuat . y ; rotations [ t ++ ] = mTempQuat . z ; rotations [ t ] = mTempQuat . w ; }
public class Client { /** * ( non - Javadoc ) * @ see org . restcomm . protocols . ss7 . map . api . MAPDialogListener # onDialogTimeout * ( org . restcomm . protocols . ss7 . map . api . MAPDialog ) */ @ Override public void onDialogTimeout ( MAPDialog mapDialog ) { } }
logger . error ( String . format ( "onDialogTimeout for DialogId=%d" , mapDialog . getLocalDialogId ( ) ) ) ; this . csvWriter . incrementCounter ( ERROR_DIALOGS ) ;
public class Browser { /** * allows the browser selected to be passed in with a case insensitive name * @ param b - the string name of the browser * @ return Browser : the enum version of the browser * @ throws InvalidBrowserException If a browser that is not one specified in the * Selenium . Browser class is used , this exception will be thrown */ public static BrowserName lookup ( String b ) throws InvalidBrowserException { } }
for ( BrowserName browser : BrowserName . values ( ) ) { if ( browser . name ( ) . equalsIgnoreCase ( b ) ) { return browser ; } } throw new InvalidBrowserException ( "The selected browser " + b + " is not an applicable choice" ) ;
public class BindingInstaller {
    /**
     * Adds the given implicit binding in the graph to the injector hierarchy at the
     * position chosen by the {@link BindingPositioner}, after making sure the
     * binding's dependencies are available at that position.
     */
    private void installBinding(DependencyGraph graph, Key<?> key, Binding binding) {
        // Where the implicit entry will live in the ginjector hierarchy.
        GinjectorBindings installPosition = positions.getInstallPosition(key);
        // Make the binding's dependencies visible at that position first.
        inheritBindingsForDeps(installPosition, graph.getDependenciesOf(key));
        // Finally register the implicit binding itself.
        installPosition.addBinding(key, binding);
    }
}
public class EIPManager { /** * Get the list of EIPS from the DNS . * This mechanism looks for the EIP pool in the zone the instance is in by * looking up the DNS name < code > { zone } . { region } . { domainName } < / code > . The * zone is fetched from the { @ link InstanceInfo } object ; the region is picked * up from the specified configuration * { @ link com . netflix . discovery . EurekaClientConfig # getRegion ( ) } ; the domain name is picked up from * the specified configuration { @ link com . netflix . discovery . EurekaClientConfig # getEurekaServerDNSName ( ) } * with a " txt . " prefix ( see { @ link com . netflix . discovery . endpoint . EndpointUtils * # getZoneBasedDiscoveryUrlsFromRegion ( com . netflix . discovery . EurekaClientConfig , String ) } . * @ param myZone * the zone where this instance exist in . * @ return the collection of EIPs that exist in the zone this instance is * in . */ private Collection < String > getEIPsForZoneFromDNS ( String myZone ) { } }
List < String > ec2Urls = EndpointUtils . getServiceUrlsFromDNS ( clientConfig , myZone , true , new EndpointUtils . InstanceInfoBasedUrlRandomizer ( applicationInfoManager . getInfo ( ) ) ) ; return getEIPsFromServiceUrls ( ec2Urls ) ;
public class Graphics { /** * Fill a rectangle on the canvas in the current color * @ param x1 * The x coordinate of the top left corner * @ param y1 * The y coordinate of the top left corner * @ param width * The width of the rectangle to fill * @ param height * The height of the rectangle to fill */ public void fillRect ( float x1 , float y1 , float width , float height ) { } }
predraw ( ) ; TextureImpl . bindNone ( ) ; currentColor . bind ( ) ; GL . glBegin ( SGL . GL_QUADS ) ; GL . glVertex2f ( x1 , y1 ) ; GL . glVertex2f ( x1 + width , y1 ) ; GL . glVertex2f ( x1 + width , y1 + height ) ; GL . glVertex2f ( x1 , y1 + height ) ; GL . glEnd ( ) ; postdraw ( ) ;
public class Drawer { /** * Set a footerDrawerItem at a specific position * @ param drawerItem * @ param position */ public void setStickyFooterItemAtPosition ( @ NonNull IDrawerItem drawerItem , int position ) { } }
if ( mDrawerBuilder . mStickyDrawerItems != null && mDrawerBuilder . mStickyDrawerItems . size ( ) > position ) { mDrawerBuilder . mStickyDrawerItems . set ( position , drawerItem ) ; } DrawerUtils . rebuildStickyFooterView ( mDrawerBuilder ) ;
public class Phrase {
    /**
     * Gets a special kind of Phrase that converts characters with a known
     * symbol equivalent into Chunks rendered in the Symbol font.
     *
     * @param leading the leading for the phrase
     * @param string  the source text; consumed piecewise as symbols are extracted
     * @param font    the base font for non-symbol text
     * @return a newly constructed Phrase
     */
    public static final Phrase getInstance ( int leading , String string , Font font ) {
        Phrase p = new Phrase ( true ) ;
        p . setLeading ( leading ) ;
        p . font = font ;
        // Symbol substitution only applies when the font is a plain built-in
        // font (not already SYMBOL/ZAPFDINGBATS and no embedded base font).
        if ( font . getFamily ( ) != Font . SYMBOL && font . getFamily ( ) != Font . ZAPFDINGBATS && font . getBaseFont ( ) == null ) {
            int index ;
            // Repeatedly locate the next convertible character in the remainder.
            while ( ( index = SpecialSymbol . index ( string ) ) > - 1 ) {
                if ( index > 0 ) {
                    // Emit the plain text before the symbol as a normal chunk.
                    String firstPart = string . substring ( 0 , index ) ;
                    ( ( ArrayList ) p ) . add ( new Chunk ( firstPart , font ) ) ;
                    string = string . substring ( index ) ;
                }
                // Collect a run of consecutive convertible characters into one
                // chunk rendered with the Symbol font (size/style/color kept).
                Font symbol = new Font ( Font . SYMBOL , font . getSize ( ) , font . getStyle ( ) , font . getColor ( ) ) ;
                StringBuffer buf = new StringBuffer ( ) ;
                buf . append ( SpecialSymbol . getCorrespondingSymbol ( string . charAt ( 0 ) ) ) ;
                string = string . substring ( 1 ) ;
                while ( SpecialSymbol . index ( string ) == 0 ) {
                    buf . append ( SpecialSymbol . getCorrespondingSymbol ( string . charAt ( 0 ) ) ) ;
                    string = string . substring ( 1 ) ;
                }
                ( ( ArrayList ) p ) . add ( new Chunk ( buf . toString ( ) , symbol ) ) ;
            }
        }
        // Whatever text remains after symbol extraction is added as-is.
        if ( string != null && string . length ( ) != 0 ) {
            ( ( ArrayList ) p ) . add ( new Chunk ( string , font ) ) ;
        }
        return p ;
    } }
public class OcciVMUtils {
    /**
     * Creates a VM (OCCI / VMWare) using JSON rendering.
     *
     * @param hostIpPort    IP and port of OCCI server (eg. "172.16.225.91:8080")
     * @param id            unique VM ID
     * @param template      VM image ID (null means no image specified)
     * @param title         VM title (made unique with a counter, since VMWare uses it as the VM ID)
     * @param summary       VM summary
     * @param userData      user data for the VM (newlines are escaped for JSON)
     * @param user          VM credentials (user name)
     * @param password      VM credentials (password)
     * @param config        a map of parameters (eg. mixin attributes)
     * @param waitForActive if true, poll until the VM reports an IP (up to ~150s)
     * @return the VM ID reported by the OCCI server
     * @throws TargetException on malformed URL or any I/O failure talking to the server
     */
    public static String createVMJson ( String hostIpPort , String id , String template , String title , String summary , String userData , String user , String password , Map < String , String > config , boolean waitForActive ) throws TargetException { } }
// Count VM creations (+ make title unique, it is used as VM ID by VMWare!)
String uniqueTitle = title + ( ++ vmCount ) ;
// TODO Expecting more interoperable implementation!
// A cloud-automation provider endpoint in the config redirects creation entirely.
if ( config . get ( CloudautomationMixins . PROVIDER_ENDPOINT ) != null ) {
    return createCloudAutomationVM ( hostIpPort , id , template , title , summary , userData , config , false ) ;
} else {
    String vmId = null ;
    URL url = null ;
    try {
        // Accept all cookies so the OCCI server can keep a session across calls.
        CookieHandler . setDefault ( new CookieManager ( null , CookiePolicy . ACCEPT_ALL ) ) ;
        url = new URL ( "http://" + hostIpPort + "/vm/" ) ;
    } catch ( MalformedURLException e ) {
        throw new TargetException ( e ) ;
    }
    HttpURLConnection httpURLConnection = null ;
    DataInputStream in = null ;
    DataOutputStream output = null ;
    try {
        // PUT a JSON compute rendering to the /vm/ endpoint.
        httpURLConnection = ( HttpURLConnection ) url . openConnection ( ) ;
        httpURLConnection . setRequestMethod ( "PUT" ) ;
        httpURLConnection . setRequestProperty ( "Content-Type" , "application/json" ) ;
        httpURLConnection . setRequestProperty ( "Accept" , "application/json" ) ;
        httpURLConnection . setDoInput ( true ) ;
        httpURLConnection . setDoOutput ( true ) ;
        // Escape line breaks so the user data survives JSON embedding.
        String userDataString = "name: value" ;
        if ( userData != null ) {
            userDataString = userData . replaceAll ( "\n\r" , "\\\\n" ) . replaceAll ( "\n" , "\\\\n" ) . replaceAll ( System . lineSeparator ( ) , "\\\\n" ) ;
        }
        // Hand-built JSON request body: OCCI compute kind plus VMWare mixins.
        String request = "{\n" + "\"id\": \"" + id + "\",\n" + "\"title\": \"" + uniqueTitle + "\",\n" + "\"summary\": \"" + summary + "\",\n" + "\"kind\": \"http://schemas.ogf.org/occi/infrastructure#compute\",\n" + "\"mixins\": [" + "\"http://occiware.org/occi/infrastructure/crtp/backend#vmimage\",\n" + "\"http://occiware.org/occi/infrastructure/crtp/backend#vmwarefolders\",\n" + "\"http://schemas.ogf.org/occi/infrastructure/compute#user_data\",\n" + "\"http://occiware.org/occi/infrastructure/crtp/backend#credential\"\n" + "],\n" + "\"attributes\": {\n" + "\"occi.compute.state\": \"" + "active" + "\",\n" + "\"occi.compute.speed\": " + 3 + ",\n" + "\"occi.compute.memory\": " + 2 + ",\n" + "\"occi.compute.cores\": " + 2 + ",\n" + "\"occi.compute.architecture\": \"" + "x64" + "\",\n" + "\"imagename\": \"" + template + "\",\n" + "\"datacentername\": \"" + config . get ( VmwareFoldersMixin . DATACENTERNAME ) + "\",\n" + "\"datastorename\": \"" + config . get ( VmwareFoldersMixin . DATASTORENAME ) + "\",\n" + "\"clustername\": \"" + config . get ( VmwareFoldersMixin . CLUSTERNAME ) + "\",\n" + "\"hostsystemname\": \"" + config . get ( VmwareFoldersMixin . HOSTSYSTEMNAME ) + "\",\n" + "\"inventorypath\": \"" + config . get ( VmwareFoldersMixin . INVENTORYPATH ) + "\",\n" + "\"occi.compute.userdata\": \"" + userDataString + "\",\n" + "\"user\": \"" + user + "\",\n" + "\"password\": \"" + password + "\"\n" + "}\n}" ;
        final Logger logger = Logger . getLogger ( OcciVMUtils . class . getName ( ) ) ;
        logger . finest ( request ) ;
        // Content-Length must reflect the UTF-8 byte count, not the char count.
        httpURLConnection . setRequestProperty ( "Content-Length" , Integer . toString ( request . getBytes ( StandardCharsets . UTF_8 ) . length ) ) ;
        output = new DataOutputStream ( httpURLConnection . getOutputStream ( ) ) ;
        output . writeBytes ( request ) ;
        output . flush ( ) ;
        Utils . closeQuietly ( output ) ;
        output = null ;
        in = new DataInputStream ( httpURLConnection . getInputStream ( ) ) ;
        ByteArrayOutputStream out = new ByteArrayOutputStream ( ) ;
        Utils . copyStreamSafely ( in , out ) ;
        // Parse JSON response to extract VM ID
        ObjectMapper objectMapper = new ObjectMapper ( ) ;
        JsonResponse rsp = objectMapper . readValue ( out . toString ( "UTF-8" ) , JsonResponse . class ) ;
        vmId = rsp . getId ( ) ;
        if ( ! Utils . isEmptyOrWhitespaces ( vmId ) ) {
            // Wait until VM is active, if requested: poll every 10s, max 15 tries.
            if ( waitForActive ) {
                int retries = 15 ;
                boolean active = false ;
                while ( ! active && retries -- > 0 ) {
                    logger . finest ( "retry: " + retries ) ;
                    try {
                        Thread . sleep ( 10000 ) ; // 10 seconds
                    } catch ( InterruptedException e ) {
                        // ignore
                    }
                    // A non-empty IP is used as the "active" signal.
                    active = ! Utils . isEmptyOrWhitespaces ( getVMIP ( hostIpPort + "/compute" , vmId ) ) ;
                    // active = "ACTIVE".equalsIgnoreCase(getVMStatus(hostIpPort, ret));
                }
            }
        }
    } catch ( IOException e ) {
        throw new TargetException ( e ) ;
    } finally {
        Utils . closeQuietly ( in ) ;
        Utils . closeQuietly ( output ) ;
        if ( httpURLConnection != null ) {
            httpURLConnection . disconnect ( ) ;
        }
    }
    return ( vmId ) ;
}
public class PEMKeyStore { /** * Add a certificate to the keystore . * @ param alias * The certificate alias . * @ param certificate * The certificate to store . * @ throws KeyStoreException */ @ Override public void engineSetCertificateEntry ( String alias , Certificate certificate ) throws KeyStoreException { } }
if ( ! ( certificate instanceof X509Certificate ) ) { throw new KeyStoreException ( "Certificate must be instance of X509Certificate" ) ; } File file ; ResourceTrustAnchor trustAnchor = getCertificateEntry ( alias ) ; if ( trustAnchor != null ) { file = trustAnchor . getFile ( ) ; } else { file = new File ( defaultDirectory , alias ) ; } X509Certificate x509Cert = ( X509Certificate ) certificate ; try { if ( ! inMemoryOnly ) { writeCertificate ( x509Cert , file ) ; } ResourceTrustAnchor anchor = new ResourceTrustAnchor ( inMemoryOnly , new GlobusResource ( file . getAbsolutePath ( ) ) , new TrustAnchor ( x509Cert , null ) ) ; this . aliasObjectMap . put ( alias , anchor ) ; this . certFilenameMap . put ( x509Cert , alias ) ; } catch ( ResourceStoreException e ) { throw new KeyStoreException ( e ) ; } catch ( IOException e ) { throw new KeyStoreException ( e ) ; } catch ( CertificateEncodingException e ) { throw new KeyStoreException ( e ) ; }
public class ManagementRequestHeader {
    /**
     * {@inheritDoc}
     *
     * Reads this header from the stream. The wire format is a fixed sequence of
     * (marker, value) pairs; each expectHeader call validates the marker byte
     * before its value is read.
     */
    public void read ( final DataInput input ) throws IOException {
        // Marker + 4-byte request id.
        ProtocolUtils . expectHeader ( input , ManagementProtocol . REQUEST_ID ) ;
        requestId = input . readInt ( ) ;
        // Marker + 4-byte batch id.
        ProtocolUtils . expectHeader ( input , ManagementProtocol . BATCH_ID ) ;
        batchId = input . readInt ( ) ;
        // Marker + 1-byte operation id.
        ProtocolUtils . expectHeader ( input , ManagementProtocol . OPERATION_ID ) ;
        operationId = input . readByte ( ) ;
        // Marker + boolean one-way flag.
        ProtocolUtils . expectHeader ( input , ManagementProtocol . ONE_WAY ) ;
        oneWay = input . readBoolean ( ) ;
        // Consume the REQUEST_BODY marker; the payload that follows is
        // presumably read by the caller — TODO confirm against the protocol.
        ProtocolUtils . expectHeader ( input , ManagementProtocol . REQUEST_BODY ) ;
    } }
public class Operation { /** * preceeds . * @ param rightOp a { @ link com . obdobion . algebrain . Operation } object . * @ return a boolean . */ public boolean preceeds ( final Operation rightOp ) { } }
if ( getLevel ( ) > rightOp . getLevel ( ) ) return true ; if ( getLevel ( ) < rightOp . getLevel ( ) ) return false ; if ( precedence ( ) <= rightOp . precedence ( ) ) return true ; return false ;
public class ConstructorDefImpl { /** * Check to see if two boundConstructors take indistinguishable arguments . If * so ( and they are in the same class ) , then this would lead to ambiguous * injection targets , and we want to fail fast . * @ param def * @ return */ private boolean equalsIgnoreOrder ( final ConstructorDef < ? > def ) { } }
HashMap map = new HashMap ( ) ; for ( ConstructorArg a : def . getArgs ( ) ) { map . put ( a . getName ( ) , null ) ; } for ( ConstructorArg a : getArgs ( ) ) { if ( ! map . containsKey ( a . getName ( ) ) ) { return false ; } } return true ;
public class ImageUtils { /** * Returns a String identifying the format of given web image . < br > * e . g : image source is " < span style = ' color : blue ' > example . png < / span > " then * return " < span style = ' color : blue ' > png < / span > " . Even the name just is * " < span style = ' color : blue ' > example < / span > " , we can handle it if this is an * image . * @ param sourceImageHttpURL * the web image url which you want to handle . * @ return the format name of given image . * @ throws IOException */ public static String getFormatNameFromHttpImage ( final String sourceImageHttpURL ) throws IOException { } }
ImageReader imageReaderFromHttpImage = getImageReaderFromHttpImage ( sourceImageHttpURL ) ; return imageReaderFromHttpImage != null ? imageReaderFromHttpImage . getFormatName ( ) : null ;
public class UNode { /** * Return true if this node has child nodes and all of them are simple value nodes . * This means that each child must be a { @ link NodeType # VALUE } and its name must be * " value " . If this node has no children or if at least one child is not a simple * value , false is returned . * @ return True if this node has children and they are all value nodes . */ public boolean childrenAreValues ( ) { } }
if ( m_children == null || m_children . size ( ) == 0 ) { return false ; } for ( UNode child : m_children ) { if ( ! child . isValue ( ) || ! child . getName ( ) . equals ( "value" ) ) { return false ; } } return true ;
public class ClassFileWriter { /** * Add a method and begin adding code . * This method must be called before other methods for adding code , exception tables , etc . can be * invoked . * @ param methodName the name of the method * @ param type a string representing the type * @ param flags the attributes of the field , such as ACC _ PUBLIC , etc . bitwise or ' d together */ public void startMethod ( String methodName , String type , short flags ) { } }
short methodNameIndex = itsConstantPool . addUtf8 ( methodName ) ; short typeIndex = itsConstantPool . addUtf8 ( type ) ; itsCurrentMethod = new ClassFileMethod ( methodName , methodNameIndex , type , typeIndex , flags ) ; itsJumpFroms = new UintMap ( ) ; itsMethods . add ( itsCurrentMethod ) ; addSuperBlockStart ( 0 ) ;
public class ObjectByteOffsetImpl {
    /**
     * Resets the given feature to its default value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @ Override public void eUnset ( int featureID ) {
        switch ( featureID ) {
            // Reset each structural feature to its EMF-generated default.
            case AfplibPackage . OBJECT_BYTE_OFFSET__DIR_BY_OFF :
                setDirByOff ( DIR_BY_OFF_EDEFAULT ) ;
                return ;
            case AfplibPackage . OBJECT_BYTE_OFFSET__DIR_BY_HI :
                setDirByHi ( DIR_BY_HI_EDEFAULT ) ;
                return ;
        }
        // Unknown feature ids are handled by the superclass.
        super . eUnset ( featureID ) ;
    } }
public class MPP14Reader { /** * This method extracts and collates constraint data . * @ throws java . io . IOException */ private void processConstraintData ( ) throws IOException { } }
ConstraintFactory factory = new ConstraintFactory ( ) ; factory . process ( m_projectDir , m_file , m_inputStreamFactory ) ;
public class AnnotationImpl {
    /**
     * Returns whether the given feature differs from its default value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @ Override public boolean eIsSet ( int featureID ) {
        switch ( featureID ) {
            case XtextPackage . ANNOTATION__NAME :
                // Null-safe inequality against the default (standard EMF pattern).
                return NAME_EDEFAULT == null ? name != null : ! NAME_EDEFAULT . equals ( name ) ;
        }
        return super . eIsSet ( featureID ) ;
    } }
public class Postconditions { /** * An { @ code int } specialized version of { @ link # checkPostconditions ( Object , * ContractConditionType [ ] ) } * @ param value The value * @ param conditions The conditions the value must obey * @ return value * @ throws PostconditionViolationException If any of the conditions are false */ public static int checkPostconditionsI ( final int value , final ContractIntConditionType ... conditions ) throws PostconditionViolationException { } }
final Violations violations = innerCheckAllInt ( value , conditions ) ; if ( violations != null ) { throw failed ( null , Integer . valueOf ( value ) , violations ) ; } return value ;
public class vlan_channel_binding { /** * Use this API to fetch vlan _ channel _ binding resources of given name . */ public static vlan_channel_binding [ ] get ( nitro_service service , Long id ) throws Exception { } }
vlan_channel_binding obj = new vlan_channel_binding ( ) ; obj . set_id ( id ) ; vlan_channel_binding response [ ] = ( vlan_channel_binding [ ] ) obj . get_resources ( service ) ; return response ;
public class AbstractBigtableConnection { /** * This should not be used . The hbase shell needs this in hbase 0.99.2 . Remove this once * 1.0.0 comes out . * @ param tableName a { @ link java . lang . String } object . * @ return a { @ link org . apache . hadoop . hbase . client . Table } object . * @ throws java . io . IOException if any . */ @ Deprecated public Table getTable ( String tableName ) throws IOException { } }
return getTable ( TableName . valueOf ( tableName ) ) ;
public class JawrRequestHandler { /** * Initialize the config properties source that will provide with all * configuration options . * @ param context * the servlet context * @ param configProps * the config properties * @ return the config properties source * @ throws ServletException * if an exception occurs */ private ConfigPropertiesSource initConfigPropertiesSource ( ServletContext context , Properties configProps ) throws ServletException { } }
String configLocation = getInitParameter ( "configLocation" ) ; String configPropsSourceClass = getInitParameter ( "configPropertiesSourceClass" ) ; if ( null == configProps && null == configLocation && null == configPropsSourceClass ) throw new ServletException ( "Neither configLocation nor configPropertiesSourceClass init params were set." + " You must set at least the configLocation param. Please check your web.xml file" ) ; // Initialize the config properties source that will provide with all // configuration options . ConfigPropertiesSource propsSrc = null ; // Load a custom class to set config properties if ( null != configPropsSourceClass ) { propsSrc = ( ConfigPropertiesSource ) ClassLoaderResourceUtils . buildObjectInstance ( configPropsSourceClass ) ; if ( propsSrc instanceof ServletContextAware ) { ( ( ServletContextAware ) propsSrc ) . setServletContext ( context ) ; } } else if ( configLocation == null && configProps != null ) { // configuration retrieved from the in memory configuration // properties propsSrc = new PropsConfigPropertiesSource ( configProps ) ; } else { // Default config properties source , reads from a . properties file // in the classpath . propsSrc = new PropsFilePropertiesSource ( ) ; } // If a custom properties source is a subclass of // PropsFilePropertiesSource , we hand it the configLocation param . // This affects the standard one as well . if ( propsSrc instanceof PropsFilePropertiesSource ) ( ( PropsFilePropertiesSource ) propsSrc ) . setConfigLocation ( configLocation ) ; return propsSrc ;
public class StripeSourceTypeModel { /** * Put the key - value pairs from the second map into the first map . Note : this does * not protect against overwriting original values . This method assumes * a 1 - level map . * @ param map a { @ link Map } into which new values are being written * @ param additionalFields a { @ link Map } of key - value pairs to add to the object . */ static void putAdditionalFieldsIntoMap ( @ Nullable Map < String , Object > map , @ Nullable Map < String , Object > additionalFields ) { } }
if ( map == null || additionalFields == null || additionalFields . isEmpty ( ) ) { return ; } map . putAll ( additionalFields ) ;
public class OWLInverseObjectPropertiesAxiomImpl_CustomFieldSerializer {
    /**
     * Serializes the content of the object into the
     * {@link com.google.gwt.user.client.rpc.SerializationStreamWriter}.
     *
     * @param streamWriter the stream writer to write the object's content to
     * @param instance     the object instance to serialize
     * @throws com.google.gwt.user.client.rpc.SerializationException if the
     *         serialization operation is not successful
     */
    @ Override public void serializeInstance ( SerializationStreamWriter streamWriter , OWLInverseObjectPropertiesAxiomImpl instance ) throws SerializationException {
        // Delegates to the static serialize helper (standard GWT custom
        // field serializer pattern).
        serialize ( streamWriter , instance ) ;
    } }
public class BlockingQueueOperationListener { /** * Retrieves and removes the head of this queue , waiting up to the specified wait time if necessary for an element to become available . * @ param timeout the timeout * @ param timeUnit the time unit * @ return the prepared operation * @ throws InterruptedException */ public TransactionalProtocolClient . PreparedOperation < T > retrievePreparedOperation ( final long timeout , final TimeUnit timeUnit ) throws InterruptedException { } }
return queue . poll ( timeout , timeUnit ) ;
public class TcpWorker {
    /**
     * First close the connection, then reply to the original sender exactly once.
     *
     * @param response      the response body
     * @param error         whether the request failed
     * @param errorMessage  the error message, if any
     * @param stackTrace    the stack trace, if any
     * @param statusCode    the status code string
     * @param statusCodeInt the numeric status code
     */
    private void reply ( final String response , final boolean error , final String errorMessage , final String stackTrace , final String statusCode , final int statusCodeInt ) {
        if ( ! sentReply ) {
            // must update sentReply first to avoid duplicated msg.
            sentReply = true ;
            // Close the connection. Make sure the close operation ends because
            // all I/O operations are asynchronous in Netty.
            if ( channel != null && channel . isOpen ( ) ) channel . close ( ) . awaitUninterruptibly ( ) ;
            // Build the response envelope, timestamped at send time.
            final ResponseOnSingeRequest res = new ResponseOnSingeRequest ( response , error , errorMessage , stackTrace , statusCode , statusCodeInt , PcDateUtils . getNowDateTimeStrStandard ( ) , null ) ;
            // Only reply when there is a real sender (not the dead-letter office).
            if ( ! getContext ( ) . system ( ) . deadLetters ( ) . equals ( sender ) ) {
                sender . tell ( res , getSelf ( ) ) ;
            }
            // This worker is one-shot: stop the actor after replying.
            if ( getContext ( ) != null ) {
                getContext ( ) . stop ( getSelf ( ) ) ;
            }
        }
    } }
public class LongHashMap {
    /**
     * Associates the specified value with the specified key in this map,
     * replacing any previous mapping.
     *
     * The table is an open-addressed flat array: keys live at even indices and
     * their values at the following odd index. A key of 0 marks an empty slot,
     * so 0 is not a legal key (hence the assert).
     *
     * @param key   the key with which the specified value is to be associated; must not be 0
     * @param value the value to be associated with the specified key
     * @return the previous value associated with the key, or 0 if there was no mapping
     */
    long put ( long key , long value ) {
        assert key != 0 ;
        long k = key ;
        long [ ] tab = table ;
        int len = tab . length ;
        int i = hash ( k , len ) ;
        long item ;
        // Linear-probe until we hit the key or an empty (0) slot.
        while ( ( item = tab [ i ] ) != 0 ) {
            if ( item == k ) {
                // Existing key: swap in the new value, return the old one.
                long oldValue = tab [ i + 1 ] ;
                tab [ i + 1 ] = value ;
                return oldValue ;
            }
            i = nextKeyIndex ( i , len ) ;
        }
        // Empty slot found: insert key/value pair.
        modCount ++ ;
        tab [ i ] = k ;
        tab [ i + 1 ] = value ;
        // Grow when the load threshold is crossed.
        if ( ++ size >= threshold ) resize ( len ) ; // len == 2 * current capacity.
        return 0 ;
    } }
public class SpringJdbcRepository { /** * Removes an entity from the repository . * For this operation the delete query received on the constructor will be * used . * @ param entity * the entity to remove */ @ Override public final void remove ( final V entity ) { } }
final SqlParameterSource parameterSource ; // Parameters source parameterSource = new BeanPropertySqlParameterSource ( entity ) ; getTemplate ( ) . update ( getDeleteQueryTemplate ( ) , parameterSource ) ;
public class ColorHolder { /** * a small static helper class to get the color from the colorHolder * @ param colorHolder * @ param ctx * @ return */ public static int color ( ColorHolder colorHolder , Context ctx ) { } }
if ( colorHolder == null ) { return 0 ; } else { return colorHolder . color ( ctx ) ; }
public class JspCompilationContext { /** * Gets the actual path of a URI relative to the context of * the compilation . * @ param path The webapp path * @ return the corresponding path in the filesystem */ public String getRealPath ( String path ) { } }
if ( context != null ) { return context . getRealPath ( path ) ; } return path ;
public class OObjectDatabaseTx { /** * Register a new POJO */ @ Override public void registerUserObject ( final Object iObject , final ORecordInternal < ? > iRecord ) { } }
if ( ! ( iRecord instanceof ODocument ) ) return ; final ODocument doc = ( ODocument ) iRecord ; if ( retainObjects ) { final ORID rid = iRecord . getIdentity ( ) ; if ( rid . isValid ( ) ) rid2Records . put ( rid , doc ) ; }
public class NUID {
    /**
     * Generate the next NUID string from this instance.
     *
     * The NUID is a fixed prefix followed by the current sequence number
     * encoded in base 36 (using {@code digits}). When the sequence would
     * overflow, the prefix is re-randomized and the sequence reset.
     *
     * @return the next NUID string from this instance.
     */
    public final synchronized String next ( ) {
        // Increment and capture; on overflow, start over with a fresh prefix.
        seq += inc ;
        if ( seq >= maxSeq ) {
            randomizePrefix ( ) ;
            resetSequential ( ) ;
        }
        // Copy prefix into the output buffer.
        char [ ] b = new char [ totalLen ] ;
        System . arraycopy ( pre , 0 , b , 0 , preLen ) ;
        // Copy in the seq in base36, filling from the right end of the buffer
        // back toward the prefix (most significant digit last).
        int i = b . length ;
        for ( long l = seq ; i > preLen ; l /= base ) {
            i -- ;
            b [ i ] = digits [ ( int ) ( l % base ) ] ;
        }
        return new String ( b ) ;
    } }
public class BigMoney { /** * Returns a copy of this monetary value converted into another currency * using the specified conversion rate , with a rounding mode used to adjust * the decimal places in the result . * The result will have the same scale as this instance even though it will * be in a different currency . * This instance is immutable and unaffected by this method . * @ param currency the new currency , not null * @ param conversionMultipler the conversion factor between the currencies , not null * @ param roundingMode the rounding mode to use to bring the decimal places back in line , not null * @ return the new multiplied instance , never null * @ throws IllegalArgumentException if the currency is the same as this currency and the * conversion is not one ; or if the conversion multiplier is negative * @ throws ArithmeticException if the rounding fails */ public BigMoney convertRetainScale ( CurrencyUnit currency , BigDecimal conversionMultipler , RoundingMode roundingMode ) { } }
return convertedTo ( currency , conversionMultipler ) . withScale ( getScale ( ) , roundingMode ) ;
public class DateUtils { /** * 校验两段时间是否有重合 * @ param date1StartStr 时间段1开始 * @ param date1EndStr 时间段1结束 * @ param date2StartStr 时间段2开始 * @ param date2EndStr 时间段2结束 * @ param patten * @ return < p / > * < code > true < / code > : 有重合 * < code > false < / code > : 无重合 */ public static boolean isOverlay ( String date1StartStr , String date1EndStr , String date2StartStr , String date2EndStr , String patten ) { } }
Date date1Start = DateUtils . parseDate ( date1StartStr , patten ) ; Date date1End = DateUtils . parseDate ( date1EndStr , patten ) ; Date date2Start = DateUtils . parseDate ( date2StartStr , patten ) ; Date date2End = DateUtils . parseDate ( date2EndStr , patten ) ; return isOverlay ( date1Start , date1End , date2Start , date2End ) ;
public class StringUtils { /** * Constructs a new < code > String < / code > by decoding the specified array of bytes using the given charset . * This method catches { @ link UnsupportedEncodingException } and re - throws it as { @ link IllegalStateException } , which * should never happen for a required charset name . Use this method when the encoding is required to be in the JRE . * @ param bytes * The bytes to be decoded into characters * @ param charsetName * The name of a required { @ link java . nio . charset . Charset } * @ return A new < code > String < / code > decoded from the specified array of bytes using the given charset . * @ throws IllegalStateException * Thrown when a { @ link UnsupportedEncodingException } is caught , which should never happen for a * required charset name . * @ see < a href = " http : / / commons . apache . org / proper / commons - codec / apidocs / org / apache / commons / codec / CharEncoding . html " > CharEncoding < / a > * @ see String # String ( byte [ ] , String ) */ public static String newString ( byte [ ] bytes , String charsetName ) { } }
if ( bytes == null ) { return null ; } try { return new String ( bytes , charsetName ) ; } catch ( UnsupportedEncodingException e ) { throw StringUtils . newIllegalStateException ( charsetName , e ) ; }
public class JSchema { /** * variant */ private JMFType findChildByIndex ( JMFType start , int index ) { } }
if ( index < 0 ) return null ; if ( start instanceof JSVariant ) { JSVariant var = ( JSVariant ) start ; int cases = var . getCaseCount ( ) ; if ( index >= cases ) return null ; else return getEffectiveType ( var . getCase ( index ) ) ; } else { JSTuple tup = ( JSTuple ) start ; int fields = tup . getFieldCount ( ) ; if ( index >= fields ) return null ; else return getEffectiveType ( tup . getField ( index ) ) ; }
public class QueryBuilder { /** * Query for selecting changes ( or snapshots ) * made on all ValueObjects at given path , owned by any instance of given Entity . * < br / > < br / > * See < b > path < / b > parameter hints in { @ link # byValueObjectId ( Object , Class , String ) } . */ public static QueryBuilder byValueObject ( Class ownerEntityClass , String path ) { } }
Validate . argumentsAreNotNull ( ownerEntityClass , path ) ; return new QueryBuilder ( new VoOwnerFilterDefinition ( ownerEntityClass , path ) ) ;
public class Computer {
    /**
     * Used to render the list of executors.
     * Regular executors are numbered from 1 in display order; one-off executors
     * get an empty display name. Only executors reporting themselves as display
     * cells are included, but the position counter advances for every executor
     * so URLs stay aligned with the underlying lists.
     *
     * @return a snapshot of the executor display information
     * @since 1.607
     */
    @Restricted(NoExternalUse.class)
    public List<DisplayExecutor> getDisplayExecutors() {
        // The lists may change size while we iterate; the sum is just a capacity hint.
        List<DisplayExecutor> snapshot = new ArrayList<>(executors.size() + oneOffExecutors.size());
        int position = 0;
        for (Executor executor : executors) {
            if (executor.isDisplayCell()) {
                snapshot.add(new DisplayExecutor(
                        Integer.toString(position + 1),
                        String.format("executors/%d", position),
                        executor));
            }
            position++; // advance even for hidden cells so URLs match list indices
        }
        position = 0;
        for (OneOffExecutor executor : oneOffExecutors) {
            if (executor.isDisplayCell()) {
                snapshot.add(new DisplayExecutor(
                        "",
                        String.format("oneOffExecutors/%d", position),
                        executor));
            }
            position++;
        }
        return snapshot;
    }
}
public class UpdateBuilder { /** * Deletes the document referred to by this DocumentReference . * @ param documentReference The DocumentReference to delete . * @ param precondition Precondition for the delete operation . * @ return The instance for chaining . */ @ Nonnull public T delete ( @ Nonnull DocumentReference documentReference , @ Nonnull Precondition precondition ) { } }
return performDelete ( documentReference , precondition ) ;
public class HttpMethodBase { /** * Generates < tt > Proxy - Connection : Keep - Alive < / tt > request header when * communicating via a proxy server . * @ param state the { @ link HttpState state } information associated with this method * @ param conn the { @ link HttpConnection connection } used to execute * this HTTP method * @ throws IOException if an I / O ( transport ) error occurs . Some transport exceptions * can be recovered from . * @ throws HttpException if a protocol exception occurs . Usually protocol exceptions * cannot be recovered from . */ protected void addProxyConnectionHeader ( HttpState state , HttpConnection conn ) throws IOException , HttpException { } }
LOG . trace ( "enter HttpMethodBase.addProxyConnectionHeader(" + "HttpState, HttpConnection)" ) ; if ( ! conn . isTransparent ( ) ) { if ( getRequestHeader ( "Proxy-Connection" ) == null ) { addRequestHeader ( "Proxy-Connection" , "Keep-Alive" ) ; } }
public class DenseGrid { /** * Creates an empty { @ code DenseGrid } of the specified size . * @ param < V > the type of the value * @ param rowCount the number of rows , zero or greater * @ param columnCount the number of rows , zero or greater * @ return the mutable grid , not null */ public static < V > DenseGrid < V > create ( int rowCount , int columnCount ) { } }
return new DenseGrid < V > ( rowCount , columnCount ) ;
public class ImmutableRoaringBitmap {
    /**
     * Iterate over the positions of the true values, in ascending order.
     * Each yielded integer combines the 16-bit key of the current container
     * (high bits) with a value from that container's short iterator (low bits).
     *
     * @return the iterator
     */
    @Override
    public Iterator<Integer> iterator() {
        return new Iterator<Integer>() {
            // High 16 bits of the values in the container currently being walked.
            int hs = 0;
            // Iterator over the shorts of the current container; primed by init().
            ShortIterator iter;
            // Index of the current container within highLowContainer.
            int pos = 0;
            // Scratch holding the value to return from next().
            int x;

            @Override
            public boolean hasNext() {
                return pos < ImmutableRoaringBitmap.this.highLowContainer.size();
            }

            // Positions iter/hs on the container at pos (no-op when past the end),
            // and returns this so the construction below can chain ".init()".
            public Iterator<Integer> init() {
                if (pos < ImmutableRoaringBitmap.this.highLowContainer.size()) {
                    iter = ImmutableRoaringBitmap.this.highLowContainer.getContainerAtIndex(pos)
                            .getShortIterator();
                    hs = BufferUtil.toIntUnsigned(
                            ImmutableRoaringBitmap.this.highLowContainer.getKeyAtIndex(pos)) << 16;
                }
                return this;
            }

            @Override
            public Integer next() {
                // Combine low bits from the container with the container's key.
                x = iter.nextAsInt() | hs;
                // Exhausted this container: advance and re-prime for the next one.
                if (!iter.hasNext()) {
                    ++pos;
                    init();
                }
                return x;
            }

            @Override
            public void remove() {
                // The bitmap is immutable; element removal is unsupported.
                throw new RuntimeException("Cannot modify.");
            }
        }.init();
    }
}
public class JsonPathAssert { /** * Extracts a JSON array using a JsonPath expression and wrap it in a { @ link ListAssert } . This method requires * the JsonPath to be < a href = " https : / / github . com / jayway / JsonPath # jsonprovider - spi " > configured with Jackson or * Gson < / a > . * @ param path JsonPath to extract the array * @ param type The type to cast the content of the array , i . e . : { @ link String } , { @ link Integer } * @ param < T > The generic type of the type field * @ return an instance of { @ link ListAssert } */ public < T > AbstractListAssert < ? , ? extends List < ? extends T > , T , ? extends AbstractAssert < ? , T > > jsonPathAsListOf ( String path , Class < T > type ) { } }
return Assertions . assertThat ( actual . read ( path , new TypeRef < List < T > > ( ) { } ) ) ;
public class BoxDeveloperEditionAPIConnection { /** * Creates a new Box Developer Edition connection with App User token . * @ param userId the user ID to use for an App User . * @ param clientId the client ID to use when exchanging the JWT assertion for an access token . * @ param clientSecret the client secret to use when exchanging the JWT assertion for an access token . * @ param encryptionPref the encryption preferences for signing the JWT . * @ param accessTokenCache the cache for storing access token information ( to minimize fetching new tokens ) * @ return a new instance of BoxAPIConnection . */ public static BoxDeveloperEditionAPIConnection getAppUserConnection ( String userId , String clientId , String clientSecret , JWTEncryptionPreferences encryptionPref , IAccessTokenCache accessTokenCache ) { } }
BoxDeveloperEditionAPIConnection connection = new BoxDeveloperEditionAPIConnection ( userId , DeveloperEditionEntityType . USER , clientId , clientSecret , encryptionPref , accessTokenCache ) ; connection . tryRestoreUsingAccessTokenCache ( ) ; return connection ;
public class ListEventSourceMappingsResult { /** * A list of event source mappings . * @ param eventSourceMappings * A list of event source mappings . */ public void setEventSourceMappings ( java . util . Collection < EventSourceMappingConfiguration > eventSourceMappings ) { } }
if ( eventSourceMappings == null ) { this . eventSourceMappings = null ; return ; } this . eventSourceMappings = new com . amazonaws . internal . SdkInternalList < EventSourceMappingConfiguration > ( eventSourceMappings ) ;
public class Iterate { /** * Returns true if the predicate evaluates to true for any element of the iterable . * Returns false if the iterable is empty or if no elements return true for the predicate . */ public static < T > boolean anySatisfy ( Iterable < T > iterable , Predicate < ? super T > predicate ) { } }
if ( iterable instanceof RichIterable ) { return ( ( RichIterable < T > ) iterable ) . anySatisfy ( predicate ) ; } if ( iterable instanceof ArrayList ) { return ArrayListIterate . anySatisfy ( ( ArrayList < T > ) iterable , predicate ) ; } if ( iterable instanceof RandomAccess ) { return RandomAccessListIterate . anySatisfy ( ( List < T > ) iterable , predicate ) ; } if ( iterable != null ) { return IterableIterate . anySatisfy ( iterable , predicate ) ; } throw new IllegalArgumentException ( "Cannot perform an anySatisfy on null" ) ;
public class BulkheadMetricsCollector { /** * Creates a new collector with custom metric names and * using given { @ code supplier } as source of bulkheads . * @ param names the custom metric names * @ param supplier the supplier of bulkheads , note that supplier will be called one every { @ link # collect ( ) } */ public static BulkheadMetricsCollector ofSupplier ( MetricNames names , Supplier < ? extends Iterable < ? extends Bulkhead > > supplier ) { } }
return new BulkheadMetricsCollector ( names , supplier ) ;
public class DSClient {
    /**
     * Deletes all rows whose given column matches the given value.
     * Looks up the table's primary-key column from the cluster metadata,
     * resolves the row keys matching the column value, then issues one CQL
     * DELETE per matching row key.
     *
     * (non-Javadoc)
     * @see com.impetus.kundera.client.Client#deleteByColumn(java.lang.String,
     *      java.lang.String, java.lang.String, java.lang.Object)
     */
    @Override
    public void deleteByColumn(String schemaName, String tableName, String columnName, Object columnValue) {
        Session session = factory.getConnection();
        String rowKeyName = null;
        CQLTranslator translator = new CQLTranslator();
        try {
            // Keyspace/table names are quoted to preserve case in Cassandra metadata lookups.
            List<ColumnMetadata> primaryKeys = session.getCluster().getMetadata()
                    .getKeyspace("\"" + schemaName + "\"").getTable("\"" + tableName + "\"").getPrimaryKey();
            // Only the first partition-key column is used as the row key here.
            rowKeyName = primaryKeys.get(0).getName();
        } finally {
            // NOTE(review): the release call below is commented out, so the session
            // obtained above is never returned to the factory on this path —
            // confirm whether factory.getConnection() hands out a shared session
            // or whether this leaks a connection.
            // factory . releaseConnection ( session ) ;
        }
        // Resolve the row keys whose columnName equals columnValue.
        List rowKeys = getColumnsById(schemaName, tableName, columnName, rowKeyName, columnValue,
                columnValue.getClass());
        for (Object rowKey : rowKeys) {
            if (rowKey != null) {
                // Build: DELETE FROM "<table>" WHERE "<rowKeyName>" = <rowKey>
                String deleteQuery = CQLTranslator.DELETE_QUERY;
                deleteQuery = StringUtils.replace(deleteQuery, CQLTranslator.COLUMN_FAMILY,
                        translator.ensureCase(new StringBuilder(), tableName, false).toString());
                StringBuilder deleteQueryBuilder = new StringBuilder(deleteQuery);
                deleteQueryBuilder.append(CQLTranslator.ADD_WHERE_CLAUSE);
                deleteQueryBuilder = translator.ensureCase(deleteQueryBuilder, rowKeyName, false);
                deleteQueryBuilder.append(CQLTranslator.EQ_CLAUSE);
                // appendValue quotes/serializes the key according to its Java type.
                translator.appendValue(deleteQueryBuilder, rowKey.getClass(), rowKey, false, false);
                this.execute(deleteQueryBuilder.toString(), null);
            }
        }
    }
}