signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class DroolsSwitch {
    /**
     * Calls <code>caseXXX</code> for each class of the model until one returns a non null result;
     * it yields that result.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param classifierID the metamodel classifier id of <code>theEObject</code>
     * @param theEObject the model object being dispatched on
     * @return the first non-null result returned by a <code>caseXXX</code> call.
     * @generated
     */
    protected T doSwitch(int classifierID, EObject theEObject) {
        // EMF-generated dispatch: each case casts to the concrete type, tries the
        // type-specific handler, and falls back to defaultCase when it returns null.
        switch (classifierID) {
            case DroolsPackage.DOCUMENT_ROOT: {
                DocumentRoot documentRoot = (DocumentRoot) theEObject;
                T result = caseDocumentRoot(documentRoot);
                if (result == null) result = defaultCase(theEObject);
                return result;
            }
            case DroolsPackage.GLOBAL_TYPE: {
                GlobalType globalType = (GlobalType) theEObject;
                T result = caseGlobalType(globalType);
                if (result == null) result = defaultCase(theEObject);
                return result;
            }
            case DroolsPackage.IMPORT_TYPE: {
                ImportType importType = (ImportType) theEObject;
                T result = caseImportType(importType);
                if (result == null) result = defaultCase(theEObject);
                return result;
            }
            case DroolsPackage.META_DATA_TYPE: {
                MetaDataType metaDataType = (MetaDataType) theEObject;
                T result = caseMetaDataType(metaDataType);
                if (result == null) result = defaultCase(theEObject);
                return result;
            }
            case DroolsPackage.ON_ENTRY_SCRIPT_TYPE: {
                OnEntryScriptType onEntryScriptType = (OnEntryScriptType) theEObject;
                T result = caseOnEntryScriptType(onEntryScriptType);
                if (result == null) result = defaultCase(theEObject);
                return result;
            }
            case DroolsPackage.ON_EXIT_SCRIPT_TYPE: {
                OnExitScriptType onExitScriptType = (OnExitScriptType) theEObject;
                T result = caseOnExitScriptType(onExitScriptType);
                if (result == null) result = defaultCase(theEObject);
                return result;
            }
            // Unknown classifier ids get the generic fallback.
            default:
                return defaultCase(theEObject);
        }
    }
}
public class DescribeDimensionKeysResult { /** * The dimension keys that were requested . * @ param keys * The dimension keys that were requested . */ public void setKeys ( java . util . Collection < DimensionKeyDescription > keys ) { } }
if ( keys == null ) { this . keys = null ; return ; } this . keys = new java . util . ArrayList < DimensionKeyDescription > ( keys ) ;
public class Start {
    /**
     * Get the locale if specified on the command line, else return null; and if
     * the locale option is not used, then return the default locale.
     *
     * Accepted shapes (enforced below): "ll", "ll_CC", "ll_CC_variant" where the
     * language and country codes are exactly two characters each.
     *
     * @param localeName the raw locale string from the command line, may be null/empty
     * @return the resolved Locale (never null; throws instead)
     * @throws ToolException on a malformed name or a name that matches no available locale
     */
    private Locale getLocale(String localeName) throws ToolException {
        Locale userlocale = null;
        // No option given: fall back to the JVM default.
        if (localeName == null || localeName.isEmpty()) {
            return Locale.getDefault();
        }
        int firstuscore = localeName.indexOf('_');
        int seconduscore = -1;
        String language = null;
        String country = null;
        String variant = null;
        if (firstuscore == 2) {
            // Two-letter language followed by '_': expect country and optional variant.
            language = localeName.substring(0, firstuscore);
            seconduscore = localeName.indexOf('_', firstuscore + 1);
            if (seconduscore > 0) {
                // Country must be exactly two chars and the variant non-empty.
                if (seconduscore != firstuscore + 3 || localeName.length() <= seconduscore + 1) {
                    String text = messager.getText("main.malformed_locale_name", localeName);
                    throw new ToolException(CMDERR, text);
                }
                country = localeName.substring(firstuscore + 1, seconduscore);
                variant = localeName.substring(seconduscore + 1);
            } else if (localeName.length() == firstuscore + 3) {
                // "ll_CC" with no variant.
                country = localeName.substring(firstuscore + 1);
            } else {
                String text = messager.getText("main.malformed_locale_name", localeName);
                throw new ToolException(CMDERR, text);
            }
        } else if (firstuscore == -1 && localeName.length() == 2) {
            // Bare two-letter language code.
            language = localeName;
        } else {
            String text = messager.getText("main.malformed_locale_name", localeName);
            throw new ToolException(CMDERR, text);
        }
        // Syntactically valid; now it must also match an installed locale.
        userlocale = searchLocale(language, country, variant);
        if (userlocale == null) {
            String text = messager.getText("main.illegal_locale_name", localeName);
            throw new ToolException(CMDERR, text);
        } else {
            return userlocale;
        }
    }
}
public class SharedReference {
    /**
     * Decrement the reference count for the shared reference. If the reference count
     * drops to zero, then dispose of the referenced value.
     */
    public void deleteReference() {
        // Only the caller that takes the count to exactly zero performs disposal.
        if (decreaseRefCount() == 0) {
            T deleted;
            // Swap the value out under the lock so at most one thread ever sees
            // a non-null mValue to release.
            synchronized (this) {
                deleted = mValue;
                mValue = null;
            }
            // Release/unregister outside the lock — both calls run alien code.
            mResourceReleaser.release(deleted);
            removeLiveReference(deleted);
        }
    }
}
public class BaseXmlTrxMessageOut { /** * Convert this tree to a DOM object . * Currently this is lame because I convert the tree to text , then to DOM . * In the future , jaxb will be able to convert directly . * @ return The dom tree . */ public Node getDOM ( ) { } }
if ( this . getConvertToNative ( ) != null ) { Node node = this . getConvertToNative ( ) . getDOM ( ) ; if ( node != null ) return node ; } String strXML = this . getXML ( ) ; return Utility . convertXMLToDOM ( strXML ) ;
public class ExcelTransformer {
    /**
     * Writes the Excel file to disk.
     *
     * Walks the whole DataSet from the top, emitting a header row from the column
     * names followed by one spreadsheet row per DETAIL record, honoring the
     * export-only / exclude-from-export column filters; the DataSet cursor is
     * restored afterwards.
     *
     * @throws IOException
     * @throws WriteException
     */
    public void writeExcelFile() throws IOException, WriteException {
        WritableWorkbook excelWrkBook = null;
        int curDsPointer = 0;
        try {
            final String[] columnNames = ds.getColumns();
            // NOTE(review): the null check uses the getter but the value uses the
            // field (exportOnlyColumns) — presumably the same data; confirm.
            final List<String> exportOnlyColumnsList =
                    getExportOnlyColumns() != null ? Arrays.asList(exportOnlyColumns) : null;
            final List<String> excludeFromExportColumnsList =
                    getExcludeFromExportColumns() != null ? Arrays.asList(excludeFromExportColumns) : null;
            final List<String> numericColumnList =
                    getNumericColumns() != null ? Arrays.asList(getNumericColumns()) : new ArrayList<>();
            // Get the current position of the DataSet. We have to go to the top
            // to do this write, and we will put the pointer back where it was
            // after we are done.
            curDsPointer = ds.getIndex();
            ds.goTop();
            excelWrkBook = Workbook.createWorkbook(xlsFile);
            final WritableSheet wrkSheet = excelWrkBook.createSheet("results", 0);
            final WritableFont times10ptBold = new WritableFont(WritableFont.TIMES, 10, WritableFont.BOLD);
            final WritableFont times10pt = new WritableFont(WritableFont.TIMES, 10, WritableFont.NO_BOLD);
            // Write the column headings in the spreadsheet (bold), skipping
            // filtered columns; colOffset keeps the sheet columns contiguous.
            WritableCellFormat cellFormat = new WritableCellFormat(times10ptBold);
            int colOffset = 0;
            for (int i = 0; i < columnNames.length; i++) {
                if (exportOnlyColumnsList != null && !exportOnlyColumnsList.contains(columnNames[i])
                        || excludeFromExportColumnsList != null
                                && excludeFromExportColumnsList.contains(columnNames[i])) {
                    colOffset++;
                    continue;
                }
                final Label xlsTextLbl = new Label(i - colOffset, 0, columnNames[i], cellFormat);
                wrkSheet.addCell(xlsTextLbl);
            }
            // Data rows use the non-bold font; only DETAIL records are exported.
            cellFormat = new WritableCellFormat(times10pt);
            int row = 1;
            while (ds.next()) {
                if (!ds.isRecordID(FPConstants.DETAIL_ID)) {
                    continue;
                }
                colOffset = 0;
                for (int i = 0; i < columnNames.length; i++) {
                    if (exportOnlyColumnsList != null && !exportOnlyColumnsList.contains(columnNames[i])
                            || excludeFromExportColumnsList != null
                                    && excludeFromExportColumnsList.contains(columnNames[i])) {
                        colOffset++;
                        continue;
                    }
                    // Numeric columns are written as Number cells, all others as text.
                    WritableCell wc = null;
                    if (numericColumnList.contains(columnNames[i])) {
                        wc = new Number(i - colOffset, row, ds.getDouble(columnNames[i]), cellFormat);
                    } else {
                        wc = new Label(i - colOffset, row, ds.getString(columnNames[i]), cellFormat);
                    }
                    wrkSheet.addCell(wc);
                }
                row++;
            }
            excelWrkBook.write();
        } finally {
            // Restore the DataSet cursor and always close the workbook.
            if (curDsPointer > -1) {
                ds.absolute(curDsPointer);
            }
            if (excelWrkBook != null) {
                excelWrkBook.close();
            }
        }
    }
}
public class RespokeCall { /** * Process a connected messsage received from the remote endpoint . This is used internally to the SDK and should not be called directly by your client application . */ public void connectedReceived ( ) { } }
if ( null != listenerReference ) { final Listener listener = listenerReference . get ( ) ; if ( null != listener ) { new Handler ( Looper . getMainLooper ( ) ) . post ( new Runnable ( ) { public void run ( ) { if ( isActive ( ) ) { listener . onConnected ( RespokeCall . this ) ; } } } ) ; } }
public class HttpMergeRequestFilter {
    /**
     * Write a fresh HttpResponse from this filter down the filter chain, based on the
     * provided request, with the specified http status code.
     * @param nextFilter the next filter in the chain
     * @param session the IO session
     * @param httpRequest the request that the response corresponds to
     * @param httpStatus the desired status of the http response
     * @return a writeFuture for the written response
     */
    private WriteFuture writeHttpResponse(NextFilter nextFilter, IoSession session, HttpRequestMessage httpRequest, final HttpStatus httpStatus) {
        // Convenience overload: forwards with a null fifth argument.
        // NOTE(review): confirm what the fifth parameter of the full overload
        // means (presumably an optional body/reason) — not visible from here.
        return writeHttpResponse(nextFilter, session, httpRequest, httpStatus, null);
    }
}
public class CPDefinitionGroupedEntryModelImpl { /** * Converts the soap model instances into normal model instances . * @ param soapModels the soap model instances to convert * @ return the normal model instances */ public static List < CPDefinitionGroupedEntry > toModels ( CPDefinitionGroupedEntrySoap [ ] soapModels ) { } }
if ( soapModels == null ) { return null ; } List < CPDefinitionGroupedEntry > models = new ArrayList < CPDefinitionGroupedEntry > ( soapModels . length ) ; for ( CPDefinitionGroupedEntrySoap soapModel : soapModels ) { models . add ( toModel ( soapModel ) ) ; } return models ;
public class DataModelDtoConverters {
    /**
     * Converts {@link IDataModel} to {@link DataModelDto}.
     *
     * The DTO carries three maps:
     *   names  — named addresses (as A1 address strings) merged with named values,
     *   table  — raw cell content keyed by A1 address,
     *   result — evaluated cell values, only for cells that have one.
     */
    static DataModelDto toDataModelDto(final IDataModel dataModel) {
        // Named addresses first...
        Map<String, Object> names = dataModel.getNamedAddresses().entrySet().stream()
                .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue().address()));
        // ...then named values are merged into the same map. put() overwrites on a
        // key clash — NOTE(review): confirm values are meant to win over addresses.
        dataModel.getNamedValues().forEach((k, v) -> names.put(k, v.get()));
        Map<String, Object> table = new HashMap<>();
        Map<String, Object> result = new HashMap<>();
        for (IDmRow r : dataModel) {
            for (IDmCell c : r) {
                ICellAddress address = c.getAddress();
                // Raw content is always recorded; the evaluated value only when present.
                table.put(address.a1Address().address(), c.getContent().get());
                if (c.getValue().isPresent()) {
                    result.put(address.a1Address().address(), c.getValue().get().get());
                }
            }
        }
        DataModelDto dto = new DataModelDto();
        dto.names = names;
        dto.table = table;
        dto.result = result;
        dto.dataModelId = dataModel.getDataModelId().toString();
        dto.name = dataModel.getName();
        return dto;
    }
}
public class HttpMessage {
    /**
     * Sets new header name value pair.
     * @param headerName The name of the header
     * @param headerValue The value of the header
     * @return The altered HttpMessage
     */
    public HttpMessage header(final String headerName, final Object headerValue) {
        // Delegates to the superclass setter; the cast narrows the fluent
        // return type so calls can be chained on HttpMessage.
        return (HttpMessage) super.setHeader(headerName, headerValue);
    }
}
public class IoUtil { /** * 从Reader中读取String , 读取完毕后并不关闭Reader * @ param reader Reader * @ return String * @ throws IORuntimeException IO异常 */ public static String read ( Reader reader ) throws IORuntimeException { } }
final StringBuilder builder = StrUtil . builder ( ) ; final CharBuffer buffer = CharBuffer . allocate ( DEFAULT_BUFFER_SIZE ) ; try { while ( - 1 != reader . read ( buffer ) ) { builder . append ( buffer . flip ( ) . toString ( ) ) ; } } catch ( IOException e ) { throw new IORuntimeException ( e ) ; } return builder . toString ( ) ;
public class AtomicRateLimiter {
    /**
     * A side-effect-free function that can calculate next {@link State} from current.
     * It determines the time duration that you should wait for permission and reserves
     * it for you, if you'll be able to wait long enough.
     *
     * @param timeoutInNanos max time that caller can wait for permission in nanoseconds
     * @param activeState current state of {@link AtomicRateLimiter}
     * @return next {@link State}
     */
    private State calculateNextState(final long timeoutInNanos, final State activeState) {
        long cyclePeriodInNanos = activeState.config.getLimitRefreshPeriodInNanos();
        int permissionsPerCycle = activeState.config.getLimitForPeriod();
        long currentNanos = currentNanoTime();
        // Integer division: time is partitioned into fixed windows ("cycles")
        // of cyclePeriodInNanos each.
        long currentCycle = currentNanos / cyclePeriodInNanos;
        long nextCycle = activeState.activeCycle;
        int nextPermissions = activeState.activePermissions;
        if (nextCycle != currentCycle) {
            // One or more whole cycles elapsed since the active state was taken:
            // refill elapsedCycles * quota, but the min() caps the total at a
            // single cycle's quota (permissions do not accumulate across cycles).
            long elapsedCycles = currentCycle - nextCycle;
            long accumulatedPermissions = elapsedCycles * permissionsPerCycle;
            nextCycle = currentCycle;
            nextPermissions = (int) min(nextPermissions + accumulatedPermissions, permissionsPerCycle);
        }
        // How long a caller must wait for a permission given the refreshed count,
        // then reserve it only if that wait fits within timeoutInNanos.
        long nextNanosToWait = nanosToWaitForPermission(cyclePeriodInNanos, permissionsPerCycle, nextPermissions, currentNanos, currentCycle);
        State nextState = reservePermissions(activeState.config, timeoutInNanos, nextCycle, nextPermissions, nextNanosToWait);
        return nextState;
    }
}
public class BucketImpl { /** * Copies elements from the bucket into the destination array starting at * index 0 . The destination array must be at least large enough to hold all * the elements in the bucket ; otherwise , the behavior is undefined . * @ param dest the destination array */ public void toArray ( Element [ ] dest ) { } }
if ( ivElements != null ) { System . arraycopy ( ivElements , ivHeadIndex , dest , 0 , size ( ) ) ; }
public class CacheProxy { /** * Returns the time when the entry will expire . * @ param created if the write is an insert or update * @ return the time when the entry will expire , zero if it should expire immediately , * Long . MIN _ VALUE if it should not be changed , or Long . MAX _ VALUE if eternal */ protected final long getWriteExpireTimeMS ( boolean created ) { } }
try { Duration duration = created ? expiry . getExpiryForCreation ( ) : expiry . getExpiryForUpdate ( ) ; if ( duration == null ) { return Long . MIN_VALUE ; } else if ( duration . isZero ( ) ) { return 0L ; } else if ( duration . isEternal ( ) ) { return Long . MAX_VALUE ; } return duration . getAdjustedTime ( currentTimeMillis ( ) ) ; } catch ( Exception e ) { logger . log ( Level . WARNING , "Failed to get the policy's expiration time" , e ) ; return Long . MIN_VALUE ; }
public class Crypt {
    /**
     * Encrypt the given property with password-based encryption and return it
     * Base64-encoded.
     *
     * NOTE(review): PBEWithMD5AndDES with an iteration count of 20 is weak by
     * modern standards; changing the algorithm or parameters here would break
     * decryption of existing ciphertexts, so this is flagged for a coordinated
     * migration rather than silently replaced.
     *
     * @param property plaintext to encrypt
     * @return Base64-encoded ciphertext
     * @throws GeneralSecurityException on any JCE failure
     * @throws UnsupportedEncodingException if UTF-8 is unavailable (never in practice)
     */
    String encrypt(String property) throws GeneralSecurityException, UnsupportedEncodingException {
        SecretKeyFactory keyFactory = SecretKeyFactory.getInstance("PBEWithMD5AndDES");
        // Key is derived from the instance's pass-phrase (cypher field).
        SecretKey key = keyFactory.generateSecret(new PBEKeySpec(cypher));
        Cipher pbeCipher = Cipher.getInstance("PBEWithMD5AndDES");
        // Salt comes from the instance; iteration count is fixed at 20.
        pbeCipher.init(Cipher.ENCRYPT_MODE, key, new PBEParameterSpec(salt, 20));
        return base64Encode(pbeCipher.doFinal(property.getBytes("UTF-8")));
    }
}
public class DITAOTCollator { /** * Comparing method required to compare . * @ see java . util . Comparator # compare ( java . lang . Object , java . lang . Object ) */ @ Override public int compare ( final Object source , final Object target ) { } }
try { return ( Integer ) compareMethod . invoke ( collatorInstance , source , target ) ; } catch ( final Exception e ) { throw new RuntimeException ( e . getMessage ( ) , e ) ; }
public class AbstractApplication { /** * Load all Messages files before showing anything . */ private void loadMessagesFiles ( ) { } }
// Parse the first annotation found ( manage overriding ) final Localized local = ClassUtility . getLastClassAnnotation ( this . getClass ( ) , Localized . class ) ; // Conf variable cannot be null because it was defined in this class // It ' s possible to discard default behavior by setting an empty string to the value . // launch the configuration search engine ResourceBuilders . MESSAGE_BUILDER . searchMessagesFiles ( local . value ( ) ) ;
public class StyleUtilities { /** * Creates a default { @ link Style } for a point . * @ return the default style . */ public static Style createDefaultPointStyle ( ) { } }
FeatureTypeStyle featureTypeStyle = sf . createFeatureTypeStyle ( ) ; featureTypeStyle . rules ( ) . add ( createDefaultPointRule ( ) ) ; Style style = sf . createStyle ( ) ; style . featureTypeStyles ( ) . add ( featureTypeStyle ) ; return style ;
public class QrCodePositionPatternDetector {
    /**
     * <p>Specifies transforms which can be used to change coordinates from distorted to
     * undistorted and the opposite coordinates. The undistorted image is never explicitly
     * created.</p>
     *
     * @param width Input image width. Used in sanity check only.
     * @param height Input image height. Used in sanity check only.
     * @param model distortion model. Null to remove a distortion model.
     */
    public void setLensDistortion(int width, int height, @Nullable LensDistortionNarrowFOV model) {
        // Default interpolator works on the raw (possibly distorted) image.
        interpolate = FactoryInterpolation.bilinearPixelS(squareDetector.getInputType(), BorderType.EXTENDED);
        if (model != null) {
            PixelTransform<Point2D_F32> distToUndist = new PointToPixelTransform_F32(model.undistort_F32(true, true));
            PixelTransform<Point2D_F32> undistToDist = new PointToPixelTransform_F32(model.distort_F32(true, true));
            squareDetector.setLensDistortion(width, height, distToUndist, undistToDist);
            // Needs to sample the original image when interpolating, so the
            // interpolator is wrapped with the undistorted->distorted transform.
            // (Original inline comment was truncated by formatting — intent
            // inferred from the wrapper below; confirm.)
            Point2Transform2_F32 u2d = model.distort_F32(true, true);
            this.interpolate = new InterpolatePixelDistortS<>(this.interpolate, u2d);
        } else {
            // Null model clears any previously configured distortion.
            squareDetector.setLensDistortion(width, height, null, null);
        }
    }
}
public class SymoplibParser { /** * Load all SpaceGroup information from the file spacegroups . xml * @ return a map providing information for all spacegroups */ public static TreeMap < Integer , SpaceGroup > parseSpaceGroupsXML ( InputStream spaceGroupIS ) throws IOException , JAXBException { } }
String xml = convertStreamToString ( spaceGroupIS ) ; SpaceGroupMapRoot spaceGroups = SpaceGroupMapRoot . fromXML ( xml ) ; return spaceGroups . getMapProperty ( ) ;
public class ListFixture { /** * Retrieves element at index ( 0 - based ) . * @ param index 0 - based index of element to retrieve value of . * @ param aList list to get element value from . * @ return element at specified index . * @ throws SlimFixtureException if the list does not have at least index elements . */ public Object valueAtIn ( int index , List aList ) { } }
if ( aList . size ( ) > index ) { return aList . get ( index ) ; } else { throw new SlimFixtureException ( false , "list only has " + aList . size ( ) + " elements" ) ; }
public class CmsWorkplace {
    /**
     * Generates the footer for the simple report view.<p>
     *
     * @return html code closing the report table, container div and document
     */
    public static String generatePageEndSimple() {
        // StringBuilder: no synchronization needed for a method-local buffer
        // (was StringBuffer); 128 chars comfortably fits the fixed markup.
        StringBuilder result = new StringBuilder(128);
        result.append("</td></tr>\n");
        result.append("</table></div>\n");
        result.append("</body>\n</html>");
        return result.toString();
    }
}
public class ClassPathUtils { /** * Find the root path for the given class . If the class is found in a Jar file , then the * result will be an absolute path to the jar file . If the resource is found in a directory , * then the result will be the parent path of the given resource . * @ param clazz class to search for * @ return absolute path of the root of the resource . */ @ Nullable public static Path findRootPathForClass ( Class < ? > clazz ) { } }
Objects . requireNonNull ( clazz , "resourceName" ) ; String resourceName = classToResourceName ( clazz ) ; return findRootPathForResource ( resourceName , clazz . getClassLoader ( ) ) ;
public class EventFilterParser {
    /**
     * EventFilter.g:128:1: comparison_function : ( path_function EQUALS value_function -> ^( EQUALS path_function value_function ) | path_function NOT_EQUALS value_function -> ^( NOT_EQUALS path_function value_function ) | path_function GT compariable_value_function -> ^( GT path_function compariable_value_function ) | path_function GE compariable_value_function -> ^( GE path_function compariable_value_function ) | path_function LT compariable_value_function -> ^( LT path_function compariable_value_function ) | path_function LE compariable_value_function -> ^( LE path_function compariable_value_function ) );
     */
    public final EventFilterParser.comparison_function_return comparison_function() throws RecognitionException {
        // ANTLR 3 generated rule — preserved verbatim below; do NOT hand-edit.
        // NOTE(review): the original file's line breaks were lost when this was
        // flattened; the generator's inline "//" comments now swallow the rest
        // of each physical line they sit on, so this method is not compilable
        // as-is. It must be regenerated from EventFilter.g rather than repaired
        // by hand — any manual restructuring risks silently changing the
        // lookahead/rewrite logic.
        EventFilterParser . comparison_function_return retval = new EventFilterParser . comparison_function_return ( ) ; retval . start = input . LT ( 1 ) ; CommonTree root_0 = null ; Token EQUALS19 = null ; Token NOT_EQUALS22 = null ; Token GT25 = null ; Token GE28 = null ; Token LT31 = null ; Token LE34 = null ; EventFilterParser . path_function_return path_function18 = null ; EventFilterParser . value_function_return value_function20 = null ; EventFilterParser . path_function_return path_function21 = null ; EventFilterParser . value_function_return value_function23 = null ; EventFilterParser . path_function_return path_function24 = null ; EventFilterParser . compariable_value_function_return compariable_value_function26 = null ; EventFilterParser . path_function_return path_function27 = null ; EventFilterParser . compariable_value_function_return compariable_value_function29 = null ; EventFilterParser . path_function_return path_function30 = null ; EventFilterParser . compariable_value_function_return compariable_value_function32 = null ; EventFilterParser . path_function_return path_function33 = null ; EventFilterParser . 
        compariable_value_function_return compariable_value_function35 = null ; CommonTree EQUALS19_tree = null ; CommonTree NOT_EQUALS22_tree = null ; CommonTree GT25_tree = null ; CommonTree GE28_tree = null ; CommonTree LT31_tree = null ; CommonTree LE34_tree = null ; RewriteRuleTokenStream stream_GE = new RewriteRuleTokenStream ( adaptor , "token GE" ) ; RewriteRuleTokenStream stream_GT = new RewriteRuleTokenStream ( adaptor , "token GT" ) ; RewriteRuleTokenStream stream_LT = new RewriteRuleTokenStream ( adaptor , "token LT" ) ; RewriteRuleTokenStream stream_EQUALS = new RewriteRuleTokenStream ( adaptor , "token EQUALS" ) ; RewriteRuleTokenStream stream_NOT_EQUALS = new RewriteRuleTokenStream ( adaptor , "token NOT_EQUALS" ) ; RewriteRuleTokenStream stream_LE = new RewriteRuleTokenStream ( adaptor , "token LE" ) ; RewriteRuleSubtreeStream stream_compariable_value_function = new RewriteRuleSubtreeStream ( adaptor , "rule compariable_value_function" ) ; RewriteRuleSubtreeStream stream_value_function = new RewriteRuleSubtreeStream ( adaptor , "rule value_function" ) ; RewriteRuleSubtreeStream stream_path_function = new RewriteRuleSubtreeStream ( adaptor , "rule path_function" ) ; try { // EventFilter . g : 129:2 : ( path _ function EQUALS value _ function - > ^ ( EQUALS path _ function value _ function ) | path _ function NOT _ EQUALS value _ function - > ^ ( NOT _ EQUALS path _ function value _ function ) | path _ function GT compariable _ value _ function - > ^ ( GT path _ function compariable _ value _ function ) | path _ function GE compariable _ value _ function - > ^ ( GE path _ function compariable _ value _ function ) | path _ function LT compariable _ value _ function - > ^ ( LT path _ function compariable _ value _ function ) | path _ function LE compariable _ value _ function - > ^ ( LE path _ function compariable _ value _ function ) ) int alt6 = 6 ; int LA6_0 = input . LA ( 1 ) ; if ( ( LA6_0 == XPATH_FUN_NAME ) ) { int LA6_1 = input . 
        LA ( 2 ) ; if ( ( LA6_1 == 33 ) ) { int LA6_2 = input . LA ( 3 ) ; if ( ( LA6_2 == STRING ) ) { int LA6_3 = input . LA ( 4 ) ; if ( ( LA6_3 == 34 ) ) { switch ( input . LA ( 5 ) ) { case EQUALS : { alt6 = 1 ; } break ; case NOT_EQUALS : { alt6 = 2 ; } break ; case GT : { alt6 = 3 ; } break ; case GE : { alt6 = 4 ; } break ; case LT : { alt6 = 5 ; } break ; case LE : { alt6 = 6 ; } break ; default : NoViableAltException nvae = new NoViableAltException ( "" , 6 , 4 , input ) ; throw nvae ; } } else { NoViableAltException nvae = new NoViableAltException ( "" , 6 , 3 , input ) ; throw nvae ; } } else { NoViableAltException nvae = new NoViableAltException ( "" , 6 , 2 , input ) ; throw nvae ; } } else { NoViableAltException nvae = new NoViableAltException ( "" , 6 , 1 , input ) ; throw nvae ; } } else { NoViableAltException nvae = new NoViableAltException ( "" , 6 , 0 , input ) ; throw nvae ; } switch ( alt6 ) { case 1 : // EventFilter . g : 129:4 : path _ function EQUALS value _ function { pushFollow ( FOLLOW_path_function_in_comparison_function529 ) ; path_function18 = path_function ( ) ; state . _fsp -- ; stream_path_function . add ( path_function18 . getTree ( ) ) ; EQUALS19 = ( Token ) match ( input , EQUALS , FOLLOW_EQUALS_in_comparison_function531 ) ; stream_EQUALS . add ( EQUALS19 ) ; pushFollow ( FOLLOW_value_function_in_comparison_function533 ) ; value_function20 = value_function ( ) ; state . _fsp -- ; stream_value_function . add ( value_function20 . getTree ( ) ) ; // AST REWRITE // elements : value _ function , path _ function , EQUALS // token labels : // rule labels : retval // token list labels : // rule list labels : // wildcard labels : retval . tree = root_0 ; RewriteRuleSubtreeStream stream_retval = new RewriteRuleSubtreeStream ( adaptor , "rule retval" , retval != null ? retval . tree : null ) ; root_0 = ( CommonTree ) adaptor . nil ( ) ; // 129:40 : - > ^ ( EQUALS path _ function value _ function ) { // EventFilter . 
        g : 129:43 : ^ ( EQUALS path _ function value _ function ) { CommonTree root_1 = ( CommonTree ) adaptor . nil ( ) ; root_1 = ( CommonTree ) adaptor . becomeRoot ( new EqualsTreeNode ( stream_EQUALS . nextToken ( ) ) , root_1 ) ; adaptor . addChild ( root_1 , stream_path_function . nextTree ( ) ) ; adaptor . addChild ( root_1 , stream_value_function . nextTree ( ) ) ; adaptor . addChild ( root_0 , root_1 ) ; } } retval . tree = root_0 ; } break ; case 2 : // EventFilter . g : 130:3 : path _ function NOT _ EQUALS value _ function { pushFollow ( FOLLOW_path_function_in_comparison_function552 ) ; path_function21 = path_function ( ) ; state . _fsp -- ; stream_path_function . add ( path_function21 . getTree ( ) ) ; NOT_EQUALS22 = ( Token ) match ( input , NOT_EQUALS , FOLLOW_NOT_EQUALS_in_comparison_function554 ) ; stream_NOT_EQUALS . add ( NOT_EQUALS22 ) ; pushFollow ( FOLLOW_value_function_in_comparison_function556 ) ; value_function23 = value_function ( ) ; state . _fsp -- ; stream_value_function . add ( value_function23 . getTree ( ) ) ; // AST REWRITE // elements : value _ function , path _ function , NOT _ EQUALS // token labels : // rule labels : retval // token list labels : // rule list labels : // wildcard labels : retval . tree = root_0 ; RewriteRuleSubtreeStream stream_retval = new RewriteRuleSubtreeStream ( adaptor , "rule retval" , retval != null ? retval . tree : null ) ; root_0 = ( CommonTree ) adaptor . nil ( ) ; // 130:43 : - > ^ ( NOT _ EQUALS path _ function value _ function ) { // EventFilter . g : 130:46 : ^ ( NOT _ EQUALS path _ function value _ function ) { CommonTree root_1 = ( CommonTree ) adaptor . nil ( ) ; root_1 = ( CommonTree ) adaptor . becomeRoot ( new NotEqualsTreeNode ( stream_NOT_EQUALS . nextToken ( ) ) , root_1 ) ; adaptor . addChild ( root_1 , stream_path_function . nextTree ( ) ) ; adaptor . addChild ( root_1 , stream_value_function . nextTree ( ) ) ; adaptor . addChild ( root_0 , root_1 ) ; } } retval . 
        tree = root_0 ; } break ; case 3 : // EventFilter . g : 131:3 : path _ function GT compariable _ value _ function { pushFollow ( FOLLOW_path_function_in_comparison_function575 ) ; path_function24 = path_function ( ) ; state . _fsp -- ; stream_path_function . add ( path_function24 . getTree ( ) ) ; GT25 = ( Token ) match ( input , GT , FOLLOW_GT_in_comparison_function577 ) ; stream_GT . add ( GT25 ) ; pushFollow ( FOLLOW_compariable_value_function_in_comparison_function579 ) ; compariable_value_function26 = compariable_value_function ( ) ; state . _fsp -- ; stream_compariable_value_function . add ( compariable_value_function26 . getTree ( ) ) ; // AST REWRITE // elements : path _ function , GT , compariable _ value _ function // token labels : // rule labels : retval // token list labels : // rule list labels : // wildcard labels : retval . tree = root_0 ; RewriteRuleSubtreeStream stream_retval = new RewriteRuleSubtreeStream ( adaptor , "rule retval" , retval != null ? retval . tree : null ) ; root_0 = ( CommonTree ) adaptor . nil ( ) ; // 131:47 : - > ^ ( GT path _ function compariable _ value _ function ) { // EventFilter . g : 131:50 : ^ ( GT path _ function compariable _ value _ function ) { CommonTree root_1 = ( CommonTree ) adaptor . nil ( ) ; root_1 = ( CommonTree ) adaptor . becomeRoot ( new ComparableTreeNode ( stream_GT . nextToken ( ) ) , root_1 ) ; adaptor . addChild ( root_1 , stream_path_function . nextTree ( ) ) ; adaptor . addChild ( root_1 , stream_compariable_value_function . nextTree ( ) ) ; adaptor . addChild ( root_0 , root_1 ) ; } } retval . tree = root_0 ; } break ; case 4 : // EventFilter . g : 132:3 : path _ function GE compariable _ value _ function { pushFollow ( FOLLOW_path_function_in_comparison_function598 ) ; path_function27 = path_function ( ) ; state . _fsp -- ; stream_path_function . add ( path_function27 . getTree ( ) ) ; GE28 = ( Token ) match ( input , GE , FOLLOW_GE_in_comparison_function600 ) ; stream_GE . 
        add ( GE28 ) ; pushFollow ( FOLLOW_compariable_value_function_in_comparison_function602 ) ; compariable_value_function29 = compariable_value_function ( ) ; state . _fsp -- ; stream_compariable_value_function . add ( compariable_value_function29 . getTree ( ) ) ; // AST REWRITE // elements : compariable _ value _ function , path _ function , GE // token labels : // rule labels : retval // token list labels : // rule list labels : // wildcard labels : retval . tree = root_0 ; RewriteRuleSubtreeStream stream_retval = new RewriteRuleSubtreeStream ( adaptor , "rule retval" , retval != null ? retval . tree : null ) ; root_0 = ( CommonTree ) adaptor . nil ( ) ; // 132:47 : - > ^ ( GE path _ function compariable _ value _ function ) { // EventFilter . g : 132:50 : ^ ( GE path _ function compariable _ value _ function ) { CommonTree root_1 = ( CommonTree ) adaptor . nil ( ) ; root_1 = ( CommonTree ) adaptor . becomeRoot ( new ComparableTreeNode ( stream_GE . nextToken ( ) ) , root_1 ) ; adaptor . addChild ( root_1 , stream_path_function . nextTree ( ) ) ; adaptor . addChild ( root_1 , stream_compariable_value_function . nextTree ( ) ) ; adaptor . addChild ( root_0 , root_1 ) ; } } retval . tree = root_0 ; } break ; case 5 : // EventFilter . g : 133:3 : path _ function LT compariable _ value _ function { pushFollow ( FOLLOW_path_function_in_comparison_function621 ) ; path_function30 = path_function ( ) ; state . _fsp -- ; stream_path_function . add ( path_function30 . getTree ( ) ) ; LT31 = ( Token ) match ( input , LT , FOLLOW_LT_in_comparison_function623 ) ; stream_LT . add ( LT31 ) ; pushFollow ( FOLLOW_compariable_value_function_in_comparison_function625 ) ; compariable_value_function32 = compariable_value_function ( ) ; state . _fsp -- ; stream_compariable_value_function . add ( compariable_value_function32 . 
        getTree ( ) ) ; // AST REWRITE // elements : path _ function , compariable _ value _ function , LT // token labels : // rule labels : retval // token list labels : // rule list labels : // wildcard labels : retval . tree = root_0 ; RewriteRuleSubtreeStream stream_retval = new RewriteRuleSubtreeStream ( adaptor , "rule retval" , retval != null ? retval . tree : null ) ; root_0 = ( CommonTree ) adaptor . nil ( ) ; // 133:47 : - > ^ ( LT path _ function compariable _ value _ function ) { // EventFilter . g : 133:50 : ^ ( LT path _ function compariable _ value _ function ) { CommonTree root_1 = ( CommonTree ) adaptor . nil ( ) ; root_1 = ( CommonTree ) adaptor . becomeRoot ( new ComparableTreeNode ( stream_LT . nextToken ( ) ) , root_1 ) ; adaptor . addChild ( root_1 , stream_path_function . nextTree ( ) ) ; adaptor . addChild ( root_1 , stream_compariable_value_function . nextTree ( ) ) ; adaptor . addChild ( root_0 , root_1 ) ; } } retval . tree = root_0 ; } break ; case 6 : // EventFilter . g : 134:3 : path _ function LE compariable _ value _ function { pushFollow ( FOLLOW_path_function_in_comparison_function644 ) ; path_function33 = path_function ( ) ; state . _fsp -- ; stream_path_function . add ( path_function33 . getTree ( ) ) ; LE34 = ( Token ) match ( input , LE , FOLLOW_LE_in_comparison_function646 ) ; stream_LE . add ( LE34 ) ; pushFollow ( FOLLOW_compariable_value_function_in_comparison_function648 ) ; compariable_value_function35 = compariable_value_function ( ) ; state . _fsp -- ; stream_compariable_value_function . add ( compariable_value_function35 . getTree ( ) ) ; // AST REWRITE // elements : compariable _ value _ function , LE , path _ function // token labels : // rule labels : retval // token list labels : // rule list labels : // wildcard labels : retval . tree = root_0 ; RewriteRuleSubtreeStream stream_retval = new RewriteRuleSubtreeStream ( adaptor , "rule retval" , retval != null ? retval . tree : null ) ; root_0 = ( CommonTree ) adaptor . 
        nil ( ) ; // 134:47 : - > ^ ( LE path _ function compariable _ value _ function ) { // EventFilter . g : 134:50 : ^ ( LE path _ function compariable _ value _ function ) { CommonTree root_1 = ( CommonTree ) adaptor . nil ( ) ; root_1 = ( CommonTree ) adaptor . becomeRoot ( new ComparableTreeNode ( stream_LE . nextToken ( ) ) , root_1 ) ; adaptor . addChild ( root_1 , stream_path_function . nextTree ( ) ) ; adaptor . addChild ( root_1 , stream_compariable_value_function . nextTree ( ) ) ; adaptor . addChild ( root_0 , root_1 ) ; } } retval . tree = root_0 ; } break ; } retval . stop = input . LT ( - 1 ) ; retval . tree = ( CommonTree ) adaptor . rulePostProcessing ( root_0 ) ; adaptor . setTokenBoundaries ( retval . tree , retval . start , retval . stop ) ; } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; retval . tree = ( CommonTree ) adaptor . errorNode ( input , retval . start , input . LT ( - 1 ) , re ) ; } finally { // do for sure before leaving
        }
        return retval;
    }
}
public class BinaryReader { /** * Write a signed number as varint ( integer with variable number of bytes , * determined as part of the bytes themselves . * NOTE : Reading varint accepts end of stream as ' 0 ' . * @ return The varint read from stream . * @ throws IOException if unable to read from stream . */ public int readIntVarint ( ) throws IOException { } }
int i = in . read ( ) ; if ( i < 0 ) { return 0 ; } boolean c = ( i & 0x80 ) > 0 ; int out = ( i & 0x7f ) ; int shift = 0 ; while ( c ) { shift += 7 ; i = expectUInt8 ( ) ; c = ( i & 0x80 ) > 0 ; out |= ( ( i & 0x7f ) << shift ) ; } return out ;
public class TextColumn { /** * TODO ( lwhite ) : This could avoid the append and do a list copy */ @ Override public TextColumn copy ( ) { } }
TextColumn newCol = create ( name ( ) , size ( ) ) ; int r = 0 ; for ( String string : this ) { newCol . set ( r , string ) ; r ++ ; } return newCol ;
public class CypherFormatterUtils { /** * - - - - to string - - - - */ public static String quote ( Iterable < String > ids ) { } }
StringBuilder builder = new StringBuilder ( ) ; for ( Iterator < String > iterator = ids . iterator ( ) ; iterator . hasNext ( ) ; ) { String id = iterator . next ( ) ; builder . append ( quote ( id ) ) ; if ( iterator . hasNext ( ) ) { builder . append ( "," ) ; } } return builder . toString ( ) ;
public class BeanO { /** * Gets the handle list associated with this bean , optionally creating one * if the bean does not have a handle list yet . * @ param create true if a handle list should be created if the bean does * not already have a handle list */ HandleList getHandleList ( boolean create ) // d662032 { } }
if ( connectionHandleList == null && create ) { connectionHandleList = new HandleList ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "getHandleList: created " + connectionHandleList ) ; } return connectionHandleList ;
public class SFStatement { /** * Sanity check query text * @ param sql The SQL statement to check * @ throws SQLException */ private void sanityCheckQuery ( String sql ) throws SQLException { } }
if ( sql == null || sql . isEmpty ( ) ) { throw new SnowflakeSQLException ( SqlState . SQL_STATEMENT_NOT_YET_COMPLETE , ErrorCode . INVALID_SQL . getMessageCode ( ) , sql ) ; }
public class MinimalMetaBean { /** * Adds an alias to the meta - bean . * When using { @ link # metaProperty ( String ) } , the alias will return the * meta - property of the real name . * @ param alias the alias * @ param realName the real name * @ return the new meta - bean instance * @ throws IllegalArgumentException if the realName is invalid */ public MinimalMetaBean < T > withAlias ( String alias , String realName ) { } }
if ( ! metaPropertyMap . containsKey ( realName ) ) { throw new IllegalArgumentException ( "Invalid property name: " + realName ) ; } Map < String , String > aliasMap = new HashMap < > ( this . aliasMap ) ; aliasMap . put ( alias , realName ) ; return new MinimalMetaBean < > ( beanType , builderSupplier , metaPropertyMap , aliasMap ) ;
public class LogBuffer { /** * Return 64 - bit unsigned long from buffer . ( big - endian ) * @ see mysql - 5.6.10 / include / myisampack . h - mi _ uint8korr */ public final BigInteger getBeUlong64 ( final int pos ) { } }
final long long64 = getBeLong64 ( pos ) ; return ( long64 >= 0 ) ? BigInteger . valueOf ( long64 ) : BIGINT_MAX_VALUE . add ( BigInteger . valueOf ( 1 + long64 ) ) ;
public class ServicePoolBuilder {
    /**
     * Adds a {@link HostDiscoverySource} instance to the builder. Multiple instances of {@code HostDiscoverySource}
     * may be specified. The service pool will query the sources in the order they were registered and use the first
     * non-null {@link HostDiscovery} returned for the service name provided by the
     * {@link ServiceFactory#getServiceName()} method of the factory configured by {@link #withServiceFactory}.
     * Note that using this method will cause the ServicePoolBuilder to call
     * {@link HostDiscoverySource#forService(String serviceName)} when {@link #build()} is called and pass the returned
     * {@link HostDiscovery} to the new {@code ServicePool}. Subsequently calling {@link ServicePool#close()} will in
     * turn call {@link HostDiscovery#close()} on the passed instance.
     *
     * @param hostDiscoverySource a host discovery source to use to find the {@link HostDiscovery} when constructing
     *            the {@link ServicePool}
     * @return this
     */
    public ServicePoolBuilder<S> withHostDiscoverySource(HostDiscoverySource hostDiscoverySource) {
        // Fail fast on a null source instead of deferring the failure to build() time.
        checkNotNull(hostDiscoverySource);
        // NOTE(review): the 'true' flag presumably marks the resulting HostDiscovery
        // as owned by the pool (closed when the pool closes, per the class Javadoc)
        // — confirm against withHostDiscovery's declaration.
        return withHostDiscovery(hostDiscoverySource, true);
    }
}
public class KeyStoreCredentialResolverBuilder { /** * Adds all key names and their passwords which are specified by the { @ code keyPasswords } . */ public KeyStoreCredentialResolverBuilder addKeyPasswords ( Map < String , String > keyPasswords ) { } }
requireNonNull ( keyPasswords , "keyPasswords" ) ; keyPasswords . forEach ( this :: addKeyPassword ) ; return this ;
public class AbstractRegionPainter {
    /**
     * Given parameters for creating a LinearGradientPaint, this method will
     * create and return a linear gradient paint. One primary purpose for this
     * method is to avoid creating a LinearGradientPaint where the start and end
     * points are equal (which the constructor rejects). In such a case, the end
     * y point is slightly increased to avoid the overlap.
     *
     * @param x1 start x
     * @param y1 start y
     * @param x2 end x
     * @param y2 end y
     * @param midpoints gradient fractions
     * @param colors gradient colors
     * @return a valid LinearGradientPaint. This method never returns null.
     */
    protected final LinearGradientPaint createGradient(float x1, float y1, float x2, float y2,
            float[] midpoints, Color[] colors) {
        boolean degenerate = (x1 == x2) && (y1 == y2);
        // Nudge the end point off the start point so the constructor accepts it.
        float endY = degenerate ? y2 + .00001f : y2;
        return new LinearGradientPaint(x1, y1, x2, endY, midpoints, colors);
    }
}
public class QueryParameterValue { /** * Creates a { @ code QueryParameterValue } object with the given value and type . */ public static < T > QueryParameterValue of ( T value , StandardSQLTypeName type ) { } }
return QueryParameterValue . newBuilder ( ) . setValue ( valueToStringOrNull ( value , type ) ) . setType ( type ) . build ( ) ;
public class ClassGraph {
    /**
     * Prints associations recovered from the fields of a class. An association is inferred only
     * if another relation between the two classes is not already in the graph.
     *
     * @param c the class whose (non-static, non-primitive) fields are examined for inferred relations
     */
    public void printInferredRelations(ClassDoc c) {
        // check if the source is excluded from inference
        if (hidden(c))
            return;
        Options opt = optionProvider.getOptionsFor(c);
        for (FieldDoc field : c.fields(false)) {
            if (hidden(field))
                continue;
            // skip statics
            if (field.isStatic())
                continue;
            // skip primitives (getFieldRelationInfo returns null for them)
            FieldRelationInfo fri = getFieldRelationInfo(field);
            if (fri == null)
                continue;
            // check if the destination is excluded from inference
            if (hidden(fri.cd))
                continue;
            // if source and dest are not already linked, add a dependency
            RelationPattern rp = getClassInfo(c, true).getRelation(fri.cd.toString());
            if (rp == null) {
                // multi-valued fields get a "*" adornment on the destination end
                String destAdornment = fri.multiple ? "*" : "";
                relation(opt, opt.inferRelationshipType, c, fri.cd, "", "", destAdornment);
            }
        }
    }
}
public class AWSS3ControlClient { /** * Retrieves the Public Access Block configuration for an Amazon Web Services account . * @ param getPublicAccessBlockRequest * @ return Result of the GetPublicAccessBlock operation returned by the service . * @ throws NoSuchPublicAccessBlockConfigurationException * This exception is thrown if a < code > GetPublicAccessBlock < / code > request is made against an account that * does not have a PublicAccessBlockConfiguration set . * @ sample AWSS3Control . GetPublicAccessBlock * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / s3control - 2018-08-20 / GetPublicAccessBlock " target = " _ top " > AWS * API Documentation < / a > */ @ Override public GetPublicAccessBlockResult getPublicAccessBlock ( GetPublicAccessBlockRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetPublicAccessBlock ( request ) ;
public class StringParser { /** * Parse the given { @ link Object } as double . Note : both the locale independent * form of a double can be parsed here ( e . g . 4.523 ) as well as a localized * form using the comma as the decimal separator ( e . g . the German 4,523 ) . * @ param aObject * The object to parse . May be < code > null < / code > . * @ param dDefault * The default value to be returned if the passed object could not be * converted to a valid value . * @ return The default value if the object does not represent a valid value . */ public static double parseDouble ( @ Nullable final Object aObject , final double dDefault ) { } }
if ( aObject == null ) return dDefault ; if ( aObject instanceof Number ) return ( ( Number ) aObject ) . doubleValue ( ) ; return parseDouble ( aObject . toString ( ) , dDefault ) ;
public class MessageReader { /** * Extracts the message body ' s charset encoding * by looking it up in the message properties if * given there and choosing the default charset ( UTF - 8) * otherwise . * @ return The message body ' s charset encoding */ public Charset readCharset ( ) { } }
BasicProperties basicProperties = message . getBasicProperties ( ) ; if ( basicProperties == null ) { return Message . DEFAULT_MESSAGE_CHARSET ; } String contentCharset = basicProperties . getContentEncoding ( ) ; if ( contentCharset == null ) { return Message . DEFAULT_MESSAGE_CHARSET ; } return Charset . forName ( contentCharset ) ;
public class ObserverList { /** * Applies the operation to the observer , catching and logging any exceptions thrown in the * process . */ protected boolean checkedApply ( ObserverOp < T > obop , T obs ) { } }
try { return obop . apply ( obs ) ; } catch ( Throwable thrown ) { log . warning ( "ObserverOp choked during notification" , "op" , obop , "obs" , observerForLog ( obs ) , thrown ) ; // if they booched it , definitely don ' t remove them return true ; }
public class ApptentiveAttachmentLoader {
    /**
     * Returns the singleton class instance, lazily creating it on first use.
     */
    public static ApptentiveAttachmentLoader getInstance() {
        // Double-checked locking: only take the lock on the slow first-call path.
        // NOTE(review): for this pattern to be safe under the Java memory model,
        // the 'instance' field should be declared volatile — confirm at its declaration site.
        if (instance == null) {
            synchronized (ApptentiveAttachmentLoader.class) {
                if (instance == null) {
                    instance = new ApptentiveAttachmentLoader();
                }
            }
        }
        return instance;
    }
}
public class DurableSubscriptionManager { /** * Register a new durable subscription * @ param clientID * @ param subscriptionName */ public boolean register ( String clientID , String subscriptionName ) { } }
String key = clientID + "-" + subscriptionName ; synchronized ( subscriptions ) { if ( subscriptions . containsKey ( key ) ) return false ; subscriptions . put ( key , new DurableTopicSubscription ( System . currentTimeMillis ( ) , clientID , subscriptionName ) ) ; return true ; }
public class ElementSelectors { /** * Applies the wrapped ElementSelector ' s logic if and only if the * control element matches the given predicate . */ public static ElementSelector conditionalSelector ( final Predicate < ? super Element > predicate , final ElementSelector es ) { } }
if ( predicate == null ) { throw new IllegalArgumentException ( "predicate must not be null" ) ; } if ( es == null ) { throw new IllegalArgumentException ( "es must not be null" ) ; } return new ElementSelector ( ) { @ Override public boolean canBeCompared ( Element controlElement , Element testElement ) { return predicate . test ( controlElement ) && es . canBeCompared ( controlElement , testElement ) ; } } ;
public class Level2 { /** * Explain how the distance was computed . */ public String explainScore ( StringWrapper s , StringWrapper t ) { } }
StringBuffer buf = new StringBuffer ( ) ; BagOfTokens sBag = asBagOfTokens ( s ) ; BagOfTokens tBag = asBagOfTokens ( t ) ; double sumOverI = 0 ; for ( Iterator i = sBag . tokenIterator ( ) ; i . hasNext ( ) ; ) { Token tokenI = ( Token ) i . next ( ) ; buf . append ( "token=" + tokenI ) ; double maxOverJ = - Double . MAX_VALUE ; Token closestToI = null ; for ( Iterator j = tBag . tokenIterator ( ) ; j . hasNext ( ) ; ) { Token tokenJ = ( Token ) j . next ( ) ; double scoreItoJ = tokenDistance . score ( tokenI . getValue ( ) , tokenJ . getValue ( ) ) ; buf . append ( " dist(" + tokenJ . getValue ( ) + ")=" + scoreItoJ ) ; if ( scoreItoJ >= maxOverJ ) { maxOverJ = scoreItoJ ; closestToI = tokenJ ; } } sumOverI += maxOverJ ; buf . append ( " match=" + closestToI + " score=" + maxOverJ + "\n" ) ; } buf . append ( "total: " + sumOverI + "/" + sBag . size ( ) + " = " + score ( s , t ) + "\n" ) ; return buf . toString ( ) ;
public class Config { /** * Get a mapping from strength to curve desired . * @ return mapping from strength to curve name to use . */ public Map < Integer , String > getSecurityCurveMapping ( ) { } }
if ( curveMapping == null ) { curveMapping = parseSecurityCurveMappings ( getProperty ( SECURITY_CURVE_MAPPING ) ) ; } return Collections . unmodifiableMap ( curveMapping ) ;
public class MappingFilterParser {
    /**
     * ANTLR-generated rule method — do not hand-edit the parsing logic.
     *
     * Grammar: MappingFilter.g:86:1: input returns [String value] : (unquoted_string | quoted_string);
     *
     * @return the parsed string value, or null if recognition failed and was recovered
     */
    public final String input() throws RecognitionException {
        String value = null;
        String unquoted_string3 = null;
        String quoted_string4 = null;
        try {
            // MappingFilter.g:87:3: (unquoted_string | quoted_string)
            // Decide the alternative from a single token of lookahead.
            int alt6 = 2;
            int LA6_0 = input.LA(1);
            if ((LA6_0 == STRING)) {
                alt6 = 1;
            } else if (((LA6_0 >= STRING_WITH_QUOTE && LA6_0 <= STRING_WITH_QUOTE_DOUBLE))) {
                alt6 = 2;
            } else {
                NoViableAltException nvae = new NoViableAltException("", 6, 0, input);
                throw nvae;
            }
            switch (alt6) {
                case 1:
                    // MappingFilter.g:87:5: unquoted_string
                    {
                        pushFollow(FOLLOW_unquoted_string_in_input238);
                        unquoted_string3 = unquoted_string();
                        state._fsp--;
                        value = unquoted_string3;
                    }
                    break;
                case 2:
                    // MappingFilter.g:88:5: quoted_string
                    {
                        pushFollow(FOLLOW_quoted_string_in_input246);
                        quoted_string4 = quoted_string();
                        state._fsp--;
                        value = quoted_string4;
                    }
                    break;
            }
        } catch (RecognitionException re) {
            // Standard ANTLR error recovery: report, resynchronize, return null value.
            reportError(re);
            recover(input, re);
        } finally {
            // do for sure before leaving
        }
        return value;
    }
}
public class ActionContext { /** * Return cached object by key . The key will be concatenated with * current session id when fetching the cached object * @ param key * @ param < T > * the object type * @ return the cached object */ public < T > T cached ( String key ) { } }
H . Session sess = session ( ) ; if ( null != sess ) { return sess . cached ( key ) ; } else { return app ( ) . cache ( ) . get ( key ) ; }
public class ResourceLeakDetector {
    /**
     * This method is called when an untraced leak is detected. It can be overridden for tracking how many times leaks
     * have been detected.
     *
     * @param resourceType simple name of the leaked resource type, interpolated into the log message
     */
    protected void reportUntracedLeak(String resourceType) {
        // Single parameterized log call. The message text is user-facing diagnostic
        // output (including the documentation URL), so it must not be reworded casually.
        logger.error("LEAK: {}.release() was not called before it's garbage-collected. "
                + "Enable advanced leak reporting to find out where the leak occurred. "
                + "To enable advanced leak reporting, "
                + "specify the JVM option '-D{}={}' or call {}.setLevel() "
                + "See http://netty.io/wiki/reference-counted-objects.html for more information.",
                resourceType, PROP_LEVEL, Level.ADVANCED.name().toLowerCase(), simpleClassName(this));
    }
}
public class SleUtility { /** * Sorts and groups a set of entries . * @ param values List of Extendable implementations . * @ param groups Group items to group by . * @ param sort Field to sort on . * @ param ascending Sort ascending / descending * @ return Grouped and sorted list of entries . */ public static < T extends Extendable > List < T > sortAndGroup ( final List < T > values , final Group [ ] groups , final Sort sort , final boolean ascending ) { } }
List < T > list = sort ( values , sort , ascending ) ; list = group ( list , groups ) ; return list ;
public class CmsShell { /** * Gets the top of thread - local shell stack , or null if it is empty . * @ return the top of the shell stack */ public static CmsShell getTopShell ( ) { } }
ArrayList < CmsShell > shells = SHELL_STACK . get ( ) ; if ( shells . isEmpty ( ) ) { return null ; } return shells . get ( shells . size ( ) - 1 ) ;
public class DeleteUserDefinedFunctionRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeleteUserDefinedFunctionRequest deleteUserDefinedFunctionRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( deleteUserDefinedFunctionRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteUserDefinedFunctionRequest . getCatalogId ( ) , CATALOGID_BINDING ) ; protocolMarshaller . marshall ( deleteUserDefinedFunctionRequest . getDatabaseName ( ) , DATABASENAME_BINDING ) ; protocolMarshaller . marshall ( deleteUserDefinedFunctionRequest . getFunctionName ( ) , FUNCTIONNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class EventBus { /** * Sends an event that will notify any registered listener for that class . * < p > Listeners are looked up by an < b > exact match < / b > on the class of the object , as returned by * { @ code event . getClass ( ) } . Listeners of a supertype won ' t be notified . * < p > The listeners are invoked on the calling thread . It ' s their responsibility to schedule event * processing asynchronously if needed . */ public void fire ( Object event ) { } }
LOG . debug ( "[{}] Firing an instance of {}: {}" , logPrefix , event . getClass ( ) , event ) ; // if the exact match thing gets too cumbersome , we can reconsider , but I ' d like to avoid // scanning all the keys with instanceof checks . Class < ? > eventClass = event . getClass ( ) ; for ( Consumer < ? > l : listeners . get ( eventClass ) ) { @ SuppressWarnings ( "unchecked" ) Consumer < Object > listener = ( Consumer < Object > ) l ; LOG . debug ( "[{}] Notifying {} of {}" , logPrefix , listener , event ) ; listener . accept ( event ) ; }
public class Packages {
    /**
     * Computes the BND clause from the given set of packages.
     * Null entries are skipped; the remaining names are joined with ", ".
     *
     * @param packages the packages
     * @return the clause (empty string for an empty list)
     */
    public static String toClause(List<String> packages) {
        // Stdlib equivalent of Guava's Joiner.on(", ").skipNulls().join(packages),
        // removing the third-party dependency for this method.
        return packages.stream()
                .filter(Objects::nonNull)
                .collect(Collectors.joining(", "));
    }
}
public class Speller { /** * Match the last letter of the candidate against two or more letters of the word . */ private int matchAnyToOne ( final int wordIndex , final int candIndex ) { } }
if ( replacementsAnyToOne . containsKey ( candidate [ candIndex ] ) ) { for ( final char [ ] rep : replacementsAnyToOne . get ( candidate [ candIndex ] ) ) { int i = 0 ; while ( i < rep . length && ( wordIndex + i ) < wordLen && rep [ i ] == wordProcessed [ wordIndex + i ] ) { i ++ ; } if ( i == rep . length ) { return i ; } } } return 0 ;
public class Configuration { /** * Return the qualified name of the < code > ClassDoc < / code > if it ' s qualifier is not excluded . Otherwise , * return the unqualified < code > ClassDoc < / code > name . * @ param cd the < code > ClassDoc < / code > to check . */ public String getClassName ( ClassDoc cd ) { } }
PackageDoc pd = cd . containingPackage ( ) ; if ( pd != null && shouldExcludeQualifier ( cd . containingPackage ( ) . name ( ) ) ) { return cd . name ( ) ; } else { return cd . qualifiedName ( ) ; }
public class ApiOvhIpLoadbalancing { /** * Ssl for this iplb * REST : GET / ipLoadbalancing / { serviceName } / ssl * @ param type [ required ] Filter the value of type property ( = ) * @ param serial [ required ] Filter the value of serial property ( like ) * @ param fingerprint [ required ] Filter the value of fingerprint property ( like ) * @ param serviceName [ required ] The internal name of your IP load balancing */ public ArrayList < Long > serviceName_ssl_GET ( String serviceName , String fingerprint , String serial , OvhSslTypeEnum type ) throws IOException { } }
String qPath = "/ipLoadbalancing/{serviceName}/ssl" ; StringBuilder sb = path ( qPath , serviceName ) ; query ( sb , "fingerprint" , fingerprint ) ; query ( sb , "serial" , serial ) ; query ( sb , "type" , type ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t2 ) ;
public class ByteArray { /** * 写入一组byte值 * @ param values 一组byte值 */ public void write ( byte ... values ) { } }
if ( count >= content . length - values . length ) { byte [ ] ns = new byte [ content . length + values . length ] ; System . arraycopy ( content , 0 , ns , 0 , count ) ; this . content = ns ; } System . arraycopy ( values , 0 , content , count , values . length ) ; count += values . length ;
public class ClusterState {
    /**
     * Starts the join to the cluster.
     *
     * @return a future that completes when the join attempt finishes; the cached
     *         {@code joinFuture} field is cleared once it settles so a later
     *         attempt can start fresh
     */
    private synchronized CompletableFuture<Void> join() {
        joinFuture = new CompletableFuture<>();
        // The actual join work runs asynchronously on the server's thread context.
        context.getThreadContext().executor().execute(() -> {
            // Transition the server to the appropriate state for the local member type.
            context.transition(member.type());
            // Attempt to join the cluster. If the local member is ACTIVE then failing to join the cluster
            // will result in the member attempting to get elected. This allows initial clusters to form.
            List<MemberState> activeMembers = getActiveMemberStates();
            if (!activeMembers.isEmpty()) {
                join(getActiveMemberStates().iterator());
            } else {
                // No active members to contact: the join trivially succeeds.
                joinFuture.complete(null);
            }
        });
        // Clear the cached future when it completes (success or failure).
        return joinFuture.whenComplete((result, error) -> joinFuture = null);
    }
}
public class DestinationManager {
    /**
     * Checks that the queuePointLocalising size is valid: a non-SERVICE destination
     * must have at least one queue point, while a SERVICE destination must have none.
     */
    private void checkQueuePointLocalizingSize(Set<String> queuePointLocalizingMEs,
            DestinationDefinition destinationDefinition) {
        // There must be at least one queue point (unless this is a SERVICE destination,
        // which must have none). The invalid combinations reduce to isService == hasQueuePoints.
        boolean isService = destinationDefinition.getDestinationType() == DestinationType.SERVICE;
        boolean hasQueuePoints = queuePointLocalizingMEs.size() != 0;
        if (isService == hasQueuePoints) {
            throw new SIErrorException(nls.getFormattedMessage(
                    "INTERNAL_CONFIGURATION_ERROR_CWSIP0006",
                    new Object[] { "DestinationManager", "1:4867:1.508.1.7", destinationDefinition.getName() },
                    null));
        }
    }
}
public class Calendar { /** * Validate a single field of this calendar . Subclasses should * override this method to validate any calendar - specific fields . * Generic fields can be handled by * < code > Calendar . validateField ( ) < / code > . * @ see # validateField ( int , int , int ) */ protected void validateField ( int field ) { } }
int y ; switch ( field ) { case DAY_OF_MONTH : y = handleGetExtendedYear ( ) ; validateField ( field , 1 , handleGetMonthLength ( y , internalGet ( MONTH ) ) ) ; break ; case DAY_OF_YEAR : y = handleGetExtendedYear ( ) ; validateField ( field , 1 , handleGetYearLength ( y ) ) ; break ; case DAY_OF_WEEK_IN_MONTH : if ( internalGet ( field ) == 0 ) { throw new IllegalArgumentException ( "DAY_OF_WEEK_IN_MONTH cannot be zero" ) ; } validateField ( field , getMinimum ( field ) , getMaximum ( field ) ) ; break ; default : validateField ( field , getMinimum ( field ) , getMaximum ( field ) ) ; break ; }
public class FSNamesystem {
    /**
     * Close down this file system manager.
     * Causes heartbeat and lease daemons to stop; waits briefly for
     * them to finish, but a short timeout returns control back to caller.
     */
    public void close() {
        // Signal all background loops to stop before interrupting them.
        fsRunning = false;
        try {
            if (pendingReplications != null) {
                pendingReplications.stop();
            }
            if (hbthread != null) {
                hbthread.interrupt();
            }
            if (underreplthread != null) {
                underreplthread.interrupt();
            }
            if (overreplthread != null) {
                overreplthread.interrupt();
            }
            if (raidEncodingTaskThread != null) {
                raidEncodingTaskThread.interrupt();
            }
            if (dnthread != null) {
                dnthread.interrupt();
            }
            if (automaticEditsRollingThread != null) {
                automaticEditsRoller.stop();
                // We cannot interrupt roller thread. For manual failover, we want
                // the edits file operations to finish.
                automaticEditsRollingThread.join();
            }
            if (safeMode != null) {
                safeMode.shutdown();
            }
        } catch (Exception e) {
            LOG.warn("Exception shutting down FSNamesystem", e);
        } finally {
            // using finally to ensure we also wait for lease daemon
            try {
                LOG.info("Stopping LeaseManager");
                stopLeaseMonitor();
                if (InjectionHandler.trueCondition(InjectionEvent.FSNAMESYSTEM_CLOSE_DIRECTORY)) {
                    if (dir != null) {
                        LOG.info("Stopping directory (fsimage, fsedits)");
                        dir.close();
                    }
                }
            } catch (InterruptedException ie) {
                // Swallowed intentionally: shutdown is best-effort and must return promptly.
            } catch (IOException ie) {
                LOG.error("Error closing FSDirectory", ie);
                IOUtils.cleanup(LOG, dir);
            }
        }
    }
}
public class EndpointUser {
    /**
     * Custom attributes that describe the user by associating a name with an array of values. For example, an
     * attribute named "interests" might have the following values: ["science", "politics", "travel"]. You can use
     * these attributes as selection criteria when you create segments.
     *
     * The Amazon Pinpoint console can't display attribute names that include the following characters: hash/pound
     * sign (#), colon (:), question mark (?), backslash (\), and forward slash (/). For this reason, you should avoid
     * using these characters in the names of custom attributes.
     *
     * @return the custom user attributes map
     */
    public java.util.Map<String, java.util.List<String>> getUserAttributes() {
        // Plain accessor: returns the internal map reference directly.
        // NOTE(review): callers receive a live reference — confirm whether a defensive
        // copy is expected anywhere before mutating the returned map.
        return userAttributes;
    }
}
public class JMLambda {
    /**
     * Partition by map: splits the collection into elements matching the predicate
     * (key {@code true}) and those that do not (key {@code false}).
     *
     * @param <T> the element type
     * @param collection the collection to partition
     * @param predicate the partitioning predicate
     * @return map from boolean to the list of elements in that partition
     */
    public static <T> Map<Boolean, List<T>> partitionBy(Collection<T> collection,
            Predicate<T> predicate) {
        return collection.stream()
                .collect(java.util.stream.Collectors.partitioningBy(predicate));
    }
}
public class PageAutoDialect {
    /**
     * Initializes the helper dialect from its class name and configures it.
     *
     * @param dialectClass fully qualified (or aliased) name of the dialect implementation
     * @param properties configuration properties passed to the dialect
     * @return the instantiated and configured dialect
     * @throws PageException if the name is empty, the class is not an
     *         AbstractHelperDialect, or instantiation fails
     */
    private AbstractHelperDialect initDialect(String dialectClass, Properties properties) {
        AbstractHelperDialect dialect;
        // The helper property is mandatory; fail with a descriptive (Chinese) message.
        if (StringUtil.isEmpty(dialectClass)) {
            throw new PageException("使用 PageHelper 分页插件时,必须设置 helper 属性");
        }
        try {
            Class sqlDialectClass = resloveDialectClass(dialectClass);
            // The resolved class must implement AbstractHelperDialect.
            if (AbstractHelperDialect.class.isAssignableFrom(sqlDialectClass)) {
                dialect = (AbstractHelperDialect) sqlDialectClass.newInstance();
            } else {
                throw new PageException("使用 PageHelper 时,方言必须是实现 " + AbstractHelperDialect.class.getCanonicalName() + " 接口的实现类!");
            }
        } catch (Exception e) {
            // Wrap any resolution/instantiation failure, preserving the cause.
            throw new PageException("初始化 helper [" + dialectClass + "]时出错:" + e.getMessage(), e);
        }
        dialect.setProperties(properties);
        return dialect;
    }
}
public class SparkLine { /** * Adds a new value to the DATA _ LIST of the sparkline * @ param DATA */ public void addDataPoint ( final double DATA ) { } }
for ( DataPoint dataPoint : DATA_LIST ) { if ( System . currentTimeMillis ( ) - dataPoint . getTimeStamp ( ) > timeFrame ) { trashList . add ( dataPoint ) ; } } for ( DataPoint dataPoint : trashList ) { DATA_LIST . remove ( dataPoint ) ; } trashList . clear ( ) ; DATA_LIST . add ( new DataPoint ( System . currentTimeMillis ( ) , DATA ) ) ; init ( INNER_BOUNDS . width , INNER_BOUNDS . height ) ; repaint ( INNER_BOUNDS ) ;
public class FunctionSignature { /** * Parses a signature . */ public static FunctionSignature valueOf ( String serial ) { } }
int paramStart = serial . indexOf ( "(" ) ; int paramEnd = serial . indexOf ( ")" ) ; if ( paramStart < 0 || paramEnd != serial . length ( ) - 1 ) throw new IllegalArgumentException ( "Malformed method signature: " + serial ) ; String function = serial . substring ( 0 , paramStart ) . trim ( ) ; String arguments = serial . substring ( paramStart + 1 , paramEnd ) ; StringTokenizer tokenizer = new StringTokenizer ( arguments , ", " ) ; String [ ] names = new String [ tokenizer . countTokens ( ) ] ; for ( int i = 0 ; i < names . length ; ++ i ) names [ i ] = tokenizer . nextToken ( ) ; return new FunctionSignature ( function , names ) ;
public class AbstractCentralAuthenticationService {
    /**
     * Evaluate proxied service if needed: when the ticket-granting ticket was issued
     * via a proxy, verify that the proxying service is registered and authorized to
     * proxy; otherwise reject the attempt.
     *
     * @param service the service
     * @param ticketGrantingTicket the ticket granting ticket
     * @param registeredService the registered service
     */
    protected void evaluateProxiedServiceIfNeeded(final Service service,
            final TicketGrantingTicket ticketGrantingTicket, final RegisteredService registeredService) {
        val proxiedBy = ticketGrantingTicket.getProxiedBy();
        if (proxiedBy != null) {
            LOGGER.debug("Ticket-granting ticket is proxied by [{}]. Locating proxy service in registry...", proxiedBy.getId());
            val proxyingService = this.servicesManager.findServiceBy(proxiedBy);
            if (proxyingService != null) {
                LOGGER.debug("Located proxying service [{}] in the service registry", proxyingService);
                // A registered proxying service must still be explicitly allowed to proxy.
                if (!proxyingService.getProxyPolicy().isAllowedToProxy()) {
                    LOGGER.warn("Found proxying service [{}], but it is not authorized to fulfill the proxy attempt made by [{}]", proxyingService.getId(), service.getId());
                    throw new UnauthorizedProxyingException(UnauthorizedProxyingException.MESSAGE + registeredService.getId());
                }
            } else {
                // Unknown proxying service: never allow the proxy attempt.
                LOGGER.warn("No proxying service found. Proxy attempt by service [{}] (registered service [{}]) is not allowed.", service.getId(), registeredService.getId());
                throw new UnauthorizedProxyingException(UnauthorizedProxyingException.MESSAGE + registeredService.getId());
            }
        } else {
            LOGGER.trace("Ticket-granting ticket is not proxied by another service");
        }
    }
}
public class SibRaXaResource {
    /**
     * Commits the transaction branch identified by the given XID, then records
     * the completed state for later lookup.
     *
     * (non-Javadoc)
     *
     * @see javax.transaction.xa.XAResource#commit(javax.transaction.xa.Xid, boolean)
     */
    public void commit(final Xid xid, final boolean onePhase) throws XAException {
        final String methodName = "commit";
        if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) {
            SibTr.entry(this, TRACE, methodName, new Object[] { xid, Boolean.valueOf(onePhase) });
        }
        // Delegate the actual commit to the underlying SI XA resource.
        _siXaResource.commit(xid, onePhase);
        // Add the value false to indicate we have committed, keyed off the xid,
        // to the hashtable of transaction states.
        synchronized (_transactionStates) {
            _transactionStates.put(xid, Boolean.FALSE);
            if (TraceComponent.isAnyTracingEnabled() && TRACE.isDebugEnabled()) {
                StringBuffer sb = new StringBuffer("After adding the xid ");
                sb.append(xid);
                sb.append(" the hashtable of transactionStates now contains ");
                sb.append(_transactionStates.size());
                sb.append(" entries");
                SibTr.debug(this, TRACE, sb.toString());
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) {
            SibTr.exit(this, TRACE, methodName);
        }
    }
}
public class SraReader { /** * Read a run set from the specified input stream . * @ param inputStream input stream , must not be null * @ return a run set read from the specified input stream * @ throws IOException if an I / O error occurs */ public static RunSet readRunSet ( final InputStream inputStream ) throws IOException { } }
checkNotNull ( inputStream ) ; try ( BufferedReader reader = new BufferedReader ( new InputStreamReader ( inputStream ) ) ) { return readRunSet ( reader ) ; }
public class SessionApi { /** * Get information about the current user * Get information about the current user , including any existing media logins , calls , and interactions . The returned user information includes state recovery information about the active session . You can make this request at startup to check for an existing session . * @ return ApiResponse & lt ; CurrentSession & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < CurrentSession > getCurrentSessionWithHttpInfo ( ) throws ApiException { } }
com . squareup . okhttp . Call call = getCurrentSessionValidateBeforeCall ( null , null ) ; Type localVarReturnType = new TypeToken < CurrentSession > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class JKTimeObject { /** * After . * @ param thareTime the thare time * @ return true , if successful */ public boolean after ( JKTimeObject thareTime ) { } }
if ( getYear ( ) == thareTime . getYear ( ) || thareTime . getYear ( ) > getYear ( ) ) { System . out . println ( "after:: Year true" ) ; if ( thareTime . getMonth ( ) < getMonth ( ) ) { return true ; } if ( getMonth ( ) == thareTime . getMonth ( ) ) { System . out . println ( "after:: Month true" ) ; if ( thareTime . getDay ( ) < getDay ( ) ) { System . out . println ( "after:: Day true" ) ; return true ; } if ( getDay ( ) == thareTime . getDay ( ) ) { if ( getHour ( ) == thareTime . getHour ( ) ) { if ( thareTime . getMunite ( ) < getMunite ( ) ) { return true ; } } if ( getHour ( ) > thareTime . getHour ( ) ) { return true ; } } } } return false ;
public class LdapAdapter {
    /**
     * Method to get the ancestors of the given entity.
     * Walks the DN hierarchy upwards from the entry's DN (up to the level requested
     * by the control), reads each ancestor entry from LDAP, and chains the ancestors
     * onto the entity as successive DO_PARENT children.
     *
     * @param entity the entity to attach the ancestor chain to
     * @param ldapEntry the LDAP entry whose DN is the starting point
     * @param ancesCtrl the ancestor control; null means "no ancestors requested"
     * @throws WIMException
     */
    private void getAncestors(Entity entity, LdapEntry ldapEntry, AncestorControl ancesCtrl) throws WIMException {
        if (ancesCtrl == null) {
            return;
        }
        List<String> propNames = ancesCtrl.getProperties();
        int level = ancesCtrl.getLevel();
        List<String> ancesTypes = getEntityTypes(ancesCtrl);
        String[] bases = getBases(ancesCtrl, ancesTypes);
        String dn = ldapEntry.getDN();
        List<String> ancestorDns = iLdapConn.getAncestorDNs(dn, level);
        Entity parentEntity = entity;
        for (int i = 0; i < ancestorDns.size(); i++) {
            String ancesDn = ancestorDns.get(i);
            if (ancesDn.length() == 0) {
                // Skip empty DN segments.
                continue;
            }
            // Only materialize ancestors that fall under one of the configured search bases.
            if (LdapHelper.isUnderBases(ancesDn, bases)) {
                LdapEntry ancesEntry = iLdapConn.getEntityByIdentifier(ancesDn, null, null, ancesTypes, propNames, false, false);
                String ancesType = ancesEntry.getType();
                Entity ancestor = null;
                if (ancesTypes.contains(ancesType)) {
                    // Requested ancestor type: populate the requested properties.
                    ancestor = createEntityFromLdapEntry(parentEntity, SchemaConstants.DO_PARENT, ancesEntry, propNames);
                } else {
                    // Other type: create the ancestor without property retrieval.
                    ancestor = createEntityFromLdapEntry(parentEntity, SchemaConstants.DO_PARENT, ancesEntry, null);
                }
                // Each ancestor becomes the parent for the next one up the chain.
                parentEntity = ancestor;
            }
        }
    }
}
public class EncodedElement {
    /**
     * Add a number of bits from a long to the end of this list's data. Will
     * add a new element if necessary. The bits stored are taken from the lower-
     * order of input.
     *
     * @param input Long containing bits to append to end.
     * @param bitCount Number of bits to append.
     * @return EncodedElement which actually contains the appended value.
     */
    public EncodedElement addLong(long input, int bitCount) {
        if (next != null) {
            // Not the tail of the chain: find the real end and append there.
            EncodedElement end = EncodedElement.getEnd_S(next);
            return end.addLong(input, bitCount);
        } else if (data.length * 8 <= usableBits + bitCount) {
            // Not enough free bits left in this element's byte buffer.
            // create child and attach to next.
            // Set child's offset appropriately (i.e, manually set usable bits)
            int tOff = usableBits % 8;
            int size = data.length / 2 + 1;
            // guarantee that our new element can store our given value
            // NOTE(review): this compares a byte count (size) against a bit count
            // (bitCount); the *10 head room keeps it safe in practice, but the
            // units look mixed — confirm intent.
            if (size < bitCount)
                size = bitCount * 10;
            next = new EncodedElement(size, tOff);
            // add int to child
            return next.addLong(input, bitCount);
        }
        // At this point, we have the space, and we are the end of the chain.
        int startPos = this.usableBits;
        byte[] dest = this.data;
        EncodedElement.addLong(input, bitCount, startPos, dest);
        usableBits += bitCount;
        return this;
    }
}
public class A_CmsPublishGroupHelper { /** * Returns the localized name for a given publish group based on its age . < p > * @ param resources the resources of the publish group * @ param age the age of the publish group * @ return the localized name of the publish group */ public String getPublishGroupName ( List < RESOURCE > resources , GroupAge age ) { } }
long groupDate = getDateLastModified ( resources . get ( 0 ) ) ; String groupName ; switch ( age ) { case young : groupName = Messages . get ( ) . getBundle ( m_locale ) . key ( Messages . GUI_GROUPNAME_SESSION_1 , new Date ( groupDate ) ) ; break ; case medium : groupName = Messages . get ( ) . getBundle ( m_locale ) . key ( Messages . GUI_GROUPNAME_DAY_1 , new Date ( groupDate ) ) ; break ; case old : default : groupName = Messages . get ( ) . getBundle ( m_locale ) . key ( Messages . GUI_GROUPNAME_EVERYTHING_ELSE_0 ) ; break ; } return groupName ;
public class ZipUtil {
    /**
     * See {@link #iterate(InputStream, ZipEntryCallback, Charset)}. It is a
     * shorthand where no Charset is specified.
     *
     * @param is input ZIP stream (it will not be closed automatically).
     * @param entryNames names of entries to iterate
     * @param action action to be called for each entry.
     * @see ZipEntryCallback
     * @see #iterate(File, String[], ZipEntryCallback)
     */
    public static void iterate(InputStream is, String[] entryNames, ZipEntryCallback action) {
        // Delegate to the charset-aware overload with a null charset.
        iterate(is, entryNames, action, null);
    }
}
public class JournalingBlockBasedDataStore {
    /**
     * Extends the store files from the old block count to the new one.
     * In this journaling variant the extension is routed through the journal
     * rather than applied directly to the store files.
     *
     * @see net.timewalker.ffmq4.storage.data.impl.AbstractBlockBasedDataStore#extendStoreFiles(int, int)
     */
    @Override
    protected void extendStoreFiles(int oldBlockCount, int newBlockCount) throws DataStoreException {
        journal.extendStore(blockSize, oldBlockCount, newBlockCount);
    }
}
public class GuestAliasManager {
    /**
     * Defines an alias for a guest account in a virtual machine.
     * After the alias is defined, the ESXi Server will use the alias to authenticate
     * guest operations requests. This adds the given VMware SSO Server's certificate
     * and a subject to the alias store of the specified user in the guest. In order
     * to add an alias to the guest, you must supply an existing valid credential.
     * This can be any instance of GuestAuthentication, but must be valid for the
     * specified guest username.
     *
     * @param virtualMachine Virtual machine to perform the operation on.
     * @param guestAuthentication The guest authentication data for this operation. Must satisfy authentication requirements for a guest account on the specified virtual machine.
     * @param userName Username for the guest account on the virtual machine.
     * @param mapCert Indicates whether the certificate associated with the alias should be mapped. If mapped, guest operation requests using the alias need not specify the guest account username in the {@link com.vmware.vim25.SAMLTokenAuthentication} object; otherwise the request must specify the username.
     * @param base64Cert X.509 certificate from the VMware SSO Server, in base64 encoded DER format, used by the ESXi Server to authenticate guest operation requests.
     * @param guestAuthAliasInfo Specifies the subject name for authentication (corresponds to the Subject element in SAML tokens).
     * @throws GuestComponentsOutOfDate Thrown if the guest agent is too old to support the operation.
     * @throws GuestMultipleMappings Thrown if mapCert is set and the certificate already exists in the mapping file for a different user.
     * @throws GuestOperationsFault Thrown if there is an error processing a guest operation.
     * @throws GuestOperationsUnavailable Thrown if the VM agent for guest operations is not running.
     * @throws GuestPermissionDenied Thrown if there are insufficient permissions in the guest OS.
     * @throws InvalidArgument Thrown if the certificate is invalid.
     * @throws InvalidGuestLogin Thrown if the guest authentication information was not accepted.
     * @throws InvalidPowerState Thrown if the VM is not powered on.
     * @throws InvalidState Thrown if the operation cannot be performed because of the virtual machine's current state.
     * @throws OperationDisabledByGuest Thrown if the operation is not enabled due to guest agent configuration.
     * @throws OperationNotSupportedByGuest Thrown if the operation is not supported by the guest OS.
     * @throws RuntimeFault Thrown for any runtime fault not covered by the other faults, e.g. a communication error.
     * @throws TaskInProgress Thrown if the virtual machine is busy.
     * @throws RemoteException
     */
    public void addGuestAlias(VirtualMachine virtualMachine, GuestAuthentication guestAuthentication, String userName, boolean mapCert, String base64Cert, GuestAuthAliasInfo guestAuthAliasInfo)
            throws GuestComponentsOutOfDate, GuestMultipleMappings, GuestOperationsFault, GuestOperationsUnavailable, GuestPermissionDenied, InvalidArgument, InvalidGuestLogin,
            InvalidPowerState, InvalidState, OperationDisabledByGuest, OperationNotSupportedByGuest, RuntimeFault, TaskInProgress, RemoteException {
        // Pure delegation to the vSphere web service, targeting this manager's MOR
        // and the VM's MOR.
        getVimService().addGuestAlias(getMOR(), virtualMachine.getMOR(), guestAuthentication, userName, mapCert, base64Cert, guestAuthAliasInfo);
    }
}
public class DatastoreEmulator { /** * Starts the emulator . It is the caller ' s responsibility to call { @ link # stop } . Note that * receiving an exception does not indicate that the server did not start . We recommend calling * { @ link # stop } to ensure the server is not running regardless of the result of this method . * @ param emulatorDir The path to the emulator directory , e . g . / usr / local / cloud - datastore - emulator * @ param projectId The project ID * @ param commandLineOptions Command line options to pass to the emulator on startup * @ throws DatastoreEmulatorException If { @ link # start } has already been called or the server does * not start successfully . * @ deprecated prefer setting options in the emulator options and calling { # start ( ) } . */ @ Deprecated public synchronized void start ( String emulatorDir , String projectId , String ... commandLineOptions ) throws DatastoreEmulatorException { } }
checkNotNull ( emulatorDir , "emulatorDir cannot be null" ) ; checkNotNull ( projectId , "projectId cannot be null" ) ; checkState ( state == State . NEW , "Cannot call start() more than once." ) ; try { startEmulatorInternal ( emulatorDir + "/cloud_datastore_emulator" , projectId , Arrays . asList ( commandLineOptions ) ) ; state = State . STARTED ; } finally { if ( state != State . STARTED ) { // If we ' re not able to start the server we don ' t want people trying again . Just move it // straight to the STOPPED state . state = State . STOPPED ; } }
public class Subframe_LPC {
    /**
     * Quantize coefficients to integer values of the given precision, and
     * calculate the shift needed.
     *
     * @param coefficients values to quantize. These values will not be changed.
     * @param dest destination for quantized values.
     * @param order number of values to quantize. First value skipped, coefficients
     *        array must be at least order+1 in length.
     * @param precision number of signed bits to use for coefficients (must be in range 2-15, inclusive).
     * @return the shift applied to the coefficients before truncation (0 if the
     *         values had to be capped instead of shifted)
     */
    private static int quantizeCoefficients(double[] coefficients, int[] dest, int order, int precision) {
        assert (precision >= 2 && precision <= 15);
        assert (coefficients.length >= order + 1);
        assert (dest.length >= order + 1);
        if (precision < 2 || precision > 15)
            throw new IllegalArgumentException("Error! precision must be between 2 and 15, inclusive.");
        int shiftApplied = 0;
        // minus an extra bit for sign.
        // NOTE(review): this reserves precision-2 magnitude bits rather than the
        // usual precision-1 for a signed value — confirm that the extra bit of
        // head room is intentional.
        int maxValAllowed = (1 << (precision - 2)) - 1;
        int minValAllowed = -1 * maxValAllowed - 1;
        // Find the largest magnitude among the coefficients (index 0 is skipped).
        double maxVal = 0;
        for (int i = 1; i <= order; i++) {
            double temp = coefficients[i];
            if (temp < 0)
                temp *= -1;
            if (temp > maxVal)
                maxVal = temp;
        }
        // find shift to use (by max value): largest shift whose scaled max fits.
        for (shiftApplied = 15; shiftApplied > 0; shiftApplied--) {
            int temp = (int) (maxVal * (1 << shiftApplied));
            if (temp <= maxValAllowed)
                break;
        }
        if (maxVal > maxValAllowed) {
            // no shift should have been applied (the loop above fell through to 0).
            // ensure max value is not too large; cap all values as necessary.
            for (int i = 1; i <= order; i++) {
                double temp = coefficients[i];
                if (temp < 0)
                    temp = temp * -1;
                if (temp > maxValAllowed) {
                    // Saturate to the representable range, preserving sign.
                    if (coefficients[i] < 0)
                        dest[i] = minValAllowed;
                    else
                        dest[i] = maxValAllowed;
                } else
                    dest[i] = (int) coefficients[i];
            }
        } else {
            // shift and quantize all values by found shift (round half away from zero).
            for (int i = 1; i <= order; i++) {
                double temp = coefficients[i] * (1 << shiftApplied);
                temp = (temp > 0) ? temp + 0.5 : temp - 0.5;
                dest[i] = (int) temp;
            }
        }
        return shiftApplied;
    }
}
public class Specification { /** * Specifies that no exception of the given type should be * thrown , failing with a { @ link UnallowedExceptionThrownError } otherwise . * @ param type the exception type that should not be thrown */ public void notThrown ( Class < ? extends Throwable > type ) { } }
Throwable thrown = getSpecificationContext ( ) . getThrownException ( ) ; if ( thrown == null ) return ; if ( type . isAssignableFrom ( thrown . getClass ( ) ) ) { throw new UnallowedExceptionThrownError ( type , thrown ) ; } ExceptionUtil . sneakyThrow ( thrown ) ;
public class CollectionPartitionsInner {
    /**
     * Retrieves the usages (most recent storage data) for the given collection, split by partition.
     *
     * @param resourceGroupName Name of an Azure resource group.
     * @param accountName Cosmos DB database account name.
     * @param databaseRid Cosmos DB database rid.
     * @param collectionRid Cosmos DB collection rid.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the List&lt;PartitionUsageInner&gt; object if successful.
     */
    public List<PartitionUsageInner> listUsages(String resourceGroupName, String accountName, String databaseRid, String collectionRid) {
        // Synchronous wrapper: block on the async call and unwrap the response body.
        return listUsagesWithServiceResponseAsync(resourceGroupName, accountName, databaseRid, collectionRid).toBlocking().single().body();
    }
}
public class DbCloser {
    /**
     * Handle closing a connection. Calls
     * {@link #close(Connection, Logger, Object)} with null for name.
     *
     * @param conn The connection to close.
     * @param logExceptionTo The log to log any {@link SQLException} to. If this is
     *        null, the logger for the DbCloser class will be used.
     * @return null as a convenience to null out the closed object.
     */
    public static Connection close(Connection conn, Logger logExceptionTo) {
        return close(conn, logExceptionTo, null);
    }
}
public class ModulesEx { /** * Create a single module that derived from all bootstrap annotations * on a class , where that class itself is a module . * For example , * < pre > * { @ code * public class MainApplicationModule extends AbstractModule { * @ Override * public void configure ( ) { * / / Application specific bindings here * public static void main ( String [ ] args ) { * Guice . createInjector ( ModulesEx . fromClass ( MainApplicationModule . class ) ) ; * < / pre > * @ author elandau */ public static Module fromClass ( final Class < ? > cls , final boolean override ) { } }
List < Module > modules = new ArrayList < > ( ) ; // Iterate through all annotations of the main class , create a binding for the annotation // and add the module to the list of modules to install for ( final Annotation annot : cls . getDeclaredAnnotations ( ) ) { final Class < ? extends Annotation > type = annot . annotationType ( ) ; Bootstrap bootstrap = type . getAnnotation ( Bootstrap . class ) ; if ( bootstrap != null ) { LOG . info ( "Adding Module {}" , bootstrap . module ( ) ) ; try { modules . add ( bootstrap . module ( ) . getConstructor ( type ) . newInstance ( annot ) ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } } } try { if ( override ) { return Modules . override ( combineAndOverride ( modules ) ) . with ( ( Module ) cls . newInstance ( ) ) ; } else { return Modules . combine ( Modules . combine ( modules ) , ( Module ) cls . newInstance ( ) ) ; } } catch ( Exception e ) { throw new RuntimeException ( e ) ; }
public class RepositoryReaderImpl {
    /**
     * returns log records from the binary repository that are within the date range
     * and which satisfy condition of the filter as specified by the parameters.
     *
     * @param beginTime the minimum {@link Date} value that the returned records can have
     * @param endTime the maximum {@link Date} value that the returned records can have
     * @param filter an instance implementing {@link LogRecordHeaderFilter} interface to verify one record at a time.
     * @return the iterable instance of a list of log records within a process that
     *         are within the parameter range and satisfy the condition. If no
     *         records meet the criteria, an Iterable is returned with no entries
     */
    public Iterable<ServerInstanceLogRecordList> getLogLists(Date beginTime, Date endTime, final LogRecordHeaderFilter filter) {
        // -1 acts as the "unbounded" sentinel for both ends of the time window.
        final long min = beginTime == null ? -1 : beginTime.getTime();
        final long max = endTime == null ? -1 : endTime.getTime();
        // Locate the starting log-instance browser for the window.
        LogRepositoryBrowser logs;
        if (beginTime == null) {
            logs = logInstanceBrowser.findNext((LogRepositoryBrowser) null, max);
        } else {
            logs = logInstanceBrowser.findByMillis(min);
            // Get first instance if min is earlier than the first record
            if (logs == null) {
                logs = logInstanceBrowser.findNext((LogRepositoryBrowser) null, max);
            }
        }
        // Same resolution for the trace repository, if one is configured.
        LogRepositoryBrowser traces;
        if (traceInstanceBrowser != null) {
            if (beginTime == null) {
                traces = traceInstanceBrowser.findNext((LogRepositoryBrowser) null, max);
            } else {
                traces = traceInstanceBrowser.findByMillis(min);
                // Get first instance if min is earlier than the first record
                if (traces == null) {
                    traces = traceInstanceBrowser.findNext((LogRepositoryBrowser) null, max);
                }
            }
        } else {
            traces = null;
        }
        final LogRepositoryBrowser finalLogs = logs;
        final LogRepositoryBrowser finalTraces = traces;
        // Lazily build the per-process record lists: each iterator queries the
        // browsers with the resolved [min, max] window and the caller's filter.
        return new Iterable<ServerInstanceLogRecordList>() {
            @Override
            public Iterator<ServerInstanceLogRecordList> iterator() {
                return new ServerInstanceListsIterator(max, finalLogs, finalTraces) {
                    @Override
                    protected OnePidRecordListImpl queryResult(LogRepositoryBrowser browser) {
                        return new LogRecordBrowser(browser).recordsInProcess(min, max, filter);
                    }
                };
            }
        };
    }
}
public class FTPConnection { /** * Gets the help message from a command * @ param label The command name * @ return The help message or { @ code null } if the command was not found */ public String getHelpMessage ( String label ) { } }
CommandInfo info = commands . get ( label ) ; return info != null ? info . help : null ;
public class SnorocketOWLReasoner {
    /**
     * Determines if the specified set of axioms is entailed by the reasoner axioms.
     *
     * @param axioms The set of axioms to be tested
     * @return <code>true</code> if the set of axioms is entailed by the axioms in
     *         the imports closure of the root ontology, otherwise <code>false</code>.
     *         If the set of reasoner axioms is inconsistent then <code>true</code>.
     * @throws FreshEntitiesException if the signature of the set of axioms is not
     *         contained within the signature of the imports closure of the root
     *         ontology and the undeclared entity policy of this reasoner is set to
     *         {@link FreshEntityPolicy#DISALLOW}.
     * @throws ReasonerInterruptedException if the reasoning process was interrupted
     *         for any particular reason (for example if reasoning was cancelled by
     *         a client process)
     * @throws TimeOutException if the reasoner timed out during a basic reasoning
     *         operation. See {@link #getTimeOut()}.
     * @throws UnsupportedEntailmentTypeException if the reasoner cannot perform a
     *         check to see if the specified axiom is entailed
     * @throws AxiomNotInProfileException if <code>axiom</code> is not in the profile
     *         that is supported by this reasoner.
     * @throws InconsistentOntologyException if the set of reasoner axioms is inconsistent
     * @see #isEntailmentCheckingSupported(org.semanticweb.owlapi.model.AxiomType)
     */
    @Override
    public boolean isEntailed(Set<? extends OWLAxiom> axioms) throws ReasonerInterruptedException, UnsupportedEntailmentTypeException, TimeOutException, AxiomNotInProfileException, FreshEntitiesException, InconsistentOntologyException {
        // Set-based entailment checking is not supported by this reasoner; the
        // first axiom is reported as the unsupported one.
        // NOTE(review): an empty set makes iterator().next() throw
        // NoSuchElementException rather than the documented exception — confirm
        // callers never pass an empty set.
        throw new UnsupportedEntailmentTypeException(axioms.iterator().next());
    }
}
public class CompressionCodecFactory {
    /**
     * Removes a suffix from a filename, if it has it.
     *
     * @param filename the filename to strip
     * @param suffix the suffix to remove
     * @return the shortened filename
     */
    public static String removeSuffix(String filename, String suffix) {
        final boolean hasSuffix = filename.endsWith(suffix);
        return hasSuffix ? filename.substring(0, filename.length() - suffix.length()) : filename;
    }
}
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIfcClassificationItemRelationship ( ) { } }
if ( ifcClassificationItemRelationshipEClass == null ) { ifcClassificationItemRelationshipEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 81 ) ; } return ifcClassificationItemRelationshipEClass ;
public class ListTrafficPolicyInstancesResult { /** * A list that contains one < code > TrafficPolicyInstance < / code > element for each traffic policy instance that matches * the elements in the request . * @ param trafficPolicyInstances * A list that contains one < code > TrafficPolicyInstance < / code > element for each traffic policy instance that * matches the elements in the request . */ public void setTrafficPolicyInstances ( java . util . Collection < TrafficPolicyInstance > trafficPolicyInstances ) { } }
if ( trafficPolicyInstances == null ) { this . trafficPolicyInstances = null ; return ; } this . trafficPolicyInstances = new com . amazonaws . internal . SdkInternalList < TrafficPolicyInstance > ( trafficPolicyInstances ) ;
public class CommerceSubscriptionEntryPersistenceImpl { /** * Removes all the commerce subscription entries where subscriptionStatus = & # 63 ; from the database . * @ param subscriptionStatus the subscription status */ @ Override public void removeBySubscriptionStatus ( int subscriptionStatus ) { } }
for ( CommerceSubscriptionEntry commerceSubscriptionEntry : findBySubscriptionStatus ( subscriptionStatus , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ) { remove ( commerceSubscriptionEntry ) ; }
public class Choice7 {
    /**
     * {@inheritDoc}
     */
    @Override
    public <H> Choice7<A, B, C, D, E, F, H> zip(Applicative<Function<? super G, ? extends H>, Choice7<A, B, C, D, E, F, ?>> appFn) {
        // Delegate to the Monad default implementation, then coerce the result
        // back to the concrete Choice7 type.
        return Monad.super.zip(appFn).coerce();
    }
}
public class VToDoUserAgent { /** * < pre > * 3.4.8 . DECLINECOUNTER * The " DECLINECOUNTER " method in a " VTODO " calendar component is used * by an " Organizer " of the " VTODO " calendar component to reject a * counter proposal offered by one of the " Attendees " . The " Organizer " * sends the message to the " Attendee " that sent the " COUNTER " method to * the " Organizer " . * < / pre > */ @ Override public Calendar declineCounter ( Calendar counter ) { } }
Calendar declineCounter = transform ( Method . DECLINE_COUNTER , counter ) ; declineCounter . validate ( ) ; return declineCounter ;
public class LinkedTransferQueue {
    /**
     * Tries to append node s as tail.
     *
     * Lock-free: traverses from a tail snapshot to the true last node, CASes the
     * new node in, and then opportunistically advances the tail pointer when the
     * slack grows to 2 or more. The exact statement order here is load-bearing —
     * do not reorder.
     *
     * @param s the node to append
     * @param haveData true if appending in data mode
     * @return null on failure due to losing race with append in
     *         different mode, else s's predecessor, or s itself if no
     *         predecessor
     */
    private Node tryAppend(Node s, boolean haveData) {
        for (Node t = tail, p = t;;) { // move p to last node and append
            Node n, u; // temps for reads of next & tail
            if (p == null && (p = head) == null) {
                // Empty queue: try to install s as the first node.
                if (casHead(null, s))
                    return s; // initialize
            }
            else if (p.cannotPrecede(haveData))
                return null; // lost race vs opposite mode
            else if ((n = p.next) != null) // not last; keep traversing
                p = p != t && t != (u = tail) ? (t = u) : // stale tail
                    (UNLINKED != n) ? n : null; // restart if off list
            else if (!p.casNext(null, s))
                p = p.next; // re-read on CAS failure
            else {
                if (p != t) { // update if slack now >= 2
                    // Swing tail forward; give up if the list mutates under us.
                    while ((tail != t || !casTail(t, s)) && (t = tail) != null && (s = t.next) != null && // advance and retry
                           (s = s.next) != null && s != UNLINKED);
                }
                return p;
            }
        }
    }
}
public class TagletWriterImpl { /** * { @ inheritDoc } */ public Content getThrowsHeader ( ) { } }
HtmlTree result = HtmlTree . DT ( HtmlTree . SPAN ( HtmlStyle . throwsLabel , new StringContent ( configuration . getText ( "doclet.Throws" ) ) ) ) ; return result ;
public class PDTFactory { /** * Get the passed date time but with micro and nanoseconds set to 0 , so that * only the milliseconds part is present . This is helpful for XSD * serialization , where only milliseconds granularity is available . * @ param aODT * Source date time . May be < code > null < / code > . * @ return < code > null < / code > if the parameter is < code > null < / code > , the local * date time with microseconds and nanoseconds set to 0 otherwise . * @ since 9.2.0 */ @ Nullable public static ZonedDateTime getWithMillisOnly ( @ Nullable final ZonedDateTime aODT ) { } }
return aODT == null ? null : aODT . withNano ( aODT . get ( ChronoField . MILLI_OF_SECOND ) * ( int ) CGlobal . NANOSECONDS_PER_MILLISECOND ) ;
public class Line {
    /**
     * FIXME... hack
     *
     * Strips a trailing "{#id}" identifier marker from the stored line value.
     * Scans for the first unescaped '{' (a "\{" counts as an escaped literal),
     * requires it to be followed by '#', and then scans for the matching
     * unescaped '}'. On success the marker is removed from m_sValue, the
     * trailing-whitespace count is reset, and the trimmed id is returned.
     *
     * @return the extracted id, or null if no (non-empty) marker was found
     */
    public String stripID() {
        // Fast reject: an empty line, or one whose last non-trailing character
        // is not '}', cannot end in an "{#id}" marker.
        if (m_bIsEmpty || m_sValue.charAt(m_sValue.length() - m_nTrailing - 1) != '}')
            return null;
        int nPos = m_nLeading;
        boolean bFound = false;
        // Scan for the first unescaped '{'.
        while (nPos < m_sValue.length() && !bFound) {
            switch (m_sValue.charAt(nPos)) {
                case '\\':
                    // Skip the escaped character ("\{" is a literal brace).
                    if (nPos + 1 < m_sValue.length()) {
                        if (m_sValue.charAt(nPos + 1) == '{')
                            nPos++;
                    }
                    nPos++;
                    break;
                case '{':
                    bFound = true;
                    break;
                default:
                    nPos++;
                    break;
            }
        }
        if (bFound) {
            // The marker must start with "{#".
            if (nPos + 1 < m_sValue.length() && m_sValue.charAt(nPos + 1) == '#') {
                final int nStart = nPos + 2;
                nPos = nStart;
                bFound = false;
                // Scan for the matching unescaped '}' that terminates the marker.
                while (nPos < m_sValue.length() && !bFound) {
                    switch (m_sValue.charAt(nPos)) {
                        case '\\':
                            if (nPos + 1 < m_sValue.length()) {
                                if (m_sValue.charAt(nPos + 1) == '}')
                                    nPos++;
                            }
                            nPos++;
                            break;
                        case '}':
                            bFound = true;
                            break;
                        default:
                            nPos++;
                            break;
                    }
                }
                if (bFound) {
                    final String sID = m_sValue.substring(nStart, nPos).trim();
                    // Remove the marker from the stored value.
                    // NOTE(review): everything from "{#" onwards is dropped, which
                    // assumes the marker sits at the end of the line (consistent
                    // with the trailing-'}' precondition above) — confirm.
                    if (m_nLeading != 0) {
                        m_sValue = m_sValue.substring(0, m_nLeading) + m_sValue.substring(m_nLeading, nStart - 2).trim();
                    } else {
                        m_sValue = m_sValue.substring(m_nLeading, nStart - 2).trim();
                    }
                    m_nTrailing = 0;
                    return sID.length() > 0 ? sID : null;
                }
            }
        }
        return null;
    }
}
public class BatchUpdatePhoneNumberRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( BatchUpdatePhoneNumberRequest batchUpdatePhoneNumberRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( batchUpdatePhoneNumberRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( batchUpdatePhoneNumberRequest . getUpdatePhoneNumberRequestItems ( ) , UPDATEPHONENUMBERREQUESTITEMS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class RowProcessingPublisher {
    /**
     * Runs the whole row processing logic, start to finish, including
     * initialization, process rows, result collection and cleanup/closing
     * resources.
     *
     * The task chain is wired back-to-front: close tasks first, then the
     * result-collection tasks that fork into them, then the run task, and
     * finally the initialization task that kicks everything off.
     *
     * @param resultQueue a queue on which to append results
     * @param finishedTaskListener a task listener which will be invoked once the
     *        processing is done.
     * @see #processRows(RowProcessingMetrics)
     * @see #initializeConsumers(TaskListener)
     */
    public void runRowProcessing(Queue<JobAndResult> resultQueue, TaskListener finishedTaskListener) {
        final LifeCycleHelper lifeCycleHelper = _publishers.getLifeCycleHelper();
        final TaskRunner taskRunner = _publishers.getTaskRunner();
        final List<RowProcessingConsumer> configurableConsumers = getConfigurableConsumers();
        final int numConsumerTasks = configurableConsumers.size();
        // add tasks for closing components
        final TaskListener closeTaskListener = new JoinTaskListener(numConsumerTasks, finishedTaskListener);
        final List<TaskRunnable> closeTasks = new ArrayList<TaskRunnable>(numConsumerTasks);
        for (RowProcessingConsumer consumer : configurableConsumers) {
            closeTasks.add(createCloseTask(consumer, closeTaskListener));
        }
        final TaskListener getResultCompletionListener = new ForkTaskListener("collect results", taskRunner, closeTasks);
        // add tasks for collecting results
        final TaskListener getResultTaskListener = new JoinTaskListener(numConsumerTasks, getResultCompletionListener);
        final List<TaskRunnable> getResultTasks = new ArrayList<TaskRunnable>();
        for (RowProcessingConsumer consumer : configurableConsumers) {
            final Task collectResultTask = createCollectResultTask(consumer, resultQueue);
            if (collectResultTask == null) {
                // Consumer yields no result: submit a no-op so the join count still matches.
                getResultTasks.add(new TaskRunnable(null, getResultTaskListener));
            } else {
                getResultTasks.add(new TaskRunnable(collectResultTask, getResultTaskListener));
            }
        }
        final TaskListener runCompletionListener = new ForkTaskListener("run row processing", taskRunner, getResultTasks);
        final RowProcessingMetrics rowProcessingMetrics = getRowProcessingMetrics();
        final RunRowProcessingPublisherTask runTask = new RunRowProcessingPublisherTask(this, rowProcessingMetrics);
        final TaskListener referenceDataInitFinishedListener = new ForkTaskListener("Initialize row consumers", taskRunner,
                Arrays.asList(new TaskRunnable(runTask, runCompletionListener)));
        final RunNextTaskTaskListener initializeFinishedListener = new RunNextTaskTaskListener(taskRunner,
                new InitializeReferenceDataTask(lifeCycleHelper), referenceDataInitFinishedListener);
        // kick off the initialization
        initializeConsumers(initializeFinishedListener);
    }
}
public class DSClient {
    /**
     * Inverted-index search is not implemented by this DataStax client.
     *
     * @see com.impetus.client.cassandra.CassandraClientBase#searchInInvertedIndex(java.lang.String,
     *      com.impetus.kundera.metadata.model.EntityMetadata, java.util.Map)
     * @throws UnsupportedOperationException always; only the thrift/pelops clients
     *         support inverted-index search
     */
    @Override
    public List<SearchResult> searchInInvertedIndex(String columnFamilyName, EntityMetadata m, Map<Boolean, List<IndexClause>> indexClauseMap) {
        throw new UnsupportedOperationException("Support available only for thrift/pelops.");
    }
}