signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class CoNLLSentence { /** * 找出所有子节点 * @ param word * @ return */ public List < CoNLLWord > findChildren ( CoNLLWord word ) { } }
List < CoNLLWord > result = new LinkedList < CoNLLWord > ( ) ; for ( CoNLLWord other : this ) { if ( other . HEAD == word ) result . add ( other ) ; } return result ;
public class Logger { /** * Log an info message . */ public void info ( String format , Object ... extra ) { } }
if ( shouldLog ( INFO ) ) { Log . i ( tag , String . format ( format , extra ) ) ; }
public class LambdaDslObject { /** * Attribute that must match the given timestamp format * @ param name attribute name * @ param format timestamp format */ public LambdaDslObject timestamp ( String name , String format ) { } }
object . timestamp ( name , format ) ; return this ;
public class HELM1Utils { /** * method to transform the fourth section into HELM1 - Format * @ param annotations List of AnnotationNotation * @ return the fourth section of an Standard HELM */ private static String setStandardHELMFourthSection ( List < AnnotationNotation > annotations ) { } }
StringBuilder sb = new StringBuilder ( ) ; for ( AnnotationNotation annotation : annotations ) { sb . append ( annotation . toHELM2 ( ) + "|" ) ; } if ( sb . length ( ) > 1 ) { sb . setLength ( sb . length ( ) - 1 ) ; } return sb . toString ( ) ;
public class Interval { /** * Find the gap between two intervals . * @ param that interval * @ return the interval between this and that or { @ link Interval # NULL } if * none exists * @ see # ahead ( ) * @ see # behind ( ) * @ see # intersect ( org . nmdp . ngs . fca . Interval ) * @ see < a href = " https : / / en . wikipedia . org / wiki / Commutative _ property " > * commutative property < / a > */ public Interval < C > gap ( final Interval < C > that ) { } }
if ( this . before ( that ) ) { return this . ahead ( ) . intersect ( that . behind ( ) ) ; } if ( this . after ( that ) ) { return this . behind ( ) . intersect ( that . ahead ( ) ) ; } return NULL ;
public class JFIFHeaderReader { /** * ( non - Javadoc ) * @ see * com . alibaba . simpleimage . jpeg . ExtendImageHeaderReader # readProperties ( com . alibaba . simpleimage . jpeg . ImageInputStream * , com . alibaba . simpleimage . jpeg . ExtendImageHeader ) */ public void readProperties ( ImageInputStream in , int len , ExtendImageHeader imageHeader ) throws IOException { } }
int numToRead = 0 ; // get the interesting part of the marker data if ( len >= 14 ) { numToRead = 14 ; } else if ( len > 0 ) { numToRead = len ; } else { numToRead = 0 ; } byte [ ] datas = new byte [ numToRead ] ; in . read ( datas ) ; len -= numToRead ; if ( numToRead >= 14 && datas [ 0 ] == 0x4A && datas [ 1 ] == 0x46 && datas [ 2 ] == 0x49 && datas [ 3 ] == 0x46 && datas [ 4 ] == 0 ) { // Found JFIF APP0 marker : save info imageHeader . setSawJFIFMarker ( true ) ; imageHeader . setJFIFMajorVersion ( datas [ 5 ] ) ; imageHeader . setJFIFMinorVersion ( datas [ 6 ] ) ; imageHeader . setDensityUnit ( datas [ 7 ] ) ; imageHeader . setXDensity ( datas [ 8 ] << 8 ) ; imageHeader . setYDensity ( datas [ 10 ] << 8 ) ; // need check metadata info or not ? } else if ( numToRead >= 6 && datas [ 0 ] == 0x4A && datas [ 1 ] == 0x46 && datas [ 2 ] == 0x58 && datas [ 3 ] == 0x58 && datas [ 4 ] == 0 ) { // Found JFIF " JFXX " extension APP0 marker imageHeader . setSawJFXXMarker ( true ) ; } // skip any remaining data - - could be lots if ( len > 0 ) { in . skipBytes ( len ) ; }
public class Validate { /** * < p > Validate that the stateful condition is { @ code true } ; otherwise * throwing an exception with the specified message . This method is useful when * validating according to an arbitrary boolean expression , such as validating a * primitive number or using your own custom validation expression . < / p > * < pre > Validate . validState ( this . isOk ( ) , " The state is not OK : % s " , myObject ) ; < / pre > * @ param expression the boolean expression to check * @ param message the { @ link String # format ( String , Object . . . ) } exception message if invalid , not null * @ param values the optional values for the formatted exception message , null array not recommended * @ throws IllegalStateException if expression is { @ code false } * @ see # validState ( boolean ) * @ since 3.0 */ public static void validState ( final boolean expression , final String message , final Object ... values ) { } }
if ( ! expression ) { throw new IllegalStateException ( StringUtils . simpleFormat ( message , values ) ) ; }
public class CoalesceVariableNames { /** * Because the code has already been normalized by the time this pass runs , we can safely * redeclare any let and const coalesced variables as vars */ private static void makeDeclarationVar ( Var coalescedName ) { } }
if ( coalescedName . isLet ( ) || coalescedName . isConst ( ) ) { Node declNode = NodeUtil . getEnclosingNode ( coalescedName . getParentNode ( ) , NodeUtil :: isNameDeclaration ) ; declNode . setToken ( Token . VAR ) ; }
public class CircuitGroupUnblockingAckMessageImpl { /** * ( non - Javadoc ) * @ see org . restcomm . protocols . ss7 . isup . ISUPMessageImpl # decodeMandatoryParameters ( byte [ ] , int ) */ protected int decodeMandatoryParameters ( ISUPParameterFactory parameterFactory , byte [ ] b , int index ) throws ParameterException { } }
int localIndex = index ; index += super . decodeMandatoryParameters ( parameterFactory , b , index ) ; if ( b . length - index > 1 ) { CircuitGroupSuperVisionMessageType cgsvmt = parameterFactory . createCircuitGroupSuperVisionMessageType ( ) ; ( ( AbstractISUPParameter ) cgsvmt ) . decode ( new byte [ ] { b [ index ] } ) ; this . setSupervisionType ( cgsvmt ) ; index ++ ; return index - localIndex ; } else { throw new IllegalArgumentException ( "byte[] must have atleast four octets" ) ; }
public class CoreOptions { /** * Creates a composite option of { @ link ProvisionOption } s . This is handy when bundles are built * on the fly via TinyBundles . * @ param streams * provision sources * @ return composite option of provision options * @ throws IllegalArgumentException * - If a problem occured while flushing streams */ public static Option provision ( final InputStream ... streams ) { } }
validateNotNull ( streams , "streams" ) ; final UrlProvisionOption [ ] options = new UrlProvisionOption [ streams . length ] ; int i = 0 ; for ( InputStream stream : streams ) { options [ i ++ ] = streamBundle ( stream ) ; } return provision ( options ) ;
public class AmazonRedshiftClient { /** * Creates a new cluster from a snapshot . By default , Amazon Redshift creates the resulting cluster with the same * configuration as the original cluster from which the snapshot was created , except that the new cluster is created * with the default cluster security and parameter groups . After Amazon Redshift creates the cluster , you can use * the < a > ModifyCluster < / a > API to associate a different security group and different parameter group with the * restored cluster . If you are using a DS node type , you can also choose to change to another DS node type of the * same size during restore . * If you restore a cluster into a VPC , you must provide a cluster subnet group where you want the cluster restored . * For more information about working with snapshots , go to < a * href = " https : / / docs . aws . amazon . com / redshift / latest / mgmt / working - with - snapshots . html " > Amazon Redshift Snapshots < / a > * in the < i > Amazon Redshift Cluster Management Guide < / i > . * @ param restoreFromClusterSnapshotRequest * @ return Result of the RestoreFromClusterSnapshot operation returned by the service . * @ throws AccessToSnapshotDeniedException * The owner of the specified snapshot has not authorized your account to access the snapshot . * @ throws ClusterAlreadyExistsException * The account already has a cluster with the given identifier . * @ throws ClusterSnapshotNotFoundException * The snapshot identifier does not refer to an existing cluster snapshot . * @ throws ClusterQuotaExceededException * The request would exceed the allowed number of cluster instances for this account . For information about * increasing your quota , go to < a * href = " https : / / docs . aws . amazon . com / redshift / latest / mgmt / amazon - redshift - limits . html " > Limits in Amazon * Redshift < / a > in the < i > Amazon Redshift Cluster Management Guide < / i > . 
* @ throws InsufficientClusterCapacityException * The number of nodes specified exceeds the allotted capacity of the cluster . * @ throws InvalidClusterSnapshotStateException * The specified cluster snapshot is not in the < code > available < / code > state , or other accounts are * authorized to access the snapshot . * @ throws InvalidRestoreException * The restore is invalid . * @ throws NumberOfNodesQuotaExceededException * The operation would exceed the number of nodes allotted to the account . For information about increasing * your quota , go to < a * href = " https : / / docs . aws . amazon . com / redshift / latest / mgmt / amazon - redshift - limits . html " > Limits in Amazon * Redshift < / a > in the < i > Amazon Redshift Cluster Management Guide < / i > . * @ throws NumberOfNodesPerClusterLimitExceededException * The operation would exceed the number of nodes allowed for a cluster . * @ throws InvalidVPCNetworkStateException * The cluster subnet group does not cover all Availability Zones . * @ throws InvalidClusterSubnetGroupStateException * The cluster subnet group cannot be deleted because it is in use . * @ throws InvalidSubnetException * The requested subnet is not valid , or not all of the subnets are in the same VPC . * @ throws ClusterSubnetGroupNotFoundException * The cluster subnet group name does not refer to an existing cluster subnet group . * @ throws UnauthorizedOperationException * Your account is not authorized to perform the requested operation . * @ throws HsmClientCertificateNotFoundException * There is no Amazon Redshift HSM client certificate with the specified identifier . * @ throws HsmConfigurationNotFoundException * There is no Amazon Redshift HSM configuration with the specified identifier . * @ throws InvalidElasticIpException * The Elastic IP ( EIP ) is invalid or cannot be found . * @ throws ClusterParameterGroupNotFoundException * The parameter group name does not refer to an existing parameter group . 
* @ throws ClusterSecurityGroupNotFoundException * The cluster security group name does not refer to an existing cluster security group . * @ throws LimitExceededException * The encryption key has exceeded its grant limit in AWS KMS . * @ throws DependentServiceRequestThrottlingException * The request cannot be completed because a dependent service is throttling requests made by Amazon * Redshift on your behalf . Wait and retry the request . * @ throws InvalidClusterTrackException * The provided cluster track name is not valid . * @ throws SnapshotScheduleNotFoundException * We could not find the specified snapshot schedule . * @ sample AmazonRedshift . RestoreFromClusterSnapshot * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / redshift - 2012-12-01 / RestoreFromClusterSnapshot " * target = " _ top " > AWS API Documentation < / a > */ @ Override public Cluster restoreFromClusterSnapshot ( RestoreFromClusterSnapshotRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeRestoreFromClusterSnapshot ( request ) ;
public class GeocodeResultBuilder { /** * Create a GeocodingResult from a GeoServiceGeocodingResult . * @ param gsResult the GeoServiceGeocodingResult * @ return the GeocodingResult */ public GeocodingResult toGeocodingResult ( final GeoServiceGeocodingResult gsResult ) { } }
if ( gsResult == null ) { return null ; } final GeocodingResult result = new GeocodingResult ( ) ; final AddressComponent [ ] addressComponents = gsResult . getAddressComponents ( ) ; if ( addressComponents == null ) { result . addressComponents = null ; } else { result . addressComponents = new AddressComponent [ addressComponents . length ] ; for ( int i = 0 ; i < addressComponents . length ; i ++ ) { result . addressComponents [ i ] = copy ( addressComponents [ i ] ) ; } } result . formattedAddress = gsResult . getFormattedAddress ( ) ; result . geometry = toGeometry ( gsResult . getGeometry ( ) ) ; result . partialMatch = gsResult . isPartialMatch ( ) ; result . placeId = gsResult . getPlaceId ( ) ; // This is safe because the gs object returns a copy of its array . result . postcodeLocalities = gsResult . getPostcodeLocalities ( ) ; // This is safe because the gs object returns a copy of its array . result . types = gsResult . getTypes ( ) ; return result ;
public class DateTime {
    /**
     * Parses an RFC3339 date/time value.
     *
     * <p>Upgrade warning: in prior version 1.17, this method required milliseconds to be exactly 3
     * digits (if included), and did not throw an exception for all types of invalid input values, but
     * starting in version 1.18, the parsing done by this method has become more strict to enforce
     * that only valid RFC3339 strings are entered, and if not, it throws a
     * {@link NumberFormatException}. Also, in accordance with the RFC3339 standard, any number of
     * milliseconds digits is now allowed.
     *
     * <p>For the date-only case, the time zone is ignored and the hourOfDay, minute, second, and
     * millisecond parameters are set to zero.
     *
     * @param str Date/time string in RFC3339 format
     * @throws NumberFormatException if {@code str} doesn't match the RFC3339 standard format; an
     *     exception is thrown if {@code str} doesn't match {@code RFC3339_REGEX} or if it contains a
     *     time zone shift but no time.
     */
    public static DateTime parseRfc3339(String str) throws NumberFormatException {
        Matcher matcher = RFC3339_PATTERN.matcher(str);
        if (!matcher.matches()) {
            throw new NumberFormatException("Invalid date/time format: " + str);
        }
        int year = Integer.parseInt(matcher.group(1)); // yyyy
        int month = Integer.parseInt(matcher.group(2)) - 1; // MM (Calendar months are 0-based)
        int day = Integer.parseInt(matcher.group(3)); // dd
        boolean isTimeGiven = matcher.group(4) != null; // 'T'HH:mm:ss.milliseconds
        String tzShiftRegexGroup = matcher.group(9); // 'Z', or time zone shift HH:mm following '+'/'-'
        boolean isTzShiftGiven = tzShiftRegexGroup != null;
        int hourOfDay = 0;
        int minute = 0;
        int second = 0;
        int milliseconds = 0;
        Integer tzShiftInteger = null;
        // A time zone shift without a time component is invalid per RFC3339.
        if (isTzShiftGiven && !isTimeGiven) {
            throw new NumberFormatException(
                "Invalid date/time format, cannot specify time zone shift" + " without specifying time: " + str);
        }
        if (isTimeGiven) {
            hourOfDay = Integer.parseInt(matcher.group(5)); // HH
            minute = Integer.parseInt(matcher.group(6)); // mm
            second = Integer.parseInt(matcher.group(7)); // ss
            if (matcher.group(8) != null) { // contains .milliseconds?
                milliseconds = Integer.parseInt(matcher.group(8).substring(1)); // milliseconds
                // The number of digits after the dot may not be 3. Need to renormalize
                // (e.g. ".5" means 500 ms, ".1234" rounds down to 123 ms).
                int fractionDigits = matcher.group(8).substring(1).length() - 3;
                milliseconds = (int) ((float) milliseconds / Math.pow(10, fractionDigits));
            }
        }
        // Build the epoch value as if the fields were UTC; the tz shift is applied below.
        Calendar dateTime = new GregorianCalendar(GMT);
        dateTime.set(year, month, day, hourOfDay, minute, second);
        dateTime.set(Calendar.MILLISECOND, milliseconds);
        long value = dateTime.getTimeInMillis();
        if (isTimeGiven && isTzShiftGiven) {
            int tzShift;
            if (Character.toUpperCase(tzShiftRegexGroup.charAt(0)) == 'Z') {
                tzShift = 0;
            } else {
                tzShift = Integer.parseInt(matcher.group(11)) * 60 // time zone shift HH
                    + Integer.parseInt(matcher.group(12)); // time zone shift mm
                if (matcher.group(10).charAt(0) == '-') { // time zone shift + or -
                    tzShift = -tzShift;
                }
                // e.g. if 1 hour ahead of UTC, subtract an hour to get UTC time.
                value -= tzShift * 60000L;
            }
            tzShiftInteger = tzShift;
        }
        return new DateTime(!isTimeGiven, value, tzShiftInteger);
    }
}
public class AccountsInner { /** * Deletes the specified firewall rule from the specified Data Lake Store account . * @ param resourceGroupName The name of the Azure resource group that contains the Data Lake Store account . * @ param accountName The name of the Data Lake Store account from which to delete the firewall rule . * @ param firewallRuleName The name of the firewall rule to delete . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceResponse } object if successful . */ public Observable < Void > deleteFirewallRuleAsync ( String resourceGroupName , String accountName , String firewallRuleName ) { } }
return deleteFirewallRuleWithServiceResponseAsync ( resourceGroupName , accountName , firewallRuleName ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ;
public class WikisApi { /** * Get a single page of project wiki . * < pre > < code > GitLab Endpoint : GET / projects / : id / wikis / : slug < / code > < / pre > * @ param projectIdOrPath the project in the form of an Integer ( ID ) , String ( path ) , or Project instance * @ param slug the slug of the project ' s wiki page * @ return the specified project Snippet * @ throws GitLabApiException if any exception occurs */ public WikiPage getPage ( Object projectIdOrPath , String slug ) throws GitLabApiException { } }
Response response = get ( Response . Status . OK , null , "projects" , getProjectIdOrPath ( projectIdOrPath ) , "wikis" , slug ) ; return ( response . readEntity ( WikiPage . class ) ) ;
public class ApiOvhIp { /** * IDs of rules configured for this IP * REST : GET / ip / { ip } / game / { ipOnGame } / rule * @ param ip [ required ] * @ param ipOnGame [ required ] */ public ArrayList < Long > ip_game_ipOnGame_rule_GET ( String ip , String ipOnGame ) throws IOException { } }
String qPath = "/ip/{ip}/game/{ipOnGame}/rule" ; StringBuilder sb = path ( qPath , ip , ipOnGame ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t1 ) ;
public class MockFactory { /** * Creates and fills a List object . * If defined as interface , creates an { @ link java . util . ArrayList } . * @ param type Type of List . * @ return List object */ @ SuppressWarnings ( "unchecked" ) // types must agree private static List createListObject ( ParameterizedType type ) { } }
Class rawType = ( Class ) type . getRawType ( ) ; Type [ ] genericTypes = type . getActualTypeArguments ( ) ; List listObject ; if ( rawType . isInterface ( ) ) { listObject = new ArrayList ( MOCK_LIST_COUNT ) ; } else { listObject = ( List ) instantiateObject ( rawType ) ; } for ( int i = 0 ; i < MOCK_LIST_COUNT ; i ++ ) { listObject . add ( generateValue ( ( Class ) genericTypes [ 0 ] ) ) ; } return listObject ;
public class UriEditPanel {
    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        java.awt.GridBagConstraints gridBagConstraints;

        labelBrowseCurrentLink = new javax.swing.JLabel();
        textFieldURI = new javax.swing.JTextField();
        labelValidator = new javax.swing.JLabel();
        butonReset = new javax.swing.JButton();

        setBorder(javax.swing.BorderFactory.createEmptyBorder(10, 10, 10, 10));
        setLayout(new java.awt.GridBagLayout());

        // Clickable "browse current link" icon label.
        labelBrowseCurrentLink.setIcon(new javax.swing.ImageIcon(getClass().getResource("/icons/url_link.png"))); // NOI18N
        java.util.ResourceBundle bundle = java.util.ResourceBundle.getBundle("com/igormaznitsa/nbmindmap/i18n/Bundle"); // NOI18N
        labelBrowseCurrentLink.setToolTipText(bundle.getString("UriEditPanel.labelBrowseCurrentLink.toolTipText_1")); // NOI18N
        labelBrowseCurrentLink.setCursor(new java.awt.Cursor(java.awt.Cursor.HAND_CURSOR));
        labelBrowseCurrentLink.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
        labelBrowseCurrentLink.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                labelBrowseCurrentLinkMouseClicked(evt);
            }
        });
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
        gridBagConstraints.ipadx = 10;
        add(labelBrowseCurrentLink, gridBagConstraints);

        // URI text field takes all remaining horizontal space.
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
        gridBagConstraints.weightx = 1000.0;
        add(textFieldURI, gridBagConstraints);

        // Validation status icon.
        labelValidator.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
        labelValidator.setIcon(new javax.swing.ImageIcon(getClass().getResource("/icons/question16.png"))); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
        gridBagConstraints.ipadx = 10;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        add(labelValidator, gridBagConstraints);

        // Reset (clear) button.
        butonReset.setIcon(new javax.swing.ImageIcon(getClass().getResource("/icons/cross16.png"))); // NOI18N
        butonReset.setFocusable(false);
        butonReset.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                butonResetActionPerformed(evt);
            }
        });
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 3;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
        add(butonReset, gridBagConstraints);
    }
}
public class BitMaskUtil { /** * Check if the bit is set to ' 1' * @ param value integer to check bit * @ param number of bit to check ( right first bit starting at 1) */ public static boolean isBitOn ( int value , int bitNumber ) { } }
ensureBitRange ( bitNumber ) ; return ( ( value & MASKS [ bitNumber - 1 ] ) == MASKS [ bitNumber - 1 ] ) ;
public class KeyStoreServiceImpl { /** * { @ inheritDoc } */ @ Override public Certificate getCertificateFromKeyStore ( String keyStoreName , String alias ) throws KeyStoreException , CertificateException { } }
try { KeyStore ks = ksMgr . getJavaKeyStore ( keyStoreName ) ; if ( ks == null ) { throw new KeyStoreException ( "The keystore [" + keyStoreName + "] is not present in the configuration" ) ; } else { if ( ! ks . isCertificateEntry ( alias ) && ! ks . isKeyEntry ( alias ) ) { throw new CertificateException ( "The alias [" + alias + "] is not present in the KeyStore as a certificate entry" ) ; } else { return ks . getCertificate ( alias ) ; } } } catch ( CertificateException e ) { throw e ; } catch ( KeyStoreException e ) { throw e ; } catch ( Exception e ) { throw new KeyStoreException ( "Unexpected error while loading the request Certificate for alias [" + alias + "] from keystore: " + keyStoreName , e ) ; }
public class QueryLexer {
    /**
     * $ANTLR start "INT"
     *
     * Generated lexer rule: matches an optional '-' followed by one or more
     * digits ('0'..'9')+. Do not edit by hand; regenerate from the grammar.
     */
    public final void mINT() throws RecognitionException {
        try {
            int _type = INT;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // src/riemann/Query.g:88:5: ( ( '-' )? ( '0' .. '9' )+ )
            // src/riemann/Query.g:88:7: ( '-' )? ( '0' .. '9' )+
            {
                // src/riemann/Query.g:88:7: ( '-' )?  -- optional leading minus sign
                int alt2 = 2;
                int LA2_0 = input.LA(1);
                if ((LA2_0 == '-')) {
                    alt2 = 1;
                }
                switch (alt2) {
                    case 1:
                        // src/riemann/Query.g:88:7: '-'
                        {
                            match('-');
                        }
                        break;
                }
                // src/riemann/Query.g:88:12: ( '0' .. '9' )+  -- at least one digit required
                int cnt3 = 0;
                loop3:
                do {
                    int alt3 = 2;
                    int LA3_0 = input.LA(1);
                    if (((LA3_0 >= '0' && LA3_0 <= '9'))) {
                        alt3 = 1;
                    }
                    switch (alt3) {
                        case 1:
                            // src/riemann/Query.g:88:12: '0' .. '9'
                            {
                                matchRange('0', '9');
                            }
                            break;
                        default:
                            // No more digits: succeed if we consumed at least one, else fail.
                            if (cnt3 >= 1) break loop3;
                            EarlyExitException eee = new EarlyExitException(3, input);
                            throw eee;
                    }
                    cnt3++;
                } while (true);
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
        }
    }
}
public class Date {
    /**
     * Setter for year - sets full year (e.g. 2006 and NOT 06), C.
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setYear(int v) {
        // Generated UIMA JCas setter: verify the "year" feature exists in the type
        // system before writing, then store the value via the low-level CAS API.
        if (Date_Type.featOkTst && ((Date_Type) jcasType).casFeat_year == null)
            jcasType.jcas.throwFeatMissing("year", "de.julielab.jules.types.Date");
        jcasType.ll_cas.ll_setIntValue(addr, ((Date_Type) jcasType).casFeatCode_year, v);
    }
}
public class TransformerImpl { /** * Subroutine of simplifyTree to handle NE nodes */ private static Selector simplifyNE ( Selector sel0 , Selector sel1 ) { } }
if ( sel0 . getType ( ) == Selector . BOOLEAN ) return makeOR ( makeAND ( simplifyTree ( sel0 ) , simplifyNOT ( sel1 ) ) , makeAND ( simplifyNOT ( sel0 ) , simplifyTree ( sel1 ) ) ) ; else if ( sel0 . getType ( ) == Selector . STRING || sel0 . getType ( ) == Selector . UNKNOWN ) return new OperatorImpl ( Selector . NE , sel0 , sel1 ) ; else // Numeric : transform into a pair of inequalities return makeOR ( new OperatorImpl ( Selector . LT , sel0 , sel1 ) , new OperatorImpl ( Selector . GT , sel0 , sel1 ) ) ;
public class WebLocatorAbstractBuilder { /** * < p > < b > Used for finding element process ( to generate xpath address ) < / b > < / p > * @ param root If the path starts with / / then all elements in the document which fulfill following criteria are selected . eg . / / or / * @ param < T > the element which calls this method * @ return this element */ @ SuppressWarnings ( "unchecked" ) public < T extends WebLocatorAbstractBuilder > T setRoot ( final String root ) { } }
pathBuilder . setRoot ( root ) ; return ( T ) this ;
public class Dao { /** * Update rows * @ param table The table to update * @ param values The values to update * @ param whereClause The where clause * @ param whereArgs The where clause arguments * @ return An < b > deferred < / b > observable containing the number of rows that have been changed by this update */ @ CheckResult protected Observable < Integer > update ( @ NonNull final String table , @ NonNull final ContentValues values , @ Nullable final String whereClause , @ Nullable final String ... whereArgs ) { } }
return Observable . defer ( new Func0 < Observable < Integer > > ( ) { @ Override public Observable < Integer > call ( ) { return Observable . just ( db . update ( table , values , whereClause , whereArgs ) ) ; } } ) ;
public class CoreAsync { /** * Perform collection for stream collector . * @ param stages stages to apply to collector * @ param consumer consumer to apply * @ param supplier supplier to provide result * @ param < T > source type * @ param < U > target type */ < T , U > Stage < U > doStreamCollect ( final Collection < ? extends Stage < ? extends T > > stages , final Consumer < ? super T > consumer , final Supplier < ? extends U > supplier ) { } }
final Completable < U > target = completable ( ) ; final StreamCollectHelper < ? super T , ? extends U > done = new StreamCollectHelper < > ( caller , stages . size ( ) , consumer , supplier , target ) ; for ( final Stage < ? extends T > q : stages ) { q . handle ( done ) ; } bindSignals ( target , stages ) ; return target ;
public class SegmentManager {
    /**
     * Loads all segments from disk.
     *
     * Scans the log directory for segment files, loads every locked (valid)
     * segment, resolves overlaps between segments created by log compaction,
     * deletes unlocked/leftover segments, and finally aligns gaps between
     * consecutive segments.
     *
     * @return A collection of segments for the log.
     */
    protected Collection<Segment> loadSegments() {
        // Ensure log directories are created.
        storage.directory().mkdirs();
        TreeMap<Long, Segment> segments = new TreeMap<>();
        // Iterate through all files in the log directory.
        for (File file : storage.directory().listFiles(File::isFile)) {
            // If the file looks like a segment file, attempt to load the segment.
            if (SegmentFile.isSegmentFile(name, file)) {
                SegmentFile segmentFile = new SegmentFile(file);
                SegmentDescriptor descriptor = new SegmentDescriptor(FileBuffer.allocate(file, SegmentDescriptor.BYTES));
                // Valid segments will have been locked. Segments resulting from failures during log
                // cleaning will be unlocked and should ultimately be deleted from disk.
                if (descriptor.locked()) {
                    // Load the segment.
                    Segment segment = loadSegment(descriptor.id(), descriptor.version());
                    // If a segment with an equal or lower index has already been loaded, ensure this segment
                    // is not superseded by the earlier segment. This can occur due to segments being combined
                    // during log compaction.
                    Map.Entry<Long, Segment> previousEntry = segments.floorEntry(segment.index());
                    if (previousEntry != null) {
                        // If an existing descriptor exists with a lower index than this segment's first index,
                        // check to determine whether this segment's first index is contained in that existing
                        // index. If it is, determine which segment should take precedence based on versions.
                        Segment previousSegment = previousEntry.getValue();
                        // If the two segments start at the same index, the one with the higher version wins.
                        if (previousSegment.index() == segment.index()) {
                            if (segment.descriptor().version() > previousSegment.descriptor().version()) {
                                LOGGER.debug("Replaced segment {} with newer version: {} ({})", previousSegment.descriptor().id(), segment.descriptor().version(), segmentFile.file().getName());
                                segments.remove(previousEntry.getKey());
                                previousSegment.close();
                                previousSegment.delete();
                            } else {
                                // The already-loaded segment is newer; discard this one.
                                segment.close();
                                segment.delete();
                                continue;
                            }
                        }
                        // If the existing segment's entries overlap with the loaded segment's entries, the
                        // existing segment always supersedes the loaded segment. Log compaction processes
                        // ensure this is always the case.
                        else if (previousSegment.index() + previousSegment.length() > segment.index()) {
                            segment.close();
                            segment.delete();
                            continue;
                        }
                    }
                    // Add the segment to the segments list.
                    LOGGER.debug("Found segment: {} ({})", segment.descriptor().id(), segmentFile.file().getName());
                    segments.put(segment.index(), segment);
                    // Ensure any segments later in the log with which this segment overlaps are removed.
                    Map.Entry<Long, Segment> nextEntry = segments.higherEntry(segment.index());
                    while (nextEntry != null) {
                        if (nextEntry.getValue().index() < segment.index() + segment.length()) {
                            segments.remove(nextEntry.getKey());
                            nextEntry = segments.higherEntry(segment.index());
                        } else {
                            break;
                        }
                    }
                    descriptor.close();
                }
                // If the segment descriptor wasn't locked, close and delete the descriptor.
                else {
                    LOGGER.debug("Deleting unlocked segment: {}-{} ({})", descriptor.id(), descriptor.version(), segmentFile.file().getName());
                    descriptor.close();
                    descriptor.delete();
                }
            }
        }
        // Align consecutive segments: if a gap exists between the end of one segment and the
        // start of the next, skip the previous segment forward to close the gap.
        for (Long segmentId : segments.keySet()) {
            Segment segment = segments.get(segmentId);
            Map.Entry<Long, Segment> previousEntry = segments.floorEntry(segmentId - 1);
            if (previousEntry != null) {
                Segment previousSegment = previousEntry.getValue();
                if (previousSegment.index() + previousSegment.length() - 1 < segment.index()) {
                    previousSegment.skip(segment.index() - (previousSegment.index() + previousSegment.length()));
                }
            }
        }
        return segments.values();
    }
}
public class SimpleNameListItemRenderer { /** * { @ inheritDoc } */ @ Override public final StringBuilder renderAsListItem ( final StringBuilder builder , final boolean newLine , final int pad ) { } }
if ( pad > 0 ) { return builder ; } GedRenderer . renderNewLine ( builder , newLine ) ; builder . append ( simpleNameRenderer . renderAsPhrase ( ) ) ; return builder ;
public class CmsGalleryService { /** * Creates the sitemap entry bean for a resource . < p > * @ param cms the current CMS context * @ param resource the resource for which the sitemap entry bean should be created * @ return the created sitemap entry bean * @ throws CmsException if something goes wrong */ CmsSitemapEntryBean internalCreateSitemapEntryBean ( CmsObject cms , CmsResource resource ) throws CmsException { } }
cms = OpenCms . initCmsObject ( cms ) ; cms . getRequestContext ( ) . setSiteRoot ( "" ) ; CmsJspNavBuilder navBuilder = new CmsJspNavBuilder ( cms ) ; CmsJspNavElement entry = navBuilder . getNavigationForResource ( resource . getRootPath ( ) ) ; if ( entry == null ) { // may be null for expired resources return null ; } return prepareSitemapEntry ( cms , entry , false , true ) ;
public class ConcurrentSparqlGraphStoreManager { /** * Add statements to a named model * @ param graphUri * @ param data */ @ Override public void addModelToGraph ( URI graphUri , Model data ) { } }
if ( graphUri == null || data == null ) return ; // Use HTTP protocol if possible if ( this . sparqlServiceEndpoint != null ) { datasetAccessor . add ( graphUri . toASCIIString ( ) , data ) ; } else { this . addModelToGraphSparqlQuery ( graphUri , data ) ; }
public class DataSetBuilder { /** * Set { @ link Date } sequence filler for column * with a specified step in days . * A call to this method is shorthand for * < code > sequence ( column , initial , d - & gt ; new Date ( x . getTime ( ) + step * MILLIS _ PER _ DAY ) ) < / code > . * @ param column Column day . * @ param initial Initial date . * @ param step Step in days . * @ return The builder instance ( for chained calls ) . * @ see # sequence ( String , Time , int ) * @ see # sequence ( String , Timestamp , long ) * @ see # sequence ( String , Object , UnaryOperator ) */ public DataSetBuilder sequence ( String column , Date initial , int step ) { } }
ensureArgNotNull ( initial ) ; return sequence ( column , initial , d -> new Date ( d . getTime ( ) + step * MILLIS_PER_DAY ) ) ;
public class FdfsResponse {
    /**
     * Decode the response content read from the server.
     *
     * @param in stream positioned at the start of the content
     * @param charset charset used to decode textual fields
     * @return the mapped value, or null when the response carries no content
     * @throws IOException if the stream ends before the declared content length
     *         is fully read, or on any underlying I/O error
     */
    public T decodeContent(InputStream in, Charset charset) throws IOException {
        // 如果有内容
        if (getContentLength() > 0) {
            byte[] bytes = new byte[(int) getContentLength()];
            // InputStream.read may return fewer bytes than requested; loop until
            // the full declared length is consumed or the stream ends early.
            int offset = 0;
            while (offset < bytes.length) {
                int read = in.read(bytes, offset, bytes.length - offset);
                if (read < 0) {
                    break; // premature end of stream
                }
                offset += read;
            }
            if (offset != getContentLength()) {
                throw new IOException("读取到的数据长度与协议长度不符");
            }
            return FdfsParamMapper.map(bytes, genericType, charset);
        }
        return null;
    }
}
public class LRUCache { /** * remove a element from list * @ param key */ public synchronized void remove ( E key ) { } }
Entry < E , T > entry = map . get ( key ) ; this . tail . prev = entry . prev ; entry . prev . next = this . tail ; map . remove ( entry . key ) ; this . length -- ;
public class OpenCLDevice { /** * List OpenCLDevices of a given TYPE , or all OpenCLDevices if type = = null . */ public static List < OpenCLDevice > listDevices ( TYPE type ) { } }
final OpenCLPlatform platform = new OpenCLPlatform ( 0 , null , null , null ) ; final ArrayList < OpenCLDevice > results = new ArrayList < > ( ) ; for ( final OpenCLPlatform p : platform . getOpenCLPlatforms ( ) ) { for ( final OpenCLDevice device : p . getOpenCLDevices ( ) ) { if ( type == null || device . getType ( ) == type ) { results . add ( device ) ; } } } return results ;
public class CasConfigurationJasyptCipherExecutor { /** * Decrypt value string . * @ param value the value * @ return the string */ public String decryptValue ( final String value ) { } }
try { return decryptValuePropagateExceptions ( value ) ; } catch ( final Exception e ) { LOGGER . error ( "Could not decrypt value [{}]" , value , e ) ; } return null ;
public class VEvent { /** * Sets the date that the event ends . This must NOT be set if a * { @ link DurationProperty } is defined . * @ param dateEnd the end date or null to remove * @ param hasTime true if the date has a time component , false if it is * strictly a date ( if false , the given Date object should be created by a * { @ link java . util . Calendar Calendar } object that uses the JVM ' s default * timezone ) * @ return the property that was created * @ see < a href = " http : / / tools . ietf . org / html / rfc5545 # page - 95 " > RFC 5545 * p . 95-6 < / a > * @ see < a href = " http : / / tools . ietf . org / html / rfc2445 # page - 91 " > RFC 2445 * p . 91-2 < / a > * @ see < a href = " http : / / www . imc . org / pdi / vcal - 10 . doc " > vCal 1.0 p . 31 < / a > */ public DateEnd setDateEnd ( Date dateEnd , boolean hasTime ) { } }
DateEnd prop = ( dateEnd == null ) ? null : new DateEnd ( dateEnd , hasTime ) ; setDateEnd ( prop ) ; return prop ;
public class Normalization { /** * Returns the standard deviation * and mean of the given columns * The list returned is a list of size 2 where each row * represents the standard deviation of each column and the mean of each column * @ param data the data to get the standard deviation and mean for * @ param columns the columns to get the * @ return */ public static List < Row > stdDevMeanColumns ( DataRowsFacade data , String ... columns ) { } }
return aggregate ( data , columns , new String [ ] { "stddev" , "mean" } ) ;
public class Client { /** * Logs this client on in standalone mode with the faked bootstrap data and shared local * distributed object manager . */ public void standaloneLogon ( BootstrapData data , DObjectManager omgr ) { } }
if ( ! _standalone ) { throw new IllegalStateException ( "Must call prepareStandaloneLogon() first." ) ; } gotBootstrap ( data , omgr ) ;
public class DebugDrawBox2D { /** * Sets the fill color from a Color3f * @ param color color where ( r , g , b ) = ( x , y , z ) */ private void setFillColor ( Color3f color ) { } }
if ( cacheFillR == color . x && cacheFillG == color . y && cacheFillB == color . z ) { // no need to re - set the fill color , just use the cached values } else { cacheFillR = color . x ; cacheFillG = color . y ; cacheFillB = color . z ; setFillColorFromCache ( ) ; }
public class BatchKernelImpl { /** * There are some assumptions that all partition subjobs have associated DB entries */ @ Override public List < BatchPartitionWorkUnit > buildOnRestartParallelPartitions ( PartitionsBuilderConfig config ) throws JobRestartException , JobExecutionAlreadyCompleteException , JobExecutionNotMostRecentException { } }
// Build one restartable work unit per partition sub-job in the config.
// jobModels and partitionProperties are parallel arrays indexed by `instance`.
List < JSLJob > jobModels = config . getJobModels ( ) ;
Properties [ ] partitionProperties = config . getPartitionProperties ( ) ;
List < BatchPartitionWorkUnit > batchWorkUnits = new ArrayList < BatchPartitionWorkUnit > ( jobModels . size ( ) ) ;
// for now let always use a Properties array . We can add some more convenience methods later for null properties and what not
int instance = 0 ;
for ( JSLJob parallelJob : jobModels ) {
    Properties partitionProps = ( partitionProperties == null ) ? null : partitionProperties [ instance ] ;
    try {
        // Restart the most recent execution of this partition sub-job.
        long execId = getMostRecentSubJobExecutionId ( parallelJob ) ;
        RuntimeJobExecution jobExecution = null ;
        try {
            jobExecution = JobExecutionHelper . restartPartition ( execId , parallelJob , partitionProps ) ;
            jobExecution . setPartitionInstance ( instance ) ;
        } catch ( NoSuchJobExecutionException e ) {
            // Per the method's assumption, every partition has a DB entry; a
            // missing one indicates internal corruption, not a caller error.
            String errorMsg = "Caught NoSuchJobExecutionException but this is an internal JobExecution so this shouldn't have happened: execId =" + execId ;
            logger . severe ( errorMsg ) ;
            throw new IllegalStateException ( errorMsg , e ) ;
        }
        if ( logger . isLoggable ( Level . FINE ) ) {
            logger . fine ( "JobExecution constructed: " + jobExecution ) ;
        }
        // Register the execution/controller pair before handing back the unit.
        BatchPartitionWorkUnit batchWork = new BatchPartitionWorkUnit ( this , jobExecution , config ) ;
        registerCurrentInstanceAndExecution ( jobExecution , batchWork . getController ( ) ) ;
        batchWorkUnits . add ( batchWork ) ;
    } catch ( JobExecutionAlreadyCompleteException e ) {
        // Already-complete partitions are skipped (no work unit created).
        logger . fine ( "This execution already completed: " + parallelJob . getId ( ) ) ;
    }
    instance ++ ;
}
return batchWorkUnits ;
public class UnixUserGroupInformation { /** * Create an immutable { @ link UnixUserGroupInformation } object . */ public static UnixUserGroupInformation createImmutable ( String [ ] ugi ) { } }
// Anonymous subclass whose readFields refuses deserialization, so the
// instance's state cannot be overwritten after construction via the
// Writable interface. Equality/hashing behavior is inherited unchanged.
return new UnixUserGroupInformation ( ugi ) { public void readFields ( DataInput in ) throws IOException { throw new UnsupportedOperationException ( ) ; } } ;
public class HtmlTool { /** * Extracts elements from the HTML content . * @ param content * @ param selector * @ param amount * @ return the remainder and a list of extracted elements . The main body ( remainder after * extraction ) is always returned as the first element of the list . */ private List < Element > extractElements ( String content , String selector , int amount ) { } }
Element body = parseContent ( content ) ; List < Element > elements = body . select ( selector ) ; if ( elements . size ( ) > 0 ) { elements = filterParents ( elements ) ; if ( amount >= 0 ) { // limit to the indicated amount elements = elements . subList ( 0 , Math . min ( amount , elements . size ( ) ) ) ; } // remove all from their parents for ( Element element : elements ) { element . remove ( ) ; } } List < Element > results = new ArrayList < Element > ( ) ; // first element is the body results . add ( body ) ; results . addAll ( elements ) ; return results ;
public class OpenAPIModelFilterAdapter { /** * { @ inheritDoc } */ @ Override public SecurityRequirement visitSecurityRequirement ( Context context , SecurityRequirement sr ) { } }
// Delegates to the wrapped visitor for its side effects; the visitor's
// return value is discarded and the original instance is passed through.
// NOTE(review): presumably intentional for an adapter that never replaces
// this node type — confirm against the other visit* overrides.
visitor . visitSecurityRequirement ( context , sr ) ; return sr ;
public class FeatureValidationCheck { /** * Creates a validation message for the feature and adds it to * the validation result . * @ param severity message severity * @ param origin the origin * @ param messageKey a message key * @ param params message parameters */ protected ValidationMessage < Origin > reportMessage ( Severity severity , Origin origin , String messageKey , Object ... params ) { } }
ValidationMessage < Origin > message = EntryValidations . createMessage ( origin , severity , messageKey , params ) ; message . getMessage ( ) ; // System . out . println ( " message = " + message . getMessage ( ) ) ; result . append ( message ) ; return message ;
public class JaegerConfiguration { /** * Sets the sampler configuration . * @ param samplerConfiguration The sampler configuration */ @ Inject public void setSamplerConfiguration ( @ Nullable Configuration . SamplerConfiguration samplerConfiguration ) { } }
if ( samplerConfiguration != null ) { configuration . withSampler ( samplerConfiguration ) ; }
public class PippoSettings { /** * Returns a list of floats from the specified name using the specified delimiter . * @ param name * @ param delimiter * @ return list of floats */ public List < Float > getFloats ( String name , String delimiter ) { } }
List < String > strings = getStrings ( name , delimiter ) ; List < Float > floats = new ArrayList < > ( strings . size ( ) ) ; for ( String value : strings ) { try { float i = Float . parseFloat ( value ) ; floats . add ( i ) ; } catch ( NumberFormatException e ) { } } return Collections . unmodifiableList ( floats ) ;
public class XmlReader { /** * Find position in input stream that matches XML path query * @ param xmlPathQuery XML path query * @ return { @ code boolean } true if found */ public boolean find ( String xmlPathQuery ) { } }
// Pull nodes until the running element path (currentPath) matches the query.
// currentPath is a stack of Element/Content entries mirroring open elements;
// on a match, the matching start node is pushed back onto nodeQueue so the
// caller can re-read it, and its path entry is popped again.
XmlPath xmlPath = XmlPathParser . parse ( xmlPathQuery ) ;
XmlNode node ;
while ( ( node = pullXmlNode ( ) ) != null ) {
    if ( node instanceof XmlStartElement ) {
        // Open a new element on the path, carrying its attributes for matching.
        XmlStartElement startElement = ( XmlStartElement ) node ;
        Element element = new Element ( startElement . getLocalName ( ) ) ;
        element . addAttributes ( startElement . getAttributes ( ) ) ;
        currentPath . addLast ( element ) ;
        if ( xmlPath . matches ( currentPath ) ) {
            // Re-queue the start node and undo the path push before returning.
            nodeQueue . push ( node ) ;
            currentPath . removeLast ( ) ;
            return true ;
        }
    } else if ( node instanceof XmlEndElement ) {
        // Pop any pending Content entry first, then the element itself.
        if ( currentPath . getLast ( ) instanceof Content ) {
            currentPath . removeLast ( ) ;
        }
        currentPath . removeLast ( ) ;
    } else if ( node instanceof XmlContent ) {
        // Replace any previous Content entry with the latest text chunk.
        XmlContent content = ( XmlContent ) node ;
        if ( currentPath . getLast ( ) instanceof Content ) {
            currentPath . removeLast ( ) ;
        }
        currentPath . addLast ( new Content ( content . getText ( ) ) ) ;
    } else {
        throw new XmlReaderException ( "Unknown XmlNode type: " + node ) ;
    }
}
// Stream exhausted without a match.
return false ;
public class SimpleFibonacciHeap { /** * { @ inheritDoc } */ @ Override @ SuppressWarnings ( "unchecked" ) @ ConstantTime ( amortized = true ) public void meld ( MergeableAddressableHeap < K , V > other ) { } }
// Merge `other` into this heap in O(1) amortized time by linking roots,
// then neuter `other` so it cannot be used again (its `other` field points
// to the owning heap after a meld).
SimpleFibonacciHeap < K , V > h = ( SimpleFibonacciHeap < K , V > ) other ;
// check same comparator : both heaps must order keys identically.
if ( comparator != null ) {
    if ( h . comparator == null || ! h . comparator . equals ( comparator ) ) {
        throw new IllegalArgumentException ( "Cannot meld heaps using different comparators!" ) ;
    }
} else if ( h . comparator != null ) {
    throw new IllegalArgumentException ( "Cannot meld heaps using different comparators!" ) ;
}
// h.other != h means h was already melded into another heap.
if ( h . other != h ) {
    throw new IllegalStateException ( "A heap cannot be used after a meld." ) ;
}
// meld : link the two root nodes, keeping the smaller key as the new root.
if ( root == null ) {
    root = h . root ;
} else if ( h . root != null ) {
    if ( comparator == null ) {
        if ( ( ( Comparable < ? super K > ) h . root . key ) . compareTo ( root . key ) < 0 ) {
            root = link ( root , h . root ) ;
        } else {
            link ( h . root , root ) ;
        }
    } else {
        if ( comparator . compare ( h . root . key , root . key ) < 0 ) {
            root = link ( root , h . root ) ;
        } else {
            link ( h . root , root ) ;
        }
    }
}
size += h . size ;
// clear other
h . size = 0 ;
h . root = null ;
// take ownership : mark h as melded so further use throws above.
h . other = this ;
public class AppServiceEnvironmentsInner { /** * Get available SKUs for scaling a worker pool . * Get available SKUs for scaling a worker pool . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param name Name of the App Service Environment . * @ param workerPoolName Name of the worker pool . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; SkuInfoInner & gt ; object */ public Observable < ServiceResponse < Page < SkuInfoInner > > > listWorkerPoolSkusWithServiceResponseAsync ( final String resourceGroupName , final String name , final String workerPoolName ) { } }
// Fetch the first page, then recursively concatenate every following page
// by chasing nextPageLink until it is null (standard ARM paging pattern).
return listWorkerPoolSkusSinglePageAsync ( resourceGroupName , name , workerPoolName )
    . concatMap ( new Func1 < ServiceResponse < Page < SkuInfoInner > > , Observable < ServiceResponse < Page < SkuInfoInner > > > > ( ) {
        @ Override
        public Observable < ServiceResponse < Page < SkuInfoInner > > > call ( ServiceResponse < Page < SkuInfoInner > > page ) {
            String nextPageLink = page . body ( ) . nextPageLink ( ) ;
            if ( nextPageLink == null ) {
                // Last page: emit it alone.
                return Observable . just ( page ) ;
            }
            // Emit this page followed by the remaining pages.
            return Observable . just ( page ) . concatWith ( listWorkerPoolSkusNextWithServiceResponseAsync ( nextPageLink ) ) ;
        }
    } ) ;
public class UResourceBundle { /** * Returns a resource in a given resource that has a given key , or null if the * resource is not found . * @ param aKey the key associated with the wanted resource * @ return the resource , or null * @ see # get ( String ) * @ deprecated This API is ICU internal only . * @ hide draft / provisional / internal are hidden on Android */ @ Deprecated protected UResourceBundle findTopLevel ( String aKey ) { } }
// NOTE : this only works for top - level resources . For resources at lower // levels , it fails when you fall back to the parent , since you ' re now // looking at root resources , not at the corresponding nested resource . for ( UResourceBundle res = this ; res != null ; res = res . getParent ( ) ) { UResourceBundle obj = res . handleGet ( aKey , null , this ) ; if ( obj != null ) { return obj ; } } return null ;
public class ICUHumanize { /** * Same as { @ link # smartDateFormat ( Date ) smartDateFormat } for the specified * locale . * @ param value * The date to be formatted * @ param skeleton * A pattern containing only the variable fields . For example , * " MMMdd " and " mmhh " are skeletons . * @ param locale * Target locale * @ return A string with a text representation of the date */ public static String smartDateFormat ( final Date value , final String skeleton , final Locale locale ) { } }
// Run the two-argument overload with the thread's locale temporarily switched
// to the target locale; withinLocale restores the previous locale afterwards.
return withinLocale ( new Callable < String > ( ) { public String call ( ) throws Exception { return smartDateFormat ( value , skeleton ) ; } } , locale ) ;
public class Quarters { /** * / * [ deutsch ] * < p > Interpretiert das kanonische Format & quot ; PnQ & quot ; mit optionalem vorangehenden Minus - Zeichen . < / p > * @ param period the formatted string to be parsed * @ return parsed instance * @ throws ParseException if given argument cannot be parsed */ public static Quarters parsePeriod ( String period ) throws ParseException { } }
int amount = SingleUnitTimeSpan . parsePeriod ( period , 'Q' ) ; return Quarters . of ( amount ) ;
public class ClasspathElementModule { /** * Scan for package matches within module . * @ param log * the log */ @ Override void scanPaths ( final LogNode log ) { } }
// Single-shot scan of this module's resources: list all relative paths,
// sort them, then whitelist/blacklist each one against the scan spec,
// recording whitelisted resources and the module file's last-modified time.
if ( skipClasspathElement ) { return ; }
if ( scanned . getAndSet ( true ) ) {
    // Should not happen
    throw new IllegalArgumentException ( "Already scanned classpath element " + toString ( ) ) ;
}
final String moduleLocationStr = moduleRef . getLocationStr ( ) ;
final LogNode subLog = log == null ? null : log . log ( moduleLocationStr , "Scanning module " + moduleRef . getName ( ) ) ;
try ( RecycleOnClose < ModuleReaderProxy , IOException > moduleReaderProxyRecycleOnClose = moduleReaderProxyRecycler . acquireRecycleOnClose ( ) ) {
    // Look for whitelisted files in the module .
    List < String > resourceRelativePaths ;
    try {
        resourceRelativePaths = moduleReaderProxyRecycleOnClose . get ( ) . list ( ) ;
    } catch ( final SecurityException e ) {
        if ( subLog != null ) { subLog . log ( "Could not get resource list for module " + moduleRef . getName ( ) , e ) ; }
        return ;
    }
    CollectionUtils . sortIfNotEmpty ( resourceRelativePaths ) ;
    // Cache the previous path's parent-dir match status: sorted order means
    // consecutive resources usually share a parent directory.
    String prevParentRelativePath = null ;
    ScanSpecPathMatch prevParentMatchStatus = null ;
    for ( final String relativePath : resourceRelativePaths ) {
        // From ModuleReader # find ( ) : " If the module reader can determine that the name locates a
        // directory then the resulting URI will end with a slash ( ' / ' ) . " But from the documentation
        // for ModuleReader # list ( ) : " Whether the stream of elements includes names corresponding to
        // directories in the module is module reader specific . " We don ' t have a way of checking if
        // a resource is a directory without trying to open it , unless ModuleReader # list ( ) also decides
        // to put a " / " on the end of resource paths corresponding to directories . Skip directories if
        // they are found , but if they are not able to be skipped , we will have to settle for having
        // some IOExceptions thrown when directories are mistaken for resource files .
        if ( relativePath . endsWith ( "/" ) ) { continue ; }
        // Whitelist / blacklist classpath elements based on file resource paths
        checkResourcePathWhiteBlackList ( relativePath , log ) ;
        if ( skipClasspathElement ) { return ; }
        // Get match status of the parent directory of this resource ' s relative path ( or reuse the last
        // match status for speed , if the directory name hasn ' t changed ) .
        final int lastSlashIdx = relativePath . lastIndexOf ( '/' ) ;
        final String parentRelativePath = lastSlashIdx < 0 ? "/" : relativePath . substring ( 0 , lastSlashIdx + 1 ) ;
        final boolean parentRelativePathChanged = ! parentRelativePath . equals ( prevParentRelativePath ) ;
        final ScanSpecPathMatch parentMatchStatus = prevParentRelativePath == null || parentRelativePathChanged ? scanSpec . dirWhitelistMatchStatus ( parentRelativePath ) : prevParentMatchStatus ;
        prevParentRelativePath = parentRelativePath ;
        prevParentMatchStatus = parentMatchStatus ;
        if ( parentMatchStatus == ScanSpecPathMatch . HAS_BLACKLISTED_PATH_PREFIX ) {
            // The parent dir or one of its ancestral dirs is blacklisted
            if ( subLog != null ) { subLog . log ( "Skipping blacklisted path: " + relativePath ) ; }
            continue ;
        }
        // Found non - blacklisted relative path
        if ( allResourcePaths . add ( relativePath ) // If resource is whitelisted
                && ( parentMatchStatus == ScanSpecPathMatch . HAS_WHITELISTED_PATH_PREFIX
                        || parentMatchStatus == ScanSpecPathMatch . AT_WHITELISTED_PATH
                        || ( parentMatchStatus == ScanSpecPathMatch . AT_WHITELISTED_CLASS_PACKAGE && scanSpec . classfileIsSpecificallyWhitelisted ( relativePath ) )
                        || ( scanSpec . enableClassInfo && relativePath . equals ( "module-info.class" ) ) ) ) {
            // Add whitelisted resource
            final Resource resource = newResource ( relativePath ) ;
            addWhitelistedResource ( resource , parentMatchStatus , subLog ) ;
        }
    }
    // Save last modified time for the module file
    final File moduleFile = moduleRef . getLocationFile ( ) ;
    if ( moduleFile != null && moduleFile . exists ( ) ) {
        fileToLastModified . put ( moduleFile , moduleFile . lastModified ( ) ) ;
    }
} catch ( final IOException e ) {
    // An unreadable module is skipped, not fatal to the whole scan.
    if ( subLog != null ) { subLog . log ( "Exception opening module " + moduleRef . getName ( ) , e ) ; }
    skipClasspathElement = true ;
}
finishScanPaths ( subLog ) ;
public class ConsoleMenu { /** * Gets a String from the System . in * @ param msg * for the command line * @ return String as entered by the user of the console app */ public static String getString ( final String msg , final String defaultVal ) { } }
String s = getString ( msg + "(default:" + defaultVal + "):" ) ; if ( StringUtils . isBlank ( s ) ) { s = defaultVal ; } return s ;
public class TemplateEngine { /** * Sets a single template resolver for this template engine . * Calling this method is equivalent to calling { @ link # setTemplateResolvers ( Set ) } * passing a Set with only one template resolver . * @ param templateResolver the template resolver to be set . */ public void setTemplateResolver ( final ITemplateResolver templateResolver ) { } }
Validate . notNull ( templateResolver , "Template Resolver cannot be null" ) ; checkNotInitialized ( ) ; this . templateResolvers . clear ( ) ; this . templateResolvers . add ( templateResolver ) ;
public class FrustumIntersection { /** * Update the stored frustum planes of < code > this < / code > { @ link FrustumIntersection } with the given { @ link Matrix4fc matrix } and * allow to optimize the frustum plane extraction in the case when no intersection test is needed for spheres . * Reference : < a href = " http : / / gamedevs . org / uploads / fast - extraction - viewing - frustum - planes - from - world - view - projection - matrix . pdf " > * Fast Extraction of Viewing Frustum Planes from the World - View - Projection Matrix < / a > * @ param m * the { @ link Matrix4fc matrix } to update < code > this < / code > frustum culler ' s frustum planes from * @ param allowTestSpheres * whether the methods { @ link # testSphere ( Vector3fc , float ) } , { @ link # testSphere ( float , float , float , float ) } , * { @ link # intersectSphere ( Vector3fc , float ) } or { @ link # intersectSphere ( float , float , float , float ) } will be used . * If no spheres need to be tested , then < code > false < / code > should be used * @ return this */ public FrustumIntersection set ( Matrix4fc m , boolean allowTestSpheres ) { } }
// Extract the six frustum planes per the referenced Gribb/Hartmann paper:
// each plane is a sum/difference of matrix column 3 with another column.
// When allowTestSpheres is set, each plane is normalized so the plane
// equation yields true signed distances (required by the sphere tests).
float invl ;
// plane 0: column3 + column0
nxX = m . m03 ( ) + m . m00 ( ) ; nxY = m . m13 ( ) + m . m10 ( ) ; nxZ = m . m23 ( ) + m . m20 ( ) ; nxW = m . m33 ( ) + m . m30 ( ) ;
if ( allowTestSpheres ) { invl = ( float ) ( 1.0 / Math . sqrt ( nxX * nxX + nxY * nxY + nxZ * nxZ ) ) ; nxX *= invl ; nxY *= invl ; nxZ *= invl ; nxW *= invl ; }
planes [ 0 ] . set ( nxX , nxY , nxZ , nxW ) ;
// plane 1: column3 - column0
pxX = m . m03 ( ) - m . m00 ( ) ; pxY = m . m13 ( ) - m . m10 ( ) ; pxZ = m . m23 ( ) - m . m20 ( ) ; pxW = m . m33 ( ) - m . m30 ( ) ;
if ( allowTestSpheres ) { invl = ( float ) ( 1.0 / Math . sqrt ( pxX * pxX + pxY * pxY + pxZ * pxZ ) ) ; pxX *= invl ; pxY *= invl ; pxZ *= invl ; pxW *= invl ; }
planes [ 1 ] . set ( pxX , pxY , pxZ , pxW ) ;
// plane 2: column3 + column1
nyX = m . m03 ( ) + m . m01 ( ) ; nyY = m . m13 ( ) + m . m11 ( ) ; nyZ = m . m23 ( ) + m . m21 ( ) ; nyW = m . m33 ( ) + m . m31 ( ) ;
if ( allowTestSpheres ) { invl = ( float ) ( 1.0 / Math . sqrt ( nyX * nyX + nyY * nyY + nyZ * nyZ ) ) ; nyX *= invl ; nyY *= invl ; nyZ *= invl ; nyW *= invl ; }
planes [ 2 ] . set ( nyX , nyY , nyZ , nyW ) ;
// plane 3: column3 - column1
pyX = m . m03 ( ) - m . m01 ( ) ; pyY = m . m13 ( ) - m . m11 ( ) ; pyZ = m . m23 ( ) - m . m21 ( ) ; pyW = m . m33 ( ) - m . m31 ( ) ;
if ( allowTestSpheres ) { invl = ( float ) ( 1.0 / Math . sqrt ( pyX * pyX + pyY * pyY + pyZ * pyZ ) ) ; pyX *= invl ; pyY *= invl ; pyZ *= invl ; pyW *= invl ; }
planes [ 3 ] . set ( pyX , pyY , pyZ , pyW ) ;
// plane 4: column3 + column2
nzX = m . m03 ( ) + m . m02 ( ) ; nzY = m . m13 ( ) + m . m12 ( ) ; nzZ = m . m23 ( ) + m . m22 ( ) ; nzW = m . m33 ( ) + m . m32 ( ) ;
if ( allowTestSpheres ) { invl = ( float ) ( 1.0 / Math . sqrt ( nzX * nzX + nzY * nzY + nzZ * nzZ ) ) ; nzX *= invl ; nzY *= invl ; nzZ *= invl ; nzW *= invl ; }
planes [ 4 ] . set ( nzX , nzY , nzZ , nzW ) ;
// plane 5: column3 - column2
pzX = m . m03 ( ) - m . m02 ( ) ; pzY = m . m13 ( ) - m . m12 ( ) ; pzZ = m . m23 ( ) - m . m22 ( ) ; pzW = m . m33 ( ) - m . m32 ( ) ;
if ( allowTestSpheres ) { invl = ( float ) ( 1.0 / Math . sqrt ( pzX * pzX + pzY * pzY + pzZ * pzZ ) ) ; pzX *= invl ; pzY *= invl ; pzZ *= invl ; pzW *= invl ; }
planes [ 5 ] . set ( pzX , pzY , pzZ , pzW ) ;
return this ;
public class Constraint { /** * Creates a new map representing a { @ link Min } validation constraint . * @ param min the minimum value * @ return a map */ static Map < String , Object > minPayload ( final Object min ) { } }
if ( min == null ) { return null ; } Map < String , Object > payload = new LinkedHashMap < > ( ) ; payload . put ( "value" , min ) ; payload . put ( "message" , MSG_PREFIX + VALIDATORS . get ( Min . class ) ) ; return payload ;
public class JspContextWrapper { /** * LIDB4147-9 Begin - modified for JSP 2.1 */ public Object resolveVariable ( String pName ) throws ELException { } }
ELContext ctx = this . getELContext ( ) ; return ctx . getELResolver ( ) . getValue ( ctx , null , pName ) ;
public class PubSubInputHandler { /** * The local put method is driven when the producer is attached locally * attached to this PubSub Input handler */ private void localPut ( MessageItem msg , TransactionCommon transaction ) throws SIIncorrectCallException , SIResourceException , SINotPossibleInCurrentConfigurationException , SINotAuthorizedException , SILimitExceededException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "localPut" , new Object [ ] { msg , transaction } ) ; boolean stored = false ; /* * If a non - empty frp exists at this point then send the message to the exception * destination */ if ( ! msg . getMessage ( ) . isForwardRoutingPathEmpty ( ) ) { // If we have a non - empty forward routing path then throw an exception . // A topicspace can only be the final element in the path . SIMPIncorrectCallException e = new SIMPIncorrectCallException ( nls . getFormattedMessage ( "FORWARD_ROUTING_PATH_ERROR_CWSIP0249" , new Object [ ] { _destination . getName ( ) , _messageProcessor . getMessagingEngineName ( ) } , null ) ) ; e . setExceptionReason ( SIRCConstants . SIRC0037_INVALID_ROUTING_PATH_ERROR ) ; e . setExceptionInserts ( new String [ ] { _destination . getName ( ) , _messageProcessor . getMessagingEngineName ( ) , "unknown" , SIMPUtils . getStackTrace ( e ) } ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . exception ( tc , e ) ; SibTr . exit ( tc , "localPut" , e ) ; } throw e ; } // If the message came in from a remote bus then // check whether the userId is authorised to access this destination if ( msg . isFromRemoteBus ( ) ) { // Check whether bus security is enabled if ( _messageProcessor . isBusSecure ( ) ) { JsMessage jsMsg = msg . getMessage ( ) ; // Before we test for anything else , see whether this message // was sent by the privileged Jetstream SIBServerSubject . If it was // then we bypass the security checks if ( ! _messageProcessor . getAuthorisationUtils ( ) . sentBySIBServer ( jsMsg ) ) { // Check authority to produce to destination String userid = null ; if ( _destination . isLink ( ) && ! _destination . isMQLink ( ) ) { userid = ( ( LinkHandler ) _destination ) . 
getInboundUserid ( ) ; } // If the InboundUserid is null or we ' re not working with a link // Set the userid from the message if ( userid == null ) { // Use the id extracted from the message for access checks userid = jsMsg . getSecurityUserid ( ) ; } // Defect 240261 : Map a null userid to an empty string if ( userid == null ) { userid = "" ; // Empty string means those users that have not // been authenticated , but who are still , of course , // members of EVERYONE . } String discriminator = msg . getMessage ( ) . getDiscriminator ( ) ; // Create secContext from userid and discriminator SecurityContext secContext = new SecurityContext ( userid , discriminator ) ; if ( ! _destination . checkDestinationAccess ( secContext , OperationType . SEND ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "localPut" , "not authorized to produce to this destination" ) ; // Build the message for the Exception and the Notification String nlsMessage = nls_cwsik . getFormattedMessage ( "DELIVERY_ERROR_SIRC_18" , // USER _ NOT _ AUTH _ SEND _ ERROR _ CWSIP0306 new Object [ ] { _destination . getName ( ) , userid } , null ) ; // Fire a Notification if Eventing is enabled _messageProcessor . getAccessChecker ( ) . fireDestinationAccessNotAuthorizedEvent ( _destination . getName ( ) , userid , OperationType . SEND , nlsMessage ) ; // Thrown if user denied access to destination SIMPNotAuthorizedException e = new SIMPNotAuthorizedException ( nlsMessage ) ; e . setExceptionReason ( SIRCConstants . SIRC0018_USER_NOT_AUTH_SEND_ERROR ) ; e . setExceptionInserts ( new String [ ] { _destination . getName ( ) , userid } ) ; throw e ; } if ( ! _destination . checkDiscriminatorAccess ( secContext , OperationType . SEND ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . 
exit ( tc , "localPut" , "not authorized to produce to this destination's discriminator" ) ; // Write an audit record if access is denied SibTr . audit ( tc , nls_cwsik . getFormattedMessage ( "DELIVERY_ERROR_SIRC_20" , // USER _ NOT _ AUTH _ SEND _ ERROR _ CWSIP0308 new Object [ ] { _destination . getName ( ) , secContext . getDiscriminator ( ) , userid } , null ) ) ; // Thrown if user denied access to destination SIMPNotAuthorizedException e = new SIMPNotAuthorizedException ( nls_cwsik . getFormattedMessage ( "DELIVERY_ERROR_SIRC_20" , // USER _ NOT _ AUTH _ SEND _ ERROR _ CWSIP0308 new Object [ ] { _destination . getName ( ) , secContext . getDiscriminator ( ) , userid } , null ) ) ; e . setExceptionReason ( SIRCConstants . SIRC0020_USER_NOT_AUTH_SEND_ERROR ) ; e . setExceptionInserts ( new String [ ] { _destination . getName ( ) , secContext . getDiscriminator ( ) , userid } ) ; throw e ; } } } } boolean forcePut = msg . isForcePut ( ) ; // Check SendAllowed for local case . // send allowed can be false ONLY if this msg is not force put boolean isSendAllowed = forcePut || ( _itemStream . isSendAllowed ( ) && _destination . isSendAllowed ( ) ) ; if ( ! isSendAllowed ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "localPut" , "Destination send disallowed" ) ; SIMPNotPossibleInCurrentConfigurationException e = new SIMPNotPossibleInCurrentConfigurationException ( nls . getFormattedMessage ( "DESTINATION_SEND_DISALLOWED_CWSIP0253" , new Object [ ] { _destination . getName ( ) , _messageProcessor . getMessagingEngineName ( ) } , null ) ) ; e . setExceptionReason ( SIRCConstants . SIRC0901_INTERNAL_MESSAGING_ERROR ) ; e . setExceptionInserts ( new String [ ] { "com.ibm.ws.sib.processor.impl.PubSubInputHandler.localPut" , "1:1334:1.329.1.1" , SIMPUtils . 
getStackTrace ( e ) } ) ; throw e ; } // check total items on itemstream not exceeded ( allow extra 1 for ref stream ) // but only if forcePut = = false long topicSpaceHighLimit = _destination . getPublishPoint ( ) . getDestHighMsgs ( ) ; if ( ! forcePut && ( ( topicSpaceHighLimit != - 1 ) && ( _itemStream . getTotalMsgCount ( ) >= topicSpaceHighLimit ) ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "localPut" , "Destination reached high limit" ) ; SIMPLimitExceededException e = new SIMPLimitExceededException ( nls . getFormattedMessage ( "DESTINATION_HIGH_MESSAGES_ERROR_CWSIP0251" , new Object [ ] { _destination . getName ( ) , new Long ( topicSpaceHighLimit ) , _messageProcessor . getMessagingEngineName ( ) } , null ) ) ; e . setExceptionReason ( SIRCConstants . SIRC0025_DESTINATION_HIGH_MESSAGES_ERROR ) ; e . setExceptionInserts ( new String [ ] { _destination . getName ( ) , new Long ( topicSpaceHighLimit ) . toString ( ) } ) ; throw e ; } if ( msg . isTransacted ( ) ) { // Dont store the msg till pre - prepare when we know if there are any // subscribers . For now , just register for the pre - prepare callback on // the transaction . / / 183715.1 registerMessage ( msg , transaction ) ; // 183715.1 } else { LocalTransaction siTran = _txManager . createLocalTransaction ( false ) ; // If COD reports are required , register for the precommit callback if ( msg . getReportCOD ( ) != null && _destination instanceof BaseDestinationHandler ) msg . registerMessageEventListener ( MessageEvents . COD_CALLBACK , ( BaseDestinationHandler ) _destination ) ; try { // Perform the PubSub match . stored = localFanOut ( msg , siTran , false ) ; if ( stored ) siTran . commit ( ) ; else siTran . rollback ( ) ; } catch ( RuntimeException e ) { // FFDC FFDCFilter . 
processException ( e , "com.ibm.ws.sib.processor.impl.PubSubInputHandler.localPut" , "1:1397:1.329.1.1" , this ) ; SIMPErrorException ee = new SIMPErrorException ( e ) ; ee . setExceptionReason ( SIRCConstants . SIRC0901_INTERNAL_MESSAGING_ERROR ) ; ee . setExceptionInserts ( new String [ ] { "com.ibm.ws.sib.processor.impl.ProducerSessionImpl.handleMessage" , "1:1404:1.329.1.1" , SIMPUtils . getStackTrace ( e ) } ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . exception ( tc , ee ) ; SibTr . exit ( tc , "localPut" , ee ) ; } throw ee ; } catch ( SIRollbackException e ) { // No FFDC code needed handleRollback ( siTran ) ; SIMPRollbackException ee = new SIMPRollbackException ( e . getMessage ( ) ) ; ee . setStackTrace ( e . getStackTrace ( ) ) ; ee . setExceptionReason ( SIRCConstants . SIRC0901_INTERNAL_MESSAGING_ERROR ) ; ee . setExceptionInserts ( new String [ ] { "com.ibm.ws.sib.processor.impl.ProducerSessionImpl.handleMessage" , "1:1424:1.329.1.1" , SIMPUtils . getStackTrace ( e ) } ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "localPut" , ee ) ; throw ee ; } catch ( SIConnectionLostException e ) { // No FFDC code needed handleRollback ( siTran ) ; SIMPConnectionLostException ee = new SIMPConnectionLostException ( e . getMessage ( ) ) ; ee . setStackTrace ( e . getStackTrace ( ) ) ; ee . setExceptionReason ( SIRCConstants . SIRC0901_INTERNAL_MESSAGING_ERROR ) ; ee . setExceptionInserts ( new String [ ] { "com.ibm.ws.sib.processor.impl.ProducerSessionImpl.handleMessage" , "1:1441:1.329.1.1" , SIMPUtils . getStackTrace ( e ) } ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "localPut" , ee ) ; throw ee ; } catch ( SIIncorrectCallException e ) { // No FFDC code needed handleRollback ( siTran ) ; SIMPIncorrectCallException ee = new SIMPIncorrectCallException ( e . getMessage ( ) ) ; ee . setStackTrace ( e . 
getStackTrace ( ) ) ; ee . setExceptionReason ( SIRCConstants . SIRC0901_INTERNAL_MESSAGING_ERROR ) ; ee . setExceptionInserts ( new String [ ] { "com.ibm.ws.sib.processor.impl.ProducerSessionImpl.handleMessage" , "1:1458:1.329.1.1" , SIMPUtils . getStackTrace ( e ) } ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "localPut" , ee ) ; throw ee ; } catch ( SIResourceException e ) { // No FFDC code needed handleRollback ( siTran ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "localPut" , e ) ; SIMPResourceException ee = new SIMPResourceException ( e ) ; ee . setExceptionReason ( SIRCConstants . SIRC0901_INTERNAL_MESSAGING_ERROR ) ; ee . setExceptionInserts ( new String [ ] { "com.ibm.ws.sib.processor.impl.ProducerSessionImpl.handleMessage" , "1:1475:1.329.1.1" , SIMPUtils . getStackTrace ( e ) } ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "localPut" , ee ) ; throw ee ; } // Release the JsMessage from the parent MessageItem ( Any MessageItemReferences will // have their own references to the JsMessage ( unless they ' ve released their ' s too ) msg . releaseJsMessage ( ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "localPut" ) ;
public class InvokerHelper { /** * Appends an object to an Appendable using Groovy ' s default representation for the object . */ public static void append ( Appendable out , Object object ) throws IOException { } }
if ( object instanceof String ) { out . append ( ( String ) object ) ; } else if ( object instanceof Object [ ] ) { out . append ( toArrayString ( ( Object [ ] ) object ) ) ; } else if ( object instanceof Map ) { out . append ( toMapString ( ( Map ) object ) ) ; } else if ( object instanceof Collection ) { out . append ( toListString ( ( Collection ) object ) ) ; } else if ( object instanceof Writable ) { Writable writable = ( Writable ) object ; Writer stringWriter = new StringBuilderWriter ( ) ; writable . writeTo ( stringWriter ) ; out . append ( stringWriter . toString ( ) ) ; } else if ( object instanceof InputStream || object instanceof Reader ) { // Copy stream to stream Reader reader ; if ( object instanceof InputStream ) { reader = new InputStreamReader ( ( InputStream ) object ) ; } else { reader = ( Reader ) object ; } char [ ] chars = new char [ 8192 ] ; int i ; while ( ( i = reader . read ( chars ) ) != - 1 ) { for ( int j = 0 ; j < i ; j ++ ) { out . append ( chars [ j ] ) ; } } reader . close ( ) ; } else { out . append ( toString ( object ) ) ; }
public class ImageRectangleF { /** * Round the floating point rectangle to an integer rectangle * @ return image rectangle */ public ImageRectangle round ( ) { } }
return new ImageRectangle ( Math . round ( left ) , Math . round ( top ) , Math . round ( right ) , Math . round ( bottom ) ) ;
public class Lookup { /** * Sets the search path that will be used as the default by future Lookups . * @ param domains The default search path . * @ throws TextParseException A name in the array is not a valid DNS name . */ public static synchronized void setDefaultSearchPath ( String [ ] domains ) throws TextParseException { } }
if ( domains == null ) { defaultSearchPath = null ; return ; } Name [ ] newdomains = new Name [ domains . length ] ; for ( int i = 0 ; i < domains . length ; i ++ ) newdomains [ i ] = Name . fromString ( domains [ i ] , Name . root ) ; defaultSearchPath = newdomains ;
public class InfoPanelService { /** * Finds the " nearest " info panel . * @ param element The UI element from which to begin the search . * @ param activeOnly If true , only active info panels are considered . * @ return The nearest active info panel , or null if none found . */ public static IInfoPanel findInfoPanel ( ElementBase element , boolean activeOnly ) { } }
ElementBase parent = element ; ElementBase previousParent ; IInfoPanel infoPanel = searchChildren ( element , null , activeOnly ) ; while ( ( infoPanel == null ) && ( parent != null ) ) { previousParent = parent ; parent = parent . getParent ( ) ; infoPanel = searchChildren ( parent , previousParent , activeOnly ) ; } return infoPanel ;
public class JmsConnectionImpl {
    /**
     * (non-Javadoc)
     * @see com.ibm.ws.sib.api.jms.JmsConnection#reportException(javax.jms.JMSException)
     * This method directly invokes any registered exception listener.
     * No locks are taken by this method, so it is the caller's responsibility to
     * take any locks that may be needed.
     */
    private void callExceptionListener(JMSException e) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "callExceptionListener");
        // Snapshot the current ExceptionListener; from this point on we invoke this
        // reference even if the registered listener is swapped concurrently.
        ExceptionListener elLocal = elRef.get();
        if (elLocal != null) {
            // Protect our code from badly behaved exception listeners: any runtime
            // failure inside user code is traced and swallowed, never propagated.
            try {
                // Trace the handler class and the exception being delivered.
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    SibTr.debug(this, tc, "Exception handler class: " + elLocal.getClass().getName());
                    SibTr.debug(this, tc, "Exception: ", e);
                }
                // Pass the exception on to the user's listener.
                elLocal.onException(e);
            } catch (RuntimeException exc) {
                // No FFDC code needed
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(this, tc, "User ExceptionListener threw exception", exc);
            }
        } else {
            // Nothing registered: the exception is dropped (debug-traced only).
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "No exception listener is currently set for this connection.");
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "callExceptionListener");
    }
}
public class ProjectSummaryMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ProjectSummary projectSummary , ProtocolMarshaller protocolMarshaller ) { } }
if ( projectSummary == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( projectSummary . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( projectSummary . getProjectId ( ) , PROJECTID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class AiMesh { /** * Returns the number of vertex indices for a single face . * @ param face the face * @ return the number of indices */ public int getFaceNumIndices ( int face ) { } }
if ( null == m_faceOffsets ) { if ( face >= m_numFaces || face < 0 ) { throw new IndexOutOfBoundsException ( "Index: " + face + ", Size: " + m_numFaces ) ; } return 3 ; } else { /* * no need to perform bound checks here as the array access will * throw IndexOutOfBoundsExceptions if the index is invalid */ if ( face == m_numFaces - 1 ) { return m_faces . capacity ( ) / 4 - m_faceOffsets . getInt ( face * 4 ) ; } return m_faceOffsets . getInt ( ( face + 1 ) * 4 ) - m_faceOffsets . getInt ( face * 4 ) ; }
public class ExportInfoMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ExportInfo exportInfo , ProtocolMarshaller protocolMarshaller ) { } }
if ( exportInfo == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( exportInfo . getExportId ( ) , EXPORTID_BINDING ) ; protocolMarshaller . marshall ( exportInfo . getExportStatus ( ) , EXPORTSTATUS_BINDING ) ; protocolMarshaller . marshall ( exportInfo . getStatusMessage ( ) , STATUSMESSAGE_BINDING ) ; protocolMarshaller . marshall ( exportInfo . getConfigurationsDownloadUrl ( ) , CONFIGURATIONSDOWNLOADURL_BINDING ) ; protocolMarshaller . marshall ( exportInfo . getExportRequestTime ( ) , EXPORTREQUESTTIME_BINDING ) ; protocolMarshaller . marshall ( exportInfo . getIsTruncated ( ) , ISTRUNCATED_BINDING ) ; protocolMarshaller . marshall ( exportInfo . getRequestedStartTime ( ) , REQUESTEDSTARTTIME_BINDING ) ; protocolMarshaller . marshall ( exportInfo . getRequestedEndTime ( ) , REQUESTEDENDTIME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class JsonObject { /** * Retrieves the decrypted value from the field name and casts it to { @ link String } . * Note : Use of the Field Level Encryption functionality provided in the * com . couchbase . client . encryption namespace provided by Couchbase is * subject to the Couchbase Inc . Enterprise Subscription License Agreement * at https : / / www . couchbase . com / ESLA - 11132015. * @ param name the name of the field . * @ param providerName the crypto provider name for decryption . * @ return the result or null if it does not exist . */ public String getAndDecryptString ( String name , String providerName ) throws Exception { } }
return ( String ) getAndDecrypt ( name , providerName ) ;
public class IfcPersonAndOrganizationImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ SuppressWarnings ( "unchecked" ) @ Override public EList < IfcActorRole > getRoles ( ) { } }
return ( EList < IfcActorRole > ) eGet ( Ifc4Package . Literals . IFC_PERSON_AND_ORGANIZATION__ROLES , true ) ;
public class ASMDatumWriterFactory { /** * Creates a { @ link DatumWriter } that is able to encode given data type with the given { @ link Schema } . * The instance created is thread safe and reusable . * @ param type Type information of the data type to be encoded . * @ param schema Schema of the data type . * @ param < T > Type of the data type . * @ return A { @ link DatumWriter } instance . */ @ SuppressWarnings ( "unchecked" ) @ Override public < T > DatumWriter < T > create ( TypeToken < T > type , Schema schema ) { } }
try { Class < DatumWriter < ? > > writerClass = datumWriterClasses . getUnchecked ( new CacheKey ( schema , type ) ) ; return ( DatumWriter < T > ) writerClass . getConstructor ( Schema . class , FieldAccessorFactory . class ) . newInstance ( schema , fieldAccessorFactory ) ; } catch ( Exception e ) { throw Throwables . propagate ( e ) ; }
public class QueryPlanner { /** * Check that " MIGRATE FROM tbl WHERE . . . " statement is valid . * @ param sql SQL statement * @ param xmlSQL HSQL parsed tree * @ param db database catalog */ private static void validateMigrateStmt ( String sql , VoltXMLElement xmlSQL , Database db ) { } }
final Map < String , String > attributes = xmlSQL . attributes ; assert attributes . size ( ) == 1 ; final Table targetTable = db . getTables ( ) . get ( attributes . get ( "table" ) ) ; assert targetTable != null ; final CatalogMap < TimeToLive > ttls = targetTable . getTimetolive ( ) ; if ( ttls . isEmpty ( ) ) { throw new PlanningErrorException ( String . format ( "%s: Cannot migrate from table %s because it does not have a TTL column" , sql , targetTable . getTypeName ( ) ) ) ; } else { final Column ttl = ttls . iterator ( ) . next ( ) . getTtlcolumn ( ) ; final TupleValueExpression columnExpression = new TupleValueExpression ( targetTable . getTypeName ( ) , ttl . getName ( ) , ttl . getIndex ( ) ) ; if ( ! ExpressionUtil . collectTerminals ( ExpressionUtil . from ( db , VoltXMLElementHelper . getFirstChild ( VoltXMLElementHelper . getFirstChild ( xmlSQL , "condition" ) , "operation" ) ) ) . contains ( columnExpression ) ) { throw new PlanningErrorException ( String . format ( "%s: Cannot migrate from table %s because the WHERE caluse does not contain TTL column %s" , sql , targetTable . getTypeName ( ) , ttl . getName ( ) ) ) ; } }
public class Ifc4PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcColourOrFactor() {
        // Lazily resolve the classifier from the globally registered Ifc4 package.
        // Index 1123 is emitted by the EMF generator — presumably tied to the
        // metamodel's classifier ordering; do not hand-edit.
        if (ifcColourOrFactorEClass == null) {
            ifcColourOrFactorEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(1123);
        }
        return ifcColourOrFactorEClass;
    }
}
public class StencilOperands { /** * Is Object a whole number . * @ param o * Object to be analyzed . * @ return true if Integer , Long , Byte , Short or Character . */ protected boolean isNumberable ( final Object o ) { } }
return o instanceof Integer || o instanceof Long || o instanceof Byte || o instanceof Short || o instanceof Character ;
public class MirrorTable {
    /**
     * Reposition to this record using this bookmark.
     *
     * @param bookmark    the bookmark identifying the record to reposition to
     * @param iHandleType the type of handle the bookmark represents
     * @return the record the superclass repositioned to
     * @exception DBException File exception.
     */
    public FieldList setHandle(Object bookmark, int iHandleType) throws DBException {
        // Let the base table do the actual reposition first.
        FieldList record = super.setHandle(bookmark, iHandleType);
        // Then bring the mirrored tables in line with the newly positioned record.
        Iterator<BaseTable> iterator = this.getTables();
        while (iterator.hasNext()) {
            BaseTable table = iterator.next();
            // The "next" table is skipped — presumably it is the one the superclass
            // already positioned, so syncing it would be redundant; TODO confirm.
            if ((table != null) && (table != this.getNextTable()))
                this.syncTables(table, this.getRecord());
        }
        return record;
    }
}
public class ActionFormMapper { protected void mappingListJsonBody ( ActionRuntime runtime , VirtualForm virtualForm , String json ) { } }
try { final ActionFormMeta formMeta = virtualForm . getFormMeta ( ) ; final ParameterizedType pt = formMeta . getListFormParameterParameterizedType ( ) . get ( ) ; // already checked final List < Object > fromJsonList = getJsonManager ( ) . fromJsonParameteried ( json , pt ) ; acceptJsonRealForm ( virtualForm , fromJsonList ) ; } catch ( RuntimeException e ) { throwListJsonBodyParseFailureException ( runtime , virtualForm , json , e ) ; }
public class PreprocessorContext {
    /**
     * Set a global variable value.
     *
     * @param name the variable name, it must not be null and will be normalized to the supported format
     * @param value the variable value, it must not be null
     * @return this preprocessor context
     */
    @Nonnull
    public PreprocessorContext setGlobalVariable(@Nonnull final String name, @Nonnull final Value value) {
        assertNotNull("Variable name is null", name);
        // Normalize first so all lookups below use the canonical spelling.
        final String normalizedName = assertNotNull(PreprocessorUtils.normalizeVariableName(name));
        if (normalizedName.isEmpty()) {
            throw makeException("Name is empty", null);
        }
        assertNotNull("Value is null", value);
        if (mapVariableNameToSpecialVarProcessor.containsKey(normalizedName)) {
            // Special variables are handled by their dedicated processor rather
            // than being stored in the plain global table.
            mapVariableNameToSpecialVarProcessor.get(normalizedName).setVariable(normalizedName, value, this);
        } else {
            if (isVerbose()) {
                // Log whether this defines a new variable or replaces an existing one.
                final String valueAsStr = value.toString();
                if (globalVarTable.containsKey(normalizedName)) {
                    logForVerbose("Replacing global variable [" + normalizedName + '=' + valueAsStr + ']');
                } else {
                    logForVerbose("Defining new global variable [" + normalizedName + '=' + valueAsStr + ']');
                }
            }
            globalVarTable.put(normalizedName, value);
        }
        return this;
    }
}
public class TcpClient { /** * Creates a new TCP client instance with the passed address of the target server . * @ param host Hostname for the target server . * @ param port Port for the target server . * @ return A new { @ code TcpClient } instance . */ public static TcpClient < ByteBuf , ByteBuf > newClient ( String host , int port ) { } }
return newClient ( new InetSocketAddress ( host , port ) ) ;
public class FDBigInteger {
    /**
     * In-place multiply-accumulate over the 32-bit limbs of this number:
     * {@code this = this * UNSIGNED(iv) + UNSIGNED(addend)}.
     *
     * @requires this.value() * UNSIGNED(iv) + UNSIGNED(addend) < ((\bigint)1) << ((this.data.length + this.offset) * 32);
     * @assignable this.data[*];
     * @ensures this.value() == \old(this.value() * UNSIGNED(iv) + UNSIGNED(addend));
     */
    private /*@ helper @*/ void multAddMe(int iv, int addend) {
        // Treat iv as an unsigned 32-bit multiplier held in a long.
        long v = iv & LONG_MASK;
        // unroll 0th iteration, doing addition: the addend is folded into the
        // lowest limb's product; the high 32 bits of p become the carry.
        long p = v * (data[0] & LONG_MASK) + (addend & LONG_MASK);
        data[0] = (int) p;
        p >>>= 32;
        // Propagate the carry while multiplying each remaining limb.
        for (int i = 1; i < nWords; i++) {
            p += v * (data[i] & LONG_MASK);
            data[i] = (int) p;
            p >>>= 32;
        }
        // A final non-zero carry grows the number by one limb; per @requires the
        // backing array has room for it.
        if (p != 0L) {
            data[nWords++] = (int) p; // will fail noisily if illegal!
        }
    }
}
public class OutFactoryH3Impl { /** * Adds a predefined schema */ @ Override public void schema ( Class < ? > type ) { } }
Objects . requireNonNull ( type ) ; _context . schema ( type ) ;
public class OpDef { /** * < pre > * Optional deprecation based on GraphDef versions . * < / pre > * < code > optional . tensorflow . OpDeprecation deprecation = 8 ; < / code > */ public org . tensorflow . framework . OpDeprecation getDeprecation ( ) { } }
return deprecation_ == null ? org . tensorflow . framework . OpDeprecation . getDefaultInstance ( ) : deprecation_ ;
public class BaseLockFactory { /** * { @ inheritDoc } */ @ Override public BaseLuceneLock obtainLock ( Directory dir , String lockName ) throws IOException { } }
if ( ! ( dir instanceof DirectoryLucene ) ) { throw new UnsupportedOperationException ( "BaseLuceneLock can only be used with DirectoryLucene, got: " + dir ) ; } DirectoryLucene infinispanDirectory = ( DirectoryLucene ) dir ; int affinitySegmentId = infinispanDirectory . getAffinitySegmentId ( ) ; Cache distLockCache = infinispanDirectory . getDistLockCache ( ) ; String indexName = infinispanDirectory . getIndexName ( ) ; BaseLuceneLock lock = new BaseLuceneLock ( distLockCache , indexName , lockName , affinitySegmentId ) ; CommonLockObtainUtils . attemptObtain ( lock ) ; return lock ;
public class ClassIncludes { /** * Get the table name . */ public String getTableNames ( boolean bAddQuotes ) { } }
return ( m_tableName == null ) ? Record . formatTableNames ( CLASS_INCLUDES_FILE , bAddQuotes ) : super . getTableNames ( bAddQuotes ) ;
public class DefaultGroovyMethods { /** * A helper method to allow lists to work with subscript operators . * < pre class = " groovyTestCase " > def list = [ 2 , 3] * list [ 0 ] = 1 * assert list = = [ 1 , 3 ] < / pre > * @ param self a List * @ param idx an index * @ param value the value to put at the given index * @ since 1.0 */ public static < T > void putAt ( List < T > self , int idx , T value ) { } }
int size = self . size ( ) ; idx = normaliseIndex ( idx , size ) ; if ( idx < size ) { self . set ( idx , value ) ; } else { while ( size < idx ) { self . add ( size ++ , null ) ; } self . add ( idx , value ) ; }
public class LssClient {
    /**
     * Create a domain stream in the live stream service.
     *
     * @param request The request object containing all options for creating domain stream
     * @return the response
     */
    public CreateStreamResponse createStream(CreateStreamRequest request) {
        // Validate all mandatory fields up front so failures are local and descriptive.
        checkNotNull(request, "The parameter request should NOT be null.");
        checkStringNotEmpty(request.getPlayDomain(), "playDomain should NOT be empty.");
        checkStringNotEmpty(request.getApp(), "app should NOT be empty.");
        checkNotNull(request.getPublish(), "publish should NOT be null.");
        checkStringNotEmpty(request.getPublish().getPushStream(), "pushStream should NOT be empty.");
        // Build the POST request from the LIVE_DOMAIN / playDomain / LIVE_STREAM
        // path segments, with the request object as the body.
        InternalRequest internalRequest = createRequest(HttpMethodName.POST, request, LIVE_DOMAIN, request.getPlayDomain(), LIVE_STREAM);
        return invokeHttpClient(internalRequest, CreateStreamResponse.class);
    }
}
public class ManagerConnectionImpl { /** * Determine version by the ' core show version ' command . This needs * ' command ' permissions . * @ return * @ throws Exception */ protected AsteriskVersion determineVersionByCoreShowVersion ( ) throws Exception { } }
final ManagerResponse coreShowVersionResponse = sendAction ( new CommandAction ( CMD_SHOW_VERSION ) ) ; if ( coreShowVersionResponse == null || ! ( coreShowVersionResponse instanceof CommandResponse ) ) { // this needs ' command ' permissions logger . info ( "Could not get response for 'core show version'" ) ; return null ; } final List < String > coreShowVersionResult = ( ( CommandResponse ) coreShowVersionResponse ) . getResult ( ) ; if ( coreShowVersionResult == null || coreShowVersionResult . isEmpty ( ) ) { logger . warn ( "Got empty response for 'core show version'" ) ; return null ; } final String coreLine = coreShowVersionResult . get ( 0 ) ; return AsteriskVersion . getDetermineVersionFromString ( coreLine ) ;
public class SchemaBuilder {
    /**
     * Creates the schema object builder for the identifier.
     * The actual definitions are retrieved via {@link SchemaBuilder#getDefinitions} after all types have been declared.
     *
     * @param identifier The identifier
     * @return The schema JSON object builder with the needed properties
     */
    JsonObjectBuilder build(final TypeIdentifier identifier) {
        final SwaggerType type = toSwaggerType(identifier.getType());
        // Primitive Swagger types get an inline schema; no named definition is needed.
        switch (type) {
            case BOOLEAN:
            case INTEGER:
            case NUMBER:
            case NULL:
            case STRING:
                final JsonObjectBuilder builder = Json.createObjectBuilder();
                addPrimitive(builder, type);
                return builder;
        }
        // Non-primitive: build the schema by visiting the registered representation.
        final JsonObjectBuilder builder = Json.createObjectBuilder();
        final TypeRepresentationVisitor visitor = new TypeRepresentationVisitor() {
            // Tracks whether the current element is nested inside a collection, so
            // concrete/enum schemas go under "items" instead of the top level.
            private boolean inCollection = false;

            @Override
            public void visit(final TypeRepresentation.ConcreteTypeRepresentation representation) {
                final JsonObjectBuilder nestedBuilder = inCollection ? Json.createObjectBuilder() : builder;
                add(nestedBuilder, representation);
                if (inCollection) {
                    builder.add("items", nestedBuilder.build());
                }
            }

            @Override
            public void visitStart(final TypeRepresentation.CollectionTypeRepresentation representation) {
                builder.add("type", "array");
                inCollection = true;
            }

            @Override
            public void visitEnd(final TypeRepresentation.CollectionTypeRepresentation representation) {
                // NOTE(review): this body is identical to visitStart — it re-adds
                // "type":"array" and leaves inCollection == true instead of resetting
                // it. This looks like a copy/paste slip; confirm the intended behavior
                // for nested or sequential collections before changing it.
                builder.add("type", "array");
                inCollection = true;
            }

            @Override
            public void visit(final TypeRepresentation.EnumTypeRepresentation representation) {
                builder.add("type", "string");
                if (!representation.getEnumValues().isEmpty()) {
                    // Enum constants are emitted sorted for a deterministic schema.
                    final JsonArrayBuilder array = representation.getEnumValues().stream().sorted().collect(Json::createArrayBuilder, JsonArrayBuilder::add, JsonArrayBuilder::add);
                    if (inCollection) {
                        builder.add("items", Json.createObjectBuilder().add("type", "string").add("enum", array).build());
                    } else {
                        builder.add("enum", array);
                    }
                }
            }
        };
        final TypeRepresentation representation = typeRepresentations.get(identifier);
        // Unknown identifiers degrade to a plain "object" schema.
        if (representation == null)
            builder.add("type", "object");
        else
            representation.accept(visitor);
        return builder;
    }
}
public class Planner { /** * Fetches the list offerings from the Discoverer * @ return the String containing a unique Tosca representation of all available offerings */ private String fetchOfferings ( ) { } }
DiscovererFetchallResult allOfferings = null ; try { String discovererOutput = discovererClient . getRequest ( "fetch_all" , Collections . EMPTY_LIST ) ; ObjectMapper mapper = new ObjectMapper ( ) ; allOfferings = mapper . readValue ( discovererOutput , DiscovererFetchallResult . class ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; } String offerings = allOfferings . offering ; return offerings ;
public class RePairRule { /** * Return the prefixed with R rule . * @ return rule string . */ public String toRuleString ( ) { } }
if ( 0 == this . ruleNumber ) { return this . grammar . r0String ; } return this . first . toString ( ) + SPACE + this . second . toString ( ) + SPACE ;
public class LoggerCreator { /** * Convert a logging level to its numerical equivalent . * @ param level * - the logging level . * @ return the numerical index that corresponds to the given level . */ @ SuppressWarnings ( { } }
"checkstyle:magicnumber" , "checkstyle:returncount" , "checkstyle:npathcomplexity" } ) public static int toInt ( Level level ) { if ( level == Level . OFF ) { return 0 ; } if ( level == Level . SEVERE ) { return 1 ; } if ( level == Level . WARNING ) { return 2 ; } if ( level == Level . INFO ) { return 3 ; } if ( level == Level . CONFIG ) { return 4 ; } if ( level == Level . FINE ) { return 4 ; } if ( level == Level . FINER ) { return 5 ; } if ( level == Level . FINEST ) { return 6 ; } if ( level == Level . ALL ) { return 7 ; } return 3 ;
public class WebApp { /** * Add a filter . * @ param filter to add * @ throws NullArgumentException if filter , filter name or filter class is null */ public void addFilter ( final WebAppFilter filter ) { } }
NullArgumentException . validateNotNull ( filter , "Filter" ) ; NullArgumentException . validateNotNull ( filter . getFilterName ( ) , "Filter name" ) ; NullArgumentException . validateNotNull ( filter . getFilterClass ( ) , "Filter class" ) ; filters . put ( filter . getFilterName ( ) , filter ) ; // add url patterns and servlet names for filter mappings added before // filter for ( WebAppFilterMapping mapping : getFilterMappings ( filter . getFilterName ( ) ) ) { if ( mapping . getUrlPattern ( ) != null && mapping . getUrlPattern ( ) . trim ( ) . length ( ) > 0 ) { filter . addUrlPattern ( mapping . getUrlPattern ( ) ) ; } if ( mapping . getServletName ( ) != null && mapping . getServletName ( ) . trim ( ) . length ( ) > 0 ) { filter . addServletName ( mapping . getServletName ( ) ) ; } }
public class ServletUtil { /** * Gets the URL for the provided possibly - relative path or < code > null < / code > if no resource * is mapped to the path . * @ deprecated Use regular methods directly * @ see # getAbsoluteURL ( javax . servlet . http . HttpServletRequest , java . lang . String ) * @ see ServletContext # getResource ( java . lang . String ) * @ see ServletContextCache # getResource ( java . lang . String ) * @ see ServletContextCache # getResource ( javax . servlet . ServletContext , java . lang . String ) */ @ Deprecated public static URL getResource ( ServletContext servletContext , HttpServletRequest request , String relativeUrlPath ) throws MalformedURLException { } }
return servletContext . getResource ( getAbsolutePath ( request , relativeUrlPath ) ) ;
public class Tracer { /** * Return the sec . ms part of time ( if time = " 20:06:11.566 " , " 11.566 " ) */ private static String formatTime ( long time ) { } }
int sec = ( int ) ( ( time / 1000 ) % 60 ) ; int ms = ( int ) ( time % 1000 ) ; return String . format ( "%02d.%03d" , sec , ms ) ;
public class MvpPresenter {
    /**
     * Check if view is in restore state or not.
     *
     * @param view view for check
     * @return true if view state restore state to incoming view. false otherwise.
     */
    @SuppressWarnings("unused")
    public boolean isInRestoreState(View view) {
        // Without a view state there is nothing to restore.
        return mViewState != null && mViewState.isInRestoreState(view);
    }
}
public class GrailsHibernateTemplate {
    /**
     * Execute the action specified by the given action object within a Session.
     *
     * @param action callback object that specifies the Hibernate action
     * @param enforceNativeSession whether to enforce exposure of the native Hibernate Session to callback code
     * @return a result object returned by the action, or <code>null</code>
     * @throws org.springframework.dao.DataAccessException in case of Hibernate errors
     */
    protected <T> T doExecute(HibernateCallback<T> action, boolean enforceNativeSession) throws DataAccessException {
        Assert.notNull(action, "Callback object must not be null");
        Session session = getSession();
        // A thread-bound (transactional) session must not be closed or have its
        // flush mode permanently changed by this template.
        boolean existingTransaction = isSessionTransactional(session);
        if (existingTransaction) {
            LOG.debug("Found thread-bound Session for HibernateTemplate");
        }
        FlushMode previousFlushMode = null;
        try {
            // Remember the prior flush mode so it can be restored for a pre-bound session.
            previousFlushMode = applyFlushMode(session, existingTransaction);
            if (shouldPassReadOnlyToHibernate()) {
                session.setDefaultReadOnly(true);
            }
            // Expose either the native session or a proxy, depending on configuration.
            Session sessionToExpose = (enforceNativeSession || exposeNativeSession ? session : createSessionProxy(session));
            T result = action.doInHibernate(sessionToExpose);
            flushIfNecessary(session, existingTransaction);
            return result;
        } catch (HibernateException ex) {
            throw convertHibernateAccessException(ex);
        } catch (PersistenceException ex) {
            // Unwrap Hibernate causes so they get the same translation treatment.
            if (ex.getCause() instanceof HibernateException) {
                throw SessionFactoryUtils.convertHibernateAccessException((HibernateException) ex.getCause());
            }
            throw ex;
        } catch (SQLException ex) {
            throw jdbcExceptionTranslator.translate("Hibernate-related JDBC operation", null, ex);
        } catch (RuntimeException ex) {
            // Callback code threw application exception: propagate untouched.
            throw ex;
        } finally {
            if (existingTransaction) {
                // Pre-bound session: restore its flush mode but leave it open.
                LOG.debug("Not closing pre-bound Hibernate Session after HibernateTemplate");
                if (previousFlushMode != null) {
                    session.setHibernateFlushMode(previousFlushMode);
                }
            } else {
                // Session opened by this call: always close it.
                SessionFactoryUtils.closeSession(session);
            }
        }
    }
}
public class ApiOvhOrder { /** * Create order * REST : POST / order / cdn / dedicated / { serviceName } / quota / { duration } * @ param quota [ required ] quota number in TB that will be added to the CDN service * @ param serviceName [ required ] The internal name of your CDN offer * @ param duration [ required ] Duration */ public OvhOrder cdn_dedicated_serviceName_quota_duration_POST ( String serviceName , String duration , OvhOrderQuotaEnum quota ) throws IOException { } }
String qPath = "/order/cdn/dedicated/{serviceName}/quota/{duration}" ; StringBuilder sb = path ( qPath , serviceName , duration ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "quota" , quota ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhOrder . class ) ;
public class MTree { /** * Removes a data object from the M - Tree . * @ param data The data object to be removed . * @ return { @ code true } if and only if the object was found . */ public boolean remove ( DATA data ) { } }
if ( root == null ) { return false ; } double distanceToRoot = distanceFunction . calculate ( data , root . data ) ; try { root . removeData ( data , distanceToRoot ) ; } catch ( RootNodeReplacement e ) { @ SuppressWarnings ( "unchecked" ) Node newRoot = ( Node ) e . newRoot ; root = newRoot ; } catch ( DataNotFound e ) { return false ; } catch ( NodeUnderCapacity e ) { throw new RuntimeException ( "Should have never happened" , e ) ; } return true ;
public class FxObservableTransformers { /** * Performs an action on FX thread on onError with the provided emission count * @ param onError * @ param < T > */ public static < T > ObservableTransformer < T , T > doOnErrorCountFx ( Consumer < Integer > onError ) { } }
return obs -> obs . compose ( doOnErrorCount ( i -> runOnFx ( i , onError ) ) ) ;
public class SubmitEclipseLogWizard { /** * Replies the associated wieard dialog . * @ return the dialog . */ WizardDialog getWizardDialog ( ) { } }
final WeakReference < WizardDialog > ref = this . wizardDialog ; return ( ref == null ) ? null : ref . get ( ) ;
public class JQLChecker {
    /**
     * Analyze internal.
     *
     * Parses the given JQL string and walks the resulting parse tree with the
     * supplied listener.
     *
     * @param <L> the listener type
     * @param jqlContext the jql context
     * @param jql the jql string to analyze
     * @param listener the listener notified during the tree walk
     */
    protected <L extends JqlBaseListener> void analyzeInternal(JQLContext jqlContext, final String jql, L listener) {
        // prepareParser returns a pair; value0 is presumably the parse tree to
        // walk — TODO confirm against prepareParser's declaration.
        walker.walk(listener, prepareParser(jqlContext, jql).value0);
    }
}
public class DiagnosticsInner {
    /**
     * Get site detector response.
     * Get site detector response.
     *
     * @param resourceGroupName Name of the resource group to which the resource belongs.
     * @param siteName Site Name
     * @param detectorName Detector Resource Name
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the DetectorResponseInner object if successful.
     */
    public DetectorResponseInner getSiteDetectorResponse(String resourceGroupName, String siteName, String detectorName) {
        // Synchronous facade: delegate to the async variant, block for the
        // single emitted service response, and unwrap its body.
        return getSiteDetectorResponseWithServiceResponseAsync(resourceGroupName, siteName, detectorName).toBlocking().single().body();
    }
}