signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ZooKeeperMasterModel { /** * redundantDeployment determines whether or not rollingUpdateUndeploy should actually emit * RollingUpdateOps to undeploy a job . * < p > Jobs are undeployed in two cases : * < p > 1 . During a rolling update * 2 . When a host leaves a deployment group * < p > In case 1 . this redundancy check makes sense . The undeployment of a job during a rolling * update is always coupled with the deployment of a job . If the ' new ' job is the same job * that is currently deployed , the undeployment would be redundant so we do not generate * deployment operations . * < p > In case 2 . undeployment can never be redundant . We always want to undeploy the job from * hosts that have left the deployment group . Unfortunately in case case undeployments appear to * be redundant to the following checks , so they must be skipped . */ private boolean redundantUndeployment ( final Deployment deployment , final DeploymentGroup deploymentGroup ) { } }
// This deployment was not created by this deployment group . if ( ! Objects . equals ( deployment . getDeploymentGroupName ( ) , deploymentGroup . getName ( ) ) ) { return false ; } // This deployment is not of the deployment group ' s job . if ( ! deployment . getJobId ( ) . equals ( deploymentGroup . getJobId ( ) ) ) { return false ; } // This deployment aims to do something other than start . if ( ! Goal . START . equals ( deployment . getGoal ( ) ) ) { return false ; } // Undeploying this deployment ' s job would be redundant because the next operation would simply // redeploy the same job . return true ;
public class GpfdistServer { /** * Start a server . * @ return the http server * @ throws Exception the exception */ public synchronized HttpServer < Buffer , Buffer > start ( ) throws Exception { } }
if ( server == null ) { server = createProtocolListener ( ) ; } return server ;
public class FeatureSupportsInner {

    /**
     * It will validate if given feature with resource properties is supported in service.
     *
     * @param azureRegion Azure region to hit Api
     * @param parameters Feature support request object
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the AzureVMResourceFeatureSupportResponseInner object
     */
    public Observable<ServiceResponse<AzureVMResourceFeatureSupportResponseInner>> validateWithServiceResponseAsync(String azureRegion, FeatureSupportRequest parameters) {
        // Fail fast on required arguments before issuing the network call.
        if (azureRegion == null) {
            throw new IllegalArgumentException("Parameter azureRegion is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        // Structural validation of the request body.
        Validator.validate(parameters);
        final String apiVersion = "2017-07-01";
        // Issue the REST call and map the raw retrofit response to a typed
        // ServiceResponse; any mapping failure becomes an error observable.
        return service.validate(azureRegion, this.client.subscriptionId(), apiVersion, parameters, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<AzureVMResourceFeatureSupportResponseInner>>>() {
                @Override
                public Observable<ServiceResponse<AzureVMResourceFeatureSupportResponseInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<AzureVMResourceFeatureSupportResponseInner> clientResponse = validateDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class RequestWrapper {

    /**
     * Reads the request body as the given type, delegating entirely to the
     * wrapped request.
     *
     * @param type the class to deserialize the body into
     * @param result receives the deserialized body (or the failure)
     */
    @Override
    public <X> void body(Class<X> type, Result<X> result) {
        delegate().body(type, result);
    }
}
public class CUstream_flags { /** * Returns the String identifying the given CUstream _ flags * @ param n The CUstream _ flags * @ return The String identifying the given CUstream _ flags */ public static String stringFor ( int n ) { } }
if ( n == 0 ) { return "CU_STREAM_DEFAULT" ; } String result = "" ; if ( ( n & CU_STREAM_NON_BLOCKING ) != 0 ) result += "CU_STREAM_NON_BLOCKING " ; return result ;
public class PoolEnableAutoScaleOptions { /** * Set a timestamp indicating the last modified time of the resource known to the client . The operation will be performed only if the resource on the service has not been modified since the specified time . * @ param ifUnmodifiedSince the ifUnmodifiedSince value to set * @ return the PoolEnableAutoScaleOptions object itself . */ public PoolEnableAutoScaleOptions withIfUnmodifiedSince ( DateTime ifUnmodifiedSince ) { } }
if ( ifUnmodifiedSince == null ) { this . ifUnmodifiedSince = null ; } else { this . ifUnmodifiedSince = new DateTimeRfc1123 ( ifUnmodifiedSince ) ; } return this ;
public class WDataTable {

    /**
     * Determine the row ids for the provided index range.
     *
     * @param startIndex the startIndex (inclusive)
     * @param endIndex the endIndex (inclusive)
     * @return the list of rowIds for the provided index range
     */
    private List<Integer> getRowIds(final int startIndex, final int endIndex) {
        // If the table is sorted, we may require a mapping for table row index <--> data model index.
        int[] rowIndexMapping = getComponentModel().rowIndexMapping;
        // Check if sort mapping needs updating. A stale mapping is detected when its
        // length no longer matches the data model's current row count.
        if (isSorted() && rowIndexMapping != null && rowIndexMapping.length != getDataModel().getRowCount()) {
            rowIndexMapping = getDataModel().sort(getSortColumnIndex(), isSortAscending());
            getOrCreateComponentModel().rowIndexMapping = rowIndexMapping;
        }
        if (rowIndexMapping == null) {
            // No mapping, return from startIndex to endIndex
            return new RowIdList(startIndex, endIndex);
        } else {
            // Translate each table index through the sort mapping.
            List<Integer> rowIds = new ArrayList<>(endIndex - startIndex + 1);
            for (int i = startIndex; i <= endIndex; i++) {
                rowIds.add(rowIndexMapping[i]);
            }
            return rowIds;
        }
    }
}
public class JMXJsonServlet { /** * Renders MBean attributes to jg . * The queries parameter allows selection of a subset of mbeans . * @ param jg * JsonGenerator that will be written to * @ param mBeanNames * Optional list of mbean names to render . If null , every * mbean will be returned . * @ return int * Returns the appropriate HTTP status code . */ private int renderMBeans ( JsonGenerator jg , String [ ] mBeanNames ) throws IOException , MalformedObjectNameException { } }
jg . writeStartObject ( ) ; Set < ObjectName > nameQueries , queriedObjects ; nameQueries = new HashSet < ObjectName > ( ) ; queriedObjects = new HashSet < ObjectName > ( ) ; // if no mbean names provided , add one null entry to query everything if ( mBeanNames == null ) { nameQueries . add ( null ) ; } else { for ( String mBeanName : mBeanNames ) { if ( mBeanName != null ) { nameQueries . add ( new ObjectName ( mBeanName ) ) ; } } } // perform name queries for ( ObjectName nameQuery : nameQueries ) { queriedObjects . addAll ( mBeanServer . queryNames ( nameQuery , null ) ) ; } // render each query result for ( ObjectName objectName : queriedObjects ) { renderMBean ( jg , objectName ) ; } jg . writeEndObject ( ) ; return HttpServletResponse . SC_OK ;
public class ReflectionHelper { /** * Gets the default value as String for the primitive types . * @ param type the primitive type * @ return the default value as String */ public static String getPrimitiveDefault ( Class type ) { } }
if ( type == Boolean . class ) { return "false" ; } if ( type == Character . class ) { return Character . toString ( ( char ) 0 ) ; } return "0" ;
public class LocaleUtils { /** * Returns a list of languages and countries of the { @ code Locale } s ( as { @ code String } , for example " en _ GB " ) , of default * language and available translations . * The list is sorted by language / country codes with default locale , always , at first position . * @ return The list of available translations , ZAP provides */ public static List < String > getAvailableLocales ( ) { } }
List < String > locales = readAvailableLocales ( ) ; Collections . sort ( locales ) ; // Always put English at the top locales . add ( 0 , DEFAULT_LOCALE ) ; return locales ;
public class PeerGroup {

    /**
     * Starts the PeerGroup and begins network activity.
     *
     * @return A future that completes when first connection activity has been
     *         triggered (note: not first connection made).
     */
    public ListenableFuture startAsync() {
        // This is run in a background thread by the Service implementation.
        if (chain == null) {
            // Just try to help catch what might be a programming error.
            log.warn("Starting up with no attached block chain. Did you forget to pass one to the constructor?");
        }
        // A peer group is single-use: once started (and later stopped) it cannot be restarted.
        checkState(!vUsedUp, "Cannot start a peer group twice");
        vRunning = true;
        vUsedUp = true;
        // Release anything waiting for the executor to become available.
        executorStartupLatch.countDown();
        // We do blocking waits during startup, so run on the executor thread.
        return executor.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    log.info("Starting ...");
                    channels.startAsync();
                    channels.awaitRunning();
                    triggerConnections();
                    setupPinging();
                } catch (Throwable e) {
                    // The executor swallows exceptions :(  — log so failures are visible.
                    log.error("Exception when starting up", e);
                }
            }
        });
    }
}
public class RootVisitor { /** * Visit the Root . From here we will look through the top level objects for * Persons . * @ see GedObjectVisitor # visit ( Root ) */ @ Override public void visit ( final Root root ) { } }
final Map < String , GedObject > objectMap = root . getObjects ( ) ; final Collection < GedObject > objects = objectMap . values ( ) ; for ( final GedObject gob : objects ) { gob . accept ( this ) ; }
public class ClassPathResource { /** * 根据给定资源初始化URL */ private void initUrl ( ) { } }
if ( null != this . clazz ) { super . url = this . clazz . getResource ( this . path ) ; } else if ( null != this . classLoader ) { super . url = this . classLoader . getResource ( this . path ) ; } else { super . url = ClassLoader . getSystemResource ( this . path ) ; } if ( null == super . url ) { throw new NoResourceException ( "Resource of path [{}] not exist!" , this . path ) ; }
public class KeyGroupedMutableObjectIterator {

    /**
     * Moves the iterator to the next key. This method may skip any values that have
     * not yet been returned by the iterator created by the {@link #getValues()}
     * method. Hence, if called multiple times it "removes" pairs.
     *
     * @return true if the input iterator has another group of key-value pairs that
     *         share the same key.
     * @throws IOException if reading from the underlying iterator fails
     */
    public boolean nextKey() throws IOException {
        // first element
        if (this.next == null) {
            this.next = this.serializer.createInstance();
            if ((this.next = this.iterator.next(this.next)) != null) {
                this.comparator.setReference(this.next);
                this.nextIsFresh = false;
                this.valuesIterator = new ValuesIterator();
                // The element just read has not been handed to the values iterator yet.
                this.valuesIterator.nextIsUnconsumed = true;
                return true;
            } else {
                // empty input, set everything null
                this.valuesIterator = null;
                return false;
            }
        }
        // Whole value-iterator was read and a new key is available.
        if (this.nextIsFresh) {
            this.nextIsFresh = false;
            this.comparator.setReference(this.next);
            this.valuesIterator.nextIsUnconsumed = true;
            return true;
        }
        // try to move to next key.
        // Required if user code / reduce() method did not read the whole value iterator.
        while (true) {
            if ((this.next = this.iterator.next(this.next)) != null) {
                if (!this.comparator.equalToReference(this.next)) {
                    // the keys do not match, so we have a new group. store the current keys
                    this.comparator.setReference(this.next);
                    this.nextIsFresh = false;
                    this.valuesIterator.nextIsUnconsumed = true;
                    return true;
                }
                // Same key as the current group: keep skipping unread values.
            } else {
                // Input exhausted while skipping: no further groups.
                this.valuesIterator = null;
                return false;
            }
        }
    }
}
public class FSNamesystem { /** * Allocate a number of parity blocks * Require a write lock * @ param numParityBlocks */ private Block [ ] allocateParityBlocks ( int numParityBlocks ) throws IOException { } }
Block [ ] blocks = new Block [ numParityBlocks ] ; for ( int i = 0 ; i < numParityBlocks ; i ++ ) { Block b = new Block ( generateBlockId ( ) , 0 , 0 ) ; while ( isValidBlock ( b ) ) { b . setBlockId ( generateBlockId ( ) ) ; } b . setGenerationStamp ( getGenerationStamp ( ) ) ; blocks [ i ] = b ; } return blocks ;
public class MongoDbDeviceRegistry { /** * API operations */ @ Override public Device get ( String deviceId ) { } }
DBCursor devices = devicesCollection ( ) . find ( new BasicDBObject ( ImmutableMap . of ( "deviceId" , deviceId ) ) ) ; if ( devices . hasNext ( ) ) { return dbObjectToDevice ( devices . next ( ) ) ; } return null ;
public class RecordBuffer {

    /**
     * Claim a record in the buffer. Each record has a unique key.
     *
     * @param key the key to claim the record with.
     * @return the offset at which the record was claimed or
     *         {@code DID_NOT_CLAIM_RECORD} if the claim failed.
     * @see RecordBuffer#commit(int)
     */
    public int claimRecord(final int key) {
        // Linear scan over existing records for a matching key.
        int offset = endOfPositionField;
        while (offset < position()) {
            if (key == key(offset)) {
                // If someone else is writing something with the same key then abort
                if (statusVolatile(offset) == PENDING) {
                    return DID_NOT_CLAIM_RECORD;
                } else // state == COMMITTED
                {
                    // NOTE(review): the CAS result is ignored here. If a concurrent
                    // claimer wins the race between the volatile read above and this
                    // CAS, both callers would believe they own the slot — confirm
                    // whether external synchronization makes this safe.
                    compareAndSetStatus(offset, COMMITTED, PENDING);
                    return offset + SIZE_OF_RECORD_FRAME;
                }
            }
            offset += slotSize;
        }
        // No existing record with this key: append a new slot, unless the buffer is full.
        if ((offset + slotSize) > buffer.capacity()) {
            return DID_NOT_CLAIM_RECORD;
        }
        final int claimOffset = movePosition(slotSize);
        compareAndSetStatus(claimOffset, UNUSED, PENDING);
        key(claimOffset, key);
        return claimOffset + SIZE_OF_RECORD_FRAME;
    }
}
public class DocumentRoot { /** * Returns the absolute document root when it points to a valid directory , logging a * warning and returning { @ code null } otherwise . * @ return the valid document root */ public final File getValidDirectory ( ) { } }
File file = this . directory ; file = ( file != null ) ? file : getWarFileDocumentRoot ( ) ; file = ( file != null ) ? file : getExplodedWarFileDocumentRoot ( ) ; file = ( file != null ) ? file : getCommonDocumentRoot ( ) ; if ( file == null && this . logger . isDebugEnabled ( ) ) { logNoDocumentRoots ( ) ; } else if ( this . logger . isDebugEnabled ( ) ) { this . logger . debug ( "Document root: " + file ) ; } return file ;
public class StreamUtils {

    /**
     * Copy a stream.
     *
     * @param src the source input stream
     * @param dest the destination output stream
     * @param closeStreams TRUE if the streams should be closed on completion
     * @throws IOException if an IO error occurs
     * @throws NullArgumentException if either the src or dest arguments are null.
     */
    public static void copyStream(InputStream src, OutputStream dest, boolean closeStreams) throws IOException, NullArgumentException {
        // Delegates to the fully-parameterized overload with the first three
        // arguments defaulted (presumably a monitor/context and an offset or
        // size hint — TODO confirm against the overload's signature).
        copyStream(null, null, 0, src, dest, closeStreams);
    }
}
public class StandardAlgConfigPanel {

    /**
     * Searches inside the children of "root" for 'target'. If found, it is removed
     * along with the previous component.
     *
     * @param root the container to search
     * @param target the child component to remove (together with its predecessor)
     * @throws RuntimeException if target is not a child of root
     */
    protected static void removeChildAndPrevious(JComponent root, JComponent target) {
        int N = root.getComponentCount();
        for (int i = 0; i < N; i++) {
            if (root.getComponent(i) == target) {
                // Remove target first, then its predecessor (indexes shift after the
                // first remove, so i-1 still names the original previous component).
                root.remove(i);
                // NOTE(review): if target is at index 0 this throws
                // ArrayIndexOutOfBoundsException — callers presumably guarantee a
                // preceding component exists; confirm.
                root.remove(i - 1);
                return;
            }
        }
        throw new RuntimeException("Can't find component");
    }
}
public class Taint {

    /**
     * Returns the merge of the facts such that it can represent any of them.
     *
     * @param a first state to merge
     * @param b second state to merge
     * @return constructed merge of the specified facts
     */
    public static Taint merge(Taint a, Taint b) {
        // A null fact merges to a copy of the other (or null when both are null).
        if (a == null) {
            if (b == null) {
                return null;
            } else {
                return new Taint(b);
            }
        } else if (b == null) {
            return new Taint(a);
        }
        assert a != null && b != null;
        Taint result = new Taint(State.merge(a.getState(), b.getState()));
        // The variable index survives only when both facts agree on it.
        if (a.variableIndex == b.variableIndex) {
            result.variableIndex = a.variableIndex;
        }
        // Locations are unioned from both inputs.
        result.taintLocations.addAll(a.taintLocations);
        result.taintLocations.addAll(b.taintLocations);
        result.unknownLocations.addAll(a.unknownLocations);
        result.unknownLocations.addAll(b.unknownLocations);
        // Parameters are only relevant when the merged state is not already tainted.
        if (!result.isTainted()) {
            mergeParameters(a, b, result);
        }
        mergeRealInstanceClass(a, b, result);
        mergeTags(a, b, result);
        // A constant value is kept only when both facts carry the same constant.
        if (a.constantValue != null && a.constantValue.equals(b.constantValue)) {
            result.constantValue = a.constantValue;
        }
        if (FindSecBugsGlobalConfig.getInstance().isDebugTaintState()) {
            result.setDebugInfo("[" + a.getDebugInfo() + "]+[" + b.getDebugInfo() + "]");
        }
        // Invariant: a fact with parameters must be in the UNKNOWN state.
        assert !result.hasParameters() || result.isUnknown();
        // Prefer a's potential value, falling back to b's.
        if (a.potentialValue != null) {
            result.potentialValue = a.potentialValue;
        } else if (b.potentialValue != null) {
            result.potentialValue = b.potentialValue;
        }
        result.addAllSources(a.sources);
        result.addAllSources(b.sources);
        return result;
    }
}
public class DatabaseDAODefaultImpl { public DbDevImportInfo import_device ( Database database , String deviceName ) throws DevFailed { } }
DevVarLongStringArray info ; // ALWAYS Authorized ( e . g . import TAC itself ) int tmp_access = database . access ; database . access = TangoConst . ACCESS_WRITE ; try { DeviceData argIn = new DeviceData ( ) ; argIn . insert ( deviceName ) ; // System . out . println ( " DbImportDevice " + deviceName ) ; DeviceData argOut = command_inout ( database , "DbImportDevice" , argIn ) ; info = argOut . extractLongStringArray ( ) ; database . access = tmp_access ; } catch ( DevFailed e ) { database . access = tmp_access ; throw e ; } return new DbDevImportInfo ( info ) ;
public class RegexHashMap { /** * returns the keysets of both the container and cache hashmaps */ public Set < String > keySet ( ) { } }
// prepare container HashSet < String > set = new HashSet < String > ( ) ; // add container keys set . addAll ( container . keySet ( ) ) ; // add cache keys set . addAll ( cache . keySet ( ) ) ; return set ;
public class Expression { /** * Returns this Expression ' s IType . */ public IType getType ( ) { } }
IType type = getTypeImpl ( ) ; if ( TypeSystem . isDeleted ( type ) ) { type = TypeSystem . getErrorType ( ) ; } return type ;
public class ChangesHolder {

    /**
     * Serialize the value into the given {@link ObjectOutput}.
     *
     * @param out the stream in which we serialize the value
     * @param field the field from which we extract the value
     * @throws IOException if the value could not be serialized
     */
    private static void writeValue(ObjectOutput out, Fieldable field) throws IOException {
        Object o = field.stringValue();
        if (o != null) {
            // Use writeObject instead of writeUTF because the value could contain unsupported
            // characters
            out.writeObject(o);
            return;
        }
        o = field.tokenStreamValue();
        if (o != null) {
            out.writeObject(o);
            return;
        }
        // Reader-backed values are deliberately not serialized; o is re-read here
        // only so the error message reports what was found.
        o = field.readerValue();
        throw new RuntimeException("Unsupported value " + o);
    }
}
public class Asm {

    /**
     * Create xmmword (16 bytes) pointer operand.
     * !@note This constructor is provided only for convenience for sse programming.
     *
     * @param target the absolute target address
     * @param disp the displacement added to the address
     * @param segmentPrefix the segment override prefix
     * @return the 16-byte (dqword) absolute-address memory operand
     */
    public static final Mem xmmword_ptr_abs(long target, long disp, SEGMENT segmentPrefix) {
        return _ptr_build_abs(target, disp, segmentPrefix, SIZE_DQWORD);
    }
}
public class LabelingJobS3DataSourceMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param labelingJobS3DataSource the object to marshall; must not be null
     * @param protocolMarshaller the marshaller to write the fields into
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(LabelingJobS3DataSource labelingJobS3DataSource, ProtocolMarshaller protocolMarshaller) {
        if (labelingJobS3DataSource == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Only the manifest S3 URI field is marshalled for this shape.
            protocolMarshaller.marshall(labelingJobS3DataSource.getManifestS3Uri(), MANIFESTS3URI_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CommandLine { /** * < p > getOptionWithShortForm . < / p > * @ param shortForm a { @ link java . lang . String } object . * @ return a { @ link com . greenpepper . util . cli . Option } object . * @ throws com . greenpepper . util . cli . InvalidOptionException if any . */ public Option getOptionWithShortForm ( String shortForm ) throws InvalidOptionException { } }
for ( Option option : options ) { if ( shortForm . equals ( option . getShortForm ( ) ) ) return option ; } throw new InvalidOptionException ( shortForm ) ;
public class JDefaultInternet {

    /**
     * Generates a random username in the format of first-name initial + last name +
     * random 2 digits, e.g. tjones67.
     *
     * @return the generated username, stripped of non-word characters
     */
    public static String userName() {
        // Lowercase initial + lowercase last name + 2 random digits, then sanitized.
        return fixNonWord(StringUtils.left(JDefaultName.firstName(), 1).toLowerCase() + JDefaultName.lastName().toLowerCase() + JDefaultNumber.randomNumberString(2));
    }
}
public class Reference { /** * < p > newInstance . < / p > * @ param requirement a { @ link com . greenpepper . server . domain . Requirement } object . * @ param specification a { @ link com . greenpepper . server . domain . Specification } object . * @ param sut a { @ link com . greenpepper . server . domain . SystemUnderTest } object . * @ param sections a { @ link java . lang . String } object . * @ return a { @ link com . greenpepper . server . domain . Reference } object . */ public static Reference newInstance ( Requirement requirement , Specification specification , SystemUnderTest sut , String sections ) { } }
Reference reference = new Reference ( ) ; reference . setSections ( sections ) ; reference . setRequirement ( requirement ) ; reference . setSpecification ( specification ) ; reference . setSystemUnderTest ( sut ) ; requirement . getReferences ( ) . add ( reference ) ; specification . getReferences ( ) . add ( reference ) ; return reference ;
public class VoiceApi {

    /**
     * Complete a previously initiated two-step conference identified by the provided
     * IDs. Once completed, the two separate calls are brought together so that all
     * three parties are participating in the same call.
     *
     * @param connId The connection ID of the consult call (established).
     * @param parentConnId The connection ID of the parent call (held).
     * @throws WorkspaceApiException if the conference cannot be completed
     */
    public void completeConference(String connId, String parentConnId) throws WorkspaceApiException {
        // Delegates to the four-argument overload with the two optional trailing
        // arguments defaulted to null (presumably reasons/extensions — confirm
        // against the overload's signature).
        this.completeConference(connId, parentConnId, null, null);
    }
}
public class HttpUrlConnectionRpcSession { /** * Sets the connection content - type and content - length and sends the post data . */ protected void sendPostData ( final HttpURLConnection connection , final RpcRequest request ) throws IOException { } }
String post = buildParamsQuery ( request . getPost ( ) ) ; byte [ ] data = post . getBytes ( UTF8 ) ; connection . setRequestProperty ( CONTENT_TYPE_HEADER , APPLICATION_X_WWW_FORM_URLENCODED ) ; connection . setRequestProperty ( CONTENT_LENGTH_HEADER , String . valueOf ( data . length ) ) ; OutputStream out = new BufferedOutputStream ( connection . getOutputStream ( ) ) ; try { out . write ( data ) ; } finally { closeLogExc ( out ) ; }
public class DateUtil { /** * Compares dates < code > d1 < / code > and < code > d2 < / code > taking into * consideration only the year , month and day * @ param d1 the first date * @ param d2 the second date * @ return < code > true < / code > if < code > d1 < / code > is after < code > d2 < / code > , * < code > false < / code > otherwise * @ see java . util . Calendar # after ( java . lang . Object ) * @ see # before ( java . util . Date , java . util . Date ) * @ see # compare ( java . util . Date , java . util . Date ) */ public static boolean after ( Date d1 , Date d2 ) { } }
d1 = floor ( d1 ) ; Calendar c1 = Calendar . getInstance ( ) ; c1 . setTime ( d1 ) ; d2 = floor ( d2 ) ; Calendar c2 = Calendar . getInstance ( ) ; c2 . setTime ( d2 ) ; return c1 . after ( c2 ) ;
public class AbstractDisplayer { /** * Get the current filter selected interval indexes for the given data set column . * @ param columnId The column identifier . * @ return A list of interval indexes */ public List < Integer > filterIndexes ( String columnId ) { } }
List < Integer > result = new ArrayList < > ( ) ; List < Interval > selected = columnSelectionMap . get ( columnId ) ; if ( selected == null ) { return result ; } for ( Interval interval : selected ) { result . add ( interval . getIndex ( ) ) ; } return result ;
public class CommerceShipmentItemPersistenceImpl { /** * Returns the last commerce shipment item in the ordered set where commerceShipmentId = & # 63 ; . * @ param commerceShipmentId the commerce shipment ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching commerce shipment item , or < code > null < / code > if a matching commerce shipment item could not be found */ @ Override public CommerceShipmentItem fetchByCommerceShipment_Last ( long commerceShipmentId , OrderByComparator < CommerceShipmentItem > orderByComparator ) { } }
int count = countByCommerceShipment ( commerceShipmentId ) ; if ( count == 0 ) { return null ; } List < CommerceShipmentItem > list = findByCommerceShipment ( commerceShipmentId , count - 1 , count , orderByComparator ) ; if ( ! list . isEmpty ( ) ) { return list . get ( 0 ) ; } return null ;
public class AbstractDirectoryWalker { /** * Method that finds all files that meets some criteria . * @ return List of files which meets the defined criteria . */ @ Override public List < File > scan ( ) { } }
File baseDirFile = new File ( baseDir ) ; List < File > includedFiles = walkDirectory ( baseDirFile ) ; return includedFiles ;
public class JTrees { /** * Translates one TreePath to a new TreeModel . This methods assumes * DefaultMutableTreeNodes , and identifies the path based on the * equality of user objects using the given equality predicate . * @ param newTreeModel The new tree model * @ param oldPath The old tree path * @ param equality The equality predicate * @ return The new tree path , or < code > null < / code > if there is no * corresponding path in the new tree model */ public static TreePath translatePath ( TreeModel newTreeModel , TreePath oldPath , BiPredicate < Object , Object > equality ) { } }
Object newRoot = newTreeModel . getRoot ( ) ; List < Object > newPath = new ArrayList < Object > ( ) ; newPath . add ( newRoot ) ; Object newPreviousElement = newRoot ; for ( int i = 1 ; i < oldPath . getPathCount ( ) ; i ++ ) { Object oldElement = oldPath . getPathComponent ( i ) ; Object oldUserObject = getUserObjectFromTreeNode ( oldElement ) ; Object newElement = getChildWith ( newPreviousElement , oldUserObject , equality ) ; if ( newElement == null ) { return null ; } newPath . add ( newElement ) ; newPreviousElement = newElement ; } return new TreePath ( newPath . toArray ( ) ) ;
public class ChainedInterceptor {

    /**
     * {@inheritDoc}
     *
     * Runs this interceptor's own chain within the invocation, then restores the
     * context's original chain and position regardless of outcome.
     */
    public Object processInvocation(final InterceptorContext context) throws Exception {
        // Save the current chain position and interceptors so they can be restored
        // after the nested chain completes (or throws).
        final int oldNext = context.getNextInterceptorIndex();
        final Interceptor[] old = context.getInterceptors();
        context.setInterceptors(interceptors);
        try {
            return context.proceed();
        } finally {
            // Restore the outer chain at its saved position on every exit path.
            context.setInterceptors(old, oldNext);
        }
    }
}
public class ValidationUtils {

    /**
     * Validates objects.
     *
     * @param content an object to be validated
     * @param app the current app
     * @return a list of error messages or empty if object is valid
     */
    public static String[] validateObject(App app, ParaObject content) {
        if (content == null || app == null) {
            return new String[] { "Object cannot be null." };
        }
        try {
            String type = content.getType();
            // A custom type is a Sysprop instance whose type differs from the core Sysprop type.
            boolean isCustomType = (content instanceof Sysprop) && !type.equals(Utils.type(Sysprop.class));
            // Validate custom types and user-defined properties
            if (!app.getValidationConstraints().isEmpty() && isCustomType) {
                Map<String, Map<String, Map<String, ?>>> fieldsMap = app.getValidationConstraints().get(type);
                if (fieldsMap != null && !fieldsMap.isEmpty()) {
                    LinkedList<String> errors = new LinkedList<>();
                    for (Map.Entry<String, Map<String, Map<String, ?>>> e : fieldsMap.entrySet()) {
                        String field = e.getKey();
                        Object actualValue = ((Sysprop) content).getProperty(field);
                        // overriding core property validation rules is allowed
                        if (actualValue == null && PropertyUtils.isReadable(content, field)) {
                            actualValue = PropertyUtils.getProperty(content, field);
                        }
                        Map<String, Map<String, ?>> consMap = e.getValue();
                        for (Map.Entry<String, Map<String, ?>> constraint : consMap.entrySet()) {
                            buildAndValidateConstraint(constraint, field, actualValue, errors);
                        }
                    }
                    if (!errors.isEmpty()) {
                        return errors.toArray(new String[0]);
                    }
                }
            }
        } catch (Exception ex) {
            // Best-effort: failures in the custom-constraint machinery are logged
            // and core validation below still runs.
            logger.error(null, ex);
        }
        // Fall through to core (single-argument) object validation.
        return validateObject(content);
    }
}
public class ZipFileReaper {

    /**
     * Reap the pending closes.
     *
     * Reaping is performed in two modes: un-forced, which occurs a set delay after
     * the first pending close, and forced, which occurs when shutting down.
     *
     * An un-forced reap sees pending closes in several configurations:
     * 1. The pending closes may be empty (all were re-opened before the reaper ran);
     *    answer -1 so the reaper waits for a new pending close.
     * 2. One or more pending closes is ready to close; close each of them.
     * 3. None of the pending closes is ready (the expired one was re-opened).
     *
     * In cases 2 and 3, when un-expired pending closes remain, answer the time to
     * wait before the first of them expires. When none remain, answer -1.
     *
     * A forced reap closes all zip files, starting with the pending closes and
     * completing with the un-pended zip files; the final time is set to the reap
     * time and diagnostics are displayed.
     *
     * Reaping is based on two intervals: a minimum delay (the threshold for allowing
     * a close to proceed) and a maximum delay (how long the reaper waits before
     * performing delayed closes). The intent is to avoid a "stutter" of waking the
     * reaper every few milliseconds when many opens/closes occur close together.
     *
     * @param reapAt The time at which the reaping is being performed.
     * @param isShutdownReap True or false telling if to perform a shutdown reap.
     * @return The next reap time; -1 if there are no pending closes.
     */
    private long reap(long reapAt, boolean isShutdownReap) {
        String methodName = "reap";
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, methodName + " At [ " + toRelSec(initialAt, reapAt) + " (s) ] Force [ " + isShutdownReap + " ]");
            Tr.debug(tc, methodName + " All [ " + storage.size() + " ]" + " Pending Quick [ " + pendingQuickStorage.size() + " ]" + " Pending Slow [ " + pendingSlowStorage.size() + " ]");
        }
        // Reap the quick pending closes ...
        long nextQuickReapDelay = REAP_DELAY_INDEFINITE;
        Iterator<ZipFileData> pendingQuick = pendingQuickStorage.values();
        // Stop at the first un-expired entry: storage is ordered by pend time, so
        // everything after it is younger.
        while ((nextQuickReapDelay == REAP_DELAY_INDEFINITE) && pendingQuick.hasNext()) {
            ZipFileData nextPending = pendingQuick.next();
            long nextLastPendAt = nextPending.lastPendAt;
            long nextPendDuration = reapAt - nextLastPendAt;
            if (isShutdownReap) {
                // Shutdown closes all pending, regardless of how long they have waited.
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, methodName + " Path [ " + nextPending.path + " ]" + " Waiting [ " + toAbsSec(nextPendDuration) + " (s) ] (Quick): Forced");
                }
                pendingQuick.remove();
                fullyClose(nextPending, reapAt, IS_SHUTDOWN_REAP);
            } else {
                // Normal reap.
                if (nextPendDuration > quickPendMin) { // Reached the shortest expiration?
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, methodName + " Path [ " + nextPending.path + " ]" + " Waiting [ " + toAbsSec(nextPendDuration) + " (s) ] (Quick): Expired");
                    }
                    pendingQuick.remove();
                    fullyClose(nextPending, reapAt, IS_NOT_SHUTDOWN_REAP);
                } else {
                    // Not yet reached the shortest expiration.
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, methodName + " Path [ " + nextPending.path + " ]" + " Waiting [ " + toAbsSec(nextPendDuration) + " (s) ]: Still Waiting");
                    }
                    if (nextPendDuration < 0) {
                        nextPendDuration = 0; // Should never happen
                    }
                    // Next wake-up uses the MAX delay minus the elapsed wait: per the
                    // javadoc, closes expire after the min delay but the reaper only
                    // wakes at the max delay, to reduce wake-up chatter.
                    nextQuickReapDelay = quickPendMax - nextPendDuration;
                }
            }
        }
        // Reap the slow pending closes ... (mirrors the quick loop with slow thresholds)
        long nextSlowReapDelay = REAP_DELAY_INDEFINITE;
        Iterator<ZipFileData> pendingSlow = pendingSlowStorage.values();
        while ((nextSlowReapDelay == REAP_DELAY_INDEFINITE) && pendingSlow.hasNext()) {
            ZipFileData nextPending = pendingSlow.next();
            long nextLastPendAt = nextPending.lastPendAt;
            long nextPendDuration = reapAt - nextLastPendAt;
            if (isShutdownReap) {
                // Shutdown closes all pending, regardless of how long they have waited.
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, methodName + " Path [ " + nextPending.path + " ]" + " Waiting [ " + toAbsSec(nextPendDuration) + " (s) ] (Slow): Forced");
                }
                pendingSlow.remove();
                fullyClose(nextPending, reapAt, IS_SHUTDOWN_REAP);
            } else {
                // Normal reap.
                if (nextPendDuration > slowPendMin) { // Reached the shortest expiration?
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, methodName + " Path [ " + nextPending.path + " ]" + " Waiting [ " + toAbsSec(nextPendDuration) + " (s) ] (Slow): Expired");
                    }
                    pendingSlow.remove();
                    fullyClose(nextPending, reapAt, IS_NOT_SHUTDOWN_REAP);
                } else {
                    // Not yet reached the shortest expiration.
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, methodName + " Path [ " + nextPending.path + " ]" + " Waiting [ " + toAbsSec(nextPendDuration) + " (s) ]: Still Waiting");
                    }
                    if (nextPendDuration < 0) {
                        nextPendDuration = 0; // Should never happen
                    }
                    nextSlowReapDelay = slowPendMax - nextPendDuration;
                }
            }
        }
        // Maybe, move this into a different method, and invoke from the shutdown thread?
        // Placement here couples normal reaping with shutdown steps, which seems off.
        if (isShutdownReap) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, methodName + " De-activating reaper");
            }
            // We have the lock: there can be no activity since receiving the
            // interrupted exception and setting the reaper inactive.
            // Note: have to set this before pending the outstanding open zip files.
            // Remove of the eldest is not performed while shutting down.
            setIsActive(false);
            setFinalAt(reapAt);
            // Since this is a shut-down reap, all pending closes were force-closed
            // above, regardless of how long they were waiting. Only dangling opens
            // remain to handle.
            for (ZipFileData mustBeOpenOrClosed : storage.values()) {
                String path = mustBeOpenOrClosed.path;
                if (mustBeOpenOrClosed.isFullyClosed()) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, methodName + " Closed [ " + path + " ]: No shutdown action");
                    }
                } else {
                    if (mustBeOpenOrClosed.isPending()) {
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                            Tr.debug(tc, methodName + " Unexpected Pending [ " + path + " ]: Shutdown close");
                        }
                    } else {
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                            Tr.debug(tc, methodName + " Open [ " + path + " ] [ " + mustBeOpenOrClosed.getActiveOpens() + " ]:" + " Shutdown pend and close");
                        }
                        mustBeOpenOrClosed.enactClose(reapAt, ZipFileData.CLOSE_ALL);
                    }
                    fullyClose(mustBeOpenOrClosed, reapAt, IS_SHUTDOWN_REAP);
                }
            }
            // Finalize the zip files, all of which should be closed.
            // Display statistics for each of the zip files.
            for (ZipFileData mustBeClosed : storage.values()) {
                mustBeClosed.setFinal(reapAt);
                mustBeClosed.debugState();
            }
        }
        // Choose the sooner of the quick and slow delays; negative means indefinite.
        long nextReapDelay;
        boolean useQuick;
        if ((nextQuickReapDelay < 0) && (nextSlowReapDelay < 0)) {
            useQuick = true;
            nextReapDelay = REAP_DELAY_INDEFINITE;
        } else if (nextQuickReapDelay < 0) {
            useQuick = false;
            nextReapDelay = nextSlowReapDelay;
        } else if (nextSlowReapDelay < 0) {
            useQuick = true;
            nextReapDelay = nextQuickReapDelay;
        } else {
            if (nextQuickReapDelay < nextSlowReapDelay) {
                useQuick = true;
                nextReapDelay = nextQuickReapDelay;
            } else {
                useQuick = false;
                nextReapDelay = nextSlowReapDelay;
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            String delayText = ((nextReapDelay == REAP_DELAY_INDEFINITE) ? "Indefinite" : toAbsSec(nextReapDelay));
            String speedText = (useQuick ? "Quick" : "Slow");
            Tr.debug(tc, methodName + " Next reap [ " + delayText + " (s) ] (" + speedText + ")");
        }
        return nextReapDelay;
    }
}
public class ReportDownloader { /** * Returns a CharSource of report contents with { @ code ReportDownloadOptions } . The ExportFormat * must be string - based , such as { @ link * com . google . api . ads . admanager . axis . v201805 . ExportFormat # CSV _ DUMP } . * @ param options the options to download the report with * @ return a new CharSource of report results * @ throws IOException if there was an error performing any I / O action , including any SOAP calls * @ throws IllegalStateException if the report is not ready to be downloaded * @ throws IllegalArgumentException if the { @ link ExportFormat } is not a string - based format */ public CharSource getReportAsCharSource ( ReportDownloadOptions options ) throws IOException { } }
Preconditions . checkArgument ( SUPPORTED_CHARSOURCE_EXPORT_FORMATS . contains ( options . getExportFormat ( ) ) , "ExportFormat " + options . getExportFormat ( ) + " cannot be used with CharSource" ) ; ByteSource byteSource = Resources . asByteSource ( getDownloadUrl ( options ) ) ; return ( options . getUseGzipCompression ( ) ? new GZippedByteSource ( byteSource ) : byteSource ) . asCharSource ( REPORT_CHARSET ) ;
public class ResultSetResult { /** * { @ inheritDoc } */ @ Override public int getInt ( int columnIndex ) { } }
int value ; try { value = resultSet . getInt ( resultIndexToResultSetIndex ( columnIndex ) ) ; } catch ( SQLException e ) { throw new GeoPackageException ( "Failed to get int value for column index: " + columnIndex , e ) ; } return value ;
public class JDBCUtils { /** * Returns true if the query result has at least one row . */ public static boolean queryHasResult ( Statement stmt , String sql ) { } }
try { ResultSet rs = stmt . executeQuery ( sql ) ; try { return rs . next ( ) ; } finally { rs . close ( ) ; } } catch ( SQLException e ) { throw new DukeException ( e ) ; }
public class InternalService {

    /**
     * Deletes a conversation, delivering the (void) outcome through the callback.
     * NOTE(review): the original javadoc said "create a conversation", but the
     * code clearly performs a delete — doc corrected to match the code.
     *
     * @param conversationId ID of a conversation to delete.
     * @param eTag ETag for the server to check if the local version of the data
     *             is the same as the server-side one (optimistic concurrency).
     * @param callback Callback to deliver the result; may be null.
     */
    public void deleteConversation(@NonNull final String conversationId, final String eTag, @Nullable Callback<ComapiResult<Void>> callback) {
        // Bridge the internal reactive delete call to the callback-style API.
        adapter.adapt(deleteConversation(conversationId, eTag), callback);
    }
}
public class NaaccrOptions { /** * Convenience method that computes if a given item needs to be ignored , based on the include / exclude lists . * @ param naaccrId NAACCR ID * @ return true if the corresponding item needs to be processed . */ public boolean processItem ( String naaccrId ) { } }
if ( _itemsToInclude != null ) return _itemsToInclude . contains ( naaccrId ) ; else if ( _itemsToExclude != null ) return ! _itemsToExclude . contains ( naaccrId ) ; return true ;
public class Capabilities { /** * Sauce labs has specific capabilities to manage the selenium version used . The version is obtained from the * POM ( or could be passed in via CMD to override ) and then set so that Sauce sets the specific selenium version , * instead of their default : https : / / wiki . saucelabs . com / display / DOCS / Test + Configuration + Options # TestConfigurationOptions - SeleniumVersion * Additionally , the iedriverVersion is set to match the selenium version as suggested , if ie is the chosen browser * Finally , the default platform for edge is set to windows 10 */ public void setupSauceCapabilities ( ) { } }
if ( Sauce . isSauce ( ) ) { // set the selenium version desiredCapabilities . setCapability ( "seleniumVersion" , System . getProperty ( "selenium.version" ) ) ; // set the ie driver if needed if ( desiredCapabilities . getBrowserName ( ) . equals ( "internet explorer" ) ) { desiredCapabilities . setCapability ( "iedriverVersion" , System . getProperty ( "selenium.version" ) ) ; } }
public class JsonTextMessageValidator { /** * Constructs the error message of a failed validation based on the processing report passed from * com . github . fge . jsonschema . core . report * @ param report The report containing the error message * @ return A string representation of all messages contained in the report */ private String constructErrorMessage ( ProcessingReport report ) { } }
StringBuilder stringBuilder = new StringBuilder ( ) ; stringBuilder . append ( "Json validation failed: " ) ; report . forEach ( processingMessage -> stringBuilder . append ( processingMessage . getMessage ( ) ) ) ; return stringBuilder . toString ( ) ;
public class CPMeasurementUnitLocalServiceUtil {

    /**
     * Creates a new cp measurement unit with the primary key. Does not add the
     * cp measurement unit to the database.
     *
     * @param CPMeasurementUnitId the primary key for the new cp measurement unit
     * @return the new cp measurement unit
     */
    public static com.liferay.commerce.product.model.CPMeasurementUnit createCPMeasurementUnit(long CPMeasurementUnitId) {
        // Static facade: delegate to the service instance resolved by getService().
        return getService().createCPMeasurementUnit(CPMeasurementUnitId);
    }
}
public class Statements {

    /**
     * Set the statement parameters, starting at the index, in the order of the params.
     *
     * @param index index of the first parameter to set
     * @param stmt prepared statement to populate
     * @param params long values to bind, in order
     * @return the populated prepared statement
     * @throws SQLException if binding a parameter fails
     * @since 3.0.0
     */
    public static PreparedStatement setLongs(int index, PreparedStatement stmt, long... params) throws SQLException {
        // Delegate to the generic setter; the null arguments presumably select
        // the long[] overload slot among several typed slots — TODO confirm
        // against the set(...) implementation.
        return set(index, stmt, null, params, null);
    }
}
public class Capacitor {

    /**
     * Ring functions: fetch a ByteBuffer from the reuse ring, or allocate a
     * fresh one when the current slot holds no pooled buffer.
     *
     * @return a cleared buffer ready for writing
     */
    private ByteBuffer ringGet() {
        ByteBuffer bb = null;
        synchronized (ring) {
            bb = ring[start];
            ring[start] = null;
            // Advance the cursor only when a pooled buffer was taken; the ring
            // spans indices 0..15 (16 slots).
            // NOTE(review): when the slot is empty the cursor does NOT advance,
            // so the next call probes the same slot — confirm this matches the
            // producer side (ringPut) before changing.
            if (bb != null && ++start > 15) start = 0;
        }
        if (bb == null) {
            bb = ByteBuffer.allocate(DEFAULT_CHUNK);
        } else {
            bb.clear(); // refresh reused buffer
        }
        return bb;
    }
}
public class QueryParameters { /** * Checks is specified key is OUT parameter . * OUT and INOUT parameter would be considered as OUT parameter * @ param key Key * @ return true - if key is OUT parameter */ public boolean isOutParameter ( String key ) { } }
boolean isOut = false ; if ( this . getDirection ( processKey ( key ) ) == Direction . INOUT || this . getDirection ( processKey ( key ) ) == Direction . OUT ) { isOut = true ; } return isOut ;
public class MobileDevice { /** * Gets the deviceType value for this MobileDevice . * @ return deviceType * < span class = " constraint ReadOnly " > This field is read only and * will be ignored when sent to the API . < / span > */ public com . google . api . ads . adwords . axis . v201809 . cm . MobileDeviceDeviceType getDeviceType ( ) { } }
return deviceType ;
public class FuzzyLite { /** * Sets whether the library is set to log information * @ param logging indicates whether the library is set to log information */ public static void setLogging ( boolean logging ) { } }
if ( logging ) { logger . setLevel ( debugging ? Level . FINE : Level . INFO ) ; } else { logger . setLevel ( Level . OFF ) ; }
public class Instant { /** * Calculates the amount of time until another instant in terms of the specified unit . * This calculates the amount of time between two { @ code Instant } * objects in terms of a single { @ code TemporalUnit } . * The start and end points are { @ code this } and the specified instant . * The result will be negative if the end is before the start . * The calculation returns a whole number , representing the number of * complete units between the two instants . * The { @ code Temporal } passed to this method is converted to a * { @ code Instant } using { @ link # from ( TemporalAccessor ) } . * For example , the amount in days between two dates can be calculated * using { @ code startInstant . until ( endInstant , SECONDS ) } . * There are two equivalent ways of using this method . * The first is to invoke this method . * The second is to use { @ link TemporalUnit # between ( Temporal , Temporal ) } : * < pre > * / / these two lines are equivalent * amount = start . until ( end , SECONDS ) ; * amount = SECONDS . between ( start , end ) ; * < / pre > * The choice should be made based on which makes the code more readable . * The calculation is implemented in this method for { @ link ChronoUnit } . * The units { @ code NANOS } , { @ code MICROS } , { @ code MILLIS } , { @ code SECONDS } , * { @ code MINUTES } , { @ code HOURS } , { @ code HALF _ DAYS } and { @ code DAYS } * are supported . Other { @ code ChronoUnit } values will throw an exception . * If the unit is not a { @ code ChronoUnit } , then the result of this method * is obtained by invoking { @ code TemporalUnit . between ( Temporal , Temporal ) } * passing { @ code this } as the first argument and the converted input temporal * as the second argument . * This instance is immutable and unaffected by this method call . 
* @ param endExclusive the end date , exclusive , which is converted to an { @ code Instant } , not null * @ param unit the unit to measure the amount in , not null * @ return the amount of time between this instant and the end instant * @ throws DateTimeException if the amount cannot be calculated , or the end * temporal cannot be converted to an { @ code Instant } * @ throws UnsupportedTemporalTypeException if the unit is not supported * @ throws ArithmeticException if numeric overflow occurs */ @ Override public long until ( Temporal endExclusive , TemporalUnit unit ) { } }
Instant end = Instant . from ( endExclusive ) ; if ( unit instanceof ChronoUnit ) { ChronoUnit f = ( ChronoUnit ) unit ; switch ( f ) { case NANOS : return nanosUntil ( end ) ; case MICROS : return nanosUntil ( end ) / 1000 ; case MILLIS : return Math . subtractExact ( end . toEpochMilli ( ) , toEpochMilli ( ) ) ; case SECONDS : return secondsUntil ( end ) ; case MINUTES : return secondsUntil ( end ) / SECONDS_PER_MINUTE ; case HOURS : return secondsUntil ( end ) / SECONDS_PER_HOUR ; case HALF_DAYS : return secondsUntil ( end ) / ( 12 * SECONDS_PER_HOUR ) ; case DAYS : return secondsUntil ( end ) / ( SECONDS_PER_DAY ) ; } throw new UnsupportedTemporalTypeException ( "Unsupported unit: " + unit ) ; } return unit . between ( this , end ) ;
public class CheckRunManager {

    /**
     * Run analysis with feedback.
     *
     * @param onStatus Command executed repeatedly receiving status updates
     * @param onCompletion Command executed on completion; may be null
     */
    public void run(final StatusUpdate onStatus, final Command onCompletion) {
        // Ensure any active analysis is cancelled first.
        cancelExistingAnalysis();
        // BUG FIX: the original only returned when onCompletion was non-null,
        // so an empty recheck list with a null callback fell through and
        // started an (empty) analysis anyway. Always return when there is
        // nothing to run, notifying the callback if one was supplied.
        if (rechecks.isEmpty()) {
            if (onCompletion != null) {
                onCompletion.execute();
            }
            return;
        }
        checkRunner.run(rechecks, onStatus, onCompletion);
        rechecks.clear();
    }
}
public class N {

    /**
     * Maps the elements in the index range [fromIndex, toIndex) of the given
     * collection to a LongList using the supplied function. Mostly designed as
     * a one-step operation; {@code java.util.stream.Stream} is preferred for
     * multi-phase pipelines.
     *
     * @param c source collection; may be null/empty only when the range is empty
     * @param fromIndex inclusive start index
     * @param toIndex exclusive end index
     * @param func mapping function applied to each element in range
     * @return a LongList containing the mapped values
     * @throws E if the mapping function throws
     */
    public static <T, E extends Exception> LongList mapToLong(final Collection<? extends T> c, final int fromIndex, final int toIndex, final Try.ToLongFunction<? super T, E> func) throws E {
        checkFromToIndex(fromIndex, toIndex, size(c));
        N.checkArgNotNull(func);
        // Null/empty source with an empty range: nothing to map.
        if (N.isNullOrEmpty(c) && fromIndex == 0 && toIndex == 0) {
            return new LongList();
        }
        final LongList result = new LongList(toIndex - fromIndex);
        if (c instanceof List && c instanceof RandomAccess) {
            // Random-access lists: positional access is cheapest.
            final List<T> list = (List<T>) c;
            for (int i = fromIndex; i < toIndex; i++) {
                result.add(func.applyAsLong(list.get(i)));
            }
        } else {
            // Sequential sources: skip the first fromIndex elements, then map
            // until toIndex is reached.
            int idx = 0;
            for (T e : c) {
                if (idx++ < fromIndex) {
                    continue;
                }
                result.add(func.applyAsLong(e));
                // idx was already incremented above, so this exits right after
                // consuming the element at position toIndex - 1.
                if (idx >= toIndex) {
                    break;
                }
            }
        }
        return result;
    }
}
public class ManagedDatabasesInner {

    /**
     * Gets one page of managed databases by following a pagination link.
     *
     * @param nextPageLink the NextLink from the previous successful call to the List operation
     * @return the PagedList&lt;ManagedDatabaseInner&gt; object wrapped in {@link ServiceResponse} if successful
     * @throws IllegalArgumentException thrown if parameters fail the validation
     */
    public Observable<ServiceResponse<Page<ManagedDatabaseInner>>> listByInstanceNextSinglePageAsync(final String nextPageLink) {
        if (nextPageLink == null) {
            throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
        }
        // The next-link is already a fully formed URL; the format call is a pass-through.
        String nextUrl = String.format("%s", nextPageLink);
        return service.listByInstanceNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<ManagedDatabaseInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<ManagedDatabaseInner>>> call(Response<ResponseBody> response) {
                    try {
                        // Deserialize the raw HTTP response into a typed page result.
                        ServiceResponse<PageImpl<ManagedDatabaseInner>> result = listByInstanceNextDelegate(response);
                        return Observable.just(new ServiceResponse<Page<ManagedDatabaseInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        // Surface deserialization/validation failures on the stream.
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class CholeskyDecomposition {

    /**
     * Computes the determinant of A.
     *
     * @return the determinant of A
     */
    public double getDet() {
        // Product of the diagonal entries of the stored factor L.
        // NOTE(review): for a Cholesky factorization A = L * L^T, det(A) is the
        // SQUARE of the product of L's diagonal; whether this plain product is
        // correct depends on what the L matrix actually stores here (e.g. a
        // combined L/L^T layout) — confirm against the decomposition code.
        double det = 1;
        for (int i = 0; i < L.rows(); i++)
            det *= L.get(i, i);
        return det;
    }
}
public class ServicesInner {

    /**
     * Start service.
     * The services resource is the top-level resource that represents the Data
     * Migration Service. This action starts the service so it can be used for
     * data migration.
     *
     * @param groupName Name of the resource group
     * @param serviceName Name of the service
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> beginStartAsync(String groupName, String serviceName, final ServiceCallback<Void> serviceCallback) {
        // Adapt the Observable-based overload to the callback-style API.
        return ServiceFuture.fromResponse(beginStartWithServiceResponseAsync(groupName, serviceName), serviceCallback);
    }
}
public class Quaterniond { /** * Set this quaternion to be a copy of q . * @ param q * the { @ link Quaterniondc } to copy * @ return this */ public Quaterniond set ( Quaterniondc q ) { } }
x = q . x ( ) ; y = q . y ( ) ; z = q . z ( ) ; w = q . w ( ) ; return this ;
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertIDDUNITBASEToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class CmsAttributeHandler { /** * Return true if there is a single remaining value , which is optional . < p > * @ return true if this has only one optional value */ public boolean hasSingleOptionalValue ( ) { } }
return ( ( getEntityType ( ) . getAttributeMinOccurrence ( m_attributeName ) == 0 ) && ( m_entity . getAttribute ( m_attributeName ) != null ) && ( m_entity . getAttribute ( m_attributeName ) . getValueCount ( ) == 1 ) ) ;
public class CmsLoginController { /** * Called on initialization . < p > */ public void onInit ( ) { } }
String authToken = m_params . getAuthToken ( ) ; if ( authToken != null ) { m_ui . showForgotPasswordView ( authToken ) ; } else if ( m_params . isReset ( ) ) { m_ui . showPasswordResetDialog ( ) ; } else { boolean loggedIn = ! A_CmsUI . getCmsObject ( ) . getRequestContext ( ) . getCurrentUser ( ) . isGuestUser ( ) ; m_ui . setSelectableOrgUnits ( CmsLoginHelper . getOrgUnitsForLoginDialog ( A_CmsUI . getCmsObject ( ) , null ) ) ; if ( loggedIn ) { if ( m_params . isLogout ( ) ) { logout ( ) ; } else { m_ui . showAlreadyLoggedIn ( ) ; } } else { m_ui . showLoginView ( m_params . getOufqn ( ) ) ; } }
public class CmsImportVersion7 {

    /**
     * Rewrites all parseable files to assure the link check.<p>
     * This is a global process, executed only once at the end of the import,
     * to be sure that all link targets are available.<p>
     *
     * @see #addXmlDigesterRules(Digester)
     */
    public void rewriteParseables() {
        // Nothing collected during import: nothing to rewrite.
        if (m_parseables.isEmpty()) {
            return;
        }
        I_CmsReport report = getReport();
        CmsObject cms = getCms();
        // Suppress log entries while the link-rewrite pass touches resources.
        cms.getRequestContext().setAttribute(CmsLogEntry.ATTR_LOG_ENTRY, Boolean.FALSE);
        report.println(Messages.get().container(Messages.RPT_START_PARSE_LINKS_0), I_CmsReport.FORMAT_HEADLINE);
        parseLinks(cms, report);
        report.println(Messages.get().container(Messages.RPT_END_PARSE_LINKS_0), I_CmsReport.FORMAT_HEADLINE);
        // Release the collected files; this pass runs only once per import.
        m_parseables = null;
    }
}
public class DefaultImageDecoder { /** * Decodes gif into CloseableImage . * @ param encodedImage input image ( encoded bytes plus meta data ) * @ return a CloseableImage */ public CloseableImage decodeGif ( final EncodedImage encodedImage , final int length , final QualityInfo qualityInfo , final ImageDecodeOptions options ) { } }
if ( ! options . forceStaticImage && mAnimatedGifDecoder != null ) { return mAnimatedGifDecoder . decode ( encodedImage , length , qualityInfo , options ) ; } return decodeStaticImage ( encodedImage , options ) ;
public class AppDescriptor { /** * Create an ` AppDescriptor ` with appName and entry class specified . * If ` appName ` is ` null ` or blank , it will try the following * approach to get app name : * 1 . check the { @ link Version # getArtifactId ( ) artifact id } and use it unless * 2 . if artifact id is null or empty , then infer app name using { @ link AppNameInferer } * @ param appName * the app name * @ param entryClass * the entry class * @ return * an ` AppDescriptor ` instance */ public static AppDescriptor of ( String appName , Class < ? > entryClass ) { } }
System . setProperty ( "osgl.version.suppress-var-found-warning" , "true" ) ; return of ( appName , entryClass , Version . of ( entryClass ) ) ;
public class CmsSite { /** * Returns the site path for the given root path in case the root path * actually starts with this site root , or < code > null < / code > in case * the root path does not . < p > * @ param rootPath the root path to get the site path for * @ return the site path for the given root path in case the root path * actually starts with this site root , or < code > null < / code > in case * the root path does not */ public String getSitePath ( String rootPath ) { } }
String result = null ; if ( CmsStringUtil . isNotEmpty ( rootPath ) ) { if ( rootPath . startsWith ( m_siteRoot ) ) { result = rootPath . substring ( m_siteRoot . length ( ) ) ; } } return result ;
public class CodepointHelper {

    /**
     * Insert a codepoint into the buffer, automatically dealing with surrogate
     * pairs.
     *
     * @param aSeq source sequence
     * @param nIndex index
     * @param aCodepoint codepoint to be inserted
     */
    public static void insert(final CharSequence aSeq, final int nIndex, @Nonnull final Codepoint aCodepoint) {
        // Unwrap the Codepoint and delegate to the int-based overload.
        insert(aSeq, nIndex, aCodepoint.getValue());
    }
}
public class FileOperations { /** * Delete an existing directory if it is existing . The directory needs to be * empty before it can be deleted . * @ param aDir * The directory to be deleted . May not be < code > null < / code > . * @ return A non - < code > null < / code > error code . * @ see # deleteDir ( File ) */ @ Nonnull public static FileIOError deleteDirIfExisting ( @ Nonnull final File aDir ) { } }
final FileIOError aError = deleteDir ( aDir ) ; if ( aError . getErrorCode ( ) . equals ( EFileIOErrorCode . SOURCE_DOES_NOT_EXIST ) ) return EFileIOErrorCode . NO_ERROR . getAsIOError ( EFileIOOperation . DELETE_DIR , aDir ) ; return aError ;
public class QueryBuilder { /** * Remove the provided { @ link FeatureCode } from the set of query constraints . * @ param code1 the first { @ link FeatureCode } to remove * @ param codes the subsequent { @ link FeatureCode } s to remove * @ return this */ public QueryBuilder removeFeatureCodes ( final FeatureCode code1 , final FeatureCode ... codes ) { } }
featureCodes . remove ( code1 ) ; featureCodes . removeAll ( Arrays . asList ( codes ) ) ; return this ;
public class ApplicationFeatureViewModel {

    /**
     * Helper: converts a sorted set of feature ids into view models of the
     * requested subtype, preserving the set's iteration order.
     *
     * @param members feature ids to convert
     * @return a mutable list with one view model per id
     */
    <T extends ApplicationFeatureViewModel> List<T> asViewModels(final SortedSet<ApplicationFeatureId> members) {
        // Guava's transform is lazy; copying into an ArrayList materializes it.
        final Function<ApplicationFeatureId, T> function = Functions.<T>asViewModelForId(applicationFeatureRepository, container);
        return Lists.newArrayList(Iterables.transform(members, function));
    }
}
public class TagletWriterImpl {

    /**
     * {@inheritDoc}
     */
    public Content deprecatedTagOutput(Doc doc) {
        ContentBuilder result = new ContentBuilder();
        Tag[] deprs = doc.tags("deprecated");
        if (doc instanceof ClassDoc) {
            // Type-level deprecation: label plus the first @deprecated comment, if any.
            if (utils.isDeprecated((ProgramElementDoc) doc)) {
                result.addContent(HtmlTree.SPAN(HtmlStyle.deprecatedLabel,
                        new StringContent(configuration.getText("doclet.Deprecated"))));
                result.addContent(RawHtml.nbsp);
                if (deprs.length > 0) {
                    Tag[] commentTags = deprs[0].inlineTags();
                    if (commentTags.length > 0) {
                        // NOTE(review): inlineTags() is re-invoked here although
                        // commentTags already holds the same result.
                        result.addContent(commentTagsToOutput(null, doc, deprs[0].inlineTags(), false));
                    }
                }
            }
        } else {
            // Member-level deprecation.
            MemberDoc member = (MemberDoc) doc;
            if (utils.isDeprecated((ProgramElementDoc) doc)) {
                result.addContent(HtmlTree.SPAN(HtmlStyle.deprecatedLabel,
                        new StringContent(configuration.getText("doclet.Deprecated"))));
                result.addContent(RawHtml.nbsp);
                if (deprs.length > 0) {
                    Content body = commentTagsToOutput(null, doc, deprs[0].inlineTags(), false);
                    if (!body.isEmpty())
                        result.addContent(HtmlTree.SPAN(HtmlStyle.deprecationComment, body));
                }
            } else {
                // Member not deprecated itself, but its enclosing class is:
                // still render the label, without a comment body.
                if (utils.isDeprecated(member.containingClass())) {
                    result.addContent(HtmlTree.SPAN(HtmlStyle.deprecatedLabel,
                            new StringContent(configuration.getText("doclet.Deprecated"))));
                    result.addContent(RawHtml.nbsp);
                }
            }
        }
        return result;
    }
}
public class PersistenceUtils { /** * Delete children that are no longer contained in list of collection items . * @ param resource Parent resource * @ param data List of collection items */ public static void deleteChildrenNotInCollection ( Resource resource , ConfigurationCollectionPersistData data ) { } }
Set < String > collectionItemNames = data . getItems ( ) . stream ( ) . map ( item -> item . getCollectionItemName ( ) ) . collect ( Collectors . toSet ( ) ) ; for ( Resource child : resource . getChildren ( ) ) { if ( ! collectionItemNames . contains ( child . getName ( ) ) && ! StringUtils . equals ( JCR_CONTENT , child . getName ( ) ) ) { deletePageOrResource ( child ) ; } }
public class JMessageClient {

    /**
     * Removes members from a chat room.
     *
     * @param roomId chat room id
     * @param members {@link Members}
     * @return No content
     * @throws APIConnectionException connect exception
     * @throws APIRequestException request exception
     */
    public ResponseWrapper removeChatRoomMembers(long roomId, Members members) throws APIConnectionException, APIRequestException {
        // Facade: delegate to the dedicated chat-room client.
        return _chatRoomClient.removeChatRoomMembers(roomId, members);
    }
}
public class ExecutedTradeHandler { /** * { @ inheritDoc } */ @ Override public void handleChannelData ( final String action , final JSONArray jsonArray ) throws BitfinexClientException { } }
try { final List < BitfinexExecutedTrade > trades = new ArrayList < > ( ) ; if ( action == "tu" ) { return ; // Ignore tu messages ( see issue # 13) } // Snapshots contain multiple executes entries , updates only one if ( jsonArray . get ( 0 ) instanceof JSONArray ) { for ( int pos = 0 ; pos < jsonArray . length ( ) ; pos ++ ) { final JSONArray parts = jsonArray . getJSONArray ( pos ) ; BitfinexExecutedTrade trade = jsonToExecutedTrade ( parts ) ; trades . add ( trade ) ; } } else { BitfinexExecutedTrade trade = jsonToExecutedTrade ( jsonArray ) ; trades . add ( trade ) ; } executedTradesConsumer . accept ( symbol , trades ) ; } catch ( JSONException e ) { throw new BitfinexClientException ( e ) ; }
public class GoAPIGenerator { /** * Writes the function arguments */ private void writeOutArgs ( List < String > elements , Writer out , boolean hasParams ) throws IOException { } }
if ( elements != null && elements . size ( ) > 0 ) { ArrayList < String > args = new ArrayList < String > ( ) ; for ( String param : elements ) { if ( param . equalsIgnoreCase ( "boolean" ) ) { args . add ( "boolean bool" ) ; } else if ( param . equalsIgnoreCase ( "integer" ) ) { args . add ( "i int" ) ; } else if ( param . equalsIgnoreCase ( "string" ) ) { args . add ( "str string" ) ; } else if ( param . equalsIgnoreCase ( "type" ) ) { args . add ( "t string" ) ; } else { args . add ( param . toLowerCase ( ) + " string" ) ; } } out . write ( StringUtils . join ( args , ", " ) ) ; }
public class ColorGenerator { /** * Returns the next random color */ public Color next ( ) { } }
int r = rand . nextInt ( 256 ) ; int g = rand . nextInt ( 256 ) ; int b = rand . nextInt ( 256 ) ; return ( seed == null ) ? new Color ( r , g , b ) : new Color ( ( r + seed . getRed ( ) ) / 2 , ( g + seed . getGreen ( ) ) / 2 , ( b + seed . getBlue ( ) ) / 2 ) ;
public class TreeBuilder {

    /**
     * Convert data content for all resources matching the resource selector and
     * within the sub path.
     *
     * @param converter content converter
     * @param subpath sub path
     * @param resourceSelector resource selector
     * @return builder
     */
    public TreeBuilder<T> convert(ContentConverter<T> converter, Path subpath, ResourceSelector<T> resourceSelector) {
        // Translate the path into a path selector and delegate to the
        // selector-based overload.
        return convert(converter, PathUtil.subpathSelector(subpath), resourceSelector);
    }
}
public class MviBasePresenter {

    /**
     * Actually subscribes the view as consumer to the internal view relay.
     *
     * @param view The mvp view
     */
    @MainThread
    private void subscribeViewStateConsumerActually(@NonNull final V view) {
        // Defensive null checks: both indicate an internal framework bug.
        if (view == null) {
            throw new NullPointerException("View is null");
        }
        if (viewStateConsumer == null) {
            throw new NullPointerException(ViewStateConsumer.class.getSimpleName() + " is null. This is a Mosby internal bug. Please file an issue at https://github.com/sockeqwe/mosby/issues");
        }
        // Forward every view state emitted by the behavior subject to the
        // user-supplied consumer; keep the disposable for later teardown.
        viewRelayConsumerDisposable = viewStateBehaviorSubject.subscribe(new Consumer<VS>() {
            @Override
            public void accept(VS vs) throws Exception {
                viewStateConsumer.accept(view, vs);
            }
        });
    }
}
public class ConfigBuilder {

    /**
     * Build the config for an ECO topology definition.
     *
     * @param topologyDefinition ECO topology definition
     * @return the assembled topology Config
     * @throws IllegalArgumentException if a component JVM-options entry has no options
     */
    public Config buildConfig(EcoTopologyDefinition topologyDefinition) throws IllegalArgumentException {
        Map<String, Object> configMap = topologyDefinition.getConfig();
        Config config = new Config();
        for (Map.Entry<String, Object> entry : configMap.entrySet()) {
            if (entry.getKey().equals(COMPONENT_RESOURCE_MAP)) {
                setComponentLevelResource(config, entry);
            } else if (entry.getKey().equals(COMPONENT_JVM_OPTIONS)) {
                // Each list element stringifies to something like
                // "{id=<component>, options=[...]}"; parse id and options out of it.
                List<Object> objects = (List<Object>) entry.getValue();
                for (Object obj : objects) {
                    String objString = obj.toString();
                    // Strip the surrounding braces before locating the keys.
                    objString = objString.replace(LEFT_BRACE, WHITESPACE);
                    objString = objString.replace(RIGHT_BRACE, WHITESPACE);
                    int idIndex = objString.indexOf(ID);
                    int optionsIndex = objString.indexOf(OPTIONS);
                    String id = getIdValue(objString, idIndex);
                    String jvmOptions;
                    if (optionsIndex != -1) {
                        // Everything after "options=", minus the list brackets.
                        int equalsIndex = objString.indexOf(EQUALS, optionsIndex);
                        jvmOptions = objString.substring(equalsIndex + 1, objString.length());
                        jvmOptions = jvmOptions.replace(LEFT_BRACKET, "").replace(RIGHT_BRACKET, "");
                    } else {
                        throw new IllegalArgumentException("You must specify the JVM options for your component");
                    }
                    config.setComponentJvmOptions(id, jvmOptions);
                }
            } else {
                // All other keys pass straight through into the config map.
                config.put(entry.getKey(), entry.getValue());
            }
        }
        return config;
    }
}
public class ControlsView { /** * Change activation of all buttons . * @ param enable true to enable all buttons false otherwise */ void activateButtons ( final boolean enable ) { } }
this . unloadButton . setDisable ( ! enable ) ; this . playPauseButton . setDisable ( ! enable ) ; this . backwardButton . setDisable ( ! enable ) ; this . forwardButton . setDisable ( ! enable ) ; this . stopButton . setDisable ( ! enable ) ;
public class TranslationServiceClient { /** * Deletes a glossary , or cancels glossary construction if the glossary isn ' t created yet . Returns * NOT _ FOUND , if the glossary doesn ' t exist . * < p > Sample code : * < pre > < code > * try ( TranslationServiceClient translationServiceClient = TranslationServiceClient . create ( ) ) { * String formattedName = TranslationServiceClient . formatGlossaryName ( " [ PROJECT ] " , " [ LOCATION ] " , " [ GLOSSARY ] " ) ; * DeleteGlossaryRequest request = DeleteGlossaryRequest . newBuilder ( ) * . setName ( formattedName ) * . build ( ) ; * DeleteGlossaryResponse response = translationServiceClient . deleteGlossaryAsync ( request ) . get ( ) ; * < / code > < / pre > * @ param request The request object containing all of the parameters for the API call . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi ( "The surface for long-running operations is not stable yet and may change in the future." ) public final OperationFuture < DeleteGlossaryResponse , DeleteGlossaryMetadata > deleteGlossaryAsync ( DeleteGlossaryRequest request ) { } }
// Delegate to the generated long-running-operation callable, which issues the
// DeleteGlossary RPC and exposes the in-flight operation as a future.
return deleteGlossaryOperationCallable ( ) . futureCall ( request ) ;
public class UserDetail { /** * A list of IAM groups that the user is in . * @ return A list of IAM groups that the user is in . */ public java . util . List < String > getGroupList ( ) { } }
if ( groupList == null ) { groupList = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return groupList ;
public class SelfExtractRun { /** * Determine user home based on platform type . * Java user . home property is correct in all cases except * for cygwin . For cygwin , user . home is Windows home , * so use HOME env var instead . * @ return user home directory */ private static String getUserHome ( ) { } }
String home ; if ( platformType == SelfExtractUtils . PlatformType_CYGWIN ) { home = System . getenv ( "HOME" ) ; } else { home = System . getProperty ( "user.home" ) ; } return home ;
public class VictimsRecord { /** * Test if this instance of { @ link VictimsRecord } contains all the file * hashes present in the given instance . Comparison is done on all available * algorithms until a subset match is found . If for an algorithm , either * this or that record is empty , check is skipped . * @ param that * @ return */ public boolean containsAll ( Object o ) { } }
if ( ! ( o instanceof VictimsRecord ) ) { return false ; } VictimsRecord that = ( VictimsRecord ) o ; for ( Algorithms algorithm : VictimsConfig . algorithms ( ) ) { if ( ! ( this . containsAlgorithm ( algorithm ) && that . containsAlgorithm ( algorithm ) ) ) { // skip if both this and that do not have the current algorithm continue ; } HashSet < String > thatHashes = new HashSet < String > ( that . getHashes ( algorithm ) . keySet ( ) ) ; HashSet < String > thisHashes = new HashSet < String > ( this . getHashes ( algorithm ) . keySet ( ) ) ; if ( thisHashes . isEmpty ( ) || thatHashes . isEmpty ( ) ) { // there is no real value in comparing empty sets continue ; } if ( thisHashes . containsAll ( thatHashes ) ) { // we found a subset match return true ; } } // we have gone through all algorithms without finding a subset match return this . hash . equals ( that . hash ) ;
public class BasicAuthenticator { @ Override void authenticate ( Map < String , Object > options ) { } }
final Map < String , Object > auth = new HashMap < > ( ) ; auth . put ( AbstractReplicatorConfiguration . REPLICATOR_AUTH_TYPE , AbstractReplicatorConfiguration . AUTH_TYPE_BASIC ) ; auth . put ( AbstractReplicatorConfiguration . REPLICATOR_AUTH_USER_NAME , username ) ; auth . put ( AbstractReplicatorConfiguration . REPLICATOR_AUTH_PASSWORD , password ) ; options . put ( AbstractReplicatorConfiguration . REPLICATOR_AUTH_OPTION , auth ) ;
public class JDBC4ResultSet { /** * ResultSet object as a byte array in the Java programming language . */ @ Override public byte [ ] getBytes ( int columnIndex ) throws SQLException { } }
// JDBC column indexes are 1-based; validate before touching the table.
checkColumnBounds ( columnIndex ) ;
try {
    // VoltTable uses 0-based column indexes, hence the -1 below.
    // STRING columns are returned via getStringAsBytes, VARBINARY as stored;
    // any other column type cannot be represented as byte[].
    if ( table . getColumnType ( columnIndex - 1 ) == VoltType . STRING )
        return table . getStringAsBytes ( columnIndex - 1 ) ;
    else if ( table . getColumnType ( columnIndex - 1 ) == VoltType . VARBINARY )
        return table . getVarbinary ( columnIndex - 1 ) ;
    else
        throw SQLError . get ( SQLError . CONVERSION_NOT_FOUND , table . getColumnType ( columnIndex - 1 ) , "byte[]" ) ;
} catch ( SQLException x ) {
    // SQL-level failures propagate unchanged.
    throw x ;
} catch ( Exception x ) {
    // Anything unexpected is wrapped into a SQLException.
    throw SQLError . get ( x ) ;
}
public class PackedLocalDate { /** * Returns the quarter of the year of the given date as an int from 1 to 4 , or - 1 , if the argument is the * MISSING _ VALUE for DateColumn */ public static int getQuarter ( int packedDate ) { } }
if ( packedDate == DateColumnType . missingValueIndicator ( ) ) { return - 1 ; } Month month = getMonth ( packedDate ) ; switch ( month ) { case JANUARY : case FEBRUARY : case MARCH : return 1 ; case APRIL : case MAY : case JUNE : return 2 ; case JULY : case AUGUST : case SEPTEMBER : return 3 ; case OCTOBER : case NOVEMBER : default : // must be december return 4 ; }
public class Tuple { /** * Returns true if the given binding can be added to the give tuple . */ private static boolean isBindingCompatible ( Tuple tuple , VarBindingDef binding ) { } }
VarValueDef currentValue = tuple . getBinding ( binding . getVarDef ( ) ) ; return ( currentValue == null || currentValue . equals ( binding . getValueDef ( ) ) ) ;
public class XmlResponsesSaxParser { /** * Parses an AccessControlListHandler response XML document from an input * stream . * @ param inputStream * XML data input stream . * @ return the XML handler object populated with data parsed from the XML * stream . * @ throws SdkClientException */ public AccessControlListHandler parseAccessControlListResponse ( InputStream inputStream ) throws IOException { } }
AccessControlListHandler handler = new AccessControlListHandler ( ) ; parseXmlInputStream ( handler , inputStream ) ; return handler ;
public class HashUtils { /** * Calculate SHA - 256 hash of a File * @ param file - the File to hash * @ return the SHA - 256 hash value * @ throws IOException */ public static String getFileSHA256String ( File file ) throws IOException { } }
MessageDigest messageDigest = getMessageDigest ( SHA256 ) ; return getFileHashString ( file , messageDigest ) ;
public class VisualContext { /** * Updates this context according to the given style . Moreover given Graphics is updated * to this style and used for taking the font metrics . * @ param style the style data to be used * @ param g Graphics to be updated and used */ public void updateForGraphics ( NodeData style , Graphics2D g ) { } }
// Refresh the stored style first so the font and 'em' size are current.
if ( style != null ) update ( style ) ;
// Push the current font settings into the Graphics and grab its metrics.
updateGraphics ( g ) ; fm = g . getFontMetrics ( ) ;
// update the width units
// em has been updated in update ( )
// 'ex' is taken as the rendered height of a lowercase "x" in the current font.
FontRenderContext frc = new FontRenderContext ( null , false , false ) ; TextLayout layout = new TextLayout ( "x" , font , frc ) ; ex = layout . getBounds ( ) . getHeight ( ) ;
// 'ch' is the advance width of the digit zero in the current font.
ch = fm . charWidth ( '0' ) ;
public class AbstractPropertyWidget { /** * Executes a " widget batch update " . Listeners and other effects of updating * individual parts of a widget may be turned off during batch updates . * @ param action * the action to execute */ public final void batchUpdateWidget ( final Runnable action ) { } }
// Track nesting depth so effects fire only once for the outermost batch.
_batchUpdateCounter ++ ;
try {
    action . run ( ) ;
} catch ( final RuntimeException e ) {
    // Deliberate: a failed batch rethrows before the counter check below,
    // so onBatchFinished() (and thus fireValueChanged()) is skipped.
    logger . error ( "Exception occurred in widget batch update, fireValueChanged() will not be invoked" , e ) ;
    throw e ;
} finally {
    _batchUpdateCounter -- ;
}
// Only the outermost, fully unwound batch triggers the finish hook.
if ( _batchUpdateCounter == 0 ) {
    onBatchFinished ( ) ;
}
public class CassandraSchemaManager { /** * On set comment . * @ param cfDef * the cf def * @ param cfProperties * the cf properties * @ param builder * the builder */ private void onSetComment ( CfDef cfDef , Properties cfProperties , StringBuilder builder ) { } }
String comment = cfProperties . getProperty ( CassandraConstants . COMMENT ) ; if ( comment != null ) { if ( builder != null ) { String comment_Str = CQLTranslator . getKeyword ( CassandraConstants . COMMENT ) ; builder . append ( comment_Str ) ; builder . append ( CQLTranslator . EQ_CLAUSE ) ; builder . append ( CQLTranslator . QUOTE_STR ) ; builder . append ( comment ) ; builder . append ( CQLTranslator . QUOTE_STR ) ; builder . append ( CQLTranslator . AND_CLAUSE ) ; } else { cfDef . setComment ( comment ) ; } }
public class AbcNode { /** * Returns the value parsed into integer if label is DIGIT or DIGITS , else */ protected int getIntValue ( ) { } }
if ( label . equals ( AbcTokens . DIGIT ) || label . equals ( AbcTokens . DIGITS ) ) { try { return Integer . parseInt ( value ) ; } catch ( NumberFormatException nfe ) { return - 1 ; } } else return - 1 ;
public class ForwardCurve { /** * Create a forward curve from given times and given forwards . * @ param name The name of this curve . * @ param referenceDate The reference date for this code , i . e . , the date which defines t = 0. * @ param paymentOffsetCode The maturity of the index modeled by this curve . * @ param paymentBusinessdayCalendar The business day calendar used for adjusting the payment date . * @ param paymentDateRollConvention The date roll convention used for adjusting the payment date . * @ param interpolationMethod The interpolation method used for the curve . * @ param extrapolationMethod The extrapolation method used for the curve . * @ param interpolationEntity The entity interpolated / extrapolated . * @ param interpolationEntityForward Interpolation entity used for forward rate interpolation . * @ param discountCurveName The name of a discount curve associated with this index ( associated with it ' s funding or collateralization ) , if any . * @ param model The model to be used to fetch the discount curve , if needed . * @ param times A vector of given time points . * @ param givenForwards A vector of given forwards ( corresponding to the given time points ) . * @ return A new ForwardCurve object . */ public static ForwardCurve createForwardCurveFromForwards ( String name , Date referenceDate , String paymentOffsetCode , BusinessdayCalendarInterface paymentBusinessdayCalendar , BusinessdayCalendarInterface . DateRollConvention paymentDateRollConvention , InterpolationMethod interpolationMethod , ExtrapolationMethod extrapolationMethod , InterpolationEntity interpolationEntity , InterpolationEntityForward interpolationEntityForward , String discountCurveName , AnalyticModelInterface model , double [ ] times , double [ ] givenForwards ) { } }
LocalDate referenceDataAsLocalDate = Instant . ofEpochMilli ( referenceDate . getTime ( ) ) . atZone ( ZoneId . systemDefault ( ) ) . toLocalDate ( ) ; return createForwardCurveFromForwards ( name , referenceDataAsLocalDate , paymentOffsetCode , paymentBusinessdayCalendar , paymentDateRollConvention , interpolationMethod , extrapolationMethod , interpolationEntity , interpolationEntityForward , discountCurveName , model , times , givenForwards ) ;
public class Balancer { /** * / * wait for all block move confirmations * by checking each target ' s pendingMove queue */ private void waitForMoveCompletion ( ) { } }
boolean shouldWait ;
int waitedIterations = 0 ;
do {
    shouldWait = false ;
    // Keep waiting while any target datanode still has pending block moves.
    for ( BalancerDatanode target : plan . targets ) {
        if ( ! target . isPendingQEmpty ( ) ) {
            shouldWait = true ;
        }
    }
    if ( shouldWait ) {
        try {
            if ( waitedIterations > MAX_WAIT_ITERATIONS ) {
                // Waited too long: cancel all outstanding moves, then skip the
                // sleep and re-check the queues immediately (continue jumps to
                // the while condition, which is still true here).
                for ( BalancerDatanode target : plan . targets ) {
                    target . killPending ( ) ;
                }
                continue ;
            }
            waitedIterations ++ ;
            Thread . sleep ( blockMoveWaitTime ) ;
        } catch ( InterruptedException ignored ) {
            // NOTE(review): the interrupt status is swallowed here and the loop
            // keeps polling until the queues drain — confirm this is intentional.
        }
    }
} while ( shouldWait ) ;
public class VelocityInterceptor { /** * Renders the component using the velocity template which has been provided . * @ param renderContext the context for rendering . */ @ Override public void paint ( final RenderContext renderContext ) { } }
// Velocity rendering is only supported for servlet-style render contexts.
if ( ! ( renderContext instanceof WebXmlRenderContext ) ) { throw new SystemException ( "Unable to render to " + renderContext ) ; }
PrintWriter writer = ( ( WebXmlRenderContext ) renderContext ) . getWriter ( ) ;
// Resolve the template first; on failure, log and write the message into the
// page output rather than throwing, then give up on this component.
Template template = null ;
try { template = VelocityEngineFactory . getVelocityEngine ( ) . getTemplate ( templateUrl ) ; } catch ( Exception ex ) { String message = "Could not open velocity template \"" + templateUrl + "\" for \"" + this . getClass ( ) . getName ( ) + "\"" ; LOG . error ( message , ex ) ; writer . println ( message ) ; return ; }
// Merge the populated context into the template. Rendering errors are logged
// but deliberately not rethrown so the rest of the page can still render.
try { VelocityContext context = new VelocityContext ( ) ; fillContext ( context ) ; template . merge ( context , writer ) ; } catch ( ResourceNotFoundException rnfe ) { LOG . error ( "Could not find template " + templateUrl , rnfe ) ; } catch ( ParseErrorException pee ) { // syntax error : problem parsing the template
LOG . error ( "Parse problems" , pee ) ; } catch ( MethodInvocationException mie ) { // something invoked in the template
// threw an exception
Throwable wrapped = mie . getWrappedThrowable ( ) ; LOG . error ( "Problems with velocity" , mie ) ; if ( wrapped != null ) { LOG . error ( "Wrapped exception..." , wrapped ) ; } } catch ( Exception e ) { LOG . error ( "Problems with velocity" , e ) ; }
public class RequestMessage { /** * @ see javax . . HttpServletRequest # getRequestURI ( ) */ @ Override public String getRequestURI ( ) { } }
if ( null == this . strippedURI ) { this . strippedURI = SessionInfo . stripURL ( this . request . getURI ( ) , this . sessionData ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "getRequestURI: " + this . strippedURI ) ; } return this . strippedURI ;
public class ComponentUtils { /** * Duplicate code from OmniFacew project under apache license : * https : / / github . com / omnifaces / omnifaces / blob / develop / license . txt * URI - encode the given string using UTF - 8 . URIs ( paths and filenames ) have different encoding rules as compared to * URL query string parameters . { @ link URLEncoder } is actually only for www ( HTML ) form based query string parameter * values ( as used when a webbrowser submits a HTML form ) . URI encoding has a lot in common with URL encoding , but * the space has to be % 20 and some chars doesn ' t necessarily need to be encoded . * @ param string The string to be URI - encoded using UTF - 8. * @ return The given string , URI - encoded using UTF - 8 , or < code > null < / code > if < code > null < / code > was given . * @ throws UnsupportedEncodingException if UTF - 8 is not supported */ public static String encodeURI ( String string ) throws UnsupportedEncodingException { } }
if ( string == null ) { return null ; } return URLEncoder . encode ( string , "UTF-8" ) . replace ( "+" , "%20" ) . replace ( "%21" , "!" ) . replace ( "%27" , "'" ) . replace ( "%28" , "(" ) . replace ( "%29" , ")" ) . replace ( "%7E" , "~" ) ;