signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ZipBuilder { /** * Adds a file to the archive . The archive must not be closed . * Example : < br > * < pre > * sourceFile = C : \ opt \ work \ deps \ foo . jar * targetDir = WEB - INF / lib / foo . jar * < / pre > * @ param sourceFile * File to be added * @ param targetFile * Relative path for the file within the archive . Regardless of the OS , this path * must use slashes ( ' / ' ) as separators . * @ return this for fluent syntax * @ throws IOException * on I / O error */ public ZipBuilder addFile ( File sourceFile , String targetFile ) throws IOException { } }
FileInputStream fis = new FileInputStream ( sourceFile ) ; ZipEntry jarEntry = new ZipEntry ( targetFile ) ; jarOutputStream . putNextEntry ( jarEntry ) ; StreamUtils . copyStream ( fis , jarOutputStream , false ) ; fis . close ( ) ; return this ;
public class AbcGrammar {
    /**
     * Parses an ABC voice subname field.
     * Grammar (per the original comment; "\n" in a name means linefeed):
     * <pre>
     * voice-subname ::= ("subname=" / "snm=") %x22 non-quote %x22
     * </pre>
     * NOTE(review): the implementation additionally accepts "sname=", which the
     * grammar comment does not list — confirm whether that alias is intentional.
     */
    Rule VoiceSubname() {
        // Quoted value: any run of non-quote characters between double quotes;
        // subnodes suppressed so the match is captured as a single label.
        return SequenceS(
                FirstOfS(IgnoreCase("subname="), IgnoreCase("sname="), IgnoreCase("snm=")),
                String("\""),
                ZeroOrMore(NonQuote()).label(VoiceSubname).suppressSubnodes(),
                String("\""));
    }
}
public class SchemaUsageAnalyzer {
    /**
     * Creates a directory at the given path if it does not exist yet.
     *
     * @param path the path to the directory
     * @throws IOException if it was not possible to create a directory at the
     *                     given path
     */
    private static void createDirectory(Path path) throws IOException {
        try {
            Files.createDirectory(path);
        } catch (FileAlreadyExistsException alreadyExists) {
            // An existing directory is fine; anything else at this path is an error.
            if (Files.isDirectory(path)) {
                return;
            }
            throw alreadyExists;
        }
    }
}
public class KaplanMeierFigure {
    /**
     * Sets the survival data for this figure.
     * The data will set the max time, which may result in off time points for
     * tick marks.
     *
     * @param title figure title lines
     * @param survivalData group name mapped to its list of censor statuses
     * @param useWeighted whether to use weighted survival values
     * @throws Exception propagated from the full overload
     */
    public void setSurvivalData(ArrayList<String> title, LinkedHashMap<String, ArrayList<CensorStatus>> survivalData, Boolean useWeighted) throws Exception {
        // Delegate to the full overload; the null third argument is the
        // parameter omitted by this convenience overload.
        this.setSurvivalData(title, survivalData, null, useWeighted);
    }
}
public class ObjectSinkNodeList {
    /**
     * Returns a list iterator over this linked node list.
     * The iterator supports remove(); removal delegates to the enclosing
     * list's own remove(node).
     *
     * @return a fresh Iterator positioned before the first node
     */
    public Iterator iterator() {
        return new Iterator() {
            // Node returned by the most recent next(); null before first call
            // and after remove().
            private ObjectSinkNode currentNode = null;
            // Node the next call to next() will return.
            private ObjectSinkNode nextNode = getFirst();

            public boolean hasNext() {
                return (this.nextNode != null);
            }

            public Object next() {
                // Advance: remember the returned node, pre-fetch its successor.
                this.currentNode = this.nextNode;
                if (this.currentNode != null) {
                    this.nextNode = this.currentNode.getNextObjectSinkNode();
                } else {
                    throw new NoSuchElementException("No more elements to return");
                }
                return this.currentNode;
            }

            public void remove() {
                if (this.currentNode != null) {
                    // Remove via the enclosing list so its links stay consistent.
                    ObjectSinkNodeList.this.remove(this.currentNode);
                    // Null out so a second remove() without next() fails fast.
                    this.currentNode = null;
                } else {
                    throw new IllegalStateException("No item to remove. Call next() before calling remove().");
                }
            }
        };
    }
}
public class DatatypeConverter { /** * Parse a duration . * @ param file parent file * @ param defaultUnits default time units for the resulting duration * @ param value duration value * @ return Duration instance */ public static final Duration parseDuration ( ProjectFile file , TimeUnit defaultUnits , String value ) { } }
Duration result = null ; XsdDuration xsd = null ; if ( value != null && value . length ( ) != 0 ) { try { xsd = new XsdDuration ( value ) ; } catch ( IllegalArgumentException ex ) { // The duration is malformed . // MS Project simply ignores values like this . } } if ( xsd != null ) { TimeUnit units = TimeUnit . DAYS ; if ( xsd . getSeconds ( ) != 0 || xsd . getMinutes ( ) != 0 ) { units = TimeUnit . MINUTES ; } if ( xsd . getHours ( ) != 0 ) { units = TimeUnit . HOURS ; } if ( xsd . getDays ( ) != 0 ) { units = TimeUnit . DAYS ; } if ( xsd . getMonths ( ) != 0 ) { units = TimeUnit . MONTHS ; } if ( xsd . getYears ( ) != 0 ) { units = TimeUnit . YEARS ; } double duration = 0 ; switch ( units ) { case YEARS : { // Calculate the number of years duration += xsd . getYears ( ) ; duration += ( ( double ) xsd . getMonths ( ) / 12 ) ; duration += ( ( double ) xsd . getDays ( ) / 365 ) ; duration += ( ( double ) xsd . getHours ( ) / ( 365 * 24 ) ) ; duration += ( ( double ) xsd . getMinutes ( ) / ( 365 * 24 * 60 ) ) ; duration += ( xsd . getSeconds ( ) / ( 365 * 24 * 60 * 60 ) ) ; break ; } case ELAPSED_YEARS : { // Calculate the number of years duration += xsd . getYears ( ) ; duration += ( ( double ) xsd . getMonths ( ) / 12 ) ; duration += ( ( double ) xsd . getDays ( ) / 365 ) ; duration += ( ( double ) xsd . getHours ( ) / ( 365 * 24 ) ) ; duration += ( ( double ) xsd . getMinutes ( ) / ( 365 * 24 * 60 ) ) ; duration += ( xsd . getSeconds ( ) / ( 365 * 24 * 60 * 60 ) ) ; break ; } case MONTHS : { // Calculate the number of months duration += ( xsd . getYears ( ) * 12 ) ; duration += xsd . getMonths ( ) ; duration += ( ( double ) xsd . getDays ( ) / 30 ) ; duration += ( ( double ) xsd . getHours ( ) / ( 30 * 24 ) ) ; duration += ( ( double ) xsd . getMinutes ( ) / ( 30 * 24 * 60 ) ) ; duration += ( xsd . getSeconds ( ) / ( 30 * 24 * 60 * 60 ) ) ; break ; } case ELAPSED_MONTHS : { // Calculate the number of months duration += ( xsd . 
getYears ( ) * 12 ) ; duration += xsd . getMonths ( ) ; duration += ( ( double ) xsd . getDays ( ) / 30 ) ; duration += ( ( double ) xsd . getHours ( ) / ( 30 * 24 ) ) ; duration += ( ( double ) xsd . getMinutes ( ) / ( 30 * 24 * 60 ) ) ; duration += ( xsd . getSeconds ( ) / ( 30 * 24 * 60 * 60 ) ) ; break ; } case WEEKS : { // Calculate the number of weeks duration += ( xsd . getYears ( ) * 52 ) ; duration += ( xsd . getMonths ( ) * 4 ) ; duration += ( ( double ) xsd . getDays ( ) / 7 ) ; duration += ( ( double ) xsd . getHours ( ) / ( 7 * 24 ) ) ; duration += ( ( double ) xsd . getMinutes ( ) / ( 7 * 24 * 60 ) ) ; duration += ( xsd . getSeconds ( ) / ( 7 * 24 * 60 * 60 ) ) ; break ; } case ELAPSED_WEEKS : { // Calculate the number of weeks duration += ( xsd . getYears ( ) * 52 ) ; duration += ( xsd . getMonths ( ) * 4 ) ; duration += ( ( double ) xsd . getDays ( ) / 7 ) ; duration += ( ( double ) xsd . getHours ( ) / ( 7 * 24 ) ) ; duration += ( ( double ) xsd . getMinutes ( ) / ( 7 * 24 * 60 ) ) ; duration += ( xsd . getSeconds ( ) / ( 7 * 24 * 60 * 60 ) ) ; break ; } case DAYS : { // Calculate the number of days duration += ( xsd . getYears ( ) * 365 ) ; duration += ( xsd . getMonths ( ) * 30 ) ; duration += xsd . getDays ( ) ; duration += ( ( double ) xsd . getHours ( ) / 24 ) ; duration += ( ( double ) xsd . getMinutes ( ) / ( 24 * 60 ) ) ; duration += ( xsd . getSeconds ( ) / ( 24 * 60 * 60 ) ) ; break ; } case ELAPSED_DAYS : { // Calculate the number of days duration += ( xsd . getYears ( ) * 365 ) ; duration += ( xsd . getMonths ( ) * 30 ) ; duration += xsd . getDays ( ) ; duration += ( ( double ) xsd . getHours ( ) / 24 ) ; duration += ( ( double ) xsd . getMinutes ( ) / ( 24 * 60 ) ) ; duration += ( xsd . getSeconds ( ) / ( 24 * 60 * 60 ) ) ; break ; } case HOURS : case ELAPSED_HOURS : { // Calculate the number of hours duration += ( xsd . getYears ( ) * ( 365 * 24 ) ) ; duration += ( xsd . getMonths ( ) * ( 30 * 24 ) ) ; duration += ( xsd . 
getDays ( ) * 24 ) ; duration += xsd . getHours ( ) ; duration += ( ( double ) xsd . getMinutes ( ) / 60 ) ; duration += ( xsd . getSeconds ( ) / ( 60 * 60 ) ) ; break ; } case MINUTES : case ELAPSED_MINUTES : { // Calculate the number of minutes duration += ( xsd . getYears ( ) * ( 365 * 24 * 60 ) ) ; duration += ( xsd . getMonths ( ) * ( 30 * 24 * 60 ) ) ; duration += ( xsd . getDays ( ) * ( 24 * 60 ) ) ; duration += ( xsd . getHours ( ) * 60 ) ; duration += xsd . getMinutes ( ) ; duration += ( xsd . getSeconds ( ) / 60 ) ; break ; } default : { break ; } } // Convert from a duration in hours to a duration // expressed in the default duration units ProjectProperties properties = file . getProjectProperties ( ) ; if ( defaultUnits == null ) { defaultUnits = properties . getDefaultDurationUnits ( ) ; } result = Duration . convertUnits ( duration , units , defaultUnits , properties ) ; } return ( result ) ;
public class ChunkTopicParser {
    /**
     * Flushes the buffered chunk content to file after processing is finished.
     * For newly generated files, the XML declaration and the workdir /
     * path2rootmap / path2proj processing instructions are written first;
     * optionally the content is wrapped in a root &lt;dita&gt; element.
     *
     * @param tmpContent buffered XML content to write
     * @param outputFileName absolute URI of the target file
     * @param needWriteDitaTag whether to wrap the content in a dita root element
     * @throws IOException on write failure, or wrapping a SAXException
     */
    private void writeToContentChunk(final String tmpContent, final URI outputFileName, final boolean needWriteDitaTag) throws IOException {
        assert outputFileName.isAbsolute();
        logger.info("Writing " + outputFileName);
        try (OutputStreamWriter ditaFileOutput = new OutputStreamWriter(new FileOutputStream(new File(outputFileName)), StandardCharsets.UTF_8)) {
            if (outputFileName.equals(changeTable.get(outputFileName))) {
                // if the output file is newly generated file
                // write the xml header and workdir PI into new file
                writeStartDocument(ditaFileOutput);
                final URI workDir = outputFileName.resolve(".");
                // On Windows the workdir PI gets a leading '/' prefix.
                if (!OS_NAME.toLowerCase().contains(OS_NAME_WINDOWS)) {
                    writeProcessingInstruction(ditaFileOutput, PI_WORKDIR_TARGET, new File(workDir).getAbsolutePath());
                } else {
                    writeProcessingInstruction(ditaFileOutput, PI_WORKDIR_TARGET, UNIX_SEPARATOR + new File(workDir).getAbsolutePath());
                }
                writeProcessingInstruction(ditaFileOutput, PI_WORKDIR_TARGET_URI, workDir.toString());
                // Relative location of the root map; "./" when at the same level.
                final File path2rootmap = toFile(getRelativePath(outputFileName, job.getInputMap())).getParentFile();
                writeProcessingInstruction(ditaFileOutput, PI_PATH2ROOTMAP_TARGET_URI, path2rootmap == null ? "./" : toURI(path2rootmap).toString());
                if (conflictTable.get(outputFileName) != null) {
                    // Conflicting output: compute path back to the project root.
                    final String relativePath = getRelativeUnixPath(new File(currentFile.resolve(".")) + UNIX_SEPARATOR + FILE_NAME_STUB_DITAMAP, new File(conflictTable.get(outputFileName)).getAbsolutePath());
                    String path2project = getRelativeUnixPath(relativePath);
                    if (null == path2project) {
                        path2project = "";
                    }
                    writeProcessingInstruction(ditaFileOutput, PI_PATH2PROJ_TARGET, path2project);
                    writeProcessingInstruction(ditaFileOutput, PI_PATH2PROJ_TARGET_URI, path2project.isEmpty() ? "./" : toURI(path2project).toString());
                }
            }
            if (needWriteDitaTag) {
                final AttributesImpl atts = new AttributesImpl();
                addOrSetAttribute(atts, ATTRIBUTE_NAMESPACE_PREFIX_DITAARCHVERSION, DITA_NAMESPACE);
                addOrSetAttribute(atts, ATTRIBUTE_PREFIX_DITAARCHVERSION + COLON + ATTRIBUTE_NAME_DITAARCHVERSION, "1.3");
                writeStartElement(ditaFileOutput, ELEMENT_NAME_DITA, atts);
            }
            // write the final result to the output file
            ditaFileOutput.write(tmpContent);
            if (needWriteDitaTag) {
                writeEndElement(ditaFileOutput, ELEMENT_NAME_DITA);
            }
            ditaFileOutput.flush();
        } catch (SAXException e) {
            throw new IOException(e);
        }
    }
}
public class Collections {
    /**
     * Randomly permutes the specified list using a default source of
     * randomness. All permutations occur with approximately equal likelihood.
     *
     * <p>The hedge "approximately" is used because the default source of
     * randomness is only approximately an unbiased source of independently
     * chosen bits.
     *
     * <p>This implementation traverses the list backwards, from the last
     * element up to the second, repeatedly swapping a randomly selected
     * element into the "current position". This method runs in linear time;
     * non-RandomAccess lists are dumped to an array first to avoid quadratic
     * behavior.
     *
     * @param list the list to be shuffled
     * @throws UnsupportedOperationException if the specified list or its
     *         list-iterator does not support the <tt>set</tt> operation
     */
    public static void shuffle(List<?> list) {
        // Lazily initialize the shared Random; the unsynchronized write is a
        // deliberate benign race (worst case: two Random instances created).
        Random rnd = r;
        if (rnd == null)
            r = rnd = new Random(); // harmless race.
        shuffle(list, rnd);
    }
}
public class StreamEx {
    /**
     * Returns a sequential {@code StreamEx} containing the results of applying
     * the given function to the corresponding pairs of values in the given two
     * lists.
     *
     * The list values are accessed using {@link List#get(int)}, so the lists
     * should provide fast random access. The lists are assumed to be
     * unmodifiable during the stream operations.
     *
     * @param <U> the type of the first list elements
     * @param <V> the type of the second list elements
     * @param <T> the type of the resulting stream elements
     * @param first the first list, assumed to be unmodified during use
     * @param second the second list, assumed to be unmodified during use
     * @param mapper a non-interfering, stateless function to apply to each
     *        pair of the corresponding list elements
     * @return a new {@code StreamEx}
     * @throws IllegalArgumentException if length of the lists differs
     * @see EntryStream#zip(List, List)
     * @since 0.2.1
     */
    public static <U, V, T> StreamEx<T> zip(List<U> first, List<V> second, BiFunction<? super U, ? super V, ? extends T> mapper) {
        // checkLength throws IllegalArgumentException on size mismatch; the
        // spliterator walks indices [0, size) and maps each element pair.
        return of(new RangeBasedSpliterator.ZipRef<>(0, checkLength(first.size(), second.size()), mapper, first, second));
    }
}
public class AptEventSet { /** * Returns the name of the generated notifier class for this ControlEventSet */ public String getNotifierClass ( ) { } }
StringBuffer sb = new StringBuffer ( getShortName ( ) ) ; sb . append ( "Notifier" ) ; // If the event set declaration has any parameterized types , then include them on // the notifier class as well . Currently , these can only be parameterized types // from the outer ( control interface ) , since there is no other mechanism for specifying // type values at notifier construction ( other than propagation from the outer type ) . sb . append ( getFormalTypeParameterNames ( ) ) ; return sb . toString ( ) ;
public class BluetoothController {
    /**
     * Registers for bluetooth headset connection states and SCO audio states,
     * then tries to connect to bluetooth headset audio by repeatedly calling
     * startBluetoothSco() (via mCountDown). This is a workaround for API < 11
     * to detect if a headset is connected before the application starts: if a
     * startBluetoothSco() call succeeds (mBroadcastReceiver receives
     * ACTION_SCO_AUDIO_STATE_CHANGED with SCO_AUDIO_STATE_CONNECTED), a
     * headset is assumed to be connected.
     *
     * @return false if the device does not support bluetooth or the current
     *         platform does not support use of SCO off call
     */
    @SuppressWarnings("deprecation")
    private boolean startBluetooth() {
        Log.d(TAG, "startBluetooth");
        // Device supports bluetooth only when an adapter is present.
        if (mBluetoothAdapter != null) {
            if (mAudioManager.isBluetoothScoAvailableOffCall()) {
                mContext.registerReceiver(mBroadcastReceiver, new IntentFilter(BluetoothDevice.ACTION_ACL_CONNECTED));
                mContext.registerReceiver(mBroadcastReceiver, new IntentFilter(BluetoothDevice.ACTION_ACL_DISCONNECTED));
                mContext.registerReceiver(mBroadcastReceiver, new IntentFilter(AudioManager.ACTION_SCO_AUDIO_STATE_CHANGED));
                // Audio mode must be communication mode for startBluetoothSco()
                // to succeed (original comment said MODE_IN_CALL; the code uses
                // MODE_IN_COMMUNICATION).
                mAudioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
                mIsCountDownOn = true;
                // mCountDown repeatedly tries to start bluetooth SCO audio connection.
                mCountDown.start();
                // needed for audio SCO, see mBroadcastReceiver
                mIsStarting = true;
                return true;
            }
        }
        return false;
    }
}
public class AsynchronousExecution {
    /**
     * Sets the executor without notifying it about task completion.
     * The caller <b>must</b> also call {@link #maybeComplete()} after
     * releasing any problematic locks.
     *
     * @param executor the executor to associate; may only be set once
     */
    @Restricted(NoExternalUse.class)
    public synchronized final void setExecutorWithoutCompleting(@Nonnull Executor executor) {
        // Programmer-error guard: the executor may not already be assigned.
        assert this.executor == null;
        this.executor = executor;
    }
}
public class FileURLConnection { /** * Note : the semantics of FileURLConnection object is that the * results of the various URLConnection calls , such as * getContentType , getInputStream or getContentLength reflect * whatever was true when connect was called . */ public void connect ( ) throws IOException { } }
if ( ! connected ) { try { filename = file . toString ( ) ; isDirectory = file . isDirectory ( ) ; if ( isDirectory ) { String [ ] fileList = file . list ( ) ; if ( fileList == null ) throw new FileNotFoundException ( filename + " exists, but is not accessible" ) ; files = Arrays . < String > asList ( fileList ) ; } else { is = new BufferedInputStream ( new FileInputStream ( filename ) ) ; // Check if URL should be metered boolean meteredInput = ProgressMonitor . getDefault ( ) . shouldMeterInput ( url , "GET" ) ; if ( meteredInput ) { ProgressSource pi = new ProgressSource ( url , "GET" , file . length ( ) ) ; is = new MeteredStream ( is , pi , file . length ( ) ) ; } } } catch ( IOException e ) { throw e ; } connected = true ; }
public class CmsSynchronize {
    /**
     * Deletes a resource in the VFS and updates the synchronisation lists.<p>
     *
     * @param res the resource to be deleted
     * @throws CmsException if something goes wrong
     */
    private void deleteFromVfs(CmsResource res) throws CmsException {
        String resourcename = m_cms.getSitePath(res);

        // Report progress: running counter, then file/folder label, name, dots.
        m_report.print(org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_SUCCESSION_1, String.valueOf(m_count++)), I_CmsReport.FORMAT_NOTE);
        if (res.isFile()) {
            m_report.print(Messages.get().container(Messages.RPT_DEL_FILE_0), I_CmsReport.FORMAT_NOTE);
        } else {
            m_report.print(Messages.get().container(Messages.RPT_DEL_FOLDER_0), I_CmsReport.FORMAT_NOTE);
        }
        m_report.print(org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_ARGUMENT_1, resourcename));
        m_report.print(org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_DOTS_0));

        // lock the file in the VFS, so that it can be updated
        m_cms.lockResource(resourcename);
        m_cms.deleteResource(resourcename, CmsResource.DELETE_PRESERVE_SIBLINGS);
        // Remove it from the sync list
        m_syncList.remove(translate(resourcename));

        m_report.println(org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_OK_0), I_CmsReport.FORMAT_OK);
    }
}
public class JTSSurfaceExpression {
    /**
     * A Point guaranteed to be on this Surface.
     *
     * @return point on surface expression (lazily created and cached)
     */
    public JTSPointExpression<Point> pointOnSurface() {
        // Lazy initialization of the cached expression.
        // NOTE(review): unsynchronized — assumes single-threaded expression
        // construction; confirm if instances are shared across threads.
        if (pointOnSurface == null) {
            pointOnSurface = JTSGeometryExpressions.pointOperation(SpatialOps.POINT_ON_SURFACE, mixin);
        }
        return pointOnSurface;
    }
}
public class SimpleBitSet { /** * Sets the bit specified by the index to < code > false < / code > . * @ param bitIndex the index of the bit to be cleared . * @ exception IndexOutOfBoundsException if the specified index is negative . * @ since JDK1.0 */ public void clear ( int bitIndex ) { } }
if ( bitIndex < 0 ) throw new IndexOutOfBoundsException ( "bitIndex < 0: " + bitIndex ) ; int wordIndex = wordIndex ( bitIndex ) ; if ( wordIndex >= wordsInUse ) return ; words [ wordIndex ] &= ~ ( 1L << bitIndex ) ; recalculateWordsInUse ( ) ; checkInvariants ( ) ;
public class RequestHelper {
    /**
     * Returns whether the request is a new session.
     * The method retrieves the "new" value from the input request's session,
     * which indicates if it's a new session or not. More information:
     * https://developer.amazon.com/docs/custom-skills/request-and-response-json-reference.html#session-object
     * This method throws an {@link IllegalArgumentException} if the request is
     * not an in-session request.
     *
     * @return true if the request is a new session
     */
    public boolean isNewSession() {
        Session session = handlerInput.getRequestEnvelope().getSession();
        if (session == null) {
            throw new IllegalArgumentException("The provided request doesn't contain a session");
        }
        // NOTE(review): getNew() presumably returns a non-null Boolean for
        // in-session requests; a null would NPE on unboxing — confirm with SDK.
        return session.getNew();
    }
}
public class TxtStorer {
    /**
     * Parses a single line from the file. This method is never invoked for the
     * magic sequence. The line includes a path to a file and a hash;
     * subclasses may include more fields.
     *
     * This method returns null if the line is of no interest. This can be used
     * by subclasses to implement different protocols.
     *
     * @param state parser state (unused here; available for subclasses)
     * @param line the raw line to parse
     * @return the parsed entry, or null for lines of no interest
     */
    protected RegData parseLine(State state, String line) {
        // Note: initially this method used String.split, but that is much more
        // expensive than playing with indexOf and substring.
        int sepIndex = line.indexOf(SEPARATOR);
        if (sepIndex < 0) {
            // No separator: the line cannot be a path/hash record. The
            // original fell through to substring(0, -1) and crashed with
            // StringIndexOutOfBoundsException; per the documented contract,
            // treat such lines as "of no interest".
            return null;
        }
        String urlExternalForm = line.substring(0, sepIndex);
        String hash = line.substring(sepIndex + SEPARATOR_LEN);
        return new RegData(urlExternalForm, hash);
    }
}
public class SignatureFileVerifier {
    /**
     * Examines a signature timestamp token to generate a timestamp object.
     * Examines the signer's unsigned attributes for a
     * <tt>signatureTimestampToken</tt> attribute. If present, it is parsed to
     * extract the date and time at which the timestamp was generated.
     *
     * @param info a signer information element of a PKCS 7 block
     * @return a timestamp token or null if none is present
     * @throws IOException if an error is encountered while parsing the PKCS7 data
     * @throws NoSuchAlgorithmException if an error is encountered while
     *         verifying the PKCS7 object
     * @throws SignatureException if an error is encountered while verifying
     *         the PKCS7 object
     * @throws CertificateException if an error is encountered while generating
     *         the TSA's certpath
     */
    private Timestamp getTimestamp(SignerInfo info) throws IOException, NoSuchAlgorithmException, SignatureException, CertificateException {
        Timestamp timestamp = null;

        // Extract the signer's unsigned attributes
        PKCS9Attributes unsignedAttrs = info.getUnauthenticatedAttributes();
        if (unsignedAttrs != null) {
            PKCS9Attribute timestampTokenAttr = unsignedAttrs.getAttribute("signatureTimestampToken");
            if (timestampTokenAttr != null) {
                PKCS7 timestampToken = new PKCS7((byte[]) timestampTokenAttr.getValue());
                // Extract the content (an encoded timestamp token info)
                byte[] encodedTimestampTokenInfo = timestampToken.getContentInfo().getData();
                // Extract the signer (the Timestamping Authority)
                // while verifying the content
                SignerInfo[] tsa = timestampToken.verify(encodedTimestampTokenInfo);
                // Expect only one signer
                ArrayList<X509Certificate> chain = tsa[0].getCertificateChain(timestampToken);
                CertPath tsaChain = certificateFactory.generateCertPath(chain);
                // Create a timestamp token info object
                TimestampToken timestampTokenInfo = new TimestampToken(encodedTimestampTokenInfo);
                // Check that the signature timestamp applies to this signature
                verifyTimestamp(timestampTokenInfo, info.getEncryptedDigest());
                // Create a timestamp object
                timestamp = new Timestamp(timestampTokenInfo.getDate(), tsaChain);
            }
        }
        return timestamp;
    }
}
public class AbstractIoBufferEx {
    /**
     * {@inheritDoc}
     *
     * Expands the buffer to at least {@code newCapacity} by allocating a new
     * backing buffer through the given reallocator and copying the contents,
     * preserving position, limit, mark and byte order. Shrinking is not
     * performed: when {@code newCapacity <= capacity()} the buffer is
     * returned unchanged.
     */
    @Override
    public AbstractIoBufferEx capacity(int newCapacity, IoBufferAllocatorEx<?> reallocator) {
        if (!recapacityAllowed) {
            throw new IllegalStateException("Derived buffers and their parent can't be expanded.");
        }
        // Allocate a new buffer and transfer all settings to it.
        if (newCapacity > capacity()) {
            // Expand:
            //// Save the state.
            int pos = position();
            int limit = limit();
            ByteOrder bo = order();

            //// Reallocate.
            ByteBuffer oldBuf = buf();
            ByteBuffer newBuf = reallocator.allocate(newCapacity, flags());
            oldBuf.clear();
            newBuf.put(oldBuf);
            buf(newBuf);

            //// Restore the state.
            // Order matters: limit before mark/position, and the mark must be
            // re-established by positioning to it and calling mark() before
            // restoring the real position.
            buf().limit(limit);
            if (mark >= 0) {
                buf().position(mark);
                buf().mark();
            }
            buf().position(pos);
            buf().order(bo);
        }
        return this;
    }
}
public class MessagingSecurityServiceImpl { /** * Print the Destination Permissions , it will be used for debugging purpose */ private void printDestinationPermissions ( Map < String , ? > destinationPermissions ) { } }
Set < String > destinations = destinationPermissions . keySet ( ) ; for ( String destination : destinations ) { SibTr . debug ( tc , CLASS_NAME + " Destination: " + destination ) ; Permission permission = ( Permission ) destinationPermissions . get ( destination ) ; SibTr . debug ( tc , " Users having permissions!!!" ) ; Map < String , Set < String > > userRoles = permission . getRoleToUserMap ( ) ; Set < String > uRoles = userRoles . keySet ( ) ; for ( String role : uRoles ) { SibTr . debug ( tc , " " + role + ": " + userRoles . get ( role ) ) ; } SibTr . debug ( tc , " Groups having permissions!!!" ) ; Map < String , Set < String > > groupRoles = permission . getRoleToGroupMap ( ) ; Set < String > gRoles = groupRoles . keySet ( ) ; for ( String role : gRoles ) { SibTr . debug ( tc , " " + role + ": " + groupRoles . get ( role ) ) ; } }
public class ValueMapImpl { /** * Add a MapItemValue to the map . * @ param miv map value item . */ public void add ( MapItemValue miv ) { } }
if ( len >= items . length ) { items = LazyMap . grow ( items ) ; } items [ len ] = miv ; len ++ ;
public class JsonDBTemplate {
    /**
     * (non-Javadoc)
     * @see io.jsondb.JsonDBOperations#getCollection(java.lang.Class)
     *
     * Returns deep copies of all documents in the entity's collection,
     * creating the collection first if it does not exist. When encryption is
     * enabled and the collection has @Secret fields, each copy is decrypted
     * before being returned.
     */
    @SuppressWarnings("unchecked")
    @Override
    public <T> List<T> getCollection(Class<T> entityClass) {
        String collectionName = Util.determineCollectionName(entityClass);
        Map<Object, T> collection = (Map<Object, T>) collectionsRef.get().get(collectionName);
        if (null == collection) {
            // Lazily create the collection, then re-read the reference.
            createCollection(collectionName);
            collection = (Map<Object, T>) collectionsRef.get().get(collectionName);
        }
        CollectionMetaData cmd = cmdMap.get(collectionName);
        List<T> newCollection = new ArrayList<T>();
        try {
            for (T document : collection.values()) {
                // Deep-copy so callers cannot mutate the in-memory store.
                Object obj = Util.deepCopy(document);
                if (encrypted && cmd.hasSecret() && null != obj) {
                    CryptoUtil.decryptFields(obj, cmd, dbConfig.getCipher());
                }
                newCollection.add((T) obj);
            }
        } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
            logger.error("Error when decrypting value for a @Secret annotated field for entity: " + collectionName, e);
            throw new JsonDBException("Error when decrypting value for a @Secret annotated field for entity: " + collectionName, e);
        }
        return newCollection;
    }
}
public class DataSourceMetaDataFactory {
    /**
     * Creates a new instance of data source meta data for the given database
     * type.
     *
     * @param databaseType database type
     * @param url data source URL
     * @return data source meta data
     * @throws UnsupportedOperationException for database types without a
     *         meta data implementation
     */
    public static DataSourceMetaData newInstance(final DatabaseType databaseType, final String url) {
        // One meta data implementation per supported database dialect.
        switch (databaseType) {
            case H2:
                return new H2DataSourceMetaData(url);
            case MySQL:
                return new MySQLDataSourceMetaData(url);
            case Oracle:
                return new OracleDataSourceMetaData(url);
            case PostgreSQL:
                return new PostgreSQLDataSourceMetaData(url);
            case SQLServer:
                return new SQLServerDataSourceMetaData(url);
            default:
                throw new UnsupportedOperationException(String.format("Cannot support database [%s].", databaseType));
        }
    }
}
public class SigarAgent {
    /**
     * Contract: starting agents after JVM startup (agent-main entry point).
     *
     * @param options agent command line options
     * @param instrumentation injected JVM instrumentation instance
     * @throws Exception propagated from configuration
     */
    public static void agentmain(final String options, final Instrumentation instrumentation) throws Exception {
        logger.info("Sigar loader via agent-main.");
        // Shared configuration path with premain.
        configure(options, instrumentation);
    }
}
public class ConfigurationMetadataBuilder {
    /**
     * Visit a configuration property.
     *
     * @param owningType the type that owns the property
     * @param declaringType the declaring type of the property
     * @param propertyType the property type
     * @param name the property name
     * @param description a description for the property
     * @param defaultValue the default value of the property (only used for
     *        constant values such as strings, numbers, enums etc.)
     * @return this property metadata
     */
    public PropertyMetadata visitProperty(T owningType, T declaringType, String propertyType, String name, @Nullable String description, @Nullable String defaultValue) {
        PropertyMetadata metadata = new PropertyMetadata();
        metadata.declaringType = getTypeString(declaringType);
        metadata.name = name;
        // Full configuration path, hyphenated (kebab-case).
        metadata.path = NameUtils.hyphenate(buildPropertyPath(owningType, declaringType, name), true);
        metadata.type = propertyType;
        metadata.description = description;
        metadata.defaultValue = defaultValue;
        // Accumulate for later metadata emission.
        properties.add(metadata);
        return metadata;
    }
}
public class MarkLogicRepositoryConnection { /** * convert bindings * @ param iter * @ param subj * @ param pred * @ param obj * @ return iterator */ private Iteration < Statement , QueryEvaluationException > toStatementIteration ( TupleQueryResult iter , final Resource subj , final URI pred , final Value obj ) { } }
return new ConvertingIteration < BindingSet , Statement , QueryEvaluationException > ( iter ) { @ Override protected Statement convert ( BindingSet b ) throws QueryEvaluationException { Resource s = subj == null ? ( Resource ) b . getValue ( "s" ) : subj ; URI p = pred == null ? ( URI ) b . getValue ( "p" ) : pred ; Value o = obj == null ? b . getValue ( "o" ) : obj ; URI ctx = ( URI ) b . getValue ( "ctx" ) ; if ( ctx . stringValue ( ) . equals ( DEFAULT_GRAPH_URI ) ) { ctx = null ; } return getValueFactory ( ) . createStatement ( s , p , o , ctx ) ; } } ;
public class StyleCache {
    /**
     * Sets the feature row style into the polygon options.
     *
     * @param polygonOptions polygon options
     * @param featureRow feature row
     * @return true if style was set into the polygon options
     */
    public boolean setFeatureStyle(PolygonOptions polygonOptions, FeatureRow featureRow) {
        // Delegate to the shared utility using this cache's extension and
        // display density.
        return StyleUtils.setFeatureStyle(polygonOptions, featureStyleExtension, featureRow, density);
    }
}
public class Ifc4PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the EClass for IfcCompositeProfileDef, lazily resolved from the
     * registered Ifc4 EPackage (classifier index 114). EMF-generated accessor;
     * do not hand-edit the logic.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcCompositeProfileDef() {
        if (ifcCompositeProfileDefEClass == null) {
            ifcCompositeProfileDefEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(114);
        }
        return ifcCompositeProfileDefEClass;
    }
}
public class ConditionalOptional {
    /**
     * Returns the set of contained ids: all ids of the embedded constraint
     * plus the superclass's own contained ids.
     *
     * NOTE(review): this adds into the set returned by super.getContainedIds();
     * it assumes that set is mutable and freshly allocated per call — confirm
     * against the superclass, otherwise this mutates shared state.
     */
    @Override
    public Set<String> getContainedIds() {
        Set<String> s = super.getContainedIds();
        s.addAll(constraint.getContainedIds());
        return s;
    }
}
public class Type { /** * Tests , if this type is kind of the type in the parameter ( question is , is * this type a child of the parameter type ) . * @ param _ type type to test for parent * @ return true if this type is a child , otherwise false */ public boolean isKindOf ( final Type _type ) { } }
boolean ret = false ; Type type = this ; while ( type != null && type . getId ( ) != _type . getId ( ) ) { type = type . getParentType ( ) ; } if ( type != null && type . getId ( ) == _type . getId ( ) ) { ret = true ; } return ret ;
public class AbstrCFMLExprTransformer { /** * Transfomiert eine Modulus Operation . Im Gegensatz zu CFMX , wird das " % " Zeichen auch als Modulus * Operator anerkannt . < br / > * EBNF : < br / > * < code > divMultiOp { ( " mod " | " % " ) spaces divMultiOp } ; ( * modulus operator , " % " Existiert in CFMX nicht * ) < / code > * @ return CFXD Element * @ throws TemplateException */ private Expression modOp ( Data data ) throws TemplateException { } }
Expression expr = divMultiOp ( data ) ; // Modulus Operation while ( data . srcCode . forwardIfCurrent ( '%' ) || data . srcCode . forwardIfCurrentAndNoWordAfter ( "mod" ) ) { expr = _modOp ( data , expr ) ; // comments ( data ) ; // expr = OpDouble . toExprDouble ( expr , divMultiOp ( ) , OpDouble . MODULUS ) ; } return expr ;
public class ConverterFactory { /** * Return convert from S to T */ @ SuppressWarnings ( "unchecked" ) public < T extends R > Converter < S , T > getConverter ( Class < T > targetType ) { } }
return ( Converter < S , T > ) converters . get ( targetType ) ;
public class HttpUtil { /** * Returns the length of the content or the specified default value if the message does not have the { @ code * " Content - Length " header } . Please note that this value is not retrieved from { @ link HttpContent # content ( ) } but * from the { @ code " Content - Length " } header , and thus they are independent from each other . * @ param message the message * @ param defaultValue the default value * @ return the content length or the specified default value * @ throws NumberFormatException if the { @ code " Content - Length " } header does not parse as a long */ public static long getContentLength ( HttpMessage message , long defaultValue ) { } }
String value = message . headers ( ) . get ( HttpHeaderNames . CONTENT_LENGTH ) ; if ( value != null ) { return Long . parseLong ( value ) ; } // We know the content length if it ' s a Web Socket message even if // Content - Length header is missing . long webSocketContentLength = getWebSocketContentLength ( message ) ; if ( webSocketContentLength >= 0 ) { return webSocketContentLength ; } // Otherwise we don ' t . return defaultValue ;
public class DropinMonitor { /** * Set the properties for the _ coreMonitor that are modifiable . All others * are set when this component activates ( { @ link DropinMonitor # activate ( ) } ) . * @ param config */ private void configureCoreMonitor ( ApplicationMonitorConfig config ) { } }
_coreMonitor . setProperty ( FileMonitor . MONITOR_INTERVAL , config . getPollingRate ( ) ) ; _coreMonitor . setProperty ( FileMonitor . MONITOR_DIRECTORIES , new String [ ] { monitoredDirectory . get ( ) . getAbsolutePath ( ) } ) ;
public class Cob2XsdMain { /** * Translates a single COBOL source file . * @ param cobolFile COBOL source file * @ param cobolFileEncoding the COBOL file character encoding * @ param target target file or folder * @ param targetNamespacePrefix the output XML schemas target namespace * prefix * @ param xsltFileName an optional xslt to apply on the XML Schema * @ throws XsdGenerationException if parser fails */ protected void translate ( final File cobolFile , final String cobolFileEncoding , final File target , final String targetNamespacePrefix , final String xsltFileName ) throws XsdGenerationException { } }
try { Cob2XsdIO cob2XsdIO = new Cob2XsdIO ( getConfig ( ) ) ; cob2XsdIO . translate ( cobolFile , cobolFileEncoding , target , targetNamespacePrefix , xsltFileName ) ; } catch ( RecognizerException e ) { throw new XsdGenerationException ( e ) ; }
public class SubscriptionDescription { /** * Sets The amount of time that the message is locked by a given receiver * so that no other receiver receives the same message . * @ param lockDuration - The duration of a peek lock . Max value is 5 minutes . */ public void setLockDuration ( Duration lockDuration ) { } }
this . lockDuration = lockDuration ; if ( this . lockDuration . compareTo ( ManagementClientConstants . MAX_DURATION ) > 0 ) { this . lockDuration = ManagementClientConstants . MAX_DURATION ; }
public class CommandRunner {
    /**
     * Removes the first element of the array.
     *
     * A null or empty input yields an empty array instead of throwing
     * (the previous implementation threw NegativeArraySizeException for an
     * empty array and NullPointerException for null).
     *
     * @param args the source array
     * @return a new array containing all elements of {@code args} except the first
     */
    public static String[] removeFirst(String[] args) {
        // Degenerate input: nothing to remove.
        if (args == null || args.length == 0) {
            return new String[0];
        }
        String[] rest = new String[args.length - 1];
        System.arraycopy(args, 1, rest, 0, rest.length);
        return rest;
    }
}
public class GaliosFieldTableOps { /** * Continue evaluating a polynomial which has been broken up into multiple arrays . * @ param previousOutput Output from the evaluation of the prior part of the polynomial * @ param part Additional segment of the polynomial * @ param x Point it ' s being evaluated at * @ return results */ public int polyEvalContinue ( int previousOutput , GrowQueue_I8 part , int x ) { } }
int y = previousOutput ; for ( int i = 0 ; i < part . size ; i ++ ) { y = multiply ( y , x ) ^ ( part . data [ i ] & 0xFF ) ; } return y ;
public class StringMap {
    /**
     * Get a map entry by substring key.
     *
     * Walks the internal trie one character at a time. Nodes may hold multi-character
     * segments ({@code _char}) and sibling chains ({@code _next}); {@code ni} tracks the
     * position inside the current node's segment, with -1 meaning "at a child boundary".
     *
     * @param key    String containing the key
     * @param offset Offset of the key within the String.
     * @param length The length of the key
     * @return The Map.Entry for the key or null if the key is not in the map.
     */
    public Map.Entry getEntry(String key, int offset, int length) {
        if (key == null)
            return _nullEntry;

        Node node = _root;
        int ni = -1; // segment index within the current node; -1 = descend to children next

        // look for best match
        charLoop:
        for (int i = 0; i < length; i++) {
            char c = key.charAt(offset + i);

            // Advance node
            if (ni == -1) {
                ni = 0;
                // children are bucketed by character modulo the tree width
                node = (node._children == null) ? null : node._children[c % _width];
            }

            // Look through the node chain
            while (node != null) {
                // If it is a matching node, goto next char
                if (node._char[ni] == c || _ignoreCase && node._ochar[ni] == c) {
                    ni++;
                    if (ni == node._char.length)
                        ni = -1; // segment fully consumed; move to children on the next char
                    continue charLoop;
                }

                // No char match, so if mid node then no match at all.
                if (ni > 0)
                    return null;

                // try next in chain
                node = node._next;
            }
            return null;
        }

        // key exhausted mid-segment: only a partial match, so no entry
        if (ni > 0)
            return null;
        // node exists but stores no key: the key is a prefix of an entry, not an entry
        if (node != null && node._key == null)
            return null;
        return node;
    }
}
public class HttpDataFetcher {
    /**
     * Fetches and saves a batch of objects to the specified directory from a server with a
     * REST or REST-like API, where each object is retrieved from the URL formed by appending
     * the object's identifier to the base URL path (or, when {@code queryParam} is given, by
     * passing the identifier as that query parameter). Supports optional prefix/suffix naming
     * of the saved files.
     *
     * Files are first written with a ".partial" extension and renamed on successful completion.
     *
     * @param baseUrl    base URL from where the objects will be fetched
     * @param queryParam if defined, a query parameter will be appended to the base URL with the
     *                   identifier of the request
     * @param ids        identifiers of all the requests that will be attempted (trimmed,
     *                   de-duplicated, nulls/blanks dropped)
     * @param prefix     optional prefix prepended to the filenames of the saved files
     * @param suffix     optional suffix appended to the filenames of the saved files
     * @param outdir     directory where the files will be stored (created if needed)
     * @return a {@link CompletableFuture}-like handle that allows cancellation; as each fetch
     *         completes its status is updated with a {@link FetchStatus} value
     * @throws IOException if an error occurs that prevents fetching or saving the files
     */
    public FecthFuture fetchToDir(final URL baseUrl, final @Nullable String queryParam, final List<String> ids, final @Nullable String prefix, final @Nullable String suffix, final File outdir) throws IOException {
        // check mandatory parameters
        requireNonNull(baseUrl, "A valid URL expected");
        // normalize the id list: trim, drop blanks, de-duplicate
        final FecthFuture toBeCompleted = new FecthFuture(requireNonNull(ids, "A valid list of identifiers expected")
                .stream().map(StringUtils::trimToNull).filter(Objects::nonNull).distinct().collect(Collectors.toList()));
        requireNonNull(outdir, "A valid output directory expected");
        checkArgument((outdir.isDirectory() && outdir.canWrite()) || outdir.mkdirs(),
                new StringBuilder("Cannot write to the output directory: ").append(outdir.getAbsolutePath()).toString());
        // get optional parameters
        final Optional<String> queryParam2 = ofNullable(trimToNull(queryParam));
        final String prefix2 = ofNullable(prefix).orElse("");
        final String suffix2 = ofNullable(suffix).orElse("");
        try (final CloseableHttpAsyncClient asyncHttpClient = createFiberCloseableHttpAsyncClient()) {
            asyncHttpClient.start();
            final UrlBuilder urlBuilder = getUrlBuilder(baseUrl);
            // an explanation is needed since this code is instrumented by Quasar and Comsat: requests are created during the first part of
            // this lambda expression (map), but they are not executed until the get() method is called in the second part of the expression
            // (forEach). Here that parallel stream is used to block and wait for the requests to complete. In case that a single stream is
            // used, each request will be created and executed sequentially. Therefore, the alternative to parallel stream is to separate
            // the lambda expression in two loops, creating the requests in the first loop and calling get() in the second one.
            toBeCompleted.monList.parallelStream().map(m -> {
                try {
                    // create output file (".partial" marks an in-flight download)
                    final File outfile = new File(outdir, new StringBuilder(prefix2).append(m.id).append(suffix2).append(".partial").toString());
                    checkState(outfile.createNewFile(),
                            new StringBuilder("Cannot create the output file: ").append(outfile.getAbsolutePath()).toString());
                    // create the HTTP request: path-style when no query parameter, query-style otherwise
                    final HttpHost target = URIUtils.extractHost(baseUrl.toURI());
                    final HttpRequest request = new BasicHttpRequest("GET",
                            urlBuilder.buildRelativeUrl(queryParam2.isPresent() ? null : m.id,
                                    queryParam2.isPresent() ? of(queryParam2.get(), m.id) : null));
                    final HttpAsyncRequestProducer producer = new BasicAsyncRequestProducer(target, request);
                    // create the consumer: streams the response body straight into outfile
                    final ZeroCopyConsumer<File> consumer = new ZeroCopyConsumer<File>(outfile) {
                        @Override
                        protected File process(final HttpResponse response, final File file, final ContentType contentType) throws Exception {
                            final StatusLine status = response.getStatusLine();
                            if (LOGGER.isDebugEnabled())
                                LOGGER.debug(new StringBuilder("Got file: statusCode=").append(status.getStatusCode())
                                        .append(", file=").append(file.getAbsolutePath()).toString());
                            // anything other than 200 OK is treated as a failed fetch
                            if (status.getStatusCode() != HttpStatus.SC_OK)
                                throw new ClientProtocolException(new StringBuilder("Object fetch failed: ").append(status).toString());
                            return file;
                        }
                    };
                    // prepare request; callbacks record the per-id outcome in toBeCompleted
                    m.future = asyncHttpClient.execute(producer, consumer, new FutureCallback<File>() {
                        @Override
                        public void cancelled() {
                            toBeCompleted.update(m.id, FetchStatus.CANCELLED);
                            LOGGER.info("Task cancelled");
                        }

                        @Override
                        public void completed(final File result) {
                            try {
                                // strip the ".partial" marker once the download has fully succeeded
                                final Path path = result.toPath();
                                Files.move(path, path.resolveSibling(removeEnd(result.getName(), ".partial")), REPLACE_EXISTING);
                                toBeCompleted.update(m.id, FetchStatus.COMPLETED);
                            } catch (IOException ex) {
                                toBeCompleted.update(m.id, FetchStatus.FAILED);
                                LOGGER.error("Fecth failed to move file to its final destination with error", ex);
                            }
                        }

                        @Override
                        public void failed(final Exception ex) {
                            toBeCompleted.update(m.id, FetchStatus.FAILED);
                            LOGGER.error("Fecth failed with error", ex);
                        }
                    });
                } catch (Exception e) {
                    // per-id failure during request setup: log and continue with the rest
                    LOGGER.error(new StringBuilder("Failed to fetch object with id: ").append(m.id).toString(), e);
                }
                return m;
            }).forEach(m -> {
                try {
                    // submit requests and wait for completion
                    m.future.get();
                } catch (Exception ignore) { /* exceptions are handled in the callback functions */ }
            });
        }
        return toBeCompleted;
    }
}
public class PowerAdapters {
    /**
     * Returns the specified {@link PowerAdapter} as a {@link SpinnerAdapter}.
     * <strong>The supplied adapter must only ever use a single view type</strong>, a
     * constraint imposed by {@link Spinner#setAdapter(SpinnerAdapter)}.
     *
     * @param powerAdapter the adapter to be converted
     * @return a spinner adapter presenting the same views as {@code powerAdapter}
     * @see Spinner#setAdapter(SpinnerAdapter)
     * @see PowerAdapter#getItemViewType(int)
     */
    @CheckResult
    @NonNull
    public static SpinnerAdapter toSpinnerAdapter(@NonNull PowerAdapter powerAdapter) {
        // SpinnerAdapter tightens the ListAdapter contract: getViewTypeCount must return 1
        // (see android.widget.Spinner.setAdapter), hence the hard-coded view type count.
        return new ListAdapterConverterAdapter(checkNotNull(powerAdapter, "powerAdapter"), 1);
    }
}
public class AbstractBcX509ExtensionBuilder { /** * Add an extension . * @ param oid the extension oid . * @ param critical true if the extension is critical . * @ param value the value of the extension . * @ return this extensions builder to allow chaining . */ public X509ExtensionBuilder addExtension ( ASN1ObjectIdentifier oid , boolean critical , ASN1Encodable value ) { } }
try { this . extensions . addExtension ( oid , critical , value . toASN1Primitive ( ) . getEncoded ( ASN1Encoding . DER ) ) ; } catch ( IOException e ) { // Very unlikely throw new IllegalArgumentException ( "Invalid extension value, it could not be properly DER encoded." ) ; } return this ;
public class Dag { /** * Update the final dag status . * @ param failed true if any of the jobs has failed */ private void updateDagStatusInternal ( final boolean failed ) { } }
if ( this . status == Status . KILLING ) { /* It ' s possible that some nodes have failed when the dag is killed . Since killing a dag signals an intent from an operator , it is more important to make the dag status reflect the result of that explict intent . e . g . if the killing is a result of handing a job failure , users more likely want to know that someone has taken an action rather than that a job has failed . Operators can still see the individual job status . */ changeStatus ( Status . KILLED ) ; } else if ( failed ) { changeStatus ( Status . FAILURE ) ; } else { changeStatus ( Status . SUCCESS ) ; }
public class AnnotationConfigServletWebApplicationContext { /** * { @ inheritDoc } * Delegates given environment to underlying { @ link AnnotatedBeanDefinitionReader } and * { @ link ClassPathBeanDefinitionScanner } members . */ @ Override public void setEnvironment ( ConfigurableEnvironment environment ) { } }
super . setEnvironment ( environment ) ; this . reader . setEnvironment ( environment ) ; this . scanner . setEnvironment ( environment ) ;
public class MessagePattern { /** * Parses a MessageFormat pattern string . * @ param pattern a MessageFormat pattern string * @ return this * @ throws IllegalArgumentException for syntax errors in the pattern string * @ throws IndexOutOfBoundsException if certain limits are exceeded * ( e . g . , argument number too high , argument name too long , etc . ) * @ throws NumberFormatException if a number could not be parsed */ public MessagePattern parse ( String pattern ) { } }
preParse ( pattern ) ; parseMessage ( 0 , 0 , 0 , ArgType . NONE ) ; postParse ( ) ; return this ;
public class AbstractVirtualHostBuilder { /** * Binds the specified annotated service object under the path prefix { @ code " / " } . * @ param exceptionHandlersAndConverters instances of { @ link ExceptionHandlerFunction } , * { @ link RequestConverterFunction } and / or * { @ link ResponseConverterFunction } */ public B annotatedService ( Object service , Function < Service < HttpRequest , HttpResponse > , ? extends Service < HttpRequest , HttpResponse > > decorator , Object ... exceptionHandlersAndConverters ) { } }
return annotatedService ( "/" , service , decorator , ImmutableList . copyOf ( requireNonNull ( exceptionHandlersAndConverters , "exceptionHandlersAndConverters" ) ) ) ;
public class Cracker {
    /**
     * Used for IsSimple. Reports whether the shape needs cracking (i.e. contains
     * intersections/clusters within the tolerance that would have to be resolved).
     *
     * Runs the check twice: once on the shape as-is, and once with x/y swapped to
     * catch degenerate horizontal cases the first pass can miss. The swap is undone
     * before returning so the shape is left unchanged.
     *
     * @param allowCoincident  whether coincident segments are tolerated
     * @param shape            the shape to examine
     * @param tolerance        cracking tolerance
     * @param result           receives details of the non-simple condition when non-null
     * @param progress_tracker optional progress reporting
     * @return true if the shape requires cracking
     */
    static boolean needsCracking(boolean allowCoincident, EditShape shape, double tolerance, NonSimpleResult result, ProgressTracker progress_tracker) {
        if (!canBeCracked(shape))
            return false;

        Cracker cracker = new Cracker(progress_tracker);
        cracker.m_shape = shape;
        cracker.m_tolerance = tolerance;
        cracker.m_bAllowCoincident = allowCoincident;
        if (cracker.needsCrackingImpl_()) {
            if (result != null)
                result.Assign(cracker.m_non_simple_result);
            return true;
        }

        // Now swap the coordinates to catch horizontal cases.
        Transformation2D transform = new Transformation2D();
        transform.setSwapCoordinates();
        shape.applyTransformation(transform);

        // Fresh cracker instance: the previous one holds state from the first pass.
        cracker = new Cracker(progress_tracker);
        cracker.m_shape = shape;
        cracker.m_tolerance = tolerance;
        cracker.m_bAllowCoincident = allowCoincident;
        boolean b_res = cracker.needsCrackingImpl_();

        // Swapping coordinates twice is the identity, restoring the original shape.
        transform.setSwapCoordinates();
        shape.applyTransformation(transform); // restore shape

        if (b_res) {
            if (result != null)
                result.Assign(cracker.m_non_simple_result);
            return true;
        }
        return false;
    }
}
public class ns_conf_upgrade_history { /** * < pre > * Use this operation to get ns . conf file upgrade history . * < / pre > */ public static ns_conf_upgrade_history [ ] get ( nitro_service client ) throws Exception { } }
ns_conf_upgrade_history resource = new ns_conf_upgrade_history ( ) ; resource . validate ( "get" ) ; return ( ns_conf_upgrade_history [ ] ) resource . get_resources ( client ) ;
public class HelperBase { /** * Gets a substring between the begin and end boundaries * @ param string * original string * @ param begin * start boundary * @ param end * end boundary * @ return substring between boundaries */ public String substringBetween ( String string , String begin , String end ) { } }
return substringBetween ( string , begin , end , false ) ;
public class Qualifier { /** * Extracts qualifier namespace part from given qualifier string . * @ param qualifierAsString qualifier string . * @ return qualifier namespace . */ public static String getQualifierNamespace ( String qualifierAsString ) { } }
int pos = qualifierAsString . indexOf ( DELIMITER , 1 ) ; if ( pos == - 1 ) { throw new IllegalArgumentException ( "Wrong qualifier format: '" + qualifierAsString + "'" ) ; } return qualifierAsString . substring ( 1 , pos ) ;
public class MountTable { /** * Get the associated ufs client with the mount id . * @ param mountId mount id to look up ufs client * @ return ufsClient */ public UfsManager . UfsClient getUfsClient ( long mountId ) { } }
try { return mUfsManager . get ( mountId ) ; } catch ( NotFoundException | UnavailableException e ) { LOG . warn ( "failed to get ufsclient for mountid {}, exception {}" , mountId , e ) ; } return null ;
public class AbstractReady { /** * { @ inheritDoc } */ @ Override public final < M extends Model > M getModel ( final Class < M > clazz , final Object ... keyPart ) { } }
localFacade ( ) . globalFacade ( ) . trackEvent ( JRebirthEventType . ACCESS_MODEL , this . getClass ( ) , clazz ) ; return localFacade ( ) . globalFacade ( ) . uiFacade ( ) . retrieve ( clazz , keyPart ) ;
public class MessagePattern {
    /**
     * Parses a number from the specified message substring.
     *
     * Fast-paths small integers (stored as an ARG_INT part) and the infinity sign;
     * everything else falls through to {@link Double#parseDouble(String)} and is
     * stored as a double part.
     *
     * @param start start index into the message string
     * @param limit limit index into the message string, must be start &lt; limit
     * @param allowInfinity true if U+221E is allowed (for ChoiceFormat)
     */
    private void parseDouble(int start, int limit, boolean allowInfinity) {
        assert start < limit;
        // fake loop for easy exit and single throw statement
        for (;;) {
            // fast path for small integers and infinity
            int value = 0;
            int isNegative = 0; // not boolean so that we can easily add it to value
            int index = start;
            char c = msg.charAt(index++);
            if (c == '-') {
                isNegative = 1;
                if (index == limit) {
                    break; // no number
                }
                c = msg.charAt(index++);
            } else if (c == '+') {
                if (index == limit) {
                    break; // no number
                }
                c = msg.charAt(index++);
            }
            if (c == 0x221e) { // infinity
                // only valid when it is the last character and infinity is permitted
                if (allowInfinity && index == limit) {
                    addArgDoublePart(
                            isNegative != 0 ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY,
                            start, limit - start);
                    return;
                } else {
                    break;
                }
            }
            // try to parse the number as a small integer but fall back to a double
            while ('0' <= c && c <= '9') {
                value = value * 10 + (c - '0');
                // MAX_VALUE+1 is representable when negative, hence the isNegative offset
                if (value > (Part.MAX_VALUE + isNegative)) {
                    break; // not a small-enough integer
                }
                if (index == limit) {
                    addPart(Part.Type.ARG_INT, start, limit - start, isNegative != 0 ? -value : value);
                    return;
                }
                c = msg.charAt(index++);
            }
            // Let Double.parseDouble() throw a NumberFormatException.
            double numericValue = Double.parseDouble(msg.substring(start, limit));
            addArgDoublePart(numericValue, start, limit - start);
            return;
        }
        throw new NumberFormatException("Bad syntax for numeric value: " + msg.substring(start, limit));
    }
}
public class TriangulateMetricLinearDLT {
    /**
     * Given two observations of the same point from two views and a known motion between the
     * two views, triangulate the point's position in camera 'b' reference frame.
     * Modification of [1] to be less generic and use calibrated cameras.
     *
     * Builds a 4x4 linear system: the first two rows come from view 'b' (via addView),
     * the last two encode observation 'a' in the identity view.
     *
     * @param a        Observation 'a' in normalized coordinates. Not modified.
     * @param b        Observation 'b' in normalized coordinates. Not modified.
     * @param fromAtoB Transformation from camera view 'a' to 'b'. Not modified.
     * @param foundInA Output, the found 3D position of the point. Modified.
     */
    public GeometricResult triangulate(Point2D_F64 a, Point2D_F64 b, Se3_F64 fromAtoB, Point4D_F64 foundInA) {
        A.reshape(4, 4);
        // rows 0-1: constraints from view 'b'; returns the write index for the next row
        int index = addView(fromAtoB, b, 0);
        // third row: [-1, 0, a.x, 0]
        A.data[index++] = -1;
        A.data[index++] = 0;
        A.data[index++] = a.x;
        A.data[index++] = 0;
        // fourth row: [0, -1, a.y, 0]
        A.data[index++] = 0;
        A.data[index++] = -1;
        A.data[index++] = a.y;
        A.data[index] = 0;
        // solve the homogeneous system and store the result in foundInA
        return finishSolving(foundInA);
    }
}
public class DroolsStreamUtils { /** * This method reads the contents from the given input stream and returns the object . It is expected that * the contents in the given stream was not compressed , and it was written by the corresponding * streamOut methods of this class . * @ param in * @ return * @ throws IOException * @ throws ClassNotFoundException */ public static Object streamIn ( InputStream in ) throws IOException , ClassNotFoundException { } }
return streamIn ( in , null , false ) ;
public class Environment { /** * Returns the Java command to use . * @ param javaHome the Java Home , if { @ code null } an attempt to determine the command will be done * @ return the Java executable command */ public static String getJavaCommand ( final Path javaHome ) { } }
final Path resolvedJavaHome = javaHome == null ? findJavaHome ( ) : javaHome ; final String exe ; if ( resolvedJavaHome == null ) { exe = "java" ; } else { exe = resolvedJavaHome . resolve ( "bin" ) . resolve ( "java" ) . toString ( ) ; } if ( exe . contains ( " " ) ) { return "\"" + exe + "\"" ; } if ( WINDOWS ) { return exe + ".exe" ; } return exe ;
public class BeaconParser { /** * Adds a < code > BeaconParser < / code > used for parsing extra BLE beacon advertisement packets for * beacons that send multiple different advertisement packets ( for example , Eddystone - TLM ) * @ param extraDataParser a parser that must be configured with an " extra layout " prefix * @ return true when the extra parser is added successfully */ public boolean addExtraDataParser ( BeaconParser extraDataParser ) { } }
// add an extra data parser only if it is not null and it is an extra frame parser return extraDataParser != null && extraDataParser . mExtraFrame && extraParsers . add ( extraDataParser ) ;
public class SerializedFormBuilder { /** * Returns true if the given TypeElement should be included * in the serialized form . * @ param te the TypeElement object to check for serializability . */ private static boolean serialClassInclude ( Utils utils , TypeElement te ) { } }
if ( utils . isEnum ( te ) ) { return false ; } if ( utils . isSerializable ( te ) ) { if ( ! utils . getSerialTrees ( te ) . isEmpty ( ) ) { return serialDocInclude ( utils , te ) ; } else if ( utils . isPublic ( te ) || utils . isProtected ( te ) ) { return true ; } else { return false ; } } return false ;
public class CmsEditorBase {
    /**
     * Re-renders the form with the given entity data.<p>
     *
     * Validation is paused for the duration of the rebuild so that the in-place
     * content changes do not trigger spurious validation runs.
     *
     * @param newContent the entity data
     */
    public void rerenderForm(CmsEntity newContent) {
        // suspend validation while the entity content and widgets are being replaced
        m_validationHandler.setPaused(true, m_entity);
        m_entityBackend.changeEntityContentValues(m_entity, newContent);
        CmsType type = m_entityBackend.getType(m_entity.getTypeName());
        if ((m_tabInfos != null) && !m_tabInfos.isEmpty()) {
            // tabbed form: remember the active tab, rebuild, then restore the selection
            int currentTab = m_formTabs.getSelectedIndex();
            m_formPanel.clear();
            m_rootHandler.clearHandlers();
            m_formTabs = m_widgetService.getRendererForType(type).renderForm(m_entity, m_tabInfos, m_formPanel, m_rootHandler, 0);
            m_formTabs.selectTab(currentTab);
        } else {
            // untabbed form: simply tear down and re-render
            m_formPanel.clear();
            m_rootHandler.clearHandlers();
            m_widgetService.getRendererForType(type).renderForm(m_entity, m_tabInfos, m_formPanel, m_rootHandler, 0);
        }
        m_validationHandler.setPaused(false, m_entity);
    }
}
public class SQLLexer { /** * Get the DDL token , if any , at the start of this statement . * @ return returns token , or null if it wasn ' t DDL */ public static String extractDDLToken ( String sql ) { } }
String ddlToken = null ; Matcher ddlMatcher = PAT_ANY_DDL_FIRST_TOKEN . matcher ( sql ) ; if ( ddlMatcher . find ( ) ) { ddlToken = ddlMatcher . group ( 1 ) . toLowerCase ( ) ; } return ddlToken ;
public class DescribeConfigurationAggregatorSourcesStatusRequest { /** * Filters the status type . * < ul > * < li > * Valid value FAILED indicates errors while moving data . * < / li > * < li > * Valid value SUCCEEDED indicates the data was successfully moved . * < / li > * < li > * Valid value OUTDATED indicates the data is not the most recent . * < / li > * < / ul > * @ param updateStatus * Filters the status type . < / p > * < ul > * < li > * Valid value FAILED indicates errors while moving data . * < / li > * < li > * Valid value SUCCEEDED indicates the data was successfully moved . * < / li > * < li > * Valid value OUTDATED indicates the data is not the most recent . * < / li > * @ see AggregatedSourceStatusType */ public void setUpdateStatus ( java . util . Collection < String > updateStatus ) { } }
if ( updateStatus == null ) { this . updateStatus = null ; return ; } this . updateStatus = new com . amazonaws . internal . SdkInternalList < String > ( updateStatus ) ;
public class JSONObject { /** * Get the BigDecimal value associated with a key . * @ param key A key string . * @ return The numeric value . * @ throws JSONException if the key is not found or if the value cannot be * converted to BigDecimal . */ public BigDecimal getBigDecimal ( String key ) throws JSONException { } }
Object object = this . get ( key ) ; try { return new BigDecimal ( object . toString ( ) ) ; } catch ( Exception e ) { throw new JSONException ( "JSONObject[" + quote ( key ) + "] could not be converted to BigDecimal." ) ; }
public class CleverTapAPI { /** * Sends the GCM registration ID to CleverTap . * @ param gcmId The GCM registration ID * @ param register Boolean indicating whether to register * or not for receiving push messages from CleverTap . * Set this to true to receive push messages from CleverTap , * and false to not receive any messages from CleverTap . */ @ SuppressWarnings ( "WeakerAccess" ) public void pushGcmRegistrationId ( String gcmId , boolean register ) { } }
pushDeviceToken ( gcmId , register , PushType . GCM ) ;
public class Key { /** * Write the raw bytes of this key to the specified output stream . * @ param outputStream * the target * @ throws IOException * if the underlying I / O device cannot be written to */ public void writeTo ( final OutputStream outputStream ) throws IOException { } }
outputStream . write ( getSigningKey ( ) ) ; outputStream . write ( getEncryptionKey ( ) ) ;
public class PlayEngine {
    /**
     * Seeks to a given position. The seek is queued as a pending operation and
     * executed asynchronously by the pull/push machinery.
     *
     * @param position position to seek to
     * @throws IllegalStateException          if the stream is in stopped state
     * @throws OperationNotSupportedException if this object doesn't support the operation
     */
    public void seek(int position) throws IllegalStateException, OperationNotSupportedException {
        // Queue the seek, keep the engine alive, and make sure the worker loop runs.
        pendingOperations.add(new SeekRunnable(position));
        cancelDeferredStop();
        ensurePullAndPushRunning();
    }
}
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public OBPYoaOrent createOBPYoaOrentFromString ( EDataType eDataType , String initialValue ) { } }
OBPYoaOrent result = OBPYoaOrent . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
public class UIRegistryAmicableImpl { /** * Retrieves the user interface that was registered with the given key . If the UI has not been registered , this * attempts to load the UI using the key as a class name . * @ param key The registration key . * @ return the UI for the given key . The UI may be newly created . */ @ Override public synchronized WComponent getUI ( final String key ) { } }
WComponent ui = registry . get ( key ) ; if ( ui == null ) { // Looks like we haven ' t tried loading this UI yet , so do it now . ui = loadUI ( key ) ; ui . setLocked ( true ) ; // Cache the result only if the UI was successfully loaded . if ( ui instanceof ErrorPage ) { LOG . debug ( "Returning non-cached ErrorPage WComponent. Key=" + key ) ; } else { register ( key , ui ) ; LOG . debug ( "Returning cached WComponent. Key=" + key ) ; } } else { LOG . debug ( "Returning cached WComponent. Key=" + key ) ; } return ui ;
public class EventSubscriber {
    /**
     * Creates a NOTIFY response for the given request.
     *
     * If this method returns null, the return code and error message fields have
     * already been set to describe the failure (this matters when the method is
     * used internally).
     *
     * @param request           the NOTIFY request event to respond to
     * @param status            the SIP status code for the response
     * @param reason            optional reason phrase override, or null to keep the default
     * @param additionalHeaders optional extra headers to append to the response
     * @return the built response, or null on failure (see returnCode/errorMessage)
     */
    protected Response createNotifyResponse(RequestEvent request, int status, String reason, List<Header> additionalHeaders) {
        // when used internally - WATCH OUT - retcode, errorMessage initialized here
        initErrorInfo();
        if ((request == null) || (request.getRequest() == null)) {
            setReturnCode(SipSession.INVALID_ARGUMENT);
            setErrorMessage("Null request given for creating NOTIFY response");
            return null;
        }
        Request req = request.getRequest();
        String cseqStr = "CSEQ " + ((CSeqHeader) req.getHeader(CSeqHeader.NAME)).getSeqNumber();
        LOG.trace("Creating NOTIFY {} response with status code {}, reason phrase = {}", cseqStr, status, reason);
        try {
            Response response = parent.getMessageFactory().createResponse(status, req);
            if (reason != null) {
                response.setReasonPhrase(reason);
            }
            // tag the To header with our dialog tag and advertise our contact
            ((ToHeader) response.getHeader(ToHeader.NAME)).setTag(myTag);
            response.addHeader((ContactHeader) parent.getContactInfo().getContactHeader().clone());
            if (additionalHeaders != null) {
                Iterator<Header> i = additionalHeaders.iterator();
                while (i.hasNext()) {
                    response.addHeader(i.next());
                }
            }
            return response;
        } catch (Exception e) {
            // record the failure details for the caller; null signals the error state
            setReturnCode(SipSession.EXCEPTION_ENCOUNTERED);
            setException(e);
            setErrorMessage("Exception: " + e.getClass().getName() + ": " + e.getMessage());
        }
        return null;
    }
}
public class FeatureWebSecurityConfigImpl { /** * { @ inheritDoc } */ @ Override public boolean isTrackLoggedOutSSOCookiesEnabled ( ) { } }
WebAppSecurityConfig globalConfig = WebAppSecurityCollaboratorImpl . getGlobalWebAppSecurityConfig ( ) ; if ( globalConfig != null ) return WebAppSecurityCollaboratorImpl . getGlobalWebAppSecurityConfig ( ) . isTrackLoggedOutSSOCookiesEnabled ( ) ; else return trackLoggedOutSSOCookies ;
public class ExtendedSIFWriter { /** * Writes down the interaction participants ' ( nodes ) details * to the given output stream . Closes the stream at the end . * @ param inters binary interactions * @ param out stream to write * @ return true if any output produced successfully */ public static boolean writeParticipants ( Set < SIFInteraction > inters , OutputStream out ) { } }
if ( ! inters . isEmpty ( ) ) { try { OutputStreamWriter writer = new OutputStreamWriter ( out ) ; writeSourceAndTargetDetails ( inters , writer ) ; writer . close ( ) ; return true ; } catch ( IOException e ) { e . printStackTrace ( ) ; } } return false ;
public class SchemaManager { /** * Returns index of a table or view in the HashMappedList that * contains the table objects for this Database . * @ param table the Table object * @ return the index of the specified table or view , or - 1 if not found */ int getTableIndex ( Table table ) { } }
Schema schema = ( Schema ) schemaMap . get ( table . getSchemaName ( ) . name ) ; if ( schema == null ) { return - 1 ; } HsqlName name = table . getName ( ) ; return schema . tableList . getIndex ( name . name ) ;
public class JSON { /** * 解析为指定对象 * @ param text json字符串 * @ param clazz 指定类 * @ param < T > 指定对象 * @ return 指定对象 */ public static < T > T parseObject ( String text , Class < T > clazz ) { } }
Object obj = JSONSerializer . deserialize ( text ) ; return BeanSerializer . deserializeByType ( obj , clazz ) ;
public class ValidationObjUtil { /** * Obj deep copy object . * @ param from the from * @ param to the to * @ param copyFields the copy fields * @ return the object */ public static Object objDeepCopy ( Object from , Object to , String ... copyFields ) { } }
if ( from == null || to == null ) { log . error ( "object deep copy from or to is null " ) ; return to ; } for ( String field : copyFields ) { copyFromTo ( from , to , field ) ; } return to ;
public class AbstractKMeans {
    /**
     * Returns the mean vectors of the given clusters in the given database.
     * <p>
     * Specialized for sparse vectors: sums only the explicitly stored (nonzero)
     * dimensions of each vector when accumulating the cluster mean.
     *
     * @param clusters the clusters to compute the means
     * @param means    the recent means (reused verbatim for empty clusters)
     * @param relation the database containing the vectors
     * @return the mean vectors of the given clusters in the given database
     */
    private static double[][] sparseMeans(List<? extends DBIDs> clusters, double[][] means, Relation<? extends SparseNumberVector> relation) {
        final int k = means.length;
        double[][] newMeans = new double[k][];
        for (int i = 0; i < k; i++) {
            DBIDs list = clusters.get(i);
            if (list.isEmpty()) {
                // Keep degenerated means as-is for now.
                newMeans[i] = means[i];
                continue;
            }
            DBIDIter iter = list.iter();
            // Initialize with first.
            double[] mean = relation.get(iter).toArray();
            // Update with remaining instances
            for (iter.advance(); iter.valid(); iter.advance()) {
                SparseNumberVector vec = relation.get(iter);
                // Iterate only over the dimensions the sparse vector actually stores.
                for (int j = vec.iter(); vec.iterValid(j); j = vec.iterAdvance(j)) {
                    mean[vec.iterDim(j)] += vec.iterDoubleValue(j);
                }
            }
            // Divide the accumulated sums by the cluster size (in place).
            newMeans[i] = timesEquals(mean, 1.0 / list.size());
        }
        return newMeans;
    }
}
public class DefaultEventbus {
    /**
     * Registers all {@code Listener} fields of the given service as local event
     * listeners, honoring the optional {@code Subscribe} (pattern) and
     * {@code Group} annotations on each field. Registration happens under the
     * write lock; listener-related caches are cleared when at least one
     * listener was added.
     *
     * @param serviceName explicit service name (with version); when null/empty
     *                    the service's own name is used
     * @param service     the service instance whose listener fields are scanned
     */
    @Override
    public void addListeners(String serviceName, Service service) {
        // Service name with version
        String name = (serviceName == null || serviceName.isEmpty()) ? service.getName() : serviceName;
        Class<? extends Service> clazz = service.getClass();
        Field[] fields = clazz.getFields();
        boolean hasListener = false;
        writeLock.lock();
        try {
            // Initialize listeners in service
            for (Field field : fields) {
                // Register event listener
                if (Listener.class.isAssignableFrom(field.getType())) {
                    hasListener = true;
                    // Name of the action (eg. "service.action")
                    String listenerName = nameOf(name, field);
                    // Process "Subscribe" annotation: the event-name pattern this
                    // listener subscribes to (defaults to the listener's own name).
                    Subscribe s = field.getAnnotation(Subscribe.class);
                    String subscribe = null;
                    if (s != null) {
                        subscribe = s.value();
                    }
                    if (subscribe == null || subscribe.isEmpty()) {
                        subscribe = listenerName;
                    }
                    // Process "Group" annotation: the delivery group (defaults to
                    // the service name).
                    String group = null;
                    Group g = field.getAnnotation(Group.class);
                    if (g != null) {
                        group = g.value();
                    }
                    if (group == null || group.isEmpty()) {
                        group = name;
                    }
                    // Register listener in EventBus
                    field.setAccessible(true);
                    Listener listener = (Listener) field.get(service);
                    // Get or create group map
                    HashMap<String, Strategy<ListenerEndpoint>> groups = listeners.get(subscribe);
                    if (groups == null) {
                        groups = new HashMap<String, Strategy<ListenerEndpoint>>();
                        listeners.put(subscribe, groups);
                    }
                    // Get or create strategy
                    Strategy<ListenerEndpoint> strategy = groups.get(group);
                    if (strategy == null) {
                        strategy = this.strategy.create();
                        groups.put(group, strategy);
                    }
                    // Add endpoint to strategy
                    strategy.addEndpoint(new LocalListenerEndpoint(executor, nodeID, name, group, subscribe, listener, asyncLocalInvocation));
                }
            }
        } catch (Exception cause) {
            // Reflection or registration failure: log and continue; partial
            // registrations performed before the failure remain in place.
            logger.error("Unable to register local listener!", cause);
        } finally {
            // Clear caches only when something was actually registered.
            if (hasListener) {
                emitterCache.clear();
                broadcasterCache.clear();
                localBroadcasterCache.clear();
            }
            // Unlock reader threads
            writeLock.unlock();
        }
    }
}
public class ServiceDiscoveryManager { /** * Queries the remote entity for it ' s features and returns true if the given feature is found . * @ param jid the JID of the remote entity * @ param feature * @ return true if the entity supports the feature , false otherwise * @ throws XMPPErrorException * @ throws NoResponseException * @ throws NotConnectedException * @ throws InterruptedException */ public boolean supportsFeature ( Jid jid , CharSequence feature ) throws NoResponseException , XMPPErrorException , NotConnectedException , InterruptedException { } }
return supportsFeatures ( jid , feature ) ;
public class ScopeImpl {
    /**
     * Resets the state of the scope.
     * Useful for automation testing when we want to reset the scope used to
     * install test modules.
     */
    @Override
    protected void reset() {
        // Let the base class clear its own state first.
        super.reset();
        // Drop every cached bound provider, both named and unnamed.
        mapClassesToNamedBoundProviders.clear();
        mapClassesToUnNamedBoundProviders.clear();
        hasTestModules = false;
        // Re-install this scope's own default bindings after the wipe.
        installBindingForScope();
    }
}
public class TemplateDelegateNodeBuilder { /** * Private helper for both setCmdText ( ) and setCmdTextInfo ( ) to generate and set the internal - use * partial template name and template name . */ private void genInternalTemplateNameHelper ( ) { } }
Preconditions . checkState ( id != null ) ; // encode all the deltemplate information into the name to get a unique string // though . . . it might make more sense for this to not have a user visible name given that the // calling convention is indirect . String variant = "" ; if ( delTemplateVariantExpr != null ) { // this is hacky . perhaps we should come up with a less ambiguous strategy ExprNode expr = delTemplateVariantExpr . getRoot ( ) ; if ( expr instanceof StringNode ) { variant = ( ( StringNode ) expr ) . getValue ( ) ; } else { variant = expr . toSourceString ( ) ; } } String generatedPartialTemplateName = partialDeltemplateTemplateName ( delTemplateName , soyFileHeaderInfo . getDelPackageName ( ) , variant ) ; String generatedTemplateName = soyFileHeaderInfo . getNamespace ( ) + generatedPartialTemplateName ; setTemplateNames ( generatedTemplateName , generatedPartialTemplateName ) ;
public class KeyVaultClientBaseImpl {
    /**
     * Restores a backed up certificate to a vault.
     * Restores a backed up certificate, and all its versions, to a vault. This
     * operation requires the certificates/restore permission.
     *
     * @param vaultBaseUrl            The vault name, for example https://myvault.vault.azure.net.
     * @param certificateBundleBackup The backup blob associated with a certificate bundle.
     * @param serviceCallback         the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<CertificateBundle> restoreCertificateAsync(String vaultBaseUrl, byte[] certificateBundleBackup, final ServiceCallback<CertificateBundle> serviceCallback) {
        // Delegate to the ServiceResponse variant and adapt it to a ServiceFuture
        // that notifies the supplied callback on completion.
        return ServiceFuture.fromResponse(restoreCertificateWithServiceResponseAsync(vaultBaseUrl, certificateBundleBackup), serviceCallback);
    }
}
public class ArrayMap { /** * Sets the value at the given index , overriding any existing value mapping . * @ return previous value or { @ code null } for none * @ throws IndexOutOfBoundsException if index is negative or { @ code > = } size */ public final V set ( int index , V value ) { } }
int size = this . size ; if ( index < 0 || index >= size ) { throw new IndexOutOfBoundsException ( ) ; } int valueDataIndex = 1 + ( index << 1 ) ; V result = valueAtDataIndex ( valueDataIndex ) ; this . data [ valueDataIndex ] = value ; return result ;
public class AuthorizationImpl {
    /**
     * Answers if the principal has permission to RENDER this Channel. This
     * implementation currently delegates to the SUBSCRIBE permission.
     *
     * @param principal           IAuthorizationPrincipal
     * @param portletDefinitionId the portlet definition id
     * @return boolean
     * @exception AuthorizationException indicates authorization information could not be retrieved.
     */
    @Override
    @RequestCache
    public boolean canPrincipalRender(IAuthorizationPrincipal principal, String portletDefinitionId) throws AuthorizationException {
        // Rendering rights currently piggyback on subscription rights; a
        // dedicated RENDER permission may replace this delegation in the future.
        final boolean canSubscribe = canPrincipalSubscribe(principal, portletDefinitionId);
        return canSubscribe;
    }
}
public class DescribeActivityRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param describeActivityRequest the request to marshall; must not be null
     * @param protocolMarshaller      the marshaller receiving each bound field
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DescribeActivityRequest describeActivityRequest, ProtocolMarshaller protocolMarshaller) {
        if (describeActivityRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each request field through its pre-declared protocol binding.
            protocolMarshaller.marshall(describeActivityRequest.getActivityArn(), ACTIVITYARN_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Computer { /** * Really deletes the agent . */ @ RequirePOST public HttpResponse doDoDelete ( ) throws IOException { } }
checkPermission ( DELETE ) ; Node node = getNode ( ) ; if ( node != null ) { Jenkins . getInstance ( ) . removeNode ( node ) ; } else { AbstractCIBase app = Jenkins . getInstance ( ) ; app . removeComputer ( this ) ; } return new HttpRedirect ( ".." ) ;
public class Op { /** * Creates a set with the specified elements and an < i > operation expression < / i > on it . * @ param elements the elements of the set being created * @ return an operator , ready for chaining */ public static < T > Level0SetOperator < Set < T > , T > onSetFor ( final T ... elements ) { } }
return new Level0SetOperator < Set < T > , T > ( ExecutionTarget . forOp ( VarArgsUtil . asRequiredObjectSet ( elements ) , Normalisation . SET ) ) ;
public class ClassIndexProcessor { /** * Adds given classes for subclass indexing . */ protected final void indexSubclasses ( Class < ? > ... classes ) { } }
for ( Class < ? > klass : classes ) { indexedSuperclasses . add ( klass . getCanonicalName ( ) ) ; } annotationDriven = false ;
public class AsyncAuditOutputStream {
    /**
     * {@inheritDoc}
     *
     * @see org.audit4j.core.io.AuditOutputStream#write(org.audit4j.core.dto.Event)
     */
    @Override
    public AsyncAuditOutputStream write(AuditEvent event) {
        if (event instanceof AnnotationAuditEvent) {
            // Annotation-based events go through the dedicated annotation stream.
            annotationStream.write((AnnotationAuditEvent) event);
        } else {
            // Hand the event to the asynchronous consumer, then block on the
            // synchronizer 'b' -- presumably until the event has been picked up.
            // NOTE(review): confirm the exact await semantics of 'b'.
            deferred.accept(event);
            b.await();
        }
        // Fluent API: return this stream for chaining.
        return this;
    }
}
public class RedactClient { /** * Submit a request to the Redact API to redact a transaction . * @ param id The transaction id to redact . * @ param product The { @ link com . nexmo . client . redact . RedactRequest . Product } which corresponds to the transaction . * @ throws IOException if a network error occurred contacting the Nexmo Redact API . * @ throws NexmoClientException if there was a problem with the Nexmo request or response objects . */ public void redactTransaction ( String id , RedactRequest . Product product ) throws IOException , NexmoClientException { } }
this . redactTransaction ( new RedactRequest ( id , product ) ) ;
public class SelectParametersMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param selectParameters   the parameters to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving each bound field
     * @throws SdkClientException if the parameters are null or marshalling fails
     */
    public void marshall(SelectParameters selectParameters, ProtocolMarshaller protocolMarshaller) {
        if (selectParameters == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each field through its pre-declared protocol binding.
            protocolMarshaller.marshall(selectParameters.getInputSerialization(), INPUTSERIALIZATION_BINDING);
            protocolMarshaller.marshall(selectParameters.getExpressionType(), EXPRESSIONTYPE_BINDING);
            protocolMarshaller.marshall(selectParameters.getExpression(), EXPRESSION_BINDING);
            protocolMarshaller.marshall(selectParameters.getOutputSerialization(), OUTPUTSERIALIZATION_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ConfigResolver { /** * Resolve the specified map and store it in the specified target . * @ param root * @ param target * @ return */ public static Map < String , Object > resolveTo ( Map < String , Object > root , Map < String , Object > target ) { } }
resolve ( target , root , "" ) ; return target ;
public class StyleUtil { /** * Create a non - filtered rule with the specified title , name and symbolizer . * @ param title the title * @ param name the name * @ param symbolizer the symbolizer * @ return the rule */ public static RuleInfo createRule ( String title , String name , SymbolizerTypeInfo symbolizer ) { } }
RuleInfo rule = new RuleInfo ( ) ; rule . setTitle ( title ) ; rule . setName ( name ) ; rule . getSymbolizerList ( ) . add ( symbolizer ) ; return rule ;
public class DateContext { /** * Create a { @ link Date } instance from the given string using the default * input format < code > yyyyMMdd < / code > . * @ param dateString The string from which to create a date * @ return The { @ link Date } instance representing date string * @ throws ParseException if the date string is improperly formatted */ public Date createDate ( String dateString ) throws ParseException { } }
Date result = null ; synchronized ( cFormatter ) { result = cFormatter . parse ( dateString ) ; } return result ;
public class FileParser { /** * In order to determine the correct file parser , you can provide the URI and this method * will find the appropriate parser to use * @ param uri The Uri of the file you are parsing * @ param identifier This interface can be generated in order to handle different actions * based on the source of the file */ public static void parseGenericFileUri ( String uri , FileIdentifier identifier ) { } }
if ( uri . startsWith ( "file://" ) ) identifier . onLocalFile ( ) ; else if ( uri . startsWith ( "http://" ) ) identifier . onHttpFile ( ) ; else if ( uri . startsWith ( "android.resource://" ) ) identifier . onAsset ( ) ;
public class RtcpHandler { /** * Disconnects and closes the datagram channel used to send and receive RTCP traffic . */ private void closeChannel ( ) { } }
if ( this . channel != null ) { if ( this . channel . isConnected ( ) ) { try { this . channel . disconnect ( ) ; } catch ( IOException e ) { logger . warn ( e . getMessage ( ) , e ) ; } } if ( this . channel . isOpen ( ) ) { try { this . channel . close ( ) ; } catch ( IOException e ) { logger . warn ( e . getMessage ( ) , e ) ; } } }
public class SpiderService {
    /**
     * Create a DBObject from the given scalar/link column values.
     *
     * @param tableDef table definition used to classify each column
     * @param objID    object id assigned to the new DBObject
     * @param colIter  columns to convert into fields
     * @return the populated DBObject
     */
    private DBObject createObject(TableDefinition tableDef, String objID, Iterator<DColumn> colIter) {
        DBObject dbObj = new DBObject();
        dbObj.setObjectID(objID);
        dbObj.setTableName(tableDef.getTableName());
        while (colIter.hasNext()) {
            DColumn col = colIter.next();
            // Null means this column is a plain scalar, not an encoded link value.
            Pair<String, String> linkCol = extractLinkValue(tableDef, col.getName());
            if (linkCol == null) {
                String fieldName = col.getName();
                String fieldValue = scalarValueToString(tableDef, col.getName(), col.getRawValue());
                FieldDefinition fieldDef = tableDef.getFieldDef(fieldName);
                if (fieldDef != null && fieldDef.isCollection()) {
                    // MV scalar field
                    Set<String> values = Utils.split(fieldValue, CommonDefs.MV_SCALAR_SEP_CHAR);
                    dbObj.addFieldValues(fieldName, values);
                } else {
                    // Single-valued scalar (or a field no longer in the schema).
                    dbObj.addFieldValue(col.getName(), fieldValue);
                }
            // Skip links no longer present in schema
            } else if (tableDef.isLinkField(linkCol.firstItemInPair)) {
                // Pair = (link field name, linked object id).
                dbObj.addFieldValue(linkCol.firstItemInPair, linkCol.secondItemInPair);
            }
        }
        return dbObj;
    }
}
public class NamedVersion { /** * People use all sorts of whack characters in version . We are just excluding * the smallest set that we can . */ public boolean isValidNamedVersion ( final String s ) { } }
if ( SEMANTIC_RANGE_SPECIAL_CHARS . matcher ( s ) . find ( ) || SET_RANGE_SPECIAL_CHARS . matcher ( s ) . find ( ) || INVALID_VERSION_CHARS . matcher ( s ) . find ( ) ) { return false ; } return true ;
public class Anvil { /** * Returns currently rendered View . It allows to access the real view from * inside the Renderable . * @ return currently rendered View */ @ SuppressWarnings ( "unchecked" ) public static < T extends View > T currentView ( ) { } }
if ( currentMount == null ) { return null ; } return ( T ) currentMount . iterator . currentView ( ) ;
public class Logger {
    /**
     * Issue a formatted log message with a level of ERROR.
     *
     * @param t      the throwable
     * @param format the format string, as per {@link String#format(String, Object...)}
     * @param params the parameters
     */
    public void errorf(Throwable t, String format, Object... params) {
        // FQCN lets the underlying framework compute the correct caller location.
        doLogf(Level.ERROR, FQCN, format, params, t);
    }
}
public class CmsClientSitemapEntry { /** * Sets the children . < p > * @ param children the children to set * @ param controller a sitemap controller instance */ public void setSubEntries ( List < CmsClientSitemapEntry > children , I_CmsSitemapController controller ) { } }
m_childrenLoadedInitially = true ; m_subEntries . clear ( ) ; if ( children != null ) { m_subEntries . addAll ( children ) ; for ( CmsClientSitemapEntry child : children ) { child . updateSitePath ( CmsStringUtil . joinPaths ( m_sitePath , child . getName ( ) ) , controller ) ; } }
public class UpdateCertificateOptionsRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param updateCertificateOptionsRequest the request to marshall; must not be null
     * @param protocolMarshaller              the marshaller receiving each bound field
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(UpdateCertificateOptionsRequest updateCertificateOptionsRequest, ProtocolMarshaller protocolMarshaller) {
        if (updateCertificateOptionsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each request field through its pre-declared protocol binding.
            protocolMarshaller.marshall(updateCertificateOptionsRequest.getCertificateArn(), CERTIFICATEARN_BINDING);
            protocolMarshaller.marshall(updateCertificateOptionsRequest.getOptions(), OPTIONS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}