signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class TreeFileIOChannel {

    /**
     * Expands {@code fileName} into a nested path with one directory level per
     * character, e.g. {@code "abc"} becomes {@code /a/b/c} (platform separator).
     *
     * <p>Note: not useful in practice, as it is slow in read/write.
     *
     * @param fileName the file name to expand
     * @return the expanded path; empty for an empty name
     */
    protected String buildPathX(String fileName) {
        final StringBuilder path = new StringBuilder();
        for (int i = 0; i < fileName.length(); i++) {
            path.append(File.separator).append(fileName.charAt(i));
        }
        return path.toString();
    }
}
/**
 * Returns the secondary or tertiary weight preceding the current node's weight.
 * node = nodes[index].
 *
 * Walks backwards from the given node to the primary node of its CE, collecting
 * the implied secondary/tertiary weights along the way. If the CE turns out to
 * be tailored (not a root CE), the low non-primary boundary is returned instead.
 *
 * @param index index of {@code node} in {@code nodes}
 * @param node  the packed node value; must satisfy {@code node == nodes.elementAti(index)}
 * @param level Collator.SECONDARY or Collator.TERTIARY — which weight to look before
 * @return the preceding weight16 for the requested level
 */
private int getWeight16Before(int index, long node, int level) {
    assert(strengthFromNode(node) < level || !isTailoredNode(node));
    // Collect the root CE weights if this node is for a root CE.
    // If it is not, then return the low non-primary boundary for a tailored CE.
    int t;
    if (strengthFromNode(node) == Collator.TERTIARY) {
        t = weight16FromNode(node);
    } else {
        t = Collation.COMMON_WEIGHT16;  // Stronger node with implied common weight.
    }
    // Back up to the secondary (or stronger) node of this CE.
    while (strengthFromNode(node) > Collator.SECONDARY) {
        index = previousIndexFromNode(node);
        node = nodes.elementAti(index);
    }
    if (isTailoredNode(node)) {
        return Collation.BEFORE_WEIGHT16;
    }
    int s;
    if (strengthFromNode(node) == Collator.SECONDARY) {
        s = weight16FromNode(node);
    } else {
        s = Collation.COMMON_WEIGHT16;  // Stronger node with implied common weight.
    }
    // Back up to the primary node of this CE.
    while (strengthFromNode(node) > Collator.PRIMARY) {
        index = previousIndexFromNode(node);
        node = nodes.elementAti(index);
    }
    if (isTailoredNode(node)) {
        return Collation.BEFORE_WEIGHT16;
    }
    // [p, s, t] is a root CE. Return the preceding weight for the requested level.
    long p = weight32FromNode(node);
    int weight16;
    if (level == Collator.SECONDARY) {
        weight16 = rootElements.getSecondaryBefore(p, s);
    } else {
        weight16 = rootElements.getTertiaryBefore(p, s, t);
        assert((weight16 & ~Collation.ONLY_TERTIARY_MASK) == 0);
    }
    return weight16;
}
public class DefaultServiceRegistry { /** * Initializes ServiceRegistry instance .
* @ param broker
* parent ServiceBroker */
@ Override public void started ( ServiceBroker broker ) throws Exception { } } | super . started ( broker ) ; // Local nodeID
this . nodeID = broker . getNodeID ( ) ; // Set components
ServiceBrokerConfig cfg = broker . getConfig ( ) ; this . executor = cfg . getExecutor ( ) ; this . scheduler = cfg . getScheduler ( ) ; this . strategyFactory = cfg . getStrategyFactory ( ) ; this . contextFactory = cfg . getContextFactory ( ) ; this . transporter = cfg . getTransporter ( ) ; this . eventbus = cfg . getEventbus ( ) ; this . uid = cfg . getUidGenerator ( ) ; |
public class LoggingSupport { /** * java . util . logging and read its configuration */
static String getSimpleFormat ( boolean useProxy ) { } } | String format = AccessController . doPrivileged ( new PrivilegedAction < String > ( ) { public String run ( ) { return System . getProperty ( FORMAT_PROP_KEY ) ; } } ) ; if ( useProxy && proxy != null && format == null ) { format = proxy . getProperty ( FORMAT_PROP_KEY ) ; } if ( format != null ) { try { // validate the user - defined format string
String . format ( format , new Date ( ) , "" , "" , "" , "" , "" ) ; } catch ( IllegalArgumentException e ) { // illegal syntax ; fall back to the default format
format = DEFAULT_FORMAT ; } } else { format = DEFAULT_FORMAT ; } return format ; |
/**
 * Add the requested post parameters to the Request.
 * Only parameters that were explicitly set (non-null fields) are added.
 *
 * @param request Request to add post params to
 */
private void addPostParams(final Request request) {
    if (body != null) {
        request.addPostParam("Body", body);
    }
    if (attributes != null) {
        request.addPostParam("Attributes", attributes);
    }
}
/**
 * Create a new Simple path and register it on this bean path.
 *
 * @param <A> path element type; the unchecked cast from {@code Class<? super A>}
 *            to {@code Class<A>} is safe when callers pass the property's
 *            declared type
 * @param property property name
 * @param type property type
 * @return the registered property path
 */
@SuppressWarnings("unchecked")
protected <A> SimplePath<A> createSimple(String property, Class<? super A> type) {
    return add(new SimplePath<A>((Class<A>) type, forProperty(property)));
}
public class ProcessApplicationContext { /** * < p > Takes a callable and executes all engine API invocations within that callable in the context
* of the given process application
* < p > Equivalent to
* < pre >
* try {
* ProcessApplicationContext . setCurrentProcessApplication ( " someProcessApplication " ) ;
* callable . call ( ) ;
* } finally {
* ProcessApplicationContext . clear ( ) ;
* < / pre >
* @ param callable the callable to execute
* @ param processApplication the process application to switch into */
public static < T > T withProcessApplicationContext ( Callable < T > callable , ProcessApplicationInterface processApplication ) throws Exception { } } | try { setCurrentProcessApplication ( processApplication ) ; return callable . call ( ) ; } finally { clear ( ) ; } |
public class YamlWriter { /** * Save a given { @ link Object } to the yml - file .
* @ param fileName
* The file name of the yml - file .
* @ param object
* The { @ link Object } to be stored .
* @ throws IOException
* If the file could not be opened , created ( when it doesn ' t
* exist ) or the given filename is a directory . */
public static synchronized void persist ( String fileName , Object object ) throws IOException { } } | ObjectChecks . checkForNullReference ( fileName , "fileName is null" ) ; ObjectChecks . checkForNullReference ( object , "object is null" ) ; FileWriter fileWriter = null ; File f = null ; try { fileWriter = new FileWriter ( fileName ) ; } catch ( IOException e ) { f = new File ( fileName ) ; if ( f . isDirectory ( ) ) { throw new IOException ( fileName + " is a directory" ) ; } else { throw new IOException ( "Could not create " + fileName + ": " + e . getMessage ( ) ) ; } } Yaml yaml = new Yaml ( mOptions ) ; yaml . dump ( object , fileWriter ) ; fileWriter . close ( ) ; |
public class CredentialFactory { /** * Creates an OAuth2 credential from client secrets , which may require an interactive authorization prompt .
* Use this method when the Application Default Credential is not sufficient .
* @ param clientSecretsFile The { @ code client _ secrets . json } file path .
* @ param credentialId The credentialId for use in identifying the credential in the persistent credential store .
* @ return The user credential */
public static Credential getCredentialFromClientSecrets ( String clientSecretsFile , String credentialId ) { } } | Preconditions . checkArgument ( clientSecretsFile != null ) ; Preconditions . checkArgument ( credentialId != null ) ; HttpTransport httpTransport ; try { httpTransport = GoogleNetHttpTransport . newTrustedTransport ( ) ; } catch ( IOException | GeneralSecurityException e ) { throw new RuntimeException ( "Could not create HTTPS transport for use in credential creation" , e ) ; } JsonFactory jsonFactory = JacksonFactory . getDefaultInstance ( ) ; GoogleClientSecrets clientSecrets ; try { clientSecrets = GoogleClientSecrets . load ( jsonFactory , new FileReader ( clientSecretsFile ) ) ; } catch ( IOException e ) { throw new RuntimeException ( "Could not read the client secrets from file: " + clientSecretsFile , e ) ; } FileDataStoreFactory dataStoreFactory ; try { dataStoreFactory = new FileDataStoreFactory ( CREDENTIAL_STORE ) ; } catch ( IOException e ) { throw new RuntimeException ( "Could not create persisten credential store " + CREDENTIAL_STORE , e ) ; } GoogleAuthorizationCodeFlow flow ; try { flow = new GoogleAuthorizationCodeFlow . Builder ( httpTransport , jsonFactory , clientSecrets , SCOPES ) . setDataStoreFactory ( dataStoreFactory ) . build ( ) ; } catch ( IOException e ) { throw new RuntimeException ( "Could not build credential authorization flow" , e ) ; } // The credentialId identifies the credential in the persistent credential store .
Credential credential ; try { credential = new AuthorizationCodeInstalledApp ( flow , new PromptReceiver ( ) ) . authorize ( credentialId ) ; } catch ( IOException e ) { throw new RuntimeException ( "Could not perform credential authorization flow" , e ) ; } return credential ; |
/**
 * {@inheritDoc}
 *
 * Single-producer offer on a ring buffer: reads the producer-local write
 * location with a plain (non-volatile) read, rejects the offer if the ring
 * is full, stores the value and then publishes the advanced write location.
 */
public boolean offer(E value) {
    // non volatile read (which is quicker) -- producerWriteLocation is only
    // ever written by the producer thread, so a plain read is safe here.
    final int writeLocation = this.producerWriteLocation;
    // Advance by 1; wrap back to the start when the end of the array is reached.
    final int nextWriteLocation = (writeLocation + 1 == capacity) ? 0 : writeLocation + 1;
    if (nextWriteLocation == capacity) {
        // NOTE(review): after the wrap above nextWriteLocation is always
        // < capacity, so this branch looks unreachable -- confirm against the
        // queue's sizing convention (capacity may be logical size + 1).
        if (readLocation == 0)
            return false;
    } else if (nextWriteLocation == readLocation)
        // Queue is full: writing here would overtake the consumer.
        return false;
    // purposely not volatile; the store is published by setWriteLocation below.
    data[writeLocation] = value;
    setWriteLocation(nextWriteLocation);
    return true;
}
public class WorkManagerUtil { /** * Get should distribute override
* @ param work The work instance
* @ return The override , if none return null */
public static Boolean getShouldDistribute ( DistributableWork work ) { } } | if ( work != null && work instanceof WorkContextProvider ) { List < WorkContext > contexts = ( ( WorkContextProvider ) work ) . getWorkContexts ( ) ; if ( contexts != null ) { for ( WorkContext wc : contexts ) { if ( wc instanceof DistributableContext ) { DistributableContext dc = ( DistributableContext ) wc ; return dc . getDistribute ( ) ; } else if ( wc instanceof HintsContext ) { HintsContext hc = ( HintsContext ) wc ; if ( hc . getHints ( ) . keySet ( ) . contains ( DistributableContext . DISTRIBUTE ) ) { Serializable value = hc . getHints ( ) . get ( DistributableContext . DISTRIBUTE ) ; if ( value != null && value instanceof Boolean ) { return ( Boolean ) value ; } } } } } } return null ; |
public class QueueManager { /** * Challange a QueueMemberStatusEvent . Called from AsteriskServerImpl
* whenever a member state changes .
* @ param event that was triggered by Asterisk server . */
void handleQueueMemberStatusEvent ( QueueMemberStatusEvent event ) { } } | AsteriskQueueImpl queue = getInternalQueueByName ( event . getQueue ( ) ) ; if ( queue == null ) { logger . error ( "Ignored QueueMemberStatusEvent for unknown queue " + event . getQueue ( ) ) ; return ; } AsteriskQueueMemberImpl member = queue . getMemberByLocation ( event . getLocation ( ) ) ; if ( member == null ) { logger . error ( "Ignored QueueMemberStatusEvent for unknown member " + event . getLocation ( ) ) ; return ; } manageQueueMemberChange ( queue , member , event ) ; queue . fireMemberStateChanged ( member ) ; |
/**
 * Detach this consumer: mark it not ready, remove it from its key group and
 * from the consumer dispatcher, dispose of its message filters, and finally
 * remove it from the consumer set when message classification is in use.
 *
 * @throws SIResourceException
 * @throws SINotPossibleInCurrentConfigurationException
 */
public void detach() throws SIResourceException, SINotPossibleInCurrentConfigurationException {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(tc, "detach");
    // Make sure we are not ready
    notReady();
    // Remove us from any group we are a member of
    if (keyGroup != null)
        keyGroup.removeMember(this);
    // Remove this consumer from the CD's knowledge
    consumerDispatcher.detachConsumerPoint(this);
    // Cleanly dispose of the getCursor
    if (classifyingMessages) {
        // Take the classifications read lock while detaching all filters.
        // NOTE(review): if a filter detach throws, the read lock is not freed
        // (no try/finally) -- confirm callers tolerate this.
        consumerSet.takeClassificationReadLock();
        int numFilters = consumerKeyFilter.length;
        for (int i = 0; i < numFilters; i++)
            consumerKeyFilter[i].detach();
        // Free the classifications read lock
        consumerSet.freeClassificationReadLock();
    } else
        consumerKeyFilter[0].detach();
    synchronized (this) {
        detached = true;
    }
    // Remove the consumerPoint from the consumerSet if the latter has been specified
    if (classifyingMessages)
        consumerSet.removeConsumer(consumerPoint);
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(tc, "detach");
}
public class FlyWeightFlatXmlDataSetBuilder { /** * Sets the flat XML input source from which the { @ link FlyWeightFlatXmlDataSet } is to be built
* @ param xmlReader The flat XML input as { @ link Reader }
* @ return The created { @ link FlyWeightFlatXmlDataSet }
* @ throws DataSetException */
public FlyWeightFlatXmlDataSet build ( Reader xmlReader ) throws DataSetException { } } | InputSource inputSource = new InputSource ( xmlReader ) ; return buildInternal ( inputSource ) ; |
public class CompDiagMatrix { /** * Finds the size of the requested diagonal to be allocated */
private int getDiagSize ( int diagonal ) { } } | if ( diagonal < 0 ) return Math . min ( numRows + diagonal , numColumns ) ; else return Math . min ( numRows , numColumns - diagonal ) ; |
public class DnsBatch { /** * Adds a request representing the " list record sets " operation in the zone specified by { @ code
* zoneName } to this batch . The { @ code options } can be used to restrict the fields returned or
* provide page size limits in the same way as for { @ link Dns # listRecordSets ( String ,
* Dns . RecordSetListOption . . . ) } . Calling { @ link DnsBatchResult # get ( ) } on the return value yields a
* page of record sets if successful and throws a { @ link DnsException } if the operation failed or
* the zone does not exist . */
public DnsBatchResult < Page < RecordSet > > listRecordSets ( String zoneName , Dns . RecordSetListOption ... options ) { } } | DnsBatchResult < Page < RecordSet > > result = new DnsBatchResult < > ( ) ; Map < DnsRpc . Option , ? > optionMap = DnsImpl . optionMap ( options ) ; RpcBatch . Callback < ResourceRecordSetsListResponse > callback = createListRecordSetsCallback ( zoneName , result , optionMap ) ; batch . addListRecordSets ( zoneName , callback , optionMap ) ; return result ; |
public class GetJournalImageServlet { /** * Build path to fetch image at given txid for the given journal .
* This path does not contain address . */
public static String buildPath ( String journalId , long txid , NamespaceInfo nsInfo , boolean throttle ) { } } | StringBuilder path = new StringBuilder ( "/getImage?getimage=1&" ) ; try { path . append ( JOURNAL_ID_PARAM ) . append ( "=" ) . append ( URLEncoder . encode ( journalId , "UTF-8" ) ) ; path . append ( "&" + TXID_PARAM ) . append ( "=" ) . append ( txid ) ; path . append ( "&" + THROTTLE_PARAM ) . append ( "=" ) . append ( throttle ) ; path . append ( "&" + STORAGEINFO_PARAM ) . append ( "=" ) . append ( URLEncoder . encode ( nsInfo . toColonSeparatedString ( ) , "UTF-8" ) ) ; } catch ( UnsupportedEncodingException e ) { // Never get here - - everyone supports UTF - 8
throw new RuntimeException ( e ) ; } return path . toString ( ) ; |
public class Visitors { /** * Using a visitor , obtain the { @ link Subquery } objects that are contained within the supplied { @ link Visitable object } . This
* method does find Subquery objets nested in other Subquery objects .
* @ param visitable the visitable
* @ param includeNestedSubqueries true if any Subquery objects within other Subquery objects should be included , or false if
* only the top - level Subquery objects should be included
* @ return the collection of subqueries ; never null but possibly empty if no subqueries were found */
public static Collection < Subquery > subqueries ( Visitable visitable , final boolean includeNestedSubqueries ) { } } | final Collection < Subquery > subqueries = new LinkedList < Subquery > ( ) ; Visitors . visitAll ( visitable , new Visitors . AbstractVisitor ( ) { @ Override public void visit ( Subquery subquery ) { subqueries . add ( subquery ) ; if ( includeNestedSubqueries ) { // Now look for any subqueries in the subquery . . .
subquery . getQuery ( ) . accept ( this ) ; } } } ) ; return subqueries ; |
public class ServerSets { /** * Creates a service instance object deserialized from byte array .
* @ param data the byte array contains a serialized Thrift service instance
* @ param codec the codec to use to deserialize the byte array */
public static ServiceInstance deserializeServiceInstance ( byte [ ] data , Codec < ServiceInstance > codec ) throws IOException { } } | return codec . deserialize ( new ByteArrayInputStream ( data ) ) ; |
public class PredecessorReader { /** * Convert an integer to a RelationType instance .
* @ param type integer value
* @ return RelationType instance */
private RelationType getRelationType ( int type ) { } } | RelationType result ; if ( type > 0 && type < RELATION_TYPES . length ) { result = RELATION_TYPES [ type ] ; } else { result = RelationType . FINISH_START ; } return result ; |
public class AuthorizationFlow { /** * Creates a new credential for the given user ID based on the given token
* response and store in the credential store .
* @ param response implicit authorization token response
* @ param userId user ID or { @ code null } if not using a persisted credential
* store
* @ return newly created credential
* @ throws IOException */
public Credential createAndStoreCredential ( ImplicitResponseUrl implicitResponse , String userId ) throws IOException { } } | Credential credential = newCredential ( userId ) . setAccessToken ( implicitResponse . getAccessToken ( ) ) . setExpiresInSeconds ( implicitResponse . getExpiresInSeconds ( ) ) ; CredentialStore credentialStore = getCredentialStore ( ) ; if ( credentialStore != null ) { credentialStore . store ( userId , credential ) ; } if ( credentialCreatedListener != null ) { credentialCreatedListener . onCredentialCreated ( credential , implicitResponse ) ; } return credential ; |
/**
 * Static factory for {@link CollectionJsonDeserializer}.
 *
 * @param deserializer {@link JsonDeserializer} used to deserialize the objects inside the {@link Collection}.
 * @param <T> Type of the elements inside the {@link Collection}
 * @return a new instance of {@link CollectionJsonDeserializer}
 */
public static <T> CollectionJsonDeserializer<T> newInstance(JsonDeserializer<T> deserializer) {
    return new CollectionJsonDeserializer<T>(deserializer);
}
public class InvokableAPIDescriptor { /** * Invoke the Java callback underlying this API descriptor . Note : this method
* may only be invoked after
* { @ link # canExecute ( IRequestWebScopeWithoutResponse , MutableInt ) } returned
* < code > true < / code > !
* @ param aRequestScope
* Current request scope . Never < code > null < / code > .
* @ param aUnifiedResponse
* Current response . Never < code > null < / code > .
* @ throws Exception
* In case the Java callback throws one
* @ throws IllegalStateException
* In case the executor factory creates a < code > null < / code > executor . */
public void invokeAPI ( @ Nonnull final IRequestWebScopeWithoutResponse aRequestScope , @ Nonnull final UnifiedResponse aUnifiedResponse ) throws Exception { } } | final IAPIExecutor aExecutor = m_aDescriptor . getExecutorFactory ( ) . get ( ) ; if ( aExecutor == null ) throw new IllegalStateException ( "Failed to created API executor for: " + toString ( ) ) ; // Go go go
aExecutor . invokeAPI ( m_aDescriptor , m_sPath , m_aPathVariables , aRequestScope , aUnifiedResponse ) ; |
public class ParserFactory { /** * D190462 - START */
public static Document parseDocument ( DocumentBuilder builder , File file ) throws IOException , SAXException { } } | final DocumentBuilder docBuilder = builder ; final File parsingFile = file ; try { return ( Document ) AccessController . doPrivileged ( new PrivilegedExceptionAction ( ) { public Object run ( ) throws SAXException , IOException { Thread currThread = Thread . currentThread ( ) ; ClassLoader oldLoader = currThread . getContextClassLoader ( ) ; currThread . setContextClassLoader ( ParserFactory . class . getClassLoader ( ) ) ; try { return docBuilder . parse ( parsingFile ) ; } finally { currThread . setContextClassLoader ( oldLoader ) ; } } } ) ; } catch ( PrivilegedActionException pae ) { Throwable t = pae . getCause ( ) ; if ( t instanceof SAXException ) { throw ( SAXException ) t ; } else if ( t instanceof IOException ) { throw ( IOException ) t ; } } return null ; |
public class DeviceInterface { public CommandInfo getCommandInfo ( String commandName ) { } } | for ( CommandInfo commandInfo : commandInfoList ) if ( commandInfo . cmd_name . toLowerCase ( ) . equals ( commandName . toLowerCase ( ) ) ) return commandInfo ; return null ; |
/**
 * Initialize DFSCopyFileMapper-specific job configuration: records all copy
 * options on the JobConf, creates the job directory, enumerates the source
 * trees into src/dst/dir sequence files, optionally deletes destination files
 * that no longer exist at the source, and sizes the map/reduce tasks.
 *
 * @param conf : The dfs/mapred configuration.
 * @param jobConf : The handle to the jobConf object to be initialized.
 * @param client job client used to locate the system directory and size tasks
 * @param args Arguments
 * @param useFastCopy whether the FastCopy code path should be used
 * @return true if it is necessary to launch a job (something to copy).
 * @throws IOException on any filesystem error or invalid argument combination
 */
private static boolean setup(Configuration conf, JobConf jobConf, JobClient client,
        final Arguments args, boolean useFastCopy) throws IOException {
    jobConf.set(DST_DIR_LABEL, args.dst.toUri().toString());
    // set boolean values
    final boolean update = args.flags.contains(Options.UPDATE);
    final boolean skipCRCCheck = args.flags.contains(Options.SKIPCRC);
    final boolean overwrite = !update && args.flags.contains(Options.OVERWRITE);
    final boolean skipUnderConstructionFile = args.flags.contains(Options.SKIPUNDERCONSTRUCTION);
    jobConf.setBoolean(Options.UPDATE.propertyname, update);
    jobConf.setBoolean(Options.SKIPCRC.propertyname, skipCRCCheck);
    jobConf.setBoolean(Options.OVERWRITE.propertyname, overwrite);
    jobConf.setBoolean(Options.IGNORE_READ_FAILURES.propertyname,
        args.flags.contains(Options.IGNORE_READ_FAILURES));
    jobConf.setBoolean(Options.PRESERVE_STATUS.propertyname,
        args.flags.contains(Options.PRESERVE_STATUS));
    jobConf.setBoolean(Options.USEFASTCOPY.propertyname, useFastCopy);
    jobConf.setBoolean(Options.SKIPUNDERCONSTRUCTION.propertyname, skipUnderConstructionFile);

    // Per-run unique job directory under the JobTracker's system dir.
    final String randomId = getRandomId();
    Path jobDirectory = new Path(client.getSystemDir(), NAME + "_" + randomId);
    jobConf.set(JOB_DIR_LABEL, jobDirectory.toString());

    FileSystem dstfs = args.dst.getFileSystem(conf);
    boolean dstExists = dstfs.exists(args.dst);
    boolean dstIsDir = false;
    if (dstExists) {
        dstIsDir = dstfs.getFileStatus(args.dst).isDir();
    }

    // default logPath
    Path logPath = args.log;
    if (logPath == null) {
        String filename = "_distcp_logs_" + randomId;
        if (!dstExists || !dstIsDir) {
            // Destination is (or will be) a file: put logs next to it.
            Path parent = args.dst.getParent();
            if (!dstfs.exists(parent)) {
                dstfs.mkdirs(parent);
            }
            logPath = new Path(parent, filename);
        } else {
            logPath = new Path(args.dst, filename);
        }
    }
    FileOutputFormat.setOutputPath(jobConf, logPath);

    // create src list, dst list
    FileSystem jobfs = jobDirectory.getFileSystem(jobConf);
    Path srcfilelist = new Path(jobDirectory, "_distcp_src_files");
    jobConf.set(SRC_LIST_LABEL, srcfilelist.toString());
    SequenceFile.Writer src_writer = SequenceFile.createWriter(jobfs, jobConf, srcfilelist,
        LongWritable.class, FilePairComparable.class,
        jobfs.getConf().getInt("io.file.buffer.size", 4096),
        SRC_FILES_LIST_REPL_DEFAULT, jobfs.getDefaultBlockSize(),
        SequenceFile.CompressionType.NONE, new DefaultCodec(), null, new Metadata());
    Path dstfilelist = new Path(jobDirectory, "_distcp_dst_files");
    SequenceFile.Writer dst_writer = SequenceFile.createWriter(jobfs, jobConf, dstfilelist,
        Text.class, Text.class, SequenceFile.CompressionType.NONE);
    Path dstdirlist = new Path(jobDirectory, "_distcp_dst_dirs");
    jobConf.set(DST_DIR_LIST_LABEL, dstdirlist.toString());
    SequenceFile.Writer dir_writer = SequenceFile.createWriter(jobfs, jobConf, dstdirlist,
        Text.class, FilePairComparable.class, SequenceFile.CompressionType.NONE);

    // handle the case where the destination directory doesn't exist
    // and we've only a single src directory OR we're updating/overwriting
    // the contents of the destination directory.
    final boolean special = (args.srcs.size() == 1 && !dstExists) || update || overwrite;
    int srcCount = 0, cnsyncf = 0, dirsyn = 0;
    long fileCount = 0L, dirCount = 0L, byteCount = 0L, cbsyncs = 0L, blockCount = 0L;

    Path basedir = null;
    HashSet<Path> parentDirsToCopy = new HashSet<Path>();
    if (args.basedir != null) {
        FileSystem basefs = args.basedir.getFileSystem(conf);
        basedir = args.basedir.makeQualified(basefs);
        if (!basefs.isDirectory(basedir)) {
            throw new IOException("Basedir " + basedir + " is not a directory.");
        }
    }

    try {
        for (Iterator<Path> srcItr = args.srcs.iterator(); srcItr.hasNext();) {
            final Path src = srcItr.next();
            FileSystem srcfs = src.getFileSystem(conf);
            FileStatus srcfilestat = srcfs.getFileStatus(src);
            // In "special" mode a source dir's contents land directly under dst.
            Path root = special && srcfilestat.isDir() ? src : src.getParent();
            if (basedir != null) {
                root = basedir;
                // Record every ancestor between src and basedir exactly once,
                // so the destination tree mirrors the basedir-relative layout.
                Path parent = src.getParent().makeQualified(srcfs);
                while (parent != null && !parent.equals(basedir)) {
                    if (!parentDirsToCopy.contains(parent)) {
                        parentDirsToCopy.add(parent);
                        String dst = makeRelative(root, parent);
                        FileStatus pst = srcfs.getFileStatus(parent);
                        src_writer.append(new LongWritable(0), new FilePairComparable(pst, dst));
                        dst_writer.append(new Text(dst), new Text(parent.toString()));
                        dir_writer.append(new Text(dst), new FilePairComparable(pst, dst));
                        if (++dirsyn > SYNC_FILE_MAX) {
                            dirsyn = 0;
                            dir_writer.sync();
                        }
                    }
                    parent = parent.getParent();
                }
                if (parent == null) {
                    throw new IOException("Basedir " + basedir
                        + " is not a prefix of source path " + src);
                }
            }

            if (srcfilestat.isDir()) {
                ++srcCount;
                ++dirCount;
                final String dst = makeRelative(root, src);
                src_writer.append(new LongWritable(0), new FilePairComparable(srcfilestat, dst));
                dst_writer.append(new Text(dst), new Text(src.toString()));
            }

            // Depth-first traversal of the source tree without recursion.
            Stack<FileStatus> pathstack = new Stack<FileStatus>();
            for (pathstack.push(srcfilestat); !pathstack.empty();) {
                FileStatus cur = pathstack.pop();
                FileStatus[] children = srcfs.listStatus(cur.getPath());
                for (int i = 0; i < children.length; i++) {
                    boolean skipfile = false;
                    final FileStatus child = children[i];
                    final String dst = makeRelative(root, child.getPath());
                    ++srcCount;

                    if (child.isDir()) {
                        pathstack.push(child);
                        ++dirCount;
                    } else {
                        // skip file if it exceeds file limit or size limit.
                        // check on whether src and dest files are same will be
                        // done in mapper.
                        skipfile |= fileCount == args.filelimit
                            || byteCount + child.getLen() > args.sizelimit;
                        if (!skipfile) {
                            ++fileCount;
                            byteCount += child.getLen();
                            blockCount += getBlocks(child.getLen(), child.getBlockSize());
                            if (LOG.isTraceEnabled()) {
                                LOG.trace("adding file " + child.getPath());
                            }
                            // Periodically emit sync markers so the sequence
                            // files can be split across map tasks.
                            ++cnsyncf;
                            cbsyncs += child.getLen();
                            if (cnsyncf > SYNC_FILE_MAX || cbsyncs > BYTES_PER_MAP) {
                                src_writer.sync();
                                dst_writer.sync();
                                cnsyncf = 0;
                                cbsyncs = 0L;
                            }
                        }
                    }
                    if (!skipfile) {
                        src_writer.append(new LongWritable(child.isDir() ? 0 : child.getLen()),
                            new FilePairComparable(child, dst));
                    }
                    dst_writer.append(new Text(dst), new Text(child.getPath().toString()));
                }
                if (cur.isDir()) {
                    String dst = makeRelative(root, cur.getPath());
                    dir_writer.append(new Text(dst), new FilePairComparable(cur, dst));
                    if (++dirsyn > SYNC_FILE_MAX) {
                        dirsyn = 0;
                        dir_writer.sync();
                    }
                }
            }
        }
    } finally {
        checkAndClose(src_writer);
        checkAndClose(dst_writer);
        checkAndClose(dir_writer);
    }

    FileStatus dststatus = null;
    try {
        dststatus = dstfs.getFileStatus(args.dst);
    } catch (FileNotFoundException fnfe) {
        LOG.info(args.dst + " does not exist.");
    }

    // create dest path dir if copying > 1 file
    if (dststatus == null) {
        if (srcCount > 1 && !dstfs.mkdirs(args.dst)) {
            throw new IOException("Failed to create" + args.dst);
        }
    }

    final Path sorted = new Path(jobDirectory, "_distcp_sorted");
    checkDuplication(jobfs, dstfilelist, sorted, conf);

    if (dststatus != null && args.flags.contains(Options.DELETE)) {
        deleteNonexisting(dstfs, dststatus, sorted, jobfs, jobDirectory, jobConf, conf);
    }

    Path tmpDir = new Path(
        (dstExists && !dstIsDir) || (!dstExists && srcCount == 1) ? args.dst.getParent() : args.dst,
        "_distcp_tmp_" + randomId);
    jobConf.set(TMP_DIR_LABEL, tmpDir.toUri().toString());
    LOG.info("sourcePathsCount=" + srcCount);
    LOG.info("filesToCopyCount=" + fileCount);
    LOG.info("bytesToCopyCount=" + StringUtils.humanReadableInt(byteCount));
    jobConf.setInt(SRC_COUNT_LABEL, srcCount);
    jobConf.setLong(TOTAL_SIZE_LABEL, byteCount);
    jobConf.setLong(TOTAL_BLOCKS_LABEL, blockCount);
    setMapCount(fileCount, byteCount, jobConf, client);
    setReducerCount(fileCount, jobConf, client);
    // Launch a job only if there is at least one file or directory to copy.
    return fileCount > 0 || dirCount > 0;
}
public class JacksonRequest { /** * Converts a base URL , endpoint , and parameters into a full URL
* @ param method The { @ link com . android . volley . Request . Method } of the URL
* @ param baseUrl The base URL
* @ param endpoint The endpoint being hit
* @ param params The parameters to be appended to the URL if a GET method is used
* @ return The full URL */
private static String getUrl ( int method , String baseUrl , String endpoint , Map < String , String > params ) { } } | if ( params != null ) { for ( Map . Entry < String , String > entry : params . entrySet ( ) ) { if ( entry . getValue ( ) == null || entry . getValue ( ) . equals ( "null" ) ) { entry . setValue ( "" ) ; } } } if ( method == Method . GET && params != null && ! params . isEmpty ( ) ) { final StringBuilder result = new StringBuilder ( baseUrl + endpoint ) ; final int startLength = result . length ( ) ; for ( String key : params . keySet ( ) ) { try { final String encodedKey = URLEncoder . encode ( key , "UTF-8" ) ; final String encodedValue = URLEncoder . encode ( params . get ( key ) , "UTF-8" ) ; if ( result . length ( ) > startLength ) { result . append ( "&" ) ; } else { result . append ( "?" ) ; } result . append ( encodedKey ) ; result . append ( "=" ) ; result . append ( encodedValue ) ; } catch ( Exception e ) { } } return result . toString ( ) ; } else { return baseUrl + endpoint ; } |
/**
 * $ANTLR start "NEW" — generated lexer rule for the 'new' keyword.
 * Generated from hql.g; do not edit by hand.
 *
 * @throws RecognitionException if the input does not match
 */
public final void mNEW() throws RecognitionException {
    try {
        int _type = NEW;
        int _channel = DEFAULT_TOKEN_CHANNEL;
        // hql.g:47:5: ( 'new' )
        // hql.g:47:7: 'new'
        {
            match("new");
            if (state.failed) return;
        }
        // Record the matched token type and channel on the shared lexer state.
        state.type = _type;
        state.channel = _channel;
    } finally {
        // do for sure before leaving (ANTLR-generated cleanup hook; empty here)
    }
}
public class ListInstanceProfilesResult { /** * An object containing information about your instance profiles .
* @ param instanceProfiles
* An object containing information about your instance profiles . */
public void setInstanceProfiles ( java . util . Collection < InstanceProfile > instanceProfiles ) { } } | if ( instanceProfiles == null ) { this . instanceProfiles = null ; return ; } this . instanceProfiles = new java . util . ArrayList < InstanceProfile > ( instanceProfiles ) ; |
public class UNode { /** * Get the value of the child VALUE node ( member ) of this UNode with the given name .
* If this is not a MAP , has no children , there is no child node with the given name ,
* or the child node isn ' t a VALUE node , then null is returned .
* @ param name Candidate name of a child member node .
* @ return Value of child VALUE UNode with the given name , if any , otherwise null . */
public String getMemberValue ( String name ) { } } | if ( m_childNodeMap == null ) { return null ; } UNode childNode = m_childNodeMap . get ( name ) ; return childNode != null && childNode . isValue ( ) ? childNode . getValue ( ) : null ; |
public class ConvertRaster { /** * A faster convert that works directly with a specific raster */
static void bufferedToPlanar_F32 ( DataBufferByte buffer , WritableRaster src , Planar < GrayF32 > dst ) { } } | byte [ ] srcData = buffer . getData ( ) ; int numBands = src . getNumBands ( ) ; int srcStride = stride ( src ) ; int srcOffset = getOffset ( src ) ; if ( BoofConcurrency . USE_CONCURRENT ) { if ( numBands == 3 ) { ImplConvertRaster_MT . from_3BU8_to_PLF32 ( srcData , srcStride , srcOffset , dst ) ; } else if ( numBands == 1 ) { ImplConvertRaster_MT . from_1BU8_to_PLF32 ( srcData , srcStride , srcOffset , dst ) ; } else if ( numBands == 4 ) { ImplConvertRaster_MT . from_4BU8_to_PLF32 ( srcData , srcStride , srcOffset , dst ) ; } else { throw new RuntimeException ( "Write more code here." ) ; } } else { if ( numBands == 3 ) { ImplConvertRaster . from_3BU8_to_PLF32 ( srcData , srcStride , srcOffset , dst ) ; } else if ( numBands == 1 ) { ImplConvertRaster . from_1BU8_to_PLF32 ( srcData , srcStride , srcOffset , dst ) ; } else if ( numBands == 4 ) { ImplConvertRaster . from_4BU8_to_PLF32 ( srcData , srcStride , srcOffset , dst ) ; } else { throw new RuntimeException ( "Write more code here." ) ; } } |
public class XmlObjectSerializer { /** * Set the output of the serializer .
* @ param serializerContext
* A { @ link SerializerContext } .
* @ param out
* The { @ link OutputStream } to write to .
* @ param charset
* The charset . The serializer will use " UTF - 8 " if this is < code > null < / code > .
* @ return
* @ throws SerializerException
* @ throws IOException */
public XmlObjectSerializer setOutput ( SerializerContext serializerContext , OutputStream out , String charset ) throws SerializerException , IOException { } } | try { serializerContext . serializer . setOutput ( out , charset == null ? "UTF-8" : charset ) ; } catch ( IllegalArgumentException e ) { throw new SerializerException ( "can't configure serializer" , e ) ; } catch ( IllegalStateException e ) { throw new SerializerException ( "can't configure serializer" , e ) ; } return this ; |
public class CollectionUtil { /** * Helper method to easily create a map .
* Takes as input a varargs containing the key1 , value1 , key2 , value2 , etc . Note : although an Object , we will cast the key to String internally . */
public static Map < String , Object > map ( Object ... objects ) { } } | if ( objects . length % 2 != 0 ) { throw new ActivitiIllegalArgumentException ( "The input should always be even since we expect a list of key-value pairs!" ) ; } Map < String , Object > map = new HashMap < String , Object > ( ) ; for ( int i = 0 ; i < objects . length ; i += 2 ) { map . put ( ( String ) objects [ i ] , objects [ i + 1 ] ) ; } return map ; |
public class StringValueMap { /** * Appends new elements to this map .
* @ param map a map with elements to be added . */
public void append ( Map < ? , ? > map ) { } } | if ( map == null || map . size ( ) == 0 ) return ; for ( Map . Entry < ? , ? > entry : map . entrySet ( ) ) { put ( StringConverter . toString ( entry . getKey ( ) ) , StringConverter . toNullableString ( entry . getValue ( ) ) ) ; } |
/**
 * Returns a {@link Collection} of all the permutations of the specified
 * {@link Iterable}, in lexicographical order (copied from Google Guava,
 * Apache License v2; Knuth TAOCP Vol. 4, 7.2.1.2).
 *
 * <p>Duplicate elements are considered equal, so [1, 1] has a single
 * permutation; an empty iterable has exactly one permutation (the empty list).
 * Equivalent to {@code Collections2.orderedPermutations(list, Ordering.natural())}.
 *
 * @param elements the original iterable whose elements have to be permuted
 * @return an immutable {@link Collection} containing all distinct permutations
 * @throws NullPointerException if the iterable is null or contains null elements
 */
public static < E extends Comparable < ? super E > > Collection < List < E > > orderedPermutations ( Collection < E > elements ) {
    // Delegates to the comparator-taking overload with the natural ordering.
    return orderedPermutations ( elements , Comparators . naturalOrder ( ) ) ;
}
public class GreetingServiceImpl { /** * Escape an html string . Escaping data received from the client helps to
* prevent cross - site script vulnerabilities .
* @ param html the html string to escape
* @ return the escaped string */
private String escapeHtml ( String html ) { } } | if ( html == null ) { return null ; } return html . replaceAll ( "&" , "&" ) . replaceAll ( "<" , "<" ) . replaceAll ( ">" , ">" ) ; |
/**
 * Orders subqueries: view subqueries first (by view creation order, earlier
 * declarations first), then other subqueries (deeper nesting first).
 *
 * @param a a SubQuery
 * @param b a SubQuery
 * @return negative, zero or positive per the ordering above
 */
@ Override public int compare ( Object a , Object b ) {
    SubQuery sqa = ( SubQuery ) a ;
    SubQuery sqb = ( SubQuery ) b ;
    if ( sqa . parentView == null && sqb . parentView == null ) {
        // Neither belongs to a view: deeper subqueries sort first.
        return sqb . level - sqa . level ;
    } else if ( sqa . parentView != null && sqb . parentView != null ) {
        // Both belong to views: order by the view's registration index.
        int ia = database . schemaManager . getTableIndex ( sqa . parentView ) ;
        int ib = database . schemaManager . getTableIndex ( sqb . parentView ) ;
        // An index of -1 means the view is not yet registered; treat it as
        // coming after all currently registered tables in its schema.
        if ( ia == - 1 ) {
            ia = database . schemaManager . getTables ( sqa . parentView . getSchemaName ( ) . name ) . size ( ) ;
        }
        if ( ib == - 1 ) {
            ib = database . schemaManager . getTables ( sqb . parentView . getSchemaName ( ) . name ) . size ( ) ;
        }
        int diff = ia - ib ;
        // Same view: fall back to depth (deeper first).
        return diff == 0 ? sqb . level - sqa . level : diff ;
    } else {
        // Exactly one is a view subquery; views sort before non-views.
        return sqa . parentView == null ? 1 : - 1 ;
    }
}
/**
 * If the standard inherited channel (file descriptor 0) is connected to a
 * socket, returns a Channel of the appropriate type based on standard input;
 * returns null when fd 0 is not a socket.
 *
 * @return an inherited Socket/ServerSocket/Datagram channel, or null
 * @throws IOException if inspecting or duplicating the descriptor fails
 */
private static Channel createChannel ( ) throws IOException {
    // dup the file descriptor - we do this for two reasons:
    // 1. Avoids any timing issues with FileDescriptor.in being closed
    //    or redirected while we create the channel.
    // 2. Allows streams based on file descriptor 0 to co-exist with
    //    the channel (closing one doesn't impact the other)
    int fdVal = dup ( 0 ) ;
    // Examine the file descriptor - if it's not a socket then we don't
    // create a channel so we release the file descriptor.
    int st ;
    st = soType0 ( fdVal ) ;
    if ( st != SOCK_STREAM && st != SOCK_DGRAM ) {
        close0 ( fdVal ) ;
        return null ;
    }
    // Next we create a FileDescriptor for the dup'ed file descriptor.
    // Have to use reflection and also make an assumption on how FD
    // is implemented (its private int constructor).
    Class paramTypes [ ] = { int . class } ;
    Constructor ctr = Reflect . lookupConstructor ( "java.io.FileDescriptor" , paramTypes ) ;
    Object args [ ] = { new Integer ( fdVal ) } ;
    FileDescriptor fd = ( FileDescriptor ) Reflect . invoke ( ctr , args ) ;
    // Now create the channel. If the socket is a streams socket then
    // we see if there is a peer (ie: connected). If so, then we
    // create a SocketChannel, otherwise a ServerSocketChannel.
    // If the socket is a datagram socket then create a DatagramChannel.
    SelectorProvider provider = SelectorProvider . provider ( ) ;
    assert provider instanceof sun . nio . ch . SelectorProviderImpl ;
    Channel c ;
    if ( st == SOCK_STREAM ) {
        InetAddress ia = peerAddress0 ( fdVal ) ;
        if ( ia == null ) {
            // No peer: a listening stream socket.
            c = new InheritedServerSocketChannelImpl ( provider , fd ) ;
        } else {
            int port = peerPort0 ( fdVal ) ;
            assert port > 0 ;
            InetSocketAddress isa = new InetSocketAddress ( ia , port ) ;
            c = new InheritedSocketChannelImpl ( provider , fd , isa ) ;
        }
    } else {
        c = new InheritedDatagramChannelImpl ( provider , fd ) ;
    }
    return c ;
}
public class IntAVLTree { /** * Return the largest node under < code > node < / code > . */
public int last ( int node ) { } } | while ( true ) { final int right = right ( node ) ; if ( right == NIL ) { break ; } node = right ; } return node ; |
/**
 * Given a list of unwrapped ranges sorted by left position, return an
 * equivalent list of ranges but with no overlapping ranges.
 *
 * @param ranges sorted, unwrapped input ranges
 * @return equivalent non-overlapping ranges
 */
private static < T extends RingPosition < T > > List < Range < T > > deoverlap ( List < Range < T > > ranges ) {
    if ( ranges . isEmpty ( ) ) return ranges ;
    List < Range < T > > output = new ArrayList < Range < T > > ( ) ;
    Iterator < Range < T > > iter = ranges . iterator ( ) ;
    Range < T > current = iter . next ( ) ;
    // min is the ring's minimum token; a right bound equal to min means
    // the range wraps to the end of the ring.
    @ SuppressWarnings ( "unchecked" ) T min = ( T ) current . partitioner . minValue ( current . left . getClass ( ) ) ;
    while ( iter . hasNext ( ) ) {
        // If current goes to the end of the ring, we're done
        if ( current . right . equals ( min ) ) {
            // If one range is the full range, we return only that
            if ( current . left . equals ( min ) ) return Collections . < Range < T > > singletonList ( current ) ;
            output . add ( new Range < T > ( current . left , min ) ) ;
            return output ;
        }
        Range < T > next = iter . next ( ) ;
        // if next left is equal to current right, we do not intersect per se, but replacing (A, B] and (B, C] by (A, C] is
        // legit, and since this avoids special casing and will result in more "optimal" ranges, we do the transformation
        if ( next . left . compareTo ( current . right ) <= 0 ) {
            // We do overlap
            // (we've handled current.right.equals(min) already)
            if ( next . right . equals ( min ) || current . right . compareTo ( next . right ) < 0 ) current = new Range < T > ( current . left , next . right ) ;
        } else {
            output . add ( current ) ;
            current = next ;
        }
    }
    output . add ( current ) ;
    return output ;
}
public class ControlBuilder { /** * Tells if the specified directory is ignored by default ( . svn , cvs , etc )
* @ param directory */
private boolean isDefaultExcludes ( File directory ) { } } | for ( String pattern : DirectoryScanner . getDefaultExcludes ( ) ) { if ( DirectoryScanner . match ( pattern , directory . getAbsolutePath ( ) . replace ( "\\" , "/" ) ) ) { return true ; } } return false ; |
public class SnapshotFile { /** * Creates a snapshot file name from the given parameters . */
@ VisibleForTesting static String createSnapshotFileName ( String serverName , long index ) { } } | return String . format ( "%s-%d.%s" , serverName , index , EXTENSION ) ; |
public class HttpURLConnection { /** * Set the method for the URL request , one of :
* < UL >
* < LI > GET
* < LI > POST
* < LI > HEAD
* < LI > OPTIONS
* < LI > PUT
* < LI > DELETE
* < LI > TRACE
* < / UL > are legal , subject to protocol restrictions . The default
* method is GET .
* @ param method the HTTP method
* @ exception ProtocolException if the method cannot be reset or if
* the requested method isn ' t valid for HTTP .
* @ exception SecurityException if a security manager is set and the
* method is " TRACE " , but the " allowHttpTrace "
* NetPermission is not granted .
* @ see # getRequestMethod ( ) */
public void setRequestMethod ( String method ) throws ProtocolException { } } | if ( connected ) { throw new ProtocolException ( "Can't reset method: already connected" ) ; } // This restriction will prevent people from using this class to
// experiment w / new HTTP methods using java . But it should
// be placed for security - the request String could be
// arbitrarily long .
for ( int i = 0 ; i < methods . length ; i ++ ) { if ( methods [ i ] . equals ( method ) ) { if ( method . equals ( "TRACE" ) ) { SecurityManager s = System . getSecurityManager ( ) ; if ( s != null ) { s . checkPermission ( new NetPermission ( "allowHttpTrace" ) ) ; } } this . method = method ; return ; } } throw new ProtocolException ( "Invalid HTTP method: " + method ) ; |
public class InternalPyExprUtils { /** * Wraps an expression with the proper SanitizedContent constructor .
* @ param contentKind The kind of sanitized content .
* @ param pyExpr The expression to wrap . */
static PyExpr wrapAsSanitizedContent ( SanitizedContentKind contentKind , PyExpr pyExpr ) { } } | String sanitizer = NodeContentKinds . toPySanitizedContentOrdainer ( contentKind ) ; String approval = "sanitize.IActuallyUnderstandSoyTypeSafetyAndHaveSecurityApproval(" + "'Internally created Sanitization.')" ; return new PyExpr ( sanitizer + "(" + pyExpr . getText ( ) + ", approval=" + approval + ")" , Integer . MAX_VALUE ) ; |
/**
 * Reports whether the given structural feature is set (EMF-generated).
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@ Override public boolean eIsSet ( int featureID ) {
    switch ( featureID ) {
        case SimpleAntlrPackage . ELEMENT_WITH_CARDINALITY__ELEMENT :
            return element != null ;
        case SimpleAntlrPackage . ELEMENT_WITH_CARDINALITY__CARDINALITY :
            // Set when the value differs from the feature's default.
            return CARDINALITY_EDEFAULT == null ? cardinality != null : ! CARDINALITY_EDEFAULT . equals ( cardinality ) ;
    }
    return super . eIsSet ( featureID ) ;
}
/**
 * Marshall the given parameter object (generated AWS SDK marshaller).
 * Writes each VideoDescription property to the protocol marshaller under
 * its pre-bound marshalling location.
 *
 * @throws SdkClientException if the argument is null or marshalling fails
 */
public void marshall ( VideoDescription videoDescription , ProtocolMarshaller protocolMarshaller ) {
    if ( videoDescription == null ) {
        throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ;
    }
    try {
        protocolMarshaller . marshall ( videoDescription . getAfdSignaling ( ) , AFDSIGNALING_BINDING ) ;
        protocolMarshaller . marshall ( videoDescription . getAntiAlias ( ) , ANTIALIAS_BINDING ) ;
        protocolMarshaller . marshall ( videoDescription . getCodecSettings ( ) , CODECSETTINGS_BINDING ) ;
        protocolMarshaller . marshall ( videoDescription . getColorMetadata ( ) , COLORMETADATA_BINDING ) ;
        protocolMarshaller . marshall ( videoDescription . getCrop ( ) , CROP_BINDING ) ;
        protocolMarshaller . marshall ( videoDescription . getDropFrameTimecode ( ) , DROPFRAMETIMECODE_BINDING ) ;
        protocolMarshaller . marshall ( videoDescription . getFixedAfd ( ) , FIXEDAFD_BINDING ) ;
        protocolMarshaller . marshall ( videoDescription . getHeight ( ) , HEIGHT_BINDING ) ;
        protocolMarshaller . marshall ( videoDescription . getPosition ( ) , POSITION_BINDING ) ;
        protocolMarshaller . marshall ( videoDescription . getRespondToAfd ( ) , RESPONDTOAFD_BINDING ) ;
        protocolMarshaller . marshall ( videoDescription . getScalingBehavior ( ) , SCALINGBEHAVIOR_BINDING ) ;
        protocolMarshaller . marshall ( videoDescription . getSharpness ( ) , SHARPNESS_BINDING ) ;
        protocolMarshaller . marshall ( videoDescription . getTimecodeInsertion ( ) , TIMECODEINSERTION_BINDING ) ;
        protocolMarshaller . marshall ( videoDescription . getVideoPreprocessors ( ) , VIDEOPREPROCESSORS_BINDING ) ;
        protocolMarshaller . marshall ( videoDescription . getWidth ( ) , WIDTH_BINDING ) ;
    } catch ( Exception e ) {
        // Wraps any marshalling failure, preserving the original cause.
        throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ;
    }
}
public class NativeJpegTranscoder { /** * Transcodes an image to match the specified exif orientation and the scale factor .
* @ param inputStream The { @ link InputStream } of the image that will be transcoded .
* @ param outputStream The { @ link OutputStream } where the newly created image is written to .
* @ param exifOrientation 0 , 90 , 180 or 270
* @ param scaleNumerator 1 - 16 , image will be scaled using scaleNumerator / 8 factor
* @ param quality 1 - 100 */
@ VisibleForTesting public static void transcodeJpegWithExifOrientation ( final InputStream inputStream , final OutputStream outputStream , final int exifOrientation , final int scaleNumerator , final int quality ) throws IOException { } } | NativeJpegTranscoderSoLoader . ensure ( ) ; Preconditions . checkArgument ( scaleNumerator >= MIN_SCALE_NUMERATOR ) ; Preconditions . checkArgument ( scaleNumerator <= MAX_SCALE_NUMERATOR ) ; Preconditions . checkArgument ( quality >= MIN_QUALITY ) ; Preconditions . checkArgument ( quality <= MAX_QUALITY ) ; Preconditions . checkArgument ( JpegTranscoderUtils . isExifOrientationAllowed ( exifOrientation ) ) ; Preconditions . checkArgument ( scaleNumerator != SCALE_DENOMINATOR || exifOrientation != ExifInterface . ORIENTATION_NORMAL , "no transformation requested" ) ; nativeTranscodeJpegWithExifOrientation ( Preconditions . checkNotNull ( inputStream ) , Preconditions . checkNotNull ( outputStream ) , exifOrientation , scaleNumerator , quality ) ; |
public class DirectionalComponent { /** * Gets the { @ link PropertyDirection } used by the block .
* @ param block the block
* @ return the property */
public static PropertyDirection getProperty ( Block block ) { } } | DirectionalComponent dc = IComponent . getComponent ( DirectionalComponent . class , block ) ; return dc != null ? dc . getProperty ( ) : null ; |
/**
 * Gets the specified virtual network by resource group.
 *
 * @param resourceGroupName The name of the resource group.
 * @param virtualNetworkName The name of the virtual network.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the VirtualNetworkInner object if successful.
 */
public VirtualNetworkInner getByResourceGroup ( String resourceGroupName , String virtualNetworkName ) {
    // Blocks on the async call and unwraps the single service-response body.
    return getByResourceGroupWithServiceResponseAsync ( resourceGroupName , virtualNetworkName ) . toBlocking ( ) . single ( ) . body ( ) ;
}
public class TypeName { /** * Returns an unboxed type if this is a boxed primitive type ( like { @ code int } for { @ code
* Integer } ) or { @ code Void } . Returns this type if it is already unboxed .
* @ throws UnsupportedOperationException if this type isn ' t eligible for unboxing . */
public TypeName unbox ( ) { } } | if ( keyword != null ) return this ; // Already unboxed .
if ( this . equals ( BOXED_VOID ) ) return VOID ; if ( this . equals ( BOXED_BOOLEAN ) ) return BOOLEAN ; if ( this . equals ( BOXED_BYTE ) ) return BYTE ; if ( this . equals ( BOXED_SHORT ) ) return SHORT ; if ( this . equals ( BOXED_INT ) ) return INT ; if ( this . equals ( BOXED_LONG ) ) return LONG ; if ( this . equals ( BOXED_CHAR ) ) return CHAR ; if ( this . equals ( BOXED_FLOAT ) ) return FLOAT ; if ( this . equals ( BOXED_DOUBLE ) ) return DOUBLE ; throw new UnsupportedOperationException ( "cannot unbox " + this ) ; |
public class SortedCursor { /** * Creates a dotted name string using the bean property names . */
private static String chainToBeanString ( ChainedProperty property ) { } } | int count = property . getChainCount ( ) ; if ( count <= 0 ) { return property . getPrimeProperty ( ) . getBeanName ( ) ; } StringBuilder b = new StringBuilder ( ) ; b . append ( property . getPrimeProperty ( ) . getBeanName ( ) ) ; for ( int i = 0 ; i < count ; i ++ ) { b . append ( '.' ) ; b . append ( property . getChainedProperty ( i ) . getBeanName ( ) ) ; } return b . toString ( ) ; |
public class AcroFields { /** * Adds a substitution font to the list . The fonts in this list will be used if the original
* font doesn ' t contain the needed glyphs .
* @ param font the font */
public void addSubstitutionFont ( BaseFont font ) { } } | if ( substitutionFonts == null ) substitutionFonts = new ArrayList ( ) ; substitutionFonts . add ( font ) ; |
public class XmlSchemaParser { /** * Check name against validity for C + + and Java naming . Warning if not valid .
* @ param node to have the name checked .
* @ param name of the node to be checked . */
public static void checkForValidName ( final Node node , final String name ) { } } | if ( ! ValidationUtil . isSbeCppName ( name ) ) { handleWarning ( node , "name is not valid for C++: " + name ) ; } if ( ! ValidationUtil . isSbeJavaName ( name ) ) { handleWarning ( node , "name is not valid for Java: " + name ) ; } if ( ! ValidationUtil . isSbeGolangName ( name ) ) { handleWarning ( node , "name is not valid for Golang: " + name ) ; } if ( ! ValidationUtil . isSbeCSharpName ( name ) ) { handleWarning ( node , "name is not valid for C#: " + name ) ; } |
public class JBBPParser { /** * Prepare a parser for a script with default bit order ( LSB0 ) use and with defined custom type field processor .
* @ param script a text script contains field order and types reference , it
* must not be null
* @ param customFieldTypeProcessor custom field type processor , can be null
* @ return the prepared parser for the script
* @ see JBBPBitOrder # LSB0
* @ since 1.2.0 */
public static JBBPParser prepare ( final String script , final JBBPCustomFieldTypeProcessor customFieldTypeProcessor ) { } } | return JBBPParser . prepare ( script , JBBPBitOrder . LSB0 , customFieldTypeProcessor , 0 ) ; |
public class ApiFutureUtils { /** * Creates api future from ValueSourceFuture . We have to send all Future API calls to ValueSourceFuture . */
public static < T > ApiFuture < T > createApiFuture ( ValueSourceFuture < T > valueSourceFuture ) { } } | if ( valueSourceFuture instanceof ApiFutureBackedValueSourceFuture ) { return ( ( ApiFutureBackedValueSourceFuture < T > ) valueSourceFuture ) . getWrappedFuture ( ) ; } else { return new ValueSourceFutureBackedApiFuture < > ( valueSourceFuture ) ; } |
public class FlowTypeCheck { /** * Check the type for a given arithmetic operator . Such an operator has the type
* int , and all children should also produce values of type int .
* @ param expr
* @ return */
private SemanticType checkIntegerOperator ( Expr . BinaryOperator expr , Environment environment ) { } } | checkOperand ( Type . Int , expr . getFirstOperand ( ) , environment ) ; checkOperand ( Type . Int , expr . getSecondOperand ( ) , environment ) ; return Type . Int ; |
public class VertexAttribute { /** * Replaces the current buffer data with a copy of the given { @ link java . nio . ByteBuffer } This method arbitrarily creates data for the ByteBuffer regardless of the data type of the vertex
* attribute .
* @ param buffer to set */
public void setData ( ByteBuffer buffer ) { } } | buffer . rewind ( ) ; this . buffer = CausticUtil . createByteBuffer ( buffer . capacity ( ) ) ; this . buffer . put ( buffer ) ; |
public class WebServiceConfiguration { /** * { @ inheritDoc } */
@ Override public void validate ( ) { } } | // Check if endpoint has been specified .
if ( StringUtil . isBlank ( endpoint ) ) { throw new IllegalArgumentException ( "Endpoint cannot be null or empty." ) ; } // Check if servicename has been specified .
if ( StringUtil . isBlank ( servicename ) ) { throw new IllegalArgumentException ( "Service name cannot be null or empty." ) ; } // Check if servicename has been specified .
if ( StringUtil . isBlank ( connectionTimeout ) ) { connectionTimeout = "30" ; } try { Long . parseLong ( connectionTimeout ) ; } catch ( NumberFormatException e ) { throw new IllegalArgumentException ( "The specified connection timeout is not valid." ) ; } if ( StringUtil . isBlank ( receiveTimeout ) ) { receiveTimeout = "60" ; } try { Long . parseLong ( receiveTimeout ) ; } catch ( NumberFormatException e ) { throw new IllegalArgumentException ( "The specified receive timeout is not valid." ) ; } try { // Check if the specified endpoint is a well - formed URL
final URL endpointURL = new URL ( endpoint ) ; endpointURL . toURI ( ) ; } catch ( Exception e ) { throw new IllegalArgumentException ( "The specified endpoint is not a valid URL." ) ; } |
public class AbstractResourceBundleHandler { /** * ( non - Javadoc )
* @ see
* net . jawr . web . resource . ResourceHandler # storeJawrBundleMapping ( java . util
* . Properties ) */
@ Override public void storeJawrBundleMapping ( Properties bundleMapping ) { } } | File bundleMappingFile = new File ( tempDirPath , mappingFileName ) ; OutputStream out = null ; try { out = new FileOutputStream ( bundleMappingFile ) ; bundleMapping . store ( out , "Jawr mapping" ) ; } catch ( IOException e ) { LOGGER . error ( "Unable to store the bundle mapping" ) ; } finally { IOUtils . close ( out ) ; } |
public class Vectors { /** * Returns true if vectors ( x1 , y1 ) and ( x2 , y2 ) are aligned ( ox , oy ) centered
* coordinate .
* @ param ox
* @ param oy
* @ param x1
* @ param y1
* @ param x2
* @ param y2
* @ return */
public static final boolean areAligned ( double ox , double oy , double x1 , double y1 , double x2 , double y2 ) { } } | return areAligned ( x1 - ox , y1 - oy , x2 - ox , y2 - oy ) ; |
public class AtomContainerRenderer { /** * Given a bond length for a model , calculate the scale that will transform
* this length to the on screen bond length in RendererModel .
* @ param modelBondLength the average bond length of the model
* @ return the scale necessary to transform this to a screen bond */
@ Override public double calculateScaleForBondLength ( double modelBondLength ) { } } | if ( Double . isNaN ( modelBondLength ) || modelBondLength == 0 ) { return DEFAULT_SCALE ; } else { return rendererModel . getParameter ( BondLength . class ) . getValue ( ) / modelBondLength ; } |
public class XMLWriterSettings { /** * Set the serialization charset .
* @ param aCharset
* The charset to be used . May not be < code > null < / code > .
* @ return this */
@ Nonnull public final XMLWriterSettings setCharset ( @ Nonnull final Charset aCharset ) { } } | m_aCharset = ValueEnforcer . notNull ( aCharset , "Charset" ) ; return this ; |
public class CoreRemoteMongoCollectionImpl { /** * Finds all documents in the collection .
* @ param filter the query filter
* @ param resultClass the class to decode each document into
* @ param < ResultT > the target document type of the iterable .
* @ return the find iterable interface */
public < ResultT > CoreRemoteFindIterable < ResultT > find ( final Bson filter , final Class < ResultT > resultClass ) { } } | return createFindIterable ( filter , resultClass ) ; |
public class MarkdownParser { /** * Create the id of a section header .
* < p > The ID format follows the ReadCarpet standards .
* @ param headerNumber the number of the header , or { @ code null } .
* @ param headerText the section header text .
* @ return the identifier . */
public static String computeHeaderId ( String headerNumber , String headerText ) { } } | final String fullText = Strings . emptyIfNull ( headerNumber ) + " " + Strings . emptyIfNull ( headerText ) ; // $ NON - NLS - 1 $
return computeHeaderId ( fullText ) ; |
/**
 * Convert an {@link javax.lang.model.element.ExecutableElement} to a
 * {@link io.sundr.codegen.model.TypeDef} describing the generated DSL interface.
 *
 * @param context The context of the operation.
 * @param executableElement The target element.
 * @return An instance of {@link io.sundr.codegen.model.TypeDef} that describes the interface.
 */
public static TypeDef executableToInterface ( DslContext context , ExecutableElement executableElement ) {
    // Do generate the interface
    Boolean multiple = executableElement . getAnnotation ( Multiple . class ) != null ;
    Boolean isEntryPoint = executableElement . getAnnotation ( EntryPoint . class ) != null ;
    // A method is terminal when annotated @Terminal or when it returns a value.
    Boolean isTerminal = executableElement . getAnnotation ( Terminal . class ) != null || ! isVoid ( executableElement ) ;
    Set < String > classes = new HashSet < String > ( ) ;
    Set < String > keywords = new HashSet < String > ( ) ;
    Set < String > methods = new HashSet < String > ( ) ;
    // @Or combines the requires-constraints disjunctively, otherwise conjunctively.
    TransitionFilter filter = executableElement . getAnnotation ( Or . class ) != null
        ? new OrTransitionFilter ( context . getToRequiresAll ( ) . apply ( executableElement ) , context . getToRequiresAny ( ) . apply ( executableElement ) , context . getToRequiresOnly ( ) . apply ( executableElement ) , context . getToRequiresNoneOf ( ) . apply ( executableElement ) )
        : new AndTransitionFilter ( context . getToRequiresAll ( ) . apply ( executableElement ) , context . getToRequiresAny ( ) . apply ( executableElement ) , context . getToRequiresOnly ( ) . apply ( executableElement ) , context . getToRequiresNoneOf ( ) . apply ( executableElement ) ) ;
    for ( String clazz : context . getToClasses ( ) . apply ( executableElement ) ) {
        classes . add ( clazz ) ;
    }
    for ( String keyword : context . getToKeywords ( ) . apply ( executableElement ) ) {
        keywords . add ( keyword ) ;
    }
    // Let's add the name of the method as a keyword to make things simpler
    methods . add ( executableElement . getSimpleName ( ) . toString ( ) ) ;
    TypeRef returnType ;
    if ( isTerminal ( executableElement ) ) {
        // Terminal methods keep their real return type (or void).
        returnType = isVoid ( executableElement ) ? VOID_REF : ElementTo . MIRROR_TO_TYPEREF . apply ( executableElement . getReturnType ( ) ) ;
    } else {
        // Intermediate methods use the transparent placeholder type.
        returnType = TRANSPARENT_REF ;
    }
    InterfaceName targetInterfaceName = executableElement . getAnnotation ( InterfaceName . class ) ;
    MethodName tagetMethodName = executableElement . getAnnotation ( MethodName . class ) ;
    Begin begin = executableElement . getAnnotation ( Begin . class ) ;
    End end = executableElement . getAnnotation ( End . class ) ;
    if ( begin != null ) {
        keywords . add ( begin . value ( ) ) ;
    }
    if ( end != null ) {
        keywords . add ( end . value ( ) ) ;
    }
    String methodName = tagetMethodName != null ? tagetMethodName . value ( ) : executableElement . getSimpleName ( ) . toString ( ) ;
    String beginScope = begin != null ? begin . value ( ) : null ;
    String endScope = end != null ? end . value ( ) : null ;
    TypeParamDef paremeterType = Generics . MAP . apply ( returnType ) ;
    Method sourceMethod = ElementTo . METHOD . apply ( executableElement ) ;
    // Strip io.sundr-internal annotations from the generated method.
    List < AnnotationRef > annotations = new ArrayList < AnnotationRef > ( ) ;
    for ( AnnotationRef candidate : sourceMethod . getAnnotations ( ) ) {
        if ( ! candidate . getClassRef ( ) . getFullyQualifiedName ( ) . startsWith ( "io.sundr" ) ) {
            annotations . add ( candidate ) ;
        }
    }
    Method targetMethod = new MethodBuilder ( sourceMethod ) . withAnnotations ( annotations ) . withModifiers ( TypeUtils . modifiersToInt ( Modifier . PUBLIC ) ) . withReturnType ( paremeterType . toReference ( ) ) . withName ( methodName ) . build ( ) ;
    String interfaceName = targetInterfaceName != null ? targetInterfaceName . value ( ) : toInterfaceName ( targetMethod . getName ( ) ) ;
    // Assemble the interface definition, recording the DSL metadata as attributes.
    return new TypeDefBuilder ( ) . withPackageName ( ModelUtils . getPackageElement ( executableElement ) . toString ( ) ) . withName ( interfaceName ) .
    withParameters ( paremeterType ) . withKind ( Kind . INTERFACE ) . withModifiers ( TypeUtils . modifiersToInt ( Modifier . PUBLIC ) ) . addToAttributes ( ORIGINAL_RETURN_TYPE , returnType ) . addToAttributes ( IS_ENTRYPOINT , isEntryPoint ) . addToAttributes ( IS_TERMINAL , isTerminal ) . addToAttributes ( IS_GENERIC , Boolean . FALSE ) . addToAttributes ( CLASSES , classes ) . addToAttributes ( KEYWORDS , keywords ) . addToAttributes ( METHODS , methods ) . addToAttributes ( BEGIN_SCOPE , beginScope ) . addToAttributes ( END_SCOPE , endScope ) . addToAttributes ( FILTER , filter ) . addToAttributes ( CARDINALITY_MULTIPLE , multiple ) . addToAttributes ( METHOD_NAME , methodName ) . addToMethods ( targetMethod ) . build ( ) ;
}
/**
 * Gets the output and error streams from a process and reads them to keep the
 * process from blocking due to a full output buffer. The processed stream data
 * is appended to the supplied Appendables. Two threads are started, so this
 * method returns immediately; the threads are not join()ed even if waitFor()
 * is called. To wait for the output to be fully consumed call
 * waitForProcessOutput().
 *
 * @param self a Process
 * @param output an Appendable to capture the process stdout
 * @param error an Appendable to capture the process stderr
 * @since 1.7.5
 */
public static void consumeProcessOutput ( Process self , Appendable output , Appendable error ) {
    // Each call spawns its own consumer thread for the respective stream.
    consumeProcessOutputStream ( self , output ) ;
    consumeProcessErrorStream ( self , error ) ;
}
public class DurationUtilImpl {
    /**
     * Gets the singleton instance of {@link DurationUtil}, creating it lazily
     * on first access.<br>
     * <b>ATTENTION:</b><br>
     * Please prefer dependency-injection instead of using this method.
     *
     * @return the singleton instance.
     */
    public static DurationUtil getInstance() {
        if (instance == null) {
            // Double-checked locking: only the first callers pay for the lock.
            // NOTE(review): this pattern is only safe if the "instance" field
            // is declared volatile - confirm at the field declaration.
            synchronized (DurationUtilImpl.class) {
                if (instance == null) {
                    instance = new DurationUtilImpl();
                }
            }
        }
        return instance;
    }
}
public class JdbcMapperFactory { /** * Will create a DynamicMapper on the specified type .
* @ param target the type
* @ param < T > the jdbcMapper target type
* @ return the DynamicMapper */
public < T > DynamicJdbcMapper < T > newMapper ( final Type target ) { } } | final ClassMeta < T > classMeta = getClassMeta ( target ) ; return new DynamicJdbcSetRowMapper < T > ( new SetRowMapperFactory < T > ( classMeta ) , new MapperKeyFactory ( ) , new MapperKeyFactory ( ) ) ; |
public class HtmlDocletWriter { /** * Adds the user specified bottom .
* @ param body the content tree to which user specified bottom will be added */
public void addBottom ( Content body ) { } } | Content bottom = new RawHtml ( replaceDocRootDir ( configuration . bottom ) ) ; Content small = HtmlTree . SMALL ( bottom ) ; Content p = HtmlTree . P ( HtmlStyle . legalCopy , small ) ; body . addContent ( p ) ; |
public class CommercePriceListPersistenceImpl {
    /**
     * Returns the first commerce price list in the ordered set where companyId = &#63;.
     *
     * @param companyId the company ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching commerce price list
     * @throws NoSuchPriceListException if a matching commerce price list could not be found
     */
    @Override
    public CommercePriceList findByCompanyId_First(long companyId,
            OrderByComparator<CommercePriceList> orderByComparator)
        throws NoSuchPriceListException {

        CommercePriceList commercePriceList = fetchByCompanyId_First(companyId, orderByComparator);

        if (commercePriceList != null) {
            return commercePriceList;
        }

        // No match: build a message of the form "<prefix>companyId=<value>}".
        // NOTE(review): presumably _NO_SUCH_ENTITY_WITH_KEY ends with "{" so
        // the braces balance - confirm at the constant's declaration.
        StringBundler msg = new StringBundler(4);
        msg.append(_NO_SUCH_ENTITY_WITH_KEY);
        msg.append("companyId=");
        msg.append(companyId);
        msg.append("}");

        throw new NoSuchPriceListException(msg.toString());
    }
}
public class MapConstraints {
    /**
     * Returns a constrained view of the specified set multimap, using the
     * specified constraint. Any operations that add new mappings will call the
     * provided constraint. However, this method does not verify that existing
     * mappings satisfy the constraint.
     *
     * <p>Note that the generated multimap's {@link Multimap#removeAll} and
     * {@link Multimap#replaceValues} methods return collections that are not
     * constrained.
     *
     * <p>The returned multimap is not serializable.
     *
     * @param multimap the multimap to constrain
     * @param constraint the constraint that validates added entries
     * @return a constrained view of the specified multimap
     */
    public static <K, V> SetMultimap<K, V> constrainedSetMultimap(
            SetMultimap<K, V> multimap, MapConstraint<? super K, ? super V> constraint) {
        // Thin factory: all validation happens inside the constrained view.
        return new ConstrainedSetMultimap<K, V>(multimap, constraint);
    }
}
public class xen_health_resource_sw {
    /**
     * <pre>
     * Converts the API response of a bulk operation into objects and returns
     * the object array in case of a get request.
     * </pre>
     *
     * @param service  the nitro service supplying the payload formatter
     * @param response the raw response string to de-serialize
     * @return one resource per response element
     * @throws Exception if the response carries an error code or cannot be parsed
     */
    protected base_resource[] get_nitro_bulk_response(nitro_service service, String response) throws Exception {
        // De-serialize the bulk payload into the generated responses wrapper.
        xen_health_resource_sw_responses result = (xen_health_resource_sw_responses)
                service.get_payload_formatter().string_to_resource(xen_health_resource_sw_responses.class, response);
        if (result.errorcode != 0) {
            // Clear a stale session so the next call can re-authenticate.
            if (result.errorcode == SESSION_NOT_EXISTS)
                service.clear_session();
            throw new nitro_exception(result.message, result.errorcode,
                    (base_response[]) result.xen_health_resource_sw_response_array);
        }
        // Unwrap: each response element carries exactly one resource at index 0.
        xen_health_resource_sw[] result_xen_health_resource_sw =
                new xen_health_resource_sw[result.xen_health_resource_sw_response_array.length];
        for (int i = 0; i < result.xen_health_resource_sw_response_array.length; i++) {
            result_xen_health_resource_sw[i] = result.xen_health_resource_sw_response_array[i].xen_health_resource_sw[0];
        }
        return result_xen_health_resource_sw;
    }
}
public class RollingFileAppender {
    /**
     * {@inheritDoc}
     *
     * <p>Checks the triggering policy (and rolls over if required) before
     * delegating the actual write to the superclass.
     */
    protected void subAppend(final LoggingEvent event) {
        // The rollover check must precede actual writing. This is the
        // only correct behavior for time driven triggers.
        AccessController.doPrivileged(new PrivilegedAction<Void>() {
            @Override
            public Void run() {
                if (triggeringPolicy.isTriggeringEvent(RollingFileAppender.this, event, getFile(), getFileLength())) {
                    // Wrap the rollover request in a try block since the
                    // rollover may fail in case read access to the directory
                    // is not provided. However the appender should still be in
                    // good condition and the append should still happen.
                    try {
                        rollover();
                    } catch (Exception ex) {
                        LogLog.warn("Exception during rollover attempt.", ex);
                    }
                }
                return null;
            }
        });
        super.subAppend(event);
    }
}
public class YahooFinance {
    /**
     * Sends a request with the historical quotes included starting from the
     * specified {@link Calendar} date at the default interval (monthly).
     * Returns null if the data can't be retrieved from Yahoo Finance.
     *
     * @param symbol the symbol of the stock for which you want to retrieve information
     * @param from start date of the historical data
     * @return a {@link Stock} object containing the requested information
     * @throws java.io.IOException when there's a connection problem
     */
    public static Stock get(String symbol, Calendar from) throws IOException {
        // Delegate to the full overload with the default end date and interval.
        return YahooFinance.get(symbol, from, HistQuotesRequest.DEFAULT_TO, HistQuotesRequest.DEFAULT_INTERVAL);
    }
}
public class AmazonAppStreamClient {
    /**
     * Retrieves the name of the stack with which the specified fleet is associated.
     *
     * @param request the ListAssociatedStacks request
     * @return Result of the ListAssociatedStacks operation returned by the service.
     * @sample AmazonAppStream.ListAssociatedStacks
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/appstream-2016-12-01/ListAssociatedStacks"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public ListAssociatedStacksResult listAssociatedStacks(ListAssociatedStacksRequest request) {
        // Run client-side pre-execution handlers, then dispatch the call.
        request = beforeClientExecution(request);
        return executeListAssociatedStacks(request);
    }
}
public class JsonObject {
    /**
     * Sets the value of the member with the specified name to the JSON
     * representation of the specified <code>int</code> value. If this object
     * does not contain a member with this name, a new member is added at the
     * end of the object. If this object contains multiple members with this
     * name, only the last one is changed.
     *
     * <p>This method should <strong>only be used to modify existing
     * objects</strong>. To fill a new object with members, the method
     * <code>add(name, value)</code> should be preferred, which is much faster
     * (as it does not need to search for existing members).
     *
     * @param name the name of the member to replace
     * @param value the value to set to the member
     * @return the object itself, to enable method chaining
     */
    public JsonObject set(String name, int value) {
        // Box the primitive into a JsonValue and reuse the generic overload.
        set(name, Json.value(value));
        return this;
    }
}
public class CmsXmlContainerPageHandler {
    /**
     * Validates container names, so that they are unique in the page.<p>
     *
     * @param cms the cms context
     * @param value the value to validate
     * @param content the container page to validate
     * @throws CmsXmlException if there are duplicated names
     */
    protected void validateNames(CmsObject cms, I_CmsXmlContentValue value, CmsXmlContent content)
        throws CmsXmlException {

        // get the current container's name
        Locale locale = value.getLocale();
        String namePath = CmsXmlUtils.concatXpath(value.getPath(), CmsXmlContainerPage.XmlNode.Name.name());
        String name = content.getValue(namePath, locale).getStringValue(cms);

        // iterate over all containers on the page
        Iterator<I_CmsXmlContentValue> itValues = content.getValues(
            CmsXmlContainerPage.XmlNode.Containers.name(), locale).iterator();
        while (itValues.hasNext()) {
            I_CmsXmlContentValue itValue = itValues.next();
            if (itValue.getPath().equals(value.getPath())) {
                // skip the container currently being validated
                continue;
            }
            // get the other container's name
            namePath = CmsXmlUtils.concatXpath(itValue.getPath(), CmsXmlContainerPage.XmlNode.Name.name());
            String itName = content.getValue(namePath, locale).getStringValue(cms);
            // validate: the first duplicate aborts with an exception
            if (name.equals(itName)) {
                throw new CmsXmlException(Messages.get().container(Messages.ERR_DUPLICATE_NAME_1, name));
            }
        }
    }
}
public class ServletRegistrationBean {
    /**
     * Configure registration settings. Subclasses can override this method to
     * perform additional configuration if required.
     *
     * @param registration the registration
     */
    @Override
    protected void configure(ServletRegistration.Dynamic registration) {
        super.configure(registration);
        String[] urlMapping = StringUtils.toStringArray(this.urlMappings);
        // Fall back to the default mappings when none were configured and the
        // bean is flagged to always map a URL.
        if (urlMapping.length == 0 && this.alwaysMapUrl) {
            urlMapping = DEFAULT_MAPPINGS;
        }
        if (!ObjectUtils.isEmpty(urlMapping)) {
            registration.addMapping(urlMapping);
        }
        registration.setLoadOnStartup(this.loadOnStartup);
        // Multipart support is optional; only apply when configured.
        if (this.multipartConfig != null) {
            registration.setMultipartConfig(this.multipartConfig);
        }
    }
}
public class Kb {
    /**
     * Adds a value to the list stored under the given key, creating the list
     * on first use; duplicate values (per {@code contains}) are not added.
     * ZAP: Added the type arguments.
     */
    private void add(TreeMap<String, Object> map, String key, Object value) {
        // ZAP: Added the type argument.
        Vector<Object> v = getList(map, key);
        if (v == null) {
            // ZAP: Added the type argument.
            v = new Vector<>();
            // NOTE(review): only the put is synchronized on the map; the
            // surrounding check-then-act and the contains/add below are not -
            // confirm whether concurrent callers can reach this method.
            synchronized (map) {
                map.put(key, v);
            }
        }
        if (!v.contains(value)) {
            v.add(value);
        }
    }
}
public class ChannelSelector {
    /**
     * Selects a set of keys whose corresponding channels are ready for I/O
     * operations. This method performs a non-blocking selection operation. If
     * no channels have become selectable since the previous selection
     * operation then this method immediately returns an empty selected-key set.
     *
     * @return the set of ready (or invalidated) channel selection keys
     * @throws IOException if an I/O error occurs
     */
    public FastSet<ChannelSelectionKey> selectNow() throws IOException {
        FastSet<ChannelSelectionKey> selectedKey = new FastSet<ChannelSelectionKey>();
        selector.selectNow();
        Set<SelectionKey> selection = selector.selectedKeys();
        for (SelectionKey key : selection) {
            // Each raw SelectionKey carries its wrapper as the attachment.
            ChannelSelectionKey k = (ChannelSelectionKey) key.attachment();
            try {
                if (key.isValid()) {
                    if (key.isValid() && key.isAcceptable()) {
                        selectedKey.add(k);
                    } else {
                        // Readable: service the channel's read first, and only
                        // report readiness if the wrapper still agrees.
                        if (key.isValid() && key.isReadable()) {
                            ((ShellChannel) k.channel()).doRead();
                            if (k.isValid() && k.isReadable()) {
                                selectedKey.add(k);
                            }
                        }
                        // Writable: flush pending output, then report readiness.
                        if (key.isValid() && key.isWritable()) {
                            ((ShellChannel) k.channel()).doWrite();
                            if (k.isValid() && k.isWritable()) {
                                selectedKey.add(k);
                            }
                        }
                    }
                } else {
                    // adding invalid channel to allow its removal
                    selectedKey.add(k);
                }
            } catch (IOException ioe) {
                throw new ChannelException(k, ioe.getMessage());
            }
        } // for
        // Clear the selector's set so the next selectNow() starts fresh.
        selection.clear();
        return selectedKey;
    }
}
public class HelloSignClient {
    /**
     * Creates a template draft that can be used for embedded template creation.
     *
     * @param req EmbeddedRequest
     * @return Template the unclaimed template draft
     * @throws HelloSignException thrown if there's a problem processing the
     *         HTTP request or the JSON response.
     */
    public TemplateDraft createEmbeddedTemplateDraft(EmbeddedRequest req) throws HelloSignException {
        // POST the request's fields and wrap the JSON response as a TemplateDraft.
        return new TemplateDraft(httpClient
                .withAuth(auth)
                .withPostFields(req.getPostFields())
                .post(BASE_URI + TEMPLATE_CREATE_EMBEDDED_DRAFT_URI)
                .asJson());
    }
}
public class FileLocator { /** * Get a list of all sub - directories ( as Files ) located in the provided
* directory
* @ param root
* Starting directory
* @ return List of File objects ( one per sub - directory ) , or null
* @ see File # listFiles ( FileFilter ) */
public static List < File > getDirectories ( File root ) { } } | if ( root == null ) return Collections . emptyList ( ) ; FileFilter filter = new DirsOnlyFilter ( ) ; File fileList [ ] = root . listFiles ( filter ) ; if ( fileList == null ) return Collections . emptyList ( ) ; else return Arrays . asList ( fileList ) ; |
public class HBCIUtils {
    /**
     * Converts a byte array into its hex representation.
     *
     * @param data the byte array for which a hex representation is to be produced
     * @return a string containing, for each byte of <code>data</code>, two
     *         lower-case hex digits (0-9, a-f) followed by a single space
     */
    public static String data2hex(byte[] data) {
        // Three chars per byte: two hex digits plus the separating space.
        StringBuilder ret = new StringBuilder(data.length * 3);
        for (byte b : data) {
            // Mask to 0..255 so negative bytes do not sign-extend into
            // an 8-digit hex string (the old pad/substring dance).
            ret.append(String.format("%02x", b & 0xFF)).append(' ');
        }
        return ret.toString();
    }
}
public class AbstractHBCIJob {
    /**
     * Stores the HBCI return values for this job in the job result. All
     * returned status segments are scanned; those referring to the current
     * job (matching segment reference) are added to the job status.
     *
     * @param status the message status holding the per-segment return values
     * @param sref   the segment reference number identifying this job
     */
    protected void saveReturnValues(HBCIMsgStatus status, int sref) {
        List<HBCIRetVal> retVals = status.segStatus.getRetVals();
        String segref = Integer.toString(sref);
        retVals.forEach(retVal -> {
            // Keep only return values addressed to this job's segment.
            if (retVal.segref != null && retVal.segref.equals(segref)) {
                jobResult.jobStatus.addRetVal(retVal);
            }
        });
        /* For jobs that needed several messages this means that only the
         * globStatus of the *last* executed message is stored. That is fine,
         * because after an error no further messages are executed, so in the
         * error case the failing globStatus is available. In the OK case at
         * most the OK notices of earlier messages are overwritten. */
        jobResult.globStatus = status.globStatus;
    }
}
public class DTMNodeProxy { /** * Method hasAttribute
* @ param name */
public boolean hasAttribute ( String name ) { } } | return DTM . NULL != dtm . getAttributeNode ( node , null , name ) ; |
public class XMLDataStorer {
    /**
     * Initializes the XML data container for the given test class and writes
     * it out immediately.
     *
     * @param classname name of the test class
     */
    public void createXMLData(final String classname) {
        data = new Kopemedata();
        data.setTestcases(new Testcases());
        final Testcases tc = data.getTestcases();
        tc.setClazz(classname);
        // Persist the freshly created (still empty) structure right away.
        storeData();
    }
}
public class FeedTaskFactoryImpl {
    /**
     * {@inheritDoc}
     *
     * <p>Creates a download task for the given combined feed, wiring in the
     * factory's shared downloader and datastore.
     */
    @Override
    public FeedDownloadTask get(CombinedFeed feed) {
        return new FeedDownloadTask(feedDownloader, datastore, feed.getId());
    }
}
public class HttpRequest {
    /**
     * <pre>
     * The scheme (http, https), the host name, the path and the query
     * portion of the URL that was requested.
     * Example: `"http://example.com/some/info?color=red"`.
     * </pre>
     *
     * <code>string request_url = 2;</code>
     *
     * @return the request URL
     */
    public java.lang.String getRequestUrl() {
        java.lang.Object ref = requestUrl_;
        if (ref instanceof java.lang.String) {
            // Already decoded: return the cached String.
            return (java.lang.String) ref;
        } else {
            // First access: decode the ByteString once and cache the decoded
            // String back into the field (protobuf lazy-String pattern).
            com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
            java.lang.String s = bs.toStringUtf8();
            requestUrl_ = s;
            return s;
        }
    }
}
public class XMLGISElementUtil {
    /**
     * Reads a map element from the XML description.
     *
     * @param <T> is the type of the element to create.
     * @param element is the XML node to read.
     * @param elementNodeName is the name of the XML node that should contain the map element data.
     *     It must be one of {@link #NODE_POINT}, {@link #NODE_CIRCLE}, {@link #NODE_POLYGON},
     *     {@link #NODE_POLYLINE}, {@link #NODE_MULTIPOINT}, or {@code null} for the XML node name itself.
     * @param type is the type of the element to create, or {@code null} to use the default.
     * @param pathBuilder is the tool to make paths absolute.
     * @param resources is the tool that permits to gather the resources.
     * @return the map element.
     * @throws IOException in case of error.
     */
    @SuppressWarnings({"unchecked", "checkstyle:cyclomaticcomplexity", "checkstyle:npathcomplexity"})
    public static <T extends MapElement> T readMapElement(Element element, String elementNodeName,
            Class<T> type, PathBuilder pathBuilder, XMLResources resources) throws IOException {
        final UUID id = XMLUtil.getAttributeUUIDWithDefault(element, null, XMLUtil.ATTR_ID);
        // Fall back to the element's own node name when none was supplied.
        String nn = elementNodeName;
        if (nn == null || nn.length() == 0) {
            nn = element.getNodeName();
        }
        if (NODE_POINT.equals(nn)) {
            final double x = XMLUtil.getAttributeDouble(element, ATTR_X);
            final double y = XMLUtil.getAttributeDouble(element, ATTR_Y);
            final boolean doubleFrame = XMLUtil.getAttributeBooleanWithDefault(element, false, ATTR_DOUBLEFRAME);
            final double size = XMLUtil.getAttributeDoubleWithDefault(element, GeoLocationUtil.GIS_POINT_SIZE, ATTR_SIZE);
            final MapPoint point;
            if (type != null && MapPoint.class.isAssignableFrom(type)) {
                // Instantiate the caller-provided subtype reflectively, with or
                // without the id depending on what was present in the XML.
                try {
                    if (id != null) {
                        final Constructor<T> cons = type.getConstructor(UUID.class, double.class, double.class);
                        point = (MapPoint) cons.newInstance(id, x, y);
                    } else {
                        final Constructor<T> cons = type.getConstructor(double.class, double.class);
                        point = (MapPoint) cons.newInstance(x, y);
                    }
                } catch (AssertionError e) {
                    throw e;
                } catch (Throwable e) {
                    throw new IOException(e);
                }
            } else if (id == null) {
                point = new MapPoint(x, y);
            } else {
                point = new MapPoint(id, x, y);
            }
            point.setDoubleFramed(doubleFrame);
            point.setPointSize(size);
            readGISElementAttributes(element, point, pathBuilder, resources);
            final Integer color = XMLUtil.getAttributeColorWithDefault(element, null, XMLUtil.ATTR_COLOR);
            if (color != null) {
                point.setColor(color);
            }
            return (T) point;
        }
        if (NODE_CIRCLE.equals(nn)) {
            final double x = XMLUtil.getAttributeDouble(element, ATTR_X);
            final double y = XMLUtil.getAttributeDouble(element, ATTR_Y);
            final double radius = XMLUtil.getAttributeDoubleWithDefault(element,
                    MapElementConstants.getPreferredRadius(), ATTR_RADIUS);
            final MapCircle circle;
            if (type != null && MapCircle.class.isAssignableFrom(type)) {
                try {
                    if (id != null) {
                        final Constructor<T> cons = type.getConstructor(UUID.class, double.class, double.class);
                        circle = (MapCircle) cons.newInstance(id, x, y);
                    } else {
                        final Constructor<T> cons = type.getConstructor(double.class, double.class);
                        circle = (MapCircle) cons.newInstance(x, y);
                    }
                } catch (AssertionError e) {
                    throw e;
                } catch (Throwable e) {
                    throw new IOException(e);
                }
            } else if (id == null) {
                circle = new MapCircle(x, y);
            } else {
                circle = new MapCircle(id, x, y);
            }
            circle.setRadius(radius);
            readGISElementAttributes(element, circle, pathBuilder, resources);
            final Integer color = XMLUtil.getAttributeColorWithDefault(element, null, XMLUtil.ATTR_COLOR);
            if (color != null) {
                circle.setColor(color);
            }
            return (T) circle;
        }
        if (NODE_POLYLINE.equals(nn)) {
            final boolean isWide = XMLUtil.getAttributeBooleanWithDefault(element, false, ATTR_WIDE);
            final MapPolyline polyline;
            if (type != null && MapPolyline.class.isAssignableFrom(type)) {
                try {
                    if (id != null) {
                        final Constructor<T> cons = type.getConstructor(UUID.class);
                        polyline = (MapPolyline) cons.newInstance(id);
                    } else {
                        polyline = (MapPolyline) type.newInstance();
                    }
                } catch (AssertionError e) {
                    throw e;
                } catch (Throwable e) {
                    throw new IOException(e);
                }
            } else if (id == null) {
                polyline = new MapPolyline();
            } else {
                polyline = new MapPolyline(id);
            }
            polyline.setWidePolyline(isWide);
            readMapComposedElementPoints(element, polyline);
            readGISElementAttributes(element, polyline, pathBuilder, resources);
            final Integer color = XMLUtil.getAttributeColorWithDefault(element, null, XMLUtil.ATTR_COLOR);
            if (color != null) {
                polyline.setColor(color);
            }
            return (T) polyline;
        }
        if (NODE_POLYGON.equals(nn)) {
            final MapPolygon polygon;
            if (type != null && MapPolygon.class.isAssignableFrom(type)) {
                try {
                    if (id != null) {
                        final Constructor<T> cons = type.getConstructor(UUID.class);
                        polygon = (MapPolygon) cons.newInstance(id);
                    } else {
                        polygon = (MapPolygon) type.newInstance();
                    }
                } catch (AssertionError e) {
                    throw e;
                } catch (Throwable e) {
                    throw new IOException(e);
                }
            } else if (id == null) {
                polygon = new MapPolygon();
            } else {
                polygon = new MapPolygon(id);
            }
            readMapComposedElementPoints(element, polygon);
            readGISElementAttributes(element, polygon, pathBuilder, resources);
            final Integer color = XMLUtil.getAttributeColorWithDefault(element, null, XMLUtil.ATTR_COLOR);
            if (color != null) {
                polygon.setColor(color);
            }
            return (T) polygon;
        }
        if (NODE_MULTIPOINT.equals(nn)) {
            final boolean doubleFrame = XMLUtil.getAttributeBooleanWithDefault(element, false, ATTR_DOUBLEFRAME);
            final double size = XMLUtil.getAttributeDoubleWithDefault(element, GeoLocationUtil.GIS_POINT_SIZE, ATTR_SIZE);
            final MapMultiPoint multipoint;
            if (type != null && MapMultiPoint.class.isAssignableFrom(type)) {
                try {
                    if (id != null) {
                        final Constructor<T> cons = type.getConstructor(UUID.class);
                        multipoint = (MapMultiPoint) cons.newInstance(id);
                    } else {
                        multipoint = (MapMultiPoint) type.newInstance();
                    }
                } catch (AssertionError e) {
                    throw e;
                } catch (Throwable e) {
                    throw new IOException(e);
                }
            } else if (id == null) {
                multipoint = new MapMultiPoint();
            } else {
                multipoint = new MapMultiPoint(id);
            }
            multipoint.setDoubleFramed(doubleFrame);
            multipoint.setPointSize(size);
            readMapComposedElementPoints(element, multipoint);
            readGISElementAttributes(element, multipoint, pathBuilder, resources);
            final Integer color = XMLUtil.getAttributeColorWithDefault(element, null, XMLUtil.ATTR_COLOR);
            if (color != null) {
                multipoint.setColor(color);
            }
            return (T) multipoint;
        }
        // Unknown node name: nothing we can build.
        throw new IOException("unable to parse the XML node to retrieve a map element"); //$NON-NLS-1$
    }
}
public class InternalAnnotationDefinitionConverter {
    /**
     * {@inheritDoc}
     *
     * <p>Converts an internal (non-URL) annotation definition to its XBEL
     * form. Returns {@code null} for {@code null} input or for external
     * definitions (those carrying a URL).
     */
    @Override
    public XBELInternalAnnotationDefinition convert(AnnotationDefinition t) {
        if (t == null || t.getURL() != null) {
            return null;
        }
        XBELInternalAnnotationDefinition dest = new XBELInternalAnnotationDefinition();
        String description = t.getDescription();
        String id = t.getId();
        String usage = t.getUsage();
        // If null, set equal to an empty string so that the document can be
        // properly converted.
        if (description == null)
            description = "";
        if (usage == null)
            usage = "";
        dest.setDescription(description);
        dest.setId(id);
        dest.setUsage(usage);
        AnnotationType type = t.getType();
        String value = t.getValue();
        switch (type) {
        case ENUMERATION:
            // Copy the enumeration values into a list annotation.
            List<String> enums = t.getEnums();
            XBELListAnnotation xla = new XBELListAnnotation();
            List<String> xlaval = xla.getListValue();
            xlaval.addAll(enums);
            dest.setListAnnotation(xla);
            break;
        case REGULAR_EXPRESSION:
            dest.setPatternAnnotation(value);
            break;
        default:
            throw new UnsupportedOperationException("unknown type: " + type);
        }
        return dest;
    }
}
public class MathUtil {
    /**
     * Finds the next power of 2.
     * Classic bit operation for signed 32-bit; valid for positive integers
     * only (0 otherwise).
     *
     * @param x original integer
     * @return next power of 2 (x itself when x is already a power of 2)
     */
    public static int nextPow2Int(int x) {
        // Smear the highest set bit of (x - 1) into every lower position,
        // then add one to land on the next power of two.
        int v = x - 1;
        for (int shift = 1; shift <= 16; shift <<= 1) {
            v |= v >>> shift;
        }
        return v + 1;
    }
}
public class ProjectClient {
    /**
     * Moves a persistent disk from one zone to another.
     *
     * <p>Sample code:
     * <pre><code>
     * try (ProjectClient projectClient = ProjectClient.create()) {
     *   ProjectName project = ProjectName.of("[PROJECT]");
     *   DiskMoveRequest diskMoveRequestResource = DiskMoveRequest.newBuilder().build();
     *   Operation response = projectClient.moveDiskProject(project.toString(), diskMoveRequestResource);
     * }
     * </code></pre>
     *
     * @param project Project ID for this request.
     * @param diskMoveRequestResource the move-disk request body
     * @return the operation returned by the service
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    @BetaApi
    public final Operation moveDiskProject(String project, DiskMoveRequest diskMoveRequestResource) {
        // Assemble the HTTP request object and delegate to the request overload.
        MoveDiskProjectHttpRequest request = MoveDiskProjectHttpRequest.newBuilder()
                .setProject(project)
                .setDiskMoveRequestResource(diskMoveRequestResource)
                .build();
        return moveDiskProject(request);
    }
}
public class AssociateSurfBasic {
    /**
     * Splits the set of input features into positive- and negative-laplacian
     * lists, keeping track of each feature's index in the original input list
     * (this is the index that needs to be returned later).
     *
     * @param input all features to partition
     * @param pos   receives features flagged {@code white}
     * @param neg   receives the remaining features
     */
    private void sort(FastQueue<BrightFeature> input, FastQueue<Helper> pos, FastQueue<Helper> neg) {
        pos.reset();
        neg.reset();
        for (int i = 0; i < input.size; i++) {
            BrightFeature f = input.get(i);
            // wrap() records the original index alongside the feature.
            if (f.white) {
                pos.grow().wrap(f, i);
            } else {
                neg.grow().wrap(f, i);
            }
        }
    }
}
public class DescribeStackResourceDriftsRequest {
    /**
     * The resource drift status values to use as filters for the resource
     * drift results returned:
     * <ul>
     * <li><code>DELETED</code>: the resource differs from its expected
     * template configuration in that the resource has been deleted.</li>
     * <li><code>MODIFIED</code>: one or more resource properties differ from
     * their expected template values.</li>
     * <li><code>IN_SYNC</code>: the resource's actual configuration matches
     * its expected template configuration.</li>
     * <li><code>NOT_CHECKED</code>: AWS CloudFormation does not currently
     * return this value.</li>
     * </ul>
     *
     * @return the resource drift status values to use as filters (never
     *         {@code null}; lazily initialized to an empty list)
     * @see StackResourceDriftStatus
     */
    public java.util.List<String> getStackResourceDriftStatusFilters() {
        // Lazily create the backing list on first access (AWS SDK convention).
        if (stackResourceDriftStatusFilters == null) {
            stackResourceDriftStatusFilters = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return stackResourceDriftStatusFilters;
    }
}
public class Disruptor { /** * Create a group of event processors to be used as a dependency .
* @ param processors the event processors , previously set up with { @ link # handleEventsWith ( com . lmax . disruptor . EventProcessor . . . ) } ,
* that will form the barrier for subsequent handlers or processors .
* @ return an { @ link EventHandlerGroup } that can be used to setup a { @ link SequenceBarrier } over the specified event processors .
* @ see # after ( com . lmax . disruptor . EventHandler [ ] ) */
public EventHandlerGroup < T > after ( final EventProcessor ... processors ) { } } | for ( final EventProcessor processor : processors ) { consumerRepository . add ( processor ) ; } return new EventHandlerGroup < > ( this , consumerRepository , Util . getSequencesFor ( processors ) ) ; |
public class RebootRequestMarshaller {
    /**
     * Marshalls the given request object into the protocol representation.
     *
     * @param rebootRequest the request to marshall; must not be {@code null}
     * @param protocolMarshaller the marshaller receiving the field values
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(RebootRequest rebootRequest, ProtocolMarshaller protocolMarshaller) {
        if (rebootRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(rebootRequest.getWorkspaceId(), WORKSPACEID_BINDING);
        } catch (Exception e) {
            // Surface any marshalling failure as an SdkClientException,
            // preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class InternalAuthentication { /** * Save user lists to the file on the disk
* @ throws IOException if fail to save to file */
private void saveUsers ( ) throws IOException { } } | if ( file_store != null ) { try { file_store . store ( new FileOutputStream ( file_path ) , "" ) ; } catch ( Exception e ) { throw new IOException ( e ) ; } } |
public class XAbstractFeatureCallImpl {
    /**
     * <!-- begin-user-doc -->
     * Reports whether the given EMF structural feature holds a non-default
     * value (generated EMF boilerplate).
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
        case XbasePackage.XABSTRACT_FEATURE_CALL__FEATURE:
            return feature != null;
        case XbasePackage.XABSTRACT_FEATURE_CALL__TYPE_ARGUMENTS:
            return typeArguments != null && !typeArguments.isEmpty();
        case XbasePackage.XABSTRACT_FEATURE_CALL__IMPLICIT_RECEIVER:
            return implicitReceiver != null;
        case XbasePackage.XABSTRACT_FEATURE_CALL__INVALID_FEATURE_ISSUE_CODE:
            // "Set" means: differs from the (possibly null) default value.
            return INVALID_FEATURE_ISSUE_CODE_EDEFAULT == null
                ? invalidFeatureIssueCode != null
                : !INVALID_FEATURE_ISSUE_CODE_EDEFAULT.equals(invalidFeatureIssueCode);
        case XbasePackage.XABSTRACT_FEATURE_CALL__VALID_FEATURE:
            return validFeature != VALID_FEATURE_EDEFAULT;
        case XbasePackage.XABSTRACT_FEATURE_CALL__IMPLICIT_FIRST_ARGUMENT:
            return implicitFirstArgument != null;
        }
        return super.eIsSet(featureID);
    }
}
public class DynamicAccessImpl {
    /**
     * Maybe these methods' exposure needs to be re-thought?
     *
     * <p>NOTE(review): this is an unimplemented stub - it ignores all
     * arguments and always returns {@code null}.
     */
    public FieldSearchResult findObjects(Context context, String[] resultFields, int maxResults,
            FieldSearchQuery query) throws ServerException {
        return null;
    }
}
public class OpenALStreamPlayer {
    /**
     * Seeks to a position in the music.
     *
     * @param position position in seconds
     * @return true if the setting of the position was successful
     */
    public boolean setPosition(float position) {
        try {
            // Seeking backwards requires restarting the stream from the top.
            if (getPosition() > position) {
                initStreams();
            }
            float sampleRate = audio.getRate();
            float sampleSize;
            // Bytes per sample frame: 16-bit stereo = 4, 16-bit mono = 2.
            if (audio.getChannels() > 1) {
                sampleSize = 4; // AL10.AL_FORMAT_STEREO16
            } else {
                sampleSize = 2; // AL10.AL_FORMAT_MONO16
            }
            // Read forward buffer by buffer until the target offset is reached.
            while (positionOffset < position) {
                int count = audio.read(buffer);
                if (count != -1) {
                    float bufferLength = (count / sampleSize) / sampleRate;
                    positionOffset += bufferLength;
                } else {
                    // Hit end of stream before the target position.
                    if (loop) {
                        initStreams();
                    } else {
                        done = true;
                    }
                    return false;
                }
            }
            startPlayback();
            return true;
        } catch (IOException e) {
            Log.error(e);
            return false;
        }
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.