signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class CmsSendEmailDialog {

    /**
     * Returns a semicolon separated list of user names.<p>
     *
     * Users from the selected sessions that have no email address are collected
     * separately and reported via {@code setExcludedUsers}; if no user has an
     * email address at all, a commit error is set and "" is returned.
     *
     * @return a semicolon separated list of user names
     */
    @Override
    protected String getToNames() {

        // Users that can receive mail, and users excluded for lacking an address.
        List<String> excluded = new ArrayList<String>();
        List<String> users = new ArrayList<String>();
        Iterator<String> itIds = idsList().iterator();
        while (itIds.hasNext()) {
            String id = itIds.next();
            CmsSessionInfo session = OpenCms.getSessionManager().getSessionInfo(id);
            if (session != null) {
                try {
                    CmsUser user = getCms().readUser(session.getUserId());
                    String userName = user.getFullName();
                    // De-duplicate: the same user may own several sessions.
                    if (!users.contains(userName)) {
                        String emailAddress = user.getEmail();
                        if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(emailAddress)) {
                            users.add(userName);
                        } else {
                            excluded.add(userName);
                        }
                    }
                } catch (Exception e) {
                    // Best effort: a session whose user cannot be read is skipped.
                    LOG.error(e.getLocalizedMessage(), e);
                }
            }
        }
        // Report users excluded for having no email address.
        if (!excluded.isEmpty()) {
            StringBuffer text = new StringBuffer(500);
            text.append(Messages.get().container(Messages.GUI_EXCLUDED_USERS_WARNING_0).key(getLocale()));
            text.append("\n");
            Iterator<String> it = excluded.iterator();
            while (it.hasNext()) {
                text.append("- ");
                text.append(it.next());
                text.append("\n");
            }
            setExcludedUsers(text.toString());
        }
        // No addressable user at all is a commit error.
        if (users.isEmpty()) {
            setCommitErrors(
                Collections.singletonList(
                    (Throwable) new CmsIllegalStateException(
                        Messages.get().container(Messages.ERR_NO_SELECTED_USER_WITH_EMAIL_0))));
            return "";
        }
        // Join the remaining user names with "; ".
        StringBuffer result = new StringBuffer(256);
        Iterator<String> itUsers = users.iterator();
        while (itUsers.hasNext()) {
            result.append(itUsers.next().toString());
            if (itUsers.hasNext()) {
                result.append("; ");
            }
        }
        return result.toString();
    }
}
public class QueryBuilder {

    /**
     * Escapes the characters in a SQL parameter that are significant inside a
     * MySQL string literal (NUL, newline, carriage return, backslash, single
     * and double quote, and Ctrl-Z), prefixing each with a backslash.
     *
     * @param param the raw SQL parameter
     * @return the escaped string
     */
    private static String escapeSQLParam(final String param) {
        final StringBuilder sb = new StringBuilder((int) (param.length() * 1.1));
        for (final char c : param.toCharArray()) {
            switch (c) {
                case 0:      // NUL must be escaped for MySQL
                    sb.append("\\0");
                    break;
                case '\n':   // escaped so log output stays single-line
                    sb.append("\\n");
                    break;
                case '\r':
                    sb.append("\\r");
                    break;
                case '\\':
                    sb.append("\\\\");
                    break;
                case '\'':
                    sb.append("\\'");
                    break;
                case '"':    // better safe than sorry
                    sb.append("\\\"");
                    break;
                case '\032': // Ctrl-Z gives problems on Win32
                    sb.append("\\Z");
                    break;
                default:
                    sb.append(c);
            }
        }
        return sb.toString();
    }
}
public class FileSystemDatasets { /** * Convert a URI for a partition directory in a filesystem dataset to a { @ link View } * object representing that partition . * @ param dataset the ( partitioned ) filesystem dataset * @ param uri the path to the partition directory * @ return a view of the partition */ public static < E > View < E > viewForUri ( Dataset < E > dataset , URI uri ) { } }
if ( dataset instanceof FileSystemDataset ) { return ( ( FileSystemDataset < E > ) dataset ) . viewForUri ( uri ) ; } throw new IllegalArgumentException ( "Not a file system dataset: " + dataset ) ;
public class CommonOps_DDRM {

    /**
     * Performs the following operation:<br>
     * <br>
     * c = c + &alpha; * a * b<br>
     * c<sub>ij</sub> = c<sub>ij</sub> + &alpha; * &sum;<sub>k=1:n</sub>{a<sub>ik</sub> * b<sub>kj</sub>}
     *
     * @param alpha scaling factor.
     * @param a The left matrix in the multiplication operation. Not modified.
     * @param b The right matrix in the multiplication operation. Not modified.
     * @param c Where the results of the operation are stored. Modified.
     */
    public static void multAdd(double alpha, DMatrix1Row a, DMatrix1Row b, DMatrix1Row c) {
        // TODO add a matrix-vector multiply here
        // Wide 'b' favors the cache-friendly reordered kernel.
        final boolean wide = b.numCols >= EjmlParameters.MULT_COLUMN_SWITCH;
        if (wide) {
            MatrixMatrixMult_DDRM.multAdd_reorder(alpha, a, b, c);
        } else {
            MatrixMatrixMult_DDRM.multAdd_small(alpha, a, b, c);
        }
    }
}
public class AFactoryAppBeans {

    /**
     * <p>Get FctBnCnvIbnToColumnValues in lazy mode.</p>
     *
     * On first call the bean is created, wired with its collaborators and
     * cached in {@code beansMap}; later calls return the cached instance.
     *
     * @return FctBnCnvIbnToColumnValues - FctBnCnvIbnToColumnValues
     * @throws Exception - an exception
     */
    public final FctBnCnvIbnToColumnValues lazyGetFctBnCnvIbnToColumnValues() throws Exception {
        String beanName = getFctBnCnvIbnToColumnValuesName();
        // Look the bean up in the factory cache first.
        FctBnCnvIbnToColumnValues fctBnCnvIbnToColumnValues =
            (FctBnCnvIbnToColumnValues) this.beansMap.get(beanName);
        if (fctBnCnvIbnToColumnValues == null) {
            // Not created yet - build, wire dependencies, then cache.
            // NOTE(review): no synchronization here; assumes single-threaded
            // factory initialization - confirm against callers.
            fctBnCnvIbnToColumnValues = new FctBnCnvIbnToColumnValues();
            fctBnCnvIbnToColumnValues.setUtlReflection(lazyGetUtlReflection());
            fctBnCnvIbnToColumnValues.setFieldsRapiHolder(lazyGetHolderRapiFields());
            fctBnCnvIbnToColumnValues.setGettersRapiHolder(lazyGetHolderRapiGetters());
            fctBnCnvIbnToColumnValues.setIsNeedsToSqlEscape(getIsNeedsToSqlEscape());
            fctBnCnvIbnToColumnValues.setSrvSqlEscape(lazyGetSrvSqlEscape());
            this.beansMap.put(beanName, fctBnCnvIbnToColumnValues);
            lazyGetLogger().info(null, AFactoryAppBeans.class, beanName + " has been created.");
        }
        return fctBnCnvIbnToColumnValues;
    }
}
public class RetryerFactory { /** * Creates new instance of retryer based on the config . * Accepted config keys are defined in RetryerFactory as static member variable . * You can use State along with ConfigBuilder and config prefix to build config . * @ param config * @ return */ public static < T > Retryer < T > newInstance ( Config config ) { } }
config = config . withFallback ( DEFAULTS ) ; RetryType type = RetryType . valueOf ( config . getString ( RETRY_TYPE ) . toUpperCase ( ) ) ; switch ( type ) { case EXPONENTIAL : return newExponentialRetryer ( config ) ; case FIXED : return newFixedRetryer ( config ) ; default : throw new IllegalArgumentException ( type + " is not supported" ) ; }
public class syslog_ui_cmd {

    /**
     * <pre>
     * Converts API response of bulk operation into object and returns the object array in case of get request.
     * </pre>
     */
    protected base_resource[] get_nitro_bulk_response(nitro_service service, String response) throws Exception {
        syslog_ui_cmd_responses result = (syslog_ui_cmd_responses)
            service.get_payload_formatter().string_to_resource(syslog_ui_cmd_responses.class, response);
        if (result.errorcode != 0) {
            // An expired session is cleared so the next call re-authenticates.
            if (result.errorcode == SESSION_NOT_EXISTS)
                service.clear_session();
            throw new nitro_exception(result.message, result.errorcode,
                (base_response[]) result.syslog_ui_cmd_response_array);
        }
        // Unwrap the single resource carried by each bulk response entry.
        syslog_ui_cmd[] result_syslog_ui_cmd = new syslog_ui_cmd[result.syslog_ui_cmd_response_array.length];
        for (int i = 0; i < result.syslog_ui_cmd_response_array.length; i++) {
            result_syslog_ui_cmd[i] = result.syslog_ui_cmd_response_array[i].syslog_ui_cmd[0];
        }
        return result_syslog_ui_cmd;
    }
}
public class VersionNumber { /** * Interprets a string with version information . The last version number in the string will be searched and * processed . * @ param text * string with version information * @ return an object of { @ code VersionNumber } , never { @ code null } */ public static VersionNumber parseLastVersionNumber ( @ Nonnull final String text ) { } }
return VersionParser . parseLastVersionNumber ( Check . notNull ( text , "text" ) ) ;
public class BufferingXmlWriter { /** * Internal methods , buffering */ private final void flushBuffer ( ) throws IOException { } }
if ( mOutputPtr > 0 && mOutputBuffer != null ) { int ptr = mOutputPtr ; // Need to update location info , to keep it in sync mLocPastChars += ptr ; mLocRowStartOffset -= ptr ; mOutputPtr = 0 ; mOut . write ( mOutputBuffer , 0 , ptr ) ; }
public class StaticMessageSource { /** * Adds a message to the default locale . * @ param locale The locale * @ param code The code * @ param message The the message * @ return This message source */ public @ Nonnull StaticMessageSource addMessage ( @ Nonnull Locale locale , @ Nonnull String code , @ Nonnull String message ) { } }
ArgumentUtils . requireNonNull ( "locale" , locale ) ; if ( StringUtils . isNotEmpty ( code ) && StringUtils . isNotEmpty ( message ) ) { messageMap . put ( new MessageKey ( locale , code ) , message ) ; } return this ;
public class ContextManager {

    // Compile the patterns once instead of on every call (Pattern.compile is
    // expensive and the patterns are constant).
    /** Matches a complete IPv6 address (lower-case hex), including IPv4-embedded forms. */
    private static final Pattern IPV6_PATTERN = Pattern.compile(
        "^(?:(?:(?:(?:[a-f0-9]{1,4}(?::[a-f0-9]{1,4}){7})|(?:(?!(?:.*[a-f0-9](?::|$)){7,})(?:[a-f0-9]{1,4}(?::[a-f0-9]{1,4}){0,5})?::(?:[a-f0-9]{1,4}(?::[a-f0-9]{1,4}){0,5})?)))|(?:(?:(?:[a-f0-9]{1,4}(?::[a-f0-9]{1,4}){5}:)|(?:(?!(?:.*[a-f0-9]:){5,})(?:[a-f0-9]{1,4}(?::[a-f0-9]{1,4}){0,3})?::(?:[a-f0-9]{1,4}(?::[a-f0-9]{1,4}){0,3}:)?))?(?:(?:25[0-5])|(?:2[0-4][0-9])|(?:1[0-9]{2})|(?:[1-9]?[0-9]))(?:\\.(?:(?:25[0-5])|(?:2[0-4][0-9])|(?:1[0-9]{2})|(?:[1-9]?[0-9]))){3}))$");

    /** Matches a plain dotted-quad IPv4 address. */
    private static final Pattern IPV4_PATTERN = Pattern.compile("^(\\d{1,3}\\.){3}\\d{1,3}$");

    /**
     * Is the address an IPv6 Address.
     *
     * @param host The host string to check; may be bracketed ("[::1]") or null.
     * @return True if the string is in the format of an IPv6 address.
     */
    private static boolean isIPv6Addr(String host) {
        if (host == null) {
            return false;
        }
        // Strip surrounding brackets, e.g. "[::1]" -> "::1".
        if (host.contains("[") && host.contains("]")) {
            host = host.substring(host.indexOf("[") + 1, host.indexOf("]"));
        }
        host = host.toLowerCase();
        // Must look like IPv6 and must NOT be a bare IPv4 dotted quad
        // (the IPv6 pattern's embedded-IPv4 branch would otherwise accept it).
        return IPV6_PATTERN.matcher(host).matches() && !IPV4_PATTERN.matcher(host).matches();
    }
}
public class CrocEyeNotificationHandler { @ Override public void onCommunicationException ( Throwable error ) { } }
addMessage ( MESSAGES . commandCommunicationError ( ) + error . getMessage ( ) , ERRORMESSAGE_STYLE ) ;
public class Ifc4PackageImpl {

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcUShapeProfileDef() {
        if (ifcUShapeProfileDefEClass == null) {
            // Lazily resolve the classifier from the registered IFC4 package;
            // index 741 is fixed by the generated package model.
            ifcUShapeProfileDefEClass = (EClass) EPackage.Registry.INSTANCE
                .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(741);
        }
        return ifcUShapeProfileDefEClass;
    }
}
public class JcValue { /** * < div color = ' red ' style = " font - size : 24px ; color : red " > < b > < i > < u > JCYPHER < / u > < / i > < / b > < / div > * < div color = ' red ' style = " font - size : 18px ; color : red " > < i > return the receiver as a JcNumber < / i > < / div > * < br / > */ public JcNumber asNumber ( ) { } }
JcNumber ret = new JcNumber ( null , this , null ) ; QueryRecorder . recordInvocationConditional ( this , "asNumber" , ret ) ; return ret ;
public class AIStream { /** * Mark each tick in the list as delivered . We assume the list contains TickRange objects corresponding to * V / U ( value ) ticks . * @ param list A list assumed to contain TickRange objects */ public void markListDelivered ( List list ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "markListDelivered" , list ) ; int len = list . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { AIValueTick valueTick = ( AIValueTick ) list . get ( i ) ; valueTick . setDelivered ( true ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "markListDelivered" ) ;
public class CmsObject { /** * Returns all child groups of a group . < p > * @ param groupname the name of the group * @ param includeSubChildren if set also returns all sub - child groups of the given group * @ return a list of all child < code > { @ link CmsGroup } < / code > objects or < code > null < / code > * @ throws CmsException if operation was not successful */ public List < CmsGroup > getChildren ( String groupname , boolean includeSubChildren ) throws CmsException { } }
return m_securityManager . getChildren ( m_context , groupname , includeSubChildren ) ;
public class PdfDictionary { /** * Associates the specified < CODE > PdfObject < / CODE > as value to the * specified < CODE > PdfName < / CODE > as key in this map . * If the < VAR > value < / VAR > is a < CODE > PdfNull < / CODE > , it is treated just as * any other < CODE > PdfObject < / CODE > . If the < VAR > value < / VAR > is * < CODE > null < / CODE > however nothing is done . * @ param key a < CODE > PdfName < / CODE > * @ param value the < CODE > PdfObject < / CODE > to be associated to the * < VAR > key < / VAR > */ public void putEx ( PdfName key , PdfObject value ) { } }
if ( value == null ) { return ; } put ( key , value ) ;
public class AbstractDependenceMeasure {

    /**
     * Compute ranks of all objects, ranging from 1 to len.
     * Ties are given the average rank.
     *
     * @param adapter Data adapter
     * @param data Data array
     * @param idx Data index (assumed sorted ascending by value - TODO confirm;
     *        tie runs are detected by comparing adjacent entries in this order)
     * @return Array of scores
     */
    protected static <A> double[] ranks(final NumberArrayAdapter<?, A> adapter, final A data, int[] idx) {
        final int len = idx.length;
        double[] ret = new double[len];
        for (int i = 0; i < len;) {
            final int start = i++;
            final double val = adapter.getDouble(data, idx[start]);
            // Include ties: extend the run while following values do not exceed val.
            while (i < len && adapter.getDouble(data, idx[i]) <= val) {
                i++;
            }
            // Average 1-based rank of positions start..i-1:
            // (start + i - 1) * .5 + 1 == ((start + 1) + i) / 2.
            final double score = (start + i - 1) * .5 + 1;
            for (int j = start; j < i; j++) {
                // Scatter the shared score back to the original positions.
                ret[idx[j]] = score;
            }
        }
        return ret;
    }
}
public class RandomMatrices_DSCC {

    /**
     * Creates a random symmetric matrix. The entire matrix will be filled in, not just a triangular
     * portion.
     *
     * @param N Number of rows and columns
     * @param nz_total Number of nonzero elements in the triangular portion of the matrix
     * @param min Minimum element value, inclusive
     * @param max Maximum element value, inclusive
     * @param rand Random number generator
     * @return Randomly generated matrix
     */
    public static DMatrixSparseCSC symmetric(int N, int nz_total, double min, double max, Random rand) {
        // Number of elements in the upper triangle, diagonal included.
        final int triangleSize = (N * N + N) / 2;

        // Enumerate every (row,col) of the upper triangle as a flat index.
        final int[] open = new int[triangleSize];
        int fill = 0;
        for (int row = 0; row < N; row++) {
            for (int col = row; col < N; col++) {
                open[fill++] = row * N + col;
            }
        }

        // Randomly draw nz_total distinct locations, then sort the draw.
        UtilEjml.shuffle(open, open.length, 0, nz_total, rand);
        Arrays.sort(open, 0, nz_total);

        // Build in triplet form, mirroring each off-diagonal entry.
        final DMatrixSparseTriplet work = new DMatrixSparseTriplet(N, N, nz_total * 2);
        for (int i = 0; i < nz_total; i++) {
            final int flat = open[i];
            final int row = flat / N;
            final int col = flat % N;
            final double value = rand.nextDouble() * (max - min) + min;
            work.addItem(row, col, value);
            if (row != col) {
                work.addItem(col, row, value);
            }
        }

        final DMatrixSparseCSC result = new DMatrixSparseCSC(N, N, work.nz_length);
        ConvertDMatrixStruct.convert(work, result);
        return result;
    }
}
public class ConverterServerBuilder { /** * Enables the given { @ link com . documents4j . conversion . IExternalConverter } . Any converter that is shipped with * this library is discovered automatically but can be disabled by invoking this method . * @ param externalConverter The converter to be disabled . * @ return This builder . */ public ConverterServerBuilder disable ( Class < ? extends IExternalConverter > externalConverter ) { } }
converterConfiguration . put ( externalConverter , Boolean . FALSE ) ; return this ;
public class LabelOperationMetadata { /** * < code > * . google . cloud . datalabeling . v1beta1 . LabelImagePolylineOperationMetadata image _ polyline _ details = 12; * < / code > */ public com . google . cloud . datalabeling . v1beta1 . LabelImagePolylineOperationMetadata getImagePolylineDetails ( ) { } }
if ( detailsCase_ == 12 ) { return ( com . google . cloud . datalabeling . v1beta1 . LabelImagePolylineOperationMetadata ) details_ ; } return com . google . cloud . datalabeling . v1beta1 . LabelImagePolylineOperationMetadata . getDefaultInstance ( ) ;
public class CommonOps_ZDRM { /** * Performs the following operation : < br > * < br > * c = c + & alpha ; * a * b < br > * c < sub > ij < / sub > = c < sub > ij < / sub > + & alpha ; * & sum ; < sub > k = 1 : n < / sub > { a < sub > ik < / sub > * b < sub > kj < / sub > } * @ param realAlpha real component of scaling factor . * @ param imgAlpha imaginary component of scaling factor . * @ param a The left matrix in the multiplication operation . Not modified . * @ param b The right matrix in the multiplication operation . Not modified . * @ param c Where the results of the operation are stored . Modified . */ public static void multAdd ( double realAlpha , double imgAlpha , ZMatrixRMaj a , ZMatrixRMaj b , ZMatrixRMaj c ) { } }
if ( b . numCols >= EjmlParameters . CMULT_COLUMN_SWITCH ) { MatrixMatrixMult_ZDRM . multAdd_reorder ( realAlpha , imgAlpha , a , b , c ) ; } else { MatrixMatrixMult_ZDRM . multAdd_small ( realAlpha , imgAlpha , a , b , c ) ; }
public class BoxApiBookmark { /** * Gets a request that copies a bookmark * @ param id id of the bookmark to copy * @ param parentId id of the parent folder to copy the bookmark into * @ return request to copy a bookmark */ public BoxRequestsBookmark . CopyBookmark getCopyRequest ( String id , String parentId ) { } }
BoxRequestsBookmark . CopyBookmark request = new BoxRequestsBookmark . CopyBookmark ( id , parentId , getBookmarkCopyUrl ( id ) , mSession ) ; return request ;
public class BuildDatabase { /** * Add a SpecTopic to the database . * @ param specTopic The SpecTopic object to be added . * @ param key A key that represents the Topic mapped to the SpecTopic */ public void add ( final ITopicNode specTopic , final String key ) { } }
if ( specTopic == null ) return ; final Integer topicId = specTopic . getDBId ( ) ; if ( ! topics . containsKey ( topicId ) ) { topics . put ( topicId , new LinkedList < ITopicNode > ( ) ) ; } // Make sure the key exists if ( ! topicsKeys . containsKey ( key ) ) { topicsKeys . put ( key , new LinkedList < ITopicNode > ( ) ) ; } topics . get ( topicId ) . add ( specTopic ) ; topicsKeys . get ( key ) . add ( specTopic ) ;
public class Compaction {

    /**
     * Returns true when the current output file should be closed
     * before processing "internal_key".
     *
     * Tracks how many grandparent-level bytes the output produced so far
     * overlaps; once that exceeds MAX_GRAND_PARENT_OVERLAP_BYTES the counter
     * is reset and a new output file is requested.
     */
    public boolean shouldStopBefore(InternalKey internalKey) {
        // Scan to find earliest grandparent file that contains key.
        InternalKeyComparator internalKeyComparator = inputVersion.getInternalKeyComparator();
        while (grandparentIndex < grandparents.size()
                && internalKeyComparator.compare(internalKey,
                    grandparents.get(grandparentIndex).getLargest()) > 0) {
            if (seenKey) {
                // Only files skipped after the first key count toward overlap.
                overlappedBytes += grandparents.get(grandparentIndex).getFileSize();
            }
            grandparentIndex++;
        }
        seenKey = true;
        if (overlappedBytes > MAX_GRAND_PARENT_OVERLAP_BYTES) {
            // Too much overlap for current output; start new output
            overlappedBytes = 0;
            return true;
        } else {
            return false;
        }
    }
}
public class ModelMetricsBinomial { /** * Build a Binomial ModelMetrics object from target - class probabilities , from actual labels , and a given domain for both labels ( and domain [ 1 ] is the target class ) * @ param targetClassProbs A Vec containing target class probabilities * @ param actualLabels A Vec containing the actual labels ( can be for fewer labels than what ' s in domain , since the predictions can be for a small subset of the data ) * @ return ModelMetrics object */ static public ModelMetricsBinomial make ( Vec targetClassProbs , Vec actualLabels ) { } }
return make ( targetClassProbs , actualLabels , actualLabels . domain ( ) ) ;
public class SecurityServletConfiguratorHelper { /** * Gets a list of zero or more web resource collection objects that represent the * web - resource - collection elements in web . xml and / or web - fragment . xml files . * @ param archiveConstraint the security - constraint * @ return a list of web resource collections */ private List < WebResourceCollection > createWebResourceCollections ( com . ibm . ws . javaee . dd . web . common . SecurityConstraint archiveConstraint , boolean denyUncoveredHttpMethods ) { } }
List < WebResourceCollection > webResourceCollections = new ArrayList < WebResourceCollection > ( ) ; List < com . ibm . ws . javaee . dd . web . common . WebResourceCollection > archiveWebResourceCollections = archiveConstraint . getWebResourceCollections ( ) ; for ( com . ibm . ws . javaee . dd . web . common . WebResourceCollection archiveWebResourceCollection : archiveWebResourceCollections ) { List < String > urlPatterns = archiveWebResourceCollection . getURLPatterns ( ) ; List < String > methods = archiveWebResourceCollection . getHTTPMethods ( ) ; List < String > omissionMethods = archiveWebResourceCollection . getHTTPMethodOmissions ( ) ; webResourceCollections . add ( new WebResourceCollection ( urlPatterns , methods , omissionMethods , denyUncoveredHttpMethods ) ) ; } return webResourceCollections ;
public class DefaultDispatchChallengeHandler { /** * Locate a challenge handler factory to serve the given location and challenge type . * @ param challengeRequest A challenge string from the server . * @ return a challenge handler registered to handle the challenge at the location , * or < code > null < / code > if none could be found . */ ChallengeHandler lookup ( ChallengeRequest challengeRequest ) { } }
ChallengeHandler result = null ; String location = challengeRequest . getLocation ( ) ; if ( location != null ) { Node < ChallengeHandler , UriElement > resultNode = findBestMatchingNode ( location ) ; // If we found an exact or wildcard match , try to find a handler // for the requested challenge . if ( resultNode != null ) { List < ChallengeHandler > handlers = resultNode . getValues ( ) ; if ( handlers != null ) { for ( ChallengeHandler challengeHandler : handlers ) { if ( challengeHandler . canHandle ( challengeRequest ) ) { result = challengeHandler ; break ; } } } } } return result ;
public class WritableUtils { /** * / * Ugly utility , maybe someone else can do this better */ public static String readCompressedString ( DataInput in ) throws IOException { } }
byte [ ] bytes = readCompressedByteArray ( in ) ; if ( bytes == null ) return null ; return new String ( bytes , "UTF-8" ) ;
public class FieldInfo { /** * / * ( non - Javadoc ) * @ see io . github . classgraph . ScanResultObject # setScanResult ( io . github . classgraph . ScanResult ) */ @ Override void setScanResult ( final ScanResult scanResult ) { } }
super . setScanResult ( scanResult ) ; if ( this . typeSignature != null ) { this . typeSignature . setScanResult ( scanResult ) ; } if ( this . typeDescriptor != null ) { this . typeDescriptor . setScanResult ( scanResult ) ; } if ( this . annotationInfo != null ) { for ( final AnnotationInfo ai : this . annotationInfo ) { ai . setScanResult ( scanResult ) ; } }
public class Datatype_Builder {

    /**
     * Sets the value to be returned by {@link Datatype#getType()}.
     *
     * @return this {@code Builder} object
     * @throws NullPointerException if {@code type} is null
     */
    public Datatype.Builder setType(TypeClass type) {
        // Null-check happens before the property is marked as set, so a failed
        // call leaves the builder state unchanged.
        this.type = Objects.requireNonNull(type);
        _unsetProperties.remove(Property.TYPE);
        return (Datatype.Builder) this;
    }
}
public class CassandraClientBase {

    /**
     * Executes Update/Delete CQL query.
     *
     * @param cqlQuery the cql query
     * @return the number of affected rows, or 0 when execution failed
     */
    public int executeUpdateDeleteQuery(String cqlQuery) {
        if (log.isDebugEnabled()) {
            log.debug("Executing cql query {}.", cqlQuery);
        }
        try {
            CqlResult result = (CqlResult) executeCQLQuery(cqlQuery, true);
            return result.getNum();
        } catch (Exception e) {
            // NOTE(review): failures are swallowed and reported as 0 affected
            // rows; callers cannot distinguish "no rows" from "query failed".
            log.error("Error while executing updated query: {}, Caused by: . ", cqlQuery, e);
            return 0;
        }
    }
}
public class FuncCount {

    /**
     * Execute the count() function: returns the number of nodes in the
     * node-set produced by the first argument.
     *
     * @param xctxt The current execution context.
     * @return an XNumber holding the node count (a valid XObject).
     * @throws javax.xml.transform.TransformerException
     */
    public XObject execute(XPathContext xctxt) throws javax.xml.transform.TransformerException {
        DTMIterator nl = m_arg0.asIterator(xctxt, xctxt.getCurrentNode());
        // getLength() lets the iterator implementation optimize the count
        // instead of stepping through every node here.
        int count = nl.getLength();
        nl.detach();
        return new XNumber((double) count);
    }
}
public class UriEscape {

    /**
     * Perform a URI path segment <strong>escape</strong> operation
     * on a <tt>String</tt> input.
     * The following are the only allowed chars in a URI path segment (will not be escaped):
     * <ul>
     *   <li><tt>A-Z a-z 0-9</tt></li>
     *   <li><tt>- . _ ~</tt></li>
     *   <li><tt>! $ &amp; ' ( ) * + , ; =</tt></li>
     *   <li><tt>: @</tt></li>
     * </ul>
     * All other chars will be escaped by converting them to the sequence of bytes that
     * represents them in the specified <em>encoding</em> and then representing each byte
     * in <tt>%HH</tt> syntax, being <tt>HH</tt> the hexadecimal representation of the byte.
     * This method is <strong>thread-safe</strong>.
     *
     * @param text the <tt>String</tt> to be escaped.
     * @param encoding the encoding to be used for escaping; must not be null.
     * @return The escaped result <tt>String</tt>; the exact same object as <tt>text</tt>
     *         when no escaping was required, or <tt>null</tt> if input is <tt>null</tt>.
     */
    public static String escapeUriPathSegment(final String text, final String encoding) {
        // Only the encoding is validated here; a null text is handled downstream.
        if (encoding == null) {
            throw new IllegalArgumentException("Argument 'encoding' cannot be null");
        }
        return UriEscapeUtil.escape(text, UriEscapeUtil.UriEscapeType.PATH_SEGMENT, encoding);
    }
}
public class RedmineManager {

    /**
     * Deletes the given {@link Version} from the {@link Project} that contains it.<br/>
     *
     * @param version the {@link Version} to delete.
     * @throws RedmineAuthenticationException thrown in case something went wrong while trying to login
     * @throws RedmineException thrown in case something went wrong in Redmine
     * @throws NotFoundException thrown in case an object can not be found
     */
    public void deleteVersion(Version version) throws RedmineException {
        transport.deleteObject(Version.class, Integer.toString(version.getId()));
    }
}
public class Ifc2x3tc1PackageImpl {

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getIfcBeamTypeEnum() {
        if (ifcBeamTypeEnumEEnum == null) {
            // Lazily resolve the enum from the registered Ifc2x3tc1 package;
            // index 784 is fixed by the generated package model.
            ifcBeamTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE
                .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(784);
        }
        return ifcBeamTypeEnumEEnum;
    }
}
public class GetRateCardsByStatement {

    /**
     * Runs the example: pages through all USD rate cards and prints each one.
     *
     * @param adManagerServices the services factory.
     * @param session the session.
     * @throws ApiException if the API request failed with one or more service errors.
     * @throws RemoteException if the API request failed due to other errors.
     */
    public static void runExample(AdManagerServices adManagerServices, AdManagerSession session)
            throws RemoteException {
        // Get the RateCardService.
        RateCardServiceInterface rateCardService =
            adManagerServices.get(session, RateCardServiceInterface.class);

        // Create a statement to get all rate cards using USD as currency.
        StatementBuilder statementBuilder =
            new StatementBuilder()
                .where("currencyCode = 'USD'")
                .orderBy("id ASC")
                .limit(StatementBuilder.SUGGESTED_PAGE_LIMIT);

        // Default for total result set size.
        int totalResultSetSize = 0;

        do {
            // Get rate cards by statement.
            RateCardPage page = rateCardService.getRateCardsByStatement(statementBuilder.toStatement());

            if (page.getResults() != null) {
                totalResultSetSize = page.getTotalResultSetSize();
                int i = page.getStartIndex();
                for (RateCard rateCard : page.getResults()) {
                    System.out.printf(
                        "%d) Rate card with ID %d, name '%s', and currency '%s' was found.%n",
                        i++, rateCard.getId(), rateCard.getName(), rateCard.getCurrencyCode());
                }
            }

            // Advance to the next page.
            statementBuilder.increaseOffsetBy(StatementBuilder.SUGGESTED_PAGE_LIMIT);
        } while (statementBuilder.getOffset() < totalResultSetSize);

        System.out.printf("Number of results found: %d%n", totalResultSetSize);
    }
}
public class ClassDescriptorDef {

    /**
     * Returns an iterator of all direct and indirect extents of this class.
     *
     * @return The extents iterator
     */
    public Iterator getAllExtentClasses() {
        ArrayList subTypes = new ArrayList();
        subTypes.addAll(_extents);
        // Breadth-first expansion: the list grows while being indexed, so
        // each discovered subtype's own extents are visited exactly once;
        // the contains() check prevents cycles and duplicates.
        for (int idx = 0; idx < subTypes.size(); idx++) {
            ClassDescriptorDef curClassDef = (ClassDescriptorDef) subTypes.get(idx);
            for (Iterator it = curClassDef.getExtentClasses(); it.hasNext();) {
                ClassDescriptorDef curSubTypeDef = (ClassDescriptorDef) it.next();
                if (!subTypes.contains(curSubTypeDef)) {
                    subTypes.add(curSubTypeDef);
                }
            }
        }
        return subTypes.iterator();
    }
}
public class TypefaceHelper { /** * Return spannable string with typeface in certain style * see : http : / / stackoverflow . com / questions / 8607707 / how - to - set - a - custom - font - in - the - actionbar - title * @ param context to obtain string resource * @ param strResId string resource id , content * @ param collection TypefaceCollection instance * @ param style Typeface . NORMAL , Typeface . BOLD , Typeface . ITALIC or Typeface . BOLD _ ITALIC * @ return SpannableString that can be used in TextView . setText ( ) method */ public static SpannableString typeface ( Context context , int strResId , TypefaceCollection collection , int style ) { } }
return typeface ( context . getString ( strResId ) , collection , style ) ;
public class DateTime { /** * Returns a copy of this datetime minus the specified number of months . * The calculation will do its best to only change the month field * retaining the same day of month . * However , in certain circumstances , it may be necessary to alter * smaller fields . For example , 2007-05-31 minus one month cannot result * in 2007-04-31 , so the day of month is adjusted to 2007-04-30. * The following three lines are identical in effect : * < pre > * DateTime subtracted = dt . minusMonths ( 6 ) ; * DateTime subtracted = dt . minus ( Period . months ( 6 ) ) ; * DateTime subtracted = dt . withFieldAdded ( DurationFieldType . months ( ) , - 6 ) ; * < / pre > * This datetime instance is immutable and unaffected by this method call . * @ param months the amount of months to subtract , may be negative * @ return the new datetime minus the increased months * @ since 1.1 */ public DateTime minusMonths ( int months ) { } }
if ( months == 0 ) { return this ; } long instant = getChronology ( ) . months ( ) . subtract ( getMillis ( ) , months ) ; return withMillis ( instant ) ;
public class UploadMediaBundle {

    /**
     * Runs the example: downloads an HTML5 zip, uploads it as a media bundle
     * and prints the uploaded bundle's properties.
     *
     * @param adWordsServices the services factory.
     * @param session the session.
     * @throws ApiException if the API request failed with one or more service errors.
     * @throws RemoteException if the API request failed due to other errors.
     * @throws IOException if unable to get media data from the URL.
     */
    public static void runExample(AdWordsServicesInterface adWordsServices, AdWordsSession session)
            throws IOException {
        // Get the MediaService.
        MediaServiceInterface mediaService = adWordsServices.get(session, MediaServiceInterface.class);

        // Create HTML5 media.
        byte[] html5Zip =
            com.google.api.ads.common.lib.utils.Media.getMediaDataFromUrl("https://goo.gl/9Y7qI2");

        // Create a media bundle containing the zip file with all the HTML5 components.
        MediaBundle mediaBundle = new MediaBundle();
        mediaBundle.setData(html5Zip);
        mediaBundle.setType(MediaMediaType.MEDIA_BUNDLE);

        // Upload HTML5 zip.
        mediaBundle = (MediaBundle) mediaService.upload(new Media[] {mediaBundle})[0];

        // Display HTML5 zip.
        Map<MediaSize, Dimensions> dimensions = Maps.toMap(mediaBundle.getDimensions());
        System.out.printf(
            "HTML5 media with ID %d, dimensions '%dx%d', and MIME type '%s' " + "was uploaded.%n",
            mediaBundle.getMediaId(),
            dimensions.get(MediaSize.FULL).getWidth(),
            dimensions.get(MediaSize.FULL).getHeight(),
            mediaBundle.getMimeType());
    }
}
public class MultiVertexGeometryImpl { /** * Checked vs . Jan 11 , 2011 */ public void setXY ( int index , double x , double y ) { } }
if ( index < 0 || index >= m_pointCount ) // TODO exc throw new IndexOutOfBoundsException ( ) ; _verifyAllStreams ( ) ; // AttributeStreamOfDbl v = ( AttributeStreamOfDbl ) // m _ vertexAttributes [ 0 ] ; // TODO ask sergey about casts AttributeStreamOfDbl v = ( AttributeStreamOfDbl ) m_vertexAttributes [ 0 ] ; v . write ( index * 2 , x ) ; v . write ( index * 2 + 1 , y ) ; notifyModified ( DirtyFlags . DirtyCoordinates ) ;
public class JSONArray { /** * Append a double value . This increases the array ' s length by one . * @ param value * A double value . * @ throws JSONException * if the value is not finite . * @ return this . */ public JSONArray put ( double value ) throws JSONException { } }
Double d = new Double ( value ) ; JSONObject . testValidity ( d ) ; this . put ( d ) ; return this ;
public class Journal {
    /**
     * Return an iterable to replay the journal by going through all record
     * locations, starting from the first user record of the first data file.
     *
     * @return iterable over user-record locations; empty if there are no data files
     * @throws IOException
     * @throws ClosedJournalException
     * @throws CompactedDataFileException
     */
    public Iterable<Location> redo() throws ClosedJournalException, CompactedDataFileException, IOException {
        Entry<Integer, DataFile> firstEntry = dataFiles.firstEntry();
        if (firstEntry == null) {
            // No data files at all: an empty redo sequence.
            return new Redo(null);
        }
        // Seek to the first USER_RECORD in the first file and iterate from there.
        return new Redo(goToFirstLocation(firstEntry.getValue(), Location.USER_RECORD_TYPE, true));
    }
}
public class WindowsJNIFaxClientSpi {
    /**
     * Cancels an existing fax job via the native Windows fax API.
     *
     * @param serverName the fax server name
     * @param faxJobID   the fax job ID
     */
    private void winCancelFaxJob(String serverName, int faxJobID) {
        // All native calls are serialized on a shared lock — the native layer is
        // not safe for concurrent invocation.
        synchronized (WindowsFaxClientSpiHelper.NATIVE_LOCK) {
            // pre native call setup (must happen while holding the lock)
            this.preNativeCall();
            // invoke native cancel
            WindowsJNIFaxClientSpi.cancelFaxJobNative(serverName, faxJobID);
        }
    }
}
public class UnlimitedConcurrentCache { /** * Replying on the ConcurrentHashMap thread - safe iteration implementation the method will remove all entries holding * SoftReferences to gc - evicted objects . */ @ Override public void cleanUpNullReferences ( ) { } }
for ( Map . Entry < K , V > entry : map . entrySet ( ) ) { Object entryVal = entry . getValue ( ) ; if ( entryVal instanceof SoftReference && ( ( SoftReference ) entryVal ) . get ( ) == null ) { map . remove ( entry . getKey ( ) , entryVal ) ; } }
public class FullDuplexHttpService {
    /**
     * This is where we receive inputs from the client: publishes the request's
     * input stream as the upload channel, then blocks until the exchange is
     * marked completed.
     */
    public synchronized void upload(StaplerRequest req, StaplerResponse rsp) throws InterruptedException, IOException {
        rsp.setStatus(HttpServletResponse.SC_OK);
        InputStream in = req.getInputStream();
        if (DIY_CHUNKING) {
            // Wrap with our own chunk decoder when the container doesn't de-chunk for us.
            in = new ChunkedInputStream(in);
        }
        // publish the upload channel for the consumer side
        upload = in;
        LOGGER.log(Level.FINE, "Recording upload stream {0} for {1}: {2}", new Object[] { upload, uuid, this });
        // Wake any thread blocked waiting for the upload stream to appear
        // (we hold the monitor — the method is synchronized).
        notify();
        // wait until we are done; loop guards against spurious wakeups
        while (!completed) {
            wait();
        }
    }
}
public class RegistryService {
    /**
     * Decides whether an image must be pulled, given its local availability and
     * the configured pull policy (see also #96).
     *
     * @param hasImage   whether the image is already available locally
     * @param pullPolicy the configured pull policy
     * @param imageName  image name, used in the error message
     * @return true if the image should be pulled
     * @throws MojoExecutionException if the policy is {@code Never} and the image is absent
     */
    private boolean imageRequiresPull(boolean hasImage, ImagePullPolicy pullPolicy, String imageName) throws MojoExecutionException {
        // Policy 'Never': never pull; fail fast when the image is not present locally.
        if (pullPolicy == ImagePullPolicy.Never) {
            if (!hasImage) {
                // Fixed message typo ("chose" -> "choose") and stray closing parenthesis.
                throw new MojoExecutionException(
                        String.format("No image '%s' found and pull policy 'Never' is set. Please choose another pull policy or pull the image yourself.", imageName));
            }
            return false;
        }
        // If the image is not available and mode is not ImagePullPolicy.Never --> pull
        if (!hasImage) {
            return true;
        }
        // If pullPolicy == Always --> pull, otherwise not (we have it already)
        return pullPolicy == ImagePullPolicy.Always;
    }
}
public class SqlTask {
    /**
     * Registers the terminal-state listener that finalizes the task: bumps the
     * failed-task counter, freezes the final TaskInfo, tears down the output
     * buffer, and invokes the completion callback.
     * <p>
     * This is a separate method to ensure that the {@code this} reference is
     * not leaked during construction.
     */
    private void initialize(Function<SqlTask, ?> onDone, CounterStat failedTasks) {
        requireNonNull(onDone, "onDone is null");
        requireNonNull(failedTasks, "failedTasks is null");
        taskStateMachine.addStateChangeListener(new StateChangeListener<TaskState>() {
            @Override
            public void stateChanged(TaskState newState) {
                // Only terminal states are of interest here.
                if (!newState.isDone()) {
                    return;
                }
                // Update failed tasks counter
                if (newState == FAILED) {
                    failedTasks.update(1);
                }
                // Store the final task info; CAS loop because another thread may race us.
                while (true) {
                    TaskHolder taskHolder = taskHolderReference.get();
                    if (taskHolder.isFinished()) {
                        // another concurrent worker already set the final state
                        return;
                    }
                    if (taskHolderReference.compareAndSet(taskHolder, new TaskHolder(createTaskInfo(taskHolder), taskHolder.getIoStats()))) {
                        break;
                    }
                }
                // Make sure buffers are cleaned up.
                if (newState == FAILED || newState == ABORTED) {
                    // fail() rather than destroy(): destroying (cleanly closing) buffers
                    // would signal to upstream tasks that everything finished cleanly,
                    // which is wrong for a failed/aborted query.
                    outputBuffer.fail();
                }
                else {
                    outputBuffer.destroy();
                }
                // Run the completion callback last; never let its failure propagate.
                try {
                    onDone.apply(SqlTask.this);
                }
                catch (Exception e) {
                    log.warn(e, "Error running task cleanup callback %s", SqlTask.this.taskId);
                }
            }
        });
    }
}
public class KryoInitializer {
    /**
     * Registers a custom serializer for the given class.
     * See {@link KryoSerializer#registerSerializableClass(Class, Serializer)}.
     *
     * @param clazz      the class to register the serializer for
     * @param serializer the serializer to use for instances of {@code clazz}
     * @param <T>        the serialized type
     * @see KryoSerializer#registerSerializableClass(Class, Serializer)
     */
    <T> void registerSerializableClass(final Class<T> clazz, final Serializer<T> serializer) {
        // The list may be read/written from multiple threads; guard mutation
        // with the list itself as the monitor.
        synchronized (customSerializers) {
            customSerializers.add(new CustomSerializers<>(clazz, serializer));
        }
    }
}
public class PortFile {
    /**
     * Unlock the port file: releases the OS file lock (if held), then releases
     * the in-process semaphore. Safe to call when no lock is held.
     *
     * @throws IOException if releasing the file lock fails
     */
    public void unlock() throws IOException {
        // Idempotent: nothing to do if we never acquired (or already released) the lock.
        if (lock == null) {
            return;
        }
        lock.release();
        // Clear the reference before releasing the semaphore so a newly admitted
        // thread never observes a stale, already-released lock.
        lock = null;
        lockSem.release();
    }
}
public class XPointerEngine { /** * Setting the base uri for the pointer resolution * The base uri is used for xml : base calculations * @ param baseURI of the parent xml * @ return the XPointerEngine for fluent api usage */ public XPointerEngine setBaseURI ( final String baseURI ) { } }
if ( baseURI == null ) { this . baseURIValue = XdmEmptySequence . getInstance ( ) ; } else { this . baseURIValue = new XdmAtomicValue ( baseURI ) ; } return this ;
public class CassandraStorage {
    /**
     * Defines the Pig schema for the column family at the given location.
     * <p>
     * The returned schema is
     * {@code (key, index1:(name, value), index2:(name, value), columns:{(name, value)})}:
     * columns with metadata become named tuples, unknown columns fall into the
     * trailing bag so wide rows are still representable. Returns {@code null}
     * for Super column families, which are not supported.
     */
    public ResourceSchema getSchema(String location, Job job) throws IOException {
        setLocation(location, job);
        CfInfo cfInfo = getCfInfo(loadSignature);
        CfDef cfDef = cfInfo.cfDef;
        // Super column families cannot be mapped to this schema shape.
        if (cfDef.column_type.equals("Super"))
            return null;
        /*
         Our returned schema should look like this:
         (key, index1:(name, value), index2:(name, value), columns:{(name, value)})
         Which is to say, columns that have metadata will be returned as named tuples,
         but unknown columns will go into a bag. This way, wide rows can still be
         handled by the bag, but known columns can easily be referenced.
         */
        // top-level schema, no type
        ResourceSchema schema = new ResourceSchema();
        // get default marshallers and validators
        Map<MarshallerType, AbstractType> marshallers = getDefaultMarshallers(cfDef);
        Map<ByteBuffer, AbstractType> validators = getValidatorMap(cfDef);
        // add key, typed from the key validator
        ResourceFieldSchema keyFieldSchema = new ResourceFieldSchema();
        keyFieldSchema.setName("key");
        keyFieldSchema.setType(getPigType(marshallers.get(MarshallerType.KEY_VALIDATOR)));
        // the catch-all bag for columns without metadata
        ResourceSchema bagSchema = new ResourceSchema();
        ResourceFieldSchema bagField = new ResourceFieldSchema();
        bagField.setType(DataType.BAG);
        bagField.setName("columns");
        // inside the bag, place one tuple with the default comparator/validator schema
        ResourceSchema bagTupleSchema = new ResourceSchema();
        ResourceFieldSchema bagTupleField = new ResourceFieldSchema();
        bagTupleField.setType(DataType.TUPLE);
        ResourceFieldSchema bagcolSchema = new ResourceFieldSchema();
        ResourceFieldSchema bagvalSchema = new ResourceFieldSchema();
        bagcolSchema.setName("name");
        bagvalSchema.setName("value");
        bagcolSchema.setType(getPigType(marshallers.get(MarshallerType.COMPARATOR)));
        bagvalSchema.setType(getPigType(marshallers.get(MarshallerType.DEFAULT_VALIDATOR)));
        bagTupleSchema.setFields(new ResourceFieldSchema[] { bagcolSchema, bagvalSchema });
        bagTupleField.setSchema(bagTupleSchema);
        bagSchema.setFields(new ResourceFieldSchema[] { bagTupleField });
        bagField.setSchema(bagSchema);
        // will contain all fields for this schema
        List<ResourceFieldSchema> allSchemaFields = new ArrayList<ResourceFieldSchema>();
        // add the key first, then the indexed columns, and finally the bag
        allSchemaFields.add(keyFieldSchema);
        if (!widerows && (cfInfo.compactCqlTable || !cfInfo.cql3Table)) {
            // defined validators/indexes: each metadata column becomes a named tuple
            for (ColumnDef cdef : cfDef.column_metadata) {
                // make a new tuple for each col/val pair
                ResourceSchema innerTupleSchema = new ResourceSchema();
                ResourceFieldSchema innerTupleField = new ResourceFieldSchema();
                innerTupleField.setType(DataType.TUPLE);
                innerTupleField.setSchema(innerTupleSchema);
                innerTupleField.setName(new String(cdef.getName()));
                ResourceFieldSchema idxColSchema = new ResourceFieldSchema();
                idxColSchema.setName("name");
                idxColSchema.setType(getPigType(marshallers.get(MarshallerType.COMPARATOR)));
                ResourceFieldSchema valSchema = new ResourceFieldSchema();
                // fall back to the default validator when the column has no specific one
                AbstractType validator = validators.get(cdef.name);
                if (validator == null)
                    validator = marshallers.get(MarshallerType.DEFAULT_VALIDATOR);
                valSchema.setName("value");
                valSchema.setType(getPigType(validator));
                innerTupleSchema.setFields(new ResourceFieldSchema[] { idxColSchema, valSchema });
                allSchemaFields.add(innerTupleField);
            }
        }
        // bag at the end for unknown columns
        allSchemaFields.add(bagField);
        // add top-level index elements if needed
        if (usePartitionFilter) {
            for (ColumnDef cdef : getIndexes()) {
                ResourceFieldSchema idxSchema = new ResourceFieldSchema();
                idxSchema.setName("index_" + new String(cdef.getName()));
                AbstractType validator = validators.get(cdef.name);
                if (validator == null)
                    validator = marshallers.get(MarshallerType.DEFAULT_VALIDATOR);
                idxSchema.setType(getPigType(validator));
                allSchemaFields.add(idxSchema);
            }
        }
        // top level schema contains everything
        schema.setFields(allSchemaFields.toArray(new ResourceFieldSchema[allSchemaFields.size()]));
        return schema;
    }
}
public class AbstractSliding {
    /**
     * Copies the live window of the old ring buffer into {@code arr},
     * preserving each element's slot index, and returns {@code arr}.
     * <p>
     * NOTE(review): assumes {@code arr} has capacity of at least
     * {@code oldLen} and that the fields {@code begin}/{@code end} delimit the
     * live region — confirm against the enclosing class.
     *
     * @param old    the old backing array
     * @param oldLen length of the old backing array
     * @param arr    the new backing array to copy into
     * @return {@code arr}
     */
    protected Object newArray(Object old, int oldLen, Object arr) {
        // Physical slots of the logical begin/end positions within the old ring.
        int sb = Math.floorMod(begin, oldLen);
        int se = Math.floorMod(end, oldLen);
        if (sb < se) {
            // Live region is contiguous: one copy suffices.
            System.arraycopy(old, sb, arr, sb, se - sb);
        }
        else {
            // Live region wraps around the end: copy tail segment, then head segment.
            System.arraycopy(old, sb, arr, sb, oldLen - sb);
            System.arraycopy(old, 0, arr, 0, se);
        }
        return arr;
    }
}
public class RegexUtils {
    /**
     * Returns true if the string matches (full match) any of the specified
     * regexes. Delegates to the three-argument overload with full-match
     * behavior enabled.
     *
     * @param str     the string to match
     * @param regexes the regexes used for matching
     * @return true if the string matches (full match) one or more of the regexes
     */
    public static boolean matchesAny(String str, List<String> regexes) {
        return matchesAny(str, regexes, true);
    }
}
public class EasyBind { /** * Creates a thin wrapper around an observable value to make it monadic . * @ param o ObservableValue to wrap * @ return { @ code o } if { @ code o } is already monadic , or a thin monadic * wrapper around { @ code o } otherwise . */ public static < T > MonadicObservableValue < T > monadic ( ObservableValue < T > o ) { } }
if ( o instanceof MonadicObservableValue ) { return ( MonadicObservableValue < T > ) o ; } else { return new MonadicWrapper < > ( o ) ; }
public class AttributeHelper {
    /**
     * Sets a value on an Attribute, dispatching on the runtime type of the
     * object. Supported types: Short, Byte, String, Integer, Long, Float,
     * Boolean, Double, DevState.
     * <p>
     * NOTE(review): the catch-all swallows every exception (including the
     * DevFailed raised for unsupported types) and only prints the stack trace,
     * even though the method declares {@code throws DevFailed} — confirm this
     * is intentional.
     *
     * @param value     the value to set on the Attribute
     * @param attribute the attribute to set
     * @throws DevFailed
     */
    public static void set_value(final Object value, final Attribute attribute) throws DevFailed {
        try {
            if (value instanceof Short) {
                attribute.set_value(((Short) value).shortValue());
            }
            else if (value instanceof Byte) {
                // Bytes are widened to short — there is no byte overload.
                attribute.set_value(((Byte) value).shortValue());
            }
            else if (value instanceof String) {
                attribute.set_value((String) value);
            }
            else if (value instanceof Integer) {
                attribute.set_value(((Integer) value).intValue());
            }
            else if (value instanceof Long) {
                attribute.set_value(((Long) value).longValue());
            }
            else if (value instanceof Float) {
                // Floats are widened to double — presumably no float overload; verify.
                attribute.set_value(((Float) value).doubleValue());
            }
            else if (value instanceof Boolean) {
                attribute.set_value(((Boolean) value).booleanValue());
            }
            else if (value instanceof Double) {
                attribute.set_value(((Double) value).doubleValue());
            }
            else if (value instanceof DevState) {
                attribute.set_value((DevState) value);
            }
            else {
                // Unsupported runtime type: raise a typed Tango exception.
                Except.throw_exception("TANGO_WRONG_DATA_ERROR",
                        "input type " + value.getClass() + " not supported",
                        "AttributeHelper.insert(Object value,deviceAttributeWritten)");
            }
        } catch (final Exception e) {
            e.printStackTrace();
        }
    }
}
public class WebServiceConnector { /** * { @ inheritDoc } * @ param oclass * @ param options * @ return */ @ Override public FilterTranslator < Operand > createFilterTranslator ( final ObjectClass oclass , final OperationOptions options ) { } }
if ( oclass == null || ( ! oclass . equals ( ObjectClass . ACCOUNT ) ) ) { throw new IllegalArgumentException ( "Invalid objectclass" ) ; } return new WebServiceFilterTranslator ( ) ;
public class LicenseClient {
    /**
     * Gets the access control policy for a resource. May be empty if no such
     * policy or resource exists.
     *
     * <p>Sample code:
     * <pre><code>
     * try (LicenseClient licenseClient = LicenseClient.create()) {
     *   ProjectGlobalLicenseResourceName resource = ProjectGlobalLicenseResourceName.of("[PROJECT]", "[RESOURCE]");
     *   Policy response = licenseClient.getIamPolicyLicense(resource);
     * }
     * </code></pre>
     *
     * @param resource Name or id of the resource for this request.
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    @BetaApi
    public final Policy getIamPolicyLicense(ProjectGlobalLicenseResourceName resource) {
        // Build the HTTP request wrapper and delegate to the request-based overload.
        GetIamPolicyLicenseHttpRequest request = GetIamPolicyLicenseHttpRequest.newBuilder()
                .setResource(resource == null ? null : resource.toString())
                .build();
        return getIamPolicyLicense(request);
    }
}
public class JDBCResultSet {
    /**
     * Updates the designated column with a {@code java.sql.Timestamp} value.
     * The updater methods modify the current row or the insert row only; the
     * underlying database is not changed until {@code updateRow} or
     * {@code insertRow} is called.
     * <p>
     * HSQLDB-specific: HSQLDB supports this feature.
     *
     * @param columnLabel the label for the column specified with the SQL AS
     *        clause; if not specified, the label is the name of the column
     * @param x the new column value
     * @exception SQLException if a database access error occurs, the result
     *            set concurrency is {@code CONCUR_READ_ONLY} or this method is
     *            called on a closed result set
     * @exception SQLFeatureNotSupportedException if the JDBC driver does not
     *            support this method
     * @since JDK 1.2 (JDK 1.1.x developers: read the overview for JDBCResultSet)
     */
    public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException {
        // Resolve the label to a column index and delegate to the index-based overload.
        updateTimestamp(findColumn(columnLabel), x);
    }
}
public class ConfigurableEmitter { /** * Check if this emitter has completed it ' s cycle * @ return True if the emitter has completed it ' s cycle */ public boolean completed ( ) { } }
if ( engine == null ) { return false ; } if ( length . isEnabled ( ) ) { if ( timeout > 0 ) { return false ; } return completed ; } if ( emitCount . isEnabled ( ) ) { if ( leftToEmit > 0 ) { return false ; } return completed ; } if ( wrapUp ) { return completed ; } return false ;
public class RequestsTracker {
    /**
     * Clones this tracker, and zeroes it out afterwards if {@code reset} is
     * true.
     * <p>
     * NOTE(review): {@code succeeded} is reset but its count is not captured
     * into the snapshot constructor — confirm the snapshot is not expected to
     * carry the succeeded count.
     *
     * @param reset zero out this tracker
     * @return clone of this tracker
     */
    public synchronized RequestsTracker snapshot(boolean reset) {
        // Capture counters before any reset so the snapshot is consistent.
        long t = executing.getCount();
        long r = rejected.getCount();
        long f = failed.getCount();
        TimeCounter c = counter.snapshot(reset);
        IntervalHistogram h = histogram.snapshot(reset);
        if (reset) {
            executing.reset();
            rejected.reset();
            failed.reset();
            succeeded.reset();
        }
        return new RequestsTracker(t, r, f, c, h, lastFailureReason, lastRejectReason);
    }
}
public class SegmentAggregator {
    /**
     * Attempts to reconcile the attributes for a given operation. There is no
     * verification done; this operation simply re-writes all the attributes to
     * the index, which is much more efficient than trying to read the values
     * from the index and then comparing them.
     *
     * @param op    The operation to reconcile.
     * @param timer Timer for the operation.
     * @return A CompletableFuture that will indicate when the operation completed.
     */
    private CompletableFuture<Void> reconcileAttributes(AggregatedAppendOperation op, TimeoutTimer timer) {
        // This operation must have previously succeeded if any of the following are true:
        // - If the Attribute Index is sealed, and so is our Segment.
        // - If the Attribute Index does not exist (deleted), and our Segment is deleted or a merged Transaction.
        // handleAttributeException() maps those benign failures to success.
        return handleAttributeException(
                this.dataSource.persistAttributes(this.metadata.getId(), op.attributes, timer.getRemaining()));
    }
}
public class ReadOperationHandler { /** * / * ( non - Javadoc ) * @ see org . jboss . as . cli . OperationCommand # buildRequest ( org . jboss . as . cli . CommandContext ) */ @ Override public ModelNode buildRequestWithoutHeaders ( CommandContext ctx ) throws CommandFormatException { } }
final ParsedCommandLine parsedCmd = ctx . getParsedCommandLine ( ) ; final String name = this . name . getValue ( parsedCmd ) ; if ( name == null || name . isEmpty ( ) ) { final OperationRequestAddress address = getAddress ( ctx ) ; final ModelNode request = Util . buildRequest ( ctx , address , Util . READ_OPERATION_NAMES ) ; if ( ctx . getConfig ( ) . isAccessControl ( ) ) { request . get ( Util . ACCESS_CONTROL ) . set ( true ) ; } return request ; } final OperationRequestAddress address = getAddress ( ctx ) ; ModelNode req = Util . buildRequest ( ctx , address , Util . READ_OPERATION_DESCRIPTION ) ; req . get ( Util . NAME ) . set ( name ) ; return req ;
public class CollectionUtils {
    /**
     * Partitions a list into the specified number of partitions as evenly as
     * is possible. The final "extra" elements that cannot be evenly
     * distributed are distributed starting with the first partitions. For
     * example, three partitions of (1, 2, 3, 4) results in ((1, 4), (2), (3)).
     * Unlike {@link Lists#partition(List, int)}, this returns
     * {@link ImmutableList}s, not list views, and computations are computed
     * eagerly.
     *
     * @param partitions the number of partitions to divide the list into
     * @return a list of the partitions, which are themselves lists
     */
    public static <E> ImmutableList<ImmutableList<E>> partitionAlmostEvenly(final List<E> list, final int partitions) {
        checkNotNull(list);
        checkArgument(partitions > 0, "Number of partitions must be positive");
        checkArgument(partitions <= list.size(), "Cannot request more partitions than there are list items");
        // Divide into partitions of floor(size/partitions) elements; any remainder
        // shows up as extra trailing partitions from Lists#partition.
        final List<List<E>> prelimPartitions =
                Lists.partition(list, IntMath.divide(list.size(), partitions, RoundingMode.DOWN));
        // Create output
        final ImmutableList.Builder<ImmutableList<E>> ret = ImmutableList.builder();
        // If we evenly partitioned, just do the type conversion and return. The type
        // conversion is performed because Lists#partition returns list views.
        if (prelimPartitions.size() == partitions) {
            for (List<E> partition : prelimPartitions) {
                ret.add(ImmutableList.copyOf(partition));
            }
        }
        else {
            // Otherwise, distribute the extras.
            // Make a builder for each output partition and the extras. The extras are
            // anything in the preliminary partitions after the number of partitions we
            // actually want. Thus, the first index of extras is the same as the number
            // of partitions.
            final ImmutableList.Builder<ImmutableList.Builder<E>> builderOfBuilders = ImmutableList.builder();
            final ImmutableList.Builder<E> extrasBuilder = ImmutableList.builder();
            for (int i = 0; i < prelimPartitions.size(); i++) {
                if (i < partitions) {
                    builderOfBuilders.add(ImmutableList.<E>builder().addAll(prelimPartitions.get(i)));
                }
                else {
                    extrasBuilder.addAll(prelimPartitions.get(i));
                }
            }
            final ImmutableList<ImmutableList.Builder<E>> builders = builderOfBuilders.build();
            final ImmutableList<E> extras = extrasBuilder.build();
            // Distribute the extra elements, one per partition starting from the first.
            // We cannot overrun the bounds of builders because the number of extras is
            // always at least one less than the number of builders (otherwise, we
            // would've just had larger partitions).
            int partitionIdx = 0;
            for (E item : extras) {
                builders.get(partitionIdx++).add(item);
            }
            // Fill in output
            for (ImmutableList.Builder<E> builder : builders) {
                ret.add(builder.build());
            }
        }
        final ImmutableList<ImmutableList<E>> finalPartitions = ret.build();
        // Sanity check: the invariant above guarantees exactly 'partitions' outputs.
        checkState(finalPartitions.size() == partitions,
                "Partitioning failed: number of output partitions (" + finalPartitions.size()
                        + ") does not match requested number (" + partitions + ")");
        return finalPartitions;
    }
}
public class Solo {
    /**
     * Scrolls a ListView matching the specified index to the top.
     *
     * @param index the index of the {@link ListView} to scroll. {@code 0} if only one list is available
     * @return {@code true} if more scrolling can be performed
     */
    public boolean scrollListToTop(int index) {
        // Optional command logging, controlled by configuration.
        if (config.commandLogging) {
            Log.d(config.commandLoggingTag, "scrollListToTop(" + index + ")");
        }
        // Wait for the list view to appear, then scroll it all the way up.
        return scroller.scrollList(waiter.waitForAndGetView(index, ListView.class), Scroller.UP, true);
    }
}
public class CollectionUtil {
    /**
     * Converts a list of boxed numbers into a primitive {@code long[]}.
     * <p>
     * Generalized: elements may be any {@link Number} (Long, Integer, Short,
     * ...), not only {@code Long} — the previous {@code (Long)} cast threw
     * {@link ClassCastException} for other numeric types.
     *
     * @param values a {@link java.util.List} of {@link Number} elements
     * @return a {@code long[]} (as {@link java.lang.Object}) with the values in order
     * @throws ClassCastException   if an element is not a {@link Number}
     * @throws NullPointerException if the list or an element is null
     */
    public static Object toPrimitiveLongArray(List<?> values) {
        long[] array = new long[values.size()];
        int cursor = 0;
        for (Object o : values) {
            // Number.longValue() accepts any boxed numeric type.
            array[cursor++] = ((Number) o).longValue();
        }
        return array;
    }
}
public class ModifyColumnFamiliesRequest { /** * Configures the name and GcRule of the new ColumnFamily to be created * @ param familyId * @ param gcRule * @ return */ public ModifyColumnFamiliesRequest addFamily ( String familyId , GCRule gcRule ) { } }
Preconditions . checkNotNull ( gcRule ) ; Modification . Builder modification = Modification . newBuilder ( ) . setId ( familyId ) ; modification . getCreateBuilder ( ) . setGcRule ( gcRule . toProto ( ) ) ; modFamilyRequest . addModifications ( modification . build ( ) ) ; return this ;
public class CmsExtendedWorkflowManager {
    /**
     * Helper method to check whether a project exists.<p>
     *
     * @param projectName the project name
     * @return true if the project exists
     */
    protected boolean existsProject(String projectName) {
        try {
            // readProject throws when the project does not exist; success means it does.
            m_adminCms.readProject(projectName);
            return true;
        } catch (CmsException e) {
            // Treat any read failure as "project does not exist".
            return false;
        }
    }
}
public class SOD { /** * Compute the per - dimension variances for the given neighborhood and center . * @ param relation Data relation * @ param center Center vector * @ param neighborhood Neighbors * @ return Per - dimension variances . */ private static double [ ] computePerDimensionVariances ( Relation < ? extends NumberVector > relation , double [ ] center , DBIDs neighborhood ) { } }
final int dim = center . length ; double [ ] variances = new double [ dim ] ; for ( DBIDIter iter = neighborhood . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { NumberVector databaseObject = relation . get ( iter ) ; for ( int d = 0 ; d < dim ; d ++ ) { final double deviation = databaseObject . doubleValue ( d ) - center [ d ] ; variances [ d ] += deviation * deviation ; } } return VMath . timesEquals ( variances , 1. / neighborhood . size ( ) ) ;
public class BitmapUtil {
    /**
     * Calculates the power-of-two sample size, which should be used to
     * downsample an image to a maximum width and height.
     * <p>
     * NOTE(review): the loop condition uses {@code &&}, so scaling stops as
     * soon as ONE dimension fits — the result may still exceed the other
     * maximum. If both maxima must be honored strictly, the condition should
     * be {@code ||}; confirm the intended contract before changing.
     *
     * @param imageDimensions pair containing the width and height of the image
     *        to be downsampled; may not be null
     * @param maxWidth  the maximum width in pixels; must be at least 1
     * @param maxHeight the maximum height in pixels; must be at least 1
     * @return the calculated sample size (a power of two, minimum 1)
     */
    private static int getSampleSize(@NonNull final Pair<Integer, Integer> imageDimensions,
            final int maxWidth, final int maxHeight) {
        Condition.INSTANCE.ensureNotNull(imageDimensions, "The image dimensions may not be null");
        Condition.INSTANCE.ensureAtLeast(maxWidth, 1, "The maximum width must be at least 1");
        Condition.INSTANCE.ensureAtLeast(maxHeight, 1, "The maximum height must be at least 1");
        int width = imageDimensions.first;
        int height = imageDimensions.second;
        int sampleSize = 1;
        if (width > maxWidth || height > maxHeight) {
            int halfWidth = width / 2;
            int halfHeight = height / 2;
            // Double the sample size while the half dimensions still exceed both maxima.
            while ((halfWidth / sampleSize) > maxWidth && (halfHeight / sampleSize) > maxHeight) {
                sampleSize *= 2;
            }
        }
        return sampleSize;
    }
}
public class CreateTypeRequestMarshaller {
    /**
     * Marshalls the given {@link CreateTypeRequest} fields (apiId, definition,
     * format) into the protocol marshaller.
     *
     * @param createTypeRequest   the request to marshall; must not be null
     * @param protocolMarshaller  target marshaller
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(CreateTypeRequest createTypeRequest, ProtocolMarshaller protocolMarshaller) {
        if (createTypeRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Marshall each field against its pre-declared binding.
            protocolMarshaller.marshall(createTypeRequest.getApiId(), APIID_BINDING);
            protocolMarshaller.marshall(createTypeRequest.getDefinition(), DEFINITION_BINDING);
            protocolMarshaller.marshall(createTypeRequest.getFormat(), FORMAT_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause for diagnosis.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ReceivedMessageRequestInfo {
    /**
     * Clears this controllable's state (issue time, timeout, selection
     * criteria and acking DME) so it no longer references the request.
     *
     * @see com.ibm.ws.sib.processor.runtime.AbstractControllable#dereferenceControllable()
     */
    public void dereferenceControllable() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "dereferenceControllable");
        // Reset all request-related state to its empty/zero form.
        _issueTime = 0;
        _timeout = 0;
        _selectionCriterias = null;
        _ackingDME = 0;
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "dereferenceControllable");
    }
}
public class RedisClusterClient {
    /**
     * Create a connection to a redis socket address: builds the node endpoint
     * (optionally wrapped with command-expiry handling), the stateful
     * connection, and connects asynchronously. On connect failure the
     * connection is closed.
     *
     * @param codec Use this codec to encode/decode keys and values, must not be {@literal null}
     * @param nodeId the nodeId
     * @param clusterWriter global cluster writer
     * @param socketAddressSupplier supplier for the socket address
     * @param <K> Key type
     * @param <V> Value type
     * @return A new connection
     */
    <K, V> ConnectionFuture<StatefulRedisConnection<K, V>> connectToNodeAsync(RedisCodec<K, V> codec, String nodeId,
            RedisChannelWriter clusterWriter, Mono<SocketAddress> socketAddressSupplier) {
        assertNotNull(codec);
        assertNotEmpty(initialUris);
        LettuceAssert.notNull(socketAddressSupplier, "SocketAddressSupplier must not be null");
        ClusterNodeEndpoint endpoint = new ClusterNodeEndpoint(clientOptions, getResources(), clusterWriter);
        RedisChannelWriter writer = endpoint;
        // Optionally decorate the writer so commands can time out per client options.
        if (CommandExpiryWriter.isSupported(clientOptions)) {
            writer = new CommandExpiryWriter(writer, clientOptions, clientResources);
        }
        StatefulRedisConnectionImpl<K, V> connection = new StatefulRedisConnectionImpl<K, V>(writer, codec, timeout);
        ConnectionFuture<StatefulRedisConnection<K, V>> connectionFuture = connectStatefulAsync(connection, codec,
                endpoint, getFirstUri(), socketAddressSupplier,
                () -> new CommandHandler(clientOptions, clientResources, endpoint));
        // Ensure resources are released if the asynchronous connect fails.
        return connectionFuture.whenComplete((conn, throwable) -> {
            if (throwable != null) {
                connection.close();
            }
        });
    }
}
public class CFEndPointSerializer {
    /**
     * Determine the type of the Object passed in and build the XML attribute
     * pair {@code <name>type="<class>" <name>="<value>"} for it. Only simple
     * value types (String/StringBuffer/CharBuffer and the boxed numerics) are
     * supported; other types are skipped.
     *
     * @param name attribute name prefix to emit
     * @param o    the value object
     * @return StringBuilder with the attribute pair, or null if the type is unsupported
     */
    static private StringBuilder determineType(String name, Object o) {
        String value = null;
        if (o instanceof String || o instanceof StringBuffer || o instanceof java.nio.CharBuffer
                || o instanceof Integer || o instanceof Long || o instanceof Byte || o instanceof Double
                || o instanceof Float || o instanceof Short || o instanceof BigInteger
                || o instanceof java.math.BigDecimal) {
            value = o.toString();
        }
        else {
            // Unsupported type: trace and signal the caller to skip it.
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Skipping class: " + o.getClass());
            }
            return null;
        }
        // Emit: <name>type="class" <name>="o"
        StringBuilder buffer = new StringBuilder(48);
        buffer.append(name);
        buffer.append("type=\"");
        // CharBuffer is abstract so we might get HeapCharBuffer here; force it
        // to the generic layer in the XML output.
        if (o instanceof java.nio.CharBuffer) {
            buffer.append("java.nio.CharBuffer");
        }
        else {
            buffer.append(o.getClass().getName());
        }
        buffer.append("\" ");
        buffer.append(name);
        buffer.append("=\"");
        buffer.append(value);
        buffer.append("\"");
        return buffer;
    }
}
public class Distance {

    /**
     * Gets the Correlation distance between two points: one minus the Pearson
     * correlation coefficient of the two vectors.
     *
     * @param p A point in space.
     * @param q A point in space (must have the same length as {@code p}).
     * @return The Correlation distance between p and q — 0 for perfectly
     *         positively correlated vectors, 2 for perfectly anti-correlated
     *         ones; NaN when either vector has zero variance.
     */
    public static double Correlation(double[] p, double[] q) {
        // Compute the means of both vectors.
        double pMean = 0;
        double qMean = 0;
        for (int i = 0; i < p.length; i++) {
            pMean += p[i];
            qMean += q[i];
        }
        pMean /= p.length;
        qMean /= q.length;

        double num = 0;
        double den1 = 0;
        double den2 = 0;
        for (int i = 0; i < p.length; i++) {
            double dp = p[i] - pMean;
            double dq = q[i] - qMean;
            num += dp * dq;
            den1 += dp * dp;
            // BUG FIX: the original centered q[i] with p's mean here
            // ("q[i] + x" instead of "q[i] + y"), which skews the denominator
            // whenever the two means differ.
            den2 += dq * dq;
        }
        return 1 - (num / (Math.sqrt(den1) * Math.sqrt(den2)));
    }
}
public class GzipOutputHandler { /** * Create the output bytebuffer based on the output compressed storage . * @ param len * @ return WsByteBuffer */ private WsByteBuffer makeBuffer ( int len ) { } }
WsByteBuffer buffer = HttpDispatcher . getBufferManager ( ) . allocateDirect ( len ) ; buffer . put ( this . buf , 0 , len ) ; buffer . flip ( ) ; return buffer ;
public class AvatarStorageSetup { /** * Shared image needs to be in file storage , or QJM providing that QJM also * stores edits . */ private static String checkImageStorage ( URI sharedImage , URI sharedEdits ) { } }
if ( sharedImage . getScheme ( ) . equals ( NNStorage . LOCAL_URI_SCHEME ) ) { // shared image is stored in file storage return "" ; } else if ( sharedImage . getScheme ( ) . equals ( QuorumJournalManager . QJM_URI_SCHEME ) && sharedImage . equals ( sharedEdits ) ) { // image is stored in qjm together with edits return "" ; } return "Shared image uri: " + sharedImage + " must be either file storage" + " or be equal to shared edits storage " + sharedEdits + ". " ;
public class AstyanaxTableDAO {
    /**
     * MaintenanceDAO
     * <p>
     * Lazily scans the system table and yields one (tableName, MaintenanceOp)
     * entry per table that currently has a pending maintenance operation;
     * tables without one are skipped. The returned iterator is backed by the
     * live scan iterator, so it should be consumed by a single thread.
     */
    @Override
    public Iterator<Map.Entry<String, MaintenanceOp>> listMaintenanceOps() {
        // Strongly-consistent, unbounded scan over the system table rows.
        final Iterator<Map<String, Object>> tableIter =
                _backingStore.scan(_systemTable, null, LimitCounter.max(), ReadConsistency.STRONG);
        return new AbstractIterator<Map.Entry<String, MaintenanceOp>>() {
            @Override
            protected Map.Entry<String, MaintenanceOp> computeNext() {
                // Advance past tables with no pending op until one is found.
                while (tableIter.hasNext()) {
                    TableJson json = new TableJson(tableIter.next());
                    MaintenanceOp op = getNextMaintenanceOp(json, false /* don't expose task outside this class */);
                    if (op != null) {
                        return Maps.immutableEntry(json.getTable(), op);
                    }
                }
                return endOfData();
            }
        };
    }
}
public class PathsDocumentExtension {
    /**
     * Returns title level offset from 1 to apply to content
     *
     * @param context context
     * @return title level offset
     */
    protected int levelOffset(Context context) {
        // TODO: Unused method, make sure this is never used and then remove it.
        switch (context.position) {
            case DOCUMENT_BEFORE:
            case DOCUMENT_AFTER:
                return 0;
            case DOCUMENT_BEGIN:
            case DOCUMENT_END:
            case OPERATION_BEFORE:
            case OPERATION_AFTER:
                return 1;
            // Operation begin/end and all the section before/after markers share
            // the same dynamic offset (the original listed them as two groups
            // with identical bodies).
            case OPERATION_BEGIN:
            case OPERATION_END:
            case OPERATION_DESCRIPTION_BEFORE:
            case OPERATION_DESCRIPTION_AFTER:
            case OPERATION_PARAMETERS_BEFORE:
            case OPERATION_PARAMETERS_AFTER:
            case OPERATION_RESPONSES_BEFORE:
            case OPERATION_RESPONSES_AFTER:
            case OPERATION_SECURITY_BEFORE:
            case OPERATION_SECURITY_AFTER:
                return increaseLevelOffset(2);
            case OPERATION_DESCRIPTION_BEGIN:
            case OPERATION_DESCRIPTION_END:
            case OPERATION_PARAMETERS_BEGIN:
            case OPERATION_PARAMETERS_END:
            case OPERATION_RESPONSES_BEGIN:
            case OPERATION_RESPONSES_END:
            case OPERATION_SECURITY_BEGIN:
            case OPERATION_SECURITY_END:
                return 3;
            default:
                throw new RuntimeException(String.format("Unknown position '%s'", context.position));
        }
    }
}
public class ExactQPathEntryFilter { /** * { @ inheritDoc } */ public boolean accept ( ItemData item ) { } }
QPathEntry itemEntry = item . getQPath ( ) . getEntries ( ) [ item . getQPath ( ) . getDepth ( ) ] ; return entry . equals ( itemEntry ) ;
public class FrequencyCap {
    /**
     * Sets the timeUnit value for this FrequencyCap.
     *
     * @param timeUnit The unit of time for specifying the time period.
     */
    public void setTimeUnit(com.google.api.ads.admanager.axis.v201808.TimeUnit timeUnit) {
        // Plain bean setter (generated Axis code); no validation performed.
        this.timeUnit = timeUnit;
    }
}
public class JobInProgressTraits { /** * Return a vector of setup TaskInProgress objects */ public Vector < TaskInProgress > reportSetupTIPs ( boolean shouldBeComplete ) { } }
Vector < TaskInProgress > results = new Vector < TaskInProgress > ( ) ; for ( int i = 0 ; i < setup . length ; i ++ ) { if ( setup [ i ] . isComplete ( ) == shouldBeComplete ) { results . add ( setup [ i ] ) ; } } return results ;
public class XForLoopExpressionImpl {
    /**
     * <!-- begin-user-doc -->
     * EMF reflective getter: maps a structural-feature ID to the corresponding
     * feature value, delegating unknown IDs to the superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case XbasePackage.XFOR_LOOP_EXPRESSION__FOR_EXPRESSION:
                return getForExpression();
            case XbasePackage.XFOR_LOOP_EXPRESSION__EACH_EXPRESSION:
                return getEachExpression();
            case XbasePackage.XFOR_LOOP_EXPRESSION__DECLARED_PARAM:
                return getDeclaredParam();
        }
        // Not a feature of this class: let the EMF superclass resolve it.
        return super.eGet(featureID, resolve, coreType);
    }
}
public class PageFlowRequestProcessor { /** * An opportunity to process a page forward in a different way than performing a server forward . The default * implementation looks for a file on classpath called * META - INF / pageflow - page - servlets / < i > path - to - page < / i > . properties ( e . g . , * " / META - INF / pageflow - page - servlets / foo / bar / hello . jsp . properties " ) . This file contains mappings from * < i > platform - name < / i > ( the value returned by { @ link ServletContainerAdapter # getPlatformName } ) to the name of a Servlet * class that will process the page request . If the current platform name is not found , the value " default " is * tried . An example file might look like this : * < pre > * tomcat = org . apache . jsp . foo . bar . hello _ jsp * default = my . servlets . foo . bar . hello * < / pre > * @ param pagePath the webapp - relative path to the page , e . g . , " / foo / bar / hello . jsp " * @ param request the current HttpServletRequest * @ param response the current HttpServletResponse * @ return < code > true < / code > if the method handled the request , in which case it should not be forwarded . * @ throws IOException * @ throws ServletException */ private boolean processPageForward ( String pagePath , HttpServletRequest request , HttpServletResponse response ) throws IOException , ServletException { } }
Class pageServletClass = ( Class ) _pageServletClasses . get ( pagePath ) ; if ( pageServletClass == null ) { pageServletClass = Void . class ; ClassLoader cl = DiscoveryUtils . getClassLoader ( ) ; String path = "META-INF/pageflow-page-servlets" + pagePath + ".properties" ; InputStream in = cl . getResourceAsStream ( path ) ; if ( in != null ) { String className = null ; try { Properties props = new Properties ( ) ; props . load ( in ) ; className = props . getProperty ( _servletContainerAdapter . getPlatformName ( ) ) ; if ( className == null ) className = props . getProperty ( "default" ) ; if ( className != null ) { pageServletClass = cl . loadClass ( className ) ; if ( Servlet . class . isAssignableFrom ( pageServletClass ) ) { if ( LOG . isInfoEnabled ( ) ) { LOG . info ( "Loaded page Servlet class " + className + " for path " + pagePath ) ; } } else { pageServletClass = Void . class ; LOG . error ( "Page Servlet class " + className + " for path " + pagePath + " does not extend " + Servlet . class . getName ( ) ) ; } } } catch ( IOException e ) { LOG . error ( "Error while reading " + path , e ) ; } catch ( ClassNotFoundException e ) { LOG . error ( "Error while loading page Servlet class " + className , e ) ; } } _pageServletClasses . put ( pagePath , pageServletClass ) ; } if ( pageServletClass . equals ( Void . class ) ) { return false ; } try { Servlet pageServlet = ( Servlet ) pageServletClass . newInstance ( ) ; pageServlet . init ( new PageServletConfig ( pagePath ) ) ; ensurePageServletFilter ( ) . doFilter ( request , response , new PageServletFilterChain ( pageServlet ) ) ; return true ; } catch ( InstantiationException e ) { LOG . error ( "Error while instantiating page Servlet of type " + pageServletClass . getName ( ) , e ) ; } catch ( IllegalAccessException e ) { LOG . error ( "Error while instantiating page Servlet of type " + pageServletClass . getName ( ) , e ) ; } return false ;
public class ExceptionDestinationHandlerImpl {
    /**
     * This method contains the routine used to handle an undeliverable message. The
     * method examines the attributes of a message to determine what to do with it.
     * It is possible for a message to be discarded, blocked, or sent to an exception
     * destination.
     *
     * @param msg - The undeliverable message
     * @param tran - The transaction that the message was delivered under
     * @param exceptionReason - The reason why the message could not be delivered
     * @param exceptionStrings - A list of inserts to place into an error message
     * @return A code indicating what we did with the message
     */
    public UndeliverableReturnCode handleUndeliverableMessage(SIMPMessage message, TransactionCommon tran,
            int exceptionReason, String[] exceptionStrings) {
        // F001333-14610
        // Delegate down onto the new overload, passing a null subscription ID.
        return handleUndeliverableMessage(message, tran, exceptionReason, exceptionStrings, null);
    }
}
public class ObjectAreaSizeImpl {
    /**
     * <!-- begin-user-doc -->
     * EMF reflective getter: maps a structural-feature ID to the corresponding
     * feature value, delegating unknown IDs to the superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case AfplibPackage.OBJECT_AREA_SIZE__SIZE_TYPE:
                return getSizeType();
            case AfplibPackage.OBJECT_AREA_SIZE__XOA_SIZE:
                return getXoaSize();
            case AfplibPackage.OBJECT_AREA_SIZE__YOA_SIZE:
                return getYoaSize();
        }
        // Not a feature of this class: let the EMF superclass resolve it.
        return super.eGet(featureID, resolve, coreType);
    }
}
public class GetAllImagesAndVideos {
    /**
     * Runs the example: pages through all IMAGE and VIDEO media for the account
     * and prints each entry's ID, dimensions and MIME type.
     *
     * @param adWordsServices the services factory.
     * @param session the session.
     * @throws ApiException if the API request failed with one or more service errors.
     * @throws RemoteException if the API request failed due to other errors.
     */
    public static void runExample(AdWordsServicesInterface adWordsServices, AdWordsSession session)
            throws RemoteException {
        // Get the MediaService.
        MediaServiceInterface mediaService = adWordsServices.get(session, MediaServiceInterface.class);

        int offset = 0;

        // Create selector.
        SelectorBuilder builder = new SelectorBuilder();
        Selector selector = builder
                .fields(MediaField.MediaId, MediaField.Width, MediaField.Height, MediaField.MimeType)
                .orderAscBy(MediaField.MediaId)
                .offset(offset)
                .limit(PAGE_SIZE)
                .in(MediaField.Type, "IMAGE", "VIDEO")
                .build();

        MediaPage page = null;
        do {
            // Get the next page of media entries.
            page = mediaService.get(selector);

            // Display images.
            if (page != null && page.getEntries() != null) {
                for (Media media : page.getEntries()) {
                    Map<MediaSize, Dimensions> dimensions = Maps.toMap(media.getDimensions());
                    System.out.printf("Media with ID %d, dimensions %s, and MIME type '%s' was found.%n",
                            media.getMediaId(), toString(dimensions.get(MediaSize.FULL)), media.getMediaType());
                }
            } else {
                System.out.println("No images/videos were found.");
            }

            offset += PAGE_SIZE;
            selector = builder.increaseOffsetBy(PAGE_SIZE).build();
            // BUG FIX: the original loop condition dereferenced page without a
            // null check; a null page (already tolerated by the body above)
            // would have thrown a NullPointerException here.
        } while (page != null && offset < page.getTotalNumEntries());
    }
}
public class AggregateVocab { /** * Returns a vocabulary composed of the union of the vocabularies given as * parameter . The given vocabularies must have the same base URI . * @ param vocabs * the vocabularies to aggregate . * @ return the aggregated vocabulary . */ public static Vocab of ( Vocab ... vocabs ) { } }
return new AggregateVocab ( new ImmutableList . Builder < Vocab > ( ) . add ( vocabs ) . build ( ) ) ;
public class WebcamLock {
    /**
     * Lock webcam.
     * <p>
     * No-op when locking is disabled. Throws if the webcam is already locked
     * (e.g. by another process); uses a CAS on the local flag so that only one
     * thread in this JVM performs the lock and starts the updater thread.
     */
    public void lock() {
        // Locking can be globally disabled; then this call does nothing.
        if (disabled.get()) {
            return;
        }
        // Fail fast if the lock is already held (checked before the CAS).
        if (isLocked()) {
            throw new WebcamLockException(String.format("Webcam %s has already been locked", webcam.getName()));
        }
        // CAS guards against two local threads racing past the check above;
        // the loser simply returns without starting a second updater.
        if (!locked.compareAndSet(false, true)) {
            return;
        }
        LOG.debug("Lock {}", webcam);
        // Write the lock record immediately, then keep it fresh from a
        // background updater thread.
        update();
        updater = new LockUpdater();
        updater.start();
    }
}
public class TCPMasterConnection { /** * Opens this < tt > TCPMasterConnection < / tt > . * @ param useRtuOverTcp True if the RTU protocol should be used over TCP * @ throws Exception if there is a network failure . */ public void connect ( boolean useRtuOverTcp ) throws Exception { } }
if ( ! isConnected ( ) ) { logger . debug ( "connect()" ) ; // Create a socket without auto - connecting socket = new Socket ( ) ; socket . setReuseAddress ( true ) ; socket . setSoLinger ( true , 1 ) ; socket . setKeepAlive ( true ) ; setTimeout ( timeout ) ; // Connect - only wait for the timeout number of milliseconds socket . connect ( new InetSocketAddress ( address , port ) , timeout ) ; // Prepare the transport prepareTransport ( useRtuOverTcp ) ; connected = true ; }
public class RelaxNGDefaultsComponent {
    /**
     * Check and add defaults.
     * <p>
     * For each default attribute declared by the RelaxNG schema for the given
     * element, adds it to {@code atts} unless it is already present, resolving
     * the namespace prefix: an in-scope prefix is reused when one exists,
     * otherwise a new {@code xmlns:nsN} (or {@code ditaarch}) binding is
     * declared on the element. Added attributes are marked as not specified.
     *
     * @param name The element name
     * @param atts The attributes
     */
    private void checkAndAddDefaults(QName name, XMLAttributes atts) {
        List<RelaxNGDefaultValues.Attribute> def = defaults.getDefaultAttributes(name.localpart, name.uri);
        if (def != null) {
            for (RelaxNGDefaultValues.Attribute a : def) {
                // EXM-24143 it is possible that the namespace of the default attribute is empty
                // and the namespace of the attribute declared in the XMLAttributes is NULL.
                boolean alreadyDeclared = false;
                alreadyDeclared = atts.getIndex(a.namespace, a.localName) >= 0;
                if (!alreadyDeclared) {
                    if ("".equals(a.namespace)) {
                        // Extra check with NULL Namespace
                        alreadyDeclared = atts.getIndex(null, a.localName) >= 0;
                    }
                }
                if (!alreadyDeclared) {
                    String prefix = null;
                    String rawname = a.localName;
                    if (a.namespace != null && a.namespace.length() > 0) {
                        // Prefer a prefix already bound in the namespace context.
                        prefix = context.getPrefix(a.namespace);
                        if (prefix == null) {
                            // Fall back to scanning xmlns: declarations present on this element.
                            for (int i = 0; i < atts.getLength(); i++) {
                                String attname = atts.getQName(i);
                                if (attname.startsWith("xmlns:")) {
                                    if (a.namespace.equals(atts.getValue(i))) {
                                        prefix = attname.substring(6);
                                    }
                                }
                            }
                        }
                        if (prefix != null && prefix.length() > 0) {
                            rawname = prefix + ":" + a.localName;
                            // double check in case of no namespace aware parsers.
                            // if we want to fully handle this case we may need further
                            // processing.
                            if (atts.getIndex(rawname) < 0) {
                                QName attName = new QName(fSymbolTable.addSymbol(prefix),
                                        fSymbolTable.addSymbol(a.localName),
                                        fSymbolTable.addSymbol(rawname),
                                        fSymbolTable.addSymbol(a.namespace));
                                atts.addAttribute(attName, "CDATA", a.value);
                                int attrIndex = atts.getIndex(attName.uri, attName.localpart);
                                // Mark as defaulted, i.e. not specified in the document.
                                atts.setSpecified(attrIndex, false);
                            }
                        } else {
                            // No usable prefix in scope: invent one and declare it.
                            int k = 1;
                            if (
                            // EXM-24494 Prefer this prefix.
                            // See the implementation in: org.dita.dost.reader.MergeTopicParser.startElement(String, String, String, Attributes)
                            // If the file is a composite no attributes are copied from the root element.
                            "http://dita.oasis-open.org/architecture/2005/".equals(a.namespace)) {
                                prefix = "ditaarch";
                            } else {
                                prefix = "ns" + k;
                            }
                            // Find a prefix that is neither bound in scope nor declared on this element.
                            while (context.getURI(prefix) != null || atts.getValue("xmlns:" + prefix) != null) {
                                k++;
                                prefix = "ns" + k;
                            }
                            rawname = prefix + ":" + a.localName;
                            // Declare the new namespace binding on the element...
                            QName attNs = new QName(fSymbolTable.addSymbol("xmlns"),
                                    fSymbolTable.addSymbol(prefix),
                                    fSymbolTable.addSymbol("xmlns:" + prefix),
                                    fSymbolTable.addSymbol("http://www.w3.org/2000/xmlns/"));
                            atts.addAttribute(attNs, "CDATA", a.namespace);
                            context.declarePrefix(prefix, a.namespace);
                            // ...then add the defaulted attribute itself.
                            QName attName = new QName(fSymbolTable.addSymbol(prefix),
                                    fSymbolTable.addSymbol(a.localName),
                                    fSymbolTable.addSymbol(rawname),
                                    fSymbolTable.addSymbol(a.namespace));
                            atts.addAttribute(attName, "CDATA", a.value);
                            int attrIndex = atts.getIndex(attName.uri, attName.localpart);
                            atts.setSpecified(attrIndex, false);
                        }
                    } else {
                        // No-namespace attribute: add it directly by local name.
                        String attname = fSymbolTable.addSymbol(a.localName);
                        QName attName = new QName(null, attname, attname, null);
                        atts.addAttribute(attName, "CDATA", a.value);
                        int attrIndex = atts.getIndex(attname);
                        atts.setSpecified(attrIndex, false);
                    }
                }
            }
        }
    }
}
public class DbgpXmlEntityParser { /** * $ NON - NLS - 1 $ */ protected static String getEncodedValue ( Element element ) { } }
String encoding = ENCODING_NONE ; if ( element . hasAttribute ( ATTR_ENCODING ) ) { encoding = element . getAttribute ( ATTR_ENCODING ) ; } if ( ENCODING_NONE . equals ( encoding ) ) { return parseContent ( element ) ; } if ( ENCODING_BASE64 . equals ( encoding ) ) { return parseBase64Content ( element ) ; } throw new AssertionError ( NLS . bind ( "invalidEncoding" , encoding ) ) ;
public class FileCopier { /** * 执行拷贝 < br > * 拷贝规则为 : * < pre > * 1 、 源为文件 , 目标为已存在目录 , 则拷贝到目录下 , 文件名不变 * 2 、 源为文件 , 目标为不存在路径 , 则目标以文件对待 ( 自动创建父级目录 ) 比如 : / dest / aaa , 如果aaa不存在 , 则aaa被当作文件名 * 3 、 源为文件 , 目标是一个已存在的文件 , 则当 { @ link # setOverride ( boolean ) } 设为true时会被覆盖 , 默认不覆盖 * 4 、 源为目录 , 目标为已存在目录 , 当 { @ link # setCopyContentIfDir ( boolean ) } 为true时 , 只拷贝目录中的内容到目标目录中 , 否则整个源目录连同其目录拷贝到目标目录中 * 5 、 源为目录 , 目标为不存在路径 , 则自动创建目标为新目录 , 然后按照规则4复制 * 6 、 源为目录 , 目标为文件 , 抛出IO异常 * 7 、 源路径和目标路径相同时 , 抛出IO异常 * < / pre > * @ return 拷贝后目标的文件或目录 * @ throws IORuntimeException IO异常 */ @ Override public File copy ( ) throws IORuntimeException { } }
final File src = this . src ; final File dest = this . dest ; // check Assert . notNull ( src , "Source File is null !" ) ; if ( false == src . exists ( ) ) { throw new IORuntimeException ( "File not exist: " + src ) ; } Assert . notNull ( dest , "Destination File or directiory is null !" ) ; if ( FileUtil . equals ( src , dest ) ) { throw new IORuntimeException ( "Files '{}' and '{}' are equal" , src , dest ) ; } if ( src . isDirectory ( ) ) { // 复制目录 if ( dest . exists ( ) && false == dest . isDirectory ( ) ) { // 源为目录 , 目标为文件 , 抛出IO异常 throw new IORuntimeException ( "Src is a directory but dest is a file!" ) ; } final File subDest = isCopyContentIfDir ? dest : FileUtil . mkdir ( FileUtil . file ( dest , src . getName ( ) ) ) ; internalCopyDirContent ( src , subDest ) ; } else { // 复制文件 internalCopyFile ( src , dest ) ; } return dest ;
public class MOEA {
    /**
     * Collector of {@link Phenotype} objects whose (multi-objective) fitness
     * value is part of the
     * <a href="https://en.wikipedia.org/wiki/Pareto_efficiency">pareto front</a>.
     *
     * @param size the allowed size range of the returned pareto set. If the
     *        size of the pareto set is bigger than {@code size.getMax()},
     *        during the collection, it is reduced to {@code size.getMin()}.
     *        Pareto set elements which are close to each other are removed first.
     * @param <G> the gene type
     * @param <T> the array type, e.g. {@code double[]}
     * @param <V> the multi object result type vector
     * @return the pareto set collector
     * @throws NullPointerException if the {@code size} is {@code null}
     * @throws IllegalArgumentException if the minimal pareto set {@code size}
     *         is smaller than one
     */
    public static <G extends Gene<?, G>, T, V extends Vec<T>>
    Collector<EvolutionResult<G, V>, ?, ISeq<Phenotype<G, V>>> toParetoSet(final IntRange size) {
        // Delegate to the general overload using the Vec default dominance,
        // comparator, distance and dimension functions.
        return toParetoSet(size, Vec<T>::dominance, Vec<T>::compare, Vec<T>::distance, Vec<T>::length);
    }
}
public class InternalXtextParser {
    /**
     * ANTLR-generated entry rule.
     * InternalXtext.g:2627:1: entryRuleCrossReference returns [EObject current=null]: iv_ruleCrossReference=ruleCrossReference EOF;
     */
    public final EObject entryRuleCrossReference() throws RecognitionException {
        EObject current = null;
        EObject iv_ruleCrossReference = null;

        try {
            // InternalXtext.g:2627:55: (iv_ruleCrossReference= ruleCrossReference EOF )
            // InternalXtext.g:2628:2: iv_ruleCrossReference= ruleCrossReference EOF
            {
                newCompositeNode(grammarAccess.getCrossReferenceRule());
                pushFollow(FollowSets000.FOLLOW_1);
                iv_ruleCrossReference = ruleCrossReference();
                state._fsp--;
                current = iv_ruleCrossReference;
                // The entry rule must consume the whole input.
                match(input, EOF, FollowSets000.FOLLOW_2);
            }
        } catch (RecognitionException re) {
            // Standard ANTLR error recovery: resync and keep the skipped tokens.
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
public class FilenameUtil {
    /**
     * Splits a string into a number of tokens.
     * The text is split by '?' and '*'.
     * Where multiple '*' occur consecutively they are collapsed into a single '*'.
     *
     * @param text the text to split
     * @return the array of tokens, never null
     */
    static String[] splitOnTokens(final String text) {
        // used by wildcardMatch
        // package level so a unit test may run on this

        // Fast path: no wildcards means the whole text is a single token.
        if (text.indexOf('?') < 0 && text.indexOf('*') < 0) {
            return new String[] { text };
        }

        final List<String> tokens = new ArrayList<String>();
        final StringBuilder literal = new StringBuilder();
        char previous = 0;
        for (final char ch : text.toCharArray()) {
            if (ch != '?' && ch != '*') {
                literal.append(ch);
            } else {
                // Flush any pending literal text before emitting a wildcard token.
                if (literal.length() > 0) {
                    tokens.add(literal.toString());
                    literal.setLength(0);
                }
                if (ch == '?') {
                    tokens.add("?");
                } else if (previous != '*') {
                    // ch == '*': collapse runs of '*' into a single token.
                    tokens.add("*");
                }
            }
            previous = ch;
        }
        if (literal.length() > 0) {
            tokens.add(literal.toString());
        }
        return tokens.toArray(new String[tokens.size()]);
    }
}
public class Predictor {
    /**
     * The METHOD command defines the analysis task to be performed for a
     * particular system configuration.
     *
     * @param method    the analysis method code
     * @param startPage the page on which output should start
     * @return this predictor, for call chaining
     */
    public Predictor method(int method, int startPage) {
        // NOTE(review): the literal 5 is passed unchanged to the CommandLine
        // constructor; its meaning (field count/width?) is not visible here —
        // confirm against the CommandLine contract.
        input.add(new CommandLine<>(METHOD, 5, method, startPage));
        return this;
    }
}
public class StringFunctions {
    /**
     * Returned expression results in a split of the string into an array of
     * substrings separated by sep.
     * <p>
     * NOTE(review): {@code sep} is interpolated into the generated expression
     * without any escaping; a separator containing a double quote or backslash
     * would break (or inject into) the resulting query — confirm callers only
     * pass trusted literal separators.
     */
    public static Expression split(Expression expression, String sep) {
        return x("SPLIT(" + expression.toString() + ", \"" + sep + "\")");
    }
}
public class HadoopCompatLoader { /** * TODO add threadsafe caching that is aware of the string argument and instantiates a compat for each argument at most once ( assuming the instantiation succeeds ) */ public static HadoopCompat getCompat ( ) { } }
String ver = VersionInfo . getVersion ( ) ; log . debug ( "Read Hadoop VersionInfo string {}" , ver ) ; final String pkgName = HadoopCompatLoader . class . getPackage ( ) . getName ( ) ; final String className ; if ( ver . startsWith ( "1." ) ) { className = pkgName + ".h1.Hadoop1Compat" ; } else { className = pkgName + ".h2.Hadoop2Compat" ; } log . debug ( "Attempting to load class {} and instantiate with nullary constructor" , className ) ; try { Constructor < ? > ctor = Class . forName ( className ) . getConstructor ( ) ; log . debug ( "Invoking constructor {}" , ctor ) ; return ( HadoopCompat ) ctor . newInstance ( ) ; } catch ( NoSuchMethodException e ) { throw new RuntimeException ( e ) ; } catch ( SecurityException e ) { throw new RuntimeException ( e ) ; } catch ( ClassNotFoundException e ) { throw new RuntimeException ( e ) ; } catch ( InstantiationException e ) { throw new RuntimeException ( e ) ; } catch ( IllegalAccessException e ) { throw new RuntimeException ( e ) ; } catch ( IllegalArgumentException e ) { throw new RuntimeException ( e ) ; } catch ( InvocationTargetException e ) { throw new RuntimeException ( e ) ; }