signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class LNGVector { /** * Replaces the contents of this vector with the contents of another vector in - place . * @ param other the other vector * @ throws IllegalArgumentException if you try to replace a vector with itself */ @ SuppressWarnings ( "unchecked" ) public void replaceInplace ( final LNGVector < ? extends T > other ) { } }
if ( this == other ) throw new IllegalArgumentException ( "cannot replace a vector in-place with itself" ) ; this . elements = ( T [ ] ) new Object [ other . size ( ) ] ; for ( int i = 0 ; i < other . size ( ) ; i ++ ) this . elements [ i ] = other . get ( i ) ; this . size = other . size ;
public class StoreLabelsResponseExtractor {

    /**
     * Parses the response to a STORE X-GM-LABELS command, which includes label
     * settings and command status. Exactly one FETCH response is expected since
     * labels are only set on one message.
     * See http://code.google.com/apis/gmail/imap/#x-gm-labels
     *
     *   C: a011 STORE_FLAGS 1 +X-GM-LABELS (foo)
     *   S: * 1 FETCH (X-GM-LABELS (\Inbox \Sent Important "Muy Importante" foo))
     *   S: a011 OK STORE_FLAGS (Success)
     *
     * @param messages raw response lines from the server
     * @return the set of label tokens from the single FETCH line, with the
     *         surrounding parentheses tokens removed; null if messages is empty
     * @throws ExtractionException on more than one FETCH, on OK with no FETCH,
     *         or on a BAD/NO error response
     */
    @Override
    public Set<String> extract(List<String> messages) throws ExtractionException {
        boolean gotFetch = false;
        Set<String> result = null;
        // Find FETCH; throw if none or more than one, or if we receive an error response.
        String fetchStr = null;
        for (int i = 0, messagesSize = messages.size(); i < messagesSize; i++) {
            String message = messages.get(i);
            if (null == message || message.isEmpty()) continue;
            fetchStr = matchAndGetGroup1(FETCH_LABEL_PATT, message);
            if (fetchStr != null) {
                // Second FETCH is a protocol violation for a single-message store.
                if (gotFetch) {
                    throw new ExtractionException("STORE_LABELS RESPONSE: Got more than one FETCH " + "response " + message);
                }
                gotFetch = true;
                result = Sets.<String>newHashSet();
                // Tokenize the label list, then drop the parenthesis tokens themselves.
                result.addAll(Parsing.tokenize(fetchStr));
                result.remove("(");
                result.remove(")");
            } else {
                if (matchAndGetGroup1(OK_PATT, message) != null) {
                    // OK line must come after a FETCH; otherwise no labels were returned.
                    if (!gotFetch) {
                        throw new ExtractionException("STORE_LABELS RESPONSE: no LABELS received." + message);
                    }
                    // All good.
                } else if (matchAndGetGroup1(BAD_PATT, message) != null || matchAndGetGroup1(NO_PATT, message) != null) {
                    throw new ExtractionException("STORE_LABELS RESPONSE: " + message);
                }
            }
        }
        return result;
    }
}
public class SecurityServiceImpl { /** * { @ inheritDoc } */ @ Override public AuthorizationService getAuthorizationService ( ) { } }
AuthorizationService service = authzService . get ( ) ; if ( service == null ) { if ( isConfigurationDefinedInFile ( ) ) { String id = getEffectiveSecurityConfiguration ( ) . getAuthorizationServiceId ( ) ; service = getAuthorizationService ( id ) ; } else { service = autoDetectAuthorizationService ( ) ; } // remember the authorization service authzService . set ( service ) ; } return service ;
public class CurdDaoImpl {

    /**
     * Pre-update hook applied to an entity before it is persisted.
     * Updates the last-modified timestamp for Dateable entities and maintains
     * path/index bookkeeping for Treeable (hierarchical) and Movable (ordered)
     * entities.
     *
     * @param entity the entity about to be updated
     * @return the same entity instance, possibly with adjusted path/index/date fields
     */
    private T onBeforeUpdate(T entity) {
        // TODO pre-update processing
        Class<?> cls = entity.getClass();
        String entityName = cls.getName();
        String idFieldName = getIdFieldName(cls);
        ID id = entity.getId();
        // Resolve the physical field names used by the index/path/parent extension fields.
        String indexName = DaoHelper.getExtendFieldName(cls, ExtendField.Index);
        String pathName = DaoHelper.getExtendFieldName(cls, ExtendField.Path);
        String parentName = DaoHelper.getExtendFieldName(cls, ExtendField.Parent);
        System.out.println(indexName + "," + pathName + "," + parentName);
        // Entities that track timestamps: stamp the last-modified date.
        if (Dateable.class.isAssignableFrom(cls)) {
            ((Dateable) entity).setLastModifiedDate(Calendar.getInstance().getTime());
        }
        // Entities arranged as a tree.
        // NOTE(review): because this is an if/else-if chain, a class that is both
        // Treeable and Movable only gets the Treeable handling — confirm intended.
        if (Treeable.class.isAssignableFrom(cls)) {
            // Determine whether the node's position changed relative to the DB copy.
            Treeable currObject = (Treeable) entity;
            Serializable newParentId = currObject.getParent();
            Treeable dbObject = (Treeable) getCurrentSession().get(cls, id);
            Serializable oldParentId = dbObject.getParent();
            System.out.println("newParentId:" + newParentId + " --> oldParentId:" + oldParentId);
            // Moved across levels (parent changed).
            if (notNull(newParentId) && (!newParentId.equals(oldParentId))) {
                String oldPath = dbObject.getPath();
                Treeable newParent = (Treeable) getCurrentSession().get(cls, newParentId);
                // New path = new parent's path + own id + "/".
                String newPath = newParent.getPath() + id.toString() + "/";
                currObject.setPath(newPath);
                System.out.println("newPath:" + newPath + " --> oldPath:" + oldPath);
                // 1. Re-rank: place this node after the current max index under the new parent.
                Object maxVal = createCriteria(cls).add(Restrictions.eq(parentName, currObject.getParent())).setProjection(Projections.max(indexName)).uniqueResult();
                currObject.setIndex(isNull(maxVal) ? 1 : (Integer.valueOf(maxVal.toString()) + 1));
                // 2. Rewrite descendants' paths: replace the dbObject path prefix with the new path.
                int subStart = oldPath.length() + 1;
                // NOTE(review): the LIKE pattern oldPath + "_%" uses "_" as a single-char
                // wildcard; verify it cannot match sibling paths sharing the prefix.
                String hql = "update " + entityName + " set " + pathName + " = Concat(?, Substring( " + pathName + ", " + subStart + ", Length(" + pathName + ") )) where " + pathName + " like ?";
                getCurrentSession().createQuery(hql).setParameter(0, newPath).setParameter(1, oldPath + "_%").executeUpdate();
            }
            // Moved within the same level (index changed): swap with the node occupying the target index.
            else if (!dbObject.getIndex().equals(currObject.getIndex())) {
                String hql = "update " + entityName + " set " + indexName + "=? where " + parentName + "=? and " + indexName + "=?";
                createQuery(hql).setParameter(0, dbObject.getIndex()).setParameter(1, dbObject.getParent()).setParameter(2, currObject.getIndex()).executeUpdate();
            }
        }
        // Entities that are merely orderable (no tree structure).
        else if (Movable.class.isAssignableFrom(cls)) {
            Movable currObject = (Movable) entity;
            Movable dbObject = (Movable) getCurrentSession().get(cls, entity.getId());
            if (!dbObject.getIndex().equals(currObject.getIndex())) {
                String hql = "update " + entityName + " set " + indexName + "=? where " + indexName + "=? ";
                createQuery(hql).setParameter(0, currObject.getIndex()).setParameter(1, dbObject.getIndex()).executeUpdate();
            }
        }
        return entity;
    }
}
public class Cells { /** * Returns the { @ code URL } value of the { @ link Cell } ( associated to { @ code table } ) whose name iscellName , or null if * this Cells object contains no cell whose name is cellName . * @ param nameSpace the name of the owning table * @ param cellName the name of the Cell we want to retrieve from this Cells object . * @ return the { @ code URL } value of the { @ link Cell } ( associated to { @ code table } ) whose name is cellName , or null * if this Cells object contains no cell whose name is cellName */ public URL getURL ( String nameSpace , String cellName ) { } }
return getValue ( nameSpace , cellName , URL . class ) ;
public class AuthenticationContext { /** * Acquires a security token from the authority using a Refresh Token * previously received . * @ param refreshToken * Refresh Token to use in the refresh flow . * @ param credential * object representing Private Key to use for token acquisition . * @ param resource * Identifier of the target resource that is the recipient of the * requested token . If null , token is requested for the same * resource refresh token was originally issued for . If passed , * resource should match the original resource used to acquire * refresh token unless token service supports refresh token for * multiple resources . * @ param callback * optional callback object for non - blocking execution . * @ return A { @ link Future } object representing the * { @ link AuthenticationResult } of the call . It contains Access * Token , Refresh Token and the Access Token ' s expiration time . * @ throws AuthenticationException * thrown if { @ link AsymmetricKeyCredential } fails to sign the * JWT token . */ public Future < AuthenticationResult > acquireTokenByRefreshToken ( final String refreshToken , final AsymmetricKeyCredential credential , final String resource , final AuthenticationCallback callback ) throws AuthenticationException { } }
return acquireTokenByRefreshToken ( refreshToken , credential . getClientId ( ) , JwtHelper . buildJwt ( credential , this . authenticationAuthority . getSelfSignedJwtAudience ( ) ) , resource , callback ) ;
public class ReefEventStateManager { /** * convert time from long to formatted string . * @ param time * @ return */ private String convertTime ( final long time ) { } }
final Date date = new Date ( time ) ; return FORMAT . format ( date ) ;
public class IOGroovyMethods {

    /**
     * Reads one line from the reader by buffering ahead with mark/reset, so the
     * reader is left positioned immediately after the line separator.
     *
     * @param input a reader supporting mark(); if setting the mark fails, falls
     *              back to the mark-free implementation
     * @return the next line without its separator, or null at end of stream
     * @throws IOException if reading fails
     */
    private static String readLineFromReaderWithMark(final Reader input) throws IOException {
        char[] cbuf = new char[charBufferSize];
        try {
            input.mark(charBufferSize);
        } catch (IOException e) {
            // this should never happen
            LOG.warning("Caught exception setting mark on supporting reader: " + e);
            // fallback to the slower, mark-free path
            return readLineFromReaderWithoutMark(input);
        }
        // could be changed into do..while, but then we might create an additional
        // StringBuilder instance at the end of the stream
        int count = input.read(cbuf);
        if (count == EOF) // we are at the end of the input data
            return null;
        StringBuilder line = new StringBuilder(expectedLineLength);
        // scan buffers until a line separator is found
        int ls = lineSeparatorIndex(cbuf, count);
        while (ls == -1) {
            line.append(cbuf, 0, count);
            count = input.read(cbuf);
            if (count == EOF) {
                // end of input: the accumulated text is the final (unterminated) line
                return line.toString();
            }
            ls = lineSeparatorIndex(cbuf, count);
        }
        line.append(cbuf, 0, ls);
        // determine how many separator chars to skip; \r\n counts as two
        int skipLS = 1;
        if (ls + 1 < count) {
            // we are not at the end of the buffer: peek at the next buffered char
            if (cbuf[ls] == '\r' && cbuf[ls + 1] == '\n') {
                skipLS++;
            }
        } else {
            // separator ends the buffer: read one more char to check for \n after \r
            if (cbuf[ls] == '\r' && input.read() == '\n') {
                skipLS++;
            }
        }
        // reset() to the mark and skip over the line plus its separator so the
        // reader is positioned at the start of the next line
        input.reset();
        input.skip(line.length() + skipLS);
        return line.toString();
    }
}
public class StringUtils { /** * Generates the MD5 checksum for the specified message . * @ param message The message . * @ return The hexadecimal checksum . */ public static String md5 ( final String message ) { } }
byte [ ] res ; try { MessageDigest instance = MessageDigest . getInstance ( "MD5" ) ; instance . reset ( ) ; instance . update ( message . getBytes ( ) ) ; res = instance . digest ( ) ; } catch ( final NoSuchAlgorithmException ex ) { throw new RuntimeException ( ex ) ; } StringBuilder hexString = new StringBuilder ( ) ; for ( byte resByte : res ) { hexString . append ( Integer . toString ( ( resByte & 0xff ) + 0x100 , 16 ) . substring ( 1 ) ) ; } return hexString . toString ( ) ;
public class BasicODataClientQuery { /** * Returns a StringBuilder that is consisted of filtering and expanding parameters that in turn are appended * to the query string used for Odata Client . * An Odata Client query can have either one of filter or expand parameters ( with multiple properties * if desired ) or both . * @ return String Builder showing parameters appended to the query . * @ see { @ link com . sdl . odata . client . api . ODataClient } */ private StringBuilder generateParameters ( ) { } }
StringBuilder parameters = new StringBuilder ( ) ; if ( filterMap == null && expandParameters == null ) { return parameters ; } parameters . append ( '?' ) ; int filterParameterCounter = 0 ; if ( filterMap != null && ! filterMap . isEmpty ( ) ) { parameters . append ( FILTER_PREFIX ) ; for ( Map . Entry < String , String > filterEntry : filterMap . entrySet ( ) ) { parameters . append ( String . format ( "%s eq '%s'" , filterEntry . getKey ( ) , filterEntry . getValue ( ) ) ) ; if ( ++ filterParameterCounter < filterMap . size ( ) ) { parameters . append ( " and " ) ; } } if ( expandParameters != null ) { parameters . append ( "&" ) ; } } if ( expandParameters != null ) { parameters . append ( EXPAND_PREFIX ) ; Iterator iterator = expandParameters . iterator ( ) ; parameters . append ( String . format ( "%s" , iterator . next ( ) ) ) ; while ( iterator . hasNext ( ) ) { parameters . append ( String . format ( ",%s" , iterator . next ( ) ) ) ; } } return parameters ;
public class VPTree {

    /**
     * Computes the distance (or similarity, depending on similarityFunction)
     * between two points, negated when {@code invert} is set.
     *
     * @param arr1 first point
     * @param arr2 second point
     * @return the distance between the two points; negated if invert is true
     */
    public float distance(INDArray arr1, INDArray arr2) {
        // Lazily create the ThreadLocal holder for the scratch scalar.
        if (scalars == null)
            scalars = new ThreadLocal<>();
        // NOTE(review): this condition looks inverted. A fresh ThreadLocal yields
        // null, and scalar(0.0f).equals(null) is false, so the scratch scalar is
        // apparently never initialized on first use — confirm against Nd4j semantics.
        if (Nd4j.scalar(0.0f).equals(scalars.get()))
            scalars.set(Nd4j.scalar(0.0));
        // Dispatch on the configured similarity function; default is Euclidean.
        switch (similarityFunction) {
            case "jaccard":
                float ret7 = Nd4j.getExecutioner().execAndReturn(new JaccardDistance(arr1, arr2, scalars.get())).getFinalResult().floatValue();
                return invert ? -ret7 : ret7;
            case "hamming":
                float ret8 = Nd4j.getExecutioner().execAndReturn(new HammingDistance(arr1, arr2, scalars.get())).getFinalResult().floatValue();
                return invert ? -ret8 : ret8;
            case "euclidean":
                float ret = Nd4j.getExecutioner().execAndReturn(new EuclideanDistance(arr1, arr2, scalars.get())).getFinalResult().floatValue();
                return invert ? -ret : ret;
            case "cosinesimilarity":
                float ret2 = Nd4j.getExecutioner().execAndReturn(new CosineSimilarity(arr1, arr2, scalars.get())).getFinalResult().floatValue();
                return invert ? -ret2 : ret2;
            case "cosinedistance":
                float ret6 = Nd4j.getExecutioner().execAndReturn(new CosineDistance(arr1, arr2, scalars.get())).getFinalResult().floatValue();
                return invert ? -ret6 : ret6;
            case "manhattan":
                float ret3 = Nd4j.getExecutioner().execAndReturn(new ManhattanDistance(arr1, arr2, scalars.get())).getFinalResult().floatValue();
                return invert ? -ret3 : ret3;
            case "dot":
                float dotRet = (float) Nd4j.getBlasWrapper().dot(arr1, arr2);
                return invert ? -dotRet : dotRet;
            default:
                // Unknown similarity name: fall back to Euclidean distance.
                float ret4 = Nd4j.getExecutioner().execAndReturn(new EuclideanDistance(arr1, arr2, scalars.get())).getFinalResult().floatValue();
                return invert ? -ret4 : ret4;
        }
    }
}
public class DRL5Lexer {

    /**
     * $ANTLR start "MINUS"
     * ANTLR-generated lexer rule: matches a single '-' character and emits a
     * MINUS token on the default channel. Do not edit by hand.
     */
    public final void mMINUS() throws RecognitionException {
        try {
            int _type = MINUS;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // src/main/resources/org/drools/compiler/lang/DRL5Lexer.g:309:7: ( '-' )
            // src/main/resources/org/drools/compiler/lang/DRL5Lexer.g:309:9: '-'
            {
                match('-');
                // in backtracking mode a failed match just returns without emitting
                if (state.failed) return;
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // do for sure before leaving
        }
    }
}
public class SpaceGroup { /** * Given a rotation matrix calculates the rotation axis and angle for it . * The angle is calculated from the trace , the axis from the eigenvalue * decomposition . * If given matrix is improper rotation or identity matrix then * axis ( 0,0,0 ) and angle 0 are returned . * @ param m * @ return * @ throws IllegalArgumentException if given matrix is not a rotation matrix ( determinant not 1 or - 1) */ public static AxisAngle4d getRotAxisAndAngle ( Matrix3d m ) { } }
double determinant = m . determinant ( ) ; if ( ! ( Math . abs ( determinant ) - 1.0 < DELTA ) ) throw new IllegalArgumentException ( "Given matrix is not a rotation matrix" ) ; AxisAngle4d axisAndAngle = new AxisAngle4d ( new Vector3d ( 0 , 0 , 0 ) , 0 ) ; double [ ] d = { m . m00 , m . m10 , m . m20 , m . m01 , m . m11 , m . m21 , m . m02 , m . m12 , m . m22 } ; Matrix r = new Matrix ( d , 3 ) ; if ( ! deltaComp ( r . det ( ) , 1.0 , DELTA ) ) { // improper rotation : we return axis 0,0,0 and angle 0 return axisAndAngle ; } EigenvalueDecomposition evd = new EigenvalueDecomposition ( r ) ; Matrix eval = evd . getD ( ) ; if ( deltaComp ( eval . get ( 0 , 0 ) , 1.0 , DELTA ) && deltaComp ( eval . get ( 1 , 1 ) , 1.0 , DELTA ) && deltaComp ( eval . get ( 2 , 2 ) , 1.0 , DELTA ) ) { // the rotation is an identity : we return axis 0,0,0 and angle 0 return axisAndAngle ; } int indexOfEv1 ; for ( indexOfEv1 = 0 ; indexOfEv1 < 3 ; indexOfEv1 ++ ) { if ( deltaComp ( eval . get ( indexOfEv1 , indexOfEv1 ) , 1 , DELTA ) ) break ; } Matrix evec = evd . getV ( ) ; axisAndAngle . set ( new Vector3d ( evec . get ( 0 , indexOfEv1 ) , evec . get ( 1 , indexOfEv1 ) , evec . get ( 2 , indexOfEv1 ) ) , Math . acos ( ( eval . trace ( ) - 1.0 ) / 2.0 ) ) ; return axisAndAngle ;
public class MaxTxId { /** * Store the specified transaction in ZooKeeper irrespective of what the * current max transaction id is ( can be used to reset the max transaction * to an older one ) . * @ param maxTxId The max transaction id to set * @ throws StaleVersionException If last max transaction id read by this * is out of date compared to the version in * ZooKeeper * @ throws IOException If there an unrecoverable error communicating with * ZooKeeper */ public synchronized void set ( long maxTxId ) throws IOException { } }
maxTxIdWritable . set ( maxTxId ) ; try { byte [ ] data = WritableUtil . writableToByteArray ( maxTxIdWritable ) ; if ( lastZNodeStat != null ) { try { zooKeeper . setData ( fullyQualifiedZNode , data , lastZNodeStat . getVersion ( ) ) ; } catch ( KeeperException . BadVersionException e ) { LOG . error ( fullyQualifiedZNode + " was updated by another process" , e ) ; throw new StaleVersionException ( fullyQualifiedZNode + " has been updated by another process" ) ; } } else { zooKeeper . create ( fullyQualifiedZNode , data , ZooDefs . Ids . OPEN_ACL_UNSAFE , CreateMode . PERSISTENT ) ; lastZNodeStat = zooKeeper . exists ( fullyQualifiedZNode , false ) ; } } catch ( KeeperException e ) { LOG . error ( "Unrecoverable ZooKeeper writing to " + fullyQualifiedZNode , e ) ; throw new IOException ( "Unrecoverable writing to " + fullyQualifiedZNode , e ) ; } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . error ( "Interrupted reading from " + fullyQualifiedZNode , e ) ; throw new IOException ( "Interrupted reading from " + fullyQualifiedZNode , e ) ; }
public class Interceptors { /** * Extracts a set of interceptor bindings from a collection of annotations . * @ param beanManager * @ param annotations * @ return */ public static Set < Annotation > filterInterceptorBindings ( BeanManagerImpl beanManager , Collection < Annotation > annotations ) { } }
Set < Annotation > interceptorBindings = new InterceptorBindingSet ( beanManager ) ; for ( Annotation annotation : annotations ) { if ( beanManager . isInterceptorBinding ( annotation . annotationType ( ) ) ) { interceptorBindings . add ( annotation ) ; } } return interceptorBindings ;
public class FrameHandler { /** * Return a safe copy of all registered { @ link MessageHandler } s . */ public final Set < MessageHandler > getHandlers ( ) { } }
Set < MessageHandler > msgHandlers = new HashSet < > ( ) ; for ( HandlerWrapper handler : handlers . values ( ) ) { msgHandlers . add ( handler . getHandler ( ) ) ; } return msgHandlers ;
public class ProductSegmentation {

    /**
     * Gets the deviceManufacturerSegment value for this ProductSegmentation.
     *
     * @return deviceManufacturerSegment — the device manufacturer segmentation.
     *         {@link DeviceFamilyTargeting#isTargeted} must be {@code true}.
     *         <p>This attribute is optional; may be null when unset.
     */
    public com.google.api.ads.admanager.axis.v201811.DeviceManufacturerTargeting getDeviceManufacturerSegment() {
        return deviceManufacturerSegment;
    }
}
public class WebAppDescriptorImpl { /** * If not already created , a new < code > servlet < / code > element will be created and returned . * Otherwise , the first existing < code > servlet < / code > element will be returned . * @ return the instance defined for the element < code > servlet < / code > */ public ServletType < WebAppDescriptor > getOrCreateServlet ( ) { } }
List < Node > nodeList = model . get ( "servlet" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new ServletTypeImpl < WebAppDescriptor > ( this , "servlet" , model , nodeList . get ( 0 ) ) ; } return createServlet ( ) ;
public class SessionCreateManager {

    /**
     * Sets the id of the current authentication process and wakes waiters.
     *
     * @param sessionAuthId id of the current authentication process
     */
    void setId(String sessionAuthId) {
        synchronized (lock) {
            // Only record the id while a session-creation attempt is in flight;
            // a late-arriving id for a cancelled attempt is dropped.
            if (isCreatingSession.get()) {
                this.sessionAuthId = sessionAuthId;
            }
            // Wake all threads waiting on the lock, whether or not the id was stored.
            lock.notifyAll();
        }
    }
}
public class DbxWebAuth {

    /**
     * Starts authorization and returns an "authorization URL" on the Dropbox website that
     * lets the user grant your app access to their Dropbox account.
     *
     * <p>If a redirect URI was specified ({@link Request.Builder#withRedirectUri}), users
     * will be redirected to it after completing the flow; call {@link #finishFromRedirect}
     * with the query parameters received from the redirect. If no redirect URI was specified
     * ({@link Request.Builder#withNoRedirect}), users who grant access are shown an
     * "authorization code" to paste back into your app, after which you call
     * {@link #finishFromCode(String)} to get an access token.
     *
     * @param request OAuth 2.0 web-based authorization flow request configuration
     * @return Authorization URL of the website the user can use to authorize your app.
     * @throws IllegalStateException if this instance was created using the deprecated
     *         {@link #DbxWebAuth(DbxRequestConfig, DbxAppInfo, String, DbxSessionStore)}
     *         constructor, or if it was created with a {@link DbxAppInfo} without an app secret.
     */
    public String authorize(Request request) {
        // Guard: the deprecated-constructor path uses a different flow entirely.
        if (deprecatedRequest != null) {
            throw new IllegalStateException("Must create this instance using DbxWebAuth(DbxRequestConfig,DbxAppInfo) to call this method.");
        }
        // Guard: apps without a secret must use the PKCE flow instead.
        if (!appInfo.hasSecret()) {
            throw new IllegalStateException("For native apps, please use DbxPKCEWebAuth");
        }
        return authorizeImpl(request);
    }
}
public class ConfigRepository { /** * Loads up the configuration data for the specified object . * @ return a map containing field / value pairs for all stored configuration data . */ public HashMap < String , String > loadConfig ( String node , String object ) { } }
HashMap < String , String > data = Maps . newHashMap ( ) ; for ( ConfigRecord record : from ( ConfigRecord . class ) . where ( ConfigRecord . OBJECT . eq ( object ) , ConfigRecord . NODE . eq ( node ) ) . select ( ) ) { data . put ( record . field , record . value ) ; } return data ;
public class ObjectFactory {

    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link CompanyCurrency}{@code >}.
     * Generated JAXB factory method; the element carries no declared scope (null).
     *
     * @param value the CompanyCurrency payload to wrap
     * @return a JAXBElement wrapping the given value under the CompanyCurrency QName
     */
    @XmlElementDecl(namespace = "http://schema.intuit.com/finance/v3", name = "CompanyCurrency", substitutionHeadNamespace = "http://schema.intuit.com/finance/v3", substitutionHeadName = "IntuitObject")
    public JAXBElement<CompanyCurrency> createCompanyCurrency(CompanyCurrency value) {
        return new JAXBElement<CompanyCurrency>(_CompanyCurrency_QNAME, CompanyCurrency.class, null, value);
    }
}
public class ImportStringsAction { /** * get result . */ @ Override public void getResult ( final ContentHandler buf ) throws SAXException { } }
final String templateFilePath = paramTable . get ( FileGenerator . PARAM_TEMPLATE ) ; for ( final Value value : valueSet ) { buf . startElement ( NULL_NS_URI , "stringfile" , "stringfile" , new AttributesBuilder ( ) . build ( ) ) ; final char [ ] location = FileUtils . getRelativeUnixPath ( templateFilePath , value . value ) . toCharArray ( ) ; buf . characters ( location , 0 , location . length ) ; buf . endElement ( NULL_NS_URI , "stringfile" , "stringfile" ) ; }
public class CPDAvailabilityEstimateServiceUtil {

    /**
     * Fetches the availability estimate for the given commerce product definition.
     *
     * NOTE FOR DEVELOPERS: Never modify this class directly. Add custom service
     * methods to {@link com.liferay.commerce.service.impl.CPDAvailabilityEstimateServiceImpl}
     * and rerun ServiceBuilder to regenerate this class.
     *
     * @param cpDefinitionId the CP definition id to look up
     * @return the matching CPDAvailabilityEstimate, per the underlying service's contract
     * @throws com.liferay.portal.kernel.exception.PortalException if the service fails
     */
    public static com.liferay.commerce.model.CPDAvailabilityEstimate fetchCPDAvailabilityEstimateByCPDefinitionId(long cpDefinitionId)
            throws com.liferay.portal.kernel.exception.PortalException {
        return getService().fetchCPDAvailabilityEstimateByCPDefinitionId(cpDefinitionId);
    }
}
public class CmsPreferences { /** * Sets the " direct edit button style " setting . < p > * @ param value a String representation of an int value to set the " direct edit button style " setting */ public void setParamTabEdDirectEditButtonStyle ( String value ) { } }
try { m_userSettings . setDirectEditButtonStyle ( Integer . parseInt ( value ) ) ; } catch ( Throwable t ) { // should usually never happen }
public class J { /** * Aliases of nested joins are determined at runtime . To refer a nested join , this method should be used to get a correct alias . * For example , there is a query * < code > Q . from ( QGroup . group ) . joins ( J . left ( QPerson . person ) . nested ( J . left ( QContact . contact ) . nested ( QStatus . status ) ) ) < / code > * To refer a < code > Status < / code > entity in the ' where ' clause , one should use < code > J . path ( QPerson . person , QContact . contact . QStatus . status ) . state . eq ( " active " ) < / code > * @ param grandFather parent of parent join path * @ param father parent join path * @ param path target join path * @ param < T > any entity path * @ return entity path with correct alias */ @ SuppressWarnings ( "unchcecked" ) public static < T extends EntityPath > T path ( EntityPath < ? > grandFather , EntityPath < ? > father , T path ) { } }
Assert . notNull ( father ) ; Assert . notNull ( grandFather ) ; EntityPath < ? > parentPath = path ( grandFather , father ) ; return path ( parentPath , path ) ;
public class PointLocationFormatter { /** * Formats a point location as an ISO 6709 string . * @ param pointLocation * Point location to format * @ return Formatted string */ private static String formatISO6709Short ( final PointLocation pointLocation ) { } }
final Latitude latitude = pointLocation . getLatitude ( ) ; final Longitude longitude = pointLocation . getLongitude ( ) ; String string = formatLatitudeShort ( latitude ) + formatLongitudeShort ( longitude ) ; final double altitude = pointLocation . getAltitude ( ) ; string = string + formatAltitudeWithSign ( altitude ) ; final String crs = pointLocation . getCoordinateReferenceSystemIdentifier ( ) ; string = string + formatCoordinateReferenceSystemIdentifier ( crs ) ; return string + "/" ;
public class CollapsibleSplitLayoutPanel { /** * Sets whether or not double - clicking on the splitter should toggle the * display of the widget . * @ param child the child whose display toggling will be allowed or not . * @ param allowed whether or not display toggling is allowed for this widget */ public void setWidgetToggleDisplayAllowed ( Widget child , boolean allowed ) { } }
assertIsChild ( child ) ; Splitter splitter = getAssociatedSplitter ( child ) ; // The splitter is null for the center element . if ( splitter != null ) { splitter . setToggleDisplayAllowed ( allowed ) ; }
public class RhythmicalTomcat {

    /**
     * Creates a Context instance for the given host and url, mirroring the
     * superclass's private method, then applies local setup hooks (access log,
     * valve, and the optional LikeIt Catalina setupper).
     *
     * @param host target host; when null, this Tomcat's default host is used
     * @param url context url (used only for error reporting here)
     * @return the newly created and configured context
     * @throws IllegalArgumentException if the context class cannot be instantiated
     */
    protected Context createContext(Host host, String url) {
        // Similar to the super class's private method.
        String contextClassName = StandardContext.class.getName();
        Host effectiveHost = host;
        if (effectiveHost == null) {
            effectiveHost = this.getHost();
        }
        if (effectiveHost instanceof StandardHost) {
            contextClassName = ((StandardHost) effectiveHost).getContextClass();
        }
        final Context context;
        try {
            context = (Context) Class.forName(contextClassName).getConstructor().newInstance();
        } catch (InstantiationException | IllegalAccessException | IllegalArgumentException
                | InvocationTargetException | NoSuchMethodException | SecurityException
                | ClassNotFoundException e) {
            final String msg = "Can't instantiate context-class " + contextClassName + " for host " + effectiveHost + " and url " + url;
            throw new IllegalArgumentException(msg, e);
        }
        // Local customisations applied after instantiation.
        setupAccessLogIfNeeds(context);
        setupYourValveIfNeeds(context);
        if (likeitCatalinaSetupper != null) {
            likeitCatalinaSetupper.setup(new LikeItCatalinaResource(effectiveHost, context));
        }
        return context;
    }
}
public class CacheEntryViews { /** * Creates a { @ link DefaultCacheEntryView } instance . * @ param key the key to be wrapped * @ param value the value to be wrapped * @ param record { @ link CacheRecord } instance to gather additional entry view properties like access time , * expiration time and access hit * @ return the { @ link DefaultCacheEntryView } instance */ public static CacheEntryView < Data , Data > createDefaultEntryView ( Data key , Data value , Data expiryPolicy , CacheRecord < Object , Data > record ) { } }
CacheEntryView entryView = new DefaultCacheEntryView ( key , value , record . getCreationTime ( ) , record . getExpirationTime ( ) , record . getLastAccessTime ( ) , record . getAccessHit ( ) , expiryPolicy ) ; return entryView ;
public class ColorUtil { /** * Blends the two supplied colors , using the supplied percentage * as the amount of the first color to use . * @ param firstperc The percentage of the first color to use , from 0.0f * to 1.0f inclusive . */ public static final Color blend ( Color c1 , Color c2 , float firstperc ) { } }
float p2 = 1.0f - firstperc ; return new Color ( ( int ) ( c1 . getRed ( ) * firstperc + c2 . getRed ( ) * p2 ) , ( int ) ( c1 . getGreen ( ) * firstperc + c2 . getGreen ( ) * p2 ) , ( int ) ( c1 . getBlue ( ) * firstperc + c2 . getBlue ( ) * p2 ) ) ;
public class PackagesToContainingMavenArtifactsIndex { /** * For given API artifact , finds the projects whose Java classes use artifact ' s classes , * and links them in the graph . */ public boolean moduleContainsPackagesFromAPI ( ProjectModel projectModel , MavenCoord apiCoords ) { } }
ArchiveCoordinateModel archive = new ArchiveCoordinateService ( graphContext , ArchiveCoordinateModel . class ) . findSingle ( apiCoords . getGroupId ( ) , apiCoords . getArtifactId ( ) , null ) ; if ( archive == null ) return false ; // return graphContext . testIncidence ( projectModel . asVertex ( ) , archive . asVertex ( ) , EDGE _ USES ) ; Iterator < Vertex > projectsVerts = archive . getElement ( ) . vertices ( Direction . IN , EDGE_USES ) ; Iterator < ProjectModel > projects = ( Iterator < ProjectModel > ) graphContext . getFramed ( ) . frame ( projectsVerts , ProjectModel . class ) ; while ( projects . hasNext ( ) ) { ProjectModel project = projects . next ( ) ; if ( projectModel . equals ( project ) ) return true ; } return false ;
public class ModelErrorException {

    /**
     * Sets the original status code carried by this error.
     *
     * @param originalStatusCode the original status code; may be null
     */
    @com.fasterxml.jackson.annotation.JsonProperty("OriginalStatusCode")
    public void setOriginalStatusCode(Integer originalStatusCode) {
        this.originalStatusCode = originalStatusCode;
    }
}
public class SecurityUtil {

    /**
     * Redirects <code>System.out</code> and <code>System.err</code> streams to the given
     * SLF4J loggers. This is a benefit if you have a legacy console-logging application.
     * It does not provide the benefits of a full implementation — for example, no
     * hierarchical or logger-inheritance support — but there are ancillary benefits:
     * 1) capturing messages that would otherwise be lost, 2) redirecting console messages
     * to centralized log services, 3) formatting console messages in other types of
     * output (e.g., HTML).
     *
     * @param sysOutLogger logger that will receive System.out output
     * @param sysErrLogger logger that will receive System.err output
     */
    public static void bindSystemStreamsToSLF4J(Logger sysOutLogger, Logger sysErrLogger) {
        // Record the target loggers in static fields, then delegate to the
        // no-arg overload that performs the actual stream rebinding.
        SecurityUtil.sysOutLogger = sysOutLogger;
        SecurityUtil.sysErrLogger = sysErrLogger;
        bindSystemStreamsToSLF4J();
    }
}
public class AddMultiAssetResponsiveDisplayAd {
    /**
     * Creates and uploads an {@link ImageAsset} for the specified URL.
     *
     * @param adWordsServices the services factory
     * @param session         the AdWords session
     * @param url             URL of the image to upload
     * @return the ID of the created {@link ImageAsset}
     * @throws IOException if unable to read the image from the specified URL
     */
    private static long uploadImageAsset ( AdWordsServicesInterface adWordsServices , AdWordsSession session , String url ) throws IOException {
        AssetServiceInterface assetService = adWordsServices . get ( session , AssetServiceInterface . class ) ;
        // Create the image asset .
        ImageAsset image = new ImageAsset ( ) ;
        // Optional : Provide a unique friendly name to identify your asset . If you specify the assetName
        // field , then both the asset name and the image being uploaded should be unique , and should not
        // match another ACTIVE asset in this customer account .
        // image . setAssetName ( " Image asset # " + System . currentTimeMillis ( ) ) ;
        image . setImageData ( com . google . api . ads . common . lib . utils . Media . getMediaDataFromUrl ( url ) ) ;
        // Create the ADD operation wrapping the asset .
        AssetOperation operation = new AssetOperation ( ) ;
        operation . setOperator ( Operator . ADD ) ;
        operation . setOperand ( image ) ;
        // Create the asset and return the ID of the single mutated result .
        return assetService . mutate ( new AssetOperation [ ] { operation } ) . getValue ( 0 ) . getAssetId ( ) ;
    }
}
public class Instance {
    /**
     * Availability of the instance (output only).
     *
     * <code>.google.appengine.v1.Instance.Availability availability = 4;</code>
     *
     * @return the enum value stored in {@code availability_}, or
     *         {@code UNRECOGNIZED} if the raw wire value maps to no known constant
     */
    public com . google . appengine . v1 . Instance . Availability getAvailability ( ) {
        // valueOf returns null for unknown wire values; normalize to UNRECOGNIZED.
        com . google . appengine . v1 . Instance . Availability result = com . google . appengine . v1 . Instance . Availability . valueOf ( availability_ ) ;
        return result == null ? com . google . appengine . v1 . Instance . Availability . UNRECOGNIZED : result ;
    }
}
public class FilteredJobLifecycleListener { /** * { @ inheritDoc } */ @ Override public void onStageTransition ( JobExecutionState state , String previousStage , String newStage ) { } }
if ( this . filter . apply ( state . getJobSpec ( ) ) ) { this . delegate . onStageTransition ( state , previousStage , newStage ) ; }
public class CRFClassifier {
    /**
     * Classifies a document using the inference method selected by the flags:
     * Gibbs sampling when {@code flags.doGibbs} is set, otherwise max-ent ("maxent")
     * Viterbi-style inference. Any other inference type is rejected.
     *
     * @param document the document (list of tokens) to label
     * @return the labeled document, or {@code null} if Gibbs inference threw
     * @throws RuntimeException if {@code flags.crfType} names an unsupported inference type
     */
    @ Override
    public List < IN > classify ( List < IN > document ) {
        if ( flags . doGibbs ) {
            try {
                return classifyGibbs ( document ) ;
            } catch ( Exception e ) {
                // NOTE: failures in Gibbs inference are reported and swallowed,
                // returning null rather than propagating.
                System . err . println ( "Error running testGibbs inference!" ) ;
                e . printStackTrace ( ) ;
                return null ;
            }
        } else if ( flags . crfType . equalsIgnoreCase ( "maxent" ) ) {
            return classifyMaxEnt ( document ) ;
        } else {
            throw new RuntimeException ( "Unsupported inference type: " + flags . crfType ) ;
        }
    }
}
public class GlobalizationPreferences {
    /**
     * Gets a date format according to the current settings. If there is an
     * explicit (non-null) date/time format set, a copy of that is returned.
     * Otherwise, the language priority list is used. DF_NONE should be used for
     * the style where only the date or time format individually is being gotten.
     *
     * @param dateStyle DF_FULL, DF_LONG, DF_MEDIUM, DF_SHORT or DF_NONE
     * @param timeStyle DF_FULL, DF_LONG, DF_MEDIUM, DF_SHORT or DF_NONE
     * @return a DateFormat, according to the above description
     * @throws IllegalArgumentException if both styles are DF_NONE or either is out of range
     * @hide draft / provisional / internal are hidden on Android
     */
    public DateFormat getDateFormat ( int dateStyle , int timeStyle ) {
        // Reject: both styles NONE (nothing to format), or either style outside [0, DF_LIMIT).
        if ( dateStyle == DF_NONE && timeStyle == DF_NONE || dateStyle < 0 || dateStyle >= DF_LIMIT || timeStyle < 0 || timeStyle >= DF_LIMIT ) {
            throw new IllegalArgumentException ( "Illegal date format style arguments" ) ;
        }
        DateFormat result = null ;
        if ( dateFormats != null ) {
            result = dateFormats [ dateStyle ] [ timeStyle ] ;
        }
        if ( result != null ) {
            result = ( DateFormat ) result . clone ( ) ; // clone for safety
            // Not sure overriding configuration is what we really want . . .
            result . setTimeZone ( getTimeZone ( ) ) ;
        } else {
            // No explicit format set: derive one from the language priority list.
            result = guessDateFormat ( dateStyle , timeStyle ) ;
        }
        return result ;
    }
}
public class NfsCreateResponse {
    /**
     * Decodes the NFS CREATE response from the XDR buffer: on success the file
     * handle and attributes are read first, then (in all cases) the directory
     * WCC data. Field order follows the NFSv3 wire format and must not change.
     *
     * @param xdr the XDR buffer positioned at the response payload
     * @throws RpcException if decoding fails
     * @see com.emc.ecs.nfsclient.nfs.NfsResponseBase#unmarshalling(com.emc.ecs.nfsclient.rpc.Xdr)
     */
    public void unmarshalling ( Xdr xdr ) throws RpcException {
        super . unmarshalling ( xdr ) ;
        if ( stateIsOk ( ) ) {
            // Success responses carry the new file's handle and attributes.
            unmarshallingFileHandle ( xdr ) ;
            unmarshallingAttributes ( xdr ) ;
        }
        // WCC data for the parent directory is present on both success and failure.
        _directoryWccData = new NfsWccData ( xdr ) ;
    }
}
public class Hashes {
    /**
     * Constant-time SpookyHash 4-word-state hashing reusing precomputed state
     * partially.
     *
     * @param bv           a bit vector.
     * @param prefixLength length in bits of the prefix of {@code bv} to hash.
     * @param seed         a seed for the hash.
     * @param state        the state vector returned by
     *                     {@link #preprocessSpooky4(BitVector, long)}; note that
     *                     {@code seed} must be the same.
     * @param tuple        a tuple of longs in which up to four generated hashes
     *                     will be saved.
     */
    @ SuppressWarnings ( { "fallthrough" } )
    public static void spooky4 ( final BitVector bv , final long prefixLength , final long seed , final long [ ] state , final long [ ] tuple ) {
        long h0 , h1 , h2 , h3 ;
        h0 = seed ;
        h1 = seed ;
        h2 = ARBITRARY_BITS ;
        h3 = ARBITRARY_BITS ;
        long pos ;
        if ( prefixLength >= 2 * Long . SIZE ) {
            // Resume from the precomputed state entry covering the largest
            // 4-word-aligned prefix not exceeding prefixLength.
            final int p = 4 * ( int ) ( ( prefixLength - 2 * Long . SIZE ) / ( 4 * Long . SIZE ) ) ;
            h0 = state [ p + 0 ] ;
            h1 = state [ p + 1 ] ;
            h2 = state [ p + 2 ] ;
            h3 = state [ p + 3 ] ;
            pos = p * Long . SIZE + 2 * Long . SIZE ;
        } else
            pos = 0 ;
        // Absorb the remaining (unprocessed) bits, at most 4 words.
        long remaining = prefixLength - pos ;
        if ( remaining >= Long . SIZE * 2 ) {
            h0 += bv . getLong ( pos + 0 * Long . SIZE , pos + 1 * Long . SIZE ) ;
            h1 += bv . getLong ( pos + 1 * Long . SIZE , pos + 2 * Long . SIZE ) ;
            remaining -= 2 * Long . SIZE ;
            pos += 2 * Long . SIZE ;
        }
        if ( remaining > Long . SIZE ) {
            h2 += bv . getLong ( pos + 0 * Long . SIZE , pos + 1 * Long . SIZE ) ;
            h3 += bv . getLong ( pos + 1 * Long . SIZE , prefixLength ) ;
        } else if ( remaining > 0 ) {
            h2 += bv . getLong ( pos , prefixLength ) ;
        } else {
            // Empty tail: pad with the arbitrary constant, as SpookyHash does.
            h2 += ARBITRARY_BITS ;
            h3 += ARBITRARY_BITS ;
        }
        h0 += prefixLength ;
        // SpookyHash "short end" finalization: three passes of mixing rounds.
        h3 ^= h2 ; h2 = Long . rotateLeft ( h2 , 15 ) ; h3 += h2 ;
        h0 ^= h3 ; h3 = Long . rotateLeft ( h3 , 52 ) ; h0 += h3 ;
        h1 ^= h0 ; h0 = Long . rotateLeft ( h0 , 26 ) ; h1 += h0 ;
        h2 ^= h1 ; h1 = Long . rotateLeft ( h1 , 51 ) ; h2 += h1 ;
        h3 ^= h2 ; h2 = Long . rotateLeft ( h2 , 28 ) ; h3 += h2 ;
        h0 ^= h3 ; h3 = Long . rotateLeft ( h3 , 9 ) ; h0 += h3 ;
        h1 ^= h0 ; h0 = Long . rotateLeft ( h0 , 47 ) ; h1 += h0 ;
        h2 ^= h1 ; h1 = Long . rotateLeft ( h1 , 54 ) ; h2 += h1 ;
        h3 ^= h2 ; h2 = Long . rotateLeft ( h2 , 32 ) ; h3 += h2 ;
        h0 ^= h3 ; h3 = Long . rotateLeft ( h3 , 25 ) ; h0 += h3 ;
        h1 ^= h0 ; h0 = Long . rotateLeft ( h0 , 63 ) ; h1 += h0 ;
        // Intentional fallthrough: fill as many result words as the caller asked for.
        switch ( tuple . length ) {
        case 4 :
            tuple [ 3 ] = h3 ;
        case 3 :
            tuple [ 2 ] = h2 ;
        case 2 :
            tuple [ 1 ] = h1 ;
        case 1 :
            tuple [ 0 ] = h0 ;
        }
    }
}
public class DeviceProxyDAODefaultImpl {
    /**
     * Populates a CORBA DII request for a read-attributes call: attribute names
     * first, then (optionally) the source and client identity, then the expected
     * return type and the DevFailed exception type. Argument order matters on the
     * wire and must match the server-side IDL.
     *
     * @param request     the DII request to populate
     * @param names       attribute names to read
     * @param src         optional device source (may be null)
     * @param ident       optional client identity (may be null)
     * @param return_type expected TypeCode of the reply
     */
    private void setRequestArgsForReadAttr ( final Request request , final String [ ] names , final DevSource src , final ClntIdent ident , final TypeCode return_type ) {
        Any any ;
        any = request . add_in_arg ( ) ;
        DevVarStringArrayHelper . insert ( any , names ) ;
        // Add source if any
        if ( src != null ) {
            any = request . add_in_arg ( ) ;
            DevSourceHelper . insert ( any , src ) ;
        }
        if ( ident != null ) {
            any = request . add_in_arg ( ) ;
            ClntIdentHelper . insert ( any , ident ) ;
        }
        request . set_return_type ( return_type ) ;
        request . exceptions ( ) . add ( DevFailedHelper . type ( ) ) ;
    }
}
public class DefaultCompanyProvider {
    /**
     * Derives the company domain from its name, escaping illegal hostname
     * characters and truncating the result if escaping made it too long
     * (length &gt; 10). Compatible with non-latin company names and leaves
     * latin names unchanged. (For CJK names, phonetic transcription would
     * be the ideal approach, but escaping is used here instead.)
     */
    @ Override
    public void generateDomain ( ) {
        // Idempotent: keep an already-generated domain.
        if ( domain != null ) {
            return ;
        }
        // Lowercase, drop whitespace, strip leading/trailing dots, remove slashes,
        // and fold accented characters to their ASCII base form.
        String host = TextUtils . stripAccents ( StringUtils . strip ( StringUtils . deleteWhitespace ( name . toLowerCase ( ) ) , "." ) . replace ( "/" , "" ) ) ;
        int len1 = host . length ( ) ;
        // Escape remaining non-latin characters to \\uXXXX and drop the "\\u" markers,
        // leaving only hex digits for those characters.
        host = StringEscapeUtils . escapeJava ( host ) . replaceAll ( "\\\\u" , "" ) ;
        int len2 = host . length ( ) ;
        // Only truncate when escaping actually grew the string (non-latin input).
        if ( len2 > len1 && len2 > 10 )
            host = host . substring ( 0 , 10 ) ;
        domain = host + "." + dataMaster . getRandomValue ( DOMAIN ) ;
    }
}
public class CertUtil {
    /**
     * Writes the certificate to the specified output stream in PEM format.
     * Thin delegate to {@link CertificateIOUtil#writeCertificate}.
     *
     * @param out  the destination stream (not closed by this method)
     * @param cert the certificate to write
     * @throws IOException                  if writing fails
     * @throws CertificateEncodingException if the certificate cannot be encoded
     */
    public static void writeCertificate ( OutputStream out , X509Certificate cert ) throws IOException , CertificateEncodingException {
        CertificateIOUtil . writeCertificate ( out , cert ) ;
    }
}
public class OptionUtil {
    /**
     * Formats a full parameter description: short description, value description,
     * default value (if any), and the list of constraints (if any), each section
     * terminated by a newline.
     *
     * @param param Parameter
     * @return Parameter description
     */
    public static String getFullDescription ( Parameter < ? > param ) {
        StringBuilder description = new StringBuilder ( 1000 ) . append ( param . getShortDescription ( ) ) . append ( FormatUtil . NEWLINE ) ;
        param . describeValues ( description ) ;
        // describeValues may or may not end with a newline; normalize.
        if ( ! FormatUtil . endsWith ( description , FormatUtil . NEWLINE ) ) {
            description . append ( FormatUtil . NEWLINE ) ;
        }
        if ( param . hasDefaultValue ( ) ) {
            description . append ( "Default: " ) . append ( param . getDefaultValueAsString ( ) ) . append ( FormatUtil . NEWLINE ) ;
        }
        List < ? extends ParameterConstraint < ? > > constraints = param . getConstraints ( ) ;
        if ( constraints != null && ! constraints . isEmpty ( ) ) {
            // Singular/plural header, then comma-separated constraint descriptions.
            description . append ( ( constraints . size ( ) == 1 ) ? "Constraint: " : "Constraints: " ) . append ( constraints . get ( 0 ) . getDescription ( param . getOptionID ( ) . getName ( ) ) ) ;
            for ( int i = 1 ; i < constraints . size ( ) ; i ++ ) {
                description . append ( ", " ) . append ( constraints . get ( i ) . getDescription ( param . getOptionID ( ) . getName ( ) ) ) ;
            }
            description . append ( FormatUtil . NEWLINE ) ;
        }
        return description . toString ( ) ;
    }
}
public class AmazonCloudSearchDomainClient {
    /**
     * Retrieves autocomplete suggestions for a partial query string, based on the
     * contents of the suggester's text field (the beginning of the field must
     * match the query string). Suggest requests are submitted to a domain's
     * search endpoint; use the configuration service's {@code DescribeDomains}
     * action to obtain it.
     *
     * @param request Container for the parameters to the <code>Suggest</code> request.
     * @return Result of the Suggest operation returned by the service.
     * @throws SearchException Information about any problems encountered while processing a search request.
     * @sample AmazonCloudSearchDomain.Suggest
     */
    @ Override
    public SuggestResult suggest ( SuggestRequest request ) {
        // Standard SDK pattern: run request handlers, then the generated executor.
        request = beforeClientExecution ( request ) ;
        return executeSuggest ( request ) ;
    }
}
public class CheckSumUtils { /** * Returns the MD5 Checksum of the input stream * @ param is * the input stream * @ return the Checksum of the input stream * @ throws IOException * if an IO exception occurs */ public static String getMD5Checksum ( InputStream is ) throws IOException { } }
byte [ ] digest = null ; try { MessageDigest md = MessageDigest . getInstance ( JawrConstant . MD5_ALGORITHM ) ; InputStream digestIs = new DigestInputStream ( is , md ) ; // read stream to EOF as normal . . . while ( digestIs . read ( ) != - 1 ) { } digest = md . digest ( ) ; } catch ( NoSuchAlgorithmException e ) { throw new BundlingProcessException ( "MD5 algorithm needs to be installed" , e ) ; } return new BigInteger ( 1 , digest ) . toString ( 16 ) ;
public class CmsGroupTable {
    /**
     * Updates the app to display the group identified by the given UUID.<p>
     *
     * @param uuid of current group
     */
    protected void updateApp ( String uuid ) {
        try {
            CmsGroup group = m_cms . readGroup ( new CmsUUID ( uuid ) ) ;
            m_app . update ( group . getOuFqn ( ) , m_type , group . getId ( ) , "" ) ;
        } catch ( CmsException e ) {
            // Best effort: an unreadable group is logged, not propagated.
            LOG . error ( "unable to read group." , e ) ;
        }
    }
}
public class ObservableSupport {
    /**
     * Registers a listener to be notified of observable changes.
     *
     * @param listener the listener to add; must not be null
     * @throws NullPointerException if {@code listener} is null
     */
    public void addObservableListener ( ObservableListener < T > listener ) {
        checkNotNull ( listener , "listener cannot be null" ) ;
        listeners . add ( ObservableListener . class , listener ) ;
    }
}
public class FTPServerFacade {
    /**
     * Converts the exception to a negative 451 reply ("Requested action aborted:
     * local error in processing") containing the message, the exception summary,
     * and its full stack trace, and writes it to the provided control channel.
     *
     * @param e       the cause to report
     * @param msg     a human-readable message prefixed to the reply
     * @param control the control channel that receives the 451 reply
     */
    public static void exceptionToControlChannel ( Throwable e , String msg , BasicServerControlChannel control ) {
        // Capture the stack trace as a string via a StringWriter-backed PrintWriter.
        java . io . StringWriter writer = new java . io . StringWriter ( ) ;
        e . printStackTrace ( new java . io . PrintWriter ( writer ) ) ;
        String stack = writer . toString ( ) ;
        // 451 Requested action aborted : local error in processing .
        LocalReply reply = new LocalReply ( 451 , msg + "\n" + e . toString ( ) + "\n" + stack ) ;
        control . write ( reply ) ;
    }
}
public class RaftNodeImpl {
    /**
     * Executes the query operation at the current commit index and sets the
     * execution result on the given future.
     *
     * @param operation    the query operation to run
     * @param resultFuture the future that receives the operation's result
     */
    public void runQueryOperation ( Object operation , SimpleCompletableFuture resultFuture ) {
        long commitIndex = state . commitIndex ( ) ;
        Object result = raftIntegration . runOperation ( operation , commitIndex ) ;
        resultFuture . setResult ( result ) ;
    }
}
public class FunctionExpression { /** * Get the idx parameter from the parameter list as double . * @ param idx * the index starting with 0 * @ param defaultValue * the result if such a parameter idx does not exists . * @ param formatter * current formatter * @ return the expression */ private double getDouble ( int idx , double defaultValue , CssFormatter formatter ) { } }
if ( parameters . size ( ) <= idx ) { return defaultValue ; } return parameters . get ( idx ) . doubleValue ( formatter ) ;
public class SdkInstaller {
    /**
     * Configure and create a new Installer instance.
     *
     * @param managedSdkDirectory home directory of google cloud java managed cloud SDKs
     * @param version             version of the Cloud SDK we want to install
     * @param osInfo              target operating system for installation
     * @param userAgentString     user agent string for https requests
     * @param usageReporting      enable client side usage reporting on gcloud
     * @return a new configured Cloud SDK Installer
     */
    public static SdkInstaller newInstaller ( Path managedSdkDirectory , Version version , OsInfo osInfo , String userAgentString , boolean usageReporting ) {
        DownloaderFactory downloaderFactory = new DownloaderFactory ( userAgentString ) ;
        ExtractorFactory extractorFactory = new ExtractorFactory ( ) ;
        // Only the LATEST channel runs the post-extraction installer step;
        // fixed-version archives are self-contained, so no installer is needed.
        InstallerFactory installerFactory = version == Version . LATEST ? new InstallerFactory ( osInfo , usageReporting ) : null ;
        FileResourceProviderFactory fileResourceProviderFactory = new FileResourceProviderFactory ( version , osInfo , managedSdkDirectory ) ;
        return new SdkInstaller ( fileResourceProviderFactory , downloaderFactory , extractorFactory , installerFactory ) ;
    }
}
public class GrailsHibernateUtil {
    /**
     * Configures the criteria instance to cache based on the configured mapping:
     * caching is enabled only when the domain class mapping declares an enabled
     * cache; otherwise the criteria is left untouched.
     *
     * @param targetClass The target class
     * @param criteria    The criteria
     */
    public static void cacheCriteriaByMapping ( Class < ? > targetClass , Criteria criteria ) {
        Mapping m = GrailsDomainBinder . getMapping ( targetClass ) ;
        if ( m != null && m . getCache ( ) != null && m . getCache ( ) . getEnabled ( ) ) {
            criteria . setCacheable ( true ) ;
        }
    }
}
public class Element {
    /**
     * Add element Attributes. The attributes are added to the Element attributes
     * (separated with a space). The attributes are available to the derived class
     * in the protected member String <I>attributes</I>.
     *
     * @param attributes String of HTML attributes to add to the element.
     *                   A null attribute clears the current attributes.
     * @return This Element so calls can be chained.
     */
    public Element attribute ( String attributes ) {
        // Legacy-usage warning: a '=' suggests key=value pairs passed through the
        // old raw-string API rather than the typed attribute methods.
        if ( log . isDebugEnabled ( ) && attributes != null && attributes . indexOf ( '=' ) >= 0 )
            log . warn ( "Set attribute with old method: " + attributes + " on " + getClass ( ) . getName ( ) ) ;
        // Replace when clearing (null) or when nothing meaningful is stored yet;
        // otherwise append with a separating space.
        if ( attributes == null || this . attributes == null || this . attributes == noAttributes || this . attributes . length ( ) == 0 )
            this . attributes = attributes ;
        else
            this . attributes += ' ' + attributes ;
        return this ;
    }
}
public class Common {
    /**
     * Write a transaction to the Log, but only when logging is enabled in the
     * main configuration ({@code System.Logging.Enabled}).
     *
     * @param info        The type of transaction to log.
     * @param cause       The cause of the transaction.
     * @param causeReason The reason of the cause
     * @param account     The account being impacted by the change
     * @param amount      The amount of money in this transaction.
     * @param currency    The currency associated with this transaction
     * @param worldName   The world name associated with this transaction
     */
    public void writeLog ( LogInfo info , Cause cause , String causeReason , Account account , double amount , Currency currency , String worldName ) {
        if ( getMainConfig ( ) . getBoolean ( "System.Logging.Enabled" ) ) {
            getStorageHandler ( ) . getStorageEngine ( ) . saveLog ( info , cause , causeReason , account , amount , currency , worldName ) ;
        }
    }
}
public class SingleValueReference {
    /**
     * Sets a string value on the bean: converts the string through the known
     * conversion rule, then writes the result via the value reference.
     *
     * @param bean  Bean to set value to
     * @param value String expression of value to set
     */
    void setValue ( T bean , String value ) {
        ref . writeValue ( converter . fromCharacters ( value ) , bean ) ;
    }
}
public class TextReport {
    /**
     * Applies every configured {@link StackTraceFilter} (see
     * {@code addConfigured(StackTraceFilter)}) to the trace, in order, and
     * returns the filtered result.
     *
     * @param trace the raw stack trace text
     * @return the trace after all filters have been applied
     */
    private String filterStackTrace ( String trace ) {
        for ( StackTraceFilter filter : stackFilters ) {
            trace = filter . apply ( trace ) ;
        }
        return trace ;
    }
}
public class ResourceConverter {
    /**
     * Creates relationship object by consuming provided 'data' node. Previously
     * parsed resources are served from the resource cache; otherwise the object
     * is parsed fresh with the cache locked so relationship objects themselves
     * are never cached.
     *
     * @param relationshipDataNode relationship data node
     * @param type                 object type
     * @return created object or <code>null</code> in case data node is not valid
     * @throws IOException
     * @throws IllegalAccessException
     * @throws InstantiationException
     */
    private Object parseRelationship ( JsonNode relationshipDataNode , Class < ? > type ) throws IOException , IllegalAccessException , InstantiationException {
        if ( ValidationUtils . isRelationshipParsable ( relationshipDataNode ) ) {
            String identifier = createIdentifier ( relationshipDataNode ) ;
            if ( resourceCache . contains ( identifier ) ) {
                return resourceCache . get ( identifier ) ;
            } else {
                // Never cache relationship objects: lock the cache while parsing,
                // and guarantee unlock even if readObject throws.
                resourceCache . lock ( ) ;
                try {
                    return readObject ( relationshipDataNode , type , true ) ;
                } finally {
                    resourceCache . unlock ( ) ;
                }
            }
        }
        return null ;
    }
}
public class CacheSpec { /** * Gets cache type . * @ return the cache type */ public CacheEngine getEngine ( ) { } }
if ( StringUtils . isNullOrBlank ( cacheEngine ) ) { return CacheEngines . get ( "Guava" ) ; } return CacheEngines . get ( cacheEngine ) ;
public class AmazonRoute53DomainsClient {
    /**
     * Sets the transfer lock on the domain (the {@code clientTransferProhibited}
     * status) to prevent domain transfers. Successful submission returns an
     * operation ID that can be used to track progress; on failure the domain
     * registrant is notified by email.
     *
     * @param request A request to set the transfer lock for the specified domain.
     * @return Result of the EnableDomainTransferLock operation returned by the service.
     * @throws InvalidInputException if the requested item is not acceptable
     * @throws DuplicateRequestException if the request is already in progress for the domain
     * @throws TLDRulesViolationException if the top-level domain does not support this operation
     * @throws OperationLimitExceededException if the number of operations or jobs running exceeded the allowed threshold
     * @throws UnsupportedTLDException if Amazon Route 53 does not support this top-level domain (TLD)
     * @sample AmazonRoute53Domains.EnableDomainTransferLock
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/route53domains-2014-05-15/EnableDomainTransferLock" target="_top">AWS API Documentation</a>
     */
    @ Override
    public EnableDomainTransferLockResult enableDomainTransferLock ( EnableDomainTransferLockRequest request ) {
        // Standard SDK pattern: run request handlers, then the generated executor.
        request = beforeClientExecution ( request ) ;
        return executeEnableDomainTransferLock ( request ) ;
    }
}
public class HarrisFast {
    /**
     * Generates and filters corner points: computes image derivatives and the
     * Harris response map, collects spatial maxima above a small threshold as
     * candidate corners, then removes corners that lie closer than
     * {@code minDistance} to a stronger corner (only the strongest corner in
     * each neighborhood is kept).
     *
     * @param sigma       Gaussian smoothing used for the derivatives
     * @param k           Harris detector sensitivity parameter
     * @param minDistance neighborhood radius within which only the corner with
     *                    the largest response survives
     */
    public void filter ( double sigma , double k , int minDistance ) {
        computeDerivatives ( sigma ) ;
        // fastComputeDerivatives ( ) ;
        float [ ] [ ] harrismap = computeHarrisMap ( k ) ;
        // Collect candidates: interior pixels with response above 1E-3 that are
        // local spatial maxima of the Harris map.
        for ( int y = 1 ; y < height - 1 ; y ++ ) {
            for ( int x = 1 ; x < width - 1 ; x ++ ) {
                float h = harrismap [ x ] [ y ] ;
                if ( h <= 1E-3 )
                    continue ;
                if ( ! isSpatialMaxima ( harrismap , x , y ) )
                    continue ;
                corners . add ( new Corner ( x , y , h ) ) ;
            }
        }
        // System . out . println ( corners . size ( ) + " potential corners found . " ) ;
        // remove corners too close to each other (keep the highest measure):
        // a corner is dropped as soon as a neighbor within minDistance has an
        // equal or higher response.
        Iterator < Corner > iter = corners . iterator ( ) ;
        while ( iter . hasNext ( ) ) {
            Corner p = iter . next ( ) ;
            for ( Corner n : corners ) {
                if ( n == p )
                    continue ;
                int dist = ( int ) Math . sqrt ( ( p . x - n . x ) * ( p . x - n . x ) + ( p . y - n . y ) * ( p . y - n . y ) ) ;
                if ( dist > minDistance )
                    continue ;
                if ( n . h < p . h )
                    continue ;
                iter . remove ( ) ;
                break ;
            }
        }
        // output
        /*
         * int [ ] [ ] output = new int [ width ] [ height ] ; for ( int y = 0 ; y < height ;
         * y + + ) for ( int x = 0 ; x < width ; x + + ) output [ x ] [ y ] = ( int )
         * ( image [ x ] [ y ] * 0.75 ) ; / / original image / / ( darker )
         * / / for each corner for ( Corner p : corners ) { / / add the cross sign
         * over the image for ( int dt = - 3 ; dt < = 3 ; dt + + ) { if ( p . x + dt > = 0
         * & & p . x + dt < width ) output [ p . x + dt ] [ p . y ] = 255 ; if ( p . y + dt > = 0
         * & & p . y + dt < height ) output [ p . x ] [ p . y + dt ] = 255 ; }
         * System . out . println ( " corner found at : " + p . x + " , " + p . y + " ( " + p . h
         * + " ) " ) ; } System . out . println ( corners . size ( ) + " corners found . " ) ;
         * return output ;
         */
    }
}
public class POICellFormatter {
    /**
     * Creates the format result for a blank cell: type {@code Blank} with an
     * empty text value.
     *
     * @since 0.7
     * @return a new {@link CellFormatResult} representing a blank cell
     */
    private CellFormatResult createBlankCellResult ( ) {
        CellFormatResult result = new CellFormatResult ( ) ;
        result . setCellType ( FormatCellType . Blank ) ;
        result . setText ( "" ) ;
        return result ;
    }
}
public class LocalisationManager {
    /**
     * Adds the outputHandler to the set of queue point output handlers, keyed by
     * the owning messaging engine's UUID.
     *
     * @param outputHandler       the handler to register
     * @param messagingEngineUuid UUID of the messaging engine owning the queue point
     */
    public void assignQueuePointOutputHandler ( OutputHandler outputHandler , SIBUuid8 messagingEngineUuid ) {
        if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) )
            SibTr . entry ( tc , "assignQueuePointOutputHandler" , new Object [ ] { outputHandler , messagingEngineUuid } ) ;
        // Map access is synchronized: handlers may be assigned concurrently.
        synchronized ( _queuePointOutputHandlers ) {
            _queuePointOutputHandlers . put ( messagingEngineUuid , outputHandler ) ;
        }
        if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) )
            SibTr . exit ( tc , "assignQueuePointOutputHandler" ) ;
    }
}
public class CmsCategoryTreeEntry {
    /**
     * Gets the title of the category, falling back when the title is blank.<p>
     *
     * NOTE(review): despite the javadoc saying "name", the fallback here is
     * {@code getPath()} — confirm whether the path or the name is intended.
     *
     * @return the title, or the category path if the title is empty/whitespace
     */
    public Object getTitleOrName ( ) {
        String result = getTitle ( ) ;
        if ( CmsStringUtil . isEmptyOrWhitespaceOnly ( result ) ) {
            result = getPath ( ) ;
        }
        return result ;
    }
}
public class AmazonWorkMailClient {
    /**
     * Lists the mailbox permissions associated with a user, group, or resource mailbox.
     *
     * @param request the ListMailboxPermissions request
     * @return Result of the ListMailboxPermissions operation returned by the service.
     * @throws EntityNotFoundException if the supplied identifier does not exist in the organization
     * @throws InvalidParameterException if one or more input parameters don't match the service's restrictions
     * @throws OrganizationNotFoundException if the organization identifier doesn't belong or exist in the system
     * @throws OrganizationStateException if the organization is not in a valid state (Active or Synchronizing)
     * @sample AmazonWorkMail.ListMailboxPermissions
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/workmail-2017-10-01/ListMailboxPermissions" target="_top">AWS API Documentation</a>
     */
    @ Override
    public ListMailboxPermissionsResult listMailboxPermissions ( ListMailboxPermissionsRequest request ) {
        // Standard SDK pattern: run request handlers, then the generated executor.
        request = beforeClientExecution ( request ) ;
        return executeListMailboxPermissions ( request ) ;
    }
}
public class AbstractCIBase {
    /**
     * Reuses or creates the {@link Computer} for the given node, marking it as
     * used. An existing Computer (matched by node name) is re-bound to the node;
     * otherwise a new Computer is created for nodes with executors (or the
     * master) and optionally launched via its retention strategy.
     * (Computer API uses package protection heavily.)
     *
     * @param n                    the node to update
     * @param byNameMap            existing Computers keyed by node name
     * @param used                 receives every Computer that remains in use
     * @param automaticSlaveLaunch whether newly created Computers may be launched automatically
     */
    private void updateComputer ( Node n , Map < String , Computer > byNameMap , Set < Computer > used , boolean automaticSlaveLaunch ) {
        Map < Node , Computer > computers = getComputerMap ( ) ;
        Computer c ;
        c = byNameMap . get ( n . getNodeName ( ) ) ;
        if ( c != null ) {
            try {
                c . setNode ( n ) ; // reuse
                used . add ( c ) ;
            } catch ( RuntimeException e ) {
                LOGGER . log ( Level . WARNING , "Error updating node " + n . getNodeName ( ) + ", continuing" , e ) ;
            }
        } else {
            // we always need Computer for the master as a fallback in case there ' s no other Computer .
            if ( n . getNumExecutors ( ) > 0 || n == Jenkins . getInstance ( ) ) {
                try {
                    c = n . createComputer ( ) ;
                } catch ( RuntimeException ex ) {
                    // Just in case there is a bogus extension
                    LOGGER . log ( Level . WARNING , "Error retrieving computer for node " + n . getNodeName ( ) + ", continuing" , ex ) ;
                }
                if ( c == null ) {
                    LOGGER . log ( Level . WARNING , "Cannot create computer for node {0}, the {1}#createComputer() method returned null. Skipping this node" , new Object [ ] { n . getNodeName ( ) , n . getClass ( ) . getName ( ) } ) ;
                    return ;
                }
                computers . put ( n , c ) ;
                if ( ! n . isHoldOffLaunchUntilSave ( ) && automaticSlaveLaunch ) {
                    RetentionStrategy retentionStrategy = c . getRetentionStrategy ( ) ;
                    if ( retentionStrategy != null ) {
                        // if there is a retention strategy , it is responsible for deciding to start the computer
                        retentionStrategy . start ( c ) ;
                    } else {
                        // we should never get here , but just in case , we ' ll fall back to the legacy behaviour
                        c . connect ( true ) ;
                    }
                }
                used . add ( c ) ;
            } else {
                // TODO : Maybe it should be allowed , but we would just get NPE in the original logic before JENKINS - 43496
                LOGGER . log ( Level . WARNING , "Node {0} has no executors. Cannot update the Computer instance of it" , n . getNodeName ( ) ) ;
            }
        }
    }
}
public class FacesConfigRenderKitTypeImpl {
    /**
     * Returns all <code>renderer</code> child elements, each wrapped in a typed
     * descriptor bound to this render-kit element.
     *
     * @return list of <code>renderer</code> descriptors (possibly empty)
     */
    public List < FacesConfigRendererType < FacesConfigRenderKitType < T > > > getAllRenderer ( ) {
        List < FacesConfigRendererType < FacesConfigRenderKitType < T > > > list = new ArrayList < FacesConfigRendererType < FacesConfigRenderKitType < T > > > ( ) ;
        List < Node > nodeList = childNode . get ( "renderer" ) ;
        for ( Node node : nodeList ) {
            FacesConfigRendererType < FacesConfigRenderKitType < T > > type = new FacesConfigRendererTypeImpl < FacesConfigRenderKitType < T > > ( this , "renderer" , childNode , node ) ;
            list . add ( type ) ;
        }
        return list ;
    }
}
public class Reporter {
    /**
     * Records the performed check as a pass to the output file. If a wait was
     * performed ({@code waitFor} &gt; 0), that wait is recorded as the action;
     * otherwise no action is recorded. The check is the expected outcome and the
     * actual input is the actual result, adjusted to reflect elapsed time when
     * {@code timeTook} is greater than zero. A screenshot is taken for
     * traceability when a 'real' browser is in use.
     *
     * @param check    the check being performed
     * @param waitFor  how long was something waited for; 0 means no wait and no action
     * @param actual   the actual outcome from the check
     * @param timeTook how long something took to run; 0 leaves actual unaltered
     */
    public void pass ( String check , double waitFor , String actual , double timeTook ) {
        passes ++ ;
        recordStep ( getAction ( check , waitFor ) , "Expected " + check , getActual ( actual , timeTook ) , true , Success . PASS ) ;
    }
}
public class S3LocationMarshaller {
    /**
     * Marshalls the given {@link S3Location} into the protocol marshaller,
     * binding each field to its wire location.
     *
     * @param s3Location         the object to marshall; must not be null
     * @param protocolMarshaller target marshaller
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall ( S3Location s3Location , ProtocolMarshaller protocolMarshaller ) {
        if ( s3Location == null ) {
            throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ;
        }
        try {
            protocolMarshaller . marshall ( s3Location . getBucketName ( ) , BUCKETNAME_BINDING ) ;
            protocolMarshaller . marshall ( s3Location . getPrefix ( ) , PREFIX_BINDING ) ;
            protocolMarshaller . marshall ( s3Location . getEncryption ( ) , ENCRYPTION_BINDING ) ;
            protocolMarshaller . marshall ( s3Location . getCannedACL ( ) , CANNEDACL_BINDING ) ;
            protocolMarshaller . marshall ( s3Location . getAccessControlList ( ) , ACCESSCONTROLLIST_BINDING ) ;
            protocolMarshaller . marshall ( s3Location . getTagging ( ) , TAGGING_BINDING ) ;
            protocolMarshaller . marshall ( s3Location . getUserMetadata ( ) , USERMETADATA_BINDING ) ;
            protocolMarshaller . marshall ( s3Location . getStorageClass ( ) , STORAGECLASS_BINDING ) ;
        } catch ( Exception e ) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ;
        }
    }
}
public class OMVRBTree { /** * Returns the predecessor of the specified Entry , or null if no such . */ public static < K , V > OMVRBTreeEntry < K , V > predecessor ( final OMVRBTreeEntry < K , V > t ) { } }
if ( t == null ) return null ; else if ( t . getLeft ( ) != null ) { OMVRBTreeEntry < K , V > p = t . getLeft ( ) ; while ( p . getRight ( ) != null ) p = p . getRight ( ) ; return p ; } else { OMVRBTreeEntry < K , V > p = t . getParent ( ) ; Entry < K , V > ch = t ; while ( p != null && ch == p . getLeft ( ) ) { ch = p ; p = p . getParent ( ) ; } return p ; }
public class TCAPProviderImpl {
    /**
     * Creates a new locally-originated dialog between the given SCCP addresses,
     * updates the established-dialog counters when statistics are enabled, and
     * binds the dialog to the local subsystem number.
     *
     * @param localAddress  local SCCP address
     * @param remoteAddress remote SCCP address
     * @return the new dialog
     * @throws TCAPException if the dialog cannot be created
     * @see org.restcomm.protocols.ss7.tcap.api.TCAPProvider#getNewDialog(org.mobicents.protocols.ss7.sccp.parameter.SccpAddress, org.restcomm.protocols.ss7.sccp.parameter.SccpAddress)
     */
    public Dialog getNewDialog ( SccpAddress localAddress , SccpAddress remoteAddress ) throws TCAPException {
        DialogImpl res = getNewDialog ( localAddress , remoteAddress , getNextSeqControl ( ) , null ) ;
        if ( this . stack . getStatisticsEnabled ( ) ) {
            this . stack . getCounterProviderImpl ( ) . updateAllLocalEstablishedDialogsCount ( ) ;
            this . stack . getCounterProviderImpl ( ) . updateAllEstablishedDialogsCount ( ) ;
        }
        this . setSsnToDialog ( res , localAddress . getSubsystemNumber ( ) ) ;
        return res ;
    }
}
public class CmsUgcSession {
    /**
     * Adds the given values to the content document.<p>
     *
     * Paths mapped to {@code null} are deleted first (in reverse index order so
     * that removing an indexed sibling does not shift later targets); all
     * remaining non-null values are then written in regular order.
     *
     * @param content the content document
     * @param locale the content locale
     * @param contentValues map from xpath to value; a {@code null} value marks the path for deletion
     * @throws CmsXmlException if writing the XML fails
     */
    protected void addContentValues(CmsXmlContent content, Locale locale, Map<String, String> contentValues) throws CmsXmlException {
        if (!content.hasLocale(locale)) {
            content.addLocale(m_cms, locale);
        }
        List<String> paths = new ArrayList<String>(contentValues.keySet());
        // first delete all null values
        // use reverse index ordering for similar elements
        Collections.sort(paths, new PathComparator(true));
        // sentinel prefix that no real xpath starts with
        String lastDelete = "///";
        for (String path : paths) {
            // skip values where the parent node has been deleted
            // NOTE(review): the startsWith prefix test could also match a sibling
            // path that merely shares a textual prefix -- confirm the xpath format
            // (e.g. trailing index brackets) rules this out
            if ((contentValues.get(path) == null) && !path.startsWith(lastDelete)) {
                lastDelete = path;
                deleteContentValue(content, locale, path);
            }
        }
        // now add the new or changed values
        // use regular ordering
        Collections.sort(paths, new PathComparator(false));
        for (String path : paths) {
            String value = contentValues.get(path);
            if (value != null) {
                addContentValue(content, locale, path, value);
            }
        }
    }
}
public class SftpFsHelper { /** * Executes a get SftpCommand and returns an input stream to the file * @ param cmd is the command to execute * @ param sftp is the channel to execute the command on * @ throws SftpException */ @ Override public InputStream getFileStream ( String file ) throws FileBasedHelperException { } }
SftpGetMonitor monitor = new SftpGetMonitor ( ) ; try { ChannelSftp channel = getSftpChannel ( ) ; return new SftpFsFileInputStream ( channel . get ( file , monitor ) , channel ) ; } catch ( SftpException e ) { throw new FileBasedHelperException ( "Cannot download file " + file + " due to " + e . getMessage ( ) , e ) ; }
public class StrictDateTimeField { /** * Does a bounds check before setting the value . * @ throws IllegalArgumentException if the value is invalid */ public long set ( long instant , int value ) { } }
FieldUtils . verifyValueBounds ( this , value , getMinimumValue ( instant ) , getMaximumValue ( instant ) ) ; return super . set ( instant , value ) ;
public class XMLDatabase { /** * { @ inheritDoc } */ @ Override public void moveUpTextNodes ( ITextNode [ ] textNodes ) { } }
for ( ITextNode textNode : textNodes ) { int index = textNodeList . indexOf ( textNode ) ; if ( index > 0 ) { ITextNode previousTextNode = textNodeList . get ( index - 1 ) ; textNodeList . set ( index , previousTextNode ) ; textNodeList . set ( index - 1 , textNode ) ; } } fireTextNodesMoved ( textNodes ) ;
public class CommerceNotificationQueueEntryPersistenceImpl { /** * Returns the last commerce notification queue entry in the ordered set where sent = & # 63 ; . * @ param sent the sent * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching commerce notification queue entry , or < code > null < / code > if a matching commerce notification queue entry could not be found */ @ Override public CommerceNotificationQueueEntry fetchBySent_Last ( boolean sent , OrderByComparator < CommerceNotificationQueueEntry > orderByComparator ) { } }
int count = countBySent ( sent ) ; if ( count == 0 ) { return null ; } List < CommerceNotificationQueueEntry > list = findBySent ( sent , count - 1 , count , orderByComparator ) ; if ( ! list . isEmpty ( ) ) { return list . get ( 0 ) ; } return null ;
public class LPIntegerNormDistanceFunction { /** * Compute unscaled distance in a range of dimensions . * @ param v1 First object * @ param v2 Second object * @ param start First dimension * @ param end Exclusive last dimension * @ return Aggregated values . */ private double preDistance ( NumberVector v1 , NumberVector v2 , final int start , final int end ) { } }
double agg = 0. ; for ( int d = start ; d < end ; d ++ ) { final double xd = v1 . doubleValue ( d ) , yd = v2 . doubleValue ( d ) ; final double delta = xd >= yd ? xd - yd : yd - xd ; agg += MathUtil . powi ( delta , intp ) ; } return agg ;
public class DoubleMatrix { /** * Aij - = v * @ param i * @ param j * @ param v */ public void sub ( int i , int j , double v ) { } }
consumer . set ( i , j , supplier . get ( i , j ) - v ) ;
public class ByteBuddy {
    /**
     * Rebases the given type where any intercepted method that is declared by the redefined type
     * is preserved within the rebased type's class such that the class's original can be invoked
     * from the new method implementations. Rebasing a type can be seen similarly to creating a
     * subclass where the subclass is later merged with the original class file.
     *
     * @param type the type that is being rebased
     * @param classFileLocator the class file locator that is queried for the rebased type's class file
     * @param methodNameTransformer the method name transformer for renaming a method that is rebased
     * @param <T> the loaded type of the rebased type
     * @return a type builder for rebasing the provided type
     * @throws IllegalArgumentException if the type is an array or a primitive (neither has a class file to rebase)
     */
    public <T> DynamicType.Builder<T> rebase(TypeDescription type, ClassFileLocator classFileLocator, MethodNameTransformer methodNameTransformer) {
        // Arrays and primitives carry no class file, so there is nothing to rebase.
        if (type.isArray() || type.isPrimitive()) {
            throw new IllegalArgumentException("Cannot rebase array or primitive type: " + type);
        }
        // Builder is configured from this instance's strategies; argument order is
        // fixed by the RebaseDynamicTypeBuilder constructor.
        return new RebaseDynamicTypeBuilder<T>(instrumentedTypeFactory.represent(type), classFileVersion, auxiliaryTypeNamingStrategy, annotationValueFilterFactory, annotationRetention, implementationContextFactory, methodGraphCompiler, typeValidation, visibilityBridgeStrategy, classWriterStrategy, ignoredMethods, type, classFileLocator, methodNameTransformer);
    }
}
public class InternalService {
    /**
     * Applies the given profile patch if the required permission is granted.
     *
     * NOTE(review): the {@code profileId} argument is not used by this body --
     * the profile id is taken from the stored session instead; confirm this is
     * intended.
     *
     * @param profileId id of a profile to patch
     * @param profileDetails profile details
     * @param eTag ETag for the server to check if the local version of the data matches the server side
     * @return observable performing the profile update for the current session
     */
    @Override
    public Observable<ComapiResult<Map<String, Object>>> patchProfile(@NonNull final String profileId, @NonNull final Map<String, Object> profileDetails, final String eTag) {
        final String token = getToken();
        if (sessionController.isCreatingSession()) {
            // Session handshake in flight: defer the patch until the session is ready.
            return getTaskQueue().queuePatchProfile(profileDetails, eTag);
        } else if (TextUtils.isEmpty(token)) {
            // No valid token: surface a session-state error to the subscriber.
            return Observable.error(getSessionStateErrorDescription());
        } else {
            return doPatchProfile(token, dataMgr.getSessionDAO().session().getProfileId(), profileDetails, eTag);
        }
    }
}
public class SchemaToJava { /** * Create the Java */ private static void createCode ( String packageName , String className , ObjectSchema schema , Set < SyntaxToJavaClass . ClassInfo > imports , File outputFile ) throws IOException , TemplateException { } }
Configuration freeMarkerConfiguration = new Configuration ( ) ; freeMarkerConfiguration . setClassForTemplateLoading ( DEFAULT_LOADER_CLASS , "" ) ; freeMarkerConfiguration . setObjectWrapper ( new DefaultObjectWrapper ( ) ) ; // Build the model for FreeMarker Map < String , Object > model = new HashMap < String , Object > ( ) ; model . put ( "package" , packageName ) ; model . put ( "class" , className ) ; model . put ( "schema" , schema ) ; model . put ( "imports" , imports ) ; // Have FreeMarker process the model with the template Template template = freeMarkerConfiguration . getTemplate ( TEMPLATE_FILE ) ; if ( LOG . isDebugEnabled ( ) ) { Writer out = new OutputStreamWriter ( System . out ) ; template . process ( model , out ) ; out . flush ( ) ; } LOG . debug ( String . format ( "Writing java to: %1$s" , outputFile . getAbsolutePath ( ) ) ) ; FileOutputStream outputStream = new FileOutputStream ( outputFile ) ; Writer out = new OutputStreamWriter ( outputStream ) ; template . process ( model , out ) ; out . flush ( ) ; out . close ( ) ;
public class BehaviorHistoryInfo {
    /**
     * Average file count across the recorded stat items.
     *
     * (Original javadoc said "deleteCount" -- presumably a copy-paste slip; the
     * body aggregates {@code getFileCount()}. Confirm against sibling getters.)
     *
     * For items whose interval has non-zero length, the per-item contribution is
     * a rate: fileCount * 1000 / intervalMillis (integer arithmetic, so the
     * result truncates). Returns 0 when there are no items.
     *
     * @return the average, or 0L if no items are recorded
     */
    public Long getFileCountAvg() {
        Long fileCountAvg = 0L;
        if (items.size() != 0) {
            for (TableStat item : items) {
                if (item.getEndTime().equals(item.getStartTime())) {
                    // Zero-length interval: use the raw count to avoid dividing by zero.
                    fileCountAvg += item.getFileCount();
                } else {
                    // Count per second: interval is in milliseconds, hence the * 1000.
                    fileCountAvg += item.getFileCount() * 1000 / (item.getEndTime().getTime() - item.getStartTime().getTime());
                }
            }
            fileCountAvg = fileCountAvg / items.size();
        }
        return fileCountAvg;
    }
}
public class AWSDataSyncClient { /** * Returns metadata such as the name , the network interfaces , and the status ( that is , whether the agent is running * or not ) for an agent . To specify which agent to describe , use the Amazon Resource Name ( ARN ) of the agent in your * request . * @ param describeAgentRequest * DescribeAgent * @ return Result of the DescribeAgent operation returned by the service . * @ throws InvalidRequestException * This exception is thrown when the client submits a malformed request . * @ sample AWSDataSync . DescribeAgent * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / datasync - 2018-11-09 / DescribeAgent " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DescribeAgentResult describeAgent ( DescribeAgentRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeAgent ( request ) ;
public class DefaultGroovyMethods { /** * Truncate the value * @ param number a Float * @ return the Float truncated to 0 decimal places * @ since 1.6.0 */ public static float trunc ( Float number ) { } }
if ( number < 0f ) { return ( float ) Math . ceil ( number . doubleValue ( ) ) ; } return ( float ) Math . floor ( number . doubleValue ( ) ) ;
public class BeanUtil { /** * 获得Bean类属性描述 * @ param clazz Bean类 * @ param fieldName 字段名 * @ param ignoreCase 是否忽略大小写 * @ return PropertyDescriptor * @ throws BeanException 获取属性异常 */ public static PropertyDescriptor getPropertyDescriptor ( Class < ? > clazz , final String fieldName , boolean ignoreCase ) throws BeanException { } }
final Map < String , PropertyDescriptor > map = getPropertyDescriptorMap ( clazz , ignoreCase ) ; return ( null == map ) ? null : map . get ( fieldName ) ;
public class SepaVersion { /** * Liefert die SEPA - Version aus dem URN . * @ param urn URN . * In der Form " urn : iso : std : iso : 20022 : tech : xsd : pain . 001.002.03 " oder in * der alten Form " sepade . pain . 001.001.02 . xsd " . * @ return die SEPA - Version . */ public static SepaVersion byURN ( String urn ) { } }
SepaVersion test = new SepaVersion ( null , 0 , urn , null , false ) ; if ( urn == null || urn . length ( ) == 0 ) return test ; for ( List < SepaVersion > types : knownVersions . values ( ) ) { for ( SepaVersion v : types ) { if ( v . equals ( test ) ) return v ; } } // keine passende Version gefunden . Dann erzeugen wir selbst eine return test ;
public class XPathParser { /** * Look ahead of the current token in order to * make a branching decision . * @ param s the string to compare it to . * @ param n number of tokens to lookahead . Must be * greater than 1. * @ return true if the token behind the current token matches the string * argument . */ private final boolean lookahead ( String s , int n ) { } }
boolean isToken ; if ( ( m_queueMark + n ) <= m_ops . getTokenQueueSize ( ) ) { String lookahead = ( String ) m_ops . m_tokenQueue . elementAt ( m_queueMark + ( n - 1 ) ) ; isToken = ( lookahead != null ) ? lookahead . equals ( s ) : ( s == null ) ; } else { isToken = ( null == s ) ; } return isToken ;
public class ns_ssl_certkey { /** * < pre > * Use this operation to get certificates on NetScaler Instance ( s ) . * < / pre > */ public static ns_ssl_certkey [ ] get ( nitro_service client ) throws Exception { } }
ns_ssl_certkey resource = new ns_ssl_certkey ( ) ; resource . validate ( "get" ) ; return ( ns_ssl_certkey [ ] ) resource . get_resources ( client ) ;
public class PaymentChannelClientState {
    /**
     * Commits the contract transaction straight into the wallet and advances the
     * state machine, skipping the normal persistent state save -- for tests only.
     */
    @VisibleForTesting
    synchronized void fakeSave() {
        try {
            wallet.commitTx(getContractInternal());
        } catch (VerificationException e) {
            throw new RuntimeException(e); // We created it
        }
        stateMachine.transition(State.PROVIDE_MULTISIG_CONTRACT_TO_SERVER);
    }
}
public class GenericInMemoryCatalog { /** * - - - - - functions - - - - - */ @ Override public void createFunction ( ObjectPath functionPath , CatalogFunction function , boolean ignoreIfExists ) throws FunctionAlreadyExistException , DatabaseNotExistException { } }
checkNotNull ( functionPath ) ; checkNotNull ( function ) ; if ( ! databaseExists ( functionPath . getDatabaseName ( ) ) ) { throw new DatabaseNotExistException ( catalogName , functionPath . getDatabaseName ( ) ) ; } if ( functionExists ( functionPath ) ) { if ( ! ignoreIfExists ) { throw new FunctionAlreadyExistException ( catalogName , functionPath ) ; } } else { functions . put ( functionPath , function . copy ( ) ) ; }
public class MapLayer { /** * Clear the current bounding box to force the computation of it at * the next call to { @ link # getBoundingBox ( ) } . */ @ Override public void resetBoundingBox ( ) { } }
super . resetBoundingBox ( ) ; final GISLayerContainer < ? > parent = getContainer ( ) ; if ( parent != null ) { parent . resetBoundingBox ( ) ; }
public class BackpressureManagingHandler { /** * Intercepts a write on the channel . The following message types are handled : * < ul > * < li > String : If the pipeline is not configured to write a String , this converts the string to a { @ link io . netty . buffer . ByteBuf } and * then writes it on the channel . < / li > * < li > byte [ ] : If the pipeline is not configured to write a byte [ ] , this converts the byte [ ] to a { @ link io . netty . buffer . ByteBuf } and * then writes it on the channel . < / li > * < li > Observable : Subscribes to the { @ link Observable } and writes all items , requesting the next item if and only if * the channel is writable as indicated by { @ link Channel # isWritable ( ) } < / li > * < / ul > * @ param ctx Channel handler context . * @ param msg Message to write . * @ param promise Promise for the completion of write . * @ throws Exception If there is an error handling this write . */ @ Override public void write ( ChannelHandlerContext ctx , Object msg , ChannelPromise promise ) throws Exception { } }
if ( msg instanceof Observable ) { @ SuppressWarnings ( "rawtypes" ) Observable observable = ( Observable ) msg ; /* One can write heterogeneous objects on a channel . */ final WriteStreamSubscriber subscriber = bytesWriteInterceptor . newSubscriber ( ctx , promise ) ; subscriber . subscribeTo ( observable ) ; } else { ctx . write ( msg , promise ) ; }
public class AutocompleteUtil { /** * Get an { @ code autocomplete } attribute value for a named auto - fill section . * For information on sections see * < a href = " https : / / html . spec . whatwg . org / multipage / form - control - infrastructure . html # attr - fe - autocomplete - section " target = " _ blank " > * https : / / html . spec . whatwg . org / multipage / form - control - infrastructure . html # attr - fe - autocomplete - section < / a > . Note that in this use a section is * < strong > not < / strong > a HTML section element as provided by { @ link com . github . bordertech . wcomponents . WSection } . * @ param sectionName the name of the autocomplete section required * @ return an autocomplete value for a named section */ public static String getNamedSection ( final String sectionName ) { } }
if ( Util . empty ( sectionName ) ) { throw new IllegalArgumentException ( "argument must not be empty" ) ; } return SECTION_PREFIX . concat ( sectionName ) ;
public class CmsJspActionElement {
    /**
     * Includes a named sub-element with parameters from the OpenCms VFS; same as
     * using the {@code <cms:include file="***" element="***"/>} tag with
     * parameters in the tag body.
     *
     * @param target the target URI of the file in the OpenCms VFS (can be relative or absolute)
     * @param element the element (template selector) to display from the target
     * @param parameterMap a map of the request parameters
     * @throws JspException in case there were problems including the target
     * @see org.opencms.jsp.CmsJspTagInclude
     */
    public void include(String target, String element, Map<String, ?> parameterMap) throws JspException {
        // Delegates with the boolean flag fixed to false -- presumably the
        // "editable" flag of the four-argument overload; confirm against it.
        include(target, element, false, parameterMap);
    }
}
public class DataStream { /** * Sets the partitioning of the { @ link DataStream } so that the output elements * are broadcasted to every parallel instance of the next operation . In addition , * it implicitly as many { @ link org . apache . flink . api . common . state . BroadcastState broadcast states } * as the specified descriptors which can be used to store the element of the stream . * @ param broadcastStateDescriptors the descriptors of the broadcast states to create . * @ return A { @ link BroadcastStream } which can be used in the { @ link # connect ( BroadcastStream ) } to * create a { @ link BroadcastConnectedStream } for further processing of the elements . */ @ PublicEvolving public BroadcastStream < T > broadcast ( final MapStateDescriptor < ? , ? > ... broadcastStateDescriptors ) { } }
Preconditions . checkNotNull ( broadcastStateDescriptors ) ; final DataStream < T > broadcastStream = setConnectionType ( new BroadcastPartitioner < > ( ) ) ; return new BroadcastStream < > ( environment , broadcastStream , broadcastStateDescriptors ) ;
public class DateUtils { /** * Converts a two - digit input year to a four - digit year . As the current calendar year * approaches a century , we assume small values to mean the next century . For instance , if * the current year is 2090 , and the input value is " 18 " , the user probably means 2118, * not 2018 . However , in 2017 , the input " 18 " probably means 2018 . This code should be * updated before the year 9981. * @ param inputYear a two - digit integer , between 0 and 99 , inclusive * @ return a four - digit year */ @ IntRange ( from = 1000 , to = 9999 ) static int convertTwoDigitYearToFour ( @ IntRange ( from = 0 , to = 99 ) int inputYear ) { } }
return convertTwoDigitYearToFour ( inputYear , Calendar . getInstance ( ) ) ;
public class ParsedSwapStmt {
    /**
     * Parses the arguments to a SWAP TABLE statement.
     *
     * SWAP TABLE statements use simple string attributes ("thetable" and
     * "othertable") as the "VoltXML" representation for their target tables;
     * they don't need the child table nodes used by other statement kinds, and
     * only m_tableList is populated -- sufficient for SWAP TABLE's minimal
     * validation and planning requirements.
     */
    @Override
    void parse(VoltXMLElement stmtNode) {
        // parseTablesAndParameters may have been called on this
        // SWAP TABLE statement, but the simplified VoltXML representation
        // for SWAP TABLE gave that method nothing to do.
        assert (stmtNode.children.isEmpty());
        assert (m_tableList.isEmpty());
        // Register both swap targets by attribute name.
        addTabletoList(stmtNode, "thetable");
        addTabletoList(stmtNode, "othertable");
    }
}
public class QrCodeBinaryGridToPixel {
    /**
     * Removes the outside-corner features of the position patterns, since those
     * corners are the most likely to be damaged.
     */
    public void removeOutsideCornerFeatures() {
        // Guard: the fixed indices below are only valid once every feature has
        // been added (pairs2D and its backing storage are the same length).
        if (pairs2D.size() != storagePairs2D.size)
            throw new RuntimeException("This can only be called when all the features have been added");
        // Remove in descending index order so earlier removals don't shift the
        // positions of the later targets. Indices 11, 5, 0 are presumably the
        // outside corner of each of the three position patterns -- confirm
        // against the code that fills pairs2D.
        pairs2D.remove(11);
        pairs2D.remove(5);
        pairs2D.remove(0);
    }
}
public class AdaGrad {
    /**
     * Computes feature-specific learning-rate-scaled gradients (the AdaGrad
     * update): lr * g / (sqrt(sum of squared historical gradients) + epsilon).
     *
     * NOTE: mutates state in place -- the accumulated historicalGradient is
     * updated, and the {@code gradient} argument itself is scaled via muli and
     * returned.
     *
     * @param gradient the gradient to get learning rates for (modified in place)
     * @param iteration iteration number (not used by this body)
     * @return the feature-specific scaled gradient (same object as the argument)
     * @throws IllegalStateException if the updater's view state was never initialized
     */
    public INDArray getGradient(INDArray gradient, int iteration) {
        if (historicalGradient == null)
            throw new IllegalStateException("Updater has not been initialized with view state");
        // Accumulate squared gradients element-wise: G += g * g.
        historicalGradient.addi(gradient.mul(gradient));
        // sqrt of a copy (dup) so the accumulator itself is not square-rooted.
        INDArray sqrtHistory = sqrt(historicalGradient.dup(gradientReshapeOrder), false).addi(epsilon);
        // lr * gradient / ( sqrt ( sumSquaredGradients ) + epsilon )
        INDArray ret = gradient.muli(sqrtHistory.rdivi(learningRate));
        numIterations++;
        return ret;
    }
}