signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class AuthFilter { /** * Authenticates a user if required . * @ param request The HTTP request . * @ param response The HTTP response . * @ param chain The filter chain to execute . * @ throws IOException If an I / O error occurs . * @ throws ServletException If an unknown error occurs . */ @ Override public void doFilter ( ServletRequest request , ServletResponse response , FilterChain chain ) throws IOException , ServletException { } }
String user = null ; if ( HttpServletRequest . class . isAssignableFrom ( request . getClass ( ) ) ) { HttpServletRequest req = HttpServletRequest . class . cast ( request ) ; String authorizationHeader = req . getHeader ( HttpHeaders . AUTHORIZATION ) ; // Only perform authentication check for endpoints that require it and if the req is not an OPTIONS request . if ( _requiresAuthentication ( req ) && ! "options" . equalsIgnoreCase ( req . getMethod ( ) ) ) { if ( authorizationHeader != null && authorizationHeader . startsWith ( "Bearer" ) ) { try { String jwt = authorizationHeader . substring ( "Bearer " . length ( ) ) . trim ( ) ; user = JWTUtils . validateTokenAndGetSubj ( jwt , JWTUtils . TokenType . ACCESS ) ; } catch ( UnsupportedJwtException | MalformedJwtException | IllegalArgumentException e ) { HttpServletResponse httpresponse = HttpServletResponse . class . cast ( response ) ; httpresponse . setHeader ( "Access-Control-Allow-Origin" , req . getHeader ( "Origin" ) ) ; httpresponse . setHeader ( "Access-Control-Allow-Credentials" , "true" ) ; httpresponse . sendError ( HttpServletResponse . SC_UNAUTHORIZED , "Unsupported or Malformed JWT. Please provide a valid JWT." ) ; } catch ( SignatureException e ) { HttpServletResponse httpresponse = HttpServletResponse . class . cast ( response ) ; httpresponse . setHeader ( "Access-Control-Allow-Origin" , req . getHeader ( "Origin" ) ) ; httpresponse . setHeader ( "Access-Control-Allow-Credentials" , "true" ) ; httpresponse . sendError ( HttpServletResponse . SC_UNAUTHORIZED , "Signature Exception. Please provide a valid JWT." ) ; } catch ( ExpiredJwtException e ) { HttpServletResponse httpresponse = HttpServletResponse . class . cast ( response ) ; httpresponse . setHeader ( "Access-Control-Allow-Origin" , req . getHeader ( "Origin" ) ) ; httpresponse . setHeader ( "Access-Control-Allow-Credentials" , "true" ) ; httpresponse . sendError ( HttpServletResponse . SC_UNAUTHORIZED , "JWT has expired. 
Please obtain a new token." ) ; } } else { HttpServletResponse httpresponse = HttpServletResponse . class . cast ( response ) ; httpresponse . setHeader ( "Access-Control-Allow-Origin" , req . getHeader ( "Origin" ) ) ; httpresponse . setHeader ( "Access-Control-Allow-Credentials" , "true" ) ; httpresponse . sendError ( HttpServletResponse . SC_UNAUTHORIZED , HttpHeaders . AUTHORIZATION + " Header is either not proivded or incorrectly formatted." ) ; } } } try { MDC . put ( USER_ATTRIBUTE_NAME , user ) ; request . setAttribute ( USER_ATTRIBUTE_NAME , user ) ; chain . doFilter ( request , response ) ; } finally { MDC . remove ( USER_ATTRIBUTE_NAME ) ; }
public class GetTokenApi { /** * HuaweiApiClient 连接结果回调 * @ param rst 结果码 * @ param client HuaweiApiClient 实例 */ @ Override public void onConnect ( int rst , HuaweiApiClient client ) { } }
if ( client == null || ! ApiClientMgr . INST . isConnect ( client ) ) { HMSAgentLog . e ( "client not connted" ) ; onPushTokenResult ( rst , null ) ; return ; } PendingResult < TokenResult > tokenResult = HuaweiPush . HuaweiPushApi . getToken ( client ) ; tokenResult . setResultCallback ( new ResultCallback < TokenResult > ( ) { @ Override public void onResult ( TokenResult result ) { if ( result == null ) { HMSAgentLog . e ( "result is null" ) ; onPushTokenResult ( HMSAgent . AgentResultCode . RESULT_IS_NULL , null ) ; return ; } Status status = result . getStatus ( ) ; if ( status == null ) { HMSAgentLog . e ( "status is null" ) ; onPushTokenResult ( HMSAgent . AgentResultCode . STATUS_IS_NULL , null ) ; return ; } int rstCode = status . getStatusCode ( ) ; HMSAgentLog . d ( "status=" + status ) ; // 需要重试的错误码 , 并且可以重试 if ( ( rstCode == CommonCode . ErrorCode . SESSION_INVALID || rstCode == CommonCode . ErrorCode . CLIENT_API_INVALID ) && retryTimes > 0 ) { retryTimes -- ; connect ( ) ; } else { onPushTokenResult ( rstCode , result ) ; } } } ) ;
public class Polarizability { /** * Method which assigns the polarizabilitiyFactors . * @ param atomContainer AtomContainer * @ param atom Atom * @ return double polarizabilitiyFactor */ private double getKJPolarizabilityFactor ( IAtomContainer atomContainer , IAtom atom ) { } }
double polarizabilitiyFactor = 0 ; String AtomSymbol ; AtomSymbol = atom . getSymbol ( ) ; switch ( AtomSymbol ) { case "H" : polarizabilitiyFactor = 0.387 ; break ; case "C" : if ( atom . getFlag ( CDKConstants . ISAROMATIC ) ) { polarizabilitiyFactor = 1.230 ; } else if ( atomContainer . getMaximumBondOrder ( atom ) == IBond . Order . SINGLE ) { polarizabilitiyFactor = 1.064 ; /* 1.064 */ } else if ( atomContainer . getMaximumBondOrder ( atom ) == IBond . Order . DOUBLE ) { if ( getNumberOfHydrogen ( atomContainer , atom ) == 0 ) { polarizabilitiyFactor = 1.382 ; } else { polarizabilitiyFactor = 1.37 ; } } else if ( atomContainer . getMaximumBondOrder ( atom ) == IBond . Order . TRIPLE || atomContainer . getMaximumBondOrder ( atom ) == IBond . Order . QUADRUPLE ) { polarizabilitiyFactor = 1.279 ; } break ; case "N" : if ( atom . getCharge ( ) != CDKConstants . UNSET && atom . getCharge ( ) < 0 ) { polarizabilitiyFactor = 1.090 ; } else if ( atomContainer . getMaximumBondOrder ( atom ) == IBond . Order . SINGLE ) { polarizabilitiyFactor = 1.094 ; } else if ( atomContainer . getMaximumBondOrder ( atom ) == IBond . Order . DOUBLE ) { polarizabilitiyFactor = 1.030 ; } else { polarizabilitiyFactor = 0.852 ; } break ; case "O" : if ( atom . getCharge ( ) != CDKConstants . UNSET && atom . getCharge ( ) == - 1 ) { polarizabilitiyFactor = 1.791 ; } else if ( atom . getCharge ( ) != CDKConstants . UNSET && atom . getCharge ( ) == 1 ) { polarizabilitiyFactor = 0.422 ; } else if ( atomContainer . getMaximumBondOrder ( atom ) == IBond . Order . SINGLE ) { polarizabilitiyFactor = 0.664 ; } else if ( atomContainer . getMaximumBondOrder ( atom ) == IBond . Order . DOUBLE ) { polarizabilitiyFactor = 0.460 ; } break ; case "P" : if ( atomContainer . getConnectedBondsCount ( atom ) == 4 && atomContainer . getMaximumBondOrder ( atom ) == IBond . Order . DOUBLE ) { polarizabilitiyFactor = 0 ; } break ; case "S" : if ( atom . getFlag ( CDKConstants . 
ISAROMATIC ) ) { polarizabilitiyFactor = 3.38 ; } else if ( atomContainer . getMaximumBondOrder ( atom ) == IBond . Order . SINGLE ) { polarizabilitiyFactor = 3.20 ; /* 3.19 */ } else if ( atomContainer . getMaximumBondOrder ( atom ) == IBond . Order . DOUBLE ) { if ( getNumberOfHydrogen ( atomContainer , atom ) == 0 ) { polarizabilitiyFactor = 3.51 ; } else { polarizabilitiyFactor = 3.50 ; } } else { polarizabilitiyFactor = 3.42 ; } break ; case "F" : polarizabilitiyFactor = 0.296 ; break ; case "Cl" : polarizabilitiyFactor = 2.343 ; break ; case "Br" : polarizabilitiyFactor = 3.5 ; break ; case "I" : polarizabilitiyFactor = 5.79 ; break ; } return polarizabilitiyFactor ;
public class CPDefinitionPersistenceImpl { /** * Removes the cp definition where companyId = & # 63 ; and externalReferenceCode = & # 63 ; from the database . * @ param companyId the company ID * @ param externalReferenceCode the external reference code * @ return the cp definition that was removed */ @ Override public CPDefinition removeByC_ERC ( long companyId , String externalReferenceCode ) throws NoSuchCPDefinitionException { } }
CPDefinition cpDefinition = findByC_ERC ( companyId , externalReferenceCode ) ; return remove ( cpDefinition ) ;
public class Context { /** * Initialize the Audit4j instance . This will ensure the single audit4j * instance and single Configuration repository load in to the memory . */ final static void init ( ) { } }
StopWatch stopWatch = new StopWatch ( ) ; stopWatch . start ( "Audit4jInit" ) ; if ( configContext == null ) { configContext = new ConcurrentConfigurationContext ( ) ; } if ( lifeCycle . getStatus ( ) . equals ( RunStatus . READY ) || lifeCycle . getStatus ( ) . equals ( RunStatus . STOPPED ) ) { Audit4jBanner banner = new Audit4jBanner ( ) ; banner . printBanner ( ) ; Log . info ( "Initializing Audit4j..." ) ; // Check system environment ; checkEnvironment ( ) ; // Load configurations to Memory Log . info ( "Loading Configurations..." ) ; if ( conf == null ) { loadConfig ( ) ; } Log . info ( "Validating Configurations..." ) ; if ( conf == null ) { terminate ( ) ; throw new InitializationException ( INIT_FAILED ) ; } try { ValidationManager . validateConfigurations ( conf ) ; } catch ( ValidationException e1 ) { terminate ( ) ; throw new InitializationException ( INIT_FAILED , e1 ) ; } // Execute commands . CommandProcessor . getInstance ( ) . process ( conf . getCommands ( ) ) ; // Load Registry configurations . loadRegistry ( ) ; if ( conf . getProperties ( ) == null ) { conf . setProperties ( new HashMap < String , String > ( ) ) ; } else { for ( Map . Entry < String , String > entry : conf . getProperties ( ) . entrySet ( ) ) { if ( System . getProperties ( ) . containsKey ( entry . getValue ( ) ) ) { conf . getProperties ( ) . put ( entry . getKey ( ) , System . getProperty ( entry . getValue ( ) ) ) ; } } } configContext . getProperties ( ) . putAll ( conf . getProperties ( ) ) ; // Initialize handlers . initHandlers ( ) ; // Initialize layouts . initLayout ( ) ; // Initialize annotation transformer . if ( conf . getAnnotationTransformer ( ) == null ) { annotationTransformer = getDefaultAnnotationTransformer ( ) ; } else { annotationTransformer = conf . getAnnotationTransformer ( ) ; } // Initialize IO streams . initStreams ( ) ; if ( ! conf . getFilters ( ) . isEmpty ( ) ) { Log . info ( "Registoring Filters..." ) ; } for ( AuditEventFilter filter : conf . 
getFilters ( ) ) { configContext . addFilter ( filter ) ; Log . info ( filter . getClass ( ) . getName ( ) + " Registored..." ) ; } configContext . setMetaData ( conf . getMetaData ( ) ) ; // Execute Scheduler tasks . Log . info ( "Executing Schedulers..." ) ; Schedulers . taskRegistry ( ) . scheduleAll ( ) ; // Initialize monitoring support if available in the configurations . if ( conf . getJmx ( ) != null ) { MBeanAgent agent = new MBeanAgent ( ) ; agent . setJmxConfig ( conf . getJmx ( ) ) ; agent . init ( ) ; agent . registerMbeans ( ) ; } lifeCycle . setStatus ( RunStatus . RUNNING ) ; lifeCycle . setStartUpTime ( new Date ( ) . getTime ( ) ) ; stopWatch . stop ( ) ; Long initializationTime = stopWatch . getLastTaskTimeMillis ( ) ; Log . info ( "Audit4j initialized. Total time: " , initializationTime , "ms" ) ; }
public class JKMessage { public String getLabel ( JKLocale locale , final String key , final Object ... params ) { } }
if ( key == null || key . trim ( ) . equals ( "" ) ) { return "" ; } String newKey = JKCollectionUtil . fixPropertyKey ( key ) ; Properties prop = getLables ( locale ) ; if ( prop . containsValue ( newKey ) ) { // to avoid calling the values as keys return newKey ; } String value = prop . getProperty ( newKey ) ; // System . out . println ( prop ) ; // Fix the string only if not found if ( value == null || value . equals ( "" ) ) { value = newKey ; // if ( ! value . equals ( " " ) ) { value = fixValue ( value ) ; // to avoid printing the error statement again setProperty ( locale , key , value ) ; } return setParamsters ( value , params ) ;
public class Stylesheet { /** * Set an " xsl : template " property . * @ see < a href = " http : / / www . w3 . org / TR / xslt # section - Defining - Template - Rules " > section - Defining - Template - Rules in XSLT Specification < / a > * @ param v ElemTemplate to add to list of templates */ public void setTemplate ( ElemTemplate v ) { } }
if ( null == m_templates ) m_templates = new Vector ( ) ; m_templates . addElement ( v ) ; v . setStylesheet ( this ) ;
public class ProfileHandler { /** * Pushes everything available in the JSON object returned by the Facebook GraphRequest * @ param graphUser The object returned from Facebook * @ deprecated use { @ link CleverTapAPI # pushFacebookUser ( JSONObject graphUser ) } */ @ Deprecated public void pushFacebookUser ( final JSONObject graphUser ) { } }
CleverTapAPI cleverTapAPI = weakReference . get ( ) ; if ( cleverTapAPI == null ) { Logger . d ( "CleverTap Instance is null." ) ; } else { cleverTapAPI . pushFacebookUser ( graphUser ) ; }
public class AWSDeviceFarmClient { /** * Returns a list of offering promotions . Each offering promotion record contains the ID and description of the * promotion . The API returns a < code > NotEligible < / code > error if the caller is not permitted to invoke the * operation . Contact < a href = " mailto : aws - devicefarm - support @ amazon . com " > aws - devicefarm - support @ amazon . com < / a > if * you believe that you should be able to invoke this operation . * @ param listOfferingPromotionsRequest * @ return Result of the ListOfferingPromotions operation returned by the service . * @ throws ArgumentException * An invalid argument was specified . * @ throws NotFoundException * The specified entity was not found . * @ throws NotEligibleException * Exception gets thrown when a user is not eligible to perform the specified transaction . * @ throws LimitExceededException * A limit was exceeded . * @ throws ServiceAccountException * There was a problem with the service account . * @ sample AWSDeviceFarm . ListOfferingPromotions * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / devicefarm - 2015-06-23 / ListOfferingPromotions " * target = " _ top " > AWS API Documentation < / a > */ @ Override public ListOfferingPromotionsResult listOfferingPromotions ( ListOfferingPromotionsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeListOfferingPromotions ( request ) ;
public class AtomTetrahedralLigandPlacer3D { /** * Gets the spatproduct of three vectors . * @ param a vector a * @ param b vector b * @ param c vector c * @ return double value of the spatproduct */ public double getSpatproduct ( Vector3d a , Vector3d b , Vector3d c ) { } }
return ( c . x * ( b . y * a . z - b . z * a . y ) + c . y * ( b . z * a . x - b . x * a . z ) + c . z * ( b . x * a . y - b . y * a . x ) ) ;
public class OgmLoader { /** * Execute the physical query and initialize the various entities and collections * @ param session the session * @ param qp the query parameters * @ param ogmLoadingContext the loading context * @ param returnProxies when { @ code true } , get an existing proxy for each collection element ( if there is one ) * @ return the result of the query */ private List < Object > doQuery ( SharedSessionContractImplementor session , QueryParameters qp , OgmLoadingContext ogmLoadingContext , boolean returnProxies ) { } }
// TODO support lock timeout int entitySpan = entityPersisters . length ; final List < Object > hydratedObjects = entitySpan == 0 ? null : new ArrayList < Object > ( entitySpan * 10 ) ; // TODO yuk ! Is there a cleaner way to access the id ? final Serializable id ; // see if we use batching first // then look for direct id // then for a tuple based result set we could extract the id // otherwise that ' s a collection so we use the collection key boolean loadSeveralIds = loadSeveralIds ( qp ) ; boolean isCollectionLoader ; if ( loadSeveralIds ) { // need to be set to null otherwise the optionalId has precedence // and is used for all tuples regardless of their actual ids id = null ; isCollectionLoader = false ; } else if ( qp . getOptionalId ( ) != null ) { id = qp . getOptionalId ( ) ; isCollectionLoader = false ; } else if ( ogmLoadingContext . hasResultSet ( ) ) { // extract the ids from the tuples directly id = null ; isCollectionLoader = false ; } else { id = qp . getCollectionKeys ( ) [ 0 ] ; isCollectionLoader = true ; } TupleAsMapResultSet resultset = getResultSet ( id , qp , ogmLoadingContext , session ) ; // Todo implement lockmode // final LockMode [ ] lockModesArray = getLockModes ( queryParameters . getLockOptions ( ) ) ; // FIXME should we use subselects as it ' s closer to this process ? ? // TODO is resultset a good marker , or should it be an ad - hoc marker ? ? // It likely all depends on what resultset ends up being handleEmptyCollections ( qp . getCollectionKeys ( ) , resultset , session ) ; final org . hibernate . engine . spi . EntityKey [ ] keys = new org . hibernate . engine . spi . EntityKey [ entitySpan ] ; // for each element in resultset // TODO should we collect List < Object > as result ? Not necessary today Object result = null ; List < Object > results = new ArrayList < Object > ( ) ; if ( isCollectionLoader ) { preLoadBatchFetchingQueue ( session , resultset ) ; } try { while ( resultset . 
next ( ) ) { result = getRowFromResultSet ( resultset , session , qp , ogmLoadingContext , // lockmodeArray , id , hydratedObjects , keys , returnProxies ) ; results . add ( result ) ; } // TODO collect subselect result key } catch ( SQLException e ) { // never happens this is not a regular ResultSet } // end of for each element in resultset initializeEntitiesAndCollections ( hydratedObjects , resultset , session , qp . isReadOnly ( session ) ) ; // TODO create subselects return results ;
public class EsaResourceImpl { /** * Uses the required features information to calculate a list of queries * stored as Strings that can be searched upon later * Input the list of required features information to convert into the query * Returns the list of queries ( Strings ) */ private Collection < String > createEnablesQuery ( ) { } }
Collection < String > query = null ; Collection < String > requiredFeatures = getRequireFeature ( ) ; if ( requiredFeatures != null ) { query = new ArrayList < String > ( ) ; for ( String required : requiredFeatures ) { String temp = "wlpInformation.provideFeature=" + required ; String version = findVersion ( ) ; if ( version != null ) { temp += "&wlpInformation.appliesToFilterInfo.minVersion.value=" ; temp += version ; } temp += "&type=" ; temp += getType ( ) . getValue ( ) ; // get the long name of the Type query . add ( temp ) ; } } return query ;
public class PipelineApi { /** * Get a list of pipelines in a project in the specified page range . * < pre > < code > GitLab Endpoint : GET / projects / : id / pipelines < / code > < / pre > * @ param projectIdOrPath the project in the form of an Integer ( ID ) , String ( path ) , or Project instance * @ param page the page to get * @ param perPage the number of Pipeline instances per page * @ return a list containing the pipelines for the specified project ID in the specified page range * @ throws GitLabApiException if any exception occurs during execution */ public List < Pipeline > getPipelines ( Object projectIdOrPath , int page , int perPage ) throws GitLabApiException { } }
Response response = get ( Response . Status . OK , getPageQueryParams ( page , perPage ) , "projects" , getProjectIdOrPath ( projectIdOrPath ) , "pipelines" ) ; return ( response . readEntity ( new GenericType < List < Pipeline > > ( ) { } ) ) ;
public class SSLUtils { /** * This method is called for tracing in various places . It returns a string that * represents all the buffers in the array including hashcode , position , limit , and capacity . * @ param buffers array of buffers to get debug info on * @ return string representing the buffer array */ public static String getBufferTraceInfo ( WsByteBuffer buffers [ ] ) { } }
if ( null == buffers ) { return "Null buffer array" ; } StringBuilder sb = new StringBuilder ( 32 + ( 64 * buffers . length ) ) ; for ( int i = 0 ; i < buffers . length ; i ++ ) { sb . append ( "\r\n\t Buffer [" ) ; sb . append ( i ) ; sb . append ( "]: " ) ; getBufferTraceInfo ( sb , buffers [ i ] ) ; } return sb . toString ( ) ;
public class NettyRSocketServerFactory { /** * Add { @ link ServerRSocketFactoryCustomizer } s that should applied while building the * server . * @ param serverCustomizers the customizers to add */ public void addServerCustomizers ( ServerRSocketFactoryCustomizer ... serverCustomizers ) { } }
Assert . notNull ( serverCustomizers , "ServerCustomizer must not be null" ) ; this . serverCustomizers . addAll ( Arrays . asList ( serverCustomizers ) ) ;
public class SLF4JLogFactory { /** * Return an array containing the names of all currently defined configuration * attributes . If there are no such attributes , a zero length array is * returned . */ @ SuppressWarnings ( "unchecked" ) public String [ ] getAttributeNames ( ) { } }
List < String > names = new ArrayList < String > ( ) ; Enumeration < String > keys = attributes . keys ( ) ; while ( keys . hasMoreElements ( ) ) { names . add ( ( String ) keys . nextElement ( ) ) ; } String results [ ] = new String [ names . size ( ) ] ; for ( int i = 0 ; i < results . length ; i ++ ) { results [ i ] = ( String ) names . get ( i ) ; } return ( results ) ;
public class AbstractLinkType { /** * { @ inheritDoc } */ @ Override public Object readValue ( final Attribute _attribute , final List < Object > _objectList ) throws EFapsException { } }
final List < Object > list = new ArrayList < Object > ( ) ; Object temp = null ; for ( final Object object : _objectList ) { if ( object instanceof Object [ ] ) { final List < Object > list2 = new ArrayList < Object > ( ) ; Object temp2 = null ; for ( final Object object2 : ( Object [ ] ) object ) { // Oracle database stores all IDs as Decimal if ( object2 instanceof BigDecimal ) { temp2 = ( ( BigDecimal ) object2 ) . longValue ( ) ; } else { temp2 = object2 ; } list2 . add ( temp2 ) ; } list . add ( list2 . toArray ( ) ) ; } else { // Oracle database stores all IDs as Decimal if ( object instanceof BigDecimal ) { temp = ( ( BigDecimal ) object ) . longValue ( ) ; } else { temp = object ; } list . add ( temp ) ; } } return _objectList . size ( ) > 0 ? ( list . size ( ) > 1 ? list : list . get ( 0 ) ) : null ;
public class AmazonMQClient { /** * Updates the specified configuration . * @ param updateConfigurationRequest * Updates the specified configuration . * @ return Result of the UpdateConfiguration operation returned by the service . * @ throws NotFoundException * HTTP Status Code 404 : Resource not found due to incorrect input . Correct your request and then retry it . * @ throws BadRequestException * HTTP Status Code 400 : Bad request due to incorrect input . Correct your request and then retry it . * @ throws InternalServerErrorException * HTTP Status Code 500 : Unexpected internal server error . Retrying your request might resolve the issue . * @ throws ConflictException * HTTP Status Code 409 : Conflict . Concurrent update to configuration . Retry to create a new revision . * @ throws ForbiddenException * HTTP Status Code 403 : Access forbidden . Correct your input and then retry your request . * @ sample AmazonMQ . UpdateConfiguration * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / mq - 2017-11-27 / UpdateConfiguration " target = " _ top " > AWS API * Documentation < / a > */ @ Override public UpdateConfigurationResult updateConfiguration ( UpdateConfigurationRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeUpdateConfiguration ( request ) ;
public class ValueURLIOHelper { /** * Extracts the content of the given { @ link ValueData } and links the data to * the { @ link URL } in the Value Storage if needed * @ param plugin the plug - in that will manage the storage of the provided { @ link ValueData } * @ param value the value from which we want to extract the content * @ param resourceId the internal id of the { @ link ValueData } * @ param spoolContent Indicates whether or not the content should always be spooled * @ return the content of the { @ link ValueData } * @ throws IOException if the content could not be extracted */ public static InputStream getContent ( ValueStoragePlugin plugin , ValueData value , String resourceId , boolean spoolContent ) throws IOException { } }
if ( value . isByteArray ( ) ) { return new ByteArrayInputStream ( value . getAsByteArray ( ) ) ; } else if ( value instanceof StreamPersistedValueData ) { StreamPersistedValueData streamed = ( StreamPersistedValueData ) value ; if ( ! streamed . isPersisted ( ) ) { InputStream stream ; // the Value not yet persisted , i . e . or in client stream or spooled to a temp file File tempFile ; if ( ( tempFile = streamed . getTempFile ( ) ) != null ) { // it ' s spooled Value , try move its file to VS stream = new FileInputStream ( tempFile ) ; } else { // not spooled , use client InputStream stream = streamed . getStream ( ) ; } // link this Value to URL in VS InputStream result = streamed . setPersistedURL ( plugin . createURL ( resourceId ) , spoolContent ) ; return result != null ? result : stream ; } } return value . getAsStream ( ) ;
public class ResolvableType { /** * Return a { @ link ResolvableType } array representing the direct interfaces implemented by this type . If this type does not * implement any interfaces an empty array is returned . * @ see # getSuperType ( ) */ public ResolvableType [ ] getInterfaces ( ) { } }
Class < ? > resolved = resolve ( ) ; Object [ ] array = resolved . getGenericInterfaces ( ) ; if ( resolved == null || ( array == null || array . length == 0 ) ) { return EMPTY_TYPES_ARRAY ; } if ( this . interfaces == null ) { this . interfaces = forTypes ( TypeWrapper . forGenericInterfaces ( resolved ) , asVariableResolver ( ) ) ; } return this . interfaces ;
public class InterProcessMultiLock { /** * { @ inheritDoc } * < p > NOTE : locks are released in the reverse order that they were acquired . < / p > */ @ Override public synchronized void release ( ) throws Exception { } }
Exception baseException = null ; for ( InterProcessLock lock : reverse ( locks ) ) { try { lock . release ( ) ; } catch ( Exception e ) { ThreadUtils . checkInterrupted ( e ) ; if ( baseException == null ) { baseException = e ; } else { baseException = new Exception ( baseException ) ; } } } if ( baseException != null ) { throw baseException ; }
public class CharEscapeUtil {

    /**
     * Same as "_writeString2()", except needs additional escaping for a subset of
     * characters (table-driven standard escapes, a maximum non-escaped code point,
     * and per-character custom escapes).
     *
     * @param len number of pending chars (from _outputTail) to write
     */
    private void _writeStringCustom(final int len) throws IOException, JsonGenerationException {
        // Verify the need for escaping while scanning the buffer.
        int end = _outputTail + len;
        final int[] escCodes = _outputEscapes;
        final int maxNonEscaped = (_maximumNonEscapedChar < 1) ? 0xFFFF : _maximumNonEscapedChar;
        final int escLimit = Math.min(escCodes.length, maxNonEscaped + 1);
        int escCode = 0;
        final CharacterEscapes customEscapes = _characterEscapes;

        output_loop:
        while (_outputTail < end) {
            char ch;
            // Fast inner loop over characters that need no escaping.
            escape_loop:
            while (true) {
                ch = _outputBuffer[_outputTail];
                if (ch < escLimit) {
                    escCode = escCodes[ch];
                    if (escCode != 0) {
                        break escape_loop;
                    }
                } else if (ch > maxNonEscaped) {
                    escCode = CharacterEscapes.ESCAPE_STANDARD;
                    break escape_loop;
                } else {
                    if ((_currentEscape = customEscapes.getEscapeSequence(ch)) != null) {
                        escCode = CharacterEscapes.ESCAPE_CUSTOM;
                        break escape_loop;
                    }
                }
                if (++_outputTail >= end) {
                    break output_loop;
                }
            }
            // Flush the clean run scanned so far, then emit the escape sequence.
            int flushLen = (_outputTail - _outputHead);
            if (flushLen > 0) {
                _writer.write(_outputBuffer, _outputHead, flushLen);
            }
            ++_outputTail;
            _prependOrWriteCharacterEscape(ch, escCode);
        }
    }
}
public class DefaultGroovyMethods { /** * Drops the given number of key / value pairs from the head of this map if they are available . * < pre class = " groovyTestCase " > * def strings = [ ' a ' : 10 , ' b ' : 20 , ' c ' : 30 ] * assert strings . drop ( 0 ) = = [ ' a ' : 10 , ' b ' : 20 , ' c ' : 30 ] * assert strings . drop ( 2 ) = = [ ' c ' : 30 ] * assert strings . drop ( 5 ) = = [ : ] * < / pre > * If the map instance does not have ordered keys , then this function could drop a random < code > num < / code > * entries . Groovy by default uses LinkedHashMap , so this shouldn ' t be an issue in the main . * @ param self the original map * @ param num the number of elements to drop from this map * @ return a map consisting of all key / value pairs of this map except the first * < code > num < / code > ones , or else the empty map , if this map has * less than < code > num < / code > elements . * @ since 1.8.1 */ public static < K , V > Map < K , V > drop ( Map < K , V > self , int num ) { } }
if ( self . size ( ) <= num ) { return createSimilarMap ( self ) ; } if ( num == 0 ) { return cloneSimilarMap ( self ) ; } Map < K , V > ret = createSimilarMap ( self ) ; for ( K key : self . keySet ( ) ) { if ( num -- <= 0 ) { ret . put ( key , self . get ( key ) ) ; } } return ret ;
public class MpMessages { /** * 预览图片消息 * @ param wxName * @ param openId * @ param wxcard * @ return */ public long cardPreview ( String wxName , String openId , String wxcard ) { } }
return preview ( wxName , openId , "wxcard" , wxcard ) ;
public class IotHubResourcesInner { /** * Create or update the metadata of an IoT hub . * Create or update the metadata of an Iot hub . The usual pattern to modify a property is to retrieve the IoT hub metadata and security metadata , and then combine them with the modified values in a new body to update the IoT hub . * @ param resourceGroupName The name of the resource group that contains the IoT hub . * @ param resourceName The name of the IoT hub . * @ param iotHubDescription The IoT hub metadata and security metadata . * @ param ifMatch ETag of the IoT Hub . Do not specify for creating a brand new IoT Hub . Required to update an existing IoT Hub . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws ErrorDetailsException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the IotHubDescriptionInner object if successful . */ public IotHubDescriptionInner beginCreateOrUpdate ( String resourceGroupName , String resourceName , IotHubDescriptionInner iotHubDescription , String ifMatch ) { } }
return beginCreateOrUpdateWithServiceResponseAsync ( resourceGroupName , resourceName , iotHubDescription , ifMatch ) . toBlocking ( ) . single ( ) . body ( ) ;
public class StartPipelineReprocessingRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( StartPipelineReprocessingRequest startPipelineReprocessingRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( startPipelineReprocessingRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( startPipelineReprocessingRequest . getPipelineName ( ) , PIPELINENAME_BINDING ) ; protocolMarshaller . marshall ( startPipelineReprocessingRequest . getStartTime ( ) , STARTTIME_BINDING ) ; protocolMarshaller . marshall ( startPipelineReprocessingRequest . getEndTime ( ) , ENDTIME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class FileUtil { /** * create specified directory if doesn ' t exist . */ public static void mkdir ( File dir ) throws IOException { } }
if ( ! dir . exists ( ) && ! dir . mkdir ( ) ) throw new IOException ( "couldn't create directory: " + dir ) ;
public class StorageCommand { /** * Runs doctor storage command . * @ return 0 on success , 1 otherwise */ public int run ( ) throws IOException { } }
List < WorkerLostStorageInfo > workerLostStorageList = mBlockMasterClient . getWorkerLostStorage ( ) ; if ( workerLostStorageList . size ( ) == 0 ) { mPrintStream . println ( "All worker storage paths are in working state." ) ; return 0 ; } for ( WorkerLostStorageInfo info : workerLostStorageList ) { Map < String , StorageList > lostStorageMap = info . getLostStorageMap ( ) ; if ( lostStorageMap . size ( ) != 0 ) { mPrintStream . printf ( "The following storage paths are lost in worker %s: %n" , info . getAddress ( ) . getHost ( ) ) ; for ( Map . Entry < String , StorageList > tierStorage : lostStorageMap . entrySet ( ) ) { for ( String storage : tierStorage . getValue ( ) . getStorageList ( ) ) { mPrintStream . printf ( "%s (%s)%n" , storage , tierStorage . getKey ( ) ) ; } } } } return 0 ;
public class GQ { /** * Create an instance of a JsonBuilder object whose type is < T > * and set the the underlying properties object . */ public static < T extends JsonBuilder > T create ( Class < T > clz , IsProperties obj ) { } }
T ret = create ( clz ) ; ret . load ( obj . getDataImpl ( ) ) ; return ret ;
public class BaseDTO { /** * 构建系统异常对象 , status为500 , msg为用户传入参数 * @ param msg 异常消息 * @ param < T > 数据类型 * @ return 系统异常对象 */ public static < T > BaseDTO < T > buildError ( String msg ) { } }
BaseDTO < T > dto = new BaseDTO < > ( ) ; dto . setStatus ( "500" ) ; dto . setMessage ( msg ) ; return dto ;
public class ApkBuilder {

    /**
     * Returns the key and certificate from a given debug store.
     * It is expected that the store password is 'android' and the key alias and
     * password are 'androiddebugkey' and 'android' respectively.
     *
     * @param storeOsPath the OS path to the debug store, or null
     * @param verboseStream an optional {@link PrintStream} to display verbose information
     * @return the key and certificate in a {@link SigningInfo} object, or null
     *         when storeOsPath is null
     * @throws ApkCreationException if the key cannot be obtained, the
     *         certificate has expired, or keytool fails
     */
    public static SigningInfo getDebugKey(String storeOsPath, final PrintStream verboseStream)
            throws ApkCreationException {
        try {
            if (storeOsPath != null) {
                File storeFile = new File(storeOsPath);
                try {
                    checkInputFile(storeFile);
                } catch (FileNotFoundException e) {
                    // ignore these since the debug store can be created on the fly anyway.
                }
                // get the debug key
                if (verboseStream != null) {
                    verboseStream.println(String.format("Using keystore: %s", storeOsPath));
                }
                IKeyGenOutput keygenOutput = null;
                if (verboseStream != null) {
                    // Route keytool stdout/stderr through the caller-supplied stream.
                    keygenOutput = new IKeyGenOutput() {
                        @Override
                        public void out(String message) {
                            verboseStream.println(message);
                        }
                        @Override
                        public void err(String message) {
                            verboseStream.println(message);
                        }
                    };
                }
                DebugKeyProvider keyProvider =
                        new DebugKeyProvider(storeOsPath, null /* store type */, keygenOutput);
                PrivateKey key = keyProvider.getDebugKey();
                X509Certificate certificate = (X509Certificate) keyProvider.getCertificate();
                if (key == null) {
                    throw new ApkCreationException("Unable to get debug signature key");
                }
                // compare the certificate expiration date
                if (certificate != null && certificate.getNotAfter().compareTo(new Date()) < 0) {
                    // TODO, regenerate a new one.
                    throw new ApkCreationException("Debug Certificate expired on "
                            + DateFormat.getInstance().format(certificate.getNotAfter()));
                }
                return new SigningInfo(key, certificate);
            } else {
                // No store path: nothing to sign with.
                return null;
            }
        } catch (KeytoolException e) {
            // Give actionable guidance depending on whether JAVA_HOME is set.
            if (e.getJavaHome() == null) {
                throw new ApkCreationException(e.getMessage()
                        + "\nJAVA_HOME seems undefined, setting it will help locating keytool automatically\n"
                        + "You can also manually execute the following command\n:"
                        + e.getCommandLine(), e);
            } else {
                throw new ApkCreationException(e.getMessage()
                        + "\nJAVA_HOME is set to: " + e.getJavaHome()
                        + "\nUpdate it if necessary, or manually execute the following command:\n"
                        + e.getCommandLine(), e);
            }
        } catch (ApkCreationException e) {
            // Already the right type; re-throw untouched.
            throw e;
        } catch (Exception e) {
            throw new ApkCreationException(e);
        }
    }
}
public class CategoryGraph { /** * This parameter is already set in the constructor as it is needed for computation of relatedness values . * Therefore its computation does not trigger setGraphParameters ( it is too slow ) , even if the depth is implicitly determined there , too . * @ return The depth of the category graph , i . e . the maximum path length starting with the root node . * @ throws WikiApiException Thrown if errors occurred . */ private double getDepthFromRootPathMap ( ) throws WikiApiException { } }
int max = 0 ; for ( List < Integer > path : getRootPathMap ( ) . values ( ) ) { if ( path . size ( ) > max ) { max = path . size ( ) ; } } max = max - 1 ; // depth is measured in nodes , not edges if ( max < 0 ) { return 0 ; } else { return max ; }
public class VecPaired { /** * This method is used assuming multiple VecPaired are used together . The * implementation of the vector may have logic to handle the case that * the other vector is of the same type . This will go through every layer * of VecPaired to return the final base vector . * @ param b a Vec , that may or may not be an instance of { @ link VecPaired } * @ return the final Vec backing b , which may be b itself . */ public static Vec extractTrueVec ( Vec b ) { } }
while ( b instanceof VecPaired ) b = ( ( VecPaired ) b ) . getVector ( ) ; return b ;
public class TZDBTimeZoneNames { /** * / * ( non - Javadoc ) * @ see android . icu . text . TimeZoneNames # find ( java . lang . CharSequence , int , java . util . EnumSet ) */ @ Override public Collection < MatchInfo > find ( CharSequence text , int start , EnumSet < NameType > nameTypes ) { } }
if ( text == null || text . length ( ) == 0 || start < 0 || start >= text . length ( ) ) { throw new IllegalArgumentException ( "bad input text or range" ) ; } prepareFind ( ) ; TZDBNameSearchHandler handler = new TZDBNameSearchHandler ( nameTypes , getTargetRegion ( ) ) ; TZDB_NAMES_TRIE . find ( text , start , handler ) ; return handler . getMatches ( ) ;
public class CmsResourceUtil { /** * Returns the id of the project which the resource belongs to . < p > * @ return the id of the project which the resource belongs to */ public CmsUUID getProjectId ( ) { } }
CmsUUID projectId = m_resource . getProjectLastModified ( ) ; if ( ! getLock ( ) . isUnlocked ( ) && ! getLock ( ) . isInherited ( ) ) { // use lock project ID only if lock is not inherited projectId = getLock ( ) . getProjectId ( ) ; } return projectId ;
public class Switch { /** * Color of the left hand side of the switch . Legal values : ' primary ' , * ' info ' , ' success ' , ' warning ' , ' danger ' , ' default ' . Default value : * ' primary ' . * @ return Returns the value of the attribute , or null , if it hasn ' t been * set by the JSF file . */ public String getOnColor ( ) { } }
String value = ( String ) getStateHelper ( ) . eval ( PropertyKeys . onColor ) ; return value ;
public class Utils {

    /**
     * Collapses nodes in a JTree.
     *
     * NOTE(review): the original comment said "Expands all nodes", but the
     * implementation delegates to collapseJTreeNode, which by its name
     * collapses — confirm the intended behavior against that helper.
     *
     * @param tree  The JTree to process.
     * @param depth The depth to which the tree should be processed. Zero
     *              will just affect the root node, a negative value will
     *              process the full tree, and a positive value will
     *              recursively process the tree to that depth.
     */
    public static void collapseJTree(javax.swing.JTree tree, int depth) {
        javax.swing.tree.TreeModel model = tree.getModel();
        collapseJTreeNode(tree, model, model.getRoot(), 0, depth);
    }
}
public class ChatApi { /** * Send a message * Send a message to participants in the specified chat . * @ param id The ID of the chat interaction . ( required ) * @ param acceptData Request parameters . ( optional ) * @ return ApiResponse & lt ; ApiSuccessResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < ApiSuccessResponse > sendMessageWithHttpInfo ( String id , AcceptData1 acceptData ) throws ApiException { } }
com . squareup . okhttp . Call call = sendMessageValidateBeforeCall ( id , acceptData , null , null ) ; Type localVarReturnType = new TypeToken < ApiSuccessResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class TrivialSwap { /** * Swap the elements of two arrays at the specified positions . * @ param < E > the type of elements in this array . * @ param array1 one of the arrays that will have one of its values swapped . * @ param array1Index the index of the first array that will be swapped . * @ param array2 the other array that will have one of its values swapped . * @ param array2Index the index of the second array that will be swapped . */ public static < E > void swap ( E [ ] array1 , int array1Index , E [ ] array2 , int array2Index ) { } }
if ( array1 [ array1Index ] != array2 [ array2Index ] ) { E hold = array1 [ array1Index ] ; array1 [ array1Index ] = array2 [ array2Index ] ; array2 [ array2Index ] = hold ; }
public class CountCumSum {

    /**
     * Computes the cumulative sum of sentence counts within each partition.
     * Side effects: caches {@code foldWithinPartitionRDD} and broadcasts the
     * per-partition maxima for later use.
     */
    public void cumSumWithinPartition() {
        // Accumulator to get the max of the cumulative sum in each partition
        final Accumulator<Counter<Integer>> maxPerPartitionAcc =
                sc.accumulator(new Counter<Integer>(), new MaxPerPartitionAccumulator());
        // Partition mapping to fold within partition
        foldWithinPartitionRDD = sentenceCountRDD
                .mapPartitionsWithIndex(new FoldWithinPartitionFunction(maxPerPartitionAcc), true)
                .cache();
        // Presumably triggers an action so the accumulator is populated before
        // it is read below — TODO confirm against actionForMapPartition.
        actionForMapPartition(foldWithinPartitionRDD);
        // Broadcast the counter (partition index : sum of count) to all workers
        broadcastedMaxPerPartitionCounter = sc.broadcast(maxPerPartitionAcc.value());
    }
}
public class ObjectFactory2 {

    /**
     * Creates an instance of {@link JAXBElement}{@code <}{@link WasEndedBy}{@code >}.
     *
     * @param value the Java instance representing the element's value
     * @return the new wrapping JAXBElement
     */
    @XmlElementDecl(namespace = "http://www.w3.org/ns/prov#", name = "wasEndedBy")
    public JAXBElement<WasEndedBy> createWasEndedBy(WasEndedBy value) {
        // scope = null: the element is declared globally, not inside another type.
        return new JAXBElement<WasEndedBy>(_WasEndedBy_QNAME, WasEndedBy.class, null, value);
    }
}
public class DatasourceTemplate { /** * A { @ link RowMapper } that returns the object contained in the first field . */ public < T > RowMapper < T > getSingleColumnRowMapper ( Class < T > requiredType ) { } }
return new RowMapper < T > ( ) { @ Override public T map ( ResultSet rs ) throws SQLException { return ( T ) rs . getObject ( 1 ) ; } } ;
public class StreamUtils { /** * Convert a { @ link Path } to a { @ link String } and make sure it is properly formatted to be recognized as a directory * by { @ link TarArchiveEntry } . */ private static String formatPathToDir ( Path path ) { } }
return path . toString ( ) . endsWith ( Path . SEPARATOR ) ? path . toString ( ) : path . toString ( ) + Path . SEPARATOR ;
public class RequiredPropertiesUtil {

    /**
     * Throws a ConstraintViolationException if the model does not contain all
     * required server-managed triples for a container.
     *
     * @param model rdf to validate
     * @throws ConstraintViolationException if the model is missing any of the
     *         required server-managed properties or container types
     */
    public static void assertRequiredContainerTriples(final Model model) throws ConstraintViolationException {
        // Check required server-managed properties first, then container rdf:types.
        assertContainsRequiredProperties(model, REQUIRED_PROPERTIES);
        assertContainsRequiredTypes(model, CONTAINER_TYPES);
    }
}
public class MetadataHandler { /** * Fetch the metadata for a Schema by its full internal classname , e . g . " hex . schemas . DeepLearningV2 . DeepLearningParametersV2 " . TODO : Do we still need this ? */ @ Deprecated @ SuppressWarnings ( "unused" ) // called through reflection by RequestServer public MetadataV3 fetchSchemaMetadataByClass ( int version , MetadataV3 docs ) { } }
docs . schemas = new SchemaMetadataV3 [ 1 ] ; // NOTE : this will throw an exception if the classname isn ' t found : SchemaMetadataV3 meta = new SchemaMetadataV3 ( SchemaMetadata . createSchemaMetadata ( docs . classname ) ) ; docs . schemas [ 0 ] = meta ; return docs ;
public class JmsAdapter { /** * The method overrides the one in the super class to perform * JMS specific functions . */ @ Override protected void closeConnection ( Object connection ) { } }
try { if ( qSession != null ) qSession . close ( ) ; if ( qConnection != null ) qConnection . close ( ) ; } catch ( Exception e ) { } qSession = null ; qConnection = null ;
public class CPDefinitionPersistenceImpl {

    /**
     * Returns all the cp definitions where CProductId = &#63; and status = &#63;.
     *
     * @param CProductId the c product ID
     * @param status the status
     * @return the matching cp definitions
     */
    @Override
    public List<CPDefinition> findByC_S(long CProductId, int status) {
        // Delegate to the ranged overload with an unbounded range and no ordering.
        return findByC_S(CProductId, status, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null);
    }
}
public class CmsPublishSelectPanel {

    /**
     * Sets the publish groups.<p>
     *
     * Rebuilds the data model, resets the group/problem UI state, and — when a
     * default workflow is given — pre-selects it on the dialog.
     *
     * @param groups the list of publish groups
     * @param newData true if the data is new
     * @param defaultWorkflow the default workflow id
     */
    protected void setGroups(List<CmsPublishGroup> groups, boolean newData, String defaultWorkflow) {
        // Rebuild the model; re-render the selection state on every change.
        m_model = new CmsPublishDataModel(groups, this);
        m_model.setSelectionChangeAction(new Runnable() {
            public void run() {
                onChangePublishSelection();
            }
        });
        m_currentGroupIndex = 0;
        m_currentGroupPanel = null;
        m_problemsPanel.clear();
        if (newData) {
            // Fresh data: reset the "problems only" filter UI.
            m_showProblemsOnly = false;
            m_checkboxProblems.setChecked(false);
            m_checkboxProblems.setVisible(false);
            m_problemsPanel.setVisible(false);
        }
        m_groupPanels.clear();
        m_groupPanelContainer.clear();
        m_scrollPanel.onResizeDescendant();
        enableActions(false);
        int numGroups = groups.size();
        setResourcesVisible(numGroups > 0);
        if (numGroups == 0) {
            // Nothing to publish; leave actions disabled.
            return;
        }
        enableActions(true);
        addMoreListItems();
        showProblemCount(m_model.countProblems());
        if (defaultWorkflow != null) {
            // Pre-select the default workflow (second argument presumably
            // suppresses change events — TODO confirm against setFormValue).
            m_workflowSelector.setFormValue(defaultWorkflow, false);
            m_publishDialog.setWorkflowId(defaultWorkflow);
            m_actions = m_publishDialog.getSelectedWorkflow().getActions();
            m_publishDialog.setPanel(CmsPublishDialog.PANEL_SELECT);
        }
    }
}
public class CmsAttributeComparisonList { /** * Returns either the historical file or the offline file , depending on the version number . < p > * @ param cms the CmsObject to use * @ param structureId the structure id of the file * @ param version the historical version number * @ return either the historical file or the offline file , depending on the version number * @ throws CmsException if something goes wrong */ protected static CmsFile readFile ( CmsObject cms , CmsUUID structureId , String version ) throws CmsException { } }
if ( Integer . parseInt ( version ) == CmsHistoryResourceHandler . PROJECT_OFFLINE_VERSION ) { // offline CmsResource resource = cms . readResource ( structureId , CmsResourceFilter . IGNORE_EXPIRATION ) ; return cms . readFile ( resource ) ; } else { int ver = Integer . parseInt ( version ) ; if ( ver < 0 ) { // online CmsProject project = cms . getRequestContext ( ) . getCurrentProject ( ) ; try { cms . getRequestContext ( ) . setCurrentProject ( cms . readProject ( CmsProject . ONLINE_PROJECT_ID ) ) ; CmsResource resource = cms . readResource ( structureId , CmsResourceFilter . IGNORE_EXPIRATION ) ; return cms . readFile ( resource ) ; } finally { cms . getRequestContext ( ) . setCurrentProject ( project ) ; } } // backup return cms . readFile ( ( CmsHistoryFile ) cms . readResource ( structureId , ver ) ) ; }
public class StringUtils {

    /**
     * Prepends the prefix to the start of the string if the string does not
     * already start, case insensitive, with any of the prefixes.
     * <pre>
     * StringUtils.prependIfMissingIgnoreCase(null, null)      = null
     * StringUtils.prependIfMissingIgnoreCase("abc", null)     = "abc"
     * StringUtils.prependIfMissingIgnoreCase("", "xyz")       = "xyz"
     * StringUtils.prependIfMissingIgnoreCase("abc", "xyz")    = "xyzabc"
     * StringUtils.prependIfMissingIgnoreCase("xyzabc", "xyz") = "xyzabc"
     * StringUtils.prependIfMissingIgnoreCase("XYZabc", "xyz") = "XYZabc"
     * </pre>
     * <p>With additional prefixes,</p>
     * <pre>
     * StringUtils.prependIfMissingIgnoreCase(null, null, null)                              = null
     * StringUtils.prependIfMissingIgnoreCase("abc", null, null)                             = "abc"
     * StringUtils.prependIfMissingIgnoreCase("", "xyz", null)                               = "xyz"
     * StringUtils.prependIfMissingIgnoreCase("abc", "xyz", new CharSequence[]{null})        = "xyzabc"
     * StringUtils.prependIfMissingIgnoreCase("abc", "xyz", "")                              = "abc"
     * StringUtils.prependIfMissingIgnoreCase("abc", "xyz", "mno")                           = "xyzabc"
     * StringUtils.prependIfMissingIgnoreCase("xyzabc", "xyz", "mno")                        = "xyzabc"
     * StringUtils.prependIfMissingIgnoreCase("mnoabc", "xyz", "mno")                        = "mnoabc"
     * StringUtils.prependIfMissingIgnoreCase("XYZabc", "xyz", "mno")                        = "XYZabc"
     * StringUtils.prependIfMissingIgnoreCase("MNOabc", "xyz", "mno")                        = "MNOabc"
     * </pre>
     *
     * @param str The string.
     * @param prefix The prefix to prepend to the start of the string.
     * @param prefixes Additional prefixes that are valid (optional).
     * @return A new String if prefix was prepended, the same string otherwise.
     * @since 3.2
     */
    public static String prependIfMissingIgnoreCase(final String str, final CharSequence prefix, final CharSequence... prefixes) {
        // Delegate to the shared implementation with ignoreCase = true.
        return prependIfMissing(str, prefix, true, prefixes);
    }
}
public class HttpMethodInfo {

    /**
     * Calls {@link BodyConsumer#finished(HttpResponder)}. The current
     * bodyConsumer is set to {@code null} before the call, so {@code finished}
     * runs at most once even if it throws.
     */
    private void bodyConsumerFinish() {
        // Clear the field first so a re-entrant or failing call cannot
        // observe (or re-finish) a stale consumer.
        BodyConsumer consumer = bodyConsumer;
        bodyConsumer = null;
        try {
            consumer.finished(responder);
        } catch (Throwable t) {
            // Route any failure through the shared exception handler.
            exceptionHandler.handle(t, request, responder);
        }
    }
}
public class CPDefinitionOptionRelLocalServiceBaseImpl {

    /**
     * Deletes the cp definition option rel with the primary key from the
     * database. Also notifies the appropriate model listeners.
     *
     * @param CPDefinitionOptionRelId the primary key of the cp definition option rel
     * @return the cp definition option rel that was removed
     * @throws PortalException if a cp definition option rel with the primary key could not be found
     */
    @Indexable(type = IndexableType.DELETE)
    @Override
    public CPDefinitionOptionRel deleteCPDefinitionOptionRel(long CPDefinitionOptionRelId) throws PortalException {
        // Delegate to the persistence layer; @Indexable removes the search index entry.
        return cpDefinitionOptionRelPersistence.remove(CPDefinitionOptionRelId);
    }
}
public class LoggerLevelFilter { /** * Checks the log level of the event against the effective log level of the Logger referenced in the event . * @ param event the event * @ return Returns " NEUTRAL " if the event level is greater than or equal to the loggers effective log level , " DENY " * otherwise . */ @ Override public FilterReply decide ( ILoggingEvent event ) { } }
LoggerContext loggerFactory = ( LoggerContext ) LoggerFactory . getILoggerFactory ( ) ; Logger logger = loggerFactory . getLogger ( event . getLoggerName ( ) ) ; if ( event . getLevel ( ) . isGreaterOrEqual ( logger . getEffectiveLevel ( ) ) ) return FilterReply . NEUTRAL ; return FilterReply . DENY ;
public class HtmlEscape { /** * Perform a ( configurable ) HTML < strong > escape < / strong > operation on a < tt > char [ ] < / tt > input . * This method will perform an escape operation according to the specified * { @ link org . unbescape . html . HtmlEscapeType } and { @ link org . unbescape . html . HtmlEscapeLevel } * argument values . * All other < tt > char [ ] < / tt > - based < tt > escapeHtml * ( . . . ) < / tt > methods call this one with preconfigured * < tt > type < / tt > and < tt > level < / tt > values . * This method is < strong > thread - safe < / strong > . * @ param text the < tt > char [ ] < / tt > to be escaped . * @ param offset the position in < tt > text < / tt > at which the escape operation should start . * @ param len the number of characters in < tt > text < / tt > that should be escaped . * @ param writer the < tt > java . io . Writer < / tt > to which the escaped result will be written . Nothing will * be written at all to this writer if input is < tt > null < / tt > . * @ param type the type of escape operation to be performed , see { @ link org . unbescape . html . HtmlEscapeType } . * @ param level the escape level to be applied , see { @ link org . unbescape . html . HtmlEscapeLevel } . * @ throws IOException if an input / output exception occurs */ public static void escapeHtml ( final char [ ] text , final int offset , final int len , final Writer writer , final HtmlEscapeType type , final HtmlEscapeLevel level ) throws IOException { } }
if ( writer == null ) { throw new IllegalArgumentException ( "Argument 'writer' cannot be null" ) ; } if ( type == null ) { throw new IllegalArgumentException ( "The 'type' argument cannot be null" ) ; } if ( level == null ) { throw new IllegalArgumentException ( "The 'level' argument cannot be null" ) ; } final int textLen = ( text == null ? 0 : text . length ) ; if ( offset < 0 || offset > textLen ) { throw new IllegalArgumentException ( "Invalid (offset, len). offset=" + offset + ", len=" + len + ", text.length=" + textLen ) ; } if ( len < 0 || ( offset + len ) > textLen ) { throw new IllegalArgumentException ( "Invalid (offset, len). offset=" + offset + ", len=" + len + ", text.length=" + textLen ) ; } HtmlEscapeUtil . escape ( text , offset , len , writer , type , level ) ;
public class Ifc2x3tc1PackageImpl {

    /**
     * Returns the {@code EClass} for IfcMotorConnectionType, resolving it
     * lazily from the registered package on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getIfcMotorConnectionType() {
        if (ifcMotorConnectionTypeEClass == null) {
            // Lazily resolve classifier index 324 from the globally registered package.
            ifcMotorConnectionTypeEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(324);
        }
        return ifcMotorConnectionTypeEClass;
    }
}
public class ns_conf_upgrade_history { /** * Use this API to fetch filtered set of ns _ conf _ upgrade _ history resources . * filter string should be in JSON format . eg : " vm _ state : DOWN , name : [ a - z ] + " */ public static ns_conf_upgrade_history [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
ns_conf_upgrade_history obj = new ns_conf_upgrade_history ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; ns_conf_upgrade_history [ ] response = ( ns_conf_upgrade_history [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class SqlFunctionUtils { /** * Returns the string str right - padded with the string pad to a length of len characters . * If str is longer than len , the return value is shortened to len characters . */ public static String rpad ( String base , int len , String pad ) { } }
if ( len < 0 || "" . equals ( pad ) ) { return null ; } else if ( len == 0 ) { return "" ; } char [ ] data = new char [ len ] ; char [ ] baseChars = base . toCharArray ( ) ; char [ ] padChars = pad . toCharArray ( ) ; int pos = 0 ; // copy the base while ( pos < base . length ( ) && pos < len ) { data [ pos ] = baseChars [ pos ] ; pos += 1 ; } // copy the padding while ( pos < len ) { int i = 0 ; while ( i < pad . length ( ) && i < len - pos ) { data [ pos + i ] = padChars [ i ] ; i += 1 ; } pos += pad . length ( ) ; } return new String ( data ) ;
public class AmazonECRClient {

    /**
     * Describes image repositories in a registry.
     *
     * @param request the DescribeRepositories request
     * @return Result of the DescribeRepositories operation returned by the service.
     * @throws ServerException usually caused by a server-side issue
     * @throws InvalidParameterException the specified parameter is invalid
     * @throws RepositoryNotFoundException the specified repository could not be found
     * @sample AmazonECR.DescribeRepositories
     */
    @Override
    public DescribeRepositoriesResult describeRepositories(DescribeRepositoriesRequest request) {
        // Apply registered request handlers/customizations before dispatching.
        request = beforeClientExecution(request);
        return executeDescribeRepositories(request);
    }
}
public class DiagnosticsInner {

    /**
     * Gets a site detector response.
     *
     * @param resourceGroupName Name of the resource group to which the resource belongs.
     * @param siteName Site Name
     * @param detectorName Detector Resource Name
     * @param slot Slot Name
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the DetectorResponseInner object if successful.
     */
    public DetectorResponseInner getSiteDetectorResponseSlot(String resourceGroupName, String siteName, String detectorName, String slot) {
        // Block on the async variant and unwrap the single response body.
        return getSiteDetectorResponseSlotWithServiceResponseAsync(resourceGroupName, siteName, detectorName, slot)
                .toBlocking().single().body();
    }
}
public class FieldCoordinates { /** * Creates new field coordinates * @ param parentType the container of the field * @ param fieldDefinition the field definition * @ return new field coordinates represented by the two parameters */ public static FieldCoordinates coordinates ( GraphQLFieldsContainer parentType , GraphQLFieldDefinition fieldDefinition ) { } }
return new FieldCoordinates ( parentType . getName ( ) , fieldDefinition . getName ( ) ) ;
public class ResourceBindingImpl { /** * Returns the binding listener name . This method can only be called * if { @ link # bindingListener } is non - null . */ public String getBindingListenerName ( ) { } }
ServiceReference < ? > ref = bindingListener . getServiceReference ( ) ; Object serviceDescription = ref . getProperty ( Constants . SERVICE_DESCRIPTION ) ; if ( serviceDescription instanceof String ) { return ( String ) serviceDescription ; } return bindingListener . getService ( ) . getClass ( ) . getName ( ) + " (" + ref . getBundle ( ) . getSymbolicName ( ) + ')' ;
public class ConfigLoader { /** * Loads properties from property { @ link File } , if provided . Calling this method only has effect on new Email and Mailer instances after this . * @ param filename Any file reference that holds a properties list . * @ param addProperties Flag to indicate if the new properties should be added or replacing the old properties . * @ return The updated properties map that is used internally . */ public static Map < Property , Object > loadProperties ( final File filename , final boolean addProperties ) { } }
try { return loadProperties ( new FileInputStream ( filename ) , addProperties ) ; } catch ( final FileNotFoundException e ) { throw new IllegalStateException ( "error reading properties file from File" , e ) ; }
public class ToSolidMap { /** * Returns a function that converts a stream of { @ link Pair } into { @ link SolidMap } . */ public static < K , V > Func1 < Iterable < Pair < K , V > > , SolidMap < K , V > > pairsToSolidMap ( ) { } }
return new Func1 < Iterable < Pair < K , V > > , SolidMap < K , V > > ( ) { @ Override public SolidMap < K , V > call ( Iterable < Pair < K , V > > iterable ) { return new SolidMap < > ( iterable ) ; } } ;
public class Base32 { /** * Encodes the given bytes into a base - 32 string , inserting dashes after * every 6 characters of output . * @ param in * the bytes to encode . * @ return The formatted base - 32 string , or null if { @ code in } is null . */ public static String encodeWithDashes ( byte [ ] in ) { } }
if ( in == null ) { return null ; } return encodeWithDashes ( in , 0 , in . length ) ;
public class CmsCommentImages {

    /**
     * Returns the initialized image scaler object used to generate thumbnails
     * for the dialog form, creating it lazily with default settings on first
     * access.<p>
     *
     * @return the initialized image scaler object used to generate thumbnails for the dialog form
     */
    protected CmsImageScaler getImageScaler() {
        if (m_imageScaler == null) {
            // not initialized, create image scaler with default settings
            m_imageScaler = new CmsImageScaler();
            m_imageScaler.setWidth(THUMB_WIDTH);
            m_imageScaler.setHeight(THUMB_HEIGHT);
            // Favor speed over quality for thumbnail rendering.
            m_imageScaler.setRenderMode(Simapi.RENDER_SPEED);
            // Black fill color for areas not covered by the scaled image.
            m_imageScaler.setColor(new Color(0, 0, 0));
            // NOTE(review): scaler type 1 — meaning not visible here; confirm
            // against the CmsImageScaler type constants.
            m_imageScaler.setType(1);
        }
        return m_imageScaler;
    }
}
public class TsdbQuery { /** * Identify the table to be scanned based on the roll up and pre - aggregate * query parameters * @ return table name as byte array * @ since 2.4 */ private byte [ ] tableToBeScanned ( ) { } }
final byte [ ] tableName ; if ( RollupQuery . isValidQuery ( rollup_query ) ) { if ( pre_aggregate ) { tableName = rollup_query . getRollupInterval ( ) . getGroupbyTable ( ) ; } else { tableName = rollup_query . getRollupInterval ( ) . getTemporalTable ( ) ; } } else if ( pre_aggregate ) { tableName = tsdb . getDefaultInterval ( ) . getGroupbyTable ( ) ; } else { tableName = tsdb . dataTable ( ) ; } return tableName ;
public class CmsSpellcheckDictionaryIndexer { /** * Returns whether the Solr spellchecking index directories are empty * ( not initiliazed ) or not . * @ return true , if the directories contain no indexed data , otherwise false . */ private static boolean isSolrSpellcheckIndexDirectoryEmpty ( ) { } }
final File path = new File ( getSolrSpellcheckRfsPath ( ) ) ; final File [ ] directories = path . listFiles ( SPELLCHECKING_DIRECTORY_NAME_FILTER ) ; // Each directory that has been created by Solr but hasn ' t been indexed yet // contains exactly two files . If there are more files , at least one index has // already been built , so return false in that case . if ( directories != null ) { for ( final File directory : directories ) { if ( directory . list ( ) . length > 2 ) { return false ; } } } return true ;
public class ObjectFactory {

    /**
     * Creates an instance of {@link JAXBElement}{@code <}{@link CRSRefType}{@code >}.
     *
     * @param value Java instance representing the xml element's value.
     * @return the new instance of {@link JAXBElement}{@code <}{@link CRSRefType}{@code >}
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/gml", name = "crsRef")
    public JAXBElement<CRSRefType> createCrsRef(CRSRefType value) {
        // scope = null: the element is declared globally, not inside another type.
        return new JAXBElement<CRSRefType>(_CrsRef_QNAME, CRSRefType.class, null, value);
    }
}
public class CmsPositionBean { /** * Returns over which area of this the given position is . Will return < code > null < / code > if the provided position is not within this position . < p > * @ param absLeft the left position * @ param absTop the right position * @ param offset the border offset * @ return the area */ public Area getArea ( int absLeft , int absTop , int offset ) { } }
if ( isOverElement ( absLeft , absTop ) ) { if ( absLeft < ( m_left + 10 ) ) { // left border if ( absTop < ( m_top + offset ) ) { // top left corner return Area . CORNER_TOP_LEFT ; } else if ( absTop > ( ( m_top + m_height ) - offset ) ) { // bottom left corner return Area . CORNER_BOTTOM_LEFT ; } return Area . BORDER_LEFT ; } if ( absLeft > ( ( m_left + m_width ) - offset ) ) { // right border if ( absTop < ( m_top + offset ) ) { // top right corner return Area . CORNER_TOP_RIGHT ; // fixing opposite corner } else if ( absTop > ( ( m_top + m_height ) - offset ) ) { // bottom right corner return Area . CORNER_BOTTOM_RIGHT ; // fixing opposite corner } return Area . BORDER_RIGHT ; } if ( absTop < ( m_top + offset ) ) { // border top return Area . BORDER_TOP ; } else if ( absTop > ( ( m_top + m_height ) - offset ) ) { // border bottom return Area . BORDER_BOTTOM ; } return Area . CENTER ; } return null ;
public class NonBlockingProperties { /** * Create { @ link NonBlockingProperties } from an existing { @ link Properties } * object . * @ param aProperties * Source properties . May be < code > null < / code > . * @ return The newly created { @ link NonBlockingProperties } . Never * < code > null < / code > . */ @ Nonnull public static NonBlockingProperties create ( @ Nullable final Map < ? , ? > aProperties ) { } }
final NonBlockingProperties ret = new NonBlockingProperties ( ) ; if ( aProperties != null ) for ( final Map . Entry < ? , ? > aEntry : aProperties . entrySet ( ) ) ret . put ( ( String ) aEntry . getKey ( ) , ( String ) aEntry . getValue ( ) ) ; return ret ;
public class MappedByteBuffer { /** * of the mapping . Computed each time to avoid storing in every direct buffer . */ private long mappingOffset ( ) { } }
int ps = Bits . pageSize ( ) ; long offset = address % ps ; return ( offset >= 0 ) ? offset : ( ps + offset ) ;
public class Strings { /** * Determines whether the specified strings contains the specified string , ignoring case considerations . * @ param string the specified string * @ param strings the specified strings * @ return { @ code true } if the specified strings contains the specified string , ignoring case considerations , returns { @ code false } * otherwise */ public static boolean containsIgnoreCase ( final String string , final String [ ] strings ) { } }
if ( null == strings ) { return false ; } return Arrays . stream ( strings ) . anyMatch ( str -> StringUtils . equalsIgnoreCase ( string , str ) ) ;
public class DynamoDBExecutor { @ SuppressWarnings ( "rawtypes" ) public Stream < Map < String , Object > > stream ( final QueryRequest queryRequest ) { } }
return ( Stream ) stream ( Map . class , queryRequest ) ;
public class GuildController { /** * Kicks the { @ link net . dv8tion . jda . core . entities . Member Member } specified by the userId from the from the { @ link net . dv8tion . jda . core . entities . Guild Guild } . * < p > < b > Note : < / b > { @ link net . dv8tion . jda . core . entities . Guild # getMembers ( ) } will still contain the { @ link net . dv8tion . jda . core . entities . User User } * until Discord sends the { @ link net . dv8tion . jda . core . events . guild . member . GuildMemberLeaveEvent GuildMemberLeaveEvent } . * < p > Possible { @ link net . dv8tion . jda . core . requests . ErrorResponse ErrorResponses } caused by * the returned { @ link net . dv8tion . jda . core . requests . RestAction RestAction } include the following : * < ul > * < li > { @ link net . dv8tion . jda . core . requests . ErrorResponse # MISSING _ PERMISSIONS MISSING _ PERMISSIONS } * < br > The target Member cannot be kicked due to a permission discrepancy < / li > * < li > { @ link net . dv8tion . jda . core . requests . ErrorResponse # MISSING _ ACCESS MISSING _ ACCESS } * < br > We were removed from the Guild before finishing the task < / li > * < li > { @ link net . dv8tion . jda . core . requests . ErrorResponse # UNKNOWN _ MEMBER UNKNOWN _ MEMBER } * < br > The specified Member was removed from the Guild before finishing the task < / li > * < / ul > * @ param userId * The id of the { @ link net . dv8tion . jda . core . entities . User User } to kick * from the from the { @ link net . dv8tion . jda . core . entities . Guild Guild } . * @ param reason * The reason for this action or { @ code null } if there is no specified reason * @ throws net . dv8tion . jda . core . exceptions . InsufficientPermissionException * If the logged in account does not have the { @ link net . dv8tion . jda . core . Permission # KICK _ MEMBERS } permission . * @ throws net . dv8tion . jda . core . exceptions . 
HierarchyException * If the logged in account cannot kick the other member due to permission hierarchy position . * < br > See { @ link net . dv8tion . jda . core . utils . PermissionUtil # canInteract ( Member , Member ) PermissionUtil . canInteract ( Member , Member ) } * @ throws java . lang . IllegalArgumentException * If the userId provided does not correspond to a Member in this Guild or the provided { @ code userId } is blank / null . * @ return { @ link net . dv8tion . jda . core . requests . restaction . AuditableRestAction AuditableRestAction } */ @ CheckReturnValue public AuditableRestAction < Void > kick ( String userId , String reason ) { } }
Member member = getGuild ( ) . getMemberById ( userId ) ; Checks . check ( member != null , "The provided userId does not correspond to a member in this guild! Provided userId: %s" , userId ) ; return kick ( member , reason ) ;
public class SendGrid { /** * Attempt an API call . This method executes the API call asynchronously * on an internal thread pool . If the call is rate limited , the thread * will retry up to the maximum configured time . * @ param request the API request . */ public void attempt ( Request request ) { } }
this . attempt ( request , new APICallback ( ) { @ Override public void error ( Exception ex ) { } public void response ( Response r ) { } } ) ;
public class StringUtils {

    /**
     * Performs the logic for the {@code splitByWholeSeparatorPreserveAllTokens} methods.
     *
     * @param str the String to parse, may be {@code null}
     * @param separator String containing the String to be used as a delimiter,
     *        {@code null} splits on whitespace
     * @param max the maximum number of elements to include in the returned
     *        array. A zero or negative value implies no limit.
     * @param preserveAllTokens if {@code true}, adjacent separators are
     *        treated as empty token separators; if {@code false}, adjacent
     *        separators are treated as one separator.
     * @return an array of parsed Strings, {@code null} if null String input
     * @since 2.4
     */
    private static String[] splitByWholeSeparatorWorker(final String str, final String separator,
            final int max, final boolean preserveAllTokens) {
        if (str == null) {
            return null;
        }
        final int len = str.length();
        if (len == 0) {
            // Empty input yields an empty (shared, immutable) array.
            return ArrayUtils.EMPTY_STRING_ARRAY;
        }
        if (separator == null || EMPTY.equals(separator)) {
            // Split on whitespace.
            return splitWorker(str, null, max, preserveAllTokens);
        }
        final int separatorLength = separator.length();
        final ArrayList<String> substrings = new ArrayList<>();
        int numberOfSubstrings = 0;
        int beg = 0;  // start of the current token
        int end = 0;  // position of the next separator occurrence (or len when done)
        while (end < len) {
            end = str.indexOf(separator, beg);
            if (end > -1) {
                if (end > beg) {
                    // Non-empty token before the separator.
                    numberOfSubstrings += 1;
                    if (numberOfSubstrings == max) {
                        // Hit the limit: the rest of the string becomes the final token.
                        end = len;
                        substrings.add(str.substring(beg));
                    } else {
                        // The following is OK, because String.substring(beg, end) excludes
                        // the character at the position 'end'.
                        substrings.add(str.substring(beg, end));
                        // Set the starting point for the next search.
                        // The following is equivalent to beg = end + (separatorLength - 1) + 1,
                        // which is the right calculation:
                        beg = end + separatorLength;
                    }
                } else {
                    // We found a consecutive occurrence of the separator, so skip it.
                    if (preserveAllTokens) {
                        numberOfSubstrings += 1;
                        if (numberOfSubstrings == max) {
                            end = len;
                            substrings.add(str.substring(beg));
                        } else {
                            // Record an empty token for the adjacent separator.
                            substrings.add(EMPTY);
                        }
                    }
                    beg = end + separatorLength;
                }
            } else {
                // No more separators: String.substring(beg) goes from 'beg' to the end of the String.
                substrings.add(str.substring(beg));
                end = len;
            }
        }
        return substrings.toArray(new String[substrings.size()]);
    }
}
public class VectorFieldTypeInformation { /** * Constructor for a type request with dimensionality constraints . * @ param cls Class constraint * @ param mindim Minimum dimensionality * @ param maxdim Maximum dimensionality * @ param < V > vector type */ public static < V extends FeatureVector < ? > > VectorFieldTypeInformation < V > typeRequest ( Class < ? super V > cls , int mindim , int maxdim ) { } }
return new VectorFieldTypeInformation < > ( cls , mindim , maxdim ) ;
public class CmsUploadProgressInfo { /** * Returns the file text . < p > * @ return the file text */ private String getFileText ( ) { } }
if ( m_orderedFilenamesToUpload . size ( ) > 1 ) { return Messages . get ( ) . key ( Messages . GUI_UPLOAD_FILES_PLURAL_0 ) ; } else { return Messages . get ( ) . key ( Messages . GUI_UPLOAD_FILES_SINGULAR_0 ) ; }
public class HtmlDocWriter { /** * Print the frameset version of the Html file header . * Called only when generating an HTML frameset file . * @ param title Title of this HTML document * @ param noTimeStamp If true , don ' t print time stamp in header * @ param frameset the frameset to be added to the HTML document */ public void printFramesetDocument ( String title , boolean noTimeStamp , Content frameset ) throws IOException { } }
Content htmlDocType = DocType . FRAMESET ; Content htmlComment = new Comment ( configuration . getText ( "doclet.New_Page" ) ) ; Content head = new HtmlTree ( HtmlTag . HEAD ) ; head . addContent ( getGeneratedBy ( ! noTimeStamp ) ) ; if ( configuration . charset . length ( ) > 0 ) { Content meta = HtmlTree . META ( "Content-Type" , CONTENT_TYPE , configuration . charset ) ; head . addContent ( meta ) ; } Content windowTitle = HtmlTree . TITLE ( new StringContent ( title ) ) ; head . addContent ( windowTitle ) ; head . addContent ( getFramesetJavaScript ( ) ) ; Content htmlTree = HtmlTree . HTML ( configuration . getLocale ( ) . getLanguage ( ) , head , frameset ) ; Content htmlDocument = new HtmlDocument ( htmlDocType , htmlComment , htmlTree ) ; write ( htmlDocument ) ;
public class DynamicRegistrationBean { /** * Set init - parameters for this registration . Calling this method will replace any * existing init - parameters . * @ param initParameters the init parameters * @ see # getInitParameters * @ see # addInitParameter */ public void setInitParameters ( Map < String , String > initParameters ) { } }
Assert . notNull ( initParameters , "InitParameters must not be null" ) ; this . initParameters = new LinkedHashMap < > ( initParameters ) ;
public class ConfigurationUtils { /** * Fetches a value specified by key * @ param map XPP3 map equivalent * @ param key navigation key * @ param basedir basedir can be different from current basedir * @ param defaultValue Default value if no such key exists * @ return File representation of the value */ static File valueAsFile ( Map < String , Object > map , Key key , File basedir , File defaultValue ) { } }
String value = valueAsString ( map , key , null ) ; if ( Validate . isNullOrEmpty ( value ) ) { return defaultValue ; } File candidate = new File ( value ) ; if ( ! candidate . isAbsolute ( ) && ( basedir != null && basedir . exists ( ) ) ) { return new File ( basedir , candidate . getPath ( ) ) ; } return candidate ;
public class ConnectionImpl {

    /**
     * Transaction completion callback: removes the completed transaction from this
     * connection's set of owned transactions (unless the connection is already closed,
     * in which case the transaction has already been removed).
     *
     * (non-Javadoc)
     * @see com.ibm.ws.sib.msgstore.transactions.TransactionCallback#afterCompletion(com.ibm.ws.sib.msgstore.Transaction, boolean)
     *
     * @param transaction the transaction that has completed
     * @param committed whether the transaction committed (true) or rolled back (false)
     */
    @Override
    public void afterCompletion(TransactionCommon transaction, boolean committed) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "afterCompletion", new Object[] { transaction, Boolean.valueOf(committed) });
        // Lock order: connection monitor first, then the owned-transactions set.
        synchronized (this) {
            // If the connection is already closed this transaction
            // will have been removed.
            if (!_closed) {
                synchronized (_ownedTransactions) {
                    // Remove the transaction from the connections list
                    _ownedTransactions.remove(transaction);
                }
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "afterCompletion");
    }
}
public class PeriodCounterFactory { /** * compteur custom */ Counter getCustomCounter ( Range range ) { } }
assert range . getPeriod ( ) == null ; final Counter customCounter = createPeriodCounter ( "yyyy-MM-dd" , range . getStartDate ( ) ) ; addRequestsAndErrorsForRange ( customCounter , range ) ; return customCounter ;
public class NodeTypeDefinitionAccessProvider { /** * Read node NodeDefinitionData [ ] of node type * @ param nodeData * @ return * @ throws RepositoryException * @ throws NodeTypeReadException * @ throws RepositoryException */ public NodeDefinitionData [ ] readNodeDefinitions ( NodeData nodeData ) throws NodeTypeReadException , RepositoryException { } }
InternalQName name = null ; List < NodeDefinitionData > nodeDefinitionDataList ; List < NodeData > childDefinitions = dataManager . getChildNodesData ( nodeData ) ; if ( childDefinitions . size ( ) > 0 ) name = readMandatoryName ( nodeData , null , Constants . JCR_NODETYPENAME ) ; else return new NodeDefinitionData [ 0 ] ; nodeDefinitionDataList = new ArrayList < NodeDefinitionData > ( ) ; for ( NodeData childDefinition : childDefinitions ) { if ( Constants . NT_CHILDNODEDEFINITION . equals ( childDefinition . getPrimaryTypeName ( ) ) ) { nodeDefinitionDataList . add ( nodeDefinitionAccessProvider . read ( childDefinition , name ) ) ; } } return nodeDefinitionDataList . toArray ( new NodeDefinitionData [ nodeDefinitionDataList . size ( ) ] ) ;
public class FormatterResolver { /** * キャッシュに初期値データを登録する 。 * ・ ロケールによって切り替わるフォーマットや 、 間違った組み込みフォーマットの場合を登録しておく 。 */ public synchronized void registerDefaultFormat ( ) { } }
final Locale [ ] availableLocales = new Locale [ ] { Locale . JAPANESE } ; // 組み込み書式の登録 for ( int i = 0 ; i <= 58 ; i ++ ) { final CellFormatter formatter = createDefaultFormatter ( String . valueOf ( i ) , availableLocales ) ; if ( formatter != null ) { registerFormatter ( ( short ) i , formatter ) ; } } // 特別な書式 final String [ ] names = new String [ ] { "F800" , "F400" } ; for ( String name : names ) { final String key = String . format ( "format.%s" , name ) ; final String defaultFormat = messageResolver . getMessage ( key ) ; if ( defaultFormat == null ) { continue ; } final CellFormatter formatter = createDefaultFormatter ( name , availableLocales ) ; if ( formatter != null ) { registerFormatter ( defaultFormat , formatter ) ; } }
public class ContextItems { /** * Sets a context item value . * @ param itemName Item name . * @ param value The value to set . The value ' s class must have an associated context serializer * registered for it . */ public void setItem ( String itemName , Object value ) { } }
if ( value == null ) { setItem ( itemName , ( String ) null ) ; } else { @ SuppressWarnings ( "unchecked" ) ISerializer < Object > contextSerializer = ( ISerializer < Object > ) ContextSerializerRegistry . getInstance ( ) . get ( value . getClass ( ) ) ; if ( contextSerializer == null ) { throw new ContextException ( "No serializer found for type " + value . getClass ( ) . getName ( ) ) ; } setItem ( itemName , contextSerializer . serialize ( value ) ) ; }
public class DefaultTopologyScheduler { /** * TODO : problematic : some dead slots have been freed * @ param context topology assign context */ protected void freeUsed ( TopologyAssignContext context ) { } }
Set < Integer > canFree = new HashSet < > ( ) ; canFree . addAll ( context . getAllTaskIds ( ) ) ; canFree . removeAll ( context . getUnstoppedTaskIds ( ) ) ; Map < String , SupervisorInfo > cluster = context . getCluster ( ) ; Assignment oldAssigns = context . getOldAssignment ( ) ; for ( Integer task : canFree ) { ResourceWorkerSlot worker = oldAssigns . getWorkerByTaskId ( task ) ; if ( worker == null ) { LOG . warn ( "No ResourceWorkerSlot of task " + task + " is found when freeing resource" ) ; continue ; } SupervisorInfo supervisorInfo = cluster . get ( worker . getNodeId ( ) ) ; if ( supervisorInfo == null ) { continue ; } supervisorInfo . getAvailableWorkerPorts ( ) . add ( worker . getPort ( ) ) ; }
public class MCP3424GpioProvider {

    /**
     * Performs an immediate data acquisition directly on the ADC chip to obtain the
     * requested pin's input conversion value: writes the channel-selecting configuration,
     * sleeps for one conversion period, reads back the raw bytes and assembles the result.
     *
     * @param pin requested input pin to acquire conversion value
     * @return conversion value for requested analog input pin, or INVALID_VALUE when the
     *         readback does not match the request or the value is out of range
     * @throws IOException on I2C communication failure
     */
    @Override
    public double getImmediateValue(final Pin pin) throws IOException {
        // Pin address to read from to device (channel select occupies config bits 5-6).
        int command = (configuration & 0x9F) | (pin.getAddress() << 5);
        device.write((byte) command); // Write configuration to device
        double rate = 0.0;
        byte data[] = null;
        // Sample-rate bits (config bits 2-3) select the answer length and conversion rate.
        // NOTE(review): the rates used are lower than the commented nominals (e.g. 176 vs 240
        // SPS), presumably as a timing safety margin -- confirm against the MCP3424 datasheet.
        switch (configuration & 0x0C) {
            case 0x00: { data = new byte[3]; rate = 176.0 /* 240.0 */; break; }
            case 0x04: { data = new byte[3]; rate = 44.0 /* 60.0 */; break; }
            case 0x08: { data = new byte[3]; rate = 11.0 /* 15.0 */; break; }
            case 0x0C: { data = new byte[4]; rate = 2.75 /* 3.75 */; break; }
            default: throw new IllegalArgumentException();
        }
        // Sleep current thread during conversion
        try {
            Thread.sleep((long) Math.ceil(1000.0 / rate));
        } catch (InterruptedException ex) {
            ex.printStackTrace();
        }
        // Get answer from device (last byte is the configuration/status echo).
        device.read(data, 0, data.length);
        int answer = data[data.length - 1] & 0xFF;
        // Top bit of the first data byte is the sign.
        double weight = (data[0] & 0x80) == 0 ? 1 : -1;
        // weight = weight / ( 1 << ( answer & 0x03 ) ) ;
        // Accept only if the echoed channel matches the request and conversion is complete.
        if (((answer & 0x60) == (command & 0x60)) && ((answer & 0x80) == 0)) {
            if (weight < 0) {
                // Negative reading: invert the data bytes...
                for (int i = 0; i < data.length - 1; i++) {
                    data[i] = (byte) (~data[i] & 0xFF);
                }
                // ...and add one. NOTE(review): this adds 1 to the second-to-last byte
                // without carry propagation -- looks like an incomplete two's-complement
                // negation near byte boundaries; verify against hardware behavior.
                data[data.length - 2] = (byte) (data[data.length - 2] + 1);
            }
            // Mask the first byte down to the significant bits for the selected resolution.
            switch (configuration & 0x0C) {
                case 0x00: { data[0] = (byte) (data[0] & 0x07); break; }
                case 0x04: { data[0] = (byte) (data[0] & 0x1F); break; }
                case 0x08: { data[0] = (byte) (data[0] & 0x7F); break; }
                case 0x0C: { data[0] = (byte) (data[0] & 0x01); break; }
                default: throw new IllegalArgumentException();
            }
            // Assemble the big-endian magnitude from the data bytes (excluding the status byte).
            int buffer = data[0] & 0xFF;
            for (int i = 1; i < data.length - 1; i++) {
                buffer = (buffer << 8) | (data[i] & 0xFF);
            }
            // validate value within acceptable range
            double value = buffer * weight;
            if (value >= getMinSupportedValue() && value <= getMaxSupportedValue()) {
                getPinCache(pin).setAnalogValue(value);
                return value;
            }
        }
        return INVALID_VALUE;
    }
}
public class SchemaManager { /** * Checks class and disk for class definition . * @ param cls * @ param node * @ return Class definition , may return null if no definition is found . */ private ZooClassDef locateClassDefinition ( Class < ? > cls , Node node ) { } }
ZooClassDef def = cache . getSchema ( cls , node ) ; if ( def == null || def . jdoZooIsDeleted ( ) ) { return null ; } return def ;
public class TransactionalUniqueIndex { /** * for int indicies : */ public Object get ( int indexValue ) { } }
Object result = null ; TransactionLocalStorage txStorage = ( TransactionLocalStorage ) perTransactionStorage . get ( MithraManagerProvider . getMithraManager ( ) . zGetCurrentTransactionWithNoCheck ( ) ) ; Index perThreadAdded = txStorage == null ? null : txStorage . added ; if ( perThreadAdded != null ) { result = perThreadAdded . get ( indexValue ) ; } if ( result == null ) { result = this . mainIndex . get ( indexValue ) ; result = this . checkDeletedIndex ( result , txStorage ) ; } return result ;
public class UIUtils { /** * helper method to set the background depending on the android version * @ param v * @ param d */ @ Deprecated @ SuppressLint ( "NewApi" ) public static void setBackground ( View v , Drawable d ) { } }
ViewCompat . setBackground ( v , d ) ;
public class Classfile { /** * Get the byte offset within the buffer of a string from the constant pool , or 0 for a null string . * @ param cpIdx * the constant pool index * @ param subFieldIdx * should be 0 for CONSTANT _ Utf8 , CONSTANT _ Class and CONSTANT _ String , and for * CONSTANT _ NameAndType _ info , fetches the name for value 0 , or the type descriptor for value 1. * @ return the constant pool string offset * @ throws ClassfileFormatException * If a problem is detected */ private int getConstantPoolStringOffset ( final int cpIdx , final int subFieldIdx ) throws ClassfileFormatException { } }
if ( cpIdx < 1 || cpIdx >= cpCount ) { throw new ClassfileFormatException ( "Constant pool index " + cpIdx + ", should be in range [1, " + ( cpCount - 1 ) + "] -- cannot continue reading class. " + "Please report this at https://github.com/classgraph/classgraph/issues" ) ; } final int t = entryTag [ cpIdx ] ; if ( ( t != 12 && subFieldIdx != 0 ) || ( t == 12 && subFieldIdx != 0 && subFieldIdx != 1 ) ) { throw new ClassfileFormatException ( "Bad subfield index " + subFieldIdx + " for tag " + t + ", cannot continue reading class. " + "Please report this at https://github.com/classgraph/classgraph/issues" ) ; } int cpIdxToUse ; if ( t == 0 ) { // Assume this means null return 0 ; } else if ( t == 1 ) { // CONSTANT _ Utf8 cpIdxToUse = cpIdx ; } else if ( t == 7 || t == 8 || t == 19 ) { // t = = 7 = > CONSTANT _ Class , e . g . " [ [ I " , " [ Ljava / lang / Thread ; " ; t = = 8 = > CONSTANT _ String ; // t = = 19 = > CONSTANT _ Method _ Info final int indirIdx = indirectStringRefs [ cpIdx ] ; if ( indirIdx == - 1 ) { // Should not happen throw new ClassfileFormatException ( "Bad string indirection index, cannot continue reading class. " + "Please report this at https://github.com/classgraph/classgraph/issues" ) ; } if ( indirIdx == 0 ) { // I assume this represents a null string , since the zeroeth entry is unused return 0 ; } cpIdxToUse = indirIdx ; } else if ( t == 12 ) { // CONSTANT _ NameAndType _ info final int compoundIndirIdx = indirectStringRefs [ cpIdx ] ; if ( compoundIndirIdx == - 1 ) { // Should not happen throw new ClassfileFormatException ( "Bad string indirection index, cannot continue reading class. " + "Please report this at https://github.com/classgraph/classgraph/issues" ) ; } final int indirIdx = ( subFieldIdx == 0 ? ( compoundIndirIdx >> 16 ) : compoundIndirIdx ) & 0xffff ; if ( indirIdx == 0 ) { // Should not happen throw new ClassfileFormatException ( "Bad string indirection index, cannot continue reading class. 
" + "Please report this at https://github.com/classgraph/classgraph/issues" ) ; } cpIdxToUse = indirIdx ; } else { throw new ClassfileFormatException ( "Wrong tag number " + t + " at constant pool index " + cpIdx + ", " + "cannot continue reading class. Please report this at " + "https://github.com/classgraph/classgraph/issues" ) ; } if ( cpIdxToUse < 1 || cpIdxToUse >= cpCount ) { throw new ClassfileFormatException ( "Constant pool index " + cpIdx + ", should be in range [1, " + ( cpCount - 1 ) + "] -- cannot continue reading class. " + "Please report this at https://github.com/classgraph/classgraph/issues" ) ; } return entryOffset [ cpIdxToUse ] ;
public class JCusparse { /** * Description : This routine converts a sparse matrix in CSC storage format * to a dense matrix . */ public static int cusparseScsc2dense ( cusparseHandle handle , int m , int n , cusparseMatDescr descrA , Pointer cscSortedValA , Pointer cscSortedRowIndA , Pointer cscSortedColPtrA , Pointer A , int lda ) { } }
return checkResult ( cusparseScsc2denseNative ( handle , m , n , descrA , cscSortedValA , cscSortedRowIndA , cscSortedColPtrA , A , lda ) ) ;
public class Bivariate { /** * Calculates BivariateMatrix for a given statistic * @ param dataSet * @ param type * @ return */ private static DataTable2D bivariateMatrix ( Dataframe dataSet , BivariateType type ) { } }
DataTable2D bivariateMatrix = new DataTable2D ( ) ; // extract values of first variable Map < Object , TypeInference . DataType > columnTypes = dataSet . getXDataTypes ( ) ; Object [ ] allVariables = columnTypes . keySet ( ) . toArray ( ) ; int numberOfVariables = allVariables . length ; TransposeDataList transposeDataList ; for ( int i = 0 ; i < numberOfVariables ; ++ i ) { Object variable0 = allVariables [ i ] ; if ( columnTypes . get ( variable0 ) == TypeInference . DataType . CATEGORICAL ) { continue ; } transposeDataList = new TransposeDataList ( ) ; // extract values of first variable transposeDataList . put ( 0 , dataSet . getXColumn ( variable0 ) ) ; for ( int j = i ; j < numberOfVariables ; ++ j ) { Object variable1 = allVariables [ j ] ; if ( columnTypes . get ( variable1 ) == TypeInference . DataType . CATEGORICAL ) { continue ; } transposeDataList . put ( 1 , dataSet . getXColumn ( variable1 ) ) ; double value = 0.0 ; if ( type == BivariateType . COVARIANCE ) { value = Descriptives . covariance ( transposeDataList , true ) ; } else if ( type == BivariateType . PEARSONCORRELATION ) { if ( variable0 . equals ( variable1 ) ) { value = 1.0 ; } else { value = PearsonCorrelation . calculateCorrelation ( transposeDataList ) ; } } else if ( type == BivariateType . SPEARMANCORRELATION ) { if ( variable0 . equals ( variable1 ) ) { value = 1.0 ; } else { value = SpearmanCorrelation . calculateCorrelation ( transposeDataList ) ; } } else if ( type == BivariateType . KENDALLTAUCORRELATION ) { if ( variable0 . equals ( variable1 ) ) { value = 1.0 ; } else { value = KendallTauCorrelation . calculateCorrelation ( transposeDataList ) ; } } // bivariateMatrix . internalData . get ( variable0 ) . internalData . put ( variable1 , value ) ; bivariateMatrix . put2d ( variable0 , variable1 , value ) ; if ( ! variable0 . equals ( variable1 ) ) { /* if ( ! bivariateMatrix . internalData . containsKey ( variable1 ) ) { bivariateMatrix . internalData . 
put ( variable1 , new AssociativeArray ( ) ) ; bivariateMatrix . internalData . get ( variable1 ) . internalData . put ( variable0 , value ) ; */ bivariateMatrix . put2d ( variable1 , variable0 , value ) ; } } // transposeDataList = null ; } return bivariateMatrix ;
public class LineReader { /** * Also closes the LineReader . */ public Collection < String > collect ( Collection < String > result ) throws IOException { } }
String line ; while ( true ) { line = next ( ) ; if ( line == null ) { close ( ) ; return result ; } result . add ( line ) ; }
public class BaseScriptPlugin { /** * Create the command array for the data context . * @ param dataContext data * @ return arglist */ protected ExecArgList createScriptArgsList ( final Map < String , Map < String , String > > dataContext ) { } }
final ScriptPluginProvider plugin = getProvider ( ) ; final File scriptfile = plugin . getScriptFile ( ) ; final String scriptargs = null != plugin . getScriptArgs ( ) ? DataContextUtils . replaceDataReferencesInString ( plugin . getScriptArgs ( ) , dataContext ) : null ; final String [ ] scriptargsarr = null != plugin . getScriptArgsArray ( ) ? DataContextUtils . replaceDataReferencesInArray ( plugin . getScriptArgsArray ( ) , dataContext ) : null ; final String scriptinterpreter = plugin . getScriptInterpreter ( ) ; final boolean interpreterargsquoted = plugin . getInterpreterArgsQuoted ( ) ; return getScriptExecHelper ( ) . createScriptArgList ( scriptfile . getAbsolutePath ( ) , scriptargs , scriptargsarr , scriptinterpreter , interpreterargsquoted ) ;
public class UnitizingAnnotationStudy { /** * Utility method for moving on the cursor of the given iterator until * a unit of the specified rater and category is returned . Both * the rater index and the category may be null if those filter * conditions are to be ignored . */ public static IUnitizingAnnotationUnit findNextUnit ( final Iterator < IUnitizingAnnotationUnit > units , int raterIdx , final Object category ) { } }
while ( units . hasNext ( ) ) { IUnitizingAnnotationUnit result = units . next ( ) ; if ( category != null && ! category . equals ( result . getCategory ( ) ) ) { continue ; } if ( raterIdx < 0 || result . getRaterIdx ( ) == raterIdx ) { return result ; } } return null ;
public class Transcoding { /** * / * rb _ transcoding _ convert */ EConvResult convert ( byte [ ] in , Ptr inPtr , int inStop , byte [ ] out , Ptr outPtr , int outStop , int flags ) { } }
return transcodeRestartable ( in , inPtr , inStop , out , outPtr , outStop , flags ) ;
public class AbilityUtils { /** * A " best - effort " boolean stating whether the update is a super - group message or not . * @ param update a Telegram { @ link Update } * @ return whether the update is linked to a group */ public static boolean isSuperGroupUpdate ( Update update ) { } }
if ( MESSAGE . test ( update ) ) { return update . getMessage ( ) . isSuperGroupMessage ( ) ; } else if ( CALLBACK_QUERY . test ( update ) ) { return update . getCallbackQuery ( ) . getMessage ( ) . isSuperGroupMessage ( ) ; } else if ( CHANNEL_POST . test ( update ) ) { return update . getChannelPost ( ) . isSuperGroupMessage ( ) ; } else if ( EDITED_CHANNEL_POST . test ( update ) ) { return update . getEditedChannelPost ( ) . isSuperGroupMessage ( ) ; } else if ( EDITED_MESSAGE . test ( update ) ) { return update . getEditedMessage ( ) . isSuperGroupMessage ( ) ; } else { return false ; }
public class AmazonIdentityManagementClient { /** * After you generate a user , group , role , or policy report using the * < code > GenerateServiceLastAccessedDetails < / code > operation , you can use the < code > JobId < / code > parameter in * < code > GetServiceLastAccessedDetails < / code > . This operation retrieves the status of your report job and a list of * AWS services that the resource ( user , group , role , or managed policy ) can access . * < note > * Service last accessed data does not use other policy types when determining whether a resource could access a * service . These other policy types include resource - based policies , access control lists , AWS Organizations * policies , IAM permissions boundaries , and AWS STS assume role policies . It only applies permissions policy logic . * For more about the evaluation of policy types , see < a href = * " https : / / docs . aws . amazon . com / IAM / latest / UserGuide / reference _ policies _ evaluation - logic . html # policy - eval - basics " * > Evaluating Policies < / a > in the < i > IAM User Guide < / i > . * < / note > * For each service that the resource could access using permissions policies , the operation returns details about * the most recent access attempt . If there was no attempt , the service is listed without details about the most * recent attempt to access the service . If the operation fails , the < code > GetServiceLastAccessedDetails < / code > * operation returns the reason that it failed . * The < code > GetServiceLastAccessedDetails < / code > operation returns a list of services . This list includes the * number of entities that have attempted to access the service and the date and time of the last attempt . 
It also * returns the ARN of the following entity , depending on the resource ARN that you used to generate the report : * < ul > * < li > * < b > User < / b > – Returns the user ARN that you used to generate the report * < / li > * < li > * < b > Group < / b > – Returns the ARN of the group member ( user ) that last attempted to access the service * < / li > * < li > * < b > Role < / b > – Returns the role ARN that you used to generate the report * < / li > * < li > * < b > Policy < / b > – Returns the ARN of the user or role that last used the policy to attempt to access the service * < / li > * < / ul > * By default , the list is sorted by service namespace . * @ param getServiceLastAccessedDetailsRequest * @ return Result of the GetServiceLastAccessedDetails operation returned by the service . * @ throws NoSuchEntityException * The request was rejected because it referenced a resource entity that does not exist . The error message * describes the resource . * @ throws InvalidInputException * The request was rejected because an invalid or out - of - range value was supplied for an input parameter . * @ sample AmazonIdentityManagement . GetServiceLastAccessedDetails * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / iam - 2010-05-08 / GetServiceLastAccessedDetails " * target = " _ top " > AWS API Documentation < / a > */ @ Override public GetServiceLastAccessedDetailsResult getServiceLastAccessedDetails ( GetServiceLastAccessedDetailsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetServiceLastAccessedDetails ( request ) ;