signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class StreamSource { /** * Close the stream . */ public void close ( ) { } }
StreamSource ss = _indirectSource ; _indirectSource = null ; if ( ss != null ) { ss . freeUseCount ( ) ; } else { freeUseCount ( ) ; }
public class ImmutableGrid { /** * Obtains an immutable grid by copying a set of cells . * @ param < R > the type of the value * @ param rowCount the number of rows , zero or greater * @ param columnCount the number of columns , zero or greater * @ param cells the cells to copy , not null * @ return the immutable grid , not null * @ throws IndexOutOfBoundsException if either index is less than zero */ public static < R > ImmutableGrid < R > copyOf ( int rowCount , int columnCount , Iterable < ? extends Cell < R > > cells ) { } }
if ( cells == null ) { throw new IllegalArgumentException ( "Cells must not be null" ) ; } if ( ! cells . iterator ( ) . hasNext ( ) ) { return new EmptyGrid < R > ( rowCount , columnCount ) ; } return new SparseImmutableGrid < R > ( rowCount , columnCount , cells ) ;
public class AbstractPluginBeanValidation { /** * attribute from parent declaration */ private void _processValue ( @ Nonnull final CValuePropertyInfo aProperty , final ClassOutline aClassOutline ) { } }
final String sPropertyName = aProperty . getName ( false ) ; final XSComponent aDefinition = aProperty . getSchemaComponent ( ) ; if ( aDefinition instanceof RestrictionSimpleTypeImpl ) { final RestrictionSimpleTypeImpl aParticle = ( RestrictionSimpleTypeImpl ) aDefinition ; final XSSimpleType aSimpleType = aParticle . asSimpleType ( ) ; final JFieldVar aFieldVar = aClassOutline . implClass . fields ( ) . get ( sPropertyName ) ; // if ( particle . isRequired ( ) ) { // if ( ! hasAnnotation ( var , NotNull . class ) ) { // if ( notNullAnnotations ) { // var . annotate ( NotNull . class ) ; _processType ( aSimpleType , aFieldVar ) ; }
public class Matrix4d { /** * / * ( non - Javadoc ) * @ see org . joml . Matrix4dc # getTransposed ( int , java . nio . ByteBuffer ) */ public ByteBuffer getTransposed ( int index , ByteBuffer dest ) { } }
MemUtil . INSTANCE . putTransposed ( this , index , dest ) ; return dest ;
public class ComputePoliciesInner { /** * Lists the Data Lake Analytics compute policies within the specified Data Lake Analytics account . An account supports , at most , 50 policies . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the PagedList & lt ; ComputePolicyInner & gt ; object if successful . */ public PagedList < ComputePolicyInner > listByAccountNext ( final String nextPageLink ) { } }
ServiceResponse < Page < ComputePolicyInner > > response = listByAccountNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) ; return new PagedList < ComputePolicyInner > ( response . body ( ) ) { @ Override public Page < ComputePolicyInner > nextPage ( String nextPageLink ) { return listByAccountNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) . body ( ) ; } } ;
public class StringUtil { /** * adds zeros add the begin of a int example : addZeros ( 2,3 ) return " 002" * @ param i number to add nulls * @ param size * @ return min len of return value ; */ public static String addZeros ( int i , int size ) { } }
String rtn = Caster . toString ( i ) ; if ( rtn . length ( ) < size ) return repeatString ( "0" , size - rtn . length ( ) ) + rtn ; return rtn ;
public class BinarySerde { /** * Setup the given byte buffer * for serialization ( note that this is for uncompressed INDArrays ) * 4 bytes int for rank * 4 bytes for data opType * shape buffer * data buffer * @ param arr the array to setup * @ param allocated the byte buffer to setup * @ param rewind whether to rewind the byte buffer or nt */ public static void doByteBufferPutUnCompressed ( INDArray arr , ByteBuffer allocated , boolean rewind ) { } }
// ensure we send data to host memory Nd4j . getExecutioner ( ) . commit ( ) ; Nd4j . getAffinityManager ( ) . ensureLocation ( arr , AffinityManager . Location . HOST ) ; ByteBuffer buffer = arr . data ( ) . pointer ( ) . asByteBuffer ( ) . order ( ByteOrder . nativeOrder ( ) ) ; ByteBuffer shapeBuffer = arr . shapeInfoDataBuffer ( ) . pointer ( ) . asByteBuffer ( ) . order ( ByteOrder . nativeOrder ( ) ) ; // 2 four byte ints at the beginning allocated . putInt ( arr . rank ( ) ) ; // put data opType next so its self describing allocated . putInt ( arr . data ( ) . dataType ( ) . ordinal ( ) ) ; allocated . put ( shapeBuffer ) ; allocated . put ( buffer ) ; if ( rewind ) allocated . rewind ( ) ;
public class DescribeEventTypesResult { /** * A list of event types that match the filter criteria . Event types have a category ( < code > issue < / code > , * < code > accountNotification < / code > , or < code > scheduledChange < / code > ) , a service ( for example , < code > EC2 < / code > , * < code > RDS < / code > , < code > DATAPIPELINE < / code > , < code > BILLING < / code > ) , and a code ( in the format * < code > AWS _ < i > SERVICE < / i > _ < i > DESCRIPTION < / i > < / code > ; for example , < code > AWS _ EC2 _ SYSTEM _ MAINTENANCE _ EVENT < / code > ) . * @ param eventTypes * A list of event types that match the filter criteria . Event types have a category ( < code > issue < / code > , * < code > accountNotification < / code > , or < code > scheduledChange < / code > ) , a service ( for example , * < code > EC2 < / code > , < code > RDS < / code > , < code > DATAPIPELINE < / code > , < code > BILLING < / code > ) , and a code ( in the * format < code > AWS _ < i > SERVICE < / i > _ < i > DESCRIPTION < / i > < / code > ; for example , * < code > AWS _ EC2 _ SYSTEM _ MAINTENANCE _ EVENT < / code > ) . */ public void setEventTypes ( java . util . Collection < EventType > eventTypes ) { } }
if ( eventTypes == null ) { this . eventTypes = null ; return ; } this . eventTypes = new java . util . ArrayList < EventType > ( eventTypes ) ;
public class CmsDefaultWorkflowManager { /** * Gets the localized label for a given CMS context and key . < p > * @ param cms the CMS context * @ param key the localization key * @ return the localized label */ public String getLabel ( CmsObject cms , String key ) { } }
CmsMessages messages = Messages . get ( ) . getBundle ( getLocale ( cms ) ) ; return messages . key ( key ) ;
public class ConversionTracker { /** * Gets the countingType value for this ConversionTracker . * @ return countingType * How to count events for this conversion tracker . * If countingType is MANY _ PER _ CLICK , then all conversion * events are counted . * If countingType is ONE _ PER _ CLICK , then only the first * conversion event of this type * following a given click will be counted . * More information is available at https : / / support . google . com / adwords / answer / 3438531 * < span class = " constraint Selectable " > This field can be selected using * the value " CountingType " . < / span > < span class = " constraint Filterable " > This * field can be filtered on . < / span > */ public com . google . api . ads . adwords . axis . v201809 . cm . ConversionDeduplicationMode getCountingType ( ) { } }
return countingType ;
public class RegionDiskClient { /** * Creates a snapshot of this regional disk . * < p > Sample code : * < pre > < code > * try ( RegionDiskClient regionDiskClient = RegionDiskClient . create ( ) ) { * ProjectRegionDiskName disk = ProjectRegionDiskName . of ( " [ PROJECT ] " , " [ REGION ] " , " [ DISK ] " ) ; * Snapshot snapshotResource = Snapshot . newBuilder ( ) . build ( ) ; * Operation response = regionDiskClient . createSnapshotRegionDisk ( disk , snapshotResource ) ; * < / code > < / pre > * @ param disk Name of the regional persistent disk to snapshot . * @ param snapshotResource A persistent disk snapshot resource . ( = = resource _ for beta . snapshots * = = ) ( = = resource _ for v1 . snapshots = = ) * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Operation createSnapshotRegionDisk ( ProjectRegionDiskName disk , Snapshot snapshotResource ) { } }
CreateSnapshotRegionDiskHttpRequest request = CreateSnapshotRegionDiskHttpRequest . newBuilder ( ) . setDisk ( disk == null ? null : disk . toString ( ) ) . setSnapshotResource ( snapshotResource ) . build ( ) ; return createSnapshotRegionDisk ( request ) ;
public class HsqlTimer { /** * ( Re ) starts background processing of the task queue . * @ throws IllegalStateException if this timer is shut down . * @ see # shutdown ( ) * @ see # shutdownImmediately ( ) */ public synchronized void restart ( ) throws IllegalStateException { } }
if ( this . isShutdown ) { throw new IllegalStateException ( "isShutdown==true" ) ; } else if ( this . taskRunnerThread == null ) { this . taskRunnerThread = this . threadFactory . newThread ( this . taskRunner ) ; this . taskRunnerThread . start ( ) ; } else { this . taskQueue . unpark ( ) ; }
public class NucleotideWSSaver { /** * Adds or updates a single nucleotide to the nucleotide store using the URL configured in * { @ code MonomerStoreConfiguration } . * @ param nucleotide to save * @ return response from the webservice */ public String saveNucleotideToStore ( Nucleotide nucleotide ) { } }
String res = "" ; CloseableHttpResponse response = null ; try { response = WSAdapterUtils . putResource ( nucleotide . toJSON ( ) , MonomerStoreConfiguration . getInstance ( ) . getWebserviceNucleotidesPutFullURL ( ) ) ; LOG . debug ( response . getStatusLine ( ) . toString ( ) ) ; EntityUtils . consume ( response . getEntity ( ) ) ; } catch ( Exception e ) { LOG . error ( "Saving nucleotide failed!" , e ) ; return "" ; } finally { try { if ( response != null ) { response . close ( ) ; } } catch ( IOException e ) { LOG . debug ( "Closing resources failed." , e ) ; return res ; } } return res ;
public class RequireBuilder { /** * Creates a new require ( ) function . You are still responsible for invoking * either { @ link Require # install ( Scriptable ) } or * { @ link Require # requireMain ( Context , String ) } to effectively make it * available to its JavaScript program . * @ param cx the current context * @ param globalScope the global scope containing the JS standard natives . * @ return a new Require instance . */ public Require createRequire ( Context cx , Scriptable globalScope ) { } }
return new Require ( cx , globalScope , moduleScriptProvider , preExec , postExec , sandboxed ) ;
public class PerlinNoise {
    /**
     * Cosine interpolation between two values.
     *
     * @param x1 the start value (returned when {@code a == 0})
     * @param x2 the end value (returned when {@code a == 1})
     * @param a the interpolation fraction
     * @return the cosine-eased blend of {@code x1} and {@code x2}
     */
    private double CosineInterpolate(double x1, double x2, double a) {
        // Map the linear fraction onto the smooth half-cosine curve.
        final double weight = (1 - Math.cos(a * Math.PI)) * 0.5;
        return x1 * (1 - weight) + x2 * weight;
    }
}
public class ObjectResult {
    /**
     * Serializes this ObjectResult into a UNode tree whose root node is
     * called "doc".
     *
     * @return this object serialized into a UNode tree
     */
    public UNode toDoc() {
        final UNode doc = UNode.createMapNode("doc");
        // Every field becomes a simple VALUE child node of "doc".
        for (String name : m_resultFields.keySet()) {
            if (OBJECT_ID.equals(name)) {
                // In XML the element name must be "field" when the node name is "_ID".
                doc.addValueNode(name, m_resultFields.get(name), "field");
            } else {
                doc.addValueNode(name, m_resultFields.get(name));
            }
        }
        return doc;
    }
}
public class Header { /** * Converts the header ' s flags into a String */ public String printFlags ( ) { } }
StringBuffer sb = new StringBuffer ( ) ; for ( int i = 0 ; i < 16 ; i ++ ) if ( validFlag ( i ) && getFlag ( i ) ) { sb . append ( Flags . string ( i ) ) ; sb . append ( " " ) ; } return sb . toString ( ) ;
public class Rapids { /** * Parse a " num list " . This could be either a plain list of numbers , or a range , or a list of ranges . For example * [ 2 3 4 5 6 7 ] can also be written as [ 2:6 ] or [ 2:2 4:4:1 ] . The format of each " range " is ` start : count [ : stride ] ` , * and it denotes the sequence { start , start + stride , . . . , start + ( count - 1 ) * stride } . Here start and stride may * be real numbers , however count must be a non - negative integer . Negative strides are also not allowed . */ private AstNumList parseNumList ( ) { } }
ArrayList < Double > bases = new ArrayList < > ( ) ; ArrayList < Double > strides = new ArrayList < > ( ) ; ArrayList < Long > counts = new ArrayList < > ( ) ; while ( skipWS ( ) != ']' ) { double base = number ( ) ; double count = 1 ; double stride = 1 ; if ( skipWS ( ) == ':' ) { eatChar ( ':' ) ; skipWS ( ) ; count = number ( ) ; if ( count < 1 || ( ( long ) count ) != count ) throw new IllegalASTException ( "Count must be a positive integer, got " + count ) ; } if ( skipWS ( ) == ':' ) { eatChar ( ':' ) ; skipWS ( ) ; stride = number ( ) ; if ( stride < 0 || Double . isNaN ( stride ) ) throw new IllegalASTException ( "Stride must be positive, got " + stride ) ; } if ( count == 1 && stride != 1 ) throw new IllegalASTException ( "If count is 1, then stride must be one (and ignored)" ) ; bases . add ( base ) ; counts . add ( ( long ) count ) ; strides . add ( stride ) ; // Optional comma separating span if ( skipWS ( ) == ',' ) eatChar ( ',' ) ; } return new AstNumList ( bases , strides , counts ) ;
public class CmsConvertXmlThread { /** * Gets file xml content . < p > * @ param cmsResource current resource CmsResource * @ param cmsFile current CmsFile * @ param cmsObject current CmsObject * @ param xmlContent xml content to write * @ param encodingType encoding type * @ param report I _ CmsReport */ private void setXmlContentFromFile ( CmsResource cmsResource , CmsFile cmsFile , CmsObject cmsObject , String xmlContent , String encodingType , I_CmsReport report ) { } }
try { byte [ ] fileContent = xmlContent . getBytes ( encodingType ) ; cmsFile . setContents ( fileContent ) ; // write into file cmsObject . writeFile ( cmsFile ) ; // unlock resource try { cmsObject . unlockResource ( cmsObject . getSitePath ( cmsResource ) ) ; } catch ( CmsException e ) { m_errorTransform += 1 ; report . println ( Messages . get ( ) . container ( Messages . RPT_CONVERTXML_UNLOCK_FILE_1 , cmsObject . getSitePath ( cmsResource ) ) , I_CmsReport . FORMAT_ERROR ) ; if ( LOG . isErrorEnabled ( ) ) { LOG . error ( e . getMessageContainer ( ) , e ) ; } } } catch ( Exception e ) { m_errorTransform += 1 ; String reportContent = "<br/>" ; reportContent = reportContent + CmsEncoder . escapeXml ( xmlContent ) ; reportContent = reportContent . replaceAll ( "\r\n" , "<br/>" ) ; report . println ( Messages . get ( ) . container ( Messages . RPT_CONVERTXML_WRITE_ERROR_1 , reportContent ) , I_CmsReport . FORMAT_ERROR ) ; if ( LOG . isErrorEnabled ( ) ) { LOG . error ( e . toString ( ) ) ; } }
public class AbstractDatabaseEngine { /** * Creates a prepared statement . * @ param name The name of the prepared statement . * @ param query The query . * @ param timeout The timeout ( in seconds ) if applicable . Only applicable if > 0. * @ param recovering True if calling from recovering , false otherwise . * @ throws NameAlreadyExistsException If the name already exists . * @ throws DatabaseEngineException If something goes wrong creating the statement . */ private void createPreparedStatement ( final String name , final String query , final int timeout , final boolean recovering ) throws NameAlreadyExistsException , DatabaseEngineException { } }
if ( ! recovering ) { if ( stmts . containsKey ( name ) ) { throw new NameAlreadyExistsException ( String . format ( "There's already a PreparedStatement with the name '%s'" , name ) ) ; } try { getConnection ( ) ; } catch ( final Exception e ) { throw new DatabaseEngineException ( "Could not create prepared statement" , e ) ; } } PreparedStatement ps ; try { ps = conn . prepareStatement ( query ) ; if ( timeout > 0 ) { ps . setQueryTimeout ( timeout ) ; } stmts . put ( name , new PreparedStatementCapsule ( query , ps , timeout ) ) ; } catch ( final SQLException e ) { throw new DatabaseEngineException ( "Could not create prepared statement" , e ) ; }
public class Transformers {
    /**
     * Gets the text value of the first descendant element with the given
     * tag name.
     *
     * @param parent the parent element to search under
     * @param elementName the element (tag) name to look for
     * @return the value of the element's first child node, or {@code null}
     *         when no such element exists or the element is empty
     */
    private String getElementValue(Element parent, String elementName) {
        NodeList nodes = parent.getElementsByTagName(elementName);
        if (nodes.getLength() == 0) {
            return null;
        }
        // Fixed: the original indexed getChildNodes().item(0) unconditionally,
        // which NPEs on an empty element such as <a/>; guard the missing child.
        Node firstChild = nodes.item(0).getFirstChild();
        return (firstChild == null) ? null : firstChild.getNodeValue();
    }
}
public class CSSURLHelper { /** * Internal method to escape a CSS URL . Because this method is only called for * quoted URLs , only the quote character itself needs to be quoted . * @ param sURL * The URL to be escaped . May not be < code > null < / code > . * @ param cQuoteChar * The quote char that is used . Either ' \ ' ' or ' " ' * @ return The escaped string . Never < code > null < / code > . */ @ Nonnull @ Nonempty public static String getEscapedCSSURL ( @ Nonnull final String sURL , final char cQuoteChar ) { } }
ValueEnforcer . notNull ( sURL , "URL" ) ; if ( sURL . indexOf ( cQuoteChar ) < 0 && sURL . indexOf ( CSSParseHelper . URL_ESCAPE_CHAR ) < 0 ) { // Found nothing to quote return sURL ; } final StringBuilder aSB = new StringBuilder ( sURL . length ( ) * 2 ) ; for ( final char c : sURL . toCharArray ( ) ) { // Escape the quote char and the escape char itself if ( c == cQuoteChar || c == CSSParseHelper . URL_ESCAPE_CHAR ) aSB . append ( CSSParseHelper . URL_ESCAPE_CHAR ) ; aSB . append ( c ) ; } return aSB . toString ( ) ;
public class BlobContainersInner { /** * Creates a new container under the specified account as described by request body . The container resource includes metadata and properties for that container . It does not include a list of the blobs contained by the container . * @ param resourceGroupName The name of the resource group within the user ' s subscription . The name is case insensitive . * @ param accountName The name of the storage account within the specified resource group . Storage account names must be between 3 and 24 characters in length and use numbers and lower - case letters only . * @ param containerName The name of the blob container within the specified storage account . Blob container names must be between 3 and 63 characters in length and use numbers , lower - case letters and dash ( - ) only . Every dash ( - ) character must be immediately preceded and followed by a letter or number . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the BlobContainerInner object if successful . */ public BlobContainerInner create ( String resourceGroupName , String accountName , String containerName ) { } }
return createWithServiceResponseAsync ( resourceGroupName , accountName , containerName ) . toBlocking ( ) . single ( ) . body ( ) ;
public class DataService { /** * Common method to prepare the request params for voidRequest operation for both sync and async calls * @ param entity * the entity * @ return IntuitMessage the intuit message * @ throws FMSException */ private < T extends IEntity > IntuitMessage prepareVoidRequest ( T entity ) throws FMSException { } }
IntuitMessage intuitMessage = new IntuitMessage ( ) ; RequestElements requestElements = intuitMessage . getRequestElements ( ) ; // set the request parameters Map < String , String > requestParameters = requestElements . getRequestParameters ( ) ; requestParameters . put ( RequestElements . REQ_PARAM_METHOD_TYPE , MethodType . POST . toString ( ) ) ; requestParameters . put ( RequestElements . REQ_PARAM_INCLUDE , OperationType . VOID . toString ( ) ) ; requestElements . setContext ( context ) ; requestElements . setEntity ( entity ) ; requestElements . setObjectToSerialize ( getSerializableObject ( entity ) ) ; return intuitMessage ;
public class PolylineSplitMerge { /** * Selects and splits the side defined by the e0 corner . If convex a check is performed to * ensure that the polyline will be convex still . */ void setSplitVariables ( List < Point2D_I32 > contour , Element < Corner > e0 , Element < Corner > e1 ) { } }
int distance0 = CircularIndex . distanceP ( e0 . object . index , e1 . object . index , contour . size ( ) ) ; int index0 = CircularIndex . plusPOffset ( e0 . object . index , minimumSideLength , contour . size ( ) ) ; int index1 = CircularIndex . minusPOffset ( e1 . object . index , minimumSideLength , contour . size ( ) ) ; splitter . selectSplitPoint ( contour , index0 , index1 , resultsA ) ; // if convex only perform the split if it would result in a convex polygon if ( convex ) { Point2D_I32 a = contour . get ( e0 . object . index ) ; Point2D_I32 b = contour . get ( resultsA . index ) ; Point2D_I32 c = contour . get ( next ( e0 ) . object . index ) ; if ( UtilPolygons2D_I32 . isPositiveZ ( a , b , c ) ) { e0 . object . splitable = false ; return ; } } // see if this would result in a side that ' s too small int dist0 = CircularIndex . distanceP ( e0 . object . index , resultsA . index , contour . size ( ) ) ; if ( dist0 < minimumSideLength || ( contour . size ( ) - dist0 ) < minimumSideLength ) { throw new RuntimeException ( "Should be impossible" ) ; } // this function is only called if splitable is set to true so no need to set it again e0 . object . splitLocation = resultsA . index ; e0 . object . splitError0 = computeSideError ( contour , e0 . object . index , resultsA . index ) ; e0 . object . splitError1 = computeSideError ( contour , resultsA . index , e1 . object . index ) ; if ( e0 . object . splitLocation >= contour . size ( ) ) throw new RuntimeException ( "Egads" ) ;
public class Spies { /** * Proxies an consumer spying for parameter . * @ param < T > the consumer parameter type * @ param consumer the consumer to be spied * @ param param a box that will be containing the spied parameter * @ return the proxied consumer */ public static < T > Consumer < T > spy ( Consumer < T > consumer , Box < T > param ) { } }
return new CapturingConsumer < T > ( consumer , param ) ;
public class RouteFiltersInner { /** * Updates a route filter in a specified resource group . * @ param resourceGroupName The name of the resource group . * @ param routeFilterName The name of the route filter . * @ param routeFilterParameters Parameters supplied to the update route filter operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the RouteFilterInner object if successful . */ public RouteFilterInner update ( String resourceGroupName , String routeFilterName , PatchRouteFilter routeFilterParameters ) { } }
return updateWithServiceResponseAsync ( resourceGroupName , routeFilterName , routeFilterParameters ) . toBlocking ( ) . last ( ) . body ( ) ;
public class VTimeZone {
    /**
     * Formats an integer as a fixed-width decimal string: left-padded with
     * zeros when too short, truncated to the rightmost digits when too long.
     *
     * @param num the number to format
     * @param width the exact width of the result
     * @return the fixed-width string
     */
    private static String numToString(int num, int width) {
        final String digits = Integer.toString(num);
        final int len = digits.length();
        if (len >= width) {
            // Keep only the rightmost `width` characters.
            return digits.substring(len - width);
        }
        final StringBuilder padded = new StringBuilder(width);
        for (int pad = width - len; pad > 0; pad--) {
            padded.append('0');
        }
        return padded.append(digits).toString();
    }
}
public class FessMessages { /** * Add the created action message for the key ' success . upload _ bad _ word ' with parameters . * < pre > * message : Uploaded Bad Word file . * < / pre > * @ param property The property name for the message . ( NotNull ) * @ return this . ( NotNull ) */ public FessMessages addSuccessUploadBadWord ( String property ) { } }
assertPropertyNotNull ( property ) ; add ( property , new UserMessage ( SUCCESS_upload_bad_word ) ) ; return this ;
public class AWSCognitoIdentityProviderClient { /** * Stops the user import job . * @ param stopUserImportJobRequest * Represents the request to stop the user import job . * @ return Result of the StopUserImportJob operation returned by the service . * @ throws ResourceNotFoundException * This exception is thrown when the Amazon Cognito service cannot find the requested resource . * @ throws InvalidParameterException * This exception is thrown when the Amazon Cognito service encounters an invalid parameter . * @ throws TooManyRequestsException * This exception is thrown when the user has made too many requests for a given operation . * @ throws InternalErrorException * This exception is thrown when Amazon Cognito encounters an internal error . * @ throws PreconditionNotMetException * This exception is thrown when a precondition is not met . * @ throws NotAuthorizedException * This exception is thrown when a user is not authorized . * @ sample AWSCognitoIdentityProvider . StopUserImportJob * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cognito - idp - 2016-04-18 / StopUserImportJob " target = " _ top " > AWS * API Documentation < / a > */ @ Override public StopUserImportJobResult stopUserImportJob ( StopUserImportJobRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeStopUserImportJob ( request ) ;
public class TLEImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public boolean eIsSet ( int featureID ) { } }
switch ( featureID ) { case AfplibPackage . TLE__TRIPLETS : return triplets != null && ! triplets . isEmpty ( ) ; } return super . eIsSet ( featureID ) ;
public class Contract {
    /**
     * Validates the given value and throws if any constraint is violated.
     *
     * @param validator the Bean Validation validator to use
     * @param value the object to validate
     * @param groups the validation groups to apply; may be null for the default group
     * @throws ConstraintViolationException if any constraint violation is found
     */
    // CHECKSTYLE:OFF:RedundantThrows
    public static void requireValid(@NotNull final Validator validator,
            @NotNull final Object value, @Nullable final Class<?>... groups)
            throws ConstraintViolationException {
        // CHECKSTYLE:ON
        // Fixed: the `groups` parameter was declared but never passed to the
        // validator, so group-specific constraints were silently ignored.
        final Set<ConstraintViolation<Object>> constraintViolations =
                (groups == null) ? validator.validate(value) : validator.validate(value, groups);
        if (!constraintViolations.isEmpty()) {
            // StringBuilder instead of StringBuffer — no synchronization needed.
            final StringBuilder sb = new StringBuilder();
            for (final ConstraintViolation<Object> violation : constraintViolations) {
                if (sb.length() > 0) {
                    sb.append(", ");
                }
                sb.append("[" + violation.getPropertyPath() + "] "
                        + violation.getMessage()
                        + " {" + violation.getInvalidValue() + "}");
            }
            throw new ConstraintViolationException(sb.toString(), constraintViolations);
        }
    }
}
public class Range { /** * Checks whether the specified element occurs within this range . * @ param element * the element to check for , null returns false * @ return true if the specified element occurs within this range */ public boolean contains ( final T element ) { } }
if ( element == null ) { return false ; } return lowerEndpoint . includes ( element ) && upperEndpoint . includes ( element ) ;
public class Mediawiki { /** * get a Post response * @ param queryUrl * @ param params * - direct query parameters * @ param token * - a token if any * @ param pFormData * - the form data - either as multipart of urlencoded * @ return - the client Response * @ throws Exception */ public ClientResponse getPostResponse ( String queryUrl , String params , TokenResult token , Object pFormDataObject ) throws Exception { } }
params = params . replace ( "|" , "%7C" ) ; params = params . replace ( "+" , "%20" ) ; // modal handling of post FormDataMultiPart form = null ; MultivaluedMap < String , String > lFormData = null ; if ( pFormDataObject instanceof FormDataMultiPart ) { form = ( FormDataMultiPart ) pFormDataObject ; } else { @ SuppressWarnings ( "unchecked" ) Map < String , String > pFormData = ( Map < String , String > ) pFormDataObject ; lFormData = new MultivaluedMapImpl ( ) ; if ( pFormData != null ) { for ( String key : pFormData . keySet ( ) ) { lFormData . add ( key , pFormData . get ( key ) ) ; } } } if ( token != null ) { switch ( token . tokenMode ) { case token1_24 : if ( lFormData != null ) { lFormData . add ( token . tokenName , token . token ) ; } else { form . field ( token . tokenName , token . token ) ; } break ; default : params += token . asParam ( ) ; } } Builder resource = getResource ( queryUrl + params ) ; // FIXME allow to specify content type ( not needed for Mediawiki itself // but // could be good for interfacing ) ClientResponse response = null ; if ( lFormData != null ) { response = resource . type ( MediaType . APPLICATION_FORM_URLENCODED_TYPE ) . post ( ClientResponse . class , lFormData ) ; } else { response = resource . type ( MediaType . MULTIPART_FORM_DATA_TYPE ) . post ( ClientResponse . class , form ) ; } return response ;
public class CodecCollector {

  /**
   * Collect group statistics using spans.
   *
   * <p>Walks the documents of {@code docSet} that come after {@code docCounter}, advancing one
   * {@link Spans} cursor per {@link GroupHit} in parallel with the stored {@code matchData}.
   * Whenever a span coincides exactly (same start and end position) with a stored match, the
   * occurrence counters for that group are incremented and the match is dropped from the
   * surviving match list; all other matches are kept.
   *
   * @param list
   *          map from group hit to the spans enumerating its occurrences
   * @param docSet
   *          sorted list of (absolute) document ids to inspect
   * @param docBase
   *          the doc base of the current segment (spans use segment-relative ids)
   * @param docCounter
   *          index into {@code docSet} of the current document; only later documents are processed
   * @param matchData
   *          map from absolute doc id to the matches found in that document; entries are rewritten
   *          in place with the matches that were NOT consumed by any group
   * @param occurencesSum
   *          running sum of span/match coincidences per group (updated in place)
   * @param occurencesN
   *          running count of documents contributing at least one coincidence per group
   *          (updated in place)
   * @return the total number of span/match coincidences counted
   * @throws IOException
   *           Signals that an I/O exception has occurred while advancing spans.
   */
  private static int collectGroupUsingSpans(Map<GroupHit, Spans> list, List<Integer> docSet,
      int docBase, int docCounter, Map<Integer, List<Match>> matchData,
      Map<GroupHit, Long> occurencesSum, Map<GroupHit, Integer> occurencesN) throws IOException {
    int total = 0;
    if (docCounter + 1 < docSet.size()) {
      // initialize per-group accumulators and cursors
      int nextDocCounter = docCounter + 1;
      long[] subSum = new long[list.size()];
      int[] subN = new int[list.size()];
      boolean[] newNextDocs = new boolean[list.size()];
      boolean newNextDoc;
      int[] spansNextDoc = new int[list.size()];
      int nextDoc = 0;
      List<Match> matchList;
      GroupHit[] hitList = list.keySet().toArray(new GroupHit[list.size()]);
      Spans[] spansList = new Spans[list.size()];
      boolean[] finishedSpansList = new boolean[list.size()];
      newNextDoc = true;
      // advance spans, find nextDoc (smallest segment-relative doc any span lands on)
      for (int i = 0; i < hitList.length; i++) {
        newNextDocs[i] = true;
        spansList[i] = list.get(hitList[i]);
        spansNextDoc[i] = spansList[i].advance(docSet.get(nextDocCounter) - docBase);
        nextDoc = (i == 0) ? spansNextDoc[i] : Math.min(nextDoc, spansNextDoc[i]);
      }
      // loop over future documents
      while (nextDoc < DocIdSetIterator.NO_MORE_DOCS) {
        // find matches for next document (skip docSet entries before the span position)
        while (nextDocCounter < docSet.size()
            && docSet.get(nextDocCounter) < (nextDoc + docBase)) {
          nextDocCounter++;
        }
        // finish, if no more docs in set
        if (nextDocCounter >= docSet.size()) {
          break;
        }
        // go to the matches
        if (docSet.get(nextDocCounter) == nextDoc + docBase) {
          matchList = matchData.get(nextDoc + docBase);
          if (matchList != null && !matchList.isEmpty()) {
            // initialize match/span walk state for this document
            int currentMatchPosition = 0;
            int lastMatchStartPosition = matchList.get(matchList.size() - 1).startPosition;
            ArrayList<Match> newMatchList = new ArrayList<>(matchList.size());
            int currentSpanPosition = Spans.NO_MORE_POSITIONS;
            // check and initialize for each span
            for (int i = 0; i < spansList.length; i++) {
              if (spansList[i].docID() == nextDoc) {
                int tmpStartPosition = spansList[i].nextStartPosition();
                if (tmpStartPosition < Spans.NO_MORE_POSITIONS) {
                  finishedSpansList[i] = false;
                } else {
                  finishedSpansList[i] = true;
                }
                // compute position: smallest start position over all live spans
                currentSpanPosition = (currentSpanPosition == Spans.NO_MORE_POSITIONS)
                    ? tmpStartPosition
                    : Math.min(currentSpanPosition, tmpStartPosition);
              } else {
                finishedSpansList[i] = true;
              }
            }
            // loop over matches
            while (currentMatchPosition < matchList.size()
                && currentSpanPosition < Spans.NO_MORE_POSITIONS) {
              if (currentSpanPosition < matchList.get(currentMatchPosition).startPosition) {
                // do nothing, match not reached
              } else if (currentSpanPosition > lastMatchStartPosition) {
                // finish, past last match
                break;
              } else {
                // advance matches up to the current span position; skipped matches are kept
                while (currentMatchPosition < matchList.size()
                    && currentSpanPosition > matchList.get(currentMatchPosition).startPosition) {
                  // store current match, not relevant
                  newMatchList.add(matchList.get(currentMatchPosition));
                  currentMatchPosition++;
                }
                // equal startPosition
                while (currentMatchPosition < matchList.size()
                    && currentSpanPosition == matchList.get(currentMatchPosition).startPosition) {
                  // check for each span
                  for (int i = 0; i < spansList.length; i++) {
                    // equal start and end, therefore match
                    if (!finishedSpansList[i] && spansList[i].docID() == nextDoc
                        && spansList[i].startPosition() == matchList
                            .get(currentMatchPosition).startPosition
                        && spansList[i].endPosition() == matchList
                            .get(currentMatchPosition).endPosition) {
                      // administration: count the coincidence for group i
                      total++;
                      subSum[i]++;
                      if (newNextDocs[i]) {
                        // first coincidence of group i in this document
                        subN[i]++;
                        newNextDocs[i] = false;
                        newNextDoc = false;
                      }
                    } else if (!finishedSpansList[i] && spansList[i].docID() == nextDoc
                        && spansList[i].startPosition() == matchList
                            .get(currentMatchPosition).startPosition) {
                      // no match, store
                      newMatchList.add(matchList.get(currentMatchPosition));
                    }
                  }
                  currentMatchPosition++;
                }
              }
              // advance spans past the current match position
              if (currentMatchPosition < matchList.size()) {
                currentSpanPosition = Spans.NO_MORE_POSITIONS;
                for (int i = 0; i < spansList.length; i++) {
                  if (!finishedSpansList[i] && (spansList[i].docID() == nextDoc)) {
                    while (!finishedSpansList[i] && spansList[i].startPosition() < matchList
                        .get(currentMatchPosition).startPosition) {
                      int tmpStartPosition = spansList[i].nextStartPosition();
                      if (tmpStartPosition == Spans.NO_MORE_POSITIONS) {
                        finishedSpansList[i] = true;
                      }
                    }
                    if (!finishedSpansList[i]) {
                      currentSpanPosition = (currentSpanPosition == Spans.NO_MORE_POSITIONS)
                          ? spansList[i].startPosition()
                          : Math.min(currentSpanPosition, spansList[i].startPosition());
                    }
                  } else {
                    finishedSpansList[i] = true;
                  }
                }
              }
            }
            if (!newNextDoc) {
              // add other matches (everything after the last processed position survives)
              while (currentMatchPosition < matchList.size()) {
                newMatchList.add(matchList.get(currentMatchPosition));
                currentMatchPosition++;
              }
              // update administration
              if (!newMatchList.isEmpty()) {
                matchData.put(nextDoc + docBase, newMatchList);
              } else {
                matchData.put(nextDoc + docBase, null);
              }
            }
          }
        }
        // advance to next document
        nextDocCounter++;
        newNextDoc = true;
        for (int i = 0; i < hitList.length; i++) {
          newNextDocs[i] = true;
        }
        // advance spans
        if (nextDocCounter < docSet.size()) {
          nextDoc = Spans.NO_MORE_DOCS;
          // advance spans
          for (int i = 0; i < hitList.length; i++) {
            if (spansNextDoc[i] < (docSet.get(nextDocCounter) - docBase)) {
              spansNextDoc[i] = spansList[i].advance(docSet.get(nextDocCounter) - docBase);
            }
            if (spansNextDoc[i] < Spans.NO_MORE_DOCS) {
              nextDoc = (nextDoc == Spans.NO_MORE_DOCS) ? spansNextDoc[i]
                  : Math.min(nextDoc, spansNextDoc[i]);
            }
          }
        }
      }
      // update administration: only groups already present in occurencesSum are updated
      for (int i = 0; i < hitList.length; i++) {
        if (subSum[i] > 0 && (occurencesSum.containsKey(hitList[i]))) {
          occurencesSum.put(hitList[i], occurencesSum.get(hitList[i]) + subSum[i]);
          occurencesN.put(hitList[i], occurencesN.get(hitList[i]) + subN[i]);
        }
      }
    }
    return total;
  }
}
public class ClassHelper { /** * Load a collection of classes . * @ param typeNames * The class names . * @ return The collection of classes . */ public static Collection < Class < ? > > getTypes ( Collection < String > typeNames ) { } }
Set < Class < ? > > types = new HashSet < > ( ) ; for ( String typeName : typeNames ) { types . add ( ClassHelper . getType ( typeName ) ) ; } return types ;
public class SoftHashMap {

    /**
     * Returns a collection view of the mappings contained in this map. Each
     * element in the returned collection is a <tt>Map.Entry</tt>. The
     * collection is backed by the map, so changes to the map are reflected in
     * the collection, and vice-versa. The collection supports element
     * removal, which removes the corresponding mapping from the map, via the
     * <tt>Iterator.remove</tt>, <tt>Collection.remove</tt>,
     * <tt>removeAll</tt>, <tt>retainAll</tt>, and <tt>clear</tt> operations.
     * It does not support the <tt>add</tt> or <tt>addAll</tt> operations.
     *
     * <p>The view is created lazily on first call and cached in {@code mEntrySet}.
     * While scanning a hash bucket, both {@code contains} and {@code remove}
     * opportunistically unlink entries whose soft reference has been cleared.
     *
     * @return a collection view of the mappings contained in this map.
     * @see Map.Entry
     */
    public Set entrySet() {
        if (mEntrySet == null) {
            mEntrySet = new AbstractSet() {
                public Iterator iterator() {
                    return getHashIterator(IdentityMap.ENTRIES);
                }

                public boolean contains(Object o) {
                    if (!(o instanceof Map.Entry)) {
                        return false;
                    }
                    Map.Entry entry = (Map.Entry) o;
                    Object key = entry.getKey();
                    // Locate the bucket for this key's hash.
                    Entry tab[] = mTable;
                    int hash = key == null ? 0 : key.hashCode();
                    int index = (hash & 0x7FFFFFFF) % tab.length;
                    for (Entry e = tab[index], prev = null; e != null; e = e.mNext) {
                        Object entryValue = e.getValue();
                        if (entryValue == null) {
                            // Clean up after a cleared Reference.
                            mModCount++;
                            if (prev != null) {
                                prev.mNext = e.mNext;
                            } else {
                                tab[index] = e.mNext;
                            }
                            mCount--;
                        } else if (e.mHash == hash && e.equals(entry)) {
                            return true;
                        } else {
                            prev = e;
                        }
                    }
                    return false;
                }

                public boolean remove(Object o) {
                    if (!(o instanceof Map.Entry)) {
                        return false;
                    }
                    Map.Entry entry = (Map.Entry) o;
                    Object key = entry.getKey();
                    // Locate the bucket for this key's hash.
                    Entry tab[] = mTable;
                    int hash = key == null ? 0 : key.hashCode();
                    int index = (hash & 0x7FFFFFFF) % tab.length;
                    for (Entry e = tab[index], prev = null; e != null; e = e.mNext) {
                        Object entryValue = e.getValue();
                        if (entryValue == null) {
                            // Clean up after a cleared Reference.
                            mModCount++;
                            if (prev != null) {
                                prev.mNext = e.mNext;
                            } else {
                                tab[index] = e.mNext;
                            }
                            mCount--;
                        } else if (e.mHash == hash && e.equals(entry)) {
                            // Found the matching live entry: unlink it and clear its value.
                            mModCount++;
                            if (prev != null) {
                                prev.mNext = e.mNext;
                            } else {
                                tab[index] = e.mNext;
                            }
                            mCount--;
                            e.setValue(null);
                            return true;
                        } else {
                            prev = e;
                        }
                    }
                    return false;
                }

                public int size() {
                    return mCount;
                }

                public void clear() {
                    SoftHashMap.this.clear();
                }

                public String toString() {
                    return IdentityMap.toString(this);
                }
            };
        }
        return mEntrySet;
    }
}
public class JimfsFileChannel { /** * Begins a blocking operation , making the operation interruptible . Returns { @ code true } if the * channel was open and the thread was added as a blocking thread ; returns { @ code false } if the * channel was closed . */ private boolean beginBlocking ( ) { } }
begin ( ) ; synchronized ( blockingThreads ) { if ( isOpen ( ) ) { blockingThreads . add ( Thread . currentThread ( ) ) ; return true ; } return false ; }
public class BaseTable {

    /**
     * Is there a record after the current one?
     *
     * <p>Answers by actually moving to the next record (with non-filter field listeners
     * temporarily disabled), then flags the pre-read via {@code RECORD_NEXT_PENDING} so a
     * subsequent moveNext() can reuse it instead of reading again.
     *
     * @return false if the file position is at the last record.
     * @throws DBException if the underlying move fails.
     */
    public boolean doHasNext() throws DBException {
        if ((m_iRecordStatus & DBConstants.RECORD_AT_EOF) != 0)
            return false; // Already at EOF, can't be one after
        if ((m_iRecordStatus & DBConstants.RECORD_NEXT_PENDING) != 0)
            return true; // Already one waiting
        boolean bAtEOF = true;
        if (!this.isOpen())
            this.open();
        // Make sure any listeners are called before disabling.
        Object[] rgobjEnabledFields = this.getRecord().setEnableNonFilter(null, false, false, false, false, true);
        FieldList record = null;
        try {
            // Probe: attempt to read the next record.
            record = this.move(DBConstants.NEXT_RECORD);
            if (record == null)
                bAtEOF = true;
            else if ((m_iRecordStatus & DBConstants.RECORD_AT_EOF) != 0)
                bAtEOF = true;
            else if (this.isTable())
                bAtEOF = this.isEOF();
            else
                bAtEOF = false; // Valid record!
        } catch (DBException ex) {
            throw ex;
        } finally {
            // Always re-enable the listeners that were disabled above.
            this.getRecord().setEnableNonFilter(rgobjEnabledFields, false, false, false, bAtEOF, true);
        }
        if (bAtEOF)
            return false; // Does not have a next record
        m_iRecordStatus |= DBConstants.RECORD_NEXT_PENDING; // If next call is a moveNext(), return unchanged!
        return true; // Yes, a next record exists.
    }
}
public class DataSynchronizer {

  /**
   * Deletes a single synchronized document by its given id. No deletion will occur if the _id is
   * not being synchronized.
   *
   * @param nsConfig the namespace synchronization config of the namespace where the document
   *                 lives.
   * @param documentId the _id of the document.
   * @param atVersion the version at which the delete is recorded as a pending write.
   * @return a write-model container bundling the local delete, its change event and the config
   *         update, or {@code null} if the document is not synchronized.
   */
  @CheckReturnValue
  private @Nullable LocalSyncWriteModelContainer deleteOneFromResolution(
      final NamespaceSynchronizationConfig nsConfig,
      final BsonValue documentId,
      final BsonDocument atVersion) {
    final MongoNamespace namespace = nsConfig.getNamespace();
    final ChangeEvent<BsonDocument> event;
    // Record the pending delete under the namespace write lock.
    final Lock lock = this.syncConfig.getNamespaceConfig(namespace).getLock().writeLock();
    lock.lock();
    final CoreDocumentSynchronizationConfig config;
    try {
      config = syncConfig.getSynchronizedDocument(namespace, documentId);
      if (config == null) {
        // _id is not being synchronized: nothing to delete.
        return null;
      }
      event = ChangeEvents.changeEventForLocalDelete(namespace, documentId, true);
      config.setSomePendingWrites(logicalT, atVersion, 0L, event);
    } finally {
      lock.unlock();
    }
    // Bundle the local delete, its change event, and the updated sync config into one container.
    final LocalSyncWriteModelContainer container = newWriteModelContainer(nsConfig);
    container.addDocIDs(documentId);
    container.addLocalWrite(new DeleteOneModel<>(getDocumentIdFilter(documentId)));
    container.addLocalChangeEvent(event);
    container.addConfigWrite(new ReplaceOneModel<>(
        CoreDocumentSynchronizationConfig.getDocFilter(namespace, config.getDocumentId()),
        config));
    return container;
  }
}
public class Coref {

  /**
   * Getter for the "ref" feature: the coreference annotation this one points to.
   * Generated by the UIMA JCasGen tooling; do not edit by hand.
   *
   * @generated
   * @return value of the feature
   */
  public Coref getRef() {
    // Guard: verify the "ref" feature exists in the type system before accessing it.
    if (Coref_Type.featOkTst && ((Coref_Type) jcasType).casFeat_ref == null)
      jcasType.jcas.throwFeatMissing("ref", "de.julielab.jules.types.muc7.Coref");
    // Resolve the low-level feature-structure reference back into a Coref object.
    return (Coref) (jcasType.ll_cas.ll_getFSForRef(
        jcasType.ll_cas.ll_getRefValue(addr, ((Coref_Type) jcasType).casFeatCode_ref)));
  }
}
public class RequestServer { /** * Returns the name of the request , that is the request url without the * request suffix . E . g . converts " / GBM . html / crunk " into " / GBM / crunk " */ String requestName ( String url ) { } }
String s = "." + toString ( ) ; int i = url . indexOf ( s ) ; if ( i == - 1 ) return url ; // No , or default , type return url . substring ( 0 , i ) + url . substring ( i + s . length ( ) ) ;
public class PlatformLogger { /** * Returns true if a message of the given level would actually * be logged by this logger . */ public boolean isLoggable ( Level level ) { } }
if ( level == null ) { throw new NullPointerException ( ) ; } // performance - sensitive method : use two monomorphic call - sites JavaLoggerProxy jlp = javaLoggerProxy ; return jlp != null ? jlp . isLoggable ( level ) : loggerProxy . isLoggable ( level ) ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link String } { @ code > } } */ @ XmlElementDecl ( namespace = "http://www.ibm.com/websphere/wim" , name = "employeeNumber" ) public JAXBElement < String > createEmployeeNumber ( String value ) { } }
return new JAXBElement < String > ( _EmployeeNumber_QNAME , String . class , null , value ) ;
public class PackageMatcher { /** * Returns a matching { @ link PackageMatcher . Result Result } * against the provided package name . If the package identifier of this { @ link PackageMatcher } does not match the * given package name , then { @ link Optional # absent ( ) } is returned . * @ param aPackage The package name to match against * @ return A { @ link PackageMatcher . Result Result } if the package name matches , * otherwise { @ link Optional # absent ( ) } */ @ PublicAPI ( usage = ACCESS ) public Optional < Result > match ( String aPackage ) { } }
Matcher matcher = packagePattern . matcher ( aPackage ) ; return matcher . matches ( ) ? Optional . of ( new Result ( matcher ) ) : Optional . < Result > absent ( ) ;
public class QuerySpec { /** * Evaluates this querySpec against the provided list in memory . It supports * sorting , filter and paging . * TODO currently ignores relations and inclusions , has room for * improvements * @ param < T > the type of resources in this Iterable * @ param resources resources * @ return sorted , filtered list . */ public < T > DefaultResourceList < T > apply ( Iterable < T > resources ) { } }
DefaultResourceList < T > resultList = new DefaultResourceList < > ( ) ; resultList . setMeta ( new DefaultPagedMetaInformation ( ) ) ; apply ( resources , resultList ) ; return resultList ;
public class Tickets { /** * 创建临时二维码 * @ return */ public Ticket temporary ( int expires , int sceneId ) { } }
String url = WxEndpoint . get ( "url.ticket.create" ) ; String json = "{\"expire_seconds\":%s,\"action_name\":\"QR_SCENE\",\"action_info\":{\"scene\":{\"scene_id\":%s}}}" ; logger . debug ( "create temporary ticket : {}" , String . format ( json , expires , sceneId ) ) ; String response = wxClient . post ( url , String . format ( json , expires , sceneId ) ) ; return JsonMapper . nonEmptyMapper ( ) . fromJson ( response , Ticket . class ) ;
public class XPathScanner { /** * Checks if the given character is a special character . * @ param paramInput * The character to check . * @ return Returns true , if the character is a special character . */ private boolean isSpecial ( final char paramInput ) { } }
return ( paramInput == ')' ) || ( paramInput == ';' ) || ( paramInput == ',' ) || ( paramInput == '@' ) || ( paramInput == '[' ) || ( paramInput == ']' ) || ( paramInput == '=' ) || ( paramInput == '"' ) || ( paramInput == '\'' ) || ( paramInput == '$' ) || ( paramInput == ':' ) || ( paramInput == '|' ) || ( paramInput == '+' ) || ( paramInput == '-' ) || ( paramInput == '?' ) || ( paramInput == '*' ) ;
public class FileUtils { /** * Renames the source file to the target file . If the target file exists , then we attempt to * delete it . If the delete or the rename operation fails , then we raise an exception * @ param source the source file * @ param target the new ' name ' for the source file * @ throws IOException */ public static void rename ( File source , File target ) throws RenameException { } }
Preconditions . checkNotNull ( source ) ; Preconditions . checkNotNull ( target ) ; // delete the target first - but ignore the result target . delete ( ) ; if ( source . renameTo ( target ) ) { return ; } Throwable innerException = null ; if ( target . exists ( ) ) { innerException = new FileDeleteException ( target . getAbsolutePath ( ) ) ; } else if ( ! source . getParentFile ( ) . exists ( ) ) { innerException = new ParentDirNotFoundException ( source . getAbsolutePath ( ) ) ; } else if ( ! source . exists ( ) ) { innerException = new FileNotFoundException ( source . getAbsolutePath ( ) ) ; } throw new RenameException ( "Unknown error renaming " + source . getAbsolutePath ( ) + " to " + target . getAbsolutePath ( ) , innerException ) ;
public class ZealotKhala {

    /**
     * Generates a "NOT LIKE" SQL fragment from the given pattern string.
     * <p>Example: passing {"b.title", "Java%"} produces the fragment: "b.title NOT LIKE 'Java%'"</p>
     *
     * @param field the database field
     * @param pattern the LIKE pattern string (used as-is; no escaping is added here)
     * @return this ZealotKhala instance, for fluent chaining
     */
    public ZealotKhala notLikePattern(String field, String pattern) {
        // Delegates to the shared LIKE builder.
        // NOTE(review): the two boolean flags are assumed to mean (match=true, positive=false),
        // i.e. an unconditional NOT LIKE — confirm against doLikePattern's signature.
        return this.doLikePattern(ZealotConst.ONE_SPACE, field, pattern, true, false);
    }
}
public class ServerAuthenticatorNone { /** * Convinience routine for selecting SOCKSv5 authentication . * This method reads in authentication methods that client supports , * checks wether it supports given method . If it does , the notification * method is written back to client , that this method have been chosen * for authentication . If given method was not found , authentication * failure message is send to client ( [ 5 , FF ] ) . * @ param in Input stream , version byte should be removed from the stream * before calling this method . * @ param out Output stream . * @ param methodId Method which should be selected . * @ return true if methodId was found , false otherwise . */ static public boolean selectSocks5Authentication ( InputStream in , OutputStream out , int methodId ) throws IOException { } }
int num_methods = in . read ( ) ; if ( num_methods <= 0 ) return false ; byte method_ids [ ] = new byte [ num_methods ] ; byte response [ ] = new byte [ 2 ] ; boolean found = false ; response [ 0 ] = ( byte ) 5 ; // SOCKS version response [ 1 ] = ( byte ) 0xFF ; // Not found , we are pessimistic int bread = 0 ; // bytes read so far while ( bread < num_methods ) bread += in . read ( method_ids , bread , num_methods - bread ) ; for ( int i = 0 ; i < num_methods ; ++ i ) if ( method_ids [ i ] == methodId ) { found = true ; response [ 1 ] = ( byte ) methodId ; break ; } out . write ( response ) ; return found ;
public class QueuedExecutions { /** * Fetch Activereference for an execution . Returns null , if execution not in queue */ public ExecutionReference getReference ( final int executionId ) { } }
if ( hasExecution ( executionId ) ) { return this . queuedFlowMap . get ( executionId ) . getFirst ( ) ; } return null ;
public class ForeignBus { /** * / * ( non - Javadoc ) * @ see com . ibm . ws . sib . processor . runtime . SIMPMessageHandlerControllable # isTemporary ( ) */ public boolean isTemporary ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "isTemporary" ) ; boolean isTemporary = _foreignBus . isTemporary ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "isTemporary" , new Boolean ( isTemporary ) ) ; return isTemporary ;
public class AuthorizationCodeFlow { /** * Returns a new credential instance based on the given user ID . * @ param userId user ID or { @ code null } if not using a persisted credential store */ @ SuppressWarnings ( "deprecation" ) private Credential newCredential ( String userId ) { } }
Credential . Builder builder = new Credential . Builder ( method ) . setTransport ( transport ) . setJsonFactory ( jsonFactory ) . setTokenServerEncodedUrl ( tokenServerEncodedUrl ) . setClientAuthentication ( clientAuthentication ) . setRequestInitializer ( requestInitializer ) . setClock ( clock ) ; if ( credentialDataStore != null ) { builder . addRefreshListener ( new DataStoreCredentialRefreshListener ( userId , credentialDataStore ) ) ; } else if ( credentialStore != null ) { builder . addRefreshListener ( new CredentialStoreRefreshListener ( userId , credentialStore ) ) ; } builder . getRefreshListeners ( ) . addAll ( refreshListeners ) ; return builder . build ( ) ;
public class ObjectInputStream { /** * Reads and discards block data and objects until TC _ ENDBLOCKDATA is found . * @ throws IOException * If an IO exception happened when reading the optional class * annotation . * @ throws ClassNotFoundException * If the class corresponding to the class descriptor could not * be found . */ private void discardData ( ) throws ClassNotFoundException , IOException { } }
primitiveData = emptyStream ; boolean resolve = mustResolve ; mustResolve = false ; do { byte tc = nextTC ( ) ; if ( tc == TC_ENDBLOCKDATA ) { mustResolve = resolve ; return ; // End of annotation } readContent ( tc ) ; } while ( true ) ;
public class ClassDiscoveryUtil {

    /**
     * Returns an array of concrete classes in the given package that implement the
     * specified interface.
     *
     * @param basePackage       the name of the package containing the classes to discover
     * @param requiredInterface the interface that the returned classes must implement
     * @return an array of concrete classes in the given package that implement the
     *         specified interface
     */
    public static Class[] getClasses(String basePackage, Class requiredInterface) {
        // Convenience overload: wrap the single interface and delegate to the
        // multi-interface variant. (Raw Class types are dictated by the public signature.)
        return getClasses(basePackage, new Class[] { requiredInterface });
    }
}
public class RespokeClient {

    /**
     * Retrieves the history of messages that have been persisted for a specific group. Only those
     * messages that have been marked to be persisted when sent will show up in the history.
     * All outcomes — validation failures included — are reported through the listener.
     *
     * @param groupId            The group to pull history for
     * @param maxMessages        The maximum number of messages per group to pull. Must be &gt;= 1
     * @param before             Limit messages to those with a timestamp before this value,
     *                           or null for no limit
     * @param completionListener The callback called when this async operation has completed
     */
    public void getGroupHistory(final String groupId, final Integer maxMessages, final Date before,
            final GroupHistoryCompletionListener completionListener) {
        // Validate preconditions; failures are delivered via the listener, never thrown.
        if (!isConnected()) {
            getGroupHistoryError(completionListener, "Can't complete request when not connected, " +
                    "Please reconnect!");
            return;
        }
        if ((maxMessages == null) || (maxMessages < 1)) {
            getGroupHistoryError(completionListener, "maxMessages must be at least 1");
            return;
        }
        if ((groupId == null) || groupId.length() == 0) {
            getGroupHistoryError(completionListener, "groupId cannot be blank");
            return;
        }
        // Build the query string: message limit plus optional before-timestamp (epoch millis).
        Uri.Builder builder = new Uri.Builder();
        builder.appendQueryParameter("limit", maxMessages.toString());
        if (before != null) {
            builder.appendQueryParameter("before", Long.toString(before.getTime()));
        }
        // NOTE(review): groupId is interpolated into the path unencoded — confirm it is
        // already URL-safe at this point.
        String urlEndpoint = String.format("/v1/groups/%s/history%s", groupId,
                builder.build().toString());
        signalingChannel.sendRESTMessage("get", urlEndpoint, null,
                new RespokeSignalingChannel.RESTListener() {
            @Override
            public void onSuccess(Object response) {
                if (!(response instanceof JSONArray)) {
                    getGroupHistoryError(completionListener, "Invalid response from server");
                    return;
                }
                final JSONArray json = (JSONArray) response;
                final ArrayList<RespokeGroupMessage> results = new ArrayList<>(json.length());
                try {
                    // Convert each JSON entry into a group message; abort on the first bad entry.
                    for (int i = 0; i < json.length(); i++) {
                        final JSONObject jsonMessage = json.getJSONObject(i);
                        final RespokeGroupMessage message = buildGroupMessage(jsonMessage);
                        results.add(message);
                    }
                } catch (JSONException e) {
                    getGroupHistoryError(completionListener, "Error parsing JSON response");
                    return;
                }
                // Deliver the result on the main (UI) thread.
                new Handler(Looper.getMainLooper()).post(new Runnable() {
                    @Override
                    public void run() {
                        if (completionListener != null) {
                            completionListener.onSuccess(results);
                        }
                    }
                });
            }

            @Override
            public void onError(final String errorMessage) {
                getGroupHistoryError(completionListener, errorMessage);
            }
        });
    }
}
public class AstaDatabaseReader {

   /**
    * Processes resource assignments for the current project: reads the permanent
    * schedule allocations (joined with their resource skills) and hands them to
    * the shared reader for conversion.
    *
    * @throws SQLException if the query fails
    */
   private void processAssignments() throws SQLException {
      // Join allocations to resource skills, restricted to the current project,
      // ordered by allocation id for deterministic processing.
      List<Row> permanentAssignments = getRows("select * from permanent_schedul_allocation inner join perm_resource_skill on permanent_schedul_allocation.allocatiop_of = perm_resource_skill.perm_resource_skillid where permanent_schedul_allocation.projid=? order by permanent_schedul_allocation.permanent_schedul_allocationid", m_projectID);
      m_reader.processAssignments(permanentAssignments);
   }
}
public class ResourceWriter { /** * 将XML内容写入文件 * @ param content XML内容 * @ param filename 需要写入的文件 * @ throws NoSuchPathException 写入文件时 , 无法发现路径引发的异常 * @ throws IOException 文件写入异常 */ public void writeToFile ( StringBuffer content , String filename ) throws NoSuchPathException , IOException { } }
try { FileWriter input = new FileWriter ( filename ) ; input . write ( content . toString ( ) , 0 , content . length ( ) ) ; input . flush ( ) ; input . close ( ) ; } catch ( FileNotFoundException e ) { throw new NoSuchPathException ( e ) ; }
public class KTypeArrayDeque {

    /**
     * Removes and returns the element at the head of the deque.
     * {@inheritDoc}
     */
    @Override
    public KType removeFirst() {
        assert size() > 0 : "The deque is empty.";
        // Read the head slot, then clear it so the stored reference can be collected.
        final KType result = Intrinsics.<KType> cast(buffer[head]);
        buffer[head] = Intrinsics.empty();
        // Advance head one slot, wrapping around the circular buffer.
        head = oneRight(head, buffer.length);
        return result;
    }
}
public class GeomajasController { /** * Gets the { @ link SerializationPolicy } for given module base URL and strong name if there is one . * Use { @ link # setSerializationPolicyLocator ( SerializationPolicyLocator ) } to provide an alternative approach . * @ param request the HTTP request being serviced * @ param moduleBaseURL as specified in the incoming payload * @ param strongName a strong name that uniquely identifies a serialization policy file * @ return a { @ link SerializationPolicy } for the given module base URL and strong name , or < code > null < / code > if * there is none */ protected SerializationPolicy doGetSerializationPolicy ( HttpServletRequest request , String moduleBaseURL , String strongName ) { } }
if ( getSerializationPolicyLocator ( ) == null ) { return super . doGetSerializationPolicy ( request , moduleBaseURL , strongName ) ; } else { return getSerializationPolicyLocator ( ) . loadPolicy ( request , moduleBaseURL , strongName ) ; }
public class Node { /** * Gets all the property types , in sorted order . */ private byte [ ] getSortedPropTypes ( ) { } }
int count = 0 ; for ( PropListItem x = propListHead ; x != null ; x = x . next ) { count ++ ; } byte [ ] keys = new byte [ count ] ; for ( PropListItem x = propListHead ; x != null ; x = x . next ) { count -- ; keys [ count ] = x . propType ; } Arrays . sort ( keys ) ; return keys ;
public class CmsGallerySearchParameters { /** * Checks if the given list of resource type names contains a function - like type . < p > * @ param resourceTypes the collection of resource types * @ return true if the list contains a function - like type */ private boolean containsFunctionType ( List < String > resourceTypes ) { } }
if ( resourceTypes . contains ( CmsXmlDynamicFunctionHandler . TYPE_FUNCTION ) ) { return true ; } if ( resourceTypes . contains ( CmsResourceTypeFunctionConfig . TYPE_NAME ) ) { return true ; } return false ;
public class IntStream {

    /**
     * Returns an {@code IntStream} produced by iterative application of an accumulation function
     * to the reduction value and the next element of the current stream; i.e. the running values
     * {@code value1}, {@code acc(value1, value2)}, {@code acc(acc(value1, value2), value3)}, etc.
     * <p>This is an intermediate operation.
     * <p>Example: accumulator {@code (a, b) -> a + b} applied to {@code [1, 2, 3, 4, 5]}
     * yields {@code [1, 3, 6, 10, 15]}.
     *
     * @param accumulator the accumulation function
     * @return the new stream
     * @throws NullPointerException if {@code accumulator} is null
     * @since 1.1.6
     */
    @NotNull
    public IntStream scan(@NotNull final IntBinaryOperator accumulator) {
        Objects.requireNonNull(accumulator);
        // Lazily wraps the current iterator; `params` is propagated unchanged
        // (presumably the stream's shared configuration — confirm against the constructor).
        return new IntStream(params, new IntScan(iterator, accumulator));
    }
}
public class CPDefinitionInventoryUtil {

	/**
	 * Returns the cp definition inventory with the primary key or throws a
	 * {@link NoSuchCPDefinitionInventoryException} if it could not be found.
	 *
	 * @param CPDefinitionInventoryId the primary key of the cp definition inventory
	 * @return the cp definition inventory
	 * @throws NoSuchCPDefinitionInventoryException if a cp definition inventory with the primary key could not be found
	 */
	public static CPDefinitionInventory findByPrimaryKey(long CPDefinitionInventoryId)
		throws com.liferay.commerce.exception.NoSuchCPDefinitionInventoryException {
		// Static facade over the persistence layer (standard Liferay service-builder pattern).
		return getPersistence().findByPrimaryKey(CPDefinitionInventoryId);
	}
}
public class MonthRenderer {

    /**
     * Returns the shared renderer instance, lazily created on first use via
     * double-checked locking on {@code MonthRenderer.class}.
     *
     * <p>NOTE(review): double-checked locking is only safe under the Java memory model
     * if {@code instanceRenderer} is declared {@code volatile}; the field declaration
     * is outside this view — confirm.
     *
     * @return Renderer producing dates formatted as "yyyy-MM"
     */
    public static final Renderer<Date> instance() {
        // NOPMD it's thread save!
        if (MonthRenderer.instanceRenderer == null) {
            synchronized (MonthRenderer.class) {
                if (MonthRenderer.instanceRenderer == null) {
                    MonthRenderer.instanceRenderer = new MonthRenderer("yyyy-MM");
                }
            }
        }
        return MonthRenderer.instanceRenderer;
    }
}
public class HintedHandOffManager { /** * read less columns ( mutations ) per page if they are very large */ private int calculatePageSize ( ) { } }
int meanColumnCount = hintStore . getMeanColumns ( ) ; if ( meanColumnCount <= 0 ) return PAGE_SIZE ; int averageColumnSize = ( int ) ( hintStore . getMeanRowSize ( ) / meanColumnCount ) ; if ( averageColumnSize <= 0 ) return PAGE_SIZE ; // page size of 1 does not allow actual paging b / c of > = behavior on startColumn return Math . max ( 2 , Math . min ( PAGE_SIZE , 4 * 1024 * 1024 / averageColumnSize ) ) ;
public class CreateHITWithHITTypeRequestMarshaller {

    /**
     * Marshalls the given request object field-by-field into the protocol marshaller.
     * (AWS SDK generated-style marshaller; each getter is bound to its wire field.)
     *
     * @param createHITWithHITTypeRequest the request to marshall; must not be null
     * @param protocolMarshaller the target marshaller
     * @throws SdkClientException if the request is null or any field fails to marshall
     */
    public void marshall(CreateHITWithHITTypeRequest createHITWithHITTypeRequest, ProtocolMarshaller protocolMarshaller) {
        if (createHITWithHITTypeRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(createHITWithHITTypeRequest.getHITTypeId(), HITTYPEID_BINDING);
            protocolMarshaller.marshall(createHITWithHITTypeRequest.getMaxAssignments(), MAXASSIGNMENTS_BINDING);
            protocolMarshaller.marshall(createHITWithHITTypeRequest.getLifetimeInSeconds(), LIFETIMEINSECONDS_BINDING);
            protocolMarshaller.marshall(createHITWithHITTypeRequest.getQuestion(), QUESTION_BINDING);
            protocolMarshaller.marshall(createHITWithHITTypeRequest.getRequesterAnnotation(), REQUESTERANNOTATION_BINDING);
            protocolMarshaller.marshall(createHITWithHITTypeRequest.getUniqueRequestToken(), UNIQUEREQUESTTOKEN_BINDING);
            protocolMarshaller.marshall(createHITWithHITTypeRequest.getAssignmentReviewPolicy(), ASSIGNMENTREVIEWPOLICY_BINDING);
            protocolMarshaller.marshall(createHITWithHITTypeRequest.getHITReviewPolicy(), HITREVIEWPOLICY_BINDING);
            protocolMarshaller.marshall(createHITWithHITTypeRequest.getHITLayoutId(), HITLAYOUTID_BINDING);
            protocolMarshaller.marshall(createHITWithHITTypeRequest.getHITLayoutParameters(), HITLAYOUTPARAMETERS_BINDING);
        } catch (Exception e) {
            // Broad catch is the generated-marshaller convention: any failure is
            // rewrapped as a client-side exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class AsyncContextImpl { /** * Called by the container when the initial request is finished . * If this request has a dispatch or complete call pending then * this will be started . Caller holds this context's monitor ( method is synchronized ) . */ public synchronized void initialRequestDone ( ) { } }
// Mark the initial request as complete before any handover/processing.
initialRequestDone = true ;
// If a previous async context exists, notify it that this one starts, then drop the reference.
if ( previousAsyncContext != null ) { previousAsyncContext . onAsyncStart ( this ) ; previousAsyncContext = null ; }
// Kick off any pending dispatch/complete unless a task is already being processed.
if ( ! processingAsyncTask ) { processAsyncTask ( ) ; }
// The initiating thread is done with this context; clear the reference.
initiatingThread = null ;
public class OptionUtil {
    /**
     * Resolves {@code folderPath} to an existing directory path.
     * If the path points at a regular file, its parent directory is used instead;
     * the directory (including any missing parents) is created when absent.
     *
     * @param folderPath candidate folder path
     * @return path of the resolved (and possibly freshly created) directory
     */
    private static String getFolderPath(String folderPath) {
        File dir = new File(folderPath);
        // isFile() is true only for an existing regular file, so this matches
        // the "exists and is a file" case: fall back to the parent directory.
        if (dir.isFile()) {
            dir = new File(dir.getParent());
        }
        if (!dir.exists()) {
            dir.mkdirs();
        }
        return dir.getPath();
    }
}
public class PropertiesConfigurationSource { /** * Creates an instance of { @ link PropertiesConfigurationSource } . * @ throws ConfigurationException if failed to create the { @ link PropertiesConfiguration } * @ throws IOException if failed to read from { @ code reader } . */ public static PropertiesConfigurationSource create ( Reader reader , int priority ) throws ConfigurationException , IOException { } }
return new PropertiesConfigurationSource ( createPropertiesConfiguration ( reader ) , priority ) ;
public class ReflectionToStringBuilder { /** * Appends the fields and values defined by the given object of the given Class . * If a cycle is detected as an object is & quot ; toString ( ) ' ed & quot ; , such an object is rendered as if * < code > Object . toString ( ) < / code > had been called and not implemented by the object . * @ param clazz * The class of object parameter */ protected void appendFieldsIn ( final Class < ? > clazz ) { } }
// Arrays have no declared fields to reflect over; render the elements instead.
if ( clazz . isArray ( ) ) { this . reflectionAppendArray ( this . getObject ( ) ) ; return ; }
// Only fields declared directly on this class; superclass fields are handled by callers.
final Field [ ] fields = clazz . getDeclaredFields ( ) ;
// Force accessibility so private fields can be read reflectively.
AccessibleObject . setAccessible ( fields , true ) ;
for ( final Field field : fields ) {
    final String fieldName = field . getName ( ) ;
    // accept() applies the configured include/exclude filtering.
    if ( this . accept ( field ) ) {
        try {
            // Warning : Field . get ( Object ) creates wrappers objects
            // for primitive types .
            final Object fieldValue = this . getValue ( field ) ;
            // Optionally skip null-valued fields when excludeNullValues is set.
            if ( ! excludeNullValues || fieldValue != null ) { this . append ( fieldName , fieldValue ) ; }
        } catch ( final IllegalAccessException ex ) {
            // this can ' t happen . Would get a Security exception
            // instead
            // throw a runtime exception in case the impossible
            // happens .
            throw new InternalError ( "Unexpected IllegalAccessException: " + ex . getMessage ( ) ) ;
        }
    }
}
public class Schedule { /** * The tags to apply to policy - created resources . These user - defined tags are in addition to the AWS - added lifecycle * tags . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setTagsToAdd ( java . util . Collection ) } or { @ link # withTagsToAdd ( java . util . Collection ) } if you want to * override the existing values . * @ param tagsToAdd * The tags to apply to policy - created resources . These user - defined tags are in addition to the AWS - added * lifecycle tags . * @ return Returns a reference to this object so that method calls can be chained together . */ public Schedule withTagsToAdd ( Tag ... tagsToAdd ) { } }
if ( this . tagsToAdd == null ) { setTagsToAdd ( new java . util . ArrayList < Tag > ( tagsToAdd . length ) ) ; } for ( Tag ele : tagsToAdd ) { this . tagsToAdd . add ( ele ) ; } return this ;
public class CmsADEConfigCacheState { /** * Helper method to retrieve the parent folder type or < code > null < / code > if none available . < p > * @ param rootPath the path of a resource * @ return the parent folder content type */ public String getParentFolderType ( String rootPath ) { } }
String parent = CmsResource . getParentFolder ( rootPath ) ; if ( parent == null ) { return null ; } String type = m_folderTypes . get ( parent ) ; // type may be null return type ;
public class SvgGraphicsContext { /** * Draw a circle on the < code > GraphicsContext < / code > . * @ param parent * parent group object * @ param name * The circle ' s name . * @ param position * The center position as a coordinate . * @ param radius * The circle ' s radius . * @ param style * The styling object by which the circle should be drawn . */ public void drawCircle ( Object parent , String name , Coordinate position , double radius , ShapeStyle style ) { } }
if ( isAttached ( ) ) { Element circle = helper . createOrUpdateElement ( parent , name , "circle" , style ) ; Dom . setElementAttribute ( circle , "cx" , Integer . toString ( ( int ) position . getX ( ) ) ) ; Dom . setElementAttribute ( circle , "cy" , Integer . toString ( ( int ) position . getY ( ) ) ) ; Dom . setElementAttribute ( circle , "r" , Integer . toString ( ( int ) radius ) ) ; }
public class ExceptionDestinationHandlerImpl { /** * Returns a String representing the name of the default exception destination * for this messaging engine . Pure accessor ; only adds entry / exit trace around the read . */ public String getDefaultExceptionDestinationName ( ) { } }
// Standard entry/exit tracing around a simple field read.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getDefaultExceptionDestinationName" ) ;
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getDefaultExceptionDestinationName" , _defaultExceptionDestinationName ) ;
return _defaultExceptionDestinationName ;
public class ClaimBean { /** * { @ inheritDoc } * Creates the injectable claim value , dispatching on the declared bean type : * parameterized types ( e . g . ClaimValue < T > , Optional < T > ) versus raw classes . * @ throws IllegalArgumentException for any other injection type */ @ SuppressWarnings ( "unchecked" ) @ Override public T create ( CreationalContext < T > creationalContext ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . entry ( tc , "create" , creationalContext ) ; }
T instance = null ;
// Resolve the current CDI container to obtain the BeanManager.
CDI < Object > cdi = CDI . current ( ) ;
BeanManager beanManager = cdi . getBeanManager ( ) ;
// Dispatch on the declared type of the injection point.
if ( beanType instanceof ParameterizedType ) { instance = createInstanceForParameterizedType ( creationalContext , beanManager ) ; }
else if ( beanType instanceof Class ) { instance = createClaimValueForClassType ( ) ; }
else {
    // Any other Type flavor is not a supported injection target.
    throw new IllegalArgumentException ( Tr . formatMessage ( tc , "MPJWT_CDI_INVALID_INJECTION_TYPE" , beanType ) ) ;
}
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "create" , instance ) ; }
return instance ;
public class ServiceException { /** * Sets the exception which caused this service exception . * It ' s important to set this exception to understand * and reproduce the error ! * @ param innerException Underlying exception */ public ServiceException setException ( Throwable innerException ) { } }
ByteArrayOutputStream messageOutputStream = new ByteArrayOutputStream ( ) ; innerException . printStackTrace ( new PrintStream ( messageOutputStream ) ) ; String exceptionMessage = messageOutputStream . toString ( ) ; this . fault . setException ( exceptionMessage ) ; return this ;
public class MFSResource { /** * Get the ordered Map of most frequent senses for a lemma # pos entry . * @ param lemmaPOSClass * the lemma # pos entry * @ return the ordered multimap of senses */ public TreeMultimap < Integer , String > getOrderedMap ( final String lemmaPOSClass ) { } }
final List < String > mfsList = this . multiMap . get ( lemmaPOSClass ) ; final TreeMultimap < Integer , String > mfsMap = TreeMultimap . create ( Ordering . natural ( ) . reverse ( ) , Ordering . natural ( ) ) ; if ( ! mfsList . isEmpty ( ) ) { getOrderedSenses ( mfsList , mfsMap ) ; } return mfsMap ;
public class PythonExecutioner { /** * Executes python code . Also manages python thread state . * @ param code */ public static void exec ( String code ) { } }
code = getFunctionalCode ( "__f_" + Thread . currentThread ( ) . getId ( ) , code ) ; acquireGIL ( ) ; log . info ( "CPython: PyRun_SimpleStringFlag()" ) ; log . info ( code ) ; int result = PyRun_SimpleStringFlags ( code , null ) ; if ( result != 0 ) { PyErr_Print ( ) ; throw new RuntimeException ( "exec failed" ) ; } log . info ( "Exec done" ) ; releaseGIL ( ) ;
public class PredefinedPromotionLevelController { /** * Gets the list of predefined promotion levels , decorated with HATEOAS links * for creation and reordering . */ @ RequestMapping ( value = "predefinedPromotionLevels" , method = RequestMethod . GET ) public Resources < PredefinedPromotionLevel > getPredefinedPromotionLevelList ( ) { } }
// Self link points back at this endpoint; additional links expose the
// creation form and the reorder action to API clients.
return Resources . of ( predefinedPromotionLevelService . getPredefinedPromotionLevels ( ) , uri ( on ( getClass ( ) ) . getPredefinedPromotionLevelList ( ) ) ) . with ( Link . CREATE , uri ( on ( getClass ( ) ) . getPredefinedPromotionLevelCreationForm ( ) ) ) . with ( "_reorderPromotionLevels" , uri ( on ( getClass ( ) ) . reorderPromotionLevelListForBranch ( null ) ) ) ;
public class AWSDirectoryServiceClient { /** * Returns the shared directories in your account . * @ param describeSharedDirectoriesRequest * @ return Result of the DescribeSharedDirectories operation returned by the service . * @ throws EntityDoesNotExistException * The specified entity could not be found . * @ throws InvalidNextTokenException * The < code > NextToken < / code > value is not valid . * @ throws InvalidParameterException * One or more parameters are not valid . * @ throws UnsupportedOperationException * The operation is not supported . * @ throws ClientException * A client exception has occurred . * @ throws ServiceException * An exception has occurred in AWS Directory Service . * @ sample AWSDirectoryService . DescribeSharedDirectories * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ds - 2015-04-16 / DescribeSharedDirectories " target = " _ top " > AWS * API Documentation < / a > */ @ Override public DescribeSharedDirectoriesResult describeSharedDirectories ( DescribeSharedDirectoriesRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeSharedDirectories ( request ) ;
public class OntologyBuilderImpl { /** * Normalizes and adds class disjointness axiom * DisjointClasses : = ' DisjointClasses ' ' ( ' axiomAnnotations * subClassExpression subClassExpression { subClassExpression } ' ) ' < br > * Implements rule [ C2 ] : < br > * - eliminates all occurrences of bot and if the result contains < br > * - no top and at least two elements then disjointness < br > * - one top then emptiness of all other elements < br > * - two tops then inconsistency ( this behavior is an extension of OWL 2 , where duplicates are removed from the list ) */ @ Override public void addDisjointClassesAxiom ( ClassExpression ... ces ) throws InconsistentOntologyException { } }
for ( ClassExpression c : ces ) checkSignature ( c ) ; classAxioms . addDisjointness ( ces ) ;
public class TaskController { /** * Use DestroyJVMTaskRunnable to kill task JVM asynchronously . Wait for the * confirmed kill if configured so . * @ param context Task context */ final void destroyTaskJVM ( TaskControllerContext context ) { } }
Thread taskJVMDestroyer = new Thread ( new DestroyJVMTaskRunnable ( context ) ) ; taskJVMDestroyer . start ( ) ; if ( waitForConfirmedKill ) { try { taskJVMDestroyer . join ( ) ; } catch ( InterruptedException e ) { throw new IllegalStateException ( "destroyTaskJVM: Failed to join " + taskJVMDestroyer . getName ( ) ) ; } }
public class UpdateDescription { /** * Find the diff between two documents . * < p > NOTE : This does not do a full diff on [ BsonArray ] . If there is * an inequality between the old and new array , the old array will * simply be replaced by the new one . * @ param beforeDocument original document * @ param afterDocument document to diff on * @ return a description of the updated fields and removed keys between the documents . */ public static UpdateDescription diff ( @ Nullable final BsonDocument beforeDocument , @ Nullable final BsonDocument afterDocument ) { } }
if ( beforeDocument == null || afterDocument == null ) { return new UpdateDescription ( new BsonDocument ( ) , new HashSet < > ( ) ) ; } return UpdateDescription . diff ( beforeDocument , afterDocument , null , new BsonDocument ( ) , new HashSet < > ( ) ) ;
public class MonoT { /** * Construct an MonoT from an AnyM that wraps a monad containing MonoWs * @ param monads AnyM that contains a monad wrapping an Mono * @ return MonoT */ public static < W extends WitnessType < W > , A > MonoT < W , A > of ( final AnyM < W , Mono < A > > monads ) { } }
return new MonoT < > ( monads ) ;
public class NFAppenderAttachableImpl { /** * ( non - Javadoc ) * @ see * org . apache . log4j . helpers . AppenderAttachableImpl # getAppender ( java . lang * . String ) */ @ Override public Appender getAppender ( String name ) { } }
if ( appenderList == null || name == null ) return null ; Appender appender ; Iterator < Appender > it = appenderList . iterator ( ) ; while ( it . hasNext ( ) ) { appender = ( Appender ) it . next ( ) ; if ( name . equals ( appender . getName ( ) ) ) { return appender ; } } return null ;
public class MetricsTags { /** * Create host tag based on the local host . * @ return host tag . */ public static String [ ] createHostTag ( ) { } }
String [ ] hostTag = { MetricsTags . TAG_HOST , null } ; try { hostTag [ 1 ] = InetAddress . getLocalHost ( ) . getHostName ( ) ; } catch ( UnknownHostException e ) { hostTag [ 1 ] = "unknown" ; } return hostTag ;
public class CommonUtils { /** * Computes the relative path from baseDir to target ( both resolved to absolute , * normalized system paths ) . Throws if the paths are equal or share no common root . * TODO ignore case for windows * @ param target the file to express relative to baseDir * @ param baseDir the base directory * @ return a relative File from baseDir to target */ public static File relativize ( File target , File baseDir ) { } }
String separator = File . separator ;
try {
    // Normalize both paths to absolute, system-separator form without a trailing separator.
    String absTargetPath = target . getAbsolutePath ( ) ;
    absTargetPath = FilenameUtils . normalizeNoEndSeparator ( FilenameUtils . separatorsToSystem ( absTargetPath ) ) ;
    String absBasePath = baseDir . getAbsolutePath ( ) ;
    absBasePath = FilenameUtils . normalizeNoEndSeparator ( FilenameUtils . separatorsToSystem ( absBasePath ) ) ;
    // Equal paths have no meaningful relative form.
    if ( filePathEquals ( absTargetPath , absBasePath ) ) { throw new IllegalArgumentException ( "target and base are equal: " + absTargetPath ) ; }
    // Split on the (regex-quoted) platform separator.
    String [ ] absTargets = absTargetPath . split ( Pattern . quote ( separator ) ) ;
    String [ ] absBases = absBasePath . split ( Pattern . quote ( separator ) ) ;
    int minLength = Math . min ( absTargets . length , absBases . length ) ;
    // Index of the last path segment both sides share.
    int lastCommonRoot = - 1 ;
    for ( int i = 0 ; i < minLength ; i ++ ) {
        if ( filePathEquals ( absTargets [ i ] , absBases [ i ] ) ) { lastCommonRoot = i ; }
        else { break ; }
    }
    if ( lastCommonRoot == - 1 ) {
        // This case can happen on Windows when drive of two file paths differ .
        throw new IllegalArgumentException ( "no common root" ) ;
    }
    // One ".." per remaining base segment climbs up to the common root ...
    String relativePath = "" ;
    for ( int i = lastCommonRoot + 1 ; i < absBases . length ; i ++ ) { relativePath = relativePath + ".." + separator ; }
    // ... then descend through the remaining target segments.
    for ( int i = lastCommonRoot + 1 ; i < absTargets . length ; i ++ ) {
        relativePath = relativePath + absTargets [ i ] ;
        if ( i != absTargets . length - 1 ) { relativePath = relativePath + separator ; }
    }
    return new File ( relativePath ) ;
} catch ( Exception e ) {
    // Wrap with full context (both inputs and the separator) for diagnosis.
    throw new RuntimeException ( String . format ( "target: %s; baseDir: %s; separator: %s" , target , baseDir , separator ) , e ) ;
}
public class QueryBuilder { /** * Method to get an Attribute Query in case of a Select where criteria . * If a linkto chain exists , the query is wired through the first chained * builder so the outer query constrains on the inner one . * @ return Attribute Query * @ throws EFapsException on error */ protected AttributeQuery getAttributeQuery ( ) throws EFapsException { } }
AttributeQuery ret = this . getAttributeQuery ( getSelectAttributeName ( ) ) ;
// check if in the linkto chain is one before this one
if ( ! this . attrQueryBldrs . isEmpty ( ) ) {
    // Take the first chained builder, nest the current query inside it, and
    // replace the result with the chained builder's query.
    final QueryBuilder queryBldr = this . attrQueryBldrs . values ( ) . iterator ( ) . next ( ) ;
    queryBldr . addWhereAttrInQuery ( queryBldr . getLinkAttributeName ( ) , ret ) ;
    ret = queryBldr . getAttributeQuery ( ) ;
}
return ret ;
public class CmsXmlGenericWrapper { /** * Provides a type safe / generic wrapper for { @ link Document # selectNodes ( String ) } . < p > * @ param doc the document to select the nodes from * @ param xpathExpression the XPATH expression to select * @ return type safe access to { @ link Document # selectNodes ( String ) } */ @ SuppressWarnings ( "unchecked" ) public static List < Node > selectNodes ( Document doc , String xpathExpression ) { } }
return doc . selectNodes ( xpathExpression ) ;
public class FctBnAccEntitiesProcessors { /** * < p > Get PrcAccSettingsSave ( create and put into map ) . Lazy singleton per * processor map ; the instance is fully initialized before publication . < / p > * @ param pAddParam additional param * @ return requested PrcAccSettingsSave * @ throws Exception - an exception */ protected final PrcAccSettingsSave < RS > lazyGetPrcAccSettingsSave ( final Map < String , Object > pAddParam ) throws Exception { } }
// Cached under the processor's simple class name; the cast is safe because
// only PrcAccSettingsSave instances are stored under this key.
@ SuppressWarnings ( "unchecked" ) PrcAccSettingsSave < RS > proc = ( PrcAccSettingsSave < RS > ) this . processorsMap . get ( PrcAccSettingsSave . class . getSimpleName ( ) ) ;
if ( proc == null ) {
    proc = new PrcAccSettingsSave < RS > ( ) ;
    proc . setSrvAccSettings ( getSrvAccSettings ( ) ) ;
    // assigning fully initialized object :
    this . processorsMap . put ( PrcAccSettingsSave . class . getSimpleName ( ) , proc ) ;
}
return proc ;
public class CmsSearchManager { /** * Registers a new Solr core for the given index . < p > * Requires an enabled Solr configuration ; supports an external HTTP server * ( single core ) or an embedded core container . * @ param index the index to register a new Solr core for * @ throws CmsConfigurationException if no Solr server is configured or core creation fails */ @ SuppressWarnings ( "resource" ) public void registerSolrIndex ( CmsSolrIndex index ) throws CmsConfigurationException { } }
if ( ( m_solrConfig == null ) || ! m_solrConfig . isEnabled ( ) ) {
    // No solr server configured
    throw new CmsConfigurationException ( Messages . get ( ) . container ( Messages . ERR_SOLR_NOT_ENABLED_0 ) ) ;
}
if ( m_solrConfig . getServerUrl ( ) != null ) {
    // HTTP Server configured
    // TODO Implement multi core support for HTTP server
    // @ see http : / / lucidworks . lucidimagination . com / display / solr / Configuring + solr . xml
    index . setSolrServer ( new Builder ( ) . withBaseSolrUrl ( m_solrConfig . getServerUrl ( ) ) . build ( ) ) ;
}
// get the core container that contains one core for each configured index
if ( m_coreContainer == null ) { m_coreContainer = createCoreContainer ( ) ; }
// unload the existing core if it exists to avoid problems with forced unlock .
if ( m_coreContainer . getAllCoreNames ( ) . contains ( index . getCoreName ( ) ) ) { m_coreContainer . unload ( index . getCoreName ( ) , false , false , true ) ; }
// ensure that all locks on the index are gone
ensureIndexIsUnlocked ( index . getPath ( ) ) ;
// load the core to the container
// Create the data directory on demand (logged at INIT level).
File dataDir = new File ( index . getPath ( ) ) ;
if ( ! dataDir . exists ( ) ) {
    dataDir . mkdirs ( ) ;
    if ( CmsLog . INIT . isInfoEnabled ( ) ) { CmsLog . INIT . info ( Messages . get ( ) . getBundle ( ) . key ( Messages . INIT_SOLR_INDEX_DIR_CREATED_2 , index . getName ( ) , index . getPath ( ) ) ) ; }
}
// Instance directory lives under the configured Solr home, named after the index.
File instanceDir = new File ( m_solrConfig . getHome ( ) + FileSystems . getDefault ( ) . getSeparator ( ) + index . getName ( ) ) ;
if ( ! instanceDir . exists ( ) ) {
    instanceDir . mkdirs ( ) ;
    if ( CmsLog . INIT . isInfoEnabled ( ) ) { CmsLog . INIT . info ( Messages . get ( ) . getBundle ( ) . key ( Messages . INIT_SOLR_INDEX_DIR_CREATED_2 , index . getName ( ) , index . getPath ( ) ) ) ; }
}
// create the core
// TODO : suboptimal - forces always the same schema
SolrCore core = null ;
try {
    // creation includes registration .
    // TODO : this was the old code : core = m_coreContainer . create ( descriptor , false ) ;
    Map < String , String > properties = new HashMap < String , String > ( 3 ) ;
    properties . put ( CoreDescriptor . CORE_DATADIR , dataDir . getAbsolutePath ( ) ) ;
    properties . put ( CoreDescriptor . CORE_CONFIGSET , "default" ) ;
    core = m_coreContainer . create ( index . getCoreName ( ) , instanceDir . toPath ( ) , properties , false ) ;
} catch ( NullPointerException e ) {
    // Core creation failed part-way; release the half-created core before rethrowing.
    if ( core != null ) { core . close ( ) ; }
    throw new CmsConfigurationException ( Messages . get ( ) . container ( Messages . ERR_SOLR_SERVER_NOT_CREATED_3 , index . getName ( ) + " (" + index . getCoreName ( ) + ")" , index . getPath ( ) , m_solrConfig . getSolrConfigFile ( ) . getAbsolutePath ( ) ) , e ) ;
}
// Fall back to an embedded server when no HTTP server was assigned above.
if ( index . isNoSolrServerSet ( ) ) { index . setSolrServer ( new EmbeddedSolrServer ( m_coreContainer , index . getCoreName ( ) ) ) ; }
if ( CmsLog . INIT . isInfoEnabled ( ) ) { CmsLog . INIT . info ( Messages . get ( ) . getBundle ( ) . key ( Messages . INIT_SOLR_SERVER_CREATED_1 , index . getName ( ) + " (" + index . getCoreName ( ) + ")" ) ) ; }
public class RSAUtils { /** * Decrypts data with the RSA public key ( verify - style decryption ) . * @ param encryptedData the encrypted data * @ param publicKey BASE64 - encoded public key * @ return the decrypted bytes * @ throws Exception on any decoding , key , or cipher failure */ public static byte [ ] decryptByPublicKey ( byte [ ] encryptedData , String publicKey ) throws Exception { } }
// NOTE(review): decoding base64 to a String and then calling getBytes() is a
// lossy round trip for arbitrary binary key material — Base64Utils.decode
// should probably yield byte[] directly; confirm against its API.
String keyBytes = Base64Utils . decode ( publicKey ) ;
X509EncodedKeySpec x509KeySpec = new X509EncodedKeySpec ( keyBytes . getBytes ( ) ) ;
KeyFactory keyFactory = KeyFactory . getInstance ( KEY_ALGORITHM ) ;
Key publicK = keyFactory . generatePublic ( x509KeySpec ) ;
Cipher cipher = Cipher . getInstance ( keyFactory . getAlgorithm ( ) ) ;
cipher . init ( Cipher . DECRYPT_MODE , publicK ) ;
int inputLen = encryptedData . length ;
ByteArrayOutputStream out = new ByteArrayOutputStream ( ) ;
int offSet = 0 ;
int i = 0 ;
// setData decrypts the input block by block into the output stream.
setData ( encryptedData , cipher , inputLen , out , offSet , i ) ;
byte [ ] decryptedData = out . toByteArray ( ) ;
out . close ( ) ;
return decryptedData ;
public class ReservoirItemsSketch { /** * Used during union operations to ensure we do not overwrite an existing reservoir . Creates a * shallow copy of the reservoir . * @ return A copy of the current sketch */ @ SuppressWarnings ( "unchecked" ) ReservoirItemsSketch < T > copy ( ) { } }
return new ReservoirItemsSketch < > ( reservoirSize_ , currItemsAlloc_ , itemsSeen_ , rf_ , ( ArrayList < T > ) data_ . clone ( ) ) ;
public class ValidationFileUtil { /** * Write string to file . * @ param file the file * @ param content the content * @ throws IOException the io exception */ public static void writeStringToFile ( File file , String content ) throws IOException { } }
OutputStream outputStream = getOutputStream ( file ) ; outputStream . write ( content . getBytes ( ) ) ;
public class ServerPrepareStatementCache { /** * Remove eldestEntry when the cache exceeds its maximum size ; the evicted * prepared statement is deallocated on the server when no longer in use . * @ param eldest eldest entry * @ return true if eldest entry must be removed */ @ Override public boolean removeEldestEntry ( Map . Entry eldest ) { } }
boolean mustBeRemoved = this . size ( ) > maxSize ;
if ( mustBeRemoved ) {
    ServerPrepareResult serverPrepareResult = ( ( ServerPrepareResult ) eldest . getValue ( ) ) ;
    // Flag the entry so in-flight users know it left the cache.
    serverPrepareResult . setRemoveFromCache ( ) ;
    // Only release server-side resources when no one still holds the statement.
    if ( serverPrepareResult . canBeDeallocate ( ) ) {
        try { protocol . forceReleasePrepareStatement ( serverPrepareResult . getStatementId ( ) ) ; }
        catch ( SQLException e ) {
            // eat exception — best-effort deallocation; eviction must not fail.
        }
    }
}
return mustBeRemoved ;
public class JarMain { /** * By default read config from current dir * @ param args cmd * @ throws java . io . IOException when the configuration file can ' t be found */ public static void main ( String [ ] args ) throws IOException { } }
if ( args . length == 0 ) { System . out . println ( "No command given, choose: \n\tinit\n\tpull\n\tpush\n\tpushTerms" ) ; return ; } Map < String , String > parameters = new HashMap < String , String > ( ) ; if ( args . length > 1 ) { for ( String s : args ) { String [ ] splitted = s . split ( "=" ) ; if ( splitted . length == 2 ) { parameters . put ( splitted [ 0 ] , splitted [ 1 ] ) ; } } } // Read config Path current = Paths . get ( "" ) ; File configFile = new File ( current . toAbsolutePath ( ) . toString ( ) , "poeditor.properties" ) ; Config config = Config . load ( configFile ) ; BaseTask task = null ; if ( "init" . equals ( args [ 0 ] ) ) { System . out . println ( "Initialize project" ) ; task = new InitTask ( ) ; } else if ( "pull" . equals ( args [ 0 ] ) ) { System . out . println ( "Pull languages" ) ; task = new PullTask ( ) ; } else if ( "push" . equals ( args [ 0 ] ) ) { System . out . println ( "Push languages" ) ; task = new PushTask ( ) ; } else if ( "pushTerms" . equals ( args [ 0 ] ) ) { System . out . println ( "Push terms" ) ; task = new PushTermsTask ( ) ; } else if ( "generate" . equals ( args [ 0 ] ) ) { System . out . println ( "Generate config" ) ; task = new GenerateTask ( ) ; } else if ( "status" . equals ( args [ 0 ] ) ) { System . out . println ( "Status" ) ; task = new StatusTask ( ) ; } if ( task != null ) { task . configure ( config , parameters ) ; task . handle ( ) ; }
public class AlphabeticIndex { /** * Add more index characters ( aside from what are in the locale ) * @ param additions additional characters to add to the index , such as those in Swedish . * @ return this , for chaining */ public AlphabeticIndex < V > addLabels ( Locale ... additions ) { } }
for ( Locale addition : additions ) { addIndexExemplars ( ULocale . forLocale ( addition ) ) ; } buckets = null ; return this ;
public class JSMessageImpl { /** * Reports whether the field for the given accessor is present , dispatching on * the accessor range : cached fields , variant cases , then boxed fields . * The BoxManager stuff is a black art so we ' ll lock round it to be safe . * @ param accessor field accessor ; must be below accessorLimit * @ return true if the field is present * @ throws IndexOutOfBoundsException if accessor is at or above accessorLimit */ public boolean isPresent ( int accessor ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) JmfTr . entry ( this , tc , "isPresent" , new Object [ ] { Integer . valueOf ( accessor ) } ) ;
boolean result ;
// Accessors below cacheSize are plain cached fields: defer to the superclass.
if ( accessor < cacheSize ) { result = super . isPresent ( accessor ) ; }
// Between cacheSize and firstBoxed : variant fields , present when a case is selected.
else if ( accessor < firstBoxed ) { result = getCase ( accessor - cacheSize ) > - 1 ; }
else if ( accessor < accessorLimit ) {
    // Conservative answer : a boxed value is present if its containing box is present ;
    // this is enough to support creation of the JSBoxedImpl for the value , which can
    // then be interrogated element by element .
    synchronized ( getMessageLockArtefact ( ) ) { result = super . isPresent ( boxManager . getBoxAccessor ( accessor - firstBoxed ) ) ; }
} else {
    // Out-of-range accessor: trace the failure before throwing.
    if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) JmfTr . exit ( this , tc , "isPresent" , "IndexOutOfBoundsException" ) ;
    throw new IndexOutOfBoundsException ( ) ;
}
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) JmfTr . exit ( this , tc , "isPresent" , Boolean . valueOf ( result ) ) ;
return result ;