signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ObjectToJsonSerializer { /** * Converts an object to json . * @ param object an object * @ param deidentify the deidentify * @ return a json string of the object */ public final static String toJson ( Object object , DeIdentify deidentify ) { } }
if ( isPrimitive ( object ) ) { Object deidentifiedObj = deidentifyObject ( object , deidentify ) ; String primitiveValue = String . valueOf ( deidentifiedObj ) ; if ( object instanceof String || object instanceof Character || ! object . equals ( deidentifiedObj ) ) { primitiveValue = '"' + primitiveValue + '"' ; } return primitiveValue ; } return JSON . toJSONString ( object , JsonFilter . INSTANCE ) ;
public class ElementBuilder {

    /**
     * Adds ({@code force = true}) or removes ({@code force = false}) the specified
     * CSS class on the element's class list.
     *
     * @param className the CSS class to toggle
     * @param force     {@code true} to add the class, {@code false} to remove it
     * @return this builder, for chaining
     */
    public B css(String className, boolean force) {
        // DOMTokenList.toggle(token, force) implements the add/remove semantics directly.
        get().classList.toggle(className, force);
        return that();
    }
}
public class IterUtil { /** * 获得 { @ link Iterable } 对象的元素类型 ( 通过第一个非空元素判断 ) < br > * 注意 , 此方法至少会调用多次next方法 * @ param iterable { @ link Iterable } * @ return 元素类型 , 当列表为空或元素全部为null时 , 返回null */ public static Class < ? > getElementType ( Iterable < ? > iterable ) { } }
if ( null != iterable ) { final Iterator < ? > iterator = iterable . iterator ( ) ; return getElementType ( iterator ) ; } return null ;
public class AbstractWriteBehindProcessor { /** * Used to partition the list to chunks . * @ param list to be paged . * @ param batchSize batch operation size . * @ param chunkNumber batch chunk number . * @ return sub - list of list if any or null . */ protected List < T > getBatchChunk ( List < T > list , int batchSize , int chunkNumber ) { } }
if ( list == null || list . isEmpty ( ) ) { return null ; } final int start = chunkNumber * batchSize ; final int end = Math . min ( start + batchSize , list . size ( ) ) ; if ( start >= end ) { return null ; } return list . subList ( start , end ) ;
public class ClassLoadingManager { /** * Returns whether a target class is an instance of a reference class . * If a class cannot be loaded by the default { @ link ClassLoader } , this * method will attempt to use the loader for the specified app package . */ public boolean checkInstanceOf ( Context context , CharSequence targetClassName , CharSequence loaderPackage , CharSequence referenceClassName ) { } }
if ( ( targetClassName == null ) || ( referenceClassName == null ) ) { return false ; } // Try a shortcut for efficiency . if ( TextUtils . equals ( targetClassName , referenceClassName ) ) { return true ; } final Class < ? > referenceClass = loadOrGetCachedClass ( context , referenceClassName , loaderPackage ) ; if ( referenceClass == null ) { return false ; } return checkInstanceOf ( context , targetClassName , loaderPackage , referenceClass ) ;
public class RSS090Parser { /** * Parses the root element of an RSS document looking for all items information . * It iterates through the item elements list , obtained from the getItems ( ) method , and invoke * parseItem ( ) for each item element . The resulting RSSItem of each item element is stored in a * list . * @ param rssRoot the root element of the RSS document to parse for all items information . * @ return a list with all the parsed RSSItem beans . */ protected List < Item > parseItems ( final Element rssRoot , final Locale locale ) { } }
final List < Item > items = new ArrayList < Item > ( ) ; for ( final Element item : getItems ( rssRoot ) ) { items . add ( parseItem ( rssRoot , item , locale ) ) ; } return items ;
public class IoUtil {

    /**
     * Copies data between channels using NIO; the channels are NOT closed.
     *
     * @param in  the source {@link ReadableByteChannel}
     * @param out the destination {@link WritableByteChannel}
     * @return the number of bytes copied
     * @throws IORuntimeException on I/O error
     * @since 4.5.0
     */
    public static long copy(ReadableByteChannel in, WritableByteChannel out) throws IORuntimeException {
        // Delegate to the buffered overload with the default buffer size.
        return copy(in, out, DEFAULT_BUFFER_SIZE);
    }
}
public class BigQueryStorageClient { /** * Creates a new read session . A read session divides the contents of a BigQuery table into one or * more streams , which can then be used to read data from the table . The read session also * specifies properties of the data to be read , such as a list of columns or a push - down filter * describing the rows to be returned . * < p > A particular row can be read by at most one stream . When the caller has reached the end of * each stream in the session , then all the data in the table has been read . * < p > Read sessions automatically expire 24 hours after they are created and do not require manual * clean - up by the caller . * < p > Sample code : * < pre > < code > * try ( BigQueryStorageClient bigQueryStorageClient = BigQueryStorageClient . create ( ) ) { * TableReference tableReference = TableReference . newBuilder ( ) . build ( ) ; * String parent = " " ; * int requestedStreams = 0; * ReadSession response = bigQueryStorageClient . createReadSession ( tableReference , parent , requestedStreams ) ; * < / code > < / pre > * @ param tableReference Required . Reference to the table to read . * @ param parent Required . String of the form " projects / your - project - id " indicating the project * this ReadSession is associated with . This is the project that will be billed for usage . * @ param requestedStreams Optional . Initial number of streams . If unset or 0 , we will provide a * value of streams so as to produce reasonable throughput . Must be non - negative . The number * of streams may be lower than the requested number , depending on the amount parallelism that * is reasonable for the table and the maximum amount of parallelism allowed by the system . * < p > Streams must be read starting from offset 0. * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final ReadSession createReadSession ( TableReference tableReference , String parent , int requestedStreams ) { } }
CreateReadSessionRequest request = CreateReadSessionRequest . newBuilder ( ) . setTableReference ( tableReference ) . setParent ( parent ) . setRequestedStreams ( requestedStreams ) . build ( ) ; return createReadSession ( request ) ;
public class ServletRequest {

    /**
     * {@inheritDoc}
     *
     * <p>Delegates directly to the wrapped ("backing") request.
     */
    @Override
    public void setAttribute(final String key, final Serializable value) {
        backing.setAttribute(key, value);
    }
}
public class ProcessingRecordsPrinter {

    /**
     * Tool entry point: parses command-line options and prints process records
     * from HBase for the requested cluster.
     *
     * <p>Options: {@code -c} cluster (required by downstream code),
     * {@code -p} process-file substring filter, {@code -m} maximum record count.
     *
     * @param args command-line arguments (Hadoop generic options supported)
     * @return 0 on success, 1 on failure
     * @see org.apache.hadoop.util.Tool#run(java.lang.String[])
     */
    public int run(String[] args) throws Exception {
        Configuration hbaseConf = HBaseConfiguration.create(getConf());
        // Strip Hadoop's -Dxyz style generic options before our own parsing.
        String[] otherArgs = new GenericOptionsParser(hbaseConf, args).getRemainingArgs();
        CommandLine commandLine = parseArgs(otherArgs);
        // Cluster to report on.
        String cluster = commandLine.getOptionValue("c");
        LOG.info("cluster=" + cluster);
        // Optional substring filter for process file names.
        String processFileSubstring = null;
        if (commandLine.hasOption("p")) {
            processFileSubstring = commandLine.getOptionValue("p");
        }
        LOG.info("processFileSubstring=" + processFileSubstring);
        // Default to no maximum; a malformed -m value is reported but otherwise
        // ignored (the default MAX_VALUE is kept).
        Integer maxCount = Integer.MAX_VALUE;
        if (commandLine.hasOption("m")) {
            try {
                maxCount = Integer.parseInt(commandLine.getOptionValue("m"));
            } catch (NumberFormatException nfe) {
                System.err.println("Error: " + NAME + " maxCount is not an integer: " + commandLine.getOptionValue("m"));
            }
        }
        boolean success = true;
        Connection hbaseConnection = null;
        try {
            hbaseConnection = ConnectionFactory.createConnection(hbaseConf);
            success = printProcessRecordsFromHBase(hbaseConf, hbaseConnection, cluster, maxCount, processFileSubstring);
        } finally {
            // NOTE(review): a null connection here means createConnection threw, so
            // success is forced to false — but the pending exception propagates
            // anyway; confirm this branch is intentional.
            if (hbaseConnection == null) {
                success = false;
            } else {
                hbaseConnection.close();
            }
        }
        // Map boolean outcome to the conventional process exit status.
        return success ? 0 : 1;
    }
}
public class CPDefinitionSpecificationOptionValuePersistenceImpl { /** * Returns the last cp definition specification option value in the ordered set where CPOptionCategoryId = & # 63 ; . * @ param CPOptionCategoryId the cp option category ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching cp definition specification option value * @ throws NoSuchCPDefinitionSpecificationOptionValueException if a matching cp definition specification option value could not be found */ @ Override public CPDefinitionSpecificationOptionValue findByCPOptionCategoryId_Last ( long CPOptionCategoryId , OrderByComparator < CPDefinitionSpecificationOptionValue > orderByComparator ) throws NoSuchCPDefinitionSpecificationOptionValueException { } }
CPDefinitionSpecificationOptionValue cpDefinitionSpecificationOptionValue = fetchByCPOptionCategoryId_Last ( CPOptionCategoryId , orderByComparator ) ; if ( cpDefinitionSpecificationOptionValue != null ) { return cpDefinitionSpecificationOptionValue ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "CPOptionCategoryId=" ) ; msg . append ( CPOptionCategoryId ) ; msg . append ( "}" ) ; throw new NoSuchCPDefinitionSpecificationOptionValueException ( msg . toString ( ) ) ;
public class SSTableLoader {

    /**
     * Releases the shared reference for all sstables; the reference was acquired
     * when each sstable was opened.
     */
    private void releaseReferences() {
        for (SSTableReader sstable : sstables) {
            sstable.selfRef().release();
            // After releasing our reference no one else should hold one.
            assert sstable.selfRef().globalCount() == 0;
        }
    }
}
public class Neo4JVertex { /** * { @ inheritDoc } */ @ Override public Edge addEdge ( String label , Vertex vertex , Object ... keyValues ) { } }
// validate label ElementHelper . validateLabel ( label ) ; // vertex must exist if ( vertex == null ) throw Graph . Exceptions . argumentCanNotBeNull ( "vertex" ) ; // validate properties ElementHelper . legalPropertyKeyValueArray ( keyValues ) ; // transaction should be ready for io operations graph . tx ( ) . readWrite ( ) ; // add edge return session . addEdge ( label , this , ( Neo4JVertex ) vertex , keyValues ) ;
public class ComparableTimSort {

    /**
     * Ensures that the external array tmp has at least the specified number of
     * elements, increasing its size if necessary. The size increases
     * exponentially to ensure amortized linear time complexity.
     *
     * @param minCapacity the minimum required capacity of the tmp array
     * @return tmp, whether or not it grew
     */
    private Object[] ensureCapacity(int minCapacity) {
        if (tmpLen < minCapacity) {
            // Compute the smallest power of 2 > minCapacity by smearing the high
            // bit rightwards, then adding one.
            int newSize = minCapacity;
            newSize |= newSize >> 1;
            newSize |= newSize >> 2;
            newSize |= newSize >> 4;
            newSize |= newSize >> 8;
            newSize |= newSize >> 16;
            newSize++;
            if (newSize < 0) // int overflow — "not bloody likely", but be safe
                newSize = minCapacity;
            else
                // Never allocate more than half of the array being sorted.
                newSize = Math.min(newSize, a.length >>> 1);
            @SuppressWarnings({"unchecked", "UnnecessaryLocalVariable"})
            Object[] newArray = new Object[newSize];
            tmp = newArray;
            tmpLen = newSize;
            tmpBase = 0;
        }
        return tmp;
    }
}
public class CSRFServiceImpl { /** * Checks whether or not the request is valid ( not by passed , valid token ) . * @ param context the context * @ return { @ code true } if the request if valid , { @ code false } otherwise . */ @ Override public boolean isValidRequest ( Context context ) { } }
// Check if we are executing an unsafe method if ( ! isUnsafe ( context ) ) { return true ; } if ( checkCsrfBypass ( context ) ) { LOGGER . debug ( "Bypassing CSRF check for {} {}" , context . route ( ) . getHttpMethod ( ) , context . route ( ) . getUrl ( ) ) ; return true ; } else { String tokenFromRequest = extractTokenFromRequest ( context ) ; if ( tokenFromRequest == null ) { LOGGER . error ( "CSRF Check failed because there is no token in the incoming request headers" ) ; return false ; } String tokenFromContent = extractTokenFromContent ( context ) ; if ( tokenFromContent == null ) { LOGGER . error ( "CSRF Check failed because we are unable to find a token in the incoming request query " + "string or body" ) ; return false ; } if ( compareTokens ( tokenFromRequest , tokenFromContent ) ) { return true ; } else { LOGGER . error ( "CSRF Check failed because the given token is invalid" ) ; return false ; } }
public class ServletRegistrationBean {

    /**
     * Set the URL mappings for the servlet. If not specified the mapping will
     * default to '/'. This will replace any previously specified mappings.
     *
     * @param urlMappings the mappings to set (must not be {@code null})
     * @see #addUrlMappings(String...)
     */
    public void setUrlMappings(Collection<String> urlMappings) {
        Assert.notNull(urlMappings, "UrlMappings must not be null");
        // Defensive copy preserving insertion order and dropping duplicates.
        this.urlMappings = new LinkedHashSet<>(urlMappings);
    }
}
public class AttributeCollector {

    /**
     * Resolves and initializes the specified collected namespace declaration:
     * extracts the URI value from the shared value buffer, optionally interns it,
     * and stores it on the attribute.
     *
     * @param index     index of the namespace declaration to resolve
     * @param internURI whether a non-empty URI should be interned
     * @return the {@code Attribute} containing the namespace declaration
     */
    protected Attribute resolveNamespaceDecl(int index, boolean internURI) {
        Attribute ns = mNamespaces[index];
        // All declaration values live concatenated in one shared builder.
        String full = mNamespaceBuilder.getAllValues();
        String uri;
        // NOTE(review): mNsCount == 0 apparently means "single/unsplit value" here,
        // even though a namespace exists at index — confirm the counter's semantics.
        if (mNsCount == 0) {
            uri = full;
        } else {
            ++index;
            if (index < mNsCount) { // not the last declaration: next entry's start bounds ours
                int endOffset = mNamespaces[index].mValueStartOffset;
                uri = ns.getValue(full, endOffset);
            } else { // last declaration: runs to the end of the buffer
                uri = ns.getValue(full);
            }
        }
        if (internURI && uri.length() > 0) {
            uri = sInternCache.intern(uri);
        }
        ns.mNamespaceURI = uri;
        return ns;
    }
}
public class NodePingUtil { /** * Internally ping a node . This should probably use the connections from the nodes pool , if there are any available . * @ param node the node * @ param callback the ping callback * @ param ioThread the xnio i / o thread * @ param bufferPool the xnio buffer pool * @ param client the undertow client * @ param xnioSsl the ssl setup * @ param options the options */ static void internalPingNode ( Node node , PingCallback callback , NodeHealthChecker healthChecker , XnioIoThread ioThread , ByteBufferPool bufferPool , UndertowClient client , XnioSsl xnioSsl , OptionMap options ) { } }
final URI uri = node . getNodeConfig ( ) . getConnectionURI ( ) ; final long timeout = node . getNodeConfig ( ) . getPing ( ) ; final RequestExchangeListener exchangeListener = new RequestExchangeListener ( callback , healthChecker , true ) ; final HttpClientPingTask r = new HttpClientPingTask ( uri , exchangeListener , ioThread , client , xnioSsl , bufferPool , options ) ; // Schedule timeout task scheduleCancelTask ( ioThread , exchangeListener , timeout , TimeUnit . SECONDS ) ; ioThread . execute ( r ) ;
public class CmdLineParser { /** * Reads all lines of a file with the platform encoding . */ private static List < String > readAllLines ( File f ) throws IOException { } }
BufferedReader r = new BufferedReader ( new FileReader ( f ) ) ; try { List < String > result = new ArrayList < String > ( ) ; String line ; while ( ( line = r . readLine ( ) ) != null ) { result . add ( line ) ; } return result ; } finally { r . close ( ) ; }
public class CryptoFileSystemProperties { /** * Constructs { @ code CryptoFileSystemProperties } from a { @ link Map } . * @ param properties the { @ code Map } to convert * @ return the passed in { @ code Map } if already of type { @ code CryptoFileSystemProperties } or a new { @ code CryptoFileSystemProperties } instance holding the values from the { @ code Map } * @ throws IllegalArgumentException if a value in the { @ code Map } does not have the expected type or if a required value is missing */ public static CryptoFileSystemProperties wrap ( Map < String , ? > properties ) { } }
if ( properties instanceof CryptoFileSystemProperties ) { return ( CryptoFileSystemProperties ) properties ; } else { try { return cryptoFileSystemPropertiesFrom ( properties ) . build ( ) ; } catch ( IllegalStateException e ) { throw new IllegalArgumentException ( e ) ; } }
public class ObjectFactory {

    /**
     * Creates an instance of {@link JAXBElement}{@code <}{@link BridgeRoomType}{@code >}.
     *
     * @param value Java instance representing the XML element's value
     * @return the new {@code JAXBElement} wrapping {@code value}
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/citygml/bridge/2.0", name = "BridgeRoom", substitutionHeadNamespace = "http://www.opengis.net/citygml/2.0", substitutionHeadName = "_CityObject")
    public JAXBElement<BridgeRoomType> createBridgeRoom(BridgeRoomType value) {
        return new JAXBElement<BridgeRoomType>(_BridgeRoom_QNAME, BridgeRoomType.class, null, value);
    }
}
public class DefaultSqlConfig {

    /**
     * Obtains a {@code SqlConfig} backed by the given data source.
     *
     * @param dataSource the data source
     * @return a {@code SqlConfig} object
     */
    public static SqlConfig getConfig(final DataSource dataSource) {
        DataSourceConnectionSupplierImpl connectionSupplier = new DataSourceConnectionSupplierImpl(dataSource);
        return new DefaultSqlConfig(connectionSupplier, null);
    }
}
public class LayerMarshaller {

    /**
     * Marshalls the given {@code Layer} into the protocol marshaller, field by field.
     *
     * @param layer              the object to marshall; must not be {@code null}
     * @param protocolMarshaller the target marshaller
     * @throws SdkClientException if {@code layer} is {@code null} or marshalling fails
     */
    public void marshall(Layer layer, ProtocolMarshaller protocolMarshaller) {
        if (layer == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(layer.getLayerDigest(), LAYERDIGEST_BINDING);
            protocolMarshaller.marshall(layer.getLayerAvailability(), LAYERAVAILABILITY_BINDING);
            protocolMarshaller.marshall(layer.getLayerSize(), LAYERSIZE_BINDING);
            protocolMarshaller.marshall(layer.getMediaType(), MEDIATYPE_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, keeping the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class IOUtils {

    /**
     * Returns all the text at the given URL, or {@code null} if reading fails.
     *
     * <p>Exception-free convenience wrapper around {@code slurpURL(URL)}; failures
     * are printed to stderr rather than propagated (by design, per the method name).
     */
    public static String slurpURLNoExceptions(URL u) {
        try {
            return slurpURL(u);
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }
}
public class DoubleArrayTrie {

    /**
     * Common-prefix search: finds every dictionary entry that is a prefix of
     * {@code key} starting at {@code pos}.
     *
     * @param key     the query string
     * @param pos     start position within the string
     * @param len     length to scan; {@code <= 0} means the full string length
     * @param nodePos start position in {@code base}; {@code <= 0} means the root (0)
     * @return a list of the word indexes of all matching entries (possibly empty)
     */
    public List<Integer> commonPrefixSearch(String key, int pos, int len, int nodePos) {
        if (len <= 0)
            len = key.length();
        if (nodePos <= 0)
            nodePos = 0;
        List<Integer> result = new ArrayList<Integer>();
        char[] keyChars = key.toCharArray();
        int b = base[nodePos];
        int n;
        int p;
        for (int i = pos; i < len; i++) {
            // State transition: p = base[state] + code(char) + 1.
            p = b + (int) (keyChars[i]) + 1;
            if (b == check[p]) // transition is valid only when check[p] matches the source state
                b = base[p];
            else
                return result; // no such transition: no further prefixes can match
            // Probe the end-of-word marker of the new state.
            p = b;
            n = base[p];
            if (b == check[p] && n < 0) { // negative base at a self-checked slot marks a stored word
                result.add(-n - 1);
            }
        }
        return result;
    }
}
public class MonthView { /** * Calculates the day that the given x position is in , accounting for week * number . * @ param x The x position of the touch event * @ return The day number */ protected int getInternalDayFromLocation ( float x , float y ) { } }
int dayStart = mEdgePadding ; if ( x < dayStart || x > mWidth - mEdgePadding ) { return - 1 ; } // Selection is ( x - start ) / ( pixels / day ) = = ( x - s ) * day / pixels int row = ( int ) ( y - getMonthHeaderSize ( ) ) / mRowHeight ; int column = ( int ) ( ( x - dayStart ) * mNumDays / ( mWidth - dayStart - mEdgePadding ) ) ; int day = column - findDayOffset ( ) + 1 ; day += row * mNumDays ; return day ;
public class DefaultResourceResolver {

    /**
     * Do the standard resource resolving of sSystemId relative to sBaseURI.
     *
     * @param sSystemId the resource to search; may be {@code null} if base URI is set
     * @param sBaseURI  the base URI from where the search is initiated; may be
     *                  {@code null} if systemId is set
     * @return the non-{@code null} resource; may be non-existing!
     * @throws UncheckedIOException in case the file resolution (to an absolute file) fails
     */
    @Nonnull
    public static IReadableResource getResolvedResource(@Nullable final String sSystemId, @Nullable final String sBaseURI) {
        // Delegate to the overload with no explicit class loader.
        return getResolvedResource(sSystemId, sBaseURI, (ClassLoader) null);
    }
}
public class ActivityTypeInfos { /** * List of activity type information . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setTypeInfos ( java . util . Collection ) } or { @ link # withTypeInfos ( java . util . Collection ) } if you want to * override the existing values . * @ param typeInfos * List of activity type information . * @ return Returns a reference to this object so that method calls can be chained together . */ public ActivityTypeInfos withTypeInfos ( ActivityTypeInfo ... typeInfos ) { } }
if ( this . typeInfos == null ) { setTypeInfos ( new java . util . ArrayList < ActivityTypeInfo > ( typeInfos . length ) ) ; } for ( ActivityTypeInfo ele : typeInfos ) { this . typeInfos . add ( ele ) ; } return this ;
public class FlatpackWriterUtil { /** * Creates a Mapping for a WriterFactory for the given list of columns . * @ param colsAsCsv comma - separated column names * @ return a map to be used in , for instance , DelimiterWriterFactory */ public static Map < String , Object > buildParametersForColumns ( final String colsAsCsv ) { } }
final Map < String , Object > mapping = new HashMap < > ( ) ; mapping . put ( FPConstants . DETAIL_ID , buildColumns ( colsAsCsv ) ) ; return mapping ;
public class XMLParser {

    /**
     * Parses a start tag (or the XML declaration): reads the element name and all
     * attributes, pushes the element on the stack, and resolves namespaces.
     *
     * @param xmldecl               {@code true} when parsing the {@code <?xml ...?>} declaration
     * @param throwOnResolveFailure whether entity-resolution failures in attribute values throw
     * @throws IOException             on I/O error
     * @throws KriptonRuntimeException on malformed input (unless relaxed mode tolerates it)
     */
    private void parseStartTag(boolean xmldecl, boolean throwOnResolveFailure) throws IOException, KriptonRuntimeException {
        if (!xmldecl) {
            read('<');
        }
        name = readName();
        attributeCount = 0;
        // Attribute loop: runs until '>', '/>' or (for xmldecl) '?>' is seen.
        while (true) {
            skip();
            if (position >= limit && !fillBuffer(1)) {
                checkRelaxed(UNEXPECTED_EOF);
                return;
            }
            int c = buffer[position];
            if (xmldecl) {
                if (c == '?') {
                    position++;
                    read('>');
                    return;
                }
            } else {
                if (c == '/') {
                    // Self-closing ("degenerated") element: <name ... />
                    degenerated = true;
                    position++;
                    skip();
                    read('>');
                    break;
                } else if (c == '>') {
                    position++;
                    break;
                }
            }
            String attrName = readName();
            // Attributes are stored flat, four slots each: ns, prefix, name, value.
            int i = (attributeCount++) * 4;
            attributes = ensureCapacity(attributes, i + 4);
            attributes[i] = "";
            attributes[i + 1] = null;
            attributes[i + 2] = attrName;
            skip();
            if (position >= limit && !fillBuffer(1)) {
                checkRelaxed(UNEXPECTED_EOF);
                return;
            }
            if (buffer[position] == '=') {
                position++;
                skip();
                if (position >= limit && !fillBuffer(1)) {
                    checkRelaxed(UNEXPECTED_EOF);
                    return;
                }
                char delimiter = buffer[position];
                if (delimiter == '\'' || delimiter == '"') {
                    position++;
                } else if (relaxed) {
                    // Relaxed mode accepts unquoted values, terminated by whitespace.
                    delimiter = ' ';
                } else {
                    throw new KriptonRuntimeException("attr value delimiter missing!", true, this.getLineNumber(), this.getColumnNumber(), getPositionDescription(), null);
                }
                attributes[i + 3] = readValue(delimiter, true, throwOnResolveFailure, ValueContext.ATTRIBUTE);
                if (delimiter != ' ') {
                    position++; // end quote
                }
            } else if (relaxed) {
                // Valueless attribute: relaxed mode uses the name as its own value.
                attributes[i + 3] = attrName;
            } else {
                checkRelaxed("Attr.value missing f. " + attrName);
                attributes[i + 3] = attrName;
            }
        }
        // Push the element; the stack stores four slots per depth: ns, prefix, name, raw name.
        int sp = depth++ * 4;
        elementStack = ensureCapacity(elementStack, sp + 4);
        elementStack[sp + 3] = name;
        if (depth >= nspCounts.length) {
            int[] bigger = new int[depth + 4];
            System.arraycopy(nspCounts, 0, bigger, 0, nspCounts.length);
            nspCounts = bigger;
        }
        // New depth inherits the namespace count of its parent.
        nspCounts[depth] = nspCounts[depth - 1];
        if (processNsp) {
            adjustNsp();
        } else {
            namespace = "";
        }
        // For consistency with Expat, add default attributes after fixing namespaces.
        if (defaultAttributes != null) {
            Map<String, String> elementDefaultAttributes = defaultAttributes.get(name);
            if (elementDefaultAttributes != null) {
                for (Map.Entry<String, String> entry : elementDefaultAttributes.entrySet()) {
                    if (getAttributeValue(null, entry.getKey()) != null) {
                        continue; // an explicit value overrides the default
                    }
                    int i = (attributeCount++) * 4;
                    attributes = ensureCapacity(attributes, i + 4);
                    attributes[i] = "";
                    attributes[i + 1] = null;
                    attributes[i + 2] = entry.getKey();
                    attributes[i + 3] = entry.getValue();
                }
            }
        }
        elementStack[sp] = namespace;
        elementStack[sp + 1] = prefix;
        elementStack[sp + 2] = name;
    }
}
public class LinkedHashMapPro { /** * Returns < tt > true < / tt > if this map maps one or more keys to the specified value . * @ param value value whose presence in this map is to be tested * @ return < tt > true < / tt > if this map maps one or more keys to the specified value */ @ Override public boolean containsValue ( Object value ) { } }
// Overridden to take advantage of faster iterator if ( value == null ) { for ( Entry e = header . after ; e != header ; e = e . after ) if ( e . value == null ) return true ; } else { for ( Entry e = header . after ; e != header ; e = e . after ) if ( value . equals ( e . value ) ) return true ; } return false ;
public class QuickLauncher { /** * Returns the < code > Status < / code > of the node . An empty status is returned * if status file is missing and < code > emptyForMissing < / code > argument is true * else null is returned . */ private void readStatus ( boolean emptyForMissing , final Path statusFile ) { } }
this . status = null ; if ( Files . exists ( statusFile ) ) { // try some number of times if dsMsg is null for ( int i = 1 ; i <= 3 ; i ++ ) { this . status = Status . spinRead ( baseName , statusFile ) ; if ( this . status . dsMsg != null ) break ; } } if ( this . status == null && emptyForMissing ) { this . status = Status . create ( baseName , Status . SHUTDOWN , 0 , statusFile ) ; }
public class FNNRGImpl {

    /**
     * Sets the value of the feature identified by {@code featureID};
     * unknown features are delegated to the superclass.
     *
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case AfplibPackage.FNNRG__GCGID:
                setGCGID((String) newValue);
                return;
            case AfplibPackage.FNNRG__TS_OFFSET:
                setTSOffset((Integer) newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
}
public class SignatureMessageServiceImpl {

    /**
     * If a {@code SignMessage} exists the method will decrypt it (if needed).
     *
     * <p>Also reconciles the requested authentication context URIs with the
     * presence/absence of a SignMessage:
     * <ul>
     *   <li>Signature service, no SignMessage: sigmessage LoA URIs are removed;
     *       if none remain the request is rejected.</li>
     *   <li>Ordinary SP with a SignMessage: the extension is ignored and removed.</li>
     *   <li>Signature service with a SignMessage: decrypt, verify the mime type is
     *       displayable, and — when MustShow is set — require a sigmessage LoA.</li>
     * </ul>
     *
     * @param context the profile request context
     * @throws ExternalAutenticationErrorCodeException on unsupported/invalid requests
     */
    @Override
    public void processRequest(ProfileRequestContext<?, ?> context) throws ExternalAutenticationErrorCodeException {
        final String logId = this.getLogString(context);
        boolean isSignatureService = this.isSignatureServicePeer(context);
        SignMessageContext signMessageContext = this.getSignMessageContext(context);
        if (signMessageContext == null) {
            // If the peer is a signature service and has only requested a sigmessage LoA we report an error.
            if (isSignatureService) {
                AuthnContextClassContext authnContextClassContext = this.authnContextService.getAuthnContextClassContext(context);
                for (String loa : authnContextClassContext.getAuthnContextClassRefs()) {
                    if (this.isSignMessageURI(loa)) {
                        log.info("SP has requested '{}' but did not include SignMessage, removing ... [{}]", loa, logId);
                        authnContextClassContext.deleteAuthnContextClassRef(loa);
                    }
                }
                if (authnContextClassContext.isEmpty()) {
                    final String msg = "No valid AuthnContext URI:s were specified in AuthnRequest";
                    log.info("{} - can not proceed [{}]", msg, logId);
                    throw new ExternalAutenticationErrorCodeException(AuthnEventIds.REQUEST_UNSUPPORTED, msg);
                }
            }
        } else {
            // If an ordinary SP included a SignMessage in the request, we simply ignore it.
            if (!isSignatureService) {
                log.warn("Requesting SP is not a signature service, but included SignMessage extension, ignoring ... [{}]", logId);
                AuthenticationContext authnContext = authenticationContextLookupStrategy.apply(context);
                if (authnContext == null) {
                    log.error("No AuthenticationContext available [{}]", this.getLogString(context));
                    throw new ExternalAutenticationErrorCodeException(AuthnEventIds.INVALID_AUTHN_CTX, "Missing AuthenticationContext");
                }
                authnContext.removeSubcontext(SignMessageContext.class);
            }
            // Else, make additional checks and decrypt.
            else {
                // Decrypt the message if it arrived encrypted.
                if (signMessageContext.getSignMessage().getEncryptedMessage() != null) {
                    try {
                        Message cleartextMessage = this.signatureSupportKeyService.decrypt(signMessageContext.getSignMessage());
                        log.debug("SignMessage was successfully decrypted [{}]", logId);
                        signMessageContext.setClearTextMessage(cleartextMessage);
                    } catch (DecryptionException e) {
                        final String msg = String.format("Failed to decrypt SignMessage - %s", e.getMessage());
                        log.error("{} [{}]", msg, logId);
                        throw new ExternalAutenticationErrorCodeException(ExtAuthnEventIds.SIGN_MESSAGE_DECRYPTION_ERROR, msg);
                    }
                } else {
                    log.debug("SignMessage was not encrypted [{}]", logId);
                }
                if (!this.supportsMimeType(signMessageContext.getMimeType())) {
                    log.warn("IdP does not support display of SignMessage with type '{}' [{}]", signMessageContext.getMimeType(), logId);
                    signMessageContext.setDoDisplayMessage(false);
                    if (signMessageContext.mustShow()) {
                        throw new ExternalAutenticationErrorCodeException(ExtAuthnEventIds.SIGN_MESSAGE_TYPE_NOT_SUPPORTED, "Unsupported SignMessage mime type");
                    }
                }
                // If the SignMessage element from the signature request includes a MustShow attribute with the value true, the
                // Signature Service MUST require that the provided sign message is displayed by the Identity Provider, by
                // including a sigmessage authentication context URI.
                if (signMessageContext.mustShow()) {
                    AuthnContextClassContext authnContextClassContext = this.authnContextService.getAuthnContextClassContext(context);
                    for (String loa : authnContextClassContext.getAuthnContextClassRefs()) {
                        if (!this.isSignMessageURI(loa)) {
                            log.info("SP has requested the SignMessage must be displayed, removing '{}' ... [{}]", loa, logId);
                            authnContextClassContext.deleteAuthnContextClassRef(loa);
                        }
                    }
                    if (authnContextClassContext.isEmpty()) {
                        final String msg = "No valid sigmessage AuthnContext URI:s were specified in AuthnRequest";
                        log.info("{} - can not proceed [{}]", msg, logId);
                        throw new ExternalAutenticationErrorCodeException(AuthnEventIds.REQUEST_UNSUPPORTED, msg);
                    }
                }
                // NOTE(review): this unconditionally overrides the
                // setDoDisplayMessage(false) set above for unsupported mime types —
                // confirm this is the intended precedence.
                signMessageContext.setDoDisplayMessage(true);
            }
        }
    }
}
public class AbstractDataDistributionType {

    /**
     * {@inheritDoc}
     *
     * <p>Returns the data node for {@code dataId}, creating it (and any missing
     * ancestors) when it does not exist yet. Lookup failures are signalled by
     * {@link PathNotFoundException}, which is treated as "create it" rather than
     * an error.
     */
    public Node getOrCreateDataNode(Node rootNode, String dataId, String nodeType, List<String> mixinTypes, Map<String, String[]> permissions) throws RepositoryException {
        try {
            // Fast path: the node already exists.
            return getDataNode(rootNode, dataId);
        } catch (PathNotFoundException e) {
            // Expected when the node is absent; fall through to creation.
            if (LOG.isTraceEnabled()) {
                LOG.trace("An exception occurred: " + e.getMessage());
            }
        }
        // The node could not be found so we need to create it, walking/creating
        // each ancestor segment in turn.
        Node node = rootNode;
        List<String> ancestors = getAncestors(dataId);
        for (int i = 0, length = ancestors.size(); i < length; i++) {
            String nodeName = ancestors.get(i);
            try {
                node = node.getNode(nodeName);
                continue;
            } catch (PathNotFoundException e) {
                if (LOG.isTraceEnabled()) {
                    LOG.trace("An exception occurred: " + e.getMessage());
                }
            }
            // This segment doesn't exist: create it. The last segment is the data
            // node itself (flag i == length - 1).
            node = createNode(node, nodeName, nodeType, mixinTypes, permissions, i == length - 1, true);
        }
        return node;
    }
}
public class DiffDisplayerScriptService { /** * Builds an in - line diff between two versions of a text . * @ param previous the previous version * @ param next the next version * @ return the list of in - line diff chunks */ public List < InlineDiffChunk < Character > > inline ( String previous , String next ) { } }
setError ( null ) ; try { return this . inlineDiffDisplayer . display ( this . diffManager . diff ( this . charSplitter . split ( previous ) , this . charSplitter . split ( next ) , null ) ) ; } catch ( DiffException e ) { setError ( e ) ; return null ; }
public class BusLayerConstants { /** * Replies if the bus stops should be drawn or not . * @ return < code > true < / code > if the bus stops are drawable ; * otherwise < code > false < / code > */ @ Pure public static boolean isBusStopDrawable ( ) { } }
final Preferences prefs = Preferences . userNodeForPackage ( BusLayerConstants . class ) ; if ( prefs != null ) { return prefs . getBoolean ( "DRAW_BUS_STOPS" , DEFAULT_BUS_STOP_DRAWING ) ; // $ NON - NLS - 1 $ } return DEFAULT_BUS_STOP_DRAWING ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link MphantomType } { @ code > } } */ @ XmlElementDecl ( namespace = "http://www.w3.org/1998/Math/MathML" , name = "mphantom" ) public JAXBElement < MphantomType > createMphantom ( MphantomType value ) { } }
return new JAXBElement < MphantomType > ( _Mphantom_QNAME , MphantomType . class , null , value ) ;
public class NetworkConfig { /** * Returns the PEM ( as a String ) from either a path or a pem field */ private static String extractPemString ( JsonObject json , String fieldName , String msgPrefix ) throws NetworkConfigurationException { } }
String path = null ; String pemString = null ; JsonObject jsonField = getJsonValueAsObject ( json . get ( fieldName ) ) ; if ( jsonField != null ) { path = getJsonValueAsString ( jsonField . get ( "path" ) ) ; pemString = getJsonValueAsString ( jsonField . get ( "pem" ) ) ; } if ( path != null && pemString != null ) { throw new NetworkConfigurationException ( format ( "%s should not specify both %s path and pem" , msgPrefix , fieldName ) ) ; } if ( path != null ) { // Determine full pathname and ensure the file exists File pemFile = new File ( path ) ; String fullPathname = pemFile . getAbsolutePath ( ) ; if ( ! pemFile . exists ( ) ) { throw new NetworkConfigurationException ( format ( "%s: %s file %s does not exist" , msgPrefix , fieldName , fullPathname ) ) ; } try ( FileInputStream stream = new FileInputStream ( pemFile ) ) { pemString = IOUtils . toString ( stream , "UTF-8" ) ; } catch ( IOException ioe ) { throw new NetworkConfigurationException ( format ( "Failed to read file: %s" , fullPathname ) , ioe ) ; } } return pemString ;
public class ItemRule { /** * Sets a value to this Properties type item . * @ param properties the properties */ public void setValue ( Properties properties ) { } }
if ( properties == null ) { throw new IllegalArgumentException ( "properties must not be null" ) ; } if ( type == null ) { type = ItemType . PROPERTIES ; } if ( ! isMappableType ( ) ) { throw new IllegalArgumentException ( "The type of this item must be 'properties' or 'map'" ) ; } tokensMap = new LinkedHashMap < > ( ) ; for ( String key : properties . stringPropertyNames ( ) ) { Object o = properties . get ( key ) ; if ( o instanceof Token [ ] ) { tokensMap . put ( key , ( Token [ ] ) o ) ; } else if ( o instanceof Token ) { Token [ ] tokens = new Token [ ] { ( Token ) o } ; tokensMap . put ( key , tokens ) ; } else { Token [ ] tokens = TokenParser . makeTokens ( o . toString ( ) , isTokenize ( ) ) ; putValue ( name , tokens ) ; } }
public class CmsPublishProject { /** * Returns the parent folder for the publish process . < p > * @ return the parent folder for the publish process */ private String getParentFolder ( ) { } }
String relativeTo ; if ( isDirectPublish ( ) ) { relativeTo = CmsResource . getParentFolder ( getResourceList ( ) . get ( 0 ) ) ; } else { relativeTo = getCms ( ) . getRequestContext ( ) . getSiteRoot ( ) + "/" ; } return relativeTo ;
public class AutoValueGsonExtension { /** * Converts the ExecutableElement properties to TypeName properties */ private Map < String , TypeName > convertPropertiesToTypes ( Map < String , ExecutableElement > properties ) { } }
Map < String , TypeName > types = new LinkedHashMap < > ( ) ; for ( Map . Entry < String , ExecutableElement > entry : properties . entrySet ( ) ) { ExecutableElement el = entry . getValue ( ) ; types . put ( entry . getKey ( ) , TypeName . get ( el . getReturnType ( ) ) ) ; } return types ;
public class CloudMe { /** * Gets the folders tree structure beginning from the root folder . * @ return the root folder */ private CMFolder loadFoldersStructure ( ) { } }
CMFolder rootFolder = new CMFolder ( getRootId ( ) , "root" ) ; HttpPost request = buildSoapRequest ( "getFolderXML" , "<folder id='" + rootFolder . getId ( ) + "'/>" ) ; CResponse response = retryStrategy . invokeRetry ( getApiRequestInvoker ( request , null ) ) ; Element rootElement = findRootFolderElement ( response . asDom ( ) ) ; scanFolderLevel ( rootElement , rootFolder ) ; return rootFolder ;
public class OverviewPlot {
    /**
     * Trigger a redraw, but avoid excessive redraws.
     *
     * Refresh requests are coalesced: each call installs a fresh Runnable as the
     * pending refresh, and a Runnable only performs the refresh if it is still
     * the pending one when it finally runs. Stale scheduled runnables therefore
     * become no-ops.
     */
    public final void lazyRefresh() {
        if (plot == null) {
            LOG.warning("'lazyRefresh' called before initialized!");
            return;
        }
        LOG.debug("Scheduling refresh.");
        // NOTE: must stay an anonymous class, not a lambda — `this` inside run()
        // refers to the Runnable instance itself, which is what compareAndSet
        // checks against to detect whether this request is still the latest.
        Runnable pr = new Runnable() {
            @Override
            public void run() {
                // Only refresh if no newer request superseded this one.
                if (OverviewPlot.this.pendingRefresh.compareAndSet(this, null)) {
                    OverviewPlot.this.refresh();
                }
            }
        };
        // Publish this runnable as the latest pending request, then schedule it.
        OverviewPlot.this.pendingRefresh.set(pr);
        plot.scheduleUpdate(pr);
    }
}
public class SplitMergeLineFitLoop {
    /**
     * Recursively splits pixels between indexStart to indexStart+length. A split happens if there is a pixel
     * more than the desired distance away from the two end points. Results are placed into 'splits'.
     *
     * Indices are taken modulo N because the pixel list is a closed (circular)
     * contour. Split indices are added to 'splits' in left-to-right order of the
     * recursion (left half first, then the split point, then the right half).
     */
    protected void splitPixels(int indexStart, int length) {
        // too short to split
        if (length < minimumSideLengthPixel)
            return;
        // end point of the line segment, wrapped around the circular contour
        int indexEnd = (indexStart + length) % N;
        // offset of the best split candidate, or negative if no pixel is far
        // enough from the end points to justify a split
        int splitOffset = selectSplitOffset(indexStart, length);
        if (splitOffset >= 0) {
            // recurse on the left half
            splitPixels(indexStart, splitOffset);
            int indexSplit = (indexStart + splitOffset) % N;
            splits.add(indexSplit);
            // recurse on the right half, measuring its length along the circle
            splitPixels(indexSplit, circularDistance(indexSplit, indexEnd));
        }
    }
}
public class AWSApplicationDiscoveryClient {
    /**
     * Normal invoke with authentication. Credentials are required and may be overriden at the request level.
     *
     * Resolves the effective credentials provider (request-level override wins
     * over the client-level provider) before delegating to doInvoke.
     */
    private <X, Y extends AmazonWebServiceRequest> Response<X> invoke(Request<Y> request,
            HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler,
            ExecutionContext executionContext, URI cachedEndpoint, URI uriFromEndpointTrait) {
        executionContext.setCredentialsProvider(
            CredentialUtils.getCredentialsProvider(request.getOriginalRequest(), awsCredentialsProvider));
        return doInvoke(request, responseHandler, executionContext, cachedEndpoint, uriFromEndpointTrait);
    }
}
public class Configuration { /** * Loads defaults from { @ link # PROPERTY _ FILE } . This method should only be * called outside of WAS therefore no nls support should be required * @ throws IOException */ public void loadFromPropertyFile ( ) throws IOException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "loadFromPropertyFile()" ) ; String key = null ; Properties config = new Properties ( ) ; InputStream is = this . getClass ( ) . getResourceAsStream ( PROPERTY_FILE ) ; if ( is == null ) { throw new IOException ( "Resource " + PROPERTY_FILE + " not found in current classloader" ) ; } config . load ( is ) ; Enumeration vEnum = config . propertyNames ( ) ; while ( vEnum . hasMoreElements ( ) ) { key = ( String ) vEnum . nextElement ( ) ; if ( key . equals ( "datasourceClassname" ) ) datasourceClassname = config . getProperty ( key ) ; else if ( key . equals ( "username" ) ) datasourceUsername = config . getProperty ( key ) ; else if ( key . equals ( "password" ) ) datasourcePassword = config . getProperty ( key ) ; else if ( key . equals ( "schemaname" ) ) schemaName = config . getProperty ( key ) ; else if ( key . equals ( "poolSize" ) ) numberOfPooledConnections = Integer . parseInt ( config . getProperty ( key ) ) ; else if ( key . equals ( "verbose" ) ) verbose = Boolean . valueOf ( config . getProperty ( key ) ) . booleanValue ( ) ; else datasourceProperties . setProperty ( key , config . getProperty ( key ) ) ; } if ( datasourceClassname == null ) throw new IllegalStateException ( "The properties file must specify datasourceClassname" ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "loadFromPropertyFile()" ) ;
public class TypeExtractor {
    /**
     * Returns the unary operator's return type.
     *
     * <p>This method can extract a type in 4 different ways:
     *
     * <p>1. By using the generics of the base class like MyFunction&lt;X, Y, Z, IN, OUT&gt;.
     * This is what outputTypeArgumentIndex (in this example "4") is good for.
     *
     * <p>2. By using input type inference SubMyFunction&lt;T, String, String, String, T&gt;.
     * This is what inputTypeArgumentIndex (in this example "0") and inType is good for.
     *
     * <p>3. By using the static method that a compiler generates for Java lambdas.
     * This is what lambdaOutputTypeArgumentIndices is good for. Given that MyFunction has
     * the following single abstract method:
     * <pre><code>
     * void apply(IN value, Collector&lt;OUT&gt; value)
     * </code></pre>
     * Lambda type indices allow the extraction of a type from lambdas. To extract the
     * output type <b>OUT</b> from the function one should pass {@code new int[] {1, 0}}:
     * "1" for selecting the parameter and 0 for the first generic in this type.
     * Use {@code TypeExtractor.NO_INDEX} for selecting the return type of the lambda for
     * extraction, or if the class cannot be a lambda because it is not a single abstract
     * method interface.
     *
     * <p>4. By using interfaces such as {@link TypeInfoFactory} or {@link ResultTypeQueryable}.
     *
     * @param function Function to extract the return type from
     * @param baseClass Base class of the function
     * @param inputTypeArgumentIndex Index of input generic type in the base class specification (ignored if inType is null)
     * @param outputTypeArgumentIndex Index of output generic type in the base class specification
     * @param lambdaOutputTypeArgumentIndices Table of indices of the type argument specifying the output type. See example.
     * @param inType Type of the input elements (in case of an iterable, it is the element type) or null
     * @param functionName Function name
     * @param allowMissing Can the type information be missing (this generates a MissingTypeInfo for postponing an exception)
     * @param <IN> Input type
     * @param <OUT> Output type
     * @return TypeInformation of the return type of the function
     */
    @SuppressWarnings("unchecked")
    @PublicEvolving
    public static <IN, OUT> TypeInformation<OUT> getUnaryOperatorReturnType(
            Function function,
            Class<?> baseClass,
            int inputTypeArgumentIndex,
            int outputTypeArgumentIndex,
            int[] lambdaOutputTypeArgumentIndices,
            TypeInformation<IN> inType,
            String functionName,
            boolean allowMissing) {
        Preconditions.checkArgument(inType == null || inputTypeArgumentIndex >= 0, "Input type argument index was not provided");
        Preconditions.checkArgument(outputTypeArgumentIndex >= 0, "Output type argument index was not provided");
        Preconditions.checkArgument(lambdaOutputTypeArgumentIndices != null, "Indices for output type arguments within lambda not provided");
        // explicit result type has highest precedence
        if (function instanceof ResultTypeQueryable) {
            return ((ResultTypeQueryable<OUT>) function).getProducedType();
        }
        // perform extraction
        try {
            final LambdaExecutable exec;
            try {
                exec = checkAndExtractLambda(function);
            } catch (TypeExtractionException e) {
                throw new InvalidTypesException("Internal error occurred.", e);
            }
            if (exec != null) {
                // the function is a Java lambda
                // parameters must be accessed from behind, since JVM can add additional parameters e.g. when using local variables inside lambda function
                // paramLen is the total number of parameters of the provided lambda, it includes parameters added through closure
                final int paramLen = exec.getParameterTypes().length;
                final Method sam = TypeExtractionUtils.getSingleAbstractMethod(baseClass);
                // number of parameters the SAM of implemented interface has; the parameter indexing applies to this range
                final int baseParametersLen = sam.getParameterTypes().length;
                final Type output;
                if (lambdaOutputTypeArgumentIndices.length > 0) {
                    // extraction case 3: navigate the lambda's parameter/return types by index
                    output = TypeExtractionUtils.extractTypeFromLambda(baseClass, exec, lambdaOutputTypeArgumentIndices, paramLen, baseParametersLen);
                } else {
                    output = exec.getReturnType();
                    TypeExtractionUtils.validateLambdaType(baseClass, output);
                }
                return new TypeExtractor().privateCreateTypeInfo(output, inType, null);
            } else {
                // the function is a regular class: extraction cases 1 and 2
                if (inType != null) {
                    validateInputType(baseClass, function.getClass(), inputTypeArgumentIndex, inType);
                }
                return new TypeExtractor().privateCreateTypeInfo(baseClass, function.getClass(), outputTypeArgumentIndex, inType, null);
            }
        } catch (InvalidTypesException e) {
            if (allowMissing) {
                // postpone the failure until the type info is actually needed
                return (TypeInformation<OUT>) new MissingTypeInfo(functionName != null ? functionName : function.toString(), e);
            } else {
                throw e;
            }
        }
    }
}
public class NumberPath { /** * Method to construct the less than or equals expression for double * @ param value the double * @ return Expression */ public Expression < Double > lte ( double value ) { } }
String valueString = "'" + value + "'" ; return new Expression < Double > ( this , Operation . lte , valueString ) ;
public class WComponentGroup { /** * Add a component to this group . * @ param component the component to add . */ public void addToGroup ( final T component ) { } }
ComponentGroupModel model = getOrCreateComponentModel ( ) ; model . components . add ( component ) ; MemoryUtil . checkSize ( model . components . size ( ) , this . getClass ( ) . getSimpleName ( ) ) ;
public class SourceLineAnnotation {
    /**
     * Factory method for creating a source line annotation describing the
     * source line number for the instruction being visited by given visitor.
     *
     * Convenience overload: delegates using the visitor's class context and
     * current program counter.
     *
     * @param visitor
     *            a DismantleBytecode visitor which is visiting the method
     * @return the SourceLineAnnotation, or null if we do not have line number
     *         information for the instruction
     */
    public static SourceLineAnnotation fromVisitedInstruction(BytecodeScanningDetector visitor) {
        return fromVisitedInstruction(visitor.getClassContext(), visitor, visitor.getPC());
    }
}
public class ListUniqueProblemsResult {
    /**
     * Information about the unique problems, keyed by result status.
     *
     * Allowed status keys include:
     * <ul>
     * <li>PENDING: A pending condition.</li>
     * <li>PASSED: A passing condition.</li>
     * <li>WARNED: A warning condition.</li>
     * <li>FAILED: A failed condition.</li>
     * <li>SKIPPED: A skipped condition.</li>
     * <li>ERRORED: An error condition.</li>
     * <li>STOPPED: A stopped condition.</li>
     * </ul>
     *
     * @return the map of status to unique problems (simple accessor; no copy is made)
     */
    public java.util.Map<String, java.util.List<UniqueProblem>> getUniqueProblems() {
        return uniqueProblems;
    }
}
public class RunnersApi { /** * Get a Stream all runners ( specific and shared ) available in the project . Shared runners are listed if at least one * shared runner is defined and shared runners usage is enabled in the project ' s settings . * < pre > < code > GitLab Endpoint : GET / projects / : id / runners < / code > < / pre > * @ param projectIdOrPath the project in the form of an Integer ( ID ) , String ( path ) , or Project instance * @ return a Stream of all Runner available in the project * @ throws GitLabApiException if any exception occurs */ public Stream < Runner > getProjectRunnersStream ( Object projectIdOrPath ) throws GitLabApiException { } }
return ( getProjectRunners ( projectIdOrPath , getDefaultPerPage ( ) ) . stream ( ) ) ;
public class CPInstanceUtil {
    /**
     * Returns the first cp instance in the ordered set where groupId = &#63;.
     *
     * Pure delegation to the persistence layer.
     *
     * @param groupId the group ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching cp instance, or <code>null</code> if a matching cp instance could not be found
     */
    public static CPInstance fetchByGroupId_First(long groupId, OrderByComparator<CPInstance> orderByComparator) {
        return getPersistence().fetchByGroupId_First(groupId, orderByComparator);
    }
}
public class Ifc2x3tc1PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * Lazily resolves the enum from the registered package by its fixed,
     * generator-assigned classifier index (898). Do not edit: EMF-generated.
     *
     * @generated
     */
    public EEnum getIfcSlabTypeEnum() {
        if (ifcSlabTypeEnumEEnum == null) {
            ifcSlabTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(898);
        }
        return ifcSlabTypeEnumEEnum;
    }
}
public class AuroraCLIController {
    /**
     * Runs the given aurora command, logging details on failure.
     *
     * NOTE(review): the {@code stdout} parameter is never passed to
     * ShellUtils.runProcess — only stderr is captured — so the STDOUT logged
     * below is only whatever the caller pre-filled. Confirm whether ShellUtils
     * offers a stdout-capturing overload that should be used here.
     *
     * @param auroraCmd command and arguments
     * @param stdout buffer for standard output (currently not populated here)
     * @param stderr buffer for standard error; a throwaway buffer is used when null
     * @return true if the process exited with status 0
     */
    @VisibleForTesting
    boolean runProcess(List<String> auroraCmd, StringBuilder stdout, StringBuilder stderr) {
        int status = ShellUtils.runProcess(auroraCmd.toArray(new String[auroraCmd.size()]), stderr != null ? stderr : new StringBuilder());
        if (status != 0) {
            LOG.severe(String.format("Failed to run process. Command=%s, STDOUT=%s, STDERR=%s", auroraCmd, stdout, stderr));
        }
        return status == 0;
    }
}
public class SqlParserImpl { /** * Parse an ELSE node . */ protected void parseElse ( ) { } }
Node parent = peek ( ) ; if ( ! ( parent instanceof IfNode ) ) { return ; } IfNode ifNode = ( IfNode ) pop ( ) ; ElseNode elseNode = new ElseNode ( ) ; ifNode . setElseNode ( elseNode ) ; push ( elseNode ) ; tokenizer . skipWhitespace ( ) ;
public class Log4jConfigurer { /** * Returns the list of active loggers . * @ return logger names */ @ Nonnull public String [ ] getLoggerList ( ) { } }
try { Enumeration < Logger > currentLoggers = LogManager . getLoggerRepository ( ) . getCurrentLoggers ( ) ; List < String > loggerNames = new ArrayList < String > ( ) ; while ( currentLoggers . hasMoreElements ( ) ) { loggerNames . add ( currentLoggers . nextElement ( ) . getName ( ) ) ; } return loggerNames . toArray ( new String [ 0 ] ) ; } catch ( RuntimeException e ) { logger . warn ( "Exception getting logger names" , e ) ; throw e ; }
public class HttpFileServiceBuilder { /** * Returns a newly - created { @ link HttpFileService } based on the properties of this builder . */ public HttpFileService build ( ) { } }
return new HttpFileService ( new HttpFileServiceConfig ( vfs , clock , maxCacheEntries , maxCacheEntrySizeBytes , serveCompressedFiles , autoIndex , headers ( ) ) ) ;
public class CmsListMetadata { /** * Adds a list item independent action . < p > * @ param action the action */ public void addIndependentAction ( I_CmsListAction action ) { } }
action . setListId ( getListId ( ) ) ; m_indepActions . addIdentifiableObject ( action . getId ( ) , action ) ;
public class KdTreeMemory { /** * Request a leaf node be returned . All data parameters will be automatically assigned appropriate * values for a leaf . */ public KdTree . Node requestNode ( P point , int index ) { } }
KdTree . Node n = requestNode ( ) ; n . point = point ; n . index = index ; n . split = - 1 ; return n ;
public class UniversalTimeScale {
    /**
     * Convert a datetime from the universal time scale to a <code>long</code> in the given time scale.
     *
     * Since this calculation requires a divide, we must round. The straightforward
     * way to round — adding half of the divisor — would overflow for values within
     * half the divisor of the limits of <code>long</code> precision. To stay in
     * range we instead subtract (or, for negatives, add) <code>units/2</code>
     * first and compensate by folding the resulting &plusmn;1 into the epoch
     * offset (<code>epochOffsetP1</code> / <code>epochOffsetM1</code>). The
     * <code>minRound</code>/<code>maxRound</code> thresholds select which of the
     * two equivalent formulations is safe for the given magnitude.
     *
     * @param universalTime The datetime in the universal time scale
     * @param timeScale The time scale to convert to
     * @return The datetime converted to the given time scale
     */
    public static long toLong(long universalTime, int timeScale) {
        // Also validates that universalTime is representable in the target scale.
        TimeScaleData data = toRangeCheck(universalTime, timeScale);
        if (universalTime < 0) {
            if (universalTime < data.minRound) {
                // Near the negative limit: add the half-unit, offset folded into epochOffsetP1.
                return (universalTime + data.unitsRound) / data.units - data.epochOffsetP1;
            }
            // Safe range: subtract the half-unit to round away from zero.
            return (universalTime - data.unitsRound) / data.units - data.epochOffset;
        }
        if (universalTime > data.maxRound) {
            // Near the positive limit: subtract the half-unit, offset folded into epochOffsetM1.
            return (universalTime - data.unitsRound) / data.units - data.epochOffsetM1;
        }
        // Safe range: add the half-unit to round to nearest.
        return (universalTime + data.unitsRound) / data.units - data.epochOffset;
    }
}
public class Placeholders { /** * Replace placeholders in a string . * @ param string string in which to replace placeholders * @ param uploadInfo upload information data * @ return string with replaced placeholders */ public static String replace ( String string , UploadInfo uploadInfo ) { } }
if ( string == null || string . isEmpty ( ) ) return "" ; String tmp ; tmp = string . replace ( ELAPSED_TIME , uploadInfo . getElapsedTimeString ( ) ) ; tmp = tmp . replace ( PROGRESS , uploadInfo . getProgressPercent ( ) + "%" ) ; tmp = tmp . replace ( UPLOAD_RATE , uploadInfo . getUploadRateString ( ) ) ; tmp = tmp . replace ( UPLOADED_FILES , Integer . toString ( uploadInfo . getSuccessfullyUploadedFiles ( ) . size ( ) ) ) ; tmp = tmp . replace ( TOTAL_FILES , Integer . toString ( uploadInfo . getTotalFiles ( ) ) ) ; return tmp ;
public class Logger {
    /**
     * Issue a log message with parameters with a level of INFO.
     *
     * Delegates to the core doLog with this class's FQCN for caller detection
     * and no throwable.
     *
     * @param message the message
     * @param params the message parameters
     * @deprecated To log a message with parameters, using {@link #infov(String, Object...)} is recommended.
     */
    @Deprecated
    public void info(Object message, Object[] params) {
        doLog(Level.INFO, FQCN, message, params, null);
    }
}
public class Socks5DatagramSocket { /** * Sends the Datagram either through the proxy or directly depending * on current proxy settings and destination address . < BR > * < B > NOTE : < / B > DatagramPacket size should be at least 10 bytes less * than the systems limit . * See documentation on java . net . DatagramSocket * for full details on how to use this method . * @ param dp Datagram to send . * @ throws IOException If error happens with I / O . */ public void send ( DatagramPacket dp ) throws IOException { } }
// If the host should be accessed directly , send it as is . if ( ! server_mode && proxy . isDirect ( dp . getAddress ( ) ) ) { super . send ( dp ) ; // debug ( " Sending directly : " ) ; return ; } byte [ ] head = formHeader ( dp . getAddress ( ) , dp . getPort ( ) ) ; byte [ ] buf = new byte [ head . length + dp . getLength ( ) ] ; byte [ ] data = dp . getData ( ) ; // Merge head and data System . arraycopy ( head , 0 , buf , 0 , head . length ) ; // System . arraycopy ( data , dp . getOffset ( ) , buf , head . length , dp . getLength ( ) ) ; System . arraycopy ( data , 0 , buf , head . length , dp . getLength ( ) ) ; if ( encapsulation != null ) buf = encapsulation . udpEncapsulate ( buf , true ) ; super . send ( new DatagramPacket ( buf , buf . length , relayIP , relayPort ) ) ;
public class LanguageMatcher {
    /**
     * Return all matches and their distances, if they exceed the given threshold.
     * If no match exceeds the threshold this returns an empty list.
     *
     * Parses the raw desired-language string before delegating to the
     * implementation.
     */
    public List<Match> matches(String desiredRaw, int threshold) {
        return matchesImpl(parse(desiredRaw), threshold);
    }
}
public class ReadOnlyHttp2Headers { /** * Create a new read only representation of headers used by clients . * @ param validateHeaders { @ code true } will run validation on each header name / value pair to ensure protocol * compliance . * @ param method The value for { @ link PseudoHeaderName # METHOD } . * @ param path The value for { @ link PseudoHeaderName # PATH } . * @ param scheme The value for { @ link PseudoHeaderName # SCHEME } . * @ param authority The value for { @ link PseudoHeaderName # AUTHORITY } . * @ param otherHeaders A an array of key : value pairs . Must not contain any * < a href = " https : / / tools . ietf . org / html / rfc7540 # section - 8.1.2.1 " > pseudo headers < / a > * or { @ code null } names / values . * A copy will < strong > NOT < / strong > be made of this array . If the contents of this array * may be modified externally you are responsible for passing in a copy . * @ return a new read only representation of headers used by clients . */ public static ReadOnlyHttp2Headers clientHeaders ( boolean validateHeaders , AsciiString method , AsciiString path , AsciiString scheme , AsciiString authority , AsciiString ... otherHeaders ) { } }
return new ReadOnlyHttp2Headers ( validateHeaders , new AsciiString [ ] { PseudoHeaderName . METHOD . value ( ) , method , PseudoHeaderName . PATH . value ( ) , path , PseudoHeaderName . SCHEME . value ( ) , scheme , PseudoHeaderName . AUTHORITY . value ( ) , authority } , otherHeaders ) ;
public class BaseDataSource { /** * Sets a fixed value . * @ param dataSetKey * The { @ link DataSet } key . * @ param entryKey * The entry key . * @ param value * The fixed value . */ @ Override public void setFixedValue ( final String dataSetKey , final String entryKey , final String value ) { } }
Map < String , String > map = fixedValues . get ( dataSetKey ) ; if ( map == null ) { map = Maps . newHashMap ( ) ; fixedValues . put ( dataSetKey , map ) ; } map . put ( entryKey , value ) ; DataSet dataSet = getCurrentDataSet ( dataSetKey ) ; if ( dataSet != null ) { dataSet . setFixedValue ( entryKey , value ) ; }
public class SasFileParser { /** * The function to read a subheader signature at the offset known from its ( { @ link SubheaderPointer } ) . * @ param subheaderPointerOffset the offset at which the subheader is located . * @ return - the subheader signature to search for in the { @ link SasFileParser # SUBHEADER _ SIGNATURE _ TO _ INDEX } * mapping later . * @ throws IOException if reading from the { @ link SasFileParser # sasFileStream } stream is impossible . */ private long readSubheaderSignature ( Long subheaderPointerOffset ) throws IOException { } }
int intOrLongLength = sasFileProperties . isU64 ( ) ? BYTES_IN_LONG : BYTES_IN_INT ; Long [ ] subheaderOffsetMass = { subheaderPointerOffset } ; Integer [ ] subheaderLengthMass = { intOrLongLength } ; List < byte [ ] > subheaderSignatureMass = getBytesFromFile ( subheaderOffsetMass , subheaderLengthMass ) ; return bytesToLong ( subheaderSignatureMass . get ( 0 ) ) ;
public class DoubleArrayTrie { /** * 拓展数组 * @ param newSize * @ return */ private int resize ( int newSize ) { } }
int [ ] base2 = new int [ newSize ] ; int [ ] check2 = new int [ newSize ] ; if ( allocSize > 0 ) { System . arraycopy ( base , 0 , base2 , 0 , allocSize ) ; System . arraycopy ( check , 0 , check2 , 0 , allocSize ) ; } base = base2 ; check = check2 ; return allocSize = newSize ;
public class DBTransaction { /** * Add or set a column with the given long value . The column value is converted to * binary form using Long . toString ( colValue ) , which is then converted to a String using * UTF - 8. * @ param storeName Name of store that owns row . * @ param rowKey Key of row that owns column . * @ param columnName Name of column . * @ param columnValue Column value as a long . */ public void addColumn ( String storeName , String rowKey , String columnName , long columnValue ) { } }
addColumn ( storeName , rowKey , columnName , Long . toString ( columnValue ) ) ;
public class Transform1D { /** * Set the path used by this transformation . * @ param path is the new path * @ param direction is the direction to follow on the path if the path contains only one segment . */ public void setPath ( List < ? extends S > path , Direction1D direction ) { } }
this . path = path == null || path . isEmpty ( ) ? null : new ArrayList < > ( path ) ; this . firstSegmentDirection = detectFirstSegmentDirection ( direction ) ;
public class AbstractIoSession { /** * { @ inheritDoc } */ @ Override public final long getLastIdleTime ( IdleStatus status ) { } }
if ( status == IdleStatus . BOTH_IDLE ) { return lastIdleTimeForBoth ; } if ( status == IdleStatus . READER_IDLE ) { return lastIdleTimeForRead ; } if ( status == IdleStatus . WRITER_IDLE ) { return lastIdleTimeForWrite ; } throw new IllegalArgumentException ( "Unknown idle status: " + status ) ;
public class TransactionLogger { /** * Get ComponentsMultiThread of current instance * @ return componentsMultiThread */ public static ComponentsMultiThread getComponentsMultiThread ( ) { } }
TransactionLogger instance = getInstance ( ) ; if ( instance == null ) { return null ; } return instance . componentsMultiThread ;
public class XMIDoclet { /** * Method that serializes PackageDoc objects by serializing it ' s classes * into & lt ; UML : Class > elements . * @ param pkgs an array of PackageDoc objects */ private void generateUMLClass ( PackageDoc [ ] pkgs ) throws IOException { } }
for ( int i = 0 ; i < pkgs . length ; i ++ ) { // generate statistics generateUMLClass ( pkgs [ i ] . ordinaryClasses ( ) , pkgs [ i ] . name ( ) ) ; }
public class SecurityExpressionUtils { /** * Checks the current user permission . * @ param permission the permission to check * @ param simpleScope the simple scope to check this permission on . * @ return true if user has the given permission for the given simple scope . */ public static boolean hasPermissionOn ( String permission , String simpleScope ) { } }
return securitySupport . isPermitted ( permission , new SimpleScope ( simpleScope ) ) ;
public class Unifier {
    /**
     * Make sure that we really matched all the required features of the unification.
     *
     * For every position in the token sequence, at least one of its equivalence
     * maps must either carry the ignore marker or satisfy all unification
     * features; the overall unification succeeds only when every position
     * contributed exactly one unified token.
     *
     * @param uFeatures Features to be checked
     * @return True if the token sequence has been found.
     * @since 2.5
     */
    public final boolean getFinalUnificationValue(Map<String, List<String>> uFeatures) {
        // Number of token positions that have been successfully unified so far.
        int tokUnified = 0;
        for (int j = 0; j < tokSequence.size(); j++) {
            boolean unifiedTokensFound = false; // assume that nothing has been found
            for (int i = 0; i < tokSequenceEquivalences.get(j).size(); i++) {
                int featUnified = 0;
                if (tokSequenceEquivalences.get(j).get(i).containsKey(UNIFY_IGNORE)) {
                    // Ignore marker: the token counts as unified without feature checks.
                    // Only the first equivalence bumps the counter to avoid double-counting.
                    if (i == 0) {
                        tokUnified++;
                    }
                    unifiedTokensFound = true;
                    continue;
                } else {
                    for (Map.Entry<String, List<String>> feat : uFeatures.entrySet()) {
                        // An empty value list for a present feature resets the streak;
                        // otherwise the feature counts toward full unification.
                        if (tokSequenceEquivalences.get(j).get(i).containsKey(feat.getKey())
                                && tokSequenceEquivalences.get(j).get(i).get(feat.getKey()).isEmpty()) {
                            featUnified = 0;
                        } else {
                            featUnified++;
                        }
                        // All required features matched, and this position has not
                        // already been counted (tokUnified <= j guards against that).
                        if (featUnified == unificationFeats.entrySet().size() && tokUnified <= j) {
                            tokUnified++;
                            unifiedTokensFound = true;
                            break;
                        }
                    }
                }
            }
            if (!unifiedTokensFound) {
                // This position could not be unified at all: overall failure.
                return false;
            }
        }
        // Success only when every position contributed exactly one unified token.
        if (tokUnified == tokSequence.size()) {
            return true;
        }
        return false;
    }
}
public class Ifc4PackageImpl {
    /**
     * Returns the EClass for IfcTransformerType, resolving it lazily from the
     * globally registered Ifc4 package on first access and caching the result.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcTransformerType() {
        if (ifcTransformerTypeEClass == null) {
            // Classifier index 729 is the fixed position of IfcTransformerType
            // in the generated Ifc4 package metadata.
            ifcTransformerTypeEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(729);
        }
        return ifcTransformerTypeEClass;
    }
}
public class WatcherManager { /** * Clean all watchers of the Service . * @ param name * the Service Name . */ public void cleanWatchers ( String name ) { } }
synchronized ( watchers ) { if ( watchers . containsKey ( name ) ) { watchers . get ( name ) . clear ( ) ; } }
public class LockingStrategy {
    /**
     * Creates the appropriate {@link LockingStrategy} for the given {@link LockingMode}.
     * <p>
     * Returns {@code null} when either no locking mode or no storage client is
     * supplied, i.e. session locking is effectively disabled.
     *
     * @param lockingMode which locking scheme to build; null disables locking
     * @param uriPattern request-URI pattern, only consumed by URI_PATTERN mode
     * @param storage the storage client used by the strategy; null disables locking
     * @param manager the owning memcached session service
     * @param memcachedNodesManager the memcached nodes manager
     * @param missingSessionsCache cache of missing sessions (by session id)
     * @param storeSecondaryBackup whether a secondary backup copy is stored
     * @param stats statistics collector
     * @param currentRequest holder for the request currently being processed
     * @return the strategy instance, or null if locking is disabled
     */
    @CheckForNull
    public static LockingStrategy create(@Nullable final LockingMode lockingMode, @Nullable final Pattern uriPattern, @Nullable final StorageClient storage,
            @Nonnull final MemcachedSessionService manager, @Nonnull final MemcachedNodesManager memcachedNodesManager,
            @Nonnull final LRUCache<String, Boolean> missingSessionsCache, final boolean storeSecondaryBackup, @Nonnull final Statistics stats,
            @Nonnull final CurrentRequest currentRequest) {
        if (lockingMode == null || storage == null) {
            return null;
        }
        switch (lockingMode) {
            case ALL:
                return new LockingStrategyAll(manager, memcachedNodesManager, storage, missingSessionsCache, storeSecondaryBackup, stats, currentRequest);
            case AUTO:
                return new LockingStrategyAuto(manager, memcachedNodesManager, storage, missingSessionsCache, storeSecondaryBackup, stats, currentRequest);
            case URI_PATTERN:
                // Only this strategy receives the uriPattern.
                return new LockingStrategyUriPattern(manager, memcachedNodesManager, uriPattern, storage, missingSessionsCache, storeSecondaryBackup, stats, currentRequest);
            case NONE:
                return new LockingStrategyNone(manager, memcachedNodesManager, storage, missingSessionsCache, storeSecondaryBackup, stats, currentRequest);
            default:
                // Fail loudly if a new enum constant is added without a branch here.
                throw new IllegalArgumentException("LockingMode not yet supported: " + lockingMode);
        }
    }
}
public class Ix {
    /**
     * Collects the elements of this sequence into a Map where the key is
     * determined from each element via the keySelector function and
     * the value is derived from the same element via the valueSelector function; duplicates are
     * overwritten.
     *
     * @param <K> the key type
     * @param <V> the value type
     * @param keySelector the function that receives the current element and returns
     *        a key for it to be used as the Map key.
     * @param valueSelector the function that receives the current element and returns
     *        a value for it to be used as the Map value
     * @return the new Map instance
     * @throws NullPointerException if keySelector or valueSelector is null
     * @since 1.0
     */
    public final <K, V> Map<K, V> toMap(IxFunction<? super T, ? extends K> keySelector, IxFunction<? super T, ? extends V> valueSelector) {
        // Delegate: collectToMap gathers all elements into a single Map
        // (duplicate keys overwritten, per the contract above), and first()
        // extracts that single Map result.
        return this.<K, V>collectToMap(keySelector, valueSelector).first();
    }
}
public class ClientConnection {
    /**
     * Handles a connect response: on an OK status, resets the server selector
     * with the reported leader/members and completes the future with this
     * connection; on any failure or non-OK status, retries the connection
     * attempt. Does nothing when {@code open} is false.
     *
     * @param response the connect response; only inspected when error is null
     * @param error the failure cause, or null when a response was received
     * @param future completed with the connection once connected
     */
    private void handleConnectResponse(ConnectResponse response, Throwable error, CompletableFuture<Connection> future) {
        if (open) {
            if (error == null) {
                LOGGER.trace("{} - Received {}", id, response);
                if (response.status() == Response.Status.OK) {
                    // Adopt the leader and member list reported by the server
                    // before handing out the connection.
                    selector.reset(response.leader(), response.members());
                    future.complete(connection);
                } else {
                    // Non-OK status: retry the connection attempt
                    // (presumably against another server -- behavior of
                    // connect() not visible here).
                    connect(future);
                }
            } else {
                LOGGER.debug("{} - Failed to connect! Reason: {}", id, error);
                connect(future);
            }
        }
    }
}
public class StringUtil { /** * fast lower case conversion . Only works on ascii ( not unicode ) * @ param s the string to convert * @ return a lower case version of s */ public static String asciiToLowerCase ( String s ) { } }
char [ ] c = null ; int i = s . length ( ) ; // look for first conversion while ( i -- > 0 ) { char c1 = s . charAt ( i ) ; if ( c1 <= 127 ) { char c2 = lowercases [ c1 ] ; if ( c1 != c2 ) { c = s . toCharArray ( ) ; c [ i ] = c2 ; break ; } } } while ( i -- > 0 ) { if ( c [ i ] <= 127 ) c [ i ] = lowercases [ c [ i ] ] ; } return c == null ? s : new String ( c ) ;
public class ManagedUserAuthenticationService { /** * Authenticates the username / password combo against the directory service * and returns a Principal if authentication is successful . Otherwise , * returns an AuthenticationException . * @ return a Principal if authentication was successful * @ throws AlpineAuthenticationException when authentication is unsuccessful * @ since 1.0.0 */ public Principal authenticate ( ) throws AlpineAuthenticationException { } }
LOGGER . debug ( "Attempting to authenticate user: " + username ) ; try ( AlpineQueryManager qm = new AlpineQueryManager ( ) ) { final ManagedUser user = qm . getManagedUser ( username ) ; if ( user != null ) { if ( PasswordService . matches ( password . toCharArray ( ) , user ) ) { if ( user . isSuspended ( ) ) { throw new AlpineAuthenticationException ( AlpineAuthenticationException . CauseType . SUSPENDED , user ) ; } if ( user . isForcePasswordChange ( ) ) { throw new AlpineAuthenticationException ( AlpineAuthenticationException . CauseType . FORCE_PASSWORD_CHANGE , user ) ; } return user ; } } } throw new AlpineAuthenticationException ( AlpineAuthenticationException . CauseType . INVALID_CREDENTIALS ) ;
public class DefaultIteratorResultSetMapper { /** * Map a ResultSet to an object type * Type of object to interate over is defined in the SQL annotation for the method . * @ param context A ControlBeanContext instance , see Beehive controls javadoc for additional information * @ param m Method assoicated with this call . * @ param resultSet Result set to map . * @ param cal A Calendar instance for time / date value resolution . * @ return The Iterator object instance resulting from the ResultSet */ public Iterator mapToResultType ( ControlBeanContext context , Method m , ResultSet resultSet , Calendar cal ) { } }
return new ResultSetIterator ( context , m , resultSet , cal ) ;
public class CmsCodeMirror { /** * Called on blur . < p > * @ param value the content value */ void onChange ( String value ) { } }
boolean changed = ! m_codeValue . equals ( value ) ; m_codeValue = value ; if ( changed ) { fireEvent ( new ValueChangeEvent ( this ) ) ; }
public class SessionBeanO {
    /**
     * Checks if this beanO can be removed. Throws RemoveException if it
     * cannot be removed.
     *
     * @throws RemoveException if the bean is enlisted in a global transaction
     *         and is not the bean currently designated for removal
     */
    protected void canBeRemoved() throws RemoveException {
        ContainerTx tx = container.getCurrentContainerTx(); // d171654
        // If there is no current transaction then we are removing a
        // TX_BEAN_MANAGED session bean outside of a transaction which
        // is correct.
        if (tx == null) {
            return;
        }
        // Stateful beans cannot be removed in a global transaction,
        // unless this is an EJB 3.0 business method designated as a
        // remove method (tx.ivRemoveBeanO points at this bean). d451675
        if (tx.isTransactionGlobal() && tx.ivRemoveBeanO != this) {
            throw new RemoveException("Cannot remove session bean " + "within a transaction.");
        }
    }
}
public class CallbackFilteringExample { /** * Entry point to the Callback Filtering Example . * @ param args unused */ public static void main ( String [ ] args ) { } }
// manager with two stopwatches is created Manager manager = new EnabledManager ( ) ; Stopwatch sw1 = manager . getStopwatch ( "org.javasimon.examples.stopwatch1" ) ; Stopwatch sw2 = manager . getStopwatch ( "other.stopwatch2" ) ; // simple callback printing actions to the stdout is created and installed Callback stdoutCallback = new CallbackSkeleton ( ) { @ Override public void onStopwatchStart ( Split split ) { System . out . println ( "Starting " + split . getStopwatch ( ) . getName ( ) ) ; } @ Override public void onStopwatchStop ( Split split , StopwatchSample sample ) { System . out . println ( "Stopped " + split . getStopwatch ( ) . getName ( ) + " (" + SimonUtils . presentNanoTime ( split . runningFor ( ) ) + ")" ) ; } } ; manager . callback ( ) . addCallback ( stdoutCallback ) ; // prints start / stop for both stopwatches sw1 . start ( ) . stop ( ) ; sw2 . start ( ) . stop ( ) ; System . out . println ( ) ; // we need to remove old callback manager . callback ( ) . removeCallback ( stdoutCallback ) ; // alternatively you can call this if you want to remove all callbacks SimonManager . callback ( ) . removeAllCallbacks ( ) ; // filter callback is created CompositeFilterCallback filter = new CompositeFilterCallback ( ) ; // rule to filter out all Simons matching pattern " other . * " is added filter . addRule ( FilterRule . Type . MUST_NOT , null , "other.*" ) ; // original callback is added after this callback filter . addCallback ( stdoutCallback ) ; // filter callback is installed to the manager ( with printing callback behind ) manager . callback ( ) . addCallback ( filter ) ; // start / stop is printed only for sw1 because sw2 matches other . * pattern that is excluded ( MUST _ NOT ) sw1 . start ( ) . stop ( ) ; sw2 . start ( ) . stop ( ) ;
public class ReadAheadInputStream {
    /**
     * Internal skip function which should be called only from the skip() API.
     * The assumption is that the stateChangeLock is already acquired in the
     * caller before calling this function.
     *
     * @param n number of bytes to skip
     * @return the number of bytes actually skipped
     * @throws IOException if the underlying stream fails while skipping
     */
    private long skipInternal(long n) throws IOException {
        assert (stateChangeLock.isLocked());
        // Any in-flight background read must finish before touching the buffers.
        waitForAsyncReadComplete();
        if (isEndOfStream()) {
            return 0;
        }
        if (available() >= n) {
            // We can satisfy the skip entirely from the internal buffers.
            int toSkip = (int) n;
            // We need to skip from both the active buffer and the read-ahead buffer.
            toSkip -= activeBuffer.remaining();
            // NOTE(review): this assert implies the caller only invokes
            // skipInternal when n exceeds activeBuffer.remaining() -- confirm
            // in skip().
            assert (toSkip > 0); // skipping from activeBuffer already handled.
            activeBuffer.position(0);
            activeBuffer.flip();
            readAheadBuffer.position(toSkip + readAheadBuffer.position());
            swapBuffers();
            // Trigger async read to refill the emptied read-ahead buffer.
            readAsync();
            return n;
        } else {
            // Drain everything buffered, then delegate the remainder to the
            // underlying stream's own skip().
            int skippedBytes = available();
            long toSkip = n - skippedBytes;
            activeBuffer.position(0);
            activeBuffer.flip();
            readAheadBuffer.position(0);
            readAheadBuffer.flip();
            long skippedFromInputStream = underlyingInputStream.skip(toSkip);
            readAsync();
            return skippedBytes + skippedFromInputStream;
        }
    }
}
public class DeleteApplicationReferenceDataSourceRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeleteApplicationReferenceDataSourceRequest deleteApplicationReferenceDataSourceRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( deleteApplicationReferenceDataSourceRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteApplicationReferenceDataSourceRequest . getApplicationName ( ) , APPLICATIONNAME_BINDING ) ; protocolMarshaller . marshall ( deleteApplicationReferenceDataSourceRequest . getCurrentApplicationVersionId ( ) , CURRENTAPPLICATIONVERSIONID_BINDING ) ; protocolMarshaller . marshall ( deleteApplicationReferenceDataSourceRequest . getReferenceId ( ) , REFERENCEID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class VcfReader {
    /**
     * Parse the specified readable.
     *
     * @param readable readable to parse, must not be null
     * @param listener low-level event based parser callback, must not be null
     * @throws IOException if an I/O error occurs
     */
    public static void parse(final Readable readable, final VcfParseListener listener) throws IOException {
        // Thin convenience facade; all parsing work happens in VcfParser.
        VcfParser.parse(readable, listener);
    }
}
public class FragmentExpression {
    /**
     * Create the executed fragment expression.
     *
     * @param context the context
     * @param expression the expression
     * @param expContext the expression context
     * @return the executed fragment expression
     * @deprecated Deprecated in 3.0.9. Use the version without "expContext" itself, as all FragmentExpressions should
     *             be executed in RESTRICTED mode (no request parameter use allowed).
     */
    @Deprecated
    public static ExecutedFragmentExpression createExecutedFragmentExpression(final IExpressionContext context, final FragmentExpression expression, final StandardExpressionExecutionContext expContext) {
        // Kept only for backwards compatibility; delegates to the shared
        // internal implementation with the caller-supplied execution context.
        return doCreateExecutedFragmentExpression(context, expression, expContext);
    }
}
public class StringUtils {
    /**
     * Creates a percentage ASCII bar, e.g. {@code "[###.....] nn%"} with a
     * 40-slot bar.
     * <p>
     * Out-of-range values are clamped into [0, 100]; previously a value
     * above 100 caused a StringIndexOutOfBoundsException when the marker
     * index ran past the end of the bar.
     *
     * @param percentage value in [0, 100]; values outside the range are clamped
     * @return "[###...] nn%"
     */
    public static String percentageBar(double percentage) {
        final char dot = '.';
        final char mark = '#';
        final int slots = 40;
        // Clamp to the documented domain instead of failing on bad input.
        final double clamped = Math.min(100.0, Math.max(0.0, percentage));
        // Build the empty bar inline (no helper dependency).
        final StringBuilder bar = new StringBuilder(slots);
        for (int k = 0; k < slots; k++) {
            bar.append(dot);
        }
        final int numSlots = (int) (slots * clamped / 100.0);
        for (int k = 0; k < numSlots; k++) {
            bar.setCharAt(k, mark);
        }
        return String.format("[%s] %3.0f%%", bar, clamped);
    }
}
public class TorqueModelDef { /** * Extracts the list of columns from the given field list . * @ param fields The fields * @ return The corresponding columns */ private List getColumns ( List fields ) { } }
ArrayList columns = new ArrayList ( ) ; for ( Iterator it = fields . iterator ( ) ; it . hasNext ( ) ; ) { FieldDescriptorDef fieldDef = ( FieldDescriptorDef ) it . next ( ) ; columns . add ( fieldDef . getProperty ( PropertyHelper . OJB_PROPERTY_COLUMN ) ) ; } return columns ;
public class ManagedTransactionFactory {
    /**
     * Returns the JTA transaction currently associated with the calling thread.
     * <p>
     * The TransactionManager is resolved lazily on first use and cached in the
     * {@code tm} field.
     * NOTE(review): the lazy initialization is not synchronized; concurrent
     * first calls could each resolve a manager -- confirm whether first use is
     * single-threaded.
     *
     * @return the active transaction for the current thread (null if none,
     *         per the JTA {@code TransactionManager.getTransaction} contract)
     * @throws TransactionFactoryException if the manager cannot be created
     *         or the transaction cannot be obtained
     */
    public javax.transaction.Transaction getJTATransaction() {
        if (tm == null) {
            try {
                tm = TransactionManagerFactoryFactory.instance().getTransactionManager();
            } catch (TransactionManagerFactoryException e) {
                throw new TransactionFactoryException("Can't instantiate TransactionManagerFactory", e);
            }
        }
        try {
            return tm.getTransaction();
        } catch (SystemException e) {
            throw new TransactionFactoryException("Error acquiring JTA Transaction", e);
        }
    }
}
public class Duration { /** * Parses a duration string . * @ param value the duration string ( e . g . " P30DT10H " ) * @ return the parsed duration * @ throws IllegalArgumentException if the duration string is invalid */ public static Duration parse ( String value ) { } }
/* * Implementation note : Regular expressions are not used to improve * performance . */ if ( value . length ( ) == 0 ) { throw parseError ( value ) ; } int index = 0 ; char first = value . charAt ( index ) ; boolean prior = ( first == '-' ) ; if ( first == '-' || first == '+' ) { index ++ ; } if ( value . charAt ( index ) != 'P' ) { throw parseError ( value ) ; } Builder builder = new Builder ( ) ; builder . prior ( prior ) ; StringBuilder buffer = new StringBuilder ( ) ; for ( int i = index + 1 ; i < value . length ( ) ; i ++ ) { char c = value . charAt ( i ) ; if ( c == 'T' ) { /* * A " T " character is supposed to immediately precede the time * component value ( s ) . It is required by the syntax , but not * really necessary . Ignore it . */ continue ; } if ( c >= '0' && c <= '9' ) { buffer . append ( c ) ; continue ; } if ( buffer . length ( ) == 0 ) { throw parseError ( value ) ; } Integer num = Integer . valueOf ( buffer . toString ( ) ) ; buffer . setLength ( 0 ) ; switch ( c ) { case 'W' : builder . weeks ( num ) ; break ; case 'D' : builder . days ( num ) ; break ; case 'H' : builder . hours ( num ) ; break ; case 'M' : builder . minutes ( num ) ; break ; case 'S' : builder . seconds ( num ) ; break ; default : throw parseError ( value ) ; } } return builder . build ( ) ;
public class dbdbprofile { /** * Use this API to update dbdbprofile . */ public static base_response update ( nitro_service client , dbdbprofile resource ) throws Exception { } }
dbdbprofile updateresource = new dbdbprofile ( ) ; updateresource . name = resource . name ; updateresource . interpretquery = resource . interpretquery ; updateresource . stickiness = resource . stickiness ; updateresource . kcdaccount = resource . kcdaccount ; updateresource . conmultiplex = resource . conmultiplex ; return updateresource . update_resource ( client ) ;
public class ServletRESTRequestWithParams { /** * ( non - Javadoc ) * @ see com . ibm . wsspi . rest . handler . RESTRequest # getRemoteAddr ( ) */ @ Override public String getRemoteAddr ( ) { } }
ServletRESTRequestImpl ret = castRequest ( ) ; if ( ret != null ) return ret . getRemoteAddr ( ) ; return null ;
public class StatefulBeanO { /** * d112866 */ public final synchronized void uninstall ( ) { } }
// To transition a Stateful bean to the " does not exist " state , // either ejbRemove or ejbPassivate must be called . // ejbPassivate has been chosen , as it will generally perform // better ( it normally does less ) assuming that the bean is not // actually passivated to a file . Setting the " uninstalling " // flag to true avoids serializing to a file . // Calling ejbRemove is likely to cause a problem , as it is // typical for ejbRemove to call remove ( ) on other beans . . . // which may already be disabled / uninstalled . // If the bean is not passivation capable , then just call // ejbRemove / PreDestroy rather than passivate . try { if ( home . beanMetaData . isPassivationCapable ( ) ) { uninstalling = true ; passivate ( ) ; } else { completeRemoveMethod ( null ) ; } } catch ( RemoteException rex ) { FFDCFilter . processException ( rex , CLASS_NAME + ".uninstall" , "654" , this ) ; Tr . warning ( tc , "IGNORING_UNEXPECTED_EXCEPTION_CNTR0033E" , rex ) ; }
public class IOUtils {
    /**
     * Reads characters from the reader until the terminator character or
     * end of stream is reached.
     *
     * @param is The reader to read characters from.
     * @param term Terminator character.
     * @return The string up until, but not including, the terminator.
     * @throws IOException when unable to read from stream.
     */
    public static String readString(Reader is, char term) throws IOException {
        final StringBuilder result = new StringBuilder();
        // read() returns a negative value at end of stream.
        for (int code = is.read(); code >= 0; code = is.read()) {
            final char ch = (char) code;
            if (ch == term) {
                break; // terminator is consumed but not included
            }
            result.append(ch);
        }
        return result.toString();
    }
}