signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class ConsoleReaderWrapper { /** * Print charSequence to console
* @ param charSequence to print */
public void print ( CharSequence charSequence ) { } }
|
try { consoleReader . println ( charSequence ) ; consoleReader . flush ( ) ; } catch ( IOException e ) { throw new IllegalStateException ( "Can't write to console" , e ) ; }
|
public class CommerceNotificationAttachmentPersistenceImpl { /** * Returns all the commerce notification attachments .
* @ return the commerce notification attachments */
@ Override public List < CommerceNotificationAttachment > findAll ( ) { } }
|
return findAll ( QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ;
|
public class ElementMatchers { /** * Matches a method that declares the given generic exception type as a ( erased ) exception type .
* @ param exceptionType The exception type that is matched .
* @ param < T > The type of the matched object .
* @ return A matcher that matches any method that exactly matches the provided exception . */
public static < T extends MethodDescription > ElementMatcher . Junction < T > declaresException ( TypeDescription exceptionType ) { } }
|
return exceptionType . isAssignableTo ( Throwable . class ) ? ElementMatchers . < T > declaresGenericException ( new CollectionItemMatcher < TypeDescription . Generic > ( erasure ( exceptionType ) ) ) : new BooleanMatcher < T > ( false ) ;
|
public class ScriptableObject { /** * Attach the specified object to this object , and delegate all indexed property lookups to it . In other words ,
* if the object has 3 elements , then an attempt to look up or modify " [ 0 ] " , " [ 1 ] " , or " [ 2 ] " will be delegated
* to this object . Additional indexed properties outside the range specified , and additional non - indexed
* properties , may still be added . The object specified must implement the ExternalArrayData interface .
* @ param array the List to use for delegated property access . Set this to null to revert back to regular
* property access .
* @ since 1.7.6 */
public void setExternalArrayData ( ExternalArrayData array ) { } }
|
externalData = array ; if ( array == null ) { delete ( "length" ) ; } else { // Define " length " to return whatever length the List gives us .
defineProperty ( "length" , null , GET_ARRAY_LENGTH , null , READONLY | DONTENUM ) ; }
|
public class CPDefinitionOptionValueRelUtil { /** * Returns the first cp definition option value rel in the ordered set where groupId = & # 63 ; .
* @ param groupId the group ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching cp definition option value rel
* @ throws NoSuchCPDefinitionOptionValueRelException if a matching cp definition option value rel could not be found */
public static CPDefinitionOptionValueRel findByGroupId_First ( long groupId , OrderByComparator < CPDefinitionOptionValueRel > orderByComparator ) throws com . liferay . commerce . product . exception . NoSuchCPDefinitionOptionValueRelException { } }
|
return getPersistence ( ) . findByGroupId_First ( groupId , orderByComparator ) ;
|
public class MetaDataProcessorResolver { /** * Looks for the processor element ( by querying it with the { @ code processorElementXPathQuery } ) in the item ' s
* descriptor . If the element is found , the element value is mapped to a processor and that processor is
* returned .
* @ throws XmlException if the element value doesn ' t refer to an existing processor */
@ Override public ItemProcessor getProcessor ( Item item ) throws XmlException { } }
|
String processorElementValue = item . queryDescriptorValue ( processorElementXPathQuery ) ; if ( StringUtils . isNotEmpty ( processorElementValue ) ) { ItemProcessor processor = elementValueToProcessorMappings . get ( processorElementValue ) ; if ( processor != null ) { return processor ; } else { throw new XmlException ( "Element value \"" + processorElementValue + "\" doesn't refer to a " + "registered processor" ) ; } } else { return null ; }
|
public class SerializedFormWriterImpl { /** * { @ inheritDoc } */
public void addPackageSerializedTree ( Content serializedSummariesTree , Content packageSerializedTree ) { } }
|
serializedSummariesTree . addContent ( ( configuration . allowTag ( HtmlTag . SECTION ) ) ? HtmlTree . LI ( HtmlStyle . blockList , packageSerializedTree ) : packageSerializedTree ) ;
|
public class ConcurrentLinkedList { /** * ( non - Javadoc )
* @ see ws . sib . objectManager . List # size ( ws . sib . objectManager . Transaction ) */
public long size ( Transaction transaction ) throws ObjectManagerException { } }
|
if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , "size" , "Transaction=" + transaction ) ; long totalSize = 0 ; // Size to return .
for ( int i = 0 ; i < subLists . length ; i ++ ) { totalSize = totalSize + subLists [ i ] . size ( transaction ) ; } // for subLists .
if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , "size" , "return=" + totalSize ) ; return totalSize ;
|
public class ServletContextFacade { /** * / * ( non - Javadoc )
* @ see com . ibm . wsspi . webcontainer . servlet . IServletContext # addMappingTarget ( java . lang . String , com . ibm . ws . webcontainer . core . RequestProcessor ) */
public void addMappingTarget ( String mapping , RequestProcessor target ) throws Exception { } }
|
context . addMappingTarget ( mapping , target ) ;
|
public class AABBd { /** * Set the minimum corner coordinates .
* @ param min
* the minimum coordinates
* @ return this */
public AABBd setMin ( Vector3dc min ) { } }
|
return this . setMin ( min . x ( ) , min . y ( ) , min . z ( ) ) ;
|
public class AuthRegistrationsCredentialListMapping { /** * Create a AuthRegistrationsCredentialListMappingCreator to execute create .
* @ param pathAccountSid The SID of the Account that will create the resource
* @ param pathDomainSid The SID of the SIP domain that will contain the new
* resource
* @ param credentialListSid The SID of the CredentialList resource to map to
* the SIP domain
* @ return AuthRegistrationsCredentialListMappingCreator capable of executing
* the create */
public static AuthRegistrationsCredentialListMappingCreator creator ( final String pathAccountSid , final String pathDomainSid , final String credentialListSid ) { } }
|
return new AuthRegistrationsCredentialListMappingCreator ( pathAccountSid , pathDomainSid , credentialListSid ) ;
|
public class current_timezone { /** * < pre >
* Use this operation to modify current time zone .
* < / pre > */
public static current_timezone update ( nitro_service client , current_timezone resource ) throws Exception { } }
|
resource . validate ( "modify" ) ; return ( ( current_timezone [ ] ) resource . update_resource ( client ) ) [ 0 ] ;
|
public class LockCache { /** * Attempts to take a lock on the given key .
* @ param key the key to lock
* @ param mode lockMode to acquire
* @ return either empty or a lock resource which must be closed to unlock the key */
public Optional < LockResource > tryGet ( K key , LockMode mode ) { } }
|
ValNode valNode = getValNode ( key ) ; ReentrantReadWriteLock lock = valNode . mValue ; Lock innerLock ; switch ( mode ) { case READ : innerLock = lock . readLock ( ) ; break ; case WRITE : innerLock = lock . writeLock ( ) ; break ; default : throw new IllegalStateException ( "Unknown lock mode: " + mode ) ; } if ( ! innerLock . tryLock ( ) ) { return Optional . empty ( ) ; } return Optional . of ( new RefCountLockResource ( innerLock , false , valNode . mRefCount ) ) ;
|
public class JettyBoot { protected Configuration [ ] prepareConfigurations ( ) { } }
|
final List < Configuration > configList = new ArrayList < Configuration > ( ) ; setupConfigList ( configList ) ; return configList . toArray ( new Configuration [ configList . size ( ) ] ) ;
|
public class srecParser {
    /**
     * ANTLR-generated rule method for:
     * /home/victor/srec/core/src/main/antlr/srec.g:54:1:
     * expression : ( method_call_or_varref | assignment | method_def ) ;
     *
     * NOTE(review): generated parser code — statement order and the state
     * bookkeeping (state.failed / state.backtracking / _fsp) must not be
     * reordered by hand.
     */
    public final srecParser.expression_return expression() throws RecognitionException {
        srecParser.expression_return retval = new srecParser.expression_return();
        retval.start = input.LT(1);
        CommonTree root_0 = null;
        srecParser.method_call_or_varref_return method_call_or_varref9 = null;
        srecParser.assignment_return assignment10 = null;
        srecParser.method_def_return method_def11 = null;
        try {
            // srec.g:55:2: ( method_call_or_varref | assignment | method_def )
            // Decide which alternative applies from one or two tokens of lookahead.
            int alt5 = 3;
            int LA5_0 = input.LA(1);
            if ((LA5_0 == ID)) {
                int LA5_1 = input.LA(2);
                if ((LA5_1 == EOF || (LA5_1 >= STRING && LA5_1 <= NULL) || LA5_1 == 29)) {
                    alt5 = 1;
                } else if ((LA5_1 == 32)) {
                    alt5 = 2;
                } else {
                    // No viable alternative: fail silently when backtracking,
                    // otherwise raise a parse error.
                    if (state.backtracking > 0) {
                        state.failed = true;
                        return retval;
                    }
                    NoViableAltException nvae = new NoViableAltException("", 5, 1, input);
                    throw nvae;
                }
            } else if ((LA5_0 == 33)) {
                alt5 = 3;
            } else {
                if (state.backtracking > 0) {
                    state.failed = true;
                    return retval;
                }
                NoViableAltException nvae = new NoViableAltException("", 5, 0, input);
                throw nvae;
            }
            switch (alt5) {
                case 1:
                    // srec.g:55:4: method_call_or_varref
                    {
                        root_0 = (CommonTree) adaptor.nil();
                        pushFollow(FOLLOW_method_call_or_varref_in_expression202);
                        method_call_or_varref9 = method_call_or_varref();
                        state._fsp--;
                        if (state.failed) return retval;
                        if (state.backtracking == 0) adaptor.addChild(root_0, method_call_or_varref9.getTree());
                    }
                    break;
                case 2:
                    // srec.g:55:28: assignment
                    {
                        root_0 = (CommonTree) adaptor.nil();
                        pushFollow(FOLLOW_assignment_in_expression206);
                        assignment10 = assignment();
                        state._fsp--;
                        if (state.failed) return retval;
                        if (state.backtracking == 0) adaptor.addChild(root_0, assignment10.getTree());
                    }
                    break;
                case 3:
                    // srec.g:55:41: method_def
                    {
                        root_0 = (CommonTree) adaptor.nil();
                        pushFollow(FOLLOW_method_def_in_expression210);
                        method_def11 = method_def();
                        state._fsp--;
                        if (state.failed) return retval;
                        if (state.backtracking == 0) adaptor.addChild(root_0, method_def11.getTree());
                    }
                    break;
            }
            // Record the span of the matched rule and finalize the AST.
            retval.stop = input.LT(-1);
            if (state.backtracking == 0) {
                retval.tree = (CommonTree) adaptor.rulePostProcessing(root_0);
                adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
            }
        } catch (RecognitionException re) {
            // Standard ANTLR error recovery: report, resync, and return an error node.
            reportError(re);
            recover(input, re);
            retval.tree = (CommonTree) adaptor.errorNode(input, retval.start, input.LT(-1), re);
        } finally {
        }
        return retval;
    }
}
|
public class AlertResources { /** * Deletes all notifications for a given alert .
* @ param req The HttpServlet request object . Cannot be null .
* @ param alertId The alert id . Cannot be null and must be a positive non - zero number .
* @ return Updated alert object .
* @ throws WebApplicationException The exception with 404 status will be thrown if an alert does not exist . */
@ DELETE @ Produces ( MediaType . APPLICATION_JSON ) @ Path ( "/{alertId}/notifications" ) @ Description ( "Deletes all notifications for the given alert ID. Associated triggers are not deleted from the alert." ) public Response deleteAllNotificationsByAlertId ( @ Context HttpServletRequest req , @ PathParam ( "alertId" ) BigInteger alertId ) { } }
|
if ( alertId == null || alertId . compareTo ( BigInteger . ZERO ) < 1 ) { throw new WebApplicationException ( "Alert Id cannot be null and must be a positive non-zero number." , Status . BAD_REQUEST ) ; } Alert alert = alertService . findAlertByPrimaryKey ( alertId ) ; if ( alert != null ) { validateResourceAuthorization ( req , alert . getOwner ( ) , getRemoteUser ( req ) ) ; alert . setNotifications ( new ArrayList < Notification > ( 0 ) ) ; alert . setModifiedBy ( getRemoteUser ( req ) ) ; alertService . updateAlert ( alert ) ; return Response . status ( Status . OK ) . build ( ) ; } throw new WebApplicationException ( Response . Status . NOT_FOUND . getReasonPhrase ( ) , Response . Status . NOT_FOUND ) ;
|
public class Table { /** * Returns a table with the same columns as this table */
public Table copy ( ) { } }
|
Table copy = new Table ( name ) ; for ( Column < ? > column : columnList ) { copy . addColumns ( column . emptyCopy ( rowCount ( ) ) ) ; } int [ ] rows = new int [ rowCount ( ) ] ; for ( int i = 0 ; i < rowCount ( ) ; i ++ ) { rows [ i ] = i ; } Rows . copyRowsToTable ( rows , this , copy ) ; return copy ;
|
public class SequentialExecutionQueue { /** * Gets { @ link Runnable } s that are currently executed by a live thread . */
public synchronized Set < Runnable > getInProgress ( ) { } }
|
Set < Runnable > items = new HashSet < > ( ) ; for ( QueueEntry entry : inProgress ) { items . add ( entry . item ) ; } return items ;
|
public class ParticleIO { /** * Load a single emitter from an XML file
* @ param ref
* The XML file to read
* @ return The configured emitter
* @ param factory
* The factory used to create the emitter than will be poulated
* with loaded data .
* @ throws IOException
* Indicates a failure to find , read or parse the XML file */
public static ConfigurableEmitter loadEmitter ( File ref , ConfigurableEmitterFactory factory ) throws IOException { } }
|
return loadEmitter ( new FileInputStream ( ref ) , factory ) ;
|
public class InputStreamHelper { /** * Expands a zip file input stream into a temporary directory .
* @ param dir temporary directory
* @ param inputStream zip file input stream */
private static void processZipStream ( File dir , InputStream inputStream ) throws IOException { } }
|
ZipInputStream zip = new ZipInputStream ( inputStream ) ; while ( true ) { ZipEntry entry = zip . getNextEntry ( ) ; if ( entry == null ) { break ; } File file = new File ( dir , entry . getName ( ) ) ; if ( entry . isDirectory ( ) ) { FileHelper . mkdirsQuietly ( file ) ; continue ; } File parent = file . getParentFile ( ) ; if ( parent != null ) { FileHelper . mkdirsQuietly ( parent ) ; } FileOutputStream fos = new FileOutputStream ( file ) ; byte [ ] bytes = new byte [ 1024 ] ; int length ; while ( ( length = zip . read ( bytes ) ) >= 0 ) { fos . write ( bytes , 0 , length ) ; } fos . close ( ) ; }
|
public class Tooltip { /** * Changes text tooltip to specified text . If tooltip content is not instance of VisLabel then previous tooltip content
* will be replaced by VisLabel instance .
* @ param text next tooltip text */
public void setText ( String text ) { } }
|
if ( content instanceof VisLabel ) { ( ( VisLabel ) content ) . setText ( text ) ; } else { setContent ( new VisLabel ( text ) ) ; } pack ( ) ;
|
public class XMLUtil { /** * Replies the text inside the node at the specified path .
* < p > The path is an ordered list of tag ' s names and ended by the name of
* the desired node .
* Be careful about the fact that the names are case sensitives .
* @ param document is the XML document to explore .
* @ param path is the list of names . This path may be empty .
* @ return the text or < code > null < / code > if the node was not found in the document or the text
* inside was empty . */
@ Pure public static String getText ( Node document , String ... path ) { } }
|
assert document != null : AssertMessages . notNullParameter ( 0 ) ; Node parentNode = getNodeFromPath ( document , path ) ; if ( parentNode == null ) { parentNode = document ; } final StringBuilder text = new StringBuilder ( ) ; final NodeList children = parentNode . getChildNodes ( ) ; final int len = children . getLength ( ) ; for ( int i = 0 ; i < len ; ++ i ) { final Node child = children . item ( i ) ; if ( child instanceof Text ) { text . append ( ( ( Text ) child ) . getWholeText ( ) ) ; } } if ( text . length ( ) > 0 ) { return text . toString ( ) ; } return null ;
|
public class HostEndsWithOneOf { /** * Apply the filter to a given URI
* @ param uri the URI to be filtered
* @ return < code > true < / code > if the host part of < code > uri < / code > ends with one of the inner suffixes */
@ Override public boolean apply ( final URI uri ) { } }
|
String host = uri . getHost ( ) ; // BURL guarantees lower case .
for ( String suffix : suffixes ) if ( host . endsWith ( suffix ) ) return true ; return false ;
|
public class SimulatorProtocolImpl { /** * listen for a complete message . */
protected void read ( ) throws Exception { } }
|
InputStream is = socket . getInputStream ( ) ; while ( is . available ( ) > 0 ) { byte [ ] bytes = new byte [ 1024 * 1024 ] ; int read = is . read ( bytes ) ; byte [ ] actuallyRead = new byte [ read ] ; System . arraycopy ( bytes , 0 , actuallyRead , 0 , read ) ; pushInput ( actuallyRead ) ; }
|
public class CmsGalleryService {
    /**
     * Checks the current user's permissions on the upload target folder to
     * update the "no upload reason" on the search bean.<p>
     *
     * Only runs the check when exactly one gallery-or-folder target is
     * selected; otherwise the reason is cleared.
     *
     * @param searchCms the cms context
     * @param searchObj the search data
     */
    private void updateNoUploadReason(CmsObject searchCms, CmsGallerySearchBean searchObj) {
        if ((searchObj.getGalleries().size() + searchObj.getFolders().size()) == 1) {
            // Pick the single target: the one gallery, or else the one folder.
            String target = !searchObj.getGalleries().isEmpty()
                ? searchObj.getGalleries().get(0)
                : searchObj.getFolders().iterator().next();
            try {
                CmsResource targetRes;
                if (searchCms.existsResource(target, CmsResourceFilter.ONLY_VISIBLE_NO_DELETED)) {
                    targetRes = searchCms.readResource(target, CmsResourceFilter.ONLY_VISIBLE_NO_DELETED);
                } else {
                    // Not visible from the current site: retry from the root site.
                    CmsObject rootCms = OpenCms.initCmsObject(searchCms);
                    rootCms.getRequestContext().setSiteRoot("");
                    targetRes = rootCms.readResource(target, CmsResourceFilter.ONLY_VISIBLE_NO_DELETED);
                }
                // The edit-permission reason doubles as the upload reason.
                searchObj.setNoUploadReason(
                    new CmsResourceUtil(searchCms, targetRes).getNoEditReason(getWorkplaceLocale(), true));
            } catch (CmsException e) {
                // Reading the resource failed: report the localized error as the reason.
                searchObj.setNoUploadReason(e.getLocalizedMessage(getWorkplaceLocale()));
            }
        } else {
            // Zero or multiple targets: no single folder to check, clear the reason.
            searchObj.setNoUploadReason(null);
        }
    }
}
|
public class FacesBackingBeanFactory {
    /**
     * Load a "backing bean" associated with the JavaServer Faces page for a request.
     *
     * <p>Resolves the class by name, verifies it carries the "FacesBacking"
     * annotation, and instantiates it. All failure paths are logged and yield
     * {@code null} rather than propagating an exception.
     *
     * @param requestContext a {@link RequestContext} object which contains the current request and response.
     * @param backingClassName the name of the backing bean class.
     * @return an initialized FacesBackingBean, or <code>null</code> if an error occurred.
     */
    protected FacesBackingBean loadFacesBackingBean(RequestContext requestContext, String backingClassName) {
        try {
            Class backingClass = null;
            try {
                backingClass = getFacesBackingBeanClass(backingClassName);
            } catch (ClassNotFoundException e) {
                // ignore -- we deal with this and log this immediately below.
                // getFacesBackingBeanClass() by default does not throw this
                // exception, but a derived version might.
            }
            if (backingClass == null) {
                // Missing class is an expected case (page has no backing bean).
                if (_log.isTraceEnabled()) {
                    _log.trace("No backing bean class " + backingClassName + " found for request "
                        + requestContext.getHttpRequest().getRequestURI());
                }
            } else {
                // Only classes annotated with "FacesBacking" qualify as backing beans.
                AnnotationReader annReader = AnnotationReader.getAnnotationReader(backingClass, getServletContext());
                if (annReader.getJpfAnnotation(backingClass, "FacesBacking") != null) {
                    if (_log.isDebugEnabled()) {
                        _log.debug("Found backing class " + backingClassName + " for request "
                            + requestContext.getHttpRequest().getRequestURI() + "; creating a new instance.");
                    }
                    return getFacesBackingBeanInstance(backingClass);
                } else {
                    if (_log.isDebugEnabled()) {
                        _log.debug("Found matching backing class " + backingClassName + " for request "
                            + requestContext.getHttpRequest().getRequestURI() + ", but it does not have the "
                            + ANNOTATION_QUALIFIER + "FacesBacking" + " annotation.");
                    }
                }
            }
        } catch (InstantiationException e) {
            _log.error("Could not create backing bean instance of " + backingClassName, e);
        } catch (IllegalAccessException e) {
            _log.error("Could not create backing bean instance of " + backingClassName, e);
        }
        return null;
    }
}
|
public class CmsVfsSelection { /** * Sets the value of the widget . < p >
* @ param value the new value */
public void setFormValue ( Object value ) { } }
|
if ( value == null ) { value = "" ; } if ( value instanceof String ) { String strValue = ( String ) value ; m_selectionInput . m_textbox . setText ( strValue ) ; setTitle ( strValue ) ; }
|
public class TableExtractor { /** * Cleans up the noising lines , e . g . , the continuing blank lines
* @ param linesOfAPage
* a list of lines of one page
* @ return a list of words of one page */
private ArrayList < TextPiece > dataCleaning ( ArrayList < TextPiece > linesOfAPage ) { } }
|
TextPiece line = null ; TextPiece preLine = null ; TextPiece nextLine = null ; for ( int i = 0 ; i < linesOfAPage . size ( ) ; i ++ ) { line = linesOfAPage . get ( i ) ; preLine = ( i == 0 ) ? null : linesOfAPage . get ( i - 1 ) ; nextLine = ( i == linesOfAPage . size ( ) - 1 ) ? null : linesOfAPage . get ( i + 1 ) ; if ( ( line . getText ( ) . replaceAll ( " " , "" ) . length ( ) == 0 ) && ( preLine != null ) && ( nextLine != null ) && ( preLine . getEndY ( ) < line . getY ( ) ) && ( line . getEndY ( ) < nextLine . getY ( ) ) ) { linesOfAPage . remove ( i ) ; i -- ; // System . out . println ( " Removed the next line of : " +
// preLine . getText ( ) ) ;
} } return linesOfAPage ;
|
public class DatabaseImpl { /** * < p > Returns the current winning revision of a local document . < / p >
* @ param docId ID of the local document
* @ return { @ code LocalDocument } of the document
* @ throws DocumentNotFoundException if the document ID doesn ' t exist */
public LocalDocument getLocalDocument ( final String docId ) throws DocumentNotFoundException { } }
|
Misc . checkState ( this . isOpen ( ) , "Database is closed" ) ; try { return get ( queue . submit ( new GetLocalDocumentCallable ( docId ) ) ) ; } catch ( ExecutionException e ) { throw new DocumentNotFoundException ( e ) ; }
|
public class CoreUtils { /** * Have shutdown actually means shutdown . Tasks that need to complete should use
* futures . */
public static ScheduledThreadPoolExecutor getScheduledThreadPoolExecutor ( String name , int poolSize , int stackSize ) { } }
|
ScheduledThreadPoolExecutor ses = new ScheduledThreadPoolExecutor ( poolSize , getThreadFactory ( null , name , stackSize , poolSize > 1 , null ) ) ; ses . setContinueExistingPeriodicTasksAfterShutdownPolicy ( false ) ; ses . setExecuteExistingDelayedTasksAfterShutdownPolicy ( false ) ; return ses ;
|
public class Wxs { /** * 根据不同的消息类型 , 调用WxHandler不同的方法 */
public static WxOutMsg handle ( WxInMsg msg , WxHandler handler ) { } }
|
WxOutMsg out = null ; switch ( WxMsgType . valueOf ( msg . getMsgType ( ) ) ) { case text : out = handler . text ( msg ) ; break ; case image : out = handler . image ( msg ) ; break ; case voice : out = handler . voice ( msg ) ; break ; case video : out = handler . video ( msg ) ; break ; case location : out = handler . location ( msg ) ; break ; case link : out = handler . link ( msg ) ; break ; case event : out = handleEvent ( msg , handler ) ; break ; case shortvideo : out = handler . shortvideo ( msg ) ; break ; default : log . infof ( "New MsyType=%s ? fallback to defaultMsg" , msg . getMsgType ( ) ) ; out = handler . defaultMsg ( msg ) ; break ; } return out ;
|
public class CPDefinitionLinkPersistenceImpl {
    /**
     * Returns the cp definition link with the primary key or returns
     * <code>null</code> if it could not be found.
     *
     * <p>Checks the entity cache first (including a cached "known absent"
     * marker), then falls back to a database session, caching whichever
     * outcome it finds.
     *
     * @param primaryKey the primary key of the cp definition link
     * @return the cp definition link, or <code>null</code> if a cp definition link with the primary key could not be found
     */
    @Override
    public CPDefinitionLink fetchByPrimaryKey(Serializable primaryKey) {
        Serializable serializable = entityCache.getResult(
            CPDefinitionLinkModelImpl.ENTITY_CACHE_ENABLED, CPDefinitionLinkImpl.class, primaryKey);

        // nullModel is the cached marker for "looked up before, not present".
        if (serializable == nullModel) {
            return null;
        }

        CPDefinitionLink cpDefinitionLink = (CPDefinitionLink) serializable;

        if (cpDefinitionLink == null) {
            // Cache miss: load from the database.
            Session session = null;
            try {
                session = openSession();
                cpDefinitionLink = (CPDefinitionLink) session.get(CPDefinitionLinkImpl.class, primaryKey);
                if (cpDefinitionLink != null) {
                    cacheResult(cpDefinitionLink);
                } else {
                    // Remember the absence so future lookups skip the database.
                    entityCache.putResult(
                        CPDefinitionLinkModelImpl.ENTITY_CACHE_ENABLED, CPDefinitionLinkImpl.class,
                        primaryKey, nullModel);
                }
            } catch (Exception e) {
                // On failure, drop any stale cache entry before rethrowing.
                entityCache.removeResult(
                    CPDefinitionLinkModelImpl.ENTITY_CACHE_ENABLED, CPDefinitionLinkImpl.class, primaryKey);
                throw processException(e);
            } finally {
                closeSession(session);
            }
        }
        return cpDefinitionLink;
    }
}
|
public class RLogPanel { /** * System . getProperty ( " RLogPanel . filter " , " ( . * ) " ) ; */
public static void main ( String [ ] args ) throws Exception { } }
|
RLogPanel log = new RLogPanel ( ) ; JFrame f = new JFrame ( ) ; f . setContentPane ( log ) ; f . setVisible ( true ) ; f . pack ( ) ; Rsession R = RserveSession . newInstanceTry ( log , null ) ; R . rawEval ( "ls()" ) ;
|
public class ShootistSipServlet { /** * ( non - Javadoc )
* @ see javax . servlet . sip . SipServletListener # servletInitialized ( javax . servlet . sip . SipServletContextEvent ) */
public void servletInitialized ( SipServletContextEvent ce ) { } }
|
SipFactory sipFactory = ( SipFactory ) ce . getServletContext ( ) . getAttribute ( SIP_FACTORY ) ; SipApplicationSession sipApplicationSession = sipFactory . createApplicationSession ( ) ; URI fromURI = sipFactory . createSipURI ( "BigGuy" , "here.com" ) ; URI toURI = sipFactory . createSipURI ( "LittleGuy" , "there.com" ) ; SipServletRequest sipServletRequest = sipFactory . createRequest ( sipApplicationSession , "INVITE" , fromURI , toURI ) ; SipURI requestURI = sipFactory . createSipURI ( "LittleGuy" , "127.0.0.1:5090" ) ; sipServletRequest . setRequestURI ( requestURI ) ; try { sipServletRequest . send ( ) ; } catch ( IOException e ) { logger . error ( e ) ; }
|
public class SDKUtil {
    /**
     * Signs the request data map with the given certificate and writes the
     * signature back into the map.
     *
     * <p>Version 5.0.0/1.0.0/5.0.1 requests are signed with SHA-1 + RSA;
     * version 5.1.0 requests with SHA-256 + RSA. In both cases the
     * certificate serial number and the Base64 signature are stored into the
     * map under {@code param_certId} / {@code param_signature}.
     *
     * @param data the request fields to sign, as key/value pairs; mutated in place
     * @param certPath absolute path of the signing certificate (keystore)
     * @param certPwd certificate password
     * @param encoding character encoding for the digest input (defaults to UTF-8)
     * @return {@code true} if the signature was computed and stored, {@code false} on any validation or signing failure
     */
    public static boolean signByCertInfo(Map<String, String> data, String certPath, String certPwd, String encoding) {
        // Default the encoding when the caller did not supply one.
        if (isEmpty(encoding)) {
            encoding = "UTF-8";
        }
        if (isEmpty(certPath) || isEmpty(certPwd)) {
            LogUtil.writeErrorLog("CertPath or CertPwd is empty");
            return false;
        }
        String signMethod = data.get(param_signMethod);
        String version = data.get(SDKConstants.param_version);
        // For versions other than 1.0.0/5.0.1 an explicit signMethod is required.
        if (!VERSION_1_0_0.equals(version) && !VERSION_5_0_1.equals(version) && isEmpty(signMethod)) {
            LogUtil.writeErrorLog("signMethod must Not null");
            return false;
        }
        if (isEmpty(version)) {
            LogUtil.writeErrorLog("version must Not null");
            return false;
        }
        if (SIGNMETHOD_RSA.equals(signMethod) || VERSION_1_0_0.equals(version) || VERSION_5_0_1.equals(version)) {
            if (VERSION_5_0_0.equals(version) || VERSION_1_0_0.equals(version) || VERSION_5_0_1.equals(version)) {
                // Store the serial number of the signing certificate.
                data.put(SDKConstants.param_certId, CertUtil.getCertIdByKeyStoreMap(certPath, certPwd));
                // Flatten the map into "key1=value1&key2=value2" form for signing.
                String stringData = coverMap2String(data);
                LogUtil.writeLog("待签名请求报文串:[" + stringData + "]");
                byte[] byteSign = null;
                String stringSign = null;
                try {
                    // SHA-1 digest of the flattened data, hex-encoded.
                    byte[] signDigest = SecureUtil.sha1X16(stringData, encoding);
                    byteSign = SecureUtil.base64Encode(SecureUtil.signBySoft(
                        CertUtil.getSignCertPrivateKeyByStoreMap(certPath, certPwd), signDigest));
                    stringSign = new String(byteSign);
                    // Store the signature value into the request map.
                    data.put(SDKConstants.param_signature, stringSign);
                    return true;
                } catch (Exception e) {
                    LogUtil.writeErrorLog("Sign Error", e);
                    return false;
                }
            } else if (VERSION_5_1_0.equals(version)) {
                // Store the serial number of the signing certificate.
                data.put(SDKConstants.param_certId, CertUtil.getCertIdByKeyStoreMap(certPath, certPwd));
                // Flatten the map into "key1=value1&key2=value2" form for signing.
                String stringData = coverMap2String(data);
                LogUtil.writeLog("待签名请求报文串:[" + stringData + "]");
                byte[] byteSign = null;
                String stringSign = null;
                try {
                    // SHA-256 digest of the flattened data, hex-encoded.
                    byte[] signDigest = SecureUtil.sha256X16(stringData, encoding);
                    byteSign = SecureUtil.base64Encode(SecureUtil.signBySoft256(
                        CertUtil.getSignCertPrivateKeyByStoreMap(certPath, certPwd), signDigest));
                    stringSign = new String(byteSign);
                    // Store the signature value into the request map.
                    data.put(SDKConstants.param_signature, stringSign);
                    return true;
                } catch (Exception e) {
                    LogUtil.writeErrorLog("Sign Error", e);
                    return false;
                }
            }
        }
        return false;
    }
}
|
public class GuaranteedTargetStream { /** * This method will walk the oststream from the doubtHorizon to the stamp
* and send Nacks for any Unknown or Requested ticks it finds .
* It will also change Unknown ticks to Requested .
* @ exception GDException Thrown from the writeRange method */
@ Override public void processAckExpected ( long stamp ) { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "processAckExpected" , new Object [ ] { Long . valueOf ( stamp ) } ) ; List nackList = null ; List msgList = null ; boolean processNack = true ; // Take lock on target stream and hold it until messages have been
// added to batch by deliverOrderedMessages
synchronized ( this ) { synchronized ( oststream ) // see defect 289889
{ long completedPrefix = oststream . getCompletedPrefix ( ) ; long blockingTick = - 1 ; // Send Ack message for ticks up to completedPrefix
sendAck ( ) ; if ( isStreamBlocked ( ) ) { // If the link was implicitly blocked due to an ' unexpected ' problem when we tried
// to deliver a message ( the most obvious reason is due to not being authorized )
// we can ' t keep the stream blocked as we need to allow messages in again to re - try
// the deliverery of the message to be able to tell if the problem still exists .
// However , rather than nack all messages currently blocked on the stream ( which is
// what the source end will be ack - expecting ) we only nack the blocking message ,
// if that succeeds the stream will be left un - blocked , but if it fails the stream
// will be re - locked . If the stream is left un - blocked , the next time an ack - expected
// comes in we ' ll nack everything .
if ( isStreamBlockedUnexpectedly ( ) ) { stamp = linkBlockingTick ; blockingTick = linkBlockingTick ; // We need to unblock the stream , otherwise the failing message won ' t be re - delivered
// when it arrives ( it ' ll get bounced in writeValue ) , and that ' s the only way to tell
// is the failure is still valid ( as it ' s not detected by streamCanAcceptNewMessage ) .
setStreamIsBlocked ( false , DestinationHandler . OUTPUT_HANDLER_FOUND , null , null ) ; } else { // The stream has been marked as blocked . We should only send
// NACKs if we are able to accept the resent message once it arrives .
// Because ACKs are for the stream as a whole , if this is a link
// then we need to check whether the link ' s exception destination is
// able to accept messages . If no exception destination is defined on the
// link then we check the link blocking destination .
int blockingReason = deliverer . checkStillBlocked ( ) ; if ( blockingReason == DestinationHandler . OUTPUT_HANDLER_FOUND ) { // the destination is no longer blocked
setStreamIsBlocked ( false , DestinationHandler . OUTPUT_HANDLER_FOUND , null , null ) ; } else { // Its possible that the reason for the blockage has changed . This code allows
// the link health state to be altered to reflect that change ( but keeps the
// same blocking tick as that hasn ' t changed )
setStreamIsBlocked ( true , blockingReason , null , streamBlockingAddress ) ; // It ' s possible that the blockage has been cleared on the sending side without
// us knowing about it ( e . g . the offending message was deleted but the silence never
// made it to us ) . So , to err on the side of caution , we occasionally forget about
// the blockage and allow the blocking tick to be nack ' d ( just that one , not all
// of them , we don ' t want them all to be re - sent until we know the problem doesn ' t
// still exist ) . This will either cause the same problematic message to be re - sent
// and the link will stay blocked or a silence will arrive indicating the blockage
// has been cleared , allowing the subsequent ackExcepted to result in a full Nack
if ( blockingCount ++ > 3 ) { stamp = linkBlockingTick ; blockingCount = 0 ; blockingTick = linkBlockingTick ; // Note : In this case ( unlike the unexpected block case above ) we can leave the
// stream blocked as the original problem was detected by streamCanAcceptNewMessage ,
// if the mesasge is re - sent writeValue will have a chance of processing it if
// the call to streamCanAcceptNewMessage thinks the problem has been resolved .
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "Nacking the first tick (" + stamp + ") of a blocked link" ) ; } // The stream is blocked ( for now ) so we don ' t want the blocking message ( and all
// the ones built up behind it ) to be resent because we nack them !
else processNack = false ; } } } // We ' re allowed to send Nacks
if ( processNack ) { if ( stamp <= completedPrefix ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "processAckExpected" ) ; return ; } // walk the oststream from the completedPrefix to endstamp and Nack
// everything that is Unknown or Requested
// Change everything which is Unknown to Requested .
nackList = new ArrayList ( ) ; long startstamp = completedPrefix + 1 ; long endstamp = stamp ; oststream . setCursor ( startstamp ) ; TickRange tr2 = null ; TickRange tr1 = oststream . getNext ( ) ; do { if ( ( tr1 . type == TickRange . Unknown ) || ( tr1 . type == TickRange . Requested ) ) { long ss , es ; // Handle case where start or endstamps fall within a range
ss = tr1 . startstamp > startstamp ? tr1 . startstamp : startstamp ; es = endstamp > tr1 . endstamp ? tr1 . endstamp : endstamp ; TickRange tr = new TickRange ( TickRange . Requested , ss , es ) ; // Only need to update stream if this is currently Unknown
if ( tr1 . type == TickRange . Unknown ) { oststream . writeRange ( tr ) ; } // SIB0105
// Set the head msg health state to AMBER if neccessary
synchronized ( pendingAlarms ) { // If we have no current gaps then we must be missing msg at the head of the stream
if ( ! getAlarms ( this ) . hasNext ( ) ) { getControlAdapter ( ) . getHealthState ( ) . updateHealth ( HealthStateListener . MSG_LOST_ERROR_STATE , HealthState . AMBER ) ; lastNackTick = endstamp ; } } nackList . add ( tr ) ; } // If the stream is blocked but we want to force the message ( or a silence if it ' s
// been deleted on the source side ) to be sent again then we need to add the blocking
// range to the nack list , even if it ' s in value state ( which is possible if we tried
// to deliver it before but failed ( so it ' s already added to the stream , but not
// delivered yet ) .
else if ( ( tr1 . type == TickRange . Value ) && ( tr1 . endstamp == blockingTick ) ) { nackList . add ( tr1 ) ; } tr2 = tr1 ; tr1 = oststream . getNext ( ) ; } while ( ( tr1 . startstamp <= endstamp ) && ( tr1 != tr2 ) ) ; } // end processNack
else { // we did not process the AckExpected message for any nacks
// as we did not have room for the replies
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "did not process AckExpected message for nacks." ) ; } // PK57736 We should should this stimulus from the sending side
// to advance the doubt horizon , as it is possible they are not
// sending messages any longer ( which is the other stimulus ) .
msgList = advanceDoubtHorizon ( null ) ; } // end synchronized
// PK57736 Deliver messages outside of synchronise
// We do this because deliverOrderedMessages takes the
// BatchHandler lock and the BatchHandler callbacks require
// the stream lock to update the completedPrefix . If we call
// the BatchHandler when we hold the stream lock it could cause
// a deadlock
if ( msgList != null ) { // Call the Input or Output Handler to deliver the messages
try { deliverer . deliverOrderedMessages ( msgList , this , priority , reliability ) ; } catch ( SINotPossibleInCurrentConfigurationException e ) { // No FFDC code needed
SibTr . exception ( tc , e ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "processAckExpected" , "GDException" ) ; // Dont rethrow the exception . The GD protocols will handle the resend of
// the message
} catch ( SIException e ) { // FFDC
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.gd.GuaranteedTargetStream.processAckExpected" , "1:1201:1.110" , this ) ; SibTr . exception ( tc , e ) ; SibTr . error ( tc , "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.gd.GuaranteedTargetStream" , "1:1208:1.110" , e } ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "processAckExpected" , "GDException" ) ; throw new SIErrorException ( nls . getFormattedMessage ( "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.gd.GuaranteedTargetStream" , "1:1218:1.110" , e } , null ) , e ) ; } } } // end sync - release lock on target stream
// send nacks
for ( int j = 0 ; processNack && j < nackList . size ( ) ; j ++ ) { TickRange temptr = ( TickRange ) nackList . get ( j ) ; try { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "processAckExpected sending Nack from: " + temptr . startstamp + " to " + temptr . endstamp ) ; upControl . sendNackMessage ( streamSet . getRemoteMEUuid ( ) , streamSet . getDestUuid ( ) , streamSet . getBusUuid ( ) , temptr . startstamp , temptr . endstamp , priority , reliability , streamSet . getStreamID ( ) ) ; } catch ( SIResourceException e ) { // FFDC
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.gd.GuaranteedTargetStream.processAckExpected" , "1:1252:1.110" , this ) ; SibTr . exception ( tc , e ) ; SibTr . error ( tc , "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.gd.GuaranteedTargetStream" , "1:1259:1.110" , e } ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "processAckExpected" , e ) ; throw new SIErrorException ( nls . getFormattedMessage ( "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.gd.GuaranteedTargetStream" , "1:1270:1.110" , e } , null ) , e ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "processAckExpected" ) ;
|
public class AbstractRemoteDocker { /** * / * - - - Public methods - - - */
public Set < AbstractRemoteDockerImage > pullRemoteDockerImages ( ) { } }
|
if ( isAllSoftwareRequiredInstalled ( ) ) { if ( loginToRemoteRegistry ( ) ) { imagesFound = getRemoteRegistryImagesList ( ) ; if ( imagesFound != null && ! imagesFound . isEmpty ( ) ) { imagesPulled = pullImagesFromRemoteRegistry ( ) ; } logger . info ( "{} New images were pulled" , pulledImagesCount ) ; logger . info ( "{} Images are up to date (not pulled)" , existingImagesCount ) ; // Logout from account if UA logged in
logoutRemoteDocker ( ) ; } } return imagesPulled ;
|
public class AbstractControllerService { /** * Boot , optionally disabling model and capability registry validation , using the given provider for the root
* { @ link ManagementResourceRegistration } .
* @ param bootOperations the operations . Cannot be { @ code null }
* @ param rollbackOnRuntimeFailure { @ code true } if the boot should fail if operations fail in the runtime stage
* @ param skipModelValidation { @ code true } if model and capability validation should be skipped .
* @ param parallelBootRootResourceRegistrationProvider provider of the root resource registration
* @ return { @ code true } if boot was successful
* @ throws ConfigurationPersistenceException */
protected boolean boot ( List < ModelNode > bootOperations , boolean rollbackOnRuntimeFailure , boolean skipModelValidation , MutableRootResourceRegistrationProvider parallelBootRootResourceRegistrationProvider ) throws ConfigurationPersistenceException { } }
|
return controller . boot ( bootOperations , OperationMessageHandler . logging , ModelController . OperationTransactionControl . COMMIT , rollbackOnRuntimeFailure , parallelBootRootResourceRegistrationProvider , skipModelValidation , getPartialModelIndicator ( ) . isModelPartial ( ) ) ;
|
public class BpsimFactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public ResultType createResultTypeObjectFromString ( EDataType eDataType , String initialValue ) { } }
|
return createResultTypeFromString ( BpsimPackage . Literals . RESULT_TYPE , initialValue ) ;
|
public class AbstractProjectCommand { /** * Filters the given value choices according the current enabled stack
* @ param select the { @ link SelectComponent } containing the value choices to be filtered
* @ return < code > true < / code > if it should be displayed in the UI */
protected < T extends ProjectFacet > boolean filterValueChoicesFromStack ( Project project , UISelectOne < T > select ) { } }
|
boolean result = true ; Optional < Stack > stackOptional = project . getStack ( ) ; // Filtering only supported facets
if ( stackOptional . isPresent ( ) ) { Stack stack = stackOptional . get ( ) ; Iterable < T > valueChoices = select . getValueChoices ( ) ; Set < T > filter = stack . filter ( select . getValueType ( ) , valueChoices ) ; select . setValueChoices ( filter ) ; if ( filter . size ( ) == 1 ) { select . setDefaultValue ( filter . iterator ( ) . next ( ) ) ; result = false ; } else if ( filter . size ( ) == 0 ) { result = false ; } // FIXME : JBIDE - 21584 : Contains return false because of proxy class
// else if ( ! filter . contains ( select . getValue ( ) ) )
// select . setDefaultValue ( ( T ) null ) ;
} return result ;
|
public class Solo { /** * Asserts that the Activity matching the specified class is active , with the possibility to
* verify that the expected Activity is a new instance of the Activity .
* @ param message the message to display if the assert fails
* @ param activityClass the class of the Activity that is expected to be active . Example is : { @ code MyActivity . class }
* @ param isNewInstance { @ code true } if the expected { @ link Activity } is a new instance of the { @ link Activity } */
@ SuppressWarnings ( "unchecked" ) public void assertCurrentActivity ( String message , @ SuppressWarnings ( "rawtypes" ) Class activityClass , boolean isNewInstance ) { } }
|
if ( config . commandLogging ) { Log . d ( config . commandLoggingTag , "assertCurrentActivity(\"" + message + "\", " + activityClass + ", " + isNewInstance + ")" ) ; } asserter . assertCurrentActivity ( message , activityClass , isNewInstance ) ;
|
public class BaseSimpleReact { /** * This internal method has been left protected , so it can be mocked / stubbed as some of the entry points are final */
@ SuppressWarnings ( "unchecked" ) protected < U > BaseSimpleReactStream < U > reactI ( final Supplier < U > ... actions ) { } }
|
return construct ( Stream . of ( actions ) . map ( next -> CompletableFuture . supplyAsync ( next , getExecutor ( ) ) ) ) ;
|
public class KTypeVTypeHashMap { /** * { @ inheritDoc } */
@ Override public void clear ( ) { } }
|
// Reset size bookkeeping and null out all key slots so previously stored
// objects can be garbage collected; backing arrays keep their capacity.
assigned = 0 ; hasEmptyKey = false ; Arrays . fill ( keys , Intrinsics . < KType > empty ( ) ) ; /* # if ( $ TemplateOptions . VTypeGeneric ) */
// Value slots are only cleared when the template generates a generic VType.
Arrays . fill ( values , Intrinsics . < VType > empty ( ) ) ; /* # end */
|
public class BandwidthClient { /** * This method implements an HTTP put . Use this method to update a resource .
* @ param uri the URI
* @ param params the parameters .
* @ return the put response .
* @ throws IOException unexpected exception .
* @ throws AppPlatformException unexpected exception . */
public RestResponse put ( final String uri , final Map < String , Object > params ) throws IOException , AppPlatformException { } }
|
return request ( getPath ( uri ) , HttpPut . METHOD_NAME , params ) ;
|
public class FixedModeScheduleActionStartSettingsMarshaller { /** * Marshall the given parameter object . */
public void marshall ( FixedModeScheduleActionStartSettings fixedModeScheduleActionStartSettings , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( fixedModeScheduleActionStartSettings == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( fixedModeScheduleActionStartSettings . getTime ( ) , TIME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class AvatarZooKeeperClient { /** * Retrieves the last transaction id of the primary from zookeeper .
* @ param address
* the address of the cluster
* @ param sync
* whether or not to perform a sync before read
* @ throws IOException
* @ throws KeeperException
* @ throws InterruptedException */
public ZookeeperTxId getPrimaryLastTxId ( String address , boolean sync ) throws IOException , KeeperException , InterruptedException , ClassNotFoundException { } }
|
Stat stat = new Stat ( ) ; String node = getLastTxIdNode ( address ) ; byte [ ] data = getNodeData ( node , stat , false , sync ) ; if ( data == null ) { return null ; } return ZookeeperTxId . getFromBytes ( data ) ;
|
public class RTMPProtocolEncoder { /** * Encodes object with given protocol state to byte buffer
* @ param message
* Object to encode
* @ return IoBuffer with encoded data
* @ throws Exception
* Any decoding exception */
public IoBuffer encode ( Object message ) throws Exception { } }
|
if ( message != null ) { try { return encodePacket ( ( Packet ) message ) ; } catch ( Exception e ) { log . error ( "Error encoding" , e ) ; } } else if ( log . isDebugEnabled ( ) ) { try { String callingMethod = Thread . currentThread ( ) . getStackTrace ( ) [ 4 ] . getMethodName ( ) ; log . debug ( "Message is null at encode, expecting a Packet from: {}" , callingMethod ) ; } catch ( Throwable t ) { log . warn ( "Problem getting current calling method from stacktrace" , t ) ; } } return null ;
|
public class JsonDiff { /** * TODO this is quite unclear and needs some serious documentation */
private static boolean isAllowed ( JsonPointer source , JsonPointer destination ) { } }
|
boolean isSame = source . equals ( destination ) ; int i = 0 ; int j = 0 ; // Hack to fix broken COPY operation , need better handling here
while ( i < source . size ( ) && j < destination . size ( ) ) { JsonPointer . RefToken srcValue = source . get ( i ) ; JsonPointer . RefToken dstValue = destination . get ( j ) ; String srcStr = srcValue . toString ( ) ; String dstStr = dstValue . toString ( ) ; if ( isNumber ( srcStr ) && isNumber ( dstStr ) ) { if ( srcStr . compareTo ( dstStr ) > 0 ) { return false ; } } i ++ ; j ++ ; } return ! isSame ;
|
public class Option { /** * Returns an { @ code Option } with the specified present value if it is not
* { @ code null } or { @ link # NONE } otherwise .
* @ param value
* the value
* @ param < T >
* the type of the value
* @ return an { @ code Option } describing the value if it is not { @ code null }
* or { @ link # NONE } if the value is { @ code null }
* @ since 0.2 */
@ SuppressWarnings ( "unchecked" ) public static < T > Option < T > of ( T value ) { } }
|
return null == value ? NONE : some ( value ) ;
|
public class Encoder { /** * Returns a < code > PersistenceDelegate < / code > for the given class type .
* The < code > PersistenceDelegate < / code > is determined as following :
* < ol >
* < li > If a < code > PersistenceDelegate < / code > has been registered by calling < code > setPersistenceDelegate < / code > for the given type , it is returned . < / li >
* < li > If the given type is an array class , a special < code > PersistenceDelegate < / code > for array types is returned . < / li >
* < li > If the given type is a proxy class , a special < code > PersistenceDelegate < / code > for proxy classes is returned . < / li >
* < li > < code > Introspector < / code > is used to check the bean descriptor value " persistenceDelegate " . If one is set , it is returned . < / li >
* < li > If none of the above applies , the < code > DefaultPersistenceDelegate < / code > is returned . < / li >
* < / ol >
* @ param type
* a class type
* @ return a < code > PersistenceDelegate < / code > for the given class type */
public PersistenceDelegate getPersistenceDelegate ( Class < ? > type ) { } }
|
if ( type == null ) { return nullPD ; // may be return a special PD ?
} // registered delegate
PersistenceDelegate registeredPD = delegates . get ( type ) ; if ( registeredPD != null ) { return registeredPD ; } if ( type . getName ( ) . startsWith ( UtilCollectionsPersistenceDelegate . CLASS_PREFIX ) ) { return utilCollectionsPD ; } if ( type . isArray ( ) ) { return arrayPD ; } if ( Proxy . isProxyClass ( type ) ) { return proxyPD ; } // check " persistenceDelegate " property
try { BeanInfo beanInfo = Introspector . getBeanInfo ( type ) ; if ( beanInfo != null ) { PersistenceDelegate pd = ( PersistenceDelegate ) beanInfo . getBeanDescriptor ( ) . getValue ( "persistenceDelegate" ) ; // $ NON - NLS - 1 $
if ( pd != null ) { return pd ; } } } catch ( Exception e ) { // Ignored
} // default persistence delegate
return defaultPD ;
|
public class CellConverterRegistry { /** * タイプに対する { @ link CellConverter } を登録する 。
* @ param clazz 変換対象のJavaのクラスタイプ 。
* @ param converterFactory 変換する { @ link CellConverterFactory } のインスタンス 。 */
public < T > void registerConverter ( final Class < T > clazz , final CellConverterFactory < T > converterFactory ) { } }
|
ArgUtils . notNull ( clazz , "clazz" ) ; ArgUtils . notNull ( converterFactory , "converterFactory" ) ; converterFactoryMap . put ( clazz , converterFactory ) ;
|
public class TypeValidator { /** * Expect that the given variable has not been declared with a type .
* @ param sourceName The name of the source file we ' re in .
* @ param n The node where warnings should point to .
* @ param parent The parent of { @ code n } .
* @ param var The variable that we ' re checking .
* @ param variableName The name of the variable .
* @ param newType The type being applied to the variable . Mostly just here
* for the benefit of the warning .
* @ return The variable we end up with . Most of the time , this will just
* be { @ code var } , but in some rare cases we will need to declare
* a new var with new source info . */
TypedVar expectUndeclaredVariable ( String sourceName , CompilerInput input , Node n , Node parent , TypedVar var , String variableName , JSType newType ) { } }
|
TypedVar newVar = var ; JSType varType = var . getType ( ) ; // Only report duplicate declarations that have types . Other duplicates
// will be reported by the syntactic scope creator later in the
// compilation process .
if ( varType != null && varType != typeRegistry . getNativeType ( UNKNOWN_TYPE ) && newType != null && newType != typeRegistry . getNativeType ( UNKNOWN_TYPE ) ) { // If there are two typed declarations of the same variable , that
// is an error and the second declaration is ignored , except in the
// case of native types . A null input type means that the declaration
// was made in TypedScopeCreator # createInitialScope and is a
// native type . We should redeclare it at the new input site .
if ( var . input == null ) { TypedScope s = var . getScope ( ) ; s . undeclare ( var ) ; newVar = s . declare ( variableName , n , varType , input , false ) ; n . setJSType ( varType ) ; if ( parent . isVar ( ) ) { if ( n . hasChildren ( ) ) { n . getFirstChild ( ) . setJSType ( varType ) ; } } else { checkState ( parent . isFunction ( ) || parent . isClass ( ) ) ; parent . setJSType ( varType ) ; } } else { // Check for @ suppress duplicate or similar warnings guard on the previous variable
// declaration location .
boolean allowDupe = hasDuplicateDeclarationSuppression ( compiler , var . getNameNode ( ) ) ; // If the previous definition doesn ' t suppress the warning , emit it here ( i . e . always emit
// on the second of the duplicate definitions ) . The warning might still be suppressed by an
// @ suppress tag on this declaration .
if ( ! allowDupe ) { // Report specifically if it is not just a duplicate , but types also don ' t mismatch .
// NOTE : structural matches are explicitly allowed here .
if ( ! newType . isEquivalentTo ( varType , true ) ) { report ( JSError . make ( n , DUP_VAR_DECLARATION_TYPE_MISMATCH , variableName , newType . toString ( ) , var . getInputName ( ) , String . valueOf ( var . nameNode . getLineno ( ) ) , varType . toString ( ) ) ) ; } else if ( ! var . getParentNode ( ) . isExprResult ( ) ) { // If the type matches and the previous declaration was a stub declaration
// ( isExprResult ) , then ignore the duplicate , otherwise emit an error .
report ( JSError . make ( n , DUP_VAR_DECLARATION , variableName , var . getInputName ( ) , String . valueOf ( var . nameNode . getLineno ( ) ) ) ) ; } } } } return newVar ;
|
public class IdentityMap { /** * Returns < tt > true < / tt > if this map maps one or more keys to the
* specified value .
* @ param value value whose presence in this map is to be tested .
* @ return < tt > true < / tt > if this map maps one or more keys to the
* specified value . */
public boolean containsValue ( Object value ) { } }
|
Entry tab [ ] = mTable ; if ( value == null ) { for ( int i = tab . length ; i -- > 0 ; ) { for ( Entry e = tab [ i ] , prev = null ; e != null ; e = e . mNext ) { if ( e . getKey ( ) == null ) { // Clean up after a cleared Reference .
mModCount ++ ; if ( prev != null ) { prev . mNext = e . mNext ; } else { tab [ i ] = e . mNext ; } mCount -- ; } else if ( e . mValue == null ) { return true ; } else { prev = e ; } } } } else { for ( int i = tab . length ; i -- > 0 ; ) { for ( Entry e = tab [ i ] , prev = null ; e != null ; e = e . mNext ) { if ( e . getKey ( ) == null ) { // Clean up after a cleared Reference .
mModCount ++ ; if ( prev != null ) { prev . mNext = e . mNext ; } else { tab [ i ] = e . mNext ; } mCount -- ; } else if ( value . equals ( e . mValue ) ) { return true ; } else { prev = e ; } } } } return false ;
|
public class Subframe_LPC { /** * Get the data from the last encode attempt . Data is returned in an
* EncodedElement , properly packed at the bit - level to be added directly to
* a FLAC stream .
* @ return EncodedElement containing encoded subframe */
public EncodedElement getData ( ) { } }
|
EncodedElement result = new EncodedElement ( _totalBits / 8 + 1 , _offset ) ; // result . clear ( ( int ) _ totalBits + 1 , _ offset ) ;
writeLPC ( _samples , _lastCount , _start , _increment , result , _frameSampleSize , _lowOrderBits , _precision , _shift , _quantizedCoeffs , _errors , _lpcOrder , rice ) ; int totalBits = result . getTotalBits ( ) ; this . lastEncodedSize = ( int ) totalBits ; if ( DEBUG_LEV > 0 ) { System . err . println ( "lastencodedSize set: " + this . lastEncodedSize ) ; System . err . println ( "Subframe_LPC::getData(...): End" ) ; } return result ;
|
public class DateSpinner { /** * Sets the minimum allowed date .
* Spinner items and dates in the date picker before the given date will get disabled .
* @ param minDate The minimum date , or null to clear the previous min date . */
public void setMinDate ( @ Nullable Calendar minDate ) { } }
|
this . minDate = minDate ; // update the date picker ( even if it is not used right now )
if ( minDate == null ) datePickerDialog . setMinDate ( MINIMUM_POSSIBLE_DATE ) ; else if ( maxDate != null && compareCalendarDates ( minDate , maxDate ) > 0 ) throw new IllegalArgumentException ( "Minimum date must be before maximum date!" ) ; else datePickerDialog . setMinDate ( new CalendarDay ( minDate ) ) ; updateEnabledItems ( ) ;
|
public class AbstractTopology { /** * The default placement group ( a . k . a . rack - aware group ) is " 0 " , if user override the setting
* in command line configuration , we need to check whether the partition layout meets the
* requirement ( tolerate entire rack loss without shutdown the cluster ) . And also because we
* support partition group by default , if we can meet both requirements ( we prefer partition
* group because online upgrade with minimum hardware option needs it ) , at least we need tell
* user the fact .
* @ return null if the topology is balanced , otherwise return the error message */
public String validateLayout ( Set < Integer > liveHosts ) { } }
|
if ( m_unbalancedPartitionCount > 0 ) { return String . format ( "%d out of %d partitions are unbalanced across placement groups." , m_unbalancedPartitionCount , partitionsById . size ( ) ) ; } if ( liveHosts == null ) { return null ; } // verify the partition leaders on live hosts
for ( Host host : hostsById . values ( ) ) { if ( liveHosts . contains ( Integer . valueOf ( host . id ) ) ) { for ( Partition p : host . partitions ) { if ( ! liveHosts . contains ( Integer . valueOf ( p . leaderHostId ) ) ) { return String . format ( "The leader host %d of partition %d is not on live host." , p . leaderHostId , p . id ) ; } } } } return null ;
|
public class RecurlyClient { /** * Lookup all coupon redemptions on a subscription given query params .
* @ param subscriptionUuid String subscription uuid
* @ param params { @ link QueryParams }
* @ return the coupon redemptions for this subscription on success , null otherwise */
public Redemptions getCouponRedemptionsBySubscription ( final String subscriptionUuid , final QueryParams params ) { } }
|
return doGET ( Subscription . SUBSCRIPTION_RESOURCE + "/" + subscriptionUuid + Redemptions . REDEMPTIONS_RESOURCE , Redemptions . class , params ) ;
|
public class ST_ConstrainedDelaunay { /** * Build a constrained delaunay triangulation based on a geometry
* ( point , line , polygon )
* @ param geometry
* @ param flag
* @ return a set of polygons ( triangles )
* @ throws SQLException */
public static GeometryCollection createCDT ( Geometry geometry , int flag ) throws SQLException { } }
|
if ( geometry != null ) { DelaunayData delaunayData = new DelaunayData ( ) ; delaunayData . put ( geometry , DelaunayData . MODE . CONSTRAINED ) ; delaunayData . triangulate ( ) ; if ( flag == 0 ) { return delaunayData . getTriangles ( ) ; } else if ( flag == 1 ) { return delaunayData . getTrianglesSides ( ) ; } else { throw new SQLException ( "Only flag 0 or 1 is supported." ) ; } } return null ;
|
public class DomesticResult { /** * json format :
* < pre >
* " g1 " : {
* " type " : 1,
* " address " : " 台湾 "
* " g2 " : {
* " type " : 1,
* " address " : " 河北省 衡水市 武强县 "
* " g3 " : {
* " type " : 0,
* " address " : " "
* < / pre >
* @ param json response json string
* @ return list of DomesticResult */
public static List < DomesticResult > parseList ( JsonNode json ) { } }
|
List < DomesticResult > results = new ArrayList < DomesticResult > ( ) ; for ( int i = 1 ; i <= MAX_RESULTS ; i ++ ) { String itemFieldName = String . format ( "g%d" , i ) ; if ( json . has ( itemFieldName ) ) { results . add ( parseItem ( json . get ( itemFieldName ) ) ) ; } else { break ; } } return results ;
|
public class CrumbIssuer { /** * Get a crumb from multipart form data and validate it against other data
* in the current request . The salt and request parameter that is used is
* defined by the current configuration .
* @ param request
* @ param parser */
public boolean validateCrumb ( ServletRequest request , MultipartFormDataParser parser ) { } }
|
CrumbIssuerDescriptor < CrumbIssuer > desc = getDescriptor ( ) ; String crumbField = desc . getCrumbRequestField ( ) ; String crumbSalt = desc . getCrumbSalt ( ) ; return validateCrumb ( request , crumbSalt , parser . get ( crumbField ) ) ;
|
public class Dim { /** * Called when a script exception has been thrown . */
private void handleExceptionThrown ( Context cx , Throwable ex , StackFrame frame ) { } }
|
if ( breakOnExceptions ) { ContextData cd = frame . contextData ( ) ; if ( cd . lastProcessedException != ex ) { interrupted ( cx , frame , ex ) ; cd . lastProcessedException = ex ; } }
|
public class DistCp { /** * Job configuration */
@ SuppressWarnings ( "deprecation" ) private static JobConf createJobConfForCopyByChunk ( Configuration conf ) { } }
|
JobConf jobconf = new JobConf ( conf , DistCp . class ) ; jobconf . setJobName ( NAME ) ; // turn off speculative execution , because DFS doesn ' t handle
// multiple writers to the same file .
jobconf . setReduceSpeculativeExecution ( false ) ; jobconf . setMapOutputKeyClass ( Text . class ) ; jobconf . setMapOutputValueClass ( IntWritable . class ) ; jobconf . setOutputKeyClass ( Text . class ) ; jobconf . setOutputValueClass ( Text . class ) ; jobconf . setInputFormat ( CopyByChunkInputFormat . class ) ; jobconf . setMapperClass ( CopyFilesByChunkMapper . class ) ; jobconf . setReducerClass ( CopyFilesByChunkReducer . class ) ; // Prevent the reducer from starting until all maps are done .
jobconf . setInt ( "mapred.job.rushreduce.reduce.threshold" , 0 ) ; jobconf . setFloat ( "mapred.reduce.slowstart.completed.maps" , 1.0f ) ; return jobconf ;
|
public class CmsWorkplaceManager { /** * Returns the { @ link CmsWorkplaceMessages } for the given locale . < p >
* The workplace messages are a collection of resource bundles , containing the messages
* for all OpenCms core bundles and of all initialized modules . < p >
* Please note that the message objects are cached internally .
* The returned message object should therefore never be modified directly in any way . < p >
* @ param locale the locale to get the messages for
* @ return the { @ link CmsWorkplaceMessages } for the given locale */
public CmsWorkplaceMessages getMessages ( Locale locale ) { } }
|
CmsWorkplaceMessages result = m_messages . get ( locale ) ; if ( result != null ) { // messages have already been read
return result ; } // messages have not been read so far
synchronized ( this ) { // check again
result = m_messages . get ( locale ) ; if ( result == null ) { result = new CmsWorkplaceMessages ( locale ) ; m_messages . put ( locale , result ) ; } } return result ;
|
public class EasyPredictModelWrapper { private void validateModelCategory ( ModelCategory c ) throws PredictException { } }
|
if ( ! m . getModelCategories ( ) . contains ( c ) ) throw new PredictException ( c + " prediction type is not supported for this model." ) ;
|
public class Triangle3d { /** * { @ inheritDoc } */
@ Override public void setP1 ( Point3D point ) { } }
|
setP1 ( point . getX ( ) , point . getY ( ) , point . getZ ( ) ) ;
|
public class UfsJournalSnapshot { /** * Gets the first journal log sequence number that is not yet checkpointed .
* @ return the first journal log sequence number that is not yet checkpointed */
static long getNextLogSequenceNumberToCheckpoint ( UfsJournal journal ) throws IOException { } }
|
List < UfsJournalFile > checkpoints = new ArrayList < > ( ) ; UfsStatus [ ] statuses = journal . getUfs ( ) . listStatus ( journal . getCheckpointDir ( ) . toString ( ) ) ; if ( statuses != null ) { for ( UfsStatus status : statuses ) { UfsJournalFile file = UfsJournalFile . decodeCheckpointFile ( journal , status . getName ( ) ) ; if ( file != null ) { checkpoints . add ( file ) ; } } Collections . sort ( checkpoints ) ; } if ( checkpoints . isEmpty ( ) ) { return 0 ; } return checkpoints . get ( checkpoints . size ( ) - 1 ) . getEnd ( ) ;
|
public class DeleteUserPoolClientRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DeleteUserPoolClientRequest deleteUserPoolClientRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( deleteUserPoolClientRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteUserPoolClientRequest . getUserPoolId ( ) , USERPOOLID_BINDING ) ; protocolMarshaller . marshall ( deleteUserPoolClientRequest . getClientId ( ) , CLIENTID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class DtoSearchConverterServiceImpl { /** * Prune impossible combinations .
* ( eg . If And criteria filter different layers , they will return nothing , so they are pruned ) . */
void prune ( AndCriterion criterion ) { } }
|
Set < String > usedLayers = new HashSet < String > ( ) ; Set < String > badLayers = new HashSet < String > ( ) ; criterion . serverLayerIdVisitor ( usedLayers ) ; findUnmatchedLayers ( criterion , usedLayers , badLayers ) ; if ( usedLayers . isEmpty ( ) ) { criterion . getCriteria ( ) . clear ( ) ; } else if ( ! badLayers . isEmpty ( ) ) { removeUnmatchedLayers ( criterion , badLayers ) ; }
|
public class LinuxTaskController { /** * Convenience method used to sending appropriate Kill signal to the task
* VM
* @ param context
* @ param command
* @ throws IOException */
private void finishTask ( TaskControllerContext context , TaskCommands command ) throws IOException { } }
|
if ( context . task == null ) { LOG . info ( "Context task null not killing the JVM" ) ; return ; } ShellCommandExecutor shExec = buildTaskControllerExecutor ( command , context . env . conf . getUser ( ) , buildKillTaskCommandArgs ( context ) , context . env . workDir , context . env . env ) ; try { shExec . execute ( ) ; } catch ( Exception e ) { LOG . warn ( "Output from task-contoller is : " + shExec . getOutput ( ) ) ; throw new IOException ( e ) ; }
|
public class DynamicEndpointGroup { /** * Sets the specified { @ link Endpoint } s as current { @ link Endpoint } list . */
protected final void setEndpoints ( Iterable < Endpoint > endpoints ) { } }
|
final List < Endpoint > oldEndpoints = this . endpoints ; final List < Endpoint > newEndpoints = ImmutableList . sortedCopyOf ( endpoints ) ; if ( oldEndpoints . equals ( newEndpoints ) ) { return ; } endpointsLock . lock ( ) ; try { this . endpoints = newEndpoints ; } finally { endpointsLock . unlock ( ) ; } notifyListeners ( newEndpoints ) ; completeInitialEndpointsFuture ( newEndpoints ) ;
|
public class CaliperMain { /** * Entry point for the caliper benchmark runner application ; run with { @ code - - help } for details . */
public static void main ( String [ ] args ) { } }
|
PrintWriter stdout = new PrintWriter ( System . out , true ) ; PrintWriter stderr = new PrintWriter ( System . err , true ) ; int code = 1 ; // pessimism !
try { exitlessMain ( args , stdout , stderr ) ; code = 0 ; } catch ( InvalidCommandException e ) { e . display ( stderr ) ; code = e . exitCode ( ) ; } catch ( InvalidBenchmarkException e ) { e . display ( stderr ) ; } catch ( InvalidConfigurationException e ) { e . display ( stderr ) ; } catch ( Throwable t ) { t . printStackTrace ( stderr ) ; stdout . println ( ) ; stdout . println ( "An unexpected exception has been thrown by the caliper runner." ) ; stdout . println ( "Please see https://sites.google.com/site/caliperusers/issues" ) ; } stdout . flush ( ) ; stderr . flush ( ) ; System . exit ( code ) ;
|
public class Matchers { /** * Applies the given matcher recursively to all descendants of an AST node , and matches if any
* matching descendant node is found .
* @ param clazz The type of node to be matched .
* @ param treeMatcher The matcher to apply recursively to the tree . */
public static < T extends Tree , V extends Tree > Matcher < T > contains ( Class < V > clazz , Matcher < V > treeMatcher ) { } }
|
final Matcher < Tree > contains = new Contains ( toType ( clazz , treeMatcher ) ) ; return contains :: matches ;
|
public class BufferedFileWriter { /** * Gets a writer that can write to the supplied file using the UTF - 8 charset .
* @ param aFile A file to which to write
* @ return A writer that writes to the supplied file
* @ throws FileNotFoundException If the supplied file cannot be found */
private static Writer getWriter ( final File aFile ) throws FileNotFoundException { } }
|
try { return new OutputStreamWriter ( new FileOutputStream ( aFile ) , StandardCharsets . UTF_8 . name ( ) ) ; } catch ( final java . io . UnsupportedEncodingException details ) { throw new UnsupportedEncodingException ( details , StandardCharsets . UTF_8 . name ( ) ) ; }
|
public class PipelineHistoryService { /** * we need placeholder stage for unscheduled stages in pipeline , so we can trigger it */
private void populatePlaceHolderStages ( PipelineInstanceModel pipeline ) { } }
|
StageInstanceModels stageHistory = pipeline . getStageHistory ( ) ; String pipelineName = pipeline . getName ( ) ; appendFollowingStagesFromConfig ( pipelineName , stageHistory ) ;
|
public class ParseUtil { /** * Convert a partially parsed LIKE expression , in which the pattern and escape are
* in the form of token images , to the proper Selector expression to represent
* the LIKE expression
* @ param arg the argument of the like
* @ param pattern the pattern image ( still containing leading and trailing ' )
* @ param escape the escape ( still containing leading and trailing ' ) or null
* @ return an appropriate Selector or null if the pattern and / or escape are
* syntactically invalid */
static Selector convertLike ( Selector arg , String pattern , String escape ) { } }
|
try { pattern = reduceStringLiteralToken ( pattern ) ; boolean escaped = false ; char esc = 0 ; if ( escape != null ) { escape = reduceStringLiteralToken ( escape ) ; if ( escape . length ( ) != 1 ) return null ; escaped = true ; esc = escape . charAt ( 0 ) ; } return Matching . getInstance ( ) . createLikeOperator ( arg , pattern , escaped , esc ) ; } catch ( Exception e ) { // No FFDC Code Needed .
// FFDC driven by wrapper class .
FFDC . processException ( cclass , "com.ibm.ws.sib.matchspace.selector.impl.ParseUtil.convertLike" , e , "1:183:1.19" ) ; // This should never occur as to get into this we should be missing
// the sib . matchspace jar file , but we are already in it .
throw new RuntimeException ( e ) ; }
|
public class AbstractRegisteredServiceAttributeReleasePolicy { /** * Determines a default bundle of attributes that may be released to all services
* without the explicit mapping for each service .
* @ param p the principal
* @ param attributes the attributes
* @ return the released by default attributes */
protected Map < String , List < Object > > getReleasedByDefaultAttributes ( final Principal p , final Map < String , List < Object > > attributes ) { } }
|
val ctx = ApplicationContextProvider . getApplicationContext ( ) ; if ( ctx != null ) { LOGGER . trace ( "Located application context. Retrieving default attributes for release, if any" ) ; val props = ctx . getAutowireCapableBeanFactory ( ) . getBean ( CasConfigurationProperties . class ) ; val defaultAttrs = props . getAuthn ( ) . getAttributeRepository ( ) . getDefaultAttributesToRelease ( ) ; LOGGER . debug ( "Default attributes for release are: [{}]" , defaultAttrs ) ; val defaultAttributesToRelease = new TreeMap < String , List < Object > > ( String . CASE_INSENSITIVE_ORDER ) ; defaultAttrs . forEach ( key -> { if ( attributes . containsKey ( key ) ) { LOGGER . debug ( "Found and added default attribute for release: [{}]" , key ) ; defaultAttributesToRelease . put ( key , attributes . get ( key ) ) ; } } ) ; return defaultAttributesToRelease ; } return new TreeMap < > ( ) ;
|
public class MockHttpServletResponse { /** * Retrieves the content written to the response .
* @ return the content written to the response outputStream or printWriter . Null is returned if neither
* { @ link # getOutputStream ( ) } or { @ link # getWriter ( ) } have been called . */
public String getOutputAsString ( ) { } }
|
if ( stringWriter != null ) { return stringWriter . toString ( ) ; } else if ( outputStream != null ) { String outputStr = null ; byte [ ] bytes = outputStream . getOutput ( ) ; if ( bytes != null ) { try { outputStr = new String ( bytes , characterEncoding ) ; } catch ( UnsupportedEncodingException e ) { outputStr = null ; } } return outputStr ; } return null ;
|
public class LevenbergMarquardt { /** * Finds the best fit parameters .
* @ param function The function being optimized
* @ param parameters ( Input / Output ) initial parameter estimate and storage for optimized parameters
* @ return true if it succeeded and false if it did not . */
public boolean optimize ( ResidualFunction function , DMatrixRMaj parameters ) { } }
|
configure ( function , parameters . getNumElements ( ) ) ; // save the cost of the initial parameters so that it knows if it improves or not
double previousCost = initialCost = cost ( parameters ) ; // iterate until the difference between the costs is insignificant
double lambda = initialLambda ; // if it should recompute the Jacobian in this iteration or not
boolean computeHessian = true ; for ( int iter = 0 ; iter < maxIterations ; iter ++ ) { if ( computeHessian ) { // compute some variables based on the gradient
computeGradientAndHessian ( parameters ) ; computeHessian = false ; // check for convergence using gradient test
boolean converged = true ; for ( int i = 0 ; i < g . getNumElements ( ) ; i ++ ) { if ( Math . abs ( g . data [ i ] ) > gtol ) { converged = false ; break ; } } if ( converged ) return true ; } // H = H + lambda * I
for ( int i = 0 ; i < H . numRows ; i ++ ) { H . set ( i , i , Hdiag . get ( i ) + lambda ) ; } // In robust implementations failure to solve is handled much better
if ( ! CommonOps_DDRM . solve ( H , g , negativeStep ) ) { return false ; } // compute the candidate parameters
CommonOps_DDRM . subtract ( parameters , negativeStep , candidateParameters ) ; double cost = cost ( candidateParameters ) ; if ( cost <= previousCost ) { // the candidate parameters produced better results so use it
computeHessian = true ; parameters . set ( candidateParameters ) ; // check for convergence
// ftol < = ( cost ( k ) - cost ( k + 1 ) ) / cost ( k )
boolean converged = ftol * previousCost >= previousCost - cost ; previousCost = cost ; lambda /= 10.0 ; if ( converged ) { return true ; } } else { lambda *= 10.0 ; } } finalCost = previousCost ; return true ;
|
public class SimpleTimeZone { /** * Sets the daylight savings ending rule . For example , in the U . S . , Daylight
* Savings Time ends at the first Sunday in November , at 2 AM in standard time .
* Therefore , you can set the end rule by calling :
* setEndRule ( Calendar . NOVEMBER , 1 , Calendar . SUNDAY , 2*60*60*1000 ) ;
* Various other types of rules can be specified by manipulating the dayOfWeek
* and dayOfWeekInMonth parameters . For complete details , see the documentation
* for setStartRule ( ) .
* @ param month the daylight savings ending month . Month is 0 - based .
* eg , 0 for January .
* @ param dayOfWeekInMonth the daylight savings ending
* day - of - week - in - month . See setStartRule ( ) for a complete explanation .
* @ param dayOfWeek the daylight savings ending day - of - week . See setStartRule ( )
* for a complete explanation .
* @ param time the daylight savings ending time . Please see the member
* description for an example . */
private void setEndRule ( int month , int dayOfWeekInMonth , int dayOfWeek , int time , int mode ) { } }
|
assert ( ! isFrozen ( ) ) ; endMonth = month ; endDay = dayOfWeekInMonth ; endDayOfWeek = dayOfWeek ; endTime = time ; endTimeMode = mode ; decodeEndRule ( ) ; transitionRulesInitialized = false ;
|
public class Stagnation { /** * { @ inheritDoc } */
public boolean shouldTerminate ( PopulationData < ? > populationData ) { } }
|
double fitness = getFitness ( populationData ) ; if ( populationData . getGenerationNumber ( ) == 0 || hasFitnessImproved ( fitness ) ) { bestFitness = fitness ; fittestGeneration = populationData . getGenerationNumber ( ) ; } return populationData . getGenerationNumber ( ) - fittestGeneration >= generationLimit ;
|
public class PaxChronology { @ Override public PaxDate resolveDate ( Map < TemporalField , Long > fieldValues , ResolverStyle resolverStyle ) { } }
|
return ( PaxDate ) super . resolveDate ( fieldValues , resolverStyle ) ;
|
public class ApiZoneImpl { /** * / * ( non - Javadoc )
* @ see com . tvd12 . ezyfox . core . entities . ApiProperties # getProperty ( java . lang . Object , java . lang . Class ) */
@ SuppressWarnings ( "unchecked" ) @ Override public < T > T getProperty ( Object key , Class < T > clazz ) { } }
|
return ( T ) getProperty ( key ) ;
|
public class BibTeXConverter { /** * Converts a BibTeX entry to a citation item
* @ param e the BibTeX entry to convert
* @ return the citation item */
public CSLItemData toItemData ( BibTeXEntry e ) { } }
|
// get all fields from the BibTeX entry
Map < String , String > entries = new HashMap < > ( ) ; for ( Map . Entry < Key , Value > field : e . getFields ( ) . entrySet ( ) ) { String us = field . getValue ( ) . toUserString ( ) . replaceAll ( "\\r" , "" ) ; // convert LaTeX string to normal text
try { List < LaTeXObject > objs = latexParser . parse ( new StringReader ( us ) ) ; us = latexPrinter . print ( objs ) . replaceAll ( "\\n" , " " ) . replaceAll ( "\\r" , "" ) . trim ( ) ; } catch ( ParseException ex ) { // ignore
} catch ( TokenMgrException err ) { // ignore
} entries . put ( field . getKey ( ) . getValue ( ) . toLowerCase ( ) , us ) ; } // map type
CSLType type = toType ( e . getType ( ) ) ; CSLItemDataBuilder builder = new CSLItemDataBuilder ( ) . id ( e . getKey ( ) . getValue ( ) ) . type ( type ) ; // map address
if ( entries . containsKey ( FIELD_LOCATION ) ) { builder . eventPlace ( entries . get ( FIELD_LOCATION ) ) ; builder . publisherPlace ( entries . get ( FIELD_LOCATION ) ) ; } else { builder . eventPlace ( entries . get ( FIELD_ADDRESS ) ) ; builder . publisherPlace ( entries . get ( FIELD_ADDRESS ) ) ; } // map author
if ( entries . containsKey ( FIELD_AUTHOR ) ) { builder . author ( NameParser . parse ( entries . get ( FIELD_AUTHOR ) ) ) ; } // map editor
if ( entries . containsKey ( FIELD_EDITOR ) ) { builder . editor ( NameParser . parse ( entries . get ( FIELD_EDITOR ) ) ) ; builder . collectionEditor ( NameParser . parse ( entries . get ( FIELD_EDITOR ) ) ) ; } // map date
if ( type == CSLType . WEBPAGE && entries . containsKey ( FIELD_URLDATE ) ) { CSLDate date = DateParser . toDate ( entries . get ( FIELD_URLDATE ) ) ; builder . issued ( date ) ; } else if ( entries . containsKey ( FIELD_DATE ) ) { CSLDate date = DateParser . toDate ( entries . get ( FIELD_DATE ) ) ; builder . issued ( date ) ; builder . eventDate ( date ) ; } else { CSLDate date = DateParser . toDate ( entries . get ( FIELD_YEAR ) , entries . get ( FIELD_MONTH ) ) ; builder . issued ( date ) ; builder . eventDate ( date ) ; } // map journal / journaltitle , booktitle , series
if ( entries . containsKey ( FIELD_JOURNAL ) ) { builder . containerTitle ( entries . get ( FIELD_JOURNAL ) ) ; builder . collectionTitle ( entries . get ( FIELD_JOURNAL ) ) ; } else if ( entries . containsKey ( FIELD_JOURNALTITLE ) ) { builder . containerTitle ( entries . get ( FIELD_JOURNALTITLE ) ) ; builder . collectionTitle ( entries . get ( FIELD_JOURNALTITLE ) ) ; } else if ( entries . containsKey ( FIELD_BOOKTITLE ) ) { builder . containerTitle ( entries . get ( FIELD_BOOKTITLE ) ) ; builder . collectionTitle ( entries . get ( FIELD_BOOKTITLE ) ) ; } else { builder . containerTitle ( entries . get ( FIELD_SERIES ) ) ; builder . collectionTitle ( entries . get ( FIELD_SERIES ) ) ; } // map number and issue
builder . number ( entries . get ( FIELD_NUMBER ) ) ; if ( entries . containsKey ( FIELD_ISSUE ) ) { builder . issue ( entries . get ( FIELD_ISSUE ) ) ; } else { builder . issue ( entries . get ( FIELD_NUMBER ) ) ; } // map publisher , insitution , school , organisation
if ( type == CSLType . REPORT ) { if ( entries . containsKey ( FIELD_PUBLISHER ) ) { builder . publisher ( entries . get ( FIELD_PUBLISHER ) ) ; } else if ( entries . containsKey ( FIELD_INSTITUTION ) ) { builder . publisher ( entries . get ( FIELD_INSTITUTION ) ) ; } else if ( entries . containsKey ( FIELD_SCHOOL ) ) { builder . publisher ( entries . get ( FIELD_SCHOOL ) ) ; } else { builder . publisher ( entries . get ( FIELD_ORGANIZATION ) ) ; } } else if ( type == CSLType . THESIS ) { if ( entries . containsKey ( FIELD_PUBLISHER ) ) { builder . publisher ( entries . get ( FIELD_PUBLISHER ) ) ; } else if ( entries . containsKey ( FIELD_SCHOOL ) ) { builder . publisher ( entries . get ( FIELD_SCHOOL ) ) ; } else if ( entries . containsKey ( FIELD_INSTITUTION ) ) { builder . publisher ( entries . get ( FIELD_INSTITUTION ) ) ; } else { builder . publisher ( entries . get ( FIELD_ORGANIZATION ) ) ; } } else { if ( entries . containsKey ( FIELD_PUBLISHER ) ) { builder . publisher ( entries . get ( FIELD_PUBLISHER ) ) ; } else if ( entries . containsKey ( FIELD_ORGANIZATION ) ) { builder . publisher ( entries . get ( FIELD_ORGANIZATION ) ) ; } else if ( entries . containsKey ( FIELD_INSTITUTION ) ) { builder . publisher ( entries . get ( FIELD_INSTITUTION ) ) ; } else { builder . publisher ( entries . get ( FIELD_SCHOOL ) ) ; } } // map title or chapter
if ( entries . containsKey ( FIELD_TITLE ) ) { builder . title ( entries . get ( FIELD_TITLE ) ) ; } else { builder . title ( entries . get ( FIELD_CHAPTER ) ) ; } // map pages
String pages = entries . get ( FIELD_PAGES ) ; if ( pages != null ) { PageRange pr = PageParser . parse ( pages ) ; builder . page ( pr . getLiteral ( ) ) ; builder . pageFirst ( pr . getPageFirst ( ) ) ; if ( pr . getNumberOfPages ( ) != null ) { builder . numberOfPages ( String . valueOf ( pr . getNumberOfPages ( ) ) ) ; } } // map last accessed date
if ( entries . containsKey ( FIELD_ACCESSED ) ) { builder . accessed ( DateParser . toDate ( entries . get ( FIELD_ACCESSED ) ) ) ; } // map other attributes
builder . volume ( entries . get ( FIELD_VOLUME ) ) ; builder . keyword ( entries . get ( FIELD_KEYWORDS ) ) ; builder . URL ( entries . get ( FIELD_URL ) ) ; builder . status ( entries . get ( FIELD_STATUS ) ) ; builder . ISSN ( entries . get ( FIELD_ISSN ) ) ; builder . ISBN ( entries . get ( FIELD_ISBN ) ) ; builder . version ( entries . get ( FIELD_REVISION ) ) ; builder . annote ( entries . get ( FIELD_ANNOTE ) ) ; builder . edition ( entries . get ( FIELD_EDITION ) ) ; builder . abstrct ( entries . get ( FIELD_ABSTRACT ) ) ; builder . DOI ( entries . get ( FIELD_DOI ) ) ; builder . note ( entries . get ( FIELD_NOTE ) ) ; // create citation item
return builder . build ( ) ;
|
public class HttpSender { /** * Convert a report to string
* @ param report the report to convert
* @ param format the format to convert to
* @ return a string representation of the report
* @ throws Exception if conversion failed */
@ NonNull @ SuppressWarnings ( "WeakerAccess" ) protected String convertToString ( CrashReportData report , @ NonNull StringFormat format ) throws Exception { } }
|
return format . toFormattedString ( report , config . reportContent ( ) , "&" , "\n" , true ) ;
|
public class JobCallbackUtil { /** * This method takes the job context info . and put the values into a map with keys as the tokens .
* @ return Map < String , String > */
public static Map < String , String > buildJobContextInfoMap ( final Event event , final String server ) { } }
|
if ( event . getRunner ( ) instanceof JobRunner ) { final JobRunner jobRunner = ( JobRunner ) event . getRunner ( ) ; final ExecutableNode node = jobRunner . getNode ( ) ; final EventData eventData = event . getData ( ) ; final String projectName = node . getParentFlow ( ) . getProjectName ( ) ; final String flowName = node . getParentFlow ( ) . getFlowId ( ) ; final String executionId = String . valueOf ( node . getParentFlow ( ) . getExecutionId ( ) ) ; final String jobId = node . getId ( ) ; final Map < String , String > result = new HashMap < > ( ) ; result . put ( CONTEXT_SERVER_TOKEN , server ) ; result . put ( CONTEXT_PROJECT_TOKEN , projectName ) ; result . put ( CONTEXT_FLOW_TOKEN , flowName ) ; result . put ( CONTEXT_EXECUTION_ID_TOKEN , executionId ) ; result . put ( CONTEXT_JOB_TOKEN , jobId ) ; result . put ( CONTEXT_JOB_STATUS_TOKEN , eventData . getStatus ( ) . name ( ) . toLowerCase ( ) ) ; /* * if ( node . getStatus ( ) = = Status . SUCCEEDED | | node . getStatus ( ) = =
* Status . FAILED ) { result . put ( JOB _ STATUS _ TOKEN ,
* node . getStatus ( ) . name ( ) . toLowerCase ( ) ) ; } else if ( node . getStatus ( ) = =
* Status . PREPARING ) { result . put ( JOB _ STATUS _ TOKEN , " started " ) ; } */
return result ; } else { throw new IllegalArgumentException ( "Provided event is not a job event" ) ; }
|
public class PageSourcePool { /** * sts a page object to the page pool
* @ param key key reference to store page object
* @ param ps pagesource to store */
public void setPage ( String key , PageSource ps ) { } }
|
ps . setLastAccessTime ( ) ; pageSources . put ( key . toLowerCase ( ) , ps ) ;
|
public class Sheet { /** * Maps the rendered column index to the real column index .
* @ param renderCol the rendered index
* @ return the mapped index */
public int getMappedColumn ( final int renderCol ) { } }
|
if ( columnMapping == null || renderCol == - 1 ) { return renderCol ; } else { final Integer result = columnMapping . get ( renderCol ) ; if ( result == null ) { throw new IllegalArgumentException ( "Invalid index " + renderCol ) ; } return result ; }
|
public class WonderPushUriHelper { /** * Checks that the provided URI points to the WonderPush REST server */
protected static boolean isAPIUri ( Uri uri ) { } }
|
if ( uri == null ) { return false ; } return getBaseUri ( ) . getHost ( ) . equals ( uri . getHost ( ) ) ;
|
public class MarshallUtil { /** * Marshall a { @ link Collection } .
* This method supports { @ code null } { @ code collection } .
* @ param collection { @ link Collection } to marshal .
* @ param out { @ link ObjectOutput } to write .
* @ param < E > Collection ' s element type .
* @ throws IOException If any of the usual Input / Output related exceptions occur . */
public static < E > void marshallCollection ( Collection < E > collection , ObjectOutput out ) throws IOException { } }
|
marshallCollection ( collection , out , ObjectOutput :: writeObject ) ;
|
public class StaticImports { /** * to filter on method signature . */
private static ImmutableSet < Symbol > lookup ( Symbol . TypeSymbol typeSym , Symbol . TypeSymbol start , Name identifier , Types types , Symbol . PackageSymbol pkg ) { } }
|
if ( typeSym == null ) { return ImmutableSet . of ( ) ; } ImmutableSet . Builder < Symbol > members = ImmutableSet . builder ( ) ; members . addAll ( lookup ( types . supertype ( typeSym . type ) . tsym , start , identifier , types , pkg ) ) ; for ( Type i : types . interfaces ( typeSym . type ) ) { members . addAll ( lookup ( i . tsym , start , identifier , types , pkg ) ) ; } OUTER : for ( Symbol member : typeSym . members ( ) . getSymbolsByName ( identifier ) ) { if ( ! member . isStatic ( ) ) { continue ; } switch ( ( int ) ( member . flags ( ) & Flags . AccessFlags ) ) { case Flags . PRIVATE : continue OUTER ; case 0 : case Flags . PROTECTED : if ( member . packge ( ) != pkg ) { continue OUTER ; } break ; case Flags . PUBLIC : default : break ; } if ( member . isMemberOf ( start , types ) ) { members . add ( member ) ; } } return members . build ( ) ;
|
public class FleetsApi { /** * Kick fleet member Kick a fleet member - - - SSO Scope :
* esi - fleets . write _ fleet . v1
* @ param fleetId
* ID for a fleet ( required )
* @ param memberId
* The character ID of a member in this fleet ( required )
* @ param datasource
* The server name you would like data from ( optional , default to
* tranquility )
* @ param token
* Access token to use if unable to set a header ( optional )
* @ throws ApiException
* If fail to call the API , e . g . server error or cannot
* deserialize the response body */
public void deleteFleetsFleetIdMembersMemberId ( Long fleetId , Integer memberId , String datasource , String token ) throws ApiException { } }
|
deleteFleetsFleetIdMembersMemberIdWithHttpInfo ( fleetId , memberId , datasource , token ) ;
|
public class TableBuilder { /** * Adds the specified constraint to the created table .
* @ param columnName The name of the column on which the constraint is applied .
* @ param constraintType The type of constraint to apply .
* One of
* < ul >
* < li > { @ link com . tjeannin . provigen . model . Constraint # UNIQUE } < / li >
* < li > { @ link com . tjeannin . provigen . model . Constraint # NOT _ NULL } < / li >
* < / ul >
* @ param constraintConflictClause The conflict clause to apply in case of constraint violation .
* One of
* < ul >
* < li > { @ link com . tjeannin . provigen . model . Constraint . OnConflict # ABORT } < / li >
* < li > { @ link com . tjeannin . provigen . model . Constraint . OnConflict # FAIL } < / li >
* < li > { @ link com . tjeannin . provigen . model . Constraint . OnConflict # IGNORE } < / li >
* < li > { @ link com . tjeannin . provigen . model . Constraint . OnConflict # REPLACE } < / li >
* < li > { @ link com . tjeannin . provigen . model . Constraint . OnConflict # ROLLBACK } < / li >
* < / ul >
* @ return The { @ link com . tjeannin . provigen . helper . TableBuilder } instance to allow chaining . */
public TableBuilder addConstraint ( String columnName , String constraintType , String constraintConflictClause ) { } }
|
constraints . add ( new Constraint ( columnName , constraintType , constraintConflictClause ) ) ; return this ;
|
public class FluentSelect { /** * Clear all selected entries . This is only valid when the SELECT supports multiple selections .
* @ throws UnsupportedOperationException If the SELECT does not support multiple selections */
public FluentSelect deselectAll ( ) { } }
|
executeAndWrapReThrowIfNeeded ( new DeselectAll ( ) , Context . singular ( context , "deselectAll" ) , true ) ; return new FluentSelect ( super . delegate , currentElement . getFound ( ) , this . context , monitor , booleanInsteadOfNotFoundException ) ;
|
public class AsynchronousRequest { /** * For more info on pets API go < a href = " https : / / wiki . guildwars2 . com / wiki / API : 2 / pets " > here < / a > < br / >
* Give user the access to { @ link Callback # onResponse ( Call , Response ) } and { @ link Callback # onFailure ( Call , Throwable ) } methods for custom interactions
* @ param callback callback that is going to be used for { @ link Call # enqueue ( Callback ) }
* @ throws NullPointerException if given { @ link Callback } is empty
* @ see Pet pet info */
public void getAllPetID ( Callback < List < Integer > > callback ) throws NullPointerException { } }
|
gw2API . getAllPetIDs ( ) . enqueue ( callback ) ;
|
public class AuthCollection { /** * Obtain an AuthMethod of type T , if one is contained in this collection .
* @ param type The type of AuthMethod to be located
* @ param < T > The type of AuthMethod which will be returned
* @ return An AuthMethod subclass matching type
* @ throws NexmoUnacceptableAuthException if no matching AuthMethod is found . */
public < T extends AuthMethod > T getAuth ( Class < T > type ) throws NexmoUnacceptableAuthException { } }
|
for ( AuthMethod availableAuthMethod : this . authList ) { if ( type . isInstance ( availableAuthMethod ) ) { return ( T ) availableAuthMethod ; } } throw new NexmoUnacceptableAuthException ( this . authList , new HashSet < > ( Arrays . asList ( new Class [ ] { type } ) ) ) ;
|
public class ByteUtil { /** * Encodes a byte array into a hex string ( hex dump ) .
* @ deprecated Please see class HexUtil */
@ Deprecated public static String encodeHex ( byte [ ] data , char delimiter ) { } }
|
// the result
StringBuilder result = new StringBuilder ( ) ; short val = 0 ; // encode each byte into a hex dump
for ( int i = 0 ; i < data . length ; i ++ ) { val = decodeUnsigned ( data [ i ] ) ; result . append ( padLeading ( Integer . toHexString ( ( int ) val ) , 2 ) ) ; result . append ( delimiter ) ; } // return encoded text
return result . toString ( ) ;
|
public class PmiModuleConfig { /** * Return the list of statistic IDs that are in the given pre - defined statistic sets .
* Statistic sets are defined in { @ link com . ibm . websphere . pmi . stat . StatConstants } */
public int [ ] listStatisticsBySet ( String statisticSet ) { } }
|
// System . out . println ( " & & & & & calling listStatisticsBySet " + statisticSet ) ;
if ( statisticSet . equals ( StatConstants . STATISTIC_SET_NONE ) || statisticSet . equals ( StatConstants . STATISTIC_SET_CUSTOM ) ) return new int [ 0 ] ; int k = 0 ; if ( statisticSet . equals ( StatConstants . STATISTIC_SET_BASIC ) ) k = 1 ; else if ( statisticSet . equals ( StatConstants . STATISTIC_SET_EXTENDED ) ) k = 2 ; else if ( statisticSet . equals ( StatConstants . STATISTIC_SET_ALL ) ) k = 3 ; // System . out . println ( " perfData . values ( ) for = " + this . getShortName ( ) ) ;
// System . out . println ( " perfData . values ( ) = " + perfData . values ( ) ) ;
ArrayList list = new ArrayList ( 3 ) ; Iterator allData = perfData . values ( ) . iterator ( ) ; // System . out . println ( " & & & alldata = " + allData ) ;
while ( allData . hasNext ( ) ) { PmiDataInfo dataInfo = ( PmiDataInfo ) allData . next ( ) ; // System . out . println ( " & & & dataInfo = " + dataInfo . getName ( ) ) ;
String s = dataInfo . getStatisticSet ( ) ; if ( s . equals ( StatConstants . STATISTIC_SET_BASIC ) ) { if ( k > 0 ) list . add ( new Integer ( dataInfo . getId ( ) ) ) ; } else if ( s . equals ( StatConstants . STATISTIC_SET_EXTENDED ) ) { if ( k > 1 ) list . add ( new Integer ( dataInfo . getId ( ) ) ) ; } else if ( s . equals ( StatConstants . STATISTIC_SET_ALL ) ) { if ( k > 2 ) list . add ( new Integer ( dataInfo . getId ( ) ) ) ; } } int [ ] statSet = new int [ list . size ( ) ] ; for ( int i = 0 ; i < list . size ( ) ; i ++ ) { statSet [ i ] = ( ( Integer ) list . get ( i ) ) . intValue ( ) ; } return statSet ;
|
public class PEMReader { /** * Read the PEM file and save the DER encoded octet
* stream and begin marker .
* @ throws IOException */
protected void readFile ( ) throws IOException { } }
|
String line ; try ( BufferedReader reader = new BufferedReader ( new InputStreamReader ( stream ) ) ) { while ( ( line = reader . readLine ( ) ) != null ) { if ( line . contains ( BEGIN_MARKER ) ) { beginMarker = line . trim ( ) ; String endMarker = beginMarker . replace ( "BEGIN" , "END" ) ; derBytes = readBytes ( reader , endMarker ) ; return ; } } throw new IOException ( "Invalid PEM file: no begin marker" ) ; }
|
public class Props { /** * Create a Props from a list of key value pairing . i . e . [ key1 , value1 , key2 , value2 . . . ] */
public static Props of ( final Props parent , final String ... args ) { } }
|
if ( args . length % 2 != 0 ) { throw new IllegalArgumentException ( "Must have an equal number of keys and values." ) ; } final Map < String , String > vals = new HashMap < > ( args . length / 2 ) ; for ( int i = 0 ; i < args . length ; i += 2 ) { vals . put ( args [ i ] , args [ i + 1 ] ) ; } return new Props ( parent , vals ) ;
|
public class CmsVfsMemoryObjectCache { /** * Uses a transformer for loading an object from a path if it has not already been cached , and then caches it . < p >
* @ param cms the CMS context
* @ param rootPath the root path from which the object should be loaded
* @ param function the function which should load the object from VFS if it isn ' t already cached
* @ return the loaded object */
public Object loadVfsObject ( CmsObject cms , String rootPath , Transformer function ) { } }
|
Object result = getCachedObject ( cms , rootPath ) ; if ( result == null ) { result = function . transform ( rootPath ) ; putCachedObject ( cms , rootPath , result ) ; } return result ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.