signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class BitcoinSerializer { /** * Make an inventory message from the payload . Extension point for alternative * serialization format support . */ @ Override public InventoryMessage makeInventoryMessage ( byte [ ] payloadBytes , int length ) throws ProtocolException { } }
return new InventoryMessage ( params , payloadBytes , this , length ) ;
public class AbstractProfileService { /** * Define the attributes to read in the storage . * @ return the attributes */ protected List < String > defineAttributesToRead ( ) { } }
final List < String > names = new ArrayList < > ( ) ; names . add ( getIdAttribute ( ) ) ; names . add ( LINKEDID ) ; // legacy mode : ' getIdAttribute ( ) ' + linkedid + username + attributes if ( isLegacyMode ( ) ) { names . add ( getUsernameAttribute ( ) ) ; names . addAll ( Arrays . asList ( attributeNames ) ) ; } else { // new beahviour ( > = v2.0 ) : ' getIdAttribute ( ) ' + linkedid + serializedprofile names . add ( SERIALIZED_PROFILE ) ; } return names ;
public class CmsForm { /** * Updates the model validation status . < p > * @ param modelId the model id * @ param result the validation result */ protected void updateModelValidationStatus ( String modelId , CmsValidationResult result ) { } }
Collection < I_CmsFormField > fields = getFieldsByModelId ( modelId ) ; for ( I_CmsFormField field : fields ) { updateFieldValidationStatus ( field , result ) ; }
public class DFSInputStream { /** * Seek to given position on a node other than the current node . If * a node other than the current node is found , then returns true . * If another node could not be found , then returns false . */ public synchronized boolean seekToNewSource ( long targetPos , boolean throwWhenNotFound ) throws IOException { } }
boolean markedDead = deadNodes . containsKey ( currentNode ) ; addToDeadNodes ( currentNode ) ; DatanodeInfo oldNode = currentNode ; DatanodeInfo newNode = blockSeekTo ( targetPos , throwWhenNotFound ) ; if ( ! markedDead ) { /* remove it from deadNodes . blockSeekTo could have cleared * deadNodes and added currentNode again . Thats ok . */ deadNodes . remove ( oldNode ) ; } if ( ! oldNode . getStorageID ( ) . equals ( newNode . getStorageID ( ) ) ) { currentNode = newNode ; return true ; } else { return false ; }
public class CmsJspResourceWrapper { /** * Returns < code > true < / code > in case this resource is an XML content . < p > * @ return < code > true < / code > in case this resource is an XML content */ public boolean getIsXml ( ) { } }
if ( m_isXml == null ) { m_isXml = Boolean . valueOf ( CmsResourceTypeXmlPage . isXmlPage ( this ) || CmsResourceTypeXmlContent . isXmlContent ( this ) ) ; } return m_isXml . booleanValue ( ) ;
public class HttpContext {
    /**
     * Canonicalizes a context path spec: ensures a leading '/' and, for non-root
     * specs, a trailing '/*' suffix.
     *
     * @param contextPathSpec the raw context path spec
     * @return the canonical form of the spec
     * @throws IllegalArgumentException if the spec is null, contains a comma,
     *         or starts with '*'
     */
    public static String canonicalContextPathSpec(String contextPathSpec) {
        // Reject obviously illegal specs up front.
        boolean illegal = contextPathSpec == null
            || contextPathSpec.indexOf(',') >= 0
            || contextPathSpec.startsWith("*");
        if (illegal) {
            throw new IllegalArgumentException("Illegal context spec:" + contextPathSpec);
        }
        String canonical = contextPathSpec;
        if (!canonical.startsWith("/")) {
            canonical = '/' + canonical;
        }
        // "/" stays as-is; anything else gets a wildcard suffix.
        if (canonical.length() > 1) {
            if (canonical.endsWith("/")) {
                canonical += "*";
            } else if (!canonical.endsWith("/*")) {
                canonical += "/*";
            }
        }
        return canonical;
    }
}
public class DateUtils { /** * Zaokrągla przekazaną datę do pełnego dnia . * @ param date Data do zaokrąglenia . * @ return Obiekt { @ link Timestamp } reprezentujący zaokrągloną datę . */ public static Date getDayDate ( Date date ) { } }
Calendar calendar = getCalendar ( ) ; calendar . setTime ( date ) ; return getDayDate ( calendar ) ;
public class VoltSystemProcedure { /** * Produce work units , possibly on all sites , for a list of plan fragments . * The final plan fragment must aggregate intermediate results and produce a * single output dependency . This aggregate output is returned as the * result . * @ param pfs * an array of synthesized plan fragments * @ param aggregatorOutputDependencyId * dependency id produced by the aggregation pf The id of the * table returned as the result of this procedure . */ public void executeSysProcPlanFragmentsAsync ( SynthesizedPlanFragment pfs [ ] ) { } }
MpTransactionState txnState = ( MpTransactionState ) m_runner . getTxnState ( ) ; assert ( txnState != null ) ; int fragmentIndex = 0 ; for ( SynthesizedPlanFragment pf : pfs ) { assert ( pf . parameters != null ) ; FragmentTaskMessage task = FragmentTaskMessage . createWithOneFragment ( txnState . initiatorHSId , m_site . getCorrespondingSiteId ( ) , txnState . txnId , txnState . uniqueId , txnState . isReadOnly ( ) , fragIdToHash ( pf . fragmentId ) , pf . outputDepId , pf . parameters , false , txnState . isForReplay ( ) , txnState . isNPartTxn ( ) , txnState . getTimetamp ( ) ) ; // During @ MigratePartitionLeader , a fragment may be mis - routed . fragmentIndex is used to check which fragment is mis - routed and // to determine how the follow - up fragments are processed . task . setBatch ( fragmentIndex ++ ) ; task . setFragmentTaskType ( FragmentTaskMessage . SYS_PROC_PER_SITE ) ; if ( pf . multipartition ) { // create a workunit for every execution site txnState . createAllParticipatingFragmentWork ( task ) ; } else { // create one workunit for the current site if ( pf . siteId == - 1 ) { txnState . createLocalFragmentWork ( task , false ) ; } else { txnState . createFragmentWork ( new long [ ] { pf . siteId } , task ) ; } } }
public class PreconditionExcerpts { /** * Negates { @ code conditionTemplate } , removing unnecessary brackets and double - negatives if * possible . */ private static String negate ( String conditionTemplate ) { } }
if ( conditionTemplate . startsWith ( "!" ) && ! BOOLEAN_BINARY_OPERATOR . matcher ( conditionTemplate ) . find ( ) ) { return conditionTemplate . substring ( 1 ) ; } else if ( ANY_OPERATOR . matcher ( conditionTemplate ) . find ( ) ) { // The condition might already enclosed in a bracket , but we can ' t simply check for opening // and closing brackets at the start and end of the string , as that misses cases like // ( a | | b ) & & ( c | | d ) . Attempting to determine if the initial and closing bracket are paired // requires understanding character constants and strings constants , so for simplicity we // just add unnecessary brackets . return "!(" + conditionTemplate + ")" ; } else { return "!" + conditionTemplate ; }
public class SdkHttpUtils {
    /**
     * Append the given path to the given baseUri.
     *
     * @param baseUri           the URI to append to (required, may be relative)
     * @param path              the path to append (may be null or empty); should be pre-encoded
     * @param escapeDoubleSlash whether a double-slash in the path should be escaped to "/%2F"
     * @return the baseUri with the path appended
     */
    public static String appendUri(final String baseUri, String path, final boolean escapeDoubleSlash) {
        if (path == null || path.length() == 0) {
            // Nothing to append; just guarantee a trailing slash.
            return baseUri.endsWith("/") ? baseUri : baseUri + "/";
        }
        String prefix = baseUri;
        if (path.startsWith("/")) {
            // The path carries its own separator: drop a duplicate trailing slash.
            if (prefix.endsWith("/")) {
                prefix = prefix.substring(0, prefix.length() - 1);
            }
        } else if (!prefix.endsWith("/")) {
            prefix = prefix + "/";
        }
        final String suffix = escapeDoubleSlash ? path.replace("//", "/%2F") : path;
        return prefix + suffix;
    }
}
public class CheckRequestInfo { /** * Returns the { @ link CheckRequest } instance corresponding to this instance . * The service name , operation ID and operation Name must all be set * @ param clock is used to determine the current timestamp * @ return a { @ link CheckRequest } * @ throws java . lang . IllegalStateException if any required values are not set when this is called . */ public CheckRequest asCheckRequest ( Clock clock ) { } }
Preconditions . checkState ( ! Strings . isNullOrEmpty ( getServiceName ( ) ) , "a service name must be set" ) ; Preconditions . checkState ( ! Strings . isNullOrEmpty ( getOperationId ( ) ) , "an operation ID must be set" ) ; Preconditions . checkState ( ! Strings . isNullOrEmpty ( getOperationName ( ) ) , "an operation name must be set" ) ; Operation . Builder b = super . asOperation ( clock ) . toBuilder ( ) ; b . putAllLabels ( getSystemLabels ( ) ) ; return CheckRequest . newBuilder ( ) . setServiceName ( getServiceName ( ) ) . setOperation ( b ) . build ( ) ;
public class ImportJobsResponse { /** * A list of import jobs for the application . * @ param item * A list of import jobs for the application . */ public void setItem ( java . util . Collection < ImportJobResponse > item ) { } }
if ( item == null ) { this . item = null ; return ; } this . item = new java . util . ArrayList < ImportJobResponse > ( item ) ;
public class DigestFactory { /** * This method creates an < code > IDigest < / code > instance of the given type . * @ param digestName The name of the digest type . * @ return The < code > IDigest < / code > instance of the given type . */ public final IDigest create ( final String digestName ) { } }
IDigest digest ; if ( digestName . compareTo ( "None" ) == 0 ) { digest = new NullDigest ( ) ; } else if ( digestName . compareTo ( "CRC32C" ) == 0 ) { digest = new CRC32CDigest ( ) ; } else { throw new IllegalArgumentException ( "Digest Type (" + digestName + ") is unknown." ) ; } return digest ;
public class PJsonArray { /** * Get the element at the index as a json array . * @ param i the index of the element to access */ public final PJsonArray getJSONArray ( final int i ) { } }
JSONArray val = this . array . optJSONArray ( i ) ; final String context = "[" + i + "]" ; if ( val == null ) { throw new ObjectMissingException ( this , context ) ; } return new PJsonArray ( this , val , context ) ;
public class TimeSeriesMetricDeltaSet { /** * Apply a single - argument function to the set . * @ param fn A function that takes a TimeSeriesMetricDelta and returns a TimeSeriesMetricDelta . * @ return The mapped TimeSeriesMetricDelta from this set . */ public TimeSeriesMetricDeltaSet map ( Function < ? super MetricValue , ? extends MetricValue > fn ) { } }
return values_ . map ( fn , ( x ) -> x . entrySet ( ) . stream ( ) . map ( ( entry ) -> apply_fn_ ( entry , fn ) ) ) . mapCombine ( TimeSeriesMetricDeltaSet :: new , TimeSeriesMetricDeltaSet :: new ) ;
public class ContentPackageProperties { /** * Get properties of AEM package . * @ param packageFile AEM package file . * @ return Map with properties or empty map if none found . * @ throws IOException I / O exception */ public static Map < String , Object > get ( File packageFile ) throws IOException { } }
ZipFile zipFile = null ; try { zipFile = new ZipFile ( packageFile ) ; ZipArchiveEntry entry = zipFile . getEntry ( ZIP_ENTRY_PROPERTIES ) ; if ( entry != null && ! entry . isDirectory ( ) ) { Map < String , Object > props = getPackageProperties ( zipFile , entry ) ; return new TreeMap < > ( transformPropertyTypes ( props ) ) ; } return Collections . emptyMap ( ) ; } finally { IOUtils . closeQuietly ( zipFile ) ; }
public class DRL5Expressions {
    /**
     * ANTLR-generated syntactic predicate fragment: matches {@code not_key in_key}.
     * (Grammar source: DRL5Expressions.g, line 323.)
     */
    public final void synpred7_DRL5Expressions_fragment() throws RecognitionException {
        // ( not_key in_key )
        {
            pushFollow(FOLLOW_not_key_in_synpred7_DRL5Expressions1547);
            not_key();
            state._fsp--;
            if (state.failed) {
                return;
            }
            pushFollow(FOLLOW_in_key_in_synpred7_DRL5Expressions1549);
            in_key();
            state._fsp--;
            if (state.failed) {
                return;
            }
        }
    }
}
public class Base { /** * Opens a new connection in case additional driver - specific parameters need to be passed in . * @ param driver driver class name * @ param url JDBC URL * @ param props connection properties */ public static DB open ( String driver , String url , Properties props ) { } }
return new DB ( DB . DEFAULT_NAME ) . open ( driver , url , props ) ;
public class KeyVaultClientBaseImpl { /** * Lists the deleted keys in the specified vault . * Retrieves a list of the keys in the Key Vault as JSON Web Key structures that contain the public part of a deleted key . This operation includes deletion - specific information . The Get Deleted Keys operation is applicable for vaults enabled for soft - delete . While the operation can be invoked on any vault , it will return an error if invoked on a non soft - delete enabled vault . This operation requires the keys / list permission . * @ param vaultBaseUrl The vault name , for example https : / / myvault . vault . azure . net . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; DeletedKeyItem & gt ; object */ public Observable < ServiceResponse < Page < DeletedKeyItem > > > getDeletedKeysWithServiceResponseAsync ( final String vaultBaseUrl ) { } }
return getDeletedKeysSinglePageAsync ( vaultBaseUrl ) . concatMap ( new Func1 < ServiceResponse < Page < DeletedKeyItem > > , Observable < ServiceResponse < Page < DeletedKeyItem > > > > ( ) { @ Override public Observable < ServiceResponse < Page < DeletedKeyItem > > > call ( ServiceResponse < Page < DeletedKeyItem > > page ) { String nextPageLink = page . body ( ) . nextPageLink ( ) ; if ( nextPageLink == null ) { return Observable . just ( page ) ; } return Observable . just ( page ) . concatWith ( getDeletedKeysNextWithServiceResponseAsync ( nextPageLink ) ) ; } } ) ;
public class HostMessenger { /** * Convenience method for doing the verbose COW insert into the map */ private void putForeignHost ( int hostId , ForeignHost fh ) { } }
synchronized ( m_mapLock ) { m_foreignHosts = ImmutableMultimap . < Integer , ForeignHost > builder ( ) . putAll ( m_foreignHosts ) . put ( hostId , fh ) . build ( ) ; }
public class BDDKernel { /** * Restricts the variables in the BDD { @ code r } to constants true or false . The restriction is submitted in the BDD * { @ code var } . * @ param r the BDD to be restricted * @ param var the variable mapping as a BDD * @ return the restricted BDD */ public int restrict ( final int r , final int var ) { } }
final int res ; if ( var < 2 ) return r ; varset2svartable ( var ) ; initRef ( ) ; res = restrictRec ( r , ( var << 3 ) | CACHEID_RESTRICT ) ; return res ;
public class RedBlackTreeInteger {
    /**
     * Assuming h is red and both h.right and h.right.left are black,
     * make h.right or one of its children red.
     */
    private Node<T> moveRedRight(Node<T> h) {
        flipColors(h);
        // If the left grandchild is red, borrow from the left side.
        if (h.left.left != null && h.left.left.red) {
            h = rotateRight(h);
            flipColors(h);
        }
        return h;
    }
}
public class BannedDependencies { /** * Checks the set of dependencies against the list of patterns . * @ param thePatterns the patterns * @ param dependencies the dependencies * @ return a set containing artifacts matching one of the patterns or < code > null < / code > * @ throws EnforcerRuleException the enforcer rule exception */ private Set < Artifact > checkDependencies ( Set < Artifact > dependencies , List < String > thePatterns ) throws EnforcerRuleException { } }
Set < Artifact > foundMatches = null ; if ( thePatterns != null && thePatterns . size ( ) > 0 ) { for ( String pattern : thePatterns ) { String [ ] subStrings = pattern . split ( ":" ) ; subStrings = StringUtils . stripAll ( subStrings ) ; for ( Artifact artifact : dependencies ) { if ( compareDependency ( subStrings , artifact ) ) { // only create if needed if ( foundMatches == null ) { foundMatches = new HashSet < Artifact > ( ) ; } foundMatches . add ( artifact ) ; } } } } return foundMatches ;
public class CPDefinitionPersistenceImpl { /** * Returns an ordered range of all the cp definitions where CPTaxCategoryId = & # 63 ; . * Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CPDefinitionModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order . * @ param CPTaxCategoryId the cp tax category ID * @ param start the lower bound of the range of cp definitions * @ param end the upper bound of the range of cp definitions ( not inclusive ) * @ param orderByComparator the comparator to order the results by ( optionally < code > null < / code > ) * @ return the ordered range of matching cp definitions */ @ Override public List < CPDefinition > findByCPTaxCategoryId ( long CPTaxCategoryId , int start , int end , OrderByComparator < CPDefinition > orderByComparator ) { } }
return findByCPTaxCategoryId ( CPTaxCategoryId , start , end , orderByComparator , true ) ;
public class SocketIOWithTimeout { /** * Performs one IO and returns number of bytes read or written . * It waits up to the specified timeout . If the channel is * not read before the timeout , SocketTimeoutException is thrown . * @ param buf buffer for IO * @ param ops Selection Ops used for waiting . Suggested values : * SelectionKey . OP _ READ while reading and SelectionKey . OP _ WRITE while * writing . * @ return number of bytes read or written . negative implies end of stream . * @ throws IOException */ int doIO ( ByteBuffer buf , int ops ) throws IOException { } }
/* For now only one thread is allowed . If user want to read or write * from multiple threads , multiple streams could be created . In that * case multiple threads work as well as underlying channel supports it . */ if ( ! buf . hasRemaining ( ) ) { throw new IllegalArgumentException ( "Buffer has no data left." ) ; // or should we just return 0? } while ( buf . hasRemaining ( ) ) { if ( closed ) { return - 1 ; } try { int n = performIO ( buf ) ; if ( n != 0 ) { // successful io or an error . return n ; } } catch ( IOException e ) { if ( ! channel . isOpen ( ) ) { closed = true ; } throw e ; } // now wait for socket to be ready . int count = 0 ; try { count = selector . select ( channel , ops , timeout ) ; } catch ( IOException e ) { // unexpected IOException . closed = true ; throw e ; } if ( count == 0 ) { throw new SocketTimeoutException ( timeoutExceptionString ( channel , timeout , ops ) ) ; } // otherwise the socket should be ready for io . } return 0 ; // does not reach here .
public class ScreenPrinter { /** * Print this page . */ public int print ( Graphics g , PageFormat pageFormat , int pageIndex ) { } }
Graphics2D g2d = ( Graphics2D ) g ; if ( firstTime ) { m_componentPrint = new ComponentPrint ( null ) ; m_componentPrint . surveyComponents ( m_component ) ; m_paperPrint = new PaperPrint ( null ) ; m_paperPrint . surveyPage ( pageFormat , g2d , m_bPrintHeader ) ; firstTime = false ; } // Print this page int pageHeight = m_paperPrint . getPrintableHeight ( ) ; double scale = Math . min ( 1.0 , ( ( double ) m_paperPrint . getPrintableWidth ( ) / ( double ) m_componentPrint . getMaxComponentWidth ( ) ) ) ; m_paperPrint . setCurrentYLocation ( 0 ) ; m_componentPrint . setPageHeight ( ( int ) ( pageHeight / scale ) ) ; boolean pageDone = m_componentPrint . setCurrentYLocation ( pageIndex , 0 ) ; while ( ! pageDone ) { Component component = m_componentPrint . getCurrentComponent ( ) ; int componentCurrentYLocation = m_componentPrint . getCurrentYLocation ( ) ; int componentHeightOnPage = m_componentPrint . getComponentPageHeight ( ) ; if ( this . isCancelled ( ) ) return NO_SUCH_PAGE ; // Done , No more pages if ( component == null ) { if ( m_paperPrint . getCurrentYLocation ( ) == 0 ) return NO_SUCH_PAGE ; // No more pages else break ; // End of components ( last page ) } if ( m_paperPrint . getCurrentYLocation ( ) == 0 ) { // First time through if ( m_bPrintHeader ) { m_paperPrint . printHeader ( g2d , m_componentPrint . getTitle ( ) ) ; m_paperPrint . printFooter ( g2d , "Page " + ( pageIndex + 1 ) ) ; } this . setDialogMessage ( "Printing page " + ( pageIndex + 1 ) ) ; } int xShift = m_paperPrint . getXOffset ( ) ; int yShift = m_paperPrint . getYOffset ( ) - ( int ) ( componentCurrentYLocation * scale ) ; g2d . setClip ( m_paperPrint . getXOffset ( ) , m_paperPrint . getYOffset ( ) , m_paperPrint . getPrintableWidth ( ) , ( int ) ( componentHeightOnPage * scale ) ) ; g2d . translate ( xShift , yShift ) ; g2d . scale ( scale , scale ) ; disableDoubleBuffering ( component ) ; component . paint ( g2d ) ; enableDoubleBuffering ( component ) ; g2d . 
scale ( 1 / scale , 1 / scale ) ; g2d . translate ( - xShift , - yShift ) ; m_paperPrint . addCurrentYLocation ( ( int ) ( componentHeightOnPage * scale ) ) ; pageDone = m_componentPrint . addCurrentYLocation ( componentHeightOnPage ) ; } return PAGE_EXISTS ;
public class ParserRuleImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } }
switch ( featureID ) { case XtextPackage . PARSER_RULE__DEFINES_HIDDEN_TOKENS : return isDefinesHiddenTokens ( ) ; case XtextPackage . PARSER_RULE__HIDDEN_TOKENS : return getHiddenTokens ( ) ; case XtextPackage . PARSER_RULE__PARAMETERS : return getParameters ( ) ; case XtextPackage . PARSER_RULE__FRAGMENT : return isFragment ( ) ; case XtextPackage . PARSER_RULE__WILDCARD : return isWildcard ( ) ; } return super . eGet ( featureID , resolve , coreType ) ;
public class NativeString { /** * See ECMA 15.5.4.7 */ private static int js_lastIndexOf ( String target , Object [ ] args ) { } }
String search = ScriptRuntime . toString ( args , 0 ) ; double end = ScriptRuntime . toNumber ( args , 1 ) ; if ( end != end || end > target . length ( ) ) end = target . length ( ) ; else if ( end < 0 ) end = 0 ; return target . lastIndexOf ( search , ( int ) end ) ;
public class CmsVfsTab { /** * Checks the check boxes for the selected folders . < p > * @ param folders the folders for which to check the check boxes */ public void checkFolders ( Set < String > folders ) { } }
if ( folders != null ) { for ( String folder : folders ) { CmsLazyTreeItem item = m_itemsByPath . get ( folder ) ; if ( ( item != null ) && ( item . getCheckBox ( ) != null ) ) { item . getCheckBox ( ) . setChecked ( true ) ; } } }
public class BeanUtil { /** * 判断Bean中是否有值为null的字段 * @ param bean Bean * @ return 是否有值为null的字段 */ public static boolean hasNull ( Object bean ) { } }
final Field [ ] fields = ClassUtil . getDeclaredFields ( bean . getClass ( ) ) ; Object fieldValue = null ; for ( Field field : fields ) { field . setAccessible ( true ) ; try { fieldValue = field . get ( bean ) ; } catch ( Exception e ) { // ignore } if ( null == fieldValue ) { return true ; } } return false ;
public class SimpleBeanTreeTableDataModel { /** * { @ inheritDoc } */ @ Override public int [ ] sort ( final int col , final boolean ascending ) { } }
if ( ! isSortable ( col ) ) { throw new IllegalStateException ( "Attempted to sort on column " + col + ", which is not sortable" ) ; } // Obtains the list of top level nodes , sorts them & re - add them in order TableTreeNode root = getRootNode ( ) ; List < TableTreeNode > topLevelNodes = new ArrayList < > ( root . getChildCount ( ) ) ; for ( int i = 0 ; i < root . getChildCount ( ) ; i ++ ) { topLevelNodes . add ( ( TableTreeNode ) root . getChildAt ( i ) ) ; } Comparator < TableTreeNode > comp = new Comparator < TableTreeNode > ( ) { @ Override public int compare ( final TableTreeNode obj1 , final TableTreeNode obj2 ) { Comparator backing = comparators . get ( col ) ; return backing . compare ( obj1 . getData ( ) , obj2 . getData ( ) ) ; } } ; if ( ascending ) { Collections . sort ( topLevelNodes , comp ) ; } else { Collections . sort ( topLevelNodes , Collections . reverseOrder ( comp ) ) ; } root . removeAll ( ) ; for ( TableTreeNode node : topLevelNodes ) { root . add ( node ) ; } return null ;
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertIfcDuctFittingTypeEnumToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class CommsByteBuffer { /** * Put a < String , String > Map object into the transmission buffer */ public void putMap ( Map < String , String > map ) { } }
// SIB0163 . comms . 1 if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "putMap" , "map=" + map ) ; if ( map != null ) { final Set < String > keys = map . keySet ( ) ; putShort ( keys . size ( ) ) ; // Number of entries for ( String k : keys ) { putString ( k ) ; putString ( map . get ( k ) ) ; } } else { putShort ( 0 ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "putMap" ) ;
public class UserManagedCacheBuilder { /** * Adds a { @ link CacheLoaderWriter } to the returned builder . * @ param loaderWriter the cache loader writer to use * @ return a new builder with the added cache loader writer */ public UserManagedCacheBuilder < K , V , T > withLoaderWriter ( CacheLoaderWriter < K , V > loaderWriter ) { } }
if ( loaderWriter == null ) { throw new NullPointerException ( "Null loaderWriter" ) ; } UserManagedCacheBuilder < K , V , T > otherBuilder = new UserManagedCacheBuilder < > ( this ) ; otherBuilder . cacheLoaderWriter = loaderWriter ; return otherBuilder ;
public class DoubleStream { /** * Takes elements while the predicate returns { @ code false } . * Once predicate condition is satisfied by an element , the stream * finishes with this element . * < p > This is an intermediate operation . * < p > Example : * < pre > * stopPredicate : ( a ) - & gt ; a & gt ; 2 * stream : [ 1 , 2 , 3 , 4 , 1 , 2 , 3 , 4] * result : [ 1 , 2 , 3] * < / pre > * @ param stopPredicate the predicate used to take elements * @ return the new { @ code DoubleStream } * @ since 1.1.6 */ @ NotNull public DoubleStream takeUntil ( @ NotNull final DoublePredicate stopPredicate ) { } }
return new DoubleStream ( params , new DoubleTakeUntil ( iterator , stopPredicate ) ) ;
public class CmsVfsSitemapService { /** * Adds a function detail element to a container page . < p > * @ param cms the current CMS context * @ param page the container page which should be changed * @ param containerName the name of the container which should be used for function detail elements * @ param elementId the structure id of the element to add * @ param formatterId the structure id of the formatter for the element * @ throws CmsException if something goes wrong */ private void addFunctionDetailElement ( CmsObject cms , CmsXmlContainerPage page , String containerName , CmsUUID elementId , CmsUUID formatterId ) throws CmsException { } }
CmsContainerPageBean bean = page . getContainerPage ( cms ) ; List < CmsContainerBean > containerBeans = new ArrayList < CmsContainerBean > ( ) ; Collection < CmsContainerBean > originalContainers = bean . getContainers ( ) . values ( ) ; if ( ( containerName == null ) && ! originalContainers . isEmpty ( ) ) { CmsContainerBean firstContainer = originalContainers . iterator ( ) . next ( ) ; containerName = firstContainer . getName ( ) ; } boolean foundContainer = false ; for ( CmsContainerBean cntBean : originalContainers ) { boolean isDetailTarget = cntBean . getName ( ) . equals ( containerName ) ; if ( isDetailTarget && ! foundContainer ) { foundContainer = true ; List < CmsContainerElementBean > newElems = new ArrayList < CmsContainerElementBean > ( ) ; newElems . addAll ( cntBean . getElements ( ) ) ; CmsContainerElementBean newElement = new CmsContainerElementBean ( elementId , formatterId , new HashMap < String , String > ( ) , false ) ; newElems . add ( 0 , newElement ) ; CmsContainerBean newCntBean = new CmsContainerBean ( cntBean . getName ( ) , cntBean . getType ( ) , cntBean . getParentInstanceId ( ) , cntBean . isRootContainer ( ) , newElems ) ; containerBeans . add ( newCntBean ) ; } else { containerBeans . add ( cntBean ) ; } } if ( ! foundContainer ) { throw new CmsException ( Messages . get ( ) . container ( Messages . ERR_NO_FUNCTION_DETAIL_CONTAINER_1 , page . getFile ( ) . getRootPath ( ) ) ) ; } CmsContainerPageBean bean2 = new CmsContainerPageBean ( new ArrayList < CmsContainerBean > ( containerBeans ) ) ; page . writeContainerPage ( cms , bean2 ) ;
public class ConsonantUtil { /** * * * * * * BEGINNING OF FUNCTION * * * * * */ / * / public static boolean is_savarna ( String str1 , String str2 ) { } }
// Log . logInfo ( " in is _ savarna " ) ; if ( is_kavarganta ( str1 ) && is_kavargadi ( str2 ) ) return true ; if ( is_chavarganta ( str1 ) && is_chavargadi ( str2 ) ) return true ; if ( is_Tavarganta ( str1 ) && is_Tavargadi ( str2 ) ) return true ; if ( is_tavarganta ( str1 ) && is_tavargadi ( str2 ) ) return true ; if ( is_pavarganta ( str1 ) && is_pavargadi ( str2 ) ) return true ; // what abt the others y , r , ; , v etc // Log . logInfo ( " is not savarna " ) ; return false ;
public class Authentication { /** * Attempts to authenticate a given IPerson based on a set of principals and credentials * @ param principals * @ param credentials * @ param person * @ exception PortalSecurityException */ public void authenticate ( HttpServletRequest request , Map < String , String > principals , Map < String , String > credentials , IPerson person ) throws PortalSecurityException { } }
// Authenticates 'person' against its security context chain, then enriches the
// person with provider attributes, PersonDirectory attributes, a display name
// and the uPortal user id, and finally publishes a login event.  The statement
// order below is significant (attributes must be read AFTER authenticate()).
// Retrieve the security context for the user
final ISecurityContext securityContext = person . getSecurityContext ( ) ;
// Set the principals and credentials for the security context chain
this . configureSecurityContextChain ( principals , credentials , securityContext , BASE_CONTEXT_NAME ) ;
// NOTE : PortalPreAuthenticatedProcessingFilter looks in the security . properties file to
// determine what tokens to look for that represent the principals and
// credentials for each context . It then retrieves the values from the request
// and stores the values in the principals and credentials HashMaps that are
// passed to the Authentication service .
// Attempt to authenticate the user
final long start = System . currentTimeMillis ( ) ;
securityContext . authenticate ( ) ;
final long elapsed = System . currentTimeMillis ( ) - start ;
// Check to see if the user was authenticated
if ( securityContext . isAuthenticated ( ) ) {
    lastAuthentication = authenticationTimes . add ( elapsed ) ; // metric
    // Add the authenticated username to the person object
    // the login name may have been provided or reset by the security provider
    // so this needs to be done after authentication .
    final String userName = securityContext . getPrincipal ( ) . getUID ( ) ;
    person . setAttribute ( IPerson . USERNAME , userName ) ;
    if ( log . isDebugEnabled ( ) ) {
        log . debug ( "FINISHED SecurityContext authentication for user '" + userName + "' in " + elapsed + "ms #milestone" ) ;
    }
    threadNamingRequestFilter . updateCurrentUsername ( userName ) ;
    /*
     * Clear cached group info for this user .
     * There seem to be 2 systems in place for this information :
     * - The old system based on EntityCachingService
     * - The new system based on ehcache
     * For uPortal 5 , we should work to remove the old system .
     */
    GroupService . finishedSession ( person ) ; // Old system
    for ( IAuthenticationListener authListener : authenticationListeners ) { // New system
        authListener . userAuthenticated ( person ) ;
    }
    // Clear all existing cached data about the person
    this . usernameTaggedCacheEntryPurger . purgeTaggedCacheEntries ( userName ) ;
    // Retrieve the additional descriptor from the security context
    final IAdditionalDescriptor addInfo = person . getSecurityContext ( ) . getAdditionalDescriptor ( ) ;
    // Process the additional descriptor if one was created
    if ( addInfo != null ) {
        // Replace the passed in IPerson with the additional descriptor if the
        // additional descriptor is an IPerson object created by the security context
        // NOTE : This is not the preferred method , creation of IPerson objects should be
        // handled by the PersonManager .
        if ( addInfo instanceof IPerson ) {
            final IPerson newPerson = ( IPerson ) addInfo ;
            person . setFullName ( newPerson . getFullName ( ) ) ;
            for ( final String attributeName : newPerson . getAttributeMap ( ) . keySet ( ) ) {
                person . setAttribute ( attributeName , newPerson . getAttribute ( attributeName ) ) ;
            }
        }
        // If the additional descriptor is a map then we can
        // simply copy all of these additional attributes into the IPerson
        else if ( addInfo instanceof Map ) {
            // Cast the additional descriptor as a Map
            final Map < ? , ? > additionalAttributes = ( Map < ? , ? > ) addInfo ;
            // Copy each additional attribute into the person object
            for ( final Iterator < ? > keys = additionalAttributes . keySet ( ) . iterator ( ) ; keys . hasNext ( ) ; ) {
                // Get a key
                final String key = ( String ) keys . next ( ) ;
                // Set the attribute
                person . setAttribute ( key , additionalAttributes . get ( key ) ) ;
            }
        } else if ( addInfo instanceof ChainingSecurityContext . ChainingAdditionalDescriptor ) {
            // do nothing
        } else {
            if ( log . isWarnEnabled ( ) ) {
                log . warn ( "Authentication Service received unknown additional descriptor [" + addInfo + "]" ) ;
            }
        }
    }
    // Populate the person object using the PersonDirectory if applicable
    if ( PropertiesManager . getPropertyAsBoolean ( "org.apereo.portal.services.Authentication.usePersonDirectory" ) ) {
        // Retrieve all of the attributes associated with the person logging in
        final String username = person . getUserName ( ) ;
        final long timestamp = System . currentTimeMillis ( ) ;
        if ( log . isDebugEnabled ( ) ) {
            log . debug ( "STARTING user attribute gathering for user '" + userName + "' #milestone" ) ;
        }
        final IPersonAttributes personAttributes = this . personAttributeDao . getPerson ( username ) ;
        if ( log . isDebugEnabled ( ) ) {
            log . debug ( "FINISHED user attribute gathering for user '" + userName + "' in " + Long . toString ( System . currentTimeMillis ( ) - timestamp ) + "ms #milestone" ) ;
        }
        if ( personAttributes != null ) {
            // attribs may be null . IPersonAttributeDao returns null when it does not
            // recognize a user at all , as
            // distinguished from returning an empty Map of attributes when it recognizes a
            // user has having no
            // attributes .
            person . setAttributes ( personAttributes . getAttributes ( ) ) ;
        }
    }
    // Make sure the user ' s fullname is set
    if ( person . getFullName ( ) == null ) {
        // Use portal display name if one exists
        if ( person . getAttribute ( "portalDisplayName" ) != null ) {
            person . setFullName ( ( String ) person . getAttribute ( "portalDisplayName" ) ) ;
        }
        // If not try the eduPerson displayName
        else if ( person . getAttribute ( "displayName" ) != null ) {
            person . setFullName ( ( String ) person . getAttribute ( "displayName" ) ) ;
        }
        // If still no FullName use an unrecognized string
        if ( person . getFullName ( ) == null ) {
            person . setFullName ( "Unrecognized person: " + person . getAttribute ( IPerson . USERNAME ) ) ;
        }
    }
    // Find the uPortal userid for this user or flunk authentication if not found .
    final boolean autocreate = PropertiesManager . getPropertyAsBoolean ( "org.apereo.portal.services.Authentication.autoCreateUsers" ) ;
    try {
        // Attempt to retrieve the UID
        final int newUID = this . userIdentityStore . getPortalUID ( person , autocreate ) ;
        person . setID ( newUID ) ;
    } catch ( final AuthorizationException ae ) {
        log . error ( "Exception retrieving ID" , ae ) ;
        throw new PortalSecurityException ( "Authentication Service: Exception retrieving UID" ) ;
    }
}
// Publish a login event for the person
this . portalEventFactory . publishLoginEvent ( request , this , person ) ;
public class SignatureFileVerifier { /** * convert a byte array to a hex string for debugging purposes * @ param data the binary data to be converted to a hex string * @ return an ASCII hex string */ static String toHex ( byte [ ] data ) { } }
// Expands each byte to two hex digits (high nibble first) using the
// class-level lookup table 'hexc'.  StringBuilder replaces the legacy
// synchronized StringBuffer — the buffer is method-local, so no locking
// is needed; capacity is pre-sized to the exact output length.
StringBuilder sb = new StringBuilder ( data . length * 2 ) ;
for ( byte b : data ) {
    sb . append ( hexc [ ( b >> 4 ) & 0x0f ] ) ;
    sb . append ( hexc [ b & 0x0f ] ) ;
}
return sb . toString ( ) ;
public class Jenkins { /** * Returns the enabled and activated administrative monitors . * @ since 2.64 */ public List < AdministrativeMonitor > getActiveAdministrativeMonitors ( ) { } }
// A monitor is reported only when it is both enabled (not dismissed by an
// admin) and activated (its trigger condition currently holds).  Chained
// filters short-circuit per element exactly like the && form.
return administrativeMonitors . stream ( )
        . filter ( AdministrativeMonitor :: isEnabled )
        . filter ( AdministrativeMonitor :: isActivated )
        . collect ( Collectors . toList ( ) ) ;
public class BundleRepositoryRegistry { /** * Add the default repositories for the product * @ param serverName If set to a serverName , a cache will be created in that server ' s workarea . A null value disables caching . * @ param useMsgs This setting is passed on to the held ContentLocalBundleRepositories . */ public static synchronized void initializeDefaults ( String serverName , boolean useMsgs ) { } }
// Record the message / cache settings first: the repositories registered
// below read these static fields when they are constructed.
allUseMsgs = useMsgs ;
cacheServerName = serverName ;
// Register the two standard repositories: the product install directory
// (core extension) and the user extension directory under the user dir.
addBundleRepository ( Utils . getInstallDir ( ) . getAbsolutePath ( ) , ExtensionConstants . CORE_EXTENSION ) ;
addBundleRepository ( new File ( Utils . getUserDir ( ) , "/extension/" ) . getAbsolutePath ( ) , ExtensionConstants . USER_EXTENSION ) ;
public class JMThread { /** * Run with schedule at fixed rate scheduled future . * @ param initialDelayMillis the initial delay millis * @ param periodMillis the period millis * @ param runnable the runnable * @ return the scheduled future */ public static ScheduledFuture < ? > runWithScheduleAtFixedRate ( long initialDelayMillis , long periodMillis , Runnable runnable ) { } }
// Delegates to the named overload, using the method's own name as the
// default task label.
return runWithScheduleAtFixedRate ( initialDelayMillis , periodMillis , "runWithScheduleAtFixedRate" , runnable ) ;
public class IbvPd { /** * - - - - - oo - verbs */ public SVCRegMr regMr ( ByteBuffer buffer , int access ) throws IOException { } }
// Register the memory region with this protection domain via the verbs layer.
return verbs . regMr ( this , buffer , access ) ;
public class SQLiteConnection { /** * Collects statistics about database connection memory usage , in the case where the * caller might not actually own the connection . * @ return The statistics object , never null . */ void collectDbStatsUnsafe ( ArrayList < com . couchbase . lite . internal . database . sqlite . SQLiteDebug . DbStats > dbStatsList ) { } }
// Report stats for the main database only.
// NOTE(review): the three zero arguments presumably stand for values
// (e.g. page count / page size / lookaside) that are unknown in this
// "unsafe" (non-owning) path — confirm against getMainDbStatsUnsafe.
dbStatsList . add ( getMainDbStatsUnsafe ( 0 , 0 , 0 ) ) ;
public class BuildEvaluator { /** * Calculates Audit Response for a given dashboard * @ param beginDate * @ param endDate * @ return BuildAuditResponse for the build job for a given dashboard , begin and end date */ private BuildAuditResponse getBuildJobAuditResponse ( CollectorItem buildItem , long beginDate , long endDate , List < CollectorItem > repoItems ) { } }
// Builds the audit response in three steps: (1) collect the Jenkins job
// config history, (2) compare the last build's repos against the dashboard
// repos, (3) flag overlap between job-config editors and code authors.
BuildAuditResponse buildAuditResponse = new BuildAuditResponse ( ) ;
// Check Jenkins Job config log to validate pr author is not modifying the Prod Job .
// Since beginDate and endDate are the same column and between is excluding the edge values ,
// we need to subtract / add a millisec .
List < CollectorItemConfigHistory > jobConfigHists = collItemConfigHistoryRepository . findByCollectorItemIdAndTimestampIsBetweenOrderByTimestampDesc ( buildItem . getId ( ) , beginDate - 1 , endDate + 1 ) ;
buildAuditResponse . setConfigHistory ( jobConfigHists ) ;
if ( ! CollectionUtils . isEmpty ( repoItems ) ) {
    // Compare the repos/branches recorded on the most recent build against
    // the dashboard's configured repo items .
    Build build = buildRepository . findTop1ByCollectorItemIdOrderByTimestampDesc ( buildItem . getId ( ) ) ;
    if ( build != null ) {
        List < RepoBranch > repoBranches = build . getCodeRepos ( ) ;
        List < ParsedRepo > codeRepos = repoItems . stream ( ) . map ( r -> new ParsedRepo ( ( String ) r . getOptions ( ) . get ( "url" ) , ( String ) r . getOptions ( ) . get ( "branch" ) ) ) . collect ( Collectors . toList ( ) ) ;
        List < ParsedRepo > buildRepos = repoBranches . stream ( ) . map ( b -> new ParsedRepo ( b . getUrl ( ) , b . getBranch ( ) ) ) . collect ( Collectors . toList ( ) ) ;
        // Any common repo means the build actually builds the dashboard's code .
        List < ParsedRepo > intersection = codeRepos . stream ( ) . filter ( buildRepos :: contains ) . collect ( Collectors . toList ( ) ) ;
        buildAuditResponse . addAuditStatus ( CollectionUtils . isEmpty ( intersection ) ? BuildAuditStatus . BUILD_REPO_MISMATCH : BuildAuditStatus . BUILD_MATCHES_REPO ) ;
    } else {
        buildAuditResponse . addAuditStatus ( BuildAuditStatus . NO_BUILD_FOUND ) ;
    }
}
// Flag the case where someone who committed code also edited the job config .
Set < String > codeAuthors = CommonCodeReview . getCodeAuthors ( repoItems , beginDate , endDate , commitRepository ) ;
List < String > overlap = jobConfigHists . stream ( ) . map ( CollectorItemConfigHistory :: getUserID ) . filter ( codeAuthors :: contains ) . collect ( Collectors . toList ( ) ) ;
buildAuditResponse . addAuditStatus ( ! CollectionUtils . isEmpty ( overlap ) ? BuildAuditStatus . BUILD_AUTHOR_EQ_REPO_AUTHOR : BuildAuditStatus . BUILD_AUTHOR_NE_REPO_AUTHOR ) ;
return buildAuditResponse ;
public class JournalNodeJournalSyncer { /** * Recovers a single segment * @ param elf * descriptor of the segment to be recovered * @ param task * contains journal description * @ throws IOException */ void recoverSegments ( SyncTask task ) throws IOException { } }
// obtain the list of segments that are valid if ( ! prepareRecovery ( task ) ) { return ; } // iterate through all nodes for ( InetSocketAddress jn : journalNodes ) { if ( isLocalIpAddress ( jn . getAddress ( ) ) && jn . getPort ( ) == journalNode . getPort ( ) ) { // we do not need to talk to ourselves continue ; } try { // get manifest for log that we care about List < EditLogFile > remoteLogFiles = getManifest ( jn , task . journal , task . recoveryStartTxid ) ; // go through all remote segments for ( EditLogFile relf : remoteLogFiles ) { recoverSegment ( jn , relf , task ) ; } // if we are done , there is no need to iterate more if ( ! task . hasMissingValidSegments ( ) ) { LOG . info ( logMsg + "recovery finished." ) ; break ; } } catch ( Exception e ) { LOG . error ( logMsg + "error" , e ) ; continue ; } }
public class RestartableSequence { /** * Load all of the remaining rows from the supplied sequence into the buffer . */ protected void loadRemaining ( ) { } }
// Drains the wrapped sequence into the buffer exactly once (guarded by
// 'loadedAll'), filling in-memory batches until the configured row budget
// is reached and spilling the remainder off-heap .
if ( ! loadedAll ) {
    // Put all of the batches from the sequence into the buffer
    assert targetNumRowsInMemory >= 0L ;
    assert batchSize != null ;
    Batch batch = original . nextBatch ( ) ;
    // Only load into memory while a memory buffer exists and has room .
    boolean loadIntoMemory = inMemoryBatches != null && actualNumRowsInMemory < targetNumRowsInMemory ;
    while ( batch != null ) {
        long rows = loadBatch ( batch , loadIntoMemory , null ) ;
        // Remember the size of the first non-empty batch as the nominal batch size .
        if ( batchSize . get ( ) == 0L ) batchSize . set ( rows ) ;
        if ( loadIntoMemory ) {
            assert inMemoryBatches != null ;
            // Stop buffering in memory once the row budget is exhausted .
            if ( actualNumRowsInMemory >= targetNumRowsInMemory ) loadIntoMemory = false ;
        }
        batch = original . nextBatch ( ) ;
    }
    // Total row count = spilled rows + rows kept in memory .
    long numInMemory = inMemoryBatches != null ? actualNumRowsInMemory : 0L ;
    totalSize = offHeapBatchesSupplier . size ( ) + numInMemory ;
    loadedAll = true ;
    restartBatches ( ) ;
}
public class GeometryUtilities { /** * Calculates the azimuth in degrees given two { @ link Coordinate } composing a line . * Note that the coords order is important and will differ of 180. * @ param c1 first coordinate ( used as origin ) . * @ param c2 second coordinate . * @ return the azimuth angle . */ public static double azimuth ( Coordinate c1 , Coordinate c2 ) { } }
// vertical if ( c1 . x == c2 . x ) { if ( c1 . y == c2 . y ) { // same point return Double . NaN ; } else if ( c1 . y < c2 . y ) { return 0.0 ; } else if ( c1 . y > c2 . y ) { return 180.0 ; } } // horiz if ( c1 . y == c2 . y ) { if ( c1 . x < c2 . x ) { return 90.0 ; } else if ( c1 . x > c2 . x ) { return 270.0 ; } } if ( c1 . x < c2 . x && c1 . y < c2 . y ) { double tanA = ( c2 . x - c1 . x ) / ( c2 . y - c1 . y ) ; double atan = atan ( tanA ) ; return toDegrees ( atan ) ; } if ( c1 . x < c2 . x && c1 . y > c2 . y ) { double tanA = ( c1 . y - c2 . y ) / ( c2 . x - c1 . x ) ; double atan = atan ( tanA ) ; return toDegrees ( atan ) + 90.0 ; } if ( c1 . x > c2 . x && c1 . y > c2 . y ) { double tanA = ( c1 . x - c2 . x ) / ( c1 . y - c2 . y ) ; double atan = atan ( tanA ) ; return toDegrees ( atan ) + 180 ; } if ( c1 . x > c2 . x && c1 . y < c2 . y ) { double tanA = ( c2 . y - c1 . y ) / ( c1 . x - c2 . x ) ; double atan = atan ( tanA ) ; return toDegrees ( atan ) + 270 ; } return Double . NaN ;
public class ST_AddZ { /** * Add a z with to the existing value ( do the sum ) . NaN values are not * updated . * @ param geometry * @ param z * @ return * @ throws java . sql . SQLException */ public static Geometry addZ ( Geometry geometry , double z ) throws SQLException { } }
// Null geometries pass straight through; otherwise the constant z is
// added to every coordinate in place via a sequence filter, and the
// (mutated) input geometry is returned.
if ( geometry != null ) {
    geometry . apply ( new AddZCoordinateSequenceFilter ( z ) ) ;
    return geometry ;
}
return null ;
public class CacheManager { /** * Download in background all tiles of the specified area in osmdroid cache . * @ param ctx * @ param pTiles * @ param zoomMin * @ param zoomMax */ public CacheManagerTask downloadAreaAsync ( Context ctx , List < Long > pTiles , final int zoomMin , final int zoomMax ) { } }
// Build the background download task for the given tile list / zoom range,
// attach the progress dialog as a callback, and queue it for execution.
final CacheManagerTask task = new CacheManagerTask ( this , getDownloadingAction ( ) , pTiles , zoomMin , zoomMax ) ;
task . addCallback ( getDownloadingDialog ( ctx , task ) ) ;
return execute ( task ) ;
public class NetworkWatchersInner { /** * Get network configuration diagnostic . * @ param resourceGroupName The name of the resource group . * @ param networkWatcherName The name of the network watcher . * @ param parameters Parameters to get network configuration diagnostic . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws ErrorResponseException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the NetworkConfigurationDiagnosticResponseInner object if successful . */ public NetworkConfigurationDiagnosticResponseInner getNetworkConfigurationDiagnostic ( String resourceGroupName , String networkWatcherName , NetworkConfigurationDiagnosticParameters parameters ) { } }
// Blocks until the long-running diagnostic operation completes (last emitted
// response of the polling observable) and unwraps its body.
return getNetworkConfigurationDiagnosticWithServiceResponseAsync ( resourceGroupName , networkWatcherName , parameters ) . toBlocking ( ) . last ( ) . body ( ) ;
public class WPartialDateField { /** * Set the WPartialDateField with the given day , month and year . Each of the day , month and year parameters that * make up the partial date are optional . * @ param day A number from 1 to 31 or null if unknown . * @ param month A number from 1 to 12 , or null if unknown . * @ param year A number , or null if unknown . */ public void setPartialDate ( final Integer day , final Integer month , final Integer year ) { } }
// Validate Year if ( ! isValidYear ( year ) ) { throw new IllegalArgumentException ( "Setting invalid partial year value (" + year + "). Year should be between " + YEAR_MIN + " to " + YEAR_MAX + "." ) ; } // Validate Month if ( ! isValidMonth ( month ) ) { throw new IllegalArgumentException ( "Setting invalid partial month value (" + month + "). Month should be between " + MONTH_MIN + " to " + MONTH_MAX + "." ) ; } // Validate Day if ( ! isValidDay ( day ) ) { throw new IllegalArgumentException ( "Setting invalid partial day value (" + day + "). Day should be between " + DAY_MIN + " to " + DAY_MAX + "." ) ; } String formatted = formatPartialDateToString ( day , month , year , getPaddingChar ( ) ) ; setData ( formatted ) ; PartialDateFieldModel model = getOrCreateComponentModel ( ) ; model . text = null ; model . validDate = true ;
public class FilmlistenSuchen { /** * Add our default full list servers . */ private void insertDefaultActiveServers ( ) { } }
listeFilmlistenUrls_akt . add ( new DatenFilmlisteUrl ( "http://m.picn.de/f/Filmliste-akt.xz" , DatenFilmlisteUrl . SERVER_ART_AKT ) ) ; listeFilmlistenUrls_akt . add ( new DatenFilmlisteUrl ( "http://m1.picn.de/f/Filmliste-akt.xz" , DatenFilmlisteUrl . SERVER_ART_AKT ) ) ; listeFilmlistenUrls_akt . add ( new DatenFilmlisteUrl ( "http://m2.picn.de/f/Filmliste-akt.xz" , DatenFilmlisteUrl . SERVER_ART_AKT ) ) ; listeFilmlistenUrls_akt . add ( new DatenFilmlisteUrl ( "http://download10.onlinetvrecorder.com/mediathekview/Filmliste-akt.xz" , DatenFilmlisteUrl . SERVER_ART_AKT ) ) ; listeFilmlistenUrls_akt . add ( new DatenFilmlisteUrl ( "http://mediathekview.jankal.me/Filmliste-akt.xz" , DatenFilmlisteUrl . SERVER_ART_AKT ) ) ; listeFilmlistenUrls_akt . add ( new DatenFilmlisteUrl ( "http://verteiler1.mediathekview.de/Filmliste-akt.xz" , DatenFilmlisteUrl . SERVER_ART_AKT ) ) ; listeFilmlistenUrls_akt . add ( new DatenFilmlisteUrl ( "http://verteiler2.mediathekview.de/Filmliste-akt.xz" , DatenFilmlisteUrl . SERVER_ART_AKT ) ) ; listeFilmlistenUrls_akt . add ( new DatenFilmlisteUrl ( "http://verteiler3.mediathekview.de/Filmliste-akt.xz" , DatenFilmlisteUrl . SERVER_ART_AKT ) ) ;
public class JsMainAdminServiceImpl { /** * This method is only used to populate destination of type Alias * Some properties even though not there in config it is set * to maintain backward compatibility with the runtime code . * @ param properties * @ param destinationList * @ param meName * @ param configAdmin * @ param modified */ private void populateAliasDestinations ( Map < String , Object > properties , HashMap < String , BaseDestination > destinationList , ConfigurationAdmin configAdmin ) { } }
// For every configured alias destination: load its OSGi configuration,
// validate name/target, derive reliability and send/receive flags (partly
// from the target destination), and add it to destinationList.  Invalid
// entries are logged and skipped rather than aborting the whole pass.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) {
    SibTr . entry ( tc , "populateAliasDestinations" , new Object [ ] { properties , destinationList , configAdmin } ) ;
}
String [ ] aliasDestinations = ( String [ ] ) properties . get ( JsAdminConstants . ALIAS ) ;
if ( aliasDestinations != null ) {
    for ( String aliasDestinationPid : aliasDestinations ) {
        pids . add ( aliasDestinationPid ) ;
        Configuration config = null ;
        try {
            config = configAdmin . getConfiguration ( aliasDestinationPid , bundleLocation ) ;
        } catch ( IOException e ) {
            SibTr . exception ( tc , e ) ;
            FFDCFilter . processException ( e , this . getClass ( ) . getName ( ) , "561" , this ) ;
        }
        Dictionary aliasDestinationProperties = config . getProperties ( ) ;
        AliasDestination aliasDest = new AliasDestinationImpl ( ) ;
        String aliasDestinationName = ( String ) aliasDestinationProperties . get ( JsAdminConstants . ID ) ;
        String targetDestinationName = ( String ) aliasDestinationProperties . get ( JsAdminConstants . TARGETDESTINATION ) ;
        // An alias must not collide with an already-registered destination .
        if ( destinationList . containsKey ( aliasDestinationName ) ) {
            SibTr . error ( tc , "ALIAS_SAME_DEST_ID_SIAS0125" , new Object [ ] { aliasDestinationName } ) ;
            continue ;
        }
        if ( aliasDestinationName != null && ! aliasDestinationName . toString ( ) . trim ( ) . isEmpty ( ) ) {
            // An alias without a target destination is unusable — skip it .
            if ( targetDestinationName == null || targetDestinationName . toString ( ) . trim ( ) . isEmpty ( ) ) {
                SibTr . error ( tc , "INVALID_TARGET_DEST_SIAS0110" , new Object [ ] { aliasDestinationProperties . get ( JsAdminConstants . ID ) } ) ;
                continue ;
            }
            if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) {
                SibTr . debug ( this , tc , "Destination ID : " + aliasDestinationProperties . get ( JsAdminConstants . ID ) ) ;
            }
            // set the name of the queue . Here ID is considered as the name
            aliasDest . setName ( aliasDestinationName ) ;
            // set the target destination
            aliasDest . setTargetDestination ( targetDestinationName ) ;
            // here local is false and alias is true as we are negotiating the destination
            // of type Alias and not Queue or Topic
            aliasDest . setLocal ( false ) ;
            aliasDest . setAlias ( true ) ;
            // set overrideOfQOSByProducerAllowed
            String forceReliablility = ( String ) aliasDestinationProperties . get ( JsAdminConstants . FORCERELIABILITY ) ;
            aliasDest . setDefaultReliability ( forceReliablility ) ;
            aliasDest . setMaximumReliability ( forceReliablility ) ;
            // send/receive flags default to "false" and are relaxed based on the
            // target destination's own settings when the target is known .
            String sendAllowed = "false" ;
            String receiveAllowed = "false" ;
            if ( destinationList . get ( targetDestinationName ) instanceof SIBDestination ) {
                SIBDestination targetDestination = ( SIBDestination ) destinationList . get ( targetDestinationName ) ;
                if ( targetDestination . isSendAllowed ( ) ) {
                    sendAllowed = ( ( String ) aliasDestinationProperties . get ( JsAdminConstants . SENDALLOWED ) ) ;
                }
                receiveAllowed = String . valueOf ( targetDestination . isReceiveAllowed ( ) ) ;
            }
            aliasDest . setSendAllowed ( sendAllowed ) ;
            aliasDest . setReceiveAllowed ( receiveAllowed ) ;
        } else {
            SibTr . error ( tc , "NO_ID_PROVIDED_SIAS0102" , new Object [ ] { JsAdminConstants . ALIAS } ) ;
            continue ;
        }
        destinationList . put ( aliasDest . getName ( ) , aliasDest ) ;
    }
}
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) {
    SibTr . exit ( tc , "populateAliasDestinations" , new Object [ ] { destinationList } ) ;
}
public class ExceptionUtil { /** * This rethrow the exception providing an allowed Exception in first priority , even it is a Runtime exception */ public static < T extends Throwable > RuntimeException rethrowAllowedTypeFirst ( final Throwable t , Class < T > allowedType ) throws T { } }
// Errors always propagate untouched; after that, the caller's preferred
// (allowed) exception type wins over the generic unwrapping done by peel().
rethrowIfError ( t ) ;
if ( allowedType . isAssignableFrom ( t . getClass ( ) ) ) {
    throw ( T ) t ;
}
throw peel ( t ) ;
public class JsonWriter { /** * Inserts any necessary separators and whitespace before a name . Also * adjusts the stack to expect the name ' s value . */ private void beforeName ( ) throws IOException { } }
final int state = peek ( ) ;
// A name is only legal directly inside an object.
if ( state != NONEMPTY_OBJECT && state != EMPTY_OBJECT ) {
    throw new IllegalStateException ( "Nesting problem." ) ;
}
// Separate this name from the previous member, if any.
if ( state == NONEMPTY_OBJECT ) {
    out . write ( ',' ) ;
}
newline ( ) ;
// The next token written must be this name's value.
replaceTop ( DANGLING_NAME ) ;
public class GVRPose { /** * Gets the local rotation for a bone given its index . * @ param boneindexzero based index of bone whose rotation is wanted . * @ return local rotation for the designated bone as a quaternion . * @ see # setLocalRotation * @ see # setWorldRotations * @ see # setWorldMatrix * @ see GVRSkeleton # setBoneAxis */ public void getLocalRotation ( int boneindex , Quaternionf q ) { } }
Bone bone = mBones [ boneindex ] ;
// Lazily rebuild the local matrix if a world-space change invalidated it.
if ( ( bone . Changed & ( WORLD_POS | WORLD_ROT ) ) != 0 ) {
    calcLocal ( bone , mSkeleton . getParentBoneIndex ( boneindex ) ) ;
}
// The local matrix may carry scale, so extract an unnormalized rotation
// and normalize the quaternion afterwards.
bone . LocalMatrix . getUnnormalizedRotation ( q ) ;
q . normalize ( ) ;
public class JdbcCpoTrxAdapter { /** * DOCUMENT ME ! * @ return DOCUMENT ME ! * @ throws CpoException DOCUMENT ME ! */ @ Override protected Connection getReadConnection ( ) throws CpoException { } }
// Reads within a transaction reuse the single static connection; mark it
// busy so it is not handed out concurrently.
Connection connection = getStaticConnection ( ) ;
setConnectionBusy ( connection ) ;
return connection ;
public class LineMap { /** * Maps a destination line to an error location . * @ param buf CharBuffer to write the error location * @ param line generated source line to convert . */ private void convertError ( CharBuffer buf , int line ) { } }
// Maps a generated (destination) line back to "file:line" in the original
// source and appends it to 'buf'.  Fixes two idiom defects in the original:
// an empty 'if (srcFilename != null) {}' branch and the unused local
// 'srcTailLine'.  Behavior is unchanged.
String srcFilename = null ;
int destLine = 0 ;
int srcLine = 0 ;
// Find the first mapping entry whose destination range covers 'line'.
for ( int i = 0 ; i < _lines . size ( ) ; i ++ ) {
    Line map = ( Line ) _lines . get ( i ) ;
    if ( map . _dstLine <= line && line <= map . getLastDestinationLine ( ) ) {
        srcFilename = map . _srcFilename ;
        destLine = map . _dstLine ;
        srcLine = map . getSourceLine ( line ) ;
        break ;
    }
}
// No covering entry (or entry with a null filename): fall back to the first
// entry's filename, or the empty string when there are no entries at all.
if ( srcFilename == null ) {
    srcFilename = _lines . size ( ) > 0 ? ( ( Line ) _lines . get ( 0 ) ) . _srcFilename : "" ;
}
buf . append ( srcFilename ) ;
if ( line >= 0 ) {
    buf . append ( ":" ) ;
    // Offset from the matched entry's start keeps unmatched lines monotone.
    buf . append ( srcLine + ( line - destLine ) ) ;
}
public class AmazonRDSClient { /** * Enables ingress to a DBSecurityGroup using one of two forms of authorization . First , EC2 or VPC security groups * can be added to the DBSecurityGroup if the application using the database is running on EC2 or VPC instances . * Second , IP ranges are available if the application accessing your database is running on the Internet . Required * parameters for this API are one of CIDR range , EC2SecurityGroupId for VPC , or ( EC2SecurityGroupOwnerId and either * EC2SecurityGroupName or EC2SecurityGroupId for non - VPC ) . * < note > * You can ' t authorize ingress from an EC2 security group in one AWS Region to an Amazon RDS DB instance in another . * You can ' t authorize ingress from a VPC security group in one VPC to an Amazon RDS DB instance in another . * < / note > * For an overview of CIDR ranges , go to the < a * href = " http : / / en . wikipedia . org / wiki / Classless _ Inter - Domain _ Routing " > Wikipedia Tutorial < / a > . * @ param authorizeDBSecurityGroupIngressRequest * @ return Result of the AuthorizeDBSecurityGroupIngress operation returned by the service . * @ throws DBSecurityGroupNotFoundException * < i > DBSecurityGroupName < / i > doesn ' t refer to an existing DB security group . * @ throws InvalidDBSecurityGroupStateException * The state of the DB security group doesn ' t allow deletion . * @ throws AuthorizationAlreadyExistsException * The specified CIDRIP or Amazon EC2 security group is already authorized for the specified DB security * group . * @ throws AuthorizationQuotaExceededException * The DB security group authorization quota has been reached . * @ sample AmazonRDS . AuthorizeDBSecurityGroupIngress * @ see < a href = " http : / / docs . aws . amazon . 
com / goto / WebAPI / rds - 2014-10-31 / AuthorizeDBSecurityGroupIngress " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DBSecurityGroup authorizeDBSecurityGroupIngress ( AuthorizeDBSecurityGroupIngressRequest request ) { } }
// Standard SDK dispatch: run the pre-execution handler chain, then execute.
request = beforeClientExecution ( request ) ;
return executeAuthorizeDBSecurityGroupIngress ( request ) ;
public class HttpURL { /** * Whether this URL uses the default port for the protocol . The default * port is 80 for " http " protocol , and 443 for " https " . Other protocols * are not supported and this method will always return false * for them . * @ return < code > true < / code > if the URL is using the default port . * @ since 1.8.0 */ public boolean isPortDefault ( ) { } }
// Only "http" (80) and "https" (443) have a notion of a default port here;
// any other protocol always reports false.
if ( PROTOCOL_HTTPS . equalsIgnoreCase ( protocol ) ) {
    return port == DEFAULT_HTTPS_PORT ;
}
if ( PROTOCOL_HTTP . equalsIgnoreCase ( protocol ) ) {
    return port == DEFAULT_HTTP_PORT ;
}
return false ;
public class DynamicMessage { /** * Change option name to its fully qualified name . */ public void normalizeName ( Key key , String fullyQualifiedName ) { } }
// Re-key the option under its fully qualified name, preserving its value.
Value value = fields . remove ( key ) ;
if ( value == null ) {
    throw new IllegalStateException ( "Could not find option for key=" + key ) ;
}
Key newKey ;
if ( fullyQualifiedName . startsWith ( "." ) ) {
    // TODO : we should not use format with leading dot internally
    newKey = Key . extension ( fullyQualifiedName . substring ( 1 ) ) ;
} else {
    newKey = Key . extension ( fullyQualifiedName ) ;
}
fields . put ( newKey , value ) ;
public class ProcedureRunner { /** * Batch up pre - planned fragments , but handle ad hoc independently . */ private VoltTable [ ] fastPath ( List < QueuedSQL > batch , final boolean finalTask ) { } }
// Executes all pre-planned fragments of the batch in one EE call, records
// per-fragment stats, and rolls back the batch on failure.  The exact
// ordering of EE calls, deserialization reads and the finally-block stats
// extraction is load-bearing; do not reorder.
final int batchSize = batch . size ( ) ;
Object [ ] params = new Object [ batchSize ] ;
long [ ] fragmentIds = new long [ batchSize ] ;
String [ ] sqlTexts = new String [ batchSize ] ;
boolean [ ] isWriteFrag = new boolean [ batchSize ] ;
int [ ] sqlCRCs = new int [ batchSize ] ;
int succeededFragmentsCount = 0 ;
int i = 0 ;
// Flatten the queued statements into the parallel arrays the EE expects .
for ( final QueuedSQL qs : batch ) {
    assert ( qs . stmt . collector == null ) ;
    fragmentIds [ i ] = qs . stmt . aggregator . id ;
    // use the pre - serialized params if it exists
    params [ i ] = qs . params ;
    sqlTexts [ i ] = qs . stmt . getText ( ) ;
    isWriteFrag [ i ] = ! qs . stmt . isReadOnly ;
    sqlCRCs [ i ] = SQLStmtAdHocHelper . getHash ( qs . stmt ) ;
    i ++ ;
}
VoltTable [ ] results = null ;
// Before executing the fragments , tell the EE if this batch should be timed .
getExecutionEngine ( ) . setPerFragmentTimingEnabled ( m_perCallStats . samplingStmts ( ) ) ;
try {
    FastDeserializer fragResult = m_site . executePlanFragments ( batchSize , fragmentIds , null , params , m_determinismHash , sqlTexts , isWriteFrag , sqlCRCs , m_txnState . txnId , m_txnState . m_spHandle , m_txnState . uniqueId , m_isReadOnly , VoltTrace . log ( VoltTrace . Category . EE ) != null ) ;
    final int totalSize ;
    try {
        // read the size of the DR buffer used
        fragResult . readInt ( ) ;
        // read the complete size of the buffer used
        totalSize = fragResult . readInt ( ) ;
    } catch ( final IOException ex ) {
        log . error ( "Failed to deserialze result table" + ex ) ;
        throw new EEException ( ExecutionEngine . ERRORCODE_WRONG_SERIALIZED_BYTES ) ;
    }
    final ByteBuffer rawDataBuff ;
    if ( ( m_batchIndex == 0 && ! m_site . usingFallbackBuffer ( ) ) || finalTask ) {
        // If this is the first or final batch , skip the copy of the
        // underlying byte array
        rawDataBuff = fragResult . buffer ( ) ;
    } else {
        rawDataBuff = fragResult . readBuffer ( totalSize ) ;
    }
    results = TableHelper . convertBackedBufferToTables ( rawDataBuff , batchSize ) ;
} catch ( Throwable ex ) {
    if ( ! m_isReadOnly ) {
        // roll back the current batch and re - throw the EE exception
        m_site . truncateUndoLog ( true , false , m_spBigBatchBeginToken >= 0 ? m_spBigBatchBeginToken : m_site . getLatestUndoToken ( ) , m_txnState . m_spHandle , null ) ;
    }
    throw ex ;
} finally {
    // Per-fragment stats are extracted whether or not execution succeeded .
    long [ ] executionTimes = null ;
    if ( m_perCallStats . samplingStmts ( ) ) {
        executionTimes = new long [ batchSize ] ;
    }
    succeededFragmentsCount = getExecutionEngine ( ) . extractPerFragmentStats ( batchSize , executionTimes ) ;
    for ( i = 0 ; i < batchSize ; i ++ ) {
        QueuedSQL qs = batch . get ( i ) ;
        // No coordinator task for a single partition procedure .
        boolean isCoordinatorTask = false ;
        // If all the fragments in this batch are executed successfully ,
        // succeededFragmentsCount = = batchSize .
        // Otherwise , the fragment whose index equals
        // succeededFragmentsCount is the one that failed .
        boolean failed = i == succeededFragmentsCount ;
        m_perCallStats . recordStatementStats ( qs . stmt . getStmtName ( ) , isCoordinatorTask , failed , executionTimes == null ? 0 : executionTimes [ i ] , results == null ? null : results [ i ] , qs . params ) ;
        // If this fragment failed , no subsequent fragments will be executed .
        if ( failed ) {
            break ;
        }
    }
}
return results ;
public class DoubleTuples { /** * Returns the geometric mean of the given tuple * @ param t The input tuple * @ return The mean */ public static double geometricMean ( DoubleTuple t ) { } }
double product = DoubleTupleFunctions . reduce ( t , 1.0 , ( a , b ) -> ( a * b ) ) ; return Math . pow ( product , 1.0 / t . getSize ( ) ) ;
public class XmlMapper { /** * 创建Marshaller并设定encoding ( 可为null ) . * 线程不安全 , 需要每次创建或pooling 。 */ public static Marshaller createMarshaller ( Class clazz , String encoding ) { } }
try { JAXBContext jaxbContext = getJaxbContext ( clazz ) ; Marshaller marshaller = jaxbContext . createMarshaller ( ) ; marshaller . setProperty ( Marshaller . JAXB_FORMATTED_OUTPUT , Boolean . TRUE ) ; if ( StringUtils . isNotBlank ( encoding ) ) { marshaller . setProperty ( Marshaller . JAXB_ENCODING , encoding ) ; } return marshaller ; } catch ( JAXBException e ) { throw ExceptionUtil . unchecked ( e ) ; }
public class ModeShapeRestClient { /** * Returns a repository which has the given name or { @ code null } . * @ param name the name of a repository ; may not be null * @ return a { @ link Repositories . Repository } instace or { @ code null } */ public Repositories . Repository getRepository ( String name ) { } }
JSONRestClient . Response response = jsonRestClient . doGet ( ) ; if ( ! response . isOK ( ) ) { throw new RuntimeException ( JdbcI18n . invalidServerResponse . text ( jsonRestClient . url ( ) , response . asString ( ) ) ) ; } return new Repositories ( response . json ( ) ) . getRepository ( name ) ;
public class FctBnTradeEntitiesProcessors {
    /**
     * <p>Get PrcItemSpecificsSave (create and put into map).</p>
     * Lazy initialization: the processor is created on first request, fully wired,
     * and then cached in {@code processorsMap} under its simple class name.
     *
     * @param pAddParam additional param
     * @return requested PrcItemSpecificsSave
     * @throws Exception - an exception
     */
    protected final PrcItemSpecificsSave<RS, IHasIdLongVersion, AItemSpecificsId<IHasIdLongVersion>>
            lazyGetPrcItemSpecificsSave(final Map<String, Object> pAddParam) throws Exception {
        // The processors map is keyed by the processor's simple class name.
        String beanName = PrcItemSpecificsSave.class.getSimpleName();
        // Cast is safe by construction: only instances of this type are stored under this key.
        @SuppressWarnings("unchecked")
        PrcItemSpecificsSave<RS, IHasIdLongVersion, AItemSpecificsId<IHasIdLongVersion>> proc =
            (PrcItemSpecificsSave<RS, IHasIdLongVersion, AItemSpecificsId<IHasIdLongVersion>>)
                this.processorsMap.get(beanName);
        if (proc == null) {
            // Not cached yet: create and wire dependencies before publishing.
            proc = new PrcItemSpecificsSave<RS, IHasIdLongVersion, AItemSpecificsId<IHasIdLongVersion>>();
            proc.setSrvOrm(getSrvOrm());
            // assigning fully initialized object:
            this.processorsMap.put(beanName, proc);
            this.logger.info(null, FctBnTradeEntitiesProcessors.class, beanName + " has been created.");
        }
        return proc;
    }
}
public class NamespaceConverter { /** * { @ inheritDoc } */ @ Override public XBELNamespace convert ( Namespace source ) { } }
if ( source == null ) return null ; String prefix = source . getPrefix ( ) ; String resourceLocation = source . getResourceLocation ( ) ; XBELNamespace xn = new XBELNamespace ( ) ; xn . setPrefix ( prefix ) ; xn . setResourceLocation ( resourceLocation ) ; return xn ;
public class JsonWebKey {
    /**
     * Get the RSA Private Key Parameter value.
     * Returns a copy so callers cannot mutate the stored key material.
     *
     * @return the RSA Private Key Parameter value.
     */
    @JsonProperty("dq")
    @JsonSerialize(using = Base64UrlJsonSerializer.class)
    @JsonDeserialize(using = Base64UrlJsonDeserializer.class)
    public byte[] dq() {
        // Defensive copy of the internal array (presumably null-safe —
        // confirm ByteExtensions.clone's behavior for null input).
        return ByteExtensions.clone(this.dq);
    }
}
public class CDIServiceUtils { /** * And removes the last " . < number . < number > " */ @ Trivial public static String getSymbolicNameWithoutMinorOrMicroVersionPart ( String symbolicName ) { } }
if ( symbolicName . matches ( ".*\\d+\\.\\d+\\.\\d+$" ) ) { return symbolicName . replaceAll ( "\\.\\d+\\.\\d+$" , "" ) ; } else { return symbolicName ; }
public class SerializationUtils { /** * Deserialize an object . * @ param < T > the type parameter * @ param inputStream The stream to be deserialized * @ param clazz the clazz * @ return the object * @ since 5.0.0 */ @ SneakyThrows public static < T > T deserialize ( final InputStream inputStream , final Class < T > clazz ) { } }
try ( val in = new ObjectInputStream ( inputStream ) ) { val obj = in . readObject ( ) ; if ( ! clazz . isAssignableFrom ( obj . getClass ( ) ) ) { throw new ClassCastException ( "Result [" + obj + " is of type " + obj . getClass ( ) + " when we were expecting " + clazz ) ; } return ( T ) obj ; }
public class RangeTombstone { /** * This tombstone supersedes another one if it is more recent and cover a * bigger range than rt . */ public boolean supersedes ( RangeTombstone rt , Comparator < Composite > comparator ) { } }
if ( rt . data . markedForDeleteAt > data . markedForDeleteAt ) return false ; return comparator . compare ( min , rt . min ) <= 0 && comparator . compare ( max , rt . max ) >= 0 ;
public class AbstractQuery {
    // Compiles this query into its native (C4) representation if not already done.
    // All work happens under 'lock'; the method is idempotent once c4query is set.
    private void check() throws CouchbaseLiteException {
        synchronized (lock) {
            // Already compiled — nothing to do.
            if (c4query != null) { return; }
            database = (Database) from.getSource();
            final String json = encodeAsJson();
            Log.v(DOMAIN, "Query encoded as %s", json);
            if (json == null) { throw new CouchbaseLiteException("Failed to generate JSON query."); }
            // Column names are derived once and cached for result mapping.
            if (columnNames == null) { columnNames = generateColumnNames(); }
            try {
                c4query = database.getC4Database().createQuery(json);
            } catch (LiteCoreException e) {
                // Translate the native LiteCore error into the public exception type.
                throw CBLStatus.convertException(e);
            }
        }
    }
}
public class Mutations {
    /**
     * Inverts mutations, so that they reflect the difference from seq2 to seq1.
     * <p/>
     * E.g. for mutations generated with
     * <pre>
     * NucleotideSequence ref = randomSequence(300);
     * int[] mutations = Mutations.generateMutations(ref,
     *         MutationModels.getEmpiricalNucleotideMutationModel().multiply(3.0));
     * </pre>
     * and the inverted mutations
     * <pre>
     * int[] invMutations = ConsensusAligner.invertMutations(mutations);
     * </pre>
     * the following two calls are equivalent:
     * <pre>
     * Mutations.printAlignment(ref, mutations);
     * Mutations.printAlignment(Mutations.mutate(ref, mutations), invMutations);
     * </pre>
     * Same stands for {@code Mutations.getPosition(mutations, posInSeq1)}.
     *
     * @return mutations that will generate seq1 from seq2
     */
    public Mutations<S> invert() {
        // Nothing to invert for an empty mutation set.
        if (mutations.length == 0) return this;
        int[] newMutations = new int[mutations.length];
        // delta accumulates the net length difference introduced by in/dels seen so
        // far; it is used to re-express positions in seq2 coordinates.
        int delta = 0;
        for (int i = 0; i < mutations.length; i++) {
            int from = getFrom(mutations[i]);
            int to = getTo(mutations[i]);
            int pos = getPosition(mutations[i]);
            int type = getRawTypeCode(mutations[i]);
            switch (type) {
                case RAW_MUTATION_TYPE_DELETION:
                    // A deletion in seq1->seq2 becomes an insertion in seq2->seq1;
                    // the position shift compensates for the removed symbol.
                    delta--;
                    type = RAW_MUTATION_TYPE_INSERTION;
                    pos++;
                    break;
                case RAW_MUTATION_TYPE_INSERTION:
                    // Symmetrically, an insertion becomes a deletion.
                    delta++;
                    type = RAW_MUTATION_TYPE_DELETION;
                    pos--;
                    break;
                default:
                    // Substitutions keep their type; only from/to are swapped below.
                    break;
            }
            // Note from/to are swapped: X->Y becomes Y->X in the inverted view.
            newMutations[i] = createMutation(type, pos + delta, to, from);
        }
        return new Mutations<>(alphabet, newMutations, true);
    }
}
public class GenericsUtils {
    /**
     * Generics declaration may contain type's generics together with outer class
     * generics (if type is an inner class). Returns the map itself for a non-inner
     * class (or if no extra generics are present in the map). When a type's generic
     * is not mentioned in the map, it is resolved from its variable declaration.
     *
     * @param type     type
     * @param generics all type's context generics (self + outer class)
     * @return generics declared on type ({@code A<T, K> -> T, K}) or empty map if
     *         no generics are declared on type
     */
    public static Map<String, Type> extractTypeGenerics(final Class<?> type,
                                                        final Map<String, Type> generics) {
        // assuming generics map always contains correct generics and may include only outer
        // so if we call it with outer type and outer only generics it will correctly detect it
        final boolean enoughGenerics = type.getTypeParameters().length == generics.size();
        if (enoughGenerics) {
            return generics;
        }
        // LinkedHashMap preserves the declaration order of the type parameters.
        final LinkedHashMap<String, Type> res = new LinkedHashMap<String, Type>();
        // owner generics are all generics not mentioned in signature
        for (TypeVariable var : type.getTypeParameters()) {
            final String name = var.getName();
            if (generics.containsKey(name)) {
                res.put(name, generics.get(name));
            } else {
                // case: generic not provided in map may appear with outer class generics, which
                // may incompletely present in type's generic map (class may use generics with the same name)
                // NOTE: 'res' is passed while still being built — earlier parameters may be
                // referenced by later bounds, so the iteration order matters here.
                res.put(name, resolveClass(var.getBounds()[0], res));
            }
        }
        return res;
    }
}
public class Jpa2ResponseStore { /** * { @ inheritDoc } */ @ Override public Collection < Response > findResponses ( SearchCriteria criteria ) { } }
Collection < Response > responsesAllTimestamps = responseRepository . find ( criteria ) ; // timestamp stored as string not queryable in DB , all timestamps come back , still need to filter this subset return findResponses ( criteria , responsesAllTimestamps ) ;
public class AVPush { /** * A helper method to concisely send a push to a query . This method is equivalent to * < pre > * AVPush push = new AVPush ( ) ; * push . setData ( data ) ; * push . setQuery ( query ) ; * push . sendInBackground ( callback ) ; * < / pre > * @ param data The entire data of the push message . See the push guide for more details on the * data format . * @ param query A AVInstallation query which specifies the recipients of a push . * @ param callback callback . done ( e ) is called when the send completes . */ public static void sendDataInBackground ( JSONObject data , AVQuery < ? extends AVInstallation > query , final SendCallback callback ) { } }
AVPush push = new AVPush ( ) ; push . setData ( data ) ; push . setQuery ( query ) ; push . sendInBackground ( callback ) ;
public class FormatUtils {
    /**
     * Is a a strict subclass (or subinterface/implementing class) of b?
     * "Strict" means a class is never considered a subclass of itself.
     *
     * @param a the candidate subtype; must not be null
     * @param b the candidate supertype; must not be null
     * @return true iff a is assignable to b and a != b
     */
    public static boolean isSubclass(Class<?> a, Class<?> b) {
        // Wildcard parameters replace the raw types of the original; callers are
        // source-compatible. Class identity comparison is safe (one Class per loader).
        return a != b && b.isAssignableFrom(a);
    }
}
public class IdentityConverter { /** * / * ( non - Javadoc ) * @ see org . opoo . press . Converter # getOutputFileExtension ( org . opoo . press . Source ) */ @ Override public String getOutputFileExtension ( Source src ) { } }
String name = src . getOrigin ( ) . getName ( ) ; return "." + FilenameUtils . getExtension ( name ) ;
public class ShareSheetStyle { /** * Exclude items from the ShareSheet by package name array . * @ param packageName { @ link String [ ] } package name to be excluded . * @ return this Builder object to allow for chaining of calls to set methods . */ public ShareSheetStyle excludeFromShareSheet ( @ NonNull String [ ] packageName ) { } }
excludeFromShareSheet . addAll ( Arrays . asList ( packageName ) ) ; return this ;
public class DynamicLegacyProgram { /** * Generate MBean parameter info for all Program field annotated with @ In * @ return an array of MBeanParameterInfo */ private MBeanParameterInfo [ ] createMBeanParameterInfos ( ) { } }
final int lenght = this . remoteProgram . getArguments ( ) != null ? this . remoteProgram . getArguments ( ) . size ( ) : 0 ; int cpt = 0 ; MBeanParameterInfo [ ] result = new MBeanParameterInfo [ lenght ] ; if ( this . remoteProgram . getArguments ( ) != null ) { for ( Argument arg : this . remoteProgram . getArguments ( ) ) { // Only simple Type are authorized for JMX String type = null ; String descriptionWithParserFormat = "Parser '%s' will be used to set this parameter as it is defined in @In annotation for this field." ; String descriptionWithoutParserFormat = "No parser will be used for this field as it is a primitive one." ; String descriptionValueValue = descriptionWithoutParserFormat ; if ( ! arg . getParser ( ) . getCanonicalName ( ) . equals ( NopParser . class . getCanonicalName ( ) ) ) { descriptionValueValue = String . format ( descriptionWithParserFormat , arg . getParser ( ) . getCanonicalName ( ) ) ; } if ( ! arg . getType ( ) . equals ( String . class ) && ! arg . getType ( ) . equals ( int . class ) && ! arg . getType ( ) . equals ( long . class ) && ! arg . getType ( ) . equals ( boolean . class ) ) { type = String . class . getCanonicalName ( ) ; } else { type = arg . getType ( ) . getCanonicalName ( ) ; } result [ cpt ++ ] = new MBeanParameterInfo ( arg . getName ( ) , type , descriptionValueValue ) ; } } return result ;
public class Base64 { /** * Decode the byte array . * @ param aEncodedBytes * The encoded byte array . * @ param nOfs * The offset of where to begin decoding * @ param nLen * The number of characters to decode * @ param nOptions * Decoding options . * @ return < code > null < / code > if decoding failed . */ @ Nullable @ ReturnsMutableCopy public static byte [ ] safeDecode ( @ Nullable final byte [ ] aEncodedBytes , @ Nonnegative final int nOfs , @ Nonnegative final int nLen , final int nOptions ) { } }
if ( aEncodedBytes != null ) try { return decode ( aEncodedBytes , nOfs , nLen , nOptions ) ; } catch ( final IOException | IllegalArgumentException ex ) { // fall through } return null ;
public class ConfigUtil { /** * - - - - - Other Methods */ public static void updateKey ( String key , String value ) { } }
if ( PROPERTIES . containsKey ( key + ENC_SUFFIX ) ) { if ( value != null ) { PROPERTIES . setProperty ( key + ENC_SUFFIX , StringEncryptorUtil . encrypt ( value ) ) ; } } else { PROPERTIES . setProperty ( key , value != null ? value : "" ) ; }
public class DefaultNullnessAnnotations { /** * Add default NullnessAnnotations to given INullnessAnnotationDatabase . * @ param database * an INullnessAnnotationDatabase */ public static void addDefaultNullnessAnnotations ( INullnessAnnotationDatabase database ) { } }
if ( AnnotationDatabase . IGNORE_BUILTIN_ANNOTATIONS ) { return ; } boolean missingClassWarningsSuppressed = AnalysisContext . currentAnalysisContext ( ) . setMissingClassWarningsSuppressed ( true ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . METHOD , Values . DOTTED_JAVA_LANG_STRING , NullnessAnnotation . NONNULL ) ; database . addFieldAnnotation ( "java.lang.System" , "out" , "Ljava/io/PrintStream;" , true , NullnessAnnotation . NONNULL ) ; database . addFieldAnnotation ( "java.lang.System" , "err" , "Ljava/io/PrintStream;" , true , NullnessAnnotation . NONNULL ) ; database . addFieldAnnotation ( "java.lang.System" , "in" , "Ljava/io/InputStream;" , true , NullnessAnnotation . NONNULL ) ; database . addFieldAnnotation ( "java.math.BigInteger" , "ZERO" , "Ljava/math/BigInteger;" , true , NullnessAnnotation . NONNULL ) ; database . addFieldAnnotation ( "java.math.BigInteger" , "ONE" , "Ljava/math/BigInteger;" , true , NullnessAnnotation . NONNULL ) ; database . addFieldAnnotation ( "java.math.BigInteger" , "TEN" , "Ljava/math/BigInteger;" , true , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "java.nio.file.Files" , "probeContentType" , "(Ljava/nio/file/Path;)Ljava/lang/String;" , true , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodAnnotation ( "java.nio.file.Path" , "getRoot" , "()Ljava/nio/file/Path;" , false , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodAnnotation ( "java.nio.file.Path" , "getFileName" , "()Ljava/nio/file/Path;" , false , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodAnnotation ( "java.nio.file.Path" , "getParent" , "()Ljava/nio/file/Path;" , false , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodAnnotation ( Values . DOTTED_JAVA_IO_FILE , "list" , "()[Ljava/lang/String;" , false , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodAnnotation ( Values . 
DOTTED_JAVA_IO_FILE , "list" , "(Ljava/io/FilenameFilter;)[Ljava/lang/String;" , false , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodAnnotation ( Values . DOTTED_JAVA_IO_FILE , "listFiles" , "()[Ljava/io/File;" , false , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodAnnotation ( Values . DOTTED_JAVA_IO_FILE , "listFiles" , "(Ljava/io/FilenameFilter;)[Ljava/io/File;" , false , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodAnnotation ( Values . DOTTED_JAVA_IO_FILE , "listFiles" , "(Ljava/io/FileFilter;)[Ljava/io/File;" , false , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodAnnotation ( "java.lang.ref.ReferenceQueue" , "poll" , "()Ljava/lang/ref/Reference;" , false , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodAnnotation ( "java.lang.ref.Reference" , "get" , "()Ljava/lang/Object;" , false , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodAnnotation ( "java.lang.Class" , "newInstance" , "()Ljava/lang/Object;" , false , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "java.lang.Class" , "forName" , "(Ljava/lang/String;)Ljava/lang/Class;" , true , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "java.lang.reflect.Method" , "getParameterTypes" , "()[Ljava/lang/Class;" , false , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( Values . DOTTED_JAVA_LANG_OBJECT , "clone" , "()Ljava/lang/Object;" , false , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( Values . DOTTED_JAVA_LANG_OBJECT , "toString" , "()Ljava/lang/String;" , false , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( Values . DOTTED_JAVA_LANG_OBJECT , "getClass" , "()Ljava/lang/Class;" , false , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( Values . DOTTED_JAVA_LANG_OBJECT , "equals" , "(Ljava/lang/Object;)Z" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . 
addMethodParameterAnnotation ( "java.util.Arrays" , "asList" , "([Ljava/lang/Object;)Ljava/util/List;" , true , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( Values . DOTTED_JAVA_LANG_INTEGER , "<init>" , "(Ljava/lang/String;)V" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( Values . DOTTED_JAVA_LANG_INTEGER , "parseInt" , "(Ljava/lang/String;I)I" , true , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( Values . DOTTED_JAVA_LANG_INTEGER , "parseInt" , "(Ljava/lang/String;)I" , true , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "java.nio.channels.SocketChannel" , "open" , "()Ljava/nio/channels/SocketChannel;" , true , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "java.sql.Statement" , "executeQuery" , "(Ljava/lang/String;)Ljava/sql/ResultSet;" , false , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "java.sql.PreparedStatement" , "executeQuery" , "()Ljava/sql/ResultSet;" , false , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "java.sql.Connection" , "prepareStatement" , "(Ljava/lang/String;)Ljava/sql/PreparedStatement;" , false , NullnessAnnotation . NONNULL ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . METHOD , "java.sql.DatabaseMetaData" , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "java.sql.DatabaseMetaData" , "getConnection" , "()Ljava/sql/Connection;" , false , NullnessAnnotation . NULLABLE ) ; database . addMethodAnnotation ( "java.sql.DatabaseMetaData" , "getAttributes" , "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)Ljava/sql/ResultSet;" , false , NullnessAnnotation . NULLABLE ) ; database . addMethodAnnotation ( "java.sql.DatabaseMetaData" , "getColumns" , "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)Ljava/sql/ResultSet;" , false , NullnessAnnotation . 
NULLABLE ) ; database . addMethodAnnotation ( "java.sql.DatabaseMetaData" , "getSuperTables" , "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)Ljava/sql/ResultSet;" , false , NullnessAnnotation . NULLABLE ) ; database . addMethodAnnotation ( "java.sql.DatabaseMetaData" , "getSuperTypes" , "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)Ljava/sql/ResultSet;" , false , NullnessAnnotation . NULLABLE ) ; database . addMethodAnnotation ( "java.sql.DatabaseMetaData" , "getTimeDateFunctions" , "()Ljava/lang/String;" , false , NullnessAnnotation . NULLABLE ) ; database . addMethodAnnotation ( "java.sql.DatabaseMetaData" , "getTypeInfo" , "()Ljava/sql/ResultSet;" , false , NullnessAnnotation . NULLABLE ) ; database . addMethodAnnotation ( "java.sql.DatabaseMetaData" , "getURL" , "()Ljava/lang/String;" , false , NullnessAnnotation . NULLABLE ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.package-info" , NullnessAnnotation . NONNULL ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.CopyOnWriteArrayList" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.CopyOnWriteArraySet" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.ConcurrentLinkedQueue$Node" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.Exchanger" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.FutureTask" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.LinkedBlockingQueue$Node" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . 
addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.SynchronousQueue$WaitQueue" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.SynchronousQueue$Node" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.ThreadPoolExecutor$Worker" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.AbstractExecutorService" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.ConcurrentSkipListMap$ConcurrentSkipListSubMap" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.ConcurrentSkipListMap$HeadIndex" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.ConcurrentSkipListMap$Index" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.ConcurrentSkipListMap$Node" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.ConcurrentSkipListMap$SubMap" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.ConcurrentSkipListSet$ConcurrentSkipListSubSet" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . 
PARAMETER , "java.util.concurrent.LinkedBlockingDeque$Node" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.SynchronousQueue$TransferQueue" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.SynchronousQueue$TransferQueue$QNode" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.SynchronousQueue$TransferStack" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.SynchronousQueue$Transferer" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ConcurrentMap" , "containsKey" , "(Ljava/lang/Object;)Z" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ConcurrentMap" , "containsValue" , "(Ljava/lang/Object;)Z" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ConcurrentMap" , "get" , "(Ljava/lang/Object;)Ljava/lang/Object;" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ConcurrentMap" , "remove" , "(Ljava/lang/Object;)Ljava/lang/Object;" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ConcurrentMap" , "put" , "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ConcurrentMap" , "put" , "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;" , false , 1 , NullnessAnnotation . NONNULL ) ; database . 
addMethodParameterAnnotation ( "java.util.concurrent.ConcurrentHashMap" , "remove" , "(Ljava/lang/Object;Ljava/lang/Object;)Z" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ConcurrentHashMap" , "setEntryAt" , "([Ljava/util/concurrent/ConcurrentHashMap$HashEntry;ILjava/util/concurrent/ConcurrentHashMap$HashEntry;)V" , false , 1 , NullnessAnnotation . NULLABLE ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ForkJoinPool" , "<init>" , "(ILjava/util/concurrent/ForkJoinPool$ForkJoinWorkerThreadFactory;Ljava/lang/Thread$UncaughtExceptionHandler;Z)V" , false , 1 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ForkJoinPool" , "<init>" , "(ILjava/util/concurrent/ForkJoinPool$ForkJoinWorkerThreadFactory;Ljava/lang/Thread$UncaughtExceptionHandler;Z)V" , false , 2 , NullnessAnnotation . NULLABLE ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.PriorityBlockingQueue" , "<init>" , "(ILjava/util/Comparator;)V" , false , 1 , NullnessAnnotation . NULLABLE ) ; database . addDefaultAnnotation ( AnnotationDatabase . Target . PARAMETER , "java.util.concurrent.ConcurrentLinkedDeque$Node" , NullnessAnnotation . UNKNOWN_NULLNESS ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ConcurrentMap" , "remove" , "(Ljava/lang/Object;Ljava/lang/Object;)Z" , false , 1 , NullnessAnnotation . NULLABLE ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.FutureTask" , "<init>" , "(Ljava/lang/Runnable;Ljava/lang/Object;)V" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.Executors" , "callable" , "(Ljava/lang/Runnable;Ljava/lang/Object;)Ljava/util/concurrent/Callable;" , true , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . 
addMethodParameterAnnotation ( "java.util.concurrent.ThreadPoolExecutor" , "addWorker" , "(Ljava/lang/Runnable;Z)Z" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ConcurrentHashMap$Segment" , "remove" , "(Ljava/lang/Object;ILjava/lang/Object;)Ljava/lang/Object;" , false , 2 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.CyclicBarrier" , "<init>" , "(ILjava/lang/Runnable;)V" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.BrokenBarrierException" , "<init>" , "(Ljava/lang/String;)V" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.CancellationException" , "<init>" , "(Ljava/lang/String;)V" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.CompletableFuture" , "complete" , "(Ljava/lang/Object;)Z" , false , 0 , NullnessAnnotation . NULLABLE ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.CompletableFuture" , "completedFuture" , "(Ljava/lang/Object;)Ljava/util/concurrent/CompletableFuture;" , true , 0 , NullnessAnnotation . NULLABLE ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ExecutionException" , "<init>" , "(Ljava/lang/String;)V" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ExecutionException" , "<init>" , "(Ljava/lang/String;Ljava/lang/Throwable;)V" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ExecutionException" , "<init>" , "(Ljava/lang/String;Ljava/lang/Throwable;)V" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . 
addMethodParameterAnnotation ( "java.util.concurrent.ExecutionException" , "<init>" , "(Ljava/lang/Throwable;)V" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.RejectedExecutionException" , "<init>" , "(Ljava/lang/String;)V" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.RejectedExecutionException" , "<init>" , "(Ljava/lang/String;Ljava/lang/Throwable;)V" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.RejectedExecutionException" , "<init>" , "(Ljava/lang/String;Ljava/lang/Throwable;)V" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.RejectedExecutionException" , "<init>" , "(Ljava/lang/Throwable;)V" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.TimeoutException" , "<init>" , "(Ljava/lang/String;)V" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.Executors$RunnableAdapter" , "<init>" , "(Ljava/lang/Runnable;Ljava/lang/Object;)V" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ConcurrentSkipListMap" , "<init>" , "(Ljava/util/Comparator;)V" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ConcurrentSkipListMap" , "doRemove" , "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ForkJoinPool" , "casBarrierStack" , "(Ljava/util/concurrent/ForkJoinPool$WaitQueueNode;Ljava/util/concurrent/ForkJoinPool$WaitQueueNode;)Z" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . 
addMethodParameterAnnotation ( "java.util.concurrent.ForkJoinPool" , "casBarrierStack" , "(Ljava/util/concurrent/ForkJoinPool$WaitQueueNode;Ljava/util/concurrent/ForkJoinPool$WaitQueueNode;)Z" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ForkJoinPool" , "casSpareStack" , "(Ljava/util/concurrent/ForkJoinPool$WaitQueueNode;Ljava/util/concurrent/ForkJoinPool$WaitQueueNode;)Z" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ForkJoinTask" , "adapt" , "(Ljava/lang/Runnable;Ljava/lang/Object;)Ljava/util/concurrent/ForkJoinTask;" , true , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ForkJoinTask" , "awaitDone" , "(Ljava/util/concurrent/ForkJoinWorkerThread;J)I" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ForkJoinTask" , "awaitDone" , "(Ljava/util/concurrent/ForkJoinWorkerThread;Z)I" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ForkJoinTask$AdaptedRunnable" , "<init>" , "(Ljava/lang/Runnable;Ljava/lang/Object;)V" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ForkJoinWorkerThread" , "onTermination" , "(Ljava/lang/Throwable;)V" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ForkJoinWorkerThread" , "setSlot" , "([Ljava/util/concurrent/ForkJoinTask;ILjava/util/concurrent/ForkJoinTask;)V" , true , 2 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.LinkedTransferQueue" , "casCleanMe" , "(Ljava/util/concurrent/LinkedTransferQueue$Node;Ljava/util/concurrent/LinkedTransferQueue$Node;)Z" , false , 0 , NullnessAnnotation . 
CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.LinkedTransferQueue" , "casCleanMe" , "(Ljava/util/concurrent/LinkedTransferQueue$Node;Ljava/util/concurrent/LinkedTransferQueue$Node;)Z" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.LinkedTransferQueue" , "casHead" , "(Ljava/util/concurrent/LinkedTransferQueue$Node;Ljava/util/concurrent/LinkedTransferQueue$Node;)Z" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.LinkedTransferQueue" , "xfer" , "(Ljava/lang/Object;ZIJ)Ljava/lang/Object;" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.LinkedTransferQueue$Itr" , "advance" , "(Ljava/util/concurrent/LinkedTransferQueue$Node;)V" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.LinkedTransferQueue$Node" , "casItem" , "(Ljava/lang/Object;Ljava/lang/Object;)Z" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.LinkedTransferQueue$Node" , "casNext" , "(Ljava/util/concurrent/LinkedTransferQueue$Node;Ljava/util/concurrent/LinkedTransferQueue$Node;)Z" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.Phaser" , "<init>" , "(Ljava/util/concurrent/Phaser;)V" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.Phaser" , "<init>" , "(Ljava/util/concurrent/Phaser;I)V" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodAnnotation ( "java.util.concurrent.locks.ReadWriteLock" , "readLock" , "()Ljava/util/concurrent/locks/Lock;" , false , NullnessAnnotation . NONNULL ) ; database . 
addMethodAnnotation ( "java.util.concurrent.locks.ReadWriteLock" , "writeLock" , "()Ljava/util/concurrent/locks/Lock;" , false , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "java.util.concurrent.locks.ReentrantReadWriteLock" , "readLock" , "()Ljava/util/concurrent/locks/ReentrantReadWriteLock$ReadLock;" , false , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "java.util.concurrent.locks.ReentrantReadWriteLock" , "writeLock" , "()Ljava/util/concurrent/locks/ReentrantReadWriteLock$WriteLock;" , false , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ExecutorService" , "submit" , "(Ljava/lang/Runnable;Ljava/lang/Object;)Ljava/util/concurrent/Future;" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.AbstractExecutorService" , "submit" , "(Ljava/lang/Runnable;Ljava/lang/Object;)Ljava/util/concurrent/Future;" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ExecutorCompletionService" , "submit" , "(Ljava/lang/Runnable;Ljava/lang/Object;)Ljava/util/concurrent/Future;" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.AbstractExecutorServiceNullnessAnnotationDatabase" , "newTaskFor" , "(Ljava/lang/Runnable;Ljava/lang/Object;)Ljava/util/concurrent/Future;" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ExecutorCompletionService" , "newTaskFor" , "(Ljava/lang/Runnable;Ljava/lang/Object;)Ljava/util/concurrent/RunnableFuture;" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ThreadPoolExecutor" , "addIfUnderCorePoolSize" , "(Ljava/lang/Runnable;)Z" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . 
addMethodParameterAnnotation ( "java.util.concurrent.ThreadPoolExecutor" , "addThread" , "(Ljava/lang/Runnable;)Ljava/lang/Thread;" , false , 0 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.concurrent.ThreadPoolExecutor" , "afterExecute" , "(Ljava/lang/Runnable;Ljava/lang/Throwable;)V" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.EnumMap" , "get" , "(Ljava/lang/Object;)Ljava/lang/Object;" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.EnumMap" , "containsKey" , "(Ljava/lang/Object;)Z" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.EnumMap" , "put" , "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.EnumMap" , "remove" , "(Ljava/lang/Object;)Ljava/lang/Object;" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.SortedMap" , "get" , "(Ljava/lang/Object;)Ljava/lang/Object;" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.SortedMap" , "containsKey" , "(Ljava/lang/Object;)Ljava/lang/Object;" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.SortedMap" , "put" , "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.SortedMap" , "remove" , "(Ljava/lang/Object;)Ljava/lang/Object;" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.SortedSet" , "add" , "(Ljava/lang/Object;)Z" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.SortedSet" , "remove" , "(Ljava/lang/Object;)Z" , false , 0 , NullnessAnnotation . 
NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.SortedSet" , "contains" , "(Ljava/lang/Object;)Z" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.Hashtable" , "get" , "(Ljava/lang/Object;)Ljava/lang/Object;" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.Hashtable" , "containsKey" , "(Ljava/lang/Object;)Z" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.Hashtable" , "containsValue" , "(Ljava/lang/Object;)Z" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.Hashtable" , "contains" , "(Ljava/lang/Object;)Z" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.Hashtable" , "put" , "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.Hashtable" , "put" , "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;" , false , 1 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "javax.swing.UIDefaults" , "put" , "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;" , false , 1 , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "java.util.Properties" , "getProperty" , "(Ljava/lang/String;)Ljava/lang/String;" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.Properties" , "setProperty" , "(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;" , false , 1 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.Properties" , "setProperty" , "(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;" , false , 0 , NullnessAnnotation . NONNULL ) ; database . 
addMethodParameterAnnotation ( "org.w3c.dom.Element" , "setAttribute" , "(Ljava/lang/String;Ljava/lang/String;)V" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.text.DateFormat" , "parse" , "(Ljava/lang/String;Ljava/text/ParsePosition;)Ljava/util/Date;" , false , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.text.DateFormat" , "parse" , "(Ljava/lang/String;)Ljava/util/Date;" , false , 0 , NullnessAnnotation . NONNULL ) ; // addMethodAnnotation ( " java . util . Queue " , " poll " , // " ( ) Ljava / lang / Object ; " , false , NullnessAnnotation . CHECK _ FOR _ NULL ) ; database . addMethodAnnotation ( "java.io.BufferedReader" , "readLine" , "()Ljava/lang/String;" , false , NullnessAnnotation . CHECK_FOR_NULL ) ; database . addMethodParameterAnnotation ( "com.google.common.base.Preconditions" , "checkNotNull" , "(Ljava/lang/Object;)Ljava/lang/Object;" , true , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "com.google.common.base.Preconditions" , "checkNotNull" , "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;" , true , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "com.google.common.base.Preconditions" , "checkNotNull" , "(Ljava/lang/Object;Ljava/lang/String;[Ljava/lang/Object;)Ljava/lang/Object;" , true , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "com.google.common.base.Preconditions" , "checkNotNull" , "(Ljava/lang/Object;)Ljava/lang/Object;" , true , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "com.google.common.base.Preconditions" , "checkNotNull" , "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;" , true , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "com.google.common.base.Preconditions" , "checkNotNull" , "(Ljava/lang/Object;Ljava/lang/String;[Ljava/lang/Object;)Ljava/lang/Object;" , true , NullnessAnnotation . 
NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.Objects" , "requireNonNull" , "(Ljava/lang/Object;)Ljava/lang/Object;" , true , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodParameterAnnotation ( "java.util.Objects" , "requireNonNull" , "(Ljava/lang/Object;Ljava/lang/String;)Ljava/lang/Object;" , true , 0 , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "java.util.Objects" , "requireNonNull" , "(Ljava/lang/Object;)Ljava/lang/Object;" , true , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "java.util.Objects" , "requireNonNull" , "(Ljava/lang/Object;Ljava/lang/String;)Ljava/lang/Object;" , true , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "org.w3c.dom.Element" , "getAttribute" , "(Ljava/lang/String;)Ljava/lang/String;" , false , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "org.w3c.dom.Element" , "getAttributeNS" , "(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;" , false , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "org.w3c.dom.Element" , "getElementsByTagName" , "(Ljava/lang/String;)Lorg/w3c/dom/NodeList;" , false , NullnessAnnotation . NONNULL ) ; database . addMethodAnnotation ( "org.w3c.dom.Element" , "getElementsByTagNameNS" , "(Ljava/lang/String;Ljava/lang/String;)Lorg/w3c/dom/NodeList;" , false , NullnessAnnotation . NONNULL ) ; addEclipseSpecificAnnotations ( database ) ; AnalysisContext . currentAnalysisContext ( ) . setMissingClassWarningsSuppressed ( missingClassWarningsSuppressed ) ;
public class Assert { /** * Asserts that the given { @ link Object } is an instance of the specified { @ link Class type } . * The assertion holds if and only if the { @ link Object } is not { @ literal null } and is an instance of * the specified { @ link Class type } . This assertion functions exactly the same as * the Java { @ literal instanceof } operator . * @ param obj { @ link Object } evaluated as an instance of the { @ link Class type } . * @ param type { @ link Class type } used to evaluate the { @ link Object } in the { @ literal instanceof } operator . * @ param message { @ link Supplier } containing the message used in the { @ link IllegalArgumentException } thrown * if the assertion fails . * @ throws org . cp . elements . lang . IllegalTypeException if the { @ link Object } is not an instance of * the specified { @ link Class type } . * @ see java . lang . Class # isInstance ( Object ) */ public static void isInstanceOf ( Object obj , Class < ? > type , Supplier < String > message ) { } }
if ( isNotInstanceOf ( obj , type ) ) { throw new IllegalTypeException ( message . get ( ) ) ; }
public class CPFriendlyURLEntryUtil {

    /**
     * Returns the first cp friendly url entry in the ordered set where uuid = &#63;.
     *
     * @param uuid the uuid
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching cp friendly url entry, or <code>null</code> if a matching
     * cp friendly url entry could not be found
     */
    public static CPFriendlyURLEntry fetchByUuid_First(String uuid,
        OrderByComparator<CPFriendlyURLEntry> orderByComparator) {
        // Static facade: forwards directly to the persistence implementation returned by
        // getPersistence(); no additional logic lives here.
        return getPersistence().fetchByUuid_First(uuid, orderByComparator);
    }
}
public class FastSet { /** * { @ inheritDoc } */ @ Override public boolean add ( int i ) { } }
int wordIndex = wordIndex ( i ) ; expandTo ( wordIndex ) ; int before = words [ wordIndex ] ; words [ wordIndex ] |= ( 1 << i ) ; if ( before != words [ wordIndex ] ) { if ( size >= 0 ) { size ++ ; } return true ; } return false ;
public class GLImplementation { /** * Loads the implementation , making it accessible . * @ param implementation The implementation to load * @ return Whether or not the loading succeeded */ @ SuppressWarnings ( "unchecked" ) public static boolean load ( GLImplementation implementation ) { } }
try { final Constructor < ? > constructor = Class . forName ( implementation . getContextName ( ) ) . getDeclaredConstructor ( ) ; constructor . setAccessible ( true ) ; implementations . put ( implementation , ( Constructor < Context > ) constructor ) ; return true ; } catch ( ClassNotFoundException | NoSuchMethodException | SecurityException ex ) { CausticUtil . getCausticLogger ( ) . log ( Level . WARNING , "Couldn't load implementation" , ex ) ; return false ; }
public class BitmexTradeServiceRaw {

    /**
     * See {@link Bitmex#getOrders}
     *
     * @return List of {@link BitmexPrivateOrder}s.
     */
    public List<BitmexPrivateOrder> getBitmexOrders() throws ExchangeException {
        // Convenience overload: delegates to the five-argument variant with every
        // argument null, i.e. no filtering/paging constraints applied. The exact
        // meaning of each parameter is defined by the overload, not visible here.
        return getBitmexOrders(null, null, null, null, null);
    }
}
public class BaseWordsi { /** * { @ inheritDoc } */ public boolean acceptWord ( String word ) { } }
return acceptedWords == null || acceptedWords . isEmpty ( ) || acceptedWords . contains ( word ) ;
public class CollectAccumulator { /** * / * ( non - Javadoc ) * @ see org . kie . spi . Accumulator # accumulate ( java . lang . Object , org . kie . spi . Tuple , org . kie . common . InternalFactHandle , org . kie . rule . Declaration [ ] , org . kie . rule . Declaration [ ] , org . kie . WorkingMemory ) */ public void accumulate ( Object workingMemoryContext , Object context , Tuple leftTuple , InternalFactHandle handle , Declaration [ ] declarations , Declaration [ ] innerDeclarations , WorkingMemory workingMemory ) throws Exception { } }
Object value = this . unwrapHandle ? ( ( LeftTuple ) handle . getObject ( ) ) . getFactHandle ( ) . getObject ( ) : handle . getObject ( ) ; ( ( CollectContext ) context ) . result . add ( value ) ;
public class SignatureUtils { /** * converts a primitive type code to a signature * @ param typeCode * the raw JVM type value * @ return the signature of the type */ public static String getTypeCodeSignature ( int typeCode ) { } }
String signature = Values . PRIMITIVE_TYPE_CODE_SIGS . get ( ( byte ) typeCode ) ; return signature == null ? Values . SIG_JAVA_LANG_OBJECT : signature ;
public class DOTranslationUtility { /** * Reads the state attribute from a DigitalObject . * Null or empty strings are interpteted as " Active " . * @ param obj Object that potentially contains object state data . * @ return String containing full state value ( Active , Inactive , or Deleted ) * @ throws ObjectIntegrityException thrown when the state cannot be parsed . */ public static String getStateAttribute ( DigitalObject obj ) throws ObjectIntegrityException { } }
if ( obj . getState ( ) == null || obj . getState ( ) . isEmpty ( ) ) { return MODEL . ACTIVE . localName ; } else { switch ( obj . getState ( ) . charAt ( 0 ) ) { case 'D' : return MODEL . DELETED . localName ; case 'I' : return MODEL . INACTIVE . localName ; case 'A' : return MODEL . ACTIVE . localName ; default : throw new ObjectIntegrityException ( "Could not determine " + "state attribute from '" + obj . getState ( ) + "'" ) ; } }
public class JSONTableSawRESTDataProvider {

    /**
     * Invoke a GET call on the web target and return the result as a Table (parsed CSV).
     * Throws a QuandlUnprocessableEntityException if Quandl returned a response code that
     * indicates a nonsensical request. Throws a QuandlTooManyRequestsException if Quandl
     * returned a response code indicating the client had made too many requests. Throws a
     * QuandlRuntimeException if there was a JSON parsing problem, network issue or the
     * response code was unusual.
     *
     * @param target the WebTarget describing the call to make, not null
     * @param request the originating request
     * @return the parsed Table
     */
    public Table getTabularResponse(final WebTarget target, Request request) {
        // Thin adapter: delegates to the generic getResponse(...) with the
        // table-saw response processor, which converts the HTTP payload into a Table.
        return getResponse(target, TABLE_SAW_RESPONSE_PROCESSOR, request);
    }
}
public class LogoutRequestParser { /** * { @ inheritDoc } */ @ Override protected final void checkIntegrity ( ) throws InternetSCSIException { } }
String exceptionMessage ; do { Utils . isReserved ( logicalUnitNumber ) ; if ( reasonCode == LogoutReasonCode . CLOSE_SESSION && connectionID != 0 ) { exceptionMessage = "The CID field must be zero, if close session is requested." ; break ; } if ( protocolDataUnit . getBasicHeaderSegment ( ) . getTotalAHSLength ( ) != 0 ) { exceptionMessage = "TotalAHSLength must be 0!" ; break ; } if ( protocolDataUnit . getBasicHeaderSegment ( ) . getDataSegmentLength ( ) != 0 ) { exceptionMessage = "DataSegmentLength must be 0!" ; break ; } // message is checked correctly return ; } while ( false ) ; throw new InternetSCSIException ( exceptionMessage ) ;
public class ClassResourceGridScreen {

    /**
     * SetupSFields Method.
     * Registers the grid's display fields — sequence number, key name, and value name —
     * from the class-resource record, each anchored at the next logical screen location.
     * NOTE(review): getNextLocation(NEXT_LOGICAL, ...) appears to advance a layout
     * cursor, so the statement order likely fixes the column order — confirm before
     * reordering these calls.
     */
    public void setupSFields() {
        // Sequence-number field.
        this.getRecord(ClassResource.CLASS_RESOURCE_FILE).getField(ClassResource.SEQUENCE_NO)
            .setupDefaultView(this.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.ANCHOR_DEFAULT),
                this, ScreenConstants.DEFAULT_DISPLAY);
        // Key-name field.
        this.getRecord(ClassResource.CLASS_RESOURCE_FILE).getField(ClassResource.KEY_NAME)
            .setupDefaultView(this.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.ANCHOR_DEFAULT),
                this, ScreenConstants.DEFAULT_DISPLAY);
        // Value-name field.
        this.getRecord(ClassResource.CLASS_RESOURCE_FILE).getField(ClassResource.VALUE_NAME)
            .setupDefaultView(this.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.ANCHOR_DEFAULT),
                this, ScreenConstants.DEFAULT_DISPLAY);
    }
}
public class AtomContainerManipulator { /** * Convenience method to perceive atom types for all < code > IAtom < / code > s in the * < code > IAtomContainer < / code > , using the < code > CDKAtomTypeMatcher < / code > . If the * matcher finds a matching atom type , the < code > IAtom < / code > will be configured * to have the same properties as the < code > IAtomType < / code > . If no matching atom * type is found , no configuration is performed . * < b > This method overwrites existing values . < / b > * @ param container * @ throws CDKException */ public static void percieveAtomTypesAndConfigureAtoms ( IAtomContainer container ) throws CDKException { } }
CDKAtomTypeMatcher matcher = CDKAtomTypeMatcher . getInstance ( container . getBuilder ( ) ) ; for ( IAtom atom : container . atoms ( ) ) { IAtomType matched = matcher . findMatchingAtomType ( container , atom ) ; if ( matched != null ) AtomTypeManipulator . configure ( atom , matched ) ; }
public class Client { /** * Gets a list of the Privileges created in an account . * @ return List of Privilege * @ throws OAuthSystemException - if there is a IOException reading parameters of the httpURLConnection * @ throws OAuthProblemException - if there are errors validating the OneloginOAuthJSONResourceResponse and throwOAuthProblemException is enabled * @ throws URISyntaxException - if there is an error when generating the target URL at the getResource call * @ see com . onelogin . sdk . model . Privileges * @ see < a target = " _ blank " href = " https : / / developers . onelogin . com / api - docs / 1 / privileges / list - privileges " > Get Privileges documentation < / a > */ public List < Privilege > getPrivileges ( ) throws OAuthSystemException , OAuthProblemException , URISyntaxException { } }
cleanError ( ) ; prepareToken ( ) ; URIBuilder url = new URIBuilder ( settings . getURL ( Constants . LIST_PRIVILEGES_URL ) ) ; OneloginURLConnectionClient httpClient = new OneloginURLConnectionClient ( ) ; OAuthClient oAuthClient = new OAuthClient ( httpClient ) ; OAuthClientRequest bearerRequest = new OAuthBearerClientRequest ( url . toString ( ) ) . buildHeaderMessage ( ) ; Map < String , String > headers = getAuthorizedHeader ( ) ; bearerRequest . setHeaders ( headers ) ; List < Privilege > privileges = new ArrayList < Privilege > ( maxResults ) ; OneloginOAuth2JSONResourceResponse oAuth2Response = oAuthClient . resource ( bearerRequest , OAuth . HttpMethod . GET , OneloginOAuth2JSONResourceResponse . class ) ; if ( oAuth2Response . getResponseCode ( ) == 200 ) { JSONArray jsonArray = oAuth2Response . getJSONArrayFromContent ( ) ; if ( jsonArray != null && jsonArray . length ( ) > 0 ) { for ( int i = 0 ; i < jsonArray . length ( ) ; i ++ ) { privileges . add ( new Privilege ( jsonArray . getJSONObject ( i ) ) ) ; } } } else { error = oAuth2Response . getError ( ) ; errorDescription = oAuth2Response . getErrorDescription ( ) ; } return privileges ;
public class SimpleTable { /** * Adds content to this object . * @ param element * @ throws BadElementException */ public void addElement ( SimpleCell element ) throws BadElementException { } }
if ( ! element . isCellgroup ( ) ) { throw new BadElementException ( "You can't add cells to a table directly, add them to a row first." ) ; } content . add ( element ) ;
public class FileSystemStorage { /** * Executes the given Callable and returns its result , while translating any Exceptions bubbling out of it into * StreamSegmentExceptions . * @ param segmentName Full name of the StreamSegment . * @ param operation The function to execute . * @ param < R > Return type of the operation . * @ return Instance of the return type of the operation . */ private < R > R execute ( String segmentName , Callable < R > operation ) throws StreamSegmentException { } }
Exceptions . checkNotClosed ( this . closed . get ( ) , this ) ; try { return operation . call ( ) ; } catch ( Exception e ) { return throwException ( segmentName , e ) ; }