signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class JobCancellationsInner { /** * Cancels a job . This is an asynchronous operation . To know the status of the cancellation , call GetCancelOperationResult API . * @ param vaultName The name of the recovery services vault . * @ param resourceGroupName The name of the resource group where the recovery services vault is present . * @ param jobName Name of the job to cancel . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */ public void trigger ( String vaultName , String resourceGroupName , String jobName ) { } }
triggerWithServiceResponseAsync ( vaultName , resourceGroupName , jobName ) . toBlocking ( ) . single ( ) . body ( ) ;
public class CouchbaseClientFactory { /** * ( non - Javadoc ) * @ see * com . impetus . kundera . loader . GenericClientFactory # initialize ( java . util . Map ) */ @ Override public void initialize ( Map < String , Object > puProperties ) { } }
reader = new CouchbaseEntityReader ( kunderaMetadata ) ; setExternalProperties ( puProperties ) ; initializePropertyReader ( ) ; PersistenceUnitMetadata pum = kunderaMetadata . getApplicationMetadata ( ) . getPersistenceUnitMetadata ( getPersistenceUnit ( ) ) ; Properties pumProps = pum . getProperties ( ) ; if ( puProperties != null ) { pumProps . putAll ( puProperties ) ; } String host = pumProps . getProperty ( "kundera.nodes" ) ; String keyspace = pumProps . getProperty ( "kundera.keyspace" ) ; if ( host == null ) { throw new KunderaException ( "Hostname/IP is null." ) ; } if ( keyspace == null ) { throw new KunderaException ( "kundera.keyspace is null." ) ; } cluster = CouchbaseCluster . create ( splitHostNames ( host ) ) ; String password = ( ( CouchbasePropertyReader ) propertyReader ) . csmd . getBucketProperty ( "bucket.password" ) ; bucket = CouchbaseBucketUtils . openBucket ( cluster , keyspace , password ) ;
public class Curve25519 { /** * / * Signature verification primitive , calculates Y = vP + hG * Y [ out ] signature public key * v [ in ] signature value * h [ in ] signature hash * P [ in ] public key */ public static final void verify ( byte [ ] Y , byte [ ] v , byte [ ] h , byte [ ] P ) { } }
/* Y = v abs ( P ) + h G */ byte [ ] d = new byte [ 32 ] ; long10 [ ] p = new long10 [ ] { new long10 ( ) , new long10 ( ) } , s = new long10 [ ] { new long10 ( ) , new long10 ( ) } , yx = new long10 [ ] { new long10 ( ) , new long10 ( ) , new long10 ( ) } , yz = new long10 [ ] { new long10 ( ) , new long10 ( ) , new long10 ( ) } , t1 = new long10 [ ] { new long10 ( ) , new long10 ( ) , new long10 ( ) } , t2 = new long10 [ ] { new long10 ( ) , new long10 ( ) , new long10 ( ) } ; int vi = 0 , hi = 0 , di = 0 , nvh = 0 , i , j , k ; /* set p [ 0 ] to G and p [ 1 ] to P */ set ( p [ 0 ] , 9 ) ; unpack ( p [ 1 ] , P ) ; /* set s [ 0 ] to P + G and s [ 1 ] to P - G */ /* s [ 0 ] = ( Py ^ 2 + Gy ^ 2 - 2 Py Gy ) / ( Px - Gx ) ^ 2 - Px - Gx - 486662 */ /* s [ 1 ] = ( Py ^ 2 + Gy ^ 2 + 2 Py Gy ) / ( Px - Gx ) ^ 2 - Px - Gx - 486662 */ x_to_y2 ( t1 [ 0 ] , t2 [ 0 ] , p [ 1 ] ) ; /* t2[0 ] = Py ^ 2 */ sqrt ( t1 [ 0 ] , t2 [ 0 ] ) ; /* t1[0 ] = Py or - Py */ j = is_negative ( t1 [ 0 ] ) ; /* . . . check which */ t2 [ 0 ] . _0 += 39420360 ; /* t2[0 ] = Py ^ 2 + Gy ^ 2 */ mul ( t2 [ 1 ] , BASE_2Y , t1 [ 0 ] ) ; /* t2[1 ] = 2 Py Gy or - 2 Py Gy */ sub ( t1 [ j ] , t2 [ 0 ] , t2 [ 1 ] ) ; /* t1[0 ] = Py ^ 2 + Gy ^ 2 - 2 Py Gy */ add ( t1 [ 1 - j ] , t2 [ 0 ] , t2 [ 1 ] ) ; /* t1[1 ] = Py ^ 2 + Gy ^ 2 + 2 Py Gy */ cpy ( t2 [ 0 ] , p [ 1 ] ) ; /* t2[0 ] = Px */ t2 [ 0 ] . _0 -= 9 ; /* t2[0 ] = Px - Gx */ sqr ( t2 [ 1 ] , t2 [ 0 ] ) ; /* t2[1 ] = ( Px - Gx ) ^ 2 */ recip ( t2 [ 0 ] , t2 [ 1 ] , 0 ) ; /* t2[0 ] = 1 / ( Px - Gx ) ^ 2 */ mul ( s [ 0 ] , t1 [ 0 ] , t2 [ 0 ] ) ; /* s [ 0 ] = t1[0 ] / ( Px - Gx ) ^ 2 */ sub ( s [ 0 ] , s [ 0 ] , p [ 1 ] ) ; /* s [ 0 ] = t1[0 ] / ( Px - Gx ) ^ 2 - Px */ s [ 0 ] . _0 -= 9 + 486662 ; /* s [ 0 ] = X ( P + G ) */ mul ( s [ 1 ] , t1 [ 1 ] , t2 [ 0 ] ) ; /* s [ 1 ] = t1[1 ] / ( Px - Gx ) ^ 2 */ sub ( s [ 1 ] , s [ 1 ] , p [ 1 ] ) ; /* s [ 1 ] = t1[1 ] / ( Px - Gx ) ^ 2 - Px */ s [ 1 ] . 
_0 -= 9 + 486662 ; /* s [ 1 ] = X ( P - G ) */ mul_small ( s [ 0 ] , s [ 0 ] , 1 ) ; /* reduce s [ 0] */ mul_small ( s [ 1 ] , s [ 1 ] , 1 ) ; /* reduce s [ 1] */ /* prepare the chain */ for ( i = 0 ; i < 32 ; i ++ ) { vi = ( vi >> 8 ) ^ ( v [ i ] & 0xFF ) ^ ( ( v [ i ] & 0xFF ) << 1 ) ; hi = ( hi >> 8 ) ^ ( h [ i ] & 0xFF ) ^ ( ( h [ i ] & 0xFF ) << 1 ) ; nvh = ~ ( vi ^ hi ) ; di = ( nvh & ( di & 0x80 ) >> 7 ) ^ vi ; di ^= nvh & ( di & 0x01 ) << 1 ; di ^= nvh & ( di & 0x02 ) << 1 ; di ^= nvh & ( di & 0x04 ) << 1 ; di ^= nvh & ( di & 0x08 ) << 1 ; di ^= nvh & ( di & 0x10 ) << 1 ; di ^= nvh & ( di & 0x20 ) << 1 ; di ^= nvh & ( di & 0x40 ) << 1 ; d [ i ] = ( byte ) di ; } di = ( ( nvh & ( di & 0x80 ) << 1 ) ^ vi ) >> 8 ; /* initialize state */ set ( yx [ 0 ] , 1 ) ; cpy ( yx [ 1 ] , p [ di ] ) ; cpy ( yx [ 2 ] , s [ 0 ] ) ; set ( yz [ 0 ] , 0 ) ; set ( yz [ 1 ] , 1 ) ; set ( yz [ 2 ] , 1 ) ; /* y [ 0 ] is ( even ) P + ( even ) G * y [ 1 ] is ( even ) P + ( odd ) G if current d - bit is 0 * y [ 1 ] is ( odd ) P + ( even ) G if current d - bit is 1 * y [ 2 ] is ( odd ) P + ( odd ) G */ vi = 0 ; hi = 0 ; /* and go for it ! 
*/ for ( i = 32 ; i -- != 0 ; ) { vi = ( vi << 8 ) | ( v [ i ] & 0xFF ) ; hi = ( hi << 8 ) | ( h [ i ] & 0xFF ) ; di = ( di << 8 ) | ( d [ i ] & 0xFF ) ; for ( j = 8 ; j -- != 0 ; ) { mont_prep ( t1 [ 0 ] , t2 [ 0 ] , yx [ 0 ] , yz [ 0 ] ) ; mont_prep ( t1 [ 1 ] , t2 [ 1 ] , yx [ 1 ] , yz [ 1 ] ) ; mont_prep ( t1 [ 2 ] , t2 [ 2 ] , yx [ 2 ] , yz [ 2 ] ) ; k = ( ( vi ^ vi >> 1 ) >> j & 1 ) + ( ( hi ^ hi >> 1 ) >> j & 1 ) ; mont_dbl ( yx [ 2 ] , yz [ 2 ] , t1 [ k ] , t2 [ k ] , yx [ 0 ] , yz [ 0 ] ) ; k = ( di >> j & 2 ) ^ ( ( di >> j & 1 ) << 1 ) ; mont_add ( t1 [ 1 ] , t2 [ 1 ] , t1 [ k ] , t2 [ k ] , yx [ 1 ] , yz [ 1 ] , p [ di >> j & 1 ] ) ; mont_add ( t1 [ 2 ] , t2 [ 2 ] , t1 [ 0 ] , t2 [ 0 ] , yx [ 2 ] , yz [ 2 ] , s [ ( ( vi ^ hi ) >> j & 2 ) >> 1 ] ) ; } } k = ( vi & 1 ) + ( hi & 1 ) ; recip ( t1 [ 0 ] , yz [ k ] , 0 ) ; mul ( t1 [ 1 ] , yx [ k ] , t1 [ 0 ] ) ; pack ( t1 [ 1 ] , Y ) ;
public class SpiderThread { /** * Adds the given message to the sites tree . * @ param historyReference the history reference of the message , must not be { @ code null } * @ param message the actual message , must not be { @ code null } */ private void addMessageToSitesTree ( final HistoryReference historyReference , final HttpMessage message ) { } }
if ( View . isInitialised ( ) && ! EventQueue . isDispatchThread ( ) ) { EventQueue . invokeLater ( new Runnable ( ) { @ Override public void run ( ) { addMessageToSitesTree ( historyReference , message ) ; } } ) ; return ; } StructuralNode node = SessionStructure . addPath ( Model . getSingleton ( ) . getSession ( ) , historyReference , message , true ) ; if ( node != null ) { try { addUriToAddedNodesModel ( SessionStructure . getNodeName ( message ) , message . getRequestHeader ( ) . getMethod ( ) , "" ) ; } catch ( URIException e ) { log . error ( "Error while adding node to added nodes model: " + e . getMessage ( ) , e ) ; } }
public class SqlServerParser { /** * 处理selectBody去除Order by * @ param selectBody */ protected void processSelectBody ( SelectBody selectBody , int level ) { } }
if ( selectBody instanceof PlainSelect ) { processPlainSelect ( ( PlainSelect ) selectBody , level + 1 ) ; } else if ( selectBody instanceof WithItem ) { WithItem withItem = ( WithItem ) selectBody ; if ( withItem . getSelectBody ( ) != null ) { processSelectBody ( withItem . getSelectBody ( ) , level + 1 ) ; } } else { SetOperationList operationList = ( SetOperationList ) selectBody ; if ( operationList . getSelects ( ) != null && operationList . getSelects ( ) . size ( ) > 0 ) { List < SelectBody > plainSelects = operationList . getSelects ( ) ; for ( SelectBody plainSelect : plainSelects ) { processSelectBody ( plainSelect , level + 1 ) ; } } }
public class RunbookDraftsInner { /** * Undo draft edit to last known published state identified by runbook name . * @ param resourceGroupName Name of an Azure Resource group . * @ param automationAccountName The name of the automation account . * @ param runbookName The runbook name . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < RunbookDraftUndoEditResultInner > undoEditAsync ( String resourceGroupName , String automationAccountName , String runbookName , final ServiceCallback < RunbookDraftUndoEditResultInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( undoEditWithServiceResponseAsync ( resourceGroupName , automationAccountName , runbookName ) , serviceCallback ) ;
public class JvmByteAnnotationValueImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public boolean eIsSet ( int featureID ) { } }
switch ( featureID ) { case TypesPackage . JVM_BYTE_ANNOTATION_VALUE__VALUES : return values != null && ! values . isEmpty ( ) ; } return super . eIsSet ( featureID ) ;
public class ContentSpec { /** * Set the Bugzilla Keywords to be applied during building . * @ param bugzillaKeywords The keywords to be set in bugzilla . */ public void setBugzillaKeywords ( final String bugzillaKeywords ) { } }
if ( bugzillaKeywords == null && this . bugzillaKeywords == null ) { return ; } else if ( bugzillaKeywords == null ) { removeChild ( this . bugzillaKeywords ) ; this . bugzillaKeywords = null ; } else if ( this . bugzillaKeywords == null ) { this . bugzillaKeywords = new KeyValueNode < String > ( CommonConstants . CS_BUGZILLA_KEYWORDS_TITLE , bugzillaKeywords ) ; appendChild ( this . bugzillaKeywords , false ) ; } else { this . bugzillaKeywords . setValue ( bugzillaKeywords ) ; }
public class MutablePeriod { /** * Parses a { @ code MutablePeriod } from the specified string using a formatter . * @ param str the string to parse , not null * @ param formatter the formatter to use , not null * @ since 2.0 */ public static MutablePeriod parse ( String str , PeriodFormatter formatter ) { } }
return formatter . parsePeriod ( str ) . toMutablePeriod ( ) ;
public class Symbol { /** * < p > Sets the font to use to render the human - readable text . This is an alternative to setting the * { @ link # setFontName ( String ) font name } and { @ link # setFontSize ( int ) font size } separately . May * allow some applications to avoid the use of { @ link GraphicsEnvironment # registerFont ( Font ) } * when using the { @ link Java2DRenderer } . * < p > Do not use this method in combination with { @ link # setFontName ( String ) } or { @ link # setFontSize ( int ) } . * @ param font the font to use to render the human - readable text */ public void setFont ( Font font ) { } }
this . font = font ; this . fontName = font . getFontName ( ) ; this . fontSize = font . getSize ( ) ;
public class CmsPreviewDialog { /** * Loads preview for another locale . < p > * @ param locale the locale to load */ protected void loadOtherLocale ( final String locale ) { } }
CmsRpcAction < CmsPreviewInfo > previewAction = new CmsRpcAction < CmsPreviewInfo > ( ) { @ SuppressWarnings ( "synthetic-access" ) @ Override public void execute ( ) { if ( m_previewInfoProvider != null ) { m_previewInfoProvider . loadPreviewForLocale ( locale , this ) ; } else { CmsCoreProvider . getVfsService ( ) . getPreviewInfo ( getSitePath ( ) , locale , this ) ; } start ( 0 , true ) ; } @ Override protected void onResponse ( CmsPreviewInfo result ) { stop ( false ) ; updatePreviewContent ( result ) ; } } ; previewAction . execute ( ) ;
public class PermissionsImpl { /** * Adds a user to the allowed list of users to access this LUIS application . Users are added using their email address . * @ param appId The application ID . * @ param addOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws ErrorResponseException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the OperationStatus object if successful . */ public OperationStatus add ( UUID appId , AddPermissionsOptionalParameter addOptionalParameter ) { } }
return addWithServiceResponseAsync ( appId , addOptionalParameter ) . toBlocking ( ) . single ( ) . body ( ) ;
public class KeyVaultClientBaseImpl { /** * Imports an externally created key , stores it , and returns key parameters and attributes to the client . * The import key operation may be used to import any key type into an Azure Key Vault . If the named key already exists , Azure Key Vault creates a new version of the key . This operation requires the keys / import permission . * @ param vaultBaseUrl The vault name , for example https : / / myvault . vault . azure . net . * @ param keyName Name for the imported key . * @ param key The Json web key * @ param hsm Whether to import as a hardware key ( HSM ) or software key . * @ param keyAttributes The key management attributes . * @ param tags Application specific metadata in the form of key - value pairs . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws KeyVaultErrorException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the KeyBundle object if successful . */ public KeyBundle importKey ( String vaultBaseUrl , String keyName , JsonWebKey key , Boolean hsm , KeyAttributes keyAttributes , Map < String , String > tags ) { } }
return importKeyWithServiceResponseAsync ( vaultBaseUrl , keyName , key , hsm , keyAttributes , tags ) . toBlocking ( ) . single ( ) . body ( ) ;
public class HibernateConfigProcessor { /** * Extracts all { @ link ClassMetadata } and { @ link CollectionMetadata } from a given { @ link SessionFactory } to be * used in determining the types that need a { @ link org . springframework . flex . core . io . SpringPropertyProxy } registered in { @ link # findTypesToRegister ( ) } * @ param sessionFactory the session factory from which to read metadata */ @ SuppressWarnings ( "unchecked" ) protected void extractHibernateMetadata ( SessionFactory sessionFactory ) { } }
this . classMetadata . addAll ( sessionFactory . getAllClassMetadata ( ) . values ( ) ) ; this . collectionMetadata . addAll ( sessionFactory . getAllCollectionMetadata ( ) . values ( ) ) ; this . hibernateConfigured = true ;
public class BaseStatementHandler { /** * { @ inheritDoc } */ public void setStatement ( Statement statement , QueryParameters params ) throws SQLException { } }
AssertUtils . assertNotNull ( params ) ; PreparedStatement preparedStmt = ( PreparedStatement ) statement ; // check the parameter count , if we can ParameterMetaData pmd = null ; int stmtCount = - 1 ; int paramsCount = params == null ? 0 : params . orderSize ( ) ; try { if ( useMetadata == true ) { pmd = preparedStmt . getParameterMetaData ( ) ; stmtCount = pmd . getParameterCount ( ) ; } else { // check is not performed , assuming that it is equal . If not - exception would be thrown later . . . stmtCount = paramsCount ; } } catch ( Exception ex ) { // driver doesn ' t support properly that function . Assuming it is equals useMetadata = false ; stmtCount = paramsCount ; } if ( stmtCount != paramsCount ) { if ( this . overrider . hasOverride ( MjdbcConstants . OVERRIDE_CONTROL_PARAM_COUNT ) == true ) { // value from this field is irrelevant , but I need to read the value in order to remove it if it should be invoked once . this . overrider . getOverride ( MjdbcConstants . OVERRIDE_CONTROL_PARAM_COUNT ) ; throw new SQLException ( "Wrong number of parameters: expected " + stmtCount + ", was given " + paramsCount ) ; } else { // Due to the fact that sometimes getParameterCount returns // unexpected value - warning about inconsistency but not throwing an exception . logger . warning ( "Wrong number of parameters: expected " + stmtCount + ", was given " + paramsCount ) ; } } // nothing to do here if ( params == null ) { return ; } String parameterName = null ; Object parameterValue = null ; Integer parameterType = null ; for ( int i = 0 ; i < params . orderSize ( ) ; i ++ ) { parameterName = params . getNameByPosition ( i ) ; parameterValue = params . getValue ( parameterName ) ; parameterType = params . getType ( parameterName ) ; if ( params . isInParameter ( parameterName ) == true ) { if ( parameterValue != null ) { if ( parameterType != null && parameterType . intValue ( ) != MjdbcTypes . OTHER ) { try { if ( parameterType . intValue ( ) == MjdbcTypes . 
VARCHAR && parameterValue instanceof Reader ) { // preparedStmt . setCharacterStream ( i + 1 , ( Reader ) parameterValue ) ; MappingUtils . invokeFunction ( preparedStmt , "setCharacterStream" , new Class [ ] { int . class , Reader . class } , new Object [ ] { i + 1 , parameterValue } ) ; } else if ( parameterType . intValue ( ) == MjdbcTypes . VARBINARY && parameterValue instanceof InputStream ) { // preparedStmt . setBinaryStream ( i + 1 , ( InputStream ) parameterValue ) ; MappingUtils . invokeFunction ( preparedStmt , "setBinaryStream" , new Class [ ] { int . class , InputStream . class } , new Object [ ] { i + 1 , parameterValue } ) ; } else { preparedStmt . setObject ( i + 1 , parameterValue , parameterType ) ; } } catch ( MjdbcException ex ) { preparedStmt . setObject ( i + 1 , parameterValue , parameterType ) ; } } else { preparedStmt . setObject ( i + 1 , parameterValue ) ; } } else { // VARCHAR works with many drivers regardless // of the actual column type . Oddly , NULL and // OTHER don ' t work with Oracle ' s drivers . int sqlType = MjdbcTypes . VARCHAR ; if ( useMetadata == true ) { try { sqlType = pmd . getParameterType ( i + 1 ) ; } catch ( SQLException e ) { useMetadata = false ; } } preparedStmt . setNull ( i + 1 , sqlType ) ; } } }
public class AbstractBeanDefinition { /** * The default implementation which provides no injection . To be overridden by compile time tooling . * @ param resolutionContext The resolution context * @ param context The bean context * @ param bean The bean * @ return The injected bean */ @ Internal @ SuppressWarnings ( { } }
"WeakerAccess" , "unused" } ) @ UsedByGeneratedCode protected Object injectBean ( BeanResolutionContext resolutionContext , BeanContext context , Object bean ) { return bean ;
public class QueryTerminalNode { public void readExternal ( ObjectInput in ) throws IOException , ClassNotFoundException { } }
super . readExternal ( in ) ; query = ( QueryImpl ) in . readObject ( ) ; subrule = ( GroupElement ) in . readObject ( ) ; subruleIndex = in . readInt ( ) ; initDeclarations ( ) ;
public class StackAwareMethodVisitor { /** * Explicitly registers a label to define a given stack state . * @ param label The label to register a stack state for . * @ param stackSizes The stack sizes to assume when reaching the supplied label . */ public void register ( Label label , List < StackSize > stackSizes ) { } }
sizes . put ( label , stackSizes ) ;
public class ServerModel { /** * Associates a http context with a bundle if the http service is not * already associated to another bundle . This is done in order to prevent * sharing http context between bundles . The implementation is not 100% * correct as it can be that at a certain moment in time when this method is * called , another thread is processing a release of the http service , * process that will deassociate the bundle that released the http service , * and that bundle could actually be related to the http context that this * method is trying to associate . But this is less likely to happen as it * should have as precondition that this is happening concurrent and that * the two bundles are sharing the http context . But this solution has the * benefits of not needing synchronization . * @ param httpContext http context to be assicated to the bundle * @ param bundle bundle to be assiciated with the htp service * @ param allowReAsssociation if it should allow a context to be reassiciated to a bundle * @ throws IllegalStateException - If htp context is already associated to another bundle . */ public void associateHttpContext ( final WebContainerContext httpContext , final Bundle bundle , final boolean allowReAsssociation ) { } }
List < String > virtualHosts = resolveVirtualHosts ( bundle ) ; for ( String virtualHost : virtualHosts ) { ConcurrentMap < WebContainerContext , Bundle > virtualHostHttpContexts = httpContexts . get ( virtualHost ) ; if ( virtualHostHttpContexts == null ) { virtualHostHttpContexts = new ConcurrentHashMap < > ( ) ; httpContexts . put ( virtualHost , virtualHostHttpContexts ) ; } final Bundle currentBundle = virtualHostHttpContexts . putIfAbsent ( httpContext , bundle ) ; if ( ( ! allowReAsssociation ) && currentBundle != null && currentBundle != bundle ) { throw new IllegalStateException ( "Http context " + httpContext + " is already associated to bundle " + currentBundle ) ; } }
public class ActiveConnectionRecord { /** * Initializes this connection record , associating it with the given user , * connection , balancing connection group , and sharing profile . The given * balancing connection group MUST be the connection group from which the * given connection was chosen , and the given sharing profile MUST be the * sharing profile that was used to share access to the given connection . * The start date of this connection record will be the time of its * creation . * @ param user * The user that connected to the connection associated with this * connection record . * @ param balancingGroup * The balancing group from which the given connection was chosen , or * null if no balancing group is being used . * @ param connection * The connection to associate with this connection record . * @ param sharingProfile * The sharing profile that was used to share access to the given * connection , or null if no sharing profile was used . */ private void init ( RemoteAuthenticatedUser user , ModeledConnectionGroup balancingGroup , ModeledConnection connection , ModeledSharingProfile sharingProfile ) { } }
this . user = user ; this . balancingGroup = balancingGroup ; this . connection = connection ; this . sharingProfile = sharingProfile ;
public class ComparatorCompat { /** * Returns a comparator that reverses the order of the specified comparator . * If the specified comparator is { @ code null } , this method is equivalent * to { @ link # reverseOrder ( ) } . * @ param < T > the type of the objects compared by the comparator * @ param comparator a comparator to be reversed * @ return a comparator * @ see Collections # reverseOrder ( java . util . Comparator ) * @ throws NullPointerException if { @ code comparator } is null */ @ NotNull public static < T > Comparator < T > reversed ( @ Nullable Comparator < T > comparator ) { } }
return Collections . reverseOrder ( comparator ) ;
public class StringParser { /** * Finds a pattern . If succeeds the skipped text returnned from skipped ( ) method * @ param pattern * @ return * @ see java . util . regex . Matcher # find */ public boolean find ( Pattern pattern ) { } }
matcher . usePattern ( pattern ) ; int start = matcher . regionStart ( ) ; if ( matcher . find ( ) ) { stack . add ( new MatchResultImpl ( matcher ) ) ; matcher . region ( matcher . end ( ) , matcher . regionEnd ( ) ) ; return true ; } else { return false ; }
public class Searcher { /** * Searches the pattern in a given model , but instead of a match map , returns all matches in a * list . * @ param model model to search in * @ param pattern pattern to search for * @ return matching results */ public static List < Match > searchPlain ( Model model , Pattern pattern ) { } }
List < Match > list = new LinkedList < Match > ( ) ; Map < BioPAXElement , List < Match > > map = search ( model , pattern ) ; for ( List < Match > matches : map . values ( ) ) { list . addAll ( matches ) ; } return list ;
public class CmsJspStandardContextBean { /** * Returns a caching hash specific to the element , it ' s properties and the current container width . < p > * @ return the caching hash */ public String elementCachingHash ( ) { } }
String result = "" ; if ( m_element != null ) { result = m_element . editorHash ( ) ; if ( m_container != null ) { result += "w:" + m_container . getWidth ( ) + "cName:" + m_container . getName ( ) + "cType:" + m_container . getType ( ) ; } } return result ;
public class ProactiveDetectionConfigurationsInner { /** * Update the ProactiveDetection configuration for this configuration id . * @ param resourceGroupName The name of the resource group . * @ param resourceName The name of the Application Insights component resource . * @ param configurationId The ProactiveDetection configuration ID . This is unique within a Application Insights component . * @ param proactiveDetectionProperties Properties that need to be specified to update the ProactiveDetection configuration . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < ApplicationInsightsComponentProactiveDetectionConfigurationInner > updateAsync ( String resourceGroupName , String resourceName , String configurationId , ApplicationInsightsComponentProactiveDetectionConfigurationInner proactiveDetectionProperties , final ServiceCallback < ApplicationInsightsComponentProactiveDetectionConfigurationInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( updateWithServiceResponseAsync ( resourceGroupName , resourceName , configurationId , proactiveDetectionProperties ) , serviceCallback ) ;
public class JsonReaderI { /** * called when json - smart done parsing a value */ public void setValue ( Object current , String key , Object value ) throws ParseException , IOException { } }
throw new RuntimeException ( ERR_MSG + " setValue in " + this . getClass ( ) + " key=" + key ) ;
public class Collator { /** * < strong > [ icu ] < / strong > Given a keyword , returns an array of all values for * that keyword that are currently in use . * @ param keyword one of the keywords returned by getKeywords . * @ see # getKeywords */ public static final String [ ] getKeywordValues ( String keyword ) { } }
if ( ! keyword . equals ( KEYWORDS [ 0 ] ) ) { throw new IllegalArgumentException ( "Invalid keyword: " + keyword ) ; } return ICUResourceBundle . getKeywordValues ( BASE , RESOURCE ) ;
public class ComparableFilter { /** * Indicates the specified entity instance matches to this * { @ code ComparableFilter } . */ @ Override public boolean matches ( Object entity ) { } }
T left = property . get ( entity ) ; return operator ( ) . matches ( left . compareTo ( value ( ) ) ) ;
public class IfcGeometricSetImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ SuppressWarnings ( "unchecked" ) @ Override public EList < IfcGeometricSetSelect > getElements ( ) { } }
return ( EList < IfcGeometricSetSelect > ) eGet ( Ifc4Package . Literals . IFC_GEOMETRIC_SET__ELEMENTS , true ) ;
public class EquivalentFragmentSets { /** * An { @ link EquivalentFragmentSet } that indicates a variable representing a concept with a particular ID . */ public static EquivalentFragmentSet id ( VarProperty varProperty , Variable start , ConceptId id ) { } }
return new AutoValue_IdFragmentSet ( varProperty , start , id ) ;
public class MoreExecutors { /** * An implementation of { @ link ExecutorService # invokeAny } for { @ link ListeningExecutorService } * implementations . */ @ GwtIncompatible static < T > T invokeAnyImpl ( ListeningExecutorService executorService , Collection < ? extends Callable < T > > tasks , boolean timed , long timeout , TimeUnit unit ) throws InterruptedException , ExecutionException , TimeoutException { } }
checkNotNull ( executorService ) ; checkNotNull ( unit ) ; int ntasks = tasks . size ( ) ; checkArgument ( ntasks > 0 ) ; List < Future < T > > futures = Lists . newArrayListWithCapacity ( ntasks ) ; BlockingQueue < Future < T > > futureQueue = Queues . newLinkedBlockingQueue ( ) ; long timeoutNanos = unit . toNanos ( timeout ) ; // For efficiency , especially in executors with limited // parallelism , check to see if previously submitted tasks are // done before submitting more of them . This interleaving // plus the exception mechanics account for messiness of main // loop . try { // Record exceptions so that if we fail to obtain any // result , we can throw the last exception we got . ExecutionException ee = null ; long lastTime = timed ? System . nanoTime ( ) : 0 ; Iterator < ? extends Callable < T > > it = tasks . iterator ( ) ; futures . add ( submitAndAddQueueListener ( executorService , it . next ( ) , futureQueue ) ) ; -- ntasks ; int active = 1 ; while ( true ) { Future < T > f = futureQueue . poll ( ) ; if ( f == null ) { if ( ntasks > 0 ) { -- ntasks ; futures . add ( submitAndAddQueueListener ( executorService , it . next ( ) , futureQueue ) ) ; ++ active ; } else if ( active == 0 ) { break ; } else if ( timed ) { f = futureQueue . poll ( timeoutNanos , TimeUnit . NANOSECONDS ) ; if ( f == null ) { throw new TimeoutException ( ) ; } long now = System . nanoTime ( ) ; timeoutNanos -= now - lastTime ; lastTime = now ; } else { f = futureQueue . take ( ) ; } } if ( f != null ) { -- active ; try { return f . get ( ) ; } catch ( ExecutionException eex ) { ee = eex ; } catch ( RuntimeException rex ) { ee = new ExecutionException ( rex ) ; } } } if ( ee == null ) { ee = new ExecutionException ( null ) ; } throw ee ; } finally { for ( Future < T > f : futures ) { f . cancel ( true ) ; } }
public class InductivePartialCharges { /** * Gets the paulingElectronegativities attribute of the * InductivePartialCharges object . * @ param ac AtomContainer * @ param modified if true , some values are modified by following the reference * @ return The pauling electronegativities */ public double [ ] getPaulingElectronegativities ( IAtomContainer ac , boolean modified ) throws CDKException { } }
double [ ] paulingElectronegativities = new double [ ac . getAtomCount ( ) ] ; IElement element = null ; String symbol = null ; int atomicNumber = 0 ; try { ifac = Isotopes . getInstance ( ) ; for ( int i = 0 ; i < ac . getAtomCount ( ) ; i ++ ) { IAtom atom = ac . getAtom ( i ) ; symbol = ac . getAtom ( i ) . getSymbol ( ) ; element = ifac . getElement ( symbol ) ; atomicNumber = element . getAtomicNumber ( ) ; if ( modified ) { if ( symbol . equals ( "Cl" ) ) { paulingElectronegativities [ i ] = 3.28 ; } else if ( symbol . equals ( "Br" ) ) { paulingElectronegativities [ i ] = 3.13 ; } else if ( symbol . equals ( "I" ) ) { paulingElectronegativities [ i ] = 2.93 ; } else if ( symbol . equals ( "H" ) ) { paulingElectronegativities [ i ] = 2.10 ; } else if ( symbol . equals ( "C" ) ) { if ( ac . getMaximumBondOrder ( atom ) == IBond . Order . SINGLE ) { // Csp3 paulingElectronegativities [ i ] = 2.20 ; } else if ( ac . getMaximumBondOrder ( atom ) == IBond . Order . DOUBLE ) { paulingElectronegativities [ i ] = 2.31 ; } else { paulingElectronegativities [ i ] = 3.15 ; } } else if ( symbol . equals ( "O" ) ) { if ( ac . getMaximumBondOrder ( atom ) == IBond . Order . SINGLE ) { // Osp3 paulingElectronegativities [ i ] = 3.20 ; } else if ( ac . getMaximumBondOrder ( atom ) != IBond . Order . SINGLE ) { paulingElectronegativities [ i ] = 4.34 ; } } else if ( symbol . equals ( "Si" ) ) { paulingElectronegativities [ i ] = 1.99 ; } else if ( symbol . equals ( "S" ) ) { paulingElectronegativities [ i ] = 2.74 ; } else if ( symbol . equals ( "N" ) ) { paulingElectronegativities [ i ] = 2.59 ; } else { paulingElectronegativities [ i ] = pauling [ atomicNumber ] ; } } else { paulingElectronegativities [ i ] = pauling [ atomicNumber ] ; } } return paulingElectronegativities ; } catch ( Exception ex1 ) { logger . debug ( ex1 ) ; throw new CDKException ( "Problems with IsotopeFactory due to " + ex1 . toString ( ) , ex1 ) ; }
public class DebuggableThreadPoolExecutor {
    /**
     * Returns a ThreadPoolExecutor with a fixed number of threads.
     * When all threads are actively executing tasks, new tasks are queued.
     * If (most) threads are expected to be idle most of the time, prefer createWithMaxSize() instead.
     *
     * @param threadPoolName the name of the threads created by this executor
     * @param size the fixed number of threads for this executor
     * @return the new DebuggableThreadPoolExecutor
     */
    public static DebuggableThreadPoolExecutor createWithFixedPoolSize(String threadPoolName, int size) {
        // Integer.MAX_VALUE seconds keep-alive effectively makes the pool size fixed
        return createWithMaximumPoolSize(threadPoolName, size, Integer.MAX_VALUE, TimeUnit.SECONDS);
    }
}
public class CaliperConfig { /** * TODO ( gak ) : make this work with different directory layouts . I ' m looking at you OS X . . . */ private static File getJdkHomeDir ( @ Nullable String baseDirectoryPath , @ Nullable String homeDirPath , String vmConfigName ) throws InvalidConfigurationException { } }
if ( homeDirPath == null ) { File baseDirectory = getBaseDirectory ( baseDirectoryPath ) ; File homeDir = new File ( baseDirectory , vmConfigName ) ; checkConfiguration ( homeDir . isDirectory ( ) , "%s is not a directory" , homeDir ) ; return homeDir ; } else { File potentialHomeDir = new File ( homeDirPath ) ; if ( potentialHomeDir . isAbsolute ( ) ) { checkConfiguration ( potentialHomeDir . isDirectory ( ) , "%s is not a directory" , potentialHomeDir ) ; return potentialHomeDir ; } else { File baseDirectory = getBaseDirectory ( baseDirectoryPath ) ; File homeDir = new File ( baseDirectory , homeDirPath ) ; checkConfiguration ( homeDir . isDirectory ( ) , "%s is not a directory" , potentialHomeDir ) ; return homeDir ; } }
public class ValidatorVault {
    /**
     * Validate the create methods of a vault class.
     *
     * <p>For every abstract method whose name starts with "create" and that has a
     * {@code Result} parameter, checks that the result's type parameter is either
     * the id type, {@code void}, or a type convertible from the asset class via
     * {@link ShimConverter}. A conversion failure is rethrown as a validation error
     * naming the offending vault method.
     */
    private void createValidation(Class<?> vaultClass, Class<?> assetClass, Class<?> idClass) {
        for (Method method : vaultClass.getMethods()) {
            // only abstract create* methods are subject to validation
            if (!method.getName().startsWith("create")) {
                continue;
            }
            if (!Modifier.isAbstract(method.getModifiers())) {
                continue;
            }
            TypeRef resultRef = findResult(method.getParameters());
            if (resultRef == null) {
                continue;
            }
            TypeRef typeRef = resultRef.to(Result.class).param(0);
            Class<?> typeClass = typeRef.rawClass();
            // id return type (compared unboxed so int/Integer etc. match)
            if (unbox(idClass).equals(unbox(typeClass))) {
                continue;
            }
            if (void.class.equals(unbox(typeClass))) {
                continue;
            }
            try {
                // probe that the asset can be shimmed into the declared result type
                new ShimConverter<>(assetClass, typeClass);
            } catch (Exception e) {
                throw error(e, "{0}.{1}: {2}", vaultClass.getSimpleName(), method.getName(), e.getMessage());
            }
        }
    }
}
public class DateContext {
    /**
     * Creates a date object based on the format passed in. Exceptions are NOT
     * thrown by this method and instead <code>null</code> is returned.
     * Otherwise, this is identical to {@link #createDate(String, String)}.
     *
     * @param dateString The string from which to create a date
     * @param inputFormat The format of the date string
     * @return The {@link Date} instance representing the date, or null on any parse failure
     * @see #createDate(String, String)
     */
    public Date createDateWithValidation(String dateString, String inputFormat) {
        try {
            return createDate(dateString, inputFormat);
        } catch (Exception exception) {
            // deliberate best-effort: log at debug level and report failure as null
            Syslog.debug("DateContextImpl.createDateWithValidation: Error " + "creating date with " + dateString + "/" + inputFormat + "; Exception: " + exception.getMessage());
        }
        return null;
    }
}
public class CommonUtils { /** * 字符串转值 * @ param nums 多个数字 * @ param sperator 分隔符 * @ return int [ ] */ public static int [ ] parseInts ( String nums , String sperator ) { } }
String [ ] ss = StringUtils . split ( nums , sperator ) ; int [ ] ints = new int [ ss . length ] ; for ( int i = 0 ; i < ss . length ; i ++ ) { ints [ i ] = Integer . parseInt ( ss [ i ] ) ; } return ints ;
public class AbstractDependenceMeasure {
    /**
     * Compute ranks of all objects, ranging from 1 to len.
     * Ties are given the average rank.
     *
     * @param adapter Data adapter
     * @param data Data array
     * @param len Length of data
     * @return Array of scores
     */
    protected static <A> double[] ranks(final NumberArrayAdapter<?, A> adapter, final A data, int len) {
        // delegate: build a sorted index first, then assign ranks from it
        return ranks(adapter, data, sortedIndex(adapter, data, len));
    }
}
public class StandaloneAetherArtifactFileResolver {
    /**
     * Retrieves the standard remote repositories, i.e., Maven Central and jboss.org.
     *
     * @return a fresh mutable list containing the two repository definitions
     */
    public static List<RemoteRepository> getStandardRemoteRepositories() {
        final List<RemoteRepository> remoteRepositories = new ArrayList<>();
        remoteRepositories.add(new RemoteRepository.Builder("central", "default", "https://central.maven.org/maven2/").build());
        remoteRepositories.add(new RemoteRepository.Builder("jboss-community-repository", "default", "https://repository.jboss.org/nexus/content/groups/public/").build());
        return remoteRepositories;
    }
}
public class Index {
    /**
     * Get an object from this index.
     *
     * @param objectID the unique identifier of the object to retrieve
     * @param attributesToRetrieve contains the list of attributes to retrieve
     * @return the object as JSON
     * @throws AlgoliaException if the underlying request fails
     */
    public JSONObject getObject(String objectID, List<String> attributesToRetrieve) throws AlgoliaException {
        // convenience overload: delegate with empty request options
        return this.getObject(objectID, attributesToRetrieve, RequestOptions.empty);
    }
}
public class ClassUtils { /** * Given a type , this method resolves the corresponding raw type . * This method works if { @ code type } is of type { @ link Class } , or { @ link ParameterizedType } . * Its shortcoming is that it cannot resolve { @ link TypeVariable } and will always return { @ code Object . class } , * not attempting to resolve the concrete type that the variable is to be substituted with . * Please use tools like { @ link io . crnk . core . engine . information . bean . BeanInformation } and * { @ link io . crnk . core . engine . information . bean . BeanAttributeInformation } instead . */ @ Deprecated public static Class < ? > getRawType ( Type type ) { } }
if ( type instanceof Class ) { return ( Class < ? > ) type ; } else if ( type instanceof ParameterizedType ) { return getRawType ( ( ( ParameterizedType ) type ) . getRawType ( ) ) ; } else if ( type instanceof TypeVariable < ? > ) { return getRawType ( ( ( TypeVariable < ? > ) type ) . getBounds ( ) [ 0 ] ) ; } throw new IllegalStateException ( "unknown type: " + type ) ;
public class CoNLLBenchmark { private static String getWordShape ( String string ) { } }
if ( string . toUpperCase ( ) . equals ( string ) && string . toLowerCase ( ) . equals ( string ) ) return "no-case" ; if ( string . toUpperCase ( ) . equals ( string ) ) return "upper-case" ; if ( string . toLowerCase ( ) . equals ( string ) ) return "lower-case" ; if ( string . length ( ) > 1 && Character . isUpperCase ( string . charAt ( 0 ) ) && string . substring ( 1 ) . toLowerCase ( ) . equals ( string . substring ( 1 ) ) ) return "capitalized" ; return "mixed-case" ;
public class MapperScannerConfigurer {
    /**
     * This property specifies the parent that the scanner will search for.
     * The scanner will register all interfaces in the base package that also have the
     * specified interface class as a parent.
     * Note this can be combined with annotationClass.
     *
     * @param superClass parent class
     */
    public void setMarkerInterface(Class<?> superClass) {
        this.markerInterface = superClass;
        // Marker subtypes are additionally registered as mappers directly
        if (Marker.class.isAssignableFrom(superClass)) {
            mapperHelper.registerMapper(superClass);
        }
    }
}
public class CloudSpannerStatement { /** * Does some formatting to DDL statements that might have been generated by standard SQL * generators to make it compatible with Google Cloud Spanner . We also need to get rid of any * comments , as Google Cloud Spanner does not accept comments in DDL - statements . * @ param sql The sql to format * @ return The formatted DDL statement . */ protected String formatDDLStatement ( String sql ) { } }
String result = removeComments ( sql ) ; String [ ] parts = getTokens ( sql , 0 ) ; if ( parts . length > 2 && parts [ 0 ] . equalsIgnoreCase ( "create" ) && parts [ 1 ] . equalsIgnoreCase ( "table" ) ) { String sqlWithSingleSpaces = String . join ( " " , parts ) ; int primaryKeyIndex = sqlWithSingleSpaces . toUpperCase ( ) . indexOf ( ", PRIMARY KEY (" ) ; if ( primaryKeyIndex > - 1 ) { int endPrimaryKeyIndex = sqlWithSingleSpaces . indexOf ( ')' , primaryKeyIndex ) ; String primaryKeySpec = sqlWithSingleSpaces . substring ( primaryKeyIndex + 2 , endPrimaryKeyIndex + 1 ) ; sqlWithSingleSpaces = sqlWithSingleSpaces . replace ( ", " + primaryKeySpec , "" ) ; sqlWithSingleSpaces = sqlWithSingleSpaces + " " + primaryKeySpec ; result = sqlWithSingleSpaces . replaceAll ( "\\s+\\)" , ")" ) ; } } return result ;
public class HttpBuilder {
    /**
     * Executes a PATCH request on the configured URI, with additional configuration provided by the configuration closure. The result will be cast to
     * the specified `type`.
     *
     * [source,groovy]
     * def http = HttpBuilder.configure {
     *     request.uri = 'http://localhost:10101'
     * }
     * String result = http.patch(String) {
     *     request.uri.path = '/something'
     * }
     *
     * The configuration `closure` allows additional configuration for this request based on the {@link HttpConfig} interface.
     *
     * @param type the type of the response content
     * @param closure the additional configuration closure (delegated to {@link HttpConfig})
     * @return the resulting content cast to the specified type
     */
    public <T> T patch(final Class<T> type, @DelegatesTo(HttpConfig.class) final Closure closure) {
        // run the PATCH through the configured interceptor chain, then cast the result
        return type.cast(interceptors.get(HttpVerb.PATCH).apply(configureRequest(type, HttpVerb.PATCH, closure), this::doPatch));
    }
}
public class XsdAsmUtils { /** * Obtains the signature for a class given the interface names . * @ param interfaces The implemented interfaces . * @ param className The class name . * @ param apiName The name of the generated fluent interface . * @ return The signature of the class . */ static String getClassSignature ( String [ ] interfaces , String className , String apiName ) { } }
StringBuilder signature ; signature = new StringBuilder ( "<Z::" + XsdSupportingStructure . elementTypeDesc + ">" + JAVA_OBJECT_DESC ) ; if ( interfaces != null ) { for ( String anInterface : interfaces ) { signature . append ( "L" ) . append ( getFullClassTypeName ( anInterface , apiName ) ) . append ( "<L" ) . append ( getFullClassTypeName ( className , apiName ) ) . append ( "<TZ;>;TZ;>;" ) ; } } return signature . toString ( ) ;
public class DiskTypeClient {
    /**
     * Returns the specified disk type. Gets a list of available disk types by making a list()
     * request.
     *
     * <p>Sample code:
     * <pre><code>
     * try (DiskTypeClient diskTypeClient = DiskTypeClient.create()) {
     *   ProjectZoneDiskTypeName diskType = ProjectZoneDiskTypeName.of("[PROJECT]", "[ZONE]", "[DISK_TYPE]");
     *   DiskType response = diskTypeClient.getDiskType(diskType.toString());
     * }
     * </code></pre>
     *
     * @param diskType Name of the disk type to return
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    @BetaApi
    public final DiskType getDiskType(String diskType) {
        // wrap the plain name in a request object and delegate
        GetDiskTypeHttpRequest request = GetDiskTypeHttpRequest.newBuilder().setDiskType(diskType).build();
        return getDiskType(request);
    }
}
public class CoordinatorResource { /** * This is an unsecured endpoint , defined as such in UNSECURED _ PATHS in CoordinatorJettyServerInitializer */ @ GET @ Path ( "/isLeader" ) @ Produces ( MediaType . APPLICATION_JSON ) public Response isLeader ( ) { } }
final boolean leading = coordinator . isLeader ( ) ; final Map < String , Boolean > response = ImmutableMap . of ( "leader" , leading ) ; if ( leading ) { return Response . ok ( response ) . build ( ) ; } else { return Response . status ( Response . Status . NOT_FOUND ) . entity ( response ) . build ( ) ; }
public class Container {
    /**
     * Assembles the specified component instance by injecting the dependent
     * components into it.
     * The assembled component instance is <b>not</b> managed by the container.
     *
     * @param <T> The component type.
     * @param component The component instance that the dependent components are
     *        to be injected into. Must not be null.
     * @return The assembled component instance.
     */
    @SuppressWarnings("unchecked")
    public <T> T assemble(T component) {
        Preconditions.checkArgument(component != null, "Parameter 'component' must not be [" + component + "]");
        // describe the runtime class of the instance, then inject its dependencies
        Descriptor<T> descriptor = new Descriptor<T>((Class<T>) component.getClass());
        return new Assembler<T>(this, descriptor, component).assemble();
    }
}
public class CompareRetinaApiImpl {
    /**
     * {@inheritDoc}
     *
     * Validates both models, then submits them as JSON to the compare API for
     * the configured retina.
     */
    @Override
    public Metric compare(Model model1, Model model2) throws JsonProcessingException, ApiException {
        validateRequiredModels(model1, model2);
        LOG.debug("Compare models: model1: " + model1.toJson() + " model: " + model2.toJson());
        return compareApi.compare(toJson(model1, model2), this.retinaName);
    }
}
public class ArrayBufferInput { /** * Reset buffer . This method returns the old buffer . * @ param buf new buffer . This can be null to make this input empty . * @ return the old buffer . */ public MessageBuffer reset ( MessageBuffer buf ) { } }
MessageBuffer old = this . buffer ; this . buffer = buf ; if ( buf == null ) { isEmpty = true ; } else { isEmpty = false ; } return old ;
public class SoapFault {
    /**
     * Builder method from Spring WS SOAP fault object. Copies code, actor/role and
     * reason string, and extracts each fault detail entry when a detail is present.
     *
     * @param fault the Spring WS fault to convert
     * @return the converted SoapFault
     */
    public static SoapFault from(org.springframework.ws.soap.SoapFault fault) {
        // QNameEditor renders the fault code QName as text
        QNameEditor qNameEditor = new QNameEditor();
        qNameEditor.setValue(fault.getFaultCode());
        SoapFault soapFault = new SoapFault()
                .faultCode(qNameEditor.getAsText())
                .faultActor(fault.getFaultActorOrRole())
                .faultString(fault.getFaultStringOrReason());
        if (fault.getFaultDetail() != null) {
            Iterator<SoapFaultDetailElement> details = fault.getFaultDetail().getDetailEntries();
            while (details.hasNext()) {
                SoapFaultDetailElement soapFaultDetailElement = details.next();
                soapFault.addFaultDetail(extractFaultDetail(soapFaultDetailElement));
            }
        }
        return soapFault;
    }
}
public class Yaml {
    /**
     * Loads the string representations into {@link YamlNode}s.
     *
     * @param yaml the reader
     * @return a sequential (non-parallel) stream of the {@link YamlNode}s
     * @see org.yaml.snakeyaml.Yaml#loadAll(Reader)
     */
    public Stream<YamlNode> loadStream(Reader yaml) {
        // adapt the Iterable from loadAll into a lazy sequential stream
        return StreamSupport.stream(loadAll(yaml).spliterator(), false);
    }
}
public class JmsMessage { /** * Gets the JMS correlationId header . * @ return */ public String getCorrelationId ( ) { } }
Object correlationId = getHeader ( JmsMessageHeaders . CORRELATION_ID ) ; if ( correlationId != null ) { return correlationId . toString ( ) ; } return null ;
public class Try {
    /**
     * Try to execute supplied Runnable and will Catch specified Exceptions or java.lang.Exception
     * if none specified.
     *
     * @param cf CheckedRunnable to execute
     * @param classes Exception types to catch (any Throwable is caught if none specified)
     * @return New Try: success(null) on normal completion, failure on a matched throwable
     */
    @SafeVarargs
    public static <X extends Throwable> Try<Void, X> runWithCatch(final CheckedRunnable<X> cf, final Class<? extends X>... classes) {
        Objects.requireNonNull(cf);
        try {
            cf.run();
            return Try.success(null);
        } catch (final Throwable t) {
            // no classes given: capture everything as a failure
            if (classes.length == 0)
                return Try.failure((X) t);
            // otherwise only capture throwables assignable to one of the given classes
            val error = Stream.of(classes).filter(c -> c.isAssignableFrom(t.getClass())).findFirst();
            if (error.isPresent())
                return Try.failure((X) t);
            else
                // unmatched throwables are rethrown (softened past the checked-exception rules)
                throw ExceptionSoftener.throwSoftenedException(t);
        }
    }
}
public class ApplicationMetadata { /** * returns mapped persistence unit . * @ param clazzName * clazz name . * @ return mapped persistence unit . */ public String getMappedPersistenceUnit ( String clazzName ) { } }
List < String > pus = clazzToPuMap . get ( clazzName ) ; final int _first = 0 ; String pu = null ; if ( pus != null && ! pus . isEmpty ( ) ) { if ( pus . size ( ) == 2 ) { onError ( clazzName ) ; } return pus . get ( _first ) ; } else { Set < String > mappedClasses = this . clazzToPuMap . keySet ( ) ; boolean found = false ; for ( String clazz : mappedClasses ) { if ( found && clazz . endsWith ( "." + clazzName ) ) { onError ( clazzName ) ; } else if ( clazz . endsWith ( "." + clazzName ) || clazz . endsWith ( "$" + clazzName ) ) { pu = clazzToPuMap . get ( clazz ) . get ( _first ) ; found = true ; } } } return pu ;
public class ChardevBackend {
    /**
     * Creates a backend of type {@code file} wrapping the given file descriptor object.
     *
     * @param file the chardev file configuration, must not be null
     * @return a new ChardevBackend with type set to {@code Discriminator.file}
     */
    @Nonnull
    public static ChardevBackend file(@Nonnull ChardevFile file) {
        ChardevBackend self = new ChardevBackend();
        self.type = Discriminator.file;
        self.file = file;
        return self;
    }
}
public class FileGetFromComputeNodeOptions { /** * Set a timestamp indicating the last modified time of the resource known to the client . The operation will be performed only if the resource on the service has been modified since the specified time . * @ param ifModifiedSince the ifModifiedSince value to set * @ return the FileGetFromComputeNodeOptions object itself . */ public FileGetFromComputeNodeOptions withIfModifiedSince ( DateTime ifModifiedSince ) { } }
if ( ifModifiedSince == null ) { this . ifModifiedSince = null ; } else { this . ifModifiedSince = new DateTimeRfc1123 ( ifModifiedSince ) ; } return this ;
public class HostXml_4 {
    /**
     * Add the operation to add the local host definition.
     *
     * @param address the management address to extend with the host element (mutated in place)
     * @param operationList receives the host-add and write-attribute operations, in order
     * @param hostName the configured host name; falls back to defaultHostControllerName when null
     * @return the host-add operation that was appended
     */
    private ModelNode addLocalHost(final ModelNode address, final List<ModelNode> operationList, final String hostName) {
        String resolvedHost = hostName != null ? hostName : defaultHostControllerName;
        // All further operations should modify the newly added host so the address passed in is updated.
        address.add(HOST, resolvedHost);
        // Add a step to setup the ManagementResourceRegistrations for the root host resource
        final ModelNode hostAddOp = new ModelNode();
        hostAddOp.get(OP).set(HostAddHandler.OPERATION_NAME);
        hostAddOp.get(OP_ADDR).set(address);
        operationList.add(hostAddOp);
        // Add a step to store the HC name (undefined node when no explicit name was configured)
        ModelNode nameValue = hostName == null ? new ModelNode() : new ModelNode(hostName);
        final ModelNode writeName = Util.getWriteAttributeOperation(address, NAME, nameValue);
        operationList.add(writeName);
        return hostAddOp;
    }
}
public class TileFactory {
    /**
     * Convert a pixel in the world bitmap at the specified zoom level into a GeoPosition.
     *
     * @param pixelCoordinate a Point2D representing a pixel in the world bitmap
     * @param zoom the zoom level of the world bitmap
     * @return the converted GeoPosition
     */
    public GeoPosition pixelToGeo(Point2D pixelCoordinate, int zoom) {
        // delegate to GeoUtil with this factory's tile info
        return GeoUtil.getPosition(pixelCoordinate, zoom, getInfo());
    }
}
public class DefaultEncodingStateRegistry {
    /*
     * (non-Javadoc)
     * @see EncodingStateRegistry#shouldEncodeWith(Encoder, java.lang.CharSequence)
     */
    public boolean shouldEncodeWith(Encoder encoderToApply, CharSequence string) {
        // "none" encoders never require encoding
        if (isNoneEncoder(encoderToApply))
            return false;
        // otherwise decide based on the string's recorded encoding state
        EncodingState encodingState = getEncodingStateFor(string);
        return shouldEncodeWith(encoderToApply, encodingState);
    }
}
public class RedisQuery { /** * ( non - Javadoc ) * @ see * com . impetus . kundera . query . QueryImpl # recursivelyPopulateEntities ( com . impetus * . kundera . metadata . model . EntityMetadata , * com . impetus . kundera . client . Client ) */ @ Override protected List < Object > recursivelyPopulateEntities ( EntityMetadata m , Client client ) { } }
List < EnhanceEntity > ls = new ArrayList < EnhanceEntity > ( ) ; RedisQueryInterpreter interpreter = onTranslation ( getKunderaQuery ( ) . getFilterClauseQueue ( ) , m ) ; ls = ( ( RedisClient ) client ) . onExecuteQuery ( interpreter , m . getEntityClazz ( ) ) ; return setRelationEntities ( ls , client , m ) ;
public class SipManagerDelegate {
    /**
     * Remove the sip sessions and sip application sessions.
     *
     * <p>Keys are first copied into local lists and then removed, so that
     * removal does not iterate the live key sets directly (removeSipSession /
     * removeSipApplicationSession mutate the underlying maps).
     */
    public void removeAllSessions() {
        // snapshot sip session keys before removing
        List<SipSessionKey> sipSessionsToRemove = new ArrayList<SipSessionKey>();
        for (SipSessionKey sipSessionKey : sipSessions.keySet()) {
            sipSessionsToRemove.add(sipSessionKey);
        }
        for (SipSessionKey sipSessionKey : sipSessionsToRemove) {
            removeSipSession(sipSessionKey);
        }
        // same pattern for application sessions
        List<SipApplicationSessionKey> sipApplicationSessionsToRemove = new ArrayList<SipApplicationSessionKey>();
        for (SipApplicationSessionKey sipApplicationSessionKey : sipApplicationSessions.keySet()) {
            sipApplicationSessionsToRemove.add(sipApplicationSessionKey);
        }
        for (SipApplicationSessionKey sipApplicationSessionKey : sipApplicationSessionsToRemove) {
            removeSipApplicationSession(sipApplicationSessionKey);
        }
    }
}
public class OlsonTimeZone {
    /*
     * (non-Javadoc)
     * @see android.icu.util.BasicTimeZone#getNextTransition(long, boolean)
     */
    @Override
    public TimeZoneTransition getNextTransition(long base, boolean inclusive) {
        // make sure the transition-rule tables are built before querying them
        initTransitionRules();
        if (finalZone != null) {
            if (inclusive && base == firstFinalTZTransition.getTime()) {
                return firstFinalTZTransition;
            } else if (base >= firstFinalTZTransition.getTime()) {
                if (finalZone.useDaylightTime()) {
                    //return finalZone.getNextTransition(base, inclusive);
                    return finalZoneWithStartYear.getNextTransition(base, inclusive);
                } else {
                    // No more transitions
                    return null;
                }
            }
        }
        if (historicRules != null) {
            // Find a historical transition
            // walk backwards to the last transition at or before base
            int ttidx = transitionCount - 1;
            for (; ttidx >= firstTZTransitionIdx; ttidx--) {
                long t = transitionTimes64[ttidx] * Grego.MILLIS_PER_SECOND;
                if (base > t || (!inclusive && base == t)) {
                    break;
                }
            }
            if (ttidx == transitionCount - 1) {
                return firstFinalTZTransition;
            } else if (ttidx < firstTZTransitionIdx) {
                return firstTZTransition;
            } else {
                // Create a TimeZoneTransition
                TimeZoneRule to = historicRules[getInt(typeMapData[ttidx + 1])];
                TimeZoneRule from = historicRules[getInt(typeMapData[ttidx])];
                long startTime = transitionTimes64[ttidx + 1] * Grego.MILLIS_PER_SECOND;
                // The transitions loaded from zoneinfo.res may contain non-transition data
                // (identical from/to rules); skip those by recursing past them
                if (from.getName().equals(to.getName()) && from.getRawOffset() == to.getRawOffset() && from.getDSTSavings() == to.getDSTSavings()) {
                    return getNextTransition(startTime, false);
                }
                return new TimeZoneTransition(startTime, from, to);
            }
        }
        return null;
    }
}
public class Iobeam {
    /**
     * Registers a device asynchronously with parameters of the provided {@link Device}. This will
     * not block the calling thread. If successful, the device ID of this client will be set (either
     * to the id provided, or if not provided, a randomly generated one). Any provided callback will
     * be run on a background thread. If the client already has a device ID set, registration will
     * only happen for a non-null device. Otherwise, the callback will be called with a {@link
     * Device} with the current ID.
     *
     * @param device Desired device parameters to register.
     * @param callback Callback for result of the registration, may be null.
     * @throws ApiException Thrown if the iobeam client is not initialized (wrapped as
     *         IobeamException when no callback is supplied).
     */
    public void registerDeviceAsync(Device device, RegisterCallback callback) {
        // normalize to an inner RestCallback; a null callback becomes a no-op one
        RestCallback<Device> cb;
        if (callback == null) {
            cb = RegisterCallback.getEmptyCallback().getInnerCallback(this);
        } else {
            cb = callback.getInnerCallback(this);
        }
        // If device ID is set and not explicitly asking for a different one, return current ID.
        boolean alreadySet = this.deviceId != null;
        if (alreadySet && (device == null || this.deviceId.equals(device.getId()))) {
            cb.completed(device, null);
            return;
        }
        // Make sure to unset before attempting, so as not to reuse old ID if it fails.
        this.deviceId = null;
        final DeviceService.Add req;
        try {
            req = prepareDeviceRequest(device);
        } catch (ApiException e) {
            IobeamException ie = new IobeamException(e);
            // without a callback the failure must surface as a thrown exception
            if (callback == null) {
                throw ie;
            } else {
                cb.failed(ie, null);
                return;
            }
        }
        req.executeAsync(cb);
    }
}
public class ClassReader {
    /**
     * If name is an array type or class signature, return the
     * corresponding type; otherwise return a ClassSymbol with given name.
     *
     * @param i constant-pool index of the UTF-8 name to decode
     */
    Object readClassOrType(int i) {
        int index = poolIdx[i];
        int len = getChar(index + 1);
        int start = index + 3;
        Assert.check(buf[start] == '[' || buf[start + len - 1] != ';');
        // by the above assertion, the following test can be
        // simplified to (buf[start] == '[')
        return (buf[start] == '[' || buf[start + len - 1] == ';')
                ? (Object) sigToType(buf, start, len)
                : (Object) enterClass(names.fromUtf(internalize(buf, start, len)));
    }
}
public class ClassUtils { /** * < p > Converts the specified array of primitive Class objects to an array of * its corresponding wrapper Class objects . < / p > * @ param classes the class array to convert , may be null or empty * @ return an array which contains for each given class , the wrapper class or * the original class if class is not a primitive . { @ code null } if null input . * Empty array if an empty array passed in . * @ since 2.1 */ public static Class < ? > [ ] primitivesToWrappers ( final Class < ? > ... classes ) { } }
if ( classes == null ) { return null ; } if ( classes . length == 0 ) { return classes ; } final Class < ? > [ ] convertedClasses = new Class [ classes . length ] ; for ( int i = 0 ; i < classes . length ; i ++ ) { convertedClasses [ i ] = primitiveToWrapper ( classes [ i ] ) ; } return convertedClasses ;
public class FileOutputFormat {
    /**
     * Helper function to create the task's temporary output directory and
     * return the path to the task's output file.
     *
     * @param conf job-configuration
     * @param name temporary task-output filename
     * @return path to the task's temporary output file
     * @throws IOException if the job output path is not configured
     */
    public static Path getTaskOutputPath(JobConf conf, String name) throws IOException {
        // ${mapred.out.dir}
        Path outputPath = getOutputPath(conf);
        if (outputPath == null) {
            throw new IOException("Undefined job output-path");
        }
        OutputCommitter committer = conf.getOutputCommitter();
        Path workPath = outputPath;
        TaskAttemptContext context = new TaskAttemptContext(conf, TaskAttemptID.forName(conf.get("mapred.task.id")));
        // only FileOutputCommitter provides a task-scoped temporary work path
        if (committer instanceof FileOutputCommitter) {
            workPath = ((FileOutputCommitter) committer).getWorkPath(context, outputPath);
        }
        // ${mapred.out.dir}/_temporary/_${taskid}/${name}
        return new Path(workPath, name);
    }
}
public class ImageryMetaDataResource { /** * Parse a JSON string containing resource field of a ImageryMetaData response * @ param a _ jsonObjectthe JSON content string * @ returnImageryMetaDataResource object containing parsed information * @ throws Exception */ static public ImageryMetaDataResource getInstanceFromJSON ( final JSONObject a_jsonObject , final JSONObject parent ) throws Exception { } }
final ImageryMetaDataResource result = new ImageryMetaDataResource ( ) ; if ( a_jsonObject == null ) { throw new Exception ( "JSON to parse is null" ) ; } result . copyright = parent . getString ( COPYRIGHT ) ; if ( a_jsonObject . has ( IMAGE_HEIGHT ) ) { result . m_imageHeight = a_jsonObject . getInt ( IMAGE_HEIGHT ) ; } if ( a_jsonObject . has ( IMAGE_WIDTH ) ) { result . m_imageWidth = a_jsonObject . getInt ( IMAGE_WIDTH ) ; } if ( a_jsonObject . has ( ZOOM_MIN ) ) { result . m_zoomMin = a_jsonObject . getInt ( ZOOM_MIN ) ; } if ( a_jsonObject . has ( ZOOM_MAX ) ) { result . m_zoomMax = a_jsonObject . getInt ( ZOOM_MAX ) ; } result . m_imageUrl = a_jsonObject . getString ( IMAGE_URL ) ; if ( result . m_imageUrl != null && result . m_imageUrl . matches ( ".*?\\{.*?\\}.*?" ) ) { result . m_imageUrl = result . m_imageUrl . replaceAll ( "\\{.*?\\}" , "%s" ) ; } final JSONArray subdomains = a_jsonObject . getJSONArray ( IMAGE_URL_SUBDOMAINS ) ; if ( subdomains != null && subdomains . length ( ) >= 1 ) { result . m_imageUrlSubdomains = new String [ subdomains . length ( ) ] ; for ( int i = 0 ; i < subdomains . length ( ) ; i ++ ) { result . m_imageUrlSubdomains [ i ] = subdomains . getString ( i ) ; } } result . m_isInitialised = true ; return result ;
public class CmsSetupStep03Database {
    /**
     * Switches DB type by rebuilding the settings panel for the selected database.
     *
     * @param dbName the database type
     * @param webapp the webapp name
     */
    private void updateDb(String dbName, String webapp) {
        // replace the current panel with one configured for the new database
        m_mainLayout.removeAllComponents();
        m_setupBean.setDatabase(dbName);
        CmsDbSettingsPanel panel = new CmsDbSettingsPanel(m_setupBean);
        m_panel[0] = panel;
        panel.initFromSetupBean(webapp);
        m_mainLayout.addComponent(panel);
    }
}
public class PageFlowUtils {
    /**
     * Add a property-related message that will be shown with the Errors and Error tags.
     *
     * @param request the current ServletRequest.
     * @param propertyName the name of the property with which to associate this error.
     * @param messageKey the message-resources key for the message.
     */
    public static void addActionError(ServletRequest request, String propertyName, String messageKey) {
        // delegate with no message arguments (null)
        InternalUtils.addActionError(propertyName, new ActionMessage(messageKey, null), request);
    }
}
public class AttributeCriterionPane {
    /**
     * Builds the criterion row UI: an attribute selector, an operator selector and a
     * value field, wired so that choosing an attribute enables/configures the rest.
     */
    private void buildUI() {
        // Attribute select:
        attributeSelect = new SelectItem("attributeItem");
        attributeSelect.setWidth(140);
        attributeSelect.setShowTitle(false);
        attributeSelect.setValueMap(org.geomajas.gwt.client.widget.attribute.AttributeCriterionPane.getSearchableAttributes(layer));
        attributeSelect.setHint(I18nProvider.getSearch().gridChooseAttribute());
        attributeSelect.setShowHintInField(true);
        attributeSelect.setValidateOnChange(true);
        attributeSelect.setShowErrorStyle(true);
        attributeSelect.setRequired(true);
        // Operator select: disabled until an attribute is chosen
        operatorSelect = new SelectItem("operatorItem");
        operatorSelect.setDisabled(true);
        operatorSelect.setWidth(140);
        operatorSelect.setShowTitle(false);
        operatorSelect.setValidateOnChange(true);
        operatorSelect.setShowErrorStyle(true);
        operatorSelect.setRequired(true);
        // Value form item: disabled until an attribute is chosen
        valueItem = new AttributeFormItem("valueItem");
        valueItem.setShowTitle(false);
        valueItem.setDisabled(true);
        valueItem.setWidth(150);
        // Mechanisms: react to attribute changes to configure operator/value fields
        attributeSelect.addChangedHandler(new ChangedHandler() {

            public void onChanged(ChangedEvent event) {
                attributeChanged();
            }
        });
        // Finalize: lay the three items out on one form row
        DynamicForm form = new DynamicForm();
        form.setNumCols(6);
        form.setHeight(26);
        form.setFields(attributeSelect, operatorSelect, valueItem);
        addChild(form);
    }
}
public class VersionHistoryImpl { /** * { @ inheritDoc } */ public String [ ] getVersionLabels ( Version version ) throws VersionException , RepositoryException { } }
checkValid ( ) ; List < String > vlabels = getVersionLabelsList ( version ) ; String [ ] res = new String [ vlabels . size ( ) ] ; for ( int i = 0 ; i < vlabels . size ( ) ; i ++ ) { res [ i ] = vlabels . get ( i ) ; } return res ;
public class BigDecimalUtil {
    /**
     * true if ABS((startValue - newValue) / startValue) &lt;= abs(thresholdPercent)
     *
     * @param startValue the reference value
     * @param newValue the value to compare against the reference
     * @param thresholdPercent the allowed relative movement
     * @return true when the movement stayed inside (or exactly on) the threshold
     */
    public static boolean movedInsideThresholdPercentage(final BigDecimal startValue, final BigDecimal newValue, final BigDecimal thresholdPercent) {
        // inside-or-equal is the logical negation of strictly-outside
        return !movedStrictlyOutsideThresholdPercentage(startValue, newValue, thresholdPercent);
    }
}
public class PluginManager { /** * Gets a static resource from a plugin * @ param pluginName - Name of the plugin ( defined in the plugin manifest ) * @ param fileName - Filename to fetch * @ return byte array of the resource * @ throws Exception exception */ public byte [ ] getResource ( String pluginName , String fileName ) throws Exception { } }
// TODO : This is going to be slow . . future improvement is to cache the data instead of searching all jars for ( String jarFilename : jarInformation ) { JarFile jarFile = new JarFile ( new File ( jarFilename ) ) ; Enumeration < ? > enumer = jarFile . entries ( ) ; // Use the Plugin - Name manifest entry to match with the provided pluginName String jarPluginName = jarFile . getManifest ( ) . getMainAttributes ( ) . getValue ( "Plugin-Name" ) ; if ( ! jarPluginName . equals ( pluginName ) ) { continue ; } while ( enumer . hasMoreElements ( ) ) { Object element = enumer . nextElement ( ) ; String elementName = element . toString ( ) ; // Skip items in the jar that don ' t start with " resources / " if ( ! elementName . startsWith ( "resources/" ) ) { continue ; } elementName = elementName . replace ( "resources/" , "" ) ; if ( elementName . equals ( fileName ) ) { // get the file from the jar ZipEntry ze = jarFile . getEntry ( element . toString ( ) ) ; InputStream fileStream = jarFile . getInputStream ( ze ) ; byte [ ] data = new byte [ ( int ) ze . getSize ( ) ] ; DataInputStream dataIs = new DataInputStream ( fileStream ) ; dataIs . readFully ( data ) ; dataIs . close ( ) ; return data ; } } } throw new FileNotFoundException ( "Could not find resource" ) ;
public class ResourcesInner {
    /**
     * Creates a resource (callback-based async variant).
     *
     * @param resourceGroupName The name of the resource group for the resource. The name is case insensitive.
     * @param resourceProviderNamespace The namespace of the resource provider.
     * @param parentResourcePath The parent resource identity.
     * @param resourceType The resource type of the resource to create.
     * @param resourceName The name of the resource to create.
     * @param apiVersion The API version to use for the operation.
     * @param parameters Parameters for creating or updating the resource.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<GenericResourceInner> beginCreateOrUpdateAsync(String resourceGroupName, String resourceProviderNamespace, String parentResourcePath, String resourceType, String resourceName, String apiVersion, GenericResourceInner parameters, final ServiceCallback<GenericResourceInner> serviceCallback) {
        // Delegate to the ServiceResponse-based variant and adapt the resulting
        // observable into a ServiceFuture that notifies the supplied callback.
        return ServiceFuture.fromResponse(beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, resourceProviderNamespace, parentResourcePath, resourceType, resourceName, apiVersion, parameters), serviceCallback);
    }
}
public class Single {
    /**
     * Runs the current Single and if it doesn't signal within the specified timeout window, it is
     * disposed and the other SingleSource subscribed to.
     * <dl>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code timeout} subscribes to the other SingleSource on the {@link Scheduler} you specify.</dd>
     * </dl>
     * @param timeout the timeout amount
     * @param unit the time unit
     * @param scheduler the scheduler where the timeout is awaited and the subscription to other happens
     * @param other the other SingleSource that gets subscribed to if the current Single times out
     * @return the new Single instance
     * @since 2.0
     */
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.CUSTOM)
    public final Single<T> timeout(long timeout, TimeUnit unit, Scheduler scheduler, SingleSource<? extends T> other) {
        // Only 'other' is validated here; unit and scheduler are presumably
        // validated inside timeout0 — NOTE(review): confirm against timeout0.
        ObjectHelper.requireNonNull(other, "other is null");
        return timeout0(timeout, unit, scheduler, other);
    }
}
public class IceAgent { /** * Attempts to select a candidate pair on a ICE component . < br > * A candidate pair is only selected if the local candidate channel is * registered with the provided Selection Key . * @ param component * The component that holds the gathered candidates . * @ param key * The key of the datagram channel of the elected candidate . * @ return Returns the selected candidate pair . If no pair was selected , * returns null . */ private CandidatePair selectCandidatePair ( IceComponent component , DatagramChannel channel ) { } }
for ( LocalCandidateWrapper localCandidate : component . getLocalCandidates ( ) ) { if ( channel . equals ( localCandidate . getChannel ( ) ) ) { return component . setCandidatePair ( channel ) ; } } return null ;
public class AbstractResultSetWrapper {
    /**
     * {@inheritDoc}
     *
     * Forwards the update unchanged to the wrapped ResultSet.
     *
     * @see java.sql.ResultSet#updateLong(java.lang.String, long)
     */
    @Override
    public void updateLong(final String columnLabel, final long x) throws SQLException {
        wrapped.updateLong(columnLabel, x);
    }
}
public class AnnisServiceRunner {
    /**
     * Creates and starts the server.
     *
     * On failure the runner records shutdown state (isShutdownRequested, errorCode 100)
     * before optionally re-throwing, so callers polling those fields see a consistent
     * failed state either way.
     *
     * @param rethrowExceptions set to true if you want startup exceptions
     *                          re-thrown to the caller
     * @throws Exception the original startup failure (unwrapped to its
     *                   AnnisException cause when one is present)
     */
    public void start(boolean rethrowExceptions) throws Exception {
        log.info("Starting up REST...");
        try {
            createWebServer();
            if (server == null) {
                // createWebServer signalled failure by leaving the server unset.
                isShutdownRequested = true;
                errorCode = 100;
            } else {
                server.start();
            }
        } catch (Exception ex) {
            log.error("could not start ANNIS REST service", ex);
            isShutdownRequested = true;
            errorCode = 100;
            if (rethrowExceptions) {
                // Prefer surfacing a wrapped AnnisException cause directly.
                if (!(ex instanceof AnnisException) && ex.getCause() instanceof AnnisException) {
                    throw ((AnnisException) ex.getCause());
                } else {
                    throw (ex);
                }
            }
        }
    }
}
public class YearQuarter { /** * Obtains the current year - quarter from the specified clock . * This will query the specified clock to obtain the current year - quarter . * Using this method allows the use of an alternate clock for testing . * The alternate clock may be introduced using { @ link Clock dependency injection } . * @ param clock the clock to use , not null * @ return the current year - quarter , not null */ public static YearQuarter now ( Clock clock ) { } }
final LocalDate now = LocalDate . now ( clock ) ; // called once return YearQuarter . of ( now . getYear ( ) , Quarter . from ( now . getMonth ( ) ) ) ;
public class DateParamTagSupport {
    /**
     * Converts the tag's 'value' to the java.sql temporal type named by 'type':
     * a null or TIMESTAMP_TYPE type yields java.sql.Timestamp, TIME_TYPE yields
     * java.sql.Time, DATE_TYPE yields java.sql.Date. The value is left untouched
     * when it already has the target type.
     *
     * @throws JspException if 'type' names an unsupported type
     */
    private void convertValue() throws JspException {
        // A missing type defaults to TIMESTAMP.
        if ((type == null) || (type.equalsIgnoreCase(TIMESTAMP_TYPE))) {
            if (!(value instanceof java.sql.Timestamp)) {
                value = new java.sql.Timestamp(value.getTime());
            }
        } else if (type.equalsIgnoreCase(TIME_TYPE)) {
            if (!(value instanceof java.sql.Time)) {
                value = new java.sql.Time(value.getTime());
            }
        } else if (type.equalsIgnoreCase(DATE_TYPE)) {
            if (!(value instanceof java.sql.Date)) {
                value = new java.sql.Date(value.getTime());
            }
        } else {
            // Unknown type string: report a localized error to the JSP layer.
            throw new JspException(Resources.getMessage("SQL_DATE_PARAM_INVALID_TYPE", type));
        }
    }
}
public class VirtualMachinesInner {
    /**
     * The operation to get all extensions of a Virtual Machine.
     *
     * @param resourceGroupName The name of the resource group.
     * @param vmName The name of the virtual machine containing the extension.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the VirtualMachineExtensionsListResultInner object
     */
    public Observable<ServiceResponse<VirtualMachineExtensionsListResultInner>> getExtensionsWithServiceResponseAsync(String resourceGroupName, String vmName) {
        // Fail fast on missing required parameters before issuing the request.
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (vmName == null) {
            throw new IllegalArgumentException("Parameter vmName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // This overload never expands the response ($expand is omitted).
        final String expand = null;
        return service.getExtensions(resourceGroupName, vmName, this.client.subscriptionId(), expand, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<VirtualMachineExtensionsListResultInner>>>() {
                @Override
                public Observable<ServiceResponse<VirtualMachineExtensionsListResultInner>> call(Response<ResponseBody> response) {
                    try {
                        // Deserialize the raw HTTP response into a typed service response.
                        ServiceResponse<VirtualMachineExtensionsListResultInner> clientResponse = getExtensionsDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Surface deserialization/validation failures through the stream.
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class KeyStore {
    /**
     * Returns a keystore object of the specified type.
     *
     * <p>A new KeyStore object encapsulating the KeyStoreSpi implementation from
     * the specified provider is returned. The specified provider must be
     * registered in the security provider list.
     *
     * <p>Note that the list of registered providers may be retrieved via the
     * {@link Security#getProviders() Security.getProviders()} method.
     *
     * @param type the type of keystore (see the KeyStore section of the Java
     *             Cryptography Architecture Standard Algorithm Name Documentation
     *             for standard keystore types)
     * @param provider the name of the provider
     * @return a keystore object of the specified type
     * @exception KeyStoreException if a KeyStoreSpi implementation for the
     *            specified type is not available from the specified provider
     * @exception NoSuchProviderException if the specified provider is not
     *            registered in the security provider list
     * @exception IllegalArgumentException if the provider name is null or empty
     * @see Provider
     */
    public static KeyStore getInstance(String type, String provider) throws KeyStoreException, NoSuchProviderException {
        if (provider == null || provider.length() == 0)
            throw new IllegalArgumentException("missing provider");
        try {
            // Security.getImpl returns {SPI instance, Provider} for the type/provider pair.
            Object[] objs = Security.getImpl(type, "KeyStore", provider);
            return new KeyStore((KeyStoreSpi) objs[0], (Provider) objs[1], type);
        } catch (NoSuchAlgorithmException nsae) {
            // Translate the generic lookup failure into the KeyStore-specific exception.
            throw new KeyStoreException(type + " not found", nsae);
        }
    }
}
public class NamespaceHeaderParser { /** * Parses the namespace { @ link File } into a { @ link NamespaceHeader } object . * @ param namespaceFile { @ link File } , the namespace file , which cannot be * null , must exist , and must be readable * @ return { @ link NamespaceHeader } , the parsed namespace header * @ throws IOException Thrown if an IO error occurred reading the * < tt > namespaceFile < / tt > * @ throws BELDataConversionException * @ throws BELDataMissingPropertyException * @ throws InvalidArgument Thrown if the < tt > namespaceFile < / tt > is null , * does not exist , or cannot be read */ public NamespaceHeader parseNamespace ( String resourceLocation , File namespaceFile ) throws IOException , BELDataMissingPropertyException , BELDataConversionException { } }
if ( namespaceFile == null ) { throw new InvalidArgument ( "namespaceFile" , namespaceFile ) ; } if ( ! namespaceFile . exists ( ) ) { throw new InvalidArgument ( "namespaceFile does not exist" ) ; } if ( ! namespaceFile . canRead ( ) ) { throw new InvalidArgument ( "namespaceFile cannot be read" ) ; } Map < String , Properties > blockProperties = parse ( namespaceFile ) ; NamespaceBlock nsblock = NamespaceBlock . create ( resourceLocation , blockProperties . get ( NamespaceBlock . BLOCK_NAME ) ) ; AuthorBlock authorblock = AuthorBlock . create ( resourceLocation , blockProperties . get ( AuthorBlock . BLOCK_NAME ) ) ; CitationBlock citationBlock = CitationBlock . create ( resourceLocation , blockProperties . get ( CitationBlock . BLOCK_NAME ) ) ; ProcessingBlock processingBlock = ProcessingBlock . create ( resourceLocation , blockProperties . get ( ProcessingBlock . BLOCK_NAME ) ) ; return new NamespaceHeader ( nsblock , authorblock , citationBlock , processingBlock ) ;
public class CustomScanDialog {
    /**
     * Launches a scan from the dialog's current state.
     *
     * Collects the context-specific objects (scan policy, scanner parameters,
     * technology set, custom-panel objects), resolves the scan target, persists
     * user-defined injection points, and finally starts the scan via the extension.
     */
    @Override
    public void save() {
        List<Object> contextSpecificObjects = new ArrayList<Object>();
        techTreeState = getTechTree().getTechSet();
        if (!this.getBoolValue(FIELD_ADVANCED)) {
            // Simple mode: only the default scan policy is passed through.
            contextSpecificObjects.add(scanPolicy);
        } else {
            contextSpecificObjects.add(policyPanel.getScanPolicy());
            if (target == null && this.customPanels != null) {
                // One of the custom scan panels must have specified a target
                for (CustomScanPanel customPanel : this.customPanels) {
                    target = customPanel.getTarget();
                    if (target != null) {
                        break;
                    }
                }
            }
            // Save all Variant configurations
            getVariantPanel().saveParam(scannerParam);
            // If all other vectors has been disabled
            // force all injectable params and rpc model to NULL
            if (getDisableNonCustomVectors().isSelected()) {
                scannerParam.setTargetParamsInjectable(0);
                scannerParam.setTargetParamsEnabledRPC(0);
            }
            // Persist user-highlighted injection points (non-recursive scans only).
            if (!getBoolValue(FIELD_RECURSE) && injectionPointModel.getSize() > 0) {
                int[][] injPoints = new int[injectionPointModel.getSize()][];
                for (int i = 0; i < injectionPointModel.getSize(); i++) {
                    Highlight hl = injectionPointModel.elementAt(i);
                    injPoints[i] = new int[2];
                    injPoints[i][0] = hl.getStartOffset();
                    injPoints[i][1] = hl.getEndOffset();
                }
                try {
                    if (target != null && target.getStartNode() != null) {
                        VariantUserDefined.setInjectionPoints(this.target.getStartNode().getHistoryReference().getURI().toString(), injPoints);
                        enableUserDefinedRPC();
                    }
                } catch (Exception e) {
                    logger.error(e.getMessage(), e);
                }
            }
            // Carry over global scanner settings from the extension's configuration.
            scannerParam.setHostPerScan(extension.getScannerParam().getHostPerScan());
            scannerParam.setThreadPerHost(extension.getScannerParam().getThreadPerHost());
            scannerParam.setHandleAntiCSRFTokens(extension.getScannerParam().getHandleAntiCSRFTokens());
            scannerParam.setMaxResultsToList(extension.getScannerParam().getMaxResultsToList());
            contextSpecificObjects.add(scannerParam);
            contextSpecificObjects.add(techTreeState);
            if (this.customPanels != null) {
                // Let every custom panel contribute its own context objects.
                for (CustomScanPanel customPanel : this.customPanels) {
                    Object[] objs = customPanel.getContextSpecificObjects();
                    if (objs != null) {
                        for (Object obj : objs) {
                            contextSpecificObjects.add(obj);
                        }
                    }
                }
            }
        }
        // NOTE(review): assumes 'target' is non-null by this point (set by the
        // dialog or a custom panel) — an NPE would occur otherwise; confirm.
        target.setRecurse(this.getBoolValue(FIELD_RECURSE));
        if (target.getContext() == null && getSelectedContext() != null) {
            target.setContext(getSelectedContext());
        }
        this.extension.startScan(target, getSelectedUser(), contextSpecificObjects.toArray());
    }
}
public class JobLifecycleListenersList {
    /**
     * {@inheritDoc}
     *
     * Registers the listener with the local dispatcher and also forwards it to
     * the job catalog and job scheduler delegates so it receives their events too.
     */
    @Override
    public void registerJobLifecycleListener(JobLifecycleListener listener) {
        _dispatcher.addListener(listener);
        _jobCatalogDelegate.addListener(listener);
        _jobSchedulerDelegate.registerJobSpecSchedulerListener(listener);
    }
}
public class DateUtils { /** * Returns the days between two dates . Positive values indicate that the * second date is after the first , and negative values indicate , well , the * opposite . Relying on specific times is problematic . * @ param early * the " first date " * @ param late * the " second date " * @ return the days between the two dates */ public static final int daysBetween ( Date early , Date late ) { } }
java . util . GregorianCalendar calst = new java . util . GregorianCalendar ( ) ; java . util . GregorianCalendar caled = new java . util . GregorianCalendar ( ) ; calst . setTime ( early ) ; caled . setTime ( late ) ; // 设置时间为0时 calst . set ( java . util . GregorianCalendar . HOUR_OF_DAY , 0 ) ; calst . set ( java . util . GregorianCalendar . MINUTE , 0 ) ; calst . set ( java . util . GregorianCalendar . SECOND , 0 ) ; caled . set ( java . util . GregorianCalendar . HOUR_OF_DAY , 0 ) ; caled . set ( java . util . GregorianCalendar . MINUTE , 0 ) ; caled . set ( java . util . GregorianCalendar . SECOND , 0 ) ; // 得到两个日期相差的天数 int days = ( ( int ) ( caled . getTime ( ) . getTime ( ) / 1000 / 3600 / 24 ) - ( int ) ( calst . getTime ( ) . getTime ( ) / 1000 / 3600 / 24 ) ) ; return days ;
public class CIMsgPhrase {
    /**
     * Get the MsgPhrase this configuration item represents.
     *
     * @return the resolved MsgPhrase, or null if the lookup failed
     */
    public MsgPhrase getMsgPhrase() {
        MsgPhrase ret = null;
        try {
            ret = MsgPhrase.get(this.uuid);
        } catch (final EFapsException e) {
            // Lookup failures are logged and reported as null rather than propagated.
            LOG.error("Error on retrieving MsgPhrase for CIMsgPhrase with uuid: {}", this.uuid);
        }
        return ret;
    }
}
public class XOManagerImpl {
    /**
     * Find entities according to the given example entity.
     *
     * Flushes pending session changes first so the query sees current state,
     * resolves the type's discriminator, runs the datastore lookup, and wraps the
     * raw entity iterator so each element is materialized as a managed instance.
     * The iterator is additionally bound to the current transaction when one is
     * active.
     *
     * @param type   The entity type.
     * @param entity The example entity (property metadata to expected value).
     * @param <T>    The entity type.
     * @return A {@link ResultIterable} over matching managed instances.
     */
    private <T> ResultIterable<T> findByExample(Class<?> type, Map<PrimitivePropertyMethodMetadata<PropertyMetadata>, Object> entity) {
        // Flush pending changes so the datastore query reflects the session state.
        sessionContext.getCacheSynchronizationService().flush();
        EntityTypeMetadata<EntityMetadata> entityTypeMetadata = sessionContext.getMetadataProvider().getEntityMetadata(type);
        EntityDiscriminator entityDiscriminator = entityTypeMetadata.getDatastoreMetadata().getDiscriminator();
        if (entityDiscriminator == null) {
            // Without a discriminator the type cannot be located in the datastore.
            throw new XOException("Type " + type.getName() + " has no discriminator (i.e. cannot be identified in datastore).");
        }
        ResultIterator<Entity> iterator = sessionContext.getDatastoreSession().getDatastoreEntityManager().findEntity(entityTypeMetadata, entityDiscriminator, entity);
        AbstractInstanceManager<EntityId, Entity> entityInstanceManager = sessionContext.getEntityInstanceManager();
        // Adapt the raw entity iterator: each next() materializes a managed instance.
        ResultIterator<T> resultIterator = new ResultIterator<T>() {
            @Override
            public boolean hasNext() {
                return iterator.hasNext();
            }

            @Override
            public T next() {
                Entity entity = iterator.next();
                return entityInstanceManager.readInstance(entity);
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException("Cannot remove instance.");
            }

            @Override
            public void close() {
                iterator.close();
            }
        };
        XOTransaction xoTransaction = sessionContext.getXOTransaction();
        // Bind iteration to the active transaction when one exists.
        final ResultIterator<T> transactionalIterator = xoTransaction != null ? new TransactionalResultIterator<>(resultIterator, xoTransaction) : resultIterator;
        return sessionContext.getInterceptorFactory().addInterceptor(new AbstractResultIterable<T>() {
            @Override
            public ResultIterator<T> iterator() {
                return transactionalIterator;
            }
        }, ResultIterable.class);
    }
}
public class NormalizeUtils {
    /**
     * Produces a hash for the paths of adjacent bnodes for a bnode,
     * incorporating all information about its subgraph of bnodes. This method
     * will recursively pick adjacent bnode permutations that produce the
     * lexicographically-least 'path' serializations.
     *
     * The algorithm has two phases driven by one index loop: while hpi walks the
     * bnode's quads it buckets adjacent bnodes into 'groups' keyed by a
     * direction/predicate/name digest; once hpi reaches quads.size() it sorts the
     * group hashes and, per group, explores permutations of the group's bnodes to
     * find the lexicographically smallest path, recursing into unnamed bnodes.
     *
     * @param id        the ID of the bnode to hash paths for
     * @param bnodes    the map of bnode quads
     * @param namer     the canonical bnode namer
     * @param pathNamer the namer used to assign names to adjacent bnodes
     * @return the resulting hash together with the (possibly replaced) path namer
     */
    private static HashResult hashPaths(String id, Map<String, Object> bnodes, UniqueNamer namer, UniqueNamer pathNamer) {
        try {
            // create SHA-1 digest accumulating the per-group hashes
            final MessageDigest md = MessageDigest.getInstance("SHA-1");
            final Map<String, List<String>> groups = new LinkedHashMap<String, List<String>>();
            List<String> groupHashes;
            final List<Object> quads = (List<Object>) ((Map<String, Object>) bnodes.get(id)).get("quads");
            for (int hpi = 0;; hpi++) {
                if (hpi == quads.size()) {
                    // done collecting groups — hash them in sorted order
                    groupHashes = new ArrayList<String>(groups.keySet());
                    Collections.sort(groupHashes);
                    for (int hgi = 0;; hgi++) {
                        if (hgi == groupHashes.size()) {
                            // all groups digested: finalize the result
                            final HashResult res = new HashResult();
                            res.hash = encodeHex(md.digest());
                            res.pathNamer = pathNamer;
                            return res;
                        }
                        // digest group hash
                        final String groupHash = groupHashes.get(hgi);
                        md.update(groupHash.getBytes("UTF-8"));
                        // choose a path and namer from the permutations
                        String chosenPath = null;
                        UniqueNamer chosenNamer = null;
                        final Permutator permutator = new Permutator(groups.get(groupHash));
                        while (true) {
                            Boolean contPermutation = false;
                            Boolean breakOut = false;
                            final List<String> permutation = permutator.next();
                            UniqueNamer pathNamerCopy = pathNamer.clone();
                            // build adjacent path for this permutation
                            String path = "";
                            final List<String> recurse = new ArrayList<String>();
                            for (final String bnode : permutation) {
                                // use canonical name if available
                                if (namer.isNamed(bnode)) {
                                    path += namer.getName(bnode);
                                } else {
                                    // recurse if bnode isn't named in the path yet
                                    if (!pathNamerCopy.isNamed(bnode)) {
                                        recurse.add(bnode);
                                    }
                                    path += pathNamerCopy.getName(bnode);
                                }
                                // skip permutation if path is already >= chosen path
                                if (chosenPath != null && path.length() >= chosenPath.length() && path.compareTo(chosenPath) > 0) {
                                    // equivalent of nextPermutation(true) in the JS original
                                    if (permutator.hasNext()) {
                                        contPermutation = true;
                                    } else {
                                        // digest chosen path and update namer
                                        md.update(chosenPath.getBytes("UTF-8"));
                                        pathNamer = chosenNamer;
                                        // hash the next group
                                        breakOut = true;
                                    }
                                    break;
                                }
                            }
                            // if we should do the next permutation
                            if (contPermutation) {
                                continue;
                            }
                            // if we should stop processing this group
                            if (breakOut) {
                                break;
                            }
                            // handle the recursion queue built above
                            for (int nrn = 0;; nrn++) {
                                if (nrn == recurse.size()) {
                                    // equivalent of nextPermutation(false): keep the best path so far
                                    if (chosenPath == null || path.compareTo(chosenPath) < 0) {
                                        chosenPath = path;
                                        chosenNamer = pathNamerCopy;
                                    }
                                    if (!permutator.hasNext()) {
                                        // digest chosen path and update namer
                                        md.update(chosenPath.getBytes("UTF-8"));
                                        pathNamer = chosenNamer;
                                        // hash the next group
                                        breakOut = true;
                                    }
                                    break;
                                }
                                // recurse into the unnamed adjacent bnode
                                final String bnode = recurse.get(nrn);
                                final HashResult result = hashPaths(bnode, bnodes, namer, pathNamerCopy);
                                path += pathNamerCopy.getName(bnode) + "<" + result.hash + ">";
                                pathNamerCopy = result.pathNamer;
                                // skip permutation if path is already >= chosen path
                                if (chosenPath != null && path.length() >= chosenPath.length() && path.compareTo(chosenPath) > 0) {
                                    // equivalent of nextPermutation(true)
                                    if (!permutator.hasNext()) {
                                        // digest chosen path and update namer
                                        md.update(chosenPath.getBytes("UTF-8"));
                                        pathNamer = chosenNamer;
                                        // hash the next group
                                        breakOut = true;
                                    }
                                    break;
                                }
                                // continue with the next queued recursion
                            }
                            // if we should stop processing this group
                            if (breakOut) {
                                break;
                            }
                        }
                    }
                }
                // phase 1: bucket the adjacent bnode of this quad into a group
                final Map<String, Object> quad = (Map<String, Object>) quads.get(hpi);
                String bnode = getAdjacentBlankNodeName((Map<String, Object>) quad.get("subject"), id);
                String direction = null;
                if (bnode != null) {
                    // normal property
                    direction = "p";
                } else {
                    bnode = getAdjacentBlankNodeName((Map<String, Object>) quad.get("object"), id);
                    if (bnode != null) {
                        // reverse property
                        direction = "r";
                    }
                }
                if (bnode != null) {
                    // get bnode name (try canonical, path, then hash)
                    String name;
                    if (namer.isNamed(bnode)) {
                        name = namer.getName(bnode);
                    } else if (pathNamer.isNamed(bnode)) {
                        name = pathNamer.getName(bnode);
                    } else {
                        name = hashQuads(bnode, bnodes, namer);
                    }
                    // hash direction, predicate value, and end bnode name/hash
                    final MessageDigest md1 = MessageDigest.getInstance("SHA-1");
                    md1.update(direction.getBytes("UTF-8"));
                    md1.update(((String) ((Map<String, Object>) quad.get("predicate")).get("value")).getBytes("UTF-8"));
                    md1.update(name.getBytes("UTF-8"));
                    final String groupHash = encodeHex(md1.digest());
                    if (groups.containsKey(groupHash)) {
                        groups.get(groupHash).add(bnode);
                    } else {
                        final List<String> tmp = new ArrayList<String>();
                        tmp.add(bnode);
                        groups.put(groupHash, tmp);
                    }
                }
            }
        } catch (final NoSuchAlgorithmException e) {
            // SHA-1 is required by every JCA implementation; treat absence as fatal.
            throw new RuntimeException(e);
        } catch (final UnsupportedEncodingException e) {
            // UTF-8 is likewise guaranteed to be available.
            throw new RuntimeException(e);
        }
    }
}
public class CmsPropertyChange { /** * Builds the html for the result list of resources where the property was changed . < p > * @ return the html for the result list */ public String buildResultList ( ) { } }
StringBuffer result = new StringBuffer ( 16 ) ; if ( ( getChangedResources ( ) != null ) && ( getChangedResources ( ) . size ( ) > 0 ) ) { // at least one resource property value has been changed , show list for ( int i = 0 ; i < getChangedResources ( ) . size ( ) ; i ++ ) { CmsResource res = ( CmsResource ) getChangedResources ( ) . get ( i ) ; String resName = getCms ( ) . getSitePath ( res ) ; result . append ( resName ) ; result . append ( "<br>\n" ) ; } } else { // nothing was changed , show message result . append ( Messages . get ( ) . getBundle ( getLocale ( ) ) . key ( Messages . GUI_INPUT_PROPERTYCHANGE_RESULT_NONE_0 ) ) ; } return result . toString ( ) ;
public class ResolvableType {
    /**
     * Return an array of {@link ResolvableType}s representing the generic parameters of
     * this type. If no generics are available an empty array is returned. If you need to
     * access a specific generic consider using the {@link #getGeneric(int...)} method as
     * it allows access to nested generics and protects against
     * {@code IndexOutOfBoundsExceptions}.
     *
     * @return an array of {@link ResolvableType}s representing the generic parameters
     * (never {@code null})
     * @see #hasGenerics()
     * @see #getGeneric(int...)
     * @see #resolveGeneric(int...)
     * @see #resolveGenerics()
     */
    public ResolvableType[] getGenerics() {
        if (this == NONE) {
            return EMPTY_TYPES_ARRAY;
        }
        // Lazily compute and cache the generics on first access.
        if (this.generics == null) {
            if (this.type instanceof Class) {
                // Raw class: generics are its declared type variables.
                Class<?> typeClass = (Class<?>) this.type;
                this.generics = forTypes(SerializableTypeWrapper.forTypeParameters(typeClass), this.variableResolver);
            } else if (this.type instanceof ParameterizedType) {
                // Parameterized type: wrap each actual type argument.
                Type[] actualTypeArguments = ((ParameterizedType) this.type).getActualTypeArguments();
                ResolvableType[] generics = new ResolvableType[actualTypeArguments.length];
                for (int i = 0; i < actualTypeArguments.length; i++) {
                    generics[i] = forType(actualTypeArguments[i], this.variableResolver);
                }
                this.generics = generics;
            } else {
                // Other type kinds: resolve first, then delegate.
                this.generics = resolveType().getGenerics();
            }
        }
        return this.generics;
    }
}
public class MessageItem {
    /**
     * (non-Javadoc)
     * @see com.ibm.ws.sib.processor.impl.interfaces.SIMPMessage#getGuaranteedStreamUuid()
     */
    @Override
    public SIBUuid12 getGuaranteedStreamUuid() {
        // Lazily read the UUID from the underlying JsMessage on first access
        // and cache it; the flag avoids repeating the message lookup.
        if (!guaranteedStreamUuidSet) {
            JsMessage localMsg = getJSMessage(true);
            guaranteedStreamUuidSet = true;
            guaranteedStreamUuid = localMsg.getGuaranteedStreamUUID();
        }
        return guaranteedStreamUuid;
    }
}
public class WebSiteManagementClientImpl {
    /**
     * Updates source control token.
     *
     * @param sourceControlType Type of source control
     * @param requestMessage Source control token information
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the SourceControlInner object
     */
    public Observable<SourceControlInner> updateSourceControlAsync(String sourceControlType, SourceControlInner requestMessage) {
        // Delegate to the ServiceResponse variant and unwrap each response body.
        return updateSourceControlWithServiceResponseAsync(sourceControlType, requestMessage).map(new Func1<ServiceResponse<SourceControlInner>, SourceControlInner>() {
            @Override
            public SourceControlInner call(ServiceResponse<SourceControlInner> response) {
                return response.body();
            }
        });
    }
}
public class ClientNotificationArea {
    /**
     * Remove all the client notification registrations.
     *
     * Iterates every registered listener, detaching it either from the local
     * MBeanServer or — for routed targets — from the Target-Client Manager.
     * The listener map is always cleared, even if individual removals fail.
     *
     * @param request the REST request triggering the cleanup (unused in the removal itself)
     */
    public synchronized void remoteClientRegistrations(RESTRequest request) {
        Iterator<Entry<NotificationTargetInformation, ClientNotificationListener>> clientListeners = listeners.entrySet().iterator();
        try {
            while (clientListeners.hasNext()) {
                Entry<NotificationTargetInformation, ClientNotificationListener> clientListener = clientListeners.next();
                NotificationTargetInformation nti = clientListener.getKey();
                ClientNotificationListener listener = clientListener.getValue();
                // Check whether the producer of the notification is local or remote.
                if (nti.getRoutingInformation() == null) {
                    // Remove the notification from the MBeanServer
                    ObjectName objName = RESTHelper.objectNameConverter(nti.getNameAsString(), false, null);
                    if (MBeanServerHelper.isRegistered(objName)) {
                        try {
                            MBeanServerHelper.removeClientNotification(objName, listener);
                        } catch (RESTHandlerJsonException exception) {
                            // Best-effort cleanup: log and continue with the remaining listeners.
                            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                                Tr.debug(tc, "Received exception while cleaning up: " + exception);
                            }
                        }
                    } else {
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                            Tr.debug(tc, "The MBean " + objName + " is not registered with the MBean server.");
                        }
                    }
                } else {
                    // Remove the notification listener from the Target-Client Manager through EventAdmin
                    MBeanRoutedNotificationHelper helper = MBeanRoutedNotificationHelper.getMBeanRoutedNotificationHelper();
                    helper.removeRoutedNotificationListener(nti, listener);
                }
            }
        } finally {
            // Clear the map regardless of whether any removal above threw.
            listeners.clear();
        }
    }
}
public class LocLogger {
    /**
     * Log a localized message at the ERROR level.
     *
     * The key is translated via the message catalog before logging. When the
     * underlying logger is location-aware the localized text is logged through
     * the location-aware API; otherwise a MessageParameterObj carrying the key
     * and arguments is attached for downstream processing.
     *
     * @param key  the key used for localization
     * @param args optional arguments
     */
    public void error(Enum<?> key, Object... args) {
        // Avoid the translation cost entirely when ERROR is disabled.
        if (!logger.isErrorEnabled()) {
            return;
        }
        String translatedMsg = imc.getMessage(key, args);
        MessageParameterObj mpo = new MessageParameterObj(key, args);
        if (instanceofLAL) {
            ((LocationAwareLogger) logger).log(LOCALIZED, FQCN, LocationAwareLogger.ERROR_INT, translatedMsg, args, null);
        } else {
            logger.error(LOCALIZED, translatedMsg, mpo);
        }
    }
}
public class JPAEntity {
    /**
     * Finds a JPA entity by its primary key, excluding soft-deleted entities.
     *
     * @param <E>  The JPA entity type.
     * @param em   The entity manager to use. Cannot be null.
     * @param id   The ID of the entity to find. Must be a positive, non-zero integer.
     * @param type The runtime type to cast the result value to.
     * @return The corresponding entity or null if no entity exists.
     */
    public static <E extends Identifiable> E findByPrimaryKey(EntityManager em, BigInteger id, Class<E> type) {
        requireArgument(em != null, "The entity manager cannot be null.");
        requireArgument(id != null && id.compareTo(ZERO) > 0, "ID cannot be null and must be positive and non-zero");
        requireArgument(type != null, "The entity type cannot be null.");
        TypedQuery<E> query = em.createNamedQuery("JPAEntity.findByPrimaryKey", type);
        // Bypass the shared cache so the result reflects the current database state.
        query.setHint("javax.persistence.cache.storeMode", "REFRESH");
        try {
            query.setParameter("id", id);
            // Only non-deleted entities are considered matches.
            query.setParameter("deleted", false);
            return query.getSingleResult();
        } catch (NoResultException ex) {
            // Absence is an expected outcome, reported as null.
            return null;
        }
    }
}
public class HBaseQueueAdmin {
    /**
     * Returns the column qualifier for the consumer state column. The qualifier is formed by
     * {@code <groupId><instanceId>}: an 8-byte big-endian group ID followed by a
     * 4-byte big-endian instance ID (12 bytes total).
     *
     * @param groupId Group ID of the consumer
     * @param instanceId Instance ID of the consumer
     * @return A new byte[] which is the column qualifier.
     */
    public static byte[] getConsumerStateColumn(long groupId, int instanceId) {
        // ByteBuffer's default big-endian order produces the same layout as the
        // original Bytes.putLong/Bytes.putInt helpers, without the third-party
        // Longs/Ints constants.
        return java.nio.ByteBuffer.allocate(Long.BYTES + Integer.BYTES)
                .putLong(groupId)
                .putInt(instanceId)
                .array();
    }
}
public class DeleteAppRequestMarshaller {
    /**
     * Marshall the given parameter object into the protocol representation.
     *
     * @param deleteAppRequest   the request to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving each bound field
     * @throws SdkClientException if the request is null or any field fails to marshall
     */
    public void marshall(DeleteAppRequest deleteAppRequest, ProtocolMarshaller protocolMarshaller) {
        if (deleteAppRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each request field against its protocol binding.
            protocolMarshaller.marshall(deleteAppRequest.getAppId(), APPID_BINDING);
            protocolMarshaller.marshall(deleteAppRequest.getForceStopAppReplication(), FORCESTOPAPPREPLICATION_BINDING);
            protocolMarshaller.marshall(deleteAppRequest.getForceTerminateApp(), FORCETERMINATEAPP_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}