signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class AbstractEJBRuntime { /** * Fully initialize the BeanMetaData . When this method completes * successfully , bmd . fullyInitialized will be true ; this method must not be * called if this field is already true . The context class loader must be * the runtime class loader when calling this method . */ private void finishBMDInit ( BeanMetaData bmd ) throws ContainerException , EJBConfigurationException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) Tr . entry ( tc , "finishBMDInit: " + bmd . j2eeName ) ; // d640935.1 // First , create the reference context for the bean if we haven ' t already // done so . createReferenceContext ( bmd ) ; // F743-29417 ivEJBMDOrchestrator . finishBMDInitWithReferenceContext ( bmd ) ; // Free resources in EJBModuleMetaData if all beans have been initialized . bmd . _moduleMetaData . freeResourcesAfterAllBeansInitialized ( bmd ) ; // d462512 if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) Tr . exit ( tc , "finishBMDInit" ) ;
public class BatchAddFacetToObjectMarshaller { /** * Marshall the given parameter object . */ public void marshall ( BatchAddFacetToObject batchAddFacetToObject , ProtocolMarshaller protocolMarshaller ) { } }
if ( batchAddFacetToObject == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( batchAddFacetToObject . getSchemaFacet ( ) , SCHEMAFACET_BINDING ) ; protocolMarshaller . marshall ( batchAddFacetToObject . getObjectAttributeList ( ) , OBJECTATTRIBUTELIST_BINDING ) ; protocolMarshaller . marshall ( batchAddFacetToObject . getObjectReference ( ) , OBJECTREFERENCE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class EventFilterParser {
    /**
     * ANTLR-generated rule method for:
     * EventFilter.g:186:1 : time_millis_function :
     *   ( TIME_MILLIS_FUN_NAME '(' STRING ',' STRING ')'
     *       -> ^( TIME_MILLIS_FUN_NAME STRING STRING ) | ) ;
     * Matches an optional TIME_MILLIS function call and rewrites it into an
     * AST rooted at a TimeMillisValueTreeNode with two StringTreeNode children.
     * NOTE(review): generated code — do not hand-edit logic.
     */
    public final EventFilterParser.time_millis_function_return time_millis_function() throws RecognitionException {
        EventFilterParser.time_millis_function_return retval = new EventFilterParser.time_millis_function_return();
        retval.start = input.LT(1);

        CommonTree root_0 = null;

        // Matched tokens for each grammar element of alternative 1.
        Token TIME_MILLIS_FUN_NAME95 = null;
        Token char_literal96 = null;
        Token STRING97 = null;
        Token char_literal98 = null;
        Token STRING99 = null;
        Token char_literal100 = null;

        CommonTree TIME_MILLIS_FUN_NAME95_tree = null;
        CommonTree char_literal96_tree = null;
        CommonTree STRING97_tree = null;
        CommonTree char_literal98_tree = null;
        CommonTree STRING99_tree = null;
        CommonTree char_literal100_tree = null;

        // Rewrite streams collecting tokens for the AST rewrite below.
        RewriteRuleTokenStream stream_35 = new RewriteRuleTokenStream(adaptor, "token 35");
        RewriteRuleTokenStream stream_33 = new RewriteRuleTokenStream(adaptor, "token 33");
        RewriteRuleTokenStream stream_34 = new RewriteRuleTokenStream(adaptor, "token 34");
        RewriteRuleTokenStream stream_TIME_MILLIS_FUN_NAME = new RewriteRuleTokenStream(adaptor, "token TIME_MILLIS_FUN_NAME");
        RewriteRuleTokenStream stream_STRING = new RewriteRuleTokenStream(adaptor, "token STRING");
        try {
            // EventFilter.g:187:2 : ( TIME_MILLIS_FUN_NAME '(' STRING ',' STRING ')'
            //   -> ^( TIME_MILLIS_FUN_NAME STRING STRING ) | )
            // Decide between alternative 1 (the function call) and the empty
            // alternative 2 based on one token of lookahead.
            int alt15 = 2;
            int LA15_0 = input.LA(1);
            if ((LA15_0 == TIME_MILLIS_FUN_NAME)) {
                alt15 = 1;
            } else if ((LA15_0 == EOF || LA15_0 == AND || LA15_0 == OR || (LA15_0 >= 34 && LA15_0 <= 35))) {
                alt15 = 2;
            } else {
                NoViableAltException nvae = new NoViableAltException("", 15, 0, input);
                throw nvae;
            }
            switch (alt15) {
                case 1:
                    // EventFilter.g:187:4 : TIME_MILLIS_FUN_NAME '(' STRING ',' STRING ')'
                    {
                        // Consume the function name, '(', first STRING, ',',
                        // second STRING, ')' — each fed to its rewrite stream.
                        TIME_MILLIS_FUN_NAME95 = (Token) match(input, TIME_MILLIS_FUN_NAME, FOLLOW_TIME_MILLIS_FUN_NAME_in_time_millis_function1132);
                        stream_TIME_MILLIS_FUN_NAME.add(TIME_MILLIS_FUN_NAME95);
                        char_literal96 = (Token) match(input, 33, FOLLOW_33_in_time_millis_function1134);
                        stream_33.add(char_literal96);
                        STRING97 = (Token) match(input, STRING, FOLLOW_STRING_in_time_millis_function1136);
                        stream_STRING.add(STRING97);
                        char_literal98 = (Token) match(input, 35, FOLLOW_35_in_time_millis_function1138);
                        stream_35.add(char_literal98);
                        STRING99 = (Token) match(input, STRING, FOLLOW_STRING_in_time_millis_function1140);
                        stream_STRING.add(STRING99);
                        char_literal100 = (Token) match(input, 34, FOLLOW_34_in_time_millis_function1142);
                        stream_34.add(char_literal100);

                        // AST REWRITE
                        // elements: TIME_MILLIS_FUN_NAME, STRING, STRING
                        // token labels:
                        // rule labels: retval
                        // token list labels:
                        // rule list labels:
                        // wildcard labels:
                        retval.tree = root_0;
                        RewriteRuleSubtreeStream stream_retval = new RewriteRuleSubtreeStream(adaptor, "rule retval", retval != null ? retval.tree : null);

                        root_0 = (CommonTree) adaptor.nil();
                        // 187:51 : -> ^( TIME_MILLIS_FUN_NAME STRING STRING )
                        {
                            // EventFilter.g:187:54 : ^( TIME_MILLIS_FUN_NAME STRING STRING )
                            {
                                CommonTree root_1 = (CommonTree) adaptor.nil();
                                // The function-name token becomes a custom
                                // TimeMillisValueTreeNode root with the two
                                // STRING arguments as StringTreeNode children.
                                root_1 = (CommonTree) adaptor.becomeRoot(new TimeMillisValueTreeNode(stream_TIME_MILLIS_FUN_NAME.nextToken()), root_1);
                                adaptor.addChild(root_1, new StringTreeNode(stream_STRING.nextToken()));
                                adaptor.addChild(root_1, new StringTreeNode(stream_STRING.nextToken()));
                                adaptor.addChild(root_0, root_1);
                            }
                        }
                        retval.tree = root_0;
                    }
                    break;
                case 2:
                    // EventFilter.g:188:2: — empty alternative: produce an empty tree.
                    {
                        root_0 = (CommonTree) adaptor.nil();
                    }
                    break;
            }
            retval.stop = input.LT(-1);
            retval.tree = (CommonTree) adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
        } catch (RecognitionException re) {
            // Standard ANTLR error recovery: report, resync, return error node.
            reportError(re);
            recover(input, re);
            retval.tree = (CommonTree) adaptor.errorNode(input, retval.start, input.LT(-1), re);
        } finally {
            // do for sure before leaving
        }
        return retval;
    }
}
public class BigtableInstanceAdminClient { /** * Constructs an instance of BigtableInstanceAdminClient with the given ProjectName and stub . * @ deprecated Please use { @ link # create ( String , BigtableInstanceAdminStub ) } . */ @ Deprecated public static BigtableInstanceAdminClient create ( @ Nonnull com . google . bigtable . admin . v2 . ProjectName projectName , @ Nonnull BigtableInstanceAdminStub stub ) { } }
return create ( projectName . getProject ( ) , stub ) ;
public class RelationalOperationsMatrix { /** * with the interior of area B . */ private void interiorAreaInteriorArea_ ( int half_edge , int id_a , int id_b ) { } }
if ( m_matrix [ MatrixPredicate . InteriorInterior ] == 2 ) return ; int faceParentage = m_topo_graph . getHalfEdgeFaceParentage ( half_edge ) ; if ( ( faceParentage & id_a ) != 0 && ( faceParentage & id_b ) != 0 ) m_matrix [ MatrixPredicate . InteriorInterior ] = 2 ;
public class AbstractWComponent { /** * This is where most of the painting work is normally done . If a layout has been supplied either directly or by * supplying a velocity template , then painting is delegated to the layout manager . If there is no layout , the * default behaviour is to paint the child components in sequence . * @ param renderContext the context to render to . */ protected void paintComponent ( final RenderContext renderContext ) { } }
Renderer renderer = UIManager . getRenderer ( this , renderContext ) ; if ( getTemplate ( ) != null || getTemplateMarkUp ( ) != null ) { Renderer templateRenderer = UIManager . getTemplateRenderer ( renderContext ) ; templateRenderer . render ( this , renderContext ) ; } else if ( renderer == null ) { // Default is juxtaposition List < WComponent > children = getComponentModel ( ) . getChildren ( ) ; if ( children != null ) { final int size = children . size ( ) ; for ( int i = 0 ; i < size ; i ++ ) { children . get ( i ) . paint ( renderContext ) ; } } } else { renderer . render ( this , renderContext ) ; }
public class CreatorJournalEntry { /** * Process the management method : * < ul > * < li > Check the operating mode - if we are in * { @ link JournalOperatingMode # READ _ ONLY Read - Only } mode , this check will * throw an exception . < / li > * < li > Prepare the writer in case we need to initialize a new file with a * repository hash . < / li > * < li > Invoke the method on the ManagementDelegate . < / li > * < li > Write the full journal entry , including any context changes from the * Management method . < / li > * < / ul > * These operations occur within a synchronized block . We must be sure that * any pending operations are complete before we get the repository hash , so * we are confident that the hash accurately reflects the state of the * repository . Since all API - M operations go through this synchronized * block , we can be confident that the previous one had completed before the * current one started . * There might be a way to enforce the synchronization at a lower level , * thus increasing throughput , but we haven ' t explored it yet . */ public Object invokeMethod ( ManagementDelegate delegate , JournalWriter writer ) throws ServerException , JournalException { } }
synchronized ( JournalWriter . SYNCHRONIZER ) { JournalOperatingMode . enforceCurrentMode ( ) ; writer . prepareToWriteJournalEntry ( ) ; Object result = super . getMethod ( ) . invoke ( delegate ) ; writer . writeJournalEntry ( this ) ; return result ; }
public class CloneUtility { /** * Creates a java . util . Properties map from a com . ibm . jbatch . jsl . model . Properties * object . * @ param xmlProperties * @ return */ public static Properties jslPropertiesToJavaProperties ( final JSLProperties xmlProperties ) { } }
final Properties props = new Properties ( ) ; for ( final Property prop : xmlProperties . getPropertyList ( ) ) { props . setProperty ( prop . getName ( ) , prop . getValue ( ) ) ; } return props ;
public class UriEscape { /** * Perform am URI path segment < strong > escape < / strong > operation * on a < tt > Reader < / tt > input , writing results to a < tt > Writer < / tt > . * The following are the only allowed chars in an URI path segment ( will not be escaped ) : * < ul > * < li > < tt > A - Z a - z 0-9 < / tt > < / li > * < li > < tt > - . _ ~ < / tt > < / li > * < li > < tt > ! $ & amp ; ' ( ) * + , ; = < / tt > < / li > * < li > < tt > : @ < / tt > < / li > * < / ul > * All other chars will be escaped by converting them to the sequence of bytes that * represents them in the specified < em > encoding < / em > and then representing each byte * in < tt > % HH < / tt > syntax , being < tt > HH < / tt > the hexadecimal representation of the byte . * This method is < strong > thread - safe < / strong > . * @ param reader the < tt > Reader < / tt > reading the text to be escaped . * @ param writer the < tt > java . io . Writer < / tt > to which the escaped result will be written . Nothing will * be written at all to this writer if input is < tt > null < / tt > . * @ param encoding the encoding to be used for escaping . * @ throws IOException if an input / output exception occurs * @ since 1.1.2 */ public static void escapeUriPathSegment ( final Reader reader , final Writer writer , final String encoding ) throws IOException { } }
if ( writer == null ) { throw new IllegalArgumentException ( "Argument 'writer' cannot be null" ) ; } if ( encoding == null ) { throw new IllegalArgumentException ( "Argument 'encoding' cannot be null" ) ; } UriEscapeUtil . escape ( reader , writer , UriEscapeUtil . UriEscapeType . PATH_SEGMENT , encoding ) ;
public class OkCoinTradeServiceRaw { /** * 获取OKEX合约交易历史 ( 非个人 ) * @ param symbol * @ param since * @ param date * @ return * @ throws IOException */ public OkCoinFuturesTradeHistoryResult [ ] getFuturesTradesHistory ( String symbol , long since , String date ) throws IOException { } }
OkCoinFuturesTradeHistoryResult [ ] futuresHistory = okCoin . getFuturesTradeHistory ( apikey , since , symbol , date , signatureCreator ( ) ) ; return ( futuresHistory ) ;
public class LoggingDecoratorBuilder { /** * Sets the { @ link Function } to use to sanitize a response cause before logging . You can * sanitize the stack trace of the exception to remove sensitive information , or prevent from logging * the stack trace completely by returning { @ code null } in the { @ link Function } . If unset , will use * { @ link Function # identity ( ) } . */ public T responseCauseSanitizer ( Function < ? super Throwable , ? extends Throwable > responseCauseSanitizer ) { } }
this . responseCauseSanitizer = requireNonNull ( responseCauseSanitizer , "responseCauseSanitizer" ) ; return self ( ) ;
public class SubdocHelper { /** * Check whether a { @ link ResponseStatus } is subdocument - level or not . That is to say an error code which , * if received in the context of a multi - operation , would not prevent the successful execution of other * operations in that packet . * For instance , { @ link ResponseStatus # SUBDOC _ PATH _ NOT _ FOUND } is a subdoc error code that would not prevent * the execution of other operations whereas { @ link ResponseStatus # NOT _ EXISTS } is a document access error code * and would inherently invalidate any other operation within the theoretical packet . * @ param responseStatus the status code to check . * @ return true if the status code denotes a subdocument - level error , false otherwise . */ public static boolean isSubdocLevelError ( ResponseStatus responseStatus ) { } }
switch ( responseStatus ) { case SUBDOC_PATH_NOT_FOUND : case SUBDOC_PATH_EXISTS : case SUBDOC_DELTA_RANGE : case SUBDOC_NUM_RANGE : case SUBDOC_VALUE_TOO_DEEP : case SUBDOC_PATH_TOO_BIG : case SUBDOC_PATH_INVALID : case SUBDOC_PATH_MISMATCH : case SUBDOC_VALUE_CANTINSERT : return true ; case SUBDOC_DOC_NOT_JSON : case SUBDOC_DOC_TOO_DEEP : case SUBDOC_INVALID_COMBO : case SUBDOC_MULTI_PATH_FAILURE : return false ; default : return false ; }
public class PeekView { /** * Sets the height of the view in PX . * @ param height the height of the circle in px */ private void setHeight ( int height ) { } }
contentParams . height = options . fullScreenPeek ( ) ? screenHeight : height ; content . setLayoutParams ( contentParams ) ;
public class ZeroMQNetworkService { /** * Set the reference to the space service . * @ param service the service . */ @ Inject public void setSpaceService ( ContextSpaceService service ) { } }
if ( this . spaceService != null ) { this . spaceService . removeSpaceRepositoryListener ( this . serviceListener ) ; } this . spaceService = service ; if ( this . spaceService != null ) { this . spaceService . addSpaceRepositoryListener ( this . serviceListener ) ; }
public class AccountsImpl {
    /**
     * Lists all node agent SKUs supported by the Azure Batch service.
     * Single-page variant: fetches one page of results with all optional
     * request parameters left unset. NOTE(review): AutoRest-generated code.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;NodeAgentSku&gt; object wrapped in {@link ServiceResponseWithHeaders} if successful.
     */
    public Observable<ServiceResponseWithHeaders<Page<NodeAgentSku>, AccountListNodeAgentSkusHeaders>> listNodeAgentSkusSinglePageAsync() {
        // Validate required client configuration before issuing the request.
        if (this.client.batchUrl() == null) {
            throw new IllegalArgumentException("Parameter this.client.batchUrl() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // This overload takes no options object, so every optional request
        // parameter is sent as null.
        final AccountListNodeAgentSkusOptions accountListNodeAgentSkusOptions = null;
        String filter = null;
        Integer maxResults = null;
        Integer timeout = null;
        UUID clientRequestId = null;
        Boolean returnClientRequestId = null;
        DateTime ocpDate = null;
        // Substitute the client's batch URL into the parameterized host template.
        String parameterizedHost = Joiner.on(", ").join("{batchUrl}", this.client.batchUrl());
        DateTimeRfc1123 ocpDateConverted = null;
        if (ocpDate != null) {
            ocpDateConverted = new DateTimeRfc1123(ocpDate);
        }
        // Issue the call, then unwrap the raw response into a typed page +
        // headers, forwarding any failure through the Observable.
        return service.listNodeAgentSkus(this.client.apiVersion(), this.client.acceptLanguage(), filter, maxResults, timeout, clientRequestId, returnClientRequestId, ocpDateConverted, parameterizedHost, this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<Page<NodeAgentSku>, AccountListNodeAgentSkusHeaders>>>() {
                @Override
                public Observable<ServiceResponseWithHeaders<Page<NodeAgentSku>, AccountListNodeAgentSkusHeaders>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponseWithHeaders<PageImpl<NodeAgentSku>, AccountListNodeAgentSkusHeaders> result = listNodeAgentSkusDelegate(response);
                        return Observable.just(new ServiceResponseWithHeaders<Page<NodeAgentSku>, AccountListNodeAgentSkusHeaders>(result.body(), result.headers(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class TopologyUtils { /** * Parses the value in Config . TOPOLOGY _ COMPONENT _ DISKMAP , * and returns a map containing only component specified . * Returns a empty map if the Config is not set * @ param topology the topology def * @ return a map ( componentName - & gt ; disk required ) */ public static Map < String , ByteAmount > getComponentDiskMapConfig ( TopologyAPI . Topology topology ) throws RuntimeException { } }
Map < String , String > configMap = getComponentConfigMap ( topology , Config . TOPOLOGY_COMPONENT_DISKMAP ) ; Map < String , ByteAmount > diskMap = new HashMap < > ( ) ; for ( Map . Entry < String , String > entry : configMap . entrySet ( ) ) { long requiredDisk = Long . parseLong ( entry . getValue ( ) ) ; diskMap . put ( entry . getKey ( ) , ByteAmount . fromBytes ( requiredDisk ) ) ; } return diskMap ;
public class MockSecurityGroupController { /** * Create the mock SecurityGroup . * @ param groupName group Name . * @ param groupDescription group Description . * @ param vpcId vpc Id for Security Group . * @ return mock Security Group . */ public MockSecurityGroup createSecurityGroup ( final String groupName , final String groupDescription , final String vpcId ) { } }
MockSecurityGroup ret = new MockSecurityGroup ( ) ; ret . setGroupName ( groupName ) ; ret . setGroupDescription ( groupDescription ) ; ret . setGroupId ( "sg-" + UUID . randomUUID ( ) . toString ( ) . substring ( 0 , SECURITYGROUP_ID_POSTFIX_LENGTH ) ) ; ret . setVpcId ( vpcId ) ; MockIpPermissionType mockIpPermissionType = new MockIpPermissionType ( ) ; mockIpPermissionType . setIpProtocol ( "-1" ) ; List < String > ipRanges = new ArrayList < String > ( ) ; ipRanges . add ( "0.0.0.0/0" ) ; mockIpPermissionType . setIpRanges ( ipRanges ) ; List < MockIpPermissionType > mockIpPermissionTypes = new ArrayList < MockIpPermissionType > ( ) ; mockIpPermissionTypes . add ( mockIpPermissionType ) ; ret . setIpPermissionsEgress ( mockIpPermissionTypes ) ; List < MockIpPermissionType > mockIpPermissionTypesIngress = new ArrayList < MockIpPermissionType > ( ) ; ret . setIpPermissions ( mockIpPermissionTypesIngress ) ; allMockSecurityGroup . put ( ret . getGroupId ( ) , ret ) ; return ret ;
public class Matrix4x3f { /** * Set this matrix to a model transformation for a right - handed coordinate system , * that translates to the given < code > pos < / code > and aligns the local < code > - z < / code > * axis with < code > dir < / code > . * This method is equivalent to calling : < code > translation ( pos ) . rotateTowards ( dir , up ) < / code > * @ see # translation ( Vector3fc ) * @ see # rotateTowards ( Vector3fc , Vector3fc ) * @ param pos * the position to translate to * @ param dir * the direction to rotate towards * @ param up * the up vector * @ return this */ public Matrix4x3f translationRotateTowards ( Vector3fc pos , Vector3fc dir , Vector3fc up ) { } }
return translationRotateTowards ( pos . x ( ) , pos . y ( ) , pos . z ( ) , dir . x ( ) , dir . y ( ) , dir . z ( ) , up . x ( ) , up . y ( ) , up . z ( ) ) ;
public class Message { /** * returns the hmac of the data and the annotation chunk values ( except HMAC chunk itself ) */ public byte [ ] hmac ( byte [ ] key ) { } }
try { Key secretKey = new SecretKeySpec ( key , "HmacSHA1" ) ; Mac hmac_algo = Mac . getInstance ( "HmacSHA1" ) ; hmac_algo . init ( secretKey ) ; hmac_algo . update ( this . data ) ; for ( Entry < String , byte [ ] > a : this . annotations . entrySet ( ) ) // this is in a fixed order because it is a SortedMap { if ( ! a . getKey ( ) . equals ( "HMAC" ) ) hmac_algo . update ( a . getValue ( ) ) ; } return hmac_algo . doFinal ( ) ; } catch ( NoSuchAlgorithmException e ) { throw new PyroException ( "invalid hmac algorithm" , e ) ; } catch ( InvalidKeyException e ) { throw new PyroException ( "invalid hmac key" , e ) ; }
public class QueueFile { /** * Writes header atomically . The arguments contain the updated values . The class member fields * should not have changed yet . This only updates the state in the file . It ' s up to the caller to * update the class member variables * after * this call succeeds . Assumes segment writes are atomic * in the underlying file system . */ private void writeHeader ( int fileLength , int elementCount , int firstPosition , int lastPosition ) throws IOException { } }
writeInt ( buffer , 0 , fileLength ) ; writeInt ( buffer , 4 , elementCount ) ; writeInt ( buffer , 8 , firstPosition ) ; writeInt ( buffer , 12 , lastPosition ) ; raf . seek ( 0 ) ; raf . write ( buffer ) ;
public class SimpleEncryptor { /** * { @ inheritDoc } */ @ Override public String encrypt ( final String string ) throws UnsupportedEncodingException , IllegalBlockSizeException , BadPaddingException , InvalidKeyException , NoSuchAlgorithmException , InvalidKeySpecException , NoSuchPaddingException , InvalidAlgorithmParameterException { } }
initialize ( ) ; final byte [ ] utf8 = string . getBytes ( StandardCharsets . UTF_8 . name ( ) ) ; final byte [ ] encrypt = this . cipher . doFinal ( utf8 ) ; final String encrypted = Base64 . getEncoder ( ) . encodeToString ( encrypt ) ; return encrypted ;
public class IotHubResourcesInner {
    /**
     * Get the health for routing endpoints.
     * Fetches the next page of endpoint health data using the continuation
     * link returned by the previous List call.
     * NOTE(review): AutoRest-generated code.
     * ServiceResponse<PageImpl<EndpointHealthDataInner>>
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;EndpointHealthDataInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<EndpointHealthDataInner>>> getEndpointHealthNextSinglePageAsync(final String nextPageLink) {
        if (nextPageLink == null) {
            throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
        }
        // The continuation link is used verbatim as the request URL.
        String nextUrl = String.format("%s", nextPageLink);
        // Issue the call, then unwrap the raw response into a typed page,
        // forwarding any failure through the Observable.
        return service.getEndpointHealthNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<EndpointHealthDataInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<EndpointHealthDataInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl<EndpointHealthDataInner>> result = getEndpointHealthNextDelegate(response);
                        return Observable.just(new ServiceResponse<Page<EndpointHealthDataInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class StringSetPreference { /** * Set the value for the preference */ @ SuppressLint ( "CommitPrefEdits" ) @ Override public void set ( Set < String > value ) { } }
getPreferences ( ) . edit ( ) . putStringSet ( getKey ( ) , value ) . apply ( ) ;
public class TransactionSet { /** * Compute a list of queues that were updated in this transaction set */ public synchronized List < LocalQueue > updatedQueues ( List < String > deliveredMessageIDs ) throws FFMQException { } }
int len = deliveredMessageIDs . size ( ) ; List < LocalQueue > updatedQueues = new ArrayList < > ( len ) ; for ( int n = 0 ; n < len ; n ++ ) { String deliveredMessageID = deliveredMessageIDs . get ( len - n - 1 ) ; boolean found = false ; Iterator < TransactionItem > entries = items . iterator ( ) ; while ( entries . hasNext ( ) ) { TransactionItem item = entries . next ( ) ; if ( item . getMessageId ( ) . equals ( deliveredMessageID ) ) { found = true ; LocalQueue localQueue = item . getDestination ( ) ; if ( ! updatedQueues . contains ( localQueue ) ) updatedQueues . add ( localQueue ) ; break ; } } if ( ! found ) throw new FFMQException ( "Message does not belong to transaction : " + deliveredMessageID , "INTERNAL_ERROR" ) ; } return updatedQueues ;
public class SoyFutureValueProvider { /** * Calls Future . get ( ) and then converts the result to SoyValue . Note that this result can never * return { @ code null } , since null converts to { @ code NullData . INSTANCE } . */ @ Override @ Nonnull protected final SoyValue compute ( ) { } }
try { if ( ! future . isDone ( ) ) { futureBlockCallback . get ( ) . beforeBlock ( ) ; } return SoyValueConverter . INSTANCE . convert ( future . get ( ) ) . resolve ( ) ; } catch ( ExecutionException e ) { throw new SoyFutureException ( e . getCause ( ) ) ; } catch ( Throwable e ) { throw new SoyFutureException ( e ) ; }
public class ButtonBar { /** * Builds and returns { @ link VisTable } containing buttons in platform dependant order . Note that calling this multiple * times will remove buttons from previous tables . */ public VisTable createTable ( ) { } }
VisTable table = new VisTable ( true ) ; table . left ( ) ; boolean spacingValid = false ; for ( int i = 0 ; i < order . length ( ) ; i ++ ) { char ch = order . charAt ( i ) ; if ( ignoreSpacing == false && ch == ' ' && spacingValid ) { table . add ( ) . width ( sizes . buttonBarSpacing ) ; spacingValid = false ; } Button button = buttons . get ( ch ) ; if ( button != null ) { table . add ( button ) ; spacingValid = true ; } } return table ;
public class TSProcessor { /** * Z - Normalize routine . * @ param series the input timeseries . * @ param normalizationThreshold the zNormalization threshold value . * @ return Z - normalized time - series . */ public double [ ] znorm ( double [ ] series , double normalizationThreshold ) { } }
double [ ] res = new double [ series . length ] ; double sd = stDev ( series ) ; if ( sd < normalizationThreshold ) { // return series . clone ( ) ; // return array of zeros return res ; } double mean = mean ( series ) ; for ( int i = 0 ; i < res . length ; i ++ ) { res [ i ] = ( series [ i ] - mean ) / sd ; } return res ;
public class GetParameterHistoryResult { /** * A list of parameters returned by the request . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setParameters ( java . util . Collection ) } or { @ link # withParameters ( java . util . Collection ) } if you want to * override the existing values . * @ param parameters * A list of parameters returned by the request . * @ return Returns a reference to this object so that method calls can be chained together . */ public GetParameterHistoryResult withParameters ( ParameterHistory ... parameters ) { } }
if ( this . parameters == null ) { setParameters ( new com . amazonaws . internal . SdkInternalList < ParameterHistory > ( parameters . length ) ) ; } for ( ParameterHistory ele : parameters ) { this . parameters . add ( ele ) ; } return this ;
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getOCD ( ) { } }
if ( ocdEClass == null ) { ocdEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 309 ) ; } return ocdEClass ;
public class N { /** * Mostly it ' s designed for one - step operation to complete the operation in one step . * < code > java . util . stream . Stream < / code > is preferred for multiple phases operation . * @ param a * @ param fromIndex * @ param toIndex * @ param func * @ return */ public static < T , E extends Exception > FloatList mapToFloat ( final T [ ] a , final int fromIndex , final int toIndex , final Try . ToFloatFunction < ? super T , E > func ) throws E { } }
checkFromToIndex ( fromIndex , toIndex , len ( a ) ) ; N . checkArgNotNull ( func ) ; if ( N . isNullOrEmpty ( a ) ) { return new FloatList ( ) ; } final FloatList result = new FloatList ( toIndex - fromIndex ) ; for ( int i = fromIndex ; i < toIndex ; i ++ ) { result . add ( func . applyAsFloat ( a [ i ] ) ) ; } return result ;
public class PaddingLogRecord { /** * Called to perform recovery action during a warm start of the ObjectManager . * @ param objectManagerState of the ObjectManager performing recovery . * @ throws ObjectManagerException */ public void performRecovery ( ObjectManagerState objectManagerState ) throws ObjectManagerException { } }
if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , "performRecovery" , new Object [ ] { objectManagerState } ) ; // Nothing to do . if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , "performRecovery" ) ;
public class DataFile { /** * Returns whether the file exists in the Data API * @ return true iff the file exists * @ throws APIException if there were any problems communicating with the DataAPI */ public boolean exists ( ) throws APIException { } }
HttpResponse response = client . head ( getUrl ( ) ) ; int status = response . getStatusLine ( ) . getStatusCode ( ) ; if ( status != 200 && status != 404 ) { throw APIException . fromHttpResponse ( response ) ; } return ( 200 == status ) ;
public class InitStrategy { /** * Obtains the instance to be used . */ public static InitStrategy get ( ClassLoader cl ) throws IOException { } }
Iterator < InitStrategy > it = ServiceLoader . load ( InitStrategy . class , cl ) . iterator ( ) ; if ( ! it . hasNext ( ) ) { return new InitStrategy ( ) ; // default } InitStrategy s = it . next ( ) ; LOGGER . log ( Level . FINE , "Using {0} as InitStrategy" , s ) ; return s ;
public class Equation { /** * Checks to see if the token is an integer scalar * @ return true if integer or false if not */ private static boolean isVariableInteger ( TokenList . Token t ) { } }
if ( t == null ) return false ; return t . getScalarType ( ) == VariableScalar . Type . INTEGER ;
public class MockMethodDispatcher { /** * Calls { @ code MockMethodAdvice # handle } */ public Callable < ? > handle ( Object instance , Method origin , Object [ ] arguments ) throws Throwable { } }
try { return ( Callable < ? > ) mAdvice . getClass ( ) . getMethod ( "handle" , Object . class , Method . class , Object [ ] . class ) . invoke ( mAdvice , instance , origin , arguments ) ; } catch ( InvocationTargetException e ) { throw e . getCause ( ) ; }
public class ReconciliationLineItemReportServiceLocator { /** * For the given interface , get the stub implementation . * If this service has no port for the given interface , * then ServiceException is thrown . */ public java . rmi . Remote getPort ( Class serviceEndpointInterface ) throws javax . xml . rpc . ServiceException { } }
try { if ( com . google . api . ads . admanager . axis . v201805 . ReconciliationLineItemReportServiceInterface . class . isAssignableFrom ( serviceEndpointInterface ) ) { com . google . api . ads . admanager . axis . v201805 . ReconciliationLineItemReportServiceSoapBindingStub _stub = new com . google . api . ads . admanager . axis . v201805 . ReconciliationLineItemReportServiceSoapBindingStub ( new java . net . URL ( ReconciliationLineItemReportServiceInterfacePort_address ) , this ) ; _stub . setPortName ( getReconciliationLineItemReportServiceInterfacePortWSDDServiceName ( ) ) ; return _stub ; } } catch ( java . lang . Throwable t ) { throw new javax . xml . rpc . ServiceException ( t ) ; } throw new javax . xml . rpc . ServiceException ( "There is no stub implementation for the interface: " + ( serviceEndpointInterface == null ? "null" : serviceEndpointInterface . getName ( ) ) ) ;
public class DateTimePicker { /** * Shows the dialog to the user . Make sure to call * { @ link # setListener ( ) } before calling this . */ public void show ( ) { } }
if ( mListener == null ) { throw new NullPointerException ( "Attempting to bind null listener to SlideDateTimePicker" ) ; } if ( mInitialDate == null ) { setInitialDate ( new Date ( ) ) ; } DateTimeDialogFragment dialogFragment = DateTimeDialogFragment . newInstance ( mListener , mInitialDate , mMinDate , mMaxDate , mIsClientSpecified24HourTime , mIs24HourTime , mTheme , mIndicatorColor ) ; dialogFragment . show ( mFragmentManager , DateTimeDialogFragment . TAG_SLIDE_DATE_TIME_DIALOG_FRAGMENT ) ;
public class MultipleRecommendationRunner { /** * Runs Ranksys - based recommenders . * @ param paths the input and output paths . * @ param properties the properties . */ public static void runRanksysRecommenders ( final Set < String > paths , final Properties properties ) { } }
for ( AbstractRunner < Long , Long > rec : instantiateRanksysRecommenders ( paths , properties ) ) { RecommendationRunner . run ( rec ) ; }
public class ReflectUtils { /** * 得到set方法 * @ param clazz 类 * @ param property 属性 * @ param propertyClazz 属性 * @ return Method 方法对象 */ public static Method getPropertySetterMethod ( Class clazz , String property , Class propertyClazz ) { } }
String methodName = "set" + property . substring ( 0 , 1 ) . toUpperCase ( ) + property . substring ( 1 ) ; try { return clazz . getMethod ( methodName , propertyClazz ) ; } catch ( NoSuchMethodException e ) { throw new SofaRpcRuntimeException ( "No setter method for " + clazz . getName ( ) + "#" + property , e ) ; }
public class NanoUtils { /** * Adapt { @ code parser } to a { @ link Marshaller } . * @ since 1.0.0 */ public static < T extends MessageNano > Marshaller < T > marshaller ( MessageNanoFactory < T > factory ) { } }
return new MessageMarshaller < > ( factory ) ;
public class CommerceCountryPersistenceImpl { /** * Returns the last commerce country in the ordered set where uuid = & # 63 ; . * @ param uuid the uuid * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching commerce country * @ throws NoSuchCountryException if a matching commerce country could not be found */ @ Override public CommerceCountry findByUuid_Last ( String uuid , OrderByComparator < CommerceCountry > orderByComparator ) throws NoSuchCountryException { } }
CommerceCountry commerceCountry = fetchByUuid_Last ( uuid , orderByComparator ) ; if ( commerceCountry != null ) { return commerceCountry ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( "}" ) ; throw new NoSuchCountryException ( msg . toString ( ) ) ;
public class ConfigurationFile { /** * Finds a single file in { @ code searchDir } whose name ends with " { @ code . backupType . xml } " * and returns its name with { @ code . backupType } removed . * @ param searchDir the directory to search * @ param backupType the backup type ; { @ link # LAST } , { @ link # BOOT } , { @ link # INITIAL } or { @ code v \ d + } * @ return the single file that meets the criteria . Will not return { @ code null } * @ throws IllegalStateException if no files meet the criteria or more than one does * @ throws IllegalArgumentException if they file that meets the criteria ' s full name is " { @ code backupType . xml } " */ private String findMainFileFromBackupSuffix ( File searchDir , String backupType ) { } }
final String suffix = "." + backupType + ".xml" ; File [ ] files = null ; if ( searchDir . exists ( ) && searchDir . isDirectory ( ) ) { files = searchDir . listFiles ( new FilenameFilter ( ) { @ Override public boolean accept ( File dir , String name ) { return name . endsWith ( suffix ) ; } } ) ; } if ( files == null || files . length == 0 ) { throw ControllerLogger . ROOT_LOGGER . configurationFileNotFound ( suffix , searchDir ) ; } else if ( files . length > 1 ) { throw ControllerLogger . ROOT_LOGGER . ambiguousConfigurationFiles ( backupType , searchDir , suffix ) ; } String matchName = files [ 0 ] . getName ( ) ; if ( matchName . equals ( suffix ) ) { throw ControllerLogger . ROOT_LOGGER . configurationFileNameNotAllowed ( backupType ) ; } String prefix = matchName . substring ( 0 , matchName . length ( ) - suffix . length ( ) ) ; return prefix + ".xml" ;
public class PdfContentByte { /** * Appends a straight line segment from the current point < I > ( x , y ) < / I > . The new current * point is < I > ( x , y ) < / I > . * @ param x new x - coordinate * @ param y new y - coordinate */ public void lineTo ( float x , float y ) { } }
content . append ( x ) . append ( ' ' ) . append ( y ) . append ( " l" ) . append_i ( separator ) ;
public class GitHubClientCacheOps { /** * Removes all not active dirs with old caches . * This method is invoked after each save of global plugin config * @ param configs active server configs to exclude caches from cleanup */ public static void clearRedundantCaches ( List < GitHubServerConfig > configs ) { } }
Path baseCacheDir = getBaseCacheDir ( ) ; if ( notExists ( baseCacheDir ) ) { return ; } final Set < String > actualNames = from ( configs ) . filter ( withEnabledCache ( ) ) . transform ( toCacheDir ( ) ) . transform ( cacheToName ( ) ) . toSet ( ) ; try ( DirectoryStream < Path > caches = newDirectoryStream ( baseCacheDir , notInCaches ( actualNames ) ) ) { deleteEveryIn ( caches ) ; } catch ( IOException e ) { LOGGER . warn ( "Can't list cache dirs in {}" , baseCacheDir , e ) ; }
public class LocalTransaction { /** * Calculates the list of nodes to which a commit / rollback needs to be sent based on the nodes to which prepare * was sent . If the commit / rollback is to be sent in the same topologyId , then the ' recipients ' param is returned back . * If the current topologyId is different than the topologyId of this transaction ( { @ link # getTopologyId ( ) } then * this method returns the reunion between ' recipients ' and { @ link # getRemoteLocksAcquired ( ) } from which it discards * the members that left . */ public Collection < Address > getCommitNodes ( Collection < Address > recipients , CacheTopology cacheTopology ) { } }
int currentTopologyId = cacheTopology . getTopologyId ( ) ; List < Address > members = cacheTopology . getMembers ( ) ; if ( trace ) log . tracef ( "getCommitNodes recipients=%s, currentTopologyId=%s, members=%s, txTopologyId=%s" , recipients , currentTopologyId , members , getTopologyId ( ) ) ; if ( hasModification ( ClearCommand . class ) ) { return members ; } if ( recipients == null ) { return null ; } // Include all the nodes we sent a LockControlCommand to and are not in the recipients list now // either because the topology changed , or because the lock failed . // Also include nodes that are no longer in the cluster , so if JGroups retransmits a lock / prepare command // after a merge , it also retransmits the commit / rollback . Set < Address > allRecipients = new HashSet < > ( getRemoteLocksAcquired ( ) ) ; allRecipients . addAll ( recipients ) ; if ( trace ) log . tracef ( "The merged list of nodes to send commit/rollback is %s" , allRecipients ) ; return allRecipients ;
public class TBSONProtocol { /** * Push a new write context onto the stack . */ protected void pushContext ( Context c ) { } }
Stack < Context > stack = threadSafeContextStack . get ( ) ; if ( stack == null ) { stack = new Stack < Context > ( ) ; stack . push ( c ) ; threadSafeContextStack . set ( stack ) ; } else { threadSafeContextStack . get ( ) . push ( c ) ; }
public class StorePackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Lazily resolves and caches the {@code ProgressTopicType} enum metadata from the
     * registered EMF package. Classifier index 89 must match the generated package model.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EEnum getProgressTopicType() {
        if (progressTopicTypeEEnum == null) {
            // First access: look the classifier up in the global package registry and cache it.
            progressTopicTypeEEnum = (EEnum) EPackage.Registry.INSTANCE
                    .getEPackage(StorePackage.eNS_URI).getEClassifiers().get(89);
        }
        return progressTopicTypeEEnum;
    }
}
public class IdGenerator { /** * Generate Id when given sequence generation strategy . * @ param m * @ param client * @ param keyValue * @ param e */ private Object onSequenceGenerator ( EntityMetadata m , Client < ? > client , IdDiscriptor keyValue , Object e ) { } }
Object seqgenerator = getAutoGenClazz ( client ) ; if ( seqgenerator instanceof SequenceGenerator ) { Object generatedId = ( ( SequenceGenerator ) seqgenerator ) . generate ( keyValue . getSequenceDiscriptor ( ) , client , m . getIdAttribute ( ) . getJavaType ( ) . getSimpleName ( ) ) ; try { generatedId = PropertyAccessorHelper . fromSourceToTargetClass ( m . getIdAttribute ( ) . getJavaType ( ) , generatedId . getClass ( ) , generatedId ) ; PropertyAccessorHelper . setId ( e , m , generatedId ) ; return generatedId ; } catch ( IllegalArgumentException iae ) { log . error ( "Unknown integral data type for ids : " + m . getIdAttribute ( ) . getJavaType ( ) ) ; throw new KunderaException ( "Unknown integral data type for ids : " + m . getIdAttribute ( ) . getJavaType ( ) , iae ) ; } } throw new IllegalArgumentException ( GenerationType . class . getSimpleName ( ) + "." + GenerationType . SEQUENCE + " Strategy not supported by this client :" + client . getClass ( ) . getName ( ) ) ;
public class OrthologizeTool { /** * Sets the reportable implementation for { @ link System # out } and * { @ link System # err } . */ private void reportable ( ) { } }
final SimpleOutput reportable = new SimpleOutput ( ) ; reportable . setErrorStream ( System . err ) ; reportable . setOutputStream ( System . out ) ; setReportable ( reportable ) ;
public class Ifc4PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Lazily resolves and caches the {@code IfcWorkTime} class metadata from the
     * registered EMF package. Classifier index 774 must match the generated package model.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcWorkTime() {
        if (ifcWorkTimeEClass == null) {
            // First access: look the classifier up in the global package registry and cache it.
            ifcWorkTimeEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(774);
        }
        return ifcWorkTimeEClass;
    }
}
public class InternalXbaseParser {
    /**
     * InternalXbase.g:267:1: ruleXEqualityExpression : ( ( rule__XEqualityExpression__Group__0 ) ) ;
     *
     * ANTLR-generated rule method: delegates to the group rule for XEqualityExpression,
     * firing the grammar-access before/after callbacks only when not backtracking.
     */
    public final void ruleXEqualityExpression() throws RecognitionException {
        // Record the current parser stack depth so it can be restored in the finally block.
        int stackSize = keepStackSize();
        try {
            // InternalXbase.g:271:2: ( ( ( rule__XEqualityExpression__Group__0 ) ) )
            // InternalXbase.g:272:2: ( ( rule__XEqualityExpression__Group__0 ) )
            {
                // InternalXbase.g:272:2: ( ( rule__XEqualityExpression__Group__0 ) )
                // InternalXbase.g:273:3: ( rule__XEqualityExpression__Group__0 )
                {
                    if (state.backtracking == 0) {
                        // Notify listeners before entering the group (content assist hook).
                        before(grammarAccess.getXEqualityExpressionAccess().getGroup());
                    }
                    // InternalXbase.g:274:3: ( rule__XEqualityExpression__Group__0 )
                    // InternalXbase.g:274:4: rule__XEqualityExpression__Group__0
                    {
                        pushFollow(FOLLOW_2);
                        rule__XEqualityExpression__Group__0();
                        state._fsp--;
                        if (state.failed) return;
                    }
                    if (state.backtracking == 0) {
                        // Notify listeners after leaving the group.
                        after(grammarAccess.getXEqualityExpressionAccess().getGroup());
                    }
                }
            }
        }
        catch (RecognitionException re) {
            // Standard ANTLR error recovery: report and resynchronize.
            reportError(re);
            recover(input, re);
        }
        finally {
            restoreStackSize(stackSize);
        }
        return;
    }
}
public class Joiner {
    /**
     * Returns a joiner with the same behavior as this one, except automatically substituting {@code
     * nullText} for any provided null elements.
     */
    @CheckReturnValue
    public Joiner useForNull(final String nullText) {
        checkNotNull(nullText);
        // Decorate this joiner: non-null parts delegate to the outer joiner's conversion,
        // null parts are replaced with the substitute text.
        return new Joiner(this) {
            @Override
            CharSequence toString(@Nullable Object part) {
                return (part == null) ? nullText : Joiner.this.toString(part);
            }

            // Configuring null handling twice would be ambiguous, so both mutators fail fast.
            @Override
            public Joiner useForNull(String nullText) {
                throw new UnsupportedOperationException("already specified useForNull");
            }

            @Override
            public Joiner skipNulls() {
                throw new UnsupportedOperationException("already specified useForNull");
            }
        };
    }
}
public class SpiderController {
    /**
     * Adds a new seed, if it wasn't already processed.
     *
     * @param uri    the uri
     * @param method the http method used for fetching the resource
     */
    protected void addSeed(URI uri, String method) {
        // Build the canonical representation used to detect already-visited resources.
        String canonicalURI;
        try {
            canonicalURI = URLCanonicalizer.buildCleanedParametersURIRepresentation(uri,
                    spider.getSpiderParam().getHandleParameters(),
                    spider.getSpiderParam().isHandleODataParametersVisited());
        } catch (URIException e) {
            // A URI that cannot be canonicalized is silently skipped.
            return;
        }
        synchronized (visitedGet) {
            if (visitedGet.contains(canonicalURI)) {
                log.debug("URI already visited: " + canonicalURI);
                return;
            }
            visitedGet.add(canonicalURI);
        }
        // Submit a fetch task for the new seed and notify listeners.
        SpiderTask task = new SpiderTask(spider, null, uri, 0, method);
        spider.submitTask(task);
        spider.notifyListenersFoundURI(uri.toString(), method, FetchStatus.SEED);
    }
}
public class ListItemBox { /** * Draw the list item symbol , number or image depending on list - style - type */ public void drawMarker ( Graphics2D g ) { } }
Shape oldclip = g . getClip ( ) ; if ( clipblock != null ) g . setClip ( applyClip ( oldclip , clipblock . getClippedContentBounds ( ) ) ) ; if ( image != null ) { if ( ! drawImage ( g ) ) drawBullet ( g ) ; } else drawBullet ( g ) ; g . setClip ( oldclip ) ;
public class ScheduleGenerator { /** * ScheduleFromPeriods generation for given { referenceDate , startDate , maturityDate } . Method using Date instead of LocalDate for backward compatibility . * Generates a schedule based on some meta data . * < ul > * < li > The schedule generation considers short stub periods at beginning or at the end . < / li > * < li > Date rolling is performed using the provided businessdayCalendar . < / li > * < / ul > * The reference date is used internally to represent all dates as doubles . * @ param referenceDate The date which is used in the schedule to internally convert dates to doubles , i . e . , the date where t = 0. * @ param startDate The start date of the first period ( unadjusted - adjustments take place during schedule generation ) . * @ param maturityDate The end date of the last period ( unadjusted - adjustments take place during schedule generation ) . * @ param frequency The frequency ( as String ) . * @ param daycountConvention The daycount convention ( as String ) . * @ param shortPeriodConvention If short period exists , have it first or last ( as String ) . * @ param dateRollConvention Adjustment to be applied to the all dates ( as String ) . * @ param businessdayCalendar Businessday calendar ( holiday calendar ) to be used for date roll adjustment . * @ param fixingOffsetDays Number of business days to be added to period start to get the fixing date . * @ param paymentOffsetDays Number of business days to be added to period end to get the payment date . * @ return The corresponding schedule */ public static Schedule createScheduleFromConventions ( Date referenceDate , Date startDate , Date maturityDate , String frequency , String daycountConvention , String shortPeriodConvention , String dateRollConvention , BusinessdayCalendar businessdayCalendar , int fixingOffsetDays , int paymentOffsetDays ) { } }
return createScheduleFromConventions ( Instant . ofEpochMilli ( referenceDate . getTime ( ) ) . atZone ( ZoneId . systemDefault ( ) ) . toLocalDate ( ) , Instant . ofEpochMilli ( startDate . getTime ( ) ) . atZone ( ZoneId . systemDefault ( ) ) . toLocalDate ( ) , Instant . ofEpochMilli ( maturityDate . getTime ( ) ) . atZone ( ZoneId . systemDefault ( ) ) . toLocalDate ( ) , frequency , daycountConvention , shortPeriodConvention , dateRollConvention , businessdayCalendar , fixingOffsetDays , paymentOffsetDays ) ;
public class CoreActivity { /** * Prepares a new activity for the Translet Rule by taking * the results of the process that was created earlier . * @ param requestName the request name * @ param requestMethod the request method * @ param transletRule the translet rule * @ param parentTranslet the process result that was created earlier */ private void prepare ( String requestName , MethodType requestMethod , TransletRule transletRule , Translet parentTranslet ) { } }
try { if ( log . isDebugEnabled ( ) ) { log . debug ( "Translet " + transletRule ) ; } newTranslet ( requestMethod , requestName , transletRule , parentTranslet ) ; if ( parentTranslet == null ) { if ( isIncluded ( ) ) { backupCurrentActivity ( ) ; saveCurrentActivity ( ) ; } else { saveCurrentActivity ( ) ; } adapt ( ) ; } prepareAspectAdviceRule ( transletRule , ( parentTranslet != null ) ) ; parseRequest ( ) ; parsePathVariables ( ) ; if ( parentTranslet == null ) { resolveLocale ( ) ; } } catch ( ActivityTerminatedException e ) { throw e ; } catch ( Exception e ) { throw new ActivityPrepareException ( "Failed to prepare activity for translet " + transletRule , e ) ; }
public class MtasSolrBaseList { /** * Gets the . * @ param key * the key * @ return the mtas solr status * @ throws IOException * Signals that an I / O exception has occurred . */ public final MtasSolrStatus get ( String key ) throws IOException { } }
return index . get ( Objects . requireNonNull ( key , "no key provided" ) ) ;
public class PHS398FellowshipSupplementalV1_1Generator { /** * This method is used to get FederalStipendRequested XMLObject and set * additional information data to it . */ private FederalStipendRequested getFederalStipendRequested ( ) { } }
FederalStipendRequested federalStipendRequested = FederalStipendRequested . Factory . newInstance ( ) ; ProposalDevelopmentBudgetExtContract budget = s2SCommonBudgetService . getBudget ( pdDoc . getDevelopmentProposal ( ) ) ; if ( budget == null ) { return federalStipendRequested ; } ScaleTwoDecimal sumOfLineItemCost = ScaleTwoDecimal . ZERO ; ScaleTwoDecimal numberOfMonths = ScaleTwoDecimal . ZERO ; for ( BudgetPeriodContract budgetPeriod : budget . getBudgetPeriods ( ) ) { if ( budgetPeriod . getBudgetPeriod ( ) == 1 ) { for ( BudgetLineItemContract budgetLineItem : budgetPeriod . getBudgetLineItems ( ) ) { if ( getCostElementsByParam ( ConfigurationConstants . STIPEND_COST_ELEMENTS ) . contains ( budgetLineItem . getCostElementBO ( ) . getCostElement ( ) ) ) { sumOfLineItemCost = sumOfLineItemCost . add ( budgetLineItem . getLineItemCost ( ) ) ; numberOfMonths = numberOfMonths . add ( getNumberOfMonths ( budgetLineItem . getStartDate ( ) , budgetLineItem . getEndDate ( ) ) ) ; } } } } federalStipendRequested . setAmount ( sumOfLineItemCost . bigDecimalValue ( ) ) ; federalStipendRequested . setNumberOfMonths ( numberOfMonths . bigDecimalValue ( ) ) ; return federalStipendRequested ;
public class druidGLexer {
    /**
     * $ANTLR start "OCTAL_ESC"
     *
     * ANTLR-generated lexer rule matching an octal escape sequence: a backslash followed
     * by one to three octal digits (three digits only when the first is '0'..'3').
     * The lookahead below (alt46) decides which of the three alternatives applies.
     */
    public final void mOCTAL_ESC() throws RecognitionException {
        try {
            // druidG.g:741:5: ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) )
            int alt46 = 3;
            int LA46_0 = input.LA(1);
            if ((LA46_0 == '\\')) {
                int LA46_1 = input.LA(2);
                if (((LA46_1 >= '0' && LA46_1 <= '3'))) {
                    // Up to three digits possible; peek further to pick the longest match.
                    int LA46_2 = input.LA(3);
                    if (((LA46_2 >= '0' && LA46_2 <= '7'))) {
                        int LA46_4 = input.LA(4);
                        if (((LA46_4 >= '0' && LA46_4 <= '7'))) {
                            alt46 = 1;
                        } else {
                            alt46 = 2;
                        }
                    } else {
                        alt46 = 3;
                    }
                } else if (((LA46_1 >= '4' && LA46_1 <= '7'))) {
                    // First digit '4'..'7': at most two digits can follow the backslash.
                    int LA46_3 = input.LA(3);
                    if (((LA46_3 >= '0' && LA46_3 <= '7'))) {
                        alt46 = 2;
                    } else {
                        alt46 = 3;
                    }
                } else {
                    // Backslash not followed by an octal digit: no viable alternative.
                    int nvaeMark = input.mark();
                    try {
                        input.consume();
                        NoViableAltException nvae = new NoViableAltException("", 46, 1, input);
                        throw nvae;
                    } finally {
                        input.rewind(nvaeMark);
                    }
                }
            } else {
                NoViableAltException nvae = new NoViableAltException("", 46, 0, input);
                throw nvae;
            }
            switch (alt46) {
                case 1:
                    // druidG.g:741:9: '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' )
                    {
                        match('\\');
                        if ((input.LA(1) >= '0' && input.LA(1) <= '3')) {
                            input.consume();
                        } else {
                            MismatchedSetException mse = new MismatchedSetException(null, input);
                            recover(mse);
                            throw mse;
                        }
                        if ((input.LA(1) >= '0' && input.LA(1) <= '7')) {
                            input.consume();
                        } else {
                            MismatchedSetException mse = new MismatchedSetException(null, input);
                            recover(mse);
                            throw mse;
                        }
                        if ((input.LA(1) >= '0' && input.LA(1) <= '7')) {
                            input.consume();
                        } else {
                            MismatchedSetException mse = new MismatchedSetException(null, input);
                            recover(mse);
                            throw mse;
                        }
                    }
                    break;
                case 2:
                    // druidG.g:742:9: '\\\\' ( '0' .. '7' ) ( '0' .. '7' )
                    {
                        match('\\');
                        if ((input.LA(1) >= '0' && input.LA(1) <= '7')) {
                            input.consume();
                        } else {
                            MismatchedSetException mse = new MismatchedSetException(null, input);
                            recover(mse);
                            throw mse;
                        }
                        if ((input.LA(1) >= '0' && input.LA(1) <= '7')) {
                            input.consume();
                        } else {
                            MismatchedSetException mse = new MismatchedSetException(null, input);
                            recover(mse);
                            throw mse;
                        }
                    }
                    break;
                case 3:
                    // druidG.g:743:9: '\\\\' ( '0' .. '7' )
                    {
                        match('\\');
                        if ((input.LA(1) >= '0' && input.LA(1) <= '7')) {
                            input.consume();
                        } else {
                            MismatchedSetException mse = new MismatchedSetException(null, input);
                            recover(mse);
                            throw mse;
                        }
                    }
                    break;
            }
        } finally {
            // do for sure before leaving
        }
    }
}
public class Encoder {
    /**
     * Interleave "bits" with corresponding error correction bytes. On success, store the result in
     * "result". The interleave rule is complicated. See 8.6 of JISX0510:2004 (p.37) for details.
     *
     * @param bits          the data bits; must contain exactly {@code numDataBytes} bytes
     * @param numTotalBytes total byte count (data + error correction) the output must have
     * @param numDataBytes  number of data bytes within {@code bits}
     * @param numRSBlocks   number of Reed-Solomon blocks to divide the data into
     * @return the interleaved bit array of exactly {@code numTotalBytes} bytes
     * @throws WriterException if any size invariant is violated
     */
    static BitArray interleaveWithECBytes(BitArray bits, int numTotalBytes, int numDataBytes,
            int numRSBlocks) throws WriterException {
        // "bits" must have "getNumDataBytes" bytes of data.
        if (bits.getSizeInBytes() != numDataBytes) {
            throw new WriterException("Number of bits and data bytes does not match");
        }
        // Step 1. Divide data bytes into blocks and generate error correction bytes for them. We'll
        // store the divided data bytes blocks and error correction bytes blocks into "blocks".
        int dataBytesOffset = 0;
        int maxNumDataBytes = 0;
        int maxNumEcBytes = 0;
        // Since, we know the number of reedsolmon blocks, we can initialize the vector with the number.
        Collection<BlockPair> blocks = new ArrayList<>(numRSBlocks);
        for (int i = 0; i < numRSBlocks; ++i) {
            int[] numDataBytesInBlock = new int[1];
            int[] numEcBytesInBlock = new int[1];
            // Per-block sizes may differ; this fills in the data/EC byte counts for block i.
            getNumDataBytesAndNumECBytesForBlockID(numTotalBytes, numDataBytes, numRSBlocks, i,
                    numDataBytesInBlock, numEcBytesInBlock);
            int size = numDataBytesInBlock[0];
            byte[] dataBytes = new byte[size];
            bits.toBytes(8 * dataBytesOffset, dataBytes, 0, size);
            byte[] ecBytes = generateECBytes(dataBytes, numEcBytesInBlock[0]);
            blocks.add(new BlockPair(dataBytes, ecBytes));
            maxNumDataBytes = Math.max(maxNumDataBytes, size);
            maxNumEcBytes = Math.max(maxNumEcBytes, ecBytes.length);
            dataBytesOffset += numDataBytesInBlock[0];
        }
        if (numDataBytes != dataBytesOffset) {
            throw new WriterException("Data bytes does not match offset");
        }
        BitArray result = new BitArray();
        // First, place data blocks: byte i of every block, in block order (column-wise interleave).
        for (int i = 0; i < maxNumDataBytes; ++i) {
            for (BlockPair block : blocks) {
                byte[] dataBytes = block.getDataBytes();
                if (i < dataBytes.length) {
                    result.appendBits(dataBytes[i], 8);
                }
            }
        }
        // Then, place error correction blocks the same way.
        for (int i = 0; i < maxNumEcBytes; ++i) {
            for (BlockPair block : blocks) {
                byte[] ecBytes = block.getErrorCorrectionBytes();
                if (i < ecBytes.length) {
                    result.appendBits(ecBytes[i], 8);
                }
            }
        }
        if (numTotalBytes != result.getSizeInBytes()) { // Should be same.
            throw new WriterException("Interleaving error: " + numTotalBytes + " and "
                    + result.getSizeInBytes() + " differ.");
        }
        return result;
    }
}
public class TypeConverter { /** * Converts value into an object type specified by Type Code or returns null * when conversion is not possible . * @ param type the Class type for the data type . * @ param value the value to convert . * @ return object value of type corresponding to TypeCode , or null when * conversion is not supported . * @ see TypeConverter # toTypeCode ( Class ) */ @ SuppressWarnings ( "unchecked" ) public static < T > T toNullableType ( Class < T > type , Object value ) { } }
TypeCode resultType = toTypeCode ( type ) ; if ( value == null ) return null ; if ( type . isInstance ( value ) ) return ( T ) value ; // Convert to known types if ( resultType == TypeCode . String ) return type . cast ( StringConverter . toNullableString ( value ) ) ; else if ( resultType == TypeCode . Integer ) return type . cast ( IntegerConverter . toNullableInteger ( value ) ) ; else if ( resultType == TypeCode . Long ) return type . cast ( LongConverter . toNullableLong ( value ) ) ; else if ( resultType == TypeCode . Float ) return type . cast ( FloatConverter . toNullableFloat ( value ) ) ; else if ( resultType == TypeCode . Double ) return type . cast ( DoubleConverter . toNullableDouble ( value ) ) ; else if ( resultType == TypeCode . Duration ) return type . cast ( DurationConverter . toNullableDuration ( value ) ) ; else if ( resultType == TypeCode . DateTime ) return type . cast ( DateTimeConverter . toNullableDateTime ( value ) ) ; else if ( resultType == TypeCode . Array ) return type . cast ( ArrayConverter . toNullableArray ( value ) ) ; else if ( resultType == TypeCode . Map ) return type . cast ( MapConverter . toNullableMap ( value ) ) ; // Convert to unknown type try { return type . cast ( value ) ; } catch ( Throwable t ) { return null ; }
public class CassandraCpoAdapter { /** * DOCUMENT ME ! * @ param obj DOCUMENT ME ! * @ param type DOCUMENT ME ! * @ param name DOCUMENT ME ! * @ param session DOCUMENT ME ! * @ return DOCUMENT ME ! * @ throws CpoException DOCUMENT ME ! */ protected < T > String getGroupType ( T obj , String type , String name , Session session ) throws CpoException { } }
String retType = type ; long objCount ; if ( CpoAdapter . PERSIST_GROUP . equals ( retType ) ) { objCount = existsObject ( name , obj , session , null ) ; if ( objCount == 0 ) { retType = CpoAdapter . CREATE_GROUP ; } else if ( objCount == 1 ) { retType = CpoAdapter . UPDATE_GROUP ; } else { throw new CpoException ( "Persist can only UPDATE one record. Your EXISTS function returned 2 or more." ) ; } } return retType ;
public class Tree {
    /**
     * Return (depth &lt;&lt; 32) | (leaves), in 1 pass.
     *
     * Walks the serialized tree once with an anonymous visitor that counts leaves and
     * tracks the maximum depth reached, then packs both counters into a single long:
     * high 32 bits = max depth, low 32 bits = leaf count.
     */
    public static long depth_leaves(AutoBuffer tbits, boolean regression) {
        return new TreeVisitor<RuntimeException>(tbits, regression) {
            // _maxdepth: deepest level seen; _depth: current level; _leaves: leaves visited.
            int _maxdepth, _depth, _leaves;
            @Override
            protected TreeVisitor leafFloat(float fl) {
                _leaves++;
                if (_depth > _maxdepth) _maxdepth = _depth;
                return this;
            }
            @Override
            protected TreeVisitor leaf(int tclass) {
                _leaves++;
                if (_depth > _maxdepth) _maxdepth = _depth;
                return this;
            }
            @Override
            protected TreeVisitor pre(int col, float fcmp, int off0, int offl, int offr) {
                // Entering an inner node: one level deeper.
                _depth++;
                return this;
            }
            @Override
            protected TreeVisitor post(int col, float fcmp) {
                // Leaving an inner node: back up one level.
                _depth--;
                return this;
            }
            @Override
            long result() {
                return ((long) _maxdepth << 32) | _leaves;
            }
        }.visit().result();
    }
}
public class CrtAuthServer { /** * Get the public key for a user by iterating through all key providers . The first * matching key will be returned . * @ param userName the username to get the key for * @ return the first RSAPublicKey found for the user * @ throws KeyNotFoundException */ private RSAPublicKey getKeyForUser ( String userName ) throws KeyNotFoundException { } }
RSAPublicKey key = null ; for ( final KeyProvider keyProvider : keyProviders ) { try { key = keyProvider . getKey ( userName ) ; break ; } catch ( KeyNotFoundException e ) { // that ' s fine , try the next provider } } if ( key == null ) { throw new KeyNotFoundException ( ) ; } return key ;
public class BigIntStringChecksum { /** * Routine to construct an instance that allows you to print the hex strings . * Such as * String s = BigIntStringChecksum create ( biginteger ) . toString ( ) ; * s . equals ( " bigintcs : 00f3ea - CBA3D0 " ) ; * @ param in the big integer to hexify and md5 check sum * @ return big integer string checksum object */ public static BigIntStringChecksum create ( final BigInteger in ) { } }
if ( in == null ) { throw new SecretShareException ( "Input BigInteger cannot be null" ) ; } final String inHex = in . toString ( HEX_RADIX ) ; final String inAsHex = pad ( inHex ) ; String md5checksum = computeMd5ChecksumLimit6 ( inAsHex ) ; return new BigIntStringChecksum ( inAsHex , md5checksum ) ;
public class ConfigClient { /** * Gets a sink . * < p > Sample code : * < pre > < code > * try ( ConfigClient configClient = ConfigClient . create ( ) ) { * SinkName sinkName = ProjectSinkName . of ( " [ PROJECT ] " , " [ SINK ] " ) ; * LogSink response = configClient . getSink ( sinkName . toString ( ) ) ; * < / code > < / pre > * @ param sinkName Required . The resource name of the sink : * < p > " projects / [ PROJECT _ ID ] / sinks / [ SINK _ ID ] " * " organizations / [ ORGANIZATION _ ID ] / sinks / [ SINK _ ID ] " * " billingAccounts / [ BILLING _ ACCOUNT _ ID ] / sinks / [ SINK _ ID ] " * " folders / [ FOLDER _ ID ] / sinks / [ SINK _ ID ] " * < p > Example : ` " projects / my - project - id / sinks / my - sink - id " ` . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final LogSink getSink ( String sinkName ) { } }
GetSinkRequest request = GetSinkRequest . newBuilder ( ) . setSinkName ( sinkName ) . build ( ) ; return getSink ( request ) ;
public class FactoryDenoiseWaveletAlg { /** * Returns { @ link DenoiseBayesShrink _ F32 Bayes shrink } wavelet based image denoiser . * @ param rule Shrinkage rule . If null then a { @ link ShrinkThresholdSoft _ F32 soft threshold } rule will be used . * @ param imageType Type of image it will process . * @ return Bayes Shrink */ public static < T extends ImageGray < T > > DenoiseWavelet < T > bayes ( ShrinkThresholdRule < T > rule , Class < T > imageType ) { } }
if ( rule == null ) { rule = ( ShrinkThresholdRule < T > ) new ShrinkThresholdSoft_F32 ( ) ; } if ( imageType == GrayF32 . class ) { return ( DenoiseWavelet < T > ) new DenoiseBayesShrink_F32 ( ( ShrinkThresholdRule < GrayF32 > ) rule ) ; } else { throw new IllegalArgumentException ( "Unsupported image type " + imageType ) ; }
public class ORTO { /** * region - - - Option tree methods */ protected Node findWorstOption ( ) { } }
Stack < Node > stack = new Stack < Node > ( ) ; stack . add ( treeRoot ) ; double ratio = Double . MIN_VALUE ; Node out = null ; while ( ! stack . empty ( ) ) { Node node = stack . pop ( ) ; if ( node . getParent ( ) instanceof OptionNode ) { OptionNode myParent = ( OptionNode ) node . getParent ( ) ; int nodeIndex = myParent . getChildIndex ( node ) ; double nodeRatio = myParent . getFFRatio ( nodeIndex ) ; if ( nodeRatio > ratio ) { ratio = nodeRatio ; out = node ; } } if ( node instanceof InnerNode ) { for ( Node child : ( ( InnerNode ) node ) . children ) { stack . add ( child ) ; } } } return out ;
public class VirtualMachineScaleSetsInner { /** * Upgrades one or more virtual machines to the latest SKU set in the VM scale set model . * @ param resourceGroupName The name of the resource group . * @ param vmScaleSetName The name of the VM scale set . * @ param instanceIds The virtual machine scale set instance ids . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the OperationStatusResponseInner object if successful . */ public OperationStatusResponseInner updateInstances ( String resourceGroupName , String vmScaleSetName , List < String > instanceIds ) { } }
return updateInstancesWithServiceResponseAsync ( resourceGroupName , vmScaleSetName , instanceIds ) . toBlocking ( ) . last ( ) . body ( ) ;
public class SnowflakeConnectionV1 {
    /**
     * Method to put data from a stream at a stage location. The data will be
     * uploaded as one file. No splitting is done in this method.
     *
     * <p>Stream size must match the total size of data in the input stream unless
     * compressData parameter is set to true. The caller is responsible for passing
     * the correct size for the data in the stream and releasing the inputStream
     * after the method is called.
     *
     * <p>Note this method is deprecated since streamSize is not required now. The
     * function signature is kept for backward compatibility.
     *
     * @param stageName stage name: e.g. ~ or table name or stage name
     * @param destPrefix path prefix under which the data should be uploaded on the stage
     * @param inputStream input stream from which the data will be uploaded
     * @param destFileName destination file name to use
     * @param streamSize data size in the stream (ignored by this implementation)
     * @throws SQLException failed to put data from a stream at stage
     */
    @Deprecated
    public void uploadStream(String stageName, String destPrefix, InputStream inputStream, String destFileName, long streamSize) throws SQLException {
        // streamSize is intentionally not forwarded (no longer required); the
        // final 'false' disables compression of the uploaded data.
        uploadStreamInternal(stageName, destPrefix, inputStream, destFileName, false);
    }
}
public class Transmitter { /** * Sets the base address for this transmitter . * @ param baseId * the base address for this transmitter . */ public void setBaseId ( byte [ ] baseId ) { } }
if ( baseId == null ) { throw new IllegalArgumentException ( "Transmitter base ID cannot be null." ) ; } if ( baseId . length != TRANSMITTER_ID_SIZE ) { throw new IllegalArgumentException ( String . format ( "Transmitter base ID must be %d bytes long." , Integer . valueOf ( TRANSMITTER_ID_SIZE ) ) ) ; } this . baseId = baseId ;
public class Annotations { /** * Process : @ ConnectionDefinition * @ param annotationRepository The annotation repository * @ param classLoader The class loader * @ param configProperty The config properties * @ param plainConfigProperty The plain config properties * @ return The updated metadata * @ exception Exception Thrown if an error occurs */ private ArrayList < ConnectionDefinition > processConnectionDefinition ( AnnotationRepository annotationRepository , ClassLoader classLoader , ArrayList < ? extends ConfigProperty > configProperty , ArrayList < ? extends ConfigProperty > plainConfigProperty ) throws Exception { } }
ArrayList < ConnectionDefinition > connectionDefinitions = null ; Collection < Annotation > values = annotationRepository . getAnnotation ( javax . resource . spi . ConnectionDefinition . class ) ; if ( values != null ) { connectionDefinitions = new ArrayList < ConnectionDefinition > ( values . size ( ) ) ; for ( Annotation annotation : values ) { ConnectionDefinition cd = attachConnectionDefinition ( annotation , classLoader , configProperty , plainConfigProperty ) ; if ( trace ) log . tracef ( "Adding connection definition: %s" , cd ) ; connectionDefinitions . add ( cd ) ; } } return connectionDefinitions ;
public class IntegrationAccountBatchConfigurationsInner { /** * List the batch configurations for an integration account . * @ param resourceGroupName The resource group name . * @ param integrationAccountName The integration account name . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the List & lt ; BatchConfigurationInner & gt ; object */ public Observable < List < BatchConfigurationInner > > listAsync ( String resourceGroupName , String integrationAccountName ) { } }
return listWithServiceResponseAsync ( resourceGroupName , integrationAccountName ) . map ( new Func1 < ServiceResponse < List < BatchConfigurationInner > > , List < BatchConfigurationInner > > ( ) { @ Override public List < BatchConfigurationInner > call ( ServiceResponse < List < BatchConfigurationInner > > response ) { return response . body ( ) ; } } ) ;
public class RepositoryTags { /** * returns the opening xml - tag associated with the repository element with * id < code > elementId < / code > . * @ return the resulting tag */ public String getCompleteTagById ( int elementId , String characters ) { } }
String result = getOpeningTagById ( elementId ) ; result += characters ; result += getClosingTagById ( elementId ) ; return result ;
public class Swipe { /** * Called to process touch screen events . * @ param event MotionEvent */ public boolean dispatchTouchEvent ( final MotionEvent event ) { } }
checkNotNull ( event , "event == null" ) ; boolean isEventConsumed = false ; switch ( event . getAction ( ) ) { case MotionEvent . ACTION_DOWN : // user started touching the screen onActionDown ( event ) ; break ; case MotionEvent . ACTION_UP : // user stopped touching the screen isEventConsumed = onActionUp ( event ) ; break ; case MotionEvent . ACTION_MOVE : // user is moving finger on the screen onActionMove ( event ) ; break ; default : break ; } return isEventConsumed ;
public class hanode_routemonitor_binding { /** * Use this API to fetch hanode _ routemonitor _ binding resources of given name . */ public static hanode_routemonitor_binding [ ] get ( nitro_service service , Long id ) throws Exception { } }
hanode_routemonitor_binding obj = new hanode_routemonitor_binding ( ) ; obj . set_id ( id ) ; hanode_routemonitor_binding response [ ] = ( hanode_routemonitor_binding [ ] ) obj . get_resources ( service ) ; return response ;
public class AbstractManagerFactory { /** * Provide the statically computed table name with keyspace ( if defined ) for a given entity class * @ param entityClass given entity class * @ return statically computed table name with keyspace ( if define ) */ public Optional < String > staticTableNameFor ( Class < ? > entityClass ) { } }
final Optional < String > tableName = entityProperties . stream ( ) . filter ( x -> x . entityClass . equals ( entityClass ) ) . map ( x -> x . getKeyspace ( ) . map ( ks -> ks + "." + x . getTableOrViewName ( ) ) . orElseGet ( x :: getTableOrViewName ) ) . findFirst ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( format ( "Determining table name for entity type %s : %s" , entityClass . getCanonicalName ( ) , tableName ) ) ; } return tableName ;
public class ZLIBCodec { /** * Compress / encode the data provided using the ZLIB format . * @ param data data to compress * @ return compressed data * @ throws IOException on compression failure */ public static byte [ ] encode ( final byte [ ] data ) throws IOException { } }
ByteArrayOutputStream byteOut = new ByteArrayOutputStream ( ) ; DeflaterOutputStream deflateOut = null ; try { deflateOut = new DeflaterOutputStream ( byteOut ) ; deflateOut . write ( data ) ; deflateOut . close ( ) ; byteOut . close ( ) ; return byteOut . toByteArray ( ) ; } finally { deflateOut . close ( ) ; byteOut . close ( ) ; }
public class P3Parse { /** * Gets an expression that evaluates to the denotation of * this parse . The expression will not re - evaluate any * already evaluated subexpressions of this parse . { @ code env } * may be extended with additional variable bindings to * capture denotations of already - evaluated subparses . * @ param env * @ param symbolTable * @ return */ public Expression2 getUnevaluatedLogicalForm ( Environment env , IndexedList < String > symbolTable ) { } }
List < String > newBindings = Lists . newArrayList ( ) ; return getUnevaluatedLogicalForm ( env , symbolTable , newBindings ) ;
public class SpScheduler { /** * update the duplicated counters after the host failure . */ public void updateReplicasFromMigrationLeaderFailedHost ( int failedHostId ) { } }
List < Long > replicas = new ArrayList < > ( ) ; for ( long hsid : m_replicaHSIds ) { if ( failedHostId != CoreUtils . getHostIdFromHSId ( hsid ) ) { replicas . add ( hsid ) ; } } ( ( InitiatorMailbox ) m_mailbox ) . updateReplicas ( replicas , null ) ;
public class TrafficCounter {
    /**
     * Returns the time to wait (if any) for the given length message, using the
     * given limitTraffic and the max wait time.
     *
     * @param size the write size
     * @param limitTraffic the traffic limit in bytes per second
     * @param maxTime the max time in ms to wait in case of excess of traffic
     * @param now the current time
     * @return the current time to wait (in ms) if needed for a Write operation
     */
    public long writeTimeToWait(final long size, final long limitTraffic, final long maxTime, final long now) {
        // Account for the bytes before computing any delay.
        bytesWriteFlowControl(size);
        if (size == 0 || limitTraffic == 0) {
            // Nothing written or shaping disabled: never wait.
            return 0;
        }
        final long lastTimeCheck = lastTime.get();
        long sum = currentWrittenBytes.get();
        long lastWB = lastWrittenBytes;
        long localWritingTime = writingTime;
        // Delay already scheduled past the last check, if any.
        long pastDelay = Math.max(lastWritingTime - lastTimeCheck, 0);
        final long interval = now - lastTimeCheck;
        if (interval > AbstractTrafficShapingHandler.MINIMAL_WAIT) {
            // Enough interval time to compute shaping from the current window alone.
            // time = (bytes / limit in ms) minus elapsed interval, plus prior delay.
            long time = sum * 1000 / limitTraffic - interval + pastDelay;
            if (time > AbstractTrafficShapingHandler.MINIMAL_WAIT) {
                if (logger.isDebugEnabled()) {
                    logger.debug("Time: " + time + ':' + sum + ':' + interval + ':' + pastDelay);
                }
                // Cap the wait at maxTime when both the raw wait and the wait
                // relative to the already-scheduled writing time exceed it.
                if (time > maxTime && now + time - localWritingTime > maxTime) {
                    time = maxTime;
                }
                writingTime = Math.max(localWritingTime, now + time);
                return time;
            }
            writingTime = Math.max(localWritingTime, now);
            return 0;
        }
        // Interval too short: fold in the last window's bytes and duration to
        // get a statistically meaningful interval for the computation.
        long lastsum = sum + lastWB;
        long lastinterval = interval + checkInterval.get();
        long time = lastsum * 1000 / limitTraffic - lastinterval + pastDelay;
        if (time > AbstractTrafficShapingHandler.MINIMAL_WAIT) {
            if (logger.isDebugEnabled()) {
                logger.debug("Time: " + time + ':' + lastsum + ':' + lastinterval + ':' + pastDelay);
            }
            if (time > maxTime && now + time - localWritingTime > maxTime) {
                time = maxTime;
            }
            writingTime = Math.max(localWritingTime, now + time);
            return time;
        }
        writingTime = Math.max(localWritingTime, now);
        return 0;
    }
}
public class Sets { /** * Cartesian product of A and B , denoted A × B , is the set whose members are * all possible ordered pairs ( a , b ) where a is a member of A and b is a * member of B . * @ param < A > * @ param < B > * @ param a * @ param b * @ return */ public static final < A , B > Set < OrderedPair < A , B > > cartesianProduct ( Set < A > a , Set < B > b ) { } }
Set < OrderedPair < A , B > > set = new HashSet < > ( ) ; for ( A t : a ) { for ( B v : b ) { set . add ( new OrderedPair ( t , v ) ) ; } } return set ;
public class Resource { /** * Returns a { @ link Resource } . * @ param type the type identifier for the resource . * @ param labels a map of labels that describe the resource . * @ return a { @ code Resource } . * @ throws NullPointerException if { @ code labels } is null . * @ throws IllegalArgumentException if type or label key or label value is not a valid printable * ASCII string or exceed { @ link # MAX _ LENGTH } characters . * @ since 0.18 */ public static Resource create ( @ Nullable String type , Map < String , String > labels ) { } }
return createInternal ( type , Collections . unmodifiableMap ( new LinkedHashMap < String , String > ( Utils . checkNotNull ( labels , "labels" ) ) ) ) ;
public class HttpServletRequestDummy {
    /**
     * Add a new header value.
     *
     * @param name name of the new value
     * @param value header value
     */
    public void addHeader(String name, String value) {
        // ReqRspUtil.add returns the (possibly replaced) header collection
        // containing the additional name/value pair, so reassign the field.
        headers = ReqRspUtil.add(headers, name, value);
    }
}
public class ContentExtractor {
    /**
     * Fetches the page at the given URL and returns the Element containing the
     * extracted main content.
     *
     * @param url the page URL to fetch
     * @return the Element holding the extracted content
     * @throws Exception if fetching or extraction fails
     */
    public static Element getContentElementByUrl(String url) throws Exception {
        // Download the page with the shared OkHttp requester, then run the
        // HTML-based extraction on the response body.
        String pageHtml = okHttpRequester.getResponse(url).html();
        return getContentElementByHtml(pageHtml, url);
    }
}
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link EnumUnfileObject}{@code >}
     * for the {@code unfileObjects} element scoped to {@link DeleteTree}.
     *
     * @param value the enum value to wrap
     * @return the wrapped JAXB element
     */
    @XmlElementDecl(namespace = "http://docs.oasis-open.org/ns/cmis/messaging/200908/", name = "unfileObjects", scope = DeleteTree.class)
    public JAXBElement<EnumUnfileObject> createDeleteTreeUnfileObjects(EnumUnfileObject value) {
        // Standard JAXB ObjectFactory pattern: wrap the value with its QName
        // and declaring scope so the marshaller emits the correct element.
        return new JAXBElement<EnumUnfileObject>(_DeleteTreeUnfileObjects_QNAME, EnumUnfileObject.class, DeleteTree.class, value);
    }
}
public class DefaultAnnotationProvider { /** * < p > Return a list of the classes defined under the * < code > / WEB - INF / classes < / code > directory of this web * application . If there are no such classes , a zero - length list * will be returned . < / p > * @ param externalContext < code > ExternalContext < / code > instance for * this application * @ exception ClassNotFoundException if a located class cannot be loaded */ private List < Class < ? > > webClasses ( ExternalContext externalContext ) { } }
List < Class < ? > > list = new ArrayList < Class < ? > > ( ) ; webClasses ( externalContext , WEB_CLASSES_PREFIX , list ) ; return list ;
public class JaxRsEJBModuleInfoBuilder {
    /**
     * Adds the EJB's class (or interface) name to the given set when that class
     * carries the JAX-RS {@code @Provider} annotation.
     *
     * @param classeNames set of class names to add to; when null, nothing is collected
     * @param ejb the EJB endpoint whose implementation class is inspected
     * @param ejbInterfaceName interface name to inspect instead of the EJB class; may be null
     * @param appClassloader class loader used to load the class being inspected
     */
    private void getEJBWithProviderClasses(Set<String> classeNames, EJBEndpoint ejb, String ejbInterfaceName, ClassLoader appClassloader) {
        final String methodName = "getEJBWithProviderClasses";
        if (tc.isEntryEnabled())
            Tr.entry(tc, methodName, classeNames, ejb, ejbInterfaceName);
        if (classeNames == null) {
            // No output collection supplied; nothing to do.
            if (tc.isEntryEnabled())
                Tr.exit(tc, methodName, Collections.emptySet());
            return;
        }
        Class<Provider> providerClass = Provider.class;
        // Prefer the interface name when supplied; otherwise inspect the bean class itself.
        final String ejbClassName = (ejbInterfaceName == null) ? ejb.getClassName() : ejbInterfaceName;
        Class<?> c = null;
        try {
            c = appClassloader.loadClass(ejbClassName);
        } catch (ClassNotFoundException e) {
            // if ( tc . isDebugEnabled ( ) ) {
            // Tr . debug ( tc , " getEJBWithProviderClasses ( ) exit - due to Class Not Found for " + ejbClassName + " : " + e ) ;
            // Class could not be loaded: report and fall through (c stays null).
            Tr.error(tc, "error.failed.toloadejbclass", ejbClassName);
        }
        // Only record the class when it was loadable and annotated with @Provider.
        if (c != null && c.getAnnotation(providerClass) != null) {
            classeNames.add(ejbClassName);
        }
        if (tc.isEntryEnabled())
            Tr.exit(tc, methodName, classeNames);
    }
}
public class ApiOvhXdsl { /** * Switch this access to total deconsolidation * REST : POST / xdsl / { serviceName } / requestTotalDeconsolidation * @ param noPortability [ required ] Do not port the number * @ param rio [ required ] A token to prove the ownership of the line number , needed to port the number * @ param serviceName [ required ] The internal name of your XDSL offer */ public OvhTask serviceName_requestTotalDeconsolidation_POST ( String serviceName , Boolean noPortability , String rio ) throws IOException { } }
String qPath = "/xdsl/{serviceName}/requestTotalDeconsolidation" ; StringBuilder sb = path ( qPath , serviceName ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "noPortability" , noPortability ) ; addBody ( o , "rio" , rio ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhTask . class ) ;
public class AwsSecurityFindingFilters { /** * The text of a note . * @ param noteText * The text of a note . */ public void setNoteText ( java . util . Collection < StringFilter > noteText ) { } }
if ( noteText == null ) { this . noteText = null ; return ; } this . noteText = new java . util . ArrayList < StringFilter > ( noteText ) ;
public class aaauser_vpnsessionpolicy_binding { /** * Use this API to fetch aaauser _ vpnsessionpolicy _ binding resources of given name . */ public static aaauser_vpnsessionpolicy_binding [ ] get ( nitro_service service , String username ) throws Exception { } }
aaauser_vpnsessionpolicy_binding obj = new aaauser_vpnsessionpolicy_binding ( ) ; obj . set_username ( username ) ; aaauser_vpnsessionpolicy_binding response [ ] = ( aaauser_vpnsessionpolicy_binding [ ] ) obj . get_resources ( service ) ; return response ;
public class ping { /** * < pre > * Performs generic data validation for the operation to be performed * < / pre > */ protected void validate ( String operationType ) throws Exception { } }
super . validate ( operationType ) ; MPSIPAddress device_ipaddress_validator = new MPSIPAddress ( ) ; device_ipaddress_validator . validate ( operationType , device_ipaddress , "\"device_ipaddress\"" ) ; MPSString ping_status_validator = new MPSString ( ) ; ping_status_validator . setConstraintMaxStrLen ( MPSConstants . GENERIC_CONSTRAINT , 1024 ) ; ping_status_validator . setConstraintMinStrLen ( MPSConstants . GENERIC_CONSTRAINT , 1 ) ; ping_status_validator . validate ( operationType , ping_status , "\"ping_status\"" ) ;
public class TarEntry {
    /**
     * Determine if the given entry is a descendant of this entry. Descendancy is
     * determined by the name of the descendant starting with this entry's name.
     *
     * <p>NOTE(review): this is a plain string-prefix test, so "foo2/bar" counts
     * as a descendant of "foo" even though they are sibling directories. That
     * matches the documented contract, but callers needing strict path
     * containment should ensure this entry's name ends with "/" — confirm
     * against call sites before relying on it.
     *
     * @param desc Entry to be checked as a descendent of this.
     * @return True if entry is a descendant of this.
     */
    public boolean isDescendent(TarEntry desc) {
        return desc.header.name.toString().startsWith(this.header.name.toString());
    }
}
public class CreateOrganizationRequestMarshaller {
    /**
     * Marshall the given parameter object into the protocol representation.
     *
     * @param createOrganizationRequest the request to marshall; must not be null
     * @param protocolMarshaller marshaller receiving each bound field
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(CreateOrganizationRequest createOrganizationRequest, ProtocolMarshaller protocolMarshaller) {
        if (createOrganizationRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each modeled member with its binding descriptor.
            protocolMarshaller.marshall(createOrganizationRequest.getFeatureSet(), FEATURESET_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class AbstractThymeleafView { /** * Sets a set of static variables , which will be available at the context * when this view is processed . * This method < b > does not overwrite < / b > the existing static variables , it * simply adds the ones specify to any variables already registered . * These static variables are added to the context before this view is * processed , so that they can be referenced from * the context like any other context variables , for example : * { @ code $ { myStaticVar } } . * @ param variables the set of variables to be added . */ public void setStaticVariables ( final Map < String , ? > variables ) { } }
if ( variables != null ) { if ( this . staticVariables == null ) { this . staticVariables = new HashMap < String , Object > ( 3 , 1.0f ) ; } this . staticVariables . putAll ( variables ) ; }
public class AABBUtils { /** * Reads a { @ link AxisAlignedBB } from { @ link NBTTagCompound } with the specified prefix . * @ param tag the tag * @ param prefix the prefix * @ return the axis aligned bb */ public static AxisAlignedBB readFromNBT ( NBTTagCompound tag , String prefix ) { } }
prefix = prefix == null ? "" : prefix + "." ; return tag != null ? new AxisAlignedBB ( tag . getDouble ( prefix + "minX" ) , tag . getDouble ( prefix + "minY" ) , tag . getDouble ( prefix + "minZ" ) , tag . getDouble ( prefix + "maxX" ) , tag . getDouble ( prefix + "maxY" ) , tag . getDouble ( prefix + "maxZ" ) ) : null ;
public class GregorianCalendar { /** * Returns this object if it ' s normalized ( all fields and time are * in sync ) . Otherwise , a cloned object is returned after calling * complete ( ) in lenient mode . */ private GregorianCalendar getNormalizedCalendar ( ) { } }
GregorianCalendar gc ; if ( isFullyNormalized ( ) ) { gc = this ; } else { // Create a clone and normalize the calendar fields gc = ( GregorianCalendar ) this . clone ( ) ; gc . setLenient ( true ) ; gc . complete ( ) ; } return gc ;
public class CmsCopyMoveDialog {
    /**
     * Submits the dialog action.<p>
     *
     * Resolves the target path, determines the target folder (and, for single
     * resources, an optional new name), checks for collisions and macro
     * folders, then performs the copy/move for every selected resource.
     *
     * @param overwrite to forcefully overwrite existing files
     * @param makroMap map of key-value pairs to be resolved as macro; if null or empty, then ignored
     */
    void submit(boolean overwrite, Map<String, String> makroMap) {
        try {
            CmsResource targetFolder = null;
            // Non-null only when a single resource is copied/moved under a new name.
            String targetName = null;
            String target = m_targetPath.getValue();
            boolean isSingleResource = m_context.getResources().size() == 1;
            // resolve relative paths against the first selected resource's parent folder
            target = CmsLinkManager.getAbsoluteUri(
                target,
                CmsResource.getParentFolder(getCms().getSitePath(m_context.getResources().get(0))));
            // check if the given path is a root path; use the root CmsObject in that case
            CmsObject cms = OpenCms.getSiteManager().getSiteForRootPath(target) != null ? getRootCms() : getCms();
            if (cms.existsResource(target, CmsResourceFilter.ALL.addRequireFolder())) {
                // The target is an existing folder
                // always copy files into that folder
                targetFolder = cms.readResource(target);
            } else if (cms.existsResource(target, CmsResourceFilter.ALL.addRequireFile())) {
                // The target is an existing file
                if (isSingleResource) {
                    // Replace the file with the resource copied, if it is just a single resource
                    if (target.equals(m_context.getResources().get(0).getRootPath())) {
                        // Copying a resource onto itself is an error.
                        throw new CmsVfsException(
                            org.opencms.workplace.commons.Messages.get().container(
                                org.opencms.workplace.commons.Messages.ERR_COPY_ONTO_ITSELF_1,
                                target));
                    }
                    targetName = CmsResource.getName(target);
                    targetFolder = cms.readResource(CmsResource.getParentFolder(target));
                } else {
                    // Throw an error if a single file should be replaced with multiple resources
                    // since we cannot copy multiple resources to a single file
                    throw new CmsVfsException(
                        org.opencms.workplace.commons.Messages.get().container(
                            org.opencms.workplace.commons.Messages.ERR_COPY_MULTI_TARGET_NOFOLDER_1,
                            target));
                }
            } else {
                // The target does not exist
                if (isSingleResource) {
                    // If we have a single resource, we could possibly create the target as copy of that resource
                    if (cms.existsResource(
                        CmsResource.getParentFolder(target),
                        CmsResourceFilter.ALL.addRequireFolder())) {
                        targetName = CmsResource.getName(target);
                        targetFolder = cms.readResource(CmsResource.getParentFolder(target));
                    } else {
                        // If the parent folder of the resource does not exist, we will not create it automatically.
                        // Thus we need to throw an exception.
                        throw new CmsVfsException(
                            org.opencms.workplace.commons.Messages.get().container(
                                org.opencms.workplace.commons.Messages.ERR_COPY_TARGET_PARENT_FOLDER_MISSING_1,
                                target));
                    }
                } else {
                    // We cannot copy multiple resources to a single resource
                    throw new CmsVfsException(
                        org.opencms.workplace.commons.Messages.get().container(
                            org.opencms.workplace.commons.Messages.ERR_COPY_MULTI_TARGET_NOFOLDER_1,
                            target));
                }
            }
            // Default to 'move' when there is no action selector in the dialog.
            Action action = m_actionCombo != null ? (Action)m_actionCombo.getValue() : Action.move;
            overwrite = overwrite || isOverwriteExisting();
            if (!overwrite || action.equals(Action.move)) {
                // Collision handling: move never overwrites; copy asks the user first.
                List<CmsResource> collidingResources = getExistingFileCollisions(targetFolder, targetName);
                if (collidingResources != null) {
                    if (action.equals(Action.move)) {
                        throw new CmsVfsException(
                            org.opencms.workplace.commons.Messages.get().container(
                                org.opencms.workplace.commons.Messages.ERR_MOVE_FORCES_OVERWRITE_EXISTING_RESOURCE_0));
                    } else {
                        showConfirmOverwrite(collidingResources);
                        return;
                    }
                }
            }
            Map<CmsResource, CmsException> errors = new HashMap<CmsResource, CmsException>();
            // Check if dialog for macro resolver has to be shown: action correct?, makroMap == null (default, not set by dialog yet)
            // NOTE(review): non-short-circuit '&' is used here; both operands are
            // side-effect-free booleans, so behavior matches '&&'.
            if ((action == Action.sub_sitemap) & (makroMap == null)) {
                if (CmsSiteManager.isFolderWithMacros(getRootCms(), m_context.getResources().get(0).getRootPath())) {
                    showMacroResolverDialog(m_context.getResources().get(0));
                    return;
                }
            }
            if (targetName == null) {
                // Copy/move every selected resource into the target folder,
                // collecting per-resource failures instead of aborting.
                for (CmsResource source : m_context.getResources()) {
                    try {
                        performSingleOperation(source, targetFolder, action, overwrite, makroMap);
                    } catch (CmsException e) {
                        errors.put(source, e);
                        LOG.error(
                            "Error while executing "
                                + m_actionCombo.getValue().toString()
                                + " on resource "
                                + source.getRootPath(),
                            e);
                    }
                }
            } else {
                // this will only be the case in a single resource scenario
                CmsResource source = m_context.getResources().get(0);
                try {
                    performSingleOperation(source, targetFolder, targetName, action, overwrite, makroMap);
                } catch (CmsException e) {
                    errors.put(source, e);
                    LOG.error(
                        "Error while executing "
                            + m_actionCombo.getValue().toString()
                            + " on resource "
                            + source.getRootPath(),
                        e);
                }
            }
            if (!errors.isEmpty()) {
                // Finish the dialog, then surface the first recorded error.
                m_context.finish(m_updateResources);
                m_context.error(errors.values().iterator().next());
            } else {
                m_context.finish(m_updateResources);
            }
        } catch (CmsException e) {
            m_context.error(e);
        }
    }
}
public class InstanceTypeDescriptionFactory {
    /**
     * Constructs a new {@link InstanceTypeDescription} object.
     *
     * @param instanceType the instance type
     * @param hardwareDescription the hardware description as created by the {@link InstanceManager}
     * @param numberOfAvailableInstances the number of available instances of this type
     * @return the instance type description
     */
    public static InstanceTypeDescription construct(InstanceType instanceType, HardwareDescription hardwareDescription, int numberOfAvailableInstances) {
        // Thin factory wrapper over the (presumably non-public) constructor.
        return new InstanceTypeDescription(instanceType, hardwareDescription, numberOfAvailableInstances);
    }
}
public class Hasher {
    /**
     * Computes the hash value for the file bytes from offset {@code from}
     * until offset {@code until} (bytes at offsets {@code from} inclusive up
     * to {@code until} exclusive are digested), using the hash instance as
     * defined by the hash type.
     *
     * @param file the file to compute the hash from
     * @param digest the message digest instance; its accumulated state is finalized here
     * @param from file offset to start from; must be non-negative
     * @param until file offset for the end; must be greater than {@code from}
     *              and at most the file length
     * @return hash value as byte array
     * @throws IOException if reading the file fails
     */
    public static byte[] computeHash(File file, MessageDigest digest, long from, long until) throws IOException {
        Preconditions.checkArgument(from >= 0, "negative offset");
        Preconditions.checkArgument(until > from, "end offset is smaller or equal to start offset");
        Preconditions.checkArgument(until <= file.length(), "end offset is greater than file length");
        try (RandomAccessFile raf = new RandomAccessFile(file, "r")) {
            byte[] buffer = new byte[BUFFER_SIZE];
            int readbytes;
            // byteSum tracks the absolute file offset consumed so far; it starts
            // at 'from' because reading begins there after the seek below.
            long byteSum = from;
            raf.seek(from);
            // Loop order matters: the 'byteSum <= until' check uses the offset
            // reached by the PREVIOUS iteration, so the final (possibly partial)
            // chunk is still read and trimmed below.
            while ((readbytes = raf.read(buffer)) != -1 && byteSum <= until) {
                byteSum += readbytes;
                if (byteSum > until) {
                    // Last chunk crossed 'until': trim the overshoot so only
                    // bytes below 'until' are fed into the digest.
                    readbytes -= (byteSum - until);
                }
                digest.update(buffer, 0, readbytes);
            }
            return digest.digest();
        }
    }
}