signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class DateUtils { /** * Attempt to construct an ISO formatted date as a string built from atomic parts of the date . * @ param verbatimEventDate a string containing the verbatim event date * @ param startDayOfYear a string containing the start day of a year , expected to parse to an integer . * @ param endDayOfYear a string containing the end day of a year , expected to parse to an integer . * @ param year a string containing the year , expected to parse to an integer . * @ param month a string containing the month , expected to parse to an integer . * @ param day a string containing the start day , expected to parse to an integer . * @ return null , or a string in the form of an ISO date consistent with the input fields . */ public static String createEventDateFromParts ( String verbatimEventDate , String startDayOfYear , String endDayOfYear , String year , String month , String day ) { } }
String result = null ; if ( verbatimEventDate != null && verbatimEventDate . trim ( ) . length ( ) > 0 ) { Map < String , String > verbatim = extractDateToDayFromVerbatim ( verbatimEventDate , DateUtils . YEAR_BEFORE_SUSPECT ) ; if ( verbatim . size ( ) > 0 ) { if ( verbatim . get ( "resultState" ) != null && verbatim . get ( "resultState" ) . equals ( "date" ) ) { result = verbatim . get ( "result" ) ; } if ( verbatim . get ( "resultState" ) != null && verbatim . get ( "resultState" ) . equals ( "ambiguous" ) ) { result = verbatim . get ( "result" ) ; } if ( verbatim . get ( "resultState" ) != null && verbatim . get ( "resultState" ) . equals ( "range" ) ) { result = verbatim . get ( "result" ) ; } } } if ( year != null && year . matches ( "[0-9]{4}" ) && isEmpty ( month ) && isEmpty ( day ) && isEmpty ( startDayOfYear ) ) { result = year ; } if ( year != null && year . matches ( "[0-9]{4}" ) && ( month == null || month . trim ( ) . length ( ) == 0 ) && ( day == null || day . trim ( ) . length ( ) == 0 ) && startDayOfYear != null && startDayOfYear . trim ( ) . length ( ) > 0 ) { try { StringBuffer assembly = new StringBuffer ( ) ; if ( ! isEmpty ( endDayOfYear ) && ! startDayOfYear . trim ( ) . equals ( endDayOfYear . trim ( ) ) ) { assembly . append ( year ) . append ( "-" ) . append ( String . format ( "%03d" , Integer . parseInt ( startDayOfYear ) ) ) . append ( "/" ) ; assembly . append ( year ) . append ( "-" ) . append ( String . format ( "%03d" , Integer . parseInt ( endDayOfYear ) ) ) ; } else { assembly . append ( year ) . append ( "-" ) . append ( String . format ( "%03d" , Integer . parseInt ( startDayOfYear ) ) ) ; } Map < String , String > verbatim = extractDateToDayFromVerbatim ( assembly . toString ( ) , DateUtils . YEAR_BEFORE_SUSPECT ) ; logger . debug ( verbatim . get ( "resultState" ) ) ; logger . debug ( verbatim . get ( "result" ) ) ; if ( verbatim . get ( "resultState" ) != null && ( verbatim . get ( "resultState" ) . 
equals ( "date" ) || verbatim . get ( "resultState" ) . equals ( "range" ) ) ) { result = verbatim . get ( "result" ) ; } } catch ( Exception e ) { logger . debug ( e . getMessage ( ) ) ; } } if ( ( verbatimEventDate != null && verbatimEventDate . matches ( "^[0-9]{4}$" ) ) && ( year == null || year . trim ( ) . length ( ) == 0 ) && ( month == null || month . trim ( ) . length ( ) == 0 ) && ( day == null || day . trim ( ) . length ( ) == 0 ) && startDayOfYear != null && startDayOfYear . trim ( ) . length ( ) > 0 ) { try { StringBuffer assembly = new StringBuffer ( ) ; if ( endDayOfYear != null && endDayOfYear . trim ( ) . length ( ) > 0 && ! startDayOfYear . trim ( ) . equals ( endDayOfYear . trim ( ) ) ) { assembly . append ( verbatimEventDate ) . append ( "-" ) . append ( String . format ( "%03d" , Integer . parseInt ( startDayOfYear ) ) ) . append ( "/" ) ; assembly . append ( verbatimEventDate ) . append ( "-" ) . append ( String . format ( "%03d" , Integer . parseInt ( endDayOfYear ) ) ) ; } else { assembly . append ( verbatimEventDate ) . append ( "-" ) . append ( String . format ( "%03d" , Integer . parseInt ( startDayOfYear ) ) ) ; } Map < String , String > verbatim = extractDateToDayFromVerbatim ( assembly . toString ( ) , DateUtils . YEAR_BEFORE_SUSPECT ) ; logger . debug ( verbatim . get ( "resultState" ) ) ; logger . debug ( verbatim . get ( "result" ) ) ; if ( verbatim . get ( "resultState" ) != null && ( verbatim . get ( "resultState" ) . equals ( "date" ) || verbatim . get ( "resultState" ) . equals ( "range" ) ) ) { result = verbatim . get ( "result" ) ; } } catch ( Exception e ) { logger . debug ( e . getMessage ( ) ) ; } } if ( year != null && year . matches ( "[0-9]{4}" ) && month != null && month . matches ( "[0-9]{1,2}" ) && ( day == null || day . trim ( ) . length ( ) == 0 ) ) { result = String . format ( "%04d" , Integer . parseInt ( year ) ) + "-" + String . format ( "%02d" , Integer . parseInt ( month ) ) ; } if ( year != null && year . 
matches ( "[0-9]{4}" ) && month != null && month . matches ( "[0-9]{1,2}" ) && day != null && day . matches ( "[0-9]{1,2}" ) ) { result = String . format ( "%04d" , Integer . parseInt ( year ) ) + "-" + String . format ( "%02d" , Integer . parseInt ( month ) ) + "-" + String . format ( "%02d" , Integer . parseInt ( day ) ) ; } return result ;
public class BaseTangramEngine { /** * Parse original data with type { @ link O } into model data with type { @ link L } * @ param parent the parent group to hold parsed object . * @ param data Original data . * @ return Parsed data . * @ since 3.0.0 */ public L parseSingleComponent ( @ Nullable C parent , @ Nullable O data ) { } }
return mDataParser . parseSingleComponent ( data , parent , this ) ;
public class UIComponentBase { /** * < p > Throw < code > IllegalArgumentException < / code > if the specified * component identifier is non - < code > null < / code > and not * syntactically valid . < / p > * @ param id The component identifier to test */ private static void validateId ( String id ) { } }
if ( id == null ) { return ; } int n = id . length ( ) ; if ( n < 1 ) { throw new IllegalArgumentException ( "Empty id attribute is not allowed" ) ; } for ( int i = 0 ; i < n ; i ++ ) { char c = id . charAt ( i ) ; if ( i == 0 ) { if ( ! Character . isLetter ( c ) && ( c != '_' ) ) { throw new IllegalArgumentException ( id ) ; } } else { if ( ! Character . isLetter ( c ) && ! Character . isDigit ( c ) && ( c != '-' ) && ( c != '_' ) ) { throw new IllegalArgumentException ( id ) ; } } }
public class CollectionsInterface { /** * Retrieves info on the given Flickr { @ link Collection } ( of { @ link Photoset } s ) . * This method requires authentication . * @ param collectionId * the id of the collection ( from the getTree call , not from the collection URL ) . * @ return the given Collection * @ throws FlickrException */ public Collection getInfo ( String collectionId ) throws FlickrException { } }
Map < String , Object > parameters = new HashMap < String , Object > ( ) ; parameters . put ( "method" , METHOD_GET_INFO ) ; parameters . put ( "collection_id" , collectionId ) ; Response response = transportAPI . get ( transportAPI . getPath ( ) , parameters , apiKey , sharedSecret ) ; if ( response . isError ( ) ) { throw new FlickrException ( response . getErrorCode ( ) , response . getErrorMessage ( ) ) ; } Collection collection = parseCollection ( response . getPayload ( ) ) ; return collection ;
public class StreamletImpl { /** * Sets the name of the Streamlet . * @ param sName The name given by the user for this streamlet * @ return Returns back the Streamlet with changed name */ @ Override public Streamlet < R > setName ( String sName ) { } }
checkNotBlank ( sName , "Streamlet name cannot be null/blank" ) ; this . name = sName ; return this ;
public class KeyVaultClientBaseImpl { /** * List certificates in a specified key vault . * The GetCertificates operation returns the set of certificates resources in the specified key vault . This operation requires the certificates / list permission . * @ param vaultBaseUrl The vault name , for example https : / / myvault . vault . azure . net . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; CertificateItem & gt ; object */ public Observable < ServiceResponse < Page < CertificateItem > > > getCertificatesWithServiceResponseAsync ( final String vaultBaseUrl ) { } }
return getCertificatesSinglePageAsync ( vaultBaseUrl ) . concatMap ( new Func1 < ServiceResponse < Page < CertificateItem > > , Observable < ServiceResponse < Page < CertificateItem > > > > ( ) { @ Override public Observable < ServiceResponse < Page < CertificateItem > > > call ( ServiceResponse < Page < CertificateItem > > page ) { String nextPageLink = page . body ( ) . nextPageLink ( ) ; if ( nextPageLink == null ) { return Observable . just ( page ) ; } return Observable . just ( page ) . concatWith ( getCertificatesNextWithServiceResponseAsync ( nextPageLink ) ) ; } } ) ;
public class VersionControlGit { /** * Does not do anything if already on target branch . */ public void checkout ( String branch ) throws Exception { } }
if ( ! branch . equals ( getBranch ( ) ) ) { createBranchIfNeeded ( branch ) ; git . checkout ( ) . setName ( branch ) . setStartPoint ( "origin/" + branch ) . setUpstreamMode ( CreateBranchCommand . SetupUpstreamMode . TRACK ) . call ( ) ; // for some reason jgit needs this when branch is switched git . checkout ( ) . setName ( branch ) . call ( ) ; }
public class MessageStoreImpl { /** * Notification that the configuration of the engine has changed . * @ param engine The messaging engine whose configuration has been reloaded */ @ Override public void engineReloaded ( Object messagingEngine ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "engineReloaded" ) ; // Defect 337421.1 // Our config has been reloaded so we need to essentially // re - initialize so that the next time that we run start ( ) // we pick up any changes . initialize ( ( JsMessagingEngine ) messagingEngine , true ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "engineReloaded" ) ;
public class XMLUnit { /** * Get the SAX parser to use in tests . * < p > Unless an instance has been given via { @ link * # setSAXParserFactory ( SAXParserFactory ) setSAXParserFactory } * explicitly , the returned factory will be namespace aware . < / p > * @ return the SAXParserFactory instance used by the { @ link * Validator Validator } to perform DTD validation */ public static SAXParserFactory getSAXParserFactory ( ) { } }
if ( saxParserFactory == null ) { saxParserFactory = SAXParserFactory . newInstance ( ) ; saxParserFactory . setNamespaceAware ( true ) ; } return saxParserFactory ;
public class SortOrderHandler { /** * Set the grid order to the value in this field . * @ return an error code . */ public int setupGridOrder ( ) { } }
int iErrorCode = DBConstants . NORMAL_RETURN ; boolean bOrder = DBConstants . ASCENDING ; int iKeyOrder = ( int ) ( ( NumberField ) this . getOwner ( ) ) . getValue ( ) ; if ( iKeyOrder == 0 ) return DBConstants . KEY_NOT_FOUND ; if ( iKeyOrder < 0 ) { bOrder = DBConstants . DESCENDING ; iKeyOrder = - iKeyOrder ; } iKeyOrder -- ; // 0 Based if ( iKeyOrder < m_iNextArrayIndex ) { if ( m_recGrid == null ) m_recGrid = ( Record ) m_gridScreen . getMainRecord ( ) ; for ( int i = 0 ; i < m_recGrid . getKeyAreaCount ( ) ; i ++ ) { if ( m_recGrid . getKeyArea ( i ) . getKeyName ( ) . equals ( m_iKeyAreaArray [ iKeyOrder ] ) ) iKeyOrder = i ; // Get key order from internal array } } else { // They chose a column that was was not specified . . . Get the desc and try to sort it by the field if ( m_gridScreen != null ) { int iColumn = iKeyOrder + 1 + m_gridScreen . getNavCount ( ) ; // grid column ScreenComponent sField = m_gridScreen . getSField ( iColumn ) ; if ( sField . getConverter ( ) != null ) if ( sField . getConverter ( ) . getField ( ) != null ) { Record record = ( Record ) m_gridScreen . getMainRecord ( ) ; iKeyOrder = - 1 ; // No obvious sort order for ( int iKeyArea = 0 ; iKeyArea < record . getKeyAreaCount ( ) ; iKeyArea ++ ) { KeyArea keyArea = record . getKeyArea ( iKeyArea ) ; if ( keyArea . getField ( 0 ) == sField . getConverter ( ) . getField ( ) ) { // Is this field the first field of this key ? iKeyOrder = iKeyArea ; // Yes , use this order break ; } } if ( iKeyOrder == - 1 ) if ( m_bCreateSortOrder ) { // Create a key to sort on BaseField field = ( BaseField ) sField . getConverter ( ) . getField ( ) ; KeyArea keyArea = new KeyArea ( record , DBConstants . NOT_UNIQUE , field . getFieldName ( ) + "tempKey" ) ; new KeyField ( keyArea , field , DBConstants . ASCENDING ) ; iKeyOrder = record . getKeyAreaCount ( ) - 1 ; } } } } if ( iKeyOrder < 0 ) return DBConstants . 
KEY_NOT_FOUND ; KeyArea keyArea = null ; if ( m_recGrid == null ) m_recGrid = ( Record ) m_gridScreen . getMainRecord ( ) ; keyArea = m_recGrid . setKeyArea ( iKeyOrder ) ; if ( keyArea == null ) iErrorCode = DBConstants . KEY_NOT_FOUND ; else { for ( int i = 0 ; i < keyArea . getKeyFields ( ) ; i ++ ) { KeyField keyField = keyArea . getKeyField ( i ) ; keyField . setKeyOrder ( bOrder ) ; } } return iErrorCode ;
public class FileServletWrapper { /** * PK55965 Start */ private ServletEvent getServletEvent ( ) { } }
if ( event == null ) { event = new ServletEvent ( this , getServletContext ( ) , this . getServletAndFileName ( ) , getFileName ( ) ) ; } return event ;
public class StorageAccountsInner { /** * Gets the first page of Azure Storage accounts , if any , linked to the specified Data Lake Analytics account . The response includes a link to the next page , if any . * @ param resourceGroupName The name of the Azure resource group . * @ param accountName The name of the Data Lake Analytics account . * @ param filter The OData filter . Optional . * @ param top The number of items to return . Optional . * @ param skip The number of items to skip over before returning elements . Optional . * @ param select OData Select statement . Limits the properties on each entry to just those requested , e . g . Categories ? $ select = CategoryName , Description . Optional . * @ param orderby OrderBy clause . One or more comma - separated expressions with an optional " asc " ( the default ) or " desc " depending on the order you ' d like the values sorted , e . g . Categories ? $ orderby = CategoryName desc . Optional . * @ param count The Boolean value of true or false to request a count of the matching resources included with the resources in the response , e . g . Categories ? $ count = true . Optional . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; StorageAccountInformationInner & gt ; object */ public Observable < Page < StorageAccountInformationInner > > listByAccountAsync ( final String resourceGroupName , final String accountName , final String filter , final Integer top , final Integer skip , final String select , final String orderby , final Boolean count ) { } }
return listByAccountWithServiceResponseAsync ( resourceGroupName , accountName , filter , top , skip , select , orderby , count ) . map ( new Func1 < ServiceResponse < Page < StorageAccountInformationInner > > , Page < StorageAccountInformationInner > > ( ) { @ Override public Page < StorageAccountInformationInner > call ( ServiceResponse < Page < StorageAccountInformationInner > > response ) { return response . body ( ) ; } } ) ;
public class InOutUtil { /** * Retrieves the serialized form of the specified < CODE > Object < / CODE > * as an array of bytes . * @ param s the Object to serialize * @ return a static byte array representing the passed Object */ public static byte [ ] serialize ( Serializable s ) throws IOException { } }
HsqlByteArrayOutputStream bo = new HsqlByteArrayOutputStream ( ) ; ObjectOutputStream os = new ObjectOutputStream ( bo ) ; os . writeObject ( s ) ; return bo . toByteArray ( ) ;
public class CommerceWarehousePersistenceImpl { /** * Returns the first commerce warehouse in the ordered set where groupId = & # 63 ; and active = & # 63 ; . * @ param groupId the group ID * @ param active the active * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching commerce warehouse * @ throws NoSuchWarehouseException if a matching commerce warehouse could not be found */ @ Override public CommerceWarehouse findByG_A_First ( long groupId , boolean active , OrderByComparator < CommerceWarehouse > orderByComparator ) throws NoSuchWarehouseException { } }
CommerceWarehouse commerceWarehouse = fetchByG_A_First ( groupId , active , orderByComparator ) ; if ( commerceWarehouse != null ) { return commerceWarehouse ; } StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "groupId=" ) ; msg . append ( groupId ) ; msg . append ( ", active=" ) ; msg . append ( active ) ; msg . append ( "}" ) ; throw new NoSuchWarehouseException ( msg . toString ( ) ) ;
public class AWSOrganizationsClient { /** * Sends an invitation to another account to join your organization as a member account . Organizations sends email * on your behalf to the email address that is associated with the other account ' s owner . The invitation is * implemented as a < a > Handshake < / a > whose details are in the response . * < important > * < ul > * < li > * You can invite AWS accounts only from the same seller as the master account . For example , if your organization ' s * master account was created by Amazon Internet Services Pvt . Ltd ( AISPL ) , an AWS seller in India , then you can * only invite other AISPL accounts to your organization . You can ' t combine accounts from AISPL and AWS , or any * other AWS seller . For more information , see < a * href = " http : / / docs . aws . amazon . com / awsaccountbilling / latest / aboutv2 / useconsolidatedbilliing - India . html " * > Consolidated Billing in India < / a > . * < / li > * < li > * If you receive an exception that indicates that you exceeded your account limits for the organization or that the * operation failed because your organization is still initializing , wait one hour and then try again . If the error * persists after an hour , then contact < a href = " https : / / console . aws . amazon . com / support / home # / " > AWS Customer * Support < / a > . * < / li > * < / ul > * < / important > * This operation can be called only from the organization ' s master account . * @ param inviteAccountToOrganizationRequest * @ return Result of the InviteAccountToOrganization operation returned by the service . * @ throws AccessDeniedException * You don ' t have permissions to perform the requested operation . The user or role that is making the * request must have at least one IAM permissions policy attached that grants the required permissions . For * more information , see < a href = " https : / / docs . aws . amazon . com / IAM / latest / UserGuide / access . 
html " > Access * Management < / a > in the < i > IAM User Guide < / i > . * @ throws AWSOrganizationsNotInUseException * Your account isn ' t a member of an organization . To make this request , you must use the credentials of an * account that belongs to an organization . * @ throws AccountOwnerNotVerifiedException * You can ' t invite an existing account to your organization until you verify that you own the email address * associated with the master account . For more information , see < a href = * " http : / / docs . aws . amazon . com / organizations / latest / userguide / orgs _ manage _ create . html # about - email - verification " * > Email Address Verification < / a > in the < i > AWS Organizations User Guide . < / i > * @ throws ConcurrentModificationException * The target of the operation is currently being modified by a different request . Try again later . * @ throws HandshakeConstraintViolationException * The requested operation would violate the constraint identified in the reason code . < / p > < note > * Some of the reasons in the following list might not be applicable to this specific API or operation : * < / note > * < ul > * < li > * ACCOUNT _ NUMBER _ LIMIT _ EXCEEDED : You attempted to exceed the limit on the number of accounts in an * organization . Note that deleted and closed accounts still count toward your limit . * < important > * If you get this exception immediately after creating the organization , wait one hour and try again . If * after an hour it continues to fail with this error , contact < a * href = " https : / / console . aws . amazon . com / support / home # / " > AWS Support < / a > . * < / important > < / li > * < li > * HANDSHAKE _ RATE _ LIMIT _ EXCEEDED : You attempted to exceed the number of handshakes that you can send in one * day . * < / li > * < li > * ALREADY _ IN _ AN _ ORGANIZATION : The handshake request is invalid because the invited account is already a * member of an organization . 
* < / li > * < li > * ORGANIZATION _ ALREADY _ HAS _ ALL _ FEATURES : The handshake request is invalid because the organization has * already enabled all features . * < / li > * < li > * INVITE _ DISABLED _ DURING _ ENABLE _ ALL _ FEATURES : You can ' t issue new invitations to join an organization while * it ' s in the process of enabling all features . You can resume inviting accounts after you finalize the * process when all accounts have agreed to the change . * < / li > * < li > * PAYMENT _ INSTRUMENT _ REQUIRED : You can ' t complete the operation with an account that doesn ' t have a payment * instrument , such as a credit card , associated with it . * < / li > * < li > * ORGANIZATION _ FROM _ DIFFERENT _ SELLER _ OF _ RECORD : The request failed because the account is from a different * marketplace than the accounts in the organization . For example , accounts with India addresses must be * associated with the AISPL marketplace . All accounts in an organization must be from the same marketplace . * < / li > * < li > * ORGANIZATION _ MEMBERSHIP _ CHANGE _ RATE _ LIMIT _ EXCEEDED : You attempted to change the membership of an account * too quickly after its previous change . * < / li > * @ throws DuplicateHandshakeException * A handshake with the same action and target already exists . For example , if you invited an account to * join your organization , the invited account might already have a pending invitation from this * organization . If you intend to resend an invitation to an account , ensure that existing handshakes that * might be considered duplicates are canceled or declined . * @ throws InvalidInputException * The requested operation failed because you provided invalid values for one or more of the request * parameters . 
This exception includes a reason that contains additional information about the violated * limit : < / p > < note > * Some of the reasons in the following list might not be applicable to this specific API or operation : * < / note > * < ul > * < li > * IMMUTABLE _ POLICY : You specified a policy that is managed by AWS and can ' t be modified . * < / li > * < li > * INPUT _ REQUIRED : You must include a value for all required parameters . * < / li > * < li > * INVALID _ ENUM : You specified a value that isn ' t valid for that parameter . * < / li > * < li > * INVALID _ FULL _ NAME _ TARGET : You specified a full name that contains invalid characters . * < / li > * < li > * INVALID _ LIST _ MEMBER : You provided a list to a parameter that contains at least one invalid value . * < / li > * < li > * INVALID _ PARTY _ TYPE _ TARGET : You specified the wrong type of entity ( account , organization , or email ) as a * party . * < / li > * < li > * INVALID _ PAGINATION _ TOKEN : Get the value for the < code > NextToken < / code > parameter from the response to a * previous call of the operation . * < / li > * < li > * INVALID _ PATTERN : You provided a value that doesn ' t match the required pattern . * < / li > * < li > * INVALID _ PATTERN _ TARGET _ ID : You specified a policy target ID that doesn ' t match the required pattern . * < / li > * < li > * INVALID _ ROLE _ NAME : You provided a role name that isn ' t valid . A role name can ' t begin with the reserved * prefix < code > AWSServiceRoleFor < / code > . * < / li > * < li > * INVALID _ SYNTAX _ ORGANIZATION _ ARN : You specified an invalid Amazon Resource Name ( ARN ) for the * organization . * < / li > * < li > * INVALID _ SYNTAX _ POLICY _ ID : You specified an invalid policy ID . * < / li > * < li > * MAX _ FILTER _ LIMIT _ EXCEEDED : You can specify only one filter parameter for the operation . * < / li > * < li > * MAX _ LENGTH _ EXCEEDED : You provided a string parameter that is longer than allowed . 
* < / li > * < li > * MAX _ VALUE _ EXCEEDED : You provided a numeric parameter that has a larger value than allowed . * < / li > * < li > * MIN _ LENGTH _ EXCEEDED : You provided a string parameter that is shorter than allowed . * < / li > * < li > * MIN _ VALUE _ EXCEEDED : You provided a numeric parameter that has a smaller value than allowed . * < / li > * < li > * MOVING _ ACCOUNT _ BETWEEN _ DIFFERENT _ ROOTS : You can move an account only between entities in the same root . * < / li > * @ throws FinalizingOrganizationException * AWS Organizations couldn ' t perform the operation because your organization hasn ' t finished initializing . * This can take up to an hour . Try again later . If after one hour you continue to receive this error , * contact < a href = " https : / / console . aws . amazon . com / support / home # / " > AWS Support < / a > . * @ throws ServiceException * AWS Organizations can ' t complete your request because of an internal service error . Try again later . * @ throws TooManyRequestsException * You ' ve sent too many requests in too short a period of time . The limit helps protect against * denial - of - service attacks . Try again later . < / p > * For information on limits that affect Organizations , see < a * href = " https : / / docs . aws . amazon . com / organizations / latest / userguide / orgs _ reference _ limits . html " > Limits of * AWS Organizations < / a > in the < i > AWS Organizations User Guide < / i > . * @ sample AWSOrganizations . InviteAccountToOrganization * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / organizations - 2016-11-28 / InviteAccountToOrganization " * target = " _ top " > AWS API Documentation < / a > */ @ Override public InviteAccountToOrganizationResult inviteAccountToOrganization ( InviteAccountToOrganizationRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeInviteAccountToOrganization ( request ) ;
public class JsonObject { /** * Returns the < code > boolean < / code > value of the member with the specified name in this object . If * this object does not contain a member with this name , the given default value is returned . If * this object contains multiple members with the given name , the last one will be picked . If this * member ' s value does not represent a JSON < code > true < / code > or < code > false < / code > value , an * exception is thrown . * @ param name * the name of the member whose value is to be returned * @ param defaultValue * the value to be returned if the requested member is missing * @ return the value of the last member with the specified name , or the given default value if * this object does not contain a member with that name */ public boolean getBoolean ( String name , boolean defaultValue ) { } }
JsonValue value = get ( name ) ; return value != null ? value . asBoolean ( ) : defaultValue ;
public class Z85 { /** * Returns NULL and sets errno = EINVAL for invalid input . */ public static byte [ ] decode ( String string ) { } }
if ( string . length ( ) % 5 != 0 ) { return null ; } ByteBuffer buf = ByteBuffer . allocate ( string . length ( ) * 4 / 5 ) ; int byteNbr = 0 ; int charNbr = 0 ; int stringLen = string . length ( ) ; long value = 0 ; while ( charNbr < stringLen ) { // Accumulate value in base 85 value = value * 85 + ( decoder [ string . charAt ( charNbr ++ ) - 32 ] & 0xff ) ; if ( charNbr % 5 == 0 ) { // Output value in base 256 int divisor = 256 * 256 * 256 ; while ( divisor != 0 ) { buf . put ( byteNbr ++ , ( byte ) ( ( value / divisor ) % 256 ) ) ; divisor /= 256 ; } value = 0 ; } } assert ( byteNbr == string . length ( ) * 4 / 5 ) ; return buf . array ( ) ;
public class LoggingLinkListener { /** * Called when the sent message to remoteAddress is failed to be transferred . */ @ Override public void onException ( final Throwable cause , final SocketAddress remoteAddress , final T message ) { } }
if ( LOG . isLoggable ( Level . FINEST ) ) { LOG . log ( Level . FINEST , "Error sending message " + message + " to " + remoteAddress , cause ) ; }
public class FindSerializers { /** * { @ inheritDoc } */ @ Override public JsonSerializer < ? > findSerializer ( SerializationConfig config , JavaType type , BeanDescription beanDesc ) { } }
if ( jsonContext . isSupportedType ( type . getRawClass ( ) ) ) { return createSerializer ( ) ; } return null ;
public class CmsVfsDriver { /** * Reads the URL name mapping entries which match a given filter . < p > * @ param dbc the database context * @ param online if true , reads from the online mapping , else from the offline mapping * @ param filter the filter which the entries to be read should match * @ return the mapping entries which match the given filter * @ throws CmsDataAccessException if something goes wrong */ public List < CmsUrlNameMappingEntry > readUrlNameMappingEntries ( CmsDbContext dbc , boolean online , CmsUrlNameMappingFilter filter ) throws CmsDataAccessException { } }
Connection conn = null ; ResultSet resultSet = null ; PreparedStatement stmt = null ; List < CmsUrlNameMappingEntry > result = new ArrayList < CmsUrlNameMappingEntry > ( ) ; try { conn = m_sqlManager . getConnection ( dbc ) ; String query = m_sqlManager . readQuery ( "C_READ_URLNAME_MAPPINGS" ) ; query = replaceProject ( query , online ) ; stmt = getPreparedStatementForFilter ( conn , query , filter ) ; resultSet = stmt . executeQuery ( ) ; while ( resultSet . next ( ) ) { CmsUrlNameMappingEntry entry = internalCreateUrlNameMappingEntry ( resultSet ) ; result . add ( entry ) ; } return result ; } catch ( SQLException e ) { throw wrapException ( stmt , e ) ; } finally { m_sqlManager . closeAll ( dbc , conn , stmt , resultSet ) ; }
public class JarjarTask { /** * Returns the directory where the archive is generated into . * @ return the directory */ public File getDestinationDir ( ) { } }
File out = destinationDir ; if ( out == null ) out = new File ( getProject ( ) . getBuildDir ( ) , "jarjar" ) ; return out ;
public class JsonWriter { /** * Write attributes for an individual custom field . * Note that at present we are only writing a subset of the * available data . . . in this instance the field alias . * If the field does not have an alias we won ' t write an * entry . * @ param field custom field to write * @ throws IOException */ private void writeCustomField ( CustomField field ) throws IOException { } }
if ( field . getAlias ( ) != null ) { m_writer . writeStartObject ( null ) ; m_writer . writeNameValuePair ( "field_type_class" , field . getFieldType ( ) . getFieldTypeClass ( ) . name ( ) . toLowerCase ( ) ) ; m_writer . writeNameValuePair ( "field_type" , field . getFieldType ( ) . name ( ) . toLowerCase ( ) ) ; m_writer . writeNameValuePair ( "field_alias" , field . getAlias ( ) ) ; m_writer . writeEndObject ( ) ; }
public class QueueEntryRow { /** * For a queue entry consumer state , serialized to byte array , return whether it is processed and committed . */ public static boolean isCommittedProcessed ( byte [ ] stateBytes , Transaction tx ) { } }
long writePointer = Bytes . toLong ( stateBytes , 0 , Longs . BYTES ) ; if ( ! tx . isVisible ( writePointer ) ) { return false ; } byte state = stateBytes [ Longs . BYTES + Ints . BYTES ] ; return state == ConsumerEntryState . PROCESSED . getState ( ) ;
public class NettyUtils { /** * Writes multiple strings to a channelBuffer with the length of the string * preceding its content . So if there are two string < code > Hello < / code > and * < code > World < / code > then the channel buffer returned would contain < Length * of Hello > < Hello as appropriate charset binary > < Length of world > < World as * UTF - 8 binary > * @ param charset * The Charset say ' UTF - 8 ' in which the encoding needs to be * done . * @ param msgs * The messages to be written . * @ return { @ link ChannelBuffer } with format * length - stringbinary - length - stringbinary */ public static ChannelBuffer writeStrings ( Charset charset , String ... msgs ) { } }
ChannelBuffer buffer = null ; for ( String msg : msgs ) { if ( null == buffer ) { buffer = writeString ( msg , charset ) ; } else { ChannelBuffer theBuffer = writeString ( msg , charset ) ; if ( null != theBuffer ) { buffer = ChannelBuffers . wrappedBuffer ( buffer , theBuffer ) ; } } } return buffer ;
public class ConnectorModuleRuntimeContainer { /** * Unregister listeners for configurations processed by the metatype provider * @ param id resource adapter id */ private void removeServiceListeners ( String id ) { } }
final ServiceListener [ ] listeners = serviceListeners . remove ( id ) ; if ( listeners != null ) for ( ServiceListener listener : listeners ) if ( listener != null ) { lock . readLock ( ) . lock ( ) ; try { if ( bundleContext != null ) bundleContext . removeServiceListener ( listener ) ; } finally { lock . readLock ( ) . unlock ( ) ; } }
public class SearchIndex {

    /**
     * Invokes all recovery filters, lazily instantiating them on first use.
     * <p>
     * When no filter classes are configured, a default
     * {@link DocNumberRecoveryFilter} is registered. Each filter class must
     * expose a constructor taking a {@link SearchIndex}; any reflection
     * failure is wrapped in a {@link RepositoryException}.
     *
     * @return true if any filter requires reindexing
     * @throws RepositoryException if a filter cannot be instantiated
     */
    @SuppressWarnings("unchecked")
    private boolean isIndexRecoveryRequired() throws RepositoryException {
        if (recoveryFilters == null) {
            recoveryFilters = new ArrayList<AbstractRecoveryFilter>();
            log.info("Initializing RecoveryFilters.");
            // Fall back to the default filter when none is configured.
            if (recoveryFilterClasses.isEmpty()) {
                this.recoveryFilterClasses.add(DocNumberRecoveryFilter.class.getName());
            }
            for (String filterClassName : recoveryFilterClasses) {
                try {
                    Class<? extends AbstractRecoveryFilter> filterClass =
                            (Class<? extends AbstractRecoveryFilter>) ClassLoading.forName(filterClassName, this);
                    Constructor<? extends AbstractRecoveryFilter> ctor =
                            filterClass.getConstructor(SearchIndex.class);
                    recoveryFilters.add(ctor.newInstance(this));
                } catch (ClassNotFoundException e) {
                    throw new RepositoryException(e.getMessage(), e);
                } catch (IllegalArgumentException e) {
                    throw new RepositoryException(e.getMessage(), e);
                } catch (InstantiationException e) {
                    throw new RepositoryException(e.getMessage(), e);
                } catch (IllegalAccessException e) {
                    throw new RepositoryException(e.getMessage(), e);
                } catch (InvocationTargetException e) {
                    throw new RepositoryException(e.getMessage(), e);
                } catch (SecurityException e) {
                    throw new RepositoryException(e.getMessage(), e);
                } catch (NoSuchMethodException e) {
                    throw new RepositoryException(e.getMessage(), e);
                }
            }
        }
        // Run the filters; the first one that accepts triggers a reindex.
        for (AbstractRecoveryFilter filter : recoveryFilters) {
            if (filter.accept()) {
                return true;
            }
        }
        return false;
    }
}
public class CpuEventViewer { /** * Operations */ public void drawOpCompleted ( GenericTabItem tab , TraceCPU cpu , TraceThread thread , TraceObject destinationObj , TraceOperation operation ) { } }
TraceObject currentObj = thread . getCurrentObject ( ) ; updateObject ( tab , currentObj ) ; if ( currentObj . getId ( ) == destinationObj . getId ( ) ) { // Internal object operation updateObject ( tab , currentObj ) ; Long x1 = currentObj . getX ( ) ; Long x2 = x1 ; Long y1 = tab . getYMax ( ) ; Long y2 = y1 + ELEMENT_SIZE ; NormalLabel lbl = new NormalLabel ( "C" , tab . getCurrentFont ( ) ) ; ; String operationLabel = " Completed " + operation . getName ( ) + " on object " + currentObj . getId ( ) ; NormalLabel ttl = new NormalLabel ( operationLabel , tab . getCurrentFont ( ) ) ; Point pt = new Point ( x1 . intValue ( ) + 8 , y1 . intValue ( ) + 2 ) ; drawMarker ( tab , x1 , y1 , x2 , y2 , ColorConstants . blue ) ; lbl . setToolTip ( ttl ) ; lbl . setLocation ( pt ) ; tab . addFigure ( lbl ) ; } else { updateObject ( tab , destinationObj ) ; drawObjectArrow ( tab , destinationObj , currentObj , new String ( "" ) ) ; }
public class SnowballProgram { /** * Set the current string . */ public void setCurrent ( String value ) { } }
current . replace ( 0 , current . length ( ) , value ) ; cursor = 0 ; limit = current . length ( ) ; limit_backward = 0 ; bra = cursor ; ket = limit ;
public class AmazonPollyClient { /** * Retrieves a specific SpeechSynthesisTask object based on its TaskID . This object contains information about the * given speech synthesis task , including the status of the task , and a link to the S3 bucket containing the output * of the task . * @ param getSpeechSynthesisTaskRequest * @ return Result of the GetSpeechSynthesisTask operation returned by the service . * @ throws InvalidTaskIdException * The provided Task ID is not valid . Please provide a valid Task ID and try again . * @ throws ServiceFailureException * An unknown condition has caused a service failure . * @ throws SynthesisTaskNotFoundException * The Speech Synthesis task with requested Task ID cannot be found . * @ sample AmazonPolly . GetSpeechSynthesisTask * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / polly - 2016-06-10 / GetSpeechSynthesisTask " target = " _ top " > AWS * API Documentation < / a > */ @ Override public GetSpeechSynthesisTaskResult getSpeechSynthesisTask ( GetSpeechSynthesisTaskRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetSpeechSynthesisTask ( request ) ;
public class EventSubscriptionsInner {

    /**
     * Lists all regional event subscriptions under an Azure subscription and
     * resource group for the given location.
     *
     * @param resourceGroupName the name of the resource group within the user's subscription
     * @param location name of the location
     * @throws IllegalArgumentException if parameters fail validation
     * @return an observable emitting the list of {@code EventSubscriptionInner} objects
     */
    public Observable<List<EventSubscriptionInner>> listRegionalByResourceGroupAsync(String resourceGroupName, String location) {
        // Unwrap the ServiceResponse envelope, emitting only the response body.
        return listRegionalByResourceGroupWithServiceResponseAsync(resourceGroupName, location).map(new Func1<ServiceResponse<List<EventSubscriptionInner>>, List<EventSubscriptionInner>>() {
            @Override
            public List<EventSubscriptionInner> call(ServiceResponse<List<EventSubscriptionInner>> response) {
                return response.body();
            }
        });
    }
}
public class JobService { /** * Checks all run locks and releases the lock , if the job is stopped . * TODO : This method should never do something , otherwise the is a bug in the lock handling . * TODO : Check Log files + Remove */ private void clearRunLocks ( ) { } }
jobMetaService . runningJobs ( ) . forEach ( ( RunningJob runningJob ) -> { final Optional < JobInfo > jobInfoOptional = jobRepository . findOne ( runningJob . jobId ) ; if ( jobInfoOptional . isPresent ( ) && jobInfoOptional . get ( ) . isStopped ( ) ) { jobMetaService . releaseRunLock ( runningJob . jobType ) ; LOG . error ( "Clear Lock of Job {}. Job stopped already." , runningJob . jobType ) ; } else if ( ! jobInfoOptional . isPresent ( ) ) { jobMetaService . releaseRunLock ( runningJob . jobType ) ; LOG . error ( "Clear Lock of Job {}. JobID does not exist" , runningJob . jobType ) ; } } ) ;
public class ControlMessageImpl { /** * Set the value of the Reliability field in the message header . * Javadoc description supplied by ControlMessage interface . */ public final void setReliability ( Reliability value ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( this , tc , "setReliability to " + value ) ; /* Get the int value of the Reliability instance and set it in the field */ jmo . setField ( ControlAccess . RELIABILITY , value . toByte ( ) ) ;
public class DefaultUsageFormatter {

    /**
     * Wraps a potentially long line to the commander's column size.
     *
     * @param out the output buffer
     * @param indent the indentation in spaces for lines after the first line
     * @param description the text to wrap. No extra spaces are inserted before
     *        {@code description}; if the first line needs indenting, prepend
     *        the correct number of spaces to {@code description} yourself.
     * @see #wrapDescription(StringBuilder, int, int, String)
     */
    public void wrapDescription(StringBuilder out, int indent, String description) {
        // Delegate with a current-line position of 0 (first line starts at column zero).
        wrapDescription(out, indent, 0, description);
    }
}
public class PixelMath {

    /**
     * Bounds (clamps) the pixels of the image to lie between the two values.
     * The image is modified in place; the work is delegated to the
     * implementation class.
     *
     * @param img image to bound, modified in place
     * @param min minimum value
     * @param max maximum value
     */
    public static void boundImage(GrayF64 img, double min, double max) {
        ImplPixelMath.boundImage(img, min, max);
    }
}
public class MessageProcessor { /** * Returns the destinationLocationManager . * @ return DestinationLocationManager */ public DestinationLocationManager getDestinationLocationManager ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , "getDestinationLocationManager" ) ; SibTr . exit ( tc , "getDestinationLocationManager" , _destinationLocationManager ) ; } return _destinationLocationManager ;
public class DataUnformatFilter {

    /**
     * Filters a start-element event.
     * <p>
     * Any buffered inter-element whitespace is discarded, the enclosing
     * element's state is pushed (recording that it has now seen a child
     * element), and the state for the new element is reset before the event
     * is forwarded down the chain.
     *
     * @param uri the element's Namespace URI
     * @param localName the element's local name
     * @param qName the element's qualified (prefixed) name
     * @param atts the element's attribute list
     * @exception org.xml.sax.SAXException if a filter further down the chain raises an exception
     * @see org.xml.sax.ContentHandler#startElement
     */
    public void startElement(String uri, String localName, String qName, Attributes atts) throws SAXException {
        // Drop pending whitespace that belonged between elements.
        clearWhitespace();
        // Parent has seen a child element; remember that for when it closes.
        stateStack.push(SEEN_ELEMENT);
        // Fresh element: nothing seen inside it yet.
        state = SEEN_NOTHING;
        super.startElement(uri, localName, qName, atts);
    }
}
public class LogResource { /** * - - - - - private methods - - - - - */ private void collectFilesAndStore ( final Context context , final Path dir , final int level ) throws FrameworkException { } }
if ( level == 1 ) { logger . info ( "Path {}" , dir ) ; } try ( final DirectoryStream < Path > stream = Files . newDirectoryStream ( dir ) ) { for ( final Path p : stream ) { if ( Files . isDirectory ( p ) ) { collectFilesAndStore ( context , p , level + 1 ) ; } else { context . update ( storeLogEntry ( p ) ) ; // update object count and commit context . commit ( true ) ; } Files . delete ( p ) ; } } catch ( IOException ioex ) { logger . warn ( "" , ioex ) ; }
public class ConsulRegistry {

    /**
     * Updates the local service cache for {@code serviceName} and, when the
     * URL list changed and {@code needNotify} is set, notifies listeners.
     * <p>
     * NOTE(review): {@code urls} is captured BEFORE the cache is seeded for a
     * previously unknown service, so on the first sighting of a service every
     * entry is treated as changed (change stays true). On subsequent calls an
     * entry only counts as changed when its new URL list is non-empty and
     * differs from the previously cached list. Notification failures are
     * handed off to {@code notifyExecutor} asynchronously.
     *
     * @param serviceName the service whose cache entry is refreshed
     * @param serviceUrls map of service name to discovered URLs (from Consul)
     * @param needNotify whether changed entries should trigger notifications
     */
    private void updateServiceCache(String serviceName, ConcurrentHashMap<String, List<URL>> serviceUrls, boolean needNotify) {
        if (serviceUrls != null && !serviceUrls.isEmpty()) {
            // Snapshot of the previously cached URLs (null on first sighting).
            List<URL> urls = serviceCache.get(serviceName);
            if (urls == null) {
                if (logger.isDebugEnabled()) {
                    try {
                        logger.debug("serviceUrls = " + Config.getInstance().getMapper().writeValueAsString(serviceUrls));
                    } catch (Exception e) {
                        // Best-effort debug logging only; serialization failures are ignored.
                    }
                }
                // Seed the cache for this service.
                serviceCache.put(serviceName, serviceUrls.get(serviceName));
            }
            for (Map.Entry<String, List<URL>> entry : serviceUrls.entrySet()) {
                boolean change = true;
                if (urls != null) {
                    List<URL> newUrls = entry.getValue();
                    // Unchanged when empty or identical to the cached snapshot.
                    if (newUrls == null || newUrls.isEmpty() || ConsulUtils.isSame(newUrls, urls)) {
                        change = false;
                    } else {
                        serviceCache.put(serviceName, newUrls);
                    }
                }
                if (change && needNotify) {
                    notifyExecutor.execute(new NotifyService(entry.getKey(), entry.getValue()));
                    logger.info("light service notify-service: " + entry.getKey());
                    StringBuilder sb = new StringBuilder();
                    for (URL url : entry.getValue()) {
                        sb.append(url.getUri()).append(";");
                    }
                    logger.info("consul notify urls:" + sb.toString());
                }
            }
        }
    }
}
public class AWSIotClient { /** * Updates the event configurations . * @ param updateEventConfigurationsRequest * @ return Result of the UpdateEventConfigurations operation returned by the service . * @ throws InvalidRequestException * The request is not valid . * @ throws InternalFailureException * An unexpected error has occurred . * @ throws ThrottlingException * The rate exceeds the limit . * @ sample AWSIot . UpdateEventConfigurations */ @ Override public UpdateEventConfigurationsResult updateEventConfigurations ( UpdateEventConfigurationsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeUpdateEventConfigurations ( request ) ;
public class CDXFlexFormat { /** * Single place to do the flex cdx - line parsing logic */ public static CaptureSearchResult parseCDXLineFlex ( String line ) { } }
CaptureSearchResult result = new CaptureSearchResult ( ) ; return parseCDXLineFlex ( line , result ) ;
public class RetryingUnaryOperation {

    /**
     * Handles a stream that completed with OK status.
     * <p>
     * A unary call must have produced exactly one value by the time OK
     * arrives; if none was received the completion future is failed with
     * {@code NO_VALUE_SET_EXCEPTION}. Always returns true — presumably
     * signalling the operation is finished; verify against the base class
     * contract.
     */
    @Override
    protected boolean onOK(Metadata trailers) {
        if (value == null) {
            // No value received so mark the future as an error
            completionFuture.setException(NO_VALUE_SET_EXCEPTION);
        }
        return true;
    }
}
public class AlbumUtils { /** * Take picture . * @ param activity activity . * @ param requestCode code , see { @ link Activity # onActivityResult ( int , int , Intent ) } . * @ param outPath file path . */ public static void takeImage ( @ NonNull Activity activity , int requestCode , File outPath ) { } }
Intent intent = new Intent ( MediaStore . ACTION_IMAGE_CAPTURE ) ; Uri uri = getUri ( activity , outPath ) ; intent . putExtra ( MediaStore . EXTRA_OUTPUT , uri ) ; intent . addFlags ( Intent . FLAG_GRANT_READ_URI_PERMISSION ) ; intent . addFlags ( Intent . FLAG_GRANT_WRITE_URI_PERMISSION ) ; activity . startActivityForResult ( intent , requestCode ) ;
public class Stripe { /** * Blocking method to create a { @ link Token } using a { @ link BankAccount } . Do not call this on * the UI thread or your app will crash . * @ param bankAccount the { @ link BankAccount } to use for this token * @ param publishableKey the publishable key to use with this request * @ return a { @ link Token } that can be used for this { @ link BankAccount } * @ throws AuthenticationException failure to properly authenticate yourself ( check your key ) * @ throws InvalidRequestException your request has invalid parameters * @ throws APIConnectionException failure to connect to Stripe ' s API * @ throws CardException should not be thrown with this type of token , but is theoretically * possible given the underlying methods called * @ throws APIException any other type of problem */ public Token createBankAccountTokenSynchronous ( final BankAccount bankAccount , String publishableKey ) throws AuthenticationException , InvalidRequestException , APIConnectionException , CardException , APIException { } }
validateKey ( publishableKey ) ; RequestOptions requestOptions = RequestOptions . builder ( publishableKey , mStripeAccount , RequestOptions . TYPE_QUERY ) . build ( ) ; return mApiHandler . createToken ( mStripeNetworkUtils . hashMapFromBankAccount ( bankAccount ) , requestOptions , Token . TYPE_BANK_ACCOUNT ) ;
public class LaunchNowOption { /** * Register parameter for job . < br > * Job can get from runtime as parameter map . * @ param key The key of parameter . ( NotNull ) * @ param value The value as parameter . ( NullAllowed : parameter value can be null ) * @ return this . ( NotNull ) */ public LaunchNowOption param ( String key , Object value ) { } }
if ( key == null ) { throw new IllegalArgumentException ( "The argument 'key' should not be null." ) ; } if ( parameterMap == null ) { parameterMap = new LinkedHashMap < String , Object > ( ) ; } parameterMap . put ( key , value ) ; return this ;
public class Instance { /** * The list of EBS volumes that are attached to this instance . * @ param ebsVolumes * The list of EBS volumes that are attached to this instance . */ public void setEbsVolumes ( java . util . Collection < EbsVolume > ebsVolumes ) { } }
if ( ebsVolumes == null ) { this . ebsVolumes = null ; return ; } this . ebsVolumes = new com . amazonaws . internal . SdkInternalList < EbsVolume > ( ebsVolumes ) ;
public class WorkerInfo {

    /**
     * Returns the map view of the protobuf field
     * {@code map<string, int64> usedBytesOnTiers = 9;} — tier name to used
     * bytes on that tier.
     */
    public java.util.Map<java.lang.String, java.lang.Long> getUsedBytesOnTiersMap() {
        return internalGetUsedBytesOnTiers().getMap();
    }
}
public class EasyRecyclerAdapter { /** * Add a single item and refresh the { @ code RecyclerView } by calling * { @ code notifyItemInserted ( ) } . * @ param item item to add */ public void addItem ( T item ) { } }
mListItems . add ( item ) ; notifyItemInserted ( mListItems . indexOf ( item ) ) ;
public class Cursor {

    /**
     * Resets this cursor to iterate over the given tree between the provided
     * bounds.
     *
     * @param btree the tree to iterate over
     * @param comparator the comparator defining the ordering of items in the tree
     * @param lowerBound the first item to include, inclusive
     * @param upperBound the last item to include, exclusive
     * @param forwards if false, the cursor starts at the end and moves backwards
     */
    public void reset(Object[] btree, Comparator<K> comparator, K lowerBound, K upperBound, boolean forwards) {
        // The two boolean literals encode bound inclusivity:
        // lowerBound inclusive (true), upperBound exclusive (false).
        _reset(btree, comparator, lowerBound, true, upperBound, false, forwards);
    }
}
public class MemdbImpl { @ Override public boolean remove ( String key ) { } }
String value = this . data . remove ( key ) ; return ( value != null ) ;
public class Persistence { /** * Looks up a ParametricStatement by its name . */ public ParametricStatement getParametricStatement ( String name ) { } }
ParametricStatement statement = _statements . get ( name ) ; if ( statement != null ) { return statement ; } else { throw new RuntimeException ( "ParametricStatement '" + name + "' not found" ) ; }
public class DiscreteFourierTransformOps { /** * Checks to see if the image and its transform are appropriate sizes . The transform should have * twice the width and twice the height as the image . * @ param image Storage for an image * @ param transform Storage for a Fourier Transform */ public static void checkImageArguments ( ImageBase image , ImageInterleaved transform ) { } }
InputSanityCheck . checkSameShape ( image , transform ) ; if ( 2 != transform . getNumBands ( ) ) throw new IllegalArgumentException ( "The transform must have two bands" ) ;
public class JPAExEmInvocation {

    /**
     * Acquires the requested lock mode on the entity by delegating to the
     * wrapped EntityManager.
     *
     * @see javax.persistence.EntityManager#lock(java.lang.Object, javax.persistence.LockModeType)
     */
    @Override
    public void lock(Object entity, LockModeType lockMode) {
        ivEm.lock(entity, lockMode);
    }
}
public class TypeInformationKeyValueSerializationSchema { @ Override public Tuple2 < K , V > deserialize ( ConsumerRecord < byte [ ] , byte [ ] > record ) throws Exception { } }
K key = null ; V value = null ; if ( record . key ( ) != null ) { inputDeserializer . setBuffer ( record . key ( ) ) ; key = keySerializer . deserialize ( inputDeserializer ) ; } if ( record . value ( ) != null ) { inputDeserializer . setBuffer ( record . value ( ) ) ; value = valueSerializer . deserialize ( inputDeserializer ) ; } return new Tuple2 < > ( key , value ) ;
public class TypeCasting { /** * Creates a casting to the given , non - primitive type . * @ param typeDefinition The type to which a value should be casted . * @ return A stack manipulation that represents the casting . */ public static StackManipulation to ( TypeDefinition typeDefinition ) { } }
if ( typeDefinition . isPrimitive ( ) ) { throw new IllegalArgumentException ( "Cannot cast to primitive type: " + typeDefinition ) ; } return new TypeCasting ( typeDefinition . asErasure ( ) ) ;
public class Grefenstette {

    /**
     * Checks whether the tag can modify another word: nouns (NN*),
     * adjectives (JJ*), adverbs (RB*) and cardinal numbers (CD*).
     *
     * @param tag a tag from the parsed corpus to be checked
     * @return true if the tag belongs to the modifier start set
     */
    private boolean inStartSet(String tag) {
        for (String prefix : new String[] {"NN", "JJ", "RB", "CD"}) {
            if (tag.startsWith(prefix)) {
                return true;
            }
        }
        return false;
    }
}
public class InternalSimpleAntlrParser {

    /**
     * ANTLR-generated entry rule — do not hand-edit; regenerate from the grammar.
     * InternalSimpleAntlr.g:1309:1: entryRulePredicated returns [EObject current = null] :
     *     iv_rulePredicated= rulePredicated EOF ;
     */
    public final EObject entryRulePredicated() throws RecognitionException {
        EObject current = null;
        EObject iv_rulePredicated = null;
        try {
            // InternalSimpleAntlr.g:1310:2: ( iv_rulePredicated= rulePredicated EOF )
            // InternalSimpleAntlr.g:1311:2: iv_rulePredicated= rulePredicated EOF
            {
                if (state.backtracking == 0) {
                    newCompositeNode(grammarAccess.getPredicatedRule());
                }
                pushFollow(FOLLOW_1);
                iv_rulePredicated = rulePredicated();
                state._fsp--;
                // Bail out early on parse failure (also during backtracking).
                if (state.failed) return current;
                if (state.backtracking == 0) {
                    current = iv_rulePredicated;
                }
                // The entry rule must consume the whole input up to EOF.
                match(input, EOF, FOLLOW_2);
                if (state.failed) return current;
            }
        } catch (RecognitionException re) {
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
public class ThrottledApiHandler {

    /**
     * Retrieves rune pages for multiple users.
     *
     * @param ids the ids of the users
     * @return a future resolving to a map from user id to that user's rune pages
     * @see <a href=https://developer.riotgames.com/api/methods#!/620/1932>Official API documentation</a>
     */
    public Future<Map<Integer, Set<RunePage>>> getRunePagesMultipleUsers(int... ids) {
        // Defer the underlying handler call so it runs under this throttled facade.
        return new ApiFuture<>(() -> handler.getRunePagesMultipleUsers(ids));
    }
}
public class ToArray {

    /**
     * Custom deserialization hook: restores the component type reference
     * from the stream.
     *
     * @param in source to read from
     */
    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException, NoSuchMethodException {
        componentType = ClassRef.read(in);
    }
}
public class Config { /** * Retrieve a < code > long < / code > . If the key does not exist or * cannot be converted to a < code > long < / code > , the provided default * argument will be returned . */ public static long getLong ( Properties props , String key , long def ) { } }
String s = props . getProperty ( key ) ; if ( s != null ) { try { def = Long . parseLong ( s ) ; } catch ( NumberFormatException nfe ) { log . error ( "Not a number" , nfe ) ; } } return def ;
public class AbstractWALDAO { /** * Modify the created document by e . g . adding some comment or digital * signature or whatsoever . * @ param aDoc * The created non - < code > null < / code > document . */ @ OverrideOnDemand @ MustBeLocked ( ELockType . WRITE ) protected void modifyWriteData ( @ Nonnull final IMicroDocument aDoc ) { } }
final IMicroComment aComment = new MicroComment ( "This file was generated automatically - do NOT modify!\n" + "Written at " + PDTToString . getAsString ( ZonedDateTime . now ( Clock . systemUTC ( ) ) , Locale . US ) ) ; final IMicroElement eRoot = aDoc . getDocumentElement ( ) ; // Add a small comment if ( eRoot != null ) aDoc . insertBefore ( aComment , eRoot ) ; else aDoc . appendChild ( aComment ) ;
public class VertexLookupContext { /** * Adds an instance to be loaded . */ public void addInstance ( IReferenceableInstance instance ) throws AtlasException { } }
ClassType classType = typeSystem . getDataType ( ClassType . class , instance . getTypeName ( ) ) ; ITypedReferenceableInstance newInstance = classType . convert ( instance , Multiplicity . REQUIRED ) ; findReferencedInstancesToPreLoad ( newInstance ) ; Id id = instance . getId ( ) ; if ( mapper . lookupVertex ( id ) == null ) { if ( id . isAssigned ( ) ) { guidsToLookup . add ( id ) ; } else { addToClassMap ( classType , instance ) ; } }
public class ParseException { /** * Creates a new exception based on the list of errors . * @ param errors the errors which occurred while processing the user input * @ return a new ParseException which can be thrown */ public static ParseException create ( List < ParseError > errors ) { } }
if ( errors . size ( ) == 1 ) { return new ParseException ( errors . get ( 0 ) . getMessage ( ) , errors ) ; } else if ( errors . size ( ) > 1 ) { return new ParseException ( String . format ( "%d errors occured. First: %s" , errors . size ( ) , errors . get ( 0 ) . getMessage ( ) ) , errors ) ; } else { return new ParseException ( "An unknown error occured" , errors ) ; }
public class BatchWriteItemRequestMarshaller {

    /**
     * Marshalls the given request object via the protocol marshaller.
     * Generated SDK marshaller: each field is bound individually; any failure
     * is wrapped in an {@code SdkClientException}.
     *
     * @param batchWriteItemRequest request to marshall, must not be null
     * @param protocolMarshaller target protocol marshaller
     */
    public void marshall(BatchWriteItemRequest batchWriteItemRequest, ProtocolMarshaller protocolMarshaller) {
        if (batchWriteItemRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(batchWriteItemRequest.getRequestItems(), REQUESTITEMS_BINDING);
            protocolMarshaller.marshall(batchWriteItemRequest.getReturnConsumedCapacity(), RETURNCONSUMEDCAPACITY_BINDING);
            protocolMarshaller.marshall(batchWriteItemRequest.getReturnItemCollectionMetrics(), RETURNITEMCOLLECTIONMETRICS_BINDING);
        } catch (Exception e) {
            // Wrap every marshalling failure uniformly for SDK callers.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class LoadBalancerPoolService { /** * Update load balancer pools for group * @ param loadBalancer load balancer pool * @ param configs load balancer pool configs * @ return OperationFuture wrapper for load balancer pool */ public OperationFuture < LoadBalancer > update ( LoadBalancer loadBalancer , List < LoadBalancerPoolConfig > configs ) { } }
List < LoadBalancerPoolMetadata > poolsForGroup = find ( new LoadBalancerPoolFilter ( ) . loadBalancers ( loadBalancer ) ) ; return new OperationFuture < > ( loadBalancer , new ParallelJobsFuture ( configs . stream ( ) . map ( poolConfig -> createOrUpdate ( poolConfig , loadBalancer , poolsForGroup ) ) . map ( OperationFuture :: jobFuture ) . collect ( toList ( ) ) ) ) ;
public class Log4jConfigHelper { /** * Update the log4j configuration . * @ param targetClass the target class used to get the original log4j configuration file as a resource * @ param log4jFileName the custom log4j configuration properties file name * @ throws IOException if there ' s something wrong with updating the log4j configuration */ public static void updateLog4jConfiguration ( Class < ? > targetClass , String log4jFileName ) throws IOException { } }
final Closer closer = Closer . create ( ) ; try { final InputStream inputStream = closer . register ( targetClass . getResourceAsStream ( "/" + log4jFileName ) ) ; final Properties originalProperties = new Properties ( ) ; originalProperties . load ( inputStream ) ; LogManager . resetConfiguration ( ) ; PropertyConfigurator . configure ( originalProperties ) ; } catch ( Throwable t ) { throw closer . rethrow ( t ) ; } finally { closer . close ( ) ; }
public class RangeUtils {

    /**
     * Returns the {@code Range<Date>} covering the whole period that contains
     * the given date, at the given granularity.
     * <p>
     * For any period other than DAY, the calendar walks forward one day at a
     * time while the period value matches, capturing the last matching day as
     * the end date; it then resets to the original date and walks backwards
     * the same way to find the start date. For DAY, start and end are both
     * the original date.
     * <p>
     * NOTE(review): the mutated calendar is added to {@code calendarCache}
     * afterwards — presumably a reuse pool; confirm its contract elsewhere.
     *
     * @param date the date the range must contain
     * @param period the range granularity
     * @return the date range for the period containing {@code date}
     */
    public static Range<Date> getDatePeriod(final Date date, final Period period) {
        Calendar calendar = buildCalendar(date);
        Range<Date> dateRange = null;
        Date startDate = calendar.getTime();
        Date endDate = calendar.getTime();
        if (period != Period.DAY) {
            // Scan forward: last day whose period value still matches is the end.
            for (; period.getValue(date) == period.getValue(calendar.getTime()); calendar.add(DAY_OF_MONTH, 1)) {
                endDate = calendar.getTime();
            }
            calendar.setTime(date);
            // Scan backward: first day whose period value still matches is the start.
            for (; period.getValue(date) == period.getValue(calendar.getTime()); calendar.add(DAY_OF_MONTH, -1)) {
                startDate = calendar.getTime();
            }
        }
        calendarCache.add(calendar);
        dateRange = Range.getInstance(startDate, endDate);
        return dateRange;
    }
}
public class StripeApiHandler {

    /**
     * Retrieves a {@link PaymentIntent} using the provided
     * {@link PaymentIntentParams}.
     * <p>
     * Returns null when the resolved API key is blank. Telemetry and API-call
     * logging are emitted before the network request. A {@link CardException}
     * from the transport layer is unexpected for this endpoint and is
     * re-wrapped as an {@link APIException}.
     *
     * @param paymentIntentParams contains the retrieval params
     * @param publishableKey an API key
     * @param stripeAccount a connected Stripe Account ID
     */
    @Nullable
    PaymentIntent retrievePaymentIntent(@NonNull PaymentIntentParams paymentIntentParams, @NonNull String publishableKey, @Nullable String stripeAccount)
            throws AuthenticationException, InvalidRequestException, APIConnectionException, APIException {
        final Map<String, Object> paramMap = paymentIntentParams.toParamMap();
        final RequestOptions options = RequestOptions.builder(publishableKey, stripeAccount, RequestOptions.TYPE_QUERY).build();
        try {
            final String apiKey = options.getPublishableApiKey();
            if (StripeTextUtils.isBlank(apiKey)) {
                // No usable key: silently report "not found" rather than erroring.
                return null;
            }
            logTelemetryData();
            final Map<String, Object> loggingParams = mLoggingUtils.getPaymentIntentRetrieveParams(null, apiKey);
            final RequestOptions loggingOptions = RequestOptions.builder(publishableKey).build();
            logApiCall(loggingParams, loggingOptions);
            final String paymentIntentId = PaymentIntent.parseIdFromClientSecret(paymentIntentParams.getClientSecret());
            final StripeResponse response = requestData(RequestExecutor.RestMethod.GET, getRetrievePaymentIntentUrl(paymentIntentId), paramMap, options);
            return PaymentIntent.fromString(response.getResponseBody());
        } catch (CardException unexpected) {
            // This kind of exception should not be possible from a PaymentIntent API endpoint.
            throw new APIException(unexpected.getMessage(), unexpected.getRequestId(), unexpected.getStatusCode(), null, unexpected);
        }
    }
}
public class ByteUtils {

    /**
     * Right-justifies a byte array within the given width by padding zero
     * bytes on the left. If the input is already at least {@code width} bytes
     * long, only its right-most {@code width} bytes are returned.
     *
     * @param bytes a byte array
     * @param width width of the new byte array
     * @return a new byte array of exactly {@code width} bytes
     */
    public static byte[] rjust(byte[] bytes, int width) {
        final int length = bytes.length;
        if (length >= width) {
            // Already wide enough: keep only the right-most `width` bytes.
            return Arrays.copyOfRange(bytes, length - width, length);
        }
        final byte[] padded = new byte[width]; // zero-filled by default
        System.arraycopy(bytes, 0, padded, width - length, length);
        return padded;
    }
}
public class CommerceAccountLocalServiceBaseImpl {

    /**
     * Returns the number of rows matching the dynamic query.
     * Generated service-base delegation to the persistence layer.
     *
     * @param dynamicQuery the dynamic query
     * @param projection the projection to apply to the query
     * @return the number of rows matching the dynamic query
     */
    @Override
    public long dynamicQueryCount(DynamicQuery dynamicQuery, Projection projection) {
        return commerceAccountPersistence.countWithDynamicQuery(dynamicQuery, projection);
    }
}
public class SynchroData { /** * Read the version number . * @ param is input stream */ private void readVersion ( InputStream is ) throws IOException { } }
BytesReadInputStream bytesReadStream = new BytesReadInputStream ( is ) ; String version = DatatypeConverter . getString ( bytesReadStream ) ; m_offset += bytesReadStream . getBytesRead ( ) ; SynchroLogger . log ( "VERSION" , version ) ; String [ ] versionArray = version . split ( "\\." ) ; m_majorVersion = Integer . parseInt ( versionArray [ 0 ] ) ;
public class Neighbour {

    /**
     * Removes all Subscriptions that are no longer registered.
     *
     * Loops through all the ME Subscriptions for this Neighbour and finds the
     * ones that still contain the reset mark and removes them.
     *
     * @param topicSpaces The list of topicSpaces to add the deletes to
     * @param topics The list of topics to add the deletes to
     * @param transaction The transaction under which removals are performed
     * @param okToForward Whether to add the topics to the lists or not
     */
    void sweepMarkedProxies(List topicSpaces, List topics, Transaction transaction, boolean okToForward)
            throws SIResourceException, SIException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "sweepMarkedProxies",
                    new Object[] { topicSpaces, topics, transaction, new Boolean(okToForward) });

        // Get the list of proxies for this Neighbour
        final Enumeration enu = iProxies.elements();

        // Cycle through each of the proxies
        while (enu.hasMoreElements()) {
            final MESubscription sub = (MESubscription) enu.nextElement();

            // If the subscription is still marked (i.e. was not re-registered
            // since the reset), then remove it
            if (sub.isMarked()) {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(tc, "Subscription " + sub + " being removed");

                // Remove the proxy from the list.
                proxyDeregistered(sub.getTopicSpaceUuid(), sub.getTopic(), transaction);

                // Remove this Subscription from the
                // match space and the item stream on which they are
                // stored.
                final boolean proxyDeleted =
                        iNeighbours.deleteProxy(
                                iDestinationManager.getDestinationInternal(sub.getTopicSpaceUuid(), false),
                                sub, this, sub.getTopicSpaceUuid(), sub.getTopic(), true, false);

                // Generate the key to remove the subscription from.
                final String key = BusGroup.subscriptionKey(sub.getTopicSpaceUuid(), sub.getTopic());

                // Remove the proxy from the list
                iProxies.remove(key);

                // Add the details to the list of topics/topicSpaces to be
                // deleted -- only when forwarding is allowed AND the proxy was
                // actually deleted.
                if (okToForward && proxyDeleted) {
                    topics.add(sub.getTopic());
                    topicSpaces.add(sub.getTopicSpaceUuid());
                }
            }
        }

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "sweepMarkedProxies");
    }
}
public class MemcachedClient { /** * Get the addresses of unavailable servers . * This is based on a snapshot in time so shouldn ' t be considered completely * accurate , but is a useful for getting a feel for what ' s working and what ' s * not working . * @ return point - in - time view of currently available servers */ @ Override public Collection < SocketAddress > getUnavailableServers ( ) { } }
ArrayList < SocketAddress > rv = new ArrayList < SocketAddress > ( ) ; for ( MemcachedNode node : mconn . getLocator ( ) . getAll ( ) ) { if ( ! node . isActive ( ) ) { rv . add ( node . getSocketAddress ( ) ) ; } } return rv ;
public class JsonUtf8Writer {

    /**
     * Enters a new scope by appending any necessary whitespace and the given
     * bracket.
     *
     * @param empty the scope constant to push (e.g. empty-array/empty-object)
     * @param openBracket the literal bracket text to write ("[" or "{")
     * @return this writer, for chaining
     */
    private JsonWriter open(int empty, String openBracket) throws IOException {
        // Emit any separator/indentation required before a new value.
        beforeValue();
        // Record the new scope on the writer's internal stack.
        pushScope(empty);
        // Reset the JSON-path index for the scope just pushed.
        pathIndices[stackSize - 1] = 0;
        sink.writeUtf8(openBracket);
        return this;
    }
}
public class TzdbZoneRulesCompiler {

    /**
     * Process to create the jar files.
     *
     * Compiles each source directory (one TZDB version per directory),
     * optionally writing a per-version jar, then writes a combined output
     * (either unpacked dat files or a single merged jar).
     *
     * @param srcDirs directories to compile, one per TZDB version
     * @param srcFileNames candidate source file names looked up in each directory
     * @param dstDir destination directory for the output file(s)
     * @param unpacked true to write raw dat files instead of jars
     * @param verbose true to print progress information
     */
    private static void process(List<File> srcDirs, List<String> srcFileNames, File dstDir,
            boolean unpacked, boolean verbose) {
        // build actual jar files
        // Shared across versions so identical objects are deduplicated.
        Map<Object, Object> deduplicateMap = new HashMap<Object, Object>();
        Map<String, SortedMap<String, ZoneRules>> allBuiltZones =
                new TreeMap<String, SortedMap<String, ZoneRules>>();
        Set<String> allRegionIds = new TreeSet<String>();
        Set<ZoneRules> allRules = new HashSet<ZoneRules>();
        // Most up-to-date leap second table seen across all versions.
        SortedMap<LocalDate, Byte> bestLeapSeconds = null;

        for (File srcDir : srcDirs) {
            // source files in this directory
            List<File> srcFiles = new ArrayList<File>();
            for (String srcFileName : srcFileNames) {
                File file = new File(srcDir, srcFileName);
                if (file.exists()) {
                    srcFiles.add(file);
                }
            }
            if (srcFiles.isEmpty()) {
                continue;  // nothing to process
            }

            File leapSecondsFile = new File(srcDir, "leapseconds");
            if (!leapSecondsFile.exists()) {
                System.out.println("Version " + srcDir.getName()
                        + " does not include leap seconds information.");
                leapSecondsFile = null;
            }

            // compile
            // The directory name doubles as the TZDB version label.
            String loopVersion = srcDir.getName();
            TzdbZoneRulesCompiler compiler =
                    new TzdbZoneRulesCompiler(loopVersion, srcFiles, leapSecondsFile, verbose);
            compiler.setDeduplicateMap(deduplicateMap);
            try {
                // compile
                compiler.compile();
                SortedMap<String, ZoneRules> builtZones = compiler.getZones();
                SortedMap<LocalDate, Byte> parsedLeapSeconds = compiler.getLeapSeconds();

                // output version-specific file
                if (unpacked == false) {
                    File dstFile = new File(dstDir, "threeten-TZDB-" + loopVersion + ".jar");
                    if (verbose) {
                        System.out.println("Outputting file: " + dstFile);
                    }
                    outputFile(dstFile, loopVersion, builtZones, parsedLeapSeconds);
                }

                // create totals
                allBuiltZones.put(loopVersion, builtZones);
                allRegionIds.addAll(builtZones.keySet());
                allRules.addAll(builtZones.values());

                // track best possible leap seconds collection
                if (compiler.getMostRecentLeapSecond() != null) {
                    // we've got a live one!
                    if (bestLeapSeconds == null
                            || compiler.getMostRecentLeapSecond().compareTo(bestLeapSeconds.lastKey()) > 0) {
                        // found the first one, or found a better one
                        bestLeapSeconds = parsedLeapSeconds;
                    }
                }
            } catch (Exception ex) {
                // Any failure aborts the whole build (command-line tool).
                System.out.println("Failed: " + ex.toString());
                ex.printStackTrace();
                System.exit(1);
            }
        }

        // output merged file
        if (unpacked) {
            if (verbose) {
                System.out.println("Outputting combined files: " + dstDir);
            }
            outputFilesDat(dstDir, allBuiltZones, allRegionIds, allRules, bestLeapSeconds);
        } else {
            File dstFile = new File(dstDir, "threeten-TZDB-all.jar");
            if (verbose) {
                System.out.println("Outputting combined file: " + dstFile);
            }
            outputFile(dstFile, allBuiltZones, allRegionIds, allRules, bestLeapSeconds);
        }
    }
}
public class JawrRequestHandler {

    /**
     * Refresh the dirty bundles.
     *
     * Rebuilds any bundles flagged as modified, timing the work and exposing
     * the Jawr JMX config object name through a thread-local context for the
     * duration of the rebuild.
     */
    @Override
    public synchronized void rebuildDirtyBundles() {
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Rebuild dirty bundles");
        }
        StopWatch stopWatch = new StopWatch();
        ThreadLocalJawrContext.setStopWatch(stopWatch);

        // Initialize the Thread local for the Jawr context
        ThreadLocalJawrContext.setJawrConfigMgrObjectName(
                JmxUtils.getMBeanObjectName(servletContext, resourceType,
                        jawrConfig.getProperty(JawrConstant.JAWR_JMX_MBEAN_PREFIX)));
        try {
            if (bundlesHandler != null) {
                bundlesHandler.rebuildModifiedBundles();
            }
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Jawr configuration successfully reloaded. ");
            }
            if (PERF_PROCESSING_LOGGER.isDebugEnabled()) {
                PERF_PROCESSING_LOGGER.debug(stopWatch.prettyPrint());
            }
        } catch (InterruptBundlingProcessException e) {
            // A deliberate stop request is informational, not an error.
            if (LOGGER.isInfoEnabled()) {
                LOGGER.info("Bundling processed stopped");
            }
        } catch (Exception e) {
            throw new BundlingProcessException(
                    "Error while rebuilding dirty bundles : " + e.getMessage(), e);
        } finally {
            // Reset the Thread local for the Jawr context
            ThreadLocalJawrContext.reset();
        }
    }
}
public class Kryo { /** * Stores the specified registration . If the ID is already in use by the same type , the old entry is overwritten . Registering * a primitive also affects the corresponding primitive wrapper . * IDs must be the same at deserialization as they were for serialization . * Registration can be suclassed to efficiently store per type information , accessible in serializers via * { @ link Kryo # getRegistration ( Class ) } . */ public Registration register ( Registration registration ) { } }
int id = registration . getId ( ) ; if ( id < 0 ) throw new IllegalArgumentException ( "id must be > 0: " + id ) ; Registration existing = classResolver . unregister ( id ) ; if ( DEBUG && existing != null && existing . getType ( ) != registration . getType ( ) ) debug ( "kryo" , "Registration overwritten: " + existing + " -> " + registration ) ; return classResolver . register ( registration ) ;
public class ValidStorageOptions { /** * The valid range of provisioned IOPS . For example , 1000-20000. * @ param provisionedIops * The valid range of provisioned IOPS . For example , 1000-20000. */ public void setProvisionedIops ( java . util . Collection < Range > provisionedIops ) { } }
if ( provisionedIops == null ) { this . provisionedIops = null ; return ; } this . provisionedIops = new com . amazonaws . internal . SdkInternalList < Range > ( provisionedIops ) ;
public class SDLoss {

    /**
     * Weighted cross-entropy loss with logits.
     *
     * Convenience overload that delegates to the named variant, passing
     * {@code null} so the result variable gets an auto-generated name.
     *
     * @param targets target values
     * @param inputs input logits
     * @param weights per-element weights
     * @return the loss variable
     */
    public SDVariable weightedCrossEntropyWithLogits(SDVariable targets, SDVariable inputs, SDVariable weights) {
        return weightedCrossEntropyWithLogits(null, targets, inputs, weights);
    }
}
public class ConverterManager { /** * Gets a copy of the list of converters . * @ return the converters , a copy of the real data , never null */ public DurationConverter [ ] getDurationConverters ( ) { } }
ConverterSet set = iDurationConverters ; DurationConverter [ ] converters = new DurationConverter [ set . size ( ) ] ; set . copyInto ( converters ) ; return converters ;
public class Duration {

    /**
     * Creates a new duration as the union of this and the given time span,
     * adding amounts of equal units together.
     *
     * <p>To merge time spans with different unit types, the following trick
     * can be applied:</p>
     *
     * <pre>
     *  Duration&lt;IsoUnit&gt; zero = Duration.ofZero();
     *  Duration&lt;IsoUnit&gt; result = zero.plus(this).plus(timespan);
     * </pre>
     *
     * <p><strong>Note on sign handling:</strong> If this duration and the
     * given time span have different signs, Time4J will perform an automatic
     * normalization if needed. If mixed signs remain for individual duration
     * elements afterwards, an exception is thrown. It is therefore
     * recommended to only merge time spans with equal signs.</p>
     *
     * @param timespan other time span this duration will be merged with by
     *        adding the partial amounts
     * @return new merged duration
     * @throws IllegalStateException if the result gets mixed signs by adding
     *         the partial amounts
     * @throws IllegalArgumentException if different units of same length exist
     * @throws ArithmeticException in case of long overflow
     * @see #union(TimeSpan)
     */
    @SuppressWarnings("unchecked")
    public Duration<U> plus(TimeSpan<? extends U> timespan) {
        // Fast path: try a direct unit-by-unit merge first.
        Duration<U> result = merge(this, timespan);

        if (result == null) {
            // Fallback: collapse both spans to totals of
            // [months, days, seconds, nanos] and renormalize.
            long[] sums = new long[4];
            sums[0] = 0;
            sums[1] = 0;
            sums[2] = 0;
            sums[3] = 0;

            if (summarize(this, sums) && summarize(timespan, sums)) {
                long months = sums[0];
                long days = sums[1];
                long secs = sums[2];
                long nanos = sums[3];

                // Pick the finest non-zero clock-time total as the
                // representative "daytime" amount for the sign check.
                long daytime;
                if (nanos != 0) {
                    daytime = nanos;
                } else if (secs != 0) {
                    daytime = secs;
                } else {
                    daytime = days;
                }

                if (!hasMixedSigns(months, daytime)) {
                    boolean neg = ((months < 0) || (daytime < 0));

                    // Work with absolute values; the sign is re-applied at the end.
                    if (neg) {
                        months = MathUtils.safeNegate(months);
                        days = MathUtils.safeNegate(days);
                        secs = MathUtils.safeNegate(secs);
                        nanos = MathUtils.safeNegate(nanos);
                    }

                    long years = months / 12;
                    months = months % 12;
                    long nanosecs = 0;

                    // NOTE(review): when nanos is non-zero it appears to carry
                    // the full clock-time total (seconds are re-derived from
                    // it) -- presumably summarize() folds seconds into nanos
                    // in that case; confirm against summarize().
                    if (nanos != 0) {
                        nanosecs = nanos % MRD;
                        secs = nanos / MRD;
                    }

                    long hours = secs / 3600;
                    secs = secs % 3600;
                    long minutes = secs / 60;
                    secs = secs % 60;

                    Map<IsoUnit, Long> map = new HashMap<>();
                    map.put(YEARS, years);
                    map.put(MONTHS, months);
                    map.put(DAYS, days);
                    map.put(HOURS, hours);
                    map.put(MINUTES, minutes);
                    map.put(SECONDS, secs);
                    map.put(NANOS, nanosecs);
                    return (Duration<U>) Duration.create(map, neg);
                }
            }

            throw new IllegalStateException(
                "Mixed signs in result time span not allowed: "
                + this
                + " PLUS "
                + timespan);
        }

        return result;
    }
}
public class CommonUtils { /** * Extract package name from canonical Java class name * @ param fileNameWithoutExtension canonical class name ( like ' a . b . c . SomeClassName ' ) , must not be null * @ return extracted package name , must not be null but can be empty */ @ Nonnull public static String extractPackageName ( @ Nonnull final String fileNameWithoutExtension ) { } }
final int lastDot = fileNameWithoutExtension . lastIndexOf ( '.' ) ; if ( lastDot < 0 ) { return "" ; } return fileNameWithoutExtension . substring ( 0 , lastDot ) . trim ( ) ;
public class Base64 { /** * Decodes data from Base64 notation , automatically * detecting gzip - compressed data and decompressing it . * @ param s the string to decode * @ param options encodeToString options such as URL _ SAFE * @ return the decoded data * @ throws NullPointerException if < code > s < / code > is null */ public static byte [ ] decode ( String s , int options ) { } }
if ( s == null ) { throw new IllegalArgumentException ( "Input string was null." ) ; } byte [ ] bytes = s . getBytes ( UTF_8 ) ; return decode ( bytes , 0 , bytes . length , options ) ;
public class ArrayLabelSetterFactory {

    /**
     * Handles the case where label information is stored in a {@link Map}
     * field named "labels".
     *
     * <p>The map key is built from the field name (plus index).</p>
     *
     * @param beanClass the class in which the field is declared
     * @param fieldName the field name
     * @return a setter for the label information, or empty when no suitable
     *         {@code Map<String, String>} "labels" field exists
     */
    private Optional<ArrayLabelSetter> createMapField(final Class<?> beanClass, final String fieldName) {
        final Field labelsField;
        try {
            labelsField = beanClass.getDeclaredField("labels");
            labelsField.setAccessible(true);
        } catch (NoSuchFieldException | SecurityException e) {
            // If the field cannot be found, do nothing.
            return Optional.empty();
        }

        // Only Map-typed "labels" fields are supported.
        if (!Map.class.isAssignableFrom(labelsField.getType())) {
            return Optional.empty();
        }

        final ParameterizedType type = (ParameterizedType) labelsField.getGenericType();
        final Class<?> keyType = (Class<?>) type.getActualTypeArguments()[0];
        final Class<?> valueType = (Class<?>) type.getActualTypeArguments()[1];

        if (keyType.equals(String.class) && valueType.equals(String.class)) {
            return Optional.of(new ArrayLabelSetter() {

                @SuppressWarnings("unchecked")
                @Override
                public void set(final Object beanObj, final String label, final int index) {
                    ArgUtils.notNull(beanObj, "beanObj");
                    ArgUtils.notEmpty(label, "label");

                    try {
                        Map<String, String> labelsMapObj = (Map<String, String>) labelsField.get(beanObj);
                        // Lazily create the backing map on first use.
                        if (labelsMapObj == null) {
                            labelsMapObj = new LinkedHashMap<>();
                            labelsField.set(beanObj, labelsMapObj);
                        }

                        final String mapKey = createMapKey(fieldName, index);
                        labelsMapObj.put(mapKey, label);
                    } catch (IllegalArgumentException | IllegalAccessException e) {
                        throw new RuntimeException("fail access labels field.", e);
                    }
                }
            });
        } else {
            // The generic types do not match Map<String, String>.
            log.warn("not match generics type of labels. key type:{}, value type:{}.",
                    keyType.getName(), valueType.getName());
            return Optional.empty();
        }
    }
}
public class ProvenanceChallenge2 {

    /**
     * Builds the PROV statements describing a "reslice" step: the activity
     * uses the warp entity as input and generates new image and header file
     * entities derived from it, all associated with the given workflow and
     * agent.
     *
     * @see org.openprovenance.prov.tutorial.tutorial5.Challenge#reslice(java.lang.String,
     *      java.lang.String, java.lang.String, java.lang.String,
     *      java.lang.String, java.lang.String, java.lang.String,
     *      java.lang.String)
     */
    @Override
    public Collection<StatementOrBundle> reslice(String warp, String activity, String imgfile,
            String imglabel, String hdrfile, String hdrlabel, String workflow, String agent) {
        Collection<StatementOrBundle> ll = new LinkedList<StatementOrBundle>();

        // The reslice activity, typed with the PRIM "reslice" concept.
        Activity a5 = pFactory.newActivity(pc(activity));
        pFactory.addType(a5, pFactory.newQualifiedName(PRIM_NS, RESLICE, PRIM_PREFIX),
                name.PROV_QUALIFIED_NAME);

        // Outputs (image + header files) and the warp input entity.
        Entity e15 = newFile(pFactory, imgfile, imglabel);
        Entity e16 = newFile(pFactory, hdrfile, hdrlabel);
        Entity e11 = pFactory.newEntity(pc(warp));

        ll.add(newUsed(a5, ROLE_IN, e11));
        ll.add(newWasGeneratedBy(e15, ROLE_IMG, a5));
        ll.add(newWasGeneratedBy(e16, ROLE_HDR, a5));
        ll.add(newWasDerivedFrom(e15, e11));
        ll.add(newWasDerivedFrom(e16, e11));
        ll.addAll(Arrays.asList(a5, e15, e16, e11));
        // NOTE(review): this "used" statement duplicates the one added above
        // -- possibly unintentional; confirm whether the duplicate is needed.
        ll.add(newUsed(a5, ROLE_IN, e11));

        // Attribution: agent, workflow activity, and their links.
        ll.add(pFactory.newAgent(pc(agent)));
        ll.add(pFactory.newActivity(pc(workflow)));
        ll.add(pFactory.newWasAssociatedWith(null, pc(workflow), pc(agent)));
        ll.add(pFactory.newWasStartedBy(null, pc(activity), null, pc(workflow)));
        return ll;
    }
}
public class CommonOps_DDF5 {

    /**
     * Performs an upper Cholesky decomposition of matrix 'A' and stores
     * result in A.
     *
     * Column-by-column computation, unrolled for the fixed 5x5 size; the
     * strictly lower triangle is zeroed. If A is not SPD the square roots
     * produce NaN, which the final check may detect via a55.
     *
     * @param A (Input) SPD Matrix. (Output) upper cholesky.
     * @return true if it was successful or false if it failed. Not always reliable.
     */
    public static boolean cholU(DMatrix5x5 A) {
        // Column 1
        A.a11 = Math.sqrt(A.a11);
        A.a21 = 0;
        A.a31 = 0;
        A.a41 = 0;
        A.a51 = 0;
        // Column 2
        A.a12 = (A.a12) / A.a11;
        A.a22 = Math.sqrt(A.a22 - A.a12 * A.a12);
        A.a32 = 0;
        A.a42 = 0;
        A.a52 = 0;
        // Column 3
        A.a13 = (A.a13) / A.a11;
        A.a23 = (A.a23 - A.a12 * A.a13) / A.a22;
        A.a33 = Math.sqrt(A.a33 - A.a13 * A.a13 - A.a23 * A.a23);
        A.a43 = 0;
        A.a53 = 0;
        // Column 4
        A.a14 = (A.a14) / A.a11;
        A.a24 = (A.a24 - A.a12 * A.a14) / A.a22;
        A.a34 = (A.a34 - A.a13 * A.a14 - A.a23 * A.a24) / A.a33;
        A.a44 = Math.sqrt(A.a44 - A.a14 * A.a14 - A.a24 * A.a24 - A.a34 * A.a34);
        A.a54 = 0;
        // Column 5
        A.a15 = (A.a15) / A.a11;
        A.a25 = (A.a25 - A.a12 * A.a15) / A.a22;
        A.a35 = (A.a35 - A.a13 * A.a15 - A.a23 * A.a25) / A.a33;
        A.a45 = (A.a45 - A.a14 * A.a15 - A.a24 * A.a25 - A.a34 * A.a35) / A.a44;
        A.a55 = Math.sqrt(A.a55 - A.a15 * A.a15 - A.a25 * A.a25 - A.a35 * A.a35 - A.a45 * A.a45);
        // Only the last diagonal element is checked for NaN/Inf, hence the
        // "not always reliable" caveat in the contract.
        return !UtilEjml.isUncountable(A.a55);
    }
}
public class CssBoxPngRenderer { /** * Sets some common fonts as the defaults for generic font families . */ private void setDefaultFonts ( BrowserConfig config ) { } }
config . setDefaultFont ( Font . SERIF , "Times New Roman" ) ; config . setDefaultFont ( Font . SANS_SERIF , "Arial" ) ; config . setDefaultFont ( Font . MONOSPACED , "Courier New" ) ;
public class StringUtil { /** * Helper functions to query a strings end portion . The comparison is case insensitive . * @ param base the base string . * @ param end the ending text . * @ return true , if the string ends with the given ending text . */ public static boolean endsWithIgnoreCase ( final String base , final String end ) { } }
if ( base . length ( ) < end . length ( ) ) { return false ; } return base . regionMatches ( true , base . length ( ) - end . length ( ) , end , 0 , end . length ( ) ) ;
public class AbstractHBCIJob {

    /**
     * Given a low-level job name, determines the name of this job in the
     * syntax specification (i.e. with the segment version number appended),
     * by scanning the BPD for the highest supported segment version.
     */
    private void findSpecNameForGV(String jobnameLL) {
        int maxVersion = 0;
        StringBuilder key = new StringBuilder();

        // Iterate over all param segments in the BPD.
        Map<String, String> bpd = passport.getBPD();
        for (String path : bpd.keySet()) {
            key.setLength(0);
            key.append(path);

            if (key.indexOf("Params") == 0) {
                key.delete(0, key.indexOf(".") + 1);

                // Segment with the name of the current job found?
                if (key.indexOf(jobnameLL + "Par") == 0 && key.toString().endsWith(".SegHead.code")) {
                    key.delete(0, jobnameLL.length() + ("Par").length());

                    // Extract the version number from the spec name.
                    String st = key.substring(0, key.indexOf("."));
                    int version = 0;
                    try {
                        version = Integer.parseInt(st);
                    } catch (Exception e) {
                        log.warn("found invalid job version: key=" + key + ", jobnameLL=" + jobnameLL
                                + " (this is a " + "known, but harmless bug)");
                    }

                    // Remember the highest version seen so far.
                    if (version != 0) {
                        log.debug("task " + jobnameLL + " is supported with segment version " + st);
                        if (version > maxVersion) {
                            maxVersion = version;
                        }
                    }
                }
            }
        }

        if (maxVersion == 0 && !jobnameLL.equals(GVRawSEPA.getLowlevelName())) {
            // NOTE(review): the assignment and warning below are immediately
            // followed by a throw, which makes them effectively dead --
            // presumably one of the two behaviours (fall back to version 1 vs.
            // reject the job) was intended; confirm before changing.
            maxVersion = 1;
            log.warn("Using segment version " + maxVersion + " for job " + jobnameLL
                    + ", although not found in BPD. " + "This may fail");
            throw new JobNotSupportedException(jobnameLL);
        }

        // Store the name plus version number.
        this.jobName = jobnameLL;
        this.segVersion = maxVersion;
        this.name = jobnameLL + this.segVersion;
    }
}
public class Maps { /** * Returns a list of values of the keys which exist in the specified < code > Map < / code > . * If the key dosn ' t exist in the < code > Map < / code > , No value will be added into the returned list . * @ param map * @ param keys * @ return */ public static < K , V > List < V > getIfPresentForEach ( final Map < K , V > map , final Collection < ? > keys ) { } }
if ( N . isNullOrEmpty ( map ) || N . isNullOrEmpty ( keys ) ) { return new ArrayList < > ( 0 ) ; } final List < V > result = new ArrayList < > ( keys . size ( ) ) ; V val = null ; for ( Object key : keys ) { val = map . get ( key ) ; if ( val != null || map . containsKey ( key ) ) { result . add ( val ) ; } } return result ;
public class HolidayHelper {

    /**
     * Get the number of working days between start date (incl.) and end date
     * (incl.). An optional holiday calculator can be used as well.
     *
     * @param aStartDate
     *        The start date. May not be <code>null</code>.
     * @param aEndDate
     *        The end date. May not be <code>null</code>.
     * @param aHolidayMgr
     *        The holiday calculator to use. May not be <code>null</code>.
     * @return The number of working days. If start date is after end date,
     *         the value will be negative! If start date equals end date the
     *         return will be 1 if it is a working day.
     */
    public static int getWorkingDays(@Nonnull final LocalDate aStartDate,
            @Nonnull final LocalDate aEndDate, @Nonnull final IHolidayManager aHolidayMgr) {
        ValueEnforcer.notNull(aStartDate, "StartDate");
        ValueEnforcer.notNull(aEndDate, "EndDate");
        ValueEnforcer.notNull(aHolidayMgr, "HolidayMgr");

        // Normalize so we always iterate forwards; remember whether the
        // arguments were reversed so the sign can be restored at the end.
        final boolean bFlip = aStartDate.isAfter(aEndDate);
        LocalDate aCurDate = bFlip ? aEndDate : aStartDate;
        final LocalDate aRealEndDate = bFlip ? aStartDate : aEndDate;

        int ret = 0;
        // Inclusive loop: the end date itself is counted.
        while (!aRealEndDate.isBefore(aCurDate)) {
            if (isWorkDay(aCurDate, aHolidayMgr))
                ret++;
            aCurDate = aCurDate.plusDays(1);
        }
        return bFlip ? -1 * ret : ret;
    }
}
public class UpdateUserPoolClientRequest { /** * A list of provider names for the identity providers that are supported on this client . * @ param supportedIdentityProviders * A list of provider names for the identity providers that are supported on this client . */ public void setSupportedIdentityProviders ( java . util . Collection < String > supportedIdentityProviders ) { } }
if ( supportedIdentityProviders == null ) { this . supportedIdentityProviders = null ; return ; } this . supportedIdentityProviders = new java . util . ArrayList < String > ( supportedIdentityProviders ) ;
public class CollectionProxyDefaultImpl {

    /**
     * Clears the proxy. A cleared proxy is defined as loaded.
     *
     * @see Collection#clear()
     */
    public void clear() {
        Class collClass = getCollectionClass();

        // ECER: assure we notify all objects being removed,
        // necessary for RemovalAwareCollections...
        if (IRemovalAwareCollection.class.isAssignableFrom(collClass)) {
            // clear() on the live collection fires removal notifications.
            getData().clear();
        } else {
            Collection coll;
            // BRJ: use an empty collection so isLoaded will return true
            // for non RemovalAwareCollections only!!
            try {
                // NOTE(review): Class.newInstance() is deprecated since Java 9;
                // requires a public no-arg constructor.
                coll = (Collection) collClass.newInstance();
            } catch (Exception e) {
                // Deliberate best-effort: fall back to a plain ArrayList when
                // the configured collection class cannot be instantiated.
                coll = new ArrayList();
            }
            setData(coll);
        }
        _size = 0;
    }
}
public class SqlBuilder { /** * Pair < ColumnName , FieldName > * @ param field 字段 * @ return 返回列名和字段名 */ private static Pair < String , String > getColumnName ( Field field ) { } }
String fieldName = field . getName ( ) ; Column column = field . getAnnotation ( Column . class ) ; if ( null != column && StringUtils . isNotBlank ( column . name ( ) ) ) { fieldName = column . name ( ) ; } String columnName = NameUtils . getUnderlineName ( fieldName ) ; return new Pair < > ( columnName , fieldName ) ;
public class ReactorCircuitBreakerAspectExt { /** * handle the Spring web flux ( Flux / Mono ) return types AOP based into reactor circuit - breaker * See { @ link io . github . resilience4j . reactor . circuitbreaker . operator . CircuitBreakerOperator } for details . * @ param proceedingJoinPoint Spring AOP proceedingJoinPoint * @ param circuitBreaker the configured circuitBreaker * @ param methodName the method name * @ return the result object * @ throws Throwable exception in case of faulty flow */ @ Override public Object handle ( ProceedingJoinPoint proceedingJoinPoint , CircuitBreaker circuitBreaker , String methodName ) throws Throwable { } }
Object returnValue = proceedingJoinPoint . proceed ( ) ; if ( Flux . class . isAssignableFrom ( returnValue . getClass ( ) ) ) { Flux < ? > fluxReturnValue = ( Flux < ? > ) returnValue ; return fluxReturnValue . transform ( io . github . resilience4j . reactor . circuitbreaker . operator . CircuitBreakerOperator . of ( circuitBreaker ) ) ; } else if ( Mono . class . isAssignableFrom ( returnValue . getClass ( ) ) ) { Mono < ? > monoReturnValue = ( Mono < ? > ) returnValue ; return monoReturnValue . transform ( CircuitBreakerOperator . of ( circuitBreaker ) ) ; } else { logger . error ( "Unsupported type for Reactor circuit breaker {}" , returnValue . getClass ( ) . getTypeName ( ) ) ; throw new IllegalArgumentException ( "Not Supported type for the circuit breaker in Reactor:" + returnValue . getClass ( ) . getName ( ) ) ; }
public class TimeLimitedMatcherFactory {

    /**
     * Generate a Matcher instance that will throw if used or still in use
     * more than 2 seconds after its instantiation. Use the instance
     * immediately and then discard it.
     *
     * @param pattern The Pattern instance.
     * @param charSequence The CharSequence to operate on.
     * @return a matcher
     */
    public static Matcher matcher(Pattern pattern, CharSequence charSequence) {
        // Delegate to the explicit-timeout overload using the class-level
        // default timeout (timeoutMs).
        return matcher(pattern, charSequence, timeoutMs);
    }
}
public class MiniJPEWriterHandler {

    /**
     * {@inheritDoc}
     *
     * Overwrites the Y coordinate of the given point in place and returns
     * the same object.
     */
    public Object addYToPoint(double y, Object point) {
        // Assumes `point` is a LinkedHashMap (GeoJSON-like structure) whose
        // "coordinates" entry is a JSONArray with the Y value at index 1 --
        // TODO confirm against the callers; the unchecked casts will throw
        // ClassCastException otherwise.
        ((JSONArray) ((LinkedHashMap) point).get("coordinates")).set(1, y);
        return point;
    }
}
public class JCasUtil2 {

    /**
     * Returns a list of tokens starting with firstToken and ending with
     * lastToken (incl.).
     *
     * @param jCas jCas
     * @param firstToken first token of the span
     * @param lastToken last token of the span
     * @return list (never empty, contains at least one token if
     *         firstToken = lastToken)
     * @throws IllegalArgumentException if last token precedes first token
     * @throws NullPointerException if firstToken or lastToken are null
     */
    public static List<Token> getTokenSpan(JCas jCas, Token firstToken, Token lastToken) {
        if (firstToken == null) {
            throw new NullPointerException("firstToken is null");
        }
        if (lastToken == null) {
            throw new NullPointerException("lastToken is null");
        }
        // Ordering is decided purely by begin offsets.
        if (firstToken.getBegin() > lastToken.getBegin()) {
            throw new IllegalArgumentException("firstToken (begin: " + firstToken.getBegin()
                    + ") appears after lastToken (begin: " + lastToken.getBegin() + ")");
        }
        List<Token> result = new ArrayList<Token>();
        // Linear scan over all tokens in the initial view, keeping those
        // whose begin offset falls inside [firstToken.begin, lastToken.begin].
        for (Token t : JCasUtil.select(getInitialView(jCas), Token.class)) {
            if (t.getBegin() >= firstToken.getBegin() && t.getBegin() <= lastToken.getBegin()) {
                result.add(t);
            }
        }
        return result;
    }
}
public class CSSColorHelper { /** * Get the passed value as a valid HSL Saturation or Lightness value in the * range of { @ value # PERCENTAGE _ MIN } - { @ value # PERCENTAGE _ MAX } ( percentage ) . * @ param nHSLPart * Source value * @ return Target value in the range of { @ value # PERCENTAGE _ MIN } - * { @ value # PERCENTAGE _ MAX } */ @ Nonnegative public static int getHSLPercentageValue ( final int nHSLPart ) { } }
return nHSLPart < PERCENTAGE_MIN ? PERCENTAGE_MIN : nHSLPart > PERCENTAGE_MAX ? PERCENTAGE_MAX : nHSLPart ;
public class ToStringBuilder {

    /**
     * <p>Append to the <code>toString</code> an <code>Object</code>
     * value.</p>
     *
     * @param fieldName the field name
     * @param obj the value to add to the <code>toString</code>
     * @param fullDetail <code>true</code> for detail,
     *        <code>false</code> for summary info
     * @return this
     */
    @GwtIncompatible("incompatible method")
    public ToStringBuilder append(final String fieldName, final Object obj, final boolean fullDetail) {
        // The configured style renders the field into the shared buffer;
        // fullDetail is boxed because the style API takes a Boolean.
        style.append(buffer, fieldName, obj, Boolean.valueOf(fullDetail));
        return this;
    }
}
public class FunctionLibFactory { /** * return one FunctionLib contain content of all given Function Libs * @ param flds * @ return combined function lib */ public static FunctionLib combineFLDs ( FunctionLib [ ] flds ) { } }
FunctionLib fl = new FunctionLib ( ) ; if ( ArrayUtil . isEmpty ( flds ) ) return fl ; setAttributes ( flds [ 0 ] , fl ) ; // add functions for ( int i = 0 ; i < flds . length ; i ++ ) { copyFunctions ( flds [ i ] , fl ) ; } return fl ;