signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class PartitionReplicaManager { /** * Checks preconditions for replica sync - if we don ' t know the owner yet , if this node is the owner or not a replica */
PartitionReplica checkAndGetPrimaryReplicaOwner ( int partitionId , int replicaIndex ) { } } | InternalPartitionImpl partition = partitionStateManager . getPartitionImpl ( partitionId ) ; PartitionReplica owner = partition . getOwnerReplicaOrNull ( ) ; if ( owner == null ) { logger . info ( "Sync replica target is null, no need to sync -> partitionId=" + partitionId + ", replicaIndex=" + replicaIndex ) ; return null ; } PartitionReplica localReplica = PartitionReplica . from ( nodeEngine . getLocalMember ( ) ) ; if ( owner . equals ( localReplica ) ) { if ( logger . isFinestEnabled ( ) ) { logger . finest ( "This node is now owner of partition, cannot sync replica -> partitionId=" + partitionId + ", replicaIndex=" + replicaIndex + ", partition-info=" + partitionStateManager . getPartitionImpl ( partitionId ) ) ; } return null ; } if ( ! partition . isOwnerOrBackup ( localReplica ) ) { if ( logger . isFinestEnabled ( ) ) { logger . finest ( "This node is not backup replica of partitionId=" + partitionId + ", replicaIndex=" + replicaIndex + " anymore." ) ; } return null ; } return owner ; |
public class TimeUUIDs { /** * Compare the embedded timestamps of the given UUIDs . This is used when it is OK to return
* an equality based on timestamps alone
* @ throws UnsupportedOperationException if either uuid is not a timestamp UUID */
public static int compareTimestamps ( UUID uuid1 , UUID uuid2 ) { } } | return Longs . compare ( uuid1 . timestamp ( ) , uuid2 . timestamp ( ) ) ; |
public class TagsApi { /** * Creates a tag on a particular ref of the given project .
* < pre > < code > GitLab Endpoint : POST / projects / : id / repository / tags < / code > < / pre >
* @ param projectIdOrPath id , path of the project , or a Project instance holding the project ID or path
* @ param tagName The name of the tag Must be unique for the project
* @ param ref the git ref to place the tag on
* @ return a Tag instance containing info on the newly created tag
* @ throws GitLabApiException if any exception occurs */
public Tag createTag ( Object projectIdOrPath , String tagName , String ref ) throws GitLabApiException { } } | return ( createTag ( projectIdOrPath , tagName , ref , null , ( String ) null ) ) ; |
public class event { /** * < pre >
* Use this operation to get events .
* < / pre > */
public static event [ ] get ( nitro_service client ) throws Exception { } } | event resource = new event ( ) ; resource . validate ( "get" ) ; return ( event [ ] ) resource . get_resources ( client ) ; |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link TextType }
* { @ code > } */
@ XmlElementDecl ( namespace = "http://www.w3.org/2005/Atom" , name = "title" , scope = SourceType . class ) public JAXBElement < TextType > createSourceTypeTitle ( TextType value ) { } } | return new JAXBElement < TextType > ( ENTRY_TYPE_TITLE_QNAME , TextType . class , SourceType . class , value ) ; |
public class PlanNode { /** * Add the supplied node to the front of the list of children .
* @ param child the node that should be added as the first child ; may not be null */
public void addFirstChild ( PlanNode child ) { } } | assert child != null ; this . children . addFirst ( child ) ; child . removeFromParent ( ) ; child . parent = this ; |
public class NodeImpl { /** * { @ inheritDoc } */
public void restore ( Version version , String relPath , boolean removeExisting ) throws VersionException , ItemExistsException , UnsupportedRepositoryOperationException , LockException , RepositoryException , InvalidItemStateException { } } | if ( JCRPath . THIS_RELPATH . equals ( relPath ) ) { // restore at this position
this . restore ( version , removeExisting ) ; } else { // restore at relPath
checkValid ( ) ; if ( ! session . getAccessManager ( ) . hasPermission ( getACL ( ) , new String [ ] { PermissionType . ADD_NODE , PermissionType . SET_PROPERTY } , session . getUserState ( ) . getIdentity ( ) ) ) { throw new AccessDeniedException ( "Access denied: checkin operation " + getPath ( ) + " for: " + session . getUserID ( ) + " item owner " + getACL ( ) . getOwner ( ) ) ; } if ( session . hasPendingChanges ( ) ) { throw new InvalidItemStateException ( "Session has pending changes " ) ; } if ( ( ( VersionImpl ) version ) . getInternalName ( ) . equals ( Constants . JCR_ROOTVERSION ) ) { throw new VersionException ( "It is illegal to call restore() on jcr:rootVersion" ) ; } QPath destPath = locationFactory . parseRelPath ( relPath ) . getInternalPath ( ) ; NodeImpl destParent = ( NodeImpl ) dataManager . getItem ( nodeData ( ) , destPath . makeParentPath ( ) . getEntries ( ) , false , ItemType . NODE ) ; if ( destParent == null ) { throw new PathNotFoundException ( "Parent not found for " + relPath ) ; } if ( ! destParent . isNode ( ) ) { throw new ConstraintViolationException ( "Parent item is not a node. Rel path " + relPath ) ; } NodeImpl destNode = ( NodeImpl ) dataManager . getItem ( destParent . nodeData ( ) , new QPathEntry ( destPath . getName ( ) , destPath . getIndex ( ) ) , false , ItemType . NODE ) ; if ( destNode != null ) { // Dest node exists
if ( ! destNode . isNode ( ) ) { throw new ConstraintViolationException ( "Item at relPath is not a node " + destNode . getPath ( ) ) ; } if ( ! destNode . isNodeType ( Constants . MIX_VERSIONABLE ) ) { throw new UnsupportedRepositoryOperationException ( "Node at relPath is not versionable " + destNode . getPath ( ) ) ; } if ( ! destNode . versionHistory ( false ) . isVersionBelongToThis ( version ) ) { throw new VersionException ( "Bad version " + version . getPath ( ) ) ; } // Check locking
if ( ! destNode . parent ( ) . checkLocking ( ) ) { throw new LockException ( "Node " + destNode . getPath ( ) + " is locked " ) ; } } else { // Dest node not found
if ( ! destParent . checkedOut ( ) ) { throw new VersionException ( "Parent of a node at relPath is versionable and checked-in " + destParent . getPath ( ) ) ; } } ( ( VersionImpl ) version ) . restore ( session , destParent . nodeData ( ) , destPath . getName ( ) , removeExisting ) ; } |
public class UIInputContainer { /** * Walk the component tree branch built by the composite component and locate the input container elements .
* @ return a composite object of the input container elements */
protected InputContainerElements scan ( final UIComponent component , InputContainerElements elements , final FacesContext context ) { } } | if ( elements == null ) { elements = new InputContainerElements ( ) ; } // NOTE we need to walk the tree ignoring rendered attribute because it ' s condition
// could be based on what we discover
if ( ( elements . getLabel ( ) == null ) && ( component instanceof HtmlOutputLabel ) ) { elements . setLabel ( ( HtmlOutputLabel ) component ) ; } else if ( component instanceof EditableValueHolder ) { elements . registerInput ( ( EditableValueHolder ) component , getDefaultValidator ( context ) , context ) ; } else if ( component instanceof UIMessage ) { elements . registerMessage ( ( UIMessage ) component ) ; } // may need to walk smarter to ensure " element of least suprise "
for ( UIComponent child : component . getChildren ( ) ) { scan ( child , elements , context ) ; } return elements ; |
public class SimpleCheckBoxControl { /** * Sets up event handlers for all checkboxes . */
private void setupCheckboxEventHandlers ( ) { } } | for ( int i = 0 ; i < checkboxes . size ( ) ; i ++ ) { final int j = i ; checkboxes . get ( i ) . setOnAction ( event -> { if ( checkboxes . get ( j ) . isSelected ( ) ) { field . select ( j ) ; } else { field . deselect ( j ) ; } } ) ; } |
public class UserTransactionRegistryImpl { /** * Remove a provider
* @ param provider The provider */
public void removeProvider ( UserTransactionProvider provider ) { } } | UserTransactionProviderImpl impl = providers . get ( provider ) ; if ( impl != null ) { delegator . removeProvider ( impl ) ; providers . remove ( provider ) ; } |
public class ModifyVpcEndpointServicePermissionsRequest { /** * The Amazon Resource Names ( ARN ) of one or more principals . Permissions are granted to the principals in this
* list . To grant permissions to all principals , specify an asterisk ( * ) .
* @ param addAllowedPrincipals
* The Amazon Resource Names ( ARN ) of one or more principals . Permissions are granted to the principals in
* this list . To grant permissions to all principals , specify an asterisk ( * ) . */
public void setAddAllowedPrincipals ( java . util . Collection < String > addAllowedPrincipals ) { } } | if ( addAllowedPrincipals == null ) { this . addAllowedPrincipals = null ; return ; } this . addAllowedPrincipals = new com . amazonaws . internal . SdkInternalList < String > ( addAllowedPrincipals ) ; |
public class Primitives { /** * Converts an array of object Integer to primitives handling { @ code null } .
* This method returns { @ code null } for a { @ code null } input array .
* @ param a
* a { @ code Integer } array , may be { @ code null }
* @ param valueForNull
* the value to insert if { @ code null } found
* @ return an { @ code int } array , { @ code null } if null array input */
public static int [ ] unbox ( final Integer [ ] a , final int valueForNull ) { } } | if ( a == null ) { return null ; } return unbox ( a , 0 , a . length , valueForNull ) ; |
public class AmazonCodeDeployAsyncClient { /** * Simplified method form for invoking the ListOnPremisesInstances operation with an AsyncHandler .
* @ see # listOnPremisesInstancesAsync ( ListOnPremisesInstancesRequest , com . amazonaws . handlers . AsyncHandler ) */
@ Override public java . util . concurrent . Future < ListOnPremisesInstancesResult > listOnPremisesInstancesAsync ( com . amazonaws . handlers . AsyncHandler < ListOnPremisesInstancesRequest , ListOnPremisesInstancesResult > asyncHandler ) { } } | return listOnPremisesInstancesAsync ( new ListOnPremisesInstancesRequest ( ) , asyncHandler ) ; |
public class BusHandler { /** * / * ( non - Javadoc )
* @ see com . ibm . ws . sib . processor . impl . interfaces . DestinationHandler # isSendAllowed ( ) */
public boolean isSendAllowed ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "isSendAllowed" ) ; boolean sendAllowed ; if ( _sendAllowedOnTargetForeignBus . equals ( Boolean . FALSE ) ) sendAllowed = false ; else sendAllowed = _foreignDestinationDefault . isSendAllowed ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "isSendAllowed" , sendAllowed ) ; return sendAllowed ; |
public class ByteBuffer { /** * method to append a part of a String
* @ param str string to get part from
* @ param off start index on the string
* @ param len length of the sequenz to get from string
* @ throws IOException */
public void append ( String str , int off , int len ) throws IOException { } } | append ( str . substring ( off , off + len ) ) ; |
public class UriEscape { /** * Perform am URI query parameter ( name or value ) < strong > escape < / strong > operation
* on a < tt > String < / tt > input , writing results to a < tt > Writer < / tt > .
* The following are the only allowed chars in an URI query parameter ( will not be escaped ) :
* < ul >
* < li > < tt > A - Z a - z 0-9 < / tt > < / li >
* < li > < tt > - . _ ~ < / tt > < / li >
* < li > < tt > ! $ ' ( ) * , ; < / tt > < / li >
* < li > < tt > : @ < / tt > < / li >
* < li > < tt > / ? < / tt > < / li >
* < / ul >
* All other chars will be escaped by converting them to the sequence of bytes that
* represents them in the specified < em > encoding < / em > and then representing each byte
* in < tt > % HH < / tt > syntax , being < tt > HH < / tt > the hexadecimal representation of the byte .
* This method is < strong > thread - safe < / strong > .
* @ param text the < tt > String < / tt > to be escaped .
* @ param writer the < tt > java . io . Writer < / tt > to which the escaped result will be written . Nothing will
* be written at all to this writer if input is < tt > null < / tt > .
* @ param encoding the encoding to be used for escaping .
* @ throws IOException if an input / output exception occurs
* @ since 1.1.2 */
public static void escapeUriQueryParam ( final String text , final Writer writer , final String encoding ) throws IOException { } } | if ( writer == null ) { throw new IllegalArgumentException ( "Argument 'writer' cannot be null" ) ; } if ( encoding == null ) { throw new IllegalArgumentException ( "Argument 'encoding' cannot be null" ) ; } UriEscapeUtil . escape ( new InternalStringReader ( text ) , writer , UriEscapeUtil . UriEscapeType . QUERY_PARAM , encoding ) ; |
public class NotifdEventConsumer { private EventCallBackStruct getEventCallBackStruct ( String eventName ) { } } | Enumeration keys = event_callback_map . keys ( ) ; while ( keys . hasMoreElements ( ) ) { String key = ( String ) keys . nextElement ( ) ; // Notifd do not use tango host
int start = key . indexOf ( '/' , "tango:// " . length ( ) ) ; String shortName = key . substring ( start + 1 ) ; if ( eventName . equalsIgnoreCase ( shortName ) ) { return event_callback_map . get ( key ) ; } } return null ; |
public class Iterators { /** * Adds all elements in { @ code iterator } to { @ code collection } . The iterator
* will be left exhausted : its { @ code hasNext ( ) } method will return
* { @ code false } .
* @ return { @ code true } if { @ code collection } was modified as a result of this
* operation */
public static < T > boolean addAll ( Collection < T > addTo , Iterator < ? extends T > iterator ) { } } | checkNotNull ( addTo ) ; checkNotNull ( iterator ) ; boolean wasModified = false ; while ( iterator . hasNext ( ) ) { wasModified |= addTo . add ( iterator . next ( ) ) ; } return wasModified ; |
public class Lexer { /** * 调用者已确定以字母或下划线开头 , 故一定可以获取到 id值 */
String scanId ( ) { } } | int idStart = forward ; while ( CharTable . isLetterOrDigit ( next ( ) ) ) { ; } return subBuf ( idStart , forward - 1 ) . toString ( ) ; |
public class PerformanceCachingGoogleCloudStorage { /** * This function may return cached copies of GoogleCloudStorageItemInfo . */
@ Override public List < GoogleCloudStorageItemInfo > getItemInfos ( List < StorageResourceId > resourceIds ) throws IOException { } } | List < GoogleCloudStorageItemInfo > result = new ArrayList < > ( resourceIds . size ( ) ) ; List < StorageResourceId > request = new ArrayList < > ( resourceIds . size ( ) ) ; // Populate the result list with items in the cache , and the request list with resources that
// still need to be resolved . Null items are added to the result list to preserve ordering .
for ( StorageResourceId resourceId : resourceIds ) { GoogleCloudStorageItemInfo item = cache . getItem ( resourceId ) ; if ( item == null ) { request . add ( resourceId ) ; } result . add ( item ) ; } // Resolve all the resources which were not cached , cache them , and add them to the result list .
// Null entries in the result list are replaced by the fresh entries from the underlying
// GoogleCloudStorage .
if ( ! request . isEmpty ( ) ) { List < GoogleCloudStorageItemInfo > response = super . getItemInfos ( request ) ; Iterator < GoogleCloudStorageItemInfo > responseIterator = response . iterator ( ) ; // Iterate through the result set , replacing the null entries added previously with entries
// from the response .
for ( int i = 0 ; i < result . size ( ) && responseIterator . hasNext ( ) ; i ++ ) { if ( result . get ( i ) == null ) { GoogleCloudStorageItemInfo item = responseIterator . next ( ) ; cache . putItem ( item ) ; result . set ( i , item ) ; } } } return result ; |
public class BeanBox { /** * This is Java configuration method equal to put a AOP annotation on method . a
* AOP annotation is a kind of annotation be binded to an AOP alliance
* interceptor like ctx . bind ( Tx . class , MyInterceptor . class ) ; then you can put
* a @ Tx annotation on method . But this method allow aop can be annotation class
* or interceptor class for both */
public synchronized BeanBox addMethodAop ( Object aop , Method method ) { } } | checkOrCreateMethodAops ( ) ; List < Object > aops = methodAops . get ( method ) ; if ( aops == null ) { aops = new ArrayList < Object > ( ) ; methodAops . put ( method , aops ) ; } aops . add ( BeanBoxUtils . checkAOP ( aop ) ) ; return this ; |
public class Logcat { /** * Starts reading traces from the application logcat and notifying listeners if needed . */
@ Override public void run ( ) { } } | super . run ( ) ; try { process = Runtime . getRuntime ( ) . exec ( "logcat -v time" ) ; } catch ( IOException e ) { Log . e ( LOGTAG , "IOException executing logcat command." , e ) ; } readLogcat ( ) ; |
public class CronTrigger { /** * Returns the next time at which the < code > CronTrigger < / code > will fire ,
* after the given time . If the trigger will not fire after the given time ,
* < code > null < / code > will be returned .
* Note that the date returned is NOT validated against the related
* { @ link ICalendar } ( if any )
* @ param aAfterTime
* after time */
@ Override public Date getFireTimeAfter ( @ Nullable final Date aAfterTime ) { } } | Date afterTime = aAfterTime ; if ( afterTime == null ) afterTime = new Date ( ) ; if ( getStartTime ( ) . after ( afterTime ) ) { afterTime = new Date ( getStartTime ( ) . getTime ( ) - 1000l ) ; } if ( getEndTime ( ) != null && ( afterTime . compareTo ( getEndTime ( ) ) >= 0 ) ) { return null ; } final Date pot = getTimeAfter ( afterTime ) ; if ( getEndTime ( ) != null && pot != null && pot . after ( getEndTime ( ) ) ) { return null ; } return pot ; |
public class RegexParser { /** * char - class : : = ' [ ' ( ' ^ ' ? range ' , ' ? ) + ' ] ' range : : = ' \ d ' | ' \ w ' | ' \ s ' |
* category - block | range - char | range - char ' - ' range - char range - char : : =
* ' \ [ ' | ' \ ] ' | ' \ \ ' | ' \ ' [ , - efnrtv ] | bmp - code | character - 2 bmp - code : : =
* ' \ ' ' u ' [ 0-9a - fA - F ] [ 0-9a - fA - F ] [ 0-9a - fA - F ] [ 0-9a - fA - F ] */
protected RangeToken parseCharacterClass ( boolean useNrange ) throws ParseException { } } | this . setContext ( S_INBRACKETS ) ; this . next ( ) ; boolean nrange = false ; RangeToken base = null ; RangeToken tok ; if ( this . read ( ) == T_CHAR && this . chardata == '^' ) { nrange = true ; this . next ( ) ; if ( useNrange ) { tok = Token . createNRange ( ) ; } else { base = Token . createRange ( ) ; base . addRange ( 0 , Token . UTF16_MAX ) ; tok = Token . createRange ( ) ; } } else { tok = Token . createRange ( ) ; } int type ; boolean firstloop = true ; while ( ( type = this . read ( ) ) != T_EOF ) { if ( type == T_CHAR && this . chardata == ']' && ! firstloop ) break ; int c = this . chardata ; boolean end = false ; if ( type == T_BACKSOLIDUS ) { switch ( c ) { case 'd' : case 'D' : case 'w' : case 'W' : case 's' : case 'S' : tok . mergeRanges ( this . getTokenForShorthand ( c ) ) ; end = true ; break ; case 'i' : case 'I' : case 'c' : case 'C' : c = this . processCIinCharacterClass ( tok , c ) ; if ( c < 0 ) end = true ; break ; case 'p' : case 'P' : int pstart = this . offset ; RangeToken tok2 = this . processBacksolidus_pP ( c ) ; if ( tok2 == null ) throw this . ex ( "parser.atom.5" , pstart ) ; tok . mergeRanges ( tok2 ) ; end = true ; break ; default : c = this . decodeEscaped ( ) ; } } // backsolidus
// POSIX Character class such as [ : alnum : ]
else if ( type == T_POSIX_CHARCLASS_START ) { int nameend = this . regex . indexOf ( ':' , this . offset ) ; if ( nameend < 0 ) throw this . ex ( "parser.cc.1" , this . offset ) ; boolean positive = true ; if ( this . regex . charAt ( this . offset ) == '^' ) { this . offset ++ ; positive = false ; } String name = this . regex . substring ( this . offset , nameend ) ; RangeToken range = Token . getRange ( name , positive , this . isSet ( RegularExpression . XMLSCHEMA_MODE ) ) ; if ( range == null ) throw this . ex ( "parser.cc.3" , this . offset ) ; tok . mergeRanges ( range ) ; end = true ; if ( nameend + 1 >= this . regexlen || this . regex . charAt ( nameend + 1 ) != ']' ) throw this . ex ( "parser.cc.1" , nameend ) ; this . offset = nameend + 2 ; } else if ( type == T_XMLSCHEMA_CC_SUBTRACTION && ! firstloop ) { if ( nrange ) { nrange = false ; if ( useNrange ) { tok = ( RangeToken ) Token . complementRanges ( tok ) ; } else { base . subtractRanges ( tok ) ; tok = base ; } } RangeToken range2 = this . parseCharacterClass ( false ) ; tok . subtractRanges ( range2 ) ; if ( this . read ( ) != T_CHAR || this . chardata != ']' ) { throw this . ex ( "parser.cc.5" , this . offset ) ; } break ; // Exit this loop
} this . next ( ) ; if ( ! end ) { // if not shorthands . . .
if ( this . read ( ) != T_CHAR || this . chardata != '-' ) { // Here is
// no
if ( ! this . isSet ( RegularExpression . IGNORE_CASE ) || c > 0xffff ) { tok . addRange ( c , c ) ; } else { addCaseInsensitiveChar ( tok , c ) ; } } else if ( type == T_XMLSCHEMA_CC_SUBTRACTION ) { throw this . ex ( "parser.cc.8" , this . offset - 1 ) ; } else { this . next ( ) ; // Skips ' - '
if ( ( type = this . read ( ) ) == T_EOF ) throw this . ex ( "parser.cc.2" , this . offset ) ; if ( type == T_CHAR && this . chardata == ']' ) { if ( ! this . isSet ( RegularExpression . IGNORE_CASE ) || c > 0xffff ) { tok . addRange ( c , c ) ; } else { addCaseInsensitiveChar ( tok , c ) ; } tok . addRange ( '-' , '-' ) ; } else { int rangeend = this . chardata ; if ( type == T_BACKSOLIDUS ) { rangeend = this . decodeEscaped ( ) ; } this . next ( ) ; if ( c > rangeend ) { throw this . ex ( "parser.ope.3" , this . offset - 1 ) ; } if ( ! this . isSet ( RegularExpression . IGNORE_CASE ) || ( c > 0xffff && rangeend > 0xffff ) ) { tok . addRange ( c , rangeend ) ; } else { addCaseInsensitiveCharRange ( tok , c , rangeend ) ; } } } } if ( this . isSet ( RegularExpression . SPECIAL_COMMA ) && this . read ( ) == T_CHAR && this . chardata == ',' ) { this . next ( ) ; } firstloop = false ; } if ( this . read ( ) == T_EOF ) { throw this . ex ( "parser.cc.2" , this . offset ) ; } if ( ! useNrange && nrange ) { base . subtractRanges ( tok ) ; tok = base ; } tok . sortRanges ( ) ; tok . compactRanges ( ) ; this . setContext ( S_NORMAL ) ; this . next ( ) ; // Skips ' ] '
return tok ; |
public class JdtRecorder { /** * < pre >
* Initializer :
* [ static ] Block
* Block :
* { { Statement } }
* < / pre >
* @ param initializer the { @ link Initializer initializer } being recorded ( cannot be < code > null < / code > )
* @ param nodeName the name of the node being created that represents the initializer ( cannot be < code > null < / code > or empty )
* @ param parentNode the parent { @ link Node node } ( cannot be < code > null < / code > )
* @ throws Exception if there is a problem */
protected void record ( final Initializer initializer , final String nodeName , final Node parentNode ) throws Exception { } } | final Block block = initializer . getBody ( ) ; if ( block != null ) { @ SuppressWarnings ( "unchecked" ) final List < Statement > statements = block . statements ( ) ; if ( ( statements != null ) && ! statements . isEmpty ( ) ) { final Node initializerNode = parentNode . addNode ( nodeName , ClassFileSequencerLexicon . STATEMENTS ) ; record ( block , initializerNode ) ; } } |
public class ReservedInstancesOffering { /** * The pricing details of the Reserved Instance offering .
* @ param pricingDetails
* The pricing details of the Reserved Instance offering . */
public void setPricingDetails ( java . util . Collection < PricingDetail > pricingDetails ) { } } | if ( pricingDetails == null ) { this . pricingDetails = null ; return ; } this . pricingDetails = new com . amazonaws . internal . SdkInternalList < PricingDetail > ( pricingDetails ) ; |
public class Dynamic { /** * Resolves this { @ link Dynamic } constant to resolve the returned instance to the supplied type . The type must be a subtype of the
* bootstrap method ' s return type . Constructors cannot be resolved to a different type .
* @ param typeDescription The type to resolve the bootstrapped value to .
* @ return This dynamic constant but resolved to the supplied type . */
public JavaConstant withType ( TypeDescription typeDescription ) { } } | if ( typeDescription . represents ( void . class ) ) { throw new IllegalArgumentException ( "Constant value cannot represent void" ) ; } else if ( value . getBootstrapMethod ( ) . getName ( ) . equals ( MethodDescription . CONSTRUCTOR_INTERNAL_NAME ) ? ! this . typeDescription . isAssignableTo ( typeDescription ) : ( ! typeDescription . asBoxed ( ) . isInHierarchyWith ( this . typeDescription . asBoxed ( ) ) ) ) { throw new IllegalArgumentException ( typeDescription + " is not compatible with bootstrapped type " + this . typeDescription ) ; } Object [ ] bootstrapMethodArgument = new Object [ value . getBootstrapMethodArgumentCount ( ) ] ; for ( int index = 0 ; index < value . getBootstrapMethodArgumentCount ( ) ; index ++ ) { bootstrapMethodArgument [ index ] = value . getBootstrapMethodArgument ( index ) ; } return new Dynamic ( new ConstantDynamic ( value . getName ( ) , typeDescription . getDescriptor ( ) , value . getBootstrapMethod ( ) , bootstrapMethodArgument ) , typeDescription ) ; |
public class DescribeAccountAuditConfigurationRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DescribeAccountAuditConfigurationRequest describeAccountAuditConfigurationRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( describeAccountAuditConfigurationRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class HiveRegister { /** * Get an instance of { @ link HiveRegister } .
* @ param props A { @ link State } object . To get a specific implementation of { @ link HiveRegister } ,
* specify property { @ link # HIVE _ REGISTER _ TYPE } as the class name . Otherwise , { @ link # DEFAULT _ HIVE _ REGISTER _ TYPE }
* will be returned . This { @ link State } object is also used to instantiate the { @ link HiveRegister } object . */
public static HiveRegister get ( State props , Optional < String > metastoreURI ) { } } | return get ( props . getProp ( HIVE_REGISTER_TYPE , DEFAULT_HIVE_REGISTER_TYPE ) , props , metastoreURI ) ; |
public class MECommsTrc { /** * Minimal trace entry for ME < - > ME comms .
* Caller should check TraceComponent . isAnyTracingEnabled ( ) before calling .
* @ param _ tc The caller ' s trace component
* @ param operation The operation . See constants in this class .
* @ param targetME The MEUUID of the target ME
* @ param connection The ME < - > ME connection ( hashCode will be printed ) . Can be null to indicate no suitable connection was found .
* @ param msg The mess1age being sent over the connection */
public static void traceMessage ( TraceComponent _tc , MessageProcessor localME , SIBUuid8 remoteME , String operation , Object connection , AbstractMessage msg ) { } } | // Trace if either the callers trace is enabled , or our own trace group
if ( tc . isDebugEnabled ( ) || _tc . isDebugEnabled ( ) ) { StringBuilder buff = new StringBuilder ( ) ; buff . append ( "MECOMMS" ) ; try { // Append operation details
buff . append ( "[" ) ; buff . append ( ( localME != null ) ? localME . getMessagingEngineName ( ) : "???" ) ; buff . append ( operation ) ; buff . append ( remoteME ) ; buff . append ( "] " ) ; // Append the details of the message
try { msg . getTraceSummaryLine ( buff ) ; } catch ( Exception e ) { // No FFDC code needed
// We failed to get the message summary line , just print the message class instead .
// We know this can happen within unit tests , because the MFP getter methods used
// to build the summary line can throw a NullPointerException if the message is not
// fully initialized . However , no circumstances are known for this to happen in
// a ' real ' messaging engine .
String safeMsgSummary = ( msg != null ) ? msg . getClass ( ) . getName ( ) : "null" ; buff . append ( " SUMMARY TRACE FAILED. Message class=" + safeMsgSummary ) ; } // Add details identifying the link / stream / destination at the end of the line .
// As this is on every line , I ' ve tried to limit it as much as possible
buff . append ( " [C=" ) ; buff . append ( ( connection != null ) ? Integer . toHexString ( connection . hashCode ( ) ) : null ) ; buff . append ( ",D=" ) ; buff . append ( msg . getGuaranteedTargetDestinationDefinitionUUID ( ) ) ; buff . append ( ",S=" ) ; buff . append ( msg . getGuaranteedStreamUUID ( ) ) ; buff . append ( "]" ) ; } catch ( Exception e ) { // No FFDC code needed
// We encountered an unexpected runtime exception . Try to print a bit of helpful info .
String safeMsgSummary = ( msg != null ) ? msg . getClass ( ) . getName ( ) : "null" ; buff . append ( " SUMMARY TRACE FAILED. Message class=" + safeMsgSummary ) ; } // Preferentially trace with our trace group , to allow a grep on this class name
if ( tc . isDebugEnabled ( ) ) { SibTr . debug ( tc , buff . toString ( ) ) ; } else { SibTr . debug ( _tc , buff . toString ( ) ) ; } } |
public class ClassDiscoverer { /** * Returns true if the type is a JAXBElement . In the case of JAXBElements , we want to traverse its
* underlying value as opposed to the JAXBElement .
* @ param type element type to test to see if its a JAXBElement
* @ return true if the type is a JAXBElement */
static boolean isJAXBElement ( JType type ) { } } | // noinspection RedundantIfStatement
if ( type . fullName ( ) . startsWith ( JAXBElement . class . getName ( ) ) ) { return true ; } return false ; |
public class SequenceNumber { /** * Efficiently converts a string containing a hexadecimal number from lower case to upper case */
private static String upperCaseHex ( String s ) { } } | char chars [ ] = s . toCharArray ( ) ; int length = s . length ( ) ; for ( int i = 0 ; i < length ; i ++ ) { switch ( chars [ i ] ) { case 'a' : chars [ i ] = 'A' ; break ; case 'b' : chars [ i ] = 'B' ; break ; case 'c' : chars [ i ] = 'C' ; break ; case 'd' : chars [ i ] = 'D' ; break ; case 'e' : chars [ i ] = 'E' ; break ; case 'f' : chars [ i ] = 'F' ; break ; } } return new String ( chars ) ; |
public class FractionalPartSubstitution { /** * If in " by digits " mode , parses the string as if it were a string
* of individual digits ; otherwise , uses the superclass function .
* @ param text The string to parse
* @ param parsePosition Ignored on entry , but updated on exit to point
* to the first unmatched character
* @ param baseValue The partial parse result prior to entering this
* function
* @ param upperBound Only consider rules with base values lower than
* this when filling in the substitution
* @ param lenientParse If true , try matching the text as numerals if
* matching as words doesn ' t work
* @ return If the match was successful , the current partial parse
* result ; otherwise new Long ( 0 ) . The result is either a Long or
* a Double . */
public Number doParse ( String text , ParsePosition parsePosition , double baseValue , double upperBound , boolean lenientParse ) { } } | // if we ' re not in byDigits mode , we can just use the inherited
// doParse ( )
if ( ! byDigits ) { return super . doParse ( text , parsePosition , baseValue , 0 , lenientParse ) ; } else { // if we ARE in byDigits mode , parse the text one digit at a time
// using this substitution ' s owning rule set ( we do this by setting
// upperBound to 10 when calling doParse ( ) ) until we reach
// nonmatching text
String workText = text ; ParsePosition workPos = new ParsePosition ( 1 ) ; double result ; int digit ; DigitList dl = new DigitList ( ) ; while ( workText . length ( ) > 0 && workPos . getIndex ( ) != 0 ) { workPos . setIndex ( 0 ) ; digit = ruleSet . parse ( workText , workPos , 10 ) . intValue ( ) ; if ( lenientParse && workPos . getIndex ( ) == 0 ) { Number n = ruleSet . owner . getDecimalFormat ( ) . parse ( workText , workPos ) ; if ( n != null ) { digit = n . intValue ( ) ; } } if ( workPos . getIndex ( ) != 0 ) { dl . append ( '0' + digit ) ; parsePosition . setIndex ( parsePosition . getIndex ( ) + workPos . getIndex ( ) ) ; workText = workText . substring ( workPos . getIndex ( ) ) ; while ( workText . length ( ) > 0 && workText . charAt ( 0 ) == ' ' ) { workText = workText . substring ( 1 ) ; parsePosition . setIndex ( parsePosition . getIndex ( ) + 1 ) ; } } } result = dl . count == 0 ? 0 : dl . getDouble ( ) ; result = composeRuleValue ( result , baseValue ) ; return new Double ( result ) ; } |
public class Curve {

    /**
     * Evaluates a Catmull-Rom spline at parameter {@code x}.
     *
     * @param x the input parameter; values outside [0, 1] are clamped
     * @param numKnots the number of knots in the spline (must be at least 4)
     * @param knots the array of knots
     * @return the interpolated spline value
     * @throws IllegalArgumentException if fewer than four knots are supplied
     */
    public static float Spline(float x, int numKnots, float[] knots) {
        int numSpans = numKnots - 3;
        if (numSpans < 1) {
            throw new IllegalArgumentException("Too few knots in spline");
        }

        // Clamp the parameter into [0, 1], scale into span space, and split
        // into the span index plus the local parameter t within that span.
        float t = Math.max(0, Math.min(1, x)) * numSpans;
        int span = Math.min((int) t, numKnots - 4);
        t -= span;

        float k0 = knots[span];
        float k1 = knots[span + 1];
        float k2 = knots[span + 2];
        float k3 = knots[span + 3];

        // Catmull-Rom basis coefficients for the cubic in t. The zero-weight
        // terms are kept to mirror the basis matrix exactly.
        float c3 = -0.5f * k0 + 1.5f * k1 + -1.5f * k2 + 0.5f * k3;
        float c2 = 1f * k0 + -2.5f * k1 + 2f * k2 + -0.5f * k3;
        float c1 = -0.5f * k0 + 0f * k1 + 0.5f * k2 + 0f * k3;
        float c0 = 0f * k0 + 1f * k1 + 0f * k2 + 0f * k3;

        // Evaluate the cubic via Horner's rule.
        return ((c3 * t + c2) * t + c1) * t + c0;
    }
}
public class TypeUtil { /** * 获得指定类型中所有泛型参数类型 , 例如 :
* < pre >
* class A & lt ; T & gt ;
* class B extends A & lt ; String & gt ;
* < / pre >
* 通过此方法 , 传入B . class即可得到String
* @ param type 指定类型
* @ return 所有泛型参数类型 */
public static Type [ ] getTypeArguments ( Type type ) { } } | if ( null == type ) { return null ; } final ParameterizedType parameterizedType = toParameterizedType ( type ) ; return ( null == parameterizedType ) ? null : parameterizedType . getActualTypeArguments ( ) ; |
public class BasePGPCommon {
    /**
     * Unlocks and returns the private key held in the given secret key.
     *
     * @param pgpSecretKey the secret key
     * @param password the password to unlock the private key
     * @return the unlocked private key, or null if none could be extracted
     * @throws PGPException if decryption of the secret key fails
     */
    protected PGPPrivateKey findPrivateKey ( PGPSecretKey pgpSecretKey , String password ) throws PGPException {
        LOGGER . trace ( "findPrivateKey(PGPSecretKey, String)" ) ;
        // Never log the password itself, only whether one was supplied.
        LOGGER . trace ( "Secret Key: {}, Password: {}" , pgpSecretKey == null ? "not set" : "set" , password == null ? "not set" : "********" ) ;
        PGPPrivateKey result = null ;
        // NOTE(review): a null password would NPE on toCharArray() below even
        // though the trace logging above tolerates it -- confirm callers
        // never pass null.
        PBESecretKeyDecryptor pbeSecretKeyDecryptor = new BcPBESecretKeyDecryptorBuilder ( new BcPGPDigestCalculatorProvider ( ) ) . build ( password . toCharArray ( ) ) ;
        LOGGER . info ( "Extracting private key" ) ;
        result = pgpSecretKey . extractPrivateKey ( pbeSecretKeyDecryptor ) ;
        if ( result == null && LOGGER . isErrorEnabled ( ) ) {
            LOGGER . error ( "No private key could be extracted" ) ;
        }
        return result ;
    }
}
public class JobMarshaller {
    /**
     * Marshalls the given {@code Job} into the protocol representation.
     *
     * @param job the object to marshall; must not be null
     * @param protocolMarshaller receives each field with its binding descriptor
     * @throws SdkClientException if {@code job} is null or marshalling fails
     */
    public void marshall ( Job job , ProtocolMarshaller protocolMarshaller ) {
        if ( job == null ) {
            throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ;
        }
        try {
            // Emit each field with its generated binding descriptor.
            protocolMarshaller . marshall ( job . getId ( ) , ID_BINDING ) ;
            protocolMarshaller . marshall ( job . getData ( ) , DATA_BINDING ) ;
            protocolMarshaller . marshall ( job . getNonce ( ) , NONCE_BINDING ) ;
            protocolMarshaller . marshall ( job . getAccountId ( ) , ACCOUNTID_BINDING ) ;
        } catch ( Exception e ) {
            // Wrap any failure in the SDK's client exception type.
            throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ;
        }
    }
}
public class JMLambda {
    /**
     * Applies {@code consumer} to {@code target} only when {@code bool} is true.
     *
     * @param <T> the type of the target
     * @param bool condition controlling whether the consumer runs
     * @param target value handed to the consumer
     * @param consumer action to run when the condition holds
     */
    public static <T> void consumeIfTrue(boolean bool, T target, Consumer<T> consumer) {
        if (!bool) {
            return;
        }
        consumer.accept(target);
    }
}
public class SetUtil {
    /**
     * Returns a read-only, live view of the union of {@code set1} and
     * {@code set2} (elements present in either set). No new Set is copied;
     * attempting to modify the view throws UnsupportedOperationException.
     *
     * @param set1 the first set
     * @param set2 the second set
     * @return a read-only union view backed by the two input sets
     */
    public static < E > Set < E > unionView ( final Set < ? extends E > set1 , final Set < ? extends E > set2 ) {
        // Guava's view reflects later changes to set1/set2.
        return Sets . union ( set1 , set2 ) ;
    }
}
public class NioUtils { /** * Convert a File into a ByteBuffer
* @ param file File to be converted
* @ return ByteBuffer containing the file */
public static ByteBuffer toByteBuffer ( File file ) { } } | ByteBuffer buffer = ByteBuffer . allocateDirect ( ( int ) file . length ( ) ) ; try { buffer . put ( toByteArray ( new FileInputStream ( file ) ) ) ; } catch ( IOException e ) { logger . error ( "Failed to write file to byte array: " + e . getMessage ( ) ) ; } buffer . flip ( ) ; return buffer ; |
public class ObjectFactory {
    /**
     * Creates an instance of {@link JAXBElement}{@code <}{@link Term}{@code >}
     * wrapping the given value under the Intuit finance/v3 Term element.
     *
     * @param value the Term payload to wrap; may be null
     * @return a JAXBElement carrying {@code value}
     */
    @ XmlElementDecl ( namespace = "http://schema.intuit.com/finance/v3" , name = "Term" , substitutionHeadNamespace = "http://schema.intuit.com/finance/v3" , substitutionHeadName = "IntuitObject" )
    public JAXBElement < Term > createTerm ( Term value ) {
        // _Term_QNAME is the generated qualified-name constant for this element.
        return new JAXBElement < Term > ( _Term_QNAME , Term . class , null , value ) ;
    }
}
public class GoogleMapShapeConverter {
    /**
     * Converts a {@link Polyline} to a {@link LineString}.
     *
     * @param polyline the polyline to convert
     * @param hasZ whether the result carries z (elevation) values
     * @param hasM whether the result carries m (measure) values
     * @return the converted line string
     */
    public LineString toLineString ( Polyline polyline , boolean hasZ , boolean hasM ) {
        // Delegate to the point-list overload.
        return toLineString ( polyline . getPoints ( ) , hasZ , hasM ) ;
    }
}
public class AbstractStreamOperator {
    /**
     * Creates a partitioned state handle using the state backend configured
     * for this task, scoped to the void (global) namespace.
     *
     * @throws IllegalStateException if the key/value state was already initialized
     * @throws Exception if the state backend cannot create the key/value state
     */
    protected < S extends State > S getPartitionedState ( StateDescriptor < S , ? > stateDescriptor ) throws Exception {
        // VoidNamespace collapses the namespace dimension for plain keyed state.
        return getPartitionedState ( VoidNamespace . INSTANCE , VoidNamespaceSerializer . INSTANCE , stateDescriptor ) ;
    }
}
public class AbstractCsvAnnotationBeanReader { /** * 指定したBeanのクラスのインスタンスを作成する 。
* @ param clazz Beanのクラスタイプ 。
* @ return Beanのインスタンス 。
* @ throws SuperCsvReflectionException Beanのインスタンスの作成に失敗した場合 。 */
protected T instantiateBean ( final Class < T > clazz ) { } } | final T bean ; if ( clazz . isInterface ( ) ) { bean = BeanInterfaceProxy . createProxy ( clazz ) ; } else { try { bean = clazz . newInstance ( ) ; } catch ( InstantiationException e ) { throw new SuperCsvReflectionException ( String . format ( "error instantiating bean, check that %s has a default no-args constructor" , clazz . getName ( ) ) , e ) ; } catch ( IllegalAccessException e ) { throw new SuperCsvReflectionException ( "error instantiating bean" , e ) ; } } return bean ; |
public class BloomCalculations {
    /**
     * Given a maximum tolerable false-positive probability, computes a Bloom
     * specification yielding less than that rate while minimizing first the
     * number of buckets per element (bitvector size is considered more
     * expensive than computing power) and then the number of hash functions.
     *
     * @param maxBucketsPerElement the maximum buckets available for the filter
     * @param maxFalsePosProb the maximum tolerable false-positive rate
     * @return a specification with a false-positive rate below the requested one
     * @throws UnsupportedOperationException if no satisfying filter exists
     */
    public static BloomSpecification computeBloomSpec ( int maxBucketsPerElement , double maxFalsePosProb ) {
        assert maxBucketsPerElement >= 1 ;
        assert maxBucketsPerElement <= probs . length - 1 ;
        // Highest hash-function count tabulated for this bucket count.
        int maxK = probs [ maxBucketsPerElement ] . length - 1 ;
        // Handle the trivial cases
        if ( maxFalsePosProb >= probs [ minBuckets ] [ minK ] ) {
            // Even the smallest tabulated filter already satisfies the rate.
            return new BloomSpecification ( 2 , optKPerBuckets [ 2 ] ) ;
        }
        if ( maxFalsePosProb < probs [ maxBucketsPerElement ] [ maxK ] ) {
            throw new UnsupportedOperationException ( String . format ( "Unable to satisfy %s with %s buckets per element" , maxFalsePosProb , maxBucketsPerElement ) ) ;
        }
        // First find the minimal required number of buckets:
        int bucketsPerElement = 2 ;
        int K = optKPerBuckets [ 2 ] ;
        while ( probs [ bucketsPerElement ] [ K ] > maxFalsePosProb ) {
            bucketsPerElement ++ ;
            K = optKPerBuckets [ bucketsPerElement ] ;
        }
        // Now that the number of buckets is sufficient, see if we can relax K
        // without losing too much precision.
        while ( probs [ bucketsPerElement ] [ K - 1 ] <= maxFalsePosProb ) {
            K -- ;
        }
        return new BloomSpecification ( K , bucketsPerElement ) ;
    }
}
public class PublicIPAddressesInner {
    /**
     * Gets information about all public IP addresses in a virtual machine IP
     * configuration in a virtual machine scale set. Blocks on the first page
     * and returns a PagedList that fetches subsequent pages lazily.
     *
     * @param resourceGroupName the name of the resource group
     * @param virtualMachineScaleSetName the name of the virtual machine scale set
     * @param virtualmachineIndex the virtual machine index
     * @param networkInterfaceName the network interface name
     * @param ipConfigurationName the IP configuration name
     * @throws IllegalArgumentException if parameters fail validation
     * @throws CloudException if the request is rejected by the server
     * @throws RuntimeException for all other wrapped checked exceptions
     * @return the PagedList of PublicIPAddressInner if successful
     */
    public PagedList < PublicIPAddressInner > listVirtualMachineScaleSetVMPublicIPAddresses ( final String resourceGroupName , final String virtualMachineScaleSetName , final String virtualmachineIndex , final String networkInterfaceName , final String ipConfigurationName ) {
        // Fetch the first page synchronously.
        ServiceResponse < Page < PublicIPAddressInner > > response = listVirtualMachineScaleSetVMPublicIPAddressesSinglePageAsync ( resourceGroupName , virtualMachineScaleSetName , virtualmachineIndex , networkInterfaceName , ipConfigurationName ) . toBlocking ( ) . single ( ) ;
        // Later pages are fetched on demand while the list is traversed.
        return new PagedList < PublicIPAddressInner > ( response . body ( ) ) {
            @ Override
            public Page < PublicIPAddressInner > nextPage ( String nextPageLink ) {
                return listVirtualMachineScaleSetVMPublicIPAddressesNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) . body ( ) ;
            }
        } ;
    }
}
public class Jsoup {
    /**
     * Parses HTML into a Document using the provided Parser (e.g. the XML
     * parser for non-HTML input).
     *
     * @param html HTML to parse
     * @param baseUri URL the HTML was retrieved from; used to resolve relative
     *        URLs that occur before any {@code <base href>} tag
     * @param parser alternate parser to use
     * @return the parsed Document
     */
    public static Document parse ( String html , String baseUri , Parser parser ) {
        return parser . parseInput ( html , baseUri ) ;
    }
}
public class ParsedValues {
    /**
     * Stores an int value for the given element; called by format processors.
     * Throws AmbivalentValueException when a different value was already
     * stored for the element and duplicate keys are not allowed.
     */
    void put ( ChronoElement < ? > element , int v ) {
        int pos ;
        Object current ;
        Object [ ] keys = this . keys ;
        if ( keys == null ) {
            // Fast path: common date/time elements live in dedicated slots.
            // NOTE(review): in this mode len/mask/threshold/count appear to be
            // reused as value slots for hour/minute/second/nano, and are only
            // interpreted as hash-table state once `keys` is non-null --
            // confirm this invariant holds elsewhere in the class.
            if ( element == PlainDate . YEAR ) {
                if ( this . duplicateKeysAllowed || ( this . ints [ 0 ] == Integer . MIN_VALUE ) || ( this . ints [ 0 ] == v ) ) {
                    this . ints [ 0 ] = v ;
                } else {
                    throw new AmbivalentValueException ( element ) ;
                }
            } else if ( element == PlainDate . MONTH_AS_NUMBER ) {
                if ( this . duplicateKeysAllowed || ( this . ints [ 1 ] == Integer . MIN_VALUE ) || ( this . ints [ 1 ] == v ) ) {
                    this . ints [ 1 ] = v ;
                } else {
                    throw new AmbivalentValueException ( element ) ;
                }
            } else if ( element == PlainDate . DAY_OF_MONTH ) {
                if ( this . duplicateKeysAllowed || ( this . ints [ 2 ] == Integer . MIN_VALUE ) || ( this . ints [ 2 ] == v ) ) {
                    this . ints [ 2 ] = v ;
                } else {
                    throw new AmbivalentValueException ( element ) ;
                }
            } else if ( element == PlainTime . DIGITAL_HOUR_OF_DAY ) {
                if ( this . duplicateKeysAllowed || ( this . len == Integer . MIN_VALUE ) || ( this . len == v ) ) {
                    this . len = v ;
                } else {
                    throw new AmbivalentValueException ( element ) ;
                }
            } else if ( element == PlainTime . MINUTE_OF_HOUR ) {
                if ( this . duplicateKeysAllowed || ( this . mask == Integer . MIN_VALUE ) || ( this . mask == v ) ) {
                    this . mask = v ;
                } else {
                    throw new AmbivalentValueException ( element ) ;
                }
            } else if ( element == PlainTime . SECOND_OF_MINUTE ) {
                if ( this . duplicateKeysAllowed || ( this . threshold == Integer . MIN_VALUE ) || ( this . threshold == v ) ) {
                    this . threshold = v ;
                } else {
                    throw new AmbivalentValueException ( element ) ;
                }
            } else if ( element == PlainTime . NANO_OF_SECOND ) {
                if ( this . duplicateKeysAllowed || ( this . count == Integer . MIN_VALUE ) || ( this . count == v ) ) {
                    this . count = v ;
                } else {
                    throw new AmbivalentValueException ( element ) ;
                }
            } else {
                // Uncommon elements fall back to a boxed map.
                Map < ChronoElement < ? > , Object > m = this . map ;
                if ( m == null ) {
                    m = new HashMap < > ( ) ;
                    this . map = m ;
                }
                Object newValue = Integer . valueOf ( v ) ;
                if ( this . duplicateKeysAllowed || ! m . containsKey ( element ) || newValue . equals ( m . get ( element ) ) ) {
                    m . put ( element , newValue ) ;
                    return ;
                } else {
                    throw new AmbivalentValueException ( element ) ;
                }
            }
            return ;
        }
        // Hash-table path: open addressing with linear probing over `keys`;
        // values are stored in the parallel `ints` array at the same slot.
        if ( ! ( ( current = keys [ pos = ( mix ( element . hashCode ( ) ) & this . mask ) ] ) == null ) ) {
            if ( current . equals ( element ) ) {
                if ( this . duplicateKeysAllowed || ( this . ints [ pos ] == v ) ) {
                    this . ints [ pos ] = v ;
                    return ;
                } else {
                    throw new AmbivalentValueException ( element ) ;
                }
            }
            // Probe forward until a free slot or the matching key is found.
            while ( ! ( ( current = keys [ pos = ( pos + 1 ) & this . mask ] ) == null ) ) {
                if ( current . equals ( element ) ) {
                    if ( this . duplicateKeysAllowed || ( this . ints [ pos ] == v ) ) {
                        this . ints [ pos ] = v ;
                        return ;
                    } else {
                        throw new AmbivalentValueException ( element ) ;
                    }
                }
            }
        }
        // Insert into the free slot and grow when the load factor is exceeded.
        keys [ pos ] = element ;
        this . ints [ pos ] = v ;
        if ( this . count ++ >= this . threshold ) {
            rehash ( arraySize ( this . count ) ) ;
        }
    }
}
public class AbstractExtendedSet {
    /**
     * {@inheritDoc}
     *
     * @return an Iterable whose iterator walks this set in descending order;
     *         each call to iterator() delegates to descendingIterator()
     */
    @ Override
    public Iterable < T > descending ( ) {
        return new Iterable < T > ( ) {
            @ Override
            public Iterator < T > iterator ( ) {
                return descendingIterator ( ) ;
            }
        } ;
    }
}
public class AttributesManager {
    /**
     * Retrieves the current persistence attributes, loading them from the
     * configured PersistenceAdapter on first access and caching the result.
     * Modifications to the returned map are not persisted back unless
     * savePersistentAttributes() is called.
     *
     * @return map of existing persistence attributes, or an empty map if none
     * @throws IllegalStateException if no PersistenceAdapter is configured
     */
    public Map < String , Object > getPersistentAttributes ( ) {
        Request request = requestEnvelope . getRequest ( ) ;
        if ( persistenceAdapter == null ) {
            throw new IllegalStateException ( "Attempting to read persistence attributes without configured persistence adapter" ) ;
        }
        if ( ! persistenceAttributesSet ) {
            // Lazy load exactly once; later calls reuse the cached map.
            Optional < Map < String , Object > > retrievedAttributes = persistenceAdapter . getAttributes ( requestEnvelope ) ;
            if ( retrievedAttributes . isPresent ( ) ) {
                logger . debug ( "[{}] Found existing persistence attributes" , request . getRequestId ( ) ) ;
                persistentAttributes = retrievedAttributes . get ( ) ;
            } else {
                logger . debug ( "[{}] No existing persistence attributes" , request . getRequestId ( ) ) ;
                persistentAttributes = new HashMap < > ( ) ;
            }
            persistenceAttributesSet = true ;
        }
        return persistentAttributes ;
    }
}
public class HistoryCommand { /** * Helper method to format a timestamp .
* @ param timeLive
* @ return */
private static String formatTimeLive ( long timeLive ) { } } | String timeString = "ms" ; timeString = ( timeLive % 1000 ) + timeString ; timeLive = timeLive / 1000 ; if ( timeLive > 0 ) { timeString = ( timeLive % 60 ) + "s" + timeString ; timeLive = timeLive / 60 ; if ( timeLive > 0 ) { timeString = ( timeLive % 60 ) + "m" + timeString ; timeLive = timeLive / 60 ; if ( timeLive > 0 ) { timeString = ( timeLive % 24 ) + "h" + timeString ; timeLive = timeLive / 24 ; if ( timeLive > 0 ) { timeString = ( timeLive ) + "d" + timeString ; } } } } return timeString ; |
public class MetricContext {
    /**
     * Gets a ContextAwareMeter with the given name, creating it via the
     * supplied factory if it does not exist yet.
     *
     * @param name name of the meter
     * @param factory factory used to build the meter on first request
     * @return the existing or newly created meter
     */
    public ContextAwareMeter contextAwareMeter ( String name , ContextAwareMetricFactory < ContextAwareMeter > factory ) {
        return this . innerMetricContext . getOrCreate ( name , factory ) ;
    }
}
public class RestItemHandlerImpl { /** * Adds the content of the request as a node ( or subtree of nodes ) at the location specified by { @ code path } .
* The primary type and mixin type ( s ) may optionally be specified through the { @ code jcr : primaryType } and
* { @ code jcr : mixinTypes } properties .
* @ param request the servlet request ; may not be null or unauthenticated
* @ param repositoryName the URL - encoded repository name
* @ param workspaceName the URL - encoded workspace name
* @ param path the path to the item
* @ param requestBody the JSON - encoded representation of the node or nodes to be added
* @ return the JSON - encoded representation of the node or nodes that were added . This will differ from { @ code requestBody } in
* that auto - created and protected properties ( e . g . , jcr : uuid ) will be populated .
* @ throws javax . jcr . RepositoryException if any other error occurs while interacting with the repository */
@ Override public RestItem addItem ( Request request , String repositoryName , String workspaceName , String path , String requestBody ) throws RepositoryException { } } | JsonNode requestBodyJSON = stringToJSONObject ( requestBody ) ; String parentAbsPath = parentPath ( path ) ; String newNodeName = newNodeName ( path ) ; Session session = getSession ( request , repositoryName , workspaceName ) ; Node parentNode = ( Node ) session . getItem ( parentAbsPath ) ; Node newNode = addNode ( parentNode , newNodeName , requestBodyJSON ) ; session . save ( ) ; RestItem restNewNode = createRestItem ( request , 0 , session , newNode ) ; return restNewNode ; |
public class ExpectationMaximizationGmm_F64 {
    /**
     * E-step: for each point, computes the "responsibility" (normalized
     * likelihood) of each Gaussian in the mixture.
     *
     * @return the sum of chi-square of each point's best-matching Gaussian;
     *         can be used to estimate the total error / test convergence
     */
    protected double expectation ( ) {
        double sumChiSq = 0 ;
        for ( int i = 0 ; i < info . size ( ) ; i ++ ) {
            PointInfo p = info . get ( i ) ;
            // identify the best cluster match and save its chi-square for
            // convergence testing
            double bestLikelihood = 0 ;
            double bestChiSq = Double . MAX_VALUE ;
            double total = 0 ;
            for ( int j = 0 ; j < mixture . size ; j ++ ) {
                GaussianLikelihoodManager . Likelihood g = likelihoodManager . getLikelihood ( j ) ;
                double likelihood = g . likelihood ( p . point ) ;
                // store the raw likelihood per mixture and accumulate the total
                total += p . weights . data [ j ] = likelihood ;
                if ( likelihood > bestLikelihood ) {
                    bestLikelihood = likelihood ;
                    bestChiSq = g . getChisq ( ) ;
                }
            }
            // normalize so the responsibilities sum up to 1
            if ( total > 0 ) {
                for ( int j = 0 ; j < mixture . size ; j ++ ) {
                    p . weights . data [ j ] /= total ;
                }
            }
            // only add the best chi-square since the other mixtures might be
            // far away (the weights could be used for this too)
            sumChiSq += bestChiSq ;
        }
        return sumChiSq ;
    }
}
public class Sort {
    /**
     * Checks whether the short array is sorted in non-decreasing order by
     * scanning it once and comparing each element with its predecessor.
     * <br>
     * <i>Runtime:</i> O(n)
     *
     * @param shortArray the short array to check
     * @return <i>true</i> if the array is sorted, else <i>false</i>
     */
    public static boolean isSorted(short[] shortArray) {
        for (int i = 1; i < shortArray.length; i++) {
            if (shortArray[i - 1] > shortArray[i]) {
                return false;
            }
        }
        return true;
    }
}
public class ElasticPoolsInner {
    /**
     * Gets the next page of elastic pools in a server.
     *
     * @param nextPageLink the NextLink from the previous successful List call
     * @throws IllegalArgumentException if parameters fail validation
     * @return an observable emitting the next Page of ElasticPoolInner
     */
    public Observable < Page < ElasticPoolInner > > listByServerNextAsync ( final String nextPageLink ) {
        // Unwrap the ServiceResponse envelope, emitting only the page body.
        return listByServerNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < ElasticPoolInner > > , Page < ElasticPoolInner > > ( ) {
            @ Override
            public Page < ElasticPoolInner > call ( ServiceResponse < Page < ElasticPoolInner > > response ) {
                return response . body ( ) ;
            }
        } ) ;
    }
}
public class ServiceLoaderModule { /** * Load services and make them available via a Set < S > binding using
* multi - binding . Note that this methods loads services lazily but also
* allows for additional bindings to be done via Guice modules .
* @ param type */
public < S > ServiceBinder < S > bindServices ( final Class < S > type ) { } } | ServiceBinderImpl < S > binder = new ServiceBinderImpl < S > ( type ) ; binders . add ( binder ) ; return binder ; |
public class PreambleUtil { /** * Flags */
static void insertEmptyFlag ( final WritableMemory wmem , final boolean empty ) { } } | int flags = wmem . getByte ( FLAGS_BYTE ) ; if ( empty ) { flags |= EMPTY_FLAG_MASK ; } else { flags &= ~ EMPTY_FLAG_MASK ; } wmem . putByte ( FLAGS_BYTE , ( byte ) flags ) ; |
public class BaseTable { /** * Set up / do the local criteria .
* This is only here to accommodate local file systems that can ' t handle
* REMOTE criteria . All you have to do here to handle the remote criteria
* locally is to call : return record . handleRemoteCriteria ( xx , yy ) . */
public boolean doLocalCriteria ( StringBuffer strFilter , boolean bIncludeFileName , Vector < BaseField > vParamList ) { } } | // Default BaseListener
if ( this . isTable ( ) ) // For tables , do the remote criteria now
return this . getRecord ( ) . handleRemoteCriteria ( strFilter , bIncludeFileName , vParamList ) ; // If can ' t handle remote
else return true ; // Record okay , don ' t skip it |
public class BaseStepControllerImpl { /** * The only valid states at this point are STARTED , STOPPING , or FAILED .
* been able to get to STOPPED , or COMPLETED yet at this point in the code . */
private void transitionToFinalBatchStatus ( ) { } } | BatchStatus currentBatchStatus = stepContext . getBatchStatus ( ) ; if ( currentBatchStatus . equals ( BatchStatus . STARTED ) ) { updateBatchStatus ( BatchStatus . COMPLETED ) ; } else if ( currentBatchStatus . equals ( BatchStatus . STOPPING ) ) { updateBatchStatus ( BatchStatus . STOPPED ) ; } else if ( currentBatchStatus . equals ( BatchStatus . FAILED ) ) { updateBatchStatus ( BatchStatus . FAILED ) ; // Should have already been done but maybe better for possible code refactoring to have it here .
} else { throw new IllegalStateException ( "Step batch status should not be in a " + currentBatchStatus . name ( ) + " state" ) ; } |
public class ContextInserterWorkerImpl {
    /**
     * Should be called when an error occurs after insertContext, giving system
     * context providers an opportunity to react to the failure. The delegation
     * to the invoker is currently disabled (Liberty change); only entry/exit
     * tracing remains.
     *
     * @throws UnsatisfiedLinkError if the underlying list of handlers cannot be located
     */
    public void requestFailed ( ) {
        if ( _tc . isEntryEnabled ( ) ) SibTr . entry ( this , _tc , "requestFailed" ) ; // lohith liberty change
        // _invoker.requestFailed();
        if ( _tc . isEntryEnabled ( ) ) SibTr . exit ( this , _tc , "requestFailed" ) ;
    }
}
public class TarOutputStreamImpl { /** * Put an entry on the output stream . This writes the entry ' s header record and positions the output stream for
* writing the contents of the entry . Once this method is called , the stream is ready for calls to write ( ) to write
* the entry ' s contents . Once the contents are written , closeEntry ( ) < B > MUST < / B > be called to ensure that all
* buffered data is completely written to the output stream .
* @ param entry
* The TarEntry to be written to the archive . */
public void putNextEntry ( TarEntry entry ) throws IOException { } } | StringBuffer name = entry . getHeader ( ) . name ; // NOTE
// This check is not adequate , because the maximum file length that
// can be placed into a POSIX ( ustar ) header depends on the precise
// locations of the path elements ( slashes ) within the file ' s full
// pathname . For this reason , writeEntryHeader ( ) can still throw an
// InvalidHeaderException if the file ' s full pathname will not fit
// in the header .
if ( ( entry . isUnixTarFormat ( ) && name . length ( ) > TarHeader . NAMELEN ) || ( ! entry . isUnixTarFormat ( ) && name . length ( ) > ( TarHeader . NAMELEN + TarHeader . PREFIXLEN ) ) ) { throw new InvalidHeaderException ( "file name '" + name + "' is too long ( " + name . length ( ) + " > " + ( entry . isUnixTarFormat ( ) ? TarHeader . NAMELEN : ( TarHeader . NAMELEN + TarHeader . PREFIXLEN ) ) + " bytes )" ) ; } entry . writeEntryHeader ( this . recordBuf ) ; this . buffer . writeRecord ( this . recordBuf ) ; this . currBytes = 0 ; if ( entry . isDirectory ( ) ) { this . currSize = 0 ; } else { this . currSize = entry . getSize ( ) ; } |
public class V1OperationModel { /** * { @ inheritDoc } */
@ Override public GlobalsModel getGlobals ( ) { } } | if ( _globals == null ) { _globals = ( GlobalsModel ) getFirstChildModel ( GLOBALS ) ; } return _globals ; |
public class Service { /** * Initialize this service . A warning is logged if the service has already been
* initialized . This method must be called before { @ link # start ( ) } . This method
* causes the service ' s state to become { @ link State # INITIALIZED } . */
public final void initialize ( ) { } } | if ( m_state . isInitialized ( ) ) { m_logger . warn ( "initialize(): Service is already initialized -- ignoring" ) ; } else { logParams ( "Initializing with the following service parameters:" ) ; this . initService ( ) ; setState ( State . INITIALIZED ) ; } |
public class current_hostname { /** * Use this API to fetch filtered set of current _ hostname resources .
* filter string should be in JSON format . eg : " vm _ state : DOWN , name : [ a - z ] + " */
public static current_hostname [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } } | current_hostname obj = new current_hostname ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; current_hostname [ ] response = ( current_hostname [ ] ) obj . getfiltered ( service , option ) ; return response ; |
public class BloomFilter {
    /**
     * Combines this bloom filter with another by a bitwise OR of the
     * underlying data, mutating <b>this</b> instance. Callers must ensure the
     * filters are appropriately sized to avoid saturating them.
     *
     * @param that the bloom filter to fold into this one; it is not mutated
     * @throws IllegalArgumentException if {@code isCompatible(that) == false}
     * @since 15.0
     */
    public void putAll ( BloomFilter < T > that ) {
        // Validate compatibility before touching any state; the check order
        // determines which precondition message a caller sees first.
        N . checkArgNotNull ( that ) ;
        N . checkArgument ( this != that , "Cannot combine a BloomFilter with itself." ) ;
        N . checkArgument ( this . numHashFunctions == that . numHashFunctions , "BloomFilters must have the same number of hash functions (%s != %s)" , this . numHashFunctions , that . numHashFunctions ) ;
        N . checkArgument ( this . bitSize ( ) == that . bitSize ( ) , "BloomFilters must have the same size underlying bit arrays (%s != %s)" , this . bitSize ( ) , that . bitSize ( ) ) ;
        N . checkArgument ( this . strategy . equals ( that . strategy ) , "BloomFilters must have equal strategies (%s != %s)" , this . strategy , that . strategy ) ;
        N . checkArgument ( this . funnel . equals ( that . funnel ) , "BloomFilters must have equal funnels (%s != %s)" , this . funnel , that . funnel ) ;
        this . bits . putAll ( that . bits ) ;
    }
}
public class OWLDifferentIndividualsAxiomImpl_CustomFieldSerializer {
    /**
     * Deserializes the content of the object from the GWT
     * SerializationStreamReader by delegating to the static deserialize
     * helper.
     *
     * @param streamReader the stream to read the object's content from
     * @param instance the object instance to populate
     * @throws SerializationException if the deserialization is not successful
     */
    @ Override
    public void deserializeInstance ( SerializationStreamReader streamReader , OWLDifferentIndividualsAxiomImpl instance ) throws SerializationException {
        deserialize ( streamReader , instance ) ;
    }
}
public class CmsJspTagEditable {
    /**
     * Closes the direct edit tag, printing the direct edit HTML to the
     * current page.
     *
     * @return {@link #EVAL_PAGE}
     * @throws JspException in case something goes wrong
     */
    @ Override
    public int doEndTag ( ) throws JspException {
        if ( m_firstOnPage || m_manualPlacement ) {
            // only execute the action for the first "editable" tag on the
            // page (include file), or in manual mode
            editableTagAction ( pageContext , m_provider , m_mode , m_file ) ;
        }
        if ( OpenCms . getSystemInfo ( ) . getServletContainerSettings ( ) . isReleaseTagsAfterEnd ( ) ) {
            // need to release manually, the JSP container may not call
            // release as required (happens with Tomcat)
            release ( ) ;
        }
        return EVAL_PAGE ;
    }
}
public class VMCommandLine { /** * Replies the command line including the options and the standard parameters .
* @ return the command line . */
@ Pure @ SuppressWarnings ( { } } | "checkstyle:cyclomaticcomplexity" , "checkstyle:npathcomplexity" } ) public static String [ ] getAllCommandLineParameters ( ) { final int osize = commandLineOptions == null ? 0 : commandLineOptions . size ( ) ; final int psize = commandLineParameters == null ? 0 : commandLineParameters . length ; final int tsize = ( osize > 0 && psize > 0 ) ? 1 : 0 ; final List < String > params = new ArrayList < > ( osize + tsize ) ; if ( osize > 0 ) { List < Object > values ; String name ; String prefix ; String v ; for ( final Entry < String , List < Object > > entry : commandLineOptions . entrySet ( ) ) { name = entry . getKey ( ) ; prefix = ( name . length ( ) > 1 ) ? "--" : "-" ; // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $
values = entry . getValue ( ) ; if ( values == null || values . isEmpty ( ) ) { params . add ( prefix + name ) ; } else { for ( final Object value : values ) { if ( value != null ) { v = value . toString ( ) ; if ( v != null && v . length ( ) > 0 ) { params . add ( prefix + name + "=" + v ) ; // $ NON - NLS - 1 $
} else { params . add ( prefix + name ) ; } } } } } } if ( tsize > 0 ) { params . add ( "--" ) ; // $ NON - NLS - 1 $
} final String [ ] tab = new String [ params . size ( ) + psize ] ; params . toArray ( tab ) ; params . clear ( ) ; if ( psize > 0 ) { System . arraycopy ( commandLineParameters , 0 , tab , osize + tsize , psize ) ; } return tab ; |
public class StringToObjectConverter { /** * Returns null if a string conversion should happen */
private Object prepareForDirectUsage ( Class expectedClass , Object pArgument ) { } } | Class givenClass = pArgument . getClass ( ) ; if ( expectedClass . isArray ( ) && List . class . isAssignableFrom ( givenClass ) ) { return convertListToArray ( expectedClass , ( List ) pArgument ) ; } else { return expectedClass . isAssignableFrom ( givenClass ) ? pArgument : null ; } |
public class OAuth20HandlerInterceptorAdapter { /** * Is authorization request .
* @ param request the request
* @ param response the response
* @ return the boolean */
protected boolean isAuthorizationRequest ( final HttpServletRequest request , final HttpServletResponse response ) { } } | val requestPath = request . getRequestURI ( ) ; return doesUriMatchPattern ( requestPath , OAuth20Constants . AUTHORIZE_URL ) ; |
public class SAIS {
    /**
     * Induced-sorting step used while computing the suffix array (SA) and
     * BWT: a left-to-right pass fills the "SAl" entries from bucket starts,
     * then a right-to-left pass fills the "SAs" entries from bucket ends.
     * NOTE(review): this looks like the standard SA-IS L-type/S-type
     * induction -- confirm against the surrounding algorithm.
     *
     * @param T the input text (BaseArray abstraction)
     * @param SA the suffix array being filled in
     * @param C per-character counts
     * @param B per-character bucket boundaries
     * @param n length of the text
     * @param k alphabet size
     */
    private static void induceSA ( BaseArray T , int [ ] SA , BaseArray C , BaseArray B , int n , int k ) {
        int b , i , j ;
        int c0 , c1 ;
        /* compute SAl */
        if ( C == B ) {
            getCounts ( T , C , n , k ) ;
        }
        getBuckets ( C , B , k , false ) ; /* find starts of buckets */
        j = n - 1 ;
        b = B . get ( c1 = T . get ( j ) ) ;
        // Entries are bit-complemented (~j) to mark suffixes whose
        // predecessor should not be induced in this pass.
        SA [ b ++ ] = ( ( 0 < j ) && ( T . get ( j - 1 ) < c1 ) ) ? ~ j : j ;
        for ( i = 0 ; i < n ; ++ i ) {
            j = SA [ i ] ;
            SA [ i ] = ~ j ;
            if ( 0 < j ) {
                // Switch buckets when the preceding character changes.
                if ( ( c0 = T . get ( -- j ) ) != c1 ) {
                    B . set ( c1 , b ) ;
                    b = B . get ( c1 = c0 ) ;
                }
                SA [ b ++ ] = ( ( 0 < j ) && ( T . get ( j - 1 ) < c1 ) ) ? ~ j : j ;
            }
        }
        /* compute SAs */
        if ( C == B ) {
            getCounts ( T , C , n , k ) ;
        }
        getBuckets ( C , B , k , true ) ; /* find ends of buckets */
        for ( i = n - 1 , b = B . get ( c1 = 0 ) ; 0 <= i ; -- i ) {
            if ( 0 < ( j = SA [ i ] ) ) {
                if ( ( c0 = T . get ( -- j ) ) != c1 ) {
                    B . set ( c1 , b ) ;
                    b = B . get ( c1 = c0 ) ;
                }
                SA [ -- b ] = ( ( j == 0 ) || ( T . get ( j - 1 ) > c1 ) ) ? ~ j : j ;
            } else {
                SA [ i ] = ~ j ;
            }
        }
    }
}
public class Keys { /** * Returns a key for { @ code type } annotated with { @ code annotations } ,
* reporting failures against { @ code subject } .
* @ param annotations the annotations on a single method , field or parameter .
* This array may contain at most one qualifier annotation . */
public static String get ( Type type , Annotation [ ] annotations , Object subject ) { } } | return get ( type , extractQualifier ( annotations , subject ) ) ; |
public class SmbTreeHandleImpl { /** * { @ inheritDoc }
* @ see jcifs . SmbTreeHandle # getRemoteHostName ( ) */
@ Override public String getRemoteHostName ( ) { } } | try ( SmbSessionImpl session = this . treeConnection . getSession ( ) ; SmbTransportImpl transport = session . getTransport ( ) ) { return transport . getRemoteHostName ( ) ; } |
public class GitCommand { /** * https : / / git - scm . com / docs / git - fetch - pack */
public void unshallow ( ConsoleOutputStreamConsumer outputStreamConsumer , Integer depth ) { } } | log ( outputStreamConsumer , "Unshallowing repository with depth %d" , depth ) ; CommandLine gitFetch = git ( environment ) . withArgs ( "fetch" , "origin" ) . withArg ( String . format ( "--depth=%d" , depth ) ) . withWorkingDir ( workingDir ) ; int result = run ( gitFetch , outputStreamConsumer ) ; if ( result != 0 ) { throw new RuntimeException ( String . format ( "Unshallow repository failed for [%s]" , this . workingRepositoryUrl ( ) ) ) ; } |
public class Pool { /** * - - utf encode / decode */
public static String fromUtf8 ( byte [ ] info , int ofs , int max ) { } } | StringBuilder result ; int i ; int c ; result = new StringBuilder ( max - ofs ) ; i = ofs ; while ( i < max ) { c = info [ i ] ; if ( c >= 0 ) /* same as ( c & 0x80 ) = = 0 , but more efficient */
{ i += 1 ; } else { switch ( c & 0xe0 ) { case 0xc0 : // 2 bytes for 0x0000 or 0x0080 . . 0x07ff
c = ( ( c & 0x1f ) << 6 ) | ( info [ i + 1 ] & 0x3f ) ; i += 2 ; break ; case 0xe0 : // 3 bytes for 0x0800 . . 0xffff
c = ( ( c & 0x1f ) << 12 ) | ( ( info [ i + 1 ] & 0x3f ) << 6 ) | ( ( info [ i + 2 ] & 0x3f ) ) ; i += 3 ; break ; default : throw new RuntimeException ( "illegal utf8 byte: " + c ) ; } } result . append ( ( char ) c ) ; } return result . toString ( ) ; |
public class RobotoTypefaces { /** * Set up typeface for TextView .
* @ param textView The text view
* @ param fontFamily The value of " robotoFontFamily " attribute
* @ param textWeight The value of " robotoTextWeight " attribute
* @ param textStyle The value of " robotoTextStyle " attribute */
public static void setUpTypeface ( @ NonNull TextView textView , @ RobotoFontFamily int fontFamily , @ RobotoTextWeight int textWeight , @ RobotoTextStyle int textStyle ) { } } | setUpTypeface ( textView , obtainTypeface ( textView . getContext ( ) , fontFamily , textWeight , textStyle ) ) ; |
public class TransactionJavaColonHelper { /** * { @ inheritDoc } */
@ Override public Collection < ? extends NameClassPair > listInstances ( JavaColonNamespace namespace , String nameInContext ) { } } | if ( JavaColonNamespace . COMP . equals ( namespace ) && "" . equals ( nameInContext ) ) { ArrayList < NameClassPair > retVal = new ArrayList < NameClassPair > ( ) ; if ( userTranSvcRef != null ) { NameClassPair pair = new NameClassPair ( nameInContext , EmbeddableUserTransactionImpl . class . getName ( ) ) ; retVal . add ( pair ) ; } retVal . add ( new NameClassPair ( nameInContext , TransactionSynchronizationRegistry . class . getName ( ) ) ) ; retVal . add ( new NameClassPair ( nameInContext , UOWManager . class . getName ( ) ) ) ; return retVal ; } else { return Collections . emptyList ( ) ; } |
public class ColumnSlice { /** * Validates that the provided slice array contains only non - overlapped slices valid for a query { @ code reversed }
* or not on a table using { @ code comparator } . */
public static boolean validateSlices ( ColumnSlice [ ] slices , CellNameType type , boolean reversed ) { } } | Comparator < Composite > comparator = reversed ? type . reverseComparator ( ) : type ; for ( int i = 0 ; i < slices . length ; i ++ ) { Composite start = slices [ i ] . start ; Composite finish = slices [ i ] . finish ; if ( start . isEmpty ( ) || finish . isEmpty ( ) ) { if ( start . isEmpty ( ) && i > 0 ) return false ; if ( finish . isEmpty ( ) ) return i == slices . length - 1 ; } else { // ! finish . isEmpty ( ) is imposed by prior loop
if ( i > 0 && comparator . compare ( slices [ i - 1 ] . finish , start ) >= 0 ) return false ; if ( comparator . compare ( start , finish ) > 0 ) return false ; } } return true ; |
public class PackageInfo { /** * The child packages of this package , or the empty list if none .
* @ return the child packages , or the empty list if none . */
public PackageInfoList getChildren ( ) { } } | if ( children == null ) { return PackageInfoList . EMPTY_LIST ; } final PackageInfoList childrenSorted = new PackageInfoList ( children ) ; // Ensure children are sorted
CollectionUtils . sortIfNotEmpty ( childrenSorted , new Comparator < PackageInfo > ( ) { @ Override public int compare ( final PackageInfo o1 , final PackageInfo o2 ) { return o1 . name . compareTo ( o2 . name ) ; } } ) ; return childrenSorted ; |
public class DataSourceConverter { /** * Get data source map .
* @ param dataSourceConfigurationMap data source configuration map
* @ return data source parameter map */
public static Map < String , YamlDataSourceParameter > getDataSourceParameterMap ( final Map < String , DataSourceConfiguration > dataSourceConfigurationMap ) { } } | Map < String , YamlDataSourceParameter > result = new LinkedHashMap < > ( dataSourceConfigurationMap . size ( ) , 1 ) ; for ( Entry < String , DataSourceConfiguration > entry : dataSourceConfigurationMap . entrySet ( ) ) { result . put ( entry . getKey ( ) , createDataSourceParameter ( entry . getValue ( ) ) ) ; } return result ; |
public class ManagementEnforcer { /** * removeNamedGroupingPolicy removes a role inheritance rule from the current named policy .
* @ param ptype the policy type , can be " g " , " g2 " , " g3 " , . .
* @ param params the " g " policy rule .
* @ return succeeds or not . */
public boolean removeNamedGroupingPolicy ( String ptype , String ... params ) { } } | return removeNamedGroupingPolicy ( ptype , Arrays . asList ( params ) ) ; |
public class CmsTemplateMapperConfiguration { /** * Checks if the mapping is enabled for the given root path . < p >
* @ param rootPath a VFS root path
* @ return true if the configuration is enabled for the given root path */
public boolean isEnabledForPath ( String rootPath ) { } } | for ( String path : m_paths ) { if ( CmsStringUtil . isPrefixPath ( path , rootPath ) ) { return true ; } } return false ; |
public class SnowflakeConnectionV1 { /** * Sets the transaction isolation level .
* @ param level transaction level : TRANSACTION _ NONE or TRANSACTION _ READ _ COMMITTED
* @ throws SQLException if any SQL error occurs */
@ Override public void setTransactionIsolation ( int level ) throws SQLException { } } | logger . debug ( "void setTransactionIsolation(int level), level = {}" , level ) ; raiseSQLExceptionIfConnectionIsClosed ( ) ; if ( level == Connection . TRANSACTION_NONE || level == Connection . TRANSACTION_READ_COMMITTED ) { this . transactionIsolation = level ; } else { throw new SQLFeatureNotSupportedException ( "Transaction Isolation " + level + " not supported." , FEATURE_UNSUPPORTED . getSqlState ( ) , FEATURE_UNSUPPORTED . getMessageCode ( ) ) ; } |
public class IntPriorityQueue { /** * Sets the given index to use the specific value
* @ param e the value to store the index of
* @ param i the index of the value */
private void indexArrayStore ( int e , int i ) { } } | if ( valueIndexStore . length < e ) { int oldLength = valueIndexStore . length ; valueIndexStore = Arrays . copyOf ( valueIndexStore , e + 2 ) ; Arrays . fill ( valueIndexStore , oldLength , valueIndexStore . length , - 1 ) ; } valueIndexStore [ e ] = i ; |
public class MetricReader { /** * Reads the metrics from the { @ link MetricProducerManager } and exports them to the { @ code
* metricExporter } .
* @ param metricExporter the exporter called to export the metrics read .
* @ since 0.19 */
public void readAndExport ( MetricExporter metricExporter ) { } } | Span span = tracer . spanBuilder ( spanName ) . setRecordEvents ( true ) . setSampler ( probabilitySampler ) . startSpan ( ) ; Scope scope = tracer . withSpan ( span ) ; try { ArrayList < Metric > metricsList = new ArrayList < > ( ) ; for ( MetricProducer metricProducer : metricProducerManager . getAllMetricProducer ( ) ) { metricsList . addAll ( metricProducer . getMetrics ( ) ) ; } metricExporter . export ( metricsList ) ; } catch ( Throwable e ) { logger . log ( Level . WARNING , "Exception thrown by the metrics exporter." , e ) ; span . setStatus ( Status . UNKNOWN . withDescription ( "Exception when export metrics: " + exceptionMessage ( e ) ) ) ; } finally { scope . close ( ) ; span . end ( ) ; } |
public class XBELValidator { /** * { @ inheritDoc } */
@ Override public List < SAXParseException > validateWithErrors ( final File f ) throws SAXException , IOException { } } | final Validator errorValidator = createNewErrorValidator ( ) ; errorValidator . validate ( utf8SourceForFile ( f ) , null ) ; return ( ( Handler ) errorValidator . getErrorHandler ( ) ) . exceptions ; |
public class ConstantListIndex { /** * implements the visitor to find accesses to lists or arrays using Const
* @ param seen
* the currently visitor opcode */
@ Override public void sawOpcode ( int seen ) { } } | try { stack . precomputation ( this ) ; switch ( state ) { case SAW_NOTHING : if ( seen == Const . ICONST_0 ) { state = State . SAW_CONSTANT_0 ; } else if ( ( seen >= Const . ICONST_1 ) && ( seen <= Const . ICONST_5 ) ) { state = State . SAW_CONSTANT ; } else if ( ( seen == Const . LDC ) || ( seen == Const . LDC_W ) ) { Constant c = getConstantRefOperand ( ) ; if ( c instanceof ConstantInteger ) { state = State . SAW_CONSTANT ; } } break ; case SAW_CONSTANT_0 : case SAW_CONSTANT : switch ( seen ) { case Const . AALOAD : if ( "main" . equals ( this . getMethodName ( ) ) ) { break ; } // $ FALL - THROUGH $
case Const . IALOAD : case Const . LALOAD : case Const . FALOAD : case Const . DALOAD : // case BALOAD : byte and char indexing seems prevalent , and
// case CALOAD : usually harmless so ignore
case Const . SALOAD : if ( stack . getStackDepth ( ) > 1 ) { OpcodeStack . Item item = stack . getStackItem ( 1 ) ; if ( ! isArrayFromUbiquitousMethod ( item ) ) { if ( state == State . SAW_CONSTANT_0 ) { iConst0Looped . set ( getPC ( ) ) ; } else { bugReporter . reportBug ( new BugInstance ( this , BugType . CLI_CONSTANT_LIST_INDEX . name ( ) , NORMAL_PRIORITY ) . addClass ( this ) . addMethod ( this ) . addSourceLine ( this ) ) ; } } } break ; case Const . INVOKEVIRTUAL : if ( Values . SLASHED_JAVA_UTIL_LIST . equals ( getClassConstantOperand ( ) ) ) { String methodName = getNameConstantOperand ( ) ; if ( "get" . equals ( methodName ) ) { if ( state == State . SAW_CONSTANT_0 ) { iConst0Looped . set ( getPC ( ) ) ; } else { bugReporter . reportBug ( new BugInstance ( this , BugType . CLI_CONSTANT_LIST_INDEX . name ( ) , NORMAL_PRIORITY ) . addClass ( this ) . addMethod ( this ) . addSourceLine ( this ) ) ; } } } break ; default : break ; } state = State . SAW_NOTHING ; break ; } if ( ( ( seen >= Const . IFEQ ) && ( seen <= Const . GOTO ) ) || ( seen == Const . GOTO_W ) ) { int branchTarget = this . getBranchTarget ( ) ; for ( int bugPC = iConst0Looped . nextSetBit ( 0 ) ; bugPC >= 0 ; bugPC = iConst0Looped . nextSetBit ( bugPC + 1 ) ) { if ( branchTarget < bugPC ) { if ( ( bugPC - branchTarget ) < max_iConst0LoopDistance ) { bugReporter . reportBug ( new BugInstance ( this , BugType . CLI_CONSTANT_LIST_INDEX . name ( ) , NORMAL_PRIORITY ) . addClass ( this ) . addMethod ( this ) . addSourceLine ( this , bugPC ) ) ; } iConst0Looped . clear ( bugPC ) ; } } } } finally { stack . sawOpcode ( this , seen ) ; } |
public class Reporter { /** * Formats the response parameters to be ' prettily ' printed out in HTML
* @ param response - the http response to be formatted .
* @ return String : a ' prettily ' formatted string that is HTML safe to output */
public static String formatResponse ( Response response ) { } } | if ( response == null ) { return "" ; } StringBuilder output = new StringBuilder ( ) ; if ( response . isData ( ) ) { output . append ( DIV_I ) ; Gson gson = new GsonBuilder ( ) . setPrettyPrinting ( ) . create ( ) ; if ( response . getArrayData ( ) != null ) { output . append ( gson . toJson ( response . getArrayData ( ) ) ) ; } if ( response . getObjectData ( ) != null ) { output . append ( gson . toJson ( response . getObjectData ( ) ) ) ; } output . append ( END_IDIV ) ; } return formatHTML ( output . toString ( ) ) ; |
public class CodeBuilder { /** * invocation style instructions */
public void invokeVirtual ( String methodName , TypeDesc ret , TypeDesc [ ] params ) { } } | invokeVirtual ( mClassFile . getClassName ( ) , methodName , ret , params ) ; |
public class JSONArray { /** * Put or replace a String value in the JSONArray . If the index is greater
* than the length of the JSONArray , then null elements will be added as
* necessary to pad it out . < br >
* The string may be a valid JSON formatted string , in tha case , it will be
* transformed to a JSONArray , JSONObject or JSONNull .
* @ param index The subscript .
* @ param value A String value .
* @ return this .
* @ throws JSONException If the index is negative or if the the value is an
* invalid number . */
public JSONArray element ( int index , String value , JsonConfig jsonConfig ) { } } | if ( index < 0 ) { throw new JSONException ( "JSONArray[" + index + "] not found." ) ; } if ( index < size ( ) ) { if ( value == null ) { this . elements . set ( index , "" ) ; } else if ( JSONUtils . mayBeJSON ( value ) ) { try { this . elements . set ( index , JSONSerializer . toJSON ( value , jsonConfig ) ) ; } catch ( JSONException jsone ) { this . elements . set ( index , JSONUtils . stripQuotes ( value ) ) ; } } else { this . elements . set ( index , JSONUtils . stripQuotes ( value ) ) ; } } else { while ( index != size ( ) ) { element ( JSONNull . getInstance ( ) ) ; } element ( value , jsonConfig ) ; } return this ; |
public class OtpErlangBitstr { /** * Get the size in whole bytes of the bitstr , rest bits in the last byte not
* counted .
* @ return the number of bytes contained in the bintstr . */
public int size ( ) { } } | if ( pad_bits == 0 ) { return bin . length ; } if ( bin . length == 0 ) { throw new java . lang . IllegalStateException ( "Impossible length" ) ; } return bin . length - 1 ; |
public class MiniJPEContentHandler { /** * { @ inheritDoc } */
public Object addXToPoint ( double x , Object point ) { } } | ( ( Point ) point ) . setX ( x ) ; return point ; |
public class ApplicationGatewaysInner { /** * Lists all available web application firewall rule sets .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < ApplicationGatewayAvailableWafRuleSetsResultInner > listAvailableWafRuleSetsAsync ( final ServiceCallback < ApplicationGatewayAvailableWafRuleSetsResultInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( listAvailableWafRuleSetsWithServiceResponseAsync ( ) , serviceCallback ) ; |
public class ContextRegisteringPolicyEnforcementPoint { /** * / * ( non - Javadoc )
* @ see org . fcrepo . server . security . PolicyEnforcementPoint # enforce ( java . lang . String , java . lang . String , java . lang . String , java . lang . String , java . lang . String , org . fcrepo . server . Context ) */
@ SuppressWarnings ( "unchecked" ) @ Override public final void enforce ( String subjectId , String action , String api , String pid , String namespace , Context context ) throws AuthzException { } } | boolean debug = logger . isDebugEnabled ( ) ; long enforceStartTime = debug ? System . currentTimeMillis ( ) : 0 ; try { synchronized ( this ) { // wait , if pdp update is in progress
} if ( ENFORCE_MODE_PERMIT_ALL_REQUESTS . equals ( m_enforceMode ) ) { logger . debug ( "permitting request because enforceMode==ENFORCE_MODE_PERMIT_ALL_REQUESTS" ) ; } else if ( ENFORCE_MODE_DENY_ALL_REQUESTS . equals ( m_enforceMode ) ) { logger . debug ( "denying request because enforceMode==ENFORCE_MODE_DENY_ALL_REQUESTS" ) ; throw new AuthzDeniedException ( "all requests are currently denied" ) ; } else if ( ! ENFORCE_MODE_ENFORCE_POLICIES . equals ( m_enforceMode ) ) { logger . debug ( "denying request because enforceMode is invalid" ) ; throw new AuthzOperationalException ( "invalid enforceMode from config \"" + m_enforceMode + "\"" ) ; } else { ResponseCtx response = null ; String contextIndex = null ; try { contextIndex = Integer . toString ( next ( ) ) ; logger . debug ( "context index set={}" , contextIndex ) ; List < Subject > subjects = wrapSubjects ( subjectId ) ; List < Attribute > actions = wrapActions ( action , api , contextIndex ) ; List < Attribute > resources = wrapResources ( pid , namespace ) ; RequestCtx request = new BasicRequestCtx ( subjects , resources , actions , EMPTY_ENV ) ; if ( debug ) { for ( Attribute tempobj : actions ) { logger . debug ( "request action has {}={}" , tempobj . getId ( ) , tempobj . getValue ( ) ) ; } } m_registry . registerContext ( contextIndex , context ) ; long st = debug ? System . currentTimeMillis ( ) : 0 ; try { // we use a localized EvaluationCtx so that resource - id is correctly located
response = m_pdp . evaluate ( new BasicEvaluationCtx ( request , m_pdpConfig . getAttributeFinder ( ) ) ) ; } finally { if ( debug ) { long dur = System . currentTimeMillis ( ) - st ; logger . debug ( "Policy evaluation took {} ms." , dur ) ; } } logger . debug ( "in pep, after evaluate() called" ) ; } catch ( Throwable t ) { logger . error ( "Error evaluating policy" , t ) ; throw new AuthzOperationalException ( "" ) ; } finally { m_registry . unregisterContext ( contextIndex ) ; } logger . debug ( "in pep, before denyBiasedAuthz() called" ) ; if ( ! denyBiasedAuthz ( response . getResults ( ) ) ) { if ( debug ) { ByteArrayOutputStream bos = new ByteArrayOutputStream ( ) ; response . encode ( bos ) ; logger . debug ( bos . toString ( ) ) ; } throw new AuthzDeniedException ( "" ) ; } } if ( context . getNoOp ( ) ) { throw new AuthzPermittedException ( "noOp" ) ; } } finally { if ( debug ) { long dur = System . currentTimeMillis ( ) - enforceStartTime ; logger . debug ( "Policy enforcement took {} ms." , dur ) ; } } |
public class TrainingsImpl { /** * Get region proposals for an image . Returns empty array if no proposals are found .
* This API will get region proposals for an image along with confidences for the region . It returns an empty array if no proposals are found .
* @ param projectId The project id
* @ param imageId The image id
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < ImageRegionProposal > getImageRegionProposalsAsync ( UUID projectId , UUID imageId , final ServiceCallback < ImageRegionProposal > serviceCallback ) { } } | return ServiceFuture . fromResponse ( getImageRegionProposalsWithServiceResponseAsync ( projectId , imageId ) , serviceCallback ) ; |
public class TreeElement { /** * Remove the child node ( and all children of that child ) at the
* specified position in the child list . If there are no children
* or the specified child position is too large , then this method
* just returns . ( I . e . no runtime exception if the offset argument
* is too large )
* @ param offset Zero - relative offset at which the existing
* node should be removed */
public void removeChild ( int offset ) { } } | if ( _children == null || offset >= _children . size ( ) ) return ; TreeElement child = ( TreeElement ) _children . remove ( offset ) ; child . setParent ( null ) ; child . setName ( null ) ; // Rename all affected children
int size = _children . size ( ) ; for ( int i = offset ; i < size ; i ++ ) { TreeElement thisChild = ( TreeElement ) _children . get ( i ) ; thisChild . updateName ( this , i ) ; } |
public class BaseHolder { /** * Find the key for this BaseHolder */
public String find ( BaseHolder baseHolder ) { } } | if ( m_mapChildHolders == null ) return null ; return m_mapChildHolders . find ( baseHolder ) ; |
public class Environment { /** * Get the Environment properties object .
* @ param strPropertyCode The key I ' m looking for the owner to .
* @ return The owner of this property key . */
public PropertyOwner retrieveUserProperties ( String strRegistrationKey ) { } } | if ( this . getDefaultApplication ( ) != null ) if ( this . getDefaultApplication ( ) != null ) return this . getDefaultApplication ( ) . retrieveUserProperties ( strRegistrationKey ) ; return null ; |
public class VMInfo { /** * 创建JMX连接并构造VMInfo实例 */
public static VMInfo processNewVM ( String pid , String jmxHostAndPort ) { } } | try { final JmxClient jmxClient = new JmxClient ( ) ; jmxClient . connect ( pid , jmxHostAndPort ) ; // 注册JMXClient注销的钩子
Runtime . getRuntime ( ) . addShutdownHook ( new Thread ( new Runnable ( ) { @ Override public void run ( ) { jmxClient . disconnect ( ) ; } } ) ) ; return new VMInfo ( jmxClient , pid ) ; } catch ( Exception e ) { e . printStackTrace ( System . out ) ; } return createDeadVM ( pid , VMInfoState . ERROR_DURING_ATTACH ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.