signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class FluoConfiguration { /** * Adds an { @ link ObserverSpecification } to the configuration using a unique integer prefix that ' s * not currently in use . * @ deprecated since 1.1.0 . Replaced by { @ link # setObserverProvider ( String ) } and * { @ link # getObserverProvider ( ) } */ @ Deprecated public FluoConfiguration addObserver ( ObserverSpecification oconf ) { } }
int next = getNextObserverId ( ) ; addObserver ( oconf , next ) ; return this ;
public class AddShoppingCampaignForShowcaseAds {

    /**
     * Creates a Showcase ad in the given ad group.
     *
     * @param adWordsServices the services factory
     * @param session the session against which requests are made
     * @param adGroup the ad group that will own the new ad
     * @return the newly created ad group ad
     * @throws IOException if uploading either image fails
     */
    private static AdGroupAd createShowcaseAd(AdWordsServicesInterface adWordsServices,
            AdWordsSession session, AdGroup adGroup) throws IOException {
        AdGroupAdServiceInterface adGroupAdService =
                adWordsServices.get(session, AdGroupAdServiceInterface.class);

        // Build the Showcase ad itself.
        ShowcaseAd showcaseAd = new ShowcaseAd();
        showcaseAd.setName("Showcase ad #" + System.currentTimeMillis());
        showcaseAd.setFinalUrls(new String[] {"http://example.com/showcase"});
        showcaseAd.setDisplayUrl("example.com");

        // Required: the ad's expanded image.
        Image expandedImage = new Image();
        expandedImage.setMediaId(uploadImage(adWordsServices, session, "https://goo.gl/IfVlpF"));
        showcaseAd.setExpandedImage(expandedImage);

        // Optional: the collapsed image.
        Image collapsedImage = new Image();
        collapsedImage.setMediaId(uploadImage(adWordsServices, session, "https://goo.gl/NqTxAE"));
        showcaseAd.setCollapsedImage(collapsedImage);

        // Attach the ad to the ad group and submit an ADD operation.
        AdGroupAd adGroupAd = new AdGroupAd();
        adGroupAd.setAdGroupId(adGroup.getId());
        adGroupAd.setAd(showcaseAd);

        AdGroupAdOperation operation = new AdGroupAdOperation();
        operation.setOperand(adGroupAd);
        operation.setOperator(Operator.ADD);

        AdGroupAdReturnValue result =
                adGroupAdService.mutate(new AdGroupAdOperation[] {operation});
        return result.getValue(0);
    }
}
public class ItemIdValueImpl { /** * Parses an item IRI * @ param iri * the item IRI like http : / / www . wikidata . org / entity / Q42 * @ throws IllegalArgumentException * if the IRI is invalid or does not ends with an item id */ static ItemIdValueImpl fromIri ( String iri ) { } }
int separator = iri . lastIndexOf ( '/' ) + 1 ; try { return new ItemIdValueImpl ( iri . substring ( separator ) , iri . substring ( 0 , separator ) ) ; } catch ( IllegalArgumentException e ) { throw new IllegalArgumentException ( "Invalid Wikibase entity IRI: " + iri , e ) ; }
public class Criteria { /** * Adds IN criteria , * customer _ id in ( 1,10,33,44) * large values are split into multiple InCriteria * IN ( 1,10 ) OR IN ( 33 , 44 ) < / br > * The attribute will NOT be translated into column name * @ param column The column name to be used without translation * @ param values The value Collection */ public void addColumnIn ( String column , Collection values ) { } }
List list = splitInCriteria ( column , values , false , IN_LIMIT ) ; int index = 0 ; InCriteria inCrit ; Criteria allInCritaria ; inCrit = ( InCriteria ) list . get ( index ) ; inCrit . setTranslateAttribute ( false ) ; allInCritaria = new Criteria ( inCrit ) ; for ( index = 1 ; index < list . size ( ) ; index ++ ) { inCrit = ( InCriteria ) list . get ( index ) ; inCrit . setTranslateAttribute ( false ) ; allInCritaria . addOrCriteria ( new Criteria ( inCrit ) ) ; } addAndCriteria ( allInCritaria ) ;
public class GeneralSubtrees { /** * intersect this GeneralSubtrees with other . This function * is used in merging permitted NameConstraints . The operation * is performed as follows : * < ul > * < li > If a name in other narrows all names of the same type in this , * the result will contain the narrower name and none of the * names it narrows . * < li > If a name in other widens all names of the same type in this , * the result will not contain the wider name . * < li > If a name in other does not share the same subtree with any name * of the same type in this , then the name is added to the list * of GeneralSubtrees returned . These names should be added to * the list of names that are specifically excluded . The reason * is that , if the intersection is empty , then no names of that * type are permitted , and the only way to express this in * NameConstraints is to include the name in excludedNames . * < li > If a name in this has no name of the same type in other , then * the result contains the name in this . No name of a given type * means the name type is completely permitted . * < li > If a name in other has no name of the same type in this , then * the result contains the name in other . This means that * the name is now constrained in some way , whereas before it was * completely permitted . * < ul > * @ param other GeneralSubtrees to be intersected with this * @ returns GeneralSubtrees to be merged with excluded ; these are * empty - valued name types corresponding to entries that were * of the same type but did not share the same subtree between * this and other . Returns null if no such . */ public GeneralSubtrees intersect ( GeneralSubtrees other ) { } }
if ( other == null ) { throw new NullPointerException ( "other GeneralSubtrees must not be null" ) ; } GeneralSubtrees newThis = new GeneralSubtrees ( ) ; GeneralSubtrees newExcluded = null ; // Step 1 : If this is empty , just add everything in other to this and // return no new excluded entries if ( size ( ) == 0 ) { union ( other ) ; return null ; } // Step 2 : For ease of checking the subtrees , minimize them by // constructing versions that contain only the widest instance of // each type this . minimize ( ) ; other . minimize ( ) ; // Step 3 : Check each entry in this to see whether we keep it or // remove it , and whether we add anything to newExcluded or newThis . // We keep an entry in this unless it is narrowed by all entries in // other . We add an entry to newExcluded if there is at least one // entry of the same nameType in other , but this entry does // not share the same subtree with any of the entries in other . // We add an entry from other to newThis if there is no name of the // same type in this . for ( int i = 0 ; i < size ( ) ; i ++ ) { GeneralNameInterface thisEntry = getGeneralNameInterface ( i ) ; boolean removeThisEntry = false ; // Step 3a : If the widest name of this type in other narrows // thisEntry , remove thisEntry and add widest other to newThis . // Simultaneously , check for situation where there is a name of // this type in other , but no name in other matches , narrows , // or widens thisEntry . boolean sameType = false ; for ( int j = 0 ; j < other . size ( ) ; j ++ ) { GeneralSubtree otherEntryGS = other . get ( j ) ; GeneralNameInterface otherEntry = getGeneralNameInterface ( otherEntryGS ) ; switch ( thisEntry . constrains ( otherEntry ) ) { case NAME_NARROWS : remove ( i ) ; i -- ; newThis . 
add ( otherEntryGS ) ; sameType = false ; break ; case NAME_SAME_TYPE : sameType = true ; continue ; case NAME_MATCH : case NAME_WIDENS : sameType = false ; break ; case NAME_DIFF_TYPE : default : continue ; } break ; } // Step 3b : If sameType is still true , we have the situation // where there was a name of the same type as thisEntry in // other , but no name in other widened , matched , or narrowed // thisEntry . if ( sameType ) { // Step 3b . 1 : See if there are any entries in this and other // with this type that match , widen , or narrow each other . // If not , then we need to add a " widest subtree " of this // type to excluded . boolean intersection = false ; for ( int j = 0 ; j < size ( ) ; j ++ ) { GeneralNameInterface thisAltEntry = getGeneralNameInterface ( j ) ; if ( thisAltEntry . getType ( ) == thisEntry . getType ( ) ) { for ( int k = 0 ; k < other . size ( ) ; k ++ ) { GeneralNameInterface othAltEntry = other . getGeneralNameInterface ( k ) ; int constraintType = thisAltEntry . constrains ( othAltEntry ) ; if ( constraintType == NAME_MATCH || constraintType == NAME_WIDENS || constraintType == NAME_NARROWS ) { intersection = true ; break ; } } } } if ( intersection == false ) { if ( newExcluded == null ) { newExcluded = new GeneralSubtrees ( ) ; } GeneralSubtree widestSubtree = createWidestSubtree ( thisEntry ) ; if ( ! newExcluded . contains ( widestSubtree ) ) { newExcluded . add ( widestSubtree ) ; } } // Step 3b . 2 : Remove thisEntry from this remove ( i ) ; i -- ; } } // Step 4 : Add all entries in newThis to this if ( newThis . size ( ) > 0 ) { union ( newThis ) ; } // Step 5 : Add all entries in other that do not have any entry of the // same type in this to this for ( int i = 0 ; i < other . size ( ) ; i ++ ) { GeneralSubtree otherEntryGS = other . 
get ( i ) ; GeneralNameInterface otherEntry = getGeneralNameInterface ( otherEntryGS ) ; boolean diffType = false ; for ( int j = 0 ; j < size ( ) ; j ++ ) { GeneralNameInterface thisEntry = getGeneralNameInterface ( j ) ; switch ( thisEntry . constrains ( otherEntry ) ) { case NAME_DIFF_TYPE : diffType = true ; // continue to see if we find something later of the // same type continue ; case NAME_NARROWS : case NAME_SAME_TYPE : case NAME_MATCH : case NAME_WIDENS : diffType = false ; // we found an entry of the same type // break because we know we won ' t be adding it to // this now break ; default : continue ; } break ; } if ( diffType ) { add ( otherEntryGS ) ; } } // Step 6 : Return the newExcluded GeneralSubtrees return newExcluded ;
public class BifurcatedConsumerSessionImpl { /** * First check is to make sure that the original Consumer * hasn ' t been closed . Then check that this bifurcated consumer * session is not closed . * @ throws SIObjectClosedException */ private void checkNotClosed ( ) throws SISessionUnavailableException { } }
if ( tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "checkNotClosed" ) ; // Check that the consumer session isn ' t closed _consumerSession . checkNotClosed ( ) ; // Now check that this consumer hasn ' t closed . synchronized ( this ) { if ( _closed ) { SISessionUnavailableException e = new SISessionUnavailableException ( nls . getFormattedMessage ( "CONSUMER_CLOSED_ERROR_CWSIP0177" , new Object [ ] { _localConsumerPoint . getConsumerManager ( ) . getDestination ( ) . getName ( ) , _localConsumerPoint . getConsumerManager ( ) . getMessageProcessor ( ) . getMessagingEngineName ( ) } , null ) ) ; if ( tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "checkNotClosed" , "consumer closed" ) ; throw e ; } } if ( tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "checkNotClosed" ) ;
public class Branch { /** * Adds a leaf to the local branch , looking for collisions * @ param leaf The leaf to add * @ param tree The tree to report to with collisions * @ return True if the leaf was new , false if the leaf already exists or * would cause a collision * @ throws IllegalArgumentException if the incoming leaf is null */ public boolean addLeaf ( final Leaf leaf , final Tree tree ) { } }
if ( leaf == null ) { throw new IllegalArgumentException ( "Null leaves are not allowed" ) ; } if ( leaves == null ) { leaves = new HashMap < Integer , Leaf > ( ) ; leaves . put ( leaf . hashCode ( ) , leaf ) ; return true ; } if ( leaves . containsKey ( leaf . hashCode ( ) ) ) { // if we try to sync a leaf with the same hash of an existing key // but a different TSUID , it ' s a collision , so mark it if ( ! leaves . get ( leaf . hashCode ( ) ) . getTsuid ( ) . equals ( leaf . getTsuid ( ) ) ) { final Leaf collision = leaves . get ( leaf . hashCode ( ) ) ; if ( tree != null ) { tree . addCollision ( leaf . getTsuid ( ) , collision . getTsuid ( ) ) ; } // log at info or lower since it ' s not a system error , rather it ' s // a user issue with the rules or naming schema LOG . warn ( "Incoming TSUID [" + leaf . getTsuid ( ) + "] collided with existing TSUID [" + collision . getTsuid ( ) + "] on display name [" + collision . getDisplayName ( ) + "]" ) ; } return false ; } else { leaves . put ( leaf . hashCode ( ) , leaf ) ; return true ; }
public class TaskReactivateHeaders { /** * Set the time at which the resource was last modified . * @ param lastModified the lastModified value to set * @ return the TaskReactivateHeaders object itself . */ public TaskReactivateHeaders withLastModified ( DateTime lastModified ) { } }
if ( lastModified == null ) { this . lastModified = null ; } else { this . lastModified = new DateTimeRfc1123 ( lastModified ) ; } return this ;
public class ConverterRegistry { /** * 登记自定义转换器 * @ param type 转换的目标类型 * @ param converter 转换器 * @ return { @ link ConverterRegistry } */ public ConverterRegistry putCustom ( Type type , Converter < ? > converter ) { } }
if ( null == customConverterMap ) { synchronized ( this ) { if ( null == customConverterMap ) { customConverterMap = new ConcurrentHashMap < > ( ) ; } } } customConverterMap . put ( type , converter ) ; return this ;
public class ConditionMarshaller { /** * Marshall the given parameter object . */ public void marshall ( Condition condition , ProtocolMarshaller protocolMarshaller ) { } }
if ( condition == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( condition . getType ( ) , TYPE_BINDING ) ; protocolMarshaller . marshall ( condition . getKey ( ) , KEY_BINDING ) ; protocolMarshaller . marshall ( condition . getValue ( ) , VALUE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class HTTaxinvoiceServiceImp { /** * ( non - Javadoc ) * @ see com . popbill . api . HTTaxinvoiceService # getPopUpURL ( java . lang . String , java . lang . String ) */ @ Override public String getPopUpURL ( String CorpNum , String NTSConfirmNum ) throws PopbillException { } }
if ( NTSConfirmNum . length ( ) != 24 ) throw new PopbillException ( - 99999999 , "국세청승인번호가 올바르지 않았습니다." ) ; URLResponse response = httpget ( "/HomeTax/Taxinvoice/" + NTSConfirmNum + "/PopUp" , CorpNum , null , URLResponse . class ) ; return response . url ;
public class WTableOptionsExample { /** * Create a radio button select containing the options . * @ param < T > the enumeration type . * @ param options the list of options * @ return a radioButtonSelect with the options */ private < T extends Enum < T > > EnumerationRadioButtonSelect < T > createRadioButtonGroup ( final T [ ] options ) { } }
EnumerationRadioButtonSelect < T > rbSelect = new EnumerationRadioButtonSelect < > ( options ) ; rbSelect . setButtonLayout ( EnumerationRadioButtonSelect . Layout . FLAT ) ; rbSelect . setFrameless ( true ) ; return rbSelect ;
public class CollisionConfig { /** * Create an XML node from a collision . * @ param root The node root ( must not be < code > null < / code > ) . * @ param collision The collision reference ( must not be < code > null < / code > ) . */ public static void exports ( Xml root , Collision collision ) { } }
Check . notNull ( root ) ; Check . notNull ( collision ) ; final Xml node = root . createChild ( NODE_COLLISION ) ; node . writeString ( ATT_NAME , collision . getName ( ) ) ; node . writeInteger ( ATT_OFFSETX , collision . getOffsetX ( ) ) ; node . writeInteger ( ATT_OFFSETY , collision . getOffsetY ( ) ) ; node . writeInteger ( ATT_WIDTH , collision . getWidth ( ) ) ; node . writeInteger ( ATT_HEIGHT , collision . getHeight ( ) ) ; node . writeBoolean ( ATT_MIRROR , collision . hasMirror ( ) ) ;
public class CasSimpleMultifactorAuthenticationTicketFactory { /** * Create delegated authentication request ticket . * @ param service the service * @ param properties the properties * @ return the delegated authentication request ticket */ @ Override public TransientSessionTicket create ( final Service service , final Map < String , Serializable > properties ) { } }
val id = ticketIdGenerator . getNewTicketId ( "CAS" ) ; return new TransientSessionTicketImpl ( id , expirationPolicy , service , properties ) ;
public class ReciprocalOperator { /** * Gets the amount as reciprocal / multiplicative inversed value ( 1 / n ) . * E . g . ' EUR 2.0 ' will be converted to ' EUR 0.5 ' . * @ return the reciprocal / multiplicative inversed of the amount * @ throws ArithmeticException if the arithmetic operation failed */ @ Override public MonetaryAmount apply ( MonetaryAmount amount ) { } }
Objects . requireNonNull ( amount , "Amount required." ) ; NumberValue num = amount . getNumber ( ) ; BigDecimal one = new BigDecimal ( "1.0" ) . setScale ( num . getScale ( ) < 5 ? 5 : num . getScale ( ) , BigDecimal . ROUND_HALF_EVEN ) ; return amount . getFactory ( ) . setNumber ( one . divide ( num . numberValue ( BigDecimal . class ) , RoundingMode . HALF_EVEN ) ) . create ( ) ;
public class GeneratedDFactoryDaoImpl { /** * query - by method for field updatedBy * @ param updatedBy the specified attribute * @ return an Iterable of DFactorys for the specified updatedBy */ public Iterable < DFactory > queryByUpdatedBy ( java . lang . String updatedBy ) { } }
return queryByField ( null , DFactoryMapper . Field . UPDATEDBY . getFieldName ( ) , updatedBy ) ;
public class GetClusterDetails { /** * Retrieves cluster information . * https : / / developer . couchbase . com / documentation / server / 4.6 / rest - api / rest - cluster - get . html * @ param endpoint Endpoint to which request will be sent . A valid endpoint will be formatted as it shows in * bellow example . * Example : " http : / / somewhere . couchbase . com : 8091" * @ param username Username used in basic authentication . * @ param password Password associated with " username " input to be used in basic authentication . * @ param proxyHost Optional - proxy server used to connect to Couchbase API . If empty no proxy will be used . * @ param proxyPort Optional - proxy server port . You must either specify values for both proxyHost and * proxyPort inputs or leave them both empty . * @ param proxyUsername Optional - proxy server user name . * @ param proxyPassword Optional - proxy server password associated with the proxyUsername input value . * @ param trustAllRoots Optional - specifies whether to enable weak security over SSL / TSL . A certificate is * trusted even if no trusted certification authority issued it . * Valid values : " true " , " false " * Default value : " true " * @ param x509HostnameVerifier Optional - specifies the way the server hostname must match a domain name in the subject ' s * Common Name ( CN ) or subjectAltName field of the X . 509 certificate . Set this to " allow _ all " * to skip any checking . For the value " browser _ compatible " the hostname verifier works * the same way as Curl and Firefox . The hostname must match either the first CN , or any * of the subject - alts . A wildcard can occur in the CN , and in any of the subject - alts . * The only difference between " browser _ compatible " and " strict " is that a wildcard ( such * as " * . foo . com " ) with " browser _ compatible " matches all subdomains , including " a . b . foo . com " . 
* Valid values : " strict " , " browser _ compatible " , " allow _ all " * Default value : " allow _ all " * @ param trustKeystore Optional - pathname of the Java TrustStore file . This contains certificates from other * parties that you expect to communicate with , or from Certificate Authorities that you * trust to identify other parties . If the protocol ( specified by the " url " ) is not " https " * or if trustAllRoots is " true " this input is ignored . * Default value : . . / java / lib / security / cacerts * Format : Java KeyStore ( JKS ) * @ param trustPassword Optional - password associated with the TrustStore file . If trustAllRoots is " false " * and trustKeystore is empty , trustPassword default will be supplied . * Default value : " changeit " * @ param keystore Optional - pathname of the Java KeyStore file . You only need this if the server requires * client authentication . If the protocol ( specified by the " url " ) is not " https " or if * trustAllRoots is " true " this input is ignored . * Format : Java KeyStore ( JKS ) * Default value : . . / java / lib / security / cacerts . * @ param keystorePassword Optional - password associated with the KeyStore file . If trustAllRoots is " false " and * keystore is empty , keystorePassword default will be supplied . * Default value : " changeit " * @ param connectTimeout Optional - time to wait for a connection to be established , in seconds . A timeout value * of " 0 " represents an infinite timeout . * Default value : " 0" * @ param socketTimeout Optional - timeout for waiting for data ( a maximum period inactivity between two * consecutive data packets ) , in seconds . A socketTimeout value of " 0 " represents an * infinite timeout . * Default value : " 0" * @ param useCookies Optional - specifies whether to enable cookie tracking or not . Cookies are stored between * consecutive calls in a serializable session object therefore they will be available on * a branch level . 
If you specify a non - boolean value , the default value is used . * Valid values : " true " , " false " * Default value : " true " * @ param keepAlive Optional - specifies whether to create a shared connection that will be used in subsequent * calls . If keepAlive is " false " , the already open connection will be used and after * execution it will close it . * Valid values : " true " , " false " * Default value : " true " * @ return A map with strings as keys and strings as values that contains : outcome of the action ( or failure message * and the exception if there is one ) , returnCode of the operation and the ID of the request */ @ Action ( name = "Get Cluster Details" , outputs = { } }
@ Output ( RETURN_CODE ) , @ Output ( RETURN_RESULT ) , @ Output ( EXCEPTION ) } , responses = { @ Response ( text = SUCCESS , field = RETURN_CODE , value = ReturnCodes . SUCCESS , matchType = MatchType . COMPARE_EQUAL , responseType = ResponseType . RESOLVED ) , @ Response ( text = FAILURE , field = RETURN_CODE , value = ReturnCodes . FAILURE , matchType = MatchType . COMPARE_EQUAL , responseType = ResponseType . ERROR , isOnFail = true ) } ) public Map < String , String > execute ( @ Param ( value = ENDPOINT , required = true ) String endpoint , @ Param ( value = USERNAME , required = true ) String username , @ Param ( value = PASSWORD , required = true , encrypted = true ) String password , @ Param ( value = PROXY_HOST ) String proxyHost , @ Param ( value = PROXY_PORT ) String proxyPort , @ Param ( value = PROXY_USERNAME ) String proxyUsername , @ Param ( value = PROXY_PASSWORD , encrypted = true ) String proxyPassword , @ Param ( value = TRUST_ALL_ROOTS ) String trustAllRoots , @ Param ( value = X509_HOSTNAME_VERIFIER ) String x509HostnameVerifier , @ Param ( value = TRUST_KEYSTORE ) String trustKeystore , @ Param ( value = TRUST_PASSWORD , encrypted = true ) String trustPassword , @ Param ( value = KEYSTORE ) String keystore , @ Param ( value = KEYSTORE_PASSWORD , encrypted = true ) String keystorePassword , @ Param ( value = CONNECT_TIMEOUT ) String connectTimeout , @ Param ( value = SOCKET_TIMEOUT ) String socketTimeout , @ Param ( value = USE_COOKIES ) String useCookies , @ Param ( value = KEEP_ALIVE ) String keepAlive ) { try { final HttpClientInputs httpClientInputs = getHttpClientInputs ( username , password , proxyHost , proxyPort , proxyUsername , proxyPassword , trustAllRoots , x509HostnameVerifier , trustKeystore , trustPassword , keystore , keystorePassword , connectTimeout , socketTimeout , useCookies , keepAlive , METHOD_NAME ) ; final CommonInputs commonInputs = new CommonInputs . Builder ( ) . withAction ( GET_CLUSTER_DETAILS ) . 
withApi ( CLUSTER ) . withEndpoint ( endpoint ) . build ( ) ; return new CouchbaseService ( ) . execute ( httpClientInputs , commonInputs ) ; } catch ( Exception exception ) { return getFailureResultsMap ( exception ) ; }
public class WebSocketAddon { /** * { @ inheritDoc } */ @ Override public void setup ( final Application application ) { } }
endpointClasses . clear ( ) ; subscribeSystemEvent ( ClassFoundEvent . class , event -> event . accept ( info -> { if ( info . accpet ( ctClass -> ctClass . hasAnnotation ( WebSocket . class ) ) ) { endpointClasses . add ( info . toClass ( ) ) ; return true ; } return false ; } ) ) ; application . register ( WebSocketFeature . class ) ;
public class PropertiesUtils { /** * Resolve symbols in a supplied value against supplied known properties . * @ param props a set of know properties * @ param value the string to parse for tokens * @ return the resolved string */ public static String resolveProperty ( Properties props , String value ) { } }
return PropertyResolver . resolve ( props , value ) ;
public class JcrNodeType { /** * Returns < code > true < / code > if < code > value < / code > can be cast to < code > property . getRequiredType ( ) < / code > per the type * conversion rules in section 3.6.4 of the JCR 2.0 specification AND < code > value < / code > satisfies the constraints ( if any ) * for the property definition . If the property definition has a required type of { @ link PropertyType # UNDEFINED } , the cast * will be considered to have succeeded and the value constraints ( if any ) will be interpreted using the semantics for the * type specified in < code > value . getType ( ) < / code > . * @ param session the session in which the constraints are to be checked ; may not be null * @ param propertyDefinition the property definition to validate against * @ param values the values to be validated * @ return < code > true < / code > if the value can be cast to the required type for the property definition ( if it exists ) and * satisfies the constraints for the property ( if any exist ) . * @ see PropertyDefinition # getValueConstraints ( ) * @ see JcrPropertyDefinition # satisfiesConstraints ( Value , JcrSession ) */ boolean canCastToTypeAndMatchesConstraints ( JcrSession session , JcrPropertyDefinition propertyDefinition , Value [ ] values ) { } }
for ( Value value : values ) { if ( ! canCastToTypeAndMatchesConstraints ( session , propertyDefinition , value ) ) return false ; } return true ;
public class BandwidthClient { /** * Helper method to build the POST request for the server . * @ param path the path . * @ param param json string . * @ return the post object . */ protected HttpPost generatePostRequest ( final String path , final String param ) { } }
final HttpPost post = new HttpPost ( buildUri ( path ) ) ; post . setEntity ( new StringEntity ( param , ContentType . APPLICATION_JSON ) ) ; return post ;
public class DataTree { /** * remove the path from the datatree * @ param path * the path to of the node to be deleted * @ param zxid * the current zxid * @ throws KeeperException . NoNodeException */ public void deleteNode ( String path , long zxid ) throws KeeperException . NoNodeException { } }
int lastSlash = path . lastIndexOf ( '/' ) ; String parentName = path . substring ( 0 , lastSlash ) ; String childName = path . substring ( lastSlash + 1 ) ; DataNode node = nodes . get ( path ) ; if ( node == null ) { throw new KeeperException . NoNodeException ( ) ; } nodes . remove ( path ) ; DataNode parent = nodes . get ( parentName ) ; if ( parent == null ) { throw new KeeperException . NoNodeException ( ) ; } synchronized ( parent ) { parent . removeChild ( childName ) ; parent . stat . setCversion ( parent . stat . getCversion ( ) + 1 ) ; parent . stat . setPzxid ( zxid ) ; long eowner = node . stat . getEphemeralOwner ( ) ; if ( eowner != 0 ) { HashSet < String > nodes = ephemerals . get ( eowner ) ; if ( nodes != null ) { synchronized ( nodes ) { nodes . remove ( path ) ; } } } node . parent = null ; } if ( parentName . startsWith ( procZookeeper ) ) { // delete the node in the trie . if ( Quotas . limitNode . equals ( childName ) ) { // we need to update the trie // as well pTrie . deletePath ( parentName . substring ( quotaZookeeper . length ( ) ) ) ; } } // also check to update the quotas for this node String lastPrefix = pTrie . findMaxPrefix ( path ) ; if ( ! rootZookeeper . equals ( lastPrefix ) && ! ( "" . equals ( lastPrefix ) ) ) { // ok we have some match and need to update updateCount ( lastPrefix , - 1 ) ; int bytes = 0 ; synchronized ( node ) { bytes = ( node . data == null ? 0 : - ( node . data . length ) ) ; } updateBytes ( lastPrefix , bytes ) ; } if ( LOG . isTraceEnabled ( ) ) { ZooTrace . logTraceMessage ( LOG , ZooTrace . EVENT_DELIVERY_TRACE_MASK , "dataWatches.triggerWatch " + path ) ; ZooTrace . logTraceMessage ( LOG , ZooTrace . EVENT_DELIVERY_TRACE_MASK , "childWatches.triggerWatch " + parentName ) ; } Set < Watcher > processed = dataWatches . triggerWatch ( path , EventType . NodeDeleted ) ; childWatches . triggerWatch ( path , EventType . NodeDeleted , processed ) ; childWatches . triggerWatch ( parentName . equals ( "" ) ? 
"/" : parentName , EventType . NodeChildrenChanged ) ;
public class WeakArrayList { /** * Trims the capacity of this < tt > WeakArrayList < / tt > instance to be the * list ' s current size . An application can use this operation to minimize * the storage of an < tt > WeakArrayList < / tt > instance . */ public void trimToSize ( ) { } }
this . modCount ++ ; final int oldCapacity = this . data . length ; if ( this . size < oldCapacity ) { this . data = Arrays . copyOf ( this . data , this . size ) ; }
public class OptionGroup { /** * Returns all the values of the options of this group that are enabled . * If the group itself is disabled , its options are also considered as disabled . * @ return Set of values of enabled options */ public Set < String > getEnabledOptionValues ( ) { } }
Set < String > result = new HashSet < > ( ) ; if ( ! enabled ) { return result ; // This group is disabled entirely } for ( Option option : options . values ( ) ) { if ( option . isEnabled ( ) ) { result . add ( option . getValue ( ) ) ; } } return result ;
public class VideoPositionTargeting { /** * Sets the targetedPositions value for this VideoPositionTargeting . * @ param targetedPositions * The { @ link VideoTargetingPosition } objects being targeted by * the * video { @ link LineItem } . */ public void setTargetedPositions ( com . google . api . ads . admanager . axis . v201811 . VideoPositionTarget [ ] targetedPositions ) { } }
this . targetedPositions = targetedPositions ;
public class JDBCStorableIntrospector { /** * Figures out how to best access the given property , or returns null if * not supported . An adapter may be applied . * @ return null if not supported */ private static AccessInfo getAccessInfo ( StorableProperty property , int dataType , String dataTypeName , int columnSize , int decimalDigits ) { } }
AccessInfo info = getAccessInfo ( property . getType ( ) , dataType , dataTypeName , columnSize , decimalDigits ) ; if ( info != null ) { return info ; } // Dynamically typed data sources ( e . g . SQLite3 ) always report // dataType as java . sql . Types . VARCHAR . Infer the dataType from the // dataTypeName and try again . if ( dataType == java . sql . Types . VARCHAR ) { Integer dataTypeMapping = typeNameToDataTypeMapping . get ( dataTypeName . toUpperCase ( ) ) ; if ( dataTypeMapping != null ) { info = getAccessInfo ( property . getType ( ) , dataTypeMapping , dataTypeName , columnSize , decimalDigits ) ; if ( info != null ) { return info ; } } } // See if an appropriate adapter exists . StorablePropertyAdapter adapter = property . getAdapter ( ) ; if ( adapter != null ) { Method [ ] toMethods = adapter . findAdaptMethodsTo ( property . getType ( ) ) ; for ( Method toMethod : toMethods ) { Class fromType = toMethod . getParameterTypes ( ) [ 0 ] ; // Verify that reverse adapt method exists as well . . . if ( adapter . findAdaptMethod ( property . getType ( ) , fromType ) != null ) { // . . . and try to get access info for fromType . info = getAccessInfo ( fromType , dataType , dataTypeName , columnSize , decimalDigits ) ; if ( info != null ) { info . setAdapter ( adapter ) ; return info ; } } } } return null ;
public class ObjImpl { /** * Map impl */ @ Override public boolean containsKey ( Object key ) { } }
return key instanceof String ? members . containsKey ( Key . of ( ( String ) key ) ) : false ;
public class DrawableContainerCompat {
    /**
     * Initializes a drawable for display in this container: copies the
     * container's alpha, color filter / tint, visibility, dither, state,
     * level, bounds and (API-gated) layout direction, auto-mirroring and
     * hotspot bounds onto it.
     *
     * @param d The drawable to initialize.
     */
    private void initializeDrawableForDisplay(Drawable d) {
        // Lazily create the callback wrapper used to mute invalidations.
        if (mBlockInvalidateCallback == null) {
            mBlockInvalidateCallback = new BlockInvalidateCallback();
        }
        // Temporary fix for suspending callbacks during initialization. We
        // don't want any of these setters causing an invalidate() since that
        // may call back into DrawableContainer.
        d.setCallback(mBlockInvalidateCallback.wrap(d.getCallback()));
        try {
            // Only push alpha immediately when no enter-fade is configured.
            if (mDrawableContainerState.mEnterFadeDuration <= 0 && mHasAlpha) {
                d.setAlpha(mAlpha);
            }
            if (mDrawableContainerState.mHasColorFilter) {
                // Color filter always overrides tint.
                d.setColorFilter(mDrawableContainerState.mColorFilter);
            } else {
                if (mDrawableContainerState.mHasTintList) {
                    DrawableCompat.setTintList(d, mDrawableContainerState.mTintList);
                }
                if (mDrawableContainerState.mHasTintMode) {
                    DrawableCompat.setTintMode(d, mDrawableContainerState.mTintMode);
                }
            }
            d.setVisible(isVisible(), true);
            d.setDither(mDrawableContainerState.mDither);
            d.setState(getState());
            d.setLevel(getLevel());
            d.setBounds(getBounds());
            // The following setters only exist from the given API levels.
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
                d.setLayoutDirection(getLayoutDirection());
            }
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
                d.setAutoMirrored(mDrawableContainerState.mAutoMirrored);
            }
            final Rect hotspotBounds = mHotspotBounds;
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && hotspotBounds != null) {
                d.setHotspotBounds(hotspotBounds.left, hotspotBounds.top, hotspotBounds.right, hotspotBounds.bottom);
            }
        } finally {
            // Always restore the drawable's original callback.
            d.setCallback(mBlockInvalidateCallback.unwrap());
        }
    }
}
public class RandomMatrices_DDRM { /** * Sets the provided square matrix to be a random symmetric matrix whose values are selected from an uniform distribution * from min to max , inclusive . * @ param A The matrix that is to be modified . Must be square . Modified . * @ param min Minimum value an element can have . * @ param max Maximum value an element can have . * @ param rand Random number generator . */ public static void symmetric ( DMatrixRMaj A , double min , double max , Random rand ) { } }
if ( A . numRows != A . numCols ) throw new IllegalArgumentException ( "A must be a square matrix" ) ; double range = max - min ; int length = A . numRows ; for ( int i = 0 ; i < length ; i ++ ) { for ( int j = i ; j < length ; j ++ ) { double val = rand . nextDouble ( ) * range + min ; A . set ( i , j , val ) ; A . set ( j , i , val ) ; } }
public class ReceiveMessageBuilder { /** * Sets the payload data on the message builder implementation . * @ param payload * @ return */ protected void setPayload ( String payload ) { } }
MessageContentBuilder messageContentBuilder = getMessageContentBuilder ( ) ; if ( messageContentBuilder instanceof PayloadTemplateMessageBuilder ) { ( ( PayloadTemplateMessageBuilder ) messageContentBuilder ) . setPayloadData ( payload ) ; } else if ( messageContentBuilder instanceof StaticMessageContentBuilder ) { ( ( StaticMessageContentBuilder ) messageContentBuilder ) . getMessage ( ) . setPayload ( payload ) ; } else { throw new CitrusRuntimeException ( "Unable to set payload on message builder type: " + messageContentBuilder . getClass ( ) ) ; }
public class Traverser {
    /**
     * Advances if possible, returning next valid node, or null if none.
     *
     * <p>Walks the current bin's chain first; when exhausted, moves to the
     * next table slot, following forwarding nodes into the new table during
     * a resize (saving/restoring position via pushState/recoverState).
     */
    final Node<K, V> advance() {
        Node<K, V> e;
        if ((e = next) != null)
            e = e.next;
        for (;;) {
            Node<K, V>[] t;
            int i, n; // must use locals in checks
            if (e != null)
                return next = e;
            // Past the last slot, or no table: traversal is done.
            if (baseIndex >= baseLimit || (t = tab) == null || (n = t.length) <= (i = index) || i < 0)
                return next = null;
            // Negative hash marks a special node (forwarding or tree bin).
            if ((e = tabAt(t, i)) != null && e.hash < 0) {
                if (e instanceof ForwardingNode) {
                    // Bin moved during resize: descend into the new table,
                    // remembering where we were in the old one.
                    tab = ((ForwardingNode<K, V>) e).nextTable;
                    e = null;
                    pushState(t, i, n);
                    continue;
                } else if (e instanceof TreeBin)
                    e = ((TreeBin<K, V>) e).first;
                else
                    e = null;
            }
            if (stack != null)
                recoverState(n);
            else if ((index = i + baseSize) >= n)
                index = ++baseIndex; // visit upper slots if present
        }
    }
}
public class RestApiConnector { /** * get error message while executing http url * @ return error message */ private static String getFirstErrorMessage ( String defaultMessage , JsonElement json ) { } }
if ( json == null ) { return defaultMessage ; } JsonObject jsonObject = null ; if ( ! json . isJsonArray ( ) ) { jsonObject = json . getAsJsonObject ( ) ; } else { JsonArray jsonArray = json . getAsJsonArray ( ) ; if ( jsonArray . size ( ) != 0 ) { jsonObject = jsonArray . get ( 0 ) . getAsJsonObject ( ) ; } } if ( jsonObject != null ) { if ( jsonObject . has ( "error_description" ) ) { defaultMessage = defaultMessage + jsonObject . get ( "error_description" ) . getAsString ( ) ; } else if ( jsonObject . has ( "message" ) ) { defaultMessage = defaultMessage + jsonObject . get ( "message" ) . getAsString ( ) ; } } return defaultMessage ;
public class PathUtil { /** * Adds , if not already present , the absolute slash preceding the specified path , and returns the adjusted result . * If the argument is null , adjusts to an empty String before processing . * @ param path * @ return */ public static String optionallyPrependSlash ( final String path ) { } }
// Adjust null String resolved = path ; if ( resolved == null ) { resolved = EMPTY ; } // If the first character is not a slash if ( ! isFirstCharSlash ( resolved ) ) { // Prepend the slash return ArchivePath . SEPARATOR + resolved ; } // Return as - is return resolved ;
public class UndertowOutputStream { /** * If the response has not yet been written to the client this method will clear the streams buffer , * invalidating any content that has already been written . If any content has already been sent to the client then * this method will throw and IllegalStateException * @ throws java . lang . IllegalStateException If the response has been commited */ public void resetBuffer ( ) { } }
if ( anyAreSet ( state , FLAG_WRITE_STARTED ) ) { throw UndertowMessages . MESSAGES . cannotResetBuffer ( ) ; } buffer = null ; IoUtils . safeClose ( pooledBuffer ) ; pooledBuffer = null ;
public class DefaultCrawler { /** * Fetch links to the next level of the crawl . * @ param responses holding bodys where we should fetch the links . * @ param allUrls every url we have fetched so far * @ param nonWorkingUrls the urls that didn ' t work to fetch * @ param verifiedUrls responses that are already verified * @ param host the host we are working on * @ param onlyOnPath only fetch files that match the following path . If empty , all will match . * @ param notOnPath don ' t collect / follow urls that contains this text in the url * @ return the next level of links that we should fetch */ protected Set < CrawlerURL > fetchNextLevelLinks ( Map < Future < HTMLPageResponse > , CrawlerURL > responses , Set < CrawlerURL > allUrls , Set < HTMLPageResponse > nonWorkingUrls , Set < HTMLPageResponse > verifiedUrls , String host , String onlyOnPath , String notOnPath ) { } }
final Set < CrawlerURL > nextLevel = new LinkedHashSet < CrawlerURL > ( ) ; final Iterator < Entry < Future < HTMLPageResponse > , CrawlerURL > > it = responses . entrySet ( ) . iterator ( ) ; while ( it . hasNext ( ) ) { final Entry < Future < HTMLPageResponse > , CrawlerURL > entry = it . next ( ) ; try { final HTMLPageResponse response = entry . getKey ( ) . get ( ) ; if ( HttpStatus . SC_OK == response . getResponseCode ( ) && response . getResponseType ( ) . indexOf ( "html" ) > 0 ) { // we know that this links work verifiedUrls . add ( response ) ; final Set < CrawlerURL > allLinks = parser . get ( response ) ; for ( CrawlerURL link : allLinks ) { // only add if it is the same host if ( host . equals ( link . getHost ( ) ) && link . getUrl ( ) . contains ( onlyOnPath ) && ( notOnPath . equals ( "" ) ? true : ( ! link . getUrl ( ) . contains ( notOnPath ) ) ) ) { if ( ! allUrls . contains ( link ) ) { nextLevel . add ( link ) ; allUrls . add ( link ) ; } } } } else if ( HttpStatus . SC_OK != response . getResponseCode ( ) || StatusCode . SC_SERVER_REDIRECT_TO_NEW_DOMAIN . getCode ( ) == response . getResponseCode ( ) ) { allUrls . remove ( entry . getValue ( ) ) ; nonWorkingUrls . add ( response ) ; } else { // it is of another content type than HTML or if it redirected to another domain allUrls . remove ( entry . getValue ( ) ) ; } } catch ( InterruptedException e ) { nonWorkingUrls . add ( new HTMLPageResponse ( entry . getValue ( ) , StatusCode . SC_SERVER_RESPONSE_UNKNOWN . getCode ( ) , Collections . < String , String > emptyMap ( ) , "" , "" , 0 , "" , - 1 ) ) ; } catch ( ExecutionException e ) { nonWorkingUrls . add ( new HTMLPageResponse ( entry . getValue ( ) , StatusCode . SC_SERVER_RESPONSE_UNKNOWN . getCode ( ) , Collections . < String , String > emptyMap ( ) , "" , "" , 0 , "" , - 1 ) ) ; } } return nextLevel ;
public class WriteBehindManager {
    /**
     * Creates a write-behind backed {@link MapDataStore} for the given
     * partition.
     *
     * <p>todo get this via constructor function.
     */
    @Override
    public MapDataStore getMapDataStore(String mapName, int partitionId) {
        // mapName is unused: the store is fully determined by the
        // mapStoreContext and writeBehindProcessor this manager holds.
        return MapDataStores.createWriteBehindStore(mapStoreContext, partitionId, writeBehindProcessor);
    }
}
public class SipServletMessageImpl { /** * ( non - Javadoc ) * @ see javax . servlet . sip . SipServletMessage # getFrom ( ) */ public Address getFrom ( ) { } }
FromHeader from = ( FromHeader ) this . message . getHeader ( getCorrectHeaderName ( FromHeader . NAME ) ) ; // AddressImpl address = new AddressImpl ( from . getAddress ( ) , AddressImpl . getParameters ( ( Parameters ) from ) , ModifiableRule . From ) ; // https : / / code . google . com / p / sipservlets / issues / detail ? id = 245 try { return new AddressImpl ( from , ModifiableRule . From ) ; } catch ( ParseException e ) { throw new IllegalArgumentException ( "Couldn't parse From Header " + from , e ) ; }
public class PreambleUtil { /** * basic checks of SerVer , Format , preInts , Family , fiCol , lgK . */ static void checkLoPreamble ( final Memory mem ) { } }
rtAssertEquals ( getSerVer ( mem ) , SER_VER & 0XFF ) ; final Format fmt = getFormat ( mem ) ; final int preIntsDef = getDefinedPreInts ( fmt ) & 0XFF ; rtAssertEquals ( getPreInts ( mem ) , preIntsDef ) ; final Family fam = getFamily ( mem ) ; rtAssert ( fam == Family . CPC ) ; final int lgK = getLgK ( mem ) ; rtAssert ( ( lgK >= 4 ) && ( lgK <= 26 ) ) ; final int fiCol = getFiCol ( mem ) ; rtAssert ( ( fiCol <= 63 ) && ( fiCol >= 0 ) ) ;
public class ExpressionList {
    /**
     * Aids debugging. Creates a diagrammatic form of the roots (+ tail
     * instruction) so that we can analyze control flow.
     *
     * <pre>
     * I I I C C I U I U [I] I
     * </pre>
     *
     * Output: one row of start PCs, one row of this PCs, one row of
     * per-instruction symbols (with the cursor instruction bracketed), then
     * one arrow line per recorded branch.
     *
     * @param _instruction The instruction we are considering adding (may be null)
     * @return the diagram as a String
     */
    public String dumpDiagram(Instruction _instruction) {
        final StringBuilder sb = new StringBuilder();
        // Collect the expression chain followed by the PC chain from the
        // candidate instruction.
        final List<Instruction> list = new ArrayList<Instruction>();
        for (Instruction i = head; i != null; i = i.getNextExpr()) {
            list.add(i);
        }
        for (Instruction i = _instruction; i != null; i = i.getNextPC()) {
            list.add(i);
        }
        final Instruction[] array = list.toArray(new Instruction[0]);
        boolean lastWasCursor = false;
        final List<Branch> branches = new ArrayList<Branch>();
        // Row 1: start PCs.
        for (final Instruction i : list) {
            sb.append(String.format(" %3d", i.getStartPC()));
        }
        sb.append("\n");
        // Row 2: this PCs.
        for (final Instruction i : list) {
            sb.append(String.format(" %3d", i.getThisPC()));
        }
        sb.append("\n");
        // Row 3: symbols; the candidate instruction is wrapped in [ ].
        for (final Instruction i : list) {
            if (i == _instruction) {
                sb.append(" [");
                lastWasCursor = true;
            } else {
                if (lastWasCursor) {
                    sb.append("] ");
                    lastWasCursor = false;
                } else {
                    sb.append(" ");
                }
            }
            // ?>/?< = conditional forward/backward, >>/<< = unconditional,
            // C = composite, R = return, .. = anything else.
            if (i.isBranch() && i.asBranch().isConditional()) {
                branches.add(i.asBranch());
                if (i.asBranch().isForward()) {
                    sb.append("?>");
                } else {
                    sb.append("?<");
                }
            } else if (i.isBranch() && i.asBranch().isUnconditional()) {
                branches.add(i.asBranch());
                if (i.asBranch().isForward()) {
                    sb.append(">>");
                } else {
                    sb.append("<<");
                }
            } else if (i instanceof CompositeInstruction) {
                sb.append(" C");
            } else if (i instanceof Return) {
                sb.append(" R");
                // } else if (i instanceof AssignToLocalVariable) {
                //     sb.append(" S");
            } else {
                sb.append("..");
            }
        }
        if (lastWasCursor) {
            sb.append("] ");
        } else {
            sb.append(" ");
        }
        // One arrow line per branch, drawn from source to target column.
        for (final Branch b : branches) {
            sb.append("\n ");
            if (b.isForward()) {
                for (int i = 0; i < array.length; i++) {
                    if ((array[i].getStartPC() < b.getStartPC()) || (array[i].getThisPC() > b.getTarget().getThisPC())) {
                        sb.append(" ");
                    } else {
                        if (b.isConditional()) {
                            sb.append("?-");
                        } else {
                            sb.append("+-");
                        }
                        i++;
                        while ((i < array.length) && (array[i].getStartPC() < b.getTarget().getThisPC())) {
                            sb.append("----");
                            i++;
                        }
                        sb.append("->");
                        sb.append(b.getTarget().getThisPC());
                    }
                }
            } else {
                for (int i = 0; i < array.length; i++) {
                    if ((array[i].getStartPC() < b.getTarget().getThisPC()) || (array[i].getThisPC() > b.getThisPC())) {
                        sb.append(" ");
                    } else {
                        sb.append("<-");
                        i++;
                        while ((i < array.length) && (array[i].getStartPC() < b.getThisPC())) {
                            sb.append("----");
                            i++;
                        }
                        if (b.isConditional()) {
                            sb.append("-?");
                        } else {
                            sb.append("-+");
                        }
                    }
                }
            }
        }
        return (sb.toString());
    }
}
public class ExtAtomContainerManipulator { /** * Returns IAtomContainer without Hydrogen . If an AtomContainer has atom single atom which * is atom Hydrogen then its not removed . * @ param atomContainer * @ return IAtomContainer without Hydrogen . If an AtomContainer has atom single atom which * is atom Hydrogen then its not removed . */ public static IAtomContainer convertExplicitToImplicitHydrogens ( IAtomContainer atomContainer ) { } }
IAtomContainer mol = atomContainer . getBuilder ( ) . newInstance ( IAtomContainer . class , atomContainer ) ; convertImplicitToExplicitHydrogens ( mol ) ; if ( mol . getAtomCount ( ) > 1 ) { mol = removeHydrogens ( mol ) ; } else if ( atomContainer . atoms ( ) . iterator ( ) . next ( ) . getSymbol ( ) . equalsIgnoreCase ( "H" ) ) { System . err . println ( "WARNING: single hydrogen atom removal not supported!" ) ; } mol . addProperties ( atomContainer . getProperties ( ) ) ; mol . setFlags ( atomContainer . getFlags ( ) ) ; if ( atomContainer . getID ( ) != null ) { mol . setID ( atomContainer . getID ( ) ) ; } return mol ;
public class InsertMeta {
    /**
     * Returns the spec as a fixed-order map of its three top-level sections.
     *
     * <p>be removed in future.
     */
    public Map<String, Object> getSpec() {
        // ImmutableMap.of keeps this key order and rejects null values.
        return ImmutableMap.<String, Object>of("ioConfig", getIoConfig(), "dataSchema", getDataSchema(), "tuningConfig", getTuningConfig());
    }
}
public class CPInstancePersistenceImpl { /** * Returns the cp instance where companyId = & # 63 ; and externalReferenceCode = & # 63 ; or throws a { @ link NoSuchCPInstanceException } if it could not be found . * @ param companyId the company ID * @ param externalReferenceCode the external reference code * @ return the matching cp instance * @ throws NoSuchCPInstanceException if a matching cp instance could not be found */ @ Override public CPInstance findByC_ERC ( long companyId , String externalReferenceCode ) throws NoSuchCPInstanceException { } }
CPInstance cpInstance = fetchByC_ERC ( companyId , externalReferenceCode ) ; if ( cpInstance == null ) { StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "companyId=" ) ; msg . append ( companyId ) ; msg . append ( ", externalReferenceCode=" ) ; msg . append ( externalReferenceCode ) ; msg . append ( "}" ) ; if ( _log . isDebugEnabled ( ) ) { _log . debug ( msg . toString ( ) ) ; } throw new NoSuchCPInstanceException ( msg . toString ( ) ) ; } return cpInstance ;
public class CreateCommitRequest { /** * The files to add or update in this commit . * @ param putFiles * The files to add or update in this commit . */ public void setPutFiles ( java . util . Collection < PutFileEntry > putFiles ) { } }
if ( putFiles == null ) { this . putFiles = null ; return ; } this . putFiles = new java . util . ArrayList < PutFileEntry > ( putFiles ) ;
public class DataStream {
    /**
     * Creates a new {@link BroadcastConnectedStream} by connecting the
     * current {@link DataStream} or {@link KeyedStream} with a
     * {@link BroadcastStream}.
     *
     * <p>The latter can be created using the
     * {@link #broadcast(MapStateDescriptor[])} method.
     *
     * <p>The resulting stream can be further processed using the
     * {@code BroadcastConnectedStream.process(MyFunction)} method, where
     * {@code MyFunction} can be either a
     * {@link org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction KeyedBroadcastProcessFunction}
     * or a
     * {@link org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction BroadcastProcessFunction}
     * depending on the current stream being a {@link KeyedStream} or not.
     *
     * @param broadcastStream The broadcast stream with the broadcast state to
     *        be connected with this stream; must not be null.
     * @return The {@link BroadcastConnectedStream}.
     */
    @PublicEvolving
    public <R> BroadcastConnectedStream<T, R> connect(BroadcastStream<R> broadcastStream) {
        // The broadcast state descriptors travel with the connected stream
        // so downstream process functions can access them.
        return new BroadcastConnectedStream<>(environment, this, Preconditions.checkNotNull(broadcastStream), broadcastStream.getBroadcastStateDescriptor());
    }
}
public class CmsVfsDriver { /** * Build the whole WHERE sql statement part for the given relation filter . < p > * @ param projectId the current project id * @ param filter the filter * @ param resource the resource ( may be null , if you want to delete all relations for the resource in the filter ) * @ param params the parameter values ( return parameter ) * @ param checkSource if the query is for the source relations * @ return the WHERE sql statement part string */ protected String prepareRelationConditions ( CmsUUID projectId , CmsRelationFilter filter , CmsResource resource , List < Object > params , boolean checkSource ) { } }
StringBuffer conditions = new StringBuffer ( 128 ) ; params . clear ( ) ; // be sure the parameters list is clear // source or target filter if ( filter . isSource ( ) || filter . isTarget ( ) ) { // source or target id filter from resource if ( resource != null ) { conditions . append ( BEGIN_CONDITION ) ; if ( filter . isSource ( ) && checkSource ) { if ( ! filter . isIncludeSubresources ( ) ) { conditions . append ( m_sqlManager . readQuery ( projectId , "C_RELATION_FILTER_TARGET_ID" ) ) ; params . add ( resource . getStructureId ( ) . toString ( ) ) ; } else { conditions . append ( m_sqlManager . readQuery ( projectId , "C_RELATION_FILTER_TARGET_PATH" ) ) ; params . add ( resource . getRootPath ( ) + '%' ) ; } } else if ( filter . isTarget ( ) && ! checkSource ) { if ( ! filter . isIncludeSubresources ( ) ) { conditions . append ( m_sqlManager . readQuery ( projectId , "C_RELATION_FILTER_SOURCE_ID" ) ) ; params . add ( resource . getStructureId ( ) . toString ( ) ) ; } else { conditions . append ( m_sqlManager . readQuery ( projectId , "C_RELATION_FILTER_SOURCE_PATH" ) ) ; params . add ( resource . getRootPath ( ) + '%' ) ; } } conditions . append ( END_CONDITION ) ; } // target or source id filter from filter parameter if ( filter . getStructureId ( ) != null ) { if ( conditions . length ( ) == 0 ) { conditions . append ( BEGIN_CONDITION ) ; } else { conditions . append ( BEGIN_INCLUDE_CONDITION ) ; } if ( filter . isSource ( ) && checkSource ) { conditions . append ( m_sqlManager . readQuery ( projectId , "C_RELATION_FILTER_SOURCE_ID" ) ) ; params . add ( filter . getStructureId ( ) . toString ( ) ) ; } else if ( filter . isTarget ( ) && ! checkSource ) { conditions . append ( m_sqlManager . readQuery ( projectId , "C_RELATION_FILTER_TARGET_ID" ) ) ; params . add ( filter . getStructureId ( ) . toString ( ) ) ; } conditions . append ( END_CONDITION ) ; } // target or source path filter from filter parameter if ( CmsStringUtil . 
isNotEmptyOrWhitespaceOnly ( filter . getPath ( ) ) ) { if ( conditions . length ( ) == 0 ) { conditions . append ( BEGIN_CONDITION ) ; } else { conditions . append ( BEGIN_INCLUDE_CONDITION ) ; } String queryPath = filter . getPath ( ) ; if ( filter . isIncludeSubresources ( ) ) { queryPath += '%' ; } if ( filter . isSource ( ) && checkSource ) { conditions . append ( m_sqlManager . readQuery ( projectId , "C_RELATION_FILTER_SOURCE_PATH" ) ) ; params . add ( queryPath ) ; } else if ( filter . isTarget ( ) && ! checkSource ) { conditions . append ( m_sqlManager . readQuery ( projectId , "C_RELATION_FILTER_TARGET_PATH" ) ) ; params . add ( queryPath ) ; } conditions . append ( END_CONDITION ) ; } } // relation type filter Set < CmsRelationType > types = filter . getTypes ( ) ; if ( ! types . isEmpty ( ) ) { if ( conditions . length ( ) == 0 ) { conditions . append ( BEGIN_CONDITION ) ; } else { conditions . append ( BEGIN_INCLUDE_CONDITION ) ; } conditions . append ( m_sqlManager . readQuery ( projectId , "C_RELATION_FILTER_TYPE" ) ) ; conditions . append ( BEGIN_CONDITION ) ; Iterator < CmsRelationType > it = types . iterator ( ) ; while ( it . hasNext ( ) ) { CmsRelationType type = it . next ( ) ; conditions . append ( "?" ) ; params . add ( new Integer ( type . getId ( ) ) ) ; if ( it . hasNext ( ) ) { conditions . append ( ", " ) ; } } conditions . append ( END_CONDITION ) ; conditions . append ( END_CONDITION ) ; } return conditions . toString ( ) ;
public class AnalyticFormulas { /** * This static method calculated the rho of a call option under a Black - Scholes model * @ param initialStockValue The initial value of the underlying , i . e . , the spot . * @ param riskFreeRate The risk free rate of the bank account numerarie . * @ param volatility The Black - Scholes volatility . * @ param optionMaturity The option maturity T . * @ param optionStrike The option strike . * @ return The rho of the option */ public static double blackScholesOptionRho ( double initialStockValue , double riskFreeRate , double volatility , double optionMaturity , double optionStrike ) { } }
if ( optionStrike <= 0.0 || optionMaturity <= 0.0 ) { // The Black - Scholes model does not consider it being an option return 0.0 ; } else { // Calculate rho double dMinus = ( Math . log ( initialStockValue / optionStrike ) + ( riskFreeRate - 0.5 * volatility * volatility ) * optionMaturity ) / ( volatility * Math . sqrt ( optionMaturity ) ) ; double rho = optionStrike * optionMaturity * Math . exp ( - riskFreeRate * optionMaturity ) * NormalDistribution . cumulativeDistribution ( dMinus ) ; return rho ; }
public class DynoJedisUtils { /** * This is the non pipelined version of the reads * @ param key * @ return the value of the corresponding key * @ throws Exception */ public String nonPipelineRead ( String key ) throws Exception { } }
String res = jedisClient . get ( ) . get ( key ) ; if ( res != null ) { if ( res . isEmpty ( ) ) { throw new Exception ( "Data retrieved is not ok " ) ; } } else { return CacheMiss ; } return ResultOK ;
public class VersionID { /** * Match ' this ' versionID against vid . * The _ usePrefixMatch / _ useGreaterThan flag is used to determine if a * prefix match of an exact match should be performed * if _ isCompound , must match _ rest also . * @ param vid TODO * @ return TODO */ public boolean match ( VersionID vid ) { } }
if ( _isCompound ) { if ( ! _rest . match ( vid ) ) { return false ; } } return ( _usePrefixMatch ) ? this . isPrefixMatch ( vid ) : ( _useGreaterThan ) ? vid . isGreaterThanOrEqual ( this ) : matchTuple ( vid ) ;
public class AuditFinding { /** * The list of related resources . * @ param relatedResources * The list of related resources . */ public void setRelatedResources ( java . util . Collection < RelatedResource > relatedResources ) { } }
if ( relatedResources == null ) { this . relatedResources = null ; return ; } this . relatedResources = new java . util . ArrayList < RelatedResource > ( relatedResources ) ;
public class WeeklyAutoScalingSchedule {
    /**
     * The schedule for Tuesday.
     *
     * @return The schedule for Tuesday; never null — an empty map is lazily
     *         created on first access.
     */
    public java.util.Map<String, String> getTuesday() {
        if (tuesday == null) {
            tuesday = new com.amazonaws.internal.SdkInternalMap<String, String>();
        }
        return tuesday;
    }
}
public class DataSet {
    /**
     * Returns a distinct set of a {@link Tuple} {@link DataSet} using field
     * position keys.
     *
     * <p>The field position keys specify the fields of Tuples on which the
     * decision is made if two Tuples are distinct or not.
     *
     * <p>Note: Field position keys can only be specified for Tuple DataSets.
     *
     * @param fields One or more field positions on which the distinction of
     *        the DataSet is decided.
     * @return A DistinctOperator that represents the distinct DataSet.
     */
    public DistinctOperator<T> distinct(int... fields) {
        // ExpressionKeys validates the positions against the tuple type; the
        // call location is captured for error reporting.
        return new DistinctOperator<>(this, new Keys.ExpressionKeys<>(fields, getType()), Utils.getCallLocationName());
    }
}
public class Benchmark {
    /**
     * Creates the output report: overall stats, then a publisher section and
     * a subscriber section, each with per-sample lines and aggregate
     * statistics when more than one sample was taken.
     *
     * @return the report as a String.
     */
    public final String report() {
        StringBuilder sb = new StringBuilder();
        sb.append(String.format("%s stats: %s\n", name, this));
        if (pubs.hasSamples()) {
            String indent = " ";
            // Only print the pub sub-header when subs also exist (otherwise
            // the overall line already covers it).
            if (subs.hasSamples()) {
                sb.append(String.format("%sPub stats: %s\n", indent, pubs));
                indent = " ";
            }
            if (pubs.getSamples().size() > 1) {
                for (Sample stat : pubs.getSamples()) {
                    sb.append(String.format("%s[%2d] %s (%d msgs)\n", indent, pubs.getSamples().indexOf(stat) + 1, stat, stat.jobMsgCnt));
                }
                sb.append(String.format("%s %s\n", indent, pubs.statistics()));
            }
        }
        if (subs.hasSamples()) {
            String indent = " ";
            sb.append(String.format("%sSub stats: %s\n", indent, subs));
            indent = " ";
            if (subs.getSamples().size() > 1) {
                for (Sample stat : subs.getSamples()) {
                    sb.append(String.format("%s[%2d] %s (%d msgs)\n", indent, subs.getSamples().indexOf(stat) + 1, stat, stat.jobMsgCnt));
                }
                sb.append(String.format("%s %s\n", indent, subs.statistics()));
            }
        }
        return sb.toString();
    }
}
public class Validator { /** * Executes all registered validators ( in registration order ) . If any validator fires a rule * that fails , its feedback will be displayed , the validation process will be stopped , and * false will be returned . If all validators succeed , true will be returned . */ public boolean validate ( boolean asError ) { } }
for ( WidgetValidator < ? , ? > validator : _validators ) { if ( ! validator . validate ( asError ) ) return false ; } return true ;
public class KeyAndCertificateFactory {
    /**
     * Create a KeyStore with a server certificate for the given domain and
     * subject alternative names: generates a fresh key pair, signs a server
     * certificate with the bundled CA, and saves certificate and keys as
     * temp PEM files. Failures are logged, not rethrown.
     *
     * @return this factory, for chaining
     */
    synchronized KeyAndCertificateFactory buildAndSaveCertificates() {
        try {
            // personal keys
            KeyPair keyPair = generateKeyPair(FAKE_KEYSIZE);
            PrivateKey mockServerPrivateKey = keyPair.getPrivate();
            PublicKey mockServerPublicKey = keyPair.getPublic();
            // ca keys (loaded from bundled classpath resources)
            PrivateKey caPrivateKey = loadPrivateKeyFromPEMFile("org/mockserver/socket/CertificateAuthorityPrivateKey.pem");
            X509Certificate caCert = loadX509FromPEMFile("org/mockserver/socket/CertificateAuthorityCertificate.pem");
            // generate mockServer certificate signed by the CA, covering the
            // configured domain and subject alternative names
            X509Certificate mockServerCert = createCASignedCert(mockServerPublicKey, caCert, caPrivateKey, caCert.getPublicKey(), ConfigurationProperties.sslCertificateDomainName(), ConfigurationProperties.sslSubjectAlternativeNameDomains(), ConfigurationProperties.sslSubjectAlternativeNameIps());
            // random suffix keeps concurrent instances from clobbering files
            String randomUUID = UUID.randomUUID().toString();
            mockServerCertificatePEMFile = saveCertificateAsPEMFile(mockServerCert, "MockServerCertificate" + randomUUID + ".pem", true);
            mockServerPublicKeyPEMFile = saveCertificateAsPEMFile(mockServerPublicKey, "MockServerPublicKey" + randomUUID + ".pem", true);
            mockServerPrivateKeyPEMFile = saveCertificateAsPEMFile(mockServerPrivateKey, "MockServerPrivateKey" + randomUUID + ".pem", true);
        } catch (Exception e) {
            // Best-effort: leave previous certificates in place on failure.
            MOCK_SERVER_LOGGER.error("Error while refreshing certificates", e);
        }
        return this;
    }
}
public class PhiAccrualFailureDetector {
    /**
     * The suspicion level of the accrual failure detector.
     *
     * <p>If a connection does not have any records in the failure detector
     * then it is considered healthy (phi of 0.0).
     *
     * @param timestampMillis the time at which suspicion is evaluated
     */
    private double phi(long timestampMillis) {
        long timeDiffMillis;
        double meanMillis;
        double stdDeviationMillis;
        // Snapshot history-derived values under the lock; the phi
        // computation itself runs outside it.
        synchronized (heartbeatHistory) {
            long lastTimestampMillis = lastHeartbeatMillis;
            if (lastTimestampMillis == NO_HEARTBEAT_TIMESTAMP) {
                // No heartbeat recorded yet: treat as healthy.
                return 0.0;
            }
            timeDiffMillis = timestampMillis - lastTimestampMillis;
            meanMillis = heartbeatHistory.mean();
            stdDeviationMillis = ensureValidStdDeviation(heartbeatHistory.stdDeviation());
        }
        // The acceptable pause widens the expected interval before
        // suspicion rises.
        return phi(timeDiffMillis, meanMillis + acceptableHeartbeatPauseMillis, stdDeviationMillis);
    }
}
public class RestServlet {
    /**
     * Parses the request to get information about what controller it is
     * trying to call, then invokes the action from that controller (if any),
     * and finally gives an answer.<br>
     * <br>
     * Basically it only dispatches the request to a controller.
     */
    @Override
    protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        // Strip the servlet context path from the URI.
        String url = request.getRequestURI().substring(request.getContextPath().length());
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("url => {}", url);
            LOGGER.debug("HTTP_METHOD => {}", request.getMethod());
            LOGGER.debug("queryString => {}", request.getQueryString());
            LOGGER.debug("Context [{}]", request.getContextPath());
        }
        HttpMethod requestMethod = HttpMethod.valueOf(request.getMethod());
        // A POST may carry an override parameter to tunnel other verbs
        // (e.g. PUT/DELETE from HTML forms).
        if (requestMethod == HttpMethod.POST) {
            String httpMethodParam = request.getParameter(HTTP_METHOD_PARAM);
            LOGGER.debug("param: http_method => {}", httpMethodParam);
            if (httpMethodParam != null) {
                requestMethod = HttpMethod.valueOf(httpMethodParam);
            }
        }
        // Getting all the information from the URL
        UrlInfo urlInfo = urlInspector.getUrlInfo(url, requestMethod);
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("URL info {}", urlInfo.toString());
        }
        // Calling the controller's action, then render the response.
        ResponseHelper responseHelper = new ResponseHelper(this.getServletContext(), request, response, urlInfo, config.getString(Config.VIEWS_DIRECTORY));
        helper.invokeAction(urlInfo, responseHelper);
        responseHelper.doResponse();
    }
}
public class VF2State {
    /**
     * {@inheritDoc}
     *
     * <p>Checks whether mapping node1 (of g1) onto node2 (of g2) is
     * consistent with the current partial mapping: every already-mapped
     * neighbor must have a corresponding, compatible edge in the other
     * graph, and the counts of unmapped neighbors in the "in"/"out"
     * terminal sets and the untouched set must agree (look-ahead pruning).
     */
    public boolean isFeasiblePair(int node1, int node2) {
        assert node1 < n1;
        assert node2 < n2;
        assert core1[node1] == NULL_NODE;
        assert core2[node2] == NULL_NODE;
        // TODO: add checks for compatible nodes here
        // int i = 0; // , other1 = 0, other2 = 0;
        // Look-ahead counters: neighbors of node1/node2 that fall in the
        // terminal-in, terminal-out, or untouched sets.
        int termout1 = 0, termout2 = 0, termin1 = 0, termin2 = 0, new1 = 0, new2 = 0;
        // Check the 'out' edges of node1
        for (int other1 : getSuccessors(g1, node1)) {
            if (core1[other1] != NULL_NODE) {
                int other2 = core1[other1];
                // If there's no edge to the mapped counterpart, or if there
                // is some edge incompatibility, the mapping is not feasible.
                if (!g2.contains(node2, other2) || !areCompatibleEdges(node1, other1, node2, other2))
                    return false;
            } else {
                if (in1[other1] != 0)
                    termin1++;
                if (out1[other1] != 0)
                    termout1++;
                if (in1[other1] == 0 && out1[other1] == 0)
                    new1++;
            }
        }
        // Check the 'in' edges of node1
        for (int other1 : getPredecessors(g1, node1)) {
            if (core1[other1] != NULL_NODE) {
                int other2 = core1[other1];
                // Same feasibility test for the incoming direction.
                if (!g2.contains(other2, node2) || !areCompatibleEdges(node1, other1, node2, other2))
                    return false;
            } else {
                if (in1[other1] != 0)
                    termin1++;
                if (out1[other1] != 0)
                    termout1++;
                if (in1[other1] == 0 && out1[other1] == 0)
                    new1++;
            }
        }
        // Check the 'out' edges of node2
        for (int other2 : getSuccessors(g2, node2)) {
            if (core2[other2] != NULL_NODE) {
                int other1 = core2[other2];
                if (!g1.contains(node1, other1))
                    return false;
            } else {
                if (in2[other2] != 0)
                    termin2++;
                if (out2[other2] != 0)
                    termout2++;
                if (in2[other2] == 0 && out2[other2] == 0)
                    new2++;
            }
        }
        // Check the 'in' edges of node2
        for (int other2 : getPredecessors(g2, node2)) {
            if (core2[other2] != NULL_NODE) {
                int other1 = core2[other2];
                if (!g1.contains(other1, node1))
                    return false;
            } else {
                if (in2[other2] != 0)
                    termin2++;
                if (out2[other2] != 0)
                    termout2++;
                if (in2[other2] == 0 && out2[other2] == 0)
                    new2++;
            }
        }
        // Look-ahead: the unmapped-neighbor counts must match exactly.
        return termin1 == termin2 && termout1 == termout2 && new1 == new2;
    }
}
public class CyclicCarbohydrateRecognition { /** * Determine the horizontal offset of the projection . This allows * projections that are drawn at angle to be correctly interpreted . * Currently only projections of chair conformations are considered . * @ param points points of the cycle * @ param turns the turns in the cycle ( left / right ) * @ param projection the type of projection * @ return the horizontal offset */ private Point2d horizontalOffset ( Point2d [ ] points , Turn [ ] turns , Projection projection ) { } }
// Haworth must currently be drawn vertically , I have seen them drawn // slanted but it ' s difficult to determine which way the projection // is relative if ( projection != Projection . Chair ) return new Point2d ( 0 , 0 ) ; // the atoms either side of a central atom are our reference int offset = chairCenterOffset ( turns ) ; int prev = ( offset + 5 ) % 6 ; int next = ( offset + 7 ) % 6 ; // and the axis formed by these atoms is our horizontal reference which // we normalise double deltaX = points [ prev ] . x - points [ next ] . x ; double deltaY = points [ prev ] . y - points [ next ] . y ; double mag = Math . sqrt ( deltaX * deltaX + deltaY * deltaY ) ; deltaX /= mag ; deltaY /= mag ; // we now ensure the reference always points left to right ( presumes no // vertical chairs ) if ( deltaX < 0 ) { deltaX = - deltaX ; deltaY = - deltaY ; } // horizontal = < 1,0 > so the offset if the difference from this return new Point2d ( 1 - deltaX , deltaY ) ;
public class JavaAnnotation { /** * Sets a value . */ public Object putValue ( String key , Object value ) { } }
return _valueMap . put ( key , value ) ;
public class JobGraph { /** * Checks for all registered job vertices if their in - / out - degree is correct . * @ return < code > null < / code > if the in - / out - degree of all vertices is correct or the first job vertex whose * in - / out - degree is incorrect . */ public AbstractJobVertex areVertexDegreesCorrect ( ) { } }
// Check input vertices final Iterator < AbstractJobInputVertex > iter = getInputVertices ( ) ; while ( iter . hasNext ( ) ) { final AbstractJobVertex jv = iter . next ( ) ; if ( jv . getNumberOfForwardConnections ( ) < 1 || jv . getNumberOfBackwardConnections ( ) > 0 ) { return jv ; } } // Check task vertices final Iterator < JobTaskVertex > iter2 = getTaskVertices ( ) ; while ( iter2 . hasNext ( ) ) { final AbstractJobVertex jv = iter2 . next ( ) ; if ( jv . getNumberOfForwardConnections ( ) < 1 || jv . getNumberOfBackwardConnections ( ) < 1 ) { return jv ; } } // Check output vertices final Iterator < AbstractJobOutputVertex > iter3 = getOutputVertices ( ) ; while ( iter3 . hasNext ( ) ) { final AbstractJobVertex jv = iter3 . next ( ) ; if ( jv . getNumberOfForwardConnections ( ) > 0 || jv . getNumberOfBackwardConnections ( ) < 1 ) { return jv ; } } return null ;
public class ListenerContainer { /** * Utility - apply the given function to each listener . The function receives * the listener as an argument . * @ param function function to call for each listener */ public void forEach ( final Function < T , Void > function ) { } }
// Hand the function to every registered listener on that listener's own
// executor; a listener that throws must not prevent the others running.
for (final ListenerEntry<T> entry : listeners.values()) {
    final T listener = entry.listener;
    entry.executor.execute(new Runnable() {
        @Override
        public void run() {
            try {
                function.apply(listener);
            } catch (Throwable e) {
                // Log and carry on - one bad listener must not break the loop.
                log.error(String.format("Listener (%s) threw an exception", listener), e);
            }
        }
    });
}
public class VariablesInner { /** * Retrieve the variable identified by variable name . * @ param resourceGroupName Name of an Azure Resource group . * @ param automationAccountName The name of the automation account . * @ param variableName The name of variable . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < VariableInner > getAsync ( String resourceGroupName , String automationAccountName , String variableName , final ServiceCallback < VariableInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( getWithServiceResponseAsync ( resourceGroupName , automationAccountName , variableName ) , serviceCallback ) ;
public class OkHttpProtocolNegotiator { /** * Start and wait until the negotiation is done , returns the negotiated protocol . * @ throws IOException if an IO error was encountered during the handshake . * @ throws RuntimeException if the negotiation completed , but no protocol was selected . */ public String negotiate ( SSLSocket sslSocket , String hostname , @ Nullable List < Protocol > protocols ) throws IOException { } }
if ( protocols != null ) { configureTlsExtensions ( sslSocket , hostname , protocols ) ; } try { // Force handshake . sslSocket . startHandshake ( ) ; String negotiatedProtocol = getSelectedProtocol ( sslSocket ) ; if ( negotiatedProtocol == null ) { throw new RuntimeException ( "TLS ALPN negotiation failed with protocols: " + protocols ) ; } return negotiatedProtocol ; } finally { platform . afterHandshake ( sslSocket ) ; }
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcExternalReference ( ) { } }
// Lazily resolve the EClass from the registered Ifc4 package on first use.
if (ifcExternalReferenceEClass == null) {
    EPackage ifc4Package = EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI);
    // Index 244 is the generated position of IfcExternalReference.
    ifcExternalReferenceEClass = (EClass) ifc4Package.getEClassifiers().get(244);
}
return ifcExternalReferenceEClass;
public class WebSecurityConfig { /** * Work from the http security object and enable or disable CSRF handling , * as requested in the application properties . * @ param http the http security object * @ return the http security object * @ throws Exception if there is a problem */ private HttpSecurity handleCsrf ( final HttpSecurity http ) throws Exception { } }
// Outside the test profile, enable cookie-based CSRF tokens while
// exempting the login and signup endpoints; tests run without CSRF.
if (!"test".equals(activeProfile)) {
    return http.csrf()
            .ignoringAntMatchers("/gedbrowserng/v1/login", "/gedbrowserng/v1/signup")
            .csrfTokenRepository(CookieCsrfTokenRepository.withHttpOnlyFalse())
            .and();
}
return http.csrf().disable();
public class DownloadDefaultKeyPairRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DownloadDefaultKeyPairRequest downloadDefaultKeyPairRequest , ProtocolMarshaller protocolMarshaller ) { } }
// Guard against a null request before touching the marshaller.
if (downloadDefaultKeyPairRequest == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // DownloadDefaultKeyPairRequest carries no members to marshall; the
    // empty try block is kept so the generated error handling stays
    // uniform with other request marshallers.
} catch (Exception e) {
    // Wrap any marshalling failure in the SDK's client exception type.
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class PipelineArbitrateEvent { /** * 销毁对应的pipeline节点 , 同步调用 */ public void destory ( Long channelId , Long pipelineId ) { } }
// Resolve every ZooKeeper path that belongs to this pipeline.
String pipelinePath = ManagePathUtils.getPipeline(channelId, pipelineId);
String processRootPath = ManagePathUtils.getProcessRoot(channelId, pipelineId);
String terminRootPath = ManagePathUtils.getTerminRoot(channelId, pipelineId);
String remedyRootPath = ManagePathUtils.getRemedyRoot(channelId, pipelineId);
String lockRootPath = ManagePathUtils.getLockRoot(channelId, pipelineId);
String loadLockPath = lockRootPath + "/" + ArbitrateConstants.NODE_LOCK_LOAD;

// Delete children before their parents; node versions are ignored on purpose.
String[] pathsInDeleteOrder = {
        loadLockPath, lockRootPath, terminRootPath, remedyRootPath, processRootPath, pipelinePath
};
try {
    for (String node : pathsInDeleteOrder) {
        zookeeper.deleteRecursive(node);
    }
} catch (ZkNoNodeException e) {
    // The node is already gone - nothing left to clean up.
} catch (ZkException e) {
    throw new ArbitrateException("Pipeline_destory", pipelineId.toString(), e);
}
public class ByteBufUtil { /** * Returns a < a href = " http : / / en . wikipedia . org / wiki / Hex _ dump " > hex dump < / a > * of the specified buffer ' s sub - region . */ public static String hexDump ( ByteBuf buffer , int fromIndex , int length ) { } }
return HexUtil . hexDump ( buffer , fromIndex , length ) ;
public class MediaUtils { /** * Create a temporary file to use to handle selected images from the picker * @ return the temporary file in the given storage directory * @ throws IOException */ private static File createTempFile ( File storageDir ) throws IOException { } }
// Create an image file name String timeStamp = new SimpleDateFormat ( "yyyyMMdd_HHmmss" , Locale . US ) . format ( new Date ( ) ) ; String imageFileName = "JPEG_" + timeStamp + "_" ; if ( ! storageDir . mkdir ( ) && ! storageDir . exists ( ) ) { throw new IOException ( "Unable to create temporary media directory" ) ; } return File . createTempFile ( imageFileName , /* prefix */ ".jpg" , /* suffix */ storageDir /* directory */ ) ;
public class MediaType { /** * < em > Replaces < / em > all parameters with the given attribute with a single parameter with the * given value . If multiple parameters with the same attributes are necessary use * { @ link # withParameters } . Prefer { @ link # withCharset } for setting the { @ code charset } parameter * when using a { @ link Charset } object . * @ throws IllegalArgumentException if either { @ code attribute } or { @ code value } is invalid */ public MediaType withParameter ( String attribute , String value ) { } }
checkNotNull(attribute);
checkNotNull(value);
// Rebuild the parameter multimap without any existing entries for this
// attribute, then append the single normalized replacement value.
String normalizedAttribute = normalizeToken(attribute);
ImmutableListMultimap.Builder<String, String> rebuilt = ImmutableListMultimap.builder();
for (Entry<String, String> parameter : parameters.entries()) {
    if (!normalizedAttribute.equals(parameter.getKey())) {
        rebuilt.put(parameter.getKey(), parameter.getValue());
    }
}
rebuilt.put(normalizedAttribute, normalizeParameterValue(normalizedAttribute, value));
MediaType result = new MediaType(type, subtype, rebuilt.build());
// Swap in the canonical constant when this turns out to be a known type.
return MoreObjects.firstNonNull(KNOWN_TYPES.get(result), result);
public class ToDoubleFunctionBuilder { /** * One of ways of creating builder . This might be the only way ( considering all _ functional _ builders ) that might be utilize to specify generic params only once . */ @ Nonnull public static < T > ToDoubleFunctionBuilder < T > toDblFunction ( Consumer < ToDoubleFunction < T > > consumer ) { } }
// Propagate the caller's type argument: the old raw-type construction
// ('new ToDoubleFunctionBuilder(consumer)') produced an unchecked
// conversion and silently discarded <T>.
return new ToDoubleFunctionBuilder<T>(consumer);
public class DescribeApplicationVersionsResult { /** * List of < code > ApplicationVersionDescription < / code > objects sorted in order of creation . * @ return List of < code > ApplicationVersionDescription < / code > objects sorted in order of creation . */ public java . util . List < ApplicationVersionDescription > getApplicationVersions ( ) { } }
// Lazy-init the backing list so callers never observe null.
if (applicationVersions == null) {
    applicationVersions = new com.amazonaws.internal.SdkInternalList<ApplicationVersionDescription>();
}
return applicationVersions;
public class BaseDestinationHandler { /** * Checks if there is space available on the outputHandlers ( xmit steams ) * for this message . * @ param checkSendAllowed true if the " sendAllowed " flag on local * messaging points should be taken into account . Otherwise they are ignored . * @ return int result if an outputHandler can be found to take message , or reason for failure */ @ Override public int checkPtoPOutputHandlers ( SIBUuid8 fixedMEUuid , HashSet < SIBUuid8 > scopedMEs ) { } }
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
    SibTr.entry(tc, "checkPtoPOutputHandlers", new Object[] { this, fixedMEUuid, scopedMEs });

// Pessimistic default: the destination itself disallows sends.
int result = DestinationHandler.OUTPUT_HANDLER_SEND_ALLOWED_FALSE;

// First make sure the destination is sendAllowed
if (isSendAllowed()) {
    result = DestinationHandler.OUTPUT_HANDLER_NOT_FOUND;
    int localResult = DestinationHandler.NOT_SET;

    // If we have a suitable local message point then we can only check it
    // if any ME restrictions include this local ME.
    boolean checkLocal = false;
    if (hasLocal()) {
        if ((fixedMEUuid == null) && (scopedMEs == null))
            checkLocal = true; // no ME restriction at all
        else if ((fixedMEUuid != null) && fixedMEUuid.equals(getMessageProcessor().getMessagingEngineUuid()))
            checkLocal = true; // fixed to this very ME
        else if ((scopedMEs != null) && scopedMEs.contains(getMessageProcessor().getMessagingEngineUuid()))
            checkLocal = true; // scoped ME set includes this ME

        if (checkLocal) {
            // Check that it is SendAllowed true and not at high limit
            localResult = _ptoPRealization.checkAbleToSend();
        }
    }

    // If we don't have a suitable local QP then check all qualifying output
    // handlers (potentially fixed/scoped) for availability.
    // Don't bother checking for a remote one if we're fixed to the local one.
    if ((!_singleServer || isLink()) && (localResult != DestinationHandler.OUTPUT_HANDLER_FOUND) && !(checkLocal && (fixedMEUuid != null)))
        result = getLocalisationManager().checkRemoteMessagePointOutputHandlers(fixedMEUuid, scopedMEs);

    // If we still failed to find anything suitable remote from us then we
    // use any local result we have to try to better it.
    if ((result == DestinationHandler.OUTPUT_HANDLER_NOT_FOUND) && (localResult != DestinationHandler.NOT_SET))
        result = localResult;
}

if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
    SibTr.exit(tc, "checkPtoPOutputHandlers", Integer.valueOf(result));
return result;
public class AmazonGameLiftClient { /** * Places a request for a new game session in a queue ( see < a > CreateGameSessionQueue < / a > ) . When processing a * placement request , Amazon GameLift searches for available resources on the queue ' s destinations , scanning each * until it finds resources or the placement request times out . * A game session placement request can also request player sessions . When a new game session is successfully * created , Amazon GameLift creates a player session for each player included in the request . * When placing a game session , by default Amazon GameLift tries each fleet in the order they are listed in the * queue configuration . Ideally , a queue ' s destinations are listed in preference order . * Alternatively , when requesting a game session with players , you can also provide latency data for each player in * relevant regions . Latency data indicates the performance lag a player experiences when connected to a fleet in * the region . Amazon GameLift uses latency data to reorder the list of destinations to place the game session in a * region with minimal lag . If latency data is provided for multiple players , Amazon GameLift calculates each * region ' s average lag for all players and reorders to get the best game play across all players . * To place a new game session request , specify the following : * < ul > * < li > * The queue name and a set of game session properties and settings * < / li > * < li > * A unique ID ( such as a UUID ) for the placement . 
You use this ID to track the status of the placement request * < / li > * < li > * ( Optional ) A set of player data and a unique player ID for each player that you are joining to the new game * session ( player data is optional , but if you include it , you must also provide a unique ID for each player ) * < / li > * < li > * Latency data for all players ( if you want to optimize game play for the players ) * < / li > * < / ul > * If successful , a new game session placement is created . * To track the status of a placement request , call < a > DescribeGameSessionPlacement < / a > and check the request ' s * status . If the status is < code > FULFILLED < / code > , a new game session has been created and a game session ARN and * region are referenced . If the placement request times out , you can resubmit the request or retry it with a * different queue . * < ul > * < li > * < a > CreateGameSession < / a > * < / li > * < li > * < a > DescribeGameSessions < / a > * < / li > * < li > * < a > DescribeGameSessionDetails < / a > * < / li > * < li > * < a > SearchGameSessions < / a > * < / li > * < li > * < a > UpdateGameSession < / a > * < / li > * < li > * < a > GetGameSessionLogUrl < / a > * < / li > * < li > * Game session placements * < ul > * < li > * < a > StartGameSessionPlacement < / a > * < / li > * < li > * < a > DescribeGameSessionPlacement < / a > * < / li > * < li > * < a > StopGameSessionPlacement < / a > * < / li > * < / ul > * < / li > * < / ul > * @ param startGameSessionPlacementRequest * Represents the input for a request action . * @ return Result of the StartGameSessionPlacement operation returned by the service . * @ throws InternalServiceException * The service encountered an unrecoverable internal failure while processing the request . Clients can retry * such requests immediately or after a waiting period . * @ throws InvalidRequestException * One or more parameter values in the request are invalid . 
Correct the invalid parameter values before * retrying . * @ throws NotFoundException * A service resource associated with the request could not be found . Clients should not retry such * requests . * @ throws UnauthorizedException * The client failed authentication . Clients should not retry such requests . * @ sample AmazonGameLift . StartGameSessionPlacement * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / gamelift - 2015-10-01 / StartGameSessionPlacement " * target = " _ top " > AWS API Documentation < / a > */ @ Override public StartGameSessionPlacementResult startGameSessionPlacement ( StartGameSessionPlacementRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeStartGameSessionPlacement ( request ) ;
public class SoapServiceClient { /** * Creates the { @ link SoapCall } from the { @ code soapClientMethod } and its * { @ code args } . */ protected SoapCall < T > createSoapCall ( Method soapClientMethod , Object [ ] args ) { } }
return new SoapCall < T > ( soapClientMethod , soapClient , args ) ;
public class AgentRegistrationInformationsInner { /** * Retrieve the automation agent registration information . * @ param resourceGroupName Name of an Azure Resource group . * @ param automationAccountName The name of the automation account . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the AgentRegistrationInner object */ public Observable < AgentRegistrationInner > getAsync ( String resourceGroupName , String automationAccountName ) { } }
// Unwrap the ServiceResponse body once the underlying call completes.
return getWithServiceResponseAsync(resourceGroupName, automationAccountName)
        .map(new Func1<ServiceResponse<AgentRegistrationInner>, AgentRegistrationInner>() {
            @Override
            public AgentRegistrationInner call(ServiceResponse<AgentRegistrationInner> serviceResponse) {
                return serviceResponse.body();
            }
        });
public class DescribeRemediationConfigurationsRequest { /** * A list of AWS Config rule names of remediation configurations for which you want details . * @ return A list of AWS Config rule names of remediation configurations for which you want details . */ public java . util . List < String > getConfigRuleNames ( ) { } }
// Lazy-init the backing list so callers never observe null.
if (configRuleNames == null) {
    configRuleNames = new com.amazonaws.internal.SdkInternalList<String>();
}
return configRuleNames;
public class EncryptionUtil { /** * Decrypt . * @ param encrypted * the encrypted * @ return the string * @ throws Exception * the exception */ public String decrypt ( String encrypted ) throws Exception { } }
// Undo the Base64 transport encoding first.
byte[] decodedValue = Base64Coder.decode(encrypted.toCharArray());
Cipher c = getCipher(Cipher.DECRYPT_MODE);
byte[] decValue = c.doFinal(decodedValue);
// Decode with an explicit charset: the old 'new String(decValue)' used the
// platform default, making the decrypted text machine-dependent.
// NOTE(review): the matching encrypt() must encode with the same charset -
// confirm it uses UTF-8 (or update it to) before relying on round-trips.
return new String(decValue, java.nio.charset.StandardCharsets.UTF_8);
public class ClientInterface { /** * start or stop MigratePartitionLeader task */ void processMigratePartitionLeaderTask ( MigratePartitionLeaderMessage message ) { } }
// All start/stop transitions of the executor happen under m_lock.
synchronized (m_lock) {
    // start MigratePartitionLeader service
    if (message.startMigratingPartitionLeaders()) {
        if (m_migratePartitionLeaderExecutor == null) {
            m_migratePartitionLeaderExecutor = Executors.newSingleThreadScheduledExecutor(
                    CoreUtils.getThreadFactory("MigratePartitionLeader"));
            // Interval and initial delay (seconds) are tunable via system
            // properties; both default to 1.
            final int interval = Integer.parseInt(System.getProperty("MIGRATE_PARTITION_LEADER_INTERVAL", "1"));
            final int delay = Integer.parseInt(System.getProperty("MIGRATE_PARTITION_LEADER_DELAY", "1"));
            m_migratePartitionLeaderExecutor.scheduleAtFixedRate(
                    () -> startMigratePartitionLeader(message.isForStopNode()),
                    delay, interval, TimeUnit.SECONDS);
        }
        hostLog.info("MigratePartitionLeader task is started.");
        return;
    }
    // stop MigratePartitionLeader service
    if (m_migratePartitionLeaderExecutor != null) {
        // shutdown() lets an in-flight run finish; we do not await termination.
        m_migratePartitionLeaderExecutor.shutdown();
        m_migratePartitionLeaderExecutor = null;
    }
}
// NOTE(review): this logs "stopped" even when no executor was running -
// confirm that is the intended behavior.
hostLog.info("MigratePartitionLeader task is stopped.");
public class StreamSegmentContainer { /** * region AutoCloseable Implementation */ @ Override public void close ( ) { } }
// Idempotent: only the first close performs the shutdown sequence.
if (this.closed.compareAndSet(false, true)) {
    // Close extensions first, then stop the container service itself,
    // blocking until the stop completes.
    this.extensions.values().forEach(SegmentContainerExtension::close);
    Futures.await(Services.stopAsync(this, this.executor));
    // Tear down internal components; the ordering (cleaner, writer,
    // durable log, read index, storage) appears deliberate - preserve it.
    this.metadataCleaner.close();
    this.writer.close();
    this.durableLog.close();
    this.readIndex.close();
    this.storage.close();
    log.info("{}: Closed.", this.traceObjectId);
}
public class ResolutionPreference { /** * Initializes the preference . * @ param attributeSet * The attribute set , the attributes should be obtained from , as an instance of the type * { @ link AttributeSet } or null , if no attributes should be obtained * @ param defaultStyle * The default style to apply to this preference . If 0 , no style will be applied ( beyond * what is included in the theme ) . This may either be an attribute resource , whose value * will be retrieved from the current theme , or an explicit style resource * @ param defaultStyleResource * A resource identifier of a style resource that supplies default values for the * preference , used only if the default style is 0 or can not be found in the theme . Can * be 0 to not look for defaults */ private void initialize ( @ Nullable final AttributeSet attributeSet , @ AttrRes final int defaultStyle , @ StyleRes final int defaultStyleResource ) { } }
// Apply XML attributes and style defaults for this preference.
obtainStyledAttributes(attributeSet, defaultStyle, defaultStyleResource);
// Standard OK/Cancel dialog buttons.
setPositiveButtonText(android.R.string.ok);
setNegativeButtonText(android.R.string.cancel);
// The entered resolution must be non-empty, numeric and satisfy the
// minimum-value pattern; validators are checked in registration order.
addValidator(Validators.notEmpty(getContext(), R.string.resolution_not_empty_error_message));
addValidator(Validators.number(getContext(), R.string.resolution_number_error_message));
addValidator(Validators.regex(getContext(), R.string.resolution_min_value_error_message, MIN_VALUE_REGEX));
public class GenericCollectionTypeResolver { /** * Extract the generic return type from the given method . * @ param method the method to check the return type for * @ param source the source class / interface defining the generic parameter types * @ param typeIndex the index of the type ( e . g . 0 for Collections , * 0 for Map keys , 1 for Map values ) * @ param nestingLevel the nesting level of the target type * @ return the generic type , or { @ code null } if none */ private static Class < ? > getGenericReturnType ( Method method , Class < ? > source , int typeIndex , int nestingLevel ) { } }
// Resolve the method's generic return type against the declaring source
// type. The two nulls mean: no type-variable map and no current bound
// type; the trailing 1 is the starting type-index nesting.
return extractType(method.getGenericReturnType(), source, typeIndex, null, null, nestingLevel, 1);
public class DefaultTypeRepository { /** * Finds the set of types referenced by the specified type . * @ param typeName the starting type * @ return a Set of Class objects , or an empty set if none were found */ public Set < Class < ? > > findReferencedTypes ( String typeName ) { } }
Set < Class < ? > > referencedTypes = new HashSet < > ( ) ; // use the cached version if possible if ( referencedTypesCache . containsKey ( typeName ) ) { return referencedTypesCache . get ( typeName ) ; } try { CtClass cc = classPool . get ( typeName ) ; for ( Object referencedType : cc . getRefClasses ( ) ) { String referencedTypeName = ( String ) referencedType ; if ( ! isExcluded ( referencedTypeName ) ) { try { referencedTypes . add ( loadClass ( referencedTypeName ) ) ; } catch ( Throwable t ) { log . debug ( "Could not find " + referencedTypeName + " ... ignoring." ) ; } } } // remove the type itself referencedTypes . remove ( loadClass ( typeName ) ) ; } catch ( Exception e ) { log . debug ( "Error finding referenced types for " + typeName + " ... ignoring." ) ; // since there was an error , we can ' t find the set of referenced types from it , so . . . referencedTypesCache . put ( typeName , new HashSet < > ( ) ) ; } // cache for the next time referencedTypesCache . put ( typeName , referencedTypes ) ; return referencedTypes ;
public class MechanizeAgent { /** * Returns the resource received uppon the request . The resource can be casted to any expected subclass of resource * but will fail with ClassCastException if the expected type of resource is not the actual returned resource . */ @ SuppressWarnings ( "unchecked" ) @ Override public < T extends Resource > T request ( final HttpRequestBase request ) { } }
try {
    // Execute the request and convert the raw response into a resource.
    HttpResponse httpResponse = execute(client, request);
    Resource result = toPage(request, httpResponse);
    return (T) result;
} catch (Exception e) {
    // Normalise every failure into the library's exception hierarchy.
    throw MechanizeExceptionFactory.newException(e);
}
public class Transaction { /** * The transaction fee is the difference of the value of all inputs and the value of all outputs . Currently , the fee * can only be determined for transactions created by us . * @ return fee , or null if it cannot be determined */ public Coin getFee ( ) { } }
// Fee = sum(inputs) - sum(outputs). It can only be computed when the
// transaction has both sides and every input value is known to us.
if (inputs.isEmpty() || outputs.isEmpty())
    return null; // incomplete transaction
Coin fee = Coin.ZERO;
for (TransactionInput input : inputs) {
    Coin inputValue = input.getValue();
    if (inputValue == null)
        return null; // value of this input is unknown
    fee = fee.add(inputValue);
}
for (TransactionOutput output : outputs) {
    fee = fee.subtract(output.getValue());
}
return fee;
public class MultipleSequenceAlignment { /** * A sequence that has been aligned to other sequences will have inserts . * @ param sequence */ public void addAlignedSequence ( S sequence ) { } }
// The first sequence fixes the alignment length; later ones must match it.
if (length == null) {
    length = sequence.getLength();
} else if (sequence.getLength() != length) {
    throw new IllegalArgumentException(sequence.getAccession() + " length = " + sequence.getLength() + " not equal to MSA length = " + length);
}
sequences.add(sequence);
public class AttachmentLoader { /** * validates the current attachment * @ param currentAttachment given attachment * @ return true , if the current attachment is valid , false otherwisesr */ public static boolean validateAttachment ( Attachment currentAttachment ) { } }
try {
    // Touch the id first: a null attachment triggers an NPE caught below.
    currentAttachment.getId();
    // Use equals() for the "null" sentinel: the old '== "null"' compared
    // references against the interned literal and never matched a String
    // that was merely equal to it. '"null".equals(x)' is also null-safe.
    if ("null".equals(currentAttachment.getAlternateId()) || currentAttachment.getAlternateId().isEmpty()) {
        return false;
    }
    if ("null".equals(currentAttachment.getCapGroupName()) || currentAttachment.getCapGroupName().isEmpty()) {
        return false;
    }
    if ("null".equals(currentAttachment.getCapGroupSMILES()) || currentAttachment.getCapGroupSMILES().isEmpty()) {
        return false;
    }
    if ("null".equals(currentAttachment.getLabel()) || " ".equals(currentAttachment.getLabel())) {
        return false;
    }
} catch (Exception e) {
    // Any missing field (NPE etc.) means the attachment is invalid.
    return false;
}
return true;
public class WrappedByteBuffer { /** * Puts a string into the buffer at the current position , using the character set to encode the string as bytes . * @ param v * the string * @ param cs * the character set * @ return the buffer */ public WrappedByteBuffer putString ( String v , Charset cs ) { } }
// Encode with the requested charset, grow the buffer if needed, then copy.
java.nio.ByteBuffer encoded = cs.encode(v);
_autoExpand(encoded.limit());
_buf.put(encoded);
return this;
public class Workflow { /** * Create a WorkflowCreator to execute create . * @ param pathWorkspaceSid The workspace _ sid * @ param friendlyName A string representing a human readable name for this * Workflow . * @ param configuration JSON document configuring the rules for this Workflow . * @ return WorkflowCreator capable of executing the create */ public static WorkflowCreator creator ( final String pathWorkspaceSid , final String friendlyName , final String configuration ) { } }
return new WorkflowCreator ( pathWorkspaceSid , friendlyName , configuration ) ;
public class StringPropertyReplacer { /** * Go through the input string and replace any occurrence of $ { p } with * the System . getProtocolProperty ( p ) value . If there is no such property p defined , * then the $ { p } reference will remain unchanged . * If the property reference is of the form $ { p : v } and there is no such property p , * then the default value v will be returned . * If the property reference is of the form $ { p1 , p2 } or $ { p1 , p2 : v } then * the primary and the secondary properties will be tried in turn , before * returning either the unchanged input , or the default value . * The property $ { / } is replaced with System . getProtocolProperty ( " file . separator " ) * value and the property $ { : } is replaced with System . getProtocolProperty ( " path . separator " ) . * Prior to resolving variables , environment variables are assigned to the * collection of properties . Each environment variable is prefixed with the * prefix " env . " . If a system property is already defined for the prefixed * environment variable , the system property is honored as an override * ( primarily for testing ) . * @ param string * - the string with possible $ { } references * @ return the input string with all property references replaced if any . * If there are no valid references the input string will be returned . */ public static String replaceProperties ( final String string ) { } }
// Seed the resolver with the system properties plus every environment
// variable under the "env." prefix. An already-defined system property
// with that key wins (primarily to allow overrides in tests).
Properties props = System.getProperties();
for (Map.Entry<String, String> envVar : System.getenv().entrySet()) {
    String propKey = ENV_VAR_BASE_PROPERTY_KEY + envVar.getKey();
    if (!props.containsKey(propKey)) {
        props.setProperty(propKey, envVar.getValue());
    }
}
return replaceProperties(string, new PropertiesPropertyResolver(props));
public class DefaultOrthologize {
    /**
     * Walks {@link OrthologizedKam#RELS certain relationship types} and infers
     * orthologous edges based on matching relationships.
     * <p>
     * For instance, if there are two {@code transcribedTo} edges from an
     * orthologized {@code geneAbundance} then the downstream
     * {@code rnaAbundance}s are inferred to be orthologous as well and are
     * collapsed onto the first one seen.
     *
     * @param kam {@link Kam}
     * @param snode {@link KamNode} species node to walk from
     * @param param {@link TermParameter} for orthologous species node
     * @param inferf {@link EdgeFilter} selecting the edges eligible for inference
     * @param direction {@link EdgeDirectionType} direction to walk
     * @param ortho {@link Map} of orthologous node to species node
     * @param ntp {@link Map} of node id to {@link TermParameter}
     * @param etp {@link Map} of edge id to {@link TermParameter}
     */
    private static void recurseConnections(Kam kam, KamNode snode,
            TermParameter param, EdgeFilter inferf,
            EdgeDirectionType direction, Map<KamNode, KamNode> ortho,
            Map<Integer, TermParameter> ntp, Map<Integer, TermParameter> etp) {
        // get adjacent edges that can be inferred
        final Set<KamEdge> out = kam.getAdjacentEdges(snode, direction, inferf);

        // map ACTS_IN edges by activity function; map all other edges by
        // relationship type.  The first node seen per key becomes the
        // collapse target for every later match.
        final Map<FunctionEnum, KamNode> acts = new HashMap<FunctionEnum, KamNode>();
        final Map<RelationshipType, KamNode> rels = new HashMap<RelationshipType, Kam.KamNode>();
        for (final KamEdge e : out) {
            // get correct edge opposite node based on search direction
            final KamNode opnode = (direction == FORWARD ? e.getTargetNode() : e.getSourceNode());

            // handle ACTS_IN edge independently since we care about similar
            // activity functions
            if (e.getRelationshipType() == ACTS_IN) {
                final FunctionEnum actfun = opnode.getFunctionType();

                // lookup first seen node for activity function
                KamNode node = acts.get(actfun);

                // if not yet seen mark opposite node and edge as species collapse
                // target.  continue to next edge.
                if (node == null) {
                    acts.put(opnode.getFunctionType(), opnode);
                    ntp.put(opnode.getId(), param);
                    etp.put(e.getId(), param);
                    continue;
                }

                // already seen: merge this node into the first-seen target
                kam.collapseNode(opnode, node);

                // hang on to collapsed node
                ortho.put(opnode, node);
            } else {
                // handle all other edges by relationship type
                final RelationshipType rel = e.getRelationshipType();

                // lookup first seen relationship type
                KamNode node = rels.get(rel);

                // if not yet seen mark opposite node and edge as species collapse
                // target.  continue to next edge.
                if (node == null) {
                    rels.put(rel, opnode);
                    ntp.put(opnode.getId(), param);
                    etp.put(e.getId(), param);
                    continue;
                }

                // hang on to collapsed node
                // NOTE(review): this branch records the mapping before collapsing,
                // while the ACTS_IN branch collapses first — the two statements look
                // independent, but confirm collapseNode does not invalidate opnode.
                ortho.put(opnode, node);
                kam.collapseNode(opnode, node);
            }
        }

        // recursively process all collapsed nodes
        // NOTE(review): termination relies on the walk eventually finding no new
        // first-seen nodes; confirm cyclic KAM graphs cannot recurse indefinitely.
        Collection<KamNode> actn = acts.values();
        Collection<KamNode> reln = rels.values();
        // constrainedHashSet presumably creates a HashSet pre-sized for the sum
        final Set<KamNode> nodes = constrainedHashSet(actn.size() + reln.size());
        nodes.addAll(actn);
        nodes.addAll(reln);
        for (final KamNode n : nodes) {
            recurseConnections(kam, n, param, inferf, direction, ortho, ntp, etp);
        }
    }
}
public class DefaultCertStoreInspector {
    /**
     * {@inheritDoc}
     * <p>
     * Returns three selectors, tried in order: an end-entity selector matching
     * certificates whose KeyUsage permits key encipherment, an end-entity
     * selector matching data encipherment, and finally a CA selector.
     */
    protected Collection<X509CertSelector> getRecipientSelectors() {
        // KeyUsage bits, in order: digitalSignature, nonRepudiation,
        // keyEncipherment, dataEncipherment.

        // End-entity certificate (basicConstraints -2) allowing key encipherment.
        X509CertSelector keyEncSelector = new X509CertSelector();
        keyEncSelector.setBasicConstraints(-2);
        keyEncSelector.setKeyUsage(new boolean[] { false, false, true, false });

        // End-entity certificate allowing data encipherment.
        X509CertSelector dataEncSelector = new X509CertSelector();
        dataEncSelector.setBasicConstraints(-2);
        dataEncSelector.setKeyUsage(new boolean[] { false, false, false, true });

        // Any CA certificate (basicConstraints >= 0), no KeyUsage restriction.
        X509CertSelector caSelector = new X509CertSelector();
        caSelector.setBasicConstraints(0);

        return Arrays.asList(keyEncSelector, dataEncSelector, caSelector);
    }
}
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link FunctionsType } { @ code > } } */ @ XmlElementDecl ( namespace = "http://www.w3.org/1998/Math/MathML" , name = "compose" ) public JAXBElement < FunctionsType > createCompose ( FunctionsType value ) { } }
return new JAXBElement < FunctionsType > ( _Compose_QNAME , FunctionsType . class , null , value ) ;
public class FileEntryAdapterIterator {
    /**
     * Advances the underlying iterator to the next entry that the filter
     * accepts (or to the next entry at all when no filter is set).
     *
     * @return the next adapted resource, or {@code null} when exhausted
     */
    private SmbResource advance() {
        while (this.delegate.hasNext()) {
            FileEntry fe = this.delegate.next();

            // No filter: return the first entry we can adapt; skip entries
            // whose child URL cannot be built.
            if (this.filter == null) {
                try {
                    return adapt(fe);
                } catch (MalformedURLException e) {
                    log.error("Failed to create child URL", e);
                    continue;
                }
            }

            // NOTE(review): try-with-resources closes r before it is returned;
            // presumably SmbResource.close() only releases transient handles and
            // the returned resource remains usable — confirm against SmbResource
            // lifecycle semantics.
            try (SmbResource r = adapt(fe)) {
                if (this.filter.accept(r)) {
                    return r;
                }
            } catch (MalformedURLException e) {
                log.error("Failed to create child URL", e);
                continue;
            } catch (CIFSException e) {
                // Filter errors skip the entry rather than aborting iteration.
                log.error("Filter failed", e);
                continue;
            }
        }
        // Underlying iterator exhausted.
        return null;
    }
}
public class EntryStream {
    /**
     * Merge series of adjacent stream entries with equal keys, combining the
     * corresponding values using the provided function.
     * <p>
     * This is a <a href="package-summary.html#StreamOps">quasi-intermediate</a>
     * partial reduction operation. The key of the resulting entry is the key
     * of the first merged entry.
     *
     * @param merger a non-interfering, stateless, associative function to merge
     *        values of two adjacent entries whose keys are equal. Note that it
     *        can be applied to the results of previous merges.
     * @return a new {@code EntryStream} whose keys are the keys of the original
     *         stream and whose values are the values of adjacent same-key
     *         entries, combined using the provided merger function.
     * @see StreamEx#collapse(BiPredicate, BinaryOperator)
     * @since 0.5.5
     */
    public EntryStream<K, V> collapseKeys(BinaryOperator<V> merger) {
        // Merge two adjacent entries: keep the first key, combine the values.
        BinaryOperator<Entry<K, V>> entryMerger = (e1, e2) ->
                new SimpleImmutableEntry<>(e1.getKey(), merger.apply(e1.getValue(), e2.getValue()));
        // equalKeys() decides adjacency; the same merger serves as both the
        // accumulator and the combiner for parallel splits.
        return new EntryStream<>(new CollapseSpliterator<>(equalKeys(),
                Function.identity(), entryMerger, entryMerger, spliterator()), context);
    }
}
public class ListIdentityPoolUsageResult { /** * Usage information for the identity pools . * @ return Usage information for the identity pools . */ public java . util . List < IdentityPoolUsage > getIdentityPoolUsages ( ) { } }
if ( identityPoolUsages == null ) { identityPoolUsages = new com . amazonaws . internal . SdkInternalList < IdentityPoolUsage > ( ) ; } return identityPoolUsages ;
public class BindPath {
    /**
     * Called when we detect a change somewhere down our path.
     * <p>
     * First, check to see if our object is changing; if so remove our old
     * listener. Next, update the reference object the children have and
     * recurse. Finally, add listeners if we have a different object.
     *
     * @param listener the listener to attach
     * @param newObject the object we should read our property off of
     * @param updateSet the set of objects we have added listeners to
     */
    public synchronized void updatePath(PropertyChangeListener listener, Object newObject, Set updateSet) {
        // Detach from the old object before rewiring children.
        if (currentObject != newObject) {
            removeListeners();
        }
        if ((children != null) && (children.length > 0)) {
            try {
                Object newValue = null;
                if (newObject != null) {
                    updateSet.add(newObject);
                    newValue = extractNewValue(newObject);
                }
                // Propagate the extracted value (null when newObject is null)
                // down every child path.
                for (BindPath child : children) {
                    child.updatePath(listener, newValue, updateSet);
                }
            } catch (Exception e) {
                // LOGME
                // do we ignore it, or fail?
                // NOTE(review): deliberate best-effort swallow — a failing
                // property extraction leaves children stale rather than
                // aborting the whole rebind. At minimum this should be logged.
            }
        }
        // Re-attach listeners only after the children point at the new value.
        if (currentObject != newObject) {
            addListeners(listener, newObject, updateSet);
        }
    }
}
public class MetadataServiceListUnmarshaller { /** * { @ inheritDoc } */ protected void processAttribute ( XMLObject samlObject , Attr attribute ) throws UnmarshallingException { } }
MetadataServiceList mdsl = ( MetadataServiceList ) samlObject ; if ( attribute . getLocalName ( ) . equals ( MetadataServiceList . ID_ATTR_NAME ) ) { mdsl . setID ( attribute . getValue ( ) ) ; attribute . getOwnerElement ( ) . setIdAttributeNode ( attribute , true ) ; } else if ( attribute . getLocalName ( ) . equals ( MetadataServiceList . VERSION_ATTR_NAME ) ) { mdsl . setVersion ( MetadataServiceListVersion . valueOf ( attribute . getValue ( ) ) ) ; } else if ( attribute . getLocalName ( ) . equals ( MetadataServiceList . ISSUE_DATE_ATTR_NAME ) && ! Strings . isNullOrEmpty ( attribute . getValue ( ) ) ) { mdsl . setIssueDate ( new DateTime ( attribute . getValue ( ) , ISOChronology . getInstanceUTC ( ) ) ) ; } else if ( attribute . getLocalName ( ) . equals ( MetadataServiceList . NEXT_UPDATE_ATTR_NAME ) && ! Strings . isNullOrEmpty ( attribute . getValue ( ) ) ) { mdsl . setNextUpdate ( new DateTime ( attribute . getValue ( ) , ISOChronology . getInstanceUTC ( ) ) ) ; } else { super . processAttribute ( samlObject , attribute ) ; }
public class UndertowHttpManagementService { /** * Starts the service . * @ param context The start context * @ throws StartException If any errors occur */ @ Override public synchronized void start ( final StartContext context ) throws StartException { } }
final ModelController modelController = modelControllerSupplier . get ( ) ; final ControlledProcessStateService controlledProcessStateService = controlledProcessStateServiceSupplier . get ( ) ; socketBindingManager = socketBindingManagerSupplier != null ? socketBindingManagerSupplier . get ( ) : null ; final SecurityRealm securityRealm = securityRealmSupplier != null ? securityRealmSupplier . get ( ) : null ; final HttpAuthenticationFactory httpAuthenticationFactory = httpAuthFactorySupplier != null ? httpAuthFactorySupplier . get ( ) : null ; SSLContext sslContext = sslContextSupplier != null ? sslContextSupplier . get ( ) : null ; final SslClientAuthMode sslClientAuthMode ; if ( sslContext == null && securityRealm != null ) { sslContext = securityRealm . getSSLContext ( ) ; sslClientAuthMode = getSslClientAuthMode ( securityRealm ) ; } else { sslClientAuthMode = null ; } InetSocketAddress bindAddress = null ; InetSocketAddress secureBindAddress = null ; final SocketBinding basicBinding = socketBindingSupplier != null ? socketBindingSupplier . get ( ) : null ; final SocketBinding secureBinding = secureSocketBindingSupplier != null ? secureSocketBindingSupplier . get ( ) : null ; final NetworkInterfaceBinding interfaceBinding = interfaceBindingSupplier != null ? interfaceBindingSupplier . get ( ) : null ; final NetworkInterfaceBinding secureInterfaceBinding = secureInterfaceBindingSupplier != null ? secureInterfaceBindingSupplier . get ( ) : null ; if ( interfaceBinding != null ) { useUnmanagedBindings = true ; assert this . port != null ; final int port = this . port ; if ( port > 0 ) { bindAddress = new InetSocketAddress ( interfaceBinding . getAddress ( ) , port ) ; } assert this . securePort != null ; final int securePort = this . securePort ; if ( securePort > 0 ) { InetAddress secureAddress = secureInterfaceBinding == null ? interfaceBinding . getAddress ( ) : secureInterfaceBinding . 
getAddress ( ) ; secureBindAddress = new InetSocketAddress ( secureAddress , securePort ) ; } } else { if ( basicBinding != null ) { bindAddress = basicBinding . getSocketAddress ( ) ; } if ( secureBinding != null ) { secureBindAddress = secureBinding . getSocketAddress ( ) ; } } List < ListenerRegistry . Listener > listeners = new ArrayList < > ( ) ; // TODO : rethink this whole ListenerRegistry business if ( bindAddress != null ) { ListenerRegistry . Listener http = new ListenerRegistry . Listener ( "http" , HTTP_MANAGEMENT , SERVER_NAME , bindAddress ) ; http . setContextInformation ( "socket-binding" , basicBinding ) ; listeners . add ( http ) ; } if ( secureBindAddress != null ) { ListenerRegistry . Listener https = new ListenerRegistry . Listener ( "https" , HTTPS_MANAGEMENT , SERVER_NAME , bindAddress ) ; https . setContextInformation ( "socket-binding" , secureBinding ) ; listeners . add ( https ) ; } final ChannelUpgradeHandler upgradeHandler = new ChannelUpgradeHandler ( ) ; final ServiceBuilder < ? > builder = context . getChildTarget ( ) . addService ( HTTP_UPGRADE_SERVICE_NAME ) ; final Consumer < Object > upgradeHandlerConsumer = builder . provides ( HTTP_UPGRADE_SERVICE_NAME , HTTPS_UPGRADE_SERVICE_NAME ) ; builder . setInstance ( org . jboss . msc . Service . newInstance ( upgradeHandlerConsumer , upgradeHandler ) ) ; builder . install ( ) ; for ( ListenerRegistry . Listener listener : listeners ) { listener . addHttpUpgradeMetadata ( new ListenerRegistry . HttpUpgradeMetadata ( JBOSS_REMOTING , MANAGEMENT_ENDPOINT ) ) ; } if ( listenerRegistrySupplier . get ( ) != null ) { for ( ListenerRegistry . Listener listener : listeners ) { listenerRegistrySupplier . get ( ) . addListener ( listener ) ; } } try { serverManagement = ManagementHttpServer . builder ( ) . setBindAddress ( bindAddress ) . setSecureBindAddress ( secureBindAddress ) . setModelController ( modelController ) . setSecurityRealm ( securityRealm ) . setSSLContext ( sslContext ) . 
setSSLClientAuthMode ( sslClientAuthMode ) . setHttpAuthenticationFactory ( httpAuthenticationFactory ) . setControlledProcessStateService ( controlledProcessStateService ) . setConsoleMode ( consoleMode ) . setConsoleSlot ( consoleSlot ) . setChannelUpgradeHandler ( upgradeHandler ) . setManagementHttpRequestProcessor ( requestProcessorSupplier . get ( ) ) . setAllowedOrigins ( allowedOrigins ) . setWorker ( workerSupplier . get ( ) ) . setExecutor ( executorSupplier . get ( ) ) . build ( ) ; serverManagement . start ( ) ; // Register the now - created sockets with the SBM if ( socketBindingManager != null ) { if ( useUnmanagedBindings ) { SocketBindingManager . UnnamedBindingRegistry registry = socketBindingManager . getUnnamedRegistry ( ) ; if ( bindAddress != null ) { final InetSocketAddress boundAddress = serverManagement . getLocalAddress ( InetSocketAddress . class ) ; basicManagedBinding = ManagedBinding . Factory . createSimpleManagedBinding ( "management-http" , boundAddress , null ) ; registry . registerBinding ( basicManagedBinding ) ; } if ( secureBindAddress != null ) { final InetSocketAddress boundAddress = serverManagement . getSecureLocalAddress ( InetSocketAddress . class ) ; secureManagedBinding = ManagedBinding . Factory . createSimpleManagedBinding ( "management-https" , boundAddress , null ) ; registry . registerBinding ( secureManagedBinding ) ; } } else { SocketBindingManager . NamedManagedBindingRegistry registry = socketBindingManager . getNamedRegistry ( ) ; if ( basicBinding != null ) { final InetSocketAddress boundAddress = serverManagement . getLocalAddress ( InetSocketAddress . class ) ; basicManagedBinding = ManagedBinding . Factory . createSimpleManagedBinding ( basicBinding . getName ( ) , boundAddress , null ) ; registry . registerBinding ( basicManagedBinding ) ; } if ( secureBinding != null ) { final InetSocketAddress boundAddress = serverManagement . getSecureLocalAddress ( InetSocketAddress . 
class ) ; secureManagedBinding = ManagedBinding . Factory . createSimpleManagedBinding ( secureBinding . getName ( ) , boundAddress , null ) ; registry . registerBinding ( secureManagedBinding ) ; } } } } catch ( Exception e ) { Throwable cause = e . getCause ( ) ; if ( e instanceof BindException || cause instanceof BindException ) { final StringBuilder sb = new StringBuilder ( ) . append ( e . getLocalizedMessage ( ) ) ; if ( bindAddress != null ) sb . append ( " " ) . append ( bindAddress ) ; if ( secureBindAddress != null ) sb . append ( " " ) . append ( secureBindAddress ) ; throw new StartException ( sb . toString ( ) ) ; } else { throw ServerLogger . ROOT_LOGGER . failedToStartHttpManagementService ( e ) ; } } httpManagementConsumer . accept ( httpManagement ) ;
public class AbstractMessage {
    /**
     * Sets the destination this message was sent to.
     * <p>
     * The message must be fully deserialized before its destination may be
     * mutated.
     *
     * @param destination the destination to record on this message
     * @throws JMSException if the message is not deserialized to
     *         {@code MessageSerializationLevel.FULL}
     * @see javax.jms.Message#setJMSDestination(javax.jms.Destination)
     */
    @Override
    public final void setJMSDestination(Destination destination) throws JMSException {
        // Guard: header mutation is only legal once the message body/headers
        // have been fully deserialized.
        assertDeserializationLevel(MessageSerializationLevel.FULL);
        // Store a reference form of the destination rather than the raw object.
        this.destination = DestinationTools.asRef(destination);
    }
}