signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class GroupService {
    /**
     * Updates an existing group via the backing client.
     *
     * @param groupId     id of the group to update
     * @param groupConfig desired group configuration
     * @return <tt>true</tt> if the update was successful, <tt>false</tt> otherwise
     */
    private boolean updateGroup(String groupId, GroupConfig groupConfig) {
        // Resolve the parent group reference to an id only when a parent is configured.
        // NOTE(review): when custom fields ARE present this passes client.getCustomFields()
        // (the client's field definitions), not groupConfig.getCustomFields() — presumably
        // the converter matches configured fields against the server-side definitions,
        // but confirm this is not a typo for groupConfig.getCustomFields().
        return client.updateGroup(groupId, converter.createUpdateGroupRequest(groupConfig, groupConfig.getParentGroup() != null ? idByRef(groupConfig.getParentGroup()) : null, groupConfig.getCustomFields() == null || groupConfig.getCustomFields().size() == 0 ? null : client.getCustomFields()));
    }
}
public class Link { /** * Parses a textual link specification ( as used by the Docker CLI ) to a { @ link Link } . * @ param serialized * the specification , e . g . < code > name : alias < / code > or < code > / name1 : / name2 / alias < / code > * @ return a { @ link Link } matching the specification * @ throws IllegalArgumentException * if the specification cannot be parsed */ public static Link parse ( final String serialized ) throws IllegalArgumentException { } }
try { final String [ ] parts = serialized . split ( ":" ) ; switch ( parts . length ) { case 2 : { String [ ] nameSplit = parts [ 0 ] . split ( "/" ) ; String [ ] aliasSplit = parts [ 1 ] . split ( "/" ) ; return new Link ( nameSplit [ nameSplit . length - 1 ] , aliasSplit [ aliasSplit . length - 1 ] ) ; } default : { throw new IllegalArgumentException ( ) ; } } } catch ( final Exception e ) { throw new IllegalArgumentException ( "Error parsing Link '" + serialized + "'" ) ; }
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link VerticalDatumType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link VerticalDatumType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "VerticalDatum" , substitutionHeadNamespace = "http://www.opengis.net/gml" , substitutionHeadName = "_Datum" ) public JAXBElement < VerticalDatumType > createVerticalDatum ( VerticalDatumType value ) { } }
return new JAXBElement < VerticalDatumType > ( _VerticalDatum_QNAME , VerticalDatumType . class , null , value ) ;
public class MetricsTimeVaryingRate { /** * The min time for a single operation since the last reset * { @ link # resetMinMax ( ) } * @ return min time for an operation */ public long getMinTime ( ) { } }
lock . lock ( ) ; try { if ( printMinMax ) { return previousIntervalMinMax . minTime ; } return minMax . minTime ; } finally { lock . unlock ( ) ; }
public class CoreNLPParseNode { /** * Resolves a head down to a terminal node . A terminal node is its own head here . */ public Optional < CoreNLPParseNode > terminalHead ( ) { } }
if ( terminal ( ) ) { return Optional . of ( this ) ; } if ( immediateHead ( ) . isPresent ( ) ) { return immediateHead ( ) . get ( ) . terminalHead ( ) ; } return Optional . absent ( ) ;
public class BSHAutoCloseable {
    /**
     * We may not always have a type node (loose typed resources).
     * Then we create the BSHType node and get the type
     * from the BSHVariableDeclarator AllocationExpression nodes.
     */
    private void renderTypeNode() {
        // Only synthesize a type node when the declarator is the sole child
        // (i.e. no explicit type node was parsed).
        if (jjtGetNumChildren() == 1) {
            SimpleNode tNode = new BSHType(ParserTreeConstants.JJTTYPE);
            // Walk down the first-child chain until we hit a BSHAmbiguousName
            // (the type name inside the allocation expression).
            Node ambigName = jjtGetChild(0);
            while (ambigName.jjtGetNumChildren() > 0)
                if ((ambigName = ambigName.jjtGetChild(0)) instanceof BSHAmbiguousName)
                    break;
            // Clone the ambiguous name into a fresh node parented to the new type node.
            BSHAmbiguousName ambigNew = new BSHAmbiguousName(ParserTreeConstants.JJTAMBIGUOUSNAME);
            ambigNew.jjtSetParent(tNode);
            ambigNew.text = ((BSHAmbiguousName) ambigName).text;
            tNode.jjtAddChild(ambigNew, 0);
            tNode.jjtSetParent(this);
            // Rebuild the child array as [ synthesized type node, original declarator ].
            Node[] n = new Node[2];
            n[0] = tNode;
            n[1] = children[0];
            children = n;
        }
    }
}
public class Assert { /** * Assert that the provided object is an instance of the provided class . * < pre class = " code " > Assert . instanceOf ( Foo . class , foo , " Foo expected " ) ; < / pre > * @ param type the type to check against * @ param obj the object to check * @ param message a message which will be prepended to provide further context . * If it is empty or ends in " : " or " ; " or " , " or " . " , a full exception message * will be appended . If it ends in a space , the name of the offending object ' s * type will be appended . In any other case , a " : " with a space and the name * of the offending object ' s type will be appended . * @ throws IllegalArgumentException if the object is not an instance of type */ public static void isInstanceOf ( Class < ? > type , Object obj , String message ) { } }
notNull ( type , "Type to check against must not be null" ) ; if ( ! type . isInstance ( obj ) ) { instanceCheckFailed ( type , obj , message ) ; }
public class LiaisonRegistry { /** * Fetch the appropriate database liaison for the supplied URL , which should be the same string * that would be used to configure a connection to the database . */ public static DatabaseLiaison getLiaison ( String url ) { } }
if ( url == null ) throw new NullPointerException ( "URL must not be null" ) ; // see if we already have a liaison mapped for this connection DatabaseLiaison liaison = _mappings . get ( url ) ; if ( liaison == null ) { // scan the list looking for a matching liaison for ( DatabaseLiaison candidate : _liaisons ) { if ( candidate . matchesURL ( url ) ) { liaison = candidate ; break ; } } // if we didn ' t find a matching liaison , use the default if ( liaison == null ) { log . warning ( "Unable to match liaison for database. Using default." , "url" , url ) ; liaison = new DefaultLiaison ( ) ; } // map this URL to this liaison _mappings . put ( url , liaison ) ; } return liaison ;
public class CoinbeneBaseService { /** * Sign request JSON . */ protected JsonNode formAndSignRequestJson ( Map < String , String > params ) { } }
CoinbeneUtils . signParams ( params ) ; return toJson ( params ) ;
public class Levenshtein {
    /**
     * This is the original, naive implementation, using the Wagner &amp;
     * Fischer algorithm from 1974. It uses a flattened matrix for
     * speed, but still computes the entire matrix.
     *
     * @param s1 first string
     * @param s2 second string
     * @return the Levenshtein edit distance between the two strings
     */
    public static int distance(String s1, String s2) {
        // Trivial cases: distance to an empty string is the other string's length.
        if (s1.length() == 0)
            return s2.length();
        if (s2.length() == 0)
            return s1.length();

        int len1 = s1.length();
        int len2 = s2.length();

        // Flat array addressed by (row + col * len1); flattening avoids the
        // overhead of a 2-D array and improves performance noticeably.
        int[] cells = new int[(len1 + 1) * (len2 + 1)];
        for (int col = 0; col <= len2; col++)
            cells[col * len1] = col;
        for (int row = 0; row <= len1; row++)
            cells[row] = row;

        for (int i = 0; i < len1; i++) {
            char ch = s1.charAt(i);
            for (int j = 0; j < len2; j++) {
                int substCost = (ch == s2.charAt(j)) ? 0 : 1;
                int deletion = cells[i + ((j + 1) * len1)] + 1;
                int insertion = cells[i + 1 + (j * len1)] + 1;
                int substitution = cells[i + (j * len1)] + substCost;
                cells[i + 1 + ((j + 1) * len1)] =
                        Math.min(deletion, Math.min(insertion, substitution));
            }
        }
        // Bottom-right cell holds the full edit distance.
        return cells[len1 + (len2 * len1)];
    }
}
public class BackedHashMap {
    /**
     * doScheduledInvalidation - determines if background invalidation processing should proceed.
     * Returns true if:
     * 1) Scheduled invalidation feature is not enabled (normal case, as scheduled
     *    invalidation is not widely used)
     * 2) Scheduled invalidation is enabled AND we've received a remove invalidation from
     *    invalidateAll(true) AND the DoRemoteInvalidations property is true (highly
     *    unlikely scenario)
     * 3) Scheduled invalidation is enabled and it is currently one of the 2 scheduled
     *    invalidation hours.
     */
    protected boolean doScheduledInvalidation() {
        boolean scheduledInvalidationEnabled = _smc.getScheduledInvalidation();
        if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINER)) {
            LoggingUtil.SESSION_LOGGER_WAS.entering(methodClassName, methodNames[DO_SCHEDULED_INVALIDATION], "scheduledInvalidationEnabled is " + scheduledInvalidationEnabled);
        }
        // Cases 1 and 2: feature off, or a remote "invalidate all" was received
        // and remote invalidations are enabled -- always invalidate.
        if (!scheduledInvalidationEnabled || (((BackedStore) getIStore()).remoteInvalReceived && SessionManagerConfig.isDoRemoteInvalidations())) {
            // then we always do invalidation
            if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINER)) {
                LoggingUtil.SESSION_LOGGER_WAS.exiting(methodClassName, methodNames[DO_SCHEDULED_INVALIDATION], "returning true because scheduled invalidation is not enabled");
            }
            return true;
        }
        // Case 3: invalidate only during one of the two configured hours
        // (compared against the local wall-clock hour of day).
        Calendar current = Calendar.getInstance();
        int currentHour = current.get(Calendar.HOUR_OF_DAY);
        int scheduledHour1 = _smc.getInvalTime1();
        int scheduledHour2 = _smc.getInvalTime2();
        if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
            LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[DO_SCHEDULED_INVALIDATION], "currentHour:" + currentHour);
            LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[DO_SCHEDULED_INVALIDATION], "scheduled hours are " + scheduledHour1 + " and " + scheduledHour2);
        }
        if (currentHour == scheduledHour1 || currentHour == scheduledHour2) {
            if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINER)) {
                LoggingUtil.SESSION_LOGGER_WAS.exiting(methodClassName, methodNames[DO_SCHEDULED_INVALIDATION], "returning true because current hour matches scheduled hour");
            }
            return true;
        } else {
            if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINER)) {
                LoggingUtil.SESSION_LOGGER_WAS.exiting(methodClassName, methodNames[DO_SCHEDULED_INVALIDATION], "returning false because it is not currently one of the scheduled hours");
            }
            return false;
        }
    }
}
public class HarFileSystem { /** * return the filestatus of files in har archive . * The permission returned are that of the archive * index files . The permissions are not persisted * while creating a hadoop archive . * @ param f the path in har filesystem * @ return filestatus . * @ throws IOException */ @ Override public FileStatus getFileStatus ( Path f ) throws IOException { } }
FileStatus archiveStatus = fs . getFileStatus ( archiveIndex ) ; HarStatus hstatus = getFileHarStatus ( f , archiveStatus ) ; return new FileStatus ( hstatus . isDir ( ) ? 0 : hstatus . getLength ( ) , hstatus . isDir ( ) , ( int ) archiveStatus . getReplication ( ) , archiveStatus . getBlockSize ( ) , hstatus . getModificationTime ( ) , hstatus . getAccessTime ( ) , new FsPermission ( hstatus . getPermission ( ) ) , hstatus . getOwner ( ) , hstatus . getGroup ( ) , makeRelative ( this . uri . getPath ( ) , new Path ( hstatus . getName ( ) ) ) ) ;
public class FreemarkerURLHelper { /** * Return only the path for a given URL * @ param url * @ return */ public String relative ( String url ) { } }
try { final URI uri = absolute ( url ) . build ( ) ; return new URI ( null , null , uri . getPath ( ) , uri . getQuery ( ) , uri . getFragment ( ) ) . toString ( ) ; } catch ( URISyntaxException e ) { throw new IllegalArgumentException ( e ) ; }
public class ClientSessionListener { /** * Attempts to close the client . * @ param hse the session event . */ @ Override public void attributeRemoved ( HttpSessionBindingEvent hse ) { } }
Object possibleClient = hse . getValue ( ) ; closeClient ( possibleClient ) ;
public class autoscaleaction { /** * Use this API to fetch filtered set of autoscaleaction resources . * filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */ public static autoscaleaction [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
autoscaleaction obj = new autoscaleaction ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; autoscaleaction [ ] response = ( autoscaleaction [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class HBaseDataWrapper { /** * Put column . * @ param family * the family * @ param qualifier * the qualifier * @ param qualifierValue * the qualifier value */ private void putColumn ( byte [ ] family , byte [ ] qualifier , byte [ ] qualifierValue ) { } }
this . columns . put ( Bytes . toString ( family ) + ":" + Bytes . toString ( qualifier ) , qualifierValue ) ;
public class ListManagementTermListsImpl { /** * Updates an Term List . * @ param listId List Id of the image list . * @ param contentType The content type . * @ param bodyParameter Schema of the body . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the TermList object */ public Observable < TermList > updateAsync ( String listId , String contentType , BodyModel bodyParameter ) { } }
return updateWithServiceResponseAsync ( listId , contentType , bodyParameter ) . map ( new Func1 < ServiceResponse < TermList > , TermList > ( ) { @ Override public TermList call ( ServiceResponse < TermList > response ) { return response . body ( ) ; } } ) ;
public class Version { /** * @ param content * int [ ] A version as an integer array representing the segments ( where Integer . MAX _ VALUE means " max " ) * @ deprecated DO NOT USE THIS METHOD - FOR BEAN SERIALISER ONLY */ @ Deprecated public void setContent ( int [ ] content ) { } }
if ( content == null || content . length == 0 ) { throw new NumberFormatException ( "A Version must contain at least one digit" ) ; } else { this . version = content ; }
public class TaskToProjectRepository { /** * A simple implementation of the addRelations method which presents the general concept of the method . * It SHOULD NOT be used in production because of possible race condition - production ready code should perform an * atomic operation . * @ param task * @ param projectIds * @ param fieldName */ @ Override public void addRelations ( Task task , Iterable < ObjectId > projectIds , String fieldName ) { } }
List < Project > newProjectList = new LinkedList < > ( ) ; Iterable < Project > projectsToAdd = projectRepository . findAll ( projectIds , null ) ; for ( Project project : projectsToAdd ) { newProjectList . add ( project ) ; } try { if ( PropertyUtils . getProperty ( task , fieldName ) != null ) { Iterable < Project > projects = ( Iterable < Project > ) PropertyUtils . getProperty ( task , fieldName ) ; for ( Project project : projects ) { newProjectList . add ( project ) ; } } } catch ( Exception e ) { throw new RuntimeException ( e ) ; } try { PropertyUtils . setProperty ( task , fieldName , newProjectList ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } taskRepository . save ( task ) ;
public class AbstractWComponent { /** * { @ inheritDoc } */ @ Override public void invokeLater ( final Runnable runnable ) { } }
UIContext uic = UIContextHolder . getCurrent ( ) ; if ( uic != null ) { uic . invokeLater ( runnable ) ; }
public class ToStream { /** * Returns the output format for this serializer . * @ return The output format in use */ public Properties getOutputFormat ( ) { } }
Properties def = new Properties ( ) ; { Set s = getOutputPropDefaultKeys ( ) ; Iterator i = s . iterator ( ) ; while ( i . hasNext ( ) ) { String key = ( String ) i . next ( ) ; String val = getOutputPropertyDefault ( key ) ; def . put ( key , val ) ; } } Properties props = new Properties ( def ) ; { Set s = getOutputPropKeys ( ) ; Iterator i = s . iterator ( ) ; while ( i . hasNext ( ) ) { String key = ( String ) i . next ( ) ; String val = getOutputPropertyNonDefault ( key ) ; if ( val != null ) props . put ( key , val ) ; } } return props ;
public class PreferenceFragment { /** * Creates and returns a listener , which allows to adapt , whether the headers of the * preference ' s dialogs should be shown , or not , when the corresponding setting has been * changed . * @ return The listener , which has been created , as an instance of the type { @ link * Preference . OnPreferenceChangeListener } */ private Preference . OnPreferenceChangeListener createShowDialogHeaderListener ( ) { } }
return new Preference . OnPreferenceChangeListener ( ) { @ Override public boolean onPreferenceChange ( final Preference preference , final Object newValue ) { boolean showDialogHeader = ( Boolean ) newValue ; dialogPreference . showDialogHeader ( showDialogHeader ) ; editTextPreference . showDialogHeader ( showDialogHeader ) ; listPreference . showDialogHeader ( showDialogHeader ) ; multiChoiceListPreference . showDialogHeader ( showDialogHeader ) ; seekBarPreference . showDialogHeader ( showDialogHeader ) ; numberPickerPreference . showDialogHeader ( showDialogHeader ) ; digitPickerPreference . showDialogHeader ( showDialogHeader ) ; resolutionPreference . showDialogHeader ( showDialogHeader ) ; colorPalettePreference . showDialogHeader ( showDialogHeader ) ; return true ; } } ;
public class ReadabilityStatistics { /** * Trims , removes line breaks , multiple spaces and generally cleans text before processing . * @ param strText Text to be transformed * @ return */ private static String cleanText ( String strText ) { } }
strText = HTMLParser . unsafeRemoveAllTags ( strText ) ; strText = strText . toLowerCase ( Locale . ENGLISH ) ; strText = StringCleaner . unifyTerminators ( strText ) ; strText = strText . replaceAll ( " [0-9]+ " , " " ) ; // Remove " words " comprised only of numbers strText = StringCleaner . removeExtraSpaces ( strText ) ; return strText ;
public class ComponentGroupMember {
    /**
     * Asks the given container populator to add this member's component to its
     * underlying container.
     *
     * NOTE(review): controlFactory, buttonConfigurer and previousButtons are
     * unused here — presumably this base implementation ignores them and
     * subclasses make use of them; confirm before removing.
     */
    protected void fill(GroupContainerPopulator containerPopulator, Object controlFactory, CommandButtonConfigurer buttonConfigurer, java.util.List previousButtons) {
        Assert.notNull(containerPopulator, "containerPopulator");
        containerPopulator.add(component);
    }
}
public class SibTr {
    /**
     * Forward an Error message event to all registered
     * <code>TraceEventListener</code>s if permitted by the Suppressor.
     * Each <code>TraceEventListener</code> will then determine whether to log
     * or ignore the forwarded event.
     *
     * @param tc     the non-null <code>TraceComponent</code> the event is associated with
     * @param s      the Suppressor that will determine if this message should be suppressed or not
     * @param msgKey the message key identifying an NLS message for this event. This message takes
     *               substitution parameters and must be in the resource bundle currently associated
     *               with the <code>TraceComponent</code>.
     * @param objs   an <code>Object</code> or array of <code>Objects</code> to include as
     *               substitution text in the message. The number of objects passed must equal the
     *               number of substitution parameters the message expects.
     */
    public static final void error(TraceComponent tc, Suppressor s, String msgKey, Object objs) {
        // Pure delegation; getMEName(null) resolves the current messaging-engine name.
        SibMessage.SuppressableError(s, getMEName(null), tc, msgKey, objs);
    }
}
public class HttpHeader { /** * Get the first header value using the name given . If there are multiple * occurrence , only the first one will be returned as String . * @ param name * @ return the header value . null if not found . */ public String getHeader ( String name ) { } }
Vector < String > v = getHeaders ( name ) ; if ( v == null ) { return null ; } return v . firstElement ( ) ;
public class FnBigInteger { /** * It performs a module operation and returns the value * of ( input mod module ) which is always positive * ( whereas remainder is not ) * @ param module the module * @ return the result of ( input mod module ) */ public final static Function < BigInteger , BigInteger > module ( long module ) { } }
return new Module ( fromNumber ( Long . valueOf ( module ) ) ) ;
public class GeometricTetrahedralEncoderFactory {
    /**
     * Create a stereo encoder for all potential 2D and 3D tetrahedral elements.
     *
     * @param container an atom container
     * @param graph     adjacency list representation of the container
     * @return a new encoder for tetrahedral elements
     */
    @Override
    public StereoEncoder create(IAtomContainer container, int[][] graph) {
        // XXX: this code isn't pretty, the current IAtomContainer
        // implementations are slow for the queries (i.e. looking at connected
        // atoms/bonds) we need to ask to decide if something is a potential
        // tetrahedral centre. We can help out a little with the adjacency list
        // (int[][]) but this doesn't help with the bonds.
        int n = container.getAtomCount();
        List<StereoEncoder> encoders = new ArrayList<StereoEncoder>();
        // Shared elevation cache passed into each geometric() call.
        Map<IAtom, Integer> elevation = new HashMap<IAtom, Integer>(10);
        ATOMS:
        for (int i = 0; i < n; i++) {
            int degree = graph[i].length;
            // ignore those which don't have 3 or 4 neighbors
            if (degree < 3 || degree > 4)
                continue;
            IAtom atom = container.getAtom(i);
            // only create encoders for SP3 hybridized atoms. Atom typing is
            // currently wrong for some atoms (in sulfoxide, for example, the
            // atom type sets SP2) but we don't fuss about that here.
            if (!sp3(atom))
                continue;
            // avoid nitrogen-inversion: a 3-coordinate nitrogen is not a
            // stable stereocentre.
            if (Integer.valueOf(7).equals(atom.getAtomicNumber()) && degree == 3)
                continue;
            // TODO: we could be more strict with our selection; InChI uses C,
            // Si, Ge, P, As, B, Sn, N, P, S, Se but has preconditions for
            // certain cases. An atom or ion N, P, As, S or Se is not stereogenic
            // if it has a terminal H or two terminal neighbors -XHm, -XHn (n+m>0)
            // where X is O, S, Se, Te, or N.
            // XXX: likely bottleneck
            List<IBond> bonds = container.getConnectedBondsList(atom);
            // try to create geometric parity
            GeometricParity geometric = geometric(elevation, bonds, i, graph[i], container);
            if (geometric != null) {
                // add a new encoder if a geometric parity could be determined
                encoders.add(new GeometryEncoder(i, new BasicPermutationParity(graph[i]), geometric));
            }
        }
        // no encoders: replace with the empty encoder
        return encoders.isEmpty() ? StereoEncoder.EMPTY : new MultiStereoEncoder(encoders);
    }
}
public class BucketTreeStack { /** * Removes all of the elements from this list . The list will * be empty after this call returns . */ public void clear ( ) { } }
modCount ++ ; // this does not allocate more memory : // TODO possibly faster : create new empty bucket ? - > test speeds create vs set - to - null for ( int i = 0 ; i < bucketSize ; i ++ ) { bucket [ i ] = null ; } bucketDepth = 0 ; size = 0 ;
public class ServerListSubsetFilter {
    /**
     * Given all the servers, keep only a stable subset of servers to use. This method
     * keeps the current list of subset in use and keeps returning the same list, with exceptions
     * for relatively unhealthy servers, which are defined as the following:
     * <ul>
     * <li>Servers with their concurrent connection count exceeding the client configuration for
     * {@code <clientName>.<nameSpace>.ServerListSubsetFilter.eliminationConnectionThresold} (default is 0)
     * <li>Servers with their failure count exceeding the client configuration for
     * {@code <clientName>.<nameSpace>.ServerListSubsetFilter.eliminationFailureThresold} (default is 0)
     * <li>If the servers evicted above are fewer than the forced eviction percentage as defined by client
     * configuration {@code <clientName>.<nameSpace>.ServerListSubsetFilter.forceEliminatePercent}
     * (default is 10%, or 0.1), the remaining servers will be sorted by their health status and the
     * servers with the worst health status will be forcibly evicted.
     * </ul>
     * After the elimination, new servers will be randomly chosen from the full server pool to keep
     * the size of the subset unchanged.
     */
    @Override
    public List<T> getFilteredListOfServers(List<T> servers) {
        // Start from the zone-affinity-filtered list and the previously chosen subset.
        List<T> zoneAffinityFiltered = super.getFilteredListOfServers(servers);
        Set<T> candidates = Sets.newHashSet(zoneAffinityFiltered);
        Set<T> newSubSet = Sets.newHashSet(currentSubset);
        LoadBalancerStats lbStats = getLoadBalancerStats();
        for (T server : currentSubset) {
            // this server is either down or out of service
            if (!candidates.contains(server)) {
                newSubSet.remove(server);
            } else {
                ServerStats stats = lbStats.getSingleServerStat(server);
                // remove the servers that do not meet health criteria
                if (stats.getActiveRequestsCount() > eliminationConnectionCountThreshold.get() || stats.getFailureCount() > eliminationFailureCountThreshold.get()) {
                    newSubSet.remove(server);
                    // also remove from the general pool to avoid selecting them again
                    candidates.remove(server);
                }
            }
        }
        // Decide how many additional servers must be force-evicted to honor
        // the minimum-elimination percentage (or to shrink to the target size).
        int targetedListSize = sizeProp.get();
        int numEliminated = currentSubset.size() - newSubSet.size();
        int minElimination = (int) (targetedListSize * eliminationPercent.get());
        int numToForceEliminate = 0;
        if (targetedListSize < newSubSet.size()) {
            // size is shrinking
            numToForceEliminate = newSubSet.size() - targetedListSize;
        } else if (minElimination > numEliminated) {
            numToForceEliminate = minElimination - numEliminated;
        }
        if (numToForceEliminate > newSubSet.size()) {
            numToForceEliminate = newSubSet.size();
        }
        if (numToForceEliminate > 0) {
            // Sort by comparative health (this filter is its own Comparator)
            // and evict the worst entries first.
            List<T> sortedSubSet = Lists.newArrayList(newSubSet);
            Collections.sort(sortedSubSet, this);
            List<T> forceEliminated = sortedSubSet.subList(0, numToForceEliminate);
            newSubSet.removeAll(forceEliminated);
            candidates.removeAll(forceEliminated);
        }
        // after forced elimination or elimination of unhealthy instances,
        // the size of the set may be less than the targeted size;
        // then we just randomly add servers from the big pool
        if (newSubSet.size() < targetedListSize) {
            int numToChoose = targetedListSize - newSubSet.size();
            candidates.removeAll(newSubSet);
            if (numToChoose > candidates.size()) {
                // Not enough healthy instances to choose, fallback to use the
                // total server pool
                candidates = Sets.newHashSet(zoneAffinityFiltered);
                candidates.removeAll(newSubSet);
            }
            List<T> chosen = randomChoose(Lists.newArrayList(candidates), numToChoose);
            for (T server : chosen) {
                newSubSet.add(server);
            }
        }
        // Publish the new stable subset for the next invocation.
        currentSubset = newSubSet;
        return Lists.newArrayList(newSubSet);
    }
}
public class CmsHtmlImportThread {
    /**
     * The run method which starts the import process.<p>
     */
    @Override
    public void run() {
        try {
            // do the import
            m_htmlImport.startImport(getReport());
        } catch (Exception e) {
            // Surface the failure in the user-visible report and in the server log;
            // the thread itself must not die with an unhandled exception.
            getReport().println(e);
            if (LOG.isErrorEnabled()) {
                LOG.error(e.getLocalizedMessage());
            }
        }
    }
}
public class ExpressionSpecBuilder { /** * Returns an < code > IfNotExists < / code > object which represents an < a href = * " http : / / docs . aws . amazon . com / amazondynamodb / latest / developerguide / Expressions . Modifying . html " * > if _ not _ exists ( path , operand ) < / a > function call ; used for building * expression . * < pre > * " if _ not _ exists ( path , operand ) – If the item does not contain an attribute * at the specified path , then if _ not _ exists evaluates to operand ; otherwise , * it evaluates to path . You can use this function to avoid overwriting an * attribute already present in the item . " * < / pre > * @ param path * document path to an attribute * @ param defaultValue * default value if the attribute doesn ' t exist * @ return an < code > IfNotExists < / code > object for string ( S ) attribute . */ public static IfNotExistsFunction < S > if_not_exists ( String path , String defaultValue ) { } }
return if_not_exists ( new PathOperand ( path ) , new LiteralOperand ( defaultValue ) ) ;
public class CnvIbnStringToCv { /** * < p > Put String object to ColumnsValues with SQL escaping * for JDBC . < / p > * @ param pAddParam additional params , e . g . version algorithm or * bean source class for generic converter that consume set of subtypes . * @ param pFrom from a String object * @ param pTo to ColumnsValues * @ param pName by a name * @ throws Exception - an exception */ @ Override public final void convert ( final Map < String , Object > pAddParam , final String pFrom , final ColumnsValues pTo , final String pName ) throws Exception { } }
String value ; if ( this . isNeedsToSqlEscape && pFrom != null ) { value = this . srvSqlEscape . escape ( pFrom ) ; } else { value = pFrom ; } pTo . put ( pName , value ) ;
public class BasicStreamReader {
    /**
     * Note: as per StAX 1.0 specs, this method does NOT close the underlying
     * input reader. That is, unless the new StAX2 property
     * {@link org.codehaus.stax2.XMLInputFactory2#P_AUTO_CLOSE_INPUT} is
     * set to true.
     */
    @Override
    public void close() throws XMLStreamException {
        // Idempotent: subsequent close() calls are no-ops.
        if (mParseState != STATE_CLOSED) {
            mParseState = STATE_CLOSED;
            /* Let's see if we should notify the factory that the symbol table
             * has new entries, and may want to reuse this symbol table
             * instead of the current root.
             */
            if (mCurrToken != END_DOCUMENT) {
                mCurrToken = mSecondaryToken = END_DOCUMENT;
                if (mSymbols.isDirty()) {
                    mOwner.updateSymbolTable(mSymbols);
                }
            }
            /* We need to close all the dependent input sources first, and then
             * also call close() on the root input source object; it will only
             * do a real close if that was enabled earlier. The root input
             * source also prevents multiple close() calls for the underlying
             * source, so we need not check that here.
             */
            closeAllInput(false);
            // And finally, we can now recycle low-level (text) buffers.
            mTextBuffer.recycle(true);
        }
    }
}
public class MappedRefresh { /** * Execute our refresh query statement and then update all of the fields in data with the fields from the result . * @ return 1 if we found the object in the table by id or 0 if not . */ public int executeRefresh ( DatabaseConnection databaseConnection , T data , ObjectCache objectCache ) throws SQLException { } }
@ SuppressWarnings ( "unchecked" ) ID id = ( ID ) idField . extractJavaFieldValue ( data ) ; // we don ' t care about the cache here T result = super . execute ( databaseConnection , id , null ) ; if ( result == null ) { return 0 ; } // copy each field from the result into the passed in object for ( FieldType fieldType : resultsFieldTypes ) { if ( fieldType != idField ) { fieldType . assignField ( connectionSource , data , fieldType . extractJavaFieldValue ( result ) , false , objectCache ) ; } } return 1 ;
public class NetworkService { /** * Claim a network for datacenter * @ param dataCenter datacenter reference * @ return OperationFuture wrapper for dataCenter */ public OperationFuture < DataCenter > claim ( DataCenter dataCenter ) { } }
NetworkLink response = networkClient . claim ( dataCenterService . findByRef ( dataCenter ) . getId ( ) ) ; return new OperationFuture < > ( dataCenter , response . getOperationId ( ) , queueClient ) ;
public class ContextAwareReporter {
    /**
     * Removes {@link ContextAwareReporter} records from the {@link RootMetricContext}.
     * This method should be considered irreversible and destructive to the
     * {@link ContextAwareReporter}.
     *
     * @throws IOException declared by {@link java.io.Closeable#close()}
     */
    @Override
    public void close() throws IOException {
        // deregister the notification target first so no events arrive after removal
        RootMetricContext.get().removeNotificationTarget(this.notificationTargetUUID);
        RootMetricContext.get().removeReporter(this);
    }
}
public class SyndEntryImpl {
    /**
     * Returns the entry modules.
     *
     * @return a list of ModuleImpl elements with the entry modules; the Dublin Core module is
     *         always present, so the list is never empty or null.
     */
    @Override
    public List<Module> getModules() {
        // lazily create the backing list on first access
        modules = Lists.createWhenNull(modules);
        // guarantee the Dublin Core module is always available to callers
        if (ModuleUtils.getModule(modules, DCModule.URI) == null) {
            modules.add(new DCModuleImpl());
        }
        return modules;
    }
}
public class CullAboveValueMapping { /** * If type is percentile , find out the estimate of the limit ( th ) percentile in the datapoint sorted values . Then execute the same as type is * value . That means to cull the elements greater than value or pivotValue prerequisite : array must be sorted */ private Double findPivot ( Map < Long , Double > datapoints , Double limit ) { } }
double [ ] doubleValues = new double [ datapoints . size ( ) ] ; int k = 0 ; for ( Map . Entry < Long , Double > entry : datapoints . entrySet ( ) ) { doubleValues [ k ] = entry . getValue ( ) ; k ++ ; } Arrays . sort ( doubleValues ) ; double pivotValue = Double . MAX_VALUE ; try { pivotValue = new Percentile ( ) . evaluate ( doubleValues , ( double ) limit ) ; } catch ( IllegalArgumentException e ) { throw new IllegalArgumentException ( "Please provide a valid percentile number!" ) ; } return pivotValue ;
public class QrHelperFunctions_ZDRM {
    /**
     * Extracts a house holder vector from the column of A and stores it in u.
     *
     * @param A Complex matrix with householder vectors stored in the lower left triangle
     * @param row0 first row in A (implicitly assumed to be r + i0)
     * @param row1 last row + 1 in A
     * @param col Column in A
     * @param u Output array storage; interleaved (real, imaginary) pairs
     * @param offsetU first index in U
     */
    public static void extractHouseholderColumn(ZMatrixRMaj A, int row0, int row1, int col, double u[], int offsetU) {
        // each complex element occupies two doubles (real, imaginary)
        int indexU = (row0 + offsetU) * 2;
        // the leading element of a householder vector is implicitly 1 + 0i
        u[indexU++] = 1;
        u[indexU++] = 0;
        for (int row = row0 + 1; row < row1; row++) {
            int indexA = A.getIndex(row, col);
            // copy the stored real and imaginary components
            u[indexU++] = A.data[indexA];
            u[indexU++] = A.data[indexA + 1];
        }
    }
}
public class ICU {
    /**
     * Returns the appropriate {@code Locale} given a {@code String} of the form returned
     * by {@code toString}. This is very lenient, and doesn't care what's between the underscores:
     * this method can parse strings that {@code Locale.toString} won't produce.
     * Used to remove duplication.
     *
     * @param localeId an ICU-style locale id, optionally carrying an '@'-separated extension part
     * @return the {@link Locale} built from the parsed language/script/region/variant and extensions
     */
    public static Locale localeFromIcuLocaleId(String localeId) {
        // '@' == ULOC_KEYWORD_SEPARATOR_UNICODE (uloc.h).
        final int extensionsIndex = localeId.indexOf('@');
        // default to shared empty collections; only allocate when extensions are present
        Map<Character, String> extensionsMap = Collections.EMPTY_MAP;
        Map<String, String> unicodeKeywordsMap = Collections.EMPTY_MAP;
        Set<String> unicodeAttributeSet = Collections.EMPTY_SET;
        if (extensionsIndex != -1) {
            extensionsMap = new HashMap<Character, String>();
            unicodeKeywordsMap = new HashMap<String, String>();
            unicodeAttributeSet = new HashSet<String>();
            // ICU sends us a semi-colon (ULOC_KEYWORD_ITEM_SEPARATOR) delimited string
            // containing all "keywords" it could parse. An ICU keyword is a key-value pair
            // separated by an "=" (ULOC_KEYWORD_ASSIGN).
            //
            // Each keyword item can be one of three things:
            // - A unicode extension attribute list: In this case the item key is "attribute"
            //   and the value is a hyphen separated list of unicode attributes.
            // - A unicode extension keyword: In this case, the item key will be larger than
            //   1 char in length, and the value will be the unicode extension value.
            // - A BCP-47 extension subtag: In this case, the item key will be exactly one
            //   char in length, and the value will be a sequence of unparsed subtags that
            //   represent the extension.
            //
            // Note that this implies that unicode extension keywords are "promoted" to
            // the same namespace as the top level extension subtags and their values.
            // There can't be any collisions in practice because the BCP-47 spec imposes
            // restrictions on their lengths.
            final String extensionsString = localeId.substring(extensionsIndex + 1);
            final String[] extensions = extensionsString.split(";");
            for (String extension : extensions) {
                // This is the special key for the unicode attributes
                if (extension.startsWith("attribute=")) {
                    String unicodeAttributeValues = extension.substring("attribute=".length());
                    for (String unicodeAttribute : unicodeAttributeValues.split("-")) {
                        unicodeAttributeSet.add(unicodeAttribute);
                    }
                } else {
                    final int separatorIndex = extension.indexOf('=');
                    if (separatorIndex == 1) {
                        // This is a BCP-47 extension subtag (single-char key).
                        final String value = extension.substring(2);
                        final char extensionId = extension.charAt(0);
                        extensionsMap.put(extensionId, value);
                    } else {
                        // This is a unicode extension keyword (multi-char key).
                        unicodeKeywordsMap.put(extension.substring(0, separatorIndex), extension.substring(separatorIndex + 1));
                    }
                }
            }
        }
        // slots filled by parseLangScriptRegionAndVariants via the IDX_* constants
        final String[] outputArray = new String[] { "", "", "", "" };
        if (extensionsIndex == -1) {
            parseLangScriptRegionAndVariants(localeId, outputArray);
        } else {
            // parse only the part before the '@' extension separator
            parseLangScriptRegionAndVariants(localeId.substring(0, extensionsIndex), outputArray);
        }
        Locale.Builder builder = new Locale.Builder();
        builder.setLanguage(outputArray[IDX_LANGUAGE]);
        builder.setRegion(outputArray[IDX_REGION]);
        builder.setVariant(outputArray[IDX_VARIANT]);
        builder.setScript(outputArray[IDX_SCRIPT]);
        for (String attribute : unicodeAttributeSet) {
            builder.addUnicodeLocaleAttribute(attribute);
        }
        for (Entry<String, String> keyword : unicodeKeywordsMap.entrySet()) {
            builder.setUnicodeLocaleKeyword(keyword.getKey(), keyword.getValue());
        }
        for (Entry<Character, String> extension : extensionsMap.entrySet()) {
            builder.setExtension(extension.getKey(), extension.getValue());
        }
        return builder.build();
    }
}
public class VotingLexiconInduction {
    /**
     * Creates a CCG parser given parameters and a lexicon.
     *
     * @param factory factory that builds a parametric parser from a lexicon
     * @param currentParameters previously learned parameters, or null to start from scratch
     * @param currentLexicon lexicon entries the parser is built over
     * @return a {@code ParserInfo} bundling the lexicon, family, parameters, and built parser
     */
    private static ParserInfo createParser(LexiconInductionCcgParserFactory factory, SufficientStatistics currentParameters, Collection<LexiconEntry> currentLexicon) {
        ParametricCcgParser family = factory.getParametricCcgParser(currentLexicon);
        SufficientStatistics newParameters = family.getNewSufficientStatistics();
        // carry over any previously learned parameters into the new statistics vector
        if (currentParameters != null) {
            newParameters.transferParameters(currentParameters);
        }
        return new ParserInfo(currentLexicon, family, newParameters, family.getModelFromParameters(newParameters));
    }
}
public class OWLAtomTypeMappingReader {
    /**
     * Reads the atom type mappings from the data file.
     *
     * @return a Map with atom type mappings; never null — an empty map is returned if a reading
     *         error occurred.
     */
    public Map<String, String> readAtomTypeMappings() {
        Map<String, String> mappings = null;
        try {
            // validation is unnecessary for this bundled data file and slows parsing down
            parser.setFeature("http://xml.org/sax/features/validation", false);
            logger.info("Deactivated validation");
        } catch (SAXException exception) {
            // non-fatal: continue with validation still enabled
            logger.warn("Cannot deactivate validation: ", exception.getMessage());
            logger.debug(exception);
        }
        OWLAtomTypeMappingHandler handler = new OWLAtomTypeMappingHandler();
        parser.setContentHandler(handler);
        try {
            parser.parse(new InputSource(input));
            mappings = handler.getAtomTypeMappings();
        } catch (IOException exception) {
            logger.error("IOException: ", exception.getMessage());
            logger.debug(exception);
        } catch (SAXException saxe) {
            logger.error("SAXException: ", saxe.getMessage());
            logger.debug(saxe);
        }
        // any parse failure leaves mappings null; fall back to an empty map
        return mappings == null ? new HashMap<String, String>() : mappings;
    }
}
public class FileInputSourceWatcher {
    /**
     * Adds the given file to the set of watched input sources. Registers the file's parent
     * directory with the watch service the first time a file from that directory is seen.
     *
     * @param inputSource path of the file to watch
     */
    @Override
    public void watchInputSource(String inputSource) {
        File file = new File(inputSource);
        // silently skip (with a warning) files that do not exist
        if (!file.exists()) {
            log.warn(MessageManager.getMessage("filewatcher.fileNotFound"), file.getAbsolutePath());
            return;
        }
        // already watching this file: nothing to do
        if (watchingFileMap.containsKey(file.getAbsolutePath())) {
            return;
        }
        log.info(MessageManager.getMessage("filewatcher.addFile"), file.getAbsolutePath());
        // remember the current modification time so later changes can be detected
        watchingFileMap.put(file.getAbsolutePath(), new InputSource(inputSource, file.lastModified()));
        File dir = file.getParentFile();
        // the containing directory is only registered once
        if (watchingDirSet.contains(dir.getAbsolutePath())) {
            return;
        }
        log.info(MessageManager.getMessage("filewatcher.addDirectory"), dir.getAbsolutePath());
        watchingDirSet.add(dir.getAbsolutePath());
        Path dirPath = dir.toPath();
        try {
            if (watcher == null) {
                // TODO: unify the file-watching mechanism
                watcher = FileSystems.getDefault().newWatchService();
            }
            WatchKey watchKey = dirPath.register(watcher, StandardWatchEventKinds.ENTRY_MODIFY);
            // keep the key -> directory mapping so events can be resolved back to a path
            pathMap.put(watchKey, dirPath);
        } catch (IOException e) {
            throw new IllegalStateException(e);
        }
    }
}
public class DomConfigurationFactory { /** * Parse prefix or suffix . * @ param dc DOM - document . * @ param tagname tag name . * @ return template . */ private Template parseFix ( Document dc , String tagname ) { } }
Template fix ; NodeList prefixElementList = dc . getElementsByTagNameNS ( SCHEMA_LOCATION , tagname ) ; if ( prefixElementList . getLength ( ) > 0 ) { fix = parseTemplate ( prefixElementList . item ( 0 ) ) ; } else { fix = new Template ( ) ; } return fix ;
public class ServerId {
    /**
     * Checks whether the specified value is a date according to the old format of the
     * {@link CoreProperties#SERVER_ID}, i.e. exactly the 14 digits of {@code yyyyMMddHHmmss}.
     *
     * @param value candidate server id, may be null
     * @return true if the value is a valid date in the old format
     */
    private static boolean isDate(String value) {
        // the old format is exactly 14 characters; this also rejects trailing garbage,
        // which a bare SimpleDateFormat.parse() would silently ignore
        if (value == null || value.length() != 14) {
            return false;
        }
        SimpleDateFormat format = new SimpleDateFormat("yyyyMMddHHmmss");
        // lenient parsing would accept impossible dates (e.g. month 13) by rolling them over
        format.setLenient(false);
        try {
            format.parse(value);
            return true;
        } catch (ParseException e) {
            return false;
        }
    }
}
public class ProxySelector {
    /**
     * Sets (or unsets) the system-wide proxy selector.
     * Note: non-standard protocol handlers may ignore this setting.
     *
     * @param ps The HTTP proxy selector, or <code>null</code> to unset the proxy selector.
     * @throws SecurityException If a security manager has been installed and it denies
     *         {@link NetPermission}<tt>("setProxySelector")</tt>
     * @see #getDefault()
     * @since 1.5
     */
    public static void setDefault(ProxySelector ps) {
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            // NOTE(review): the permission check is commented out, so the documented
            // SecurityException is never thrown even when a SecurityManager is installed —
            // confirm this relaxation is intentional.
            // sm.checkPermission(SecurityConstants.SET_PROXYSELECTOR_PERMISSION);
        }
        theProxySelector = ps;
    }
}
public class Node { /** * Provides lookup of elements by name . * @ param name the name of interest * @ return the nodes matching name */ private NodeList getByName ( String name ) { } }
NodeList answer = new NodeList ( ) ; for ( Object child : children ( ) ) { if ( child instanceof Node ) { Node childNode = ( Node ) child ; Object childNodeName = childNode . name ( ) ; if ( childNodeName instanceof QName ) { QName qn = ( QName ) childNodeName ; if ( qn . matches ( name ) ) { answer . add ( childNode ) ; } } else if ( name . equals ( childNodeName ) ) { answer . add ( childNode ) ; } } } return answer ;
public class LocalResourceIndex {
    /**
     * Dispatches a Wayback query to the capture- or url-level query implementation based on the
     * request type, tagging the results with the request type that produced them.
     *
     * @see org.archive.wayback.ResourceIndex#query(org.archive.wayback.core.WaybackRequest)
     */
    public SearchResults query(WaybackRequest wbRequest) throws ResourceIndexNotAvailableException, ResourceNotInArchiveException, BadQueryException, AccessControlException {
        SearchResults results = null; // return value placeholder
        if (wbRequest.isReplayRequest()) {
            // replay requests are answered with capture-level results
            results = doCaptureQuery(wbRequest, TYPE_REPLAY);
            results.putFilter(WaybackRequest.REQUEST_TYPE, WaybackRequest.REQUEST_REPLAY_QUERY);
        } else if (wbRequest.isCaptureQueryRequest()) {
            results = doCaptureQuery(wbRequest, TYPE_CAPTURE);
            results.putFilter(WaybackRequest.REQUEST_TYPE, WaybackRequest.REQUEST_CAPTURE_QUERY);
        } else if (wbRequest.isUrlQueryRequest()) {
            results = doUrlQuery(wbRequest);
            results.putFilter(WaybackRequest.REQUEST_TYPE, WaybackRequest.REQUEST_URL_QUERY);
        } else {
            // unrecognized request type: fail fast with an explicit message
            throw new BadQueryException("Unknown query type, must be " + WaybackRequest.REQUEST_REPLAY_QUERY + ", " + WaybackRequest.REQUEST_CAPTURE_QUERY + ", or " + WaybackRequest.REQUEST_URL_QUERY);
        }
        return results;
    }
}
public class CmsToolManager {
    /**
     * Returns the navigation bar html code for the given tool path.<p>
     *
     * The bar is built from the current tool back to the base tool: the current tool is rendered
     * as plain text and each ancestor as a clickable button, prepended in front of it.
     *
     * @param toolPath the path
     * @param wp the jsp page
     * @return the html code
     */
    public String generateNavBar(String toolPath, CmsWorkplace wp) {
        // at the base tool there is no path to show
        if (toolPath.equals(getBaseToolPath(wp))) {
            return "<div class='pathbar'>&nbsp;</div>\n";
        }
        CmsTool adminTool = resolveAdminTool(getCurrentRoot(wp).getKey(), toolPath);
        // the current (rightmost) entry is not a link
        String html = A_CmsHtmlIconButton.defaultButtonHtml(CmsHtmlIconButtonStyleEnum.SMALL_ICON_TEXT, "nav" + adminTool.getId(), adminTool.getHandler().getName(), null, false, null, null, null);
        String parent = toolPath;
        // walk up the tool hierarchy until the base path is reached
        while (!parent.equals(getBaseToolPath(wp))) {
            parent = getParent(wp, parent);
            adminTool = resolveAdminTool(getCurrentRoot(wp).getKey(), parent);
            // unresolved ancestor: stop climbing
            if (adminTool == null) {
                break;
            }
            String id = "nav" + adminTool.getId();
            String link = linkForToolPath(wp.getJsp(), parent, adminTool.getHandler().getParameters(wp));
            String onClic = "openPage('" + link + "');";
            // ancestors render as enabled buttons that navigate on click
            String buttonHtml = A_CmsHtmlIconButton.defaultButtonHtml(CmsHtmlIconButtonStyleEnum.SMALL_ICON_TEXT, id, adminTool.getHandler().getName(), adminTool.getHandler().getHelpText(), true, null, null, onClic);
            // prepend so ancestors appear left of their descendants
            html = "<span>" + buttonHtml + NAVBAR_SEPARATOR + "</span>" + html;
        }
        // macros are resolved both before and after decoding, so macros produced by
        // the decode step are expanded as well
        html = CmsToolMacroResolver.resolveMacros(html, wp);
        html = CmsEncoder.decode(html);
        html = CmsToolMacroResolver.resolveMacros(html, wp);
        html = "<div class='pathbar'>\n" + html + "</div>\n";
        return html;
    }
}
public class AgentCoreEngine {
    /**
     * Downloads the inventory configuration for the declared type version from the server and
     * overlays it on top of the initial configuration.
     *
     * This method doesn't use fields that aren't initialized yet. Pass everything through
     * parameters.
     *
     * @param hcb HTTP client builder used to build the request and client
     * @param initialConfig configuration the download is overlaid onto
     * @return the overlaid configuration, or {@code initialConfig} when nothing was downloaded
     * @throws Exception if the download fails and a successful download is required (i.e. no
     *         inventory metadata exists yet and the agent is not in metrics-only mode)
     */
    private AgentCoreEngineConfiguration downloadAndOverlayConfiguration(HttpClientBuilder hcb, AgentCoreEngineConfiguration initialConfig) throws Exception {
        AgentCoreEngineConfiguration newOverlaidConfiguration = initialConfig;
        String typeVersionToDownload = initialConfig.getGlobalConfiguration().getTypeVersion();
        // If there is no type version declared, download and overlay nothing.
        // If we have no inventory metadata at all, and we are not in metrics only mode
        // then we are required to download the config successfully;
        // an exception is thrown if we cannot download and overlay the config in that case.
        // If we already have some inventory metadata already, then we will not abort with an exception
        // on download/overlay failure - we'll just continue with the old inventory metadata.
        boolean requireDownload = false;
        Exception error = null;
        if (typeVersionToDownload != null) {
            // download is mandatory only when both DMR and JMX type sets are empty/disabled
            if (initialConfig.getDmrConfiguration().getTypeSets().isDisabledOrEmpty() && initialConfig.getJmxConfiguration().getTypeSets().isDisabledOrEmpty()) {
                requireDownload = !isMetricsOnlyMode(initialConfig);
            }
            OkHttpClient httpclient = hcb.getHttpClient();
            String url = Util.getContextUrlString(initialConfig.getStorageAdapter().getUrl(), initialConfig.getStorageAdapter().getInventoryContext()).append("get-inventory-config").append("/").append(typeVersionToDownload).toString();
            Request request = hcb.buildGetRequest(url, null);
            Response response = null;
            try {
                log.debugf("Downloading inventory configuration from server: %s", url);
                response = httpclient.newCall(request).execute();
                if (response.code() != 200) {
                    // remember the failure; whether it is fatal is decided below
                    error = new Exception(String.format("Cannot download inventory configuration [%s]: %d/%s", typeVersionToDownload, response.code(), response.message()));
                } else {
                    newOverlaidConfiguration = overlayConfiguration(response.body().byteStream());
                }
            } catch (Exception e) {
                error = new Exception(String.format("Failed to download and overlay inventory configuration [%s]", typeVersionToDownload), e);
            } finally {
                // always release the response body to avoid leaking the connection
                if (response != null) {
                    response.body().close();
                }
            }
        } else {
            log.debugf("No inventory type version declared; no configuration will be downloaded. " + "Original configuration will be used as-is.");
        }
        if (error != null) {
            if (requireDownload) {
                // no previous metadata to fall back on: fail startup
                throw error;
            } else {
                log.errorf(error, "Will continue with the previous inventory configuration.");
            }
        }
        return newOverlaidConfiguration;
    }
}
public class TypedLinkFacet { /** * A set of key - value pairs associated with the typed link . Typed link attributes are used when you have data values * that are related to the link itself , and not to one of the two objects being linked . Identity attributes also * serve to distinguish the link from others of the same type between the same objects . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setAttributes ( java . util . Collection ) } or { @ link # withAttributes ( java . util . Collection ) } if you want to * override the existing values . * @ param attributes * A set of key - value pairs associated with the typed link . Typed link attributes are used when you have data * values that are related to the link itself , and not to one of the two objects being linked . Identity * attributes also serve to distinguish the link from others of the same type between the same objects . * @ return Returns a reference to this object so that method calls can be chained together . */ public TypedLinkFacet withAttributes ( TypedLinkAttributeDefinition ... attributes ) { } }
if ( this . attributes == null ) { setAttributes ( new java . util . ArrayList < TypedLinkAttributeDefinition > ( attributes . length ) ) ; } for ( TypedLinkAttributeDefinition ele : attributes ) { this . attributes . add ( ele ) ; } return this ;
public class RelatedResource {
    /**
     * Additional information about the resource.
     *
     * @param additionalInfo Additional information about the resource.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RelatedResource withAdditionalInfo(java.util.Map<String, String> additionalInfo) {
        // delegate to the setter so any copying/validation it performs is preserved
        setAdditionalInfo(additionalInfo);
        return this;
    }
}
public class DataSourceTypeImpl { /** * Returns the < code > max - pool - size < / code > element * @ return the node defined for the element < code > max - pool - size < / code > */ public Integer getMaxPoolSize ( ) { } }
if ( childNode . getTextValueForPatternName ( "max-pool-size" ) != null && ! childNode . getTextValueForPatternName ( "max-pool-size" ) . equals ( "null" ) ) { return Integer . valueOf ( childNode . getTextValueForPatternName ( "max-pool-size" ) ) ; } return null ;
public class Strings { /** * Return true if the given { @ link String } instances are equal when outer whitespace is removed , or if both * { @ link String } instances are null . ( E . g . : " hello world " is equal to " hello world " ) */ public static boolean areEqualTrimmed ( final String left , final String right ) { } }
if ( ( left != null ) && ( right != null ) ) { return left . trim ( ) . equals ( right . trim ( ) ) ; } return areEqual ( left , right ) ;
public class AntiAffinityService {
    /**
     * Create Anti-affinity policy.
     *
     * @param createConfig policy config
     * @return OperationFuture wrapper for AntiAffinityPolicy
     */
    public OperationFuture<AntiAffinityPolicy> create(AntiAffinityPolicyConfig createConfig) {
        // resolve the datacenter reference to its id before issuing the create request
        AntiAffinityPolicyMetadata policy = client.createAntiAffinityPolicy(new AntiAffinityPolicyRequest().name(createConfig.getName()).location(dataCenterService.findByRef(createConfig.getDataCenter()).getId()));
        // policy creation completes synchronously, hence the no-waiting job future
        return new OperationFuture<>(AntiAffinityPolicy.refById(policy.getId()), new NoWaitingJobFuture());
    }
}
public class QueryCriteriaUtil {
    /**
     * This method is necessary because the AND operator in SQL has precedence over the OR operator.
     * That means that intersecting criteria should always be grouped together (and processed first,
     * basically), which is essentially what this method does.
     *
     * @param query The {@link CriteriaQuery} that is being built
     * @param intersectingCriteriaList The list of intersecting (ANDed) {@link QueryCriteria}
     * @param builder The {@link CriteriaBuilder} builder instance
     * @param queryType The (persistent {@link Entity}) {@link Class} that we are querying on
     * @return A {@link Predicate} created on the basis of the given {@link List} of {@link QueryCriteria}
     */
    private <R, T> Predicate createPredicateFromIntersectingCriteriaList(CriteriaQuery<R> query, CriteriaBuilder builder, Class<T> queryType, List<QueryCriteria> intersectingCriteriaList, QueryWhere queryWhere) {
        // merge adjacent range criteria first so each range becomes a single predicate
        combineIntersectingRangeCriteria(intersectingCriteriaList);
        assert intersectingCriteriaList.size() > 0 : "Empty list of currently intersecting criteria!";
        Predicate[] intersectingPredicates = new Predicate[intersectingCriteriaList.size()];
        int i = 0;
        for (QueryCriteria intersectingCriteria : intersectingCriteriaList) {
            Predicate predicate = createPredicateFromSingleOrGroupCriteria(query, builder, queryType, intersectingCriteria, queryWhere);
            assert predicate != null : "Null predicate when evaluating individual intersecting criteria [" + intersectingCriteria.toString() + "]";
            intersectingPredicates[i++] = predicate;
        }
        // a single predicate needs no AND wrapper
        Predicate predicate;
        if (intersectingPredicates.length > 1) {
            predicate = builder.and(intersectingPredicates);
        } else {
            predicate = intersectingPredicates[0];
        }
        return predicate;
    }
}
public class Get {
    /**
     * Retrieves the number of columns in the element. If the element isn't present or a table,
     * the returned response will be negative one.
     *
     * @return Integer: the number of columns the table has (th cells plus td cells in the row)
     */
    public int numOfTableColumns() {
        Element rows = tableRows();
        // not a table (or absent): signal with -1
        if (rows == null) {
            return -1;
        }
        Element thCells = rows.findChild(app.newElement(Locator.TAGNAME, "th"));
        Element tdCells = rows.findChild(app.newElement(Locator.TAGNAME, "td"));
        // NOTE(review): assumes findChild never returns null (a zero match-count element
        // instead) — confirm, otherwise this can throw a NullPointerException
        return thCells.get().matchCount() + tdCells.get().matchCount();
    }
}
public class InferenceContext {
    /**
     * Add a new inference var to this inference context.
     *
     * @param t the type variable to start tracking as an inference variable
     */
    void addVar(TypeVar t) {
        // track both the undetermined form and the original variable, in matching positions
        this.undetvars = this.undetvars.prepend(infer.fromTypeVarFun.apply(t));
        this.inferencevars = this.inferencevars.prepend(t);
    }
}
public class SnowflakeChunkDownloader {
    /**
     * Terminate the downloader: logs lifetime statistics, stops the worker pool, and releases
     * chunk buffers. Idempotent — only the first call performs the shutdown.
     *
     * @return chunk downloader metrics collected over instance lifetime, or null if the
     *         downloader was already terminated
     */
    public Metrics terminate() {
        if (!terminated) {
            logger.debug("Total milliseconds waiting for chunks: {}, " + "Total memory used: {}, total download time: {} millisec, " + "total parsing time: {} milliseconds, total chunks: {}", numberMillisWaitingForChunks, Runtime.getRuntime().totalMemory(), totalMillisDownloadingChunks.get(), totalMillisParsingChunks.get(), chunks.size());
            // stop any in-flight downloads immediately
            if (executor != null) {
                executor.shutdownNow();
                executor = null;
            }
            // release references so chunk memory can be reclaimed
            chunks = null;
            chunkDataCache.clear();
            terminated = true;
            return new Metrics();
        }
        return null;
    }
}
public class RtmpClient { /** * Release waiting threads if we fail for some reason */ private void releaseCallbacks ( Exception ex ) { } }
synchronized ( callbacks ) { for ( Map . Entry < Integer , InvokeCallback > cb : callbacks . entrySet ( ) ) { cb . getValue ( ) . release ( ex ) ; } }
public class TimeFragment {
    /**
     * Workaround for bug in Android TimePicker where the onTimeChanged() callback is not invoked
     * when the user toggles between AM/PM. But we need to be able to detect this in order to
     * dynamically update the tab title properly when the user toggles between AM/PM.
     * Registered as Issue 18982:
     * https://code.google.com/p/android/issues/detail?id=18982
     */
    private void fixTimePickerBug18982() {
        // NOTE(review): relies on TimePicker's internal layout (child 0, grandchild 3 being the
        // AM/PM NumberPicker) — this is version-dependent; the instanceof guard below is the
        // only protection if the layout changes.
        View amPmView = ((ViewGroup) mTimePicker.getChildAt(0)).getChildAt(3);
        if (amPmView instanceof NumberPicker) {
            ((NumberPicker) amPmView).setOnValueChangedListener(new OnValueChangeListener() {
                @Override
                public void onValueChange(NumberPicker picker, int oldVal, int newVal) {
                    if (picker.getValue() == 1) // PM
                    {
                        // shift morning hours into the afternoon range
                        if (mTimePicker.getCurrentHour() < 12) mTimePicker.setCurrentHour(mTimePicker.getCurrentHour() + 12);
                    } else // AM
                    {
                        // shift afternoon hours back into the morning range
                        if (mTimePicker.getCurrentHour() >= 12) mTimePicker.setCurrentHour(mTimePicker.getCurrentHour() - 12);
                    }
                    // manually fire the change notification the platform bug swallows
                    mCallback.onTimeChanged(mTimePicker.getCurrentHour(), mTimePicker.getCurrentMinute());
                }
            });
        }
    }
}
public class ItemsAuxiliary {
    /**
     * Get the estimated quantile given a fractional rank.
     *
     * @param fRank the fractional rank where: 0 &le; fRank &le; 1.0.
     * @return the estimated quantile, or null when the sketch is empty
     */
    T getQuantile(final double fRank) {
        checkFractionalRankBounds(fRank);
        // empty sketch: no quantile can be estimated
        if (auxN_ <= 0) {
            return null;
        }
        // translate the fractional rank into an absolute position, then answer positionally
        final long pos = QuantilesHelper.posOfPhi(fRank, auxN_);
        return approximatelyAnswerPositionalQuery(pos);
    }
}
public class EntitlementValueMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param entitlementValue the value to marshal; must not be null
     * @param protocolMarshaller target marshaller the fields are written to
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(EntitlementValue entitlementValue, ProtocolMarshaller protocolMarshaller) {
        if (entitlementValue == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // each field is written under its generated binding; order matches the model
            protocolMarshaller.marshall(entitlementValue.getIntegerValue(), INTEGERVALUE_BINDING);
            protocolMarshaller.marshall(entitlementValue.getDoubleValue(), DOUBLEVALUE_BINDING);
            protocolMarshaller.marshall(entitlementValue.getBooleanValue(), BOOLEANVALUE_BINDING);
            protocolMarshaller.marshall(entitlementValue.getStringValue(), STRINGVALUE_BINDING);
        } catch (Exception e) {
            // wrap with context while preserving the original cause
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class EndpointBuilder { /** * Adds classes to a list of additional interfaces implemented by an XML - RPC * server . Interface methods should be annotated by { @ link XRMethod } . * @ param additional * interfaces implemented by XML - RPC server * @ return this for method chaining */ public EndpointBuilder < T > export ( Class < ? > ... additional ) { } }
this . additionalInterfaces . addAll ( Arrays . asList ( additional ) ) ; return this ;
public class UriBasedVehicleInterfaceMixin { /** * Attempt to construct an instance of URI from the given String . * @ param uriString the String representation of the possible URI . * @ throws DataSourceException if the parameter is not a valid URI . */ public static URI createUri ( String uriString ) throws DataSourceException { } }
if ( uriString == null ) { throw new DataSourceResourceException ( "URI string is null" ) ; } try { return new URI ( uriString ) ; } catch ( URISyntaxException e ) { throw new DataSourceResourceException ( "Not a valid URI: " + uriString , e ) ; }
public class SVMLightRecordWriter {
    /**
     * Set DataVec configuration.
     *
     * @param conf the configuration to read writer options from
     */
    @Override
    public void setConf(Configuration conf) {
        super.setConf(conf);
        featureFirstColumn = conf.getInt(FEATURE_FIRST_COLUMN, 0);
        hasLabel = conf.getBoolean(HAS_LABELS, true);
        multilabel = conf.getBoolean(MULTILABEL, false);
        labelFirstColumn = conf.getInt(LABEL_FIRST_COLUMN, -1);
        labelLastColumn = conf.getInt(LABEL_LAST_COLUMN, -1);
        // read order matters: when FEATURE_LAST_COLUMN is unset, features default to ending
        // just before the first label column (if labels are at the end of the row)
        featureLastColumn = conf.getInt(FEATURE_LAST_COLUMN, labelFirstColumn > 0 ? labelFirstColumn - 1 : -1);
        zeroBasedIndexing = conf.getBoolean(ZERO_BASED_INDEXING, false);
        zeroBasedLabelIndexing = conf.getBoolean(ZERO_BASED_LABEL_INDEXING, false);
    }
}
public class ToolDescriptor { /** * Configured instances of { @ link ToolInstallation } s . * @ return read - only list of installations ; * can be empty but never null . */ @ SuppressWarnings ( "unchecked" ) public T [ ] getInstallations ( ) { } }
if ( installations != null ) return installations . clone ( ) ; Type bt = Types . getBaseClass ( getClass ( ) , ToolDescriptor . class ) ; if ( bt instanceof ParameterizedType ) { ParameterizedType pt = ( ParameterizedType ) bt ; // this ' t ' is the closest approximation of T of Descriptor < T > . Class t = Types . erasure ( pt . getActualTypeArguments ( ) [ 0 ] ) ; return ( T [ ] ) Array . newInstance ( t , 0 ) ; } else { // can ' t infer the type . Fallback return emptyArray_unsafeCast ( ) ; }
public class VakyarthaDependencyTree {
    /**
     * Get the text which is overlapped by the SNode.
     *
     * @param node the node whose overlapped text span is extracted
     * @param input visualizer input providing access to the document graph
     * @return the covered text from the first overlapped data source; empty string if there are
     *         no token overlapped by the node.
     */
    private String getText(SNode node, VisualizerInput input) {
        SDocumentGraph sDocumentGraph = input.getSResult().getDocumentGraph();
        List<DataSourceSequence> sequences = sDocumentGraph.getOverlappedDataSourceSequence(node, SALT_TYPE.STEXT_OVERLAPPING_RELATION);
        // only the first overlapped sequence is used for the text span
        if (sequences != null && sequences.size() > 0) {
            return ((STextualDS) sequences.get(0).getDataSource()).getText().substring(sequences.get(0).getStart().intValue(), sequences.get(0).getEnd().intValue());
        }
        return "";
    }
}
public class Smb2OplockBreakNotification {
    /**
     * Decodes an SMB2 OPLOCK_BREAK notification body:
     * StructureSize(2) + OplockLevel(1) + Reserved(1) + Reserved2(4) + FileId(16) = 24 bytes.
     *
     * {@inheritDoc}
     *
     * @see jcifs.internal.smb2.ServerMessageBlock2#readBytesWireFormat(byte[], int)
     */
    @Override
    protected int readBytesWireFormat(byte[] buffer, int bufferIndex) throws SMBProtocolDecodingException {
        int start = bufferIndex;
        int structureSize = SMBUtil.readInt2(buffer, bufferIndex);
        // the spec fixes StructureSize at 24 for this notification
        if (structureSize != 24) {
            throw new SMBProtocolDecodingException("Expected structureSize = 24");
        }
        this.oplockLevel = buffer[bufferIndex + 2];
        bufferIndex += 4; // StructureSize (2) + OplockLevel (1) + Reserved (1)
        bufferIndex += 4; // Reserved2
        this.fileId = new byte[16];
        System.arraycopy(buffer, bufferIndex, this.fileId, 0, 16);
        bufferIndex += 16;
        // number of bytes consumed
        return bufferIndex - start;
    }
}
public class NestedClassWriterImpl { /** * { @ inheritDoc } */ public void addSummaryLabel ( Content memberTree ) { } }
Content label = HtmlTree . HEADING ( HtmlConstants . SUMMARY_HEADING , writer . getResource ( "doclet.Nested_Class_Summary" ) ) ; memberTree . addContent ( label ) ;
public class MercatorUtils { /** * < p > Code copied from : http : / / wiki . openstreetmap . org / wiki / Slippy _ map _ tilenames # Lon . . 2Flat . _ to _ tile _ numbers < / p > * 20131128 : corrections added to correct going over or under max / min extent * - was causing http 400 Bad Requests * - updated openstreetmap wiki * @ param zoom * @ return [ zoom , xtile , ytile _ osm ] */ public static int [ ] getTileNumber ( final double lat , final double lon , final int zoom ) { } }
int xtile = ( int ) Math . floor ( ( lon + 180 ) / 360 * ( 1 << zoom ) ) ; int ytile_osm = ( int ) Math . floor ( ( 1 - Math . log ( Math . tan ( Math . toRadians ( lat ) ) + 1 / Math . cos ( Math . toRadians ( lat ) ) ) / Math . PI ) / 2 * ( 1 << zoom ) ) ; if ( xtile < 0 ) xtile = 0 ; if ( xtile >= ( 1 << zoom ) ) xtile = ( ( 1 << zoom ) - 1 ) ; if ( ytile_osm < 0 ) ytile_osm = 0 ; if ( ytile_osm >= ( 1 << zoom ) ) ytile_osm = ( ( 1 << zoom ) - 1 ) ; return new int [ ] { zoom , xtile , ytile_osm } ;
public class Common { /** * General method for set / remove json value . Recursively called for each element of path , supporting regex in array indices * @ param object JSONObject to modify * @ param name JSON key to set / remove * @ param value value to set ( null for remove ) * @ param path Path to the value within the object . Access array elements by including regex index in the path . Ex : root . objectArray [ 0 ] or root . objectArray [ \ d ] * @ param isSet true for set , false for remove * @ return The modified JSONObject * @ throws Exception */ private static JSONObject process_json_value ( JSONObject object , String name , String value , String path , Boolean isSet ) throws Exception { } }
if ( ! path . equals ( "" ) ) { String [ ] pathElements = path . split ( "\\." ) ; String remainingPath = pathElements . length > 1 ? path . replace ( pathElements [ 0 ] + "." , "" ) : "" ; String element = pathElements [ 0 ] ; if ( element . contains ( "[" ) ) { // array indexer - regular expression int startIndex = element . indexOf ( "[" ) ; int endIndex = element . indexOf ( "]" ) ; if ( startIndex == - 1 || endIndex == - 1 || endIndex <= startIndex ) { throw new InvalidParameterException ( "Invalid array indexer " + element ) ; } String indexerPattern = element . substring ( startIndex + 1 , endIndex ) ; String arrayName = element . substring ( 0 , startIndex ) ; JSONArray array = object . getJSONArray ( arrayName ) ; for ( int i = 0 ; i < array . length ( ) ; ++ i ) { Pattern pattern = Pattern . compile ( indexerPattern ) ; Matcher matcher = pattern . matcher ( String . valueOf ( i ) ) ; if ( matcher . find ( ) ) { array . put ( i , process_json_value ( array . getJSONObject ( i ) , name , value , remainingPath , isSet ) ) ; } } object . put ( arrayName , array ) ; return object ; } else { process_json_value ( object . getJSONObject ( element ) , name , value , remainingPath , isSet ) ; return object ; } } else { if ( isSet ) { // test & set if value is valid JSONArray or JSONObject try { JSONArray jsonVal = new JSONArray ( value ) ; object . put ( name , jsonVal ) ; return object ; } catch ( Exception e ) { } try { JSONObject jsonVal = new JSONObject ( value ) ; object . put ( name , jsonVal ) ; return object ; } catch ( Exception e ) { } object . put ( name , value ) ; } else { object . remove ( name ) ; } } return object ;
public class GrammarConverter { /** * This method converts a single production from a list of productions * within a production group into a single production . Some additional * productions might be created during the way due to construction grouping * and quantifiers . * The alternative name for a production is processed here , too . * @ param productionName * @ param productionConstruction * @ throws TreeException * @ throws GrammarException */ private void convertSingleProduction ( String productionName , ParseTreeNode productionConstruction ) throws TreeException , GrammarException { } }
ParseTreeNode alternativeIdentifier = productionConstruction . getChild ( "AlternativeIdentifier" ) ; Production production ; if ( alternativeIdentifier == null ) { production = new Production ( productionName ) ; } else { ParseTreeNode alternativeIdentifierName = alternativeIdentifier . getChild ( "IDENTIFIER" ) ; if ( alternativeIdentifierName == null ) { production = new Production ( productionName ) ; } else { production = new Production ( productionName , alternativeIdentifierName . getText ( ) ) ; } } production . addAllConstructions ( getConstructions ( productionConstruction ) ) ; addOptions ( production , productionConstruction ) ; productions . add ( production ) ;
public class LimitedInputStream { /** * Skips over the remainder of the available bytes . * @ throws IOException if an I / O error occurs while seeking within the inner * stream */ public void moveToEnd ( ) throws IOException { } }
int bytes ; while ( remaining > 0 ) { bytes = ( int ) inner . skip ( remaining ) ; if ( bytes < 0 ) { break ; } remaining -= bytes ; }
public class SerializerH3Collection {

    /**
     * Reads the collection from the input stream.
     *
     * <p>Items arrive in chunks: each chunk header encodes how many items follow and whether
     * it is the final chunk. Reading stops after the final chunk's items are consumed.
     *
     * @param is raw input decoder
     * @param in deserialization context; also records object references (graph mode)
     * @return the deserialized collection
     */
    @Override
    public T readObject(InRawH3 is, InH3Amp in) {
        Collection<Object> list = (Collection<Object>) newInstance();
        // add a reference if in graph mode
        in.ref(list);
        while (true) {
            long chunk = is.readUnsigned();
            long size = InRawH3.chunkSize(chunk);
            // Read exactly the number of items declared by this chunk header.
            for (int i = 0; i < size; i++) {
                Object item = _item.read(is, in);
                list.add(item);
            }
            if (InRawH3.chunkIsFinal(chunk)) {
                return (T) list;
            }
        }
    }
}
public class MathUtils {

    /**
     * Constructs a truth-table for the given column-number.<br>
     * The number of table rows is 2^columnCount.<br>
     * A truth-table with three columns looks like this:<br>
     * (0 = <tt>false</tt>, 1 = <tt>true</tt>)<br>
     * <br>
     * <ul>
     * <li>0 0 0</li>
     * <li>0 0 1</li>
     * <li>0 1 0</li>
     * <li>0 1 1</li>
     * <li>1 0 0</li>
     * <li>1 0 1</li>
     * <li>1 1 0</li>
     * <li>1 1 1</li>
     * </ul>
     * <br>
     *
     * @param colCount The number of columns.
     * @return The constructed truth-table, indexed as [column][row]; column 0 toggles fastest.
     */
    public static boolean[][] getTruthTable(int colCount) {
        // FIX: the previous comparison used "% 2 == 0", which inverted every cell - row 0
        // came out all-true instead of the documented all-false. A cell is true iff the
        // corresponding bit of the row index is set.
        final int rowCount = 1 << colCount; // 2^colCount without repeated Math.pow calls
        boolean[][] table = new boolean[colCount][rowCount];
        for (int col = 0; col < colCount; col++) {
            for (int row = 0; row < rowCount; row++) {
                table[col][row] = ((row >>> col) & 1) == 1;
            }
        }
        return table;
    }
}
public class lbvserver_filterpolicy_binding {

    /**
     * Use this API to fetch lbvserver_filterpolicy_binding resources of given name.
     *
     * @param service the nitro service used to issue the request
     * @param name    name of the lb vserver whose filter-policy bindings are fetched
     * @return the array of bindings for the named vserver
     * @throws Exception if the fetch fails
     */
    public static lbvserver_filterpolicy_binding[] get(nitro_service service, String name) throws Exception {
        // Build a prototype object carrying the lookup key, then query for matches.
        lbvserver_filterpolicy_binding obj = new lbvserver_filterpolicy_binding();
        obj.set_name(name);
        lbvserver_filterpolicy_binding response[] = (lbvserver_filterpolicy_binding[]) obj.get_resources(service);
        return response;
    }
}
public class Index {

    /**
     * Set settings for this index.
     *
     * @param settings       the settings for an index
     * @param requestOptions options to pass to this request
     * @return the JSON response returned by the delegated overload
     * @throws AlgoliaException if the request fails
     */
    public JSONObject setSettings(JSONObject settings, RequestOptions requestOptions) throws AlgoliaException {
        // Delegates to the three-argument overload with the boolean flag disabled.
        // NOTE(review): the flag is presumably "forwardToReplicas" - confirm against the overload.
        return setSettings(settings, false, requestOptions);
    }
}
public class UnderFileSystemFactoryRegistry { /** * Finds all the Under File System factories that support the given path . * @ param path path * @ param ufsConf configuration of the UFS * @ param alluxioConf Alluxio configuration * @ return list of factories that support the given path which may be an empty list */ public static List < UnderFileSystemFactory > findAll ( String path , UnderFileSystemConfiguration ufsConf , AlluxioConfiguration alluxioConf ) { } }
List < UnderFileSystemFactory > eligibleFactories = sRegistryInstance . findAll ( path , ufsConf , alluxioConf ) ; if ( eligibleFactories . isEmpty ( ) && ufsConf != null ) { // Check if any versioned factory supports the default configuration List < UnderFileSystemFactory > factories = sRegistryInstance . findAll ( path , null , alluxioConf ) ; List < String > supportedVersions = new java . util . ArrayList < > ( ) ; for ( UnderFileSystemFactory factory : factories ) { if ( ! factory . getVersion ( ) . isEmpty ( ) ) { supportedVersions . add ( factory . getVersion ( ) ) ; } } if ( ! supportedVersions . isEmpty ( ) ) { String configuredVersion = ufsConf . get ( PropertyKey . UNDERFS_VERSION ) ; LOG . warn ( "Versions [{}] are supported for path {} but you have configured version: {}" , StringUtils . join ( supportedVersions , "," ) , path , configuredVersion ) ; } } return eligibleFactories ;
public class GenericDao {

    /**
     * Executes a query for all entities whose path value is any of the target values.
     * The path is provided by the {@link QueryPathProvider} and is followed through to get
     * the resulting value. That resulting value is compared to the given target values in
     * the query.
     *
     * @param <Y>      the type of the target value and resulting attribute/column value
     * @param provider provides the path from the entity to the target attribute/column
     * @param values   the target values of the given attribute
     * @return a list of entities that match the given criteria
     */
    protected <Y> List<T> getListByAttributeIn(QueryPathProvider<T, Y> provider, List<Y> values) {
        // Delegate to the database support layer, scoped to this DAO's entity class.
        return getDatabaseSupport().getListByAttributeIn(getEntityClass(), provider, values);
    }
}
public class VariantMetadataManager { /** * Add an individual to a given variant study metadata ( from study ID ) . * @ param individual Individual to add * @ param studyId Study ID */ public void addIndividual ( org . opencb . biodata . models . metadata . Individual individual , String studyId ) { } }
// Sanity check if ( individual == null || StringUtils . isEmpty ( individual . getId ( ) ) ) { logger . error ( "Individual (or its ID) is null or empty." ) ; return ; } VariantStudyMetadata variantStudyMetadata = getVariantStudyMetadata ( studyId ) ; if ( variantStudyMetadata == null ) { logger . error ( "Study not found. Check your study ID: '{}'" , studyId ) ; return ; } if ( variantStudyMetadata . getIndividuals ( ) == null ) { variantStudyMetadata . setIndividuals ( new ArrayList < > ( ) ) ; } for ( org . opencb . biodata . models . metadata . Individual indi : variantStudyMetadata . getIndividuals ( ) ) { if ( indi . getId ( ) != null && indi . getId ( ) . equals ( individual . getId ( ) ) ) { logger . error ( "Individual with id '{}' already exists in study '{}'" , individual . getId ( ) , studyId ) ; return ; } } variantStudyMetadata . getIndividuals ( ) . add ( individual ) ;
public class ScaleTransformer {

    /**
     * Uses Nineoldandroids to change the position of the view.
     *
     * <p>NOTE(review): {@code verticalDragOffset} is not referenced in this body; the pivot is
     * anchored to the bottom-right corner (inset by the margins) regardless of the offset -
     * confirm whether the offset is consumed elsewhere in the transformation pipeline.
     *
     * @param verticalDragOffset used to calculate the new position.
     */
    @Override
    public void updatePosition(float verticalDragOffset) {
        // Anchor the view's pivot to its bottom-right corner, inset by the configured margins.
        ViewHelper.setPivotX(getView(), getView().getWidth() - getMarginRight());
        ViewHelper.setPivotY(getView(), getView().getHeight() - getMarginBottom());
    }
}
public class RoleAssignmentsInner {

    /**
     * Creates a role assignment.
     *
     * @param scope the scope of the role assignment to create; can be any REST resource
     *        instance, e.g. '/subscriptions/{subscription-id}/' for a subscription, a
     *        resource-group path, or a full resource path below it
     * @param roleAssignmentName the name of the role assignment to create; any valid GUID
     * @param parameters parameters for the role assignment
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the RoleAssignmentInner object
     */
    public Observable<ServiceResponse<RoleAssignmentInner>> createWithServiceResponseAsync(String scope, String roleAssignmentName, RoleAssignmentCreateParameters parameters) {
        if (scope == null) {
            throw new IllegalArgumentException("Parameter scope is required and cannot be null.");
        }
        if (roleAssignmentName == null) {
            throw new IllegalArgumentException("Parameter roleAssignmentName is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        Validator.validate(parameters);
        // REST API version pinned for this operation.
        final String apiVersion = "2018-09-01-preview";
        return service.create(scope, roleAssignmentName, parameters, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<RoleAssignmentInner>>>() {
                @Override
                public Observable<ServiceResponse<RoleAssignmentInner>> call(Response<ResponseBody> response) {
                    try {
                        // Deserialize the raw HTTP response into the typed service response.
                        ServiceResponse<RoleAssignmentInner> clientResponse = createDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Surface deserialization/validation failures through the observable.
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class WorkManagerCoordinator {

    /**
     * Remove a work manager: decrements its reference count and tears it down (including
     * unregistering a distributed transport) once the count reaches zero.
     *
     * @param id the id of the work manager
     * @throws IllegalArgumentException if {@code id} is null or blank
     */
    public synchronized void removeWorkManager(String id) {
        if (id == null || id.trim().equals(""))
            throw new IllegalArgumentException("The id of WorkManager is invalid: " + id);
        Integer i = refCountWorkmanagers.get(id);
        if (i != null) {
            int newValue = i.intValue() - 1;
            if (newValue == 0) {
                // Last reference: unregister any distributed transport, then drop from both maps.
                if (trace)
                    log.tracef("Removed WorkManager: %s", id);
                WorkManager wm = activeWorkmanagers.get(id);
                if (wm instanceof DistributedWorkManager) {
                    DistributedWorkManager dwm = (DistributedWorkManager) wm;
                    if (dwm.getTransport() != null)
                        dwm.getTransport().unregister(new Address(wm.getId(), wm.getName(), dwm.getTransport().getId()));
                }
                activeWorkmanagers.remove(id);
                refCountWorkmanagers.remove(id);
            } else {
                // Still referenced: just store the decremented count.
                if (trace)
                    log.tracef("DerefCount WorkManager: %s", id);
                refCountWorkmanagers.put(id, Integer.valueOf(newValue));
            }
        }
    }
}
public class ProtoContext { /** * Resolve a type declaration by it ' s fully - qualified name * using this proto context . */ @ SuppressWarnings ( "unchecked" ) public < T extends Type > T resolve ( Class < T > typeClass , String fullyQualifiedName ) { } }
Type result = resolve ( fullyQualifiedName ) ; if ( result == null ) { return null ; } if ( typeClass . isAssignableFrom ( result . getClass ( ) ) ) { return ( T ) result ; } throw new ClassCastException ( result . getClass ( ) + " cannot be cast to " + typeClass ) ;
public class LongChromosome {

    /**
     * Java object serialization: writes the default fields, then length, length range,
     * min/max bounds, and finally every gene's raw allele value.
     *
     * <p>The write order here must match the corresponding readObject implementation exactly.
     *
     * @param out the stream to serialize into
     * @throws IOException if writing to the stream fails
     */
    private void writeObject(final ObjectOutputStream out) throws IOException {
        out.defaultWriteObject();
        out.writeInt(length());
        out.writeObject(lengthRange());
        out.writeLong(_min);
        out.writeLong(_max);
        // Store each gene as its raw allele value rather than as a full gene object.
        for (LongGene gene : _genes) {
            out.writeLong(gene.getAllele());
        }
    }
}
public class ApiSessionImpl {

    /**
     * Stores a named property on the underlying session.
     *
     * @param name  property key
     * @param value property value
     * @see com.tvd12.ezyfox.core.command.Session#setProperty(java.lang.String, java.lang.Object)
     */
    @Override
    public void setProperty(String name, Object value) {
        // Straight delegation to the wrapped session object.
        session.setProperty(name, value);
    }
}
public class Related {

    /**
     * Specifies a filter for entities that are targets of a relationship with the specified entity.
     *
     * @param entityPath   the entity that is the source of the relationship
     * @param relationship the name of the relationship
     * @return a new "related" filter instance
     */
    public static Related asTargetWith(CanonicalPath entityPath, String relationship) {
        // The filtered entities play the TARGET role of the relationship.
        return new Related(entityPath, relationship, EntityRole.TARGET);
    }
}
public class ReadOnlyStyledDocument {

    /**
     * Maps the paragraph at the given index by calling {@link #replace(int, int, UnaryOperator)}.
     * Returns
     * <ol>
     * <li>the updated version of this document that includes the replacement,</li>
     * <li>the {@link RichTextChange} that represents the change from this document to the
     * returned one, and</li>
     * <li>the modification used to update an area's list of paragraphs.</li>
     * </ol>
     *
     * @param parIdx index of the paragraph to map; must be a valid paragraph index
     * @param mapper transformation applied to the paragraph
     */
    public Tuple3<ReadOnlyStyledDocument<PS, SEG, S>, RichTextChange<PS, SEG, S>, MaterializedListModification<Paragraph<PS, SEG, S>>> replaceParagraph(int parIdx, UnaryOperator<Paragraph<PS, SEG, S>> mapper) {
        ensureValidParagraphIndex(parIdx);
        // Replace the paragraph's full span: from column 0 through its last column.
        return replace(
                new BiIndex(parIdx, 0),
                new BiIndex(parIdx, tree.getLeaf(parIdx).length()),
                doc -> doc.mapParagraphs(mapper));
    }
}
public class Configuration { /** * Get the value of the { @ code name } configuration property as an { @ code int } . If the property is missing * from the configuration or is not a valid { @ code int } , then an exception is thrown . * @ param name the configuration property name * @ throws NumberFormatException if the configured value is not a valid { @ code int } * @ throws NullPointerException if the configuration property is not present in the loaded config * @ return the configuration property value as an { @ code int } */ public int getInt ( String name ) { } }
String valueString = getTrimmed ( name ) ; Preconditions . checkNotNull ( valueString ) ; String hexString = getHexDigits ( valueString ) ; if ( hexString != null ) { return Integer . parseInt ( hexString , 16 ) ; } return Integer . parseInt ( valueString ) ;
public class XlsWorkbook { /** * Creates a new workbook object . * @ param os The output stream for the workbook * @ param existing An existing workbook to add to * @ return The new workbook object * @ throws IOException if the workbook cannot be written */ public static XlsWorkbook createWorkbook ( OutputStream os , Workbook existing ) throws IOException { } }
try { if ( existing != null ) return new XlsWorkbook ( jxl . Workbook . createWorkbook ( os , ( jxl . Workbook ) existing . getWorkbook ( ) , settings ) ) ; else return new XlsWorkbook ( jxl . Workbook . createWorkbook ( os , settings ) ) ; } catch ( jxl . read . biff . BiffException e ) { throw new IOException ( e ) ; }
public class MongoDBSessionAspect {

    /**
     * Wraps the intercepted invocation in a MongoDB session: creates a session bound to the
     * join point's declaring type, proceeds, flushes on success, and always removes the
     * session afterwards.
     *
     * @param proceedingJoinPoint the intercepted invocation
     * @return the result of the wrapped invocation
     * @throws Throwable whatever the wrapped invocation throws
     */
    public Object manageMongoDBSession(ProceedingJoinPoint proceedingJoinPoint) throws Throwable {
        try {
            Logger logger = LoggerFactory.getLogger(proceedingJoinPoint.getSourceLocation().getWithinType());
            MongoDBSession.create(logger, proceedingJoinPoint.getSignature().toShortString());
            Object result = proceedingJoinPoint.proceed();
            // Flush only on success; a thrown exception skips the flush but still cleans up below.
            MongoDBSession.flush();
            return result;
        } finally {
            // Always detach the session, whether or not the invocation succeeded.
            MongoDBSession.remove();
        }
    }
}
public class MultiProviderNameProvider { /** * { @ inheritDoc } */ @ Override public String getMethodNameForColumn ( final Column column ) { } }
this . wasUsedBefore = true ; for ( final NameProvider provider : providers ) { final String name = provider . getMethodNameForColumn ( column ) ; if ( Objects . nonNull ( name ) ) { return name ; } } return this . fallbackProvider . getMethodNameForColumn ( column ) ;
public class AbstractEjbEndpointService {

    /**
     * {@inheritDoc}
     *
     * <p>Lazily creates and caches one {@code WebSphereEjbServices} instance per application
     * ID. The check-then-put is performed while synchronized on the cache map, so concurrent
     * callers for the same ID receive the same instance.
     */
    @Override
    public WebSphereEjbServices getWebSphereEjbServices(String applicationID) {
        WebSphereEjbServices services = null;
        synchronized (ejbServices) {
            services = ejbServices.get(applicationID);
            if (services == null) {
                // Cache miss: build and remember a new services instance for this application.
                services = new WebSphereEjbServicesImpl(applicationID, ejbDescriptorMap);
                ejbServices.put(applicationID, services);
            }
        }
        return services;
    }
}
public class CommonUtils { /** * Converts a list of objects to a string . * @ param list list of objects * @ param < T > type of the objects * @ return space - separated concatenation of the string representation returned by Object # toString * of the individual objects */ public static < T > String listToString ( List < T > list ) { } }
StringBuilder sb = new StringBuilder ( ) ; for ( T s : list ) { if ( sb . length ( ) != 0 ) { sb . append ( " " ) ; } sb . append ( s ) ; } return sb . toString ( ) ;
public class UserIpLimitsAccessVoter {

    /**
     * This method checks the IP limits of the principal and denies access if those limits
     * exist and the request is coming from outside the specified range.
     *
     * <p>Outcomes: abstains when the resource type is unsupported or the principal has no IP
     * limits; denies when the request is missing or no configured range matches; grants when
     * the client IP falls inside any configured range.
     *
     * @param auth     principal seeking AuthZ
     * @param resource that is under protection (expected to be a {@code FilterInvocation})
     * @param config   access-attributes defined on resource
     * @return vote (AccessDecisionVoter.ACCESS_GRANTED, ACCESS_DENIED, ACCESS_ABSTAIN)
     */
    public int vote(Authentication auth, Object resource, Collection config) {
        String label = "UserIpLimitsAccessVoter";
        if (resource != null && !supports(resource.getClass())) {
            log.debug(debugText(label, auth, config, resource, ACCESS_ABSTAIN));
            return ACCESS_ABSTAIN;
        }
        // NOTE(review): a null resource passes the guard above and would NPE on the cast's
        // dereference below - confirm callers never pass null.
        FilterInvocation invocation = (FilterInvocation) resource;
        HttpServletRequest httpRequest = invocation.getHttpRequest();
        if (null == httpRequest) {
            log.debug(debugText(label, auth, config, resource, ACCESS_DENIED));
            return ACCESS_DENIED;
        }
        String userIpLimits = getUserIpLimits(auth);
        // if user IP limits are set, check request IP
        if (null != userIpLimits && !userIpLimits.equals("")) {
            String requestIp = httpRequest.getRemoteAddr();
            // Limits are a semicolon-separated list of ranges.
            String[] ipLimits = userIpLimits.split(";");
            for (String ipLimit : ipLimits) {
                if (ipInRange(requestIp, ipLimit)) {
                    // User's IP is within this range, grant access
                    log.debug(debugText(label, auth, config, resource, ACCESS_GRANTED));
                    return ACCESS_GRANTED;
                }
            }
            // There are IP limits, and none of them match the user's IP, deny
            log.debug(debugText(label, auth, config, resource, ACCESS_DENIED));
            return ACCESS_DENIED;
        } else {
            // No user IP limits, abstain
            log.debug(debugText(label, auth, config, resource, ACCESS_ABSTAIN));
            return ACCESS_ABSTAIN;
        }
    }
}
public class UsersInner {

    /**
     * List users in a given lab.
     *
     * @param resourceGroupName The name of the resource group.
     * @param labAccountName    The name of the lab Account.
     * @param labName           The name of the lab.
     * @param expand            Specify the $expand query. Example: 'properties($select=email)'
     * @param filter            The filter to apply to the operation.
     * @param top               The maximum number of resources to return from the operation.
     * @param orderby           The ordering expression for the results, using OData notation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;UserInner&gt; object
     */
    public Observable<Page<UserInner>> listAsync(final String resourceGroupName, final String labAccountName, final String labName, final String expand, final String filter, final Integer top, final String orderby) {
        // Delegate to the service-response variant and unwrap each page's body.
        return listWithServiceResponseAsync(resourceGroupName, labAccountName, labName, expand, filter, top, orderby)
            .map(new Func1<ServiceResponse<Page<UserInner>>, Page<UserInner>>() {
                @Override
                public Page<UserInner> call(ServiceResponse<Page<UserInner>> response) {
                    return response.body();
                }
            });
    }
}
public class Emitter { /** * interprets a plugin block into the StringBuilder . * @ param aOut * The StringBuilder to write to . * @ param aLines * The lines to write . * @ param sMeta * Meta information . */ protected void emitPluginLines ( final MarkdownHCStack aOut , final Line aLines , @ Nonnull final String sMeta ) { } }
Line aLine = aLines ; String sIDPlugin = sMeta ; String sParams = null ; ICommonsMap < String , String > aParams = null ; final int nIdxOfSpace = sMeta . indexOf ( ' ' ) ; if ( nIdxOfSpace != - 1 ) { sIDPlugin = sMeta . substring ( 0 , nIdxOfSpace ) ; sParams = sMeta . substring ( nIdxOfSpace + 1 ) ; if ( sParams != null ) { aParams = parsePluginParams ( sParams ) ; } } if ( aParams == null ) { aParams = new CommonsHashMap < > ( ) ; } final ICommonsList < String > aList = new CommonsArrayList < > ( ) ; while ( aLine != null ) { if ( aLine . m_bIsEmpty ) aList . add ( "" ) ; else aList . add ( aLine . m_sValue ) ; aLine = aLine . m_aNext ; } final AbstractMarkdownPlugin aPlugin = m_aPlugins . get ( sIDPlugin ) ; if ( aPlugin != null ) { aPlugin . emit ( aOut , aList , aParams ) ; }
public class UpdateFleetPortSettingsRequest { /** * Collection of port settings to be added to the fleet record . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setInboundPermissionAuthorizations ( java . util . Collection ) } or * { @ link # withInboundPermissionAuthorizations ( java . util . Collection ) } if you want to override the existing values . * @ param inboundPermissionAuthorizations * Collection of port settings to be added to the fleet record . * @ return Returns a reference to this object so that method calls can be chained together . */ public UpdateFleetPortSettingsRequest withInboundPermissionAuthorizations ( IpPermission ... inboundPermissionAuthorizations ) { } }
if ( this . inboundPermissionAuthorizations == null ) { setInboundPermissionAuthorizations ( new java . util . ArrayList < IpPermission > ( inboundPermissionAuthorizations . length ) ) ; } for ( IpPermission ele : inboundPermissionAuthorizations ) { this . inboundPermissionAuthorizations . add ( ele ) ; } return this ;