signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class AbstractRStarTree { /** * Delete a leaf at a given path - deletions for non - leaves are not supported ! * @ param deletionPath Path to delete */ protected void deletePath ( IndexTreePath < E > deletionPath ) { } }
N leaf = getNode ( deletionPath . getParentPath ( ) . getEntry ( ) ) ; int index = deletionPath . getIndex ( ) ; // delete o E entry = leaf . getEntry ( index ) ; leaf . deleteEntry ( index ) ; writeNode ( leaf ) ; // condense the tree Stack < N > stack = new Stack < > ( ) ; condenseTree ( deletionPath . getParentPath ( ) , stack ) ; // reinsert underflow nodes while ( ! stack . empty ( ) ) { N node = stack . pop ( ) ; if ( node . isLeaf ( ) ) { for ( int i = 0 ; i < node . getNumEntries ( ) ; i ++ ) { settings . getOverflowTreatment ( ) . reinitialize ( ) ; // Intended ? this . insertLeafEntry ( node . getEntry ( i ) ) ; } } else { for ( int i = 0 ; i < node . getNumEntries ( ) ; i ++ ) { stack . push ( getNode ( node . getEntry ( i ) ) ) ; } } deleteNode ( node ) ; } postDelete ( entry ) ; doExtraIntegrityChecks ( ) ;
public class AntBuilder {
    /**
     * Copied from org.apache.tools.ant.Task, since we need to capture the
     * "real thing" (the unwrapped task object) before DispatchUtils.execute
     * nulls it out.
     *
     * @param task the Ant task to configure and execute
     * @return the unwrapped task object if available, otherwise the task itself
     */
    private Object performTask(Task task) {
        Throwable reason = null;
        try {
            // Have to call fireTaskStarted / fireTaskFinished via reflection as
            // they unfortunately have protected access in Project.
            final Method fireTaskStarted = Project.class.getDeclaredMethod("fireTaskStarted", Task.class);
            fireTaskStarted.setAccessible(true);
            fireTaskStarted.invoke(project, task);
            Object realThing;
            realThing = task;
            task.maybeConfigure();
            // UnknownElement wraps the actual task until configured; unwrap it.
            if (task instanceof UnknownElement) {
                realThing = ((UnknownElement) task).getRealThing();
            }
            DispatchUtils.execute(task);
            return realThing != null ? realThing : task;
        } catch (BuildException ex) {
            // Attach the task's location if the exception has none.
            if (ex.getLocation() == Location.UNKNOWN_LOCATION) {
                ex.setLocation(task.getLocation());
            }
            reason = ex;
            throw ex;
        } catch (Exception ex) {
            // Wrap any other checked exception in a located BuildException.
            reason = ex;
            BuildException be = new BuildException(ex);
            be.setLocation(task.getLocation());
            throw be;
        } catch (Error ex) {
            reason = ex;
            throw ex;
        } finally {
            // Always notify listeners that the task finished, passing the
            // failure cause (or null on success).
            try {
                final Method fireTaskFinished = Project.class.getDeclaredMethod("fireTaskFinished", Task.class, Throwable.class);
                fireTaskFinished.setAccessible(true);
                fireTaskFinished.invoke(project, task, reason);
            } catch (Exception e) {
                BuildException be = new BuildException(e);
                be.setLocation(task.getLocation());
                throw be;
            }
        }
    }
}
public class JsonPathLibrary { /** * Find JSON element by ` jsonPath ` from the ` source ` and check if the amount of found elements matches the given ` count ` . * ` source ` can be either URI or the actual JSON content . * You can add optional method ( ie GET , POST , PUT ) , data or content type as parameters . * Method defaults to GET . * Example : * | Json Should Have Element Count | http : / / example . com / test . json | $ . foo [ * ] | 3 | * | Json Should Have Element Count | { element : [ { param : hello } , { foo : bar } ] } | $ . element [ * ] | 2 | * | Json Should Have Element Count | { element : [ { param : hello } , { foo : bar } ] } | $ . element [ * ] | 2 | POST | { hello : world } | application / json | */ @ SuppressWarnings ( "unchecked" ) @ RobotKeyword public boolean jsonShouldHaveElementCount ( String source , String jsonPath , Integer count , String method , String data , String contentType ) throws Exception { } }
boolean match = false ; System . out . println ( "*DEBUG* Reading jsonPath: " + jsonPath ) ; String json = requestUtil . readSource ( source , method , data , contentType ) ; List < Object > elements = null ; Object object = null ; try { object = JsonPath . read ( json , jsonPath ) ; } catch ( PathNotFoundException e ) { throw new JsonElementNotFoundException ( "Path '" + jsonPath + "' was not found in JSON" ) ; } if ( object != null ) { // TODO : Find a way to do this without suppressing the warning if ( object instanceof List < ? > ) { elements = ( List < Object > ) object ; if ( CollectionUtils . isNotEmpty ( elements ) ) { match = ( elements . size ( ) == count ) ; if ( ! match ) { System . out . println ( "*ERROR* Element counts did not match. Expected '" + count + "', got '" + elements . size ( ) + "'" ) ; throw new JsonNotEqualException ( "Element counts did not match. Expected '" + count + "', got '" + elements . size ( ) + "'" ) ; } } else { // In practice , it ' s impossible to end here . System . out . println ( "*ERROR* Could not find elements from '" + jsonPath + "'" ) ; throw new JsonElementNotFoundException ( "Could not find elements from '" + jsonPath + "'" ) ; } } else if ( count == 1 ) { System . out . println ( "*DEBUG* Found 1 item as expected from '" + jsonPath + "'" ) ; match = true ; } else { System . out . println ( "*ERROR* Found 1 item, but expected '" + count + "'" ) ; throw new JsonElementNotFoundException ( "Found 1 item, but expected '" + count + "'" ) ; } } else { System . out . println ( "*ERROR* Could not find elements from '" + jsonPath + "'" ) ; throw new JsonElementNotFoundException ( "Could not find elements from '" + jsonPath + "'" ) ; } return match ;
public class ProductMarketplaceInfo { /** * Sets the additionalTermsSource value for this ProductMarketplaceInfo . * @ param additionalTermsSource * Specifies the source of the { @ link # additionalTerms } value . * To revert an overridden value to its default , set this field to { @ link * ValueSourceType # PARENT } . */ public void setAdditionalTermsSource ( com . google . api . ads . admanager . axis . v201808 . ValueSourceType additionalTermsSource ) { } }
this . additionalTermsSource = additionalTermsSource ;
public class TokenFilter { /** * Filters the given string , replacing any tokens with their corresponding * values . * @ param input * The string to filter . * @ return * A copy of the input string , with any tokens replaced with their * corresponding values . */ public String filter ( String input ) { } }
StringBuilder output = new StringBuilder ( ) ; Matcher tokenMatcher = tokenPattern . matcher ( input ) ; // Track last regex match int endOfLastMatch = 0 ; // For each possible token while ( tokenMatcher . find ( ) ) { // Pull possible leading text and first char before possible token String literal = tokenMatcher . group ( LEADING_TEXT_GROUP ) ; String escape = tokenMatcher . group ( ESCAPE_CHAR_GROUP ) ; // Append leading non - token text output . append ( literal ) ; // If char before token is ' $ ' , the token itself is escaped if ( "$" . equals ( escape ) ) { String notToken = tokenMatcher . group ( TOKEN_GROUP ) ; output . append ( notToken ) ; } // If char is not ' $ ' , interpret as a token else { // The char before the token , if any , is a literal output . append ( escape ) ; // Pull token value String tokenName = tokenMatcher . group ( TOKEN_NAME_GROUP ) ; String tokenValue = getToken ( tokenName ) ; // If token is unknown , interpret as literal if ( tokenValue == null ) { String notToken = tokenMatcher . group ( TOKEN_GROUP ) ; output . append ( notToken ) ; } // Otherwise , substitute value else output . append ( tokenValue ) ; } // Update last regex match endOfLastMatch = tokenMatcher . end ( ) ; } // Append any remaining non - token text output . append ( input . substring ( endOfLastMatch ) ) ; return output . toString ( ) ;
public class WarUtils { /** * Adds the shiro filter to a web . xml file . * @ param doc The xml DOM document to create the new xml elements with . * @ param root The xml Element node to add the filter to . */ public static void addFilter ( Document doc , Element root ) { } }
Element filter = doc . createElement ( "filter" ) ; Element filterName = doc . createElement ( "filter-name" ) ; filterName . appendChild ( doc . createTextNode ( "ShiroFilter" ) ) ; filter . appendChild ( filterName ) ; Element filterClass = doc . createElement ( "filter-class" ) ; filterClass . appendChild ( doc . createTextNode ( "org.apache.shiro.web.servlet.ShiroFilter" ) ) ; filter . appendChild ( filterClass ) ; addRelativeTo ( root , filter , "filter" , true ) ;
public class DefaultExceptionFactory { /** * Create an { @ link LdapException } from an { @ link LdapResultCode } and message * @ param resultCode the result code * @ param message the exception message * @ return a new LDAPException */ public static LdapException create ( LdapResultCode resultCode , String message ) { } }
return new LdapException ( resultCode , message , null ) ;
public class ReadableIntervalConverter { /** * Sets the values of the mutable duration from the specified interval . * @ param writablePeriod the period to modify * @ param object the interval to set from * @ param chrono the chronology to use */ public void setInto ( ReadWritablePeriod writablePeriod , Object object , Chronology chrono ) { } }
ReadableInterval interval = ( ReadableInterval ) object ; chrono = ( chrono != null ? chrono : DateTimeUtils . getIntervalChronology ( interval ) ) ; long start = interval . getStartMillis ( ) ; long end = interval . getEndMillis ( ) ; int [ ] values = chrono . get ( writablePeriod , start , end ) ; for ( int i = 0 ; i < values . length ; i ++ ) { writablePeriod . setValue ( i , values [ i ] ) ; }
public class ZipFileArtifactNotifier { /** * Validate change data , which is expected to be collections of files * or collections of entry paths . * Since the single root zip file is registered , the change is expected * to be a single element in exactly one of the change collections . * Null changes are unexpected . Additions are unexpected . Updates with * removals are unexpected . * The net is to allow updates alone or removals alone . * @ return A validation message if unexpected changes are noted . * Null if the changes are expected . */ @ Trivial private String validateNotification ( Collection < ? > added , Collection < ? > removed , Collection < ? > updated ) { } }
boolean isAddition = ! added . isEmpty ( ) ; boolean isRemoval = ! removed . isEmpty ( ) ; boolean isUpdate = ! updated . isEmpty ( ) ; if ( ! isAddition && ! isRemoval && ! isUpdate ) { // Should never occur : // Completely null changes are detected and cause an early return // before reaching the validation method . return "null" ; } else if ( isAddition ) { return "Addition of [ " + added . toString ( ) + " ]" ; } else if ( isUpdate && isRemoval ) { return "Update of [ " + updated . toString ( ) + " ]" + " with removal of [ " + removed . toString ( ) + " ]" ; } else { return null ; }
public class CPFriendlyURLEntryPersistenceImpl {
    /**
     * Returns the number of cp friendly url entries where groupId = &#63; and
     * classNameId = &#63; and classPK = &#63; and languageId = &#63; and
     * main = &#63;.
     *
     * @param groupId the group ID
     * @param classNameId the class name ID
     * @param classPK the class pk
     * @param languageId the language ID
     * @param main the main
     * @return the number of matching cp friendly url entries
     */
    @Override
    public int countByG_C_C_L_M(long groupId, long classNameId, long classPK, String languageId, boolean main) {
        FinderPath finderPath = FINDER_PATH_COUNT_BY_G_C_C_L_M;
        Object[] finderArgs = new Object[] { groupId, classNameId, classPK, languageId, main };
        // Consult the finder cache first; only hit the database on a miss.
        Long count = (Long) finderCache.getResult(finderPath, finderArgs, this);
        if (count == null) {
            // Assemble the COUNT query; 6 = number of appended fragments.
            StringBundler query = new StringBundler(6);
            query.append(_SQL_COUNT_CPFRIENDLYURLENTRY_WHERE);
            query.append(_FINDER_COLUMN_G_C_C_L_M_GROUPID_2);
            query.append(_FINDER_COLUMN_G_C_C_L_M_CLASSNAMEID_2);
            query.append(_FINDER_COLUMN_G_C_C_L_M_CLASSPK_2);
            // languageId needs special handling for null and empty values,
            // which use dedicated SQL fragments and bind no parameter.
            boolean bindLanguageId = false;
            if (languageId == null) {
                query.append(_FINDER_COLUMN_G_C_C_L_M_LANGUAGEID_1);
            } else if (languageId.equals("")) {
                query.append(_FINDER_COLUMN_G_C_C_L_M_LANGUAGEID_3);
            } else {
                bindLanguageId = true;
                query.append(_FINDER_COLUMN_G_C_C_L_M_LANGUAGEID_2);
            }
            query.append(_FINDER_COLUMN_G_C_C_L_M_MAIN_2);
            String sql = query.toString();
            Session session = null;
            try {
                session = openSession();
                Query q = session.createQuery(sql);
                QueryPos qPos = QueryPos.getInstance(q);
                // Bind parameters in the same order the fragments were appended.
                qPos.add(groupId);
                qPos.add(classNameId);
                qPos.add(classPK);
                if (bindLanguageId) {
                    qPos.add(languageId);
                }
                qPos.add(main);
                count = (Long) q.uniqueResult();
                // Cache the result for subsequent identical lookups.
                finderCache.putResult(finderPath, finderArgs, count);
            } catch (Exception e) {
                // Drop the (possibly stale) cache entry before rethrowing.
                finderCache.removeResult(finderPath, finderArgs);
                throw processException(e);
            } finally {
                closeSession(session);
            }
        }
        return count.intValue();
    }
}
public class ComplexImg { /** * Sets the real part at the specified position * < br > * If { @ link # isSynchronizePowerSpectrum ( ) } is true , then this will also * update the corresponding power value . * @ param x coordinate * @ param y coordinate * @ param value to be set */ public void setValueR ( int x , int y , double value ) { } }
int idx = y * width + x ; setValueR_atIndex ( idx , value ) ;
public class RSConfig { /** * Do not modify addRestResourceClasses ( ) method . It is automatically populated with all resources defined in the project . If required , comment out calling * this method in getClasses ( ) . */ void addRestResourceClasses ( Set < Class < ? > > resources ) { } }
logger . info ( "Register ocelot resources..." ) ; for ( Object restEndpoint : restEndpoints ) { Class cls = unProxyClassServices . getRealClass ( restEndpoint . getClass ( ) ) ; logger . info ( "Register ocelot resource {}" , cls . getName ( ) ) ; resources . add ( cls ) ; }
public class ProtocolDetectionResult { /** * Returns a { @ link ProtocolDetectionResult } which holds the detected protocol . */ @ SuppressWarnings ( "unchecked" ) public static < T > ProtocolDetectionResult < T > detected ( T protocol ) { } }
return new ProtocolDetectionResult < T > ( ProtocolDetectionState . DETECTED , checkNotNull ( protocol , "protocol" ) ) ;
public class GetDetectorRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( GetDetectorRequest getDetectorRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( getDetectorRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getDetectorRequest . getDetectorId ( ) , DETECTORID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class DenialOfServiceTaf {
    /**
     * Removes the given ID from the deny list, if present.
     * Return of "true" means the ID was removed.
     * Return of "false" means the ID wasn't being denied.
     *
     * @param id the identifier to stop denying
     * @return true if the ID was removed, false otherwise
     */
    public static synchronized boolean removeDenyID(String id) {
        if (deniedID != null && deniedID.remove(id) != null) {
            // Persist the updated deny list.
            writeID();
            // Drop the map entirely once it is empty.
            if (deniedID.isEmpty()) {
                deniedID = null;
            }
            return true;
        }
        return false;
    }
}
public class FieldAccessor { /** * Copies field ' s value to the corresponding field in the specified object . * Ignores static fields and fields that can ' t be modified reflectively . * @ param to The object into which to copy the field . * @ throws ReflectionException If the operation fails . */ public void copyTo ( Object to ) { } }
modify ( ( ) -> field . set ( to , field . get ( object ) ) , false ) ;
public class Url { /** * Parses a cloudinary identifier of the form : < br > * { @ code [ < resource _ type > / ] [ < image _ type > / ] [ v < version > / ] < public _ id > [ . < format > ] [ # < signature > ] } */ public Url fromIdentifier ( String identifier ) { } }
Matcher matcher = identifierPattern . matcher ( identifier ) ; if ( ! matcher . matches ( ) ) { throw new RuntimeException ( String . format ( "Couldn't parse identifier %s" , identifier ) ) ; } String resourceType = matcher . group ( 1 ) ; if ( resourceType != null ) { resourceType ( resourceType ) ; } String type = matcher . group ( 2 ) ; if ( type != null ) { type ( type ) ; } String version = matcher . group ( 3 ) ; if ( version != null ) { version ( version ) ; } String publicId = matcher . group ( 4 ) ; if ( publicId != null ) { publicId ( publicId ) ; } String format = matcher . group ( 5 ) ; if ( format != null ) { format ( format ) ; } // Signature ( group 6 ) is not used return this ;
public class RelationshipJacksonSerializer { /** * < pre > compact : * " id " : " 1337 " , * " source " : " / tenants / 28026b36-8fe4-4332-84c8-524e173a68bf " , * " name " : " contains " , * " target " : " 28026b36-8fe4-4332-84c8-524e173a68bf / environments / test " * } < / pre > * < pre > embedded : * " @ context " : " http : / / hawkular . org / inventory / 0.1.0 / relationship . jsonld " , * " id " : " 1337 " , * " name " : " contains " , * " source " : { * id : " / tenants / 28026b36-8fe4-4332-84c8-524e173a68bf " , * shortId : " 28026b36-8fe4-4332-84c8-524e173a68bf " , * type : " Tenant " * " target " : { * id : " 28026b36-8fe4-4332-84c8-524e173a68bf / environments / test " , * shortId : " test " , * type : " Environment " * } < / pre > */ @ Override public void serialize ( Relationship relationship , JsonGenerator jg , SerializerProvider serializerProvider ) throws IOException { } }
jg . writeStartObject ( ) ; jg . writeFieldName ( FIELD_ID ) ; jg . writeString ( relationship . getId ( ) ) ; jg . writeFieldName ( FIELD_NAME ) ; jg . writeString ( relationship . getName ( ) ) ; jg . writeFieldName ( FIELD_SOURCE ) ; jg . writeString ( relationship . getSource ( ) . toString ( ) ) ; jg . writeFieldName ( FIELD_TARGET ) ; jg . writeString ( relationship . getTarget ( ) . toString ( ) ) ; if ( relationship . getProperties ( ) != null && ! relationship . getProperties ( ) . isEmpty ( ) ) { jg . writeFieldName ( FIELD_PROPERTIES ) ; jg . writeStartObject ( ) ; for ( Map . Entry < String , Object > property : relationship . getProperties ( ) . entrySet ( ) ) { jg . writeFieldName ( property . getKey ( ) ) ; jg . writeObject ( property . getValue ( ) ) ; } jg . writeEndObject ( ) ; } jg . writeEndObject ( ) ;
public class ParseZonedDateTime { /** * { @ inheritDoc } */ @ Override protected ZonedDateTime parse ( final String string , final DateTimeFormatter formatter ) { } }
return ZonedDateTime . parse ( string , formatter ) ;
public class SocketSystem { /** * Returns a unique identifying byte array for the server , generally * the mac address . */ public byte [ ] getHardwareAddress ( ) { } }
if ( CurrentTime . isTest ( ) || System . getProperty ( "test.mac" ) != null ) { return new byte [ ] { 10 , 0 , 0 , 0 , 0 , 10 } ; } for ( NetworkInterfaceBase nic : getNetworkInterfaces ( ) ) { if ( ! nic . isLoopback ( ) ) { return nic . getHardwareAddress ( ) ; } } try { InetAddress localHost = InetAddress . getLocalHost ( ) ; return localHost . getAddress ( ) ; } catch ( Exception e ) { log . log ( Level . FINER , e . toString ( ) , e ) ; } return new byte [ 0 ] ;
public class DataUtils { /** * Generates an artifact starting from gavc * WARNING : use this method only if you have a missing reference in the database ! ! ! * @ param gavc * @ return DbArtifact */ public static DbArtifact createDbArtifact ( final String gavc ) { } }
final DbArtifact artifact = new DbArtifact ( ) ; final String [ ] artifactInfo = gavc . split ( ":" ) ; if ( artifactInfo . length > 0 ) { artifact . setGroupId ( artifactInfo [ 0 ] ) ; } if ( artifactInfo . length > 1 ) { artifact . setArtifactId ( artifactInfo [ 1 ] ) ; } if ( artifactInfo . length > 2 ) { artifact . setVersion ( artifactInfo [ 2 ] ) ; } if ( artifactInfo . length > 3 ) { artifact . setClassifier ( artifactInfo [ 3 ] ) ; } if ( artifactInfo . length > 4 ) { artifact . setExtension ( artifactInfo [ 4 ] ) ; } if ( artifactInfo . length > 5 ) { artifact . setOrigin ( artifactInfo [ 5 ] ) ; } return artifact ;
public class AvatarNode { /** * Append service name to each avatar meta directory name * @ param conf configuration of NameNode * @ param serviceKey the non - empty name of the name node service */ public static void adjustMetaDirectoryNames ( Configuration conf , String serviceKey ) { } }
adjustMetaDirectoryName ( conf , DFS_SHARED_NAME_DIR0_KEY , serviceKey ) ; adjustMetaDirectoryName ( conf , DFS_SHARED_NAME_DIR1_KEY , serviceKey ) ; adjustMetaDirectoryName ( conf , DFS_SHARED_EDITS_DIR0_KEY , serviceKey ) ; adjustMetaDirectoryName ( conf , DFS_SHARED_EDITS_DIR1_KEY , serviceKey ) ;
public class RedisInner { /** * Reboot specified Redis node ( s ) . This operation requires write permission to the cache resource . There can be potential data loss . * @ param resourceGroupName The name of the resource group . * @ param name The name of the Redis cache . * @ param parameters Specifies which Redis node ( s ) to reboot . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the RedisForceRebootResponseInner object if successful . */ public RedisForceRebootResponseInner forceReboot ( String resourceGroupName , String name , RedisRebootParameters parameters ) { } }
return forceRebootWithServiceResponseAsync ( resourceGroupName , name , parameters ) . toBlocking ( ) . single ( ) . body ( ) ;
public class DummyRPCServiceImpl { /** * { @ inheritDoc } */ public Object executeCommandOnCoordinator ( RemoteCommand command , boolean synchronous , Serializable ... args ) throws RPCException , SecurityException { } }
return executeCommand ( command , args ) ;
public class RSAUtils { /** * Decrypt encrypted data with RSA private key , using { @ link # DEFAULT _ CIPHER _ TRANSFORMATION } . * Note : if long data was encrypted using * { @ link # encryptWithPublicKey ( byte [ ] , byte [ ] ) } , it will be correctly decrypted . * @ param privateKeyData * RSA private key data ( value of { @ link RSAPrivateKey # getEncoded ( ) } ) * @ param encryptedData * @ return * @ throws NoSuchAlgorithmException * @ throws InvalidKeySpecException * @ throws InvalidKeyException * @ throws NoSuchPaddingException * @ throws IllegalBlockSizeException * @ throws BadPaddingException * @ throws IOException */ public static byte [ ] decryptWithPrivateKey ( byte [ ] privateKeyData , byte [ ] encryptedData ) throws NoSuchAlgorithmException , InvalidKeySpecException , InvalidKeyException , NoSuchPaddingException , IllegalBlockSizeException , BadPaddingException , IOException { } }
return decryptWithPrivateKey ( privateKeyData , encryptedData , DEFAULT_CIPHER_TRANSFORMATION ) ;
public class TextFileCorpus {
    /**
     * Reads a single word from a file, assuming space + tab + EOL to be word
     * boundaries. Words are served from an internal line buffer that is
     * refilled one line at a time.
     *
     * @param raf the reader to pull lines from
     * @return the next word, or null at end of corpus
     * @throws IOException
     */
    String readWord(BufferedReader raf) throws IOException {
        while (true) {
            // Serve from the current line's word buffer first.
            if (wbp < wordsBuffer.length) {
                return wordsBuffer[wbp++];
            }
            String line = raf.readLine();
            if (line == null) {
                // End of corpus: flag it and signal the caller with null.
                eoc = true;
                return null;
            }
            line = line.trim();
            // Skip blank lines entirely.
            if (line.length() == 0) {
                continue;
            }
            cache.writeInt(-3); // mark end of sentence
            // Refill the buffer with the new line's words and reset the cursor.
            wordsBuffer = line.split("\\s+");
            wbp = 0;
            eoc = false;
        }
    }
}
public class OutboundChain {
    /**
     * This method analyzes the interfaces between the input array of inbound
     * channels. If the interfaces between them match, implying they have the
     * ability to form a chain, the method returns without exception.
     * Otherwise, an IncoherentChainException is thrown describing the
     * channels that were incoherent.
     *
     * @param chainData the chain whose channel factories are verified
     * @throws IncoherentChainException if a factory is invalid, an interface
     *         class is null, or adjacent factories are incompatible
     */
    public static void verifyChainCoherency(ChainData chainData) throws IncoherentChainException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.entry(tc, "verifyChainCoherency");
        }
        ChannelData[] channelDataArray = chainData.getChannelList();
        // Verify there are multiple channels in this chain; a single channel
        // has no adjacent pair to check.
        if (channelDataArray.length > 1) {
            ChannelFrameworkImpl fw = (ChannelFrameworkImpl) ChannelFrameworkFactory.getChannelFramework();
            ChannelFactoryData current = null;
            ChannelFactoryData next = null;
            Class<?>[] currentDevClasses = null;
            Class<?> nextAppClass = null;
            // Resolve the first channel's factory before entering the loop.
            try {
                current = fw.findOrCreateChannelFactoryData(channelDataArray[0].getFactoryType());
            } catch (ChannelFactoryException e) {
                // No FFDC Needed
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Found invalid channel factory of type " + channelDataArray[0].getFactoryType().getName());
                }
                throw new IncoherentChainException("Invalid channel factory");
            }
            // Iterate the channels and verify coherency between them.
            for (int i = 1; i < channelDataArray.length; i++) {
                currentDevClasses = current.getDeviceInterface();
                try {
                    next = fw.findOrCreateChannelFactoryData(channelDataArray[i].getFactoryType());
                } catch (ChannelFactoryException e) {
                    // No FFDC Needed
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, "Found invalid channel factory of type " + channelDataArray[i].getFactoryType().getName());
                    }
                    throw new IncoherentChainException("Invalid channel factory");
                }
                nextAppClass = next.getApplicationInterface();
                // Check for nulls or incompatibility.
                if ((null == currentDevClasses) || (null == nextAppClass)) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, "Found null interface classes between two channel factories: " + current.getFactory().getName() + ", " + next.getFactory().getName());
                    }
                    throw new IncoherentChainException("Found null interface classes between two channel factories: " + current.getFactory().getName() + ", " + next.getFactory().getName());
                }
                // Handle polymorphism: the next factory's application
                // interface only needs to match one of the current factory's
                // device interfaces.
                boolean foundMatch = false;
                for (int j = 0; j < currentDevClasses.length; j++) {
                    if (nextAppClass.isAssignableFrom(currentDevClasses[j])) {
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                            Tr.debug(tc, "Found compatible class: " + nextAppClass + " " + currentDevClasses[j]);
                        }
                        foundMatch = true;
                        break;
                    }
                }
                if (!foundMatch) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, "Found incoherency between two channel factories: " + current.getFactory().getName() + ", " + next.getFactory().getName());
                    }
                    throw new IncoherentChainException("Found incoherency between two channel factories: " + current.getFactory().getName() + ", " + next.getFactory().getName());
                }
                // Advance the pair window.
                current = next;
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.exit(tc, "verifyChainCoherency");
        }
    }
}
public class DefaultDependencyResolver { /** * Adds all the transitive dependencies for a symbol to the provided list . The * set is used to avoid adding dupes while keeping the correct order . NOTE : * Use of a LinkedHashSet would require reversing the results to get correct * dependency ordering . */ private void addDependency ( String symbol , Set < String > seen , List < String > list ) throws ServiceException { } }
DependencyInfo dependency = getDependencyInfo ( symbol ) ; if ( dependency == null ) { if ( this . strictRequires ) { throw new ServiceException ( "Unknown require of " + symbol ) ; } } else if ( ! seen . containsAll ( dependency . getProvides ( ) ) ) { seen . addAll ( dependency . getProvides ( ) ) ; for ( String require : dependency . getRequiredSymbols ( ) ) { addDependency ( require , seen , list ) ; } list . add ( dependency . getPathRelativeToClosureBase ( ) ) ; }
public class WeightedIntDiGraph { /** * Constructs a weighted directed graph with the same nodes and edges as * the given unweighted graph */ public static WeightedIntDiGraph fromUnweighted ( IntDiGraph g , Function < DiEdge , Double > makeDefaultWeight ) { } }
WeightedIntDiGraph wg = new WeightedIntDiGraph ( makeDefaultWeight ) ; wg . addAll ( g ) ; return wg ;
public class Bad { /** * Creates a Bad of type B . * @ param < G > the success type of the Or * @ param < B > the failure type of the Or * @ param value the value of the Bad * @ return an instance of Bad */ public static < G , B > Bad < G , B > of ( B value ) { } }
return new Bad < > ( value ) ;
public class Value {
    /**
     * <code>string string_value = 3;</code>
     *
     * Returns the string value of the oneof field when it is set (case 3).
     * The field may be stored as a ByteString; in that case it is decoded as
     * UTF-8 and the decoded String is cached back into the field.
     */
    public java.lang.String getStringValue() {
        // Default to the empty string when the oneof is not in string case.
        java.lang.Object ref = "";
        if (typeCase_ == 3) {
            ref = type_;
        }
        if (ref instanceof java.lang.String) {
            return (java.lang.String) ref;
        } else {
            // Lazily decode the ByteString form and cache the result.
            com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
            java.lang.String s = bs.toStringUtf8();
            if (typeCase_ == 3) {
                type_ = s;
            }
            return s;
        }
    }
}
public class TriggerDefinition { /** * Drop specified trigger from the schema using given mutation . * @ param mutation The schema mutation * @ param cfName The name of the parent ColumnFamily * @ param timestamp The timestamp to use for the tombstone */ public void deleteFromSchema ( Mutation mutation , String cfName , long timestamp ) { } }
ColumnFamily cf = mutation . addOrGet ( SystemKeyspace . SCHEMA_TRIGGERS_CF ) ; int ldt = ( int ) ( System . currentTimeMillis ( ) / 1000 ) ; Composite prefix = CFMetaData . SchemaTriggersCf . comparator . make ( cfName , name ) ; cf . addAtom ( new RangeTombstone ( prefix , prefix . end ( ) , timestamp , ldt ) ) ;
public class AmazonCloudFrontClient { /** * Get the configuration information about a distribution . * @ param getDistributionConfigRequest * The request to get a distribution configuration . * @ return Result of the GetDistributionConfig operation returned by the service . * @ throws NoSuchDistributionException * The specified distribution does not exist . * @ throws AccessDeniedException * Access denied . * @ sample AmazonCloudFront . GetDistributionConfig * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cloudfront - 2018-11-05 / GetDistributionConfig " * target = " _ top " > AWS API Documentation < / a > */ @ Override public GetDistributionConfigResult getDistributionConfig ( GetDistributionConfigRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetDistributionConfig ( request ) ;
public class AmazonEC2Client { /** * Modifies the specified Spot Fleet request . * While the Spot Fleet request is being modified , it is in the < code > modifying < / code > state . * To scale up your Spot Fleet , increase its target capacity . The Spot Fleet launches the additional Spot Instances * according to the allocation strategy for the Spot Fleet request . If the allocation strategy is * < code > lowestPrice < / code > , the Spot Fleet launches instances using the Spot pool with the lowest price . If the * allocation strategy is < code > diversified < / code > , the Spot Fleet distributes the instances across the Spot pools . * To scale down your Spot Fleet , decrease its target capacity . First , the Spot Fleet cancels any open requests that * exceed the new target capacity . You can request that the Spot Fleet terminate Spot Instances until the size of * the fleet no longer exceeds the new target capacity . If the allocation strategy is < code > lowestPrice < / code > , the * Spot Fleet terminates the instances with the highest price per unit . If the allocation strategy is * < code > diversified < / code > , the Spot Fleet terminates instances across the Spot pools . Alternatively , you can * request that the Spot Fleet keep the fleet at its current size , but not replace any Spot Instances that are * interrupted or that you terminate manually . * If you are finished with your Spot Fleet for now , but will use it again later , you can set the target capacity to * @ param modifySpotFleetRequestRequest * Contains the parameters for ModifySpotFleetRequest . * @ return Result of the ModifySpotFleetRequest operation returned by the service . * @ sample AmazonEC2 . ModifySpotFleetRequest * @ see < a href = " http : / / docs . aws . amazon . 
com / goto / WebAPI / ec2-2016-11-15 / ModifySpotFleetRequest " target = " _ top " > AWS API * Documentation < / a > */ @ Override public ModifySpotFleetRequestResult modifySpotFleetRequest ( ModifySpotFleetRequestRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeModifySpotFleetRequest ( request ) ;
public class VirtualMachineScaleSetsInner {
    /**
     * Gets one page of the OS upgrade history of a VM scale set instance.
     *
     * @param resourceGroupName The name of the resource group.
     * @param vmScaleSetName The name of the VM scale set.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;UpgradeOperationHistoricalStatusInfoInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<UpgradeOperationHistoricalStatusInfoInner>>> getOSUpgradeHistorySinglePageAsync(final String resourceGroupName, final String vmScaleSetName) {
        // Validate required caller-supplied and client-level parameters up front.
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (vmScaleSetName == null) {
            throw new IllegalArgumentException("Parameter vmScaleSetName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // Issue the REST call and unwrap the raw ResponseBody into a typed page.
        return service.getOSUpgradeHistory(resourceGroupName, vmScaleSetName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<UpgradeOperationHistoricalStatusInfoInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UpgradeOperationHistoricalStatusInfoInner>>> call(Response<ResponseBody> response) {
                    try {
                        // Deserialize; PageImpl1 is re-wrapped as the public Page interface.
                        ServiceResponse<PageImpl1<UpgradeOperationHistoricalStatusInfoInner>> result = getOSUpgradeHistoryDelegate(response);
                        return Observable.just(new ServiceResponse<Page<UpgradeOperationHistoricalStatusInfoInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        // Surface deserialization/service errors through the Observable.
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class URLClassPath { /** * Get the file extension of given fileName . * @ return the file extension , or null if there is no file extension */ public static String getFileExtension ( String fileName ) { } }
int lastDot = fileName . lastIndexOf ( '.' ) ; return ( lastDot >= 0 ) ? fileName . substring ( lastDot ) : null ;
public class CPDefinitionVirtualSettingUtil { /** * Returns the last cp definition virtual setting in the ordered set where uuid = & # 63 ; and companyId = & # 63 ; . * @ param uuid the uuid * @ param companyId the company ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching cp definition virtual setting , or < code > null < / code > if a matching cp definition virtual setting could not be found */ public static CPDefinitionVirtualSetting fetchByUuid_C_Last ( String uuid , long companyId , OrderByComparator < CPDefinitionVirtualSetting > orderByComparator ) { } }
return getPersistence ( ) . fetchByUuid_C_Last ( uuid , companyId , orderByComparator ) ;
public class ClientDatabase { /** * Get this property from the remote database . * This does not make a remote call , it just return the property cached on remote db open . * @ param strProperty The key to the remote property . * @ return The value . */ public String getRemoteProperty ( String strProperty , boolean readIfNotCached ) { } }
if ( m_remoteProperties == null ) if ( readIfNotCached ) { try { this . getRemoteDatabase ( ) ; } catch ( RemoteException e ) { e . printStackTrace ( ) ; } } if ( m_remoteProperties != null ) return ( String ) m_remoteProperties . get ( strProperty ) ; return this . getFakeRemoteProperty ( strProperty ) ;
public class SessionCsrfSecurityManager { /** * Generates a random String * @ param length length of returned string * @ param chars set of chars which will be using during random string generation * @ return a random string with given length . * @ throws IllegalArgumentException if ( length & lt ; 1 | | chars = = null | | chars . length = = 0) */ private static String getRandomString ( int length , char [ ] chars ) throws IllegalArgumentException { } }
if ( length < 1 ) throw new IllegalArgumentException ( "Invalid length: " + length ) ; if ( chars == null || chars . length == 0 ) throw new IllegalArgumentException ( "Null/Empty chars" ) ; StringBuilder sb = new StringBuilder ( ) ; Random random = new Random ( ) ; for ( int i = 0 ; i < length ; i ++ ) { char c = chars [ random . nextInt ( chars . length ) ] ; sb . append ( c ) ; } return sb . toString ( ) ;
public class AndroidMobileCommandHelper { /** * This method forms a { @ link Map } of parameters for the element * value replacement . It is used against input elements * @ param gsmSignalStrength One of available GSM signal strength * @ return a key - value pair . The key is the command name . The value is a { @ link Map } command arguments . */ public static Map . Entry < String , Map < String , ? > > gsmSignalStrengthCommand ( GsmSignalStrength gsmSignalStrength ) { } }
return new AbstractMap . SimpleEntry < > ( GSM_SIGNAL , prepareArguments ( // https : / / github . com / appium / appium / issues / 12234 new String [ ] { "signalStrengh" , "signalStrength" } , new Object [ ] { gsmSignalStrength . ordinal ( ) , gsmSignalStrength . ordinal ( ) } ) ) ;
public class Log4jLogQuery { @ Override public void logMessage ( LoggingEvent record ) { } }
if ( addMavenCoordinates ) { appendMavenCoordinates ( record ) ; } getEvents ( ) . add ( record ) ;
public class MongoDBClient { /** * Parses the and scroll . * @ param jsonClause * the json clause * @ param collectionName * the collection name * @ return the DB cursor * @ throws JSONParseException * the JSON parse exception */ private DBCursor parseAndScroll ( String jsonClause , String collectionName ) throws JSONParseException { } }
BasicDBObject clause = ( BasicDBObject ) JSON . parse ( jsonClause ) ; DBCursor cursor = mongoDb . getCollection ( collectionName ) . find ( clause ) ; return cursor ;
public class StreamEx { /** * Creates a new Stream which is the result of applying of the mapper * { @ code BiFunction } to the first element of the current stream ( head ) and * the stream containing the rest elements ( tail ) or supplier if the current * stream is empty . The mapper or supplier may return { @ code null } instead * of empty stream . * This is a < a href = " package - summary . html # StreamOps " > quasi - intermediate * operation < / a > with < a href = " package - summary . html # TSO " > tail - stream * optimization < / a > . * Either mapper function or supplier ( but not both ) is applied at most once * during the stream terminal operation execution . Sometimes it ' s useful to * generate stream recursively like this : * < pre > { @ code * / / Stream of fixed size batches * static < T > StreamEx < List < T > > batches ( StreamEx < T > input , int size ) { * return batches ( input , size , Collections . emptyList ( ) ) ; * private static < T > StreamEx < List < T > > batches ( StreamEx < T > input , int size , List < T > cur ) { * return input . headTail ( ( head , tail ) - > cur . size ( ) > = size * ? batches ( tail , size , Arrays . asList ( head ) ) . prepend ( cur ) * : batches ( tail , size , StreamEx . of ( cur ) . append ( head ) . toList ( ) ) , * ( ) - > Stream . of ( cur ) ) ; * } } < / pre > * When possible , use tail - stream optimized operations to reduce the call * stack depth . In particular , the example shown above uses only * { @ code headTail ( ) } , and { @ link # prepend ( Object . . . ) } operations , both of * them are tail - stream optimized , so it will not fail with * { @ code StackOverflowError } on long input stream . * This operation might perform badly with parallel streams . Sometimes the * same semantics could be expressed using other operations like * { @ link # withFirst ( BiFunction ) } or { @ link # mapFirst ( Function ) } which * parallelize better . 
Consider using these methods if its possible in your * case . * @ param < R > The element type of the new stream * @ param mapper a < a * href = " package - summary . html # NonInterference " > non - interfering < / a > * function to apply to the first stream element and the stream of * the rest elements which creates a new stream . * @ param supplier a < a * href = " package - summary . html # NonInterference " > non - interfering < / a > * supplier which creates a resulting stream when this stream is * empty . * @ return the new stream * @ see # headTail ( BiFunction ) * @ since 0.5.3 */ public < R > StreamEx < R > headTail ( BiFunction < ? super T , ? super StreamEx < T > , ? extends Stream < R > > mapper , Supplier < ? extends Stream < R > > supplier ) { } }
HeadTailSpliterator < T , R > spliterator = new HeadTailSpliterator < > ( spliterator ( ) , mapper , supplier ) ; spliterator . context = context = context . detach ( ) ; return new StreamEx < > ( spliterator , context ) ;
public class CompileCache { /** * Unconditionally compiles the template , but does not put it into the * cache . This is useful for ( re ) compilations of a large number of template * where keeping them for a build is not necessary . * @ param tplfile * absolute path of the file to compile * @ return return a result that contains the compiled template */ public Future < CompileResult > compile ( String tplfile ) { } }
Task < CompileResult > task = createTask ( tplfile ) ; compiler . submit ( task ) ; return task ;
public class LinkUtil {
    /**
     * Builds a mapped link to the path (resource path) with optional selectors and extension.
     *
     * @param request the request context for path mapping (the result is always mapped)
     * @param url the URL to use (complete) or the path to an addressed resource (without any extension)
     * @param selectors an optional selector string with all necessary selectors (can be 'null')
     * @param extension an optional extension (can be 'null' for extension determination)
     * @param mapper the mapping strategy for the final link mapping
     * @return the mapped url for the referenced resource
     */
    public static String getUrl(SlingHttpServletRequest request, String url, String selectors, String extension, LinkMapper mapper) {
        // skip blank urls
        if (StringUtils.isBlank(url)) {
            return url;
        }
        // rebuild URL if not external only
        if (!isExternalUrl(url)) {
            ResourceResolver resolver = request.getResourceResolver();
            ResourceHandle resource = ResourceHandle.use(resolver.getResource(url));
            // it's possible that the resource can not be resolved / is virtual but is valid...
            if (resource.isValid()) {
                // forwards and extensions are resolvable for real resources only...
                // check for a target and 'forward' to this target if found
                try {
                    String redirect = getFinalTarget(resource);
                    if (StringUtils.isNotBlank(redirect)) {
                        // restart URL building with the redirect target instead.
                        return getUrl(request, redirect, selectors, extension, mapper);
                    }
                } catch (RedirectLoopException rlex) {
                    // a redirect cycle was detected: log it and keep the original url.
                    LOG.error(rlex.toString());
                }
                // check for a necessary extension and determine it if not specified
                extension = getExtension(resource, extension);
            }
            // map the path (the url) with the resource resolver (encodes the url)
            if (mapper != null) {
                url = mapper.mapUri(request, url);
                url = adjustMappedUrl(request, url);
            }
            if (StringUtils.isNotBlank(extension)) {
                url += extension; // extension starts with a '.'
            }
            // inject selectors into the complete URL because
            // it's possible, that the name always contains the extension...
            if (StringUtils.isNotBlank(selectors)) {
                if (!selectors.startsWith(".")) {
                    selectors = "." + selectors;
                }
                Matcher matcher = SELECTOR_PATTERN.matcher(url);
                if (matcher.matches()) {
                    // splice the selector string between the pattern's two groups.
                    url = matcher.group(1) + selectors + matcher.group(2);
                }
            }
        }
        return url;
    }
}
public class PlotCanvas { /** * Adds a label to this canvas . */ public void label ( String text , Font font , Color color , double ... coord ) { } }
Label label = new Label ( text , coord ) ; label . setFont ( font ) ; label . setColor ( color ) ; add ( label ) ;
public class WeeklyAutoScalingSchedule { /** * The schedule for Thursday . * @ param thursday * The schedule for Thursday . * @ return Returns a reference to this object so that method calls can be chained together . */ public WeeklyAutoScalingSchedule withThursday ( java . util . Map < String , String > thursday ) { } }
setThursday ( thursday ) ; return this ;
public class Utils4J { /** * Creates a ZIP file and adds all files in a directory and all it ' s sub directories to the archive . * @ param srcDir * Directory to add - Cannot be < code > null < / code > and must be a valid directory . * @ param destPath * Path to use for the ZIP archive - May be < code > null < / code > or an empyt string . * @ param destFile * Target ZIP file - Cannot be < code > null < / code > . * @ throws IOException * Error writing to the output stream . */ public static void zipDir ( final File srcDir , final String destPath , final File destFile ) throws IOException { } }
zipDir ( srcDir , null , destPath , destFile ) ;
public class UnconditionalValueDerefAnalysis { /** * Check method call at given location to see if it unconditionally * dereferences a parameter . Mark any such arguments as derefs . * @ param location * the Location of the method call * @ param vnaFrame * ValueNumberFrame at the Location * @ param fact * the dataflow value to modify * @ throws DataflowAnalysisException */ private void checkUnconditionalDerefDatabase ( Location location , ValueNumberFrame vnaFrame , UnconditionalValueDerefSet fact ) throws DataflowAnalysisException { } }
ConstantPoolGen constantPool = methodGen . getConstantPool ( ) ; for ( ValueNumber vn : checkUnconditionalDerefDatabase ( location , vnaFrame , constantPool , invDataflow . getFactAtLocation ( location ) , typeDataflow ) ) { fact . addDeref ( vn , location ) ; }
public class TupleIndexHashTable {
    /**
     * We use this method to avoid two table lookups for the same hashcode; which is what we would have to do if we did
     * a get and then a create if the value is null.
     */
    private TupleList getOrCreate(final Tuple tuple) {
        final int hashCode = this.index.hashCodeOf(tuple, left);
        final int index = indexOf(hashCode, this.table.length);
        TupleList entry = (TupleList) this.table[index];
        // search to find an existing entry in this bucket's chain
        while (entry != null) {
            // NOTE(review): matches() is called with !left while hashCodeOf()
            // used left — presumably intentional (matching the opposite side);
            // confirm against the index implementation.
            if (matches(entry, tuple, hashCode, !left)) {
                return entry;
            }
            entry = entry.getNext();
        }
        // entry does not exist, so create it and push it at the bucket head
        entry = this.index.createEntry(tuple, hashCode, left);
        entry.setNext((TupleList) this.table[index]);
        this.table[index] = entry;
        // grow the table when the load factor threshold is crossed
        if (this.size++ >= this.threshold) {
            resize(2 * this.table.length);
        }
        return entry;
    }
}
public class JvmOperationImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setStrictFloatingPoint ( boolean newStrictFloatingPoint ) { } }
boolean oldStrictFloatingPoint = strictFloatingPoint ; strictFloatingPoint = newStrictFloatingPoint ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , TypesPackage . JVM_OPERATION__STRICT_FLOATING_POINT , oldStrictFloatingPoint , strictFloatingPoint ) ) ;
public class CFTree {
    /**
     * Recursive insertion.
     *
     * @param node Current node
     * @param nv Object data
     * @return New sibling, if the node was split; null otherwise.
     */
    private TreeNode insert(TreeNode node, NumberVector nv) {
        // Find closest child:
        ClusteringFeature[] cfs = node.children;
        assert (cfs[0] != null) : "Unexpected empty node!";
        // Find the best child:
        ClusteringFeature best = cfs[0];
        double bestd = distance.squaredDistance(nv, best);
        for (int i = 1; i < cfs.length; i++) {
            ClusteringFeature cf = cfs[i];
            if (cf == null) {
                // children array is packed; the first null marks the end.
                break;
            }
            double d2 = distance.squaredDistance(nv, cf);
            if (d2 < bestd) {
                best = cf;
                bestd = d2;
            }
        }
        // Leaf node:
        if (!(best instanceof TreeNode)) {
            // Threshold constraint satisfied? Then absorb into the existing CF.
            if (absorption.squaredCriterion(best, nv) <= thresholdsq) {
                best.addToStatistics(nv);
                node.addToStatistics(nv);
                return null;
            }
            // Otherwise open a new leaf clustering feature for this point.
            best = new ClusteringFeature(nv.getDimensionality());
            best.addToStatistics(nv);
            ++leaves;
            if (add(node.children, best)) {
                node.addToStatistics(nv); // Update statistics
                return null;
            }
            // No room in this node: split it, returning the new sibling.
            return split(node, best);
        }
        assert (best instanceof TreeNode) : "Node is neither child nor inner?";
        // Inner node: recurse into the closest subtree; a non-null return
        // means the child was split and its sibling must be inserted here.
        TreeNode newchild = insert((TreeNode) best, nv);
        if (newchild == null || add(node.children, newchild)) {
            node.addToStatistics(nv); // Update statistics
            return null;
        }
        return split(node, newchild);
    }
}
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getTBM ( ) { } }
if ( tbmEClass == null ) { tbmEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 342 ) ; } return tbmEClass ;
public class D6CrudInsertHelper {
    /**
     * Map model object properties to DB (prepared statement).
     * Auto-increment columns are skipped; NOT NULL columns with a null field
     * value cause a D6Exception.
     *
     * @param mModelObj the model whose fields are read via reflection
     * @param preparedStatement the statement whose parameters are populated
     * @param includeExcludeColumnNames optional column filter; may be null
     * @throws D6Exception on reflection failure, constraint violation, or setValue failure
     */
    void map(D6Model mModelObj, PreparedStatement preparedStatement, D6Inex includeExcludeColumnNames) throws D6Exception {
        log("#map obj=" + mModelObj);
        final Set<String> columnNameSet = getAllColumnNames();
        // index starts from 1 (JDBC parameter indices are 1-based)
        int parameterIndex = 1;
        if (includeExcludeColumnNames != null) {
            // apply the include/exclude filter to the column set in place
            includeExcludeColumnNames.manipulate(columnNameSet);
        }
        for (String columnName : columnNameSet) {
            final D6ModelClassFieldInfo fieldInfo = getFieldInfo(columnName);
            final boolean isAutoIncrement = fieldInfo.isAutoIncrement;
            final boolean isNullable = fieldInfo.isNullable;
            final Field field = fieldInfo.field;
            final String fieldName = field.getName();
            final Class<?> type = field.getType();
            Object fieldValue = null;
            try {
                // read the model's field value reflectively
                fieldValue = field.get(mModelObj);
            } catch (IllegalArgumentException e) {
                throw new D6Exception(e);
            } catch (IllegalAccessException e) {
                throw new D6Exception(e);
            }
            if (fieldValue == null && isNullable == false && isAutoIncrement == false) {
                // - In case found the null-value column and increment flag is
                // false even though this item is NOT NULL
                throw new D6Exception("D6Error the model object field '" + fieldName + "'(column name is " + columnName + ") should not null." + "Because of constraint of table definition.");
            }
            if (isAutoIncrement == false) {
                // auto-increment columns get no parameter; the DB assigns them
                try {
                    log("#map idx=" + parameterIndex + " " + columnName + "=" + fieldValue + " (" + type + ")");
                    setValue(parameterIndex, preparedStatement, type, fieldValue);
                } catch (Exception e) {
                    throw new D6Exception(e);
                }
                parameterIndex++;
            }
        } // end for ( String columnName : columnNameSet ) {
    }
}
public class FeatureManagerBuilder { /** * Use the supplied feature enum classes for the feature manager . Same as calling { @ link # featureProvider ( FeatureProvider ) } * with { @ link EnumBasedFeatureProvider } . Please note calling this method also set the name of the feature manager to the * simple name of the first feature enum ' s type . */ public FeatureManagerBuilder featureEnums ( Class < ? extends Feature > ... featureEnum ) { } }
this . featureProvider = new EnumBasedFeatureProvider ( featureEnum ) ; this . name = "FeatureManager[" + featureEnum [ 0 ] . getSimpleName ( ) + "]" ; return this ;
public class ExecutionCompletionService { /** * { @ inheritDoc CompletionService } * This future may safely be used as a NotifyingFuture if desired . This * is because if it tries to set a listener it will be called immediately * since the task has already been completed . */ public NotifyingFuture < V > poll ( long timeout , TimeUnit unit ) throws InterruptedException { } }
return completionQueue . poll ( timeout , unit ) ;
public class DriverNode { /** * initialize driver node and all children of the node */ public void initialize ( ) { } }
thread = new Thread ( collectorProcessor ) ; thread . start ( ) ; for ( DriverNode dn : this . children ) { dn . initialize ( ) ; }
public class DFSck {
    /**
     * To get the list, we need to call iteratively until the server says
     * there is no more left.
     *
     * @param dir the path being checked (used in output messages)
     * @param limit maximum number of corrupt files to list
     * @param baseUrl the fsck servlet URL to which pagination params are appended
     * @return 0 if no corrupt files were found, -1 otherwise
     * @throws IOException on connection or read failure
     */
    private Integer listCorruptFileBlocks(String dir, int limit, String baseUrl) throws IOException {
        int errCode = -1;
        int numCorrupt = 0;
        int cookie = 0;
        String lastBlock = null;
        final String noCorruptLine = "has no CORRUPT files";
        final String noMoreCorruptLine = "has no more CORRUPT files";
        final String cookiePrefix = "Cookie:";
        boolean allDone = false;
        while (!allDone) {
            // Build the paginated request URL for this round.
            final StringBuffer url = new StringBuffer(baseUrl);
            if (cookie > 0) {
                url.append("&startblockafterIndex=").append(String.valueOf(cookie));
            } else if (lastBlock != null) {
                // for backwards compatibility purpose
                url.append("&startblockafter=").append(lastBlock);
            }
            URL path = new URL(url.toString());
            // SecurityUtil.fetchServiceTicket(path);
            URLConnection connection = path.openConnection();
            InputStream stream = connection.getInputStream();
            BufferedReader input = new BufferedReader(new InputStreamReader(stream, "UTF-8"));
            try {
                String line = null;
                while ((line = input.readLine()) != null) {
                    if (line.startsWith(cookiePrefix)) {
                        // Server-sent pagination cookie for the next request.
                        try {
                            cookie = Integer.parseInt(line.split("\t")[1]);
                        } catch (Exception e) {
                            // Malformed cookie: stop paginating.
                            allDone = true;
                            break;
                        }
                        continue;
                    }
                    // Terminal server messages, or the caller's limit reached.
                    if ((line.endsWith(noCorruptLine)) || (line.endsWith(noMoreCorruptLine)) || (line.endsWith(NamenodeFsck.HEALTHY_STATUS)) || (line.endsWith(NamenodeFsck.NONEXISTENT_STATUS)) || numCorrupt >= limit) {
                        allDone = true;
                        break;
                    }
                    // Skip banner/noise lines that are not file entries.
                    if ((line.isEmpty()) || (line.startsWith("FSCK started by")) || (line.startsWith("Unable to locate any corrupt files under")) || (line.startsWith("The filesystem under path"))) continue;
                    numCorrupt++;
                    if (numCorrupt == 1) {
                        out.println("The list of corrupt files under path '" + dir + "' are:");
                    }
                    out.println(line);
                    try {
                        // Get the block # that we need to send in next call
                        lastBlock = line.split("\t")[0];
                    } catch (Exception e) {
                        allDone = true;
                        break;
                    }
                }
            } finally {
                input.close();
            }
        }
        out.println("The filesystem under path '" + dir + "' has " + numCorrupt + " CORRUPT files");
        if (numCorrupt == 0) errCode = 0;
        return errCode;
    }
}
public class Http2ClientInitializer { /** * Configure the pipeline for a cleartext upgrade from HTTP to HTTP / 2. */ private void configureClearTextWithHttpUpgrade ( SocketChannel ch ) { } }
HttpClientCodec sourceCodec = new HttpClientCodec ( ) ; Http2ClientUpgradeCodec upgradeCodec = new Http2ClientUpgradeCodec ( connectionHandler ) ; HttpClientUpgradeHandler upgradeHandler = new HttpClientUpgradeHandler ( sourceCodec , upgradeCodec , 65536 ) ; ch . pipeline ( ) . addLast ( sourceCodec , upgradeHandler , new UpgradeRequestHandler ( ) , new UserEventLogger ( ) ) ;
public class DynamicArray { /** * Returns the index of the last occurrence of the specified element * in this list , or - 1 if this list does not contain the element . * More formally , returns the highest index < tt > i < / tt > such that * < tt > ( o = = null & nbsp ; ? & nbsp ; get ( i ) = = null & nbsp ; : & nbsp ; o . equals ( get ( i ) ) ) < / tt > , * or - 1 if there is no such index . */ public int lastIndexOf ( Object o ) { } }
if ( o == null ) { for ( int i = size - 1 ; i >= 0 ; i -- ) if ( data [ i ] == null ) return i ; } else { for ( int i = size - 1 ; i >= 0 ; i -- ) if ( o . equals ( data [ i ] ) ) return i ; } return - 1 ;
public class DCDs {
    /**
     * Returns the result of evaluating equation 24 for an individual index
     * (section 6.2.2 of the reference).
     *
     * @param beta_i the weight coefficient value
     * @param gN the g'<sub>n</sub>(beta_i) value
     * @param gP the g'<sub>p</sub>(beta_i) value
     * @param U the upper bound value obtained from {@link #getU(double)}
     * @return the result of equation 24
     */
    protected static double eq24(final double beta_i, final double gN, final double gP, final double U) {
        if (beta_i == 0) {
            // beta_i = 0: use g'n when non-negative, else -g'p when non-positive.
            if (gN >= 0) {
                return gN;
            }
            if (gP <= 0) {
                return -gP;
            }
            return 0;
        }
        if (beta_i < 0) {
            // beta_i in (-U, 0), or beta_i = -U with g'n(beta_i) <= 0 -> |g'n|
            return (beta_i > -U || (beta_i == -U && gN <= 0)) ? Math.abs(gN) : 0;
        }
        // beta_i in (0, U), or beta_i = U with g'p(beta_i) >= 0 -> |g'p|
        return (beta_i < U || (beta_i == U && gP >= 0)) ? Math.abs(gP) : 0;
    }
}
public class UtilUnsafe {
    /**
     * Fetch the Unsafe. Use With Caution.
     * Falls back to reflective access to the {@code theUnsafe} singleton when
     * this class was not loaded by the bootstrap classloader.
     */
    public static Unsafe getUnsafe() {
        // Not on bootclasspath
        if (UtilUnsafe.class.getClassLoader() == null) {
            // Bootstrap-loaded code may call getUnsafe() directly.
            return Unsafe.getUnsafe();
        }
        try {
            Field theUnsafe = Unsafe.class.getDeclaredField("theUnsafe");
            theUnsafe.setAccessible(true);
            return (Unsafe) theUnsafe.get(UtilUnsafe.class);
        } catch (Exception e) {
            throw new RuntimeException("Could not obtain access to sun.misc.Unsafe", e);
        }
    }
}
public class S3OutputLocationMarshaller { /** * Marshall the given parameter object . */ public void marshall ( S3OutputLocation s3OutputLocation , ProtocolMarshaller protocolMarshaller ) { } }
if ( s3OutputLocation == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( s3OutputLocation . getOutputS3Region ( ) , OUTPUTS3REGION_BINDING ) ; protocolMarshaller . marshall ( s3OutputLocation . getOutputS3BucketName ( ) , OUTPUTS3BUCKETNAME_BINDING ) ; protocolMarshaller . marshall ( s3OutputLocation . getOutputS3KeyPrefix ( ) , OUTPUTS3KEYPREFIX_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class AnnivMaster { /** * Set up the screen input fields . */ public void setupFields ( ) { } }
FieldInfo field = null ; field = new FieldInfo ( this , ID , Constants . DEFAULT_FIELD_LENGTH , null , null ) ; field . setDataClass ( Integer . class ) ; field . setHidden ( true ) ; field = new FieldInfo ( this , LAST_CHANGED , Constants . DEFAULT_FIELD_LENGTH , null , null ) ; field . setDataClass ( Date . class ) ; field . setHidden ( true ) ; field = new FieldInfo ( this , DELETED , 10 , null , new Boolean ( false ) ) ; field . setDataClass ( Boolean . class ) ; field . setHidden ( true ) ; field = new FieldInfo ( this , START_DATE_TIME , 25 , null , null ) ; field . setDataClass ( Date . class ) ; field = new FieldInfo ( this , END_DATE_TIME , 25 , null , null ) ; field . setDataClass ( Date . class ) ; field = new FieldInfo ( this , DESCRIPTION , 60 , null , null ) ; field = new FieldInfo ( this , REPEAT_INTERVAL_ID , Constants . DEFAULT_FIELD_LENGTH , null , null ) ; field . setDataClass ( Integer . class ) ; field = new FieldInfo ( this , REPEAT_COUNT , 5 , null , null ) ; field . setDataClass ( Short . class ) ; field = new FieldInfo ( this , CALENDAR_CATEGORY_ID , Constants . DEFAULT_FIELD_LENGTH , null , null ) ; field . setDataClass ( Integer . class ) ; field = new FieldInfo ( this , HIDDEN , 10 , null , new Boolean ( false ) ) ; field . setDataClass ( Boolean . class ) ; field = new FieldInfo ( this , PROPERTIES , Constants . DEFAULT_FIELD_LENGTH , null , null ) ;
public class ForgeVM { /** * Put the VM in the ready state iff * it does not already belong to the mapping . * @ param m the model to modify * @ return { @ code true } iff successful */ @ Override public boolean applyAction ( Model m ) { } }
Mapping map = m . getMapping ( ) ; if ( ! map . contains ( id ) ) { map . addReadyVM ( id ) ; return true ; } return false ;
public class FileSupport { /** * Locates a file in the classpath . * @ param fileName * @ param classPath * @ return the found file or null if the file can not be located */ public static File getFileFromDirectoryInClassPath ( String fileName , String classPath ) { } }
Collection < String > paths = StringSupport . split ( classPath , ";:" , false ) ; for ( String singlePath : paths ) { File dir = new File ( singlePath ) ; if ( dir . isDirectory ( ) ) { File file = new File ( singlePath + '/' + fileName ) ; if ( file . exists ( ) ) { return file ; } } } return null ;
public class Deadline { /** * Deadline . after ( - 100 * 365 , DAYS ) */ public Deadline offset ( long offset , TimeUnit units ) { } }
// May already be expired if ( offset == 0 ) { return this ; } return new Deadline ( ticker , deadlineNanos , units . toNanos ( offset ) , isExpired ( ) ) ;
public class PluginRepositoryUtil { /** * Loads a full repository definition from an XML file . * @ param repo * The repository that must be loaded * @ param cl * The classloader to be used to instantiate the plugin classes * @ param in * The stream to the XML file * @ throws PluginConfigurationException */ public static void loadFromXmlPluginPackageDefinitions ( final IPluginRepository repo , final ClassLoader cl , final InputStream in ) throws PluginConfigurationException { } }
for ( PluginDefinition pd : loadFromXmlPluginPackageDefinitions ( cl , in ) ) { repo . addPluginDefinition ( pd ) ; }
public class ZoneId { /** * Parses the ID , taking a flag to indicate whether { @ code ZoneRulesException } * should be thrown or not , used in deserialization . * @ param zoneId the time - zone ID , not null * @ param checkAvailable whether to check if the zone ID is available * @ return the zone ID , not null * @ throws DateTimeException if the ID format is invalid * @ throws ZoneRulesException if checking availability and the ID cannot be found */ static ZoneId of ( String zoneId , boolean checkAvailable ) { } }
Objects . requireNonNull ( zoneId , "zoneId" ) ; if ( zoneId . length ( ) <= 1 || zoneId . startsWith ( "+" ) || zoneId . startsWith ( "-" ) ) { return ZoneOffset . of ( zoneId ) ; } else if ( zoneId . startsWith ( "UTC" ) || zoneId . startsWith ( "GMT" ) ) { return ofWithPrefix ( zoneId , 3 , checkAvailable ) ; } else if ( zoneId . startsWith ( "UT" ) ) { return ofWithPrefix ( zoneId , 2 , checkAvailable ) ; } return ZoneRegion . ofId ( zoneId , checkAvailable ) ;
public class RecurringData {

    /**
     * Calculate start dates for a monthly absolute recurrence, i.e. the same
     * day number in each month, stepping {@code frequency} months at a time.
     *
     * @param calendar current date (mutated by this method as it advances)
     * @param frequency number of months between occurrences
     * @param dates array of start dates, appended to by this method
     */
    private void getMonthlyAbsoluteDates(Calendar calendar, int frequency, List<Date> dates) {
        int currentDayNumber = calendar.get(Calendar.DAY_OF_MONTH);
        // Work from the first of the month so month arithmetic below is not
        // affected by the day currently set on the calendar.
        calendar.set(Calendar.DAY_OF_MONTH, 1);
        int requiredDayNumber = NumberHelper.getInt(m_dayNumber);
        if (requiredDayNumber < currentDayNumber) {
            // The target day in this month has already passed: start next month.
            calendar.add(Calendar.MONTH, 1);
        }
        // moreDates (project helper) decides when the recurrence is exhausted.
        while (moreDates(calendar, dates)) {
            int useDayNumber = requiredDayNumber;
            int maxDayNumber = calendar.getActualMaximum(Calendar.DAY_OF_MONTH);
            if (useDayNumber > maxDayNumber) {
                // Clamp e.g. day 31 to the last day of shorter months.
                useDayNumber = maxDayNumber;
            }
            calendar.set(Calendar.DAY_OF_MONTH, useDayNumber);
            dates.add(calendar.getTime());
            // Reset to day 1 before advancing so adding months cannot roll over
            // when the current day exceeds the next month's length.
            calendar.set(Calendar.DAY_OF_MONTH, 1);
            calendar.add(Calendar.MONTH, frequency);
        }
    }
}
public class SeekBarPreference {

    /**
     * Obtains the summaries, which are shown depending on the currently persisted value, from a
     * specific typed array.
     *
     * @param typedArray
     *         The typed array, the summaries should be obtained from, as an instance of the class
     *         {@link TypedArray}. The typed array may not be null
     */
    private void obtainSummaries(@NonNull final TypedArray typedArray) {
        try {
            setSummaries(typedArray.getTextArray(R.styleable.SeekBarPreference_android_summary));
        } catch (Resources.NotFoundException e) {
            // No summary array resource was declared; fall back to none.
            setSummaries(null);
        }
    }
}
public class UserDaoImpl { /** * Rotate the primary access key - > secondary access key , dropping the old secondary access key and generating a new primary access key * @ param id */ @ Transactional public void rotateUserAccessKey ( final int id ) { } }
final UserEntity account = getById ( id ) ; if ( account != null ) { // Set the secondary token to the old primary token account . setAccessKeySecondary ( account . getAccessKey ( ) ) ; // Now regenerate the primary token account . setAccessKey ( SimpleId . alphanumeric ( UserManagerBearerToken . PREFIX , 100 ) ) ; update ( account ) ; } else { throw new IllegalArgumentException ( "No such user: " + id ) ; }
public class BasicFlowletContext { /** * Create a new { @ link TransactionContext } for this flowlet . Add all { @ link TransactionAware } s to the context . * @ return a new TransactionContext . */ public TransactionContext createTransactionContext ( ) { } }
transactionContext = dataFabricFacade . createTransactionManager ( ) ; for ( TransactionAware transactionAware : transactionAwares ) { this . transactionContext . addTransactionAware ( transactionAware ) ; } return transactionContext ;
public class PairSet {

    /**
     * Gets the <code>i</code><sup>th</sup> element of the set.
     *
     * @param index
     *            position of the element in the sorted set
     * @return the <code>i</code><sup>th</sup> element of the set
     * @throws IndexOutOfBoundsException
     *             if <code>i</code> is less than zero, or greater or equal to
     *             {@link #size()}
     */
    @Override
    public Pair<T, I> get(int index) {
        // Look up the encoded entry in the backing matrix and decode it to a pair.
        return indexToPair(matrix.get(index));
    }
}
public class AppServicePlansInner { /** * Get a Virtual Network gateway . * Get a Virtual Network gateway . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param name Name of the App Service plan . * @ param vnetName Name of the Virtual Network . * @ param gatewayName Name of the gateway . Only the ' primary ' gateway is supported . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the VnetGatewayInner object */ public Observable < ServiceResponse < VnetGatewayInner > > getVnetGatewayWithServiceResponseAsync ( String resourceGroupName , String name , String vnetName , String gatewayName ) { } }
if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( name == null ) { throw new IllegalArgumentException ( "Parameter name is required and cannot be null." ) ; } if ( vnetName == null ) { throw new IllegalArgumentException ( "Parameter vnetName is required and cannot be null." ) ; } if ( gatewayName == null ) { throw new IllegalArgumentException ( "Parameter gatewayName is required and cannot be null." ) ; } if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } return service . getVnetGateway ( resourceGroupName , name , vnetName , gatewayName , this . client . subscriptionId ( ) , this . client . apiVersion ( ) , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < VnetGatewayInner > > > ( ) { @ Override public Observable < ServiceResponse < VnetGatewayInner > > call ( Response < ResponseBody > response ) { try { ServiceResponse < VnetGatewayInner > clientResponse = getVnetGatewayDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
public class DocumentRootImpl {

    /**
     * Resets the feature identified by {@code featureID} to its default value.
     * List features are cleared; single-valued document-root features are set
     * to null; anything else is delegated to the superclass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            // Multi-valued features: unset by clearing the list.
            case BpsimPackage.DOCUMENT_ROOT__MIXED: getMixed().clear(); return;
            case BpsimPackage.DOCUMENT_ROOT__XMLNS_PREFIX_MAP: getXMLNSPrefixMap().clear(); return;
            case BpsimPackage.DOCUMENT_ROOT__XSI_SCHEMA_LOCATION: getXSISchemaLocation().clear(); return;
            // Single-valued features: unset by assigning null through the setter.
            case BpsimPackage.DOCUMENT_ROOT__BETA_DISTRIBUTION: setBetaDistribution((BetaDistributionType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__PARAMETER_VALUE: setParameterValue((ParameterValue) null); return;
            case BpsimPackage.DOCUMENT_ROOT__BINOMIAL_DISTRIBUTION: setBinomialDistribution((BinomialDistributionType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__BOOLEAN_PARAMETER: setBooleanParameter((BooleanParameterType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__BP_SIM_DATA: setBPSimData((BPSimDataType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__DATE_TIME_PARAMETER: setDateTimeParameter((DateTimeParameterType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__DURATION_PARAMETER: setDurationParameter((DurationParameterType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__ENUM_PARAMETER: setEnumParameter((EnumParameterType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__ERLANG_DISTRIBUTION: setErlangDistribution((ErlangDistributionType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__EXPRESSION_PARAMETER: setExpressionParameter((ExpressionParameterType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__FLOATING_PARAMETER: setFloatingParameter((FloatingParameterType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__GAMMA_DISTRIBUTION: setGammaDistribution((GammaDistributionType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__LOG_NORMAL_DISTRIBUTION: setLogNormalDistribution((LogNormalDistributionType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__NEGATIVE_EXPONENTIAL_DISTRIBUTION: setNegativeExponentialDistribution((NegativeExponentialDistributionType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__NORMAL_DISTRIBUTION: setNormalDistribution((NormalDistributionType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__NUMERIC_PARAMETER: setNumericParameter((NumericParameterType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__POISSON_DISTRIBUTION: setPoissonDistribution((PoissonDistributionType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__STRING_PARAMETER: setStringParameter((StringParameterType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__TRIANGULAR_DISTRIBUTION: setTriangularDistribution((TriangularDistributionType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__TRUNCATED_NORMAL_DISTRIBUTION: setTruncatedNormalDistribution((TruncatedNormalDistributionType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__UNIFORM_DISTRIBUTION: setUniformDistribution((UniformDistributionType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__USER_DISTRIBUTION: setUserDistribution((UserDistributionType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__USER_DISTRIBUTION_DATA_POINT: setUserDistributionDataPoint((UserDistributionDataPointType) null); return;
            case BpsimPackage.DOCUMENT_ROOT__WEIBULL_DISTRIBUTION: setWeibullDistribution((WeibullDistributionType) null); return;
        }
        // Unknown feature ids are handled by the generated superclass.
        super.eUnset(featureID);
    }
}
public class ByteBufFlux { /** * Open a { @ link java . nio . channels . FileChannel } from a path and stream * { @ link ByteBuf } chunks with a given maximum size into the returned * { @ link ByteBufFlux } , using the provided { @ link ByteBufAllocator } . * @ param path the path to the resource to stream * @ param maxChunkSize the maximum per - item ByteBuf size * @ param allocator the channel { @ link ByteBufAllocator } * @ return a { @ link ByteBufFlux } */ public static ByteBufFlux fromPath ( Path path , int maxChunkSize , ByteBufAllocator allocator ) { } }
Objects . requireNonNull ( path , "path" ) ; Objects . requireNonNull ( allocator , "allocator" ) ; if ( maxChunkSize < 1 ) { throw new IllegalArgumentException ( "chunk size must be strictly positive, " + "was: " + maxChunkSize ) ; } return new ByteBufFlux ( Flux . generate ( ( ) -> FileChannel . open ( path ) , ( fc , sink ) -> { ByteBuf buf = allocator . buffer ( ) ; try { if ( buf . writeBytes ( fc , maxChunkSize ) < 0 ) { buf . release ( ) ; sink . complete ( ) ; } else { sink . next ( buf ) ; } } catch ( IOException e ) { buf . release ( ) ; sink . error ( e ) ; } return fc ; } ) , allocator ) ;
public class HttpServer {

    /**
     * Restores this server from a serialized stream, then re-applies the
     * deserialized contexts and listeners through their setters so transient
     * wiring (e.g. the virtual host map) is rebuilt.
     *
     * @param in the object stream being read
     * @throws IOException on stream failure
     * @throws ClassNotFoundException if a serialized class cannot be resolved
     */
    private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
        in.defaultReadObject();
        // Snapshot the deserialized members before clearing the collections,
        // because setContexts/setListeners repopulate them from these arrays.
        HttpListener[] listeners = getListeners();
        HttpContext[] contexts = getContexts();
        _listeners.clear();
        _virtualHostMap.clear();
        setContexts(contexts);
        setListeners(listeners);
        // _statsLock is transient; recreate the lock object after deserialization.
        _statsLock = new Object[0];
    }
}
public class Task { /** * Inserts a child task prior to a given sibling task . * @ param child new child task * @ param previousSibling sibling task */ public void addChildTaskBefore ( Task child , Task previousSibling ) { } }
int index = m_children . indexOf ( previousSibling ) ; if ( index == - 1 ) { m_children . add ( child ) ; } else { m_children . add ( index , child ) ; } child . m_parent = this ; setSummary ( true ) ; if ( getParentFile ( ) . getProjectConfig ( ) . getAutoOutlineLevel ( ) == true ) { child . setOutlineLevel ( Integer . valueOf ( NumberHelper . getInt ( getOutlineLevel ( ) ) + 1 ) ) ; }
public class Performance { /** * Writes log information . * @ param startTimeMs the start time in milliseconds * @ param times the number of the iteration * @ param msg the message * @ param workerId the id of the worker */ public static void logPerIteration ( long startTimeMs , int times , String msg , int workerId ) { } }
long takenTimeMs = System . currentTimeMillis ( ) - startTimeMs ; double result = 1000.0 * sFileBytes / takenTimeMs / 1024 / 1024 ; LOG . info ( times + msg + workerId + " : " + result + " Mb/sec. Took " + takenTimeMs + " ms. " ) ;
public class PmiDataInfo { /** * Creates a copy of this object * @ return a copy of this object */ public PmiDataInfo copy ( ) { } }
PmiDataInfo r = new PmiDataInfo ( id ) ; // name is translatable if ( name != null ) r . name = new String ( name ) ; // description is translatable if ( description != null ) r . description = new String ( description ) ; // unit is translatable if ( unit != null ) r . unit = new String ( unit ) ; r . category = category ; r . type = type ; r . level = level ; r . resettable = resettable ; r . aggregatable = aggregatable ; r . zosAggregatable = zosAggregatable ; r . onRequest = onRequest ; r . statSet = statSet ; r . platform = platform ; r . submoduleName = submoduleName ; r . participation = participation ; r . comment = comment ; r . dependencyList = dependencyList ; return r ;
public class DSLSAMLAuthenticationProvider { /** * Logger for SAML events , cannot be null , must be set . * @ param samlLogger logger */ @ Override @ Autowired ( required = false ) public void setSamlLogger ( SAMLLogger samlLogger ) { } }
Assert . notNull ( samlLogger , "SAMLLogger can't be null" ) ; this . samlLogger = samlLogger ;
public class ConfigPropertyBean { /** * { @ inheritDoc } */ @ SuppressWarnings ( "unchecked" ) @ Override public T create ( CreationalContext < T > creationalContext ) { } }
InjectionPoint injectionPoint = getInjectionPoint ( beanManager , creationalContext ) ; // Note the config is cached per thread context class loader // This shouldn ' t matter though as the config object is updated with values dynamically // Also means that injecting config does things the same way as calling ` getConfig ( ) . getValue ( ) ` Config config = ConfigProvider . getConfig ( ) ; T instance = null ; Type ipType = injectionPoint . getType ( ) ; boolean optional = false ; if ( ipType instanceof ParameterizedType ) { ParameterizedType pType = ( ParameterizedType ) ipType ; Type rType = pType . getRawType ( ) ; optional = ( rType == Optional . class ) ; } instance = ( T ) ConfigProducer . newValue ( config , injectionPoint , ipType , optional ) ; return instance ;
public class MPPUtility { /** * This method reads an eight byte double from the input array . * @ param data the input array * @ param offset offset of double data in the array * @ return double value */ public static final double getDouble ( byte [ ] data , int offset ) { } }
double result = Double . longBitsToDouble ( getLong ( data , offset ) ) ; if ( Double . isNaN ( result ) ) { result = 0 ; } return result ;
public class AbstractIntSet { /** * Add all of the values in the supplied array to the set . * @ param values elements to be added to this set . * @ return < tt > true < / tt > if this set did not already contain all of the specified elements . */ public boolean add ( int [ ] values ) { } }
boolean modified = false ; int vlength = values . length ; for ( int i = 0 ; i < vlength ; i ++ ) { modified = ( add ( values [ i ] ) || modified ) ; } return modified ;
public class DefaultCacheManager { /** * { @ inheritDoc } */ @ Override public List < Address > getMembers ( ) { } }
Transport t = getTransport ( ) ; return t == null ? null : t . getMembers ( ) ;
public class ReflectionUtils { /** * < p > setSystemOutputs . < / p > * @ param classLoader a { @ link java . lang . ClassLoader } object . * @ param out a { @ link java . io . PrintStream } object . * @ param err a { @ link java . io . PrintStream } object . * @ throws java . lang . Exception if any . */ public static void setSystemOutputs ( ClassLoader classLoader , PrintStream out , PrintStream err ) throws Exception { } }
Class < ? > systemClass = classLoader . loadClass ( "java.lang.System" ) ; Method setSystemOutMethod = systemClass . getMethod ( "setOut" , PrintStream . class ) ; setSystemOutMethod . invoke ( null , out ) ; Method setSystemErrMethod = systemClass . getMethod ( "setErr" , PrintStream . class ) ; setSystemErrMethod . invoke ( null , err ) ;
public class DividableGridAdapter {

    /**
     * Sets the width of the bottom sheet, the items, which are displayed by the adapter, belong
     * to, and recomputes the column count accordingly.
     *
     * @param width
     *         The width, which should be set, as an {@link Integer} value
     */
    public final void setWidth(final int width) {
        if (style == Style.LIST_COLUMNS && (getDeviceType(context) == DeviceType.TABLET || getOrientation(context) == Orientation.LANDSCAPE)) {
            // Two-column list layout on tablets or in landscape orientation.
            columnCount = 2;
        } else if (style == Style.GRID) {
            int padding = context.getResources().getDimensionPixelSize(R.dimen.bottom_sheet_grid_item_horizontal_padding);
            int itemSize = context.getResources().getDimensionPixelSize(R.dimen.bottom_sheet_grid_item_size);
            // On portrait phones the full display width is used as the basis,
            // otherwise the supplied width; fit as many fixed-size items as the
            // padded width allows.
            columnCount = ((getDeviceType(context) != DeviceType.TABLET && context.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT ? context.getResources().getDisplayMetrics().widthPixels : width) - 2 * padding) / itemSize;
        } else {
            // Plain list: single column.
            columnCount = 1;
        }
        // NOTE(review): nulling rawItems appears to invalidate a cached item
        // layout so it is rebuilt for the new column count — confirm against
        // the rest of the class.
        rawItems = null;
        notifyDataSetChanged();
    }
}
public class Tesseract1 {

    /**
     * Performs OCR operation. Use <code>SetImage</code>, (optionally)
     * <code>SetRectangle</code>, and one or more of the <code>Get*Text</code>
     * functions.
     *
     * @param xsize width of image
     * @param ysize height of image
     * @param buf pixel data
     * @param rect the bounding rectangle defines the region of the image to be
     *        recognized. A rectangle of zero dimension or <code>null</code> indicates
     *        the whole image.
     * @param bpp bits per pixel, represents the bit depth of the image, with 1
     *        for binary bitmap, 8 for gray, and 24 for color RGB.
     * @return the recognized text
     * @throws TesseractException
     */
    @Override
    public String doOCR(int xsize, int ysize, ByteBuffer buf, Rectangle rect, int bpp) throws TesseractException {
        // Delegate to the extended overload; the extra argument (presumably an
        // input file name — TODO confirm against the overload) is null here.
        return doOCR(xsize, ysize, buf, null, rect, bpp);
    }
}
public class XMLHelper {

    /**
     * Helper program: evaluates a pre-compiled XPath expression against a DOM
     * node and returns the matching nodes.
     *
     * @param node the node the expression is evaluated against
     * @param xPath the compiled XPath expression
     * @return NodeList of matching nodes
     * @throws XPathExpressionException the XPath expression exception
     */
    public static NodeList getElementsB(Node node, XPathExpression xPath) throws XPathExpressionException {
        // NODESET return type guarantees the cast to NodeList is safe.
        return (NodeList) xPath.evaluate(node, XPathConstants.NODESET);
    }
}
public class XMLParser { /** * / * ( non - Javadoc ) * @ see com . abubusoft . kripton . xml . XmlPullParser # setProperty ( java . lang . String , java . lang . Object ) */ @ Override public void setProperty ( String property , Object value ) { } }
if ( property . equals ( PROPERTY_LOCATION ) ) { location = String . valueOf ( value ) ; } else { throw new KriptonRuntimeException ( "unsupported property: " + property ) ; }
public class Wootric {

    /**
     * It configures the SDK with required parameters and returns the shared
     * singleton, creating it on first use (double-checked locking).
     *
     * @param activity Activity where the survey will be presented.
     * @param clientId Found in API section of the Wootric's admin panel.
     * @param accountToken Found in Install section of the Wootric's admin panel.
     */
    public static Wootric init(Activity activity, String clientId, String accountToken) {
        // Double-checked locking: fast path reads the singleton without locking.
        Wootric local = singleton;
        if (local == null) {
            synchronized (Wootric.class) {
                // Re-check inside the lock in case another thread created it.
                local = singleton;
                if (local == null) {
                    // Arguments are only validated when an instance is actually created.
                    checkNotNull(activity, "Activity");
                    checkNotNull(clientId, "Client Id");
                    checkNotNull(accountToken, "Account Token");
                    singleton = local = new Wootric(activity, clientId, accountToken);
                }
            }
        }
        return local;
    }
}
public class AbstractWSelectList {

    /**
     * Retrieves the code for the given option. Will return null if there is no matching option.
     *
     * @param option the option
     * @param index the index of the option in the list; a negative index means
     *              the option was not found among the available options
     * @return the code for the given option, or null if there is no matching option.
     */
    protected String optionToCode(final Object option, final int index) {
        if (index < 0) {
            // The option was not found: raise an integrity issue with as much
            // diagnostic detail as possible, then return null.
            List<?> options = getOptions();
            if (options == null || options.isEmpty()) {
                Integrity.issue(this, "No options available, so cannot convert the option \"" + option + "\" to a code.");
            } else {
                StringBuffer message = new StringBuffer();
                message.append("The option \"").append(option).append("\" is not one of the available options.");
                Object firstOption = SelectListUtil.getFirstOption(options);
                // A class mismatch between supplied option and list contents is
                // a common caller mistake, so call it out explicitly.
                if (firstOption != null && option != null && firstOption.getClass() != option.getClass()) {
                    message.append(" The options in this list component are of type \"");
                    message.append(firstOption.getClass().getName()).append("\", the selection you supplied is of type \"");
                    message.append(option.getClass().getName()).append("\".");
                }
                Integrity.issue(this, message.toString());
            }
            return null;
        } else if (option instanceof Option) {
            // Option instances carry their own code; null codes map to "".
            Option opt = (Option) option;
            return opt.getCode() == null ? "" : opt.getCode();
        } else {
            // Fall back to the application lookup table; when it has no code,
            // use the 1-based list position as the code.
            String code = APPLICATION_LOOKUP_TABLE.getCode(getLookupTable(), option);
            if (code == null) {
                return String.valueOf(index + 1);
            } else {
                return code;
            }
        }
    }
}
public class COF {

    /**
     * Computes the average chaining distance, the average length of a path
     * through the given set of points to each target. The authors of COF decided
     * to approximate this value using a weighted mean that assumes every object
     * is reached from the previous point (but actually every point could be best
     * reachable from the first, in which case this does not make much sense.)
     *
     * TODO: can we accelerate this by using the kNN of the neighbors?
     *
     * @param knnq KNN query
     * @param dq Distance query
     * @param ids IDs to process
     * @param acds Storage for average chaining distances
     */
    protected void computeAverageChainingDistances(KNNQuery<O> knnq, DistanceQuery<O> dq, DBIDs ids, WritableDoubleDataStore acds) {
        FiniteProgress lrdsProgress = LOG.isVerbose() ? new FiniteProgress("Computing average chaining distances", ids.size(), LOG) : null;
        // Compute the chaining distances.
        // We do <i>not</i> bother to materialize the chaining order.
        for (DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
            final KNNList neighbors = knnq.getKNNForDBID(iter, k);
            final int r = neighbors.size();
            DoubleDBIDListIter it1 = neighbors.iter(), it2 = neighbors.iter();
            // Store the current lowest reachability.
            // NaN is used as an in-band marker for "self" / "already processed".
            final double[] mindists = new double[r];
            for (int i = 0; it1.valid(); it1.advance(), ++i) {
                mindists[i] = DBIDUtil.equal(it1, iter) ? Double.NaN : it1.doubleValue();
            }
            double acsum = 0.;
            for (int j = ((r < k) ? r : k) - 1; j > 0; --j) {
                // Find the minimum:
                int minpos = -1;
                double mindist = Double.NaN;
                for (int i = 0; i < mindists.length; ++i) {
                    double curdist = mindists[i];
                    // Both values could be NaN, deliberately.
                    // (curdist == curdist) filters NaN; !(curdist > mindist)
                    // also accepts the first non-NaN value when mindist is NaN.
                    if (curdist == curdist && !(curdist > mindist)) {
                        minpos = i;
                        mindist = curdist;
                    }
                }
                acsum += mindist * j; // Weighted sum, decreasing weights
                // Mark the chosen point as processed via the NaN marker.
                mindists[minpos] = Double.NaN;
                it1.seek(minpos);
                // Update distances: relax remaining minima via the newly added point.
                it2.seek(0);
                for (int i = 0; it2.valid(); it2.advance(), ++i) {
                    final double curdist = mindists[i];
                    if (curdist != curdist) {
                        continue; // NaN = processed!
                    }
                    double newdist = dq.distance(it1, it2);
                    if (newdist < curdist) {
                        mindists[i] = newdist;
                    }
                }
            }
            // Normalize by the triangular weight total r*(r-1)/2.
            acds.putDouble(iter, acsum / (r * 0.5 * (r - 1.)));
            LOG.incrementProcessed(lrdsProgress);
        }
        LOG.ensureCompleted(lrdsProgress);
    }
}
public class YggdrasilAuthenticator { /** * Tries refreshing the current session . * This method will try refreshing the token . If YggdrasilAuthenticator * failed to refresh , it will call { @ link # tryPasswordLogin ( ) } to ask the * password for authentication . If no password is available , an * { @ link AuthenticationException } will be thrown . * 尝试刷新当前的 session 。 这个方法首先尝试使用 token 来刷新 session 。 如果失败了 , 则调用 * { @ link # tryPasswordLogin ( ) } 来要求提供密码 , 使用密码进行登录 。 如果又失败了 , 则抛出一个 * { @ link AuthenticationException } 。 * @ throws AuthenticationException if < code > YggdrasilAuthenticator < / code > * couldn ' t refresh the current session */ public synchronized void refresh ( ) throws AuthenticationException { } }
if ( authResult == null ) { // refresh operation is not available PasswordProvider passwordProvider = tryPasswordLogin ( ) ; if ( passwordProvider == null ) { throw new AuthenticationException ( "no more authentication methods to try" ) ; } else { refreshWithPassword ( passwordProvider ) ; } } else { try { refreshWithToken ( authResult . getClientToken ( ) , authResult . getAccessToken ( ) ) ; } catch ( AuthenticationException e ) { // token login failed PasswordProvider passwordProvider = tryPasswordLogin ( ) ; if ( passwordProvider == null ) { throw e ; } try { refreshWithPassword ( passwordProvider ) ; } catch ( AuthenticationException e1 ) { e1 . addSuppressed ( e ) ; throw e1 ; } } }
public class CastOther { /** * final public static Method TO _ EXCEL = new Method ( " toExcel " , Types . EXCEL , new * Type [ ] { Types . OBJECT } ) ; */ @ Override public Type _writeOut ( BytecodeContext bc , int mode ) throws TransformerException { } }
// Caster . toDecimal ( null ) ; GeneratorAdapter adapter = bc . getAdapter ( ) ; char first = lcType . charAt ( 0 ) ; Type rtn ; switch ( first ) { case 'a' : if ( "array" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . ARRAY ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_ARRAY ) ; return Types . ARRAY ; } break ; case 'b' : if ( "base64" . equals ( lcType ) ) { expr . writeOut ( bc , MODE_REF ) ; adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_BASE64 ) ; return Types . STRING ; } if ( "binary" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . BYTE_VALUE_ARRAY ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_BINARY ) ; return Types . BYTE_VALUE_ARRAY ; } if ( "byte" . equals ( type ) ) { rtn = expr . writeOutAsType ( bc , MODE_VALUE ) ; if ( ! rtn . equals ( Types . BYTE_VALUE ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_BYTE_VALUE [ Types . getType ( rtn ) ] ) ; return Types . BYTE_VALUE ; } if ( "byte" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . BYTE ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_BYTE [ Types . getType ( rtn ) ] ) ; return Types . BYTE ; } if ( "boolean" . equals ( lcType ) ) { return ( ( ExpressionBase ) bc . getFactory ( ) . toExprBoolean ( expr ) ) . writeOutAsType ( bc , MODE_REF ) ; } break ; case 'c' : if ( "char" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_VALUE ) ; if ( ! rtn . equals ( Types . CHAR ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_CHAR_VALUE [ Types . getType ( rtn ) ] ) ; return Types . CHAR ; } if ( "character" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . CHARACTER ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_CHARACTER [ Types . 
getType ( rtn ) ] ) ; return Types . CHARACTER ; } if ( "collection" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . COLLECTION ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_COLLECTION ) ; return Types . COLLECTION ; } if ( "component" . equals ( lcType ) || "class" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . COMPONENT ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_COMPONENT ) ; return Types . COMPONENT ; } break ; case 'd' : if ( "double" . equals ( lcType ) ) { return ( ( ExpressionBase ) bc . getFactory ( ) . toExprDouble ( expr ) ) . writeOutAsType ( bc , MODE_REF ) ; } if ( "date" . equals ( lcType ) || "datetime" . equals ( lcType ) ) { // First Arg rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( rtn . equals ( Types . DATE_TIME ) ) return Types . DATE_TIME ; int type = Types . getType ( rtn ) ; // Second Arg adapter . loadArg ( 0 ) ; // adapter . invokeVirtual ( Types . PAGE _ CONTEXT , GET _ CONFIG ) ; // adapter . invokeInterface ( Types . CONFIG _ WEB , GET _ TIMEZONE ) ; adapter . invokeVirtual ( Types . PAGE_CONTEXT , GET_TIMEZONE ) ; adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_DATE [ type ] ) ; return Types . DATE_TIME ; } if ( "decimal" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_DECIMAL [ Types . getType ( rtn ) ] ) ; return Types . STRING ; } break ; case 'e' : /* * if ( " excel " . equals ( type ) ) { expr . writeOut ( bc , MODE _ REF ) ; * adapter . invokeStatic ( Types . EXCEL _ UTIL , TO _ EXCEL ) ; return Types . EXCEL ; } */ break ; case 'f' : if ( "file" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . FILE ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_FILE ) ; return Types . FILE ; } if ( "float" . 
equals ( type ) ) { rtn = expr . writeOutAsType ( bc , MODE_VALUE ) ; if ( ! rtn . equals ( Types . FLOAT_VALUE ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_FLOAT_VALUE [ Types . getType ( rtn ) ] ) ; return Types . FLOAT_VALUE ; } if ( "float" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . FLOAT ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_FLOAT [ Types . getType ( rtn ) ] ) ; return Types . FLOAT ; } break ; case 'i' : if ( "int" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_VALUE ) ; if ( ! rtn . equals ( Types . INT_VALUE ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_INT_VALUE [ Types . getType ( rtn ) ] ) ; return Types . INT_VALUE ; } if ( "integer" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . INTEGER ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_INTEGER [ Types . getType ( rtn ) ] ) ; return Types . INTEGER ; } /* * ext . img if ( " image " . equals ( lcType ) ) { rtn = expr . writeOut ( bc , MODE _ REF ) ; Type it = * ImageUtil . getImageType ( ) ; if ( ! rtn . equals ( it ) ) { adapter . loadArg ( 0 ) ; * adapter . invokeStatic ( it , Methods _ Caster . TO _ IMAGE ) ; } return it ; } */ break ; case 'j' : if ( "java.lang.boolean" . equals ( lcType ) ) { return ( ( ExpressionBase ) bc . getFactory ( ) . toExprBoolean ( expr ) ) . writeOutAsType ( bc , MODE_REF ) ; } if ( "java.lang.double" . equals ( lcType ) ) { return ( ( ExpressionBase ) bc . getFactory ( ) . toExprDouble ( expr ) ) . writeOutAsType ( bc , MODE_REF ) ; } if ( "java.lang.string" . equals ( lcType ) ) { return ( ( ExpressionBase ) bc . getFactory ( ) . toExprString ( expr ) ) . writeOutAsType ( bc , MODE_REF ) ; } if ( "java.lang.stringbuffer" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . STRING_BUFFER ) ) adapter . 
invokeStatic ( Types . CASTER , Methods_Caster . TO_STRING_BUFFER ) ; return Types . STRING_BUFFER ; } if ( "java.lang.byte" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . BYTE ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_BYTE [ Types . getType ( rtn ) ] ) ; return Types . BYTE ; } if ( "java.lang.character" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . CHARACTER ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_CHARACTER [ Types . getType ( rtn ) ] ) ; return Types . CHARACTER ; } if ( "java.lang.short" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . SHORT ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_SHORT [ Types . getType ( rtn ) ] ) ; return Types . SHORT ; } if ( "java.lang.integer" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . INTEGER ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_INTEGER [ Types . getType ( rtn ) ] ) ; return Types . INTEGER ; } if ( "java.lang.long" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . LONG ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_LONG [ Types . getType ( rtn ) ] ) ; return Types . LONG ; } if ( "java.lang.float" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . FLOAT ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_FLOAT [ Types . getType ( rtn ) ] ) ; return Types . FLOAT ; } if ( "java.io.file" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . FILE ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_FILE ) ; return Types . FILE ; } if ( "java.lang.object" . equals ( lcType ) ) { return expr . 
writeOutAsType ( bc , MODE_REF ) ; } else if ( "java.util.date" . equals ( lcType ) ) { // First Arg rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( rtn . equals ( Types . DATE ) ) return Types . DATE ; if ( rtn . equals ( Types . DATE_TIME ) ) return Types . DATE_TIME ; // Second Arg adapter . loadArg ( 0 ) ; // adapter . invokeVirtual ( Types . PAGE _ CONTEXT , GET _ CONFIG ) ; // adapter . invokeVirtual ( Types . CONFIG _ WEB , GET _ TIMEZONE ) ; adapter . invokeVirtual ( Types . PAGE_CONTEXT , GET_TIMEZONE ) ; adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_DATE [ Types . getType ( rtn ) ] ) ; return Types . DATE_TIME ; } break ; case 'l' : if ( "long" . equals ( type ) ) { rtn = expr . writeOutAsType ( bc , MODE_VALUE ) ; if ( ! rtn . equals ( Types . LONG_VALUE ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_LONG_VALUE [ Types . getType ( rtn ) ] ) ; return Types . LONG_VALUE ; } else if ( "long" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . LONG ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_LONG [ Types . getType ( rtn ) ] ) ; return Types . LONG ; } else if ( "locale" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_LOCALE ) ; return Types . LOCALE ; } break ; case 'n' : if ( "node" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . NODE ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_NODE ) ; return Types . NODE ; } else if ( "null" . equals ( lcType ) ) { expr . writeOut ( bc , MODE_REF ) ; adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_NULL ) ; // TODO gibt es einen null typ ? return Types . OBJECT ; } break ; case 'o' : if ( "object" . equals ( lcType ) || "other" . equals ( lcType ) ) { expr . writeOut ( bc , MODE_REF ) ; return Types . 
OBJECT ; } break ; case 't' : if ( "timezone" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_TIMEZONE ) ; return Types . TIMEZONE ; } else if ( "timespan" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . TIMESPAN ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_TIMESPAN ) ; return Types . TIMESPAN ; } break ; case 's' : if ( "struct" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . STRUCT ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_STRUCT ) ; return Types . STRUCT ; } if ( "short" . equals ( type ) ) { rtn = expr . writeOutAsType ( bc , MODE_VALUE ) ; if ( ! rtn . equals ( Types . SHORT_VALUE ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_SHORT_VALUE [ Types . getType ( rtn ) ] ) ; return Types . SHORT_VALUE ; } if ( "short" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . SHORT ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_SHORT [ Types . getType ( rtn ) ] ) ; return Types . SHORT ; } if ( "stringbuffer" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . STRING_BUFFER ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_STRING_BUFFER ) ; return Types . STRING_BUFFER ; } break ; case 'x' : if ( "xml" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . NODE ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_NODE ) ; return Types . NODE ; } break ; default : if ( "query" . equals ( lcType ) ) { rtn = expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . QUERY ) ) adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_QUERY ) ; return Types . QUERY ; } if ( "querycolumn" . 
equals ( lcType ) ) { rtn = ( expr instanceof Variable ) ? ( ( VariableImpl ) expr ) . writeOutCollectionAsType ( bc , mode ) : expr . writeOutAsType ( bc , MODE_REF ) ; if ( ! rtn . equals ( Types . QUERY_COLUMN ) ) { adapter . loadArg ( 0 ) ; adapter . invokeStatic ( Types . CASTER , Methods_Caster . TO_QUERY_COLUMN ) ; } return Types . QUERY_COLUMN ; } } Type t = getType ( type ) ; expr . writeOut ( bc , MODE_REF ) ; adapter . checkCast ( t ) ; return t ;
public class Client { /** * Get a connection from the pool , or create a new one and add it to the * pool . Connections to a given host / port are reused . */ private Connection getConnection ( InetSocketAddress addr , Class < ? > protocol , Call call ) throws IOException { } }
if ( ! running . get ( ) ) { // the client is stopped throw new IOException ( "The client is stopped" ) ; } Connection connection ; /* * we could avoid this allocation for each RPC by having a * connectionsId object and with set ( ) method . We need to manage the * refs for keys in HashMap properly . For now its ok . */ ConnectionId remoteId = new ConnectionId ( addr , protocol ) ; do { synchronized ( connections ) { connection = connections . get ( remoteId ) ; if ( connection == null ) { connection = new Connection ( remoteId ) ; connections . put ( remoteId , connection ) ; } } } while ( ! connection . addCall ( call ) ) ; // we don ' t invoke the method below inside " synchronized ( connections ) " // block above . The reason for that is if the server happens to be slow , // it will take longer to establish a connection and that will slow the // entire system down . connection . setupIOstreams ( ) ; return connection ;
public class AbstractStyleRepeatedWordRule { /** * get synonyms for a repeated word */ public List < String > getSynonyms ( AnalyzedTokenReadings token ) { } }
List < String > synonyms = new ArrayList < String > ( ) ; if ( linguServices == null || token == null ) { return synonyms ; } List < AnalyzedToken > readings = token . getReadings ( ) ; for ( AnalyzedToken reading : readings ) { String lemma = reading . getLemma ( ) ; if ( lemma != null ) { List < String > rawSynonyms = linguServices . getSynonyms ( lemma , lang ) ; for ( String synonym : rawSynonyms ) { synonym = synonym . replaceAll ( "\\(.*\\)" , "" ) . trim ( ) ; if ( ! synonym . isEmpty ( ) && ! synonyms . contains ( synonym ) ) { synonyms . add ( synonym ) ; } } } } if ( synonyms . isEmpty ( ) ) { List < String > rawSynonyms = linguServices . getSynonyms ( token . getToken ( ) , lang ) ; for ( String synonym : rawSynonyms ) { synonym = synonym . replaceAll ( "\\(.*\\)" , "" ) . trim ( ) ; if ( ! synonym . isEmpty ( ) && ! synonyms . contains ( synonym ) ) { synonyms . add ( synonym ) ; } } } return synonyms ;
public class AbstractSemanticSequencer { /** * TODO : deprecate this method */ protected ISerializationContext createContext ( EObject deprecatedContext , EObject semanticObject ) { } }
return SerializationContext . fromEObject ( deprecatedContext , semanticObject ) ;
public class WebSocketSerializer { /** * Serialize the Protocol to JSON String . * @ param header * the ProtocolHeader . * @ param protocol * the Protocol . * @ return * the JSON String . * @ throws JsonGenerationException * @ throws JsonMappingException * @ throws IOException */ private static String serializeProtocol ( ProtocolHeader header , Protocol protocol ) throws JsonGenerationException , JsonMappingException , IOException { } }
ProtocolPair p = new ProtocolPair ( ) ; p . setProtocolHeader ( header ) ; if ( protocol == null ) { p . setType ( null ) ; } else { p . setType ( protocol . getClass ( ) ) ; } p . setProtocol ( toJsonStr ( protocol ) ) ; return toJsonStr ( p ) ;