signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Distribution { /** * Creates a Good - Turing smoothed Distribution from the given counter . * @ return a new Good - Turing smoothed Distribution . */ public static < E > Distribution < E > goodTuringSmoothedCounter ( Counter < E > counter , int numberOfKeys ) { } }
// gather count - counts int [ ] countCounts = getCountCounts ( counter ) ; // if count - counts are unreliable , we shouldn ' t be using G - T // revert to laplace for ( int i = 1 ; i <= 10 ; i ++ ) { if ( countCounts [ i ] < 3 ) { return laplaceSmoothedDistribution ( counter , numberOfKeys , 0.5 ) ; } } double observedMass = counter . totalCount ( ) ; double reservedMass = countCounts [ 1 ] / observedMass ; // calculate and cache adjusted frequencies // also adjusting total mass of observed items double [ ] adjustedFreq = new double [ 10 ] ; for ( int freq = 1 ; freq < 10 ; freq ++ ) { adjustedFreq [ freq ] = ( double ) ( freq + 1 ) * ( double ) countCounts [ freq + 1 ] / countCounts [ freq ] ; observedMass -= ( freq - adjustedFreq [ freq ] ) * countCounts [ freq ] ; } double normFactor = ( 1.0 - reservedMass ) / observedMass ; Distribution < E > norm = new Distribution < E > ( ) ; norm . counter = new ClassicCounter < E > ( ) ; // fill in the new Distribution , renormalizing as we go for ( E key : counter . keySet ( ) ) { int origFreq = ( int ) Math . round ( counter . getCount ( key ) ) ; if ( origFreq < 10 ) { norm . counter . setCount ( key , adjustedFreq [ origFreq ] * normFactor ) ; } else { norm . counter . setCount ( key , origFreq * normFactor ) ; } } norm . numberOfKeys = numberOfKeys ; norm . reservedMass = reservedMass ; return norm ;
public class SchemaVersionOne { /** * / * ( non - Javadoc ) * @ see net . agkn . hll . serialization . ISchemaVersion # writeMetadata ( byte [ ] , IHLLMetadata ) */ @ Override public void writeMetadata ( final byte [ ] bytes , final IHLLMetadata metadata ) { } }
final HLLType type = metadata . HLLType ( ) ; final int typeOrdinal = getOrdinal ( type ) ; final int explicitCutoffValue ; if ( metadata . explicitOff ( ) ) { explicitCutoffValue = EXPLICIT_OFF ; } else if ( metadata . explicitAuto ( ) ) { explicitCutoffValue = EXPLICIT_AUTO ; } else { explicitCutoffValue = metadata . log2ExplicitCutoff ( ) + 1 /* per spec */ ; } bytes [ 0 ] = SerializationUtil . packVersionByte ( SCHEMA_VERSION , typeOrdinal ) ; bytes [ 1 ] = SerializationUtil . packParametersByte ( metadata . registerWidth ( ) , metadata . registerCountLog2 ( ) ) ; bytes [ 2 ] = SerializationUtil . packCutoffByte ( explicitCutoffValue , metadata . sparseEnabled ( ) ) ;
public class MessageBirdClient { /** * Function to view recording by call id , leg id and recording id * @ param callID Voice call ID * @ param legId Leg ID * @ param recordingId Recording ID * @ return Recording * @ throws NotFoundException not found with callId and legId * @ throws UnauthorizedException if client is unauthorized * @ throws GeneralException general exception */ public RecordingResponse viewRecording ( String callID , String legId , String recordingId ) throws NotFoundException , GeneralException , UnauthorizedException { } }
if ( callID == null ) { throw new IllegalArgumentException ( "Voice call ID must be specified." ) ; } if ( legId == null ) { throw new IllegalArgumentException ( "Leg ID must be specified." ) ; } if ( recordingId == null ) { throw new IllegalArgumentException ( "Recording ID must be specified." ) ; } String url = String . format ( "%s%s" , VOICE_CALLS_BASE_URL , VOICECALLSPATH ) ; Map < String , Object > params = new LinkedHashMap < > ( ) ; params . put ( "legs" , legId ) ; params . put ( "recordings" , recordingId ) ; return messageBirdService . requestByID ( url , callID , params , RecordingResponse . class ) ;
public class MongoDBQueryDescriptorBuilder { /** * parse JSON * @ param json * @ return * @ see < a href = " http : / / stackoverflow . com / questions / 34436952 / json - parse - equivalent - in - mongo - driver - 3 - x - for - java " > JSON . parse equivalent < / a > */ private static Object parseAsObject ( String json ) { } }
if ( StringHelper . isNullOrEmptyString ( json ) ) { return null ; } Document object = Document . parse ( "{ 'json': " + json + "}" ) ; return object . get ( "json" ) ;
public class DescribeChapCredentialsResult { /** * An array of < a > ChapInfo < / a > objects that represent CHAP credentials . Each object in the array contains CHAP * credential information for one target - initiator pair . If no CHAP credentials are set , an empty array is returned . * CHAP credential information is provided in a JSON object with the following fields : * < ul > * < li > * < b > InitiatorName < / b > : The iSCSI initiator that connects to the target . * < / li > * < li > * < b > SecretToAuthenticateInitiator < / b > : The secret key that the initiator ( for example , the Windows client ) must * provide to participate in mutual CHAP with the target . * < / li > * < li > * < b > SecretToAuthenticateTarget < / b > : The secret key that the target must provide to participate in mutual CHAP with * the initiator ( e . g . Windows client ) . * < / li > * < li > * < b > TargetARN < / b > : The Amazon Resource Name ( ARN ) of the storage volume . * < / li > * < / ul > * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setChapCredentials ( java . util . Collection ) } or { @ link # withChapCredentials ( java . util . Collection ) } if you * want to override the existing values . * @ param chapCredentials * An array of < a > ChapInfo < / a > objects that represent CHAP credentials . Each object in the array contains * CHAP credential information for one target - initiator pair . If no CHAP credentials are set , an empty array * is returned . CHAP credential information is provided in a JSON object with the following fields : < / p > * < ul > * < li > * < b > InitiatorName < / b > : The iSCSI initiator that connects to the target . * < / li > * < li > * < b > SecretToAuthenticateInitiator < / b > : The secret key that the initiator ( for example , the Windows client ) * must provide to participate in mutual CHAP with the target . 
* < / li > * < li > * < b > SecretToAuthenticateTarget < / b > : The secret key that the target must provide to participate in mutual * CHAP with the initiator ( e . g . Windows client ) . * < / li > * < li > * < b > TargetARN < / b > : The Amazon Resource Name ( ARN ) of the storage volume . * < / li > * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeChapCredentialsResult withChapCredentials ( ChapInfo ... chapCredentials ) { } }
if ( this . chapCredentials == null ) { setChapCredentials ( new com . amazonaws . internal . SdkInternalList < ChapInfo > ( chapCredentials . length ) ) ; } for ( ChapInfo ele : chapCredentials ) { this . chapCredentials . add ( ele ) ; } return this ;
public class KeyUtil { /** * 生成私钥 , 仅用于非对称加密 < br > * 算法见 : https : / / docs . oracle . com / javase / 7 / docs / technotes / guides / security / StandardNames . html # KeyFactory * @ param algorithm 算法 * @ param keySpec { @ link KeySpec } * @ return 私钥 { @ link PrivateKey } * @ since 3.1.1 */ public static PrivateKey generatePrivateKey ( String algorithm , KeySpec keySpec ) { } }
if ( null == keySpec ) { return null ; } algorithm = getAlgorithmAfterWith ( algorithm ) ; try { return getKeyFactory ( algorithm ) . generatePrivate ( keySpec ) ; } catch ( Exception e ) { throw new CryptoException ( e ) ; }
public class PropertyCollectorUtil { /** * Method to convert an object to its type * For example when ArrayOfManagedObject is passed in * return a ManagedObject [ ] * @ param dynaPropVal * @ return */ public static Object convertProperty ( Object dynaPropVal ) { } }
Object propertyValue = null ; if ( dynaPropVal == null ) { throw new IllegalArgumentException ( "Unable to convertProperty on null object." ) ; } Class < ? > propClass = dynaPropVal . getClass ( ) ; String propName = propClass . getName ( ) ; // Check the dynamic propery for ArrayOfXXX object if ( propName . contains ( "ArrayOf" ) ) { String methodName = propName . substring ( propName . indexOf ( "ArrayOf" ) + "ArrayOf" . length ( ) ) ; // If object is ArrayOfXXX object , then get the XXX [ ] by invoking getXXX ( ) on the object . For Ex : // ArrayOfManagedObjectReference . getManagedObjectReference ( ) returns ManagedObjectReference [ ] array . try { Method getMethod ; try { getMethod = propClass . getMethod ( "get" + methodName , ( Class [ ] ) null ) ; } catch ( NoSuchMethodException ignore ) { getMethod = propClass . getMethod ( "get_" + methodName . toLowerCase ( ) , ( Class [ ] ) null ) ; } propertyValue = getMethod . invoke ( dynaPropVal , ( Object [ ] ) null ) ; } catch ( Exception e ) { log . error ( "Exception caught trying to convertProperty" , e ) ; } } // Handle the case of an unwrapped array being deserialized . else if ( dynaPropVal . getClass ( ) . isArray ( ) ) { propertyValue = dynaPropVal ; } else { propertyValue = dynaPropVal ; } return propertyValue ;
public class ColumnFamilyRecordReader { /** * not necessarily on Cassandra machines , too . This should be adequate for single - DC clusters , at least . */ private String getLocation ( ) { } }
Collection < InetAddress > localAddresses = FBUtilities . getAllLocalAddresses ( ) ; for ( InetAddress address : localAddresses ) { for ( String location : split . getLocations ( ) ) { InetAddress locationAddress = null ; try { locationAddress = InetAddress . getByName ( location ) ; } catch ( UnknownHostException e ) { throw new AssertionError ( e ) ; } if ( address . equals ( locationAddress ) ) { return location ; } } } return split . getLocations ( ) [ 0 ] ;
public class appfwwsdl { /** * Use this API to fetch all the appfwwsdl resources that are configured on netscaler . */ public static appfwwsdl get ( nitro_service service ) throws Exception { } }
appfwwsdl obj = new appfwwsdl ( ) ; appfwwsdl [ ] response = ( appfwwsdl [ ] ) obj . get_resources ( service ) ; return response [ 0 ] ;
public class ConfigUniqueNameGenerator { /** * 解析唯一标识UniqueName得到接口名 * @ param uniqueName 服务唯一标识 * @ return 接口名 */ public static String getInterfaceName ( String uniqueName ) { } }
if ( StringUtils . isEmpty ( uniqueName ) ) { return uniqueName ; } int index = uniqueName . indexOf ( ':' ) ; return index < 0 ? uniqueName : uniqueName . substring ( 0 , index ) ;
public class TCBeginMessageImpl { /** * ( non - Javadoc ) * @ see org . restcomm . protocols . ss7 . tcap . asn . Encodable # decode ( org . mobicents . protocols . asn . AsnInputStream ) */ public void decode ( AsnInputStream ais ) throws ParseException { } }
try { AsnInputStream localAis = ais . readSequenceStream ( ) ; int tag = localAis . readTag ( ) ; if ( tag != _TAG_OTX || localAis . getTagClass ( ) != Tag . CLASS_APPLICATION ) throw new ParseException ( PAbortCauseType . IncorrectTxPortion , null , "Error decoding TC-Begin: Expected OriginatingTransactionId, found tag: " + tag ) ; this . originatingTransactionId = localAis . readOctetString ( ) ; while ( true ) { if ( localAis . available ( ) == 0 ) return ; tag = localAis . readTag ( ) ; if ( localAis . isTagPrimitive ( ) || localAis . getTagClass ( ) != Tag . CLASS_APPLICATION ) throw new ParseException ( PAbortCauseType . IncorrectTxPortion , null , "Error decoding TC-Begin: DialogPortion and Component portion must be constructive and has tag class CLASS_APPLICATION" ) ; switch ( tag ) { case DialogPortion . _TAG : this . dp = TcapFactory . createDialogPortion ( localAis ) ; break ; case Component . _COMPONENT_TAG : AsnInputStream compAis = localAis . readSequenceStream ( ) ; List < Component > cps = new ArrayList < Component > ( ) ; // its iterator : ) while ( compAis . available ( ) > 0 ) { Component c = TcapFactory . createComponent ( compAis ) ; if ( c == null ) { break ; } cps . add ( c ) ; } this . component = new Component [ cps . size ( ) ] ; this . component = cps . toArray ( this . component ) ; break ; default : throw new ParseException ( PAbortCauseType . IncorrectTxPortion , null , "Error decoding TC-Begin: DialogPortion and Componebt parsing: bad tag - " + tag ) ; } } } catch ( IOException e ) { throw new ParseException ( PAbortCauseType . BadlyFormattedTxPortion , null , "IOException while decoding TC-Begin: " + e . getMessage ( ) , e ) ; } catch ( AsnException e ) { throw new ParseException ( PAbortCauseType . BadlyFormattedTxPortion , null , "AsnException while decoding TC-Begin: " + e . getMessage ( ) , e ) ; }
public class PartitionContainer { Indexes getIndexes ( String name ) { } }
Indexes ixs = indexes . get ( name ) ; if ( ixs == null ) { MapServiceContext mapServiceContext = mapService . getMapServiceContext ( ) ; MapContainer mapContainer = mapServiceContext . getMapContainer ( name ) ; if ( mapContainer . isGlobalIndexEnabled ( ) ) { throw new IllegalStateException ( "Can't use a partitioned-index in the context of a global-index." ) ; } Indexes indexesForMap = mapContainer . createIndexes ( false ) ; ixs = indexes . putIfAbsent ( name , indexesForMap ) ; if ( ixs == null ) { ixs = indexesForMap ; } } return ixs ;
public class WaitStrategies { /** * Returns a strategy that sleeps a random amount of time before retrying . * @ param maximumTime the maximum time to sleep * @ param timeUnit the unit of the maximum time * @ return a wait strategy with a random wait time * @ throws IllegalStateException if the maximum sleep time is & lt ; = 0. */ public static WaitStrategy randomWait ( long maximumTime , @ Nonnull TimeUnit timeUnit ) { } }
Preconditions . checkNotNull ( timeUnit , "The time unit may not be null" ) ; return new RandomWaitStrategy ( 0L , timeUnit . toMillis ( maximumTime ) ) ;
public class AmazonTranscribeClient { /** * Deletes a previously submitted transcription job along with any other generated results such as the * transcription , models , and so on . * @ param deleteTranscriptionJobRequest * @ return Result of the DeleteTranscriptionJob operation returned by the service . * @ throws LimitExceededException * Either you have sent too many requests or your input file is too long . Wait before you resend your * request , or use a smaller file and resend the request . * @ throws BadRequestException * Your request didn ' t pass one or more validation tests . For example , if the transcription you ' re trying to * delete doesn ' t exist or if it is in a non - terminal state ( for example , it ' s " in progress " ) . See the * exception < code > Message < / code > field for more information . * @ throws InternalFailureException * There was an internal error . Check the error message and try your request again . * @ sample AmazonTranscribe . DeleteTranscriptionJob * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / transcribe - 2017-10-26 / DeleteTranscriptionJob " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DeleteTranscriptionJobResult deleteTranscriptionJob ( DeleteTranscriptionJobRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteTranscriptionJob ( request ) ;
public class CLIConnectionFactory { /** * Convenience method to call { @ link # authorization } with the HTTP basic authentication . * Cf . { @ code BasicHeaderApiTokenAuthenticator } . */ public CLIConnectionFactory basicAuth ( String userInfo ) { } }
return authorization ( "Basic " + new String ( Base64 . encodeBase64 ( ( userInfo ) . getBytes ( ) ) ) ) ;
public class TEEJBInvocationInfo { /** * This is called by the EJB container server code to write a * EJB method call postinvoke exceptions record to the trace log , if enabled . */ public static void tracePostInvokeException ( EJSDeployedSupport s , EJSWrapperBase wrapper , Throwable t ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { StringBuffer sbuf = new StringBuffer ( ) ; sbuf . append ( MthdPostInvokeException_Type_Str ) . append ( DataDelimiter ) . append ( MthdPostInvokeException_Type ) . append ( DataDelimiter ) ; writeDeployedSupportInfo ( s , sbuf , wrapper , t ) ; Tr . debug ( tc , sbuf . toString ( ) ) ; }
public class IOUtils { /** * See { @ link hudson . FilePath # isAbsolute ( String ) } . * @ param path String representing < code > Platform Specific < / code > ( unlike FilePath , which may get Platform agnostic paths ) , may not be null * @ return true if String represents absolute path on this platform , false otherwise */ public static boolean isAbsolute ( String path ) { } }
Pattern DRIVE_PATTERN = Pattern . compile ( "[A-Za-z]:[\\\\/].*" ) ; return path . startsWith ( "/" ) || DRIVE_PATTERN . matcher ( path ) . matches ( ) ;
public class JusFragment { /** * TODO : Rename and change types and number of parameters */ public static JusFragment newInstance ( ) { } }
JusFragment fragment = new JusFragment ( ) ; Bundle args = new Bundle ( ) ; fragment . setArguments ( args ) ; return fragment ;
public class RankableObjectWithFields { /** * Note : We do not defensively copy the wrapped object and any accompanying * fields . We do guarantee , however , do return a defensive ( shallow ) copy of * the List object that is wrapping any accompanying fields . * @ return */ @ Override public Rankable copy ( ) { } }
List < Object > shallowCopyOfFields = ImmutableList . copyOf ( getFields ( ) ) ; return new RankableObjectWithFields ( getObject ( ) , getCount ( ) , shallowCopyOfFields ) ;
public class FeatureDescription { /** * Returns the feature name */ public String getName ( ) { } }
FeatureDescriptor fd = getFeatureDescriptor ( ) ; if ( fd == null ) { return null ; } return fd . getName ( ) ;
public class policyexpression { /** * Use this API to add policyexpression . */ public static base_response add ( nitro_service client , policyexpression resource ) throws Exception { } }
policyexpression addresource = new policyexpression ( ) ; addresource . name = resource . name ; addresource . value = resource . value ; addresource . description = resource . description ; addresource . comment = resource . comment ; addresource . clientsecuritymessage = resource . clientsecuritymessage ; return addresource . add_resource ( client ) ;
public class FluentValidator { /** * 在待验证对象数组 < tt > t < / tt > 上 , 使用 < tt > v < / tt > 验证器进行验证 * 注 : 当数组为空时 , 则会跳过 * @ param t 待验证对象数组 * @ param v 验证器 * @ return FluentValidator */ public < T > FluentValidator onEach ( T [ ] t , final Validator < T > v ) { } }
Preconditions . checkNotNull ( v , "Validator should not be NULL" ) ; if ( ArrayUtil . isEmpty ( t ) ) { lastAddCount = 0 ; return this ; } return onEach ( Arrays . asList ( t ) , v ) ;
public class MessageProcessInfo { /** * SetupMessageHeaderFromCode Method . */ public boolean setupMessageHeaderFromCode ( Message trxMessage , String strMessageCode , String strVersion ) { } }
TrxMessageHeader trxMessageHeader = ( TrxMessageHeader ) ( ( BaseMessage ) trxMessage ) . getMessageHeader ( ) ; if ( ( trxMessageHeader == null ) && ( strMessageCode == null ) ) return false ; if ( trxMessageHeader == null ) { trxMessageHeader = new TrxMessageHeader ( null , null ) ; ( ( BaseMessage ) trxMessage ) . setMessageHeader ( trxMessageHeader ) ; } if ( strMessageCode == null ) strMessageCode = ( String ) trxMessageHeader . get ( TrxMessageHeader . MESSAGE_CODE ) ; Utility . getLogger ( ) . info ( "Message code: " + strMessageCode ) ; if ( strMessageCode == null ) return false ; // Message not found MessageProcessInfo recMessageProcessInfo = ( MessageProcessInfo ) this . getMessageProcessInfo ( strMessageCode ) ; if ( recMessageProcessInfo == null ) return false ; // Message not found MessageInfo recMessageInfo = ( MessageInfo ) ( ( ReferenceField ) this . getField ( MessageProcessInfo . MESSAGE_INFO_ID ) ) . getReference ( ) ; if ( recMessageInfo == null ) return false ; // Impossible trxMessageHeader = recMessageInfo . addMessageProperties ( trxMessageHeader ) ; trxMessageHeader = this . addMessageProperties ( trxMessageHeader ) ; trxMessageHeader = this . addTransportProperties ( trxMessageHeader , strVersion ) ; return true ;
public class Application { /** * Cause an the argument < code > resolver < / code > to be added to the resolver chain as specified in section 5.5.1 of * the JavaServer Faces Specification . * It is not possible to remove an < code > ELResolver < / code > registered with this method , once it has been registered . * It is illegal to register an ELResolver after the application has received any requests from the client . If an * attempt is made to register a listener after that time , an IllegalStateException must be thrown . This restriction * is in place to allow the JSP container to optimize for the common case where no additional * < code > ELResolvers < / code > are in the chain , aside from the standard ones . It is permissible to add * < code > ELResolvers < / code > before or after initialization to a CompositeELResolver that is already in the chain . * The default implementation throws < code > UnsupportedOperationException < / code > and is provided for the sole purpose * of not breaking existing applications that extend { @ link Application } . * @ since 1.2 */ public void addELResolver ( ELResolver resolver ) { } }
// The following concrete methods were added for JSF 1.2 . They supply default // implementations that throw UnsupportedOperationException . // This allows old Application implementations to still work . Application application = getMyfacesApplicationInstance ( ) ; if ( application != null ) { application . addELResolver ( resolver ) ; return ; } throw new UnsupportedOperationException ( ) ;
public class CopyDataHandler { /** * Do the physical move operation . * @ param bDisplayOption If true , display the change . * @ param iMoveMode The type of move being done ( init / read / screen ) . * @ return The error code ( or NORMAL _ RETURN if okay ) . */ public int moveSourceToDest ( boolean bDisplayOption , int iMoveMode ) { } }
if ( m_objValue instanceof String ) return m_fldDest . setString ( ( String ) m_objValue , bDisplayOption , iMoveMode ) ; else return m_fldDest . setData ( m_objValue , bDisplayOption , iMoveMode ) ;
public class XmlMapper { /** * Java Collection - > Xml without encoding , 特别支持Root Element是Collection的情形 . */ public static String toXml ( Collection < ? > root , String rootName , Class clazz ) { } }
return toXml ( root , rootName , clazz , null ) ;
public class GetConfigResponse { /** * Returns the map of line number to line value for a given category . * @ param categoryNumber a valid category number from getCategories . * @ return the map of category numbers to names . * @ see org . asteriskjava . manager . response . GetConfigResponse # getCategories */ public Map < Integer , String > getLines ( int categoryNumber ) { } }
if ( lines == null ) { lines = new TreeMap < > ( ) ; } Map < String , Object > responseMap = super . getAttributes ( ) ; for ( Entry < String , Object > response : responseMap . entrySet ( ) ) { String key = response . getKey ( ) ; if ( key . toLowerCase ( Locale . US ) . contains ( "line" ) ) { String [ ] keyParts = key . split ( "-" ) ; // if it doesn ' t have at least line - XXXXX - XXXXX , skip if ( keyParts . length < 3 ) { continue ; } // try to get the number of this category , skip if we mess up Integer potentialCategoryNumber ; try { potentialCategoryNumber = Integer . parseInt ( keyParts [ 1 ] ) ; } catch ( Exception exception ) { continue ; } // try to get the number of this line , skip if we mess up Integer potentialLineNumber ; try { potentialLineNumber = Integer . parseInt ( keyParts [ 2 ] ) ; } catch ( Exception exception ) { continue ; } // get the List out for placing stuff in Map < Integer , String > linesForCategory = lines . get ( potentialCategoryNumber ) ; if ( linesForCategory == null ) { linesForCategory = new TreeMap < > ( ) ; } // put the line we just parsed into the line map for this category linesForCategory . put ( potentialLineNumber , ( String ) response . getValue ( ) ) ; if ( ! lines . containsKey ( potentialCategoryNumber ) ) { lines . put ( potentialCategoryNumber , linesForCategory ) ; } } } return lines . get ( categoryNumber ) ;
public class VpnTunnelClient { /** * Returns the specified VpnTunnel resource . Gets a list of available VPN tunnels by making a * list ( ) request . * < p > Sample code : * < pre > < code > * try ( VpnTunnelClient vpnTunnelClient = VpnTunnelClient . create ( ) ) { * ProjectRegionVpnTunnelName vpnTunnel = ProjectRegionVpnTunnelName . of ( " [ PROJECT ] " , " [ REGION ] " , " [ VPN _ TUNNEL ] " ) ; * VpnTunnel response = vpnTunnelClient . getVpnTunnel ( vpnTunnel ) ; * < / code > < / pre > * @ param vpnTunnel Name of the VpnTunnel resource to return . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final VpnTunnel getVpnTunnel ( ProjectRegionVpnTunnelName vpnTunnel ) { } }
GetVpnTunnelHttpRequest request = GetVpnTunnelHttpRequest . newBuilder ( ) . setVpnTunnel ( vpnTunnel == null ? null : vpnTunnel . toString ( ) ) . build ( ) ; return getVpnTunnel ( request ) ;
public class ClientDObjectMgr { /** * Called when the client is cleaned up due to having disconnected from the server . */ public void cleanup ( ) { } }
// tell any pending object subscribers that they ' re not getting their bits for ( PendingRequest < ? > req : _penders . values ( ) ) { for ( Subscriber < ? > sub : req . targets ) { sub . requestFailed ( req . oid , new ObjectAccessException ( "Client connection closed" ) ) ; } } _penders . clear ( ) ; _flusher . cancel ( ) ; _flushes . clear ( ) ; _dead . clear ( ) ; _client . getRunQueue ( ) . postRunnable ( new Runnable ( ) { public void run ( ) { _ocache . clear ( ) ; } } ) ;
public class Orbitals { /** * Set a coefficient matrix */ public void setCoefficients ( Matrix C ) { } }
if ( count_basis == C . rows ) { this . C = C ; count_orbitals = C . columns ; }
public class FunctionSQL { /** * End of VoltDB extension */ public static FunctionSQL newSQLFunction ( String token , CompileContext context ) { } }
int id = regularFuncMap . get ( token , - 1 ) ; if ( id == - 1 ) { id = valueFuncMap . get ( token , - 1 ) ; } if ( id == - 1 ) { return null ; } FunctionSQL function = new FunctionSQL ( id ) ; if ( id == FUNC_VALUE ) { if ( context . currentDomain == null ) { return null ; } function . dataType = context . currentDomain ; } return function ;
public class ActionableHelper { /** * Deletes a FilePath file on exit . * @ param filePath The FilePath to delete on exit . * @ throws IOException In case of a missing file . */ public static void deleteFilePathOnExit ( FilePath filePath ) throws IOException , InterruptedException { } }
filePath . act ( new MasterToSlaveFileCallable < Void > ( ) { public Void invoke ( File file , VirtualChannel virtualChannel ) throws IOException , InterruptedException { file . deleteOnExit ( ) ; return null ; } } ) ;
public class UriUtils { /** * Returns a string that has been encoded for use in a URL . * @ param s The string to encode . * @ return The given string encoded for use in a URL . */ public static String urlNonFormEncode ( final String s ) { } }
if ( StringUtils . isBlank ( s ) ) { LOG . warn ( "Could not encode blank string" ) ; throw new IllegalArgumentException ( "Blank string" ) ; } try { // We use the HttpClient encoder because the java URLEncoder // class uses form encoding . return URIUtil . encodeQuery ( s , "UTF-8" ) ; } catch ( final URIException e ) { LOG . error ( "Could not encode: " + s , e ) ; // This really should never happen . assert false ; return s ; }
public class ParseErrors { /** * A helper method for formatting javacc ParseExceptions . * @ param errorToken The piece of text that we were unable to parse . * @ param expectedTokens The set of formatted tokens that we were expecting next . */ private static String formatParseExceptionDetails ( String errorToken , List < String > expectedTokens ) { } }
// quotes / normalize the expected tokens before rendering , just in case after normalization some // can be deduplicated . ImmutableSet . Builder < String > normalizedTokensBuilder = ImmutableSet . builder ( ) ; for ( String t : expectedTokens ) { normalizedTokensBuilder . add ( maybeQuoteForParseError ( t ) ) ; } expectedTokens = normalizedTokensBuilder . build ( ) . asList ( ) ; StringBuilder details = new StringBuilder ( ) ; int numExpectedTokens = expectedTokens . size ( ) ; if ( numExpectedTokens != 0 ) { details . append ( ": expected " ) ; for ( int i = 0 ; i < numExpectedTokens ; i ++ ) { details . append ( expectedTokens . get ( i ) ) ; if ( i < numExpectedTokens - 2 ) { details . append ( ", " ) ; } if ( i == numExpectedTokens - 2 ) { if ( numExpectedTokens > 2 ) { details . append ( ',' ) ; } details . append ( " or " ) ; } } } return String . format ( "parse error at '%s'%s" , escapeWhitespaceForErrorPrinting ( errorToken ) , details . toString ( ) ) ;
public class DockerAgentUtils { /** * Execute push docker image on agent * @ param launcher * @ param log * @ param imageTag * @ param username * @ param password * @ param host @ return * @ throws IOException * @ throws InterruptedException */ public static boolean pushImage ( Launcher launcher , final JenkinsBuildInfoLog log , final String imageTag , final String username , final String password , final String host ) throws IOException , InterruptedException { } }
return launcher . getChannel ( ) . call ( new MasterToSlaveCallable < Boolean , IOException > ( ) { public Boolean call ( ) throws IOException { String message = "Pushing image: " + imageTag ; if ( StringUtils . isNotEmpty ( host ) ) { message += " using docker daemon host: " + host ; } log . info ( message ) ; DockerUtils . pushImage ( imageTag , username , password , host ) ; return true ; } } ) ;
public class DBTransaction { /** * Add an update that will delete the row with the given row key from the given store . * @ param storeName Name of store from which to delete an object row . * @ param rowKey Row key in string form . */ public void deleteRow ( String storeName , String rowKey ) { } }
m_rowDeletes . add ( new RowDelete ( storeName , rowKey ) ) ;
public class XATransactionWrapper { /** * Inform the resource manager to roll back work done on behalf of a transaction branch * @ param xid - A global transaction identifier * @ throws XAException - An error has occurred */ @ Override public void rollback ( Xid xid ) throws XAException { } }
final boolean isTracingEnabled = TraceComponent . isAnyTracingEnabled ( ) ; if ( isTracingEnabled && tc . isEntryEnabled ( ) ) Tr . entry ( this , tc , "rollback" ) ; if ( getMcWrapper ( ) . isMCAborted ( ) ) { Tr . exit ( tc , "Connection was aborted. Exiting rollback." ) ; return ; } this . hasRollbackOccured = true ; try { xaResource . rollback ( xid ) ; } catch ( XAException e ) { processXAException ( e ) ; com . ibm . ws . ffdc . FFDCFilter . processException ( e , "com.ibm.ejs.j2c.XATransactionWrapper.rollback" , "755" , this ) ; if ( ! mcWrapper . isStale ( ) ) { Tr . error ( tc , "XA_RESOURCE_ADAPTER_OPERATION_ID_EXCP_J2CA0027" , "rollback" , xid , e , mcWrapper . gConfigProps . cfName ) ; } if ( isTracingEnabled && tc . isEntryEnabled ( ) ) Tr . exit ( this , tc , "rollback" , e ) ; throw e ; } catch ( Exception e ) { com . ibm . ws . ffdc . FFDCFilter . processException ( e , "com.ibm.ejs.j2c.XATransactionWrapper.rollback" , "761" , this ) ; if ( ! mcWrapper . shouldBeDestroyed ( ) ) { mcWrapper . markTransactionError ( ) ; Tr . error ( tc , "XA_RESOURCE_ADAPTER_OPERATION_ID_EXCP_J2CA0027" , "rollback" , xid , e , mcWrapper . gConfigProps . cfName ) ; } XAException x = new XAException ( "Exception:" + e . toString ( ) ) ; x . initCause ( e ) ; if ( isTracingEnabled && tc . isEntryEnabled ( ) ) Tr . exit ( this , tc , "rollback" , x ) ; throw x ; } if ( isTracingEnabled && tc . isEntryEnabled ( ) ) Tr . exit ( this , tc , "rollback" ) ;
public class EEValidationUtils {
    /**
     * Does additional validation on the WebServiceRef annotation. Note that some validation has already been done
     * at this point by WebServiceRefProcessor.
     * If we are injecting into a Service type, we check that the value() attribute is compatible with the type.
     * If we are injecting into a non-service type, we find the port types of the service type specified by the
     * value() attribute and check that one of them is compatible.
     *
     * @param wsRef the WebServiceRef annotation
     * @param declaringClass class containing this WebServiceRef
     * @param annotated the member annotated with this WebServiceRef (the injection point)
     */
    public static void validateWebServiceRef(WebServiceRef wsRef, Class<?> declaringClass, Annotated annotated) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.entry(tc, "validateWebServiceRef", new Object[] { wsRef, declaringClass, Util.identity(annotated) });
        }
        Class<?> ipClass = getInjectedClass(annotated);
        /*
         * note: Thorough WebService validation is performed later, by
         * com.ibm.ws.jaxws.client.injection.WebServiceRefProcessor.validateAndSetMemberLevelWebServiceRef()
         * This method performs a limited subset of that validation, in order to get the CDI CTS test to pass.
         * It would perhaps be better for us to delegate to WebServiceRefProcessor.validateAndSetMemberLevelWebServiceRef().
         */
        if (!wsRef.lookup().isEmpty()) {
            // If there's a lookup specified, don't validate
            // NOTE(review): returns without the matching Tr.exit entry-trace — confirm intentional.
            return;
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "Validating WebServiceRef injection point", wsRef);
        }
        Class<?> serviceClass = wsRef.value();
        // The injected type is determined by the field type and the attribute type parameter
        // It has already been validated that they are compatible, we want whichever is the subclass of the other
        Class<?> effectiveClass = ipClass;
        if (ipClass.isAssignableFrom(wsRef.type())) {
            effectiveClass = wsRef.type();
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "Found service type and effective injection types", serviceClass, effectiveClass);
        }
        if (Service.class.isAssignableFrom(effectiveClass)) {
            if (effectiveClass.getName().equals(Service.class.getName())) {
                // Injection point is the generic Service type: value() must name a proper Service subclass.
                if (!Service.class.isAssignableFrom(serviceClass) || serviceClass.getName().equals(Service.class.getName())) {
                    throwDefinitionException(declaringClass, annotated);
                }
            } else {
                if (!serviceClass.getName().equals(effectiveClass.getName()) && !serviceClass.getName().equals(Service.class.getName())) {
                    // We're injecting a service object, field should match service class
                    // if ( ! effectiveClass . isAssignableFrom ( serviceClass ) ) {
                    throwDefinitionException(declaringClass, annotated);
                }
            }
        } else {
            // We're injecting a port type
            // Enumerate the port types
            Set<Class<?>> portTypes = new HashSet<Class<?>>();
            for (Method method : serviceClass.getMethods()) {
                if (method.getAnnotation(WebEndpoint.class) != null) {
                    portTypes.add(method.getReturnType());
                }
            }
            // Check that the effective class matches one of the port types
            if (!portTypes.isEmpty()) {
                for (Class<?> endpointType : portTypes) {
                    if (effectiveClass.isAssignableFrom(endpointType)) {
                        // There is an endpoint type matching the injection point type
                        // NOTE(review): also returns without the Tr.exit trace.
                        return;
                    }
                }
                // There were endpoint types but none of them matched the injection point type
                throwDefinitionException(declaringClass, annotated);
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.exit(tc, "validateWebServiceRef");
        }
    }
}
public class GetWorkerReportPOptions {
    /**
     * <code>repeated .alluxio.grpc.block.WorkerInfoField fieldRanges = 2;</code>
     *
     * Generated protocol-buffer accessor: converts the internally stored value at
     * {@code index} of the repeated field into its {@code WorkerInfoField} enum form.
     *
     * @param index position within the repeated {@code fieldRanges} field
     * @return the enum value at the given index
     */
    public alluxio.grpc.WorkerInfoField getFieldRanges(int index) {
        return fieldRanges_converter_.convert(fieldRanges_.get(index));
    }
}
public class GetResourcesRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param getResourcesRequest the request to marshall; must not be null
     * @param protocolMarshaller the protocol-level marshaller that receives each bound field
     * @throws SdkClientException if the request is null or any field fails to marshall
     */
    public void marshall(GetResourcesRequest getResourcesRequest, ProtocolMarshaller protocolMarshaller) {
        if (getResourcesRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each request field to its wire-level location descriptor.
            protocolMarshaller.marshall(getResourcesRequest.getRestApiId(), RESTAPIID_BINDING);
            protocolMarshaller.marshall(getResourcesRequest.getPosition(), POSITION_BINDING);
            protocolMarshaller.marshall(getResourcesRequest.getLimit(), LIMIT_BINDING);
            protocolMarshaller.marshall(getResourcesRequest.getEmbed(), EMBED_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class DnDManager { /** * Find the rectangular area that is visible in screen coordinates * for the given component . */ protected Rectangle getRectOnScreen ( JComponent comp ) { } }
Rectangle r = comp . getVisibleRect ( ) ; Point p = comp . getLocationOnScreen ( ) ; r . translate ( p . x , p . y ) ; return r ;
public class QNMinimizer { /** * computes d = a + b * c */ private static double [ ] plusAndConstMult ( double [ ] a , double [ ] b , double c , double [ ] d ) { } }
for ( int i = 0 ; i < a . length ; i ++ ) { d [ i ] = a [ i ] + c * b [ i ] ; } return d ;
public class Authorizations { /** * Checks if this Account implies the given permission strings and returns a boolean array indicating which * permissions are implied . * This is an overloaded method for the corresponding type - safe { @ link Permission Permission } variant . * Please see the class - level JavaDoc for more information on these String - based permission methods . * @ param permissions the String representations of the Permissions that are being checked . * @ return a boolean array where indices correspond to the index of the * permissions in the given list . A true value at an index indicates this Account is permitted for * for the associated { @ code Permission } string in the list . A false value at an index * indicates otherwise . */ public boolean [ ] isPermitted ( String ... permissions ) { } }
boolean [ ] rights = new boolean [ permissions . length ] ; for ( int i = 0 ; i < permissions . length ; i ++ ) { Permission permission = new Permission ( permissions [ i ] ) ; rights [ i ] = isPermitted ( permission ) ; } return rights ;
public class Profiler { /** * Starts a profiling section . * @ param section The name of the section */ public synchronized void startSection ( String section ) { } }
section = section . toLowerCase ( ) ; if ( this . startTime . containsKey ( section ) ) throw new IllegalArgumentException ( "Section \"" + section + "\" had already been started!" ) ; this . startTime . put ( section , System . nanoTime ( ) ) ; this . trace . push ( section ) ;
public class BaseDependencyCheckMojo { /** * Returns the output name . * @ return the output name */ @ Override public String getOutputName ( ) { } }
if ( "HTML" . equalsIgnoreCase ( this . format ) || "ALL" . equalsIgnoreCase ( this . format ) ) { return "dependency-check-report" ; } else if ( "XML" . equalsIgnoreCase ( this . format ) ) { return "dependency-check-report.xml" ; } else if ( "JUNIT" . equalsIgnoreCase ( this . format ) ) { return "dependency-check-junit.xml" ; } else if ( "JSON" . equalsIgnoreCase ( this . format ) ) { return "dependency-check-report.json" ; } else if ( "CSV" . equalsIgnoreCase ( this . format ) ) { return "dependency-check-report.csv" ; } else { getLog ( ) . warn ( "Unknown report format used during site generation." ) ; return "dependency-check-report" ; }
public class ExpectedConditions {
    /**
     * An expectation for checking if the given element is selected.
     *
     * @param element WebElement to be selected
     * @param selected boolean state of the selection state of the element
     * @return true once the element's selection state is that of selected
     */
    public static ExpectedCondition<Boolean> elementSelectionStateToBe(final WebElement element, final boolean selected) {
        return new ExpectedCondition<Boolean>() {
            @Override
            public Boolean apply(WebDriver driver) {
                // Re-evaluated on every poll until the element's state matches the expectation.
                return element.isSelected() == selected;
            }

            @Override
            public String toString() {
                // Human-readable description used in timeout messages.
                return String.format("element (%s) to %sbe selected", element, (selected ? "" : "not "));
            }
        };
    }
}
public class PredicateOptimisations {
    /**
     * Removes duplicate occurrences of same predicate in a conjunction or disjunction. Also detects and removes
     * tautology and contradiction. The following translation rules are applied:
     * <ul>
     * <li>X || X =&gt; X</li>
     * <li>X &amp;&amp; X =&gt; X</li>
     * <li>!X || !X =&gt; !X</li>
     * <li>!X &amp;&amp; !X =&gt; !X</li>
     * <li>X || !X =&gt; TRUE (tautology)</li>
     * <li>X &amp;&amp; !X =&gt; FALSE (contradiction)</li>
     * </ul>
     *
     * @param children the list of children expressions (mutated in place)
     * @param isConjunction is the parent boolean expression a conjunction or a disjunction?
     */
    private static void removeRedundantPredicates(List<BooleanExpr> children, boolean isConjunction) {
        for (int i = 0; i < children.size(); i++) {
            BooleanExpr ci = children.get(i);
            if (ci instanceof BooleanOperatorExpr || ci instanceof FullTextBoostExpr || ci instanceof FullTextOccurExpr) {
                // we may encounter non-predicate expressions, just ignore them
                continue;
            }
            // Strip an optional negation and remember it for the comparison below.
            boolean isCiNegated = ci instanceof NotExpr;
            if (isCiNegated) {
                ci = ((NotExpr) ci).getChild();
            }
            assert ci instanceof PrimaryPredicateExpr;
            PrimaryPredicateExpr ci1 = (PrimaryPredicateExpr) ci;
            assert ci1.getChild() instanceof PropertyValueExpr;
            PropertyValueExpr pve = (PropertyValueExpr) ci1.getChild();
            if (pve.isRepeated()) {
                // do not optimize repeated predicates
                continue;
            }
            // Compare against every later sibling, removing duplicates as we go.
            int j = i + 1;
            while (j < children.size()) {
                BooleanExpr cj = children.get(j);
                // we may encounter non-predicate expressions, just ignore them
                if (!(cj instanceof BooleanOperatorExpr || cj instanceof FullTextBoostExpr || cj instanceof FullTextOccurExpr)) {
                    boolean isCjNegated = cj instanceof NotExpr;
                    if (isCjNegated) {
                        cj = ((NotExpr) cj).getChild();
                    }
                    PrimaryPredicateExpr cj1 = (PrimaryPredicateExpr) cj;
                    assert cj1.getChild() instanceof PropertyValueExpr;
                    PropertyValueExpr pve2 = (PropertyValueExpr) cj1.getChild();
                    // do not optimize repeated predicates
                    if (!pve2.isRepeated()) {
                        int res = comparePrimaryPredicates(isCiNegated, ci1, isCjNegated, cj1);
                        if (res == 0) {
                            // found duplication: removing shifts the list left, so skip j++ and
                            // re-test the element that just moved into slot j
                            children.remove(j);
                            continue;
                        } else if (res == 1) {
                            // found tautology or contradiction: collapse the whole expression to
                            // the corresponding constant (TRUE for disjunction, FALSE for conjunction)
                            children.clear();
                            children.add(ConstantBooleanExpr.forBoolean(!isConjunction));
                            return;
                        }
                    }
                }
                j++;
            }
        }
    }
}
public class BufferingBeanBuilder {
    /**
     * Gets the buffered value associated with the specified property name.
     *
     * @param metaProperty the meta-property, not null
     * @return the current value in the builder, null if not found or value is null
     */
    @Override
    @SuppressWarnings("unchecked")
    public <P> P get(MetaProperty<P> metaProperty) {
        // The buffer map is untyped; the cast is safe as long as values were stored
        // through the matching typed setter — hence the @SuppressWarnings above.
        return (P) getBuffer().get(metaProperty);
    }
}
public class GetData {
    /**
     * Get Current day (+/- number of days/months/years).
     *
     * @param days days offset from today
     * @param months months offset from today
     * @param years years offset from today
     * @param format date format pattern
     * @return the formatted date string
     */
    public static String getDate(int days, int months, int years, String format) {
        // Delegates to the Locale-aware overload, defaulting to English so formatted
        // month/day names are stable regardless of the JVM's default locale.
        return getDate(days, months, years, format, Locale.ENGLISH);
    }
}
public class InternalPureXbaseLexer {
    /**
     * $ANTLR start "T__61"
     *
     * Generated ANTLR lexer rule: matches the single character '[' and emits a
     * token of type T__61 on the default channel.
     */
    public final void mT__61() throws RecognitionException {
        try {
            int _type = T__61;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // InternalPureXbase.g:59:7: ( '[' )
            // InternalPureXbase.g:59:9: '['
            {
                match('[');
            }
            // Publish token type/channel into the shared lexer state.
            state.type = _type;
            state.channel = _channel;
        } finally {
            // Generated code: nothing to clean up for this rule.
        }
    }
}
public class TcpTransporter {
    /**
     * --- DISCONNECT ---
     *
     * Tears down the networking helpers owned by this transporter in a fixed order
     * (broadcaster, gossip timer, socket reader, socket writer). Each reference is
     * nulled after shutdown so a later reconnect starts from a clean state.
     */
    protected void disconnect() {
        // Stop broadcaster
        if (locator != null) {
            locator.disconnect();
            locator = null;
        }
        // Stop gossiper's timer ("false" presumably means do-not-interrupt-if-running — confirm against the timer type)
        if (gossiperTimer != null) {
            gossiperTimer.cancel(false);
            gossiperTimer = null;
        }
        // Close socket reader
        if (reader != null) {
            reader.disconnect();
            reader = null;
        }
        // Close socket writer
        if (writer != null) {
            writer.disconnect();
            writer = null;
        }
    }
}
public class Handler {
    /**
     * handleMessage message handles loop for org.hyperledger.fabric.java.shim side of chaincode/validator stream.
     *
     * @param message the incoming chaincode message to dispatch into the FSM
     * @throws Exception if the FSM cannot accept the event, or a transition fails with a real error
     */
    public synchronized void handleMessage(ChaincodeMessage message) throws Exception {
        if (message.getType() == ChaincodeMessage.Type.KEEPALIVE) {
            // (log string typo "Recieved" is pre-existing runtime output; left untouched)
            logger.debug(String.format("[%s] Recieved KEEPALIVE message, do nothing", shortID(message)));
            // Received a keep alive message, we don't do anything with it for now
            // and it does not touch the state machine
            return;
        }
        logger.debug(String.format("[%s]Handling ChaincodeMessage of type: %s(state:%s)", shortID(message), message.getType(), fsm.current()));
        if (fsm.eventCannotOccur(message.getType().toString())) {
            // The FSM rejects this event in the current state: report an ERROR message back
            // over the stream, then fail the call locally.
            String errStr = String.format("[%s]Chaincode handler org.hyperledger.fabric.java.fsm cannot handle message (%s) with payload size (%d) while in state: %s", message.getTxid(), message.getType(), message.getPayload().size(), fsm.current());
            ByteString payload = ByteString.copyFromUtf8(errStr);
            ChaincodeMessage errormessage = ChaincodeMessage.newBuilder().setType(ERROR).setPayload(payload).setTxid(message.getTxid()).build();
            serialSend(errormessage);
            throw new RuntimeException(errStr);
        }
        // Filter errors to allow NoTransitionError and CanceledError
        // to not propagate for cases where embedded Err == nil.
        try {
            fsm.raiseEvent(message.getType().toString(), message);
        } catch (NoTransitionException e) {
            if (e.error != null)
                throw e;
            logger.debug("[" + shortID(message) + "]Ignoring NoTransitionError");
        } catch (CancelledException e) {
            if (e.error != null)
                throw e;
            logger.debug("[" + shortID(message) + "]Ignoring CanceledError");
        }
    }
}
public class Transform1D { /** * Translate . * @ param thePath the path to follow . * @ param direction is the direction to follow on the path if the path contains only one segment . * @ param curvilineCoord the curviline translation . * @ param shiftCoord the shifting translation . */ public void translate ( List < ? extends S > thePath , Direction1D direction , double curvilineCoord , double shiftCoord ) { } }
this . path = thePath == null || thePath . isEmpty ( ) ? null : new ArrayList < > ( thePath ) ; this . firstSegmentDirection = detectFirstSegmentDirection ( direction ) ; this . curvilineTranslation += curvilineCoord ; this . shiftTranslation += shiftCoord ; this . isIdentity = null ;
public class ListStringRecordReader { /** * Called once at initialization . * @ param split the split that defines the range of records to read * @ throws IOException * @ throws InterruptedException */ @ Override public void initialize ( InputSplit split ) throws IOException , InterruptedException { } }
if ( split instanceof ListStringSplit ) { ListStringSplit listStringSplit = ( ListStringSplit ) split ; delimitedData = listStringSplit . getData ( ) ; dataIter = delimitedData . iterator ( ) ; } else { throw new IllegalArgumentException ( "Illegal type of input split " + split . getClass ( ) . getName ( ) ) ; }
public class ConfigServiceFactory { /** * Setup the factory beans */ public synchronized void setUp ( ) { } }
if ( initialized ) return ; String className = null ; // Load Map < Object , Object > properties = settings . getProperties ( ) ; String key = null ; Object serviceObject = null ; for ( Map . Entry < Object , Object > entry : properties . entrySet ( ) ) { key = String . valueOf ( entry . getKey ( ) ) ; if ( key . startsWith ( PROP_PREFIX ) ) { try { className = ( String ) entry . getValue ( ) ; serviceObject = ClassPath . newInstance ( className ) ; factoryMap . put ( key , serviceObject ) ; } catch ( Throwable e ) { throw new SetupException ( "CLASS:" + className + " ERROR:" + e . getMessage ( ) , e ) ; } } } initialized = true ;
public class ArrayCounter { /** * Returns an int [ ] array of length segments containing the distribution * count of the elements in unsorted int [ ] array with values between min * and max ( range ) . Values outside the min - max range are ignored < p > * A usage example is determining the count of people of each age group * in a large int [ ] array containing the age of each person . Called with * ( array , 16,0,79 ) , it will return an int [ 16 ] with the first element * the count of people aged 0-4 , the second element the count of those * aged 5-9 , and so on . People above the age of 79 are excluded . If the * range is not a multiple of segments , the last segment will be cover a * smaller sub - range than the rest . */ public static int [ ] countSegments ( int [ ] array , int elements , int segments , int start , int limit ) { } }
int [ ] counts = new int [ segments ] ; long interval = calcInterval ( segments , start , limit ) ; int index = 0 ; int element = 0 ; if ( interval <= 0 ) { return counts ; } for ( int i = 0 ; i < elements ; i ++ ) { element = array [ i ] ; if ( element < start || element >= limit ) { continue ; } index = ( int ) ( ( element - start ) / interval ) ; counts [ index ] ++ ; } return counts ;
public class InstanceAdminClient {
    /**
     * Deletes an instance.
     *
     * <p>Immediately upon completion of the request:
     *
     * <p>&#42; Billing ceases for all of the instance's reserved resources.
     *
     * <p>Soon afterward:
     *
     * <p>&#42; The instance and &#42;all of its databases&#42; immediately and irrevocably disappear
     * from the API. All data in the databases is permanently deleted.
     *
     * <p>Sample code:
     *
     * <pre><code>
     * try (InstanceAdminClient instanceAdminClient = InstanceAdminClient.create()) {
     *   InstanceName name = InstanceName.of("[PROJECT]", "[INSTANCE]");
     *   instanceAdminClient.deleteInstance(name);
     * }
     * </code></pre>
     *
     * @param name Required. The name of the instance to be deleted. Values are of the form
     *     `projects/&lt;project&gt;/instances/&lt;instance&gt;`
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    public final void deleteInstance(InstanceName name) {
        // Convert the typed resource name to its string form (guarding null) and
        // delegate to the request-object overload.
        DeleteInstanceRequest request = DeleteInstanceRequest.newBuilder().setName(name == null ? null : name.toString()).build();
        deleteInstance(request);
    }
}
public class XARMojo { /** * Add all the XML elements under the & lt ; info & gt ; element ( name , description , license , author , version and whether * it ' s a backup pack or not ) . * @ param infoElement the info element to which to add to */ private void addInfoElements ( Element infoElement ) { } }
Element el = new DOMElement ( "name" ) ; el . addText ( this . project . getName ( ) ) ; infoElement . add ( el ) ; el = new DOMElement ( "description" ) ; String description = this . project . getDescription ( ) ; if ( description == null ) { el . addText ( "" ) ; } else { el . addText ( description ) ; } infoElement . add ( el ) ; el = new DOMElement ( "licence" ) ; el . addText ( "" ) ; infoElement . add ( el ) ; el = new DOMElement ( "author" ) ; el . addText ( "XWiki.Admin" ) ; infoElement . add ( el ) ; el = new DOMElement ( "extensionId" ) ; el . addText ( this . project . getGroupId ( ) + ':' + this . project . getArtifactId ( ) ) ; infoElement . add ( el ) ; el = new DOMElement ( "version" ) ; el . addText ( this . project . getVersion ( ) ) ; infoElement . add ( el ) ; el = new DOMElement ( "backupPack" ) ; el . addText ( "false" ) ; infoElement . add ( el ) ;
public class S3ALowLevelOutputStream { /** * Creates a new temp file to write to . */ private void initNewFile ( ) throws IOException { } }
mFile = new File ( PathUtils . concatPath ( CommonUtils . getTmpDir ( mTmpDirs ) , UUID . randomUUID ( ) ) ) ; if ( mHash != null ) { mLocalOutputStream = new BufferedOutputStream ( new DigestOutputStream ( new FileOutputStream ( mFile ) , mHash ) ) ; } else { mLocalOutputStream = new BufferedOutputStream ( new FileOutputStream ( mFile ) ) ; } mPartitionOffset = 0 ; LOG . debug ( "Init new temp file @ {}" , mFile . getPath ( ) ) ;
public class WicketUrlExtensions {
    /**
     * Gets the base url.
     *
     * @param pageClass
     *            the page class
     * @param parameters
     *            the parameters
     * @return the base url
     */
    public static Url getBaseUrl(final Class<? extends Page> pageClass, final PageParameters parameters) {
        // Maps the page class plus parameters through the current request cycle's
        // URL mappers to produce the bookmarkable URL.
        return RequestCycle.get().mapUrlFor(pageClass, parameters);
    }
}
public class AbstractWriteHandler {
    /**
     * Abort the write process due to error.
     *
     * @param error the error
     */
    private void abort(Error error) {
        try {
            if (mContext == null || mContext.getError() != null || mContext.isDoneUnsafe()) {
                // Note, we may reach here via events due to network errors bubbling up before
                // mContext is initialized, or stream error after the request is finished.
                return;
            }
            // Record the error first so other code paths see the request as failed,
            // then release request state and report the error to the client.
            mContext.setError(error);
            cleanupRequest(mContext);
            replyError();
        } catch (Exception e) {
            // Best-effort cleanup: log and swallow so abort itself never throws.
            LOG.warn("Failed to cleanup states with error {}.", e.getMessage());
        }
    }
}
public class SagaMessageStream {
    /**
     * {@inheritDoc}
     *
     * Rejects null messages, then normalizes the raw header map into typed
     * headers before handing the message to {@code addMessage}.
     */
    @Override
    public void add(@Nonnull final Object message, @Nullable final Map<String, Object> headers) {
        checkNotNull(message, "Message to handle must not be null.");
        addMessage(message, toTypedHeaders(headers));
    }
}
public class MavenUtil { /** * parse snapshot exe name from maven - metadata . xml */ static String parseSnapshotExeName ( File mdFile ) throws IOException { } }
String exeName = null ; try { String clsStr = Protoc . getPlatformClassifier ( ) ; DocumentBuilder xmlBuilder = DocumentBuilderFactory . newInstance ( ) . newDocumentBuilder ( ) ; Document xmlDoc = xmlBuilder . parse ( mdFile ) ; NodeList versions = xmlDoc . getElementsByTagName ( "snapshotVersion" ) ; for ( int i = 0 ; i < versions . getLength ( ) ; i ++ ) { Node ver = versions . item ( i ) ; Node cls = null ; Node val = null ; for ( int j = 0 ; j < ver . getChildNodes ( ) . getLength ( ) ; j ++ ) { Node n = ver . getChildNodes ( ) . item ( j ) ; if ( n . getNodeName ( ) . equals ( "classifier" ) ) cls = n ; if ( n . getNodeName ( ) . equals ( "value" ) ) val = n ; } if ( cls != null && val != null && cls . getTextContent ( ) . equals ( clsStr ) ) { exeName = "protoc-" + val . getTextContent ( ) + "-" + clsStr + ".exe" ; break ; } } } catch ( Exception e ) { throw new IOException ( e ) ; } return exeName ;
public class DwgPolyline2D { /** * Read a Polyline2D in the DWG format Version 15 * @ param data Array of unsigned bytes obtained from the DWG binary file * @ param offset The current bit offset where the value begins * @ throws Exception If an unexpected bit value is found in the DWG file . Occurs * when we are looking for LwPolylines . */ public void readDwgPolyline2DV15 ( int [ ] data , int offset ) throws Exception { } }
// System . out . println ( " readDwgPolyline2D executing . . . " ) ; int bitPos = offset ; bitPos = readObjectHeaderV15 ( data , bitPos ) ; Vector v = DwgUtil . getBitShort ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; int flags = ( ( Integer ) v . get ( 1 ) ) . intValue ( ) ; this . flags = flags ; v = DwgUtil . getBitShort ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; int ctype = ( ( Integer ) v . get ( 1 ) ) . intValue ( ) ; curveType = ctype ; v = DwgUtil . getBitDouble ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; double sw = ( ( Double ) v . get ( 1 ) ) . doubleValue ( ) ; initWidth = sw ; v = DwgUtil . getBitDouble ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; double ew = ( ( Double ) v . get ( 1 ) ) . doubleValue ( ) ; endWidth = ew ; v = DwgUtil . testBit ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; boolean flag = ( ( Boolean ) v . get ( 1 ) ) . booleanValue ( ) ; double th = 0.0 ; if ( ! flag ) { v = DwgUtil . getBitDouble ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; th = ( ( Double ) v . get ( 1 ) ) . doubleValue ( ) ; } this . thickness = th ; v = DwgUtil . getBitDouble ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; double elev = ( ( Double ) v . get ( 1 ) ) . doubleValue ( ) ; elevation = elev ; v = DwgUtil . testBit ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; flag = ( ( Boolean ) v . get ( 1 ) ) . booleanValue ( ) ; double ex , ey , ez = 0.0 ; if ( flag ) { ex = 0.0 ; ey = 0.0 ; ez = 1.0 ; } else { v = DwgUtil . getBitDouble ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; ex = ( ( Double ) v . get ( 1 ) ) . doubleValue ( ) ; v = DwgUtil . getBitDouble ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; ey = ( ( Double ) v . get ( 1 ) ) . 
doubleValue ( ) ; v = DwgUtil . getBitDouble ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; ez = ( ( Double ) v . get ( 1 ) ) . doubleValue ( ) ; } extrusion = new double [ ] { ex , ey , ez } ; bitPos = readObjectTailV15 ( data , bitPos ) ; v = DwgUtil . getHandle ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; int [ ] handle = new int [ v . size ( ) - 1 ] ; for ( int i = 1 ; i < v . size ( ) ; i ++ ) { handle [ i - 1 ] = ( ( Integer ) v . get ( i ) ) . intValue ( ) ; } Vector handleVect = new Vector ( ) ; for ( int i = 0 ; i < handle . length ; i ++ ) { handleVect . add ( new Integer ( handle [ i ] ) ) ; } firstVertexHandle = DwgUtil . handleBinToHandleInt ( handleVect ) ; v = DwgUtil . getHandle ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; handle = new int [ v . size ( ) - 1 ] ; for ( int i = 1 ; i < v . size ( ) ; i ++ ) { handle [ i - 1 ] = ( ( Integer ) v . get ( i ) ) . intValue ( ) ; } handleVect = new Vector ( ) ; for ( int i = 0 ; i < handle . length ; i ++ ) { handleVect . add ( new Integer ( handle [ i ] ) ) ; } lastVertexHandle = DwgUtil . handleBinToHandleInt ( handleVect ) ; v = DwgUtil . getHandle ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; handle = new int [ v . size ( ) - 1 ] ; for ( int i = 1 ; i < v . size ( ) ; i ++ ) { handle [ i - 1 ] = ( ( Integer ) v . get ( i ) ) . intValue ( ) ; } handleVect = new Vector ( ) ; for ( int i = 0 ; i < handle . length ; i ++ ) { handleVect . add ( new Integer ( handle [ i ] ) ) ; } seqendHandle = DwgUtil . handleBinToHandleInt ( handleVect ) ;
public class IMatrix { /** * Transpose a matrix */ public IMatrix transpose ( ) { } }
IMatrix result = new IMatrix ( rows , columns ) ; transpose ( result ) ; return result ;
public class JSType { /** * Looks up a property on this type , but without properly replacing any templates in the result . * < p > Subclasses can override this if they need more complicated logic for property lookup than * just autoboxing to an object . * < p > This is only for use by { @ code findPropertyType ( JSType ) } . Call that method instead if you * need to lookup a property on a random JSType */ @ ForOverride @ Nullable protected JSType findPropertyTypeWithoutConsideringTemplateTypes ( String propertyName ) { } }
ObjectType autoboxObjType = ObjectType . cast ( autoboxesTo ( ) ) ; if ( autoboxObjType != null ) { return autoboxObjType . findPropertyType ( propertyName ) ; } return null ;
public class StringUtils { /** * < p > Find the latest index of any of a set of potential substrings . < / p > * < p > A { @ code null } CharSequence will return { @ code - 1 } . * A { @ code null } search array will return { @ code - 1 } . * A { @ code null } or zero length search array entry will be ignored , * but a search array containing " " will return the length of { @ code str } * if { @ code str } is not null . This method uses { @ link String # indexOf ( String ) } if possible < / p > * < pre > * StringUtils . lastIndexOfAny ( null , * ) = - 1 * StringUtils . lastIndexOfAny ( * , null ) = - 1 * StringUtils . lastIndexOfAny ( * , [ ] ) = - 1 * StringUtils . lastIndexOfAny ( * , [ null ] ) = - 1 * StringUtils . lastIndexOfAny ( " zzabyycdxx " , [ " ab " , " cd " ] ) = 6 * StringUtils . lastIndexOfAny ( " zzabyycdxx " , [ " cd " , " ab " ] ) = 6 * StringUtils . lastIndexOfAny ( " zzabyycdxx " , [ " mn " , " op " ] ) = - 1 * StringUtils . lastIndexOfAny ( " zzabyycdxx " , [ " mn " , " op " ] ) = - 1 * StringUtils . lastIndexOfAny ( " zzabyycdxx " , [ " mn " , " " ] ) = 10 * < / pre > * @ param str the CharSequence to check , may be null * @ param searchStrs the CharSequences to search for , may be null * @ return the last index of any of the CharSequences , - 1 if no match * @ since 3.0 Changed signature from lastIndexOfAny ( String , String [ ] ) to lastIndexOfAny ( CharSequence , CharSequence ) */ public static int lastIndexOfAny ( final CharSequence str , final CharSequence ... searchStrs ) { } }
if ( str == null || searchStrs == null ) { return INDEX_NOT_FOUND ; } int ret = INDEX_NOT_FOUND ; int tmp = 0 ; for ( final CharSequence search : searchStrs ) { if ( search == null ) { continue ; } tmp = CharSequenceUtils . lastIndexOf ( str , search , str . length ( ) ) ; if ( tmp > ret ) { ret = tmp ; } } return ret ;
public class Multiplexing {
    /**
     * Flattens passed iterators of E to an iterator of E. E.g:
     * <code>
     * chain([1,2], [3,4]) -&gt; [1,2,3,4]
     * </code>
     *
     * @param <E> the iterator element type
     * @param <I> the iterator type
     * @param iterators the array of iterators to be flattened
     * @return the flattened iterator
     */
    public static <E, I extends Iterator<E>> Iterator<E> chain(I... iterators) {
        // Wrap the array in an iterator-of-iterators and let ChainIterator
        // drain them one after another.
        return new ChainIterator<>(ArrayIterator.of(iterators));
    }
}
public class PageShellInterceptor { /** * Renders the content after the backing component . * @ param writer the writer to write to . */ protected void afterPaint ( final PrintWriter writer ) { } }
PageShell pageShell = Factory . newInstance ( PageShell . class ) ; pageShell . writeFooter ( writer ) ; pageShell . closeDoc ( writer ) ;
public class IdentityPatchContext { /** * Create a patch element for the rollback patch . * @ param entry the entry * @ return the new patch element */ protected static PatchElement createRollbackElement ( final PatchEntry entry ) { } }
final PatchElement patchElement = entry . element ; final String patchId ; final Patch . PatchType patchType = patchElement . getProvider ( ) . getPatchType ( ) ; if ( patchType == Patch . PatchType . CUMULATIVE ) { patchId = entry . getCumulativePatchID ( ) ; } else { patchId = patchElement . getId ( ) ; } return createPatchElement ( entry , patchId , entry . rollbackActions ) ;
public class DataReader { /** * Load patches which override values in the CLDR data . Use this to restore * certain cases to our preferred form . */ private void loadPatches ( ) throws IOException { } }
List < Path > paths = Utils . listResources ( PATCHES ) ; for ( Path path : paths ) { String fileName = path . getFileName ( ) . toString ( ) ; if ( fileName . endsWith ( ".json" ) ) { JsonObject root = load ( path ) ; patches . put ( fileName , root ) ; } }
public class GeneralStorable {
    /**
     * Returns the key belonging to the given field as read-only ByteBuffer.
     *
     * @param field
     *            the name of the field
     * @return the requested value
     * @throws IOException
     */
    public ByteBuffer getKey(String field) throws IOException {
        // NOTE(review): getBytes() uses the platform default charset, so this hash can
        // differ across platforms/locales — only safe if keyHash2Index was populated the
        // same way; consider an explicit charset at both sites. TODO confirm.
        int hash = Arrays.hashCode(field.getBytes());
        // NOTE(review): an unknown field name presumably makes keyHash2Index.get(hash)
        // return null, and unboxing it would throw NullPointerException — verify callers
        // only pass known field names.
        return getKey(structure.keyHash2Index.get(hash));
    }
}
public class ProxyBranchImpl { /** * https : / / code . google . com / p / sipservlets / issues / detail ? id = 238 */ public void removeTransaction ( String branch ) { } }
synchronized ( this . ongoingTransactions ) { TransactionRequest remove = null ; for ( TransactionRequest tr : this . ongoingTransactions ) { if ( tr . branchId . equals ( branch ) ) { remove = tr ; break ; } } if ( remove != null ) { boolean removed = this . ongoingTransactions . remove ( remove ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Removed transaction " + branch + " from proxy branch ? " + removed ) ; } } else { if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Removing transaction " + branch + " from proxy branch FAILED. Not found." ) ; } } }
public class ParosTableHistory { /** * Deletes from the database all the history records whose ID is in the list { @ code ids } , in batches of given * { @ code batchSize } . * @ param ids a { @ code List } containing all the IDs of the history records to be deleted * @ param batchSize the maximum size of records to delete in a single batch * @ throws IllegalArgumentException if { @ code ids } is null * @ throws IllegalArgumentException if { @ code batchSize } is not greater than zero * @ throws DatabaseException if an error occurred while deleting the history records * @ since 2.3.0 */ @ Override public synchronized void delete ( List < Integer > ids , int batchSize ) throws DatabaseException { } }
try { if ( ids == null ) { throw new IllegalArgumentException ( "Parameter ids must not be null." ) ; } if ( batchSize <= 0 ) { throw new IllegalArgumentException ( "Parameter batchSize must be greater than zero." ) ; } int count = 0 ; for ( Integer id : ids ) { psDelete . setInt ( 1 , id ) ; psDelete . addBatch ( ) ; count ++ ; if ( count % batchSize == 0 ) { psDelete . executeBatch ( ) ; count = 0 ; } } if ( count % batchSize != 0 ) { psDelete . executeBatch ( ) ; } } catch ( SQLException e ) { throw new DatabaseException ( e ) ; }
public class Block {
    /**
     * Splits this block's lines, creating a new child block having 'line' as its
     * lineTail.
     *
     * @param aLine The line to split from.
     * @return The newly created Block.
     */
    public Block split(final Line aLine) {
        final Block aBlock = new Block();
        // The child block takes over the current head of the line list and ends
        // at the split line.
        aBlock.m_aLines = m_aLines;
        aBlock.m_aLineTail = aLine;
        // This block keeps everything after the split line.
        m_aLines = aLine.m_aNext;
        // Detach the split line from its successor; it is now a tail.
        aLine.m_aNext = null;
        if (m_aLines == null)
            m_aLineTail = null;
        else
            m_aLines.m_aPrevious = null;
        // Append the child block to this block's child list.
        if (m_aBlocks == null)
            m_aBlocks = m_aBlockTail = aBlock;
        else {
            m_aBlockTail.m_aNext = aBlock;
            m_aBlockTail = aBlock;
        }
        return aBlock;
    }
}
public class AbstractDns {
    /**
     * Reflectively builds a {@code java.net.InetAddress$CacheEntry} mapping the
     * given host to the given IP addresses.
     *
     * @param host the host name the entry is created for
     * @param ips the textual IP addresses to resolve into the entry
     * @return the reflectively constructed cache entry object
     */
    protected Object createCacheEntry(String host, String[] ips) {
        try {
            long expiration = System.currentTimeMillis() + EXPIRATION; // original note: "expires after 10 years"
            InetAddress[] addresses = new InetAddress[ips.length];
            for (int i = 0; i < addresses.length; i++) {
                // addresses[i] = InetAddress.getByAddress(host, toBytes(ips[i]));
                addresses[i] = InetAddress.getByAddress(host, InetAddress.getByName(ips[i]).getAddress());
            }
            // NOTE(review): depends on the JDK-internal InetAddress$CacheEntry
            // class and the argument shape of its first declared constructor --
            // fragile across JDK versions; confirm against the target runtime.
            String className = "java.net.InetAddress$CacheEntry";
            Class<?> clazz = Class.forName(className);
            Constructor<?> constructor = clazz.getDeclaredConstructors()[0];
            constructor.setAccessible(true);
            return constructor.newInstance(addresses, expiration);
        } catch (Exception e) {
            throw new RuntimeException(e.getMessage(), e);
        }
    }
}
public class CommitMarkerCodec {
    /**
     * Reads the next record from the sequence file and, if it is a valid commit
     * marker, returns the count stored in it.
     *
     * @param reader the sequence file reader to read the next raw record from
     * @return the number of entries appended, or -1 on end of file (we can
     *     recover from EOF without any consequence)
     * @throws IOException if the record read is not a valid marker
     */
    public int readMarker(SequenceFile.Reader reader) throws IOException {
        // Lazily create the reusable value-bytes holder.
        if (valueBytes == null) {
            valueBytes = reader.createValueBytes();
        }
        rawKey.reset();
        rawValue.reset();
        // valueBytes need not be reset since nextRaw call does it (and it is a private method)
        int status = reader.nextRaw(rawKey, valueBytes);
        // if we reach EOF, return -1
        if (status == -1) {
            return -1;
        }
        // Check if the marker key is valid and return the count
        if (isMarkerValid()) {
            valueBytes.writeUncompressedBytes(rawValue);
            rawValue.flush();
            // rawValue.getData() may return a larger byte array but Ints.fromByteArray will only read the first four bytes
            return Ints.fromByteArray(rawValue.getData());
        }
        // EOF not reached and the marker is not valid: throw an IOException
        // since we cannot make progress.
        throw new IOException(String.format("Invalid key for num entries appended found %s, expected : %s",
                new String(rawKey.getData()), TxConstants.TransactionLog.NUM_ENTRIES_APPENDED));
    }
}
public class CommerceRegionPersistenceImpl { /** * Removes the commerce region with the primary key from the database . Also notifies the appropriate model listeners . * @ param primaryKey the primary key of the commerce region * @ return the commerce region that was removed * @ throws NoSuchRegionException if a commerce region with the primary key could not be found */ @ Override public CommerceRegion remove ( Serializable primaryKey ) throws NoSuchRegionException { } }
Session session = null ; try { session = openSession ( ) ; CommerceRegion commerceRegion = ( CommerceRegion ) session . get ( CommerceRegionImpl . class , primaryKey ) ; if ( commerceRegion == null ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } throw new NoSuchRegionException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return remove ( commerceRegion ) ; } catch ( NoSuchRegionException nsee ) { throw nsee ; } catch ( Exception e ) { throw processException ( e ) ; } finally { closeSession ( session ) ; }
public class SymbolTable { /** * Gets all symbols associated with the given type . For union types , this may be multiple symbols . * For instance types , this will return the constructor of that instance . */ public List < Symbol > getAllSymbolsForType ( JSType type ) { } }
if ( type == null ) { return ImmutableList . of ( ) ; } UnionType unionType = type . toMaybeUnionType ( ) ; if ( unionType != null ) { List < Symbol > result = new ArrayList < > ( 2 ) ; for ( JSType alt : unionType . getAlternates ( ) ) { // Our type system never has nested unions . Symbol altSym = getSymbolForTypeHelper ( alt , true ) ; if ( altSym != null ) { result . add ( altSym ) ; } } return result ; } Symbol result = getSymbolForTypeHelper ( type , true ) ; return result == null ? ImmutableList . of ( ) : ImmutableList . of ( result ) ;
public class DetourCommon { /** * / @ see dtOverlapQuantBounds */ public static boolean overlapBounds ( float [ ] amin , float [ ] amax , float [ ] bmin , float [ ] bmax ) { } }
boolean overlap = true ; overlap = ( amin [ 0 ] > bmax [ 0 ] || amax [ 0 ] < bmin [ 0 ] ) ? false : overlap ; overlap = ( amin [ 1 ] > bmax [ 1 ] || amax [ 1 ] < bmin [ 1 ] ) ? false : overlap ; overlap = ( amin [ 2 ] > bmax [ 2 ] || amax [ 2 ] < bmin [ 2 ] ) ? false : overlap ; return overlap ;
public class OstrovskyInitialMeans { /** * Initialize the weight list . * @ param weights Weight list * @ param ids IDs * @ param relation Data relation * @ param first First ID * @ param second Second ID * @ param distQ Distance query * @ return Weight sum * @ param < T > Object type */ protected static < T > double initialWeights ( WritableDoubleDataStore weights , Relation < ? extends T > relation , DBIDs ids , T first , T second , DistanceQuery < ? super T > distQ ) { } }
double weightsum = 0. ; for ( DBIDIter it = ids . iter ( ) ; it . valid ( ) ; it . advance ( ) ) { // distance will usually already be squared T v = relation . get ( it ) ; double weight = Math . min ( distQ . distance ( first , v ) , distQ . distance ( second , v ) ) ; weights . putDouble ( it , weight ) ; weightsum += weight ; } return weightsum ;
public class BAMInputFormat {
    /**
     * Only include reads that overlap the given intervals. Unplaced unmapped
     * reads are not included.
     *
     * @param conf the Hadoop configuration to set properties on
     * @param intervals the intervals to filter by
     * @param <T> the {@link Locatable} type
     */
    public static <T extends Locatable> void setIntervals(Configuration conf, List<T> intervals) {
        // Delegate; the final 'false' excludes unplaced unmapped reads.
        setTraversalParameters(conf, intervals, false);
    }
}
public class DoubleHistogram {
    /**
     * Add the contents of another histogram to this one, while correcting the
     * incoming data for coordinated omission.
     * To compensate for the loss of sampled values when a recorded value is
     * larger than the expected interval between value samples, the values added
     * will include an auto-generated additional series of decreasingly-smaller
     * (down to the expectedIntervalBetweenValueSamples) value records for each
     * count found in the current histogram that is larger than the
     * expectedIntervalBetweenValueSamples.
     * Note: This is a post-recording correction method, as opposed to the
     * at-recording correction method provided by
     * {@link #recordValueWithExpectedInterval(double, double) recordValueWithExpectedInterval}.
     * The two methods are mutually exclusive, and only one of the two should be
     * used on a given data set to correct for the same coordinated omission issue.
     *
     * @param fromHistogram Other histogram. highestToLowestValueRatio and numberOfSignificantValueDigits must match.
     * @param expectedIntervalBetweenValueSamples If expectedIntervalBetweenValueSamples is larger than 0, add
     *     auto-generated value records as appropriate if value is larger than
     *     expectedIntervalBetweenValueSamples
     * @throws ArrayIndexOutOfBoundsException (may throw) if values exceed highestTrackableValue
     */
    public void addWhileCorrectingForCoordinatedOmission(final DoubleHistogram fromHistogram, final double expectedIntervalBetweenValueSamples) {
        final DoubleHistogram toHistogram = this;
        // Replay every recorded value (scaled from the internal integer domain
        // back to the double value domain) into this histogram, letting the
        // expected-interval logic insert the compensating records.
        for (HistogramIterationValue v : fromHistogram.integerValuesHistogram.recordedValues()) {
            toHistogram.recordValueWithCountAndExpectedInterval(
                    v.getValueIteratedTo() * integerToDoubleValueConversionRatio,
                    v.getCountAtValueIteratedTo(),
                    expectedIntervalBetweenValueSamples);
        }
    }
}
public class TaskUpdateOptions { /** * Set a timestamp indicating the last modified time of the resource known to the client . The operation will be performed only if the resource on the service has not been modified since the specified time . * @ param ifUnmodifiedSince the ifUnmodifiedSince value to set * @ return the TaskUpdateOptions object itself . */ public TaskUpdateOptions withIfUnmodifiedSince ( DateTime ifUnmodifiedSince ) { } }
if ( ifUnmodifiedSince == null ) { this . ifUnmodifiedSince = null ; } else { this . ifUnmodifiedSince = new DateTimeRfc1123 ( ifUnmodifiedSince ) ; } return this ;
public class JDBCDatabaseMetaData { /** * # ifdef JAVA6 */ public ResultSet getSchemas ( String catalog , String schemaPattern ) throws SQLException { } }
StringBuffer select = toQueryPrefix ( "SYSTEM_SCHEMAS" ) . append ( and ( "TABLE_CATALOG" , "=" , catalog ) ) . append ( and ( "TABLE_SCHEM" , "LIKE" , schemaPattern ) ) ; // By default , query already returns result in contract order return execute ( select . toString ( ) ) ;
public class WorkAloneRedisManager { /** * get value from redis * @ param key key * @ return value */ @ Override public byte [ ] get ( byte [ ] key ) { } }
if ( key == null ) { return null ; } byte [ ] value = null ; Jedis jedis = getJedis ( ) ; try { value = jedis . get ( key ) ; } finally { jedis . close ( ) ; } return value ;
public class Signatures {
    /**
     * Selects the best equally-matching methods for the given arguments.
     *
     * @param methods the candidate methods
     * @param args the actual argument values
     * @return the best equally-matching methods
     */
    public static Method[] candidateMethods(Method[] methods, Object[] args) {
        // Derive the argument types and delegate to the type-based overload.
        return candidateMethods(methods, collectArgTypes(args));
    }
}
public class CSSWriter { /** * Create the CSS without a specific charset . * @ param aCSS * The CSS object to be converted to text . May not be < code > null < / code > * @ return The text representation of the CSS . * @ see # writeCSS ( CascadingStyleSheet , Writer ) */ @ Nonnull public String getCSSAsString ( @ Nonnull final CascadingStyleSheet aCSS ) { } }
final NonBlockingStringWriter aSW = new NonBlockingStringWriter ( ) ; try { writeCSS ( aCSS , aSW ) ; } catch ( final IOException ex ) { // Should never occur since NonBlockingStringWriter does not throw such an // exception throw new IllegalStateException ( "Totally unexpected" , ex ) ; } return aSW . getAsString ( ) ;
public class KerasSequentialModel { /** * Build a MultiLayerNetwork from this Keras Sequential model configuration and import weights . * @ return MultiLayerNetwork */ public MultiLayerNetwork getMultiLayerNetwork ( boolean importWeights ) throws InvalidKerasConfigurationException , UnsupportedKerasConfigurationException { } }
MultiLayerNetwork model = new MultiLayerNetwork ( getMultiLayerConfiguration ( ) ) ; model . init ( ) ; if ( importWeights ) model = ( MultiLayerNetwork ) KerasModelUtils . copyWeightsToModel ( model , this . layers ) ; return model ;
public class JsMessageVisitor {
    /**
     * Appends the message parts in a JS message value extracted from the given
     * text node.
     *
     * @param builder the JS message builder to append parts to
     * @param node the node with string literal that contains the message text
     * @throws MalformedException if {@code value} contains a reference to
     *     an unregistered placeholder
     */
    private static void parseMessageTextNode(Builder builder, Node node) throws MalformedException {
        String value = extractStringFromStringExprNode(node);
        // Repeatedly peel off "literal text, placeholder" pairs from the front
        // of the remaining value until it is exhausted.
        while (true) {
            int phBegin = value.indexOf(PH_JS_PREFIX);
            if (phBegin < 0) {
                // Just a string literal
                builder.appendStringPart(value);
                return;
            } else {
                if (phBegin > 0) {
                    // A string literal followed by a placeholder
                    builder.appendStringPart(value.substring(0, phBegin));
                }
                // A placeholder. Find where it ends
                int phEnd = value.indexOf(PH_JS_SUFFIX, phBegin);
                if (phEnd < 0) {
                    throw new MalformedException(
                            "Placeholder incorrectly formatted in: " + builder.getKey(), node);
                }
                String phName = value.substring(phBegin + PH_JS_PREFIX.length(), phEnd);
                builder.appendPlaceholderReference(phName);
                int nextPos = phEnd + PH_JS_SUFFIX.length();
                if (nextPos < value.length()) {
                    // Iterate on the rest of the message value
                    value = value.substring(nextPos);
                } else {
                    // The message is parsed
                    return;
                }
            }
        }
    }
}
public class SiteSwitcherHandlerInterceptor {
    /**
     * Creates a site switcher that redirects to a custom domain for normal site
     * requests that either originate from a mobile device or indicate a mobile
     * site preference.
     * Uses a {@link CookieSitePreferenceRepository} that saves a cookie that is
     * shared between the two domains.
     *
     * @param normalServerName the 'normal' domain name e.g. "normal.com"
     * @param mobileServerName the 'mobile' domain name e.g. "mobile.com"
     * @param cookieDomain the name to use for saving the cookie
     * @param tabletIsMobile true if tablets should be presented with the 'mobile' site
     * @see #standard(String, String, String)
     * @see #standard(String, String, String, String)
     * @see StandardSiteUrlFactory
     */
    public static SiteSwitcherHandlerInterceptor standard(String normalServerName, String mobileServerName, String cookieDomain, Boolean tabletIsMobile) {
        // Delegate handler construction to the standard factory.
        return new SiteSwitcherHandlerInterceptor(
                StandardSiteSwitcherHandlerFactory.standard(normalServerName, mobileServerName, cookieDomain, tabletIsMobile));
    }
}
public class ConciseSet {
    /**
     * Performs the given operation over the bit-sets.
     *
     * @param other {@link ConciseSet} instance that represents the right operand
     * @param operator operator
     * @return the result of the operation
     */
    private ConciseSet performOperation(ConciseSet other, Operator operator) {
        // non-empty arguments
        if (this.isEmpty() || other.isEmpty()) {
            return operator.combineEmptySets(this, other);
        }
        // if the two operands are disjoint, the operation is faster
        ConciseSet res = operator.combineDisjointSets(this, other);
        if (res != null) {
            return res;
        }
        // Allocate a sufficient number of words to contain all possible results.
        // NOTE: since lastWordIndex is the index of the last used word in "words",
        // we require "+ 2" to have the actual maximum required space.
        // In any case, we do not allocate more than the maximum space required
        // for the uncompressed representation.
        // Another "+ 1" is required to allows for the addition of the last word
        // before compacting.
        res = empty();
        res.words = new int[1 + Math.min(this.lastWordIndex + other.lastWordIndex + 2,
                maxLiteralLengthDivision(Math.max(this.last, other.last)) << (simulateWAH ? 1 : 0))];
        // scan "this" and "other"
        WordIterator thisItr = new WordIterator();
        WordIterator otherItr = other.new WordIterator();
        while (true) {
            if (!thisItr.isLiteral) {
                if (!otherItr.isLiteral) {
                    // Both words are fills: combine min(count) words at once.
                    int minCount = Math.min(thisItr.count, otherItr.count);
                    res.appendFill(minCount, operator.combineLiterals(thisItr.word, otherItr.word));
                    //noinspection NonShortCircuitBooleanExpression
                    if (!thisItr.prepareNext(minCount) | /* NOT || */ !otherItr.prepareNext(minCount)) {
                        break;
                    }
                } else {
                    // Fill vs literal: expand this side's word to a literal.
                    res.appendLiteral(operator.combineLiterals(thisItr.toLiteral(), otherItr.word));
                    thisItr.word--;
                    //noinspection NonShortCircuitBooleanExpression
                    if (!thisItr.prepareNext(1) | /* do NOT use "||" */ !otherItr.prepareNext()) {
                        break;
                    }
                }
            } else if (!otherItr.isLiteral) {
                // Literal vs fill: expand the other side's word to a literal.
                res.appendLiteral(operator.combineLiterals(thisItr.word, otherItr.toLiteral()));
                otherItr.word--;
                //noinspection NonShortCircuitBooleanExpression
                if (!thisItr.prepareNext() | /* do NOT use "||" */ !otherItr.prepareNext(1)) {
                    break;
                }
            } else {
                // Both words are literals.
                res.appendLiteral(operator.combineLiterals(thisItr.word, otherItr.word));
                //noinspection NonShortCircuitBooleanExpression
                if (!thisItr.prepareNext() | /* do NOT use "||" */ !otherItr.prepareNext()) {
                    break;
                }
            }
        }
        // invalidate the size
        res.size = -1;
        boolean invalidLast = true;
        // if one bit string is greater than the other one, we add the remaining
        // bits depending on the given operation.
        switch (operator) {
        case AND:
            break;
        case OR:
            res.last = Math.max(this.last, other.last);
            invalidLast = thisItr.flush(res);
            invalidLast |= otherItr.flush(res);
            break;
        case XOR:
            if (this.last != other.last) {
                res.last = Math.max(this.last, other.last);
                invalidLast = false;
            }
            invalidLast |= thisItr.flush(res);
            invalidLast |= otherItr.flush(res);
            break;
        case ANDNOT:
            if (this.last > other.last) {
                res.last = this.last;
                invalidLast = false;
            }
            invalidLast |= thisItr.flush(res);
            break;
        }
        // remove trailing zeros
        res.trimZeros();
        if (res.isEmpty()) {
            return res;
        }
        // compute the greatest element
        if (invalidLast) {
            res.updateLast();
        }
        // compact the memory
        res.compact();
        return res;
    }
}
public class DBaseFileWriter {
    /**
     * Write a string inside the current <var>stream</var>.
     * Each character of the string will be written as bytes. No terminal null
     * character is written. The string area will be filled with the given byte.
     *
     * @param str is the string to write
     * @param size is the max size of the string to write. If <var>str</var> is
     *     longer, it is cut.
     * @param fillingChar is the character used to fill the string area.
     * @throws IOException in case of error.
     */
    private void writeDBFString(String str, int size, byte fillingChar) throws IOException {
        assert this.language != null;
        // Be sure that the encoding will be the right one
        int strSize = 0;
        if (str != null) {
            final Charset encodingCharset = this.language.getChatset();
            if (encodingCharset == null) {
                throw new IOException(Locale.getString("UNKNOWN_CHARSET")); //$NON-NLS-1$
            }
            final byte[] bytes = str.getBytes(encodingCharset);
            if (bytes != null) {
                strSize = bytes.length;
                // Write at most 'size' encoded bytes of the string.
                for (int i = 0; i < size && i < strSize; ++i) {
                    this.stream.writeByte(bytes[i]);
                }
            }
        }
        // Pad the remainder of the field with the filling byte (no-op when the
        // encoded string already filled or exceeded the field).
        for (int i = strSize; i < size; ++i) {
            this.stream.writeByte(fillingChar);
        }
    }
}
public class Translation { /** * Translates { @ code Order . Direction } to Google App Engine Datastore * { @ code SortDirection } . * @ param direction Acid House { @ code Order . Direction } . * @ return Google App Engine Datastore { @ code SortDirection } translated * from Acid House { @ code Order . Direction } . */ public static SortDirection toSortDirection ( Order . Direction direction ) { } }
if ( direction == Order . Direction . ASC ) { return SortDirection . ASCENDING ; } else if ( direction == Order . Direction . DESC ) { return SortDirection . DESCENDING ; } else { throw new UnsupportedOperationException ( "Direction [" + direction + "] is not supported by Google App Engine Datastore" ) ; }
public class CDDB {
    /**
     * Fetches and returns the list of categories supported by the server.
     *
     * @return the category names reported by the server
     * @throws IOException if a problem occurs chatting to the server, or if the
     *     connection is closed before the response terminator is received.
     * @throws CDDBException if the server responds with an error.
     */
    public String[] lscat() throws IOException, CDDBException {
        // sanity check
        if (_sock == null) {
            throw new CDDBException(500, "Not connected");
        }
        // make the request
        Response rsp = request("cddb lscat");
        // anything other than an OK response earns an exception
        if (rsp.code != 210) {
            throw new CDDBException(rsp.code, rsp.message);
        }
        ArrayList<String> list = new ArrayList<String>();
        String input;
        // FIX: previously readLine() == null (connection closed before the
        // terminator arrived) caused a NullPointerException; now it is reported
        // as the declared IOException instead.
        while ((input = _in.readLine()) != null && !input.equals(CDDBProtocol.TERMINATOR)) {
            list.add(input);
        }
        if (input == null) {
            throw new IOException("Connection closed before response terminator was received");
        }
        String[] categories = new String[list.size()];
        list.toArray(categories);
        return categories;
    }
}
public class SubtitleChatOverlay {
    /**
     * Figure out how many of the first history elements fit in our bounds such
     * that we can set the bounds on the scrollbar correctly such that scrolling
     * to the smallest value just barely puts the first element onscreen.
     */
    protected void figureHistoryOffset(Graphics2D gfx) {
        // Nothing to measure until layout has happened.
        if (!isLaidOut()) {
            return;
        }
        int hei = _subtitleYSpacing;
        int hsize = _history.size();
        // Accumulate subtitle heights (plus inter-subtitle spacing) until we
        // exceed the available subtitle area.
        for (int ii = 0; ii < hsize; ii++) {
            ChatGlyph rec = getHistorySubtitle(ii, gfx);
            Rectangle r = rec.getBounds();
            hei += r.height;
            // oop, we passed it, it was the last one
            if (hei >= _subtitleHeight) {
                _histOffset = Math.max(0, ii - 1);
                _histOffsetFinal = true;
                return;
            }
            hei += getHistorySubtitleSpacing(ii);
        }
        // basically, this means there isn't yet enough history to fill the first
        // 'page' of the history scrollback, so we set the offset to the max
        // value, but we do not set _histOffsetFinal to be true so that this will
        // be recalculated
        _histOffset = hsize - 1;
    }
}
public class Main {
    /**
     * Demo entry point: parses a bundled ABC tune book, renders one tune with a
     * JScoreComponent, and writes both an unjustified and a justified JPEG score
     * to the user's Desktop (falling back to the home directory).
     *
     * @param arg pass "--chromatic" as the first argument to render the
     *     chromatic demo resource instead of the default demo resource
     */
    public static void main(String[] arg) {
        try {
            boolean chr = arg.length > 0 && arg[0].equals("--chromatic");
            String rn = chr ? CHROMATIC_RECOURCE_NAME : DEMO_RESOURCE_NAME;
            Reader isr = new InputStreamReader(Main.class.getResourceAsStream(rn), "UTF-8");
            TuneBook tb = new TuneBookParser().parse(isr);
            // Tune index 1 for the chromatic book, 7 for the default demo book.
            Tune tune = tb.getTune(chr ? 1 : 7);
            JScoreComponent scoreUI = new JScoreComponent();
            scoreUI.setTune(tune);
            // Dimension d = scoreUI.getSize();
            // d.height *= 3;
            // scoreUI.setSize(d);
            File home = new File(System.getProperty("user.home"));
            File desktop = new File(home, "Desktop");
            File dir = desktop.isDirectory() ? desktop : home;
            scoreUI.writeScoreTo(new File(dir, "abc4j_demoNotjustified.jpg"));
            scoreUI.setJustification(true);
            scoreUI.writeScoreTo(new File(dir, "abc4j_demoJustified.jpg"));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
public class KeyUtils { /** * Gets the key string . * @ param server * @ param query the query * @ param result the result * @ param typeNames the type names * @ param rootPrefix the root prefix * @ return the key string */ public static String getKeyString ( Server server , Query query , Result result , List < String > typeNames , String rootPrefix ) { } }
StringBuilder sb = new StringBuilder ( ) ; addRootPrefix ( rootPrefix , sb ) ; addAlias ( server , sb ) ; addSeparator ( sb ) ; addMBeanIdentifier ( query , result , sb ) ; addSeparator ( sb ) ; addTypeName ( query , result , typeNames , sb ) ; addKeyString ( query , result , sb ) ; return sb . toString ( ) ;
public class DataEncoder {
    /**
     * Encodes the given optional String into a variable amount of bytes. The
     * amount written can be determined by calling calculateEncodedStringLength.
     * Strings are encoded in a fashion similar to UTF-8, in that ASCII
     * characters are written in one byte. This encoding is more efficient than
     * UTF-8, but it isn't compatible with UTF-8.
     *
     * @param value String value to encode, may be null
     * @param dst destination for encoded bytes
     * @param dstOffset offset into destination array
     * @return amount of bytes written
     */
    public static int encode(String value, byte[] dst, int dstOffset) {
        if (value == null) {
            // Null strings are encoded as a single marker byte.
            dst[dstOffset] = NULL_BYTE_HIGH;
            return 1;
        }
        final int originalOffset = dstOffset;
        int valueLength = value.length();
        // Write the value length first, in a variable amount of bytes.
        dstOffset += encodeUnsignedVarInt(valueLength, dst, dstOffset);
        for (int i = 0; i < valueLength; i++) {
            int c = value.charAt(i);
            if (c <= 0x7f) {
                // ASCII: single byte, stored as-is.
                dst[dstOffset++] = (byte) c;
            } else if (c <= 0x3fff) {
                // Two-byte form: high bit set marks a multi-byte sequence.
                dst[dstOffset++] = (byte) (0x80 | (c >> 8));
                dst[dstOffset++] = (byte) (c & 0xff);
            } else {
                if (c >= 0xd800 && c <= 0xdbff) {
                    // Found a high surrogate. Verify that surrogate pair is
                    // well-formed. Low surrogate must follow high surrogate.
                    if (i + 1 < valueLength) {
                        int c2 = value.charAt(i + 1);
                        if (c2 >= 0xdc00 && c2 <= 0xdfff) {
                            // Combine the pair into a supplementary code point
                            // and skip the consumed low surrogate.
                            c = 0x10000 + (((c & 0x3ff) << 10) | (c2 & 0x3ff));
                            i++;
                        }
                    }
                }
                // Three-byte form for everything else (including unpaired
                // surrogates, which are written verbatim).
                dst[dstOffset++] = (byte) (0xc0 | (c >> 16));
                dst[dstOffset++] = (byte) ((c >> 8) & 0xff);
                dst[dstOffset++] = (byte) (c & 0xff);
            }
        }
        return dstOffset - originalOffset;
    }
}
public class APSPSolver {
    /**
     * TP creation: finds the first unused time point slot (scanning from index
     * 2), marks it used, and (re)initializes its rows and columns in the
     * distance matrix.
     *
     * @return the index of the allocated time point
     */
    private int tpCreate() {
        logger.finest("Creating 1 TP");
        int i = 2;
        boolean found = false;
        // Scan from index 2 for the first unused slot; indices 0 and 1 get
        // special distance initialization below.
        while (i < MAX_TPS && !found) {
            if (!tPoints[i].isUsed()) {
                tPoints[i].setUsed(true);
                found = true;
                // reusing a timepoint, check!
                // System.out.println("REUSING TP " + i + " and with Origin -> " + i + " is " + tPoints[0].getOut(i));
                // System.out.println("REUSING TP " + i + " and " + Arrays.toString(theNetwork.getIncidentEdges(tPoints[i])));
                // Track the high-water mark of slots ever used.
                if (i == MAX_USED + 1) MAX_USED = i;
            } else i++;
        }
        // NOTE(review): if every slot is in use, the loop exits with
        // i == MAX_TPS and the writes below index past the last slot -- confirm
        // callers guarantee a free slot exists before calling.
        // Reset distances between the new slot and all other used slots to H.
        for (int l = 2; l <= MAX_USED; l++) {
            distance[i][l] = H;
            distance[l][i] = H;
        }
        distance[i][i] = 0;
        distance[i][0] = 0;
        distance[i][1] = H; // APSPSolver.INF;
        distance[0][i] = H; // Needed for bookmark/revert (Uwe)
        distance[1][i] = 0; // Needed for bookmark/revert (Uwe)
        return i;
    }
}