signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class NlsBundleHelper { /** * This method determines if the given { @ link Method } is a regular { @ link NlsBundle } - method . * @ param method the { @ link Method } to check . * @ param ignoreIllegalMethods - { @ code true } if illegal methods ( non NlsBundleMethods other than those defined by * { @ link Object } ) should be ignored , { @ code false } if they should cause an exception . * @ return { @ code true } if the given { @ link Method } is a legal { @ link NlsBundle } method , { @ code false } otherwise ( e . g . * for { @ code toString ( ) } ) . */ public boolean isNlsBundleMethod ( Method method , boolean ignoreIllegalMethods ) { } }
Class < ? > declaringClass = method . getDeclaringClass ( ) ; assert ( declaringClass . isInterface ( ) ) ; if ( NlsMessage . class . equals ( method . getReturnType ( ) ) ) { assert ( NlsBundle . class . isAssignableFrom ( declaringClass ) ) ; return true ; } else if ( ! declaringClass . isAssignableFrom ( NlsBundle . class ) ) { if ( ! ignoreIllegalMethods ) { throw new IllegalArgumentException ( declaringClass . getName ( ) + "." + method . getName ( ) ) ; } } return false ;
public class CmsLoginMessage { /** * Sets the time when to start displaying this message . < p > * @ param timeStart the time to set */ public void setTimeStart ( long timeStart ) { } }
checkFrozen ( ) ; if ( timeStart < 0 ) { throw new CmsIllegalArgumentException ( Messages . get ( ) . container ( Messages . ERR_LOGIN_MESSAGE_BAD_TIME_1 , new Long ( timeStart ) ) ) ; } if ( timeStart == 0 ) { timeStart = DEFAULT_TIME_START ; } m_timeStart = timeStart ;
public class SchematronValidatingParser { /** * Runs the schematron file against the input source . */ public boolean executeSchematronDriver ( InputSource inputDoc , File schemaFile , String phase ) { } }
boolean isValid = false ; ValidationDriver driver = createSchematronDriver ( phase ) ; assert null != driver : "Unable to create Schematron ValidationDriver" ; InputSource is = null ; // Fortify Mod : move fis out so it can be closed on exit FileInputStream fis = null ; try { // FileInputStream fis = new FileInputStream ( schemaFile ) ; fis = new FileInputStream ( schemaFile ) ; is = new InputSource ( fis ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; } try { if ( driver . loadSchema ( is ) ) { isValid = driver . validate ( inputDoc ) ; fis . close ( ) ; // Fortify addition } else { assert false : ( "Failed to load Schematron schema: " + schemaFile + "\nIs the schema valid? Is the phase defined?" ) ; } } catch ( SAXException e ) { assert false : e . toString ( ) ; } catch ( IOException e ) { assert false : e . toString ( ) ; } return isValid ;
public class Utils { /** * Closes a connection to a database . * @ param conn * @ param logger */ public static void closeConnection ( Connection conn , Logger logger ) { } }
try { if ( conn != null ) conn . close ( ) ; } catch ( SQLException e ) { // Not important . Utils . logException ( logger , e ) ; }
public class ResourceName { /** * Attempts to resolve a resource name into a resource , by calling the associated API . * The resource name must have an endpoint . An optional version can be specified to * determine in which version of the API to call . */ public < T > T resolve ( Class < T > resourceType , @ Nullable String version ) { } }
Preconditions . checkArgument ( hasEndpoint ( ) , "Resource name must have an endpoint." ) ; return resourceNameResolver . resolve ( resourceType , this , version ) ;
public class AnnotationExpander {

    /**
     * Expands an annotation.
     * <p>
     * A repeatable container annotation is unpacked into its indexed elements, a composed
     * (meta-annotated) annotation is expanded recursively with its meta-annotations as children,
     * and any other annotation is wrapped as-is.
     *
     * @param targetAnno the annotation to expand
     * @return the list of expanded annotations, sorted by {@code comparator}
     * @throws NullPointerException {@literal targetAnno == null.}
     */
    public List<ExpandedAnnotation> expand(final Annotation targetAnno) {
        Objects.requireNonNull(targetAnno);
        final List<ExpandedAnnotation> expandedList = new ArrayList<>();
        if (isRepeated(targetAnno)) {
            // Repeatable container annotation: extract the repeated elements from "value()".
            try {
                final Method method = targetAnno.getClass().getMethod("value");
                final Annotation[] annos = (Annotation[]) method.invoke(targetAnno);
                int index = 0;
                for (Annotation anno : annos) {
                    final List<ExpandedAnnotation> repeatedAnnos = expand(anno);
                    for (ExpandedAnnotation repeatedAnno : repeatedAnnos) {
                        // remember the element's position within the container
                        repeatedAnno.setIndex(index);
                    }
                    expandedList.addAll(repeatedAnnos);
                    index++;
                }
            } catch (Exception e) {
                throw new RuntimeException("fail get repeated value attribute.", e);
            }
        } else if (isComposed(targetAnno)) {
            final ExpandedAnnotation composedAnno = new ExpandedAnnotation(targetAnno, true);
            // Composed annotation: recursively expand its meta-annotations as children,
            // letting attributes of the outer annotation override the nested ones.
            final List<Annotation> childAnnos = Arrays.asList(targetAnno.annotationType().getAnnotations());
            for (Annotation anno : childAnnos) {
                final List<ExpandedAnnotation> nestedAnnos = expand(anno).stream()
                        .map(nestedAnno -> overrideAttribute(targetAnno, nestedAnno))
                        .collect(Collectors.toList());
                composedAnno.addChilds(nestedAnnos);
            }
            Collections.sort(composedAnno.getChilds(), comparator);
            expandedList.add(composedAnno);
        } else {
            // Plain annotation: wrap it without further expansion.
            expandedList.add(new ExpandedAnnotation(targetAnno, false));
        }
        Collections.sort(expandedList, comparator);
        return expandedList;
    }
}
public class MessageRouterImpl { /** * Route the message to the LogHandler identified by the given logHandlerId . * @ param msg The fully formatted message . * @ param logRecord The associated LogRecord , in case the LogHandler needs it . * @ param logHandlerId The LogHandler ID in which to route . */ protected void routeTo ( String msg , LogRecord logRecord , String logHandlerId ) { } }
LogHandler logHandler = logHandlerServices . get ( logHandlerId ) ; if ( logHandler != null ) { logHandler . publish ( msg , logRecord ) ; }
public class TargetsApi { /** * Get a target * Get a specific target by type and ID . Targets can be agents , agent groups , queues , route points , skills , and custom contacts . * @ param id The ID of the target . ( required ) * @ param type The type of target to retrieve . ( required ) * @ return ApiResponse & lt ; TargetsResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < TargetsResponse > getTargetWithHttpInfo ( BigDecimal id , String type ) throws ApiException { } }
com . squareup . okhttp . Call call = getTargetValidateBeforeCall ( id , type , null , null ) ; Type localVarReturnType = new TypeToken < TargetsResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class FileUtils { /** * Write to a file atomically , by first writing to a temporary file in the same directory and then moving it to * the target location . This function attempts to clean up its temporary files when possible , but they may stick * around ( for example , if the JVM crashes partway through executing the function ) . In any case , the target file * should be unharmed . * The OutputStream passed to the consumer is uncloseable ; calling close on it will do nothing . This is to ensure * that the stream stays open so we can fsync it here before closing . Hopefully , this doesn ' t cause any problems * for callers . * This method is not just thread - safe , but is also safe to use from multiple processes on the same machine . */ public static < T > T writeAtomically ( final File file , OutputStreamConsumer < T > f ) throws IOException { } }
return writeAtomically ( file , file . getParentFile ( ) , f ) ;
public class JsCodeBuilder { /** * Adds a { @ code goog . require } * @ param require The namespace being required */ public void addGoogRequire ( GoogRequire require ) { } }
GoogRequire oldRequire = googRequires . put ( require . symbol ( ) , require ) ; if ( oldRequire != null ) { googRequires . put ( require . symbol ( ) , require . merge ( oldRequire ) ) ; }
public class TorrentHandle {

    /**
     * For SSL torrents, use this to specify a path to a .pem file to use as this client's
     * certificate. The certificate must be signed by the certificate in the .torrent file
     * to be valid.
     * Note that when a torrent first starts up, and it needs a certificate, it will suspend
     * connecting to any peers until it has one. It's typically desirable to resume the torrent
     * after setting the SSL certificate.
     * If you receive a {@link com.frostwire.jlibtorrent.alerts.TorrentNeedCertAlert},
     * you need to call this to provide a valid cert. If you don't have a cert you won't be
     * allowed to connect to any peers.
     *
     * @param certificate is a path to the (signed) certificate in .pem format
     *                    corresponding to this torrent.
     * @param privateKey is a path to the private key for the specified certificate.
     *                   This must be in .pem format.
     * @param dhParams is a path to the Diffie-Hellman parameter file, which needs to be in
     *                 .pem format. You can generate this file using the openssl command like
     *                 this: ``openssl dhparam -outform PEM -out dhparams.pem 512``.
     */
    public void setSslCertificate(String certificate, String privateKey, String dhParams) {
        // delegate straight to the native libtorrent handle
        th.set_ssl_certificate(certificate, privateKey, dhParams);
    }
}
public class PowerMockito { /** * Verify a private method invocation for a class with a given verification * mode . * @ see Mockito # verify ( Object ) */ public static PrivateMethodVerification verifyPrivate ( Class < ? > clazz , VerificationMode verificationMode ) { } }
return verifyPrivate ( ( Object ) clazz , verificationMode ) ;
public class GetResponseStubbing { /** * Defines the content body of the response * @ param someContent * the content as string that should be responded * @ return * this stubbing */ public GetResponseStubbing respond ( final String someContent ) { } }
server . addResource ( this . path , someContent . getBytes ( Charset . defaultCharset ( ) ) ) ; return this ;
public class UnsafeOperations {

    /**
     * Performs a shallow copy of the given object - a new instance is allocated with the same
     * contents. Any object references inside the copy will be the same as the original object.
     *
     * @param obj Object to copy
     * @param <T> The type being copied
     * @return A new instance, identical to the original
     */
    public final <T> T shallowCopy(T obj) {
        // byte size of the object's header + fields, as computed by shallowSizeOf
        long size = shallowSizeOf(obj);
        // NOTE(review): raw off-heap allocation that is never freed in this method -
        // presumably ownership/lifetime is handled by fromAddress(); confirm against
        // that helper before assuming there is no leak.
        long address = THE_UNSAFE.allocateMemory(size);
        long start = toAddress(obj);
        // bitwise copy of the object's memory image to the new address
        THE_UNSAFE.copyMemory(start, address, size);
        @SuppressWarnings("unchecked")
        final T result = (T) fromAddress(address);
        return result;
    }
}
public class ListIncomingTypedLinksRequest { /** * Provides range filters for multiple attributes . When providing ranges to typed link selection , any inexact ranges * must be specified at the end . Any attributes that do not have a range specified are presumed to match the entire * range . * @ param filterAttributeRanges * Provides range filters for multiple attributes . When providing ranges to typed link selection , any inexact * ranges must be specified at the end . Any attributes that do not have a range specified are presumed to * match the entire range . */ public void setFilterAttributeRanges ( java . util . Collection < TypedLinkAttributeRange > filterAttributeRanges ) { } }
if ( filterAttributeRanges == null ) { this . filterAttributeRanges = null ; return ; } this . filterAttributeRanges = new java . util . ArrayList < TypedLinkAttributeRange > ( filterAttributeRanges ) ;
public class AnnotationTypeElementDocImpl { /** * Returns the default value of this element . * Returns null if this element has no default . */ public AnnotationValue defaultValue ( ) { } }
return ( sym . defaultValue == null ) ? null : new AnnotationValueImpl ( env , sym . defaultValue ) ;
public class Client { /** * set custom response for the default profile ' s default client * @ param pathName friendly name of path * @ param customData custom response / request data * @ return true if success , false otherwise */ public static boolean setCustomResponseForDefaultProfile ( String pathName , String customData ) { } }
try { return setCustomForDefaultProfile ( pathName , true , customData ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; } return false ;
public class MiniProfilerServlet { /** * Serve one of the static resources for the profiler UI . */ private void doResource ( HttpServletRequest req , HttpServletResponse resp ) throws IOException { } }
boolean success = true ; String resource = ( String ) req . getParameter ( "id" ) ; if ( ! isEmpty ( resource ) ) { if ( resource . endsWith ( ".js" ) ) { resp . setContentType ( "text/javascript" ) ; } else if ( resource . endsWith ( ".css" ) ) { resp . setContentType ( "text/css" ) ; } else if ( resource . endsWith ( ".html" ) ) { resp . setContentType ( "text/html" ) ; } else { resp . setContentType ( "text/plain" ) ; } String contents = resourceLoader . getResource ( resource , resourceReplacements ) ; if ( contents != null ) { if ( resourceCacheHours > 0 ) { Calendar c = Calendar . getInstance ( ) ; c . add ( Calendar . HOUR , resourceCacheHours ) ; resp . setHeader ( "Cache-Control" , "public, must-revalidate" ) ; resp . setHeader ( "Expires" , new SimpleDateFormat ( "EEE, dd MMM yyyy HH:mm:ss zzz" ) . format ( c . getTime ( ) ) ) ; } else { resp . setHeader ( "Cache-Control" , "no-cache" ) ; } PrintWriter w = resp . getWriter ( ) ; w . print ( contents ) ; } else { success = false ; } } if ( ! success ) { resp . sendError ( 404 ) ; }
public class AuditFileHandler { /** * Get the default directory for logs * @ return full path String of logs directory */ private String getLogDir ( ) { } }
StringBuffer output = new StringBuffer ( ) ; WsLocationAdmin locationAdmin = locationAdminRef . getService ( ) ; output . append ( locationAdmin . resolveString ( "${server.output.dir}" ) . replace ( '\\' , '/' ) ) . append ( "/logs" ) ; return output . toString ( ) ;
public class RedisClusterStorage { /** * Get the current state of the identified < code > { @ link Trigger } < / code > . * @ param triggerKey the key of the desired trigger * @ param jedis a thread - safe Redis connection * @ return the state of the trigger */ @ Override public Trigger . TriggerState getTriggerState ( TriggerKey triggerKey , JedisCluster jedis ) { } }
final String triggerHashKey = redisSchema . triggerHashKey ( triggerKey ) ; Map < RedisTriggerState , Double > scores = new HashMap < > ( RedisTriggerState . values ( ) . length ) ; for ( RedisTriggerState redisTriggerState : RedisTriggerState . values ( ) ) { scores . put ( redisTriggerState , jedis . zscore ( redisSchema . triggerStateKey ( redisTriggerState ) , triggerHashKey ) ) ; } for ( Map . Entry < RedisTriggerState , Double > entry : scores . entrySet ( ) ) { if ( entry . getValue ( ) != null ) { return entry . getKey ( ) . getTriggerState ( ) ; } } return Trigger . TriggerState . NONE ;
public class SourcePrinter {

    /**
     * Prints a GString-style string constructor.
     * String characters only used by lexer, never visited/created directly.
     *
     * @param t the AST node for the string constructor
     * @param visit which visit phase this call represents (opening/subsequent/closing)
     */
    public void visitStringConstructor(GroovySourceAST t, int visit) {
        if (visit == OPENING_VISIT) {
            // reset the alternation counter and emit the opening quote
            stringConstructorCounter = 0;
            print(t, visit, "\"");
        }
        if (visit == SUBSEQUENT_VISIT) {
            // every other subsequent visit use an escaping $
            if (stringConstructorCounter % 2 == 0) {
                print(t, visit, "$");
            }
            stringConstructorCounter++;
        }
        if (visit == CLOSING_VISIT) {
            print(t, visit, "\"");
        }
    }
}
public class EditShape {

    /**
     * Adds a path copied from the given MultiPath as a new geometry in this edit shape.
     *
     * @param multi_path the source multi path
     * @param ipath the index of the path to copy
     * @param as_polygon true to create a Polygon geometry, false for a Polyline
     * @return the handle of the newly created geometry (empty if the source path has
     *         fewer than 2 points)
     */
    int addPathFromMultiPath(MultiPath multi_path, int ipath, boolean as_polygon) {
        int newgeom = createGeometry(as_polygon ? Geometry.Type.Polygon : Geometry.Type.Polyline, multi_path.getDescription());
        MultiPathImpl mp_impl = (MultiPathImpl) multi_path._getImpl();
        if (multi_path.getPathSize(ipath) < 2)
            return newgeom; // return empty geometry
        // m_vertices->reserve_rounded(m_vertices->get_point_count() +
        // multi_path.get_path_size(ipath)); // ensure reallocation happens by
        // blocks so that already allocated vertices do not get reallocated.
        m_vertices_mp.add(multi_path, multi_path.getPathStart(ipath), mp_impl.getPathEnd(ipath));
        m_xy_stream = (AttributeStreamOfDbl) m_vertices.getAttributeStreamRef(VertexDescription.Semantics.POSITION);
        int path = insertPath(newgeom, -1);
        // polygons are always closed, even if the source path was open
        setClosedPath(path, mp_impl.isClosedPath(ipath) || as_polygon);
        // segment data only needs copying when both sides actually track segments
        boolean b_some_segments = m_segments != null && mp_impl.getSegmentFlagsStreamRef() != null;
        for (int ivertex = mp_impl.getPathStart(ipath), iend = mp_impl.getPathEnd(ipath); ivertex < iend; ivertex++) {
            int vertex = insertVertex_(path, -1, null);
            if (b_some_segments) {
                int vindex = getVertexIndex(vertex);
                if ((mp_impl.getSegmentFlags(ivertex) & SegmentFlags.enumLineSeg) != 0) {
                    // plain line segment: no explicit segment object required
                    setSegmentToIndex_(vindex, null);
                } else {
                    // curve segment: copy the segment payload into this shape
                    SegmentBuffer seg_buffer = new SegmentBuffer();
                    mp_impl.getSegment(ivertex, seg_buffer, true);
                    setSegmentToIndex_(vindex, seg_buffer.get());
                }
            }
        }
        return newgeom;
    }
}
public class AbstractPlainDatagramSocketImpl { /** * set a value - since we only support ( setting ) binary options * here , o must be a Boolean */ public void setOption ( int optID , Object o ) throws SocketException { } }
if ( isClosed ( ) ) { throw new SocketException ( "Socket Closed" ) ; } switch ( optID ) { /* check type safety b4 going native . These should never * fail , since only java . Socket * has access to * PlainSocketImpl . setOption ( ) . */ case SO_TIMEOUT : if ( o == null || ! ( o instanceof Integer ) ) { throw new SocketException ( "bad argument for SO_TIMEOUT" ) ; } int tmp = ( ( Integer ) o ) . intValue ( ) ; if ( tmp < 0 ) throw new IllegalArgumentException ( "timeout < 0" ) ; timeout = tmp ; return ; case IP_TOS : if ( o == null || ! ( o instanceof Integer ) ) { throw new SocketException ( "bad argument for IP_TOS" ) ; } trafficClass = ( ( Integer ) o ) . intValue ( ) ; break ; case SO_REUSEADDR : if ( o == null || ! ( o instanceof Boolean ) ) { throw new SocketException ( "bad argument for SO_REUSEADDR" ) ; } break ; case SO_BROADCAST : if ( o == null || ! ( o instanceof Boolean ) ) { throw new SocketException ( "bad argument for SO_BROADCAST" ) ; } break ; case SO_BINDADDR : throw new SocketException ( "Cannot re-bind Socket" ) ; case SO_RCVBUF : case SO_SNDBUF : if ( o == null || ! ( o instanceof Integer ) || ( ( Integer ) o ) . intValue ( ) < 0 ) { throw new SocketException ( "bad argument for SO_SNDBUF or " + "SO_RCVBUF" ) ; } break ; case IP_MULTICAST_IF : if ( o == null || ! ( o instanceof InetAddress ) ) throw new SocketException ( "bad argument for IP_MULTICAST_IF" ) ; break ; case IP_MULTICAST_IF2 : if ( o == null || ! ( o instanceof Integer || o instanceof NetworkInterface ) ) throw new SocketException ( "bad argument for IP_MULTICAST_IF2" ) ; if ( o instanceof NetworkInterface ) { o = new Integer ( ( ( NetworkInterface ) o ) . getIndex ( ) ) ; } break ; case IP_MULTICAST_LOOP : if ( o == null || ! ( o instanceof Boolean ) ) throw new SocketException ( "bad argument for IP_MULTICAST_LOOP" ) ; break ; default : throw new SocketException ( "invalid option: " + optID ) ; } socketSetOption ( optID , o ) ;
public class DeploymentPlanResultImpl {

    /**
     * Builds the data structures that show the effects of the plan by server group.
     *
     * @param deploymentActionResults the per-action results, keyed by action id
     * @return the aggregated results keyed by server group name
     */
    private static Map<String, ServerGroupDeploymentPlanResult> buildServerGroupResults(Map<UUID, DeploymentActionResult> deploymentActionResults) {
        Map<String, ServerGroupDeploymentPlanResult> serverGroupResults = new HashMap<String, ServerGroupDeploymentPlanResult>();
        for (Map.Entry<UUID, DeploymentActionResult> entry : deploymentActionResults.entrySet()) {
            UUID actionId = entry.getKey();
            DeploymentActionResult actionResult = entry.getValue();
            Map<String, ServerGroupDeploymentActionResult> actionResultsByServerGroup = actionResult.getResultsByServerGroup();
            for (ServerGroupDeploymentActionResult serverGroupActionResult : actionResultsByServerGroup.values()) {
                String serverGroupName = serverGroupActionResult.getServerGroupName();
                // lazily create the per-server-group aggregate
                ServerGroupDeploymentPlanResultImpl sgdpr = (ServerGroupDeploymentPlanResultImpl) serverGroupResults.get(serverGroupName);
                if (sgdpr == null) {
                    sgdpr = new ServerGroupDeploymentPlanResultImpl(serverGroupName);
                    serverGroupResults.put(serverGroupName, sgdpr);
                }
                for (Map.Entry<String, ServerUpdateResult> serverEntry : serverGroupActionResult.getResultByServer().entrySet()) {
                    String serverName = serverEntry.getKey();
                    ServerUpdateResult sud = serverEntry.getValue();
                    // lazily create the per-server aggregate within the group
                    ServerDeploymentPlanResultImpl sdpr = (ServerDeploymentPlanResultImpl) sgdpr.getServerResult(serverName);
                    if (sdpr == null) {
                        sdpr = new ServerDeploymentPlanResultImpl(serverName);
                        sgdpr.storeServerResult(serverName, sdpr);
                    }
                    // record this action's outcome on this particular server
                    sdpr.storeServerUpdateResult(actionId, sud);
                }
            }
        }
        return serverGroupResults;
    }
}
public class Operation {

    /**
     * The name of the target entity that is associated with the operation:
     * <ul>
     * <li><b>NAMESPACE</b>: The namespace ID is returned in the <code>ResourceId</code> property.</li>
     * <li><b>SERVICE</b>: The service ID is returned in the <code>ResourceId</code> property.</li>
     * <li><b>INSTANCE</b>: The instance ID is returned in the <code>ResourceId</code> property.</li>
     * </ul>
     *
     * @param targets
     *        The name of the target entity that is associated with the operation:</p>
     *        <ul>
     *        <li><b>NAMESPACE</b>: The namespace ID is returned in the <code>ResourceId</code> property.</li>
     *        <li><b>SERVICE</b>: The service ID is returned in the <code>ResourceId</code> property.</li>
     *        <li><b>INSTANCE</b>: The instance ID is returned in the <code>ResourceId</code> property.</li>
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Operation withTargets(java.util.Map<String, String> targets) {
        // fluent variant of setTargets
        setTargets(targets);
        return this;
    }
}
public class TaskSlotTable { /** * Marks the slot under the given allocation id as inactive . If the slot could not be found , * then a { @ link SlotNotFoundException } is thrown . * @ param allocationId to identify the task slot to mark as inactive * @ param slotTimeout until the slot times out * @ throws SlotNotFoundException if the slot could not be found for the given allocation id * @ return True if the slot could be marked inactive */ public boolean markSlotInactive ( AllocationID allocationId , Time slotTimeout ) throws SlotNotFoundException { } }
checkInit ( ) ; TaskSlot taskSlot = getTaskSlot ( allocationId ) ; if ( taskSlot != null ) { if ( taskSlot . markInactive ( ) ) { // register a timeout to free the slot timerService . registerTimeout ( allocationId , slotTimeout . getSize ( ) , slotTimeout . getUnit ( ) ) ; return true ; } else { return false ; } } else { throw new SlotNotFoundException ( allocationId ) ; }
public class AbstractHCElementWithInternalChildren {

    /**
     * Helper method that returns the elements in the correct order for emitting.
     * This can e.g. be used for sorting or ordering.
     *
     * @param aChildren
     *        The children to be emitted. Is a direct reference to the container
     *        where the children are stored. So handle with care!
     * @return The non-<code>null</code> list with all child elements to be emitted.
     */
    @Nonnull
    @Nonempty
    @OverrideOnDemand
    protected ICommonsList<? extends CHILDTYPE> getChildrenFormEmitting(@Nonnull @Nonempty final ICommonsList<CHILDTYPE> aChildren) {
        // default behavior: emit in stored order; subclasses may override to reorder
        return aChildren;
    }
}
public class ArrayUtil { /** * 新建一个空数组 * @ param < T > 数组元素类型 * @ param componentType 元素类型 * @ param newSize 大小 * @ return 空数组 */ @ SuppressWarnings ( "unchecked" ) public static < T > T [ ] newArray ( Class < ? > componentType , int newSize ) { } }
return ( T [ ] ) Array . newInstance ( componentType , newSize ) ;
public class MessageMD5ChecksumHandler { /** * Update the digest using a sequence of bytes that consists of the length ( in 4 bytes ) of the * input ByteBuffer and all the bytes it contains . */ private static void updateLengthAndBytes ( MessageDigest digest , ByteBuffer binaryValue ) { } }
ByteBuffer readOnlyBuffer = binaryValue . asReadOnlyBuffer ( ) ; int size = readOnlyBuffer . remaining ( ) ; ByteBuffer lengthBytes = ByteBuffer . allocate ( INTEGER_SIZE_IN_BYTES ) . putInt ( size ) ; digest . update ( lengthBytes . array ( ) ) ; digest . update ( readOnlyBuffer ) ;
public class CorrelationAnalysisSolution { /** * Returns the linear equation system for printing purposes . If normalization * is null the linear equation system is returned , otherwise the linear * equation system will be transformed according to the normalization . * @ param normalization the normalization , can be null * @ return the linear equation system for printing purposes * @ throws NonNumericFeaturesException if the linear equation system is not * compatible with values initialized during normalization */ public LinearEquationSystem getNormalizedLinearEquationSystem ( Normalization < ? > normalization ) throws NonNumericFeaturesException { } }
if ( normalization != null ) { LinearEquationSystem lq = normalization . transform ( linearEquationSystem ) ; lq . solveByTotalPivotSearch ( ) ; return lq ; } else { return linearEquationSystem ; }
public class CmsContainerpageController {

    /**
     * Reloads the page.<p>
     */
    public void reloadPage() {
        Timer timer = new Timer() {

            @Override
            @SuppressWarnings("synthetic-access")
            public void run() {
                // navigate back to the URL the page was originally opened with
                Window.Location.assign(m_originalUrl);
            }
        };
        // NOTE(review): the 150ms delay presumably lets pending UI/RPC work settle
        // before navigating away - confirm against callers.
        timer.schedule(150);
    }
}
public class VaultsInner {

    /**
     * Updates the vault.
     *
     * @param resourceGroupName The name of the resource group where the recovery services vault is present.
     * @param vaultName The name of the recovery services vault.
     * @param vault Recovery Services Vault to be created.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the VaultInner object if successful.
     */
    public VaultInner update(String resourceGroupName, String vaultName, PatchVault vault) {
        // synchronous wrapper: block on the async call and unwrap the service response body
        return updateWithServiceResponseAsync(resourceGroupName, vaultName, vault).toBlocking().single().body();
    }
}
public class KafkaPusher { /** * Push all mbyte array messages to the Kafka topic . * @ param messages List of byte array messages to push to Kakfa . */ public void pushMessages ( List < byte [ ] > messages ) { } }
List < KeyedMessage < String , byte [ ] > > keyedMessages = Lists . transform ( messages , new Function < byte [ ] , KeyedMessage < String , byte [ ] > > ( ) { @ Nullable @ Override public KeyedMessage < String , byte [ ] > apply ( byte [ ] bytes ) { return new KeyedMessage < String , byte [ ] > ( topic , bytes ) ; } } ) ; this . producer . send ( keyedMessages ) ;
public class StreamExecutionEnvironment { /** * Reads the given file line - by - line and creates a data stream that contains a string with the * contents of each such line . The { @ link java . nio . charset . Charset } with the given name will be * used to read the files . * < p > < b > NOTES ON CHECKPOINTING : < / b > The source monitors the path , creates the * { @ link org . apache . flink . core . fs . FileInputSplit FileInputSplits } to be processed , * forwards them to the downstream { @ link ContinuousFileReaderOperator readers } to read the actual data , * and exits , without waiting for the readers to finish reading . This implies that no more checkpoint * barriers are going to be forwarded after the source exits , thus having no checkpoints after that point . * @ param filePath * The path of the file , as a URI ( e . g . , " file : / / / some / local / file " or " hdfs : / / host : port / file / path " ) * @ param charsetName * The name of the character set used to read the file * @ return The data stream that represents the data read from the given file as text lines */ public DataStreamSource < String > readTextFile ( String filePath , String charsetName ) { } }
Preconditions . checkArgument ( ! StringUtils . isNullOrWhitespaceOnly ( filePath ) , "The file path must not be null or blank." ) ; TextInputFormat format = new TextInputFormat ( new Path ( filePath ) ) ; format . setFilesFilter ( FilePathFilter . createDefaultFilter ( ) ) ; TypeInformation < String > typeInfo = BasicTypeInfo . STRING_TYPE_INFO ; format . setCharsetName ( charsetName ) ; return readFile ( format , filePath , FileProcessingMode . PROCESS_ONCE , - 1 , typeInfo ) ;
public class NfsResponseBase { /** * Create the object if it is there , or skip the existence check if * < code > force < / code > is < code > true < / code > . Convenience method for use in * subclasses . * @ param xdr * @ param force * do not check whether it is there * @ return the created object */ protected static NfsGetAttributes makeNfsGetAttributes ( Xdr xdr , boolean force ) { } }
NfsGetAttributes attributes = null ; if ( force || xdr . getBoolean ( ) ) { attributes = new NfsGetAttributes ( ) ; attributes . unmarshalling ( xdr ) ; } return attributes ;
public class DefaultAsyncSearchQueryResult { /** * A utility method to return a result when the index is not found . * @ return an { @ link AsyncSearchQueryResult } that will emit a { @ link IndexDoesNotExistException } when calling * its { @ link AsyncSearchQueryResult # hits ( ) hits ( ) } method . * @ deprecated FTS is still in BETA so the response format is likely to change in a future version , and be * unified with the HTTP 200 response format . */ @ Deprecated public static AsyncSearchQueryResult fromIndexNotFound ( final String indexName ) { } }
// dummy default values SearchStatus status = new DefaultSearchStatus ( 1L , 1L , 0L ) ; SearchMetrics metrics = new DefaultSearchMetrics ( 0L , 0L , 0d ) ; return new DefaultAsyncSearchQueryResult ( status , Observable . < SearchQueryRow > error ( new IndexDoesNotExistException ( "Search Index \"" + indexName + "\" Not Found" ) ) , Observable . < FacetResult > empty ( ) , Observable . just ( metrics ) ) ;
public class Peek { /** * Finish the builder by selecting the base view that you want to show the PeekView from . * @ param activity the PeekViewActivity that you are on . * @ param base the view that you want to touch to apply the peek to . */ public void applyTo ( final PeekViewActivity activity , final View base ) { } }
final GestureDetectorCompat detector = new GestureDetectorCompat ( activity , new GestureListener ( activity , base , this ) ) ; base . setOnTouchListener ( new View . OnTouchListener ( ) { @ Override public boolean onTouch ( View view , final MotionEvent motionEvent ) { // we use the detector for the long and short click motion events detector . onTouchEvent ( motionEvent ) ; if ( motionEvent . getAction ( ) == MotionEvent . ACTION_DOWN ) { forceRippleAnimation ( base , motionEvent ) ; } return true ; } } ) ;
public class Table {

    /**
     * Select records from database table according to search condition including the specified
     * (comma separated) extra tables into the SELECT clause to facilitate a join in determining
     * the key.
     *
     * @param conn the JDBC connection to run the query on
     * @param tables the (comma separated) names of extra tables to include in the SELECT clause.
     * @param condition valid SQL condition expression started with WHERE.
     */
    public final Cursor<T> select(Connection conn, String tables, String condition) {
        // NOTE(review): the query is assembled by plain string concatenation, so "tables"
        // and "condition" must come from trusted code only - passing user-controlled text
        // here would be a SQL injection risk.
        String query = "select " + qualifiedListOfFields + " from " + name + "," + tables + " " + condition;
        return new Cursor<T>(this, conn, query);
    }
}
public class Rabbitify { /** * @ param scriptFile * script file , separated with { @ link Rabbitify # BEGIN _ RABBITIFY } * and { @ link Rabbitify # END _ RABBITIFY } * @ param replacementVars * see { @ link PipelineScriptParser # parse ( ) } * @ param mode * either sender ( the first part of the pipeline ) , slave ( the * middle part of a pipeline ) or receiver ( the last part of a * pipeline ) * @ param runId * gets used to name the rabbit queues , use ' test ' for testing * @ param timeout * how long to wait ( in seconds ) before the reader exits the * queue */ public static void run ( File scriptFile , String [ ] replacementVars , Mode mode , String runId , int timeout ) throws IOException , ParseException , UIMAException { } }
LOG . info ( "Rabbitifying pipeline script at '{}'" , scriptFile . getAbsolutePath ( ) + " \n with CLI parameters: " + join ( replacementVars , ", " ) ) ; // SPLITTING PIPELINE final String pipelineLines = asText ( scriptFile ) ; checkArgument ( pipelineLines . length ( ) > 2 ) ; // in 3 parts final List < String > masterSender , slave , masterReceiver ; String [ ] split1 = pipelineLines . split ( BEGIN_RABBITIFY ) ; checkEquals ( 2 , split1 . length ) ; masterSender = list ( split1 [ 0 ] . split ( "\n" ) ) ; String [ ] split2 = split1 [ 1 ] . split ( END_RABBITIFY ) ; checkEquals ( 2 , split1 . length ) ; slave = list ( split2 [ 0 ] . split ( "\n" ) ) ; masterReceiver = list ( split2 [ 1 ] . split ( "\n" ) ) ; // preparing script lines List < String > lines = list ( ) ; if ( mode . equals ( sender ) ) { // MASTER _ SENDER PIPELINE lines = masterSender ; // add Rabbit writer lines . add ( "" ) ; lines . add ( "ae: " + RabbitWriter . class . getName ( ) ) ; lines . add ( " " + PARAM_QUEUE + ": " + getMasterToSlaveQueue ( runId + "" ) ) ; lines . add ( "ae: StatsAnnotatorPlus" ) ; lines . add ( " printEvery__java: 1000" ) ; } else if ( mode . equals ( Mode . slave ) ) { // SLAVE PIPELINE // add Rabbit reader lines . add ( "cr: " + RabbitReader . class . getName ( ) ) ; lines . add ( " " + PARAM_QUEUE + ": " + getMasterToSlaveQueue ( runId + "" ) ) ; lines . add ( " " + PARAM_TIMEOUT + "__java: " + timeout ) ; lines . add ( "" ) ; lines . addAll ( slave ) ; lines . add ( "" ) ; // add Rabbit writer lines . add ( "ae: " + RabbitWriter . class . getName ( ) ) ; lines . add ( " " + PARAM_QUEUE + ": " + getSlaveToMasterQueue ( runId + "" ) ) ; } else if ( mode . equals ( receiver ) ) { // MASTER _ RECEIVER PIPELINE // add Rabbit reader lines . add ( "cr: " + RabbitReader . class . getName ( ) ) ; lines . add ( " " + PARAM_QUEUE + ": " + getSlaveToMasterQueue ( runId + "" ) ) ; lines . add ( " " + PARAM_TIMEOUT + "__java: " + timeout ) ; lines . add ( "" ) ; lines . 
add ( "threads: 1" ) ; lines . add ( "" ) ; lines . addAll ( masterReceiver ) ; } // RUN PIPELINE try { LOG . info ( "Starting Rabbit " + mode ) ; Pipeline p = PipelineScriptParser . parse ( lines , scriptFile . getParent ( ) , list ( replacementVars ) ) ; p . run ( ) ; } catch ( ParseException e ) { throw new ParseException ( "\nERROR parsing " + mode + "\n" + e . getMessage ( ) + "\n(see the README.txt for the pipeline script format)" , e . getErrorOffset ( ) ) ; } System . out . println ( Launcher . OK_MESSAGE ) ;
public class ValueBindingValueExpressionAdapter { /** * / * ( non - Javadoc ) * @ see javax . faces . el . ValueBinding # getValue ( javax . faces . context . FacesContext ) */ public Object getValue ( FacesContext context ) throws EvaluationException , PropertyNotFoundException { } }
if ( context == null ) { throw new NullPointerException ( "FacesContext -> null" ) ; } Object result = null ; try { result = valueExpression . getValue ( context . getELContext ( ) ) ; } catch ( javax . el . PropertyNotFoundException pnfe ) { throw new PropertyNotFoundException ( pnfe ) ; } catch ( ELException elex ) { throw new EvaluationException ( elex ) ; } return result ;
public class JMRandom {

    /**
     * Feeds {@code streamSize} random ints in the half-open range
     * [{@code inclusiveLowerBound}, {@code exclusiveUpperBound}) to the consumer,
     * using a freshly seeded {@link java.util.Random}.
     *
     * @param streamSize the stream size (number of random ints to generate)
     * @param inclusiveLowerBound the inclusive lower bound
     * @param exclusiveUpperBound the exclusive upper bound
     * @param eachRandomIntConsumer the consumer invoked for each generated int
     */
    public static void foreachRandomInt(int streamSize, int inclusiveLowerBound,
            int exclusiveUpperBound, IntConsumer eachRandomIntConsumer) {
        // Delegates to the overload that accepts an explicit Random source.
        foreachRandomInt(streamSize, new Random(), inclusiveLowerBound, exclusiveUpperBound,
                eachRandomIntConsumer);
    }
}
public class AcroFields { /** * Gets the field type . The type can be one of : < CODE > FIELD _ TYPE _ PUSHBUTTON < / CODE > , * < CODE > FIELD _ TYPE _ CHECKBOX < / CODE > , < CODE > FIELD _ TYPE _ RADIOBUTTON < / CODE > , * < CODE > FIELD _ TYPE _ TEXT < / CODE > , < CODE > FIELD _ TYPE _ LIST < / CODE > , * < CODE > FIELD _ TYPE _ COMBO < / CODE > or < CODE > FIELD _ TYPE _ SIGNATURE < / CODE > . * If the field does not exist or is invalid it returns * < CODE > FIELD _ TYPE _ NONE < / CODE > . * @ param fieldName the field name * @ return the field type */ public int getFieldType ( String fieldName ) { } }
Item fd = getFieldItem ( fieldName ) ; if ( fd == null ) return FIELD_TYPE_NONE ; PdfDictionary merged = fd . getMerged ( 0 ) ; PdfName type = merged . getAsName ( PdfName . FT ) ; if ( type == null ) return FIELD_TYPE_NONE ; int ff = 0 ; PdfNumber ffo = merged . getAsNumber ( PdfName . FF ) ; if ( ffo != null ) { ff = ffo . intValue ( ) ; } if ( PdfName . BTN . equals ( type ) ) { if ( ( ff & PdfFormField . FF_PUSHBUTTON ) != 0 ) return FIELD_TYPE_PUSHBUTTON ; if ( ( ff & PdfFormField . FF_RADIO ) != 0 ) return FIELD_TYPE_RADIOBUTTON ; else return FIELD_TYPE_CHECKBOX ; } else if ( PdfName . TX . equals ( type ) ) { return FIELD_TYPE_TEXT ; } else if ( PdfName . CH . equals ( type ) ) { if ( ( ff & PdfFormField . FF_COMBO ) != 0 ) return FIELD_TYPE_COMBO ; else return FIELD_TYPE_LIST ; } else if ( PdfName . SIG . equals ( type ) ) { return FIELD_TYPE_SIGNATURE ; } return FIELD_TYPE_NONE ;
public class BaseTable { /** * Dig down and get the physical table for this record . * @ param table * @ param record * @ return */ private BaseTable getPhysicalTable ( PassThruTable table , Record record ) { } }
BaseTable altTable = table . getNextTable ( ) ; if ( altTable instanceof PassThruTable ) { BaseTable physicalTable = getPhysicalTable ( ( PassThruTable ) altTable , record ) ; if ( physicalTable != null ) if ( physicalTable != altTable ) return physicalTable ; } else if ( altTable . getDatabase ( ) . getDatabaseName ( true ) . equals ( record . getTable ( ) . getDatabase ( ) . getDatabaseName ( true ) ) ) return altTable ; Iterator < BaseTable > tables = table . getTables ( ) ; while ( tables . hasNext ( ) ) { altTable = tables . next ( ) ; if ( altTable instanceof PassThruTable ) { BaseTable physicalTable = getPhysicalTable ( ( PassThruTable ) altTable , record ) ; if ( physicalTable != null ) if ( physicalTable != altTable ) return physicalTable ; } else if ( altTable . getDatabase ( ) . getDatabaseName ( true ) . equals ( record . getTable ( ) . getDatabase ( ) . getDatabaseName ( true ) ) ) return altTable ; } return table ;
public class DeleteBaiduChannelRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeleteBaiduChannelRequest deleteBaiduChannelRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( deleteBaiduChannelRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteBaiduChannelRequest . getApplicationId ( ) , APPLICATIONID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class CmsSetupWorkplaceImportThread {

    /**
     * Kills this Thread as well as the included logging Thread.<p>
     */
    public void kill() {
        // Terminate the embedded shell first so no further setup commands run.
        if (m_shell != null) {
            m_shell.exit();
        }
        // Stop the companion thread that mirrors the shell output to the log.
        if (m_loggingThread != null) {
            m_loggingThread.stopThread();
        }
        // Drop references so the shell and setup state can be garbage collected.
        m_shell = null;
        m_setupBean = null;
    }
}
public class MetadataUtil {

    /**
     * Returns the next parent node (ancestor-or-self) which has the specific attribute
     * name defined. Walks up the tree starting at {@code parent}, inspecting only
     * element nodes.
     *
     * @param parent the w3c node from which the search will start.
     * @param attrName the attribute name which is searched for.
     * @return the nearest ancestor-or-self element defining the attribute, or
     *         {@code null} if the attribute is not found. (Bug fix: previously the
     *         topmost examined element was returned even when the attribute was absent,
     *         contradicting the documented contract. The start node is now also
     *         tolerated being a non-element node instead of raising a ClassCastException.)
     */
    public static Node getNextParentNodeWithAttr(final Node parent, final String attrName) {
        Node current = parent;
        while (current != null) {
            // Only element nodes can carry attributes.
            if (current.getNodeType() == Node.ELEMENT_NODE) {
                final Element element = (Element) current;
                if (element.getAttributes().getNamedItem(attrName) != null) {
                    return element;
                }
            }
            current = current.getParentNode();
        }
        // Reached the top of the tree without finding the attribute.
        return null;
    }
}
public class StatisticalMoments {

    /**
     * Add data with a given weight, incrementally updating the weighted count,
     * sum, min/max and the 2nd-4th central moment accumulators.
     *
     * @param val data value
     * @param weight weight of the value; non-positive weights are ignored
     */
    @Override
    public void put(double val, double weight) {
        // Non-positive weights contribute nothing.
        if (weight <= 0) {
            return;
        }
        // First observation: initialize all accumulators directly.
        if (this.n == 0) {
            n = weight;
            min = max = val;
            sum = val * weight;
            m2 = m3 = m4 = 0;
            return;
        }
        final double nn = weight + this.n; // new total weight
        final double deltan = val * this.n - this.sum;
        final double inc = deltan * weight;
        // Some factors used below:
        final double delta_nn = deltan / (this.n * nn);
        final double delta_nnw = delta_nn * weight;
        final double delta_nn2 = delta_nn * delta_nn;
        final double delta_nn3 = delta_nn2 * delta_nn;
        final double nb2 = weight * weight;
        final double tmp1 = this.n - weight;
        final double tmp2 = this.n * tmp1 + nb2;
        // NOTE(review): the update order is load-bearing — m4 uses the old m3/m2,
        // m3 uses the old m2, so the higher moments must be updated first.
        this.m4 += inc * delta_nn3 * tmp2 + 6. * nb2 * this.m2 * delta_nn2 - 4. * this.m3 * delta_nnw;
        this.m3 += inc * delta_nn2 * tmp1 - 3. * this.m2 * delta_nnw;
        this.m2 += inc * delta_nn;
        this.sum += weight * val;
        this.n = nn;
        min = val < min ? val : min;
        max = val > max ? val : max;
    }
}
public class MPdfWriter { /** * Effectue le rendu des headers . * @ param table * MBasicTable * @ param datatable * Table * @ throws BadElementException */ protected void renderHeaders ( final MBasicTable table , final Table datatable ) throws BadElementException { } }
final int columnCount = table . getColumnCount ( ) ; final TableColumnModel columnModel = table . getColumnModel ( ) ; // size of columns float totalWidth = 0 ; for ( int i = 0 ; i < columnCount ; i ++ ) { totalWidth += columnModel . getColumn ( i ) . getWidth ( ) ; } final float [ ] headerwidths = new float [ columnCount ] ; for ( int i = 0 ; i < columnCount ; i ++ ) { headerwidths [ i ] = 100f * columnModel . getColumn ( i ) . getWidth ( ) / totalWidth ; } datatable . setWidths ( headerwidths ) ; datatable . setWidth ( 100f ) ; // table header final Font font = FontFactory . getFont ( FontFactory . HELVETICA , 12 , Font . BOLD ) ; datatable . getDefaultCell ( ) . setBorderWidth ( 2 ) ; datatable . getDefaultCell ( ) . setHorizontalAlignment ( Element . ALIGN_CENTER ) ; // datatable . setDefaultCellGrayFill ( 0.75f ) ; String text ; Object value ; for ( int i = 0 ; i < columnCount ; i ++ ) { value = columnModel . getColumn ( i ) . getHeaderValue ( ) ; text = value != null ? value . toString ( ) : "" ; datatable . addCell ( new Phrase ( text , font ) ) ; } // end of the table header datatable . endHeaders ( ) ;
public class AbstractSARLLaunchConfigurationDelegate {

    /**
     * Replies a string that is the concatenation of the given values,
     * separated by single spaces; null and empty values are skipped.
     *
     * @param values the values to merge.
     * @return the concatenation result.
     */
    protected static String join(String... values) {
        final StringBuilder result = new StringBuilder();
        for (final String value : values) {
            // Skip null/empty entries so no doubled separators appear.
            if (value != null && !value.isEmpty()) {
                if (result.length() > 0) {
                    result.append(" ");
                }
                result.append(value);
            }
        }
        return result.toString();
    }
}
public class CPDefinitionOptionValueRelUtil {

    /**
     * Returns the last cp definition option value rel in the ordered set where companyId = &#63;.
     *
     * @param companyId the company ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the last matching cp definition option value rel, or <code>null</code> if a matching
     *         cp definition option value rel could not be found
     */
    public static CPDefinitionOptionValueRel fetchByCompanyId_Last(long companyId,
            OrderByComparator<CPDefinitionOptionValueRel> orderByComparator) {
        // Thin facade: delegates to the service-builder persistence implementation.
        return getPersistence().fetchByCompanyId_Last(companyId, orderByComparator);
    }
}
public class ZipSerializableViewImpl {

    /**
     * Serializes the invocation with a custom form.
     *
     * @serialData After all non-transient fields are written, we send the {@link Archive}
     *             contents encoded as ZIP, followed by the explicitly-written id.
     *             NOTE(review): readObject must consume the stream in exactly this order.
     */
    private void writeObject(final ObjectOutputStream out) throws IOException {
        // Default write of non-transient fields.
        out.defaultWriteObject();
        // Write the archive as ZIP bytes directly into the object stream.
        final InputStream in = archive.as(ZipExporter.class).exportAsInputStream();
        try {
            IOUtil.copy(in, out); // Don't close the outstream
            // Write the ID explicitly.
            out.writeObject(id);
        } finally {
            // In case we get an InputStream type that supports closing.
            in.close();
        }
        // Log at FINER so normal operation stays quiet.
        if (log.isLoggable(Level.FINER)) {
            log.finer("Wrote archive: " + archive.toString());
        }
    }
}
public class XCostExtension {

    /**
     * Assigns (to the given trace) multiple amounts given their key lists. The
     * i-th element in the key list should correspond to an i-level attribute
     * with the prescribed key. Note that as a side effect this method creates
     * attributes when it does not find an attribute with the proper key.
     * For example, {@code assignNestedAmounts(trace, [[[a] 10.00] [[a b] 20.00]])}
     * nests a "cost:amount" of 20.00 under attribute "b", itself under "a".
     *
     * @param trace Trace to assign the amounts to.
     * @param amounts Mapping from key lists to amounts which are to be assigned.
     */
    public void assignNestedAmounts(XTrace trace, Map<List<String>, Double> amounts) {
        // Delegates to the singleton cost-amount handler, which walks/creates
        // the nested attribute hierarchy.
        XCostAmount.instance().assignNestedValues(trace, amounts);
    }
}
public class RGBE {

    /**
     * scalbn(double x, int n)<BR>
     * scalbn(x, n) returns x * 2**n computed by exponent manipulation rather
     * than by actually performing an exponentiation or a multiplication.
     * Port of the classic fdlibm routine operating on the raw IEEE-754 bits
     * (hi/lo are the upper/lower 32 bits of the double, fromhilo rebuilds one).
     */
    private static double scalbn(double x, int n) {
        int hx = hi(x);
        int lx = lo(x);
        int k = (hx & 0x7ff00000) >> 20; // extract exponent
        if (k == 0) { // 0 or subnormal x
            if ((lx | (hx & 0x7fffffff)) == 0) {
                return x; // +-0: nothing to scale
            }
            // Normalize the subnormal by multiplying with 2^54, then re-read exponent.
            x *= two54;
            hx = hi(x);
            k = ((hx & 0x7ff00000) >> 20) - 54;
            if (n < -50000) {
                return tiny * x; // underflow
            }
        }
        if (k == 0x7ff) {
            return x + x; // NaN or Inf
        }
        k = k + n;
        if (k > 0x7fe) {
            return huge * copysign(huge, x); // overflow
        }
        if (k > 0) { // normal result
            return fromhilo((hx & 0x800fffff) | (k << 20), lo(x));
        }
        if (k <= -54) {
            if (n > 50000) { // in case integer overflow in n + k
                return huge * copysign(huge, x); // overflow
            } else {
                return tiny * copysign(tiny, x); // underflow
            }
        }
        // Subnormal result: build with a biased-up exponent, then scale back down.
        k += 54;
        x = fromhilo((hx & 0x800fffff) | (k << 20), lo(x));
        return x * twom54;
    }
}
public class ReactiveWifi {

    /**
     * Observes WiFi signal level with predefined max num levels.
     * Returns WiFi signal level as enum with information about current level.
     *
     * @param context Context of the activity or an application
     * @return WifiSignalLevel as an enum
     */
    @RequiresPermission(ACCESS_WIFI_STATE)
    public static Observable<WifiSignalLevel> observeWifiSignalLevel(final Context context) {
        // Observe the raw integer level at the enum's maximum granularity and
        // map each reading onto the corresponding WifiSignalLevel constant.
        return observeWifiSignalLevel(context, WifiSignalLevel.getMaxLevel()).map(
                new Function<Integer, WifiSignalLevel>() {
                    @Override
                    public WifiSignalLevel apply(Integer level) throws Exception {
                        return WifiSignalLevel.fromLevel(level);
                    }
                });
    }
}
public class CmsSitemapExtensionConnector {

    /**
     * Opens the page copy dialog.<p>
     *
     * @param id the structure id of the resource for which to open the dialog
     * @param callback the native callback to call with the result when the dialog has finished
     */
    public void openPageCopyDialog(String id, JavaScriptObject callback) {
        // Wrap the native JS callback in a Java callback and delegate to the
        // Java-side overload.
        openPageCopyDialog(id, CmsJsUtil.wrapCallback(callback));
    }
}
public class BlockInlineChecksumReader {

    /**
     * Calculate CRC Checksum of the whole block. Implemented by concatenating
     * checksums of all the chunks: the block file interleaves data chunks with
     * their inline CRC32 values, so this skips each data chunk and combines the
     * stored per-chunk CRCs with {@code CrcConcat}.
     *
     * @param datanode the datanode owning the replica
     * @param ri the replica to read
     * @param namespaceId namespace of the block
     * @param block the block whose CRC is computed
     * @return the combined CRC32 of the whole block
     * @throws IOException if the checksum type is unsupported or the file layout is corrupt
     */
    static public int getBlockCrc(DataNode datanode, ReplicaToRead ri, int namespaceId, Block block) throws IOException {
        InputStream rawStreamIn = null;
        DataInputStream streamIn = null;
        int blockCrc = 0;
        try {
            int bytesPerCRC;
            int checksumSize;
            bytesPerCRC = ri.getBytesPerChecksum();
            int checksumType = ri.getChecksumType();
            // Only CRC32 chunks can be concatenated with CrcConcat.
            if (checksumType != DataChecksum.CHECKSUM_CRC32) {
                throw new IOException("File Checksum now is only supported for CRC32");
            }
            DataChecksum dataChecksum = DataChecksum.newDataChecksum(checksumType, bytesPerCRC);
            checksumSize = dataChecksum.getChecksumSize();
            rawStreamIn = ri.getBlockInputStream(datanode, 0);
            streamIn = new DataInputStream(new BufferedInputStream(rawStreamIn, FSConstants.BUFFER_SIZE));
            // Skip the file header; everything after it is chunk data + inline CRCs.
            IOUtils.skipFully(streamIn, BlockInlineChecksumReader.getHeaderSize());
            long lengthLeft = ((FileInputStream) rawStreamIn).getChannel().size()
                    - BlockInlineChecksumReader.getHeaderSize();
            if (lengthLeft == 0) {
                // Empty block: CRC of zero bytes.
                blockCrc = (int) dataChecksum.getValue();
            } else {
                byte[] buffer = new byte[checksumSize];
                boolean firstChecksum = true;
                while (lengthLeft > 0) {
                    long dataByteLengh;
                    if (lengthLeft >= bytesPerCRC + checksumSize) {
                        // Full chunk followed by its CRC.
                        lengthLeft -= bytesPerCRC + checksumSize;
                        dataByteLengh = bytesPerCRC;
                    } else if (lengthLeft > checksumSize) {
                        // Trailing partial chunk followed by its CRC.
                        dataByteLengh = lengthLeft - checksumSize;
                        lengthLeft = 0;
                    } else {
                        // Not even room for a CRC record: the file is malformed.
                        // report to name node the corruption.
                        DataBlockScanner.reportBadBlocks(block, namespaceId, datanode);
                        throw new IOException("File for namespace " + namespaceId + " block "
                                + block + " seems to be corrupted");
                    }
                    // Skip the chunk's data bytes, then read its stored CRC.
                    IOUtils.skipFully(streamIn, dataByteLengh);
                    IOUtils.readFully(streamIn, buffer, 0, buffer.length);
                    int intChecksum = DataChecksum.getIntFromBytes(buffer, 0);
                    if (firstChecksum) {
                        blockCrc = intChecksum;
                        firstChecksum = false;
                    } else {
                        // Fold this chunk's CRC into the running block CRC.
                        blockCrc = CrcConcat.concatCrc(blockCrc, intChecksum, (int) dataByteLengh);
                    }
                }
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("block=" + block + ", bytesPerCRC=" + bytesPerCRC + ", crc=" + blockCrc);
            }
            return blockCrc;
        } finally {
            IOUtils.closeStream(streamIn);
            IOUtils.closeStream(rawStreamIn);
        }
    }
}
public class WarBuilder {

    /**
     * Decides whether the given classes path belongs to this WAR module; the
     * assumption is that the Runner is invoked in the WAR module's directory,
     * so the path must lie under the current working directory.
     *
     * @param path candidate classes directory path.
     * @return true when the normalized absolute path starts with the working directory.
     */
    private boolean isWar(String path) {
        final String workingDir = Paths.get(".").toAbsolutePath().normalize().toString();
        final String candidate = Paths.get(path).toAbsolutePath().normalize().toString();
        return candidate.startsWith(workingDir);
    }
}
public class PutObjectProgressBar {

    /**
     * MinioClient.putObjectProgressBar() example: uploads a local file to a
     * bucket while rendering an ASCII progress bar on the console.
     */
    public static void main(String[] args)
            throws InvalidKeyException, NoSuchAlgorithmException, InvalidEndpointException,
            InvalidPortException, InvalidBucketNameException, InsufficientDataException,
            NoResponseException, ErrorResponseException, InternalException,
            InvalidArgumentException, IOException, XmlPullParserException {
        /* play.min.io for test and development (publicly known demo credentials). */
        MinioClient minioClient = new MinioClient("https://play.min.io:9000", "Q3AM3UQ867SPQQA43P2F",
                "zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG");
        /* Amazon S3: */
        // MinioClient minioClient = new MinioClient("https://s3.amazonaws.com",
        //     "YOUR-ACCESSKEYID",
        //     "YOUR-SECRETACCESSKEY");
        String objectName = "my-objectname";
        String bucketName = "my-bucketname";
        File file = new File("my-filename");
        // Wrap the file stream so every read updates the progress bar.
        InputStream pis = new BufferedInputStream(new ProgressStream("Uploading... ",
                ProgressBarStyle.ASCII, new FileInputStream(file)));
        minioClient.putObject(bucketName, objectName, pis, pis.available(), "application/octet-stream");
        pis.close();
        System.out.println("my-objectname is uploaded successfully");
    }
}
public class ListMailboxPermissionsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ListMailboxPermissionsRequest listMailboxPermissionsRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( listMailboxPermissionsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listMailboxPermissionsRequest . getOrganizationId ( ) , ORGANIZATIONID_BINDING ) ; protocolMarshaller . marshall ( listMailboxPermissionsRequest . getEntityId ( ) , ENTITYID_BINDING ) ; protocolMarshaller . marshall ( listMailboxPermissionsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( listMailboxPermissionsRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class GetIndividualProfilesRequest { /** * Request the IndividualProfile for the given login and password . * This option is mutually exclusive with { @ link # withIndividualId ( int ) } * and { @ link # withMICR ( String , String ) } . * @ param login The individual ' s login . * @ param password The individual ' s password . * @ return this . */ public GetIndividualProfilesRequest withLoginPassword ( final String login , final char [ ] password ) { } }
this . login = login ; this . password = password ; this . id = 0 ; this . accountNumber = this . routingNumber = null ; return this ;
public class AbstractLockTableHandler { /** * { @ inheritDoc } */ public void removeLockedNode ( String nodeId ) throws SQLException { } }
ResultSet resultSet = null ; PreparedStatement preparedStatement = null ; Connection jdbcConnection = openConnection ( ) ; try { InspectionQuery query = getDeleteQuery ( nodeId ) ; preparedStatement = jdbcConnection . prepareStatement ( query . getStatement ( ) ) ; preparedStatement . executeUpdate ( ) ; } finally { JDBCUtils . freeResources ( resultSet , preparedStatement , jdbcConnection ) ; }
public class ProcessorConfigurationUtils { /** * Wraps an implementation of { @ link ICDATASectionProcessor } into an object that adds some information * required internally ( like e . g . the dialect this processor was registered for ) . * This method is meant for < strong > internal < / strong > use only . * @ param processor the processor to be wrapped . * @ param dialect the dialect this processor was configured for . * @ return the wrapped processor . */ public static ICDATASectionProcessor wrap ( final ICDATASectionProcessor processor , final IProcessorDialect dialect ) { } }
Validate . notNull ( dialect , "Dialect cannot be null" ) ; if ( processor == null ) { return null ; } return new CDATASectionProcessorWrapper ( processor , dialect ) ;
public class CmsAliasManager { /** * Saves the aliases for a given structure id , < b > completely replacing < / b > any existing aliases for the same structure id . < p > * @ param cms the current CMS context * @ param structureId the structure id of a resource * @ param aliases the list of aliases which should be written * @ throws CmsException if something goes wrong */ public synchronized void saveAliases ( CmsObject cms , CmsUUID structureId , List < CmsAlias > aliases ) throws CmsException { } }
m_securityManager . saveAliases ( cms . getRequestContext ( ) , cms . readResource ( structureId ) , aliases ) ; touch ( cms , cms . readResource ( structureId ) ) ;
public class PluginDefaultGroovyMethods { /** * If the optional contains a value , returns an optional containing the transformed value obtained using the < code > transform < / code > closure * or otherwise an empty optional . * < pre class = " groovyTestCase " > * assert Optional . of ( " foobar " ) . collect { it . size ( ) } . get ( ) = = 6 * assert ! Optional . empty ( ) . collect { it . size ( ) } . isPresent ( ) * < / pre > * @ param self an Optional * @ param transform the closure used to transform the optional value if present * @ return an Optional containing the transformed value or empty if the optional is empty or the transform returns null */ public static < S , T > Optional < T > collect ( Optional < S > self , @ ClosureParams ( FirstParam . FirstGenericType . class ) Closure < T > transform ) { } }
Objects . requireNonNull ( self ) ; Objects . requireNonNull ( transform ) ; if ( ! self . isPresent ( ) ) { return self . empty ( ) ; } return Optional . ofNullable ( transform . call ( self . get ( ) ) ) ;
public class FilterMenuLayout {

    /**
     * Calculates the Euclidean distance between two points.
     *
     * @param a first point
     * @param b second point
     * @return the distance between a and b
     */
    private static double pointsDistance(Point a, Point b) {
        // Compute the deltas in double precision: squaring the int deltas
        // directly overflows int for |delta| > 46340 and silently yields a
        // wrong distance.
        final double dx = (double) b.x - a.x;
        final double dy = (double) b.y - a.y;
        return Math.sqrt(dx * dx + dy * dy);
    }
}
public class InputMapTemplate {

    /**
     * When the given event type occurs and {@code condition} is false,
     * consumes the event and does not attempt to match additional
     * {@code InputMap}s (if they exist). If {@code condition} is true, continues to try to
     * pattern match the event type with the next {@code InputMap} (if one exists).
     */
    public static <S, T extends Event> InputMapTemplate<S, T> consumeUnless(
            EventType<? extends T> eventType, Predicate<? super S> condition,
            BiConsumer<? super S, ? super T> action) {
        // Lift the raw event type into an EventPattern and delegate to the
        // pattern-based overload.
        return consumeUnless(EventPattern.eventType(eventType), condition, action);
    }
}
public class FacebookEndpoint { /** * Finishes a { @ link com . groundupworks . wings . facebook . FacebookEndpoint # startSettingsRequest ( android . app . Activity , android . support . v4 . app . Fragment ) } . * @ param requestCode the integer request code originally supplied to startActivityForResult ( ) , allowing you to identify who * this result came from . * @ param resultCode the integer result code returned by the child activity through its setResult ( ) . * @ param data an Intent , which can return result data to the caller ( various data can be attached to Intent * " extras " ) . * @ return the settings ; or null if failed . */ private FacebookSettings finishSettingsRequest ( int requestCode , int resultCode , Intent data ) { } }
FacebookSettings settings = null ; if ( requestCode == SETTINGS_REQUEST_CODE && resultCode == Activity . RESULT_OK && data != null ) { // Construct settings from the extras bundle . settings = FacebookSettings . newInstance ( data . getExtras ( ) ) ; } return settings ;
public class Readability { /** * Get the number of times a string s appears in the node e . * @ param e * @ param s * @ return */ private static int getCharCount ( Element e , String s ) { } }
if ( s == null || s . length ( ) == 0 ) { s = "," ; } return getInnerText ( e , true ) . split ( s ) . length ;
public class CauseOfBlockage {

    /**
     * Obtains a simple implementation backed by {@link Localizable}.
     */
    public static CauseOfBlockage fromMessage(@Nonnull final Localizable l) {
        l.getKey(); // null check: fail fast here rather than inside the returned instance
        return new CauseOfBlockage() {
            @Override
            public String getShortDescription() {
                // Localized lazily at display time.
                return l.toString();
            }
        };
    }
}
public class InfoPanelService { /** * Search children of the specified parent for an occurrence of an active info panel . This is a * recursive , breadth - first search of the component tree . * @ param parent Parent whose children are to be searched . * @ param exclude An optional child to be excluded from the search . * @ param activeOnly If true , only active info panels are considered . * @ return The requested info panel , or null if none found . */ private static IInfoPanel searchChildren ( ElementBase parent , ElementBase exclude , boolean activeOnly ) { } }
IInfoPanel infoPanel = null ; if ( parent != null ) { for ( ElementBase child : parent . getChildren ( ) ) { if ( ( child != exclude ) && ( ( infoPanel = getInfoPanel ( child , activeOnly ) ) != null ) ) { break ; } } if ( infoPanel == null ) { for ( ElementBase child : parent . getChildren ( ) ) { if ( ( child != exclude ) && ( ( infoPanel = searchChildren ( child , null , activeOnly ) ) != null ) ) { break ; } } } } return infoPanel ;
public class LinkDeserializer {

    /**
     * Deserializes a Link from JSON shaped like {"rel": {"href": "..."}},
     * advancing the parser token-by-token and validating each expected token.
     * NOTE(review): the token walk is strictly positional; the validate calls
     * guard the expected structure at each step.
     */
    @Override
    public Link deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException {
        String tmp = jp.getText(); // whitespace
        validate(jp, tmp, "{");
        jp.nextToken(); // skip over { to the rel
        String rel = jp.getText();
        validateText(jp, rel);
        jp.nextToken(); // skip over {
        tmp = jp.getText();
        validate(jp, tmp, "{");
        jp.nextToken(); // skip over "href"
        tmp = jp.getText();
        validate(jp, tmp, "href");
        jp.nextToken(); // skip to "http://..."
        String href = jp.getText();
        validateText(jp, href);
        jp.nextToken(); // skip }
        tmp = jp.getText();
        validate(jp, tmp, "}");
        jp.nextToken(); // skip }
        tmp = jp.getText();
        validate(jp, tmp, "}");
        Link link = new Link(rel, href);
        return link;
    }
}
public class EntityTypesClient { /** * Deletes the specified entity type . * < p > Sample code : * < pre > < code > * try ( EntityTypesClient entityTypesClient = EntityTypesClient . create ( ) ) { * EntityTypeName name = EntityTypeName . of ( " [ PROJECT ] " , " [ ENTITY _ TYPE ] " ) ; * entityTypesClient . deleteEntityType ( name ) ; * < / code > < / pre > * @ param name Required . The name of the entity type to delete . Format : ` projects / & lt ; Project * ID & gt ; / agent / entityTypes / & lt ; EntityType ID & gt ; ` . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final void deleteEntityType ( EntityTypeName name ) { } }
DeleteEntityTypeRequest request = DeleteEntityTypeRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . build ( ) ; deleteEntityType ( request ) ;
public class PopulateHelper { /** * 将request中的参数设置到clazz对应的bean 。 * @ param clazz * @ param name */ @ SuppressWarnings ( "unchecked" ) public static < T > T populate ( Class < T > clazz , String name ) { } }
EntityType type = null ; if ( clazz . isInterface ( ) ) { type = Model . getType ( clazz . getName ( ) ) ; } else { type = Model . getType ( clazz ) ; } return ( T ) populate ( type . newInstance ( ) , type . getEntityName ( ) , name ) ;
public class StgFileSource { /** * Initialize the file source with any parameters presented by the constructors . * @ param file a file with all information * @ param directory a directory to locate a file in * @ param fileName a file name with or without path information * @ param option an option on how to locate the file ( not used if parameter file is set ) */ protected void init ( File file , String directory , String fileName , InfoLocationOptions option ) { } }
super . init ( file , directory , fileName , option ) ; if ( ! this . errors . hasErrors ( ) ) { if ( ! "stg" . equals ( this . getFileExtension ( ) ) ) { this . errors . addError ( "file name must have '.stg' extension" ) ; this . reset ( ) ; } else { } }
public class voice { /** * Get all results from the Google Speech Recognition activity ( DO NOT FORGET call this function on * onActivityResult ( ) ) * @ param requestCode * - onActivityResult request code * @ param resultCode * - onActivityResult result code * @ param data * - onActivityResult Intent data * @ return ArrayList < String > with all results or null if was not possible to * get any results */ public static ArrayList < String > getSpeechRecognitionResults ( int requestCode , int resultCode , Intent data ) { } }
ArrayList < String > matches = null ; if ( requestCode == 0 && resultCode == - 1 ) { matches = data . getStringArrayListExtra ( RecognizerIntent . EXTRA_RESULTS ) ; StringBuilder sb = new StringBuilder ( ) ; for ( String match : matches ) { sb . append ( match + ", " ) ; } } return matches ;
public class JSONObjectException { /** * Method called to prepend a reference information in front of * current path */ public void prependPath ( Object referrer , int index ) { } }
Reference ref = new Reference ( referrer , index ) ; prependPath ( ref ) ;
public class LoadStatistics {
  /**
   * Computes a self-consistent snapshot of the load statistics.
   *
   * Note: The original method of computing load statistics would compute the total and idle counts
   * independently, which could lead to counting errors while jobs started in between the different
   * state-counting operations. By returning a {@link LoadStatisticsSnapshot} we get a single
   * consistent view of the counts which was valid for at least one point in time during the
   * execution of this method.
   *
   * @return a self-consistent snapshot of the load statistics.
   * @since 1.607
   */
  public LoadStatisticsSnapshot computeSnapshot() {
    if (modern) {
      // Modern implementations derive everything from one pass over the
      // current buildable queue items, so the counts are mutually consistent.
      return computeSnapshot(Jenkins.getInstance().getQueue().getBuildableItems());
    } else {
      // Legacy path: total and idle executors are sampled independently, so
      // the derived figures below are clamped to non-negative values to paper
      // over races between the two samples.
      int t = computeTotalExecutors();
      int i = computeIdleExecutors();
      return new LoadStatisticsSnapshot(t, t, Math.max(i - t, 0), Math.max(t - i, 0), i, i,
          computeQueueLength());
    }
  }
}
public class JSMin {
  /**
   * get -- return the next character from stdin. Watch out for lookahead. If the character is a
   * control character, translate it to a space or linefeed.
   *
   * @param inStringLiteral true when the caller is currently inside a string literal; in that case
   *        tab characters are passed through instead of being collapsed to a space
   * @return the next character, '\n' for a carriage return, ' ' for any other control character,
   *         or EOF at end of input
   * @throws IOException if reading the underlying stream fails
   */
  private int get(boolean inStringLiteral) throws IOException {
    // Consume the one-character lookahead first, if present.
    int c = theLookahead;
    theLookahead = EOF;
    if (c == EOF) {
      c = in.read();
      if (c != EOF) {
        currentByteIndex++;
      }
    }
    // Maintain line/column position for diagnostics.
    if (c == '\n') {
      line++;
      column = 0;
    } else {
      column++;
    }
    // Printable characters, newlines and EOF pass through unchanged; a tab is
    // only preserved when inside a string literal.
    if (c >= ' ' || c == '\n' || c == EOF || (inStringLiteral && c == '\t')) {
      return c;
    }
    // Normalize carriage returns to linefeeds.
    if (c == '\r') {
      return '\n';
    }
    // Every other control character is collapsed to a space.
    return ' ';
  }
}
public class RaidNode {
  /**
   * Stop all RaidNode threads and wait for all to finish.
   *
   * Idempotent: a second call returns immediately. The shutdown proceeds by
   * flagging the run loops, interrupting each worker thread, stopping the
   * embedded servers, and finally unregistering the JMX MBean.
   */
  public void stop() {
    // Already shutting down — nothing to do.
    if (stopRequested) {
      return;
    }
    stopRequested = true;
    running = false;
    if (server != null)
      server.stop();
    // Interrupt worker threads; the companion monitor/processor references are
    // nulled (or their running flags cleared) so the loops exit promptly.
    if (triggerThread != null) {
      triggerThread.interrupt();
      triggerMonitor = null;
    }
    if (urfThread != null) {
      urfThread.interrupt();
      urfProcessor = null;
    }
    if (blockIntegrityMonitor != null)
      blockIntegrityMonitor.running = false;
    if (blockFixerThread != null)
      blockFixerThread.interrupt();
    if (blockCopierThread != null)
      blockCopierThread.interrupt();
    if (corruptFileCounterThread != null)
      corruptFileCounterThread.interrupt();
    if (purgeMonitor != null)
      purgeMonitor.running = false;
    if (purgeThread != null)
      purgeThread.interrupt();
    if (placementMonitor != null)
      placementMonitor.stop();
    if (statsCollector != null)
      statsCollector.stop();
    if (statsCollectorThread != null)
      statsCollectorThread.interrupt();
    // The HTTP info server may throw on shutdown; log and continue so the
    // remaining cleanup still runs.
    if (infoServer != null) {
      try {
        infoServer.stop();
      } catch (Exception e) {
        LOG.warn("Exception shutting down " + RaidNode.class, e);
      }
    }
    this.unregisterMBean();
  }
}
public class EventImpl { /** * { @ inheritDoc } */ public List < String > getPropertyNames ( ) { } }
final int size = this . properties . size ( ) ; if ( 0 == size ) { return Collections . emptyList ( ) ; } List < String > names = new ArrayList < String > ( size ) ; names . addAll ( this . properties . keySet ( ) ) ; return names ;
public class LargeList { /** * Select values from list . * @ param value value to select * @ return list of entries selected */ @ SuppressWarnings ( "serial" ) public List < ? > find ( Value value ) throws AerospikeException { } }
Key subKey = makeSubKey ( value ) ; Record record = client . get ( this . policy , subKey , ListElementBinName ) ; if ( record != null ) { final Object result = record . getValue ( ListElementBinName ) ; return new ArrayList < Object > ( ) { { add ( result ) ; } } ; } else { return null ; }
public class Parser { /** * < p > Filter the SQL string of Java SQL Escape clauses . < / p > * < p > Currently implemented Escape clauses are those mentioned in 11.3 in the specification . * Basically we look through the sql string for { d xxx } , { t xxx } , { ts xxx } , { oj xxx } or { fn xxx } * in non - string sql code . When we find them , we just strip the escape part leaving only the xxx * part . So , something like " select * from x where d = { d ' 2001-10-09 ' } " would return " select * from * x where d = ' 2001-10-09 ' " . < / p > * @ param sql the original query text * @ param replaceProcessingEnabled whether replace _ processing _ enabled is on * @ param standardConformingStrings whether standard _ conforming _ strings is on * @ return PostgreSQL - compatible SQL * @ throws SQLException if given SQL is wrong */ public static String replaceProcessing ( String sql , boolean replaceProcessingEnabled , boolean standardConformingStrings ) throws SQLException { } }
if ( replaceProcessingEnabled ) { // Since escape codes can only appear in SQL CODE , we keep track // of if we enter a string or not . int len = sql . length ( ) ; char [ ] chars = sql . toCharArray ( ) ; StringBuilder newsql = new StringBuilder ( len ) ; int i = 0 ; while ( i < len ) { i = parseSql ( chars , i , newsql , false , standardConformingStrings ) ; // We need to loop here in case we encounter invalid // SQL , consider : SELECT a FROM t WHERE ( 1 > 0 ) ) ORDER BY a // We can ' t ending replacing after the extra closing paren // because that changes a syntax error to a valid query // that isn ' t what the user specified . if ( i < len ) { newsql . append ( chars [ i ] ) ; i ++ ; } } return newsql . toString ( ) ; } else { return sql ; }
public class ELImageInputTag { /** * Resets attribute values for tag reuse . */ @ Override public void release ( ) { } }
super . release ( ) ; setBase64Expr ( null ) ; setAlignExpr ( null ) ; setAltExpr ( null ) ; setBorderExpr ( null ) ; setDirExpr ( null ) ; setDisabledExpr ( null ) ; setLangExpr ( null ) ; setOnblurExpr ( null ) ; setOnchangeExpr ( null ) ; setOnclickExpr ( null ) ; setOndblclickExpr ( null ) ; setOnfocusExpr ( null ) ; setOnkeydownExpr ( null ) ; setOnkeypressExpr ( null ) ; setOnkeyupExpr ( null ) ; setOnmousedownExpr ( null ) ; setOnmousemoveExpr ( null ) ; setOnmouseoutExpr ( null ) ; setOnmouseoverExpr ( null ) ; setOnmouseupExpr ( null ) ; setSrcExpr ( null ) ; setStyleExpr ( null ) ; setStyleClassExpr ( null ) ; setStyleIdExpr ( null ) ; setTabindexExpr ( null ) ; setTitleExpr ( null ) ; setValueExpr ( null ) ;
public class SerializerFactory {
  /**
   * Returns a deserializer based on a string type.
   *
   * Resolution order: per-instance cache, static built-in type map, optional
   * class-name resolver rewrite, then either an array deserializer (for
   * "[elem" style type names) or a class-based deserializer loaded by name.
   * Successful lookups are cached.
   *
   * @param type the wire-format type name; may be null or empty
   * @return the resolved deserializer, or null if the type is empty or the
   *         class could not be loaded
   * @throws HessianProtocolException if the class-name resolver fails
   */
  public Deserializer getDeserializer(String type) throws HessianProtocolException {
    if (type == null || type.equals(""))
      return null;
    Deserializer deserializer;
    // Fast path: previously resolved type names.
    deserializer = (Deserializer) _cachedTypeDeserializerMap.get(type);
    if (deserializer != null)
      return deserializer;
    // Built-in static mappings (primitives, well-known types).
    deserializer = (Deserializer) _staticTypeMap.get(type);
    if (deserializer != null)
      return deserializer;
    // Give the optional resolver a chance to rewrite the type name (e.g. for
    // class relocation); its failures are wrapped as protocol errors.
    if (classNameResolver != null) {
      try {
        type = classNameResolver.resolve(type);
      } catch (Exception e) {
        throw new HessianProtocolException(e);
      }
    }
    if (type.startsWith("[")) {
      // Array type: recurse on the element type name; unknown element types
      // fall back to Object arrays.
      Deserializer subDeserializer = getDeserializer(type.substring(1));
      if (subDeserializer != null)
        deserializer = new ArrayDeserializer(subDeserializer.getType());
      else
        deserializer = new ArrayDeserializer(Object.class);
    } else {
      try {
        // Class cl = Class.forName(type, false, getClassLoader());
        Class cl = loadSerializedClass(type);
        deserializer = getDeserializer(cl);
      } catch (Exception e) {
        // Unknown classes are logged (not thrown) so the caller can fall back.
        log.warning("Hessian/Burlap: '" + type + "' is an unknown class in " + getClassLoader()
            + ":\n" + e);
        log.log(Level.FINER, e.toString(), e);
      }
    }
    // Cache only successful resolutions; failures may succeed later.
    if (deserializer != null) {
      _cachedTypeDeserializerMap.put(type, deserializer);
    }
    return deserializer;
  }
}
public class FTPConnector {
  /**
   * Connects and authenticates with the ftp server specified by the {@code ftpUrl}.
   *
   * Flow: validate the protocol, open the socket connection, derive the
   * username/password from the URL's user-info (prompting for a password when
   * only a username was given), log in, then switch to passive mode and
   * binary transfers.
   *
   * @param ftpUrl {@link URL}, the ftp server url
   * @throws ResourceDownloadError - Thrown if there was an ftp error connecting or authenticating
   *         with the ftp server.
   */
  public void connectAndLogin(URL ftpUrl) throws ResourceDownloadError {
    if (!ftpUrl.getProtocol().equals("ftp")) {
      throw new InvalidArgument("The ftp connection does not support protocol '"
          + ftpUrl.getProtocol() + "', only 'ftp'.");
    }
    // connect to ftp server
    String host = ftpUrl.getHost();
    // URL.getPort() is -1 when the URL carries no explicit port.
    int port = ftpUrl.getPort() == -1 ? DEFAULT_FTP_PORT : ftpUrl.getPort();
    ftpClient = new FTPClient();
    try {
      ftpClient.connect(host, port);
    } catch (Exception e) {
      final String url = ftpUrl.toString();
      final String msg = e.getMessage();
      throw new ResourceDownloadError(url, msg, e);
    }
    // login to user account
    String userInfo = ftpUrl.getUserInfo();
    String username = DEFAULT_USER_NAME;
    String password = "";
    if (userInfo != null) {
      if (userInfo.contains(":")) {
        // provided username & password so parse
        // NOTE(review): a password containing ':' yields more than two tokens
        // and is silently ignored here (defaults are kept) — confirm intended.
        String[] userInfoTokens = userInfo.split("\\:");
        if (userInfoTokens.length == 2) {
          username = userInfoTokens[0];
          password = userInfoTokens[1];
        }
      } else {
        // provided only username
        username = userInfo;
        // prompt for password
        char[] pwd;
        try {
          pwd = PasswordPrompter.getPassword(pwdInputStream, "Connecting to '" + ftpUrl.toString()
              + "'. Enter password for user '" + username + "': ");
        } catch (IOException e) {
          final String name = ftpUrl.toString();
          final String msg = e.getMessage();
          throw new ResourceDownloadError(name, msg, e);
        }
        // A null prompt result means "no password"; otherwise convert the
        // char[] to a String for the FTP login call.
        if (pwd == null) {
          password = "";
        } else {
          password = String.valueOf(pwd);
        }
      }
    }
    try {
      // login() returning false indicates a rejected username/password.
      if (!ftpClient.login(username, password)) {
        final String name = ftpUrl.toString();
        final String msg = "Login error for username and password";
        throw new ResourceDownloadError(name, msg);
      }
    } catch (IOException e) {
      final String name = ftpUrl.toString();
      final String msg = "Login error for username and password";
      throw new ResourceDownloadError(name, msg, e);
    }
    // Passive mode plus binary transfer type for subsequent downloads.
    try {
      ftpClient.pasv();
      ftpClient.setFileType(FTP.BINARY_FILE_TYPE);
    } catch (IOException e) {
      final String url = ftpUrl.toString();
      final String msg = "Error setting passive mode or transfer type";
      throw new ResourceDownloadError(url, msg, e);
    }
  }
}
public class SipPacketImpl { /** * ( non - Javadoc ) * @ see io . pkts . packet . sip . SipPacket # getHeader ( java . lang . String ) */ @ Override public Optional < SipHeader > getHeader ( final String headerName ) throws SipPacketParseException { } }
return this . msg . getHeader ( headerName ) ;
public class ChainedProperty {
  /**
   * Returns a new ChainedProperty which contains everything that follows this ChainedProperty's
   * prime property.
   *
   * @throws IllegalStateException if chain count is zero
   */
  public ChainedProperty<?> tail() {
    if (getChainCount() == 0) {
      // There is no tail when the property has no chain at all.
      throw new IllegalStateException();
    }
    if (getChainCount() == 1) {
      // Single-element chain: the tail is just that element, preserving its
      // outer-join flag when set.
      if (!isOuterJoin(1)) {
        return get(mChain[0]);
      } else {
        return get(mChain[0], null, new boolean[] {true});
      }
    }
    // Drop the first chain element; the remaining elements become the new chain.
    StorableProperty<?>[] newChain = new StorableProperty[getChainCount() - 1];
    System.arraycopy(mChain, 1, newChain, 0, newChain.length);
    // Outer-join flags are shifted by one to stay aligned with the new chain
    // (flag array has one more slot than the chain — it covers the prime
    // property too).
    boolean[] newOuterJoin = mOuterJoin;
    if (newOuterJoin != null) {
      newOuterJoin = new boolean[newChain.length + 1];
      System.arraycopy(mOuterJoin, 1, newOuterJoin, 0, mOuterJoin.length - 1);
    }
    return get(mChain[0], newChain, newOuterJoin);
  }
}
public class KeyVaultClientCustomImpl { /** * Get a specified secret from a given key vault . * @ param secretIdentifier * The URL for the secret . * @ param serviceCallback * the async ServiceCallback to handle successful and failed * responses . * @ return the { @ link ServiceFuture } object */ public ServiceFuture < SecretBundle > getSecretAsync ( String secretIdentifier , final ServiceCallback < SecretBundle > serviceCallback ) { } }
SecretIdentifier id = new SecretIdentifier ( secretIdentifier ) ; return getSecretAsync ( id . vault ( ) , id . name ( ) , id . version ( ) == null ? "" : id . version ( ) , serviceCallback ) ;
public class BOGImpl {
  /**
   * <!-- begin-user-doc -->
   * Reflective feature setter for this EMF model object: dispatches on the
   * feature ID, falling through to the superclass for unknown features.
   * <!-- end-user-doc -->
   *
   * @generated
   */
  @SuppressWarnings("unchecked")
  @Override
  public void eSet(int featureID, Object newValue) {
    switch (featureID) {
      case AfplibPackage.BOG__OEG_NAME:
        setOEGName((String) newValue);
        return;
      case AfplibPackage.BOG__TRIPLETS:
        // List features are replaced wholesale: clear then add all.
        getTriplets().clear();
        getTriplets().addAll((Collection<? extends Triplet>) newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }
}
public class PlatformDescription { /** * Describes the platform . Outputs Java version and vendor . * @ return Description of the current platform */ public static String describePlatform ( ) { } }
String desc = "Java " + SPECIFICATION_VERSION + " (" + "VM vendor name=\"" + VENDOR + "\", " + "VM vendor version=" + VENDOR_VERSION + ", " + "JVM name=\"" + JVM_NAME + "\", " + "JVM version=" + VM_VERSION + ", " + "JVM info=" + VM_INFO ; // Add the API level if it ' s an Android platform if ( ANDROID_VERSION != 0 ) { desc += ", API level=" + ANDROID_VERSION ; } desc += ")" ; return desc ;
public class Publishers { /** * Is the given object a Publisher or convertible to a publisher . * @ param object The object * @ return True if it is */ public static boolean isConvertibleToPublisher ( Object object ) { } }
if ( object == null ) { return false ; } if ( object instanceof Publisher ) { return true ; } else { return isConvertibleToPublisher ( object . getClass ( ) ) ; }
public class MsgPackOutput {
  /**
   * Writes a MessagePack long, choosing the smallest encoding family that the
   * comparisons below select.
   *
   * NOTE(review): the strict '<' comparisons against 0xFF / 0xFFFF / 0xFFFFFFFF
   * mean the exact boundary values (255, 65535, 4294967295) are encoded in the
   * next-larger form — still decodable MessagePack, but not minimal; confirm
   * this is intentional before changing, as it alters the wire bytes.
   *
   * @param value the value
   * @throws IOException if an error occurs
   */
  void writeLong(long value) throws IOException {
    if (value < MIN_FIX_INT) {
      // large negative: pick the smallest signed-int family that fits.
      if (value >= Byte.MIN_VALUE) {
        output.writeByte(SINT_8);
        output.writeByte((byte) value);
      } else if (value >= Short.MIN_VALUE) {
        output.writeByte(SINT_16);
        output.writeShort((short) value);
      } else if (value >= Integer.MIN_VALUE) {
        output.writeByte(SINT_32);
        output.writeInt((int) value);
      } else {
        output.writeByte(SINT_64);
        output.writeLong(value);
      }
    } else if (value < MAX_FIX_INT) {
      // in range -64 to 127: single-byte fixint encoding.
      output.writeByte((byte) value);
    } else {
      // large positive: pick the smallest unsigned-int family that fits.
      if (value < 0xFF) {
        output.writeByte(UINT_8);
        output.writeByte((byte) value);
      } else if (value < 0xFFFF) {
        output.writeByte(UINT_16);
        output.writeShort((short) value);
      } else if (value < 0xFFFFFFFFL) {
        output.writeByte(UINT_32);
        output.writeInt((int) value);
      } else {
        output.writeByte(UINT_64);
        output.writeLong(value);
      }
    }
  }
}
public class CPDefinitionOptionValueRelUtil {
  /**
   * Returns the cp definition option value rel where uuid = &#63; and groupId = &#63; or returns
   * <code>null</code> if it could not be found, optionally using the finder cache.
   *
   * @param uuid the uuid
   * @param groupId the group ID
   * @param retrieveFromCache whether to retrieve from the finder cache
   * @return the matching cp definition option value rel, or <code>null</code> if a matching cp
   *         definition option value rel could not be found
   */
  public static CPDefinitionOptionValueRel fetchByUUID_G(String uuid, long groupId,
      boolean retrieveFromCache) {
    // Thin static facade over the service-registered persistence instance.
    return getPersistence().fetchByUUID_G(uuid, groupId, retrieveFromCache);
  }
}
public class CPDefinitionGroupedEntryPersistenceImpl {
  /**
   * Caches the cp definition grouped entry in the entity cache if it is enabled.
   *
   * Also primes the finder cache for the unique-key lookups (uuid+groupId and
   * cpDefinitionId+entryCProductId) so subsequent fetches hit the cache.
   *
   * @param cpDefinitionGroupedEntry the cp definition grouped entry
   */
  @Override
  public void cacheResult(CPDefinitionGroupedEntry cpDefinitionGroupedEntry) {
    // Primary-key entry in the entity cache.
    entityCache.putResult(CPDefinitionGroupedEntryModelImpl.ENTITY_CACHE_ENABLED,
        CPDefinitionGroupedEntryImpl.class, cpDefinitionGroupedEntry.getPrimaryKey(),
        cpDefinitionGroupedEntry);
    // Unique finder: uuid + groupId.
    finderCache.putResult(FINDER_PATH_FETCH_BY_UUID_G,
        new Object[] {cpDefinitionGroupedEntry.getUuid(), cpDefinitionGroupedEntry.getGroupId()},
        cpDefinitionGroupedEntry);
    // Unique finder: cpDefinitionId + entryCProductId.
    finderCache.putResult(FINDER_PATH_FETCH_BY_C_E,
        new Object[] {cpDefinitionGroupedEntry.getCPDefinitionId(),
            cpDefinitionGroupedEntry.getEntryCProductId()},
        cpDefinitionGroupedEntry);
    // Mark the cached state as the baseline for dirty checking.
    cpDefinitionGroupedEntry.resetOriginalValues();
  }
}
public class XmlUtils { /** * Parses an XML document from a file . * @ param file the file * @ return the parsed DOM * @ throws SAXException if the XML is not valid * @ throws IOException if there is a problem reading from the file */ public static Document toDocument ( File file ) throws SAXException , IOException { } }
InputStream in = new BufferedInputStream ( new FileInputStream ( file ) ) ; try { return XmlUtils . toDocument ( in ) ; } finally { in . close ( ) ; }
public class SVGParser {
  /**
   * Parse a font family list.
   *
   * Accepts a comma-separated list where each item may be quoted or a bare
   * token (which may itself contain whitespace).
   *
   * @param val the raw attribute value
   * @return the parsed family names in order, or null if no item was found
   */
  private static List<String> parseFontFamily(String val) {
    List<String> fonts = null;
    TextScanner scan = new TextScanner(val);
    while (true) {
      // Prefer a quoted name; otherwise take everything up to the next comma.
      String item = scan.nextQuotedString();
      if (item == null)
        item = scan.nextTokenWithWhitespace(',');
      if (item == null)
        break;
      // Lazily allocate the result list so an empty input returns null.
      if (fonts == null)
        fonts = new ArrayList<>();
      fonts.add(item);
      scan.skipCommaWhitespace();
      if (scan.empty())
        break;
    }
    return fonts;
  }
}
public class SimpleMeasureManager { /** * Provides the keys of all the { @ link Measure } s which are supported by this * { @ link SimpleMeasureManager } . If a key is provided , then at least one * version is available through { @ link # getPullMeasure ( Object ) } or * { @ link # getPushMeasure ( Object ) } . */ @ Override public Collection < Object > getMeasureKeys ( ) { } }
HashSet < Object > keys = new HashSet < > ( ) ; keys . addAll ( pullers . keySet ( ) ) ; keys . addAll ( pushers . keySet ( ) ) ; return keys ;
public class RecurlyClient { /** * Redeem a Gift Card * @ param redemptionCode The redemption code the { @ link GiftCard } * @ param accountCode The account code for the { @ link Account } * @ return The updated { @ link GiftCard } object as identified by the passed in id */ public GiftCard redeemGiftCard ( final String redemptionCode , final String accountCode ) { } }
final GiftCard . Redemption redemptionData = GiftCard . createRedemption ( accountCode ) ; final String url = GiftCards . GIFT_CARDS_RESOURCE + "/" + redemptionCode + "/redeem" ; return doPOST ( url , redemptionData , GiftCard . class ) ;
public class InnerRankUpdate_DDRB {
  /**
   * Rank N update function for a symmetric inner submatrix and only operates on the lower
   * triangular portion of the submatrix.<br>
   * <br>
   * A = A - B*B<sup>T</sup><br>
   *
   * @param blockLength size of the inner blocks in the block matrix format
   * @param A symmetric submatrix being updated in place (lower triangle only)
   * @param B submatrix whose outer product is subtracted; its width must not
   *        exceed blockLength and its height must match A's dimensions
   */
  public static void symmRankNMinus_L(int blockLength, DSubmatrixD1 A, DSubmatrixD1 B) {
    int widthB = B.col1 - B.col0;
    if (widthB > blockLength)
      throw new IllegalArgumentException("Width of B cannot be greater than the block length");
    int N = B.row1 - B.row0;
    if (A.col1 - A.col0 != N)
      throw new IllegalArgumentException(
          "A does not have the expected number of columns based on B's height");
    if (A.row1 - A.row0 != N)
      throw new IllegalArgumentException(
          "A does not have the expected number of rows based on B's height");
    // Iterate over the block rows of B; each (i, j) pair updates one block of
    // A's lower triangle with -B_i * B_j^T. Index arithmetic follows the
    // row-major block storage layout of the original matrices.
    for (int i = B.row0; i < B.row1; i += blockLength) {
      int heightB_i = Math.min(blockLength, B.row1 - i);
      int indexB_i = i * B.original.numCols + heightB_i * B.col0;
      int rowA = i - B.row0 + A.row0;
      int heightA = Math.min(blockLength, A.row1 - rowA);
      // Only j <= i: the strictly-upper blocks are never touched.
      for (int j = B.row0; j <= i; j += blockLength) {
        int widthB_j = Math.min(blockLength, B.row1 - j);
        int indexA = rowA * A.original.numCols + (j - B.row0 + A.col0) * heightA;
        int indexB_j = j * B.original.numCols + widthB_j * B.col0;
        if (i == j) {
          // Diagonal block: specialized kernel that writes only the lower
          // triangle of the block.
          multTransBBlockMinus_L(B.original.data, A.original.data, indexB_i, indexB_j, indexA,
              widthB, heightB_i, widthB_j);
        } else {
          // Off-diagonal block: full block update.
          multTransBBlockMinus(B.original.data, A.original.data, indexB_i, indexB_j, indexA,
              widthB, heightB_i, widthB_j);
        }
      }
    }
  }
}
public class CreateSubscriptionDefinitionRequest { /** * Tag ( s ) to add to the new resource * @ param tags * Tag ( s ) to add to the new resource * @ return Returns a reference to this object so that method calls can be chained together . */ public CreateSubscriptionDefinitionRequest withTags ( java . util . Map < String , String > tags ) { } }
setTags ( tags ) ; return this ;