signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ListConstraintsForPortfolioResult { /** * Information about the constraints . * @ param constraintDetails * Information about the constraints . */ public void setConstraintDetails ( java . util . Collection < ConstraintDetail > constraintDetails ) { } }
if ( constraintDetails == null ) { this . constraintDetails = null ; return ; } this . constraintDetails = new java . util . ArrayList < ConstraintDetail > ( constraintDetails ) ;
public class Similarity { /** * Returns the Tanimoto Coefficient of the two { @ code IntegerVector } * instances * @ throws IllegalArgumentException when the length of the two vectors are * not the same . */ @ SuppressWarnings ( "unchecked" ) public static double tanimotoCoefficient ( IntegerVector a , IntegerVector b ) { } }
check ( a , b ) ; // IMPLEMENTATION NOTE : The Tanimoto coefficient uses the squart of the // vector magnitudes , which we could compute by just summing the square // of the vector values . This would save a . sqrt ( ) call from the // . magnitude ( ) call . However , we expect that this method might be // called multiple times . Therefore , we square the . magnitude ( ) which // should only be two multiplications instaned of | nz | multiplications // on the second call ( assuming the vector instances cache their // magnitude , which almost all do ) . double aMagnitude = a . magnitude ( ) ; double bMagnitude = b . magnitude ( ) ; if ( aMagnitude == 0 || bMagnitude == 0 ) return 0 ; int dotProduct = VectorMath . dotProduct ( a , b ) ; double aMagSq = aMagnitude * aMagnitude ; double bMagSq = bMagnitude * bMagnitude ; return dotProduct / ( aMagSq + bMagSq - dotProduct ) ;
public class Utils { /** * Test whether a class is a subclass of another class . * @ param t1 the candidate superclass . * @ param t2 the target * @ return true if t1 is a superclass of t2. */ public boolean isSubclassOf ( TypeElement t1 , TypeElement t2 ) { } }
return typeUtils . isSubtype ( t1 . asType ( ) , t2 . asType ( ) ) ;
public class ConvolveImage { /** * Performs a 2D convolution across the image . * @ param input The original image . Not modified . * @ param output Where the resulting image is written to . Modified . * @ param kernel The kernel that is being convolved . Not modified . * @ param border How the image borders are handled . */ public static void convolve ( Kernel2D_S32 kernel , InterleavedU8 input , InterleavedI16 output , ImageBorder_IL_S32 < InterleavedU8 > border ) { } }
InputSanityCheck . checkSameShapeB ( input , output ) ; boolean processed = BOverrideConvolveImage . invokeNativeConvolve ( kernel , input , output , border ) ; if ( ! processed ) { border . setImage ( input ) ; ConvolveImageNoBorder . convolve ( kernel , input , output ) ; ConvolveJustBorder_General_IL . convolve ( kernel , border , output ) ; }
public class ProcedureRunner { /** * This function returns the ExecutionEngine for this site . * ProcedureRunner needs the access to the ExecutionEngine for two purposes : * - Telling the C + + side to turn on / off the per - fragment time measurement . * - Collecting the time measurements and the failure indicator from * the per - fragment stats shared buffer . * The ExecutionEngine may not have been initialized when a ProcedureRunner is created , * ( See BaseInitiator . java , the configureCommon ( ) function ) * so we do not find this engine in the constructor . */ public ExecutionEngine getExecutionEngine ( ) { } }
if ( m_ee != null ) { return m_ee ; } // m _ site is declared as SiteProcedureConnection here . // SiteProcedureConnection only has two implementations : Site and MpRoSite . // Only SP site has an underlying ExecutionEngine , MpRoSite does not . if ( m_site instanceof Site ) { m_ee = ( ( Site ) m_site ) . getExecutionEngine ( ) ; return m_ee ; } return null ;
public class Configuration { /** * Generate a UID or retrieve the latest if it is valid depending the context given * by the category , project name and project version * @ param category The category * @ param projectName The project name * @ param projectVersion The project version * @ param force Force the generation of a new UID * @ return The valid UID */ public String getUid ( String category , String projectName , String projectVersion , boolean force ) { } }
String uid = null ; if ( ! force ) { uid = readUid ( new File ( uidDirectory , "latest" ) ) ; } // Check if the UID was already used for the ROX client and projec / version if ( uid != null && uidAlreadyUsed ( category , projectName , uid ) ) { uid = null ; } // Generate UID and store it if ( uid == null ) { uid = generateUid ( ) ; writeUid ( new File ( uidDirectory , "latest" ) , uid ) ; } writeUid ( getUidFile ( category , projectName , projectVersion ) , uid ) ; return uid ;
public class SecretKeyFactory { /** * Returns a < code > SecretKeyFactory < / code > object that converts * secret keys of the specified algorithm . * < p > A new SecretKeyFactory object encapsulating the * SecretKeyFactorySpi implementation from the specified provider * is returned . The specified provider must be registered * in the security provider list . * < p > Note that the list of registered providers may be retrieved via * the { @ link Security # getProviders ( ) Security . getProviders ( ) } method . * @ param algorithm the standard name of the requested secret - key * algorithm . * See the SecretKeyFactory section in the < a href = * " { @ docRoot } openjdk - redirect . html ? v = 8 & path = / technotes / guides / security / StandardNames . html # SecretKeyFactory " > * Java Cryptography Architecture Standard Algorithm Name Documentation < / a > * for information about standard algorithm names . * @ param provider the name of the provider . * @ return the new < code > SecretKeyFactory < / code > object . * @ exception NoSuchAlgorithmException if a SecretKeyFactorySpi * implementation for the specified algorithm is not * available from the specified provider . * @ exception NullPointerException if the specified algorithm * is null . * @ throws NoSuchProviderException if the specified provider is not * registered in the security provider list . * @ exception IllegalArgumentException if the < code > provider < / code > * is null or empty . * @ see java . security . Provider */ public static final SecretKeyFactory getInstance ( String algorithm , String provider ) throws NoSuchAlgorithmException , NoSuchProviderException { } }
Instance instance = JceSecurity . getInstance ( "SecretKeyFactory" , SecretKeyFactorySpi . class , algorithm , provider ) ; return new SecretKeyFactory ( ( SecretKeyFactorySpi ) instance . impl , instance . provider , algorithm ) ;
public class TypePoolInfoMap { /** * Put the value with the appropriate keys * @ param type Resource type * @ param poolInfo Pool info * @ param value Value to put * @ return Previous value , can be null if wasn ' t set */ public V put ( ResourceType type , PoolInfo poolInfo , V value ) { } }
Map < PoolInfo , V > poolInfoMap = typePoolInfoMap . get ( type ) ; if ( poolInfoMap == null ) { poolInfoMap = new HashMap < PoolInfo , V > ( ) ; typePoolInfoMap . put ( type , poolInfoMap ) ; } return poolInfoMap . put ( poolInfo , value ) ;
public class AbstractQueuedSynchronizer {

    /**
     * Acquires in shared timed mode.
     *
     * @param arg the acquire argument
     * @param nanosTimeout max wait time
     * @return {@code true} if acquired, {@code false} on timeout
     * @throws InterruptedException if the current thread is interrupted
     */
    private boolean doAcquireSharedNanos(int arg, long nanosTimeout)
            throws InterruptedException {
        // Non-positive timeout: fail immediately without enqueuing.
        if (nanosTimeout <= 0L)
            return false;
        // Absolute deadline; recomputed remaining time drives parking below.
        final long deadline = System.nanoTime() + nanosTimeout;
        final Node node = addWaiter(Node.SHARED);
        try {
            for (;;) {
                final Node p = node.predecessor();
                if (p == head) {
                    // At the front of the queue: try the actual acquire.
                    int r = tryAcquireShared(arg);
                    if (r >= 0) {
                        // Success: become head and propagate to other shared waiters.
                        setHeadAndPropagate(node, r);
                        p.next = null; // help GC
                        return true;
                    }
                }
                nanosTimeout = deadline - System.nanoTime();
                if (nanosTimeout <= 0L) {
                    // Deadline passed: unlink this node and report timeout.
                    cancelAcquire(node);
                    return false;
                }
                // Only park for waits longer than the spin threshold; very
                // short remaining times are cheaper to busy-retry.
                if (shouldParkAfterFailedAcquire(p, node) &&
                        nanosTimeout > SPIN_FOR_TIMEOUT_THRESHOLD)
                    LockSupport.parkNanos(this, nanosTimeout);
                if (Thread.interrupted())
                    throw new InterruptedException();
            }
        } catch (Throwable t) {
            // Any failure (including the interrupt above) must clean up the
            // queued node before propagating.
            cancelAcquire(node);
            throw t;
        }
    }
}
public class Utility { /** * Get this item from the map and convert it to the target class . * Convert this object to an formatted string . * @ param map The map to pull the param from . * @ param strKey The property key . * @ param classData The java class to convert the data to . * @ param objDefault The default value . * @ return The propety value in the correct class . */ public static String getAsFormattedString ( Map < String , Object > map , String strKey , Class < ? > classData , Object objDefault ) { } }
Object objData = map . get ( strKey ) ; try { return Converter . formatObjectToString ( objData , classData , objDefault ) ; } catch ( Exception ex ) { return null ; }
public class CQJDBCStorageConnection { /** * This method is similar to { @ link CQJDBCStorageConnection # getChildNodesData ( NodeData , List ) } except that if the * QPathEntryFilter is an exact name the method { @ link JDBCStorageConnection # getItemData ( NodeData , QPathEntry , ItemType ) } * will be called instead . */ protected List < NodeData > getDirectChildNodesData ( NodeData parent , List < QPathEntryFilter > itemDataFilters ) throws RepositoryException , IllegalStateException { } }
checkIfOpened ( ) ; if ( itemDataFilters . isEmpty ( ) ) { return new ArrayList < NodeData > ( ) ; } List < NodeData > children = new ArrayList < NodeData > ( ) ; for ( Iterator < QPathEntryFilter > it = itemDataFilters . iterator ( ) ; it . hasNext ( ) ; ) { QPathEntryFilter filter = it . next ( ) ; if ( filter . isExactName ( ) ) { NodeData data = ( NodeData ) getItemData ( parent , filter . getQPathEntry ( ) , ItemType . NODE ) ; if ( data != null ) { children . add ( data ) ; } it . remove ( ) ; } } if ( ! itemDataFilters . isEmpty ( ) ) { children . addAll ( getChildNodesDataInternal ( parent , itemDataFilters ) ) ; } return children ;
public class TimeSpanFormatter { /** * / * [ deutsch ] * < p > Erzeugt eine textuelle Ausgabe der angegebenen Dauer und * schreibt sie in den Puffer . < / p > * @ param durationduration object * @ param buffer I / O - buffer where the result is written to * @ throwsIllegalArgumentException if some aspects of duration * prevents printing ( for example too many nanoseconds ) * @ throws IOException if writing into buffer fails */ public void print ( TimeSpan < ? super U > duration , Appendable buffer ) throws IOException { } }
for ( FormatItem < U > item : this . items ) { if ( item == OrItem . INSTANCE ) { break ; } item . print ( duration , buffer ) ; }
public class StorageAccountsInner { /** * Lists the access keys for the specified storage account . * @ param resourceGroupName The name of the resource group within the user ' s subscription . The name is case insensitive . * @ param accountName The name of the storage account within the specified resource group . Storage account names must be between 3 and 24 characters in length and use numbers and lower - case letters only . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < StorageAccountListKeysResultInner > listKeysAsync ( String resourceGroupName , String accountName , final ServiceCallback < StorageAccountListKeysResultInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( listKeysWithServiceResponseAsync ( resourceGroupName , accountName ) , serviceCallback ) ;
public class Matth { /** * Returns { @ code true } if { @ code x } represents a mathematical integer . * < p > This is equivalent to , but not necessarily implemented as , the expression { @ code * ! Double . isNaN ( x ) & & ! Double . isInfinite ( x ) & & x = = Math . rint ( x ) } . */ public static boolean isMathematicalInteger ( double x ) { } }
return isFinite ( x ) && ( x == 0.0 || SIGNIFICAND_BITS - Long . numberOfTrailingZeros ( getSignificand ( x ) ) <= getExponent ( x ) ) ;
public class SSLChannelProvider { /** * DS method for deactivating this component . * @ param context */ protected synchronized void deactivate ( ComponentContext context ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( this , tc , "Deactivating" ) ; } // Unregister all services that we registered . while ( ! sslOptions . isEmpty ( ) ) { Iterator < Map . Entry < String , SSLChannelOptions > > i = sslOptions . entrySet ( ) . iterator ( ) ; while ( i . hasNext ( ) ) { Map . Entry < String , SSLChannelOptions > entry = i . next ( ) ; entry . getValue ( ) . unregister ( ) ; i . remove ( ) ; } } sslConfigs . deactivate ( context ) ; cfwBundle . getFramework ( ) . deregisterFactories ( this ) ; bContext = null ; // clear the instance if we haven ' t been replaced . instance . compareAndSet ( this , null ) ;
public class SimpleResponseManager { @ Override public void write ( String text , String contentType ) { } }
assertArgumentNotNull ( "text" , text ) ; assertArgumentNotNull ( "contentType" , contentType ) ; doWrite ( text , contentType ) ;
public class Widget { /** * update widget state * @ param delta time elapsed in seconds */ public void update ( float delta ) { } }
background . update ( delta ) ; focusedBackground . update ( delta ) ; leftBorder . update ( delta ) ; rightBorder . update ( delta ) ; topBorder . update ( delta ) ; bottomBorder . update ( delta ) ; for ( Widget child : children ) { child . update ( delta ) ; }
public class LoggingToolFactory { /** * Dynamically create a { @ link ILoggingTool } for the given * < code > sourceClass < / code > . * @ param sourceClass Class for which the { @ link ILoggingTool } should be * constructed . * @ return An { @ link ILoggingTool } implementation . */ public static ILoggingTool createLoggingTool ( Class < ? > sourceClass ) { } }
ILoggingTool tool = null ; // first attempt the user set ILoggingTool if ( userSetILoggerTool != null ) { tool = instantiateWithCreateMethod ( sourceClass , userSetILoggerTool ) ; } if ( tool == null ) { tool = initializeLoggingTool ( sourceClass , DEFAULT_LOGGING_TOOL_CLASS ) ; } if ( tool == null ) { tool = initializeLoggingTool ( sourceClass , STDOUT_LOGGING_TOOL_CLASS ) ; } return tool ;
public class SystemViewStreamExporter {

    /**
     * Closes the XML element opened for {@code node}; additionally, when
     * child-version-history export is enabled and the node is of type
     * nt:versionedChild, inlines the referenced version history (once per
     * history id) before closing the element.
     *
     * @see org.exoplatform.services.jcr.dataflow.ItemDataTraversingVisitor#leaving(org.exoplatform.services.jcr.datamodel.NodeData, int)
     */
    @Override
    protected void leaving(NodeData node, int level) throws RepositoryException {
        try {
            if (exportChildVersionHistory && node.getPrimaryTypeName().equals(Constants.NT_VERSIONEDCHILD)) {
                try {
                    // The jcr:childVersionHistory property holds the id of the
                    // version history referenced by this versioned child.
                    PropertyData childVersionHistory =
                        ((PropertyData) dataManager.getItemData(node,
                            new QPathEntry(Constants.JCR_CHILDVERSIONHISTORY, 1), ItemType.PROPERTY));
                    String childVersionHistoryId =
                        getValueAsStringForExport(childVersionHistory.getValues().get(0),
                            childVersionHistory.getType());
                    // Export each version history at most once.
                    if (!exportedVersionHistories.contains(childVersionHistoryId)) {
                        writer.writeStartElement(Constants.NS_SV_PREFIX, Constants.SV_VERSION_HISTORY,
                            getSvNamespaceUri());
                        writer.writeAttribute(Constants.NS_SV_PREFIX, getSvNamespaceUri(), Constants.SV_NAME,
                            childVersionHistoryId);
                        // Resolve the version history node under the version
                        // storage and export its whole subtree via this visitor.
                        NodeData versionStorage =
                            (NodeData) dataManager.getItemData(Constants.VERSIONSTORAGE_UUID);
                        NodeData childVersionNodeData =
                            (NodeData) dataManager.getItemData(versionStorage,
                                new QPathEntry("", childVersionHistoryId, 1), ItemType.NODE);
                        childVersionNodeData.accept(this);
                        writer.writeEndElement();
                        exportedVersionHistories.add(childVersionHistoryId);
                    }
                } catch (IOException e) {
                    throw new RepositoryException("Can't export versioned child version history: "
                        + e.getMessage(), e);
                }
            }
            // Close the element that was opened when entering this node.
            writer.writeEndElement();
        } catch (XMLStreamException e) {
            throw new RepositoryException(e);
        }
    }
}
public class LeftistHeap { /** * Top - down union of two leftist heaps with comparator . * @ param root1 * the root of the first heap * @ param root2 * the root of the right heap * @ return the new root of the merged heap */ @ Override protected Node < K , V > unionWithComparator ( Node < K , V > root1 , Node < K , V > root2 ) { } }
if ( root1 == null ) { return root2 ; } else if ( root2 == null ) { return root1 ; } Node < K , V > newRoot ; Deque < LeftistNode < K , V > > path = new LinkedList < LeftistNode < K , V > > ( ) ; // find initial int c = comparator . compare ( root1 . key , root2 . key ) ; if ( c <= 0 ) { newRoot = root1 ; root1 = unlinkRightChild ( root1 ) ; } else { newRoot = root2 ; root2 = unlinkRightChild ( root2 ) ; } Node < K , V > cur = newRoot ; path . push ( ( LeftistNode < K , V > ) cur ) ; // merge while ( root1 != null && root2 != null ) { c = comparator . compare ( root1 . key , root2 . key ) ; if ( c <= 0 ) { // link as right child of cur if ( cur . o_c == null ) { cur . o_c = root1 ; } else { cur . o_c . y_s = root1 ; } root1 . y_s = cur ; cur = root1 ; path . push ( ( LeftistNode < K , V > ) cur ) ; root1 = unlinkRightChild ( root1 ) ; } else { // link as right child of cur if ( cur . o_c == null ) { cur . o_c = root2 ; } else { cur . o_c . y_s = root2 ; } root2 . y_s = cur ; cur = root2 ; path . push ( ( LeftistNode < K , V > ) cur ) ; root2 = unlinkRightChild ( root2 ) ; } } if ( root1 != null ) { // link as right child of cur if ( cur . o_c == null ) { cur . o_c = root1 ; } else { cur . o_c . y_s = root1 ; } root1 . y_s = cur ; } if ( root2 != null ) { // link as right child of cur if ( cur . o_c == null ) { cur . o_c = root2 ; } else { cur . o_c . y_s = root2 ; } root2 . y_s = cur ; } /* * Traverse path upwards , update null path length and swap if needed . */ while ( ! path . isEmpty ( ) ) { LeftistNode < K , V > n = path . pop ( ) ; if ( n . o_c != null ) { // at least on child LeftistNode < K , V > nLeft = ( LeftistNode < K , V > ) n . o_c ; int nplLeft = nLeft . npl ; int nplRight = - 1 ; if ( nLeft . y_s != n ) { // two children LeftistNode < K , V > nRight = ( LeftistNode < K , V > ) nLeft . y_s ; nplRight = nRight . npl ; } n . npl = 1 + Math . 
min ( nplLeft , nplRight ) ; if ( nplLeft < nplRight ) { // swap swapChildren ( n ) ; } } else { // no children n . npl = 0 ; } } return newRoot ;
public class Clock {

    /**
     * Defines the custom font that can be used to render all text elements.
     * To enable the custom font one has to set customFontEnabled = true.
     *
     * @param FONT the font to use for all text elements
     */
    public void setCustomFont(final Font FONT) {
        if (null == customFont) {
            // Property wrapper not created yet: store the value in the backing
            // field and fire a resize so the new font takes effect.
            // NOTE(review): presumably `customFont` is a lazily-created
            // property whose listener fires the update event itself, which is
            // why only this branch calls fireUpdateEvent — confirm against the
            // customFontProperty() accessor.
            _customFont = FONT;
            fireUpdateEvent(RESIZE_EVENT);
        } else {
            customFont.set(FONT);
        }
    }
}
public class DSLMappingFile { /** * Saves current mapping into a DSL mapping file * @ param out * @ throws IOException */ public void saveMapping ( final Writer out ) throws IOException { } }
for ( final Iterator it = this . mapping . getEntries ( ) . iterator ( ) ; it . hasNext ( ) ; ) { out . write ( it . next ( ) . toString ( ) ) ; out . write ( "\n" ) ; }
public class CmsWorkplace { /** * Returns the message String for the broadcast message alert of the workplace . < p > * Caution : returns the pure message String ( not escaped ) or null , if no message is pending . < p > * @ return the message String for the broadcast message alert of the workplace */ public String getBroadcastMessageString ( ) { } }
CmsSessionInfo sessionInfo = OpenCms . getSessionManager ( ) . getSessionInfo ( getSession ( ) ) ; if ( sessionInfo == null ) { return null ; } String sessionId = sessionInfo . getSessionId ( ) . toString ( ) ; Buffer messageQueue = OpenCms . getSessionManager ( ) . getBroadcastQueue ( sessionId ) ; if ( ! messageQueue . isEmpty ( ) ) { // create message String StringBuffer result = new StringBuffer ( 512 ) ; // the user has pending messages , display them all while ( ! messageQueue . isEmpty ( ) ) { CmsBroadcast message = ( CmsBroadcast ) messageQueue . remove ( ) ; result . append ( '[' ) ; result . append ( getMessages ( ) . getDateTime ( message . getSendTime ( ) ) ) ; result . append ( "] " ) ; result . append ( key ( Messages . GUI_LABEL_BROADCASTMESSAGEFROM_0 ) ) ; result . append ( ' ' ) ; if ( message . getUser ( ) != null ) { result . append ( message . getUser ( ) . getName ( ) ) ; } else { // system message result . append ( key ( Messages . GUI_LABEL_BROADCAST_FROM_SYSTEM_0 ) ) ; } result . append ( ":\n" ) ; result . append ( message . getMessage ( ) ) ; result . append ( "\n\n" ) ; } return result . toString ( ) ; } // no message pending , return null return null ;
public class RoutesModule {

    /**
     * Adds Routes to the Router respecting exclusion rules.
     * Also wraps RouteHandlers with Metrics handlers and sets Route names.
     *
     * <p>Consumes {@code routeRegistrations}: every registration is removed
     * from the list as it is processed, whether or not it is ultimately added
     * to the router.
     */
    private void compileRoutes() {
        Iterator<RouteRegistration> iterator = routeRegistrations.iterator();
        while (iterator.hasNext()) {
            RouteRegistration routeRegistration = iterator.next();
            // Remove up-front so excluded registrations are also consumed.
            iterator.remove();

            // Enforce mode requirements specified for the Route.
            if (routeRegistration.getModes() != null) {
                if (!routeRegistration.getModes().contains(settings.getMode())) {
                    log.debug("Excluding {} '{}' because {} is not specified in mode set {}",
                        routeRegistration.getRequestMethod(), routeRegistration.getUriPattern(),
                        settings.getMode(), routeRegistration.getModes());
                    continue;
                }
            }

            // Enforce annotated requirements on Controllers.
            if (routeRegistration.getRouteHandler() instanceof ControllerHandler) {
                // Enforce RequireUtil rules for the controller method.
                ControllerHandler controllerHandler = (ControllerHandler) routeRegistration.getRouteHandler();
                if (!RequireUtil.allowMethod(settings, controllerHandler.getControllerMethod())) {
                    continue;
                }
                // Name controller routes after their controller method.
                if (Strings.isNullOrEmpty(routeRegistration.getName())) {
                    routeRegistration.setName(Util.toString(controllerHandler.getControllerMethod()));
                }
            }
            // Automatically name the Route if a name is not specified.
            else if (Strings.isNullOrEmpty(routeRegistration.getName())) {
                Class<? extends RouteHandler> routeHandlerClass = routeRegistration.getRouteHandler().getClass();
                if (routeHandlerClass.isSynthetic()) {
                    // Lambdas compile to synthetic classes.
                    routeRegistration.setName("lambda handler");
                } else if (routeHandlerClass.isAnonymousClass()) {
                    routeRegistration.setName("anonymous handler");
                } else {
                    routeRegistration.setName(routeHandlerClass.getName());
                }
            }

            // Wrap any Route designated to collect Metrics. The flags are
            // checked in priority order: metered, then timed, then counted.
            RouteHandler routeHandler;
            if (routeRegistration.isMetered()) {
                log.debug("Wrapping {} '{}' handler with {}", routeRegistration.getRequestMethod(),
                    routeRegistration.getUriPattern(), MeteredRouteHandler.class.getSimpleName());
                routeHandler = new MeteredRouteHandler(routeRegistration.getMetricName(),
                    routeRegistration.getRouteHandler(), metricRegistry);
            } else if (routeRegistration.isTimed()) {
                log.debug("Wrapping {} '{}' handler with {}", routeRegistration.getRequestMethod(),
                    routeRegistration.getUriPattern(), TimedRouteHandler.class.getSimpleName());
                routeHandler = new TimedRouteHandler(routeRegistration.getMetricName(),
                    routeRegistration.getRouteHandler(), metricRegistry);
            } else if (routeRegistration.isCounted()) {
                log.debug("Wrapping {} '{}' handler with {}", routeRegistration.getRequestMethod(),
                    routeRegistration.getUriPattern(), CountedRouteHandler.class.getSimpleName());
                routeHandler = new CountedRouteHandler(routeRegistration.getMetricName(), false,
                    routeRegistration.getRouteHandler(), metricRegistry);
            } else {
                routeHandler = routeRegistration.getRouteHandler();
            }

            // Finally build the Route and register it with the Router.
            Route route = new Route(routeRegistration.getRequestMethod(),
                routeRegistration.getUriPattern(), routeHandler);
            route.setName(routeRegistration.getName());
            if (routeRegistration.isRunAsFinally()) {
                route.runAsFinally();
            }
            router.addRoute(route);
        }
    }
}
public class JsonObject { /** * Returns the { @ link JsonArray } at the given key , or null if it does not exist or is the wrong * type . */ public Json . Array getArray ( String key ) { } }
return getArray ( key , ( Json . Array ) null ) ;
public class CreateBallCommand {

    /**
     * {@inheritDoc}
     *
     * <p>Registers a new ball for the event carried by the wave and positions
     * it: the application root ball is centered in the editor view, while any
     * other ball is bound to its source ball's current position.
     */
    @Override
    public void perform(final Wave wave) {
        final JRebirthEvent event = wave.get(EditorWaves.EVENT);
        LOGGER.trace("Process " + event.eventType() + " of type " + event.target());

        final EditorModel editorModel = getModel(EditorModel.class);
        final BallModel targetBallModel = getModel(BallModel.class, event);
        editorModel.registerBall(targetBallModel);

        if (editorModel.retrieveBall(targetBallModel.getEventModel().source()) == null) {
            // No source ball registered: this is the application node, so
            // center it. The "minus 70" compensates for the globalFacade node
            // that resides in the center; 24 is presumably half the ball size
            // — TODO confirm against the ball view's dimensions.
            targetBallModel.view().node().layoutXProperty().bind(
                editorModel.view().node().widthProperty().divide(2).subtract(70).subtract(24));
            targetBallModel.view().node().layoutYProperty().bind(
                editorModel.view().node().heightProperty().divide(2).subtract(24));
        } else {
            final BallModel sourceBallModel =
                editorModel.retrieveBall(targetBallModel.getEventModel().source());
            // All other nodes are positioned relative to their parent: bind
            // the new ball's layout to the source ball's layout + translation.
            targetBallModel.view().node().layoutXProperty().bind(
                sourceBallModel.view().node().layoutXProperty().add(
                    sourceBallModel.view().node().translateXProperty()));
            targetBallModel.view().node().layoutYProperty().bind(
                sourceBallModel.view().node().layoutYProperty().add(
                    sourceBallModel.view().node().translateYProperty()));
        }

        targetBallModel.show();
    }
}
public class SubTool { /** * Process a list of tokens to apply the stated policy to the tokens . * @ param tokens a list of tokens to process * @ param policy either to expand or concatenate tokens in the list * @ return a processed list of tokens */ public static List < String > process ( List < String > tokens , Policy policy ) { } }
if ( policy == Policy . CONCATENATE ) { return concatenate ( tokens ) ; } else { return expand ( tokens ) ; }
public class GroupApi { /** * Get a Pager of visible direct subgroups in this group . * < pre > < code > GitLab Endpoint : GET / groups / : id / subgroups < / code > < / pre > * @ param groupIdOrPath the group ID , path of the group , or a Group instance holding the group ID or path , required * @ param skipGroups skip the group IDs passed * @ param allAvailable show all the groups you have access to ( defaults to false for authenticated users ) * @ param search return the list of authorized groups matching the search criteria * @ param orderBy order groups by NAME or PATH . Default is NAME * @ param sortOrder order groups in ASC or DESC order . Default is ASC * @ param statistics include group statistics ( admins only ) * @ param owned limit to groups owned by the current user * @ param itemsPerPage the number of Group instances that will be fetched per page * @ return a Pager containing matching Group instances * @ throws GitLabApiException if any exception occurs * @ since GitLab 10.3.0 */ public Pager < Group > getSubGroups ( Object groupIdOrPath , List < Integer > skipGroups , Boolean allAvailable , String search , GroupOrderBy orderBy , SortOrder sortOrder , Boolean statistics , Boolean owned , int itemsPerPage ) throws GitLabApiException { } }
Form formData = new GitLabApiForm ( ) . withParam ( "skip_groups" , skipGroups ) . withParam ( "all_available" , allAvailable ) . withParam ( "search" , search ) . withParam ( "order_by" , orderBy ) . withParam ( "sort_order" , sortOrder ) . withParam ( "statistics" , statistics ) . withParam ( "owned" , owned ) ; return ( new Pager < Group > ( this , Group . class , itemsPerPage , formData . asMap ( ) , "groups" , getGroupIdOrPath ( groupIdOrPath ) , "subgroups" ) ) ;
public class AbstractJob { /** * Called when an exception of the specified type occurred * @ param t * The exception . Never < code > null < / code > . * @ param sJobClassName * The name of the job class * @ param aJob * The { @ link IJob } instance */ protected static void triggerCustomExceptionHandler ( @ Nonnull final Throwable t , @ Nullable final String sJobClassName , @ Nonnull final IJob aJob ) { } }
exceptionCallbacks ( ) . forEach ( x -> x . onScheduledJobException ( t , sJobClassName , aJob ) ) ;
public class WorkDir { /** * Returns the user directory from / tmp / */ public static PathImpl getTmpWorkDir ( ) { } }
String userName = System . getProperty ( "user.name" ) ; PathImpl path ; // Windows uses / temp as a work dir if ( com . caucho . v5 . util . CauchoUtil . isWindows ( ) && ! CurrentTime . isTest ( ) ) path = VfsOld . lookup ( "file:/c:/tmp/" + userName ) ; else path = VfsOld . lookup ( "file:/tmp/" + userName ) ; return path ;
public class GosuClassTransformer { /** * - - - - - Additions */ public boolean isBlockInvoke ( DynamicFunctionSymbol dfs ) { } }
return dfs . getDisplayName ( ) . equals ( BlockClass . INVOKE_METHOD_NAME ) && _context . compilingBlock ( ) ;
public class Trie2Writable {
    /**
     * Compact a build-time trie.
     *
     * The compaction
     *  - removes blocks that are identical with earlier ones
     *  - overlaps adjacent blocks as much as possible (if overlap == TRUE)
     *  - moves blocks in steps of the data granularity
     *  - moves and overlaps blocks that overlap with multiple values in the overlap region
     * It does not
     *  - try to move and overlap blocks that are not already adjacent
     *
     * Side effects: rewrites {@code data}, {@code index2}, {@code map},
     * {@code dataNullOffset} and {@code dataLength} in place. The statement
     * order is significant throughout; do not reorder.
     */
    private void compactData() {
        int start, newStart, movedStart;
        int blockLength, overlap;
        int i, mapIndex, blockCount;

        /* do not compact linear-ASCII data */
        // The first UTRIE2_DATA_START_OFFSET entries map to themselves.
        newStart = UTRIE2_DATA_START_OFFSET;
        for (start = 0, i = 0; start < newStart; start += UTRIE2_DATA_BLOCK_LENGTH, ++i) {
            map[i] = start;
        }

        /*
         * Start with a block length of 64 for 2-byte UTF-8,
         * then switch to UTRIE2_DATA_BLOCK_LENGTH.
         */
        blockLength = 64;
        blockCount = blockLength >> UTRIE2_SHIFT_2;
        for (start = newStart; start < dataLength;) {
            /*
             * start:    index of first entry of current block
             * newStart: index where the current block is to be moved
             *           (right after current end of already-compacted data)
             */
            // Past the 2-byte UTF-8 region, work one data block at a time.
            if (start == UNEWTRIE2_DATA_0800_OFFSET) {
                blockLength = UTRIE2_DATA_BLOCK_LENGTH;
                blockCount = 1;
            }

            /* skip blocks that are not used */
            if (map[start >> UTRIE2_SHIFT_2] <= 0) {
                /* advance start to the next block */
                start += blockLength;
                /* leave newStart with the previous block! */
                continue;
            }

            /* search for an identical block */
            movedStart = findSameDataBlock(newStart, start, blockLength);
            if (movedStart >= 0) {
                /* found an identical block, set the other block's index value for the current block */
                for (i = blockCount, mapIndex = start >> UTRIE2_SHIFT_2; i > 0; --i) {
                    map[mapIndex++] = movedStart;
                    movedStart += UTRIE2_DATA_BLOCK_LENGTH;
                }
                /* advance start to the next block */
                start += blockLength;
                /* leave newStart with the previous block! */
                continue;
            }

            /* see if the beginning of this block can be overlapped with the end of the previous block */
            /* look for maximum overlap (modulo granularity) with the previous, adjacent block */
            for (overlap = blockLength - UTRIE2_DATA_GRANULARITY;
                 overlap > 0 && !equal_int(data, (newStart - overlap), start, overlap);
                 overlap -= UTRIE2_DATA_GRANULARITY) {
            }

            if (overlap > 0 || newStart < start) {
                /* some overlap, or just move the whole block */
                movedStart = newStart - overlap;
                for (i = blockCount, mapIndex = start >> UTRIE2_SHIFT_2; i > 0; --i) {
                    map[mapIndex++] = movedStart;
                    movedStart += UTRIE2_DATA_BLOCK_LENGTH;
                }

                /* move the non-overlapping indexes to their new positions */
                start += overlap;
                for (i = blockLength - overlap; i > 0; --i) {
                    data[newStart++] = data[start++];
                }
            } else /* no overlap && newStart==start */ {
                // Block stays where it is; record identity mapping and catch up.
                for (i = blockCount, mapIndex = start >> UTRIE2_SHIFT_2; i > 0; --i) {
                    map[mapIndex++] = start;
                    start += UTRIE2_DATA_BLOCK_LENGTH;
                }
                newStart = start;
            }
        }

        /* now adjust the index-2 table */
        // Rewrite each index-2 entry through map[] so it points at the moved block.
        for (i = 0; i < index2Length; ++i) {
            if (i == UNEWTRIE2_INDEX_GAP_OFFSET) {
                /* Gap indexes are invalid (-1). Skip over the gap. */
                i += UNEWTRIE2_INDEX_GAP_LENGTH;
            }
            index2[i] = map[index2[i] >> UTRIE2_SHIFT_2];
        }
        dataNullOffset = map[dataNullOffset >> UTRIE2_SHIFT_2];

        /* ensure dataLength alignment */
        while ((newStart & (UTRIE2_DATA_GRANULARITY - 1)) != 0) {
            data[newStart++] = initialValue;
        }

        if (UTRIE2_DEBUG) {
            /* we saved some space */
            System.out.printf("compacting UTrie2: count of 32-bit data words %d->%d%n",
                    dataLength, newStart);
        }

        dataLength = newStart;
    }
}
public class EventProcessor { /** * Executes all the actions configured on all the event handlers triggered by the { @ link Message } on the queue * If any of the actions on an event handler fails due to a transient failure , the execution is not persisted such that it can be retried * @ return a list of { @ link EventExecution } that failed due to transient failures . */ private List < EventExecution > executeEvent ( String event , Message msg ) throws Exception { } }
List < EventHandler > eventHandlerList = metadataService . getEventHandlersForEvent ( event , true ) ; Object payloadObject = getPayloadObject ( msg . getPayload ( ) ) ; List < EventExecution > transientFailures = new ArrayList < > ( ) ; for ( EventHandler eventHandler : eventHandlerList ) { String condition = eventHandler . getCondition ( ) ; if ( StringUtils . isNotEmpty ( condition ) ) { logger . debug ( "Checking condition: {} for event: {}" , condition , event ) ; Boolean success = ScriptEvaluator . evalBool ( condition , jsonUtils . expand ( payloadObject ) ) ; if ( ! success ) { String id = msg . getId ( ) + "_" + 0 ; EventExecution eventExecution = new EventExecution ( id , msg . getId ( ) ) ; eventExecution . setCreated ( System . currentTimeMillis ( ) ) ; eventExecution . setEvent ( eventHandler . getEvent ( ) ) ; eventExecution . setName ( eventHandler . getName ( ) ) ; eventExecution . setStatus ( Status . SKIPPED ) ; eventExecution . getOutput ( ) . put ( "msg" , msg . getPayload ( ) ) ; eventExecution . getOutput ( ) . put ( "condition" , condition ) ; executionService . addEventExecution ( eventExecution ) ; logger . debug ( "Condition: {} not successful for event: {} with payload: {}" , condition , eventHandler . getEvent ( ) , msg . getPayload ( ) ) ; continue ; } } CompletableFuture < List < EventExecution > > future = executeActionsForEventHandler ( eventHandler , msg ) ; future . whenComplete ( ( result , error ) -> result . forEach ( eventExecution -> { if ( error != null || eventExecution . getStatus ( ) == Status . IN_PROGRESS ) { executionService . removeEventExecution ( eventExecution ) ; transientFailures . add ( eventExecution ) ; } else { executionService . updateEventExecution ( eventExecution ) ; } } ) ) . get ( ) ; } return transientFailures ;
public class BrokerAccessor { /** * For firing simple queries ( i . e non join queries ) . * @ param jsonQuery * @ param reqHeaders * @ param requiresMapping * @ return */ public Either < String , Either < Mapper4All , JSONArray > > fireQuery ( String jsonQuery , Map < String , String > reqHeaders , boolean requiresMapping ) { } }
CloseableHttpResponse resp = null ; String respStr ; String url = format ( brokerUrl , brokerHost , brokerPort ) ; try { resp = postJson ( url , jsonQuery , reqHeaders ) ; respStr = IOUtils . toString ( resp . getEntity ( ) . getContent ( ) ) ; } catch ( IOException ex ) { return new Left < > ( format ( "Http %s, faced exception %s\n" , resp , ex ) ) ; } finally { returnClient ( resp ) ; } JSONArray possibleResArray = null ; try { possibleResArray = new JSONArray ( respStr ) ; } catch ( JSONException je ) { return new Left < > ( format ( "Recieved data %s not in json format. \n" , respStr ) ) ; } if ( requiresMapping ) { return new Right < String , Either < Mapper4All , JSONArray > > ( new Left < Mapper4All , JSONArray > ( new Mapper4All ( possibleResArray ) ) ) ; } return new Right < String , Either < Mapper4All , JSONArray > > ( new Right < Mapper4All , JSONArray > ( possibleResArray ) ) ;
public class Classfile { /** * Check if scanning needs to be extended upwards to an external superclass , interface or annotation . */ private void extendScanningUpwards ( ) { } }
// Check superclass if ( superclassName != null ) { scheduleScanningIfExternalClass ( superclassName , "superclass" ) ; } // Check implemented interfaces if ( implementedInterfaces != null ) { for ( final String interfaceName : implementedInterfaces ) { scheduleScanningIfExternalClass ( interfaceName , "interface" ) ; } } // Check class annotations if ( classAnnotations != null ) { for ( final AnnotationInfo annotationInfo : classAnnotations ) { scheduleScanningIfExternalClass ( annotationInfo . getName ( ) , "class annotation" ) ; } } // Check method annotations and method parameter annotations if ( methodInfoList != null ) { for ( final MethodInfo methodInfo : methodInfoList ) { if ( methodInfo . annotationInfo != null ) { for ( final AnnotationInfo methodAnnotationInfo : methodInfo . annotationInfo ) { scheduleScanningIfExternalClass ( methodAnnotationInfo . getName ( ) , "method annotation" ) ; } if ( methodInfo . parameterAnnotationInfo != null && methodInfo . parameterAnnotationInfo . length > 0 ) { for ( final AnnotationInfo [ ] paramAnns : methodInfo . parameterAnnotationInfo ) { if ( paramAnns != null && paramAnns . length > 0 ) { for ( final AnnotationInfo paramAnn : paramAnns ) { scheduleScanningIfExternalClass ( paramAnn . getName ( ) , "method parameter annotation" ) ; } } } } } } } // Check field annotations if ( fieldInfoList != null ) { for ( final FieldInfo fieldInfo : fieldInfoList ) { if ( fieldInfo . annotationInfo != null ) { for ( final AnnotationInfo fieldAnnotationInfo : fieldInfo . annotationInfo ) { scheduleScanningIfExternalClass ( fieldAnnotationInfo . getName ( ) , "field annotation" ) ; } } } }
public class FadeTransitionCommand {
    /**
     * {@inheritDoc}
     *
     * Cross-fades between the node being hidden and the node being shown:
     * fades the old node from opacity 1 to 0 and the new node from 0 to 1 in
     * parallel (600 ms each), then removes the old node from the children
     * placeholder and calls {@code doShowView} on the shown model.
     */
    @Override
    protected void perform(final Wave wave) {
        // The old node is the one that exists into the parent container (or null if none)
        Node oldNode = waveBean(wave).hideModel() == null ? null : waveBean(wave).hideModel().node();
        if (oldNode == null) {
            // Fall back to the last node currently stacked in the placeholder.
            // NOTE(review): the guard is size() > 1 (not > 0) — presumably the
            // bottom-most node must stay in place; confirm against callers.
            final ObservableList<Node> parentContainer = waveBean(wave).childrenPlaceHolder();
            oldNode = parentContainer.size() > 1
                    ? parentContainer.get(waveBean(wave).childrenPlaceHolder().size() - 1)
                    : null;
        }
        // The new node is the one create by PrepareModelCommand
        final Node newNode = waveBean(wave).showModel() == null ? null : waveBean(wave).showModel().node();

        if (oldNode != null || newNode != null) {
            final ParallelTransition animation = new ParallelTransition();
            if (oldNode != null) {
                // Fade the outgoing node from fully opaque to invisible.
                final FadeTransition ft = new FadeTransition();
                ft.setDuration(Duration.millis(600));
                ft.setNode(oldNode);
                ft.setFromValue(1.0);
                ft.setToValue(0.0);
                animation.getChildren().add(ft);
            }
            if (newNode != null) {
                // Fade the incoming node from invisible to fully opaque.
                final FadeTransition ft = new FadeTransition();
                ft.setDuration(Duration.millis(600));
                ft.setNode(newNode);
                ft.setFromValue(0.0);
                ft.setToValue(1.0);
                animation.getChildren().add(ft);
            }

            // Effectively-final copy for capture by the finish handler below.
            final Node oldNodeLink = oldNode;
            // When animation is finished remove the hidden node from the stack to let only one node at the same time
            animation.setOnFinished(event -> {
                if (oldNodeLink != null) {
                    // remove the old nod from the stack to hide it
                    waveBean(wave).childrenPlaceHolder().remove(oldNodeLink);
                    LOGGER.info("Remove " + oldNodeLink.toString() + " from stack container");
                }
                // FIXME do it in the right way
                // NOTE(review): showModel() is dereferenced unconditionally here even
                // though it may be null (it was null-checked above) — would NPE when
                // only a hide is performed; confirm intended usage.
                waveBean(wave).showModel().doShowView(wave);
            });
            animation.playFromStart();
        }
    }
}
public class WebSocketConnection {
    /**
     * Close with an associated error status.
     *
     * Writes the handshake error response, then either performs a "nice" close
     * (sends a WebSocket CLOSE packet carrying the status code plus a reason
     * string and closes once output is flushed) or an immediate forceful close,
     * depending on the transport configuration.
     *
     * @param statusCode  WebSocket close status code (1008 → "Policy Violation",
     *                    anything else → "Protocol error")
     * @param errResponse handshake error response written to the client before closing
     */
    public void close(int statusCode, HandshakeResponse errResponse) {
        log.warn("Closing connection with status: {}", statusCode);
        // remove handshake flag
        session.removeAttribute(Constants.HANDSHAKE_COMPLETE);
        // clear the delay queue
        queue.clear();
        // send http error response
        session.write(errResponse);
        // whether to attempt a nice close or a forceful one
        if (WebSocketTransport.isNiceClose()) {
            // now send close packet with error code
            IoBuffer buf = IoBuffer.allocate(16);
            buf.setAutoExpand(true);
            // all errors except 403 will use 1002
            buf.putUnsigned((short) statusCode);
            try {
                if (statusCode == 1008) {
                    // if its a 403 forbidden
                    buf.put("Policy Violation".getBytes("UTF8"));
                } else {
                    buf.put("Protocol error".getBytes("UTF8"));
                }
            } catch (Exception e) {
                // shouldnt be any text encoding issues...
                // (charset name "UTF8" is resolved by the JDK alias table)
            }
            buf.flip();
            byte[] errBytes = new byte[buf.remaining()];
            buf.get(errBytes);
            // construct the packet
            Packet packet = Packet.build(errBytes, MessageType.CLOSE);
            WriteFuture writeFuture = session.write(packet);
            writeFuture.addListener(new IoFutureListener<WriteFuture>() {
                @Override
                public void operationComplete(WriteFuture future) {
                    if (future.isWritten()) {
                        log.debug("Close message written");
                        // only set on success for now to skip boolean check later
                        session.setAttribute(Constants.STATUS_CLOSE_WRITTEN, Boolean.TRUE);
                    }
                    future.removeListener(this);
                }
            });
            // adjust close routine to allow for flushing
            CloseFuture closeFuture = session.closeOnFlush();
            closeFuture.addListener(new IoFutureListener<CloseFuture>() {
                public void operationComplete(CloseFuture future) {
                    if (future.isClosed()) {
                        log.debug("Connection is closed");
                    } else {
                        log.debug("Connection is not yet closed");
                    }
                    future.removeListener(this);
                }
            });
        } else {
            // force close: drop the session immediately without flushing pending writes
            CloseFuture closeFuture = session.closeNow();
            closeFuture.addListener(new IoFutureListener<CloseFuture>() {
                public void operationComplete(CloseFuture future) {
                    if (future.isClosed()) {
                        log.debug("Connection is closed");
                    } else {
                        log.debug("Connection is not yet closed");
                    }
                    future.removeListener(this);
                }
            });
        }
        log.debug("Close complete");
    }
}
public class LottieCompositionFactory { /** * Parse an animation from raw / res . This is recommended over putting your animation in assets because * it uses a hard reference to R . * The resource id will be used as a cache key so future usages won ' t parse the json again . */ @ WorkerThread public static LottieResult < LottieComposition > fromRawResSync ( Context context , @ RawRes int rawRes ) { } }
try { return fromJsonInputStreamSync ( context . getResources ( ) . openRawResource ( rawRes ) , rawResCacheKey ( rawRes ) ) ; } catch ( Resources . NotFoundException e ) { return new LottieResult < > ( e ) ; }
public class SSLConnectionLink {
    /**
     * This method is called to handle the results of an SSL handshake. This may be called
     * by a callback or in the same thread as the connect request.
     *
     * On success, notifies the channel, disposes or hands off the network buffer
     * (any leftover encrypted app data is passed to the read interface), releases
     * the working buffers, and then either invokes the async ready callback or
     * (for sync) throws on failure.
     *
     * @param netBuffer buffer for data flowing in from the net
     * @param decryptedNetBuffer buffer for decrypted data from the net
     * @param encryptedAppBuffer buffer for encrypted data flowing from the app
     * @param hsStatus output from the last call to the SSL engine
     * @param async whether this is for an async (true) or sync (false) request
     * @throws IOException if the handshake did not finish (sync path only; async path reports via close)
     */
    protected void readyOutboundPostHandshake(WsByteBuffer netBuffer,
                                              WsByteBuffer decryptedNetBuffer,
                                              WsByteBuffer encryptedAppBuffer,
                                              HandshakeStatus hsStatus,
                                              boolean async) throws IOException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.entry(tc, "readyOutboundPostHandshake, vc=" + getVCHash());
        }
        // Exception to call destroy with in case of bad return code from SSL engine.
        IOException exception = null;
        if (hsStatus != HandshakeStatus.FINISHED) {
            // Handshake failed. Defer the throw/close until cleanup below has run.
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Unexpected results of handshake after connect, " + hsStatus);
            }
            exception = new IOException("Unexpected results of handshake after connect, " + hsStatus);
        }

        // PK16095 - take certain actions when the handshake completes
        getChannel().onHandshakeFinish(getSSLEngine());

        // Null out the buffer references on the device side so they don't wrongly reused later.
        getDeviceReadInterface().setBuffers(null);

        // Clean up the buffers.
        // PI48725 Start
        // Handshake complete. Now get the request. Use our read interface so unwrap already done.
        // Check if data exists in the network buffer still. This would be app data beyond handshake.
        // NOTE(review): the condition also treats position()==0 as "no app data" even if
        // remaining() > 0 — presumably an un-flipped buffer; confirm against the callers.
        if (netBuffer.remaining() == 0 || netBuffer.position() == 0) {
            // No app data. Release the netBuffer as it will no longer be used.
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Releasing netBuffer: " + netBuffer.hashCode());
            }
            netBuffer.release();
        } else {
            // Found encrypted app data. Don't release the network buffer yet. Let the read decrypt it.
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "App data exists in netBuffer after handshake: " + netBuffer.remaining());
            }
            this.readInterface.setNetBuffer(netBuffer);
        }
        // PI48725 Finish

        // Clean up the buffers.
        decryptedNetBuffer.release();
        encryptedAppBuffer.release();

        // Call appropriate callback if async
        if (async) {
            if (exception != null) {
                close(getVirtualConnection(), exception);
            } else {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Calling ready method.");
                }
                super.ready(getVirtualConnection());
            }
        } else {
            // Sync path: surface the handshake failure directly to the caller.
            if (exception != null) {
                throw exception;
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.exit(tc, "readyOutboundPostHandshake");
        }
    }
}
public class RuleFilterEvaluator { /** * Resolves the backref arguments , e . g . replaces { @ code \ 1 } by the value of the first token in the pattern . */ public Map < String , String > getResolvedArguments ( String filterArgs , AnalyzedTokenReadings [ ] patternTokens , List < Integer > tokenPositions ) { } }
Map < String , String > result = new HashMap < > ( ) ; String [ ] arguments = filterArgs . split ( "\\s+" ) ; for ( String arg : arguments ) { int delimPos = arg . indexOf ( ':' ) ; if ( delimPos == - 1 ) { throw new RuntimeException ( "Invalid syntax for key/value, expected 'key:value', got: '" + arg + "'" ) ; } String key = arg . substring ( 0 , delimPos ) ; String val = arg . substring ( delimPos + 1 ) ; if ( val . startsWith ( "\\" ) ) { int refNumber = Integer . parseInt ( val . replace ( "\\" , "" ) ) ; if ( refNumber > tokenPositions . size ( ) ) { throw new RuntimeException ( "Your reference number " + refNumber + " is bigger than the number of tokens: " + tokenPositions . size ( ) ) ; } int correctedRef = getSkipCorrectedReference ( tokenPositions , refNumber ) ; if ( correctedRef >= patternTokens . length ) { throw new RuntimeException ( "Your reference number " + refNumber + " is bigger than number of matching tokens: " + patternTokens . length ) ; } if ( result . containsKey ( key ) ) { throw new RuntimeException ( "Duplicate key '" + key + "'" ) ; } result . put ( key , patternTokens [ correctedRef ] . getToken ( ) ) ; } else { result . put ( key , val ) ; } } return result ;
public class DescribeMaintenanceWindowExecutionsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DescribeMaintenanceWindowExecutionsRequest describeMaintenanceWindowExecutionsRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( describeMaintenanceWindowExecutionsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeMaintenanceWindowExecutionsRequest . getWindowId ( ) , WINDOWID_BINDING ) ; protocolMarshaller . marshall ( describeMaintenanceWindowExecutionsRequest . getFilters ( ) , FILTERS_BINDING ) ; protocolMarshaller . marshall ( describeMaintenanceWindowExecutionsRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( describeMaintenanceWindowExecutionsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class OCSP { /** * Obtains the revocation status of a certificate using OCSP . * @ param cert the certificate to be checked * @ param issuerCert the issuer certificate * @ param responderURI the URI of the OCSP responder * @ param responderCert the OCSP responder ' s certificate * @ param date the time the validity of the OCSP responder ' s certificate * should be checked against . If null , the current time is used . * @ return the RevocationStatus * @ throws IOException if there is an exception connecting to or * communicating with the OCSP responder * @ throws CertPathValidatorException if an exception occurs while * encoding the OCSP Request or validating the OCSP Response */ public static RevocationStatus check ( X509Certificate cert , X509Certificate issuerCert , URI responderURI , X509Certificate responderCert , Date date ) throws IOException , CertPathValidatorException { } }
return check ( cert , issuerCert , responderURI , responderCert , date , Collections . < Extension > emptyList ( ) ) ;
public class JaxWsUtils {
    /**
     * Get the string value of attribute in WebService or WebServiceProvider annotation.
     *
     * If it is provider, get the attribute from its annotation.
     * If it is not provider,
     *   first, try to get the attribute from serviceImplBean,
     *   then, try to get the attribute from the SEI either from seiClassName or the
     *   "endpointInterface" attribute,
     *   finally, return the defaultForService if it is the WebService annotation, or
     *   defaultForServiceProvider for the WebServiceProvider annotation.
     * If neither the WebService nor WebServiceProvider annotation can be found, just
     * return an empty string.
     *
     * @param classInfo the service implementation bean class
     * @param seiClassName the SEI class name ("" when none)
     * @param infoStore used to resolve the SEI class info
     * @param attribute the annotation attribute name to read
     * @param defaultForService fallback for @WebService beans
     * @param defaultForServiceProvider fallback for @WebServiceProvider beans
     * @return the resolved attribute value, a default, or "" when no annotation exists
     */
    private static String getStringAttributeFromAnnotation(ClassInfo classInfo, String seiClassName,
                                                           InfoStore infoStore, String attribute,
                                                           String defaultForService,
                                                           String defaultForServiceProvider) {
        // NOTE(review): the helper is handed the attribute name here — presumably it
        // locates the @WebService/@WebServiceProvider annotation on the class; confirm.
        AnnotationInfo annotationInfo = getAnnotationInfoFromClass(classInfo, attribute);
        if (annotationInfo == null) {
            return "";
        }
        boolean isProvider = isProvider(classInfo);
        // if the serviceImplBean is a WebServiceProvider, return the attribute value or the defaultValue for ServiceProvider
        if (isProvider) {
            return getStringAttributeFromWebServiceProviderAnnotation(annotationInfo, attribute, defaultForServiceProvider);
        }
        // if is as WebService, need to get the attribute from itself, the SEI or the interfaces, then the default value for Service
        String attrFromImplBean = annotationInfo.getValue(attribute).getStringValue().trim();
        if (attrFromImplBean.isEmpty()) {
            // can not get the SEI class name just return the default value
            if (seiClassName.isEmpty()) {
                return defaultForService;
            } else {
                // if can get the SEI className, go here.
                ClassInfo seiClassInfo = infoStore.getDelayableClassInfo(seiClassName);
                annotationInfo = seiClassInfo.getAnnotation(JaxWsConstants.WEB_SERVICE_ANNOTATION_NAME);
                if (null == annotationInfo) {
                    // if the SEI does not have the @WebService annotation, we should report it as error? (RI 2.2 will do)
                    if (tc.isDebugEnabled()) {
                        Tr.debug(tc, "No @WebService or @WebServiceProvider annotation is found on the class "
                                     + seiClassInfo + " will return " + defaultForService);
                    }
                    return defaultForService;
                }
                // if the attribute is presented in SEI's @WebService, just return it. or, return the default value for Service
                String attrFromSEI = annotationInfo.getValue(attribute).getStringValue().trim();
                return StringUtils.isEmpty(attrFromSEI) ? defaultForService : attrFromSEI;
            }
        }
        return attrFromImplBean;
    }
}
public class RunCommandTargetMarshaller { /** * Marshall the given parameter object . */ public void marshall ( RunCommandTarget runCommandTarget , ProtocolMarshaller protocolMarshaller ) { } }
if ( runCommandTarget == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( runCommandTarget . getKey ( ) , KEY_BINDING ) ; protocolMarshaller . marshall ( runCommandTarget . getValues ( ) , VALUES_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class JOGLTypeConversions { /** * Convert depth functions from GL constants . * @ param d The GL constant . * @ return The value . */ public static JCGLDepthFunction depthFunctionFromGL ( final int d ) { } }
switch ( d ) { case GL . GL_ALWAYS : return JCGLDepthFunction . DEPTH_ALWAYS ; case GL . GL_EQUAL : return JCGLDepthFunction . DEPTH_EQUAL ; case GL . GL_GREATER : return JCGLDepthFunction . DEPTH_GREATER_THAN ; case GL . GL_GEQUAL : return JCGLDepthFunction . DEPTH_GREATER_THAN_OR_EQUAL ; case GL . GL_LESS : return JCGLDepthFunction . DEPTH_LESS_THAN ; case GL . GL_LEQUAL : return JCGLDepthFunction . DEPTH_LESS_THAN_OR_EQUAL ; case GL . GL_NEVER : return JCGLDepthFunction . DEPTH_NEVER ; case GL . GL_NOTEQUAL : return JCGLDepthFunction . DEPTH_NOT_EQUAL ; default : throw new UnreachableCodeException ( ) ; }
public class EnvironmentPrefixHelper {
    /**
     * Extract keys for looking up a {@link TextEncryptor} from the input text in the form
     * of a prefix of zero or many <code>{name:value}</code> pairs. The name and profiles
     * properties are always added to the keys (replacing any provided in the inputs).
     *
     * @param name application name
     * @param profiles list of profiles
     * @param text text to cipher
     * @return encryptor keys (insertion-ordered; name/profiles first)
     */
    public Map<String, String> getEncryptorKeys(String name, String profiles, String text) {
        // LinkedHashMap keeps the name/profiles entries first and preserves pair order.
        Map<String, String> keys = new LinkedHashMap<String, String>();
        text = removeEnvironmentPrefix(text);
        keys.put(NAME, name);
        keys.put(PROFILES, profiles);
        // Anything after the escape marker is payload, not key prefix.
        if (text.contains(ESCAPE)) {
            text = text.substring(0, text.indexOf(ESCAPE));
        }
        // NOTE(review): this relies on Spring's StringUtils.split(text, "}") semantics —
        // it splits at the FIRST occurrence into a 2-element {head, tail} array, or
        // returns null when the delimiter is absent; hence tokens is always length 2
        // inside the loop. Confirm the StringUtils import is the Spring one.
        String[] tokens = StringUtils.split(text, "}");
        while (tokens != null) {
            String token = tokens[0].trim();
            if (token.startsWith("{")) {
                String key = "";
                String value = "";
                // "{key:value}" → key/value; "{key}" or trailing ':' → key with empty value.
                if (token.contains(":") && !token.endsWith(":")) {
                    key = token.substring(1, token.indexOf(":"));
                    value = token.substring(token.indexOf(":") + 1);
                } else {
                    key = token.substring(1);
                }
                keys.put(key, value);
            }
            // Continue with the remainder after the consumed "}".
            text = tokens[1];
            tokens = StringUtils.split(text, "}");
        }
        return keys;
    }
}
public class TaskMonitor { /** * Make a new Monitor * @ param start the task start moment * @ param duration the task duration * @ param end the task end * @ return the resulting task . */ public static TaskMonitor build ( IntVar start , IntVar duration , IntVar end ) { } }
return new TaskMonitor ( start , duration , end ) ;
public class RestUtils { /** * A generic JSON response handler . * @ param status status code * @ param messages zero or more errors * @ return a response as JSON */ public static Response getStatusResponse ( Response . Status status , String ... messages ) { } }
if ( status == null ) { return Response . status ( Response . Status . BAD_REQUEST ) . build ( ) ; } String msg = StringUtils . join ( messages , ". " ) ; if ( StringUtils . isBlank ( msg ) ) { msg = status . getReasonPhrase ( ) ; } try { return GenericExceptionMapper . getExceptionResponse ( status . getStatusCode ( ) , msg ) ; } catch ( Exception ex ) { logger . error ( null , ex ) ; return Response . status ( Response . Status . BAD_REQUEST ) . build ( ) ; }
public class VariantVcfFactory {
    /**
     * Creates a list of Variant objects using the fields in a record of a VCF file.
     * A new Variant object is created per allele, so several of them can be created
     * from a single line.
     *
     * Start/end coordinates assignment tries to work as similarly as possible as
     * Ensembl does, except for insertions, where start is greater than end:
     * http://www.ensembl.org/info/docs/tools/vep/vep_formats.html#vcf
     *
     * @param metadata Origin of the variants information
     * @param line Contents of the line in the file
     * @return The list of Variant objects that can be created using the fields
     *         from a VCF record
     * @throws IllegalArgumentException if the line has fewer than 8 tab-separated fields
     */
    @Override
    public List<Variant> create(VariantStudyMetadata metadata, String line)
            throws IllegalArgumentException, NotAVariantException {
        // VCF is tab-separated: CHROM POS ID REF ALT QUAL FILTER INFO [FORMAT samples...]
        String[] fields = line.split("\t");
        if (fields.length < 8) {
            throw new IllegalArgumentException("Not enough fields provided (min 8)");
        }
        // if (fields[4].equals(".")) {
        //     throw new NotAVariantException("Alternative allele is a '.'. This is not an actual variant but a reference position.");

        String chromosome = fields[0];
        int position = Integer.parseInt(fields[1]);
        // "." is the VCF missing-value marker for ID/REF/QUAL/FILTER/INFO/FORMAT.
        String id = fields[2].equals(".") ? null : fields[2];
        List<String> ids = id == null ? Collections.emptyList() : Arrays.asList(id.split(";"));
        String reference = fields[3].equals(".") ? "" : fields[3];
        String alternate = fields[4];
        // String alternate = fields[4].equals(".") ? "" : fields[4];
        String[] alternateAlleles = alternate.split(",");
        // The first ALT allele becomes the main alternate; the rest are secondary.
        String mainAlternate = alternateAlleles[0];
        float quality = fields[5].equals(".") ? -1 : Float.parseFloat(fields[5]);
        String filter = fields[6].equals(".") ? "" : fields[6];
        String info = fields[7].equals(".") ? "" : fields[7];
        String format = (fields.length <= 8 || fields[8].equals(".")) ? "" : fields[8];
        // End is inclusive, computed from the reference allele length.
        int end = position + reference.length() - 1;

        Variant variant = new Variant(chromosome, position, end, reference, mainAlternate);
        // Remaining ALT alleles (index 1..n) become secondary alternate coordinates.
        List<AlternateCoordinate> secondaryAlternatesMap =
                Arrays.stream(alternateAlleles, 1, alternateAlleles.length)
                      .map(a -> new AlternateCoordinate(chromosome, null, null, null, a, null))
                      .collect(Collectors.toList());
        StudyEntry entry = new StudyEntry(metadata.getId(), secondaryAlternatesMap,
                Arrays.asList(format.split(":")));
        VariantFileMetadata fileMetadata = new VariantFileMetadata(metadata.getFiles().get(0));
        entry.setFileId(fileMetadata.getId());
        variant.addStudyEntry(entry);
        try {
            parseSplitSampleData(entry, fileMetadata, fields, reference, alternateAlleles);
            // Fill the rest of fields (after samples because INFO depends on them)
            setOtherFields(variant, entry, fileMetadata, ids, quality, filter, info, format,
                    alternateAlleles, line);
        } catch (NonStandardCompliantSampleField ex) {
            // Malformed sample data: log and return the variant without the extra fields.
            Logger.getLogger(VariantFactory.class.getName()).log(Level.SEVERE,
                    String.format("Variant %s:%d:%s>%s will not be saved\n%s",
                            chromosome, position, reference, alternate, ex.getMessage()));
        }
        return Collections.singletonList(variant);
    }
}
public class JcaServiceUtilities { /** * Set context classloader to the one for the resource adapter * @ param raClassLoader * @ return the current classloader */ public ClassLoader beginContextClassLoader ( ClassLoader raClassLoader ) { } }
return raClassLoader == null ? null : AccessController . doPrivileged ( new GetAndSetContextClassLoader ( raClassLoader ) ) ;
public class HTODDynacache {
    /**
     * dump_object_statistics()
     *
     * Dump filemanager and HTOD statistics to stdout. We could capture these
     * for forward to monitors if needed.
     *
     * Always dumps the object cache section; the dependency and template
     * sections are included only when their features are enabled. Any failure
     * is routed through FFDC and trace rather than propagated.
     */
    public void dump_object_statistics() {
        final String methodName = "dump_object_statistics()";
        try {
            // Wrap stdout once; flushed at the end after all sections are written.
            OutputStreamWriter out = new OutputStreamWriter(System.out);
            out.write("========================================================================");
            out.write("------------------ Object File Manager Statistics ---------------\n");
            object_filemgr.dump_stats(out, true);
            out.write("------------------ Object HTOD Statistics ---------------\n");
            object_cache.dump_htod_stats(out, true);
            out.write("========================================================================");
            if (!this.disableDependencyId) {
                // Dependency-id support is enabled — include its stats.
                out.write("========================================================================");
                out.write("------------------ Dependency File Manager Statistics ---------------\n");
                dependency_filemgr.dump_stats(out, true);
                out.write("------------------ Dependency HTOD Statistics ---------------\n");
                dependency_cache.dump_htod_stats(out, true);
                out.write("========================================================================");
            }
            if (!this.disableTemplatesSupport) {
                // Template support is enabled — include its stats.
                out.write("========================================================================");
                out.write("------------------ Template File Manager Statistics ---------------\n");
                template_filemgr.dump_stats(out, true);
                out.write("------------------ Template HTOD Statistics ---------------\n");
                template_cache.dump_htod_stats(out, true);
                out.write("========================================================================");
            }
            out.flush();
        } catch (Throwable t) {
            // Record the failure via FFDC; diagnostics must never take the server down.
            com.ibm.ws.ffdc.FFDCFilter.processException(t,
                    "com.ibm.ws.cache.HTODDynacache.dump_object_statistics", "376", this);
            if (tc.isDebugEnabled())
                Tr.debug(tc, methodName, "cacheName=" + this.cacheName + "\nException: "
                        + ExceptionUtility.getStackTrace(t));
        }
    }
}
public class Types { /** * Cast Java reflective type to language class . If < code > type < / code > is instance of { @ link Class } just return it . If * is parameterized type returns the raw class . * @ param t Java reflective type . * @ return the class described by given < code > type < / code > . */ private static Class < ? > typeToClass ( Type t ) { } }
if ( t instanceof Class < ? > ) { return ( Class < ? > ) t ; } if ( t instanceof ParameterizedType ) { return ( Class < ? > ) ( ( ParameterizedType ) t ) . getRawType ( ) ; } throw new BugError ( "Unknown type %s to convert to class." , t ) ;
public class dnsnameserver { /** * Use this API to delete dnsnameserver . */ public static base_response delete ( nitro_service client , dnsnameserver resource ) throws Exception { } }
dnsnameserver deleteresource = new dnsnameserver ( ) ; deleteresource . ip = resource . ip ; deleteresource . dnsvservername = resource . dnsvservername ; return deleteresource . delete_resource ( client ) ;
public class ObjectMapper { /** * Maps a ResultSet column ( from the current ResultSet row ) to a named * property of an object , using reflection . * @ param rs The JDBC ResultSet * @ param index The column index to get the value from * @ param property The name of the property to set the value of * @ param obj The object to set the property to */ void mapColumnProperty ( ResultSet pRSet , int pIndex , String pProperty , Object pObj ) { } }
if ( pRSet == null || pProperty == null || pObj == null ) throw new IllegalArgumentException ( "ResultSet, Property or Object" + " arguments cannot be null!" ) ; if ( pIndex <= 0 ) throw new IllegalArgumentException ( "Index parameter must be > 0!" ) ; String methodName = "set" + StringUtil . capitalize ( pProperty ) ; Method setMethod = ( Method ) mMethods . get ( methodName ) ; if ( setMethod == null ) { // No setMethod for this property mLog . logError ( "No set method for property \"" + pProperty + "\" in " + pObj . getClass ( ) + "!" ) ; return ; } // System . err . println ( " DEBUG : setMethod = " + setMethod ) ; Method getMethod = null ; String type = "" ; try { Class [ ] cl = { Integer . TYPE } ; type = setMethod . getParameterTypes ( ) [ 0 ] . getName ( ) ; type = type . substring ( type . lastIndexOf ( "." ) + 1 ) ; // There is no getInteger , use getInt instead if ( type . equals ( "Integer" ) ) { type = "int" ; } // System . err . println ( " DEBUG : type = " + type ) ; getMethod = pRSet . getClass ( ) . getMethod ( "get" + StringUtil . capitalize ( type ) , cl ) ; } catch ( Exception e ) { mLog . logError ( "Can't find method \"get" + StringUtil . capitalize ( type ) + "(int)\" " + "(for class " + StringUtil . capitalize ( type ) + ") in ResultSet" , e ) ; return ; } try { // Get the data from the DB // System . err . println ( " DEBUG : " + getMethod . getName ( ) + " ( " + ( i + 1 ) + " ) " ) ; Object [ ] colIdx = { new Integer ( pIndex ) } ; Object [ ] arg = { getMethod . invoke ( pRSet , colIdx ) } ; // Set it to the object // System . err . println ( " DEBUG : " + setMethod . getName ( ) + " ( " + arg [ 0 ] + " ) " ) ; setMethod . invoke ( pObj , arg ) ; } catch ( InvocationTargetException ite ) { mLog . logError ( ite ) ; // ite . printStackTrace ( ) ; } catch ( IllegalAccessException iae ) { mLog . logError ( iae ) ; // iae . printStackTrace ( ) ; }
public class FastjsonDecoder { /** * 判断JSON字符串是否是一个合法的JSON字符串 * @ param jsonStr * JSON字符串 * @ return true - 合法的json字符串 , false - 不是一个json字符串 * @ since 2.0.0 */ public static boolean isValidJson ( String jsonStr ) { } }
if ( jsonStr == null || jsonStr . trim ( ) . equals ( "" ) ) { return false ; } try { Object object = JSON . parse ( jsonStr ) ; if ( object instanceof JSONObject ) { return true ; } else if ( object instanceof JSONArray ) { return true ; } } catch ( Throwable e ) { return false ; } return false ;
public class ConfigObject { /** * Returns the editor panel for the specified field . */ public JPanel getEditor ( PresentsContext ctx , Field field ) { } }
if ( field . getType ( ) . equals ( Boolean . TYPE ) ) { return new BooleanFieldEditor ( ctx , field , this ) ; } else { return new AsStringFieldEditor ( ctx , field , this ) ; }
public class SigningDigest {

    /**
     * Performs MAC signing of the SMB. This is done as follows:
     * the signature field of the SMB is overwritten with the sequence number;
     * the MD5 digest of the MAC signing key + the entire SMB is taken;
     * the first 8 bytes of this are placed in the signature field.
     *
     * @param data The data.
     * @param offset The starting offset at which the SMB header begins.
     * @param length The length of the SMB data starting at offset.
     * @param request request message whose sequence number is recorded
     * @param response paired response message (may be null); expected at sequence + 1
     */
    void sign(byte[] data, int offset, int length, ServerMessageBlock request, ServerMessageBlock response) {
        // Record the sequence numbers in use: request gets the current value,
        // the response (if any) is verified against current + 1.
        request.signSeq = signSequence;
        if (response != null) {
            response.signSeq = signSequence + 1;
            response.verifyFailed = false;
        }
        try {
            // Digest input = MAC signing key followed by the SMB whose signature
            // field has been replaced with the sequence number.
            update(macSigningKey, 0, macSigningKey.length);
            int index = offset + ServerMessageBlock.SIGNATURE_OFFSET;
            // Zero the 8-byte signature field, then write the sequence number into it.
            for (int i = 0; i < 8; i++)
                data[index + i] = 0;
            ServerMessageBlock.writeInt4(signSequence, data, index);
            update(data, offset, length);
            // First 8 digest bytes become the signature.
            System.arraycopy(digest(), 0, data, index, 8);
            if (bypass) {
                // One-shot bypass: overwrite the signature with the well-known
                // dummy value instead of the computed digest.
                bypass = false;
                System.arraycopy("BSRSPYL ".getBytes(), 0, data, index, 8);
            }
        } catch (Exception ex) {
            if (log.level > 0)
                ex.printStackTrace(log);
        } finally {
            // Two sequence numbers are consumed per signed request/response exchange.
            signSequence += 2;
        }
    }
}
public class ApiOvhDedicatedCloud { /** * Get this object properties * REST : GET / dedicatedCloud / { serviceName } / datacenter / { datacenterId } / vm / { vmId } / backupJob / restorePoints / { restorePointId } * @ param serviceName [ required ] Domain of the service * @ param datacenterId [ required ] * @ param vmId [ required ] Id of the virtual machine . * @ param restorePointId [ required ] Id of the restore point . * @ deprecated */ public OvhRestorePoint serviceName_datacenter_datacenterId_vm_vmId_backupJob_restorePoints_restorePointId_GET ( String serviceName , Long datacenterId , Long vmId , Long restorePointId ) throws IOException { } }
String qPath = "/dedicatedCloud/{serviceName}/datacenter/{datacenterId}/vm/{vmId}/backupJob/restorePoints/{restorePointId}" ; StringBuilder sb = path ( qPath , serviceName , datacenterId , vmId , restorePointId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhRestorePoint . class ) ;
public class ExtendedLikeFilterImpl {

    /**
     * Given OGC PropertyIsLike filter information, constructs an SQL-compatible
     * 'like' pattern. SQL uses % to match any number of characters and _ to
     * match a single character.
     *
     * NOTE: the SQL command is 'string LIKE pattern [ESCAPE escape-character]'.
     * We could re-define the escape character, but this code does not, since
     * some databases will not handle that case.
     *
     * Examples (escape='!', multi='*', single='.'):
     *   broadway*  -> 'broadway%'
     *   broad.ay   -> 'broad_ay'
     *   broadway   -> 'broadway'
     *   broadway!* -> 'broadway*'  (* has no significance and is escaped)
     *   can't      -> 'can''t'     (' escaped for SQL compliance)
     *
     * NOTE: "'" characters are handled as special because they are
     * end-of-string characters; SQL doubles them. Using "'" as one of the
     * special chars throws IllegalArgumentException.
     *
     * @param escape escape character
     * @param multi wildcard matching any number of characters
     * @param single wildcard matching exactly one character
     * @param pattern pattern to match
     * @return SQL like sub-expression
     * @throws IllegalArgumentException if a special char is the single quote
     */
    public static String convertToSQL92(char escape, char multi, char single, String pattern)
            throws IllegalArgumentException {
        if ((escape == '\'') || (multi == '\'') || (single == '\'')) {
            throw new IllegalArgumentException("do not use single quote (') as special char!");
        }
        StringBuilder sql = new StringBuilder(pattern.length() + 5);
        for (int pos = 0; pos < pattern.length(); pos++) {
            char current = pattern.charAt(pos);
            if (current == escape) {
                // Emit the escaped character literally (if any) and skip past it.
                if (pos < pattern.length() - 1) {
                    sql.append(pattern.charAt(pos + 1));
                }
                pos++;
            } else if (current == single) {
                sql.append('_');
            } else if (current == multi) {
                sql.append('%');
            } else if (current == '\'') {
                // Double single quotes for SQL string-literal compliance.
                sql.append("''");
            } else {
                sql.append(current);
            }
        }
        return sql.toString();
    }
}
public class MerlinReader {

    /**
     * By the time we reach this method, we should be looking at the SQLite
     * database file itself.
     *
     * @param file SQLite database file
     * @return ProjectFile instance
     * @throws MPXJException wrapping any failure as INVALID_FORMAT
     */
    private ProjectFile readFile(File file) throws MPXJException {
        try {
            // Open the SQLite database directly through the driver class.
            String url = "jdbc:sqlite:" + file.getAbsolutePath();
            Properties props = new Properties();
            m_connection = org.sqlite.JDBC.createConnection(url, props);
            // Set up DOM/XPath machinery used while reading embedded XML fragments.
            m_documentBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
            XPathFactory xPathfactory = XPathFactory.newInstance();
            XPath xpath = xPathfactory.newXPath();
            m_dayTimeIntervals = xpath.compile("/array/dayTimeInterval");
            m_entityMap = new HashMap<String, Integer>();
            return read();
        } catch (Exception ex) {
            // Any failure (driver, parser, read) is reported as an invalid file.
            throw new MPXJException(MPXJException.INVALID_FORMAT, ex);
        } finally {
            // Always release the connection and per-read state, even on failure.
            if (m_connection != null) {
                try {
                    m_connection.close();
                } catch (SQLException ex) {
                    // silently ignore exceptions when closing connection
                }
            }
            m_documentBuilder = null;
            m_dayTimeIntervals = null;
            m_entityMap = null;
        }
    }
}
public class HttpResponse { /** * Sets a header field value based on its name . * @ param name The header name to set * @ param value The header value to set * @ return the previous value or null if the field was previously unset . */ public final String setHeader ( String name , String value ) { } }
sortedHeaders = null ; return headers . put ( name , value ) ;
public class AbstractMultiMatcherElement { /** * Creates and return all the children matchers . * @ return matcher array */ protected Matcher < ? > [ ] getMatchers ( ) { } }
Matcher < ? > [ ] matchers = new Matcher < ? > [ tasks . size ( ) ] ; for ( int i = 0 ; i < matchers . length ; i ++ ) { matchers [ i ] = tasks . get ( i ) . createMatcher ( ) ; } return matchers ;
public class GramJob { /** * Convert the status of a GramJob from an integer to a string . This * method is not typically called by users . * @ return string representing the status of the GramJob passed as an * argument . */ public static String getStatusAsString ( int status ) { } }
if ( status == STATUS_PENDING ) { return "PENDING" ; } else if ( status == STATUS_ACTIVE ) { return "ACTIVE" ; } else if ( status == STATUS_DONE ) { return "DONE" ; } else if ( status == STATUS_FAILED ) { return "FAILED" ; } else if ( status == STATUS_SUSPENDED ) { return "SUSPENDED" ; } else if ( status == STATUS_UNSUBMITTED ) { return "UNSUBMITTED" ; } else if ( status == STATUS_STAGE_IN ) { return "STAGE_IN" ; } else if ( status == STATUS_STAGE_OUT ) { return "STAGE_OUT" ; } return "Unknown" ;
public class FaceDetailMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param faceDetail the FaceDetail to marshal; must not be null
     * @param protocolMarshaller target protocol marshaller
     * @throws SdkClientException if faceDetail is null or marshalling fails
     */
    public void marshall(FaceDetail faceDetail, ProtocolMarshaller protocolMarshaller) {
        if (faceDetail == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // One marshall call per modeled member, each paired with its binding descriptor.
            protocolMarshaller.marshall(faceDetail.getBoundingBox(), BOUNDINGBOX_BINDING);
            protocolMarshaller.marshall(faceDetail.getAgeRange(), AGERANGE_BINDING);
            protocolMarshaller.marshall(faceDetail.getSmile(), SMILE_BINDING);
            protocolMarshaller.marshall(faceDetail.getEyeglasses(), EYEGLASSES_BINDING);
            protocolMarshaller.marshall(faceDetail.getSunglasses(), SUNGLASSES_BINDING);
            protocolMarshaller.marshall(faceDetail.getGender(), GENDER_BINDING);
            protocolMarshaller.marshall(faceDetail.getBeard(), BEARD_BINDING);
            protocolMarshaller.marshall(faceDetail.getMustache(), MUSTACHE_BINDING);
            protocolMarshaller.marshall(faceDetail.getEyesOpen(), EYESOPEN_BINDING);
            protocolMarshaller.marshall(faceDetail.getMouthOpen(), MOUTHOPEN_BINDING);
            protocolMarshaller.marshall(faceDetail.getEmotions(), EMOTIONS_BINDING);
            protocolMarshaller.marshall(faceDetail.getLandmarks(), LANDMARKS_BINDING);
            protocolMarshaller.marshall(faceDetail.getPose(), POSE_BINDING);
            protocolMarshaller.marshall(faceDetail.getQuality(), QUALITY_BINDING);
            protocolMarshaller.marshall(faceDetail.getConfidence(), CONFIDENCE_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Jar { /** * Sets an attribute in the main section of the manifest to a list . * The list elements will be joined with a single whitespace character . * @ param name the attribute ' s name * @ param values the attribute ' s value * @ return { @ code this } * @ throws IllegalStateException if entries have been added or the JAR has been written prior to calling this methods . */ public Jar setListAttribute ( String name , Collection < ? > values ) { } }
return setAttribute ( name , join ( values ) ) ;
public class DataFlowAnalysis { /** * Performs a single flow through a node . * @ return { @ code true } if the flow state differs from the previous state . */ protected boolean flow ( DiGraphNode < N , Branch > node ) { } }
FlowState < L > state = node . getAnnotation ( ) ; if ( isForward ( ) ) { L outBefore = state . out ; state . out = flowThrough ( node . getValue ( ) , state . in ) ; return ! outBefore . equals ( state . out ) ; } else { L inBefore = state . in ; state . in = flowThrough ( node . getValue ( ) , state . out ) ; return ! inBefore . equals ( state . in ) ; }
public class JobQueuesManager { /** * because of the change in the job priority or job start - time . */ private void reorderJobs ( JobInProgress job , JobSchedulingInfo oldInfo , QueueInfo qi ) { } }
if ( qi . removeWaitingJob ( oldInfo ) != null ) { qi . addWaitingJob ( job ) ; } if ( qi . removeRunningJob ( oldInfo ) != null ) { qi . addRunningJob ( job ) ; }
public class ArrayUtils {

    /**
     * <p>Copies the given array and adds the given element at the end of the
     * new array.</p>
     *
     * <p>The new array contains the same elements of the input array plus the
     * given element in the last position. The component type of the new array
     * is the same as that of the input array.</p>
     *
     * <p>If the input array is <code>null</code>, a new one element array is
     * returned.</p>
     *
     * <pre>
     * ArrayUtils.add(null, 0)   = [0]
     * ArrayUtils.add([1], 0)    = [1, 0]
     * ArrayUtils.add([1, 0], 1) = [1, 0, 1]
     * </pre>
     *
     * @param array the array to copy and add the element to, may be <code>null</code>
     * @param element the value to add at the last index of the new array
     * @return A new array containing the existing elements plus the new element
     * @since 2.1
     */
    public static long[] add(long[] array, long element) {
        // Grow-by-one copy, inlined: a null input behaves like an empty array.
        final int oldLength = (array == null) ? 0 : array.length;
        final long[] grown = new long[oldLength + 1];
        if (array != null) {
            System.arraycopy(array, 0, grown, 0, oldLength);
        }
        grown[oldLength] = element;
        return grown;
    }
}
public class ApiClient { /** * Helper method to configure the token endpoint of the first oauth found in * the apiAuthorizations ( there should be only one ) * @ return Token request builder */ public TokenRequestBuilder getTokenEndPoint ( ) { } }
for ( Authentication apiAuth : authentications . values ( ) ) { if ( apiAuth instanceof RetryingOAuth ) { RetryingOAuth retryingOAuth = ( RetryingOAuth ) apiAuth ; return retryingOAuth . getTokenRequestBuilder ( ) ; } } return null ;
public class CmsLogin {

    /**
     * Returns the available organizational units as JSON array string.<p>
     *
     * Each array element carries the OU's names, description, selection flag,
     * path level and position index.
     *
     * @return the available organizational units as JSON array string
     */
    public String getJsonOrgUnitList() {
        List<CmsOrganizationalUnit> allOus = getOus();
        List<JSONObject> jsonOus = new ArrayList<JSONObject>(allOus.size());
        int index = 0;
        for (CmsOrganizationalUnit ou : allOus) {
            JSONObject jsonObj = new JSONObject();
            try {
                // 1: OU fully qualified name
                jsonObj.put("name", ou.getName());
                // 2: OU display name
                jsonObj.put("displayname", ou.getDisplayName(m_locale));
                // 3: OU simple name
                jsonObj.put("simplename", ou.getSimpleName());
                // 4: OU description
                jsonObj.put("description", ou.getDescription(m_locale));
                // 5: selection flag - matches either exactly or without the first
                // character of m_oufqn (presumably a leading separator - TODO confirm)
                boolean isSelected = false;
                if (ou.getName().equals(m_oufqn)
                    || (CmsStringUtil.isNotEmptyOrWhitespaceOnly(m_oufqn) && ou.getName().equals(m_oufqn.substring(1)))) {
                    isSelected = true;
                }
                jsonObj.put("active", isSelected);
                // 6: level of the OU
                jsonObj.put("level", CmsResource.getPathLevel(ou.getName()));
                // 7: OU index
                jsonObj.put("index", index);
                // add the generated JSON object to the result list
                jsonOus.add(jsonObj);
                index++;
            } catch (JSONException e) {
                // error creating JSON object, skip this OU
            }
        }
        // generate a JSON array from the JSON object list
        JSONArray jsonArr = new JSONArray(jsonOus);
        return jsonArr.toString();
    }
}
public class MockBundle { /** * Get the class loader of the mock , rather than that of the bundle . */ @ Override public Class < ? > loadClass ( String classname ) throws ClassNotFoundException { } }
return getClass ( ) . getClassLoader ( ) . loadClass ( classname ) ;
public class CaseInsensitiveStringMap { /** * Returns the double value to which the specified key is mapped , * or defaultValue if there is no mapping for the key . The key match is case - insensitive . */ public double getDouble ( String key , double defaultValue ) { } }
String value = get ( key ) ; return value == null ? defaultValue : Double . parseDouble ( value ) ;
public class IfcFaceImpl {

    /**
     * Returns the bounds of this face via the EMF reflective getter.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * NOTE(review): EMF-generated accessor - code left byte-identical, only
     * documentation added. The boolean argument is presumably the EMF
     * "resolve" flag - confirm against the EMF eGet documentation.
     *
     * @return the live EList of face bounds
     * @generated
     */
    @SuppressWarnings("unchecked")
    public EList<IfcFaceBound> getBounds() {
        return (EList<IfcFaceBound>) eGet(Ifc2x3tc1Package.Literals.IFC_FACE__BOUNDS, true);
    }
}
public class ManagementGroupVertex { /** * Adds all management vertices which are included in this group vertex to the given list . * @ param vertices * the list to which the vertices shall be added */ void collectVertices ( final List < ManagementVertex > vertices ) { } }
final Iterator < ManagementVertex > it = this . groupMembers . iterator ( ) ; while ( it . hasNext ( ) ) { vertices . add ( it . next ( ) ) ; }
public class AbstractItemExporter {

    /**
     * Runs the export: iterates over all items, transferring each one while
     * notifying the registered listeners, tracking success/failure counts,
     * and finally flushing the writer.
     *
     * {@inheritDoc}
     *
     * @param tr accumulates errors and is passed to every listener callback
     */
    public void transfer(TransferResult tr) {
        this.transferResult = tr;
        tr.setTransfer(this);
        // Pre-export hook may veto the whole run.
        if (!beforeExport())
            return;
        for (final TransferListener listener : listeners) {
            listener.onStart(tr);
        }
        while (hasNext()) {
            next();
            // Snapshot the error count so we can tell whether this item failed.
            int errors = tr.errors();
            // item transfer starts
            for (final TransferListener listener : listeners) {
                listener.onItemStart(tr);
            }
            long transferItemStart = System.currentTimeMillis();
            // perform the transfer
            transferItem();
            // if the export produced no new errors, count this item as a success
            if (tr.errors() == errors) {
                this.success++;
            } else {
                this.fail++;
            }
            logger.debug("tranfer item:{} take time:{}", String.valueOf(getTranferIndex()), String.valueOf(System.currentTimeMillis() - transferItemStart));
            // item transfer finished
            for (final TransferListener listener : listeners) {
                listener.onItemFinish(tr);
            }
        }
        for (final TransferListener listener : listeners) {
            listener.onFinish(tr);
        }
        // tell the writer that output is complete
        writer.flush();
    }
}
public class ObjectType { /** * Due to the complexity of some of our internal type systems , sometimes * we have different types constructed by the same constructor . * In other parts of the type system , these are called delegates . * We construct these types by appending suffixes to the constructor name . * The normalized reference name does not have these suffixes , and as such , * recollapses these implicit types back to their real type . Note that * suffixes such as " . prototype " can be added < i > after < / i > the delegate * suffix , so anything after the parentheses must still be retained . */ @ Nullable public final String getNormalizedReferenceName ( ) { } }
String name = getReferenceName ( ) ; if ( name != null ) { int start = name . indexOf ( '(' ) ; if ( start != - 1 ) { int end = name . lastIndexOf ( ')' ) ; String prefix = name . substring ( 0 , start ) ; return end + 1 % name . length ( ) == 0 ? prefix : prefix + name . substring ( end + 1 ) ; } } return name ;
public class ConnectionHandle {

    /**
     * Get ConnectionHandle stored in VirtualConnection state map (if one isn't
     * there already, assign one).
     *
     * @param vc virtual connection to look up; may be null
     * @return ConnectionHandle, or null when vc is null
     */
    public static ConnectionHandle getConnectionHandle(VirtualConnection vc) {
        if (vc == null) {
            return null;
        }
        ConnectionHandle connHandle = (ConnectionHandle) vc.getStateMap().get(CONNECTION_HANDLE_VC_KEY);
        // We can be here for two reasons:
        // a) We have one, just needed to find it in VC state map
        if (connHandle != null)
            return connHandle;
        // b) We want a new one
        connHandle = factory.createConnectionHandle();
        // For some connections (outbound, most notably), the connection type
        // will be set on the VC before connect... in which case, read it
        // and set it on the connection handle at the earliest point
        // possible. (Factory may return null - hence the guard.)
        if (connHandle != null) {
            connHandle.setConnectionType(vc);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "getConnectionHandle - created new connection handle: " + connHandle);
        }
        // Cache the new handle back into the VC state map for later lookups.
        setConnectionHandle(vc, connHandle);
        return connHandle;
    }
}
public class SequenceListType {

    /**
     * Gets the value of the sequence property.
     *
     * This accessor method returns a reference to the live list, not a
     * snapshot. Therefore any modification you make to the returned list will
     * be present inside the JAXB object. This is why there is not a
     * <CODE>set</CODE> method for the sequence property.
     *
     * For example, to add a new item, do as follows:
     * <pre>
     * getSequence().add(newItem);
     * </pre>
     *
     * Objects of the following type(s) are allowed in the list:
     * {@link SequenceListType.Sequence}
     *
     * @return the live, lazily-created list backing the sequence property
     */
    public List<SequenceListType.Sequence> getSequence() {
        // Lazily create the backing list so callers can always mutate it in place.
        if (sequence == null) {
            sequence = new ArrayList<SequenceListType.Sequence>();
        }
        return this.sequence;
    }
}
public class C10NConfigBase { /** * < p > Create a method annotation binding to the specified locale * < p > There are two basic usages : * < pre > < code > * bindAnnotation ( Ja . class ) . to ( Locale . JAPANESE ) ; * < / code > < / pre > * which will tell c10n to take the value given in the < code > @ Ja < / code > * annotation whenever the current locale is < code > Locale . JAPANESE < / code > * < p > The second usage is : * < pre > < code > * bindAnnotation ( En . class ) ; * < / code > < / pre > * which will make c10n always fallback to the value given in the < code > @ En < / code > * annotation if no other annotation binding matched the current locale . * < p > Note : Some default annotation bindings are defined in { @ link com . github . rodionmoiseev . c10n . annotations . DefaultC10NAnnotations } . * In order to use < code > install ( new DefaultC10NAnnotations ( ) ) ; < / code > somewhere in your configuration * ( see { @ link # install ( C10NConfigBase ) } * @ param annotationClass Class of the annotation to create a local binding for ( not - null ) * @ return annotation locale binding DSL object */ protected C10NAnnotationBinder bindAnnotation ( Class < ? extends Annotation > annotationClass ) { } }
Preconditions . assertNotNull ( annotationClass , "annotationClass" ) ; checkAnnotationInterface ( annotationClass ) ; C10NAnnotationBinder binder = new C10NAnnotationBinder ( ) ; annotationBinders . put ( annotationClass , binder ) ; return binder ;
public class AtomicGrowingSparseHashMatrix {

    /**
     * Returns the row vector, locking the data if {@code shouldLock} is
     * {@code true}.
     *
     * @param row index of the row to materialize
     * @param shouldLock whether to hold the row lock while reading
     * @return a newly built sparse vector with the row's current values
     */
    private SparseDoubleVector getRowVector(int row, boolean shouldLock) {
        // Snapshot the column count first; the same value is used for both
        // lockRow and unlockRow so the lock scope is consistent.
        int c = cols.get();
        if (shouldLock)
            lockRow(row, c);
        // Ensure that the column data is up to date: spin until the vector
        // cache has caught up with every recorded modification.
        while (lastVectorCacheUpdate.get() != modifications.get())
            updateVectorCache();
        int[] colArr = rowToColsCache[row];
        // Copy each occupied column's value into a fresh sparse vector.
        SparseDoubleVector rowVec = new SparseHashDoubleVector(c);
        for (int column : colArr)
            rowVec.set(column, matrixEntries.get(new Entry(row, column)));
        if (shouldLock)
            unlockRow(row, c);
        return rowVec;
    }
}
public class DataDirEntry { /** * Calculates the file offset of the data directory based on the virtual * address and the entries in the section table . * This method is subject to change . * @ Beta * @ param table * @ return file offset of data directory */ public long getFileOffset ( SectionTable table ) { } }
checkArgument ( table != null , "section table must not be null" ) ; Optional < SectionHeader > section = maybeGetSectionTableEntry ( table ) ; if ( section . isPresent ( ) ) { long sectionRVA = section . get ( ) . getAlignedVirtualAddress ( ) ; long sectionOffset = section . get ( ) . getAlignedPointerToRaw ( ) ; return ( virtualAddress - sectionRVA ) + sectionOffset ; } return virtualAddress ; // TODO should be smaller than file length !
public class WritableName { /** * Return the name for a class . Default is { @ link Class # getName ( ) } . */ public static synchronized String getName ( Class writableClass ) { } }
String name = CLASS_TO_NAME . get ( writableClass ) ; if ( name != null ) return name ; return writableClass . getName ( ) ;
public class LangTools { /** * feats string should following the pattern " k1 = v1 | k2 = v2 | k3 = v3" */ public static DTree getDTreeFromCoNLLXString ( final String input ) { } }
if ( input == null || input . trim ( ) . isEmpty ( ) ) { return null ; } Map < Integer , Map < Integer , String > > semanticHeadsMap = new HashMap < > ( ) ; String [ ] dNodesString = input . split ( System . lineSeparator ( ) ) ; DTree tree = new DTree ( ) ; Arrays . stream ( dNodesString ) . map ( s -> s . split ( "\t" ) ) . forEachOrdered ( fields -> { int currentIndex = 0 ; int id = Integer . parseInt ( fields [ currentIndex ++ ] ) ; String form = fields [ currentIndex ++ ] ; String lemma = fields [ currentIndex ++ ] ; String cPOSTag = fields [ currentIndex ++ ] ; String pos = fields [ currentIndex ++ ] ; String feats = fields [ currentIndex ++ ] ; Map < String , String > featsMap = null ; if ( ! feats . equals ( "_" ) ) { featsMap = Arrays . stream ( feats . split ( "\\|" ) ) . map ( entry -> entry . split ( "=" ) ) . collect ( Collectors . toMap ( e -> e [ 0 ] , e -> e . length > 1 ? e [ 1 ] : StringUtils . EMPTY ) ) ; } else { featsMap = new HashMap < > ( ) ; } String headIndex = fields [ currentIndex ++ ] ; String depLabel = fields [ currentIndex ++ ] ; String dump1 = fields [ currentIndex ++ ] ; String dump2 = fields [ currentIndex ++ ] ; String semanticHeadsString = currentIndex >= fields . length ? "_" : fields [ currentIndex ] ; if ( ! semanticHeadsString . equals ( "_" ) ) { Map < Integer , String > semanticHeads = Arrays . stream ( semanticHeadsString . split ( "\\|" ) ) . map ( entry -> entry . split ( ":" ) ) . collect ( Collectors . toMap ( e -> Integer . parseInt ( e [ 0 ] ) , e -> ( e . length > 1 ) ? e [ 1 ] : StringUtils . EMPTY ) ) ; semanticHeadsMap . put ( id , semanticHeads ) ; } DNode node = id == 0 ? tree . getPaddingNode ( ) : new DNode ( id , form , lemma , cPOSTag , pos , depLabel ) ; if ( id == 0 && ! featsMap . containsKey ( "uuid" ) ) { featsMap . put ( "uuid" , tree . getUUID ( ) . toString ( ) ) ; } node . setFeats ( featsMap ) ; node . addFeature ( "head" , headIndex ) ; // by the time head might not be generated ! if ( node . 
getId ( ) == 0 ) { tree . getPaddingNode ( ) . setFeats ( node . getFeats ( ) ) ; // Substitute padding with actual 0 node from CoNLLX } else { tree . add ( node ) ; } } ) ; tree . getPaddingNode ( ) . getFeats ( ) . remove ( "head" ) ; for ( int i = 1 ; i < tree . size ( ) ; i ++ ) { DNode node = tree . get ( i ) ; int headIndex = Integer . parseInt ( node . getFeature ( "head" ) ) ; DNode head = tree . get ( headIndex ) ; head . addChild ( node ) ; node . getFeats ( ) . remove ( "head" ) ; node . setHead ( head ) ; } // Recover semantic heads . semanticHeadsMap . entrySet ( ) . parallelStream ( ) . map ( e -> { DNode node = tree . get ( e . getKey ( ) ) ; Map < Integer , String > nodeSemanticInfo = e . getValue ( ) ; for ( Integer id : nodeSemanticInfo . keySet ( ) ) { node . addSemanticHead ( tree . get ( id ) , nodeSemanticInfo . get ( id ) ) ; tree . get ( id ) . getSemanticChildren ( ) . add ( node ) ; } return null ; } ) . collect ( Collectors . toList ( ) ) ; return tree ;
public class CmsJspTagLink {

    /**
     * Reads the link from the tag body, substitutes it through the OpenCms
     * link handler, and writes the result back to the page.
     *
     * @see javax.servlet.jsp.tagext.Tag#doEndTag()
     *
     * @return EVAL_PAGE
     * @throws JspException in case something goes wrong
     */
    @Override
    public int doEndTag() throws JspException {
        ServletRequest req = pageContext.getRequest();
        // This will always be true if the page is called through OpenCms
        if (CmsFlexController.isCmsRequest(req)) {
            try {
                // Get link-string from the body and reset body
                String link = getBodyContent().getString();
                getBodyContent().clear();
                // Calculate the link substitution
                String newlink = linkTagAction(link, req, getBaseUri(), getDetailPage(), m_locale);
                // Write the result back to the page
                getBodyContent().print(newlink);
                getBodyContent().writeOut(pageContext.getOut());
            } catch (Exception ex) {
                if (LOG.isErrorEnabled()) {
                    LOG.error(Messages.get().getBundle().key(Messages.ERR_PROCESS_TAG_1, "link"), ex);
                }
                // Re-throw so the JSP container can report the failure.
                throw new JspException(ex);
            }
        }
        return EVAL_PAGE;
    }
}
public class BDBResourceFileLocationDB { /** * add an url location for a name , unless it already exists * @ param name * @ param url * @ throws IOException */ public void addNameUrl ( final String name , final String url ) throws IOException { } }
// need to first see if there is already an entry for this name . // if not , add url as the value . // if so , check the current url locations for name // if url exists , do nothing // if url does not exist , add , and set that as the value . String newValue = null ; String oldValue = get ( name ) ; if ( oldValue != null && oldValue . length ( ) > 0 ) { String curUrls [ ] = oldValue . split ( urlDelimiterRE ) ; boolean found = false ; for ( int i = 0 ; i < curUrls . length ; i ++ ) { if ( url . equals ( curUrls [ i ] ) ) { found = true ; break ; } } if ( found == false ) { newValue = oldValue + " " + url ; } } else { // null or empty value newValue = url ; if ( oldValue == null ) log . addName ( name ) ; } // did we find a value ? if ( newValue != null ) { put ( name , newValue ) ; }
public class PNGDecoder { /** * Overwrites the tRNS chunk entry to make a selected color transparent . * < p > This can only be invoked when the image has no alpha channel . < / p > * < p > Calling this method causes { @ link # hasAlpha ( ) } to return true . < / p > * @ param r the red component of the color to make transparent * @ param g the green component of the color to make transparent * @ param b the blue component of the color to make transparent * @ throws UnsupportedOperationException if the tRNS chunk data can ' t be set * @ see # hasAlphaChannel ( ) */ public void overwriteTRNS ( byte r , byte g , byte b ) { } }
if ( hasAlphaChannel ( ) ) { throw new UnsupportedOperationException ( "image has an alpha channel" ) ; } byte [ ] pal = this . palette ; if ( pal == null ) { transPixel = new byte [ ] { 0 , r , 0 , g , 0 , b } ; } else { paletteA = new byte [ pal . length / 3 ] ; for ( int i = 0 , j = 0 ; i < pal . length ; i += 3 , j ++ ) { if ( pal [ i ] != r || pal [ i + 1 ] != g || pal [ i + 2 ] != b ) { paletteA [ j ] = ( byte ) 0xFF ; } } }
public class ServiceFacade {

    /**
     * Call destination (lightblue) using a timeout in dual read/write phases.
     * Do not use facade timeout during lightblue proxy and kinda proxy phases
     * (when reading from source is disabled).
     *
     * @param listenableFuture the pending destination call
     * @param methodName method name used to read method-specific timeout configuration
     * @param facadeOperation operation kind used to read the timeout configuration
     * @param destinationCallTimeout future timeout in milliseconds
     * @return the future's result
     * @throws InterruptedException
     * @throws ExecutionException
     * @throws TimeoutException
     */
    private <T> T getWithTimeout(ListenableFuture<T> listenableFuture, String methodName,
            FacadeOperation facadeOperation, int destinationCallTimeout)
            throws InterruptedException, ExecutionException, TimeoutException {
        // When the source/legacy side is not read, we are in a proxy or
        // "kinda proxy" phase; likewise a non-positive configured timeout
        // means "no deadline". In both cases wait on the future unbounded.
        // (Short-circuit: the timeout config is only consulted when reading
        // from source is enabled, matching the original evaluation order.)
        final boolean waitWithoutDeadline = !shouldSource(FacadeOperation.READ)
                || timeoutConfiguration.getTimeoutMS(methodName, facadeOperation) <= 0;
        if (waitWithoutDeadline) {
            return listenableFuture.get();
        }
        return listenableFuture.get(destinationCallTimeout, TimeUnit.MILLISECONDS);
    }
}
public class DynamicCacheHelper { /** * Basic logic to compare two keys for equality */ protected final boolean compareKeys ( K k1 , K k2 ) { } }
return k1 == k2 || ( k1 != null && k1 . equals ( k2 ) ) ;
public class SecurityToken { /** * Creates a encoded secure random string . * @ return Base64 encoded string . */ private static String createSecureRandom ( ) { } }
try { final String no = "" + SECURE_RANDOM . nextInt ( ) ; final MessageDigest md = MessageDigest . getInstance ( "SHA-1" ) ; final byte [ ] digest = md . digest ( no . getBytes ( ) ) ; return encodeBase64 ( digest ) ; } catch ( final NoSuchAlgorithmException ex ) { throw new RuntimeException ( ex ) ; }
public class OkCoinExchange { /** * Adjust host parameters depending on exchange specific parameters */ private static void concludeHostParams ( ExchangeSpecification exchangeSpecification ) { } }
if ( exchangeSpecification . getExchangeSpecificParameters ( ) != null ) { if ( exchangeSpecification . getExchangeSpecificParametersItem ( "Use_Intl" ) . equals ( true ) && exchangeSpecification . getExchangeSpecificParametersItem ( "Use_Futures" ) . equals ( false ) ) { exchangeSpecification . setSslUri ( "https://www.okex.com/api" ) ; exchangeSpecification . setHost ( "www.okex.com" ) ; } else if ( exchangeSpecification . getExchangeSpecificParametersItem ( "Use_Intl" ) . equals ( true ) && exchangeSpecification . getExchangeSpecificParametersItem ( "Use_Futures" ) . equals ( true ) ) { exchangeSpecification . setSslUri ( "https://www.okex.com/api" ) ; exchangeSpecification . setHost ( "www.okex.com" ) ; } }
public class CommerceWishListItemPersistenceImpl { /** * Returns all the commerce wish list items . * @ return the commerce wish list items */ @ Override public List < CommerceWishListItem > findAll ( ) { } }
return findAll ( QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ;
public class InetAddressRange {

    /**
     * static utils
     */
    // Parses a textual address into an InetAddress, converting the checked
    // UnknownHostException into this package's ParseException.
    // NOTE(review): InetAddress.getByName also accepts host names and may then
    // perform a DNS lookup — confirm callers only pass literal IP addresses.
    private static InetAddress forString(String s) throws ParseException {
        try {
            return InetAddress.getByName(s);
        } catch (UnknownHostException e) {
            // Preserve the original cause for diagnostics.
            throw new ParseException(InetAddressRange.class, "Invalid network supplied", e);
        }
    }
}
public class Character { /** * Returns the { @ code int } value that the specified * character ( Unicode code point ) represents . For example , the character * { @ code ' \ u005Cu216C ' } ( the Roman numeral fifty ) will return * an { @ code int } with a value of 50. * The letters A - Z in their uppercase ( { @ code ' \ u005Cu0041 ' } through * { @ code ' \ u005Cu005A ' } ) , lowercase * ( { @ code ' \ u005Cu0061 ' } through { @ code ' \ u005Cu007A ' } ) , and * full width variant ( { @ code ' \ u005CuFF21 ' } through * { @ code ' \ u005CuFF3A ' } and { @ code ' \ u005CuFF41 ' } through * { @ code ' \ u005CuFF5A ' } ) forms have numeric values from 10 * through 35 . This is independent of the Unicode specification , * which does not assign numeric values to these { @ code char } * values . * If the character does not have a numeric value , then - 1 is returned . * If the character has a numeric value that cannot be represented as a * nonnegative integer ( for example , a fractional value ) , then - 2 * is returned . * @ param codePoint the character ( Unicode code point ) to be converted . * @ return the numeric value of the character , as a nonnegative { @ code int } * value ; - 2 if the character has a numeric value that is not a * nonnegative integer ; - 1 if the character has no numeric value . * @ see Character # forDigit ( int , int ) * @ see Character # isDigit ( int ) * @ since 1.5 */ public static int getNumericValue ( int codePoint ) { } }
// This is both an optimization and papers over differences between Java and ICU . if ( codePoint < 128 ) { if ( codePoint >= '0' && codePoint <= '9' ) { return codePoint - '0' ; } if ( codePoint >= 'a' && codePoint <= 'z' ) { return codePoint - ( 'a' - 10 ) ; } if ( codePoint >= 'A' && codePoint <= 'Z' ) { return codePoint - ( 'A' - 10 ) ; } return - 1 ; } // Full - width uppercase A - Z . if ( codePoint >= 0xff21 && codePoint <= 0xff3a ) { return codePoint - 0xff17 ; } // Full - width lowercase a - z . if ( codePoint >= 0xff41 && codePoint <= 0xff5a ) { return codePoint - 0xff37 ; } return getNumericValueImpl ( codePoint ) ;
public class DocumentModelResources { /** * Load the external resources such as gazetters and clustering lexicons . * @ param params * the training parameters * @ return the map contanining and id and the resource * @ throws IOException * if io error */ public static Map < String , Object > loadDocumentResources ( final TrainingParameters params ) throws IOException { } }
final Map < String , Object > resources = new HashMap < String , Object > ( ) ; @ SuppressWarnings ( "rawtypes" ) final Map < String , ArtifactSerializer > artifactSerializers = DocumentClassifierModel . createArtifactSerializers ( ) ; if ( Flags . isBrownFeatures ( params ) ) { final String ClusterLexiconPath = Flags . getBrownFeatures ( params ) ; final String serializerId = "brownserializer" ; final List < File > ClusterLexiconFiles = Flags . getClusterLexiconFiles ( ClusterLexiconPath ) ; for ( final File ClusterLexiconFile : ClusterLexiconFiles ) { final String brownFilePath = ClusterLexiconFile . getCanonicalPath ( ) ; artifactSerializers . put ( serializerId , new WordCluster . WordClusterSerializer ( ) ) ; loadResource ( serializerId , artifactSerializers , brownFilePath , resources ) ; } } if ( Flags . isClarkFeatures ( params ) ) { final String clarkClusterPath = Flags . getClarkFeatures ( params ) ; final String serializerId = "clarkserializer" ; final List < File > clarkClusterFiles = Flags . getClusterLexiconFiles ( clarkClusterPath ) ; for ( final File clarkClusterFile : clarkClusterFiles ) { final String clarkFilePath = clarkClusterFile . getCanonicalPath ( ) ; artifactSerializers . put ( serializerId , new WordCluster . WordClusterSerializer ( ) ) ; loadResource ( serializerId , artifactSerializers , clarkFilePath , resources ) ; } } if ( Flags . isWord2VecClusterFeatures ( params ) ) { final String ClusterLexiconPath = Flags . getWord2VecClusterFeatures ( params ) ; final String serializerId = "word2vecserializer" ; final List < File > ClusterLexiconFiles = Flags . getClusterLexiconFiles ( ClusterLexiconPath ) ; for ( final File ClusterLexiconFile : ClusterLexiconFiles ) { final String word2vecFilePath = ClusterLexiconFile . getCanonicalPath ( ) ; artifactSerializers . put ( serializerId , new WordCluster . WordClusterSerializer ( ) ) ; loadResource ( serializerId , artifactSerializers , word2vecFilePath , resources ) ; } } if ( Flags . 
isDictionaryPolarityFeatures ( params ) ) { final String dictDir = Flags . getDictionaryPolarityFeatures ( params ) ; final String serializerId = "polaritydictionaryserializer" ; final List < File > fileList = StringUtils . getFilesInDir ( new File ( dictDir ) ) ; for ( final File dictFile : fileList ) { final String dictionaryPath = dictFile . getCanonicalPath ( ) ; artifactSerializers . put ( serializerId , new Dictionary . DictionarySerializer ( ) ) ; loadResource ( serializerId , artifactSerializers , dictionaryPath , resources ) ; } } if ( Flags . isDictionaryFeatures ( params ) ) { final String dictDir = Flags . getDictionaryFeatures ( params ) ; final String serializerId = "dictionaryserializer" ; final List < File > fileList = StringUtils . getFilesInDir ( new File ( dictDir ) ) ; for ( final File dictFile : fileList ) { final String dictionaryPath = dictFile . getCanonicalPath ( ) ; artifactSerializers . put ( serializerId , new Dictionary . DictionarySerializer ( ) ) ; loadResource ( serializerId , artifactSerializers , dictionaryPath , resources ) ; } } if ( Flags . isFrequentWordFeatures ( params ) ) { final String dictDir = Flags . getFrequentWordFeatures ( params ) ; final String serializerId = "frequentdictionaryserializer" ; final List < File > fileList = StringUtils . getFilesInDir ( new File ( dictDir ) ) ; for ( final File dictFile : fileList ) { final String dictionaryPath = dictFile . getCanonicalPath ( ) ; artifactSerializers . put ( serializerId , new Dictionary . DictionarySerializer ( ) ) ; loadResource ( serializerId , artifactSerializers , dictionaryPath , resources ) ; } } if ( Flags . isTargetFeatures ( params ) ) { final String morphoResourcesPath = Flags . getTargetFeatures ( params ) ; final String posSerializerId = "otemodelserializer" ; artifactSerializers . put ( posSerializerId , new SequenceModelResource . 
SequenceModelResourceSerializer ( ) ) ; loadResource ( posSerializerId , artifactSerializers , morphoResourcesPath , resources ) ; } if ( Flags . isPOSTagModelFeatures ( params ) ) { final String morphoResourcesPath = Flags . getPOSTagModelFeatures ( params ) ; final String posSerializerId = "seqmodelserializer" ; artifactSerializers . put ( posSerializerId , new SequenceModelResource . SequenceModelResourceSerializer ( ) ) ; loadResource ( posSerializerId , artifactSerializers , morphoResourcesPath , resources ) ; } if ( Flags . isPOSDictionaryFeatures ( params ) ) { final String posDictPath = Flags . getPOSDictionaryFeatures ( params ) ; final String posSerializerId = "posdictserializer" ; artifactSerializers . put ( posSerializerId , new POSDictionary . POSDictionarySerializer ( ) ) ; loadResource ( posSerializerId , artifactSerializers , posDictPath , resources ) ; } if ( Flags . isLemmaModelFeatures ( params ) ) { final String lemmaModelPath = Flags . getLemmaModelFeatures ( params ) ; final String lemmaSerializerId = "seqmodelserializer" ; artifactSerializers . put ( lemmaSerializerId , new SequenceModelResource . SequenceModelResourceSerializer ( ) ) ; loadResource ( lemmaSerializerId , artifactSerializers , lemmaModelPath , resources ) ; } if ( Flags . isLemmaDictionaryFeatures ( params ) ) { final String lemmaDictPath = Flags . getLemmaDictionaryFeatures ( params ) ; final String [ ] lemmaDictResources = Flags . getLemmaDictionaryResources ( lemmaDictPath ) ; final String posSerializerId = "seqmodelserializer" ; final String lemmaDictSerializerId = "lemmadictserializer" ; artifactSerializers . put ( posSerializerId , new SequenceModelResource . SequenceModelResourceSerializer ( ) ) ; loadResource ( posSerializerId , artifactSerializers , lemmaDictResources [ 0 ] , resources ) ; artifactSerializers . put ( lemmaDictSerializerId , new DictionaryLemmatizer . 
DictionaryLemmatizerSerializer ( ) ) ; loadResource ( lemmaDictSerializerId , artifactSerializers , lemmaDictResources [ 1 ] , resources ) ; } return resources ;
public class Cast { /** * < p > Casts a map in a lazy fashion . Changes to the returned map are reflected in the * the given map . < / p > * @ param < K > the type parameter * @ param < V > the type parameter * @ param map the map to cast * @ return the casted map or null if the map is null */ public static < K , V > Map < K , V > cast ( Map < ? , ? > map ) { } }
return map == null ? null : new CastingMap < K , V > ( map ) ;
public class DOType { /** * Answer a list of all field names declared by this type . * @ return */ public List < String > getDeclaredFieldNames ( ) { } }
if ( this . declaredFieldNames == null ) { this . declaredFieldNames = new ArrayList < String > ( this . declaredFields . size ( ) ) ; for ( DOField f : this . declaredFields ) { this . declaredFieldNames . add ( f . getName ( ) ) ; } } return this . declaredFieldNames ;
public class DetectDocumentTextRequestMarshaller {

    /**
     * Marshall the given parameter object.
     */
    public void marshall(DetectDocumentTextRequest detectDocumentTextRequest, ProtocolMarshaller protocolMarshaller) {
        // Fail fast on a null request rather than NPE-ing inside the marshaller.
        if (detectDocumentTextRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // The request carries a single field: the document to analyze.
            protocolMarshaller.marshall(detectDocumentTextRequest.getDocument(), DOCUMENT_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class TextBuilder { /** * Print an integer to internal buffer or output ( os or writer ) * @ param i * @ return this builder */ public final TextBuilder p ( int i ) { } }
if ( null != __buffer ) __append ( i ) ; else __caller . p ( i ) ; return this ;
public class BlockLocation { /** * < code > optional . alluxio . grpc . WorkerNetAddress workerAddress = 2 ; < / code > */ public alluxio . grpc . WorkerNetAddress getWorkerAddress ( ) { } }
return workerAddress_ == null ? alluxio . grpc . WorkerNetAddress . getDefaultInstance ( ) : workerAddress_ ;
public class JdbcDatabaseManager { /** * Searches embedded databases where the driver matches the argument . * @ param driverClassName The driver class name to search on * @ return True if the driver matches an embedded database type */ public static boolean isEmbedded ( String driverClassName ) { } }
return databases . stream ( ) . filter ( JdbcDatabase :: isEmbedded ) . anyMatch ( db -> db . driverClassName . equals ( driverClassName ) ) ;