signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class DiffNode { /** * Retrieve a child that matches the given path element relative to this node . * @ param elementSelector The path element of the child node to get . * @ return The requested child node or < code > null < / code > . */ public DiffNode getChild ( final ElementSelector elementSelector ) { } }
if ( elementSelector instanceof CollectionItemElementSelector && childIdentityStrategy != null ) { return children . get ( ( ( CollectionItemElementSelector ) elementSelector ) . copyWithIdentityStrategy ( childIdentityStrategy ) ) ; } else { return children . get ( elementSelector ) ; }
public class GCMParameterSpec { /** * Check input parameters . */ private void init ( int tLen , byte [ ] src , int offset , int len ) { } }
if ( tLen < 0 ) { throw new IllegalArgumentException ( "Length argument is negative" ) ; } this . tLen = tLen ; // Input sanity check if ( ( src == null ) || ( len < 0 ) || ( offset < 0 ) || ( ( len + offset ) > src . length ) ) { throw new IllegalArgumentException ( "Invalid buffer arguments" ) ; } iv = new byte [ len ] ; System . arraycopy ( src , offset , iv , 0 , len ) ;
public class SSLUtil { /** * Reads an entire SSL message from the specified * input stream . * @ param in the input stream to read the SSL message * from . * @ return the byte array containing the SSL message * @ exception IOException if I / O error occurs . */ public static byte [ ] readSslMessage ( InputStream in ) throws IOException { } }
byte [ ] header = new byte [ 5 ] ; readFully ( in , header , 0 , header . length ) ; int length ; if ( isSSLv3Packet ( header ) ) length = toShort ( header [ 3 ] , header [ 4 ] ) ; else if ( isSSLv2HelloPacket ( header ) ) length = ( ( ( header [ 0 ] & 0x7f ) << 8 ) | ( header [ 1 ] & 0xff ) ) - 3 ; else { throw new IOException ( "Invalid SSL header" ) ; } byte [ ] inToken = new byte [ header . length + length ] ; System . arraycopy ( header , 0 , inToken , 0 , header . length ) ; readFully ( in , inToken , header . length , length ) ; return inToken ;
public class DayOpeningHours { /** * Returns the difference when changing this opening hours to the other one . The day of the week must be equal for both compared * instances . * @ param toOther * Opening hours to compare with . * @ return List of changes or an empty list if both are equal . */ public final List < Change > diff ( final DayOpeningHours toOther ) { } }
Contract . requireArgNotNull ( "toOther" , toOther ) ; if ( dayOfTheWeek != toOther . dayOfTheWeek ) { throw new ConstraintViolationException ( "Expected same day (" + dayOfTheWeek + ") for argument 'toOther', but was: " + toOther . dayOfTheWeek ) ; } final List < DayOpeningHours > fromDays = normalize ( ) ; final List < DayOpeningHours > toDays = toOther . normalize ( ) ; final List < Change > changes = new ArrayList < > ( ) ; if ( fromDays . size ( ) == 1 ) { if ( toDays . size ( ) == 1 ) { // Both only 1 day final DayOpeningHours from = fromDays . get ( 0 ) ; final DayOpeningHours to = toDays . get ( 0 ) ; changes . addAll ( changes ( this . dayOfTheWeek , from . hourRanges , to . hourRanges ) ) ; } else { // From 1 day / To 2 days final DayOpeningHours from = fromDays . get ( 0 ) ; final DayOpeningHours to1 = toDays . get ( 0 ) ; final DayOpeningHours to2 = toDays . get ( 1 ) ; changes . addAll ( changes ( this . dayOfTheWeek , from . hourRanges , to1 . hourRanges ) ) ; changes . addAll ( changes ( ChangeType . ADDED , to2 . dayOfTheWeek , to2 . hourRanges ) ) ; } } else { if ( toDays . size ( ) == 1 ) { // From 2 days / To 1 day final DayOpeningHours from1 = fromDays . get ( 0 ) ; final DayOpeningHours from2 = fromDays . get ( 1 ) ; final DayOpeningHours to = toDays . get ( 0 ) ; changes . addAll ( changes ( this . dayOfTheWeek , from1 . hourRanges , to . hourRanges ) ) ; changes . addAll ( changes ( ChangeType . REMOVED , from2 . dayOfTheWeek , from2 . hourRanges ) ) ; } else { // Both 2 days final DayOpeningHours from1 = fromDays . get ( 0 ) ; final DayOpeningHours from2 = fromDays . get ( 1 ) ; final DayOpeningHours to1 = toDays . get ( 0 ) ; final DayOpeningHours to2 = toDays . get ( 1 ) ; changes . addAll ( changes ( from1 . dayOfTheWeek , from1 . hourRanges , to1 . hourRanges ) ) ; changes . addAll ( changes ( from2 . dayOfTheWeek , from2 . hourRanges , to2 . hourRanges ) ) ; } } return changes ;
public class SSLConnectionLink { /** * This method is called if connect or connectAsync are called redundantly , after * the connection is already established . It cleans up the SSL engine . The connect * methods will then pass the connect on down the chain where , eventually , a new * socket will be established with this virtual connection . */ private void handleRedundantConnect ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . entry ( tc , "handleRedundantConnect, vc=" + getVCHash ( ) ) ; } // This conn link has already been connected . // Need to shut get a new SSL engine . cleanup ( ) ; // PK46069 - use engine that allows session id re - use sslEngine = SSLUtils . getOutboundSSLEngine ( sslContext , getLinkConfig ( ) , targetAddress . getRemoteAddress ( ) . getHostName ( ) , targetAddress . getRemoteAddress ( ) . getPort ( ) , this ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "New SSL engine=" + getSSLEngine ( ) . hashCode ( ) + " for vc=" + getVCHash ( ) ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "handleRedundantConnect" ) ; }
public class Instrumented { /** * Determines whether an object or , if it is a { @ link org . apache . gobblin . util . Decorator } , any object on its lineage , * is of class { @ link org . apache . gobblin . instrumented . Instrumentable } . * @ param obj Object to analyze . * @ return Whether the lineage is instrumented . */ public static boolean isLineageInstrumented ( Object obj ) { } }
List < Object > lineage = DecoratorUtils . getDecoratorLineage ( obj ) ; for ( Object node : lineage ) { if ( node instanceof Instrumentable ) { return true ; } } return false ;
public class BaseAnnotationProcessorFactory { /** * Get a Sun annotation processor for the given annotation type declarations . This just wraps the type declarations * in our wrapper types , and returns an annotation processor that delegates to our " core " ( apt - independent ) * processor to do the real work . */ public final AnnotationProcessor getProcessorFor ( Set annotationTypeDeclarations , AnnotationProcessorEnvironment env ) { } }
CoreAnnotationProcessorEnv coreEnv = CoreAnnotationProcessorEnvImpl . get ( env ) ; AnnotationTypeDeclaration [ ] atds = new AnnotationTypeDeclaration [ annotationTypeDeclarations . size ( ) ] ; int j = 0 ; for ( Iterator i = annotationTypeDeclarations . iterator ( ) ; i . hasNext ( ) ; ) { // Wrap each Sun / Mirror annotation type declaration with our core AnnotationTypeDeclaration . com . sun . mirror . declaration . AnnotationTypeDeclaration decl = ( com . sun . mirror . declaration . AnnotationTypeDeclaration ) i . next ( ) ; atds [ j ++ ] = WrapperFactory . get ( ) . getAnnotationTypeDeclaration ( decl ) ; } CoreAnnotationProcessor ap = getCoreProcessorFor ( atds , coreEnv ) ; return ap != null ? new DelegatingAnnotationProcessor ( ap ) : null ;
public class MilestonesApi { /** * Get a list of group milestones . * @ param groupIdOrPath the group in the form of an Integer ( ID ) , String ( path ) , or Group instance * @ param page the page number to get * @ param perPage how many milestones per page * @ return the milestones associated with the specified group * @ throws GitLabApiException if any exception occurs */ public List < Milestone > getGroupMilestones ( Object groupIdOrPath , int page , int perPage ) throws GitLabApiException { } }
Response response = get ( Response . Status . OK , getPageQueryParams ( page , perPage ) , "groups" , getGroupIdOrPath ( groupIdOrPath ) , "milestones" ) ; return ( response . readEntity ( new GenericType < List < Milestone > > ( ) { } ) ) ;
public class XAbstractWhileExpressionImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public NotificationChain basicSetBody ( XExpression newBody , NotificationChain msgs ) { } }
XExpression oldBody = body ; body = newBody ; if ( eNotificationRequired ( ) ) { ENotificationImpl notification = new ENotificationImpl ( this , Notification . SET , XbasePackage . XABSTRACT_WHILE_EXPRESSION__BODY , oldBody , newBody ) ; if ( msgs == null ) msgs = notification ; else msgs . add ( notification ) ; } return msgs ;
public class Interaction { /** * Create a InteractionDeleter to execute delete . * @ param pathServiceSid The SID of the parent Service of the resource to delete * @ param pathSessionSid he SID of the parent Session of the resource to delete * @ param pathSid The unique string that identifies the resource * @ return InteractionDeleter capable of executing the delete */ public static InteractionDeleter deleter ( final String pathServiceSid , final String pathSessionSid , final String pathSid ) { } }
return new InteractionDeleter ( pathServiceSid , pathSessionSid , pathSid ) ;
public class DefaultGroovyMethods { /** * Concatenates the < code > toString ( ) < / code > representation of each * item from the iterator , with the given String as a separator between * each item . The iterator will become exhausted of elements after * determining the resulting conjoined value . * @ param self an Iterator of items * @ param separator a String separator * @ return the joined String * @ since 1.5.5 */ public static String join ( Iterator < Object > self , String separator ) { } }
return join ( ( Iterable ) toList ( self ) , separator ) ;
public class UnlinkIdentityRequest { /** * Provider names to unlink from this identity . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setLoginsToRemove ( java . util . Collection ) } or { @ link # withLoginsToRemove ( java . util . Collection ) } if you want * to override the existing values . * @ param loginsToRemove * Provider names to unlink from this identity . * @ return Returns a reference to this object so that method calls can be chained together . */ public UnlinkIdentityRequest withLoginsToRemove ( String ... loginsToRemove ) { } }
if ( this . loginsToRemove == null ) { setLoginsToRemove ( new java . util . ArrayList < String > ( loginsToRemove . length ) ) ; } for ( String ele : loginsToRemove ) { this . loginsToRemove . add ( ele ) ; } return this ;
public class ProjectServiceV1 {

    /**
     * DELETE /projects/{projectName}
     *
     * <p>Removes a project.
     */
    @Delete("/projects/{projectName}")
    @RequiresRole(roles = ProjectRole.OWNER)
    public CompletableFuture<Void> removeProject(Project project, Author author) {
        // Metadata must be updated first because it cannot be updated once the
        // project itself has been removed.
        return mds.removeProject(author, project.name())
                  .thenCompose(unused -> execute(Command.removeProject(author, project.name())))
                  .handle(HttpApiUtil::throwUnsafelyIfNonNull);
    }
}
public class UnitFactorMap { /** * Debugging conversion factors . */ public String dump ( Unit unit ) { } }
StringBuilder buf = new StringBuilder ( ) ; buf . append ( unit ) . append ( ":\n" ) ; Map < Unit , UnitFactor > map = factors . get ( unit ) ; for ( Unit base : map . keySet ( ) ) { UnitFactor factor = map . get ( base ) ; buf . append ( " " ) . append ( factor ) . append ( " " ) ; buf . append ( factor . rational ( ) . compute ( RoundingMode . HALF_EVEN ) . toPlainString ( ) ) . append ( '\n' ) ; } return buf . toString ( ) ;
public class Pubsub { /** * Pull a batch of messages . * @ param canonicalSubscriptionName The canonical ( including project name ) subscription to pull from . * @ param returnImmediately { @ code true } to return immediately if the queue is empty . { @ code false } to wait * for at least one message before returning . * @ param maxMessages Maximum number of messages to return in batch . * @ return a future that is completed with a list of received messages . */ public PubsubFuture < List < ReceivedMessage > > pull ( final String canonicalSubscriptionName , final boolean returnImmediately , final int maxMessages ) { } }
final String path = canonicalSubscriptionName + ":pull" ; final PullRequest req = PullRequest . builder ( ) . returnImmediately ( returnImmediately ) . maxMessages ( maxMessages ) . build ( ) ; return pull ( path , req ) ;
public class UTFDataOutputStream { /** * The UTF - 8 encoding uses sequences of 1 , 2 , or 3 bytes per character . With * he maximal length of the fragment we want to ensure , that there are no * overflow of 65536 byte sized buffer * @ param str * String to be written in the output stream * @ throws IOException */ public void writeFragmentedUTF ( String str ) throws IOException { } }
if ( str . length ( ) <= MAX_LENGTH ) { writeLastUTFFragment ( str ) ; } else { writeUTFFragment ( str . substring ( 0 , MAX_LENGTH ) ) ; writeFragmentedUTF ( str . substring ( MAX_LENGTH ) ) ; }
public class MigrateRowsBase {

    /**
     * Executes a pre-compiled adHoc SQL statement.
     *
     * @param catStmt    the catalog statement whose SQL text and plan are borrowed
     * @param params     parameters for the statement
     * @param replicated whether the target table is replicated
     * @return count of rows inserted or upserted
     * @throws VoltAbortException if any failure at all
     */
    VoltTable executePrecompiledSQL(Statement catStmt, Object[] params, boolean replicated)
            throws VoltAbortException {
        // Create a SQLStmt instance on the fly. This is unusual, as SQLStmts are
        // typically required to be final instance variables; it only works here
        // because the SQL text and plan are identical to the borrowed procedure's.
        SQLStmt stmt = new SQLStmt(catStmt.getSqltext());
        if (replicated) {
            stmt.setInCatalog(false);
        }
        m_runner.initSQLStmt(stmt, catStmt);
        voltQueueSQL(stmt, params);
        return voltExecuteSQL()[0];
    }
}
public class CommerceWarehouseItemLocalServiceWrapper { /** * Creates a new commerce warehouse item with the primary key . Does not add the commerce warehouse item to the database . * @ param commerceWarehouseItemId the primary key for the new commerce warehouse item * @ return the new commerce warehouse item */ @ Override public com . liferay . commerce . model . CommerceWarehouseItem createCommerceWarehouseItem ( long commerceWarehouseItemId ) { } }
return _commerceWarehouseItemLocalService . createCommerceWarehouseItem ( commerceWarehouseItemId ) ;
public class AbstractIoBuffer { /** * { @ inheritDoc } */ @ Override public final IoBuffer limit ( int newLimit ) { } }
autoExpand ( newLimit , 0 ) ; buf ( ) . limit ( newLimit ) ; if ( mark > newLimit ) { mark = - 1 ; } return this ;
public class DefaultExpander { /** * Expand all configured keywords * @ param drl * @ return */ private String expandKeywords ( String drl ) { } }
substitutions = new ArrayList < Map < String , String > > ( ) ; // apply all keywords templates drl = substitute ( drl , this . keywords , 0 , useKeyword , false ) ; substitutions = null ; return drl ;
public class TimedInterface { /** * Creates a new TimedInterface for a given interface < code > ctype < / code > with a concrete class * < code > concrete < / code > . */ public static < T > T newProxy ( Class < T > ctype , T concrete ) { } }
return newProxy ( ctype , concrete , null ) ;
public class PdfOutline { /** * Sets the title of this outline * @ param title */ public void setTitle ( String title ) { } }
put ( PdfName . TITLE , new PdfString ( title , PdfObject . TEXT_UNICODE ) ) ;
public class DeviceAttribute_3 { public void insert ( final String [ ] argin , final int dim_x , final int dim_y ) { } }
deviceattribute_3DAO . insert ( argin , dim_x , dim_y ) ;
public class CmsXmlGroupContainerFactory {

    /**
     * Factory method to unmarshal (read) a group container instance from an
     * OpenCms VFS file that contains XML data, using either the encoding set in
     * the XML file header or the encoding set in the VFS file property.<p>
     *
     * If you are not sure about the implications of the encoding issues, use
     * {@link #unmarshal(CmsObject, CmsFile)} instead.<p>
     *
     * <b>Warning:</b><br/>
     * This method does not support requested historic versions, it always loads
     * the most recent version. Use
     * <code>{@link #unmarshal(CmsObject, CmsResource, ServletRequest)}</code>
     * for history support.<p>
     *
     * @param cms          the current cms object
     * @param file         the file with the XML data to unmarshal
     * @param keepEncoding if <code>true</code>, the encoding specified in the XML header
     *                     is used, otherwise the encoding from the VFS file property is used
     * @return a group container instance unmarshalled from the provided file
     * @throws CmsXmlException if something goes wrong
     */
    public static CmsXmlGroupContainer unmarshal(CmsObject cms, CmsFile file, boolean keepEncoding)
            throws CmsXmlException {

        // Check the cache first.
        CmsXmlGroupContainer content = getCache(cms, file, keepEncoding);
        if (content != null) {
            return content;
        }

        // Not found in cache, read as normally.
        byte[] contentBytes = file.getContents();
        String filename = cms.getSitePath(file);

        // Determine the encoding: file property first, then the system default.
        String encoding = null;
        try {
            encoding = cms.readPropertyObject(
                    filename, CmsPropertyDefinition.PROPERTY_CONTENT_ENCODING, true).getValue();
        } catch (CmsException e) {
            // encoding remains null and the default is used below
        }
        if (encoding == null) {
            encoding = OpenCms.getSystemInfo().getDefaultEncoding();
        } else {
            encoding = CmsEncoder.lookupEncoding(encoding, null);
            if (encoding == null) {
                throw new CmsXmlException(
                        Messages.get().container(Messages.ERR_XMLCONTENT_INVALID_ENC_1, filename));
            }
        }

        if (contentBytes.length > 0) {
            // Content is initialized.
            if (keepEncoding) {
                // Use the encoding from the content.
                content = unmarshal(cms, contentBytes, encoding, new CmsXmlEntityResolver(cms));
            } else {
                // Use the encoding from the file property; this is usually only
                // triggered by a save operation.
                try {
                    String contentStr = new String(contentBytes, encoding);
                    content = unmarshal(cms, contentStr, encoding, new CmsXmlEntityResolver(cms));
                } catch (UnsupportedEncodingException e) {
                    // Will not happen since the encoding has already been validated.
                    throw new CmsXmlException(
                            Messages.get().container(Messages.ERR_XMLCONTENT_INVALID_ENC_1, filename));
                }
            }
        } else {
            // Content is empty: create an empty document.
            content = new CmsXmlGroupContainer(
                    cms, DocumentHelper.createDocument(), encoding, new CmsXmlEntityResolver(cms));
        }

        // Set the file.
        content.setFile(file);
        // Call "prepare for use" on the content handler and cache the result.
        CmsXmlGroupContainer xmlGroupContainer =
                (CmsXmlGroupContainer) content.getHandler().prepareForUse(cms, content);
        setCache(cms, xmlGroupContainer, keepEncoding);
        return xmlGroupContainer;
    }
}
public class HttpUtil { /** * Performs an HTTP PUT on the given URL . * ( without BasicAuth ) * @ param uriString String representing the URI to connect to * @ param body * @ param contentType * @ return * @ throws URISyntaxException * @ throws HttpException */ public static Response put ( String uriString , String body , ContentType contentType ) throws URISyntaxException , HttpException { } }
return putBody ( new HttpPut ( uriString ) , body , contentType , null , null ) ;
public class RegionInstanceGroupManagerClient { /** * Flags the specified instances to be immediately removed from the managed instance group . * Abandoning an instance does not delete the instance , but it does remove the instance from any * target pools that are applied by the managed instance group . This method reduces the targetSize * of the managed instance group by the number of instances that you abandon . This operation is * marked as DONE when the action is scheduled even if the instances have not yet been removed * from the group . You must separately verify the status of the abandoning action with the * listmanagedinstances method . * < p > If the group is part of a backend service that has enabled connection draining , it can take * up to 60 seconds after the connection draining duration has elapsed before the VM instance is * removed or deleted . * < p > You can specify a maximum of 1000 instances with this method per request . * < p > Sample code : * < pre > < code > * try ( RegionInstanceGroupManagerClient regionInstanceGroupManagerClient = RegionInstanceGroupManagerClient . create ( ) ) { * ProjectRegionInstanceGroupManagerName instanceGroupManager = ProjectRegionInstanceGroupManagerName . of ( " [ PROJECT ] " , " [ REGION ] " , " [ INSTANCE _ GROUP _ MANAGER ] " ) ; * RegionInstanceGroupManagersAbandonInstancesRequest regionInstanceGroupManagersAbandonInstancesRequestResource = RegionInstanceGroupManagersAbandonInstancesRequest . newBuilder ( ) . build ( ) ; * Operation response = regionInstanceGroupManagerClient . abandonInstancesRegionInstanceGroupManager ( instanceGroupManager , regionInstanceGroupManagersAbandonInstancesRequestResource ) ; * < / code > < / pre > * @ param instanceGroupManager Name of the managed instance group . * @ param regionInstanceGroupManagersAbandonInstancesRequestResource * @ throws com . google . api . gax . rpc . 
ApiException if the remote call fails */ @ BetaApi public final Operation abandonInstancesRegionInstanceGroupManager ( ProjectRegionInstanceGroupManagerName instanceGroupManager , RegionInstanceGroupManagersAbandonInstancesRequest regionInstanceGroupManagersAbandonInstancesRequestResource ) { } }
AbandonInstancesRegionInstanceGroupManagerHttpRequest request = AbandonInstancesRegionInstanceGroupManagerHttpRequest . newBuilder ( ) . setInstanceGroupManager ( instanceGroupManager == null ? null : instanceGroupManager . toString ( ) ) . setRegionInstanceGroupManagersAbandonInstancesRequestResource ( regionInstanceGroupManagersAbandonInstancesRequestResource ) . build ( ) ; return abandonInstancesRegionInstanceGroupManager ( request ) ;
public class BatchModifyClusterSnapshotsRequest { /** * A list of snapshot identifiers you want to modify . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setSnapshotIdentifierList ( java . util . Collection ) } or * { @ link # withSnapshotIdentifierList ( java . util . Collection ) } if you want to override the existing values . * @ param snapshotIdentifierList * A list of snapshot identifiers you want to modify . * @ return Returns a reference to this object so that method calls can be chained together . */ public BatchModifyClusterSnapshotsRequest withSnapshotIdentifierList ( String ... snapshotIdentifierList ) { } }
if ( this . snapshotIdentifierList == null ) { setSnapshotIdentifierList ( new com . amazonaws . internal . SdkInternalList < String > ( snapshotIdentifierList . length ) ) ; } for ( String ele : snapshotIdentifierList ) { this . snapshotIdentifierList . add ( ele ) ; } return this ;
public class Session { /** * Return current session data * @ return */ public static SessionBean getSessionBean ( ) { } }
String id = idLocal . get ( ) ; if ( id == null ) return null ; SessionBean s = getSessionMap ( ) . get ( id ) ; // check TTL if ( s != null && ( System . currentTimeMillis ( ) - s . lastUsed ) > TTL ) { if ( LOG . isDebugEnabled ( ) ) LOG . debug ( "Discarding session " + id + " with livetime=" + ( System . currentTimeMillis ( ) - s . lastUsed ) + "ms" ) ; s = null ; } // new session if ( s == null ) s = new SessionBean ( ) ; s . lastUsed = System . currentTimeMillis ( ) ; updateSessionBean ( s ) ; s . id = id ; return s ;
public class MemoryUtil { /** * Performs default check on the size of a " registry " of UI widgets ( or similar ) . * In practice " size " is likely to be the size of a java Collection but it could be anything you want . * This is a convenience which should be adequate for the majority of cases . * @ param size The current size of a component registry ( or similar ) . * @ param className The name of the class which holds the registry . */ public static void checkSize ( final int size , final String className ) { } }
if ( MemoryUtil . WARN_THRESHOLD < size ) { MemoryUtil . log ( className + " may be leaking memory, it contains a large number of items: " + size ) ; }
public class BaseAuthenticator { /** * Authenticate the given request * @ param pRequest request to examine * @ return true if authentication passes , false otherwise */ public boolean authenticate ( HttpServletRequest pRequest ) { } }
String auth = pRequest . getHeader ( "Authorization" ) ; if ( auth == null ) { return false ; } AuthorizationHeaderParser . Result authInfo = AuthorizationHeaderParser . parse ( auth ) ; return authInfo . isValid ( ) && doAuthenticate ( pRequest , authInfo ) ;
public class NewObjectDialog { /** * for the checkbox */ public void itemStateChanged ( ItemEvent e ) { } }
if ( e . getStateChange ( ) == ItemEvent . DESELECTED ) { // disable text entry m_customPIDField . setEditable ( false ) ; } else if ( e . getStateChange ( ) == ItemEvent . SELECTED ) { // enable text entry m_customPIDField . setEditable ( true ) ; }
public class AbstractCommonConfigResolver { /** * Returns a set of full qualified class names of the handlers from the specified endpoint config * @ param The config to get the handler class names of * @ return A set of full qualified class names of the handlers from the specified endpoint config */ public Set < String > getAllHandlers ( EndpointConfig config ) { } }
Set < String > set = new HashSet < String > ( ) ; if ( config != null ) { for ( UnifiedHandlerChainMetaData uhcmd : config . getPreHandlerChains ( ) ) { for ( UnifiedHandlerMetaData uhmd : uhcmd . getHandlers ( ) ) { set . add ( uhmd . getHandlerClass ( ) ) ; } } for ( UnifiedHandlerChainMetaData uhcmd : config . getPostHandlerChains ( ) ) { for ( UnifiedHandlerMetaData uhmd : uhcmd . getHandlers ( ) ) { set . add ( uhmd . getHandlerClass ( ) ) ; } } } return set ;
public class PostOrderAxis { /** * { @ inheritDoc } */ @ Override public boolean hasNext ( ) { } }
resetToLastKey ( ) ; long key = mNextKey ; if ( key != NULL_NODE ) { moveTo ( mNextKey ) ; while ( ( ( ITreeStructData ) getNode ( ) ) . hasFirstChild ( ) && key != mLastParent . peek ( ) ) { mLastParent . push ( key ) ; key = ( ( ITreeStructData ) getNode ( ) ) . getFirstChildKey ( ) ; moveTo ( ( ( ITreeStructData ) getNode ( ) ) . getFirstChildKey ( ) ) ; } if ( key == mLastParent . peek ( ) ) { mLastParent . pop ( ) ; } if ( ( ( ITreeStructData ) getNode ( ) ) . hasRightSibling ( ) ) { mNextKey = ( ( ITreeStructData ) getNode ( ) ) . getRightSiblingKey ( ) ; } else { mNextKey = mLastParent . peek ( ) ; } return true ; } else { resetToStartKey ( ) ; return false ; }
public class UtilImageIO { /** * Loads a PGM image from a file . * @ param fileName Location of PGM image * @ param storage ( Optional ) Storage for output image . Must be the width and height of the image being read . * Better performance of type BufferedImage . TYPE _ BYTE _ GRAY . If null or width / height incorrect a new image * will be declared . * @ return The image * @ throws IOException Thrown if there is a problem reading the image */ public static BufferedImage loadPGM ( String fileName , BufferedImage storage ) throws IOException { } }
return loadPGM ( new FileInputStream ( fileName ) , storage ) ;
public class SearchUtils { /** * Find local variable node for a local variable at some instruction . * @ param lvnList list of local variable nodes for method * @ param insnList instruction list for method * @ param insnNode instruction within method being searched against * @ param idx local variable table index , or { @ code null } if no local variable nodes are specified for { @ code idx } and { @ code insnNode } * combination * @ throws NullPointerException if any argument is { @ code null } or contains { @ code null } * @ throws IllegalArgumentException if arguments aren ' t all from the same method , or if { @ code idx < 0} * @ return local variable node associated with the instruction */ public static LocalVariableNode findLocalVariableNodeForInstruction ( List < LocalVariableNode > lvnList , InsnList insnList , final AbstractInsnNode insnNode , int idx ) { } }
Validate . notNull ( insnList ) ; Validate . notNull ( insnNode ) ; Validate . isTrue ( idx >= 0 ) ; int insnIdx = insnList . indexOf ( insnNode ) ; Validate . isTrue ( insnIdx != - 1 ) ; lvnList = lvnList . stream ( ) . filter ( lvn -> lvn . index == idx ) // filter to lvns at the index we want . filter ( lvn -> { // filter to lvns that ' s scope starts before the instruction we want AbstractInsnNode currentInsnNode = insnNode . getPrevious ( ) ; while ( currentInsnNode != null ) { if ( currentInsnNode == lvn . start ) { return true ; } currentInsnNode = currentInsnNode . getPrevious ( ) ; } return false ; } ) . filter ( lvn -> { // filter to lvns that ' s scope stops after the instruction we want AbstractInsnNode currentInsnNode = insnNode . getNext ( ) ; while ( currentInsnNode != null ) { if ( currentInsnNode == lvn . end ) { return true ; } currentInsnNode = currentInsnNode . getNext ( ) ; } return false ; } ) . collect ( Collectors . toList ( ) ) ; // If we don ' t have any LVNs at this point , return null if ( lvnList . isEmpty ( ) ) { return null ; } // Should this be a list or should it always be a single entry ? The problem is that there ' s nothing stopping multiple LVN ' s coming // back for some instruction + lvt _ index combination . // The one thing we can be sure of at this point is that IF WE GET BACK MULTIPLE LVNs , THEY MUST OVERLAP AT SOME POINT . // The assumption at this point is . . . // 1 . LVNs are scoped such that the index of start label is BEFORE the index of the end label // 2 . LVNs must fully overlap , meaning that they can ' t go past each other ' s boundaries // 3 . LVNs can end at the same label , but they can ' t start at the same label // e . g . not allowed // x - - - - - x // x - - - - - x // e . g . allowed // x - - - - - x // x - - - - - x // e . g . allowed // x - - - - - x // x - - - - - x // e . g . 
not allowed // x - - - - - x // x - - - - - x // Error out if you spot this - - someone will eventually report it and it ' ll get fixed // the following blocks of code are far from efficient , but they ' re easily readable / understandable for ( LocalVariableNode lvn : lvnList ) { // test condition 1 int start = insnList . indexOf ( lvn . start ) ; int end = insnList . indexOf ( lvn . end ) ; Validate . validState ( end > start ) ; } for ( LocalVariableNode lvnTester : lvnList ) { // test condition 2 and 3 int startTester = insnList . indexOf ( lvnTester . start ) ; int endTester = insnList . indexOf ( lvnTester . end ) ; Range rangeTester = Range . between ( startTester , endTester ) ; for ( LocalVariableNode lvnTestee : lvnList ) { if ( lvnTester == lvnTestee ) { continue ; } int startTestee = insnList . indexOf ( lvnTestee . start ) ; int endTestee = insnList . indexOf ( lvnTestee . end ) ; Range rangeTestee = Range . between ( startTestee , endTestee ) ; Range intersectRange = rangeTester . intersectionWith ( rangeTestee ) ; Validate . validState ( intersectRange . equals ( rangeTester ) || intersectRange . equals ( rangeTestee ) ) ; // test condition 2 Validate . validState ( rangeTester . getMinimum ( ) != rangeTestee . getMinimum ( ) ) ; // test condition 3 } } // Given that all the above assumptions are correct , the LVN with the smallest range will be the correct one . It ' s the one that ' s // most tightly scoped around the instruction . // e . g . // x - - - - - i - - - - x // x - - - - - i - x // x - - - i - x return Collections . min ( lvnList , ( o1 , o2 ) -> { int o1Len = insnList . indexOf ( o1 . end ) - insnList . indexOf ( o1 . start ) ; int o2Len = insnList . indexOf ( o2 . end ) - insnList . indexOf ( o2 . start ) ; return Integer . compare ( o1Len , o2Len ) ; } ) ;
public class Parameters { /** * Returns { @ link Parameter } instances with effectively all special parameters removed . * @ return */ private List < P > createBindableParameters ( ) { } }
List < P > bindables = new ArrayList < > ( parameters . size ( ) ) ; for ( P parameter : parameters ) { if ( parameter . isBindable ( ) ) { bindables . add ( parameter ) ; } } return bindables ;
public class Alias { /** * / * ( non - Javadoc ) * @ see com . ibm . ws . sib . processor . runtime . SIMPAliasControllable # isReceiveAllowed ( ) */ public boolean isReceiveAllowed ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "isReceiveAllowed" ) ; boolean isReceiveAllowed = aliasDest . isReceiveAllowed ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "isReceiveAllowed" , new Boolean ( isReceiveAllowed ) ) ; return isReceiveAllowed ;
public class XPathParser { /** * Parses the the rule ElementNameOrWildcard according to the following * production rule : * [ 64 ] ElementNameOrWildcard : : = ElementName | " * " . * @ return name */ private String parseElementNameOrWildcard ( ) { } }
String name ; if ( is ( TokenType . STAR , true ) ) { name = mToken . getContent ( ) ; } else { name = parseElementName ( ) ; } return name ;
public class PaxPropertySetter { /** * Convert < code > val < / code > a String parameter to an object of a * given type . */ protected Object convertArg ( String val , Class type ) { } }
if ( val == null ) return null ; String v = val . trim ( ) ; if ( String . class . isAssignableFrom ( type ) ) { return val ; } else if ( Integer . TYPE . isAssignableFrom ( type ) ) { return new Integer ( v ) ; } else if ( Long . TYPE . isAssignableFrom ( type ) ) { return new Long ( v ) ; } else if ( Boolean . TYPE . isAssignableFrom ( type ) ) { if ( "true" . equalsIgnoreCase ( v ) ) { return Boolean . TRUE ; } else if ( "false" . equalsIgnoreCase ( v ) ) { return Boolean . FALSE ; } } else if ( Priority . class . isAssignableFrom ( type ) ) { return OptionConverter . toLevel ( v , ( Level ) Level . DEBUG ) ; } else if ( ErrorHandler . class . isAssignableFrom ( type ) ) { return OptionConverter . instantiateByClassName ( v , ErrorHandler . class , null ) ; } return null ;
public class DateUtils { /** * Return string describing the time until / elapsed time since ' time ' formatted like * " [ relative time / date ] , [ time ] " . * See { @ link android . text . format . DateUtils # getRelativeDateTimeString } for full docs . * @ param context the context * @ param time some time * @ param transitionResolution the elapsed time ( period ) at which * to stop reporting relative measurements . Periods greater * than this resolution will default to normal date formatting . * For example , will transition from " 6 days ago " to " Dec 12" * when using Weeks . ONE . If null , defaults to Days . ONE . * Clamps to min value of Days . ONE , max of Weeks . ONE . * @ param flags flags for getRelativeTimeSpanString ( ) ( if duration is less than transitionResolution ) */ public static CharSequence getRelativeDateTimeString ( Context context , ReadableInstant time , ReadablePeriod transitionResolution , int flags ) { } }
Resources r = context . getResources ( ) ; // We set the millis to 0 so we aren ' t off by a fraction of a second when counting duration DateTime now = DateTime . now ( time . getZone ( ) ) . withMillisOfSecond ( 0 ) ; DateTime timeDt = new DateTime ( time ) . withMillisOfSecond ( 0 ) ; boolean past = ! now . isBefore ( timeDt ) ; Duration duration = past ? new Duration ( timeDt , now ) : new Duration ( now , timeDt ) ; // getRelativeTimeSpanString ( ) doesn ' t correctly format relative dates // above a week or exact dates below a day , so clamp // transitionResolution as needed . Duration transitionDuration ; Duration minDuration = Days . ONE . toPeriod ( ) . toDurationTo ( timeDt ) ; if ( transitionResolution == null ) { transitionDuration = minDuration ; } else { transitionDuration = past ? transitionResolution . toPeriod ( ) . toDurationTo ( now ) : transitionResolution . toPeriod ( ) . toDurationFrom ( now ) ; Duration maxDuration = Weeks . ONE . toPeriod ( ) . toDurationTo ( timeDt ) ; if ( transitionDuration . isLongerThan ( maxDuration ) ) { transitionDuration = maxDuration ; } else if ( transitionDuration . isShorterThan ( minDuration ) ) { transitionDuration = minDuration ; } } CharSequence timeClause = formatDateRange ( context , time , time , FORMAT_SHOW_TIME ) ; String result ; if ( ! duration . isLongerThan ( transitionDuration ) ) { CharSequence relativeClause = getRelativeTimeSpanString ( context , time , flags ) ; result = r . getString ( R . string . joda_time_android_relative_time , relativeClause , timeClause ) ; } else { CharSequence dateClause = getRelativeTimeSpanString ( context , time , false ) ; result = r . getString ( R . string . joda_time_android_date_time , dateClause , timeClause ) ; } return result ;
public class Node { /** * Comparable */ @ Override public int compareTo ( Node other ) { } }
final double f = getHeuristic ( ) + getCost ( ) ; final double of = other . getHeuristic ( ) + other . getCost ( ) ; return Double . compare ( f , of ) ;
public class AnalysisContext { /** * file a ClassNotFoundException with the lookupFailureCallback * @ see # getLookupFailureCallback ( ) */ static public void reportMissingClass ( ClassNotFoundException e ) { } }
requireNonNull ( e , "argument is null" ) ; String missing = AbstractBugReporter . getMissingClassName ( e ) ; if ( skipReportingMissingClass ( missing ) ) { return ; } if ( ! analyzingApplicationClass ( ) ) { return ; } RepositoryLookupFailureCallback lookupFailureCallback = getCurrentLookupFailureCallback ( ) ; if ( lookupFailureCallback != null ) { lookupFailureCallback . reportMissingClass ( e ) ; }
public class SivMode { /** * Code taken from { @ link org . bouncycastle . crypto . macs . CMac } */ static byte [ ] dbl ( byte [ ] in ) { } }
byte [ ] ret = new byte [ in . length ] ; int carry = shiftLeft ( in , ret ) ; int xor = 0xff & DOUBLING_CONST ; /* * NOTE : This construction is an attempt at a constant - time implementation . */ int mask = ( - carry ) & 0xff ; ret [ in . length - 1 ] ^= xor & mask ; return ret ;
public class SortedGrouping { /** * Sorts { @ link Tuple } elements within a group on the specified field in the specified { @ link Order } . < / br > * < b > Note : Only groups of Tuple elements can be sorted . < / b > < br / > * Groups can be sorted by multiple fields by chaining { @ link # sortGroup ( int , Order ) } calls . * @ param field The Tuple field on which the group is sorted . * @ param order The Order in which the specified Tuple field is sorted . * @ return A SortedGrouping with specified order of group element . * @ see Tuple * @ see Order */ public SortedGrouping < T > sortGroup ( int field , Order order ) { } }
int pos ; if ( ! dataSet . getType ( ) . isTupleType ( ) ) { throw new InvalidProgramException ( "Specifying order keys via field positions is only valid for tuple data types" ) ; } if ( field >= dataSet . getType ( ) . getArity ( ) ) { throw new IllegalArgumentException ( "Order key out of tuple bounds." ) ; } int newLength = this . groupSortKeyPositions . length + 1 ; this . groupSortKeyPositions = Arrays . copyOf ( this . groupSortKeyPositions , newLength ) ; this . groupSortOrders = Arrays . copyOf ( this . groupSortOrders , newLength ) ; pos = newLength - 1 ; this . groupSortKeyPositions [ pos ] = field ; this . groupSortOrders [ pos ] = order ; return this ;
public class DataSet { /** * Applies a Map - style operation to the entire partition of the data . * The function is called once per parallel partition of the data , * and the entire partition is available through the given Iterator . * The number of elements that each instance of the MapPartition function * sees is non deterministic and depends on the parallelism of the operation . * < p > This function is intended for operations that cannot transform individual elements , * requires no grouping of elements . To transform individual elements , * the use of { @ code map ( ) } and { @ code flatMap ( ) } is preferable . * @ param mapPartition The MapPartitionFunction that is called for the full DataSet . * @ return A MapPartitionOperator that represents the transformed DataSet . * @ see MapPartitionFunction * @ see MapPartitionOperator */ public < R > MapPartitionOperator < T , R > mapPartition ( MapPartitionFunction < T , R > mapPartition ) { } }
if ( mapPartition == null ) { throw new NullPointerException ( "MapPartition function must not be null." ) ; } String callLocation = Utils . getCallLocationName ( ) ; TypeInformation < R > resultType = TypeExtractor . getMapPartitionReturnTypes ( mapPartition , getType ( ) , callLocation , true ) ; return new MapPartitionOperator < > ( this , resultType , clean ( mapPartition ) , callLocation ) ;
public class CmsSitemapToolbar { /** * Enables / disables the new menu button . < p > * @ param enabled < code > true < / code > to enable the button * @ param disabledReason the reason , why the button is disabled */ public void setNewEnabled ( boolean enabled , String disabledReason ) { } }
if ( enabled ) { m_newMenuButton . enable ( ) ; } else { m_newMenuButton . disable ( disabledReason ) ; }
public class CloudMe { /** * Creates folder with given path , with required intermediate folders . * @ param cmRoot contains the whole folders structure * @ param cpath path of folder to create * @ return the createdfolder corresponding to targeted cpath * @ throws CInvalidFileTypeException if a blob exists along that path */ private CMFolder createIntermediateFolders ( CMFolder cmRoot , CPath cpath ) { } }
List < String > baseNames = cpath . split ( ) ; CMFolder currentFolder = cmRoot ; CMFolder childFolder = null ; boolean firstFolderCreation = true ; for ( String baseName : baseNames ) { childFolder = currentFolder . getChildByName ( baseName ) ; if ( childFolder == null ) { // Intermediate folder does not exist : has to be created if ( firstFolderCreation ) { // This is the first intermediate folder to create : // let ' s check that there is no blob with that name already existing try { CMBlob cmBlob = getBlobByName ( currentFolder , baseName ) ; if ( cmBlob != null ) { throw new CInvalidFileTypeException ( cmBlob . getPath ( ) , false ) ; } } catch ( ParseException e ) { throw new CStorageException ( e . getMessage ( ) , e ) ; } firstFolderCreation = false ; } childFolder = rawCreateFolder ( currentFolder , baseName ) ; } currentFolder = childFolder ; } return childFolder ;
public class CosmosDbObjectFactory { /** * Create document db factory . * @ param properties the properties * @ return the document db factory */ public DocumentDbFactory createDocumentDbFactory ( final BaseCosmosDbProperties properties ) { } }
val documentClient = createDocumentClient ( properties ) ; return new DocumentDbFactory ( documentClient ) ;
public class ApiOvhOrder { /** * Get allowed durations for ' new ' option * REST : GET / order / license / virtuozzo / new * @ param version [ required ] This license version * @ param ip [ required ] Ip on which this license would be installed * @ param containerNumber [ required ] How much container is this license able to manage . . . * @ param serviceType [ required ] # DEPRECATED # The kind of service on which this license will be used # Will not be used , keeped only for compatibility # */ public ArrayList < String > license_virtuozzo_new_GET ( OvhOrderableVirtuozzoContainerNumberEnum containerNumber , String ip , OvhLicenseTypeEnum serviceType , OvhOrderableVirtuozzoVersionEnum version ) throws IOException { } }
String qPath = "/order/license/virtuozzo/new" ; StringBuilder sb = path ( qPath ) ; query ( sb , "containerNumber" , containerNumber ) ; query ( sb , "ip" , ip ) ; query ( sb , "serviceType" , serviceType ) ; query ( sb , "version" , version ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t1 ) ;
public class GeometryUtil {

    /**
     * Return the RMSD of bond lengths between the 2 aligned molecules.
     *
     * @param firstAtomContainer the (largest) first aligned AtomContainer which is the reference
     * @param secondAtomContainer the second aligned AtomContainer
     * @param mappedAtoms a Map of the mapped atom indices (first -> second)
     * @param Coords3d true if the molecules have 3D coords, false if they have 2D coords
     * @return the RMSD over all mapped bond lengths (NaN when no bonds are mapped)
     */
    public static double getBondLengthRMSD(IAtomContainer firstAtomContainer, IAtomContainer secondAtomContainer,
            Map<Integer, Integer> mappedAtoms, boolean Coords3d) {
        Iterator<Integer> mappedIndices = mappedAtoms.keySet().iterator();
        double squaredDiffSum = 0;
        double bondCount = 0;
        setVisitedFlagsToFalse(firstAtomContainer);
        setVisitedFlagsToFalse(secondAtomContainer);
        while (mappedIndices.hasNext()) {
            IAtom firstCenter = firstAtomContainer.getAtom(mappedIndices.next());
            firstCenter.setFlag(CDKConstants.VISITED, true);
            IAtom secondCenter = secondAtomContainer.getAtom(
                    mappedAtoms.get(firstAtomContainer.indexOf(firstCenter)));
            List<IAtom> neighbours = firstAtomContainer.getConnectedAtomsList(firstCenter);
            for (IAtom neighbour : neighbours) {
                // A visited neighbour's bond to this centre was already accumulated
                // from the other side; skipping it avoids counting the bond twice.
                if (neighbour.getFlag(CDKConstants.VISITED)) {
                    continue;
                }
                IAtom secondNeighbour = secondAtomContainer.getAtom(
                        mappedAtoms.get(firstAtomContainer.indexOf(neighbour)));
                double distanceFirst;
                double distanceSecond;
                if (Coords3d) {
                    distanceFirst = firstCenter.getPoint3d().distance(neighbour.getPoint3d());
                    distanceSecond = secondCenter.getPoint3d().distance(secondNeighbour.getPoint3d());
                } else {
                    distanceFirst = firstCenter.getPoint2d().distance(neighbour.getPoint2d());
                    distanceSecond = secondCenter.getPoint2d().distance(secondNeighbour.getPoint2d());
                }
                squaredDiffSum += Math.pow(distanceFirst - distanceSecond, 2);
                bondCount++;
            }
        }
        setVisitedFlagsToFalse(firstAtomContainer);
        setVisitedFlagsToFalse(secondAtomContainer);
        return Math.sqrt(squaredDiffSum / bondCount);
    }
}
public class PreferencesFx {

    /**
     * Unregisters a previously registered event handler from the model. One handler might have
     * been registered for different event types, so the caller needs to specify the particular
     * event type from which to unregister the handler.
     *
     * @param eventType the event type from which to unregister
     * @param eventHandler the handler to unregister
     * @return this instance, for fluent chaining
     * @throws NullPointerException if either event type or handler are {@code null}.
     */
    public PreferencesFx removeEventHandler(EventType<PreferencesFxEvent> eventType,
            EventHandler<? super PreferencesFxEvent> eventHandler) {
        // Delegate straight to the model, which owns the handler registry.
        preferencesFxModel.removeEventHandler(eventType, eventHandler);
        return this;
    }
}
public class FieldLevelEncryptionList { /** * An array of field - level encryption items . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setItems ( java . util . Collection ) } or { @ link # withItems ( java . util . Collection ) } if you want to override the * existing values . * @ param items * An array of field - level encryption items . * @ return Returns a reference to this object so that method calls can be chained together . */ public FieldLevelEncryptionList withItems ( FieldLevelEncryptionSummary ... items ) { } }
if ( this . items == null ) { setItems ( new com . amazonaws . internal . SdkInternalList < FieldLevelEncryptionSummary > ( items . length ) ) ; } for ( FieldLevelEncryptionSummary ele : items ) { this . items . add ( ele ) ; } return this ;
public class DefaultDatastoreReader { /** * Executes the given { @ link EntityQueryRequest } and returns the response . * @ param expectedResultType * the expected type of results . * @ param request * the entity query request * @ return the query response */ public < E > QueryResponse < E > executeEntityQueryRequest ( Class < E > expectedResultType , EntityQueryRequest request ) { } }
try { GqlQuery . Builder < Entity > queryBuilder = Query . newGqlQueryBuilder ( ResultType . ENTITY , request . getQuery ( ) ) ; queryBuilder . setNamespace ( entityManager . getEffectiveNamespace ( ) ) ; queryBuilder . setAllowLiteral ( request . isAllowLiterals ( ) ) ; QueryUtils . applyNamedBindings ( queryBuilder , request . getNamedBindings ( ) ) ; QueryUtils . applyPositionalBindings ( queryBuilder , request . getPositionalBindings ( ) ) ; GqlQuery < Entity > gqlQuery = queryBuilder . build ( ) ; QueryResults < Entity > results = nativeReader . run ( gqlQuery ) ; List < E > entities = new ArrayList < > ( ) ; DefaultQueryResponse < E > response = new DefaultQueryResponse < > ( ) ; response . setStartCursor ( new DefaultDatastoreCursor ( results . getCursorAfter ( ) . toUrlSafe ( ) ) ) ; while ( results . hasNext ( ) ) { Entity result = results . next ( ) ; E entity = Unmarshaller . unmarshal ( result , expectedResultType ) ; entities . add ( entity ) ; } response . setResults ( entities ) ; response . setEndCursor ( new DefaultDatastoreCursor ( results . getCursorAfter ( ) . toUrlSafe ( ) ) ) ; response . setQueryResponseMetadata ( new DefaultQueryResponseMetadata ( QueryResponseMetadata . QueryState . forMoreResultsType ( results . getMoreResults ( ) ) ) ) ; entityManager . executeEntityListeners ( CallbackType . POST_LOAD , entities ) ; return response ; } catch ( DatastoreException exp ) { throw new EntityManagerException ( exp ) ; }
public class NewSarlProjectWizard { /** * Replies the default group id for a maven project . * @ return the default group id , never { @ code null } nor empty string . */ @ SuppressWarnings ( "static-method" ) protected String getDefaultMavenGroupId ( ) { } }
final String userdomain = System . getenv ( "userdomain" ) ; // $ NON - NLS - 1 $ if ( Strings . isNullOrEmpty ( userdomain ) ) { return "com.foo" ; // $ NON - NLS - 1 $ } final String [ ] elements = userdomain . split ( Pattern . quote ( "." ) ) ; // $ NON - NLS - 1 $ final StringBuilder groupId = new StringBuilder ( ) ; for ( int i = elements . length - 1 ; i >= 0 ; -- i ) { if ( groupId . length ( ) > 0 ) { groupId . append ( "." ) ; // $ NON - NLS - 1 $ } groupId . append ( elements [ i ] ) ; } return groupId . toString ( ) ;
public class PlatformLevel { /** * Add a component to container only if clustering is enabled . * @ throws IllegalStateException if called from PlatformLevel1 , when cluster settings are not loaded */ AddIfCluster addIfCluster ( Object ... objects ) { } }
if ( addIfCluster == null ) { addIfCluster = new AddIfCluster ( ! getWebServer ( ) . isStandalone ( ) ) ; } addIfCluster . ifAdd ( objects ) ; return addIfCluster ;
public class StylesheetHandler { /** * Warn the user of an problem . * @ param msg An key into the { @ link org . apache . xalan . res . XSLTErrorResources } * table , that is one of the WG _ prefixed definitions . * @ param args An array of arguments for the given warning . * @ throws org . xml . sax . SAXException that wraps a * { @ link javax . xml . transform . TransformerException } if the current * { @ link javax . xml . transform . ErrorListener # warning } * method chooses to flag this condition as an error . * @ xsl . usage internal */ public void warn ( String msg , Object args [ ] ) throws org . xml . sax . SAXException { } }
String formattedMsg = XSLMessages . createWarning ( msg , args ) ; SAXSourceLocator locator = getLocator ( ) ; ErrorListener handler = m_stylesheetProcessor . getErrorListener ( ) ; try { if ( null != handler ) handler . warning ( new TransformerException ( formattedMsg , locator ) ) ; } catch ( TransformerException te ) { throw new org . xml . sax . SAXException ( te ) ; }
public class WorkspaceConfiguration {

    /**
     * Adds a user with the specified username and role.
     *
     * @param username the username (e.g. an e-mail address); must be non-null and non-empty
     * @param role the user's role; must be non-null
     * @throws IllegalArgumentException if the username is null/empty or the role is null
     */
    public void addUser(String username, Role role) {
        if (StringUtils.isNullOrEmpty(username)) {
            throw new IllegalArgumentException("A username must be specified.");
        }
        if (role == null) {
            throw new IllegalArgumentException("A role must be specified.");
        }
        users.add(new User(username, role));
    }
}
public class HttpChannelConfig { /** * Check the input configuration to decide whether to enforce a strict RFC * compliance while parsing URLs . * @ param props */ private void parseStrictURLFormat ( Map < Object , Object > props ) { } }
Object value = props . get ( HttpConfigConstants . PROPNAME_STRICT_URL_FORMAT ) ; if ( null != value ) { this . bStrictURLFormat = convertBoolean ( value ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "Config: Strict URL formatting is " + isStrictURLFormat ( ) ) ; } }
public class RemoteCacheProducer { /** * Produces the remote cache . * @ param injectionPoint the injection point . * @ param < K > the type of the key . * @ param < V > the type of the value . * @ return the remote cache instance . */ @ Remote @ Produces public < K , V > RemoteCache < K , V > getRemoteCache ( InjectionPoint injectionPoint ) { } }
final Set < Annotation > qualifiers = injectionPoint . getQualifiers ( ) ; final RemoteCacheManager cacheManager = getRemoteCacheManager ( qualifiers . toArray ( new Annotation [ 0 ] ) ) ; final Remote remote = getRemoteAnnotation ( injectionPoint . getAnnotated ( ) ) ; if ( remote != null && ! remote . value ( ) . isEmpty ( ) ) { return cacheManager . getCache ( remote . value ( ) ) ; } return cacheManager . getCache ( ) ;
public class PropertyType { /** * Returns the name of the specified < code > type < / code > . * @ param type the property type * @ return the name of the specified < code > type < / code > * @ throws IllegalArgumentException if < code > type < / code > is not a valid * property type . */ public static String nameFromValue ( int type ) { } }
switch ( type ) { case SIMPLE_REFERENCE : return TYPENAME_SIMPLE_REFERENCE ; default : return javax . jcr . PropertyType . nameFromValue ( type ) ; }
public class ReflectionHelper { /** * This is recursive with searchClassAndSuperClassesForField */ public JFieldVar searchSuperClassesForField ( String property , JDefinedClass jclass ) { } }
JClass superClass = jclass . _extends ( ) ; JDefinedClass definedSuperClass = definedClassOrNullFromType ( superClass ) ; if ( definedSuperClass == null ) { return null ; } return searchClassAndSuperClassesForField ( property , definedSuperClass ) ;
public class CardAPI {

    /**
     * 更改Code (update a card code).
     *
     * @param accessToken accessToken
     * @param codeUpdate the update request object; serialized to JSON before the call
     * @return result
     */
    public static BaseResult codeUpdate(String accessToken, CodeUpdate codeUpdate) {
        // Delegate to the JSON-string overload.
        return codeUpdate(accessToken, JsonUtil.toJSONString(codeUpdate));
    }
}
public class DefaultAlertService {

    /**
     * Evaluates all triggers associated with the missing-data notification and updates the
     * job history. Fires (or clears) the notification per trigger depending on whether data
     * for the alert's expression is missing.
     *
     * @param alert the alert being evaluated
     * @param history the job history record to append evaluation messages to
     * @param triggers the triggers selected for evaluation; only notification triggers
     *        contained in this set are processed
     * @param notification the notification whose triggers are evaluated
     * @param isDataMissing whether data for the alert expression is missing
     * @param alertEnqueueTimestamp the time the alert evaluation was enqueued
     */
    private void _processMissingDataNotification(Alert alert, History history, Set<Trigger> triggers, Notification notification, boolean isDataMissing, Long alertEnqueueTimestamp) {
        // The refocus notifier does not need cooldown logic, and every evaluation
        // needs to send a notification.
        boolean isRefocusNotifier = SupportedNotifier.REFOCUS.getName().equals(notification.getNotifierName());

        for (Trigger trigger : notification.getTriggers()) {
            if (triggers.contains(trigger)) {
                // Placeholder metric: missing-data evaluations have no real datapoints.
                Metric m = new Metric("argus", "argus");
                if (isDataMissing) {
                    String logMessage = MessageFormat.format("The trigger {0} was evaluated and it is fired as data for the metric expression {1} does not exist", trigger.getName(), alert.getExpression());
                    history.appendMessageNUpdateHistory(logMessage, null, 0);

                    if (isRefocusNotifier) {
                        // Refocus: always notify, bypassing cooldown and active-status tracking.
                        sendNotification(trigger, m, history, notification, alert, System.currentTimeMillis(), alertEnqueueTimestamp);
                        continue;
                    }

                    if (!notification.onCooldown(trigger, m)) {
                        // Mark the notification active, then fire it.
                        _updateNotificationSetActiveStatus(trigger, m, history, notification);
                        sendNotification(trigger, m, history, notification, alert, System.currentTimeMillis(), alertEnqueueTimestamp);
                    } else {
                        logMessage = MessageFormat.format("The notification {0} is on cooldown until {1}.", notification.getName(), getDateMMDDYYYY(notification.getCooldownExpirationByTriggerAndMetric(trigger, m)));
                        history.appendMessageNUpdateHistory(logMessage, null, 0);
                    }
                } else {
                    // Data is not missing.
                    String logMessage = MessageFormat.format("The trigger {0} was evaluated and it is not fired as data exists for the expression {1}", trigger.getName(), alert.getExpression());
                    history.appendMessageNUpdateHistory(logMessage, null, 0);

                    if (isRefocusNotifier) {
                        // Refocus: always send the clear, regardless of active status.
                        sendClearNotification(trigger, m, history, notification, alert, alertEnqueueTimestamp);
                        continue;
                    }

                    if (notification.isActiveForTriggerAndMetric(trigger, m)) {
                        // This is the case when the notification was active for the given
                        // trigger/metric combination and the metric did not violate the
                        // triggering condition on the current evaluation. Hence we must clear it.
                        _updateNotificationClearActiveStatus(trigger, m, notification);
                        sendClearNotification(trigger, m, history, notification, alert, alertEnqueueTimestamp);
                    }
                }
            }
        }
    }
}
public class HeartbeatTask { /** * Publish the heartbeat event with current health status . */ @ Scheduled ( fixedDelay = "${micronaut.heartbeat.interval:15s}" , initialDelay = "${micronaut.heartbeat.initial-delay:5s}" ) public void pulsate ( ) { } }
ServiceInstance instance = eventReference . get ( ) ; if ( instance != null ) { eventPublisher . publishEvent ( new HeartbeatEvent ( instance , currentHealthStatus . current ( ) ) ) ; }
public class SQLiteRepository { /** * Generate the SQL definition for a column . * @ param builder current StringBuilder to add the SQL definition . * @ param column column definition . */ private void addColumn ( StringBuilder builder , Column column ) { } }
builder . append ( column . getColumnName ( ) ) ; builder . append ( " " ) ; builder . append ( column . getDataType ( ) . getType ( ) ) ; Boolean optional = column . isOptional ( ) ; if ( optional != null ) { builder . append ( optional ? " NULL" : " NOT NULL" ) ; } String extraQualifier = column . getExtraQualifier ( ) ; if ( extraQualifier != null ) { builder . append ( " " ) ; builder . append ( extraQualifier ) ; }
public class DescribeEndpointResult { /** * An array of < a > ProductionVariantSummary < / a > objects , one for each model hosted behind this endpoint . * @ param productionVariants * An array of < a > ProductionVariantSummary < / a > objects , one for each model hosted behind this endpoint . */ public void setProductionVariants ( java . util . Collection < ProductionVariantSummary > productionVariants ) { } }
if ( productionVariants == null ) { this . productionVariants = null ; return ; } this . productionVariants = new java . util . ArrayList < ProductionVariantSummary > ( productionVariants ) ;
public class DescendantSelfAxisQuery {

    /**
     * {@inheritDoc}
     *
     * Accumulates the terms of both the context query and the sub query into the given set.
     */
    @Override
    public void extractTerms(Set<Term> terms) {
        contextQuery.extractTerms(terms);
        subQuery.extractTerms(terms);
    }
}
public class Journal {

    /**
     * Close the journal: closes the accessor and appender, drops cached hints and in-flight
     * writes, and shuts down the writer/disposer executors if this journal manages them.
     * Idempotent: a second call is a no-op.
     *
     * @throws IOException if closing the accessor or appender fails
     */
    public synchronized void close() throws IOException {
        if (!opened) {
            // Already closed (or never opened): nothing to do.
            return;
        }
        opened = false;
        accessor.close();
        appender.close();
        hints.clear();
        inflightWrites.clear();
        if (managedWriter) {
            // Only shut down executors this journal created itself.
            ((ExecutorService) writer).shutdown();
            writer = null;
        }
        if (managedDisposer) {
            disposer.shutdown();
            disposer = null;
        }
    }
}
public class NFA { /** * Extracts all the sequences of events from the start to the given computation state . An event * sequence is returned as a map which contains the events and the names of the states to which * the events were mapped . * @ param sharedBufferAccessor The accessor to { @ link SharedBuffer } from which to extract the matches * @ param computationState The end computation state of the extracted event sequences * @ return Collection of event sequences which end in the given computation state * @ throws Exception Thrown if the system cannot access the state . */ private Map < String , List < EventId > > extractCurrentMatches ( final SharedBufferAccessor < T > sharedBufferAccessor , final ComputationState computationState ) throws Exception { } }
if ( computationState . getPreviousBufferEntry ( ) == null ) { return new HashMap < > ( ) ; } List < Map < String , List < EventId > > > paths = sharedBufferAccessor . extractPatterns ( computationState . getPreviousBufferEntry ( ) , computationState . getVersion ( ) ) ; if ( paths . isEmpty ( ) ) { return new HashMap < > ( ) ; } // for a given computation state , we cannot have more than one matching patterns . Preconditions . checkState ( paths . size ( ) == 1 ) ; return paths . get ( 0 ) ;
public class CmsDefaultFileNameGenerator { /** * Checks the given pattern for the number macro . < p > * @ param pattern the pattern to check * @ return < code > true < / code > if the pattern contains the macro */ public static boolean hasNumberMacro ( String pattern ) { } }
// check both macro variants return hasNumberMacro ( pattern , "" + I_CmsMacroResolver . MACRO_DELIMITER + I_CmsMacroResolver . MACRO_START , "" + I_CmsMacroResolver . MACRO_END ) || hasNumberMacro ( pattern , "" + I_CmsMacroResolver . MACRO_DELIMITER_OLD + I_CmsMacroResolver . MACRO_START_OLD , "" + I_CmsMacroResolver . MACRO_END_OLD ) ;
public class JsBusImpl {

    /**
     * (non-Javadoc)
     *
     * @see com.ibm.ws.sib.admin.JsBus#getSIBDestinationLocalitySet(java.lang.String, java.lang.String)
     *
     * Delegates the lookup to the destination cache.
     */
    public Set getSIBDestinationLocalitySet(String busName, String uuid) throws SIBExceptionBase {
        return getDestinationCache().getSIBDestinationLocalitySet(busName, uuid);
    }
}
public class FileUtils { /** * Returns another file just like the input but with a different extension . If the input file has * an extension ( a suffix beginning with " . " ) , everything after the . is replaced with * newExtension . Otherwise , a newExtension is appended to the filename and a new File is returned . * Note that unless you want double . s , newExtension should not begin with a . */ public static File swapExtension ( final File f , final String newExtension ) { } }
checkNotNull ( f ) ; checkNotNull ( newExtension ) ; Preconditions . checkArgument ( ! f . isDirectory ( ) ) ; final String absolutePath = f . getAbsolutePath ( ) ; final int dotIndex = absolutePath . lastIndexOf ( "." ) ; String basePath ; if ( dotIndex >= 0 ) { basePath = absolutePath . substring ( 0 , dotIndex ) ; } else { basePath = absolutePath ; } return new File ( String . format ( "%s.%s" , basePath , newExtension ) ) ;
public class AbstractNodeVisitor { /** * Helper to visit all the children of a node , in order . * @ param node The parent node whose children to visit . * @ see # visitChildrenAllowingConcurrentModification */ protected void visitChildren ( ParentNode < ? extends N > node ) { } }
List < ? extends N > children = node . getChildren ( ) ; int size = children . size ( ) ; for ( int i = 0 ; i < size ; i ++ ) { visit ( children . get ( i ) ) ; }
public class Tuple15 { /** * Skip 9 degrees from this tuple . */ public final Tuple6 < T10 , T11 , T12 , T13 , T14 , T15 > skip9 ( ) { } }
return new Tuple6 < > ( v10 , v11 , v12 , v13 , v14 , v15 ) ;
public class FunctionSQL { /** * Returns a String representation of this object . < p > */ @ Override public String describe ( Session session , int blanks ) { } }
StringBuffer sb = new StringBuffer ( ) ; sb . append ( '\n' ) ; for ( int i = 0 ; i < blanks ; i ++ ) { sb . append ( ' ' ) ; } sb . append ( "FUNCTION " ) . append ( "=[\n" ) ; sb . append ( name ) . append ( "(" ) ; for ( int i = 0 ; i < nodes . length ; i ++ ) { if ( nodes [ i ] != null ) { sb . append ( "[" ) . append ( nodes [ i ] . describe ( session ) ) . append ( "]" ) ; } } sb . append ( ") returns " ) . append ( dataType . getNameString ( ) ) ; sb . append ( "]\n" ) ; return sb . toString ( ) ;
public class XConstructor { /** * Checks if annoated parameters match method parameters . * @ throws IllegalArgumentException * If annotated parameters do not match method parameters . */ private void check ( ) { } }
final Constructor < ? > constructor = getConstructor ( ) ; final XParameter [ ] parameters = getParameters ( ) ; final Class < ? > [ ] parameterTypes = constructor . getParameterTypes ( ) ; Validate . isTrue ( parameters . length == parameterTypes . length , "Wrong number of parameters: [" + parameters . length + "], expected [" + parameterTypes . length + "]." ) ; for ( int index = 0 ; index < parameters . length ; index ++ ) { final XParameter parameter = parameters [ index ] ; final Class < ? > parameterType = parameterTypes [ index ] ; Validate . isTrue ( parameterType . equals ( parameter . getType ( ) ) , "Wrong parameter type: [" + parameter . getType ( ) + "], expected [" + parameterType + "]" ) ; }
public class ButterKnife { /** * BindView annotated fields and methods in the specified { @ link View } . The view and its children * are used as the view root . * @ param target Target view for view binding . */ @ NonNull @ UiThread public static Unbinder bind ( @ NonNull View target ) { } }
return bind ( target , target ) ;
public class FsCrawlerUtil { /** * We check if we can index the file or if we should ignore it * @ param filename The filename to scan * @ param excludes exclude rules , may be empty not null */ public static boolean isExcluded ( String filename , List < String > excludes ) { } }
logger . debug ( "filename = [{}], excludes = [{}]" , filename , excludes ) ; // No rules ? Fine , we index everything if ( excludes == null || excludes . isEmpty ( ) ) { logger . trace ( "no rules" ) ; return false ; } // Exclude rules : we know that whatever includes rules are , we should exclude matching files for ( String exclude : excludes ) { String regex = exclude . toLowerCase ( ) . replace ( "?" , ".?" ) . replace ( "*" , ".*?" ) ; logger . trace ( "regex is [{}]" , regex ) ; if ( filename . toLowerCase ( ) . matches ( regex ) ) { logger . trace ( "does match exclude regex" ) ; return true ; } } logger . trace ( "does not match any exclude pattern" ) ; return false ;
public class FTPUploader { /** * Upload a single file to FTP server with the provided FTP client object . * @ param sourceFilePath * @ param targetFilePath * @ param logPrefix * @ throws IOException */ protected void uploadFile ( final FTPClient ftpClient , final String sourceFilePath , final String targetFilePath , final String logPrefix ) throws IOException { } }
// Flow: log the transfer, open the local source via try-with-resources (the
// stream is always closed), cd to the remote target, store the file under its
// ORIGINAL local name, then inspect the FTP reply code: a failed reply is
// logged at error level and surfaced as an IOException; success is logged at
// info level.
// NOTE(review): targetFilePath is handed to changeWorkingDirectory(), i.e. it
// is treated as a remote DIRECTORY despite the name suggesting a file path --
// confirm with callers. The reply codes of changeWorkingDirectory() and
// storeFile() themselves are not checked individually; only the final reply
// after storeFile() is inspected.
log . info ( String . format ( UPLOAD_FILE , logPrefix , sourceFilePath , targetFilePath ) ) ; final File sourceFile = new File ( sourceFilePath ) ; try ( final InputStream is = new FileInputStream ( sourceFile ) ) { ftpClient . changeWorkingDirectory ( targetFilePath ) ; ftpClient . storeFile ( sourceFile . getName ( ) , is ) ; final int replyCode = ftpClient . getReplyCode ( ) ; final String replyMessage = ftpClient . getReplyString ( ) ; if ( isCommandFailed ( replyCode ) ) { log . error ( String . format ( UPLOAD_FILE_REPLY , logPrefix , replyMessage ) ) ; throw new IOException ( "Failed to upload file: " + sourceFilePath ) ; } else { log . info ( String . format ( UPLOAD_FILE_REPLY , logPrefix , replyMessage ) ) ; } }
public class NetUtil { /** * 在范围里随机找一个空闲端口 , from Spring SocketUtils . * @ throws IllegalStateException 最多尝试 ( maxPort - minPort ) 次 , 如无空闲端口 , 抛出此异常 . */ public static int findRandomAvailablePort ( int minPort , int maxPort ) { } }
int portRange = maxPort - minPort ; int candidatePort ; int searchCounter = 0 ; do { if ( ++ searchCounter > portRange ) { throw new IllegalStateException ( String . format ( "Could not find an available tcp port in the range [%d, %d] after %d attempts" , minPort , maxPort , searchCounter ) ) ; } candidatePort = minPort + random . nextInt ( portRange + 1 ) ; } while ( ! isPortAvailable ( candidatePort ) ) ; return candidatePort ;
public class Util { private void printInstanceNames ( ) { } }
try { System . out . println ( "Instance name defined in database for server PowerSupply :" ) ; final String [ ] instnames = ApiUtil . get_db_obj ( ) . get_instance_name_list ( ds_exec_name ) ; for ( final String instname : instnames ) { System . out . println ( "\t" + instname ) ; } } catch ( final DevFailed e ) { Except . print_exception ( e ) ; }
public class DescribeModelPackageRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DescribeModelPackageRequest describeModelPackageRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( describeModelPackageRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeModelPackageRequest . getModelPackageName ( ) , MODELPACKAGENAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class DefaultSessionFactory { /** * Creates a session and ensures schema if configured . Closes the cluster and session if any * exception occurred . */ @ Override public Session create ( CassandraStorage cassandra ) { } }
// Resources are registered with a Closer in acquisition order (cluster, then
// session) so a RuntimeException anywhere below closes them in reverse order;
// IOException from that cleanup is deliberately swallowed and the original
// RuntimeException is rethrown.
// Two connect paths: with ensureSchema, connect WITHOUT a keyspace first (it
// may not exist yet), create/verify the schema, then "USE <keyspace>";
// otherwise connect directly to the keyspace, skipping the check.
// NOTE(review): in the else branch the session is not registered with the
// closer -- presumably acceptable because closing the cluster also closes its
// sessions; confirm against the driver's contract.
Closer closer = Closer . create ( ) ; try { Cluster cluster = closer . register ( buildCluster ( cassandra ) ) ; cluster . register ( new QueryLogger . Builder ( ) . build ( ) ) ; Session session ; String keyspace = cassandra . keyspace ( ) ; if ( cassandra . ensureSchema ( ) ) { session = closer . register ( cluster . connect ( ) ) ; Schema . ensureExists ( keyspace , cassandra . searchEnabled ( ) , session ) ; session . execute ( "USE " + keyspace ) ; } else { LOG . debug ( "Skipping schema check on keyspace {} as ensureSchema was false" , keyspace ) ; session = cluster . connect ( keyspace ) ; } initializeUDTs ( session ) ; return session ; } catch ( RuntimeException e ) { try { closer . close ( ) ; } catch ( IOException ignored ) { } throw e ; }
public class SectionNumber { /** * Change this version number from the given string representation . * < p > The string representation should be integer numbers , separated by dot characters . * @ param sectionNumber the string representation of the version number . * @ param level is the level at which the section number is visible ( 1 for the top level , 2 for subsections . . . ) */ public void setFromString ( String sectionNumber , int level ) { } }
assert level >= 1 ; final String [ ] numbers = sectionNumber . split ( "[^0-9]+" ) ; // $ NON - NLS - 1 $ final int len = Math . max ( 0 , this . numbers . size ( ) - numbers . length ) ; for ( int i = 0 ; i < len ; ++ i ) { this . numbers . removeLast ( ) ; } for ( int i = 0 ; i < numbers . length && i < level ; ++ i ) { this . numbers . addLast ( Integer . valueOf ( numbers [ i ] ) ) ; }
public class SpriteManager {
    /**
     * Finds the sprite with the highest render order that hits the specified
     * pixel.
     *
     * @param x the x (screen) coordinate to be checked
     * @param y the y (screen) coordinate to be checked
     * @return the highest sprite hit, or null if nothing is hit
     */
    public Sprite getHighestHitSprite(int x, int y) {
        // _sprites is ordered lowest -> highest, so walk it back to front.
        int idx = _sprites.size();
        while (--idx >= 0) {
            final Sprite candidate = _sprites.get(idx);
            if (candidate.hitTest(x, y)) {
                return candidate;
            }
        }
        return null;
    }
}
public class UpdateDeviceStateRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( UpdateDeviceStateRequest updateDeviceStateRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( updateDeviceStateRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateDeviceStateRequest . getDeviceId ( ) , DEVICEID_BINDING ) ; protocolMarshaller . marshall ( updateDeviceStateRequest . getEnabled ( ) , ENABLED_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Vector { /** * Subtracts the given { @ link Vector } from this < code > Vector < / code > . * @ param v the vector to subtract */ public void subtract ( Vector v ) { } }
x = x - v . x ; y = y - v . y ; z = z - v . z ;
public class ColumnSchema { /** * Returns SQL for default value . */ public String getDefaultSQL ( ) { } }
String ddl = null ; ddl = defaultExpression == null ? null : defaultExpression . getSQL ( ) ; return ddl ;
public class StandardSpawnService { /** * Replies the registered agent . * @ param id * is the identifier of the agent . * @ return the registered agent , or < code > null < / code > . */ Agent getAgent ( UUID id ) { } }
assert id != null ; synchronized ( getAgentRepositoryMutex ( ) ) { return this . agents . get ( id ) ; }
public class JsonParser { /** * { @ link Beta } < br > * Parse a JSON Array from the given JSON parser ( which is closed after parsing completes ) into * the given destination collection , optionally using the given parser customizer . * @ param destinationCollectionClass class of destination collection ( must have a public default * constructor ) * @ param destinationItemClass class of destination collection item ( must have a public default * constructor ) * @ param customizeParser optional parser customizer or { @ code null } for none */ @ Beta public final < T > Collection < T > parseArrayAndClose ( Class < ? > destinationCollectionClass , Class < T > destinationItemClass , CustomizeJsonParser customizeParser ) throws IOException { } }
try { return parseArray ( destinationCollectionClass , destinationItemClass , customizeParser ) ; } finally { close ( ) ; }
public class JdonConstructorInjectionComponentAdapter { /** * overide InstantiatingComponentAdapter ' s newInstance */ protected Object newInstance ( Constructor constructor , Object [ ] parameters ) throws InstantiationException , IllegalAccessException , InvocationTargetException { } }
if ( allowNonPublicClasses ) { constructor . setAccessible ( true ) ; } Object o = constructor . newInstance ( parameters ) ; ComponentAdvsior componentAdvsior = ( ComponentAdvsior ) configInfo . getContainerWrapper ( ) . lookup ( ComponentAdvsior . NAME ) ; Object proxy = null ; if ( componentAdvsior != null ) proxy = componentAdvsior . createProxy ( o ) ; if ( ! proxy . getClass ( ) . isInstance ( o ) ) { Map orignals = getContainerOrignals ( configInfo . getContainerWrapper ( ) ) ; orignals . put ( ( String ) this . getComponentKey ( ) , o ) ; } return proxy ;
public class FTPFileSystem { /** * Convenience method , so that we don ' t open a new connection when using this * method from within another method . Otherwise every API invocation incurs * the overhead of opening / closing a TCP connection . */ private boolean delete ( FTPClient client , Path file , boolean recursive ) throws IOException { } }
// Resolves 'file' against the client's current remote working directory, then:
// plain files are deleted directly; for directories, a non-empty listing with
// recursive == false is rejected with an IOException, otherwise every entry is
// deleted recursively (each child resolved against the absolute parent) before
// removing the now-empty directory itself.
// NOTE(review): child deletions' boolean results are ignored -- a failed child
// delete is only surfaced indirectly when removeDirectory() fails on the
// non-empty parent; confirm this matches the FileSystem contract.
Path workDir = new Path ( client . printWorkingDirectory ( ) ) ; Path absolute = makeAbsolute ( workDir , file ) ; String pathName = absolute . toUri ( ) . getPath ( ) ; FileStatus fileStat = getFileStatus ( client , absolute ) ; if ( ! fileStat . isDir ( ) ) { return client . deleteFile ( pathName ) ; } FileStatus [ ] dirEntries = listStatus ( client , absolute ) ; if ( dirEntries != null && dirEntries . length > 0 && ! ( recursive ) ) { throw new IOException ( "Directory: " + file + " is not empty." ) ; } if ( dirEntries != null ) { for ( int i = 0 ; i < dirEntries . length ; i ++ ) { delete ( client , new Path ( absolute , dirEntries [ i ] . getPath ( ) ) , recursive ) ; } } return client . removeDirectory ( pathName ) ;
public class ClassDefinitionAction { /** * { @ inheritDoc } */ public Class < ? > run ( ) { } }
// Before defining the class, make sure its package exists in this class
// loader: derive the package name from everything before the last '.', ask the
// PackageDefinitionStrategy for a definition, and if one is supplied either
// define the package (when not yet known to the loader) or -- when it already
// exists -- enforce sealing compatibility, throwing a SecurityException on a
// sealing violation. Classes in the default package (no '.') skip all of this.
// Finally the class itself is defined from its full binary representation
// (offset FROM_BEGINNING) under the given protection domain.
int packageIndex = name . lastIndexOf ( '.' ) ; if ( packageIndex != - 1 ) { String packageName = name . substring ( 0 , packageIndex ) ; PackageDefinitionStrategy . Definition definition = packageDefinitionStrategy . define ( ByteArrayClassLoader . this , packageName , name ) ; if ( definition . isDefined ( ) ) { Package definedPackage = PACKAGE_LOOKUP_STRATEGY . apply ( ByteArrayClassLoader . this , packageName ) ; if ( definedPackage == null ) { definePackage ( packageName , definition . getSpecificationTitle ( ) , definition . getSpecificationVersion ( ) , definition . getSpecificationVendor ( ) , definition . getImplementationTitle ( ) , definition . getImplementationVersion ( ) , definition . getImplementationVendor ( ) , definition . getSealBase ( ) ) ; } else if ( ! definition . isCompatibleTo ( definedPackage ) ) { throw new SecurityException ( "Sealing violation for package " + packageName ) ; } } } return defineClass ( name , binaryRepresentation , FROM_BEGINNING , binaryRepresentation . length , protectionDomain ) ;
public class TurtleSerializer { /** * Serialize a FedoraEvent in RDF using Turtle syntax * @ param evt the Fedora event * @ return a string of RDF , using Turtle syntax */ @ Override public String serialize ( final FedoraEvent evt ) { } }
try { final Model model = EventSerializer . toModel ( evt ) ; final ByteArrayOutputStream out = new ByteArrayOutputStream ( ) ; model . write ( out , "TTL" ) ; return out . toString ( "UTF-8" ) ; } catch ( final UnsupportedEncodingException ex ) { throw new RepositoryRuntimeException ( ex ) ; }
public class BitcoinSerializer { /** * Deserialize payload only . You must provide a header , typically obtained by calling * { @ link BitcoinSerializer # deserializeHeader } . */ @ Override public Message deserializePayload ( BitcoinPacketHeader header , ByteBuffer in ) throws ProtocolException , BufferUnderflowException { } }
// Reads exactly header.size bytes from the buffer (a short buffer raises
// BufferUnderflowException), verifies the first four bytes of the payload's
// double-SHA256 against the header checksum -- mismatch is a ProtocolException
// carrying both hex values -- then hands the payload to makeMessage(). Any
// exception thrown while constructing the message is wrapped in a
// ProtocolException that includes the full hex-encoded payload for debugging.
byte [ ] payloadBytes = new byte [ header . size ] ; in . get ( payloadBytes , 0 , header . size ) ; // Verify the checksum . byte [ ] hash ; hash = Sha256Hash . hashTwice ( payloadBytes ) ; if ( header . checksum [ 0 ] != hash [ 0 ] || header . checksum [ 1 ] != hash [ 1 ] || header . checksum [ 2 ] != hash [ 2 ] || header . checksum [ 3 ] != hash [ 3 ] ) { throw new ProtocolException ( "Checksum failed to verify, actual " + HEX . encode ( hash ) + " vs " + HEX . encode ( header . checksum ) ) ; } if ( log . isDebugEnabled ( ) ) { log . debug ( "Received {} byte '{}' message: {}" , header . size , header . command , HEX . encode ( payloadBytes ) ) ; } try { return makeMessage ( header . command , header . size , payloadBytes , hash , header . checksum ) ; } catch ( Exception e ) { throw new ProtocolException ( "Error deserializing message " + HEX . encode ( payloadBytes ) + "\n" , e ) ; }
public class Line { /** * Get a list of LatLng for the line , which represents the locations of the line on the map * @ return a list of the locations of the line in a latitude and longitude pairs */ @ NonNull public List < LatLng > getLatLngs ( ) { } }
LineString lineString = ( LineString ) geometry ; List < LatLng > latLngs = new ArrayList < > ( ) ; for ( Point point : lineString . coordinates ( ) ) { latLngs . add ( new LatLng ( point . latitude ( ) , point . longitude ( ) ) ) ; } return latLngs ;
public class OutputArchive { /** * Creates a new < code > OutputArchive < / code > that writes to the specified * < code > OutputStream < / code > . * @ param stream The < code > OutputStream < / code > to be written to . * @ return The new < code > OutputArchive < / code > . * @ throws IOException If unable to instantiate an * < code > ObjectOutputStream < / code > for the provided * < code > OutputStream < / code > . */ public static OutputArchive fromOutputStream ( OutputStream stream ) throws IOException { } }
if ( stream instanceof ObjectOutput ) { return new OutputArchive ( ( ObjectOutput ) stream ) ; } else { return new OutputArchive ( new ObjectOutputStream ( stream ) ) ; }
public class PolygonExpression { /** * Returns the N th interior ring for this Polygon as a LineString . * @ param idx one based index * @ return interior ring at index */ public LineStringExpression < LineString > interiorRingN ( int idx ) { } }
return GeometryExpressions . lineStringOperation ( SpatialOps . INTERIOR_RINGN , mixin , ConstantImpl . create ( idx ) ) ;
public class CmsStaticExportManager { /** * Shuts down all this static export manager . < p > * This is required since there may still be a thread running when the system is being shut down . < p > */ public synchronized void shutDown ( ) { } }
// Polls the export handler once per second for up to HANDLER_FINISH_TIME
// seconds, logging progress each round. The method is synchronized, so
// wait(1000) is legal (monitor held) and releases the lock while sleeping.
// On InterruptedException the counter is forced to HANDLER_FINISH_TIME, which
// exits the loop immediately and abandons the handler (accepted trade-off,
// per the inline comment). A final shutdown message is logged either way.
// NOTE(review): the interrupt flag is not restored after catching
// InterruptedException -- confirm callers don't rely on it.
int count = 0 ; // if the handler is still running , we must wait up to 30 seconds until it is finished while ( ( count < HANDLER_FINISH_TIME ) && m_handler . isBusy ( ) ) { count ++ ; try { if ( CmsLog . INIT . isInfoEnabled ( ) ) { CmsLog . INIT . info ( Messages . get ( ) . getBundle ( ) . key ( Messages . INIT_STATIC_EXPORT_SHUTDOWN_3 , m_handler . getClass ( ) . getName ( ) , String . valueOf ( count ) , String . valueOf ( HANDLER_FINISH_TIME ) ) ) ; } wait ( 1000 ) ; } catch ( InterruptedException e ) { // if interrupted we ignore the handler , this will produce some log messages but should be ok count = HANDLER_FINISH_TIME ; } } if ( CmsLog . INIT . isInfoEnabled ( ) ) { CmsLog . INIT . info ( Messages . get ( ) . getBundle ( ) . key ( Messages . INIT_SHUTDOWN_1 , this . getClass ( ) . getName ( ) ) ) ; }
public class AssociationKey { /** * Returns the value of the given column if part of this key . Use { @ link AssociationKeyMetadata # isKeyColumn ( String ) } * to check whether a given column is part of this key prior to invoking this method . * @ param columnName the name of interest * @ return the value of the given column . */ public Object getColumnValue ( String columnName ) { } }
for ( int i = 0 ; i < getColumnNames ( ) . length ; i ++ ) { String name = getColumnNames ( ) [ i ] ; if ( name . equals ( columnName ) ) { return getColumnValues ( ) [ i ] ; } } throw new AssertionFailure ( String . format ( "Given column %s is not part of this key: %s" , columnName , this . toString ( ) ) ) ;