signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class JcrManagedConnection { /** * Adds a connection event listener to the ManagedConnection instance . * @ param listener A new ConnectionEventListener to be registered */ @ Override public void addConnectionEventListener ( ConnectionEventListener listener ) { } }
if ( listener == null ) { throw new IllegalArgumentException ( "Listener is null" ) ; } listeners . add ( listener ) ;
public class ImageFormatList { /** * This method moves the given image format < code > format < / code > * in the first position of the vector . * @ param v the vector if image format * @ param format the index of the format to be moved in first position */ private void moveToFirstIfNative ( List < ImageFormat > v , int format ) { } }
for ( ImageFormat i : v ) if ( ( i . getIndex ( ) == format ) && ( formats . contains ( i ) ) ) { v . remove ( i ) ; v . add ( 0 , i ) ; break ; }
public class LdapAdapter {

    /**
     * Updates group membership attributes after a member entry is renamed or
     * removed. For every group that references {@code oldDN}, the old member
     * value is removed and, if {@code newDN} is non-null, the new value is
     * added in the same modification batch.
     *
     * @param oldDN the member DN being replaced or removed
     * @param newDN the replacement member DN, or {@code null} to remove only
     * @throws WIMException on repository errors
     */
    private void updateGroupMember(String oldDN, String newDN) throws WIMException {
        // Nothing to do if group-membership maintenance is disabled in config.
        if (!iLdapConfigMgr.updateGroupMembership()) {
            return;
        }
        // Filter that finds all groups containing oldDN as a member.
        String filter = iLdapConfigMgr.getGroupMemberFilter(oldDN);
        String[] mbrAttrs = iLdapConfigMgr.getMemberAttributes();
        // Pre-build, per member attribute, the modification batch to apply:
        // always a REMOVE of oldDN, plus an ADD of newDN when renaming.
        Map<String, ModificationItem[]> mbrAttrMap = new HashMap<String, ModificationItem[]>(mbrAttrs.length);
        for (int i = 0; i < mbrAttrs.length; i++) {
            String mbrAttr = mbrAttrs[i];
            ModificationItem removeAttr = new ModificationItem(DirContext.REMOVE_ATTRIBUTE, new BasicAttribute(mbrAttr, oldDN));
            ModificationItem[] modifAttrs = null;
            if (newDN != null) {
                ModificationItem addAttr = new ModificationItem(DirContext.ADD_ATTRIBUTE, new BasicAttribute(mbrAttr, newDN));
                modifAttrs = new ModificationItem[2];
                // NOTE(review): ADD is placed before REMOVE in the batch;
                // presumably intentional so the entry never loses its last
                // member value mid-modify -- confirm against server semantics.
                modifAttrs[0] = addAttr;
                modifAttrs[1] = removeAttr;
            } else {
                modifAttrs = new ModificationItem[1];
                modifAttrs[0] = removeAttr;
            }
            // Keyed by lower-cased attribute name for case-insensitive lookup below.
            mbrAttrMap.put(mbrAttr.toLowerCase(), modifAttrs);
        }
        // Search every configured group search base for affected groups.
        String[] searchBases = iLdapConfigMgr.getGroupSearchBases();
        for (int i = 0; i < searchBases.length; i++) {
            String searchBase = searchBases[i];
            NamingEnumeration<SearchResult> nenu = iLdapConn.search(searchBase, filter, SearchControls.SUBTREE_SCOPE, LDAP_ATTR_OBJECTCLASS_ARRAY);
            while (nenu.hasMoreElements()) {
                SearchResult thisEntry = nenu.nextElement();
                if (thisEntry == null) {
                    continue;
                }
                String entryName = thisEntry.getName();
                // Skip entries with no usable name (e.g. the search base itself).
                if (entryName == null || entryName.trim().length() == 0) {
                    continue;
                }
                String DN = LdapHelper.prepareDN(entryName, searchBase);
                Attributes attrs = thisEntry.getAttributes();
                // The member attribute(s) actually used by this group depend on
                // its objectclass.
                String[] thisMbrAttrs = iLdapConfigMgr.getMemberAttribute(attrs.get(LDAP_ATTR_OBJECTCLASS));
                if (thisMbrAttrs != null) {
                    for (int j = 0; j < thisMbrAttrs.length; j++) {
                        ModificationItem[] attrsTobeModify = mbrAttrMap.get(thisMbrAttrs[j].toLowerCase());
                        if (attrsTobeModify != null) {
                            try {
                                iLdapConn.modifyAttributes(DN, attrsTobeModify);
                            } catch (Exception e) {
                                // Best-effort: a failure updating one group is
                                // logged (debug only) and does not abort the rest.
                                if (tc.isDebugEnabled()) {
                                    Tr.debug(tc, "Updating group " + DN + " for " + oldDN + " failed due to: " + e.toString());
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
public class SoundGroup { /** * Disposes this sound group , freeing up the OpenAL sources with which it is associated . All * sounds obtained from this group will no longer be usable and should be discarded . */ public void dispose ( ) { } }
reclaimAll ( ) ; for ( PooledSource pooled : _sources ) { pooled . source . delete ( ) ; } _sources . clear ( ) ; // remove from the manager _manager . removeGroup ( this ) ;
public class MatchSpaceImpl { /** * Initialise a newly created MatchSpace * @ param rootId * @ param enableCache */ public void initialise ( Identifier rootId , boolean enableCache ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) tc . entry ( cclass , "initialise" , new Object [ ] { rootId , new Boolean ( enableCache ) } ) ; switch ( rootId . getType ( ) ) { case Selector . UNKNOWN : case Selector . OBJECT : matchTree = new EqualityMatcher ( rootId ) ; break ; case Selector . STRING : case Selector . TOPIC : matchTree = new StringMatcher ( rootId ) ; break ; case Selector . BOOLEAN : matchTree = new BooleanMatcher ( rootId ) ; break ; default : matchTree = new NumericMatcher ( rootId ) ; break ; } if ( enableCache ) { this . rootId = rootId ; matchCache = new MatchCache ( MATCH_CACHE_INITIAL_CAPACITY ) ; matchCache . setRehashFilter ( this ) ; ( ( EqualityMatcher ) matchTree ) . setCacheing ( true ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) tc . exit ( cclass , "MatchSpaceImpl" , this ) ;
public class ContextTrampoline { /** * Sets the concrete strategy for accessing and manipulating the context . * < p > NB : The agent is responsible for setting the context strategy once before any other method * of this class is called . * @ param contextStrategy the concrete strategy for accessing and manipulating the context * @ since 0.9 */ public static void setContextStrategy ( ContextStrategy contextStrategy ) { } }
if ( ContextTrampoline . contextStrategy != null ) { throw new IllegalStateException ( "contextStrategy was already set" ) ; } if ( contextStrategy == null ) { throw new NullPointerException ( "contextStrategy" ) ; } ContextTrampoline . contextStrategy = contextStrategy ;
public class GeoPackageJavaProperties { /** * Get a color by key * @ param key * key * @ param required * required flag * @ return property value */ public static Color getColorProperty ( String key , boolean required ) { } }
Color value = null ; String redProperty = key + JavaPropertyConstants . PROPERTY_DIVIDER + JavaPropertyConstants . COLOR_RED ; String greenProperty = key + JavaPropertyConstants . PROPERTY_DIVIDER + JavaPropertyConstants . COLOR_GREEN ; String blueProperty = key + JavaPropertyConstants . PROPERTY_DIVIDER + JavaPropertyConstants . COLOR_BLUE ; String alphaProperty = key + JavaPropertyConstants . PROPERTY_DIVIDER + JavaPropertyConstants . COLOR_ALPHA ; Integer red = getIntegerProperty ( redProperty , required ) ; Integer green = getIntegerProperty ( greenProperty , required ) ; Integer blue = getIntegerProperty ( blueProperty , required ) ; Integer alpha = getIntegerProperty ( alphaProperty , required ) ; if ( red != null && green != null && blue != null && alpha != null ) { value = new Color ( red , green , blue , alpha ) ; } return value ;
public class ReloadingPropertyPlaceholderConfigurer { /** * 对于被标记为动态的 , 进行 构造 property dependency * 非动态的 , 则由原来的spring进行处理 * @ param strVal * @ param props * @ param visitedPlaceholders * @ return * @ throws BeanDefinitionStoreException */ protected String parseStringValue ( String strVal , Properties props , Set visitedPlaceholders ) throws BeanDefinitionStoreException { } }
DynamicProperty dynamic = null ; // replace reloading prefix and suffix by " normal " prefix and suffix . // remember all the " dynamic " placeholders encountered . StringBuffer buf = new StringBuffer ( strVal ) ; int startIndex = strVal . indexOf ( this . placeholderPrefix ) ; while ( startIndex != - 1 ) { int endIndex = buf . toString ( ) . indexOf ( this . placeholderSuffix , startIndex + this . placeholderPrefix . length ( ) ) ; if ( endIndex != - 1 ) { if ( currentBeanName != null && currentPropertyName != null ) { String placeholder = buf . substring ( startIndex + this . placeholderPrefix . length ( ) , endIndex ) ; placeholder = getPlaceholder ( placeholder ) ; if ( dynamic == null ) { dynamic = getDynamic ( currentBeanName , currentPropertyName , strVal ) ; } addDependency ( dynamic , placeholder ) ; } else { logger . debug ( "dynamic property outside bean property value - ignored: " + strVal ) ; } startIndex = endIndex - this . placeholderPrefix . length ( ) + this . placeholderPrefix . length ( ) + this . placeholderSuffix . length ( ) ; startIndex = strVal . indexOf ( this . placeholderPrefix , startIndex ) ; } else { startIndex = - 1 ; } } // then , business as usual . no recursive reloading placeholders please . return super . parseStringValue ( buf . toString ( ) , props , visitedPlaceholders ) ;
public class DRL5Lexer {

    /**
     * $ANTLR start "AMPER"
     * Lexer rule for the ampersand token: matches a single '&amp;' character.
     * ANTLR-generated code -- regenerate from DRL5Lexer.g rather than editing.
     *
     * @throws RecognitionException if the input does not match
     */
    public final void mAMPER() throws RecognitionException {
        try {
            int _type = AMPER;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // src/main/resources/org/drools/compiler/lang/DRL5Lexer.g:295:5: ( '&' )
            // src/main/resources/org/drools/compiler/lang/DRL5Lexer.g:295:7: '&'
            {
                match('&');
                // In backtracking mode a failed match returns without setting state.
                if (state.failed) return;
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // do for sure before leaving
        }
    }
}
public class DescribeImagesRequest { /** * Scopes the images by users with explicit launch permissions . Specify an AWS account ID , < code > self < / code > ( the * sender of the request ) , or < code > all < / code > ( public AMIs ) . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setExecutableUsers ( java . util . Collection ) } or { @ link # withExecutableUsers ( java . util . Collection ) } if you * want to override the existing values . * @ param executableUsers * Scopes the images by users with explicit launch permissions . Specify an AWS account ID , < code > self < / code > * ( the sender of the request ) , or < code > all < / code > ( public AMIs ) . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeImagesRequest withExecutableUsers ( String ... executableUsers ) { } }
if ( this . executableUsers == null ) { setExecutableUsers ( new com . amazonaws . internal . SdkInternalList < String > ( executableUsers . length ) ) ; } for ( String ele : executableUsers ) { this . executableUsers . add ( ele ) ; } return this ;
public class GlobalServiceElector { /** * Add a service to be notified if this node becomes the global leader */ synchronized void registerService ( Promotable service ) { } }
m_services . add ( service ) ; if ( m_isLeader ) { try { service . acceptPromotion ( ) ; } catch ( Exception e ) { VoltDB . crashLocalVoltDB ( "Unable to promote global service." , true , e ) ; } }
public class SdkThreadLocalsRegistry { /** * Registers { @ link ThreadLocal } objects in use by the AWS SDK so that their values can * be removed via the { @ link # remove ( ) } method . * < p > To avoid memory leaks and reduce contention this method should only be called when * setting static final locations ( for example finals in enums or static final fields ) . * @ param threadLocal ThreadLocal to register * @ return the input ThreadLocal */ public static synchronized < T > ThreadLocal < T > register ( ThreadLocal < T > threadLocal ) { } }
threadLocals . add ( threadLocal ) ; return threadLocal ;
public class XmlRequestMatcher { /** * Whitespace will be kept by DOM parser . */ private void trimNode ( final Node node ) { } }
NodeList children = node . getChildNodes ( ) ; for ( int i = children . getLength ( ) - 1 ; i >= 0 ; i -- ) { trimChild ( node , children . item ( i ) ) ; }
public class Schema {

    /**
     * Splits a comma-separated argument string into individual arguments,
     * treating commas inside double quotes or square brackets (arrays of
     * arrays) as part of the argument rather than separators.
     * TODO: detect and complain about malformed JSON.
     *
     * @param argStr the raw argument string
     * @return the split arguments; a trailing empty argument is dropped
     */
    private static String[] splitArgs(String argStr) {
        List<String> parts = new ArrayList<String>();
        StringBuilder current = new StringBuilder();
        boolean inDoubleQuotes = false;
        boolean inSquareBrackets = false; // for arrays of arrays

        for (int i = 0; i < argStr.length(); i++) {
            char c = argStr.charAt(i);
            switch (c) {
                case '"':
                    // Quote state only toggles outside brackets; the quote
                    // character itself is always kept.
                    if (!inSquareBrackets) {
                        inDoubleQuotes = !inDoubleQuotes;
                    }
                    current.append(c);
                    break;
                case ',':
                    // A comma splits only at top level (not quoted, not bracketed).
                    if (!inDoubleQuotes && !inSquareBrackets) {
                        parts.add(current.toString());
                        current.setLength(0);
                    } else {
                        current.append(c);
                    }
                    break;
                case '[':
                    inSquareBrackets = true;
                    current.append(c);
                    break;
                case ']':
                    inSquareBrackets = false;
                    current.append(c);
                    break;
                default:
                    current.append(c);
                    break;
            }
        }
        // Flush the final argument unless it is empty.
        if (current.length() > 0) {
            parts.add(current.toString());
        }
        return parts.toArray(new String[parts.size()]);
    }
}
public class SessionBeanImpl { /** * Check that the scope type is allowed by the stereotypes on the bean and * the bean type */ protected void checkScopeAllowed ( ) { } }
if ( ejbDescriptor . isStateless ( ) && ! isDependent ( ) ) { throw BeanLogger . LOG . scopeNotAllowedOnStatelessSessionBean ( getScope ( ) , getType ( ) ) ; } if ( ejbDescriptor . isSingleton ( ) && ! ( isDependent ( ) || getScope ( ) . equals ( ApplicationScoped . class ) ) ) { throw BeanLogger . LOG . scopeNotAllowedOnSingletonBean ( getScope ( ) , getType ( ) ) ; }
public class Singles {

    /**
     * Block until a quorum of results have returned, as determined by the
     * provided predicate.
     * <pre>
     * {@code
     * Single<ListX<Integer>> strings = Singles.quorum(status -> status.getCompleted() > 0,
     *         Single.deferred(() -> 1), Single.empty(), Single.empty());
     * strings.get().size()
     * }
     * </pre>
     *
     * @param breakout predicate that determines whether the block should be
     *                 continued or removed
     * @param fts      Singles to wait on results from
     * @return Single which will be populated with a quorum of results
     */
    @SafeVarargs
    public static <T> Single<Seq<T>> quorum(Predicate<Status<T>> breakout, Single<T>... fts) {
        // Delegate to the Futures-based quorum and re-wrap the publisher.
        return Single.fromPublisher(Futures.quorum(breakout, futures(fts)));
    }
}
public class PathHelper { /** * Walks a file tree . * This method walks a file tree rooted at a given starting file . The file * tree traversal is < em > depth - first < / em > with the given { @ link FileVisitor } * invoked for each file encountered . File tree traversal completes when all * accessible files in the tree have been visited , or a visit method returns a * result of { @ link FileVisitResult # TERMINATE TERMINATE } . Where a visit method * terminates due an { @ code IOException } , an uncaught error , or runtime * exception , then the traversal is terminated and the error or exception is * propagated to the caller of this method . * For each file encountered this method attempts to read its * { @ link java . nio . file . attribute . BasicFileAttributes } . If the file is not a * directory then the { @ link FileVisitor # visitFile visitFile } method is * invoked with the file attributes . If the file attributes cannot be read , * due to an I / O exception , then the { @ link FileVisitor # visitFileFailed * visitFileFailed } method is invoked with the I / O exception . * Where the file is a directory , and the directory could not be opened , then * the { @ code visitFileFailed } method is invoked with the I / O exception , after * which , the file tree walk continues , by default , at the next * < em > sibling < / em > of the directory . * Where the directory is opened successfully , then the entries in the * directory , and their < em > descendants < / em > are visited . When all entries * have been visited , or an I / O error occurs during iteration of the * directory , then the directory is closed and the visitor ' s * { @ link FileVisitor # postVisitDirectory postVisitDirectory } method is * invoked . The file tree walk then continues , by default , at the next * < em > sibling < / em > of the directory . * By default , symbolic links are not automatically followed by this method . 
* If the { @ code options } parameter contains the * { @ link FileVisitOption # FOLLOW _ LINKS FOLLOW _ LINKS } option then symbolic * links are followed . When following links , and the attributes of the target * cannot be read , then this method attempts to get the * { @ code BasicFileAttributes } of the link . If they can be read then the * { @ code visitFile } method is invoked with the attributes of the link * ( otherwise the { @ code visitFileFailed } method is invoked as specified * above ) . * If the { @ code options } parameter contains the * { @ link FileVisitOption # FOLLOW _ LINKS FOLLOW _ LINKS } option then this method * keeps track of directories visited so that cycles can be detected . A cycle * arises when there is an entry in a directory that is an ancestor of the * directory . Cycle detection is done by recording the * { @ link java . nio . file . attribute . BasicFileAttributes # fileKey file - key } of * directories , or if file keys are not available , by invoking the * { @ link Files # isSameFile } method to test if a directory is the same file as * an ancestor . When a cycle is detected it is treated as an I / O error , and * the { @ link FileVisitor # visitFileFailed visitFileFailed } method is invoked * with an instance of { @ link FileSystemLoopException } . * The { @ code maxDepth } parameter is the maximum number of levels of * directories to visit . A value of { @ code 0 } means that only the starting * file is visited , unless denied by the security manager . A value of * { @ link Integer # MAX _ VALUE MAX _ VALUE } may be used to indicate that all levels * should be visited . The { @ code visitFile } method is invoked for all files , * including directories , encountered at { @ code maxDepth } , unless the basic * file attributes cannot be read , in which case the { @ code * visitFileFailed } method is invoked . * If a visitor returns a result of { @ code null } then { @ code * NullPointerException } is thrown . 
* When a security manager is installed and it denies access to a file ( or * directory ) , then it is ignored and the visitor is not invoked for that file * ( or directory ) . * @ param aStart * the starting file * @ param aOptions * options to configure the traversal * @ param nMaxDepth * the maximum number of directory levels to visit * @ param aVisitor * the file visitor to invoke for each file * @ return the starting file * @ throws UncheckedIOException * if an I / O error is thrown by a visitor method */ @ Nonnull public static Path walkFileTree ( @ Nonnull final Path aStart , @ Nonnull final Set < FileVisitOption > aOptions , @ Nonnegative final int nMaxDepth , @ Nonnull final FileVisitor < ? super Path > aVisitor ) { } }
try { return Files . walkFileTree ( aStart , aOptions , nMaxDepth , aVisitor ) ; } catch ( final IOException ex ) { throw new UncheckedIOException ( ex ) ; }
public class HornSchunck { /** * Computes average flow using an 8 - connect neighborhood for the image border */ protected static void borderAverageFlow ( ImageFlow flow , ImageFlow averageFlow ) { } }
for ( int y = 0 ; y < flow . height ; y ++ ) { computeBorder ( flow , averageFlow , 0 , y ) ; computeBorder ( flow , averageFlow , flow . width - 1 , y ) ; } for ( int x = 1 ; x < flow . width - 1 ; x ++ ) { computeBorder ( flow , averageFlow , x , 0 ) ; computeBorder ( flow , averageFlow , x , flow . height - 1 ) ; }
public class WWindow { /** * Set the WComponent that will handle the content for this pop up window . * @ param content the window content . */ public void setContent ( final WComponent content ) { } }
WindowModel model = getOrCreateComponentModel ( ) ; // If the previous content had been wrapped , then remove it from the wrapping WApplication . if ( model . wrappedContent != null && model . wrappedContent != model . content ) { model . wrappedContent . removeAll ( ) ; } model . content = content ; // Wrap content in a WApplication if ( content instanceof WApplication ) { model . wrappedContent = ( WApplication ) content ; } else { model . wrappedContent = new WApplication ( ) ; model . wrappedContent . add ( content ) ; } // There should only be one content . holder . removeAll ( ) ; holder . add ( model . wrappedContent ) ;
public class Common { /** * Generate acker ' s input Map < GlobalStreamId , Grouping > * for spout < GlobalStreamId ( spoutId , ACKER _ INIT _ STREAM _ ID ) , . . . > for * bolt < GlobalStreamId ( boltId , ACKER _ ACK _ STREAM _ ID ) , . . . > < GlobalStreamId ( boltId , * ACKER _ FAIL _ STREAM _ ID ) , . . . > */ public static Map < GlobalStreamId , Grouping > acker_inputs ( StormTopology topology ) { } }
Map < GlobalStreamId , Grouping > spout_inputs = new HashMap < > ( ) ; Map < String , SpoutSpec > spout_ids = topology . get_spouts ( ) ; for ( Entry < String , SpoutSpec > spout : spout_ids . entrySet ( ) ) { String id = spout . getKey ( ) ; GlobalStreamId stream = new GlobalStreamId ( id , ACKER_INIT_STREAM_ID ) ; Grouping group = Thrift . mkFieldsGrouping ( JStormUtils . mk_list ( "id" ) ) ; spout_inputs . put ( stream , group ) ; } Map < String , Bolt > bolt_ids = topology . get_bolts ( ) ; Map < GlobalStreamId , Grouping > bolt_inputs = new HashMap < > ( ) ; for ( Entry < String , Bolt > bolt : bolt_ids . entrySet ( ) ) { String id = bolt . getKey ( ) ; GlobalStreamId streamAck = new GlobalStreamId ( id , ACKER_ACK_STREAM_ID ) ; Grouping groupAck = Thrift . mkFieldsGrouping ( JStormUtils . mk_list ( "id" ) ) ; GlobalStreamId streamFail = new GlobalStreamId ( id , ACKER_FAIL_STREAM_ID ) ; Grouping groupFail = Thrift . mkFieldsGrouping ( JStormUtils . mk_list ( "id" ) ) ; bolt_inputs . put ( streamAck , groupAck ) ; bolt_inputs . put ( streamFail , groupFail ) ; } Map < GlobalStreamId , Grouping > allInputs = new HashMap < > ( ) ; allInputs . putAll ( bolt_inputs ) ; allInputs . putAll ( spout_inputs ) ; return allInputs ;
public class PlanetaryInteractionApi { /** * Get colony layout ( asynchronously ) Returns full details on the layout of * a single planetary colony , including links , pins and routes . Note : * Planetary information is only recalculated when the colony is viewed * through the client . Information will not update until this criteria is * met . - - - This route is cached for up to 600 seconds SSO Scope : * esi - planets . manage _ planets . v1 * @ param characterId * An EVE character ID ( required ) * @ param planetId * Planet id of the target planet ( required ) * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ param token * Access token to use if unable to set a header ( optional ) * @ param callback * The callback to be executed when the API call finishes * @ return The request call * @ throws ApiException * If fail to process the API call , e . g . serializing the request * body object */ public com . squareup . okhttp . Call getCharactersCharacterIdPlanetsPlanetIdAsync ( Integer characterId , Integer planetId , String datasource , String ifNoneMatch , String token , final ApiCallback < CharacterPlanetResponse > callback ) throws ApiException { } }
com . squareup . okhttp . Call call = getCharactersCharacterIdPlanetsPlanetIdValidateBeforeCall ( characterId , planetId , datasource , ifNoneMatch , token , callback ) ; Type localVarReturnType = new TypeToken < CharacterPlanetResponse > ( ) { } . getType ( ) ; apiClient . executeAsync ( call , localVarReturnType , callback ) ; return call ;
public class Unchecked { /** * Wrap a { @ link CheckedLongToDoubleFunction } in a { @ link LongToDoubleFunction } with a custom handler for checked exceptions . * Example : * < code > < pre > * LongStream . of ( 1L , 2L , 3L ) . mapToInt ( Unchecked . longToDoubleFunction ( * if ( l & lt ; 0L ) * throw new Exception ( " Only positive numbers allowed " ) ; * return ( double ) l ; * throw new IllegalStateException ( e ) ; * < / pre > < / code > */ public static LongToDoubleFunction longToDoubleFunction ( CheckedLongToDoubleFunction function , Consumer < Throwable > handler ) { } }
return t -> { try { return function . applyAsDouble ( t ) ; } catch ( Throwable e ) { handler . accept ( e ) ; throw new IllegalStateException ( "Exception handler must throw a RuntimeException" , e ) ; } } ;
public class NodeUtil { /** * Given the function , this returns the nth * argument or null if no such parameter exists . */ static Node getArgumentForFunction ( Node function , int index ) { } }
checkState ( function . isFunction ( ) ) ; return getNthSibling ( function . getSecondChild ( ) . getFirstChild ( ) , index ) ;
public class XExtensionParser { /** * Parses an extension from a definition file . * @ param file The definition file containing the extension . * @ return The extension object , as defined in the provided file . */ public XExtension parse ( File file ) throws IOException , ParserConfigurationException , SAXException { } }
BufferedInputStream is = new BufferedInputStream ( new FileInputStream ( file ) ) ; // set up a specialized SAX2 handler to fill the container XExtensionHandler handler = new XExtensionHandler ( ) ; // set up SAX parser and parse provided log file into the container SAXParserFactory parserFactory = SAXParserFactory . newInstance ( ) ; SAXParser parser = parserFactory . newSAXParser ( ) ; parser . parse ( is , handler ) ; is . close ( ) ; return handler . getExtension ( ) ;
public class BaseBuffer { /** * { @ inheritDoc } */ @ Override public final Buffer readUntilSingleCRLF ( ) throws IOException { } }
final int start = getReaderIndex ( ) ; int found = 0 ; while ( found < 2 && hasReadableBytes ( ) ) { final byte b = readByte ( ) ; if ( found == 0 && b == CR ) { ++ found ; } else if ( found == 1 && b == LF ) { ++ found ; } else { found = 0 ; } } if ( found == 2 ) { return slice ( start , getReaderIndex ( ) - 2 ) ; } else { setReaderIndex ( start ) ; return null ; }
public class CrawlToFile { /** * Get the options . * @ return the specific CrawlToCsv options */ @ Override protected Options getOptions ( ) { } }
final Options options = super . getOptions ( ) ; final Option filenameOption = new Option ( "f" , "the name of the output file, default name is " + DEFAULT_FILENAME + " [optional]" ) ; filenameOption . setArgName ( "FILENAME" ) ; filenameOption . setLongOpt ( "filename" ) ; filenameOption . setRequired ( false ) ; filenameOption . setArgs ( 1 ) ; options . addOption ( filenameOption ) ; final Option errorFilenameOption = new Option ( "ef" , "the name of the error output file, default name is " + DEFAULT_ERROR_FILENAME + " [optional]" ) ; errorFilenameOption . setArgName ( "ERRORFILENAME" ) ; errorFilenameOption . setLongOpt ( "errorfilename" ) ; errorFilenameOption . setRequired ( false ) ; errorFilenameOption . setArgs ( 1 ) ; options . addOption ( errorFilenameOption ) ; final Option verboseOption = new Option ( "ve" , "verbose logging, default is false [optional]" ) ; verboseOption . setArgName ( "VERBOSE" ) ; verboseOption . setLongOpt ( "verbose" ) ; verboseOption . setRequired ( false ) ; verboseOption . setArgs ( 1 ) ; verboseOption . setType ( Boolean . class ) ; options . addOption ( verboseOption ) ; return options ;
public class ParameterizedJobMixIn { /** * Standard implementation of { @ link ParameterizedJob # doBuildWithParameters } . */ @ SuppressWarnings ( "deprecation" ) public final void doBuildWithParameters ( StaplerRequest req , StaplerResponse rsp , @ QueryParameter TimeDuration delay ) throws IOException , ServletException { } }
hudson . model . BuildAuthorizationToken . checkPermission ( asJob ( ) , asJob ( ) . getAuthToken ( ) , req , rsp ) ; ParametersDefinitionProperty pp = asJob ( ) . getProperty ( ParametersDefinitionProperty . class ) ; if ( ! asJob ( ) . isBuildable ( ) ) { throw HttpResponses . error ( SC_CONFLICT , new IOException ( asJob ( ) . getFullName ( ) + " is not buildable!" ) ) ; } if ( pp != null ) { pp . buildWithParameters ( req , rsp , delay ) ; } else { throw new IllegalStateException ( "This build is not parameterized!" ) ; }
public class SerializingCopier {

    /**
     * Returns a deep copy of the passed-in instance by serializing it and
     * deserializing the resulting data.
     *
     * @param obj the instance to copy
     * @return the deserialized copy
     * @throws SerializerException if the copy's class cannot be resolved
     *                             during deserialization
     */
    @Override
    public T copy(final T obj) {
        try {
            // Round-trip through the serializer: write, then read back.
            return serializer.read(serializer.serialize(obj));
        } catch (ClassNotFoundException e) {
            throw new SerializerException("Copying failed.", e);
        }
    }
}
public class EIIImpl {

    /**
     * <!-- begin-user-doc -->
     * Returns the value of the feature identified by {@code featureID},
     * delegating unknown features to the superclass.
     * (EMF-generated; regenerate rather than hand-editing.)
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case AfplibPackage.EII__IMO_NAME:
                return getImoName();
        }
        // Unknown feature: defer to the generated superclass implementation.
        return super.eGet(featureID, resolve, coreType);
    }
}
public class EventUtilities { /** * Marshall AttDataReady * @ param dataReady the DataReady object to marshall * @ return result of the marshall action * @ throws DevFailed */ static byte [ ] marshall ( final AttDataReady dataReady ) throws DevFailed { } }
XLOGGER . entry ( ) ; final CDROutputStream os = new CDROutputStream ( ) ; try { AttDataReadyHelper . write ( os , dataReady ) ; XLOGGER . exit ( ) ; return cppAlignment ( os . getBufferCopy ( ) ) ; } finally { os . close ( ) ; }
public class ApiOvhIpLoadbalancing { /** * Add a new TCP Farm on your IP Load Balancing * REST : POST / ipLoadbalancing / { serviceName } / tcp / farm * @ param balance [ required ] Load balancing algorithm . ' roundrobin ' if null * @ param zone [ required ] Zone of your farm * @ param stickiness [ required ] Stickiness type . No stickiness if null * @ param port [ required ] Port attached to your farm ( [ 1 . . 49151 ] ) . Inherited from frontend if null * @ param displayName [ required ] Human readable name for your backend , this field is for you * @ param vrackNetworkId [ required ] Internal Load Balancer identifier of the vRack private network to attach to your farm , mandatory when your Load Balancer is attached to a vRack * @ param probe [ required ] Probe used to determine if a backend is alive and can handle requests * @ param serviceName [ required ] The internal name of your IP load balancing */ public OvhBackendTcp serviceName_tcp_farm_POST ( String serviceName , OvhBalanceTCPEnum balance , String displayName , Long port , OvhBackendProbe probe , OvhStickinessTCPEnum stickiness , Long vrackNetworkId , String zone ) throws IOException { } }
String qPath = "/ipLoadbalancing/{serviceName}/tcp/farm" ; StringBuilder sb = path ( qPath , serviceName ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "balance" , balance ) ; addBody ( o , "displayName" , displayName ) ; addBody ( o , "port" , port ) ; addBody ( o , "probe" , probe ) ; addBody ( o , "stickiness" , stickiness ) ; addBody ( o , "vrackNetworkId" , vrackNetworkId ) ; addBody ( o , "zone" , zone ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhBackendTcp . class ) ;
public class InspectionReport { /** * Adds description to log . */ public void logDescription ( String description ) throws IOException { } }
// The ThreadLocal has been initialized so we know that we are in multithreaded mode . if ( reportContext . get ( ) != null ) { reportContext . get ( ) . addComment ( description ) ; } else { writeMessage ( description ) ; }
public class BackupUploadServlet { /** * Gets uploaded file as stream . * @ param items * @ return the stream * @ throws IOException */ private InputStream getStream ( List < FileItem > items ) throws IOException { } }
for ( FileItem i : items ) { if ( ! i . isFormField ( ) && i . getFieldName ( ) . equals ( CONTENT_PARAMETER ) ) { return i . getInputStream ( ) ; } } return null ;
public class TokenCompleteTextView {

    /**
     * Remove an object from the token list. Removes duplicates if present, or
     * does nothing if the object is not in the view. Uses
     * {@link Object#equals(Object)} to find objects. May only be called from
     * the main thread.
     *
     * @param object object to remove; may be null or not in the view
     */
    @UiThread
    public void removeObjectSync(T object) {
        // Piggyback on the span-deletion callbacks that fire on text changes:
        // gather every Editable that may contain tokens for this object.
        ArrayList<Editable> candidates = new ArrayList<>();
        // Hidden content, when present, must be updated before visible text.
        if (hiddenContent != null) {
            candidates.add(hiddenContent);
        }
        if (getText() != null) {
            candidates.add(getText());
        }
        // Strip every span whose token equals the requested object.
        for (Editable text : candidates) {
            for (TokenImageSpan span : text.getSpans(0, text.length(), TokenImageSpan.class)) {
                if (span.getToken().equals(object)) {
                    removeSpan(text, span);
                }
            }
        }
        updateCountSpan();
    }
}
public class Base64EncodedSignerImpl {
    /**
     * Signs a message.
     *
     * @param message the message to sign
     * @return a base64 encoded version of the signature
     * @throws SignatureException if the configured charset name is not supported
     */
    public String sign(String message) {
        try {
            // Encode the message using the configured charset, then sign the bytes.
            final byte[] signature = signer.sign(message.getBytes(charsetName));
            // 'false' requests unchunked (single-line) base64 output.
            return new String(Base64.encodeBase64(signature, false));
        } catch (UnsupportedEncodingException e) {
            // Preserve the cause and report which charset name failed.
            throw new SignatureException("unsupported encoding: charsetName=" + charsetName, e);
        }
    }
}
public class DeleteThingGroupRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param deleteThingGroupRequest the request to marshal; must not be null
     * @param protocolMarshaller the protocol marshaller to write into
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DeleteThingGroupRequest deleteThingGroupRequest, ProtocolMarshaller protocolMarshaller) {
        if (deleteThingGroupRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // One marshall call per request field, using its protocol binding.
            protocolMarshaller.marshall(deleteThingGroupRequest.getThingGroupName(), THINGGROUPNAME_BINDING);
            protocolMarshaller.marshall(deleteThingGroupRequest.getExpectedVersion(), EXPECTEDVERSION_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ViewRequestBuilder {
    /**
     * Create a new builder for a paginated request on the view.
     * Defaults to 20 rows per page if not specified by
     * {@link PaginatedRequestBuilder#rowsPerPage(int)}.
     *
     * @param keyType {@link com.cloudant.client.api.views.Key.Type} of the key emitted by the view
     * @param valueType class of the type of value emitted by the view
     * @param <K> type of key emitted by the view
     * @param <V> type of value emitted by the view
     * @return a new {@link PaginatedRequestBuilder} for the database view specified by this
     *         ViewRequestBuilder
     */
    public <K, V> PaginatedRequestBuilder<K, V> newPaginatedRequest(Key.Type<K> keyType, Class<V> valueType) {
        // Bundle the key/value type information into fresh request parameters.
        return new PaginatedRequestBuilderImpl<K, V>(newViewRequestParameters(keyType.getType(), valueType));
    }
}
public class ToStream {
    /**
     * A private helper method to output the pending DOCTYPE declaration and,
     * when inside the doctype, the " [" that opens the internal DTD subset.
     *
     * @throws SAXException
     * @throws IOException
     */
    private void DTDprolog() throws SAXException, IOException {
        final java.io.Writer writer = m_writer;
        if (m_needToOutputDocTypeDecl) {
            // Emit the pending <!DOCTYPE ...> declaration exactly once.
            outputDocTypeDecl(m_elemContext.m_elementName, false);
            m_needToOutputDocTypeDecl = false;
        }
        if (m_inDoctype) {
            // Open the internal DTD subset, followed by a line separator.
            writer.write(" [");
            writer.write(m_lineSep, 0, m_lineSepLen);
            m_inDoctype = false;
        }
    }
}
public class ConfigResourcesLoader {
    /**
     * Make sure the URL is correct and exists. Used to support different
     * locations for a resource (e.g. under WEB-INF or not etc.).
     *
     * @param externalResource the resource to check for validity
     * @return true if it exists, false if not
     * @throws IllegalArgumentException if the resource resolves into a jar file
     *         (other than one whose path contains "dummyConfig")
     */
    private boolean validateResourceIsValid(final Resource externalResource) {
        boolean isValid = true;
        try {
            URL url = externalResource.getURL();
            // A customer config packaged inside a jar is explicitly rejected,
            // except for the shipped "dummyConfig" default.
            if (url.getPath() != null && url.getPath().contains(".jar") && !url.getPath().contains("dummyConfig")) {
                throw new IllegalArgumentException("The customer config file: \"config.properties\" was found in a jar file. This is not legal. Offending jar is: " + url.getPath());
            }
        } catch (IOException e) {
            // getURL() failing means the resource cannot be resolved.
            isValid = false;
        }
        return isValid;
    }
}
public class Equation {
    /**
     * Infer the type of and create a new output variable using the results from the right side
     * of the equation. If the type is already known just return that.
     *
     * @param t0 left-hand-side token; a WORD token triggers variable creation
     * @param variableRight resolved right-hand-side variable whose type is mirrored
     * @return the (possibly newly created) output variable
     */
    private Variable createVariableInferred(TokenList.Token t0, Variable variableRight) {
        Variable result;
        if (t0.getType() == Type.WORD) {
            // Left side is a bare name: alias a new variable whose type
            // mirrors the right-hand side.
            switch (variableRight.getType()) {
                case MATRIX:
                    alias(new DMatrixRMaj(1, 1), t0.getWord());
                    break;
                case SCALAR:
                    // Preserve integer vs. floating-point scalar kinds.
                    if (variableRight instanceof VariableInteger) {
                        alias(0, t0.getWord());
                    } else {
                        alias(1.0, t0.getWord());
                    }
                    break;
                case INTEGER_SEQUENCE:
                    alias((IntegerSequence) null, t0.getWord());
                    break;
                default:
                    throw new RuntimeException("Type not supported for assignment: " + variableRight.getType());
            }
            // alias() registered the variable; fetch the instance it created.
            result = variables.get(t0.getWord());
        } else {
            result = t0.getVariable();
        }
        return result;
    }
}
public class CmsImportVersion2 {
    /**
     * Gets the encoding from the &lt;?xml ...&gt; tag if present.<p>
     *
     * The attribute name is matched case-insensitively; the value is returned
     * upper-cased. If no well-formed {@code encoding="..."} attribute (with a
     * closing quote) is found, the empty string is returned.
     *
     * @param content the file content
     * @return the found encoding in upper case, or "" if none was found
     */
    protected String getEncoding(String content) {
        // Locate the attribute case-insensitively (lowercasing preserves indices).
        final String attribute = "encoding=\"";
        int start = content.toLowerCase().indexOf(attribute);
        if (start != -1) {
            // Take everything after the opening quote from the ORIGINAL string,
            // so the value keeps its original characters.
            String tail = content.substring(start + attribute.length());
            int closing = tail.indexOf('"');
            if (closing != -1) {
                return tail.substring(0, closing).toUpperCase();
            }
        }
        // No complete encoding attribute found.
        return "";
    }
}
public class LongMomentStatistics {
    /**
     * Compares the state of two {@code LongMomentStatistics} objects. This is
     * a replacement for the {@link #equals(Object)} which is not advisable to
     * implement for this mutable object. If two objects have the same state, it
     * has still the same state when updated with the same value.
     * <pre>{@code
     * final LongMomentStatistics lms1 = ...;
     * final LongMomentStatistics lms2 = ...;
     * if (lms1.sameState(lms2)) {
     *     final long value = random.nextInt(1_000_000);
     *     lms1.accept(value);
     *     lms2.accept(value);
     *     assert lms1.sameState(lms2);
     *     assert lms2.sameState(lms1);
     *     assert lms1.sameState(lms1);
     * }
     * }</pre>
     *
     * @since 3.7
     * @param other the other object for the test
     * @return {@code true} if {@code this} and the {@code other} objects have
     *         the same state, {@code false} otherwise
     */
    public boolean sameState(final LongMomentStatistics other) {
        // Identity short-circuit; otherwise compare min/max/sum here and the
        // remaining moments in the superclass ('&&' binds tighter than '||').
        return this == other ||
            _min == other._min &&
            _max == other._max &&
            _sum == other._sum &&
            super.sameState(other);
    }
}
public class OnlineLDAsvi {
    /**
     * Returns the topic vector for a given topic. The vector should not be
     * altered, and is scaled so that the sum of all term weights sums to one.
     *
     * @param k the topic to get the vector for
     * @return the raw topic vector for the requested topic, wrapped in a
     *         normalizing scale factor
     */
    public Vec getTopicVec(int k) {
        // View over lambda's k-th row, scaled by 1/rowSum so weights sum to one.
        return new ScaledVector(1.0 / lambda.get(k).sum(), lambda.get(k));
    }
}
public class InstanceAccessDetails {
    /**
     * Describes the public SSH host keys or the RDP certificate.
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setHostKeys(java.util.Collection)} or {@link #withHostKeys(java.util.Collection)} if you want to override
     * the existing values.
     *
     * @param hostKeys Describes the public SSH host keys or the RDP certificate.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public InstanceAccessDetails withHostKeys(HostKeyAttributes... hostKeys) {
        if (this.hostKeys == null) {
            // Lazily create the backing list, presized for the new elements.
            setHostKeys(new java.util.ArrayList<HostKeyAttributes>(hostKeys.length));
        }
        for (HostKeyAttributes ele : hostKeys) {
            this.hostKeys.add(ele);
        }
        return this;
    }
}
public class ConnectionProperties {
    /**
     * Returns the end point previously assigned. This is only valid
     * when in hostname/port mode of operation - otherwise an
     * {@code SIErrorException} will be thrown.
     *
     * @return EndPoint The end point.
     */
    public ProviderEndPoint getEndPoint() {
        // Calling this in any mode other than HOST_PORT is a caller error.
        if (mode != PropertiesType.HOST_PORT) {
            throw new SIErrorException(
                nls.getFormattedMessage("INVALID_METHOD_FOR_OBJECT_TYPE_SICO0006", null, "INVALID_METHOD_FOR_OBJECT_TYPE_SICO0006") // D270373
            );
        }
        return endPoint;
    }
}
public class JpaBitLogStore { /** * / * ( non - Javadoc ) * @ see org . duracloud . mill . bitlog . BitLogStore # addReport ( java . lang . String , java . lang . String , java . lang . String , * java . lang . String , java . lang . String , org . duracloud . mill . bitlog . BitIntegrityResult , java . util . Date ) */ @ Transactional ( MillJpaRepoConfig . TRANSACTION_MANAGER_BEAN ) @ Override public BitIntegrityReport addReport ( String account , String storeId , String spaceId , String reportSpaceId , String reportContentId , BitIntegrityReportResult result , Date completionDate ) { } }
BitIntegrityReport report = new BitIntegrityReport ( ) ; report . setModified ( new Date ( ) ) ; report . setAccount ( account ) ; report . setStoreId ( storeId ) ; report . setSpaceId ( spaceId ) ; report . setReportSpaceId ( reportSpaceId ) ; report . setReportContentId ( reportContentId ) ; report . setCompletionDate ( new Date ( ) ) ; report . setResult ( result ) ; report . setDisplay ( ! result . equals ( BitIntegrityReportResult . FAILURE ) ) ; return this . bitReportRepo . saveAndFlush ( report ) ;
public class FileInstrumentationData {
    /**
     * Store a node to be instrumented later for branch coverage.
     *
     * @param lineNumber 1-based line number
     * @param branchNumber 1-based branch number
     * @param block the node of the conditional block
     */
    void putBranchNode(int lineNumber, int branchNumber, Node block) {
        Preconditions.checkArgument(lineNumber > 0, "Expected non-zero positive integer as line number: %s", lineNumber);
        Preconditions.checkArgument(branchNumber > 0, "Expected non-zero positive integer as branch number: %s", branchNumber);
        // The internal map is keyed by 0-based (line, branch) pairs.
        branchNodes.put(BranchIndexPair.of(lineNumber - 1, branchNumber - 1), block);
    }
}
public class PagePositionInformationImpl {
    /**
     * Reports whether the given structural feature is set to a non-default value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case AfplibPackage.PAGE_POSITION_INFORMATION__PGPRG:
                // Set iff pgprg differs from its default (null-safe comparison).
                return PGPRG_EDEFAULT == null ? pgprg != null : !PGPRG_EDEFAULT.equals(pgprg);
        }
        // Unknown features are handled by the superclass.
        return super.eIsSet(featureID);
    }
}
public class JmsRunnableFactory {
    /**
     * Creates a new {@link QueueConsumer}. For every text message received, the callback
     * is invoked with the contents of the text message as string.
     *
     * @param topic the destination name to consume from
     * @param messageCallback invoked with each received text payload
     * @return the configured consumer
     * @throws IllegalStateException if no connection factory was injected
     */
    public QueueConsumer createQueueTextMessageListener(final String topic, final ConsumerCallback<String> messageCallback) {
        // Fail fast if dependency injection never supplied a connection factory.
        Preconditions.checkState(connectionFactory != null, "connection factory was never injected!");
        return new QueueConsumer(connectionFactory, jmsConfig, topic, new TextMessageConsumerCallback(messageCallback));
    }
}
public class TimerView {
    /**
     * Start the timer running from the specified percentage complete,
     * to expire at the specified time.
     *
     * @param startPercent a value in [0f, 1f) indicating how much
     *        hourglass time has already elapsed when the timer starts
     * @param duration the time interval over which the timer would
     *        run if it started at 0%
     * @param finisher a listener that will be notified when the timer
     *        finishes, or null if nothing should be notified
     * @throws IllegalArgumentException if startPercent is outside [0, 1)
     *         or duration is negative
     */
    public void start(float startPercent, long duration, ResultListener<TimerView> finisher) {
        // Sanity check input arguments
        if (startPercent < 0.0f || startPercent >= 1.0f) {
            throw new IllegalArgumentException("Invalid starting percent " + startPercent);
        }
        if (duration < 0) {
            throw new IllegalArgumentException("Invalid duration " + duration);
        }
        // Stop any current processing
        stop();
        // Record the timer's full duration
        _duration = duration;
        // Change the completion percent and make sure the starting
        // time gets updated on the next tick
        changeComplete(startPercent);
        _start = Long.MIN_VALUE;
        // Remember who to notify when the timer completes.
        _finisher = finisher;
        // The warning and completion handlers haven't been triggered yet
        _warned = false;
        _completed = false;
        // Start things running
        _running = true;
    }
}
public class ChainingAWSCredentialsProvider {
    /**
     * Gets an instance, delegating to the fully-parameterized overload with
     * the last two arguments left null.
     *
     * @param credentialAccessKey the credential access key
     * @param credentialSecretKey the credential secret key
     * @param credentialPropertiesFile the credential properties file
     * @return the instance
     */
    public static AWSCredentialsProvider getInstance(final String credentialAccessKey, final String credentialSecretKey, final Resource credentialPropertiesFile) {
        // Convenience overload: remaining parameters of the full variant unset.
        return getInstance(credentialAccessKey, credentialSecretKey, credentialPropertiesFile, null, null);
    }
}
public class PairSet {
    /**
     * {@inheritDoc}
     */
    @Override
    public PairSet<T, I> intersection(Collection<? extends Pair<T, I>> c) {
        // A null collection intersects to the empty set; otherwise intersect
        // the underlying index matrices and rebuild a PairSet from the result.
        return c == null ? empty() : createFromIndices(matrix.intersection(convert(c).matrix));
    }
}
public class LastaPrepareFilter { protected void hookCurtainFinally ( FwAssistantDirector assistantDirector ) { } }
final FwCoreDirection coreDirection = assistantDirector . assistCoreDirection ( ) ; final CurtainFinallyHook hook = coreDirection . assistCurtainFinallyHook ( ) ; if ( hook != null ) { hook . hook ( assistantDirector ) ; }
public class Hessian2Output {
    /**
     * Writes the list header to the stream. List writers will call
     * <code>writeListBegin</code> followed by the list contents and then
     * call <code>writeListEnd</code>.
     * <code><pre>
     * list ::= V type value* Z
     *      ::= v type int value*
     * </pre></code>
     *
     * @param length number of elements, or negative for a variable-length list
     * @param type optional type name, or null for an untyped list
     * @return true for variable lists, false for fixed lists
     */
    public boolean writeListBegin(int length, String type) throws IOException {
        flushIfFull();
        if (length < 0) {
            // Variable-length list: terminated later by writeListEnd.
            if (type != null) {
                _buffer[_offset++] = (byte) BC_LIST_VARIABLE;
                writeType(type);
            } else
                _buffer[_offset++] = (byte) BC_LIST_VARIABLE_UNTYPED;
            return true;
        } else if (length <= LIST_DIRECT_MAX) {
            // Short fixed list: length is encoded directly in the marker byte.
            if (type != null) {
                _buffer[_offset++] = (byte) (BC_LIST_DIRECT + length);
                writeType(type);
            } else {
                _buffer[_offset++] = (byte) (BC_LIST_DIRECT_UNTYPED + length);
            }
            return false;
        } else {
            // Long fixed list: marker byte followed by an explicit int length.
            if (type != null) {
                _buffer[_offset++] = (byte) BC_LIST_FIXED;
                writeType(type);
            } else {
                _buffer[_offset++] = (byte) BC_LIST_FIXED_UNTYPED;
            }
            writeInt(length);
            return false;
        }
    }
}
public class PackageManagerUtils {
    /**
     * Checks if the device has an accelerometer sensor.
     *
     * @param context the context
     * @return {@code true} if the device has an accelerometer sensor
     */
    @TargetApi(Build.VERSION_CODES.FROYO)
    public static boolean hasAccelerometerSensorFeature(Context context) {
        // Delegate to the PackageManager-based overload.
        return hasAccelerometerSensorFeature(context.getPackageManager());
    }
}
public class ASTPrinter {
    /**
     * Prints a primitive type as "boolean" | "byte" | "char" | "short" | "int" |
     * "long" | "float" | "double".
     * (The original doc omitted "byte", which is handled below.)
     *
     * @param type PrimitiveType to be printed
     * @return pretty string
     */
    public static String print(PrimitiveType type) {
        // Exhaustive visitor over all primitive type cases.
        return type.match(new PrimitiveType.MatchBlock<String>() {
            @Override
            public String _case(BooleanType x) { return "boolean"; }
            @Override
            public String _case(ByteType x) { return "byte"; }
            @Override
            public String _case(CharType x) { return "char"; }
            @Override
            public String _case(DoubleType x) { return "double"; }
            @Override
            public String _case(FloatType x) { return "float"; }
            @Override
            public String _case(IntType x) { return "int"; }
            @Override
            public String _case(LongType x) { return "long"; }
            @Override
            public String _case(ShortType x) { return "short"; }
        });
    }
}
public class DataModelConverter {
    /**
     * Converts an iCalendar {@link VTimezone} component into the appropriate
     * vCalendar properties.
     *
     * @param timezone the TIMEZONE component
     * @param dates the date values in the vCalendar object that are effected by
     *        the timezone
     * @return the vCalendar properties
     */
    public static VCalTimezoneProperties convert(VTimezone timezone, List<Date> dates) {
        List<Daylight> daylights = new ArrayList<Daylight>();
        Timezone tz = null;
        if (dates.isEmpty()) {
            // Nothing references the timezone: return empty properties.
            return new VCalTimezoneProperties(daylights, tz);
        }
        ICalTimeZone icalTz = new ICalTimeZone(timezone);
        Collections.sort(dates);
        // Tracks which DST observances were already emitted so duplicate dates
        // inside the same observance are skipped.
        Set<DateTimeValue> daylightStartDates = new HashSet<DateTimeValue>();
        boolean zeroObservanceUsed = false;
        for (Date date : dates) {
            Boundary boundary = icalTz.getObservanceBoundary(date);
            Observance observance = boundary.getObservanceIn();
            Observance observanceAfter = boundary.getObservanceAfter();
            if (observance == null && observanceAfter == null) {
                continue;
            }
            if (observance == null) {
                // the date comes before the earliest observance
                if (observanceAfter instanceof StandardTime && !zeroObservanceUsed) {
                    UtcOffset offset = getOffset(observanceAfter.getTimezoneOffsetFrom());
                    DateTimeValue start = null;
                    DateTimeValue end = boundary.getObservanceAfterStart();
                    String standardName = icalTz.getDisplayName(false, TimeZone.SHORT);
                    String daylightName = icalTz.getDisplayName(true, TimeZone.SHORT);
                    Daylight daylight = new Daylight(true, offset, convert(start), convert(end), standardName, daylightName);
                    daylights.add(daylight);
                    zeroObservanceUsed = true;
                }
                if (observanceAfter instanceof DaylightSavingsTime) {
                    UtcOffset offset = getOffset(observanceAfter.getTimezoneOffsetFrom());
                    if (offset != null) {
                        tz = new Timezone(offset);
                    }
                }
                continue;
            }
            if (observance instanceof StandardTime) {
                UtcOffset offset = getOffset(observance.getTimezoneOffsetTo());
                if (offset != null) {
                    tz = new Timezone(offset);
                }
                continue;
            }
            if (observance instanceof DaylightSavingsTime && !daylightStartDates.contains(boundary.getObservanceInStart())) {
                UtcOffset offset = getOffset(observance.getTimezoneOffsetTo());
                DateTimeValue start = boundary.getObservanceInStart();
                DateTimeValue end = null;
                if (observanceAfter != null) {
                    end = boundary.getObservanceAfterStart();
                }
                String standardName = icalTz.getDisplayName(false, TimeZone.SHORT);
                String daylightName = icalTz.getDisplayName(true, TimeZone.SHORT);
                Daylight daylight = new Daylight(true, offset, convert(start), convert(end), standardName, daylightName);
                daylights.add(daylight);
                daylightStartDates.add(start);
                continue;
            }
        }
        if (tz == null) {
            // No observance yielded an offset: fall back to the raw offset.
            int rawOffset = icalTz.getRawOffset();
            UtcOffset offset = new UtcOffset(rawOffset);
            tz = new Timezone(offset);
        }
        if (daylights.isEmpty()) {
            // No DST transitions found: emit a single "no daylight saving" marker.
            Daylight daylight = new Daylight();
            daylight.setDaylight(false);
            daylights.add(daylight);
        }
        return new VCalTimezoneProperties(daylights, tz);
    }
}
public class HttpMockServer {
    /**
     * Starts the mock server and keeps a static reference to it.
     *
     * @param configReader wrapper for platform specific bits
     * @param simulatedNetworkType delay profile applied before responses are sent
     * @return the started server, or null if reading/parsing the config failed
     */
    public static HttpMockServer startMockApiServer(@Nonnull ConfigReader configReader, @Nonnull NetworkType simulatedNetworkType) {
        try {
            String configJson = new String(readInitialData(configReader.getMainConfigFile()));
            // An empty config file is treated as an empty JSON object.
            JSONObject jsonObj = configJson.isEmpty() ? new JSONObject() : new JSONObject(configJson);
            sMockServer = new HttpMockServer(jsonObj, configReader, simulatedNetworkType);
            return sMockServer;
        } catch (IOException e) {
            LOGGER.log(Level.SEVERE, "MockServer error:", e);
        } catch (JSONException e) {
            LOGGER.log(Level.SEVERE, "MockServer error:", e);
        }
        // Startup failed; the error was logged above.
        return null;
    }
}
public class SipSubsystemParser {
    /**
     * {@inheritDoc}
     *
     * Parses the sip subsystem root element: every recognized attribute is
     * copied verbatim onto the subsystem ADD operation, then child connector
     * elements are parsed.
     */
    @Override
    public void readElement(XMLExtendedStreamReader reader, List<ModelNode> list) throws XMLStreamException {
        PathAddress address = PathAddress.pathAddress(PathElement.pathElement(SUBSYSTEM, SipExtension.SUBSYSTEM_NAME));
        final ModelNode subsystem = new ModelNode();
        subsystem.get(OP).set(ADD);
        subsystem.get(OP_ADDR).set(address.toModelNode());
        // Attributes: each known attribute maps 1:1 onto the model node.
        final int count = reader.getAttributeCount();
        for (int i = 0; i < count; i++) {
            requireNoNamespaceAttribute(reader, i);
            final String value = reader.getAttributeValue(i);
            final Attribute attribute = Attribute.forName(reader.getAttributeLocalName(i));
            switch (attribute) {
                case INSTANCE_ID:
                case APPLICATION_ROUTER:
                case SIP_STACK_PROPS:
                case SIP_APP_DISPATCHER_CLASS:
                case SIP_PATH_NAME:
                case ADDITIONAL_PARAMETERABLE_HEADERS:
                case BASE_TIMER_INTERVAL:
                case T2_INTERVAL:
                case T4_INTERVAL:
                case TIMER_D_INTERVAL:
                case GATHER_STATISTICS:
                case CALL_ID_MAX_LENGTH:
                case TAG_HASH_MAX_LENGTH:
                case DIALOG_PENDING_REQUEST_CHECKING:
                case DNS_SERVER_LOCATOR_CLASS:
                case DNS_TIMEOUT:
                case DNS_RESOLVER_CLASS:
                case CANCELED_TIMER_TASKS_PURGE_PERIOD:
                case PROXY_TIMER_SERVICE_IMPEMENTATION_TYPE:
                case SAS_TIMER_SERVICE_IMPEMENTATION_TYPE:
                case CONGESTION_CONTROL_INTERVAL:
                case CONGESTION_CONTROL_POLICY:
                case MEMORY_THRESHOLD:
                case BACK_TO_NORMAL_MEMORY_THRESHOLD:
                case OUTBOUND_PROXY:
                case CONCURRENCY_CONTROL_MODE:
                case USE_PRETTY_ENCODING:
                    subsystem.get(attribute.getLocalName()).set(value);
                    break;
                default:
                    throw unexpectedAttribute(reader, i);
            }
        }
        list.add(subsystem);
        // elements
        while (reader.hasNext() && reader.nextTag() != END_ELEMENT) {
            switch (Namespace.forUri(reader.getNamespaceURI())) {
                case SIP_1_0: {
                    final Element element = Element.forName(reader.getLocalName());
                    switch (element) {
                        case CONNECTOR: {
                            parseConnector(reader, address, list);
                            break;
                        }
                        default: {
                            throw unexpectedElement(reader);
                        }
                    }
                    break;
                }
                default: {
                    throw unexpectedElement(reader);
                }
            }
        }
    }
}
public class SparkDl4jMultiLayer {
    /**
     * Perform distributed evaluation of any type of {@link IEvaluation} - or multiple IEvaluation instances.
     * Distributed equivalent of {@link MultiLayerNetwork#doEvaluation(DataSetIterator, IEvaluation[])}
     *
     * @param data             Data to evaluate on
     * @param evalBatchSize    Evaluation batch size
     * @param emptyEvaluations Empty evaluation instances. Starting point (serialized/duplicated, then merged)
     * @param <T>              Type of evaluation instance to return
     * @return IEvaluation instances
     */
    @SuppressWarnings("unchecked")
    public <T extends IEvaluation> T[] doEvaluation(JavaRDD<DataSet> data, int evalBatchSize, T... emptyEvaluations) {
        // Delegate to the full overload using the default evaluation worker count.
        return doEvaluation(data, getDefaultEvaluationWorkers(), evalBatchSize, emptyEvaluations);
    }
}
public class CountMap { /** * Add the specified amount to the count for the specified key , return the new count . * Adding 0 will ensure that a Map . Entry is created for the specified key . */ public int add ( K key , int amount ) { } }
CountEntry < K > entry = _backing . get ( key ) ; if ( entry == null ) { _backing . put ( key , new CountEntry < K > ( key , amount ) ) ; return amount ; } return ( entry . count += amount ) ;
public class CmsWebdavRange {
    /**
     * Validate the range.<p>
     *
     * Note: as a side effect, an end offset at or past the resource length is
     * clamped to the last addressable byte before validation.
     *
     * @return true if the actual range is valid, otherwise false
     */
    public boolean validate() {
        if (m_end >= m_length) {
            // Clamp the end to the last valid byte of the resource.
            m_end = m_length - 1;
        }
        return ((m_start >= 0) && (m_end >= 0) && (m_start <= m_end) && (m_length > 0));
    }
}
public class DescribeIdentityPoolRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param describeIdentityPoolRequest the request to marshal; must not be null
     * @param protocolMarshaller the protocol marshaller to write into
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DescribeIdentityPoolRequest describeIdentityPoolRequest, ProtocolMarshaller protocolMarshaller) {
        if (describeIdentityPoolRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Single field: identity pool id with its protocol binding.
            protocolMarshaller.marshall(describeIdentityPoolRequest.getIdentityPoolId(), IDENTITYPOOLID_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class OutputUpdateMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param outputUpdate the update to marshal; must not be null
     * @param protocolMarshaller the protocol marshaller to write into
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(OutputUpdate outputUpdate, ProtocolMarshaller protocolMarshaller) {
        if (outputUpdate == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // One marshall call per field, using its protocol binding.
            protocolMarshaller.marshall(outputUpdate.getOutputId(), OUTPUTID_BINDING);
            protocolMarshaller.marshall(outputUpdate.getNameUpdate(), NAMEUPDATE_BINDING);
            protocolMarshaller.marshall(outputUpdate.getKinesisStreamsOutputUpdate(), KINESISSTREAMSOUTPUTUPDATE_BINDING);
            protocolMarshaller.marshall(outputUpdate.getKinesisFirehoseOutputUpdate(), KINESISFIREHOSEOUTPUTUPDATE_BINDING);
            protocolMarshaller.marshall(outputUpdate.getLambdaOutputUpdate(), LAMBDAOUTPUTUPDATE_BINDING);
            protocolMarshaller.marshall(outputUpdate.getDestinationSchemaUpdate(), DESTINATIONSCHEMAUPDATE_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class MetadataFinder {
    /**
     * Discards any tracks from the hot cache that were loaded from a now-unmounted media slot, because they are no
     * longer valid.
     *
     * @param slot the slot that was reported unmounted
     */
    private void flushHotCacheSlot(SlotReference slot) {
        // Iterate over a copy to avoid concurrent modification issues
        for (Map.Entry<DeckReference, TrackMetadata> entry : new HashMap<DeckReference, TrackMetadata>(hotCache).entrySet()) {
            // NOTE(review): identity comparison assumes SlotReference instances
            // are interned by getSlotReference — confirm before changing to equals().
            if (slot == SlotReference.getSlotReference(entry.getValue().trackReference)) {
                logger.debug("Evicting cached metadata in response to unmount report {}", entry.getValue());
                hotCache.remove(entry.getKey());
            }
        }
    }
}
public class DeweyNumber {
    /**
     * Creates a dewey number from a string representation. The input string must be a dot separated
     * string of integers.
     *
     * @param deweyNumberString Dot separated string of integers
     * @return Dewey number generated from the given input string
     * @throws NumberFormatException if any component is not a valid integer
     */
    public static DeweyNumber fromString(final String deweyNumberString) {
        String[] splits = deweyNumberString.split("\\.");
        if (splits.length == 0) {
            // NOTE(review): String.split never returns an empty array here
            // (even "" yields [""]), so this branch appears unreachable.
            return new DeweyNumber(Integer.parseInt(deweyNumberString));
        } else {
            // Parse each dot-separated component into the number array.
            int[] deweyNumber = new int[splits.length];
            for (int i = 0; i < splits.length; i++) {
                deweyNumber[i] = Integer.parseInt(splits[i]);
            }
            return new DeweyNumber(deweyNumber);
        }
    }
}
public class HamcrestValidationMatcher { /** * Checks for numeric matcher presence in expression . * @ param matcherExpression * @ return */ private boolean containsNumericMatcher ( String matcherExpression ) { } }
for ( String numericMatcher : numericMatchers ) { if ( matcherExpression . contains ( numericMatcher ) ) { return true ; } } return false ;
public class LocatedBlocks {
    /**
     * If the file is under construction, set the block size of the last block.
     * It updates the file length at the same time.
     *
     * @param blockId expected id of the last block (asserted)
     * @param blockSize new size for the last block; applied only when larger
     *        than the currently recorded size
     */
    public synchronized void setLastBlockSize(long blockId, long blockSize) {
        assert blocks.size() > 0;
        LocatedBlock last = blocks.get(blocks.size() - 1);
        // Only grow the last block, and only for files still being written.
        if (underConstruction && blockSize > last.getBlockSize()) {
            assert blockId == last.getBlock().getBlockId();
            // Grow the overall file length by the same delta.
            this.setFileLength(this.getFileLength() + blockSize - last.getBlockSize());
            last.setBlockSize(blockSize);
            if (LOG.isDebugEnabled()) {
                LOG.debug("DFSClient setting last block " + last + " to length " + blockSize + " filesize is now " + getFileLength());
            }
        }
    }
}
public class KMeans {
    /**
     * Runs the KMeans example: iteratively recomputes centroids for the input
     * points and emits each point paired with its final cluster id.
     */
    public static void main(String[] args) throws Exception {
        if (!parseParameters(args)) {
            return;
        }
        // set up execution environment
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        // get input data
        DataSet<Point> points = getPointDataSet(env);
        DataSet<Centroid> centroids = getCentroidDataSet(env);
        // set number of bulk iterations for KMeans algorithm
        IterativeDataSet<Centroid> loop = centroids.iterate(numIterations);
        DataSet<Centroid> newCentroids = points
            // compute closest centroid for each point
            .map(new SelectNearestCenter()).withBroadcastSet(loop, "centroids")
            // count and sum point coordinates for each centroid
            .map(new CountAppender())
            .groupBy(0).reduce(new CentroidAccumulator())
            // compute new centroids from point counts and coordinate sums
            .map(new CentroidAverager());
        // feed new centroids back into next iteration
        DataSet<Centroid> finalCentroids = loop.closeWith(newCentroids);
        DataSet<Tuple2<Integer, Point>> clusteredPoints = points
            // assign points to final clusters
            .map(new SelectNearestCenter()).withBroadcastSet(finalCentroids, "centroids");
        // emit result
        if (fileOutput) {
            clusteredPoints.writeAsCsv(outputPath, "\n", " ");
        } else {
            clusteredPoints.print();
        }
        // execute program
        env.execute("KMeans Example");
    }
}
public class PrivateVoltTableFactory {
    /**
     * End users should not call this method.
     * Obtain a reference to the table's underlying buffer.
     * The returned reference's position and mark are independent of
     * the table's buffer position and mark. The returned buffer has
     * no mark and is at position 0.
     */
    public static ByteBuffer getTableDataReference(VoltTable vt) {
        // duplicate() shares content but has independent position/limit/mark;
        // rewind() moves to position 0 and discards any inherited mark.
        ByteBuffer buf = vt.m_buffer.duplicate();
        buf.rewind();
        return buf;
    }
}
public class AlphaIndex { /** * Parses a one - based alpha - index . * An empty string will be parsed as zero . * @ see # fromAlpha ( java . lang . String ) */ public static long fromAlpha1 ( String s ) { } }
s = s . trim ( ) ; if ( s . isEmpty ( ) ) return 0 ; return fromAlpha ( s ) + 1 ;
public class JmsSessionImpl {
    /**
     * This method exists for the sole purpose of querying whether an application
     * onMessage called recover under an auto_ack session. It returns the current
     * value of the commit count, and then zeros the value.
     *
     * @return the uncommitted receive count recorded since the previous call
     */
    int getAndResetCommitCount() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "getAndResetCommitCount");
        // Snapshot then reset, so each call observes only receives since the last call.
        int currentUncommittedReceiveCount = uncommittedReceiveCount;
        uncommittedReceiveCount = 0;
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "getAndResetCommitCount", currentUncommittedReceiveCount);
        return currentUncommittedReceiveCount;
    }
}
public class JacksonTypeOracle {
    /**
     * Replaces the given type according to the annotation's "as", "keyAs" and
     * "contentAs" hints, handling arrays, iterables and maps specially.
     *
     * @param logger a {@link com.google.gwt.core.ext.TreeLogger} object.
     * @param type a {@link com.google.gwt.core.ext.typeinfo.JType} object.
     * @param deserializeAs a {@link java.lang.annotation.Annotation} object.
     * @return the (possibly replaced) {@link com.google.gwt.core.ext.typeinfo.JType} object.
     * @throws com.google.gwt.core.ext.UnableToCompleteException if any.
     */
    public JType replaceType(TreeLogger logger, JType type, Annotation deserializeAs) throws UnableToCompleteException {
        JClassType classType = type.isClassOrInterface();
        if (null == classType) {
            // Non-class types (e.g. primitives) cannot be replaced.
            return type;
        }
        Optional<JClassType> typeAs = getClassFromJsonDeserializeAnnotation(logger, deserializeAs, "as");
        Optional<JClassType> keyAs = getClassFromJsonDeserializeAnnotation(logger, deserializeAs, "keyAs");
        Optional<JClassType> contentAs = getClassFromJsonDeserializeAnnotation(logger, deserializeAs, "contentAs");
        if (!typeAs.isPresent() && !keyAs.isPresent() && !contentAs.isPresent()) {
            // The annotation carries no replacement hints.
            return type;
        }
        JArrayType arrayType = type.isArray();
        if (null != arrayType) {
            // Arrays: "contentAs" replaces the component type, otherwise "as".
            if (contentAs.isPresent()) {
                return typeOracle.getArrayType(contentAs.get());
            } else if (typeAs.isPresent()) {
                return typeOracle.getArrayType(typeAs.get());
            } else {
                return classType;
            }
        }
        JParameterizedType parameterizedType = type.isParameterized();
        if (null != parameterizedType) {
            JGenericType genericType;
            if (typeAs.isPresent()) {
                genericType = typeAs.get().isGenericType();
            } else {
                genericType = parameterizedType.getBaseType();
            }
            if (!keyAs.isPresent() && !contentAs.isPresent()) {
                // Only the base type changes; keep the original type arguments.
                return typeOracle.getParameterizedType(genericType, parameterizedType.getTypeArgs());
            } else if (contentAs.isPresent() && isIterable(parameterizedType)) {
                // Iterables: replace the single element type argument.
                return typeOracle.getParameterizedType(genericType, new JClassType[]{contentAs.get()});
            } else if (isMap(parameterizedType)) {
                // Maps: replace key and/or value type arguments as hinted.
                JClassType key;
                if (keyAs.isPresent()) {
                    key = keyAs.get();
                } else {
                    key = parameterizedType.getTypeArgs()[0];
                }
                JClassType content;
                if (contentAs.isPresent()) {
                    content = contentAs.get();
                } else {
                    content = parameterizedType.getTypeArgs()[1];
                }
                return typeOracle.getParameterizedType(genericType, new JClassType[]{key, content});
            }
        }
        if (typeAs.isPresent()) {
            return typeAs.get();
        }
        return type;
    }
}
public class WhitelistingApi { /** * Get whitelist certificate of device type . * Get whitelist certificate of device type . * @ param dtid Device Type ID . ( required ) * @ return CertificateEnvelope * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public CertificateEnvelope getWhitelistCertificate ( String dtid ) throws ApiException { } }
ApiResponse < CertificateEnvelope > resp = getWhitelistCertificateWithHttpInfo ( dtid ) ; return resp . getData ( ) ;
public class ClassSourceFileRecorder { /** * { @ inheritDoc } * @ see org . modeshape . sequencer . javafile . SourceFileRecorder # record ( org . modeshape . jcr . api . sequencer . Sequencer . Context , java . io . InputStream , long , java . lang . String , javax . jcr . Node ) */ @ Override public void record ( final Context context , final InputStream inputStream , final long length , final String encoding , final Node outputNode ) throws Exception { } }
JavaMetadata javaMetadata = JavaMetadata . instance ( inputStream , length , encoding ) ; record ( context , outputNode , javaMetadata ) ;
public class AbstractBaseController { /** * Return an { @ link EventHandler } . * @ param eventType the event type of the handler we want to return * @ return the right event handler * @ param < E > the Event type to manage * @ throws CoreException an exception if the current class doesn ' t implement the right EventAdapter interface . */ @ SuppressWarnings ( "unchecked" ) protected final < E extends Event > EventHandler < E > getHandler ( final EventType < E > eventType ) throws CoreException { } }
EventType < ? > temp = eventType ; EventHandler < E > handler = null ; while ( temp != null && handler == null ) { handler = ( EventHandler < E > ) this . eventHandlerMap . get ( temp ) ; temp = temp . getSuperType ( ) ; } // / / Check supertype ( ANY ) // if ( handler = = null ) { // handler = ( EventHandler < E > ) this . eventHandlerMap . get ( ) ; // Check if the handler has been created or not if ( handler == null ) { for ( final EventAdapter . Linker linker : EventAdapter . Linker . values ( ) ) { if ( isEventType ( eventType , linker . eventType ( ) ) ) { handler = buildEventHandler ( linker . adapterClass ( ) , ( Class < ? extends EventHandler < E > > ) linker . handlerClass ( ) ) ; } } if ( handler != null ) { // store the handler this . eventHandlerMap . put ( eventType , handler ) ; } } return handler ;
public class GroovyServlet {
    /**
     * Handles web requests to the GroovyServlet: resolves the script for the request URI,
     * runs it through the {@code GroovyScriptEngine} under {@code ServletCategory}, and maps
     * failures to 404 (script not found) or 500 (any other error).
     *
     * @param request the incoming HTTP request
     * @param response the HTTP response the script writes to
     * @throws IOException if sending the response or an error page fails
     */
    public void service(HttpServletRequest request, HttpServletResponse response) throws IOException {
        // Get the script path from the request - include aware (GROOVY-815)
        final String scriptUri = getScriptUri(request);
        // Set it to HTML by default
        response.setContentType("text/html; charset=" + encoding);
        // Set up the script context
        final ServletBinding binding = new ServletBinding(request, response, servletContext);
        setVariables(binding);
        // Run the script
        try {
            // Checked engine exceptions are wrapped into RuntimeException so they can be
            // unwrapped and classified in the catch block below.
            Closure closure = new Closure(gse) {
                public Object call() {
                    try {
                        return ((GroovyScriptEngine) getDelegate()).run(scriptUri, binding);
                    } catch (ResourceException | ScriptException e) {
                        throw new RuntimeException(e);
                    }
                }
            };
            GroovyCategorySupport.use(ServletCategory.class, closure);
        } catch (RuntimeException runtimeException) {
            StringBuilder error = new StringBuilder("GroovyServlet Error: ");
            error.append(" script: '");
            error.append(scriptUri);
            error.append("': ");
            Throwable e = runtimeException.getCause();
            /*
             * Null cause ?!  The RuntimeException itself is the failure; report it as 500.
             */
            if (e == null) {
                error.append(" Script processing failed.\n");
                error.append(runtimeException.getMessage());
                if (runtimeException.getStackTrace().length > 0)
                    error.append(runtimeException.getStackTrace()[0].toString());
                servletContext.log(error.toString());
                System.err.println(error.toString());
                runtimeException.printStackTrace(System.err);
                response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, error.toString());
                return;
            }
            /*
             * Resource not found.
             */
            if (e instanceof ResourceException) {
                error.append(" Script not found, sending 404.");
                servletContext.log(error.toString());
                System.err.println(error.toString());
                response.sendError(HttpServletResponse.SC_NOT_FOUND);
                return;
            }
            /*
             * Other internal error. Perhaps syntax ?!
             */
            servletContext.log("An error occurred processing the request", runtimeException);
            error.append(e.getMessage());
            if (e.getStackTrace().length > 0)
                error.append(e.getStackTrace()[0].toString());
            servletContext.log(e.toString());
            System.err.println(e.toString());
            runtimeException.printStackTrace(System.err);
            response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.toString());
        }
    }
}
public class BatchWorker {
    /**
     * Starts sending object metadata to the server, deduplicating by object id.
     *
     * Concurrency: the method is lock-free; a lost {@code putIfAbsent} race means another
     * thread registered the same oid first, and that thread's future is returned instead.
     *
     * @param context Object worker context.
     * @param meta Object metadata.
     * @return Future with the result. For the same object the same future can be returned.
     */
    @NotNull
    protected CompletableFuture<R> enqueue(@NotNull final Meta meta, @NotNull final T context) {
        State<T, R> state = objectQueue.get(meta.getOid());
        if (state != null) {
            if (state.future.isCancelled()) {
                // A cancelled entry is stale: evict it (keyed remove so we only drop this
                // exact state) and fall through to create a fresh one.
                objectQueue.remove(meta.getOid(), state);
                state = null;
            }
        }
        if (state == null) {
            final State<T, R> newState = new State<>(meta, context);
            state = objectQueue.putIfAbsent(meta.getOid(), newState);
            if (state == null) {
                // We won the race: our state is now registered, so trigger processing.
                state = newState;
                stateEnqueue(true);
            }
            // Otherwise another thread registered first; reuse its state/future.
        }
        return state.future;
    }
}
public class Padding { /** * Allows to set Cell ' s spacing with the Padding object , which has be done externally , as it ' s not part of * the standard libGDX API . Padding holds 4 floats for each direction , so it ' s compatible with both * padding and spacing settings without any additional changes . * @ param spacing contains data of spacing sizes . * @ param cell will have the padding set according to the given data . * @ return the given cell for chaining . */ public static Cell < ? > setSpacing ( final Padding spacing , final Cell < ? > cell ) { } }
return cell . space ( spacing . getTop ( ) , spacing . getLeft ( ) , spacing . getBottom ( ) , spacing . getRight ( ) ) ;
public class CacheUnitImpl {
    /**
     * Implements the method in the CacheUnit interface. Applies the updates to the local
     * internal caches and the external caches, registering the invalidations with the
     * invalidation audit daemon first so timestamps can be validated to prevent race conditions.
     *
     * @param cacheName the name of the cache to update
     * @param invalidateIdEvents A HashMap of invalidate-by-id events.
     * @param invalidateTemplateEvents A HashMap of invalidate-by-template events.
     * @param pushEntryEvents An ArrayList of cache entries to push.
     */
    public void batchUpdate(String cacheName, HashMap invalidateIdEvents, HashMap invalidateTemplateEvents, ArrayList pushEntryEvents) {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "batchUpdate():" + cacheName);
        // Register invalidations before applying them, then filter the pushed entries
        // against the audit daemon so stale entries are dropped.
        invalidationAuditDaemon.registerInvalidations(cacheName, invalidateIdEvents.values().iterator());
        invalidationAuditDaemon.registerInvalidations(cacheName, invalidateTemplateEvents.values().iterator());
        pushEntryEvents = invalidationAuditDaemon.filterEntryList(cacheName, pushEntryEvents);
        DCache cache = ServerCache.getCache(cacheName);
        if (cache != null) {
            cache.batchUpdate(invalidateIdEvents, invalidateTemplateEvents, pushEntryEvents);
            // Servlet-enabled caches also propagate invalidations to external caches.
            if (cache.getCacheConfig().isEnableServletSupport() == true) {
                if (servletCacheUnit != null) {
                    servletCacheUnit.invalidateExternalCaches(invalidateIdEvents, invalidateTemplateEvents);
                } else {
                    if (tc.isDebugEnabled())
                        Tr.debug(tc, "batchUpdate() cannot do invalidateExternalCaches because servletCacheUnit=NULL.");
                }
            }
        }
        if (tc.isEntryEnabled())
            Tr.exit(tc, "batchUpdate()");
    }
}
public class AbstractConsoleEditor {
    /**
     * Moves the cursor to the end of the current (possibly wrapped) line, scrolling the frame
     * and redrawing the wrapped tail segments when the end lies below the visible area.
     */
    public void moveToEndOfLine() {
        String currentLine = getContent(getLine());
        // Segments of the logical line as they appear on screen (one per terminal row).
        LinkedList<String> toDisplayLines = toDisplayLines(currentLine);
        // Rows already consumed up to the current column; drop those display segments.
        int remainingLines = getColumn() / terminal.getWidth() + 1;
        for (int r = 0; r < remainingLines; r++) {
            toDisplayLines.removeFirst();
        }
        frameColumn = currentLine.length();
        delegate.moveToEndOfLine();
        for (int l = 0; l < toDisplayLines.size(); l++) {
            // Each remaining segment occupies one more row; reduce the column by one width.
            frameLine++;
            frameColumn -= terminal.getWidth();
            if (frameLine >= terminal.getHeight() - getFooterSize()) {
                // Past the visible area: clamp, scroll one row, and redraw this segment.
                frameLine = terminal.getHeight() - getHeaderSize() - getFooterSize();
                scrollUp(1);
                console.out().print(ansi().cursor(frameLine + getHeaderSize(), 1));
                displayText(toDisplayLines.get(l));
                console.out().print(ansi().cursor(frameLine + getHeaderSize(), getColumn()));
            }
        }
        // Finally position the cursor at the computed frame coordinates.
        console.out().print(ansi().cursor(frameLine + getHeaderSize(), frameColumn));
    }
}
public class UpdateRunner {
    /**
     * Adds a new update to run at any appropriate time.
     *
     * @param r New runnable to perform the update
     */
    public void invokeLater(Runnable r) {
        // NOTE(review): the add happens outside the lock below — presumably `queue` is
        // thread-safe; confirm against its declaration.
        queue.add(r);
        synchronized (this) {
            if (synchronizer == null) {
                // No synchronizer installed: drain the queue immediately on this thread.
                runQueue();
            } else {
                // Otherwise wake the synchronizer so it processes the queue.
                synchronizer.activate();
            }
        }
    }
}
public class MathUtils {
    /**
     * The #toArray methods of collections cannot be used with primitive arrays. This copies a
     * list of Integers into a new array of int.
     *
     * @param result The list of Integers to convert; elements must not be null.
     * @return An array of int containing the same values in the same order.
     */
    private static int[] intListToPrimitiveArray(List<Integer> result) {
        final int[] resultArray = new int[result.size()];
        int index = 0;
        // Use the iterated value directly: the original ignored the loop variable and called
        // result.get(index) instead, which is O(n^2) for non-random-access lists (LinkedList).
        for (final int value : result) {
            resultArray[index++] = value;
        }
        return resultArray;
    }
}
public class CmsThreadStore { /** * Adds a Thread to this Thread store . < p > * @ param thread the Thread to add */ public void addThread ( A_CmsReportThread thread ) { } }
m_threads . put ( thread . getUUID ( ) , thread ) ; if ( LOG . isDebugEnabled ( ) ) { dumpThreads ( ) ; }
public class RouteProcessor { /** * Verify the route meta * @ param meta raw meta */ private boolean routeVerify ( RouteMeta meta ) { } }
String path = meta . getPath ( ) ; if ( StringUtils . isEmpty ( path ) || ! path . startsWith ( "/" ) ) { // The path must be start with ' / ' and not empty ! return false ; } if ( StringUtils . isEmpty ( meta . getGroup ( ) ) ) { // Use default group ( the first word in path ) try { String defaultGroup = path . substring ( 1 , path . indexOf ( "/" , 1 ) ) ; if ( StringUtils . isEmpty ( defaultGroup ) ) { return false ; } meta . setGroup ( defaultGroup ) ; return true ; } catch ( Exception e ) { logger . error ( "Failed to extract default group! " + e . getMessage ( ) ) ; return false ; } } return true ;
public class HtmlStreamRenderer { /** * Canonicalizes the element name and possibly substitutes an alternative * that has more consistent semantics . */ static String safeName ( String unsafeElementName ) { } }
String elementName = HtmlLexer . canonicalName ( unsafeElementName ) ; // Substitute a reliably non - raw - text element for raw - text and // plain - text elements . switch ( elementName . length ( ) ) { case 3 : if ( "xmp" . equals ( elementName ) ) { return "pre" ; } break ; case 7 : if ( "listing" . equals ( elementName ) ) { return "pre" ; } break ; case 9 : if ( "plaintext" . equals ( elementName ) ) { return "pre" ; } break ; } return elementName ;
public class CommerceAccountOrganizationRelUtil { /** * Returns the last commerce account organization rel in the ordered set where commerceAccountId = & # 63 ; . * @ param commerceAccountId the commerce account ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching commerce account organization rel , or < code > null < / code > if a matching commerce account organization rel could not be found */ public static CommerceAccountOrganizationRel fetchByCommerceAccountId_Last ( long commerceAccountId , OrderByComparator < CommerceAccountOrganizationRel > orderByComparator ) { } }
return getPersistence ( ) . fetchByCommerceAccountId_Last ( commerceAccountId , orderByComparator ) ;
public class FileRandomAccessStream { /** * Reads a block starting from the current file pointer . */ public int read ( byte [ ] buffer , int offset , int length ) throws IOException { } }
return _file . read ( buffer , offset , length ) ;
public class MembershipTypeHandlerImpl { /** * Moves memberships in cache from old key to new one . */ private void moveMembershipsInCache ( String oldType , String newType ) { } }
cache . move ( CacheHandler . MEMBERSHIPTYPE_PREFIX + oldType , CacheHandler . MEMBERSHIPTYPE_PREFIX + newType , CacheType . MEMBERSHIP ) ;
public class StorageAccountsInner { /** * Asynchronously creates a new storage account with the specified parameters . If an account is already created and a subsequent create request is issued with different properties , the account properties will be updated . If an account is already created and a subsequent create or update request is issued with the exact same set of properties , the request will succeed . * @ param resourceGroupName The name of the resource group within the user ' s subscription . The name is case insensitive . * @ param accountName The name of the storage account within the specified resource group . Storage account names must be between 3 and 24 characters in length and use numbers and lower - case letters only . * @ param parameters The parameters to provide for the created account . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < StorageAccountInner > beginCreateAsync ( String resourceGroupName , String accountName , StorageAccountCreateParameters parameters , final ServiceCallback < StorageAccountInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( beginCreateWithServiceResponseAsync ( resourceGroupName , accountName , parameters ) , serviceCallback ) ;
public class AbstractManagedType { /** * ( non - Javadoc ) * @ see * javax . persistence . metamodel . ManagedType # getDeclaredSingularAttributes ( ) */ @ Override public Set < SingularAttribute < X , ? > > getDeclaredSingularAttributes ( ) { } }
Set < SingularAttribute < X , ? > > declaredAttribSet = null ; if ( declaredSingluarAttribs != null ) { declaredAttribSet = new HashSet < SingularAttribute < X , ? > > ( ) ; declaredAttribSet . addAll ( declaredSingluarAttribs . values ( ) ) ; } return declaredAttribSet ;
public class Setting { /** * 获取分组对应的值 , 如果分组不存在或者值不存在则返回null * @ param group 分组 * @ param key 键 * @ return 值 , 如果分组不存在或者值不存在则返回null */ public String get ( String group , String key ) { } }
return this . groupedMap . get ( group , key ) ;
public class CmsContentEditor {
    /**
     * Calls the editor change handlers.<p>
     *
     * Scopes are accumulated in {@code m_changedScopes}; while an RPC round trip is in flight
     * ({@code m_callingChangeHandlers}), new scopes are only queued. When the response arrives,
     * the method re-invokes itself with an empty set to flush anything queued in the meantime.
     *
     * @param changedScopes the changed content value scopes
     */
    void callEditorChangeHandlers(final Set<String> changedScopes) {
        m_changedScopes.addAll(changedScopes);
        if (!m_callingChangeHandlers && (m_changedScopes.size() > 0)) {
            // Guard: only one RPC at a time.
            m_callingChangeHandlers = true;
            // Snapshot and clear the accumulated scopes before the async call.
            final Set<String> scopesToSend = new HashSet<String>(m_changedScopes);
            m_changedScopes.clear();
            final CmsEntity entity = m_entityBackend.getEntity(m_entityId);
            // Deep copy so the state sent to the server is immune to concurrent edits.
            final org.opencms.acacia.shared.CmsEntity currentState = entity.createDeepCopy(m_entityId);
            CmsRpcAction<CmsContentDefinition> action = new CmsRpcAction<CmsContentDefinition>() {

                @Override
                public void execute() {
                    start(200, true);
                    getService().callEditorChangeHandlers(getEntityId(), currentState, getSkipPaths(), scopesToSend, this);
                }

                @Override
                public void onFailure(Throwable t) {
                    // Release the guard even on failure so later changes can be sent.
                    m_callingChangeHandlers = false;
                    super.onFailure(t);
                }

                @Override
                protected void onResponse(CmsContentDefinition result) {
                    m_callingChangeHandlers = false;
                    stop(false);
                    updateEditorValues(currentState, result.getEntity());
                    // Flush any scopes queued while the RPC was in flight.
                    callEditorChangeHandlers(new HashSet<String>());
                }
            };
            action.execute();
        }
    }
}
public class IOCImpl {
    /**
     * EMF-generated setter for the {@code conData2} feature: stores the new value and fires a
     * {@code SET} notification carrying the old and new values when notification is required.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @param newConData2 the new value of the conData2 attribute
     * @generated
     */
    public void setConData2(byte[] newConData2) {
        byte[] oldConData2 = conData2;
        conData2 = newConData2;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.IOC__CON_DATA2, oldConData2, conData2));
    }
}
public class InternalConfig { /** * Builds and returns a signer configuration map . * @ param signerIndexes * signer configuration entries loaded from JSON * @ param theme * used for message logging . eg region , service , region + service */ private Map < String , SignerConfig > buildSignerMap ( JsonIndex < SignerConfigJsonHelper , SignerConfig > [ ] signerIndexes , String theme ) { } }
Map < String , SignerConfig > map = new HashMap < String , SignerConfig > ( ) ; if ( signerIndexes != null ) { for ( JsonIndex < SignerConfigJsonHelper , SignerConfig > index : signerIndexes ) { String region = index . getKey ( ) ; SignerConfig prev = map . put ( region , index . newReadOnlyConfig ( ) ) ; if ( prev != null ) { log . warn ( "Duplicate definition of signer for " + theme + " " + index . getKey ( ) ) ; } } } return map ;
public class IntrospectionLevelMember {
    /**
     * Introspects {@code _member} via reflection: arrays get one child per element (capped at
     * {@code MAX_ARRAY_LENGTH}), other objects get one child per field, walking up the class
     * hierarchy until {@code Object}.
     */
    private void introspectViaReflection() {
        {
            Class<?> memberClass = _member.getClass();
            if (memberClass.isArray()) {
                int length = Array.getLength(_member);
                Class<?> componentType = memberClass.getComponentType();
                if (componentType.isPrimitive()) {
                    // Primitive arrays are summarized as a single child rather than per element.
                    addNewChild(componentType + "[0.." + (length - 1) + "]", _member);
                } else {
                    String simpleName = componentType.getSimpleName();
                    for (int i = 0; i < length && i < MAX_ARRAY_LENGTH; i++) {
                        Object value = Array.get(_member, i);
                        addNewChild(simpleName + "[" + i + "]", value);
                    }
                    if (length > MAX_ARRAY_LENGTH) {
                        // Truncation marker for oversized arrays.
                        addNewChild(simpleName + "[...]", "/* array length = " + length + " */");
                    }
                }
            } else {
                /*
                 * Loop around the fields of the object (including fields of its
                 * superclass) adding them as children (and, if we haven't seen
                 * the child before, getting introspected at the next level if
                 * it's worth doing so)
                 */
                Class<?> currentClass = _member.getClass();
                while (currentClass != Object.class) {
                    Field[] fields = getFields(currentClass);
                    for (int i = 0; i < fields.length && i < MAX_ARRAY_LENGTH; i++) {
                        final Field field = fields[i];
                        Object value = getFieldValue(field);
                        addNewChild(field.getName(), value);
                    }
                    if (fields.length > MAX_ARRAY_LENGTH) {
                        // Truncation marker for classes with very many fields.
                        addNewChild("field...", "/* total # of fields = " + fields.length + " */");
                    }
                    currentClass = currentClass.getSuperclass();
                }
            }
        }
    }
}
public class Data {
    /**
     * Returns the single instance of the magic object that represents the "null" value for the
     * given Java class (including array or enum). Instances are cached in {@code NULL_CACHE}
     * using double-checked locking on the cache itself.
     *
     * @param objClass class of the object needed
     * @return magic object instance that represents the "null" value (not Java {@code null})
     * @throws IllegalArgumentException if unable to create a new instance
     */
    public static <T> T nullOf(Class<T> objClass) {
        // First unsynchronized read; fall into the lock only on a miss.
        Object result = NULL_CACHE.get(objClass);
        if (result == null) {
            synchronized (NULL_CACHE) {
                // Re-check under the lock (double-checked locking).
                result = NULL_CACHE.get(objClass);
                if (result == null) {
                    if (objClass.isArray()) {
                        // arrays are special because we need to compute both the dimension and
                        // component type; a new int[dims] yields a zero-length array of the
                        // right depth.
                        int dims = 0;
                        Class<?> componentType = objClass;
                        do {
                            componentType = componentType.getComponentType();
                            dims++;
                        } while (componentType.isArray());
                        result = Array.newInstance(componentType, new int[dims]);
                    } else if (objClass.isEnum()) {
                        // enum requires looking for the constant annotated with @NullValue
                        FieldInfo fieldInfo = ClassInfo.of(objClass).getFieldInfo(null);
                        Preconditions.checkNotNull(fieldInfo, "enum missing constant with @NullValue annotation: %s", objClass);
                        @SuppressWarnings({"unchecked", "rawtypes"})
                        Enum e = fieldInfo.<Enum>enumValue();
                        result = e;
                    } else {
                        // other classes are simpler: a plain new instance
                        result = Types.newInstance(objClass);
                    }
                    NULL_CACHE.put(objClass, result);
                }
            }
        }
        @SuppressWarnings("unchecked")
        T tResult = (T) result;
        return tResult;
    }
}
public class JCommander {
    /**
     * Main method that parses the values and initializes the fields accordingly.
     *
     * Walks the argument array once, classifying each token as an option, a main parameter,
     * a "--" separator, or a sub-command; parsing stops when a command is found because the
     * remainder is handed to that command's own JCommander.
     *
     * @param args the raw command-line arguments
     * @param validate whether unknown commands should raise an exception
     */
    private void parseValues(String[] args, boolean validate) {
        // This boolean becomes true if we encounter a command, which indicates we need
        // to stop parsing (the parsing of the command will be done in a sub JCommander
        // object)
        boolean commandParsed = false;
        int i = 0;
        boolean isDashDash = false; // once we encounter --, everything goes into the main parameter
        while (i < args.length && !commandParsed) {
            String arg = args[i];
            String a = trim(arg);
            args[i] = a;
            p("Parsing arg: " + a);
            JCommander jc = findCommandByAlias(arg);
            int increment = 1;
            if (!isDashDash && !"--".equals(a) && isOption(a) && jc == null) {
                // Option
                ParameterDescription pd = findParameterDescription(a);
                if (pd != null) {
                    if (pd.getParameter().password()) {
                        increment = processPassword(args, i, pd, validate);
                    } else {
                        if (pd.getParameter().variableArity()) {
                            // Variable arity?
                            increment = processVariableArity(args, i, pd, validate);
                        } else {
                            // Regular option
                            Class<?> fieldType = pd.getParameterized().getType();
                            // Boolean, set to true as soon as we see it, unless it specified
                            // an arity of 1, in which case we need to read the next value
                            if (pd.getParameter().arity() == -1 && isBooleanType(fieldType)) {
                                handleBooleanOption(pd, fieldType);
                            } else {
                                increment = processFixedArity(args, i, pd, validate, fieldType);
                            }
                            // If it's a help option, remember for later
                            if (pd.isHelp()) {
                                helpWasSpecified = true;
                            }
                        }
                    }
                } else {
                    if (options.acceptUnknownOptions) {
                        // Swallow the unknown option and all its non-option values.
                        unknownArgs.add(arg);
                        i++;
                        while (i < args.length && !isOption(args[i])) {
                            unknownArgs.add(args[i++]);
                        }
                        increment = 0;
                    } else {
                        throw new ParameterException("Unknown option: " + arg);
                    }
                }
            } else {
                // Main parameter
                if ("--".equals(arg) && !isDashDash) {
                    isDashDash = true;
                } else if (commands.isEmpty()) {
                    // Regular (non-command) parsing
                    initMainParameterValue(arg);
                    String value = a; // If there's a non-quoted version, prefer that one
                    Object convertedValue = value;
                    // Fix
                    // Main parameter doesn't support Converter
                    // https://github.com/cbeust/jcommander/issues/380
                    if (mainParameter.annotation.converter() != null && mainParameter.annotation.converter() != NoConverter.class) {
                        convertedValue = convertValue(mainParameter.parameterized, mainParameter.parameterized.getType(), null, value);
                    }
                    Type genericType = mainParameter.parameterized.getGenericType();
                    if (genericType instanceof ParameterizedType) {
                        ParameterizedType p = (ParameterizedType) genericType;
                        Type cls = p.getActualTypeArguments()[0];
                        if (cls instanceof Class) {
                            convertedValue = convertValue(mainParameter.parameterized, (Class) cls, null, value);
                        }
                    }
                    for (final Class<? extends IParameterValidator> validator : mainParameter.annotation.validateWith()) {
                        mainParameter.description.validateParameter(validator, "Default", value);
                    }
                    mainParameter.description.setAssigned(true);
                    mainParameter.addValue(convertedValue);
                } else {
                    // Command parsing
                    if (jc == null && validate) {
                        throw new MissingCommandException("Expected a command, got " + arg, arg);
                    } else if (jc != null) {
                        parsedCommand = jc.programName.name;
                        parsedAlias = arg; // preserve the original form
                        // Found a valid command, ask it to parse the remainder of the arguments.
                        // Setting the boolean commandParsed to true will force the current
                        // loop to end.
                        jc.parse(validate, subArray(args, i + 1));
                        commandParsed = true;
                    }
                }
            }
            i += increment;
        }
        // Mark the parameter descriptions held in fields as assigned
        for (ParameterDescription parameterDescription : descriptions.values()) {
            if (parameterDescription.isAssigned()) {
                fields.get(parameterDescription.getParameterized()).setAssigned(true);
            }
        }
    }
}
public class NotifyRegion { /** * * * * * * Initialization * * * * * */ private void initGraphics ( ) { } }
if ( Double . compare ( getPrefWidth ( ) , 0.0 ) <= 0 || Double . compare ( getPrefHeight ( ) , 0.0 ) <= 0 || Double . compare ( getWidth ( ) , 0.0 ) <= 0 || Double . compare ( getHeight ( ) , 0.0 ) <= 0 ) { if ( getPrefWidth ( ) > 0 && getPrefHeight ( ) > 0 ) { setPrefSize ( getPrefWidth ( ) , getPrefHeight ( ) ) ; } else { setPrefSize ( PREFERRED_WIDTH , PREFERRED_HEIGHT ) ; } } path = new Path ( ) ; path . setStroke ( Color . TRANSPARENT ) ; icon = new Path ( ) ; icon . setStroke ( Color . TRANSPARENT ) ; getChildren ( ) . setAll ( path , icon ) ;
public class TableFactoryService { /** * Prepares the properties of a context to be used for match operations . */ private static Map < String , String > normalizeContext ( TableFactory factory ) { } }
Map < String , String > requiredContext = factory . requiredContext ( ) ; if ( requiredContext == null ) { throw new TableException ( String . format ( "Required context of factory '%s' must not be null." , factory . getClass ( ) . getName ( ) ) ) ; } return requiredContext . keySet ( ) . stream ( ) . collect ( Collectors . toMap ( key -> key . toLowerCase ( ) , key -> requiredContext . get ( key ) ) ) ;