signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class StringSupport { /** * replaces all occurrences of needle in haystack with newNeedle * the input itself is not modified * @ param haystack input string * @ param needlestring to replace * @ param newNeedle replacement */ public static void replaceAll ( StringBuffer haystack , String needle , String newNeedle ) { } }
/* if ( needle = = null | | " " . equals ( needle ) ) throw new IllegalArgumentException ( " string to replace may not be empty " ) ; int idx = haystack . indexOf ( needle ) ; int needleLength = needle . length ( ) ; int newNeedleLength = newNeedle . length ( ) ; while ( idx ! = - 1) haystack . replace ( idx , idx + needleLength , newNeedle ) ; idx = haystack . indexOf ( needle , idx + newNeedleLength ) ; */ replaceAll ( haystack , needle , newNeedle , 0 ) ;
public class BottomNavigationBar {

    /**
     * Clears all stored data and this helps to re-initialise tabs from scratch.
     */
    public void clearAll() {
        // Remove every tab view from the container and forget the tab/item models.
        mTabContainer.removeAllViews();
        mBottomNavigationTabs.clear();
        mBottomNavigationItems.clear();
        // Hide the ripple overlay and reset the bar background to transparent.
        mBackgroundOverlay.setVisibility(View.GONE);
        mContainer.setBackgroundColor(Color.TRANSPARENT);
        // Reset selection so the next setup starts from the default position.
        mSelectedPosition = DEFAULT_SELECTED_POSITION;
    }
}
public class ServerRoomEventHandler {

    /**
     * Propagate event to handlers.
     *
     * @param sfsRoom smartfox room object
     * @param sfsUser smartfox user object
     */
    private void notifyHandlers(Room sfsRoom, User sfsUser) {
        // Wrap the raw smartfox objects in their API agent counterparts.
        ApiRoom apiRoom = AgentUtil.getRoomAgent(sfsRoom);
        ApiUser apiUser = AgentUtil.getUserAgent(sfsUser);
        for (RoomHandlerClass handler : handlers) {
            Object userAgent = checkUserAgent(handler, apiUser);
            // Skip handlers that do not accept this room/user combination.
            if (!checkHandler(handler, apiRoom, userAgent)) continue;
            // NOTE(review): a fresh handler instance is created per event —
            // confirm this per-event instantiation is intended.
            Object instance = handler.newInstance();
            callHandleMethod(handler.getHandleMethod(), instance, apiRoom, userAgent);
        }
    }
}
public class AbsSeekBar {

    /**
     * Sets the maximum value for this AbsSeekBar.
     * If the supplied argument is smaller than the current MIN value,
     * the MIN value will be set to MAX - 1.
     * Also if the current progress is out of the new range, it will be set to MIN.
     *
     * @param max Progress max value
     * @see #setMin(int)
     * @see #setProgress(int)
     */
    public void setMax(int max) {
        mMax = max;
        // Keep the invariant mMin < mMax by pulling min down below the new max.
        if (mMax <= mMin) {
            setMin(mMax - 1);
        }
        updateKeyboardRange();
        mSeekBarDrawable.setNumSegments(mMax - mMin);
        if (mValue < mMin || mValue > mMax) {
            // Out-of-range progress is re-applied through setProgress;
            // presumably setProgress clamps it into [mMin, mMax] — TODO confirm.
            setProgress(mValue);
        } else {
            updateThumbPosForScale(-1);
        }
    }
}
public class AbstractJavaMetadata { /** * Gets a method meta data from { @ link MethodDeclaration } . * @ param methodDeclaration - the MethodDeclaration . * @ return methodMetadata - the method meta data . */ protected MethodMetadata getMethodMetadataFrom ( MethodDeclaration methodDeclaration ) { } }
if ( methodDeclaration != null ) { if ( methodDeclaration . isConstructor ( ) ) { return getConstructorMetadataFrom ( methodDeclaration ) ; } return getMethodTypeMemberMetadataFrom ( methodDeclaration ) ; } return null ;
public class MCEDependenceMeasure {

    /**
     * Recursive call to further subdivide the array.
     *
     * @param idx Object indexes.
     * @param data 1D data, sorted
     * @param ret Output index
     * @param start Interval start
     * @param end Interval end
     * @param depth Depth
     */
    private void divide(int[] idx, double[] data, ArrayList<int[]> ret, int start, int end, int depth) {
        // Recursion bottom: emit the (sorted) object indexes of this interval.
        if (depth == 0) {
            int[] a = Arrays.copyOfRange(idx, start, end);
            Arrays.sort(a);
            ret.add(a);
            return;
        }
        final int count = end - start;
        if (count == 0) {
            // Corner case, that should barely happen. But for ties, we currently
            // do not yet assure that it doesn't happen!
            // Emit one empty cell for every leaf this subtree would have produced,
            // so the output keeps its fixed 2^depth layout.
            for (int j = 1 << depth; j > 0; --j) {
                ret.add(new int[0]);
            }
            return;
        }
        // Split the interval at its mean value.
        double m = 0.;
        for (int i = start; i < end; i++) {
            m += data[i];
        }
        m /= count;
        int pos = Arrays.binarySearch(data, start, end, m);
        if (pos >= 0) {
            // Ties: try to choose the most central element.
            final int opt = (start + end) >> 1;
            while (data[pos] == m) {
                if (pos < opt) {
                    pos++;
                } else if (pos > opt) {
                    pos--;
                } else {
                    break;
                }
            }
        } else {
            // Mean not present exactly: convert binarySearch's encoded
            // insertion point into the split position.
            pos = (-pos - 1);
        }
        // Recurse into both halves, one level shallower.
        divide(idx, data, ret, start, pos, depth - 1);
        divide(idx, data, ret, pos, end, depth - 1);
    }
}
public class PatternBox {

    /**
     * Pattern for a EntityReference has a member PhysicalEntity that is controlling a state change
     * reaction of another EntityReference.
     *
     * @return the pattern
     */
    public static Pattern controlsStateChange() {
        Pattern p = new Pattern(SequenceEntityReference.class, "controller ER");
        // Map the controller ER through its generic forms to a physical entity
        // that exerts control over a conversion.
        p.add(linkedER(true), "controller ER", "generic controller ER");
        p.add(erToPE(), "generic controller ER", "controller simple PE");
        p.add(linkToComplex(), "controller simple PE", "controller PE");
        p.add(peToControl(), "controller PE", "Control");
        p.add(controlToConv(), "Control", "Conversion");
        // The controller ER must not itself participate in the conversion.
        p.add(new NOT(participantER()), "Conversion", "controller ER");
        // Pick an input participant of the controlled conversion.
        p.add(new Participant(RelType.INPUT, true), "Control", "Conversion", "input PE");
        p.add(new NOT(new ConversionSide(ConversionSide.Type.OTHER_SIDE)), "input PE", "Conversion", "input PE");
        p.add(linkToSpecific(), "input PE", "input simple PE");
        p.add(new Type(SequenceEntity.class), "input simple PE");
        p.add(peToER(), "input simple PE", "changed generic ER");
        // Match the corresponding output on the other side of the conversion.
        p.add(new ConversionSide(ConversionSide.Type.OTHER_SIDE), "input PE", "Conversion", "output PE");
        p.add(new NOT(new ConversionSide(ConversionSide.Type.OTHER_SIDE)), "output PE", "Conversion", "output PE");
        // Input and output must be different physical entities (a state change).
        p.add(equal(false), "input PE", "output PE");
        p.add(linkToSpecific(), "output PE", "output simple PE");
        p.add(peToER(), "output simple PE", "changed generic ER");
        p.add(linkedER(false), "changed generic ER", "changed ER");
        return p;
    }
}
public class NetworkService { /** * Claim a networks for datacenters * @ param dataCenters datacenter references * @ return OperationFuture wrapper for dataCenter list */ public OperationFuture < List < DataCenter > > claim ( DataCenter ... dataCenters ) { } }
return claim ( Arrays . asList ( dataCenters ) ) ;
public class JedisSet { /** * Removes and returns a random element from the set . * @ return the removed element , or < code > null < / code > when the key does not exist . */ public String pop ( ) { } }
return doWithJedis ( new JedisCallable < String > ( ) { @ Override public String call ( Jedis jedis ) { return jedis . spop ( getKey ( ) ) ; } } ) ;
public class DSLMapParser {

    /**
     * ANTLR-generated rule method; regenerate from the grammar rather than editing by hand.
     * src/main/resources/org/drools/compiler/lang/dsl/DSLMap.g:251:1 : condition_key : {...}? value= LITERAL -> VT_CONDITION[$value] ;
     *
     * @return the rule return scope carrying the rewritten AST
     * @throws RecognitionException if the input cannot be matched
     */
    public final DSLMapParser.condition_key_return condition_key() throws RecognitionException {
        DSLMapParser.condition_key_return retval = new DSLMapParser.condition_key_return();
        retval.start = input.LT(1);
        Object root_0 = null;
        Token value = null;
        Object value_tree = null;
        RewriteRuleTokenStream stream_LITERAL = new RewriteRuleTokenStream(adaptor, "token LITERAL");
        try {
            // src/main/resources/org/drools/compiler/lang/dsl/DSLMap.g:252:5 : ( {...}? value= LITERAL -> VT_CONDITION[$value] )
            // src/main/resources/org/drools/compiler/lang/dsl/DSLMap.g:252:7 : {...}? value= LITERAL
            {
                // Semantic predicate: the literal must be "condition" or "when".
                if (!((validateIdentifierKey("condition") || validateIdentifierKey("when")))) {
                    if (state.backtracking > 0) {
                        // While backtracking, record the failure instead of throwing.
                        state.failed = true;
                        return retval;
                    }
                    throw new FailedPredicateException(input, "condition_key", "validateIdentifierKey(\"condition\")||validateIdentifierKey(\"when\")");
                }
                value = (Token) match(input, LITERAL, FOLLOW_LITERAL_in_condition_key1599);
                if (state.failed) return retval;
                if (state.backtracking == 0) stream_LITERAL.add(value);
                // AST REWRITE
                // elements :
                // token labels :
                // rule labels : retval
                // token list labels :
                // rule list labels :
                // wildcard labels :
                if (state.backtracking == 0) {
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval = new RewriteRuleSubtreeStream(adaptor, "rule retval", retval != null ? retval.getTree() : null);
                    root_0 = (Object) adaptor.nil();
                    // 253:5 : -> VT_CONDITION[$value]
                    {
                        // Rewrite the matched literal into a VT_CONDITION node.
                        adaptor.addChild(root_0, (Object) adaptor.create(VT_CONDITION, value));
                    }
                    retval.tree = root_0;
                }
            }
            retval.stop = input.LT(-1);
            if (state.backtracking == 0) {
                retval.tree = (Object) adaptor.rulePostProcessing(root_0);
                adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
            }
        } catch (RecognitionException re) {
            // Standard ANTLR error recovery: report, resync, and return an error node.
            reportError(re);
            recover(input, re);
            retval.tree = (Object) adaptor.errorNode(input, retval.start, input.LT(-1), re);
        } finally {
            // do for sure before leaving
        }
        return retval;
    }
}
public class SparseDirectedEdgeSet {

    /**
     * Adds the edge to this set if one of the vertices is the root vertex and
     * if the non-root vertex has a greater index that this vertex.
     */
    public boolean add(DirectedEdge e) {
        // NOTE(review): the "greater index" restriction from the javadoc is not
        // checked in this method — presumably enforced by outEdges/inEdges or by
        // the caller; confirm against the rest of the class.
        if (e.from() == rootVertex) {
            // Outgoing edge: store only the destination vertex.
            return outEdges.add(e.to());
        } else if (e.to() == rootVertex) {
            // Incoming edge: store only the source vertex.
            return inEdges.add(e.from());
        }
        // Edge does not touch the root vertex at all.
        return false;
    }
}
public class VideoFrameDropper {

    /**
     * {@inheritDoc}
     *
     * Decides per-packet whether it may be sent, based on the dropper's current
     * state machine; only video packets are ever dropped.
     */
    public boolean canSendPacket(RTMPMessage message, long pending) {
        IRTMPEvent packet = message.getBody();
        boolean result = true;
        // We currently only drop video packets.
        if (packet instanceof VideoData) {
            VideoData video = (VideoData) packet;
            FrameType type = video.getFrameType();
            switch (state) {
                case SEND_ALL:
                    // All packets will be sent.
                    break;
                case SEND_INTERFRAMES:
                    // Only keyframes and interframes will be sent.
                    if (type == FrameType.KEYFRAME) {
                        if (pending == 0) {
                            // Send all frames from now on.
                            state = SEND_ALL;
                        }
                    } else if (type == FrameType.INTERFRAME) {
                        // Interframes pass through unchanged in this state.
                    }
                    break;
                case SEND_KEYFRAMES:
                    // Only keyframes will be sent.
                    result = (type == FrameType.KEYFRAME);
                    if (result && pending == 0) {
                        // Maybe switch back to SEND_INTERFRAMES after the next keyframe.
                        state = SEND_KEYFRAMES_CHECK;
                    }
                    break;
                case SEND_KEYFRAMES_CHECK:
                    // Only keyframes will be sent.
                    result = (type == FrameType.KEYFRAME);
                    if (result && pending == 0) {
                        // Continue with sending interframes as well.
                        state = SEND_INTERFRAMES;
                    }
                    break;
                default:
                    // Unknown state: allow the packet through unchanged.
            }
        }
        return result;
    }
}
public class ClassLoader {

    /**
     * Finds a class with the specified <a href="#name">binary name</a>,
     * loading it if necessary.
     *
     * <p>This method loads the class through the system class loader (see
     * {@link #getSystemClassLoader()}). The <tt>Class</tt> object returned
     * might have more than one <tt>ClassLoader</tt> associated with it.
     * Subclasses of <tt>ClassLoader</tt> need not usually invoke this method,
     * because most class loaders need to override just {@link
     * #findClass(String)}.</p>
     *
     * @param name The <a href="#name">binary name</a> of the class
     * @return The <tt>Class</tt> object for the specified <tt>name</tt>
     * @throws ClassNotFoundException If the class could not be found
     * @see #ClassLoader(ClassLoader)
     * @see #getParent()
     */
    protected final Class<?> findSystemClass(String name) throws ClassNotFoundException {
        // Load via the system class loader without linking (initialize = false).
        return Class.forName(name, false, getSystemClassLoader());
    }
}
public class CertificateExtensions { /** * Set the attribute value . * @ param name the extension name used in the cache . * @ param obj the object to set . * @ exception IOException if the object could not be cached . */ public void set ( String name , Object obj ) throws IOException { } }
if ( obj instanceof Extension ) { map . put ( name , ( Extension ) obj ) ; } else { throw new IOException ( "Unknown extension type." ) ; }
public class RTMPConnection {

    /**
     * Specify name, connection, scope and etc for stream.
     *
     * @param streamId Stream id
     * @param stream Stream
     */
    private void customizeStream(Number streamId, AbstractClientStream stream) {
        // Apply any client-requested buffer duration stored for this stream id.
        Integer buffer = streamBuffers.get(streamId.doubleValue());
        if (buffer != null) {
            stream.setClientBufferDuration(buffer);
        }
        // Wire the stream to this connection and its scope.
        stream.setName(createStreamName());
        stream.setConnection(this);
        stream.setScope(this.getScope());
        stream.setStreamId(streamId);
    }
}
public class DefaultMaven2OsgiConverter { /** * Computes the file name of the bundle used in Wisdom distribution for the given Maven coordinates . * This convention is based on the uniqueness at runtime of ' bsn - version ' ( bsn is the bundle symbolic name ) . * @ param groupId the groupId * @ param artifactId the artifactId * @ param version the version * @ return the computed name , composed by the symbolic name and the version : { @ code bsn - version . jar } */ public static String getBundleFileName ( String groupId , String artifactId , String version ) { } }
return DefaultMaven2OsgiConverter . getBundleSymbolicName ( groupId , artifactId ) + "-" + version + ".jar" ;
public class EJSHome {

    /**
     * Return an enumeration instance that wraps the given enumeration
     * of primary keys. <p>
     * This helper method is used to construct the result enumeration
     * for finder methods on homes that contain container managed entity
     * beans. <p>
     */
    @Override
    @SuppressWarnings("rawtypes")
    public Enumeration getEnumeration(Finder finder) throws FinderException, RemoteException {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled(); // d532639.2
        if (isTraceOn && tc.isEntryEnabled())
            Tr.entry(tc, "getEnumeration(EJSFinder)");
        // Fails if this home has been disabled.
        homeEnabled();
        final ContainerTx currentTx = container.getCurrentContainerTx(); // d171654
        // Wrap the finder results in an enumeration bound to the current transaction.
        Enumeration rtnEnum = container.persisterFactory.wrapResultsInEnumeration(currentTx, this, finder);
        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(tc, "getEnumeration");
        return rtnEnum;
    }
}
public class YggdrasilAuthenticator {

    /**
     * Refreshes the current session manually using password.
     *
     * @param passwordProvider the password provider
     * @param clientToken the client token
     * @throws AuthenticationException If an exception occurs during the
     *         authentication
     */
    public synchronized void refreshWithPassword(PasswordProvider passwordProvider, String clientToken) throws AuthenticationException {
        Objects.requireNonNull(passwordProvider);
        Objects.requireNonNull(clientToken);
        String username = passwordProvider.getUsername();
        String password = passwordProvider.getPassword();
        authResult = authenticationService.login(username, password, clientToken);
        if (authResult.getSelectedProfile() == null) {
            GameProfile[] profiles = authResult.getProfiles();
            if (profiles == null || profiles.length == 0) {
                // No profile available to select; keep the bare auth result.
                return;
            }
            // no profile is selected
            // let's select one
            CharacterSelector selector = passwordProvider.getCharacterSelector();
            if (selector == null) {
                // Fall back to the default selection strategy.
                selector = new DefaultCharacterSelector();
            }
            GameProfile selectedProfile = selector.select(profiles);
            if (selectedProfile != null) {
                // Re-authenticate with the chosen profile attached.
                authResult = authenticationService.selectProfile(authResult.getClientToken(), authResult.getAccessToken(), selectedProfile);
            }
        }
    }
}
public class PortletPreAuthenticatedAuthenticationDetailsSource { /** * Obtains the list of user roles based on the current user ' s Portlet roles . The * { @ link javax . portlet . PortletRequest # isUserInRole ( String ) } method is called for each of the values * in the { @ code portletMappableRoles } set to determine if that role should be assigned to the user . * @ param request the request which should be used to extract the user ' s roles . * @ return The subset of { @ code portletMappableRoles } which applies to the current user making the request . */ protected Collection < String > getUserRoles ( PortletRequest request ) { } }
ArrayList < String > portletUserRolesList = new ArrayList < String > ( ) ; for ( String role : portletMappableRoles ) { if ( request . isUserInRole ( role ) ) { portletUserRolesList . add ( role ) ; } } return portletUserRolesList ;
public class CmsPublishProject {

    /**
     * Returns the html code to build the lock dialog.<p>
     *
     * @return html code
     * @throws CmsException if something goes wrong
     */
    public String buildLockDialog() throws CmsException {
        // Build the filter for locks held by the current user that do not block
        // the operation.
        CmsLockFilter nonBlockingFilter = CmsLockFilter.FILTER_ALL;
        nonBlockingFilter = nonBlockingFilter.filterLockableByUser(getCms().getRequestContext().getCurrentUser());
        nonBlockingFilter = nonBlockingFilter.filterSharedExclusive();
        // Restrict to the requested project when a project id parameter is present.
        if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(getParamProjectid())) {
            nonBlockingFilter = nonBlockingFilter.filterProject(new CmsUUID(getParamProjectid()));
        }
        return org.opencms.workplace.commons.CmsLock.buildLockDialog(this, nonBlockingFilter, getBlockingFilter(), 0, true);
    }
}
public class MailService { /** * Deletes all messages from the mail account reserved under the specified * { @ code accountReservationKey } . * @ param accountReservationKey * the key under which the account has been reserved */ public void deleteMessages ( final String accountReservationKey ) { } }
MailAccount mailAccount = checkNotNull ( mailAccountManager . lookupUsedMailAccountForCurrentThread ( accountReservationKey ) , "No mail account reserved for current thread under key '%s'" , accountReservationKey ) ; storeManagerFactory . create ( mailAccount ) . deleteAllMessages ( ) ;
public class Properties { /** * If { @ link Num } don ' t define scale then use scale from { @ link AbstractCalculator } instance . * Othervise use default scale { @ link Properties # DEFAULT _ SCALE } * @ param calc * @ param value */ public static int getInheritedScale ( AbstractCalculator calc , Num value ) { } }
if ( value . getScale ( ) == null ) { if ( calc != null && calc . getScale ( ) != null ) return calc . getScale ( ) ; else return Properties . DEFAULT_SCALE ; } else return value . getScale ( ) ;
public class AuthnContextServiceImpl { /** * Given a base URI , the method returns its corresponding sigmessage URI . * @ param uri * the URI to transform * @ return the sigmessage URI , or { @ code null } if no such exists */ protected String toSignMessageURI ( String uri ) { } }
LoaEnum loa = LoaEnum . parse ( uri ) ; if ( loa == null ) { return null ; } if ( loa . isSignatureMessageUri ( ) ) { return uri ; } for ( LoaEnum l : LoaEnum . values ( ) ) { if ( l . getBaseUri ( ) . equals ( loa . getBaseUri ( ) ) && l . isSignatureMessageUri ( ) && l . isNotified ( ) == loa . isNotified ( ) ) { return l . getUri ( ) ; } } return null ;
public class ArrayUtil { /** * / * - - - - - [ ensureCapacity ] - - - - - */ public static byte [ ] ensureCapacity ( byte array [ ] , int capacity ) { } }
if ( capacity <= 0 || capacity - array . length <= 0 ) return array ; int newCapacity = array . length * 2 ; if ( newCapacity - capacity < 0 ) newCapacity = capacity ; if ( newCapacity < 0 ) { if ( capacity < 0 ) // overflow throw new OutOfMemoryError ( ) ; newCapacity = Integer . MAX_VALUE ; } return Arrays . copyOf ( array , newCapacity ) ;
public class LettuceFutures { /** * Wait until futures are complete or the supplied timeout is reached . Commands are not canceled ( in contrast to * { @ link # awaitOrCancel ( RedisFuture , long , TimeUnit ) } ) when the timeout expires . * @ param timeout Maximum time to wait for futures to complete . * @ param futures Futures to wait for . * @ return { @ literal true } if all futures complete in time , otherwise { @ literal false } * @ since 5.0 */ public static boolean awaitAll ( Duration timeout , Future < ? > ... futures ) { } }
return awaitAll ( timeout . toNanos ( ) , TimeUnit . NANOSECONDS , futures ) ;
public class ClusTree {

    /**
     * Insert a new point in the <code>Tree</code>. The point should be
     * represented as a cluster with a single data point (i.e. N = 1). A
     * <code>Budget</code> class is also given, which is informed of the number
     * of operation the tree does, and informs the tree when it does not have
     * time left and should stop the insertion.
     *
     * @param newPoint The point to be inserted.
     * @param budget The budget and statistics recollector for the insertion.
     * @param timestamp The moment at which this point is inserted.
     * @see Kernel
     * @see Budget
     */
    public void insert(ClusKernel newPoint, Budget budget, long timestamp) {
        if (breadthFirstStrat) {
            insertBreadthFirst(newPoint, budget, timestamp);
        } else {
            Entry rootEntry = new Entry(this.numberDimensions, root, timestamp, null, null);
            ClusKernel carriedBuffer = new ClusKernel(this.numberDimensions);
            // Depth-first insertion; a non-null result means the root was split
            // and the returned entry must be placed in a new root node.
            Entry toInsertHere = insert(newPoint, carriedBuffer, root, rootEntry, budget, timestamp);
            if (toInsertHere != null) {
                this.numRootSplits++;
                // Grow the tree unless the maximum height has been reached.
                this.height += this.height < this.maxHeight ? 1 : 0;
                Node newRoot = new Node(this.numberDimensions, toInsertHere.getChild().getRawLevel() + 1);
                newRoot.addEntry(rootEntry, timestamp);
                newRoot.addEntry(toInsertHere, timestamp);
                rootEntry.setNode(newRoot);
                toInsertHere.setNode(newRoot);
                this.root = newRoot;
            }
        }
        this.numberInsertions++;
        // Periodically clean up the tree to bound its size.
        if (this.numberInsertions % INSERTIONS_BETWEEN_CLEANUPS == 0) {
            cleanUp(this.root, 0);
        }
    }
}
public class MetadataOperation { /** * Convert wildchars and escape sequence of schema pattern from JDBC format to datanucleous / regex * The schema pattern treats empty string also as wildchar */ protected String convertSchemaPattern ( final String pattern ) { } }
if ( ( pattern == null ) || pattern . isEmpty ( ) ) { return convertPattern ( "%" , true ) ; } else { return convertPattern ( pattern , true ) ; }
public class HttpRequest { /** * Return a new instance of HttpRequest replacing the URI . */ @ Override public HttpRequest replaceUri ( URI newURI ) { } }
return ( new Builder ( ) ) . uri ( newURI ) . headers ( this . httpHeaders ) . overrideConfig ( this . getOverrideConfig ( ) ) . queryParams ( this . queryParams ) . setRetriable ( this . isRetriable ( ) ) . loadBalancerKey ( this . getLoadBalancerKey ( ) ) . verb ( this . getVerb ( ) ) . entity ( this . entity ) . build ( ) ;
public class AppsImpl { /** * Gets the endpoint URLs for the prebuilt Cortana applications . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PersonalAssistantsResponse object */ public Observable < PersonalAssistantsResponse > listCortanaEndpointsAsync ( ) { } }
return listCortanaEndpointsWithServiceResponseAsync ( ) . map ( new Func1 < ServiceResponse < PersonalAssistantsResponse > , PersonalAssistantsResponse > ( ) { @ Override public PersonalAssistantsResponse call ( ServiceResponse < PersonalAssistantsResponse > response ) { return response . body ( ) ; } } ) ;
public class MapUtil {

    /**
     * Converts a map's entries into a two-dimensional array where each row
     * holds a key at index 0 and its value at index 1.
     *
     * @param map the map to convert; may be {@code null}
     * @return the key/value rows, or {@code null} if {@code map} is {@code null}
     * @since 4.1.9
     */
    public static Object[][] toObjectArray(Map<?, ?> map) {
        if (map == null) {
            return null;
        }
        final Object[][] pairs = new Object[map.size()][2];
        int row = 0;
        // Preserve the map's iteration order row by row.
        for (Entry<?, ?> entry : map.entrySet()) {
            pairs[row][0] = entry.getKey();
            pairs[row][1] = entry.getValue();
            row++;
        }
        return pairs;
    }
}
public class HashIntSet {

    /**
     * Adds the specified element to this set if it is not already present.
     *
     * @param element element to be added to this set.
     * @return <tt>true</tt> if the set did not already contain the specified
     *         element.
     * @throws IndexOutOfBoundsException if {@code element} is negative.
     */
    @Override
    public boolean add(int element) {
        if (element < 0) {
            throw new IndexOutOfBoundsException("element < 0: " + element);
        }
        // Probe for the element; presumably a non-negative index points at the
        // element's slot or a reusable removed slot, while a negative value
        // encodes the insertion point — confirm against findElementOrRemoved.
        int index = findElementOrRemoved(element);
        if (index >= 0) {
            if (cells[index] == element) {
                // Already present: nothing to do.
                return false;
            }
            // Reusing a previously removed cell consumes one free cell.
            freecells--;
        } else {
            // Decode the insertion point from the negative encoding.
            index = -(index + 1);
        }
        modCount++;
        size++;
        // set the integer
        cells[index] = element;
        // do we need to rehash?
        if (1 - ((double) freecells / cells.length) > LOAD_FACTOR) {
            rehash();
        }
        return true;
    }
}
public class CmsAppHierarchyBuilder { /** * Adds an app configuration to the node belonging to its parent category id . < p > * @ param appConfig the app configuration to add to its parent node */ protected void addAppConfigToCategory ( I_CmsWorkplaceAppConfiguration appConfig ) { } }
CmsAppCategoryNode node = m_nodes . get ( appConfig . getAppCategory ( ) ) ; if ( node == null ) { LOG . info ( "Missing parent [" + appConfig . getAppCategory ( ) + "] for " + appConfig . getId ( ) + " / " + appConfig . getClass ( ) . getName ( ) ) ; } else { node . addAppConfiguration ( appConfig ) ; }
public class BigtableTableAdminClient {

    /**
     * Gets the instanceName this client is associated with.
     *
     * @deprecated Please use {@link #getProjectId()} and {@link #getInstanceId()}.
     */
    @Deprecated
    public com.google.bigtable.admin.v2.InstanceName getInstanceName() {
        // Rebuilds the InstanceName from the stored project and instance ids.
        return com.google.bigtable.admin.v2.InstanceName.of(projectId, instanceId);
    }
}
public class NIOServerCnxn {

    /**
     * send buffer without using the asynchronous
     * calls to selector and then close the socket
     *
     * @param bb the buffer to write
     */
    void sendBufferSync(ByteBuffer bb) {
        try {
            /* configure socket to be blocking
             * so that we dont have to do write in
             * a tight while loop
             */
            sock.configureBlocking(true);
            // closeConn appears to be a sentinel buffer compared by identity,
            // meaning "close, do not send" — confirm against the class fields.
            if (bb != closeConn) {
                // NOTE(review): sock was already dereferenced above, so this
                // null check cannot take effect — confirm whether it is needed.
                if (sock != null) {
                    sock.write(bb);
                }
                packetSent();
            }
        } catch (IOException ie) {
            LOG.error("Error sending data synchronously ", ie);
        }
    }
}
public class Parsed { private void crossCheck ( ) { } }
// only cross - check date , time and date - time // avoid object creation if possible if ( date != null ) { crossCheck ( date ) ; } if ( time != null ) { crossCheck ( time ) ; if ( date != null && fieldValues . size ( ) > 0 ) { crossCheck ( date . atTime ( time ) ) ; } }
public class ContextRuntime {

    /**
     * Issue a close call to the Task.
     * Note that due to races, the task might have already ended. In that case, we drop this call and leave a WARNING
     * in the log.
     *
     * @param message the close message to deliver or null if there is none.
     */
    void closeTask(final byte[] message) {
        // Serialize with other lifecycle transitions on this context.
        synchronized (this.contextLifeCycle) {
            if (!this.task.isPresent()) {
                // Task already ended: drop the close request, as documented.
                LOG.log(Level.WARNING, "Received a close task while there was no task running. Ignoring.");
            } else {
                this.task.get().close(message);
            }
        }
    }
}
public class FilterVersion { /** * This method creates a version range from the supplied min and max FilterVersion * @ param minVersion The min version in the range . Can be null , if so treated as { @ link Version # emptyVersion } * @ param maxVersion The max version . Can be null , null in a version range is treated as infinite * @ return A version range object representing the min and max values supplied */ public static VersionRange getFilterRange ( FilterVersion minVersion , FilterVersion maxVersion ) { } }
VersionRange vr = null ; Version vmin = minVersion == null ? Version . emptyVersion : new Version ( minVersion . getValue ( ) ) ; Version vmax = maxVersion == null ? null : new Version ( maxVersion . getValue ( ) ) ; char leftType = ( minVersion == null || minVersion . getInclusive ( ) ) ? VersionRange . LEFT_CLOSED : VersionRange . LEFT_OPEN ; char rightType = ( maxVersion == null || maxVersion . getInclusive ( ) ) ? VersionRange . RIGHT_CLOSED : VersionRange . RIGHT_OPEN ; vr = new VersionRange ( leftType , vmin , vmax , rightType ) ; return vr ;
public class SlicedFileConsumer {

    /**
     * Adjust timestamp and write to the file.
     *
     * @param queued queued data for write
     */
    private final void write(QueuedMediaData queued) {
        // get data type
        byte dataType = queued.getDataType();
        // get timestamp
        int timestamp = queued.getTimestamp();
        log.debug("Write - timestamp: {} type: {}", timestamp, dataType);
        // get queued
        ITag tag = queued.getData();
        if (tag != null) {
            // only allow blank tags if they are of audio type
            if (tag.getBodySize() > 0 || dataType == ITag.TYPE_AUDIO) {
                // if the last message was a reset or we just started, use the header timer
                if (startTimestamp == -1) {
                    startTimestamp = timestamp;
                    timestamp = 0;
                } else {
                    // Rebase all subsequent timestamps relative to the first one seen.
                    timestamp -= startTimestamp;
                }
                // update the timestamp
                tag.setTimestamp(timestamp);
                try {
                    if (timestamp >= 0) {
                        if (!writer.writeTag(tag)) {
                            log.warn("Tag was not written");
                        }
                    } else {
                        log.warn("Skipping message with negative timestamp.");
                    }
                } catch (ClosedChannelException cce) {
                    // the channel we tried to write to is closed, we should not try
                    // again on that writer
                    log.error("The writer is no longer able to write to the file: {} writable: {}", path.getFileName(), path.toFile().canWrite());
                } catch (IOException e) {
                    log.warn("Error writing tag", e);
                    if (e.getCause() instanceof ClosedChannelException) {
                        // the channel we tried to write to is closed, we should not
                        // try again on that writer
                        log.error("The writer is no longer able to write to the file: {} writable: {}", path.getFileName(), path.toFile().canWrite());
                    }
                } finally {
                    // NOTE(review): dispose only runs on this write path; items
                    // filtered out by the checks above are never disposed —
                    // confirm whether that leak is intended.
                    queued.dispose();
                }
            }
        }
    }
}
public class AbstractMBean { /** * XMLGregorianCalendar used because it provides a standard formatting ( ISO 8601 ) and timezone handling * @ param millis * @ return an XMLGregorianCalendar in the current timezone . < p > * TODO consider forcing TimeZone to UTC */ protected XMLGregorianCalendar date ( Long millis ) { } }
if ( millis == null ) { return null ; } GregorianCalendar gCal = new GregorianCalendar ( ) ; gCal . setTimeInMillis ( millis ) ; return dtf . newXMLGregorianCalendar ( gCal ) ;
public class MapUtils { /** * Returns a map with the same keys as { @ code map } , but the reverse iteration * order . * @ param map * @ return */ public static < A , B > Map < A , B > reverse ( Map < A , B > map ) { } }
List < Map . Entry < A , B > > list = Lists . newArrayList ( map . entrySet ( ) ) ; Collections . reverse ( list ) ; return listToMap ( list ) ;
public class YarnAppSecurityManager {

    /**
     * Renew the existing delegation token.
     *
     * @throws IOException if the token cannot be renewed or persisted
     * @throws InterruptedException if the renewal is interrupted
     */
    private synchronized void renewDelegationToken() throws IOException, InterruptedException {
        this.token.renew(this.fs.getConf());
        // Persist the renewed token so other processes can pick it up.
        writeDelegationTokenToFile();
        if (!this.firstLogin) {
            // Send a message to the controller and all the participants if this is not the first login
            sendTokenFileUpdatedMessage(InstanceType.CONTROLLER);
            sendTokenFileUpdatedMessage(InstanceType.PARTICIPANT);
        }
    }
}
public class RmiServerBuilder { /** * Sets the remote interfaces property . * @ param remoteInterfaces * @ return */ public RmiServerBuilder remoteInterfaces ( Class < ? extends Remote > ... remoteInterfaces ) { } }
endpoint . setRemoteInterfaces ( Arrays . asList ( remoteInterfaces ) ) ; return this ;
public class FnObject {

    /**
     * Determines whether the result of executing the specified function
     * on the target object is less or equal to the specified object parameter
     * in value, that is, whether <tt>functionResult.compareTo(object) &lt;= 0</tt>. Both
     * the target and the specified object have to implement {@link Comparable}.
     *
     * @param by the function applied to the target before the comparison
     * @param object the object to compare to the target
     * @return true if function result is less or equal to the specified object, false if not
     */
    public static final <X> Function<X, Boolean> lessOrEqToBy(final IFunction<X, ?> by, final Object object) {
        // Compose: first apply "by", then compare its result against "object".
        return FnFunc.chain(by, lessOrEqTo(object));
    }
}
public class Document {
    /**
     * Setter for the "relations" feature (UIMA-generated accessor).
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setRelations(FSArray v) {
        // Generated type-system guard: fail fast if the feature is missing.
        if (Document_Type.featOkTst && ((Document_Type) jcasType).casFeat_relations == null)
            jcasType.jcas.throwFeatMissing("relations", "de.julielab.jules.types.ace.Document");
        jcasType.ll_cas.ll_setRefValue(addr, ((Document_Type) jcasType).casFeatCode_relations, jcasType.ll_cas.ll_getFSRef(v));
    }
}
public class CPDefinitionVirtualSettingPersistenceImpl {
    /**
     * Returns the cp definition virtual settings before and after the current
     * one in the ordered set where uuid = the given value.
     *
     * @param CPDefinitionVirtualSettingId the primary key of the current cp definition virtual setting
     * @param uuid the uuid
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the previous, current, and next cp definition virtual setting
     * @throws NoSuchCPDefinitionVirtualSettingException if a cp definition virtual setting with the primary key could not be found
     */
    @Override
    public CPDefinitionVirtualSetting[] findByUuid_PrevAndNext(long CPDefinitionVirtualSettingId, String uuid,
            OrderByComparator<CPDefinitionVirtualSetting> orderByComparator)
        throws NoSuchCPDefinitionVirtualSettingException {

        // findByPrimaryKey throws NoSuchCPDefinitionVirtualSettingException if absent.
        CPDefinitionVirtualSetting cpDefinitionVirtualSetting = findByPrimaryKey(CPDefinitionVirtualSettingId);

        Session session = null;

        try {
            session = openSession();

            // [0] = previous, [1] = current, [2] = next in the uuid-ordered set.
            CPDefinitionVirtualSetting[] array = new CPDefinitionVirtualSettingImpl[3];

            array[0] = getByUuid_PrevAndNext(session, cpDefinitionVirtualSetting, uuid, orderByComparator, true);
            array[1] = cpDefinitionVirtualSetting;
            array[2] = getByUuid_PrevAndNext(session, cpDefinitionVirtualSetting, uuid, orderByComparator, false);

            return array;
        } catch (Exception e) {
            throw processException(e);
        } finally {
            closeSession(session);
        }
    }
}
public class JTSUtils {
    /**
     * Converts a way with potential inner ways to a JTS geometry.
     *
     * @param way       the way; may be null (a warning is logged and null returned)
     * @param innerWays the inner ways (holes) or null
     * @return the JTS geometry, or null if {@code way} is null
     */
    public static Geometry toJtsGeometry(TDWay way, List<TDWay> innerWays) {
        if (way == null) {
            LOGGER.warning("way is null");
            return null;
        }

        if (way.isForcePolygonLine()) {
            // may build a single line string if inner ways are empty
            return buildMultiLineString(way, innerWays);
        }

        if (way.getShape() != TDWay.LINE || innerWays != null && innerWays.size() > 0) {
            // Have to be careful here about polygons and lines again, the problem with
            // polygons is that a certain direction is forced, so we do not want to reverse
            // closed lines that are not meant to be polygons
            // may contain holes if inner ways are not empty
            Polygon polygon = buildPolygon(way, innerWays);
            if (polygon.isValid()) {
                return polygon;
            }
            // Invalid polygons (self-intersections etc.) get a repair attempt.
            return repairInvalidPolygon(polygon);
        }
        // not a closed line
        return buildLineString(way);
    }
}
public class DrawerView { /** * Gets an item from the drawer view * @ param id The item ID * @ return Item from the drawer view */ public DrawerItem findItemById ( long id ) { } }
for ( DrawerItem item : mAdapter . getItems ( ) ) { if ( item . getId ( ) == id ) { return item ; } } return null ;
public class DatabaseInformationMain {
    /**
     * Retrieves a <code>Table</code> object describing the accessible
     * tables defined within this database. <p>
     *
     * Each row is a table description with the following columns: <p>
     *
     * <pre class="SqlCodeExample">
     * TABLE_CAT                 VARCHAR   table catalog
     * TABLE_SCHEM               VARCHAR   table schema
     * TABLE_NAME                VARCHAR   table name
     * TABLE_TYPE                VARCHAR   {"TABLE" | "VIEW" |
     *                                      "SYSTEM TABLE" | "GLOBAL TEMPORARY"}
     * REMARKS                   VARCHAR   comment on the table.
     * TYPE_CAT                  VARCHAR   table type catalog (not implemented).
     * TYPE_SCHEM                VARCHAR   table type schema (not implemented).
     * TYPE_NAME                 VARCHAR   table type name (not implemented).
     * SELF_REFERENCING_COL_NAME VARCHAR   designated "identifier" column of
     *                                     typed table (not implemented).
     * REF_GENERATION            VARCHAR   {"SYSTEM" | "USER" |
     *                                      "DERIVED" | NULL} (not implemented)
     * HSQLDB_TYPE               VARCHAR   HSQLDB-specific type:
     *                                     {"MEMORY" | "CACHED" | "TEXT" | ...}
     * READ_ONLY                 BOOLEAN   TRUE if table is read-only, else FALSE.
     * COMMIT_ACTION             VARCHAR   "PRESERVE" or "DELETE" for temp tables,
     *                                     else NULL
     * </pre> <p>
     *
     * @return a <code>Table</code> object describing the accessible
     *         tables defined within this database
     */
    final Table SYSTEM_TABLES() {

        Table t = sysTables[SYSTEM_TABLES];

        if (t == null) {
            // First call: build the column structure and primary key only.
            t = createBlankTable(sysTableHsqlNames[SYSTEM_TABLES]);

            // required
            addColumn(t, "TABLE_CAT", SQL_IDENTIFIER);
            addColumn(t, "TABLE_SCHEM", SQL_IDENTIFIER);
            addColumn(t, "TABLE_NAME", SQL_IDENTIFIER);     // not null
            addColumn(t, "TABLE_TYPE", CHARACTER_DATA);     // not null
            addColumn(t, "REMARKS", CHARACTER_DATA);

            // JDBC 3.0
            addColumn(t, "TYPE_CAT", SQL_IDENTIFIER);
            addColumn(t, "TYPE_SCHEM", SQL_IDENTIFIER);
            addColumn(t, "TYPE_NAME", SQL_IDENTIFIER);
            addColumn(t, "SELF_REFERENCING_COL_NAME", SQL_IDENTIFIER);
            addColumn(t, "REF_GENERATION", CHARACTER_DATA);

            // extended
            addColumn(t, "HSQLDB_TYPE", SQL_IDENTIFIER);
            addColumn(t, "READ_ONLY", Type.SQL_BOOLEAN);    // not null
            addColumn(t, "COMMIT_ACTION", CHARACTER_DATA);  // not null

            // order TABLE_TYPE, TABLE_SCHEM and TABLE_NAME
            // added for unique: TABLE_CAT
            // false PK, as TABLE_SCHEM and/or TABLE_CAT may be null
            HsqlName name = HsqlNameManager.newInfoSchemaObjectName(
                sysTableHsqlNames[SYSTEM_TABLES].name, false,
                SchemaObject.INDEX);

            t.createPrimaryKey(name, new int[]{ 3, 1, 2, 0 }, false);

            return t;
        }

        PersistentStore store =
            database.persistentStoreCollection.getStore(t);

        // intermediate holders
        Iterator    tables;
        Table       table;
        Object[]    row;
        HsqlName    accessKey;
        DITableInfo ti;

        // column number mappings
        // JDBC 1
        final int itable_cat   = 0;
        final int itable_schem = 1;
        final int itable_name  = 2;
        final int itable_type  = 3;
        final int iremark      = 4;

        // JDBC 3.0
        final int itype_cat   = 5;
        final int itype_schem = 6;
        final int itype_name  = 7;
        final int isref_cname = 8;
        final int iref_gen    = 9;

        // hsqldb ext
        final int ihsqldb_type   = 10;
        final int iread_only     = 11;
        final int icommit_action = 12;

        // Initialization
        tables = allTables();
        ti     = new DITableInfo();

        // Do it.
        while (tables.hasNext()) {
            table = (Table) tables.next();

            // Skip tables the current session may not see.
            if (!isAccessibleTable(table)) {
                continue;
            }

            ti.setTable(table);

            row                 = t.getEmptyRowData();
            row[itable_cat]     = database.getCatalogName().name;
            row[itable_schem]   = table.getSchemaName().name;
            row[itable_name]    = ti.getName();
            row[itable_type]    = ti.getJDBCStandardType();
            row[iremark]        = ti.getRemark();
            row[ihsqldb_type]   = ti.getHsqlType();
            row[iread_only]     = ti.isReadOnly();
            row[icommit_action] = table.isTemp()
                ? (table.onCommitPreserve() ? "PRESERVE" : "DELETE")
                : null;

            t.insertSys(store, row);
        }

        return t;
    }
}
public class SystemConfiguration { /** * Method to initialize the { @ link # CACHE cache } for the system * configurations . */ public static void initialize ( ) { } }
if ( InfinispanCache . get ( ) . exists ( SystemConfiguration . UUIDCACHE ) ) { InfinispanCache . get ( ) . < UUID , SystemConfiguration > getCache ( SystemConfiguration . UUIDCACHE ) . clear ( ) ; } else { InfinispanCache . get ( ) . < UUID , SystemConfiguration > getCache ( SystemConfiguration . UUIDCACHE ) . addListener ( new CacheLogListener ( SystemConfiguration . LOG ) ) ; } if ( InfinispanCache . get ( ) . exists ( SystemConfiguration . IDCACHE ) ) { InfinispanCache . get ( ) . < Long , SystemConfiguration > getCache ( SystemConfiguration . IDCACHE ) . clear ( ) ; } else { InfinispanCache . get ( ) . < Long , SystemConfiguration > getCache ( SystemConfiguration . IDCACHE ) . addListener ( new CacheLogListener ( SystemConfiguration . LOG ) ) ; } if ( InfinispanCache . get ( ) . exists ( SystemConfiguration . NAMECACHE ) ) { InfinispanCache . get ( ) . < String , SystemConfiguration > getCache ( SystemConfiguration . NAMECACHE ) . clear ( ) ; } else { InfinispanCache . get ( ) . < String , SystemConfiguration > getCache ( SystemConfiguration . NAMECACHE ) . addListener ( new CacheLogListener ( SystemConfiguration . LOG ) ) ; } SystemConfiguration . ENCRYPTOR = new StandardPBEStringEncryptor ( ) ; SystemConfiguration . ENCRYPTOR . setConfig ( SystemConfiguration . getPBEConfig ( ) ) ;
public class Tools {
    /**
     * Loads a properties file from the classpath.
     *
     * @param clazz    the class whose class loader is used to locate the resource
     * @param fileName properties file name (classpath resource path)
     * @return the loaded properties
     * @throws IOException if the resource cannot be found or read
     */
    public static Properties loadProperties(Class<?> clazz, String fileName) throws IOException {
        Properties properties = new Properties();
        try (InputStream is = clazz.getClassLoader().getResourceAsStream(fileName)) {
            if (is == null) {
                // getResourceAsStream returns null for a missing resource;
                // fail with a clear message instead of the NullPointerException
                // that Properties.load(null) would throw.
                throw new IOException("Resource not found on classpath: " + fileName);
            }
            properties.load(is);
        }
        return properties;
    }
}
public class MyStaticValue { /** * 名字词性对象反序列化 * @ return */ @ SuppressWarnings ( "unchecked" ) public static Map < String , int [ ] [ ] > getPersonFreqMap ( ) { } }
Map < String , int [ ] [ ] > map = new HashMap < > ( 0 ) ; try ( InputStream inputStream = DicReader . getInputStream ( "person/asian_name_freq.data" ) ) { ObjectInputStream objectInputStream = new ObjectInputStream ( inputStream ) ; map = ( Map < String , int [ ] [ ] > ) objectInputStream . readObject ( ) ; } catch ( IOException e ) { LOG . warn ( "IO异常" , e ) ; } catch ( ClassNotFoundException e ) { LOG . warn ( "找不到类" , e ) ; } return map ;
public class SphericalUtil { /** * Returns the length of the given path , in meters , on Earth . */ public static double computeLength ( List < IGeoPoint > path ) { } }
if ( path . size ( ) < 2 ) { return 0 ; } double length = 0 ; IGeoPoint prev = path . get ( 0 ) ; double prevLat = toRadians ( prev . getLatitude ( ) ) ; double prevLng = toRadians ( prev . getLongitude ( ) ) ; for ( IGeoPoint point : path ) { double lat = toRadians ( point . getLatitude ( ) ) ; double lng = toRadians ( point . getLongitude ( ) ) ; length += distanceRadians ( prevLat , prevLng , lat , lng ) ; prevLat = lat ; prevLng = lng ; } return length * EARTH_RADIUS ;
public class ExtensionLoader { /** * ZAP : Added the type argument . */ private void addParamPanel ( List < AbstractParamPanel > panelList , AbstractParamDialog dialog ) { } }
String [ ] ROOT = { } ; for ( AbstractParamPanel panel : panelList ) { try { dialog . addParamPanel ( ROOT , panel , true ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; } }
public class StringValueArray { /** * Initializes the array with the provided number of bytes . * @ param bytes initial size of the encapsulated array in bytes */ private void initialize ( int bytes ) { } }
Preconditions . checkArgument ( bytes > 0 , "Requested array with zero capacity" ) ; Preconditions . checkArgument ( bytes <= MAX_ARRAY_SIZE , "Requested capacity exceeds limit of " + MAX_ARRAY_SIZE ) ; data = new byte [ bytes ] ;
public class MAPApplicationContext {
    /**
     * Returns whether the given application-context version is defined for
     * the given application-context name.
     *
     * @param contextName    the MAP application context name
     * @param contextVersion the candidate version number
     * @return true if the version is available for the context name
     */
    public static boolean availableApplicationContextVersion(MAPApplicationContextName contextName, int contextVersion) {
        // NOTE(review): each case encodes the version range the spec defines
        // for that context; unknown context names fall through to false.
        switch (contextName) {

        // -- Mobility Services
        // --- Location management services
        case networkLocUpContext:
        case locationCancellationContext:
        case gprsLocationUpdateContext:
            if (contextVersion >= 1 && contextVersion <= 3)
                return true;
            else
                return false;
        case interVlrInfoRetrievalContext:
        case msPurgingContext:
            if (contextVersion >= 2 && contextVersion <= 3)
                return true;
            else
                return false;
        case mmEventReportingContext:
            if (contextVersion == 3)
                return true;
            else
                return false;

        // --- Handover services
        case handoverControlContext:
            if (contextVersion >= 1 && contextVersion <= 3)
                return true;
            else
                return false;

        // --- Authentication management services
        case infoRetrievalContext:
            if (contextVersion >= 1 && contextVersion <= 3)
                return true;
            else
                return false;
        case authenticationFailureReportContext:
            if (contextVersion == 3)
                return true;
            else
                return false;

        // --- IMEI management services
        case equipmentMngtContext:
            if (contextVersion >= 1 && contextVersion <= 3)
                return true;
            else
                return false;

        // --- Subscriber management services
        case subscriberDataMngtContext:
            if (contextVersion >= 1 && contextVersion <= 3)
                return true;
            else
                return false;

        // --- Fault recovery services
        case resetContext:
            if (contextVersion >= 1 && contextVersion <= 2)
                return true;
            else
                return false;

        // --- Subscriber Information services
        case anyTimeEnquiryContext:
        case subscriberInfoEnquiryContext:
        case anyTimeInfoHandlingContext:
        case subscriberDataModificationNotificationContext:
            if (contextVersion == 3)
                return true;
            else
                return false;

        // -- oam
        case tracingContext:
            if (contextVersion >= 1 && contextVersion <= 3)
                return true;
            else
                return false;
        case imsiRetrievalContext:
            if (contextVersion == 2)
                return true;
            else
                return false;

        // -- Call Handling Services
        case locationInfoRetrievalContext:
        case roamingNumberEnquiryContext:
            if (contextVersion >= 1 && contextVersion <= 3)
                return true;
            else
                return false;
        case callControlTransferContext:
            if (contextVersion >= 3 && contextVersion <= 4)
                return true;
            else
                return false;
        case groupCallControlContext:
        case groupCallInfoRetrievalContext:
        case reportingContext:
        case istAlertingContext:
        case ServiceTerminationContext:
        case resourceManagementContext:
            if (contextVersion == 3)
                return true;
            else
                return false;

        // -- Supplementary services
        case networkFunctionalSsContext:
            if (contextVersion >= 1 && contextVersion <= 2)
                return true;
            else
                return false;
        case networkUnstructuredSsContext:
            if (contextVersion == 2)
                return true;
            else
                return false;
        case ssInvocationNotificationContext:
        case callCompletionContext:
            if (contextVersion == 3)
                return true;
            else
                return false;

        // -- short message service
        case shortMsgGatewayContext:
        case shortMsgMORelayContext:
        case shortMsgMTRelayContext:
        case mwdMngtContext:
            if (contextVersion >= 1 && contextVersion <= 3)
                return true;
            else
                return false;
        case shortMsgMTVgcsRelayContext:
            if (contextVersion == 3)
                return true;
            else
                return false;
        case shortMsgAlertContext:
            if (contextVersion >= 1 && contextVersion <= 2)
                return true;
            else
                return false;

        // -- Network-Requested PDP Context Activation services
        case gprsLocationInfoRetrievalContext:
            if (contextVersion >= 3 && contextVersion <= 4)
                return true;
            else
                return false;
        case failureReportContext:
        case gprsNotifyContext:
            if (contextVersion == 3)
                return true;
            else
                return false;

        // -- Location Service (lms)
        case locationSvcEnquiryContext:
        case locationSvcGatewayContext:
            if (contextVersion == 3)
                return true;
            else
                return false;
        }

        return false;
    }
}
public class Job {
    /**
     * Reports new work done for this job, atomically folding the delta and
     * the (possibly changed) message into the shared job state.
     *
     * @param newworked amount of newly completed work to add
     * @param msg       new status message, or null to leave it unset
     */
    public final void update(final long newworked, final String msg) {
        // Cheap pre-check avoids the atomic round-trip when nothing changed.
        if (newworked > 0 || (msg != null && !msg.equals(_msg))) {
            new JAtomic() {
                @Override
                boolean abort(Job job) {
                    // Abort if, by the time the atomic update runs, there is
                    // still no work delta and the message is already current.
                    return newworked == 0 && ((msg == null && _msg == null) || (msg != null && msg.equals(job._msg)));
                }

                @Override
                void update(Job old) {
                    old._worked += newworked;
                    old._msg = msg;
                }
            }.apply(this);
        }
    }
}
public class IfcConstraintClassificationRelationshipImpl {
    /**
     * Returns the related classifications of this constraint-classification
     * relationship (EMF-generated reflective accessor).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    public EList<IfcClassificationNotationSelect> getRelatedClassifications() {
        return (EList<IfcClassificationNotationSelect>) eGet(Ifc2x3tc1Package.Literals.IFC_CONSTRAINT_CLASSIFICATION_RELATIONSHIP__RELATED_CLASSIFICATIONS, true);
    }
}
public class DocBookBuilder {
    /**
     * Adds a Topic's contents as the introduction text for a Level.
     * Delegates to the overload with the "include title" flag set to true.
     *
     * @param docBookVersion the DocBook version being built
     * @param level          the level the intro topic is being added for
     * @param specTopic      the topic that contains the introduction content
     * @param parentNode     the DOM parent node the intro content is appended to
     * @param doc            the DOM document the content is added to
     */
    protected void addTopicContentsToLevelDocument(final DocBookVersion docBookVersion, final Level level, final SpecTopic specTopic, final Element parentNode, final Document doc) {
        addTopicContentsToLevelDocument(docBookVersion, level, specTopic, parentNode, doc, true);
    }
}
public class ByteArrayRequestConverterFunction { /** * Converts the specified { @ link AggregatedHttpMessage } to an object of { @ code expectedResultType } . * This converter allows only { @ code byte [ ] } and { @ link HttpData } as its return type , and * { @ link AggregatedHttpMessage } would be consumed only if it does not have a { @ code Content - Type } header * or if it has { @ code Content - Type : application / octet - stream } or { @ code Content - Type : application / binary } . */ @ Override public Object convertRequest ( ServiceRequestContext ctx , AggregatedHttpMessage request , Class < ? > expectedResultType ) throws Exception { } }
final MediaType mediaType = request . contentType ( ) ; if ( mediaType == null || mediaType . is ( MediaType . OCTET_STREAM ) || mediaType . is ( MediaType . APPLICATION_BINARY ) ) { if ( expectedResultType == byte [ ] . class ) { return request . content ( ) . array ( ) ; } if ( expectedResultType == HttpData . class ) { return request . content ( ) ; } } return RequestConverterFunction . fallthrough ( ) ;
public class StorableIntrospector {
    /**
     * Builds a {@link StorableProperty} description from a bean property,
     * collecting any validation errors. If the property is a join, then it is
     * not yet completely resolved. Returns null if there are any errors.
     *
     * @param errorMessages      error messages go here
     * @param property           property to examine
     * @param enclosing          enclosing class
     * @param pkPropertyNames    primary key property names
     * @param altKeyPropertyNames alternate key property names
     * @param parKeyPropertyNames partition key property names
     */
    @SuppressWarnings("unchecked")
    private static <S extends Storable> StorableProperty<S> makeStorableProperty
        (List<String> errorMessages,
         BeanProperty property,
         Class<S> enclosing,
         Set<String> pkPropertyNames,
         Set<String> altKeyPropertyNames,
         Set<String> parKeyPropertyNames)
    {
        // --- Gather annotations from the accessor (read) method -------------
        Nullable nullable = null;
        Alias alias = null;
        Version version = null;
        Sequence sequence = null;
        Automatic automatic = null;
        Independent independent = null;
        Join join = null;
        Derived derived = null;
        Name name = null;

        Method readMethod = property.getReadMethod();
        Method writeMethod = property.getWriteMethod();

        if (readMethod == null) {
            if (writeMethod == null || Modifier.isAbstract(writeMethod.getModifiers())) {
                // If we got here, the onus is on us to create this property. It's never
                // ok for the read method (get) to be null.
                errorMessages.add("Must define proper 'get' method for property: " + property.getName());
            }
        } else {
            nullable = readMethod.getAnnotation(Nullable.class);
            alias = readMethod.getAnnotation(Alias.class);
            version = readMethod.getAnnotation(Version.class);
            sequence = readMethod.getAnnotation(Sequence.class);
            automatic = readMethod.getAnnotation(Automatic.class);
            independent = readMethod.getAnnotation(Independent.class);
            join = readMethod.getAnnotation(Join.class);
            derived = readMethod.getAnnotation(Derived.class);
            name = readMethod.getAnnotation(Name.class);
        }

        // --- Resolve property name (explicit @Name overrides bean name) -----
        String propertyName;
        if (name == null) {
            propertyName = property.getName();
        } else {
            propertyName = name.value();
            // Ensure that only valid characters are used.
            int length = propertyName.length();
            if (length == 0) {
                errorMessages.add("Property name for method cannot be blank: " + readMethod);
            } else {
                if (!Character.isUnicodeIdentifierStart(propertyName.charAt(0))) {
                    errorMessages.add("First character of property name must be a " +
                                      "unicode identifier start: " + propertyName);
                } else {
                    for (int i = 1; i < length; i++) {
                        if (!Character.isUnicodeIdentifierPart(propertyName.charAt(i))) {
                            errorMessages.add("Characters of property name must be a " +
                                              "unicode identifier part: " + propertyName);
                            break;
                        }
                    }
                }
            }
        }

        boolean pk = pkPropertyNames.contains(propertyName);
        boolean altKey = altKeyPropertyNames.contains(propertyName);
        boolean parKey = parKeyPropertyNames.contains(propertyName);

        // --- Validate the mutator (write) method ----------------------------
        if (writeMethod == null) {
            if (readMethod == null || Modifier.isAbstract(readMethod.getModifiers())) {
                // Set method is always required for non-join properties. More
                // work is done later on join properties, and sometimes the
                // write method is required. Derived properties don't need a
                // set method.
                if (join == null && derived == null) {
                    errorMessages.add("Must define proper 'set' method for property: " + propertyName);
                }
            }
        } else {
            Class[] writeParams = writeMethod.getParameterTypes();
            if (writeParams == null || writeParams.length != 1) {
                errorMessages.add("Mutator method must contain one parameter: " + writeMethod);
            } else if (!writeParams[0].isAssignableFrom(property.getType())) {
                errorMessages.add("Property type doesn't match mutator method parameter: " +
                                  property.getType().getName() + " != " +
                                  writeParams[0].getName() + " for " + writeMethod);
            }
            // All property annotations belong on the accessor, never the mutator.
            if (writeMethod.getAnnotation(Nullable.class) != null) {
                errorMessages.add("Nullable annotation not allowed on mutator: " + writeMethod);
            }
            if (writeMethod.getAnnotation(Alias.class) != null) {
                errorMessages.add("Alias annotation not allowed on mutator: " + writeMethod);
            }
            if (writeMethod.getAnnotation(Version.class) != null) {
                errorMessages.add("Version annotation not allowed on mutator: " + writeMethod);
            }
            if (writeMethod.getAnnotation(Sequence.class) != null) {
                errorMessages.add("Sequence annotation not allowed on mutator: " + writeMethod);
            }
            if (writeMethod.getAnnotation(Automatic.class) != null) {
                errorMessages.add("Automatic annotation not allowed on mutator: " + writeMethod);
            }
            if (writeMethod.getAnnotation(Independent.class) != null) {
                errorMessages.add("Independent annotation not allowed on mutator: " + writeMethod);
            }
            if (writeMethod.getAnnotation(Join.class) != null) {
                errorMessages.add("Join annotation not allowed on mutator: " + writeMethod);
            }
            if (writeMethod.getAnnotation(Derived.class) != null) {
                errorMessages.add("Derived annotation not allowed on mutator: " + writeMethod);
            }
            if (writeMethod.getAnnotation(Name.class) != null) {
                errorMessages.add("Name annotation not allowed on mutator: " + writeMethod);
            }
        }

        // --- Derived-property constraints -----------------------------------
        if (derived != null) {
            if (readMethod != null && Modifier.isAbstract(readMethod.getModifiers()) ||
                writeMethod != null && Modifier.isAbstract(writeMethod.getModifiers()))
            {
                errorMessages.add("Derived properties cannot be abstract: " + propertyName);
            }
            if (writeMethod == null && derived.shouldCopy()) {
                errorMessages.add("Derived properties which should be copied " +
                                  "must have a write method: " + propertyName);
            }
            if (pk) {
                errorMessages.add("Derived properties cannot be a member of primary key: " + propertyName);
            }
            if (parKey) {
                errorMessages.add("Derived properties cannot be a member of partition key: " + propertyName);
            }
            if (sequence != null) {
                errorMessages.add("Derived properties cannot have a Sequence annotation: " + propertyName);
            }
            if (automatic != null) {
                errorMessages.add("Derived properties cannot have an Automatic annotation: " + propertyName);
            }
            if (join != null) {
                errorMessages.add("Derived properties cannot have a Join annotation: " + propertyName);
            }
        }

        if (nullable != null && property.getType().isPrimitive()) {
            errorMessages.add("Properties which have a primitive type cannot be declared nullable: " +
                              "Property \"" + propertyName + "\" has type \"" +
                              property.getType() + '"');
        }

        // --- Aliases ---------------------------------------------------------
        String[] aliases = null;
        if (alias != null) {
            aliases = alias.value();
            if (aliases.length == 0) {
                errorMessages.add("Alias list is empty for property: " + propertyName);
            }
        }

        // --- Constraints (mutator only) --------------------------------------
        StorablePropertyConstraint[] constraints = null;
        if (readMethod != null) {
            // Constraints not allowed on read method. Look for them and
            // generate errors if any found.
            gatherConstraints(property, readMethod, false, errorMessages);
        }
        if (writeMethod != null) {
            constraints = gatherConstraints(property, writeMethod, true, errorMessages);
        }

        // --- Adapters (accessor only; auto-select one if none declared) ------
        StorablePropertyAdapter[] adapters = null;
        if (readMethod != null) {
            adapters = gatherAdapters(property, readMethod, true, errorMessages);
            if (adapters != null && adapters.length > 0) {
                if (join != null) {
                    errorMessages.add("Join properties cannot have adapters: " + propertyName);
                }
                if (adapters.length > 1) {
                    // NOTE(review): "adpater" typo is in the released message; left as-is.
                    errorMessages.add("Only one adpater allowed per property: " + propertyName);
                }
            }
            if (adapters == null || adapters.length == 0) {
                StorablePropertyAdapter autoAdapter =
                    AutomaticAdapterSelector.selectAdapterFor(property);
                if (autoAdapter != null) {
                    adapters = new StorablePropertyAdapter[] {autoAdapter};
                }
            }
        }
        if (writeMethod != null) {
            // Adapters not allowed on write method. Look for them and generate
            // errors if any found.
            gatherAdapters(property, writeMethod, false, errorMessages);
        }

        // Check that declared checked exceptions are allowed.
        if (readMethod != null) {
            for (Class<?> ex : readMethod.getExceptionTypes()) {
                if (RuntimeException.class.isAssignableFrom(ex) ||
                    Error.class.isAssignableFrom(ex))
                {
                    continue;
                }
                if (join != null || derived != null) {
                    if (FetchException.class.isAssignableFrom(ex)) {
                        continue;
                    }
                    errorMessages.add("Checked exceptions thrown by join or derived property accessors " +
                                      "must be of type FetchException: \"" + readMethod.getName() +
                                      "\" declares throwing \"" + ex.getName() + '"');
                    break;
                } else {
                    errorMessages.add("Only join and derived property accessors can throw checked " +
                                      "exceptions: \"" + readMethod.getName() +
                                      "\" declares throwing \"" + ex.getName() + '"');
                    break;
                }
            }
        }

        // Check that declared checked exceptions are allowed.
        if (writeMethod != null) {
            for (Class<?> ex : writeMethod.getExceptionTypes()) {
                if (RuntimeException.class.isAssignableFrom(ex) ||
                    Error.class.isAssignableFrom(ex))
                {
                    continue;
                }
                errorMessages.add("Mutators cannot throw checked exceptions: \"" + writeMethod.getName() +
                                  "\" declares throwing \"" + ex.getName() + '"');
                break;
            }
        }

        String sequenceName = null;
        if (sequence != null) {
            sequenceName = sequence.value();
        }

        // --- Non-join properties are complete here ---------------------------
        if (join == null) {
            if (errorMessages.size() > 0) {
                return null;
            }
            return new SimpleProperty<S>
                (property, enclosing, nullable != null, pk, altKey, parKey,
                 aliases, constraints, adapters == null ? null : adapters[0],
                 version != null, sequenceName,
                 independent != null, automatic != null,
                 derived, propertyName);
        }

        // Do additional work for join properties.

        String[] internal = join.internal();
        String[] external = join.external();

        if (internal == null) {
            internal = new String[0];
        }
        if (external == null) {
            external = new String[0];
        }

        if (internal.length != external.length) {
            errorMessages.add("Internal/external lists on Join property \"" + propertyName +
                              "\" differ in length: " + internal.length + " != " + external.length);
        }

        Class joinedType = property.getType();

        if (Query.class == joinedType) {
            if (nullable != null) {
                errorMessages.add("Join property \"" + propertyName +
                                  "\" cannot be declared as nullable because the type is Query");
            }
            // Recover the results element type from the accessor. A Mutator is
            // not allowed.
            if (property.getWriteMethod() != null) {
                errorMessages.add("Join property \"" + propertyName +
                                  "\" cannot have a mutator because the type is Query: " +
                                  property.getWriteMethod());
            }
            if (property.getReadMethod() == null) {
                // Default.
                joinedType = Storable.class;
            } else {
                // Walk the generic return type Query<E> to discover E.
                Type genericType = property.getReadMethod().getGenericReturnType();
                if (genericType instanceof Class) {
                    // Default.
                    joinedType = Storable.class;
                } else if (genericType instanceof ParameterizedType) {
                    ParameterizedType pt = (ParameterizedType) genericType;
                    Type[] args = pt.getActualTypeArguments();
                    if (args == null || args.length == 0) {
                        // Default.
                        joinedType = Storable.class;
                    } else {
                        Type arg = args[0];
                        if (arg instanceof WildcardType) {
                            Type[] upper = ((WildcardType) arg).getUpperBounds();
                            // Length should only be one or zero.
                            if (upper.length == 1) {
                                arg = upper[0];
                            } else {
                                // Default.
                                arg = Storable.class;
                            }
                        }
                        while (arg instanceof ParameterizedType) {
                            arg = ((ParameterizedType) arg).getRawType();
                        }
                        if (arg instanceof Class) {
                            joinedType = (Class) arg;
                        }
                    }
                }
            }
        }

        if (!Storable.class.isAssignableFrom(joinedType)) {
            errorMessages.add("Type of join property \"" + propertyName +
                              "\" is not a Storable: " + joinedType);
        }

        if (property.getReadMethod() != null) {
            // Join accessors must declare throwing FetchException (or a supertype).
            Class exceptionType = FetchException.class;
            Class<?>[] exceptions = property.getReadMethod().getExceptionTypes();
            check:
            {
                for (int i = exceptions.length; --i >= 0; ) {
                    if (exceptions[i].isAssignableFrom(exceptionType)) {
                        break check;
                    }
                }
                String exceptionName = exceptionType.getName();
                int index = exceptionName.lastIndexOf('.');
                if (index >= 0) {
                    exceptionName = exceptionName.substring(index + 1);
                }
                errorMessages.add("Join property accessor must declare throwing a " +
                                  exceptionName + ": " + property.getReadMethod());
            }
        }

        if (version != null) {
            errorMessages.add("Join property cannot be declared as a version property: " + propertyName);
        }

        if (errorMessages.size() > 0) {
            return null;
        }

        return new JoinProperty<S>
            (property, enclosing, nullable != null,
             aliases, constraints, adapters == null ? null : adapters[0],
             sequenceName, independent != null, automatic != null, derived,
             joinedType, internal, external, propertyName);
    }
}
public class ApiOvhHorizonView {
    /**
     * Manage your session Timeout on Unified Access Gateway.
     *
     * REST: POST /horizonView/{serviceName}/accessPoint/{accessPointId}/changeSessionTimeout
     *
     * @param onSingleAP    [required] Update timeout session on a single Unified Access Gateway (only for hybrid Pool)
     * @param expiration    [required] Timeout (in hour)
     * @param serviceName   [required] Domain of the service
     * @param accessPointId [required] Pool id
     */
    public OvhTask serviceName_accessPoint_accessPointId_changeSessionTimeout_POST(String serviceName, Long accessPointId, Long expiration, OvhAccessPointTypeEnum onSingleAP) throws IOException {
        String qPath = "/horizonView/{serviceName}/accessPoint/{accessPointId}/changeSessionTimeout";
        // path() substitutes the template placeholders with the given values.
        StringBuilder sb = path(qPath, serviceName, accessPointId);
        HashMap<String, Object> o = new HashMap<String, Object>();
        addBody(o, "expiration", expiration);
        addBody(o, "onSingleAP", onSingleAP);
        String resp = exec(qPath, "POST", sb.toString(), o);
        return convertTo(resp, OvhTask.class);
    }
}
public class Ifc4PackageImpl {

    /**
     * Returns the {@link EClass} for IfcWallElementedCase, lazily resolving it from the
     * globally registered Ifc4 package on first access and caching it thereafter.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcWallElementedCase() {
        if (ifcWallElementedCaseEClass == null) {
            // Lazy lookup: fetch the classifier from the registered package by its
            // generated, fixed classifier index (759). Generated EMF code -- the index
            // must stay in sync with the model; do not edit by hand.
            ifcWallElementedCaseEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(759);
        }
        return ifcWallElementedCaseEClass;
    }
}
public class YearWeek { /** * Compares this year - week to another * The comparison is based first on the value of the year , then on the value of the week . * It is " consistent with equals " , as defined by { @ link Comparable } . * @ param other the other year - week to compare to , not null * @ return the comparator value , negative if less , positive if greater */ @ Override public int compareTo ( YearWeek other ) { } }
int cmp = ( year - other . year ) ; if ( cmp == 0 ) { cmp = ( week - other . week ) ; } return cmp ;
public class XMLCaster {

    /**
     * Checks whether every node in the array has the given DOM node type.
     *
     * @param nodes nodes to check
     * @param type node type to compare against (one of the {@code Node.*_NODE} constants)
     * @return true if all nodes share the given type, false otherwise
     */
    private static boolean _isAllOfSameType(Node[] nodes, short type) {
        for (Node node : nodes) {
            // One mismatch is enough to fail the whole check.
            if (node.getNodeType() != type) {
                return false;
            }
        }
        return true;
    }
}
public class RebalancePlan {

    /**
     * Determines storage overhead and returns a pretty-printed summary.
     * <p>
     * For each node of the final cluster, prints its current partition-store load, the
     * number of partition-stores to be moved onto it, and the resulting multiplier
     * ("overhead") relative to the current load. Also reports the maximum per-node
     * overhead across nodes that currently hold data.
     *
     * @param finalNodeToOverhead Map of node IDs from final cluster to number
     *        of partition-stores to be moved to the node.
     * @return pretty printed string summary of storage overhead.
     */
    private String storageOverhead(Map<Integer, Integer> finalNodeToOverhead) {
        double maxOverhead = Double.MIN_VALUE;
        // Partition balance is computed against the *current* cluster/stores.
        PartitionBalance pb = new PartitionBalance(currentCluster, currentStoreDefs);
        StringBuilder sb = new StringBuilder();
        sb.append("Per-node store-overhead:").append(Utils.NEWLINE);
        DecimalFormat doubleDf = new DecimalFormat("####.##");
        for (int nodeId : finalCluster.getNodeIds()) {
            Node node = finalCluster.getNodeById(nodeId);
            String nodeTag = "Node " + String.format("%4d", nodeId) + " (" + node.getHost() + ")";
            // Current load: 0 for nodes that are new in the final cluster.
            int initialLoad = 0;
            if (currentCluster.getNodeIds().contains(nodeId)) {
                initialLoad = pb.getNaryPartitionCount(nodeId);
            }
            // Stores to be moved to this node; 0 when the map has no entry for it.
            int toLoad = 0;
            if (finalNodeToOverhead.containsKey(nodeId)) {
                toLoad = finalNodeToOverhead.get(nodeId);
            }
            // NOTE(review): when initialLoad == 0 this is Infinity (or NaN for 0/0);
            // such values are printed in the per-node line but deliberately excluded
            // from the max by the initialLoad > 0 guard below.
            double overhead = (initialLoad + toLoad) / (double) initialLoad;
            if (initialLoad > 0 && maxOverhead < overhead) {
                maxOverhead = overhead;
            }
            String loadTag = String.format("%6d", initialLoad) + " + " + String.format("%6d", toLoad) + " -> " + String.format("%6d", initialLoad + toLoad) + " (" + doubleDf.format(overhead) + " X)";
            sb.append(nodeTag + " : " + loadTag).append(Utils.NEWLINE);
        }
        sb.append(Utils.NEWLINE).append("**** Max per-node storage overhead: " + doubleDf.format(maxOverhead) + " X.").append(Utils.NEWLINE);
        return (sb.toString());
    }
}
public class BarcodeValue { /** * Add an occurrence of a value */ void setValue ( int value ) { } }
Integer confidence = values . get ( value ) ; if ( confidence == null ) { confidence = 0 ; } confidence ++ ; values . put ( value , confidence ) ;
public class PersistenceBrokerFactoryDefaultImpl {

    /**
     * Returns a broker instance from the pool for the given {@link PBKey}.
     * The key is first cross-checked/normalized; the pool is responsible for creating
     * new broker instances when none is available. The pooled instance is wrapped in a
     * handle before being returned, so use-after-close cannot corrupt the pooled object.
     *
     * @param pbKey key identifying the target persistence configuration
     * @return a ready-to-use broker handle
     * @throws PBFactoryException if borrowing a broker from the pool fails
     */
    public PersistenceBrokerInternal createPersistenceBroker(PBKey pbKey) throws PBFactoryException {
        if (log.isDebugEnabled()) log.debug("Obtain broker from pool, used PBKey is " + pbKey);
        PersistenceBrokerInternal broker = null;
        // Try to find a valid PBKey if the given key does not fully match a registered one.
        pbKey = BrokerHelper.crossCheckPBKey(pbKey);
        try {
            // Get a pooled PB instance; the pool creates new PB instances when empty.
            broker = ((PersistenceBrokerInternal) brokerPool.borrowObject(pbKey));
            // Wrap the pooled instance with a handle to avoid corruption of closed PB instances.
            broker = wrapRequestedBrokerInstance(broker);
        } catch (Exception e) {
            try {
                // If something went wrong after borrowing, try to return/close the broker
                // so it is not leaked from the pool.
                if (broker != null) broker.close();
            } catch (Exception ignore) {
                // Best-effort cleanup only -- the original failure below is what matters.
            }
            throw new PBFactoryException("Borrow broker from pool failed, using PBKey " + pbKey, e);
        }
        return broker;
    }
}
public class DefaultTupleMapper { /** * Default behavior is to write each value in the tuple as a key : value pair * in the Cassandra row . * @ param tuple * @ return */ @ Override public Map < String , String > mapToColumns ( Tuple tuple ) { } }
Fields fields = tuple . getFields ( ) ; Map < String , String > columns = new HashMap < String , String > ( ) ; for ( int i = 0 ; i < fields . size ( ) ; i ++ ) { String name = fields . get ( i ) ; Object value = tuple . getValueByField ( name ) ; columns . put ( name , ( value != null ? value . toString ( ) : "" ) ) ; } return columns ;
public class MPXReader {

    /**
     * Populates a resource from an MPX record.
     * <p>
     * Walks the resource model (an ordered list of MPX field types, terminated by -1),
     * reads the record value at each position, and dispatches on the mapped MPXJ field
     * type to apply the correct typed conversion. Empty record values are skipped.
     * Finally applies auto-numbering of IDs where configured, and patches up files
     * that omit the unique ID entirely.
     *
     * @param resource resource instance to populate
     * @param record MPX record to read from
     * @throws MPXJException if a record value cannot be parsed
     */
    private void populateResource(Resource resource, Record record) throws MPXJException {
        // Locale-specific "No" text, used when parsing boolean fields.
        String falseText = LocaleData.getString(m_locale, LocaleData.NO);

        int length = record.getLength();
        int[] model = m_resourceModel.getModel();

        for (int i = 0; i < length; i++) {
            int mpxFieldType = model[i];
            if (mpxFieldType == -1) {
                // -1 terminates the field model: no more fields in this record.
                break;
            }

            String field = record.getString(i);
            if (field == null || field.length() == 0) {
                // Empty values leave the resource attribute untouched.
                continue;
            }

            // Translate the raw MPX field number into the MPXJ field enum,
            // then apply the conversion appropriate to that field's type.
            ResourceField resourceField = MPXResourceField.getMpxjField(mpxFieldType);
            switch (resourceField) {
                case OBJECTS: {
                    resource.set(resourceField, record.getInteger(i));
                    break;
                }

                case ID: {
                    resource.setID(record.getInteger(i));
                    break;
                }

                case UNIQUE_ID: {
                    resource.setUniqueID(record.getInteger(i));
                    break;
                }

                case MAX_UNITS: {
                    resource.set(resourceField, record.getUnits(i));
                    break;
                }

                case PERCENT_WORK_COMPLETE:
                case PEAK: {
                    resource.set(resourceField, record.getPercentage(i));
                    break;
                }

                case COST:
                case COST_PER_USE:
                case COST_VARIANCE:
                case BASELINE_COST:
                case ACTUAL_COST:
                case REMAINING_COST: {
                    resource.set(resourceField, record.getCurrency(i));
                    break;
                }

                case OVERTIME_RATE:
                case STANDARD_RATE: {
                    resource.set(resourceField, record.getRate(i));
                    break;
                }

                case REMAINING_WORK:
                case OVERTIME_WORK:
                case BASELINE_WORK:
                case ACTUAL_WORK:
                case WORK:
                case WORK_VARIANCE: {
                    resource.set(resourceField, record.getDuration(i));
                    break;
                }

                case ACCRUE_AT: {
                    resource.set(resourceField, record.getAccrueType(i));
                    break;
                }

                case LINKED_FIELDS:
                case OVERALLOCATED: {
                    resource.set(resourceField, record.getBoolean(i, falseText));
                    break;
                }

                default: {
                    // Anything without a special conversion is stored as raw text.
                    resource.set(resourceField, field);
                    break;
                }
            }
        }

        // Auto-numbering overrides any IDs read from the file when configured.
        if (m_projectConfig.getAutoResourceUniqueID() == true) {
            resource.setUniqueID(Integer.valueOf(m_projectConfig.getNextResourceUniqueID()));
        }

        if (m_projectConfig.getAutoResourceID() == true) {
            resource.setID(Integer.valueOf(m_projectConfig.getNextResourceID()));
        }

        // Handle malformed MPX files - ensure we have a unique ID
        if (resource.getUniqueID() == null) {
            resource.setUniqueID(resource.getID());
        }
    }
}
public class EC2MetadataClient { /** * Connects to the Amazon EC2 Instance Metadata Service to retrieve the * default credential information ( if any ) . * @ return The response from the Amazon EC2 Instance Metadata Service , or * null if no credential information was available . * @ throws IOException * If any problems are encountered while connecting to the * Amazon EC2 Instance Metadata Service . */ public String getDefaultCredentials ( ) throws IOException { } }
String securityCredentialsList = readResource ( SECURITY_CREDENTIALS_RESOURCE ) ; securityCredentialsList = securityCredentialsList . trim ( ) ; String [ ] securityCredentials = securityCredentialsList . split ( "\n" ) ; if ( securityCredentials . length == 0 ) return null ; String securityCredentialsName = securityCredentials [ 0 ] ; return readResource ( SECURITY_CREDENTIALS_RESOURCE + securityCredentialsName ) ;
public class PdfContentByte {

    /**
     * Changes the text matrix by emitting a {@code Tm} operator into the content stream.
     * <p>
     * Remark: this operation also initializes the current point position.
     *
     * @param a operand 1,1 in the matrix
     * @param b operand 1,2 in the matrix
     * @param c operand 2,1 in the matrix
     * @param d operand 2,2 in the matrix
     * @param x operand 3,1 in the matrix (becomes the text-line matrix x origin)
     * @param y operand 3,2 in the matrix (becomes the text-line matrix y origin)
     */
    public void setTextMatrix(float a, float b, float c, float d, float x, float y) {
        // Remember the text-line matrix origin for subsequent text operations.
        state.xTLM = x;
        state.yTLM = y;
        // Emit "a b c d x y Tm" followed by the operator separator.
        // NOTE(review): the first separator uses append(' ') while the rest use
        // append_i(' ') -- presumably equivalent for a space char, but confirm
        // against the ByteBuffer API before normalizing.
        content.append(a).append(' ').append(b).append_i(' ').append(c).append_i(' ').append(d).append_i(' ').append(x).append_i(' ').append(y).append(" Tm").append_i(separator);
    }
}
public class RequestRoute { /** * Sets the controller class of this request * @ param clazz The controller class */ public void withControllerClass ( Class < ? > clazz ) { } }
Objects . requireNonNull ( clazz , Required . CONTROLLER_CLASS . toString ( ) ) ; this . controllerClass = clazz ;
public class CmsEditSiteForm { /** * Reads out all aliases from the form . < p > * @ return a List of CmsSiteMatcher */ private List < CmsSiteMatcher > getAliases ( ) { } }
List < CmsSiteMatcher > ret = new ArrayList < CmsSiteMatcher > ( ) ; for ( I_CmsEditableGroupRow row : m_aliasGroup . getRows ( ) ) { FormLayout layout = ( FormLayout ) ( row . getComponent ( ) ) ; CheckBox box = ( CheckBox ) ( layout . getComponent ( 1 ) ) ; TextField field = ( TextField ) layout . getComponent ( 0 ) ; CmsSiteMatcher matcher = new CmsSiteMatcher ( field . getValue ( ) ) ; matcher . setRedirect ( box . getValue ( ) . booleanValue ( ) ) ; ret . add ( matcher ) ; } return ret ;
public class AbstractCDIRuntime {

    /**
     * Determine whether this archive is completely ignored by CDI, so there is no need
     * to create a BDA for it. The archive is skipped only when implicit bean archive
     * scanning is disabled, it has no beans.xml, and it is not a web module.
     *
     * @param archive the CDI archive to examine
     * @return whether the archive will be ignored by CDI
     */
    @Override
    public final boolean skipCreatingBda(CDIArchive archive) {
        // Same short-circuit order as before: cheapest/most-decisive check first.
        return isImplicitBeanArchivesScanningDisabled(archive)
                && archive.getBeansXml() == null
                && archive.getType() != ArchiveType.WEB_MODULE;
    }
}
public class Chat {

    /**
     * Invoked when the connection has been fully established and suspended,
     * that is, ready for receiving messages. Logs connection diagnostics.
     */
    @Ready
    public void onReady(/* If you don't want injection: AtmosphereResource r */) {
        // NOTE(review): 'r', 'factory' and 'broadcaster' are injected fields declared
        // elsewhere in this class (Atmosphere managed-service injection) -- not
        // visible in this chunk.
        logger.info("Browser {} connected", r.uuid());
        logger.info("BroadcasterFactory used {}", factory.getClass().getName());
        logger.info("Broadcaster injected {}", broadcaster.getID());
    }
}
public class Waiter { /** * Waits for a certain view . * @ param view the id of the view to wait for * @ param index the index of the { @ link View } . { @ code 0 } if only one is available * @ return the specified View */ public View waitForView ( int id , int index , int timeout , boolean scroll ) { } }
Set < View > uniqueViewsMatchingId = new HashSet < View > ( ) ; long endTime = SystemClock . uptimeMillis ( ) + timeout ; while ( SystemClock . uptimeMillis ( ) <= endTime ) { sleeper . sleep ( ) ; for ( View view : viewFetcher . getAllViews ( false ) ) { Integer idOfView = Integer . valueOf ( view . getId ( ) ) ; if ( idOfView . equals ( id ) ) { uniqueViewsMatchingId . add ( view ) ; if ( uniqueViewsMatchingId . size ( ) > index ) { return view ; } } } if ( scroll ) scroller . scrollDown ( ) ; } return null ;
public class NodeVector { /** * Removes the first occurrence of the argument from this vector . * If the object is found in this vector , each component in the vector * with an index greater or equal to the object ' s index is shifted * downward to have an index one smaller than the value it had * previously . * @ param s Node to remove from the list * @ return True if the node was successfully removed */ public boolean removeElement ( int s ) { } }
if ( null == m_map ) return false ; for ( int i = 0 ; i < m_firstFree ; i ++ ) { int node = m_map [ i ] ; if ( node == s ) { if ( i > m_firstFree ) System . arraycopy ( m_map , i + 1 , m_map , i - 1 , m_firstFree - i ) ; else m_map [ i ] = DTM . NULL ; m_firstFree -- ; return true ; } } return false ;
public class Logger {

    /**
     * Generates a log tag of the form "ClassName, method (line N)" for the caller
     * four frames up the stack (the code that invoked the logging facade).
     *
     * @return tag describing the calling class, method and line
     */
    protected static String generateTag() {
        // Frame 4 is the original call site once the logger's own frames are skipped.
        StackTraceElement caller = Thread.currentThread().getStackTrace()[4];
        String fullTag = caller.getClassName() + ", " + caller.getMethodName()
                + " (line " + caller.getLineNumber() + ")";
        // Strip the package prefix: keep everything after the last dot, which sits
        // at the end of the fully-qualified class name.
        return fullTag.substring(fullTag.lastIndexOf(".") + 1);
    }
}
public class LoadMultipartitionTable { /** * Execute a set of queued inserts . Ensure each insert successfully * inserts one row . Throw exception if not . * @ return Count of rows inserted . * @ throws VoltAbortException if any failure at all . */ long executeSQL ( boolean isFinal ) throws VoltAbortException { } }
long count = 0 ; VoltTable [ ] results = voltExecuteSQL ( isFinal ) ; for ( VoltTable result : results ) { long dmlUpdated = result . asScalarLong ( ) ; if ( dmlUpdated == 0 ) { throw new VoltAbortException ( "Insert failed for tuple." ) ; } if ( dmlUpdated > 1 ) { throw new VoltAbortException ( "Insert modified more than one tuple." ) ; } ++ count ; } return count ;
public class AABBd { /** * Set the minimum corner coordinates . * @ param minX * the x coordinate of the minimum corner * @ param minY * the y coordinate of the minimum corner * @ param minZ * the z coordinate of the minimum corner * @ return this */ public AABBd setMin ( double minX , double minY , double minZ ) { } }
this . minX = minX ; this . minY = minY ; this . minZ = minZ ; return this ;
public class ParameterMetaData {

    /**
     * Builds the parameter definition for a scaled (decimal) parameter.
     * <p>
     * All attributes except the scale are looked up from the static per-SQL-type
     * tables (mapping, name, precision, sign); the caller-supplied scale overrides
     * the table default. The parameter is always IN-mode with unknown nullability.
     *
     * @param sqlType the SQL type for the parameter definition
     * @param scale the scale of the numeric parameter
     * @return the parameter definition for a number with the specified scale
     */
    public static ParameterDef Scaled(final int sqlType, final int scale) {
        return new ParameterDef(jdbcTypeMappings.get(sqlType),     // Java-side type mapping
                parameterModeIn,                                   // always an IN parameter
                sqlType,
                jdbcTypeNames.get(sqlType),                        // SQL type name
                jdbcTypePrecisions.get(sqlType),                   // default precision for the type
                scale,                                             // caller-specified scale
                parameterNullableUnknown,
                jdbcTypeSigns.get(sqlType));                       // signedness of the type
    }
}
public class PointDeserializer { /** * Deserialize the coordinates inside the json into { @ link Point } s capturing the same information . * Otherwise when deserializing , you ' ll most likely see this error : Required to handle the * " Expected BEGIN _ OBJECT but was BEGIN _ ARRAY " . * @ param json a class representing an element of Json * @ param typeOfT common superinterface for all types in the Java * @ param context Context for deserialization that is passed to a custom deserializer during * invocation of its { @ link JsonDeserializer # deserialize ( JsonElement , Type , * JsonDeserializationContext ) } method * @ return either { @ link Point } with an altitude or one that doesn ' t * @ since 1.0.0 */ @ Override public Point deserialize ( JsonElement json , Type typeOfT , JsonDeserializationContext context ) { } }
JsonArray rawCoordinates = json . getAsJsonArray ( ) ; double longitude = rawCoordinates . get ( 0 ) . getAsDouble ( ) ; double latitude = rawCoordinates . get ( 1 ) . getAsDouble ( ) ; // Includes altitude if ( rawCoordinates . size ( ) > 2 ) { double altitude = rawCoordinates . get ( 2 ) . getAsDouble ( ) ; return Point . fromLngLat ( longitude , latitude , altitude ) ; } // It doesn ' t have altitude return Point . fromLngLat ( longitude , latitude ) ;
public class MethodHash {

    /**
     * Computes an MD5 hash over the opcode sequence of the given method's bytecode.
     * Only opcodes are hashed (operands are ignored), so methods differing merely in
     * constant-pool indices can hash equal. Methods without code hash the empty sequence.
     *
     * @param method the method
     * @return this object, with {@code hash} populated
     */
    public MethodHash computeHash(Method method) {
        final MessageDigest digest = Util.getMD5Digest();

        byte[] code;
        if (method.getCode() == null || method.getCode().getCode() == null) {
            // Abstract/native methods have no bytecode: hash an empty byte sequence.
            code = new byte[0];
        } else {
            code = method.getCode().getCode();
        }

        // Feed each visited opcode (not its operands) into the digest.
        BytecodeScanner.Callback callback = (opcode, index) -> digest.update((byte) opcode);

        BytecodeScanner bytecodeScanner = new BytecodeScanner();
        bytecodeScanner.scan(code, callback);
        hash = digest.digest();

        return this;
    }
}
public class ExecutionImpl { /** * process instance / / / / / */ public void start ( Map < String , Object > variables ) { } }
if ( isProcessInstanceExecution ( ) ) { if ( startContext == null ) { startContext = new ProcessInstanceStartContext ( processDefinition . getInitial ( ) ) ; } } super . start ( variables ) ;
public class BaseL2Kernel { /** * Returns the squared L < sup > 2 < / sup > norm between two points from the cache values . * @ param i the first index in the vector list * @ param j the second index in the vector list * @ param vecs the list of vectors that make the collection * @ param cache the cache of values for each vector in the collection * @ return the squared norm | | x < sub > i < / sub > - x < sub > j < / sub > | | < sup > 2 < / sup > */ protected double getSqrdNorm ( int i , int j , List < ? extends Vec > vecs , List < Double > cache ) { } }
if ( cache == null ) return Math . pow ( vecs . get ( i ) . pNormDist ( 2.0 , vecs . get ( j ) ) , 2 ) ; return cache . get ( i ) + cache . get ( j ) - 2 * vecs . get ( i ) . dot ( vecs . get ( j ) ) ;
public class WeixinSupport {

    /**
     * Handles an incoming request from the WeChat server.
     * <p>
     * Parses the request XML into a map, dispatches on the message type (event vs.
     * ordinary message), builds the matching event/message object, and invokes the
     * corresponding handler. When a handler returns no reply, the registered
     * process-handler chain gets a chance. The reply (if any) has its from/to swapped
     * and is serialized to XML, optionally AES-encrypted when an AES key is configured.
     *
     * @param request the incoming HTTP request from the WeChat server
     * @return the reply in the XML format required by the WeChat API, or "" for no reply
     */
    public String processRequest(HttpServletRequest request) {
        // Parse (and, when configured, decrypt) the request XML into a flat map.
        Map<String, Object> reqMap = MessageUtil.parseXml(request, getToken(), getAppId(), getAESKey());
        String fromUserName = (String) reqMap.get("FromUserName");
        String toUserName = (String) reqMap.get("ToUserName");
        String msgType = (String) reqMap.get("MsgType");
        LOG.debug("收到消息,消息类型:{}", msgType);
        BaseMsg msg = null;

        if (msgType.equals(ReqType.EVENT)) {
            // ---- Event push branch ----
            String eventType = (String) reqMap.get("Event");
            String ticket = (String) reqMap.get("Ticket");
            QrCodeEvent qrCodeEvent = null;

            // A ticket marks a QR-code related event (scan, or subscribe-via-scan).
            if (isNotBlank(ticket)) {
                String eventKey = (String) reqMap.get("EventKey");
                LOG.debug("eventKey:{}", eventKey);
                LOG.debug("ticket:{}", ticket);
                qrCodeEvent = new QrCodeEvent(eventKey, ticket);
                buildBasicEvent(reqMap, qrCodeEvent);

                if (eventType.equals(EventType.SCAN)) {
                    msg = handleQrCodeEvent(qrCodeEvent);
                    if (isNull(msg)) {
                        msg = processEventHandle(qrCodeEvent);
                    }
                }
            }

            if (eventType.equals(EventType.SUBSCRIBE)) {
                // Subscribe may arrive with a QR-code payload (subscribe-via-scan);
                // reuse the QR event object in that case.
                BaseEvent event = new BaseEvent();
                if (qrCodeEvent != null) {
                    event = qrCodeEvent;
                } else {
                    buildBasicEvent(reqMap, event);
                }
                msg = handleSubscribe(event);
                if (isNull(msg)) {
                    msg = processEventHandle(event);
                }
            } else if (eventType.equals(EventType.UNSUBSCRIBE)) {
                BaseEvent event = new BaseEvent();
                buildBasicEvent(reqMap, event);
                msg = handleUnsubscribe(event);
                if (isNull(msg)) {
                    msg = processEventHandle(event);
                }
            } else if (eventType.equals(EventType.CLICK)) {
                // Custom menu click.
                String eventKey = (String) reqMap.get("EventKey");
                LOG.debug("eventKey:{}", eventKey);
                MenuEvent event = new MenuEvent(eventKey);
                buildBasicEvent(reqMap, event);
                msg = handleMenuClickEvent(event);
                if (isNull(msg)) {
                    msg = processEventHandle(event);
                }
            } else if (eventType.equals(EventType.VIEW)) {
                // Custom menu link (view) click.
                String eventKey = (String) reqMap.get("EventKey");
                LOG.debug("eventKey:{}", eventKey);
                MenuEvent event = new MenuEvent(eventKey);
                buildBasicEvent(reqMap, event);
                msg = handleMenuViewEvent(event);
                if (isNull(msg)) {
                    msg = processEventHandle(event);
                }
            } else if (eventType.equals(EventType.LOCATION)) {
                // Geographic position report.
                double latitude = Double.parseDouble((String) reqMap.get("Latitude"));
                double longitude = Double.parseDouble((String) reqMap.get("Longitude"));
                double precision = Double.parseDouble((String) reqMap.get("Precision"));
                LocationEvent event = new LocationEvent(latitude, longitude, precision);
                buildBasicEvent(reqMap, event);
                msg = handleLocationEvent(event);
                if (isNull(msg)) {
                    msg = processEventHandle(event);
                }
            } else if (EventType.SCANCODEPUSH.equals(eventType) || EventType.SCANCODEWAITMSG.equals(eventType)) {
                // Menu-triggered barcode scan events.
                String eventKey = (String) reqMap.get("EventKey");
                // NOTE(review): unchecked cast -- parseXml is expected to nest this element as a Map.
                Map<String, Object> scanCodeInfo = (Map<String, Object>) reqMap.get("ScanCodeInfo");
                String scanType = (String) scanCodeInfo.get("ScanType");
                String scanResult = (String) scanCodeInfo.get("ScanResult");
                ScanCodeEvent event = new ScanCodeEvent(eventKey, scanType, scanResult);
                buildBasicEvent(reqMap, event);
                msg = handleScanCodeEvent(event);
                if (isNull(msg)) {
                    msg = processEventHandle(event);
                }
            } else if (EventType.PICPHOTOORALBUM.equals(eventType) || EventType.PICSYSPHOTO.equals(eventType) || EventType.PICWEIXIN.equals(eventType)) {
                // Menu-triggered photo/album picture events.
                String eventKey = (String) reqMap.get("EventKey");
                Map<String, Object> sendPicsInfo = (Map<String, Object>) reqMap.get("SendPicsInfo");
                int count = Integer.parseInt((String) sendPicsInfo.get("Count"));
                List<Map> picList = (List) sendPicsInfo.get("PicList");
                SendPicsInfoEvent event = new SendPicsInfoEvent(eventKey, count, picList);
                buildBasicEvent(reqMap, event);
                msg = handlePSendPicsInfoEvent(event);
                if (isNull(msg)) {
                    msg = processEventHandle(event);
                }
            } else if (EventType.TEMPLATESENDJOBFINISH.equals(eventType)) {
                // Template message delivery result callback.
                String msgId = (String) reqMap.get("MsgID");
                String status = (String) reqMap.get("Status");
                TemplateMsgEvent event = new TemplateMsgEvent(msgId, status);
                buildBasicEvent(reqMap, event);
                msg = handleTemplateMsgEvent(event);
                if (isNull(msg)) {
                    msg = processEventHandle(event);
                }
            } else if (EventType.MASSSENDJOBFINISH.equals(eventType)) {
                // Mass message delivery result callback with per-category counters.
                String msgId = (String) reqMap.get("MsgID");
                String status = (String) reqMap.get("Status");
                Integer TotalCount = Integer.valueOf(String.valueOf(reqMap.get("TotalCount")));
                Integer filterCount = Integer.valueOf(String.valueOf(reqMap.get("FilterCount")));
                Integer sentCount = Integer.valueOf(String.valueOf(reqMap.get("SentCount")));
                Integer errorCount = Integer.valueOf(String.valueOf(reqMap.get("ErrorCount")));
                SendMessageEvent event = new SendMessageEvent(msgId, status, TotalCount, filterCount, sentCount, errorCount);
                buildBasicEvent(reqMap, event);
                msg = callBackAllMessage(event);
                if (isNull(msg)) {
                    msg = processEventHandle(event);
                }
            }
        } else {
            // ---- Ordinary message branch ----
            if (msgType.equals(ReqType.TEXT)) {
                String content = (String) reqMap.get("Content");
                LOG.debug("文本消息内容:{}", content);
                TextReqMsg textReqMsg = new TextReqMsg(content);
                buildBasicReqMsg(reqMap, textReqMsg);
                msg = handleTextMsg(textReqMsg);
                if (isNull(msg)) {
                    msg = processMessageHandle(textReqMsg);
                }
            } else if (msgType.equals(ReqType.IMAGE)) {
                String picUrl = (String) reqMap.get("PicUrl");
                String mediaId = (String) reqMap.get("MediaId");
                ImageReqMsg imageReqMsg = new ImageReqMsg(picUrl, mediaId);
                buildBasicReqMsg(reqMap, imageReqMsg);
                msg = handleImageMsg(imageReqMsg);
                if (isNull(msg)) {
                    msg = processMessageHandle(imageReqMsg);
                }
            } else if (msgType.equals(ReqType.VOICE)) {
                String format = (String) reqMap.get("Format");
                String mediaId = (String) reqMap.get("MediaId");
                // Recognition is only present when voice recognition is enabled on the account.
                String recognition = (String) reqMap.get("Recognition");
                VoiceReqMsg voiceReqMsg = new VoiceReqMsg(mediaId, format, recognition);
                buildBasicReqMsg(reqMap, voiceReqMsg);
                msg = handleVoiceMsg(voiceReqMsg);
                if (isNull(msg)) {
                    msg = processMessageHandle(voiceReqMsg);
                }
            } else if (msgType.equals(ReqType.VIDEO)) {
                String thumbMediaId = (String) reqMap.get("ThumbMediaId");
                String mediaId = (String) reqMap.get("MediaId");
                VideoReqMsg videoReqMsg = new VideoReqMsg(mediaId, thumbMediaId);
                buildBasicReqMsg(reqMap, videoReqMsg);
                msg = handleVideoMsg(videoReqMsg);
                if (isNull(msg)) {
                    msg = processMessageHandle(videoReqMsg);
                }
            } else if (msgType.equals(ReqType.SHORT_VIDEO)) {
                String thumbMediaId = (String) reqMap.get("ThumbMediaId");
                String mediaId = (String) reqMap.get("MediaId");
                VideoReqMsg videoReqMsg = new VideoReqMsg(mediaId, thumbMediaId);
                buildBasicReqMsg(reqMap, videoReqMsg);
                // NOTE(review): handler name contains a typo ("hadnle") -- it is the
                // real method name elsewhere in this class, so it must stay as-is here.
                msg = hadnleShortVideoMsg(videoReqMsg);
                if (isNull(msg)) {
                    msg = processMessageHandle(videoReqMsg);
                }
            } else if (msgType.equals(ReqType.LOCATION)) {
                double locationX = Double.parseDouble((String) reqMap.get("Location_X"));
                double locationY = Double.parseDouble((String) reqMap.get("Location_Y"));
                int scale = Integer.parseInt((String) reqMap.get("Scale"));
                String label = (String) reqMap.get("Label");
                LocationReqMsg locationReqMsg = new LocationReqMsg(locationX, locationY, scale, label);
                buildBasicReqMsg(reqMap, locationReqMsg);
                msg = handleLocationMsg(locationReqMsg);
                if (isNull(msg)) {
                    msg = processMessageHandle(locationReqMsg);
                }
            } else if (msgType.equals(ReqType.LINK)) {
                String title = (String) reqMap.get("Title");
                String description = (String) reqMap.get("Description");
                String url = (String) reqMap.get("Url");
                LOG.debug("链接消息地址:{}", url);
                LinkReqMsg linkReqMsg = new LinkReqMsg(title, description, url);
                buildBasicReqMsg(reqMap, linkReqMsg);
                msg = handleLinkMsg(linkReqMsg);
                if (isNull(msg)) {
                    msg = processMessageHandle(linkReqMsg);
                }
            }
        }

        // Serialize the reply (if any handler produced one): swap from/to, render XML,
        // and encrypt when an AES key is configured for safe mode.
        String result = "";
        if (nonNull(msg)) {
            msg.setFromUserName(toUserName);
            msg.setToUserName(fromUserName);
            result = msg.toXml();
            if (StrUtil.isNotBlank(getAESKey())) {
                try {
                    WXBizMsgCrypt pc = new WXBizMsgCrypt(getToken(), getAESKey(), getAppId());
                    result = pc.encryptMsg(result, request.getParameter("timestamp"), request.getParameter("nonce"));
                    LOG.debug("加密后密文:{}", result);
                } catch (AesException e) {
                    // Encryption failure: log it; the plaintext reply built above is returned.
                    LOG.error("加密异常", e);
                }
            }
        }
        return result;
    }
}
public class KeyEncoder {

    /**
     * Encodes the given Float object into exactly 4 bytes for descending
     * order. A non-canonical NaN value is used to represent null.
     *
     * @param value optional Float value to encode
     * @param dst destination for encoded bytes
     * @param dstOffset offset into destination array
     */
    public static void encodeDesc(Float value, byte[] dst, int dstOffset) {
        if (value == null) {
            // Null sentinel: the non-canonical NaN bit pattern 0x7fffffff, complemented
            // (~) because descending order flips all bits of the encoded form.
            DataEncoder.encode(~0x7fffffff, dst, dstOffset);
        } else {
            // Delegate to the primitive-float descending encoder.
            encodeDesc(value.floatValue(), dst, dstOffset);
        }
    }
}
public class ImplicitInjectionResolver { /** * < p > Determines if this { @ link Field } injection falls into * { @ link InjectionCategory # RESOURCE _ INTEGER } . < / p > * @ param context * the context from which the injection resolution is requested * @ param field * the { @ link Field } whose { @ link InjectionCategory } is to * be resolved * < br > < br > * @ return { @ code true } if it ' s { @ link InjectionCategory # RESOURCE _ INTEGER } , * < br > < br > * @ since 1.1.0 */ private boolean isCategoryResourceInteger ( Context context , Field field ) { } }
return ( ( Integer . class . isAssignableFrom ( field . getType ( ) ) || int . class . isAssignableFrom ( field . getType ( ) ) ) && ( ( ReflectiveR . integer ( context , field . getName ( ) ) ) != 0 ) ) ? true : false ;
public class PropertiesManagerCore { /** * Delete the property and values from a specified GeoPackage * @ param geoPackage * GeoPackage name * @ param property * property name * @ return true if deleted */ public boolean deleteProperty ( String geoPackage , String property ) { } }
boolean deleted = false ; PropertiesCoreExtension < T , ? , ? , ? > properties = propertiesMap . get ( geoPackage ) ; if ( properties != null ) { deleted = properties . deleteProperty ( property ) > 0 ; } return deleted ;
public class GroovyRowResult {

    /**
     * Associates the specified value with the specified property name in this result.
     *
     * @param key the property name for the result
     * @param value the property value for the result
     * @return the previous value associated with <tt>key</tt>, or
     *         <tt>null</tt> if there was no mapping for <tt>key</tt>.
     *         (A <tt>null</tt> return can also indicate that the map
     *         previously associated <tt>null</tt> with <tt>key</tt>.)
     */
    @SuppressWarnings("unchecked")
    public Object put(Object key, Object value) {
        // Remove any existing entry first so a key differing only in case cannot
        // coexist with the new one (remove performs the case-insensitive lookup);
        // its return value is the previous mapping we must hand back.
        Object orig = remove(key);
        result.put(key, value);
        return orig;
    }
}
public class Settings {

    /**
     * Probes whether the given class is loadable to report a feature's availability,
     * and on success truncates the class name by {@code truncate} package segments
     * to derive a bundle id for display.
     *
     * @param friendlyName human-readable feature name for the report line
     * @param className fully-qualified class to probe
     * @param truncate number of trailing name segments to strip off for the bundle id
     * @return a "+ name - bundle" line when available, "- name - not available" otherwise
     */
    private static String getBundle(String friendlyName, String className, int truncate) {
        try {
            // Availability probe: throws if the class (or a dependency) is missing.
            cl.loadClass(className);
            // Walk backwards over 'truncate' dots to cut trailing name segments.
            int start = className.length();
            for (int i = 0; i < truncate; ++i)
                start = className.lastIndexOf('.', start - 1);
            final String bundle = className.substring(0, start);
            return "+ " + friendlyName + align(friendlyName) + "- " + bundle;
        } catch (final ClassNotFoundException e) {
            // Deliberately swallowed: absence is the negative probe result, not an error.
        } catch (final NoClassDefFoundError e) {
            // Same: a missing transitive dependency also means "not available".
        }
        return "- " + friendlyName + align(friendlyName) + "- not available";
    }
}
public class Config {

    /**
     * Returns the DurableExecutorConfig for the given name, creating one
     * if necessary and adding it to the collection of known configurations.
     * The configuration is found by matching the configuration name pattern to the
     * provided {@code name} without the partition qualifier (the part after {@code '@'}).
     * If no configuration matches, one is created by cloning the {@code "default"}
     * configuration and added to the collection. Intended for fluent configuration
     * before the Hazelcast instance is created; later additions may be ignored.
     *
     * @param name name of the durable executor config
     * @return the durable executor configuration
     * @throws ConfigurationException if ambiguous configurations are found
     * @see StringPartitioningStrategy#getBaseName(java.lang.String)
     * @see #setConfigPatternMatcher(ConfigPatternMatcher)
     * @see #getConfigPatternMatcher()
     */
    public DurableExecutorConfig getDurableExecutorConfig(String name) {
        // All lookup/clone/add semantics live in the shared ConfigUtils helper.
        return ConfigUtils.getConfig(configPatternMatcher, durableExecutorConfigs, name, DurableExecutorConfig.class);
    }
}
public class JsonResponse {

    /**
     * Builds a response whose body is the given raw JSON string, emitted as-is
     * (no serialization of a Java object). Part of the public user interface.
     *
     * @param <OBJ> phantom type of the logical payload (the cast is unchecked; the
     *        DUMMY placeholder stands in for the absent typed object)
     * @param json the pre-rendered JSON text to send directly
     * @return a response wrapper carrying the raw JSON
     */
    @SuppressWarnings("unchecked")
    public static <OBJ> JsonResponse<OBJ> asJsonDirectly(String json) {
        // user interface
        return (JsonResponse<OBJ>) new JsonResponse<Object>(DUMMY).ofJsonDirectly(json);
    }
}
public class MongoDBNativeQuery { /** * This method insert a single document into a collection . Params w and wtimeout are read from QueryOptions . * @ param dbObject * @ param options * @ return */ public WriteResult insert ( DBObject dbObject , QueryOptions options ) { } }
if ( options != null && ( options . containsKey ( "w" ) || options . containsKey ( "wtimeout" ) ) ) { // Some info about params : http : / / api . mongodb . org / java / current / com / mongodb / WriteConcern . html return dbCollection . insert ( dbObject , new WriteConcern ( options . getInt ( "w" , 1 ) , options . getInt ( "wtimeout" , 0 ) ) ) ; } else { return dbCollection . insert ( dbObject ) ; }
public class CouchDBSchemaManager { /** * Check for db existence . * @ return true , if successful * @ throws ClientProtocolException * the client protocol exception * @ throws IOException * Signals that an I / O exception has occurred . * @ throws URISyntaxException * the URI syntax exception */ private boolean checkForDBExistence ( ) throws ClientProtocolException , IOException , URISyntaxException { } }
URI uri = new URI ( CouchDBConstants . PROTOCOL , null , httpHost . getHostName ( ) , httpHost . getPort ( ) , CouchDBConstants . URL_SEPARATOR + databaseName . toLowerCase ( ) , null , null ) ; HttpGet get = new HttpGet ( uri ) ; HttpResponse getRes = null ; try { // creating database . getRes = httpClient . execute ( httpHost , get , CouchDBUtils . getContext ( httpHost ) ) ; if ( getRes . getStatusLine ( ) . getStatusCode ( ) == HttpStatus . SC_OK ) { return true ; } return false ; } finally { CouchDBUtils . closeContent ( getRes ) ; }
public class CassandraResourceTreeWalker {

    /**
     * Visits all nodes in the resource tree below the given resource using
     * depth-first search. The root itself is never passed to the visitor;
     * traversal stops early as soon as the visitor returns {@code false}.
     */
    public void depthFirstSearch(Context context, SearchResultVisitor visitor, Resource root) {
        ArrayDeque<SearchResults.Result> stack = Queues.newArrayDeque();
        // Build an instance of a SearchResult for the root resource
        // but don't invoke the visitor with it.
        boolean skipFirstVisit = true;
        SearchResults initialResults = new SearchResults();
        initialResults.addResult(root, new ArrayList<String>(0));
        // Seeding via add() is equivalent to push() here since the deque is empty.
        stack.add(initialResults.iterator().next());
        while (!stack.isEmpty()) {
            SearchResults.Result r = stack.pop();
            if (skipFirstVisit) {
                // First pop is always the synthetic root result — skip it.
                skipFirstVisit = false;
            } else {
                if (!visitor.visit(r)) {
                    // Visitor requested early termination of the traversal.
                    return;
                }
            }
            // Fetch the children of the current resource via the searcher.
            // Reverse the order of the results so we walk the left-most
            // branches first (push() reverses again, restoring left-to-right).
            ImmutableList<SearchResults.Result> results = ImmutableList.copyOf(
                    m_searcher.search(context,
                            matchKeyAndValue(Constants.PARENT_TERM_FIELD, r.getResource().getId())));
            for (SearchResults.Result result : results.reverse()) {
                stack.push(result);
            }
        }
    }
}
public class DBCluster { /** * A list of log types that this DB cluster is configured to export to CloudWatch Logs . * Log types vary by DB engine . For information about the log types for each DB engine , see < a * href = " https : / / docs . aws . amazon . com / AmazonRDS / latest / AuroraUserGuide / USER _ LogAccess . html " > Amazon RDS Database Log * Files < / a > in the < i > Amazon Aurora User Guide . < / i > * @ param enabledCloudwatchLogsExports * A list of log types that this DB cluster is configured to export to CloudWatch Logs . < / p > * Log types vary by DB engine . For information about the log types for each DB engine , see < a * href = " https : / / docs . aws . amazon . com / AmazonRDS / latest / AuroraUserGuide / USER _ LogAccess . html " > Amazon RDS * Database Log Files < / a > in the < i > Amazon Aurora User Guide . < / i > */ public void setEnabledCloudwatchLogsExports ( java . util . Collection < String > enabledCloudwatchLogsExports ) { } }
if ( enabledCloudwatchLogsExports == null ) { this . enabledCloudwatchLogsExports = null ; return ; } this . enabledCloudwatchLogsExports = new com . amazonaws . internal . SdkInternalList < String > ( enabledCloudwatchLogsExports ) ;
public class ByteSource {

    /**
     * Counts the bytes in the given input stream using skip if possible. Returns SKIP_FAILED if the
     * first call to skip threw, in which case skip may just not be supported.
     * <p>
     * NOTE(review): this body signals the skip-unsupported case by throwing
     * {@code IOException} rather than returning SKIP_FAILED — confirm the
     * caller treats that exception as the fallback trigger.
     */
    private long countBySkipping(InputStream in) throws IOException {
        long count = 0;
        while (true) {
            // don't try to skip more than available()
            // things may work really wrong with FileInputStream otherwise
            long skipped = in.skip(Math.min(in.available(), Integer.MAX_VALUE));
            if (skipped <= 0) {
                // skip() made no progress: probe with a single-byte read to
                // distinguish end-of-stream from a skip-unfriendly stream.
                if (in.read() == -1) {
                    return count;
                } else if (count == 0 && in.available() == 0) {
                    // if available is still zero after reading a single byte, it
                    // will probably always be zero, so we should countByReading
                    throw new IOException();
                }
                // The probe read consumed one real byte — account for it.
                count++;
            } else {
                count += skipped;
            }
        }
    }
}
public class DefaultPromiseAdapter { /** * Creates a vert . x { @ link AsyncResult } handler for a { @ link Resolver } * @ param resolver * @ param < T > * @ return */ @ Override public < T > Handler < AsyncResult < T > > toHandler ( Resolver < T > resolver ) { } }
return result -> { if ( result . succeeded ( ) ) { resolver . resolve ( result . result ( ) ) ; } else { resolver . reject ( result . cause ( ) ) ; } } ;
public class CommercePriceEntryServiceBaseImpl {

    /**
     * Sets the commerce price list account rel remote service.
     * Plain injection setter; no validation is performed on the argument.
     *
     * @param commercePriceListAccountRelService the commerce price list account rel remote service
     */
    public void setCommercePriceListAccountRelService(
            com.liferay.commerce.price.list.service.CommercePriceListAccountRelService commercePriceListAccountRelService) {
        this.commercePriceListAccountRelService = commercePriceListAccountRelService;
    }
}