signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class CommerceDiscountPersistenceImpl { /** * Returns all the commerce discounts where groupId = & # 63 ; and couponCode = & # 63 ; .
* @ param groupId the group ID
* @ param couponCode the coupon code
* @ return the matching commerce discounts */
@ Override public List < CommerceDiscount > findByG_C ( long groupId , String couponCode ) { } } | return findByG_C ( groupId , couponCode , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ; |
public class INodeDirectory { /** * Add a child inode to the directory .
* @ param node INode to insert
* @ param inheritPermission inherit permission from parent ?
* @ param propagateModTime set parent ' s mod time to that of a child ?
* @ param childIndex index of the inserted child if known
* @ return null if the child with this name already exists ;
* node , otherwise */
< T extends INode > T addChild ( final T node , boolean inheritPermission , boolean propagateModTime , int childIndex ) { } } | if ( inheritPermission ) { FsPermission p = getFsPermission ( ) ; // make sure the permission has wx for the user
if ( ! p . getUserAction ( ) . implies ( FsAction . WRITE_EXECUTE ) ) { p = new FsPermission ( p . getUserAction ( ) . or ( FsAction . WRITE_EXECUTE ) , p . getGroupAction ( ) , p . getOtherAction ( ) ) ; } node . setPermission ( p ) ; } if ( children == null ) { children = new ArrayList < INode > ( DEFAULT_FILES_PER_DIRECTORY ) ; } int index ; if ( childIndex >= 0 ) { index = childIndex ; } else { int low = Collections . binarySearch ( children , node . name ) ; if ( low >= 0 ) return null ; index = - low - 1 ; } node . parent = this ; children . add ( index , node ) ; if ( propagateModTime ) { // update modification time of the parent directory
setModificationTime ( node . getModificationTime ( ) ) ; } if ( childIndex < 0 ) { // if child Index is provided ( > = 0 ) , this is a result of
// loading the image , and the group name is set , no need
// to check
if ( node . getGroupName ( ) == null ) { node . setGroup ( getGroupName ( ) ) ; } } return node ; |
public class GenRestBuilderProcessor { /** * Generate the rest service builder
* @ param restService The rest service */
private TypeSpec generateBuilder ( RestService restService ) { } } | TypeSpec . Builder typeBuilder = TypeSpec . classBuilder ( restService . getBuilderSimpleClassName ( ) ) . addModifiers ( Modifier . PUBLIC , Modifier . FINAL ) . addJavadoc ( "Generated REST service builder for {@link $L}.\n" , restService . getTypeElement ( ) . getQualifiedName ( ) ) . addMethod ( MethodSpec . constructorBuilder ( ) . addModifiers ( Modifier . PRIVATE ) . build ( ) ) ; Map < TypeMirror , MethodSpec > mapperGetters = buildMappers ( typeBuilder , restService ) ; for ( RestServiceMethod method : restService . getMethods ( ) ) { buildMethod ( typeBuilder , mapperGetters , method ) ; } return typeBuilder . build ( ) ; |
public class OptionsMapper { /** * Map environment variable value to option .
* @ param name environment variable name
* @ param option option
* @ param converter value converter ( may be null to use default converters )
* @ param < V > helper value type
* @ param < T > helper option type
* @ return mapper instance for chained calls */
public < V , T extends Enum & Option > OptionsMapper env ( final String name , final T option , final Function < String , V > converter ) { } } | register ( "env: " + name , option , System . getenv ( name ) , converter ) ; return this ; |
public class OperationsInner { /** * Lists all the available Cognitive Services account operations .
* @ param nextPageLink The NextLink from the previous successful call to List operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; OperationEntityInner & gt ; object */
public Observable < Page < OperationEntityInner > > listNextAsync ( final String nextPageLink ) { } } | return listNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < OperationEntityInner > > , Page < OperationEntityInner > > ( ) { @ Override public Page < OperationEntityInner > call ( ServiceResponse < Page < OperationEntityInner > > response ) { return response . body ( ) ; } } ) ; |
public class DriverManagerAccessorSupport { /** * Definines the class using the given ClassLoader and ProtectionDomain */
public static Class < ? > define ( ClassLoader loader ) { } } | try { SecurityManager sm = System . getSecurityManager ( ) ; if ( sm != null ) { String packageName = DriverManagerAccessor . class . getPackage ( ) . getName ( ) ; RuntimePermission permission = new RuntimePermission ( "defineClassInPackage." + packageName ) ; sm . checkPermission ( permission ) ; } byte [ ] b = loadBytes ( ) ; Object [ ] args = new Object [ ] { DriverManagerAccessor . class . getName ( ) . replace ( '/' , '.' ) , b , new Integer ( 0 ) , new Integer ( b . length ) } ; Class < ? > clazz = ( Class < ? > ) defineClass . invoke ( loader , args ) ; return clazz ; } catch ( RuntimeException e ) { // we have to attempt to define the class first
// otherwise it may be returned by a different class loader
try { return loader . loadClass ( DriverManagerAccessor . class . getName ( ) ) ; } catch ( ClassNotFoundException ex ) { // ignore
} throw e ; } catch ( Exception e ) { try { return loader . loadClass ( DriverManagerAccessor . class . getName ( ) ) ; } catch ( ClassNotFoundException ex ) { // ignore
} throw new RuntimeException ( e ) ; } |
public class ProfileService { /** * { @ inheritDoc } */
@ Override public boolean isActive ( Object context , Profile profile ) { } } | if ( context == null || profile == null ) { StringBuilder builder = new StringBuilder ( ) . append ( "Failed to determine profile active profiles. " ) ; if ( context == null ) builder . append ( "Context cannot be null. " ) ; if ( profile == null ) builder . append ( "Profile cannot be null. " ) ; throw new IllegalArgumentException ( builder . toString ( ) ) ; } if ( context . getClass ( ) . isAnnotationPresent ( IncludeProfiles . class ) ) { IncludeProfiles profiles = context . getClass ( ) . getAnnotation ( IncludeProfiles . class ) ; Profile [ ] activeProfiles = profiles . value ( ) ; for ( Profile currentProfile : activeProfiles ) if ( currentProfile . equals ( profile ) ) return true ; return false ; } if ( context . getClass ( ) . isAnnotationPresent ( ExcludeProfiles . class ) ) { ExcludeProfiles profiles = context . getClass ( ) . getAnnotation ( ExcludeProfiles . class ) ; Profile [ ] inactiveProfiles = profiles . value ( ) ; for ( Profile currentProfile : inactiveProfiles ) if ( currentProfile . equals ( profile ) ) return false ; return true ; } else { return true ; } |
public class Es6RewriteModules { /** * Add alias nodes to the symbol table as they going to be removed by rewriter . Example aliases :
* < pre >
* import * as foo from ' . / foo ' ;
* import { doBar } from ' . / bar ' ;
* console . log ( doBar ) ;
* < / pre >
* @ param n Alias node . In the example above alias nodes are foo , doBar , and doBar .
* @ param module Name of the module currently being processed . */
private void maybeAddAliasToSymbolTable ( Node n , String module ) { } } | if ( preprocessorSymbolTable == null ) { return ; } n . putBooleanProp ( Node . MODULE_ALIAS , true ) ; // Alias can be used in js types . Types have node type STRING and not NAME so we have to
// use their name as string .
String nodeName = n . isString ( ) || n . isImportStar ( ) ? n . getString ( ) : preprocessorSymbolTable . getQualifiedName ( n ) ; // We need to include module as part of the name because aliases are local to current module .
// Aliases with the same name from different module should be completely different entities .
String name = "alias_" + module + "_" + nodeName ; preprocessorSymbolTable . addReference ( n , name ) ; |
public class SendGrid { /** * Attempt an API call . This method executes the API call asynchronously
* on an internal thread pool . If the call is rate limited , the thread
* will retry up to the maximum configured time . The supplied callback
* will be called in the event of an error , or a successful response .
* @ param request the API request .
* @ param callback the callback . */
public void attempt ( final Request request , final APICallback callback ) { } } | this . pool . execute ( new Runnable ( ) { @ Override public void run ( ) { Response response ; // Retry until the retry limit has been reached .
for ( int i = 0 ; i < rateLimitRetry ; ++ i ) { try { response = api ( request ) ; } catch ( IOException ex ) { // Stop retrying if there is a network error .
callback . error ( ex ) ; return ; } // We have been rate limited .
if ( response . getStatusCode ( ) == RATE_LIMIT_RESPONSE_CODE ) { try { Thread . sleep ( rateLimitSleep ) ; } catch ( InterruptedException ex ) { // Can safely ignore this exception and retry .
} } else { callback . response ( response ) ; return ; } } // Retries exhausted . Return error .
callback . error ( new RateLimitException ( request , rateLimitRetry ) ) ; } } ) ; |
public class Element { /** * Create a new element by tag name , and add it as the last child .
* @ param tagName the name of the tag ( e . g . { @ code div } ) .
* @ return the new element , to allow you to add content to it , e . g . :
* { @ code parent . appendElement ( " h1 " ) . attr ( " id " , " header " ) . text ( " Welcome " ) ; } */
public Element appendElement ( String tagName ) { } } | Element child = new Element ( Tag . valueOf ( tagName , NodeUtils . parser ( this ) . settings ( ) ) , baseUri ( ) ) ; appendChild ( child ) ; return child ; |
public class ScanRecord { /** * Returns the service data byte array associated with the { @ code serviceUuid } . Returns
* { @ code null } if the { @ code serviceDataUuid } is not found . */
@ Nullable public byte [ ] getServiceData ( @ NonNull final ParcelUuid serviceDataUuid ) { } } | // noinspection ConstantConditions
if ( serviceDataUuid == null || serviceData == null ) { return null ; } return serviceData . get ( serviceDataUuid ) ; |
public class AmazonPinpointClient { /** * Returns information about a segment version .
* @ param getSegmentVersionRequest
* @ return Result of the GetSegmentVersion operation returned by the service .
* @ throws BadRequestException
* 400 response
* @ throws InternalServerErrorException
* 500 response
* @ throws ForbiddenException
* 403 response
* @ throws NotFoundException
* 404 response
* @ throws MethodNotAllowedException
* 405 response
* @ throws TooManyRequestsException
* 429 response
* @ sample AmazonPinpoint . GetSegmentVersion
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / pinpoint - 2016-12-01 / GetSegmentVersion " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public GetSegmentVersionResult getSegmentVersion ( GetSegmentVersionRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeGetSegmentVersion ( request ) ; |
public class WorkbooksInner { /** * Get all Workbooks defined within a specified resource group and category .
* @ param resourceGroupName The name of the resource group .
* @ param category Category of workbook to return . Possible values include : ' workbook ' , ' TSG ' , ' performance ' , ' retention '
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws WorkbookErrorException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the List & lt ; WorkbookInner & gt ; object if successful . */
public List < WorkbookInner > listByResourceGroup ( String resourceGroupName , CategoryType category ) { } } | return listByResourceGroupWithServiceResponseAsync ( resourceGroupName , category ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class ICUHumanize { /** * Same as { @ link # parseNumber ( String ) parseNumber } for the specified
* locale .
* @ param text
* String containing a spelled out number .
* @ param locale
* Target locale
* @ return Text converted to Number
* @ throws ParseException */
public static Number parseNumber ( final String text , final Locale locale ) throws ParseException { } } | return withinLocale ( new Callable < Number > ( ) { public Number call ( ) throws Exception { return parseNumber ( text ) ; } } , locale ) ; |
public class CommerceSubscriptionEntryPersistenceImpl { /** * Returns the first commerce subscription entry in the ordered set where subscriptionStatus = & # 63 ; .
* @ param subscriptionStatus the subscription status
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching commerce subscription entry , or < code > null < / code > if a matching commerce subscription entry could not be found */
@ Override public CommerceSubscriptionEntry fetchBySubscriptionStatus_First ( int subscriptionStatus , OrderByComparator < CommerceSubscriptionEntry > orderByComparator ) { } } | List < CommerceSubscriptionEntry > list = findBySubscriptionStatus ( subscriptionStatus , 0 , 1 , orderByComparator ) ; if ( ! list . isEmpty ( ) ) { return list . get ( 0 ) ; } return null ; |
public class CmsXmlContainerPageFactory { /** * Create a new instance of an container page based on the given default content ,
* that will have all language nodes of the default content and ensures the presence of the given locale . < p >
* The given encoding is used when marshalling the XML again later . < p >
* @ param cms the current users OpenCms content
* @ param locale the locale to generate the default content for
* @ param modelUri the absolute path to the container page file acting as model
* @ throws CmsException in case the model file is not found or not valid
* @ return the created container page */
public static CmsXmlContainerPage createDocument ( CmsObject cms , Locale locale , String modelUri ) throws CmsException { } } | // create the XML content
CmsXmlContainerPage content = new CmsXmlContainerPage ( cms , locale , modelUri ) ; // call prepare for use content handler and return the result
return ( CmsXmlContainerPage ) content . getHandler ( ) . prepareForUse ( cms , content ) ; |
public class MaybeLens { /** * Given a lens and a default < code > B < / code > , lift < code > B < / code > into { @ link Maybe } .
* Note that this lens is NOT lawful , since " putting back what you got changes nothing " fails for any value
* < code > B < / code > where < code > S < / code > is { @ link Maybe # nothing ( ) } .
* @ param lens the lens
* @ param defaultB the B to use if { @ link Maybe # nothing ( ) } is given
* @ param < S > the type of the " larger " value for reading
* @ param < T > the type of the " larger " value for putting
* @ param < A > the type of the " smaller " value that is read
* @ param < B > the type of the " smaller " update value
* @ return the lens with B lifted */
public static < S , T , A , B > Lens < S , T , A , Maybe < B > > liftB ( Lens < S , T , A , B > lens , B defaultB ) { } } | return lens . mapB ( m -> m . orElse ( defaultB ) ) ; |
public class NaiveBayesClassifier { /** * Split the data between trainable and wrong . */
public static void splitData ( final String originalTrainingDataFile ) { } } | List < Tuple > trainingData = NaiveBayesClassifier . readTrainingData ( originalTrainingDataFile , "\\s" ) ; List < Tuple > wrongData = new ArrayList < > ( ) ; int lastTrainingDataSize ; int iterCount = 0 ; do { System . out . println ( "Iteration:\t" + ( ++ iterCount ) ) ; lastTrainingDataSize = trainingData . size ( ) ; NaiveBayesClassifier nbc = new NaiveBayesClassifier ( ) ; nbc . train ( trainingData ) ; Iterator < Tuple > trainingDataIter = trainingData . iterator ( ) ; while ( trainingDataIter . hasNext ( ) ) { Tuple t = trainingDataIter . next ( ) ; String actual = nbc . predictLabel ( t ) ; if ( ! t . label . equals ( actual ) && ! t . label . equals ( "1" ) ) { // preserve 1 since too few .
wrongData . add ( t ) ; trainingDataIter . remove ( ) ; } } Iterator < Tuple > wrongDataIter = wrongData . iterator ( ) ; while ( wrongDataIter . hasNext ( ) ) { Tuple t = wrongDataIter . next ( ) ; String actual = nbc . predictLabel ( t ) ; if ( t . label . equals ( actual ) ) { trainingData . add ( t ) ; wrongDataIter . remove ( ) ; } } } while ( trainingData . size ( ) != lastTrainingDataSize ) ; writeToFile ( trainingData , originalTrainingDataFile + ".aligned" ) ; writeToFile ( wrongData , originalTrainingDataFile + ".wrong" ) ; |
public class AbstractInteraction { /** * Trigger reverse .
* @ param type
* the type */
protected boolean triggerReverse ( Type type , Throwable e ) { } } | return trigger ( Lists . reverse ( getAllListeners ( ) ) , type , e ) ; |
public class ExtensionDescriptor { /** * ExtDescrTagEndRange = 0xFE */
static int [ ] allTags ( ) { } } | int [ ] ints = new int [ 0xFE - 0x6A ] ; for ( int i = 0x6A ; i < 0xFE ; i ++ ) { final int pos = i - 0x6A ; LOG . trace ( "pos: {}" , pos ) ; ints [ pos ] = i ; } return ints ; |
public class forwardingsession { /** * Use this API to update forwardingsession . */
public static base_response update ( nitro_service client , forwardingsession resource ) throws Exception { } } | forwardingsession updateresource = new forwardingsession ( ) ; updateresource . name = resource . name ; updateresource . connfailover = resource . connfailover ; return updateresource . update_resource ( client ) ; |
public class UdpClient { /** * The host to which this client should connect .
* @ param host The host to connect to .
* @ return a new { @ link UdpClient } */
public final UdpClient host ( String host ) { } } | Objects . requireNonNull ( host , "host" ) ; return bootstrap ( b -> b . remoteAddress ( host , getPort ( b ) ) ) ; |
public class SmtpMailer { /** * Check to see if email should be rate limited , and if so , send a rate limit
* email notification . Next attempt to email will immediately return .
* @ param destination collection of enum values used to specify who will receive this email .
* @ param request SingularityRequest this email is about .
* @ param emailType what the email is about ( e . g . TASK _ FAILED ) .
* @ param actionTaker the user taking the action
* @ param subject the subject line of the email .
* @ param body the body of the email . */
private void queueMail ( final Collection < SingularityEmailDestination > destination , final SingularityRequest request , final SingularityEmailType emailType , final Optional < String > actionTaker , String subject , String body ) { } } | RateLimitResult result = checkRateLimitForMail ( request , emailType ) ; if ( result == RateLimitResult . DONT_SEND_MAIL_IN_COOLDOWN ) { return ; } if ( result == RateLimitResult . SEND_COOLDOWN_STARTED_MAIL ) { subject = String . format ( "%s notifications for %s are being rate limited" , emailType . name ( ) , request . getId ( ) ) ; body = Jade4J . render ( rateLimitedTemplate , getRateLimitTemplateProperties ( request , emailType ) ) ; } final Set < String > toList = new HashSet < > ( ) ; final Set < String > ccList = new HashSet < > ( ) ; // Decide where to send this email .
if ( destination . contains ( SingularityEmailDestination . OWNERS ) && request . getOwners ( ) . isPresent ( ) && ! request . getOwners ( ) . get ( ) . isEmpty ( ) ) { toList . addAll ( request . getOwners ( ) . get ( ) ) ; } if ( destination . contains ( SingularityEmailDestination . ADMINS ) && ! smtpConfiguration . getAdmins ( ) . isEmpty ( ) ) { if ( toList . isEmpty ( ) ) { toList . addAll ( smtpConfiguration . getAdmins ( ) ) ; } else { ccList . addAll ( smtpConfiguration . getAdmins ( ) ) ; } } if ( actionTaker . isPresent ( ) && ! Strings . isNullOrEmpty ( actionTaker . get ( ) ) ) { if ( destination . contains ( SingularityEmailDestination . ACTION_TAKER ) ) { toList . add ( actionTaker . get ( ) ) ; } else { final Iterator < String > i = toList . iterator ( ) ; while ( i . hasNext ( ) ) { if ( actionTaker . get ( ) . equalsIgnoreCase ( i . next ( ) ) ) { i . remove ( ) ; } } } } Set < String > emailBlacklist = Sets . newHashSet ( notificationsManager . getBlacklist ( ) ) ; toList . removeAll ( emailBlacklist ) ; ccList . removeAll ( emailBlacklist ) ; smtpSender . queueMail ( Lists . newArrayList ( toList ) , Lists . newArrayList ( ccList ) , subject , body ) ; |
public class Bits { /** * Pad left bytes byte [ ] .
* @ param src the src
* @ param bytes the bytes
* @ return the byte [ ] */
public static byte [ ] padLeftBytes ( final byte [ ] src , final int bytes ) { } } | final byte [ ] dst = new byte [ bytes ] ; for ( int i = 1 ; i <= src . length ; i ++ ) { dst [ dst . length - i ] = src [ src . length - i ] ; } return dst ; |
public class CommerceWishListLocalServiceBaseImpl { /** * Returns a range of all the commerce wish lists .
* Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link com . liferay . portal . kernel . dao . orm . QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link com . liferay . portal . kernel . dao . orm . QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link com . liferay . commerce . wish . list . model . impl . CommerceWishListModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order .
* @ param start the lower bound of the range of commerce wish lists
* @ param end the upper bound of the range of commerce wish lists ( not inclusive )
* @ return the range of commerce wish lists */
@ Override public List < CommerceWishList > getCommerceWishLists ( int start , int end ) { } } | return commerceWishListPersistence . findAll ( start , end ) ; |
public class XAnnotationImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public void setAnnotationType ( JvmType newAnnotationType ) { } } | JvmType oldAnnotationType = annotationType ; annotationType = newAnnotationType ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , XAnnotationsPackage . XANNOTATION__ANNOTATION_TYPE , oldAnnotationType , annotationType ) ) ; |
public class Default { public void init ( ) throws UnavailableException { } } | ServletContext config = getServletContext ( ) ; _servletHandler = ( ( ServletHandler . Context ) config ) . getServletHandler ( ) ; _httpContext = _servletHandler . getHttpContext ( ) ; _acceptRanges = getInitBoolean ( "acceptRanges" ) ; _dirAllowed = getInitBoolean ( "dirAllowed" ) ; _putAllowed = getInitBoolean ( "putAllowed" ) ; _delAllowed = getInitBoolean ( "delAllowed" ) ; _redirectWelcomeFiles = getInitBoolean ( "redirectWelcome" ) ; _minGzipLength = getInitInt ( "minGzipLength" ) ; String rrb = getInitParameter ( "relativeResourceBase" ) ; if ( rrb != null ) { try { _resourceBase = _httpContext . getBaseResource ( ) . addPath ( rrb ) ; } catch ( Exception e ) { log . warn ( LogSupport . EXCEPTION , e ) ; throw new UnavailableException ( e . toString ( ) ) ; } } String rb = getInitParameter ( "resourceBase" ) ; if ( rrb != null && rb != null ) throw new UnavailableException ( "resourceBase & relativeResourceBase" ) ; if ( rb != null ) { try { _resourceBase = Resource . newResource ( rb ) ; } catch ( Exception e ) { log . warn ( LogSupport . EXCEPTION , e ) ; throw new UnavailableException ( e . toString ( ) ) ; } } if ( log . isDebugEnabled ( ) ) log . debug ( "resource base = " + _resourceBase ) ; if ( _putAllowed ) _AllowString += ", PUT" ; if ( _delAllowed ) _AllowString += ", DELETE" ; if ( _putAllowed && _delAllowed ) _AllowString += ", MOVE" ; |
public class XmlDocumentReader { /** * Convert a string to an InputSource object
* @ param str string
* @ return InputSource of input */
private static InputSource stringToSource ( String str ) { } } | InputSource is = new InputSource ( new StringReader ( str ) ) ; is . setEncoding ( "UTF-8" ) ; return is ; |
public class Operators { /** * Main operator lookup routine ; lookup an operator ( either unary or binary ) in its corresponding
* map . If there ' s a matching operator , its resolve routine is called and the result is returned ;
* otherwise the result of a fallback function is returned . */
private < O > OperatorSymbol resolve ( Tag tag , Map < Name , List < O > > opMap , Predicate < O > opTestFunc , Function < O , OperatorSymbol > resolveFunc , Supplier < OperatorSymbol > noResultFunc ) { } } | return opMap . get ( operatorName ( tag ) ) . stream ( ) . filter ( opTestFunc ) . map ( resolveFunc ) . findFirst ( ) . orElseGet ( noResultFunc ) ; |
public class RichClientFramework { /** * Creates a copy of originalEndPoint .
* This is to prevent us causing problems when we change it . As there is no exposed way to do this we will use serialization .
* It may be a bit slow , but it is implementation safe as the implementation of CFEndPoint is designed to be serialized by WLM .
* Plus we only need to do this when creating the original connection so it is not main line code .
* @ param originalEndPoint the CFEndPoint to be cloned
* @ return a cloned CFEndPoint or the original one if the clone failed . */
private CFEndPoint cloneEndpoint ( final CFEndPoint originalEndPoint ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "cloneEndpoint" , originalEndPoint ) ; CFEndPoint endPoint ; ByteArrayOutputStream baos = null ; ObjectOutputStream out = null ; ObjectInputStream in = null ; try { baos = new ByteArrayOutputStream ( ) ; out = new ObjectOutputStream ( baos ) ; out . writeObject ( originalEndPoint ) ; out . flush ( ) ; ClassLoader cl = AccessController . doPrivileged ( new PrivilegedAction < ClassLoader > ( ) { @ Override public ClassLoader run ( ) { return Thread . currentThread ( ) . getContextClassLoader ( ) ; } } ) ; in = new DeserializationObjectInputStream ( new ByteArrayInputStream ( baos . toByteArray ( ) ) , cl ) ; endPoint = ( CFEndPoint ) in . readObject ( ) ; } catch ( IOException e ) { FFDCFilter . processException ( e , CLASS_NAME + ".cloneEndpoint" , JFapChannelConstants . RICHCLIENTFRAMEWORK_CLONE_01 , this ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( this , tc , "Caught IOException copying endpoint" , e ) ; // Use input parameter .
endPoint = originalEndPoint ; } catch ( ClassNotFoundException e ) { FFDCFilter . processException ( e , CLASS_NAME + ".cloneEndpoint" , JFapChannelConstants . RICHCLIENTFRAMEWORK_CLONE_02 , this ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( this , tc , "Caught ClassNotFoundException copying endpoint" , e ) ; // Use input parameter .
endPoint = originalEndPoint ; } finally { // Tidy up resources .
try { if ( out != null ) { out . close ( ) ; } if ( in != null ) { in . close ( ) ; } } catch ( IOException e ) { // No FFDC code needed .
// Absorb any exceptions as we no longer care .
} } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "cloneEndpoint" , endPoint ) ; return endPoint ; |
public class WavImpl { /** * Play a sound .
* @ param media The audio media .
* @ param alignment The alignment type .
* @ param volume The audio volume value .
* @ return The created and opened playback ready to be played .
* @ throws IOException If playback error . */
private static Playback createPlayback ( Media media , Align alignment , int volume ) throws IOException { } } | final AudioInputStream input = openStream ( media ) ; final SourceDataLine dataLine = getDataLine ( input ) ; dataLine . start ( ) ; updateAlignment ( dataLine , alignment ) ; updateVolume ( dataLine , volume ) ; return new Playback ( input , dataLine ) ; |
public class LazySocket { /** * Option 2. */
public void setSoTimeout ( int timeout ) throws SocketException { } } | if ( mSocket != null ) { mSocket . setSoTimeout ( timeout ) ; } else { setOption ( 2 , new Integer ( timeout ) ) ; } |
public class JavaScriptUtils { /** * Creates and returns a JavaScript line that sets a cookie with the specified name and value . For example , a cookie name
* of " test " and value of " 123 " would return { @ code document . cookie = " test = 123 ; " ; } . Note : The name and value will be
* HTML - encoded . */
public String getJavaScriptHtmlCookieString ( String name , String value ) { } } | return getJavaScriptHtmlCookieString ( name , value , null ) ; |
public class HostDatastoreSystem { /** * SDK5.0 signature */
public VmfsDatastoreOption [ ] queryVmfsDatastoreCreateOptions ( String devicePath , int vmfsMajorVersion ) throws HostConfigFault , NotFound , RuntimeFault , RemoteException { } } | return getVimService ( ) . queryVmfsDatastoreCreateOptions ( getMOR ( ) , devicePath , vmfsMajorVersion ) ; |
public class SarlPackageImpl { /** * Creates , registers , and initializes the < b > Package < / b > for this model , and for any others upon which it depends .
* < p > This method is used to initialize { @ link SarlPackage # eINSTANCE } when that field is accessed .
* Clients should not invoke it directly . Instead , they should simply access that field to obtain the package .
* < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ see # eNS _ URI
* @ see # createPackageContents ( )
* @ see # initializePackageContents ( )
* @ generated */
public static SarlPackage init ( ) { } } | if ( isInited ) return ( SarlPackage ) EPackage . Registry . INSTANCE . getEPackage ( SarlPackage . eNS_URI ) ; // Obtain or create and register package
Object registeredSarlPackage = EPackage . Registry . INSTANCE . get ( eNS_URI ) ; SarlPackageImpl theSarlPackage = registeredSarlPackage instanceof SarlPackageImpl ? ( SarlPackageImpl ) registeredSarlPackage : new SarlPackageImpl ( ) ; isInited = true ; // Initialize simple dependencies
XtendPackage . eINSTANCE . eClass ( ) ; TypesPackage . eINSTANCE . eClass ( ) ; XbasePackage . eINSTANCE . eClass ( ) ; XtypePackage . eINSTANCE . eClass ( ) ; XAnnotationsPackage . eINSTANCE . eClass ( ) ; // Create package meta - data objects
theSarlPackage . createPackageContents ( ) ; // Initialize created meta - data
theSarlPackage . initializePackageContents ( ) ; // Mark meta - data to indicate it can ' t be changed
theSarlPackage . freeze ( ) ; // Update the registry and return the package
EPackage . Registry . INSTANCE . put ( SarlPackage . eNS_URI , theSarlPackage ) ; return theSarlPackage ; |
public class DayOfTheWeek { /** * Returns the information this day of the week logically follows directly the given one . Example TUE . follows ( MON ) would be true , but
* MON . follows ( TUE ) or WED . follows ( MON ) is not . Public holidays does not follow any other day .
* @ param other
* Day to compare with .
* @ return { @ literal true } if this day of the week is the one right after the given one . */
public boolean follows ( @ NotNull final DayOfTheWeek other ) { } } | Contract . requireArgNotNull ( "other" , other ) ; if ( this == PH || other == PH ) { return false ; } return this . id == ( other . id + 1 ) ; |
public class SipServletRequestImpl { /** * ( non - Javadoc )
* @ see javax . servlet . sip . SipServletRequest # setRequestURI ( javax . servlet . sip . URI ) */
public void setRequestURI ( URI uri ) { } } | checkReadOnly ( ) ; Request request = ( Request ) message ; URIImpl uriImpl = ( URIImpl ) uri ; javax . sip . address . URI wrappedUri = uriImpl . getURI ( ) ; request . setRequestURI ( wrappedUri ) ; // TODO look through all contacts of the user and change them depending of if STUN is enabled
// and the request is aimed to the local network or outside |
public class Bitflyer4jFactory { /** * Create a { @ link Configuration } instance ,
* composed of multiple configurations which are enumerated in { @ link ConfigurationType } .
* @ return Composite configuration instance . */
@ VisibleForTesting AbstractConfiguration createConfiguration ( Properties properties ) { } } | CompositeConfiguration composite = new CompositeConfiguration ( ) ; ConfigurationType [ ] types = ConfigurationType . values ( ) ; Arrays . stream ( types ) . forEach ( s -> s . get ( ) . ifPresent ( composite :: addConfiguration ) ) ; Optional . ofNullable ( properties ) . ifPresent ( p -> composite . addConfiguration ( new MapConfiguration ( p ) ) ) ; return composite ; |
public class VPTree { /** * Create an ndarray
* from the datapoints
* @ param data
* @ return */
public static INDArray buildFromData ( List < DataPoint > data ) { } } | INDArray ret = Nd4j . create ( data . size ( ) , data . get ( 0 ) . getD ( ) ) ; for ( int i = 0 ; i < ret . slices ( ) ; i ++ ) ret . putSlice ( i , data . get ( i ) . getPoint ( ) ) ; return ret ; |
public class MarkerRulerAction { /** * Retrieves the document from the editor .
* @ return the document from the editor */
protected IDocument getDocument ( ) { } } | Assert . isNotNull ( editor ) ; IDocumentProvider provider = editor . getDocumentProvider ( ) ; return provider . getDocument ( editor . getEditorInput ( ) ) ; |
public class BooleanUtil { /** * 对Boolean数组取异或
* < pre >
* BooleanUtil . xor ( new Boolean [ ] { Boolean . TRUE , Boolean . TRUE } ) = Boolean . FALSE
* BooleanUtil . xor ( new Boolean [ ] { Boolean . FALSE , Boolean . FALSE } ) = Boolean . FALSE
* BooleanUtil . xor ( new Boolean [ ] { Boolean . TRUE , Boolean . FALSE } ) = Boolean . TRUE
* < / pre >
* @ param array { @ code Boolean } 数组
* @ return 异或为真取 { @ code true } */
public static Boolean xor ( Boolean ... array ) { } } | if ( ArrayUtil . isEmpty ( array ) ) { throw new IllegalArgumentException ( "The Array must not be empty !" ) ; } final boolean [ ] primitive = Convert . convert ( boolean [ ] . class , array ) ; return Boolean . valueOf ( xor ( primitive ) ) ; |
public class CommerceWishListPersistenceImpl { /** * Caches the commerce wish list in the entity cache if it is enabled .
* @ param commerceWishList the commerce wish list */
@ Override public void cacheResult ( CommerceWishList commerceWishList ) { } } | entityCache . putResult ( CommerceWishListModelImpl . ENTITY_CACHE_ENABLED , CommerceWishListImpl . class , commerceWishList . getPrimaryKey ( ) , commerceWishList ) ; finderCache . putResult ( FINDER_PATH_FETCH_BY_UUID_G , new Object [ ] { commerceWishList . getUuid ( ) , commerceWishList . getGroupId ( ) } , commerceWishList ) ; commerceWishList . resetOriginalValues ( ) ; |
public class Flowable { /** * Returns a Flowable that emits a specified item before it begins to emit items emitted by the source
* Publisher .
* < img width = " 640 " height = " 315 " src = " https : / / raw . github . com / wiki / ReactiveX / RxJava / images / rx - operators / startWith . png " alt = " " >
* < dl >
* < dt > < b > Backpressure : < / b > < / dt >
* < dd > The operator honors backpressure from downstream . The source { @ code Publisher }
* is expected to honor backpressure as well . If it violates this rule , it < em > may < / em > throw an
* { @ code IllegalStateException } when the source { @ code Publisher } completes . < / dd >
* < dt > < b > Scheduler : < / b > < / dt >
* < dd > { @ code startWith } does not operate by default on a particular { @ link Scheduler } . < / dd >
* < / dl >
* @ param value
* the item to emit first
* @ return a Flowable that emits the specified item before it begins to emit items emitted by the source
* Publisher
* @ see < a href = " http : / / reactivex . io / documentation / operators / startwith . html " > ReactiveX operators documentation : StartWith < / a > */
@ SuppressWarnings ( "unchecked" ) @ CheckReturnValue @ BackpressureSupport ( BackpressureKind . FULL ) @ SchedulerSupport ( SchedulerSupport . NONE ) public final Flowable < T > startWith ( T value ) { } } | ObjectHelper . requireNonNull ( value , "item is null" ) ; return concatArray ( just ( value ) , this ) ; |
public class ConfigurationContext { /** * Disable predicate could be registered after some items registration and to make sure that predicate
* affects all these items - apply to all currenlty registered items .
* @ param predicates new predicates */
private void applyPredicatesForRegisteredItems ( final List < PredicateHandler > predicates ) { } } | ImmutableList . builder ( ) . addAll ( getEnabledModules ( ) ) . addAll ( getEnabledBundles ( ) ) . addAll ( getEnabledExtensions ( ) ) . addAll ( getEnabledInstallers ( ) ) . build ( ) . stream ( ) . < ItemInfo > map ( this :: getInfo ) . forEach ( item -> applyDisablePredicates ( predicates , item ) ) ; |
public class JBBPParser { /** * Parse am input stream with defined external value provider .
* @ param in an input stream which content will be parsed , it must not be null
* @ param varFieldProcessor a var field processor , it may be null if there is
* not any var field in a script , otherwise NPE will be thrown during parsing
* @ param externalValueProvider an external value provider , it can be null but
* only if the script doesn ' t have fields desired the provider
* @ return the parsed content as the root structure
* @ throws IOException it will be thrown for transport errors */
public JBBPFieldStruct parse ( final InputStream in , final JBBPVarFieldProcessor varFieldProcessor , final JBBPExternalValueProvider externalValueProvider ) throws IOException { } } | final JBBPBitInputStream bitInStream = in instanceof JBBPBitInputStream ? ( JBBPBitInputStream ) in : new JBBPBitInputStream ( in , bitOrder ) ; this . finalStreamByteCounter = bitInStream . getCounter ( ) ; final JBBPNamedNumericFieldMap fieldMap ; if ( this . compiledBlock . hasEvaluatedSizeArrays ( ) || this . compiledBlock . hasVarFields ( ) ) { fieldMap = new JBBPNamedNumericFieldMap ( externalValueProvider ) ; } else { fieldMap = null ; } if ( this . compiledBlock . hasVarFields ( ) ) { JBBPUtils . assertNotNull ( varFieldProcessor , "The Script contains VAR fields, a var field processor must be provided" ) ; } try { return new JBBPFieldStruct ( new JBBPNamedFieldInfo ( "" , "" , - 1 ) , parseStruct ( bitInStream , new JBBPIntCounter ( ) , varFieldProcessor , fieldMap , new JBBPIntCounter ( ) , new JBBPIntCounter ( ) , false ) ) ; } finally { this . finalStreamByteCounter = bitInStream . getCounter ( ) ; } |
public class AnyTransliterator { /** * Temporary hack for registry problem . Needs to be replaced by better architecture . */
public Transliterator safeClone ( ) { } } | UnicodeFilter filter = getFilter ( ) ; if ( filter != null && filter instanceof UnicodeSet ) { filter = new UnicodeSet ( ( UnicodeSet ) filter ) ; } return new AnyTransliterator ( getID ( ) , filter , target , targetScript , widthFix , cache ) ; |
public class AbstractJSSEProvider { /** * Get the key manager factory instance using the provided information .
* @ see com . ibm . websphere . ssl . JSSEProvider # getKeyManagerFactoryInstance ( )
* @ param keyMgr
* @ param ctxtProvider
* @ return KeyManagerFactory
* @ throws NoSuchAlgorithmException
* @ throws NoSuchProviderException */
public KeyManagerFactory getKeyManagerFactoryInstance ( String keyMgr , String ctxtProvider ) throws NoSuchAlgorithmException , NoSuchProviderException { } } | String mgr = keyMgr ; String provider = ctxtProvider ; if ( mgr . indexOf ( '|' ) != - 1 ) { String [ ] keyManagerArray = mgr . split ( "\\|" ) ; if ( keyManagerArray != null && keyManagerArray . length == 2 ) { mgr = keyManagerArray [ 0 ] ; provider = keyManagerArray [ 1 ] ; } } KeyManagerFactory rc = KeyManagerFactory . getInstance ( mgr , provider ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "getKeyManagerFactory.getInstance(" + mgr + ", " + provider + ") " + rc ) ; return rc ; |
public class ListThingRegistrationTasksRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ListThingRegistrationTasksRequest listThingRegistrationTasksRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( listThingRegistrationTasksRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listThingRegistrationTasksRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( listThingRegistrationTasksRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( listThingRegistrationTasksRequest . getStatus ( ) , STATUS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class PoolOperations { /** * Adds a pool to the Batch account .
* @ param poolId
* The ID of the pool .
* @ param virtualMachineSize
* The size of virtual machines in the pool . See < a href =
* " https : / / azure . microsoft . com / documentation / articles / virtual - machines - size - specs / " > https : / / azure . microsoft . com / documentation / articles / virtual - machines - size - specs / < / a >
* for sizes .
* @ param cloudServiceConfiguration
* The { @ link CloudServiceConfiguration } for the pool .
* @ param targetDedicatedNodes
* The desired number of dedicated compute nodes in the pool .
* @ throws BatchErrorException
* Exception thrown when an error response is received from the
* Batch service .
* @ throws IOException
* Exception thrown when there is an error in
* serialization / deserialization of data sent to / received from the
* Batch service . */
public void createPool ( String poolId , String virtualMachineSize , CloudServiceConfiguration cloudServiceConfiguration , int targetDedicatedNodes ) throws BatchErrorException , IOException { } } | createPool ( poolId , virtualMachineSize , cloudServiceConfiguration , targetDedicatedNodes , 0 , null ) ; |
public class BasicAtomGenerator { /** * Generate an atom symbol element .
* @ param atom the atom to use
* @ param alignment the alignment of the atom ' s label
* @ param model the renderer model
* @ return an atom symbol element */
public AtomSymbolElement generateElement ( IAtom atom , int alignment , RendererModel model ) { } } | String text ; if ( atom instanceof IPseudoAtom ) { text = ( ( IPseudoAtom ) atom ) . getLabel ( ) ; } else { text = atom . getSymbol ( ) ; } return new AtomSymbolElement ( atom . getPoint2d ( ) . x , atom . getPoint2d ( ) . y , text , atom . getFormalCharge ( ) , atom . getImplicitHydrogenCount ( ) , alignment , getAtomColor ( atom , model ) ) ; |
public class LogViewSerialization { /** * Parses a { @ link LogView } under a given path and using the given
* { @ link Log } .
* @ param path Path to the log view .
* @ param log Log for the view .
* @ return The parsed log view .
* @ throws IOException If the log view can ' t be read under the given
* path . */
public static LogView parse ( String path , Log log ) throws IOException { } } | return parse ( new File ( path ) , log ) ; |
public class RDSInstanceDetailsMarshaller { /** * Marshall the given parameter object . */
public void marshall ( RDSInstanceDetails rDSInstanceDetails , ProtocolMarshaller protocolMarshaller ) { } } | if ( rDSInstanceDetails == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( rDSInstanceDetails . getFamily ( ) , FAMILY_BINDING ) ; protocolMarshaller . marshall ( rDSInstanceDetails . getInstanceType ( ) , INSTANCETYPE_BINDING ) ; protocolMarshaller . marshall ( rDSInstanceDetails . getRegion ( ) , REGION_BINDING ) ; protocolMarshaller . marshall ( rDSInstanceDetails . getDatabaseEngine ( ) , DATABASEENGINE_BINDING ) ; protocolMarshaller . marshall ( rDSInstanceDetails . getDatabaseEdition ( ) , DATABASEEDITION_BINDING ) ; protocolMarshaller . marshall ( rDSInstanceDetails . getDeploymentOption ( ) , DEPLOYMENTOPTION_BINDING ) ; protocolMarshaller . marshall ( rDSInstanceDetails . getLicenseModel ( ) , LICENSEMODEL_BINDING ) ; protocolMarshaller . marshall ( rDSInstanceDetails . getCurrentGeneration ( ) , CURRENTGENERATION_BINDING ) ; protocolMarshaller . marshall ( rDSInstanceDetails . getSizeFlexEligible ( ) , SIZEFLEXELIGIBLE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class AmazonDynamoDBAsyncClient { /** * Deletes a table and all of its items .
* If the table is in the < code > ACTIVE < / code > state , you can delete it .
* If a table is in < code > CREATING < / code > or < code > UPDATING < / code > states
* then Amazon DynamoDB returns a < code > ResourceInUseException < / code > .
* If the specified table does not exist , Amazon DynamoDB returns a
* < code > ResourceNotFoundException < / code > .
* @ param deleteTableRequest Container for the necessary parameters to
* execute the DeleteTable operation on AmazonDynamoDB .
* @ param asyncHandler Asynchronous callback handler for events in the
* life - cycle of the request . Users could provide the implementation of
* the four callback methods in this interface to process the operation
* result or handle the exception .
* @ return A Java Future object containing the response from the
* DeleteTable service method , as returned by AmazonDynamoDB .
* @ throws AmazonClientException
* If any internal errors are encountered inside the client while
* attempting to make the request or handle the response . For example
* if a network connection is not available .
* @ throws AmazonServiceException
* If an error response is returned by AmazonDynamoDB indicating
* either a problem with the data in the request , or a server side issue . */
public Future < DeleteTableResult > deleteTableAsync ( final DeleteTableRequest deleteTableRequest , final AsyncHandler < DeleteTableRequest , DeleteTableResult > asyncHandler ) throws AmazonServiceException , AmazonClientException { } } | return executorService . submit ( new Callable < DeleteTableResult > ( ) { public DeleteTableResult call ( ) throws Exception { DeleteTableResult result ; try { result = deleteTable ( deleteTableRequest ) ; } catch ( Exception ex ) { asyncHandler . onError ( ex ) ; throw ex ; } asyncHandler . onSuccess ( deleteTableRequest , result ) ; return result ; } } ) ; |
public class ShanksAgentMovementCapability { /** * Move the agent to the target location with the specific speed . Call this
* method always you want to move . This method only moves the agent a
* fragment equals to the velocity .
* @ param simulation
* @ param agent
* @ param currentLocation
* @ param targetLocation
* @ param speed */
private static void goTo ( ShanksSimulation simulation , MobileShanksAgent agent , Double2D currentLocation , Double2D targetLocation , double speed ) { } } | if ( ! targetLocation . equals ( currentLocation ) && agent . isAllowedToMove ( ) ) { Double2D direction = targetLocation . subtract ( currentLocation ) ; direction = direction . normalize ( ) ; Double2D movement = direction . multiply ( speed ) ; ShanksAgentMovementCapability . updateLocation ( simulation , agent , currentLocation . add ( movement ) ) ; } |
public class SplitControllerImpl { /** * Note we restart all flows . There is no concept of " the flow completed " . It is only steps
* within the flows that may have already completed and so may not have needed to be rerun . */
private void buildSubJobBatchWorkUnits ( ) { } } | List < Flow > flows = this . split . getFlows ( ) ; parallelBatchWorkUnits = new ArrayList < BatchFlowInSplitWorkUnit > ( ) ; // Build all sub jobs from flows in split
synchronized ( subJobs ) { for ( Flow flow : flows ) { subJobs . add ( PartitionedStepBuilder . buildFlowInSplitSubJob ( jobContext , this . split , flow ) ) ; } // Go back to earlier idea that we may have seen this id before , and need a special " always restart " behavior
// for split - flows .
for ( JSLJob job : subJobs ) { int count = batchKernel . getJobInstanceCount ( job . getId ( ) ) ; FlowInSplitBuilderConfig config = new FlowInSplitBuilderConfig ( job , completedWorkQueue , rootJobExecutionId ) ; if ( count == 0 ) { parallelBatchWorkUnits . add ( batchKernel . buildNewFlowInSplitWorkUnit ( config ) ) ; } else if ( count == 1 ) { parallelBatchWorkUnits . add ( batchKernel . buildOnRestartFlowInSplitWorkUnit ( config ) ) ; } else { throw new IllegalStateException ( "There is an inconsistency somewhere in the internal subjob creation" ) ; } } } |
public class YamlMappingNode { /** * Adds the specified { @ code key } / { @ code value } pair to this mapping .
* @ param key the key
* @ param value the value
* @ return { @ code this } */
public T put ( YamlNode key , Float value ) { } } | return put ( key , getNodeFactory ( ) . floatNode ( value ) ) ; |
public class RelatedWaveListener { /** * { @ inheritDoc } */
@ Override public void waveHandled ( final Wave wave ) { } } | if ( wave . relatedWave ( ) != null ) { // Return wave has been handled , so the triggered wave can be handled too
LOGGER . trace ( RELATED_WAVE_HANDLES , wave . fromClass ( ) . getSimpleName ( ) , wave . relatedWave ( ) . toString ( ) ) ; wave . relatedWave ( ) . status ( Status . Handled ) ; } |
public class ModelMessageHandler { /** * Constructor .
* @ param messageReceiver The message receiver that this listener is added to .
* @ param model The target table model . */
public void init ( BaseMessageReceiver messageReceiver , AbstractTableModel model ) { } } | super . init ( messageReceiver , null ) ; m_model = model ; |
public class StructureInterfaceList { /** * Sorts the interface list and reassigns ids based on new sorting */
public void sort ( ) { } } | Collections . sort ( list ) ; int i = 1 ; for ( StructureInterface interf : list ) { interf . setId ( i ) ; i ++ ; } |
public class Beta { /** * Beta function , also called the Euler integral of the first kind .
* The beta function is symmetric , i . e . B ( x , y ) = = B ( y , x ) . */
public static double beta ( double x , double y ) { } } | return Math . exp ( Gamma . lgamma ( x ) + Gamma . lgamma ( y ) - Gamma . lgamma ( x + y ) ) ; |
public class JvmAnnotationReferenceBuilder { /** * Visits a primitive value of the annotation .
* @ param name
* the value name .
* @ param value
* the actual value , whose type must be { @ link Byte } , { @ link Boolean } , { @ link Character } , { @ link Short } ,
* { @ link Integer } , { @ link Long } , { @ link Float } , { @ link Double } , { @ link String } or { @ link Type } . This
* value can also be an array of byte , boolean , short , char , int , long , float or double values ( this is
* equivalent to using { @ link # visitArray visitArray } and visiting each array element in turn , but is
* more convenient ) . */
@ Override public void visit ( final String name , final Object value ) { } } | JvmAnnotationValue annotationValue = proxies . createAnnotationValue ( value ) ; annotationValue . setOperation ( proxies . createMethodProxy ( annotationType , name ) ) ; values . addUnique ( annotationValue ) ; |
public class Inflection { /** * Return true if the word is uncountable .
* @ param word The word
* @ return True if it is uncountable */
public static boolean isUncountable ( String word ) { } } | for ( String w : UNCOUNTABLE ) { if ( w . equalsIgnoreCase ( word ) ) { return true ; } } return false ; |
public class MapExtensions { /** * Replies the elements of the given map except the pair with the given key .
* The replied map is a view on the given map . It means that any change
* in the original map is reflected to the result of this operation .
* @ param < K > type of the map keys .
* @ param < V > type of the map values .
* @ param map the map to update .
* @ param key the key to remove .
* @ return the map with the content of the map except the key .
* @ since 2.15 */
@ Pure public static < K , V > Map < K , V > operator_minus ( Map < K , V > map , final K key ) { } } | return Maps . filterKeys ( map , new Predicate < K > ( ) { @ Override public boolean apply ( K input ) { return ! Objects . equal ( input , key ) ; } } ) ; |
public class ListComplianceSummariesRequest { /** * One or more compliance or inventory filters . Use a filter to return a more specific list of results .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setFilters ( java . util . Collection ) } or { @ link # withFilters ( java . util . Collection ) } if you want to override
* the existing values .
* @ param filters
* One or more compliance or inventory filters . Use a filter to return a more specific list of results .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ListComplianceSummariesRequest withFilters ( ComplianceStringFilter ... filters ) { } } | if ( this . filters == null ) { setFilters ( new com . amazonaws . internal . SdkInternalList < ComplianceStringFilter > ( filters . length ) ) ; } for ( ComplianceStringFilter ele : filters ) { this . filters . add ( ele ) ; } return this ; |
public class Cpe { /** * Compare version numbers to obtain the correct ordering .
* @ param left the left hand version for comparison
* @ param right the right hand version for comparison
* @ return < code > - 1 < / code > if left is before the right ; < code > 0 < / code > if
* the left and right are equal ; < code > 1 < / code > if left is after the right */
protected static int compareVersions ( String left , String right ) { } } | int result = 0 ; // while the strings are well formed - the backslashes will be in the exact
// same location in equal strings - for version numbers the cost of conversion
// should not be incurred
// final List < String > subLeft = splitVersion ( Convert . fromWellFormed ( left ) ) ;
// final List < String > subRight = splitVersion ( Convert . fromWellFormed ( right ) ) ;
final List < String > subLeft = splitVersion ( left ) ; final List < String > subRight = splitVersion ( right ) ; final int subMax = ( subLeft . size ( ) <= subRight . size ( ) ) ? subLeft . size ( ) : subRight . size ( ) ; for ( int x = 0 ; result == 0 && x < subMax ; x ++ ) { if ( isPositiveInteger ( subLeft . get ( x ) ) && isPositiveInteger ( subRight . get ( x ) ) ) { try { result = Long . valueOf ( subLeft . get ( x ) ) . compareTo ( Long . valueOf ( subRight . get ( x ) ) ) ; } catch ( NumberFormatException ex ) { // infeasible path - unless one of the values is larger then a long ?
if ( ! subLeft . get ( x ) . equalsIgnoreCase ( subRight . get ( x ) ) ) { result = subLeft . get ( x ) . compareTo ( subRight . get ( x ) ) ; } } } else { result = subLeft . get ( x ) . compareTo ( subRight . get ( x ) ) ; } if ( result != 0 ) { return result ; } } if ( subLeft . size ( ) > subRight . size ( ) ) { result = 1 ; } if ( subRight . size ( ) > subLeft . size ( ) ) { result = - 1 ; } return result ; |
public class Commands { /** * Creates a new task that will append the result of the second task to the result - list
* of the first .
* TODO Fabian : test behavior and explain here . */
public static < A , B > BaseListCommand < A , B > flatAnd ( Command < A , ? extends Iterable < B > > first , Command < A , B > second ) { } } | return new FlatAndCommand < > ( first , second ) ; |
public class StandardUserAgentClient { /** * Creates and configures a new StandardUserAgentClient instance .
* @ param settings the configuration settings
* @ return a new configured StandardUserAgentClient
* @ see Factory # newUserAgentClient ( Settings ) */
public static StandardUserAgentClient newInstance ( Settings settings ) { } } | UDPConnector . Factory udpFactory = Factories . newInstance ( settings , UDP_CONNECTOR_FACTORY_KEY ) ; TCPConnector . Factory tcpFactory = Factories . newInstance ( settings , TCP_CONNECTOR_FACTORY_KEY ) ; return new StandardUserAgentClient ( udpFactory . newUDPConnector ( settings ) , tcpFactory . newTCPConnector ( settings ) , settings ) ; |
public class ClientController { /** * Deletes a specific client id for a profile
* @ param model
* @ param profileIdentifier
* @ param clientUUID
* @ return returns the table of the remaining clients or an exception if deletion failed for some reason
* @ throws Exception */
@ RequestMapping ( value = "/api/profile/{profileIdentifier}/clients/{clientUUID}" , method = RequestMethod . DELETE ) public @ ResponseBody HashMap < String , Object > deleteClient ( Model model , @ PathVariable ( "profileIdentifier" ) String profileIdentifier , @ PathVariable ( "clientUUID" ) String clientUUID ) throws Exception { } } | logger . info ( "Attempting to remove the following client: {}" , clientUUID ) ; if ( clientUUID . compareTo ( Constants . PROFILE_CLIENT_DEFAULT_ID ) == 0 ) throw new Exception ( "Default client cannot be deleted" ) ; Integer profileId = ControllerUtils . convertProfileIdentifier ( profileIdentifier ) ; clientService . remove ( profileId , clientUUID ) ; HashMap < String , Object > valueHash = new HashMap < String , Object > ( ) ; valueHash . put ( "clients" , clientService . findAllClients ( profileId ) ) ; return valueHash ; |
public class PathHelper { /** * Converts path to posix path . Converted path starts with ' / ' if path is
* absolute and ends with ' / ' if path is directory .
* @ param path
* @ return
* @ see java . nio . file . Path # isAbsolute ( )
* @ see java . nio . file . Files # */
public static final String posixString ( Path path ) { } } | String prefix = path . isAbsolute ( ) ? "/" : "" ; String suffix = Files . isDirectory ( path ) ? "/" : "" ; return StreamSupport . stream ( path . spliterator ( ) , false ) . map ( ( p ) -> p . toString ( ) ) . collect ( Collectors . joining ( "/" , prefix , suffix ) ) ; |
public class DecimalFormat { /** * Append an affix to the given StringBuffer .
* @ param buf
* buffer to append to
* @ param isNegative
* @ param isPrefix
* @ param fieldPosition
* @ param parseAttr */
private int appendAffix ( StringBuffer buf , boolean isNegative , boolean isPrefix , FieldPosition fieldPosition , boolean parseAttr ) { } } | if ( currencyChoice != null ) { String affixPat = null ; if ( isPrefix ) { affixPat = isNegative ? negPrefixPattern : posPrefixPattern ; } else { affixPat = isNegative ? negSuffixPattern : posSuffixPattern ; } StringBuffer affixBuf = new StringBuffer ( ) ; expandAffix ( affixPat , null , affixBuf ) ; buf . append ( affixBuf ) ; return affixBuf . length ( ) ; } String affix = null ; String pattern ; if ( isPrefix ) { affix = isNegative ? negativePrefix : positivePrefix ; pattern = isNegative ? negPrefixPattern : posPrefixPattern ; } else { affix = isNegative ? negativeSuffix : positiveSuffix ; pattern = isNegative ? negSuffixPattern : posSuffixPattern ; } // [ Spark / CDL ] Invoke formatAffix2Attribute to add attributes for affix
if ( parseAttr ) { // Updates for Ticket 11805.
int offset = affix . indexOf ( symbols . getCurrencySymbol ( ) ) ; if ( offset > - 1 ) { formatAffix2Attribute ( isPrefix , Field . CURRENCY , buf , offset , symbols . getCurrencySymbol ( ) . length ( ) ) ; } offset = affix . indexOf ( symbols . getMinusSignString ( ) ) ; if ( offset > - 1 ) { formatAffix2Attribute ( isPrefix , Field . SIGN , buf , offset , symbols . getMinusSignString ( ) . length ( ) ) ; } offset = affix . indexOf ( symbols . getPercentString ( ) ) ; if ( offset > - 1 ) { formatAffix2Attribute ( isPrefix , Field . PERCENT , buf , offset , symbols . getPercentString ( ) . length ( ) ) ; } offset = affix . indexOf ( symbols . getPerMillString ( ) ) ; if ( offset > - 1 ) { formatAffix2Attribute ( isPrefix , Field . PERMILLE , buf , offset , symbols . getPerMillString ( ) . length ( ) ) ; } offset = pattern . indexOf ( "¤¤¤" ) ; if ( offset > - 1 ) { formatAffix2Attribute ( isPrefix , Field . CURRENCY , buf , offset , affix . length ( ) - offset ) ; } } // Look for SIGN , PERCENT , PERMILLE in the formatted affix .
if ( fieldPosition . getFieldAttribute ( ) == NumberFormat . Field . SIGN ) { String sign = isNegative ? symbols . getMinusSignString ( ) : symbols . getPlusSignString ( ) ; int firstPos = affix . indexOf ( sign ) ; if ( firstPos > - 1 ) { int startPos = buf . length ( ) + firstPos ; fieldPosition . setBeginIndex ( startPos ) ; fieldPosition . setEndIndex ( startPos + sign . length ( ) ) ; } } else if ( fieldPosition . getFieldAttribute ( ) == NumberFormat . Field . PERCENT ) { int firstPos = affix . indexOf ( symbols . getPercentString ( ) ) ; if ( firstPos > - 1 ) { int startPos = buf . length ( ) + firstPos ; fieldPosition . setBeginIndex ( startPos ) ; fieldPosition . setEndIndex ( startPos + symbols . getPercentString ( ) . length ( ) ) ; } } else if ( fieldPosition . getFieldAttribute ( ) == NumberFormat . Field . PERMILLE ) { int firstPos = affix . indexOf ( symbols . getPerMillString ( ) ) ; if ( firstPos > - 1 ) { int startPos = buf . length ( ) + firstPos ; fieldPosition . setBeginIndex ( startPos ) ; fieldPosition . setEndIndex ( startPos + symbols . getPerMillString ( ) . length ( ) ) ; } } else // If CurrencySymbol or InternationalCurrencySymbol is in the affix , check for currency symbol .
// Get spelled out name if " ¤ ¤ ¤ " is in the pattern .
if ( fieldPosition . getFieldAttribute ( ) == NumberFormat . Field . CURRENCY ) { if ( affix . indexOf ( symbols . getCurrencySymbol ( ) ) > - 1 ) { String aff = symbols . getCurrencySymbol ( ) ; int firstPos = affix . indexOf ( aff ) ; int start = buf . length ( ) + firstPos ; int end = start + aff . length ( ) ; fieldPosition . setBeginIndex ( start ) ; fieldPosition . setEndIndex ( end ) ; } else if ( affix . indexOf ( symbols . getInternationalCurrencySymbol ( ) ) > - 1 ) { String aff = symbols . getInternationalCurrencySymbol ( ) ; int firstPos = affix . indexOf ( aff ) ; int start = buf . length ( ) + firstPos ; int end = start + aff . length ( ) ; fieldPosition . setBeginIndex ( start ) ; fieldPosition . setEndIndex ( end ) ; } else if ( pattern . indexOf ( "¤¤¤" ) > - 1 ) { // It ' s a plural , and we know where it is in the pattern .
int firstPos = pattern . indexOf ( "¤¤¤" ) ; int start = buf . length ( ) + firstPos ; int end = buf . length ( ) + affix . length ( ) ; // This seems clunky and wrong .
fieldPosition . setBeginIndex ( start ) ; fieldPosition . setEndIndex ( end ) ; } } buf . append ( affix ) ; return affix . length ( ) ; |
public class InstanceInfoMarshaller { /** * Marshall the given parameter object . */
public void marshall ( InstanceInfo instanceInfo , ProtocolMarshaller protocolMarshaller ) { } } | if ( instanceInfo == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( instanceInfo . getInstanceName ( ) , INSTANCENAME_BINDING ) ; protocolMarshaller . marshall ( instanceInfo . getIamSessionArn ( ) , IAMSESSIONARN_BINDING ) ; protocolMarshaller . marshall ( instanceInfo . getIamUserArn ( ) , IAMUSERARN_BINDING ) ; protocolMarshaller . marshall ( instanceInfo . getInstanceArn ( ) , INSTANCEARN_BINDING ) ; protocolMarshaller . marshall ( instanceInfo . getRegisterTime ( ) , REGISTERTIME_BINDING ) ; protocolMarshaller . marshall ( instanceInfo . getDeregisterTime ( ) , DEREGISTERTIME_BINDING ) ; protocolMarshaller . marshall ( instanceInfo . getTags ( ) , TAGS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class StringArrayList { /** * Adds a whole List of elements .
* @ param vec List of elements to add */
public void addAll ( StringArrayList vec ) { } } | int i ; int max ; max = vec . size ( ) ; for ( i = 0 ; i < max ; i ++ ) { add ( vec . get ( i ) ) ; } |
public class AsyncRestBuilder { /** * Sets the body for the request to be an url - encoded form . This is equivalent to setting
* the { @ link # contentType ( String ) " Content - Type " } to < code > " application / x - www - form - urlencoded " < / code >
* and then setting the body via { @ link # bodyRaw ( String ) } .
* @ param form the { @ link Form } builder object used to set form parameters . */
public AsyncRestBuilder bodyForm ( Form form ) { } } | contentType ( HttpHeaders . Values . APPLICATION_X_WWW_FORM_URLENCODED ) ; return bodyRaw ( form . toUrlEncodedString ( ) ) ; |
public class VoltXMLElementHelper { /** * Build an element to be inserted under the " parameters " tree . */
public static VoltXMLElement buildParamElement ( String elementId , String index , String valueType ) { } } | VoltXMLElement retval = new VoltXMLElement ( "parameter" ) ; retval . attributes . put ( "id" , elementId ) ; retval . attributes . put ( "index" , index ) ; retval . attributes . put ( "valuetype" , valueType ) ; return retval ; |
public class MessagingUtils { /** * Builds a string identifying a messaging client .
* @ param ownerKind { @ link RecipientKind # DM } or { @ link RecipientKind # AGENTS }
* @ param domain the domain
* @ param applicationName the application name ( only makes sense for agents )
* @ param scopedInstancePath the scoped instance path ( only makes sense for agents )
* @ return a non - null string */
public static String buildId ( RecipientKind ownerKind , String domain , String applicationName , String scopedInstancePath ) { } } | StringBuilder sb = new StringBuilder ( ) ; sb . append ( "[ " ) ; sb . append ( domain ) ; sb . append ( " ] " ) ; if ( ownerKind == RecipientKind . DM ) { sb . append ( "DM" ) ; } else { sb . append ( scopedInstancePath ) ; sb . append ( " @ " ) ; sb . append ( applicationName ) ; } return sb . toString ( ) ; |
public class ArrayFile {

    /**
     * Apply entries to the array file.
     * Entry data is flattened and sorted by position so the file can be updated
     * sequentially, reducing disk seek time. This method updates hwmScn and
     * lwmScn in the array file.
     *
     * @param entryList entries to apply
     * @throws IOException if writing to the underlying file fails
     */
    public synchronized < T extends EntryValue > void update ( List < Entry < T > > entryList ) throws IOException {
        Chronos c = new Chronos ( ) ; // elapsed-time tracker for the final log line
        // Sort values by position in the array file
        T [ ] values = EntryUtility . sortEntriesToValues ( entryList ) ;
        if ( values == null || values . length == 0 ) return ;
        // Obtain maxScn: the largest SCN across the current hwm and all entries
        long maxScn = _arrayHwmScn ;
        for ( Entry < ? > e : entryList ) {
            maxScn = Math . max ( e . getMaxScn ( ) , maxScn ) ;
        }
        // Write hwmScn before the data — presumably so a crash mid-batch is
        // detectable as hwm > lwm; TODO confirm against recovery logic.
        _log . info ( "write hwmScn:" + maxScn ) ;
        _writer . writeLong ( HWM_SCN_POSITION , maxScn ) ;
        _writer . flush ( ) ;
        // Write values sequentially in position order
        for ( T v : values ) {
            v . updateArrayFile ( _writer , getPosition ( v . pos ) ) ;
        }
        _writer . flush ( ) ;
        // Write lwmScn last; lwm == hwm marks the batch as fully applied
        _log . info ( "write lwmScn:" + maxScn ) ;
        _writer . writeLong ( LWM_SCN_POSITION , maxScn ) ;
        _writer . flush ( ) ;
        _arrayLwmScn = maxScn ;
        _arrayHwmScn = maxScn ;
        _log . info ( entryList . size ( ) + " entries flushed to " + _file . getAbsolutePath ( ) + " in " + c . getElapsedTime ( ) ) ;
    }
}
public class ClosureGlobalPostProcessor {

    /**
     * Post-processes the given bundles with the Google Closure compiler:
     * copies bundle sources to a temp directory, runs the compiler, then
     * copies the compiled (and zipped) results back over the sources.
     *
     * @see net.jawr.web.resource.bundle.global.processor.GlobalProcessor#
     *      processBundles(net.jawr.web.resource.bundle.global.processor.
     *      AbstractGlobalProcessingContext, java.util.List)
     */
    @ Override public void processBundles ( GlobalPostProcessingContext ctx , List < JoinableResourceBundle > bundles ) {
        if ( ctx . hasBundleToBeProcessed ( ) ) {
            String workingDir = ctx . getRsReaderHandler ( ) . getWorkingDirectory ( ) ;
            // Lazily initialize the source/destination/temp directory paths once.
            if ( srcDir == null || destDir == null || tempDir == null || srcZipDir == null || destZipDir == null ) {
                srcDir = ctx . getBundleHandler ( ) . getBundleTextDirPath ( ) ;
                srcZipDir = ctx . getBundleHandler ( ) . getBundleZipDirPath ( ) ;
                destDir = workingDir + GOOGLE_CLOSURE_RESULT_TEXT_DIR ;
                destZipDir = workingDir + GOOGLE_CLOSURE_RESULT_ZIP_DIR ;
                tempDir = workingDir + GOOGLE_CLOSURE_TEMP_DIR ;
            }
            // Create the text result directory
            File dir = new File ( destDir ) ;
            if ( ! dir . exists ( ) && ! dir . mkdirs ( ) ) {
                throw new BundlingProcessException ( "Impossible to create temporary directory :" + destDir ) ;
            }
            // Create the zip result directory
            dir = new File ( destZipDir ) ;
            if ( ! dir . exists ( ) && ! dir . mkdirs ( ) ) {
                throw new BundlingProcessException ( "Impossible to create temporary directory :" + destZipDir ) ;
            }
            // Create the temporary working directory
            dir = new File ( tempDir ) ;
            if ( ! dir . exists ( ) && ! dir . mkdirs ( ) ) {
                throw new BundlingProcessException ( "Impossible to create temporary directory :" + tempDir ) ;
            }
            // Copy the bundle files into the temp directory, compile, then copy back
            try {
                FileUtils . copyDirectory ( new File ( srcDir ) , new File ( tempDir ) ) ;
                Map < String , String > resultBundleMapping = new HashMap < > ( ) ;
                JawrClosureCommandLineRunner cmdRunner = new JawrClosureCommandLineRunner ( ctx , bundles , resultBundleMapping ) ;
                cmdRunner . doRun ( ) ;
                // Copy compiled bundles back over the source text directory
                FileUtils . copyDirectory ( new File ( destDir ) , new File ( srcDir ) ) ;
                // Copy zipped compiled bundles back over the source zip directory
                FileUtils . copyDirectory ( new File ( destZipDir ) , new File ( srcZipDir ) ) ;
            } catch ( Exception e ) {
                throw new BundlingProcessException ( e ) ;
            }
        }
    }
}
public class WSRdbManagedConnectionImpl {

    /**
     * Removes and closes all statements in the statement cache for this
     * ManagedConnection. The closing of cached statements is separated from
     * the removing of statements from the cache to avoid synchronization
     * during the closing of statements.
     */
    public final void clearStatementCache ( ) {
        final boolean isTraceOn = TraceComponent . isAnyTracingEnabled ( ) ;
        if ( statementCache == null ) {
            // Caching is disabled; nothing to clear.
            if ( isTraceOn && tc . isDebugEnabled ( ) )
                Tr . debug ( this , tc , "statement cache is null. caching is disabled" ) ;
            return ;
        }
        if ( isTraceOn && tc . isEntryEnabled ( ) )
            Tr . entry ( this , tc , "clearStatementCache" ) ;
        // Drain the cache first, then close each statement outside the cache lock.
        Object [ ] stmts = statementCache . removeAll ( ) ;
        for ( int i = stmts . length ; i > 0 ; )
            try {
                ( ( Statement ) stmts [ -- i ] ) . close ( ) ;
            } catch ( SQLException closeX ) {
                // Record the failure via FFDC but keep closing the remaining statements.
                FFDCFilter . processException ( closeX , getClass ( ) . getName ( ) + ".clearStatementCache" , "2169" , this ) ;
                if ( isTraceOn && tc . isDebugEnabled ( ) )
                    Tr . debug ( this , tc , "Error closing statement" , closeX ) ;
            }
        if ( isTraceOn && tc . isEntryEnabled ( ) )
            Tr . exit ( this , tc , "clearStatementCache" ) ;
    }
}
public class AndroidJsonHandler { /** * Converts a Reader to a String by copying the Reader ' s contents into a StringWriter via a
* buffer .
* @ param reader The Reader from which to extract a String .
* @ return The String contained in the Reader .
* @ throws IOException If there is an error reading from the input Reader . */
private static String readerToString ( Reader reader ) throws IOException { } } | StringWriter writer = new StringWriter ( ) ; try { char [ ] buffer = new char [ COPY_BUFFER_SIZE ] ; while ( true ) { int bytesRead = reader . read ( buffer ) ; if ( bytesRead == - 1 ) { break ; } else { writer . write ( buffer , 0 , bytesRead ) ; } } return writer . toString ( ) ; } finally { reader . close ( ) ; } |
public class DRL6Expressions {

    /**
     * $ANTLR start synpred47_DRL6Expressions.
     * Generated syntactic-predicate fragment: matches the token sequence
     * GREATER GREATER GREATER (the ">>>" operator). ANTLR-generated code —
     * do not edit by hand; regenerate from the grammar instead.
     */
    public final void synpred47_DRL6Expressions_fragment ( ) throws RecognitionException {
        // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:740:7 : ( GREATER GREATER GREATER )
        // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:740:8 : GREATER GREATER GREATER
        {
            match ( input , GREATER , FOLLOW_GREATER_in_synpred47_DRL6Expressions4674 ) ;
            if ( state . failed ) return ;
            match ( input , GREATER , FOLLOW_GREATER_in_synpred47_DRL6Expressions4676 ) ;
            if ( state . failed ) return ;
            match ( input , GREATER , FOLLOW_GREATER_in_synpred47_DRL6Expressions4678 ) ;
            if ( state . failed ) return ;
        }
    }
}
public class ElasticHashinator {

    /**
     * Add the given tokens to the ring and generate the new hashinator.
     * The current hashinator is not changed.
     *
     * @param tokensToAdd tokens to add as a map of tokens to partitions
     * @return the new hashinator
     */
    public ElasticHashinator addTokens ( NavigableMap < Integer , Integer > tokensToAdd ) {
        // figure out the interval between tokens on the ring
        long interval = deriveTokenInterval ( m_tokensMap . get ( ) . keySet ( ) ) ;
        Map < Integer , Integer > tokens = Maps . newTreeMap ( ) ;
        for ( Map . Entry < Integer , Integer > e : m_tokensMap . get ( ) . entrySet ( ) ) {
            // A token being added at the same position replaces the existing one.
            if ( tokensToAdd . containsKey ( e . getKey ( ) ) ) {
                continue ;
            }
            // see if we are moving an intermediate token forward
            if ( isIntermediateToken ( e . getKey ( ) , interval ) ) {
                Map . Entry < Integer , Integer > floorEntry = tokensToAdd . floorEntry ( e . getKey ( ) ) ;
                // If the two tokens belong to the same partition and bucket, we are moving the one on the ring
                // forward, so remove it from the ring
                if ( floorEntry != null && floorEntry . getValue ( ) . equals ( e . getValue ( ) ) && containingBucket ( floorEntry . getKey ( ) , interval ) == containingBucket ( e . getKey ( ) , interval ) ) {
                    continue ;
                }
            }
            tokens . put ( e . getKey ( ) , e . getValue ( ) ) ;
        }
        tokens . putAll ( tokensToAdd ) ;
        return new ElasticHashinator ( ImmutableSortedMap . copyOf ( tokens ) ) ;
    }
}
public class JSONConverter { /** * Debug ( dump ) the contents of an input stream . This effectively copies
* the input stream into a byte array , dumps one and then returns another .
* If logging is not enabled , nothing happens .
* @ param in
* @ throws IOException */
private InputStream debugInputStream ( InputStream in ) throws IOException { } } | if ( logger . isLoggable ( Level . FINEST ) ) { final byte [ ] buffer = new byte [ 1024 ] ; final ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; int len ; String line ; logger . finest ( "[START] Dumping the InputStream that will be passed to JSON.parse()" ) ; while ( ( len = in . read ( buffer ) ) > - 1 ) { baos . write ( buffer , 0 , len ) ; } baos . flush ( ) ; ByteArrayInputStream dumpIn = new ByteArrayInputStream ( baos . toByteArray ( ) ) ; BufferedReader br = new BufferedReader ( new InputStreamReader ( dumpIn ) ) ; while ( ( line = br . readLine ( ) ) != null ) { logger . finest ( line ) ; } logger . finest ( "[END] Dumping the InputStream that will be passed to JSON.parse()" ) ; return new ByteArrayInputStream ( baos . toByteArray ( ) ) ; } else { return in ; } |
public class AbstractWFieldIndicator { /** * Return the diagnostics for this indicator .
* @ return A list of diagnostics ( may be empty ) for this indicator . */
public List < Diagnostic > getDiagnostics ( ) { } } | FieldIndicatorModel model = getComponentModel ( ) ; return Collections . unmodifiableList ( model . diagnostics ) ; |
public class AccessControlList { /** * Gets the list of { @ link Grant } objects in this access control list ( ACL ) .
* @ return The list of { @ link Grant } objects in this ACL . */
public List < Grant > getGrantsAsList ( ) { } } | checkState ( ) ; if ( grantList == null ) { if ( grantSet == null ) { grantList = new LinkedList < Grant > ( ) ; } else { grantList = new LinkedList < Grant > ( grantSet ) ; grantSet = null ; } } return grantList ; |
public class DateTimeUtil {

    /**
     * Get Date from "yyyyMMddThhmmssZ" with timezone.
     *
     * @param val String "yyyyMMddThhmmssZ"
     * @param tz  TimeZone applied to the shared formatter for this parse
     * @return Date
     * @throws BadDateException on format error
     */
    @ SuppressWarnings ( "unused" ) public static Date fromISODateTimeUTC ( final String val , final TimeZone tz ) throws BadDateException {
        try {
            // SimpleDateFormat is not thread-safe, so the shared formatter is
            // locked while its timezone is temporarily set for this parse.
            synchronized ( isoDateTimeUTCTZFormat ) {
                isoDateTimeUTCTZFormat . setTimeZone ( tz ) ;
                return isoDateTimeUTCTZFormat . parse ( val ) ;
            }
        } catch ( Throwable t ) {
            // NOTE(review): catching Throwable discards the original cause;
            // consider narrowing to Exception and chaining it — confirm
            // BadDateException supports a cause before changing.
            throw new BadDateException ( ) ;
        }
    }
}
public class BufferUtils { /** * Like append , but does not throw { @ link BufferOverflowException }
* @ param to Buffer is flush mode
* @ param b bytes to fill
* @ param off offset into byte
* @ param len length to fill
* @ return The position of the valid data before the flipped position . */
public static int fill ( ByteBuffer to , byte [ ] b , int off , int len ) { } } | int pos = flipToFill ( to ) ; try { int remaining = to . remaining ( ) ; int take = remaining < len ? remaining : len ; to . put ( b , off , take ) ; return take ; } finally { flipToFlush ( to , pos ) ; } |
public class GetReservedInstancesExchangeQuoteResult { /** * The configuration of your Convertible Reserved Instances .
* @ param reservedInstanceValueSet
* The configuration of your Convertible Reserved Instances . */
public void setReservedInstanceValueSet ( java . util . Collection < ReservedInstanceReservationValue > reservedInstanceValueSet ) { } } | if ( reservedInstanceValueSet == null ) { this . reservedInstanceValueSet = null ; return ; } this . reservedInstanceValueSet = new com . amazonaws . internal . SdkInternalList < ReservedInstanceReservationValue > ( reservedInstanceValueSet ) ; |
public class ServerPluginRepository { /** * Return the key of the plugin the extension ( in the sense of { @ link Plugin . Context # addExtension ( Object ) } is coming from . */
@ CheckForNull public String getPluginKey ( Object extension ) { } } | return keysByClassLoader . get ( extension . getClass ( ) . getClassLoader ( ) ) ; |
public class DdosProtectionPlansInner {

    /**
     * Gets information about the specified DDoS protection plan, asynchronously,
     * by delegating to the service-response variant and adapting it to a
     * callback-driven future.
     *
     * @param resourceGroupName The name of the resource group.
     * @param ddosProtectionPlanName The name of the DDoS protection plan.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture < DdosProtectionPlanInner > getByResourceGroupAsync ( String resourceGroupName , String ddosProtectionPlanName , final ServiceCallback < DdosProtectionPlanInner > serviceCallback ) {
        return ServiceFuture . fromResponse ( getByResourceGroupWithServiceResponseAsync ( resourceGroupName , ddosProtectionPlanName ) , serviceCallback ) ;
    }
}
public class MemoryCache { /** * Removes a value from the cache by its key .
* @ param correlationId ( optional ) transaction id to trace execution through
* call chain .
* @ param key a unique value key . */
public void remove ( String correlationId , String key ) { } } | synchronized ( _lock ) { // Get the entry
CacheEntry entry = _cache . get ( key ) ; // Remove entry from the cache
if ( entry != null ) { _cache . remove ( key ) ; _count -- ; } } |
public class BoundedBuffer {

    /**
     * D638088 - implemented split locks for get queue.
     * Waits on one of the striped get-queue locks until notified or the timeout
     * elapses; a negative timeout selects a default slice (short for the first
     * waiter on the stripe, long for the rest).
     *
     * @param timeout maximum wait in milliseconds, or negative for the default
     * @throws InterruptedException if the waiting thread is interrupted
     */
    private void waitGet_ ( long timeout ) throws InterruptedException {
        // wait on a member of the lock array (striped to reduce contention)
        int lockIndex = getQueueIndex ( getQueueLocks_ , getQueueCounter_ , true ) ;
        synchronized ( getQueueLocks_ [ lockIndex ] ) {
            try {
                // increment waiting threads
                getQueueLocks_ [ lockIndex ] . threadsWaiting ++ ;
                // D497382 - Now that we have the getQueue_ lock, recheck the
                // condition from take/poll to try to minimize the number of
                // threads that wait but don't get notified.
                if ( numberOfUsedSlots . get ( ) <= 0 ) {
                    // determine timeout if unspecified
                    if ( timeout < 0 ) {
                        if ( WAIT_SHORT_SLICE_ != WAIT_LONG_SLICE_ && getQueueLocks_ [ lockIndex ] . shortWaiter == null ) {
                            // use default timeout if not specified
                            timeout = WAIT_SHORT_SLICE_ ;
                            // there are no threads waiting with a short timeout,
                            // so this thread will use that value and set the flag.
                            getQueueLocks_ [ lockIndex ] . shortWaiter = Thread . currentThread ( ) ;
                        } else {
                            // there is already a thread on this queue waiting with
                            // short timeout set, so all other threads will wait a
                            // longer time to avoid contention of threads looking for
                            // work without being notified.
                            timeout = WAIT_LONG_SLICE_ ;
                        }
                    }
                    // notification through notifyGet_ may be lost here
                    getQueueLocks_ [ lockIndex ] . wait ( timeout ) ;
                }
            } catch ( InterruptedException ex ) {
                // pass a possibly-consumed notification on to another waiter
                // before rethrowing, so a signal is not lost.
                getQueueLocks_ [ lockIndex ] . notify ( ) ;
                throw ex ;
            } finally {
                // decrement waiting threads
                getQueueLocks_ [ lockIndex ] . threadsWaiting -- ;
                // check whether the current thread was waiting with a timeout.
                // if so, need to clear the flag to indicate that no thread is
                // waiting with timeout on this queue.
                if ( WAIT_SHORT_SLICE_ != WAIT_LONG_SLICE_ )
                    if ( getQueueLocks_ [ lockIndex ] . shortWaiter == Thread . currentThread ( ) )
                        getQueueLocks_ [ lockIndex ] . shortWaiter = null ;
            }
        }
    }
}
public class WorkflowManagerImpl {

    /**
     * Executes a previously deferred workflow event identified by its id,
     * dispatching on the event type: DEFERRED (timeout), FORCE_ABORT,
     * SUBPROCESS_END. The event is marked DONE once handled.
     *
     * @see nz.co.senanque.workflow.WorkflowManager#execute(nz.co.senanque.workflow.instances.DeferredEvent)
     */
    @ Transactional public void executeDeferredEvent ( long deferredEventId ) {
        DeferredEvent deferredEvent = getWorkflowDAO ( ) . findDeferredEvent ( deferredEventId ) ;
        log . debug ( "fired deferred event {} for {} {}" , deferredEvent . getEventType ( ) , deferredEvent . getProcessInstance ( ) . getId ( ) , deferredEvent . getComment ( ) ) ;
        ProcessInstance processInstance = deferredEvent . getProcessInstance ( ) ;
        switch ( deferredEvent . getEventType ( ) ) {
        case DEFERRED :
            // A timeout fired: mark the most recent audit interrupted/timed out.
            ProcessInstanceUtils . clearQueue ( processInstance , TaskStatus . TIMEOUT ) ;
            int lastAuditIndex = processInstance . getAudits ( ) . size ( ) - 1 ;
            Audit lastAudit = processInstance . getAudits ( ) . get ( lastAuditIndex ) ;
            lastAudit . setInterrupted ( true ) ;
            lastAudit . setStatus ( TaskStatus . TIMEOUT ) ;
            Date now = new Date ( ) ;
            lastAudit . setComment ( trimComment ( lastAudit . getComment ( ) + " Timed out at " + now ) ) ;
            // this sets the task to the TryTask that generated the timeout.
            processInstance . setProcessDefinitionName ( deferredEvent . getProcessDefinitionName ( ) ) ;
            processInstance . setTaskId ( deferredEvent . getTaskId ( ) ) ;
            break ;
        case FORCE_ABORT :
            ProcessInstanceUtils . clearQueue ( processInstance , TaskStatus . ABORTING ) ;
            processInstance . setComment ( "Sibling aborted" ) ;
            break ;
        case SUBPROCESS_END :
            // Nothing to do if we are not waiting on any children.
            if ( processInstance . getWaitCount ( ) == 0 ) {
                break ;
            }
            processInstance . setWaitCount ( processInstance . getWaitCount ( ) - 1 ) ;
            // If this is the last process then kick off the parent, but
            // if any of the siblings aborted then abort the parent.
            if ( processInstance . getWaitCount ( ) == 0 ) {
                if ( processInstance . isCyclic ( ) ) {
                    // but if this is a cyclic there won't be any siblings
                    // and we want to re-execute the parent. Use the Retry logic.
                    TaskBase previous = getCurrentTask ( processInstance ) . getPreviousTask ( processInstance ) ;
                    if ( previous == null ) {
                        throw new WorkflowException ( "Trying to retry a task when there is none" ) ;
                    }
                    previous . loadTask ( processInstance ) ;
                    processInstance . setStatus ( TaskStatus . GO ) ;
                } else {
                    processInstance . setStatus ( TaskStatus . GO ) ;
                    for ( ProcessInstance sibling : processInstance . getChildProcesses ( ) ) {
                        if ( sibling . getStatus ( ) == TaskStatus . ABORTED ) {
                            processInstance . setStatus ( TaskStatus . ABORTING ) ;
                            processInstance . setComment ( "Child aborted" ) ;
                        }
                    }
                }
            }
            break ;
        default :
            log . error ( "Unexpected event type {} (ignoring)" , deferredEvent . getEventType ( ) ) ;
            return ;
        }
        // Mark the event consumed so it will not fire again.
        deferredEvent . setEventType ( EventType . DONE ) ;
    }
}
public class WorkerPool { /** * Wait for the { @ link RingBuffer } to drain of published events then halt the workers . */
public void drainAndHalt ( ) { } } | Sequence [ ] workerSequences = getWorkerSequences ( ) ; while ( ringBuffer . getCursor ( ) > Util . getMinimumSequence ( workerSequences ) ) { Thread . yield ( ) ; } for ( WorkProcessor < ? > processor : workProcessors ) { processor . halt ( ) ; } started . set ( false ) ; |
public class EldaRouterRestlet { /** * Answer true of m ' s endpoint has no formatter called type . */
private boolean notFormat ( Match m , String type ) { } } | return m . getEndpoint ( ) . getRendererNamed ( type ) == null ; |
public class DataSetBuilder { /** * Set < code > double < / code > sequence filler for column
* with a specified step .
* A call to this method is shorthand for
* < code > sequence ( column , initial , x - & gt ; x + step ) < / code > .
* @ param column Column name .
* @ param initial Initial sequence value .
* @ param step Sequence step .
* @ return The builder instance ( for chained calls ) .
* @ see # sequence ( String , Object , UnaryOperator )
* @ see # sequence ( String , float , float ) */
public DataSetBuilder sequence ( String column , double initial , double step ) { } } | return sequence ( column , ( Double ) initial , x -> x + step ) ; |
public class IO { /** * int c simplifies checking , casting */
public static void writeU2 ( OutputStream dest , int u2 ) throws IOException { } } | if ( ( u2 < Character . MIN_VALUE ) || ( u2 > Character . MAX_VALUE ) ) { throw new IllegalArgumentException ( ) ; } dest . write ( ( byte ) ( u2 >> 8 ) ) ; dest . write ( ( byte ) u2 ) ; |
public class Assign { /** * 数组或 Map 赋值 */
@ SuppressWarnings ( { } } | "unchecked" , "rawtypes" } ) Object assignElement ( Scope scope ) { Object target = scope . get ( id ) ; if ( target == null ) { throw new TemplateException ( "The assigned targets \"" + id + "\" can not be null" , location ) ; } Object idx = index . eval ( scope ) ; if ( idx == null ) { throw new TemplateException ( "The index of list/array and the key of map can not be null" , location ) ; } Object value ; if ( target instanceof Map ) { value = right . eval ( scope ) ; ( ( Map ) target ) . put ( idx , value ) ; return value ; } if ( ! ( idx instanceof Integer ) ) { throw new TemplateException ( "The index of list/array can only be integer" , location ) ; } if ( target instanceof List ) { value = right . eval ( scope ) ; ( ( List ) target ) . set ( ( Integer ) idx , value ) ; return value ; } if ( target . getClass ( ) . isArray ( ) ) { value = right . eval ( scope ) ; java . lang . reflect . Array . set ( target , ( Integer ) idx , value ) ; return value ; } throw new TemplateException ( "Only the list array and map is supported by index assignment" , location ) ; |
public class PackedRecordPointer { /** * Pack a record address and partition id into a single word .
* @ param recordPointer a record pointer encoded by TaskMemoryManager .
* @ param partitionId a shuffle partition id ( maximum value of 2 ^ 24 ) .
* @ return a packed pointer that can be decoded using the { @ link PackedRecordPointer } class . */
public static long packPointer ( long recordPointer , int partitionId ) { } } | assert ( partitionId <= MAXIMUM_PARTITION_ID ) ; // Note that without word alignment we can address 2 ^ 27 bytes = 128 megabytes per page .
// Also note that this relies on some internals of how TaskMemoryManager encodes its addresses .
final long pageNumber = ( recordPointer & MASK_LONG_UPPER_13_BITS ) >>> 24 ; final long compressedAddress = pageNumber | ( recordPointer & MASK_LONG_LOWER_27_BITS ) ; return ( ( ( long ) partitionId ) << 40 ) | compressedAddress ; |
public class UnconditionalValueDerefSet { /** * Set a value as being unconditionally dereferenced at the given set of
* locations .
* @ param vn
* the value
* @ param derefSet
* the Set of dereference Locations */
public void setDerefSet ( ValueNumber vn , Set < Location > derefSet ) { } } | if ( UnconditionalValueDerefAnalysis . DEBUG ) { System . out . println ( "Adding dereference of " + vn + " for # " + System . identityHashCode ( this ) + " to " + derefSet ) ; } valueNumbersUnconditionallyDereferenced . set ( vn . getNumber ( ) ) ; Set < Location > derefLocationSet = getDerefLocationSet ( vn ) ; derefLocationSet . clear ( ) ; derefLocationSet . addAll ( derefSet ) ; |
public class ServerConfig { /** * Set a configuration parameter with a Collection type . */
private static void setCollectionParam ( String name , List < ? > values ) throws ConfigurationException { } } | try { Field field = config . getClass ( ) . getDeclaredField ( name ) ; Class < ? > fieldClass = field . getType ( ) ; if ( Map . class . isAssignableFrom ( fieldClass ) ) { setMapParam ( field , values ) ; } else if ( List . class . isAssignableFrom ( fieldClass ) ) { setListParam ( field , values ) ; } else { throw new ConfigurationException ( "Invalid value type for parameter: " + name ) ; } } catch ( SecurityException | NoSuchFieldException e ) { throw new ConfigurationException ( "Unknown configuration parameter: " + name ) ; } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.