signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class LogEventFilter { /** * Filter by checkpoint * @ param from the log event id to start retrieving logs from . * @ param take the limit of items to retrieve . * @ return this filter instance */ public LogEventFilter withCheckpoint ( String from , int take ) { } }
parameters . put ( "from" , from ) ; parameters . put ( "take" , take ) ; return this ;
public class FileLoader { /** * Returns the file name for the given source key . * @ param source configuration source key * @ return the file name for the given source key */ public static String getFileName ( final ConfigurationSourceKey source ) { } }
// ensure an exception is thrown if we are not file . if ( source . getType ( ) != ConfigurationSourceKey . Type . FILE ) throw new AssertionError ( "Can only load configuration sources with type " + ConfigurationSourceKey . Type . FILE ) ; return source . getName ( ) + '.' + source . getFormat ( ) . getExtension ( ) ;
public class UserServiceImpl { /** * < p > If includeEmpty is false , returns a list of UserRoles containing only users with one or more of the roles * listed in roleList . If it is true , the list also includes users without any of the given roles . In either case , * the UserRoles ' roles lists only contain roles contained in roleList . < / p > * < p > Given a role list , it gets all the users belonging to each role then extracts uids from UserReadUserListDetails . * This should minimize the number of calls to community since it ' s most likely that num ( users ) > > num ( roles ) . < / p > * @ param roleList List of roles * @ param includeEmpty Whether to include users who have none of the roles in roleList . * @ return List of users and uids , with roles taken from roleList . */ private List < UserRoles > createUserRoles ( final Iterable < String > roleList , final boolean includeEmpty ) { } }
final List < User > userDetails = getUsers ( ) ; final Map < String , List < String > > userNamesRolesMap = createUsernameRolesMap ( roleList ) ; final List < UserRoles > userRoles = new ArrayList < > ( ) ; for ( final User user : userDetails ) { final String username = user . getUsername ( ) ; final long uid = user . getUid ( ) ; final String securityInfo = user . getSecurityinfo ( ) ; final List < String > roles = userNamesRolesMap . get ( username ) ; if ( roles != null ) { userRoles . add ( new UserRoles ( username , uid , securityInfo , roles , user . getFields ( ) ) ) ; } else if ( includeEmpty ) { userRoles . add ( new UserRoles ( username , uid , securityInfo , new ArrayList < > ( ) , user . getFields ( ) ) ) ; } } return userRoles ;
public class CSSRGBA { /** * { @ inheritDoc } * @ since 3.8.3 */ @ Nonnull @ Nonempty public String getAsString ( ) { } }
return CCSSValue . PREFIX_RGBA_OPEN + m_sRed + ',' + m_sGreen + ',' + m_sBlue + ',' + m_sOpacity + CCSSValue . SUFFIX_RGBA_CLOSE ;
public class CmsJspStandardContextBean { /** * Returns the instance id parent container mapping . < p > * @ return the instance id parent container mapping */ public Map < String , CmsContainerBean > getParentContainers ( ) { } }
if ( m_parentContainers == null ) { initPageData ( ) ; } return Collections . unmodifiableMap ( m_parentContainers ) ;
public class ScreenAbstract { /** * { @ inheritDoc } * Set the icon if has . */ @ Override public void start ( ) { } }
config . getIcon ( ) . map ( Media :: getFile ) . filter ( File :: exists ) . ifPresent ( file -> setIcon ( file . getPath ( ) ) ) ;
public class Bucket { /** * Returns a list of requested blobs in this bucket . Blobs that do not exist are null . * < p > Example of getting some blobs in the bucket , using a batch request . * < pre > { @ code * String blobName1 = " my _ blob _ name1 " ; * String blobName2 = " my _ blob _ name2 " ; * List < String > blobNames = new LinkedList < > ( ) ; * blobNames . add ( blobName1 ) ; * blobNames . add ( blobName2 ) ; * List < Blob > blobs = bucket . get ( blobNames ) ; * for ( Blob blob : blobs ) { * if ( blob = = null ) { * / / the blob was not found * } < / pre > * @ param blobNames blobs to get * @ return an immutable list of { @ code Blob } objects * @ throws StorageException upon failure */ public List < Blob > get ( Iterable < String > blobNames ) { } }
ImmutableList . Builder < BlobId > builder = ImmutableList . builder ( ) ; for ( String blobName : blobNames ) { builder . add ( BlobId . of ( getName ( ) , blobName ) ) ; } return storage . get ( builder . build ( ) ) ;
public class ItemAPI { /** * Returns the items on app matching the given filters . * @ param appId * The id of the app * @ param limit * The maximum number of items to receive , defaults to 20 * @ param offset * The offset from the start of the items returned , defaults to 0 * @ param sortBy * How the items should be sorted . For the possible options , see * the filter area . * @ param sortDesc * < code > true < / code > or leave out to sort descending , use * < code > false < / code > to sort ascending * @ param filters * The filters to apply * @ return The items matching the filters */ public ItemsResponse getItems ( int appId , Integer limit , Integer offset , SortBy sortBy , Boolean sortDesc , FilterByValue < ? > ... filters ) { } }
WebResource resource = getResourceFactory ( ) . getApiResource ( "/item/app/" + appId + "/v2/" ) ; if ( limit != null ) { resource = resource . queryParam ( "limit" , limit . toString ( ) ) ; } if ( offset != null ) { resource = resource . queryParam ( "offset" , offset . toString ( ) ) ; } if ( sortBy != null ) { resource = resource . queryParam ( "sort_by" , sortBy . getKey ( ) ) ; } if ( sortDesc != null ) { resource = resource . queryParam ( "sort_desc" , sortDesc ? "1" : "0" ) ; } for ( FilterByValue < ? > filter : filters ) { resource = resource . queryParam ( filter . getBy ( ) . getKey ( ) , filter . getFormattedValue ( ) ) ; } return resource . get ( ItemsResponse . class ) ;
public class CommerceOrderPersistenceImpl { /** * Removes all the commerce orders where userId = & # 63 ; and createDate & lt ; & # 63 ; and orderStatus = & # 63 ; from the database . * @ param userId the user ID * @ param createDate the create date * @ param orderStatus the order status */ @ Override public void removeByU_LtC_O ( long userId , Date createDate , int orderStatus ) { } }
for ( CommerceOrder commerceOrder : findByU_LtC_O ( userId , createDate , orderStatus , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ) { remove ( commerceOrder ) ; }
public class ConfigurationMetadataUnitParser { /** * Parse compilation unit . * @ param collectedProps the collected props * @ param collectedGroups the collected groups * @ param p the p * @ param typePath the type path * @ param typeName the type name * @ param indexNameWithBrackets the index name with brackets */ @ SneakyThrows public void parseCompilationUnit ( final Set < ConfigurationMetadataProperty > collectedProps , final Set < ConfigurationMetadataProperty > collectedGroups , final ConfigurationMetadataProperty p , final String typePath , final String typeName , final boolean indexNameWithBrackets ) { } }
try ( val is = Files . newInputStream ( Paths . get ( typePath ) ) ) { val cu = StaticJavaParser . parse ( is ) ; new ConfigurationMetadataFieldVisitor ( collectedProps , collectedGroups , indexNameWithBrackets , typeName , sourcePath ) . visit ( cu , p ) ; if ( ! cu . getTypes ( ) . isEmpty ( ) ) { val decl = ClassOrInterfaceDeclaration . class . cast ( cu . getType ( 0 ) ) ; for ( var i = 0 ; i < decl . getExtendedTypes ( ) . size ( ) ; i ++ ) { val parentType = decl . getExtendedTypes ( ) . get ( i ) ; val instance = ConfigurationMetadataClassSourceLocator . getInstance ( ) ; val parentClazz = instance . locatePropertiesClassForType ( parentType ) ; val parentTypePath = ConfigurationMetadataClassSourceLocator . buildTypeSourcePath ( this . sourcePath , parentClazz . getName ( ) ) ; parseCompilationUnit ( collectedProps , collectedGroups , p , parentTypePath , parentClazz . getName ( ) , indexNameWithBrackets ) ; } } }
public class Property { /** * Determines if the headless parameter was set , to have the browser run in headless mode . This only * can be used for Chrome and Firefox . * @ return boolean : is headless set or not */ public static boolean runHeadless ( ) { } }
String headless = getProgramProperty ( HEADLESS ) ; if ( headless == null ) { return false ; } if ( "" . equals ( headless ) ) { return true ; } return "true" . equalsIgnoreCase ( headless ) ;
public class ProjectAnalyzer { /** * Analyzes all classes in the given project path . * @ param projectClassPaths The project class paths * @ param projectSourcePaths The project source file paths * @ param ignoredResources The fully - qualified root resource class names to be ignored * @ return The REST resource representations */ public Resources analyze ( Set < Path > projectClassPaths , Set < Path > projectSourcePaths , Set < String > ignoredResources ) { } }
lock . lock ( ) ; try { projectClassPaths . forEach ( this :: addProjectPath ) ; // analyze relevant classes final JobRegistry jobRegistry = JobRegistry . getInstance ( ) ; final Set < ClassResult > classResults = new HashSet < > ( ) ; classes . stream ( ) . filter ( this :: isJAXRSRootResource ) . filter ( r -> ! ignoredResources . contains ( r ) ) . forEach ( c -> jobRegistry . analyzeResourceClass ( c , new ClassResult ( ) ) ) ; Pair < String , ClassResult > classResultPair ; while ( ( classResultPair = jobRegistry . nextUnhandledClass ( ) ) != null ) { final ClassResult classResult = classResultPair . getRight ( ) ; classResults . add ( classResult ) ; analyzeClass ( classResultPair . getLeft ( ) , classResult ) ; bytecodeAnalyzer . analyzeBytecode ( classResult ) ; } javaDocAnalyzer . analyze ( projectSourcePaths , classResults ) ; return resultInterpreter . interpret ( classResults ) ; } finally { lock . unlock ( ) ; }
public class Intersectiond { /** * Determine whether the given ray with the given < code > origin < / code > and direction < code > dir < / code > * intersects the axis - aligned rectangle given as its minimum corner < code > min < / code > and maximum corner < code > max < / code > , * and return the values of the parameter < i > t < / i > in the ray equation < i > p ( t ) = origin + t * dir < / i > of the near and far point of intersection * as well as the side of the axis - aligned rectangle the ray intersects . * This method also detects an intersection for a ray whose origin lies inside the axis - aligned rectangle . * Reference : < a href = " https : / / dl . acm . org / citation . cfm ? id = 1198748 " > An Efficient and Robust Ray – Box Intersection < / a > * @ see # intersectRayAar ( double , double , double , double , double , double , double , double , Vector2d ) * @ param origin * the ray ' s origin * @ param dir * the ray ' s direction * @ param min * the minimum corner of the axis - aligned rectangle * @ param max * the maximum corner of the axis - aligned rectangle * @ param result * a vector which will hold the values of the parameter < i > t < / i > in the ray equation * < i > p ( t ) = origin + t * dir < / i > of the near and far point of intersection * @ return the side on which the near intersection occurred as one of * { @ link # AAR _ SIDE _ MINX } , { @ link # AAR _ SIDE _ MINY } , { @ link # AAR _ SIDE _ MAXX } or { @ link # AAR _ SIDE _ MAXY } ; * or < code > - 1 < / code > if the ray does not intersect the axis - aligned rectangle ; */ public static int intersectRayAar ( Vector2dc origin , Vector2dc dir , Vector2dc min , Vector2dc max , Vector2d result ) { } }
return intersectRayAar ( origin . x ( ) , origin . y ( ) , dir . x ( ) , dir . y ( ) , min . x ( ) , min . y ( ) , max . x ( ) , max . y ( ) , result ) ;
public class ClientFactory { /** * Accept a { @ link IMessageSession } in default { @ link ReceiveMode # PEEKLOCK } mode from service bus connection string builder with specified session id . Session Id can be null , if null , service will return the first available session . * @ param amqpConnectionStringBuilder the connection string builder * @ param sessionId session id , if null , service will return the first available session , otherwise , service will return specified session * @ return { @ link IMessageSession } instance * @ throws InterruptedException if the current thread was interrupted while waiting * @ throws ServiceBusException if the session cannot be accepted */ public static IMessageSession acceptSessionFromConnectionStringBuilder ( ConnectionStringBuilder amqpConnectionStringBuilder , String sessionId ) throws InterruptedException , ServiceBusException { } }
return acceptSessionFromConnectionStringBuilder ( amqpConnectionStringBuilder , sessionId , DEFAULTRECEIVEMODE ) ;
public class ThresholdBlock { /** * Converts the gray scale input image into a binary image * @ param input Input image * @ param output Output binary image */ public void process ( T input , GrayU8 output ) { } }
output . reshape ( input . width , input . height ) ; int requestedBlockWidth = this . requestedBlockWidth . computeI ( Math . min ( input . width , input . height ) ) ; selectBlockSize ( input . width , input . height , requestedBlockWidth ) ; stats . reshape ( input . width / blockWidth , input . height / blockHeight ) ; int innerWidth = input . width % blockWidth == 0 ? input . width : input . width - blockWidth - input . width % blockWidth ; int innerHeight = input . height % blockHeight == 0 ? input . height : input . height - blockHeight - input . height % blockHeight ; computeStatistics ( input , innerWidth , innerHeight ) ; applyThreshold ( input , output ) ;
public class CmsSearchWidgetDialog { /** * Returns the creation date the resources have to have as maximum . < p > * @ return the creation date the resources have to have as maximum */ public String getMaxDateCreated ( ) { } }
if ( m_searchParams . getMaxDateCreated ( ) == Long . MAX_VALUE ) { return "" ; } return Long . toString ( m_searchParams . getMaxDateCreated ( ) ) ;
public class DeleteApplicationsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeleteApplicationsRequest deleteApplicationsRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( deleteApplicationsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteApplicationsRequest . getConfigurationIds ( ) , CONFIGURATIONIDS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class ObjectMapper { /** * Maps each row of the given result set to an object ot this OM ' s type . * @ param rs The ResultSet to process ( map to objects ) * @ return An array of objects ( of this OM ' s class ) . If there are no rows * in the ResultSet , an empty ( zero - length ) array will be returned . */ public synchronized Object [ ] mapObjects ( ResultSet pRSet ) throws SQLException { } }
Vector result = new Vector ( ) ; ResultSetMetaData meta = pRSet . getMetaData ( ) ; int cols = meta . getColumnCount ( ) ; // Get colum names String [ ] colNames = new String [ cols ] ; for ( int i = 0 ; i < cols ; i ++ ) { colNames [ i ] = meta . getColumnName ( i + 1 ) ; // JDBC cols start at 1 . . . /* System . out . println ( meta . getColumnLabel ( i + 1 ) ) ; System . out . println ( meta . getColumnName ( i + 1 ) ) ; System . out . println ( meta . getColumnType ( i + 1 ) ) ; System . out . println ( meta . getColumnTypeName ( i + 1 ) ) ; / / System . out . println ( meta . getTableName ( i + 1 ) ) ; / / System . out . println ( meta . getCatalogName ( i + 1 ) ) ; / / System . out . println ( meta . getSchemaName ( i + 1 ) ) ; / / Last three NOT IMPLEMENTED ! ! */ } // Loop through rows in resultset while ( pRSet . next ( ) ) { Object obj = null ; try { obj = mInstanceClass . newInstance ( ) ; // Asserts empty constructor ! } catch ( IllegalAccessException iae ) { mLog . logError ( iae ) ; // iae . printStackTrace ( ) ; } catch ( InstantiationException ie ) { mLog . logError ( ie ) ; // ie . printStackTrace ( ) ; } // Read each colum from this row into object for ( int i = 0 ; i < cols ; i ++ ) { String property = ( String ) mColumnMap . get ( colNames [ i ] ) ; if ( property != null ) { // This column is mapped to a property mapColumnProperty ( pRSet , i + 1 , property , obj ) ; } } // Add object to the result Vector result . addElement ( obj ) ; } return result . toArray ( ( Object [ ] ) Array . newInstance ( mInstanceClass , result . size ( ) ) ) ;
public class SingleExecutionTime { /** * If date is not match , will return previous closest match . * If date is match , will return this date . * @ param date - reference ZonedDateTime instance - never null ; * @ return ZonedDateTime instance , never null . Value obeys logic specified above . * @ throws NoSuchValueException if there is not previous year */ private ZonedDateTime previousClosestMatch ( final ZonedDateTime date ) throws NoSuchValueException { } }
ExecutionTimeResult result = new ExecutionTimeResult ( date , false ) ; for ( int i = 0 ; i < MAX_ITERATIONS ; i ++ ) { result = potentialPreviousClosestMatch ( result . getTime ( ) ) ; if ( result . isMatch ( ) ) { return result . getTime ( ) ; } } throw new NoSuchValueException ( ) ;
public class CloudantClient { /** * Get a database reference for the database with the specified name . * Note that if create is { @ code false } and the database does not exist an instance will be * returned , but the first operation on that instance will throw a * { @ link com . cloudant . client . org . lightcouch . NoDocumentException } because the database does not * exist . * @ param name name of database to access * @ param create flag indicating whether to create the database if it does not exist * @ return Database object * @ see < a * href = " https : / / console . bluemix . net / docs / services / Cloudant / api / database . html " * target = " _ blank " > Databases < / a > */ public Database database ( String name , boolean create ) { } }
return new Database ( this , couchDbClient . database ( name , create ) ) ;
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getTilePosition ( ) { } }
if ( tilePositionEClass == null ) { tilePositionEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 394 ) ; } return tilePositionEClass ;
public class ExtensionHelp { /** * Gets the help icon . * Should be called / used only when in view mode . * @ return the help icon , never { @ code null } . * @ since 2.7.0 */ public static ImageIcon getHelpIcon ( ) { } }
if ( helpIcon == null ) { helpIcon = DisplayUtils . getScaledIcon ( new ImageIcon ( ExtensionHelp . class . getResource ( "/resource/icon/16/201.png" ) ) ) ; } return helpIcon ;
public class StickyGridHeadersBaseAdapterWrapper { /** * Counts the number of items that would be need to fill out the last row in * the group of items with the given header . * @ param header Header set of items are grouped by . * @ return The count of unfilled spaces in the last row . */ private int unFilledSpacesInHeaderGroup ( int header ) { } }
// If mNumColumns is equal to zero we will have a divide by 0 exception if ( mNumColumns == 0 ) { return 0 ; } int remainder = mDelegate . getCountForHeader ( header ) % mNumColumns ; return remainder == 0 ? 0 : mNumColumns - remainder ;
public class FluentIterableWrapper { /** * Returns the elements from this fluent iterable that are instances of the supplied type . The * resulting fluent iterable ' s iterator does not support { @ code remove ( ) } . * @ since 1.25.0 */ @ CheckReturnValue public final < F extends E > FluentIterableWrapper < F > filter ( Class < F > clazz ) { } }
return from ( Iterables . filter ( iterable , clazz ) ) ;
public class ListChangeSetsResult { /** * A list of < code > ChangeSetSummary < / code > structures that provides the ID and status of each change set for the * specified stack . * @ param summaries * A list of < code > ChangeSetSummary < / code > structures that provides the ID and status of each change set for * the specified stack . */ public void setSummaries ( java . util . Collection < ChangeSetSummary > summaries ) { } }
if ( summaries == null ) { this . summaries = null ; return ; } this . summaries = new com . amazonaws . internal . SdkInternalList < ChangeSetSummary > ( summaries ) ;
public class HtmlReport { /** * The same capture is used for the all StackLines . */ private void addLineScreenCaptureForErrorEachStackLine ( List < LineScreenCapture > lineScreenCaptures , RunFailure runFailure ) { } }
if ( runFailure == null ) { return ; // do nothing } List < StackLine > failureLines = runFailure . getStackLines ( ) ; int failureCaptureIndex = matchedLineScreenCaptureIndex ( lineScreenCaptures , failureLines ) ; if ( failureCaptureIndex == - 1 ) { return ; // no failure capture } LineScreenCapture failureCapture = lineScreenCaptures . get ( failureCaptureIndex ) ; for ( int i = 1 ; i < failureLines . size ( ) ; i ++ ) { List < StackLine > errorEachStackLine = new ArrayList < > ( failureLines . size ( ) - i ) ; for ( int j = i ; j < failureLines . size ( ) ; j ++ ) { errorEachStackLine . add ( failureLines . get ( j ) ) ; } LineScreenCapture newCapture = new LineScreenCapture ( ) ; newCapture . setPath ( failureCapture . getPath ( ) ) ; newCapture . addAllStackLines ( errorEachStackLine ) ; int errEachStackLineCaptureIndex = matchedLineScreenCaptureIndex ( lineScreenCaptures , errorEachStackLine ) ; if ( errEachStackLineCaptureIndex == - 1 ) { lineScreenCaptures . add ( newCapture ) ; } else { lineScreenCaptures . set ( errEachStackLineCaptureIndex , newCapture ) ; } }
public class HttpPipelineNextPolicy { /** * Invokes the next { @ link HttpPipelinePolicy } . * @ return a publisher upon subscription invokes next policy and emits response from the policy . */ public Mono < HttpResponse > process ( ) { } }
final int size = this . pipeline . pipelinePolicies ( ) . length ; if ( this . currentPolicyIndex > size ) { return Mono . error ( new IllegalStateException ( "There is no more policies to execute." ) ) ; } else { this . currentPolicyIndex ++ ; if ( this . currentPolicyIndex == size ) { return this . pipeline . httpClient ( ) . send ( this . context . httpRequest ( ) ) ; } else { return this . pipeline . pipelinePolicies ( ) [ this . currentPolicyIndex ] . process ( this . context , this ) ; } }
public class Response { /** * The list of graph objects returned for this request , if any , cast into a particular type of GraphObject . * @ param graphObjectClass the GraphObject - derived interface to cast the graph objects into * @ return the list of graph objects returned , or null if none was returned ( or if the result was not a list ) * @ throws FacebookException If the passed in Class is not a valid GraphObject interface */ public final < T extends GraphObject > GraphObjectList < T > getGraphObjectListAs ( Class < T > graphObjectClass ) { } }
if ( graphObjectList == null ) { return null ; } return graphObjectList . castToListOf ( graphObjectClass ) ;
public class StringBuilderWriter { /** * Appends the specified character sequence to this writer . * < p > An invocation of this method of the form < tt > out . append ( csq ) < / tt > * behaves in exactly the same way as the invocation * < pre > * out . write ( csq . toUrlQueryTypeString ( ) ) < / pre > * < p > Depending on the specification of < tt > toUrlQueryTypeString < / tt > for the * character sequence < tt > csq < / tt > , the entire sequence may not be * appended . For instance , invoking the < tt > toUrlQueryTypeString < / tt > method of a * character buffer will return a subsequence whose content depends upon * the buffer ' s position and limit . * @ param csq The character sequence to append . If < tt > csq < / tt > is * < tt > null < / tt > , then the four characters < tt > " null " < / tt > are * appended to this writer . * @ return This writer * @ since 1.5 */ @ Override public StringBuilderWriter append ( CharSequence csq ) { } }
if ( csq == null ) buf . append ( "null" ) ; else buf . append ( csq ) ; return this ;
public class QuickSort { /** * Performs sorting based on the double values natural comparator . * { @ link Double # NaN } values will not be handled appropriately . * @ param x the array to sort * @ param start the starting index ( inclusive ) to sort * @ param end the ending index ( exclusive ) to sort * @ param paired a collection of lists , every list will have its indices swapped as well */ public static void sort ( double [ ] x , int start , int end , Collection < List < ? > > paired ) { } }
int a = start ; int n = end - start ; if ( n < 7 ) /* Insertion sort on smallest arrays */ { for ( int i = a ; i < end ; i ++ ) for ( int j = i ; j > a && x [ j - 1 ] > x [ j ] ; j -- ) swap ( x , j , j - 1 , paired ) ; return ; } int pm = a + ( n / 2 ) ; /* Small arrays , middle element */ if ( n > 7 ) { int pl = a ; int pn = a + n - 1 ; if ( n > 40 ) /* Big arrays , pseudomedian of 9 */ { int s = n / 8 ; pl = med3 ( x , pl , pl + s , pl + 2 * s ) ; pm = med3 ( x , pm - s , pm , pm + s ) ; pn = med3 ( x , pn - 2 * s , pn - s , pn ) ; } pm = med3 ( x , pl , pm , pn ) ; /* Mid - size , med of 3 */ } double pivotValue = x [ pm ] ; int pa = a , pb = pa , pc = end - 1 , pd = pc ; while ( true ) { while ( pb <= pc && x [ pb ] <= pivotValue ) { if ( x [ pb ] == pivotValue ) swap ( x , pa ++ , pb , paired ) ; pb ++ ; } while ( pc >= pb && x [ pc ] >= pivotValue ) { if ( x [ pc ] == pivotValue ) swap ( x , pc , pd -- , paired ) ; pc -- ; } if ( pb > pc ) break ; swap ( x , pb ++ , pc -- , paired ) ; } int s ; int pn = end ; s = Math . min ( pa - a , pb - pa ) ; vecswap ( x , a , pb - s , s , paired ) ; s = Math . min ( pd - pc , pn - pd - 1 ) ; vecswap ( x , pb , pn - s , s , paired ) ; // recurse if ( ( s = pb - pa ) > 1 ) sort ( x , a , a + s , paired ) ; if ( ( s = pd - pc ) > 1 ) sort ( x , pn - s , pn , paired ) ;
public class Reporter { /** * Takes the response returned from the HTTP call , and writes it to the output * file , in properly HTML formatted fashion * @ param response - the response object obtained from the call * @ return String : an HTML formatted string with headers */ public static String getResponseOutput ( Response response ) { } }
if ( response == null || response . getMessage ( ) == null || "" . equals ( response . getMessage ( ) ) ) { return "" ; } StringBuilder responseOutput = new StringBuilder ( ) ; String uuid = getUUID ( ) ; responseOutput . append ( ONCLICK_TOGGLE ) . append ( uuid ) . append ( "\")'>Toggle Raw Response</a> " ) ; responseOutput . append ( SPAN_ID ) . append ( uuid ) . append ( DISPLAY_NONE ) ; responseOutput . append ( DIV ) . append ( response . getMessage ( ) ) . append ( END_DIV ) ; responseOutput . append ( END_SPAN ) ; return responseOutput . toString ( ) ;
public class AmazonApiGatewayV2Client { /** * The API mapping . * @ param updateApiMappingRequest * @ return Result of the UpdateApiMapping operation returned by the service . * @ throws NotFoundException * The resource specified in the request was not found . * @ throws TooManyRequestsException * The client is sending more than the allowed number of requests per unit of time . * @ throws BadRequestException * One of the parameters in the request is invalid . * @ throws ConflictException * The resource already exists . * @ sample AmazonApiGatewayV2 . UpdateApiMapping */ @ Override public UpdateApiMappingResult updateApiMapping ( UpdateApiMappingRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeUpdateApiMapping ( request ) ;
public class ClassGraph { /** * Print all relations for a given ' s class ' s tag * @ param tagname the tag containing the given relation * @ param from the source class * @ param edgetype the dot edge specification */ private void allRelation ( Options opt , RelationType rt , ClassDoc from ) { } }
String tagname = rt . lower ; for ( Tag tag : from . tags ( tagname ) ) { String t [ ] = tokenize ( tag . text ( ) ) ; // l - src label l - dst target t = t . length == 1 ? new String [ ] { "-" , "-" , "-" , t [ 0 ] } : t ; // Shorthand if ( t . length != 4 ) { System . err . println ( "Error in " + from + "\n" + tagname + " expects four fields (l-src label l-dst target): " + tag . text ( ) ) ; return ; } ClassDoc to = from . findClass ( t [ 3 ] ) ; if ( to != null ) { if ( hidden ( to ) ) continue ; relation ( opt , rt , from , to , t [ 0 ] , t [ 1 ] , t [ 2 ] ) ; } else { if ( hidden ( t [ 3 ] ) ) continue ; relation ( opt , rt , from , from . toString ( ) , to , t [ 3 ] , t [ 0 ] , t [ 1 ] , t [ 2 ] ) ; } }
public class MetricSchemaRecordQuery { /** * Specifies the metric name of the query . * @ param metric The metric name of the query . Cannot be null or empty . */ public void setMetric ( String metric ) { } }
SystemAssert . requireArgument ( metric != null && ! metric . isEmpty ( ) , "Metric cannot be null or empty." ) ; this . metric = metric ;
public class AdapterManager { /** * TODO : this method could move to a more general place . */ private static Adapter tryAdapter ( Class adapterClass , Object externalContext ) { } }
try { Adapter sa = ( Adapter ) adapterClass . newInstance ( ) ; try { AdapterContext context = new AdapterContext ( externalContext ) ; if ( sa . accept ( context ) ) { _log . info ( "Adapter " + adapterClass . getName ( ) + " accepted." ) ; sa . setContext ( context ) ; return sa ; } else { _log . info ( "Adapter " + adapterClass . getName ( ) + " is present but did not accept." ) ; } } catch ( Exception e ) { _log . error ( adapterClass . getName ( ) + ".accept() threw an exception." , e ) ; } catch ( LinkageError e ) { _log . error ( adapterClass . getName ( ) + ".accept() caused a linkage error and may be out of date." , e ) ; } } catch ( InstantiationException e ) { _log . error ( "Could not create instance of Adapter class " + adapterClass . getName ( ) , e ) ; } catch ( IllegalAccessException e ) { _log . error ( "Could not create instance of Adapter class " + adapterClass . getName ( ) , e ) ; } catch ( Exception e ) { _log . error ( "Error creating instance of Adapter class " + adapterClass . getName ( ) , e ) ; } return null ;
public class Filters { /** * Specify one or more < a > ExtendedKeyUsage < / a > extension values . * @ param extendedKeyUsage * Specify one or more < a > ExtendedKeyUsage < / a > extension values . * @ see ExtendedKeyUsageName */ public void setExtendedKeyUsage ( java . util . Collection < String > extendedKeyUsage ) { } }
if ( extendedKeyUsage == null ) { this . extendedKeyUsage = null ; return ; } this . extendedKeyUsage = new java . util . ArrayList < String > ( extendedKeyUsage ) ;
public class DecisionTableImpl { /** * If valid input values are defined , check that all parameters match the respective valid inputs * @ param ctx * @ param params * @ return */ private Either < FEELEvent , Object > actualInputsMatchInputValues ( EvaluationContext ctx , Object [ ] params ) { } }
// check that all the parameters match the input list values if they are defined for ( int i = 0 ; i < params . length ; i ++ ) { final DTInputClause input = inputs . get ( i ) ; // if a list of values is defined , check the the parameter matches the value if ( input . getInputValues ( ) != null && ! input . getInputValues ( ) . isEmpty ( ) ) { final Object parameter = params [ i ] ; boolean satisfies = input . getInputValues ( ) . stream ( ) . map ( ut -> ut . apply ( ctx , parameter ) ) . filter ( Boolean :: booleanValue ) . findAny ( ) . orElse ( false ) ; if ( ! satisfies ) { String values = input . getInputValuesText ( ) ; return Either . ofLeft ( new InvalidInputEvent ( FEELEvent . Severity . ERROR , input . getInputExpression ( ) + "='" + parameter + "' does not match any of the valid values " + values + " for decision table '" + getName ( ) + "'." , getName ( ) , null , values ) ) ; } } } return Either . ofRight ( true ) ;
public class Arc {
    /**
     * Sets the location, size, angular extents, and closure type of this arc
     * to the specified values.
     *
     * @param point  the frame's upper-left corner coordinates
     * @param size   the frame's width and height
     * @param start  the starting angle
     * @param extent the angular extent
     * @param type   the closure type
     */
    public void setArc(XY point, IDimension size, double start, double extent, int type) {
        // Delegate to the primitive-argument overload.
        setArc(point.x(), point.y(), size.width(), size.height(), start, extent, type);
    }
}
public class PreconditionUtil {
    /**
     * Fails a check by throwing an {@link IllegalStateException} carrying the
     * given message.
     *
     * @param message format string for the exception message ({@code null}
     *        produces an empty message)
     * @param args    arguments substituted into the format string
     * @throws IllegalStateException always
     */
    public static void fail(String message, Object... args) {
        final String formatted;
        if (message == null) {
            formatted = "";
        } else {
            formatted = String.format(message, args);
        }
        throw new IllegalStateException(formatted);
    }
}
public class DBFUtils {
    /**
     * Reads a little-endian 32-bit integer from the given input.
     *
     * @param in DataInput to read from
     * @return the int value of the next 4 bytes interpreted as little-endian
     * @throws IOException  if an I/O error happens
     * @throws EOFException if the end of file is reached before 4 bytes are read
     */
    public static int readLittleEndianInt(DataInput in) throws IOException {
        // Assemble the four bytes from least to most significant.
        int value = in.readUnsignedByte();
        value |= in.readUnsignedByte() << 8;
        value |= in.readUnsignedByte() << 16;
        value |= in.readUnsignedByte() << 24;
        return value;
    }
}
public class FTPClient {
    /**
     * Returns the remote current working directory, as reported by the FTP
     * {@code PWD} command.
     *
     * @return the remote current working directory
     * @throws ServerException if the reply cannot be parsed or the server
     *         refuses the request
     * @throws IOException on I/O errors on the control channel
     */
    public String getCurrentDir() throws IOException, ServerException {
        Reply reply = null;
        try {
            reply = controlChannel.execute(Command.PWD);
        } catch (FTPReplyParseException rpe) {
            throw ServerException.embedFTPReplyParseException(rpe);
        } catch (UnexpectedReplyCodeException urce) {
            throw ServerException.embedUnexpectedReplyCodeException(urce,
                    "Server refused returning current directory");
        }
        String strReply = reply.getMessage();
        // A conforming reply quotes the directory name, e.g. "/home/user".
        if (strReply.length() > 0 && strReply.charAt(0) == '"') {
            // NOTE(review): if the closing quote is missing, indexOf returns -1
            // and substring throws -- presumably servers always send it; verify.
            return strReply.substring(1, strReply.indexOf('"', 1));
        } else {
            throw ServerException.embedFTPReplyParseException(
                    new FTPReplyParseException(0, "Cannot parse 'PWD' reply: " + reply));
        }
    }
}
public class MLLibUtil {
    /**
     * Converts a 2-D {@link INDArray} to an MLlib dense {@link Matrix}.
     * The data is supplied in column-major ('f') order, duplicating the array
     * when it is a view or not already 'f'-ordered.
     *
     * @param arr the array; must be a matrix
     * @return an MLlib dense matrix with the same dimensions
     * @throws IllegalArgumentException if {@code arr} is not a matrix
     */
    public static Matrix toMatrix(INDArray arr) {
        if (!arr.isMatrix()) {
            throw new IllegalArgumentException("passed in array must be a matrix");
        }
        // if arr is a view - we have to dup anyway
        if (arr.isView()) {
            return Matrices.dense(arr.rows(), arr.columns(), arr.dup('f').data().asDouble());
        } else
            // if not a view - we must ensure data is F ordered
            return Matrices.dense(arr.rows(), arr.columns(),
                    arr.ordering() == 'f' ? arr.data().asDouble() : arr.dup('f').data().asDouble());
    }
}
public class RollingLog { /** * Gets the last log file in the list of logs . * @ return the SimpleLog instance of the last log . */ SimpleLog getLastLog ( ) throws IOException { } }
if ( logFiles . isEmpty ( ) ) { return null ; } return new SimpleLog ( logFiles . get ( logFiles . size ( ) - 1 ) ) ;
public class HtmlCommandButton {
    /**
     * Returns the value of the {@code type} property: the type of button to
     * create. Valid values are "submit", "button" and "reset"; when the
     * property is unset, the state helper supplies the default "submit".
     *
     * @return the button type
     */
    public java.lang.String getType() {
        return (java.lang.String) getStateHelper().eval(PropertyKeys.type, "submit");
    }
}
public class PortletResourceExecutionWorker {
    /**
     * Serves the portlet resource request for this worker's window, streaming
     * output through a {@code ResourcePortletOutputHandler} over the response.
     *
     * @return the value returned by {@code doServeResource} -- presumably a
     *         timing or size metric; confirm against the renderer's contract
     * @throws Exception propagated from the renderer
     */
    @Override
    protected Long callInternal() throws Exception {
        return portletRenderer.doServeResource(portletWindowId, request, response,
                new ResourcePortletOutputHandler(response));
    }
}
public class BinlogDumpCommandPacket {
    /**
     * Serializes this binlog-dump command into its wire format:
     * <pre>
     *  1 byte   command
     *  4 bytes  binlog position to start at (little endian)
     *  2 bytes  binlog flags
     *  4 bytes  server_id of the slave (little endian)
     *  n bytes  binlog file name (optional)
     * </pre>
     *
     * @return the encoded packet body
     * @throws IOException propagated from the in-memory stream (not expected)
     */
    public byte[] toBytes() throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // 0. write command number
        out.write(getCommand());
        // 1. write 4 bytes bin-log position to start at
        ByteHelper.writeUnsignedIntLittleEndian(binlogPosition, out);
        // 2. write 2 bytes bin-log flags: low byte first, then a zero high byte
        int binlog_flags = 0;
        binlog_flags |= BINLOG_SEND_ANNOTATE_ROWS_EVENT;
        out.write(binlog_flags);
        out.write(0x00);
        // 3. write 4 bytes server id of the slave
        ByteHelper.writeUnsignedIntLittleEndian(this.slaveServerId, out);
        // 4. write bin-log file name if necessary
        if (StringUtils.isNotEmpty(this.binlogFileName)) {
            // NOTE(review): encodes with the platform default charset --
            // confirm the server expects that encoding for file names.
            out.write(this.binlogFileName.getBytes());
        }
        return out.toByteArray();
    }
}
public class CProductPersistenceImpl {
    /**
     * Returns the first c product in the ordered set where uuid = ? and
     * companyId = ?.
     *
     * @param uuid the uuid
     * @param companyId the company ID
     * @param orderByComparator the comparator to order the set by (optionally {@code null})
     * @return the first matching c product
     * @throws NoSuchCProductException if a matching c product could not be found
     */
    @Override
    public CProduct findByUuid_C_First(String uuid, long companyId,
            OrderByComparator<CProduct> orderByComparator) throws NoSuchCProductException {
        CProduct cProduct = fetchByUuid_C_First(uuid, companyId, orderByComparator);
        if (cProduct != null) {
            return cProduct;
        }
        // Not found: build the standard "no such entity with key" message.
        StringBundler msg = new StringBundler(6);
        msg.append(_NO_SUCH_ENTITY_WITH_KEY);
        msg.append("uuid=");
        msg.append(uuid);
        msg.append(", companyId=");
        msg.append(companyId);
        msg.append("}");
        throw new NoSuchCProductException(msg.toString());
    }
}
public class SerializationUtils {
    /**
     * Serializes the object to the given output stream.
     * The internal serialization buffer is flushed so every byte of the
     * serialized form reaches {@code writeTo}; the stream itself is NOT
     * closed by this method.
     *
     * @param toSave  the object to save
     * @param writeTo the output stream to write to
     * @throws RuntimeException wrapping any {@link IOException} raised while
     *         serializing
     */
    public static void writeObject(Serializable toSave, OutputStream writeTo) {
        try {
            ObjectOutputStream os = new ObjectOutputStream(writeTo);
            os.writeObject(toSave);
            // BUG FIX: without this flush, bytes still sitting in the
            // ObjectOutputStream's internal block-data buffer are lost,
            // producing a truncated (unreadable) serialized form.
            os.flush();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}
public class FeatureCoreStyleExtension {
    /**
     * Deletes the style and icon relationships for the feature table, at both
     * the row level and the table level.
     *
     * @param featureTable feature table name
     */
    public void deleteRelationships(String featureTable) {
        deleteStyleRelationship(featureTable);
        deleteTableStyleRelationship(featureTable);
        deleteIconRelationship(featureTable);
        deleteTableIconRelationship(featureTable);
    }
}
public class LinkFactoryImpl {
    /**
     * Returns the path to the documentation file for the linked type. For a
     * package-frame link the bare file name is used; otherwise the path is
     * resolved relative to the documentation root (e.g. "../../java/lang/Object.html").
     *
     * @param linkInfo the information about the link
     * @return the doc path for the linked type element
     */
    private DocPath getPath(LinkInfoImpl linkInfo) {
        if (linkInfo.context == LinkInfoImpl.Kind.PACKAGE_FRAME) {
            // Not really necessary to do this but we want to be consistent
            // with 1.4.2 output.
            return DocPath.forName(linkInfo.utils, linkInfo.typeElement);
        }
        return m_writer.pathToRoot.resolve(DocPath.forClass(linkInfo.utils, linkInfo.typeElement));
    }
}
public class ThinTableModel {
    /**
     * Adds a mouse listener to the table's header that triggers a sort when a
     * column heading is clicked. Clicking the already-sorted column (or
     * shift-clicking) reverses the sort order.
     *
     * @param table the table whose header should react to clicks
     */
    public void addMouseListenerToHeaderInTable(JTable table) {
        table.setColumnSelectionAllowed(false);
        MouseListener mouseListener = new MouseAdapter() {
            public void mouseClicked(MouseEvent e) {
                if (e.getSource() instanceof JTableHeader) { // Always true: listener is on the header
                    JTableHeader tableHeader = (JTableHeader) e.getSource();
                    TableColumnModel columnModel = tableHeader.getColumnModel();
                    int viewColumn = columnModel.getColumnIndexAtX(e.getX());
                    // Translate the view column under the click to a model column.
                    int column = tableHeader.getTable().convertColumnIndexToModel(viewColumn);
                    if (e.getClickCount() == 1 && column != -1) {
                        boolean order = Constants.ASCENDING;
                        // Shift-click starts with the opposite order.
                        if ((e.getModifiers() & InputEvent.SHIFT_MASK) != 0)
                            order = !order;
                        if (!(tableHeader.getDefaultRenderer() instanceof SortableHeaderRenderer))
                            tableHeader.setDefaultRenderer(
                                    new SortableHeaderRenderer(tableHeader.getDefaultRenderer())); // Set up header renderer the first time
                        // Clicking the column already sorted in this order flips the order.
                        if ((((SortableHeaderRenderer) tableHeader.getDefaultRenderer()).getSortedByColumn() == viewColumn)
                                && (((SortableHeaderRenderer) tableHeader.getDefaultRenderer()).getSortedOrder() == order))
                            order = !order;
                        column = columnToFieldColumn(column);
                        boolean bSuccess = sortByColumn(column, order);
                        // Only update the header's sort indicator if the sort succeeded.
                        if (bSuccess)
                            setSortedByColumn(tableHeader, viewColumn, order);
                    }
                }
            }
        };
        table.getTableHeader().addMouseListener(mouseListener);
    }
}
public class JsonLog4jLayout { /** * Property for specifying date format . * See < a href = " https : / / docs . oracle . com / javase / 8 / docs / api / java / text / SimpleDateFormat . html " > SimpleDateFormat < / a > for details . * @ param dateFormatPattern */ public void setDateFormatPattern ( String dateFormatPattern ) { } }
try { simpleDateFormat = new SimpleDateFormat ( dateFormatPattern ) ; } catch ( Exception e ) { throw new IllegalArgumentException ( "Incorrect date pattern. " + "Ensure to use formats provided in https://docs.oracle.com/javase/8/docs/api/java/text/SimpleDateFormat.html" , e ) ; }
public class HotSpotJavaDumperImpl {
    /**
     * Creates a {@code VirtualMachine} wrapper by reflectively loading the
     * attach API ({@code com.sun.tools.attach.VirtualMachine}), attaching to
     * this process, and capturing the {@code remoteDataDump} method.
     *
     * @return a wrapper around the attached VM, or a wrapper holding nulls
     *         when the attach classes are not available
     * @throws VirtualMachineException if invoking the attach API fails
     */
    private VirtualMachine createVirtualMachine() throws VirtualMachineException {
        ClassLoader toolsClassLoader;
        File toolsJar = getToolsJar();
        if (toolsJar == null) {
            // The attach classes are on the boot classpath on Mac.
            toolsClassLoader = HotSpotJavaDumperImpl.class.getClassLoader();
        } else {
            try {
                toolsClassLoader = new URLClassLoader(new URL[] { toolsJar.getAbsoluteFile().toURI().toURL() });
            } catch (MalformedURLException e) {
                throw new RuntimeException(e);
            }
        }
        try {
            Class<?> vmClass = toolsClassLoader.loadClass("com.sun.tools.attach.VirtualMachine");
            Method attachMethod = vmClass.getMethod("attach", new Class<?>[] { String.class });
            // Static VirtualMachine.attach(pid): attach to the current process.
            Object toolsVM = attachMethod.invoke(null, new Object[] { PID });
            Method remoteDataDumpMethod = toolsVM.getClass().getMethod("remoteDataDump", new Class<?>[] { Object[].class });
            return new VirtualMachine(toolsVM, remoteDataDumpMethod);
        } catch (ClassNotFoundException e) {
            // The class isn't found, so we won't be able to create dumps.
            // Intentionally swallowed: fall through to the null wrapper below.
        } catch (InvocationTargetException e) {
            throw new VirtualMachineException(e);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        // Attach API unavailable: return a wrapper that cannot produce dumps.
        return new VirtualMachine(null, null);
    }
}
public class AbstractAggregatorImpl {
    /**
     * Submits the builder to the build executor, making the originating
     * request available to the worker thread via the aggregator's
     * {@code currentRequest} thread-local for the duration of the build.
     *
     * @param builder the build task to run
     * @param req the request associated with the build
     * @return a future for the builder's result
     */
    @Override
    public Future<?> buildAsync(final Callable<?> builder, final HttpServletRequest req) {
        return getExecutors().getBuildExecutor().submit(new Callable<Object>() {
            public Object call() throws Exception {
                AbstractAggregatorImpl.this.currentRequest.set(req);
                Object result;
                try {
                    result = builder.call();
                } finally {
                    // Always clear to avoid leaking the request across pooled threads.
                    AbstractAggregatorImpl.this.currentRequest.set(null);
                }
                return result;
            }
        });
    }
}
public class StringUtils {
    /**
     * Converts byte data to an upper-case hexadecimal string, two characters
     * per byte.
     *
     * @param data the bytes to convert, may be {@code null}
     * @return the hex representation, or {@code null} when {@code data} is null
     */
    public static String byteToHex(byte[] data) {
        if (data == null) {
            return null;
        }
        final char[] digits = "0123456789ABCDEF".toCharArray();
        final StringBuilder hex = new StringBuilder(data.length * 2);
        for (final byte b : data) {
            // High nibble, then low nibble.
            hex.append(digits[(b >> 4) & 0x0F]);
            hex.append(digits[b & 0x0F]);
        }
        return hex.toString();
    }
}
public class FailurePolicy {
    /**
     * Returns whether an execution result counts as a failure under the
     * configured failure conditions. A throwing user-supplied predicate is
     * treated as non-matching. When no condition matched and none are
     * configured to check failures, any non-null throwable is a failure.
     *
     * @param result  the execution result (may be null)
     * @param failure the throwable raised by the execution (may be null)
     * @return {@code true} when the result/failure pair should be treated as a failure
     * @see #handle(Class...)
     * @see #handle(List)
     * @see #handleIf(BiPredicate)
     * @see #handleIf(Predicate)
     * @see #handleResult(R)
     * @see #handleResultIf(Predicate)
     */
    public boolean isFailure(R result, Throwable failure) {
        for (BiPredicate<R, Throwable> predicate : failureConditions) {
            try {
                if (predicate.test(result, failure))
                    return true;
            } catch (Exception ignored) {
                // Ignore confused user-supplied predicates.
                // They should not be allowed to halt execution of the operation.
            }
        }
        // Fail by default if a failure is not checked by a condition
        return failure != null && !failuresChecked;
    }
}
public class ArraySet {
    /**
     * Returns the index of the given element in this set.
     *
     * @param t the element to look up
     * @return the element's index, {@code -1} if it is not present, or
     *         {@code null} when {@code t} itself is {@code null}
     */
    Integer indexOf(T t) {
        // NOTE(review): asymmetric contract -- null input yields null while a
        // missing element yields -1; confirm callers depend on the distinction.
        if (t == null) {
            return null;
        }
        Integer i = _set.get(t);
        if (i == null) {
            return -1;
        }
        return i;
    }
}
public class ReplCache {
    /**
     * Places a key/value pair into one or several nodes in the cluster, and
     * optionally into the local L1 cache.
     *
     * @param key the key, needs to be serializable
     * @param val the value, needs to be serializable
     * @param repl_count number of replicas: -1 = all nodes, 1 = the single
     *        node picked by consistent hashing of the key, K&gt;1 = the K
     *        nodes matching the key's consistent hashes; 0 is rejected
     * @param timeout expiration for the pair: -1 = do not cache in L1,
     *        0 = cache until removed/evicted, &gt;0 = idle milliseconds to keep
     * @param synchronous whether to block until all cluster nodes applied the change
     */
    @ManagedOperation
    public void put(K key, V val, short repl_count, long timeout, boolean synchronous) {
        // A replica count of 0 would store nothing anywhere; reject it up front.
        if (repl_count == 0) {
            if (log.isWarnEnabled())
                log.warn("repl_count of 0 is invalid, data will not be stored in the cluster");
            return;
        }
        mcastPut(key, val, repl_count, timeout, synchronous);
        // timeout == -1 means "skip the local L1 cache" per the contract above.
        if (l1_cache != null && timeout >= 0)
            l1_cache.put(key, val, timeout);
    }
}
public class Base64 { /** * Encodes the content of the supplied InputStream into Base64 notation . * Valid options : * < pre > * GZIP : gzip - compresses object before encoding it . * DONT _ BREAK _ LINES : don ' t break lines at 76 characters * & lt ; i & gt ; Note : Technically , this makes your encoding non - compliant . & lt ; / i & gt ; * < / pre > * Example : < code > encodeBytes ( myData , Base64 . GZIP ) < / code > or * Example : < code > encodeBytes ( myData , Base64 . GZIP | Base64 . DONT _ BREAK _ LINES ) < / code > * @ param source The data to convert * @ param options Specified options - the alphabet type is pulled from this ( standard , url - safe , ordered ) * @ return the encoded bytes * @ see Base64 # GZIP */ public static String encode ( java . io . InputStream source , int options ) { } }
if ( source == null ) { throw new IllegalArgumentException ( "Source cannot be null" ) ; } java . io . ByteArrayOutputStream baos = new java . io . ByteArrayOutputStream ( ) ; Base64 . OutputStream b64os = new Base64 . OutputStream ( baos , ENCODE | options ) ; BufferedInputStream input = new BufferedInputStream ( source ) ; java . io . OutputStream output = b64os ; boolean error = false ; try { if ( ( options & GZIP ) == GZIP ) { output = new java . util . zip . GZIPOutputStream ( output ) ; } int numRead = 0 ; byte [ ] buffer = new byte [ 1024 ] ; while ( ( numRead = input . read ( buffer ) ) > - 1 ) { output . write ( buffer , 0 , numRead ) ; } output . close ( ) ; } catch ( IOException e ) { error = true ; throw new RuntimeException ( e ) ; // error using reading from byte array ! } finally { try { input . close ( ) ; } catch ( IOException e ) { if ( ! error ) new RuntimeException ( e ) ; // error closing input stream } } // Return value according to relevant encoding . try { return new String ( baos . toByteArray ( ) , PREFERRED_ENCODING ) ; } catch ( java . io . UnsupportedEncodingException uue ) { return new String ( baos . toByteArray ( ) ) ; }
public class LockChecker {
    /**
     * Gets the LockSet at the given location, either from the lock dataflow
     * analysis when available, or from a cached conservative approximation.
     *
     * @param location the Location
     * @return the LockSet at that Location
     * @throws DataflowAnalysisException propagated from the dataflow analysis
     */
    public LockSet getFactAtLocation(Location location) throws DataflowAnalysisException {
        if (lockDataflow != null) {
            return lockDataflow.getFactAtLocation(location);
        } else {
            // No dataflow available: synthesize (and cache) an approximation.
            LockSet lockSet = cache.get(location);
            if (lockSet == null) {
                lockSet = new LockSet();
                lockSet.setDefaultLockCount(0);
                if (method.isSynchronized() && !method.isStatic()) {
                    // LockSet contains just the "this" reference
                    ValueNumber instance = vnaDataflow.getAnalysis().getThisValue();
                    lockSet.setLockCount(instance.getNumber(), 1);
                } else {
                    // LockSet is completely empty - nothing to do
                }
                cache.put(location, lockSet);
            }
            return lockSet;
        }
    }
}
public class ContentOutputFormat {
    /**
     * Queries forest information from the server; must be attempting or doing
     * fastload when this method is called. The result format varies by
     * assignment policy:
     * bucket: (fid, host, updateAllow);
     * range/statistical: (fid, host, fragmentCount);
     * legacy: (fid, host).
     * With failover enabled, each entry is followed by a (forest, host)*
     * replica list terminated by an integer 0.
     *
     * @param cs the content source to query
     * @return a forest-info map keyed by forest id
     * @throws IOException on request failure or when the database has no forests
     */
    protected LinkedMapWritable queryForestInfo(ContentSource cs) throws IOException {
        Session session = null;
        ResultSequence result = null;
        try {
            session = cs.newSession();
            AdhocQuery query = null;
            if (legacy) {
                LOG.debug("Legacy assignment is assumed for older MarkLogic" + " Server.");
                query = session.newAdhocQuery(FOREST_HOST_MAP_QUERY);
            } else {
                // failover if restrict host is not set and the server is 9.0:
                // we need the failover forests and hosts for failover
                if (failover) {
                    query = session.newAdhocQuery(FOREST_REPLICA_HOST_QUERY);
                } else {
                    query = session.newAdhocQuery(FOREST_HOST_QUERY);
                }
                if (policy == AssignmentPolicy.Kind.RANGE || policy == AssignmentPolicy.Kind.QUERY) {
                    String pName = conf.get(OUTPUT_PARTITION);
                    query.setNewStringVariable("partition-name", pName);
                } else {
                    query.setNewStringVariable("partition-name", "");
                }
                query.setNewStringVariable("policy", policy.toString().toLowerCase());
            }
            // query forest status mapping
            RequestOptions options = new RequestOptions();
            options.setDefaultXQueryVersion("1.0-ml");
            query.setOptions(options);
            if (LOG.isDebugEnabled()) {
                LOG.debug(query.getQuery());
            }
            result = session.submitRequest(query);
            LinkedMapWritable forestStatusMap = new LinkedMapWritable();
            Text forest = null;
            List<ForestHost> replicas = new ArrayList<ForestHost>();
            String outputHost = cs.getConnectionProvider().getHostName();
            boolean local = MODE_LOCAL.equals(conf.get(EXECUTION_MODE));
            // The result stream alternates: forest id, then host (and
            // policy-dependent extras); `forest == null` marks the start of a
            // new record.
            while (result.hasNext()) {
                ResultItem item = result.next();
                if (forest == null) {
                    forest = new Text(item.asString());
                } else {
                    String hostName = item.asString();
                    // 23798: replace hostname in forest config with
                    // user-specified output host
                    if (local && hostName != null && hostName.equals(initHostName)) {
                        hostName = outputHost;
                    }
                    boolean updatable = true;
                    long dc = -1;
                    if (!legacy) {
                        if (policy == AssignmentPolicy.Kind.BUCKET) {
                            // bucket policy: next item is the updateAllow flag
                            item = result.next();
                            updatable = Boolean.parseBoolean(item.asString());
                        } else if (policy == AssignmentPolicy.Kind.RANGE
                                || policy == AssignmentPolicy.Kind.STATISTICAL
                                || policy == AssignmentPolicy.Kind.QUERY) {
                            // range or statistical: next item is the fragment count
                            item = result.next();
                            dc = Long.parseLong(item.asString());
                        }
                    }
                    if (failover) {
                        // Consume (forest, host) pairs until the 0 terminator.
                        String curForest = "";
                        String curHost = "";
                        int count = 0;
                        while (result.hasNext()) {
                            item = result.next();
                            if (ItemType.XS_INTEGER == item.getItemType()) {
                                if (((XSInteger) item.getItem()).asPrimitiveInt() == 0) {
                                    break;
                                }
                            }
                            int index = count % 2;
                            if (index == 0) {
                                curForest = item.asString();
                            } else if (index == 1) {
                                curHost = item.asString();
                                ForestHost info = new ForestHost(curForest, curHost);
                                replicas.add(info);
                            }
                            count++;
                        }
                    } else {
                        ForestHost info = new ForestHost(forest.toString(), hostName);
                        replicas.add(info);
                    }
                    // NOTE(review): replicas is cleared right after being handed
                    // to ForestInfo -- presumably ForestInfo copies the list;
                    // verify, otherwise every entry would share an empty list.
                    forestStatusMap.put(forest, new ForestInfo(hostName, dc, updatable, replicas));
                    forest = null;
                    replicas.clear();
                }
            }
            if (forestStatusMap.size() == 0) {
                throw new IOException("Target database has no forests attached: " + "check forests in database");
            }
            am.initialize(policy, forestStatusMap, conf.getInt(BATCH_SIZE, 10));
            return forestStatusMap;
        } catch (RequestException e) {
            LOG.error(e.getMessage(), e);
            throw new IOException(e);
        } finally {
            if (result != null) {
                result.close();
            }
            if (session != null) {
                session.close();
            }
        }
    }
}
public class CmsXmlSaxWriter {
    /**
     * Writes a line break to the output stream followed by the current
     * indentation, then flushes the stream.
     *
     * @throws SAXException in case of I/O errors
     */
    private void writeNewLine() throws SAXException {
        try {
            // write new line
            m_writer.write("\r\n");
            // write indentation; the loop starts at 1, so one level less than
            // m_indentLevel is emitted -- presumably intentional for the root
            // element; verify.
            for (int i = 1; i < m_indentLevel; i++) {
                m_writer.write(INDENT_STR);
            }
            // flush the stream
            m_writer.flush();
        } catch (IOException e) {
            throw new SAXException(Messages.get().getBundle().key(Messages.ERR_IOERROR_0), e);
        }
    }
}
public class AbstractRemoteTransport {
    /**
     * Executes the work locally on the distributed work manager that owns the
     * given logical address.
     *
     * @param address the logical address
     * @param work the work
     * @throws WorkException in case of error
     */
    public void localDoWork(Address address, DistributableWork work) throws WorkException {
        if (trace)
            log.tracef("LOCAL_DO_WORK(%s, %s)", address, work);
        DistributedWorkManager dwm = workManagerCoordinator.resolveDistributedWorkManager(address);
        dwm.localDoWork(work);
    }
}
public class ProcVariable { /** * Find this element * @ param source text source * @ return start offset */ public int findIn ( Source source ) { } }
if ( regex != null ) { Matcher matcher = regex . matcher ( source . subToEnd ( ) ) ; if ( matcher . find ( ) ) { return source . getOffset ( ) + matcher . start ( ) ; } else { return - 1 ; } } else { return - 1 ; }
public class JBBPDslBuilder {
    /**
     * Creates a named custom-type array whose size is calculated by an expression.
     *
     * @param type custom type, must not be null
     * @param name name of the array, can be null for an anonymous one
     * @param sizeExpression expression to calculate array length, must not be null
     * @param param optional parameter for the field, can be null
     * @return this builder instance, never null
     */
    public JBBPDslBuilder CustomArray(final String type, final String name, final String sizeExpression, final String param) {
        final ItemCustom item = new ItemCustom(type, name, this.byteOrder);
        item.array = true;
        // NOTE(review): the optional parameter is stored in bitLenExpression --
        // presumably custom types reuse that slot for their extra parameter;
        // confirm.
        item.bitLenExpression = param == null ? null : assertExpressionChars(param);
        item.sizeExpression = assertExpressionChars(sizeExpression);
        this.addItem(item);
        return this;
    }
}
public class ImgCompressUtils {
    /**
     * Compresses an image to the given width/height and quality. When
     * {@code isForceWh} is false and the requested width or height exceeds
     * the source image's, the source dimensions are used instead; when true,
     * the requested dimensions are always used.
     *
     * @param srcFile   path of the source image
     * @param desFile   destination path for the compressed image, including the file name
     * @param width     target width
     * @param height    target height
     * @param quality   compression quality in [0.0, 1.0]; {@code null} for the default
     * @param isForceWh whether to force the given width/height ({@code true} = force)
     */
    public static void imgCompressByWH(String srcFile, String desFile, int width, int height, Float quality, boolean isForceWh) {
        try {
            Image srcImg = ImageIO.read(new File(srcFile));
            // Fall back to the source dimensions when not forcing and the
            // requested size exceeds the original in either direction.
            if (!isForceWh && (srcImg.getHeight(null) < height || srcImg.getWidth(null) < width)) {
                width = srcImg.getWidth(null);
                height = srcImg.getHeight(null);
            }
            // Allocate the target image buffer.
            BufferedImage desImg = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
            // Draw the source image into the target at the new size.
            desImg.getGraphics().drawImage(srcImg, 0, 0, width, height, null);
            ImgCompressUtils.encodeImg(desFile, desImg, quality);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
public class RedisJobStore {
    /**
     * Gets the number of {@link org.quartz.Job}s stored in this job store,
     * executing the count under the store's Redis lock.
     *
     * @return the number of stored jobs
     * @throws JobPersistenceException if the count could not be obtained
     */
    @Override
    public int getNumberOfJobs() throws JobPersistenceException {
        return doWithLock(new LockCallback<Integer>() {
            @Override
            public Integer doWithLock(JedisCommands jedis) throws JobPersistenceException {
                return storage.getNumberOfJobs(jedis);
            }
        }, "Could not get number of jobs.");
    }
}
public class JFapChannelInbound {
    /**
     * Returns the device-side interface class for this channel.
     * (begin F177053)
     *
     * @return {@code TCPConnectionContext.class}, always
     */
    public Class getDeviceInterface() {
        // Entry/exit trace only -- the return value is a constant.
        if (tc.isEntryEnabled())
            SibTr.entry(this, tc, "getDeviceInterface");
        if (tc.isEntryEnabled())
            SibTr.exit(this, tc, "getDeviceInterface");
        return TCPConnectionContext.class;
    }
}
public class ResourceInjectionBinding {
    /**
     * Sets the injection type as specified in XML. When a class loader is
     * available, the type name is resolved to a class and reconciled with any
     * type already recorded on the annotation; conflicting, non-assignable
     * types are reported as a configuration error.
     *
     * @param typeName the type name specified in XML
     * @param element the XML ref element
     * @param nameElement the XML name element in the ref element
     * @param typeElement the XML type element in the ref element
     * @throws InjectionConfigurationException if conflicting type values are
     *         specified for the same reference
     */
    private void setXMLType(String typeName, String element, String nameElement, String typeElement) // F743-32443
            throws InjectionConfigurationException {
        if (ivNameSpaceConfig.getClassLoader() == null) {
            // No class loader: defer resolution, record the name only.
            setInjectionClassTypeName(typeName);
        } else {
            Class<?> type = loadClass(typeName);
            // The type parameter is "optional"
            if (type != null) {
                ResourceImpl curAnnotation = (ResourceImpl) getAnnotation();
                if (curAnnotation.ivIsSetType) {
                    Class<?> curType = getInjectionClassType();
                    // check that the value from xml is a subclass; if not, throw an error
                    Class<?> mostSpecificClass = mostSpecificClass(type, curType);
                    if (mostSpecificClass == null) {
                        Tr.error(tc, "CONFLICTING_XML_VALUES_CWNEN0052E", ivComponent, ivModule, ivApplication, typeElement, element, nameElement, getJndiName(), curType, type); // d479669
                        String exMsg = "The " + ivComponent + " bean in the " + ivModule + " module of the " + ivApplication + " application has conflicting configuration data in the XML" + " deployment descriptor. Conflicting " + typeElement + " element values exist for multiple " + element + " elements with the same " + nameElement + " element value : " + getJndiName() + ". The conflicting " + typeElement + " element values are " + curType + " and " + type + "."; // d479669
                        throw new InjectionConfigurationException(exMsg);
                    }
                    curAnnotation.ivType = mostSpecificClass;
                } else {
                    curAnnotation.ivType = type;
                    curAnnotation.ivIsSetType = true;
                }
            }
        }
    }
}
public class DoublePoint {
    /**
     * Adds the values of two points, returning the result as a new point.
     * Neither argument is modified.
     *
     * @param point1 first DoublePoint
     * @param point2 second DoublePoint
     * @return a new DoublePoint holding the sum
     */
    public DoublePoint Add(DoublePoint point1, DoublePoint point2) {
        // Copy point1, then accumulate point2 via the single-argument Add --
        // presumably a mutating in-place add defined on this class; confirm.
        DoublePoint result = new DoublePoint(point1);
        result.Add(point2);
        return result;
    }
}
public class ExpiryDate {
    /**
     * Builds an ExpiryDate from an absolute timestamp. The relative TTL can
     * be negative if the given expiry time is already in the past.
     *
     * @param expiryDate time in milliseconds since the epoch (UTC)
     * @return an ExpiryDate with creationTime = now and
     *         relativeTtl = expiryDate - creationTime
     */
    public static ExpiryDate fromAbsolute(long expiryDate) {
        long creationTime = System.currentTimeMillis();
        long relativeTtl;
        try {
            relativeTtl = Math.subtractExact(expiryDate, creationTime);
        } catch (ArithmeticException exception) {
            // Saturate on long underflow (expiryDate near Long.MIN_VALUE).
            relativeTtl = Long.MIN_VALUE;
        }
        return new ExpiryDate(relativeTtl, expiryDate, creationTime);
    }
}
public class ColorValidator {
    /**
     * Calls {@code validateXXX} for the corresponding classifier of the model.
     * Unknown classifiers are considered valid.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected boolean validate(int classifierID, Object value, DiagnosticChain diagnostics, Map<Object, Object> context) {
        switch (classifierID) {
            case ColorPackage.DOCUMENT_ROOT:
                return validateDocumentRoot((DocumentRoot) value, diagnostics, context);
            case ColorPackage.HEX_COLOR:
                return validateHexColor((String) value, diagnostics, context);
            default:
                return true;
        }
    }
}
public class GraphvizMojo {
    /**
     * Runs the command-line tool {@code dot}:
     * <ol>
     * <li>checks the {@code skip} flag
     * <li>gets the list of modified dot files from the status file
     * <li>runs the dot command on them
     * </ol>
     *
     * @throws MojoExecutionException if the dot command fails
     * @throws MojoFailureException declared by the Mojo contract
     */
    public final void execute() throws MojoExecutionException, MojoFailureException {
        // Honor the skip flag before doing any work.
        if (isSkip()) {
            getLog().info("Skipping creation of image files");
            return;
        }
        // Only invoke dot when there are changed dot files.
        final Set<String> changed = getChangedDotFiles();
        if (changed == null || changed.isEmpty()) {
            return;
        }
        if (!executeDot(changed)) {
            throw new MojoExecutionException("Executing '" + command + "' command failed");
        }
    }
}
public class Boolean {
    /**
     * Applies the filter to a {@code TreeNode} and returns the result.
     * <p>Delegates directly to {@link #test(List, TreeNode)}.</p>
     *
     * @param path a list of tree nodes that are ancestors of the node
     * @param theNode the tree node to be evaluated
     * @return {@code true} if the node satisfies the filter, {@code false} otherwise
     */
    @Override
    public final Boolean apply(List<? extends TreeNode> path, TreeNode theNode) {
        return test(path, theNode);
    }
}
public class RamlLoader {
    /**
     * Loads a RAML document from a file.
     *
     * @param ramlFileUrl path to the file: either a classpath resource (omit
     *        the classpath: prefix) or a file on disk (include the file: prefix)
     * @return the built RAML model, or {@code null} when the file is not found
     * @throws InvalidRamlResourceException if the RAML provided isn't correct
     *         for the required parser
     */
    public static RamlRoot loadRamlFromFile(String ramlFileUrl) {
        try {
            return createRamlModelFactoryFor(ramlFileUrl).buildRamlRoot(ramlFileUrl);
        } catch (NullPointerException npe) {
            // NOTE(review): relies on an NPE from the parser to signal a
            // missing file; an explicit existence check would be clearer --
            // verify callers depend on the null return before changing.
            logger.error("File not found at {}", ramlFileUrl);
            return null;
        }
    }
}
public class OIndexProxy {
    /**
     * {@inheritDoc}
     * Queries the last index in the chain and maps the result back through
     * the preceding (tail) indexes.
     */
    public Collection<OIdentifiable> getValuesMinor(Object toKey, boolean isInclusive) {
        final Object result = lastIndex.getValuesMinor(toKey, isInclusive);
        // -1 is passed as the limit argument -- presumably "unbounded"; confirm.
        return (Collection<OIdentifiable>) applyTailIndexes(result, -1);
    }
}
public class ChannelFinderClientImpl { /** * Returns a channel that exactly matches the channelName * < tt > channelName < / tt > . * @ param channelName * - name of the required channel . * @ return { @ link Channel } with name < tt > channelName < / tt > or null * @ throws ChannelFinderException - channelfinder exception */ public Channel getChannel ( String channelName ) throws ChannelFinderException { } }
try { return wrappedSubmit ( new FindByChannelName ( channelName ) ) ; } catch ( ChannelFinderException e ) { if ( e . getStatus ( ) . equals ( ClientResponse . Status . NOT_FOUND ) ) { return null ; } else { throw e ; } }
public class CPDisplayLayoutPersistenceImpl { /** * Caches the cp display layout in the entity cache if it is enabled . * @ param cpDisplayLayout the cp display layout */ @ Override public void cacheResult ( CPDisplayLayout cpDisplayLayout ) { } }
entityCache . putResult ( CPDisplayLayoutModelImpl . ENTITY_CACHE_ENABLED , CPDisplayLayoutImpl . class , cpDisplayLayout . getPrimaryKey ( ) , cpDisplayLayout ) ; finderCache . putResult ( FINDER_PATH_FETCH_BY_UUID_G , new Object [ ] { cpDisplayLayout . getUuid ( ) , cpDisplayLayout . getGroupId ( ) } , cpDisplayLayout ) ; finderCache . putResult ( FINDER_PATH_FETCH_BY_C_C , new Object [ ] { cpDisplayLayout . getClassNameId ( ) , cpDisplayLayout . getClassPK ( ) } , cpDisplayLayout ) ; cpDisplayLayout . resetOriginalValues ( ) ;
public class BundleWriter { /** * Ensure the buffer is large enough . * @ param size Required size to add * @ param buffer Existing buffer * @ param output Output channel * @ return Buffer , eventually resized * @ throws IOException on IO errors */ private ByteBuffer ensureBuffer ( int size , ByteBuffer buffer , WritableByteChannel output ) throws IOException { } }
if ( buffer . remaining ( ) >= size ) { return buffer ; } flushBuffer ( buffer , output ) ; if ( buffer . remaining ( ) >= size ) { return buffer ; } // Aggressively grow the buffer return ByteBuffer . allocateDirect ( Math . max ( buffer . capacity ( ) << 1 , buffer . capacity ( ) + size ) ) ;
public class Ifc2x3tc1PackageImpl {

    /**
     * <!-- begin-user-doc -->
     * Returns the {@link EEnum} metaobject for {@code IfcFlowMeterTypeEnum},
     * lazily resolved from the registered Ifc2x3tc1 package on first access.
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getIfcFlowMeterTypeEnum() {
        if (ifcFlowMeterTypeEnumEEnum == null) {
            // Classifier index 841 is fixed by the generated package layout;
            // do not hand-edit -- this method is EMF-generated.
            ifcFlowMeterTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE
                .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(841);
        }
        return ifcFlowMeterTypeEnumEEnum;
    }
}
public class KunderaJTAUserTransaction { /** * ( non - Javadoc ) * @ see javax . transaction . UserTransaction # rollback ( ) */ @ Override public void rollback ( ) throws IllegalStateException , SecurityException , SystemException { } }
try { Transaction tx = threadLocal . get ( ) ; if ( tx == null ) { throw new IllegalStateException ( "Cannot locate a Transaction for rollback." ) ; } if ( log . isDebugEnabled ( ) ) log . info ( "Rollback transaction:" + tx ) ; tx . rollback ( ) ; } finally { if ( log . isDebugEnabled ( ) ) log . info ( "Resetting after rollback." ) ; threadLocal . set ( null ) ; timerThead . set ( null ) ; }
public class ReportManager {

    /**
     * Starts the periodic report generation task; it runs for as long as the
     * given object manager is alive.
     *
     * @param omgr the manager whose interval service drives the reports
     */
    public void activatePeriodicReport(RootDObjectManager omgr) {
        final Runnable reporter = new Runnable() {
            public void run() {
                final String report =
                    generateReport(DEFAULT_TYPE, System.currentTimeMillis(), true);
                logReport(LOG_REPORT_HEADER + report);
            }
        };
        // Schedule as a repeating interval (second argument true) at the
        // configured report period.
        omgr.newInterval(reporter).schedule(getReportInterval(), true);
    }
}
public class FactoryKernelGaussian {

    /**
     * Computes the derivative of a Gaussian kernel.
     *
     * @param order     order of the derivative, from 1 to 4 inclusive
     * @param sigma     the distribution's standard deviation
     * @param radius    kernel's radius; the kernel has {@code 2*radius + 1} taps
     * @param normalize if true, divide by the Gaussian's normalization sum so the
     *                  kernel does not change the intensity scale of the input image
     * @return the derivative of the Gaussian as a 1-D kernel
     * @throws IllegalArgumentException if {@code order} is not in [1, 4]
     */
    protected static Kernel1D_F32 derivative1D_F32(int order, double sigma, int radius, boolean normalize) {
        Kernel1D_F32 ret = new Kernel1D_F32(radius * 2 + 1);
        float[] gaussian = ret.data;
        int index = 0;
        // Fill the taps from +radius down to -radius with the requested derivative order.
        switch (order) {
            case 1:
                for (int i = radius; i >= -radius; i--) {
                    gaussian[index++] = (float) UtilGaussian.derivative1(0, sigma, i);
                }
                break;
            case 2:
                for (int i = radius; i >= -radius; i--) {
                    gaussian[index++] = (float) UtilGaussian.derivative2(0, sigma, i);
                }
                break;
            case 3:
                for (int i = radius; i >= -radius; i--) {
                    gaussian[index++] = (float) UtilGaussian.derivative3(0, sigma, i);
                }
                break;
            case 4:
                for (int i = radius; i >= -radius; i--) {
                    gaussian[index++] = (float) UtilGaussian.derivative4(0, sigma, i);
                }
                break;
            default:
                throw new IllegalArgumentException("Only derivatives of order 1 to 4 are supported");
        }
        // Multiply by the same factor the plain Gaussian would be normalized by;
        // otherwise the kernel would effectively change the intensity of the input image.
        if (normalize) {
            double sum = 0;
            for (int i = radius; i >= -radius; i--) {
                sum += UtilGaussian.computePDF(0, sigma, i);
            }
            for (int i = 0; i < gaussian.length; i++) {
                gaussian[i] /= sum;
            }
        }
        return ret;
    }
}
public class HtmlBaseTag { /** * Sets the onMouseMove javascript event . * @ param onmousemove the onMouseMove event . * @ jsptagref . attributedescription The onMouseMove JavaScript event . * @ jsptagref . databindable false * @ jsptagref . attributesyntaxvalue < i > string _ onMouseMove < / i > * @ netui : attribute required = " false " rtexprvalue = " true " * description = " The onMouseMove JavaScript event . " */ public void setOnMouseMove ( String onmousemove ) { } }
AbstractHtmlState tsh = getState ( ) ; tsh . registerAttribute ( AbstractHtmlState . ATTR_JAVASCRIPT , ONMOUSEMOVE , onmousemove ) ;
public class Expression {

    /**
     * Returns an expression that performs a checked cast from the current type to the target type.
     *
     * @param target the reference type to cast to
     * @return this expression unchanged when the cast is statically known to succeed,
     *         otherwise a new expression that emits a runtime check
     * @throws IllegalArgumentException if either type is not a reference type
     */
    public Expression checkedCast(final Type target) {
        checkArgument(target.getSort() == Type.OBJECT,
            "cast targets must be reference types. (%s)", target.getClassName());
        checkArgument(resultType().getSort() == Type.OBJECT,
            "you may only cast from reference types. (%s)", resultType().getClassName());
        // When assignability is provable at compile time, no runtime cast is needed.
        if (BytecodeUtils.isDefinitelyAssignableFrom(target, resultType())) {
            return this;
        }
        return new Expression(target, features()) {
            @Override
            protected void doGen(CodeBuilder adapter) {
                // Generate the underlying value first, then the cast instruction.
                Expression.this.gen(adapter);
                // TODO(b/191662001) Remove this once we have fully switched the type
                // system over. Normally, we should just cast this result over, but in
                // the case of SoyString, there are temporarily two states (SanitizedContent == SoyString)
                // and (SanitizedContent != SoyString). This branch bails out to a runtime function that
                // effectively does the below but also optionally logs a warning.
                if (resultType().equals(BytecodeUtils.SOY_STRING_TYPE)) {
                    MethodRef.RUNTIME_CHECK_SOY_STRING.invokeUnchecked(adapter);
                } else {
                    adapter.checkCast(resultType());
                }
            }
        };
    }
}
public class BCryptOpenBSDProtocol { /** * Cyclically extract a word of key material * @ param data the string to extract the data from * @ param offp a " pointer " ( as a one - entry array ) to the * current offset into data * @ return the next word of material from data */ private static int streamToWord ( byte [ ] data , int [ ] offp ) { } }
int i ; int word = 0 ; int off = offp [ 0 ] ; for ( i = 0 ; i < 4 ; i ++ ) { word = ( word << 8 ) | ( data [ off ] & 0xff ) ; off = ( off + 1 ) % data . length ; } offp [ 0 ] = off ; return word ;
public class PythonDistributionAnalyzer { /** * Collects the meta data from an archive . * @ param dependency the archive being scanned * @ param folderFilter the filter to apply to the folder * @ param metadataFilter the filter to apply to the meta data * @ throws AnalysisException thrown when there is a problem analyzing the * dependency */ private void collectMetadataFromArchiveFormat ( Dependency dependency , FilenameFilter folderFilter , FilenameFilter metadataFilter ) throws AnalysisException { } }
final File temp = getNextTempDirectory ( ) ; LOGGER . debug ( "{} exists? {}" , temp , temp . exists ( ) ) ; try { ExtractionUtil . extractFilesUsingFilter ( new File ( dependency . getActualFilePath ( ) ) , temp , metadataFilter ) ; } catch ( ExtractionException ex ) { throw new AnalysisException ( ex ) ; } File matchingFile = getMatchingFile ( temp , folderFilter ) ; if ( matchingFile != null ) { matchingFile = getMatchingFile ( matchingFile , metadataFilter ) ; if ( matchingFile != null ) { collectWheelMetadata ( dependency , matchingFile ) ; } }
public class CharSequenceWrapper { /** * transform given value to a javascript parameter value * @ param value The value to transform * @ return value as string */ public static CharSequence toParameterValue ( final Boolean value ) { } }
return value != null ? Boolean . toString ( value ) : Attr . nullValue ( ) ;
public class SBGNLayoutManager {

    /**
     * Re-attaches the arcs incident to an l-level port to the newly created
     * v-level port produced by ChiLay and SBGNPD Layout.
     *
     * @param lPort l-level port object
     * @param vPort v-level port object
     */
    private void connectArcToPort(SbgnPDNode lPort, Port vPort) {
        // Iterate over the edges of the l-level port.
        for (Object e : lPort.getEdges()) {
            // Fix(idiom): cast once into a typed local instead of repeating
            // ((LEdge) e) for every field and method access.
            final LEdge edge = (LEdge) e;
            // Rigid edges are internal to the layout and carry no arc.
            if (edge.type.equals("rigid edge")) {
                continue;
            }
            // Determine whether vPort is the source or the target of the arc
            // and rewire that endpoint.
            final Arc arc = idToArcs.get(edge.label);
            if (lPort.label.equals(edge.getSource().label)) {
                arc.setSource(vPort);
            } else if (lPort.label.equals(edge.getTarget().label)) {
                arc.setTarget(vPort);
            }
        }
    }
}
public class FtpServerFtpLet {

    /**
     * Constructs an FTP reply from the response message payload and writes it to
     * the given session.
     *
     * @param session  the FTP session the reply is written to
     * @param response message whose {@code CommandResultType} payload carries the
     *                 reply code and reply text
     * @throws CitrusRuntimeException if writing the reply to the session fails
     */
    private void writeFtpReply(FtpSession session, FtpMessage response) {
        try {
            CommandResultType commandResult = response.getPayload(CommandResultType.class);
            FtpReply reply = new DefaultFtpReply(Integer.valueOf(commandResult.getReplyCode()),
                commandResult.getReplyString());
            session.write(reply);
        } catch (FtpException e) {
            throw new CitrusRuntimeException("Failed to write ftp reply", e);
        }
    }
}
public class BeanDeployer {

    /**
     * Fires {@link ProcessBeanAttributes} for each enabled bean and updates the
     * environment based on the events.
     */
    public void processClassBeanAttributes() {
        // Each phase runs over the same three categories in the same order:
        // plain class beans, decorators, interceptors.
        preInitializeBeans(getEnvironment().getClassBeans());
        preInitializeBeans(getEnvironment().getDecorators());
        preInitializeBeans(getEnvironment().getInterceptors());
        processBeans(getEnvironment().getClassBeans());
        processBeans(getEnvironment().getDecorators());
        processBeans(getEnvironment().getInterceptors());
        // Now that we know the beans won't be vetoed, it's the right time to
        // register @New injection points.
        searchForNewBeanDeclarations(getEnvironment().getClassBeans());
        searchForNewBeanDeclarations(getEnvironment().getDecorators());
        searchForNewBeanDeclarations(getEnvironment().getInterceptors());
    }
}
public class ProtoLexer {

    /**
     * $ANTLR start "INT32"
     * Lexer rule matching the literal token {@code int32}.
     * ANTLR-generated -- do not hand-edit.
     */
    public final void mINT32() throws RecognitionException {
        try {
            int _type = INT32;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // com/dyuproject/protostuff/parser/ProtoLexer.g:155:5: ( 'int32' )
            // com/dyuproject/protostuff/parser/ProtoLexer.g:155:9: 'int32'
            {
                match("int32");
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // generated rule: nothing to clean up
        }
    }
}
public class RedisClient {

    /**
     * Gets the columns for the given hash key, either through the active
     * transaction (when one is in progress) or directly from the Jedis connection.
     *
     * @param connection the connection; treated as a {@code Transaction} when a
     *                   resource is active, otherwise as a {@code Jedis} instance
     * @param hashKey    the hash key
     * @param columns    map that transactional results are merged into
     * @return the columns
     */
    private Map<byte[], byte[]> getColumns(Object connection, String hashKey, Map<byte[], byte[]> columns) {
        if (resource != null && resource.isActive()) {
            // Why does the transaction API return the response in byte[] format?
            Response response = ((Transaction) connection).hgetAll(getEncodedBytes(hashKey));
            // Register the pending command with the wrapping transaction so it
            // executes later instead of calling exec() here directly.
            ((RedisTransaction) resource).onExecute(((Transaction) connection));
            // ((Transaction) connection).exec();
            Map<String, String> cols = (Map<String, String>) response.get();
            // NOTE(review): this reassignment is a dead store -- the refreshed
            // connection is never used below. Confirm whether getConnection() has
            // side effects that are being relied upon here.
            connection = getConnection();
            if (cols != null) {
                for (String name : cols.keySet()) {
                    columns.put(getEncodedBytes(name), getEncodedBytes(cols.get(name)));
                }
            }
        } else {
            // No active transaction: read directly.
            columns = ((Jedis) connection).hgetAll(getEncodedBytes(hashKey));
        }
        return columns;
    }
}
public class TaskFormatter { /** * now - startedAt */ @ CheckForNull private Long computeExecutionTimeMs ( CeQueueDto dto ) { } }
Long startedAt = dto . getStartedAt ( ) ; if ( startedAt == null ) { return null ; } return system2 . now ( ) - startedAt ;
public class FactoryAssociation {

    /**
     * Scores features based on Sum of Absolute Difference (SAD).
     *
     * @param tupleType type of descriptor being scored; one of the supported
     *                  TupleDesc F64/F32/U8/S8 variants
     * @return SAD scorer for the descriptor type
     * @throws IllegalArgumentException if the descriptor type is not supported
     */
    public static <D> ScoreAssociation<D> scoreSad(Class<D> tupleType) {
        // NOTE(review): the F64 branch accepts subclasses (isAssignableFrom) while
        // the other branches require an exact class match -- confirm whether that
        // asymmetry is intentional.
        if (TupleDesc_F64.class.isAssignableFrom(tupleType)) {
            return (ScoreAssociation) new ScoreAssociateSad_F64();
        } else if (tupleType == TupleDesc_F32.class) {
            return (ScoreAssociation) new ScoreAssociateSad_F32();
        } else if (tupleType == TupleDesc_U8.class) {
            return (ScoreAssociation) new ScoreAssociateSad_U8();
        } else if (tupleType == TupleDesc_S8.class) {
            return (ScoreAssociation) new ScoreAssociateSad_S8();
        } else {
            throw new IllegalArgumentException("SAD score not supported for type " + tupleType.getSimpleName());
        }
    }
}
public class DataBinder { /** * Puts a new result set into this binder . */ public CachedResultSet putResultSet ( String name , CachedResultSet rs ) { } }
return _rsets . put ( name , rs ) ;
public class Equalizer { /** * Retrieves the eq setting for a given band . */ public float getBand ( int band ) { } }
float eq = 0.0f ; if ( ( band >= 0 ) && ( band < BANDS ) ) { eq = settings [ band ] ; } return eq ;
public class ReportLog {

    /**
     * Creates report logs consisting of test statistics extracted from the test
     * suite results. These report logs are then printed in the TestNG HTML reports.
     *
     * @param suite the test suite whose results are summarized
     */
    public void generateLogs(ISuite suite) {
        Reporter.clear(); // clear output from previous test runs
        Reporter.log("The result of the test is-\n\n");
        // Following code gets the suite name
        String suiteName = suite.getName();
        // Getting the results for the said suite
        Map<String, ISuiteResult> suiteResults = suite.getResults();
        String input = null;
        String result;
        String failReport = null;
        String failReportConformance2 = ",";
        int passedTest = 0;
        int failedTest = 0;
        int skippedTest = 0;
        int finalPassedTest = 0;
        int finalSkippedTest = 0;
        int finalFailedTest = 0;
        int count = 0;
        String date = null;
        for (ISuiteResult sr : suiteResults.values()) {
            count++;
            ITestContext tc = sr.getTestContext();
            DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
            Calendar cal = Calendar.getInstance();
            // The first result supplies the run date, the input name, the detailed
            // fail report and the baseline pass/fail/skip counts; each subsequent
            // result contributes one per-clause conformance line and its counts.
            if (count == 1) {
                date = dateFormat.format(cal.getTime());
                input = tc.getAttribute("Input").toString();
                failReport = tc.getAttribute("TestResultReport").toString();
                passedTest = tc.getPassedTests().getAllResults().size();
                skippedTest = tc.getSkippedTests().getAllResults().size();
                failedTest = tc.getFailedTests().getAllResults().size();
            } else {
                int no_of_failedTest = tc.getFailedTests().getAllResults().size();
                int no_of_skippedTest = tc.getSkippedTests().getAllResults().size();
                int no_of_passedTest = tc.getPassedTests().getAllResults().size();
                // Only record a conformance line when the clause actually ran
                // (at least one pass or fail).
                if (no_of_failedTest != 0 || no_of_passedTest != 0) {
                    if (no_of_failedTest == 0 && no_of_passedTest != 0) {
                        failReportConformance2 = failReportConformance2 + ", " + input
                            + " conform to the clause A." + count + " of " + suiteName;
                    } else {
                        failReportConformance2 = failReportConformance2 + ", " + input
                            + " does not conform to the clause A." + count + " of " + suiteName;
                    }
                    finalPassedTest = finalPassedTest + no_of_passedTest;
                    finalSkippedTest = finalSkippedTest + no_of_skippedTest;
                    finalFailedTest = finalFailedTest + no_of_failedTest;
                }
            }
        }
        // Fold the per-clause totals into the overall counts before reporting.
        failedTest += finalFailedTest;
        skippedTest += finalSkippedTest;
        passedTest += finalPassedTest;
        if (failedTest > 0) {
            result = "Fail";
        } else {
            result = "Pass";
        }
        Reporter.log("**RESULT: " + result);
        Reporter.log("**INPUT: " + input);
        Reporter.log("**TEST NAME AND VERSION :" + suiteName);
        Reporter.log("**DATE AND TIME PERFORMED :" + date);
        Reporter.log("Passed tests for suite '" + suiteName + "' is:" + passedTest);
        Reporter.log("Failed tests for suite '" + suiteName + "' is:" + failedTest);
        Reporter.log("Skipped tests for suite '" + suiteName + "' is:" + skippedTest);
        Reporter.log("\nREASON:\n\n");
        Reporter.log(failReport);
        Reporter.log(failReportConformance2);
    }
}
public class CPSpecificationOptionLocalServiceWrapper {

    /**
     * Creates a new cp specification option with the primary key. Does not add
     * the cp specification option to the database.
     *
     * @param CPSpecificationOptionId the primary key for the new cp specification option
     * @return the new cp specification option
     */
    @Override
    public com.liferay.commerce.product.model.CPSpecificationOption createCPSpecificationOption(
            long CPSpecificationOptionId) {
        // Pure delegation to the wrapped local service.
        return _cpSpecificationOptionLocalService.createCPSpecificationOption(CPSpecificationOptionId);
    }
}
public class TypeTransformers { /** * Adds a type transformer applied at runtime . * This method handles transformations to String from GString , * array transformations and number based transformations */ protected static MethodHandle addTransformer ( MethodHandle handle , int pos , Object arg , Class parameter ) { } }
MethodHandle transformer = null ; if ( arg instanceof GString ) { transformer = TO_STRING ; } else if ( arg instanceof Closure ) { transformer = createSAMTransform ( arg , parameter ) ; } else if ( Number . class . isAssignableFrom ( parameter ) ) { transformer = selectNumberTransformer ( parameter , arg ) ; } else if ( parameter . isArray ( ) ) { transformer = MethodHandles . insertArguments ( AS_ARRAY , 1 , parameter ) ; } if ( transformer == null ) throw new GroovyBugError ( "Unknown transformation for argument " + arg + " at position " + pos + " with " + arg . getClass ( ) + " for parameter of type " + parameter ) ; return applyUnsharpFilter ( handle , pos , transformer ) ;