signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class CSVSummariser { /** * Summarise the CSV file from the input { @ link Reader } and emit the summary CSV * file to the output { @ link Writer } , including the given maximum number of * sample values in the summary for each field . * @ param input * The input CSV file , as a { @ link Reader } . * @ param output * The output CSV file as a { @ link Writer } . * @ param mappingOutput * The output mapping template file as a { @ link Writer } . * @ param maxSampleCount * The maximum number of sample values in the summary for each field . * Set to - 1 to include all unique values for each field . * @ param showSampleCounts * Show counts next to sample values * @ param debug * Set to true to add debug statements . * @ throws IOException * If there is an error reading or writing . */ public static void runSummarise ( Reader input , Writer output , Writer mappingOutput , int maxSampleCount , boolean showSampleCounts , boolean debug ) throws IOException { } }
runSummarise ( input , output , mappingOutput , maxSampleCount , showSampleCounts , debug , null , CSVStream . DEFAULT_HEADER_COUNT ) ;
public class AnnotationValueBuilder { /** * Sets the value member to the given annotation class values . * @ param name The name of the member * @ param classValues The annotation [ ] * @ return This builder */ public AnnotationValueBuilder < T > member ( String name , @ Nullable AnnotationClassValue < ? > ... classValues ) { } }
if ( classValues != null ) { values . put ( name , classValues ) ; } return this ;
public class Base64Kit { /** * 编码 * @ param value * 字符串 * @ return { String } */ public static String encode ( String value ) { } }
byte [ ] val = value . getBytes ( UTF_8 ) ; return delegate . encode ( val ) ;
public class DebugAminoAcid { /** * { @ inheritDoc } */ @ Override public Order getMinimumBondOrder ( IAtom atom ) { } }
logger . debug ( "Getting minimum bond order for atom: " , atom ) ; return super . getMinimumBondOrder ( atom ) ;
public class AccountListPoolNodeCountsNextOptions { /** * Set the time the request was issued . Client libraries typically set this to the current system clock time ; set it explicitly if you are calling the REST API directly . * @ param ocpDate the ocpDate value to set * @ return the AccountListPoolNodeCountsNextOptions object itself . */ public AccountListPoolNodeCountsNextOptions withOcpDate ( DateTime ocpDate ) { } }
if ( ocpDate == null ) { this . ocpDate = null ; } else { this . ocpDate = new DateTimeRfc1123 ( ocpDate ) ; } return this ;
public class GradleDependencyResolutionHelper { /** * Parse the plugin definition file and extract the version details from it . */ public static String determinePluginVersion ( ) { } }
if ( pluginVersion == null ) { final String fileName = "META-INF/gradle-plugins/thorntail.properties" ; ClassLoader loader = Thread . currentThread ( ) . getContextClassLoader ( ) ; String version ; try ( InputStream stream = loader . getResourceAsStream ( fileName ) ) { Properties props = new Properties ( ) ; props . load ( stream ) ; version = props . getProperty ( "implementation-version" ) ; } catch ( IOException e ) { throw new IllegalStateException ( "Unable to locate file: " + fileName , e ) ; } pluginVersion = version ; } return pluginVersion ;
public class KeypadAdapter { /** * Vibrate device on each key press if the feature is enabled */ private void vibrateIfEnabled ( ) { } }
final boolean enabled = styledAttributes . getBoolean ( R . styleable . PinLock_vibrateOnClick , false ) ; if ( enabled ) { Vibrator v = ( Vibrator ) context . getSystemService ( Context . VIBRATOR_SERVICE ) ; final int duration = styledAttributes . getInt ( R . styleable . PinLock_vibrateDuration , 20 ) ; v . vibrate ( duration ) ; }
public class ApplicationResource { /** * ( non - Javadoc ) * @ see net . roboconf . dm . rest . services . internal . resources . IApplicationResource * # executeCommand ( java . lang . String , java . lang . String ) */ @ Override public Response executeCommand ( String app , String commandName ) { } }
this . logger . fine ( "Request: execute command " + commandName + " in the " + app + " application." ) ; String lang = lang ( this . manager ) ; Response response = Response . ok ( ) . build ( ) ; try { Application application = this . manager . applicationMngr ( ) . findApplicationByName ( app ) ; if ( application == null ) response = handleError ( Status . NOT_FOUND , new RestError ( REST_INEXISTING , application ( app ) ) , lang ) . build ( ) ; else this . manager . commandsMngr ( ) . execute ( application , commandName , CommandHistoryItem . ORIGIN_REST_API , null ) ; } catch ( NoSuchFileException e ) { response = RestServicesUtils . handleError ( Status . NOT_FOUND , new RestError ( ErrorCode . REST_INEXISTING , e , ErrorDetails . name ( commandName ) ) , lang ) . build ( ) ; } catch ( CommandException e ) { response = RestServicesUtils . handleError ( Status . CONFLICT , new RestError ( ErrorCode . REST_APP_EXEC_ERROR , e , ErrorDetails . name ( commandName ) ) , lang ) . build ( ) ; } catch ( Exception e ) { response = RestServicesUtils . handleError ( Status . INTERNAL_SERVER_ERROR , new RestError ( ErrorCode . REST_UNDETAILED_ERROR , e ) , lang ) . build ( ) ; } return response ;
public class CsvToSqlExtensions { /** * Gets the csv file as sql insert script . * @ param tableName * the table name * @ param csvBean * the csv bean * @ param withHeader * the with header * @ param withEndSemicolon * the with end semicolon * @ return the csv file as sql insert script */ public static String getCsvFileAsSqlInsertScript ( final String tableName , final CsvBean csvBean , final boolean withHeader , final boolean withEndSemicolon ) { } }
final StringBuffer sb = new StringBuffer ( ) ; if ( withHeader ) { final String sqlColumns = extractSqlColumns ( csvBean . getHeaders ( ) ) ; sb . append ( "INSERT INTO " + tableName + " ( " + sqlColumns + ") VALUES \n" ) ; } final String [ ] columnTypesEdit = csvBean . getColumnTypesEdit ( ) ; if ( columnTypesEdit != null ) { final StringBuffer sqlData = getSqlData ( csvBean . getHeaders ( ) , csvBean . getColumnTypes ( ) , columnTypesEdit , csvBean . getLineOrder ( ) , csvBean . getLines ( ) , withEndSemicolon ) ; sb . append ( sqlData . toString ( ) ) ; } else { final StringBuffer sqlData = getSqlData ( csvBean . getHeaders ( ) , csvBean . getColumnTypes ( ) , null , null , csvBean . getLines ( ) , withEndSemicolon ) ; sb . append ( sqlData . toString ( ) ) ; } return sb . toString ( ) ;
public class PhotonCoreValidator { /** * Validate the content of the < code > web . xml < / code > file . It checks if all * referenced classes are loadable ! * @ throws Exception * In case of an error . */ public static void validateWebXML ( ) throws Exception { } }
final IMicroDocument aDoc = MicroReader . readMicroXML ( new File ( "src/main/webapp/WEB-INF/web.xml" ) ) ; if ( aDoc != null ) for ( final IMicroNode aNode : new MicroRecursiveIterator ( aDoc . getDocumentElement ( ) ) ) if ( aNode . isElement ( ) ) { final IMicroElement e = ( IMicroElement ) aNode ; if ( e . getTagName ( ) . endsWith ( "-class" ) ) { final String sClassName = e . getTextContentTrimmed ( ) ; try { Class . forName ( sClassName ) ; } catch ( final Exception ex ) { LOGGER . error ( "Failed to resolve class '" + sClassName + "'" ) ; throw ex ; } } }
public class EntityInfo { /** * Given a Group g of Chain c ( member of this EntityInfo ) return the corresponding position in the * alignment of all member sequences ( 1 - based numbering ) , i . e . the index ( 1 - based ) in the SEQRES sequence . * This allows for comparisons of residues belonging to different chains of the same EntityInfo ( entity ) . * Note this method should only be used for entities of type { @ link EntityType # POLYMER } * If { @ link FileParsingParameters # setAlignSeqRes ( boolean ) } is not used or SEQRES not present , a mapping * will not be available and this method will return { @ link ResidueNumber # getSeqNum ( ) } for all residues , which * in some cases will be correctly aligned indices ( when no insertion codes are * used and when all chains within the entity are numbered in the same way ) , but * in general they will be neither unique ( because of insertion codes ) nor aligned . * @ param g * @ param c * @ return the aligned residue index ( 1 to n ) , if no SEQRES groups are available at all then { @ link ResidueNumber # getSeqNum ( ) } * is returned as a fall - back , if the group is not found in the SEQRES groups then - 1 is returned * for the given group and chain * @ throws IllegalArgumentException if the given Chain is not a member of this EnityInfo * @ see Chain # getSeqResGroup ( int ) */ public int getAlignedResIndex ( Group g , Chain c ) { } }
boolean contained = false ; for ( Chain member : getChains ( ) ) { if ( c . getId ( ) . equals ( member . getId ( ) ) ) { contained = true ; break ; } } if ( ! contained ) throw new IllegalArgumentException ( "Given chain with asym_id " + c . getId ( ) + " is not a member of this entity: " + getChainIds ( ) . toString ( ) ) ; if ( ! chains2pdbResNums2ResSerials . containsKey ( c . getId ( ) ) ) { // we do lazy initialisation of the map initResSerialsMap ( c ) ; } // if no seqres groups are available at all the map will be null Map < ResidueNumber , Integer > map = chains2pdbResNums2ResSerials . get ( c . getId ( ) ) ; int serial ; if ( map != null ) { ResidueNumber resNum = g . getResidueNumber ( ) ; // the resNum will be null for groups that are SEQRES only and not in ATOM , // still it can happen that a group is in ATOM in one chain but not in other of the same entity . // This is what we try to find out here ( analogously to what we do in initResSerialsMap ( ) ) : if ( resNum == null && c . getSeqResGroups ( ) != null && ! c . getSeqResGroups ( ) . isEmpty ( ) ) { int index = c . getSeqResGroups ( ) . indexOf ( g ) ; resNum = findResNumInOtherChains ( index , c ) ; } if ( resNum == null ) { // still null , we really can ' t map serial = - 1 ; } else { Integer alignedSerial = map . get ( resNum ) ; if ( alignedSerial == null ) { // the map doesn ' t contain this group , something ' s wrong : return - 1 serial = - 1 ; } else { serial = alignedSerial ; } } } else { // no seqres groups available we resort to using the pdb residue numbers are given serial = g . getResidueNumber ( ) . getSeqNum ( ) ; } return serial ;
public class ResourcePoolsBuilder { /** * Add the { @ link ResourcePool } of { @ link ResourceType } in the returned builder . * @ param resourcePool the non - { @ code null } resource pool to add * @ return a new builder with the added pool * @ throws IllegalArgumentException if the set of resource pools already contains a pool for { @ code type } */ public ResourcePoolsBuilder with ( ResourcePool resourcePool ) { } }
final ResourceType < ? > type = resourcePool . getType ( ) ; final ResourcePool existingPool = resourcePools . get ( type ) ; if ( existingPool != null ) { throw new IllegalArgumentException ( "Can not add '" + resourcePool + "'; configuration already contains '" + existingPool + "'" ) ; } Map < ResourceType < ? > , ResourcePool > newPools = new HashMap < > ( resourcePools ) ; newPools . put ( type , resourcePool ) ; return new ResourcePoolsBuilder ( newPools ) ;
public class PdfDocument { /** * Adds an image to the document . * @ param image the < CODE > Image < / CODE > to add * @ throws PdfException on error * @ throws DocumentException on error */ protected void add ( Image image ) throws PdfException , DocumentException { } }
if ( image . hasAbsoluteY ( ) ) { graphics . addImage ( image ) ; pageEmpty = false ; return ; } // if there isn ' t enough room for the image on this page , save it for the next page if ( currentHeight != 0 && indentTop ( ) - currentHeight - image . getScaledHeight ( ) < indentBottom ( ) ) { if ( ! strictImageSequence && imageWait == null ) { imageWait = image ; return ; } newPage ( ) ; if ( currentHeight != 0 && indentTop ( ) - currentHeight - image . getScaledHeight ( ) < indentBottom ( ) ) { imageWait = image ; return ; } } pageEmpty = false ; // avoid endless loops if ( image == imageWait ) imageWait = null ; boolean textwrap = ( image . getAlignment ( ) & Image . TEXTWRAP ) == Image . TEXTWRAP && ! ( ( image . getAlignment ( ) & Image . MIDDLE ) == Image . MIDDLE ) ; boolean underlying = ( image . getAlignment ( ) & Image . UNDERLYING ) == Image . UNDERLYING ; float diff = leading / 2 ; if ( textwrap ) { diff += leading ; } float lowerleft = indentTop ( ) - currentHeight - image . getScaledHeight ( ) - diff ; float mt [ ] = image . matrix ( ) ; float startPosition = indentLeft ( ) - mt [ 4 ] ; if ( ( image . getAlignment ( ) & Image . RIGHT ) == Image . RIGHT ) startPosition = indentRight ( ) - image . getScaledWidth ( ) - mt [ 4 ] ; if ( ( image . getAlignment ( ) & Image . MIDDLE ) == Image . MIDDLE ) startPosition = indentLeft ( ) + ( ( indentRight ( ) - indentLeft ( ) - image . getScaledWidth ( ) ) / 2 ) - mt [ 4 ] ; if ( image . hasAbsoluteX ( ) ) startPosition = image . getAbsoluteX ( ) ; if ( textwrap ) { if ( imageEnd < 0 || imageEnd < currentHeight + image . getScaledHeight ( ) + diff ) { imageEnd = currentHeight + image . getScaledHeight ( ) + diff ; } if ( ( image . getAlignment ( ) & Image . RIGHT ) == Image . RIGHT ) { // indentation suggested by Pelikan Stephan indentation . imageIndentRight += image . getScaledWidth ( ) + image . getIndentationLeft ( ) ; } else { // indentation suggested by Pelikan Stephan indentation . 
imageIndentLeft += image . getScaledWidth ( ) + image . getIndentationRight ( ) ; } } else { if ( ( image . getAlignment ( ) & Image . RIGHT ) == Image . RIGHT ) startPosition -= image . getIndentationRight ( ) ; else if ( ( image . getAlignment ( ) & Image . MIDDLE ) == Image . MIDDLE ) startPosition += image . getIndentationLeft ( ) - image . getIndentationRight ( ) ; else startPosition += image . getIndentationLeft ( ) ; } graphics . addImage ( image , mt [ 0 ] , mt [ 1 ] , mt [ 2 ] , mt [ 3 ] , startPosition , lowerleft - mt [ 5 ] ) ; if ( ! ( textwrap || underlying ) ) { currentHeight += image . getScaledHeight ( ) + diff ; flushLines ( ) ; text . moveText ( 0 , - ( image . getScaledHeight ( ) + diff ) ) ; newLine ( ) ; }
public class CASH { /** * Builds a dim - 1 dimensional database where the objects are projected into * the specified subspace . * @ param dim the dimensionality of the database * @ param basis the basis defining the subspace * @ param ids the ids for the new database * @ param relation the database storing the parameterization functions * @ return a dim - 1 dimensional database where the objects are projected into * the specified subspace */ private MaterializedRelation < ParameterizationFunction > buildDB ( int dim , double [ ] [ ] basis , DBIDs ids , Relation < ParameterizationFunction > relation ) { } }
ProxyDatabase proxy = new ProxyDatabase ( ids ) ; SimpleTypeInformation < ParameterizationFunction > type = new SimpleTypeInformation < > ( ParameterizationFunction . class ) ; WritableDataStore < ParameterizationFunction > prep = DataStoreUtil . makeStorage ( ids , DataStoreFactory . HINT_HOT , ParameterizationFunction . class ) ; // Project for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { prep . put ( iter , project ( basis , relation . get ( iter ) ) ) ; } if ( LOG . isDebugging ( ) ) { LOG . debugFine ( "db fuer dim " + ( dim - 1 ) + ": " + ids . size ( ) ) ; } MaterializedRelation < ParameterizationFunction > prel = new MaterializedRelation < > ( type , ids , null , prep ) ; proxy . addRelation ( prel ) ; return prel ;
public class PeerGroup { /** * < p > Link the given PeerFilterProvider to this PeerGroup . DO NOT use this for Wallets , use * { @ link PeerGroup # addWallet ( Wallet ) } instead . < / p > * < p > Note that this should be done before chain download commences because if you add a listener with keys earlier * than the current chain head , the relevant parts of the chain won ' t be redownloaded for you . < / p > * < p > This method invokes { @ link PeerGroup # recalculateFastCatchupAndFilter ( FilterRecalculateMode ) } . * The return value of this method is the { @ code ListenableFuture } returned by that invocation . < / p > * @ return a future that completes once each { @ code Peer } in this group has had its * { @ code BloomFilter } ( re ) set . */ public ListenableFuture < BloomFilter > addPeerFilterProvider ( PeerFilterProvider provider ) { } }
lock . lock ( ) ; try { checkNotNull ( provider ) ; checkState ( ! peerFilterProviders . contains ( provider ) ) ; // Insert provider at the start . This avoids various concurrency problems that could occur because we need // all providers to be in a consistent , unchanging state whilst the filter is built . Providers can give // this guarantee by taking a lock in their begin method , but if we add to the end of the list here , it // means we establish a lock ordering a > b > c if that ' s the order the providers were added in . Given that // the main wallet will usually be first , this establishes an ordering wallet > other - provider , which means // other - provider can then not call into the wallet itself . Other providers installed by the API user should // come first so the expected ordering is preserved . This can also manifest itself in providers that use // synchronous RPCs into an actor instead of locking , but the same issue applies . peerFilterProviders . add ( 0 , provider ) ; // Don ' t bother downloading block bodies before the oldest keys in all our wallets . Make sure we recalculate // if a key is added . Of course , by then we may have downloaded the chain already . Ideally adding keys would // automatically rewind the block chain and redownload the blocks to find transactions relevant to those keys , // all transparently and in the background . But we are a long way from that yet . ListenableFuture < BloomFilter > future = recalculateFastCatchupAndFilter ( FilterRecalculateMode . SEND_IF_CHANGED ) ; updateVersionMessageRelayTxesBeforeFilter ( getVersionMessage ( ) ) ; return future ; } finally { lock . unlock ( ) ; }
public class ElementList { /** * Appends a piece of text to the contents of this node * @ param value */ public void addValue ( String value ) { } }
contents . add ( value ) ; if ( value . trim ( ) . length ( ) > 0 ) { if ( contents . size ( ) == 1 ) { containsSingleString = true ; } else { containsSingleString = false ; containsMarkupText = true ; } }
public class Payloads { /** * Create a { @ link Payload } from ByteString with the * specified contentType */ public static Payload create ( ByteString byteString , String contentType ) { } }
return new PayloadImpl ( byteString , Optional . ofNullable ( contentType ) ) ;
public class UnicodeSet { /** * Reallocate this objects internal structures to take up the least * possible space , without changing this object ' s value . */ public UnicodeSet compact ( ) { } }
checkFrozen ( ) ; if ( len != list . length ) { int [ ] temp = new int [ len ] ; System . arraycopy ( list , 0 , temp , 0 , len ) ; list = temp ; } rangeList = null ; buffer = null ; return this ;
public class ZipUtils { /** * Checks if the string denotes a file inside a ZIP file using the notation * used for getZipContentsRecursive ( ) . * @ param name The name to check * @ return true if the name denotes a file inside a known zip file format . */ public static boolean isFileInZip ( String name ) { } }
if ( name == null ) { return false ; } for ( String element : ZIP_EXTENSIONS ) { if ( name . toLowerCase ( ) . contains ( element + ZIP_DELIMITER ) ) { return true ; } } return false ;
public class HttpResponse { /** * Static builder to create a response with a 200 status code and the string response body . * @ param body a string */ public static HttpResponse response ( String body ) { } }
return new HttpResponse ( ) . withStatusCode ( OK_200 . code ( ) ) . withReasonPhrase ( OK_200 . reasonPhrase ( ) ) . withBody ( body ) ;
public class ResponseWrapper { private < T extends Resource > T get ( String field , Class < T > k ) { } }
Gson gson = gsonParser ( ) ; JsonObject object = ( JsonObject ) response ; T e = gson . fromJson ( object . get ( field ) . toString ( ) , k ) ; e . setClient ( client ) ; return e ;
public class Bucket { /** * Gets the list of < b > active < / b > nodes within the bucket . An active node is one that * is not failed over * @ return The list of active nodes in the cluster . */ public List < MemcachedServer > activeServers ( ) { } }
ArrayList < MemcachedServer > active = new ArrayList < MemcachedServer > ( servers . length ) ; for ( MemcachedServer server : servers ) { if ( server . isActive ( ) ) { active . add ( server ) ; } } return active ;
public class DashboardInvalidInputErrorException { /** * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setDashboardValidationMessages ( java . util . Collection ) } or * { @ link # withDashboardValidationMessages ( java . util . Collection ) } if you want to override the existing values . * @ param dashboardValidationMessages * @ return Returns a reference to this object so that method calls can be chained together . */ public DashboardInvalidInputErrorException withDashboardValidationMessages ( DashboardValidationMessage ... dashboardValidationMessages ) { } }
if ( this . dashboardValidationMessages == null ) { setDashboardValidationMessages ( new com . amazonaws . internal . SdkInternalList < DashboardValidationMessage > ( dashboardValidationMessages . length ) ) ; } for ( DashboardValidationMessage ele : dashboardValidationMessages ) { this . dashboardValidationMessages . add ( ele ) ; } return this ;
public class ReviewsImpl { /** * The reviews created would show up for Reviewers on your team . As Reviewers complete reviewing , results of the Review would be POSTED ( i . e . HTTP POST ) on the specified CallBackEndpoint . * & lt ; h3 & gt ; CallBack Schemas & lt ; / h3 & gt ; * & lt ; h4 & gt ; Review Completion CallBack Sample & lt ; / h4 & gt ; * & lt ; p & gt ; * { & lt ; br / & gt ; * " ReviewId " : " & lt ; Review Id & gt ; " , & lt ; br / & gt ; * " ModifiedOn " : " 2016-10-11T22:36:32.9934851Z " , & lt ; br / & gt ; * " ModifiedBy " : " & lt ; Name of the Reviewer & gt ; " , & lt ; br / & gt ; * " CallBackType " : " Review " , & lt ; br / & gt ; * " ContentId " : " & lt ; The ContentId that was specified input & gt ; " , & lt ; br / & gt ; * " Metadata " : { & lt ; br / & gt ; * " adultscore " : " 0 . xxx " , & lt ; br / & gt ; * " a " : " False " , & lt ; br / & gt ; * " racyscore " : " 0 . xxx " , & lt ; br / & gt ; * " r " : " True " & lt ; br / & gt ; * } , & lt ; br / & gt ; * " ReviewerResultTags " : { & lt ; br / & gt ; * " a " : " False " , & lt ; br / & gt ; * " r " : " True " & lt ; br / & gt ; * } & lt ; br / & gt ; * } & lt ; br / & gt ; * & lt ; / p & gt ; . * @ param teamName Your team name . * @ param reviewId Id of the review . * @ param addVideoFrameOptionalParameter the object representing the optional parameters to be set before calling this API * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < Void > addVideoFrameAsync ( String teamName , String reviewId , AddVideoFrameOptionalParameter addVideoFrameOptionalParameter , final ServiceCallback < Void > serviceCallback ) { } }
return ServiceFuture . fromResponse ( addVideoFrameWithServiceResponseAsync ( teamName , reviewId , addVideoFrameOptionalParameter ) , serviceCallback ) ;
public class DataGridStateFactory { /** * Lookup a { @ link DataGridURLBuilder } object given a data grid identifier and a specific * { @ link DataGridConfig } object . * @ param name the name of the data grid * @ param config the { @ link DataGridConfig } object to use when creating the * grid ' s { @ link DataGridURLBuilder } object . * @ return the URL builder for a data grid ' s state object */ public final DataGridURLBuilder getDataGridURLBuilder ( String name , DataGridConfig config ) { } }
if ( config == null ) throw new IllegalArgumentException ( Bundle . getErrorString ( "DataGridStateFactory_nullDataGridConfig" ) ) ; DataGridStateCodec codec = lookupCodec ( name , config ) ; DataGridURLBuilder builder = codec . getDataGridURLBuilder ( ) ; return builder ;
public class Journal { /** * Return an iterable to replay the journal by going through all records * locations starting from the given one . * @ param start * @ return * @ throws IOException * @ throws CompactedDataFileException * @ throws ClosedJournalException */ public Iterable < Location > redo ( Location start ) throws ClosedJournalException , CompactedDataFileException , IOException { } }
return new Redo ( start ) ;
public class TextColumn { /** * Returns a new Column containing all the unique values in this column * @ return a column with unique values . */ @ Override public TextColumn unique ( ) { } }
List < String > strings = new ArrayList < > ( asSet ( ) ) ; return TextColumn . create ( name ( ) + " Unique values" , strings ) ;
public class ProcessEpoll { private void cleanupProcess ( LinuxProcess linuxProcess , int stdinFd , int stdoutFd , int stderrFd ) { } }
pidToProcessMap . remove ( linuxProcess . getPid ( ) ) ; fildesToProcessMap . remove ( stdinFd ) ; fildesToProcessMap . remove ( stdoutFd ) ; fildesToProcessMap . remove ( stderrFd ) ; // linuxProcess . close ( linuxProcess . getStdin ( ) ) ; // linuxProcess . close ( linuxProcess . getStdout ( ) ) ; // linuxProcess . close ( linuxProcess . getStderr ( ) ) ; if ( linuxProcess . cleanlyExitedBeforeProcess . get ( ) ) { linuxProcess . onExit ( 0 ) ; return ; } IntByReference ret = new IntByReference ( ) ; int rc = LibC . waitpid ( linuxProcess . getPid ( ) , ret , LibC . WNOHANG ) ; if ( rc == 0 ) { deadPool . add ( linuxProcess ) ; } else if ( rc < 0 ) { linuxProcess . onExit ( ( Native . getLastError ( ) == LibC . ECHILD ) ? Integer . MAX_VALUE : Integer . MIN_VALUE ) ; } else { handleExit ( linuxProcess , ret . getValue ( ) ) ; }
public class DialogPreference { /** * Obtains the background of the dialog , which is shown by the preference , from a specific typed * array . * @ param typedArray * The typed array , the background should be obtained from , as an instance of the class * { @ link TypedArray } . The typed array may not be null */ private void obtainDialogBackground ( @ NonNull final TypedArray typedArray ) { } }
int resourceId = typedArray . getResourceId ( R . styleable . DialogPreference_dialogBackground , - 1 ) ; if ( resourceId != - 1 ) { setDialogBackground ( resourceId ) ; } else { int color = typedArray . getColor ( R . styleable . DialogPreference_dialogBackground , - 1 ) ; if ( color != - 1 ) { setDialogBackgroundColor ( color ) ; } }
public class IfcCartesianPointImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ SuppressWarnings ( "unchecked" ) public EList < String > getCoordinatesAsString ( ) { } }
return ( EList < String > ) eGet ( Ifc2x3tc1Package . Literals . IFC_CARTESIAN_POINT__COORDINATES_AS_STRING , true ) ;
public class ClassIndex { /** * Retrieves names of classes from given package . * The package must be annotated with { @ link IndexSubclasses } for the classes inside * to be indexed at compile - time by { @ link org . atteo . classindex . processor . ClassIndexProcessor } . * @ param packageName name of the package to search classes for * @ param classLoader classloader for loading index file * @ return names of classes from package */ public static Iterable < String > getPackageClassesNames ( String packageName , ClassLoader classLoader ) { } }
Iterable < String > entries = readIndexFile ( classLoader , packageName . replace ( "." , "/" ) + "/" + PACKAGE_INDEX_NAME ) ; List < String > result = new ArrayList < > ( ) ; for ( String simpleName : entries ) { result . add ( packageName + "." + simpleName ) ; } return result ;
public class CredentialFactory { /** * Obtain the Application Default com . google . api . client . auth . oauth2 . Credential * @ return the Application Default Credential */ public static GoogleCredential getApplicationDefaultCredential ( ) { } }
try { GoogleCredential credential = GoogleCredential . getApplicationDefault ( ) ; if ( credential . createScopedRequired ( ) ) { credential = credential . createScoped ( Arrays . asList ( "https://www.googleapis.com/auth/genomics" ) ) ; } return credential ; } catch ( IOException e ) { throw new RuntimeException ( MISSING_ADC_EXCEPTION_MESSAGE , e ) ; }
public class TAIWebUtils { /** * Gets and validates the authorization endpoint URL from the provided social login configuration . */ public String getAuthorizationEndpoint ( SocialLoginConfig clientConfig ) throws SocialLoginException { } }
final String authzEndpoint = clientConfig . getAuthorizationEndpoint ( ) ; SocialUtil . validateEndpointWithQuery ( authzEndpoint ) ; return authzEndpoint ;
public class ManagedObject { /** * A subclass may override this method to do its own optimisticReplace processing after the log * has been written . For the ManagedObject to get this call it must have been on the Notify * list when Transaction . optimisticReplace ( ) was invoked . * @ param transaction controling the replacement . * @ throws ObjectManagerException */ public void optimisticReplaceLogged ( Transaction transaction ) throws ObjectManagerException { } }
if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , "optimisticReplaceLogged" , "transaction=" + transaction + "(Transaction)" ) ; // By default does nothing , unless overridden . if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , "optimisticReplaceLogged" ) ;
public class LoadFileImageSequence { /** * Loads the next image into a BufferedImage and returns it . The same instance * or a new instance of a BufferedImage might be returned each time . Don ' t rely * on either behavior being consistent . * @ return A BufferedImage containing the next image . */ public T next ( ) { } }
if ( loop ) { if ( forwards ) { if ( index >= fileNames . size ( ) ) { index = fileNames . size ( ) - 1 ; forwards = false ; } } else { if ( index < 0 ) { index = 0 ; forwards = true ; } } } if ( forwards ) imageGUI = UtilImageIO . loadImage ( fileNames . get ( index ++ ) ) ; else imageGUI = UtilImageIO . loadImage ( fileNames . get ( index -- ) ) ; if ( imageGUI == null ) return null ; image = type . createImage ( imageGUI . getWidth ( ) , imageGUI . getHeight ( ) ) ; ConvertBufferedImage . convertFrom ( imageGUI , image , true ) ; // no changes needed so return the original if ( scalefactor == 1 ) return image ; // scale down the image int width = image . getWidth ( ) / scalefactor ; int height = image . getHeight ( ) / scalefactor ; if ( scaled == null || scaled . getWidth ( ) != width || scaled . getHeight ( ) != height ) { scaled = new BufferedImage ( width , height , imageGUI . getType ( ) ) ; } Graphics2D g2 = scaled . createGraphics ( ) ; AffineTransform affine = new AffineTransform ( ) ; affine . setToScale ( 1.0 / scalefactor , 1.0 / scalefactor ) ; g2 . drawImage ( imageGUI , affine , null ) ; imageGUI = scaled ; return image ;
public class ExprParser { /** * map : ' { ' ( mapEntry ( , mapEntry ) * ) ? ' } ' * mapEntry : ( ID | STR ) ' : ' expr */ Expr map ( ) { } }
if ( peek ( ) . sym != Sym . LBRACE ) { return array ( ) ; } LinkedHashMap < Object , Expr > mapEntry = new LinkedHashMap < Object , Expr > ( ) ; Map map = new Map ( mapEntry ) ; move ( ) ; if ( peek ( ) . sym == Sym . RBRACE ) { move ( ) ; return map ; } buildMapEntry ( mapEntry ) ; while ( peek ( ) . sym == Sym . COMMA ) { move ( ) ; buildMapEntry ( mapEntry ) ; } match ( Sym . RBRACE ) ; return map ;
public class AddressDivisionGrouping {

    /**
     * Handles the cases in which we can use longs rather than BigInteger.
     * Returns {@code null} when any required value does not fit in a long,
     * signalling the caller to fall back to BigInteger arithmetic.
     *
     * @param section the section to increment
     * @param increment the (possibly negative) increment to apply
     * @param addrCreator creator used to build the resulting section
     * @param lowerProducer supplies the lowest section of the range
     * @param upperProducer supplies the highest section of the range
     * @param prefixLength the prefix length for the result, may be null
     * @return the incremented section, or {@code null} when the long fast path
     *         does not apply
     */
    protected static <R extends AddressSection, S extends AddressSegment> R fastIncrement(R section, long increment, AddressCreator<?, R, ?, S> addrCreator, Supplier<R> lowerProducer, Supplier<R> upperProducer, Integer prefixLength) {
        if (increment >= 0) {
            BigInteger count = section.getCount();
            // Fast path requires the count to fit in a long.
            if (count.compareTo(LONG_MAX) <= 0) {
                long longCount = count.longValue();
                if (longCount > increment) {
                    // Target lies within this section's own range.
                    if (longCount == increment + 1) {
                        // Landing exactly on the top of the range.
                        return upperProducer.get();
                    }
                    return incrementRange(section, increment, addrCreator, lowerProducer, prefixLength);
                }
                // Increment goes past the range; the absolute lower and upper
                // values must also fit in a long for the fast path.
                BigInteger value = section.getValue();
                BigInteger upperValue;
                if (value.compareTo(LONG_MAX) <= 0 && (upperValue = section.getUpperValue()).compareTo(LONG_MAX) <= 0) {
                    return increment(section, increment, addrCreator, count.longValue(), value.longValue(), upperValue.longValue(), lowerProducer, upperProducer, prefixLength);
                }
            }
        } else {
            // Negative increment: applied relative to the lowest value.
            BigInteger value = section.getValue();
            if (value.compareTo(LONG_MAX) <= 0) {
                return add(lowerProducer.get(), value.longValue(), increment, addrCreator, prefixLength);
            }
        }
        // Values too large for long arithmetic; caller takes the slow path.
        return null;
    }
}
public class AxesWalker {

    /**
     * Moves the <code>TreeWalker</code> to the next visible node in document
     * order relative to the current node, and returns the new node. If the
     * current node has no next node, or if the search for nextNode attempts
     * to step upward from the TreeWalker's root node, returns
     * <code>null</code>, and retains the current node.
     *
     * @return The new node, or <code>null</code> if the current node has no
     *         next node in the TreeWalker's logical view.
     */
    public int nextNode() {
        int nextNode = DTM.NULL;
        // Resume from the walker that produced the previous result.
        AxesWalker walker = wi().getLastUsedWalker();
        while (true) {
            if (null == walker)
                break;
            nextNode = walker.getNextNode();
            if (DTM.NULL == nextNode) {
                // Current walker is exhausted: pop back to the previous walker
                // in the chain and let it produce the next candidate.
                walker = walker.m_prevWalker;
            } else {
                // Skip candidates rejected by this walker's filter.
                if (walker.acceptNode(nextNode) != DTMIterator.FILTER_ACCEPT) {
                    continue;
                }
                if (null == walker.m_nextWalker) {
                    // Last walker in the chain: this node is the result.
                    wi().setLastUsedWalker(walker);
                    // return walker.returnNextNode(nextNode);
                    break;
                } else {
                    // Push down: re-root the next walker at the accepted node
                    // and continue the search one level deeper.
                    AxesWalker prev = walker;
                    walker = walker.m_nextWalker;
                    walker.setRoot(nextNode);
                    walker.m_prevWalker = prev;
                    continue;
                }
            } // if (null != nextNode)
        } // while (null != walker)
        return nextNode;
    }
}
public class CSVUtil {

    /**
     * Joins the two input CSV files according to the {@link ValueMapping}s,
     * optionally applying the given prefixes to fields in the input and other
     * inputs respectively.
     * Can also perform a full outer join by setting leftOuterJoin to false.
     *
     * @param input         The reference input (left)
     * @param otherInput    The input to join against (right)
     * @param map           The mappings to apply and use to define the join fields
     * @param output        The Writer which will receive the output CSV file
     *                      containing the results of the join
     * @param inputPrefix   An optional prefix to apply to all of the fields in the
     *                      input file, set to the empty string to disable it.
     * @param otherPrefix   An optional prefix to apply to all of the fields in the
     *                      other file, set to the empty string to disable it.
     * @param leftOuterJoin True to use a left outer join and false to use a full
     *                      outer join.
     * @return The output headers for the joined file.
     * @throws ScriptException If there are issues mapping fields.
     * @throws IOException     If there are issues reading or writing files.
     */
    public static List<String> runJoiner(Reader input, Reader otherInput, List<ValueMapping> map, Writer output, String inputPrefix, String otherPrefix, boolean leftOuterJoin) throws ScriptException, IOException {
        // TODO: Use the following measurements to determine what processing
        // method to use
        int inputFileBytes = -1;
        int otherFileBytes = -1;
        // Spool both inputs to temp files so each can be re-read from disk.
        final Path tempInputFile = Files.createTempFile("tempInputFile-", ".csv");
        try (final BufferedWriter tempOutput = Files.newBufferedWriter(tempInputFile, StandardCharsets.UTF_8);) {
            inputFileBytes = IOUtils.copy(input, tempOutput);
        }
        final Path tempOtherFile = Files.createTempFile("tempOtherFile-", ".csv");
        try (final BufferedWriter tempOtherOutput = Files.newBufferedWriter(tempOtherFile, StandardCharsets.UTF_8);) {
            otherFileBytes = IOUtils.copy(otherInput, tempOtherOutput);
        }
        if (inputFileBytes < otherFileBytes) {
            // TODO: Swap source and destination so that the in-memory set is
            // the smaller of the two
        }
        try {
            // Load the whole right-hand ("other") input into memory: its
            // (prefixed) headers and all of its data lines.
            final List<String> otherH = new ArrayList<>();
            final List<List<String>> otherLines = new ArrayList<>();
            System.out.println("Starting adding other lines to in-memory list...");
            try (final BufferedReader otherTemp = Files.newBufferedReader(tempOtherFile, StandardCharsets.UTF_8)) {
                CSVStream.parse(otherTemp, otherHeader -> otherHeader.forEach(h -> otherH.add(otherPrefix + h)), (otherHeader, otherL) -> {
                    return otherL;
                }, otherL -> {
                    otherLines.add(new ArrayList<>(otherL));
                });
            }
            System.out.println("Completed adding other lines to in-memory list.");
            // Create a set for efficient lookup
            final Set<String> otherHSet = new HashSet<>(otherH);
            // NOTE(review): 'outputFields' is never used below.
            final Function<ValueMapping, String> outputFields = e -> e.getOutputField();
            final List<String> outputHeaders = ValueMapping.getOutputFieldsFromList(map);
            final Map<String, String> defaultValues = ValueMapping.getDefaultValuesFromList(map);
            // Exactly one CSVJOIN mapping defines the join keys.
            final List<ValueMapping> mergeFieldsOrdered = map.stream().filter(k -> k.getLanguage() == ValueMappingLanguage.CSVJOIN).collect(Collectors.toList());
            if (mergeFieldsOrdered.size() != 1) {
                throw new RuntimeException("Can only support exactly one CsvJoin mapping: found " + mergeFieldsOrdered.size());
            }
            final List<ValueMapping> nonMergeFieldsOrdered = map.stream().filter(k -> k.getLanguage() != ValueMappingLanguage.CSVJOIN).collect(Collectors.toList());
            final ValueMapping m = mergeFieldsOrdered.get(0);
            final String[] destFields = m.getDestFields();
            final String[] sourceFields = m.getSourceFields();
            final CsvSchema schema = CSVStream.buildSchema(outputHeaders);
            final Writer writer = output;
            try (final SequenceWriter csvWriter = CSVStream.newCSVWriter(writer, schema);) {
                final JDefaultDict<String, Set<String>> primaryKeys = new JDefaultDict<>(k -> new HashSet<>());
                final JDefaultDict<String, JDefaultDict<String, AtomicInteger>> valueCounts = new JDefaultDict<>(k -> new JDefaultDict<>(v -> new AtomicInteger(0)));
                // Right-hand lines that matched at least one left-hand line;
                // used for the full-outer-join pass at the end.
                final Set<List<String>> matchedOtherLines = new LinkedHashSet<>();
                final List<String> previousLine = new ArrayList<>();
                final List<String> previousMappedLine = new ArrayList<>();
                final AtomicInteger lineNumber = new AtomicInteger(0);
                final AtomicInteger filteredLineNumber = new AtomicInteger(0);
                final long startTime = System.currentTimeMillis();
                // Writes a mapped line and remembers it as the "previous" line
                // for the next mapping invocation.
                final BiConsumer<List<String>, List<String>> mapLineConsumer = Unchecked.biConsumer((line, mapped) -> {
                    previousLine.clear();
                    previousLine.addAll(line);
                    previousMappedLine.clear();
                    previousMappedLine.addAll(mapped);
                    csvWriter.write(mapped);
                });
                // If the streamCSV below is parallelised, each thread must be
                // given a separate temporaryMatchMap, e.g. a ConcurrentHashMap.
                final Map<String, Object> temporaryMatchMap = new HashMap<>(destFields.length, 0.75f);
                final List<String> inputHeaders = new ArrayList<>();
                try (final BufferedReader inputTemp = Files.newBufferedReader(tempInputFile, StandardCharsets.UTF_8)) {
                    CSVStream.parse(inputTemp, h -> h.forEach(nextH -> inputHeaders.add(inputPrefix + nextH)), (h, l) -> {
                        final int nextLineNumber = lineNumber.incrementAndGet();
                        // Periodic progress/throughput report.
                        if (nextLineNumber % 1000 == 0) {
                            double secondsSinceStart = (System.currentTimeMillis() - startTime) / 1000.0d;
                            System.out.printf("%d\tSeconds since start: %f\tRecords per second: %f%n", nextLineNumber, secondsSinceStart, nextLineNumber / secondsSinceStart);
                        }
                        final int nextFilteredLineNumber = filteredLineNumber.incrementAndGet();
                        try {
                            final List<String> mergedInputHeaders = new ArrayList<>(inputHeaders);
                            final List<String> nextMergedLine = new ArrayList<>(l);
                            // Join-key values for this left-hand line.
                            final Map<String, Object> matchMap = buildMatchMap(m, mergedInputHeaders, nextMergedLine, false, temporaryMatchMap, sourceFields, destFields);
                            // A right-hand line matches when no join-key entry fails.
                            final Predicate<List<String>> otherLinePredicate = otherL -> {
                                return !matchMap.entrySet().stream().filter(nextOtherFieldMatcher -> {
                                    final String key = nextOtherFieldMatcher.getKey();
                                    return !otherHSet.contains(key) || !otherL.get(otherH.indexOf(key)).equals(nextOtherFieldMatcher.getValue());
                                }).findAny().isPresent();
                            };
                            // Merge a matching right-hand line's fields into
                            // this left-hand line.
                            final Consumer<List<String>> otherLineConsumer = otherL -> {
                                matchedOtherLines.add(otherL);
                                final Map<String, Object> leftOuterJoinMap = leftOuterJoin(m, mergedInputHeaders, nextMergedLine, otherH, otherL, false);
                                nonMergeFieldsOrdered.stream().map(nextMapping -> nextMapping.getInputField()).forEachOrdered(inputField -> {
                                    if (leftOuterJoinMap.containsKey(inputField) && !mergedInputHeaders.contains(inputField)) {
                                        mergedInputHeaders.add(inputField);
                                        nextMergedLine.add((String) leftOuterJoinMap.get(inputField));
                                    }
                                });
                            };
                            otherLines.parallelStream().filter(otherLinePredicate).findAny().ifPresent(otherLineConsumer);
                            final List<String> mapLine = ValueMapping.mapLine(new ValueMappingContext(mergedInputHeaders, nextMergedLine, previousLine, previousMappedLine, map, primaryKeys, valueCounts, nextLineNumber, nextFilteredLineNumber, mapLineConsumer, outputHeaders, defaultValues, Optional.empty()));
                            mapLineConsumer.accept(nextMergedLine, mapLine);
                        } catch (final LineFilteredException e) {
                            // Swallow line filtered exception and return null
                            // below to eliminate it.
                            // We expect streamCSV to operate in sequential
                            // order; print a warning if it doesn't.
                            final boolean success = filteredLineNumber.compareAndSet(nextFilteredLineNumber, nextFilteredLineNumber - 1);
                            if (!success) {
                                System.out.println("Line numbers may not be consistent");
                            }
                        }
                        return null;
                    }, l -> {
                    });
                }
                if (!leftOuterJoin) {
                    // Full outer join: also emit every right-hand line that
                    // never matched a left-hand line.
                    final Consumer<List<String>> fullOuterJoinConsumer = Unchecked.consumer(l -> {
                        final int nextLineNumber = lineNumber.incrementAndGet();
                        final int nextFilteredLineNumber = filteredLineNumber.incrementAndGet();
                        try {
                            final List<String> mergedInputHeaders = new ArrayList<>(inputHeaders);
                            final List<String> nextMergedLine = new ArrayList<>(l);
                            nonMergeFieldsOrdered.stream().map(nextMapping -> nextMapping.getInputField()).forEachOrdered(inputField -> {
                                if (otherH.contains(inputField) && !mergedInputHeaders.contains(inputField)) {
                                    mergedInputHeaders.add(inputField);
                                    nextMergedLine.add(l.get(otherH.indexOf(inputField)));
                                }
                            });
                            final List<String> mapLine = ValueMapping.mapLine(new ValueMappingContext(otherH, nextMergedLine, previousLine, previousMappedLine, map, primaryKeys, valueCounts, nextLineNumber, nextFilteredLineNumber, mapLineConsumer, outputHeaders, defaultValues, Optional.empty()));
                            mapLineConsumer.accept(nextMergedLine, mapLine);
                        } catch (final LineFilteredException e) {
                            // Swallow line filtered exception; see above.
                            final boolean success = filteredLineNumber.compareAndSet(nextFilteredLineNumber, nextFilteredLineNumber - 1);
                            if (!success) {
                                System.out.println("Line numbers may not be consistent");
                            }
                        }
                    });
                    // Any right-hand line that never matched any join lines
                    // must, for a full outer join, be emitted separately.
                    final Predicate<List<String>> fullOuterJoinPredicate = l -> !matchedOtherLines.contains(l);
                    otherLines.stream().filter(fullOuterJoinPredicate).forEach(fullOuterJoinConsumer);
                }
            }
            return outputHeaders;
        } finally {
            // Always clean up the spooled temp files.
            Files.deleteIfExists(tempInputFile);
            Files.deleteIfExists(tempOtherFile);
        }
    }
}
public class MailClient { /** * IMAP only */ public void createFolder ( String folderName ) throws MessagingException , ApplicationException { } }
if ( folderExists ( folderName ) ) throw new ApplicationException ( "cannot create folder [" + folderName + "], folder already exists." ) ; Folder folder = getFolder ( folderName , null , false , true ) ; if ( ! folder . exists ( ) ) folder . create ( Folder . HOLDS_MESSAGES ) ;
public class FileListOperations { /** * Adds a new fileName in the list of files making up this index * @ param fileName */ void addFileName ( final String fileName ) { } }
writeLock . lock ( ) ; try { final FileListCacheValue fileList = getFileList ( ) ; boolean done = fileList . add ( fileName ) ; if ( done ) { updateFileList ( fileList ) ; if ( trace ) log . trace ( "Updated file listing: added " + fileName ) ; } } finally { writeLock . unlock ( ) ; }
public class DefaultAsyncSearchQueryResult { /** * A utility method to convert an HTTP 412 response from the search service into a proper * { @ link AsyncSearchQueryResult } . HTTP 412 indicates the request couldn ' t be satisfied with given * consistency before the timeout expired . This is translated to a { @ link FtsConsistencyTimeoutException } . * @ return an { @ link AsyncSearchQueryResult } that will emit a { @ link FtsConsistencyTimeoutException } when calling * its { @ link AsyncSearchQueryResult # hits ( ) hits ( ) } method . * @ deprecated FTS is still in BETA so the response format is likely to change in a future version , and be * unified with the HTTP 200 response format . */ @ Deprecated public static AsyncSearchQueryResult fromHttp412 ( ) { } }
// dummy default values SearchStatus status = new DefaultSearchStatus ( 1L , 1L , 0L ) ; SearchMetrics metrics = new DefaultSearchMetrics ( 0L , 0L , 0d ) ; return new DefaultAsyncSearchQueryResult ( status , Observable . < SearchQueryRow > error ( new FtsConsistencyTimeoutException ( ) ) , Observable . < FacetResult > empty ( ) , Observable . just ( metrics ) ) ;
public class ProgramControlScreen { /** * Set up all the screen fields . */ public void setupSFields ( ) { } }
BaseField field = this . getMainRecord ( ) . getField ( ProgramControl . PROJECT_NAME ) ; field . setupDefaultView ( this . getNextLocation ( ScreenConstants . NEXT_LOGICAL , ScreenConstants . SET_ANCHOR ) , this , ScreenConstants . DEFAULT_DISPLAY ) ; SCannedBox button = new SCannedBox ( this . getNextLocation ( ScreenConstants . RIGHT_OF_LAST , ScreenConstants . DONT_SET_ANCHOR ) , this , null , ScreenConstants . DEFAULT_DISPLAY , null , ProgramControl . SET_DEFAULT_COMMAND , Constants . SUBMIT , ProgramControl . SET_DEFAULT_COMMAND , null ) ; field . setHidden ( true ) ; super . setupSFields ( ) ; field . setHidden ( false ) ;
public class CacheUnitImpl { /** * This is called by ServerCache to start BatchUpdateDaemon , * InvalidationAuditDaemon , TimeLimitDaemon and ExternalCacheServices * These services should only start once for all cache instances * @ param startTLD this param is false for a third party cache provider */ public void startServices ( boolean startTLD ) { } }
synchronized ( this . serviceMonitor ) { // multiple threads can call this concurrently if ( this . batchUpdateDaemon == null ) { // Initialize BatchUpdateDaemon object batchUpdateDaemon = new BatchUpdateDaemon ( cacheConfig . batchUpdateInterval ) ; // Initialize InvalidationAuditDaemon object invalidationAuditDaemon = new InvalidationAuditDaemon ( cacheConfig . timeHoldingInvalidations ) ; // link invalidationAuditDaemon to BatchUpdateDaemon batchUpdateDaemon . setInvalidationAuditDaemon ( invalidationAuditDaemon ) ; if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "startServices() - starting BatchUpdateDaemon/invalidationAuditDaemon services. " + "These services should only start once for all cache instances. Settings are: " + " batchUpdateInterval=" + cacheConfig . batchUpdateInterval + " timeHoldingInvalidations=" + cacheConfig . timeHoldingInvalidations ) ; } // start services batchUpdateDaemon . start ( ) ; invalidationAuditDaemon . start ( ) ; } if ( startTLD && this . timeLimitDaemon == null ) { // Initialize TimeLimitDaemon object // lruToDiskTriggerTime is set to the default ( 5 sec ) under the following conditions // (1 ) less than 1 msec // (2 ) larger than timeGranularityInSeconds int lruToDiskTriggerTime = CacheConfig . DEFAULT_LRU_TO_DISK_TRIGGER_TIME ; if ( cacheConfig . lruToDiskTriggerTime > cacheConfig . timeGranularityInSeconds * 1000 || cacheConfig . lruToDiskTriggerTime < CacheConfig . MIN_LRU_TO_DISK_TRIGGER_TIME ) { Tr . warning ( tc , "DYNA0069W" , new Object [ ] { new Integer ( cacheConfig . lruToDiskTriggerTime ) , "lruToDiskTriggerTime" , cacheConfig . cacheName , new Integer ( CacheConfig . MIN_LRU_TO_DISK_TRIGGER_TIME ) , new Integer ( cacheConfig . timeGranularityInSeconds * 1000 ) , new Integer ( CacheConfig . DEFAULT_LRU_TO_DISK_TRIGGER_TIME ) } ) ; cacheConfig . lruToDiskTriggerTime = lruToDiskTriggerTime ; } else { lruToDiskTriggerTime = cacheConfig . lruToDiskTriggerTime ; } if ( lruToDiskTriggerTime == CacheConfig . 
DEFAULT_LRU_TO_DISK_TRIGGER_TIME && ( cacheConfig . lruToDiskTriggerPercent > CacheConfig . DEFAULT_LRU_TO_DISK_TRIGGER_PERCENT || cacheConfig . memoryCacheSizeInMB != CacheConfig . DEFAULT_DISABLE_CACHE_SIZE_MB ) ) { lruToDiskTriggerTime = CacheConfig . DEFAULT_LRU_TO_DISK_TRIGGER_TIME_FOR_TRIMCACHE ; cacheConfig . lruToDiskTriggerTime = lruToDiskTriggerTime ; Tr . audit ( tc , "DYNA1069I" , new Object [ ] { new Integer ( cacheConfig . lruToDiskTriggerTime ) } ) ; } // Initialize TimeLimitDaemon object timeLimitDaemon = new TimeLimitDaemon ( cacheConfig . timeGranularityInSeconds , lruToDiskTriggerTime ) ; if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "startServices() - starting TimeLimitDaemon service. " + "This service should only start once for all cache instances. Settings are: " + " timeGranularityInSeconds=" + cacheConfig . timeGranularityInSeconds + " lruToDiskTriggerTime=" + cacheConfig . lruToDiskTriggerTime ) ; } // start service timeLimitDaemon . start ( ) ; } }
public class ConstraintAdapter { /** * Searches pathways that contains this conversion for the possible directions . If both * directions exist , then the result is reversible . * @ param conv the conversion * @ return direction inferred from pathway membership */ protected ConversionDirectionType findDirectionInPathways ( Conversion conv ) { } }
Set < StepDirection > dirs = new HashSet < StepDirection > ( ) ; for ( PathwayStep step : conv . getStepProcessOf ( ) ) { if ( step instanceof BiochemicalPathwayStep ) { StepDirection dir = ( ( BiochemicalPathwayStep ) step ) . getStepDirection ( ) ; if ( dir != null ) dirs . add ( dir ) ; } } if ( dirs . size ( ) > 1 ) return ConversionDirectionType . REVERSIBLE ; else if ( ! dirs . isEmpty ( ) ) { return dirs . iterator ( ) . next ( ) == StepDirection . LEFT_TO_RIGHT ? ConversionDirectionType . LEFT_TO_RIGHT : ConversionDirectionType . RIGHT_TO_LEFT ; } else return null ;
public class FileSystem { /** * Replies the parts of a path . * < p > If the input is { @ code " http : / / www . arakhne . org / path / to / file . x . z . z " } , the replied paths * are : { @ code " " } , { @ code " path " } , { @ code " to " } , and { @ code " file . x . z . z " } . * < p > If the input is { @ code " jar : file : / path1 / archive . jar ! / path2 / file " } , the replied paths * are : { @ code " " } , { @ code " path2 " } , and { @ code " file " } . * @ param filename is the name to parse . * @ return the parts of a path . */ @ Pure public static String [ ] split ( URL filename ) { } }
if ( filename == null ) { return new String [ 0 ] ; } if ( isJarURL ( filename ) ) { return split ( getJarFile ( filename ) ) ; } final String path = filename . getPath ( ) ; String [ ] tab = path . split ( Pattern . quote ( URL_PATH_SEPARATOR ) ) ; if ( tab . length >= 2 && "" . equals ( tab [ 0 ] ) && Pattern . matches ( "^[a-zA-Z][:|]$" , tab [ 1 ] ) ) { // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $ tab = Arrays . copyOfRange ( tab , 1 , tab . length ) ; for ( int i = 1 ; i < tab . length ; ++ i ) { tab [ i ] = decodeHTMLEntities ( tab [ i ] ) ; } } else { for ( int i = 0 ; i < tab . length ; ++ i ) { tab [ i ] = decodeHTMLEntities ( tab [ i ] ) ; } } return tab ;
public class SourceStream { /** * this value straight away and also that we don ' t need to persist it */ public synchronized void initialiseSendWindow ( long sendWindow , long definedSendWindow ) throws SIResourceException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "initialiseSendWindow" , new Object [ ] { Long . valueOf ( sendWindow ) , Long . valueOf ( definedSendWindow ) } ) ; // Prior to V7 the sendWindow was not persisted unless it was modified due to a change in // reachability , otherwise it was stored as INFINITY ( Long . MAX _ VALUE ) . Therefore , if the // defined sendWindow was modified we could be in the situation where before a restart we // had 1000 messages indoubt , then shut the ME down , changes the defined sendWindow to 50 // and restarted . At that point we ' d set the sendWindow to 50 and suddenly we ' d have 950 // available messages again ( not indoubt ) . We ' d then be allowed to reallocate them elsewhere ! // Luckily , the ability to modify the sendWindow wasn ' t correctly exposed prior to V7 ( it ' s // now a custom property ) , so no - one could have modified it . So , we can interpret a value // of INFINITY as the original sendWindow value , which is 1000 . And use that when we see it . if ( sendWindow == RangeList . INFINITY ) { this . definedSendWindow = definedSendWindow ; this . sendWindow = 1000 ; // Original default sendWindow // Now persist the 1000 to make it clearer persistSendWindow ( this . sendWindow , null ) ; } else { this . sendWindow = sendWindow ; this . definedSendWindow = definedSendWindow ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "initialiseSendWindow" ) ;
public class EquivalencesDAGImpl {

    /**
     * Construction: main algorithm (static generic method).
     *
     * Builds the DAG of equivalence classes for the given directed graph:
     * strongly connected components are collapsed into single vertices, and
     * the edges between components are transitively reduced.
     *
     * @param graph the input directed graph
     * @return the DAG over the strongly connected components of {@code graph}
     */
    public static <TT> EquivalencesDAGImpl<TT> getEquivalencesDAG(DefaultDirectedGraph<TT, DefaultEdge> graph) {
        // each set contains vertices which together form a strongly connected
        // component within the given graph
        GabowSCC<TT, DefaultEdge> inspector = new GabowSCC<>(graph);
        List<Equivalences<TT>> equivalenceSets = inspector.stronglyConnectedSets();

        // create the vertex index: each original vertex -> its SCC
        ImmutableMap.Builder<TT, Equivalences<TT>> vertexIndexBuilder = new ImmutableMap.Builder<>();
        for (Equivalences<TT> equivalenceSet : equivalenceSets) {
            for (TT node : equivalenceSet)
                vertexIndexBuilder.put(node, equivalenceSet);
        }
        ImmutableMap<TT, Equivalences<TT>> vertexIndex = vertexIndexBuilder.build();

        // compute the edges between the SCCs
        Map<Equivalences<TT>, Set<Equivalences<TT>>> outgoingEdges = new HashMap<>();
        for (DefaultEdge edge : graph.edgeSet()) {
            Equivalences<TT> v1 = vertexIndex.get(graph.getEdgeSource(edge));
            Equivalences<TT> v2 = vertexIndex.get(graph.getEdgeTarget(edge));
            if (v1 == v2)
                continue; // do not add loops
            Set<Equivalences<TT>> out = outgoingEdges.get(v1);
            if (out == null) {
                out = new HashSet<>();
                outgoingEdges.put(v1, out);
            }
            out.add(v2);
        }

        // compute the transitively reduced DAG
        SimpleDirectedGraph<Equivalences<TT>, DefaultEdge> dag = new SimpleDirectedGraph<>(DefaultEdge.class);
        for (Equivalences<TT> equivalenceSet : equivalenceSets)
            dag.addVertex(equivalenceSet);

        for (Map.Entry<Equivalences<TT>, Set<Equivalences<TT>>> edges : outgoingEdges.entrySet()) {
            Equivalences<TT> v1 = edges.getKey();
            for (Equivalences<TT> v2 : edges.getValue()) {
                // an edge from v1 to v2 is redundant if
                // v1 has an edge going to a vertex v2p
                // from which v2 is reachable (in one step)
                boolean redundant = false;
                if (edges.getValue().size() > 1) {
                    for (Equivalences<TT> v2p : edges.getValue()) {
                        Set<Equivalences<TT>> t2p = outgoingEdges.get(v2p);
                        if (t2p != null && t2p.contains(v2)) {
                            redundant = true;
                            break;
                        }
                    }
                }
                if (!redundant)
                    dag.addEdge(v1, v2);
            }
        }
        // The same index is used for both lookup roles of the constructor.
        return new EquivalencesDAGImpl<>(graph, dag, vertexIndex, vertexIndex);
    }
}
public class SpringContextUtils { /** * Get the { @ link IThymeleafRequestContext } from the Thymeleaf context . * The returned object is a wrapper on the Spring request context that hides the fact of this request * context corresponding to a Spring WebMVC or Spring WebFlux application . * This will be done by looking for a context variable called * { @ link SpringContextVariableNames # THYMELEAF _ REQUEST _ CONTEXT } . * @ param context the context * @ return the thymeleaf request context */ public static IThymeleafRequestContext getRequestContext ( final IExpressionContext context ) { } }
if ( context == null ) { return null ; } return ( IThymeleafRequestContext ) context . getVariable ( SpringContextVariableNames . THYMELEAF_REQUEST_CONTEXT ) ;
public class EglHelper { /** * Initialize EGL for a given configuration spec . * @ param configSpec */ public void start ( ) { } }
// Log . d ( " EglHelper " + instanceId , " start ( ) " ) ; if ( mEgl == null ) { // Log . d ( " EglHelper " + instanceId , " getting new EGL " ) ; /* * Get an EGL instance */ mEgl = ( EGL10 ) EGLContext . getEGL ( ) ; } else { // Log . d ( " EglHelper " + instanceId , " reusing EGL " ) ; } if ( mEglDisplay == null ) { // Log . d ( " EglHelper " + instanceId , " getting new display " ) ; /* * Get to the default display . */ mEglDisplay = mEgl . eglGetDisplay ( EGL10 . EGL_DEFAULT_DISPLAY ) ; } else { // Log . d ( " EglHelper " + instanceId , " reusing display " ) ; } if ( mEglConfig == null ) { // Log . d ( " EglHelper " + instanceId , " getting new config " ) ; /* * We can now initialize EGL for that display */ int [ ] version = new int [ 2 ] ; mEgl . eglInitialize ( mEglDisplay , version ) ; mEglConfig = mEGLConfigChooser . chooseConfig ( mEgl , mEglDisplay ) ; } else { // Log . d ( " EglHelper " + instanceId , " reusing config " ) ; } if ( mEglContext == null ) { // Log . d ( " EglHelper " + instanceId , " creating new context " ) ; /* * Create an OpenGL ES context . This must be done only once , an OpenGL context is a somewhat heavy object . */ mEglContext = mEGLContextFactory . createContext ( mEgl , mEglDisplay , mEglConfig ) ; if ( mEglContext == null || mEglContext == EGL10 . EGL_NO_CONTEXT ) { throw new RuntimeException ( "createContext failed" ) ; } } else { // Log . d ( " EglHelper " + instanceId , " reusing context " ) ; } mEglSurface = null ;
public class RabbitConnectionFactoryMetricsPostProcessor { /** * Get the name of a ConnectionFactory based on its { @ code beanName } . * @ param beanName the name of the connection factory bean * @ return a name for the given connection factory */ private String getConnectionFactoryName ( String beanName ) { } }
if ( beanName . length ( ) > CONNECTION_FACTORY_SUFFIX . length ( ) && StringUtils . endsWithIgnoreCase ( beanName , CONNECTION_FACTORY_SUFFIX ) ) { return beanName . substring ( 0 , beanName . length ( ) - CONNECTION_FACTORY_SUFFIX . length ( ) ) ; } return beanName ;
public class TangoEventsAdapter { public void addTangoChangeListener ( ITangoChangeListener listener , String attrName , boolean stateless ) throws DevFailed { } }
addTangoChangeListener ( listener , attrName , new String [ 0 ] , stateless ) ;
public class MapMessage { /** * Formats the Structured data as described in < a href = " https : / / tools . ietf . org / html / rfc5424 " > RFC 5424 < / a > . * @ param format The format identifier . * @ return The formatted String . */ public String asString ( final String format ) { } }
try { return format ( EnglishEnums . valueOf ( MapFormat . class , format ) , new StringBuilder ( ) ) . toString ( ) ; } catch ( final IllegalArgumentException ex ) { return asString ( ) ; }
public class LdapAdapter {

    /**
     * Find the LdapEntry mapped to the Certificate, honouring the configured
     * certificate map mode (not supported / custom mapper / filter descriptor /
     * subject DN).
     *
     * @param certs    the client certificate chain
     * @param srchCtrl the search control (bases, limits, property names, scope)
     * @return the matching entry, or {@code null} when no entry was found
     * @throws WIMException when mapping is disabled, the mapper fails, or more
     *                      than one principal matches
     */
    @FFDCIgnore({ EntityNotFoundException.class,
                  com.ibm.websphere.security.CertificateMapNotSupportedException.class,
                  com.ibm.websphere.security.CertificateMapFailedException.class })
    private LdapEntry mapCertificate(X509Certificate[] certs, LdapSearchControl srchCtrl) throws WIMException {
        LdapEntry result = null;
        String dn = null;
        String filter = null;
        String certMapMode = iLdapConfigMgr.getCertificateMapMode();
        if (ConfigConstants.CONFIG_VALUE_CERT_NOT_SUPPORTED_MODE.equalsIgnoreCase(certMapMode)) {
            // Certificate mapping is explicitly disabled for this registry.
            if (tc.isDebugEnabled()) {
                Tr.debug(tc, "Certificate authentication has been disabled for this LDAP registry.");
            }
            String msg = Tr.formatMessage(tc, WIMMessageKey.LDAP_REGISTRY_CERT_IGNORED);
            throw new CertificateMapNotSupportedException(WIMMessageKey.LDAP_REGISTRY_CERT_IGNORED, msg);
        } else if (ConfigConstants.CONFIG_VALUE_CUSTOM_MODE.equalsIgnoreCase(certMapMode)) {
            // Delegate the mapping to a user-supplied X509CertificateMapper.
            String mapping;
            try {
                X509CertificateMapper mapper = iCertificateMapperRef.get();
                if (mapper == null) {
                    String msg = Tr.formatMessage(tc, WIMMessageKey.LDAP_REGISTRY_MAPPER_NOT_BOUND);
                    throw new CertificateMapFailedException(WIMMessageKey.LDAP_REGISTRY_MAPPER_NOT_BOUND, msg);
                }
                if (tc.isDebugEnabled()) {
                    Tr.debug(tc, "Using custom X.509 certificate mapper: " + mapper.getClass());
                }
                mapping = mapper.mapCertificate(certs);
                if (tc.isDebugEnabled()) {
                    Tr.debug(tc, "The custom X.509 certificate mapper returned the following mapping: " + mapping);
                }
            } catch (com.ibm.websphere.security.CertificateMapNotSupportedException e) {
                // Re-wrap the public API exception in the internal equivalent,
                // preserving the cause.
                String msg = Tr.formatMessage(tc, WIMMessageKey.LDAP_REGISTRY_CUSTOM_MAPPER_NOT_SUPPORTED);
                throw new CertificateMapNotSupportedException(WIMMessageKey.LDAP_REGISTRY_CUSTOM_MAPPER_NOT_SUPPORTED, msg, e);
            } catch (com.ibm.websphere.security.CertificateMapFailedException e) {
                String msg = Tr.formatMessage(tc, WIMMessageKey.LDAP_REGISTRY_CUSTOM_MAPPER_FAILED);
                throw new CertificateMapFailedException(WIMMessageKey.LDAP_REGISTRY_CUSTOM_MAPPER_FAILED, msg, e);
            }
            /*
             * The mapper should return some value.
             */
            if (mapping == null || mapping.trim().isEmpty()) {
                String msg = Tr.formatMessage(tc, WIMMessageKey.LDAP_REGISTRY_INVALID_MAPPING);
                throw new CertificateMapFailedException(WIMMessageKey.LDAP_REGISTRY_INVALID_MAPPING, msg);
            }
            /*
             * If in the form of a distinguished name, use it directly;
             * otherwise treat the mapping as a search filter.
             */
            dn = LdapHelper.getValidDN(mapping);
            if (dn == null) {
                filter = mapping;
            }
        } else if (ConfigConstants.CONFIG_VALUE_FILTER_DESCRIPTOR_MODE.equalsIgnoreCase(certMapMode)) {
            // Build the search filter from the configured filter descriptor.
            filter = iLdapConfigMgr.getCertificateLDAPFilter(certs[0]).trim();
        } else {
            // Default mode: use the certificate's subject DN directly.
            dn = LdapHelper.getValidDN(certs[0].getSubjectX500Principal().getName());
        }
        /*
         * Try and validate the user with the LDAP server.
         */
        if (dn != null) {
            /*
             * We have a distinguished name. Search for the user directly.
             */
            try {
                result = iLdapConn.getEntityByIdentifier(dn, null, null, null, srchCtrl.getPropertyNames(), false, false);
            } catch (EntityNotFoundException e) {
                /* User not found in this repository. */
            }
        } else {
            /*
             * We have a search filter. Search over all the base entries.
             */
            String[] searchBases = srchCtrl.getBases();
            int countLimit = srchCtrl.getCountLimit();
            int timeLimit = srchCtrl.getTimeLimit();
            List<String> entityTypes = srchCtrl.getEntityTypes();
            List<String> propNames = srchCtrl.getPropertyNames();
            int scope = srchCtrl.getScope();
            int count = 0;
            for (int i = 0; i < searchBases.length; i++) {
                try {
                    Set<LdapEntry> ldapEntries = iLdapConn.searchEntities(searchBases[i], filter, null, scope, entityTypes, propNames, false, false, countLimit, timeLimit);
                    if (ldapEntries.size() > 1) {
                        // More than one match within a single base is ambiguous.
                        // (An error-log call should be added here once
                        // MULTIPLE_PRINCIPALS_FOUND is in LdapUtilMessages.)
                        String msg = Tr.formatMessage(tc, WIMMessageKey.MULTIPLE_PRINCIPALS_FOUND, WIMMessageHelper.generateMsgParms(filter));
                        throw new CertificateMapFailedException(WIMMessageKey.MULTIPLE_PRINCIPALS_FOUND, msg);
                    } else if (ldapEntries.size() == 1) {
                        if (count == 0) {
                            result = ldapEntries.iterator().next();
                        }
                        count++;
                        if (count > 1) {
                            // Matches in more than one base are also ambiguous.
                            String msg = Tr.formatMessage(tc, WIMMessageKey.MULTIPLE_PRINCIPALS_FOUND, WIMMessageHelper.generateMsgParms(filter));
                            throw new CertificateMapFailedException(WIMMessageKey.MULTIPLE_PRINCIPALS_FOUND, msg);
                        }
                    }
                } catch (EntityNotFoundException e) {
                    /* User not found in this search base. */
                    continue;
                }
            }
        }
        return result;
    }
}
public class TopologyContext { /** * Get component ' s metric from registered metrics by name . Notice : Normally , * one component can only register one metric name once . But now registerMetric * has a bug ( https : / / issues . apache . org / jira / browse / STORM - 254 ) cause the same metric name can register twice . * So we just return the first metric we meet . */ public IMetric getRegisteredMetricByName ( String name ) { } }
IMetric metric = null ; for ( Map < Integer , Map < String , IMetric > > taskIdToNameToMetric : _registeredMetrics . values ( ) ) { Map < String , IMetric > nameToMetric = taskIdToNameToMetric . get ( _taskId ) ; if ( nameToMetric != null ) { metric = nameToMetric . get ( name ) ; if ( metric != null ) { // we just return the first metric we meet break ; } } } return metric ;
public class RevisionApi {

    /**
     * Returns a map of usernames mapped to the timestamps of their contributions.
     * Users of certain user groups (e.g. bots) can be filtered by providing the
     * unwanted groups in {@code groupfilter}. Nothing is filtered if
     * {@code groupfilter} is {@code null} or empty.
     *
     * <p>Filtered results also include unregistered users (because they cannot be
     * filtered using user groups). In order to get results containing only
     * registered users, use {@link #getUserContributionMap(int, String[], boolean)}
     * and set {@code onlyRegistered = true}.
     *
     * <p>In order to make this query fast, create a MySQL index (BTREE) on the
     * ArticleID in the revisions table.
     *
     * @param articleID ID of the article
     * @param groupfilter a list of unwanted user groups
     * @return map of username-timestamp pairs
     * @throws WikiApiException if an error occurs
     */
    public Map<String, Timestamp> getUserContributionMap(final int articleID, String[] groupfilter) throws WikiApiException {
        // Delegate to the full overload; onlyRegistered=false keeps unregistered
        // (IP-based) contributors in the result.
        return getUserContributionMap(articleID, groupfilter, false);
    }
}
public class CmsIdentifiableObjectContainer { /** * Returns the list of objects with the given id . < p > * @ param id the object id * @ return the list of objects if found , or < code > null < / code > */ public List < T > getObjectList ( String id ) { } }
if ( m_uniqueIds ) { throw new UnsupportedOperationException ( "Not supported for unique ids" ) ; } return m_objectsListsById . get ( id ) ;
public class EventSelectorMarshaller {

    /**
     * Marshalls the given {@link EventSelector} into the protocol-specific wire
     * format, one member at a time.
     *
     * @param eventSelector the model object to marshall; must not be {@code null}
     * @param protocolMarshaller the marshaller that receives each bound member
     * @throws SdkClientException if {@code eventSelector} is {@code null} or if
     *         marshalling fails for any reason (the cause is preserved)
     */
    public void marshall(EventSelector eventSelector, ProtocolMarshaller protocolMarshaller) {
        if (eventSelector == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Each member is written using its pre-built binding descriptor.
            protocolMarshaller.marshall(eventSelector.getReadWriteType(), READWRITETYPE_BINDING);
            protocolMarshaller.marshall(eventSelector.getIncludeManagementEvents(), INCLUDEMANAGEMENTEVENTS_BINDING);
            protocolMarshaller.marshall(eventSelector.getDataResources(), DATARESOURCES_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class TreeTraverser {

    /**
     * Returns an unmodifiable iterable over the nodes in a tree structure, using
     * breadth-first traversal. That is, all the nodes of depth 0 are returned,
     * then depth 1, then 2, and so on.
     *
     * <p>No guarantees are made about the behavior of the traversal when nodes
     * change while iteration is in progress or when the iterators generated by
     * {@link #children} are advanced.
     *
     * @param root the root node to start the traversal from; must not be null
     * @return a lazily-evaluated iterable over the tree in breadth-first order
     */
    public final FluentIterable<T> breadthFirstTraversal(final T root) {
        checkNotNull(root);
        return new FluentIterable<T>() {
            @Override
            public UnmodifiableIterator<T> iterator() {
                // A fresh iterator per call, so the iterable can be traversed
                // repeatedly and concurrently by independent consumers.
                return new BreadthFirstIterator(root);
            }
        };
    }
}
public class ConditionEvaluator { /** * Evaluates the error condition . Returns empty if threshold or measure value is not defined . */ private static Optional < EvaluatedCondition > evaluateCondition ( Condition condition , ValueType type , Comparable value ) { } }
Comparable threshold = getThreshold ( condition , type ) ; if ( reachThreshold ( value , threshold , condition ) ) { return of ( new EvaluatedCondition ( condition , EvaluationStatus . ERROR , value . toString ( ) ) ) ; } return Optional . empty ( ) ;
public class MessageDetail {

    /**
     * Adds this message-detail record's properties to the given message header,
     * looking the record up by contact type, person, process info and transport.
     *
     * @param trxMessageHeader the header to augment; a fresh one is created if null
     * @param recMessageDetailTarget the target whose contact type and counter key this detail
     * @param recMessageProcessInfo supplies the process-info key field
     * @param recMessageTransport supplies the transport key field; may also append
     *        its own properties when it is in an edit mode
     * @return the (possibly newly created) header with the merged properties
     */
    public TrxMessageHeader addMessageProperties(TrxMessageHeader trxMessageHeader, MessageDetailTarget recMessageDetailTarget, MessageProcessInfo recMessageProcessInfo, MessageTransport recMessageTransport) {
        try {
            if (trxMessageHeader == null)
                trxMessageHeader = new TrxMessageHeader(null, null);
            // Resolve the contact type for this detail's target record.
            ContactType recContactType = (ContactType) ((ReferenceField) this.getField(MessageDetail.CONTACT_TYPE_ID)).getReferenceRecord(null);
            recContactType = (ContactType) recContactType.getContactType((Record) recMessageDetailTarget);
            if (recContactType == null)
                return trxMessageHeader; // Just being careful
            // Build the composite lookup key from the four related records.
            this.setKeyArea(MessageDetail.CONTACT_TYPE_ID_KEY);
            this.getField(MessageDetail.CONTACT_TYPE_ID).moveFieldToThis((BaseField) recContactType.getCounterField());
            this.getField(MessageDetail.PERSON_ID).moveFieldToThis((BaseField) ((Record) recMessageDetailTarget).getCounterField());
            this.getField(MessageDetail.MESSAGE_PROCESS_INFO_ID).moveFieldToThis((BaseField) recMessageProcessInfo.getCounterField());
            this.getField(MessageDetail.MESSAGE_TRANSPORT_ID).moveFieldToThis((BaseField) recMessageTransport.getCounterField());
            if (this.seek(null)) {
                // Merge this record's stored properties into the header map.
                Map<String, Object> propHeader = ((PropertiesField) this.getField(MessageDetail.PROPERTIES)).loadProperties();
                if (propHeader == null)
                    propHeader = new HashMap<String, Object>(); // Never return null.
                Map<String, Object> map = trxMessageHeader.getMessageHeaderMap();
                if (map != null)
                    map.putAll(propHeader);
                else
                    map = propHeader;
                trxMessageHeader.setMessageHeaderMap(map);
                // If the transport is being edited, let it contribute its own properties.
                if ((recMessageTransport != null) && ((recMessageTransport.getEditMode() == DBConstants.EDIT_CURRENT) || (recMessageTransport.getEditMode() == DBConstants.EDIT_IN_PROGRESS))) {
                    trxMessageHeader = recMessageTransport.addMessageProperties(trxMessageHeader);
                }
            }
        } catch (DBException ex) {
            // NOTE(review): DB failures are logged and the partially-filled header
            // is still returned — confirm best-effort behavior is intended.
            ex.printStackTrace();
        }
        // No need to free the two files as they are linked to the fields in this record
        return trxMessageHeader;
    }
}
public class IfConditionImpl {

    /**
     * Resets the given structural feature to its default value.
     * EMF-generated code — do not hand-edit the dispatch logic.
     *
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case SimpleExpressionsPackage.IF_CONDITION__ELSEIF:
                setElseif(ELSEIF_EDEFAULT);
                return;
            case SimpleExpressionsPackage.IF_CONDITION__CONDITION:
                setCondition((Expression) null);
                return;
        }
        // Unknown features are handled by the superclass.
        super.eUnset(featureID);
    }
}
public class SearchHandler {

    // asciidoctor Documentation-tag::compareMatches[]
    /**
     * Picks the category to select based on the current filter results,
     * preferring the most specific kind of match.
     *
     * @return the category resolved by {@code compareMatches}
     */
    private Category getSelectedCategoryByMatch() {
        // Strategy: go from most specific match (setting) to most unspecific
        // match (category), taking the first element of each filtered list.
        Category bestSettingMatch = filteredSettingsLst.isEmpty() ? null : settingCategoryMap.get(filteredSettingsLst.get(0));
        Category bestGroupMatch = filteredGroupsLst.isEmpty() ? null : groupCategoryMap.get(filteredGroupsLst.get(0));
        Category bestCategoryMatch = filteredCategoriesLst.isEmpty() ? null : filteredCategoriesLst.get(0);
        return compareMatches(bestSettingMatch, bestGroupMatch, bestCategoryMatch, settingMatches, groupMatches, categoryMatches);
    }
}
public class CheckBoxGroup {

    /**
     * Releases any acquired resources and resets all per-request state so the
     * tag instance can be pooled and reused.
     */
    protected void localRelease() {
        // cleanup the context variables used for binding during repeater
        if (_repeater)
            DataAccessProviderStack.removeDataAccessProvider(pageContext);
        // Let the superclass release its state before clearing our own fields.
        super.localRelease();
        _defaultSelections = null;
        _match = null;
        _dynamicAttrs = null;
        _saveBody = null;
        _defaultSingleton = false;
        _defaultSingleValue = false;
        _writer = null;
        // Collections are cleared (not nulled) so they can be reused next cycle.
        _state.clear();
        _hiddenState.clear();
    }
}
public class MappeableRunContainer {

    /**
     * Converts this run container to a bitmap or array container when either
     * would serialize smaller; returns {@code this} when the run encoding is
     * already the most compact of the three.
     *
     * @return this container, or an equivalent array/bitmap container
     */
    private MappeableContainer toEfficientContainer() {
        // Compare serialized sizes of the three candidate representations.
        int sizeAsRunContainer = MappeableRunContainer.serializedSizeInBytes(this.nbrruns);
        int sizeAsBitmapContainer = MappeableBitmapContainer.serializedSizeInBytes(0);
        int card = this.getCardinality();
        int sizeAsArrayContainer = MappeableArrayContainer.serializedSizeInBytes(card);
        if (sizeAsRunContainer <= Math.min(sizeAsBitmapContainer, sizeAsArrayContainer)) {
            return this;
        }
        if (card <= MappeableArrayContainer.DEFAULT_MAX_SIZE) {
            // Small cardinality: expand every run into an array container.
            MappeableArrayContainer answer = new MappeableArrayContainer(card);
            answer.cardinality = 0;
            for (int rlepos = 0; rlepos < this.nbrruns; ++rlepos) {
                int runStart = toIntUnsigned(this.getValue(rlepos));
                int runEnd = runStart + toIntUnsigned(this.getLength(rlepos));
                // next bit could potentially be faster, test
                if (BufferUtil.isBackedBySimpleArray(answer.content)) {
                    // Fast path: write straight into the backing array.
                    short[] ba = answer.content.array();
                    for (int runValue = runStart; runValue <= runEnd; ++runValue) {
                        ba[answer.cardinality++] = (short) runValue;
                    }
                } else {
                    for (int runValue = runStart; runValue <= runEnd; ++runValue) {
                        answer.content.put(answer.cardinality++, (short) runValue);
                    }
                }
            }
            return answer;
        }
        // Large cardinality: set each run as a bit range in a bitmap container.
        MappeableBitmapContainer answer = new MappeableBitmapContainer();
        for (int rlepos = 0; rlepos < this.nbrruns; ++rlepos) {
            int start = toIntUnsigned(this.getValue(rlepos));
            // setBitmapRange takes an exclusive end, hence the +1.
            int end = start + toIntUnsigned(this.getLength(rlepos)) + 1;
            BufferUtil.setBitmapRange(answer.bitmap, start, end);
        }
        answer.cardinality = card;
        return answer;
    }
}
public class Matrix4d {

    /**
     * Apply an orthographic projection transformation for a right-handed
     * coordinate system to this matrix. Equivalent to calling
     * {@link #ortho(double, double, double, double, double, double) ortho()}
     * with {@code zNear = -1} and {@code zFar = +1}.
     *
     * <p>If {@code M} is {@code this} matrix and {@code O} the orthographic
     * projection matrix, then the new matrix will be {@code M * O}: when
     * transforming a vector {@code v} via {@code M * O * v}, the projection is
     * applied first. To set the matrix instead of post-multiplying, use
     * {@link #setOrtho2D(double, double, double, double) setOrtho2D()}.
     *
     * <p>Reference: <a href="http://www.songho.ca/opengl/gl_projectionmatrix.html#ortho">http://www.songho.ca</a>
     *
     * @param left the distance from the center to the left frustum edge
     * @param right the distance from the center to the right frustum edge
     * @param bottom the distance from the center to the bottom frustum edge
     * @param top the distance from the center to the top frustum edge
     * @return this
     */
    public Matrix4d ortho2D(double left, double right, double bottom, double top) {
        // Delegate to the destination-taking overload, writing into this matrix.
        return ortho2D(left, right, bottom, top, this);
    }
}
public class Util { /** * Formats a string and puts the result into a StringBuffer . * Allows for standard Java backslash escapes and a customized behavior * for % escapes in the form of a PrintfSpec . * @ param buf the buffer to append the result to * @ param formatString the string to format * @ param printfSpec the specialization for printf */ public static void printf ( StringBuffer buf , String formatString , PrintfSpec printfSpec ) { } }
for ( int i = 0 ; i < formatString . length ( ) ; ++ i ) { char c = formatString . charAt ( i ) ; if ( ( c == '%' ) && ( i + 1 < formatString . length ( ) ) ) { ++ i ; char code = formatString . charAt ( i ) ; if ( code == '%' ) { buf . append ( '%' ) ; } else { boolean handled = printfSpec . printSpec ( buf , code ) ; if ( ! handled ) { buf . append ( '%' ) ; buf . append ( code ) ; } } } else if ( ( c == '\\' ) && ( i + 1 < formatString . length ( ) ) ) { ++ i ; buf . append ( Util . unescapeChar ( formatString . charAt ( i ) ) ) ; } else { buf . append ( c ) ; } }
public class VetoableASTTransformation {

    /**
     * Wraps an existing setter so that it fires a vetoable-change event before
     * running the original setter body, and (optionally) a property-change
     * event afterwards.
     *
     * @param classNode the class whose setter is rewritten
     * @param bindable whether to also fire {@code firePropertyChange} after setting
     * @param propertyName the property the setter belongs to
     */
    private void wrapSetterMethod(ClassNode classNode, boolean bindable, String propertyName) {
        String getterName = "get" + MetaClassHelper.capitalize(propertyName);
        MethodNode setter = classNode.getSetterMethod("set" + MetaClassHelper.capitalize(propertyName));
        // No setter found: nothing to wrap.
        if (setter != null) {
            // Get the existing code block
            Statement code = setter.getCode();
            Expression oldValue = varX("$oldValue");
            Expression newValue = varX("$newValue");
            Expression proposedValue = varX(setter.getParameters()[0].getName());
            BlockStatement block = new BlockStatement();
            // create a local variable to hold the old value from the getter
            block.addStatement(declS(oldValue, callThisX(getterName)));
            // add the fireVetoableChange method call (listeners may throw to veto)
            block.addStatement(stmt(callThisX("fireVetoableChange", args(constX(propertyName), oldValue, proposedValue))));
            // call the existing block, which will presumably set the value properly
            block.addStatement(code);
            if (bindable) {
                // get the new value to emit in the event
                block.addStatement(declS(newValue, callThisX(getterName)));
                // add the firePropertyChange method call
                block.addStatement(stmt(callThisX("firePropertyChange", args(constX(propertyName), oldValue, newValue))));
            }
            // replace the existing code block with our new one
            setter.setCode(block);
        }
    }
}
public class LazyReact { /** * Generate an infinite Stream * < pre > * { @ code * new LazyReact ( ) . generate ( ( ) - > " hello " ) * . limit ( 5) * . reduce ( SemigroupK . stringConcat ) ; * Optional [ hellohellohellohellohello ] * } < / pre > * @ param generate Supplier that generates stream input * @ return Infinite FutureStream */ public < U > FutureStream < U > generate ( final Supplier < U > generate ) { } }
return construct ( StreamSupport . < U > stream ( new InfiniteClosingSpliteratorFromSupplier < U > ( Long . MAX_VALUE , generate , new Subscription ( ) ) , false ) ) ;
public class JemmyDSL { /** * Finds a component and stores it under the given id . The component can later be used on other * commands using the locator " id = ID _ ASSIGNED " . This method searches both VISIBLE and INVISIBLE * components . * @ param locator The locator ( accepted are name ( default ) , title , text , label ) * @ param id The id * @ param componentType The component type * @ return The component found */ public static Component find ( String locator , String id , String componentType , boolean required ) { } }
java . awt . Component component = findComponent ( locator , currentWindow ( ) . getComponent ( ) . getSource ( ) ) ; if ( component == null ) { if ( ! required ) { componentMap . putComponent ( id , null ) ; return null ; } else { throw new JemmyDSLException ( "Component not found" ) ; } } JComponentOperator operator = convertFind ( component ) ; componentMap . putComponent ( id , operator ) ; final Component finalComponent = convertFind ( operator ) ; if ( finalComponent instanceof Window ) { currentWindow = ( Window ) finalComponent ; } return finalComponent ;
public class ActiveMQQueueJmxStats { /** * Copy out the values to the given destination . * @ param other target stats object to receive the values from this one . */ public void copyOut ( ActiveMQQueueJmxStats other ) { } }
other . setCursorPercentUsage ( this . getCursorPercentUsage ( ) ) ; other . setDequeueCount ( this . getDequeueCount ( ) ) ; other . setEnqueueCount ( this . getEnqueueCount ( ) ) ; other . setMemoryPercentUsage ( this . getMemoryPercentUsage ( ) ) ; other . setNumConsumers ( this . getNumConsumers ( ) ) ; other . setNumProducers ( this . getNumProducers ( ) ) ; other . setQueueSize ( this . getQueueSize ( ) ) ; other . setInflightCount ( this . getInflightCount ( ) ) ;
public class ExampleColorHistogramLookup {

    /**
     * Computes a histogram for each image from its gray-scale intensity alone.
     * Probably the least effective descriptor for looking up similar images.
     *
     * @param images image files to load and describe
     * @return one normalized 150-bin histogram (as a double[]) per image
     * @throws RuntimeException if an image file cannot be loaded
     */
    public static List<double[]> histogramGray(List<File> images) {
        List<double[]> points = new ArrayList<>();
        // Reused work buffer; reshaped to each image's dimensions.
        GrayU8 gray = new GrayU8(1, 1);
        for (File f : images) {
            BufferedImage buffered = UtilImageIO.loadImage(f.getPath());
            if (buffered == null)
                throw new RuntimeException("Can't load image!");
            gray.reshape(buffered.getWidth(), buffered.getHeight());
            ConvertBufferedImage.convertFrom(buffered, gray, true);
            // 150 bins over intensity range [0, 255].
            TupleDesc_F64 imageHist = new TupleDesc_F64(150);
            HistogramFeatureOps.histogram(gray, 255, imageHist);
            UtilFeature.normalizeL2(imageHist); // normalize so that image size doesn't matter
            points.add(imageHist.value);
        }
        return points;
    }
}
public class JdbcSqlAdapter { /** * Invokes the JDBC Query . * If QueryType is Select , returns a java . sql . ResultSet . * If QueryType is Update , returns an Integer with the number of rows updated . */ public Object invoke ( Object conn , Object requestData ) throws AdapterException { } }
try { DatabaseAccess dbAccess = ( DatabaseAccess ) conn ; if ( requestData == null ) throw new AdapterException ( "Missing SQL Query" ) ; String query = ( String ) requestData ; QueryType queryType = getQueryType ( ) ; Object queryParams = getQueryParameters ( ) ; if ( queryParams instanceof List < ? > ) { if ( queryType == QueryType . Select ) return dbAccess . runSelect ( query , ( ( List < ? > ) queryParams ) . toArray ( ) ) ; else if ( queryType == QueryType . Update ) { Integer ret = new Integer ( dbAccess . runUpdate ( query , ( ( List < ? > ) queryParams ) . toArray ( ) ) ) ; dbAccess . commit ( ) ; return ret ; } else throw new AdapterException ( "Unsupported query type: " + queryType ) ; } else { if ( queryType == QueryType . Select ) return dbAccess . runSelect ( query , queryParams ) ; else if ( queryType == QueryType . Update ) { Integer ret = new Integer ( dbAccess . runUpdate ( query , queryParams ) ) ; dbAccess . commit ( ) ; return ret ; } else throw new AdapterException ( "Unsupported query type: " + queryType ) ; } } catch ( SQLException ex ) { AdapterException adapEx = new AdapterException ( - 1 , ex . getMessage ( ) , ex ) ; if ( isRetryable ( ex ) ) adapEx . setRetryable ( true ) ; throw adapEx ; } catch ( Exception ex ) { throw new AdapterException ( - 1 , ex . getMessage ( ) , ex ) ; }
public class HealthCheckCustomConfigMarshaller {

    /**
     * Marshalls the given {@link HealthCheckCustomConfig} into the
     * protocol-specific wire format.
     *
     * @param healthCheckCustomConfig the model object to marshall; must not be {@code null}
     * @param protocolMarshaller the marshaller that receives each bound member
     * @throws SdkClientException if the argument is {@code null} or if
     *         marshalling fails (the cause is preserved)
     */
    public void marshall(HealthCheckCustomConfig healthCheckCustomConfig, ProtocolMarshaller protocolMarshaller) {
        if (healthCheckCustomConfig == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Single member, written with its pre-built binding descriptor.
            protocolMarshaller.marshall(healthCheckCustomConfig.getFailureThreshold(), FAILURETHRESHOLD_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class NameService {

    /**
     * Returns the state map associated with the named {@code Nameable} object,
     * if it has been stored in the {@code NameService} and something has been
     * stored into the map. Otherwise returns {@code null}. If {@code create}
     * is true, the map object is always returned for a live named object.
     *
     * @param name the name of the object; must not be null
     * @param create whether to return the map even before anything is stored in it
     * @return the map for the named object, or {@code null} if nothing has been
     *         stored and {@code create} is false, or the object is gone
     * @throws IllegalStateException if {@code name} is null
     */
    public Map getMap(String name, boolean create) {
        if (name == null)
            throw new IllegalStateException("name must not be null");
        if (_nameMap == null)
            return null;
        // NOTE(review): TrackingObject is returned as a Map here — presumably it
        // implements Map; confirm against its declaration.
        TrackingObject to = (TrackingObject) _nameMap.get(name);
        // The object wasn't found
        if (to == null)
            return null;
        // If the object has been reclaimed, then we remove the named object from the map.
        WeakReference wr = to.getWeakINameable();
        INameable o = (INameable) wr.get();
        if (o == null) {
            // The referent was garbage-collected: drop the stale entry.
            synchronized (_nameMap) {
                _nameMap.remove(name);
            }
            return null;
        }
        if (create)
            return to;
        // Without create, only hand back the map once it has actually been used.
        return to.isMapCreated() ? to : null;
    }
}
public class Monitor {

    /**
     * Main monitoring loop (implemented from Runnable). Parks while there are
     * no observers, and polls for new lines at {@code getDelay()} intervals
     * while at least one observer is registered. Never returns.
     */
    @Override
    public void run() {
        while (true) {
            if (countObservers() == 0) {
                try {
                    // Park "forever" while idle; presumably an interrupt is the
                    // intended wake-up signal when an observer is added —
                    // TODO(review): confirm against the observer-registration code.
                    Thread.sleep(Long.MAX_VALUE);
                } catch (InterruptedException e) {
                }
            } else {
                startWatching();
                while (countObservers() > 0) {
                    getNewLines();
                    try {
                        Thread.sleep(getDelay());
                    } catch (InterruptedException e) {
                        // Interrupt likely used to cut the delay short; loop re-checks
                        // the observer count either way.
                    }
                }
            }
        }
    }
}
public class MCMPAdvertiseTask {

    /**
     * Broadcasts one mod_cluster advertise datagram. The message looks like:
     *
     * <pre>
     * HTTP/1.0 200 OK
     * Date: Thu, 13 Sep 2012 09:24:02 GMT
     * Sequence: 5
     * Digest: ae8e7feb7cd85be346134657de3b0661
     * Server: b58743ba-fd84-11e1-bd12-ad866be2b4cc
     * X-Manager-Address: 127.0.0.1:6666
     * X-Manager-Url: /b58743ba-fd84-11e1-bd12-ad866be2b4cc
     * X-Manager-Protocol: http
     * X-Manager-Host: 10.33.144.3
     * </pre>
     *
     * Failures are logged, never thrown.
     */
    @Override
    public void run() {
        try {
            /*
             * apr_uuid_get(&magd->suuid);
             * magd->srvid[0] = '/';
             * apr_uuid_format(&magd->srvid[1], &magd->suuid);
             * In fact we use the srvid on the 2 second byte [1]
             */
            final byte[] ssalt = this.ssalt;
            final String server = container.getServerID();
            final String date = DATE_FORMAT.format(new Date(System.currentTimeMillis()));
            final String seq = "" + this.seq++;
            final byte[] digest;
            // MessageDigest is not thread-safe; serialize access while hashing
            // salt + date + sequence + server id.
            synchronized (md) {
                md.reset();
                md.update(ssalt);
                digestString(md, date);
                digestString(md, seq);
                digestString(md, server);
                digest = md.digest();
            }
            final String digestString = bytesToHexString(digest);
            // Assemble the HTTP-style advertise payload, one CRLF-terminated header per line.
            final StringBuilder builder = new StringBuilder();
            builder.append("HTTP/1.0 200 OK").append(CRLF)
                   .append("Date: ").append(date).append(CRLF)
                   .append("Sequence: ").append(seq).append(CRLF)
                   .append("Digest: ").append(digestString).append(CRLF)
                   .append("Server: ").append(server).append(CRLF)
                   .append("X-Manager-Address: ").append(NetworkUtils.formatPossibleIpv6Address(host)).append(":").append(port).append(CRLF)
                   .append("X-Manager-Url: ").append(path).append(CRLF)
                   .append("X-Manager-Protocol: ").append(protocol).append(CRLF)
                   .append("X-Manager-Host: ").append(host).append(CRLF);
            final String payload = builder.toString();
            final ByteBuffer byteBuffer = ByteBuffer.wrap(payload.getBytes(StandardCharsets.US_ASCII));
            UndertowLogger.ROOT_LOGGER.proxyAdvertiseMessagePayload(payload);
            channel.sendTo(address, byteBuffer);
        } catch (Exception e) {
            // Advertising is best-effort: log and carry on.
            UndertowLogger.ROOT_LOGGER.proxyAdvertiseCannotSendMessage(e, address);
        }
    }
}
public class JobsInner {

    /**
     * Gets information about a Job.
     *
     * @param resourceGroupName name of the resource group to which the resource belongs
     * @param workspaceName the name of the workspace (1-64 chars; alphanumeric, dash, underscore)
     * @param experimentName the name of the experiment (1-64 chars; alphanumeric, dash, underscore)
     * @param jobName the name of the job within the specified resource group (1-64 chars; alphanumeric, dash, underscore)
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the JobInner object if successful
     */
    public JobInner get(String resourceGroupName, String workspaceName, String experimentName, String jobName) {
        // Blocking facade over the async variant: wait for the single response
        // and unwrap its body.
        return getWithServiceResponseAsync(resourceGroupName, workspaceName, experimentName, jobName).toBlocking().single().body();
    }
}
public class ProxyBranchImpl {

    /**
     * Callback invoked when the remote side has been idle too long while
     * establishing the dialog. Cancels the branch (unless an ACK was already
     * received), notifies registered listeners, and reports the timeout to the
     * proxy.
     *
     * @param responseType which class of response timed out (e.g. FINAL)
     * @throws DispatcherException propagated from cancellation/dispatch
     */
    public void onTimeout(ResponseType responseType) throws DispatcherException {
        if (!proxy.getAckReceived()) {
            this.cancel();
            // A final-response timeout also stops the provisional (1xx) timer.
            if (responseType == ResponseType.FINAL) {
                cancel1xxTimer();
            }
            this.timedOut = true;
            if (originalRequest != null) {
                // Notify any application-registered branch listeners.
                List<ProxyBranchListener> proxyBranchListeners = originalRequest.getSipSession().getSipApplicationSession().getSipContext().getListeners().getProxyBranchListeners();
                if (proxyBranchListeners != null) {
                    for (ProxyBranchListener proxyBranchListener : proxyBranchListeners) {
                        proxyBranchListener.onProxyBranchResponseTimeout(responseType, this);
                    }
                }
            }
            // Just do a timeout response
            proxy.onBranchTimeOut(this);
            logger.warn("Proxy branch has timed out");
        } else {
            // Dialog already ACKed: nothing to cancel, just record the event.
            logger.debug("ACKed proxybranch has timeout");
        }
    }
}
public class ServerContext { /** * Sets the commit index . * @ param commitIndex The commit index . * @ return The Raft context . */ ServerContext setCommitIndex ( long commitIndex ) { } }
Assert . argNot ( commitIndex < 0 , "commit index must be positive" ) ; long previousCommitIndex = this . commitIndex ; if ( commitIndex > previousCommitIndex ) { this . commitIndex = commitIndex ; log . commit ( Math . min ( commitIndex , log . lastIndex ( ) ) ) ; long configurationIndex = cluster . getConfiguration ( ) . index ( ) ; if ( configurationIndex > previousCommitIndex && configurationIndex <= commitIndex ) { cluster . commit ( ) ; } } return this ;
public class SummaryBottomSheet {

    /**
     * Clears all summary {@link View}s by blanking their text.
     */
    private void clearViews() {
        arrivalTimeText.setText(EMPTY_STRING);
        timeRemainingText.setText(EMPTY_STRING);
        distanceRemainingText.setText(EMPTY_STRING);
    }
}
public class ContainerDirector { /** * prepare the applicaition configure files * @ param configureFileName */ public synchronized void prepareAppRoot ( String configureFileName ) throws Exception { } }
if ( ! cb . isKernelStartup ( ) ) { cb . registerAppRoot ( configureFileName ) ; logger . info ( configureFileName + " is ready." ) ; }
public class XARecoveryData { /** * Perform a post - log data check after logging the XARecoveryData prior to the force . * Use this to log the priority in a separate log unit section to the * main XARecoveryData serialized wrapper and classpath data . Note : this * method is not called if we have no logs defined . */ @ Override protected void postLogData ( RecoverableUnit ru ) throws Exception { } }
if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "postLogData" ) ; // Only log if priority is non - zero to keep compatability with old releases if ( _priority != JTAResource . DEFAULT_COMMIT_PRIORITY ) { // Let caller catch any exceptions as it is already handling RU / RUS failures final RecoverableUnitSection section = ru . createSection ( TransactionImpl . RESOURCE_PRIORITY_SECTION , true ) ; section . addData ( Util . intToBytes ( _priority ) ) ; }
public class PreferencesFxUtils { /** * Filters a list of { @ code categories } by a given { @ code description } . * @ param categories the list of categories to be filtered * @ param description to be searched for * @ return a list of { @ code categories } , containing ( ignoring case ) the given { @ code description } */ public static List < Category > filterCategoriesByDescription ( List < Category > categories , String description ) { } }
return categories . stream ( ) . filter ( category -> containsIgnoreCase ( category . getDescription ( ) , description ) ) . collect ( Collectors . toList ( ) ) ;
public class ConfigurationImpl { /** * The " die " method forces this key to be set . Otherwise a runtime exception * will be thrown . * @ param key the key * @ return the Integer or a IllegalArgumentException will be thrown . */ @ Override public Integer getIntegerOrDie ( String key ) { } }
Integer value = getInteger ( key ) ; if ( value == null ) { throw new IllegalArgumentException ( String . format ( ERROR_KEYNOTFOUND , key ) ) ; } else { return value ; }
public class Solo2 { /** * Enter text into a given field resource id * @ param fieldResource - Resource id of a field ( R . id . * ) * @ param value - value to enter into the given field */ public void enterTextAndWait ( int fieldResource , String value ) { } }
EditText textBox = ( EditText ) this . getView ( fieldResource ) ; this . enterText ( textBox , value ) ; this . waitForText ( value ) ;
public class ConversionStringUtils { /** * Unescape all delimiter chars in string . * @ param values Strings * @ return Decoded strings */ public static String [ ] decodeString ( String [ ] values ) { } }
if ( values == null ) { return null ; } String [ ] decodedValues = new String [ values . length ] ; for ( int i = 0 ; i < values . length ; i ++ ) { decodedValues [ i ] = decodeString ( values [ i ] ) ; } return decodedValues ;
public class Dependency {

    /**
     * Adds the Maven artifact as evidence: group id as vendor, artifact id as
     * product, and version as version evidence. Also upgrades or creates the
     * matching Maven package-URL identifier.
     *
     * @param source the source of the evidence
     * @param mavenArtifact the Maven artifact
     * @param confidence the confidence level of this evidence
     */
    public void addAsEvidence(String source, MavenArtifact mavenArtifact, Confidence confidence) {
        // Record GAV coordinates as vendor/product/version evidence when present.
        if (mavenArtifact.getGroupId() != null && !mavenArtifact.getGroupId().isEmpty()) {
            this.addEvidence(EvidenceType.VENDOR, source, "groupid", mavenArtifact.getGroupId(), confidence);
        }
        if (mavenArtifact.getArtifactId() != null && !mavenArtifact.getArtifactId().isEmpty()) {
            this.addEvidence(EvidenceType.PRODUCT, source, "artifactid", mavenArtifact.getArtifactId(), confidence);
        }
        if (mavenArtifact.getVersion() != null && !mavenArtifact.getVersion().isEmpty()) {
            this.addEvidence(EvidenceType.VERSION, source, "version", mavenArtifact.getVersion(), confidence);
        }
        boolean found = false;
        if (mavenArtifact.getArtifactUrl() != null && !mavenArtifact.getArtifactUrl().isEmpty()) {
            // Look for an existing purl identifier with the same artifact/group;
            // if found, bump its confidence and point it at the search URL.
            synchronized (this) {
                for (Identifier i : this.softwareIdentifiers) {
                    if (i instanceof PurlIdentifier) {
                        final PurlIdentifier id = (PurlIdentifier) i;
                        if (mavenArtifact.getArtifactId().equals(id.getName()) && mavenArtifact.getGroupId().equals(id.getNamespace())) {
                            found = true;
                            i.setConfidence(Confidence.HIGHEST);
                            final String url = "http://search.maven.org/#search|ga|1|1%3A%22" + this.getSha1sum() + "%22";
                            i.setUrl(url);
                            // i.setUrl(mavenArtifact.getArtifactUrl());
                            LOGGER.debug("Already found identifier {}. Confidence set to highest", i.getValue());
                            break;
                        }
                    }
                }
            }
        }
        // No existing identifier: create a new Maven package URL from the GAV.
        if (!found && mavenArtifact.getGroupId() != null && mavenArtifact.getArtifactId() != null && mavenArtifact.getVersion() != null) {
            try {
                LOGGER.debug("Adding new maven identifier {}", mavenArtifact);
                final PackageURL p = new PackageURL("maven", mavenArtifact.getGroupId(), mavenArtifact.getArtifactId(), mavenArtifact.getVersion(), null, null);
                final PurlIdentifier id = new PurlIdentifier(p, Confidence.HIGHEST);
                this.addSoftwareIdentifier(id);
            } catch (MalformedPackageURLException ex) {
                // A GAV that cannot form a purl indicates a programming error upstream.
                throw new UnexpectedAnalysisException(ex);
            }
        }
    }
}
public class MatrixFeatures_DDRM { /** * Checks to see if all the diagonal elements in the matrix are positive . * @ param a A matrix . Not modified . * @ return true if all the diagonal elements are positive , false otherwise . */ public static boolean isDiagonalPositive ( DMatrixRMaj a ) { } }
for ( int i = 0 ; i < a . numRows ; i ++ ) { if ( ! ( a . get ( i , i ) >= 0 ) ) return false ; } return true ;
public class ExampleBase { /** * Converts a response to JSON string * @ param response { @ link com . basistech . rosette . apimodel . Response Response } from RosetteAPI * @ return the json string . * @ throws JsonProcessingException if the Jackson library throws an error serializing . */ protected static String responseToJson ( Response response ) throws JsonProcessingException { } }
ObjectMapper mapper = ApiModelMixinModule . setupObjectMapper ( new ObjectMapper ( ) ) ; mapper . enable ( SerializationFeature . INDENT_OUTPUT ) ; mapper . setSerializationInclusion ( JsonInclude . Include . NON_NULL ) ; return mapper . writeValueAsString ( response ) ;
public class Check { /** * Ensures that a readable sequence of { @ code char } values is numeric . Numeric arguments consist only of the * characters 0-9 and may start with 0 ( compared to number arguments , which must be valid numbers - think of a bank * account number ) . * We recommend to use the overloaded method { @ link Check # isNumeric ( CharSequence , String ) } and pass as second * argument the name of the parameter to enhance the exception message . * @ param value * a readable sequence of { @ code char } values which must be a number * @ return the given string argument * @ throws IllegalNumberArgumentException * if the given argument { @ code value } is no number */ @ ArgumentsChecked @ Throws ( { } }
IllegalNullArgumentException . class , IllegalNumberArgumentException . class } ) public static < T extends CharSequence > T isNumeric ( @ Nonnull final T value ) { return isNumeric ( value , EMPTY_ARGUMENT_NAME ) ;
public class JolokiaServer { /** * Initialize this JolokiaServer with the given HttpServer . The calle is responsible for managing ( starting / stopping ) * the HttpServer . * @ param pServer server to use * @ param pConfig configuration * @ param pLazy whether the initialization should be done lazy or not */ protected final void init ( HttpServer pServer , JolokiaServerConfig pConfig , boolean pLazy ) { } }
config = pConfig ; lazy = pLazy ; // Create proper context along with handler final String contextPath = pConfig . getContextPath ( ) ; jolokiaHttpHandler = new JolokiaHttpHandler ( pConfig . getJolokiaConfig ( ) ) ; HttpContext context = pServer . createContext ( contextPath , jolokiaHttpHandler ) ; // Add authentication if configured final Authenticator authenticator = pConfig . getAuthenticator ( ) ; if ( authenticator != null ) { context . setAuthenticator ( authenticator ) ; } url = detectAgentUrl ( pServer , pConfig , contextPath ) ;
public class ListenerKeysLmlAttribute { /** * Utility . * @ param parser parses template . * @ param actor contains the listener . * @ param rawAttributeData unparsed attribute data . * @ param keys handled keys set . */ public static void processKeysAttribute ( final LmlParser parser , final Actor actor , final String rawAttributeData , final IntSet keys ) { } }
final String [ ] keyNames = parser . parseArray ( rawAttributeData , actor ) ; for ( final String keyName : keyNames ) { final int key = Keys . valueOf ( keyName ) ; if ( key <= Keys . UNKNOWN ) { if ( Strings . isInt ( keyName ) ) { keys . add ( Integer . parseInt ( keyName ) ) ; } else { parser . throwErrorIfStrict ( "Unable to determine key for name: " + keyName + ". Note that key name should match the EXACT name from Keys class (see Keys#valueOf(String)) or be the desired int value of key code." ) ; } } else { keys . add ( key ) ; } }
public class SimulatePlanarWorld { /** * Project a point which lies on the 2D planar polygon ' s surface onto the rendered image */ public void computePixel ( int which , double x , double y , Point2D_F64 output ) { } }
SurfaceRect r = scene . get ( which ) ; Point3D_F64 p3 = new Point3D_F64 ( - x , - y , 0 ) ; SePointOps_F64 . transform ( r . rectToCamera , p3 , p3 ) ; // unit sphere p3 . scale ( 1.0 / p3 . norm ( ) ) ; sphereToPixel . compute ( p3 . x , p3 . y , p3 . z , output ) ;
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link Object}{@code >}.
     *
     * @param value
     *     Java instance representing xml element's value.
     * @return
     *     the new instance of {@link JAXBElement}{@code <}{@link Object}{@code >}
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/citygml/bridge/2.0", name = "_GenericApplicationPropertyOfAbstractBridge")
    public JAXBElement<Object> create_GenericApplicationPropertyOfAbstractBridge(Object value) {
        // Wrap the value with the element's QName; null scope marks it a global element.
        return new JAXBElement<Object>(__GenericApplicationPropertyOfAbstractBridge_QNAME, Object.class, null, value);
    }
}
public class SrvEntitySyncAccEntry {
    /**
     * Synchronize AccountingEntry to invoke getSrvBalance().handleNewAccountEntry.
     * Resolves the foreign-born entry against the local database by its birth
     * ID/database, remaps the source reference when it points to an
     * AccountingEntries record, and reports whether the entity is new locally.
     *
     * @param pEntity object
     * @param pAddParam additional params
     * @return isNew if entity exist in database (need update)
     * @throws Exception - an exception
     */
    @Override
    public final boolean sync(final Map<String, Object> pAddParam,
            final Object pEntity) throws Exception {
        AccountingEntry entityPb = (AccountingEntry) pEntity;
        int currDbId = getSrvOrm().getIdDatabase();
        // An entity "born" in this database must never arrive via sync.
        if (currDbId == entityPb.getIdDatabaseBirth()) {
            throw new ExceptionWithCode(ExceptionWithCode.SOMETHING_WRONG,
                "Foreign entity born in this database! {ID, ID BIRTH, DB BIRTH}:"
                    + " {" + entityPb.getItsId() + ", " + entityPb.getIdBirth() + ","
                        + entityPb.getIdDatabaseBirth());
        }
        // Look up a local copy by birth ID + birth database.
        // NOTE(review): toUpperCase() uses the default locale — table names are
        // presumably ASCII, but Locale.ROOT would be safer; confirm.
        String tblNm = pEntity.getClass().getSimpleName().toUpperCase();
        String whereStr = " where " + tblNm + ".IDBIRTH=" + entityPb.getItsId()
            + " and " + tblNm + ".IDDATABASEBIRTH=" + entityPb.getIdDatabaseBirth();
        AccountingEntry entityPbDb = getSrvOrm()
            .retrieveEntityWithConditions(pAddParam, entityPb.getClass(), whereStr);
        // When the source is an AccountingEntries document, remap the foreign
        // source ID to the local AccountingEntries record's ID.
        if (entityPb.getSourceType().equals(this.accountingEntriesCode)) {
            tblNm = AccountingEntries.class.getSimpleName().toUpperCase();
            whereStr = " where " + tblNm + ".IDBIRTH=" + entityPb.getSourceId()
                + " and " + tblNm + ".IDDATABASEBIRTH=" + entityPb.getIdDatabaseBirth();
            AccountingEntries accountingEntries = getSrvOrm()
                .retrieveEntityWithConditions(pAddParam, AccountingEntries.class, whereStr);
            if (accountingEntries == null) {
                throw new ExceptionWithCode(ExceptionWithCode.SOMETHING_WRONG,
                    "Can't find foreign AccountingEntries {ID BIRTH, DB BIRTH}:"
                        + " {" + entityPb.getSourceId() + "," + entityPb.getIdDatabaseBirth());
            }
            entityPb.setSourceId(accountingEntries.getItsId());
        }
        // The incoming ID becomes the birth ID; local ID is reassigned below.
        entityPb.setIdBirth(entityPb.getItsId());
        entityPb.setItsId(null);
        boolean isNew = true;
        if (entityPbDb != null) {
            // Local copy exists — reuse its ID so the caller performs an update.
            entityPb.setItsId(entityPbDb.getItsId());
            isNew = false;
        }
        // This is for SrvBalanceStd only!!!
        getSrvBalance().handleNewAccountEntry(pAddParam, null, null, entityPb.getItsDate());
        return isNew;
    }
}
public class OutRawH3Impl { /** * string data without length */ @ Override public void writeStringData ( String value , int offset , int length ) { } }
char [ ] cBuf = _charBuffer ; int cBufLength = cBuf . length ; for ( int i = 0 ; i < length ; i += cBufLength ) { int sublen = Math . min ( length - i , cBufLength ) ; value . getChars ( offset + i , offset + i + sublen , cBuf , 0 ) ; writeStringChunk ( cBuf , 0 , sublen ) ; }
public class AbstractLog {
    /**
     * Provide a non-fatal notification, unless suppressed by the -nowarn option.
     *
     * @param pos The source position at which to report the note.
     * @param noteKey The key for the localized notification message.
     */
    public void note(DiagnosticPosition pos, Note noteKey) {
        // Build the localized diagnostic for the current source and hand it to the reporter.
        report(diags.note(source, pos, noteKey));
    }
}
public class HBeanProperties { /** * Set a string matrix of properties on a particular bean . * @ param bean Bean to set properties on . * @ param properties in string matrix form . */ public void setPropertiesOn ( final Bean bean ) { } }
String [ ] [ ] properties = getProperties ( ) ; for ( int i = 0 ; i < properties . length ; i ++ ) { if ( properties [ i ] . length < 2 ) { continue ; } for ( int j = 0 ; j < properties [ i ] . length - 1 ; j ++ ) { bean . addProperty ( properties [ i ] [ 0 ] , properties [ i ] [ j + 1 ] ) ; } }
public class FrameworkUtil {
    /**
     * Stores an arbitrary named attribute in the attribute cache.
     *
     * @param key Attribute name.
     * @param value Attribute value. If null, value is removed from cache.
     * @throws IllegalStateException if AppFramework is not initialized
     */
    public static void setAttribute(String key, Object value) {
        // Fail fast before touching the framework instance.
        assertInitialized();
        getAppFramework().setAttribute(key, value);
    }
}