signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class CompressBackupUtil { /** * Adds the file to the tar archive represented by output stream . It ' s caller ' s responsibility to close output stream
* properly .
* @ param out target archive .
* @ param source file to be added .
* @ param fileSize size of the file ( which is known in most cases ) .
* @ throws IOException in case of any issues with underlying store . */
public static void archiveFile ( @ NotNull final ArchiveOutputStream out , @ NotNull final VirtualFileDescriptor source , final long fileSize ) throws IOException { } }
|
if ( ! source . hasContent ( ) ) { throw new IllegalArgumentException ( "Provided source is not a file: " + source . getPath ( ) ) ; } // noinspection ChainOfInstanceofChecks
if ( out instanceof TarArchiveOutputStream ) { final TarArchiveEntry entry = new TarArchiveEntry ( source . getPath ( ) + source . getName ( ) ) ; entry . setSize ( fileSize ) ; entry . setModTime ( source . getTimeStamp ( ) ) ; out . putArchiveEntry ( entry ) ; } else if ( out instanceof ZipArchiveOutputStream ) { final ZipArchiveEntry entry = new ZipArchiveEntry ( source . getPath ( ) + source . getName ( ) ) ; entry . setSize ( fileSize ) ; entry . setTime ( source . getTimeStamp ( ) ) ; out . putArchiveEntry ( entry ) ; } else { throw new IOException ( "Unknown archive output stream" ) ; } final InputStream input = source . getInputStream ( ) ; try { IOUtil . copyStreams ( input , fileSize , out , IOUtil . BUFFER_ALLOCATOR ) ; } finally { if ( source . shouldCloseStream ( ) ) { input . close ( ) ; } } out . closeArchiveEntry ( ) ;
|
public class StringUtility { /** * Creates a < code > String [ ] < / code > by calling the toString ( ) method of each object in a list .
* @ deprecated Please use List . toArray ( Object [ ] )
* @ see List # toArray ( Object [ ] ) */
@ Deprecated public static String [ ] getStringArray ( List < ? > V ) { } }
|
if ( V == null ) return null ; int len = V . size ( ) ; String [ ] SA = new String [ len ] ; for ( int c = 0 ; c < len ; c ++ ) { Object O = V . get ( c ) ; SA [ c ] = O == null ? null : O . toString ( ) ; } return SA ;
|
public class PackratParser { /** * This message just generates a parser exception message to be returned
* containing the maximum position where the parser could not proceed . This
* should be in most cases the position where the error within the text is
* located .
* @ return */
private String getParserErrorMessage ( ) { } }
|
StringBuffer code = new StringBuffer ( text ) ; code = code . insert ( maxPosition , " >><< " ) ; String codeString = code . substring ( maxPosition - 100 < 0 ? 0 : maxPosition - 100 , maxPosition + 100 >= code . length ( ) ? code . length ( ) : maxPosition + 100 ) ; return "Could not parse the input string near '" + codeString + "'!" ;
|
public class SubCommandSet { /** * Add a sub - command to the sub - command - set .
* @ param subCommand The sub - command to add .
* @ return The sub - command - set . */
public SubCommandSet add ( SubCommand < SubCommandDef > subCommand ) { } }
|
if ( subCommandMap . containsKey ( subCommand . getName ( ) ) ) { throw new IllegalArgumentException ( "SubCommand with name " + subCommand . getName ( ) + " already exists" ) ; } this . subCommands . add ( subCommand ) ; this . subCommandMap . put ( subCommand . getName ( ) , subCommand ) ; for ( String alias : subCommand . getAliases ( ) ) { if ( subCommandMap . containsKey ( alias ) ) { throw new IllegalArgumentException ( "SubCommand (" + subCommand . getName ( ) + ") alias " + alias + " already exists" ) ; } this . subCommandMap . put ( alias , subCommand ) ; } return this ;
|
public class UITextArea { /** * Called when a cursor is updated . < br >
* Offsets the content to make sure the cursor is still visible . */
@ Override protected void onCursorUpdated ( ) { } }
|
if ( getParent ( ) == null ) return ; startTimer = System . currentTimeMillis ( ) ; Integer yOffset = null ; if ( text . length ( ) == 0 ) yOffset = 0 ; else if ( cursor . y < - offset ( ) . y ( ) ) yOffset = - cursor . y + 2 ; else if ( cursor . y + cursor . height > innerSize ( ) . height ( ) - offset ( ) . y ( ) ) yOffset = Math . min ( innerSize ( ) . height ( ) - cursor . y - cursor . height - 2 , 0 ) ; if ( yOffset != null ) scrollbar . setOffset ( yOffset ) ;
|
public class ExtinctionCoefficient { /** * method to calculate the extinction coefficient for the whole HELM molecule
* @ param helm2notation input HELM2Notation
* @ param unitType Unit of the extinction coefficient
* @ return extinction coefficient
* @ throws ExtinctionCoefficientException if the HELM contains HELM2 features
* @ throws ChemistryException if the Chemistry Engine can not be initialized */
public float calculate ( HELM2Notation helm2notation , int unitType ) throws ExtinctionCoefficientException , ChemistryException { } }
|
LOG . debug ( "ExtinctionCalculation is starting with the unitType: " + unitType ) ; float result = 0.0f ; List < PolymerNotation > polymerNodes = helm2notation . getListOfPolymers ( ) ; for ( PolymerNotation polymerNode : polymerNodes ) { String polymerType = polymerNode . getPolymerID ( ) . getType ( ) ; float ext = 0.0f ; ArrayList < PolymerNotation > not = new ArrayList < PolymerNotation > ( ) ; not . add ( polymerNode ) ; if ( polymerType . equals ( Monomer . NUCLIEC_ACID_POLYMER_TYPE ) ) { try { ext = calculateExtinctionFromRNA ( MethodsMonomerUtils . getListOfHandledMonomersOnlyBase ( polymerNode . getPolymerElements ( ) . getListOfElements ( ) ) ) ; } catch ( CalculationException | IOException | HELM2HandledException | NotationException e ) { throw new ExtinctionCoefficientException ( e . getMessage ( ) ) ; } if ( unitType == PEPTIDE_UNIT_TYPE ) { ext = ext * UNIT ; } } else if ( polymerType . equals ( Monomer . PEPTIDE_POLYMER_TYPE ) ) { try { ext = calculateExtinctionFromPeptide ( MethodsMonomerUtils . getListOfHandledMonomers ( polymerNode . getPolymerElements ( ) . getListOfElements ( ) ) ) ; } catch ( IOException | HELM2HandledException e ) { throw new ExtinctionCoefficientException ( e . getMessage ( ) ) ; } if ( unitType == RNA_UNIT_TYPE ) { ext = ext / UNIT ; } } result = result + ext ; } return result ;
|
public class BaseFilterQueryBuilder { /** * Add a Field Search Condition that will search a field for a specified value using the following SQL logic :
* { @ code lower ( field ) = ' value ' }
* @ param propertyName The name of the field as defined in the Entity mapping class .
* @ param value The value to search against . */
protected void addEqualsIgnoreCaseCondition ( final String propertyName , final String value ) { } }
|
final Expression < String > propertyNameField = getCriteriaBuilder ( ) . lower ( getRootPath ( ) . get ( propertyName ) . as ( String . class ) ) ; fieldConditions . add ( getCriteriaBuilder ( ) . equal ( propertyNameField , value . toString ( ) ) ) ;
|
public class MetaGraphDef { /** * < code > optional . tensorflow . MetaGraphDef . MetaInfoDef meta _ info _ def = 1 ; < / code > */
public org . tensorflow . framework . MetaGraphDef . MetaInfoDef getMetaInfoDef ( ) { } }
|
return metaInfoDef_ == null ? org . tensorflow . framework . MetaGraphDef . MetaInfoDef . getDefaultInstance ( ) : metaInfoDef_ ;
|
public class IFixCompareCommandTask { /** * This will print the map of APARs to iFixes by giving a line to each APAR listing which iFixes it is in
* @ param console The console to print to
* @ param aparToIFixMap The map to print */
private void printAparIFixInfo ( CommandConsole console , Map < String , Set < String > > aparToIFixMap ) { } }
|
for ( Map . Entry < String , Set < String > > aparIFixInfo : aparToIFixMap . entrySet ( ) ) { console . printlnInfoMessage ( getMessage ( "compare.ifix.apar.info" , aparIFixInfo . getKey ( ) , aparIFixInfo . getValue ( ) ) ) ; }
|
public class BoundedLocalCache { /** * Evicts entries from the main space if the cache exceeds the maximum capacity . The main space
* determines whether admitting an entry ( coming from the window space ) is preferable to retaining
* the eviction policy ' s victim . This is decision is made using a frequency filter so that the
* least frequently used entry is removed .
* The window space candidates were previously placed in the MRU position and the eviction
* policy ' s victim is at the LRU position . The two ends of the queue are evaluated while an
* eviction is required . The number of remaining candidates is provided and decremented on
* eviction , so that when there are no more candidates the victim is evicted .
* @ param candidates the number of candidate entries evicted from the window space */
@ GuardedBy ( "evictionLock" ) void evictFromMain ( int candidates ) { } }
|
int victimQueue = PROBATION ; Node < K , V > victim = accessOrderProbationDeque ( ) . peekFirst ( ) ; Node < K , V > candidate = accessOrderProbationDeque ( ) . peekLast ( ) ; while ( weightedSize ( ) > maximum ( ) ) { // Stop trying to evict candidates and always prefer the victim
if ( candidates == 0 ) { candidate = null ; } // Try evicting from the protected and window queues
if ( ( candidate == null ) && ( victim == null ) ) { if ( victimQueue == PROBATION ) { victim = accessOrderProtectedDeque ( ) . peekFirst ( ) ; victimQueue = PROTECTED ; continue ; } else if ( victimQueue == PROTECTED ) { victim = accessOrderWindowDeque ( ) . peekFirst ( ) ; victimQueue = WINDOW ; continue ; } // The pending operations will adjust the size to reflect the correct weight
break ; } // Skip over entries with zero weight
if ( ( victim != null ) && ( victim . getPolicyWeight ( ) == 0 ) ) { victim = victim . getNextInAccessOrder ( ) ; continue ; } else if ( ( candidate != null ) && ( candidate . getPolicyWeight ( ) == 0 ) ) { candidate = candidate . getPreviousInAccessOrder ( ) ; candidates -- ; continue ; } // Evict immediately if only one of the entries is present
if ( victim == null ) { @ SuppressWarnings ( "NullAway" ) Node < K , V > previous = candidate . getPreviousInAccessOrder ( ) ; Node < K , V > evict = candidate ; candidate = previous ; candidates -- ; evictEntry ( evict , RemovalCause . SIZE , 0L ) ; continue ; } else if ( candidate == null ) { Node < K , V > evict = victim ; victim = victim . getNextInAccessOrder ( ) ; evictEntry ( evict , RemovalCause . SIZE , 0L ) ; continue ; } // Evict immediately if an entry was collected
K victimKey = victim . getKey ( ) ; K candidateKey = candidate . getKey ( ) ; if ( victimKey == null ) { @ NonNull Node < K , V > evict = victim ; victim = victim . getNextInAccessOrder ( ) ; evictEntry ( evict , RemovalCause . COLLECTED , 0L ) ; continue ; } else if ( candidateKey == null ) { candidates -- ; @ NonNull Node < K , V > evict = candidate ; candidate = candidate . getPreviousInAccessOrder ( ) ; evictEntry ( evict , RemovalCause . COLLECTED , 0L ) ; continue ; } // Evict immediately if the candidate ' s weight exceeds the maximum
if ( candidate . getPolicyWeight ( ) > maximum ( ) ) { candidates -- ; Node < K , V > evict = candidate ; candidate = candidate . getPreviousInAccessOrder ( ) ; evictEntry ( evict , RemovalCause . SIZE , 0L ) ; continue ; } // Evict the entry with the lowest frequency
candidates -- ; if ( admit ( candidateKey , victimKey ) ) { Node < K , V > evict = victim ; victim = victim . getNextInAccessOrder ( ) ; evictEntry ( evict , RemovalCause . SIZE , 0L ) ; candidate = candidate . getPreviousInAccessOrder ( ) ; } else { Node < K , V > evict = candidate ; candidate = candidate . getPreviousInAccessOrder ( ) ; evictEntry ( evict , RemovalCause . SIZE , 0L ) ; } }
|
public class HeatChart { /** * Draws the bars of the x - axis and y - axis . */
private void drawAxisBars ( Graphics2D chartGraphics ) { } }
|
if ( axisThickness > 0 ) { chartGraphics . setColor ( axisColour ) ; // Draw x - axis .
int x = heatMapTL . x - axisThickness ; int y = heatMapBR . y ; int width = heatMapSize . width + axisThickness ; int height = axisThickness ; chartGraphics . fillRect ( x , y , width , height ) ; // Draw y - axis .
x = heatMapTL . x - axisThickness ; y = heatMapTL . y ; width = axisThickness ; height = heatMapSize . height ; chartGraphics . fillRect ( x , y , width , height ) ; }
|
public class AbstractBeanDefinition { /** * Obtains an optional bean for the method at the given index and the argument at the given index
* Warning : this method is used by internal generated code and should not be called by user code .
* @ param resolutionContext The resolution context
* @ param context The context
* @ param injectionPoint The method injection point
* @ param argument The argument
* @ return The resolved bean */
@ SuppressWarnings ( "WeakerAccess" ) @ Internal protected final Optional findBeanForMethodArgument ( BeanResolutionContext resolutionContext , BeanContext context , MethodInjectionPoint injectionPoint , Argument argument ) { } }
|
return resolveBeanWithGenericsFromMethodArgument ( resolutionContext , injectionPoint , argument , ( beanType , qualifier ) -> ( ( DefaultBeanContext ) context ) . findBean ( resolutionContext , beanType , qualifier ) ) ;
|
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link FunctionType } { @ code > } } */
@ XmlElementDecl ( namespace = "urn:oasis:names:tc:xacml:3.0:core:schema:wd-17" , name = "Function" , substitutionHeadNamespace = "urn:oasis:names:tc:xacml:3.0:core:schema:wd-17" , substitutionHeadName = "Expression" ) public JAXBElement < FunctionType > createFunction ( FunctionType value ) { } }
|
return new JAXBElement < FunctionType > ( _Function_QNAME , FunctionType . class , null , value ) ;
|
public class PropertiesManagerCore { /** * Add GeoPackage
* @ param geoPackage
* GeoPackage */
public void addGeoPackage ( T geoPackage ) { } }
|
PropertiesCoreExtension < T , ? , ? , ? > propertiesExtension = getPropertiesExtension ( geoPackage ) ; propertiesMap . put ( geoPackage . getName ( ) , propertiesExtension ) ;
|
public class XARecorderRecovery { /** * / * ( non - Javadoc )
* @ see org . csc . phynixx . loggersystem . logrecord . IXARecoderRecovery # destroy ( ) */
@ Override public synchronized void destroy ( ) throws IOException { } }
|
Set < IXADataRecorder > recoveredXADataRecorders = this . getRecoveredXADataRecorders ( ) ; for ( IXADataRecorder dataRecorder : recoveredXADataRecorders ) { dataRecorder . destroy ( ) ; }
|
public class ADTUtil { /** * Build a single trace ADS from the given information .
* @ param input
* the input sequence of the trace
* @ param output
* the output sequence of the trace
* @ param finalState
* the hypothesis state that should be referenced in the leaf of the ADS
* @ param < S >
* ( hypothesis ) state type
* @ param < I >
* input alphabet type
* @ param < O >
* output alphabet type
* @ return the root node of the constructed ADS */
public static < S , I , O > ADTNode < S , I , O > buildADSFromObservation ( final Word < I > input , final Word < O > output , final S finalState ) { } }
|
if ( input . size ( ) != output . size ( ) ) { throw new IllegalArgumentException ( "Arguments differ in length" ) ; } final Iterator < I > inputIterator = input . iterator ( ) ; final Iterator < O > outputIterator = output . iterator ( ) ; final ADTNode < S , I , O > result = new ADTSymbolNode < > ( null , inputIterator . next ( ) ) ; ADTNode < S , I , O > nodeIter = result ; while ( inputIterator . hasNext ( ) ) { final ADTNode < S , I , O > nextNode = new ADTSymbolNode < > ( nodeIter , inputIterator . next ( ) ) ; nodeIter . getChildren ( ) . put ( outputIterator . next ( ) , nextNode ) ; nodeIter = nextNode ; } final ADTNode < S , I , O > finalNode = new ADTLeafNode < > ( nodeIter , finalState ) ; nodeIter . getChildren ( ) . put ( outputIterator . next ( ) , finalNode ) ; return result ;
|
public class DefaultImportationLinker { /** * Update the Target Filter of the ImporterService .
* Apply the induce modifications on the links of the ImporterService
* @ param serviceReference */
@ Modified ( id = "importerServices" ) void modifiedImporterService ( ServiceReference < ImporterService > serviceReference ) { } }
|
try { importersManager . modified ( serviceReference ) ; } catch ( InvalidFilterException invalidFilterException ) { LOG . error ( "The ServiceProperty \"" + TARGET_FILTER_PROPERTY + "\" of the ImporterService " + bundleContext . getService ( serviceReference ) + " doesn't provides a valid Filter." + " To be used, it must provides a correct \"" + TARGET_FILTER_PROPERTY + "\" ServiceProperty." , invalidFilterException ) ; importersManager . removeLinks ( serviceReference ) ; return ; } if ( importersManager . matched ( serviceReference ) ) { importersManager . updateLinks ( serviceReference ) ; } else { importersManager . removeLinks ( serviceReference ) ; }
|
public class CharArrayBuffer { /** * Returns a substring of this buffer with leading and trailing whitespace
* omitted . The substring begins with the first non - whitespace character
* from { @ code beginIndex } and extends to the last
* non - whitespace character with the index lesser than
* { @ code endIndex } .
* @ param beginIndex the beginning index , inclusive .
* @ param endIndex the ending index , exclusive .
* @ return the specified substring .
* @ throws IndexOutOfBoundsException if the
* { @ code beginIndex } is negative , or
* { @ code endIndex } is larger than the length of this
* buffer , or { @ code beginIndex } is larger than
* { @ code endIndex } . */
public String substringTrimmed ( final int beginIndex , final int endIndex ) { } }
|
if ( beginIndex < 0 ) { throw new IndexOutOfBoundsException ( "Negative beginIndex: " + beginIndex ) ; } if ( endIndex > this . len ) { throw new IndexOutOfBoundsException ( "endIndex: " + endIndex + " > length: " + this . len ) ; } if ( beginIndex > endIndex ) { throw new IndexOutOfBoundsException ( "beginIndex: " + beginIndex + " > endIndex: " + endIndex ) ; } int beginIndex0 = beginIndex ; int endIndex0 = endIndex ; while ( beginIndex0 < endIndex && isWhitespace ( this . array [ beginIndex0 ] ) ) { beginIndex0 ++ ; } while ( endIndex0 > beginIndex0 && isWhitespace ( this . array [ endIndex0 - 1 ] ) ) { endIndex0 -- ; } return new String ( this . array , beginIndex0 , endIndex0 - beginIndex0 ) ;
|
public class PortletFilterChainProxy { /** * Sets the mapping of URL patterns to filter chains .
* The map keys should be the paths and the values should be arrays of { @ code PortletFilter } objects .
* It ' s VERY important that the type of map used preserves ordering - the order in which the iterator
* returns the entries must be the same as the order they were added to the map , otherwise you have no way
* of guaranteeing that the most specific patterns are returned before the more general ones . So make sure
* the Map used is an instance of { @ code LinkedHashMap } or an equivalent , rather than a plain { @ code HashMap } , for
* example .
* @ param filterChainMap the map of path Strings to { @ code List & lt ; PortletFilter & gt ; } s .
* @ deprecated Use the constructor which takes a { @ code List & lt ; PortletSecurityFilterChain & gt ; } instead . */
@ Deprecated public void setFilterChainMap ( Map < RequestMatcher , List < PortletFilter > > filterChainMap ) { } }
|
filterChains = new ArrayList < PortletSecurityFilterChain > ( filterChainMap . size ( ) ) ; for ( Map . Entry < RequestMatcher , List < PortletFilter > > entry : filterChainMap . entrySet ( ) ) { filterChains . add ( new DefaultPortletSecurityFilterChain ( entry . getKey ( ) , entry . getValue ( ) ) ) ; }
|
public class Options { /** * Check that the memory management option wasn ' t previously set to a
* different value . If okay , then set the option . */
private void checkMemoryManagementOption ( MemoryManagementOption option ) { } }
|
if ( memoryManagementOption != null && memoryManagementOption != option ) { usage ( "Multiple memory management options cannot be set." ) ; } setMemoryManagementOption ( option ) ;
|
public class AbstractAppender { /** * Handles a configure failure . */
protected void handleConfigureResponseFailure ( RaftMemberContext member , ConfigureRequest request , Throwable error ) { } }
|
// Log the failed attempt to contact the member .
failAttempt ( member , request , error ) ;
|
public class AbstractExpression { /** * Replace avg expression with sum / count for optimization .
* @ return */
public AbstractExpression replaceAVG ( ) { } }
|
if ( getExpressionType ( ) == ExpressionType . AGGREGATE_AVG ) { AbstractExpression child = getLeft ( ) ; AbstractExpression left = new AggregateExpression ( ExpressionType . AGGREGATE_SUM ) ; left . setLeft ( child . clone ( ) ) ; AbstractExpression right = new AggregateExpression ( ExpressionType . AGGREGATE_COUNT ) ; right . setLeft ( child . clone ( ) ) ; return new OperatorExpression ( ExpressionType . OPERATOR_DIVIDE , left , right ) ; } AbstractExpression lnode = null ; AbstractExpression rnode = null ; if ( m_left != null ) { lnode = m_left . replaceAVG ( ) ; } if ( m_right != null ) { rnode = m_right . replaceAVG ( ) ; } ArrayList < AbstractExpression > newArgs = null ; boolean changed = false ; if ( m_args != null ) { newArgs = new ArrayList < > ( ) ; for ( AbstractExpression expr : m_args ) { AbstractExpression ex = expr . replaceAVG ( ) ; newArgs . add ( ex ) ; if ( ex != expr ) { changed = true ; } } } if ( m_left != lnode || m_right != rnode || changed ) { AbstractExpression resExpr = clone ( ) ; resExpr . setLeft ( lnode ) ; resExpr . setRight ( rnode ) ; resExpr . setArgs ( newArgs ) ; return resExpr ; } return this ;
|
public class ExceptionImposter { /** * < p > imposterize . < / p >
* @ param t a { @ link java . lang . Throwable } object .
* @ return a { @ link java . lang . RuntimeException } object . */
public static RuntimeException imposterize ( Throwable t ) { } }
|
if ( t instanceof RuntimeException ) return ( RuntimeException ) t ; return new ExceptionImposter ( t ) ;
|
public class PlayRecordContext { /** * If set to true , initial prompt is not interruptible by either voice or digits .
* < b > Defaults to false . < / b > Valid values are the text strings " true " and " false " .
* @ return */
public boolean getNonInterruptibleAudio ( ) { } }
|
String value = Optional . fromNullable ( getParameter ( SignalParameters . NON_INTERRUPTIBLE_PLAY . symbol ( ) ) ) . or ( "false" ) ; return Boolean . parseBoolean ( value ) ;
|
public class GrassLegacyUtilities { /** * Returns the list of files involved in the raster map issues . If for example a map has to be
* deleted , then all these files have to .
* @ param mapsetPath - the path of the mapset
* @ param mapname - the name of the map
* @ return the array of strings containing the full path to the involved files */
public static String [ ] filesOfRasterMap ( String mapsetPath , String mapname ) { } }
|
String filesOfRaster [ ] = new String [ ] { mapsetPath + File . separator + GrassLegacyConstans . FCELL + File . separator + mapname , mapsetPath + File . separator + GrassLegacyConstans . CELL + File . separator + mapname , mapsetPath + File . separator + GrassLegacyConstans . CATS + File . separator + mapname , mapsetPath + File . separator + GrassLegacyConstans . HIST + File . separator + mapname , mapsetPath + File . separator + GrassLegacyConstans . CELLHD + File . separator + mapname , mapsetPath + File . separator + GrassLegacyConstans . COLR + File . separator + mapname , // it is very important that the folder cell _ misc / mapname comes
// before the files in it
mapsetPath + File . separator + GrassLegacyConstans . CELL_MISC + File . separator + mapname , mapsetPath + File . separator + GrassLegacyConstans . CELL_MISC + File . separator + mapname + File . separator + GrassLegacyConstans . CELLMISC_FORMAT , mapsetPath + File . separator + GrassLegacyConstans . CELL_MISC + File . separator + mapname + File . separator + GrassLegacyConstans . CELLMISC_QUANT , mapsetPath + File . separator + GrassLegacyConstans . CELL_MISC + File . separator + mapname + File . separator + GrassLegacyConstans . CELLMISC_RANGE , mapsetPath + File . separator + GrassLegacyConstans . CELL_MISC + File . separator + mapname + File . separator + GrassLegacyConstans . CELLMISC_NULL } ; return filesOfRaster ;
|
public class OrthogonalPolyLine { /** * When tail is NONE it needs to try multiple directions to determine which gives the least number of corners , and then selects that as the final direction .
* @ param points
* @ param buffer
* @ param lastDirection
* @ param tailDirection
* @ param correction
* @ param pline
* @ param p0x
* @ param p0y
* @ param p1x
* @ param p1y
* @ return */
private static Direction getTailDirection ( final Point2DArray points , final NFastDoubleArrayJSO buffer , final Direction lastDirection , Direction tailDirection , final double correction , final OrthogonalPolyLine pline , final double p0x , final double p0y , final double p1x , final double p1y ) { } }
|
final double offset = pline . getHeadOffset ( ) + correction ; switch ( tailDirection ) { case NONE : { final double dx = ( p1x - p0x ) ; final double dy = ( p1y - p0y ) ; int bestPoints = 0 ; if ( dx > offset ) { tailDirection = WEST ; bestPoints = drawTail ( points , buffer , lastDirection , WEST , correction , pline , p0x , p0y , p1x , p1y , false ) ; } else { tailDirection = EAST ; bestPoints = drawTail ( points , buffer , lastDirection , EAST , correction , pline , p0x , p0y , p1x , p1y , false ) ; } if ( dy > 0 ) { final int points3 = drawTail ( points , buffer , lastDirection , NORTH , correction , pline , p0x , p0y , p1x , p1y , false ) ; if ( points3 < bestPoints ) { tailDirection = NORTH ; bestPoints = points3 ; } } else { final int points4 = drawTail ( points , buffer , lastDirection , SOUTH , correction , pline , p0x , p0y , p1x , p1y , false ) ; if ( points4 < bestPoints ) { tailDirection = SOUTH ; bestPoints = points4 ; } } break ; } default : break ; } return tailDirection ;
|
public class SpatialUtil { /** * Returns a clone of the minimum hyper point .
* @ param box spatial object
* @ return the minimum hyper point */
public static double [ ] getMin ( SpatialComparable box ) { } }
|
final int dim = box . getDimensionality ( ) ; double [ ] min = new double [ dim ] ; for ( int i = 0 ; i < dim ; i ++ ) { min [ i ] = box . getMin ( i ) ; } return min ;
|
public class ProxiedFileSystemWrapper { /** * Get token from the token sequence file .
* @ param authPath
* @ param proxyUserName
* @ return Token for proxyUserName if it exists .
* @ throws IOException */
private static Optional < Token < ? > > getTokenFromSeqFile ( String authPath , String proxyUserName ) throws IOException { } }
|
try ( Closer closer = Closer . create ( ) ) { FileSystem localFs = FileSystem . getLocal ( new Configuration ( ) ) ; SequenceFile . Reader tokenReader = closer . register ( new SequenceFile . Reader ( localFs , new Path ( authPath ) , localFs . getConf ( ) ) ) ; Text key = new Text ( ) ; Token < ? > value = new Token < > ( ) ; while ( tokenReader . next ( key , value ) ) { LOG . info ( "Found token for " + key ) ; if ( key . toString ( ) . equals ( proxyUserName ) ) { return Optional . < Token < ? > > of ( value ) ; } } } return Optional . absent ( ) ;
|
public class QueryParameters { /** * Returns Key by searching key assigned to that position
* @ param position Position which would be searched
* @ return Key */
public String getNameByPosition ( Integer position ) { } }
|
String name = null ; name = this . order . get ( position ) ; return name ;
|
public class SubModel { /** * Clone this model using a { @ link DefaultModel } .
* @ return a mutable clone */
@ Override public Model copy ( ) { } }
|
DefaultModel m = new DefaultModel ( eb . copy ( ) ) ; MappingUtils . fill ( sm , m . getMapping ( ) ) ; for ( ModelView rc : parent . getViews ( ) ) { m . attach ( rc . copy ( ) ) ; } m . setAttributes ( this . getAttributes ( ) . copy ( ) ) ; return m ;
|
public class MockupTypeEnumerator { /** * Returns the url given the mockup type .
* @ param mockupType The mockup type .
* @ return The url pattern , or null if not found . */
public String getUrl ( String mockupType ) { } }
|
return mockupType == null ? null : mockupTypes . getProperty ( mockupType ) ;
|
public class DescribeAggregateComplianceByConfigRulesResult { /** * Returns a list of AggregateComplianceByConfigRule object .
* @ param aggregateComplianceByConfigRules
* Returns a list of AggregateComplianceByConfigRule object . */
public void setAggregateComplianceByConfigRules ( java . util . Collection < AggregateComplianceByConfigRule > aggregateComplianceByConfigRules ) { } }
|
if ( aggregateComplianceByConfigRules == null ) { this . aggregateComplianceByConfigRules = null ; return ; } this . aggregateComplianceByConfigRules = new com . amazonaws . internal . SdkInternalList < AggregateComplianceByConfigRule > ( aggregateComplianceByConfigRules ) ;
|
public class GeoPackageCoreImpl { /** * { @ inheritDoc } */
@ Override public boolean createExtendedRelationsTable ( ) { } }
|
verifyWritable ( ) ; boolean created = false ; ExtendedRelationsDao dao = getExtendedRelationsDao ( ) ; try { if ( ! dao . isTableExists ( ) ) { created = tableCreator . createExtendedRelations ( ) > 0 ; } } catch ( SQLException e ) { throw new GeoPackageException ( "Failed to check if " + ExtendedRelation . class . getSimpleName ( ) + " table exists and create it" , e ) ; } return created ;
|
public class IdType { /** * Creates a new instance of this ID which is identical , but refers to the
* specific version of this resource ID noted by theVersion .
* @ param theVersion The actual version string , e . g . " 1 " . If theVersion is blank or null , returns the same as { @ link # toVersionless ( ) } }
* @ return A new instance of IdType which is identical , but refers to the
* specific version of this resource ID noted by theVersion . */
@ Override public IdType withVersion ( String theVersion ) { } }
|
if ( isBlank ( theVersion ) ) { return toVersionless ( ) ; } if ( isLocal ( ) || isUrn ( ) ) { return new IdType ( getValueAsString ( ) ) ; } String existingValue = getValue ( ) ; int i = existingValue . indexOf ( "_history" ) ; String value ; if ( i > 1 ) { value = existingValue . substring ( 0 , i - 1 ) ; } else { value = existingValue ; } return new IdType ( value + '/' + "_history" + '/' + theVersion ) ;
|
public class Vector3d { /** * Sets the values of this vector to those of v1.
* @ param v1
* vector whose values are copied */
public void set ( Vector3d v1 ) { } }
|
x = v1 . x ; y = v1 . y ; z = v1 . z ;
|
public class BatchedTimeoutManager { /** * Method to close this timer forever */
public void close ( ) { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "close" ) ; btmLockManager . lockExclusive ( ) ; try { stopTimer ( ) ; activeEntries = null ; } finally { btmLockManager . unlockExclusive ( ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "close" ) ;
|
public class MultiPartParser { private void handleField ( ) { } }
|
if ( LOG . isDebugEnabled ( ) ) LOG . debug ( "parsedField: _fieldName={} _fieldValue={} {}" , _fieldName , _fieldValue , this ) ; if ( _fieldName != null && _fieldValue != null ) _handler . parsedField ( _fieldName , _fieldValue ) ; _fieldName = _fieldValue = null ;
|
public class InternationalizationServiceSingleton { /** * Retrieves the ETAG for the given locale .
* @ param locale the locale
* @ return the computed etag , must not be { @ code null } or empty */
@ Override public String etag ( Locale locale ) { } }
|
String etag = etags . get ( locale ) ; if ( etag == null ) { // We don ' t have a stored etag , that means we don ' t have messages . We returns 0.
// There is a potential race condition here :
// We retrieve the etag get 0 , but when we retrieve the messages , we get messages . The browser receives 0
// as etag , which will not match the next request . It ' s should not be too critical as it will just send
// the same content a second time .
return "0" ; } else { return etag ; }
|
public class AmazonAlexaForBusinessClient {
    /**
     * Retrieves the configured values for the user enrollment invitation email template.
     *
     * @param request the GetInvitationConfiguration request
     * @return result of the GetInvitationConfiguration operation returned by the service
     * @throws NotFoundException if the resource is not found
     * @sample AmazonAlexaForBusiness.GetInvitationConfiguration
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/alexaforbusiness-2017-11-09/GetInvitationConfiguration"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public GetInvitationConfigurationResult getInvitationConfiguration(GetInvitationConfigurationRequest request) {
        // Run the registered request handlers/mutations before dispatching.
        request = beforeClientExecution(request);
        return executeGetInvitationConfiguration(request);
    }
}
|
public class SimulatorTaskTracker {
    /**
     * Updates the progress indicator of a task if it is running.
     * Map progress is a linear estimate since task start; reduce progress is
     * modeled as thirds (SHUFFLE=0, SORT=1/3, REDUCE=2/3 plus a linear estimate
     * of the user-code portion).
     *
     * @param tip simulator task in progress whose progress is to be updated
     * @param now current simulation time
     * @throws IllegalArgumentException if the reduce phase is unknown
     * @throws IllegalStateException if the computed progress is far outside [0, 1]
     */
    private void progressTaskStatus(SimulatorTaskInProgress tip, long now) {
        TaskStatus status = tip.getTaskStatus();
        if (status.getRunState() != State.RUNNING) {
            return; // nothing to be done
        }
        boolean isMap = tip.isMapTask();
        // Time when the user space code started
        long startTime = -1;
        // Time spent in map, or just in the REDUCE phase of a reduce task
        long runTime = tip.getUserSpaceRunTime();
        float progress = 0.0f;
        if (isMap) {
            // Linear estimate of map progress since the task start.
            startTime = status.getStartTime();
            progress = ((float) (now - startTime)) / runTime;
        } else {
            // Reduce progress is not modeled inside SHUFFLE or SORT; a linear
            // estimate is used only for the third (REDUCE) phase.
            Phase reducePhase = status.getPhase();
            switch (reducePhase) {
                case SHUFFLE:
                    progress = 0.0f; // 0 phases done out of 3
                    break;
                case SORT:
                    progress = 1.0f / 3; // 1 phase done out of 3
                    break;
                case REDUCE: {
                    // REDUCE phase: user code started when the sort finished.
                    startTime = status.getSortFinishTime();
                    // 2 phases are done out of 3, plus a linear share of the third.
                    progress = 2.0f / 3 + (((float) (now - startTime)) / runTime) / 3.0f;
                }
                    break;
                default:
                    // Should never get here.
                    throw new IllegalArgumentException("Invalid reducePhase=" + reducePhase);
            }
        }
        // Tolerate tiny floating-point drift, then clamp into [0, 1].
        final float EPSILON = 0.0001f;
        if (progress < -EPSILON || progress > 1 + EPSILON) {
            throw new IllegalStateException("Task progress out of range: " + progress);
        }
        progress = Math.max(Math.min(1.0f, progress), 0.0f);
        status.setProgress(progress);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Updated task progress, taskId=" + status.getTaskID()
                + ", progress=" + status.getProgress());
        }
    }
}
|
public class ObjectFactory {
    /**
     * Creates an instance of {@link JAXBElement}{@code <}{@link CategoryExtentType}{@code >}.
     *
     * @param value Java instance representing the XML element's value
     * @return the new instance of {@link JAXBElement}{@code <}{@link CategoryExtentType}{@code >}
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/gml", name = "CategoryExtent")
    public JAXBElement<CategoryExtentType> createCategoryExtent(CategoryExtentType value) {
        // Wraps the value with the pre-registered qualified name; no scope (null).
        return new JAXBElement<CategoryExtentType>(_CategoryExtent_QNAME, CategoryExtentType.class, null, value);
    }
}
|
public class Postconditions {
    /**
     * <p>Evaluate the given {@code condition} using {@code value} as input.</p>
     * <p>Throws {@link PostconditionViolationException} if the predicate is false.</p>
     *
     * @param value     the value
     * @param condition the predicate
     * @param <T>       the type of values
     * @return value
     * @throws PostconditionViolationException if the predicate is false
     */
    public static <T> T checkPostcondition(final T value, final ContractConditionType<T> condition)
        throws PostconditionViolationException {
        // Delegate to the predicate/describer overload.
        return checkPostcondition(value, condition.predicate(), condition.describer());
    }
}
|
public class TransactionManager {
    /**
     * Adds the session to the end of the live-transaction queue when a
     * transaction starts (depending on isolation mode), then — for non-MVCC
     * mode — attempts to acquire the two-phase locks for the statement.
     *
     * @param session the session beginning an action
     * @param cs the statement being executed
     */
    public void beginAction(Session session, Statement cs) {
        synchronized (liveTransactionTimestamps) {
            session.actionTimestamp = nextChangeTimestamp();
            if (!session.isTransaction) {
                session.transactionTimestamp = session.actionTimestamp;
                session.isTransaction = true;
                liveTransactionTimestamps.addLast(session.actionTimestamp);
                try {
                    if (this.mvcc) {
                        database.logger.writeToLog(session, session.getStartTransactionSQL());
                    }
                } catch (HsqlException e) {
                    // NOTE(review): a failure to log the transaction start is
                    // silently ignored — confirm this is intentional.
                }
            }
        }
        if (session.isReadOnly()) {
            return;
        }
        if (!mvcc) {
            // Session already holds its locks; nothing more to acquire.
            if (session.hasLocks()) {
                return;
            }
            try {
                writeLock.lock();
                boolean canProceed = beginActionTPL(session, cs);
                if (!canProceed) {
                    // Lock acquisition failed (e.g. deadlock): abort the transaction.
                    session.abortTransaction = true;
                }
            } finally {
                writeLock.unlock();
            }
        }
    }
}
|
public class DoubleUtils { /** * Returns the maximum value in the array within the specified bounds . If the supplied range is
* empty or invalid , an { @ link IllegalArgumentException } is thrown . */
public static double max ( final double [ ] data , final int startInclusive , final int endExclusive ) { } }
|
checkArgument ( endExclusive > startInclusive ) ; checkArgument ( startInclusive >= 0 ) ; checkArgument ( endExclusive <= data . length ) ; double maxValue = Double . NEGATIVE_INFINITY ; for ( int i = startInclusive ; i < endExclusive ; ++ i ) { maxValue = Math . max ( maxValue , data [ i ] ) ; } return maxValue ;
|
public class Table { /** * returns if a table contains a certain element
* @ param searchElement the searchElement of the table element on which the search is done
* @ return if a table contains a certain element */
public boolean isRowPresent ( String searchElement ) { } }
|
ready ( ) ; Cell cell = getCell ( searchElement ) ; return cell . isElementPresent ( ) ;
|
public class XSLTElementProcessor {
    /**
     * Receive notification of an unparsed entity declaration.
     *
     * @param handler non-null reference to the current StylesheetHandler that is constructing the Templates
     * @param name the entity name
     * @param publicId the entity public identifier, or null if not available
     * @param systemId the entity system identifier
     * @param notationName the name of the associated notation
     * @see org.xml.sax.DTDHandler#unparsedEntityDecl
     */
    public void unparsedEntityDecl(StylesheetHandler handler, String name, String publicId,
            String systemId, String notationName) {
        // Intentionally a no-op: this processor ignores unparsed entity declarations.
    }
}
|
public class ConfigEvaluator {
    /**
     * Converts a list of raw configuration values to a string array.
     *
     * @param rawValues      the raw values to convert
     * @param attrDef        a string-based attribute definition, or null if this is a simple evaluation
     * @param context        the current evaluation context
     * @param ignoreWarnings whether conversion warnings should be suppressed
     * @return the array of converted values, or null if all values are unresolved
     * @throws ConfigEvaluatorException if conversion fails
     */
    private String[] convertListToStringArray(List<?> rawValues, ExtendedAttributeDefinition attrDef,
            EvaluationContext context, boolean ignoreWarnings) throws ConfigEvaluatorException {
        // convertListToVector produces string elements here, so the raw cast is safe in practice.
        @SuppressWarnings({ "unchecked", "rawtypes" })
        Collection<String> collection = (Collection) convertListToVector(rawValues, attrDef, context, ignoreWarnings);
        return collection == null ? null : collection.toArray(new String[collection.size()]);
    }
}
|
public class Db {
    /**
     * Find record by id with default primary key.
     * <pre>
     * Example:
     * Record user = Db.findById("user", 15);
     * </pre>
     *
     * @param tableName the table name of the table
     * @param idValue the id value of the record
     * @return the matching record, as returned by the main DbPro instance
     */
    public static Record findById(String tableName, Object idValue) {
        // Delegate to the main DbPro instance.
        return MAIN.findById(tableName, idValue);
    }
}
|
public class WaveHandlerBase {
    /**
     * Retrieves the custom wave handler method.
     *
     * @param wave the wave to be handled
     * @return the custom handler method, or null if none exists
     */
    private Method retrieveCustomMethod(final Wave wave) {
        Method customMethod = null;
        // Search the wave handler method to call.
        customMethod = this.defaultMethod == null
                // No annotated default: compute the method from the wave type action name
                // and pick the first one whose signature matches the wave type items.
                ? ClassUtility.retrieveMethodList(getWaveReady().getClass(), wave.waveType().toString())
                        .stream()
                        .filter(m -> CheckerUtility.checkMethodSignature(m, wave.waveType().items()))
                        .findFirst()
                        .orElse(null)
                // Method defined with annotation takes precedence.
                : this.defaultMethod;
        if (customMethod == null) {
            LOGGER.info(CUSTOM_METHOD_NOT_FOUND);
        }
        return customMethod;
    }
}
|
public class BatchXMLDescriptorImpl {
    /**
     * Adds a new namespace.
     *
     * @param name  the namespace attribute name
     * @param value the namespace value
     * @return the current instance of <code>BatchXMLDescriptor</code>, for chaining
     */
    public BatchXMLDescriptor addNamespace(String name, String value) {
        // NOTE(review): namespaces are stored via model.attribute — presumably they
        // map to attributes on the descriptor root; confirm against the batch XML schema.
        model.attribute(name, value);
        return this;
    }
}
|
public class AnalyzedTokenReadings { /** * Checks if the token has a particular POS tag .
* @ param posTag POS tag to look for */
public boolean hasPosTag ( String posTag ) { } }
|
boolean found = false ; for ( AnalyzedToken reading : anTokReadings ) { if ( reading . getPOSTag ( ) != null ) { found = posTag . equals ( reading . getPOSTag ( ) ) ; if ( found ) { break ; } } } return found ;
|
public class TrxMessageListener {
    /**
     * Initializes this listener with its filter, application and process settings.
     *
     * @param messageFilter       the filter passed on to the superclass
     * @param application         the owning application
     * @param strProcessClassName the process class name to instantiate later
     * @param properties          extra configuration properties
     */
    public void init(BaseMessageFilter messageFilter, Application application, String strProcessClassName,
            Map<String, Object> properties) {
        m_application = application;
        m_strProcessClassName = strProcessClassName;
        m_properties = properties;
        // NOTE(review): the superclass is intentionally passed null for its first
        // argument — confirm this matches the superclass init contract.
        super.init(null, messageFilter);
    }
}
|
public class JoinableResourceBundlePropertySerializer { /** * Serialize the variant sets .
* @ param map
* the map to serialize
* @ return the serialized variant sets */
private static String serializeVariantSets ( Map < String , VariantSet > map ) { } }
|
StringBuilder result = new StringBuilder ( ) ; for ( Entry < String , VariantSet > entry : map . entrySet ( ) ) { result . append ( entry . getKey ( ) ) . append ( ":" ) ; VariantSet variantSet = ( VariantSet ) entry . getValue ( ) ; result . append ( variantSet . getDefaultVariant ( ) ) . append ( ":" ) ; result . append ( getCommaSeparatedString ( variantSet ) ) ; result . append ( ";" ) ; } return result . toString ( ) ;
|
public class RequestContext { /** * Gets a parameter specified by the given name from request body form or query string .
* @ param name the given name
* @ return parameter , returns { @ code null } if not found */
public String param ( final String name ) { } }
|
try { return request . getParameter ( name ) ; } catch ( final Exception e ) { LOGGER . log ( Level . ERROR , "Can't parse request parameter [uri=" + request . getRequestURI ( ) + ", method=" + request . getMethod ( ) + ", parameterName=" + name + "]: " + e . getMessage ( ) ) ; return null ; }
|
public class CommandHelper { /** * Convert the value according the type of DeviceData .
* @ param value
* the value to insert on DeviceData
* @ param deviceDataArgin
* the DeviceData attribute to write
* @ param dataType
* the type of inserted data
* @ throws DevFailed */
public static void insertFromDevVarLongStringArray ( final DevVarLongStringArray value , final DeviceData deviceDataArgin , final int dataType ) throws DevFailed { } }
|
if ( dataType == TangoConst . Tango_DEVVAR_LONGSTRINGARRAY ) { deviceDataArgin . insert ( value ) ; } else { Except . throw_exception ( "TANGO_WRONG_DATA_ERROR" , "input type " + deviceDataArgin . getType ( ) + " not supported" , "CommandHelper.insertFromDevVarLongStringArray(DevVarLongStringArray value,deviceDataArgin)" ) ; }
|
public class GlobalizationPreferences {
    /**
     * Explicitly set the collator for this object.
     *
     * @param collator the collator object to be passed; it is cloned, so later
     *                 mutation of the argument does not affect this object
     * @return this, for chaining
     * @throws UnsupportedOperationException if this object is frozen
     * @hide draft / provisional / internal are hidden on Android
     */
    public GlobalizationPreferences setCollator(Collator collator) {
        if (isFrozen()) {
            throw new UnsupportedOperationException("Attempt to modify immutable object");
        }
        try {
            this.collator = (Collator) collator.clone(); // clone for safety
        } catch (CloneNotSupportedException e) {
            // Collator is expected to be cloneable; surface the failure as an ICU error.
            throw new ICUCloneNotSupportedException("Error in cloning collator", e);
        }
        return this;
    }
}
|
public class IteratorFactory { /** * Create a new { @ link Iterator } for the supplied object .
* @ param object the object to build an iterator from
* @ return an { @ link Iterator } for the < code > object < / code > or < code > null < / code > if the value is null . */
public static final Iterator createIterator ( Object object ) { } }
|
LOGGER . debug ( "Create an iterator for class: " + ( object == null ? "null" : object . getClass ( ) . getName ( ) ) ) ; if ( object == null ) return null ; if ( object instanceof Iterator ) { return ( Iterator ) object ; } else if ( object instanceof Collection ) { Collection collection = ( Collection ) object ; return collection . iterator ( ) ; } else if ( object instanceof Map ) { return new MapIterator ( ( Map ) object ) ; } else if ( object . getClass ( ) . isArray ( ) ) { return new ArrayIterator ( object ) ; } else if ( object instanceof Enumeration ) return new EnumerationIterator ( ( Enumeration ) object ) ; else if ( object instanceof ResultSet && ! ( object instanceof RowSet ) ) return new ResultSetIterator ( ( ResultSet ) object ) ; // check to see if there is a registered IteratorPlant that can handle this type
Iterator ret = null ; Iterator factories = ITERATOR_FACTORIES . keySet ( ) . iterator ( ) ; while ( factories . hasNext ( ) ) { IteratorPlant plant = ( IteratorPlant ) ITERATOR_FACTORIES . get ( factories . next ( ) ) ; ret = plant . createIterator ( object ) ; if ( ret != null ) return ret ; } return new AtomicObjectIterator ( object ) ;
|
public class Attachment { /** * Get the URL of the file containing the contents .
* This property is somewhat deprecated and is made available only for use with platform APIs that
* require file paths / URLs , e . g . some media playback APIs . Whenever possible , use the ` getContent ( ) `
* method to get the input stream of the content instead .
* The file must be treated as read - only ! DO NOT MODIFY OR DELETE IT .
* If the database is encrypted , attachment files are also encrypted and not directly readable ,
* so this property will return null . */
@ InterfaceAudience . Public public URL getContentURL ( ) { } }
|
try { return internalAttachment ( ) . getContentURL ( ) ; } catch ( MalformedURLException e ) { Log . w ( Database . TAG , e . toString ( ) ) ; } catch ( CouchbaseLiteException e ) { Log . w ( Database . TAG , e . toString ( ) ) ; } return null ;
|
public class UtcOffset { /** * Parses a UTC offset from a string .
* @ param text the text to parse ( e . g . " - 0500 " )
* @ return the parsed UTC offset
* @ throws IllegalArgumentException if the text cannot be parsed */
public static UtcOffset parse ( String text ) { } }
|
Pattern timeZoneRegex = Pattern . compile ( "^([-\\+])?(\\d{1,2})(:?(\\d{2}))?(:?(\\d{2}))?$" ) ; Matcher m = timeZoneRegex . matcher ( text ) ; if ( ! m . find ( ) ) { throw Messages . INSTANCE . getIllegalArgumentException ( 21 , text ) ; } String signStr = m . group ( 1 ) ; boolean positive = ! "-" . equals ( signStr ) ; String hourStr = m . group ( 2 ) ; int hourOffset = Integer . parseInt ( hourStr ) ; String minuteStr = m . group ( 4 ) ; int minuteOffset = ( minuteStr == null ) ? 0 : Integer . parseInt ( minuteStr ) ; return new UtcOffset ( positive , hourOffset , minuteOffset ) ;
|
public class SqlQueryImpl {
    /**
     * {@inheritDoc}
     *
     * @see jp.co.future.uroborosql.fluent.SqlQuery#collect(java.lang.Class)
     */
    @Override
    public <T> List<T> collect(final Class<T> type) {
        // try-with-resources guarantees the underlying stream (and its result set)
        // is closed even if collecting fails.
        try (Stream<T> stream = stream(new EntityResultSetConverter<>(type, new PropertyMapperManager()))) {
            return stream.collect(Collectors.toList());
        }
    }
}
|
public class AbstractCounterFactory {
    /**
     * Destroys and removes a counter from the cache.
     *
     * @param counter the counter to destroy; only {@link AbstractCounter} instances are acted upon
     * @since 0.2.0
     */
    protected void destroyCounter(ICounter counter) {
        try {
            if (counter instanceof AbstractCounter) {
                ((AbstractCounter) counter).destroy();
            }
        } catch (Exception e) {
            // NOTE(review): failures during destroy are swallowed without logging
            // (best-effort cleanup) — confirm that dropping the exception is intended.
        }
    }
}
|
public class WanPublisherConfigDTO { /** * Deserializes the aliased discovery config nested under the { @ code tag } in the provided JSON .
* @ param json the JSON object containing the serialized config
* @ param tag the tag under which the config is nested
* @ return the deserialized config or { @ code null } if the serialized config
* was missing in the JSON object */
private AliasedDiscoveryConfig deserializeAliasedDiscoveryConfig ( JsonObject json , String tag ) { } }
|
JsonValue configJson = json . get ( tag ) ; if ( configJson != null && ! configJson . isNull ( ) ) { AliasedDiscoveryConfigDTO dto = new AliasedDiscoveryConfigDTO ( tag ) ; dto . fromJson ( configJson . asObject ( ) ) ; return dto . getConfig ( ) ; } return null ;
|
public class InterceptorContext { /** * Instantiates an interceptor , based on the class name in the given InterceptorConfig , and adds it to the
* given collection of interceptors .
* @ param config the InterceptorConfig used to determine the interceptor class .
* @ param baseClassOrInterface the required base class or interface . May be < code > null < / code > .
* @ param interceptors the List of interceptors to which to add .
* @ return an initialized Interceptor , or < code > null < / code > if an error occurred . */
protected static Interceptor addInterceptor ( InterceptorConfig config , Class baseClassOrInterface , List /* < Interceptor > */
interceptors ) { } }
|
Interceptor interceptor = createInterceptor ( config , baseClassOrInterface ) ; if ( interceptor != null ) interceptors . add ( interceptor ) ; return interceptor ;
|
public class NetworkBuffer { /** * Returns the network buffer for the given installation ID .
* @ param installationID installation ID for the network buffer
* @ return the network buffer , or < code > null < / code > if no buffer found */
public static synchronized NetworkBuffer getBuffer ( String installationID ) { } }
|
for ( final Iterator i = buffers . iterator ( ) ; i . hasNext ( ) ; ) { final NetworkBuffer db = ( NetworkBuffer ) i . next ( ) ; if ( db . inst . equals ( installationID ) ) return db ; } return null ;
|
public class HttpBody {
    /**
     * Sets the current length of the body. If the current content is longer,
     * the excessive data will be truncated.
     *
     * @param length the new length to set; negative values are ignored
     */
    public void setLength(int length) {
        // No-op for negative lengths or when the backing array already has the requested size.
        if (length < 0 || body.length == length) {
            return;
        }
        int oldPos = pos;
        // Clamp the current position into the new bounds.
        pos = Math.min(pos, length);
        // Re-allocate and keep the first 'pos' bytes of the old content.
        byte[] newBody = new byte[length];
        System.arraycopy(body, 0, newBody, 0, pos);
        body = newBody;
        if (oldPos > pos) {
            // Content was truncated, so any cached string representation is stale.
            cachedString = null;
        }
    }
}
|
public class GrammarFile { /** * This is the central reading routine which starts all sub routines like
* lexer , parser and converter .
* @ throws IOException
* @ throws GrammarException */
private void read ( ) throws IOException , GrammarException { } }
|
try { logger . debug ( "Read grammar file..." ) ; logger . debug ( "Starting lexer..." ) ; Lexer lexer = new RegExpLexer ( uhuraGrammar ) ; TokenStream tokenStream = lexer . lex ( SourceCode . read ( reader , new UnspecifiedSourceCodeLocation ( ) ) ) ; logger . debug ( "Starting parser..." ) ; parse ( tokenStream ) ; logger . debug ( "done reading grammar file." ) ; } catch ( LexerException e ) { logger . error ( e . getMessage ( ) , e ) ; throw new IOException ( e . getMessage ( ) , e ) ; } catch ( ParserException e ) { logger . error ( e . getMessage ( ) , e ) ; throw new IOException ( e . getMessage ( ) , e ) ; }
|
public class JdbcUtil { /** * Safely closes resources and logs errors .
* @ param rs ResultSet to close */
public static void close ( ResultSet rs ) { } }
|
if ( rs != null ) { try { rs . close ( ) ; } catch ( SQLException ex ) { logger . error ( "" , ex ) ; } }
|
public class CommerceTaxMethodLocalServiceWrapper {
    /**
     * Creates a new commerce tax method with the primary key. Does not add the
     * commerce tax method to the database.
     *
     * @param commerceTaxMethodId the primary key for the new commerce tax method
     * @return the new commerce tax method
     */
    @Override
    public com.liferay.commerce.tax.model.CommerceTaxMethod createCommerceTaxMethod(long commerceTaxMethodId) {
        // Pure delegation to the wrapped local service.
        return _commerceTaxMethodLocalService.createCommerceTaxMethod(commerceTaxMethodId);
    }
}
|
public class PackageFrameWriter {
    /**
     * Generate a package summary page for the left-hand bottom frame. Constructs
     * the PackageFrameWriter object and then uses it to generate the file.
     *
     * @param configuration the current configuration of the doclet
     * @param packageDoc the package for which "package-frame.html" is to be generated
     */
    public static void generate(ConfigurationImpl configuration, PackageDoc packageDoc) {
        PackageFrameWriter packgen;
        try {
            packgen = new PackageFrameWriter(configuration, packageDoc);
            String pkgName = Util.getPackageName(packageDoc);
            Content body = packgen.getBody(false, packgen.getWindowTitle(pkgName));
            // Heading: the package name, linking the enclosing class frame.
            Content pkgNameContent = new StringContent(pkgName);
            Content heading = HtmlTree.HEADING(HtmlConstants.TITLE_HEADING, HtmlStyle.bar,
                    packgen.getTargetPackageLink(packageDoc, "classFrame", pkgNameContent));
            body.addContent(heading);
            // Class listing container below the heading.
            HtmlTree div = new HtmlTree(HtmlTag.DIV);
            div.addStyle(HtmlStyle.indexContainer);
            packgen.addClassListing(div);
            body.addContent(div);
            packgen.printHtmlDocument(configuration.metakeywords.getMetaKeywords(packageDoc), false, body);
            packgen.close();
        } catch (IOException exc) {
            // Report through the doclet messenger, then abort doclet execution.
            configuration.standardmessage.error("doclet.exception_encountered",
                    exc.toString(), DocPaths.PACKAGE_FRAME.getPath());
            throw new DocletAbortException(exc);
        }
    }
}
|
public class CmsSiteMatcher { /** * Sets the hostname ( e . g . localhost ) which is required to access this site . < p >
* Setting the hostname to " * " is a wildcard that matches all hostnames
* @ param serverName the hostname ( e . g . localhost ) which is required to access this site */
protected void setServerName ( String serverName ) { } }
|
if ( CmsStringUtil . isEmpty ( serverName ) || ( WILDCARD . equals ( serverName ) ) ) { m_serverName = WILDCARD ; } else { m_serverName = serverName . trim ( ) ; }
|
public class OCSPResponseBuilder { /** * List Certificate authority url information .
* @ param userCertificate User ' s own certificate .
* @ return AIA Locations
* @ throws CertificateVerificationException If an error occurs while getting the AIA locations from the certificate . */
public static List < String > getAIALocations ( X509Certificate userCertificate ) throws CertificateVerificationException { } }
|
List < String > locations ; // List the AIA locations from the certificate . Those are the URL ' s of CA s .
try { locations = OCSPVerifier . getAIALocations ( userCertificate ) ; } catch ( CertificateVerificationException e ) { throw new CertificateVerificationException ( "Failed to find AIA locations in the cetificate" , e ) ; } return locations ;
|
public class CommerceRegionPersistenceImpl {
    /**
     * Returns an ordered range of all the commerce regions where
     * commerceCountryId = &#63; and active = &#63;.
     * Useful when paginating results; returns a maximum of <code>end - start</code>
     * instances. <code>start</code> and <code>end</code> are result-set indexes,
     * not primary keys. Passing {@link QueryUtil#ALL_POS} for both returns the
     * full result set. Ordering follows <code>orderByComparator</code> when given,
     * else the default model ORDER BY when paginating, else primary-key order.
     *
     * @param commerceCountryId the commerce country ID
     * @param active the active flag
     * @param start the lower bound of the range of commerce regions
     * @param end the upper bound of the range of commerce regions (not inclusive)
     * @param orderByComparator the comparator to order the results by (optionally <code>null</code>)
     * @param retrieveFromCache whether to retrieve from the finder cache
     * @return the ordered range of matching commerce regions
     */
    @Override
    public List<CommerceRegion> findByC_A(long commerceCountryId, boolean active, int start, int end,
            OrderByComparator<CommerceRegion> orderByComparator, boolean retrieveFromCache) {
        boolean pagination = true;
        FinderPath finderPath = null;
        Object[] finderArgs = null;
        // Unpaginated, unordered queries use the cheaper "without pagination" cache key.
        if ((start == QueryUtil.ALL_POS) && (end == QueryUtil.ALL_POS) && (orderByComparator == null)) {
            pagination = false;
            finderPath = FINDER_PATH_WITHOUT_PAGINATION_FIND_BY_C_A;
            finderArgs = new Object[] { commerceCountryId, active };
        } else {
            finderPath = FINDER_PATH_WITH_PAGINATION_FIND_BY_C_A;
            finderArgs = new Object[] { commerceCountryId, active, start, end, orderByComparator };
        }
        List<CommerceRegion> list = null;
        if (retrieveFromCache) {
            list = (List<CommerceRegion>) finderCache.getResult(finderPath, finderArgs, this);
            // Validate the cached rows still match the query arguments; if any row
            // is stale, fall through to the database query.
            if ((list != null) && !list.isEmpty()) {
                for (CommerceRegion commerceRegion : list) {
                    if ((commerceCountryId != commerceRegion.getCommerceCountryId())
                            || (active != commerceRegion.isActive())) {
                        list = null;
                        break;
                    }
                }
            }
        }
        if (list == null) {
            // Build the JPQL: SELECT ... WHERE countryId AND active [ORDER BY ...].
            StringBundler query = null;
            if (orderByComparator != null) {
                query = new StringBundler(4 + (orderByComparator.getOrderByFields().length * 2));
            } else {
                query = new StringBundler(4);
            }
            query.append(_SQL_SELECT_COMMERCEREGION_WHERE);
            query.append(_FINDER_COLUMN_C_A_COMMERCECOUNTRYID_2);
            query.append(_FINDER_COLUMN_C_A_ACTIVE_2);
            if (orderByComparator != null) {
                appendOrderByComparator(query, _ORDER_BY_ENTITY_ALIAS, orderByComparator);
            } else if (pagination) {
                query.append(CommerceRegionModelImpl.ORDER_BY_JPQL);
            }
            String sql = query.toString();
            Session session = null;
            try {
                session = openSession();
                Query q = session.createQuery(sql);
                QueryPos qPos = QueryPos.getInstance(q);
                qPos.add(commerceCountryId);
                qPos.add(active);
                if (!pagination) {
                    // No SQL ORDER BY was emitted: sort in memory and freeze the list.
                    list = (List<CommerceRegion>) QueryUtil.list(q, getDialect(), start, end, false);
                    Collections.sort(list);
                    list = Collections.unmodifiableList(list);
                } else {
                    list = (List<CommerceRegion>) QueryUtil.list(q, getDialect(), start, end);
                }
                cacheResult(list);
                finderCache.putResult(finderPath, finderArgs, list);
            } catch (Exception e) {
                // Drop the (possibly poisoned) cache entry before propagating.
                finderCache.removeResult(finderPath, finderArgs);
                throw processException(e);
            } finally {
                closeSession(session);
            }
        }
        return list;
    }
}
|
public class JobInProgress { /** * Remove a reduce TIP from the list for running - reduces
* Called when a reduce fails / completes
* @ param tip the tip that needs to be retired */
private synchronized void retireReduce ( TaskInProgress tip ) { } }
|
if ( runningReduces == null ) { LOG . warn ( "Running list for reducers missing!! " + "Job details are missing." ) ; return ; } runningReduces . remove ( tip ) ;
|
public class Where { /** * Reset the Where object so it can be re - used . */
public Where < T , ID > reset ( ) { } }
|
for ( int i = 0 ; i < clauseStackLevel ; i ++ ) { // help with gc
clauseStack [ i ] = null ; } clauseStackLevel = 0 ; return this ;
|
public class ExportManager { /** * Indicate to associated { @ link ExportGeneration } s to become
* masters for the given partition id
* @ param partitionId */
synchronized public void takeMastership ( int partitionId ) { } }
|
m_masterOfPartitions . add ( partitionId ) ; ExportGeneration generation = m_generation . get ( ) ; if ( generation == null ) { return ; } generation . takeMastership ( partitionId ) ;
|
public class ConnectionManager { /** * 获取连接
* @ param address
* @ return */
protected Connection getConnection ( InetSocketAddress address ) { } }
|
Connection conn = null ; try { // 获取连接
conn = pool . borrowObject ( address ) ; } catch ( FdfsException e ) { throw e ; } catch ( Exception e ) { LOGGER . error ( "Unable to borrow buffer from pool" , e ) ; throw new RuntimeException ( "Unable to borrow buffer from pool" , e ) ; } return conn ;
|
public class GraphHelper { /** * finds the instance ( s ) that correspond to the given vertex */
private Collection < IndexedInstance > getInstancesForVertex ( Map < String , AttributeValueMap > map , AtlasVertex foundVertex ) { } }
|
// loop through the unique attributes . For each attribute , check to see if the vertex property that
// corresponds to that attribute has a value from one or more of the instances that were passed in .
for ( Map . Entry < String , AttributeValueMap > entry : map . entrySet ( ) ) { String propertyName = entry . getKey ( ) ; AttributeValueMap valueMap = entry . getValue ( ) ; Object vertexValue = foundVertex . getProperty ( propertyName , Object . class ) ; Collection < IndexedInstance > instances = valueMap . get ( vertexValue ) ; if ( instances != null && instances . size ( ) > 0 ) { // return first match . Let the underling graph determine if this is a problem
// ( i . e . if the other unique attributes change be changed safely to match what
// the user requested ) .
return instances ; } // try another attribute
} return Collections . emptyList ( ) ;
|
public class Utils {
    /**
     * Concatenates strings with the specified delimiter.
     *
     * Bug fix: the original had a stray semicolon after {@code if (useDelimiter)},
     * which made the delimiter append unconditional — so a {@code null} delimiter
     * produced the literal text "null" between (and after) elements.
     *
     * @param strings   strings to be concatenated
     * @param delimiter a delimiter used between strings; if {@code null} or an
     *                  empty string is given, delimiters are not inserted
     * @return a concatenated string; {@code null} if {@code strings} is
     *         {@code null}; an empty string if {@code strings} is empty
     */
    public static String join(String[] strings, String delimiter) {
        if (strings == null) {
            return null;
        }
        if (strings.length == 0) {
            return "";
        }
        boolean useDelimiter = (delimiter != null && delimiter.length() != 0);
        StringBuilder sb = new StringBuilder();
        for (String string : strings) {
            sb.append(string);
            if (useDelimiter) {
                sb.append(delimiter);
            }
        }
        if (useDelimiter && sb.length() != 0) {
            // Remove the trailing delimiter appended after the last element.
            sb.setLength(sb.length() - delimiter.length());
        }
        return sb.toString();
    }
}
|
public class ContainerSetupScript {
    /**
     * Destroys the application container: shuts down the registered container
     * and removes it from the servlet context. Errors are logged, not propagated.
     *
     * @param context the ServletContext wrapper holding the container registry
     */
    public synchronized void destroyed(AppContextWrapper context) {
        try {
            ContainerRegistryBuilder cb = (ContainerRegistryBuilder) context
                    .getAttribute(ContainerRegistryBuilder.APPLICATION_CONTEXT_ATTRIBUTE_NAME);
            if (cb != null) {
                ContainerDirector cd = new ContainerDirector(cb);
                cd.shutdown();
                // Detach the registry from the context so it cannot be reused after shutdown.
                context.removeAttribute(ContainerRegistryBuilder.APPLICATION_CONTEXT_ATTRIBUTE_NAME);
                containerBuilderContext = null;
                Debug.logVerbose("[JdonFramework] stop the container ..", module);
            }
        } catch (Exception e) {
            // Best-effort shutdown: log the failure instead of propagating it.
            Debug.logError("[JdonFramework] destroyed error: " + e, module);
        }
    }
}
|
public class GraphDOT { /** * Renders a { @ link Graph } in the GraphVIZ DOT format .
* @ param graph
* the graph to render
* @ param a
* the appendable to write to .
* @ param additionalHelpers
* additional helpers for providing visualization properties .
* @ throws IOException
* if writing to { @ code a } fails . */
public static < N , E > void write ( Graph < N , E > graph , Appendable a , List < VisualizationHelper < N , ? super E > > additionalHelpers ) throws IOException { } }
|
final List < VisualizationHelper < N , ? super E > > helpers = new ArrayList < > ( additionalHelpers . size ( ) + 1 ) ; helpers . add ( graph . getVisualizationHelper ( ) ) ; helpers . addAll ( additionalHelpers ) ; writeRaw ( graph , a , toDOTVisualizationHelper ( helpers ) ) ;
|
public class ConfigEvaluator { /** * Evaluates a string to a value of the target attribute type .
 * Dispatches on the attribute definition ' s type code : primitive wrapper
 * types are parsed with their standard valueOf methods , duration types are
 * evaluated in the corresponding time unit , password types are wrapped in a
 * SerializableProtectedString , ON _ ERROR is mapped to the OnError enum ,
 * and PID types are resolved as references . A null definition or an
 * unrecognized type returns the raw string unchanged .
 * NOTE ( review ) : the CHARACTER case calls charAt ( 0 ) - an empty string
 * would throw StringIndexOutOfBoundsException ; presumably callers never
 * pass one - confirm upstream validation .
 * @ param strVal the raw string value to convert
 * @ param attrDef the attribute definition describing the target type ; may be null
 * @ param context evaluation context used for PID reference resolution
 * @ return the converted value
 * @ throws ConfigEvaluatorException if reference evaluation fails
 * @ see # convertListToArray */
private Object evaluateString ( String strVal , ExtendedAttributeDefinition attrDef , EvaluationContext context ) throws ConfigEvaluatorException { } }
|
if ( attrDef == null ) { return strVal ; } int type = attrDef . getType ( ) ; if ( type == AttributeDefinition . BOOLEAN ) { return Boolean . valueOf ( strVal ) ; } else if ( type == AttributeDefinition . BYTE ) { return Byte . valueOf ( strVal ) ; } else if ( type == AttributeDefinition . CHARACTER ) { return Character . valueOf ( strVal . charAt ( 0 ) ) ; } else if ( type == AttributeDefinition . DOUBLE ) { return Double . valueOf ( strVal ) ; } else if ( type == AttributeDefinition . FLOAT ) { return Float . valueOf ( strVal ) ; } else if ( type == AttributeDefinition . INTEGER ) { return Integer . valueOf ( strVal ) ; } else if ( type == AttributeDefinition . LONG ) { return Long . valueOf ( strVal ) ; } else if ( type == AttributeDefinition . SHORT ) { return Short . valueOf ( strVal ) ; } else if ( type == MetaTypeFactory . DURATION_TYPE ) { return MetatypeUtils . evaluateDuration ( strVal , TimeUnit . MILLISECONDS ) ; } else if ( type == MetaTypeFactory . DURATION_S_TYPE ) { return MetatypeUtils . evaluateDuration ( strVal , TimeUnit . SECONDS ) ; } else if ( type == MetaTypeFactory . DURATION_M_TYPE ) { return MetatypeUtils . evaluateDuration ( strVal , TimeUnit . MINUTES ) ; } else if ( type == MetaTypeFactory . DURATION_H_TYPE ) { return MetatypeUtils . evaluateDuration ( strVal , TimeUnit . HOURS ) ; } else if ( type == MetaTypeFactory . PASSWORD_TYPE || type == MetaTypeFactory . HASHED_PASSWORD_TYPE ) { return new SerializableProtectedString ( strVal . toCharArray ( ) ) ; } else if ( type == MetaTypeFactory . ON_ERROR_TYPE ) { return Enum . valueOf ( OnError . class , strVal . trim ( ) . toUpperCase ( ) ) ; } else if ( type == MetaTypeFactory . TOKEN_TYPE ) { return MetatypeUtils . evaluateToken ( strVal ) ; } else if ( type == MetaTypeFactory . PID_TYPE ) { return evaluateReference ( strVal , attrDef , context ) ; } else { // STRING and all other unknown / invalid types .
return strVal ; }
|
public class ThriftMultigetSubSliceQuery { /** * Set the supercolumn to run the slice query on */
@ Override public MultigetSubSliceQuery < K , SN , N , V > setSuperColumn ( SN superColumn ) { } }
|
Assert . notNull ( superColumn , "supercolumn may not be null" ) ; this . superColumn = superColumn ; return this ;
|
public class CmsLabel { /** * Returns the title to be displayed , which is either produced by a title generator ,
* or is equal to the original text if no title generator is set and the label is being
* truncated . < p >
* @ param truncating true if the label is being truncated
* @ return the title to display */
protected String getTitle ( boolean truncating ) { } }
|
if ( m_titleGenerator != null ) { return m_titleGenerator . getTitle ( m_originalText ) ; } if ( truncating ) { return getText ( ) ; } else { return super . getTitle ( ) ; }
|
public class ResourceGroovyMethods { /** * Creates a buffered input stream for this URL .
* @ param url a URL
* @ return a BufferedInputStream for the URL
* @ throws MalformedURLException is thrown if the URL is not well formed
* @ throws IOException if an I / O error occurs while creating the input stream
* @ since 1.5.2 */
public static BufferedInputStream newInputStream ( URL url ) throws MalformedURLException , IOException { } }
|
return new BufferedInputStream ( configuredInputStream ( null , url ) ) ;
|
public class KeyPairCache { /** * Returns a key pair of size < code > bits < / code > . The same key pair
* may be returned several times within a period of the cache
* lifetime .
* If lifetime was set to zero or less than zero , no keys are cached .
* @ param bits the keysize . This is an algorithm - specific metric ,
* such as modulus length , specified in number of bits .
* @ throws NoSuchAlgorithmException if the algorithm is not
* available in the environment .
* @ throws NoSuchProviderException if the provider is not
* available in the environment . */
public KeyPair getKeyPair ( int bits ) throws NoSuchAlgorithmException , NoSuchProviderException { } }
|
if ( this . lifetime < 1 ) { logger . debug ( "Cache lifetime is less than 1, generating new " + "keypair each time" ) ; KeyPairGenerator generator = KeyPairGenerator . getInstance ( this . algorithm , this . provider ) ; generator . initialize ( bits ) ; return generator . generateKeyPair ( ) ; } long st = System . currentTimeMillis ( ) ; Integer keysize = new Integer ( bits ) ; KeyPairCacheEntry entry = ( KeyPairCacheEntry ) entries . get ( keysize ) ; if ( entry == null || st - entry . getCreatedAt ( ) >= lifetime ) { logger . debug ( "Creating " + bits + " bits keypair" ) ; KeyPairGenerator generator = KeyPairGenerator . getInstance ( algorithm , provider ) ; generator . initialize ( bits ) ; logger . debug ( "Time to generate key pair: " + ( System . currentTimeMillis ( ) - st ) ) ; entry = new KeyPairCacheEntry ( generator . generateKeyPair ( ) , st ) ; entries . put ( keysize , entry ) ; } return entry . getKeyPair ( ) ;
|
public class TargetEncoder { /** * Overloaded for the case when user had not specified the noise parameter */
public Frame applyTargetEncoding ( Frame data , String targetColumnName , Map < String , Frame > targetEncodingMap , byte dataLeakageHandlingStrategy , String foldColumn , boolean withBlendedAvg , boolean imputeNAs , long seed ) { } }
|
double defaultNoiseLevel = 0.01 ; int targetIndex = data . find ( targetColumnName ) ; Vec targetVec = data . vec ( targetIndex ) ; double noiseLevel = targetVec . isNumeric ( ) ? defaultNoiseLevel * ( targetVec . max ( ) - targetVec . min ( ) ) : defaultNoiseLevel ; return applyTargetEncoding ( data , targetColumnName , targetEncodingMap , dataLeakageHandlingStrategy , foldColumn , withBlendedAvg , noiseLevel , true , seed ) ;
|
public class PercentEscaper { /** * Escapes a string with the current settings on the escaper .
* @ param original the origin string to escape
* @ return the escaped string */
public String escape ( String original ) { } }
|
StringBuilder output = new StringBuilder ( ) ; for ( int i = 0 ; i != utf16ToAscii ( original ) . length ( ) ; i ++ ) { char c = original . charAt ( i ) ; if ( c == ' ' ) { output . append ( usePlusForSpace ? "+" : HEX [ ' ' ] ) ; } else if ( c >= 'a' && c <= 'z' ) { output . append ( c ) ; } else if ( c >= 'A' && c <= 'Z' ) { output . append ( c ) ; } else if ( c >= '0' && c <= '9' ) { output . append ( c ) ; } else if ( safeChars . contains ( c ) ) { output . append ( c ) ; } else { output . append ( HEX [ c ] ) ; } } return output . toString ( ) ;
|
public class LocalDate { /** * Returns a copy of this { @ code LocalDate } with the year altered .
 * If the day - of - month is invalid for the new year ( February 29 in a non - leap year ) , it will be changed to the last valid day of the month .
 * This instance is immutable and unaffected by this method call .
 * @ param year the year to set in the result , from MIN _ YEAR to MAX _ YEAR
 * @ return a { @ code LocalDate } based on this date with the requested year , not null
 * @ throws DateTimeException if the year value is invalid */
public LocalDate withYear ( int year ) { } }
|
// Short - circuit : an unchanged year returns this instance ( already valid by construction , so the range check is skipped ) .
if ( this . year == year ) { return this ; } YEAR . checkValidValue ( year ) ; return resolvePreviousValid ( year , month , day ) ;
|
public class AbstractAzkabanServlet { /** * Retrieves a success message from a cookie . azkaban . success . message */
protected String getSuccessMessageFromCookie ( final HttpServletRequest request ) { } }
|
final Cookie cookie = getCookieByName ( request , AZKABAN_SUCCESS_MESSAGE ) ; if ( cookie == null ) { return null ; } return cookie . getValue ( ) ;
|
public class DRL5Expressions { /** * $ ANTLR start synpred11 _ DRL5Expressions
 * Generated ANTLR syntactic - predicate fragment : matches the sequence
 * ` squareArguments shiftExpression ` during backtracking .
 * Do not edit by hand ; regenerate from DRL5Expressions . g instead . */
public final void synpred11_DRL5Expressions_fragment ( ) throws RecognitionException { } }
|
// src / main / resources / org / drools / compiler / lang / DRL5Expressions . g : 424:8 : ( squareArguments shiftExpression )
// src / main / resources / org / drools / compiler / lang / DRL5Expressions . g : 424:9 : squareArguments shiftExpression
{ pushFollow ( FOLLOW_squareArguments_in_synpred11_DRL5Expressions1984 ) ; squareArguments ( ) ; state . _fsp -- ; if ( state . failed ) return ; pushFollow ( FOLLOW_shiftExpression_in_synpred11_DRL5Expressions1986 ) ; shiftExpression ( ) ; state . _fsp -- ; if ( state . failed ) return ; }
|
public class Main { /** * Creates a new instance of this class using the arguments specified , gives
* it any extra user properties which have been specified , and then runs the
* build using the classloader provided .
* @ param args Command line arguments . Must not be < code > null < / code > .
* @ param additionalUserProperties Any extra properties to use in this
* build . May be < code > null < / code > , which is the equivalent to
* passing in an empty set of properties .
* @ param coreLoader Classloader used for core classes . May be
* < code > null < / code > in which case the system classloader is
* used . */
public static void start ( final String [ ] args , final Properties additionalUserProperties , final ClassLoader coreLoader ) { } }
|
final Main m = new Main ( ) ; m . startAnt ( args , additionalUserProperties , coreLoader ) ;
|
public class ProvFactory { /** * Creates a new { @ link Agent } with provided identifier
* @ param ag a { @ link QualifiedName } for the agent
* @ return an object of type { @ link Agent } */
public Agent newAgent ( QualifiedName ag ) { } }
|
Agent res = of . createAgent ( ) ; res . setId ( ag ) ; return res ;
|
public class ST_RemoveHoles { /** * Remove any holes from the geometry . If the geometry doesn ' t contain any
* holes , return it unchanged .
* @ param geometry Geometry
* @ return Geometry with no holes * */
public static Geometry removeHoles ( Geometry geometry ) { } }
|
if ( geometry == null ) { return null ; } if ( geometry instanceof Polygon ) { return removeHolesPolygon ( ( Polygon ) geometry ) ; } else if ( geometry instanceof MultiPolygon ) { return removeHolesMultiPolygon ( ( MultiPolygon ) geometry ) ; } else if ( geometry instanceof GeometryCollection ) { Geometry [ ] geometries = new Geometry [ geometry . getNumGeometries ( ) ] ; for ( int i = 0 ; i < geometry . getNumGeometries ( ) ; i ++ ) { Geometry geom = geometry . getGeometryN ( i ) ; if ( geometry instanceof Polygon ) { geometries [ i ] = removeHolesPolygon ( ( Polygon ) geom ) ; } else if ( geometry instanceof MultiPolygon ) { geometries [ i ] = removeHolesMultiPolygon ( ( MultiPolygon ) geom ) ; } else { geometries [ i ] = geom ; } } return FACTORY . createGeometryCollection ( geometries ) ; } return null ;
|
public class MPrinter { /** * Opens the document with the platform ' s default associated application . < br / >
 * Implementations may override this method .
 * @ param targetFile
 * File to open
 * @ throws IOException
 * Disk error , or no application is associated with the file type */
protected void showDocument ( final File targetFile ) throws IOException { } }
|
try { Desktop . getDesktop ( ) . open ( targetFile ) ; } catch ( final IOException e ) { throw new IOException ( "Is there an associated application for \"" + targetFile . getName ( ) + "\"?\n" + e . getMessage ( ) , e ) ; } // we could print the file directly ( for example CSV with Excel ) assuming Desktop . getDesktop ( ) . isDesktopSupported ( )
// and Desktop . getDesktop ( ) . isSupported ( Desktop . Action . PRINT ) return true
// which is the case on Windows XP SP2 ( and Java 1.6 )
// Desktop . getDesktop ( ) . print ( targetFile ) ;
|
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
 * Lazily resolves and caches the IfcTimeSeriesDataTypeEnum EEnum from the
 * registered Ifc4 package ( classifier index 1088 ) .
 * < ! - - end - user - doc - - >
 * @ generated */
@ Override public EEnum getIfcTimeSeriesDataTypeEnum ( ) { } }
|
if ( ifcTimeSeriesDataTypeEnumEEnum == null ) { ifcTimeSeriesDataTypeEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 1088 ) ; } return ifcTimeSeriesDataTypeEnumEEnum ;
|
public class LanguageUtils { /** * Sets the default language map . It is the basis language template which is to be translated .
* @ param deflang the default language map */
public void setDefaultLanguage ( Map < String , String > deflang ) { } }
|
if ( deflang != null && ! deflang . isEmpty ( ) ) { LANG_CACHE . put ( getDefaultLanguageCode ( ) , deflang ) ; }
|
public class BitmexTradeServiceRaw { /** * See { @ link Bitmex # getOrders }
 * Fetches orders in pages of 500 until a partial or empty page is returned .
 * The loop condition ( size % 500 == 0 ) continues only while every page so
 * far has been full ; when the total count happens to be an exact multiple
 * of 500 , one extra request is made and the explicit break on an empty
 * response ends the loop .
 * @ return List of { @ link BitmexPrivateOrder } s . */
public List < BitmexPrivateOrder > getBitmexOrders ( @ Nullable String symbol , @ Nullable String filter , @ Nullable String columns , @ Nullable Date startTime , @ Nullable Date endTime ) throws ExchangeException { } }
|
ArrayList < BitmexPrivateOrder > orders = new ArrayList < > ( ) ; for ( int i = 0 ; orders . size ( ) % 500 == 0 ; i ++ ) { final int j = i ; List < BitmexPrivateOrder > orderResponse = updateRateLimit ( ( ) -> bitmex . getOrders ( apiKey , exchange . getNonceFactory ( ) , signatureCreator , symbol , filter , columns , 500 , ( long ) ( j * 500 ) , true , startTime , endTime ) ) ; orders . addAll ( orderResponse ) ; // Prevent an infinite loop when a page comes back empty .
if ( orderResponse . size ( ) == 0 ) break ; } return orders ;
|
public class AbstractNumberPickerPreference { /** * Obtains , whether the selection wheel of the selection wheel of the preference ' s { @ link
* NumberPicker } should be wrapped , or not , from a specific typed array .
* @ param typedArray
* The typed array , which should be used to retrieve , whether the selection wheel of the
* preference ' s { @ link NumberPicker } should be wrapped , or not , as an instance of the
* class { @ link TypedArray } . The typed array may not be null */
private void obtainWrapSelectorWheel ( @ NonNull final TypedArray typedArray ) { } }
|
boolean defaultValue = getContext ( ) . getResources ( ) . getBoolean ( R . bool . number_picker_preference_default_wrap_selector_wheel ) ; wrapSelectorWheel ( typedArray . getBoolean ( R . styleable . AbstractNumberPickerPreference_wrapSelectorWheel , defaultValue ) ) ;
|
public class LinearClassifierFactory { /** * Trains the linear classifier using Generalized Expectation criteria as described in
* < tt > Generalized Expectation Criteria for Semi Supervised Learning of Conditional Random Fields < / tt > , Mann and McCallum , ACL 2008.
* The original algorithm is proposed for CRFs but has been adopted to LinearClassifier ( which is a simpler , special case of a CRF ) .
* Automatically discovers high precision , high frequency labeled features to be used as GE constraints .
* IMPORTANT : the current feature selector assumes the features are binary . The GE constraints assume the constraining features are binary anyway , although
* it doesn ' t make such assumptions about other features . */
public LinearClassifier < L , F > trainSemiSupGE ( GeneralDataset < L , F > labeledDataset , List < ? extends Datum < L , F > > unlabeledDataList ) { } }
|
List < F > GEFeatures = getHighPrecisionFeatures ( labeledDataset , 0.9 , 10 ) ; return trainSemiSupGE ( labeledDataset , unlabeledDataList , GEFeatures , 0.5 ) ;
|
public class ByteArrayList { /** * Sets the i ' th element of the array list to the given value .
* @ param i The index to set .
* @ param value The value to set . */
public void set ( int i , byte value ) { } }
|
if ( i < 0 || i >= size ) { throw new IndexOutOfBoundsException ( ) ; } elements [ i ] = value ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.