signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class LockAdviser { /** * Reads { @ link LockAdviser # lockPath lock path directory } and finds the
* { @ link File write lock file } , if any .
* @ return the { @ link write lock file } , or < tt > null < / tt > if no write lock
* exists */
private File getWriteLock ( ) { } } | File [ ] lckFiles = this . lockPath . listFiles ( lockFilter ) ; for ( final File lckFile : lckFiles ) { if ( lockFilter . isWriterLockFile ( lckFile ) ) { return lckFile ; } } return null ; |
public class Shape { /** * Get array shape from the buffer , as an int [ ]
* @ param buffer Buffer to get the shape from
* @ return Shape array */
public static long [ ] shape ( DataBuffer buffer ) { } } | val ret = new long [ rank ( buffer ) ] ; for ( int i = 0 ; i < ret . length ; i ++ ) ret [ i ] = buffer . getInt ( 1 + i ) ; return ret ; |
public class OperationAggregator { /** * Combines { @ code op } with the other operation ( s ) merged into this instance .
* @ param other an { @ code Operation } to merge into the aggregate . */
public void add ( Operation other ) { } } | op . addAllLogEntries ( other . getLogEntriesList ( ) ) ; mergeMetricValues ( other ) ; mergeTimestamps ( other ) ; |
public class JavaScriptParser { /** * Parse the file content and return variants of the file .
* @ param fileContent to parse
* @ return the parsing result */
public ParseResult parse ( String fileContent ) { } } | ParseResult result = null ; // setup environment and initialize the parser .
CompilerEnvirons environment = new CompilerEnvirons ( ) ; environment . setLanguageVersion ( 180 ) ; environment . setStrictMode ( false ) ; environment . setRecordingComments ( true ) ; environment . setAllowSharpComments ( true ) ; environment . setRecordingLocalJsDocComments ( true ) ; // IMPORTANT : the parser can only be used once !
Parser parser = new Parser ( environment ) ; try { AstRoot root = parser . parse ( new StringReader ( fileContent ) , null , 1 ) ; result = new ParseResult ( ) ; // remove all comments
result . setContentWithoutComments ( root . toSource ( ) ) ; // remove all header comments
SortedSet < Comment > comments = root . getComments ( ) ; if ( comments != null && ! comments . isEmpty ( ) ) { String headerlessFileContent = removeHeaderComments ( fileContent , comments ) ; result . setContentWithoutHeaderComments ( headerlessFileContent ) ; } } catch ( Exception e ) { logger . debug ( "Error parsing JavaScript file: {}" , e . getMessage ( ) ) ; } return result ; |
public class UpdateClusterRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( UpdateClusterRequest updateClusterRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( updateClusterRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateClusterRequest . getClusterName ( ) , CLUSTERNAME_BINDING ) ; protocolMarshaller . marshall ( updateClusterRequest . getDescription ( ) , DESCRIPTION_BINDING ) ; protocolMarshaller . marshall ( updateClusterRequest . getPreferredMaintenanceWindow ( ) , PREFERREDMAINTENANCEWINDOW_BINDING ) ; protocolMarshaller . marshall ( updateClusterRequest . getNotificationTopicArn ( ) , NOTIFICATIONTOPICARN_BINDING ) ; protocolMarshaller . marshall ( updateClusterRequest . getNotificationTopicStatus ( ) , NOTIFICATIONTOPICSTATUS_BINDING ) ; protocolMarshaller . marshall ( updateClusterRequest . getParameterGroupName ( ) , PARAMETERGROUPNAME_BINDING ) ; protocolMarshaller . marshall ( updateClusterRequest . getSecurityGroupIds ( ) , SECURITYGROUPIDS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class WidgetUtil { /** * Given an HTML string that defines a Flash object , creates a new DOM node for it and adds it
* to the appropriate container . Please note : the container should be added to the page prior
* to calling this function . */
public static HTML embedFlashObject ( Panel container , String htmlString ) { } } | // Please note : the following is a work - around for an IE7 bug . If we create a Flash object
// node * before * attaching it to the DOM tree , IE will silently fail to register
// the Flash object ' s callback functions for access from JavaScript . To make this work ,
// create an empty node first , add it to the DOM tree , and then initialize it with
// the Flash object definition .
HTML element = new HTML ( ) ; container . add ( element ) ; element . setHTML ( htmlString ) ; return element ; |
public class ORecordIteratorClusters { /** * Tell to the iterator that the upper limit must be checked at every cycle . Useful when concurrent deletes or additions change
* the size of the cluster while you ' re browsing it . Default is false .
* @ param iLiveUpdated
* True to activate it , otherwise false ( default )
* @ see # isLiveUpdated ( ) */
@ Override public ORecordIteratorClusters < REC > setLiveUpdated ( boolean iLiveUpdated ) { } } | super . setLiveUpdated ( iLiveUpdated ) ; // SET THE UPPER LIMIT TO - 1 IF IT ' S ENABLED
lastClusterPosition = iLiveUpdated ? - 1 : database . countClusterElements ( current . clusterId ) ; if ( iLiveUpdated ) { firstClusterPosition = - 1 ; lastClusterPosition = - 1 ; } else { updateClusterRange ( ) ; } return this ; |
public class ExportManager { /** * Creates the initial export processor if export is enabled */
private void initialize ( CatalogContext catalogContext , List < Pair < Integer , Integer > > localPartitionsToSites , boolean isRejoin ) { } } | try { CatalogMap < Connector > connectors = CatalogUtil . getConnectors ( catalogContext ) ; if ( exportLog . isDebugEnabled ( ) ) { exportLog . debug ( "initialize for " + connectors . size ( ) + " connectors." ) ; CatalogUtil . dumpConnectors ( exportLog , connectors ) ; } if ( ! CatalogUtil . hasExportedTables ( connectors ) ) { return ; } if ( exportLog . isDebugEnabled ( ) ) { exportLog . debug ( "Creating processor " + m_loaderClass ) ; } ExportDataProcessor newProcessor = getNewProcessorWithProcessConfigSet ( m_processorConfig ) ; m_processor . set ( newProcessor ) ; File exportOverflowDirectory = new File ( VoltDB . instance ( ) . getExportOverflowPath ( ) ) ; ExportGeneration generation = new ExportGeneration ( exportOverflowDirectory , m_messenger ) ; generation . initialize ( m_hostId , catalogContext , connectors , newProcessor , localPartitionsToSites , exportOverflowDirectory ) ; m_generation . set ( generation ) ; newProcessor . setExportGeneration ( generation ) ; newProcessor . readyForData ( ) ; } catch ( final ClassNotFoundException e ) { exportLog . l7dlog ( Level . ERROR , LogKeys . export_ExportManager_NoLoaderExtensions . name ( ) , e ) ; throw new RuntimeException ( e ) ; } catch ( final Exception e ) { exportLog . error ( "Initialize failed with:" , e ) ; throw new RuntimeException ( e ) ; } |
public class AsmUtils { /** * Loads the class defined with the given name and bytecode using the given class loader .
* Since package and class idendity includes the ClassLoader instance used to load a class we use reflection
* on the given class loader to define generated classes . If we used our own class loader ( in order to be able
* to access the protected " defineClass " method ) we would likely still be able to load generated classes ,
* however , they would not have access to package - private classes and members of their super classes .
* @ param className the full name of the class to be loaded
* @ param code the bytecode of the class to load
* @ param classLoader the class loader to use
* @ return the class instance */
public static Class < ? > loadClass ( String className , byte [ ] code , ClassLoader classLoader ) { } } | checkArgNotNull ( className , "className" ) ; checkArgNotNull ( code , "code" ) ; checkArgNotNull ( classLoader , "classLoader" ) ; try { Class < ? > classLoaderBaseClass = Class . forName ( "java.lang.ClassLoader" ) ; Method defineClassMethod = classLoaderBaseClass . getDeclaredMethod ( "defineClass" , String . class , byte [ ] . class , int . class , int . class ) ; // protected method invocation
defineClassMethod . setAccessible ( true ) ; try { return ( Class < ? > ) defineClassMethod . invoke ( classLoader , className , code , 0 , code . length ) ; } finally { defineClassMethod . setAccessible ( false ) ; } } catch ( Exception e ) { throw new RuntimeException ( "Could not load class '" + className + '\'' , e ) ; } |
public class WriterBasedGenerator { /** * Method called to try to either prepend character escape at front of
* given buffer ; or if not possible , to write it out directly .
* @ return Pointer to start of prepended entity ( if prepended ) ; or ' ptr '
* if not . */
private final int _prependOrWriteCharacterEscape ( char [ ] buffer , int ptr , int end , char ch , int escCode ) throws IOException , JsonGenerationException { } } | if ( escCode >= 0 ) { // \ \ N ( 2 char )
if ( ptr > 1 && ptr < end ) { // fits , just prepend
ptr -= 2 ; buffer [ ptr ] = '\\' ; buffer [ ptr + 1 ] = ( char ) escCode ; } else { // won ' t fit , write
char [ ] ent = _entityBuffer ; if ( ent == null ) { ent = _allocateEntityBuffer ( ) ; } ent [ 1 ] = ( char ) escCode ; _writer . write ( ent , 0 , 2 ) ; } return ptr ; } // if ( escCode ! = CharacterEscapes . ESCAPE _ CUSTOM ) { / / std , \ \ uXXXX
if ( ptr > 5 && ptr < end ) { // fits , prepend to buffer
ptr -= 6 ; buffer [ ptr ++ ] = '\\' ; buffer [ ptr ++ ] = 'u' ; // We know it ' s a control char , so only the last 2 chars are non - 0
if ( ch > 0xFF ) { // beyond 8 bytes
int hi = ( ch >> 8 ) & 0xFF ; buffer [ ptr ++ ] = HEX_CHARS [ hi >> 4 ] ; buffer [ ptr ++ ] = HEX_CHARS [ hi & 0xF ] ; ch &= 0xFF ; } else { buffer [ ptr ++ ] = '0' ; buffer [ ptr ++ ] = '0' ; } buffer [ ptr ++ ] = HEX_CHARS [ ch >> 4 ] ; buffer [ ptr ] = HEX_CHARS [ ch & 0xF ] ; ptr -= 5 ; } else { // won ' t fit , flush and write
char [ ] ent = _entityBuffer ; if ( ent == null ) { ent = _allocateEntityBuffer ( ) ; } _outputHead = _outputTail ; if ( ch > 0xFF ) { // beyond 8 bytes
int hi = ( ch >> 8 ) & 0xFF ; int lo = ch & 0xFF ; ent [ 10 ] = HEX_CHARS [ hi >> 4 ] ; ent [ 11 ] = HEX_CHARS [ hi & 0xF ] ; ent [ 12 ] = HEX_CHARS [ lo >> 4 ] ; ent [ 13 ] = HEX_CHARS [ lo & 0xF ] ; _writer . write ( ent , 8 , 6 ) ; } else { // We know it ' s a control char , so only the last 2 chars are non - 0
ent [ 6 ] = HEX_CHARS [ ch >> 4 ] ; ent [ 7 ] = HEX_CHARS [ ch & 0xF ] ; _writer . write ( ent , 2 , 6 ) ; } } return ptr ; |
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcRamp ( ) { } } | if ( ifcRampEClass == null ) { ifcRampEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 496 ) ; } return ifcRampEClass ; |
public class ArchiveBase { /** * { @ inheritDoc }
* @ see org . jboss . shrinkwrap . api . Archive # addAsDirectory ( java . lang . String ) */
@ Override public T addAsDirectory ( final String path ) throws IllegalArgumentException { } } | // Precondition check
Validate . notNullOrEmpty ( path , "path must be specified" ) ; // Delegate and return
return this . addAsDirectory ( ArchivePaths . create ( path ) ) ; |
public class ParserUtils { /** * Determines if the given line is the first part of a multiline record . It does this by verifying that the
* qualifer on the last element is not closed
* @ param chrArry -
* char data of the line
* @ param delimiter -
* delimiter being used
* @ param qualifier -
* qualifier being used
* @ return boolean */
public static boolean isMultiLine ( final char [ ] chrArry , final char delimiter , final char qualifier ) { } } | // check if the last char is the qualifier , if so then this a good
// chance it is not multiline
if ( chrArry [ chrArry . length - 1 ] != qualifier ) { // could be a potential line break
boolean qualiFound = false ; for ( int i = chrArry . length - 1 ; i >= 0 ; i -- ) { if ( chrArry [ i ] == ' ' ) { continue ; } // check to see if we can find a qualifier followed by a
// delimiter
// remember we are working are way backwards on the line
if ( qualiFound ) { if ( chrArry [ i ] == delimiter ) { // before deciding if this is the begining of a qualified new line
// I think we have to go back to the beginning of the line and see if we are inside a qualified
// field or not ?
boolean qualifiedContent = chrArry [ 0 ] == qualifier ; for ( int index = 0 ; index < chrArry . length ; index ++ ) { final char currentChar = chrArry [ index ] ; qualifiedContent = currentChar == qualifier ; if ( qualifiedContent ) { // go until first occurence of closing qualifierdelimiter combination
for ( ; index < chrArry . length ; index ++ ) { if ( chrArry [ index ] == delimiter && chrArry [ ++ index ] == qualifier ) { qualifiedContent = false ; } } } } return qualifiedContent ; } // guard against multiple qualifiers in the sequence [ , " " We ]
qualiFound = chrArry [ i ] == qualifier ; } else if ( chrArry [ i ] == delimiter ) { // if we have a delimiter followed by a qualifier , then we
// have moved on to a new element and this could not be multiline .
// start a new loop here in case there is
// space between the delimiter and qualifier
for ( int j = i - 1 ; j >= 0 ; j -- ) { if ( chrArry [ j ] == ' ' ) { continue ; } else if ( chrArry [ j ] == qualifier ) { return false ; } break ; } } else if ( chrArry [ i ] == qualifier ) { qualiFound = true ; } } } else { // we have determined that the last char on the line is a qualifier .
// This most likely means
// that this is not multiline , however we must account for the
// following scenario
// data , data , "
// data
// / data "
for ( int i = chrArry . length - 1 ; i >= 0 ; i -- ) { if ( i == chrArry . length - 1 || chrArry [ i ] == ' ' ) { // skip the first char , or any spaces we come across between
// the delimiter and qualifier
continue ; } if ( chrArry [ i ] == delimiter ) { // before deciding if this is the begining of a qualified new line
// I think we have to go back to the beginning of the line and see if we are inside a qualified
// field or not ?
boolean qualifiedContent = chrArry [ 0 ] == qualifier ; for ( int index = 0 ; index < chrArry . length ; index ++ ) { final char currentChar = chrArry [ index ] ; qualifiedContent = currentChar == qualifier ; if ( qualifiedContent ) { // go until first occurence of closing qualifierdelimiter combination
for ( ; index < chrArry . length ; index ++ ) { if ( chrArry [ index ] == delimiter && chrArry [ ++ index ] == qualifier ) { qualifiedContent = false ; } } } } return qualifiedContent ; } break ; } } return false ; |
public class TensorShapeProto { /** * < pre >
* Dimensions of the tensor , such as { " input " , 30 } , { " output " , 40}
* for a 30 x 40 2D tensor . If an entry has size - 1 , this
* corresponds to a dimension of unknown size . The names are
* optional .
* The order of entries in " dim " matters : It indicates the layout of the
* values in the tensor in - memory representation .
* The first entry in " dim " is the outermost dimension used to layout the
* values , the last entry is the innermost dimension . This matches the
* in - memory layout of RowMajor Eigen tensors .
* If " dim . size ( ) " & gt ; 0 , " unknown _ rank " must be false .
* < / pre >
* < code > repeated . tensorflow . TensorShapeProto . Dim dim = 2 ; < / code > */
public java . util . List < org . tensorflow . framework . TensorShapeProto . Dim > getDimList ( ) { } } | return dim_ ; |
public class JavaNetClientTransport { /** * { @ inheritDoc } */
@ Override public TransportResult doPut ( URI uri , String payload ) { } } | return doit ( "PUT" , uri , payload ) ; |
public class EnumConverter { /** * < p > Convert the enum constant given by the < code > value < / code >
* argument into a String . If no target class argument has been
* provided to the constructor of this instance , throw a
* < code > ConverterException < / code > containing the { @ link
* # ENUM _ NO _ CLASS _ ID } message with proper parameters . If the
* < code > value < / code > argument is < code > null < / code > , return
* < code > null < / code > . If the value is an instance of the provided
* target class , return its string value by < span
* class = " changed _ added _ 2_0 " > casting it to a
* < code > java . lang . Enum < / code > and returning the result of calling
* the < code > name ( ) < / code > method . < / span > Otherwise , throw a { @ link
* ConverterException } containing the { @ link # ENUM _ ID } message with
* proper parameters . < / p >
* @ throws ConverterException { @ inheritDoc }
* @ throws NullPointerException { @ inheritDoc } */
public String getAsString ( FacesContext context , UIComponent component , Object value ) { } } | if ( context == null || component == null ) { throw new NullPointerException ( ) ; } if ( targetClass == null ) { throw new ConverterException ( MessageFactory . getMessage ( context , ENUM_NO_CLASS_ID , value , MessageFactory . getLabel ( context , component ) ) ) ; } // If the specified value is null , return null
if ( value == null ) { // FIXSPEC even though the Javadoc states that we need to return
// null the master Converter contract states that a null value
// results in a zero - length string ( see JAVASERVERFACES _ SPEC _ PUBLIC - 1217)
return "" ; } if ( targetClass . isInstance ( value ) ) { return ( ( Enum ) value ) . name ( ) ; } throw new ConverterException ( MessageFactory . getMessage ( context , ENUM_ID , value , value , MessageFactory . getLabel ( context , component ) ) ) ; |
public class JavaParser { /** * $ ANTLR start synpred286 _ Java */
public final void synpred286_Java_fragment ( ) throws RecognitionException { } } | // src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 1262:29 : ( ' [ ' expression ' ] ' )
// src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 1262:29 : ' [ ' expression ' ] '
{ match ( input , 59 , FOLLOW_59_in_synpred286_Java6139 ) ; if ( state . failed ) return ; pushFollow ( FOLLOW_expression_in_synpred286_Java6141 ) ; expression ( ) ; state . _fsp -- ; if ( state . failed ) return ; match ( input , 60 , FOLLOW_60_in_synpred286_Java6143 ) ; if ( state . failed ) return ; } |
public class TransactionException { /** * Throw when trying to add instances to an abstract Type */
public static TransactionException addingInstancesToAbstractType ( Type type ) { } } | return create ( ErrorMessage . IS_ABSTRACT . getMessage ( type . label ( ) ) ) ; |
public class ListJobsResult { /** * A list of job objects . Each job object contains metadata describing the job .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setJobList ( java . util . Collection ) } or { @ link # withJobList ( java . util . Collection ) } if you want to override
* the existing values .
* @ param jobList
* A list of job objects . Each job object contains metadata describing the job .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ListJobsResult withJobList ( GlacierJobDescription ... jobList ) { } } | if ( this . jobList == null ) { setJobList ( new java . util . ArrayList < GlacierJobDescription > ( jobList . length ) ) ; } for ( GlacierJobDescription ele : jobList ) { this . jobList . add ( ele ) ; } return this ; |
public class GrailsResourceUtils { /** * Takes any number of Strings and appends them into a uri , making
* sure that a forward slash is inserted between each piece and
* making sure that no duplicate slashes are in the uri
* < pre >
* Input : " "
* Output : " "
* Input : " / alpha " , " / beta " , " / gamma "
* Output : " / alpha / beta / gamma
* Input : " / alpha / , " / beta / " , " / gamma "
* Output : " / alpha / beta / gamma
* Input : " / alpha / " , " / beta / " , " / gamma / "
* Output " / alpha / beta / gamma /
* Input : " alpha " , " beta " , " gamma "
* Output : " alpha / beta / gamma
* < / pre >
* @ param pieces Strings to concatenate together into a uri
* @ return a uri */
public static String appendPiecesForUri ( String ... pieces ) { } } | if ( pieces == null || pieces . length == 0 ) return "" ; // join parts & & strip double slashes
StringBuilder builder = new StringBuilder ( 16 * pieces . length ) ; char previous = 0 ; for ( int i = 0 ; i < pieces . length ; i ++ ) { String piece = pieces [ i ] ; if ( piece != null && piece . length ( ) > 0 ) { for ( int j = 0 , maxlen = piece . length ( ) ; j < maxlen ; j ++ ) { char current = piece . charAt ( j ) ; if ( ! ( previous == '/' && current == '/' ) ) { builder . append ( current ) ; previous = current ; } } if ( i + 1 < pieces . length && previous != '/' ) { builder . append ( '/' ) ; previous = '/' ; } } } return builder . toString ( ) ; |
public class ListInstanceGroupsResult { /** * The list of instance groups for the cluster and given filters .
* @ return The list of instance groups for the cluster and given filters . */
public java . util . List < InstanceGroup > getInstanceGroups ( ) { } } | if ( instanceGroups == null ) { instanceGroups = new com . amazonaws . internal . SdkInternalList < InstanceGroup > ( ) ; } return instanceGroups ; |
public class CeServer { /** * Can ' t be started as is . Needs to be bootstrapped by sonar - application */
public static void main ( String [ ] args ) { } } | ProcessEntryPoint entryPoint = ProcessEntryPoint . createForArguments ( args ) ; Props props = entryPoint . getProps ( ) ; new CeProcessLogging ( ) . configure ( props ) ; CeServer server = new CeServer ( new ComputeEngineImpl ( props , new ComputeEngineContainerImpl ( ) ) , new MinimumViableSystem ( ) ) ; entryPoint . launch ( server ) ; |
public class XMLDecoder { /** * Close the input stream of xml data . */
public void close ( ) { } } | if ( inputStream == null ) { return ; } try { inputStream . close ( ) ; } catch ( Exception e ) { listener . exceptionThrown ( e ) ; } |
public class StandardBullhornData { /** * { @ inheritDoc } */
@ Override public FileWrapper getFile ( Class < ? extends FileEntity > type , Integer entityId , Integer fileId ) { } } | return this . handleGetFileContentWithMetaData ( type , entityId , fileId ) ; |
public class CoverageUtilities { /** * Simple method to get a value from a single band raster .
* < p > Note that this method does always return a value . If invalid , a novalue is returned . < / p >
* @ param raster the single band raster .
* @ param col the column .
* @ param row the row .
* @ return the value in the [ col , row ] of the first band . */
public static double getValue ( GridCoverage2D raster , int col , int row ) { } } | double [ ] values = null ; try { values = raster . evaluate ( new GridCoordinates2D ( col , row ) , ( double [ ] ) null ) ; } catch ( Exception e ) { return doubleNovalue ; } return values [ 0 ] ; |
public class XmlRpcHandler { /** * Convenience method that creates a XMLReader configured with this
* XmlRpcHandler instance .
* @ return
* @ throws ParserConfigurationException
* @ throws SAXException
* @ throws FactoryConfigurationError */
XMLReader newXMLReader ( ) throws ParserConfigurationException , SAXException , FactoryConfigurationError { } } | SAXParserFactory parserFactory = SAXParserFactory . newInstance ( ) ; SAXParser saxParser = parserFactory . newSAXParser ( ) ; XMLReader reader = saxParser . getXMLReader ( ) ; reader . setContentHandler ( this ) ; return reader ; |
public class LightMetaProperty { /** * Creates an instance from a { @ code Method } .
* @ param < P > the property type
* @ param metaBean the meta bean , not null
* @ param getMethod the method , not null
* @ param setMethod the method , not null
* @ param constructorIndex the index of the property in the constructor
* @ return the property , not null */
@ SuppressWarnings ( "unchecked" ) static < P > LightMetaProperty < P > of ( MetaBean metaBean , Field field , Method getMethod , Method setMethod , MethodHandles . Lookup lookup , String propertyName , int constructorIndex ) { } } | MethodHandle getter ; try { MethodType type = MethodType . methodType ( getMethod . getReturnType ( ) , getMethod . getParameterTypes ( ) ) ; getter = lookup . findVirtual ( field . getDeclaringClass ( ) , getMethod . getName ( ) , type ) ; } catch ( IllegalArgumentException | NoSuchMethodException | IllegalAccessException ex ) { throw new UnsupportedOperationException ( "Property cannot be read: " + propertyName , ex ) ; } MethodHandle setter = null ; if ( setMethod != null ) { try { MethodType type = MethodType . methodType ( void . class , setMethod . getParameterTypes ( ) ) ; setter = lookup . findVirtual ( field . getDeclaringClass ( ) , setMethod . getName ( ) , type ) ; } catch ( IllegalArgumentException | NoSuchMethodException | IllegalAccessException ex ) { throw new UnsupportedOperationException ( "Property cannot be written: " + propertyName , ex ) ; } } return new LightMetaProperty < > ( metaBean , propertyName , ( Class < P > ) field . getType ( ) , field . getGenericType ( ) , Arrays . asList ( field . getAnnotations ( ) ) , getter , setter , constructorIndex , calculateStyle ( metaBean , setter ) ) ; |
public class Utils { /** * For the class return all implemented interfaces including the
* superinterfaces of the implementing interfaces , also iterate over for
* all the superclasses . For interface return all the extended interfaces
* as well as superinterfaces for those extended interfaces .
* @ param type type whose implemented or
* super interfaces are sought .
* @ param configuration the current configuration of the doclet .
* @ param sort if true , return list of interfaces sorted alphabetically .
* @ return List of all the required interfaces . */
public List < Type > getAllInterfaces ( Type type , Configuration configuration , boolean sort ) { } } | Map < ClassDoc , Type > results = sort ? new TreeMap < ClassDoc , Type > ( ) : new LinkedHashMap < ClassDoc , Type > ( ) ; Type [ ] interfaceTypes = null ; Type superType = null ; if ( type instanceof ParameterizedType ) { interfaceTypes = ( ( ParameterizedType ) type ) . interfaceTypes ( ) ; superType = ( ( ParameterizedType ) type ) . superclassType ( ) ; } else if ( type instanceof ClassDoc ) { interfaceTypes = ( ( ClassDoc ) type ) . interfaceTypes ( ) ; superType = ( ( ClassDoc ) type ) . superclassType ( ) ; } else { interfaceTypes = type . asClassDoc ( ) . interfaceTypes ( ) ; superType = type . asClassDoc ( ) . superclassType ( ) ; } for ( Type interfaceType : interfaceTypes ) { ClassDoc interfaceClassDoc = interfaceType . asClassDoc ( ) ; if ( ! ( interfaceClassDoc . isPublic ( ) || ( configuration == null || isLinkable ( interfaceClassDoc , configuration ) ) ) ) { continue ; } results . put ( interfaceClassDoc , interfaceType ) ; for ( Type t : getAllInterfaces ( interfaceType , configuration , sort ) ) { results . put ( t . asClassDoc ( ) , t ) ; } } if ( superType == null ) return new ArrayList < > ( results . values ( ) ) ; // Try walking the tree .
addAllInterfaceTypes ( results , superType , interfaceTypesOf ( superType ) , false , configuration ) ; List < Type > resultsList = new ArrayList < > ( results . values ( ) ) ; if ( sort ) { Collections . sort ( resultsList , new TypeComparator ( ) ) ; } return resultsList ; |
public class Grefenstette { /** * Checks to see if tag marks a phrase or clause */
private boolean isPhraseOrClause ( String tag ) { } } | // find out why adding more reduced the number of relations
return ( ! tag . equals ( "SYM" ) && tag . startsWith ( "S" ) ) || tag . equals ( "ADJP" ) || tag . equals ( "ADVP" ) || tag . equals ( "CONJP" ) || tag . equals ( "FRAG" ) || tag . equals ( "INTJ" ) || tag . equals ( "LST" ) || tag . equals ( "NAC" ) || tag . equals ( "NP" ) || tag . equals ( "NX" ) || tag . equals ( "PP" ) || tag . equals ( "PRN" ) || /* removing prt adds 1 % more relations */
tag . equals ( "PRT" ) || tag . equals ( "QP" ) || tag . equals ( "RRC" ) || tag . equals ( "UCP" ) || tag . equals ( "VP" ) || tag . startsWith ( "WH" ) || tag . equals ( "X" ) ; |
public class Consumers { /** * Yields all elements of the iterable ' s iterator ( in a list ) .
* @ param < E > the iterable element type
* @ param iterable the iterable that will be consumed
* @ return a list filled with iterable values */
public static < E > List < E > all ( Iterable < E > iterable ) { } } | dbc . precondition ( iterable != null , "cannot call all with a null iterable" ) ; final Function < Iterator < E > , ArrayList < E > > consumer = new ConsumeIntoCollection < > ( new ArrayListFactory < E > ( ) ) ; return consumer . apply ( iterable . iterator ( ) ) ; |
public class KeyRange { /** * Create a { @ link KeyRangeType # BACKWARD _ AT _ MOST } range .
* @ param < T > buffer type
* @ param stop stop key ( required )
* @ return a key range ( never null ) */
public static < T > KeyRange < T > atMostBackward ( final T stop ) { } } | return new KeyRange < > ( KeyRangeType . BACKWARD_AT_MOST , null , stop ) ; |
public class PactDslJsonArray { /** * Element that must be an ISO formatted timestamp */
public PactDslJsonArray timestamp ( ) { } } | String pattern = DateFormatUtils . ISO_DATETIME_FORMAT . getPattern ( ) ; body . put ( DateFormatUtils . ISO_DATETIME_FORMAT . format ( new Date ( DATE_2000 ) ) ) ; generators . addGenerator ( Category . BODY , rootPath + appendArrayIndex ( 0 ) , new DateTimeGenerator ( pattern ) ) ; matchers . addRule ( rootPath + appendArrayIndex ( 0 ) , matchTimestamp ( pattern ) ) ; return this ; |
public class AWSGlueClient { /** * Deletes a specified batch of versions of a table .
* @ param batchDeleteTableVersionRequest
* @ return Result of the BatchDeleteTableVersion operation returned by the service .
* @ throws EntityNotFoundException
* A specified entity does not exist
* @ throws InvalidInputException
* The input provided was not valid .
* @ throws InternalServiceException
* An internal service error occurred .
* @ throws OperationTimeoutException
* The operation timed out .
* @ sample AWSGlue . BatchDeleteTableVersion
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / glue - 2017-03-31 / BatchDeleteTableVersion " target = " _ top " > AWS
* API Documentation < / a > */
@ Override public BatchDeleteTableVersionResult batchDeleteTableVersion ( BatchDeleteTableVersionRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeBatchDeleteTableVersion ( request ) ; |
public class ParameterUtil { /** * Get parent dependent parameters
* @ param report next report object
* @ param p current parameter
* @ return a map of all parameters that are used in the source definition of the current parameter */
public static Map < String , QueryParameter > getParentDependentParameters ( Report report , QueryParameter p ) { } } | return getParentDependentParameters ( report . getParameters ( ) , p ) ; |
public class DirectedGraph { /** * Remove an edge from the graph . Nothing happens if no such edge .
* @ throws { @ link IllegalArgumentException } if either vertex doesn ' t exist . */
public void removeEdge ( V from , V to ) { } } | if ( ! containsVertex ( from ) ) { throw new IllegalArgumentException ( "Nonexistent vertex " + from ) ; } if ( ! containsVertex ( to ) ) { throw new IllegalArgumentException ( "Nonexistent vertex " + to ) ; } neighbors . get ( from ) . remove ( to ) ; |
public class SAML2AuthnResponseValidator { /** * Validate the given authnStatements :
* - authnInstant
* - sessionNotOnOrAfter
* @ param authnStatements the authn statements
* @ param context the context */
protected final void validateAuthenticationStatements ( final List < AuthnStatement > authnStatements , final SAML2MessageContext context ) { } } | for ( final AuthnStatement statement : authnStatements ) { if ( ! isAuthnInstantValid ( statement . getAuthnInstant ( ) ) ) { throw new SAMLAuthnInstantException ( "Authentication issue instant is too old or in the future" ) ; } if ( statement . getSessionNotOnOrAfter ( ) != null && statement . getSessionNotOnOrAfter ( ) . isBeforeNow ( ) ) { throw new SAMLAuthnSessionCriteriaException ( "Authentication session between IDP and subject has ended" ) ; } // TODO implement authnContext validation
} |
public class MapParser { /** * New method based on Reader . Reads the XMLDocument for a PZMetaData
* file from an InputStream , WebStart compatible . Parses the XML file , and
* returns a Map containing Lists of ColumnMetaData .
* @ param xmlStreamReader
* @ param pzparser
* Can be null . Allows additional opts to be set during the XML map read
* @ return Map & lt ; records & gt ; with their corresponding
* @ throws IOException
* @ throws SAXException
* @ throws ParserConfigurationException */
public static MetaData parseMap ( final Reader xmlStreamReader , final Parser pzparser ) throws IOException , ParserConfigurationException , SAXException { } } | final Map map = parse ( xmlStreamReader , pzparser ) ; final List < ColumnMetaData > col = ( List < ColumnMetaData > ) map . get ( FPConstants . DETAIL_ID ) ; map . remove ( FPConstants . DETAIL_ID ) ; final Map m = ( Map ) map . get ( FPConstants . COL_IDX ) ; map . remove ( FPConstants . COL_IDX ) ; // loop through the map and remove anything else that is an index of FPConstancts . COL _ IDX + _
// these were put in for the writer .
// TODO maybe these should be thrown into the MetaData instead of just discarded , but they are unused
// in the Reader the moment . This parseMap is not utilized in the writer so it is safe to remove them here
final Iterator entrySetIt = map . entrySet ( ) . iterator ( ) ; while ( entrySetIt . hasNext ( ) ) { final Entry e = ( Entry ) entrySetIt . next ( ) ; if ( ( ( String ) e . getKey ( ) ) . startsWith ( FPConstants . COL_IDX + "_" ) ) { entrySetIt . remove ( ) ; } } return new MetaData ( col , m , map ) ; |
public class Queries { /** * Returns query . getParameters ( ) { @ link Observable } but only after query
* dependencies have been fully emitted ( and ignored ) .
* @ return query parameters */
static Observable < Parameter > parametersAfterDependencies ( Query query ) { } } | return concatButIgnoreFirstSequence ( query . depends ( ) , query . parameters ( ) ) ; |
public class MockDriverRestartContext { /** * Generate a DriverRestarted event to be passed to the
* { @ link org . apache . reef . driver . parameters . DriverRestartHandler } .
* @ return DriverRestarted event based on the state at the time of driver failure */
public DriverRestarted getDriverRestarted ( ) { } } | final Set < String > expectedEvaluatorIds = new HashSet < > ( ) ; for ( final MockAllocatedEvaluator allocatedEvaluator : this . allocatedEvaluators ) { expectedEvaluatorIds . add ( allocatedEvaluator . getId ( ) ) ; } return new DriverRestarted ( ) { @ Override public int getResubmissionAttempts ( ) { return restartAttemps ; } @ Override public StartTime getStartTime ( ) { return startTime ; } @ Override public Set < String > getExpectedEvaluatorIds ( ) { return expectedEvaluatorIds ; } } ; |
public class KllFloatsSketch { /** * Updates this sketch with the given data item .
* @ param value an item from a stream of items . NaNs are ignored . */
public void update ( final float value ) { } } | if ( Float . isNaN ( value ) ) { return ; } if ( isEmpty ( ) ) { minValue_ = value ; maxValue_ = value ; } else { if ( value < minValue_ ) { minValue_ = value ; } if ( value > maxValue_ ) { maxValue_ = value ; } } if ( levels_ [ 0 ] == 0 ) { compressWhileUpdating ( ) ; } n_ ++ ; isLevelZeroSorted_ = false ; final int nextPos = levels_ [ 0 ] - 1 ; assert levels_ [ 0 ] >= 0 ; levels_ [ 0 ] = nextPos ; items_ [ nextPos ] = value ; |
public class AbstractHttpMessageConverter { /** * This implementation simple delegates to { @ link # readInternal ( Class , HttpInputMessage ) } .
* Future implementations might add some default behavior , however . */
public final T read ( Class < ? extends T > clazz , HttpInputMessage inputMessage ) throws IOException { } } | return readInternal ( clazz , inputMessage ) ; |
public class StateManager { /** * < p > Return the tree structure and component state information for the
* view contained in the specified { @ link FacesContext } instance as an
* object of type < code > StateManager . SerializedView < / code > . If there
* is no state information to be saved , return < code > null < / code >
* instead . < / p >
* < p > Components may opt out of being included in the serialized view
* by setting their < code > transient < / code > property to < code > true < / code > .
* This must cause the component itself , as well as all of that component ' s
* children and facets , to be omitted from the saved tree structure
* and component state information . < / p >
* < p > This method must also enforce the rule that , for components with
* non - null < code > id < / code > s , all components that are descendants of the
* same nearest { @ link NamingContainer } must have unique identifiers . < / p >
* @ param context { @ link FacesContext } for the current request
* @ throws IllegalStateException if more than one component or
* facet within the same { @ link NamingContainer } in this view has
* the same non - < code > null < / code > component id
* @ deprecated this has been replaced by { @ link # saveView } . The
* default implementation calls < code > saveView < / code > and inspects the
* return . If the return is an < code > Object [ ] < / code > , it casts the
* result to an < code > Object [ ] < / code > wrapping the first and second
* elements in an instance of { @ link SerializedView } , which it then
* returns . Otherwise , it returns < code > null < / code > */
public SerializedView saveSerializedView ( FacesContext context ) { } } | context . getAttributes ( ) . put ( IS_CALLED_FROM_API_CLASS , Boolean . TRUE ) ; Object stateObj = null ; try { stateObj = saveView ( context ) ; } finally { context . getAttributes ( ) . remove ( IS_CALLED_FROM_API_CLASS ) ; } SerializedView result = null ; if ( null != stateObj ) { if ( stateObj instanceof Object [ ] ) { Object [ ] state = ( Object [ ] ) stateObj ; if ( state . length == 2 ) { result = new SerializedView ( state [ 0 ] , state [ 1 ] ) ; } } } return result ; |
public class TransactionLog { /** * Maps this instance to another { @ code MapTransaction } with different key and value types .
* @ param mapper function for mapping record types
* @ param < U > record type of returned instance
* @ return newly typed instance */
public < U > TransactionLog < U > map ( Function < T , U > mapper ) { } } | return new TransactionLog < > ( transactionId , version , Lists . transform ( records , mapper :: apply ) ) ; |
public class SingularOps_DDRM { /** * TODO the number of copies can probably be reduced here */
public static void descendingOrder ( DMatrixRMaj U , boolean tranU , DMatrixRMaj W , DMatrixRMaj V , boolean tranV ) { } } | int numSingular = Math . min ( W . numRows , W . numCols ) ; checkSvdMatrixSize ( U , tranU , W , V , tranV ) ; for ( int i = 0 ; i < numSingular ; i ++ ) { double bigValue = - 1 ; int bigIndex = - 1 ; // find the smallest singular value in the submatrix
for ( int j = i ; j < numSingular ; j ++ ) { double v = W . get ( j , j ) ; if ( v > bigValue ) { bigValue = v ; bigIndex = j ; } } // only swap if the current index is not the smallest
if ( bigIndex == i ) continue ; if ( bigIndex == - 1 ) { // there is at least one uncountable singular value . just stop here
break ; } double tmp = W . get ( i , i ) ; W . set ( i , i , bigValue ) ; W . set ( bigIndex , bigIndex , tmp ) ; if ( V != null ) { swapRowOrCol ( V , tranV , i , bigIndex ) ; } if ( U != null ) { swapRowOrCol ( U , tranU , i , bigIndex ) ; } } |
public class FileLock { /** * returns the last log entry
* @ param fs
* @ param lockFile
* @ return
* @ throws IOException */
public static LogEntry getLastEntry ( FileSystem fs , Path lockFile ) throws IOException { } } | FSDataInputStream in = fs . open ( lockFile ) ; BufferedReader reader = new BufferedReader ( new InputStreamReader ( in ) ) ; String lastLine = null ; for ( String line = reader . readLine ( ) ; line != null ; line = reader . readLine ( ) ) { lastLine = line ; } return LogEntry . deserialize ( lastLine ) ; |
public class ManagedBackupShortTermRetentionPoliciesInner { /** * Updates a managed database ' s short term retention policy .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param managedInstanceName The name of the managed instance .
* @ param databaseName The name of the database .
* @ param retentionDays The backup retention period in days . This is how many days Point - in - Time Restore will be supported .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the ManagedBackupShortTermRetentionPolicyInner object if successful . */
public ManagedBackupShortTermRetentionPolicyInner createOrUpdate ( String resourceGroupName , String managedInstanceName , String databaseName , Integer retentionDays ) { } } | return createOrUpdateWithServiceResponseAsync ( resourceGroupName , managedInstanceName , databaseName , retentionDays ) . toBlocking ( ) . last ( ) . body ( ) ; |
public class InlineRendition { /** * Builds URL to rescaled version of the binary image .
* @ return Media URL */
private String buildDownloadMediaUrl ( ) { } } | String resourcePath = this . resource . getPath ( ) ; // if parent resource is a nt : file resource , use this one as path for scaled image
Resource parentResource = this . resource . getParent ( ) ; if ( JcrBinary . isNtFile ( parentResource ) ) { resourcePath = parentResource . getPath ( ) ; } // URL to render scaled image via { @ link InlineRenditionServlet }
String path = resourcePath + "." + MediaFileServlet . SELECTOR + "." + MediaFileServlet . SELECTOR_DOWNLOAD + "." + MediaFileServlet . EXTENSION + "/" + getFileName ( ) ; // build externalized URL
UrlHandler urlHandler = AdaptTo . notNull ( this . adaptable , UrlHandler . class ) ; return urlHandler . get ( path ) . urlMode ( this . mediaArgs . getUrlMode ( ) ) . buildExternalResourceUrl ( this . resource ) ; |
public class OpenIabHelper { /** * Returns a mapped application internal SKU using the store name and a store SKU .
* @ see org . onepf . oms . SkuManager # mapSku ( String , String , String )
* @ deprecated Use { @ link org . onepf . oms . SkuManager # getSku ( String , String ) } */
@ NotNull public static String getSku ( @ NotNull final String appStoreName , @ NotNull String storeSku ) { } } | return SkuManager . getInstance ( ) . getSku ( appStoreName , storeSku ) ; |
public class ProxyMetaClass { /** * Call invokeMethod on adaptee with logic like in MetaClass unless we have an Interceptor .
* With Interceptor the call is nested in its beforeInvoke and afterInvoke methods .
* The method call is suppressed if Interceptor . doInvoke ( ) returns false .
* See Interceptor for details . */
public Object invokeMethod ( final Object object , final String methodName , final Object [ ] arguments ) { } } | return doCall ( object , methodName , arguments , interceptor , new Callable ( ) { public Object call ( ) { return adaptee . invokeMethod ( object , methodName , arguments ) ; } } ) ; |
public class MappingOptionImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public void eSet ( int featureID , Object newValue ) { } } | switch ( featureID ) { case AfplibPackage . MAPPING_OPTION__MAP_VALUE : setMapValue ( ( Integer ) newValue ) ; return ; } super . eSet ( featureID , newValue ) ; |
public class ProjectApi { /** * Get a list of projects accessible by the authenticated user that match the provided search string .
* < pre > < code > GET / projects ? search = search < / code > < / pre >
* @ param search the project name search criteria
* @ return a list of projects accessible by the authenticated user that match the provided search string
* @ throws GitLabApiException if any exception occurs */
public List < Project > getProjects ( String search ) throws GitLabApiException { } } | return ( getProjects ( search , getDefaultPerPage ( ) ) . all ( ) ) ; |
public class UpdateUserRoutingProfileRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( UpdateUserRoutingProfileRequest updateUserRoutingProfileRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( updateUserRoutingProfileRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateUserRoutingProfileRequest . getRoutingProfileId ( ) , ROUTINGPROFILEID_BINDING ) ; protocolMarshaller . marshall ( updateUserRoutingProfileRequest . getUserId ( ) , USERID_BINDING ) ; protocolMarshaller . marshall ( updateUserRoutingProfileRequest . getInstanceId ( ) , INSTANCEID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class LayoutUtils { /** * Returns the JRDesignGroup for the DJGroup passed
* @ param jd
* @ param layoutManager
* @ param group
* @ return */
public static JRDesignGroup getJRDesignGroup ( DynamicJasperDesign jd , LayoutManager layoutManager , DJGroup group ) { } } | Map references = layoutManager . getReferencesMap ( ) ; for ( Object o : references . keySet ( ) ) { String groupName = ( String ) o ; DJGroup djGroup = ( DJGroup ) references . get ( groupName ) ; if ( group == djGroup ) { return ( JRDesignGroup ) jd . getGroupsMap ( ) . get ( groupName ) ; } } return null ; |
public class Transforms { /** * Floor function
* @ param ndArray
* @ return */
public static INDArray floor ( INDArray ndArray , boolean dup ) { } } | return exec ( dup ? new Floor ( ndArray , ndArray . ulike ( ) ) : new Floor ( ndArray ) ) ; |
public class DefaultImageFormats { /** * Check if the given image format is static WebP ( not animated ) .
* @ param imageFormat the image format to check
* @ return true if static WebP */
public static boolean isStaticWebpFormat ( ImageFormat imageFormat ) { } } | return imageFormat == WEBP_SIMPLE || imageFormat == WEBP_LOSSLESS || imageFormat == WEBP_EXTENDED || imageFormat == WEBP_EXTENDED_WITH_ALPHA ; |
public class Values { /** * Returns a value which is the logical NOT of the supplied value . */
public static Value < Boolean > not ( Value < Boolean > value ) { } } | return value . map ( Functions . NOT ) ; |
public class LogbackLoggingImpl { /** * Creates an { @ link OutputStreamAppender } for the required filter , pattern and logger output .
* @ param context Logger context to associate the appender with .
* @ param filter Event log filter .
* @ param logOutput Logger output information for the destination to write logger events to .
* @ param patternProperty Logger context property that defines the pattern for formatting logger event output .
* @ param name The name of the appender .
* @ return An { @ link OutputStreamAppender } for the required parameters . */
private static OutputStreamAppender < ILoggingEvent > createAppender ( LoggerContext context , Filter < ILoggingEvent > filter , LoggerOutput logOutput , String patternProperty , String name ) { } } | final PatternLayoutEncoder patternLayoutEncoder = createPatternLayoutEncoder ( context , patternProperty ) ; final OutputStreamAppender < ILoggingEvent > appender ; if ( logOutput . isConsole ( ) ) { appender = new OutputStreamAppender < > ( ) ; appender . setContext ( context ) ; appender . setEncoder ( patternLayoutEncoder ) ; appender . setOutputStream ( logOutput . getPrintStream ( ) ) ; appender . setName ( name ) ; appender . addFilter ( filter ) ; appender . start ( ) ; } else { RollingFileAppender < ILoggingEvent > rAppender = new RollingFileAppender < > ( ) ; rAppender . setContext ( context ) ; rAppender . setEncoder ( patternLayoutEncoder ) ; rAppender . setFile ( logOutput . getOutputName ( ) + "." + logOutput . getOutputType ( ) ) ; rAppender . setName ( name ) ; rAppender . addFilter ( filter ) ; final FixedWindowRollingPolicy rollingPolicy = new FixedWindowRollingPolicy ( ) ; rollingPolicy . setContext ( context ) ; rollingPolicy . setParent ( rAppender ) ; rollingPolicy . setFileNamePattern ( logOutput . getOutputName ( ) + "%i" + "." + logOutput . getOutputType ( ) ) ; rollingPolicy . setMinIndex ( 1 ) ; rollingPolicy . setMaxIndex ( logOutput . getFileCount ( ) ) ; rollingPolicy . start ( ) ; final SizeBasedTriggeringPolicy < ILoggingEvent > triggeringPolicy = new SizeBasedTriggeringPolicy < > ( ) ; triggeringPolicy . setContext ( context ) ; triggeringPolicy . setMaxFileSize ( logOutput . getFileLimit ( ) ) ; triggeringPolicy . start ( ) ; rAppender . setRollingPolicy ( rollingPolicy ) ; rAppender . setTriggeringPolicy ( triggeringPolicy ) ; rAppender . start ( ) ; appender = rAppender ; } return appender ; |
public class Transaction { /** * This method gets the lowest message priority being used in this transaction
* and as such is the JFAP priority that commit and rollback will be sent as .
* @ return short */
public short getLowestMessagePriority ( ) { } } | if ( tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "getLowestMessagePriority" ) ; if ( tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "getLowestMessagePriority" , "" + lowestPriority ) ; return lowestPriority ; |
public class Objects { /** * Resolves type and returns casted object
* @ param obj
* @ param type
* @ return */
public static < T > T cast ( Object obj , String type ) { } } | return ( T ) cast ( obj , resolveType ( type ) ) ; |
public class FailsafeExecutor { /** * Executes the { @ code runnable } asynchronously until successful or until the configured policies are exceeded .
* If a configured circuit breaker is open , the resulting future is completed with { @ link
* CircuitBreakerOpenException } .
* @ throws NullPointerException if the { @ code runnable } is null
* @ throws RejectedExecutionException if the { @ code runnable } cannot be scheduled for execution */
public CompletableFuture < Void > runAsync ( CheckedRunnable runnable ) { } } | return callAsync ( execution -> Functions . promiseOf ( runnable , execution ) , false ) ; |
public class PasswordPolicyService { /** * Returns whether the given user ' s password is expired due to the password
* aging policy .
* @ param user
* The user to check .
* @ return
* true if the user needs to change their password to comply with the
* password aging policy , false otherwise .
* @ throws GuacamoleException
* If the password policy cannot be parsed . */
public boolean isPasswordExpired ( ModeledUser user ) throws GuacamoleException { } } | // Retrieve password policy from environment
PasswordPolicy policy = environment . getPasswordPolicy ( ) ; // There is no maximum password age if 0
int maxPasswordAge = policy . getMaximumAge ( ) ; if ( maxPasswordAge == 0 ) return false ; // Determine whether password is expired based on maximum age
return getPasswordAge ( user ) >= maxPasswordAge ; |
public class SymbolTable { /** * Gets the symbol for the given constructor or interface . */
public Symbol getSymbolDeclaredBy ( FunctionType fn ) { } } | checkState ( fn . isConstructor ( ) || fn . isInterface ( ) ) ; ObjectType instanceType = fn . getInstanceType ( ) ; return getSymbolForName ( fn . getSource ( ) , instanceType . getReferenceName ( ) ) ; |
public class BurninDestinationSettingsMarshaller { /** * Marshall the given parameter object . */
public void marshall ( BurninDestinationSettings burninDestinationSettings , ProtocolMarshaller protocolMarshaller ) { } } | if ( burninDestinationSettings == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( burninDestinationSettings . getAlignment ( ) , ALIGNMENT_BINDING ) ; protocolMarshaller . marshall ( burninDestinationSettings . getBackgroundColor ( ) , BACKGROUNDCOLOR_BINDING ) ; protocolMarshaller . marshall ( burninDestinationSettings . getBackgroundOpacity ( ) , BACKGROUNDOPACITY_BINDING ) ; protocolMarshaller . marshall ( burninDestinationSettings . getFontColor ( ) , FONTCOLOR_BINDING ) ; protocolMarshaller . marshall ( burninDestinationSettings . getFontOpacity ( ) , FONTOPACITY_BINDING ) ; protocolMarshaller . marshall ( burninDestinationSettings . getFontResolution ( ) , FONTRESOLUTION_BINDING ) ; protocolMarshaller . marshall ( burninDestinationSettings . getFontScript ( ) , FONTSCRIPT_BINDING ) ; protocolMarshaller . marshall ( burninDestinationSettings . getFontSize ( ) , FONTSIZE_BINDING ) ; protocolMarshaller . marshall ( burninDestinationSettings . getOutlineColor ( ) , OUTLINECOLOR_BINDING ) ; protocolMarshaller . marshall ( burninDestinationSettings . getOutlineSize ( ) , OUTLINESIZE_BINDING ) ; protocolMarshaller . marshall ( burninDestinationSettings . getShadowColor ( ) , SHADOWCOLOR_BINDING ) ; protocolMarshaller . marshall ( burninDestinationSettings . getShadowOpacity ( ) , SHADOWOPACITY_BINDING ) ; protocolMarshaller . marshall ( burninDestinationSettings . getShadowXOffset ( ) , SHADOWXOFFSET_BINDING ) ; protocolMarshaller . marshall ( burninDestinationSettings . getShadowYOffset ( ) , SHADOWYOFFSET_BINDING ) ; protocolMarshaller . marshall ( burninDestinationSettings . getTeletextSpacing ( ) , TELETEXTSPACING_BINDING ) ; protocolMarshaller . marshall ( burninDestinationSettings . getXPosition ( ) , XPOSITION_BINDING ) ; protocolMarshaller . 
marshall ( burninDestinationSettings . getYPosition ( ) , YPOSITION_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class FileMgr { /** * Appends the contents of a byte buffer to the end of the specified file .
* @ param fileName
* the name of the file
* @ param buffer
* the byte buffer
* @ return a block ID refers to the newly - created block . */
BlockId append ( String fileName , IoBuffer buffer ) { } } | try { IoChannel fileChannel = getFileChannel ( fileName ) ; // Rewind the buffer for writing
buffer . rewind ( ) ; // Append the block to the file
long newSize = fileChannel . append ( buffer ) ; // Return the new block id
return new BlockId ( fileName , newSize / BLOCK_SIZE - 1 ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; return null ; } |
public class TracedParallelBatch { /** * Create a { @ link TracedParallelBatch } for list of { @ link Promise } s ,
* with the specified trace context .
* @ param context the trace context .
* @ param promises vararg containing a list of { @ link Promise } s .
* @ param < T > the type of value produced by each promise
* @ return an instance of { @ link ParallelBatch } . */
public static < T > ParallelBatch < T > of ( final TraceContext context , Promise < T > ... promises ) { } } | return of ( context , Arrays . asList ( promises ) ) ; |
public class SliceOps { /** * Appends a " slice " operation to the provided LongStream . The slice
* operation may be may be skip - only , limit - only , or skip - and - limit .
* @ param upstream A LongStream
* @ param skip The number of elements to skip . Must be > = 0.
* @ param limit The maximum size of the resulting stream , or - 1 if no limit
* is to be imposed */
public static LongStream makeLong ( AbstractPipeline < ? , Long , ? > upstream , long skip , long limit ) { } } | if ( skip < 0 ) throw new IllegalArgumentException ( "Skip must be non-negative: " + skip ) ; return new LongPipeline . StatefulOp < Long > ( upstream , StreamShape . LONG_VALUE , flags ( limit ) ) { Spliterator . OfLong unorderedSkipLimitSpliterator ( Spliterator . OfLong s , long skip , long limit , long sizeIfKnown ) { if ( skip <= sizeIfKnown ) { // Use just the limit if the number of elements
// to skip is < = the known pipeline size
limit = limit >= 0 ? Math . min ( limit , sizeIfKnown - skip ) : sizeIfKnown - skip ; skip = 0 ; } return new StreamSpliterators . UnorderedSliceSpliterator . OfLong ( s , skip , limit ) ; } @ Override public < P_IN > Spliterator < Long > opEvaluateParallelLazy ( PipelineHelper < Long > helper , Spliterator < P_IN > spliterator ) { long size = helper . exactOutputSizeIfKnown ( spliterator ) ; if ( size > 0 && spliterator . hasCharacteristics ( Spliterator . SUBSIZED ) ) { return new StreamSpliterators . SliceSpliterator . OfLong ( ( Spliterator . OfLong ) helper . wrapSpliterator ( spliterator ) , skip , calcSliceFence ( skip , limit ) ) ; } else if ( ! StreamOpFlag . ORDERED . isKnown ( helper . getStreamAndOpFlags ( ) ) ) { return unorderedSkipLimitSpliterator ( ( Spliterator . OfLong ) helper . wrapSpliterator ( spliterator ) , skip , limit , size ) ; } else { return new SliceTask < > ( this , helper , spliterator , Long [ ] :: new , skip , limit ) . invoke ( ) . spliterator ( ) ; } } @ Override public < P_IN > Node < Long > opEvaluateParallel ( PipelineHelper < Long > helper , Spliterator < P_IN > spliterator , IntFunction < Long [ ] > generator ) { long size = helper . exactOutputSizeIfKnown ( spliterator ) ; if ( size > 0 && spliterator . hasCharacteristics ( Spliterator . SUBSIZED ) ) { // Because the pipeline is SIZED the slice spliterator
// can be created from the source , this requires matching
// to shape of the source , and is potentially more efficient
// than creating the slice spliterator from the pipeline
// wrapping spliterator
Spliterator < P_IN > s = sliceSpliterator ( helper . getSourceShape ( ) , spliterator , skip , limit ) ; return Nodes . collectLong ( helper , s , true ) ; } else if ( ! StreamOpFlag . ORDERED . isKnown ( helper . getStreamAndOpFlags ( ) ) ) { Spliterator . OfLong s = unorderedSkipLimitSpliterator ( ( Spliterator . OfLong ) helper . wrapSpliterator ( spliterator ) , skip , limit , size ) ; // Collect using this pipeline , which is empty and therefore
// can be used with the pipeline wrapping spliterator
// Note that we cannot create a slice spliterator from
// the source spliterator if the pipeline is not SIZED
return Nodes . collectLong ( this , s , true ) ; } else { return new SliceTask < > ( this , helper , spliterator , generator , skip , limit ) . invoke ( ) ; } } @ Override public Sink < Long > opWrapSink ( int flags , Sink < Long > sink ) { return new Sink . ChainedLong < Long > ( sink ) { long n = skip ; long m = limit >= 0 ? limit : Long . MAX_VALUE ; @ Override public void begin ( long size ) { downstream . begin ( calcSize ( size , skip , m ) ) ; } @ Override public void accept ( long t ) { if ( n == 0 ) { if ( m > 0 ) { m -- ; downstream . accept ( t ) ; } } else { n -- ; } } @ Override public boolean cancellationRequested ( ) { return m == 0 || downstream . cancellationRequested ( ) ; } } ; } } ; |
public class AmazonWorkspacesClient { /** * Retrieves a list that describes modifications to the configuration of bring your own license ( BYOL ) for the
* specified account .
* @ param describeAccountModificationsRequest
* @ return Result of the DescribeAccountModifications operation returned by the service .
* @ throws AccessDeniedException
* The user is not authorized to access a resource .
* @ sample AmazonWorkspaces . DescribeAccountModifications
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / workspaces - 2015-04-08 / DescribeAccountModifications "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DescribeAccountModificationsResult describeAccountModifications ( DescribeAccountModificationsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDescribeAccountModifications ( request ) ; |
public class LdapUtils { /** * Builds a new request .
* @ param baseDn the base dn
* @ param filter the filter
* @ param binaryAttributes the binary attributes
* @ param returnAttributes the return attributes
* @ return the search request */
public static SearchRequest newLdaptiveSearchRequest ( final String baseDn , final SearchFilter filter , final String [ ] binaryAttributes , final String [ ] returnAttributes ) { } } | val sr = new SearchRequest ( baseDn , filter ) ; sr . setBinaryAttributes ( binaryAttributes ) ; sr . setReturnAttributes ( returnAttributes ) ; sr . setSearchScope ( SearchScope . SUBTREE ) ; return sr ; |
public class CmsSitemapTreeItem { /** * Updates the recursively the site path . < p >
* @ param sitePath the new site path to set */
public void updateSitePath ( String sitePath ) { } } | String newSubTitle = getDisplayedUrl ( sitePath ) ; removeInvalidChildren ( ) ; getListItemWidget ( ) . setSubtitleLabel ( newSubTitle ) ; String name = getName ( sitePath ) ; setId ( name ) ; getListItemWidget ( ) . setAdditionalInfoValue ( 1 , name ) ; if ( getLoadState ( ) == LoadState . LOADED ) { for ( int i = 0 ; i < getChildCount ( ) ; i ++ ) { CmsSitemapTreeItem item = ( CmsSitemapTreeItem ) getChild ( i ) ; if ( ( item != null ) && ( CmsSitemapView . getInstance ( ) . getController ( ) . getEntryById ( item . getEntryId ( ) ) != null ) ) { String path = CmsStringUtil . joinPaths ( sitePath , CmsResource . getName ( item . getSitePath ( ) ) ) ; item . updateSitePath ( path ) ; } } } getListItemWidget ( ) . updateTruncation ( ) ; |
public class MergeableManifest2 { /** * Add the set with given bundles to the " Export - Package " main attribute .
* @ param exportedPackages The set of all packages to add . */
public void addExportedPackages ( Set < String > exportedPackages ) { } } | addExportedPackages ( exportedPackages . toArray ( new String [ exportedPackages . size ( ) ] ) ) ; |
public class DurationRangeMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DurationRange durationRange , ProtocolMarshaller protocolMarshaller ) { } } | if ( durationRange == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( durationRange . getMinSeconds ( ) , MINSECONDS_BINDING ) ; protocolMarshaller . marshall ( durationRange . getMaxSeconds ( ) , MAXSECONDS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class JSMarshaller { /** * Turn special characters into escaped characters conforming to JavaScript .
* Handles complete character set defined in HTML 4.01 recommendation . < br >
* Reference : < a href =
* " http : / / developer . mozilla . org / en / docs / Core _ JavaScript _ 1.5 _ Guide : Literals # String _ Literals "
* > Core JavaScript 1.5 Guide < / a >
* @ param sInput
* the input string
* @ return the escaped string */
@ Nullable public static String javaScriptEscape ( @ Nullable final String sInput ) { } } | if ( StringHelper . hasNoText ( sInput ) ) return sInput ; final char [ ] aInput = sInput . toCharArray ( ) ; if ( ! StringHelper . containsAny ( aInput , CHARS_TO_MASK ) ) return sInput ; final char [ ] ret = new char [ aInput . length * 2 ] ; int nIndex = 0 ; char cPrevChar = '\u0000' ; for ( final char cCurrent : aInput ) { switch ( cCurrent ) { case '"' : case '\'' : case '\\' : case '/' : ret [ nIndex ++ ] = MASK_CHAR ; ret [ nIndex ++ ] = cCurrent ; break ; case '\t' : ret [ nIndex ++ ] = MASK_CHAR ; ret [ nIndex ++ ] = 't' ; break ; case '\n' : if ( cPrevChar != '\r' ) { ret [ nIndex ++ ] = MASK_CHAR ; ret [ nIndex ++ ] = 'n' ; } break ; case '\r' : ret [ nIndex ++ ] = MASK_CHAR ; ret [ nIndex ++ ] = 'n' ; break ; case '\f' : ret [ nIndex ++ ] = MASK_CHAR ; ret [ nIndex ++ ] = 'f' ; break ; default : ret [ nIndex ++ ] = cCurrent ; break ; } cPrevChar = cCurrent ; } return new String ( ret , 0 , nIndex ) ; |
public class MappingAssistantPanel { /** * GEN - LAST : event _ txtRowCountFocusLost */
private void txtRowCountKeyPressed ( java . awt . event . KeyEvent evt ) { } } | // GEN - FIRST : event _ txtRowCountKeyPressed
int code = evt . getKeyCode ( ) ; if ( code == KeyEvent . VK_ENTER ) { txtRowCount . transferFocus ( ) ; } |
public class Expressions { /** * Create a new Path expression
* @ param type type of expression
* @ param variable variable name
* @ return path expression */
public static < T extends Comparable < ? > > TimePath < T > timePath ( Class < ? extends T > type , String variable ) { } } | return new TimePath < T > ( type , PathMetadataFactory . forVariable ( variable ) ) ; |
public class DatastoreImpl { /** * Creates a copy of this Datastore and all its configuration but with a new database
* @ param database the new database to use for operations
* @ return the new Datastore instance
* @ deprecated use { @ link Morphia # createDatastore ( MongoClient , Mapper , String ) } */
@ Deprecated public DatastoreImpl copy ( final String database ) { } } | return new DatastoreImpl ( morphia , mapper , mongoClient , database ) ; |
public class JsonMapper { /** * Object可以是POJO , 也可以是Collection或数组 。 如果对象为Null , 返回 " null " . 如果集合为空集合 , 返回 " [ ] " . */
public String toJson ( Object object ) { } } | try { return mapper . writeValueAsString ( object ) ; } catch ( IOException e ) { logger . warn ( "write to json string error:" + object , e ) ; return null ; } |
public class AmazonPinpointClient { /** * Creates or updates an import job .
* @ param createImportJobRequest
* @ return Result of the CreateImportJob operation returned by the service .
* @ throws BadRequestException
* 400 response
* @ throws InternalServerErrorException
* 500 response
* @ throws ForbiddenException
* 403 response
* @ throws NotFoundException
* 404 response
* @ throws MethodNotAllowedException
* 405 response
* @ throws TooManyRequestsException
* 429 response
* @ sample AmazonPinpoint . CreateImportJob
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / pinpoint - 2016-12-01 / CreateImportJob " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public CreateImportJobResult createImportJob ( CreateImportJobRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeCreateImportJob ( request ) ; |
public class ProxyReceiveListener { /** * SIB0115d . comms start */
private void processAsyncSessionStoppedCallback ( CommsByteBuffer buffer , Conversation conversation ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "processAsyncSessionStoppedCallback" , new Object [ ] { buffer , conversation } ) ; final short connectionObjectId = buffer . getShort ( ) ; final short clientSessionId = buffer . getShort ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( this , tc , "connectionObjectId=" + connectionObjectId ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( this , tc , "clientSessionId=" + clientSessionId ) ; // Obtain the proxy queue group
final ClientConversationState convState = ( ClientConversationState ) conversation . getAttachment ( ) ; final ProxyQueueConversationGroup pqcg = convState . getProxyQueueConversationGroup ( ) ; if ( pqcg == null ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( this , tc , "ProxyQueueConversationGroup=null" ) ; SIErrorException e = new SIErrorException ( nls . getFormattedMessage ( "NULL_PROXY_QUEUE_CONV_GROUP_CWSICO8020" , new Object [ ] { } , null ) ) ; FFDCFilter . processException ( e , CLASS_NAME + ".processAsyncSessionStoppedCallback" , CommsConstants . PROXYRECEIVELISTENER_SESSION_STOPPED_01 , this ) ; SibTr . error ( tc , "NULL_PROXY_QUEUE_CONV_GROUP_CWSICO8020" , e ) ; throw e ; } // Obtain the required proxy queue from the proxy queue group and ensure its of the right class ( ie not read ahead )
final ProxyQueue proxyQueue = pqcg . find ( clientSessionId ) ; if ( proxyQueue instanceof AsynchConsumerProxyQueue ) { final ConsumerSessionProxy consumerSessionProxy = ( ( AsynchConsumerProxyQueue ) proxyQueue ) . getConsumerSessionProxy ( ) ; // Drive the ConsumerSessionProxy . stoppableConsumerSessionStopped method on a different thread .
ClientAsynchEventThreadPool . getInstance ( ) . dispatchStoppableConsumerSessionStopped ( consumerSessionProxy ) ; } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( this , tc , "proxyQueue not an instance of AsynchConsumerProxyQueue is an instance of " + proxyQueue . getClass ( ) . getName ( ) ) ; SIErrorException e = new SIErrorException ( nls . getFormattedMessage ( "WRONG_CLASS_CWSICO8021" , new Object [ ] { proxyQueue . getClass ( ) . getName ( ) } , null ) ) ; FFDCFilter . processException ( e , CLASS_NAME + ".processAsyncSessionStoppedCallback" , CommsConstants . PROXYRECEIVELISTENER_SESSION_STOPPED_02 , this ) ; SibTr . error ( tc , "WRONG_CLASS_CWSICO8021" , e ) ; throw e ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "processAsyncSessionStoppedCallback" ) ; |
public class ListOfferingPromotionsResult { /** * Information about the offering promotions .
* @ param offeringPromotions
* Information about the offering promotions . */
public void setOfferingPromotions ( java . util . Collection < OfferingPromotion > offeringPromotions ) { } } | if ( offeringPromotions == null ) { this . offeringPromotions = null ; return ; } this . offeringPromotions = new java . util . ArrayList < OfferingPromotion > ( offeringPromotions ) ; |
public class WeakHashMap { /** * Associates the specified value with the specified key in this map .
* If the map previously contained a mapping for this key , the old
* value is replaced .
* @ param key key with which the specified value is to be associated .
* @ param value value to be associated with the specified key .
* @ return the previous value associated with < tt > key < / tt > , or
* < tt > null < / tt > if there was no mapping for < tt > key < / tt > .
* ( A < tt > null < / tt > return can also indicate that the map
* previously associated < tt > null < / tt > with < tt > key < / tt > . ) */
public V put ( K key , V value ) { } } | Object k = maskNull ( key ) ; int h = sun . misc . Hashing . singleWordWangJenkinsHash ( k ) ; Entry < K , V > [ ] tab = getTable ( ) ; int i = indexFor ( h , tab . length ) ; for ( Entry < K , V > e = tab [ i ] ; e != null ; e = e . next ) { if ( h == e . hash && eq ( k , e . get ( ) ) ) { V oldValue = e . value ; if ( value != oldValue ) e . value = value ; return oldValue ; } } modCount ++ ; Entry < K , V > e = tab [ i ] ; tab [ i ] = new Entry < > ( k , value , queue , h , e ) ; if ( ++ size >= threshold ) resize ( tab . length * 2 ) ; return null ; |
public class GaussianDistribution { /** * This is the probability density function for the Gaussian
* distribution . */
private double getExpectedProbability ( double x ) { } } | double y = 1 / ( standardDeviation * Math . sqrt ( Math . PI * 2 ) ) ; double z = - ( Math . pow ( x - mean , 2 ) / ( 2 * Math . pow ( standardDeviation , 2 ) ) ) ; return y * Math . exp ( z ) ; |
public class PhysicalConnectionRequirements { /** * The security group ID list used by the connection .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setSecurityGroupIdList ( java . util . Collection ) } or { @ link # withSecurityGroupIdList ( java . util . Collection ) }
* if you want to override the existing values .
* @ param securityGroupIdList
* The security group ID list used by the connection .
* @ return Returns a reference to this object so that method calls can be chained together . */
public PhysicalConnectionRequirements withSecurityGroupIdList ( String ... securityGroupIdList ) { } } | if ( this . securityGroupIdList == null ) { setSecurityGroupIdList ( new java . util . ArrayList < String > ( securityGroupIdList . length ) ) ; } for ( String ele : securityGroupIdList ) { this . securityGroupIdList . add ( ele ) ; } return this ; |
public class GhprbRepository { /** * comments / hashes that have been added since the last time we checked . */
public void check ( ) { } } | if ( ! trigger . isActive ( ) ) { LOGGER . log ( Level . FINE , "Project is not active, not checking github state" ) ; return ; } if ( ! initGhRepository ( ) ) { return ; } GHRepository repo = getGitHubRepo ( ) ; List < GHPullRequest > openPulls ; try { openPulls = repo . getPullRequests ( GHIssueState . OPEN ) ; } catch ( IOException ex ) { LOGGER . log ( Level . SEVERE , "Could not retrieve open pull requests." , ex ) ; return ; } Set < Integer > closedPulls = new HashSet < Integer > ( pullRequests . keySet ( ) ) ; for ( GHPullRequest pr : openPulls ) { if ( pr . getHead ( ) == null ) { // Not sure if we need this , but leaving it for now .
try { pr = getActualPullRequest ( pr . getNumber ( ) ) ; } catch ( IOException ex ) { LOGGER . log ( Level . SEVERE , "Could not retrieve pr " + pr . getNumber ( ) , ex ) ; return ; } } check ( pr ) ; closedPulls . remove ( pr . getNumber ( ) ) ; } // remove closed pulls so we don ' t check them again
for ( Integer id : closedPulls ) { pullRequests . remove ( id ) ; } try { this . save ( ) ; } catch ( IOException e ) { LOGGER . log ( Level . SEVERE , "Unable to save repository!" , e ) ; } |
public class InteractionSpec { /** * Reorders columns of a Frame so that columns that only used to make interactions
* are at the end of the Frame . Only Vecs that will actually be used are kept in the frame .
* @ param f frame to adjust
* @ return reordered frame */
public Frame reorderColumns ( Frame f ) { } } | if ( ( _interactionsOnly == null ) || ( f == null ) ) return f ; Vec [ ] interOnlyVecs = f . vecs ( _interactionsOnly ) ; f . remove ( _interactionsOnly ) ; for ( int i = 0 ; i < _interactionsOnly . length ; i ++ ) { if ( isUsed ( _interactionsOnly [ i ] ) ) { f . add ( _interactionsOnly [ i ] , interOnlyVecs [ i ] ) ; } else if ( ! isIgnored ( _interactionsOnly [ i ] ) ) { Log . warn ( "Column '" + _interactionsOnly [ i ] + "' was marked to be used for interactions only " + "but it is not actually required in any interaction." ) ; } } return f ; |
public class Tokenizer { /**
 * {@inheritDoc}
 *
 * Reads one full CSV row (possibly spanning several physical lines when a
 * quoted cell contains newlines) into the supplied list of columns.
 * Returns false at end of file. Honors strict-quote mode, surrounding-space
 * trimming, an escape character, comment lines, header skipping, and a
 * configurable column size limit (throws SuperCsvException when exceeded). */
@ Override public boolean readColumns ( final List < String > columns ) throws IOException { } } | if ( columns == null ) { throw new NullPointerException ( "columns should not be null" ) ; } // clear the reusable List and StringBuilders
columns . clear ( ) ; currentColumn . setLength ( 0 ) ; currentRow . setLength ( 0 ) ; // keep reading lines until data is found
String line ; do { line = readLine ( ) ; if ( line == null ) { return false ; // EOF
} } while ( line . length ( ) == 0 || line . trim ( ) . isEmpty ( ) || ( commentMatcher != null && commentMatcher . isComment ( line ) ) || ( ! header && skip >= getLineNumber ( ) ) ) ; if ( header ) { header = false ; skip ++ ; } // update the untokenized CSV row
currentRow . append ( line ) ; // add a newline to determine end of line ( making parsing easier )
line += NEWLINE ; // process each character in the line , catering for surrounding quotes ( QUOTE _ MODE )
TokenizerState state = TokenizerState . NORMAL ; int quoteScopeStartingLine = - 1 ; // the line number where a potential multi - line cell starts
int potentialSpaces = 0 ; // keep track of spaces ( so leading / trailing space can be removed if required )
int charIndex = 0 ; boolean espectQuote = this . strictQuotes ; boolean isEscape = false ; boolean sawNewLineInQuote = false ; while ( true ) { final char c = line . charAt ( charIndex ) ; if ( TokenizerState . NORMAL . equals ( state ) ) { /* * NORMAL mode ( not within quotes ) . */
if ( isEscape ) { /* * Just a normal character . Add any required spaces ( but trim any leading spaces if surrounding
* spaces need quotes ) , add the character , then continue to next character . */
isEscape = false ; if ( this . strictQuotes && espectQuote ) { throw new SuperCsvException ( String . format ( "strictQuotes: quotes needed at line %d column %d. To proceed, " + "either quote the column or remove --strictquotes" , getLineNumber ( ) , columns . size ( ) + 1 ) ) ; } if ( ! surroundingSpacesNeedQuotes || currentColumn . length ( ) > 0 ) { appendSpaces ( currentColumn , potentialSpaces ) ; } potentialSpaces = 0 ; currentColumn . append ( c ) ; } else if ( c == escapeChar && ! ( line . charAt ( charIndex + 1 ) == 'N' ) ) { isEscape = true ; } else if ( c == delimeterChar ) { /* * Delimiter . Save the column ( trim trailing space if required ) then continue to next character . */
espectQuote = true ; if ( ! surroundingSpacesNeedQuotes ) { appendSpaces ( currentColumn , potentialSpaces ) ; } columns . add ( currentColumn . length ( ) > 0 ? currentColumn . toString ( ) : null ) ; // " " - > null
potentialSpaces = 0 ; currentColumn . setLength ( 0 ) ; } else if ( c == SPACE ) { /* * Space . Remember it , then continue to next character . */
potentialSpaces ++ ; } else if ( c == NEWLINE ) { /* * Newline . Add any required spaces ( if surrounding spaces don ' t need quotes ) and return ( we ' ve read
* a line ! ) . */
if ( ! surroundingSpacesNeedQuotes ) { appendSpaces ( currentColumn , potentialSpaces ) ; } columns . add ( currentColumn . length ( ) > 0 ? currentColumn . toString ( ) : null ) ; // " " - > null
return true ; } else if ( c == quoteChar ) { /* * A single quote ( " ) . Update to QUOTESCOPE ( but don ' t save quote ) , then continue to next character . */
espectQuote = false ; state = TokenizerState . QUOTE_MODE ; quoteScopeStartingLine = getLineNumber ( ) ; // cater for spaces before a quoted section ( be lenient ! )
if ( ! surroundingSpacesNeedQuotes || currentColumn . length ( ) > 0 ) { appendSpaces ( currentColumn , potentialSpaces ) ; } potentialSpaces = 0 ; } else { /* * Just a normal character . Add any required spaces ( but trim any leading spaces if surrounding
* spaces need quotes ) , add the character , then continue to next character . */
if ( this . strictQuotes && espectQuote ) { throw new SuperCsvException ( String . format ( "strictQuotes: quotes needed at line %d column %d. To proceed, " + "either quote the column or remove --strictquotes" , getLineNumber ( ) , columns . size ( ) + 1 ) ) ; } if ( ! surroundingSpacesNeedQuotes || currentColumn . length ( ) > 0 ) { appendSpaces ( currentColumn , potentialSpaces ) ; } potentialSpaces = 0 ; currentColumn . append ( c ) ; } } else { /* * QUOTE _ MODE ( within quotes ) . */
if ( sawNewLineInQuote ) { if ( currentColumn . length ( ) > columnSizeLimit ) { state = TokenizerState . NORMAL ; sawNewLineInQuote = false ; throw new SuperCsvException ( String . format ( "oversized column while reading quoted column %d beginning on line %d and ending on line %d. " + "See --columnsizelimit." , columns . size ( ) + 1 , quoteScopeStartingLine , getLineNumber ( ) ) ) ; } } if ( c == NEWLINE ) { /* * Newline . Doesn ' t count as newline while in QUOTESCOPE . Add the newline char , reset the charIndex
* ( will update to 0 for next iteration ) , read in the next line , then then continue to next
* character . For a large file with an unterminated quoted section ( no trailing quote ) , this could
* cause memory issues as it will keep reading lines looking for the trailing quote . Maybe there
* should be a configurable limit on max lines to read in quoted mode ?
* Yes I ' ll set the limit to be 16*1024*1024B = 16MB by default */
if ( currentColumn . length ( ) > columnSizeLimit ) { state = TokenizerState . NORMAL ; sawNewLineInQuote = false ; throw new SuperCsvException ( String . format ( "oversized column while reading quoted column %d beginning on line %d and ending on line %d. " + "See --columnsizelimit." , columns . size ( ) + 1 , quoteScopeStartingLine , getLineNumber ( ) ) ) ; } sawNewLineInQuote = true ; currentColumn . append ( NEWLINE ) ; currentRow . append ( NEWLINE ) ; // specific line terminator lost , \ n will have to suffice
charIndex = - 1 ; line = readLine ( ) ; if ( line == null ) { throw new SuperCsvException ( String . format ( "unexpected end of file while reading quoted column %d beginning on line %d and ending on line %d" , columns . size ( ) + 1 , quoteScopeStartingLine , getLineNumber ( ) ) ) ; } currentRow . append ( line ) ; // update untokenized CSV row
line += NEWLINE ; // add newline to simplify parsing
} else if ( c == quoteChar ) { if ( line . charAt ( charIndex + 1 ) == quoteChar ) { /* * An escaped quote ( " " ) . Add a single quote , then move the cursor so the next iteration of the
* loop will read the character following the escaped quote . */
currentColumn . append ( c ) ; charIndex ++ ; } else { /* * A single quote ( " ) . Update to NORMAL ( but don ' t save quote ) , then continue to next character . */
state = TokenizerState . NORMAL ; sawNewLineInQuote = false ; quoteScopeStartingLine = - 1 ; // reset ready for next multi - line cell
// Check that we haven ' t gone over the column size limit
if ( currentColumn . length ( ) > columnSizeLimit ) { state = TokenizerState . NORMAL ; sawNewLineInQuote = false ; throw new SuperCsvException ( String . format ( "oversized column while reading quoted column %d beginning on line %d and ending on line %d. " + "See --columnsizelimit." , columns . size ( ) + 1 , quoteScopeStartingLine , getLineNumber ( ) ) ) ; } } } else { /* * Just a normal character , delimiter ( they don ' t count in QUOTESCOPE ) or space . Add the character ,
* then continue to next character . */
currentColumn . append ( c ) ; } } charIndex ++ ; // read next char of the line
} |
public class RuntimeManagerMain { /** * construct command line help options */
private static Options constructHelpOptions ( ) { } } | Options options = new Options ( ) ; Option help = Option . builder ( "h" ) . desc ( "List all options and their description" ) . longOpt ( "help" ) . build ( ) ; options . addOption ( help ) ; return options ; |
public class ConvertImage { /** * Converts a { @ link InterleavedS8 } into a { @ link GrayS8 } by computing the average value of each pixel
* across all the bands .
* @ param input ( Input ) The ImageInterleaved that is being converted . Not modified .
* @ param output ( Optional ) The single band output image . If null a new image is created . Modified .
* @ return Converted image . */
public static GrayS8 average ( InterleavedS8 input , GrayS8 output ) { } } | if ( output == null ) { output = new GrayS8 ( input . width , input . height ) ; } else { output . reshape ( input . width , input . height ) ; } if ( BoofConcurrency . USE_CONCURRENT ) { ConvertInterleavedToSingle_MT . average ( input , output ) ; } else { ConvertInterleavedToSingle . average ( input , output ) ; } return output ; |
public class EntityMention { /**
 * Setter for the mention_type feature (JCas generated accessor).
 * Verifies the feature exists in the type system, then writes the value
 * through the low-level CAS API.
 * @generated
 * @param v value to set into the feature */
public void setMention_type ( String v ) { } } | if ( EntityMention_Type . featOkTst && ( ( EntityMention_Type ) jcasType ) . casFeat_mention_type == null ) jcasType . jcas . throwFeatMissing ( "mention_type" , "de.julielab.jules.types.ace.EntityMention" ) ; jcasType . ll_cas . ll_setStringValue ( addr , ( ( EntityMention_Type ) jcasType ) . casFeatCode_mention_type , v ) ;
public class MethodTarget { /** * Construct a MethodTarget for the unique method named { @ literal name } on the given object . Fails with an exception
* in case of overloaded method . */
public static MethodTarget of ( String name , Object bean , Help help , Supplier < Availability > availabilityIndicator ) { } } | Set < Method > found = new HashSet < > ( ) ; ReflectionUtils . doWithMethods ( bean . getClass ( ) , found :: add , m -> m . getName ( ) . equals ( name ) ) ; if ( found . size ( ) != 1 ) { throw new IllegalArgumentException ( String . format ( "Could not find unique method named '%s' on object of class %s. Found %s" , name , bean . getClass ( ) , found ) ) ; } return new MethodTarget ( found . iterator ( ) . next ( ) , bean , help , availabilityIndicator ) ; |
public class FramesHandler { /** * TODO : almost identical to ModelsHandler ; refactor */
public static Frame getFromDKV ( String param_name , String key_str ) { } } | return getFromDKV ( param_name , Key . make ( key_str ) ) ; |
public class GVRComponent { /** * Attach this component to a scene object .
* @ param owner scene object to become new owner . */
public void setOwnerObject ( GVRSceneObject owner ) { } } | if ( owner != null ) { if ( getNative ( ) != 0 ) { NativeComponent . setOwnerObject ( getNative ( ) , owner . getNative ( ) ) ; } this . owner = owner ; onAttach ( owner ) ; } else { if ( null != this . owner ) { onDetach ( this . owner ) ; if ( getNative ( ) != 0 ) { NativeComponent . setOwnerObject ( getNative ( ) , 0L ) ; } this . owner = null ; } } |
public class Configuration { /**
 * This is a manual implementation of the following regex
 * "\\$\\{[^\\}\\$\u0020]+\\}". It can be 15x more efficient than
 * a regex matcher as demonstrated by HADOOP-11506. This is noticeable with
 * Hadoop apps building on the assumption Configuration#get is an O(1)
 * hash table lookup, especially when the eval is a long string.
 *
 * @param eval a string that may contain variables requiring expansion.
 * @return a 2-element int array res such that
 *         eval.substring(res[0], res[1]) is "var" for the left-most occurrence
 *         of ${var} in eval. If no variable is found {-1, -1} is returned. */
private static int [ ] findSubVariable ( String eval ) { } } | int [ ] result = { - 1 , - 1 } ; int matchStart ; int leftBrace ; // scanning for a brace first because it ' s less frequent than $
// that can occur in nested class names
match_loop : for ( matchStart = 1 , leftBrace = eval . indexOf ( '{' , matchStart ) ; // minimum left brace position ( follows ' $ ' )
leftBrace > 0 // right brace of a smallest valid expression " $ { c } "
&& leftBrace + "{c" . length ( ) < eval . length ( ) ; leftBrace = eval . indexOf ( '{' , matchStart ) ) { int matchedLen = 0 ; if ( eval . charAt ( leftBrace - 1 ) == '$' ) { int subStart = leftBrace + 1 ; // after ' { '
for ( int i = subStart ; i < eval . length ( ) ; i ++ ) { switch ( eval . charAt ( i ) ) { case '}' : if ( matchedLen > 0 ) { // match
result [ SUB_START_IDX ] = subStart ; result [ SUB_END_IDX ] = subStart + matchedLen ; break match_loop ; } // fall through to skip 1 char
case ' ' : case '$' : matchStart = i + 1 ; continue match_loop ; default : matchedLen ++ ; } } // scanned from " $ { " to the end of eval , and no reset via ' ' , ' $ ' :
// no match !
break match_loop ; } else { // not a start of a variable
matchStart = leftBrace + 1 ; } } return result ; |
public class SipServletMessageImpl { /** * Return a mimemultipart from raw Content
* FIXME Doesn ' t support nested multipart in the body content
* @ param contentTypeHeader content type header related to the rawContent
* @ param rawContent body content
* @ return a mimemultipart from raw Content */
private static MimeMultipart getContentAsMimeMultipart ( ContentTypeHeader contentTypeHeader , byte [ ] rawContent ) { } } | // Issue 1123 : http : / / code . google . com / p / mobicents / issues / detail ? id = 1123 : Multipart type is supported
String delimiter = contentTypeHeader . getParameter ( MULTIPART_BOUNDARY ) ; String start = contentTypeHeader . getParameter ( MULTIPART_START ) ; MimeMultipart mimeMultipart = new MimeMultipart ( contentTypeHeader . getContentSubType ( ) ) ; if ( delimiter == null ) { MimeBodyPart mbp = new MimeBodyPart ( ) ; DataSource ds = new ByteArrayDataSource ( rawContent , contentTypeHeader . getContentSubType ( ) ) ; try { mbp . setDataHandler ( new DataHandler ( ds ) ) ; mimeMultipart . addBodyPart ( mbp ) ; } catch ( MessagingException e ) { throw new IllegalArgumentException ( "couldn't create the multipart object from the message content " + rawContent , e ) ; } } else { // splitting the body content by delimiter
String [ ] fragments = new String ( rawContent ) . split ( MULTIPART_BOUNDARY_DELIM + delimiter ) ; for ( String fragment : fragments ) { final String trimmedFragment = fragment . trim ( ) ; // skipping empty fragment and ending fragment looking like - -
if ( trimmedFragment . length ( ) > 0 && ! MULTIPART_BOUNDARY_DELIM . equals ( trimmedFragment ) ) { String fragmentHeaders = null ; String fragmentBody = fragment ; // if there is a start , it means that there is probably headers before the content that need to be added to the mime body part
// so we split headers from body content
if ( start != null && start . length ( ) > 0 ) { int indexOfStart = fragment . indexOf ( start ) ; if ( indexOfStart != - 1 ) { fragmentHeaders = fragmentBody . substring ( 0 , indexOfStart + start . length ( ) ) ; fragmentBody = fragmentBody . substring ( indexOfStart + start . length ( ) ) . trim ( ) ; } } MimeBodyPart mbp = new MimeBodyPart ( ) ; try { String contentType = contentTypeHeader . getContentSubType ( ) ; // check if the body content start with a Content - Type header
// if so we strip it from the content body
if ( fragmentBody . startsWith ( ContentTypeHeader . NAME ) ) { int indexOfLineReturn = fragmentBody . indexOf ( LINE_RETURN_DELIM ) ; contentType = fragmentBody . substring ( 0 , indexOfLineReturn - 1 ) . trim ( ) ; fragmentBody = fragmentBody . substring ( indexOfLineReturn ) . trim ( ) ; } // setting the content body stripped from the headers
mbp . setContent ( fragmentBody , contentType ) ; // adding the headers to the body part
mbp . addHeaderLine ( contentType ) ; if ( fragmentHeaders != null ) { StringTokenizer stringTokenizer = new StringTokenizer ( fragmentHeaders , LINE_RETURN_DELIM ) ; while ( stringTokenizer . hasMoreTokens ( ) ) { String token = stringTokenizer . nextToken ( ) . trim ( ) ; if ( token != null && token . length ( ) > 0 ) { mbp . addHeaderLine ( token ) ; } } } mimeMultipart . addBodyPart ( mbp ) ; } catch ( MessagingException e ) { throw new IllegalArgumentException ( "couldn't create the multipart object from the message content " + rawContent , e ) ; } } } } return mimeMultipart ; |
public class ApplicationResource { /** * ( non - Javadoc )
* @ see net . roboconf . dm . internal . rest . api . IApplicationWs
* # listChildrenInstances ( java . lang . String , java . lang . String , boolean ) */
@ Override public List < Instance > listChildrenInstances ( String applicationName , String instancePath , boolean allChildren ) { } } | List < Instance > result = new ArrayList < > ( ) ; Application app = this . manager . applicationMngr ( ) . findApplicationByName ( applicationName ) ; // Log
if ( instancePath == null ) this . logger . fine ( "Request: list " + ( allChildren ? "all" : "root" ) + " instances for " + applicationName + "." ) ; else this . logger . fine ( "Request: list " + ( allChildren ? "all" : "direct" ) + " children instances for " + instancePath + " in " + applicationName + "." ) ; // Find the instances
Instance inst ; if ( app != null ) { if ( instancePath == null ) { if ( allChildren ) result . addAll ( InstanceHelpers . getAllInstances ( app ) ) ; else result . addAll ( app . getRootInstances ( ) ) ; } else if ( ( inst = InstanceHelpers . findInstanceByPath ( app , instancePath ) ) != null ) { if ( allChildren ) { result . addAll ( InstanceHelpers . buildHierarchicalList ( inst ) ) ; result . remove ( inst ) ; } else { result . addAll ( inst . getChildren ( ) ) ; } } } // Bug # 64 : sort instance paths for the clients
Collections . sort ( result , new InstanceComparator ( ) ) ; return result ; |
public class ZooController { /** * 获取ZK 部署情况
* @ param zkDeployForm
* @ return */
@ RequestMapping ( value = "/zkdeploy" , method = RequestMethod . GET ) @ ResponseBody public JsonObjectBase getZkDeployInfo ( @ Valid ZkDeployForm zkDeployForm ) { } } | LOG . info ( zkDeployForm . toString ( ) ) ; ConfigFullModel configFullModel = zkDeployValidator . verify ( zkDeployForm ) ; String data = zkDeployMgr . getDeployInfo ( configFullModel . getApp ( ) . getName ( ) , configFullModel . getEnv ( ) . getName ( ) , zkDeployForm . getVersion ( ) ) ; return buildSuccess ( "hostInfo" , data ) ; |
public class RequestParameters { /** * Converts the given < code > value < / code > to the given < code > targetType < / code > .
* @ param < T > is the generic type of < code > targetType < / code > .
* @ param value is the value to convert .
* @ param targetType is the { @ link Class } reflecting the type to convert the value to .
* @ return the converted value .
* @ throws ParseException if parsing of the given < code > value < / code > failed while converting . */
@ SuppressWarnings ( "unchecked" ) protected < T > T convertValue ( String value , Class < T > targetType ) throws ParseException { } } | if ( value == null ) { return null ; } Object result ; if ( targetType == String . class ) { result = value ; } else if ( targetType . isEnum ( ) ) { for ( T instance : targetType . getEnumConstants ( ) ) { Enum < ? > e = ( Enum < ? > ) instance ; if ( e . name ( ) . equalsIgnoreCase ( value ) ) { return instance ; } } throw new IllegalArgumentException ( "Enum constant not found!" ) ; } else if ( ( targetType == boolean . class ) || ( targetType == Boolean . class ) ) { result = Boolean . parseBoolean ( value ) ; } else if ( ( targetType == int . class ) || ( targetType == Integer . class ) ) { result = Integer . valueOf ( value ) ; } else if ( ( targetType == long . class ) || ( targetType == Long . class ) ) { result = Long . valueOf ( value ) ; } else if ( ( targetType == double . class ) || ( targetType == Double . class ) ) { result = Double . valueOf ( value ) ; } else if ( ( targetType == float . class ) || ( targetType == Float . class ) ) { result = Float . valueOf ( value ) ; } else if ( ( targetType == short . class ) || ( targetType == Short . class ) ) { result = Short . valueOf ( value ) ; } else if ( ( targetType == byte . class ) || ( targetType == Byte . class ) ) { result = Byte . valueOf ( value ) ; } else if ( targetType == BigDecimal . class ) { result = new BigDecimal ( value ) ; } else if ( targetType == BigInteger . class ) { result = new BigInteger ( value ) ; } else if ( targetType == Date . class ) { result = new SimpleDateFormat ( "YYYY-MM-dd'T'HH:mm:ss" ) . parseObject ( value ) ; } else { throw new InternalServerErrorException ( "Unsupported type " + targetType ) ; } // do not use type . cast ( ) as not working for primitive types .
return ( T ) result ; |
public class MalisisFont { /** * # end Prepare / Clean */
private boolean isCharVisible ( int x , int y , StringWalker walker , ClipArea area ) { } } | if ( area == null || area . noClip ( ) ) return true ; if ( area . fullClip ( ) ) return false ; return area . isInside ( x , y ) || area . isInside ( x + ( int ) Math . ceil ( walker . width ( ) ) , y + ( int ) Math . ceil ( walker . height ( ) ) ) ; |
public class For { /** * # for ( id : expr ) */
private void forIterator ( Env env , Scope scope , Writer writer ) { } } | Ctrl ctrl = scope . getCtrl ( ) ; Object outer = scope . get ( "for" ) ; ctrl . setLocalAssignment ( ) ; ForIteratorStatus forIteratorStatus = new ForIteratorStatus ( outer , forCtrl . getExpr ( ) . eval ( scope ) , location ) ; ctrl . setWisdomAssignment ( ) ; scope . setLocal ( "for" , forIteratorStatus ) ; Iterator < ? > it = forIteratorStatus . getIterator ( ) ; String itemName = forCtrl . getId ( ) ; while ( it . hasNext ( ) ) { scope . setLocal ( itemName , it . next ( ) ) ; stat . exec ( env , scope , writer ) ; forIteratorStatus . nextState ( ) ; if ( ctrl . isJump ( ) ) { if ( ctrl . isBreak ( ) ) { ctrl . setJumpNone ( ) ; break ; } else if ( ctrl . isContinue ( ) ) { ctrl . setJumpNone ( ) ; continue ; } else { return ; } } } if ( _else != null && forIteratorStatus . getIndex ( ) == 0 ) { _else . exec ( env , scope , writer ) ; } |
public class CommerceOrderItemUtil {

    /**
     * Returns the last commerce order item in the ordered set where
     * commerceOrderId = &#63;.
     *
     * @param commerceOrderId the commerce order ID
     * @param orderByComparator the comparator to order the set by (optionally
     *        <code>null</code>)
     * @return the last matching commerce order item, or <code>null</code> if a
     *         matching commerce order item could not be found
     */
    public static CommerceOrderItem fetchByCommerceOrderId_Last(long commerceOrderId,
        OrderByComparator<CommerceOrderItem> orderByComparator) {
        // Thin static facade: delegates to the persistence bean resolved by
        // getPersistence(); "fetch" (vs. "find") returns null instead of throwing.
        return getPersistence().fetchByCommerceOrderId_Last(commerceOrderId, orderByComparator);
    }
}
public class JPAGenericDAORulesBasedImpl { /** * Méthode de validation des contraintes referentielles
* @ param entityEntité à valider
* @ param modeMode DAO
* @ param validationTimeMoment d ' évaluation */
protected void validateEntityReferentialConstraint ( Object entity , DAOMode mode , DAOValidatorEvaluationTime validationTime ) { } } | // Obtention de la liste des annotations DAO qui sont sur la classe
List < Annotation > daoAnnotations = DAOValidatorHelper . loadDAOValidatorAnnotations ( entity ) ; // Si la liste est vide
if ( daoAnnotations == null || daoAnnotations . size ( ) == 0 ) return ; // On parcours cette liste
for ( Annotation daoAnnotation : daoAnnotations ) { // Obtention de la classe du Validateur
Class < ? > validatorClass = DAOValidatorHelper . getValidationLogicClass ( daoAnnotation ) ; // Le validateur
IDAOValidator < Annotation > validator = null ; try { // On instancie le validateur
validator = ( IDAOValidator < Annotation > ) validatorClass . newInstance ( ) ; // Initialisation du Validateur
validator . initialize ( daoAnnotation , getEntityManager ( ) , mode , validationTime ) ; } catch ( Throwable e ) { // On relance l ' exception
throw new JPersistenceToolsException ( "ValidatorInstanciationException.message" , e ) ; } // Validation des contraintes d ' integrites
validator . processValidation ( entity ) ; } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.