signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class LiveNode { /** * Enumerables are copied to new structures ( immutable when possible ) */
@ Override public Object getDehydratedPropertyValue ( JaversProperty property ) { } } | Edge edge = getEdge ( property ) ; if ( edge != null ) { return edge . getDehydratedPropertyValue ( ) ; } Object propertyValue = getCdo ( ) . getPropertyValue ( property ) ; if ( propertyValue == null ) { return null ; } // Collections & Maps are copied to a new immutable structure
if ( property . getType ( ) instanceof EnumerableType ) { EnumerableType enumerableType = property . getType ( ) ; return enumerableType . map ( propertyValue , it -> it ) ; } return getCdo ( ) . getPropertyValue ( property ) ; |
public class ListUtil {

    /**
     * Returns the element at the given position of a delimited list string.
     * Every character of {@code delimiter} is treated as an individual delimiter
     * (set semantics), not as a multi-character separator token.
     *
     * @param list the delimited list string to read from
     * @param delimiter characters, each of which separates list elements
     * @param position zero-based index of the element to return
     * @param ignoreEmpty when {@code true}, empty elements are skipped and do not count
     * @param defaultValue value returned when the position does not exist
     * @return the element at {@code position}, or {@code defaultValue} if out of range
     */
    public static String getAt(String list, String delimiter, int position, boolean ignoreEmpty, String defaultValue) {
        // Fast path: single-character delimiters are handled by the char-based overload.
        if (delimiter.length() == 1)
            return getAt(list, delimiter.charAt(0), position, ignoreEmpty, defaultValue);

        int len = list.length();
        if (len == 0)
            return defaultValue;

        int last = -1;   // index of the delimiter that closed the previous element
        int count = -1;  // zero-based index of the last element that was completed
        char[] del = delimiter.toCharArray();
        char c;
        for (int i = 0; i < len; i++) {
            c = list.charAt(i);
            for (int y = 0; y < del.length; y++) {
                if (c == del[y]) {
                    // Adjacent delimiters produce an empty element; optionally skip it.
                    if (ignoreEmpty && (last + 1) == i) {
                        last = i;
                        break;
                    }
                    count++;
                    if (count == position) {
                        return list.substring(last + 1, i);
                    }
                    last = i;
                    break;
                }
            }
        }
        // The trailing segment (after the last delimiter) is element count+1.
        if (position == count + 1) {
            if (!ignoreEmpty || last + 1 < len)
                return list.substring(last + 1);
        }
        return defaultValue;
    }
}
public class Controller {

    /**
     * Builds a model instance of the given class populated from the current HTTP request.
     *
     * @param <T> the model type
     * @param modelClass class of the model to instantiate and fill
     * @return the injected model instance
     */
    public <T> T getModel(Class<T> modelClass) {
        // Injector binds request parameters onto the model; 'false' is a flag whose
        // meaning is defined by Injector.injectModel — TODO confirm (likely "skip validation").
        return (T) Injector.injectModel(modelClass, request, false);
    }
}
public class AccessValidator { /** * Liefert das n - te Element des uebergebenen Arrays zurueck , falls ein
* korrekter Index uebergaben wird
* @ param < T > Typ - Parameter
* @ param array Array , auf das zugegriffen wird
* @ param n Array - Index , beginnend bei 0
* @ return n - te Element des Arrays */
public static < T > T access ( T [ ] array , int n ) { } } | int max = array . length - 1 ; if ( ( n < 0 ) || ( n > max ) ) { throw new InvalidValueException ( n , "n" , Range . between ( 0 , max ) ) ; } return array [ n ] ; |
public class Fields { /** * Returns the position of the specified field . */
public int fieldIndex ( String field ) { } } | Integer ret = mIndex . get ( field ) ; if ( ret == null ) { throw new IllegalArgumentException ( field + " does not exist" ) ; } return ret ; |
public class DistcpFileSplitter { /** * Split an input { @ link CopyableFile } into multiple splits aligned with block boundaries .
* @ param file { @ link CopyableFile } to split .
* @ param workUnit { @ link WorkUnit } generated for this file .
* @ param targetFs destination { @ link FileSystem } where file is to be copied .
* @ return a list of { @ link WorkUnit } , each for a split of this file .
* @ throws IOException */
public static Collection < WorkUnit > splitFile ( CopyableFile file , WorkUnit workUnit , FileSystem targetFs ) throws IOException { } } | long len = file . getFileStatus ( ) . getLen ( ) ; // get lcm of source and target block size so that split aligns with block boundaries for both extract and write
long blockSize = ArithmeticUtils . lcm ( file . getFileStatus ( ) . getBlockSize ( ) , file . getBlockSize ( targetFs ) ) ; long maxSplitSize = workUnit . getPropAsLong ( MAX_SPLIT_SIZE_KEY , DEFAULT_MAX_SPLIT_SIZE ) ; if ( maxSplitSize < blockSize ) { log . warn ( String . format ( "Max split size must be at least block size. Adjusting to %d." , blockSize ) ) ; maxSplitSize = blockSize ; } if ( len < maxSplitSize ) { return Lists . newArrayList ( workUnit ) ; } Collection < WorkUnit > newWorkUnits = Lists . newArrayList ( ) ; long lengthPerSplit = ( maxSplitSize / blockSize ) * blockSize ; int splits = ( int ) ( len / lengthPerSplit + 1 ) ; for ( int i = 0 ; i < splits ; i ++ ) { WorkUnit newWorkUnit = WorkUnit . copyOf ( workUnit ) ; long lowPos = lengthPerSplit * i ; long highPos = Math . min ( lengthPerSplit * ( i + 1 ) , len ) ; Split split = new Split ( lowPos , highPos , i , splits , String . format ( "%s.__PART%d__" , file . getDestination ( ) . getName ( ) , i ) ) ; String serializedSplit = GSON . toJson ( split ) ; newWorkUnit . setProp ( SPLIT_KEY , serializedSplit ) ; Guid oldGuid = CopySource . getWorkUnitGuid ( newWorkUnit ) . get ( ) ; Guid newGuid = oldGuid . append ( Guid . fromStrings ( serializedSplit ) ) ; CopySource . setWorkUnitGuid ( workUnit , newGuid ) ; newWorkUnits . add ( newWorkUnit ) ; } return newWorkUnits ; |
public class RecurringData {

    /**
     * Calculate start dates for a weekly recurrence, appending them to {@code dates}.
     * Iteration continues while {@code moreDates(calendar, dates)} allows it.
     *
     * @param calendar current date; advanced in place as dates are generated
     * @param frequency recurrence frequency in weeks (1 = every week)
     * @param dates output list of start dates
     */
    private void getWeeklyDates(Calendar calendar, int frequency, List<Date> dates) {
        int currentDay = calendar.get(Calendar.DAY_OF_WEEK);

        while (moreDates(calendar, dates)) {
            // 'offset' accumulates days to skip; the calendar is only advanced
            // lazily, right before a matching day is recorded.
            int offset = 0;
            for (int dayIndex = 0; dayIndex < 7; dayIndex++) {
                if (getWeeklyDay(Day.getInstance(currentDay))) {
                    if (offset != 0) {
                        calendar.add(Calendar.DAY_OF_YEAR, offset);
                        offset = 0;
                    }
                    if (!moreDates(calendar, dates)) {
                        break;
                    }
                    dates.add(calendar.getTime());
                }
                ++offset;
                ++currentDay;
                // Wrap the day-of-week (Calendar uses 1..7).
                if (currentDay > 7) {
                    currentDay = 1;
                }
            }

            // Skip (frequency - 1) whole weeks between recurrences.
            if (frequency > 1) {
                offset += (7 * (frequency - 1));
            }
            calendar.add(Calendar.DAY_OF_YEAR, offset);
        }
    }
}
public class AbstractAlpineQueryManager {

    /**
     * Retrieves an object by its UUID, loading it with the given JDO fetch group.
     *
     * @param <T> the type to return
     * @param clazz the persistence class to retrieve
     * @param uuid the uuid of the object to retrieve
     * @param fetchGroup the JDO fetch group to use when making the query
     * @return an object of the specified type, as returned by the two-arg overload
     * @since 1.0.0
     */
    @SuppressWarnings("unchecked")
    public <T> T getObjectByUuid(Class<T> clazz, UUID uuid, String fetchGroup) {
        // Adding the group to the PersistenceManager's fetch plan affects the
        // delegated lookup below.
        pm.getFetchPlan().addGroup(fetchGroup);
        return getObjectByUuid(clazz, uuid);
    }
}
public class LeaseManager {

    /**
     * Check the leases beginning from the oldest, recovering those whose hard
     * limit has expired. Stops early once {@code maxPathsPerCheck} paths have
     * been examined, or when the oldest remaining lease has not expired.
     */
    synchronized void checkLeases() {
        int numPathsChecked = 0;
        for (; sortedLeases.size() > 0; ) {
            final Lease oldest = sortedLeases.first();
            if (!oldest.expiredHardLimit()) {
                // Leases are sorted; if the oldest is still valid, all are.
                return;
            }

            // internalReleaseLease() removes paths corresponding to empty files,
            // i.e. it needs to modify the collection being iterated over
            // causing ConcurrentModificationException — so snapshot the paths first.
            String[] leasePaths = new String[oldest.getPaths().size()];
            oldest.getPaths().toArray(leasePaths);
            LOG.info("Lease " + oldest + " has expired hard limit. Recovering lease for paths: " + Arrays.toString(leasePaths));
            for (String p : leasePaths) {
                // Bound the work done per invocation.
                if (++numPathsChecked > this.maxPathsPerCheck) {
                    return;
                }
                try {
                    fsnamesystem.getFSNamesystemMetrics().numLeaseRecoveries.inc();
                    fsnamesystem.internalReleaseLeaseOne(oldest, p, this.discardLastBlockIfNoSync);
                } catch (IOException e) {
                    // Recovery failed for this path: drop it from the lease and count
                    // the failure, then continue with the remaining paths.
                    LOG.error("Cannot release the path " + p + " in the lease " + oldest, e);
                    removeLease(oldest, p);
                    fsnamesystem.getFSNamesystemMetrics().numLeaseManagerMonitorExceptions.inc();
                }
            }
        }
    }
}
public class MultipartStream {

    /**
     * Reads from the multipart MIME buffer into {@code buffer}, stopping at the
     * part boundary. Bytes that were read ahead while probing for a boundary are
     * stashed in {@code _peek} and replayed on the next call.
     *
     * @param buffer destination buffer
     * @param offset start offset in {@code buffer}
     * @param length maximum number of bytes to read
     * @return the number of bytes read, or -1 at end of the current part
     * @throws IOException on underlying stream errors
     */
    @Override
    public int read(byte[] buffer, int offset, int length) throws IOException {
        int b = -1;

        if (_isPartDone)
            return -1;

        int i = 0;
        // Need the last peek or would miss the initial '\n'
        while (_peekOffset + 1 < _peekLength && length > 0) {
            buffer[offset + i++] = _peek[_peekOffset++];
            length--;
        }

        while (i < length && (b = read()) >= 0) {
            boolean hasCr = false;
            if (b == '\r') {
                hasCr = true;
                b = read();
                // XXX: Macintosh?
                if (b != '\n') {
                    // Lone CR: emit it and stash the following byte for replay.
                    buffer[offset + i++] = (byte) '\r';
                    _peek[0] = (byte) b;
                    _peekOffset = 0;
                    _peekLength = 1;
                    continue;
                }
            } else if (b != '\n') {
                // Ordinary data byte.
                buffer[offset + i++] = (byte) b;
                continue;
            }

            // We are just past a line break: probe for the boundary string.
            int j;
            for (j = 0; j < _boundaryLength && (b = read()) >= 0 && _boundaryBuffer[j] == b; j++) {
            }

            if (j == _boundaryLength) {
                // Full boundary matched: this part is finished.
                _isPartDone = true;
                // "--" after the boundary marks the end of the whole stream.
                if ((b = read()) == '-') {
                    if ((b = read()) == '-') {
                        _isDone = true;
                        _isComplete = true;
                    }
                }
                // Skip the rest of the boundary line.
                for (; b > 0 && b != '\r' && b != '\n'; b = read()) {
                }
                if (b == '\r' && (b = read()) != '\n') {
                    _peek[0] = (byte) b;
                    _peekOffset = 0;
                    _peekLength = 1;
                }
                return i > 0 ? i : -1;
            }

            // Partial match only: replay the line break and the matched prefix.
            _peekLength = 0;
            if (hasCr && i + 1 < length) {
                buffer[offset + i++] = (byte) '\r';
                buffer[offset + i++] = (byte) '\n';
            } else if (hasCr) {
                buffer[offset + i++] = (byte) '\r';
                _peek[_peekLength++] = (byte) '\n';
            } else {
                buffer[offset + i++] = (byte) '\n';
            }

            int k = 0;
            while (k < j && i + 1 < length)
                buffer[offset + i++] = _boundaryBuffer[k++];
            while (k < j)
                _peek[_peekLength++] = _boundaryBuffer[k++];
            _peek[_peekLength++] = (byte) b;
            _peekOffset = 0;
        }

        if (i <= 0) {
            _isPartDone = true;
            if (b < 0)
                _isDone = true;
            return -1;
        } else {
            return i;
        }
    }
}
public class SecondaryIndexManager {

    /**
     * Removes an existing index for the given column and records the removal
     * in the system keyspace.
     *
     * @param column the indexed column to remove
     */
    public void removeIndexedColumn(ByteBuffer column) {
        SecondaryIndex index = indexesByColumn.remove(column);

        if (index == null)
            return;

        // Remove this column from the row level index map as well as the all-indexes set
        if (index instanceof PerRowSecondaryIndex) {
            index.removeColumnDef(column);

            // If no columns are left, remove from the row level lookup as well as the all-indexes set
            if (index.getColumnDefs().isEmpty()) {
                allIndexes.remove(index);
                rowLevelIndexMap.remove(index.getClass());
            }
        } else {
            // Per-column index: one index per column, so drop it outright.
            allIndexes.remove(index);
        }

        index.removeIndex(column);
        // Persist the removal so the index is not rebuilt on restart.
        SystemKeyspace.setIndexRemoved(baseCfs.metadata.ksName, index.getNameForSystemKeyspace(column));
    }
}
public class ApiOvhMe { /** * VIP Status of this account
* REST : GET / me / vipStatus */
public OvhVipStatus vipStatus_GET ( ) throws IOException { } } | String qPath = "/me/vipStatus" ; StringBuilder sb = path ( qPath ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhVipStatus . class ) ; |
public class ClassPathAnnotationScanner { /** * Scan the given packages .
* @ param annotation The annotation to scan for
* @ param pkg The package to scan
* @ return A stream of classes */
@ Override public Stream < Class > scan ( String annotation , String pkg ) { } } | if ( pkg == null ) { return Stream . empty ( ) ; } List < Class > classes = doScan ( annotation , pkg ) ; return classes . stream ( ) ; |
public class JBBPUtils {

    /**
     * Convert a float value into its string representation in the given radix.
     * Note: the value is widened to double before conversion, so the textual
     * form is that of the widened double.
     *
     * @param floatValue value to be converted to a string
     * @param radix radix base for the conversion; must be 10 or 16
     * @return the converted value as an upper-case string
     * @throws IllegalArgumentException for a radix other than 10 or 16
     * @since 1.4.0
     */
    public static String float2str(final float floatValue, final int radix) {
        if (radix != 10 && radix != 16) {
            throw new IllegalArgumentException("Illegal radix [" + radix + ']');
        }
        if (radix == 10) {
            return Double.toString(floatValue);
        }
        // Radix 16: strip the sign and the "0x" prefix, upper-case, re-apply sign.
        String hex = Double.toHexString(floatValue);
        final boolean negative = hex.startsWith("-");
        if (negative) {
            hex = hex.substring(1);
        }
        if (hex.startsWith("0x")) {
            hex = hex.substring(2);
        }
        final String unsigned = hex.toUpperCase(Locale.ENGLISH);
        return negative ? '-' + unsigned : unsigned;
    }
}
public class OpenConnectionQuery {

    /**
     * Serializes this packet's fields to the given buffer, after the superclass
     * header. The write order (userName, password, clientID) is the wire format
     * and must match the corresponding deserialization.
     *
     * @see net.timewalker.ffmq4.network.packet.AbstractPacket#serializeTo(net.timewalker.ffmq4.utils.RawDataOutputStream)
     */
    @Override
    protected void serializeTo(RawDataBuffer out) {
        super.serializeTo(out);
        out.writeNullableUTF(userName);
        out.writeNullableUTF(password);
        out.writeNullableUTF(clientID);
    }
}
public class ObjectFactory {

    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link DocumentType.Remove}{@code >}
     * for the xcap-diff "remove" element, scoped to {@link DocumentType}.
     *
     * @param value the payload to wrap
     * @return the wrapping {@link JAXBElement}
     */
    @XmlElementDecl(namespace = "urn:ietf:params:xml:ns:xcap-diff", name = "remove", scope = DocumentType.class)
    public JAXBElement<DocumentType.Remove> createDocumentTypeRemove(DocumentType.Remove value) {
        return new JAXBElement<DocumentType.Remove>(_DocumentTypeRemove_QNAME, DocumentType.Remove.class, DocumentType.class, value);
    }
}
public class LREnvelope { /** * Builds and returns a map of the envelope data , suitable for signing with the included signer
* @ return map of envelope data , suitable for signing */
protected Map < String , Object > getSignableData ( ) { } } | final Map < String , Object > doc = getSendableData ( ) ; // remove node - specific data
for ( int i = 0 ; i < excludedFields . length ; i ++ ) { doc . remove ( excludedFields [ i ] ) ; } return doc ; |
public class CoreActivity { /** * searches for the activity recursively */
public CoreActivity findActivity ( String activityId ) { } } | CoreActivity localActivity = getChildActivity ( activityId ) ; if ( localActivity != null ) { return localActivity ; } for ( CoreActivity activity : getActivities ( ) ) { CoreActivity nestedActivity = activity . findActivity ( activityId ) ; if ( nestedActivity != null ) { return nestedActivity ; } } return null ; |
public class OptionalJsonDeserializer {

    /**
     * Static factory for an {@link OptionalJsonDeserializer} wrapping the given
     * element deserializer.
     *
     * @param deserializer {@link JsonDeserializer} used to deserialize the object inside the {@link Optional}
     * @param <T> type of the element inside the {@link Optional}
     * @return a new instance of {@link OptionalJsonDeserializer}
     */
    public static <T> OptionalJsonDeserializer<T> newInstance(JsonDeserializer<T> deserializer) {
        return new OptionalJsonDeserializer<T>(deserializer);
    }
}
public class SnippetsApi {

    /**
     * Get a Pager of the authenticated user's snippets.
     *
     * <pre><code>GitLab Endpoint: GET /snippets</code></pre>
     *
     * @param itemsPerPage the number of snippets per page
     * @return the Pager of snippets
     * @throws GitLabApiException if any exception occurs
     */
    public Pager<Snippet> getSnippets(int itemsPerPage) throws GitLabApiException {
        // No extra query params (null) — the pager lazily fetches pages from "snippets".
        return (new Pager<Snippet>(this, Snippet.class, itemsPerPage, null, "snippets"));
    }
}
public class Router {

    /**
     * Helper for toString: the length of the longest string in the list,
     * or 0 for an empty list.
     *
     * @param coll strings to measure
     * @return maximum string length, 0 when the list is empty
     */
    private static int maxLength(List<String> coll) {
        return coll.stream().mapToInt(String::length).max().orElse(0);
    }
}
public class HandlerFactory { /** * get a phone number key press handler .
* @ return PhoneNumberKeyPressHandler */
public static final KeyPressHandler getPhoneNumberKeyPressHandler ( ) { } } | // NOPMD it ' s thread save !
if ( HandlerFactory . phoneNumberKeyPressHandler == null ) { synchronized ( PhoneNumberKeyPressHandler . class ) { if ( HandlerFactory . phoneNumberKeyPressHandler == null ) { HandlerFactory . phoneNumberKeyPressHandler = new PhoneNumberKeyPressHandler ( ) ; } } } return HandlerFactory . phoneNumberKeyPressHandler ; |
public class TiffTags { /** * Generate tag rules .
* @ throws ReadTagsIOException the read tags io exception */
protected void generateTagRules ( ) throws ReadTagsIOException { } } | try { PrintWriter writer = new PrintWriter ( "typecheck.xml" , "UTF-8" ) ; for ( int tagId : tagMap . keySet ( ) ) { Tag tag = tagMap . get ( tagId ) ; writer . println ( " <rule context=\"tag[id=" + tag . getId ( ) + "]\">" ) ; String typeRule = "" ; for ( String tagType : tag . getType ( ) ) { if ( typeRule . length ( ) > 0 ) typeRule += " || " ; typeRule += "{type=='" + tagType + "'}" ; } writer . println ( " <assert test=\"" + typeRule + "\">Tag type does not match</assert>" ) ; writer . println ( " </rule>" ) ; } writer . close ( ) ; writer = new PrintWriter ( "cardinalitycheck.xml" , "UTF-8" ) ; for ( int tagId : tagMap . keySet ( ) ) { Tag tag = tagMap . get ( tagId ) ; if ( tag . getCardinality ( ) . length ( ) > 0 && ! tag . getCardinality ( ) . equals ( "N" ) ) { try { int card = Integer . parseInt ( tag . getCardinality ( ) ) ; writer . println ( " <rule context=\"tag[id=" + tag . getId ( ) + "]\">" ) ; String typeRule = "{cardinality==" + card + "}" ; writer . println ( " <assert test=\"" + typeRule + "\">Tag cardinality does not match</assert>" ) ; writer . println ( " </rule>" ) ; } catch ( Exception ex ) { // TODO : Deal with formulas
System . err . println ( "Formula in tag " + tag . getName ( ) + ": " + tag . getCardinality ( ) ) ; } } } writer . close ( ) ; } catch ( Exception ex ) { throw new ReadTagsIOException ( ) ; } |
public class GridBy { /** * Creates an XPath expression that will find a header row , selecting the row based on the
* header texts present .
* @ param columnName first header text which must be present .
* @ param extraColumnNames name of other header texts that must be present in table ' s header row .
* @ return XPath expression selecting a tr in the row */
public static String getXPathForHeaderRowByHeaders ( String columnName , String ... extraColumnNames ) { } } | String allHeadersPresent ; if ( extraColumnNames != null && extraColumnNames . length > 0 ) { int extraCount = extraColumnNames . length ; String [ ] columnNames = new String [ extraCount + 1 ] ; columnNames [ 0 ] = columnName ; System . arraycopy ( extraColumnNames , 0 , columnNames , 1 , extraCount ) ; allHeadersPresent = Stream . of ( columnNames ) . map ( GridBy :: getXPathForHeaderCellWithText ) . collect ( Collectors . joining ( " and " ) ) ; } else { allHeadersPresent = getXPathForHeaderCellWithText ( columnName ) ; } return String . format ( "/tr[%1$s]" , allHeadersPresent ) ; |
public class AbstractPrincipalAttributesRepository {

    /**
     * Obtains attributes from the configured attribute repository by calling
     * {@link IPersonAttributeDao#getPerson(String, org.apereo.services.persondir.IPersonAttributeDaoFilter)}.
     * The lookup is serialized on {@code lock}.
     *
     * @param id the person id to locate in the attribute repository
     * @return the map of attributes; empty when no repository is available
     */
    protected Map<String, List<Object>> retrievePersonAttributesFromAttributeRepository(final String id) {
        synchronized (lock) {
            val repository = getAttributeRepository();
            if (repository == null) {
                LOGGER.warn("No attribute repositories could be fetched from application context");
                return new HashMap<>(0);
            }
            return CoreAuthenticationUtils.retrieveAttributesFromAttributeRepository(repository, id, this.attributeRepositoryIds);
        }
    }
}
public class CmsJspTagBundle { /** * Gets the resource bundle with the given base name and preferred locale .
* @ param basename the resource bundle base name
* @ param pref the preferred locale */
private static ResourceBundle findMatch ( String basename , Locale pref ) { } } | ResourceBundle match = null ; try { ResourceBundle bundle = CmsResourceBundleLoader . getBundle ( basename , pref ) ; match = bundle ; } catch ( MissingResourceException mre ) { // ignored
} return match ; |
public class PolicyTypeDescription { /** * The description of the policy attributes associated with the policies defined by Elastic Load Balancing .
* @ param policyAttributeTypeDescriptions
* The description of the policy attributes associated with the policies defined by Elastic Load Balancing . */
public void setPolicyAttributeTypeDescriptions ( java . util . Collection < PolicyAttributeTypeDescription > policyAttributeTypeDescriptions ) { } } | if ( policyAttributeTypeDescriptions == null ) { this . policyAttributeTypeDescriptions = null ; return ; } this . policyAttributeTypeDescriptions = new com . amazonaws . internal . SdkInternalList < PolicyAttributeTypeDescription > ( policyAttributeTypeDescriptions ) ; |
public class Expressions {

    /**
     * Creates an IsEqual expression from the given expression and constant.
     *
     * @param left the left expression
     * @param constant the constant to compare to (wrapped via {@code constant(...)})
     * @return a new IsEqual binary expression
     */
    public static IsEqual isEqual(ComparableExpression<Number> left, Number constant) {
        return new IsEqual(left, constant(constant));
    }
}
public class Matrix4x3d {

    /**
     * Set this matrix to be a symmetric orthographic projection transformation for a
     * right-handed coordinate system using OpenGL's NDC z range of <code>[-1..+1]</code>.
     * Equivalent to {@link #setOrtho(double, double, double, double, double, double) setOrtho()}
     * with <code>left = -width/2</code>, <code>right = +width/2</code>,
     * <code>bottom = -height/2</code> and <code>top = +height/2</code>.
     * To apply the projection to an existing transformation, use
     * {@link #orthoSymmetric(double, double, double, double) orthoSymmetric()}.
     * Reference: <a href="http://www.songho.ca/opengl/gl_projectionmatrix.html#ortho">songho.ca</a>
     *
     * @see #orthoSymmetric(double, double, double, double)
     * @param width the distance between the right and left frustum edges
     * @param height the distance between the top and bottom frustum edges
     * @param zNear near clipping plane distance
     * @param zFar far clipping plane distance
     * @return this
     */
    public Matrix4x3d setOrthoSymmetric(double width, double height, double zNear, double zFar) {
        // Delegate with zZeroToOne = false (OpenGL NDC z range [-1..+1]).
        return setOrthoSymmetric(width, height, zNear, zFar, false);
    }
}
public class AbstractAttributeDefinitionBuilder {

    /**
     * Sets the name of the attribute group with which this attribute is associated.
     *
     * @param attributeGroup the attribute group name. Cannot be an empty string, but can be
     *        {@code null} if the attribute is not associated with a group.
     * @return a builder that can be used to continue building the attribute definition
     */
    public BUILDER setAttributeGroup(String attributeGroup) {
        // Empty string is invalid; null means "no group". Note: this is an assert,
        // so it is only enforced when assertions are enabled (-ea).
        assert attributeGroup == null || attributeGroup.length() > 0;
        //noinspection deprecation
        this.attributeGroup = attributeGroup;
        return (BUILDER) this;
    }
}
public class DBInstance { /** * Provides the list of DB parameter groups applied to this DB instance .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setDBParameterGroups ( java . util . Collection ) } or { @ link # withDBParameterGroups ( java . util . Collection ) } if
* you want to override the existing values .
* @ param dBParameterGroups
* Provides the list of DB parameter groups applied to this DB instance .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DBInstance withDBParameterGroups ( DBParameterGroupStatus ... dBParameterGroups ) { } } | if ( this . dBParameterGroups == null ) { setDBParameterGroups ( new java . util . ArrayList < DBParameterGroupStatus > ( dBParameterGroups . length ) ) ; } for ( DBParameterGroupStatus ele : dBParameterGroups ) { this . dBParameterGroups . add ( ele ) ; } return this ; |
public class PipelineBuilder {

    /**
     * Adds a predicate to the pipeline: pops the predicate expression from the pipe
     * stack and wraps the remaining top expression in a {@code PredicateFilterAxis}.
     *
     * @param mTransaction transaction to operate with
     * @throws IllegalStateException when the predicate is a numeric literal, which is
     *         shorthand for {@code fn:position()} — not implemented yet
     */
    public void addPredicate(final INodeReadTrx mTransaction) {
        // Requires the predicate expression plus the expression it applies to.
        assert getPipeStack().size() >= 2;

        final AbsAxis mPredicate = getPipeStack().pop().getExpr();

        if (mPredicate instanceof LiteralExpr) {
            // Advance once so the literal's typed value is materialized.
            mPredicate.hasNext();
            // if is numeric literal -> abbrev for position()
            final int type = mTransaction.getNode().getTypeKey();
            if (type == NamePageHash.generateHashForString("xs:integer")
                    || type == NamePageHash.generateHashForString("xs:double")
                    || type == NamePageHash.generateHashForString("xs:float")
                    || type == NamePageHash.generateHashForString("xs:decimal")) {
                throw new IllegalStateException("function fn:position() is not implemented yet.");
                // TODO: implement positional predicates via FNPosition/ValueComp;
                // a sketch of that implementation was removed here for clarity —
                // see version control history.
            }
        }

        getExpression().add(new PredicateFilterAxis(mTransaction, mPredicate));
    }
}
public class SnappyServer {

    /**
     * Define a REST endpoint mapped to HTTP POST.
     *
     * @param url the relative URL to map this endpoint to
     * @param endpoint the endpoint handler
     * @param mediaTypes (optional) the accepted and returned types for this endpoint
     */
    public static void post(String url, HttpConsumer<HttpExchange> endpoint, MediaTypes... mediaTypes) {
        addResource(Methods.POST, url, endpoint, mediaTypes);
    }
}
public class SearchExpressionFacade {

    /**
     * Resolves a list of {@link UIComponent} clientIds and/or passthrough expressions
     * for the given expression or expressions. Resolved ids are accumulated into a
     * shared space-separated buffer.
     *
     * @param context the {@link FacesContext}
     * @param source the source component, e.g. a button
     * @param expressions the search expressions
     * @param hints the hints
     * @return a {@link String} with resolved clientIds and/or passthrough expressions
     *         (like PFS, widgetVar), or {@code null} when nothing resolves
     */
    public static String resolveClientIds(FacesContext context, UIComponent source, String expressions, int hints) {
        if (LangUtils.isValueBlank(expressions)) {
            // Blank input: optionally fall back to the parent's clientId.
            if (SearchExpressionUtils.isHintSet(hints, SearchExpressionHint.PARENT_FALLBACK)) {
                return source.getParent().getClientId(context);
            }
            return null;
        }

        String[] splittedExpressions = splitExpressions(context, source, expressions);
        if (splittedExpressions != null && splittedExpressions.length > 0) {
            final char separatorChar = UINamingContainer.getSeparatorChar(context);
            StringBuilder expressionsBuffer = SharedStringBuilder.get(context, SHARED_EXPRESSION_BUFFER_KEY);

            for (int i = 0; i < splittedExpressions.length; i++) {
                String expression = splittedExpressions[i].trim();
                if (LangUtils.isValueBlank(expression)) {
                    continue;
                }

                validateExpression(context, source, expression, separatorChar);

                if (isPassTroughExpression(expression)) {
                    // Pass-through expressions (PFS, widgetVar) are emitted verbatim.
                    if (expressionsBuffer.length() > 0) {
                        expressionsBuffer.append(" ");
                    }
                    expressionsBuffer.append(expression);
                } else {
                    // if it contains a keyword and it's not a nested expression (e.g. @parent:@parent), we don't need to loop
                    if (expression.contains(SearchExpressionConstants.KEYWORD_PREFIX) && expression.indexOf(separatorChar) != -1) {
                        String clientIds = resolveClientIdsByExpressionChain(context, source, expression, separatorChar, hints);
                        if (!LangUtils.isValueBlank(clientIds)) {
                            if (expressionsBuffer.length() > 0) {
                                expressionsBuffer.append(" ");
                            }
                            expressionsBuffer.append(clientIds);
                        }
                    } else {
                        // it's a keyword and not nested, just ask our resolvers
                        if (expression.contains(SearchExpressionConstants.KEYWORD_PREFIX)) {
                            SearchExpressionResolver resolver = SearchExpressionResolverFactory.findResolver(expression);
                            if (resolver instanceof ClientIdSearchExpressionResolver) {
                                String clientIds = ((ClientIdSearchExpressionResolver) resolver).resolveClientIds(context, source, source, expression, hints);
                                if (!LangUtils.isValueBlank(clientIds)) {
                                    if (expressionsBuffer.length() > 0) {
                                        expressionsBuffer.append(" ");
                                    }
                                    expressionsBuffer.append(clientIds);
                                }
                            } else if (resolver instanceof MultiSearchExpressionResolver) {
                                // Resolver may yield several components; append each clientId.
                                ArrayList<UIComponent> result = new ArrayList<>();
                                ((MultiSearchExpressionResolver) resolver).resolveComponents(context, source, source, expression, result, hints);
                                for (int j = 0; j < result.size(); j++) {
                                    UIComponent component = result.get(j);
                                    validateRenderer(context, source, component, expression, hints);
                                    if (expressionsBuffer.length() > 0) {
                                        expressionsBuffer.append(" ");
                                    }
                                    expressionsBuffer.append(component.getClientId());
                                }
                            } else {
                                UIComponent component = resolver.resolveComponent(context, source, source, expression, hints);
                                if (component == null) {
                                    if (!SearchExpressionUtils.isHintSet(hints, SearchExpressionHint.IGNORE_NO_RESULT)) {
                                        cannotFindComponent(context, source, expression);
                                    }
                                } else {
                                    validateRenderer(context, source, component, expression, hints);
                                    if (expressionsBuffer.length() > 0) {
                                        expressionsBuffer.append(" ");
                                    }
                                    expressionsBuffer.append(component.getClientId(context));
                                }
                            }
                        }
                        // default ID case
                        else {
                            ResolveClientIdCallback callback = new ResolveClientIdCallback(source, hints, expression);
                            resolveComponentById(source, expression, separatorChar, context, callback);
                            if (callback.getClientId() == null && !SearchExpressionUtils.isHintSet(hints, SearchExpressionHint.IGNORE_NO_RESULT)) {
                                cannotFindComponent(context, source, expression);
                            }
                            if (callback.getClientId() != null) {
                                if (expressionsBuffer.length() > 0) {
                                    expressionsBuffer.append(" ");
                                }
                                expressionsBuffer.append(callback.getClientId());
                            }
                        }
                    }
                }
            }

            String clientIds = expressionsBuffer.toString();
            if (!LangUtils.isValueBlank(clientIds)) {
                return clientIds;
            }
        }
        return null;
    }
}
public class WrapperManager {

    /**
     * unregisterHome removes from the cache homeObj and all objects that have
     * homeObj as their home. It also unregisters these objects from the orb
     * object adapter and drops any cached BeanIds for the home.
     *
     * @param homeName the J2EE name of the home being unregistered
     * @param homeObj the home whose wrappers should be removed
     * @throws CSIException on container failures during unregistration
     */
    public void unregisterHome(J2EEName homeName, EJSHome homeObj) throws CSIException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            Tr.entry(tc, "unregisterHome");

        J2EEName cacheHomeName;
        int numEnumerated = 0, numRemoved = 0; // d103404.2

        Enumeration<?> enumerate = wrapperCache.enumerateElements();
        while (enumerate.hasMoreElements()) {
            // need to get the beanid from either the remote or local wrapper,
            // whichever is available, the beanid must be the same for both wrappers
            EJSWrapperCommon wCommon = (EJSWrapperCommon) // f111627
                ((CacheElement) enumerate.nextElement()).getObject(); // f111627
            BeanId cacheMemberBeanId = wCommon.getBeanId(); // d181569
            cacheHomeName = cacheMemberBeanId.getJ2EEName();
            numEnumerated++;

            // If the cache has homeObj as its home or is itself the home,
            // remove it. If the wrapper has been removed since it was found
            // (above), then the call to unregister() will just return false.
            // Note that the enumeration can handle elements being removed
            // from the cache while enumerating. d103404.2
            if (cacheHomeName.equals(homeName) || cacheMemberBeanId.equals(homeObj.getId())) {
                unregister(cacheMemberBeanId, true); // d181217 d181569
                numRemoved++;
            }
        }

        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { // d103404.1 d103404.2
            Tr.debug(tc, "Unregistered " + numRemoved + " wrappers (total = " + numEnumerated + ")");
        }

        // Now remove any cached BeanIds for this home. d152323
        beanIdCache.removeAll(homeObj);

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            Tr.exit(tc, "unregisterHome");
    }
}
public class GeoLocationUtil { /** * Replies the precision used to test a distance value .
* @ param newPrecisionValue the new precision value ( in meters ) .
* @ return the old precision value . */
public static double setDistanceEpsilon ( double newPrecisionValue ) { } } | if ( ( newPrecisionValue >= 1 ) || ( newPrecisionValue <= 0 ) ) { throw new IllegalArgumentException ( ) ; } final double old = distancePrecision ; distancePrecision = newPrecisionValue ; return old ; |
public class ExpiryIndex {
    /**
     * Remove a specific ExpirableReference from the expiry index. This method
     * removes the object directly from the tree and does not use the iterator.
     * It does not therefore require a prior call to next().
     *
     * @param expirable the ExpirableReference to be removed; may be null
     *        (in which case the delete will simply not match).
     * @return true if the object was removed from the index successfully.
     */
    public boolean remove(ExpirableReference expirable) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "remove",
                    (expirable == null ? "null"
                            : "ObjId=" + expirable.getID() + " ET=" + expirable.getExpiryTime()));
        boolean reply = tree.delete(expirable);
        // Keep the cached size in step with the tree; only decrement when the
        // delete actually removed an entry.
        if (reply) {
            size--;
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "remove", "reply=" + reply);
        return reply;
    }
}
public class AmazonEC2Client { /** * Modifies the Capacity Reservation settings for a stopped instance . Use this action to configure an instance to
* target a specific Capacity Reservation , run in any < code > open < / code > Capacity Reservation with matching
* attributes , or run On - Demand Instance capacity .
* @ param modifyInstanceCapacityReservationAttributesRequest
* @ return Result of the ModifyInstanceCapacityReservationAttributes operation returned by the service .
* @ sample AmazonEC2 . ModifyInstanceCapacityReservationAttributes
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / ModifyInstanceCapacityReservationAttributes "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public ModifyInstanceCapacityReservationAttributesResult modifyInstanceCapacityReservationAttributes ( ModifyInstanceCapacityReservationAttributesRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeModifyInstanceCapacityReservationAttributes ( request ) ; |
public class CPAttachmentFileEntryWrapper { /** * Returns the localized title of this cp attachment file entry in the language , optionally using the default language if no localization exists for the requested language .
* @ param languageId the ID of the language
* @ param useDefault whether to use the default language if no localization exists for the requested language
* @ return the localized title of this cp attachment file entry */
@ Override public String getTitle ( String languageId , boolean useDefault ) { } } | return _cpAttachmentFileEntry . getTitle ( languageId , useDefault ) ; |
public class SolrIndexer { /** * Send the document to buffer directly
* @ param index
* @ param fields */
public void sendIndexToBuffer ( String index , Map < String , List < String > > fields ) { } } | String doc = pyUtils . solrDocument ( fields ) ; addToBuffer ( index , doc ) ; |
public class Mysql { /** * Get database connection .
* @ return The connection
* @ throws SQLException
* @ throws InstantiationException
* @ throws IllegalAccessException
* @ throws ClassNotFoundException */
public final Connection getConnection ( ) throws SQLException , InstantiationException , IllegalAccessException , ClassNotFoundException { } } | String url = "jdbc:mysql://" + this . hostname + ":" + this . port + "/" + this . database + "?user=" + this . username + "&password=" + this . password + "&autoReconnect=true" + "&failOverReadOnly=false&maxReconnects=3" ; DriverManager . registerDriver ( ( Driver ) Class . forName ( "com.mysql.jdbc.Driver" ) . newInstance ( ) ) ; return DriverManager . getConnection ( url ) ; |
public class CmsXmlSitemapGenerator { /** * Gets the detail link for a given container page and detail content . < p >
* @ param pageRes the container page
* @ param detailRes the detail content
* @ param locale the locale for which we want the link
* @ return the detail page link */
protected String getDetailLink ( CmsResource pageRes , CmsResource detailRes , Locale locale ) { } } | String pageSitePath = m_siteGuestCms . getSitePath ( pageRes ) ; String detailSitePath = m_siteGuestCms . getSitePath ( detailRes ) ; CmsRequestContext requestContext = m_siteGuestCms . getRequestContext ( ) ; String originalUri = requestContext . getUri ( ) ; Locale originalLocale = requestContext . getLocale ( ) ; try { requestContext . setUri ( pageSitePath ) ; requestContext . setLocale ( locale ) ; return OpenCms . getLinkManager ( ) . getOnlineLink ( m_siteGuestCms , detailSitePath , true ) ; } finally { requestContext . setUri ( originalUri ) ; requestContext . setLocale ( originalLocale ) ; } |
public class ContextManager {
    /**
     * Joins the CCOW common context, if available.
     * No-op when a CCOW context is already active. Lazily creates the CCOW
     * context manager on first use (when CCOW is enabled), then resumes it
     * and re-initializes; if the context change is rejected, participation
     * is suspended again.
     */
    public void ccowJoin() {
        // Already joined — nothing to do.
        if (ccowIsActive()) {
            return;
        }
        // Lazy creation: only when CCOW support is enabled and no manager exists yet.
        if (ccowContextManager == null && ccowEnabled) {
            ccowContextManager = new CCOWContextManager();
            ccowContextManager.subscribe(this);
            ccowContextManager.run("CareWebFramework#", "", true, "*");
        }
        if (ccowContextManager != null) {
            if (!ccowContextManager.isActive()) {
                ccowContextManager.resume();
            }
            // Re-initialize; back out (suspend) if the context change is rejected.
            init(response -> {
                if (response.rejected()) {
                    ccowContextManager.suspend();
                }
                updateCCOWStatus();
            });
        }
    }
}
public class ClusteringKeyMapper { /** * Returns the clustering key contained in the specified { @ link CellName } .
* @ param document A { @ link Document } .
* @ return The clustering key contained in the specified { @ link CellName } . */
public final CellName clusteringKey ( Document document ) { } } | String string = document . get ( FIELD_NAME ) ; ByteBuffer bb = ByteBufferUtils . fromString ( string ) ; return cellNameType . cellFromByteBuffer ( bb ) ; |
public class WrapFactory { /** * Wrap Java object as Scriptable instance to allow full access to its
* methods and fields from JavaScript .
* { @ link # wrap ( Context , Scriptable , Object , Class ) } and
* { @ link # wrapNewObject ( Context , Scriptable , Object ) } call this method
* when they can not convert < tt > javaObject < / tt > to JavaScript primitive
* value or JavaScript array .
* Subclasses can override the method to provide custom wrappers
* for Java objects .
* @ param cx the current Context for this thread
* @ param scope the scope of the executing script
* @ param javaObject the object to be wrapped
* @ param staticType type hint . If security restrictions prevent to wrap
* object based on its class , staticType will be used instead .
* @ return the wrapped value which shall not be null */
public Scriptable wrapAsJavaObject ( Context cx , Scriptable scope , Object javaObject , Class < ? > staticType ) { } } | return new NativeJavaObject ( scope , javaObject , staticType ) ; |
public class WhitelistWarningsGuard {
    /**
     * Loads the legacy whitelisted-warnings list from the given file.
     * Delegates to the CharSource-based overload, reading the file as UTF-8.
     *
     * @param file the whitelist file to read
     * @return the lines of the file.
     */
    public static Set<String> loadWhitelistedJsWarnings(File file) {
        return loadWhitelistedJsWarnings(Files.asCharSource(file, UTF_8));
    }
}
public class ColorPository { /** * Loads up a serialized ColorPository from the supplied resource manager . */
public static ColorPository loadColorPository ( InputStream source ) { } } | try { return ( ColorPository ) CompiledConfig . loadConfig ( source ) ; } catch ( IOException ioe ) { log . warning ( "Failure loading color pository" , "ioe" , ioe ) ; return new ColorPository ( ) ; } |
public class RegistriesInner { /** * Schedules a new run based on the request parameters and add it to the run queue .
* @ param resourceGroupName The name of the resource group to which the container registry belongs .
* @ param registryName The name of the container registry .
* @ param runRequest The parameters of a run that needs to scheduled .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the RunInner object */
public Observable < RunInner > beginScheduleRunAsync ( String resourceGroupName , String registryName , RunRequest runRequest ) { } } | return beginScheduleRunWithServiceResponseAsync ( resourceGroupName , registryName , runRequest ) . map ( new Func1 < ServiceResponse < RunInner > , RunInner > ( ) { @ Override public RunInner call ( ServiceResponse < RunInner > response ) { return response . body ( ) ; } } ) ; |
public class RLSControllerImpl {
    /**
     * Cancels the corresponding suspend operation, identified by the supplied token.
     * If there are no outstanding suspend operations, then resumes i/o to the
     * recovery log files.
     *
     * @param token identifies the corresponding suspend operation to cancel
     * @exception RLSInvalidSuspendTokenException Thrown if token is null, invalid or has expired
     * @exception UnsupportedOperationException Thrown when running on z/OS, where
     *            suspend/resume is not supported
     */
    public void resume(RLSSuspendToken token) throws RLSInvalidSuspendTokenException {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "resume", token);
        // Suspend/resume of the recovery log service is not available on z/OS.
        if (Configuration.isZOS()) {
            if (tc.isEventEnabled())
                Tr.event(tc, "Operation not supported on ZOS - throwing UnsupportedOperationException");
            if (tc.isEntryEnabled())
                Tr.exit(tc, "resume", "java.lang.UnsupportedOperationException");
            throw new UnsupportedOperationException();
        }
        // Delegate to the static resume implementation.
        RLSControllerImpl.resumeRLS(token);
        if (tc.isEntryEnabled())
            Tr.exit(tc, "resume");
    }
}
public class JsonRpcMultiServer {
    /**
     * Gets the service name from the method name. JSON-RPC methods of the form
     * {@code Service.method} result in {@code "Service"} being returned.
     * Note: when the method name contains no separator — or the separator is at
     * position 0 — the method name itself is returned unchanged; a {@code null}
     * input yields {@code null}.
     *
     * @param methodName method name
     * @return the name of the service, the method name itself when no prefix is
     *         present, or {@code null} for {@code null} input
     */
    @Override
    protected String getServiceName(final String methodName) {
        if (methodName != null) {
            int ndx = methodName.indexOf(this.separator);
            // ndx > 0 (not >= 0): a leading separator means there is no service prefix.
            if (ndx > 0) {
                return methodName.substring(0, ndx);
            }
        }
        return methodName;
    }
}
public class AWSAutoScalingPlansClient { /** * Deletes the specified scaling plan .
* Deleting a scaling plan deletes the underlying < a > ScalingInstruction < / a > for all of the scalable resources that
* are covered by the plan .
* If the plan has launched resources or has scaling activities in progress , you must delete those resources
* separately .
* @ param deleteScalingPlanRequest
* @ return Result of the DeleteScalingPlan operation returned by the service .
* @ throws ValidationException
* An exception was thrown for a validation issue . Review the parameters provided .
* @ throws ObjectNotFoundException
* The specified object could not be found .
* @ throws ConcurrentUpdateException
* Concurrent updates caused an exception , for example , if you request an update to a scaling plan that
* already has a pending update .
* @ throws InternalServiceException
* The service encountered an internal error .
* @ sample AWSAutoScalingPlans . DeleteScalingPlan
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / autoscaling - plans - 2018-01-06 / DeleteScalingPlan "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DeleteScalingPlanResult deleteScalingPlan ( DeleteScalingPlanRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDeleteScalingPlan ( request ) ; |
public class FnJodaTimeUtils { /** * A { @ link DateTime } is created from the given { @ link String } { @ link Collection } .
* The result will be created with the given { @ link Chronology }
* The valid input Collection & lt ; String & gt ; are :
* < ul >
* < li > year ( month and day will be set to 1 ) < / li >
* < li > year , month ( day will be set to 1 ) < / li >
* < li > year , month , day < / li >
* < / ul >
* @ param chronology { @ link Chronology } to be used
* @ return the { @ link DateTime } created from the input and arguments */
public static final Function < Collection < String > , DateTime > strFieldCollectionToDateTime ( Chronology chronology ) { } } | return FnDateTime . strFieldCollectionToDateTime ( chronology ) ; |
public class JoinNode { /** * Returns tables in the order they are joined in the tree by iterating the tree depth - first */
public Collection < String > generateTableJoinOrder ( ) { } } | List < JoinNode > leafNodes = generateLeafNodesJoinOrder ( ) ; Collection < String > tables = new ArrayList < > ( ) ; for ( JoinNode node : leafNodes ) { tables . add ( node . getTableAlias ( ) ) ; } return tables ; |
public class DomainObject {
    /**
     * Removes all of the elements from a list-or-array field.
     * For a List the list is cleared in place; for an array a fresh empty array
     * of the field's component type is installed. If the field is neither a
     * list nor an array, or is an uninitialized (null) list/array, a
     * RuntimeException is thrown.
     *
     * @param fieldName name of the field to clear
     */
    public void clearListField(String fieldName) {
        Object val = getFieldValue(fieldName, true); // internal
        if (val instanceof List<?>)
            ((List<?>) val).clear();
        else if (val != null && val.getClass().isArray()) {
            // Arrays cannot be cleared in place: resolve the component type and
            // re-initialize the field with an empty array.
            DOField fld = getDomainObjectType().getFieldByName(fieldName);
            String ctn = fld.getComponentTypeName();
            Class<?> clazz;
            try {
                clazz = getDomainObjectType().getDomainModel().getClassForName(ctn);
                tryInitListOrArray(null, fld, clazz);
            } catch (ClassNotFoundException e) {
                throw new RuntimeException(e);
            }
        } else {
            // Neither list nor array value present: report the precise reason.
            if (!getDomainObjectType().getFieldByName(fieldName).isListOrArray())
                throw new RuntimeException("field: [" + fieldName + "] is neither list nor array");
            if (val == null)
                throw new RuntimeException("list or array field: [" + fieldName + "] is null");
        }
    }
}
public class StringConverter {
    /**
     * Extracts period values from an object of this converter's type and sets
     * them into the given ReadWritablePeriod. The period is cleared first, then
     * parsed from the string using the standard ISO period format.
     *
     * @param period period to get modified
     * @param object the String to convert, must not be null
     * @param chrono the chronology to use (unused by the ISO string parse)
     * @throws ClassCastException if the object is not a String
     * @throws IllegalArgumentException if the string is not a valid ISO period
     */
    public void setInto(ReadWritablePeriod period, Object object, Chronology chrono) {
        String str = (String) object;
        PeriodFormatter parser = ISOPeriodFormat.standard();
        period.clear();
        int pos = parser.parseInto(period, str, 0);
        // A full parse consumes the whole string; anything less is an error.
        if (pos < str.length()) {
            if (pos < 0) {
                // Parse again to get a better exception thrown.
                parser.withParseType(period.getPeriodType()).parseMutablePeriod(str);
            }
            throw new IllegalArgumentException("Invalid format: \"" + str + '"');
        }
    }
}
public class BindSharedPreferencesBuilder {
    /**
     * Generates the inner {@code BindEditor} class for the given shared
     * preferences entity: one {@code putXxx} and one {@code removeXxx} method per
     * property, plus a {@code clear()} method. The generated class is added to
     * the enclosing type builder.
     *
     * @param entity the preferences entity to generate an editor for
     */
    private static void generateEditor(PrefsEntity entity) {
        // Converts lowerCamel property names to UpperCamel method suffixes.
        com.abubusoft.kripton.common.Converter<String, String> converter =
                CaseFormat.LOWER_CAMEL.converterTo(CaseFormat.UPPER_CAMEL);
        Builder innerClassBuilder = TypeSpec.classBuilder("BindEditor")
                .addModifiers(Modifier.PUBLIC)
                .addJavadoc("editor class for shared preferences\n")
                .superclass(typeName("AbstractEditor"));
        // Editors are obtained through the factory, never constructed directly.
        innerClassBuilder.addMethod(
                MethodSpec.constructorBuilder().addModifiers(Modifier.PRIVATE).build());
        PrefsTransform transform;
        // write method
        for (PrefsProperty item : entity.getCollection()) {
            { // put — fluent setter for one property
                MethodSpec.Builder builder = MethodSpec
                        .methodBuilder("put" + converter.convert(item.getName()))
                        .addModifiers(Modifier.PUBLIC)
                        .addParameter(typeName(item.getElement()), "value")
                        .addJavadoc("modifier for property $L\n", item.getName())
                        .returns(typeName("BindEditor"));
                // Use the adapter's data type when a type adapter is declared.
                TypeName type;
                if (item.hasTypeAdapter()) {
                    type = typeName(item.typeAdapter.dataType);
                } else {
                    type = TypeUtility.typeName(item.getElement());
                }
                transform = PrefsTransformer.lookup(type);
                transform.generateWriteProperty(builder, "editor", null, "value", item);
                builder.addCode("\n");
                builder.addStatement("return this");
                innerClassBuilder.addMethod(builder.build());
            }
            { // remove — fluent remover for one property
                MethodSpec.Builder builder = MethodSpec
                        .methodBuilder("remove" + converter.convert(item.getName()))
                        .addModifiers(Modifier.PUBLIC)
                        .addJavadoc("remove property $L\n", item.getName())
                        .returns(typeName("BindEditor"));
                builder.addStatement("editor.remove($S)", item.getPreferenceKey());
                builder.addStatement("return this");
                innerClassBuilder.addMethod(builder.build());
            }
        }
        { // clear — removes every property
            MethodSpec.Builder builder = MethodSpec.methodBuilder("clear")
                    .addModifiers(Modifier.PUBLIC)
                    .addJavadoc("clear all properties\n")
                    .returns(typeName("BindEditor"));
            builder.addStatement("editor.clear()");
            builder.addStatement("return this");
            innerClassBuilder.addMethod(builder.build());
        }
        // NOTE(review): 'builder' here is not declared in this method — presumably
        // the enclosing class-level TypeSpec builder; confirm against the class fields.
        builder.addType(innerClassBuilder.build());
    }
}
public class UnicodeFormatter { /** * 转换 \ \ u123这种编码为正常字符
* @ param s 待转换字符
* @ return String */
public static String decodeUnicodeString ( String s ) { } } | if ( Strings . isNullOrEmpty ( s ) ) return s ; int length = s . length ( ) ; StringBuilder sb = new StringBuilder ( length ) ; Matcher m = UNICODE_PATTERN . matcher ( s ) ; int begin = 0 ; while ( m . find ( ) ) { int start = m . start ( ) ; if ( start > 0 && s . charAt ( start - 1 ) == '\u0003' ) { if ( start - 1 > begin ) { sb . append ( s , begin , start - 1 ) ; } begin = start ; continue ; } sb . append ( s , begin , start ) ; String mcStr = m . group ( 1 ) ; try { char charValue = ( char ) Integer . parseInt ( mcStr , 16 ) ; sb . append ( charValue ) ; begin = m . end ( ) ; } catch ( NumberFormatException e ) { System . out . println ( e . getMessage ( ) ) ; } } if ( begin < length ) { sb . append ( s , begin , length ) ; } return sb . toString ( ) ; |
public class Learner { /** * Generate an instance of Learner from an existing directory .
* @ param overwrite
* deletes any existing data in the model directory
* @ return an instance of a Learner corresponding to the implementationName
* @ throws ClassNotFoundException
* @ throws IllegalAccessException
* @ throws InstantiationException */
public static Learner getLearner ( String workdirectory , String implementationName , boolean overwrite ) throws Exception { } } | File directory = new File ( workdirectory ) ; if ( directory . exists ( ) == false ) throw new Exception ( workdirectory + " must exist" ) ; if ( directory . isDirectory ( ) == false ) throw new Exception ( workdirectory + " must be a directory" ) ; // create the file names
String model_file_name = workdirectory + File . separator + Parameters . modelName ; String lexicon_file_name = workdirectory + File . separator + Parameters . lexiconName ; String vector_file_name = workdirectory + File . separator + Parameters . vectorName ; String raw_file_name = workdirectory + File . separator + Parameters . rawName ; Learner learner = null ; // removes existing files for lexicon model and vector
if ( overwrite ) { removeExistingFile ( model_file_name ) ; removeExistingFile ( lexicon_file_name ) ; removeExistingFile ( vector_file_name ) ; removeExistingFile ( raw_file_name ) ; } // define which implementation to use
if ( LibSVMModelCreator . equals ( implementationName ) ) learner = new LibSVMModelCreator ( lexicon_file_name , model_file_name , vector_file_name ) ; else if ( LibLinearModelCreator . equals ( implementationName ) ) learner = new LibLinearModelCreator ( lexicon_file_name , model_file_name , vector_file_name ) ; else throw new Exception ( implementationName + " is unknown" ) ; // reuse the existing lexicon
if ( ! overwrite ) { Lexicon oldlexicon = new Lexicon ( lexicon_file_name ) ; if ( oldlexicon != null ) learner . lexicon = oldlexicon ; } learner . workdirectory = directory ; return learner ; |
public class StartMonitoringMembersRequestMarshaller {
    /**
     * Marshalls the given request object into the protocol marshaller.
     * The account IDs and detector ID are written in the fixed binding order;
     * any failure is rethrown as an SdkClientException with the cause attached.
     *
     * @param startMonitoringMembersRequest request to marshall; must not be null
     * @param protocolMarshaller            target marshaller
     */
    public void marshall(StartMonitoringMembersRequest startMonitoringMembersRequest, ProtocolMarshaller protocolMarshaller) {
        if (startMonitoringMembersRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(startMonitoringMembersRequest.getAccountIds(), ACCOUNTIDS_BINDING);
            protocolMarshaller.marshall(startMonitoringMembersRequest.getDetectorId(), DETECTORID_BINDING);
        } catch (Exception e) {
            // Wrap everything uniformly so callers only see SdkClientException.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class PoolSchedulable { /** * Get the queue of sessions sorted for preemption
* @ return the queue of sessions sorted for preemption */
public Queue < SessionSchedulable > getPreemptQueue ( ) { } } | if ( preemptQueue == null ) { ScheduleComparator comparator = null ; switch ( configManager . getPoolComparator ( poolInfo ) ) { case FIFO : comparator = ScheduleComparator . FIFO_PREEMPT ; break ; case FAIR : comparator = ScheduleComparator . FAIR_PREEMPT ; break ; case DEADLINE : comparator = ScheduleComparator . DEADLINE_PREEMPT ; break ; default : throw new IllegalArgumentException ( "Unknown comparator" ) ; } preemptQueue = createSessionQueue ( comparator ) ; } return preemptQueue ; |
public class BoxFolder { /** * Adds a collaborator to this folder .
* @ param collaborator the collaborator to add .
* @ param role the role of the collaborator .
* @ param notify the user / group should receive email notification of the collaboration or not .
* @ param canViewPath the view path collaboration feature is enabled or not .
* View path collaborations allow the invitee to see the entire ancestral path to the associated
* folder . The user will not gain privileges in any ancestral folder .
* @ return info about the new collaboration . */
public BoxCollaboration . Info collaborate ( BoxCollaborator collaborator , BoxCollaboration . Role role , Boolean notify , Boolean canViewPath ) { } } | JsonObject accessibleByField = new JsonObject ( ) ; accessibleByField . add ( "id" , collaborator . getID ( ) ) ; if ( collaborator instanceof BoxUser ) { accessibleByField . add ( "type" , "user" ) ; } else if ( collaborator instanceof BoxGroup ) { accessibleByField . add ( "type" , "group" ) ; } else { throw new IllegalArgumentException ( "The given collaborator is of an unknown type." ) ; } return this . collaborate ( accessibleByField , role , notify , canViewPath ) ; |
public class PiwikRequest { /** * Add a value to the json array at the specified parameter
* @ param key the key of the json array to add to
* @ param value the value to add . Cannot be null */
private void addToJsonArray ( String key , JsonValue value ) { } } | if ( value == null ) { throw new NullPointerException ( "Value cannot be null." ) ; } PiwikJsonArray a = ( PiwikJsonArray ) parameters . get ( key ) ; if ( a == null ) { a = new PiwikJsonArray ( ) ; parameters . put ( key , a ) ; } a . add ( value ) ; |
public class ReflectiveRuntimeMXBean {
    /**
     * {@inheritDoc}
     * Invokes the platform RuntimeMXBean's getInputArguments reflectively when
     * the method is available; on any reflective failure an empty list is
     * returned instead of propagating the error.
     */
    @SuppressWarnings("unchecked")
    public List<String> getInputArguments() {
        if (Holder.getInputArgumentsMethod != null) {
            try {
                return (List<String>) Holder.getInputArgumentsMethod.invoke(runtimeMxBean);
            } catch (ClassCastException e) { // no multi-catch with source level 6
                // fallthrough — the verbose catch chain below is deliberate:
                // this file targets Java 6, which lacks multi-catch.
            } catch (IllegalAccessException e) { // fallthrough
            } catch (IllegalArgumentException e) { // fallthrough
            } catch (InvocationTargetException e) { // fallthrough
            }
        }
        // Reflection unavailable or failed — degrade gracefully.
        return Collections.emptyList();
    }
}
public class GenericShuffleJXTable {
    /**
     * Moves all rows from the right table to the left table: the right table's
     * data is appended to the left model, then the right model is emptied.
     */
    public void addAllRightRowsToLeftTable() {
        leftTable.getGenericTableModel().addList(rightTable.getGenericTableModel().getData());
        // Clear only after the data has been handed to the left model.
        rightTable.getGenericTableModel().clear();
    }
}
public class ArrayELResolver { /** * If the base object is a Java language array , returns the value at the given index . The index
* is specified by the property argument , and coerced into an integer . If the coercion could not
* be performed , an IllegalArgumentException is thrown . If the index is out of bounds , null is
* returned . If the base is a Java language array , the propertyResolved property of the
* ELContext object must be set to true by this resolver , before returning . If this property is
* not true after this method is called , the caller should ignore the return value .
* @ param context
* The context of this evaluation .
* @ param base
* The array to analyze . Only bases that are a Java language array are handled by
* this resolver .
* @ param property
* The index of the element in the array to return the acceptable type for . Will be
* coerced into an integer , but otherwise ignored by this resolver .
* @ return If the propertyResolved property of ELContext was set to true , then the value at the
* given index or null if the index was out of bounds . Otherwise , undefined .
* @ throws PropertyNotFoundException
* if the given index is out of bounds for this array .
* @ throws NullPointerException
* if context is null
* @ throws ELException
* if an exception was thrown while performing the property or variable resolution .
* The thrown exception must be included as the cause property of this exception , if
* available . */
@ Override public Object getValue ( ELContext context , Object base , Object property ) { } } | if ( context == null ) { throw new NullPointerException ( "context is null" ) ; } Object result = null ; if ( isResolvable ( base ) ) { int index = toIndex ( null , property ) ; result = index < 0 || index >= Array . getLength ( base ) ? null : Array . get ( base , index ) ; context . setPropertyResolved ( true ) ; } return result ; |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link MtextType } { @ code > } } */
@ XmlElementDecl ( namespace = "http://www.w3.org/1998/Math/MathML" , name = "mtext" ) public JAXBElement < MtextType > createMtext ( MtextType value ) { } } | return new JAXBElement < MtextType > ( _Mtext_QNAME , MtextType . class , null , value ) ; |
public class StorePackageImpl {
    /**
     * Lazily resolves the ModelCheckerResultItem EClass from the registered
     * package (classifier index 92) and caches it.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated  — EMF-generated accessor; do not hand-edit the logic.
     */
    @Override
    public EClass getModelCheckerResultItem() {
        if (modelCheckerResultItemEClass == null) {
            modelCheckerResultItemEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(StorePackage.eNS_URI).getEClassifiers().get(92);
        }
        return modelCheckerResultItemEClass;
    }
}
public class Messages {
    /**
     * Loads all registered message resources: the default bundle (empty language
     * key) plus one composite bundle per registered language. Each composite
     * starts from the defaults, is overlaid with the language-only bundle (e.g.
     * "en") when the language has a country code, and finally with the specific
     * bundle (e.g. "en-US"). Missing resources are logged as errors.
     *
     * @param name a format string with one %s slot for the language suffix
     * @return an unmodifiable map from lower-cased language key to its messages
     */
    private Map<String, Properties> loadRegisteredMessageResources(String name) {
        Map<String, Properties> messageResources = new TreeMap<>();
        // Load default messages
        Properties defaultMessages = loadMessages(String.format(name, ""));
        if (defaultMessages == null) {
            log.error("Could not locate the default messages resource '{}', please create it.",
                    String.format(name, ""));
        } else {
            messageResources.put("", defaultMessages);
        }
        // Load the registered language resources
        List<String> registeredLanguages = languages.getRegisteredLanguages();
        for (String language : registeredLanguages) {
            // First step: load the complete language, e.g. "en-US".
            Properties messages = loadMessages(String.format(name, "_" + language));
            Properties messagesLangOnly = null;
            // If the language has a country code, load the defaults for the bare
            // language: missing keys in en-US are filled in from "en".
            String langComponent = languages.getLanguageComponent(language);
            if (!langComponent.equals(language)) {
                // See if we have already loaded the language-only messages.
                messagesLangOnly = messageResources.get(langComponent);
                if (messagesLangOnly == null) {
                    // Load the language-only messages.
                    messagesLangOnly = loadMessages(String.format(name, "_" + langComponent));
                }
            }
            // If a language is registered in application.properties it should be there.
            if (messages == null) {
                log.error("Could not locate the '{}' messages resource '{}' specified in '{}'.",
                        language, String.format(name, "_" + language),
                        PippoConstants.SETTING_APPLICATION_LANGUAGES);
            } else {
                // Add a new language: start with the default messages...
                Properties compositeMessages = new Properties(defaultMessages);
                // ...put all the language-component messages ("en")...
                if (messagesLangOnly != null) {
                    compositeMessages.putAll(messagesLangOnly);
                    // Cache language-component messages for later languages.
                    if (!messageResources.containsKey(langComponent)) {
                        Properties langResources = new Properties();
                        langResources.putAll(compositeMessages);
                        messageResources.put(langComponent, langResources);
                    }
                }
                // ...then overlay the language-specific messages ("en-US").
                compositeMessages.putAll(messages);
                messageResources.put(language.toLowerCase(), compositeMessages);
            }
        }
        return Collections.unmodifiableMap(messageResources);
    }
}
public class NamespaceResources { /** * Returns namespaces owned by the user .
* @ param req The HTTP request .
* @ return The namespaces owned by the user . */
@ GET @ Produces ( MediaType . APPLICATION_JSON ) @ Description ( "Returns namespaces owned by user." ) public List < NamespaceDto > getNamespacesByOwner ( @ Context HttpServletRequest req ) { } } | PrincipalUser remoteUser = validateAndGetOwner ( req , null ) ; List < Namespace > namespaces = _namespaceService . findNamespacesByOwner ( remoteUser ) ; return NamespaceDto . transformToDto ( namespaces ) ; |
public class ContextManager { /** * Helper method to get the configured list of URLs .
* @ return The list of URLs . */
@ SuppressWarnings ( "unchecked" ) @ Trivial private List < String > getEnvURLList ( ) { } } | return ( List < String > ) iEnvironment . get ( ENVKEY_URL_LIST ) ; |
public class PdfBoxGraphics2DFontTextDrawer { /** * Register a font which is already associated with the PDDocument
* @ param name
* the name of the font as returned by
* { @ link java . awt . Font # getFontName ( ) } . This name is used for the
* mapping the java . awt . Font to this PDFont .
* @ param font
* the PDFont to use . This font must be loaded in the current
* document . */
@ SuppressWarnings ( "WeakerAccess" ) public void registerFont ( String name , PDFont font ) { } } | fontMap . put ( name , font ) ; |
public class RandomAccessReader { /** * Returns the object at given record No .
* Record numbers are zero - based ! */
@ Override public synchronized IChemObject readRecord ( int record ) throws Exception { } } | String buffer = readContent ( record ) ; if ( chemObjectReader == null ) throw new CDKException ( "No chemobject reader!" ) ; else { chemObjectReader . setReader ( new StringReader ( buffer ) ) ; currentRecord = record ; return processContent ( ) ; } |
public class BDBRepositoryBuilder {
    /**
     * Opens the BDB environment, checks if it is corrupt, and then closes it.
     * Only one process should open the environment for verification. Expect it
     * to take a long time.
     * The builder's mutable settings are temporarily forced into a safe
     * read-only verification mode and restored in the finally block.
     *
     * @param out optional stream to capture any verification errors
     * @return true if environment passes verification
     * @throws RepositoryException if the environment home is missing or the
     *         repository cannot be opened
     */
    public boolean verify(PrintStream out) throws RepositoryException {
        // Snapshot every setting that is mutated below so it can be restored.
        final StorableCodecFactory codecFactory = mStorableCodecFactory;
        final String name = mName;
        final boolean readOnly = mReadOnly;
        final boolean runCheckpointer = mRunCheckpointer;
        final boolean runDeadlockDetector = mRunDeadlockDetector;
        final boolean lockConflictDeadlockDetect = mLockConflictDeadlockDetect;
        final boolean isPrivate = mPrivate;
        if (mName == null) {
            // Allow a dummy name for verification.
            mName = "BDB verification";
        }
        if (mStorableCodecFactory == null) {
            mStorableCodecFactory = new CompressedStorableCodecFactory(mCompressionMap);
        }
        // Force a passive, read-only configuration for the verification pass.
        mReadOnly = true;
        mRunCheckpointer = false;
        mRunDeadlockDetector = false;
        mLockConflictDeadlockDetect = false;
        try {
            assertReady();
            File homeFile = getEnvironmentHomeFile();
            if (!homeFile.exists()) {
                throw new RepositoryException("Environment home directory does not exist: " + homeFile);
            }
            AtomicReference<Repository> rootRef = new AtomicReference<Repository>();
            BDBRepository repo;
            try {
                repo = getRepositoryConstructor().newInstance(rootRef, this);
            } catch (Exception e) {
                ThrowUnchecked.fireFirstDeclaredCause(e, RepositoryException.class);
                // Not reached.
                return false;
            }
            rootRef.set(repo);
            try {
                return repo.verify(out);
            } catch (Exception e) {
                throw repo.toRepositoryException(e);
            } finally {
                repo.close();
            }
        } finally {
            // Restore the caller's builder configuration.
            mName = name;
            mStorableCodecFactory = codecFactory;
            mReadOnly = readOnly;
            mRunCheckpointer = runCheckpointer;
            mRunDeadlockDetector = runDeadlockDetector;
            mLockConflictDeadlockDetect = lockConflictDeadlockDetect;
        }
    }
}
public class TransparentDataEncryptionsInner { /** * Creates or updates a database ' s transparent data encryption configuration .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param serverName The name of the server .
* @ param databaseName The name of the database for which setting the transparent data encryption applies .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the TransparentDataEncryptionInner object */
public Observable < TransparentDataEncryptionInner > createOrUpdateAsync ( String resourceGroupName , String serverName , String databaseName ) { } } | return createOrUpdateWithServiceResponseAsync ( resourceGroupName , serverName , databaseName ) . map ( new Func1 < ServiceResponse < TransparentDataEncryptionInner > , TransparentDataEncryptionInner > ( ) { @ Override public TransparentDataEncryptionInner call ( ServiceResponse < TransparentDataEncryptionInner > response ) { return response . body ( ) ; } } ) ; |
public class FastStr {
    /**
     * Construct a FastStr instance from char array, from the start position, finished at end position
     * without copying the array. This method might use the array directly instead of copying elements
     * from the array. Thus it is extremely important that the array buf passed in will NOT be updated
     * outside the FastStr instance.
     *
     * @param buf the char array
     * @param start the start position (inclusive)
     * @param end the end position (exclusive)
     * @return a FastStr instance that consist of chars specified
     */
    public static FastStr unsafeOf(char[] buf, int start, int end) {
        E.NPE(buf); // rejects a null buffer up front
        // Bounds check happens BEFORE the empty-range shortcut, so e.g.
        // (start=-2, end=-3) still fails rather than yielding EMPTY_STR.
        E.illegalArgumentIf(start < 0 || end > buf.length);
        // An inverted range is treated as empty, not as an error.
        if (end < start) return EMPTY_STR;
        // No copy: the FastStr aliases the caller's array (see class note above).
        return new FastStr(buf, start, end);
    }
}
public class JsonWebSignature { /** * Parses the given JWS token string and returns the parsed { @ link JsonWebSignature } .
* @ param jsonFactory JSON factory
* @ param tokenString JWS token string
* @ return parsed JWS */
public static JsonWebSignature parse ( JsonFactory jsonFactory , String tokenString ) throws IOException { } } | return parser ( jsonFactory ) . parse ( tokenString ) ; |
public class ProcessStore { /** * get a Primitive variable , first from the highest block hierarchy down to
* the global variables
* @ param key
* name of the variable
* @ return value of the variable */
public Primitive getPrimitiveVariable ( Object key ) { } } | Primitive object = null ; for ( int i = working . size ( ) - 1 ; i >= 0 ; -- i ) { Map < Object , Object > map = working . get ( i ) ; object = ( Primitive ) map . get ( key ) ; if ( object != null ) break ; } if ( object == null ) object = ( Primitive ) global . get ( key ) ; return object ; |
public class ComponentImpl {
    /**
     * Registers a UDF (user defined function) on this component, routing it to the
     * static scope, the abstract/final bookkeeping, or the regular member maps.
     *
     * @param key name under which the function is registered
     * @param udf the function to register; its owner is set to this component
     * @param useShadow when true the function is also exposed in the shadow scope
     * @param injected unused
     * @throws ApplicationException if an abstract function is declared in a non-abstract component
     * @deprecated injected is not used
     */
    public void registerUDF(Collection.Key key, UDFPlus udf, boolean useShadow, boolean injected) throws ApplicationException {
        udf.setOwnerComponent(this);
        // Inside the static constructor, functions go to the static scope only.
        if (insideStaticConstr) {
            _static.put(key, udf);
            return;
        }
        // Abstract UDF
        if (udf.getModifier() == MODIFIER_ABSTRACT) {
            // abstract methods are only allowed in abstract components
            if (getModifier() != MODIFIER_ABSTRACT) {
                throw new ApplicationException("the abstract function [" + key + "] is not allowed within the no abstract component [" + _getPageSource().getDisplayPath() + "]");
            }
            if (absFin == null) absFin = new AbstractFinal();
            absFin.add(key, udf);
            return; // abstract methods are not registered here
        }
        // Final UDF: tracked in the same structure, but also registered below.
        else if (udf.getModifier() == MODIFIER_FINAL) {
            if (absFin == null) absFin = new AbstractFinal();
            absFin.add(key, udf);
        }
        _udfs.put(key, udf);
        _data.put(key, udf);
        if (useShadow) scope.setEL(key, udf);
    }
}
public class FnBigInteger { /** * It performs a module operation and returns the value
* of ( input mod module ) which is always positive
* ( whereas remainder is not )
* @ param module the module
* @ return the result of ( input mod module ) */
public final static Function < BigInteger , BigInteger > module ( int module ) { } } | return new Module ( fromNumber ( Integer . valueOf ( module ) ) ) ; |
public class LinkedBlockingDeque { /** * Returns an array containing all of the elements in this deque , in
* proper sequence ( from first to last element ) .
* < p > The returned array will be " safe " in that no references to it are
* maintained by this deque . ( In other words , this method must allocate
* a new array ) . The caller is thus free to modify the returned array .
* < p > This method acts as bridge between array - based and collection - based
* APIs .
* @ return an array containing all of the elements in this deque */
public Object [ ] toArray ( ) { } } | final ReentrantLock lock = this . lock ; lock . lock ( ) ; try { Object [ ] a = new Object [ count ] ; int k = 0 ; for ( Node < E > p = first ; p != null ; p = p . next ) a [ k ++ ] = p . item ; return a ; } finally { lock . unlock ( ) ; } |
public class Popups {
    /**
     * Binds the popped up state of a popup to the supplied boolean value and vice versa (i.e. if
     * the popup is popped down, the value will be updated to false).
     *
     * @param popped the boolean value mirroring the popup's visibility
     * @param pos where to position the panel when it is shown
     * @param target widget relative to which the panel is positioned; must not be null
     * @param panel the popup panel to bind; must not be null
     */
    public static void bindPopped(final Value<Boolean> popped, final Position pos, final Widget target, final PopupPanel panel) {
        Preconditions.checkNotNull(target, "target");
        Preconditions.checkNotNull(panel, "panel");
        // When the panel closes by any means, push 'false' back into the bound value.
        // Registered BEFORE the listener below so a close triggered by the initial
        // sync is still reflected in 'popped'.
        panel.addCloseHandler(new CloseHandler<PopupPanel>() {
            public void onClose(CloseEvent<PopupPanel> event) {
                popped.update(false);
            }
        });
        // Mirror the value into panel visibility; 'AndTrigger' also applies the
        // value's current state immediately.
        popped.addListenerAndTrigger(new Value.Listener<Boolean>() {
            public void valueChanged(Boolean visible) {
                if (visible) {
                    show(panel, pos, target);
                } else {
                    panel.hide();
                }
            }
        });
    }
}
public class BaseTypeRegistry { /** * Get the qualified name of the type . Note that this will put in the
* < i > final < / i > typename for generic types .
* @ param typeName The identifier name .
* @ param programContext The package context .
* @ return The qualified typename . */
@ Nonnull private String qualifiedTypenameInternal ( @ Nonnull String typeName , @ Nonnull String programContext ) { } } | if ( PPrimitive . findByName ( typeName ) != null ) return typeName ; if ( typeName . startsWith ( "map<" ) && typeName . endsWith ( ">" ) ) { String [ ] generic = typeName . substring ( 4 , typeName . length ( ) - 1 ) . split ( "," , 2 ) ; if ( generic . length != 2 ) { throw new IllegalArgumentException ( "Invalid map generic part \"" + typeName + "\": missing ',' kv separator" ) ; } try { return "map<" + finalTypename ( generic [ 0 ] . trim ( ) , programContext ) + "," + finalTypename ( generic [ 1 ] . trim ( ) , programContext ) + ">" ; } catch ( IllegalArgumentException e ) { throw new IllegalArgumentException ( "Invalid map generic part \"" + typeName + "\": " + e . getMessage ( ) , e ) ; } } else if ( typeName . startsWith ( "set<" ) && typeName . endsWith ( ">" ) ) { String generic = typeName . substring ( 4 , typeName . length ( ) - 1 ) ; try { return "set<" + finalTypename ( generic . trim ( ) , programContext ) + ">" ; } catch ( IllegalArgumentException e ) { throw new IllegalArgumentException ( "Invalid set generic part \"" + typeName + "\": " + e . getMessage ( ) , e ) ; } } else if ( typeName . startsWith ( "list<" ) && typeName . endsWith ( ">" ) ) { String generic = typeName . substring ( 5 , typeName . length ( ) - 1 ) ; try { return "list<" + finalTypename ( generic . trim ( ) , programContext ) + ">" ; } catch ( IllegalArgumentException e ) { throw new IllegalArgumentException ( "Invalid list generic part \"" + typeName + "\": " + e . getMessage ( ) , e ) ; } } else if ( ! typeName . matches ( "([a-zA-Z_][a-zA-Z0-9_]*\\.)?[a-zA-Z_][a-zA-Z0-9_]*" ) ) { throw new IllegalArgumentException ( "Invalid atomic type name " + typeName ) ; } else { return qualifiedNameFromIdAndContext ( typeName , programContext ) ; } |
public class ParameterUtils { /** * check whether parameters type are compatible
* each parameter should match the following condition :
* { @ code targetParameter . getType ( ) . getTypeClass ( ) . isAssignableFrom ( sourceParameter . getType ( ) . getTypeClass ( ) ) }
* @ param source source parameters
* @ param target target parameters
* @ return the check result
* @ since 3.0.0 */
public static boolean parametersCompatible ( Parameter [ ] source , Parameter [ ] target ) { } } | return parametersMatch ( source , target , t -> ClassHelper . getWrapper ( t . getV2 ( ) ) . getTypeClass ( ) . isAssignableFrom ( ClassHelper . getWrapper ( t . getV1 ( ) ) . getTypeClass ( ) ) ) ; |
public class MorePreconditions { /** * Checks that a string is both non - null and non - empty .
* @ see # checkNotBlank ( String , String , Object . . . ) */
public static String checkNotBlank ( String argument ) { } } | Objects . requireNonNull ( argument , ARG_NOT_BLANK_MSG ) ; if ( StringUtils . isBlank ( argument ) ) { throw new IllegalArgumentException ( ARG_NOT_BLANK_MSG ) ; } return argument ; |
public class WikiParser {
    /**
     * Same as parseItem(); a blank line adds {@code <br/><br/>}.
     *
     * @param start offset in the wiki text where the item begins
     * @return offset just past the parsed item
     */
    private int parseListItem(int start) {
        start = skipSpacesToNewline(start, wikiLength); // skip spaces
        int end = parseItem(start, null, ContextType.LIST_ITEM);
        // '>' marks a blockquote level, ':' a div level; a whitespace-only item
        // inside either becomes a paragraph break.
        if ((listLevels[listLevel] == '>' || listLevels[listLevel] == ':') && wikiText.substring(start, end).trim().length() == 0) { // empty line within blockquote/div
            // Emit the break only once for consecutive blank lines.
            if (!blockquoteBR) {
                sb.append("<br/><br/>");
                blockquoteBR = true;
            }
        } else {
            // Non-blank item: re-arm the break emitter.
            blockquoteBR = false;
        }
        return end;
    }
}
public class CmsWidgetDialog { /** * Returns the html for a button to remove an optional element . < p >
* @ param elementName name of the element
* @ param index the element index of the element to remove
* @ param enabled if true , the button to remove an element is shown , otherwise a spacer is returned
* @ return the html for a button to remove an optional element */
public String buildRemoveElement ( String elementName , int index , boolean enabled ) { } } | if ( enabled ) { StringBuffer href = new StringBuffer ( 4 ) ; href . append ( "javascript:removeElement('" ) ; href . append ( elementName ) ; href . append ( "', " ) ; href . append ( index ) ; href . append ( ");" ) ; return button ( href . toString ( ) , null , "deletecontent.png" , Messages . GUI_DIALOG_BUTTON_DELETE_0 , 0 ) ; } else { return "" ; } |
public class RoundRectangle2dfx {
    /**
     * Replies the property for the arc height, creating it lazily on first access.
     *
     * <p>The property clamps its own value on invalidation: negative values become 0,
     * and values larger than half the rectangle's height are reduced to that maximum.
     *
     * @return the arcHeight property.
     */
    public DoubleProperty arcHeightProperty() {
        if (this.arcHeight == null) {
            // Depends on heightProperty() so the clamp is re-checked when the
            // rectangle's height changes.
            this.arcHeight = new DependentSimpleDoubleProperty<ReadOnlyDoubleProperty>(this, MathFXAttributeNames.ARC_HEIGHT, heightProperty()) {
                @Override
                protected void invalidated(ReadOnlyDoubleProperty dependency) {
                    final double value = get();
                    if (value < 0.) {
                        // Arc height cannot be negative.
                        set(0.);
                    } else {
                        // Cap at half of the current rectangle height.
                        final double maxArcHeight = dependency.get() / 2.;
                        if (value > maxArcHeight) {
                            set(maxArcHeight);
                        }
                    }
                }
            };
        }
        return this.arcHeight;
    }
}
public class FSNamesystem { /** * Adds block to list of blocks which will be invalidated on
* specified datanode and log the move
* @ param b block
* @ param n datanode */
void addToInvalidates ( Block b , DatanodeInfo n , boolean ackRequired ) { } } | addToInvalidatesNoLog ( b , n , ackRequired ) ; if ( isInitialized && ! isInSafeModeInternal ( ) ) { // do not log in startup phase
NameNode . stateChangeLog . info ( "BLOCK* NameSystem.addToInvalidates: " + b . getBlockName ( ) + " is added to invalidSet of " + n . getName ( ) ) ; } |
public class Json { /** * Writes the value as a field on the current JSON object , writing the class of the object if it differs from the specified
* known type .
* @ param value May be null .
* @ param knownType May be null if the type is unknown .
* @ see # writeValue ( String , Object , Class , Class ) */
public void writeValue ( String name , Object value , Class knownType ) { } } | try { writer . name ( name ) ; } catch ( IOException ex ) { throw new JsonException ( ex ) ; } writeValue ( value , knownType , null ) ; |
public class ConfidenceInterval { /** * Method that takes only one metric as parameter . It is useful when
* comparing more than two metrics ( so that a confidence interval is
* computed for each of them ) , as suggested in [ Sakai , 2014]
* @ param alpha probability of incorrectly rejecting the null hypothesis ( 1
* - confidence _ level )
* @ param metricValuesPerDimension one value of the metric for each
* dimension
* @ return array with the confidence interval : [ mean - margin of error , mean
* + margin of error ] */
public double [ ] getConfidenceInterval ( final double alpha , final Map < ? , Double > metricValuesPerDimension ) { } } | SummaryStatistics differences = new SummaryStatistics ( ) ; for ( Double d : metricValuesPerDimension . values ( ) ) { differences . addValue ( d ) ; } return getConfidenceInterval ( alpha , ( int ) differences . getN ( ) - 1 , ( int ) differences . getN ( ) , differences . getStandardDeviation ( ) , differences . getMean ( ) ) ; |
public class Collectors { /** * Returns a { @ code Collector } which applies an { @ code long } - producing
* mapping function to each input element , and returns summary statistics
* for the resulting values .
* @ param < T > the type of the input elements
* @ param mapper the mapping function to apply to each element
* @ return a { @ code Collector } implementing the summary - statistics reduction
* @ see # summarizingDouble ( ToDoubleFunction )
* @ see # summarizingInt ( ToIntFunction ) */
public static < T > Collector < T , ? , LongSummaryStatistics > summarizingLong ( ToLongFunction < ? super T > mapper ) { } } | return new CollectorImpl < T , LongSummaryStatistics , LongSummaryStatistics > ( LongSummaryStatistics :: new , ( r , t ) -> r . accept ( mapper . applyAsLong ( t ) ) , ( l , r ) -> { l . combine ( r ) ; return l ; } , CH_ID ) ; |
public class BndUtils { /** * Processes the input jar and generates the necessary OSGi headers using specified instructions .
* @ param jarInputStream input stream for the jar to be processed . Cannot be null .
* @ param instructions bnd specific processing instructions . Cannot be null .
* @ param jarInfo information about the jar to be processed . Usually the jar url . Cannot be null or empty .
* @ return an input stream for the generated bundle
* @ throws NullArgumentException if any of the parameters is null
* @ throws IOException re - thron during jar processing */
public static InputStream createBundle ( final InputStream jarInputStream , final Properties instructions , final String jarInfo ) throws IOException { } } | return createBundle ( jarInputStream , instructions , jarInfo , OverwriteMode . KEEP ) ; |
public class IOUtil { /** * Copies all bytes from a { @ linkplain ReadableByteChannel } to a { @ linkplain WritableByteChannel } .
* @ param dst the { @ linkplain WritableByteChannel } to copy to .
* @ param src the { @ linkplain ReadableByteChannel } to copy from .
* @ return the number of copied bytes .
* @ throws IOException if an I / O error occurs . */
public static long copyChannel ( WritableByteChannel dst , ReadableByteChannel src ) throws IOException { } } | ByteBuffer buffer = ByteBuffer . allocateDirect ( CHANNEL_IO_BUFFER_SIZE ) ; long copied = 0 ; int read ; while ( ( read = src . read ( buffer ) ) >= 0 ) { buffer . flip ( ) ; dst . write ( buffer ) ; buffer . clear ( ) ; copied += read ; } return copied ; |
public class DataSet { /** * Applies a Filter transformation on a { @ link DataSet } .
* < p > The transformation calls a { @ link org . apache . flink . api . common . functions . RichFilterFunction } for each element of the DataSet
* and retains only those element for which the function returns true . Elements for
* which the function returns false are filtered .
* @ param filter The FilterFunction that is called for each element of the DataSet .
* @ return A FilterOperator that represents the filtered DataSet .
* @ see org . apache . flink . api . common . functions . RichFilterFunction
* @ see FilterOperator
* @ see DataSet */
public FilterOperator < T > filter ( FilterFunction < T > filter ) { } } | if ( filter == null ) { throw new NullPointerException ( "Filter function must not be null." ) ; } return new FilterOperator < > ( this , clean ( filter ) , Utils . getCallLocationName ( ) ) ; |
public class RBACModel { /** * Returns all users with the given role . < br >
* It is assumed that the caller ensures parameter validity .
* @ param roles */
private List < String > getUsersFor ( Collection < String > roles ) { } } | List < String > users = new HashList < > ( ) ; for ( String role : roles ) { if ( roleMembershipRU . containsKey ( role ) ) users . addAll ( roleMembershipRU . get ( role ) ) ; } return users ; |
public class Symmetry454Date { @ Override public long toEpochDay ( ) { } } | long epochDay = ( long ) ( this . prolepticYear - 1 ) * DAYS_IN_YEAR + Symmetry454Chronology . getLeapYearsBefore ( this . prolepticYear ) * DAYS_IN_WEEK + this . dayOfYear - DAYS_0001_TO_1970 - 1 ; return epochDay ; |
public class FieldSelectorCentroid { /** * ST _ X ( ST _ Centroid ( coalesce ( ST _ GeometryN ( the _ geom , 1 ) , the _ geom ) ) )
* ST _ Y ( ST _ Centroid ( coalesce ( ST _ GeometryN ( the _ geom , 1 ) , the _ geom ) ) )
* ST _ GeometryN - Return the 1 - based Nth geometry if the geometry is a
* GEOMETRYCOLLECTION , MULTIPOINT , MULTILINESTRING , MULTICURVE or
* MULTIPOLYGON . Otherwise , return NULL .
* ST _ Centroid - Returns the geometric center of a geometry
* coalesce - The COALESCE function returns the first of its arguments that
* is not null . Null is returned only if all arguments are null .
* ST _ X (
* ST _ Centroid (
* coalesce (
* ST _ GeometryN (
* the _ geom
* , the _ geom */
public String getQueryString ( TableSchema tableSchema , Phase phase ) throws Exception { } } | StringWriter sw = new StringWriter ( ) ; PrintWriter pw = new PrintWriter ( sw ) ; if ( Type . X == type ) { pw . print ( "ST_X" ) ; } else if ( Type . Y == type ) { pw . print ( "ST_Y" ) ; } else { throw new Exception ( "Can not handle type: " + type ) ; } pw . print ( "(ST_Centroid(coalesce(ST_GeometryN(" ) ; pw . print ( fieldName ) ; pw . print ( ",1)," ) ; pw . print ( fieldName ) ; pw . print ( ")))" ) ; if ( Phase . SELECT == phase ) { if ( Type . X == type ) { pw . print ( " AS x" ) ; } else if ( Type . Y == type ) { pw . print ( " AS y" ) ; } else { throw new Exception ( "Can not handle type: " + type ) ; } } pw . flush ( ) ; return sw . toString ( ) ; |
public class DescribeSpotPriceHistoryResult { /** * The historical Spot prices .
* @ param spotPriceHistory
* The historical Spot prices . */
public void setSpotPriceHistory ( java . util . Collection < SpotPrice > spotPriceHistory ) { } } | if ( spotPriceHistory == null ) { this . spotPriceHistory = null ; return ; } this . spotPriceHistory = new com . amazonaws . internal . SdkInternalList < SpotPrice > ( spotPriceHistory ) ; |
public class ControlAnnotationProcessor { /** * Returns the CodeGenerator instance supporting this processor , instantiating a new
* generator instance if necessary . */
protected CodeGenerator getGenerator ( ) { } } | if ( _generator == null ) { // Locate the class that wraps the Velocity code generation process
AnnotationProcessorEnvironment env = getAnnotationProcessorEnvironment ( ) ; try { _generator = new VelocityGenerator ( env ) ; } catch ( Exception e ) { throw new CodeGenerationException ( "Unable to create code generator" , e ) ; } } return _generator ; |
public class CommerceOrderPaymentLocalServiceUtil { /** * Creates a new commerce order payment with the primary key . Does not add the commerce order payment to the database .
* @ param commerceOrderPaymentId the primary key for the new commerce order payment
* @ return the new commerce order payment */
public static com . liferay . commerce . model . CommerceOrderPayment createCommerceOrderPayment ( long commerceOrderPaymentId ) { } } | return getService ( ) . createCommerceOrderPayment ( commerceOrderPaymentId ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.