signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class ByteArrayQueue { /** * Reads from an { @ link InputStream } */
public ByteArrayQueue readFrom ( InputStream in ) throws IOException { } } | byte [ ] buffer = new byte [ BUFFER_SIZE ] ; int bytesRead ; while ( ( bytesRead = in . read ( buffer ) ) > 0 ) { add ( buffer , 0 , bytesRead ) ; } return this ; |
public class WindowsPathType { /** * Parses a normal drive - letter root , e . g . " C : \ " . */
@ Nullable private String parseDriveRoot ( String path ) { } } | Matcher drivePathMatcher = DRIVE_LETTER_ROOT . matcher ( path ) ; if ( drivePathMatcher . find ( ) ) { return path . substring ( drivePathMatcher . start ( ) , drivePathMatcher . end ( ) ) ; } return null ; |
public class DeepSparkContext { /** * Creates a JavaSchemaRDD from a DeepJobConfig and a JavaSQLContext .
* @ param config Specific Deep ExtractorConfig .
* @ return A JavaSchemaRDD built from Cells .
* @ throws UnsupportedDataTypeException */
public DataFrame createJavaSchemaRDD ( ExtractorConfig < Cells > config ) throws UnsupportedDataTypeException , UnsupportedOperationException { } } | JavaRDD < Cells > cellsRDD = createJavaRDD ( config ) ; JavaRDD < Row > rowsRDD = DeepSparkContext . createJavaRowRDD ( cellsRDD ) ; try { Cells firstCells = cellsRDD . first ( ) ; StructType schema = CellsUtils . getStructTypeFromCells ( firstCells ) ; return sqlContext . applySchema ( rowsRDD , schema ) ; } catch ( UnsupportedOperationException e ) { throw new UnsupportedOperationException ( "Cannot infer schema from empty data RDD" , e ) ; } |
public class SimpleTimeZone { /** * { @ inheritDoc } */
@ Override public TimeZoneTransition getPreviousTransition ( long base , boolean inclusive ) { } } | if ( ! useDaylight ) { return null ; } initTransitionRules ( ) ; long firstTransitionTime = firstTransition . getTime ( ) ; if ( base < firstTransitionTime || ( ! inclusive && base == firstTransitionTime ) ) { return null ; } Date stdDate = stdRule . getPreviousStart ( base , dstRule . getRawOffset ( ) , dstRule . getDSTSavings ( ) , inclusive ) ; Date dstDate = dstRule . getPreviousStart ( base , stdRule . getRawOffset ( ) , stdRule . getDSTSavings ( ) , inclusive ) ; if ( stdDate != null && ( dstDate == null || stdDate . after ( dstDate ) ) ) { return new TimeZoneTransition ( stdDate . getTime ( ) , dstRule , stdRule ) ; } if ( dstDate != null && ( stdDate == null || dstDate . after ( stdDate ) ) ) { return new TimeZoneTransition ( dstDate . getTime ( ) , stdRule , dstRule ) ; } return null ; |
public class CommerceWarehousePersistenceImpl { /** * Returns the number of commerce warehouses where groupId = & # 63 ; and commerceCountryId = & # 63 ; and primary = & # 63 ; .
* @ param groupId the group ID
* @ param commerceCountryId the commerce country ID
* @ param primary the primary
* @ return the number of matching commerce warehouses */
@ Override public int countByG_C_P ( long groupId , long commerceCountryId , boolean primary ) { } } | FinderPath finderPath = FINDER_PATH_COUNT_BY_G_C_P ; Object [ ] finderArgs = new Object [ ] { groupId , commerceCountryId , primary } ; Long count = ( Long ) finderCache . getResult ( finderPath , finderArgs , this ) ; if ( count == null ) { StringBundler query = new StringBundler ( 4 ) ; query . append ( _SQL_COUNT_COMMERCEWAREHOUSE_WHERE ) ; query . append ( _FINDER_COLUMN_G_C_P_GROUPID_2 ) ; query . append ( _FINDER_COLUMN_G_C_P_COMMERCECOUNTRYID_2 ) ; query . append ( _FINDER_COLUMN_G_C_P_PRIMARY_2 ) ; String sql = query . toString ( ) ; Session session = null ; try { session = openSession ( ) ; Query q = session . createQuery ( sql ) ; QueryPos qPos = QueryPos . getInstance ( q ) ; qPos . add ( groupId ) ; qPos . add ( commerceCountryId ) ; qPos . add ( primary ) ; count = ( Long ) q . uniqueResult ( ) ; finderCache . putResult ( finderPath , finderArgs , count ) ; } catch ( Exception e ) { finderCache . removeResult ( finderPath , finderArgs ) ; throw processException ( e ) ; } finally { closeSession ( session ) ; } } return count . intValue ( ) ; |
public class RPMBuilder { /** * Set RPMTAG _ PREIN and RPMTAG _ PREINPROG
* @ param script
* @ param interpreter
* @ return */
@ Override public RPMBuilder setPreInstallation ( String script , String interpreter ) { } } | addString ( RPMTAG_PREIN , script ) ; addString ( RPMTAG_PREINPROG , interpreter ) ; ensureBinSh ( ) ; return this ; |
public class ElementMatchers { /** * Matches a { @ link ByteCodeElement } for being declared by a { @ link TypeDescription . Generic } that is matched by the given matcher .
* @ param matcher A matcher for the declaring type of the matched byte code element as long as it is not { @ code null } .
* @ param < T > The type of the matched object .
* @ return A matcher for byte code elements being declared by a type matched by the given { @ code matcher } . */
public static < T extends ByteCodeElement > ElementMatcher . Junction < T > isDeclaredByGeneric ( ElementMatcher < ? super TypeDescription . Generic > matcher ) { } } | return new DeclaringTypeMatcher < T > ( matcher ) ; |
public class JodaBeanSer { /** * Returns a copy of this serializer with the specified string converter .
* The default converter can be modified .
* @ param converter the converter , not null
* @ return a copy of this object with the converter changed , not null */
public JodaBeanSer withConverter ( StringConvert converter ) { } } | JodaBeanUtils . notNull ( converter , "converter" ) ; return new JodaBeanSer ( indent , newLine , converter , iteratorFactory , shortTypes , deserializers , includeDerived ) ; |
public class EMTopDownTreeBuilder { /** * This methods splits the given data set into partitions using the EM algorithm .
* The resulting number of partitions is < = nrOfPartitions .
* If EM returns only one partition , the data set is split using mean shift .
* The size of each resulting partition is AT LEAST 2,
* i . e . partitions with size 1 are merged with the closest remaining partition .
* Hence , this method might return only one partition containing the whole data set .
* @ param dataSet
* @ param nrOfPartitions
* @ return an array of DataSets containing the partitions ( minimal 1 , maximal nrOfPartitions )
* @ throws Exception */
private DataSet [ ] splitDataSetUsingEM ( DataSet dataSet , int nrOfPartitions ) throws Exception { } } | if ( dataSet . size ( ) <= 1 ) throw new Exception ( "EMsplit needs at least 2 objects!" ) ; EMProjectedClustering myEM = new EMProjectedClustering ( ) ; // iterate several times and take best solution
int nrOfIterations = 1 ; // maximum - - > 10 iterations
// 10 ^ 2 objects - - > 8 iterations
// 10 ^ 6 objects - - > 4 iteration
// minimum - - > 2 iterations
// # iterations = max { 1 , ( 10 - log _ 10 ( # objects ) ) }
double log10 = Math . log ( dataSet . size ( ) * 1.0 ) / Math . log ( 10.0 ) ; nrOfIterations = Math . max ( 1 , ( 10 - ( ( Long ) Math . round ( log10 ) ) . intValue ( ) ) ) ; nrOfIterations = Math . min ( 10 , nrOfIterations ) ; int [ ] [ ] emMapping = myEM . getEMClusteringVariancesBestChoice ( dataSet . getFeaturesAsArray ( ) , nrOfPartitions , nrOfIterations ) ; DataSet [ ] subDataSets = new DataSet [ emMapping . length ] ; for ( int i = 0 ; i < subDataSets . length ; i ++ ) { subDataSets [ i ] = new DataSet ( dataSet . getNrOfDimensions ( ) ) ; for ( int j = 0 ; j < emMapping [ i ] . length ; j ++ ) { subDataSets [ i ] . addObject ( dataSet . getObject ( emMapping [ i ] [ j ] ) ) ; } } // the EM part ends here
// now we try to create at least 2 partitions
// and make sure that each partition contains at least 2 objects
if ( subDataSets . length < 2 ) { System . out . println ( "mean shift split" ) ; subDataSets = splitDataSetUsingMeanShift ( dataSet ) ; } // decide what to do with kernels in inner nodes
// by default they are allowed , i . e . no changes are made if the case occurs
boolean changes = ! ALLOW_KERNELS_IN_INNER_NODES ; while ( changes ) { changes = false ; for ( int i = 0 ; i < subDataSets . length ; i ++ ) { if ( subDataSets [ i ] . size ( ) == 1 ) { System . out . println ( "merge singular sets" ) ; subDataSets = mergeDataSets ( subDataSets , i ) ; changes = true ; break ; } } } return subDataSets ; |
public class PassiveState { /** * Checks the previous entry in the append request for consistency . */
protected AppendResponse checkPreviousEntry ( AppendRequest request ) { } } | if ( request . logIndex ( ) != 0 && context . getLog ( ) . isEmpty ( ) ) { LOGGER . debug ( "{} - Rejected {}: Previous index ({}) is greater than the local log's last index ({})" , context . getCluster ( ) . member ( ) . address ( ) , request , request . logIndex ( ) , context . getLog ( ) . lastIndex ( ) ) ; return AppendResponse . builder ( ) . withStatus ( Response . Status . OK ) . withTerm ( context . getTerm ( ) ) . withSucceeded ( false ) . withLogIndex ( context . getLog ( ) . lastIndex ( ) ) . build ( ) ; } else if ( request . logIndex ( ) != 0 && context . getLog ( ) . lastIndex ( ) != 0 && request . logIndex ( ) > context . getLog ( ) . lastIndex ( ) ) { LOGGER . debug ( "{} - Rejected {}: Previous index ({}) is greater than the local log's last index ({})" , context . getCluster ( ) . member ( ) . address ( ) , request , request . logIndex ( ) , context . getLog ( ) . lastIndex ( ) ) ; return AppendResponse . builder ( ) . withStatus ( Response . Status . OK ) . withTerm ( context . getTerm ( ) ) . withSucceeded ( false ) . withLogIndex ( context . getLog ( ) . lastIndex ( ) ) . build ( ) ; } return appendEntries ( request ) ; |
public class JwtAuthenticator { /** * Validates the token and returns the corresponding user profile .
* @ param token the JWT
* @ return the corresponding user profile */
public CommonProfile validateToken ( final String token ) { } } | final TokenCredentials credentials = new TokenCredentials ( token ) ; try { validate ( credentials , null ) ; } catch ( final HttpAction e ) { throw new TechnicalException ( e ) ; } catch ( final CredentialsException e ) { logger . info ( "Failed to retrieve or validate credentials: {}" , e . getMessage ( ) ) ; logger . debug ( "Failed to retrieve or validate credentials" , e ) ; return null ; } return credentials . getUserProfile ( ) ; |
public class BackupPlansListMemberMarshaller { /** * Marshall the given parameter object . */
public void marshall ( BackupPlansListMember backupPlansListMember , ProtocolMarshaller protocolMarshaller ) { } } | if ( backupPlansListMember == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( backupPlansListMember . getBackupPlanArn ( ) , BACKUPPLANARN_BINDING ) ; protocolMarshaller . marshall ( backupPlansListMember . getBackupPlanId ( ) , BACKUPPLANID_BINDING ) ; protocolMarshaller . marshall ( backupPlansListMember . getCreationDate ( ) , CREATIONDATE_BINDING ) ; protocolMarshaller . marshall ( backupPlansListMember . getDeletionDate ( ) , DELETIONDATE_BINDING ) ; protocolMarshaller . marshall ( backupPlansListMember . getVersionId ( ) , VERSIONID_BINDING ) ; protocolMarshaller . marshall ( backupPlansListMember . getBackupPlanName ( ) , BACKUPPLANNAME_BINDING ) ; protocolMarshaller . marshall ( backupPlansListMember . getCreatorRequestId ( ) , CREATORREQUESTID_BINDING ) ; protocolMarshaller . marshall ( backupPlansListMember . getLastExecutionDate ( ) , LASTEXECUTIONDATE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class ServletHelper { /** * Get the path info of an request , supporting sync and async requests .
* @ param aRequest
* Source request . May be < code > null < / code > .
* @ return Empty string if request is < code > null < / code > or a the path info . */
@ Nonnull public static String getRequestPathInfo ( @ Nullable final HttpServletRequest aRequest ) { } } | String ret = null ; if ( aRequest != null ) try { // They may return null !
if ( aRequest . isAsyncSupported ( ) && aRequest . isAsyncStarted ( ) ) ret = ( String ) aRequest . getAttribute ( AsyncContext . ASYNC_PATH_INFO ) ; else ret = aRequest . getPathInfo ( ) ; } catch ( final UnsupportedOperationException ex ) { // Offline request - fall through
} catch ( final Exception ex ) { // fall through
if ( isLogExceptions ( ) ) if ( LOGGER . isWarnEnabled ( ) ) LOGGER . warn ( "[ServletHelper] Failed to determine path info of HTTP request" , ex ) ; } return ret == null ? "" : ret ; |
public class MockFactory { /** * Generates a mock value assignable to provided field .
* Parametrized types are only supported for List fields .
* If not a list , raw type is used .
* @ param field Field to generate value for
* @ return Generated value */
private static Object generateValue ( Field field ) { } } | Class < ? > rawType = field . getType ( ) ; // TODO Objects other than Lists might need parametrized types .
if ( List . class . isAssignableFrom ( rawType ) ) { ParameterizedType genericType = ( ParameterizedType ) field . getGenericType ( ) ; return createListObject ( genericType ) ; } return generateValue ( rawType ) ; |
public class SARLQuickfixProvider { /** * Remove the element related to the issue , and the whitespaces before the element until the begin separator ,
* and the whitespaces after the element until the end separator .
* @ param issue the issue .
* @ param document the document .
* @ param beginSeparator the separator before the element .
* @ param endSeparator the separator after the element .
* @ return < code > true < / code > if the separator was found , < code > false < / code > if not .
* @ throws BadLocationException if there is a problem with the location of the element . */
public boolean removeBetweenSeparators ( Issue issue , IXtextDocument document , String beginSeparator , String endSeparator ) throws BadLocationException { } } | int offset = issue . getOffset ( ) ; int length = issue . getLength ( ) ; // Skip spaces before the identifier until the separator
int index = offset - 1 ; char c = document . getChar ( index ) ; while ( Character . isWhitespace ( c ) ) { index -- ; c = document . getChar ( index ) ; } // Test if it previous non - space character is the separator
boolean foundSeparator = document . getChar ( index ) == beginSeparator . charAt ( 0 ) ; if ( foundSeparator ) { index -- ; c = document . getChar ( index ) ; // Skip the previous spaces
while ( Character . isWhitespace ( c ) ) { index -- ; c = document . getChar ( index ) ; } length = length + ( offset - index - 1 ) ; offset = index + 1 ; // Skip spaces after the identifier until the separator
index = offset + length ; c = document . getChar ( index ) ; while ( Character . isWhitespace ( c ) ) { index ++ ; c = document . getChar ( index ) ; } // Test if it next non - space character is the separator
foundSeparator = document . getChar ( index ) == endSeparator . charAt ( 0 ) ; if ( foundSeparator ) { index ++ ; length = index - offset ; document . replace ( offset , length , "" ) ; // $ NON - NLS - 1 $
} } return foundSeparator ; |
public class ApiTokenStore { /** * Search in the store if there is a matching token that has the same secret .
* @ return { @ code null } iff there is no matching token */
private @ CheckForNull HashedToken searchMatch ( @ Nonnull String plainSecret ) { } } | byte [ ] hashedBytes = plainSecretToHashBytes ( plainSecret ) ; for ( HashedToken token : tokenList ) { if ( token . match ( hashedBytes ) ) { return token ; } } return null ; |
public class ReactiveDelegatingContract { /** * Ensure that the type provided implements a Reactive Streams Publisher .
* @ param type to inspect .
* @ return true if the type implements the Reactive Streams Publisher specification . */
private boolean isReactive ( Type type ) { } } | if ( ! ParameterizedType . class . isAssignableFrom ( type . getClass ( ) ) ) { return false ; } ParameterizedType parameterizedType = ( ParameterizedType ) type ; Type raw = parameterizedType . getRawType ( ) ; return Arrays . asList ( ( ( Class ) raw ) . getInterfaces ( ) ) . contains ( Publisher . class ) ; |
public class HtmlTree { /** * Generates a heading tag ( h1 to h6 ) with the title attribute . It also encloses
* a content .
* @ param headingTag the heading tag to be generated
* @ param printTitle true if the title for the tag needs to be printed else false
* @ param body content for the tag
* @ return an HtmlTree object for the tag */
public static HtmlTree HEADING ( HtmlTag headingTag , boolean printTitle , Content body ) { } } | return HEADING ( headingTag , printTitle , null , body ) ; |
public class ConvolveImage { /** * Performs a horizontal 1D convolution across the image .
* @ param input The original image . Not modified .
* @ param output Where the resulting image is written to . Modified .
* @ param kernel The kernel that is being convolved . Not modified .
* @ param border How the image borders are handled . */
public static void horizontal ( Kernel1D_S32 kernel , InterleavedS32 input , InterleavedS32 output , ImageBorder_IL_S32 < InterleavedS32 > border ) { } } | InputSanityCheck . checkSameShapeB ( input , output ) ; boolean processed = BOverrideConvolveImage . invokeNativeHorizontal ( kernel , input , output , border ) ; if ( ! processed ) { border . setImage ( input ) ; ConvolveImageNoBorder . horizontal ( kernel , input , output ) ; ConvolveJustBorder_General_IL . horizontal ( kernel , border , output ) ; } |
public class JSONArray { /** * Returns the value at { @ code index } if it exists and is a long or can be coerced to
* a long . Returns { @ code fallback } otherwise .
* @ param index the index to get the value from
* @ param fallback the fallback value
* @ return the value at { @ code index } of { @ code fallback } */
public long optLong ( int index , long fallback ) { } } | Object object = opt ( index ) ; Long result = JSON . toLong ( object ) ; return result != null ? result : fallback ; |
public class ReactorAdapter { /** * Converts the EventPublisher into a Flux .
* @ param eventPublisher the event publisher
* @ param < T > the type of the event
* @ return the Flux */
public static < T > Flux < T > toFlux ( EventPublisher < T > eventPublisher ) { } } | DirectProcessor < T > directProcessor = DirectProcessor . create ( ) ; eventPublisher . onEvent ( directProcessor :: onNext ) ; return directProcessor ; |
public class ZoneRulesProvider { /** * Gets the rules for the zone ID .
* This returns the latest available rules for the zone ID .
* This method relies on time - zone data provider files that are configured .
* These are loaded using a { @ code ServiceLoader } .
* The caching flag is designed to allow provider implementations to
* prevent the rules being cached in { @ code ZoneId } .
* Under normal circumstances , the caching of zone rules is highly desirable
* as it will provide greater performance . However , there is a use case where
* the caching would not be desirable , see { @ link # provideRules } .
* @ param zoneId the zone ID as defined by { @ code ZoneId } , not null
* @ param forCaching whether the rules are being queried for caching ,
* true if the returned rules will be cached by { @ code ZoneId } ,
* false if they will be returned to the user without being cached in { @ code ZoneId }
* @ return the rules , null if { @ code forCaching } is true and this
* is a dynamic provider that wants to prevent caching in { @ code ZoneId } ,
* otherwise not null
* @ throws ZoneRulesException if rules cannot be obtained for the zone ID */
public static ZoneRules getRules ( String zoneId , boolean forCaching ) { } } | Jdk8Methods . requireNonNull ( zoneId , "zoneId" ) ; return getProvider ( zoneId ) . provideRules ( zoneId , forCaching ) ; |
public class CommerceWishListUtil { /** * Returns the last commerce wish list in the ordered set where groupId = & # 63 ; and userId = & # 63 ; .
* @ param groupId the group ID
* @ param userId the user ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the last matching commerce wish list
* @ throws NoSuchWishListException if a matching commerce wish list could not be found */
public static CommerceWishList findByG_U_Last ( long groupId , long userId , OrderByComparator < CommerceWishList > orderByComparator ) throws com . liferay . commerce . wish . list . exception . NoSuchWishListException { } } | return getPersistence ( ) . findByG_U_Last ( groupId , userId , orderByComparator ) ; |
public class IfcRelAssociatesImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ SuppressWarnings ( "unchecked" ) public EList < IfcRoot > getRelatedObjects ( ) { } } | return ( EList < IfcRoot > ) eGet ( Ifc2x3tc1Package . Literals . IFC_REL_ASSOCIATES__RELATED_OBJECTS , true ) ; |
public class MaF09 { /** * given vertexes , evaluate the straight lines of a polygon */
public double [ ] [ ] lines_of_polygon ( double [ ] [ ] p ) { } } | double [ ] [ ] c9 = new double [ p . length ] [ 3 ] ; for ( int i = 0 ; i < p . length - 1 ; i ++ ) { // evaluate formula of the straight line l1 , . . . , m - 1
c9 [ i ] = line_of_twoP ( p [ i ] , p [ i + 1 ] ) ; } // evaluate formula of the straight line lm
c9 [ p . length - 1 ] = line_of_twoP ( p [ p . length - 1 ] , p [ 0 ] ) ; return c9 ; |
public class BeaconManager { /** * Specifies a class that should be called each time the < code > BeaconService < / code > gets ranging
* data , which is nominally once per second when beacons are detected .
* IMPORTANT : Only one RangeNotifier may be active for a given application . If two different
* activities or services set different RangeNotifier instances , the last one set will receive
* all the notifications .
* @ param notifier The { @ link RangeNotifier } to register .
* @ see RangeNotifier
* @ deprecated replaced by ( @ link # addRangeNotifier ) */
@ Deprecated public void setRangeNotifier ( @ Nullable RangeNotifier notifier ) { } } | rangeNotifiers . clear ( ) ; if ( null != notifier ) { addRangeNotifier ( notifier ) ; } |
public class DataFramePrinter { /** * Returns the header template given the widths specified
* @ param widths the token widths
* @ return the line format template */
private static String getHeaderTemplate ( int [ ] widths , String [ ] headers ) { } } | return IntStream . range ( 0 , widths . length ) . mapToObj ( i -> { final int width = widths [ i ] ; final int length = headers [ i ] . length ( ) ; final int leading = ( width - length ) / 2 ; final int trailing = width - ( length + leading ) ; final StringBuilder text = new StringBuilder ( ) ; whitespace ( text , leading + 1 ) ; text . append ( "%" ) . append ( i + 1 ) . append ( "$s" ) ; whitespace ( text , trailing ) ; text . append ( " |" ) ; return text . toString ( ) ; } ) . reduce ( ( left , right ) -> left + " " + right ) . orElse ( "" ) ; |
public class BacklinkTitles { /** * picks the article name from a MediaWiki api response .
* @ param xml text for parsing */
@ Override protected ImmutableList < String > parseElements ( String xml ) { } } | Optional < XmlElement > child = XmlConverter . getChildOpt ( xml , "query" , "backlinks" ) ; ImmutableList . Builder < String > titleCollection = ImmutableList . builder ( ) ; if ( child . isPresent ( ) ) { List < XmlElement > backlinks = child . get ( ) . getChildren ( "bl" ) ; for ( XmlElement backlink : backlinks ) { titleCollection . add ( backlink . getAttributeValue ( "title" ) ) ; } } return titleCollection . build ( ) ; |
public class BaseCrawler { /** * Downloads the file specified by the URL .
* @ param source the source URL
* @ param destination the destination file
* @ throws IOException if an I / O error occurs while downloading the file */
protected final void downloadFile ( final URI source , final File destination ) throws IOException { } } | Validate . validState ( ! isStopped , "Cannot download file when the crawler is not started." ) ; Validate . validState ( ! isStopping , "Cannot download file when the crawler is stopping." ) ; Validate . notNull ( source , "The source URL cannot be null." ) ; Validate . notNull ( destination , "The destination file cannot be null." ) ; HttpGet request = new HttpGet ( source ) ; try ( CloseableHttpResponse response = httpClient . execute ( request ) ) { HttpEntity entity = response . getEntity ( ) ; if ( entity != null ) { FileUtils . copyInputStreamToFile ( entity . getContent ( ) , destination ) ; } } |
public class LockedObject { /** * helper of checkLocks ( ) . looks if the children are locked
* @ param exclusive
* wheather the new lock should be exclusive
* @ return true if no locks at the children paths are forbidding a new lock
* @ param depth
* depth */
private boolean checkChildren ( boolean exclusive , int depth ) { } } | if ( _children == null ) { // a file
return _owner == null || ! ( _exclusive || exclusive ) ; } else { // a folder
if ( _owner == null ) { // no owner , checking children
if ( depth != 0 ) { boolean canLock = true ; int limit = _children . length ; for ( int i = 0 ; i < limit ; i ++ ) { if ( ! _children [ i ] . checkChildren ( exclusive , depth - 1 ) ) { canLock = false ; } } return canLock ; } else { // depth = = 0 - > we don ' t care for children
return true ; } } else { // there already is a owner
return ! ( _exclusive || exclusive ) ; } } |
public class SwipeBack { /** * Compute the touch area based on the touch mode . */
protected void updateTouchAreaSize ( ) { } } | if ( mTouchMode == TOUCH_MODE_BEZEL ) { mTouchSize = mTouchBezelSize ; } else if ( mTouchMode == TOUCH_MODE_FULLSCREEN ) { mTouchSize = getMeasuredWidth ( ) ; } else { mTouchSize = 0 ; } |
public class ApplicationCache { /** * Adds the key transactions to the applications for the account .
* @ param keyTransactions The key transactions to add */
public void addKeyTransactions ( Collection < KeyTransaction > keyTransactions ) { } } | for ( KeyTransaction keyTransaction : keyTransactions ) { // Add the transaction to any applications it is associated with
long applicationId = keyTransaction . getLinks ( ) . getApplication ( ) ; Application application = applications . get ( applicationId ) ; if ( application != null ) keyTransactions ( applicationId ) . add ( keyTransaction ) ; else logger . severe ( String . format ( "Unable to find application for key transaction '%s': %d" , keyTransaction . getName ( ) , applicationId ) ) ; } |
public class AbstractReaderModule { /** * Init xml reader used for pipeline parsing .
* @ param ditaDir absolute path to DITA - OT directory
* @ param validate whether validate input file
* @ throws SAXException parsing exception */
void initXMLReader ( final File ditaDir , final boolean validate ) throws SAXException { } } | reader = XMLUtils . getXMLReader ( ) ; reader . setFeature ( FEATURE_NAMESPACE , true ) ; reader . setFeature ( FEATURE_NAMESPACE_PREFIX , true ) ; if ( validate ) { reader . setFeature ( FEATURE_VALIDATION , true ) ; try { reader . setFeature ( FEATURE_VALIDATION_SCHEMA , true ) ; } catch ( final SAXNotRecognizedException e ) { // Not Xerces , ignore exception
} } else { logger . warn ( MessageUtils . getMessage ( "DOTJ037W" ) . toString ( ) ) ; } if ( gramcache ) { final XMLGrammarPool grammarPool = GrammarPoolManager . getGrammarPool ( ) ; try { reader . setProperty ( "http://apache.org/xml/properties/internal/grammar-pool" , grammarPool ) ; logger . info ( "Using Xerces grammar pool for DTD and schema caching." ) ; } catch ( final NoClassDefFoundError e ) { logger . debug ( "Xerces not available, not using grammar caching" ) ; } catch ( final SAXNotRecognizedException | SAXNotSupportedException e ) { logger . warn ( "Failed to set Xerces grammar pool for parser: " + e . getMessage ( ) ) ; } } CatalogUtils . setDitaDir ( ditaDir ) ; reader . setEntityResolver ( CatalogUtils . getCatalogResolver ( ) ) ; |
public class IniFile { /** * Converts a java . util . date into String
* @ param pd Date that need to be converted to String
* @ param pstrFmt The date format pattern .
* @ return String */
private String utilDateToStr ( Date pdt , String pstrFmt ) { } } | String strRet = null ; SimpleDateFormat dtFmt = null ; try { dtFmt = new SimpleDateFormat ( pstrFmt ) ; strRet = dtFmt . format ( pdt ) ; } catch ( Exception e ) { strRet = null ; } finally { if ( dtFmt != null ) dtFmt = null ; } return strRet ; |
public class ConnectionManager { /** * Obtain a dbserver client session that can be used to perform some task , call that task with the client ,
* then release the client .
* @ param targetPlayer the player number whose dbserver we wish to communicate with
* @ param task the activity that will be performed with exclusive access to a dbserver connection
* @ param description a short description of the task being performed for error reporting if it fails ,
* should be a verb phrase like " requesting track metadata "
* @ param < T > the type that will be returned by the task to be performed
* @ return the value returned by the completed task
* @ throws IOException if there is a problem communicating
* @ throws Exception from the underlying { @ code task } , if any */
public < T > T invokeWithClientSession ( int targetPlayer , ClientTask < T > task , String description ) throws Exception { } } | if ( ! isRunning ( ) ) { throw new IllegalStateException ( "ConnectionManager is not running, aborting " + description ) ; } final Client client = allocateClient ( targetPlayer , description ) ; try { return task . useClient ( client ) ; } finally { freeClient ( client ) ; } |
public class FileHdr { /** * Is this the physical file that can be imported / exported , etc ? . */
public boolean isPhysicalFile ( ) { } } | String strFileType = this . getField ( FileHdr . TYPE ) . toString ( ) ; if ( strFileType == null ) return false ; if ( strFileType . length ( ) == 0 ) strFileType = DBParams . REMOTE ; // Default type
strFileType = strFileType . toUpperCase ( ) ; boolean bPhysicalFile = false ; if ( strFileType . indexOf ( DBParams . LOCAL . toUpperCase ( ) ) != - 1 ) bPhysicalFile = true ; if ( strFileType . indexOf ( DBParams . REMOTE . toUpperCase ( ) ) != - 1 ) bPhysicalFile = true ; if ( ( " " + strFileType ) . indexOf ( " " + DBParams . TABLE . toUpperCase ( ) ) != - 1 ) bPhysicalFile = true ; if ( bPhysicalFile ) if ( strFileType . indexOf ( "SHARED_TABLE" ) != - 1 ) if ( strFileType . indexOf ( "BASE_TABLE_CLASS" ) == - 1 ) bPhysicalFile = false ; // Only the base table is considered physical
if ( bPhysicalFile ) if ( strFileType . indexOf ( "MAPPED" ) != - 1 ) bPhysicalFile = false ; // Only the base table is considered physical
return bPhysicalFile ; |
public class Word2VecExamples { /** * Trains a model and allows user to find similar words
* demo - word . sh example from the open source C implementation */
public static void demoWord ( ) throws IOException , TException , InterruptedException , UnknownWordException { } } | File f = new File ( "text8" ) ; if ( ! f . exists ( ) ) throw new IllegalStateException ( "Please download and unzip the text8 example from http://mattmahoney.net/dc/text8.zip" ) ; List < String > read = Common . readToList ( f ) ; List < List < String > > partitioned = Lists . transform ( read , new Function < String , List < String > > ( ) { @ Override public List < String > apply ( String input ) { return Arrays . asList ( input . split ( " " ) ) ; } } ) ; Word2VecModel model = Word2VecModel . trainer ( ) . setMinVocabFrequency ( 5 ) . useNumThreads ( 20 ) . setWindowSize ( 8 ) . type ( NeuralNetworkType . CBOW ) . setLayerSize ( 200 ) . useNegativeSamples ( 25 ) . setDownSamplingRate ( 1e-4 ) . setNumIterations ( 5 ) . setListener ( new TrainingProgressListener ( ) { @ Override public void update ( Stage stage , double progress ) { System . out . println ( String . format ( "%s is %.2f%% complete" , Format . formatEnum ( stage ) , progress * 100 ) ) ; } } ) . train ( partitioned ) ; // Writes model to a thrift file
try ( ProfilingTimer timer = ProfilingTimer . create ( LOG , "Writing output to file" ) ) { FileUtils . writeStringToFile ( new File ( "text8.model" ) , ThriftUtils . serializeJson ( model . toThrift ( ) ) ) ; } // Alternatively , you can write the model to a bin file that ' s compatible with the C
// implementation .
try ( final OutputStream os = Files . newOutputStream ( Paths . get ( "text8.bin" ) ) ) { model . toBinFile ( os ) ; } interact ( model . forSearch ( ) ) ; |
public class Threads { /** * Execute a < code > Callable < / code > on a thread pool thread , at default
* priority . This allows you to start a background thread , and later do a
* < code > < i > join ( ) < / i > < / code > ( < i > i . e . < / i > , block until the thread is done )
* to get the thread ' s output .
* @ param < T >
* The type - parameter of the Callable and the returned Future
* @ param callable
* The thread proc
* @ return A < code > Future & lt ; T & gt ; < / code > that can be queried for the
* thread ' s result */
public static < T > Future < T > spawn ( Callable < T > callable ) { } } | return threadPool . submit ( callable ) ; |
public class UITabGroup { /** * Attach this { @ link UITabGroup } to a { @ link UIContainer } .
* @ param container the container to attach to .
* @ param displace if true , moves and resize the UIContainer to make place for the UITabGroup
* @ return this { @ link UITab } */
public UITabGroup attachTo ( UIContainer container , boolean displace ) { } } | attachedContainer = container ; if ( activeTab != null ) activeTab . setActive ( true ) ; switch ( tabPosition ) { case TOP : setPosition ( Position . above ( this , container , - 2 ) ) ; break ; case BOTTOM : setPosition ( Position . below ( this , container , - 2 ) ) ; break ; case LEFT : setPosition ( Position . leftOf ( this , container , - 2 ) ) ; break ; case RIGHT : setPosition ( Position . rightOf ( this , container , - 2 ) ) ; break ; } for ( UIContainer tabContainer : listTabs . values ( ) ) setupTabContainer ( tabContainer ) ; calculateTabPosition ( ) ; if ( activeTab != null ) { UITab tab = activeTab ; activeTab = null ; setActiveTab ( tab ) ; } if ( displace ) { attachedContainer . setPosition ( new AttachedContainerPosition ( attachedContainer . position ( ) ) ) ; attachedContainer . setSize ( new AttachedContainerSize ( attachedContainer . size ( ) ) ) ; } return this ; |
public class TableToXls { /** * process html to output stream
* @ param html html char sequence
* @ param output output stream */
public static void process ( CharSequence html , OutputStream output ) { } } | new TableToXls ( ) . doProcess ( html instanceof String ? ( String ) html : html . toString ( ) , output ) ; |
public class DataBuilder { /** * Appends an operating system pattern to the map of pattern sorted by ID .
* @ param pattern
* a pattern for a browser
* @ throws net . sf . qualitycheck . exception . IllegalNullArgumentException
* if the pattern is { @ code null }
* @ return itself */
@ Nonnull public DataBuilder appendOperatingSystemPattern ( @ Nonnull final OperatingSystemPattern pattern ) { } } | Check . notNull ( pattern , "pattern" ) ; if ( ! operatingSystemPatterns . containsKey ( pattern . getId ( ) ) ) { operatingSystemPatterns . put ( pattern . getId ( ) , new TreeSet < OperatingSystemPattern > ( OS_PATTERN_COMPARATOR ) ) ; } operatingSystemPatterns . get ( pattern . getId ( ) ) . add ( pattern ) ; return this ; |
public class MultipartProcessor { /** * Utility method to read all the bytes from an InputStream into the outputStream .
* @ param inputStream Stream of bytes to read from .
* @ throws IOException Thrown on errors reading / writing . */
private void streamToOutput ( InputStream inputStream ) throws IOException { } } | try { byte [ ] buffer = new byte [ 4096 ] ; int bytesRead = - 1 ; while ( ( bytesRead = inputStream . read ( buffer ) ) != - 1 ) { outputStream . write ( buffer , 0 , bytesRead ) ; } outputStream . flush ( ) ; } finally { inputStream . close ( ) ; } |
public class JdbcNeo4JDeepJobConfig { /** * { @ inheritDoc } */
public JdbcNeo4JDeepJobConfig < T > initialize ( ExtractorConfig config ) { } } | Map < String , Serializable > values = config . getValues ( ) ; if ( values . get ( JDBC_CONNECTION_URL ) != null ) { connectionUrl ( config . getString ( JDBC_CONNECTION_URL ) ) ; } if ( values . get ( JDBC_QUERY ) != null ) { cypherQuery ( config . getString ( JDBC_QUERY ) ) ; } super . initialize ( config ) ; return this ; |
public class AppsImpl { /** * Imports an application to LUIS , the application ' s structure should be included in in the request body .
* @ param luisApp A LUIS application structure .
* @ param importMethodOptionalParameter the object representing the optional parameters to be set before calling this API
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < UUID > importMethodAsync ( LuisApp luisApp , ImportMethodAppsOptionalParameter importMethodOptionalParameter , final ServiceCallback < UUID > serviceCallback ) { } } | return ServiceFuture . fromResponse ( importMethodWithServiceResponseAsync ( luisApp , importMethodOptionalParameter ) , serviceCallback ) ; |
public class AbstractNode { /** * Splits the entries of this node into a new node using the given assignments
* @ param newNode Node to split to
* @ param assignment Assignment mask */
public final void splitByMask ( AbstractNode < E > newNode , long [ ] assignment ) { } } | assert ( isLeaf ( ) == newNode . isLeaf ( ) ) ; int dest = BitsUtil . nextSetBit ( assignment , 0 ) ; if ( dest < 0 ) { throw new AbortException ( "No bits set in splitting mask." ) ; } // FIXME : use faster iteration / testing
int pos = dest ; while ( pos < numEntries ) { if ( BitsUtil . get ( assignment , pos ) ) { // Move to new node
newNode . addEntry ( getEntry ( pos ) ) ; } else { // Move to new position
entries [ dest ] = entries [ pos ] ; dest ++ ; } pos ++ ; } final int rm = numEntries - dest ; while ( dest < numEntries ) { entries [ dest ] = null ; dest ++ ; } numEntries -= rm ; |
public class Histogram { /** * Set the histogram to use the supplied minimum and maximum values to determine the bucket size .
* @ param minimum
* @ param maximum */
public void setStrategy ( T minimum , T maximum ) { } } | this . bucketingStrategy = new ExplicitBucketingStrategy ( minimum , maximum ) ; this . bucketWidth = null ; |
public class IscrollSettings { /** * { @ inheritDoc } */
@ Override public Set < StringTextValue < ? > > asSet ( ) { } } | final Set < StringTextValue < ? > > allSettings = new HashSet < > ( ) ; allSettings . add ( getUseTransform ( ) ) ; allSettings . add ( getUseTransition ( ) ) ; allSettings . add ( getHWCompositing ( ) ) ; allSettings . add ( getBounce ( ) ) ; allSettings . add ( getClick ( ) ) ; allSettings . add ( getDisableMouse ( ) ) ; allSettings . add ( getDisablePointer ( ) ) ; allSettings . add ( getDisableTouch ( ) ) ; allSettings . add ( getMouseWheel ( ) ) ; return allSettings ; |
public class ModbusResponse { /** * Utility method to set the raw data of the message . Should not be used
* except under rare circumstances .
* @ param msg the < tt > byte [ ] < / tt > resembling the raw modbus response
* message . */
protected void setMessage ( byte [ ] msg ) { } } | try { readData ( new DataInputStream ( new ByteArrayInputStream ( msg ) ) ) ; } catch ( IOException ex ) { logger . error ( "Problem setting response message - {}" , ex . getMessage ( ) ) ; } |
public class MetricResolver11 { /** * Checks whether the metric should be re - usable */
private < T extends Annotation > boolean checkReusable ( MetricResolver . Of < T > of ) { } } | String name = of . metadata ( ) . getName ( ) ; // If the metric has been registered before ( eg . metrics found in RequestScoped beans ) ,
// we don ' t need to worry about re - usable
if ( ! of . isInitialDiscovery ( ) ) { return true ; } Metadata existingMetadata = registry . getMetadata ( ) . get ( name ) ; if ( existingMetadata != null && ( existingMetadata . isReusable ( ) == false || of . metadata ( ) . isReusable ( ) == false ) ) { throw new IllegalArgumentException ( "Cannot reuse metric for " + of . metricName ( ) ) ; } return true ; |
public class ClassFactory { /** * Parses a string of class names separated by separator into a list of objects .
* @ param str names of classes
* @ param separator separator characters
* @ param attrTypes attrTypes
* @ param attrValues attrValues
* @ return ArrayList of class instances
* @ throws DISIException DISIException */
public static List < Object > stringToClassInstances ( String str , String separator , Class [ ] attrTypes , Object [ ] attrValues ) throws DISIException { } } | ArrayList < Object > tmp = new ArrayList < Object > ( ) ; StringTokenizer stringTokenizer = new StringTokenizer ( str , separator ) ; while ( stringTokenizer . hasMoreTokens ( ) ) { Object obj = getClassInstance ( stringTokenizer . nextToken ( ) , attrTypes , attrValues ) ; if ( obj != null ) { tmp . add ( obj ) ; } } return tmp ; |
public class Matrix1NornRescaler { /** * Check if the scaling algorithm returned proper results .
* Note that AOriginal cannot be only subdiagonal filled , because this check
* is for both symm and bath notsymm matrices .
* @ param AOriginal the ORIGINAL ( before scaling ) matrix
* @ param U the return of the scaling algorithm
* @ param V the return of the scaling algorithm
* @ param base
* @ return */
@ Override public boolean checkScaling ( final DoubleMatrix2D AOriginal , final DoubleMatrix1D U , final DoubleMatrix1D V ) { } } | int c = AOriginal . columns ( ) ; int r = AOriginal . rows ( ) ; final double [ ] maxValueHolder = new double [ ] { - Double . MAX_VALUE } ; IntIntDoubleFunction myFunct = new IntIntDoubleFunction ( ) { @ Override public double apply ( int i , int j , double pij ) { maxValueHolder [ 0 ] = Math . max ( maxValueHolder [ 0 ] , Math . abs ( pij ) ) ; return pij ; } } ; DoubleMatrix2D AScaled = ColtUtils . diagonalMatrixMult ( U , AOriginal , V ) ; // view A row by row
boolean isOk = true ; for ( int i = 0 ; isOk && i < r ; i ++ ) { maxValueHolder [ 0 ] = - Double . MAX_VALUE ; DoubleMatrix2D P = AScaled . viewPart ( i , 0 , 1 , c ) ; P . forEachNonZero ( myFunct ) ; isOk = Math . abs ( 1. - maxValueHolder [ 0 ] ) < eps ; } // view A col by col
for ( int j = 0 ; isOk && j < c ; j ++ ) { maxValueHolder [ 0 ] = - Double . MAX_VALUE ; DoubleMatrix2D P = AScaled . viewPart ( 0 , j , r , 1 ) ; P . forEachNonZero ( myFunct ) ; isOk = Math . abs ( 1. - maxValueHolder [ 0 ] ) < eps ; } return isOk ; |
public class RaftServiceContext { /** * Keeps the given session alive .
* @ param index The index of the keep - alive .
* @ param timestamp The timestamp of the keep - alive .
* @ param session The session to keep - alive .
* @ param commandSequence The session command sequence number .
* @ param eventIndex The session event index . */
public boolean keepAlive ( long index , long timestamp , RaftSession session , long commandSequence , long eventIndex ) { } } | // If the service has been deleted , just return false to ignore the keep - alive .
if ( deleted ) { return false ; } // Update the state machine index / timestamp .
tick ( index , timestamp ) ; // The session may have been closed by the time this update was executed on the service thread .
if ( session . getState ( ) != Session . State . CLOSED ) { // Update the session ' s timestamp to prevent it from being expired .
session . setLastUpdated ( timestamp ) ; // Clear results cached in the session .
session . clearResults ( commandSequence ) ; // Resend missing events starting from the last received event index .
session . resendEvents ( eventIndex ) ; // Update the session ' s request sequence number . The command sequence number will be applied
// iff the existing request sequence number is less than the command sequence number . This must
// be applied to ensure that request sequence numbers are reset after a leader change since leaders
// track request sequence numbers in local memory .
session . resetRequestSequence ( commandSequence ) ; // Update the sessions ' command sequence number . The command sequence number will be applied
// iff the existing sequence number is less than the keep - alive command sequence number . This should
// not be the case under normal operation since the command sequence number in keep - alive requests
// represents the highest sequence for which a client has received a response ( the command has already
// been completed ) , but since the log compaction algorithm can exclude individual entries from replication ,
// the command sequence number must be applied for keep - alive requests to reset the sequence number in
// the event the last command for the session was cleaned / compacted from the log .
session . setCommandSequence ( commandSequence ) ; // Complete the future .
return true ; } else { return false ; } |
public class MavenModelScannerPlugin { /** * Adds information about managed plugins .
* @ param pomDescriptor
* The descriptor for the current POM .
* @ param build
* Information required to build the project .
* @ param scannerContext
* The scanner context . */
private void addManagedPlugins ( BaseProfileDescriptor pomDescriptor , BuildBase build , ScannerContext scannerContext ) { } } | if ( null == build ) { return ; } PluginManagement pluginManagement = build . getPluginManagement ( ) ; if ( null == pluginManagement ) { return ; } List < MavenPluginDescriptor > pluginDescriptors = createMavenPluginDescriptors ( pluginManagement . getPlugins ( ) , scannerContext ) ; pomDescriptor . getManagedPlugins ( ) . addAll ( pluginDescriptors ) ; |
public class DialogConverter { /** * Replaces the value of a mapped property with a value from the original tree .
* @ param root the root node of the original tree
* @ param node the replacement tree object
* @ param key property name of the ( potentially ) mapped property in the replacement copy tree
* @ return true if there was a successful mapping , false otherwise
* @ throws JSONException */
private boolean mapProperty ( JSONObject root , JSONObject node , String key , String ... mapping ) throws JSONException { } } | boolean deleteProperty = false ; for ( String value : mapping ) { Matcher matcher = MAPPED_PATTERN . matcher ( value ) ; if ( matcher . matches ( ) ) { // this is a mapped property , we will delete it if the mapped destination
// property doesn ' t exist
deleteProperty = true ; String path = matcher . group ( 2 ) ; // unwrap quoted property paths
path = StringUtils . removeStart ( StringUtils . stripEnd ( path , "\'" ) , "\'" ) ; if ( root . has ( cleanup ( path ) ) ) { // replace property by mapped value in the original tree
Object originalValue = root . get ( cleanup ( path ) ) ; node . put ( cleanup ( key ) , originalValue ) ; // negate boolean properties if negation character has been set
String negate = matcher . group ( 1 ) ; if ( "!" . equals ( negate ) && ( originalValue instanceof Boolean ) ) { node . put ( cleanup ( key ) , ! ( ( Boolean ) originalValue ) ) ; } // the mapping was successful
deleteProperty = false ; break ; } else { String defaultValue = matcher . group ( 4 ) ; if ( defaultValue != null ) { node . put ( cleanup ( key ) , defaultValue ) ; deleteProperty = false ; break ; } } } } if ( deleteProperty ) { // mapped destination does not exist , we don ' t include the property in replacement tree
node . remove ( key ) ; return false ; } return true ; |
public class FileUtils { /** * Loads a file in the format { @ code key value1 value2 value3 } ( tab - separated ) into a { @ link
* com . google . common . collect . Multimap } of { @ code String } to { @ code String } . Each key should only
* appear on one line , and there should be no duplicate values . Each key and value has whitespace
* trimmed off . Skips empty lines and allows comment - lines with { @ code # } in the first position .
* If a key has no values , it will not show up in the keySet of the returned multimap . */
public static ImmutableMultimap < String , String > loadStringMultimap ( CharSource multimapSource ) throws IOException { } } | final ImmutableMultimap . Builder < String , String > ret = ImmutableMultimap . builder ( ) ; int count = 0 ; for ( final String line : multimapSource . readLines ( ) ) { ++ count ; if ( isCommentLine ( line ) ) { continue ; } final List < String > parts = multimapSplitter . splitToList ( line ) ; if ( parts . isEmpty ( ) ) { continue ; } ret . putAll ( parts . get ( 0 ) , skip ( parts , 1 ) ) ; } return ret . build ( ) ; |
public class MTree { /** * Adds and indexes a data object .
* < p > An object that is already indexed should not be added . There is no
* validation regarding this , and the behavior is undefined if done .
* @ param data The data object to index . */
public void add ( DATA data ) { } } | if ( root == null ) { root = new RootLeafNode ( data ) ; try { root . addData ( data , 0 ) ; } catch ( SplitNodeReplacement e ) { throw new RuntimeException ( "Should never happen!" ) ; } } else { double distance = distanceFunction . calculate ( data , root . data ) ; try { root . addData ( data , distance ) ; } catch ( SplitNodeReplacement e ) { Node newRoot = new RootNode ( data ) ; root = newRoot ; for ( int i = 0 ; i < e . newNodes . length ; i ++ ) { @ SuppressWarnings ( "unchecked" ) Node newNode = ( Node ) e . newNodes [ i ] ; distance = distanceFunction . calculate ( root . data , newNode . data ) ; root . addChild ( newNode , distance ) ; } } } |
public class AuditLog { /** * < pre >
* Metadata about the operation .
* < / pre >
* < code > . google . cloud . audit . RequestMetadata request _ metadata = 4 ; < / code > */
public com . google . cloud . audit . RequestMetadata getRequestMetadata ( ) { } } | return requestMetadata_ == null ? com . google . cloud . audit . RequestMetadata . getDefaultInstance ( ) : requestMetadata_ ; |
public class WorkQueueManager { /** * @ see
* com . ibm . ws . udp . channel . internal . UDPSelectorMonitor # setChannel ( java . nio .
* channels . DatagramChannel , com . ibm . ws . udp . channel . internal . UDPNetworkLayer ) */
public synchronized void setChannel ( DatagramChannel channel , UDPNetworkLayer udpNetworkLayer ) throws IOException { } } | int interestOps = 0 ; if ( udpNetworkLayer . getUDPChannel ( ) . getConfig ( ) . isInboundChannel ( ) ) { interestOps = SelectionKey . OP_READ ; } NIOChannelModRequest request = new NIOChannelModRequest ( NIOChannelModRequest . ADD_REQUEST , channel , interestOps , udpNetworkLayer ) ; synchronized ( channelModList ) { channelModList . add ( request ) ; } synchronized ( channelRequestingToBeAddedRemovedSync ) { channelRequestingToBeAddedRemoved = true ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( this , tc , "Adding channel for port: " + channel . socket ( ) . getLocalPort ( ) + " to WQM : " + hashCode ( ) ) ; } selector . wakeup ( ) ; |
public class LinkedList { /** * Add all elements in list into this linked list
* @ param list List */
public void addAll ( List < T > list ) { } } | if ( list . isEmpty ( ) ) return ; Element < T > a = requestNew ( ) ; a . object = list . get ( 0 ) ; if ( first == null ) { first = a ; } else if ( last != null ) { last . next = a ; a . previous = last ; } for ( int i = 1 ; i < list . size ( ) ; i ++ ) { Element < T > b = requestNew ( ) ; b . object = list . get ( i ) ; a . next = b ; b . previous = a ; a = b ; } last = a ; size += list . size ( ) ; |
public class CmsRedirectLoader { /** * Delivers a Flex controller , either by creating a new one , or by re - using an existing one . < p >
* @ param cms the initial CmsObject to wrap in the controller
* @ param resource the resource requested
* @ param req the current request
* @ param res the current response
* @ param streaming indicates if the response is streaming
* @ param top indicates if the response is the top response
* @ return a Flex controller */
protected CmsFlexController getController ( CmsObject cms , CmsResource resource , HttpServletRequest req , HttpServletResponse res , boolean streaming , boolean top ) { } } | CmsFlexController controller = null ; if ( top ) { // only check for existing controller if this is the " top " request / response
controller = CmsFlexController . getController ( req ) ; } if ( controller == null ) { // create new request / response wrappers
controller = new CmsFlexController ( cms , resource , m_cache , req , res , streaming , top ) ; CmsFlexController . setController ( req , controller ) ; CmsFlexRequest f_req = new CmsFlexRequest ( req , controller ) ; CmsFlexResponse f_res = new CmsFlexResponse ( res , controller , streaming , true ) ; controller . push ( f_req , f_res ) ; } else if ( controller . isForwardMode ( ) ) { // reset CmsObject ( because of URI ) if in forward mode
controller = new CmsFlexController ( cms , controller ) ; CmsFlexController . setController ( req , controller ) ; } return controller ; |
public class Types { /** * Make an intersection type from non - empty list of types . The list should be ordered according to
* { @ link TypeSymbol # precedes ( TypeSymbol , Types ) } . This does not cause symbol completion as
* an extra parameter indicates as to whether all bounds are interfaces - in which case the
* supertype is implicitly assumed to be ' Object ' .
* @ param bounds the types from which the intersection type is formed
* @ param allInterfaces are all bounds interface types ? */
public IntersectionClassType makeIntersectionType ( List < Type > bounds , boolean allInterfaces ) { } } | Assert . check ( bounds . nonEmpty ( ) ) ; Type firstExplicitBound = bounds . head ; if ( allInterfaces ) { bounds = bounds . prepend ( syms . objectType ) ; } ClassSymbol bc = new ClassSymbol ( ABSTRACT | PUBLIC | SYNTHETIC | COMPOUND | ACYCLIC , Type . moreInfo ? names . fromString ( bounds . toString ( ) ) : names . empty , null , syms . noSymbol ) ; IntersectionClassType intersectionType = new IntersectionClassType ( bounds , bc , allInterfaces ) ; bc . type = intersectionType ; bc . erasure_field = ( bounds . head . hasTag ( TYPEVAR ) ) ? syms . objectType : // error condition , recover
erasure ( firstExplicitBound ) ; bc . members_field = WriteableScope . create ( bc ) ; return intersectionType ; |
public class InternalSARLParser { /** * InternalSARL . g : 84:1 : entryRuleSarlScript returns [ EObject current = null ] : iv _ ruleSarlScript = ruleSarlScript EOF ; */
public final EObject entryRuleSarlScript ( ) throws RecognitionException { } } | EObject current = null ; EObject iv_ruleSarlScript = null ; try { // InternalSARL . g : 84:51 : ( iv _ ruleSarlScript = ruleSarlScript EOF )
// InternalSARL . g : 85:2 : iv _ ruleSarlScript = ruleSarlScript EOF
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getSarlScriptRule ( ) ) ; } pushFollow ( FOLLOW_1 ) ; iv_ruleSarlScript = ruleSarlScript ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { current = iv_ruleSarlScript ; } match ( input , EOF , FOLLOW_2 ) ; if ( state . failed ) return current ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ; |
public class WTree { /** * Save the currently open rows .
* Note - Only used for when the tree is in LAZY mode */
protected void addPrevExpandedCurrent ( ) { } } | Set < String > rows = getExpandedRows ( ) ; if ( ! rows . isEmpty ( ) ) { WTreeComponentModel model = getOrCreateComponentModel ( ) ; if ( model . prevExpandedRows == null ) { model . prevExpandedRows = new HashSet < > ( ) ; } model . prevExpandedRows . addAll ( rows ) ; } |
public class AvroDeserializationSchema { /** * Creates { @ link AvroDeserializationSchema } that produces classes that were generated from avro schema .
* @ param tClass class of record to be produced
* @ return deserialized record */
public static < T extends SpecificRecord > AvroDeserializationSchema < T > forSpecific ( Class < T > tClass ) { } } | return new AvroDeserializationSchema < > ( tClass , null ) ; |
public class ClassUseWriter { /** * Get the tree link .
* @ return a content tree for the tree link */
protected Content getNavLinkTree ( ) { } } | Content linkContent = classdoc . containingPackage ( ) . isIncluded ( ) ? getHyperLink ( DocPath . parent . resolve ( DocPaths . PACKAGE_TREE ) , treeLabel ) : getHyperLink ( pathToRoot . resolve ( DocPaths . OVERVIEW_TREE ) , treeLabel ) ; Content li = HtmlTree . LI ( linkContent ) ; return li ; |
public class PolicyComplianceDetail { /** * An array of resources that are not protected by the policy .
* @ param violators
* An array of resources that are not protected by the policy . */
public void setViolators ( java . util . Collection < ComplianceViolator > violators ) { } } | if ( violators == null ) { this . violators = null ; return ; } this . violators = new java . util . ArrayList < ComplianceViolator > ( violators ) ; |
public class CommerceDiscountRelLocalServiceUtil { /** * Returns a range of all the commerce discount rels .
* Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link com . liferay . portal . kernel . dao . orm . QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link com . liferay . portal . kernel . dao . orm . QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link com . liferay . commerce . discount . model . impl . CommerceDiscountRelModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order .
* @ param start the lower bound of the range of commerce discount rels
* @ param end the upper bound of the range of commerce discount rels ( not inclusive )
* @ return the range of commerce discount rels */
public static java . util . List < com . liferay . commerce . discount . model . CommerceDiscountRel > getCommerceDiscountRels ( int start , int end ) { } } | return getService ( ) . getCommerceDiscountRels ( start , end ) ; |
public class Scheduler { /** * Get a snapshot of the { @ link PoolMetadata } objects for all the schedulers
* for each resource type . This is used for gathering metrics .
* @ return List of snapshots for each pool and its resources */
private List < PoolMetadata > getPoolMetadataList ( ) { } } | Map < String , PoolMetadata > poolNameMetadataMap = new HashMap < String , PoolMetadata > ( ) ; for ( Map . Entry < ResourceType , SchedulerForType > schedulerEntry : schedulersForTypes . entrySet ( ) ) { for ( Map . Entry < PoolInfo , PoolInfoMetrics > poolEntry : schedulerEntry . getValue ( ) . getPoolInfoMetrics ( ) . entrySet ( ) ) { ResourceMetadata resourceMetadata = poolEntry . getValue ( ) . getResourceMetadata ( ) ; // Ignore any invalid pool metrics
if ( resourceMetadata == null ) { continue ; } String stringifiedPoolInfo = PoolInfo . createStringFromPoolInfo ( poolEntry . getKey ( ) ) ; PoolMetadata poolMetadata = poolNameMetadataMap . get ( stringifiedPoolInfo ) ; if ( poolMetadata == null ) { poolMetadata = new PoolMetadata ( stringifiedPoolInfo ) ; poolNameMetadataMap . put ( stringifiedPoolInfo , poolMetadata ) ; } poolMetadata . addResourceMetadata ( schedulerEntry . getKey ( ) . toString ( ) , resourceMetadata ) ; } } return new ArrayList < PoolMetadata > ( poolNameMetadataMap . values ( ) ) ; |
public class DefaultJavaRunner { /** * { @ inheritDoc } */
public void shutdown ( ) { } } | try { if ( m_shutdownHook != null ) { synchronized ( m_shutdownHook ) { if ( m_shutdownHook != null ) { LOG . debug ( "Shutdown in progress..." ) ; Runtime . getRuntime ( ) . removeShutdownHook ( m_shutdownHook ) ; m_frameworkProcess = null ; m_shutdownHook . run ( ) ; m_shutdownHook = null ; LOG . info ( "Platform has been shutdown." ) ; } } } } catch ( IllegalStateException ignore ) { // just ignore
} |
public class GeometryUtil { /** * Return the variation of each angle value between the 2 aligned molecules .
* @ param firstAtomContainer the ( largest ) first aligned AtomContainer which is the reference
* @ param secondAtomContainer the second aligned AtomContainer
* @ param mappedAtoms Map : a Map of the mapped atoms
* @ return double : the value of the RMSD */
public static double getAngleRMSD ( IAtomContainer firstAtomContainer , IAtomContainer secondAtomContainer , Map < Integer , Integer > mappedAtoms ) { } } | // logger . debug ( " * * * * GT getAngleRMSD * * * * " ) ;
Iterator < Integer > firstAtoms = mappedAtoms . keySet ( ) . iterator ( ) ; // logger . debug ( " mappedAtoms : " + mappedAtoms . toString ( ) ) ;
IAtom firstAtomfirstAC ; IAtom centerAtomfirstAC ; IAtom firstAtomsecondAC ; IAtom secondAtomsecondAC ; IAtom centerAtomsecondAC ; double angleFirstMolecule ; double angleSecondMolecule ; double sum = 0 ; double n = 0 ; while ( firstAtoms . hasNext ( ) ) { int firstAtomNumber = firstAtoms . next ( ) ; centerAtomfirstAC = firstAtomContainer . getAtom ( firstAtomNumber ) ; List < IAtom > connectedAtoms = firstAtomContainer . getConnectedAtomsList ( centerAtomfirstAC ) ; if ( connectedAtoms . size ( ) > 1 ) { // logger . debug ( " If " + centerAtomfirstAC . getSymbol ( ) + " is the center atom : " ) ;
for ( int i = 0 ; i < connectedAtoms . size ( ) - 1 ; i ++ ) { firstAtomfirstAC = connectedAtoms . get ( i ) ; for ( int j = i + 1 ; j < connectedAtoms . size ( ) ; j ++ ) { angleFirstMolecule = getAngle ( centerAtomfirstAC , firstAtomfirstAC , connectedAtoms . get ( j ) ) ; centerAtomsecondAC = secondAtomContainer . getAtom ( mappedAtoms . get ( firstAtomContainer . indexOf ( centerAtomfirstAC ) ) ) ; firstAtomsecondAC = secondAtomContainer . getAtom ( mappedAtoms . get ( firstAtomContainer . indexOf ( firstAtomfirstAC ) ) ) ; secondAtomsecondAC = secondAtomContainer . getAtom ( mappedAtoms . get ( firstAtomContainer . indexOf ( connectedAtoms . get ( j ) ) ) ) ; angleSecondMolecule = getAngle ( centerAtomsecondAC , firstAtomsecondAC , secondAtomsecondAC ) ; sum = sum + Math . pow ( angleFirstMolecule - angleSecondMolecule , 2 ) ; n ++ ; // logger . debug ( " Error for the " + firstAtomfirstAC . getSymbol ( ) . toLowerCase ( ) + " - " + centerAtomfirstAC . getSymbol ( ) + " - " + connectedAtoms [ j ] . getSymbol ( ) . toLowerCase ( ) + " Angle : " + deltaAngle + " degrees " ) ;
} } } // if
} return Math . sqrt ( sum / n ) ; |
public class Streams {
    /**
     * Implements {@code OutputStream.write(int)} in terms of
     * {@code OutputStream.write(byte[], int, int)}. {@code OutputStream} assumes that
     * you implement {@code write(int)} and provides default implementations of the
     * others, but often the opposite is more efficient.
     *
     * @param out the stream to write to
     * @param b   the byte to write; only the low-order 8 bits are used, per the
     *            {@code OutputStream.write(int)} contract
     * @throws IOException if the underlying stream fails
     */
    public static void writeSingleByte(OutputStream out, int b) throws IOException {
        // Narrowing to byte keeps exactly the low 8 bits, so no explicit mask is needed.
        out.write(new byte[] { (byte) b });
    }
}
public class ChangeRoomPasswordImpl {
    /**
     * Executes the change-room-password command: resolves the SmartFoxServer user and
     * room for the configured {@code owner} and {@code targetRoom}, then applies the
     * configured {@code password} via the server API.
     *
     * @see com.tvd12.ezyfox.core.command.BaseCommand#execute()
     * @return {@code Boolean.TRUE} on success
     * @throws IllegalStateException if the underlying room operation fails
     */
    @SuppressWarnings("unchecked")
    @Override
    public Boolean execute() {
        try {
            api.changeRoomPassword(CommandUtil.getSfsUser(owner, api),
                    CommandUtil.getSfsRoom(targetRoom, extension), password);
        } catch (SFSRoomException e) {
            // Surface the checked SFS exception as unchecked, preserving the cause.
            throw new IllegalStateException(e);
        }
        return Boolean.TRUE;
    }
}
public class ValidationHelper { /** * Checks whether the value for the given property key of the given node
* is a non - empty string .
* @ param node the node
* @ param key the property key
* @ param errorBuffer the error buffer
* @ return true if the condition is valid */
public static boolean isValidPropertyNotNull ( final GraphObject node , final PropertyKey key , final ErrorBuffer errorBuffer ) { } } | final String type = node . getType ( ) ; if ( key == null ) { errorBuffer . add ( new EmptyPropertyToken ( type , UnknownType ) ) ; return false ; } final Object value = node . getProperty ( key ) ; if ( value != null ) { if ( value instanceof Iterable ) { if ( ( ( Iterable ) value ) . iterator ( ) . hasNext ( ) ) { return true ; } } else { return true ; } } errorBuffer . add ( new EmptyPropertyToken ( type , key ) ) ; return false ; |
public class ElementFactory { /** * Used to build a RelationEdge by ThingImpl when it needs to connect itself with an attribute ( implicit relation ) */
RelationImpl buildRelation ( EdgeElement edge , RelationType type , Role owner , Role value ) { } } | return getOrBuildConcept ( edge , ( e ) -> RelationImpl . create ( RelationEdge . create ( type , owner , value , edge ) ) ) ; |
public class SesClient {
    /**
     * Send email.
     *
     * This method allows you to construct the request object yourself. The request is
     * validated field by field (source, subject, body, priority, destination) before
     * being serialized to JSON and posted to the {@code email} endpoint.
     *
     * @param request the request object which includes the parameters of sending mail;
     *                see {@code com.baidubce.services.ses.model.SendEmailRequest}
     * @return the service response
     * @throws IllegalArgumentException if any validated field is missing or out of range
     * @see com.baidubce.services.ses.model.SendEmailRequest
     */
    public SendEmailResponse sendEmail(SendEmailRequest request) {
        checkNotNull(request, "object request should not be null.");
        checkNotNull(request.getMail(), "object mail of request should not be null.");
        // validate source: a from-address must be present and well-formed
        checkNotNull(request.getMail().getSource(), "object source of request should not be null.");
        assertStringNotNullOrEmpty(request.getMail().getSource().getFrom(), "from should not be null or empty.");
        checkIsEmail(request.getMail().getSource().getFrom());
        // validate subject: data required; charset, when set, must be one of the codes 0..4
        checkNotNull(request.getMail().getSubject(), "object subject of request should not be null.");
        assertStringNotNullOrEmpty(request.getMail().getSubject().getData(), "subject should not be null or empty.");
        if (request.getMail().getSubject().getCharset() != null
                && !Arrays.asList(new Integer[] { 0, 1, 2, 3, 4 }).contains(request.getMail().getSubject().getCharset())) {
            throw new IllegalArgumentException("illegal charset.");
        }
        // validate body: HTML message data required; same charset codes as the subject
        checkNotNull(request.getMail().getMessage(), "object message of request should not be null.");
        checkNotNull(request.getMail().getMessage().getHtml(), "object html of request should not be null.");
        assertStringNotNullOrEmpty(request.getMail().getMessage().getHtml().getData(),
                "body should not be null or empty.");
        if (request.getMail().getMessage().getHtml().getCharset() != null
                && !Arrays.asList(new Integer[] { 0, 1, 2, 3, 4 })
                        .contains(request.getMail().getMessage().getHtml().getCharset())) {
            throw new IllegalArgumentException("illegal charset.");
        }
        // validate priority: optional, but when set must lie in 0..99
        if (request.getMail().getPriority() != null
                && (request.getMail().getPriority() <= -1 || request.getMail().getPriority() >= 100)) {
            throw new IllegalArgumentException("illegal priority.");
        }
        // validate destination: at least one to-address, each non-null and well-formed
        checkNotNull(request.getMail().getDestination(), "object destination of request should not be null.");
        assertListNotNullOrEmpty(request.getMail().getDestination().getToAddr(), "toAddr should not be null or empty.");
        for (Addr toAddr : request.getMail().getDestination().getToAddr()) {
            checkNotNull(toAddr, "object toAddr of request should not be null.");
            checkIsEmail(toAddr.getAddr());
        }
        InternalRequest internalRequest = this.createRequest("email", request, HttpMethodName.POST);
        // fill in the request payload
        internalRequest = fillRequestPayload(internalRequest, JsonUtils.toJsonString(request));
        // send email
        return this.invokeHttpClient(internalRequest, SendEmailResponse.class);
    }
}
public class CmsUserInfoDialogAction { /** * Handles the user image file upload . < p >
* @ param uploadedFiles the uploaded file names
* @ param context the dialog context */
void handleUpload ( List < String > uploadedFiles , I_CmsDialogContext context ) { } } | CmsObject cms = context . getCms ( ) ; boolean success = OpenCms . getWorkplaceAppManager ( ) . getUserIconHelper ( ) . handleImageUpload ( cms , uploadedFiles ) ; if ( success ) { context . reload ( ) ; } |
public class DateTimeExtensions { /** * Formats this date / time with the { @ link java . time . format . DateTimeFormatter # ISO _ LOCAL _ DATE _ TIME } formatter
* and appends the zone ' s short name , e . g . { @ code 2018-03-10T14:34:55.144EST } .
* @ param self a ZonedDateTime
* @ return a formatted String
* @ see java . time . format . DateTimeFormatter
* @ since 2.5.0 */
public static String getDateTimeString ( final ZonedDateTime self ) { } } | return self . format ( DateTimeFormatter . ISO_LOCAL_DATE_TIME ) + self . format ( ZONE_SHORT_FORMATTER ) ; |
public class CurrencyQueryBuilder {
    /**
     * Sets the currency codes, or the regular expressions to select codes.
     *
     * @param codes the currency codes or code expressions, not null.
     * @return the query builder for chaining.
     */
    public CurrencyQueryBuilder setCurrencyCodes(String... codes) {
        // Stored as a List under the well-known query key; Arrays.asList keeps the
        // varargs array as a fixed-size view.
        return set(CurrencyQuery.KEY_QUERY_CURRENCY_CODES, Arrays.asList(codes));
    }
}
public class StringUtils { /** * Checks that the supplied string is non - empty . If it is empty , an { @ link
* java . lang . IllegalArgumentException } is thrown with the supplied message . */
public static String checkNonEmpty ( String s , String msg ) { } } | checkArgument ( ! s . isEmpty ( ) , msg ) ; return s ; |
public class AbstractDialect { /** * { @ inheritDoc }
* @ see jp . co . future . uroborosql . dialect . Dialect # getJavaType ( int , java . lang . String ) */
@ Override public JavaType getJavaType ( final int jdbcType , final String jdbcTypeName ) { } } | JDBCType type = null ; try { type = JDBCType . valueOf ( jdbcType ) ; } catch ( IllegalArgumentException ex ) { type = JDBCType . OTHER ; } return getJavaType ( type , jdbcTypeName ) ; |
public class DeleteLoadBalancerTlsCertificateRequestMarshaller {
    /**
     * Marshalls the given request object into the protocol marshaller, binding the
     * load balancer name, certificate name and force flag.
     *
     * @param deleteLoadBalancerTlsCertificateRequest the request to marshall, must not be null
     * @param protocolMarshaller the target protocol marshaller
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DeleteLoadBalancerTlsCertificateRequest deleteLoadBalancerTlsCertificateRequest,
            ProtocolMarshaller protocolMarshaller) {
        if (deleteLoadBalancerTlsCertificateRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(deleteLoadBalancerTlsCertificateRequest.getLoadBalancerName(),
                    LOADBALANCERNAME_BINDING);
            protocolMarshaller.marshall(deleteLoadBalancerTlsCertificateRequest.getCertificateName(),
                    CERTIFICATENAME_BINDING);
            protocolMarshaller.marshall(deleteLoadBalancerTlsCertificateRequest.getForce(), FORCE_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class GenJsCodeVisitor {
    /**
     * Generates a JS variable declaration for a Soy {@code let} value node.
     *
     * Example:
     * <pre>
     *   {let $boo: $foo.goo[$moo] /}
     * </pre>
     * might generate
     * <pre>
     *   var boo35 = opt_data.foo.goo[opt_data.moo];
     * </pre>
     */
    @Override
    protected void visitLetValueNode(LetValueNode node) {
        String generatedVarName = node.getUniqueVarName();
        // Generate code to define the local var.
        Expression value = translateExpr(node.getExpr());
        jsCodeBuilder.append(VariableDeclaration.builder(generatedVarName).setRhs(value).build());
        // Add a mapping for generating future references to this local var.
        templateTranslationContext.soyToJsVariableMappings().put(node.getVarName(), id(generatedVarName));
    }
}
public class FlexiantComputeClient { /** * Starts the given server
* @ param server the server to start .
* @ throws FlexiantException
* @ see FlexiantComputeClient # startServer ( String ) */
public void startServer ( de . uniulm . omi . cloudiator . flexiant . client . domain . Server server ) throws FlexiantException { } } | if ( server == null ) { throw new IllegalArgumentException ( "The given server must not be null." ) ; } this . startServer ( server . getId ( ) ) ; |
public class CmdbRemoteServiceImpl { /** * Builds collector Item for new Cmdb item
* @ param request
* @ param collector
* @ return */
private CollectorItem buildCollectorItem ( CmdbRequest request , Collector collector ) { } } | CollectorItem collectorItem = new CollectorItem ( ) ; collectorItem . setCollectorId ( collector . getId ( ) ) ; collectorItem . setEnabled ( false ) ; collectorItem . setPushed ( true ) ; collectorItem . setDescription ( request . getCommonName ( ) ) ; collectorItem . setLastUpdated ( System . currentTimeMillis ( ) ) ; collectorItem . getOptions ( ) . put ( CONFIGURATION_ITEM , request . getConfigurationItem ( ) ) ; collectorItem . getOptions ( ) . put ( COMMON_NAME , request . getCommonName ( ) ) ; return collectorService . createCollectorItem ( collectorItem ) ; |
public class ExpandoMetaClass {
    /**
     * Overrides the default implementation just in case {@code invokeMethod} has been
     * overridden by this ExpandoMetaClass: when a user-supplied {@code invokeMethod}
     * closure exists it takes precedence over normal dispatch.
     *
     * @see groovy.lang.MetaClassImpl#invokeMethod(Class, Object, String, Object[], boolean, boolean)
     */
    public Object invokeMethod(Class sender, Object object, String methodName, Object[] originalArguments,
            boolean isCallToSuper, boolean fromInsideClass) {
        if (invokeMethodMethod != null) {
            // Unwrap Groovy argument wrappers in place before handing off to the override.
            MetaClassHelper.unwrap(originalArguments);
            return invokeMethodMethod.invoke(object, new Object[] { methodName, originalArguments });
        }
        // No override installed: fall back to standard MetaClassImpl dispatch.
        return super.invokeMethod(sender, object, methodName, originalArguments, isCallToSuper, fromInsideClass);
    }
}
public class Ifc4PackageImpl {
    /**
     * Returns the EClass for IfcMechanicalFastenerType, lazily resolved from the
     * registered Ifc4 EPackage (classifier index 377).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcMechanicalFastenerType() {
        if (ifcMechanicalFastenerTypeEClass == null) {
            // Lazy lookup against the global EMF package registry; cached afterwards.
            ifcMechanicalFastenerTypeEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(377);
        }
        return ifcMechanicalFastenerTypeEClass;
    }
}
public class DTMDocumentImpl {
    /**
     * Given a node handle, find its preceding sibling.
     * WARNING: DTM is asymmetric; this operation is resolved by search, and is
     * relatively expensive.
     *
     * @param nodeHandle the id of the node.
     * @return int Node-number of the previous sibling,
     *         or DTM.NULL to indicate none exists.
     */
    public int getPreviousSibling(int nodeHandle) {
        // Strip the document-id bits; only the node number is used for the search.
        nodeHandle &= NODEHANDLE_MASK;
        // Document root has no previous sibling
        if (nodeHandle == 0)
            return NULL;
        int parent = nodes.readEntry(nodeHandle, 1);
        int kid = NULL;
        // Walk the parent's children; the last child seen before reaching nodeHandle
        // is the previous sibling (stays NULL if nodeHandle is the first child).
        for (int nextkid = getFirstChild(parent); nextkid != nodeHandle; nextkid = getNextSibling(nextkid)) {
            kid = nextkid;
        }
        // Re-attach the document-id bits before returning the handle.
        return kid | m_docHandle;
    }
}
public class servicegroup {
    /**
     * Use this API to disable servicegroup resources.
     *
     * Only the fields relevant to the disable operation (name, server, port, delay,
     * graceful) are copied into fresh request objects before the bulk call.
     *
     * @param client the nitro service client
     * @param resources the servicegroup resources to disable
     * @return the bulk responses, or null when {@code resources} is null or empty
     * @throws Exception if the bulk operation fails
     */
    public static base_responses disable(nitro_service client, servicegroup resources[]) throws Exception {
        base_responses result = null;
        if (resources != null && resources.length > 0) {
            servicegroup disableresources[] = new servicegroup[resources.length];
            for (int i = 0; i < resources.length; i++) {
                disableresources[i] = new servicegroup();
                disableresources[i].servicegroupname = resources[i].servicegroupname;
                disableresources[i].servername = resources[i].servername;
                disableresources[i].port = resources[i].port;
                disableresources[i].delay = resources[i].delay;
                disableresources[i].graceful = resources[i].graceful;
            }
            result = perform_operation_bulk_request(client, disableresources, "disable");
        }
        return result;
    }
}
public class Composition { /** * Gets the argument count . The parameter function takes away 1
* argument from the base function , but it adds its own arguments .
* @ return the argument count */
@ Override public Class [ ] getParameterTypes ( ) { } } | Class [ ] a , b , result ; a = base . getParameterTypes ( ) ; b = para . getParameterTypes ( ) ; result = new Class [ a . length - 1 + b . length ] ; System . arraycopy ( a , 0 , result , 0 , idx ) ; System . arraycopy ( b , 0 , result , idx , b . length ) ; System . arraycopy ( a , idx + 1 , result , idx + b . length , a . length - ( idx + 1 ) ) ; return result ; |
public class ConfigurationManager { /** * Returns the configuration property for the given key , or the given default value .
* @ param key
* configuration key
* @ param defaultValue
* default value for key
* @ return configuration value , or the default value if the key is not defined */
public String getProperty ( String key , String defaultValue ) { } } | String value = resolver . get ( key ) ; return ( value == null ) ? defaultValue : value ; |
public class ControlUtils { /** * Returns the default binding based entirely upon annotations or naming conventions .
* @ param controlIntf the control interface class
* @ return the class name of the default control implementation binding */
static String getDefaultControlBinding ( Class controlIntf ) { } } | controlIntf = getMostDerivedInterface ( controlIntf ) ; ControlInterface intfAnnot = ( ControlInterface ) controlIntf . getAnnotation ( ControlInterface . class ) ; String implBinding = intfAnnot . defaultBinding ( ) ; implBinding = resolveDefaultBinding ( implBinding , controlIntf . getName ( ) ) ; return implBinding ; |
public class AWSOpsWorksClient { /** * Updates a specified user profile .
* < b > Required Permissions < / b > : To use this action , an IAM user must have an attached policy that explicitly grants
* permissions . For more information about user permissions , see < a
* href = " http : / / docs . aws . amazon . com / opsworks / latest / userguide / opsworks - security - users . html " > Managing User
* Permissions < / a > .
* @ param updateUserProfileRequest
* @ return Result of the UpdateUserProfile operation returned by the service .
* @ throws ValidationException
* Indicates that a request was not valid .
* @ throws ResourceNotFoundException
* Indicates that a resource was not found .
* @ sample AWSOpsWorks . UpdateUserProfile
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / opsworks - 2013-02-18 / UpdateUserProfile " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public UpdateUserProfileResult updateUserProfile ( UpdateUserProfileRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeUpdateUserProfile ( request ) ; |
public class TuneBook { /** * Put a tune into the book . If exists , replace tune with the same reference
* number and returns true . If doesn ' t exist , add it and return false .
* @ return true if it replaced an existing tune , false otherwise */
public boolean putTune ( Tune tune ) { } } | boolean ret = containsTune ( tune ) ; tune . getTuneInfos ( ) . setBookInfos ( getBookInfos ( ) ) ; tune . getInstructions ( ) . addAll ( 0 , getInstructions ( ) ) ; m_tunes . put ( tune . getReferenceNumber ( ) , tune ) ; return ret ; |
public class CascadingStyleSheet {
    /**
     * Add a new <code>@namespace</code> rule at the end of the
     * <code>@namespace</code> rule list.
     *
     * @param aNamespaceRule
     *        The namespace rule to be added. May not be <code>null</code>.
     * @return this for chaining
     */
    @Nonnull
    public CascadingStyleSheet addNamespaceRule(@Nonnull final CSSNamespaceRule aNamespaceRule) {
        ValueEnforcer.notNull(aNamespaceRule, "NamespaceRule");
        m_aNamespaceRules.add(aNamespaceRule);
        return this;
    }
}
public class KeenQueryClient { /** * Select Unique query with only the required arguments .
* Query API info here : https : / / keen . io / docs / api / # select - unique
* @ param eventCollection The name of the event collection you are analyzing .
* @ param targetProperty The name of the property you are analyzing .
* @ param timeframe The { @ link RelativeTimeframe } or { @ link AbsoluteTimeframe } .
* @ return The select unique query response .
* @ throws IOException If there was an error communicating with the server or
* an error message received from the server . */
public QueryResult selectUnique ( String eventCollection , String targetProperty , Timeframe timeframe ) throws IOException { } } | Query queryParams = new Query . Builder ( QueryType . SELECT_UNIQUE ) . withEventCollection ( eventCollection ) . withTargetProperty ( targetProperty ) . withTimeframe ( timeframe ) . build ( ) ; QueryResult result = execute ( queryParams ) ; return result ; |
public class AmazonIdentityManagementClient { /** * Creates a new role for your AWS account . For more information about roles , go to < a
* href = " https : / / docs . aws . amazon . com / IAM / latest / UserGuide / WorkingWithRoles . html " > IAM Roles < / a > . For information
* about limitations on role names and the number of roles you can create , go to < a
* href = " https : / / docs . aws . amazon . com / IAM / latest / UserGuide / LimitationsOnEntities . html " > Limitations on IAM
* Entities < / a > in the < i > IAM User Guide < / i > .
* @ param createRoleRequest
* @ return Result of the CreateRole operation returned by the service .
* @ throws LimitExceededException
* The request was rejected because it attempted to create resources beyond the current AWS account limits .
* The error message describes the limit exceeded .
* @ throws InvalidInputException
* The request was rejected because an invalid or out - of - range value was supplied for an input parameter .
* @ throws EntityAlreadyExistsException
* The request was rejected because it attempted to create a resource that already exists .
* @ throws MalformedPolicyDocumentException
* The request was rejected because the policy document was malformed . The error message describes the
* specific error .
* @ throws ConcurrentModificationException
* The request was rejected because multiple requests to change this object were submitted simultaneously .
* Wait a few minutes and submit your request again .
* @ throws ServiceFailureException
* The request processing has failed because of an unknown error , exception or failure .
* @ sample AmazonIdentityManagement . CreateRole
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / iam - 2010-05-08 / CreateRole " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public CreateRoleResult createRole ( CreateRoleRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeCreateRole ( request ) ; |
public class CmsJspTagContainer {
    /**
     * Generates the detail view element.<p>
     *
     * Looks up the detail formatter for the detail content, optionally transfers
     * settings from the container's first element of the same type, resolves a
     * formatter override from those settings, and builds the container element bean.
     *
     * @param request the current request
     * @param cms the CMS context
     * @param detailContent the detail content resource (may be null, in which case null is returned)
     * @param container the container
     * @return the detail view element, or null if no detail content or no formatter was found
     */
    private CmsContainerElementBean generateDetailViewElement(ServletRequest request, CmsObject cms,
            CmsResource detailContent, CmsContainerBean container) {
        CmsContainerElementBean element = null;
        if (detailContent != null) {
            // get the right formatter
            CmsADEConfigData config = OpenCms.getADEManager().lookupConfiguration(cms,
                    cms.getRequestContext().getRootUri());
            CmsFormatterConfiguration formatters = config.getFormatters(cms, detailContent);
            I_CmsFormatterBean formatter = formatters.getDetailFormatter(getType(), getContainerWidth());
            if (formatter != null) {
                // use structure id as the instance id to enable use of nested containers
                Map<String, String> settings = new HashMap<String, String>();
                if (!container.getElements().isEmpty()) {
                    // in case the first element in the container is of the same type as the
                    // detail content, transfer its settings
                    CmsContainerElementBean el = container.getElements().get(0);
                    try {
                        el.initResource(cms);
                        if (el.getResource().getTypeId() == detailContent.getTypeId()) {
                            settings.putAll(el.getIndividualSettings());
                        }
                    } catch (CmsException e) {
                        // Settings transfer is best-effort; log and continue without them.
                        LOG.error(e.getLocalizedMessage(), e);
                    }
                }
                String formatterKey = CmsFormatterConfig.getSettingsKeyForContainer(container.getName());
                if (settings.containsKey(formatterKey)) {
                    // A formatter override may be stored in the transferred settings.
                    String formatterConfigId = settings.get(formatterKey);
                    if (CmsUUID.isValidUUID(formatterConfigId)) {
                        I_CmsFormatterBean formatterBean = OpenCms.getADEManager()
                                .getCachedFormatters(
                                        cms.getRequestContext().getCurrentProject().isOnlineProject())
                                .getFormatters().get(new CmsUUID(formatterConfigId));
                        if (formatterBean != null) {
                            formatter = formatterBean;
                        }
                    }
                }
                settings.put(formatterKey, formatter.getId());
                settings.put(CmsContainerElement.ELEMENT_INSTANCE_ID, new CmsUUID().toString());
                // create element bean
                element = new CmsContainerElementBean(detailContent.getStructureId(),
                        formatter.getJspStructureId(), settings, false);
                String pageRootPath = cms.getRequestContext().addSiteRoot(cms.getRequestContext().getUri());
                element = CmsTemplateMapper.get(request).transformDetailElement(cms, element, pageRootPath);
            }
        }
        return element;
    }
}
public class GetDiskRequestMarshaller {
    /**
     * Marshalls the given request object into the protocol marshaller, binding the
     * disk name.
     *
     * @param getDiskRequest the request to marshall, must not be null
     * @param protocolMarshaller the target protocol marshaller
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(GetDiskRequest getDiskRequest, ProtocolMarshaller protocolMarshaller) {
        if (getDiskRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getDiskRequest.getDiskName(), DISKNAME_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class LambdaDslJsonArray { /** * Element that is an array with a minimum and maximum size where each item must match the following example
* @ param minSize minimum size of the array
* @ param maxSize maximum size of the array */
public LambdaDslJsonArray minMaxArrayLike ( Integer minSize , Integer maxSize , Consumer < LambdaDslJsonBody > nestedObject ) { } } | final PactDslJsonBody arrayLike = pactArray . minMaxArrayLike ( minSize , maxSize ) ; final LambdaDslJsonBody dslBody = new LambdaDslJsonBody ( arrayLike ) ; nestedObject . accept ( dslBody ) ; arrayLike . closeArray ( ) ; return this ; |
public class InternalService {
    /**
     * Returns all visible conversations asynchronously, adapting the observable from
     * the synchronous overload onto the supplied callback.
     *
     * @param isPublic whether to fetch conversations with public (vs private) access
     * @param callback callback receiving the conversation list result; may be null
     */
    public void getConversations(final boolean isPublic,
            @Nullable Callback<ComapiResult<List<Conversation>>> callback) {
        adapter.adapt(getConversations(isPublic), callback);
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.