signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class CommerceAddressRestrictionLocalServiceUtil { /** * Returns the commerce address restriction with the primary key . * @ param commerceAddressRestrictionId the primary key of the commerce address restriction * @ return the commerce address restriction * @ throws PortalException if a commerce address restriction with the primary key could not be found */ public static com . liferay . commerce . model . CommerceAddressRestriction getCommerceAddressRestriction ( long commerceAddressRestrictionId ) throws com . liferay . portal . kernel . exception . PortalException { } }
return getService ( ) . getCommerceAddressRestriction ( commerceAddressRestrictionId ) ;
public class SimpleStatistics { /** * Get the minimum value in the series . * @ return the minimum value , or 0.0 if the { @ link # getCount ( ) count } is 0 */ public T getMinimum ( ) { } }
Lock lock = this . lock . readLock ( ) ; lock . lock ( ) ; try { return this . minimum != null ? this . minimum : ( T ) this . math . createZeroValue ( ) ; } finally { lock . unlock ( ) ; }
// isPresent(): determines whether the field addressed by 'accessor' is present
// in this message. Order of checks: (1) a per-accessor cache hit => true;
// (2) message not assembled (contents == null) and not cached => false;
// (3) assembled message => delegate to the subclass's getFieldDef().
// All three checks run while holding the message lock so the cache, 'contents'
// and getFieldDef() cannot change between them - the header comment records
// that an unassemble could otherwise occur in between.
public class JSMessageData { /** * if an unassemble occurs in between them . The call to getFieldDef ( ) needs the lock held too . */ @ Override public boolean isPresent ( int accessor ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) JmfTr . entry ( this , tc , "isPresent" , new Object [ ] { Integer . valueOf ( accessor ) } ) ; boolean result ; checkIndex ( accessor ) ; synchronized ( getMessageLockArtefact ( ) ) { if ( cache [ accessor ] != null ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) JmfTr . exit ( this , tc , "isPresent" , Boolean . TRUE ) ; return true ; } // If it ' s not in the cache and the message isn ' t assembled it can ' t be present if ( contents == null ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) JmfTr . exit ( this , tc , "isPresent" , Boolean . FALSE ) ; return false ; } // For an assembled message , the subclass ' s getFieldDef method should answer the question . result = getFieldDef ( accessor , true ) != null ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) JmfTr . exit ( this , tc , "isPresent" , Boolean . valueOf ( result ) ) ; return result ;
// init(): shared constructor body. Validates the output APK path and the
// resource file, optionally validates the dex file, then creates the
// (optionally signing) SignedJarBuilder over a fresh FileOutputStream and
// immediately packages the resources plus classes.dex. On any failure the
// builder is cleaned up before the exception is (re)thrown, so no partially
// written APK stream is left open; non-ApkCreationException failures are
// wrapped. Note the validation calls double as field assignments
// (checkOutputFile(mApkFile = apkFile) etc.), so statement order matters.
public class ApkBuilder { /** * Constructor init method . * @ see # ApkBuilder ( File , File , File , String , PackagingOptions , PrintStream ) * @ see # ApkBuilder ( String , String , String , String , PackagingOptions , PrintStream ) * @ see # ApkBuilder ( File , File , File , PrivateKey , X509Certificate , PackagingOptions , PrintStream ) */ private void init ( File apkFile , File resFile , File dexFile , PrivateKey key , X509Certificate certificate , PrintStream verboseStream ) throws ApkCreationException { } }
try { checkOutputFile ( mApkFile = apkFile ) ; checkInputFile ( mResFile = resFile ) ; if ( dexFile != null ) { checkInputFile ( mDexFile = dexFile ) ; } else { mDexFile = null ; } mVerboseStream = verboseStream ; mBuilder = new SignedJarBuilder ( new FileOutputStream ( mApkFile , false /* append */ ) , key , certificate ) ; verbosePrintln ( "Packaging %s" , mApkFile . getName ( ) ) ; // add the resources addZipFile ( mResFile ) ; // add the class dex file at the root of the apk if ( mDexFile != null ) { addFile ( mDexFile , SdkConstants . FN_APK_CLASSES_DEX ) ; } } catch ( ApkCreationException e ) { if ( mBuilder != null ) { mBuilder . cleanUp ( ) ; } throw e ; } catch ( Exception e ) { if ( mBuilder != null ) { mBuilder . cleanUp ( ) ; } throw new ApkCreationException ( e ) ; }
public class JobOperatorImpl { /** * Helper method to publish event * @ param jobEx * @ param topicToPublish * @ param correlationId */ private void publishEvent ( WSJobExecution jobEx , String topicToPublish , String correlationId ) { } }
if ( eventsPublisher != null ) { eventsPublisher . publishJobExecutionEvent ( jobEx , topicToPublish , correlationId ) ; }
public class ApiOvhMe { /** * Create an order in order to pay this order ' s debt * REST : POST / me / debtAccount / debt / { debtId } / pay * @ param debtId [ required ] */ public OvhOrder debtAccount_debt_debtId_pay_POST ( Long debtId ) throws IOException { } }
String qPath = "/me/debtAccount/debt/{debtId}/pay" ; StringBuilder sb = path ( qPath , debtId ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhOrder . class ) ;
// classifyNoSeq(): classifies each CoreLabel independently (no Viterbi), so
// each position simply gets the locally best (or threshold-gated) label.
// When flags.useReverse is set the document is reversed on entry and
// re-reversed on exit, so callers always see the original order.
// NOTE(review): in the lowerNewgeneThreshold branch, scoresOf() is evaluated
// at every position deliberately - the inline comment records that skipping
// positions changes the answers (scoresOf has a side effect). Do not "optimize"
// the loop to evaluate lazily.
public class CMMClassifier { /** * Classify a List of { @ link CoreLabel } s without using sequence information * ( i . e . no Viterbi algorithm , just distribution over next class ) . * @ param document a List of { @ link CoreLabel } s to be classified */ private void classifyNoSeq ( List < IN > document ) { } }
if ( flags . useReverse ) { Collections . reverse ( document ) ; } if ( flags . lowerNewgeneThreshold ) { // Used to raise recall for task 1B System . err . println ( "Using NEWGENE threshold: " + flags . newgeneThreshold ) ; for ( int i = 0 , docSize = document . size ( ) ; i < docSize ; i ++ ) { CoreLabel wordInfo = document . get ( i ) ; Datum < String , String > d = makeDatum ( document , i , featureFactory ) ; Counter < String > scores = classifier . scoresOf ( d ) ; // String answer = BACKGROUND ; String answer = flags . backgroundSymbol ; // HN : The evaluation of scoresOf seems to result in some // kind of side effect . Specifically , the symptom is that // if scoresOf is not evaluated at every position , the // answers are different if ( "NEWGENE" . equals ( wordInfo . get ( GazAnnotation . class ) ) ) { for ( String label : scores . keySet ( ) ) { if ( "G" . equals ( label ) ) { System . err . println ( wordInfo . word ( ) + ':' + scores . getCount ( label ) ) ; if ( scores . getCount ( label ) > flags . newgeneThreshold ) { answer = label ; } } } } wordInfo . set ( AnswerAnnotation . class , answer ) ; } } else { for ( int i = 0 , listSize = document . size ( ) ; i < listSize ; i ++ ) { String answer = classOf ( document , i ) ; CoreLabel wordInfo = document . get ( i ) ; // System . err . println ( " XXX answer for " + // wordInfo . word ( ) + " is " + answer ) ; wordInfo . set ( AnswerAnnotation . class , answer ) ; } if ( flags . justify && ( classifier instanceof LinearClassifier ) ) { LinearClassifier < String , String > lc = ( LinearClassifier < String , String > ) classifier ; for ( int i = 0 , lsize = document . size ( ) ; i < lsize ; i ++ ) { CoreLabel lineInfo = document . get ( i ) ; System . err . print ( "@@ Position " + i + ": " ) ; System . err . println ( lineInfo . word ( ) + " chose " + lineInfo . get ( AnswerAnnotation . class ) ) ; lc . justificationOf ( makeDatum ( document , i , featureFactory ) ) ; } } } if ( flags . 
useReverse ) { Collections . reverse ( document ) ; }
public class AbstractResourceBundleHandler { /** * Resolves the file path of the bundle from the root directory . * @ param rootDir * the rootDir * @ param bundleName * the bundle name * @ return the file path */ private String getStoredBundlePath ( String rootDir , String bundleName ) { } }
if ( bundleName . indexOf ( '/' ) != - 1 ) { bundleName = bundleName . replace ( '/' , File . separatorChar ) ; } if ( ! bundleName . startsWith ( File . separator ) ) { rootDir += File . separator ; } return rootDir + PathNormalizer . escapeToPhysicalPath ( bundleName ) ;
public class SolutionListUtils { /** * This methods takes a list of solutions , removes a percentage of its solutions , and it is filled * with new random generated solutions * @ param solutionList * @ param problem * @ param percentageOfSolutionsToRemove */ public static < S > void restart ( List < S > solutionList , Problem < S > problem , int percentageOfSolutionsToRemove ) { } }
if ( solutionList == null ) { throw new NullSolutionListException ( ) ; } else if ( problem == null ) { throw new JMetalException ( "The problem is null" ) ; } else if ( ( percentageOfSolutionsToRemove < 0 ) || ( percentageOfSolutionsToRemove > 100 ) ) { throw new JMetalException ( "The percentage of solutions to remove is invalid: " + percentageOfSolutionsToRemove ) ; } int solutionListOriginalSize = solutionList . size ( ) ; int numberOfSolutionsToRemove = ( int ) ( solutionListOriginalSize * percentageOfSolutionsToRemove / 100.0 ) ; removeSolutionsFromList ( solutionList , numberOfSolutionsToRemove ) ; fillPopulationWithNewSolutions ( solutionList , problem , solutionListOriginalSize ) ;
public class AbstractDynamoDbStore { /** * Creates the schemata for the DynamoDB table or tables each store requires . * Implementations should override and reuse this logic * @ return a create table request appropriate for the schema of the selected implementation . */ public CreateTableRequest getTableSchema ( ) { } }
return new CreateTableRequest ( ) . withTableName ( tableName ) . withProvisionedThroughput ( new ProvisionedThroughput ( client . readCapacity ( tableName ) , client . writeCapacity ( tableName ) ) ) ;
public class ValidationPane { /** * update the size of error container and its clip * @ param w * @ param errorContainerHeight */ private void updateErrorContainerSize ( double w , double errorContainerHeight ) { } }
errorContainerClip . setWidth ( w ) ; errorContainerClip . setHeight ( errorContainerHeight ) ; resize ( w , errorContainerHeight ) ;
// parseJson(): converts the JSON representation of URL patterns into the Java
// object graph (UrlPattern -> HtmlTemplate -> CssPath -> ExtractFunction).
// Each nesting level is wrapped in its own try/catch, so one malformed entry
// is logged ("JSON抽取失败" = "JSON extraction failed" - a runtime log string,
// left untouched) and skipped without aborting the surrounding levels.
// NOTE(review): each child object is added to its parent BEFORE its fields
// are parsed, so a failed entry remains in the graph partially populated -
// confirm downstream code tolerates such entries.
public class ExtractRegular { /** * 将json格式的URL模式转换为JAVA对象表示 * @ param json URL模式的JSON表示 * @ return URL模式的JAVA对象表示 */ private List < UrlPattern > parseJson ( String json ) { } }
List < UrlPattern > urlPatterns = new ArrayList < > ( ) ; try { List < Map < String , Object > > ups = MAPPER . readValue ( json , List . class ) ; for ( Map < String , Object > up : ups ) { try { UrlPattern urlPattern = new UrlPattern ( ) ; urlPatterns . add ( urlPattern ) ; urlPattern . setUrlPattern ( up . get ( "urlPattern" ) . toString ( ) ) ; List < Map < String , Object > > pageTemplates = ( List < Map < String , Object > > ) up . get ( "pageTemplates" ) ; for ( Map < String , Object > pt : pageTemplates ) { try { HtmlTemplate htmlTemplate = new HtmlTemplate ( ) ; urlPattern . addHtmlTemplate ( htmlTemplate ) ; htmlTemplate . setTemplateName ( pt . get ( "templateName" ) . toString ( ) ) ; htmlTemplate . setTableName ( pt . get ( "tableName" ) . toString ( ) ) ; List < Map < String , Object > > cssPaths = ( List < Map < String , Object > > ) pt . get ( "cssPaths" ) ; for ( Map < String , Object > cp : cssPaths ) { try { CssPath cssPath = new CssPath ( ) ; htmlTemplate . addCssPath ( cssPath ) ; cssPath . setCssPath ( cp . get ( "cssPath" ) . toString ( ) ) ; cssPath . setFieldName ( cp . get ( "fieldName" ) . toString ( ) ) ; cssPath . setFieldDescription ( cp . get ( "fieldDescription" ) . toString ( ) ) ; List < Map < String , Object > > extractFunctions = ( List < Map < String , Object > > ) cp . get ( "extractFunctions" ) ; for ( Map < String , Object > pf : extractFunctions ) { try { ExtractFunction extractFunction = new ExtractFunction ( ) ; cssPath . addExtractFunction ( extractFunction ) ; extractFunction . setExtractExpression ( pf . get ( "extractExpression" ) . toString ( ) ) ; extractFunction . setFieldName ( pf . get ( "fieldName" ) . toString ( ) ) ; extractFunction . setFieldDescription ( pf . get ( "fieldDescription" ) . toString ( ) ) ; } catch ( Exception e ) { LOGGER . error ( "JSON抽取失败" , e ) ; } } } catch ( Exception e ) { LOGGER . error ( "JSON抽取失败" , e ) ; } } } catch ( Exception e ) { LOGGER . 
error ( "JSON抽取失败" , e ) ; } } } catch ( Exception e ) { LOGGER . error ( "JSON抽取失败" , e ) ; } } } catch ( Exception e ) { LOGGER . error ( "JSON抽取失败" , e ) ; } return urlPatterns ;
public class XMLElement { /** * Get the value specified for a given attribute on this element * @ param name The name of the attribute whose value should be retrieved * @ param def The default value to return if the attribute is specified * @ return The value given for the attribute */ public String getAttribute ( String name , String def ) { } }
String value = dom . getAttribute ( name ) ; if ( ( value == null ) || ( value . length ( ) == 0 ) ) { return def ; } return value ;
public class Cache { /** * 查看哈希表 key 中 , 给定域 field 是否存在 。 */ public boolean hexists ( Object key , Object field ) { } }
Jedis jedis = getJedis ( ) ; try { return jedis . hexists ( keyToBytes ( key ) , fieldToBytes ( field ) ) ; } finally { close ( jedis ) ; }
public class FloatingActionButton { /** * Initializes the animation , which is used while hiding or dismissing * < b > Action Button < / b > * @ deprecated since 1.0.2 and will be removed in version 2.0.0 * Use < b > show _ animation < / b > and < b > hide _ animation < / b > in XML instead * @ param attrs attributes of the XML tag that is inflating the view */ @ Deprecated private void initHideAnimation ( TypedArray attrs ) { } }
if ( attrs . hasValue ( R . styleable . ActionButton_animation_onHide ) ) { final int animResId = attrs . getResourceId ( R . styleable . ActionButton_animation_onHide , Animations . NONE . animResId ) ; setHideAnimation ( Animations . load ( getContext ( ) , animResId ) ) ; }
public class Converters {

    /**
     * Tries to look up a declared field by name without throwing.
     * Returns null when the field does not exist.
     *
     * @param clazz the class to inspect
     * @param itemName the field name
     * @return the declared field, or null if it is absent or inaccessible
     */
    private static Field tryGetFieldWithoutExp(Class<?> clazz, String itemName) {
        try {
            return clazz.getDeclaredField(itemName);
        } catch (Exception ignored) {
            // Deliberately swallowed: "not found" is reported as null.
            return null;
        }
    }
}
// next(): returns the next revision in chronological order.
// 1. If the requested revision is already on top of the chrono storage,
//    remove and return it directly.
// 2. Otherwise map the chronological index to a revision counter, locate the
//    enclosing full-revision block and the nearest already-reconstructed
//    revision, and derive the primary-key range of encoded diffs still needed.
// 3. Stream those diffs from the database, decode each (binary or base64,
//    detected from the column type) and apply it to the previous revision
//    text, verifying revision counters are contiguous; any inconsistency
//    prints a diagnostic and returns null.
// 4. Reconstructed revisions are pushed into the chrono storage; the result
//    is returned only when the requested index ends up on top.
// NOTE(review): the finally block closes the Statement before the ResultSet;
// if statement.close() throws, result.close() is skipped. JDBC closes a
// statement's ResultSet implicitly, but the safer pattern is one try/finally
// per resource (or try-with-resources), closing the ResultSet first.
public class ChronoIterator { /** * Returns the next revision . * @ return next revision */ public Revision next ( ) throws Exception { } }
// Checks whether the next revision has already been reconstructed . Revision revision ; if ( chronoStorage . isTop ( revisionIndex ) ) { // If this is the case the revision will removed from the storage return chronoStorage . remove ( ) ; } // Otherwise the chronological order counter will be mapped to the // revsision counter int revCount = revisionIndex ; if ( mappingStorage . containsKey ( revisionIndex ) ) { revCount = mappingStorage . get ( revisionIndex ) ; } // Retrieve the related full revision block ChronoFullRevision cfr = fullRevStorage . get ( revCount ) ; int queryPK , limit , previousRevisionCounter ; String previousRevision ; // Determine the nearest revision that could be used to construct // the specified revision revision = cfr . getNearest ( revCount ) ; if ( revision == null ) { // Create query bounds ( all revisions from the full revision till // now ) queryPK = articlePK + cfr . getStartRC ( ) - 1 ; limit = revCount - cfr . getStartRC ( ) + 1 ; previousRevision = null ; previousRevisionCounter = - 1 ; } else { // Create query bounds ( only new revisions , last known + 1 till now ) queryPK = revision . getPrimaryKey ( ) + 1 ; limit = revCount - revision . getRevisionCounter ( ) ; previousRevision = revision . getRevisionText ( ) ; previousRevisionCounter = revision . getRevisionCounter ( ) ; } Statement statement = null ; ResultSet result = null ; revision = null ; try { statement = this . connection . createStatement ( ) ; // Retrieve encoded revisions result = statement . executeQuery ( "SELECT Revision, PrimaryKey, RevisionCounter, RevisionID, ArticleID, Timestamp " + "FROM revisions " + "WHERE PrimaryKey >= " + queryPK + " LIMIT " + limit ) ; String currentRevision = null ; Diff diff ; RevisionDecoder decoder ; boolean binaryData = result . getMetaData ( ) . getColumnType ( 1 ) == Types . LONGVARBINARY ; while ( result . next ( ) ) { decoder = new RevisionDecoder ( config . 
getCharacterSet ( ) ) ; // binary or base64 encoded if ( binaryData ) { decoder . setInput ( result . getBinaryStream ( 1 ) , true ) ; } else { decoder . setInput ( result . getString ( 1 ) ) ; } // Decode and rebuild diff = decoder . decode ( ) ; if ( previousRevisionCounter != - 1 ) { if ( previousRevisionCounter + 1 != result . getInt ( 3 ) ) { System . err . println ( "Reconstruction data invalid - " + "\r\n\t" + "Expected " + ( previousRevisionCounter + 1 ) + " instead of " + result . getInt ( 3 ) ) ; return null ; } } else { if ( cfr . getStartRC ( ) != result . getInt ( 3 ) ) { System . err . println ( "Reconstruction data invalid - " + "\r\n\t" + "Expected " + ( cfr . getStartRC ( ) ) + " instead of " + result . getInt ( 3 ) ) ; return null ; } } try { currentRevision = diff . buildRevision ( previousRevision ) ; revision = new Revision ( result . getInt ( 3 ) ) ; revision . setRevisionText ( currentRevision ) ; revision . setPrimaryKey ( result . getInt ( 2 ) ) ; revision . setRevisionID ( result . getInt ( 4 ) ) ; revision . setArticleID ( result . getInt ( 5 ) ) ; revision . setTimeStamp ( new Timestamp ( result . getLong ( 6 ) ) ) ; previousRevision = currentRevision ; previousRevisionCounter = revision . getRevisionCounter ( ) ; } catch ( Exception e ) { System . err . println ( "Reconstruction failed while retrieving" + " data to reconstruct <" + revisionIndex + ">" + "\r\n\t" + "[ArticleId " + result . getInt ( 5 ) + ", RevisionId " + result . getInt ( 4 ) + ", RevisionCounter " + result . getInt ( 3 ) + "]" ) ; previousRevision = null ; revision = null ; return null ; } // Add the reconstructed revision to the storage if ( revision != null ) { chronoStorage . add ( revision ) ; } } // Ensure that the correct revision is on top of the storage if ( chronoStorage . isTop ( revisionIndex ) ) { chronoStorage . remove ( ) ; return revision ; } else { return null ; } } finally { if ( statement != null ) { statement . 
close ( ) ; } if ( result != null ) { result . close ( ) ; } }
public class Compiler { /** * Compile string . * @ param string The input string . * @ param inputPath The input path . * @ param outputPath The output path . * @ param options The compile options . * @ return The compilation output . * @ throws CompilationException If the compilation failed . */ public Output compileString ( String string , URI inputPath , URI outputPath , Options options ) throws CompilationException { } }
StringContext context = new StringContext ( string , inputPath , outputPath , options ) ; return compile ( context ) ;
public class PutIntegrationResponseRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( PutIntegrationResponseRequest putIntegrationResponseRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( putIntegrationResponseRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( putIntegrationResponseRequest . getRestApiId ( ) , RESTAPIID_BINDING ) ; protocolMarshaller . marshall ( putIntegrationResponseRequest . getResourceId ( ) , RESOURCEID_BINDING ) ; protocolMarshaller . marshall ( putIntegrationResponseRequest . getHttpMethod ( ) , HTTPMETHOD_BINDING ) ; protocolMarshaller . marshall ( putIntegrationResponseRequest . getStatusCode ( ) , STATUSCODE_BINDING ) ; protocolMarshaller . marshall ( putIntegrationResponseRequest . getSelectionPattern ( ) , SELECTIONPATTERN_BINDING ) ; protocolMarshaller . marshall ( putIntegrationResponseRequest . getResponseParameters ( ) , RESPONSEPARAMETERS_BINDING ) ; protocolMarshaller . marshall ( putIntegrationResponseRequest . getResponseTemplates ( ) , RESPONSETEMPLATES_BINDING ) ; protocolMarshaller . marshall ( putIntegrationResponseRequest . getContentHandling ( ) , CONTENTHANDLING_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class KeyDecoder { /** * Decodes a boolean from exactly 1 byte , as encoded for descending order . * @ param src source of encoded bytes * @ param srcOffset offset into source array * @ return boolean value */ public static boolean decodeBooleanDesc ( byte [ ] src , int srcOffset ) throws CorruptEncodingException { } }
try { return src [ srcOffset ] == 127 ; } catch ( IndexOutOfBoundsException e ) { throw new CorruptEncodingException ( null , e ) ; }
public class Group { /** * Gets a list of all supported properties for this model object , < b > Group < / b > * @ param entityTypeName * allowed object is { @ link String } * @ return * returned object is { @ link List } */ public static synchronized List getPropertyNames ( String entityTypeName ) { } }
if ( propertyNames != null ) { return propertyNames ; } else { { List names = new ArrayList ( ) ; names . add ( PROP_CN ) ; names . add ( PROP_MEMBERS ) ; names . add ( PROP_DISPLAY_NAME ) ; names . add ( PROP_DESCRIPTION ) ; names . add ( PROP_BUSINESS_CATEGORY ) ; names . add ( PROP_SEE_ALSO ) ; if ( extendedPropertiesDataType != null && extendedPropertiesDataType . keySet ( ) . size ( ) > 0 ) names . addAll ( extendedPropertiesDataType . keySet ( ) ) ; names . addAll ( Party . getPropertyNames ( "Party" ) ) ; propertyNames = Collections . unmodifiableList ( names ) ; return propertyNames ; } }
public class CacheingMatcher { /** * Remove just delegates */ public ContentMatcher remove ( Conjunction selector , MatchTarget object , InternTable subExpr , OrdinalPosition parentId ) throws MatchingException { } }
if ( tc . isEntryEnabled ( ) ) tc . entry ( this , cclass , "remove" , "selector: " + selector + ", object: " + object ) ; vacantChild = vacantChild . remove ( selector , object , subExpr , ordinalPosition ) ; ContentMatcher result = this ; if ( vacantChild == null ) result = null ; if ( tc . isEntryEnabled ( ) ) tc . exit ( this , cclass , "remove" , "result: " + result ) ; return result ;
// createJwtSSOToken(): builds a JWT SSO token from the security subject and
// attaches it to the subject via updateSubject(). No-ops when the subject is
// null, unauthenticated, or already carries a JWT principal. Fails with
// WSLoginFailedException when token building throws, or when no builder
// utility is available (reported via the JWTSSO_CONFIG_INVALID message).
public class JwtSSOTokenImpl { /** * ( non - Javadoc ) * @ see * com . ibm . ws . security . sso . cookie . JwtSSOToken # createSSOToken ( javax . security . * auth . Subject ) */ @ Override // @ FFDCIgnore ( Exception . class ) public void createJwtSSOToken ( Subject subject ) throws WSLoginFailedException { } }
// TODO Auto - generated method stub if ( subject != null ) { if ( isSubjectUnauthenticated ( subject ) || subjectHasJwtPrincipal ( subject ) ) { return ; } JwtSsoTokenUtils tokenUtil = getJwtSsoTokenBuilderUtils ( ) ; if ( tokenUtil != null ) { JsonWebToken ssotoken = null ; try { ssotoken = tokenUtil . buildTokenFromSecuritySubject ( subject ) ; } catch ( Exception e ) { // TODO ffdc throw new WSLoginFailedException ( e . getLocalizedMessage ( ) ) ; } updateSubject ( subject , ssotoken ) ; } else { String msg = Tr . formatMessage ( tc , "JWTSSO_CONFIG_INVALID" , new Object [ ] { } ) ; throw new WSLoginFailedException ( msg ) ; } }
// disableAutoScaleWithServiceResponseAsync(): AutoRest-generated client
// method that disables automatic scaling for a pool. It validates the
// required batchUrl / poolId / apiVersion parameters, expands the
// parameterized host, and issues the service call, mapping the raw Retrofit
// response through disableAutoScaleDelegate into a
// ServiceResponseWithHeaders. All optional PoolDisableAutoScaleOptions
// fields are pinned to null in this overload; errors surface through
// Observable.error rather than being thrown.
public class PoolsImpl { /** * Disables automatic scaling for a pool . * @ param poolId The ID of the pool on which to disable automatic scaling . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceResponseWithHeaders } object if successful . */ public Observable < ServiceResponseWithHeaders < Void , PoolDisableAutoScaleHeaders > > disableAutoScaleWithServiceResponseAsync ( String poolId ) { } }
if ( this . client . batchUrl ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.batchUrl() is required and cannot be null." ) ; } if ( poolId == null ) { throw new IllegalArgumentException ( "Parameter poolId is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } final PoolDisableAutoScaleOptions poolDisableAutoScaleOptions = null ; Integer timeout = null ; UUID clientRequestId = null ; Boolean returnClientRequestId = null ; DateTime ocpDate = null ; String parameterizedHost = Joiner . on ( ", " ) . join ( "{batchUrl}" , this . client . batchUrl ( ) ) ; DateTimeRfc1123 ocpDateConverted = null ; if ( ocpDate != null ) { ocpDateConverted = new DateTimeRfc1123 ( ocpDate ) ; } return service . disableAutoScale ( poolId , this . client . apiVersion ( ) , this . client . acceptLanguage ( ) , timeout , clientRequestId , returnClientRequestId , ocpDateConverted , parameterizedHost , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponseWithHeaders < Void , PoolDisableAutoScaleHeaders > > > ( ) { @ Override public Observable < ServiceResponseWithHeaders < Void , PoolDisableAutoScaleHeaders > > call ( Response < ResponseBody > response ) { try { ServiceResponseWithHeaders < Void , PoolDisableAutoScaleHeaders > clientResponse = disableAutoScaleDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
public class CSVImporter { /** * This method returns the files found in the given directory matching the given regular * expression . * @ param dirName - ex . " . / path / to / directory / " * make sure you have the ' / ' on the end * @ param regex - ex . " . * . csv " * @ return File [ ] - an array of files */ private static File [ ] getAllFiles ( String dirName , String regex ) { } }
File [ ] allFiles = getAllFiles ( dirName ) ; List < File > matchingFiles = new ArrayList < File > ( ) ; for ( File allFile : allFiles ) { if ( allFile . getName ( ) . matches ( regex ) ) { matchingFiles . add ( allFile ) ; } } return matchingFiles . toArray ( new File [ matchingFiles . size ( ) ] ) ;
public class ByteUtils { /** * This function converts the bytes in a byte array at the specified index to its * corresponding double value . * @ param buffer The byte array containing the double . * @ param index The index for the first byte in the byte array . * @ return The corresponding double value . */ static public double bytesToDouble ( byte [ ] buffer , int index ) { } }
double real ; long bits = bytesToLong ( buffer , index ) ; real = Double . longBitsToDouble ( bits ) ; return real ;
public class FileListUtils { /** * Get any data file , which is not hidden or a directory , from the given path */ public static FileStatus getAnyNonHiddenFile ( FileSystem fs , Path path ) throws IOException { } }
HiddenFilter hiddenFilter = new HiddenFilter ( ) ; FileStatus root = fs . getFileStatus ( path ) ; if ( ! root . isDirectory ( ) ) { return hiddenFilter . accept ( path ) ? root : null ; } // DFS to get the first data file Stack < FileStatus > folders = new Stack < > ( ) ; folders . push ( root ) ; while ( ! folders . empty ( ) ) { FileStatus curFolder = folders . pop ( ) ; try { for ( FileStatus status : fs . listStatus ( curFolder . getPath ( ) , hiddenFilter ) ) { if ( status . isDirectory ( ) ) { folders . push ( status ) ; } else { return status ; } } } catch ( FileNotFoundException exc ) { // continue } } return null ;
// clearAllHeaders(): walks the singly linked header sequence, removing each
// header's first-instance entry from keyed storage (running the key's removal
// filters when it uses them) and destroying every element, then resets the
// sequence head/tail pointers and the header count to empty. The 'next'
// pointer is captured before destroy() so traversal survives element teardown.
private void clearAllHeaders ( ) {
public class BNFHeadersImpl { /** * Clear all traces of the headers from storage . */ private void clearAllHeaders ( ) { } }
final boolean bTrace = TraceComponent . isAnyTracingEnabled ( ) ; if ( bTrace && tc . isEntryEnabled ( ) ) { Tr . entry ( tc , "clearAllHeaders()" ) ; } HeaderElement elem = this . hdrSequence ; while ( null != elem ) { final HeaderElement next = elem . nextSequence ; final HeaderKeys key = elem . getKey ( ) ; final int ord = key . getOrdinal ( ) ; if ( storage . containsKey ( ord ) ) { // first instance being removed if ( key . useFilters ( ) ) { filterRemove ( key , null ) ; } storage . remove ( ord ) ; } elem . destroy ( ) ; elem = next ; } this . hdrSequence = null ; this . lastHdrInSequence = null ; this . numberOfHeaders = 0 ; if ( bTrace && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "clearAllHeaders()" ) ; }
public class TerminateClientVpnConnectionsResult { /** * The current state of the client connections . * @ param connectionStatuses * The current state of the client connections . */ public void setConnectionStatuses ( java . util . Collection < TerminateConnectionStatus > connectionStatuses ) { } }
if ( connectionStatuses == null ) { this . connectionStatuses = null ; return ; } this . connectionStatuses = new com . amazonaws . internal . SdkInternalList < TerminateConnectionStatus > ( connectionStatuses ) ;
public class PollingMonitoredValue { /** * Convenient factory method to create pollers you don ' t care about keeping * – that is , pollers which should be registered and start updating their * value , but which you don ' t need to hold a reference to ( because you will * presumably just be modifying the polled source ) . */ public static < T > void poll ( String name , String description , MonitorableRegistry registry , int updateInterval , Supplier < T > poller , ValueSemantics semantics , Unit < ? > unit ) { } }
new PollingMonitoredValue < T > ( name , description , registry , updateInterval , poller , semantics , unit ) ;
public class UnitResponse { /** * Create a rolling back response object with the given message . * @ param errMsg the error message . * @ return An response object which will indicate a transactional rolling back . */ @ SuppressWarnings ( "all" ) public static UnitResponse createRollingBack ( String errMsg ) { } }
return UnitResponse . createUnknownError ( null , errMsg ) . setContext ( Context . create ( ) . setRollback ( true ) ) ;
// chunkD(): compresses the accumulated values into a floating-point chunk -
// C4FChunk (32-bit floats) when H2O.SINGLE_PRECISION is set, otherwise
// C8DChunk (64-bit doubles). Values come from _ds when present; otherwise
// NA/enum entries become NaN and numeric entries are decoded as
// _ls[j] * 10^_xs[j]. Sparse rows (_id non-empty) are expanded to a dense
// buffer with 0 for absent rows; the trailing assert checks that exactly
// _sparseLen stored values were consumed.
public class NewChunk { /** * Compute a compressed double buffer */ private Chunk chunkD ( ) { } }
if ( H2O . SINGLE_PRECISION ) { final byte [ ] bs = MemoryManager . malloc1 ( _len * 4 , true ) ; int j = 0 ; for ( int i = 0 ; i < _len ; ++ i ) { float f = 0 ; if ( _id == null || _id . length == 0 || ( j < _id . length && _id [ j ] == i ) ) { f = _ds != null ? ( float ) _ds [ j ] : ( isNA2 ( j ) || isEnum ( j ) ) ? Float . NaN : ( float ) ( _ls [ j ] * PrettyPrint . pow10 ( _xs [ j ] ) ) ; ++ j ; } UDP . set4f ( bs , 4 * i , f ) ; } assert j == _sparseLen : "j = " + j + ", _sparseLen = " + _sparseLen ; return new C4FChunk ( bs ) ; } else { final byte [ ] bs = MemoryManager . malloc1 ( _len * 8 , true ) ; int j = 0 ; for ( int i = 0 ; i < _len ; ++ i ) { double d = 0 ; if ( _id == null || _id . length == 0 || ( j < _id . length && _id [ j ] == i ) ) { d = _ds != null ? _ds [ j ] : ( isNA2 ( j ) || isEnum ( j ) ) ? Double . NaN : _ls [ j ] * PrettyPrint . pow10 ( _xs [ j ] ) ; ++ j ; } UDP . set8d ( bs , 8 * i , d ) ; } assert j == _sparseLen : "j = " + j + ", _sparseLen = " + _sparseLen ; return new C8DChunk ( bs ) ; }
public class Report { /** * Adds the log entries to the report . * @ throws JSONException if the log entries cannot be added */ private void buildLog ( ) throws JSONException { } }
logs = new JSONObject ( ) ; List < String > list = Log . getReportedEntries ( ) ; if ( list != null ) { logs . put ( "numberOfEntry" , list . size ( ) ) ; JSONArray array = new JSONArray ( ) ; for ( String s : list ) { array . put ( s ) ; } logs . put ( "log" , array ) ; }
public class DirectoryBasedOverlayContainerImpl { /** * Little recursive routine to collect all the files present within a ArtifactContainer . < p > * @ param c The ArtifactContainer to process * @ param s The set to add paths to . */ private void collectPaths ( ArtifactContainer c , Set < String > s ) { } }
if ( ! "/" . equals ( c . getPath ( ) ) ) { s . add ( c . getPath ( ) ) ; } for ( ArtifactEntry e : c ) { s . add ( e . getPath ( ) ) ; ArtifactContainer n = e . convertToContainer ( ) ; if ( n != null && ! n . isRoot ( ) ) { collectPaths ( n , s ) ; } }
public class DeviceManagerClient { /** * Deletes a device registry configuration . * < p > Sample code : * < pre > < code > * try ( DeviceManagerClient deviceManagerClient = DeviceManagerClient . create ( ) ) { * RegistryName name = RegistryName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ REGISTRY ] " ) ; * deviceManagerClient . deleteDeviceRegistry ( name . toString ( ) ) ; * < / code > < / pre > * @ param name The name of the device registry . For example , * ` projects / example - project / locations / us - central1 / registries / my - registry ` . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final void deleteDeviceRegistry ( String name ) { } }
DeleteDeviceRegistryRequest request = DeleteDeviceRegistryRequest . newBuilder ( ) . setName ( name ) . build ( ) ; deleteDeviceRegistry ( request ) ;
public class XmlPrintStream { /** * Translate reserved XML characters to XML entities . * @ param in Input string . */ public String escape ( String in ) { } }
StringBuffer out = new StringBuffer ( ) ; for ( char c : in . toCharArray ( ) ) { switch ( c ) { case '<' : out . append ( "&lt;" ) ; break ; case '>' : out . append ( "&gt;" ) ; break ; case '&' : out . append ( "&amp;" ) ; break ; case '"' : out . append ( "&quot;" ) ; break ; default : out . append ( c ) ; } } return out . toString ( ) ;
public class AbstractSlideModel { /** * Return the default content or null . * @ return the default SlideContent */ public SlideContent getDefaultContent ( ) { } }
SlideContent res = null ; if ( getSlide ( ) . getContent ( ) != null && ! getSlide ( ) . getContent ( ) . isEmpty ( ) ) { res = getSlide ( ) . getContent ( ) . get ( 0 ) ; } return res ;
public class MultimediaAppender { /** * { @ inheritDoc } */ @ Override public void appendString ( final String appendage ) { } }
owner . setTail ( owner . getTail ( ) . concat ( appendage ) ) ;
public class XBinaryOperationImpl {
    /**
     * Sets the reassign-first-argument flag and fires an EMF SET notification
     * when adapters are attached.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param newReassignFirstArgument the new flag value
     * @generated
     */
    public void setReassignFirstArgument(boolean newReassignFirstArgument) {
        boolean oldReassignFirstArgument = reassignFirstArgument;
        reassignFirstArgument = newReassignFirstArgument;
        // Only build the notification object when someone is listening.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, XbasePackage.XBINARY_OPERATION__REASSIGN_FIRST_ARGUMENT, oldReassignFirstArgument, reassignFirstArgument));
    }
}
public class ExecutorResource {
    /**
     * REST endpoint returning information about a specified asynchronous job
     * (executor request) instance, optionally including its errors and its
     * input/output data.
     */
    @ApiOperation(value = "Returns information about a specified job.", response = RequestInfoInstance.class, code = 200)
    @ApiResponses(value = {
            @ApiResponse(code = 500, message = "Unexpected error"),
            @ApiResponse(code = 200, message = "Successfull response", examples = @Example(value = {@ExampleProperty(mediaType = JSON, value = GET_REQUEST_RESPONSE_JSON)}))})
    @GET
    @Path(JOB_INSTANCE_GET_URI)
    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
    public Response getRequestById(@javax.ws.rs.core.Context HttpHeaders headers,
            @ApiParam(value = "identifier of the asynchronous job to be retrieved", required = true, example = "123") @PathParam("jobId") Long requestId,
            @ApiParam(value = "optional flag that indicats if errors should be loaded as well", required = false) @QueryParam("withErrors") boolean withErrors,
            @ApiParam(value = "optional flag that indicats if input/output data should be loaded as well", required = false) @QueryParam("withData") boolean withData) {
        // Negotiate response representation from the request headers.
        Variant v = getVariant(headers);
        String type = getContentType(headers);
        // no container id available so only used to transfer conversation id if given by client
        Header conversationIdHeader = buildConversationIdHeader("", context, headers);
        try {
            String response = executorServiceBase.getRequestById(requestId, withErrors, withData, type);
            return createResponse(response, v, Response.Status.OK, conversationIdHeader);
        } catch (IllegalArgumentException e) {
            // Unknown request id maps to 404.
            return notFound(e.getMessage(), v, conversationIdHeader);
        } catch (Exception e) {
            // Anything else is a server-side failure -> 500.
            logger.error("Unexpected error during processing {}", e.getMessage(), e);
            return internalServerError(errorMessage(e), v, conversationIdHeader);
        }
    }
}
public class VirtualFile {
    /**
     * Get all the children recursively.
     *
     * <p>This always uses {@link VisitorAttributes#RECURSE}.
     *
     * @param filter filter for the children; null means match everything
     * @return the matching children
     * @throws IOException for any problem accessing the virtual file system
     * @throws IllegalStateException if the file is closed or it is a leaf node
     */
    public List<VirtualFile> getChildrenRecursively(VirtualFileFilter filter) throws IOException {
        // isDirectory performs the read security check.
        if (!isDirectory()) {
            return Collections.emptyList();
        }
        VirtualFileFilter effectiveFilter = (filter != null) ? filter : MatchAllVirtualFileFilter.INSTANCE;
        FilterVirtualFileVisitor visitor = new FilterVirtualFileVisitor(effectiveFilter, VisitorAttributes.RECURSE);
        visit(visitor);
        return visitor.getMatched();
    }
}
public class HttpUrl { /** * Returns this URL ' s query , like { @ code " abc " } for { @ code http : / / host / ? abc } . Most callers should * prefer { @ link # queryParameterName } and { @ link # queryParameterValue } because these methods offer * direct access to individual query parameters . * < p > < table summary = " " > * < tr > < th > URL < / th > < th > { @ code query ( ) } < / th > < / tr > * < tr > < td > { @ code http : / / host / } < / td > < td > null < / td > < / tr > * < tr > < td > { @ code http : / / host / ? } < / td > < td > { @ code " " } < / td > < / tr > * < tr > < td > { @ code http : / / host / ? a = apple & k = key + lime } < / td > < td > { @ code " a = apple & k = key * lime " } < / td > < / tr > * < tr > < td > { @ code http : / / host / ? a = apple & a = apricot } < / td > < td > { @ code " a = apple & a = apricot " } < / td > < / tr > * < tr > < td > { @ code http : / / host / ? a = apple & b } < / td > < td > { @ code " a = apple & b " } < / td > < / tr > * < / table > */ public String query ( ) { } }
if ( queryNamesAndValues == null ) return null ; // No query . StringBuilder result = new StringBuilder ( ) ; namesAndValuesToQueryString ( result , queryNamesAndValues ) ; return result . toString ( ) ;
public class ReduceOperatorBase { @ Override protected List < T > executeOnCollections ( List < T > inputData , RuntimeContext ctx , ExecutionConfig executionConfig ) throws Exception { } }
// make sure we can handle empty inputs if ( inputData . isEmpty ( ) ) { return Collections . emptyList ( ) ; } ReduceFunction < T > function = this . userFunction . getUserCodeObject ( ) ; UnaryOperatorInformation < T , T > operatorInfo = getOperatorInfo ( ) ; TypeInformation < T > inputType = operatorInfo . getInputType ( ) ; int [ ] inputColumns = getKeyColumns ( 0 ) ; if ( ! ( inputType instanceof CompositeType ) && inputColumns . length > 1 ) { throw new InvalidProgramException ( "Grouping is only possible on composite types." ) ; } FunctionUtils . setFunctionRuntimeContext ( function , ctx ) ; FunctionUtils . openFunction ( function , this . parameters ) ; TypeSerializer < T > serializer = getOperatorInfo ( ) . getInputType ( ) . createSerializer ( executionConfig ) ; if ( inputColumns . length > 0 ) { boolean [ ] inputOrderings = new boolean [ inputColumns . length ] ; TypeComparator < T > inputComparator = inputType instanceof AtomicType ? ( ( AtomicType < T > ) inputType ) . createComparator ( false , executionConfig ) : ( ( CompositeType < T > ) inputType ) . createComparator ( inputColumns , inputOrderings , 0 , executionConfig ) ; Map < TypeComparable < T > , T > aggregateMap = new HashMap < TypeComparable < T > , T > ( inputData . size ( ) / 10 ) ; for ( T next : inputData ) { TypeComparable < T > wrapper = new TypeComparable < T > ( next , inputComparator ) ; T existing = aggregateMap . get ( wrapper ) ; T result ; if ( existing != null ) { result = function . reduce ( existing , serializer . copy ( next ) ) ; } else { result = next ; } result = serializer . copy ( result ) ; aggregateMap . put ( wrapper , result ) ; } FunctionUtils . closeFunction ( function ) ; return new ArrayList < T > ( aggregateMap . values ( ) ) ; } else { T aggregate = inputData . get ( 0 ) ; aggregate = serializer . copy ( aggregate ) ; for ( int i = 1 ; i < inputData . size ( ) ; i ++ ) { T next = function . reduce ( aggregate , serializer . copy ( inputData . 
get ( i ) ) ) ; aggregate = serializer . copy ( next ) ; } FunctionUtils . setFunctionRuntimeContext ( function , ctx ) ; return Collections . singletonList ( aggregate ) ; }
public class VelocityEngineFactory { /** * Set Velocity properties as Map , to allow for non - String values * like " ds . resource . loader . instance " . * @ see # setVelocityProperties */ public void setVelocityPropertiesMap ( Map < String , Object > velocityPropertiesMap ) { } }
if ( velocityPropertiesMap != null ) { this . velocityProperties . putAll ( velocityPropertiesMap ) ; }
public class CPCImpl {
    /**
     * Sets the VSFlags value and notifies attached adapters of the change.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param newVSFlags the new VSFlags value
     * @generated
     */
    public void setVSFlags(Integer newVSFlags) {
        Integer oldVSFlags = vsFlags;
        vsFlags = newVSFlags;
        // Fire an EMF SET notification only when adapters are listening.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.CPC__VS_FLAGS, oldVSFlags, vsFlags));
    }
}
public class OpsRegistrar {
    /**
     * Return the OpsAgent registered for the specified selector.
     *
     * NOTE(review): the null check is an assert only, so with assertions
     * disabled a missing registration silently returns null and the caller
     * fails later — presumably every selector is registered at startup;
     * confirm before relying on a non-null result.
     */
    public OpsAgent getAgent(OpsSelector selector) {
        OpsAgent agent = m_agents.get(selector);
        assert (agent != null);
        return agent;
    }
}
public class WebUtils { /** * Gets the ticket granting ticket id from the request and flow scopes . * @ param context the context * @ return the ticket granting ticket id */ public static String getTicketGrantingTicketId ( final RequestContext context ) { } }
val tgtFromRequest = getTicketGrantingTicketIdFrom ( context . getRequestScope ( ) ) ; val tgtFromFlow = getTicketGrantingTicketIdFrom ( context . getFlowScope ( ) ) ; return tgtFromRequest != null ? tgtFromRequest : tgtFromFlow ;
public class WaitSet {
    /**
     * Parks a blocking operation on this wait-set.
     *
     * The entry is always appended to the wait queue; it is additionally
     * registered with the delay queue only when the operation carries a
     * usable timeout (greater than -1, i.e. not "wait forever", and below
     * TIMEOUT_UPPER_BOUND) so that it can be expired later.
     */
    public void park(BlockingOperation op) {
        long timeout = op.getWaitTimeout();
        WaitSetEntry entry = new WaitSetEntry(queue, op);
        entry.setNodeEngine(nodeEngine);
        queue.offer(entry);
        // Only timed waits need expiry tracking via the delay queue.
        if (timeout > -1 && timeout < TIMEOUT_UPPER_BOUND) {
            delayQueue.offer(entry);
        }
    }
}
public class ExtendedByteBuf { /** * Reads a range of bytes if possible . If not present the reader index is reset to the last mark . * @ param bf * @ return */ public static Optional < byte [ ] > readMaybeRangedBytes ( ByteBuf bf ) { } }
Optional < Integer > length = readMaybeVInt ( bf ) ; if ( length . isPresent ( ) ) { int l = length . get ( ) ; if ( bf . readableBytes ( ) >= l ) { if ( l > 0 ) { byte [ ] array = new byte [ l ] ; bf . readBytes ( array ) ; return Optional . of ( array ) ; } else { return Optional . of ( Util . EMPTY_BYTE_ARRAY ) ; } } else { bf . resetReaderIndex ( ) ; return Optional . empty ( ) ; } } else return Optional . empty ( ) ;
public class ValidationUtilities { /** * Validate the given value against a regular expression pattern * @ param pattern Regular expression pattern * @ param value Value to test * @ return boolean { @ code true } if { @ code value } matches { @ code pattern } * @ throws PatternSyntaxException * if the given pattern is invalid */ protected static boolean validateRegExp ( String pattern , String value ) throws PatternSyntaxException { } }
Pattern re = patternCache . get ( pattern ) ; if ( re == null ) { re = compile ( pattern , MULTILINE | DOTALL ) ; patternCache . put ( pattern , re ) ; } return ( re . matcher ( value ) . matches ( ) ) ;
public class Bits {
    /**
     * Copy bytes from a char[] into native memory at dstAddr.
     *
     * (The original upstream java.nio.Bits also carried a commented-out
     * J2ObjC copyToArray(long srcAddr, Object dst, long dstBaseOffset,
     * long dstPos, long length) reference implementation here; it is unused.)
     *
     * @param src source char array
     * @param srcPos byte offset within the source array
     * @param dstAddr destination native address
     * @param length number of bytes to copy
     */
    static void copyFromCharArray(Object src, long srcPos, long dstAddr, long length) {
        // char and short share the same 16-bit layout, so the short-array
        // copy routine handles char arrays as well.
        copyFromShortArray(src, srcPos, dstAddr, length);
    }
}
public class BinaryJedis { /** * Sort a Set or a List accordingly to the specified parameters . * < b > examples : < / b > * Given are the following sets and key / values : * < pre > * x = [ 1 , 2 , 3] * y = [ a , b , c ] * k1 = z * k2 = y * k3 = x * w1 = 9 * w2 = 8 * w3 = 7 * < / pre > * Sort Order : * < pre > * sort ( x ) or sort ( x , sp . asc ( ) ) * - & gt ; [ 1 , 2 , 3] * sort ( x , sp . desc ( ) ) * - & gt ; [ 3 , 2 , 1] * sort ( y ) * - & gt ; [ c , a , b ] * sort ( y , sp . alpha ( ) ) * - & gt ; [ a , b , c ] * sort ( y , sp . alpha ( ) . desc ( ) ) * - & gt ; [ c , a , b ] * < / pre > * Limit ( e . g . for Pagination ) : * < pre > * sort ( x , sp . limit ( 0 , 2 ) ) * - & gt ; [ 1 , 2] * sort ( y , sp . alpha ( ) . desc ( ) . limit ( 1 , 2 ) ) * - & gt ; [ b , a ] * < / pre > * Sorting by external keys : * < pre > * sort ( x , sb . by ( w * ) ) * - & gt ; [ 3 , 2 , 1] * sort ( x , sb . by ( w * ) . desc ( ) ) * - & gt ; [ 1 , 2 , 3] * < / pre > * Getting external keys : * < pre > * sort ( x , sp . by ( w * ) . get ( k * ) ) * - & gt ; [ x , y , z ] * sort ( x , sp . by ( w * ) . get ( # ) . get ( k * ) ) * - & gt ; [ 3 , x , 2 , y , 1 , z ] * < / pre > * @ see # sort ( byte [ ] ) * @ see # sort ( byte [ ] , SortingParams , byte [ ] ) * @ param key * @ param sortingParameters * @ return a list of sorted elements . */ @ Override public List < byte [ ] > sort ( final byte [ ] key , final SortingParams sortingParameters ) { } }
checkIsInMultiOrPipeline ( ) ; client . sort ( key , sortingParameters ) ; return client . getBinaryMultiBulkReply ( ) ;
public class ClientProcessor {
    /**
     * Handles a packet received from the transport: processes the query and,
     * when a response is expected, sends it back on the same endpoint.
     *
     * @see net.timewalker.ffmq4.remote.transport.PacketTransportListener#packetReceived(net.timewalker.ffmq4.remote.transport.packet.AbstractPacket)
     * @return true while the local connection is still valid
     */
    @Override
    public boolean packetReceived(AbstractPacket packet) {
        AbstractQueryPacket query = (AbstractQueryPacket) packet;
        AbstractResponsePacket response = null;
        // Process packet
        try {
            try {
                response = process(query);
            } catch (JMSException e) {
                // Expected failures travel back to the client as an error response.
                log.debug("#" + id + " process() failed with " + e.toString());
                response = new ErrorResponse(e);
            }
        } catch (Exception e) {
            // Unexpected failure: log it; no response will be sent.
            log.error("#" + id + " Cannot process command", e);
        }
        // Send response
        if (response != null && query.isResponseExpected()) {
            // Map endpoint id on response
            response.setEndpointId(query.getEndpointId());
            try {
                if (traceEnabled)
                    log.trace("#" + id + " Sending " + response);
                transport.send(response);
            } catch (Exception e) {
                // A broken transport is unrecoverable: close the connection.
                log.warn("#" + id + " Cannot send response to client : " + e.toString());
                transport.close();
            }
        }
        return localConnection != null; // Connection still valid ?
    }
}
public class HashIntSet { /** * { @ inheritDoc } */ @ Override public HashIntSet convert ( Collection < Integer > c ) { } }
HashIntSet res = new HashIntSet ( ( int ) ( c . size ( ) / LOAD_FACTOR ) + 1 ) ; for ( int e : c ) { res . add ( e ) ; } return res ;
public class JmxMetricReporter {
    /**
     * The JMX Reporter is activated only if the jmxEnabled property is set.
     * If the jmxAutoStart property is enabled, the JMX Reporter will start
     * automatically.
     *
     * @param configurationProperties configuration properties
     * @param metricRegistry metric registry
     * @return this {@link JmxMetricReporter}, for chaining
     */
    @Override
    public JmxMetricReporter init(ConfigurationProperties configurationProperties, MetricRegistry metricRegistry) {
        if (configurationProperties.isJmxEnabled()) {
            // Register under a domain unique to this configuration.
            jmxReporter = JmxReporter.forRegistry(metricRegistry).inDomain(MetricRegistry.name(getClass(), configurationProperties.getUniqueName())).build();
        }
        // NOTE(review): start() is invoked even when jmxEnabled is false, in
        // which case jmxReporter was never built here — confirm start() is
        // null-safe for that configuration.
        if (configurationProperties.isJmxAutoStart()) {
            start();
        }
        return this;
    }
}
public class MotionEventUtils { /** * Calculate the vertical move motion direction . * @ param delta moved delta . * @ param threshold threshold to detect the motion . * @ return the motion direction for the vertical axis . */ public static MotionDirection getVerticalMotionDirection ( float delta , float threshold ) { } }
if ( threshold < 0 ) { throw new IllegalArgumentException ( "threshold should be positive or zero." ) ; } return delta < - threshold ? MotionDirection . DOWN : delta > threshold ? MotionDirection . UP : MotionDirection . FIX ;
public class StreamGraph { /** * Adds a new virtual node that is used to connect a downstream vertex to only the outputs * with the selected names . * < p > When adding an edge from the virtual node to a downstream node the connection will be made * to the original node , only with the selected names given here . * @ param originalId ID of the node that should be connected to . * @ param virtualId ID of the virtual node . * @ param selectedNames The selected names . */ public void addVirtualSelectNode ( Integer originalId , Integer virtualId , List < String > selectedNames ) { } }
if ( virtualSelectNodes . containsKey ( virtualId ) ) { throw new IllegalStateException ( "Already has virtual select node with id " + virtualId ) ; } virtualSelectNodes . put ( virtualId , new Tuple2 < Integer , List < String > > ( originalId , selectedNames ) ) ;
public class Envelope2D { /** * Queries a corner of the envelope . * @ param index * Indicates a corner of the envelope . * 0 means lower left or ( xmin , ymin ) * 1 means upper left or ( xmin , ymax ) * 2 means upper right or ( xmax , ymax ) * 3 means lower right or ( xmax , ymin ) * @ return Point at a corner of the envelope . */ public Point2D queryCorner ( int index ) { } }
switch ( index ) { case 0 : return Point2D . construct ( xmin , ymin ) ; case 1 : return Point2D . construct ( xmin , ymax ) ; case 2 : return Point2D . construct ( xmax , ymax ) ; case 3 : return Point2D . construct ( xmax , ymin ) ; default : throw new IndexOutOfBoundsException ( ) ; }
public class AmazonRoute53ResolverClient { /** * Gets the IP addresses for a specified resolver endpoint . * @ param listResolverEndpointIpAddressesRequest * @ return Result of the ListResolverEndpointIpAddresses operation returned by the service . * @ throws ResourceNotFoundException * The specified resource doesn ' t exist . * @ throws InvalidParameterException * One or more parameters in this request are not valid . * @ throws InternalServiceErrorException * We encountered an unknown error . Try again in a few minutes . * @ throws InvalidNextTokenException * The value that you specified for < code > NextToken < / code > in a < code > List < / code > request isn ' t valid . * @ throws ThrottlingException * The request was throttled . Try again in a few minutes . * @ sample AmazonRoute53Resolver . ListResolverEndpointIpAddresses * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / route53resolver - 2018-04-01 / ListResolverEndpointIpAddresses " * target = " _ top " > AWS API Documentation < / a > */ @ Override public ListResolverEndpointIpAddressesResult listResolverEndpointIpAddresses ( ListResolverEndpointIpAddressesRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeListResolverEndpointIpAddresses ( request ) ;
public class Timex2 {
    /**
     * getter for comment - gets the value of the comment feature from the CAS.
     * @generated
     * @return value of the feature
     */
    public String getComment() {
        // Guard: verify the feature exists in the type system before access.
        if (Timex2_Type.featOkTst && ((Timex2_Type) jcasType).casFeat_comment == null)
            jcasType.jcas.throwFeatMissing("comment", "de.julielab.jules.types.ace.Timex2");
        return jcasType.ll_cas.ll_getStringValue(addr, ((Timex2_Type) jcasType).casFeatCode_comment);
    }
}
public class CssScanner {
    /**
     * SUBSTRINGMATCH: scans the substring-match attribute operator "*=".
     */
    private void _substringmatch() throws IOException {
        if (debug) {
            checkState(reader.curChar == '*');
        }
        builder.type = Type.SUBSTRINGMATCH;
        builder.append("*=");
        reader.next();
        // NOTE(review): only one next() is issued for the two-character token;
        // after it the current char is the '=' (verified below), which appears
        // to be consumed by the caller — confirm against the other two-char
        // operator scanners.
        if (debug) {
            checkState(reader.curChar == '=');
        }
    }
}
public class CollectionUtils { /** * Merge the given Properties instance into the given Map , copying all properties ( key - value pairs ) over . < p > Uses * { @ code Properties . propertyNames ( ) } to even catch default properties linked into the original Properties * instance . * @ param props the Properties instance to merge ( may be { @ code null } ) . * @ param map the target Map to merge the properties into . */ @ SuppressWarnings ( "unchecked" ) public static < K , V > void mergePropertiesIntoMap ( Properties props , Map < K , V > map ) { } }
if ( map == null ) { throw new IllegalArgumentException ( "Map must not be null" ) ; } if ( props != null ) { for ( Enumeration < ? > en = props . propertyNames ( ) ; en . hasMoreElements ( ) ; ) { String key = ( String ) en . nextElement ( ) ; Object value = props . get ( key ) ; if ( value == null ) { // Allow for defaults fallback or potentially overridden accessor . . . value = props . getProperty ( key ) ; } map . put ( ( K ) key , ( V ) value ) ; } }
public class ExtendedProperties { /** * Returns a set of Entries representing property key - value pairs . The returned set is a view to * the internal data structures and reflects changes to this instance . Potential defaults are * included in the returned set . * @ return The entry set */ @ Override public Set < Entry < String , String > > entrySet ( ) { } }
if ( entrySet == null ) { entrySet = new EntrySet ( ) ; } return entrySet ;
public class CheckArg { /** * Check that the array is not empty * @ param argument Array * @ param name The name of the argument * @ throws IllegalArgumentException If array is null or empty */ public static void isNotEmpty ( Object [ ] argument , String name ) { } }
isNotNull ( argument , name ) ; if ( argument . length == 0 ) { throw new IllegalArgumentException ( CommonI18n . argumentMayNotBeEmpty . text ( name ) ) ; }
public class PTDImpl {
    /**
     * Reflective EMF setter: dispatches on the feature ID to the concrete
     * setter, delegating unknown features to the superclass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case AfplibPackage.PTD__XPBASE:
                setXPBASE((Integer) newValue);
                return;
            case AfplibPackage.PTD__YPBASE:
                setYPBASE((Integer) newValue);
                return;
            case AfplibPackage.PTD__XPUNITVL:
                setXPUNITVL((Integer) newValue);
                return;
            case AfplibPackage.PTD__YPUNITVL:
                setYPUNITVL((Integer) newValue);
                return;
            case AfplibPackage.PTD__XPEXTENT:
                setXPEXTENT((Integer) newValue);
                return;
            case AfplibPackage.PTD__YPEXTENT:
                setYPEXTENT((Integer) newValue);
                return;
            case AfplibPackage.PTD__RESERVED:
                setRESERVED((Integer) newValue);
                return;
            case AfplibPackage.PTD__CS:
                // The triplet list is replaced wholesale, not merged.
                getCS().clear();
                getCS().addAll((Collection<? extends Triplet>) newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
}
public class SparkIntegrationChecker {
    /**
     * Writes the Spark-with-Alluxio integration checker result to the report.
     *
     * @param resultStatus Spark job result status
     * @param reportWriter writer saving user-facing messages to a generated file
     */
    private void printResultInfo(Status resultStatus, PrintWriter reportWriter) {
        switch (resultStatus) {
            case FAIL_TO_FIND_CLASS:
                reportWriter.println(FAIL_TO_FIND_CLASS_MESSAGE);
                reportWriter.println(TEST_FAILED_MESSAGE);
                break;
            case FAIL_TO_FIND_FS:
                reportWriter.println(FAIL_TO_FIND_FS_MESSAGE);
                reportWriter.println(TEST_FAILED_MESSAGE);
                break;
            case FAIL_TO_SUPPORT_HA:
                reportWriter.println(FAIL_TO_SUPPORT_HA_MESSAGE);
                reportWriter.println(TEST_FAILED_MESSAGE);
                break;
            default:
                // Any other status counts as success.
                reportWriter.println(TEST_PASSED_MESSAGE);
                break;
        }
    }
}
public class GetTypeRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param getTypeRequest the request to marshall; must not be null
     * @param protocolMarshaller the target protocol marshaller
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(GetTypeRequest getTypeRequest, ProtocolMarshaller protocolMarshaller) {
        if (getTypeRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Marshal each member against its generated binding.
            protocolMarshaller.marshall(getTypeRequest.getApiId(), APIID_BINDING);
            protocolMarshaller.marshall(getTypeRequest.getTypeName(), TYPENAME_BINDING);
            protocolMarshaller.marshall(getTypeRequest.getFormat(), FORMAT_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Money {
    /**
     * Multiplies this amount by the given number, rounding HALF_EVEN within
     * the monetary context.
     * (non-Javadoc)
     * @see org.javamoney.moneta.AbstractMoney#multiply(java.lang.Number)
     */
    @Override
    public Money multiply(Number multiplicand) {
        // Infinity/NaN cannot be represented as a BigDecimal factor.
        NumberVerifier.checkNoInfinityOrNaN(multiplicand);
        BigDecimal multiplicandBD = MoneyUtils.getBigDecimal(multiplicand);
        // Fast path: BigDecimal.equals is scale-sensitive, so this only
        // matches an exact unscaled 1 (not 1.0 or 1.00). Presumably
        // intentional, since returning `this` preserves the current scale —
        // confirm before "fixing" with compareTo.
        if (multiplicandBD.equals(BigDecimal.ONE)) {
            return this;
        }
        MathContext mc = MoneyUtils.getMathContext(monetaryContext, RoundingMode.HALF_EVEN);
        BigDecimal dec = this.number.multiply(multiplicandBD, mc);
        return new Money(dec, getCurrency(), monetaryContext);
    }
}
public class UrlEncoded {
    /**
     * Decode String with % encoding.
     * This method makes the assumption that the majority of calls
     * will need no decoding and uses the 8859 encoding.
     *
     * @param encoded the %-encoded input
     * @return the decoded string
     */
    public static String decodeString(String encoded) {
        // Delegate to the ranged variant over the full input, ISO-8859-1.
        return decodeString(encoded, 0, encoded.length(), StringUtil.__ISO_8859_1);
    }
}
public class ValidatorVault {
    /**
     * Validate the find methods declared on the vault class.
     *
     * For every abstract {@code find*} method that yields a Result, checks
     * that the result's element type is acceptable: the id type, a
     * collection/stream, an abstract (proxy) type, or shim-convertible to
     * the asset class (the ShimConverter constructor performs that check).
     */
    private void findValidation(Class<?> vaultClass, Class<?> assetClass, Class<?> idClass) {
        for (Method method : vaultClass.getMethods()) {
            // Only finder methods are validated.
            if (!method.getName().startsWith("find")) {
                continue;
            }
            // Concrete (default/implemented) methods are the author's problem.
            if (!Modifier.isAbstract(method.getModifiers())) {
                continue;
            }
            TypeRef resultRef = findResult(method.getParameters());
            if (resultRef == null) {
                continue;
            }
            // Element type carried by the Result<...> parameter.
            TypeRef typeRef = resultRef.to(Result.class).param(0);
            Class<?> typeClass = typeRef.rawClass();
            // id return type
            if (unbox(idClass).equals(unbox(typeClass))) {
                continue;
            }
            if (Collection.class.isAssignableFrom(typeClass)) {
                continue;
            } else if (Stream.class.isAssignableFrom(typeClass)) {
                continue;
            } else if (Modifier.isAbstract(typeClass.getModifiers())) {
                // assumed to be proxy
                continue;
            }
            // Constructing the converter validates convertibility (throws on failure).
            new ShimConverter<>(assetClass, typeClass);
        }
    }
}
public class WaveformDetail { /** * Color waveforms are represented by a series of sixteen bit integers into which color and height information are * packed . This function returns the integer corresponding to a particular half - frame in the waveform . * @ param waveBytes the raw data making up the waveform * @ param segment the index of hte half - frame of interest * @ return the sixteen - bit number encoding the height and RGB values of that segment */ private int getColorWaveformBits ( final ByteBuffer waveBytes , final int segment ) { } }
final int base = ( segment * 2 ) ; final int big = Util . unsign ( waveBytes . get ( base ) ) ; final int small = Util . unsign ( waveBytes . get ( base + 1 ) ) ; return big * 256 + small ;
public class RoundingQueryBuilder {
    /**
     * Sets the rounding name of the {@link MonetaryRounding} instances to
     * query. The value is first matched against rounding names on equality;
     * when no instance with such a name exists, it is interpreted as a
     * regular expression to look up roundings.
     *
     * @param roundingName the (custom) rounding name expression, not {@code null}.
     * @return this instance for chaining
     */
    public RoundingQueryBuilder setRoundingName(String roundingName) {
        Objects.requireNonNull(roundingName);
        set(RoundingQuery.KEY_QUERY_ROUNDING_NAME, roundingName);
        return this;
    }
}
public class XHTMLParser { /** * Interpret the passed XHTML fragment as HTML and retrieve a result container * with all body elements . * @ param sXHTML * The XHTML text fragment . This fragment is parsed as an HTML body and * may therefore not contain the & lt ; body & gt ; tag . * @ return < code > null < / code > if the passed text could not be interpreted as * XHTML or if no body element was found , an { @ link IMicroContainer } * with all body children otherwise . */ @ Nullable public IMicroContainer unescapeXHTMLFragment ( @ Nullable final String sXHTML ) { } }
// Ensure that the content is surrounded by a single tag final IMicroDocument aDoc = parseXHTMLFragment ( sXHTML ) ; if ( aDoc != null && aDoc . getDocumentElement ( ) != null ) { // Find " body " case insensitive final IMicroElement eBody = aDoc . getDocumentElement ( ) . getFirstChildElement ( EHTMLElement . BODY . getElementName ( ) ) ; if ( eBody != null ) { final IMicroContainer ret = new MicroContainer ( ) ; if ( eBody . hasChildren ( ) ) { // We need a copy because detachFromParent is modifying for ( final IMicroNode aChildNode : eBody . getAllChildren ( ) ) ret . appendChild ( aChildNode . detachFromParent ( ) ) ; } return ret ; } } return null ;
public class FullList { /** * Builds a new full list , with null as blank element * @ param capacity the initial capacity of the list * @ param elements the elements * @ param < F > the type of the elements * @ return the full list */ public static < F > FullList < F > newListWithCapacity ( final int capacity , final F ... elements ) { } }
final FullList < F > l = new FullList < F > ( null , capacity ) ; l . addAll ( Arrays . asList ( elements ) ) ; return l ;
public class BinaryFormatUtils {
    /**
     * Write message to writer.
     *
     * @param writer The binary writer.
     * @param message The message to write.
     * @param <Message> The message type.
     * @param <Field> The field type.
     * @return The number of bytes written.
     * @throws IOException If write failed.
     */
    public static <Message extends PMessage<Message, Field>, Field extends PField> int writeMessage(BigEndianBinaryWriter writer, Message message) throws IOException {
        // Messages that know how to serialize themselves take precedence.
        if (message instanceof BinaryWriter) {
            return ((BinaryWriter) message).writeBinary(writer);
        }
        int len = 0;
        if (message instanceof PUnion) {
            // A union writes at most its single set field.
            if (((PUnion) message).unionFieldIsSet()) {
                PField field = ((PUnion) message).unionField();
                len += writeFieldSpec(writer, forType(field.getDescriptor().getType()), field.getId());
                len += writeFieldValue(writer, message.get(field.getId()), field.getDescriptor());
            }
        } else {
            // Structs write every field that is present.
            for (PField field : message.descriptor().getFields()) {
                if (message.has(field.getId())) {
                    len += writeFieldSpec(writer, forType(field.getDescriptor().getType()), field.getId());
                    len += writeFieldValue(writer, message.get(field.getId()), field.getDescriptor());
                }
            }
        }
        // Terminate the field list with the STOP marker.
        len += writer.writeUInt8(BinaryType.STOP);
        return len;
    }
}
public class BDBRepositoryBuilder {
    /**
     * Sets the repository environment home directory, which is required.
     *
     * @param envHome the environment home directory
     */
    public void setEnvironmentHomeFile(File envHome) {
        try {
            // Switch to canonical for more detailed error messages.
            envHome = envHome.getCanonicalFile();
        } catch (IOException e) {
            // Deliberate best-effort: fall back to the path as given when
            // canonicalization fails (e.g. I/O error probing the file system).
        }
        mEnvHome = envHome;
    }
}
public class CapacityTaskScheduler {
    /**
     * Called when a job completes: updates the per-user job accounting for
     * the job's queue and drops the user's slot footprint once their last
     * job is gone.
     */
    synchronized void jobCompleted(JobInProgress job) {
        QueueSchedulingInfo qsi = queueInfoMap.get(job.getProfile().getQueueName());
        // qsi shouldn't be null
        // update numJobsByUser
        LOG.debug("JOb to be removed for user " + job.getProfile().getUser());
        // NOTE(review): unboxing NPEs here if the user has no entry in
        // numJobsByUser — presumably jobAdded always runs first; confirm.
        Integer i = qsi.numJobsByUser.get(job.getProfile().getUser());
        i--;
        if (0 == i.intValue()) {
            qsi.numJobsByUser.remove(job.getProfile().getUser());
            // remove job footprint from our TSIs
            qsi.mapTSI.numSlotsOccupiedByUser.remove(job.getProfile().getUser());
            qsi.reduceTSI.numSlotsOccupiedByUser.remove(job.getProfile().getUser());
            LOG.debug("No more jobs for user, number of users = " + qsi.numJobsByUser.size());
        } else {
            // User still has other running jobs: write back the new count.
            qsi.numJobsByUser.put(job.getProfile().getUser(), i);
            LOG.debug("User still has " + i + " jobs, number of users = " + qsi.numJobsByUser.size());
        }
    }
}
public class SecurityServiceImpl { /** * Method will be called for each UserRegistryService that is unregistered * in the OSGi service registry . We must remove this instance from our * internal map . * @ param ref Reference to an unregistered UserRegistryService */ protected Map < String , Object > unsetUserRegistry ( ServiceReference < UserRegistryService > ref ) { } }
userRegistry . removeReference ( ( String ) ref . getProperty ( KEY_ID ) , ref ) ; userRegistry . removeReference ( String . valueOf ( ref . getProperty ( KEY_SERVICE_ID ) ) , ref ) ; // determine a new user registry service userRegistryService . set ( null ) ; return getServiceProperties ( ) ;
public class StringGroovyMethods {
    /**
     * Iterates through this CharSequence line by line. Each line is passed
     * to the given 1 or 2 arg closure. If a 2 arg closure is found
     * the line count is passed as the second argument.
     *
     * @param self a CharSequence
     * @param closure a closure (arity 1 or 2)
     * @return the last value returned by the closure
     * @throws java.io.IOException if an error occurs
     * @see #eachLine(String, groovy.lang.Closure)
     * @since 1.8.2
     */
    public static <T> T eachLine(CharSequence self, @ClosureParams(value = FromString.class, options = {"String", "String,Integer"}) Closure<T> closure) throws IOException {
        // Delegate to the String overload, starting the line count at 0.
        return eachLine(self.toString(), 0, closure);
    }
}
public class DefaultNumberValue { /** * ( non - Javadoc ) * @ see javax . money . NumberValue # getDoubleValueExact ( ) */ @ Override public double doubleValueExact ( ) { } }
double d = this . number . doubleValue ( ) ; if ( d == Double . NEGATIVE_INFINITY || d == Double . POSITIVE_INFINITY ) { throw new ArithmeticException ( "Unable to convert to double: " + this . number ) ; } return d ;
public class Stream { /** * Returns the last element wrapped by { @ code Optional } class . * If stream is empty , returns { @ code Optional . empty ( ) } . * < p > This is a short - circuiting terminal operation . * @ return an { @ code Optional } with the last element * or { @ code Optional . empty ( ) } if the stream is empty * @ since 1.1.8 */ @ NotNull public Optional < T > findLast ( ) { } }
return reduce ( new BinaryOperator < T > ( ) { @ Override public T apply ( T left , T right ) { return right ; } } ) ;
public class MediaClient { /** * Get information of thumbnail job . * @ param request The request object containing all options for creating new water mark . * @ return The information of the thumbnail job . */ public GetThumbnailJobResponse getThumbnailJob ( GetThumbnailJobRequest request ) { } }
checkStringNotEmpty ( request . getJobId ( ) , "The parameter jobId should NOT be null or empty string." ) ; InternalRequest internalRequest = createRequest ( HttpMethodName . GET , request , THUMBNAIL , request . getJobId ( ) ) ; return invokeHttpClient ( internalRequest , GetThumbnailJobResponse . class ) ;
public class DataUtil { /** * Read the input stream into a byte buffer . To deal with slow input streams , you may interrupt the thread this * method is executing on . The data read until being interrupted will be available . * @ param inStream the input stream to read from * @ param maxSize the maximum size in bytes to read from the stream . Set to 0 to be unlimited . * @ return the filled byte buffer * @ throws IOException if an exception occurs whilst reading from the input stream . */ public static ByteBuffer readToByteBuffer ( InputStream inStream , int maxSize ) throws IOException { } }
Validate . isTrue ( maxSize >= 0 , "maxSize must be 0 (unlimited) or larger" ) ; final ConstrainableInputStream input = ConstrainableInputStream . wrap ( inStream , bufferSize , maxSize ) ; return input . readToByteBuffer ( maxSize ) ;
public class SheetResourcesImpl {
    /**
     * Sort a sheet according to the sort criteria.
     *
     * It mirrors to the following Smartsheet REST API method: POST /sheet/{sheetId}/sort
     *
     * Exceptions:
     *   - IllegalArgumentException : if any argument is null
     *   - InvalidRequestException : if there is any problem with the REST API request
     *   - AuthorizationException : if there is any problem with the REST API authorization (access token)
     *   - ServiceUnavailableException : if the REST API service is not available (possibly due to rate limiting)
     *   - SmartsheetRestException : if there is any other REST API related error occurred during the operation
     *   - SmartsheetException : if there is any other error occurred during the operation
     *
     * @param sheetId the sheet id
     * @param sortSpecifier the sort criteria
     * @return the update request object
     * @throws SmartsheetException the smartsheet exception
     */
    public Sheet sortSheet(long sheetId, SortSpecifier sortSpecifier) throws SmartsheetException {
        // Delegate to the overload that also accepts optional include parameters (none here).
        return this.sortSheet(sheetId, sortSpecifier, null);
    }
}
public class CommercePriceEntryPersistenceImpl { /** * Caches the commerce price entry in the entity cache if it is enabled . * @ param commercePriceEntry the commerce price entry */ @ Override public void cacheResult ( CommercePriceEntry commercePriceEntry ) { } }
entityCache . putResult ( CommercePriceEntryModelImpl . ENTITY_CACHE_ENABLED , CommercePriceEntryImpl . class , commercePriceEntry . getPrimaryKey ( ) , commercePriceEntry ) ; finderCache . putResult ( FINDER_PATH_FETCH_BY_UUID_G , new Object [ ] { commercePriceEntry . getUuid ( ) , commercePriceEntry . getGroupId ( ) } , commercePriceEntry ) ; finderCache . putResult ( FINDER_PATH_FETCH_BY_C_C , new Object [ ] { commercePriceEntry . getCommercePriceListId ( ) , commercePriceEntry . getCPInstanceUuid ( ) } , commercePriceEntry ) ; finderCache . putResult ( FINDER_PATH_FETCH_BY_C_ERC , new Object [ ] { commercePriceEntry . getCompanyId ( ) , commercePriceEntry . getExternalReferenceCode ( ) } , commercePriceEntry ) ; commercePriceEntry . resetOriginalValues ( ) ;
public class CommerceAccountUserRelPersistenceImpl {
    /**
     * Clears the cache for all commerce account user rels.
     *
     * The {@link EntityCache} and {@link FinderCache} are both cleared by this method.
     */
    @Override
    public void clearCache() {
        // Evict every cached entity instance of this model.
        entityCache.clearCache(CommerceAccountUserRelImpl.class);
        // Evict all cached finder results: single-entity fetches plus
        // paginated and unpaginated list queries.
        finderCache.clearCache(FINDER_CLASS_NAME_ENTITY);
        finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITH_PAGINATION);
        finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION);
    }
}
public class WorkQueue { /** * Start worker threads with a shared log . * @ param executorService * the executor service * @ param numTasks * the number of worker tasks to start */ private void startWorkers ( final ExecutorService executorService , final int numTasks ) { } }
for ( int i = 0 ; i < numTasks ; i ++ ) { workerFutures . add ( executorService . submit ( new Callable < Void > ( ) { @ Override public Void call ( ) throws Exception { runWorkLoop ( ) ; return null ; } } ) ) ; }
public class ClassWriter {
    /**
     * Puts one byte and two shorts into the constant pool.
     *
     * @param b a byte.
     * @param s1 a short.
     * @param s2 another short.
     */
    private void put122(final int b, final int s1, final int s2) {
        // putBS appends the byte and the first short in one call; chain the second short.
        pool.putBS(b, s1).putShort(s2);
    }
}
public class CreateSpatialIndexGeneratorOracle { /** * Generates the SQL for deleting any existing record from the * < code > USER _ SDO _ GEOM _ METADATA < / code > table . Typically this record shouldn ' t be present but we * must ensure that it does not already exist . * @ param statement * the create spatial index statement . * @ param database * the database instance . * @ return the SQL to delete any existing metadata record . */ protected String generateDeleteMetadataSql ( final CreateSpatialIndexStatement statement , final Database database ) { } }
final StringBuilder sql = new StringBuilder ( ) ; sql . append ( "DELETE FROM user_sdo_geom_metadata " ) ; final String tableName = statement . getTableName ( ) . trim ( ) ; sql . append ( "WHERE table_name = '" ) . append ( database . correctObjectName ( tableName , Table . class ) ) ; final String columnName = statement . getColumns ( ) [ 0 ] . trim ( ) ; sql . append ( "' AND column_name = '" ) . append ( database . correctObjectName ( columnName , Column . class ) ) ; sql . append ( "'" ) ; return sql . toString ( ) ;
public class PowerAdapter { /** * Concatenates the specified views with this adapter , and returns the result . * @ param views The views to be prepended . * @ return The resulting composite adapter . * @ see # prepend ( PowerAdapter . . . ) * @ see # concat ( PowerAdapter . . . ) */ @ CheckResult @ NonNull public final PowerAdapter prepend ( @ NonNull ViewFactory ... views ) { } }
checkNotNull ( views , "views" ) ; if ( views . length == 0 ) { return this ; } return prepend ( asAdapter ( views ) ) ;
public class OmdbBuilder { /** * The type of video to return : Movie , Series or Episode . * @ param resultType [ OPTIONAL ] The type to limit the build to * @ return */ public OmdbBuilder setResultType ( final ResultType resultType ) { } }
if ( ! ResultType . isDefault ( resultType ) ) { params . add ( Param . RESULT , resultType ) ; } return this ;
public class CmsUserDriver {
    /**
     * Returns the organizational unit represented by the given resource.<p>
     *
     * @param dbc the current db context
     * @param resource the resource that represents an organizational unit
     * @return the organizational unit represented by the given resource
     * @throws CmsException if something goes wrong
     */
    protected CmsOrganizationalUnit internalCreateOrgUnitFromResource(CmsDbContext dbc, CmsResource resource)
        throws CmsException {

        // Only resources below the OU base folder can represent organizational units.
        if (!resource.getRootPath().startsWith(ORGUNIT_BASE_FOLDER)) {
            throw new CmsDataAccessException(
                Messages.get().container(Messages.ERR_READ_ORGUNIT_1, resource.getRootPath()));
        }
        // get the data
        // The OU name is the path below the base folder.
        String name = resource.getRootPath().substring(ORGUNIT_BASE_FOLDER.length());
        // OU names are folder-like: ensure a trailing separator (the root OU stays "").
        if ((name.length() > 0) && !name.endsWith(CmsOrganizationalUnit.SEPARATOR)) {
            name += CmsOrganizationalUnit.SEPARATOR;
        }
        String description = m_driverManager.readPropertyObject(
            dbc, resource, ORGUNIT_PROPERTY_DESCRIPTION, false).getStructureValue();
        int flags = (resource.getFlags() & ~CmsResource.FLAG_INTERNAL); // remove the internal flag
        String projectId = m_driverManager.readPropertyObject(
            dbc, resource, ORGUNIT_PROPERTY_PROJECTID, false).getStructureValue();
        // create the object
        return new CmsOrganizationalUnit(
            resource.getStructureId(),
            name,
            description,
            flags,
            (projectId == null ? null : new CmsUUID(projectId)));
    }
}
public class HtmlUtil {
    /**
     * Removes the specified HTML tags but keeps their content.<br>
     * Tag matching is case-insensitive.
     *
     * @param content the text to process
     * @param tagNames the tags to strip
     * @return the text with the tags removed
     */
    public static String unwrapHtmlTag(String content, String... tagNames) {
        // false = keep the tag content, strip only the tags themselves.
        return removeHtmlTag(content, false, tagNames);
    }
}
public class streamselector { /** * Use this API to fetch filtered set of streamselector resources . * filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */ public static streamselector [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
streamselector obj = new streamselector ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; streamselector [ ] response = ( streamselector [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class JavaLexer {
    /**
     * $ANTLR start "OctalLiteral"
     *
     * Generated lexer rule matching an octal integer literal:
     * '0' ('0'..'7')+ (IntegerTypeSuffix)?
     */
    public final void mOctalLiteral() throws RecognitionException {
        try {
            int _type = OctalLiteral;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1306:14: ( '0' ( '0' .. '7' )+ ( IntegerTypeSuffix )? )
            // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1306:16: '0' ( '0' .. '7' )+ ( IntegerTypeSuffix )?
            {
                match('0');
                // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1306:20: ( '0' .. '7' )+
                int cnt7 = 0;
                loop7:
                while (true) {
                    int alt7 = 2;
                    int LA7_0 = input.LA(1);
                    // Consume as many octal digits as available.
                    if (((LA7_0 >= '0' && LA7_0 <= '7'))) {
                        alt7 = 1;
                    }
                    switch (alt7) {
                        case 1:
                            // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:
                        {
                            if ((input.LA(1) >= '0' && input.LA(1) <= '7')) {
                                input.consume();
                            } else {
                                MismatchedSetException mse = new MismatchedSetException(null, input);
                                recover(mse);
                                throw mse;
                            }
                        }
                        break;
                        default:
                            // At least one octal digit is required after the leading '0'.
                            if (cnt7 >= 1) break loop7;
                            EarlyExitException eee = new EarlyExitException(7, input);
                            throw eee;
                    }
                    cnt7++;
                }
                // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1306:32: ( IntegerTypeSuffix )?
                // Optionally consume an integer type suffix ('L' or 'l').
                int alt8 = 2;
                int LA8_0 = input.LA(1);
                if ((LA8_0 == 'L' || LA8_0 == 'l')) {
                    alt8 = 1;
                }
                switch (alt8) {
                    case 1:
                        // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:
                    {
                        if (input.LA(1) == 'L' || input.LA(1) == 'l') {
                            input.consume();
                        } else {
                            MismatchedSetException mse = new MismatchedSetException(null, input);
                            recover(mse);
                            throw mse;
                        }
                    }
                    break;
                }
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // do for sure before leaving
        }
    }
}
public class CommonOps_ZDRM { /** * Returns the magnitude squared of the complex element with the largest magnitude < br > * < br > * Max { | a < sub > ij < / sub > | ^ 2 } for all i and j < br > * @ param a A matrix . Not modified . * @ return The max magnitude squared */ public static double elementMaxMagnitude2 ( ZMatrixD1 a ) { } }
final int size = a . getDataLength ( ) ; double max = 0 ; for ( int i = 0 ; i < size ; ) { double real = a . data [ i ++ ] ; double imaginary = a . data [ i ++ ] ; double m = real * real + imaginary * imaginary ; if ( m > max ) { max = m ; } } return max ;
public class CmsJspContentAccessValueWrapper { /** * Turn on macro resolving for the wrapped value . < p > * Macro resolving is turned off by default . * When turned on , a macro resolver is initialized with * the current OpenCms user context and the URI of the current resource . * This means known macros contained in the wrapped value will be resolved when the output String is generated . * For example , a < code > % ( property . Title ) < / code > in the value would be replaced with the * value of the title property . Macros that can not be resolved will be kept . < p > * Usage example on a JSP with the JSTL : < pre > * & lt ; cms : contentload . . . & gt ; * & lt ; cms : contentaccess var = " content " / & gt ; * The text with macros resolved : $ { content . value [ ' Text ' ] . resolveMacros } * & lt ; / cms : contentload & gt ; < / pre > * @ return a value wrapper with macro resolving turned on * @ see CmsMacroResolver */ public CmsJspContentAccessValueWrapper getResolveMacros ( ) { } }
if ( m_macroResolver == null ) { CmsMacroResolver macroResolver = CmsMacroResolver . newInstance ( ) ; macroResolver . setCmsObject ( m_cms ) ; macroResolver . setKeepEmptyMacros ( true ) ; return new CmsJspContentAccessValueWrapper ( this , macroResolver ) ; } // macro resolving is already turned on return this ;
public class Executor { /** * Returns when this executor started or should start being idle . */ public long getIdleStartMilliseconds ( ) { } }
if ( isIdle ( ) ) return Math . max ( creationTime , owner . getConnectTime ( ) ) ; else { return Math . max ( startTime + Math . max ( 0 , executableEstimatedDuration ) , System . currentTimeMillis ( ) + 15000 ) ; }
public class ResourceLoader { /** * Given a class or resource name , returns a patch to that resource . * @ param name the class or resource name . * @ return the path representing the class or resource . */ @ Override public PathImpl getPath ( String name ) { } }
if ( name . startsWith ( "/" ) ) return _path . lookup ( "." + name ) ; else return _path . lookup ( name ) ;
public class AppConfigChecker {
    /**
     * F743-6605
     *
     * Validates that a StatefulTimeout configuration (annotation or XML)
     * is only used on stateful session beans; otherwise a warning is logged.
     *
     * @param bmd the bean metadata to check
     * @param tc the trace component used for the warning messages
     */
    static void validateStatefulTimeoutOnSFSB(BeanMetaData bmd, TraceComponent tc) {
        // Case 1: @StatefulTimeout annotation on the bean class.
        StatefulTimeout statefulTimeout = bmd.enterpriseBeanClass.getAnnotation(javax.ejb.StatefulTimeout.class);
        if (statefulTimeout != null) {
            // The annotation only has meaning on a stateful session bean.
            if (bmd.type != InternalConstants.TYPE_STATEFUL_SESSION) {
                Tr.warning(tc, "STATEFUL_TIMEOUT_ON_NON_SFSB_CNTR0304W",
                    new Object[] {bmd.getName(),
                        bmd.getModuleMetaData().getName(),
                        bmd.getModuleMetaData().getApplicationMetaData().getName()}); // F743-6605.1, d641570
            }
        }
        // begin F743-6605.1
        // Case 2: <stateful-timeout> configured in the deployment descriptor.
        EnterpriseBean eb = bmd.wccm.enterpriseBean;
        if (eb instanceof Session) {
            Session session = (Session) eb;
            com.ibm.ws.javaee.dd.ejb.StatefulTimeout statefulTimeoutXML = session.getStatefulTimeout(); // F743-6605.1
            if (statefulTimeoutXML != null) {
                if (bmd.type != InternalConstants.TYPE_STATEFUL_SESSION) {
                    Object[] parms = new Object[] {bmd.getName(),
                        bmd.getModuleMetaData().getName(),
                        bmd.getModuleMetaData().getApplicationMetaData().getName()};
                    Tr.warning(tc, "STATEFUL_TIMEOUT_ON_NON_SFSB_CNTR0310W", parms);
                }
            }
        }
        // end F743-6605.1
    }
}