signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Vectors { /** * Returns a subview for the given { @ code IntegerVector } with a specified * offset and length . * @ param vector the { @ code Vector } whose values will be shown in the view * @ param offset the index of { @ code v } at which the first index of this * view starts * @ param length the length of this view . * @ throws IllegalArgumentException if < ul > < li > { @ code offset } is * negative < li > { @ code length } is less than zero < li > the sum of { @ code * offset } plus { @ code length } is greater than the length of { @ code * vector } < / ul > */ public static SparseIntegerVector subview ( SparseIntegerVector vector , int offset , int length ) { } }
if ( vector == null ) throw new NullPointerException ( "Cannot create view of a " + "null vector" ) ; return new SparseIntegerVectorView ( vector , offset , length ) ;
public class TranslationServiceClient {

    /**
     * Creates a glossary and returns the long-running operation. Returns NOT_FOUND if the
     * project doesn't exist.
     *
     * <p>Sample code:
     *
     * <pre><code>
     * try (TranslationServiceClient translationServiceClient = TranslationServiceClient.create()) {
     *   String formattedParent = TranslationServiceClient.formatLocationName("[PROJECT]", "[LOCATION]");
     *   Glossary glossary = Glossary.newBuilder().build();
     *   CreateGlossaryRequest request = CreateGlossaryRequest.newBuilder()
     *     .setParent(formattedParent)
     *     .setGlossary(glossary)
     *     .build();
     *   Glossary response = translationServiceClient.createGlossaryAsync(request).get();
     * </code></pre>
     *
     * @param request The request object containing all of the parameters for the API call.
     * @return a future tracking the long-running operation; resolves to the created glossary
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    @BetaApi("The surface for long-running operations is not stable yet and may change in the future.")
    public final OperationFuture<Glossary, CreateGlossaryMetadata> createGlossaryAsync(CreateGlossaryRequest request) {
        // Delegate to the generated operation callable; it wires up polling of the LRO.
        return createGlossaryOperationCallable().futureCall(request);
    }
}
public class ChromeDriverFactory { /** * Converts String to Boolean \ Integer or returns original String . * @ param value string to convert * @ return string ' s object representation */ private Object convertStringToNearestObjectType ( String value ) { } }
switch ( value ) { case "true" : return true ; case "false" : return false ; default : { if ( NumberUtils . isParsable ( value ) ) { return Integer . parseInt ( value ) ; } return value ; } }
public class JournalSegmentDescriptor {

    /**
     * Copies this descriptor's fields into the given buffer.
     *
     * <p>Fields are written at the buffer's current position in a fixed serialization
     * order: version (int), id (long), index (long), maxSegmentSize (int),
     * maxEntries (int), updated (long), locked (one byte; 1 = locked).
     *
     * @return this descriptor, for call chaining
     */
    JournalSegmentDescriptor copyTo(ByteBuffer buffer) {
        buffer.putInt(version);
        buffer.putLong(id);
        buffer.putLong(index);
        buffer.putInt(maxSegmentSize);
        buffer.putInt(maxEntries);
        buffer.putLong(updated);
        // Encode the boolean lock flag as a single byte.
        buffer.put(locked ? (byte) 1 : (byte) 0);
        return this;
    }
}
public class DirectQuickSelectSketch {

    /**
     * Wrap a sketch around the given source Memory containing sketch data that originated from
     * this sketch.
     *
     * @param srcMem the given Memory object; must be in hash table form and not read only
     * @param seed the update hash seed used to validate memory integrity
     * @return instance of this sketch backed directly by {@code srcMem}
     */
    static DirectQuickSelectSketch writableWrap(final WritableMemory srcMem, final long seed) {
        // Decode the preamble fields from their fixed byte offsets.
        final int preambleLongs = extractPreLongs(srcMem);   // byte 0
        final int lgNomLongs = extractLgNomLongs(srcMem);    // byte 3
        final int lgArrLongs = extractLgArrLongs(srcMem);    // byte 4

        // Validate family and memory integrity before trusting the layout.
        UpdateSketch.checkUnionQuickSelectFamily(srcMem, preambleLongs, lgNomLongs);
        checkMemIntegrity(srcMem, seed, preambleLongs, lgNomLongs, lgArrLongs);

        final int lgRF = extractLgResizeFactor(srcMem);      // byte 0
        final ResizeFactor myRF = ResizeFactor.getRF(lgRF);
        // If the stored resize factor is X1 but the array size is inconsistent with X1,
        // repair the preamble by promoting the resize factor to X2.
        if ((myRF == ResizeFactor.X1)
                && (lgArrLongs != Util.startingSubMultiple(lgNomLongs + 1, myRF, MIN_LG_ARR_LONGS))) {
            insertLgResizeFactor(srcMem, ResizeFactor.X2.lg());
        }

        final DirectQuickSelectSketch dqss = new DirectQuickSelectSketch(seed, srcMem);
        dqss.hashTableThreshold_ = setHashTableThreshold(lgNomLongs, lgArrLongs);
        return dqss;
    }
}
public class DRL6Expressions {

    /**
     * $ANTLR start synpred11_DRL6Expressions
     *
     * <p>ANTLR-generated syntactic predicate fragment matching
     * {@code DOUBLE_AMPER (fullAnnotation[null])? operator}.
     * Generated from DRL6Expressions.g — do not edit by hand.
     */
    public final void synpred11_DRL6Expressions_fragment() throws RecognitionException {
        // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:421:5: ( DOUBLE_AMPER ( fullAnnotation[null] )? operator )
        // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:421:6: DOUBLE_AMPER ( fullAnnotation[null] )? operator
        {
            match(input, DOUBLE_AMPER, FOLLOW_DOUBLE_AMPER_in_synpred11_DRL6Expressions1972);
            if (state.failed) return;
            // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:421:19: ( fullAnnotation[null] )?
            int alt100 = 2;
            int LA100_0 = input.LA(1);
            // Optional annotation branch: taken only when the lookahead token is AT.
            if ((LA100_0 == AT)) {
                alt100 = 1;
            }
            switch (alt100) {
                case 1:
                    // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:421:19: fullAnnotation[null]
                    {
                        pushFollow(FOLLOW_fullAnnotation_in_synpred11_DRL6Expressions1974);
                        fullAnnotation(null);
                        state._fsp--;
                        if (state.failed) return;
                    }
                    break;
            }
            pushFollow(FOLLOW_operator_in_synpred11_DRL6Expressions1978);
            operator();
            state._fsp--;
            if (state.failed) return;
        }
    }
}
public class Option { /** * If { @ code x = = null } , return { @ link None } , else return { @ link Some } * containing { @ code x } . * @ param < T > The type of values * @ param x The value * @ return An optional value */ public static < T > OptionType < T > of ( final @ Nullable T x ) { } }
if ( x == null ) { return None . none ( ) ; } return Some . some ( x ) ;
public class TargetsApi { /** * Delete a target * Delete the target from the agent & # 39 ; s personal favorites . * @ param id The ID of the target . ( required ) * @ param type The type of target . ( required ) * @ return ApiSuccessResponse * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiSuccessResponse deletePersonalFavorite ( String id , String type ) throws ApiException { } }
ApiResponse < ApiSuccessResponse > resp = deletePersonalFavoriteWithHttpInfo ( id , type ) ; return resp . getData ( ) ;
public class IndexFacesRequestMarshaller {

    /**
     * Marshalls each field of the given {@code IndexFacesRequest} into the protocol
     * marshaller using the generated field bindings.
     *
     * @throws SdkClientException if the request is null or any field fails to marshall
     */
    public void marshall(IndexFacesRequest indexFacesRequest, ProtocolMarshaller protocolMarshaller) {
        if (indexFacesRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(indexFacesRequest.getCollectionId(), COLLECTIONID_BINDING);
            protocolMarshaller.marshall(indexFacesRequest.getImage(), IMAGE_BINDING);
            protocolMarshaller.marshall(indexFacesRequest.getExternalImageId(), EXTERNALIMAGEID_BINDING);
            protocolMarshaller.marshall(indexFacesRequest.getDetectionAttributes(), DETECTIONATTRIBUTES_BINDING);
            protocolMarshaller.marshall(indexFacesRequest.getMaxFaces(), MAXFACES_BINDING);
            protocolMarshaller.marshall(indexFacesRequest.getQualityFilter(), QUALITYFILTER_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client-side exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ExposureEstimator {

    /**
     * This method returns the value random variable of the product within the specified model,
     * evaluated at a given evaluationTime.
     * Note: For a lattice this is often the value conditional to evaluationTime, for a
     * Monte-Carlo simulation this is the (sum of) value discounted to evaluation time.
     * Cashflows prior to evaluationTime are not considered.
     *
     * @param evaluationTime The time on which this product's value should be observed.
     * @param model The model used to price the product.
     * @return The random variable representing the value of the product discounted to evaluation time
     * @throws net.finmath.exception.CalculationException Thrown if the valuation fails; specific
     *         cause may be available via the <code>cause()</code> method.
     */
    @Override
    public RandomVariable getValue(double evaluationTime, LIBORModelMonteCarloSimulationModel model) throws CalculationException {
        final RandomVariable one = model.getRandomVariableForConstant(1.0);
        final RandomVariable zero = model.getRandomVariableForConstant(0.0);

        RandomVariable values = underlying.getValue(evaluationTime, model);

        // Only apply the regression when the value carries information from beyond
        // evaluationTime (i.e. foresight must be removed).
        if (values.getFiltrationTime() > evaluationTime) {
            // filterNaN is 1 where the value is a number and 0 where it is NaN.
            RandomVariable filterNaN = values.isNaN().sub(1.0).mult(-1.0);
            RandomVariable valuesFiltered = values.mult(filterNaN);

            /*
             * Cut off values more than three standard deviations from the mean
             * (plus a small relative tolerance) before the regression.
             */
            double valuesMean = valuesFiltered.getAverage();
            double valuesStdDev = valuesFiltered.getStandardDeviation();
            double valuesFloor = valuesMean * (1.0 - Math.signum(valuesMean) * 1E-5) - 3.0 * valuesStdDev;
            double valuesCap = valuesMean * (1.0 + Math.signum(valuesMean) * 1E-5) + 3.0 * valuesStdDev;
            // filter is 1 where valuesFloor <= value <= valuesCap, 0 otherwise.
            RandomVariable filter = values.sub(valuesFloor).choose(one, zero)
                    .mult(values.sub(valuesCap).mult(-1.0).choose(one, zero));
            filter = filter.mult(filterNaN);

            // Filter values and regressionBasisFunctions
            values = values.mult(filter);

            RandomVariable[] regressionBasisFunctions = getRegressionBasisFunctions(evaluationTime, model);
            RandomVariable[] filteredRegressionBasisFunctions = new RandomVariable[regressionBasisFunctions.length];
            for (int i = 0; i < regressionBasisFunctions.length; i++) {
                filteredRegressionBasisFunctions[i] = regressionBasisFunctions[i].mult(filter);
            }

            // Remove foresight through conditional expectation
            MonteCarloConditionalExpectationRegression condExpEstimator = new MonteCarloConditionalExpectationRegression(filteredRegressionBasisFunctions, regressionBasisFunctions);

            // Calculate cond. expectation. Note that no discounting (numeraire division) is required!
            values = condExpEstimator.getConditionalExpectation(values);
        }

        // Return values
        return values;
    }
}
public class AWSMigrationHubClient { /** * Sets the migration state of an application . For a given application identified by the value passed to * < code > ApplicationId < / code > , its status is set or updated by passing one of three values to < code > Status < / code > : * < code > NOT _ STARTED | IN _ PROGRESS | COMPLETED < / code > . * @ param notifyApplicationStateRequest * @ return Result of the NotifyApplicationState operation returned by the service . * @ throws AccessDeniedException * You do not have sufficient access to perform this action . * @ throws InternalServerErrorException * Exception raised when there is an internal , configuration , or dependency error encountered . * @ throws ServiceUnavailableException * Exception raised when there is an internal , configuration , or dependency error encountered . * @ throws DryRunOperationException * Exception raised to indicate a successfully authorized action when the < code > DryRun < / code > flag is set to * " true " . * @ throws UnauthorizedOperationException * Exception raised to indicate a request was not authorized when the < code > DryRun < / code > flag is set to * " true " . * @ throws InvalidInputException * Exception raised when the provided input violates a policy constraint or is entered in the wrong format * or data type . * @ throws PolicyErrorException * Exception raised when there are problems accessing ADS ( Application Discovery Service ) ; most likely due * to a misconfigured policy or the < code > migrationhub - discovery < / code > role is missing or not configured * correctly . * @ throws ResourceNotFoundException * Exception raised when the request references a resource ( ADS configuration , update stream , migration * task , etc . ) that does not exist in ADS ( Application Discovery Service ) or in Migration Hub ' s repository . * @ sample AWSMigrationHub . NotifyApplicationState * @ see < a href = " http : / / docs . aws . amazon . 
com / goto / WebAPI / AWSMigrationHub - 2017-05-31 / NotifyApplicationState " * target = " _ top " > AWS API Documentation < / a > */ @ Override public NotifyApplicationStateResult notifyApplicationState ( NotifyApplicationStateRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeNotifyApplicationState ( request ) ;
public class RubyDependencyResolver { /** * Get gems file name { gems . locked / Gemfile . lock } that will be created in bundle install command . * Bundler version < 2 will create ' Gemfile . lock ' if ' Gemfile ' exist , If ' Gemfile ' doesn ' t exist and ' gems . rb ' exist it will create ' gems . locked ' * Bundler version > = 2 will create ' gems . locked ' if ' gems . rb ' exist , If ' gems . rb ' doesn ' t exist and ' Gemfile ' exist it will create ' Gemfile . lock ' * @ return Gem file that will be created - ' gems . locked ' / ' Gemfile . lock ' */ private String getGemsFileName ( ) { } }
File gemsRb = new File ( rootDirectory + fileSeparator + GEMS_RB ) ; File gemFile = new File ( rootDirectory + fileSeparator + GEM_FILE ) ; if ( ! gemsRb . isFile ( ) && ! gemFile . isFile ( ) ) { return null ; } List < String > bundleVersionResult = cli . runCmd ( rootDirectory , cli . getCommandParams ( BUNDLE , Constants . VERSION ) ) ; if ( bundleVersionResult != null ) { if ( bundleVersionResult . get ( 0 ) . contains ( "Bundler version" ) ) { Pattern p = Pattern . compile ( "Bundler version ((?:\\d|\\.)+)" ) ; Matcher m = p . matcher ( bundleVersionResult . get ( 0 ) ) ; if ( m . find ( ) ) { String version = m . group ( 1 ) ; int versionNumber = Integer . parseInt ( version . substring ( 0 , version . indexOf ( '.' ) ) ) ; if ( versionNumber < 2 ) { // In version < 2 if gems . rb exist and gemFile doesn ' t exist return " gems . locked " else " gemFile . lock " if ( gemFile . isFile ( ) ) { return GEM_FILE_LOCK ; } else if ( gemsRb . isFile ( ) ) { return GEMS_LOCKED ; } } else { // In version > 2 if gems . rb exist return " gems . locked " else " gemFile . lock " if ( gemsRb . isFile ( ) ) { return GEMS_LOCKED ; } else if ( gemFile . isFile ( ) ) { return GEM_FILE_LOCK ; } } } } } return null ;
public class PluginMessageDescription { /** * Create a description for a RateCondition object . * @ param condition the condition * @ return a description to be used on email templates */ public String rate ( RateCondition condition ) { } }
String description ; if ( condition . getContext ( ) != null && condition . getContext ( ) . get ( CONTEXT_PROPERTY_DESCRIPTION ) != null ) { description = condition . getContext ( ) . get ( CONTEXT_PROPERTY_DESCRIPTION ) ; } else { description = condition . getDataId ( ) ; } switch ( condition . getDirection ( ) ) { case DECREASING : description += " decreasing " ; break ; case INCREASING : description += " increasing " ; break ; case NA : break ; default : throw new IllegalArgumentException ( condition . getDirection ( ) . name ( ) ) ; } switch ( condition . getOperator ( ) ) { case GT : description += " greater than " ; break ; case GTE : description += " greater or equal than " ; break ; case LT : description += " less than " ; break ; case LTE : description += " less or equal than " ; break ; default : throw new IllegalArgumentException ( condition . getOperator ( ) . name ( ) ) ; } description += decimalFormat . format ( condition . getThreshold ( ) ) ; if ( condition . getContext ( ) != null && condition . getContext ( ) . get ( CONTEXT_PROPERTY_UNIT ) != null ) { description += " " + condition . getContext ( ) . get ( CONTEXT_PROPERTY_UNIT ) ; } switch ( condition . getPeriod ( ) ) { case DAY : description = " per day " ; break ; case HOUR : description = " per hour " ; break ; case MINUTE : description = " per minute " ; break ; case SECOND : description = " per second " ; break ; case WEEK : description = " per week " ; break ; default : throw new IllegalArgumentException ( condition . getOperator ( ) . name ( ) ) ; } return description ;
public class RtmpGroupSettingsMarshaller {

    /**
     * Marshalls each field of the given {@code RtmpGroupSettings} into the protocol
     * marshaller using the generated field bindings.
     *
     * @throws SdkClientException if the settings object is null or any field fails to marshall
     */
    public void marshall(RtmpGroupSettings rtmpGroupSettings, ProtocolMarshaller protocolMarshaller) {
        if (rtmpGroupSettings == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(rtmpGroupSettings.getAuthenticationScheme(), AUTHENTICATIONSCHEME_BINDING);
            protocolMarshaller.marshall(rtmpGroupSettings.getCacheFullBehavior(), CACHEFULLBEHAVIOR_BINDING);
            protocolMarshaller.marshall(rtmpGroupSettings.getCacheLength(), CACHELENGTH_BINDING);
            protocolMarshaller.marshall(rtmpGroupSettings.getCaptionData(), CAPTIONDATA_BINDING);
            protocolMarshaller.marshall(rtmpGroupSettings.getInputLossAction(), INPUTLOSSACTION_BINDING);
            protocolMarshaller.marshall(rtmpGroupSettings.getRestartDelay(), RESTARTDELAY_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client-side exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Base64Encoder {

    /**
     * Encodes the given bytes as Base64.
     * If {@code isMultiLine} is true, a CRLF is inserted every 76 output characters.
     *
     * @param arr the bytes to encode
     * @param isMultiLine whether to insert a CRLF after every 76 output characters
     * @param isUrlSafe whether to use the URL-safe alphabet (usually {@code false});
     *        in URL-safe mode no '=' padding is appended and the output is trimmed
     * @return the encoded bytes; {@code null} if {@code arr} is null, empty array for empty input
     */
    public static byte[] encode(byte[] arr, boolean isMultiLine, boolean isUrlSafe) {
        if (null == arr) {
            return null;
        }
        int len = arr.length;
        if (len == 0) {
            return new byte[0];
        }
        // Number of input bytes that form complete 3-byte groups.
        int evenlen = (len / 3) * 3;
        // Number of output characters (4 per started 3-byte group), excluding line breaks.
        int cnt = ((len - 1) / 3 + 1) << 2;
        // Total output length, adding two bytes (CR+LF) per completed 76-character line.
        int destlen = cnt + (isMultiLine ? (cnt - 1) / 76 << 1 : 0);
        byte[] dest = new byte[destlen];
        byte[] encodeTable = isUrlSafe ? URL_SAFE_ENCODE_TABLE : STANDARD_ENCODE_TABLE;
        for (int s = 0, d = 0, cc = 0; s < evenlen;) {
            // Pack three input bytes into one 24-bit group.
            int i = (arr[s++] & 0xff) << 16 | (arr[s++] & 0xff) << 8 | (arr[s++] & 0xff);
            dest[d++] = encodeTable[(i >>> 18) & 0x3f];
            dest[d++] = encodeTable[(i >>> 12) & 0x3f];
            dest[d++] = encodeTable[(i >>> 6) & 0x3f];
            dest[d++] = encodeTable[i & 0x3f];
            // 19 groups * 4 chars = 76 characters per line; don't break inside the final padding.
            if (isMultiLine && ++cc == 19 && d < destlen - 2) {
                dest[d++] = '\r';
                dest[d++] = '\n';
                cc = 0;
            }
        }
        int left = len - evenlen; // number of leftover input bytes (0, 1 or 2)
        if (left > 0) {
            // Pack the leftover byte(s) left-aligned into a partial group.
            int i = ((arr[evenlen] & 0xff) << 10) | (left == 2 ? ((arr[len - 1] & 0xff) << 2) : 0);
            dest[destlen - 4] = encodeTable[i >> 12];
            dest[destlen - 3] = encodeTable[(i >>> 6) & 0x3f];
            if (isUrlSafe) {
                // In URL-safe mode '=' is a reserved URL character, so no padding is
                // appended; trim the unused trailing bytes instead.
                int urlSafeLen = destlen - 2;
                if (2 == left) {
                    dest[destlen - 2] = encodeTable[i & 0x3f];
                    urlSafeLen += 1;
                }
                byte[] urlSafeDest = new byte[urlSafeLen];
                System.arraycopy(dest, 0, urlSafeDest, 0, urlSafeLen);
                return urlSafeDest;
            } else {
                dest[destlen - 2] = (left == 2) ? encodeTable[i & 0x3f] : (byte) '=';
                dest[destlen - 1] = '=';
            }
        }
        return dest;
    }
}
public class LongTupleStreams {

    /**
     * Returns a stream that returns {@link MutableLongTuple}s up to the given
     * maximum, in lexicographical iteration order.<br>
     * <br>
     * A copy of the given tuple will be stored internally.<br>
     * <br>
     * Also see <a href="../../package-summary.html#IterationOrder">Iteration Order</a>
     *
     * @param max The maximum values, exclusive
     * @return The stream
     */
    public static Stream<MutableLongTuple> lexicographicalStream(LongTuple max) {
        // Start from the all-zero tuple of the same size and iterate lexicographically up to max.
        return stream(Order.LEXICOGRAPHICAL, LongTuples.zero(max.getSize()), max);
    }
}
public class SuggestedFixes {

    /**
     * Returns a human-friendly name of the given type for use in fixes.
     *
     * <p>Array types render as the component name plus {@code []}; declared types have their
     * symbol qualified via {@code qualifyType} and their type arguments rendered recursively
     * as {@code <A,B,...>}; any other type falls back to {@code toString()}.
     */
    public static String qualifyType(VisitorState state, SuggestedFix.Builder fix, TypeMirror type) {
        return type.accept(new SimpleTypeVisitor8<String, SuggestedFix.Builder>() {
            @Override
            protected String defaultAction(TypeMirror e, Builder builder) {
                // Fallback for primitives, type variables, etc.
                return e.toString();
            }

            @Override
            public String visitArray(ArrayType t, Builder builder) {
                return t.getComponentType().accept(this, builder) + "[]";
            }

            @Override
            public String visitDeclared(DeclaredType t, Builder builder) {
                String baseType = qualifyType(state, builder, ((Type) t).tsym);
                if (t.getTypeArguments().isEmpty()) {
                    return baseType;
                }
                // Render type arguments recursively, comma-separated, inside angle brackets.
                StringBuilder b = new StringBuilder(baseType);
                b.append('<');
                boolean started = false;
                for (TypeMirror arg : t.getTypeArguments()) {
                    if (started) {
                        b.append(',');
                    }
                    b.append(arg.accept(this, builder));
                    started = true;
                }
                b.append('>');
                return b.toString();
            }
        }, fix);
    }
}
public class BigtableTableAdminClientWrapper {

    /**
     * {@inheritDoc}
     *
     * <p>Converts the model {@code CreateTableRequest} to its protobuf form, issues the
     * asynchronous create call on the delegate, and adapts the resulting protobuf
     * {@code Table} future back into the model {@code Table} type.
     */
    @Override
    public ApiFuture<Table> createTableAsync(CreateTableRequest request) {
        com.google.bigtable.admin.v2.CreateTableRequest requestProto =
                request.toProto(instanceName.getProjectId(), instanceName.getInstanceId());
        return ApiFutureUtil.transformAndAdapt(delegate.createTableAsync(requestProto),
                new Function<com.google.bigtable.admin.v2.Table, Table>() {
                    @Override
                    public Table apply(com.google.bigtable.admin.v2.Table tableProto) {
                        // Convert the protobuf table back to the wrapper's model type.
                        return Table.fromProto(tableProto);
                    }
                });
    }
}
public class CPAttachmentFileEntryPersistenceImpl {

    /**
     * Returns an ordered range of all the cp attachment file entries where
     * classNameId = &#63; and classPK = &#63;.
     *
     * <p>Useful when paginating results. Returns a maximum of <code>end - start</code>
     * instances. <code>start</code> and <code>end</code> are indexes in the result set,
     * not primary keys. Setting both to {@link QueryUtil#ALL_POS} returns the full result
     * set. If <code>orderByComparator</code> is absent and pagination is required, the
     * default ORDER BY from {@link CPAttachmentFileEntryModelImpl} is used; if both are
     * absent the result is sorted by primary key ascending.
     *
     * @param classNameId the class name ID
     * @param classPK the class pk
     * @param start the lower bound of the range of cp attachment file entries
     * @param end the upper bound of the range (not inclusive)
     * @param orderByComparator the comparator to order the results by (optionally <code>null</code>)
     * @param retrieveFromCache whether to retrieve from the finder cache
     * @return the ordered range of matching cp attachment file entries
     */
    @Override
    public List<CPAttachmentFileEntry> findByC_C(long classNameId, long classPK, int start, int end,
            OrderByComparator<CPAttachmentFileEntry> orderByComparator, boolean retrieveFromCache) {
        boolean pagination = true;
        FinderPath finderPath = null;
        Object[] finderArgs = null;

        // Unpaginated, unordered queries use the cheaper "without pagination" finder path.
        if ((start == QueryUtil.ALL_POS) && (end == QueryUtil.ALL_POS) && (orderByComparator == null)) {
            pagination = false;
            finderPath = FINDER_PATH_WITHOUT_PAGINATION_FIND_BY_C_C;
            finderArgs = new Object[] {classNameId, classPK};
        } else {
            finderPath = FINDER_PATH_WITH_PAGINATION_FIND_BY_C_C;
            finderArgs = new Object[] {classNameId, classPK, start, end, orderByComparator};
        }

        List<CPAttachmentFileEntry> list = null;

        if (retrieveFromCache) {
            list = (List<CPAttachmentFileEntry>) finderCache.getResult(finderPath, finderArgs, this);

            // Validate that cached rows really match the requested key; discard on mismatch.
            if ((list != null) && !list.isEmpty()) {
                for (CPAttachmentFileEntry cpAttachmentFileEntry : list) {
                    if ((classNameId != cpAttachmentFileEntry.getClassNameId()) ||
                            (classPK != cpAttachmentFileEntry.getClassPK())) {
                        list = null;
                        break;
                    }
                }
            }
        }

        if (list == null) {
            StringBundler query = null;

            // 4 fixed fragments, plus 2 per ORDER BY field when a comparator is supplied.
            if (orderByComparator != null) {
                query = new StringBundler(4 + (orderByComparator.getOrderByFields().length * 2));
            } else {
                query = new StringBundler(4);
            }

            query.append(_SQL_SELECT_CPATTACHMENTFILEENTRY_WHERE);
            query.append(_FINDER_COLUMN_C_C_CLASSNAMEID_2);
            query.append(_FINDER_COLUMN_C_C_CLASSPK_2);

            if (orderByComparator != null) {
                appendOrderByComparator(query, _ORDER_BY_ENTITY_ALIAS, orderByComparator);
            } else if (pagination) {
                query.append(CPAttachmentFileEntryModelImpl.ORDER_BY_JPQL);
            }

            String sql = query.toString();

            Session session = null;

            try {
                session = openSession();

                Query q = session.createQuery(sql);

                QueryPos qPos = QueryPos.getInstance(q);
                qPos.add(classNameId);
                qPos.add(classPK);

                if (!pagination) {
                    list = (List<CPAttachmentFileEntry>) QueryUtil.list(q, getDialect(), start, end, false);

                    // No pagination: sort in memory and expose an immutable view.
                    Collections.sort(list);

                    list = Collections.unmodifiableList(list);
                } else {
                    list = (List<CPAttachmentFileEntry>) QueryUtil.list(q, getDialect(), start, end);
                }

                cacheResult(list);

                finderCache.putResult(finderPath, finderArgs, list);
            } catch (Exception e) {
                // Evict the finder entry so a failed query is not cached.
                finderCache.removeResult(finderPath, finderArgs);

                throw processException(e);
            } finally {
                closeSession(session);
            }
        }

        return list;
    }
}
public class WriteRequestMarshaller {

    /**
     * Marshalls each field of the given {@code WriteRequest} into the protocol
     * marshaller using the generated field bindings.
     *
     * @throws SdkClientException if the request is null or any field fails to marshall
     */
    public void marshall(WriteRequest writeRequest, ProtocolMarshaller protocolMarshaller) {
        if (writeRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(writeRequest.getPutRequest(), PUTREQUEST_BINDING);
            protocolMarshaller.marshall(writeRequest.getDeleteRequest(), DELETEREQUEST_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client-side exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class GenericEncodingStrategy {

    /**
     * Generates code that stores a one or four byte generation value into a
     * byte array referenced by the local variable.
     *
     * @param encodedVar references a byte array
     * @param offset offset into byte array; must be non-negative
     * @param generation if less than zero, no code is generated
     * @throws IllegalArgumentException if {@code offset} is negative
     */
    private void encodeGeneration(CodeAssembler a, LocalVariable encodedVar, int offset, int generation) {
        if (offset < 0) {
            throw new IllegalArgumentException();
        }
        if (generation < 0) {
            return;
        }
        if (generation < 128) {
            // Small generations fit in a single byte with the high bit clear.
            a.loadLocal(encodedVar);
            a.loadConstant(offset);
            a.loadConstant((byte) generation);
            a.storeToArray(TypeDesc.BYTE);
        } else {
            // Set the high bit to mark the four-byte form, then emit the value big-endian.
            generation |= 0x80000000;
            for (int i = 0; i < 4; i++) {
                a.loadLocal(encodedVar);
                a.loadConstant(offset + i);
                a.loadConstant((byte) (generation >> (8 * (3 - i))));
                a.storeToArray(TypeDesc.BYTE);
            }
        }
    }
}
public class SuperActivityToast { /** * Saves the state of all SuperToasts that are showing and / or pending . * This should be called in the { @ link android . app . Activity # onSaveInstanceState ( android . os . Bundle ) } * method of your Activity . * @ param bundle The Bundle provided in onSaveInstanceState ( ) */ @ SuppressWarnings ( "unchecked" ) public static void onSaveState ( Bundle bundle ) { } }
final ArrayList < Style > styleList = new ArrayList ( ) ; // Create a list of every Style used by a SuperToast in the queue for ( SuperToast superToast : Toaster . getInstance ( ) . getQueue ( ) ) { if ( superToast instanceof SuperActivityToast ) { superToast . getStyle ( ) . isSuperActivityToast = true ; } styleList . add ( superToast . getStyle ( ) ) ; } bundle . putParcelableArrayList ( BUNDLE_KEY , styleList ) ; // Let ' s avoid any erratic behavior and cancel any showing / pending SuperActivityToasts manually Toaster . getInstance ( ) . cancelAllSuperToasts ( ) ;
public class AbstractHealthIndicator { /** * Builds the whole health result . * @ return The health result to provide to the indicator . */ protected HealthResult getHealthResult ( ) { } }
HealthResult . Builder builder = HealthResult . builder ( getName ( ) ) ; try { builder . details ( getHealthInformation ( ) ) ; builder . status ( this . healthStatus ) ; } catch ( Exception e ) { builder . status ( HealthStatus . DOWN ) ; builder . exception ( e ) ; } return builder . build ( ) ;
public class SelfCalls {

    /**
     * Get an Iterator over all self call sites.
     *
     * <p>Adapts the call graph's edge iterator, mapping each edge to its call site;
     * {@code remove()} delegates to the underlying edge iterator.
     */
    public Iterator<CallSite> callSiteIterator() {
        return new Iterator<CallSite>() {
            private final Iterator<CallGraphEdge> iter = callGraph.edgeIterator();

            @Override
            public boolean hasNext() {
                return iter.hasNext();
            }

            @Override
            public CallSite next() {
                // Project each edge onto its call site.
                return iter.next().getCallSite();
            }

            @Override
            public void remove() {
                iter.remove();
            }
        };
    }
}
public class BuildCache { /** * This method creates a string describing a cycle which has been detected . * It should only be called if a cycle with the specified dependency has * actually been detected . * @ param objectName * name of the object which has the dependency * @ param dependencyName * name of the object objectName depends on * @ return String describing the cyclic dependency */ synchronized private String getCycle ( String objectName , String dependencyName ) { } }
// Determine if adding this dependency will create a cycle . StringBuilder sb = new StringBuilder ( ) ; sb . append ( objectName ) ; sb . append ( " -> " ) ; sb . append ( dependencyName ) ; String nextObjectName = dependencies . get ( dependencyName ) ; while ( nextObjectName != null && ! objectName . equals ( nextObjectName ) ) { sb . append ( " -> " ) ; sb . append ( nextObjectName ) ; nextObjectName = dependencies . get ( nextObjectName ) ; } sb . append ( " -> " ) ; sb . append ( objectName ) ; return sb . toString ( ) ;
public class DirectoryHelper {

    /**
     * Returns the size in bytes of a directory including all subdirectories.
     *
     * <p>Fix: {@code File.listFiles()} returns {@code null} when the path is not a
     * directory or an I/O error occurs; the original code dereferenced that result
     * and threw {@code NullPointerException}. A regular file now contributes its
     * own length and an unreadable/missing path contributes 0.
     *
     * @param dir the directory (or file) to measure
     * @return the total size in bytes
     */
    public static long getSize(File dir) {
        File[] entries = dir.listFiles();
        if (entries == null) {
            // Not a directory (or unreadable): count a plain file's own length, else nothing.
            return dir.isFile() ? dir.length() : 0L;
        }
        long size = 0;
        for (File file : entries) {
            if (file.isFile()) {
                size += file.length();
            } else {
                size += getSize(file);
            }
        }
        return size;
    }
}
public class TCPConnectRequestContextFactory {

    /**
     * Create a new connection request context based upon the input needed to fully define
     * the context. The local address is assumed to be null, and the local port
     * will be an ephemeral port.
     *
     * @param _remoteHostName host name of the remote side of the connection
     * @param _remotePort port to be used by the remote side of the connection
     * @param _timeout timeout for waiting for the connection to complete
     * @return a connect request context to be used by the channel connection
     */
    public TCPConnectRequestContext createTCPConnectRequestContext(String _remoteHostName, int _remotePort, int _timeout) {
        // The impl supplies the null local address / ephemeral local port defaults.
        return new TCPConnectRequestContextImpl(_remoteHostName, _remotePort, _timeout);
    }
}
public class Filters {

    /**
     * Returns a filter that rejects exactly the elements contained in the given collection.
     *
     * @param <E> the element type
     * @param objs the collection whose members should be rejected
     * @return a filter built from {@code objs} with acceptance inverted
     */
    public static <E> Filter<E> collectionRejectFilter(Collection<E> objs) {
        // Reuse the accept filter with its accept flag set to false.
        return new CollectionAcceptFilter<E>(objs, false);
    }
}
public class EventSubscriptionsInner {

    /**
     * Lists all global event subscriptions under an Azure subscription for a
     * topic type, asynchronously.
     *
     * @param topicTypeName   name of the topic type
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses
     * @return the {@link ServiceFuture} object
     * @throws IllegalArgumentException if parameters fail validation
     */
    public ServiceFuture<List<EventSubscriptionInner>> listGlobalBySubscriptionForTopicTypeAsync(String topicTypeName, final ServiceCallback<List<EventSubscriptionInner>> serviceCallback) {
        // Adapt the service-response observable into a ServiceFuture that
        // notifies the supplied callback on completion or failure.
        return ServiceFuture.fromResponse(listGlobalBySubscriptionForTopicTypeWithServiceResponseAsync(topicTypeName), serviceCallback);
    }
}
public class QueueFactories { /** * Creates an async . Queue backed by an Agrona ManyToOneConcurrentArrayQueue bounded by specified queueSize * Wait strategy used is NoWaitRetry by default for both Consumers and Producers * ( both Consumers and Producers will repeatedly retry until successful ) . Use * withConsumerWaitStrategy & amp ; withProducerWaitStrategy methods on the returned queue to change the * wait strategy * < pre > * { @ code * queue . withConsumerWaitStrategy ( new DirectWaitStrategy ( ) ) * . withProducerWaitStrategy ( new YieldWait ( ) ) ; * } < / pre > * @ param queueSize upper bound for Queue * @ return bounded wait free Queue Factory backed by an Agrona ManyToOneConcurrentArrayQueue */ public static < T > QueueFactory < T > boundedNonBlockingQueue ( final int queueSize ) { } }
return ( ) -> new Queue < T > ( new ManyToOneConcurrentArrayQueue < > ( queueSize ) , new NoWaitRetry < > ( ) , new NoWaitRetry < > ( ) ) ;
public class ScalarFieldUpdater { /** * Add new SV scalar field . */ private void addSVScalar ( ) { } }
String fieldValue = m_dbObj . getFieldValue ( m_fieldName ) ; m_dbTran . addScalarValueColumn ( m_tableDef , m_dbObj . getObjectID ( ) , m_fieldName , fieldValue ) ; addTermColumns ( fieldValue ) ;
public class A_CmsJSPAction { /** * Creates string for get — request with given list of resources . < p > * @ param resources to be transmitted * @ return valid string for get - request */ protected String getRequestString ( List < CmsResource > resources ) { } }
String res = "?" ; for ( CmsResource resource : resources ) { res += "resources=" + resource . getStructureId ( ) . getStringValue ( ) + "&" ; } return res . substring ( 0 , res . length ( ) - 1 ) ; // Remove last " & "
public class Async {

    /**
     * Runs the given functions in series over a shared context. If any
     * function passes an error to its callback, the remaining functions are
     * skipped and the outcome is invoked immediately with that error.
     *
     * @param context   shared mutable context passed to every function
     * @param outcome   invoked once with the final result or the first error
     * @param functions the functions to execute in order
     */
    @SafeVarargs
    public final void waterfall(final C context, final Outcome<C> outcome, final Function<C>... functions) {
        // Sequential execution with short-circuit-on-error is implemented by
        // the shared _series helper.
        _series(context, outcome, functions);
    }
}
public class DocumentTypeUrl { /** * Get Resource Url for GetDocumentType * @ param documentTypeName The name of the document type to retrieve . * @ param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object . This parameter should only be used to retrieve data . Attempting to update data using this parameter may cause data loss . * @ return String Resource Url */ public static MozuUrl getDocumentTypeUrl ( String documentTypeName , String responseFields ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/content/documenttypes/{documentTypeName}?responseFields={responseFields}" ) ; formatter . formatUrl ( "documentTypeName" , documentTypeName ) ; formatter . formatUrl ( "responseFields" , responseFields ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class WebhookAction { /** * Sets the < b > Name < / b > for the custom Webhook User * @ param name * A not - null String name for the new Webhook user . * @ throws IllegalArgumentException * If the specified name is not in the range of 2-100. * @ return The current WebhookAction for chaining convenience . */ @ CheckReturnValue public WebhookAction setName ( String name ) { } }
Checks . notNull ( name , "Webhook name" ) ; Checks . check ( name . length ( ) >= 2 && name . length ( ) <= 100 , "The webhook name must be in the range of 2-100!" ) ; this . name = name ; return this ;
public class HashtableOnDisk {

    /**
     * Initializes a HashtableOnDisk instance over the specified FileManager,
     * from the specified instance id (the id used when the instance was
     * originally created via createInstance).
     *
     * @param filemgr       the FileManager for the HTOD
     * @param auto_rehash   if true, the HTOD automatically doubles in capacity
     *                      when its occupancy exceeds its threshold; if false
     *                      it grows only when startRehash() is invoked
     * @param instanceid    the instance of the HTOD in the FileManager; 0 means
     *                      "use the FileManager's starting instance"
     * @param initfn        callback each object is passed to on initialization,
     *                      only if the HTOD was not previously closed properly
     * @param hasCacheValue whether entries carry a cache value
     * @param htoddc        the owning HTODDynacache
     * @return the opened HashtableOnDisk, or null if the file is empty and no
     *         such instance exists
     */
    static public HashtableOnDisk getStaticInstance(FileManager filemgr, boolean auto_rehash, long instanceid, HashtableInitInterface initfn, boolean hasCacheValue, HTODDynacache htoddc)
            throws FileManagerException, ClassNotFoundException, IOException, HashtableOnDiskException {
        if (instanceid == 0) {
            // No instance id supplied: fall back to the FileManager's
            // starting instance.
            instanceid = filemgr.start();
        }
        HashtableOnDisk answer = null;
        try {
            answer = new HashtableOnDisk(filemgr, auto_rehash, instanceid, initfn, hasCacheValue, htoddc);
        } catch (EOFException e) {
            // eof means the file is empty and there is no such instance --
            // deliberately swallowed so that null is returned instead.
        }
        return answer;
    }
}
public class TwitterImpl {

    /**
     * Uploads one chunk (APPEND phase) of a chunked media upload, equivalent
     * to {@code --file /path/to/video.mp4 --file-field "media"}.
     *
     * @param fileName     name reported for the uploaded file field
     * @param media        stream supplying this chunk's bytes
     * @param segmentIndex zero-based position of this chunk in the upload
     * @param mediaId      id returned by the INIT phase
     * @throws TwitterException if the POST fails
     */
    private void uploadMediaChunkedAppend(String fileName, InputStream media, int segmentIndex, long mediaId) throws TwitterException {
        // POST the chunk to the media upload endpoint; "media" is sent as a
        // file field and segment_index orders the chunks server-side.
        post(conf.getUploadBaseURL() + "media/upload.json",
                new HttpParameter[]{
                        new HttpParameter("command", CHUNKED_APPEND),
                        new HttpParameter("media_id", mediaId),
                        new HttpParameter("segment_index", segmentIndex),
                        new HttpParameter("media", fileName, media)});
    }
}
public class RayHandler {

    /**
     * Sets the combined camera matrix.
     *
     * <p>The matrix must be set to work in box2d coordinates; it is copied and
     * used for culling and rendering, so it must be re-set whenever the camera
     * changes. NOTE: the matrix is assumed to be orthogonal for culling and
     * directional lights.
     *
     * @param combined matrix including projection and translation
     * @deprecated use {@code setCombinedMatrix(OrthographicCamera)} or
     *             {@code setCombinedMatrix(Matrix4, float, float, float, float)} instead
     */
    @Deprecated
    public void setCombinedMatrix(Matrix4 combined) {
        // Copy the caller's matrix so later mutations on their side don't
        // affect culling/rendering.
        System.arraycopy(combined.val, 0, this.combined.val, 0, 16);
        // updateCameraCorners: recover the camera's view rectangle from the
        // orthographic projection terms (valid only if the matrix is
        // orthogonal, as documented above).
        float invWidth = combined.val[Matrix4.M00];
        final float halfViewPortWidth = 1f / invWidth;
        final float x = -halfViewPortWidth * combined.val[Matrix4.M03];
        x1 = x - halfViewPortWidth;
        x2 = x + halfViewPortWidth;
        float invHeight = combined.val[Matrix4.M11];
        final float halfViewPortHeight = 1f / invHeight;
        final float y = -halfViewPortHeight * combined.val[Matrix4.M13];
        y1 = y - halfViewPortHeight;
        y2 = y + halfViewPortHeight;
    }
}
public class AbsSetting { /** * 获得数组型 * @ param key 属性名 * @ param defaultValue 默认的值 * @ return 属性值 */ public String [ ] getStringsWithDefault ( String key , String [ ] defaultValue ) { } }
String [ ] value = getStrings ( key , null ) ; if ( null == value ) { value = defaultValue ; } return value ;
public class GroupBuilder { /** * Registers a variable for later reference inside custom expressions . * The new variable will perform the calculation using the field information passed in the ColumnProperty * parameter . Such ColumnProperty will be properly registered in the report design as a field ( $ F { . . . } ) * @ param name * @ param property * @ param className * @ param operation * @ return */ public GroupBuilder addVariable ( String name , String property , String className , DJCalculation operation ) { } }
group . getVariables ( ) . add ( new DJGroupVariableDef ( name , new ColumnProperty ( property , className ) , operation ) ) ; return this ;
public class PrefixedProperties { /** * Gets the prefixed key and parse it to an boolean [ ] < br > * Each comma - separated list can be used . * @ param key * the key * @ return boolean [ ] or null if the key couldn ' t get found . */ public boolean [ ] getBooleanArray ( final String key ) { } }
final String [ ] value = getArray ( key ) ; if ( value != null ) { final boolean [ ] result = new boolean [ value . length ] ; for ( int i = 0 ; i < value . length ; i ++ ) { result [ i ] = Boolean . valueOf ( value [ i ] ) . booleanValue ( ) ; } return result ; } return null ;
public class LoggingInfoMarshaller {

    /**
     * Marshalls the given LoggingInfo into the protocol marshaller.
     *
     * @param loggingInfo        the object to marshall; must not be null
     * @param protocolMarshaller sink receiving each bound field
     * @throws SdkClientException if loggingInfo is null or marshalling fails
     */
    public void marshall(LoggingInfo loggingInfo, ProtocolMarshaller protocolMarshaller) {
        if (loggingInfo == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each field with its protocol binding; order matches the
            // generated model.
            protocolMarshaller.marshall(loggingInfo.getS3BucketName(), S3BUCKETNAME_BINDING);
            protocolMarshaller.marshall(loggingInfo.getS3KeyPrefix(), S3KEYPREFIX_BINDING);
            protocolMarshaller.marshall(loggingInfo.getS3Region(), S3REGION_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class DescribeSMBFileSharesRequest { /** * An array containing the Amazon Resource Name ( ARN ) of each file share to be described . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setFileShareARNList ( java . util . Collection ) } or { @ link # withFileShareARNList ( java . util . Collection ) } if you * want to override the existing values . * @ param fileShareARNList * An array containing the Amazon Resource Name ( ARN ) of each file share to be described . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeSMBFileSharesRequest withFileShareARNList ( String ... fileShareARNList ) { } }
if ( this . fileShareARNList == null ) { setFileShareARNList ( new com . amazonaws . internal . SdkInternalList < String > ( fileShareARNList . length ) ) ; } for ( String ele : fileShareARNList ) { this . fileShareARNList . add ( ele ) ; } return this ;
public class EqualsCheckerCollection { /** * This method is called from { @ link # isEqualNotNull ( Object , Object ) } to check if the { @ link Object # getClass ( ) type } * of two given { @ link Collection } s should be considered as equal . * @ param collection1 is the first { @ link Collection } . * @ param collection2 is the second { @ link Collection } . * @ return { @ code true } if the { @ link Object # getClass ( ) type } of the given { @ link Collection } s is considered as equal , * { @ code false } otherwise . */ protected boolean isEqualCollectionType ( Collection < ? > collection1 , Collection < ? > collection2 ) { } }
return collection1 . getClass ( ) . equals ( collection2 . getClass ( ) ) ;
public class IDCreator {

    /**
     * Resets all id counters so that simple IDs keep being generated within a
     * single chem object (or a set of them).
     */
    private static void resetCounters() {
        // Atom-level counters.
        atomCount = 0;
        bondCount = 0;
        // Container counters.
        atomContainerCount = 0;
        atomContainerSetCount = 0;
        // Reaction counters.
        reactionCount = 0;
        reactionSetCount = 0;
        // Model / sequence / file counters.
        chemModelCount = 0;
        chemSequenceCount = 0;
        chemFileCount = 0;
    }
}
public class AppenderFile {

    /**
     * Keeps log files close to their maximum size: when the current main log
     * file (index 1) exceeds the configured limit, the oldest file is deleted
     * and every remaining file is shifted up one index, freeing index 1.
     */
    private void rollOverFiles() {
        // appContextRef is a weak/soft reference; the Context may be gone.
        Context context = appContextRef.get();
        if (context != null) {
            File dir = context.getFilesDir();
            File mainFile = new File(dir, name(1));
            if (mainFile.exists()) {
                float fileSize = mainFile.length();
                fileSize = fileSize / 1024.0f; // In kilobytes
                if (fileSize > fileSizeLimitKb) {
                    File file;
                    File target;
                    // Drop the oldest file to make room for the shift below.
                    // NOTE(review): delete()/renameTo() return values are
                    // ignored; failures are silently tolerated.
                    file = new File(dir, name(maxFiles));
                    if (file.exists()) {
                        file.delete();
                    }
                    // Shift every remaining file up by one index (i -> i+1).
                    for (int i = maxFiles - 1; i > 0; i--) {
                        file = new File(dir, name(i));
                        if (file.exists()) {
                            target = new File(dir, name(i + 1));
                            file.renameTo(target);
                        }
                    }
                }
            }
        }
    }
}
public class ValidationDataRepository { /** * Find by param type and method and url list . * @ param paramType the param type * @ param method the method * @ param url the url * @ return the list */ public List < ValidationData > findByParamTypeAndMethodAndUrl ( ParamType paramType , String method , String url ) { } }
return this . findByMethodAndUrl ( method , url ) . stream ( ) . filter ( d -> d . getParamType ( ) . equals ( paramType ) ) . collect ( Collectors . toList ( ) ) ;
public class TileWriter {

    /**
     * Creates the folder (and any missing parents) for the given file,
     * tolerating concurrent creation by another thread: if mkdirs() fails,
     * waits briefly and re-checks existence before reporting failure.
     *
     * @param pFile the directory to create
     * @return true if the directory exists after this call, false otherwise
     */
    private boolean createFolderAndCheckIfExists(final File pFile) {
        if (pFile.mkdirs()) {
            return true;
        }
        if (Configuration.getInstance().isDebugMode()) {
            Log.d(IMapView.LOGTAG, "Failed to create " + pFile + " - wait and check again");
        }
        // if create failed, wait a bit in case another thread created it
        try {
            Thread.sleep(500);
        } catch (final InterruptedException ignore) {
            // best-effort wait; fall through to the existence check
        }
        // and then check again
        if (pFile.exists()) {
            if (Configuration.getInstance().isDebugMode()) {
                Log.d(IMapView.LOGTAG, "Seems like another thread created " + pFile);
            }
            return true;
        } else {
            if (Configuration.getInstance().isDebugMode()) {
                Log.d(IMapView.LOGTAG, "File still doesn't exist: " + pFile);
            }
            return false;
        }
    }
}
public class CodecUtil { /** * Term value . * @ param term * the term * @ return the string */ public static String termValue ( String term ) { } }
int i = term . indexOf ( MtasToken . DELIMITER ) ; String value = null ; if ( i >= 0 ) { value = term . substring ( ( i + MtasToken . DELIMITER . length ( ) ) ) ; value = ( value . length ( ) > 0 ) ? value : null ; } return ( value == null ) ? null : value . replace ( "\u0000" , "" ) ;
public class FixedWidthReader { /** * Estimates and returns the type for each column in the delimited text file { @ code file } * The type is determined by checking a sample of the data in the file . Because only a sample of the data is * checked , * the types may be incorrect . If that is the case a Parse Exception will be thrown . * The method { @ code printColumnTypes ( ) } can be used to print a list of the detected columns that can be * corrected and * used to explicitly specify the correct column types . */ public ColumnType [ ] detectColumnTypes ( Reader reader , FixedWidthReadOptions options ) { } }
boolean header = options . header ( ) ; int linesToSkip = header ? 1 : 0 ; AbstractParser < ? > parser = fixedWidthParser ( options ) ; try { return getTypes ( reader , options , linesToSkip , parser ) ; } finally { parser . stopParsing ( ) ; // we don ' t close the reader since we didn ' t create it }
public class QueryOptionsListHandle { /** * Returns a HashMap of the named query options from the server . * The keys are the names of the query options , the values are the corresponding URIs on the server . * @ return The map of names to URIs . */ @ Override public HashMap < String , String > getValuesMap ( ) { } }
if ( optionsHolder == null ) return null ; else return optionsHolder . getOptionsMap ( ) ;
public class AbstractJobLauncher {

    /**
     * Starts the executor responsible for job cancellation. The executor waits
     * on {@code cancellationRequest} until a cancellation is requested, then
     * runs {@link #executeCancellation()}. When execution completes it sets
     * {@code cancellationExecuted}, moves the job state to
     * {@code JobState.RunningState.CANCELLED}, and notifies waiters on
     * {@code cancellationExecution} so the requester is unblocked.
     */
    protected void startCancellationExecutor() {
        this.cancellationExecutor.execute(new Runnable() {
            @Override
            public void run() {
                // Phase 1: block until a cancellation request arrives.
                synchronized (AbstractJobLauncher.this.cancellationRequest) {
                    try {
                        while (!AbstractJobLauncher.this.cancellationRequested) {
                            // Wait for a cancellation request to arrive
                            AbstractJobLauncher.this.cancellationRequest.wait();
                        }
                        LOG.info("Cancellation has been requested for job " + AbstractJobLauncher.this.jobContext.getJobId());
                        executeCancellation();
                        LOG.info("Cancellation has been executed for job " + AbstractJobLauncher.this.jobContext.getJobId());
                    } catch (InterruptedException ie) {
                        // Restore the interrupt status so the executor's
                        // thread can observe it.
                        Thread.currentThread().interrupt();
                    }
                }
                // Phase 2: publish completion on a separate monitor so the
                // requester (waiting on cancellationExecution) is released.
                synchronized (AbstractJobLauncher.this.cancellationExecution) {
                    AbstractJobLauncher.this.cancellationExecuted = true;
                    AbstractJobLauncher.this.jobContext.getJobState().setState(JobState.RunningState.CANCELLED);
                    // Notify that the cancellation has been executed
                    AbstractJobLauncher.this.cancellationExecution.notifyAll();
                }
            }
        });
    }
}
public class DecimalFormat { /** * < strong > [ icu ] < / strong > Sets the minimum number of significant digits that will be displayed . If * < code > min < / code > is less than one then it is set to one . If the maximum significant * digits count is less than < code > min < / code > , then it is set to < code > min < / code > . * This function also enables the use of significant digits by this formatter - * { @ link # areSignificantDigitsUsed ( ) } will return true . * @ param min the fewest significant digits to be shown */ public void setMinimumSignificantDigits ( int min ) { } }
if ( min < 1 ) { min = 1 ; } // pin max sig dig to > = min int max = Math . max ( maxSignificantDigits , min ) ; minSignificantDigits = min ; maxSignificantDigits = max ; setSignificantDigitsUsed ( true ) ;
public class BatchSuspendUserRequestMarshaller {

    /**
     * Marshalls the given BatchSuspendUserRequest into the protocol marshaller.
     *
     * @param batchSuspendUserRequest the request to marshall; must not be null
     * @param protocolMarshaller      sink receiving each bound field
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(BatchSuspendUserRequest batchSuspendUserRequest, ProtocolMarshaller protocolMarshaller) {
        if (batchSuspendUserRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each field with its protocol binding.
            protocolMarshaller.marshall(batchSuspendUserRequest.getAccountId(), ACCOUNTID_BINDING);
            protocolMarshaller.marshall(batchSuspendUserRequest.getUserIdList(), USERIDLIST_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Validation {

    /**
     * Bootstrap performance estimation of a regression model.
     *
     * @param <T>     the data type of input objects
     * @param k       number of bootstrap samples; must be at least 2
     * @param trainer a properly parameterized regression model trainer
     * @param x       the data set
     * @param y       the response values
     * @param measure the performance measure of regression
     * @return an array of k measure values, one per bootstrap sample
     */
    public static <T> double[] bootstrap(int k, RegressionTrainer<T> trainer, T[] x, double[] y, RegressionMeasure measure) {
        if (k < 2) {
            throw new IllegalArgumentException("Invalid k for k-fold bootstrap: " + k);
        }
        int n = x.length;
        double[] results = new double[k];
        // Bootstrap draws k resampled train/test index splits over n items.
        Bootstrap bootstrap = new Bootstrap(n, k);
        for (int i = 0; i < k; i++) {
            // Train on the i-th bootstrap sample (Math.slice here is smile's
            // array-slicing helper, not java.lang.Math).
            T[] trainx = Math.slice(x, bootstrap.train[i]);
            double[] trainy = Math.slice(y, bootstrap.train[i]);
            Regression<T> model = trainer.train(trainx, trainy);
            // Evaluate on the out-of-bag indices for this sample.
            int nt = bootstrap.test[i].length;
            double[] truth = new double[nt];
            double[] predictions = new double[nt];
            for (int j = 0; j < nt; j++) {
                int l = bootstrap.test[i][j];
                truth[j] = y[l];
                predictions[j] = model.predict(x[l]);
            }
            results[i] = measure.measure(truth, predictions);
        }
        return results;
    }
}
public class EitherT {

    /**
     * {@inheritDoc}
     *
     * <p>Maps {@code fn} over the right value by delegating to the
     * {@code MonadT} default implementation and coercing the result back to
     * {@code EitherT}.
     */
    @Override
    public <R2> EitherT<M, L, R2> fmap(Function<? super R, ? extends R2> fn) {
        return MonadT.super.<R2>fmap(fn).coerce();
    }
}
public class UberData { /** * Use a { @ link Boolean } to support returning { @ literal null } , and if it is { @ literal null } , don ' t render . */ @ Nullable public Boolean isTemplated ( ) { } }
return Optional . ofNullable ( this . url ) . map ( s -> s . contains ( "{?" ) ? true : null ) . orElse ( null ) ;
public class TarBuffer {

    /**
     * Reads one record from the input stream, refilling the block buffer when
     * the current block is exhausted.
     *
     * @return the record data, or null when no further blocks are available
     * @throws IOException if this buffer wraps an output stream, or the read fails
     */
    public byte[] readRecord() throws IOException {
        if (this.debug) {
            System.err.println("ReadRecord: recIdx = " + this.currRecIdx + " blkIdx = " + this.currBlkIdx);
        }
        if (this.inStream == null) {
            // This buffer was opened for writing; reading is invalid.
            throw new IOException("reading from an output buffer");
        }
        if (this.currRecIdx >= this.recsPerBlock) {
            // Current block is consumed: fetch the next one (null == EOF).
            if (!this.readBlock()) {
                return null;
            }
        }
        // Copy the next record out of the block buffer and advance.
        byte[] result = new byte[this.recordSize];
        System.arraycopy(this.blockBuffer, (this.currRecIdx * this.recordSize), result, 0, this.recordSize);
        this.currRecIdx++;
        return result;
    }
}
public class RepositoryXmlHandler {

    /**
     * SAX endElement callback; most descriptor objects are finalized here.
     * Each case clears or restores the "current" parser state (current class
     * descriptor, field, reference, collection, procedure, ...) as its XML
     * element closes, and re-points the attribute container at the enclosing
     * scope. Unknown elements are ignored with a debug log.
     *
     * @param uri   namespace URI (unused)
     * @param name  local name (unused)
     * @param qName qualified element name used to dispatch
     */
    public void endElement(String uri, String name, String qName) {
        boolean isDebug = logger.isDebugEnabled();
        try {
            switch (getLiteralId(qName)) {
                case MAPPING_REPOSITORY: {
                    if (isDebug) logger.debug(" < " + tags.getTagById(MAPPING_REPOSITORY));
                    this.m_CurrentAttrContainer = null;
                    m_CurrentCLD = null;
                    break;
                }
                case CLASS_DESCRIPTOR: {
                    if (isDebug) logger.debug(" < " + tags.getTagById(CLASS_DESCRIPTOR));
                    m_CurrentCLD = null;
                    this.m_CurrentAttrContainer = null;
                    break;
                }
                case OBJECT_CACHE: {
                    if (m_CurrentAttrContainer != null) {
                        if (isDebug) logger.debug(" < " + tags.getTagById(OBJECT_CACHE));
                    }
                    // attributes now belong to the enclosing class descriptor
                    this.m_CurrentAttrContainer = m_CurrentCLD;
                    break;
                }
                case CLASS_EXTENT: {
                    break;
                }
                case FIELD_DESCRIPTOR: {
                    if (isDebug) logger.debug(" < " + tags.getTagById(FIELD_DESCRIPTOR));
                    m_CurrentFLD = null;
                    m_CurrentAttrContainer = m_CurrentCLD;
                    break;
                }
                case REFERENCE_DESCRIPTOR: {
                    if (isDebug) logger.debug(" < " + tags.getTagById(REFERENCE_DESCRIPTOR));
                    m_CurrentORD = null;
                    m_CurrentAttrContainer = m_CurrentCLD;
                    break;
                }
                case FOREIGN_KEY: {
                    if (isDebug) logger.debug(" < " + tags.getTagById(FOREIGN_KEY));
                    break;
                }
                case COLLECTION_DESCRIPTOR: {
                    if (isDebug) logger.debug(" < " + tags.getTagById(COLLECTION_DESCRIPTOR));
                    m_CurrentCOD = null;
                    m_CurrentAttrContainer = m_CurrentCLD;
                    break;
                }
                case INVERSE_FK: {
                    if (isDebug) logger.debug(" < " + tags.getTagById(INVERSE_FK));
                    break;
                }
                case ORDERBY: {
                    if (isDebug) logger.debug(" < " + tags.getTagById(ORDERBY));
                    break;
                }
                case FK_POINTING_TO_THIS_CLASS: {
                    if (isDebug) logger.debug(" < " + tags.getTagById(FK_POINTING_TO_THIS_CLASS));
                    break;
                }
                case FK_POINTING_TO_ITEMS_CLASS: {
                    if (isDebug) logger.debug(" < " + tags.getTagById(FK_POINTING_TO_ITEMS_CLASS));
                    break;
                }
                case ATTRIBUTE: {
                    if (m_CurrentAttrContainer != null) {
                        if (isDebug) logger.debug(" < " + tags.getTagById(ATTRIBUTE));
                    }
                    break;
                }
                case DOCUMENTATION: {
                    if (isDebug) logger.debug(" < " + tags.getTagById(DOCUMENTATION));
                    break;
                }
                // case SEQUENCE_MANAGER:
                //     // currently not used on class-descriptor level
                //     // if (isDebug) logger.debug(" < " + tags.getTagById(SEQUENCE_MANAGER));
                //     this.m_CurrentAttrContainer = null;
                //     break;
                // case CONNECTION_POOL:
                //     // not used on class-descriptor level
                //     // if (isDebug) logger.debug(" < " + tags.getTagById(CONNECTION_POOL));
                //     this.m_CurrentAttrContainer = null;
                //     break;
                // case JDBC_CONNECTION_DESCRIPTOR:
                //     // not used on class-descriptor level
                //     // if (isDebug) logger.debug(" < " + tags.getTagById(JDBC_CONNECTION_DESCRIPTOR));
                //     this.m_CurrentAttrContainer = null;
                //     break;
                case QUERY_CUSTOMIZER: {
                    m_CurrentAttrContainer = m_CurrentCOD;
                    break;
                }
                case INDEX_DESCRIPTOR: {
                    // attach the completed index to its class descriptor
                    m_CurrentCLD.getIndexes().add(m_CurrentIndexDescriptor);
                    m_CurrentIndexDescriptor = null;
                    break;
                }
                case INDEX_COLUMN: {
                    // ignore; all processing done in startElement
                    break;
                }
                case INSERT_PROCEDURE: {
                    if (isDebug) logger.debug(" < " + tags.getTagById(INSERT_PROCEDURE));
                    m_CurrentCLD.setInsertProcedure((InsertProcedureDescriptor) m_CurrentProcedure);
                    m_CurrentProcedure = null;
                    break;
                }
                case UPDATE_PROCEDURE: {
                    if (isDebug) logger.debug(" < " + tags.getTagById(UPDATE_PROCEDURE));
                    m_CurrentCLD.setUpdateProcedure((UpdateProcedureDescriptor) m_CurrentProcedure);
                    m_CurrentProcedure = null;
                    break;
                }
                case DELETE_PROCEDURE: {
                    if (isDebug) logger.debug(" < " + tags.getTagById(DELETE_PROCEDURE));
                    m_CurrentCLD.setDeleteProcedure((DeleteProcedureDescriptor) m_CurrentProcedure);
                    m_CurrentProcedure = null;
                    break;
                }
                case CONSTANT_ARGUMENT: {
                    if (isDebug) logger.debug(" < " + tags.getTagById(CONSTANT_ARGUMENT));
                    break;
                }
                case RUNTIME_ARGUMENT: {
                    if (isDebug) logger.debug(" < " + tags.getTagById(RUNTIME_ARGUMENT));
                    break;
                }
                // handle failure:
                default: {
                    logger.debug("Ignoring unused Element " + qName);
                }
            }
        } catch (Exception ex) {
            // Re-throw MetadataException as-is; wrap anything else.
            if (ex instanceof MetadataException)
                throw (MetadataException) ex;
            else
                throw new MetadataException("Exception when reading metadata information," + " please check your repository.xml file", ex);
        }
    }
}
public class View {

    /**
     * Instantiates a View subtype from an XML stream.
     *
     * @param name alternative name to use, or {@code null} to keep the one in the XML
     * @param xml  stream containing the serialized view
     * @return the deserialized view
     * @throws IOException if the stream cannot be read or deserialized
     */
    public static View createViewFromXML(String name, InputStream xml) throws IOException {
        try (InputStream in = new BufferedInputStream(xml)) {
            View v = (View) Jenkins.XSTREAM.fromXML(in);
            if (name != null) v.name = name;
            // Reject names containing characters Jenkins forbids.
            Jenkins.checkGoodName(v.name);
            return v;
        } catch (StreamException | ConversionException | Error e) { // mostly reflection errors
            throw new IOException("Unable to read", e);
        }
    }
}
public class DataContainer { /** * Adds properties to the data . * @ param toAdd the properties to add * @ return a new copy of the template . */ @ SuppressWarnings ( "unchecked" ) public A addAll ( Iterable < Property > toAdd ) { } }
Data data = getData ( ) ; Data modified = data . addAll ( toAdd ) ; if ( data == modified ) { return ( A ) this ; } return copy ( delegate . put ( "data" , Property . toArrayNode ( data ) ) ) ;
public class JavaInlineExpressionCompiler {

    /**
     * Appends the inline code for the given XTypeLiteral.
     *
     * @param expression       the type-literal expression
     * @param parentExpression the containing expression, or {@code null} when
     *                         this expression is the root
     * @param feature          the feature that contains the expression
     * @param output           the output appendable
     * @return {@code Boolean.TRUE}, since a type constant is always appended
     */
    @SuppressWarnings("static-method")
    protected Boolean _generate(XTypeLiteral expression, XExpression parentExpression, XtendExecutable feature, InlineAnnotationTreeAppendable output) {
        // A type literal always emits a type constant.
        output.appendTypeConstant(expression.getType());
        return Boolean.TRUE;
    }
}
public class ConnecClient { /** * Return all the entities matching the parameters and using the provided client * @ param entity * name * @ param groupId * customer group id * @ param params * criteria * @ param httpClient * MnoHttpClient to use * @ return list of entities * @ throws AuthenticationException * @ throws ApiException * @ throws InvalidRequestException */ public < T > T all ( String entityName , String groupId , Map < String , ? > params , MnoHttpClient httpClient , Class < T > clazz ) throws AuthenticationException , ApiException , InvalidRequestException { } }
String jsonBody = httpClient . get ( getCollectionUrl ( entityName , groupId ) , MnoMapHelper . toUnderscoreHash ( params ) ) ; return GSON . fromJson ( jsonBody , clazz ) ;
public class PubSubEventHandler { /** * Subscribes an event handler for an event class type . * @ param clazz an event class * @ param handler an event handler */ public void subscribe ( final Class < ? extends T > clazz , final EventHandler < ? extends T > handler ) { } }
lock . writeLock ( ) . lock ( ) ; try { List < EventHandler < ? extends T > > list = clazzToListOfHandlersMap . get ( clazz ) ; if ( list == null ) { list = new LinkedList < EventHandler < ? extends T > > ( ) ; clazzToListOfHandlersMap . put ( clazz , list ) ; } list . add ( handler ) ; } finally { lock . writeLock ( ) . unlock ( ) ; }
public class CommerceWarehouseLocalServiceUtil {

    /**
     * Updates the commerce warehouse in the database or adds it if it does not
     * yet exist. Also notifies the appropriate model listeners.
     *
     * @param commerceWarehouse the commerce warehouse
     * @return the commerce warehouse that was updated
     */
    public static com.liferay.commerce.model.CommerceWarehouse updateCommerceWarehouse(com.liferay.commerce.model.CommerceWarehouse commerceWarehouse) {
        // Static facade: delegate to the OSGi-resolved local service.
        return getService().updateCommerceWarehouse(commerceWarehouse);
    }
}
public class Routes { private void add ( HttpMethod method , String url , String acceptedType , Object target ) { } }
RouteEntry entry = new RouteEntry ( ) ; entry . httpMethod = method ; entry . path = url ; entry . target = target ; entry . acceptedType = acceptedType ; LOG . debug ( "Adds route: " + entry ) ; // Adds to end of list routes . add ( entry ) ;
public class ZipUtils { /** * Zips arrays into single array of tuples * @ return */ @ SuppressWarnings ( "unchecked" ) public static < T1 , T2 , T3 > Tuple3 < T1 , T2 , T3 > [ ] zip ( T1 [ ] arr1 , T2 [ ] arr2 , T3 [ ] arr3 ) { } }
int resultSize = _max ( arr1 . length , arr2 . length , arr3 . length ) ; Tuple3 [ ] result = new Tuple3 [ resultSize ] ; for ( int i = 0 ; i < resultSize ; i ++ ) result [ i ] = tuple ( next ( arr1 , i ) , next ( arr2 , i ) , next ( arr3 , i ) ) ; return result ;
public class NotificationBoard {

    /**
     * Sets the dim-behind layer to a specific opacity. No-op when dimming is
     * disabled; the dim view is created lazily on first use.
     *
     * @param alpha the opacity to apply to the dim layer
     */
    public void dimAt(float alpha) {
        if (!mDimEnabled) {
            // Dimming disabled entirely.
            return;
        }
        if (mDimView == null) {
            // Lazily create the dim layer on first use.
            mDimView = makeDimView();
        }
        if (!mDimView.isShown()) {
            // (Re)show the layer and apply the configured dim color.
            mDimView.setVisibility(VISIBLE);
            mDimView.setBackgroundColor(mDimColor);
        }
        mDimView.setAlpha(alpha);
    }
}
public class DeviceManagerClient { /** * Lists the last few versions of the device state in descending order ( i . e . : newest first ) . * < p > Sample code : * < pre > < code > * try ( DeviceManagerClient deviceManagerClient = DeviceManagerClient . create ( ) ) { * DeviceName name = DeviceName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ REGISTRY ] " , " [ DEVICE ] " ) ; * ListDeviceStatesResponse response = deviceManagerClient . listDeviceStates ( name . toString ( ) ) ; * < / code > < / pre > * @ param name The name of the device . For example , * ` projects / p0 / locations / us - central1 / registries / registry0 / devices / device0 ` or * ` projects / p0 / locations / us - central1 / registries / registry0 / devices / { num _ id } ` . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final ListDeviceStatesResponse listDeviceStates ( String name ) { } }
ListDeviceStatesRequest request = ListDeviceStatesRequest . newBuilder ( ) . setName ( name ) . build ( ) ; return listDeviceStates ( request ) ;
public class ClassDiscoverer {
    /**
     * Finds all external class references reachable from the generated classes.
     *
     * @param outline root of the generated code
     * @param classes set of generated classes
     * @return set of external classes, in stable discovery order
     * @throws IllegalAccessException if there's an error introspecting the annotations
     */
    static Set<JClass> discoverDirectClasses(Outline outline, Set<ClassOutline> classes) throws IllegalAccessException {
    }
}
// LinkedHashSet keeps the discovery order deterministic.
Set<String> directClassNames = new LinkedHashSet<>();
for (ClassOutline classOutline : classes) {
    // for each field, if it's a bean, then visit it
    List<FieldOutline> fields = findAllDeclaredAndInheritedFields(classOutline);
    for (FieldOutline fieldOutline : fields) {
        JType rawType = fieldOutline.getRawType();
        CPropertyInfo propertyInfo = fieldOutline.getPropertyInfo();
        boolean isCollection = propertyInfo.isCollection();
        if (isCollection) {
            // For collections, the interesting type is the element type
            // (the first type parameter), not the collection itself.
            JClass collClazz = (JClass) rawType;
            JClass collType = collClazz.getTypeParameters().get(0);
            addIfDirectClass(directClassNames, collType);
        } else {
            addIfDirectClass(directClassNames, rawType);
        }
        // XML annotations on the field may reference further types —
        // presumably @XmlElement(s) targets; confirm in parseXmlAnnotations.
        parseXmlAnnotations(outline, fieldOutline, directClassNames);
    }
}
// Materialize the collected names back into JClass references.
Set<JClass> direct = directClassNames.stream()
        .map(cn -> outline.getCodeModel().directClass(cn))
        .collect(Collectors.toCollection(LinkedHashSet::new));
return direct;
public class HttpRequest { /** * 获取请求URL分段中含prefix段的float值 < br > * 例如请求URL / pipes / record / query / point : 40.0 < br > * 获取time参数 : float point = request . getRequstURIPath ( " point : " , 0.0f ) ; * @ param prefix prefix段前缀 * @ param defvalue 默认float值 * @ return float值 */ public float getRequstURIPath ( String prefix , float defvalue ) { } }
String val = getRequstURIPath ( prefix , null ) ; try { return val == null ? defvalue : Float . parseFloat ( val ) ; } catch ( NumberFormatException e ) { return defvalue ; }
public class SceneStructureMetric {
    /**
     * Specifies the camera model being used.
     *
     * @param which Which camera is being specified
     * @param fixed If these parameters are constant or not
     * @param model The camera model
     */
    public void setCamera(int which, boolean fixed, BundleAdjustmentCamera model) {
    }
}
// "known" marks the camera's intrinsics as fixed (not optimized).
cameras[which].known = fixed;
cameras[which].model = model;
public class ProtoTruth {
    /**
     * Assert on a {@link ListMultimap} with {@link Message} values.
     *
     * <p>This allows the equality configurations on {@link ProtoSubject} to be
     * applied to all comparison tests available on
     * {@link MultimapSubject.UsingCorrespondence}.
     */
    public static <K, M extends Message> ListMultimapWithProtoValuesSubject<?, K, M, ListMultimap<K, M>> assertThat(@NullableDecl ListMultimap<K, M> listMultimap) {
    }
}
// Build a proto-aware Truth subject around the multimap.
return assertAbout(protos()).that(listMultimap);
public class BuildService {
    /**
     * Builds an image, either from a pre-built Docker archive (tarball) or by
     * creating a build archive and running a Docker build. When cleanup is
     * enabled, the previously tagged image is removed after a successful build.
     *
     * @param imageConfig the image configuration
     * @param params mojo params for the project
     * @param noCache if true, disable the Docker build cache for this build
     * @param buildArgs externally supplied build args, merged with the configured ones
     * @throws DockerAccessException on Docker daemon errors
     * @throws MojoExecutionException on configuration/archive errors
     */
    protected void buildImage(ImageConfiguration imageConfig, MojoParameters params, boolean noCache, Map<String, String> buildArgs) throws DockerAccessException, MojoExecutionException {
    }
}
String imageName = imageConfig.getName();
ImageName.validate(imageName);
BuildImageConfiguration buildConfig = imageConfig.getBuildConfiguration();
// Remember the old image id so it can be removed after a successful rebuild.
String oldImageId = null;
CleanupMode cleanupMode = buildConfig.cleanupMode();
if (cleanupMode.isRemove()) {
    oldImageId = queryService.getImageId(imageName);
}
// Archive mode: load a pre-built tarball instead of building from a Dockerfile.
if (buildConfig.getDockerArchive() != null) {
    File tarArchive = buildConfig.getAbsoluteDockerTarPath(params);
    String archiveImageName = getArchiveImageName(buildConfig, tarArchive);
    long time = System.currentTimeMillis();
    docker.loadImage(imageName, tarArchive);
    log.info("%s: Loaded tarball in %s", buildConfig.getDockerArchive(), EnvUtil.formatDurationTill(time));
    // Re-tag when the archive's embedded name differs from the configured name.
    if (archiveImageName != null && !archiveImageName.equals(imageName)) {
        docker.tag(archiveImageName, imageName, true);
    }
    return;
}
long time = System.currentTimeMillis();
File dockerArchive = archiveService.createArchive(imageName, buildConfig, params, log);
log.info("%s: Created %s in %s", imageConfig.getDescription(), dockerArchive.getName(), EnvUtil.formatDurationTill(time));
Map<String, String> mergedBuildMap = prepareBuildArgs(buildArgs, buildConfig);
// auto is now supported by docker, consider switching?
BuildOptions opts = new BuildOptions(buildConfig.getBuildOptions())
        .dockerfile(getDockerfileName(buildConfig))
        .forceRemove(cleanupMode.isRemove())
        .noCache(noCache)
        .cacheFrom(buildConfig.getCacheFrom())
        .buildArgs(mergedBuildMap);
String newImageId = doBuildImage(imageName, dockerArchive, opts);
log.info("%s: Built image %s", imageConfig.getDescription(), newImageId);
// Best-effort removal of the previous image when cleanup is enabled and the
// build actually produced a different image.
if (oldImageId != null && !oldImageId.equals(newImageId)) {
    try {
        docker.removeImage(oldImageId, true);
        log.info("%s: Removed old image %s", imageConfig.getDescription(), oldImageId);
    } catch (DockerAccessException exp) {
        if (cleanupMode == CleanupMode.TRY_TO_REMOVE) {
            // TRY_TO_REMOVE: log the failure and continue; any other mode propagates.
            log.warn("%s: %s (old image)%s", imageConfig.getDescription(), exp.getMessage(),
                    (exp.getCause() != null ? " [" + exp.getCause().getMessage() + "]" : ""));
        } else {
            throw exp;
        }
    }
}
public class Zips { /** * Creates an iterator yielding values from the source iterator and its * index . * < code > E . g : * counted ( [ " a " , " b " , " c " ] , [ 0 . . inf ] ) - > [ ( 0 , " a " ) , ( 1 , " b " ) , ( 2 , " c " ) ] * < / code > * @ param < CT > the counter type * @ param < ET > the element type * @ param iterable the source iterable * @ param range the source range * @ return the resulting iterator */ public static < CT , ET > Iterator < Pair < CT , ET > > counted ( Iterable < ET > iterable , Range < CT > range ) { } }
dbc . precondition ( iterable != null , "cannot call counted with a null iterable" ) ; return new ZipShortestIterator < CT , ET > ( range . iterator ( ) , iterable . iterator ( ) ) ;
public class TrainingSpecification { /** * A list of < code > MetricDefinition < / code > objects , which are used for parsing metrics generated by the algorithm . * @ param metricDefinitions * A list of < code > MetricDefinition < / code > objects , which are used for parsing metrics generated by the * algorithm . */ public void setMetricDefinitions ( java . util . Collection < MetricDefinition > metricDefinitions ) { } }
if ( metricDefinitions == null ) { this . metricDefinitions = null ; return ; } this . metricDefinitions = new java . util . ArrayList < MetricDefinition > ( metricDefinitions ) ;
public class RenameFileExtensions {
    /**
     * Changes the suffix of the filename. Example: test.dat to test.xxx
     *
     * @param file the file to rename
     * @param newSuffix the new suffix; must start with a dot, e.g. ".xxx"
     * @return true if the file was renamed
     * @throws FileNotRenamedException if the file could not be renamed
     * @throws FileDoesNotExistException if the file does not exist
     * @throws IOException if an I/O error occurs
     * @throws FileIsADirectoryException if the file is a directory
     */
    public static boolean changeFilenameSuffix(final File file, final String newSuffix) throws FileNotRenamedException, FileDoesNotExistException, IOException, FileIsADirectoryException {
    }
}
// Delegate with 'false' for the final flag — presumably "don't delete/overwrite
// an existing target"; confirm against the three-argument overload.
return changeFilenameSuffix(file, newSuffix, false);
public class SqlExecutor { /** * 执行存储过程 * @ param sql 需要执行的存储过程 * @ return 存储过程执行结果 * @ throws SQLException SQL执行异常 */ public Object [ ] call ( Sql sql ) throws SQLException { } }
long start = System . currentTimeMillis ( ) ; if ( sql . validate ( ) == false ) { return null ; } List < Object > result = new ArrayList < Object > ( ) ; Statement stmt = null ; try { stmt = this . createStatment ( conn , sql ) ; if ( stmt instanceof CallableStatement ) { CallableStatement callStmt = ( CallableStatement ) stmt ; callStmt . execute ( ) ; List < Object > tmpResults = this . getProcedureOutValue ( callStmt , sql ) ; for ( Object tmpResult : tmpResults ) { if ( tmpResult instanceof ResultSet ) { result . add ( getResultSet ( ( ResultSet ) tmpResult ) ) ; } else { result . add ( tmpResult ) ; } } } } catch ( SQLException e ) { throw e ; } finally { try { if ( stmt != null && stmt . isClosed ( ) == false ) { stmt . close ( ) ; } } catch ( SQLException e ) { logger . error ( e . getMessage ( ) , e ) ; } } logger . debug ( String . format ( "Execute %s used %d ms" , sql . getSql ( ) , System . currentTimeMillis ( ) - start ) ) ; return result . toArray ( ) ;
public class SolverAggregatorInterface { /** * Called when a sample is received from the aggregator . * @ param session * the session the sample was received on . * @ param sampleMessage * the received sample . */ protected void solverSampleReceived ( IoSession session , SampleMessage sampleMessage ) { } }
for ( SampleListener listener : this . sampleListeners ) { listener . sampleReceived ( this , sampleMessage ) ; }
public class CmsDisplayWidget { /** * Represents a value change event . < p > */ public void fireChangeEvent ( ) { } }
String result = "" ; if ( m_textbox . getText ( ) != null ) { if ( ! m_textbox . getText ( ) . equals ( m_default ) ) { result = m_textbox . getText ( ) ; } } ValueChangeEvent . fire ( this , result ) ;
public class MethodBuilder {
    /**
     * Adds a private proxy helper method {@code checkException(Parcel)} that
     * decodes a possible exception from a binder reply.
     */
    private void addProxyCheckException(TypeSpec.Builder classBuilder) {
    }
}
// Shape of the generated method:
//   int code = reply.readInt();                // 0 == no exception flagged
//   Throwable exception = null;
//   if (code != 0) {
//       String msg = reply.readString();
//       if (code == REMOTER_EXCEPTION_CODE) {
//           exception = (Throwable) reply.readSerializable();  // full serialized throwable
//       } else {
//           exception = new RuntimeException(msg);             // message-only fallback
//       }
//   }
//   return exception;
MethodSpec.Builder methodBuilder = MethodSpec.methodBuilder("checkException")
        .addModifiers(Modifier.PRIVATE)
        .returns(Throwable.class)
        .addParameter(ClassName.get("android.os", "Parcel"), "reply")
        .addStatement("int code = reply.readInt()")
        .addStatement("Throwable exception = null")
        .beginControlFlow("if (code != 0)")
        .addStatement("String msg = reply.readString()")
        .beginControlFlow("if (code == REMOTER_EXCEPTION_CODE)")
        .addStatement("exception = (Throwable) reply.readSerializable()")
        .endControlFlow()
        .beginControlFlow("else")
        .addStatement("exception = new RuntimeException(msg)")
        .endControlFlow()
        .endControlFlow()
        .addStatement("return exception");
classBuilder.addMethod(methodBuilder.build());
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link Class}{@code >}.
     */
    @XmlElementDecl(namespace = "http://schema.intuit.com/finance/v3", name = "Class", substitutionHeadNamespace = "http://schema.intuit.com/finance/v3", substitutionHeadName = "IntuitObject")
    public JAXBElement<Class> createClass(Class value) {
    }
}
// Wrap the value in a JAXBElement tagged with the v3 "Class" QName; the null
// argument is the element's scope (global element).
return new JAXBElement<Class>(_Class_QNAME, Class.class, null, value);
public class StaticLog {
    /**
     * Logs at INFO level (below WARN).
     * Note: the logger is resolved dynamically on every call, which is
     * relatively slow — avoid in frequently executed paths.
     *
     * @param format    message template; "{}" marks a placeholder
     * @param arguments values substituted into the placeholders
     */
    public static void info(String format, Object... arguments) {
    }
}
// Resolve the caller's logger indirectly, then delegate to the logger-based overload.
info(LogFactory.indirectGet(), format, arguments);
public class AbstractXsdGeneratorMojo {
    /**
     * {@inheritDoc}
     *
     * Determines staleness by comparing the stale flag file's timestamp with
     * the modification time of every source (Java or XJB) file.
     */
    @Override
    protected boolean isReGenerationRequired() {
    }
}
// Use the stale flag method to identify if we should re-generate the XSDs from the sources.
// Basically, we should re-generate the XSDs if:
// a) The staleFile does not exist
// b) The staleFile exists and is older than one of the sources (Java or XJB files).
//    "Older" is determined by comparing the modification timestamp of the staleFile and the source files.
final File staleFile = getStaleFile();
final String debugPrefix = "StaleFile [" + FileSystemUtilities.getCanonicalPath(staleFile) + "]";
boolean stale = !staleFile.exists();
if (stale) {
    getLog().debug(debugPrefix + " not found. XML Schema (re-)generation required.");
} else {
    final List<URL> sources = getSources();
    if (getLog().isDebugEnabled()) {
        getLog().debug(debugPrefix + " found. Checking timestamps on source Java "
                + "files to determine if XML Schema (re-)generation is required.");
    }
    final long staleFileLastModified = staleFile.lastModified();
    for (URL current : sources) {
        final URLConnection sourceFileConnection;
        try {
            sourceFileConnection = current.openConnection();
            sourceFileConnection.connect();
        } catch (Exception e) {
            if (getLog().isDebugEnabled()) {
                getLog().debug("Could not open a sourceFileConnection to [" + current + "]", e);
            }
            // Can't determine if the staleFile is younger than this source.
            // Re-generate to be on the safe side.
            stale = true;
            break;
        }
        try {
            if (sourceFileConnection.getLastModified() > staleFileLastModified) {
                if (getLog().isDebugEnabled()) {
                    getLog().debug(current.toString() + " is newer than the stale flag file.");
                }
                stale = true;
                // NOTE(review): no break here — the remaining sources are
                // still checked even though the result is already known.
            }
        } finally {
            // Release HTTP keep-alive resources; non-HTTP URLs need no disconnect.
            if (sourceFileConnection instanceof HttpURLConnection) {
                ((HttpURLConnection) sourceFileConnection).disconnect();
            }
        }
    }
}
// All done.
return stale;
public class ProvFactory { /** * ( non - Javadoc ) * @ see org . openprovenance . prov . model . LiteralConstructor # newDuration ( java . lang . String ) */ public Duration newDuration ( String lexicalRepresentation ) { } }
Duration dur = dataFactory . newDuration ( lexicalRepresentation ) ; return dur ;
public class UsersApi {
    /**
     * Gets the user information for a specified user.
     *
     * To return additional user information (last login date, login status and
     * password expiration date), set the optional {@code additional_info}
     * query string parameter to <b>true</b> on the full overload.
     *
     * @param accountId the external account number (int) or account ID GUID (required)
     * @param userId the user ID of the user being accessed; generally the
     *        authenticated user, but an account Admin may access another user (required)
     * @return UserInformation
     */
    public UserInformation getInformation(String accountId, String userId) throws ApiException {
    }
}
// Delegate to the full overload with no extra options — presumably the null
// options object leaves additional_info unset; confirm against the overload.
return getInformation(accountId, userId, null);
public class IoUtil {
    /**
     * Transforms a {@link DomDocument} to XML output.
     *
     * @param document the DOM document to transform
     * @param result the {@link StreamResult} to write to
     * @throws ModelIoException if the transformer cannot be created or the
     *         transformation fails
     */
    public static void transformDocumentToXml(DomDocument document, StreamResult result) {
    }
}
TransformerFactory transformerFactory = TransformerFactory.newInstance();
try {
    Transformer transformer = transformerFactory.newTransformer();
    // Pretty-print as UTF-8 with 2-space indentation (the indent-amount key
    // is a Xalan-specific output property).
    transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
    transformer.setOutputProperty(OutputKeys.INDENT, "yes");
    transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
    // Serialize under the document's monitor so concurrent mutation cannot
    // corrupt the output.
    synchronized (document) {
        transformer.transform(document.getDomSource(), result);
    }
} catch (TransformerConfigurationException e) {
    throw new ModelIoException("Unable to create a transformer for the model", e);
} catch (TransformerException e) {
    throw new ModelIoException("Unable to transform model to xml", e);
}
public class MMFF94PartialCharges {
    /**
     * Assigns MMFF94 partial charges to each atom of the container.
     *
     * @param ac AtomContainer
     * @return the same AtomContainer with MMFF94 partial charges stored as
     *         atom properties under MMFF_94_CHARGE
     * @throws CDKException if any atom has an unknown MMFF atom type
     */
    public IAtomContainer assignMMFF94PartialCharges(IAtomContainer ac) throws CDKException {
    }
}
// Atom typing must succeed for every atom before charges can be computed.
if (!mmff.assignAtomTypes(ac)) throw new CDKException("Molecule had an atom of unknown MMFF type");
mmff.partialCharges(ac);
// Drop the intermediate MMFF bookkeeping properties from the atoms.
mmff.clearProps(ac);
// Copy each computed charge into the MMFF_94_CHARGE atom property.
for (IAtom atom : ac.atoms()) atom.setProperty(MMFF_94_CHARGE, atom.getCharge());
return ac;
public class OffsetDateTime { /** * Returns a copy of this { @ code OffsetDateTime } with the specified number of months subtracted . * This method subtracts the specified amount from the months field in three steps : * < ol > * < li > Subtract the input months from the month - of - year field < / li > * < li > Check if the resulting date would be invalid < / li > * < li > Adjust the day - of - month to the last valid day if necessary < / li > * < / ol > * For example , 2007-03-31 minus one month would result in the invalid date * 2007-04-31 . Instead of returning an invalid result , the last valid day * of the month , 2007-04-30 , is selected instead . * This instance is immutable and unaffected by this method call . * @ param months the months to subtract , may be negative * @ return an { @ code OffsetDateTime } based on this date - time with the months subtracted , not null * @ throws DateTimeException if the result exceeds the supported date range */ public OffsetDateTime minusMonths ( long months ) { } }
return ( months == Long . MIN_VALUE ? plusMonths ( Long . MAX_VALUE ) . plusMonths ( 1 ) : plusMonths ( - months ) ) ;
public class BasicRook {
    /**
     * Closes all resources still present in this {@code Rook} and removes them
     * from it.
     *
     * @throws IllegalStateException when this {@code BasicRook} is closed
     * @throws RuntimeException when one or more invocations of
     *         {@link java.lang.AutoCloseable#close() close()} on the collected
     *         resources throw; the first failure is the cause, later ones are
     *         attached as suppressed exceptions
     * @since 1.0
     */
    public void clear() {
    }
}
checkNotClosed();
// Nothing was ever collected: nothing to close or clear.
if (objects_to_close == null) {
    return;
}
RuntimeException re = null;
for (AutoCloseable ac : objects_to_close) {
    try {
        ac.close();
    } catch (Exception e) {
        // First failure becomes the thrown exception; subsequent failures are
        // recorded as suppressed so none are lost.
        if (re == null) {
            re = new RuntimeException(e);
        } else {
            re.addSuppressed(e);
        }
    }
}
// All close() calls were attempted; forget the resources regardless of failures.
objects_to_close.clear();
if (re != null) {
    throw re;
}
public class JSONObject { /** * 一次性Put 键值对 , 如果key已经存在抛出异常 , 如果键值中有null值 , 忽略 * @ param key 键 * @ param value 值对象 , 可以是以下类型 : Boolean , Double , Integer , JSONArray , JSONObject , Long , String , or the JSONNull . NULL . * @ return this . * @ throws JSONException 值是无穷数字 、 键重复抛出异常 */ public JSONObject putOnce ( String key , Object value ) throws JSONException { } }
if ( key != null && value != null ) { if ( rawHashMap . containsKey ( key ) ) { throw new JSONException ( "Duplicate key \"{}\"" , key ) ; } this . put ( key , value ) ; } return this ;
public class TimePickerSettings { /** * setFormatForMenuTimes , This sets the format that is used to display or parse menu times in * the time picker , using a pattern string . The default format is generated using the locale of * the settings instance . * Available pattern strings can be found in the Javadocs for the DateTimeFormatter class , at * this URL : https : / / docs . oracle . com / javase / 8 / docs / api / java / time / format / DateTimeFormatter . html * If the time picker has already been constructed , then calling this function will cause * immediate validation of the text field text . */ public void setFormatForMenuTimes ( String patternString ) { } }
DateTimeFormatter formatter = PickerUtilities . createFormatterFromPatternString ( patternString , locale ) ; setFormatForMenuTimes ( formatter ) ;
public class FileUtils {
    /**
     * Parses a single line of text into a stream of zero or one Quads.
     *
     * @param line the line of text
     * @return a stream containing the parsed Quad, or an empty stream when the
     *         line does not contain exactly 3 or 4 RDF nodes
     */
    public static Stream<Quad> parseQuad(final String line) {
    }
}
// Tokenize the whole line up front.
final List<Token> tokens = new ArrayList<>();
makeTokenizerString(line).forEachRemaining(tokens::add);
// Keep only tokens that represent RDF nodes.
final List<Node> nodes = tokens.stream().filter(Token::isNode).map(Token::asNode)
        .filter(Objects::nonNull).collect(toList());
if (nodes.size() == 3) {
    // Triple: subject/predicate/object placed in the default graph.
    return of(rdf.asQuad(create(defaultGraphIRI, nodes.get(0), nodes.get(1), nodes.get(2))));
} else if (nodes.size() == 4) {
    // Quad: the fourth node names the graph.
    return of(rdf.asQuad(create(nodes.get(3), nodes.get(0), nodes.get(1), nodes.get(2))));
} else {
    LOGGER.warn("Skipping invalid data value: {}", line);
    return empty();
}
public class KeyStore {
    /**
     * Stores this keystore to the given output stream, and protects its
     * integrity with the given password.
     *
     * @param stream the output stream to which this keystore is written
     * @param password the password to generate the keystore integrity check
     * @exception KeyStoreException if the keystore has not been initialized (loaded)
     * @exception IOException if there was an I/O problem with data
     * @exception NoSuchAlgorithmException if the appropriate data integrity
     *            algorithm could not be found
     * @exception CertificateException if any of the certificates included in
     *            the keystore data could not be stored
     */
    public final void store(OutputStream stream, char[] password) throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException {
    }
}
// The keystore must have been loaded before it can be persisted.
if (!initialized) {
    throw new KeyStoreException("Uninitialized keystore");
}
// Delegate persistence to the provider's SPI implementation.
keyStoreSpi.engineStore(stream, password);
public class VFSUtils {
    /**
     * URL-decodes the path using the given character encoding.
     *
     * @param path the path to decode
     * @param encoding the encoding
     * @return decoded path
     */
    public static String decode(String path, String encoding) {
    }
}
try {
    return URLDecoder.decode(path, encoding);
} catch (UnsupportedEncodingException e) {
    // Convert the checked exception into the project's message-bundle exception.
    throw MESSAGES.cannotDecode(path, encoding, e);
}
public class StringUtils {
    /**
     * Properly splits a delimiter-separated string, honoring CSV-style
     * quoting (a quoted cell may contain the separator).
     *
     * @param input the input; {@code null} yields an empty list
     * @param separator the separator; must not be a double quote
     * @return a list of all the cells in the input
     */
    @SneakyThrows
    public static List<String> split(CharSequence input, char separator) {
    }
}
if (input == null) {
    return new ArrayList<>();
}
// A quote separator would clash with the CSV quoting rules below.
Preconditions.checkArgument(separator != '"', "Separator cannot be a quote");
// Parse via a CSV reader so quoted cells containing the separator stay intact.
try (CSVReader reader = CSV.builder().delimiter(separator).reader(new StringReader(input.toString()))) {
    List<String> all = new ArrayList<>();
    List<String> row;
    while ((row = reader.nextRow()) != null) {
        // Multiple parsed rows are flattened into a single list of cells.
        all.addAll(row);
    }
    return all;
}
public class MatchResultPredicates {
    /**
     * Generates a Predicate that only accepts Match Results with a Match Type
     * lower than or equal to {@code matchType}.
     *
     * @param matchType the matchType that defines the boundary
     * @param <T> a subclass of MatchResult
     * @param <S> a subclass of MatchType
     * @return the Predicate
     */
    public static <T extends MatchResult, S extends MatchType> Predicate<T> lowerOrEqualTo(S matchType) {
    }
}
// lower-or-equal == (lower than) OR (equal to), composed from the two
// existing predicate factories.
return Predicates.or(lowerThan(matchType), equalTo(matchType));
public class AmazonWorkLinkClient {
    /**
     * Describes the networking configuration to access the internal websites
     * associated with the specified fleet.
     *
     * @param request the DescribeCompanyNetworkConfiguration request
     * @return Result of the DescribeCompanyNetworkConfiguration operation returned by the service.
     * @throws UnauthorizedException if you are not authorized to perform this action
     * @throws InternalServerErrorException if the service is temporarily unavailable
     * @throws InvalidRequestException if the request is not valid
     * @throws ResourceNotFoundException if the requested resource was not found
     * @throws TooManyRequestsException if the number of requests exceeds the limit
     * @sample AmazonWorkLink.DescribeCompanyNetworkConfiguration
     */
    @Override
    public DescribeCompanyNetworkConfigurationResult describeCompanyNetworkConfiguration(DescribeCompanyNetworkConfigurationRequest request) {
    }
}
// Standard SDK dispatch: apply pre-execution request handlers, then execute.
request = beforeClientExecution(request);
return executeDescribeCompanyNetworkConfiguration(request);
public class ConsumerContainer { /** * Checks if all preconditions are fulfilled on the broker to * successfully register a consumer there . One important precondition * is the existence of the queue the consumer shall consume from . * @ param consumerHolders The consumer holders * @ throws IOException if the precondition check fails */ protected void checkPreconditions ( List < ConsumerHolder > consumerHolders ) throws IOException { } }
Channel channel = createChannel ( ) ; for ( ConsumerHolder consumerHolder : consumerHolders ) { String queue = consumerHolder . getConfiguration ( ) . getQueueName ( ) ; try { channel . queueDeclarePassive ( queue ) ; LOGGER . debug ( "Queue {} found on broker" , queue ) ; } catch ( IOException e ) { LOGGER . error ( "Queue {} not found on broker" , queue ) ; throw e ; } } channel . close ( ) ;
public class Program { /** * Add a rule to this program . * @ param name is the rule name * @ param condition is the rule condition * @ return the rule */ public Rule addRule ( String name , Node condition ) { } }
Rule rule = new Rule ( name , condition ) ; rules . put ( name , rule ) ; return rule ;