signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class DefaultPreprocessorListener { /** * Handles an error . * The behaviour of this method is defined by the * implementation . It may simply record the error message , or * it may throw an exception . */ @ Override public void handleError ( Source source , int line , int column , String msg ) throws LexerException { } }
errors ++ ; print ( source . getName ( ) + ":" + line + ":" + column + ": error: " + msg ) ;
public class CmsSiteMatcher { /** * Initializes the member variables . < p > * @ param serverProtocol to protocol required to access this site * @ param serverName the server URL prefix to which this site is mapped * @ param serverPort the port required to access this site * @ param timeOffset the time offset */ private void init ( String serverProtocol , String serverName , int serverPort , long timeOffset ) { } }
setServerProtocol ( serverProtocol ) ; setServerName ( serverName ) ; setServerPort ( serverPort ) ; setTimeOffset ( timeOffset ) ;
public class AmazonInspectorClient { /** * Assigns attributes ( key and value pairs ) to the findings that are specified by the ARNs of the findings . * @ param addAttributesToFindingsRequest * @ return Result of the AddAttributesToFindings operation returned by the service . * @ throws InternalException * Internal server error . * @ throws InvalidInputException * The request was rejected because an invalid or out - of - range value was supplied for an input parameter . * @ throws AccessDeniedException * You do not have required permissions to access the requested resource . * @ throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist . The error code describes * the entity . * @ throws ServiceTemporarilyUnavailableException * The serice is temporary unavailable . * @ sample AmazonInspector . AddAttributesToFindings * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / inspector - 2016-02-16 / AddAttributesToFindings " * target = " _ top " > AWS API Documentation < / a > */ @ Override public AddAttributesToFindingsResult addAttributesToFindings ( AddAttributesToFindingsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeAddAttributesToFindings ( request ) ;
public class ApptentiveInternal { /** * < p > Internal use only . < / p > * This bundle could be any bundle sent to us by a push Intent from any supported platform . For that reason , it needs to be checked in multiple ways . * @ param pushBundle a Bundle , or null . * @ return a String , or null . */ static String getApptentivePushNotificationData ( Bundle pushBundle ) { } }
if ( pushBundle != null ) { if ( pushBundle . containsKey ( PUSH_EXTRA_KEY_PARSE ) ) { // Parse ApptentiveLog . v ( PUSH , "Got a Parse Push." ) ; String parseDataString = pushBundle . getString ( PUSH_EXTRA_KEY_PARSE ) ; if ( parseDataString == null ) { ApptentiveLog . e ( PUSH , "com.parse.Data is null." ) ; return null ; } try { JSONObject parseJson = new JSONObject ( parseDataString ) ; return parseJson . optString ( APPTENTIVE_PUSH_EXTRA_KEY , null ) ; } catch ( JSONException e ) { ApptentiveLog . e ( PUSH , "com.parse.Data is corrupt: %s" , parseDataString ) ; logException ( e ) ; return null ; } } else if ( pushBundle . containsKey ( PUSH_EXTRA_KEY_UA ) ) { // Urban Airship ApptentiveLog . v ( PUSH , "Got an Urban Airship push." ) ; Bundle uaPushBundle = pushBundle . getBundle ( PUSH_EXTRA_KEY_UA ) ; if ( uaPushBundle == null ) { ApptentiveLog . e ( PUSH , "Urban Airship push extras bundle is null" ) ; return null ; } return uaPushBundle . getString ( APPTENTIVE_PUSH_EXTRA_KEY ) ; } else if ( pushBundle . containsKey ( APPTENTIVE_PUSH_EXTRA_KEY ) ) { // All others // Straight FCM / GCM / SNS , or nested ApptentiveLog . v ( PUSH , "Found apptentive push data." ) ; return pushBundle . getString ( APPTENTIVE_PUSH_EXTRA_KEY ) ; } else { ApptentiveLog . e ( PUSH , "Got an unrecognizable push." ) ; } } ApptentiveLog . e ( PUSH , "Push bundle was null." ) ; return null ;
public class BigtableWhileMatchResultScannerAdapter { /** * Returns { @ code true } iff there are matching { @ link WhileMatchFilter } labels or no { @ link * WhileMatchFilter } labels . * @ param row a { @ link FlatRow } object . * @ return a boolean value . */ public static boolean hasMatchingLabels ( FlatRow row ) { } }
int inLabelCount = 0 ; int outLabelCount = 0 ; for ( FlatRow . Cell cell : row . getCells ( ) ) { for ( String label : cell . getLabels ( ) ) { // TODO ( kevinsi4508 ) : Make sure { @ code label } is a { @ link WhileMatchFilter } label . // TODO ( kevinsi4508 ) : Handle multiple { @ link WhileMatchFilter } labels . if ( label . endsWith ( WHILE_MATCH_FILTER_IN_LABEL_SUFFIX ) ) { inLabelCount ++ ; } else if ( label . endsWith ( WHILE_MATCH_FILTER_OUT_LABEL_SUFFIX ) ) { outLabelCount ++ ; } } } // Checks if there is mismatching { @ link WhileMatchFilter } label . if ( inLabelCount != outLabelCount ) { return false ; } return true ;
public class TraceEventHelper { /** * Get events * @ param data The data * @ return The events * @ exception Exception If an error occurs */ private static List < TraceEvent > getEvents ( List < String > data ) throws Exception { } }
List < TraceEvent > result = new ArrayList < TraceEvent > ( ) ; for ( String s : data ) { result . add ( TraceEvent . parse ( s ) ) ; } return result ;
public class EmbeddedNeo4jTupleSnapshot { /** * TODO : We should create a query to read this value */ private Object readEmbeddedProperty ( String column ) { } }
String [ ] split = split ( column ) ; Node embeddedNode = node ; for ( int i = 0 ; i < split . length - 1 ; i ++ ) { String relType = split [ i ] ; Iterator < Relationship > rels = embeddedNode . getRelationships ( Direction . OUTGOING , withName ( relType ) ) . iterator ( ) ; if ( rels . hasNext ( ) ) { embeddedNode = rels . next ( ) . getEndNode ( ) ; } else { return null ; } } return readProperty ( embeddedNode , split [ split . length - 1 ] ) ;
public class SibRaManagedConnection { /** * Used to indicate that a connection has been closed . Notifies the * connection event listeners and removes the connection from the set of * open connections . * @ param connection * the connection being closed */ void connectionClosed ( final SibRaConnection connection ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) { SibTr . entry ( this , TRACE , "connectionClosed" , connection ) ; } final ConnectionEvent event = new ConnectionEvent ( this , ConnectionEvent . CONNECTION_CLOSED ) ; event . setConnectionHandle ( connection ) ; for ( Iterator iterator = _eventListeners . iterator ( ) ; iterator . hasNext ( ) ; ) { final ConnectionEventListener listener = ( ConnectionEventListener ) iterator . next ( ) ; listener . connectionClosed ( event ) ; } _connections . remove ( connection ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isEntryEnabled ( ) ) { SibTr . exit ( this , TRACE , "connectionClosed" ) ; }
public class MultimapWithProtoValuesSubject { /** * Specifies that the ordering of repeated fields for these explicitly specified field descriptors * should be ignored when comparing for equality . Sub - fields must be specified explicitly if their * orders are to be ignored as well . * < p > Use { @ link # ignoringRepeatedFieldOrderForValues ( ) } instead to ignore order for all fields . * @ see # ignoringRepeatedFieldOrderForValues ( ) for details . */ public MultimapWithProtoValuesFluentAssertion < M > ignoringRepeatedFieldOrderOfFieldDescriptorsForValues ( Iterable < FieldDescriptor > fieldDescriptors ) { } }
return usingConfig ( config . ignoringRepeatedFieldOrderOfFieldDescriptors ( fieldDescriptors ) ) ;
public class Dispatcher { void error ( ServletRequest servletRequest , ServletResponse servletResponse ) throws ServletException , IOException { } }
dispatch ( servletRequest , servletResponse , Dispatcher . __ERROR ) ;
public class StorePackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getExtendedDataSchema ( ) { } }
if ( extendedDataSchemaEClass == null ) { extendedDataSchemaEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( StorePackage . eNS_URI ) . getEClassifiers ( ) . get ( 48 ) ; } return extendedDataSchemaEClass ;
public class RuntimeConfigurationMarshaller { /** * Marshall the given parameter object . */ public void marshall ( RuntimeConfiguration runtimeConfiguration , ProtocolMarshaller protocolMarshaller ) { } }
if ( runtimeConfiguration == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( runtimeConfiguration . getServerProcesses ( ) , SERVERPROCESSES_BINDING ) ; protocolMarshaller . marshall ( runtimeConfiguration . getMaxConcurrentGameSessionActivations ( ) , MAXCONCURRENTGAMESESSIONACTIVATIONS_BINDING ) ; protocolMarshaller . marshall ( runtimeConfiguration . getGameSessionActivationTimeoutSeconds ( ) , GAMESESSIONACTIVATIONTIMEOUTSECONDS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class ChunkedIntArray { /** * Retrieve the contents of a record into a user - supplied buffer array . * Used to reduce addressing overhead when code will access several * columns of the record . * @ param position int Record number * @ param buffer int [ ] Integer array provided by user , must be large enough * to hold a complete record . */ void readSlot ( int position , int [ ] buffer ) { } }
/* try System . arraycopy ( fastArray , position * slotsize , buffer , 0 , slotsize ) ; catch ( ArrayIndexOutOfBoundsException aioobe ) */ { // System . out . println ( " Using slow read ( 2 ) : " + position ) ; position *= slotsize ; int chunkpos = position >> lowbits ; int slotpos = ( position & lowmask ) ; // Grow if needed if ( chunkpos > chunks . size ( ) - 1 ) chunks . addElement ( new int [ chunkalloc ] ) ; int [ ] chunk = chunks . elementAt ( chunkpos ) ; System . arraycopy ( chunk , slotpos , buffer , 0 , slotsize ) ; }
public class Assert {

    /**
     * Asserts that the {@link Object} reference is not {@literal null}.
     * The assertion holds if and only if the {@link Object} reference is not
     * {@literal null}.
     *
     * @param obj     {@link Object} reference to evaluate
     * @param message {@link Supplier} producing the message used in the
     *                {@link IllegalArgumentException} thrown if the assertion fails
     * @throws java.lang.IllegalArgumentException if the {@link Object} reference
     *                                            is {@literal null}
     * @see java.lang.Object
     */
    public static void notNull(Object obj, Supplier<String> message) {
        if (obj == null) {
            throw new IllegalArgumentException(message.get());
        }
    }
}
public class ReflectionBasedPropertyMetadata { /** * Helper method to extract the potentially aliased name of the field . * @ param fieldReference the field reference . * @ return the potentially aliased name of the field . */ private static String extractName ( final Field fieldReference ) { } }
com . couchbase . client . java . repository . annotation . Field annotation = fieldReference . getAnnotation ( com . couchbase . client . java . repository . annotation . Field . class ) ; if ( annotation == null || annotation . value ( ) == null || annotation . value ( ) . isEmpty ( ) ) { return fieldReference . getName ( ) ; } else { return annotation . value ( ) ; }
public class Friend { /** * Gets the name of this friend . If the name was null then we try to fetch * the name with your Riot API Key if provided . Enable forcedUpdate to * always fetch the latest name of this Friend even when the name is not * null . * @ param forcedUpdate * True will force to update the name even when it is not null . * @ return The name of this Friend or null if no name is assigned . */ public String getName ( boolean forcedUpdate ) { } }
String name = get ( ) . getName ( ) ; if ( ( name == null || forcedUpdate ) && api . getRiotApi ( ) != null ) { try { name = api . getRiotApi ( ) . getName ( getUserId ( ) ) ; setName ( name ) ; } catch ( final IOException e ) { e . printStackTrace ( ) ; } } return name ;
public class OntClassMention { /** * indexed getter for semanticTypes - gets an indexed value - Names or IDs of associated semantic types . * @ generated * @ param i index in the array to get * @ return value of the element at index i */ public String getSemanticTypes ( int i ) { } }
if ( OntClassMention_Type . featOkTst && ( ( OntClassMention_Type ) jcasType ) . casFeat_semanticTypes == null ) jcasType . jcas . throwFeatMissing ( "semanticTypes" , "de.julielab.jules.types.OntClassMention" ) ; jcasType . jcas . checkArrayBounds ( jcasType . ll_cas . ll_getRefValue ( addr , ( ( OntClassMention_Type ) jcasType ) . casFeatCode_semanticTypes ) , i ) ; return jcasType . ll_cas . ll_getStringArrayValue ( jcasType . ll_cas . ll_getRefValue ( addr , ( ( OntClassMention_Type ) jcasType ) . casFeatCode_semanticTypes ) , i ) ;
public class DateTimeFormatterBuilder { /** * Appends just a printer . With no matching parser , a parser cannot be * built from this DateTimeFormatterBuilder . * The printer interface is part of the low - level part of the formatting API . * Normally , instances are extracted from another formatter . * Note however that any formatter specific information , such as the locale , * time - zone , chronology , offset parsing or pivot / default year , will not be * extracted by this method . * @ param printer the printer to add , not null * @ return this DateTimeFormatterBuilder , for chaining * @ throws IllegalArgumentException if printer is null or of an invalid type */ public DateTimeFormatterBuilder append ( DateTimePrinter printer ) { } }
checkPrinter ( printer ) ; return append0 ( DateTimePrinterInternalPrinter . of ( printer ) , null ) ;
public class JobsImpl { /** * Lists the execution status of the Job Preparation and Job Release task for the specified job across the compute nodes where the job has run . * This API returns the Job Preparation and Job Release task status on all compute nodes that have run the Job Preparation or Job Release task . This includes nodes which have since been removed from the pool . If this API is invoked on a job which has no Job Preparation or Job Release task , the Batch service returns HTTP status code 409 ( Conflict ) with an error code of JobPreparationTaskNotSpecified . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ param jobListPreparationAndReleaseTaskStatusNextOptions Additional parameters for the operation * @ param serviceFuture the ServiceFuture object tracking the Retrofit calls * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < List < JobPreparationAndReleaseTaskExecutionInformation > > listPreparationAndReleaseTaskStatusNextAsync ( final String nextPageLink , final JobListPreparationAndReleaseTaskStatusNextOptions jobListPreparationAndReleaseTaskStatusNextOptions , final ServiceFuture < List < JobPreparationAndReleaseTaskExecutionInformation > > serviceFuture , final ListOperationCallback < JobPreparationAndReleaseTaskExecutionInformation > serviceCallback ) { } }
return AzureServiceFuture . fromHeaderPageResponse ( listPreparationAndReleaseTaskStatusNextSinglePageAsync ( nextPageLink , jobListPreparationAndReleaseTaskStatusNextOptions ) , new Func1 < String , Observable < ServiceResponseWithHeaders < Page < JobPreparationAndReleaseTaskExecutionInformation > , JobListPreparationAndReleaseTaskStatusHeaders > > > ( ) { @ Override public Observable < ServiceResponseWithHeaders < Page < JobPreparationAndReleaseTaskExecutionInformation > , JobListPreparationAndReleaseTaskStatusHeaders > > call ( String nextPageLink ) { return listPreparationAndReleaseTaskStatusNextSinglePageAsync ( nextPageLink , jobListPreparationAndReleaseTaskStatusNextOptions ) ; } } , serviceCallback ) ;
public class RouteMatcher { /** * Specify a handler that will be called for a matching HTTP DELETE * @ param regex A regular expression * @ param handler The handler to call */ public RouteMatcher deleteWithRegEx ( String regex , Handler < HttpServerRequest > handler ) { } }
addRegEx ( regex , handler , deleteBindings ) ; return this ;
public class TemplateParser { /** * Process a v - for value . It will register the loop variables as a local variable in the context * stack . * @ param vForValue The value of the v - for attribute * @ return A processed v - for value , should be placed in the HTML in place of the original v - for * value */ private String processVForValue ( String vForValue ) { } }
VForDefinition vForDef = new VForDefinition ( vForValue , context , logger ) ; // Set return of the " in " expression currentExpressionReturnType = vForDef . getInExpressionType ( ) ; String inExpression = this . processExpression ( vForDef . getInExpression ( ) ) ; // And return the newly built definition return vForDef . getVariableDefinition ( ) + " in " + inExpression ;
public class GISCoordinates { /** * This function convert France Lambert II coordinate to * France Lambert IV coordinate . * @ param x is the coordinate in France Lambert II * @ param y is the coordinate in France Lambert II * @ return the France Lambert IV coordinate . */ @ Pure public static Point2d L2_L4 ( double x , double y ) { } }
final Point2d ntfLambdaPhi = NTFLambert_NTFLambdaPhi ( x , y , LAMBERT_2_N , LAMBERT_2_C , LAMBERT_2_XS , LAMBERT_2_YS ) ; return NTFLambdaPhi_NTFLambert ( ntfLambdaPhi . getX ( ) , ntfLambdaPhi . getY ( ) , LAMBERT_4_N , LAMBERT_4_C , LAMBERT_4_XS , LAMBERT_4_YS ) ;
public class CmsWorkplace { /** * Initializes this workplace class instance . < p > * @ param cms the user context * @ param session the session */ protected void initWorkplaceMembers ( CmsObject cms , HttpSession session ) { } }
m_cms = cms ; m_session = session ; // check role try { checkRole ( ) ; } catch ( CmsRoleViolationException e ) { throw new CmsIllegalStateException ( e . getMessageContainer ( ) , e ) ; } // get / create the workplace settings m_settings = ( CmsWorkplaceSettings ) m_session . getAttribute ( CmsWorkplaceManager . SESSION_WORKPLACE_SETTINGS ) ; if ( m_settings == null ) { // create the settings object m_settings = new CmsWorkplaceSettings ( ) ; m_settings = initWorkplaceSettings ( m_cms , m_settings , false ) ; storeSettings ( m_session , m_settings ) ; } // initialize messages CmsMessages messages = OpenCms . getWorkplaceManager ( ) . getMessages ( getLocale ( ) ) ; // generate a new multi messages object and add the messages from the workplace m_messages = new CmsMultiMessages ( getLocale ( ) ) ; m_messages . addMessages ( messages ) ; initMessages ( ) ; if ( m_jsp != null ) { // check request for changes in the workplace settings initWorkplaceRequestValues ( m_settings , m_jsp . getRequest ( ) ) ; } // set cms context accordingly initWorkplaceCmsContext ( m_settings , m_cms ) ; // timewarp reset logic initTimeWarp ( m_settings . getUserSettings ( ) , m_session ) ;
public class MessageAction { /** * { @ inheritDoc } * @ throws java . lang . IllegalArgumentException * If the appended CharSequence is too big and will cause the content to * exceed the { @ value net . dv8tion . jda . core . entities . Message # MAX _ CONTENT _ LENGTH } character limit * @ return Updated MessageAction for chaining convenience */ @ Override @ CheckReturnValue public MessageAction append ( final CharSequence csq , final int start , final int end ) { } }
if ( content . length ( ) + end - start > Message . MAX_CONTENT_LENGTH ) throw new IllegalArgumentException ( "A message may not exceed 2000 characters. Please limit your input!" ) ; content . append ( csq , start , end ) ; return this ;
public class RequestParameters { /** * Sets a parameter to a list of values . * @ param namethe parameter * @ param valuesthe list of values */ public void put ( String name , String ... values ) { } }
getMap ( ) . put ( name , Arrays . asList ( values ) ) ;
public class AWSGlueClient { /** * Removes a specified Database from a Data Catalog . * < note > * After completing this operation , you will no longer have access to the tables ( and all table versions and * partitions that might belong to the tables ) and the user - defined functions in the deleted database . AWS Glue * deletes these " orphaned " resources asynchronously in a timely manner , at the discretion of the service . * To ensure immediate deletion of all related resources , before calling < code > DeleteDatabase < / code > , use * < code > DeleteTableVersion < / code > or < code > BatchDeleteTableVersion < / code > , < code > DeletePartition < / code > or * < code > BatchDeletePartition < / code > , < code > DeleteUserDefinedFunction < / code > , and < code > DeleteTable < / code > or * < code > BatchDeleteTable < / code > , to delete any resources that belong to the database . * < / note > * @ param deleteDatabaseRequest * @ return Result of the DeleteDatabase operation returned by the service . * @ throws EntityNotFoundException * A specified entity does not exist * @ throws InvalidInputException * The input provided was not valid . * @ throws InternalServiceException * An internal service error occurred . * @ throws OperationTimeoutException * The operation timed out . * @ sample AWSGlue . DeleteDatabase * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / glue - 2017-03-31 / DeleteDatabase " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DeleteDatabaseResult deleteDatabase ( DeleteDatabaseRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteDatabase ( request ) ;
public class CmsListManager { /** * Updates the search result . < p > * @ param fieldFacets the field facets * @ param rangeFacets the range facets */ public void search ( Map < String , List < String > > fieldFacets , Map < String , List < String > > rangeFacets ) { } }
search ( fieldFacets , rangeFacets , null ) ;
public class LBiObjSrtPredicateBuilder { /** * One of ways of creating builder . This might be the only way ( considering all _ functional _ builders ) that might be utilize to specify generic params only once . */ @ Nonnull public static < T1 , T2 > LBiObjSrtPredicateBuilder < T1 , T2 > biObjSrtPredicate ( Consumer < LBiObjSrtPredicate < T1 , T2 > > consumer ) { } }
return new LBiObjSrtPredicateBuilder ( consumer ) ;
public class MaskPasswordsConfig { /** * Returns the list of name / password pairs defined at the global level . * < p > Modifications broughts to the returned list has no impact on this * configuration ( the returned value is a copy ) . Also , the list can be * empty but never { @ code null } . < / p > * @ since 2.7 */ public List < VarPasswordPair > getGlobalVarPasswordPairs ( ) { } }
List < VarPasswordPair > r = new ArrayList < VarPasswordPair > ( getGlobalVarPasswordPairsList ( ) . size ( ) ) ; // deep copy for ( VarPasswordPair varPasswordPair : getGlobalVarPasswordPairsList ( ) ) { r . add ( ( VarPasswordPair ) varPasswordPair . clone ( ) ) ; } return r ;
public class PageSection { /** * Create a simple footer , with a styled text * @ param text the text * @ param ts the style * @ return the footer */ public static Footer simpleFooter ( final String text , final TextStyle ts ) { } }
return new SimplePageSectionBuilder ( ) . text ( Text . styledContent ( text , ts ) ) . buildFooter ( ) ;
public class Email { /** * Converts a list of { @ link EmailAddress } to a list of recipients . Each * recipient will have the type { @ link RecipientType # TO } . * @ param to * the list of addresses to convert * @ return the list of recipients */ public static Recipient [ ] toRecipient ( EmailAddress [ ] to ) { } }
Recipient [ ] addresses = new Recipient [ to . length ] ; int i = 0 ; for ( EmailAddress t : to ) { addresses [ i ++ ] = new Recipient ( t ) ; } return addresses ;
public class CleaneLingStyleSolver { /** * Returns the unit propagated literals on level zero . * @ return unit propagated literal on level zero */ public LNGIntVector upZeroLiterals ( ) { } }
final LNGIntVector upZeroLiterals = new LNGIntVector ( ) ; for ( int i = 0 ; i < this . trail . size ( ) ; ++ i ) { final int lit = this . trail . get ( i ) ; if ( var ( lit ) . level ( ) > 0 ) { break ; } else { upZeroLiterals . push ( lit ) ; } } return upZeroLiterals ;
public class ArrayUtils { /** * 裁剪数组 。 Returns a new array if array is not empty and start greater or equals * 0 and less than array length ; array self otherwise ; * @ param array 源数组 。 array to be handed . * @ param start 裁剪的起始位置的索引 。 start index number . * @ return 一个新的数组 , 包含从指定索引开始到数组结束的所有元素 。 如果数组为空或起始索引小于0或大于数组本身索引长度 , 则返回数组本身 。 a new * array if array is not empty and start greater or equals 0 and less * than array length ; array self otherwise ; */ public static < T > T [ ] subArray ( final T [ ] array , final int start ) { } }
int end = array == null ? 0 : array . length - 1 ; return subArray ( array , start , end ) ;
public class JobSchedulesImpl { /** * Lists all of the job schedules in the specified account . * @ param jobScheduleListOptions Additional parameters for the operation * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; CloudJobSchedule & gt ; object */ public Observable < ServiceResponseWithHeaders < Page < CloudJobSchedule > , JobScheduleListHeaders > > listWithServiceResponseAsync ( final JobScheduleListOptions jobScheduleListOptions ) { } }
return listSinglePageAsync ( jobScheduleListOptions ) . concatMap ( new Func1 < ServiceResponseWithHeaders < Page < CloudJobSchedule > , JobScheduleListHeaders > , Observable < ServiceResponseWithHeaders < Page < CloudJobSchedule > , JobScheduleListHeaders > > > ( ) { @ Override public Observable < ServiceResponseWithHeaders < Page < CloudJobSchedule > , JobScheduleListHeaders > > call ( ServiceResponseWithHeaders < Page < CloudJobSchedule > , JobScheduleListHeaders > page ) { String nextPageLink = page . body ( ) . nextPageLink ( ) ; if ( nextPageLink == null ) { return Observable . just ( page ) ; } JobScheduleListNextOptions jobScheduleListNextOptions = null ; if ( jobScheduleListOptions != null ) { jobScheduleListNextOptions = new JobScheduleListNextOptions ( ) ; jobScheduleListNextOptions . withClientRequestId ( jobScheduleListOptions . clientRequestId ( ) ) ; jobScheduleListNextOptions . withReturnClientRequestId ( jobScheduleListOptions . returnClientRequestId ( ) ) ; jobScheduleListNextOptions . withOcpDate ( jobScheduleListOptions . ocpDate ( ) ) ; } return Observable . just ( page ) . concatWith ( listNextWithServiceResponseAsync ( nextPageLink , jobScheduleListNextOptions ) ) ; } } ) ;
public class CalculateMinimumTimeFieldSize {

    /**
     * getFormattedDateWidthInPixels, This returns the width (in pixels) of the
     * longest formatted time, using the supplied DateTimeFormatter instance
     * and font. Note that the locale information is built into the display
     * format.
     *
     * <p>You may optionally add extra characters to the longest time string
     * used in the calculation by supplying a nonzero value for
     * numberOfExtraCharacters; a negative value reduces the returned size.
     */
    static public int getFormattedTimeWidthInPixels(DateTimeFormatter formatForDisplayTime,
            Font fontValidTime, int numberOfExtraCharacters) {
        // Font metrics used for all width measurements.
        JTextField measuringField = new JTextField();
        FontMetrics metrics = measuringField.getFontMetrics(fontValidTime);
        // Reference time used as the "widest" sample.
        // NOTE(review): hour is 22, not 23 — presumably '2' digits render widest; confirm.
        LocalTime widestSampleTime = LocalTime.of(22, 59, 59, 999999999);
        // The locale is built into the formatter instance.
        String widestSampleText = widestSampleTime.format(formatForDisplayTime);
        int widthInPixels = metrics.stringWidth(widestSampleText);
        // Pad with the width of two numeric characters.
        int digitWidth = metrics.stringWidth("8");
        widthInPixels += (2 * digitWidth);
        // Never return less than 50 pixels by default.
        if (widthInPixels < 50) {
            widthInPixels = 50;
        }
        // Apply the (programmer specified) extra-character padding, if any.
        widthInPixels += (numberOfExtraCharacters * digitWidth);
        return widthInPixels;
    }
}
public class GenericCsvInputFormat { @ Override public void open ( FileInputSplit split ) throws IOException { } }
super . open ( split ) ; // instantiate the parsers @ SuppressWarnings ( "unchecked" ) FieldParser < Object > [ ] parsers = new FieldParser [ fieldTypes . length ] ; for ( int i = 0 ; i < fieldTypes . length ; i ++ ) { if ( fieldTypes [ i ] != null ) { Class < ? extends FieldParser < ? > > parserType = FieldParser . getParserForType ( fieldTypes [ i ] ) ; if ( parserType == null ) { throw new RuntimeException ( "No parser available for type '" + fieldTypes [ i ] . getName ( ) + "'." ) ; } @ SuppressWarnings ( "unchecked" ) FieldParser < Object > p = ( FieldParser < Object > ) InstantiationUtil . instantiate ( parserType , FieldParser . class ) ; parsers [ i ] = p ; } } this . fieldParsers = parsers ; // skip the first line , if we are at the beginning of a file and have the option set if ( this . skipFirstLineAsHeader && this . splitStart == 0 ) { readLine ( ) ; // read and ignore }
public class FieldValueMappingCallback { /** * Resolves a property via a property { @ link Resource } . This is used to retrieve relative or absolute references to * the properties of resources other than the current resource . Such references cannot be reliably retrieved using a * resource ' s { @ link ValueMap } as the value map may be < code > null < / code > and does not support access to properties of parent resources . * @ return the resolved value , or < code > null < / code > . */ private < T > T resolvePropertyTypedValueFromForeignResource ( FieldData field , Class < T > propertyType ) { } }
Resource property = this . resource . getResourceResolver ( ) . getResource ( this . resource , field . path ) ; if ( property == null ) { return null ; } // Only adaptation to String - types is supported by the property resource if ( propertyType == String . class || propertyType == String [ ] . class ) { return property . adaptTo ( propertyType ) ; } // Obtain the ValueMap representation of the parent containing the property to use property conversion Resource parent = property . getParent ( ) ; if ( parent == null ) { return null ; } ValueMap properties = parent . adaptTo ( ValueMap . class ) ; if ( properties == null ) { return null ; } return new PrimitiveSupportingValueMap ( properties ) . get ( property . getName ( ) , propertyType ) ;
public class DebugPhaseListener { /** * Returns the debug - info Map for the given component . * ATTENTION : this method is duplicate in UIInput . * @ param clientId * @ return */ @ SuppressWarnings ( "unchecked" ) public static Map < String , List < Object [ ] > > getDebugInfoMap ( String clientId ) { } }
final Map < String , Object > requestMap = FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getRequestMap ( ) ; Map < String , List < Object [ ] > > debugInfo = ( Map < String , List < Object [ ] > > ) requestMap . get ( ErrorPageWriter . DEBUG_INFO_KEY + clientId ) ; if ( debugInfo == null ) { // no debug info available yet , create one and put it on the attributes map debugInfo = new HashMap < String , List < Object [ ] > > ( ) ; requestMap . put ( ErrorPageWriter . DEBUG_INFO_KEY + clientId , debugInfo ) ; } return debugInfo ;
public class HDFSUtils { /** * { @ inheritDoc } */ @ Override public long getFileSize ( String path ) throws IOException , URISyntaxException { } }
FileSystem fs = FileSystem . get ( conf ) ; FileStatus fstatus = fs . getFileStatus ( new Path ( path ) ) ; if ( fstatus == null ) { return - 1 ; } return fstatus . getLen ( ) ;
public class IfcObjectImpl {

    /**
     * Returns the "Declares" inverse relationships of this IFC object, backed by the
     * EMF feature {@code IFC_OBJECT__DECLARES}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public EList<IfcRelDefinesByObject> getDeclares() {
        // Second argument 'true' presumably requests proxy resolution, per the
        // usual EMF eGet(feature, resolve) contract — confirm against superclass.
        return (EList<IfcRelDefinesByObject>) eGet(Ifc4Package.Literals.IFC_OBJECT__DECLARES, true);
    }
}
public class GoogleCloudStorageReadChannel {

    /**
     * Initializes metadata (size, encoding, generation) from the HTTP {@code headers}
     * of a response. Must be called at most once per channel.
     *
     * @param headers the response headers to extract metadata from
     * @throws IOException if metadata initialization fails downstream
     */
    @VisibleForTesting
    protected void initMetadata(HttpHeaders headers) throws IOException {
        checkState(
                !metadataInitialized,
                "can not initialize metadata, it already initialized for '%s'",
                resourceIdString);
        long sizeFromMetadata;
        String range = headers.getContentRange();
        if (range != null) {
            // Content-Range has the form "bytes X-Y/TOTAL"; the total object size
            // is the number after the last '/'.
            sizeFromMetadata = Long.parseLong(range.substring(range.lastIndexOf('/') + 1));
        } else {
            sizeFromMetadata = headers.getContentLength();
        }
        // "x-goog-generation" identifies the object generation served by GCS.
        String generation = headers.getFirstHeaderStringValue("x-goog-generation");
        initMetadata(headers.getContentEncoding(), sizeFromMetadata, generation);
    }
}
public class AbstractApplicationPage {

    /**
     * Creates a PageComponent for the given PageComponentDescriptor, wires its view
     * context, and registers it as an application listener when applicable.
     *
     * @param descriptor the descriptor to create the component from
     * @return the created PageComponent
     */
    protected PageComponent createPageComponent(PageComponentDescriptor descriptor) {
        PageComponent pageComponent = descriptor.createPageComponent();
        pageComponent.setContext(new DefaultViewContext(this, createPageComponentPane(pageComponent)));
        // Components that are also ApplicationListeners get application events forwarded
        // to them via the multicaster (if one is configured).
        if (pageComponent instanceof ApplicationListener && getApplicationEventMulticaster() != null) {
            getApplicationEventMulticaster().addApplicationListener((ApplicationListener) pageComponent);
        }
        return pageComponent;
    }
}
public class AmazonChimeClient {

    /**
     * Retrieves termination setting details for the specified Amazon Chime Voice
     * Connector. Delegates to the generated execute method after running the standard
     * pre-execution hooks.
     *
     * @param request the GetVoiceConnectorTermination request
     * @return the GetVoiceConnectorTermination result returned by the service
     */
    @Override
    public GetVoiceConnectorTerminationResult getVoiceConnectorTermination(GetVoiceConnectorTerminationRequest request) {
        // Standard SDK pattern: apply request handlers/customizations, then dispatch.
        request = beforeClientExecution(request);
        return executeGetVoiceConnectorTermination(request);
    }
}
public class RingbufferMergeData {

    /**
     * Adds an item to the tail of the ringbuffer. If there is no space left, the add
     * overwrites the oldest item. {@code null} values are allowed.
     *
     * The returned value is the sequence of the added item; this sequence is unique for
     * this ringbuffer instance, so it can serve as an ID for items published on it.
     *
     * @param item the item to add (may be {@code null})
     * @return the sequence of the added item
     */
    public long add(Object item) {
        tailSequence++;
        // Buffer full: advance the head so the oldest item is overwritten below.
        if (tailSequence - items.length == headSequence) {
            headSequence++;
        }
        int index = toIndex(tailSequence);
        items[index] = item;
        return tailSequence;
    }
}
public class Path {

    /**
     * Deserializes this path from the given input view. Reads a nullability flag first;
     * when the flag is false, {@code uri} is left unassigned (null path).
     *
     * @throws IOException if a URI component cannot be read or the URI cannot be rebuilt
     */
    @Override
    public void read(DataInputView in) throws IOException {
        final boolean isNotNull = in.readBoolean();
        if (isNotNull) {
            // Components are written individually (each nullable) so the URI can be
            // reconstructed field by field.
            final String scheme = StringUtils.readNullableString(in);
            final String userInfo = StringUtils.readNullableString(in);
            final String host = StringUtils.readNullableString(in);
            final int port = in.readInt();
            final String path = StringUtils.readNullableString(in);
            final String query = StringUtils.readNullableString(in);
            final String fragment = StringUtils.readNullableString(in);
            try {
                uri = new URI(scheme, userInfo, host, port, path, query, fragment);
            } catch (URISyntaxException e) {
                throw new IOException("Error reconstructing URI", e);
            }
        }
    }
}
public class ConnectionHandler {

    /**
     * Tries to write any outstanding bytes to the channel; may run on any thread
     * (possibly unlocked on entry — the lock is taken here).
     *
     * @throws IOException if the channel write fails
     */
    private void tryWriteBytes() throws IOException {
        lock.lock();
        try {
            // Iterate through the outbound ByteBuffer queue, pushing as much as
            // possible into the OS' network buffer.
            Iterator<ByteBuffer> bytesIterator = bytesToWrite.iterator();
            while (bytesIterator.hasNext()) {
                ByteBuffer buff = bytesIterator.next();
                bytesToWriteRemaining -= channel.write(buff);
                if (!buff.hasRemaining())
                    bytesIterator.remove();
                else {
                    // Partial write: the OS buffer is full, re-arm OP_WRITE and stop.
                    setWriteOps();
                    break;
                }
            }
            // If we are done writing, clear the OP_WRITE interestOps.
            if (bytesToWrite.isEmpty())
                key.interestOps(key.interestOps() & ~SelectionKey.OP_WRITE);
            // Don't bother waking up the selector here, since we're just removing
            // an op, not adding.
        } finally {
            lock.unlock();
        }
    }
}
public class ServerModel { /** * Unregister a filter model . * @ param model filter model to unregister */ public void removeFilterModel ( final FilterModel model ) { } }
if ( model . getUrlPatterns ( ) != null ) { try { deassociateBundle ( model . getContextModel ( ) . getVirtualHosts ( ) , model . getContextModel ( ) . getBundle ( ) ) ; filterLock . writeLock ( ) . lock ( ) ; for ( String virtualHost : resolveVirtualHosts ( model ) ) { for ( String urlPattern : model . getUrlPatterns ( ) ) { String fullPath = getFullPath ( model . getContextModel ( ) , urlPattern ) ; Set < UrlPattern > urlSet = filterUrlPatterns . get ( virtualHost ) . get ( fullPath ) ; UrlPattern toDelete = null ; for ( UrlPattern pattern : urlSet ) { FilterModel filterModel = ( FilterModel ) pattern . getModel ( ) ; Class < ? > filter = filterModel . getFilterClass ( ) ; Class < ? > matchFilter = model . getFilterClass ( ) ; if ( filter != null && filter . equals ( matchFilter ) ) { toDelete = pattern ; break ; } } urlSet . remove ( toDelete ) ; } } } finally { filterLock . writeLock ( ) . unlock ( ) ; } }
public class SecurityConstraintTypeImpl { /** * If not already created , a new < code > web - resource - collection < / code > element will be created and returned . * Otherwise , the first existing < code > web - resource - collection < / code > element will be returned . * @ return the instance defined for the element < code > web - resource - collection < / code > */ public WebResourceCollectionType < SecurityConstraintType < T > > getOrCreateWebResourceCollection ( ) { } }
List < Node > nodeList = childNode . get ( "web-resource-collection" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new WebResourceCollectionTypeImpl < SecurityConstraintType < T > > ( this , "web-resource-collection" , childNode , nodeList . get ( 0 ) ) ; } return createWebResourceCollection ( ) ;
public class ResponseImpl { /** * / * ( non - Javadoc ) * @ see com . tvd12 . ezyfox . core . command . Response # exclude ( java . util . Collection ) */ @ Override public < U extends ApiBaseUser > Response exclude ( Collection < U > users ) { } }
for ( U user : users ) this . usernames . remove ( user . getName ( ) ) ; return this ;
public class ViewableMessageBodyWriter {

    /**
     * Gets the {@link org.glassfish.jersey.server.mvc.spi.ViewableContext viewable
     * context}. User-defined (custom) contexts take priority over the default ones
     * (i.e. {@link ResolvingViewableContext}).
     *
     * @return {@code non-null} viewable context
     */
    private ViewableContext getViewableContext() {
        final Set<ViewableContext> customProviders =
                Providers.getCustomProviders(injectionManager, ViewableContext.class);
        if (!customProviders.isEmpty()) {
            // Prefer the first custom provider when any are registered.
            return customProviders.iterator().next();
        }
        // Fall back to the first default provider (assumed present).
        return Providers.getProviders(injectionManager, ViewableContext.class).iterator().next();
    }
}
public class CPDefinitionInventoryLocalServiceBaseImpl {

    /**
     * Updates the cp definition inventory in the database, or adds it if it does not
     * yet exist. Also notifies the appropriate model listeners and triggers reindexing
     * via {@code @Indexable}.
     *
     * @param cpDefinitionInventory the cp definition inventory
     * @return the cp definition inventory that was updated
     */
    @Indexable(type = IndexableType.REINDEX)
    @Override
    public CPDefinitionInventory updateCPDefinitionInventory(CPDefinitionInventory cpDefinitionInventory) {
        // The persistence layer handles the insert-or-update decision.
        return cpDefinitionInventoryPersistence.update(cpDefinitionInventory);
    }
}
public class AbstractContextBuilder {

    /**
     * Returns the ConnectionFactory of the cluster that holds the given queue.
     *
     * The context map must have been initialized via {@code initContextMap()} before
     * this method is called.
     *
     * @param queueName the queue name
     * @return the ConnectionFactory, or {@code null} when the context map is not
     *         initialized or no cluster context is registered for the queue
     */
    public ConnectionFactory getConnectionFactory(String queueName) {
        if (this.contextMap == null) {
            return null;
        }
        final RabbitmqClusterContext clusterContext = this.contextMap.get(queueName);
        return clusterContext == null ? null : clusterContext.getConnectionFactory();
    }
}
public class SARLBuilderPreferenceAccess {

    /**
     * Loads the generator configuration from the project preferences: each generator
     * flag is copied from the preference store only when an explicit value is present,
     * so unset preferences leave the configuration's defaults untouched.
     *
     * @param generatorConfig the configuration to set up
     * @param context the project whose preferences are read
     */
    @SuppressWarnings({"static-method", "checkstyle:npathcomplexity"})
    public void loadBuilderPreferences(GeneratorConfig2 generatorConfig, IProject context) {
        final IPreferenceStore preferenceStore = SARLPreferences.getSARLPreferencesFor(context);
        if (preferenceStore != null) {
            if (preferenceStore.contains(PREF_GENERATED_TEST_SOURCE_FOLDER)) {
                generatorConfig.setGeneratedTestSourceFolder(preferenceStore.getString(PREF_GENERATED_TEST_SOURCE_FOLDER));
            }
            if (preferenceStore.contains(PREF_GENERATE_INLINE)) {
                generatorConfig.setGenerateInlineAnnotation(preferenceStore.getBoolean(PREF_GENERATE_INLINE));
            }
            // The expression-interpreter option is only meaningful when @Inline
            // generation is enabled.
            if (generatorConfig.isGenerateInlineAnnotation()
                    && preferenceStore.contains(PREF_USE_EXPRESSION_INTERPRETER)) {
                generatorConfig.setUseExpressionInterpreterForInlineAnnotation(preferenceStore.getBoolean(PREF_USE_EXPRESSION_INTERPRETER));
            }
            if (preferenceStore.contains(PREF_GENERATE_PURE)) {
                generatorConfig.setGeneratePureAnnotation(preferenceStore.getBoolean(PREF_GENERATE_PURE));
            }
            if (preferenceStore.contains(PREF_GENERATE_EQUALITY_TEST_FUNCTIONS)) {
                generatorConfig.setGenerateEqualityTestFunctions(preferenceStore.getBoolean(PREF_GENERATE_EQUALITY_TEST_FUNCTIONS));
            }
            if (preferenceStore.contains(PREF_GENERATE_TOSTRING_FUNCTIONS)) {
                generatorConfig.setGenerateToStringFunctions(preferenceStore.getBoolean(PREF_GENERATE_TOSTRING_FUNCTIONS));
            }
            if (preferenceStore.contains(PREF_GENERATE_CLONE_FUNCTIONS)) {
                generatorConfig.setGenerateCloneFunctions(preferenceStore.getBoolean(PREF_GENERATE_CLONE_FUNCTIONS));
            }
            if (preferenceStore.contains(PREF_GENERATE_SERIAL_NUMBER_FIELDS)) {
                generatorConfig.setGenerateSerialNumberFields(preferenceStore.getBoolean(PREF_GENERATE_SERIAL_NUMBER_FIELDS));
            }
        }
    }
}
public class NGram { /** * Produces nGrams for nGram matcher . * @ param str source string * @ param gramlength gram length * @ return ngrams */ private static String [ ] generateNGrams ( String str , int gramlength ) { } }
if ( str == null || str . length ( ) == 0 ) return null ; ArrayList < String > grams = new ArrayList < String > ( ) ; int length = str . length ( ) ; String gram ; if ( length < gramlength ) { for ( int i = 1 ; i <= length ; i ++ ) { gram = str . substring ( 0 , i ) ; if ( grams . indexOf ( gram ) == - 1 ) grams . add ( gram ) ; } gram = str . substring ( length - 1 , length ) ; if ( grams . indexOf ( gram ) == - 1 ) grams . add ( gram ) ; } else { for ( int i = 1 ; i <= gramlength - 1 ; i ++ ) { gram = str . substring ( 0 , i ) ; if ( grams . indexOf ( gram ) == - 1 ) grams . add ( gram ) ; } for ( int i = 0 ; i < length - gramlength + 1 ; i ++ ) { gram = str . substring ( i , i + gramlength ) ; if ( grams . indexOf ( gram ) == - 1 ) grams . add ( gram ) ; } for ( int i = length - gramlength + 1 ; i < length ; i ++ ) { gram = str . substring ( i , length ) ; if ( grams . indexOf ( gram ) == - 1 ) grams . add ( gram ) ; } } return grams . toArray ( new String [ grams . size ( ) ] ) ;
public class DescribeRemediationConfigurationsRequest {

    /**
     * Sets the list of AWS Config rule names of remediation configurations for which
     * details are requested. A {@code null} argument clears the field; otherwise the
     * collection is defensively copied into an SDK-internal list.
     *
     * @param configRuleNames
     *        A list of AWS Config rule names of remediation configurations for which
     *        you want details.
     */
    public void setConfigRuleNames(java.util.Collection<String> configRuleNames) {
        if (configRuleNames == null) {
            this.configRuleNames = null;
            return;
        }
        this.configRuleNames = new com.amazonaws.internal.SdkInternalList<String>(configRuleNames);
    }
}
public class BaseTagGenerator {

    /**
     * Emits Java source that synchronizes the scripting variables of the given custom
     * tag for the given scope: for each variable in scope, a line of the form
     * {@code var = ((Type) pageContext.findAttribute("var"));} is written. When
     * VariableInfo entries exist they take precedence; otherwise TagVariableInfo
     * entries are used (honoring name-from-attribute aliases).
     *
     * @param tagWriter writer receiving the generated Java code
     * @param scope     the scripting-variable scope to synchronize
     */
    protected void syncScriptingVars(JavaCodeWriter tagWriter, int scope) {
        TagVariableInfo[] tagVarInfos = ti.getTagVariableInfos();
        VariableInfo[] varInfos = ti.getVariableInfo(collectedTagData.getTagData());
        if (varInfos == null)
            varInfos = new VariableInfo[0];
        if ((varInfos.length == 0) && (tagVarInfos.length == 0)) {
            return;
        }
        if (varInfos.length > 0) {
            for (int i = 0; i < varInfos.length; i++) {
                if (varInfos[i].getScope() == scope) {
                    tagWriter.print(varInfos[i].getVarName());
                    tagWriter.print(" = ((");
                    String className = varInfos[i].getClassName();
                    // PK40417: escape >, <, &, <\%, and %\> in the class name.
                    className = replaceCharacters(className);
                    tagWriter.print(className);
                    tagWriter.print(") " + pageContextVar + ".findAttribute("); // PK65013
                    tagWriter.print(GeneratorUtils.quote(varInfos[i].getVarName()));
                    tagWriter.print("))");
                    // PK26741: repeat tags may store Integers that must be unboxed.
                    if (jspOptions.isUseRepeatInt() && isRepeatTag)
                        tagWriter.println(".intValue();");
                    else
                        tagWriter.println(";"); // PK26741
                }
            }
        } else {
            for (int i = 0; i < tagVarInfos.length; i++) {
                if (tagVarInfos[i].getScope() == scope) {
                    String name = tagVarInfos[i].getNameGiven();
                    if (name == null) {
                        // Variable named indirectly through a tag attribute value.
                        name = collectedTagData.getTagData().getAttributeString(tagVarInfos[i].getNameFromAttribute());
                    } else if (tagVarInfos[i].getNameFromAttribute() != null) {
                        // alias: handled elsewhere, skip here.
                        continue;
                    }
                    tagWriter.print(name);
                    tagWriter.print(" = ((");
                    String className = tagVarInfos[i].getClassName();
                    // PK40417: escape >, <, &, <\%, and %\> in the class name.
                    className = replaceCharacters(className);
                    tagWriter.print(className);
                    tagWriter.print(") " + pageContextVar + ".findAttribute("); // PK65013
                    tagWriter.print(GeneratorUtils.quote(name));
                    tagWriter.print("))");
                    // PK26741: repeat tags may store Integers that must be unboxed.
                    if (jspOptions.isUseRepeatInt() && isRepeatTag)
                        tagWriter.println(".intValue();");
                    else
                        tagWriter.println(";"); // PK26741
                }
            }
        }
    }
}
public class ServiceRemoveStepHandler { /** * If the { @ link OperationContext # isResourceServiceRestartAllowed ( ) context allows resource removal } , * removes services ; otherwise puts the process in reload - required state . The following services are * removed : * < ul > * < li > The service named by the value returned from { @ link # serviceName ( String , PathAddress ) } , if there is one < / li > * < li > The service names associated with any { @ code unavailableCapabilities } * passed to the constructor . < / li > * < / ul > * { @ inheritDoc } */ @ Override protected void performRuntime ( OperationContext context , ModelNode operation , ModelNode model ) { } }
if ( context . isResourceServiceRestartAllowed ( ) ) { final PathAddress address = context . getCurrentAddress ( ) ; final String name = address . getLastElement ( ) . getValue ( ) ; ServiceName nonCapabilityServiceName = serviceName ( name , address ) ; if ( nonCapabilityServiceName != null ) { context . removeService ( serviceName ( name , address ) ) ; } Set < RuntimeCapability > capabilitySet = unavailableCapabilities . isEmpty ( ) ? context . getResourceRegistration ( ) . getCapabilities ( ) : unavailableCapabilities ; for ( RuntimeCapability < ? > capability : capabilitySet ) { if ( capability . getCapabilityServiceValueType ( ) != null ) { context . removeService ( capability . getCapabilityServiceName ( address ) ) ; } } } else { context . reloadRequired ( ) ; }
public class Systems { /** * Returns the instance for the specified implementation . If the system is * unknown , throws an exception . * @ param impl * implementation to be checked . * @ return instances */ public static JaxRx getInstance ( final String impl ) { } }
final String path = Systems . getSystems ( ) . get ( impl ) ; if ( path == null ) { throw new JaxRxException ( 404 , "Unknown implementation: " + impl ) ; } JaxRx jaxrx = INSTANCES . get ( path ) ; if ( jaxrx == null ) { try { jaxrx = ( JaxRx ) Class . forName ( path ) . newInstance ( ) ; INSTANCES . put ( path , jaxrx ) ; } catch ( final Exception ex ) { throw new JaxRxException ( ex ) ; } } return jaxrx ;
public class WebApp {

    /**
     * Notifies all registered ServletRequestListeners that a request was created,
     * unless listeners were already invoked for this request (tracked via a
     * request-state attribute). PK91120.
     *
     * @param request the newly created servlet request
     * @return {@code true} if the listeners were invoked by this call
     */
    public boolean notifyServletRequestCreated(ServletRequest request) {
        if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE))
            logger.logp(Level.FINE, CLASS_NAME, "notifyServletRequestCreated", "ENTRY"); // PI26908
        boolean servletRequestListenerCreated = false;
        if (!servletRequestListeners.isEmpty()) {
            WebContainerRequestState reqState = WebContainerRequestState.getInstance(true);
            // Guard attribute ensures listeners fire at most once per request.
            if (reqState.getAttribute("com.ibm.ws.webcontainer.invokeListenerRequest") == null) {
                reqState.setAttribute("com.ibm.ws.webcontainer.invokeListenerRequest", false);
                Iterator i = servletRequestListeners.iterator();
                ServletRequestEvent sEvent = new ServletRequestEvent(this.getFacade(), request);
                while (i.hasNext()) {
                    // get the listener
                    ServletRequestListener sL = (ServletRequestListener) i.next();
                    // invoke the listener's request-initialized method
                    sL.requestInitialized(sEvent);
                }
                servletRequestListenerCreated = true;
            } else {
                if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE))
                    logger.logp(Level.FINE, CLASS_NAME, "notifyServletRequestCreated", " ServletListener already invoked for request, reqState --> " + reqState);
            }
        }
        return servletRequestListenerCreated;
    }
}
public class FilterDriver {

    /**
     * Runs the test with the given {@link String} data as input.
     *
     * @param separator separator of data
     * @param input     input {@link String} data
     * @param output    result list of {@link Record}s
     * @throws URISyntaxException propagated from the full overload
     * @throws IOException        propagated from the full overload
     */
    protected void run(String separator, String input, List<Record> output) throws IOException, URISyntaxException {
        // Delegate to the full overload with no configuration and header disabled.
        run(null, separator, false, input, output);
    }
}
public class BindTransformer { /** * Get transformer for type . * @ param property * the property * @ return transform */ public static BindTransform lookup ( BindProperty property ) { } }
TypeName typeName = property . getPropertyType ( ) . getTypeName ( ) ; if ( property . hasTypeAdapter ( ) ) { typeName = typeName ( property . typeAdapter . dataType ) ; } return lookup ( typeName ) ;
public class KiteConnect {

    /**
     * Retrieves quote and market depth for the given instruments.
     *
     * @param instruments array of "exchange:tradingsymbol" or instrument tokens, e.g.
     *                    {NSE:NIFTY 50, BSE:SENSEX} or {256265, 265}
     * @return map from instrument key to {@link Quote}
     * @throws KiteException is thrown for all Kite trade related errors
     * @throws JSONException is thrown when there is exception while parsing response
     * @throws IOException   is thrown when there is connection related error
     */
    public Map<String, Quote> getQuote(String[] instruments) throws KiteException, JSONException, IOException {
        KiteRequestHandler kiteRequestHandler = new KiteRequestHandler(proxy);
        // Instruments are sent as repeated "i" query parameters.
        JSONObject jsonObject = kiteRequestHandler.getRequest(routes.get("market.quote"), "i", instruments, apiKey, accessToken);
        Type type = new TypeToken<Map<String, Quote>>() {}.getType();
        // Only the "data" member of the response payload is deserialized.
        return gson.fromJson(String.valueOf(jsonObject.get("data")), type);
    }
}
public class ClassPathCollector { /** * Creates a class path string from the entries using the path . seperator * @ param entries * @ return the class path string */ public static String toCpEnvString ( Collection < ? extends String > entries ) { } }
if ( entries . size ( ) > 0 ) { StringBuffer classPath = new StringBuffer ( "" ) ; for ( String cp : new HashSet < String > ( entries ) ) // remove dublicates { if ( cp == null ) { continue ; } classPath . append ( cp ) ; classPath . append ( System . getProperty ( "path.separator" ) ) ; } classPath . deleteCharAt ( classPath . length ( ) - 1 ) ; return classPath . toString ( ) . trim ( ) ; } return "" ;
public class WikipediaTemplateInfoGenerator { /** * Fill map ( mapToFill ) with template data * @ param textForTemplateExtraction * text for template extraction * @ param filterToApply * filter to apply for templates * @ param id * id of a page / revision * @ param mapToFill * map to fill with data */ private void fillMapWithTemplateData ( String textForTemplateExtraction , TemplateFilter filterToApply , int id , Map < String , Set < Integer > > mapToFill ) { } }
Set < String > names = getTemplateNames ( textForTemplateExtraction ) ; // Update the map with template values for current page for ( String name : names ) { // filter templates - only use templates from a provided // whitelist if ( filterToApply . acceptTemplate ( name ) ) { // Create records for TEMPLATE - > PAGES / REVISION map if ( mapToFill . containsKey ( name ) ) { // add the page id to the set for the current template Set < Integer > pIdList = mapToFill . remove ( name ) ; pIdList . add ( id ) ; mapToFill . put ( name , pIdList ) ; } else { // add new list with page id of current page Set < Integer > newIdList = new HashSet < Integer > ( ) ; newIdList . add ( id ) ; mapToFill . put ( name , newIdList ) ; } } }
public class ReflectionUtil {

    /**
     * Loads the class with the given name without initializing it, using Javers' own
     * classloader.
     *
     * @param className fully qualified class name
     * @return the loaded class
     * @throws JaversException (a RuntimeException) if the class is not found
     */
    public static Class<?> classForName(String className) {
        try {
            // 'false' defers static initialization until first real use.
            return Class.forName(className, false, Javers.class.getClassLoader());
        } catch (ClassNotFoundException ex) {
            throw new JaversException(ex);
        }
    }
}
public class FMT {

    /**
     * Hook called when the processed file is not compliant with the formatter: the
     * file is overwritten in place with its formatted content (UTF-8).
     *
     * @param file      the file that is not compliant
     * @param formatted the formatted content of the file
     * @throws IOException if the file cannot be written
     */
    @Override
    protected void onNonComplyingFile(File file, String formatted) throws IOException {
        CharSink sink = Files.asCharSink(file, Charsets.UTF_8);
        sink.write(formatted);
    }
}
public class ActionContext {

    /**
     * Evicts a cached object: from the current session when one exists, otherwise
     * from the application-level cache.
     *
     * @param key the key indexing the cached object to be evicted
     */
    public void evictCache(String key) {
        H.Session sess = session();
        if (null != sess) {
            sess.evict(key);
        } else {
            // No session in scope — fall back to the application cache.
            app().cache().evict(key);
        }
    }
}
public class DefaultPlexusCipher { public String decryptDecorated ( final String str , final String passPhrase ) throws PlexusCipherException { } }
if ( str == null || str . length ( ) < 1 ) { return str ; } if ( isEncryptedString ( str ) ) { return decrypt ( unDecorate ( str ) , passPhrase ) ; } return decrypt ( str , passPhrase ) ;
public class ResourceList { /** * Return this { @ link ResourceList } as a map from resource path ( obtained from { @ link Resource # getPath ( ) } ) to a * { @ link ResourceList } of { @ link Resource } objects that have that path . * @ return This { @ link ResourceList } as a map from resource path ( obtained from { @ link Resource # getPath ( ) } ) to a * { @ link ResourceList } of { @ link Resource } objects that have that path . */ public Map < String , ResourceList > asMap ( ) { } }
final Map < String , ResourceList > pathToResourceList = new HashMap < > ( ) ; for ( final Resource resource : this ) { final String path = resource . getPath ( ) ; ResourceList resourceList = pathToResourceList . get ( path ) ; if ( resourceList == null ) { resourceList = new ResourceList ( 1 ) ; pathToResourceList . put ( path , resourceList ) ; } resourceList . add ( resource ) ; } return pathToResourceList ;
public class UnitJudge {

    /**
     * Returns whether the input of the given unit is sequential.
     *
     * Note: call {@link #defined(String, String)} first to check that the unit exists,
     * then use this method to read the attribute.
     *
     * @param group the unit's group name
     * @param unit  the unit name
     * @return whether the newest definition of the unit declares sequential input
     * @throws RuntimeException if the unit is undefined (defined() was not checked)
     */
    public static boolean isSequential(String group, String unit) {
        try {
            return UnitRouter.SINGLETON.newestDefinition(Unit.fullName(group, unit)).getInput().isSequential();
        } catch (UnitUndefinedException e) {
            // Callers are expected to validate existence first; surface a clear hint.
            throw new RuntimeException("Please call UnitJudge.defined() first.", e);
        }
    }
}
public class Expression {

    /**
     * Returns an identical {@link Expression} with the given source location. When the
     * location is unchanged, {@code this} is returned.
     *
     * @param location the new source location (must not be null)
     * @return an expression equal to this one except for its source location
     */
    public Expression withSourceLocation(SourceLocation location) {
        checkNotNull(location);
        if (location.equals(this.location)) {
            return this;
        }
        // The anonymous subclass delegates code generation back to this instance,
        // so only the attached location differs.
        return new Expression(resultType, features, location) {
            @Override
            protected void doGen(CodeBuilder adapter) {
                Expression.this.gen(adapter);
            }
        };
    }
}
public class MultipleOutputFormat {

    /**
     * Creates a composite record writer that routes key/value pairs to different
     * output files, lazily creating and caching one underlying writer per final path.
     *
     * @param fs   the file system to use
     * @param job  the job conf for the job
     * @param name the leaf file name for the output file (such as part-00000)
     * @param arg3 a progressable for reporting progress
     * @return a composite record writer
     * @throws IOException if a base record writer cannot be created
     */
    public RecordWriter<K, V> getRecordWriter(FileSystem fs, JobConf job, String name, Progressable arg3) throws IOException {
        // Captured as effectively-final locals for use inside the anonymous class.
        final FileSystem myFS = fs;
        final String myName = generateLeafFileName(name);
        final JobConf myJob = job;
        final Progressable myProgressable = arg3;
        return new RecordWriter<K, V>() {

            // A cache storing the record writers for different output files.
            TreeMap<String, RecordWriter<K, V>> recordWriters = new TreeMap<String, RecordWriter<K, V>>();

            public void write(K key, V value) throws IOException {
                // Derive the file name first from the key, then from the input file name.
                String keyBasedPath = generateFileNameForKeyValue(key, value, myName);
                String finalPath = getInputFileBasedOutputFileName(myJob, keyBasedPath);
                // The actual key/value written may differ from the routing key/value.
                K actualKey = generateActualKey(key, value);
                V actualValue = generateActualValue(key, value);
                RecordWriter<K, V> rw = this.recordWriters.get(finalPath);
                if (rw == null) {
                    // No writer yet for this final path: create one and cache it.
                    rw = getBaseRecordWriter(myFS, myJob, finalPath, myProgressable);
                    this.recordWriters.put(finalPath, rw);
                }
                rw.write(actualKey, actualValue);
            };

            public void close(Reporter reporter) throws IOException {
                // Close every cached writer before clearing the cache.
                Iterator<String> keys = this.recordWriters.keySet().iterator();
                while (keys.hasNext()) {
                    RecordWriter<K, V> rw = this.recordWriters.get(keys.next());
                    rw.close(reporter);
                }
                this.recordWriters.clear();
            };
        };
    }
}
public class ObjectCacheTwoLevelImpl { /** * Make sure that the Identity objects of garbage collected cached * objects are removed too . */ private void processQueue ( ) { } }
CacheEntry sv ; while ( ( sv = ( CacheEntry ) queue . poll ( ) ) != null ) { sessionCache . remove ( sv . oid ) ; }
public class UsersInner {

    /**
     * Deletes the user on a Data Box Edge/Gateway device, asynchronously.
     *
     * @param deviceName        the device name
     * @param name              the user name
     * @param resourceGroupName the resource group name
     * @param serviceCallback   the async ServiceCallback to handle successful and failed responses
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> deleteAsync(String deviceName, String name, String resourceGroupName, final ServiceCallback<Void> serviceCallback) {
        // Bridge the observable-based overload to the callback-based API.
        return ServiceFuture.fromResponse(deleteWithServiceResponseAsync(deviceName, name, resourceGroupName), serviceCallback);
    }
}
public class FSM2MealyParserIO {

    /**
     * Parses a transition of the form: source-state target-state "input" "output".
     * Validates both state indices against the known state set (when non-empty),
     * records the input symbol, and rejects non-deterministic duplicate transitions.
     *
     * @throws FSMParseException when the transition is illegal
     * @throws IOException       see {@link StreamTokenizer#nextToken()}
     */
    @Override
    protected void parseTransition(StreamTokenizer streamTokenizer) throws FSMParseException, IOException {
        try {
            // check we will read a state index
            if (streamTokenizer.nextToken() != StreamTokenizer.TT_WORD) {
                throw new FSMParseException(EXPECT_NUMBER, streamTokenizer);
            }
            // read the source state index
            int from = Integer.parseInt(streamTokenizer.sval);
            // check if such a state exists (empty state set = not yet known, skip check)
            if (!getStates().isEmpty() && !getStates().contains(from)) {
                throw new FSMParseException(String.format(NO_SUCH_STATE, from), streamTokenizer);
            }
            // check we will read a state index
            if (streamTokenizer.nextToken() != StreamTokenizer.TT_WORD) {
                throw new FSMParseException(EXPECT_NUMBER, streamTokenizer);
            }
            // read the target state index
            int to = Integer.parseInt(streamTokenizer.sval);
            // check if such a state exists
            if (!getStates().isEmpty() && !getStates().contains(to)) {
                throw new FSMParseException(String.format(NO_SUCH_STATE, to), streamTokenizer);
            }
            // check we will read an input edge label (quoted string)
            if (streamTokenizer.nextToken() != '"') {
                throw new FSMParseException(EXPECT_STRING, streamTokenizer);
            }
            // read the input, and convert the input string to an actual input symbol
            final I input = getInputParser().apply(streamTokenizer.sval);
            // add it to the set of inputs
            getInputs().add(input);
            // check we will read an output edge label (quoted string)
            if (streamTokenizer.nextToken() != '"') {
                throw new FSMParseException(EXPECT_STRING, streamTokenizer);
            }
            // read the output, and convert the output string to an actual output symbol
            final O output = getOutputParser().apply(streamTokenizer.sval);
            // create the Mealy machine transition
            final Pair<O, Integer> prev = getTransitions().put(Pair.of(from, input), Pair.of(output, to));
            // a previous mapping for (from, input) means two conflicting transitions
            if (prev != null) {
                throw new FSMParseException(String.format(NON_DETERMINISM_DETECTED, prev), streamTokenizer);
            }
        } catch (NumberFormatException nfe) {
            // state indices that are not valid integers
            throw new FSMParseException(nfe, streamTokenizer);
        }
    }
}
public class Messenger { /** * Command for raw api request * @ param service service name * @ param method method name * @ param params request params */ @ ObjectiveCName ( "rawRequestWithService:withMethod:WithParams:" ) public void rawRequest ( String service , String method , ApiRawValue params ) { } }
// Thin delegation: forward the raw API call unchanged to the external module's transport.
modules . getExternalModule ( ) . rawRequest ( service , method , params ) ;
public class GeoPackageExtensions { /** * Delete the Related Tables extension * @ param geoPackage * GeoPackage * @ since 3.2.0 */ public static void deleteRelatedTablesExtension ( GeoPackageCore geoPackage ) { } }
RelatedTablesCoreExtension relatedTablesExtension = getRelatedTableExtension ( geoPackage ) ; if ( relatedTablesExtension . has ( ) ) { relatedTablesExtension . removeExtension ( ) ; }
public class Node { /** * This method adds an interaction scoped correlation id . * @ param id The id * @ return The node */ public Node addInteractionCorrelationId ( String id ) { } }
this . correlationIds . add ( new CorrelationIdentifier ( Scope . Interaction , id ) ) ; return this ;
public class ExcelUtils { /** * Double - checked locking singleton accessor . * NOTE(review): for double - checked locking to be safe under the Java Memory Model , the * excelUtils field must be declared volatile ; the field declaration is outside this chunk — confirm . */ public static ExcelUtils getInstance ( ) { } }
// First null check skips the lock on the hot path; the second, inside the lock, prevents double construction.
if ( null == excelUtils ) { synchronized ( ExcelUtils . class ) { if ( null == excelUtils ) { excelUtils = new ExcelUtils ( ) ; } } } return excelUtils ;
public class File { /** * Returns a list of the files that your account has access to . The files are returned sorted by * creation date , with the most recently created files appearing first . */ public static FileCollection list ( Map < String , Object > params , RequestOptions options ) throws StripeException { } }
// Delegates to the shared collection-request helper, addressed at the File class URL.
return requestCollection ( classUrl ( File . class ) , params , FileCollection . class , options ) ;
public class AbstractUserTokenService { /** * If the passed token is null or expired or if there is no user associated * with the token , this method will throw an { @ link Exception } . * @ param userToken * @ throws Exception if the token is not valid ( e . g . because it is expired ) */ @ Override @ Transactional ( readOnly = true ) public void validateToken ( E userToken ) throws Exception { } }
// Base class covers null/expiry validation; this subclass additionally requires an associated user.
// call super super . validateToken ( userToken ) ; // check if user is associated if ( userToken . getUser ( ) == null ) { throw new Exception ( "There is no user associated with this token." ) ; }
public class CreateCollectionRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( CreateCollectionRequest createCollectionRequest , ProtocolMarshaller protocolMarshaller ) { } }
// Standard AWS SDK marshaller shape: null-check the request, bind each field, wrap any failure in SdkClientException.
if ( createCollectionRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( createCollectionRequest . getCollectionId ( ) , COLLECTIONID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class DelayedClientTransport { /** * Use the picker to try picking a transport for every pending stream , proceed the stream if the * pick is successful , otherwise keep it pending . * < p > This method may be called concurrently with { @ code newStream ( ) } , and it ' s safe . All pending * streams will be served by the latest picker ( if a same picker is given more than once , they are * considered different pickers ) as soon as possible . * < p > This method < strong > must not < / strong > be called concurrently with itself . */ final void reprocess ( @ Nullable SubchannelPicker picker ) { } }
// Concurrency-sensitive: snapshot the pending streams under the lock, run the picker and hand
// successful picks to an executor OUTSIDE the lock, then reconcile under the lock again —
// streams may have been cancelled and shutdown() may have been called in between.
ArrayList < PendingStream > toProcess ; synchronized ( lock ) { lastPicker = picker ; lastPickerVersion ++ ; if ( picker == null || ! hasPendingStreams ( ) ) { return ; } toProcess = new ArrayList < > ( pendingStreams ) ; } ArrayList < PendingStream > toRemove = new ArrayList < > ( ) ; for ( final PendingStream stream : toProcess ) { PickResult pickResult = picker . pickSubchannel ( stream . args ) ; CallOptions callOptions = stream . args . getCallOptions ( ) ; final ClientTransport transport = GrpcUtil . getTransportFromPickResult ( pickResult , callOptions . isWaitForReady ( ) ) ; if ( transport != null ) { Executor executor = defaultAppExecutor ; // createRealStream may be expensive . It will start real streams on the transport . If // there are pending requests , they will be serialized too , which may be expensive . Since // we are now on transport thread , we need to offload the work to an executor . if ( callOptions . getExecutor ( ) != null ) { executor = callOptions . getExecutor ( ) ; } executor . execute ( new Runnable ( ) { @ Override public void run ( ) { stream . createRealStream ( transport ) ; } } ) ; toRemove . add ( stream ) ; } // else : stay pending } synchronized ( lock ) { // Between this synchronized and the previous one : // - Streams may have been cancelled , which may turn pendingStreams into emptiness . // - shutdown ( ) may be called , which may turn pendingStreams into null . if ( ! hasPendingStreams ( ) ) { return ; } pendingStreams . removeAll ( toRemove ) ; // Because delayed transport is long - lived , we take this opportunity to down - size the // hashmap . if ( pendingStreams . isEmpty ( ) ) { pendingStreams = new LinkedHashSet < > ( ) ; } if ( ! hasPendingStreams ( ) ) { // There may be a brief gap between delayed transport clearing in - use state , and first real // transport starting streams and setting in - use state . During the gap the whole channel ' s // in - use state may be false . 
However , it shouldn ' t cause spurious switching to idleness // ( which would shutdown the transports and LoadBalancer ) because the gap should be shorter // than IDLE _ MODE _ DEFAULT _ TIMEOUT _ MILLIS ( 1 second ) . syncContext . executeLater ( reportTransportNotInUse ) ; if ( shutdownStatus != null && reportTransportTerminated != null ) { syncContext . executeLater ( reportTransportTerminated ) ; reportTransportTerminated = null ; } } } syncContext . drain ( ) ;
public class RealVoltDB { /** * move back to partition leader ' s node . */ private void checkExportStreamMastership ( ) { } }
for ( Initiator initiator : m_iv2Initiators . values ( ) ) { if ( initiator . getPartitionId ( ) != MpInitiator . MP_INIT_PID ) { SpInitiator spInitiator = ( SpInitiator ) initiator ; if ( spInitiator . isLeader ( ) ) { ExportManager . instance ( ) . takeMastership ( spInitiator . getPartitionId ( ) ) ; } } }
public class NetUtil {

    /**
     * Convert the long form of an IPv4 address to its dotted-quad string.
     *
     * @param longIP the IPv4 address as an unsigned 32-bit value held in a long
     * @return the "a.b.c.d" string representation
     */
    public static String longToIpv4(long longIP) {
        // Mask every octet to 8 bits. Unlike a bare "longIP >>> 24", this keeps any
        // stray bits above bit 31 of the input from leaking into the first octet.
        StringBuilder sb = new StringBuilder(15); // "255.255.255.255" is 15 chars
        sb.append((longIP >>> 24) & 0xFF).append('.');
        sb.append((longIP >>> 16) & 0xFF).append('.');
        sb.append((longIP >>> 8) & 0xFF).append('.');
        sb.append(longIP & 0xFF);
        return sb.toString();
    }
}
public class KeyVaultClientBaseImpl { /** * Deletes the certificate contacts for a specified key vault . * Deletes the certificate contacts for a specified key vault certificate . This operation requires the certificates / managecontacts permission . * @ param vaultBaseUrl The vault name , for example https : / / myvault . vault . azure . net . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < Contacts > deleteCertificateContactsAsync ( String vaultBaseUrl , final ServiceCallback < Contacts > serviceCallback ) { } }
// Async adapter: wrap the service-response Observable in a ServiceFuture tied to the given callback.
return ServiceFuture . fromResponse ( deleteCertificateContactsWithServiceResponseAsync ( vaultBaseUrl ) , serviceCallback ) ;
public class HttpRequestData { /** * < p > Set ( add / change ) cookie value . < / p > * @ param pName Name * @ param pValue Value */ @ Override public final void setCookieValue ( final String pName , final String pValue ) { } }
// Reuses the matching request cookie when one exists (updating its value in place), otherwise
// creates a long-lived cookie (max age Integer.MAX_VALUE). Either way the cookie is scoped to
// the webapp context path ("/" when the app is at the server root) and added to the response.
// NOTE(review): when found, the request's Cookie instance itself is re-sent on the response —
// its original max-age/attributes are whatever the container parsed; confirm this is intended.
Cookie cookie = null ; if ( this . httpReq . getCookies ( ) != null ) { for ( Cookie co : this . httpReq . getCookies ( ) ) { if ( co . getName ( ) . equals ( pName ) ) { cookie = co ; cookie . setValue ( pValue ) ; break ; } } } if ( cookie == null ) { cookie = new Cookie ( pName , pValue ) ; cookie . setMaxAge ( Integer . MAX_VALUE ) ; } // application path is either root " / " of server address // or WEB application name e . g . / bsa - 433 String path = this . httpReq . getServletContext ( ) . getContextPath ( ) ; if ( "" . equals ( path ) ) { path = "/" ; } cookie . setPath ( path ) ; this . httpResp . addCookie ( cookie ) ;
public class JaxRsWebEndpointImpl { /** * { @ inheritDoc } */ @ Override public void init ( ServletConfig servletConfig , JaxRsProviderFactoryService providerFactoryService ) throws ServletException { } }
// Order-sensitive CXF bootstrap: build the Liberty JAX-RS server factory, temporarily clear the
// bus ClassLoader extension while creating the server and restore it afterwards, copy endpoint
// properties across, start the server, and keep its HTTP destination for request dispatch.
super . init ( servletConfig , providerFactoryService ) ; LibertyJaxRsServerFactoryBean jaxRsServerFactory = new LibertyJaxRsServerFactoryBean ( endpointInfo , jaxRsModuleMetaData , beanCustomizers , servletConfig , providerFactoryService ) ; if ( features != null && ! features . isEmpty ( ) ) { jaxRsServerFactory . setFeatures ( features ) ; } jaxRsServerFactory . doInit ( ) ; ClassLoader origClassLoader = jaxRsServerFactory . getBus ( ) . getExtension ( ClassLoader . class ) ; jaxRsServerFactory . getBus ( ) . setExtension ( null , ClassLoader . class ) ; server = jaxRsServerFactory . create ( ) ; jaxRsServerFactory . getBus ( ) . setExtension ( origClassLoader , ClassLoader . class ) ; configureEndpointInfoProperties ( endpointInfo , server . getEndpoint ( ) . getEndpointInfo ( ) ) ; server . start ( ) ; destination = ( AbstractHTTPDestination ) server . getDestination ( ) ;
public class HttpEndpointImpl { /** * Process HTTP chain work . * @ param enableEndpoint True to enable the associated HTTP chain . False , to disable it . * @ param isPause True if this call is being made for pause endpoint processing . */ public void processHttpChainWork ( boolean enableEndpoint , boolean isPause ) { } }
// Enable path: CAS DISABLED->ENABLED, re-enable the plain chain (httpPort >= 0) and the secure
// chain (httpsPort >= 0 and an SSL factory is available), then run the update action — queued
// via performAction normally, inline when pausing. Disable path: force DISABLED and run the
// stop action the same way (queued vs. inline on pause).
if ( enableEndpoint ) { // enable the endpoint if it is currently disabled // it ' s ok if the endpoint is stopped , the config update will occur @ next start endpointState . compareAndSet ( DISABLED , ENABLED ) ; if ( httpPort >= 0 ) { httpChain . enable ( ) ; } if ( httpsPort >= 0 && sslFactoryProvider . getService ( ) != null ) { httpSecureChain . enable ( ) ; } if ( ! isPause ) { // Use an update action so they pick up the new settings performAction ( updateAction ) ; } else { updateAction . run ( ) ; } } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( this , tc , "endpoint disabled: " + ( String ) endpointConfig . get ( "id" ) ) ; } // The endpoint has been disabled - - stop it now endpointState . set ( DISABLED ) ; if ( ! isPause ) { performAction ( stopAction ) ; } else { stopAction . run ( ) ; } }
public class CmsScheduledJobInfo { /** * Sets the job name . < p > * @ param jobName the job name to set */ public void setJobName ( String jobName ) { } }
checkFrozen ( ) ; if ( CmsStringUtil . isEmpty ( jobName ) || ! jobName . trim ( ) . equals ( jobName ) ) { throw new CmsIllegalArgumentException ( Messages . get ( ) . container ( Messages . ERR_BAD_JOB_NAME_1 , jobName ) ) ; } m_jobName = jobName ;
public class ESInstanceDetailsMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ESInstanceDetails eSInstanceDetails , ProtocolMarshaller protocolMarshaller ) { } }
// Standard AWS SDK marshaller shape: null-check the input, bind each field, wrap any failure in SdkClientException.
if ( eSInstanceDetails == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( eSInstanceDetails . getInstanceClass ( ) , INSTANCECLASS_BINDING ) ; protocolMarshaller . marshall ( eSInstanceDetails . getInstanceSize ( ) , INSTANCESIZE_BINDING ) ; protocolMarshaller . marshall ( eSInstanceDetails . getRegion ( ) , REGION_BINDING ) ; protocolMarshaller . marshall ( eSInstanceDetails . getCurrentGeneration ( ) , CURRENTGENERATION_BINDING ) ; protocolMarshaller . marshall ( eSInstanceDetails . getSizeFlexEligible ( ) , SIZEFLEXELIGIBLE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class DataUtil {

    /**
     * Read an 8-byte big-endian (network / Motorola byte order) long from the stream.
     *
     * @param io source stream; exactly 8 bytes are consumed on success
     * @return the decoded 64-bit value
     * @throws EOFException if the stream ends before 8 bytes are available
     * @throws IOException  on an underlying read failure
     */
    public static long readLongBigEndian(InputStream io) throws IOException {
        long value = 0L;
        // Most significant byte first: shift the accumulator left and fold in each octet.
        // Replaces eight copy-pasted read/shift stanzas with one loop — identical behavior.
        for (int n = 0; n < 8; n++) {
            int b = io.read();
            if (b < 0) {
                throw new EOFException();
            }
            value = (value << 8) | b;
        }
        return value;
    }
}
public class SVGGlyph { /** * resize the svg to a certain width and height * @ param width * @ param height */ public void setSize ( double width , double height ) { } }
// Pin min and max to USE_PREF_SIZE so the pane renders at exactly the preferred width x height.
this . setMinSize ( StackPane . USE_PREF_SIZE , StackPane . USE_PREF_SIZE ) ; this . setPrefSize ( width , height ) ; this . setMaxSize ( StackPane . USE_PREF_SIZE , StackPane . USE_PREF_SIZE ) ;
public class InstanceClient { /** * Retrieves the list of instances contained within the specified zone . * < p > Sample code : * < pre > < code > * try ( InstanceClient instanceClient = InstanceClient . create ( ) ) { * ProjectZoneName zone = ProjectZoneName . of ( " [ PROJECT ] " , " [ ZONE ] " ) ; * for ( Instance element : instanceClient . listInstances ( zone ) . iterateAll ( ) ) { * / / doThingsWith ( element ) ; * < / code > < / pre > * @ param zone The name of the zone for this request . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final ListInstancesPagedResponse listInstances ( ProjectZoneName zone ) { } }
ListInstancesHttpRequest request = ListInstancesHttpRequest . newBuilder ( ) . setZone ( zone == null ? null : zone . toString ( ) ) . build ( ) ; return listInstances ( request ) ;
public class GetDomainsResult { /** * An array of key - value pairs containing information about each of the domain entries in the user ' s account . * @ param domains * An array of key - value pairs containing information about each of the domain entries in the user ' s account . */ public void setDomains ( java . util . Collection < Domain > domains ) { } }
if ( domains == null ) { this . domains = null ; return ; } this . domains = new java . util . ArrayList < Domain > ( domains ) ;
public class DescribeBrokerResult { /** * The list of all ActiveMQ usernames for the specified broker . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setUsers ( java . util . Collection ) } or { @ link # withUsers ( java . util . Collection ) } if you want to override the * existing values . * @ param users * The list of all ActiveMQ usernames for the specified broker . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeBrokerResult withUsers ( UserSummary ... users ) { } }
if ( this . users == null ) { setUsers ( new java . util . ArrayList < UserSummary > ( users . length ) ) ; } for ( UserSummary ele : users ) { this . users . add ( ele ) ; } return this ;
public class TraceContext { /** * This method is looking for sampling tag in nodes properties to override current sampling . * @ param node It sh * @ return boolean whether trace should be sampled or not */ private static boolean checkForSamplingProperties ( Node node ) { } }
Set < Property > samplingProperties = node instanceof ContainerNode ? ( ( ContainerNode ) node ) . getPropertiesIncludingDescendants ( Tags . SAMPLING_PRIORITY . getKey ( ) ) : node . getProperties ( Tags . SAMPLING_PRIORITY . getKey ( ) ) ; for ( Property prop : samplingProperties ) { int priority = 0 ; try { priority = Integer . parseInt ( prop . getValue ( ) ) ; } catch ( NumberFormatException ex ) { // continue on error } if ( priority > 0 ) { return true ; } } return false ;
public class SnapshotTaskClientImpl { /** * { @ inheritDoc } */ @ Override public GetSnapshotContentsTaskResult getSnapshotContents ( String snapshotId , int pageNumber , int pageSize , String prefix ) throws ContentStoreException { } }
GetSnapshotContentsTaskParameters taskParams = new GetSnapshotContentsTaskParameters ( ) ; taskParams . setSnapshotId ( snapshotId ) ; taskParams . setPageNumber ( pageNumber ) ; taskParams . setPageSize ( pageSize ) ; taskParams . setPrefix ( prefix ) ; String taskResult = contentStore . performTask ( SnapshotConstants . GET_SNAPSHOT_CONTENTS_TASK_NAME , taskParams . serialize ( ) ) ; return GetSnapshotContentsTaskResult . deserialize ( taskResult ) ;
public class GlobalTypeImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public void eSet ( int featureID , Object newValue ) { } }
// Generated EMF reflective setter: route known feature IDs to the typed setters, defer the rest to super.
switch ( featureID ) { case DroolsPackage . GLOBAL_TYPE__IDENTIFIER : setIdentifier ( ( String ) newValue ) ; return ; case DroolsPackage . GLOBAL_TYPE__TYPE : setType ( ( String ) newValue ) ; return ; } super . eSet ( featureID , newValue ) ;
public class HBaseClient { /** * Gets the batch size . * @ param persistenceUnit * the persistence unit * @ param puProperties * the pu properties * @ return the batch size */ private int getBatchSize ( String persistenceUnit , Map < String , Object > puProperties ) { } }
String batch_size = puProperties != null ? ( String ) puProperties . get ( PersistenceProperties . KUNDERA_BATCH_SIZE ) : null ; return batch_size != null ? Integer . valueOf ( batch_size ) : KunderaMetadataManager . getPersistenceUnitMetadata ( kunderaMetadata , persistenceUnit ) . getBatchSize ( ) ;