signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class NetworkSecurityGroupsInner { /** * Updates a network security group tags . * @ param resourceGroupName The name of the resource group . * @ param networkSecurityGroupName The name of the network security group . * @ param tags Resource tags . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < NetworkSecurityGroupInner > updateTagsAsync ( String resourceGroupName , String networkSecurityGroupName , Map < String , String > tags ) { } }
return updateTagsWithServiceResponseAsync ( resourceGroupName , networkSecurityGroupName , tags ) . map ( new Func1 < ServiceResponse < NetworkSecurityGroupInner > , NetworkSecurityGroupInner > ( ) { @ Override public NetworkSecurityGroupInner call ( ServiceResponse < NetworkSecurityGroupInner > response ) { return response . body ( ) ; } } ) ;
public class ConvertJsonToXml {
    /**
     * Converts a JSON array or a JSON object to an XML document.
     *
     * @param json - The JSON array or object (in the form of a String).
     * @param prettyPrint - Flag for formatting the resulting XML; if true the result contains tabs and newline ('\n') chars.
     *                      Default value: true. Valid values: true, false.
     * @param showXmlDeclaration - Flag for emitting the xml declaration
     *                             (&lt;?xml version="1.0" encoding="UTF-8" standalone="yes"?&gt;).
     *                             If this is true then rootTagName can't be empty.
     *                             NOTE(review): the original javadoc stated "Default value: false", but the code
     *                             below defaults empty input to TRUE (defaultIfEmpty(..., TRUE)) — confirm which
     *                             is the intended contract. Valid values: true, false.
     * @param rootTagName - The XML tag name. If this input is empty you will get a list of XML elements.
     * @param defaultJsonArrayItemName - Default XML tag name for items in a JSON array when no (array name,
     *                                   array item name) pair is defined in jsonArraysNames/jsonArraysItemNames.
     *                                   Default value: 'item'.
     * @param jsonArraysNames - The list of array names separated by delimiter.
     * @param jsonArraysItemNames - The corresponding list of array item names separated by delimiter.
     * @param namespacesPrefixes - The list of tag prefixes separated by delimiter.
     * @param namespacesUris - The corresponding list of namespace uris separated by delimiter.
     * @param delimiter - The list separator. Default value: ','.
     * @return the converted JSON array or object as an XML document, wrapped in a success result map;
     *         any exception is caught and returned as a failure result map.
     */
    @Action(name = "Convert JSON to XML", outputs = { @Output(RETURN_RESULT), @Output(RETURN_CODE) }, responses = {
            @Response(text = ResponseNames.SUCCESS, field = RETURN_CODE, value = SUCCESS),
            @Response(text = ResponseNames.FAILURE, field = RETURN_CODE, value = FAILURE) })
    public Map<String, String> execute(@Param(value = JSON, required = true) String json,
            @Param(value = PRETTY_PRINT) String prettyPrint,
            @Param(value = SHOW_XML_DECLARATION) String showXmlDeclaration,
            @Param(value = ROOT_TAG_NAME) String rootTagName,
            @Param(value = DEFAULT_JSON_ARRAY_ITEM_NAME) String defaultJsonArrayItemName,
            @Param(value = NAMESPACES_PREFIXES) String namespacesPrefixes,
            @Param(value = NAMESPACES_URIS) String namespacesUris,
            @Param(value = JSON_ARRAYS_NAMES) String jsonArraysNames,
            @Param(value = JSON_ARRAYS_ITEM_NAMES) String jsonArraysItemNames,
            @Param(value = DELIMITER) String delimiter) {
        try {
            // Empty flags fall back to TRUE (see NOTE(review) above regarding showXmlDeclaration).
            showXmlDeclaration = StringUtils.defaultIfEmpty(showXmlDeclaration, TRUE);
            prettyPrint = StringUtils.defaultIfEmpty(prettyPrint, TRUE);
            ValidateUtils.validateInputs(prettyPrint, showXmlDeclaration);
            // Bundle all inputs into a single immutable inputs object.
            final ConvertJsonToXmlInputs inputs = new ConvertJsonToXmlInputs.ConvertJsonToXmlInputsBuilder()
                    .withJson(json)
                    .withPrettyPrint(Boolean.parseBoolean(prettyPrint))
                    .withShowXmlDeclaration(Boolean.parseBoolean(showXmlDeclaration))
                    .withRootTagName(rootTagName)
                    .withDefaultJsonArrayItemName(defaultJsonArrayItemName)
                    .withNamespaces(namespacesUris, namespacesPrefixes, delimiter)
                    .withJsonArraysNames(jsonArraysNames, jsonArraysItemNames, delimiter)
                    .build();
            // Configure the converter from the parsed inputs, then run the conversion.
            final ConvertJsonToXmlService converter = new ConvertJsonToXmlService();
            converter.setNamespaces(inputs.getNamespaces());
            converter.setJsonArrayItemNames(inputs.getArraysItemNames());
            converter.setJsonArrayItemName(inputs.getDefaultJsonArrayItemName());
            final String xml = converter.convertToXmlString(inputs);
            return getSuccessResultsMap(xml);
        } catch (Exception e) {
            // Best-effort contract: failures are reported via the result map, never thrown.
            return getFailureResultsMap(e);
        }
    }
}
public class DeleteEntityRecognizerRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeleteEntityRecognizerRequest deleteEntityRecognizerRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( deleteEntityRecognizerRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteEntityRecognizerRequest . getEntityRecognizerArn ( ) , ENTITYRECOGNIZERARN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Datatype_Builder { /** * Replaces the value to be returned by { @ link Datatype # getType ( ) } by applying { @ code mapper } to * it and using the result . * @ return this { @ code Builder } object * @ throws NullPointerException if { @ code mapper } is null or returns null * @ throws IllegalStateException if the field has not been set */ public Datatype . Builder mapType ( UnaryOperator < TypeClass > mapper ) { } }
Objects . requireNonNull ( mapper ) ; return setType ( mapper . apply ( getType ( ) ) ) ;
public class DscNodesInner {
    /**
     * Update the dsc node.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @param nodeId The id of the dsc node to update.
     * @param parameters Parameters supplied to the update dsc node operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the DscNodeInner object if successful.
     */
    public DscNodeInner update(String resourceGroupName, String automationAccountName, String nodeId, DscNodeUpdateParameters parameters) {
        // Synchronous variant: block on the async call and unwrap the REST response body.
        return updateWithServiceResponseAsync(resourceGroupName, automationAccountName, nodeId, parameters).toBlocking().single().body();
    }
}
public class CapabilitiesHelper { /** * Acquire capabilities from a { @ link DefaultCapabilitiesBuilder } provider . * @ param builder * the { @ link DefaultCapabilitiesBuilder } provider to acquire capabilities from * @ return the { @ link DesiredCapabilities } which came from the providers . */ public static DesiredCapabilities retrieveCustomCapabilities ( Class < ? extends DefaultCapabilitiesBuilder > builder ) { } }
logger . entering ( builder ) ; DesiredCapabilities caps = new DesiredCapabilities ( ) ; if ( builder != null && ! builder . getName ( ) . equals ( DefaultCapabilitiesBuilder . class . getName ( ) ) ) { try { caps = builder . newInstance ( ) . createCapabilities ( ) ; } catch ( InstantiationException | IllegalAccessException e ) { throw new IllegalStateException ( "Unable to apply desired capabilities from " + builder . getName ( ) , e ) ; } } logger . exiting ( caps ) ; return caps ;
public class Rollbar { /** * Handle all uncaught errors on { @ code thread } with this ` Rollbar ` . * @ param thread the thread to handle errors on */ public void handleUncaughtErrors ( Thread thread ) { } }
final Rollbar rollbar = this ; thread . setUncaughtExceptionHandler ( new Thread . UncaughtExceptionHandler ( ) { public void uncaughtException ( Thread t , Throwable e ) { rollbar . log ( e , null , null , null , true ) ; } } ) ;
public class CPOptionLocalServiceBaseImpl {
    /**
     * Adds the cp option to the database. Also notifies the appropriate model listeners.
     *
     * @param cpOption the cp option
     * @return the cp option that was added
     */
    @Indexable(type = IndexableType.REINDEX)
    @Override
    public CPOption addCPOption(CPOption cpOption) {
        // Mark the entity as new so the persistence layer performs an insert rather than an update.
        cpOption.setNew(true);
        return cpOptionPersistence.update(cpOption);
    }
}
public class MetaServiceImpl {
    /**
     * Builds an {@code SServiceType} description from the given {@code SClass}, copying name,
     * simple name, simple type, and (optionally, when {@code recurse} is true) the field types.
     *
     * WARNING / TODO (from original author): recursion to a type that (directly or indirectly)
     * references itself will result in an endless loop — there is no visited-set or memo table.
     *
     * @param sClass the source class description; {@code null} yields {@code null}
     * @param recurse whether to recursively convert field types and generic types
     * @return the converted service type, or {@code null} if {@code sClass} is {@code null}
     */
    public SServiceType createSServiceType(SClass sClass, boolean recurse) throws UserException, ServerException {
        if (sClass == null) {
            return null;
        }
        SServiceType sServiceType = new SServiceType();
        sServiceType.setName(sClass.getName());
        sServiceType.setSimpleName(sClass.getSimpleName());
        // Map the simple-type enum by name across the two enum types.
        sServiceType.setSimpleType(SServiceSimpleType.valueOf(sClass.getSimpleType().name()));
        for (SField field : sClass.getOwnFields()) {
            SServiceField sServiceField = new SServiceField();
            sServiceField.setName(field.getName());
            if (recurse) {
                // Recursive conversion — see the endless-loop warning above for self-referential types.
                sServiceField.setType(createSServiceType(field.getType(), recurse));
                sServiceField.setGenericType(createSServiceType(field.getGenericType(), recurse));
            }
            sServiceField.setDoc(field.getDoc());
            sServiceType.getFields().add(sServiceField);
        }
        return sServiceType;
    }
}
public class GameSettings { /** * Checks if the caller has permission to read the game settings * @ return the instance with all the settings as constants */ public static GameSettings getInstance ( ) { } }
SecurityManager sm = System . getSecurityManager ( ) ; if ( sm != null ) { sm . checkPermission ( new GamePermission ( "readSettings" ) ) ; } return INSTANCE ;
public class MutablePropertySources {
    /**
     * Add the given property source object with highest precedence.
     * If a source with the same name already exists it is removed first,
     * so the new source takes its place at the front of the search order.
     */
    public void addFirst(PropertySource<?> propertySource) {
        if (logger.isDebugEnabled()) {
            logger.debug("Adding PropertySource '" + propertySource.getName() + "' with highest search precedence");
        }
        // Remove any same-named source before inserting at index 0 (highest precedence).
        removeIfPresent(propertySource);
        this.propertySourceList.add(0, propertySource);
    }
}
public class CmsDriverManager {
    /**
     * Returns all locked resources in a given folder.<p>
     *
     * @param dbc the current database context
     * @param resource the folder to search in
     * @param filter the lock filter
     * @return a list of locked resources
     * @throws CmsException if the current project is locked
     */
    public List<CmsResource> getLockedResourcesObjects(CmsDbContext dbc, CmsResource resource, CmsLockFilter filter) throws CmsException {
        // Pure delegation to the lock manager.
        return m_lockManager.getLockedResources(dbc, resource, filter);
    }
}
public class ANXAdapters { /** * Adapts a List of ANXOrders to a List of LimitOrders * @ param anxOrders * @ param currency * @ param orderType * @ return */ public static List < LimitOrder > adaptOrders ( List < ANXOrder > anxOrders , String tradedCurrency , String currency , String orderType , String id ) { } }
List < LimitOrder > limitOrders = new ArrayList < > ( ) ; for ( ANXOrder anxOrder : anxOrders ) { limitOrders . add ( adaptOrder ( anxOrder . getAmount ( ) , anxOrder . getPrice ( ) , tradedCurrency , currency , orderType , id , new Date ( anxOrder . getStamp ( ) ) ) ) ; } return limitOrders ;
public class DatabasesInner { /** * Exports a database . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ param databaseName The name of the database . * @ param parameters The database export request parameters . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the ImportExportOperationResultInner object */ public Observable < ImportExportOperationResultInner > beginExportAsync ( String resourceGroupName , String serverName , String databaseName , ImportExportDatabaseDefinition parameters ) { } }
return beginExportWithServiceResponseAsync ( resourceGroupName , serverName , databaseName , parameters ) . map ( new Func1 < ServiceResponse < ImportExportOperationResultInner > , ImportExportOperationResultInner > ( ) { @ Override public ImportExportOperationResultInner call ( ServiceResponse < ImportExportOperationResultInner > response ) { return response . body ( ) ; } } ) ;
public class ConnectionDAODefaultImpl {
    /**
     * Reads the device's black box (its history of recent client requests).
     *
     * @param connection the Tango connection to read from
     * @param length the number of black-box entries requested
     * @return the black-box entries; an empty array if the read failed in a way that
     *         {@code Except.throw_connection_failed} did not convert into a thrown DevFailed
     * @throws DevFailed if the device rejects the call or the connection fails
     */
    public String[] black_box(final Connection connection, final int length) throws DevFailed {
        // Verify the target is a Tango device and (re)establish the connection before the call.
        checkIfTango(connection, "black_box");
        build_connection(connection);
        String[] result = new String[0];
        try {
            result = connection.device.black_box(length);
        } catch (final DevFailed e) {
            // Convert the device-side failure into a connection-failed DevFailed with context.
            final String reason = "TangoApi_CANNOT_READ_BLACK BOX";
            final String desc = "Cannot read black box on " + connection.devname;
            final String origin = "Connection.black_box()";
            Except.throw_connection_failed(e, reason, desc, origin);
        } catch (final Exception e) {
            // Non-Tango failure: clear pending replies for this device, then rethrow as DevFailed.
            ApiUtilDAODefaultImpl.removePendingRepliesOfDevice(connection);
            throw_dev_failed(connection, e, "black_box", false);
        }
        return result;
    }
}
public class AppServiceEnvironmentsInner {
    /**
     * Create or update a worker pool.
     * Create or update a worker pool.
     *
     * @param resourceGroupName Name of the resource group to which the resource belongs.
     * @param name Name of the App Service Environment.
     * @param workerPoolName Name of the worker pool.
     * @param workerPoolEnvelope Properties of the worker pool.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the WorkerPoolResourceInner object if successful.
     */
    public WorkerPoolResourceInner beginCreateOrUpdateWorkerPool(String resourceGroupName, String name, String workerPoolName, WorkerPoolResourceInner workerPoolEnvelope) {
        // Synchronous variant: block on the async call and unwrap the REST response body.
        return beginCreateOrUpdateWorkerPoolWithServiceResponseAsync(resourceGroupName, name, workerPoolName, workerPoolEnvelope).toBlocking().single().body();
    }
}
public class ApiOvhMe { /** * Get this object properties * REST : GET / me / identity / user / { user } * @ param user [ required ] User ' s login */ public OvhUser identity_user_user_GET ( String user ) throws IOException { } }
String qPath = "/me/identity/user/{user}" ; StringBuilder sb = path ( qPath , user ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhUser . class ) ;
public class ConvexHull {
    /**
     * Dynamically inserts each vertex of the given multi-vertex geometry into the convex hull.
     * Vertices rejected by addPoint_ (returning -1) are skipped; accepted vertices are also
     * added to the internal shape and linked into the hull tree.
     */
    private void addMultiVertexGeometry_(MultiVertexGeometry mvg) {
        // Reusable scratch objects to avoid per-vertex allocation.
        Point point = new Point();
        Point2D pt_p = new Point2D();
        for (int i = 0; i < mvg.getPointCount(); i++) {
            mvg.getXY(i, pt_p);
            // addPoint_ returns -1 when the point does not extend the hull.
            int p = addPoint_(pt_p);
            if (p != -1) {
                mvg.getPointByVal(i, point);
                int tp = m_shape.addPoint(m_path_handle, point);
                m_tree_hull.setElement(p, tp); // reset the place holder to tp
            }
        }
    }
}
public class SCoverageReportMojo { /** * { @ inheritDoc } */ @ Override public String getName ( Locale locale ) { } }
if ( StringUtils . isEmpty ( name ) ) { return getBundle ( locale ) . getString ( "report.scoverage.name" ) ; } return name ;
public class BOCImpl {
    /**
     * Sets the value of the feature identified by {@code featureID}.
     * EMF-generated reflective setter — do not hand-edit the dispatch logic.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case AfplibPackage.BOC__OBJ_CNAME:
                setObjCName((String) newValue);
                return;
            case AfplibPackage.BOC__TRIPLETS:
                // Replace the triplet list contents wholesale.
                getTriplets().clear();
                getTriplets().addAll((Collection<? extends Triplet>) newValue);
                return;
        }
        // Unknown feature: let the superclass handle (or reject) it.
        super.eSet(featureID, newValue);
    }
}
public class ChemFile { /** * Removes a ChemSequence from this container . * @ param pos The position from which to remove * @ see # chemSequences * @ see # addChemSequence ( org . openscience . cdk . interfaces . IChemSequence ) */ @ Override public void removeChemSequence ( int pos ) { } }
chemSequences [ pos ] . removeListener ( this ) ; for ( int i = pos ; i < chemSequenceCount - 1 ; i ++ ) { chemSequences [ i ] = chemSequences [ i + 1 ] ; } chemSequences [ chemSequenceCount - 1 ] = null ; chemSequenceCount -- ; notifyChanged ( ) ;
import java.util.*;

class CalculateColumnSum {
    /**
     * Computes the sum of a given column in a 2D list (matrix).
     *
     * For example, given matrix = [[1,2,3,2],[4,5,6,2],[7,8,9,5]]:
     * calculateColumnSum(matrix, 0) == 12, calculateColumnSum(matrix, 1) == 15,
     * calculateColumnSum(matrix, 3) == 9.
     *
     * @param matrix a 2D list of integers
     * @param columnIndex index of the column whose sum is computed
     * @return the sum of all elements in the specified column
     */
    public static int calculateColumnSum(ArrayList<ArrayList<Integer>> matrix, int columnIndex) {
        // Stream form of the original accumulation loop: one element per row.
        return matrix.stream().mapToInt(row -> row.get(columnIndex)).sum();
    }
}
public class Matrix3f {
    /**
     * Set the values of this matrix by reading 9 float values from off-heap memory in column-major order,
     * starting at the given address.
     *
     * This method will throw an {@link UnsupportedOperationException} when JOML is used with `-Djoml.nounsafe`.
     * <em>This method is unsafe as it can result in a crash of the JVM process when the specified
     * address range does not belong to this process.</em>
     *
     * @param address
     *          the off-heap memory address to read the matrix values from in column-major order
     * @return this
     */
    public Matrix3f setFromAddress(long address) {
        // Guard: the unsafe path is disabled when joml.nounsafe is set.
        if (Options.NO_UNSAFE)
            throw new UnsupportedOperationException("Not supported when using joml.nounsafe");
        // With NO_UNSAFE false, MemUtil.INSTANCE is the unsafe implementation; the cast is safe here.
        MemUtil.MemUtilUnsafe unsafe = (MemUtil.MemUtilUnsafe) MemUtil.INSTANCE;
        unsafe.get(this, address);
        return this;
    }
}
public class AbstractExtraLanguageGenerator { /** * Generate the given script . * @ param script the script . * @ param context the context . */ protected void _generate ( SarlScript script , IExtraLanguageGeneratorContext context ) { } }
if ( script != null ) { for ( final XtendTypeDeclaration content : script . getXtendTypes ( ) ) { if ( context . getCancelIndicator ( ) . isCanceled ( ) ) { return ; } try { generate ( content , context ) ; } finally { context . clearData ( ) ; } } }
public class ListDialogDecorator { /** * Returns an array , which identifies the currently checked list items . * @ return An array , which identifies the currently checked list items , as { @ link Boolean } array */ @ Nullable private boolean [ ] getCheckedItems ( ) { } }
if ( listView != null && adapter != null ) { boolean [ ] result = new boolean [ adapter . getItemCount ( ) ] ; for ( int i = 0 ; i < result . length ; i ++ ) { result [ i ] = adapter . isItemChecked ( i ) ; } return result ; } else { return checkedItems ; }
public class StringContext {
    /**
     * Set the given string in the given array at the given index. This will
     * overwrite the existing value at the given index.
     *
     * @param strArray The array to set in
     * @param index The index of the element to overwrite
     * @param str The string value to set
     * @return <code>true</code> if the index was valid and the value was set,
     *         <code>false</code> otherwise
     */
    public boolean setInStringArray(String[] strArray, int index, String str) {
        // Guard clause: null array or out-of-range index means nothing is written.
        if (strArray == null || index < 0 || index >= strArray.length) {
            return false;
        }
        strArray[index] = str;
        return true;
    }
}
public class StreamDecoder {
    /**
     * Factory for java.nio.channels.Channels.newReader.
     *
     * @param ch the channel to decode bytes from
     * @param dec the charset decoder to apply
     * @param minBufferCap the minimum internal buffer capacity
     * @return a new StreamDecoder wrapping the given channel and decoder
     */
    public static StreamDecoder forDecoder(ReadableByteChannel ch, CharsetDecoder dec, int minBufferCap) {
        return new StreamDecoder(ch, dec, minBufferCap);
    }
}
public class MessageLog { /** * Convert the command to the screen document type . * @ param strCommand The command text . * @ param The standard document type ( MAINT / DISPLAY / SELECT / MENU / etc ) . */ public int commandToDocType ( String strCommand ) { } }
if ( MessageLog . MESSAGE_SCREEN . equalsIgnoreCase ( strCommand ) ) return MessageLog . MESSAGE_SCREEN_MODE ; if ( MessageLog . SOURCE_SCREEN . equalsIgnoreCase ( strCommand ) ) return MessageLog . SOURCE_SCREEN_MODE ; return super . commandToDocType ( strCommand ) ;
public class ArrayUtils { /** * Replaces the first occurrence of the oldValue by the newValue . * If the item is found , a new array is returned . Otherwise the original array is returned . * @ param src * @ param oldValue the value to look for * @ param newValues the value that is inserted . * @ param < T > the type of the array * @ return */ public static < T > T [ ] replaceFirst ( T [ ] src , T oldValue , T [ ] newValues ) { } }
int index = indexOf ( src , oldValue ) ; if ( index == - 1 ) { return src ; } T [ ] dst = ( T [ ] ) Array . newInstance ( src . getClass ( ) . getComponentType ( ) , src . length - 1 + newValues . length ) ; // copy the first part till the match System . arraycopy ( src , 0 , dst , 0 , index ) ; // copy the second part from the match System . arraycopy ( src , index + 1 , dst , index + newValues . length , src . length - index - 1 ) ; // copy the newValues into the dst System . arraycopy ( newValues , 0 , dst , index , newValues . length ) ; return dst ;
public class AppHelper {
    /**
     * Retrieves the value of an argument as a string.
     *
     * @param name the (case-sensitive) name of the argument to retrieve.
     * @return the value of the argument as a string (as passed on the command line).
     *         Will terminate the application if a required argument is found missing,
     *         printing out detailed usage.
     */
    public String stringValue(String name) {
        try {
            // NOTE(review): '.Value' reads like a C#-style public field access. Verify the
            // argument type really exposes a public field named 'Value' — a getter would be
            // the conventional Java form. TODO confirm against the argument class.
            return this.getArgumentByName(name).Value;
        } catch (Exception npe) {
            // Any failure (including a missing argument) terminates the application with usage info.
            printErrorAndQuit(String.format("Argument '%s' was not provided.", name));
        }
        return null; // Unreachable in practice: printErrorAndQuit terminates the application.
    }
}
public class AbstractRestAgent {
    /**
     * Sends a request to the NFVO API for updating an instance of type T specified by its ID.
     *
     * @param object the new object that is sent in the update request
     * @param id the ID of the object to update
     * @return the updated object
     * @throws SDKException if the request fails
     */
    @Help(help = "Update the object of type {#} passing the new object and the id of the old object")
    public T update(final T object, final String id) throws SDKException {
        // requestPut returns an untyped result; the unchecked cast is the established pattern here.
        return (T) requestPut(id, object);
    }
}
public class JSTypeRegistry {
    /**
     * @return Which scope in the provided scope chain the provided name is declared in, or else null.
     *     This assumes that the Scope construction is complete. It cannot be used during scope
     *     construction to determine if a name is already defined, as a shadowed name from a parent
     *     scope would be returned.
     */
    private static StaticScope getLookupScope(StaticScope scope, String name) {
        // For non-root scopes, resolve the root element of the (possibly qualified) name
        // to its declaring slot and return that slot's scope.
        if (scope != null && scope.getParentScope() != null) {
            StaticSlot slot = scope.getSlot(getRootElementOfName(name));
            return slot != null ? slot.getScope() : null;
        }
        // Root scope (or null input): returned as-is without a slot lookup.
        return scope;
    }
}
public class IBAN {
    /**
     * Liefert die IBAN formattiert in der DIN-Form. Dies ist die uebliche
     * Papierform, in der die IBAN in 4er-Bloecke formattiert wird, jeweils
     * durch Leerzeichen getrennt.
     * (Returns the IBAN in DIN paper form: groups of 4 characters separated by spaces.)
     *
     * @return formatierte IBAN, z.B. "DE19 1234 1234 1234 1234 12"
     */
    public String getFormatted() {
        // NOTE(review): the appended padding below shows as a single space here, but
        // append(input, i, i + 4) requires enough trailing padding to cover the final
        // partial group (an IBAN length is not always a multiple of 4). Verify the
        // original literal contained sufficient trailing spaces — TODO confirm.
        String input = this.getUnformatted() + " ";
        StringBuilder buf = new StringBuilder();
        // Emit 4-character groups, each followed by a space; the trailing space is trimmed below.
        for (int i = 0; i < this.getUnformatted().length(); i += 4) {
            buf.append(input, i, i + 4);
            buf.append(' ');
        }
        return buf.toString().trim();
    }
}
public class GroupNameAndArnMarshaller { /** * Marshall the given parameter object . */ public void marshall ( GroupNameAndArn groupNameAndArn , ProtocolMarshaller protocolMarshaller ) { } }
if ( groupNameAndArn == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( groupNameAndArn . getGroupName ( ) , GROUPNAME_BINDING ) ; protocolMarshaller . marshall ( groupNameAndArn . getGroupArn ( ) , GROUPARN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class SqlFetcher { /** * Generates an instance of { @ link OpenedObject } for the given object . This is usually called * when prefetching is disabled . The retry is performed at the query execution layer . */ @ Override protected OpenedObject < T > generateOpenObject ( T object ) throws IOException { } }
final File outFile = File . createTempFile ( FETCH_FILE_PREFIX , null , temporaryDirectory ) ; return new OpenedObject < > ( object , openObjectFunction . open ( object , outFile ) , outFile :: delete ) ;
public class AbstractReplicator {
    /**
     * Stops a running replicator. This method returns immediately; when the replicator actually
     * stops, the replicator will change its status's activity level to `kCBLStopped`
     * and the replicator change notification will be notified accordingly.
     */
    public void stop() {
        synchronized (lock) {
            Log.i(DOMAIN, "%s: Replicator is stopping ...", this);
            if (c4repl != null) {
                c4repl.stop(); // this is async; status will change when repl actually stops
            } else {
                Log.i(DOMAIN, "%s: Replicator has been stopped or offlined ...", this);
            }
            // An OFFLINE replicator never transitions to STOPPED on its own,
            // so synthesize the stopped status change here.
            if (c4ReplStatus.getActivityLevel() == C4ReplicatorStatus.ActivityLevel.OFFLINE) {
                Log.i(DOMAIN, "%s: Replicator has been offlined; " + "make the replicator into the stopped state now.", this);
                final C4ReplicatorStatus c4replStatus = new C4ReplicatorStatus();
                c4replStatus.setActivityLevel(C4ReplicatorStatus.ActivityLevel.STOPPED);
                this.c4StatusChanged(c4replStatus);
            }
            // Stop tracking network reachability once stopping has been initiated.
            if (reachabilityManager != null) {
                reachabilityManager.removeNetworkReachabilityListener(this);
            }
        }
    }
}
public class RecommendedElasticPoolsInner {
    /**
     * Returns recommended elastic pools.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<RecommendedElasticPoolInner>> listByServerAsync(String resourceGroupName, String serverName, final ServiceCallback<List<RecommendedElasticPoolInner>> serviceCallback) {
        // Bridge the observable-based call to the callback-based ServiceFuture API.
        return ServiceFuture.fromResponse(listByServerWithServiceResponseAsync(resourceGroupName, serverName), serviceCallback);
    }
}
public class MPEGAudioFrameHeader { /** * Based on the bitrate index found in the header , try to find and set the * bitrate from the table . * @ param bitrateIndex the bitrate index read from the header */ private int findBitRate ( int bitrateIndex , int version , int layer ) { } }
int ind = - 1 ; if ( version == MPEG_V_1 ) { if ( layer == MPEG_L_1 ) { ind = 0 ; } else if ( layer == MPEG_L_2 ) { ind = 1 ; } else if ( layer == MPEG_L_3 ) { ind = 2 ; } } else if ( ( version == MPEG_V_2 ) || ( version == MPEG_V_25 ) ) { if ( layer == MPEG_L_1 ) { ind = 3 ; } else if ( ( layer == MPEG_L_2 ) || ( layer == MPEG_L_3 ) ) { ind = 4 ; } } if ( ( ind != - 1 ) && ( bitrateIndex >= 0 ) && ( bitrateIndex <= 15 ) ) { return bitrateTable [ bitrateIndex ] [ ind ] ; } return - 1 ;
public class AbstractInjectorCreator { /** * Sub generations */ protected void doSubGeneration ( TreeLogger logger , GeneratorContext context ) { } }
for ( InjectorWritterSubGenerate delegate : Iterables . filter ( this . delegates , InjectorWritterSubGenerate . class ) ) { delegate . subGenerate ( logger , context ) ; }
public class DefaultTraceEventParser {
    /**
     * Parses one raw trace line into an ITraceEvent.
     *
     * For performance reasons, trying to create as few objects as possible in this class.
     * Also for performance reasons, this code should also choose char searches over String
     * searches when possible.
     *
     * @param rawEventText the raw trace line; may carry an optional "~"-separated stack trace suffix
     * @param sourceLineNumber ignored — the line number is re-derived from the event text (see below)
     * @return the parsed event, or {@code null} when {@code rawEventText} is {@code null}
     * @throws IntraceException declared by the interface
     * @throws RuntimeException if the event text contains no colon at all
     * @throws NumberFormatException if a trailing line-number field is not a valid integer
     */
    @Override
    public ITraceEvent createEvent(String rawEventText, int sourceLineNumber) throws IntraceException {
        if (rawEventText == null) {
            return null;
        }
        // Optional stack-trace payload follows a '~' separator.
        String parts[] = rawEventText.split("~");
        rawEventText = parts[0];
        StackTraceElement[] stackTrace = null;
        if (parts.length == 2) {
            stackTrace = parseStackTrace(parts[1]);
        }
        int indexOfLastColon = rawEventText.lastIndexOf(TOKEN_COLON);
        if (indexOfLastColon == -1) {
            throw new RuntimeException("1000 Major exception. To separate parts of a date, there should be multiple colons in this event text, but found zero [" + rawEventText + "]");
        }
        sourceLineNumber = -1; // Input sourceLineNumber is ignored. -1 is default used if trace event doesn't have line number, which probably means agent wasn't configured to provide it.
        // If the character to the left of the last colon is a curly,
        // then the text to the right of the last colon is the source line number.
        char isCurlyBrace = rawEventText.charAt(indexOfLastColon - 1);
        if (isCurlyBrace == ENTRY_MARKER || isCurlyBrace == EXIT_MARKER) {
            sourceLineNumber = Integer.parseInt(rawEventText.substring(indexOfLastColon + 1));
        } else {
            // then there is no line number at the end of the line, so see if there is a curly at the end of the line.
            isCurlyBrace = rawEventText.charAt(rawEventText.length() - 1);
        }
        // Dispatch on the marker: entry/exit events share one parser, everything else uses the other.
        ITraceEvent event = null;
        if (isCurlyBrace == ENTRY_MARKER) {
            event = m_entryExitParser.createEvent(rawEventText, sourceLineNumber);
            event.setEventType(EventType.ENTRY);
        } else if (isCurlyBrace == EXIT_MARKER) {
            event = m_entryExitParser.createEvent(rawEventText, sourceLineNumber);
            event.setEventType(EventType.EXIT);
        } else {
            event = m_otherParser.createEvent(rawEventText, sourceLineNumber);
        }
        event.setStackTrace(stackTrace);
        return event;
    }
}
public class RDBMEntityLockStore {
    /**
     * Delete all IEntityLocks from the underlying store.
     *
     * @throws LockingException wrapping any SQLException raised while deleting
     */
    @Override
    public void deleteAll() throws LockingException {
        Connection conn = null;
        Statement stmnt = null;
        try {
            String sql = "DELETE FROM " + LOCK_TABLE;
            if (log.isDebugEnabled())
                log.debug("RDBMEntityLockStore.deleteAll(): " + sql);
            // Connections come from the service's pool and must be released (not closed) below.
            conn = RDBMServices.getConnection();
            try {
                stmnt = conn.createStatement();
                int rc = stmnt.executeUpdate(sql);
                if (log.isDebugEnabled()) {
                    String msg = "Deleted " + rc + " locks.";
                    log.debug("RDBMEntityLockStore.deleteAll(): " + msg);
                }
            } finally {
                // Close the statement even when executeUpdate throws.
                if (stmnt != null)
                    stmnt.close();
            }
        } catch (SQLException sqle) {
            throw new LockingException("Problem deleting locks", sqle);
        } finally {
            // Return the pooled connection in all cases.
            RDBMServices.releaseConnection(conn);
        }
    }
}
public class BitVector {
    /**
     * Sets every bit in this vector's range [start, finish) to the given value.
     * Convenience method.
     *
     * @param value {@code true} to set all bits, {@code false} to clear them
     */
    @Override
    public void setAll(boolean value) {
        // Fails fast if this vector is immutable.
        checkMutable();
        if (value) {
            performAdjSet(start, finish);
        } else {
            performAdjClear(start, finish);
        }
    }
}
public class CategoryGraph {
    /**
     * Creates the hyponym map, that maps from nodes to their (recursive) number of hyponyms
     * for each node. "Recursive" means that the hyponyms of hyponyms are also taken into account.
     *
     * Implementation: a bottom-up worklist pass starting from the leaf nodes; a node is only
     * finalized once all of its children have counts, otherwise it is re-queued. Results are
     * cached to disk and re-loaded on subsequent calls.
     *
     * @throws WikiApiException if the traversal does not cover every graph node
     */
    private void createHyponymCountMap() throws WikiApiException {
        // do only create hyponymMap, if it was not already computed
        if (hyponymCountMap != null) {
            return;
        }
        // Disk cache is keyed by the Wikipedia id, so different wikis do not collide.
        File hyponymCountMapSerializedFile = new File(wiki.getWikipediaId() + "_" + hyponymCountMapFilename);
        hyponymCountMap = new HashMap<Integer, Integer>();
        if (hyponymCountMapSerializedFile.exists()) {
            logger.info("Loading saved hyponymyCountMap ...");
            hyponymCountMap = this.deserializeMap(hyponymCountMapSerializedFile);
            logger.info("Done loading saved hyponymyCountMap");
            return;
        }
        // a queue holding the nodes to process
        List<Integer> queue = new ArrayList<Integer>();
        // In the category graph a node may have more than one father.
        // Thus, we check whether a node was already visited.
        // Then, it is not expanded again.
        Set<Integer> visited = new HashSet<Integer>();
        // initialize the queue with all leaf nodes
        Set<Integer> leafNodes = this.__getLeafNodes();
        queue.addAll(leafNodes);
        logger.info(leafNodes.size() + " leaf nodes.");
        // while the queue is not empty
        while (!queue.isEmpty()) {
            // remove first element from queue
            int currNode = queue.get(0);
            queue.remove(0);
            // logger.info(queue.size());
            if (visited.contains(currNode)) {
                continue;
            }
            Set<Integer> children = __getChildren(currNode);
            int validChildren = 0;
            int sumChildHyponyms = 0;
            boolean invalid = false;
            for (int child : children) {
                if (graph.containsVertex(child)) {
                    if (hyponymCountMap.containsKey(child)) {
                        sumChildHyponyms += hyponymCountMap.get(child);
                        validChildren++;
                    } else {
                        invalid = true;
                    }
                }
            }
            if (invalid) {
                // One of the childs is not in the hyponymCountMap yet
                // Re-Enter the node into the queue and continue with next node
                queue.add(currNode);
                continue;
            }
            // mark as visited
            visited.add(currNode);
            // number of hyponomys of current node is the number of its own hyponomies and the sum of the hyponomies of its children.
            int currNodeHyponomyCount = validChildren + sumChildHyponyms;
            hyponymCountMap.put(currNode, currNodeHyponomyCount);
            // add parents of current node to queue
            for (int parent : __getParents(currNode)) {
                if (graph.containsVertex(parent)) {
                    queue.add(parent);
                }
            }
        } // while queue not empty
        logger.info(visited.size() + " nodes visited");
        // Sanity checks: the pass must have reached and counted every vertex in the graph.
        if (visited.size() != graph.vertexSet().size()) {
            throw new WikiApiException("Visited only " + visited.size() + " out of " + graph.vertexSet().size() + " nodes.");
        }
        if (hyponymCountMap.size() != graph.vertexSet().size()) {
            throw new WikiApiException("HyponymCountMap does not contain an entry for each node in the graph." + hyponymCountMap.size() + "/" + graph.vertexSet().size());
        }
        scaleHyponymCountMap();
        logger.info("Computed hyponymCountMap");
        // Persist the result so future calls can load it from disk.
        serializeMap(hyponymCountMap, hyponymCountMapSerializedFile);
        logger.info("Serialized hyponymCountMap");
    }
}
public class Duzzt { /** * Initialize the Duzzt embedded DSL generator . * @ param utils the APUtils instance wrapping the { @ link javax . annotation . processing . ProcessingEnvironment } * @ throws DuzztInitializationException if a fatal error occurs during initialization */ public void init ( APUtils utils ) throws DuzztInitializationException { } }
URL url = GenerateEDSLProcessor . class . getResource ( ST_RESOURCE_NAME ) ; this . sourceGenGroup = new STGroupFile ( url , ST_ENCODING , ST_DELIM_START_CHAR , ST_DELIM_STOP_CHAR ) ; sourceGenGroup . setListener ( new ReporterDiagnosticListener ( utils . getReporter ( ) ) ) ; sourceGenGroup . load ( ) ; if ( ! sourceGenGroup . isDefined ( ST_MAIN_TEMPLATE_NAME ) ) { sourceGenGroup = null ; throw new DuzztInitializationException ( "Could not find main template '" + ST_MAIN_TEMPLATE_NAME + "' in template group file " + url . toString ( ) ) ; } this . isJava9OrNewer = isJava9OrNewer ( utils . getProcessingEnv ( ) . getSourceVersion ( ) ) ;
public class CassandraJavaRDD { /** * Narrows down the selected set of columns . * Use this for better performance , when you don ' t need all the columns in the result RDD . When * called multiple times , it selects the subset of the already selected columns , so after a column * was removed by the previous { @ code select } call , it is not possible to add it back . < / p > */ public CassandraJavaRDD < R > select ( ColumnRef ... columns ) { } }
Seq < ColumnRef > columnRefs = JavaApiHelper . toScalaSeq ( columns ) ; CassandraRDD < R > newRDD = rdd ( ) . select ( columnRefs ) ; return wrap ( newRDD ) ;
public class RDBMPermissionImpl { /** * Insert the method ' s description here . Creation date : ( 11/6/01 5:19:57 PM ) * @ return java . lang . String */ private static java . lang . String getFindPermissionSql ( ) { } }
if ( findPermissionSql == null ) { StringBuffer sqlBuff = new StringBuffer ( getSelectPermissionSql ( ) ) ; sqlBuff . append ( "WHERE " ) ; sqlBuff . append ( OWNER_COLUMN ) ; sqlBuff . append ( " = ? AND " ) ; sqlBuff . append ( PRINCIPAL_TYPE_COLUMN ) ; sqlBuff . append ( " = ? AND " ) ; sqlBuff . append ( PRINCIPAL_KEY_COLUMN ) ; sqlBuff . append ( " = ? AND " ) ; sqlBuff . append ( ACTIVITY_COLUMN ) ; sqlBuff . append ( " = ? AND " ) ; sqlBuff . append ( TARGET_COLUMN ) ; sqlBuff . append ( " = ? " ) ; sqlBuff . append ( TYPE_COLUMN ) ; sqlBuff . append ( " = ? " ) ; findPermissionSql = sqlBuff . toString ( ) ; } return findPermissionSql ;
public class UUIDIdentifierGenerator { public Serializable generate ( SessionImplementor session , Object object ) throws HibernateException { } }
UUID uuid = UUID . randomUUID ( ) ; StringBuilder sb = new StringBuilder ( ) ; sb . append ( Long . toHexString ( uuid . getMostSignificantBits ( ) ) ) ; sb . append ( Long . toHexString ( uuid . getLeastSignificantBits ( ) ) ) ; while ( sb . length ( ) < 32 ) { sb . append ( '0' ) ; } return sb . toString ( ) ;
public class DefaultOpenAPIModelFilter {

    /**
     * {@inheritDoc}
     *
     * <p>Default implementation: performs no filtering and returns the extension
     * value unchanged.
     */
    @Override
    public Object visitExtension(Context context, String key, Object extension) {
        return extension;
    }
}
public class CacheProxy { /** * Performs the bulk load where the existing entries are replace . */ private void loadAllAndReplaceExisting ( Set < ? extends K > keys ) { } }
int [ ] ignored = { 0 } ; Map < K , V > loaded = cacheLoader . get ( ) . loadAll ( keys ) ; for ( Map . Entry < ? extends K , ? extends V > entry : loaded . entrySet ( ) ) { putNoCopyOrAwait ( entry . getKey ( ) , entry . getValue ( ) , /* publishToWriter */ false , ignored ) ; }
public class ChronoLocalDateTimeImpl {

    /**
     * Calculates the amount of time until another date-time, in terms of the given unit.
     *
     * <p>For time-based units the whole-day difference is converted into the unit and
     * combined with the time-of-day difference; for date-based units the end date is
     * adjusted down by one day when the end time-of-day is earlier than this one, so
     * that partial days are not counted.
     *
     * @param endExclusive the end date-time, exclusive, converted via this chronology
     * @param unit the unit to measure the amount in
     * @return the amount of time between this date-time and the end
     */
    @Override
    public long until(Temporal endExclusive, TemporalUnit unit) {
        Objects.requireNonNull(endExclusive, "endExclusive");
        @SuppressWarnings("unchecked")
        ChronoLocalDateTime<D> end = (ChronoLocalDateTime<D>) getChronology().localDateTime(endExclusive);
        if (unit instanceof ChronoUnit) {
            if (unit.isTimeBased()) {
                // Whole days between the two dates, expressed in the requested unit.
                long amount = end.getLong(EPOCH_DAY) - date.getLong(EPOCH_DAY);
                switch ((ChronoUnit) unit) {
                    case NANOS: amount = Math.multiplyExact(amount, NANOS_PER_DAY); break;
                    case MICROS: amount = Math.multiplyExact(amount, MICROS_PER_DAY); break;
                    case MILLIS: amount = Math.multiplyExact(amount, MILLIS_PER_DAY); break;
                    case SECONDS: amount = Math.multiplyExact(amount, SECONDS_PER_DAY); break;
                    case MINUTES: amount = Math.multiplyExact(amount, MINUTES_PER_DAY); break;
                    case HOURS: amount = Math.multiplyExact(amount, HOURS_PER_DAY); break;
                    case HALF_DAYS: amount = Math.multiplyExact(amount, 2); break;
                }
                // Add the (possibly negative) time-of-day component.
                return Math.addExact(amount, time.until(end.toLocalTime(), unit));
            }
            ChronoLocalDate endDate = end.toLocalDate();
            // If the end time-of-day is before this one, the last day is incomplete
            // and must not be counted for date-based units.
            if (end.toLocalTime().isBefore(time)) {
                endDate = endDate.minus(1, ChronoUnit.DAYS);
            }
            return date.until(endDate, unit);
        }
        // Non-ChronoUnit: delegate to the unit itself (null-checked only on this path,
        // matching the original behavior).
        Objects.requireNonNull(unit, "unit");
        return unit.between(this, end);
    }
}
public class HttpWorkerAbstract { /** * Returns an instance of a HTTP Worker based on the request method * @ param requestMethod One of GET , PUT , POST , DELETE , HEAD * @ param client * @ return */ public static HttpWorkerAbstract getWorkerStrategy ( final String requestMethod , final HttpClient client ) { } }
if ( requestMethod . equals ( HEAD ) ) { return new HttpWorkerHead ( client ) ; } else if ( requestMethod . equals ( GET ) ) { return new HttpWorkerGet ( client ) ; } else if ( requestMethod . equals ( POST ) ) { return new HttpWorkerPost ( client ) ; } else if ( requestMethod . equals ( PUT ) ) { return new HttpWorkerPut ( client ) ; } else if ( requestMethod . equals ( DELETE ) ) { return new HttpWorkerDelete ( client ) ; } return new HttpWorkerGet ( client ) ;
public class GlobalNamespace { /** * If the client adds new nodes to the AST , scan these new nodes to see if they ' ve added any * references to the global namespace . * @ param newNodes New nodes to check . */ void scanNewNodes ( Set < AstChange > newNodes ) { } }
BuildGlobalNamespace builder = new BuildGlobalNamespace ( ) ; for ( AstChange info : newNodes ) { if ( ! info . node . isQualifiedName ( ) && ! NodeUtil . mayBeObjectLitKey ( info . node ) ) { continue ; } scanFromNode ( builder , info . module , info . scope , info . node ) ; }
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link CoordinatesType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link CoordinatesType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "tupleList" ) public JAXBElement < CoordinatesType > createTupleList ( CoordinatesType value ) { } }
return new JAXBElement < CoordinatesType > ( _TupleList_QNAME , CoordinatesType . class , null , value ) ;
public class MimeTypeDetector { /** * Returns true if subarrays are equal , with the given mask . * The mask must have length < tt > len < / tt > . */ private boolean subArraysEqualWithMask ( byte [ ] a , int aStart , byte [ ] b , int bStart , byte [ ] mask , int maskStart , int len ) { } }
for ( int i = aStart , j = bStart , k = maskStart ; len > 0 ; i ++ , j ++ , k ++ , len -- ) { if ( ( a [ i ] & mask [ k ] ) != ( b [ j ] & mask [ k ] ) ) { return false ; } } return true ;
public class BeanUtils {

    /**
     * Returns a newly allocated instance of the class represented by {@code clazz},
     * created via the single-argument constructor matching {@code fieldType} and
     * invoked with {@code value}.
     *
     * @param clazz the type to instantiate
     * @param fieldType the single constructor parameter's type
     * @param value the single constructor parameter's value
     * @return a new instance of {@code clazz}
     */
    public static <T> T newInstance(final Class<T> clazz, final Class<?> fieldType, final Object value) {
        return ClassUtils.instantiateClass(clazz, getParameterTypes(fieldType), getParameterValues(value));
    }
}
public class ntpparam {

    /**
     * Use this API to unset the properties of the ntpparam resource.
     * Properties that need to be unset are specified in the args array.
     *
     * <p>NOTE(review): the {@code resource} parameter is not consulted — a fresh
     * {@code ntpparam} instance is used for the unset call, so only the property
     * names in {@code args} matter. This looks like the NITRO pattern for global
     * (singleton) parameter resources; confirm it is intentional.
     */
    public static base_response unset(nitro_service client, ntpparam resource, String[] args) throws Exception {
        ntpparam unsetresource = new ntpparam();
        return unsetresource.unset_resource(client, args);
    }
}
public class BeanUtil { /** * 解析Bean中的属性值 * @ param bean Bean对象 , 支持Map 、 List 、 Collection 、 Array * @ param expression 表达式 , 例如 : person . friend [ 5 ] . name * @ see BeanPath # get ( Object ) * @ since 4.0.6 */ public static void setProperty ( Object bean , String expression , Object value ) { } }
BeanPath . create ( expression ) . set ( bean , value ) ;
public class SparseDirectedTypedEdgeSet { /** * { @ inheritDoc } */ public int disconnect ( int v ) { } }
if ( connected . remove ( v ) ) { int removed = 0 ; BitSet b = inEdges . remove ( v ) ; if ( b != null ) { int edges = b . cardinality ( ) ; size -= edges ; removed += edges ; } b = outEdges . remove ( v ) ; if ( b != null ) { int edges = b . cardinality ( ) ; size -= edges ; removed += edges ; } assert removed > 0 : "connected removed an edge that wasn't listed elsewhere" ; return removed ; } return 0 ;
public class JavacJ2ObjCIncompatibleStripper { /** * Checks for any J2ObjCIncompatible annotations . Returns whether * the caller should to continue scanning this node . */ private boolean checkAnnotations ( List < ? extends AnnotationTree > annotations , Tree node ) { } }
for ( AnnotationTree annotation : annotations ) { if ( isJ2ObjCIncompatible ( annotation ) ) { nodesToStrip . add ( node ) ; return false ; } } return true ;
public class ComponentView { /** * Adds an individual component to this view . * @ param component the Component to add * @ param addRelationships whether to add relationships to / from the component */ public void add ( Component component , boolean addRelationships ) { } }
if ( component != null ) { if ( ! component . getContainer ( ) . equals ( getContainer ( ) ) ) { throw new IllegalArgumentException ( "Only components belonging to " + container . getName ( ) + " can be added to this view." ) ; } addElement ( component , addRelationships ) ; }
public class DescribeAutomationExecutionsResult { /** * The list of details about each automation execution which has occurred which matches the filter specification , if * any . * @ return The list of details about each automation execution which has occurred which matches the filter * specification , if any . */ public java . util . List < AutomationExecutionMetadata > getAutomationExecutionMetadataList ( ) { } }
if ( automationExecutionMetadataList == null ) { automationExecutionMetadataList = new com . amazonaws . internal . SdkInternalList < AutomationExecutionMetadata > ( ) ; } return automationExecutionMetadataList ;
public class Actors { /** * Null - safe method for setting keyboard focus . * @ param actor if is not null and has a stage , will be set as stage ' s keyboard focused actor . */ public static void setKeyboardFocus ( final Actor actor ) { } }
if ( actor != null && actor . getStage ( ) != null ) { actor . getStage ( ) . setKeyboardFocus ( actor ) ; }
public class VueGWTTools { /** * Return a " deep " value in a given object by following an expression in the form : * " parent . child . property " . This only works if all the chain is exposed using JsInterop . * @ param object The root object to get on * @ param path The path to follow * @ param < T > The type of object we get in return * @ return The object at the end of the chain */ public static < T > T getDeepValue ( Object object , String path ) { } }
JsPropertyMap objectMap = ( JsPropertyMap ) object ; String [ ] pathSplit = path . split ( "\\." ) ; for ( String s : pathSplit ) { objectMap = ( JsPropertyMap ) objectMap . get ( s ) ; } return ( T ) objectMap ;
public class JSONObject { /** * Get the string associated with a key . * @ param key * A key string . * @ return A string which is the value . * @ throws JSONException * if there is no string value for the key . */ public String getString ( String key ) throws JSONException { } }
Object object = this . get ( key ) ; if ( object instanceof String ) { return ( String ) object ; } else if ( NULL . equals ( object ) ) { return ( String ) null ; } throw new JSONException ( "JSONObject[" + quote ( key ) + "] not a string." ) ;
public class MergePolicyValidator { /** * Checks if the given { @ link InMemoryFormat } can be merged by the given * { @ code mergePolicy } instance . * When a wrong policy is detected , it does one of two things : * if { @ code failFast } is { @ code true } and the cluster version is 3.10 or later , * it throws an { @ link InvalidConfigurationException } , otherwise it logs a warning . * @ return { @ code true } if the given { @ code inMemoryFormat } can be merged by * the supplied { @ code mergePolicy } , { @ code false } otherwise */ public static boolean checkMergePolicySupportsInMemoryFormat ( String name , Object mergePolicy , InMemoryFormat inMemoryFormat , boolean failFast , ILogger logger ) { } }
if ( inMemoryFormat != NATIVE ) { return true ; } if ( mergePolicy instanceof SplitBrainMergePolicy ) { return true ; } if ( failFast ) { throw new InvalidConfigurationException ( createSplitRecoveryWarningMsg ( name , mergePolicy . getClass ( ) . getName ( ) ) ) ; } logger . warning ( createSplitRecoveryWarningMsg ( name , mergePolicy . getClass ( ) . getName ( ) ) ) ; return false ;
public class DescribeWorkspaceBundlesRequest { /** * The identifiers of the bundles . You cannot combine this parameter with any other filter . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setBundleIds ( java . util . Collection ) } or { @ link # withBundleIds ( java . util . Collection ) } if you want to * override the existing values . * @ param bundleIds * The identifiers of the bundles . You cannot combine this parameter with any other filter . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeWorkspaceBundlesRequest withBundleIds ( String ... bundleIds ) { } }
if ( this . bundleIds == null ) { setBundleIds ( new com . amazonaws . internal . SdkInternalList < String > ( bundleIds . length ) ) ; } for ( String ele : bundleIds ) { this . bundleIds . add ( ele ) ; } return this ;
public class Args { /** * Return the int - argument with the given name . * @ param name the argument name * @ return the int argument value , if any */ public Optional < Integer > intArg ( final String name ) { } }
return arg ( name ) . flatMap ( s -> parse ( s , Integer :: valueOf ) ) ;
public class MannTriangulator {

    /**
     * Release a pooled bag back onto the internal free list.
     *
     * @param pb the bag to release
     * @return the next available bag (the bag that followed {@code pb} before it
     *         was released)
     */
    private PointBag freePointBag(PointBag pb) {
        // Remember the successor before the bag is recycled.
        PointBag next = pb.next;
        // Reset the bag's contents. NOTE(review): assumes clear() does not touch
        // the 'next' link — confirm against PointBag.
        pb.clear();
        // Push the bag onto the head of the free list.
        pb.next = nextFreePointBag;
        nextFreePointBag = pb;
        return next;
    }
}
public class AbstractMedia { /** * Queues the supplied notification up to be dispatched to this abstract media ' s observers . */ public void queueNotification ( ObserverList . ObserverOp < Object > amop ) { } }
if ( _observers != null ) { if ( _mgr != null ) { _mgr . queueNotification ( _observers , amop ) ; } else { log . warning ( "Have no manager, dropping notification" , "media" , this , "op" , amop ) ; } }
public class MPP9Reader { /** * Retrieve any resource field aliases defined in the MPP file . * @ param map index to field map * @ param data resource field name alias data */ private void processFieldNameAliases ( Map < Integer , FieldType > map , byte [ ] data ) { } }
if ( data != null ) { int offset = 0 ; int index = 0 ; CustomFieldContainer fields = m_file . getCustomFields ( ) ; while ( offset < data . length ) { String alias = MPPUtility . getUnicodeString ( data , offset ) ; if ( ! alias . isEmpty ( ) ) { FieldType field = map . get ( Integer . valueOf ( index ) ) ; if ( field != null ) { fields . getCustomField ( field ) . setAlias ( alias ) ; } } offset += ( alias . length ( ) + 1 ) * 2 ; index ++ ; } }
public class FlexBase64 {

    /**
     * Decodes a Base64 encoded byte array into a new byte buffer. The returned byte
     * buffer is a heap buffer, so the backing array can be retrieved via
     * {@link java.nio.ByteBuffer#array()}, {@link java.nio.ByteBuffer#arrayOffset()}
     * and {@link java.nio.ByteBuffer#limit()}. The limit matters because the backing
     * array may be larger than the decoded data (length estimation avoids an extra copy).
     *
     * @param source the Base64 content to decode
     * @param off position to start decoding from in source
     * @param limit position to stop decoding in source (exclusive)
     * @return a byte buffer containing the decoded output
     * @throws IOException if the encoding is invalid or corrupted
     */
    public static ByteBuffer decode(byte[] source, int off, int limit) throws IOException {
        // The final flag selects the decoder variant; presumably 'false' means the
        // standard (non-URL-safe) alphabet — confirm against Decoder.decode.
        return Decoder.decode(source, off, limit, false);
    }
}
public class CreateIntegrationResponseResult {

    /**
     * A key-value map specifying response parameters that are passed to the method
     * response from the backend. The key is a method response header parameter name
     * (pattern {@code method.response.header.{name}}); the mapped value is an
     * integration response header value, a static value enclosed in single quotes,
     * or a JSON expression from the integration response body (patterns
     * {@code integration.response.header.{name}} or
     * {@code integration.response.body.{JSON-expression}}).
     *
     * @param responseParameters the response parameter map as described above
     * @return this object, for method chaining
     */
    public CreateIntegrationResponseResult withResponseParameters(java.util.Map<String, String> responseParameters) {
        setResponseParameters(responseParameters);
        return this;
    }
}
public class ConfusionMatrix { /** * The total number of instances covered by this confusion matrix . * @ return the total number of instances covered by this confusion matrix */ public int totalInstances ( ) { } }
int total = 0 ; for ( int i = 0 ; i < confusion . length ; i ++ ) { for ( int j = 0 ; j < confusion [ i ] . length ; j ++ ) { total += confusion [ i ] [ j ] ; } } return total ;
public class AbstractContext { /** * / * ( non - Javadoc ) * @ see org . jboss . arquillian . core . spi . context . Context # clearAll ( ) */ @ Override public void clearAll ( ) { } }
synchronized ( this ) { if ( isActive ( ) ) { deactivateAll ( ) ; } activeStore . remove ( ) ; for ( Map . Entry < T , ObjectStore > entry : stores . entrySet ( ) ) { entry . getValue ( ) . clear ( ) ; } stores . clear ( ) ; }
public class Mutator { /** * create a new residue which is of the new type . * Only the atoms N , Ca , C , O , Cb will be considered . * @ param oldAmino * @ param newType * @ return a new , mutated , residue * @ throws PDBParseException */ public AminoAcid mutateResidue ( AminoAcid oldAmino , String newType ) throws PDBParseException { } }
AminoAcid newgroup = new AminoAcidImpl ( ) ; newgroup . setResidueNumber ( oldAmino . getResidueNumber ( ) ) ; newgroup . setPDBName ( newType ) ; AtomIterator aiter = new AtomIterator ( oldAmino ) ; while ( aiter . hasNext ( ) ) { Atom a = aiter . next ( ) ; if ( supportedAtoms . contains ( a . getName ( ) ) ) { newgroup . addAtom ( a ) ; } } return newgroup ;
public class Cob2AvroJob {

    /**
     * Gets the job input record choice strategy class.
     *
     * @param conf the job configuration
     * @return the configured record choice strategy class (a subtype of
     *         {@link FromCobolChoiceStrategy}), or null if not set
     */
    public static Class<? extends FromCobolChoiceStrategy> getInputChoiceStrategy(Configuration conf) {
        return conf.getClass(CONF_INPUT_RECORD_CHOICE_STRATEGY_CLASS, null, FromCobolChoiceStrategy.class);
    }
}
public class ObjectUtils { /** * Calls the named bean setter property on the object , converting * the given value to the correct type . Note that parameter ' prop ' * is converted to a method name according to Lisp convention : * ( foo - bar ) , and not the usual Java dromedaryCase ( fooBar ) . So * " foo - bar " will become " setFooBar " . * < p > The value conversion is mostly straightforward , except that if * the type of the method ' s first parameter is not a * java . lang . Something , then the method will assume that the value * is the name of an object in the ' objects ' map , and pass that . * < p > Further , if the type is a Collection , the method will assume * the value is a list of object names separated by whitespace . */ public static void setBeanProperty ( Object object , String prop , String value , Map < String , Object > objects ) { } }
prop = makePropertyName ( prop ) ; try { boolean found = false ; Method [ ] methods = object . getClass ( ) . getMethods ( ) ; for ( int ix = 0 ; ix < methods . length && ! found ; ix ++ ) { if ( ! methods [ ix ] . getName ( ) . equals ( prop ) ) continue ; if ( methods [ ix ] . getParameterTypes ( ) . length != 1 ) continue ; Class type = methods [ ix ] . getParameterTypes ( ) [ 0 ] ; methods [ ix ] . invoke ( object , convertToType ( value , type , objects ) ) ; found = true ; } if ( ! found ) throw new DukeConfigException ( "Couldn't find method '" + prop + "' in " + "class " + object . getClass ( ) ) ; } catch ( IllegalArgumentException e ) { throw new DukeConfigException ( "Couldn't set bean property " + prop + " on object of class " + object . getClass ( ) + ": " + e ) ; } catch ( IllegalAccessException e ) { throw new DukeException ( e ) ; } catch ( InvocationTargetException e ) { throw new DukeConfigException ( "Couldn't set bean property " + prop + " on object of class " + object . getClass ( ) + ": " + e ) ; }
public class Deadline {

    /**
     * Creates a deadline {@code duration} time units in the future, measured
     * against the supplied {@link Ticker}. For testing.
     *
     * @param duration offset from now, in {@code units}
     * @param units the time unit of {@code duration}; must not be null
     * @param ticker the time source to measure against
     */
    static Deadline after(long duration, TimeUnit units, Ticker ticker) {
        checkNotNull(units, "units");
        // NOTE(review): the trailing boolean flag's meaning is defined by the
        // private Deadline constructor — confirm there before relying on it.
        return new Deadline(ticker, units.toNanos(duration), true);
    }
}
public class CPInstancePersistenceImpl {

    /**
     * Returns the cp instance where CPDefinitionId = &#63; and sku = &#63;, or
     * <code>null</code> if it could not be found, optionally using the finder cache.
     *
     * <p>A cached empty list marks a known miss: when the cache returns a List the
     * final branch returns <code>null</code> without hitting the database.
     *
     * @param CPDefinitionId the cp definition ID
     * @param sku the sku
     * @param retrieveFromCache whether to retrieve from the finder cache
     * @return the matching cp instance, or <code>null</code> if none matches
     */
    @Override
    public CPInstance fetchByC_S(long CPDefinitionId, String sku, boolean retrieveFromCache) {
        Object[] finderArgs = new Object[] { CPDefinitionId, sku };
        Object result = null;
        if (retrieveFromCache) {
            result = finderCache.getResult(FINDER_PATH_FETCH_BY_C_S, finderArgs, this);
        }
        // Guard against stale cache entries whose columns no longer match the arguments.
        if (result instanceof CPInstance) {
            CPInstance cpInstance = (CPInstance) result;
            if ((CPDefinitionId != cpInstance.getCPDefinitionId()) || !Objects.equals(sku, cpInstance.getSku())) {
                result = null;
            }
        }
        if (result == null) {
            StringBundler query = new StringBundler(4);
            query.append(_SQL_SELECT_CPINSTANCE_WHERE);
            query.append(_FINDER_COLUMN_C_S_CPDEFINITIONID_2);
            // Select the sku predicate variant: IS NULL, empty string, or bound parameter.
            boolean bindSku = false;
            if (sku == null) {
                query.append(_FINDER_COLUMN_C_S_SKU_1);
            }
            else if (sku.equals("")) {
                query.append(_FINDER_COLUMN_C_S_SKU_3);
            }
            else {
                bindSku = true;
                query.append(_FINDER_COLUMN_C_S_SKU_2);
            }
            String sql = query.toString();
            Session session = null;
            try {
                session = openSession();
                Query q = session.createQuery(sql);
                QueryPos qPos = QueryPos.getInstance(q);
                qPos.add(CPDefinitionId);
                if (bindSku) {
                    qPos.add(sku);
                }
                List<CPInstance> list = q.list();
                if (list.isEmpty()) {
                    // Cache the empty list so repeated misses skip the database.
                    finderCache.putResult(FINDER_PATH_FETCH_BY_C_S, finderArgs, list);
                }
                else {
                    CPInstance cpInstance = list.get(0);
                    result = cpInstance;
                    cacheResult(cpInstance);
                }
            }
            catch (Exception e) {
                finderCache.removeResult(FINDER_PATH_FETCH_BY_C_S, finderArgs);
                throw processException(e);
            }
            finally {
                closeSession(session);
            }
        }
        // A List result means a cached miss; anything else is the entity (or null).
        if (result instanceof List<?>) {
            return null;
        }
        else {
            return (CPInstance) result;
        }
    }
}
public class CmsTree { /** * Creates the output for a tree node . < p > * @ param path the path of the resource represented by this tree node * @ param title the resource name * @ param type the resource type * @ param folder if the resource is a folder * @ param state the resource state * @ param grey if true , the node is displayed in grey * @ return the output for a tree node */ private String getNode ( String path , String title , int type , boolean folder , CmsResourceState state , boolean grey ) { } }
StringBuffer result = new StringBuffer ( 64 ) ; String parent = CmsResource . getParentFolder ( path ) ; result . append ( "parent.aC(\"" ) ; // name result . append ( title ) ; result . append ( "\"," ) ; // type result . append ( type ) ; result . append ( "," ) ; // folder if ( folder ) { result . append ( 1 ) ; } else { result . append ( 0 ) ; } result . append ( "," ) ; // hashcode of path result . append ( path . hashCode ( ) ) ; result . append ( "," ) ; // hashcode of parent path result . append ( ( parent != null ) ? parent . hashCode ( ) : 0 ) ; result . append ( "," ) ; // resource state result . append ( state ) ; result . append ( "," ) ; // project status if ( grey ) { result . append ( 1 ) ; } else { result . append ( 0 ) ; } result . append ( ");\n" ) ; return result . toString ( ) ;
public class EditText { /** * @ return the minimum number of lines displayed in this TextView , or - 1 if the minimum * height was set in pixels instead using { @ link # setMinHeight ( int ) or # setDividerHeight ( int ) } . * @ see # setMinLines ( int ) * @ attr ref android . R . styleable # TextView _ minLines */ @ TargetApi ( Build . VERSION_CODES . JELLY_BEAN ) public int getMinLines ( ) { } }
if ( Build . VERSION . SDK_INT >= Build . VERSION_CODES . JELLY_BEAN ) return mInputView . getMinLines ( ) ; return - 1 ;
public class GVRCursorController { /** * Get the latest key event processed by the { @ link GVRCursorController } if * there is one ( not all { @ link GVRCursorController } report { @ link KeyEvent } * s ) . Note that this value will be null if the latest event processed by * the { @ link GVRCursorController } did not contain a { @ link KeyEvent } . * Note that this function also returns a null . To get every * { @ link KeyEvent } reported by the { @ link GVRCursorController } use the * { @ link IControllerEvent } or the { @ link ISensorEvents } listener to * query for the { @ link KeyEvent } whenever a a callback is made . * The { @ link KeyEvent } would be valid for the lifetime of that callback and * would be reset to null on completion . * @ return the { @ link KeyEvent } or null if there isn ' t one . */ public KeyEvent getKeyEvent ( ) { } }
synchronized ( eventLock ) { if ( processedKeyEvent . isEmpty ( ) ) { return null ; } else { return processedKeyEvent . get ( processedKeyEvent . size ( ) - 1 ) ; } }
public class CmsSecurityManager {

    /**
     * Returns the child resources of a resource, that is the resources contained in
     * a folder. With <code>getFolders</code> and <code>getFiles</code> you control
     * what resource types appear in the result: files, folders, or both. Mainly used
     * by the workplace explorer.
     *
     * @param context the current request context
     * @param resource the resource to return the child resources for
     * @param filter the resource filter to use
     * @param getFolders if true the child folders are included in the result
     * @param getFiles if true the child files are included in the result
     * @return a list of all child resources
     * @throws CmsException if something goes wrong
     * @throws CmsSecurityException if the user has insufficient permission for the
     *         given resource (read is required)
     */
    public List<CmsResource> readChildResources(CmsRequestContext context, CmsResource resource, CmsResourceFilter filter, boolean getFolders, boolean getFiles) throws CmsException, CmsSecurityException {
        List<CmsResource> result = null;
        CmsDbContext dbc = m_dbContextFactory.getDbContext(context);
        try {
            // check the access permissions
            checkPermissions(dbc, resource, CmsPermissionSet.ACCESS_READ, true, CmsResourceFilter.ALL);
            result = m_driverManager.readChildResources(dbc, resource, filter, getFolders, getFiles, true);
        } catch (Exception e) {
            // Delegates error handling to the db context; NOTE(review): if report()
            // does not rethrow, this method returns null — confirm callers expect that.
            dbc.report(null, Messages.get().container(Messages.ERR_READ_CHILD_RESOURCES_1, context.getSitePath(resource)), e);
        } finally {
            dbc.clear();
        }
        return result;
    }
}
public class DataSource {

    /**
     * Create a database as a data source.
     *
     * @param database the database used as a source of data for the query; must not be null
     * @return a {@code DataSource.As} object wrapping the database
     * @throws IllegalArgumentException if {@code database} is null
     */
    @NonNull
    public static As database(@NonNull com.couchbase.lite.Database database) {
        // Explicit runtime check in addition to @NonNull, since the annotation is
        // not enforced at runtime.
        if (database == null) {
            throw new IllegalArgumentException("database cannot be null.");
        }
        return new As(database);
    }
}
public class InternalXbaseWithAnnotationsParser {
    /**
     * InternalXbaseWithAnnotations.g:433:1: ruleXAnnotationElementValue returns [EObject current=null] :
     *   ( ( ((( () '#' '[' ))=&gt;( () otherlv_1='#' otherlv_2='[' ))
     *       ( ((lv_elements_3_0=ruleXAnnotationOrExpression))
     *         (otherlv_4=',' ((lv_elements_5_0=ruleXAnnotationOrExpression)))* )?
     *       otherlv_6=']' )
     *   | this_XAnnotationOrExpression_7=ruleXAnnotationOrExpression );
     *
     * ANTLR-generated rule: parses an annotation element value, which is either a
     * '#[ ... ]' list literal of comma-separated values (alternative 1) or a plain
     * annotation-or-expression (alternative 2).
     * NOTE: generated code — do not hand-edit the logic; regenerate from the grammar instead.
     */
    public final EObject ruleXAnnotationElementValue() throws RecognitionException {
        EObject current = null;

        Token otherlv_1 = null;
        Token otherlv_2 = null;
        Token otherlv_4 = null;
        Token otherlv_6 = null;
        EObject lv_elements_3_0 = null;
        EObject lv_elements_5_0 = null;
        EObject this_XAnnotationOrExpression_7 = null;

        enterRule();

        try {
            {
                // Choose between the '#[...]' list-literal form (alt 1) and the
                // plain expression form (alt 2) via the generated DFA.
                int alt11 = 2;
                alt11 = dfa11.predict(input);
                switch (alt11) {
                    case 1 :
                        // Alternative 1: '#' '[' ( element ( ',' element )* )? ']'
                        {
                            {
                                {
                                    {
                                        if ( state.backtracking == 0 ) {
                                            // Materialize the XListLiteral node before consuming any tokens.
                                            current = forceCreateModelElement(
                                                grammarAccess.getXAnnotationElementValueAccess().getXListLiteralAction_0_0_0_0(),
                                                current);
                                        }
                                    }
                                    // token 18 == '#'
                                    otherlv_1 = (Token) match(input, 18, FOLLOW_10); if (state.failed) return current;
                                    if ( state.backtracking == 0 ) {
                                        newLeafNode(otherlv_1, grammarAccess.getXAnnotationElementValueAccess().getNumberSignKeyword_0_0_0_1());
                                    }
                                    // token 19 == '['
                                    otherlv_2 = (Token) match(input, 19, FOLLOW_11); if (state.failed) return current;
                                    if ( state.backtracking == 0 ) {
                                        newLeafNode(otherlv_2, grammarAccess.getXAnnotationElementValueAccess().getLeftSquareBracketKeyword_0_0_0_2());
                                    }
                                }
                            }
                            // Optional element list: enter only if the lookahead token can start an expression.
                            int alt10 = 2;
                            int LA10_0 = input.LA(1);
                            if ( ((LA10_0 >= RULE_STRING && LA10_0 <= RULE_ID) || (LA10_0 >= 13 && LA10_0 <= 14) || (LA10_0 >= 18 && LA10_0 <= 19) || LA10_0 == 26 || (LA10_0 >= 42 && LA10_0 <= 43) || LA10_0 == 48 || LA10_0 == 55 || LA10_0 == 59 || LA10_0 == 61 || (LA10_0 >= 65 && LA10_0 <= 67) || (LA10_0 >= 70 && LA10_0 <= 82) || LA10_0 == 84) ) {
                                alt10 = 1;
                            }
                            switch (alt10) {
                                case 1 :
                                    {
                                        {
                                            {
                                                // First list element.
                                                if ( state.backtracking == 0 ) {
                                                    newCompositeNode(grammarAccess.getXAnnotationElementValueAccess().getElementsXAnnotationOrExpressionParserRuleCall_0_1_0_0());
                                                }
                                                pushFollow(FOLLOW_12);
                                                lv_elements_3_0 = ruleXAnnotationOrExpression();
                                                state._fsp--;
                                                if (state.failed) return current;
                                                if ( state.backtracking == 0 ) {
                                                    if (current == null) {
                                                        current = createModelElementForParent(grammarAccess.getXAnnotationElementValueRule());
                                                    }
                                                    add(current, "elements", lv_elements_3_0, "org.eclipse.xtext.xbase.annotations.XbaseWithAnnotations.XAnnotationOrExpression");
                                                    afterParserOrEnumRuleCall();
                                                }
                                            }
                                        }
                                        // ( ',' element )* — keep consuming while a comma (token 15) follows.
                                        loop9:
                                        do {
                                            int alt9 = 2;
                                            int LA9_0 = input.LA(1);
                                            if ( (LA9_0 == 15) ) {
                                                alt9 = 1;
                                            }
                                            switch (alt9) {
                                                case 1 :
                                                    {
                                                        otherlv_4 = (Token) match(input, 15, FOLLOW_9); if (state.failed) return current;
                                                        if ( state.backtracking == 0 ) {
                                                            newLeafNode(otherlv_4, grammarAccess.getXAnnotationElementValueAccess().getCommaKeyword_0_1_1_0());
                                                        }
                                                        {
                                                            {
                                                                // Subsequent list element after a comma.
                                                                if ( state.backtracking == 0 ) {
                                                                    newCompositeNode(grammarAccess.getXAnnotationElementValueAccess().getElementsXAnnotationOrExpressionParserRuleCall_0_1_1_1_0());
                                                                }
                                                                pushFollow(FOLLOW_12);
                                                                lv_elements_5_0 = ruleXAnnotationOrExpression();
                                                                state._fsp--;
                                                                if (state.failed) return current;
                                                                if ( state.backtracking == 0 ) {
                                                                    if (current == null) {
                                                                        current = createModelElementForParent(grammarAccess.getXAnnotationElementValueRule());
                                                                    }
                                                                    add(current, "elements", lv_elements_5_0, "org.eclipse.xtext.xbase.annotations.XbaseWithAnnotations.XAnnotationOrExpression");
                                                                    afterParserOrEnumRuleCall();
                                                                }
                                                            }
                                                        }
                                                    }
                                                    break;
                                                default :
                                                    break loop9;
                                            }
                                        } while (true);
                                    }
                                    break;
                            }
                            // token 20 == ']' closes the list literal.
                            otherlv_6 = (Token) match(input, 20, FOLLOW_2); if (state.failed) return current;
                            if ( state.backtracking == 0 ) {
                                newLeafNode(otherlv_6, grammarAccess.getXAnnotationElementValueAccess().getRightSquareBracketKeyword_0_2());
                            }
                        }
                        break;
                    case 2 :
                        // Alternative 2: plain annotation-or-expression; its result becomes 'current'.
                        {
                            if ( state.backtracking == 0 ) {
                                newCompositeNode(grammarAccess.getXAnnotationElementValueAccess().getXAnnotationOrExpressionParserRuleCall_1());
                            }
                            pushFollow(FOLLOW_2);
                            this_XAnnotationOrExpression_7 = ruleXAnnotationOrExpression();
                            state._fsp--;
                            if (state.failed) return current;
                            if ( state.backtracking == 0 ) {
                                current = this_XAnnotationOrExpression_7;
                                afterParserOrEnumRuleCall();
                            }
                        }
                        break;
                }
            }
            if ( state.backtracking == 0 ) {
                leaveRule();
            }
        }
        catch (RecognitionException re) {
            // Standard ANTLR error recovery: resync and keep going.
            recover(input, re);
            appendSkippedTokens();
        }
        finally {
        }
        return current;
    }
}
public class DiskClient { /** * Retrieves an aggregated list of persistent disks . * < p > Sample code : * < pre > < code > * try ( DiskClient diskClient = DiskClient . create ( ) ) { * ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ; * for ( DisksScopedList element : diskClient . aggregatedListDisks ( project ) . iterateAll ( ) ) { * / / doThingsWith ( element ) ; * < / code > < / pre > * @ param project Project ID for this request . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final AggregatedListDisksPagedResponse aggregatedListDisks ( ProjectName project ) { } }
AggregatedListDisksHttpRequest request = AggregatedListDisksHttpRequest . newBuilder ( ) . setProject ( project == null ? null : project . toString ( ) ) . build ( ) ; return aggregatedListDisks ( request ) ;
public class DescribeUserStackAssociationsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DescribeUserStackAssociationsRequest describeUserStackAssociationsRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( describeUserStackAssociationsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeUserStackAssociationsRequest . getStackName ( ) , STACKNAME_BINDING ) ; protocolMarshaller . marshall ( describeUserStackAssociationsRequest . getUserName ( ) , USERNAME_BINDING ) ; protocolMarshaller . marshall ( describeUserStackAssociationsRequest . getAuthenticationType ( ) , AUTHENTICATIONTYPE_BINDING ) ; protocolMarshaller . marshall ( describeUserStackAssociationsRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( describeUserStackAssociationsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Client { /** * Do a POST HTTP request to the given REST - URL . * @ param httpMethod * HTTP method for this request ( GET , POST , PUT , DELETE ) . * @ param restUrl * REST URL . * @ param params * Parameters for adding to the query string . * @ throws IOException * if the request go bad . */ private Object postRequest ( HttpMethod httpMethod , String restUrl , Map < String , String > params ) throws IOException , WebServiceException { } }
HttpURLConnection conn = null ; BufferedWriter wr = null ; try { // Make the URL urlString = new StringBuilder ( this . host ) . append ( restUrl ) ; LOGGER . debug ( "Doing HTTP request: POST [{}]" , urlString . toString ( ) ) ; // Connection configuration // Proxy proxy = DEBUG _ HTTP ? new Proxy ( Proxy . Type . HTTP , new InetSocketAddress ( " 127.0.0.1 " , 8008 ) ) : Proxy . NO _ PROXY ; URL url = new URL ( urlString . toString ( ) ) ; conn = ( HttpURLConnection ) url . openConnection ( ) ; conn . setDoInput ( true ) ; conn . setDoOutput ( true ) ; conn . setUseCaches ( false ) ; conn . setRequestMethod ( HttpMethod . POST . name ( ) ) ; conn . setRequestProperty ( "Content-Type" , "application/x-www-form-urlencoded" ) ; conn . setRequestProperty ( "Connection" , "Keep-Alive" ) ; wr = new BufferedWriter ( new OutputStreamWriter ( conn . getOutputStream ( ) ) ) ; // Writes into the alive connection if ( httpMethod != HttpMethod . POST ) { params . put ( "http_method" , httpMethod . name ( ) ) ; } wr . write ( this . makeParamsString ( params , false ) ) ; wr . flush ( ) ; // Gets the response return this . readResponse ( restUrl , conn ) ; } catch ( WebServiceException e ) { LOGGER . debug ( "WebServiceException catched. Request error" , e ) ; throw e ; } catch ( IOException e ) { LOGGER . debug ( "IOException catched. Request error" , e ) ; throw e ; } catch ( Exception e ) { LOGGER . debug ( "Exception catched, throwing a new IOException. Request error" , e ) ; throw new IOException ( e . getLocalizedMessage ( ) ) ; } finally { if ( wr != null ) { wr . close ( ) ; } if ( conn != null ) { conn . disconnect ( ) ; } }
public class DynamicByteArray { /** * Byte compare a set of bytes against the bytes in this dynamic array . * @ param other source of the other bytes * @ param otherOffset start offset in the other array * @ param otherLength number of bytes in the other array * @ param ourOffset the offset in our array * @ param ourLength the number of bytes in our array * @ return negative for less , 0 for equal , positive for greater */ public int compare ( byte [ ] other , int otherOffset , int otherLength , int ourOffset , int ourLength ) { } }
return 0 - data . compareTo ( ourOffset , ourLength , other , otherOffset , otherLength ) ;
public class DefaultGroovyMethodsSupport { /** * helper method for getAt and putAt */ protected static RangeInfo subListBorders ( int size , EmptyRange range ) { } }
int from = normaliseIndex ( DefaultTypeTransformation . intUnbox ( range . getFrom ( ) ) , size ) ; return new RangeInfo ( from , from , false ) ;
public class SnowflakeFileTransferAgent {
    /**
     * A helper method to verify if the local file path from GS matches what's parsed locally.
     * This is for security purpose as documented in SNOW-15153.
     *
     * The local path is extracted from the original command text: everything after the
     * "file://" prefix, either up to a closing single quote (quoted form) or up to the
     * first space/newline/semicolon (unquoted form). A mismatch with the GS-provided
     * path raises an internal error; an unparseable path is only logged.
     *
     * @param localFilePathFromGS the local file path to verify
     * @throws SnowflakeSQLException if the parsed local path does not match the GS-provided one
     */
    private void verifyLocalFilePath(String localFilePathFromGS) throws SnowflakeSQLException {
        // Nothing to verify against if there is no command text.
        if (command == null) {
            logger.error("null command");
            return;
        }
        if (command.indexOf(FILE_PROTOCOL) < 0) {
            logger.error("file:// prefix not found in command: {}", command);
            return;
        }
        // Start of the path: first char after the "file://" prefix.
        int localFilePathBeginIdx = command.indexOf(FILE_PROTOCOL) + FILE_PROTOCOL.length();
        // Quoted form is detected by a single quote immediately preceding "file://".
        boolean isLocalFilePathQuoted = (localFilePathBeginIdx > FILE_PROTOCOL.length()) && (command.charAt(localFilePathBeginIdx - 1 - FILE_PROTOCOL.length()) == '\'');
        // the ending index is exclusive
        int localFilePathEndIdx = 0;
        String localFilePath = "";
        if (isLocalFilePathQuoted) {
            // look for the matching quote
            localFilePathEndIdx = command.indexOf("'", localFilePathBeginIdx);
            if (localFilePathEndIdx > localFilePathBeginIdx) {
                localFilePath = command.substring(localFilePathBeginIdx, localFilePathEndIdx);
            }
            // unescape backslashes to match the file name from GS
            localFilePath = localFilePath.replaceAll("\\\\\\\\", "\\\\");
        } else {
            // look for the first space or new line or semi colon
            List<Integer> indexList = new ArrayList<>();
            char[] delimiterChars = {' ', '\n', ';'};
            for (int i = 0; i < delimiterChars.length; i++) {
                int charIndex = command.indexOf(delimiterChars[i], localFilePathBeginIdx);
                if (charIndex != -1) {
                    indexList.add(charIndex);
                }
            }
            // Earliest delimiter wins; -1 means the path runs to the end of the command.
            localFilePathEndIdx = indexList.isEmpty() ? -1 : Collections.min(indexList);
            if (localFilePathEndIdx > localFilePathBeginIdx) {
                localFilePath = command.substring(localFilePathBeginIdx, localFilePathEndIdx);
            } else if (localFilePathEndIdx == -1) {
                localFilePath = command.substring(localFilePathBeginIdx);
            }
        }
        if (!localFilePath.isEmpty() && !localFilePath.equals(localFilePathFromGS)) {
            // Security check: GS must not point us at a different local file than the command named.
            throw new SnowflakeSQLException(SqlState.INTERNAL_ERROR, ErrorCode.INTERNAL_ERROR.getMessageCode(), "Unexpected local file path from GS. From GS: " + localFilePathFromGS + ", expected: " + localFilePath);
        } else if (localFilePath.isEmpty()) {
            logger.debug("fail to parse local file path from command: {}", command);
        } else {
            logger.trace("local file path from GS matches local parsing: {}", localFilePath);
        }
    }
}
public class Function {
    /**
     * Logging of an Exception in a function with custom message and message parameters.
     *
     * @param t The thrown Exception
     * @param msg The message to be printed
     * @param messageParams The parameters for the message
     */
    protected void logException(final Throwable t, final String msg, final Object[] messageParams) {
        // NOTE(review): assumes the logger resolves this call so that 'messageParams' fills
        // the placeholders in 'msg' and the trailing Throwable is logged with its stack trace
        // (SLF4J-style last-argument handling) — confirm against the logger API in use, since
        // some overloads would instead treat the array as a single format argument.
        logger.error(msg, messageParams, t);
    }
}
public class Matrix { /** * Inserts a given { @ code that } matrix ( B ) into this matrix ( A ) . The original * values are overwritten by the new ones . * @ param that the matrix to insert * @ param destRow the row to insert at in the destination matrix * @ param destColumn the column to insert at in the destination matrix * @ param rows number of rows to insert * @ param columns number of columns to insert * @ return a matrix with the parameter inserted into it */ public Matrix insert ( Matrix that , int destRow , int destColumn , int rows , int columns ) { } }
return insert ( that , 0 , 0 , destRow , destColumn , rows , columns ) ;
public class DrizzlePreparedStatement {
    /**
     * Sets the designated parameter to the given <code>Reader</code> object. The data will be
     * read from the stream as needed until end-of-file is reached; the driver performs any
     * necessary conversion from UNICODE to the database char format.
     *
     * @param parameterIndex the first parameter is 1, the second is 2, ...
     * @param reader the <code>java.io.Reader</code> object that contains the Unicode data;
     *               a null reader sets the parameter to SQL NULL
     * @throws java.sql.SQLException if parameterIndex does not correspond to a parameter marker,
     *         if reading the stream fails, or if called on a closed statement
     * @since 1.6
     */
    public void setCharacterStream(final int parameterIndex, final Reader reader) throws SQLException {
        if (reader == null) {
            // NOTE(review): Types.BLOB looks odd for character data — Types.CLOB or
            // Types.LONGVARCHAR would be expected; confirm the driver's setNull handling
            // before changing, as other setters in this class may share the convention.
            setNull(parameterIndex, Types.BLOB);
            return;
        }
        try {
            // Wrap the reader so its contents are streamed when the statement executes.
            setParameter(parameterIndex, new BufferedReaderParameter(reader));
        } catch (IOException e) {
            throw SQLExceptionMapper.getSQLException("Could not read reader", e);
        }
    }
}
public class JavaCustomReceiver { /** * Create a socket connection and receive data until receiver is stopped */ private void receive ( ) { } }
try { Socket socket = null ; BufferedReader reader = null ; try { // connect to the server socket = new Socket ( host , port ) ; reader = new BufferedReader ( new InputStreamReader ( socket . getInputStream ( ) , StandardCharsets . UTF_8 ) ) ; // Until stopped or connection broken continue reading String userInput ; while ( ! isStopped ( ) && ( userInput = reader . readLine ( ) ) != null ) { System . out . println ( "Received data '" + userInput + "'" ) ; store ( userInput ) ; } } finally { Closeables . close ( reader , /* swallowIOException = */ true ) ; Closeables . close ( socket , /* swallowIOException = */ true ) ; } // Restart in an attempt to connect again when server is active again restart ( "Trying to connect again" ) ; } catch ( ConnectException ce ) { // restart if could not connect to server restart ( "Could not connect" , ce ) ; } catch ( Throwable t ) { restart ( "Error receiving data" , t ) ; }
public class FurnaceClasspathScanner { /** * Scans given archive for files passing given filter , adds the results into given list . */ private void handleArchiveByFile ( Predicate < String > filter , File archive , List < String > discoveredFiles ) { } }
try { try ( ZipFile zip = new ZipFile ( archive ) ) { Enumeration < ? extends ZipEntry > entries = zip . entries ( ) ; while ( entries . hasMoreElements ( ) ) { ZipEntry entry = entries . nextElement ( ) ; String name = entry . getName ( ) ; if ( filter . accept ( name ) ) discoveredFiles . add ( name ) ; } } } catch ( IOException e ) { throw new RuntimeException ( "Error handling file " + archive , e ) ; }
public class VaultsInner { /** * The List operation gets information about the vaults associated with the subscription . * @ param top Maximum number of results to return . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; VaultInner & gt ; object */ public Observable < Page < VaultInner > > listBySubscriptionAsync ( final Integer top ) { } }
return listBySubscriptionWithServiceResponseAsync ( top ) . map ( new Func1 < ServiceResponse < Page < VaultInner > > , Page < VaultInner > > ( ) { @ Override public Page < VaultInner > call ( ServiceResponse < Page < VaultInner > > response ) { return response . body ( ) ; } } ) ;
public class CommerceShippingFixedOptionLocalServiceBaseImpl { /** * Returns the number of rows matching the dynamic query . * @ param dynamicQuery the dynamic query * @ param projection the projection to apply to the query * @ return the number of rows matching the dynamic query */ @ Override public long dynamicQueryCount ( DynamicQuery dynamicQuery , Projection projection ) { } }
return commerceShippingFixedOptionPersistence . countWithDynamicQuery ( dynamicQuery , projection ) ;
public class DateItem { /** * Constructs a new TimeItem from a String previously gotten from the { @ link # toString ( ) } method . * @ param code The string to parse from . * @ return A new TimeItem , or null if there was an error . */ public static DateItem fromString ( String code ) { } }
String [ ] items = code . split ( "\n" ) ; if ( items . length != 6 ) return null ; int year , month , day , id ; try { year = Integer . parseInt ( items [ 2 ] ) ; month = Integer . parseInt ( items [ 3 ] ) ; day = Integer . parseInt ( items [ 4 ] ) ; id = Integer . parseInt ( items [ 5 ] ) ; } catch ( NumberFormatException e ) { e . printStackTrace ( ) ; return null ; } return new DateItem ( emptyToNull ( items [ 0 ] ) , emptyToNull ( items [ 1 ] ) , year , month , day , id ) ;
public class LimitedConnectionsFileSystem {
    /**
     * Opens a new stream via the given opener while respecting the configured limits on
     * concurrently open input/output streams.
     *
     * Reserves a "credit" (input or output, depending on {@code output}) under the lock,
     * opens the stream outside the lock, registers it in {@code openStreams}, and rolls
     * the reservation back (signalling waiters) if opening fails.
     *
     * @param streamOpener supplies the actual stream; invoked outside the lock
     * @param openStreams the set tracking currently open streams of this kind
     * @param output true to open an output stream, false for an input stream
     * @return the opened stream
     * @throws IOException if opening fails, or if interrupted while waiting for availability
     */
    private <T extends StreamWithTimeout> T createStream(final SupplierWithException<T, IOException> streamOpener, final HashSet<T> openStreams, final boolean output) throws IOException {
        // A configured limit of 0 or less means "unlimited" for that category.
        final int outputLimit = output && maxNumOpenOutputStreams > 0 ? maxNumOpenOutputStreams : Integer.MAX_VALUE;
        final int inputLimit = !output && maxNumOpenInputStreams > 0 ? maxNumOpenInputStreams : Integer.MAX_VALUE;
        final int totalLimit = maxNumOpenStreamsTotal > 0 ? maxNumOpenStreamsTotal : Integer.MAX_VALUE;
        // Exactly one of the two credits is 1, matching the stream kind being opened.
        final int outputCredit = output ? 1 : 0;
        final int inputCredit = output ? 0 : 1;

        // because waiting for availability may take long, we need to be interruptible here
        // and handle interrupted exceptions as I/O errors
        // even though the code is written to make sure the lock is held for a short time only,
        // making the lock acquisition interruptible helps to guard against the cases where
        // a supposedly fast operation (like 'getPos()' on a stream) actually takes long.
        try {
            lock.lockInterruptibly();
            try {
                // some integrity checks
                assert openOutputStreams.size() <= numReservedOutputStreams;
                assert openInputStreams.size() <= numReservedInputStreams;

                // wait until there are few enough streams so we can open another
                waitForAvailability(totalLimit, outputLimit, inputLimit);

                // We do not open the stream here in the locked scope because opening a stream
                // could take a while. Holding the lock during that operation would block all concurrent
                // attempts to try and open a stream, effectively serializing all calls to open the streams.
                numReservedOutputStreams += outputCredit;
                numReservedInputStreams += inputCredit;
            } finally {
                lock.unlock();
            }
        } catch (InterruptedException e) {
            // restore interruption flag
            Thread.currentThread().interrupt();
            throw new IOException("interrupted before opening stream");
        }

        // open the stream outside the lock.
        boolean success = false;
        try {
            final T out = streamOpener.get();

            // add the stream to the set, need to re-acquire the lock
            lock.lock();
            try {
                openStreams.add(out);
            } finally {
                lock.unlock();
            }

            // good, can now return cleanly
            success = true;
            return out;
        } finally {
            if (!success) {
                // remove the reserved credit
                // we must open this non-interruptibly, because this must succeed!
                lock.lock();
                try {
                    numReservedOutputStreams -= outputCredit;
                    numReservedInputStreams -= inputCredit;
                    available.signalAll();
                } finally {
                    lock.unlock();
                }
            }
        }
    }
}