signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class BatchGetDevEndpointsResult { /** * A list of DevEndpoint definitions . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setDevEndpoints ( java . util . Collection ) } or { @ link # withDevEndpoints ( java . util . Collection ) } if you want to * override the existing values . * @ param devEndpoints * A list of DevEndpoint definitions . * @ return Returns a reference to this object so that method calls can be chained together . */ public BatchGetDevEndpointsResult withDevEndpoints ( DevEndpoint ... devEndpoints ) { } }
if ( this . devEndpoints == null ) { setDevEndpoints ( new java . util . ArrayList < DevEndpoint > ( devEndpoints . length ) ) ; } for ( DevEndpoint ele : devEndpoints ) { this . devEndpoints . add ( ele ) ; } return this ;
public class LiferayWarPackagingProcessor {

    /*
     * (non-Javadoc)
     *
     * @see org.jboss.shrinkwrap.resolver.spi.maven.archive.packaging.PackagingProcessor
     * #importBuildOutput(org.jboss.shrinkwrap.resolver.api.maven.strategy.MavenResolutionStrategy)
     */
    // Builds the Liferay plugin WAR: compiles sources, merges compiled classes, resources,
    // the webapp directory and the manifest into the archive, adds resolved dependencies,
    // exports the archive to disk for the Liferay deployer tasks, and finally re-imports
    // the deployer-processed WAR as the archive handed back to the caller.
    @Override
    public LiferayWarPackagingProcessor importBuildOutput(MavenResolutionStrategy strategy) {
        log.debug("Building Liferay Plugin Archive");
        ParsedPomFile pomFile = session.getParsedPomFile();

        // Compile and add Java classes
        if (Validate.isReadable(pomFile.getSourceDirectory())) {
            compile(pomFile.getSourceDirectory(), pomFile.getBuildOutputDirectory(), ScopeType.COMPILE,
                ScopeType.RUNTIME, ScopeType.SYSTEM, ScopeType.IMPORT, ScopeType.PROVIDED);
            // NOTE(review): "webinf_clases.jar" looks like a typo for "webinf_classes.jar" —
            // confirm nothing depends on the exact name before renaming.
            JavaArchive classes = ShrinkWrap.create(ExplodedImporter.class, "webinf_clases.jar")
                .importDirectory(pomFile.getBuildOutputDirectory()).as(JavaArchive.class);
            archive = archive.merge(classes, ArchivePaths.create("WEB-INF/classes"));
            // Raise bug with shrink wrap? Since configure creates the base war
            // in target classes, we need to delete from the archive
            // NOTE(review): " form archive" in the trace message looks like a typo for "from".
            log.trace("Removing temp file: " + pomFile.getFinalName() + " form archive");
            archive.delete(ArchivePaths.create("WEB-INF/classes", pomFile.getFinalName()));
        }

        // Add Resources
        for (Resource resource : pomFile.getResources()) {
            archive.addAsResource(resource.getSource(), resource.getTargetPath());
        }

        // Webapp build
        WarPluginConfiguration warPluginConfiguration = new WarPluginConfiguration(pomFile);
        if (Validate.isReadable(warPluginConfiguration.getWarSourceDirectory())) {
            WebArchive webapp = ShrinkWrap.create(ExplodedImporter.class, "webapp.war")
                .importDirectory(warPluginConfiguration.getWarSourceDirectory(), applyFilter(warPluginConfiguration))
                .as(WebArchive.class);
            archive.merge(webapp);
        }

        // Add manifest (serialized through an in-memory stream; failures are logged, not fatal)
        try {
            Manifest manifest = warPluginConfiguration.getArchiveConfiguration().asManifest();
            ByteArrayOutputStream bout = new ByteArrayOutputStream();
            manifest.write(bout);
            archive.setManifest(new StringAsset(bout.toString()));
        } catch (MavenImporterException e) {
            log.error("Error adding manifest", e);
        } catch (IOException e) {
            log.error("Error adding manifest", e);
        }

        // add dependencies
        this.session = AddAllDeclaredDependenciesTask.INSTANCE.execute(session);
        final Collection<MavenResolvedArtifact> artifacts = session.resolveDependencies(strategy);
        for (MavenResolvedArtifact artifact : artifacts) {
            archive.addAsLibrary(artifact.asFile());
        }

        // Archive Filtering
        archive = ArchiveFilteringUtils.filterArchiveContent(archive, WebArchive.class,
            warPluginConfiguration.getIncludes(), warPluginConfiguration.getExcludes());

        // Liferay Plugin Deployer
        LiferayPluginConfiguration liferayPluginConfiguration = new LiferayPluginConfiguration(pomFile);

        // Temp Archive for processing by Liferay deployers
        String baseDirPath = liferayPluginConfiguration.getBaseDir();
        File tempDestFile = new File(baseDirPath, pomFile.getFinalName());
        File baseDir = new File(baseDirPath);
        if (!baseDir.exists()) {
            baseDir.mkdirs();
            log.info("Created dir " + baseDir);
        }
        log.trace("Temp Archive:" + tempDestFile.getName());
        // Export to disk so the file-based Liferay deployer tasks can process it.
        archive.as(ZipExporter.class).exportTo(tempDestFile, true);
        FileUtils.deleteQuietly(new File(pomFile.getFinalName()));

        if ("hook".equals(liferayPluginConfiguration.getPluginType())) {
            // perform hook deployer task
            HookDeployerTask.INSTANCE.execute(session);
        } else {
            // default is always portletdeployer
            PortletDeployerTask.INSTANCE.execute(session);
        }

        // Call Liferay Deployer
        LiferayPluginConfiguration configuration = new LiferayPluginConfiguration(pomFile);
        File ddPluginArchiveFile = new File(configuration.getDestDir(), pomFile.getArtifactId() + ".war");
        // Re-import the deployer-processed WAR as the final archive.
        archive = ShrinkWrap.create(ZipImporter.class, pomFile.getFinalName())
            .importFrom(ddPluginArchiveFile).as(WebArchive.class);
        try {
            FileUtils.forceDelete(ddPluginArchiveFile);
            FileUtils.forceDelete(new File(configuration.getBaseDir(), pomFile.getFinalName()));
        } catch (IOException e) {
            // nothing to do
        }
        return this;
    }
}
public class OSecurityHelper { /** * Transform name to { @ link ORule . ResourceGeneric } * @ param name name to transform * @ return { @ link ORule . ResourceGeneric } or null */ public static ORule . ResourceGeneric getResourceGeneric ( String name ) { } }
String shortName = Strings . beforeFirst ( name , '.' ) ; if ( Strings . isEmpty ( shortName ) ) shortName = name ; ORule . ResourceGeneric value = ORule . ResourceGeneric . valueOf ( shortName ) ; if ( value == null ) value = ORule . mapLegacyResourceToGenericResource ( name ) ; return value ;
public class URL { /** * Sets the fields of the URL . This is not a public method so that only URLStreamHandlers can * modify URL fields . URLs are otherwise constant . * @ param protocol the name of the protocol to use * @ param host the name of the host * @ param port the port number on the host * @ param file the file on the host * @ param ref the internal reference in the URL */ protected void set ( final String protocol , final String host , final int port , final String file , final String ref ) { } }
this . protocol = protocol ; this . host = host ; this . authority = port == - 1 ? host : host + ":" + port ; this . port = port ; this . file = file ; this . ref = ref ; /* * This is very important . We must recompute this after the URL has been changed . */ this . hashCodeValue = - 1 ; final int q = file . lastIndexOf ( '?' ) ; if ( q == - 1 ) { this . path = file ; } else { this . query = file . substring ( q + 1 ) ; this . path = file . substring ( 0 , q ) ; }
public class OrderItemUrl { /** * Get Resource Url for GetQuoteItemsByQuoteName * @ param customerAccountId The unique identifier of the customer account for which to retrieve wish lists . * @ param filter A set of filter expressions representing the search parameters for a query . This parameter is optional . Refer to [ Sorting and Filtering ] ( . . / . . / . . / . . / Developer / api - guides / sorting - filtering . htm ) for a list of supported filters . * @ param pageSize When creating paged results from a query , this value indicates the zero - based offset in the complete result set where the returned entities begin . For example , with this parameter set to 25 , to get the 51st through the 75th items , set startIndex to 50. * @ param quoteName * @ param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object . This parameter should only be used to retrieve data . Attempting to update data using this parameter may cause data loss . * @ param sortBy The element to sort the results by and the channel in which the results appear . Either ascending ( a - z ) or descending ( z - a ) channel . Optional . Refer to [ Sorting and Filtering ] ( . . / . . / . . / . . / Developer / api - guides / sorting - filtering . htm ) for more information . * @ param startIndex When creating paged results from a query , this value indicates the zero - based offset in the complete result set where the returned entities begin . For example , with pageSize set to 25 , to get the 51st through the 75th items , set this parameter to 50. * @ return String Resource Url */ public static MozuUrl getQuoteItemsByQuoteNameUrl ( Integer customerAccountId , String filter , Integer pageSize , String quoteName , String responseFields , String sortBy , Integer startIndex ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/commerce/quotes/customers/{customerAccountId}/{quoteName}/items?startIndex={startIndex}&pageSize={pageSize}&sortBy={sortBy}&filter={filter}&responseFields={responseFields}" ) ; formatter . formatUrl ( "customerAccountId" , customerAccountId ) ; formatter . formatUrl ( "filter" , filter ) ; formatter . formatUrl ( "pageSize" , pageSize ) ; formatter . formatUrl ( "quoteName" , quoteName ) ; formatter . formatUrl ( "responseFields" , responseFields ) ; formatter . formatUrl ( "sortBy" , sortBy ) ; formatter . formatUrl ( "startIndex" , startIndex ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class X509CRLEntryImpl { /** * Gets a Set of the extension ( s ) marked CRITICAL in this * X509CRLEntry . In the returned set , each extension is * represented by its OID string . * @ return a set of the extension oid strings in the * Object that are marked critical . */ public Set < String > getCriticalExtensionOIDs ( ) { } }
if ( extensions == null ) { return null ; } Set < String > extSet = new TreeSet < > ( ) ; for ( Extension ex : extensions . getAllExtensions ( ) ) { if ( ex . isCritical ( ) ) { extSet . add ( ex . getExtensionId ( ) . toString ( ) ) ; } } return extSet ;
public class SimonInterceptor { /** * Around invoke method that measures the split for one method invocation . * @ param context invocation context * @ return return value from the invocation * @ throws Exception exception thrown from the invocation */ @ AroundInvoke public Object monitor ( InvocationContext context ) throws Exception { } }
if ( isMonitored ( context ) ) { String simonName = getSimonName ( context ) ; try ( Split ignored = SimonManager . getStopwatch ( simonName ) . start ( ) ) { return context . proceed ( ) ; } } else { return context . proceed ( ) ; }
public class HtmlTree { /** * This method adds a string content to the htmltree . If the last content member * added is a StringContent , append the string to that StringContent or else * create a new StringContent and add it to the html tree . * @ param stringContent string content that needs to be added */ public void addContent ( String stringContent ) { } }
if ( ! content . isEmpty ( ) ) { Content lastContent = content . get ( content . size ( ) - 1 ) ; if ( lastContent instanceof StringContent ) lastContent . addContent ( stringContent ) ; else addContent ( new StringContent ( stringContent ) ) ; } else addContent ( new StringContent ( stringContent ) ) ;
public class ClassLocator {
    /**
     * Checks whether "otherclass" is a subclass of the given "superclass".
     * Results are memoized in {@code m_CheckSubClass}, keyed by
     * "superName-otherName". Note: a class is considered a subclass of itself.
     *
     * @param superclass the superclass to check against
     * @param otherclass this class is checked whether it is a subclass of the superclass
     * @return TRUE if "otherclass" is a true subclass
     */
    public static boolean isSubclass(Class superclass, Class otherclass) {
        Class currentclass;
        boolean result;
        String key;

        // Cache key combines both class names; return the memoized answer if present.
        key = superclass.getName() + "-" + otherclass.getName();
        if (m_CheckSubClass.containsKey(key))
            return m_CheckSubClass.get(key);

        // Walk up the superclass chain until we hit the target or the root.
        currentclass = otherclass;
        do {
            result = currentclass.equals(superclass);

            // topmost class reached?
            if (currentclass.equals(Object.class) || (currentclass.getSuperclass() == null))
                break;

            if (!result)
                currentclass = currentclass.getSuperclass();
        } while (!result);

        // Memoize for subsequent lookups of the same pair.
        m_CheckSubClass.put(key, result);

        return result;
    }
}
public class SortableArrayList { /** * Performs a binary search , attempting to locate the specified * object . The array must be in the sort order defined by the supplied * { @ link Comparator } for this to operate correctly . * @ return the index of the object in question or * < code > ( - ( < i > insertion point < / i > ) - 1 ) < / code > ( always a negative * value ) if the object was not found in the list . */ public int binarySearch ( T key , Comparator < ? super T > comp ) { } }
return ArrayUtil . binarySearch ( _elements , 0 , _size , key , comp ) ;
public class JobDeleteOptions { /** * Set the time the request was issued . Client libraries typically set this to the current system clock time ; set it explicitly if you are calling the REST API directly . * @ param ocpDate the ocpDate value to set * @ return the JobDeleteOptions object itself . */ public JobDeleteOptions withOcpDate ( DateTime ocpDate ) { } }
if ( ocpDate == null ) { this . ocpDate = null ; } else { this . ocpDate = new DateTimeRfc1123 ( ocpDate ) ; } return this ;
public class XAnnotationInvocationHandler { /** * Implementation of dynamicProxy . hashCode ( ) */ private int hashCodeImpl ( ) { } }
int result = 0 ; for ( Map . Entry < String , Object > e : memberValues . entrySet ( ) ) { result += ( 127 * e . getKey ( ) . hashCode ( ) ) ^ memberValueHashCode ( e . getValue ( ) ) ; } return result ;
public class AmazonECSClient { /** * Stops a running task . Any tags associated with the task will be deleted . * When < a > StopTask < / a > is called on a task , the equivalent of < code > docker stop < / code > is issued to the containers * running in the task . This results in a < code > SIGTERM < / code > value and a default 30 - second timeout , after which * the < code > SIGKILL < / code > value is sent and the containers are forcibly stopped . If the container handles the * < code > SIGTERM < / code > value gracefully and exits within 30 seconds from receiving it , no < code > SIGKILL < / code > * value is sent . * < note > * The default 30 - second timeout can be configured on the Amazon ECS container agent with the * < code > ECS _ CONTAINER _ STOP _ TIMEOUT < / code > variable . For more information , see < a * href = " https : / / docs . aws . amazon . com / AmazonECS / latest / developerguide / ecs - agent - config . html " > Amazon ECS Container * Agent Configuration < / a > in the < i > Amazon Elastic Container Service Developer Guide < / i > . * < / note > * @ param stopTaskRequest * @ return Result of the StopTask operation returned by the service . * @ throws ServerException * These errors are usually caused by a server issue . * @ throws ClientException * These errors are usually caused by a client action , such as using an action or resource on behalf of a * user that doesn ' t have permissions to use the action or resource , or specifying an identifier that is not * valid . * @ throws InvalidParameterException * The specified parameter is invalid . Review the available parameters for the API request . * @ throws ClusterNotFoundException * The specified cluster could not be found . You can view your available clusters with < a > ListClusters < / a > . * Amazon ECS clusters are Region - specific . * @ sample AmazonECS . StopTask * @ see < a href = " http : / / docs . aws . amazon . 
com / goto / WebAPI / ecs - 2014-11-13 / StopTask " target = " _ top " > AWS API * Documentation < / a > */ @ Override public StopTaskResult stopTask ( StopTaskRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeStopTask ( request ) ;
public class GuiUtilities {
    /**
     * Creates a solid-color square image and installs it as the button's icon,
     * producing a color-picker style button.
     *
     * @param button the button to decorate
     * @param color the color to paint
     * @param size the optional edge length of the image in pixels; defaults to 15
     *        when {@code null}
     */
    public static void colorButton(JButton button, Color color, Integer size) {
        // Unbox once instead of reassigning the boxed parameter.
        final int side = (size == null) ? 15 : size;
        BufferedImage image = new BufferedImage(side, side, BufferedImage.TYPE_INT_RGB);
        Graphics2D graphics = (Graphics2D) image.getGraphics();
        try {
            graphics.setColor(color);
            graphics.fillRect(0, 0, side, side);
        } finally {
            // Always release the native graphics context, even if painting throws.
            graphics.dispose();
        }
        button.setIcon(new ImageIcon(image));
    }
}
public class KeysDAORedisImpl { /** * { @ inheritDoc } */ @ Override public KeyInfo getKeyInfo ( final String key ) { } }
return PoolUtils . doWorkInPoolNicely ( this . jedisPool , new KeyDAOPoolWork ( key ) ) ;
public class AttributeProxy { public void put_property ( DbDatum property ) throws DevFailed { } }
DbAttribute db_att = new DbAttribute ( deviceName ) ; db_att . add ( property ) ; dev . put_attribute_property ( db_att ) ;
public class CqlBulkRecordWriter { /** * The column values must correspond to the order in which * they appear in the insert stored procedure . * Key is not used , so it can be null or any object . * @ param key * any object or null . * @ param values * the values to write . * @ throws IOException */ @ Override public void write ( Object key , List < ByteBuffer > values ) throws IOException { } }
prepareWriter ( ) ; try { ( ( CQLSSTableWriter ) writer ) . rawAddRow ( values ) ; if ( null != progress ) progress . progress ( ) ; if ( null != context ) HadoopCompat . progress ( context ) ; } catch ( InvalidRequestException e ) { throw new IOException ( "Error adding row with key: " + key , e ) ; }
public class MemoryFileItem { /** * A convenience method to write an uploaded item to disk . The client code * is not concerned with whether or not the item is stored in memory , or on * disk in a temporary location . They just want to write the uploaded item * to a file . * This implementation first attempts to rename the uploaded item to the * specified destination file , if the item was originally written to disk . * Otherwise , the data will be copied to the specified file . * This method is only guaranteed to work < em > once < / em > , the first time it * is invoked for a particular item . This is because , in the event that the * method renames a temporary file , that file will no longer be available * to copy or rename again at a later time . * @ param file the { @ code File } into which the uploaded item should * be stored * @ throws Exception if an error occurs */ public void write ( File file ) throws Exception { } }
try ( FileOutputStream fout = new FileOutputStream ( file ) ) { fout . write ( get ( ) ) ; }
public class PartitionIDRange { /** * = = = = = Helper methods to generate PartitionIDRanges = = = = = */ public static List < PartitionIDRange > getGlobalRange ( final int partitionBits ) { } }
Preconditions . checkArgument ( partitionBits >= 0 && partitionBits < ( Integer . SIZE - 1 ) , "Invalid partition bits: %s" , partitionBits ) ; final int partitionIdBound = ( 1 << ( partitionBits ) ) ; return ImmutableList . of ( new PartitionIDRange ( 0 , partitionIdBound , partitionIdBound ) ) ;
public class Evaluation {
    /**
     * Calculates the average recall for all classes; macro or micro averaging
     * can be selected. NOTE: for macro averaging, classes with tp=0 and fn=0
     * (recall = 0/0) are excluded from the average.
     *
     * @param averaging averaging method - macro or micro
     * @return average recall
     */
    public double recall(EvaluationAveraging averaging) {
        if (getNumRowCounter() == 0.0) {
            return 0.0; // No data
        }
        int nClasses = confusion().getClasses().size();
        if (averaging == EvaluationAveraging.Macro) {
            // Macro: mean of per-class recalls, skipping undefined (sentinel -1) classes.
            double macroRecall = 0.0;
            int count = 0;
            for (int i = 0; i < nClasses; i++) {
                double thisClassRecall = recall(i, -1);
                if (thisClassRecall != -1) {
                    macroRecall += thisClassRecall;
                    count++;
                }
            }
            // NOTE(review): if every class is excluded, count == 0 and this yields NaN
            // (0.0/0) — confirm whether callers expect NaN or 0.0 in that case.
            macroRecall /= count;
            return macroRecall;
        } else if (averaging == EvaluationAveraging.Micro) {
            // Micro: pool tp/fn counts across all classes, then compute one recall.
            long tpCount = 0;
            long fnCount = 0;
            for (int i = 0; i < nClasses; i++) {
                tpCount += truePositives.getCount(i);
                fnCount += falseNegatives.getCount(i);
            }
            return EvaluationUtils.recall(tpCount, fnCount, DEFAULT_EDGE_VALUE);
        } else {
            throw new UnsupportedOperationException("Unknown averaging approach: " + averaging);
        }
    }
}
public class PeerTableImpl { /** * Life cycle */ @ Override public void start ( ) throws IllegalDiameterStateException , IOException { } }
logger . debug ( "Starting PeerTable. Going to call connect on all peers in the peerTable" ) ; for ( Peer peer : peerTable . values ( ) ) { try { peer . connect ( ) ; } catch ( Exception e ) { logger . warn ( "Can not start connect procedure to peer [" + peer + "]" , e ) ; } } logger . debug ( "Calling start on the router" ) ; router . start ( ) ; isStarted = true ;
public class SslCodec {
    /**
     * Creates a new downstream connection as {@link LinkedIOSubchannel} of the
     * network connection together with an {@link SSLEngine}.
     *
     * @param event the accepted event
     */
    @Handler(channels = EncryptedChannel.class)
    public void onConnected(Connected event, IOSubchannel encryptedChannel) {
        // The PlainChannel constructor is invoked purely for its side effects —
        // presumably it links itself to the encrypted subchannel on construction
        // (the instance reference is intentionally discarded). TODO confirm.
        new PlainChannel(event, encryptedChannel);
    }
}
public class ModelsImpl { /** * Create an entity role for an entity in the application . * @ param appId The application ID . * @ param versionId The version ID . * @ param hEntityId The hierarchical entity extractor ID . * @ param createHierarchicalEntityRoleOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the UUID object */ public Observable < ServiceResponse < UUID > > createHierarchicalEntityRoleWithServiceResponseAsync ( UUID appId , String versionId , UUID hEntityId , CreateHierarchicalEntityRoleOptionalParameter createHierarchicalEntityRoleOptionalParameter ) { } }
if ( this . client . endpoint ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.endpoint() is required and cannot be null." ) ; } if ( appId == null ) { throw new IllegalArgumentException ( "Parameter appId is required and cannot be null." ) ; } if ( versionId == null ) { throw new IllegalArgumentException ( "Parameter versionId is required and cannot be null." ) ; } if ( hEntityId == null ) { throw new IllegalArgumentException ( "Parameter hEntityId is required and cannot be null." ) ; } final String name = createHierarchicalEntityRoleOptionalParameter != null ? createHierarchicalEntityRoleOptionalParameter . name ( ) : null ; return createHierarchicalEntityRoleWithServiceResponseAsync ( appId , versionId , hEntityId , name ) ;
public class UriUtils { /** * Creates an URI as a String . * @ param scheme * the scheme * @ param host * the host * @ param port * the port * @ param path * the path * @ param query * the query * @ param fragment * the fragment * @ return the uri */ public static String createURI ( final String scheme , final String host , int port , final String path , final String query , final String fragment ) { } }
StringBuilder buffer = new StringBuilder ( Parameters . SMALL_BUFFER_SIZE ) ; if ( host != null ) { if ( scheme != null ) { buffer . append ( scheme ) ; buffer . append ( "://" ) ; } buffer . append ( host ) ; if ( port > 0 ) { buffer . append ( ':' ) ; buffer . append ( port ) ; } } if ( path == null || ! path . startsWith ( "/" ) ) { buffer . append ( '/' ) ; } if ( path != null ) { buffer . append ( path ) ; } if ( query != null ) { buffer . append ( '?' ) ; buffer . append ( query ) ; } if ( fragment != null ) { buffer . append ( '#' ) ; buffer . append ( fragment ) ; } return buffer . toString ( ) ;
public class SearchUrl { /** * Get Resource Url for DeleteSearchTuningRule * @ param searchTuningRuleCode The unique identifier of the search tuning rule . * @ return String Resource Url */ public static MozuUrl deleteSearchTuningRuleUrl ( String searchTuningRuleCode ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/commerce/catalog/admin/search/searchtuningrules/{searchTuningRuleCode}" ) ; formatter . formatUrl ( "searchTuningRuleCode" , searchTuningRuleCode ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class FSFlowTemplateCatalog { /** * Determine if an URI of a jobTemplate or a FlowTemplate is valid . * @ param flowURI The given job / flow template * @ return true if the URI is valid . */ private boolean validateTemplateURI ( URI flowURI ) { } }
if ( ! this . sysConfig . hasPath ( ServiceConfigKeys . TEMPLATE_CATALOGS_FULLY_QUALIFIED_PATH_KEY ) ) { log . error ( "Missing config " + ServiceConfigKeys . TEMPLATE_CATALOGS_FULLY_QUALIFIED_PATH_KEY ) ; return false ; } if ( ! flowURI . getScheme ( ) . equals ( FS_SCHEME ) ) { log . error ( "Expected scheme " + FS_SCHEME + " got unsupported scheme " + flowURI . getScheme ( ) ) ; return false ; } return true ;
public class PlainTime { /** * / * [ deutsch ] * < p > Erzeugt eine neue Uhrzeit mit Stunde , Minute , Sekunde und * Nanosekunde . < / p > * @ param hour hour in the range { @ code 0-23 } or { @ code 24} * if the other argumenta equal to { @ code 0} * @ param minute minute in the range { @ code 0-59} * @ param second second in the range { @ code 0-59} * @ param nanosecond nanosecond in the range { @ code 0-999,999,999} * @ return new or cached wall time * @ throws IllegalArgumentException if any argument is out of range * @ see # of ( int ) * @ see # of ( int , int ) * @ see # of ( int , int , int ) * @ see # NANO _ OF _ SECOND */ public static PlainTime of ( int hour , int minute , int second , int nanosecond ) { } }
return PlainTime . of ( hour , minute , second , nanosecond , true ) ;
public class gslbsite_binding { /** * Use this API to fetch gslbsite _ binding resource of given name . */ public static gslbsite_binding get ( nitro_service service , String sitename ) throws Exception { } }
gslbsite_binding obj = new gslbsite_binding ( ) ; obj . set_sitename ( sitename ) ; gslbsite_binding response = ( gslbsite_binding ) obj . get_resource ( service ) ; return response ;
public class FeatureGen { /** * Write features . * @ param fout the fout * @ throws IOException Signals that an I / O exception has occurred . */ public void writeFeatures ( PrintWriter fout ) throws IOException { } }
// write the number of features fout . println ( Integer . toString ( features . size ( ) ) ) ; for ( int i = 0 ; i < features . size ( ) ; i ++ ) { Feature f = ( Feature ) features . get ( i ) ; fout . println ( f . toString ( data . cpInt2Str , data . lbInt2Str ) ) ; } // wirte the line # # # . . . fout . println ( Option . modelSeparator ) ;
public class DiagnosticsInner {
    /**
     * Get Detector.
     *
     * @param resourceGroupName name of the resource group to which the resource belongs
     * @param siteName site name
     * @param diagnosticCategory diagnostic category
     * @param detectorName detector name
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;DetectorDefinitionInner&gt; object
     */
    public Observable<ServiceResponse<Page<DetectorDefinitionInner>>> getSiteDetectorWithServiceResponseAsync(
            final String resourceGroupName, final String siteName, final String diagnosticCategory,
            final String detectorName) {
        // Fetch the first page, then recursively chain follow-up pages: each emitted
        // page is concatenated with the observable for its nextPageLink until the
        // service stops returning a next link.
        return getSiteDetectorSinglePageAsync(resourceGroupName, siteName, diagnosticCategory, detectorName)
            .concatMap(new Func1<ServiceResponse<Page<DetectorDefinitionInner>>, Observable<ServiceResponse<Page<DetectorDefinitionInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<DetectorDefinitionInner>>> call(
                        ServiceResponse<Page<DetectorDefinitionInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        // Last page — emit it and stop.
                        return Observable.just(page);
                    }
                    // Emit this page, followed by the rest of the pagination chain.
                    return Observable.just(page)
                        .concatWith(getSiteDetectorNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
}
public class UtilConversion { /** * Convert binary array to number representation . * @ param binary The binary to convert ( must not be < code > null < / code > ) . * @ return The number representation . * @ throws LionEngineException If invalid arguments . */ public static int fromBinary ( boolean [ ] binary ) { } }
Check . notNull ( binary ) ; int number = 0 ; for ( final boolean current : binary ) { number = number << 1 | boolToInt ( current ) ; } return number ;
public class BulkheadAspect { /** * handle the asynchronous completable future flow * @ param proceedingJoinPoint AOPJoinPoint * @ param bulkhead configured bulkhead * @ return CompletionStage */ private Object handleJoinPointCompletableFuture ( ProceedingJoinPoint proceedingJoinPoint , io . github . resilience4j . bulkhead . Bulkhead bulkhead ) { } }
return bulkhead . executeCompletionStage ( ( ) -> { try { return ( CompletionStage < ? > ) proceedingJoinPoint . proceed ( ) ; } catch ( Throwable throwable ) { throw new CompletionException ( throwable ) ; } } ) ;
public class Introspector { /** * Gets the < code > BeanInfo < / code > object which contains the information of the properties , events and methods of the specified bean class . * < ol > * < li > If < code > flag = = IGNORE _ ALL _ BEANINFO < / code > , the < code > Introspector < / code > will ignore all < code > BeanInfo < / code > class . < / li > * < li > If < code > flag = = IGNORE _ IMMEDIATE _ BEANINFO < / code > , the < code > Introspector < / code > will ignore the < code > BeanInfo < / code > class of the current bean class . * < / li > * < li > If < code > flag = = USE _ ALL _ BEANINFO < / code > , the < code > Introspector < / code > will use all < code > BeanInfo < / code > class which have been found . < / li > * < / ol > * The < code > Introspector < / code > will cache the < code > BeanInfo < / code > object . Subsequent calls to this method will be answered with the cached data . * @ param beanClass * the specified bean class . * @ param flags * the flag to control the usage of the explicit < code > BeanInfo < / code > class . * @ return the < code > BeanInfo < / code > of the bean class . * @ throws IntrospectionException */ public static BeanInfo getBeanInfo ( Class < ? > beanClass , int flags ) throws IntrospectionException { } }
if ( flags == USE_ALL_BEANINFO ) { // try to use cache return getBeanInfo ( beanClass ) ; } return getBeanInfoImplAndInit ( beanClass , null , flags ) ;
public class DefaultParameterNameProvider {
    /**
     * Generates default placeholder names ("arg0", "arg1", ...) for the given
     * parameter types.
     *
     * @param parameterTypes the parameter types
     * @return a list with one generated name per parameter
     */
    protected List<String> defaultParameterTypes(Class<?>[] parameterTypes) {
        final List<String> names = new ArrayList<>(parameterTypes.length);
        int index = 0;
        for (Class<?> ignored : parameterTypes) {
            names.add("arg" + index);
            index++;
        }
        return names;
    }
}
public class StandardFieldsDialog { /** * Sets the given pop up menu to the field with the given label . * The pop up menu is only set to { @ link JComponent } fields . * @ param fieldLabel the label of the field . * @ param popup the pop up menu . * @ since TODO add version * @ see JComponent # setComponentPopupMenu ( JPopupMenu ) */ public void setFieldPopupMenu ( String fieldLabel , JPopupMenu popup ) { } }
Component c = this . fieldMap . get ( fieldLabel ) ; if ( c != null ) { if ( c instanceof JComponent ) { ( ( JComponent ) c ) . setComponentPopupMenu ( popup ) ; } else { handleUnexpectedFieldClass ( fieldLabel , c ) ; } }
public class CSSDataURL { /** * Get the data content of this Data URL as String . If no String * representation was provided in the constructor , than it is lazily created * inside this method in which case instances of this class are not * thread - safe . If a non - < code > null < / code > String was provided in the * constructor , this object is immutable . No Base64 encoding is performed in * this method . * @ return The content in a String representation using the charset of this * object . Never < code > null < / code > . */ @ Nonnull public String getContentAsString ( ) { } }
// Lazily decode the raw bytes (m_aContent) with this object's charset and
// cache the result in m_sContent; later calls return the cached string.
// NOTE(review): the unsynchronized lazy init is exactly why the Javadoc
// warns this path is not thread-safe.
if ( m_sContent == null ) m_sContent = new String ( m_aContent , m_aCharset ) ; return m_sContent ;
public class FilterableTableExample { /** * Helper to create the table column heading ' s WDecoratedLabel . * @ param text The readable text content of the column header * @ param menu The WMenu we want in this column header . * @ return WDecoratedLabel used to create a column heading . */ private WDecoratedLabel buildColumnHeader ( final String text , final WMenu menu ) { } }
WDecoratedLabel label = new WDecoratedLabel ( null , new WText ( text ) , menu ) ; return label ;
public class PeerAwareInstanceRegistryImpl { /** * Replicate the < em > ASG status < / em > updates to peer eureka nodes . If this * event is a replication from other nodes , then it is not replicated to * other nodes . * @ param asgName the asg name for which the status needs to be replicated . * @ param newStatus the { @ link ASGStatus } information that needs to be replicated . * @ param isReplication true if this is a replication event from other nodes , false otherwise . */ @ Override public void statusUpdate ( final String asgName , final ASGStatus newStatus , final boolean isReplication ) { } }
// Guard: events that arrived via replication are applied locally only,
// which prevents infinite replication loops between peers. Otherwise the
// ASG status is fanned out to every known peer Eureka node.
// If this is replicated from an other node , do not try to replicate again .
if ( isReplication ) { return ; } for ( final PeerEurekaNode node : peerEurekaNodes . getPeerEurekaNodes ( ) ) { replicateASGInfoToReplicaNodes ( asgName , newStatus , node ) ; }
public class MethodInfo { /** * Get a list of annotations on this method , along with any annotation parameter values . * @ return a list of annotations on this method , along with any annotation parameter values , wrapped in * { @ link AnnotationInfo } objects , or the empty list if none . */ public AnnotationInfoList getAnnotationInfo ( ) { } }
// Precondition: annotation scanning must have been enabled before scan();
// otherwise fail fast with an instructive message. With no annotations
// recorded, return the shared empty list; else resolve indirect
// (inherited/meta) annotations from the stored ones.
if ( ! scanResult . scanSpec . enableAnnotationInfo ) { throw new IllegalArgumentException ( "Please call ClassGraph#enableAnnotationInfo() before #scan()" ) ; } return annotationInfo == null ? AnnotationInfoList . EMPTY_LIST : AnnotationInfoList . getIndirectAnnotations ( annotationInfo , /* annotatedClass = */ null ) ;
public class RLogin { /** * Disconnect the rlogin client from the remote host . */ public void disconnect ( ) { } }
try { if ( client . isConnected ( ) ) { client . disconnect ( ) ; } if ( standardInputReaderThread != null ) { standardInputReaderThread = null ; } if ( outputReaderThread != null ) { outputReaderThread . join ( ) ; outputReaderThread = null ; } writer = null ; } catch ( InterruptedException ex ) { } catch ( IOException e ) { logger . fatal ( "Error while disconnecting from rlogin session. Host: " + remoteHost , e ) ; }
public class Group { /** * read _ attribute _ asynch _ i - access limited to package Group */ @ Override int read_attribute_asynch_i ( final String a , final boolean fwd , final int rid ) throws DevFailed { } }
// Under the group lock, fan the asynchronous attribute read out to the
// contained elements: device elements always receive it, sub-groups only
// when fwd (forward) is set. The caller-supplied request id is returned
// unchanged so replies can be correlated.
// NOTE(review): uses a raw Iterator with an explicit cast - presumably a
// pre-generics collection; verify before modernizing.
synchronized ( this ) { final Iterator it = elements . iterator ( ) ; while ( it . hasNext ( ) ) { final GroupElement e = ( GroupElement ) it . next ( ) ; if ( e instanceof GroupDeviceElement || fwd ) { e . read_attribute_asynch_i ( a , fwd , rid ) ; } } return rid ; }
public class IntegerStringBinding { /** * { @ inheritDoc } */ @ Override public Integer unmarshal ( String object ) { } }
return Integer . valueOf ( Integer . parseInt ( object ) ) ;
public class ApnsPayloadBuilder { /** * < p > Sets the key of the subtitle string in the receiving app ' s localized string list to be shown for the push * notification . Clears any previously - set literal subtitle . The message in the app ' s string list may optionally * have placeholders , which will be populated by values from the given { @ code alertSubtitleArguments } . < / p > * < p > By default , no subtitle is included . Requires iOS 10 or newer . < / p > * @ param localizedAlertSubtitleKey a key to a string in the receiving app ' s localized string list * @ param alertSubtitleArguments arguments to populate placeholders in the localized subtitle string ; may be * { @ code null } * @ return a reference to this payload builder * @ since 0.8.1 */ public ApnsPayloadBuilder setLocalizedAlertSubtitle ( final String localizedAlertSubtitleKey , final String ... alertSubtitleArguments ) { } }
this . localizedAlertSubtitleKey = localizedAlertSubtitleKey ; this . localizedAlertSubtitleArguments = ( alertSubtitleArguments != null && alertSubtitleArguments . length > 0 ) ? alertSubtitleArguments : null ; this . alertSubtitle = null ; return this ;
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertIfcTimeSeriesScheduleTypeEnumToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class InstanceClient { /** * Retrieves aggregated list of all of the instances in your project across all regions and zones . * < p > Sample code : * < pre > < code > * try ( InstanceClient instanceClient = InstanceClient . create ( ) ) { * ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ; * for ( InstancesScopedList element : instanceClient . aggregatedListInstances ( project ) . iterateAll ( ) ) { * / / doThingsWith ( element ) ; * < / code > < / pre > * @ param project Project ID for this request . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final AggregatedListInstancesPagedResponse aggregatedListInstances ( ProjectName project ) { } }
// Convenience overload: wraps the project name (null-safe) into the HTTP
// request object and delegates to the request-based overload.
AggregatedListInstancesHttpRequest request = AggregatedListInstancesHttpRequest . newBuilder ( ) . setProject ( project == null ? null : project . toString ( ) ) . build ( ) ; return aggregatedListInstances ( request ) ;
public class IRI_CustomFieldSerializer { /** * Deserializes the content of the object from the * { @ link com . google . gwt . user . client . rpc . SerializationStreamReader } . * @ param streamReader the { @ link com . google . gwt . user . client . rpc . SerializationStreamReader } to read the * object ' s content from * @ param instance the object instance to deserialize * @ throws com . google . gwt . user . client . rpc . SerializationException * if the deserialization operation is not * successful */ @ Override public void deserializeInstance ( SerializationStreamReader streamReader , IRI instance ) throws SerializationException { } }
// Thin adapter: delegates to the static deserialize helper required by the
// GWT custom field serializer convention.
deserialize ( streamReader , instance ) ;
public class JdbcCpoXaAdapter { /** * Update the Object in the datasource . The CpoAdapter will check to see if the object exists in the datasource . If it * exists then the object will be updated . If it does not exist , an exception will be thrown * < pre > Example : * < code > * class SomeObject so = new SomeObject ( ) ; * class CpoAdapter cpo = null ; * try { * cpo = new JdbcCpoAdapter ( new JdbcDataSourceInfo ( driver , url , user , password , 1,1 , false ) ) ; * } catch ( CpoException ce ) { * / / Handle the error * cpo = null ; * if ( cpo ! = null ) { * so . setId ( 1 ) ; * so . setName ( " SomeName " ) ; * try { * cpo . updateObject ( " updateSomeObject " , so ) ; * } catch ( CpoException ce ) { * / / Handle the error * < / code > * < / pre > * @ param name The String name of the UPDATE Function Group that will be used to create the object in the datasource . * null signifies that the default rules will be used . * @ param obj This is an object that has been defined within the metadata of the datasource . If the class is not * defined an exception will be thrown . * @ return The number of objects updated in the datasource * @ throws CpoException Thrown if there are errors accessing the datasource */ @ Override public < T > long updateObject ( String name , T obj ) throws CpoException { } }
// XA wrapper: forwards to the adapter bound to the current XA resource,
// which performs the actual update.
return getCurrentResource ( ) . updateObject ( name , obj ) ;
public class LdapDirectory {
    /**
     * Returns the value of the named property, falling back to the supplied
     * default. When the property is absent, the default is also stored back
     * into the Properties object so later lookups see it.
     *
     * @param pr the properties to consult (and possibly update)
     * @param name the property name
     * @param defaultVal value to store and return when the property is unset
     * @return the existing property value, or {@code defaultVal} if none
     */
    public String checkProp(final Properties pr, final String name, final String defaultVal) {
        final String existing = pr.getProperty(name);
        if (existing != null) {
            return existing;
        }
        // Persist the default so the Properties object is self-describing.
        pr.put(name, defaultVal);
        return defaultVal;
    }
}
public class LightblueHttpClient { /** * ( non - Javadoc ) * @ see com . redhat . lightblue . client . LightblueClient # data ( com . redhat . lightblue . client . request . LightblueRequest ) */ @ Override public DefaultLightblueDataResponse data ( LightblueDataRequest lightblueRequest ) throws LightblueParseException , LightblueResponseException , LightblueHttpClientException , LightblueException { } }
// Default the request's execution settings from the client configuration
// when the caller did not set any (note: this mutates the passed request),
// then call the data service and wrap the raw HTTP response.
if ( ! lightblueRequest . hasExecution ( ) ) { lightblueRequest . execution ( configuration . getExecution ( ) ) ; } HttpResponse response = callService ( lightblueRequest , configuration . getDataServiceURI ( ) ) ; return new DefaultLightblueDataResponse ( response . getBody ( ) , response . getHeaders ( ) , mapper ) ;
public class NumberConverter { /** * STATE SAVE / RESTORE */ public void restoreState ( FacesContext facesContext , Object state ) { } }
// Restores converter state from the Object[] produced by saveState. The
// array positions are fixed: 0 currencyCode, 1 currencySymbol, 2 locale,
// 3-6 max/min fraction/integer digits (null-safe, defaulting to 0),
// 7 pattern, 8 type, 9 groupingUsed, 10 integerOnly, 11-14 the
// corresponding "digits set" flags. Order must match saveState exactly.
if ( state != null ) { Object values [ ] = ( Object [ ] ) state ; _currencyCode = ( String ) values [ 0 ] ; _currencySymbol = ( String ) values [ 1 ] ; _locale = ( Locale ) values [ 2 ] ; Integer value = ( Integer ) values [ 3 ] ; _maxFractionDigits = value != null ? value . intValue ( ) : 0 ; value = ( Integer ) values [ 4 ] ; _maxIntegerDigits = value != null ? value . intValue ( ) : 0 ; value = ( Integer ) values [ 5 ] ; _minFractionDigits = value != null ? value . intValue ( ) : 0 ; value = ( Integer ) values [ 6 ] ; _minIntegerDigits = value != null ? value . intValue ( ) : 0 ; _pattern = ( String ) values [ 7 ] ; _type = ( String ) values [ 8 ] ; _groupingUsed = ( ( Boolean ) values [ 9 ] ) . booleanValue ( ) ; _integerOnly = ( ( Boolean ) values [ 10 ] ) . booleanValue ( ) ; _maxFractionDigitsSet = ( ( Boolean ) values [ 11 ] ) . booleanValue ( ) ; _maxIntegerDigitsSet = ( ( Boolean ) values [ 12 ] ) . booleanValue ( ) ; _minFractionDigitsSet = ( ( Boolean ) values [ 13 ] ) . booleanValue ( ) ; _minIntegerDigitsSet = ( ( Boolean ) values [ 14 ] ) . booleanValue ( ) ; }
public class TreeInfo { /** * Skip parens and return the enclosed expression */ public static JCTree skipParens ( JCTree tree ) { } }
// If the tree is a parenthesized expression, strip the parentheses via the
// JCParens overload; otherwise return the tree unchanged.
// NOTE(review): this relies on an overload skipParens(JCParens) declared
// elsewhere that unwraps .expr - otherwise the cast alone would recurse
// forever. Confirm the overload exists before refactoring.
if ( tree . hasTag ( PARENS ) ) return skipParens ( ( JCParens ) tree ) ; else return tree ;
public class ClientProxyImpl { /** * CHECKSTYLE : OFF */ private Object doChainedInvocation ( URI uri , MultivaluedMap < String , String > headers , OperationResourceInfo ori , Object [ ] methodParams , Object body , int bodyIndex , Exchange exchange , Map < String , Object > invocationContext ) throws Throwable { } }
// Overview: installs the configured Bus and its ClassLoader on the current
// thread, builds the outbound Message (body type, oneway flag, operation
// metadata, annotations, request context), runs the interceptor chain
// (async path short-circuits via doInvokeAsync), and finally restores the
// previous ClassLoader and Bus even on failure.
// CHECKSTYLE : ON
Bus configuredBus = getConfiguration ( ) . getBus ( ) ; Bus origBus = BusFactory . getAndSetThreadDefaultBus ( configuredBus ) ; ClassLoaderHolder origLoader = null ; try { ClassLoader loader = configuredBus . getExtension ( ClassLoader . class ) ; if ( loader != null ) { origLoader = ClassLoaderUtils . setThreadContextClassloader ( loader ) ; } Message outMessage = createMessage ( body , ori , headers , uri , exchange , invocationContext , true ) ; if ( bodyIndex != - 1 ) { outMessage . put ( Type . class , ori . getMethodToInvoke ( ) . getGenericParameterTypes ( ) [ bodyIndex ] ) ; } outMessage . getExchange ( ) . setOneWay ( ori . isOneway ( ) ) ; setSupportOnewayResponseProperty ( outMessage ) ; outMessage . setContent ( OperationResourceInfo . class , ori ) ; setPlainOperationNameProperty ( outMessage , ori . getMethodToInvoke ( ) . getName ( ) ) ; outMessage . getExchange ( ) . put ( Method . class , ori . getMethodToInvoke ( ) ) ; outMessage . put ( Annotation . class . getName ( ) , getMethodAnnotations ( ori . getAnnotatedMethod ( ) , bodyIndex ) ) ; outMessage . getExchange ( ) . put ( Message . SERVICE_OBJECT , proxy ) ; if ( methodParams != null ) { outMessage . put ( List . class , Arrays . asList ( methodParams ) ) ; } if ( body != null ) { outMessage . put ( PROXY_METHOD_PARAM_BODY_INDEX , bodyIndex ) ; } outMessage . getInterceptorChain ( ) . add ( bodyWriter ) ; Map < String , Object > reqContext = getRequestContext ( outMessage ) ; reqContext . put ( OperationResourceInfo . class . getName ( ) , ori ) ; reqContext . put ( PROXY_METHOD_PARAM_BODY_INDEX , bodyIndex ) ;
// execute chain
InvocationCallback < Object > asyncCallback = checkAsyncCallback ( ori , reqContext , outMessage ) ; if ( asyncCallback != null ) { return doInvokeAsync ( ori , outMessage , asyncCallback ) ; } doRunInterceptorChain ( outMessage ) ; Object [ ] results = preProcessResult ( outMessage ) ; if ( results != null && results .
length == 1 ) { return results [ 0 ] ; } try { return handleResponse ( outMessage , ori . getClassResourceInfo ( ) . getServiceClass ( ) ) ; } finally { completeExchange ( outMessage . getExchange ( ) , true ) ; } } finally { if ( origLoader != null ) { origLoader . reset ( ) ; } if ( origBus != configuredBus ) { BusFactory . setThreadDefaultBus ( origBus ) ; } }
public class VersionRegEx { /** * Compares two Versions with additionally considering the build meta data field if * all other parts are equal . Note : This is < em > not < / em > part of the semantic version * specification . * Comparison of the build meta data parts happens exactly as for pre release * identifiers . Considering of build meta data first kicks in if both versions are * equal when using their natural order . * This method fulfills the general contract for Java ' s { @ link Comparator Comparators } * and { @ link Comparable Comparables } . * @ param v1 The first version for comparison . * @ param v2 The second version for comparison . * @ return A value below 0 iff { @ code v1 & lt ; v2 } , a value above 0 iff * { @ code v1 & gt ; v2 < / tt > and 0 iff < tt > v1 = v2 } . * @ throws NullPointerException If either parameter is null . * @ since 0.3.0 */ public static int compareWithBuildMetaData ( VersionRegEx v1 , VersionRegEx v2 ) { } }
// throw NPE to comply with Comparable specification if ( v1 == null ) { throw new NullPointerException ( "v1 is null" ) ; } else if ( v2 == null ) { throw new NullPointerException ( "v2 is null" ) ; } return compare ( v1 , v2 , true ) ;
public class GetBuiltinIntentResult { /** * An array of < code > BuiltinIntentSlot < / code > objects , one entry for each slot type in the intent . * @ param slots * An array of < code > BuiltinIntentSlot < / code > objects , one entry for each slot type in the intent . */ public void setSlots ( java . util . Collection < BuiltinIntentSlot > slots ) { } }
if ( slots == null ) { this . slots = null ; return ; } this . slots = new java . util . ArrayList < BuiltinIntentSlot > ( slots ) ;
public class GenerateAsyncMojo { /** * Determine if a client service method is deprecated . * @ see MGWT - 352 */ private boolean isDeprecated ( JavaMethod method ) { } }
if ( method == null ) return false ; for ( Annotation annotation : method . getAnnotations ( ) ) { if ( "java.lang.Deprecated" . equals ( annotation . getType ( ) . getFullyQualifiedName ( ) ) ) { return true ; } } return method . getTagByName ( "deprecated" ) != null ;
public class Matth { /** * Returns the smallest power of two greater than or equal to { @ code x } . This is equivalent to * { @ code checkedPow ( 2 , log2 ( x , CEILING ) ) } . * @ throws IllegalArgumentException if { @ code x < = 0} * @ throws ArithmeticException of the next - higher power of two is not representable as a * { @ code long } , i . e . when { @ code x > 2 ^ 62} * @ since 20.0 */ public static long ceilingPowerOfTwo ( long x ) { } }
// Validate x > 0, then guard against results beyond 2^62 (the largest
// power of two a long can hold). The bit trick: for x > 1,
// -numberOfLeadingZeros(x-1) mod 64 equals ceil(log2(x)), so the shift
// yields the ceiling power of two; for x == 1 the shift amount is -64,
// which Java masks to 0, giving 1.
checkPositive ( "x" , x ) ; if ( x > MAX_SIGNED_POWER_OF_TWO ) { throw new ArithmeticException ( "ceilingPowerOfTwo(" + x + ") is not representable as a long" ) ; } return 1L << - Long . numberOfLeadingZeros ( x - 1 ) ;
public class NewConnectionInitialReadCallback { /** * invoke discrimination process . * @ param inVC * @ param req * @ param errorOnRead */ private void sendToDiscriminators ( VirtualConnection inVC , TCPReadRequestContext req , boolean errorOnRead ) { } }
// Overview: runs the channel's discrimination process over the initial
// read data. SUCCESS hands the connection to the application callback (or
// closes it if the callback is gone); AGAIN reads more data and loops,
// unless the first read already errored or a full buffer was presented
// without a claim; FAILURE (or a DiscriminationProcessException) closes
// the connection.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . entry ( tc , "sendToDiscriminators" ) ; } boolean doAgain ; req . setJITAllocateSize ( 0 ) ;
// JIT Allocate was on for the initial read ,
// reset it
TCPConnLink conn = ( ( TCPReadRequestContextImpl ) req ) . getTCPConnLink ( ) ; VirtualConnection vc = inVC ; do { doAgain = false ; int state ; try { state = tcpChannel . getDiscriminationProcess ( ) . discriminate ( vc , req . getBuffers ( ) , conn ) ; } catch ( DiscriminationProcessException dpe ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) Tr . event ( tc , "Exception occurred while discriminating data received from client " + req . getInterface ( ) . getRemoteAddress ( ) + " " + req . getInterface ( ) . getRemotePort ( ) ) ; ( ( TCPReadRequestContextImpl ) req ) . getTCPConnLink ( ) . close ( vc , new IOException ( "Discrimination failed " + dpe . getMessage ( ) ) ) ; break ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Discrimination returned " + state ) ; } if ( state == DiscriminationProcess . SUCCESS ) { ConnectionReadyCallback cb = conn . getApplicationCallback ( ) ;
// is cb is null , then connlink may have been destroyed by channel stop
// if so , nothing more needs to be done
if ( cb != null ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "Calling application callback.ready method" ) ; } cb . ready ( vc ) ; } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "No application callback found, closing connection" ) ; } ( ( TCPReadRequestContextImpl ) req ) . getTCPConnLink ( ) . close ( vc , null ) ; } } else if ( state == DiscriminationProcess . AGAIN ) { if ( errorOnRead ) {
// error on first read , don ' t retry
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) Tr .
event ( tc , "First read timed out, and more than one discriminator asked for more data" + req . getInterface ( ) . getRemoteAddress ( ) + " " + req . getInterface ( ) . getRemotePort ( ) ) ; ( ( TCPReadRequestContextImpl ) req ) . getTCPConnLink ( ) . close ( vc , null ) ; } else if ( requestFull ( req ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) Tr . event ( tc , "Discrimination failed, no one claimed data even after 1 complete buffer presented - probably garbage passed in" + req . getInterface ( ) . getRemoteAddress ( ) + " " + req . getInterface ( ) . getRemotePort ( ) ) ; ( ( TCPReadRequestContextImpl ) req ) . getTCPConnLink ( ) . close ( vc , null ) ; } else { vc = req . read ( 1 , this , false , TCPRequestContext . USE_CHANNEL_TIMEOUT ) ; if ( vc != null ) { doAgain = true ; } } } else {
// FAILURE
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) Tr . event ( tc , "Error occurred while discriminating data received from client " + req . getInterface ( ) . getRemoteAddress ( ) + " " + req . getInterface ( ) . getRemotePort ( ) ) ; ( ( TCPReadRequestContextImpl ) req ) . getTCPConnLink ( ) . close ( vc , null ) ; } } while ( doAgain ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "sendToDiscriminators" ) ; }
public class HelpFormatter {
    /**
     * Prints the usage statement for the specified application.
     *
     * @param pw the PrintWriter to print the usage statement to
     * @param width the number of characters to display per line
     * @param app the application name
     * @param options the command line Options
     */
    public void printUsage(PrintWriter pw, int width, String app, Options options) {
        // initialise the string buffer
        StringBuffer buff = new StringBuffer(getSyntaxPrefix()).append(app).append(" ");

        // option groups already rendered, so each group is emitted only once
        Collection<OptionGroup> processedGroups = new ArrayList<OptionGroup>();

        List<Option> optList = new ArrayList<Option>(options.getOptions());
        if (getOptionComparator() != null) {
            Collections.sort(optList, getOptionComparator());
        }

        // iterate over the options
        for (Iterator<Option> it = optList.iterator(); it.hasNext();) {
            Option option = it.next();

            // check if the option is part of an OptionGroup
            OptionGroup group = options.getOptionGroup(option);
            if (group != null) {
                // render the whole group the first time one of its options is seen
                if (!processedGroups.contains(group)) {
                    processedGroups.add(group);
                    appendOptionGroup(buff, group);
                }
                // otherwise the option was displayed in the group previously, so skip it
            } else {
                appendOption(buff, option, option.isRequired());
            }

            if (it.hasNext()) {
                buff.append(" ");
            }
        }

        // Fix: build the usage string once instead of calling buff.toString()
        // twice (once for indexOf, once for the text itself).
        String usage = buff.toString();
        printWrapped(pw, width, usage.indexOf(' ') + 1, usage);
    }
}
public class UriUtil { /** * Get the path of a file from the Uri . * @ param contentResolver the content resolver which will query for the source file * @ param srcUri The source uri * @ return The Path for the file or null if doesn ' t exists */ @ Nullable public static String getRealPathFromUri ( ContentResolver contentResolver , final Uri srcUri ) { } }
// For content:// URIs, query the resolver and read the DATA column of the
// first row (cursor always closed in finally); for file:// URIs, use the
// URI path directly; anything else yields null.
String result = null ; if ( isLocalContentUri ( srcUri ) ) { Cursor cursor = null ; try { cursor = contentResolver . query ( srcUri , null , null , null , null ) ; if ( cursor != null && cursor . moveToFirst ( ) ) { int idx = cursor . getColumnIndex ( MediaStore . Images . ImageColumns . DATA ) ; if ( idx != - 1 ) { result = cursor . getString ( idx ) ; } } } finally { if ( cursor != null ) { cursor . close ( ) ; } } } else if ( isLocalFileUri ( srcUri ) ) { result = srcUri . getPath ( ) ; } return result ;
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertWindowSpecificationCFORMATToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class GenericLogicDiscoverer { /** * Generic implementation for finding all the Services or Operations that have one specific type as input or output . * @ param entityType the MSM URI of the type of entity we are looking for . Only supports Service and Operation . * @ param relationship the MSM URI of the relationship we are looking for . Only supports hasInput and hasOutput . * @ param type the input / output type ( modelReference that is ) we are looking for . * @ returns a Set of URIs of matching services / operations . Note that this method makes no use of reasoning and therefore * the match will always be exact in this case . */ private Set < URI > listEntitiesWithType ( URI entityType , URI relationship , URI type ) { } }
// Two-level dispatch on (entityType, relationship): Service vs Operation,
// then hasInput vs hasOutput, each delegating to the matching
// serviceManager query. Unsupported combinations fall through and return
// the initial immutable empty set.
Set < URI > entities = ImmutableSet . of ( ) ;
// Deal with services
if ( entityType . toASCIIString ( ) . equals ( MSM . Service . getURI ( ) ) ) {
// Get the adequate type
if ( relationship . toASCIIString ( ) . equals ( MSM . hasInput . getURI ( ) ) ) { entities = this . serviceManager . listServicesWithInputType ( type ) ; } else if ( relationship . toASCIIString ( ) . equals ( MSM . hasOutput . getURI ( ) ) ) { entities = this . serviceManager . listServicesWithOutputType ( type ) ; }
// Deal with operations
} else if ( entityType . toASCIIString ( ) . equals ( MSM . Operation . getURI ( ) ) ) {
// Get the adequate type
if ( relationship . toASCIIString ( ) . equals ( MSM . hasInput . getURI ( ) ) ) { entities = this . serviceManager . listOperationsWithInputType ( type ) ; } else if ( relationship . toASCIIString ( ) . equals ( MSM . hasOutput . getURI ( ) ) ) { entities = this . serviceManager . listOperationsWithOutputType ( type ) ; } } return entities ;
public class Job { /** * Perform log rotation . */ public void logRotate ( ) throws IOException , InterruptedException { } }
BuildDiscarder bd = getBuildDiscarder ( ) ; if ( bd != null ) bd . perform ( this ) ;
public class CmisConnector { /** * Converts binary content into JCR node . * @ param id the id of the CMIS document . * @ return JCR node representation . */ private Document cmisContent ( String id ) { } }
// Builds an nt:resource child node for the CMIS document: wraps the
// document's content stream (when present) as a lazy binary jcr:data
// property plus its MIME type, and copies the CMIS last-modification
// date/author into the JCR last-modified properties.
DocumentWriter writer = newDocument ( ObjectId . toString ( ObjectId . Type . CONTENT , id ) ) ; org . apache . chemistry . opencmis . client . api . Document doc = ( org . apache . chemistry . opencmis . client . api . Document ) session . getObject ( id ) ; writer . setPrimaryType ( NodeType . NT_RESOURCE ) ; writer . setParent ( id ) ; ContentStream contentStream = doc . getContentStream ( ) ; if ( contentStream != null ) { BinaryValue content = new CmisConnectorBinary ( contentStream , getSourceName ( ) , id , getMimeTypeDetector ( ) ) ; writer . addProperty ( JcrConstants . JCR_DATA , content ) ; writer . addProperty ( JcrConstants . JCR_MIME_TYPE , contentStream . getMimeType ( ) ) ; } Property < Object > lastModified = doc . getProperty ( PropertyIds . LAST_MODIFICATION_DATE ) ; Property < Object > lastModifiedBy = doc . getProperty ( PropertyIds . LAST_MODIFIED_BY ) ; writer . addProperty ( JcrLexicon . LAST_MODIFIED , properties . jcrValues ( lastModified ) ) ; writer . addProperty ( JcrLexicon . LAST_MODIFIED_BY , properties . jcrValues ( lastModifiedBy ) ) ; return writer . document ( ) ;
public class MetaClassImpl { /** * Invoke a missing method on the given object with the given arguments . * @ param instance The object the method should be invoked on . * @ param methodName The name of the method to invoke . * @ param arguments The arguments to the invoked method . * @ return The result of the method invocation . */ public Object invokeMissingMethod ( Object instance , String methodName , Object [ ] arguments ) { } }
// Convenience overload: delegates with no original message (null) and
// isCallToSuper = false.
return invokeMissingMethod ( instance , methodName , arguments , null , false ) ;
public class CollapseSpliterator { /** * l + < first | acc | ? > */ private R pushLeft ( T first , R acc ) { } }
// Hands the partial accumulation (first element + acc) to the left
// neighbouring connector under the shared root lock: if the neighbour has
// no accumulation yet it simply adopts ours; if its rightmost element
// merges with our first, the accumulations are combined and drained;
// otherwise the boundary is sealed and the neighbour drained. Returns the
// accumulation the caller must still emit (none() when consumed here).
// NOTE(review): exact drain/none semantics depend on Connector internals
// not visible in this chunk - verify before any restructuring.
synchronized ( root ) { Connector < T , R > l = left ; if ( l == null ) return acc ; left = null ; l . rhs = null ; T laright = l . right ; l . right = none ( ) ; if ( l . acc == NONE ) { l . acc = acc ; l . left = first ; return none ( ) ; } if ( this . mergeable . test ( laright , first ) ) { l . acc = this . combiner . apply ( l . acc , acc ) ; return l . drainLeft ( ) ; } if ( l . left == NONE ) { left = new Connector < > ( null , acc , this ) ; return l . drain ( ) ; } } return acc ;
public class Model { /** * Sets attribute value as < code > java . sql . Date < / code > . * If there is a { @ link Converter } registered for the attribute that converts from Class < code > S < / code > to Class * < code > java . sql . Date < / code > , given the value is an instance of < code > S < / code > , then it will be used , * otherwise performs a conversion using { @ link Convert # toSqlDate ( Object ) } . * @ param attributeName name of attribute . * @ param value value to convert . * @ return reference to this model . */ public < T extends Model > T setDate ( String attributeName , Object value ) { } }
// Prefer a registered per-attribute Converter; fall back to the generic
// Convert.toSqlDate conversion, then store via setRaw.
Converter < Object , java . sql . Date > converter = modelRegistryLocal . converterForValue ( attributeName , value , java . sql . Date . class ) ; return setRaw ( attributeName , converter != null ? converter . convert ( value ) : Convert . toSqlDate ( value ) ) ;
public class Strings {
    /**
     * Fixes case of a word: "Str" becomes "str", but "URL" stays "URL".
     * A word is lower-cased only when its first character is upper case and
     * the second character (if any) is lower case - i.e. it is merely
     * capitalised, not an acronym.
     *
     * @param s Word to be fixed; may be null or empty
     * @return all-lowercase word, the unchanged acronym, or the input itself
     *         when null or empty
     */
    public static String fixCase(String s) {
        if (s == null || s.isEmpty()) {
            return s;
        }
        if (Character.isUpperCase(s.charAt(0)) && (s.length() == 1 || Character.isLowerCase(s.charAt(1)))) {
            // Fix: use a locale-independent lowering so the result does not
            // change under locales with special casing rules (e.g. Turkish
            // dotless i).
            s = s.toLowerCase(java.util.Locale.ROOT);
        }
        return s;
    }
}
public class MetricAggregator {
    /**
     * Retrieves (creating if necessary) the aggregate for the given metric at
     * the current minute.
     *
     * @param identity The metric identity
     * @return The metric aggregate for the metric at the current minute
     */
    private MetricAggregate getAggregate(final MetricIdentity identity) {
        // Fix: single get-then-put instead of containsKey+get double lookups.
        // get the map from utc minute to aggregate for this metric
        Map<Long, MetricAggregate> metricAggregates = aggregates.get(identity);
        if (metricAggregates == null) {
            metricAggregates = new HashMap<Long, MetricAggregate>();
            aggregates.put(identity, metricAggregates);
        }

        // get the aggregate for this minute, creating it on first access
        MetricAggregate aggregate = metricAggregates.get(currentMinute);
        if (aggregate == null) {
            aggregate = MetricAggregate.fromMetricIdentity(identity, currentMinute);
            // Gauges carry their last observed value forward into a new minute.
            if (identity.getType().equals(MetricMonitorType.GAUGE)) {
                if (lastValues.containsKey(identity)) {
                    aggregate.setValue(lastValues.get(identity));
                }
            }
            metricAggregates.put(currentMinute, aggregate);
        }
        return aggregate;
    }
}
public class CassandraJavaPairRDD { /** * Returns a copy of this RDD with connector changed to the specified one . */ @ SuppressWarnings ( "unchecked" ) public CassandraJavaPairRDD < K , V > withConnector ( CassandraConnector connector ) { } }
// Immutable-style setter: builds a new underlying Scala RDD with the given
// connector and wraps it back into the Java pair-RDD API.
CassandraRDD < Tuple2 < K , V > > newRDD = rdd ( ) . withConnector ( connector ) ; return wrap ( newRDD ) ;
public class XmlInputStream { /** * Keep track of what we ' ve given them . * @ param s the sequence of characters given * @ param wanted the number of characters wanted * @ param got the number of characters given */ private void given ( CharSequence s , int wanted , int got ) { } }
// Bookkeeping: record the handed-out characters in the 'red' buffer,
// advance the running 'given' counter, and trace wanted vs got.
// NOTE(review): the trace string is built even when trace is disabled;
// guard with an isTraceEnabled() check if the logger supports it.
red . append ( s ) ; given += got ; LOGGER . trace ( "Given: [" + wanted + "," + got + "]-" + s ) ;
public class HandlablesImpl { /** * Remove the object from its type list . * @ param type The type reference . * @ param object The object reference . */ private void remove ( Class < ? > type , Object object ) { } }
final Set < ? > set = items . get ( type ) ; if ( set != null ) { set . remove ( object ) ; }
public class SparseCpuLevel1 { /** * Find the index of the element with minimum absolute value * @ param N The number of elements in vector X * @ param X a vector * @ param incX The increment of X * @ return the index of the element with minimum absolute value */ @ Override protected int isamin ( long N , INDArray X , int incX ) { } }
// Delegates to the native CBLAS isamin routine via the float buffer's
// native address pointer; N is narrowed from long to the int the C API
// expects (values beyond Integer.MAX_VALUE would be truncated).
return ( int ) cblas_isamin ( ( int ) N , ( FloatPointer ) X . data ( ) . addressPointer ( ) , incX ) ;
public class PublishedTrackReader { /** * Add the requested query string arguments to the Request . * @ param request Request to add query string arguments to */ private void addQueryParams ( final Request request ) { } }
if ( getPageSize ( ) != null ) { request . addQueryParam ( "PageSize" , Integer . toString ( getPageSize ( ) ) ) ; }
public class HebrewCalendar { /** * Returns the length of the given month in the given year */ protected int handleGetMonthLength ( int extendedYear , int month ) { } }
// First normalizes out-of-range month values by walking whole years
// (12 or 13 months each, per monthsInYear); then HESHVAN and KISLEV get a
// length that depends on the year type, all other months a fixed length
// from column 0 of MONTH_LENGTH.
// Resolve out - of - range months . This is necessary in order to
// obtain the correct year . We correct to
// a 12 - or 13 - month year ( add / subtract 12 or 13 , depending
// on the year ) but since we _ always _ number from 0 . . 12 , and
// the leap year determines whether or not month 5 ( Adar 1)
// is present , we allow 0 . . 12 in any given year .
while ( month < 0 ) { month += monthsInYear ( -- extendedYear ) ; }
// Careful : allow 0 . . 12 in all years
while ( month > 12 ) { month -= monthsInYear ( extendedYear ++ ) ; }
switch ( month ) { case HESHVAN : case KISLEV :
// These two month lengths can vary
return MONTH_LENGTH [ month ] [ yearType ( extendedYear ) ] ; default :
// The rest are a fixed length
return MONTH_LENGTH [ month ] [ 0 ] ; }
public class ZCertStore { /** * check if a z85 - based public key is in the certificate store . * This method will scan the folder for changes on every call * @ param publicKey */ public boolean containsPublicKey ( String publicKey ) { } }
Utils . checkArgument ( publicKey . length ( ) == 40 , "z85 publickeys should have a length of 40 bytes but got " + publicKey . length ( ) ) ; reloadIfNecessary ( ) ; return publicKeys . containsKey ( publicKey ) ;
public class DockerComputerLauncher { /** * Wait until slave is up and ready for connection . */ public boolean waitUp ( String cloudId , DockerSlaveTemplate dockerSlaveTemplate , InspectContainerResponse containerInspect ) { } }
if ( isFalse ( containerInspect . getState ( ) . getRunning ( ) ) ) { throw new IllegalStateException ( "Container '" + containerInspect . getId ( ) + "' is not running!" ) ; } return true ;
public class MessageFieldDesc { /** * Move the pertinenent information from the request to this reply message . * Override this to be more specific . * Add some code like : this . put ( messageRequest . get ( ) ) ; */ public void moveRequestInfoToReply ( Message messageRequest ) { } }
super . moveRequestInfoToReply ( messageRequest ) ; if ( ( this . getKeyInformation ( ) & ECHO_PARAM ) != 0 ) { // Move this to reply this . put ( ( ( BaseMessage ) messageRequest ) . get ( this . getFullKey ( null ) ) ) ; }
public class ProcessorStylesheetElement {
    /**
     * Receive notification of the end of an element.
     *
     * @param handler non-null reference to current StylesheetHandler that is constructing the Templates.
     * @param uri The Namespace URI, or an empty string.
     * @param localName The local name (without prefix), or empty string if not namespace processing.
     * @param rawName The qualified name (with prefix).
     */
    public void endElement(StylesheetHandler handler, String uri, String localName, String rawName)
            throws org.xml.sax.SAXException {
        // Let the base processor run its end-of-element handling first.
        super.endElement(handler, uri, localName, rawName);
        // Unwind the handler's stacks: template element first, then the
        // stylesheet itself — presumably mirroring pushes made when this
        // element started (TODO confirm against the matching startElement).
        handler.popElemTemplateElement();
        handler.popStylesheet();
    }
}
public class Executable { /** * Launches a browser opening an URL . * @ param url the URL you want to open in the browser * @ throws IOException */ public static final void launchBrowser ( String url ) throws IOException { } }
try { if ( isMac ( ) ) { Class macUtils = Class . forName ( "com.apple.mrj.MRJFileUtils" ) ; Method openURL = macUtils . getDeclaredMethod ( "openURL" , new Class [ ] { String . class } ) ; openURL . invoke ( null , new Object [ ] { url } ) ; } else if ( isWindows ( ) ) Runtime . getRuntime ( ) . exec ( "rundll32 url.dll,FileProtocolHandler " + url ) ; else { // assume Unix or Linux String [ ] browsers = { "firefox" , "opera" , "konqueror" , "mozilla" , "netscape" } ; String browser = null ; for ( int count = 0 ; count < browsers . length && browser == null ; count ++ ) if ( Runtime . getRuntime ( ) . exec ( new String [ ] { "which" , browsers [ count ] } ) . waitFor ( ) == 0 ) browser = browsers [ count ] ; if ( browser == null ) throw new Exception ( "Could not find web browser." ) ; else Runtime . getRuntime ( ) . exec ( new String [ ] { browser , url } ) ; } } catch ( Exception e ) { throw new IOException ( "Error attempting to launch web browser" ) ; }
public class UberData { /** * Transform the payload of a { @ link EntityModel } into { @ link UberData } . * @ param obj * @ return */ private static List < UberData > extractProperties ( Object obj ) { } }
if ( PRIMITIVE_TYPES . contains ( obj . getClass ( ) ) ) { return Collections . singletonList ( new UberData ( ) . withValue ( obj ) ) ; } return PropertyUtils . findProperties ( obj ) . entrySet ( ) . stream ( ) . map ( entry -> new UberData ( ) . withName ( entry . getKey ( ) ) . withValue ( entry . getValue ( ) ) ) . collect ( Collectors . toList ( ) ) ;
public class SearcherFactory { /** * Creates an instance of the Searcher interface implementing the searching algorithm based on the SearchType , * otherwise returns the provided default Searcher implementation if a Searcher based on the specified SearchType * is not available . * @ param < T > the Class type of the actual Searcher implementation based on the SearchType . * @ param type the type of searching algorithm Searcher implementation to create . * @ param defaultSearcher the default Searcher implementation to use if a Searcher based on the specified SearchType * is not available . * @ return a Searcher implementation subclass that implements the searching algorithm based on the SearchType , * or the provided default Searcher implementation if the Searcher based on the SearchType is not available . * @ see # createSearcher ( SearchType ) * @ see org . cp . elements . util . search . Searcher * @ see org . cp . elements . util . search . SearchType */ public static < T extends Searcher > T createSearcherElseDefault ( final SearchType type , final T defaultSearcher ) { } }
try { return createSearcher ( type ) ; } catch ( IllegalArgumentException ignore ) { return defaultSearcher ; }
public class CoordinatorAccessor {
    /**
     * Fetches segment metadata for a datasource from the Druid coordinator.
     * Left is an error message; Right is a Tuple of (dimension names, metric names)
     * parsed from the first segment of the response.
     *
     * @param name the datasource name, appended to the coordinator metadata path
     * @param reqHeaders additional HTTP headers for the request
     * @return Left with an error description, or Right with the dimension and metric lists
     */
    public Either<String, Tuple2<List<String>, List<String>>> aboutDataSource(String name, Map<String, String> reqHeaders) {
        Either<String, Either<JSONArray, JSONObject>> resp =
                fireCommand("druid/coordinator/v1/metadata/datasources/" + name, null, reqHeaders);
        // Propagate transport/command failures unchanged.
        if (resp.isLeft()) {
            return new Left<>(resp.left().get());
        }
        Either<JSONArray, JSONObject> goodResp = resp.right().get();
        if (goodResp.isRight()) {
            JSONObject data = goodResp.right().get();
            if (data.has("segments")) {
                JSONArray segmentsArray = data.getJSONArray("segments");
                if (segmentsArray.length() == 0) {
                    return new Left<>("No segments received..");
                }
                // Only the first segment is inspected; assumes all segments of
                // the datasource share the same dimensions/metrics — TODO confirm.
                JSONObject firstItem = segmentsArray.getJSONObject(0);
                // "dimensions" and "metrics" arrive as comma-separated strings.
                String dims = firstItem.getString("dimensions");
                String metrics = firstItem.getString("metrics");
                return new Right<>(new Tuple2<>(
                        Arrays.asList(dims.split(",")),
                        Arrays.asList(metrics.split(","))));
            } else {
                return new Left<>("No segments key in the response..");
            }
        }
        // A top-level JSON array is unexpected for this endpoint.
        return new Left<>("Unexpected response " + goodResp.left().get().toString());
    }
}
public class AssignedDiscountUrl { /** * Get Resource Url for GetAssignedDiscounts * @ param couponSetCode The unique identifier of the coupon set . * @ return String Resource Url */ public static MozuUrl getAssignedDiscountsUrl ( String couponSetCode ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/commerce/catalog/admin/couponsets/{couponSetCode}/assigneddiscounts" ) ; formatter . formatUrl ( "couponSetCode" , couponSetCode ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class YamlConfigurationParser {
    /**
     * Merges configurations from an Object into an existing Map.
     *
     * <p>Only String keys are considered at both levels; non-String keys and
     * non-Map values are skipped. A key seen at the top level always gets an
     * entry in {@code configurationMap}, even if its value is not a Map.</p>
     *
     * @param configurationMap the accumulated configuration, mutated in place
     * @param parsed a freshly parsed object, expected to be a Map of Maps
     */
    @SuppressWarnings("unchecked")
    private void mergeConfigs(Map<String, Map<String, ?>> configurationMap, Object parsed) {
        if (!(parsed instanceof Map)) {
            return;
        }
        // Iterate entries directly instead of keySet()+get() (single lookup).
        for (Map.Entry<?, ?> entry : ((Map<?, ?>) parsed).entrySet()) {
            if (!(entry.getKey() instanceof String)) {
                continue;
            }
            String key = (String) entry.getKey();
            Map<String, Object> existingValue = (Map<String, Object>) configurationMap.get(key);
            // Fix: also replace a null mapping (the original only checked
            // containsKey and would NPE on a key explicitly mapped to null).
            if (existingValue == null) {
                existingValue = new HashMap<String, Object>();
                configurationMap.put(key, existingValue);
            }
            Object parsedValue = entry.getValue();
            if (parsedValue instanceof Map) {
                for (Map.Entry<?, ?> inner : ((Map<?, ?>) parsedValue).entrySet()) {
                    if (inner.getKey() instanceof String) {
                        existingValue.put((String) inner.getKey(), inner.getValue());
                    }
                }
            }
        }
    }
}
public class Utils {
    /**
     * Converts a Heron Config object into a TopologyAPI.Config.Builder. Config entries with null
     * keys or values are ignored (with a warning logged).
     *
     * @param config heron Config object
     * @return TopologyAPI.Config.Builder with values loaded from config
     */
    public static TopologyAPI.Config.Builder getConfigBuilder(Config config) {
        TopologyAPI.Config.Builder cBldr = TopologyAPI.Config.newBuilder();
        // API variables are stored as plain strings; all other values are
        // Java-serialized so arbitrary objects survive the round trip.
        Set<String> apiVars = config.getApiVars();
        for (String key : config.keySet()) {
            if (key == null) {
                LOG.warning("ignore: null config key found");
                continue;
            }
            Object value = config.get(key);
            if (value == null) {
                LOG.warning("ignore: config key " + key + " has null value");
                continue;
            }
            TopologyAPI.Config.KeyValue.Builder b = TopologyAPI.Config.KeyValue.newBuilder();
            b.setKey(key);
            if (apiVars.contains(key)) {
                // Known API variable: store its string form.
                b.setType(TopologyAPI.ConfigValueType.STRING_VALUE);
                b.setValue(value.toString());
            } else {
                // Arbitrary object: Java-serialize and store the raw bytes.
                b.setType(TopologyAPI.ConfigValueType.JAVA_SERIALIZED_VALUE);
                b.setSerializedValue(ByteString.copyFrom(serialize(value)));
            }
            cBldr.addKvs(b);
        }
        return cBldr;
    }
}
public class ExpectValues { /** * Find the enumerated object that matchs the input name using the given * offset and length into that name . If none exist , then a null value is * returned . * @ param name * @ param offset * - starting point in that name * @ param length * - length to use from that starting point * @ return ExpectValues */ public static ExpectValues match ( String name , int offset , int length ) { } }
if ( null == name ) return null ; return ( ExpectValues ) myMatcher . match ( name , offset , length ) ;
public class SecurityCenterClient { /** * Gets the settings for an organization . * < p > Sample code : * < pre > < code > * try ( SecurityCenterClient securityCenterClient = SecurityCenterClient . create ( ) ) { * OrganizationSettingsName name = OrganizationSettingsName . of ( " [ ORGANIZATION ] " ) ; * OrganizationSettings response = securityCenterClient . getOrganizationSettings ( name . toString ( ) ) ; * < / code > < / pre > * @ param name Name of the organization to get organization settings for . Its format is * " organizations / [ organization _ id ] / organizationSettings " . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final OrganizationSettings getOrganizationSettings ( String name ) { } }
GetOrganizationSettingsRequest request = GetOrganizationSettingsRequest . newBuilder ( ) . setName ( name ) . build ( ) ; return getOrganizationSettings ( request ) ;
public class ChatService {
    /**
     * Sets the listening state of the service. When set to false, the service stops listening to
     * all events.
     *
     * @param active The active state.
     */
    public void setActive(boolean active) {
        // No-op when the state does not change.
        if (this.active != active) {
            this.active = active;
            inviteListener.setActive(active);
            acceptListener.setActive(active);
            // Reset the participant list to contain only ourselves; any
            // previously tracked participants are discarded on every toggle
            // (both activation and deactivation).
            participants.clear();
            participants.add(self);
            participantListener.setActive(active);
        }
    }
}
public class CliClient {
    /**
     * Process get operation with conditions (using Thrift get_indexed_slices method).
     *
     * @param statement - tree representation of the current statement
     * Format: ^(NODE_THRIFT_GET_WITH_CONDITIONS cf ^(CONDITIONS ^(CONDITION >= column1 value1) ...) ^(NODE_LIMIT int)*)
     */
    private void executeGetWithConditions(Tree statement) {
        // Require an active connection and a selected keyspace.
        if (!CliMain.isConnected() || !hasKeySpace())
            return;
        long startTime = System.nanoTime();
        IndexClause clause = new IndexClause();
        String columnFamily = CliCompiler.getColumnFamily(statement, currentCfDefs());
        // ^(CONDITIONS ^(CONDITION $column $value) ...)
        Tree conditions = statement.getChild(1);
        // fetching column family definition
        CfDef columnFamilyDef = getCfDef(columnFamily);
        // fetching all columns: an empty start/finish range selects everything.
        SlicePredicate predicate = new SlicePredicate();
        SliceRange sliceRange = new SliceRange();
        sliceRange.setStart(new byte[0]).setFinish(new byte[0]);
        predicate.setSlice_range(sliceRange);
        // Translate each parsed condition into a Thrift IndexExpression.
        for (int i = 0; i < conditions.getChildCount(); i++) {
            // ^(CONDITION operator $column $value)
            Tree condition = conditions.getChild(i);
            String operator = condition.getChild(0).getText();
            String columnNameString = CliUtils.unescapeSQLString(condition.getChild(1).getText());
            // it could be a basic string or function call
            Tree valueTree = condition.getChild(2);
            try {
                ByteBuffer value;
                ByteBuffer columnName = columnNameAsBytes(columnNameString, columnFamily);
                if (valueTree.getType() == CliParser.FUNCTION_CALL) {
                    value = convertValueByFunction(valueTree, columnFamilyDef, columnName);
                } else {
                    String valueString = CliUtils.unescapeSQLString(valueTree.getText());
                    value = columnValueAsBytes(columnName, columnFamily, valueString);
                }
                // index operator from string
                IndexOperator idxOperator = CliUtils.getIndexOperator(operator);
                // adding new index expression into index clause
                clause.addToExpressions(new IndexExpression(columnName, idxOperator, value));
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
        List<KeySlice> slices;
        // Scan from the beginning of the key range.
        clause.setStart_key(new byte[] {});
        // when we have ^(NODE_LIMIT Integer)
        if (statement.getChildCount() == 3) {
            Tree limitNode = statement.getChild(2);
            int limitValue = Integer.parseInt(limitNode.getChild(0).getText());
            if (limitValue == 0) {
                throw new IllegalArgumentException("LIMIT should be greater than zero.");
            }
            clause.setCount(limitValue);
        }
        try {
            ColumnParent parent = new ColumnParent(columnFamily);
            slices = thriftClient.get_indexed_slices(parent, clause, predicate, consistencyLevel);
            printSliceList(columnFamilyDef, slices);
        } catch (InvalidRequestException e) {
            throw new RuntimeException(e);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        // Report how long the whole operation took.
        elapsedTime(startTime);
    }
}
public class BaseQuartzJob { /** * Utility method to extract a Spring Bean from the Spring * { @ link ApplicationContext } that is supplied in the Quartz * { @ link JobExecutionContext } . The method is parameterized to use dynamic * typing . * @ param < T > * the bean type * @ param clazz * the { @ link Class } type of bean to get * @ return the bean instance */ public final < T > T getBean ( Class < T > clazz ) { } }
if ( this . applicationContext != null ) { return ( T ) this . applicationContext . getBean ( clazz ) ; } return null ;
public class TemplateClassLoader {
    /**
     * Detect Template changes.
     *
     * <p>Scans all known template classes for source modifications, recompiles
     * and re-enhances the changed ones, and signals that a reload is required
     * by throwing {@link ClassReloadException}. Does nothing in production mode.</p>
     */
    public void detectChanges() {
        // Hot reload is a development-mode feature only.
        if (engine.isProdMode())
            return;
        // Now check for file modification
        List<TemplateClass> modifieds = new ArrayList<TemplateClass>();
        for (TemplateClass tc : engine.classes().all()) {
            if (tc.refresh())
                modifieds.add(tc);
        }
        Set<TemplateClass> modifiedWithDependencies = new HashSet<TemplateClass>();
        modifiedWithDependencies.addAll(modifieds);
        List<ClassDefinition> newDefinitions = new ArrayList<ClassDefinition>();
        boolean dirtySig = false;
        for (TemplateClass tc : modifiedWithDependencies) {
            if (tc.compile() == null) {
                // Compilation produced nothing: drop the class from the cache.
                engine.classes().remove(tc);
                currentState = new TemplateClassloaderState(); // show others that we have changed..
            } else {
                // Track the signature checksum across enhancement to detect
                // signature-level changes (variable names & annotations aware).
                int sigChecksum = tc.sigChecksum;
                tc.enhance();
                if (sigChecksum != tc.sigChecksum) {
                    dirtySig = true;
                }
                newDefinitions.add(new ClassDefinition(tc.javaClass, tc.enhancedByteCode));
                currentState = new TemplateClassloaderState(); // show others that we have changed..
            }
        }
        // Any recompiled class forces a full classloader reload.
        if (!newDefinitions.isEmpty()) {
            throw new ClassReloadException("Need Reload");
        }
        // Check signature (variable name & annotations aware!)
        if (dirtySig) {
            throw new ClassReloadException("Signature change !");
        }
        // Now check if there is new classCache or removed classCache
        int hash = computePathHash();
        if (hash != this.pathHash) {
            // Remove class for deleted files!!
            for (TemplateClass tc : engine.classes().all()) {
                if (!tc.templateResource.isValid()) {
                    engine.classes().remove(tc);
                    currentState = new TemplateClassloaderState(); // show others that we have changed..
                }
            }
            throw new ClassReloadException("Path has changed");
        }
    }
}
public class DataSet {
    /**
     * Strips the dataset down to the specified labels and remaps them.
     *
     * <p>Keeps only the examples whose outcome is in {@code labels}, then
     * rebuilds the label matrix as one-hot rows over the remapped label
     * indices (position of each original label within {@code labels}).</p>
     *
     * @param labels the labels to strip down to
     * @throws IllegalStateException if label sizes are inconsistent or an
     *         example's outcome is not found in the remapping
     */
    @Override
    public void filterAndStrip(int[] labels) {
        // Keep only examples whose outcome appears in 'labels'.
        DataSet filtered = filterBy(labels);
        List<Integer> newLabels = new ArrayList<>();
        // map new labels to index according to passed in labels
        Map<Integer, Integer> labelMap = new HashMap<>();
        for (int i = 0; i < labels.length; i++)
            labelMap.put(labels[i], i);
        // map examples
        for (int i = 0; i < filtered.numExamples(); i++) {
            DataSet example = filtered.get(i);
            int o2 = example.outcome();
            Integer outcome = labelMap.get(o2);
            newLabels.add(outcome);
        }
        // Rebuild the label matrix: one one-hot row per remaining example.
        INDArray newLabelMatrix = Nd4j.create(filtered.numExamples(), labels.length);
        if (newLabelMatrix.rows() != newLabels.size())
            throw new IllegalStateException("Inconsistent label sizes");
        for (int i = 0; i < newLabelMatrix.rows(); i++) {
            Integer i2 = newLabels.get(i);
            if (i2 == null)
                throw new IllegalStateException("Label not found on row " + i);
            INDArray newRow = FeatureUtil.toOutcomeVector(i2, labels.length);
            newLabelMatrix.putRow(i, newRow);
        }
        // Replace this dataset's contents with the filtered/remapped data.
        setFeatures(filtered.getFeatures());
        setLabels(newLabelMatrix);
    }
}
public class ReplicationLinksInner {
    /**
     * Sets which replica database is primary by failing over from the current primary replica database.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param databaseName The name of the database that has the replication link to be failed over.
     * @param linkId The ID of the replication link to be failed over.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void beginFailover(String resourceGroupName, String serverName, String databaseName, String linkId) {
        // Fire the async call and block until it completes; the response body
        // is discarded — the call is made purely for its side effect.
        beginFailoverWithServiceResponseAsync(resourceGroupName, serverName, databaseName, linkId).toBlocking().single().body();
    }
}
public class Metrics { /** * Reports an event to < code > meter < / code > described by given parameters . * @ param metrics - { @ link Metrics } instance with { @ link MetricRegistry } initialized . * @ param component - part of metric description . * @ param methodName - part of metric description . * @ param eventType - part of metric description . */ public static void mark ( Metrics metrics , String component , String methodName , String eventType ) { } }
if ( metrics != null ) { mark ( metrics . getMeter ( component , methodName , eventType ) ) ; }
public class PluralRulePrinter { /** * Print each child node of a struct , joining them together with a * delimiter string . */ private static void join ( StringBuilder buf , Node < PluralType > parent , String delimiter ) { } }
List < Node < PluralType > > nodes = parent . asStruct ( ) . nodes ( ) ; int size = nodes . size ( ) ; for ( int i = 0 ; i < size ; i ++ ) { if ( i > 0 ) { buf . append ( delimiter ) ; } print ( buf , nodes . get ( i ) ) ; }
public class AbstractImmutableMapBuilder { /** * Create a delta consisting of defaults to register . * @ param defaults * default entries . * @ return delta */ protected final MapDelta < K , V > genDefaults ( final Map < K , ? extends V > defaults ) { } }
return defaults . isEmpty ( ) ? Nop . instance ( ) : new Defaults < > ( defaults ) ;
public class MergeRequestApi { /** * Get information about a single merge request . * < p > NOTE : GitLab API V4 uses IID ( internal ID ) , V3 uses ID to identify the merge request . < / p > * < pre > < code > GitLab Endpoint : GET / projects / : id / merge _ requests / : merge _ request _ id < / code > < / pre > * @ param projectIdOrPath the project in the form of an Integer ( ID ) , String ( path ) , or Project instance * @ param mergeRequestIid the internal ID of the merge request * @ return the specified MergeRequest instance * @ throws GitLabApiException if any exception occurs */ public MergeRequest getMergeRequest ( Object projectIdOrPath , Integer mergeRequestIid ) throws GitLabApiException { } }
Response response = get ( Response . Status . OK , null , "projects" , getProjectIdOrPath ( projectIdOrPath ) , "merge_requests" , mergeRequestIid ) ; return ( response . readEntity ( MergeRequest . class ) ) ;
public class DBRef { /** * Append the PDB representation of this DBRef to the provided StringBuffer * @ param buf the StringBuffer to write to . */ @ Override public void toPDB ( StringBuffer buf ) { } }
Formatter formatter = new Formatter ( new StringBuilder ( ) , Locale . UK ) ; // DBREF 3ETA A 990 1295 UNP P06213 INSR _ HUMAN 1017 1322 // DBREF 3EH2 A 2 767 UNP P53992 SC24C _ HUMAN 329 1094 // DBREF 3EH2 A 2 767 UNP P53992 SC24C _ HUMAN 329 1094 // DBREF 3ETA A 990 1295 UNP P06213 INSR _ HUMAN 1017 1322 formatter . format ( "DBREF %4s %1s %4d%1s %4d%1s %-6s %-8s %-12s%6d%1c%6d%1c " , idCode , chainName , seqbegin , insertBegin , seqEnd , insertEnd , database , dbAccession , dbIdCode , dbSeqBegin , idbnsBegin , dbSeqEnd , idbnsEnd ) ; buf . append ( formatter . toString ( ) ) ; formatter . close ( ) ;
public class CmsSecurityManager {
    /**
     * Reads a group based on its id.<p>
     *
     * @param context the current request context
     * @param groupId the id of the group that is to be read
     * @return the requested group
     * @throws CmsException if operation was not successful
     */
    public CmsGroup readGroup(CmsRequestContext context, CmsUUID groupId) throws CmsException {
        // Acquire a db context bound to this request; must always be cleared.
        CmsDbContext dbc = m_dbContextFactory.getDbContext(context);
        CmsGroup result = null;
        try {
            result = m_driverManager.readGroup(dbc, groupId);
        } catch (Exception e) {
            // report() logs the failure with a localized message and
            // presumably rethrows it wrapped as a CmsException — TODO confirm;
            // 'result' stays null on this path.
            dbc.report(null, Messages.get().container(Messages.ERR_READ_GROUP_FOR_ID_1, groupId.toString()), e);
        } finally {
            dbc.clear();
        }
        return result;
    }
}
public class ElevationUtil { /** * Creates and returns a bitmap , which can be used to emulate a shadow , which is located at a * corner of an elevated view on pre - Lollipop devices . * @ param context * The context , which should be used , as an instance of the class { @ link Context } . The * context may not be null * @ param elevation * The elevation , which should be emulated , in dp as an { @ link Integer } value . The * elevation must be at least 0 and at maximum the value of the constant * < code > MAX _ ELEVATION < / code > * @ param orientation * The orientation of the shadow in relation to the elevated view as a value of the enum * { @ link Orientation } . The orientation may either be < code > TOP _ LEFT < / code > , * < code > TOP _ RIGHT < / code > , < code > BOTTOM _ LEFT < / code > or < code > BOTTOM _ RIGHT < / code > * @ param parallelLight * True , if parallel light should be emulated , false otherwise * @ return The bitmap , which has been created , as an instance of the class { @ link Bitmap } or * null , if the given elevation is 0 */ private static Bitmap createEdgeShadow ( @ NonNull final Context context , final int elevation , @ NonNull final Orientation orientation , final boolean parallelLight ) { } }
if ( elevation == 0 ) { return null ; } else { float shadowWidth = getShadowWidth ( context , elevation , orientation , parallelLight ) ; int shadowColor = getShadowColor ( elevation , orientation , parallelLight ) ; int bitmapWidth = ( int ) Math . round ( ( orientation == Orientation . LEFT || orientation == Orientation . RIGHT ) ? Math . ceil ( shadowWidth ) : 1 ) ; int bitmapHeight = ( int ) Math . round ( ( orientation == Orientation . TOP || orientation == Orientation . BOTTOM ) ? Math . ceil ( shadowWidth ) : 1 ) ; Bitmap bitmap = Bitmap . createBitmap ( bitmapWidth , bitmapHeight , Bitmap . Config . ARGB_8888 ) ; Canvas canvas = new Canvas ( bitmap ) ; Shader linearGradient = createLinearGradient ( orientation , bitmapWidth , bitmapHeight , shadowWidth , shadowColor ) ; Paint paint = new Paint ( ) ; paint . setAntiAlias ( true ) ; paint . setDither ( true ) ; paint . setShader ( linearGradient ) ; canvas . drawRect ( 0 , 0 , bitmapWidth , bitmapHeight , paint ) ; return bitmap ; }