signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Mockito {
    /**
     * Additional method helps users of JDK7+ to hide heap pollution / unchecked
     * generics array creation when stubbing multiple consecutive throwables.
     *
     * @param toBeThrown     throwable class to be thrown on the first call
     * @param toBeThrownNext throwable classes to be thrown on subsequent calls
     * @return a {@code Stubber} to continue the stubbing chain
     */
    @SuppressWarnings({"unchecked", "varargs"})
    @CheckReturnValue
    public static Stubber doThrow(Class<? extends Throwable> toBeThrown, Class<? extends Throwable>... toBeThrownNext) {
        // Thin delegation: all stubbing logic lives in the core stubber.
        return MOCKITO_CORE.stubber().doThrow(toBeThrown, toBeThrownNext);
    }
}
public class GenerateHalDocsJsonMojo { /** * Get constant field value . * @ param javaClazz QDox class * @ param javaField QDox field * @ param compileClassLoader Classloader for compile dependencies * @ param fieldType Field type * @ return Value */ @ SuppressWarnings ( "unchecked" ) private < T > T getStaticFieldValue ( JavaClass javaClazz , JavaField javaField , ClassLoader compileClassLoader , Class < T > fieldType ) { } }
try { Class < ? > clazz = compileClassLoader . loadClass ( javaClazz . getFullyQualifiedName ( ) ) ; Field field = clazz . getField ( javaField . getName ( ) ) ; return ( T ) field . get ( fieldType ) ; } catch ( ClassNotFoundException | NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException ex ) { throw new RuntimeException ( "Unable to get contanst value of field '" + javaClazz . getName ( ) + "#" + javaField . getName ( ) + ":\n" + ex . getMessage ( ) , ex ) ; }
public class Utils { /** * Split - out the path , query , and fragment parts of the given URI string . The URI is * expected to that obtained from a GET or other HTTP request . The extracted parts * are not decoded . For example , if the URI string is : * < pre > * / foo / bar ? x = % 20 + y = 2%20 # thisbethefragment * < / pre > * the extract parts are : * < pre > * path : / foo / bar * query : x = % 20 + y = 2%20 * fragment : thisbethefragment * < / pre > * All parameters must be non - null . * @ param uriStr URI string to be split . * @ param uriPath Will contain path extracted from URI . * @ param uriQuery Will contain query extracted from URI , if any , not decoded . * @ param uriFragment Will contain fragment extracted from URI , if any , not decoded . */ public static void splitURI ( String uriStr , StringBuilder uriPath , StringBuilder uriQuery , StringBuilder uriFragment ) { } }
assert uriStr != null ; assert uriPath != null ; assert uriQuery != null ; assert uriFragment != null ; // Find location of query ( ? ) and fragment ( # ) markers , if any . int quesInx = uriStr . indexOf ( '?' ) ; int hashInx = uriStr . indexOf ( '#' ) ; if ( hashInx >= 0 && quesInx >= 0 && hashInx < quesInx ) { // Technically this is an invalid URI since the fragment should always follow // the query . We ' ll just pretend we didn ' t see the hash . hashInx = - 1 ; } // The path starts at index 0 . Point to where it ends . uriPath . setLength ( 0 ) ; int pathEndInx = quesInx >= 0 ? quesInx : hashInx >= 0 ? hashInx : uriStr . length ( ) ; uriPath . append ( uriStr . substring ( 0 , pathEndInx ) ) ; // Extract the query part , if any . uriQuery . setLength ( 0 ) ; if ( quesInx >= pathEndInx ) { int quesEndInx = hashInx > quesInx ? hashInx : uriStr . length ( ) ; uriQuery . append ( uriStr . substring ( quesInx + 1 , quesEndInx ) ) ; } // Extract the fragment part , if any . uriFragment . setLength ( 0 ) ; if ( hashInx >= 0 ) { uriFragment . append ( uriStr . substring ( hashInx + 1 , uriStr . length ( ) ) ) ; }
public class ComponentBindingsProviderFactoryMBeanImpl { /** * ( non - Javadoc ) * @ see com . sixdimensions . wcm . cq . component . bindings . jmx . * ComponentBindingsProviderFactoryMBean # getLoadedResourceTypes ( ) */ @ Override @ Description ( "Gets all of th resource types which have bound Component Bindings Providers" ) public String [ ] getLoadedResourceTypes ( ) { } }
Set < String > types = ( ( ComponentBindingsProviderFactoryImpl ) componentBindingsProviderFactory ) . getLoadedResourceTypes ( ) ; return types . toArray ( new String [ types . size ( ) ] ) ;
public class VcfReader {
    /**
     * Stream the specified readable.
     *
     * @param readable readable to stream, must not be null
     * @param listener event based reader callback, must not be null
     * @throws IOException if an I/O error occurs
     */
    public static void stream(final Readable readable, final VcfStreamListener listener) throws IOException {
        // Thin facade: all parsing work is delegated to the streaming parser.
        StreamingVcfParser.stream(readable, listener);
    }
}
public class Ifc2x3tc1PackageImpl {
    /**
     * Lazily resolves and caches the EClass for IfcAppliedValueRelationship from
     * the globally registered Ifc2x3tc1 package.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getIfcAppliedValueRelationship() {
        if (ifcAppliedValueRelationshipEClass == null) {
            // Classifier index 23 is fixed by the generated package metadata — do not hand-edit.
            ifcAppliedValueRelationshipEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(23);
        }
        return ifcAppliedValueRelationshipEClass;
    }
}
public class CategoryGraph {
    /**
     * Intrinsic information content (Seco et al. 2004) allows to compute information content
     * from the structure of the taxonomy (no corpus needed).
     * IC(n) = 1 - log(hypo(n) + 1) / log(#cat)
     * hypo(n) is the (recursive) number of hyponyms of a node n. Recursive means that the
     * hyponyms of hyponyms are also taken into account.
     * #cat is the number of categories in the graph.
     *
     * @param category The category node for which the intrinsic information content should be returned.
     * @return The intrinsic information content for this category node, or -1 if the hyponym count is negative.
     * @throws WikiApiException Thrown if errors occurred.
     */
    public double getIntrinsicInformationContent(Category category) throws WikiApiException {
        int node = category.getPageId();
        int hyponymCount = getHyponymCountMap().get(node);
        int numberOfNodes = this.getNumberOfNodes();
        // Sanity check: a node cannot have more hyponyms than the graph has nodes.
        if (hyponymCount > numberOfNodes) {
            throw new WikiApiException("Something is wrong with the hyponymCountMap. " + hyponymCount + " hyponyms, but only " + numberOfNodes + " nodes.");
        }
        logger.debug(category.getTitle().getPlainTitle() + " has # hyponyms: " + hyponymCount);
        // -1 is the sentinel returned when no valid IC can be computed.
        double intrinsicIC = -1;
        if (hyponymCount >= 0) {
            // NOTE(review): if numberOfNodes == 1, Math.log(1) == 0 and this divides by
            // zero (yielding Infinity/NaN) — confirm the graph always has more than one node.
            intrinsicIC = (1 - (Math.log(hyponymCount + 1) / Math.log(numberOfNodes)));
        }
        return intrinsicIC;
    }
}
public class RuleClassifier { /** * The following three functions are used for the prediction */ protected double [ ] firstHit ( Instance inst ) { } }
boolean fired = false ; int countFired = 0 ; double [ ] votes = new double [ this . numClass ] ; for ( int j = 0 ; j < this . ruleSet . size ( ) ; j ++ ) { if ( this . ruleSet . get ( j ) . ruleEvaluate ( inst ) == true ) { countFired = countFired + 1 ; for ( int z = 0 ; z < this . numClass ; z ++ ) { votes [ z ] = this . ruleSet . get ( j ) . obserClassDistrib . getValue ( z ) / this . ruleSet . get ( j ) . obserClassDistrib . sumOfValues ( ) ; } return votes ; } } if ( countFired > 0 ) { fired = true ; } else { fired = false ; } if ( fired == false ) { votes = oberversDistribProb ( inst , this . observedClassDistribution ) ; } return votes ;
public class FoxHttpAnnotationParser {
    /**
     * Parse the given interface for the use of FoxHttp.
     *
     * @param serviceInterface interface to parse
     * @param foxHttpClient    FoxHttpClient to use
     * @param <T>              interface class to parse
     * @return Proxy of the interface
     * @throws FoxHttpException if a method cannot be parsed or proxy creation fails
     */
    @SuppressWarnings("unchecked")
    public <T> T parseInterface(final Class<T> serviceInterface, FoxHttpClient foxHttpClient) throws FoxHttpException {
        try {
            // Pre-build and cache a request builder for every declared method.
            Method[] methods = serviceInterface.getDeclaredMethods();
            for (Method method : methods) {
                FoxHttpMethodParser foxHttpMethodParser = new FoxHttpMethodParser();
                foxHttpMethodParser.parseMethod(method, foxHttpClient);
                FoxHttpRequestBuilder foxHttpRequestBuilder = new FoxHttpRequestBuilder(foxHttpMethodParser.getUrl(), foxHttpMethodParser.getRequestType(), foxHttpClient)
                        .setRequestHeader(foxHttpMethodParser.getHeaderFields())
                        .setSkipResponseBody(foxHttpMethodParser.isSkipResponseBody())
                        .setFollowRedirect(foxHttpMethodParser.isFollowRedirect());
                requestCache.put(method, foxHttpRequestBuilder);
            }
            // All invocations are routed through a dynamic proxy backed by the cache.
            return (T) Proxy.newProxyInstance(serviceInterface.getClassLoader(), new Class[]{serviceInterface}, new FoxHttpAnnotationInvocationHandler(requestCache, responseParsers));
        } catch (FoxHttpException e) {
            // Domain-specific failures are rethrown unchanged.
            throw e;
        } catch (Exception e) {
            // Anything unexpected (reflection errors etc.) is wrapped, preserving the cause.
            throw new FoxHttpRequestException(e);
        }
    }
}
public class ManagementGroupVertex { /** * Returns the list of successors of this group vertex . A successor is a group vertex which can be reached via a * group edge originating at this group vertex . * @ return the list of successors of this group vertex . */ public List < ManagementGroupVertex > getSuccessors ( ) { } }
final List < ManagementGroupVertex > successors = new ArrayList < ManagementGroupVertex > ( ) ; for ( ManagementGroupEdge edge : this . forwardEdges ) { successors . add ( edge . getTarget ( ) ) ; } return successors ;
public class WebAppConfigurator {
    /**
     * Create a configuration item using the current source.
     * See {@link #getConfigSource()} and {@link #getLibraryURI()}.
     *
     * @param value      The value to place in the configuration item. A null value may be provided.
     * @param comparator The comparator to be used for the configuration item.
     *                   Null to select the default comparator.
     * @return The new configuration item.
     */
    public <T> ConfigItem<T> createConfigItem(T value, MergeComparator<T> comparator) {
        // Source and library URI are taken from the configurator's current state.
        return new ConfigItemImpl<T>(value, getConfigSource(), getLibraryURI(), comparator);
    }
}
public class StringUtils { /** * Return encoded string by given charset . * @ param source source string to be handle . * @ param sourceCharset source string charset name . * @ param encodingCharset want encoding to which charset . * @ return a new string has been encoded . * @ throws IllegalArgumentExceptionWORD _ PATTERN If the named charset is not * supported */ public static String encoding ( final String source , final String sourceCharset , final String encodingCharset ) throws IllegalArgumentException { } }
byte [ ] sourceBytes ; String encodeString = null ; try { sourceBytes = source . getBytes ( sourceCharset ) ; encodeString = new String ( sourceBytes , encodingCharset ) ; } catch ( UnsupportedEncodingException e ) { throw new IllegalArgumentException ( String . format ( "Unsupported encoding:%s or %s" , sourceCharset , encodingCharset ) ) ; } return encodeString ;
public class AsyncFile {
    /**
     * Creates a directory by creating all nonexistent parent directories first.
     *
     * @param executor executor for running tasks in other thread
     * @param dir      the directory to create
     * @param attrs    an optional list of file attributes to set atomically when creating the directory
     * @return a promise completing when the directory has been created
     */
    public static Promise<Void> createDirectories(Executor executor, Path dir, FileAttribute... attrs) {
        // The blocking Files call runs on the supplied executor; IOException is
        // rethrown unchecked so it can cross the lambda boundary.
        return ofBlockingRunnable(executor, () -> {
            try {
                Files.createDirectories(dir, attrs);
            } catch (IOException e) {
                throw new UncheckedException(e);
            }
        });
    }
}
public class Activator {
    /**
     * Gets Configuration Admin service from service registry.
     *
     * @param bundleContext bundle context
     * @return configuration admin service
     * @throws IllegalStateException If no Configuration Admin service is available
     */
    private ConfigurationAdmin getConfigurationAdmin(final BundleContext bundleContext) {
        // NOTE(review): raw ServiceReference — presumably kept for compatibility with
        // older OSGi APIs; confirm before parameterizing.
        final ServiceReference ref = bundleContext.getServiceReference(ConfigurationAdmin.class.getName());
        if (ref == null) {
            throw new IllegalStateException("Cannot find a configuration admin service");
        }
        return (ConfigurationAdmin) bundleContext.getService(ref);
    }
}
public class MultiRowJdbcPersonAttributeDao {
    /**
     * The {@link Map} of columns from a name column to value columns. Keys are Strings,
     * Values are Strings or {@link java.util.List} of Strings.
     *
     * @param nameValueColumnMap The Map of name column to value column(s); null clears the mappings.
     * @throws IllegalArgumentException if any parsed mapping value is null
     */
    public void setNameValueColumnMappings(final Map<String, ?> nameValueColumnMap) {
        if (nameValueColumnMap == null) {
            // null input clears the current mappings entirely.
            this.nameValueColumnMappings = null;
        } else {
            final Map<String, Set<String>> mappings = MultivaluedPersonAttributeUtils.parseAttributeToAttributeMapping(nameValueColumnMap);
            // Validate before assignment so the field is never left half-updated.
            if (mappings.containsValue(null)) {
                throw new IllegalArgumentException("nameValueColumnMap may not have null values");
            }
            this.nameValueColumnMappings = mappings;
        }
    }
}
public class ResourceManagerBase {
    /**
     * Called by the engine when it has decided configuration has changed. Care should be taken
     * to ensure continuity of operation despite configuration change — resource amount currently
     * in use should not be reset, for example.<br>
     * Note that the RM key can change, but never the RM ID.<br><br>
     * This will often be overloaded — but calling the base implementation may still be useful
     * as it handles configuration precedence.
     *
     * @param configuration the new resource manager configuration to apply
     */
    void refreshConfiguration(ResourceManager configuration) {
        this.definition = configuration;
        this.key = configuration.getKey().toLowerCase();
        // Rebuild the property map from scratch: hard-coded defaults first, then the
        // new configuration's values so they take precedence over the defaults.
        this.currentProperties = new HashMap<>();
        // Add hard-coded defaults to properties
        setDefaultProperties();
        // Add values from new configuration to properties
        this.currentProperties.putAll(configuration.getParameterCache());
    }
}
public class UserAgentParserImpl {
    /**
     * Sort by size and alphabet, so the first match can be returned immediately.
     *
     * @param rules the rules to order; the input array is not modified
     * @return a new array with the rules ordered largest-size-first, ties broken by pattern
     */
    static Rule[] getOrderedRules(final Rule[] rules) {
        // Largest size first (reversed), ties broken alphabetically by pattern.
        final Comparator<Rule> c = Comparator.comparing(Rule::getSize).reversed().thenComparing(Rule::getPattern);
        // Defensive copy so the caller's array is left untouched.
        final Rule[] result = Arrays.copyOf(rules, rules.length);
        // presumably Arrays.parallelSort via a static import — TODO confirm
        parallelSort(result, c);
        return result;
    }
}
public class GeometryExtrude {
    /**
     * Extract the linestring "roof".
     *
     * @param lineString source geometry; it is copied, never mutated
     * @param height     value passed to the translation filter — presumably the vertical
     *                   offset of the roof; TODO confirm filter semantics
     * @return a translated copy of the input linestring
     */
    public static Geometry extractRoof(LineString lineString, double height) {
        // Work on a copy so the caller's geometry stays untouched.
        LineString result = (LineString) lineString.copy();
        result.apply(new TranslateCoordinateSequenceFilter(height));
        return result;
    }
}
public class WritableSessionCache {
    /**
     * Signal that the transaction that was active and in which this session participated
     * has completed and that this session should no longer use a transaction-specific
     * workspace cache.
     *
     * @param txId   the id of the completed transaction
     * @param wsName the workspace name whose tx-specific state should be discarded
     */
    private void completeTransaction(final String txId, String wsName) {
        getWorkspace().clear();
        // reset the ws cache to the shared (global one)
        setWorkspaceCache(sharedWorkspaceCache());
        // and clear some tx specific data
        // NOTE(review): compute's remapping function dereferences funcsByWsName without a
        // null check — it would NPE if txId has no entry; presumably an invariant
        // guarantees one exists here. Confirm against callers.
        COMPLETE_FUNCTION_BY_TX_AND_WS.compute(txId, (transactionId, funcsByWsName) -> {
            funcsByWsName.remove(wsName);
            if (funcsByWsName.isEmpty()) {
                // this is the last ws cache we are clearing for this tx so mark all the keys as unlocked
                LOCKED_KEYS_BY_TX_ID.remove(txId);
                // and remove the map
                return null;
            }
            // there are other ws caches which need clearing for this tx, so just return the updated map
            return funcsByWsName;
        });
    }
}
public class FileUtils {
    /**
     * Reads an {@link ImmutableListMultimap} from a {@link CharSource}, where each line is a
     * key, a tab character ("\t"), and a value. Blank lines and lines beginning with "#" are ignored.
     *
     * @param source the character source to read key/value lines from
     * @return the parsed multimap with String keys and File values
     * @throws IOException if reading the source fails
     */
    public static ImmutableListMultimap<String, File> loadStringToFileListMultimap(final CharSource source) throws IOException {
        // Keys pass through unchanged; values are converted to File; comment lines are skipped.
        return loadMultimap(source, Functions.<String>identity(), FileFunction.INSTANCE, IsCommentLine.INSTANCE);
    }
}
public class ThrowUnchecked { /** * Throws the root cause of the given exception if it is unchecked or an * instance of any of the given declared types . Otherwise , it is thrown as * an UndeclaredThrowableException . If the root cause is null , then the * original exception is thrown . This method only returns normally if the * exception is null . * @ param t exception whose root cause is to be thrown * @ param declaredTypes if exception is checked and is not an instance of * any of these types , then it is thrown as an * UndeclaredThrowableException . */ public static void fireDeclaredRootCause ( Throwable t , Class ... declaredTypes ) { } }
Throwable root = t ; while ( root != null ) { Throwable cause = root . getCause ( ) ; if ( cause == null ) { break ; } root = cause ; } fireDeclared ( root , declaredTypes ) ;
public class LevelDB { /** * Try to avoid use - after close since that has the tendency of crashing the JVM . This doesn ' t * prevent methods that retrieved the instance from using it after close , but hopefully will * catch most cases ; otherwise , we ' ll need some kind of locking . */ DB db ( ) { } }
DB _db = this . _db . get ( ) ; if ( _db == null ) { throw new IllegalStateException ( "DB is closed." ) ; } return _db ;
public class CmsFormatterBeanParser {
    /**
     * Parses the mappings.<p>
     *
     * @param formatterLoc the formatter value location
     * @return the mappings
     */
    private List<CmsMetaMapping> parseMetaMappings(I_CmsXmlContentLocation formatterLoc) {
        List<CmsMetaMapping> mappings = new ArrayList<CmsMetaMapping>();
        for (I_CmsXmlContentValueLocation mappingLoc : formatterLoc.getSubValues(N_META_MAPPING)) {
            String key = CmsConfigurationReader.getString(m_cms, mappingLoc.getSubValue(N_KEY));
            String element = CmsConfigurationReader.getString(m_cms, mappingLoc.getSubValue(N_ELEMENT));
            String defaultValue = CmsConfigurationReader.getString(m_cms, mappingLoc.getSubValue(N_DEFAULT));
            String orderStr = CmsConfigurationReader.getString(m_cms, mappingLoc.getSubValue(N_ORDER));
            // Order defaults to 1000 when the value is missing or unparseable.
            int order = 1000;
            if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(orderStr)) {
                try {
                    order = Integer.parseInt(orderStr);
                } catch (NumberFormatException e) {
                    // nothing to do — keep the default order
                }
            }
            CmsMetaMapping mapping = new CmsMetaMapping(key, element, order, defaultValue);
            mappings.add(mapping);
        }
        return mappings;
    }
}
public class JcrStatement {
    /**
     * Executes the supplied SQL by translating it into JCR-SQL2 and running the query
     * against the repository delegate.
     *
     * @param sql the SQL statement to execute
     * @return {@code true} always — execution always yields a ResultSet
     * @throws SQLException if the statement is closed or the repository reports an error
     */
    @Override
    public boolean execute(String sql) throws SQLException {
        notClosed();
        // Reset per-execution state before running the new statement.
        warning = null;
        moreResults = 0;
        try {
            // Convert the supplied SQL into JCR-SQL2...
            String jcrSql2 = connection.nativeSQL(sql);
            // Create the query...
            final QueryResult jcrResults = getJcrRepositoryDelegate().execute(jcrSql2, this.sqlLanguage);
            results = new JcrResultSet(this, jcrResults, null);
            moreResults = 1;
        } catch (RepositoryException e) {
            // Wrap the repository failure, preserving the cause.
            throw new SQLException(e.getLocalizedMessage(), e);
        }
        return true; // always a ResultSet
    }
}
public class DescribeImportTasksRequest { /** * An array of name - value pairs that you provide to filter the results for the < code > DescribeImportTask < / code > * request to a specific subset of results . Currently , wildcard values aren ' t supported for filters . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setFilters ( java . util . Collection ) } or { @ link # withFilters ( java . util . Collection ) } if you want to override * the existing values . * @ param filters * An array of name - value pairs that you provide to filter the results for the * < code > DescribeImportTask < / code > request to a specific subset of results . Currently , wildcard values aren ' t * supported for filters . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeImportTasksRequest withFilters ( ImportTaskFilter ... filters ) { } }
if ( this . filters == null ) { setFilters ( new java . util . ArrayList < ImportTaskFilter > ( filters . length ) ) ; } for ( ImportTaskFilter ele : filters ) { this . filters . add ( ele ) ; } return this ;
public class Router {
    /**
     * Specify a middleware that will be called for a matching HTTP CONNECT.
     *
     * @param pattern  The simple pattern
     * @param handlers The middleware to call
     * @return this router, for fluent chaining
     */
    public Router connect(@NotNull final String pattern, @NotNull final IMiddleware... handlers) {
        addPattern("CONNECT", pattern, handlers, connectBindings);
        return this;
    }
}
public class ActiveSyncManager {
    /**
     * Clean up tasks to stop sync point after we have journaled.
     *
     * @param syncPoint the sync point to stop
     * @throws InvalidPathException if the sync point cannot be resolved in the mount table
     */
    public void stopSyncPostJournal(AlluxioURI syncPoint) throws InvalidPathException {
        MountTable.Resolution resolution = mMountTable.resolve(syncPoint);
        long mountId = resolution.getMountId();
        // Remove initial sync thread
        Future<?> syncFuture = mSyncPathStatus.remove(syncPoint);
        if (syncFuture != null) {
            syncFuture.cancel(true);
        }
        // NOTE(review): assumes mFilterMap always contains an entry for mountId here —
        // otherwise this NPEs. Confirm callers guarantee it.
        if (mFilterMap.get(mountId).isEmpty()) {
            // syncPoint removed was the last syncPoint for the mountId
            mFilterMap.remove(mountId);
            Future<?> future = mPollerMap.remove(mountId);
            if (future != null) {
                future.cancel(true);
            }
        }
        // Tell UFS to stop monitoring the path
        try (CloseableResource<UnderFileSystem> ufs = resolution.acquireUfsResource()) {
            ufs.get().stopSync(resolution.getUri());
        } catch (IOException e) {
            LOG.info("Ufs IOException for uri {}, exception is {}", syncPoint, e);
        }
        // Stop active sync polling on a particular UFS if it is the last sync point
        if (mFilterMap.containsKey(mountId) && mFilterMap.get(mountId).isEmpty()) {
            try (CloseableResource<UnderFileSystem> ufs = resolution.acquireUfsResource()) {
                ufs.get().stopActiveSyncPolling();
            } catch (IOException e) {
                LOG.warn("Encountered IOException when trying to stop polling thread {}", e);
            }
        }
    }
}
public class Endpoint { /** * Creates a new host { @ link Endpoint } . * @ deprecated Use { @ link # of ( String , int ) } and { @ link # withWeight ( int ) } , * e . g . { @ code Endpoint . of ( " foo . com " , 80 ) . withWeight ( 500 ) } . */ @ Deprecated public static Endpoint of ( String host , int port , int weight ) { } }
return of ( host , port ) . withWeight ( weight ) ;
public class hqlParser {
    /**
     * hql.g:544:1: multiplyExpression : unaryExpression ( ( STAR ^ | DIV ^ ) unaryExpression )* ;
     *
     * ANTLR-generated rule method — do not hand-edit; regenerate from hql.g instead.
     * Parses a unary expression optionally followed by any number of '*' / '/'
     * operators, building a left-leaning AST with the operator as the new root.
     */
    public final hqlParser.multiplyExpression_return multiplyExpression() throws RecognitionException {
        hqlParser.multiplyExpression_return retval = new hqlParser.multiplyExpression_return();
        retval.start = input.LT(1);
        CommonTree root_0 = null;
        Token STAR204 = null;
        Token DIV205 = null;
        ParserRuleReturnScope unaryExpression203 = null;
        ParserRuleReturnScope unaryExpression206 = null;
        CommonTree STAR204_tree = null;
        CommonTree DIV205_tree = null;
        try {
            // hql.g:545:4: unaryExpression ( ( STAR ^ | DIV ^ ) unaryExpression )*
            {
                root_0 = (CommonTree) adaptor.nil();
                pushFollow(FOLLOW_unaryExpression_in_multiplyExpression2452);
                unaryExpression203 = unaryExpression();
                state._fsp--;
                adaptor.addChild(root_0, unaryExpression203.getTree());
                // hql.g:545:20: ( ( STAR ^ | DIV ^ ) unaryExpression )*
                loop73:
                while (true) {
                    int alt73 = 2;
                    int LA73_0 = input.LA(1);
                    if ((LA73_0 == DIV || LA73_0 == STAR)) {
                        alt73 = 1;
                    }
                    switch (alt73) {
                        case 1:
                            // hql.g:545:22: ( STAR ^ | DIV ^ ) unaryExpression
                            {
                                // hql.g:545:22: ( STAR ^ | DIV ^ )
                                int alt72 = 2;
                                int LA72_0 = input.LA(1);
                                if ((LA72_0 == STAR)) {
                                    alt72 = 1;
                                } else if ((LA72_0 == DIV)) {
                                    alt72 = 2;
                                } else {
                                    NoViableAltException nvae = new NoViableAltException("", 72, 0, input);
                                    throw nvae;
                                }
                                switch (alt72) {
                                    case 1:
                                        // hql.g:545:24: STAR ^
                                        {
                                            STAR204 = (Token) match(input, STAR, FOLLOW_STAR_in_multiplyExpression2458);
                                            STAR204_tree = (CommonTree) adaptor.create(STAR204);
                                            // '^' in the grammar: the operator becomes the new AST root.
                                            root_0 = (CommonTree) adaptor.becomeRoot(STAR204_tree, root_0);
                                        }
                                        break;
                                    case 2:
                                        // hql.g:545:32: DIV ^
                                        {
                                            DIV205 = (Token) match(input, DIV, FOLLOW_DIV_in_multiplyExpression2463);
                                            DIV205_tree = (CommonTree) adaptor.create(DIV205);
                                            root_0 = (CommonTree) adaptor.becomeRoot(DIV205_tree, root_0);
                                        }
                                        break;
                                }
                                pushFollow(FOLLOW_unaryExpression_in_multiplyExpression2468);
                                unaryExpression206 = unaryExpression();
                                state._fsp--;
                                adaptor.addChild(root_0, unaryExpression206.getTree());
                            }
                            break;
                        default:
                            break loop73;
                    }
                }
            }
            retval.stop = input.LT(-1);
            retval.tree = (CommonTree) adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
        } catch (RecognitionException re) {
            // Standard ANTLR recovery: report, resync, and return an error node.
            reportError(re);
            recover(input, re);
            retval.tree = (CommonTree) adaptor.errorNode(input, retval.start, input.LT(-1), re);
        } finally {
            // do for sure before leaving
        }
        return retval;
    }
}
public class DefaultGroovyMethods { /** * Returns the first item from the Iterable . * < pre class = " groovyTestCase " > * def set = [ 3 , 4 , 2 ] as LinkedHashSet * assert set . first ( ) = = 3 * / / check original is unaltered * assert set = = [ 3 , 4 , 2 ] as Set * < / pre > * The first element returned by the Iterable ' s iterator is returned . * If the Iterable doesn ' t guarantee a defined order it may appear like * a random element is returned . * @ param self an Iterable * @ return the first item from the Iterable * @ throws NoSuchElementException if the Iterable is empty and you try to access the first ( ) item . * @ since 1.8.7 */ public static < T > T first ( Iterable < T > self ) { } }
Iterator < T > iterator = self . iterator ( ) ; if ( ! iterator . hasNext ( ) ) { throw new NoSuchElementException ( "Cannot access first() element from an empty Iterable" ) ; } return iterator . next ( ) ;
public class ItemsMergeImpl {
    /**
     * blockyTandemMergeSortRecursion() is called by blockyTandemMergeSort().
     * In addition to performing the algorithm's top-down recursion, it manages the buffer
     * swapping that eliminates most copying. It also maps the input's pre-sorted blocks
     * into the subarrays that are processed by tandemMerge().
     *
     * @param <T>        the data type
     * @param keySrc     key source
     * @param valSrc     value source
     * @param keyDst     key destination
     * @param valDst     value destination
     * @param grpStart   group start, refers to pre-sorted blocks such as block 0, block 1, etc.
     * @param grpLen     group length, refers to pre-sorted blocks such as block 0, block 1, etc.
     * @param blkSize    block size
     * @param arrLim     array limit
     * @param comparator to compare keys
     */
    private static <T> void blockyTandemMergeSortRecursion(final T[] keySrc, final long[] valSrc,
            final T[] keyDst, final long[] valDst, final int grpStart, final int grpLen, // block indices
            final int blkSize, final int arrLim, final Comparator<? super T> comparator) {
        // Important note: grpStart and grpLen do NOT refer to positions in the underlying array.
        // Instead, they refer to the pre-sorted blocks, such as block 0, block 1, etc.
        assert (grpLen > 0);
        if (grpLen == 1) {
            // A single pre-sorted block is already sorted — nothing to merge.
            return;
        }
        final int grpLen1 = grpLen / 2;
        final int grpLen2 = grpLen - grpLen1;
        assert (grpLen1 >= 1);
        assert (grpLen2 >= grpLen1);
        final int grpStart1 = grpStart;
        final int grpStart2 = grpStart + grpLen1;
        // swap roles of src and dst
        blockyTandemMergeSortRecursion(keyDst, valDst, keySrc, valSrc, grpStart1, grpLen1, blkSize, arrLim, comparator);
        // swap roles of src and dst
        blockyTandemMergeSortRecursion(keyDst, valDst, keySrc, valSrc, grpStart2, grpLen2, blkSize, arrLim, comparator);
        // here we convert indices of blocks into positions in the underlying array.
        final int arrStart1 = grpStart1 * blkSize;
        final int arrStart2 = grpStart2 * blkSize;
        final int arrLen1 = grpLen1 * blkSize;
        int arrLen2 = grpLen2 * blkSize;
        // special case for the final block which might be shorter than blkSize.
        if ((arrStart2 + arrLen2) > arrLim) {
            arrLen2 = arrLim - arrStart2;
        }
        // Merge the two (now sorted) halves from src into dst, starting at arrStart1.
        tandemMerge(keySrc, valSrc, arrStart1, arrLen1, arrStart2, arrLen2,
                keyDst, valDst, arrStart1, comparator); // which will be arrStart3
    }
}
public class InvocationDecoder {
    /**
     * Splits out the query string and unescapes the value.
     *
     * @param invocation  invocation to populate with query string, raw URI and decoded URI
     * @param rawURIBytes raw request-URI bytes
     * @param uriLength   number of valid bytes in rawURIBytes
     * @throws IOException if decoding fails
     */
    public void splitQueryAndUnescape(I invocation, byte[] rawURIBytes, int uriLength) throws IOException {
        // Scan for the first '?' — everything after it is the (undecoded) query string.
        for (int i = 0; i < uriLength; i++) {
            if (rawURIBytes[i] == '?') {
                i++;
                // XXX: should be the host encoding?
                String queryString = byteToChar(rawURIBytes, i, uriLength - i, "ISO-8859-1");
                invocation.setQueryString(queryString);
                // Truncate the URI to exclude the '?' and everything after it.
                uriLength = i - 1;
                break;
            }
        }
        String rawURIString = byteToChar(rawURIBytes, 0, uriLength, "ISO-8859-1");
        invocation.setRawURI(rawURIString);
        // Unescape %-sequences using the configured encoding, then normalize the path.
        String decodedURI = normalizeUriEscape(rawURIBytes, 0, uriLength, _encoding);
        decodedURI = decodeURI(rawURIString, decodedURI, invocation);
        String uri = normalizeUri(decodedURI);
        invocation.setURI(uri);
    }
}
public class Batch {
    /**
     * (non-Javadoc)
     * @see com.googlecode.batchfb.Batcher#graph(java.lang.String, com.googlecode.batchfb.Param[])
     */
    @Override
    public GraphRequest<JsonNode> graph(String object, Param... params) {
        // Delegates to the typed overload, defaulting the mapped type to raw JsonNode.
        return this.graph(object, JsonNode.class, params);
    }
}
public class CompactOffHeapLinearHashTable {
    /**
     * Removes the entry at posToRemove, back-shifting subsequent entries of the probe
     * chain so linear probing stays intact (open-addressing deletion).
     * Returns "insert" position in terms of consequent putValue().
     *
     * @param addr        base address of the table
     * @param posToRemove slot whose entry is removed
     * @return the slot finally cleared (the next putValue() insert position)
     */
    public long remove(long addr, long posToRemove) {
        long posToShift = posToRemove;
        while (true) {
            posToShift = step(posToShift);
            // volatile read not needed because removal is performed under exclusive lock
            long entryToShift = readEntry(addr, posToShift);
            if (empty(entryToShift))
                break;
            long insertPos = hlPos(key(entryToShift));
            // the following condition essentially means circular permutations
            // of three (r = posToRemove, s = posToShift, i = insertPos)
            // positions are accepted:
            // [...i..r...s.] or
            // [...r..s...i.] or
            // [...s..i...r.]
            boolean cond1 = insertPos <= posToRemove;
            boolean cond2 = posToRemove <= posToShift;
            if ((cond1 && cond2) ||
                    // chain wrapped around capacity
                    (posToShift < insertPos && (cond1 || cond2))) {
                writeEntry(addr, posToRemove, entryToShift);
                posToRemove = posToShift;
            }
        }
        clearEntry(addr, posToRemove);
        return posToRemove;
    }
}
public class Features {
    /**
     * Add a feature. Stores its interface, and all sub interfaces which describe also a
     * {@link Feature} annotated by {@link FeatureInterface}.
     *
     * @param feature The feature to add.
     * @throws LionEngineException If feature is not annotated by {@link FeatureInterface}
     *                             or already referenced.
     */
    public void add(Feature feature) {
        if (!isAnnotated(feature)) {
            throw new LionEngineException(ERROR_FEATURE_NOT_ANNOTATED + feature.getClass());
        }
        final Feature old;
        // Inner assignment captures the previously registered feature (if any) for the error message.
        // CHECKSTYLE IGNORE LINE: InnerAssignment
        if ((old = typeToFeature.put(feature.getClass(), feature)) != null) {
            // Registering the same concrete type twice is an error.
            throw new LionEngineException(ERROR_FEATURE_EXISTS + feature.getClass() + WITH + old.getClass());
        }
        checkTypeDepth(feature, feature.getClass());
    }
}
public class SpringSecurityUserManager {
    /**
     * {@inheritDoc}
     */
    @Override
    public void logout(User appSensorUser) {
        logger.info("Request received to logout user <{}>.", appSensorUser.getUsername());
        // Flags the user as logged out in the response cache, keyed by username.
        userResponseCache.setUserLoggedOut(appSensorUser.getUsername());
    }
}
public class MediaClient {
    /**
     * Retrieve the media information of an object in Bos bucket.
     *
     * @param request The request object containing all options for retrieving media information.
     * @return The media information of an object in Bos bucket.
     */
    public GetMediaInfoOfFileResponse getMediaInfoOfFile(GetMediaInfoOfFileRequest request) {
        // Validate required parameters before touching the network.
        checkNotNull(request, "The parameter request should NOT be null.");
        checkStringNotEmpty(request.getBucket(), "The parameter bucket should NOT be null or empty string.");
        checkStringNotEmpty(request.getKey(), "The parameter key should NOT be null or empty string.");
        // GET against the media-info endpoint with bucket/key as query parameters.
        InternalRequest internalRequest = createRequest(HttpMethodName.GET, request, MEDIAINFO);
        internalRequest.addParameter("bucket", request.getBucket());
        internalRequest.addParameter("key", request.getKey());
        return invokeHttpClient(internalRequest, GetMediaInfoOfFileResponse.class);
    }
}
public class LogViewer { /** * Runs LogViewer using values in System Properties to find custom levels and header . * @ param args * - command line arguments to LogViewer * @ return code indicating status of the execution : 0 on success , 1 otherwise . */ public int execute ( String [ ] args ) { } }
Map < String , LevelDetails > levels = readLevels ( System . getProperty ( "logviewer.custom.levels" ) ) ; String [ ] header = readHeader ( System . getProperty ( "logviewer.custom.header" ) ) ; return execute ( args , levels , header ) ;
public class EndpointLinksResolver { /** * Resolves links to the known endpoints based on a request with the given * { @ code requestUrl } . * @ param requestUrl the url of the request for the endpoint links * @ return the links */ public Map < String , Link > resolveLinks ( String requestUrl ) { } }
String normalizedUrl = normalizeRequestUrl ( requestUrl ) ; Map < String , Link > links = new LinkedHashMap < > ( ) ; links . put ( "self" , new Link ( normalizedUrl ) ) ; for ( ExposableEndpoint < ? > endpoint : this . endpoints ) { if ( endpoint instanceof ExposableWebEndpoint ) { collectLinks ( links , ( ExposableWebEndpoint ) endpoint , normalizedUrl ) ; } else if ( endpoint instanceof PathMappedEndpoint ) { String rootPath = ( ( PathMappedEndpoint ) endpoint ) . getRootPath ( ) ; Link link = createLink ( normalizedUrl , rootPath ) ; links . put ( endpoint . getEndpointId ( ) . toLowerCaseString ( ) , link ) ; } } return links ;
public class HiveRegistrationPolicyBase { /** * Determine whether a database or table name is valid . * A name is valid if and only if : it starts with an alphanumeric character , contains only alphanumeric characters * and ' _ ' , and is NOT composed of numbers only . */ protected static boolean isNameValid ( String name ) { } }
Preconditions . checkNotNull ( name ) ; name = name . toLowerCase ( ) ; return VALID_DB_TABLE_NAME_PATTERN_1 . matcher ( name ) . matches ( ) && VALID_DB_TABLE_NAME_PATTERN_2 . matcher ( name ) . matches ( ) ;
public class TraceTurboFilter { /** * Initialize and add this turbo filter to the loggerFactory . */ @ Override public void start ( ) { } }
LoggerContext loggerFactory = ( LoggerContext ) LoggerFactory . getILoggerFactory ( ) ; setContext ( loggerFactory ) ; loggerFactory . addTurboFilter ( this ) ; super . start ( ) ;
public class DogmaApi { /** * Get effect information ( asynchronously ) Get information on a dogma effect * - - - This route expires daily at 11:05 * @ param effectId * A dogma effect ID ( required ) * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ param callback * The callback to be executed when the API call finishes * @ return The request call * @ throws ApiException * If fail to process the API call , e . g . serializing the request * body object */ public com . squareup . okhttp . Call getDogmaEffectsEffectIdAsync ( Integer effectId , String datasource , String ifNoneMatch , final ApiCallback < DogmaEffectResponse > callback ) throws ApiException { } }
com . squareup . okhttp . Call call = getDogmaEffectsEffectIdValidateBeforeCall ( effectId , datasource , ifNoneMatch , callback ) ; Type localVarReturnType = new TypeToken < DogmaEffectResponse > ( ) { } . getType ( ) ; apiClient . executeAsync ( call , localVarReturnType , callback ) ; return call ;
public class CmsProperty { /** * Returns the map value representation for the given String . < p > * The given value is split along the < code > | < / code > char , the map keys and values are separated by a < code > = < / code > . < p > * @ param value the value to create the map representation for * @ return the map value representation for the given String */ private Map < String , String > createMapFromValue ( String value ) { } }
if ( value == null ) { return null ; } List < String > entries = createListFromValue ( value ) ; Iterator < String > i = entries . iterator ( ) ; Map < String , String > result = new HashMap < String , String > ( entries . size ( ) ) ; boolean rebuildDelimiters = false ; if ( value . indexOf ( VALUE_MAP_DELIMITER_REPLACEMENT ) != - 1 ) { rebuildDelimiters = true ; } while ( i . hasNext ( ) ) { String entry = i . next ( ) ; int index = entry . indexOf ( VALUE_MAP_DELIMITER ) ; if ( index != - 1 ) { String key = entry . substring ( 0 , index ) ; String val = "" ; if ( ( index + 1 ) < entry . length ( ) ) { val = entry . substring ( index + 1 ) ; } if ( CmsStringUtil . isNotEmpty ( key ) ) { if ( rebuildDelimiters ) { key = rebuildDelimiter ( key , VALUE_MAP_DELIMITER , VALUE_MAP_DELIMITER_REPLACEMENT ) ; val = rebuildDelimiter ( val , VALUE_MAP_DELIMITER , VALUE_MAP_DELIMITER_REPLACEMENT ) ; } result . put ( key , val ) ; } } } return result ;
public class XlsUtil {
    /**
     * Export a list of objects to an Excel workbook.
     *
     * @param config export configuration (headers, field names, sheet settings)
     * @param list the objects to export, one row per element
     * @param outputStream the stream the workbook is written to
     * @return true on success, false on any failure (the exception is swallowed)
     * @throws Exception
     */
    public static boolean list2Xls(ExcelConfig config, List<?> list, OutputStream outputStream) throws Exception {
        try {
            String[] header = config.getHeaders();
            String[] names = config.getNames();
            String[] values;
            WritableWorkbook wb = Workbook.createWorkbook(outputStream);
            // Use the configured sheet name, or fall back to "sheet<N>".
            String sheetName = (config.getSheet() != null && !config.getSheet().equals("")) ? config.getSheet() : ("sheet" + config.getSheetNum());
            WritableSheet sheet = wb.createSheet(sheetName, 0);
            int row = 0;
            int column = 0;
            int rowadd = 0;
            // Write the header row, applying configured column widths where present.
            if (config.getHeader()) {
                for (column = 0; column < header.length; column++) {
                    sheet.addCell(new Label(column, row + rowadd, header[column]));
                    if (config.getColumn(column).getWidth() != null) {
                        // NOTE(review): width appears to be configured in pixels and converted to
                        // jxl column-view units at ~7px per unit — TODO confirm.
                        sheet.setColumnView(column, config.getColumn(column).getWidth() / 7);
                    }
                }
                rowadd++;
            }
            // Write the data rows: one row per list element, one cell per configured field name.
            for (row = 0; row < list.size(); row++) {
                Object rowData = list.get(row);
                values = getObjValues(rowData, names);
                for (column = 0; column < values.length; column++) {
                    sheet.addCell(new Label(column, row + rowadd, values[column]));
                }
            }
            wb.write();
            wb.close();
        } catch (Exception e1) {
            // Best-effort export: failures are reported only through the boolean return value.
            return false;
        }
        return true;
    }
}
public class SizeDAOValidatorsRule { /** * ( non - Javadoc ) * @ see net . leadware . persistence . tools . validator . base . AbstractDAOValidatorsRule # getValidators ( ) */ @ Override protected Annotation [ ] getValidators ( ) { } }
// Si l ' annotation en cours est nulle if ( this . annotation == null ) { // On retourne null return null ; } // Si l ' annotation en cours n ' est pas de l ' instance if ( ! ( annotation instanceof SizeDAOValidators ) ) { // On retourne null return null ; } // On caste SizeDAOValidators castedValidators = ( SizeDAOValidators ) annotation ; // Liste des Annotations SizeDAOValidator [ ] tValidators = castedValidators . value ( ) ; // On retourne la liste return tValidators ;
public class LogUtils {
    /**
     * Creates a Writer used to write test results to the log.xml file.
     *
     * @param logDir The directory containing the test session results.
     * @param callpath A test session identifier.
     * @return A PrintWriter object, or {@code null} if one could not be created.
     * @throws Exception
     */
    public static PrintWriter createLog(File logDir, String callpath) throws Exception {
        if (logDir == null) {
            return null;
        }
        File dir = new File(logDir, callpath);
        // Expose the session root (first path segment) to other components via a system property.
        System.setProperty("PATH", logDir.toString() + "/" + callpath.split("/")[0]);
        dir.mkdir();
        File logFile = new File(dir, "log.xml");
        // Remove any stale log from a previous run before opening a fresh one.
        logFile.delete();
        return new PrintWriter(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(logFile), "UTF-8")));
    }
}
public class XMLChecker {
    /**
     * Checks if the specified part of a character array matches the <em>PubidLiteral</em>
     * production.
     * See: <a href="http://www.w3.org/TR/REC-xml#NT-PubidLiteral">Definition of PubidLiteral</a>.
     *
     * @param ch the character array that contains the characters to be checked, cannot be
     *        <code>null</code>.
     * @param start the start index into <code>ch</code>, must be &gt;= 0.
     * @param length the number of characters to take from <code>ch</code>, starting at the
     *        <code>start</code> index.
     * @throws NullPointerException if <code>ch == null</code>.
     * @throws IndexOutOfBoundsException if
     *         <code>start &lt; 0 || start + length &gt; ch.length</code>.
     * @throws InvalidXMLException if the specified character string does not match the
     *         <em>PubidLiteral</em> production.
     */
    public static final void checkPubidLiteral(char[] ch, int start, int length)
            throws NullPointerException, IndexOutOfBoundsException, InvalidXMLException {
        // Minimum length is 3: the two quote characters plus at least one content character.
        if (length < 3) {
            throw new InvalidXMLException("Minimum length for the 'PubidLiteral' production is 3 characters.");
        }
        int lastIndex = start + length - 1;
        // BUG FIX: the literal begins at index 'start', not 0. The original read ch[0],
        // which inspected the wrong character whenever start > 0.
        char firstChar = ch[start];
        char lastChar = ch[lastIndex];
        // First and last char: single quote (apostrophe)
        String otherAllowedChars;
        if (firstChar == '\'') {
            if (lastChar != '\'') {
                throw new InvalidXMLException("First character is '\\'', but the " + "last character is 0x" + Integer.toHexString(lastChar) + '.');
            }
            otherAllowedChars = "-()+,./:=?;!*#@$_%";
            // First and last char: double quote character
        } else if (firstChar == '"') {
            if (lastChar != '"') {
                throw new InvalidXMLException("First character is '\"', but the " + "last character is 0x" + Integer.toHexString(lastChar) + '.');
            }
            otherAllowedChars = "-'()+,./:=?;!*#@$_%";
            // First character is invalid
        } else {
            throw new InvalidXMLException("First char must either be '\\'' or " + "'\"' instead of 0x" + Integer.toHexString(firstChar) + '.');
        }
        // Check each character between the quotes.
        // BUG FIX: iterate over absolute indices (start+1 .. lastIndex-1); the original
        // iterated 1 .. length-2 and read from the wrong positions when start > 0.
        for (int i = start + 1; i < lastIndex; i++) {
            char c = ch[i];
            if (c != 0x20 && c != 0x0D && c != 0x0A && !isLetter(c) && !isDigit(c) && otherAllowedChars.indexOf(c) < 0) {
                // TODO: Quote character properly, even if it is an apostrophe
                throw new InvalidXMLException("The character '" + c + "' (0x" + Integer.toHexString(c) + ") is not valid for the " + "'PubidLiteral' production.");
            }
        }
    }
}
public class MappingDataTypeCompletion {
    /**
     * This method wraps the variable that holds data property values with a data type predicate.
     * It will replace the variable with a new function symbol and update the rule atom.
     * However, if the users already defined the data-type in the mapping, this method simply
     * accepts the function symbol.
     *
     * @param term the term to (recursively) datatype
     * @param atom the atom holding {@code term} at {@code position}; updated in place
     * @param position index of {@code term} within {@code atom}
     * @param termOccurenceIndex index used to look up the inferred datatype of a variable
     * @throws UnknownDatatypeException if no datatype can be determined for a variable
     */
    private void insertVariableDataTyping(Term term, Function atom, int position, Map<String, List<IndexedPosition>> termOccurenceIndex) throws UnknownDatatypeException {
        if (term instanceof Function) {
            Function function = (Function) term;
            Predicate functionSymbol = function.getFunctionSymbol();
            if (function.isDataTypeFunction() || (functionSymbol instanceof URITemplatePredicate) || (functionSymbol instanceof BNodePredicate)) {
                // NO-OP for already assigned datatypes, or object properties, or bnodes
            } else if (function.isOperation()) {
                // Recurse into each operand of the operation, typing them individually.
                for (int i = 0; i < function.getArity(); i++) {
                    insertVariableDataTyping(function.getTerm(i), function, i, termOccurenceIndex);
                }
            } else {
                throw new IllegalArgumentException("Unsupported subtype of: " + Function.class.getSimpleName());
            }
        } else if (term instanceof Variable) {
            // Bare variable: wrap it with the datatype inferred from the database.
            Variable variable = (Variable) term;
            Term newTerm;
            RDFDatatype type = getDataType(termOccurenceIndex, variable);
            newTerm = termFactory.getTypedTerm(variable, type);
            log.info("Datatype " + type + " for the value " + variable + " of the property " + atom + " has been " + "inferred " + "from the database");
            atom.setTerm(position, newTerm);
        } else if (term instanceof ValueConstant) {
            // Constants carry their own type; wrap with it explicitly.
            Term newTerm = termFactory.getTypedTerm(term, ((ValueConstant) term).getType());
            atom.setTerm(position, newTerm);
        } else {
            throw new IllegalArgumentException("Unsupported subtype of: " + Term.class.getSimpleName());
        }
    }
}
public class XPathParser { /** * Given an string , init an XPath object for selections , * in order that a parse doesn ' t * have to be done each time the expression is evaluated . * @ param compiler The compiler object . * @ param expression A string conforming to the XPath grammar . * @ param namespaceContext An object that is able to resolve prefixes in * the XPath to namespaces . * @ throws javax . xml . transform . TransformerException */ public void initXPath ( Compiler compiler , String expression , PrefixResolver namespaceContext ) throws javax . xml . transform . TransformerException { } }
m_ops = compiler ; m_namespaceContext = namespaceContext ; m_functionTable = compiler . getFunctionTable ( ) ; Lexer lexer = new Lexer ( compiler , namespaceContext , this ) ; lexer . tokenize ( expression ) ; m_ops . setOp ( 0 , OpCodes . OP_XPATH ) ; m_ops . setOp ( OpMap . MAPINDEX_LENGTH , 2 ) ; // Patch for Christine ' s gripe . She wants her errorHandler to return from // a fatal error and continue trying to parse , rather than throwing an exception . // Without the patch , that put us into an endless loop . // % REVIEW % Is there a better way of doing this ? // % REVIEW % Are there any other cases which need the safety net ? // ( and if so do we care right now , or should we rewrite the XPath // grammar engine and can fix it at that time ? ) try { nextToken ( ) ; Expr ( ) ; if ( null != m_token ) { String extraTokens = "" ; while ( null != m_token ) { extraTokens += "'" + m_token + "'" ; nextToken ( ) ; if ( null != m_token ) extraTokens += ", " ; } error ( XPATHErrorResources . ER_EXTRA_ILLEGAL_TOKENS , new Object [ ] { extraTokens } ) ; // " Extra illegal tokens : " + extraTokens ) ; } } catch ( org . apache . xpath . XPathProcessorException e ) { if ( CONTINUE_AFTER_FATAL_ERROR . equals ( e . getMessage ( ) ) ) { // What I _ want _ to do is null out this XPath . // I doubt this has the desired effect , but I ' m not sure what else to do . // % REVIEW % ! ! ! initXPath ( compiler , "/.." , namespaceContext ) ; } else throw e ; } compiler . shrink ( ) ;
public class ServiceContainerHelper {
    /**
     * Ensures the specified service is stopped.
     *
     * @param controller a service controller
     */
    public static void stop(ServiceController<?> controller) {
        try {
            transition(controller, State.DOWN);
        } catch (StartException e) {
            // This can't happen: transitioning to DOWN does not start the service,
            // so surface any StartException as a programming error.
            throw new IllegalStateException(e);
        }
    }
}
public class ConvertBufferedImage { /** * Converts a buffered image into an image of the specified type . In a ' dst ' image is provided * it will be used for output , otherwise a new image will be created . */ public static < T extends ImageGray < T > > T convertFromSingle ( BufferedImage src , T dst , Class < T > type ) { } }
if ( type == GrayU8 . class ) { return ( T ) convertFrom ( src , ( GrayU8 ) dst ) ; } else if ( GrayI16 . class . isAssignableFrom ( type ) ) { return ( T ) convertFrom ( src , ( GrayI16 ) dst , ( Class ) type ) ; } else if ( type == GrayF32 . class ) { return ( T ) convertFrom ( src , ( GrayF32 ) dst ) ; } else { throw new IllegalArgumentException ( "Unknown type " + type ) ; }
public class ApplicationGatewaysInner {
    /**
     * Gets the backend health of the specified application gateway in a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param applicationGatewayName The name of the application gateway.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<ApplicationGatewayBackendHealthInner> backendHealthAsync(String resourceGroupName, String applicationGatewayName, final ServiceCallback<ApplicationGatewayBackendHealthInner> serviceCallback) {
        // Adapt the observable-based long-running operation to a callback-style ServiceFuture.
        return ServiceFuture.fromResponse(backendHealthWithServiceResponseAsync(resourceGroupName, applicationGatewayName), serviceCallback);
    }
}
public class ConnectionManagerServiceImpl {
    /**
     * Returns the connection manager for this configuration.
     * This method lazily initializes the connection manager service if necessary.
     *
     * @param refInfo reference to the connection factory.
     * @param svc the connection factory service
     * @return the connection manager for this configuration.
     * @throws ResourceException if an error occurs
     */
    @Override
    public ConnectionManager getConnectionManager(ResourceInfo refInfo, AbstractConnectionFactoryService svc) throws ResourceException {
        final boolean trace = TraceComponent.isAnyTracingEnabled();
        if (trace && tc.isEntryEnabled())
            Tr.entry(this, tc, "getConnectionManager", refInfo, svc);
        ConnectionManager cm;
        lock.readLock().lock();
        try {
            // Lazily create the pool manager. The read lock must be released before
            // acquiring the write lock (no direct upgrade), so pm is re-checked inside.
            if (pm == null)
                try {
                    // Switch to write lock for lazy initialization
                    lock.readLock().unlock();
                    lock.writeLock().lock();
                    if (pm == null)
                        createPoolManager(svc);
                } finally {
                    // Downgrade to read lock for rest of method
                    lock.readLock().lock();
                    lock.writeLock().unlock();
                }
            CMConfigData cmConfigData = getCMConfigData(svc, refInfo);
            String cfDetailsKey = cmConfigData.getCFDetailsKey();
            cm = cfKeyToCM.get(cfDetailsKey);
            if (cm == null) {
                // Not cached yet: build a new connection manager for this configuration key.
                CommonXAResourceInfo xaResInfo = new EmbXAResourceInfo(cmConfigData);
                J2CGlobalConfigProperties gConfigProps = pm.getGConfigProps();
                synchronized (this) {
                    // Re-check under the monitor so only one thread creates and registers it.
                    cm = cfKeyToCM.get(cfDetailsKey);
                    if (cm == null) {
                        cm = new ConnectionManager(svc, pm, gConfigProps, xaResInfo);
                        cfKeyToCM.put(cfDetailsKey, cm);
                    }
                }
            }
        } finally {
            lock.readLock().unlock();
        }
        if (trace && tc.isEntryEnabled())
            Tr.exit(this, tc, "getConnectionManager", cm);
        return cm;
    }
}
public class ApiOvhEmailpro { /** * Alter this object properties * REST : PUT / email / pro / { service } * @ param body [ required ] New object properties * @ param service [ required ] The internal name of your pro organization * API beta */ public void service_PUT ( String service , OvhService body ) throws IOException { } }
String qPath = "/email/pro/{service}" ; StringBuilder sb = path ( qPath , service ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class AbstractWSingleSelectList {
    /** {@inheritDoc} */
    @Override
    protected boolean doHandleRequest(final Request request) {
        // A change occurred only when the submitted selection differs from the current one.
        final Object newSelection = getRequestValue(request);
        if (Util.equals(newSelection, getValue())) {
            return false;
        }
        setData(newSelection);
        return true;
    }
}
public class Entity {
    /**
     * Returns the list-type values matching the given value class.
     *
     * @param valuesClass Class<T>.
     * @return Collection<T>.
     */
    <T extends ListValue> Collection<T> getListTypeValues(Class<T> valuesClass) {
        // Delegate to the underlying instance; presumably 'instance' is a lazy
        // holder/supplier for the backing entity — TODO confirm.
        return instance.get().listTypeValues(valuesClass);
    }
}
public class ProductSearchClient {
    /**
     * Formats a string containing the fully-qualified path to represent a product resource.
     *
     * @deprecated Use the {@link ProductName} class instead.
     */
    @Deprecated
    public static final String formatProductName(String project, String location, String product) {
        // Expand the product path template with the three resource identifiers.
        return PRODUCT_PATH_TEMPLATE.instantiate("project", project, "location", location, "product", product);
    }
}
public class Matrix { /** * Converts this matrix using the given { @ code factory } . * @ param factory the factory that creates an output matrix * @ param < T > type of the result matrix * @ return converted matrix */ public < T extends Matrix > T to ( MatrixFactory < T > factory ) { } }
T result = factory . apply ( rows , columns ) ; apply ( LinearAlgebra . IN_PLACE_COPY_MATRIX_TO_MATRIX , result ) ; return result ;
public class HttpRequestMessageImpl { /** * Set the value of the scheme in the Request by using the * int identifiers . * @ param scheme */ @ Override public void setScheme ( SchemeValues scheme ) { } }
this . myScheme = scheme ; super . setFirstLineChanged ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "setScheme(v): " + ( null != scheme ? scheme . getName ( ) : null ) ) ; }
public class SquareRegularClustersIntoGrids { /** * Add all the nodes into the list which lie along the line defined by a and b . a is assumed to be * an end point . Care is taken to not cycle . */ int addLineToGrid ( SquareNode a , SquareNode b , List < SquareNode > list ) { } }
int total = 2 ; // double maxAngle = UtilAngle . radian ( 45 ) ; while ( true ) { // double slopeX0 = b . center . x - a . center . x ; // double slopeY0 = b . center . y - a . center . y ; // double angleAB = Math . atan2 ( slopeY0 , slopeX0 ) ; // see which side the edge belongs to on b boolean matched = false ; int side ; for ( side = 0 ; side < 4 ; side ++ ) { if ( b . edges [ side ] != null && b . edges [ side ] . destination ( b ) == a ) { matched = true ; break ; } } if ( ! matched ) { throw new RuntimeException ( "BUG!" ) ; } // must be on the adjacent side side = ( side + 2 ) % 4 ; if ( b . edges [ side ] == null ) break ; SquareNode c = b . edges [ side ] . destination ( b ) ; if ( c . graph == SEARCHED ) break ; // double slopeX1 = c . center . x - b . center . x ; // double slopeY1 = c . center . y - b . center . y ; // double angleBC = Math . atan2 ( slopeY1 , slopeX1 ) ; // double acute = Math . abs ( UtilAngle . minus ( angleAB , angleBC ) ) ; // if ( acute > = maxAngle ) // break ; total ++ ; c . graph = SEARCHED ; list . add ( c ) ; a = b ; b = c ; } return total ;
public class InsertSpan {
    /**
     * TLDR: we are guarding against setting null, as doing so implies tombstones. We are dodging
     * setX to keep code simpler than other alternatives described below.
     *
     * <p>If there's consistently 8 tombstones (nulls) per row, then we'll only need 125 spans in a
     * trace (rows in a partition) to trigger the `tombstone_warn_threshold` warnings being logged
     * in the C* nodes. And if we go to 12500 spans in a trace then that whole trace partition
     * would become unreadable. Cassandra warns at a 1000 tombstones in any query, and fails on
     * 100000 tombstones.
     *
     * <p>There's also a small question about disk usage efficiency. Each tombstone is a cell name
     * and basically empty cell value entry stored on disk. Given that the cells are, apart from
     * tags and annotations, generally very small then this could be proportionally an unnecessary
     * waste of disk.
     *
     * <p>To avoid this, relying upon a number of variant prepared statements for inserting a span
     * is the normal practice. Another popular practice is to insert those potentially null columns
     * as separate statements (and optionally put them together into UNLOGGED batches), e.g.
     * cassandra-reaper's CassandraStorage.
     */
    @Override
    protected ResultSetFuture newFuture() {
        // Primary-key columns are always present and bound unconditionally.
        BoundStatement bound = factory.preparedStatement.bind()
                .setUUID("ts_uuid", input.ts_uuid())
                .setString("trace_id", input.trace_id())
                .setString("id", input.id());
        // now set the nullable fields -- a column that is skipped is simply not written,
        // which avoids creating a tombstone for it (see class comment above).
        if (null != input.trace_id_high()) bound.setString("trace_id_high", input.trace_id_high());
        if (null != input.parent_id()) bound.setString("parent_id", input.parent_id());
        if (null != input.kind()) bound.setString("kind", input.kind());
        if (null != input.span()) bound.setString("span", input.span());
        if (0L != input.ts()) bound.setLong("ts", input.ts());
        if (0L != input.duration()) bound.setLong("duration", input.duration());
        if (null != input.l_ep()) bound.set("l_ep", input.l_ep(), EndpointUDT.class);
        if (null != input.r_ep()) bound.set("r_ep", input.r_ep(), EndpointUDT.class);
        if (!input.annotations().isEmpty()) bound.setList("annotations", input.annotations());
        if (!input.tags().isEmpty()) bound.setMap("tags", input.tags());
        // Booleans default to unset; only bind them when true.
        if (input.shared()) bound.setBool("shared", true);
        if (input.debug()) bound.setBool("debug", true);
        if (factory.searchEnabled) {
            // Search/index columns are written only when search is enabled.
            if (null != input.l_ep()) bound.setString("l_service", input.l_ep().getService());
            if (null != input.annotation_query()) {
                bound.setString("annotation_query", input.annotation_query());
            }
        }
        return factory.session.executeAsync(bound);
    }
}
public class DistRaid {
    /**
     * Checks if the map-reduce job has completed.
     *
     * @return true if the job completed, false otherwise.
     * @throws IOException
     */
    public boolean checkComplete() throws IOException {
        JobID jobID = runningJob.getID();
        if (runningJob.isComplete()) {
            // delete job directory
            final String jobdir = jobconf.get(JOB_DIR_LABEL);
            if (jobdir != null) {
                final Path jobpath = new Path(jobdir);
                jobpath.getFileSystem(jobconf).delete(jobpath, true);
            }
            if (runningJob.isSuccessful()) {
                LOG.info("Job Complete(Succeeded): " + jobID);
            } else {
                LOG.info("Job Complete(Failed): " + jobID);
            }
            raidPolicyPathPairList.clear();
            // Publish failure and slot-usage counters to the RAID node metrics.
            Counters ctrs = runningJob.getCounters();
            if (ctrs != null) {
                RaidNodeMetrics metrics = RaidNodeMetrics.getInstance(RaidNodeMetrics.DEFAULT_NAMESPACE_ID);
                if (ctrs.findCounter(Counter.FILES_FAILED) != null) {
                    long filesFailed = ctrs.findCounter(Counter.FILES_FAILED).getValue();
                    metrics.raidFailures.inc(filesFailed);
                }
                long slotSeconds = ctrs.findCounter(JobInProgress.Counter.SLOTS_MILLIS_MAPS).getValue() / 1000;
                metrics.raidSlotSeconds.inc(slotSeconds);
            }
            return true;
        } else {
            // Still running: log progress only when it changed since the last poll.
            String report = (" job " + jobID + " map " + StringUtils.formatPercent(runningJob.mapProgress(), 0) + " reduce " + StringUtils.formatPercent(runningJob.reduceProgress(), 0));
            if (!report.equals(lastReport)) {
                LOG.info(report);
                lastReport = report;
            }
            // Consume new task-completion events and log any failed tasks.
            TaskCompletionEvent[] events = runningJob.getTaskCompletionEvents(jobEventCounter);
            jobEventCounter += events.length;
            for (TaskCompletionEvent event : events) {
                if (event.getTaskStatus() == TaskCompletionEvent.Status.FAILED) {
                    LOG.info(" Job " + jobID + " " + event.toString());
                }
            }
            return false;
        }
    }
}
public class PackageBasedActionConfigBuilder { /** * Checks if class package match provided list of package locators * @ param classPackageName * name of class package * @ return true if class package is on the { @ link # packageLocators } list */ protected boolean checkPackageLocators ( String classPackageName ) { } }
if ( packageLocators != null && ! disablePackageLocatorsScanning && classPackageName . length ( ) > 0 && ( packageLocatorsBasePackage == null || classPackageName . startsWith ( packageLocatorsBasePackage ) ) ) { for ( String packageLocator : packageLocators ) { String [ ] splitted = classPackageName . split ( "\\." ) ; if ( es . cenobit . struts2 . json . util . StringUtils . contains ( splitted , packageLocator , false ) ) return true ; } } return false ;
public class ServletRESTRequestWithParams { /** * ( non - Javadoc ) * @ see com . ibm . wsspi . rest . handler . RESTRequest # getParameterMap ( ) */ @ Override public Map < String , String [ ] > getParameterMap ( ) { } }
ServletRESTRequestImpl ret = castRequest ( ) ; if ( ret != null ) return ret . getParameterMap ( ) ; return null ;
public class AssetServiceLocator { /** * For the given interface , get the stub implementation . * If this service has no port for the given interface , * then ServiceException is thrown . */ public java . rmi . Remote getPort ( Class serviceEndpointInterface ) throws javax . xml . rpc . ServiceException { } }
try { if ( com . google . api . ads . adwords . axis . v201809 . cm . AssetServiceInterface . class . isAssignableFrom ( serviceEndpointInterface ) ) { com . google . api . ads . adwords . axis . v201809 . cm . AssetServiceSoapBindingStub _stub = new com . google . api . ads . adwords . axis . v201809 . cm . AssetServiceSoapBindingStub ( new java . net . URL ( AssetServiceInterfacePort_address ) , this ) ; _stub . setPortName ( getAssetServiceInterfacePortWSDDServiceName ( ) ) ; return _stub ; } } catch ( java . lang . Throwable t ) { throw new javax . xml . rpc . ServiceException ( t ) ; } throw new javax . xml . rpc . ServiceException ( "There is no stub implementation for the interface: " + ( serviceEndpointInterface == null ? "null" : serviceEndpointInterface . getName ( ) ) ) ;
public class PrimaveraPMFileWriter { /** * Writes a list of UDF types . * @ author lsong * @ param type parent entity type * @ param mpxj parent entity * @ return list of UDFAssignmentType instances */ private List < UDFAssignmentType > writeUDFType ( FieldTypeClass type , FieldContainer mpxj ) { } }
List < UDFAssignmentType > out = new ArrayList < UDFAssignmentType > ( ) ; for ( CustomField cf : m_sortedCustomFieldsList ) { FieldType fieldType = cf . getFieldType ( ) ; if ( fieldType != null && type == fieldType . getFieldTypeClass ( ) ) { Object value = mpxj . getCachedValue ( fieldType ) ; if ( FieldTypeHelper . valueIsNotDefault ( fieldType , value ) ) { UDFAssignmentType udf = m_factory . createUDFAssignmentType ( ) ; udf . setTypeObjectId ( FieldTypeHelper . getFieldID ( fieldType ) ) ; setUserFieldValue ( udf , fieldType . getDataType ( ) , value ) ; out . add ( udf ) ; } } } return out ;
public class ExcelFunctions { /** * Returns the sum of all arguments */ public static BigDecimal sum ( EvaluationContext ctx , Object ... args ) { } }
if ( args . length == 0 ) { throw new RuntimeException ( "Wrong number of arguments" ) ; } BigDecimal result = BigDecimal . ZERO ; for ( Object arg : args ) { result = result . add ( Conversions . toDecimal ( arg , ctx ) ) ; } return result ;
public class SampledStat {
    /**
     * Timeout any windows that have expired in the absence of any events.
     *
     * @param config metric config supplying the sample count and window length
     * @param now current time in milliseconds
     */
    protected void purgeObsoleteSamples(MetricConfig config, long now) {
        // A sample is obsolete once it is older than the full span of all windows.
        long expireAge = config.samples() * config.timeWindowMs();
        for (int i = 0; i < samples.size(); i++) {
            Sample sample = this.samples.get(i);
            if (now - sample.lastWindowMs >= expireAge) {
                // The samples array is used as a circular list. The rank represents how many spots behind the current
                // window is window #i at. The current sample is rank 0, the next older sample is 1 and so on until
                // the oldest sample, which has a rank equal to samples.size() - 1.
                int rank = current - i;
                if (rank < 0) {
                    rank += samples.size();
                }
                // Here we reset the expired window to a time in the past that is offset proportionally to its rank,
                // preserving the relative ordering of the windows.
                sample.reset(now - rank * config.timeWindowMs());
            }
        }
    }
}
public class CmsResourceWrapperModules { /** * Gets the virtual resources for the import folder . < p > * @ param cms the CMS context * @ return the virtual resources for the import folder */ private List < CmsResource > getVirtualResourcesForImport ( CmsObject cms ) { } }
List < CmsResource > result = Lists . newArrayList ( ) ; return result ;
public class SyslogMessage {
    /**
     * Generates an <a href="http://tools.ietf.org/html/rfc5424">RFC-5424</a> message.
     *
     * @return the message rendered as an RFC-5424 syslog string
     * @throws IllegalStateException if the writer unexpectedly raises an
     *         {@link IOException} (a {@link StringWriter} performs no real I/O,
     *         so this would indicate a programming error)
     */
    public String toRfc5424SyslogMessage() {
        // Pre-size the buffer: body length plus headroom for the syslog header.
        // NOTE(review): msg appears to expose size() (e.g. a CharArrayWriter) — confirm its type.
        StringWriter sw = new StringWriter(msg == null ? 32 : msg.size() + 32);
        try {
            // Delegate the actual formatting to the Writer-based overload.
            toRfc5424SyslogMessage(sw);
        } catch (IOException e) {
            throw new IllegalStateException(e);
        }
        return sw.toString();
    }
}
public class BeanO {
    /**
     * Returns whether the caller is in the given security role.
     *
     * Changed EnterpriseBean to Object. d366807.1
     *
     * @param roleName the role to test the caller against
     * @param bean the bean instance making the call, or null
     * @return true if the caller is in the role; when security is disabled the
     *         result depends on the EJB module version (see inline comments)
     */
    public boolean isCallerInRole(String roleName, Object bean) {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        if (isTraceOn && tc.isEntryEnabled()) {
            Tr.entry(tc, "isCallerInRole, role = " + roleName + " EJB = " + bean); // 182011
        }

        // Check whether security is enabled or not. Note, the subclasses
        // do override this method to do additional processing and then they
        // call super.isCallerInRole which usually causes this code to
        // execute. However, the subclass will throw IllegalStateException
        // (e.g. MessageDrivenBeanO) if not valid to call isCallerInRole.
        boolean inRole; // d444696.2
        EJBSecurityCollaborator<?> securityCollaborator = container.ivSecurityCollaborator;
        if (securityCollaborator == null) // d444696.2
        {
            // Security is disabled, so return false for 1.x and 2.x modules to
            // ensure pre EJB 3 applications see no behavior change. For EJB 3 modules
            // or later, return true so that we are consistent with web container.
            BeanMetaData bmd = home.beanMetaData;
            if (isTraceOn && tc.isDebugEnabled()) {
                Tr.debug(tc, "isCallerInRole called with security disabled for EJB module version = " + bmd.ivModuleVersion);
            }
            inRole = (bmd.ivModuleVersion >= BeanMetaData.J2EE_EJB_VERSION_3_0);
        } else // d444696.2
        {
            // Pass null EJSDeployedSupport for callback methods. d739835
            EJSDeployedSupport s = bean == null ? null : EJSContainer.getMethodContext();
            try {
                inRole = isCallerInRole(securityCollaborator, roleName, s);
            } catch (RuntimeException ex) {
                // Record the failure for serviceability, then rethrow unchanged.
                FFDCFilter.processException(ex, CLASS_NAME + ".isCallerInRole", "982", this);
                if (isTraceOn && tc.isEntryEnabled())
                    Tr.exit(tc, "isCallerInRole collaborator throwing", ex);
                throw ex;
            }
        }
        if (isTraceOn && tc.isEntryEnabled()) // d444696.2
        {
            Tr.exit(tc, "isCallerInRole returning " + inRole);
        }
        return inRole; // d444696.2
    }
}
public class AsynchronousBean {
    /**
     * Assigns the given activity to the asynchronous EJB for background execution.
     *
     * @param callable
     *   the {@code Activity} to be executed asynchronously.
     * @return
     *   the activity's {@code Future} object, for result retrieval.
     * @see
     *   org.dihedron.patterns.activities.concurrent.ActivityExecutor#submit(org.dihedron.patterns.activities.Activity)
     */
    public Future<ActivityData> submit(ActivityCallable callable) throws ActivityException {
        logger.info("submitting activity to asynchronous EJB...");
        // NOTE(review): the callable is invoked synchronously here and its result
        // wrapped in AsyncResult; asynchrony is presumably provided by the EJB
        // container dispatching this method (@Asynchronous) — confirm the bean config.
        return new AsyncResult<ActivityData>(callable.call());
    }
}
public class AsyncInputStream {
    /**
     * Resumes the reading.
     *
     * @return the current {@code AsyncInputStream}
     * @throws IllegalStateException if the stream has already been closed
     */
    @Override
    public AsyncInputStream resume() {
        switch (state) {
            case STATUS_CLOSED:
                throw new IllegalStateException("Cannot resume, already closed");
            case STATUS_PAUSED:
                // Only a paused stream transitions back to active and restarts reading.
                state = STATUS_ACTIVE;
                doRead();
        }
        // Resuming an already-active stream is a no-op; return this for chaining.
        return this;
    }
}
public class DateUtils {
    /**
     * Computes the number of whole calendar months between two dates,
     * ignoring the day-of-month component. The result is negative when
     * {@code end} falls in an earlier month than {@code begin}.
     *
     * @param begin the starting date
     * @param end   the ending date
     * @return the signed month difference between the two dates
     */
    public static int getMonthSpan(Date begin, Date end) {
        Calendar start = new GregorianCalendar();
        start.setTime(begin);
        Calendar stop = new GregorianCalendar();
        stop.setTime(end);
        int yearDelta = stop.get(Calendar.YEAR) - start.get(Calendar.YEAR);
        int monthDelta = stop.get(Calendar.MONTH) - start.get(Calendar.MONTH);
        return yearDelta * 12 + monthDelta;
    }
}
public class RiakNode { /** * Sets the maximum number of connections allowed . * @ param maxConnections the maxConnections to set . * @ return a reference to this RiakNode . * @ see Builder # withMaxConnections ( int ) */ public RiakNode setMaxConnections ( int maxConnections ) { } }
stateCheck ( State . CREATED , State . RUNNING , State . HEALTH_CHECKING ) ; if ( maxConnections >= getMinConnections ( ) ) { permits . setMaxPermits ( maxConnections ) ; } else { throw new IllegalArgumentException ( "Max connections less than min connections" ) ; } // TODO : reap delta ? return this ;
public class QueuePlugin { /** * Adds new functions , to be executed , onto the end of the named * queue of all matched elements . */ @ SuppressWarnings ( "unchecked" ) public T queue ( final String name , Function ... funcs ) { } }
for ( final Function f : funcs ) { for ( Element e : elements ( ) ) { queue ( e , name , f ) ; } } return ( T ) this ;
public class FieldType {
    /**
     * Return the value from the field in the object that is defined by this FieldType. If the field is a foreign object
     * then the ID of the field is returned instead.
     *
     * @param object the object to read the field value from
     * @return the raw field value, or — for foreign objects — the referenced object's reference-field value
     * @throws SQLException if the underlying field extraction fails
     */
    public Object extractJavaFieldValue(Object object) throws SQLException {
        Object val = extractRawJavaFieldValue(object);
        // if this is a foreign object then we want its reference field
        if (foreignRefField != null && val != null) {
            val = foreignRefField.extractRawJavaFieldValue(val);
        }
        return val;
    }
}
public class DefaultGroovyMethods {
    /**
     * Extracts a sub-list of {@code self} described by {@code splice}, which may be a
     * two-element list of [from, to] indexes, an {@link IntRange}, or an {@link EmptyRange}.
     * Negative indexes are normalised against the list size. The upper bound is inclusive
     * for list/IntRange splices; an EmptyRange yields an empty sub-list at its position.
     *
     * todo: remove after putAt(Splice) gets deleted
     *
     * @param self   the list to slice
     * @param splice the splice describing the range
     * @return the (view) sub-list of {@code self}
     * @throws IllegalArgumentException if {@code splice} is not a 2-element list or a range
     */
    @Deprecated
    protected static List getSubList(List self, List splice) {
        int left;
        int right = 0;
        boolean emptyRange = false;
        if (splice.size() == 2) {
            // Plain [from, to] pair; unbox each bound to a primitive int.
            left = DefaultTypeTransformation.intUnbox(splice.get(0));
            right = DefaultTypeTransformation.intUnbox(splice.get(1));
        } else if (splice instanceof IntRange) {
            IntRange range = (IntRange) splice;
            left = range.getFrom();
            right = range.getTo();
        } else if (splice instanceof EmptyRange) {
            // Only the starting position matters for an empty range.
            RangeInfo info = subListBorders(self.size(), (EmptyRange) splice);
            left = info.from;
            emptyRange = true;
        } else {
            throw new IllegalArgumentException("You must specify a list of 2 indexes to create a sub-list");
        }
        int size = self.size();
        // Translate negative indexes into positions relative to the end of the list.
        left = normaliseIndex(left, size);
        right = normaliseIndex(right, size);
        List sublist /* = null */;
        if (!emptyRange) {
            // right is inclusive here, while List.subList is exclusive — hence +1.
            sublist = self.subList(left, right + 1);
        } else {
            sublist = self.subList(left, left);
        }
        return sublist;
    }
}
public class TextHandler { /** * 执行text标签SQL文本内容和有序参数的拼接 . * @ param source 构建所需的资源对象 * @ param valueText xml中value的文本内容 * @ return 返回SqlInfo对象 */ private SqlInfo doBuildSqlInfo ( BuildSource source , String valueText ) { } }
SqlInfo sqlInfo = source . getSqlInfo ( ) ; Node node = source . getNode ( ) ; this . concatSqlText ( node , sqlInfo ) ; return XmlSqlInfoBuilder . newInstace ( source ) . buildTextSqlParams ( valueText ) ;
public class BoxApiFolder { /** * Gets a request that moves a folder to another folder * @ param id id of folder to move * @ param parentId id of parent folder to move folder into * @ return request to move a folder */ public BoxRequestsFolder . UpdateFolder getMoveRequest ( String id , String parentId ) { } }
BoxRequestsFolder . UpdateFolder request = new BoxRequestsFolder . UpdateFolder ( id , getFolderInfoUrl ( id ) , mSession ) . setParentId ( parentId ) ; return request ;
public class Configuration { /** * Return true if the given qualifier should be excluded and false otherwise . * @ param qualifier the qualifier to check . */ public boolean shouldExcludeQualifier ( String qualifier ) { } }
if ( excludedQualifiers . contains ( "all" ) || excludedQualifiers . contains ( qualifier ) || excludedQualifiers . contains ( qualifier + ".*" ) ) { return true ; } else { int index = - 1 ; while ( ( index = qualifier . indexOf ( "." , index + 1 ) ) != - 1 ) { if ( excludedQualifiers . contains ( qualifier . substring ( 0 , index + 1 ) + "*" ) ) { return true ; } } return false ; }
public class CqlQuery {
    /**
     * Trims the first column from the row if its name is equal to "KEY".
     *
     * @param row the row whose columns may start with the key column
     * @return the row's (possibly trimmed) column list
     */
    private List<Column> filterKeyColumn(CqlRow row) {
        // Only act when suppression is enabled and the row actually has columns.
        if (suppressKeyInColumns && row.isSetColumns() && row.columns.size() > 0) {
            Iterator<Column> columnsIterator = row.getColumnsIterator();
            Column column = columnsIterator.next();
            // duplicate() compares against KEY_BB without disturbing the buffer's
            // position/limit — presumably column.name is a ByteBuffer; confirm.
            if (column.name.duplicate().equals(KEY_BB)) {
                columnsIterator.remove();
            }
        }
        return row.getColumns();
    }
}
public class BinTools {
    /**
     * Convert hex digit to numerical value.
     *
     * @param c
     *            0-9, a-f, A-F allowed.
     * @return 0-15
     * @throws IllegalArgumentException
     *             on non-hex character
     */
    public static int hex2bin(char c) {
        if (c >= 'a' && c <= 'f') {
            return c - 'a' + 10;
        }
        if (c >= 'A' && c <= 'F') {
            return c - 'A' + 10;
        }
        if (c >= '0' && c <= '9') {
            return c - '0';
        }
        throw new IllegalArgumentException("Input string may only contain hex digits, but found '" + c + "'");
    }
}
public class TwitterImpl { /** * / * Tweets Resources */ @ Override public ResponseList < Status > getRetweets ( long statusId ) throws TwitterException { } }
return factory . createStatusList ( get ( conf . getRestBaseURL ( ) + "statuses/retweets/" + statusId + ".json?count=100" ) ) ;
public class EFMImpl {
    /**
     * Reports whether the given EMF feature is set to a non-default value.
     * Generated EMF code — do not hand-edit; regenerate from the model instead.
     *
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case AfplibPackage.EFM__FM_NAME:
                // "Set" means the current value differs from the default
                // (null-safe comparison against FM_NAME_EDEFAULT).
                return FM_NAME_EDEFAULT == null ? fmName != null : !FM_NAME_EDEFAULT.equals(fmName);
        }
        return super.eIsSet(featureID);
    }
}
public class StringUtil {
    /**
     * Center the contents of the string. If the supplied string is longer than the desired width, it is truncated to the
     * specified length. If the supplied string is shorter than the desired width, padding characters are added to the beginning
     * and end of the string such that the length is that specified; one additional padding character is prepended if required.
     * All leading and trailing whitespace is removed before centering.
     *
     * @param str the string to be centered; if null, an empty string is used
     * @param width the desired width of the string; must be positive
     * @param padWithChar the character to use for padding, if needed
     * @return the centered string
     * @see #setLength(String, int, char)
     */
    public static String justifyCenter(String str, final int width, char padWithChar) {
        // Normalize: null becomes empty, and surrounding whitespace is stripped.
        final String content = (str == null) ? "" : str.trim();
        final int padding = width - content.length();
        if (padding < 0) {
            // Too long — truncate to the requested width.
            return content.substring(0, width);
        }
        // Odd padding puts the extra character at the front.
        final int trailing = padding / 2;
        int leading = padding - trailing;
        final StringBuilder result = new StringBuilder(width);
        while (leading-- > 0) {
            result.append(padWithChar);
        }
        result.append(content);
        for (int i = 0; i < trailing; i++) {
            result.append(padWithChar);
        }
        return result.toString();
    }
}
public class PortableNavigatorContext {
    /**
     * Resets the state to the initial state for future reuse.
     * Restores every mutable navigation field from its captured initial value,
     * including rewinding the input buffer to the initial read position.
     */
    void reset() {
        cd = initCd;
        serializer = initSerializer;
        in.position(initPosition);
        finalPosition = initFinalPosition;
        offset = initOffset;
    }
}
public class ScopeService {
    /**
     * Returns either all scopes or scopes for a specific client_id passed as query parameter.
     *
     * @param req request
     * @return string If query param client_id is passed, then the scopes for that client_id will be returned.
     *         Otherwise, all available scopes will be returned in JSON format.
     * @throws OAuthException if the scopes cannot be serialized to JSON
     */
    public String getScopes(HttpRequest req) throws OAuthException {
        QueryStringDecoder dec = new QueryStringDecoder(req.uri());
        Map<String, List<String>> queryParams = dec.parameters();
        // A client_id filter delegates to the per-client overload.
        if (queryParams.containsKey("client_id")) {
            return getScopes(queryParams.get("client_id").get(0));
        }
        // blockingGet(): this endpoint is synchronous, so wait for the DB result.
        List<Scope> scopes = DBManagerFactory.getInstance().getAllScopes().blockingGet();
        String jsonString;
        try {
            jsonString = JSON.toJSONString(scopes);
        } catch (Exception e) {
            LOG.error("cannot load scopes", e);
            throw new OAuthException(e, null, HttpResponseStatus.BAD_REQUEST);
        }
        return jsonString;
    }
}
public class MimeMessageHelper {
    /**
     * Determines the right resource name and optionally attaches the correct extension to the name.
     * Name preference order: explicit resource name, then the data-source name, then a generated
     * "resource&lt;UUID&gt;" fallback.
     *
     * @param attachmentResource the attachment to derive a name for
     * @param includeExtension   whether the data-source file extension should be appended (or stripped)
     * @return the resolved resource name
     */
    static String determineResourceName(final AttachmentResource attachmentResource, final boolean includeExtension) {
        final String datasourceName = attachmentResource.getDataSource().getName();
        String resourceName;
        if (!valueNullOrEmpty(attachmentResource.getName())) {
            resourceName = attachmentResource.getName();
        } else if (!valueNullOrEmpty(datasourceName)) {
            resourceName = datasourceName;
        } else {
            // No usable name anywhere — generate a unique placeholder.
            resourceName = "resource" + UUID.randomUUID();
        }
        if (includeExtension && !valueNullOrEmpty(datasourceName)) {
            @SuppressWarnings("UnnecessaryLocalVariable")
            final String possibleFilename = datasourceName;
            // Borrow the extension from the data-source name when the chosen name lacks one.
            if (!resourceName.contains(".") && possibleFilename.contains(".")) {
                final String extension = possibleFilename.substring(possibleFilename.lastIndexOf("."));
                if (!resourceName.endsWith(extension)) {
                    resourceName += extension;
                }
            }
        } else if (!includeExtension && resourceName.contains(".") && resourceName.equals(datasourceName)) {
            final String extension = resourceName.substring(resourceName.lastIndexOf("."));
            // NOTE(review): String.replace removes every occurrence of the extension
            // substring, not just the trailing one (e.g. "a.txt.txt" -> "a") — confirm intended.
            resourceName = resourceName.replace(extension, "");
        }
        return resourceName;
    }
}
public class SpoilerElement { /** * Return a map of all spoilers contained in a message . * The map uses the language of a spoiler as key . * If a spoiler has no language attribute , its key will be an empty String . * @ param message message * @ return map of spoilers */ public static Map < String , String > getSpoilers ( Message message ) { } }
if ( ! containsSpoiler ( message ) ) { return Collections . emptyMap ( ) ; } List < ExtensionElement > spoilers = message . getExtensions ( SpoilerElement . ELEMENT , NAMESPACE ) ; Map < String , String > map = new HashMap < > ( ) ; for ( ExtensionElement e : spoilers ) { SpoilerElement s = ( SpoilerElement ) e ; if ( s . getLanguage ( ) == null || s . getLanguage ( ) . equals ( "" ) ) { map . put ( "" , s . getHint ( ) ) ; } else { map . put ( s . getLanguage ( ) , s . getHint ( ) ) ; } } return map ;
public class ImmediateExpressions { /** * Report whether a sample of the variable satisfies the criteria . */ public static < V > boolean the ( Sampler < V > variable , Matcher < ? super V > criteria ) { } }
variable . takeSample ( ) ; return criteria . matches ( variable . sampledValue ( ) ) ;
public class JsonSerializer {
    /**
     * Writes the object out as json.
     *
     * @param out
     *            output writer
     * @param json
     *            a {@link JsonElement}
     * @param pretty
     *            if true, a properly indented version of the json is written
     * @throws IOException
     *             if there is a problem writing to the writer
     */
    public static void serialize(@Nonnull Writer out, @Nonnull JsonElement json, boolean pretty) throws IOException {
        BufferedWriter bw = new BufferedWriter(out);
        serialize(bw, json, pretty, 0);
        if (pretty) {
            // Pretty output ends with a trailing newline.
            bw.write('\n');
        }
        // Flush rather than close: closing the wrapper would also close the
        // caller-supplied writer, which this method does not own.
        bw.flush();
    }
}
public class GitLabApi {
    /**
     * Sets up all future calls to the GitLab API to be done as another user specified by provided user ID.
     * To revert back to normal non-sudo operation you must call unsudo(), or pass null as the sudoAsId.
     *
     * @param sudoAsId the ID of the user to sudo as, null will turn off sudo
     * @throws GitLabApiException if any exception occurs, or if the user ID cannot be resolved
     */
    public void setSudoAsId(Integer sudoAsId) throws GitLabApiException {
        if (sudoAsId == null) {
            // Turn sudo off.
            apiClient.setSudoAsId(null);
            return;
        }

        // Get the User specified by the sudoAsId, if you are not an admin or the username is not found, this will fail
        User user = getUserApi().getUser(sudoAsId);
        if (user == null || !user.getId().equals(sudoAsId)) {
            throw new GitLabApiException("the specified user ID was not found");
        }

        apiClient.setSudoAsId(sudoAsId);
    }
}
public class FileUtils { /** * Loads the properties from a file specified as a parameter . * @ param propertiesFile * Path to the properties file . * @ throws IOException * is something goes wrong while reading the file */ public void loadPropertiesFromFile ( final String propertiesFile ) throws IOException { } }
LOG . info ( "Loading properties from file: " + propertiesFile ) ; File file = new File ( propertiesFile ) ; String bundleName = file . getPath ( ) . substring ( 0 , file . getPath ( ) . indexOf ( "properties" ) - 1 ) ; FileInputStream inputStream = null ; try { inputStream = new FileInputStream ( file ) ; ResourceBundle bundle = new PropertyResourceBundle ( inputStream ) ; LOG . info ( "Adding to bunlde: " + bundleName ) ; bundles . put ( bundleName , bundle ) ; } finally { if ( inputStream != null ) { inputStream . close ( ) ; } }
public class PolygonMarkers { /** * { @ inheritDoc } */ @ Override public void setVisibleMarkers ( boolean visible ) { } }
for ( Marker marker : markers ) { marker . setVisible ( visible ) ; } for ( PolygonHoleMarkers hole : holes ) { hole . setVisibleMarkers ( visible ) ; }
public class JobsImpl {
    /**
     * Lists the execution status of the Job Preparation and Job Release task for the specified job across the compute nodes where the job has run.
     * This API returns the Job Preparation and Job Release task status on all compute nodes that have run the Job Preparation or Job Release task. This includes nodes which have since been removed from the pool. If this API is invoked on a job which has no Job Preparation or Job Release task, the Batch service returns HTTP status code 409 (Conflict) with an error code of JobPreparationTaskNotSpecified.
     *
     * @param jobId The ID of the job.
     * @param jobListPreparationAndReleaseTaskStatusOptions Additional parameters for the operation
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;JobPreparationAndReleaseTaskExecutionInformation&gt; object
     */
    public Observable<Page<JobPreparationAndReleaseTaskExecutionInformation>> listPreparationAndReleaseTaskStatusAsync(final String jobId, final JobListPreparationAndReleaseTaskStatusOptions jobListPreparationAndReleaseTaskStatusOptions) {
        // Delegate to the ServiceResponse variant and strip headers/response
        // metadata, exposing only the page body to the caller.
        return listPreparationAndReleaseTaskStatusWithServiceResponseAsync(jobId, jobListPreparationAndReleaseTaskStatusOptions)
            .map(new Func1<ServiceResponseWithHeaders<Page<JobPreparationAndReleaseTaskExecutionInformation>, JobListPreparationAndReleaseTaskStatusHeaders>, Page<JobPreparationAndReleaseTaskExecutionInformation>>() {
                @Override
                public Page<JobPreparationAndReleaseTaskExecutionInformation> call(ServiceResponseWithHeaders<Page<JobPreparationAndReleaseTaskExecutionInformation>, JobListPreparationAndReleaseTaskStatusHeaders> response) {
                    return response.body();
                }
            });
    }
}
public class RouteProcessor { /** * { @ inheritDoc } * @ param annotations * @ param roundEnv */ @ Override public boolean process ( Set < ? extends TypeElement > annotations , RoundEnvironment roundEnv ) { } }
if ( CollectionUtils . isNotEmpty ( annotations ) ) { Set < ? extends Element > routeElements = roundEnv . getElementsAnnotatedWith ( Route . class ) ; try { logger . info ( ">>> Found routes, start... <<<" ) ; this . parseRoutes ( routeElements ) ; } catch ( Exception e ) { logger . error ( e ) ; } return true ; } return false ;
public class DiSH {
    /**
     * Extracts the clusters from the cluster order.
     *
     * @param relation the database storing the objects
     * @param clusterOrder the cluster order to extract the clusters from
     * @return the extracted clusters, keyed by common preference vector
     */
    private Object2ObjectOpenCustomHashMap<long[], List<ArrayModifiableDBIDs>> extractClusters(Relation<V> relation, DiSHClusterOrder clusterOrder) {
        FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("Extract Clusters", relation.size(), LOG) : null;
        Object2ObjectOpenCustomHashMap<long[], List<ArrayModifiableDBIDs>> clustersMap = new Object2ObjectOpenCustomHashMap<>(BitsUtil.FASTUTIL_HASH_STRATEGY);
        // Note clusterOrder currently contains DBID objects anyway.
        WritableDataStore<Pair<long[], ArrayModifiableDBIDs>> entryToClusterMap = DataStoreUtil.makeStorage(relation.getDBIDs(), DataStoreFactory.HINT_TEMP | DataStoreFactory.HINT_HOT, Pair.class);
        // Phase 1: assign each object (in cluster order) to a cluster sharing its
        // preference vector, creating new (parallel) clusters as needed.
        for (DBIDIter iter = clusterOrder.iter(); iter.valid(); iter.advance()) {
            V object = relation.get(iter);
            long[] preferenceVector = clusterOrder.getCommonPreferenceVector(iter);
            // get the list of (parallel) clusters for the preference vector
            List<ArrayModifiableDBIDs> parallelClusters = clustersMap.get(preferenceVector);
            if (parallelClusters == null) {
                parallelClusters = new ArrayList<>();
                clustersMap.put(preferenceVector, parallelClusters);
            }
            // look for the proper cluster
            ArrayModifiableDBIDs cluster = null;
            for (ArrayModifiableDBIDs c : parallelClusters) {
                NumberVector c_centroid = ProjectedCentroid.make(preferenceVector, relation, c);
                long[] commonPreferenceVector = BitsUtil.andCMin(preferenceVector, preferenceVector);
                int subspaceDim = subspaceDimensionality(object, c_centroid, preferenceVector, preferenceVector, commonPreferenceVector);
                if (subspaceDim == clusterOrder.getCorrelationValue(iter)) {
                    // Accept the cluster only if the object is close enough to its
                    // projected centroid (within twice epsilon).
                    double d = weightedDistance(object, c_centroid, commonPreferenceVector);
                    if (d <= 2 * epsilon) {
                        cluster = c;
                        break;
                    }
                }
            }
            if (cluster == null) {
                cluster = DBIDUtil.newArray();
                parallelClusters.add(cluster);
            }
            cluster.add(iter);
            entryToClusterMap.put(iter, new Pair<>(preferenceVector, cluster));
            LOG.incrementProcessed(progress);
        }
        LOG.ensureCompleted(progress);
        if (LOG.isDebuggingFiner()) {
            int dim = RelationUtil.dimensionality(relation);
            StringBuilder msg = new StringBuilder("Step 0");
            for (Map.Entry<long[], List<ArrayModifiableDBIDs>> clusterList : clustersMap.entrySet()) {
                for (ArrayModifiableDBIDs c : clusterList.getValue()) {
                    msg.append('\n').append(BitsUtil.toStringLow(clusterList.getKey(), dim)).append(" ids ").append(c.size());
                }
            }
            LOG.debugFiner(msg.toString());
        }
        // add the predecessor to the cluster
        // Phase 2: move each cluster's first element's predecessor into the cluster
        // when the predecessor belongs to a different preference vector but is not
        // "better" (by correlation value / reachability) than the current element.
        DBIDVar cur = DBIDUtil.newVar(), pre = DBIDUtil.newVar();
        for (long[] pv : clustersMap.keySet()) {
            List<ArrayModifiableDBIDs> parallelClusters = clustersMap.get(pv);
            for (ArrayModifiableDBIDs cluster : parallelClusters) {
                if (cluster.isEmpty()) {
                    continue;
                }
                cluster.assignVar(0, cur);
                clusterOrder.getPredecessor(cur, pre);
                if (!pre.isSet() || DBIDUtil.equal(pre, cur)) {
                    continue;
                }
                // parallel cluster
                if (BitsUtil.equal(clusterOrder.getCommonPreferenceVector(pre), clusterOrder.getCommonPreferenceVector(cur))) {
                    continue;
                }
                if (clusterOrder.getCorrelationValue(pre) < clusterOrder.getCorrelationValue(cur) || clusterOrder.getReachability(pre) < clusterOrder.getReachability(cur)) {
                    continue;
                }
                // Re-home the predecessor: remove it from its old cluster and add it here.
                Pair<long[], ArrayModifiableDBIDs> oldCluster = entryToClusterMap.get(pre);
                oldCluster.second.remove(pre);
                cluster.add(pre);
                entryToClusterMap.put(pre, new Pair<>(pv, cluster));
            }
        }
        return clustersMap;
    }
}
public class LambdaToMethod { /** * Create new synthetic method with given flags , name , type , owner */ private MethodSymbol makePrivateSyntheticMethod ( long flags , Name name , Type type , Symbol owner ) { } }
return new MethodSymbol ( flags | SYNTHETIC | PRIVATE , name , type , owner ) ;
public class Money { /** * ( non - Javadoc ) * @ see * MonetaryAmount # divideAndRemainder ( MonetaryAmount ) */ @ Override public Money [ ] divideAndRemainder ( double divisor ) { } }
if ( NumberVerifier . isInfinityAndNotNaN ( divisor ) ) { Money zero = Money . of ( 0 , getCurrency ( ) ) ; return new Money [ ] { zero , zero } ; } return divideAndRemainder ( new BigDecimal ( String . valueOf ( divisor ) ) ) ;