signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class FlexCompColMatrix { /** * Sets the given column equal the passed vector */ public void setColumn ( int i , SparseVector x ) { } }
if ( x . size ( ) != numRows ) throw new IllegalArgumentException ( "New column must be of the same size as existing column" ) ; colD [ i ] = x ;
public class Entities {
    /**
     * Gets any entity within the container marked with the specified type.
     *
     * @param container entity container to search
     * @param type entity type to look for
     * @return an Optional of a matching entity, or an empty Optional if none was found
     * @see Entity#markAsType(Class)
     */
    public static <T extends Entity> Optional<T> anyEntityOfType(final EntityContainer container, final Class<T> type) {
        // Any match will do; findAny() makes no ordering guarantee.
        return container.streamEntitiesOfType(type).findAny();
    }
}
public class MidaoFactory { /** * Returns new Pooled { @ link javax . sql . DataSource } implementation * @ param poolProperties pool properties * @ return new Pooled { @ link javax . sql . DataSource } implementation * @ throws java . sql . SQLException */ public static DataSource createDataSource ( Properties poolProperties ) throws SQLException { } }
try { return MidaoFrameworkPoolBinder . createDataSource ( poolProperties ) ; } catch ( NoClassDefFoundError ex ) { throw new NoClassDefFoundError ( ERROR_COULDNT_FIND_POOL_PROVIDER ) ; }
public class AssessmentRun { /** * A list of notifications for the event subscriptions . A notification about a particular generated finding is added * to this list only once . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setNotifications ( java . util . Collection ) } or { @ link # withNotifications ( java . util . Collection ) } if you want * to override the existing values . * @ param notifications * A list of notifications for the event subscriptions . A notification about a particular generated finding * is added to this list only once . * @ return Returns a reference to this object so that method calls can be chained together . */ public AssessmentRun withNotifications ( AssessmentRunNotification ... notifications ) { } }
if ( this . notifications == null ) { setNotifications ( new java . util . ArrayList < AssessmentRunNotification > ( notifications . length ) ) ; } for ( AssessmentRunNotification ele : notifications ) { this . notifications . add ( ele ) ; } return this ;
public class IOUtils {
    /**
     * Reads stopwords into memory from a reader, one per line. Each entry is trimmed
     * and converted to lowercase before being stored.
     *
     * @param reader the input reader
     * @return the set of stopwords read
     * @throws IOException if reading from the reader fails
     */
    public static Set<String> readStopwords(BufferedReader reader) throws IOException {
        Set<String> stopwords = new HashSet<String>();
        for (String line = reader.readLine(); line != null; line = reader.readLine()) {
            stopwords.add(line.trim().toLowerCase());
        }
        return stopwords;
    }
}
public class PeepholeReplaceKnownMethods {
    /**
     * Try to fold .substr() calls on string literals into the resulting literal.
     *
     * @param n the CALL node for the substr() invocation
     * @param stringNode the string literal the call is made on
     * @param arg1 the first argument (start index); must not be null
     * @return the replacement string node on success, or {@code n} unchanged when folding is unsafe
     */
    private Node tryFoldStringSubstr(Node n, Node stringNode, Node arg1) {
        checkArgument(n.isCall());
        checkArgument(stringNode.isString());
        checkArgument(arg1 != null);
        int start;
        int length;
        String stringAsString = stringNode.getString();
        // Only fold when the start argument is a statically known number.
        Double maybeStart = NodeUtil.getNumberValue(arg1);
        if (maybeStart != null) {
            start = maybeStart.intValue();
        } else {
            return n;
        }
        Node arg2 = arg1.getNext();
        if (arg2 != null) {
            Double maybeLength = NodeUtil.getNumberValue(arg2);
            if (maybeLength != null) {
                length = maybeLength.intValue();
            } else {
                return n;
            }
            if (arg2.getNext() != null) {
                // If we got more args than we expected, bail out.
                return n;
            }
        } else {
            // parameter 2 not passed: the length is the remainder of the string
            length = stringAsString.length() - start;
        }
        // Don't handle these cases. The specification actually does
        // specify the behavior in some of these cases, but we haven't
        // done a thorough investigation that it is correctly implemented
        // in all browsers.
        if ((start + length) > stringAsString.length() || (length < 0) || (start < 0)) {
            return n;
        }
        String result = stringAsString.substring(start, start + length);
        // Splice the folded literal into the AST in place of the call.
        Node resultNode = IR.string(result);
        Node parent = n.getParent();
        parent.replaceChild(n, resultNode);
        reportChangeToEnclosingScope(parent);
        return resultNode;
    }
}
public class BranchEvent { /** * Logs this BranchEvent to Branch for tracking and analytics * @ param context Current context * @ return { @ code true } if the event is logged to Branch */ public boolean logEvent ( Context context ) { } }
boolean isReqQueued = false ; String reqPath = isStandardEvent ? Defines . RequestPath . TrackStandardEvent . getPath ( ) : Defines . RequestPath . TrackCustomEvent . getPath ( ) ; if ( Branch . getInstance ( ) != null ) { Branch . getInstance ( ) . handleNewRequest ( new ServerRequestLogEvent ( context , reqPath ) ) ; isReqQueued = true ; } return isReqQueued ;
public class ResourceUtils {
    /**
     * Recursively add all the files matching the given name pattern inside the given
     * directory and all subdirectories to the FindBugs project.
     *
     * @param findBugsProject project that matching files are added to
     * @param clzDir directory to scan; silently ignored when it is not a directory
     * @param pat file name pattern to match
     */
    public static void addFiles(final Project findBugsProject, File clzDir, final Pattern pat) {
        if (clzDir.isDirectory()) {
            // listFiles() is used for its side effect only: FileCollector adds each
            // matching file to the project (and presumably recurses into
            // subdirectories) while acting as the filter — TODO confirm in FileCollector.
            clzDir.listFiles(new FileCollector(pat, findBugsProject));
        }
    }
}
public class MissingDataHandler {
    /**
     * Filters out rows or columns that contain NA values. Defaults: axis = 0 (row),
     * how = 'any', thresh = 0, columns = null, inplace = false.
     *
     * @param axis ROW to drop by row, COLUMN to drop by column
     * @param how ANY or ALL; only consulted when thresh is 0
     * @param thresh required number of non-NA values to keep a row/column; 0 disables the threshold
     * @param columns only consider NA dropping on the given columns; null means all columns
     * @return a new DDF with NAs filtered
     * @throws DDFException if thresh exceeds the column/row count or axis is not ROW/COLUMN
     */
    @Override
    public DDF dropNA(Axis axis, NAChecking how, long thresh, List<String> columns) throws DDFException {
        DDF newddf = null;
        int numcols = this.getDDF().getNumColumns();
        if (columns == null) {
            columns = this.getDDF().getColumnNames();
        }
        String sqlCmd = "";
        if (axis == Axis.ROW) {
            // drop row with NA
            if (thresh > 0) {
                if (thresh > numcols) {
                    throw new DDFException("Required number of non-NA values per row must be less than or equal the number of columns.");
                } else {
                    // A row is dropped when it has at least (numcols - thresh + 1) NAs,
                    // i.e. fewer than thresh non-NA values.
                    sqlCmd = dropNARowSQL(numcols - thresh + 1, columns);
                }
            } else if (how == NAChecking.ANY) {
                sqlCmd = dropNARowSQL(1, columns);
            } else if (how == NAChecking.ALL) {
                sqlCmd = dropNARowSQL(numcols, columns);
            }
            // The SQL template carries a %s placeholder for the table name.
            newddf = this.getManager().sql2ddf(String.format(sqlCmd, this.getDDF().getTableName()), false);
        } else if (axis == Axis.COLUMN) {
            // drop column with NA: keep only the surviving columns via a projection
            List<String> cols = Lists.newArrayList();
            long numrows = this.getDDF().getNumRows();
            if (thresh > 0) {
                if (thresh > numrows) {
                    throw new DDFException("Required number of non-NA values per column must be less than or equal the number of rows.");
                } else {
                    cols = selectedColumns(numrows - thresh + 1, columns);
                }
            } else if (how == NAChecking.ANY) {
                cols = selectedColumns(1, columns);
            } else if (how == NAChecking.ALL) {
                cols = selectedColumns(numrows, columns);
            }
            newddf = this.getDDF().VIEWS.project(cols);
        } else {
            throw new DDFException("Either choose Axis.ROW for row-based NA filtering or Axis.COLUMN for column-based NA filtering");
        }
        // Preserve factor (categorical) metadata from the source DDF.
        newddf.getMetaDataHandler().copyFactor(this.getDDF());
        return newddf;
    }
}
public class CacheHandler { /** * 删除缓存组 * @ param mt * @ param mapperNameSpace * @ param removePkCache 是否同时删除按主键的缓存 */ private void removeCacheByGroup ( String msId , String mapperNameSpace , boolean removePkCache ) { } }
// 删除cachegroup关联缓存 String entityName = mapperNameRalateEntityNames . get ( mapperNameSpace ) ; getCacheProvider ( ) . clearGroup ( entityName , removePkCache ) ; logger . debug ( "_autocache_ method[{}] remove cache Group:{}" , msId , entityName ) ; // 关联缓存 if ( cacheEvictCascades . containsKey ( msId ) ) { String cacheGroup ; for ( String entity : cacheEvictCascades . get ( msId ) ) { cacheGroup = entity + GROUPKEY_SUFFIX ; getCacheProvider ( ) . clearExpiredGroupKeys ( entity + GROUPKEY_SUFFIX ) ; logger . debug ( "_autocache_ method[{}] remove Cascade cache Group:[{}]" , msId , cacheGroup ) ; } }
public class CmsMultiDialog { /** * Checks if the resource operation is an operation on at least one folder . < p > * @ return true if the operation an operation on at least one folder , otherwise false */ protected boolean isOperationOnFolder ( ) { } }
Iterator < String > i = getResourceList ( ) . iterator ( ) ; while ( i . hasNext ( ) ) { String resName = i . next ( ) ; try { CmsResource curRes = getCms ( ) . readResource ( resName , CmsResourceFilter . ALL ) ; if ( curRes . isFolder ( ) ) { // found a folder return true ; } } catch ( CmsException e ) { // can usually be ignored if ( LOG . isInfoEnabled ( ) ) { LOG . info ( e . getLocalizedMessage ( ) ) ; } } } return false ;
public class ADSUtil {
    /**
     * Computes an upper bound for the length of a splitting word. Based on
     * I.V. Kogan. "Estimated Length of a Minimal Simple Conditional Diagnostic Experiment".
     * In: Automation and Remote Control 34 (1973).
     *
     * @param n the size of the automaton (number of states)
     * @param i the number of states that should be distinguished by the current splitting word
     * @param m the number of states that should originally be distinguished
     * @return upper bound for the length of a splitting word
     */
    public static long computeMaximumSplittingWordLength(final int n, final int i, final int m) {
        // Special case from the paper: for m == 2 the bound is simply the automaton size.
        if (m == 2) {
            return n;
        }
        return LongMath.binomial(n, i) - LongMath.binomial(m - 1, i - 1) - 1;
    }
}
public class RamEventStore {
    /**
     * Converts an opaque handle into a long ID. If the handle is not a Long, this will throw an
     * {@link java.lang.IllegalArgumentException}.
     *
     * @param handle The handle to convert to an ID.
     * @return The ID.
     * @throws IllegalArgumentException if the handle is null or not a Long
     */
    private Long handleToId(Object handle) {
        if (handle instanceof Long) {
            return (Long) handle;
        }
        // Report null explicitly: the original dereferenced handle.getClass() here,
        // which raised an uninformative NullPointerException for null handles
        // instead of the documented IllegalArgumentException.
        String actual = (handle == null) ? "null" : handle.getClass().getCanonicalName();
        throw new IllegalArgumentException("Expected handle to be a Long, but was: " + actual);
    }
}
public class ByteArrayReader { /** * Reads a standard SSH1 MPINT using the first 16 bits as the length prefix * @ return * @ throws IOException */ public BigInteger readMPINT ( ) throws IOException { } }
short bits = readShort ( ) ; byte [ ] raw = new byte [ ( bits + 7 ) / 8 + 1 ] ; raw [ 0 ] = 0 ; readFully ( raw , 1 , raw . length - 1 ) ; return new BigInteger ( raw ) ;
public class AsynchronousRequest {
    /**
     * For more info on pvp ranks API go <a href="https://wiki.guildwars2.com/wiki/API:2/pvp/ranks">here</a><br/>
     * Gives the user access to {@link Callback#onResponse(Call, Response)} and
     * {@link Callback#onFailure(Call, Throwable)} methods for custom interactions.
     *
     * @param ids list of PvP rank ids
     * @param callback callback that is going to be used for {@link Call#enqueue(Callback)}
     * @throws GuildWars2Exception on an empty ID list
     * @throws NullPointerException if the given {@link Callback} is null
     * @see PvPRank PvP rank info
     */
    public void getPvPRankInfo(int[] ids, Callback<List<PvPRank>> callback) throws GuildWars2Exception, NullPointerException {
        // Validate the id list before issuing the asynchronous request.
        isParamValid(new ParamChecker(ids));
        gw2API.getPvPRankInfo(processIds(ids), GuildWars2.lang.getValue()).enqueue(callback);
    }
}
public class WorkArounds {
    /**
     * TODO: This method exists because of a bug in javac which does not
     * handle "@deprecated tag in package-info.java"; when this issue
     * is fixed this method and its uses must be jettisoned.
     *
     * @param e the element to test
     * @return true if the element has a javadoc @deprecated tag or carries the
     *         Deprecated annotation type
     */
    public boolean isDeprecated0(Element e) {
        // Deprecated via javadoc tag.
        if (!utils.getDeprecatedTrees(e).isEmpty()) {
            return true;
        }
        JavacTypes jctypes = ((DocEnvImpl) configuration.docEnv).toolEnv.typeutils;
        TypeMirror deprecatedType = utils.getDeprecatedType();
        // Deprecated via the annotation: compare each annotation's type against
        // the canonical deprecated type.
        for (AnnotationMirror anno : e.getAnnotationMirrors()) {
            if (jctypes.isSameType(anno.getAnnotationType().asElement().asType(), deprecatedType))
                return true;
        }
        return false;
    }
}
public class UsbConnection {
    /**
     * Fires a frame received event ({@link KNXListener#frameReceived(FrameEvent)}) for the supplied EMI
     * <code>frame</code>.
     *
     * @param emiType EMI flavor of the received frame
     * @param frame the EMI1/EMI2/cEMI L-data frame to generate the event for
     * @throws KNXFormatException on error creating the cEMI message
     */
    private void fireFrameReceived(final KnxTunnelEmi emiType, final byte[] frame) throws KNXFormatException {
        logger.debug("received {} frame {}", emiType, DataUnitBuilder.toHex(frame, ""));
        // cEMI frames are parsed into a CEMI object; EMI1/EMI2 frames are delivered raw.
        final FrameEvent fe = emiType == KnxTunnelEmi.CEmi
                ? new FrameEvent(this, CEMIFactory.create(frame, 0, frame.length))
                : new FrameEvent(this, frame);
        listeners.fire(l -> l.frameReceived(fe));
    }
}
public class AuthenticationContext {
    /**
     * Acquires a security token from the authority.
     *
     * @param resource identifier of the target resource that is the recipient of the requested token
     * @param clientAssertion the client assertion to use for client authentication
     * @param callback optional callback object for non-blocking execution
     * @return a {@link Future} object representing the {@link AuthenticationResult} of the call.
     *         It contains the Access Token and the Access Token's expiration time. The Refresh
     *         Token property will be null for this overload.
     */
    public Future<AuthenticationResult> acquireToken(final String resource, final ClientAssertion clientAssertion, final AuthenticationCallback callback) {
        this.validateInput(resource, clientAssertion, true);
        final ClientAuthentication clientAuth = createClientAuthFromClientAssertion(clientAssertion);
        // Client-credentials grant: no user is involved in this flow.
        final AdalOAuthAuthorizationGrant authGrant = new AdalOAuthAuthorizationGrant(new ClientCredentialsGrant(), resource);
        return this.acquireToken(authGrant, clientAuth, callback);
    }
}
public class FluentCloseableIterable {
    /**
     * Returns the elements from this fluent iterable that satisfy a predicate. The
     * resulting fluent iterable's iterator does not support {@code remove()}.
     *
     * @param predicate filter applied lazily as elements are iterated
     * @return a new fluent iterable view over the matching elements
     */
    public final FluentCloseableIterable<T> filter(Predicate<? super T> predicate) {
        return from(CloseableIterables.filter(this, predicate));
    }
}
public class Ui { /** * Check the AttributeSet values have a attribute String , on user set the attribute resource . * Form android styles namespace * @ param attrs AttributeSet * @ param attribute The attribute to retrieve * @ return If have the attribute return True */ public static boolean isHaveAttribute ( AttributeSet attrs , String attribute ) { } }
return attrs . getAttributeValue ( Ui . androidStyleNameSpace , attribute ) != null ;
public class ConsulClient {
    /**
     * Fires a Consul event by delegating to the underlying event client.
     *
     * @param event event name
     * @param payload event payload
     * @param eventParams event-specific parameters
     * @param queryParams common query parameters
     * @return response containing the fired event
     */
    @Override
    public Response<Event> eventFire(String event, String payload, EventParams eventParams, QueryParams queryParams) {
        return eventClient.eventFire(event, payload, eventParams, queryParams);
    }
}
public class ProtoTruth {
    /**
     * Assert on a single {@link Message} instance.
     *
     * @param message the proto message under test; may be null
     * @return a subject for fluent proto assertions
     */
    public static ProtoSubject<?, Message> assertThat(@NullableDecl Message message) {
        return assertAbout(protos()).that(message);
    }
}
public class JavaParser {
    /**
     * $ANTLR start synpred139_Java
     * ANTLR-generated syntactic-predicate fragment: matches an enumDeclaration
     * optionally followed by ';'. Generated from Java.g; do not hand-edit —
     * regenerate from the grammar instead.
     */
    public final void synpred139_Java_fragment() throws RecognitionException {
        // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:658:9: ( enumDeclaration ( ';' )? )
        // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:658:9: enumDeclaration ( ';' )?
        {
            pushFollow(FOLLOW_enumDeclaration_in_synpred139_Java2608);
            enumDeclaration();
            state._fsp--;
            if (state.failed) return;
            // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:658:25: ( ';' )?
            int alt210 = 2;
            int LA210_0 = input.LA(1);
            if ((LA210_0 == 52)) {
                alt210 = 1;
            }
            switch (alt210) {
                case 1:
                    // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:658:25: ';'
                    {
                        match(input, 52, FOLLOW_52_in_synpred139_Java2610);
                        if (state.failed) return;
                    }
                    break;
            }
        }
    }
}
public class StreamThrottler {
    /**
     * Throttles an {@link InputStream} if throttling is configured.
     *
     * @param inputStream {@link InputStream} to throttle; must not be null
     * @param sourceURI used for selecting the throttling policy; may be null
     * @param targetURI used for selecting the throttling policy; may be null
     * @return the throttled stream, wrapped for metering
     */
    @Builder(builderMethodName = "throttleInputStream", builderClassName = "InputStreamThrottler")
    private ThrottledInputStream doThrottleInputStream(InputStream inputStream, URI sourceURI, URI targetURI) {
        Preconditions.checkNotNull(inputStream, "InputStream cannot be null.");
        Limiter limiter = new NoopLimiter();
        if (sourceURI != null && targetURI != null) {
            StreamCopierSharedLimiterKey key = new StreamCopierSharedLimiterKey(sourceURI, targetURI);
            try {
                // Combine with the broker-provided shared limiter for this source/target route.
                limiter = new MultiLimiter(limiter, this.broker.getSharedResource(new SharedLimiterFactory<S>(), key));
            } catch (NotConfiguredException nce) {
                // Best-effort: fall back to the noop limiter when none is configured.
                log.warn("Could not create a Limiter for key " + key, nce);
            }
        } else {
            log.info("Not throttling input stream because source or target URIs are not defined.");
        }
        // Ensure a metered stream exists somewhere in the chain so throughput can be measured;
        // wrap the input only if it is not already metered.
        Optional<MeteredInputStream> meteredStream = MeteredInputStream.findWrappedMeteredInputStream(inputStream);
        if (!meteredStream.isPresent()) {
            meteredStream = Optional.of(MeteredInputStream.builder().in(inputStream).build());
            inputStream = meteredStream.get();
        }
        return new ThrottledInputStream(inputStream, limiter, meteredStream.get());
    }
}
public class FirestoreAdminClient { /** * Gets the metadata and configuration for a Field . * < p > Sample code : * < pre > < code > * try ( FirestoreAdminClient firestoreAdminClient = FirestoreAdminClient . create ( ) ) { * FieldName name = FieldName . of ( " [ PROJECT ] " , " [ DATABASE ] " , " [ COLLECTION _ ID ] " , " [ FIELD _ ID ] " ) ; * Field response = firestoreAdminClient . getField ( name ) ; * < / code > < / pre > * @ param name A name of the form * ` projects / { project _ id } / databases / { database _ id } / collectionGroups / { collection _ id } / fields / { field _ id } ` * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final Field getField ( FieldName name ) { } }
GetFieldRequest request = GetFieldRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . build ( ) ; return getField ( request ) ;
public class XMLChar {
    /**
     * Returns true if the encoding name is a valid IANA encoding.
     * This method does not verify that there is a decoder available
     * for this encoding, only that the characters are valid for an
     * IANA encoding name.
     *
     * @param ianaEncoding The IANA encoding name.
     * @return true if the name is syntactically a valid IANA encoding name
     */
    public static boolean isValidIANAEncoding(String ianaEncoding) {
        if (ianaEncoding == null || ianaEncoding.length() == 0) {
            return false;
        }
        // The first character must be an ASCII letter.
        final char first = ianaEncoding.charAt(0);
        final boolean firstIsLetter = (first >= 'A' && first <= 'Z') || (first >= 'a' && first <= 'z');
        if (!firstIsLetter) {
            return false;
        }
        // Remaining characters: ASCII letters, digits, '.', '_' or '-'.
        for (int index = 1; index < ianaEncoding.length(); index++) {
            final char c = ianaEncoding.charAt(index);
            final boolean isLetter = (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z');
            final boolean isDigit = c >= '0' && c <= '9';
            if (!isLetter && !isDigit && c != '.' && c != '_' && c != '-') {
                return false;
            }
        }
        return true;
    }
}
public class FilePickerActivity { /** * Set the background color of the header * @ param colorResId Resource Id of the color * @ param drawableResId Resource Id of the drawable */ private void setHeaderBackground ( int colorResId , int drawableResId ) { } }
if ( drawableResId == - 1 ) { try { header . setBackgroundColor ( getResources ( ) . getColor ( colorResId ) ) ; } catch ( Resources . NotFoundException e ) { e . printStackTrace ( ) ; } } else { try { header . setBackgroundDrawable ( getResources ( ) . getDrawable ( drawableResId ) ) ; } catch ( Resources . NotFoundException e ) { e . printStackTrace ( ) ; } }
public class HiveUtils { /** * Normally hive . aux . jars . path is expanded from just being a path to the full * list of files in the directory by the hive shell script . Since we normally * won ' t be running from the script , it ' s up to us to do that work here . We * use a heuristic that if there is no occurrence of " . jar " in the original , * it needs expansion . Otherwise it ' s already been done for us . * Also , surround the files with uri niceities . */ static String expandHiveAuxJarsPath ( String original ) throws IOException { } }
if ( original == null || original . contains ( ".jar" ) ) return original ; File [ ] files = new File ( original ) . listFiles ( ) ; if ( files == null || files . length == 0 ) { LOG . info ( "No files in to expand in aux jar path. Returning original parameter" ) ; return original ; } return filesToURIString ( files ) ;
public class FastStringBuffer {
    /**
     * Append the contents of another FastStringBuffer onto
     * this FastStringBuffer, growing the storage if necessary.
     * NOTE THAT after calling append(), previously obtained
     * references to m_array[] may no longer be valid.
     *
     * @param value FastStringBuffer whose contents are to be appended
     */
    public final void append(FastStringBuffer value) {
        // Complicating factor here is that the two buffers may use
        // different chunk sizes, and even if they're the same we're
        // probably on a different alignment due to previously appended
        // data. We have to work through the source in bite-sized chunks.
        if (value == null) return;
        int strlen = value.length();
        if (0 == strlen) return;
        int copyfrom = 0;
        char[] chunk = m_array[m_lastChunk];
        int available = m_chunkSize - m_firstFree;
        // Repeat while data remains to be copied
        while (strlen > 0) {
            // Copy what fits
            if (available > strlen) available = strlen;
            // Locate the source chunk/column for the current copy offset.
            int sourcechunk = (copyfrom + value.m_chunkSize - 1) >>> value.m_chunkBits;
            int sourcecolumn = copyfrom & value.m_chunkMask;
            int runlength = value.m_chunkSize - sourcecolumn;
            if (runlength > available) runlength = available;
            System.arraycopy(value.m_array[sourcechunk], sourcecolumn, m_array[m_lastChunk], m_firstFree, runlength);
            // A copy may straddle a source-chunk boundary; fetch the remainder
            // from the next source chunk.
            if (runlength != available)
                System.arraycopy(value.m_array[sourcechunk + 1], 0, m_array[m_lastChunk], m_firstFree + runlength, available - runlength);
            strlen -= available;
            copyfrom += available;
            // If there's more left, allocate another chunk and continue
            if (strlen > 0) {
                // Extend array?
                int i = m_array.length;
                if (m_lastChunk + 1 == i) {
                    char[][] newarray = new char[i + 16][];
                    System.arraycopy(m_array, 0, newarray, 0, i);
                    m_array = newarray;
                }
                // Advance one chunk
                chunk = m_array[++m_lastChunk];
                if (chunk == null) {
                    // Hierarchical encapsulation
                    if (m_lastChunk == 1 << m_rebundleBits && m_chunkBits < m_maxChunkBits) {
                        // Should do all the work of both encapsulating
                        // existing data and establishing new sizes/offsets
                        m_innerFSB = new FastStringBuffer(this);
                    }
                    // Add a chunk.
                    chunk = m_array[m_lastChunk] = new char[m_chunkSize];
                }
                available = m_chunkSize;
                m_firstFree = 0;
            }
        }
        // Adjust the insert point in the last chunk, when we've reached it.
        m_firstFree += available;
    }
}
public class Record {
    /**
     * Returns the unique value of the property converted to an instance of a certain class, or
     * null if the property has no value. Note that this method fails if the property has multiple
     * values or its unique value cannot be converted to the requested class; if this is not the
     * desired behavior, use {@link #getUnique(URI, Class, Object)} supplying an appropriate
     * default value to be returned in case of failure.
     *
     * @param property the property to read
     * @param valueClass the class to convert the value to
     * @param <T> the type of result
     * @return the unique value of the property, converted to the class specified; null if the
     *         property has no value
     * @throws IllegalStateException in case the property has multiple values
     * @throws IllegalArgumentException in case the unique property value cannot be converted to
     *         the class specified
     */
    @SuppressWarnings("unchecked")
    @Nullable
    public <T> T getUnique(final URI property, final Class<T> valueClass) throws IllegalStateException, IllegalArgumentException {
        final Object result;
        // Only the read needs the lock; message building below works on the local copy.
        synchronized (this) {
            result = doGet(property, valueClass);
        }
        if (result == null) {
            return null;
        } else if (result instanceof List<?>) {
            // Multiple values found: build a diagnostic listing at most 3 of them.
            final List<T> list = (List<T>) result;
            final StringBuilder builder = new StringBuilder("Expected one value for property ").append(property).append(", found ").append(list.size()).append(" values: ");
            for (int i = 0; i < Math.min(3, list.size()); ++i) {
                builder.append(i > 0 ? ", " : "").append(list.get(i));
            }
            builder.append(list.size() > 3 ? ", ..." : "");
            throw new IllegalStateException(builder.toString());
        } else {
            return (T) result;
        }
    }
}
public class Template {
    /**
     * Returns the inferred method type of the template based on the given actual argument types.
     *
     * @throws InferException if no instances of the specified type variables would allow the {@code
     *     actualArgTypes} to match the {@code expectedArgTypes}
     */
    private Type infer(Warner warner, Inliner inliner, List<Type> freeTypeVariables, List<Type> expectedArgTypes, Type returnType, List<Type> actualArgTypes) throws InferException {
        Symtab symtab = inliner.symtab();
        // Model the template as a synthetic method type so javac's own inference
        // machinery can be reused for matching the actual arguments.
        Type methodType = new MethodType(expectedArgTypes, returnType, List.<Type>nil(), symtab.methodClass);
        if (!freeTypeVariables.isEmpty()) {
            methodType = new ForAll(freeTypeVariables, methodType);
        }
        Enter enter = inliner.enter();
        MethodSymbol methodSymbol = new MethodSymbol(0, inliner.asName("__m__"), methodType, symtab.unknownSymbol);
        Type site = symtab.methodClass.type;
        Env<AttrContext> env = enter.getTopLevelEnv(TreeMaker.instance(inliner.getContext()).TopLevel(List.<JCTree>nil()));
        // Set up the resolution phase:
        // NOTE(review): the field and constructor below are javac internals accessed
        // reflectively; they may break across JDK versions — confirm on upgrade.
        try {
            Field field = AttrContext.class.getDeclaredField("pendingResolutionPhase");
            field.setAccessible(true);
            field.set(env.info, newMethodResolutionPhase(autoboxing()));
        } catch (ReflectiveOperationException e) {
            throw new LinkageError(e.getMessage(), e);
        }
        Object resultInfo;
        try {
            Class<?> resultInfoClass = Class.forName("com.sun.tools.javac.comp.Attr$ResultInfo");
            Constructor<?> resultInfoCtor = resultInfoClass.getDeclaredConstructor(Attr.class, KindSelector.class, Type.class);
            resultInfoCtor.setAccessible(true);
            resultInfo = resultInfoCtor.newInstance(Attr.instance(inliner.getContext()), KindSelector.PCK, Type.noType);
        } catch (ReflectiveOperationException e) {
            throw new LinkageError(e.getMessage(), e);
        }
        // Type inference sometimes produces diagnostics, so we need to catch them to avoid interfering
        // with the enclosing compilation.
        Log.DeferredDiagnosticHandler handler = new Log.DeferredDiagnosticHandler(Log.instance(inliner.getContext()));
        try {
            MethodType result = callCheckMethod(warner, inliner, resultInfo, actualArgTypes, methodSymbol, site, env);
            // Any deferred diagnostic means inference failed for these arguments.
            if (!handler.getDiagnostics().isEmpty()) {
                throw new InferException(handler.getDiagnostics());
            }
            return result;
        } finally {
            Log.instance(inliner.getContext()).popDiagnosticHandler(handler);
        }
    }
}
public class Evaluators {
    /**
     * Return true if <em>all</em> evaluators are in closed state
     * (and their processing queues are empty).
     */
    public synchronized boolean allEvaluatorsAreClosed() {
        // Lock the map as well as this instance to get a consistent view while
        // iterating. NOTE(review): this assumes writers also synchronize on
        // this.evaluators — confirm at the mutation sites.
        synchronized (this.evaluators) {
            for (final EvaluatorManager eval : this.evaluators.values()) {
                if (!eval.isClosed()) {
                    return false;
                }
            }
        }
        return true;
    }
}
public class ClassFields { /** * Get the table name . */ public String getTableNames ( boolean bAddQuotes ) { } }
return ( m_tableName == null ) ? Record . formatTableNames ( CLASS_FIELDS_FILE , bAddQuotes ) : super . getTableNames ( bAddQuotes ) ;
public class PasswordValidationCallback { /** * < p > Clear the password . < / p > */ public void clearPassword ( ) { } }
char [ ] password = this . password ; this . password = null ; if ( password != null ) { Arrays . fill ( password , ( char ) 0 ) ; }
public class Input {
    /**
     * Reads a short array in bulk. This may be more efficient than reading them individually.
     *
     * @param length number of shorts to read
     * @return the decoded values
     * @throws KryoException on a read/underflow error
     */
    public short[] readShorts(int length) throws KryoException {
        short[] array = new short[length];
        // Fast path: when the buffer can supply all length*2 bytes at once,
        // decode little-endian shorts directly from the backing array.
        if (optional(length << 1) == length << 1) {
            byte[] buffer = this.buffer;
            int p = this.position;
            for (int i = 0; i < length; i++, p += 2)
                array[i] = (short) ((buffer[p] & 0xFF) | ((buffer[p + 1] & 0xFF)) << 8);
            position = p;
        } else {
            // Slow path: fall back to reading one short at a time.
            for (int i = 0; i < length; i++)
                array[i] = readShort();
        }
        return array;
    }
}
public class TimeoutException {
    /**
     * Utility method that produces the message for a timeout.
     *
     * @param timeout the maximum time that was waited
     * @param unit the time unit of the timeout argument; must not be null
     * @return formatted string that contains the timeout information
     */
    public static String getTimeoutMessage(long timeout, TimeUnit unit) {
        // Fail fast on a null unit before formatting, matching the original contract.
        requireNonNull(unit, "unit");
        return String.format("Timeout of %d %s reached", timeout, unit);
    }
}
public class DateRangeCondition {
    /**
     * Returns the {@link SpatialOperation} representing the specified {@code String}.
     * Matching is case-insensitive; accepted values are "is_within", "contains" and "intersects".
     *
     * @param operation a {@code String} representing a {@link SpatialOperation}
     * @return the {@link SpatialOperation} representing the specified {@code String}
     */
    static SpatialOperation parseSpatialOperation(String operation) {
        if (operation == null) {
            throw new IndexException("Operation is required");
        } else if (operation.equalsIgnoreCase("is_within")) {
            return SpatialOperation.IsWithin;
        } else if (operation.equalsIgnoreCase("contains")) {
            return SpatialOperation.Contains;
        } else if (operation.equalsIgnoreCase("intersects")) {
            return SpatialOperation.Intersects;
        } else {
            throw new IndexException("Operation is invalid: {}", operation);
        }
    }
}
public class XPathUtils { /** * Evaluate XPath expression with result type Node . * @ param node * @ param xPathExpression * @ param nsContext * @ return */ public static Node evaluateAsNode ( Node node , String xPathExpression , NamespaceContext nsContext ) { } }
Node result = ( Node ) evaluateExpression ( node , xPathExpression , nsContext , XPathConstants . NODE ) ; if ( result == null ) { throw new CitrusRuntimeException ( "No result for XPath expression: '" + xPathExpression + "'" ) ; } return result ;
public class JSTalkBackFilter { /** * Creates producer with given producer id , category and subsystem and register it in producer registry . * @ param producerId id of the producer * @ param category name of the category * @ param subsystem name of the subsystem * @ return PageInBrowserStats producer */ private OnDemandStatsProducer < PageInBrowserStats > createProducer ( final String producerId , final String category , final String subsystem ) { } }
OnDemandStatsProducer < PageInBrowserStats > producer = limit == - 1 ? new OnDemandStatsProducer < PageInBrowserStats > ( producerId , category , subsystem , new PageInBrowserStatsFactory ( ) ) : new EntryCountLimitedOnDemandStatsProducer < PageInBrowserStats > ( producerId , category , subsystem , new PageInBrowserStatsFactory ( ) , limit ) ; ProducerRegistryFactory . getProducerRegistryInstance ( ) . registerProducer ( producer ) ; return producer ;
public class DocumentTreeUrl { /** * Get Resource Url for UpdateTreeDocumentContent * @ param documentListName Name of content documentListName to delete * @ param documentName The name of the document in the site . * @ return String Resource Url */ public static MozuUrl updateTreeDocumentContentUrl ( String documentListName , String documentName ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/content/documentlists/{documentListName}/documentTree/{documentName}/content?folderPath={folderPath}&folderId={folderId}" ) ; formatter . formatUrl ( "documentListName" , documentListName ) ; formatter . formatUrl ( "documentName" , documentName ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class FuncKey {

    /**
     * Executes the XSLT key() function.
     *
     * <p>Resolves the key name from the first argument and the lookup value(s)
     * from the second. When the second argument is a node-set with more than one
     * node, each node's string value is used as a lookup reference (duplicates
     * skipped) and the per-reference results are combined through a
     * {@link UnionPathIterator}; otherwise a single lookup is performed.
     *
     * @param xctxt the current execution context
     * @return an {@link XNodeSet} of the nodes matched by the key
     * @throws javax.xml.transform.TransformerException on evaluation failure
     */
    public XObject execute(XPathContext xctxt) throws javax.xml.transform.TransformerException {
        // The transformer owns the KeyManager that resolves key() lookups.
        TransformerImpl transformer = (TransformerImpl) xctxt.getOwnerObject();
        XNodeSet nodes = null;
        int context = xctxt.getCurrentNode();
        DTM dtm = xctxt.getDTM(context);
        int docContext = dtm.getDocumentRoot(context);
        if (DTM.NULL == docContext) {
            // Context has no owner document; the original code left this branch
            // empty (an error call here is commented out upstream).
        }
        // First argument: the key name, resolved against the namespace context.
        String xkeyname = getArg0().execute(xctxt).str();
        QName keyname = new QName(xkeyname, xctxt.getNamespaceContext());
        // Second argument: the lookup value (string or node-set).
        XObject arg = getArg1().execute(xctxt);
        boolean argIsNodeSetDTM = (XObject.CLASS_NODESET == arg.getType());
        KeyManager kmgr = transformer.getKeyManager();
        // Don't bother with node-set logic if the argument is only one node.
        if (argIsNodeSetDTM) {
            XNodeSet ns = (XNodeSet) arg;
            ns.setShouldCacheNodes(true);
            int len = ns.getLength();
            if (len <= 1)
                argIsNodeSetDTM = false;
        }
        if (argIsNodeSetDTM) {
            // Multi-node argument: look up each distinct string value and union results.
            Hashtable usedrefs = null;
            DTMIterator ni = arg.iter();
            int pos;
            UnionPathIterator upi = new UnionPathIterator();
            upi.exprSetParent(this);
            while (DTM.NULL != (pos = ni.nextNode())) {
                dtm = xctxt.getDTM(pos);
                XMLString ref = dtm.getStringValue(pos);
                if (null == ref)
                    continue;
                if (null == usedrefs)
                    usedrefs = new Hashtable();
                if (usedrefs.get(ref) != null) {
                    continue; // Reference already looked up; skip the duplicate.
                } else {
                    // ISTRUE is only a dummy marker value for the "seen" table.
                    usedrefs.put(ref, ISTRUE);
                }
                XNodeSet nl = kmgr.getNodeSetDTMByKey(xctxt, docContext, keyname, ref, xctxt.getNamespaceContext());
                nl.setRoot(xctxt.getCurrentNode(), xctxt);
                upi.addIterator(nl);
            }
            int current = xctxt.getCurrentNode();
            upi.setRoot(current, xctxt);
            nodes = new XNodeSet(upi);
        } else {
            // Single-value argument: one direct key lookup.
            XMLString ref = arg.xstr();
            nodes = kmgr.getNodeSetDTMByKey(xctxt, docContext, keyname, ref, xctxt.getNamespaceContext());
            nodes.setRoot(xctxt.getCurrentNode(), xctxt);
        }
        return nodes;
    }
}
public class MapMethods { /** * Sorts by Value a Associative Array in ascending order . * @ param associativeArray * @ return */ public static AssociativeArray sortAssociativeArrayByValueAscending ( AssociativeArray associativeArray ) { } }
ArrayList < Map . Entry < Object , Object > > entries = new ArrayList < > ( associativeArray . entrySet ( ) ) ; Collections . sort ( entries , ( Map . Entry < Object , Object > a , Map . Entry < Object , Object > b ) -> { Double va = TypeInference . toDouble ( a . getValue ( ) ) ; Double vb = TypeInference . toDouble ( b . getValue ( ) ) ; return va . compareTo ( vb ) ; } ) ; AssociativeArray sortedAssociativeArray = new AssociativeArray ( ) ; for ( Map . Entry < Object , Object > entry : entries ) { sortedAssociativeArray . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } return sortedAssociativeArray ;
public class BytecodeUtils {

    /**
     * Compares a string-valued expression to another expression using Soy's
     * {@code ==} semantics.
     *
     * @param stringExpr an expression that is known to be an unboxed string
     * @param other an expression to compare it to
     */
    private static Expression doEqualsString(SoyExpression stringExpr, SoyExpression other) {
        // This is compatible with SharedRuntime.compareString, which interestingly
        // makes == break transitivity. See b/21461181.
        SoyRuntimeType otherRuntimeType = other.soyRuntimeType();
        if (otherRuntimeType.isKnownStringOrSanitizedContent()) {
            if (stringExpr.isNonNullable()) {
                // Non-null receiver: a plain equals() call is safe.
                return stringExpr.invoke(MethodRef.EQUALS, other.unboxAsString());
            } else {
                // Nullable receiver: route through Objects.equals to avoid an NPE.
                return MethodRef.OBJECTS_EQUALS.invoke(stringExpr, other.unboxAsString());
            }
        }
        if (otherRuntimeType.isKnownNumber() && other.isNonNullable()) {
            // In this case, we actually try to convert stringExpr to a number.
            return MethodRef.RUNTIME_STRING_EQUALS_AS_NUMBER.invoke(stringExpr, other.coerceToDouble());
        }
        // We don't know what 'other' is; assume the worst and call out to the boxed
        // implementation for string comparisons.
        return MethodRef.RUNTIME_COMPARE_NULLABLE_STRING.invoke(stringExpr, other.box());
    }
}
public class ObjectPool { /** * Close the pool , destroying all objects */ public void close ( ) { } }
synchronized ( closeLock ) { if ( closed ) return ; closed = true ; } synchronized ( this ) { Iterator < T > allObjects = all . iterator ( ) ; while ( allObjects . hasNext ( ) ) { T poolObject = allObjects . next ( ) ; internalDestroyPoolObject ( poolObject ) ; } all . clear ( ) ; available . clear ( ) ; // Unlock all waiting threads notifyAll ( ) ; }
public class CPDefinitionOptionRelLocalServiceBaseImpl { /** * Adds the cp definition option rel to the database . Also notifies the appropriate model listeners . * @ param cpDefinitionOptionRel the cp definition option rel * @ return the cp definition option rel that was added */ @ Indexable ( type = IndexableType . REINDEX ) @ Override public CPDefinitionOptionRel addCPDefinitionOptionRel ( CPDefinitionOptionRel cpDefinitionOptionRel ) { } }
cpDefinitionOptionRel . setNew ( true ) ; return cpDefinitionOptionRelPersistence . update ( cpDefinitionOptionRel ) ;
public class V1InstanceGetter { /** * Get Messages filtered by the criteria specified in the passed in filter . * @ param filter Limit the items returned . If null , then all items returned . * @ return Collection of items as specified in the filter . */ public Collection < Message > messages ( MessageFilter filter ) { } }
return get ( Message . class , ( filter != null ) ? filter : new MessageFilter ( ) ) ;
public class BitvUnit {

    /**
     * JUnit assertion verifying an
     * {@link com.gargoylesoftware.htmlunit.html.HtmlPage} instance for
     * accessibility.
     *
     * @param htmlPage the page under test
     * @param testable the accessibility rule(s) to apply
     */
    public static void assertAccessibility(HtmlPage htmlPage, Testable testable) {
        // Delegate to Hamcrest: fails the test with a diff when the page is not
        // compliant with the given rule(s).
        assertThat(htmlPage, is(compliantTo(testable)));
    }
}
public class DeleteBackupRequestMarshaller {

    /**
     * Marshalls the given request into the protocol marshaller.
     *
     * @param deleteBackupRequest the request to marshall; must not be null
     * @param protocolMarshaller the target protocol marshaller
     */
    public void marshall(DeleteBackupRequest deleteBackupRequest, ProtocolMarshaller protocolMarshaller) {
        if (deleteBackupRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Only the backup ARN field is marshalled for this request.
            protocolMarshaller.marshall(deleteBackupRequest.getBackupArn(), BACKUPARN_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class FnNumber { /** * It converts the input into a { @ link BigDecimal } using the given scale and { @ link RoundingMode } * @ param scale the scale to be used * @ param roundingMode the { @ link RoundingMode } to round the input with * @ return the { @ link BigDecimal } */ public static final Function < Number , BigDecimal > toBigDecimal ( final int scale , final RoundingMode roundingMode ) { } }
return new ToBigDecimal ( scale , roundingMode ) ;
public class TranscoderWrapperStatisticsSupport { /** * { @ inheritDoc } */ public CachedData encode ( final Object object ) { } }
final CachedData result = _delegate . encode ( object ) ; _statistics . register ( StatsType . CACHED_DATA_SIZE , result . getData ( ) . length ) ; return result ;
public class FrameManager { /** * Removes a frameView from the list and exits if it was the last * @ param frameView NavigationFrameView to remove from the list */ private void removeNavigationFrameView ( SwingFrame frameView ) { } }
navigationFrames . remove ( frameView ) ; if ( navigationFrames . size ( ) == 0 ) { System . exit ( 0 ) ; }
public class ObjectFactory {

    /**
     * Creates an instance of {@link JAXBElement}{@code <}{@link Bill}{@code >}.
     *
     * @param value the Bill payload to wrap
     * @return a JAXBElement wrapping the given Bill
     */
    @XmlElementDecl(namespace = "http://schema.intuit.com/finance/v3", name = "Bill", substitutionHeadNamespace = "http://schema.intuit.com/finance/v3", substitutionHeadName = "IntuitObject")
    public JAXBElement<Bill> createBill(Bill value) {
        // The null argument is the element's scope: null means a global (top-level)
        // element declaration.
        return new JAXBElement<Bill>(_Bill_QNAME, Bill.class, null, value);
    }
}
public class FuzzySymbolicVariableConstraintSolver {

    /**
     * Returns whether {@code st} occurs in the array {@code s}.
     *
     * @param s the array to search
     * @param st the value to look for
     * @return true if {@code st} is an element of {@code s}
     */
    private boolean isFound(String[] s, String st) {
        // BUG FIX: the original aliased the parameter ("String[] t = s") and then
        // sorted it, silently reordering the caller's array as a side effect.
        // Sorting a defensive copy preserves the lookup while leaving the
        // caller's array untouched.
        String[] sorted = s.clone();
        Arrays.sort(sorted);
        return Arrays.binarySearch(sorted, st) >= 0;
    }
}
public class SARLLabelProvider { /** * Replies the image for the given element . * < p > This function is a Xtext dispatch function for { @ link # imageDescriptor ( Object ) } . * @ param element the element . * @ return the image descriptor . * @ see # imageDescriptor ( Object ) */ protected ImageDescriptor imageDescriptor ( SarlCapacity element ) { } }
final JvmDeclaredType jvmElement = this . jvmModelAssociations . getInferredType ( element ) ; return this . images . forCapacity ( element . getVisibility ( ) , this . adornments . get ( jvmElement ) ) ;
public class CmsFlexResponse {

    /**
     * Method overload from the standard HttpServletRequest API.
     *
     * <p>Routes the header either into the element buffer, the local header list,
     * or the parent response, depending on the caching/include state of this
     * Flex response. Content-Type is handled specially via
     * {@link #setContentType(String)}.
     *
     * @see javax.servlet.http.HttpServletResponse#setHeader(java.lang.String, java.lang.String)
     */
    @Override
    public void setHeader(String name, String value) {
        // A suspended response ignores all header changes.
        if (isSuspended()) {
            return;
        }
        // Content-Type gets dedicated handling and bypasses the header lists.
        if (CmsRequestUtil.HEADER_CONTENT_TYPE.equalsIgnoreCase(name)) {
            setContentType(value);
            return;
        }
        // When caching is required and this is not an include, the header is also
        // recorded in the element buffer (note: this is in addition to, not instead
        // of, the buffer/parent handling below).
        if (m_cachingRequired && !m_includeMode) {
            setHeaderList(m_bufferHeaders, name, value);
            if (LOG.isDebugEnabled()) {
                LOG.debug(Messages.get().getBundle().key(Messages.LOG_FLEXRESPONSE_SETTING_HEADER_IN_ELEMENT_BUFFER_2, name, value));
            }
        }
        if (m_writeOnlyToBuffer) {
            // Buffered mode: keep the header locally instead of touching the wrapped response.
            setHeaderList(m_headers, name, value);
            if (LOG.isDebugEnabled()) {
                LOG.debug(Messages.get().getBundle().key(Messages.LOG_FLEXRESPONSE_SETTING_HEADER_IN_HEADERS_2, name, value));
            }
        } else {
            // Pass-through mode: forward the header to the wrapped (parent) response.
            if (LOG.isDebugEnabled()) {
                LOG.debug(Messages.get().getBundle().key(Messages.LOG_FLEXRESPONSE_SETTING_HEADER_IN_PARENT_RESPONSE_2, name, value));
            }
            m_res.setHeader(name, value);
        }
    }
}
public class SharedObject { /** * Unregister event listener * @ param listener * Event listener */ protected void unregister ( IEventListener listener ) { } }
log . debug ( "unregister - listener: {}" , listener ) ; if ( listeners . remove ( listener ) ) { listenerStats . decrement ( ) ; }
public class CmsDbSettingsPanel {

    /**
     * Saves form field data to the setup bean.
     *
     * <p>Copies the values of all visible form fields into the setup bean, then
     * builds the request-parameter style map the bean expects and hands it over
     * via {@code setDbParamaters}.
     */
    public void saveToSetupBean() {
        Map<String, String[]> result = new HashMap<String, String[]>();
        CmsSetupBean bean = m_setupBean;
        // Only fields that are visible for the selected database are transferred.
        if (m_dbCreateUser.isVisible()) {
            bean.setDbCreateUser(m_dbCreateUser.getValue());
        }
        if (m_dbCreatePwd.isVisible()) {
            bean.setDbCreatePwd(m_dbCreatePwd.getValue());
        }
        if (m_dbWorkUser.isVisible()) {
            bean.setDbWorkUser(m_dbWorkUser.getValue());
        }
        if (m_templateDb.isVisible()) {
            setDbProp("templateDb", m_templateDb.getValue());
        }
        if (m_dbWorkPwd.isVisible()) {
            bean.setDbWorkPwd(m_dbWorkPwd.getValue());
        }
        if (m_dbCreateConStr.isVisible()) {
            bean.setDbCreateConStr(m_dbCreateConStr.getValue());
        }
        if (m_dbName.isVisible()) {
            bean.setDb(m_dbName.getValue());
        }
        // Build the parameter map consumed by the setup bean.
        result.put("dbCreateConStr", new String[] {m_dbCreateConStr.getValue()});
        result.put("dbName", new String[] {bean.getDatabase()});
        // NOTE(review): "dbProduct" is filled with bean.getDatabase() as well —
        // confirm this is intentional and not meant to be a product identifier.
        result.put("dbProduct", new String[] {bean.getDatabase()});
        result.put("dbProvider", new String[] {"sql"});
        // NOTE(review): this second "dbName" put overwrites the earlier one
        // (bean.getDatabase()) with the raw field value — verify which is intended.
        result.put("dbName", new String[] {m_dbName.getValue()});
        result.put("db", new String[] {bean.getDb()});
        result.put("createDb", new String[] {Boolean.toString(m_createDb.getValue().booleanValue())});
        result.put("createTables", new String[] {Boolean.toString(m_createTables.getValue().booleanValue())});
        result.put("jdbcDriver", new String[] {dbProp("driver")});
        result.put("templateDb", new String[] {dbProp("templateDb")});
        result.put("dbCreateUser", new String[] {bean.getDbCreateUser()});
        result.put("dbCreatePwd", new String[] {bean.getDbCreatePwd()});
        result.put("dbWorkUser", new String[] {bean.getDbWorkUser()});
        result.put("dbWorkPwd", new String[] {bean.getDbWorkPwd()});
        result.put("dbDefaultTablespace", new String[] {m_defaultTablespace.getValue()});
        result.put("dbTemporaryTablespace", new String[] {m_temporaryTablespace.getValue()});
        result.put("dbIndexTablespace", new String[] {m_indexTablespace.getValue()});
        result.put("submit", new String[] {Boolean.TRUE.toString()});
        bean.setDbParamaters(result, bean.getDatabase(), null, null);
    }
}
public class RuleSessionImpl {

    /**
     * Excludes the given proxy field under the given key.
     *
     * @param ruleContext the rule context of the exclusion
     * @param key the exclusion key
     * @param proxyField the proxy field to exclude
     * @see nz.co.senanque.rules.RuleSession#exclude(nz.co.senanque.rules.RuleContext, java.lang.String, nz.co.senanque.validationengine.ProxyField)
     */
    @InternalFunction(precedence = 1, isAssign = true)
    public void exclude(RuleContext ruleContext, String key, RuleProxyField proxyField) {
        // Delegate to the overload that takes the arguments in reversed order.
        exclude(proxyField, key, ruleContext);
    }
}
public class ScreenFullAwt { /** * Format resolution to string . * @ param resolution The resolution reference . * @ param depth The depth reference . * @ return The formatted string . */ private static String formatResolution ( Resolution resolution , int depth ) { } }
return new StringBuilder ( MIN_LENGTH ) . append ( String . valueOf ( resolution . getWidth ( ) ) ) . append ( Constant . STAR ) . append ( String . valueOf ( resolution . getHeight ( ) ) ) . append ( Constant . STAR ) . append ( depth ) . append ( Constant . SPACE ) . append ( Constant . AT ) . append ( String . valueOf ( resolution . getRate ( ) ) ) . append ( Constant . UNIT_RATE ) . toString ( ) ;
public class FakeTable {

    /**
     * Adds this record (always called from the record class).
     *
     * <p>Temporarily disables the record's field and record listeners (except the
     * record-changed handler), forwards the add to the next table in the chain,
     * and restores the original open mode and listener state afterwards.
     *
     * @param record the record to add
     * @exception DBException on a file/database error
     */
    public void doAdd(Record record) throws DBException {
        record = this.moveRecordToBase(record);
        // Save the current open mode and listener state so they can be restored in finally.
        int iOpenMode = record.getOpenMode();
        RecordChangedHandler recordChangeListener = (RecordChangedHandler) record.getListener(RecordChangedHandler.class);
        Object[] rgobjEnabledFields = record.setEnableFieldListeners(false);
        boolean[] rgbEnabled = record.setEnableListeners(false);
        if (recordChangeListener != null)
            // This listener may be needed (for an update) to do the
            // "update = ID AND = LastChangeDate" handling.
            recordChangeListener.setEnabledListener(true);
        try {
            record.setOpenMode(0); // Normal add
            this.getNextTable().add(record);
        } catch (DBException ex) {
            throw ex;
        } finally {
            // Always restore the record's original mode and listener state.
            record.setOpenMode(iOpenMode);
            record.setEnableListeners(rgbEnabled);
            record.setEnableFieldListeners(rgobjEnabledFields);
        }
    }
}
public class HBeanPredecessors { /** * If this key value is of predecessor familiy type . */ public static boolean isPredecessor ( KeyValue kv ) { } }
if ( Bytes . equals ( kv . getFamily ( ) , PRED_COLUMN_FAMILY ) ) { return true ; } return false ;
public class ComparatorCompat {

    /**
     * Adds a comparator, that uses a function to extract a {@code long} sort
     * key, to the chain.
     *
     * @param keyExtractor the function that extracts the sort key
     * @return the new {@code ComparatorCompat} instance
     */
    @NotNull
    public ComparatorCompat<T> thenComparingLong(@NotNull ToLongFunction<? super T> keyExtractor) {
        // Build a long-key comparator and append it to the existing chain.
        return thenComparing(comparingLong(keyExtractor));
    }
}
public class FailoverGroupsInner {

    /**
     * Fails over from the current primary server to this server. This operation
     * might result in data loss.
     *
     * @param resourceGroupName the name of the resource group that contains the resource
     * @param serverName the name of the server containing the failover group
     * @param failoverGroupName the name of the failover group
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the FailoverGroupInner object
     */
    public Observable<FailoverGroupInner> beginForceFailoverAllowDataLossAsync(String resourceGroupName, String serverName, String failoverGroupName) {
        // Unwrap the ServiceResponse so subscribers only see the body.
        return beginForceFailoverAllowDataLossWithServiceResponseAsync(resourceGroupName, serverName, failoverGroupName).map(new Func1<ServiceResponse<FailoverGroupInner>, FailoverGroupInner>() {
            @Override
            public FailoverGroupInner call(ServiceResponse<FailoverGroupInner> response) {
                return response.body();
            }
        });
    }
}
public class KuteIO { /** * Read a * { @ link slieb . kute . api . Resource . Readable } resource from its input stream . This is almost like { @ link KuteIO # readResource ( Resource . Readable ) } , * except that it uses the { @ link slieb . kute . api . Resource . Readable # getInputStream ( ) } method instead . * @ param resource An { @ link slieb . kute . api . Resource . Readable } resource . * @ return The string result of reading { @ link slieb . kute . api . Resource . Readable # getInputStream ( ) } * @ throws IOException If there is an exception getting the inputStream or reading from it . */ public static String readResourceWithInputStream ( Resource . Readable resource ) throws IOException { } }
try ( InputStream inputStream = resource . getInputStream ( ) ) { return IOUtils . toString ( inputStream ) ; }
public class FilterByStatusLayout {

    /**
     * Creates and wires the status-filter buttons.
     *
     * <p>Builds one button per target update status (plus an "overdue" button),
     * applies the shared button style, tags each button with an identifying data
     * string, and registers this layout as click listener for all of them.
     */
    private void getTargetFilterStatuses() {
        unknown = SPUIComponentProvider.getButton(UIComponentIdProvider.UNKNOWN_STATUS_ICON, TargetUpdateStatus.UNKNOWN.toString(), i18n.getMessage(UIMessageIdProvider.TOOLTIP_TARGET_STATUS_UNKNOWN), SPUIDefinitions.SP_BUTTON_STATUS_STYLE, false, FontAwesome.SQUARE, SPUIButtonStyleSmall.class);
        inSync = SPUIComponentProvider.getButton(UIComponentIdProvider.INSYNCH_STATUS_ICON, TargetUpdateStatus.IN_SYNC.toString(), i18n.getMessage(UIMessageIdProvider.TOOLTIP_STATUS_INSYNC), SPUIDefinitions.SP_BUTTON_STATUS_STYLE, false, FontAwesome.SQUARE, SPUIButtonStyleSmall.class);
        pending = SPUIComponentProvider.getButton(UIComponentIdProvider.PENDING_STATUS_ICON, TargetUpdateStatus.PENDING.toString(), i18n.getMessage(UIMessageIdProvider.TOOLTIP_STATUS_PENDING), SPUIDefinitions.SP_BUTTON_STATUS_STYLE, false, FontAwesome.SQUARE, SPUIButtonStyleSmall.class);
        error = SPUIComponentProvider.getButton(UIComponentIdProvider.ERROR_STATUS_ICON, TargetUpdateStatus.ERROR.toString(), i18n.getMessage(UIMessageIdProvider.TOOLTIP_STATUS_ERROR), SPUIDefinitions.SP_BUTTON_STATUS_STYLE, false, FontAwesome.SQUARE, SPUIButtonStyleSmall.class);
        registered = SPUIComponentProvider.getButton(UIComponentIdProvider.REGISTERED_STATUS_ICON, TargetUpdateStatus.REGISTERED.toString(), i18n.getMessage(UIMessageIdProvider.TOOLTIP_STATUS_REGISTERED), SPUIDefinitions.SP_BUTTON_STATUS_STYLE, false, FontAwesome.SQUARE, SPUIButtonStyleSmall.class);
        overdue = SPUIComponentProvider.getButton(UIComponentIdProvider.OVERDUE_STATUS_ICON, OVERDUE_CAPTION, i18n.getMessage(UIMessageIdProvider.TOOLTIP_STATUS_OVERDUE), SPUIDefinitions.SP_BUTTON_STATUS_STYLE, false, FontAwesome.SQUARE, SPUIButtonStyleSmall.class);
        applyStatusBtnStyle();
        // The "data" tag identifies which filter button was clicked in the shared handler.
        unknown.setData("filterStatusOne");
        inSync.setData("filterStatusTwo");
        pending.setData("filterStatusThree");
        error.setData("filterStatusFour");
        registered.setData("filterStatusFive");
        overdue.setData("filterStatusSix");
        // This layout handles clicks for all status buttons.
        unknown.addClickListener(this);
        inSync.addClickListener(this);
        pending.addClickListener(this);
        error.addClickListener(this);
        registered.addClickListener(this);
        overdue.addClickListener(this);
    }
}
public class COFFFileHeader { /** * Reads the header ' s fields . */ private void read ( ) { } }
// define the specification format final int key = 0 ; final int description = 1 ; final int offset = 2 ; final int length = 3 ; SpecificationFormat format = new SpecificationFormat ( key , description , offset , length ) ; // read the header data try { data = IOUtil . readHeaderEntries ( COFFHeaderKey . class , format , COFF_SPEC_FILE , headerbytes , getOffset ( ) ) ; } catch ( IOException e ) { logger . error ( "unable to read coff specification: " + e . getMessage ( ) ) ; }
public class CalendarPeriod {

    /**
     * Returns whether the range [pFrom, pTo] is fully contained in one of the
     * configured periods.
     *
     * @param pFrom start of the queried range (compared as strings)
     * @param pTo end of the queried range (compared as strings)
     * @return true if some period contains the whole range, false otherwise
     */
    public boolean IsContained(String pFrom, String pTo) {
        if (periods == null) {
            throw new RuntimeException("Error Periods is Null");
        }
        for (int i = 0; i < periods.size(); i++) {
            Period period = periods.get(i);
            // NOTE(review): this early return assumes 'periods' is sorted
            // ascending by 'from' — once a period starts after pFrom, no later
            // period could contain the range. Confirm the ordering invariant.
            if (period.getFrom().compareTo(pFrom) > 0)
                return false;
            // Containment: period.from <= pFrom and pTo <= period.to.
            if ((pFrom.compareTo(period.getFrom()) >= 0) && (pTo.compareTo(period.getTo()) <= 0))
                return true;
        }
        return false;
    }
}
public class DeviceStatus { /** * Check if current connection is Wi - Fi . * @ param context Context to use * @ return true if current connection is Wi - Fi false otherwise */ public static boolean isCurrentConnectionWifi ( Context context ) { } }
ConnectivityManager cm = ( ConnectivityManager ) context . getSystemService ( Context . CONNECTIVITY_SERVICE ) ; if ( cm == null ) { return false ; } NetworkInfo info = cm . getActiveNetworkInfo ( ) ; return info != null && info . getType ( ) == ConnectivityManager . TYPE_WIFI ;
public class BasicWorkspaceManager {

    /**
     * Checks if a workspace with the given id was created before this call.
     *
     * @param id the workspace id to look up
     * @return true if a workspace with this id exists for the current thread
     */
    @Override
    public boolean checkIfWorkspaceExists(@NonNull String id) {
        // Make sure the per-thread backing state is initialized before reading it.
        ensureThreadExistense();
        return backingMap.get().containsKey(id);
    }
}
public class DynamoDbStoreTransaction {

    /**
     * Gets the expected value for a particular store/key/column triple.
     *
     * @param store the store the expected key-column value was recorded for
     * @param key the key to get the expected value for
     * @param column the column to get the expected value for
     * @return the expected value of the given key-column pair
     */
    public StaticBuffer get(final AbstractDynamoDbStore store, final StaticBuffer key, final StaticBuffer column) {
        // This method assumes the caller has called contains(..) and received a
        // positive response; otherwise one of the intermediate get(..) calls
        // returns null and the chained lookup throws a NullPointerException.
        return expectedValues.get(store).get(key).get(column);
    }
}
public class AppServiceEnvironmentsInner {

    /**
     * Gets usage metrics for a multi-role pool of an App Service Environment
     * (single page).
     *
     * @param resourceGroupName name of the resource group to which the resource belongs
     * @param name name of the App Service Environment
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;UsageInner&gt; object wrapped in {@link ServiceResponse} if successful
     */
    public Observable<ServiceResponse<Page<UsageInner>>> listMultiRoleUsagesSinglePageAsync(final String resourceGroupName, final String name) {
        // Fail fast on any missing required parameter before issuing the call.
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (name == null) {
            throw new IllegalArgumentException("Parameter name is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // Issue the REST call and convert the raw response into a typed page,
        // funneling any deserialization failure into the observable.
        return service.listMultiRoleUsages(resourceGroupName, name, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()).flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<UsageInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<UsageInner>>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<PageImpl<UsageInner>> result = listMultiRoleUsagesDelegate(response);
                    return Observable.just(new ServiceResponse<Page<UsageInner>>(result.body(), result.response()));
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
    }
}
public class APSPSolver {

    /**
     * Batch-creates intervals for many constraints at once.
     *
     * <p>Each constraint i adds the interval {@code in[i]} to the edge
     * {@code from[i] -> to[i]}. Existing edges are tightened (their old bounds
     * saved), new edges are tentatively created. If any constraint is
     * inconsistent, or the distance-matrix recomputation fails, all changes made
     * so far are rolled back and {@code false} is returned.
     *
     * @param in the interval bounds per constraint (INF values are clamped to the horizon)
     * @param from source time-point index per constraint
     * @param to destination time-point index per constraint
     * @return true if all constraints were applied consistently, false otherwise
     */
    private boolean cCreate(Bounds[] in, int[] from, int[] to) {
        // Saved bounds for edges that already existed (needed for rollback).
        long[] old_d = new long[in.length];
        long[] old_D = new long[in.length];
        // added[i] == true means constraint i created a brand-new edge.
        boolean[] added = new boolean[in.length];
        for (int i = 0; i < added.length; i++)
            added[i] = false;
        boolean rollback = false;
        int rollBackPoint = -1;
        for (int i = 0; i < in.length; i++) {
            // Conversion: clamp infinite bounds to the horizon [O, H].
            long min = in[i].min;
            long max = in[i].max;
            if (in[i].max == APSPSolver.INF)
                max = H - O;
            if (in[i].min == -APSPSolver.INF)
                min = -1 * (H - O);
            in[i] = new Bounds(min, max);
            // Checks: an empty interval or a self-loop makes the batch inconsistent.
            if (in[i].min > in[i].max) {
                rollback = true;
                rollBackPoint = i;
                break;
            }
            if (from[i] == to[i]) {
                rollback = true;
                rollBackPoint = i;
                break;
            }
            SimpleDistanceConstraint con = tPoints[from[i]].getOut(to[i]);
            if (con != null) {
                // Already existing edge: check intersection between active con and new con.
                if ((con.getMinimum() > in[i].max) || (con.getMaximum() < in[i].min)) {
                    rollback = true;
                    rollBackPoint = i;
                    break;
                }
                // Tighten the active constraint, remembering the old bounds for rollback.
                old_d[i] = con.getMinimum();
                old_D[i] = con.getMaximum();
                if (con.getMinimum() < in[i].min)
                    con.setMinimum(in[i].min);
                if (con.getMaximum() > in[i].max)
                    con.setMaximum(in[i].max);
            } else {
                // Non-existing arc: add a tentative new constraint.
                added[i] = true;
                con = new SimpleDistanceConstraint();
                con.setFrom(this.getVariable(from[i]));
                con.setTo(this.getVariable(to[i]));
                con.setMinimum(in[i].min);
                con.setMaximum(in[i].max);
                con.addInterval(new Bounds(in[i].min, in[i].max));
                tPoints[from[i]].setOut(to[i], con);
            }
        }
        if (rollback) {
            // Undo everything applied before the failing constraint, in reverse order.
            for (int i = rollBackPoint - 1; i >= 0; i--) {
                SimpleDistanceConstraint con = tPoints[from[i]].getOut(to[i]);
                if (!added[i]) {
                    // Rollback in case of an already existing edge: restore old bounds.
                    con.setMinimum(old_d[i]);
                    con.setMaximum(old_D[i]);
                } else {
                    // Rollback in case of a new edge: remove it entirely.
                    con.removeInterval(in[i]);
                    tPoints[from[i]].setOut(to[i], null);
                }
            }
            return false;
        }
        // Recompute the all-pairs distance matrix; on failure restore the backup
        // (if one was taken) and report inconsistency.
        if (backupDMatrixSimple)
            saveDMatrixInternal();
        if (!this.fromScratchDistanceMatrixComputation()) {
            if (backupDMatrixSimple)
                restoreDMatrixInternal();
            return false;
        }
        // Consistent: record the new intervals on the pre-existing edges too.
        for (int i = in.length - 1; i >= 0; i--) {
            if (!added[i]) {
                SimpleDistanceConstraint con = tPoints[from[i]].getOut(to[i]);
                con.addInterval(in[i]);
            }
        }
        // Refresh time-point bounds from the distance matrix: [lb, ub] = [-d(i,0), d(0,i)].
        for (int j = 0; j < MAX_USED + 1; j++)
            if (tPoints[j].isUsed() == true) {
                tPoints[j].setLowerBound(sum(-distance[j][0], O));
                tPoints[j].setUpperBound(sum(distance[0][j], O));
            }
        return true;
    }
}
public class BookKeeperJournalInputStream {

    /**
     * Opens a new stream for the next ledger entry.
     *
     * @return the entry's input stream, or {@code null} if the end of the ledger
     *         (as currently known) has been reached or the entry is absent
     * @throws IOException on unrecoverable BookKeeper errors or interruption
     */
    private InputStream nextEntryStream() throws IOException {
        long nextLedgerEntryId = currentStreamState.getNextLedgerEntryId();
        if (nextLedgerEntryId > maxLedgerEntryIdSeen) {
            // Refresh our view of the ledger tail before giving up.
            updateMaxLedgerEntryIdSeen();
            if (nextLedgerEntryId > maxLedgerEntryIdSeen) {
                // Return null if we've reached the end of the ledger: we can not
                // read beyond the end of the ledger and it is up to the caller to
                // either find the new "tail" of the ledger (if the ledger is in-
                // progress) or open the next ledger (if the ledger is finalized).
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Requesting to ledger entryId " + nextLedgerEntryId + ", but " + " maxLedgerEntryIdSeen is " + maxLedgerEntryIdSeen + ", ledger length is " + ledger.getLength());
                }
                return null;
            }
        }
        try {
            // Read exactly one entry (the range is [id, id]).
            Enumeration<LedgerEntry> entries = ledger.readEntries(nextLedgerEntryId, nextLedgerEntryId);
            currentStreamState.incrementNextLedgerEntryId();
            if (entries.hasMoreElements()) {
                LedgerEntry entry = entries.nextElement();
                if (entries.hasMoreElements()) {
                    // A single-id range must never yield more than one entry.
                    throw new IllegalStateException("More than one entry retrieved!");
                }
                currentStreamState.setOffsetInEntry(0);
                return entry.getEntryInputStream();
            }
        } catch (BKException e) {
            throw new IOException("Unrecoverable BookKeeper error reading entry " + nextLedgerEntryId, e);
        } catch (InterruptedException e) {
            // Restore the interrupt flag before converting to an IOException.
            Thread.currentThread().interrupt();
            throw new IOException("Interrupted reading BookKeeper entry " + nextLedgerEntryId, e);
        }
        return null;
    }
}
public class DigitalClockSkin {

    /**
     * Draws the time, optional date and alarm indicator color onto the canvas.
     *
     * @param TIME the time to render
     */
    private void drawTime(final ZonedDateTime TIME) {
        ctx.clearRect(0, 0, width, height);
        // draw the time
        if (clock.isTextVisible()) {
            ctx.setFill(textColor);
            ctx.setTextBaseline(VPos.CENTER);
            ctx.setTextAlign(TextAlignment.CENTER);
            if (Locale.US == clock.getLocale()) {
                // US locale: 12-hour AM/PM formatters with a slightly smaller font.
                ctx.setFont(Fonts.digital(0.5 * height));
                ctx.fillText(clock.isSecondsVisible() ? AMPM_HHMMSS_FORMATTER.format(TIME) : AMPM_HHMM_FORMATTER.format(TIME), centerX, clock.isDateVisible() ? height * 0.35 : centerY);
            } else {
                // Other locales: 24-hour formatters.
                ctx.setFont(Fonts.digital(0.6 * height));
                ctx.fillText(clock.isSecondsVisible() ? HHMMSS_FORMATTER.format(TIME) : HHMM_FORMATTER.format(TIME), centerX, clock.isDateVisible() ? height * 0.3 : centerY);
            }
        }
        // draw the date
        if (clock.isDateVisible()) {
            ctx.setFill(dateColor);
            ctx.setFont(Fonts.digital(0.34 * height));
            ctx.fillText(dateFormat.format(TIME), centerX, height * 0.77);
        }
        // draw the alarmOn icon: only the fill color is set here.
        if (clock.isAlarmsEnabled() && clock.getAlarms().size() > 0) {
            ctx.setFill(alarmColor);
        }
    }
}
public class ReladomoDeserializer { protected Object invokeStaticMethod ( Class classToInvoke , String methodName ) throws DeserializationException { } }
try { Method method = ReflectionMethodCache . getZeroArgMethod ( classToInvoke , methodName ) ; return method . invoke ( null , NULL_ARGS ) ; } catch ( Exception e ) { throw new DeserializationException ( "Could not invoke method " + methodName + " on class " + classToInvoke , e ) ; }
public class MutableURI {
    /**
     * Finds the smallest index at or after {@code offset} where any character of
     * {@code delims} occurs in {@code s}.
     *
     * @param s the string to search; may be null or empty
     * @param delims the candidate delimiter characters; may be null or empty
     * @param offset the index to start searching from; negative values are treated as 0
     * @return the earliest matching index, or -1 when there is no match, either string
     *         is null/empty, or {@code offset} is past the end of {@code s}
     */
    protected static int indexFirstOf(String s, String delims, int offset) {
        if (s == null || s.length() == 0) {
            return -1;
        }
        if (delims == null || delims.length() == 0) {
            return -1;
        }
        // Clamp the starting point into the valid range.
        if (offset < 0) {
            offset = 0;
        } else if (offset > s.length()) {
            return -1;
        }
        // A single forward scan that stops at the first character belonging to the
        // delimiter set is equivalent to taking the minimum over the per-delimiter
        // indexOf results, but touches each character at most once.
        for (int i = offset; i < s.length(); i++) {
            if (delims.indexOf(s.charAt(i)) >= 0) {
                return i;
            }
        }
        return -1;
    }
}
public class IterativeDataSet { /** * Registers an { @ link Aggregator } for the iteration . Aggregators can be used to maintain simple statistics during the * iteration , such as number of elements processed . The aggregators compute global aggregates : After each iteration step , * the values are globally aggregated to produce one aggregate that represents statistics across all parallel instances . * The value of an aggregator can be accessed in the next iteration . * Aggregators can be accessed inside a function via the { @ link AbstractFunction # getIterationRuntimeContext ( ) } method . * @ param name The name under which the aggregator is registered . * @ param aggregator The aggregator class . * @ return The IterativeDataSet itself , to allow chaining function calls . */ public IterativeDataSet < T > registerAggregator ( String name , Class < ? extends Aggregator < ? > > aggregator ) { } }
this . aggregators . registerAggregator ( name , aggregator ) ; return this ;
public class LocalTaskScheduler { /** * Tightens the lower bounds of the start variables (dStarts) of the d-slices hosted on this node. * TODO: detect whether the total capacity lets every task through (and make this constraint passive in that case). */ private void updateDStartsInf ( BitSet watchHosts ) throws ContradictionException { } }
// For each incoming d-slice i whose start is not yet instantiated and that is not tied to a
// c-slice on the current node: walk the sorted minimum-consumption profile from the latest time
// point backwards to find the latest moment 'lastT' at which starting i would exceed the node's
// capacity, then raise dStarts[i]'s lower bound to max(lastT, early.getLB()). If that bound
// update actually changed the domain, task i has an associated c-task, and the start is now
// instantiated, the host of the associated c-task is flagged in 'watchHosts' for re-examination.
// NOTE(review): presumably vIn/vInSize enumerate the d-slices assigned to this node -- confirm
// against the class invariants before relying on this description.
for ( int idx = 0 ; idx < vInSize . get ( ) ; idx ++ ) { int i = vIn . quickGet ( idx ) ; if ( ! dStarts [ i ] . isInstantiated ( ) && ! associatedToCSliceOnCurrentNode ( i ) ) { int lastT = - 1 ; for ( int x = sortedMinProfile . length - 1 ; x >= 0 ; x -- ) { int t = sortedMinProfile [ x ] ; if ( t <= dStarts [ i ] . getLB ( ) ) { break ; } int prevT = sortedMinProfile [ x - 1 ] ; if ( t <= dStarts [ i ] . getUB ( ) && exceedCapacity ( profilesMin , prevT , dUsages [ i ] ) ) { lastT = t ; break ; } } if ( dStarts [ i ] . updateLowerBound ( Math . max ( lastT , early . getLB ( ) ) , aCause ) && associateCTask [ i ] != NO_ASSOCIATIONS && dStarts [ i ] . isInstantiated ( ) ) { watchHosts . set ( cHosters [ associateCTask [ i ] ] . getValue ( ) ) ; } } }
public class SelectSubPlanAssembler { /** * Given a join node and plan - sub - graph for outer and inner sub - nodes , * construct the plan - sub - graph for that node . * @ param joinNode A parent join node . * @ param outerPlan The outer node plan - sub - graph . * @ param innerPlan The inner node plan - sub - graph . * @ return A completed plan - sub - graph * or null if a valid plan can not be produced for given access paths . */ private IndexSortablePlanNode getSelectSubPlanForJoin ( BranchNode joinNode , AbstractPlanNode outerPlan , AbstractPlanNode innerPlan ) { } }
// NOTE(review): this method chooses between a NestLoopJoin (NLJ) and a NestLoopIndexJoin (NLIJ)
// for the given join node, inserts a send/receive pair for the replicated-outer/partitioned-inner
// special case, and returns null for plan shapes that cannot be executed correctly (the caller
// then tries other access paths or join orders). The extensive inline comments below document the
// individual decisions; keep them in sync with the planner's partitioning rules when editing.
// Filter ( post - join ) expressions ArrayList < AbstractExpression > whereClauses = new ArrayList < > ( ) ; whereClauses . addAll ( joinNode . m_whereInnerList ) ; whereClauses . addAll ( joinNode . m_whereInnerOuterList ) ; if ( joinNode . getJoinType ( ) == JoinType . FULL ) { // For all other join types , the whereOuterList expressions were pushed down to the outer node whereClauses . addAll ( joinNode . m_whereOuterList ) ; } // The usual approach of calculating a local ( partial ) join result on each partition , // then sending and merging them with other partial results on the coordinator does not // ensure correct answers for some queries like : // SELECT * FROM replicated LEFT JOIN partitioned ON . . . // They require a " global view " of the partitioned working set in order to // properly identify which replicated rows need to be null - padded , // and to ensure that the same replicated row is not null - padded redundantly on multiple partitions . // Many queries using this pattern impractically require redistribution and caching of a considerable // subset of a partitioned table in preparation for a " coordinated " join . // Yet , there may be useful cases with sufficient constant - based partitioned table filtering // in the " ON clause " to keep the distributed working set size under control , like // SELECT * FROM replicated R LEFT JOIN partitioned P // ON R . key = = P . non _ partition _ key AND P . non _ partition _ key BETWEEN ? and ? ; // Such queries need to be prohibited by the planner if it can not guarantee the // correct results that require a " send before join " plan . // This could be the case if the replicated - to - partition join in these examples // were subject to another join with a partitioned table , like // SELECT * FROM replicated R LEFT JOIN partitioned P1 ON . . . // LEFT JOIN also _ partitioned P2 ON . . . assert ( joinNode . getRightNode ( ) != null ) ; JoinNode innerJoinNode = joinNode . 
getRightNode ( ) ; AccessPath innerAccessPath = innerJoinNode . m_currentAccessPath ; // We may need to add a send / receive pair to the inner plan for the special case . // This trick only works once per plan , BUT once the partitioned data has been // received on the coordinator , it can be treated as replicated data in later // joins , which MAY help with later outer joins with replicated data . boolean needInnerSendReceive = m_partitioning . requiresTwoFragments ( ) && ! innerPlan . hasReplicatedResult ( ) && outerPlan . hasReplicatedResult ( ) && joinNode . getJoinType ( ) != JoinType . INNER ; // When the inner plan is an IndexScan , there MAY be a choice of whether to join using a // NestLoopJoin ( NLJ ) or a NestLoopIndexJoin ( NLIJ ) . The NLJ will have an advantage over the // NLIJ in the cases where it applies , since it does a single access or iteration over the index // and caches the result , where the NLIJ does an index access or iteration for each outer row . // The NestLoopJoin applies when the inner IndexScan is driven only by parameter and constant // expressions determined at the start of the query . That requires that none of the IndexScan ' s // various expressions that drive the index access may reference columns from the outer row // - - they can only reference columns of the index ' s base table ( the indexed expressions ) // as well as constants and parameters . The IndexScan ' s " otherExprs " expressions that only // drive post - filtering are not an issue since the NestLoopJoin does feature per - outer - tuple // post - filtering on each pass over the cached index scan result . // The special case of an OUTER JOIN of replicated outer row data with a partitioned inner // table requires that the partitioned data be sent to the coordinator prior to the join . // This limits the join option to NLJ . The index scan must make a single index access on // each partition and cache the result at the coordinator for post - filtering . 
// This requires that the index access be based on parameters or constants only // - - the replicated outer row data will only be available later at the coordinator , // so it can not drive the per - partition index scan . // If the NLJ option is precluded for the usual reason ( outer - row - based indexing ) AND // the NLIJ is precluded by the special case ( OUTER JOIN of replicated outer rows and // partitioned inner rows ) this method returns null , effectively rejecting this indexed // access path for the inner node . Other access paths or join orders may prove more successful . boolean canHaveNLJ = true ; boolean canHaveNLIJ = true ; if ( innerPlan instanceof IndexScanPlanNode ) { if ( hasInnerOuterIndexExpression ( joinNode . getRightNode ( ) . getTableAlias ( ) , innerAccessPath . indexExprs , innerAccessPath . initialExpr , innerAccessPath . endExprs ) ) { canHaveNLJ = false ; } } else { canHaveNLIJ = false ; } if ( needInnerSendReceive ) { canHaveNLIJ = false ; } // Prohibit FULL join plans with distributed outer and replicated inner branches - // The join must happen on either a coordinator node or both tables must be joined on // partition columns if ( joinNode . getJoinType ( ) == JoinType . FULL && m_partitioning . requiresTwoFragments ( ) && ! outerPlan . hasReplicatedResult ( ) && innerPlan . hasReplicatedResult ( ) ) { canHaveNLIJ = false ; canHaveNLJ = false ; } AbstractJoinPlanNode ajNode = null ; if ( canHaveNLJ ) { NestLoopPlanNode nljNode = new NestLoopPlanNode ( ) ; // get all the clauses that join the applicable two tables // Copy innerAccessPath . joinExprs to leave it unchanged , // avoiding accumulation of redundant expressions when // joinClauses gets built up for various alternative plans . ArrayList < AbstractExpression > joinClauses = new ArrayList < > ( innerAccessPath . joinExprs ) ; if ( ( innerPlan instanceof IndexScanPlanNode ) || ( innerPlan instanceof NestLoopIndexPlanNode && innerPlan . 
getChild ( 0 ) instanceof MaterializedScanPlanNode ) ) { // InnerPlan is an IndexScan OR an NLIJ of a MaterializedScan // ( IN LIST ) and an IndexScan . In this case , the inner and // inner - outer non - index join expressions ( if any ) are in the // indexScan ' s otherExpr . The former should stay as IndexScanPlan // predicates but the latter need to be pulled up into NLJ // predicates because the IndexScan is executed once , not once // per outer tuple . ArrayList < AbstractExpression > otherExprs = new ArrayList < > ( ) ; // PLEASE do not update the " innerAccessPath . otherExprs " , it may be reused // for other path evaluation on the other outer side join . List < AbstractExpression > innerExpr = filterSingleTVEExpressions ( innerAccessPath . otherExprs , otherExprs ) ; joinClauses . addAll ( otherExprs ) ; IndexScanPlanNode scanNode = null ; if ( innerPlan instanceof IndexScanPlanNode ) { scanNode = ( IndexScanPlanNode ) innerPlan ; } else { assert ( innerPlan instanceof NestLoopIndexPlanNode ) ; scanNode = ( ( NestLoopIndexPlanNode ) innerPlan ) . getInlineIndexScan ( ) ; } scanNode . setPredicate ( innerExpr ) ; } else if ( innerJoinNode instanceof BranchNode && joinNode . getJoinType ( ) != JoinType . INNER ) { // If the innerJoinNode is a LEAF node OR if the join type is an INNER join , // the conditions that apply to the inner side // have been applied as predicates to the inner scan node already . // otherExpr of innerAccessPath comes from its parentNode ' s joinInnerList . // For Outer join ( LEFT or FULL ) , it could mean a join predicate on the table of // the inner node ONLY , that can not be pushed down . joinClauses . addAll ( innerAccessPath . otherExprs ) ; } nljNode . setJoinPredicate ( ExpressionUtil . combinePredicates ( joinClauses ) ) ; // combine the tails plan graph with the new head node nljNode . 
addAndLinkChild ( outerPlan ) ; // If successful in the special case , the NLJ plan must be modified to cause the // partitioned inner data to be sent to the coordinator prior to the join . // This is done by adding send and receive plan nodes between the NLJ and its // right child node . if ( needInnerSendReceive ) { // This trick only works once per plan . if ( outerPlan . hasAnyNodeOfClass ( AbstractReceivePlanNode . class ) || innerPlan . hasAnyNodeOfClass ( AbstractReceivePlanNode . class ) ) { return null ; } innerPlan = addSendReceivePair ( innerPlan ) ; } nljNode . addAndLinkChild ( innerPlan ) ; ajNode = nljNode ; } else if ( canHaveNLIJ ) { NestLoopIndexPlanNode nlijNode = new NestLoopIndexPlanNode ( ) ; IndexScanPlanNode innerNode = ( IndexScanPlanNode ) innerPlan ; // Set IndexScan predicate . The INNER join expressions for a FULL join come from // the innerAccessPath . joinExprs and need to be combined with the other join expressions innerNode . setPredicate ( innerAccessPath . joinExprs , innerAccessPath . otherExprs ) ; nlijNode . addInlinePlanNode ( innerPlan ) ; // combine the tails plan graph with the new head node nlijNode . addAndLinkChild ( outerPlan ) ; ajNode = nlijNode ; } else { m_recentErrorMsg = "Unsupported special case of complex OUTER JOIN between replicated outer table and partitioned inner table." ; return null ; } ajNode . setJoinType ( joinNode . getJoinType ( ) ) ; ajNode . setPreJoinPredicate ( ExpressionUtil . combinePredicates ( joinNode . m_joinOuterList ) ) ; ajNode . setWherePredicate ( ExpressionUtil . combinePredicates ( whereClauses ) ) ; ajNode . resolveSortDirection ( ) ; return ajNode ;
public class GenericScriptProvider { /** * 加载 * @ throws Exception */ protected final void load ( ) throws Exception { } }
synchronized ( this ) { Set < Class < ? > > classes = classProvider . getLoadedClasses ( ) ; initScriptClasses ( classes ) ; onScriptLoaded ( classes ) ; }
public class AbstractGeneratedSQLTransform { /** * / * ( non - Javadoc ) * @ see com . abubusoft . kripton . processor . sqlite . transform . AbstractSQLTransform # generateWriteParam2ContentValues ( com . squareup . javapoet . MethodSpec . Builder , com . abubusoft . kripton . processor . sqlite . model . SQLiteModelMethod , java . lang . String , com . squareup . javapoet . TypeName , com . abubusoft . kripton . processor . core . ModelProperty ) */ @ Override public void generateWriteParam2ContentValues ( Builder methodBuilder , SQLiteModelMethod method , String paramName , TypeName paramTypeName , ModelProperty property ) { } }
String methodName = method . getParent ( ) . generateJava2ContentSerializer ( paramTypeName ) ; methodBuilder . addCode ( "$L($L)" , methodName , paramName ) ;
public class TenantService { /** * Validate that the given modifications are allowed ; throw any transgressions found . */ private void validateTenantUpdate ( TenantDefinition oldTenantDef , TenantDefinition newTenantDef ) { } }
Utils . require ( oldTenantDef . getName ( ) . equals ( newTenantDef . getName ( ) ) , "Tenant name cannot be changed: %s" , newTenantDef . getName ( ) ) ; Map < String , Object > oldDBServiceOpts = oldTenantDef . getOptionMap ( "DBService" ) ; String oldDBService = oldDBServiceOpts == null ? null : ( String ) oldDBServiceOpts . get ( "dbservice" ) ; Map < String , Object > newDBServiceOpts = newTenantDef . getOptionMap ( "DBService" ) ; String newDBService = newDBServiceOpts == null ? null : ( String ) newDBServiceOpts . get ( "dbservice" ) ; Utils . require ( ( oldDBService == null && newDBService == null ) || oldDBService . equals ( newDBService ) , "'DBService.dbservice' parameter cannot be changed: tenant=%s, previous=%s, new=%s" , newTenantDef . getName ( ) , oldDBService , newDBService ) ;
public class CmsCmisTypeManager { /** * Refreshes the internal data if the last update was longer ago than the udpate interval . < p > */ private synchronized void refresh ( ) { } }
try { long now = System . currentTimeMillis ( ) ; if ( ( now - m_lastUpdate ) > UPDATE_INTERVAL ) { setup ( ) ; } } catch ( CmsException e ) { LOG . error ( e . getLocalizedMessage ( ) , e ) ; }
public class SchemeInformationImpl { /** * { @ inheritDoc } */ @ Override public void setSchemeIdentifier ( String schemeIdentifier ) { } }
XSURI uri = null ; if ( schemeIdentifier != null ) { uri = ( new XSURIBuilder ( ) ) . buildObject ( this . getElementQName ( ) . getNamespaceURI ( ) , SCHEME_IDENTIFIER_LOCAL_NAME , this . getElementQName ( ) . getPrefix ( ) ) ; uri . setValue ( schemeIdentifier ) ; } this . setSchemeIdentifier ( uri ) ;
public class Interceptor { /** * this method will be invoked after methodToBeInvoked is invoked */ protected Object doInvoke ( Object proxy , Method methodToBeInvoked , Object [ ] args ) throws Throwable { } }
Method m = getRealSubject ( ) . getClass ( ) . getMethod ( methodToBeInvoked . getName ( ) , methodToBeInvoked . getParameterTypes ( ) ) ; return m . invoke ( getRealSubject ( ) , args ) ;
public class SortWorker { /** * Add a pointer to the key value pair with the provided parameters . */ final void addPointer ( int keyPos , int keySize , int valuePos , int valueSize ) { } }
assert keyPos + keySize == valuePos ; int start = memoryBuffer . limit ( ) - POINTER_SIZE_BYTES ; memoryBuffer . putInt ( start , keyPos ) ; memoryBuffer . putInt ( start + 4 , valuePos ) ; memoryBuffer . putInt ( start + 8 , valueSize ) ; memoryBuffer . limit ( start ) ; valuesHeld ++ ;
public class CausticUtil { /** * Converts an AWT { @ link java . awt . Color } into a normalized float color . * @ param color The AWT color to convert * @ return The color as a 4 float vector */ public static Vector4f fromAWTColor ( Color color ) { } }
return new Vector4f ( color . getRed ( ) / 255f , color . getGreen ( ) / 255f , color . getBlue ( ) / 255f , color . getAlpha ( ) / 255f ) ;
public class Resource { /** * Returns true if field corresponding to fieldID is set ( has been assigned a value ) and false otherwise */ public boolean isSet ( _Fields field ) { } }
if ( field == null ) { throw new IllegalArgumentException ( ) ; } switch ( field ) { case ID : return isSetId ( ) ; case LOCATION : return isSetLocation ( ) ; } throw new IllegalStateException ( ) ;
public class TiffDocument { /** * Gets the ifd count . * @ return the ifd count */ public int getIfdCount ( ) { } }
int c = 0 ; if ( metadata != null && metadata . contains ( "IFD" ) ) c = getMetadataList ( "IFD" ) . size ( ) ; return c ;
public class BatchPreparedStatementExecutor { /** * Execute batch . * @ return execute results * @ throws SQLException SQL exception */ public int [ ] executeBatch ( ) throws SQLException { } }
final boolean isExceptionThrown = ExecutorExceptionHandler . isExceptionThrown ( ) ; SQLExecuteCallback < int [ ] > callback = new SQLExecuteCallback < int [ ] > ( getDatabaseType ( ) , isExceptionThrown ) { @ Override protected int [ ] executeSQL ( final RouteUnit routeUnit , final Statement statement , final ConnectionMode connectionMode ) throws SQLException { return statement . executeBatch ( ) ; } } ; List < int [ ] > results = executeCallback ( callback ) ; if ( isAccumulate ( ) ) { return accumulate ( results ) ; } else { return results . get ( 0 ) ; }
public class EtcdNettyClient { /** * Connect to server * @ param etcdRequest to request with * @ param < R > Type of response * @ throws IOException if request could not be sent . */ @ SuppressWarnings ( "unchecked" ) protected < R > void connect ( final EtcdRequest < R > etcdRequest ) throws IOException { } }
// Delegates to the two-argument overload, reusing the connection state tracked on the request's
// promise so a retry continues from where the previous attempt left off.
// NOTE(review): assumes getPromise() is non-null at this point -- confirm against the callers.
this . connect ( etcdRequest , etcdRequest . getPromise ( ) . getConnectionState ( ) ) ;
public class CoronaJobTrackerRunner { /** * Copies the job file to the working directory of the process that will be * started . */ @ SuppressWarnings ( "deprecation" ) private void localizeTaskConfiguration ( TaskTracker tracker , JobConf ttConf , String workDir , Task t , JobID jobID ) throws IOException { } }
Path jobFile = new Path ( t . getJobFile ( ) ) ; FileSystem systemFS = tracker . systemFS ; this . localizedJobFile = new Path ( workDir , jobID + ".xml" ) ; LOG . info ( "Localizing CJT configuration from " + jobFile + " to " + localizedJobFile ) ; systemFS . copyToLocalFile ( jobFile , localizedJobFile ) ; JobConf localJobConf = new JobConf ( localizedJobFile ) ; boolean modified = Task . saveStaticResolutions ( localJobConf ) ; if ( modified ) { FSDataOutputStream out = new FSDataOutputStream ( new FileOutputStream ( localizedJobFile . toUri ( ) . getPath ( ) ) ) ; try { localJobConf . writeXml ( out ) ; } catch ( IOException e ) { out . close ( ) ; throw e ; } } // Add the values from the job conf to the configuration of this runner this . conf . addResource ( localizedJobFile ) ;
public class DSLMapParser { /** * src / main / resources / org / drools / compiler / lang / dsl / DSLMap . g : 119:1 : entry : scope _ section ( meta _ section ) ? key _ section EQUALS ( value _ section ) ? ( EOL | EOF ) - > ^ ( VT _ ENTRY scope _ section ( meta _ section ) ? key _ section ( value _ section ) ? ) ; */ public final DSLMapParser . entry_return entry ( ) throws RecognitionException { } }
// NOTE(review): ANTLR-generated parser code for grammar rule 'entry' (DSLMap.g line 119).
// Do not edit by hand; regenerate from the grammar instead. The empty catch of
// RewriteEmptyStreamException below is part of the generated error-recovery pattern.
DSLMapParser . entry_return retval = new DSLMapParser . entry_return ( ) ; retval . start = input . LT ( 1 ) ; Object root_0 = null ; Token EQUALS7 = null ; Token EOL9 = null ; Token EOF10 = null ; ParserRuleReturnScope scope_section4 = null ; ParserRuleReturnScope meta_section5 = null ; ParserRuleReturnScope key_section6 = null ; ParserRuleReturnScope value_section8 = null ; Object EQUALS7_tree = null ; Object EOL9_tree = null ; Object EOF10_tree = null ; RewriteRuleTokenStream stream_EQUALS = new RewriteRuleTokenStream ( adaptor , "token EQUALS" ) ; RewriteRuleTokenStream stream_EOL = new RewriteRuleTokenStream ( adaptor , "token EOL" ) ; RewriteRuleTokenStream stream_EOF = new RewriteRuleTokenStream ( adaptor , "token EOF" ) ; RewriteRuleSubtreeStream stream_meta_section = new RewriteRuleSubtreeStream ( adaptor , "rule meta_section" ) ; RewriteRuleSubtreeStream stream_key_section = new RewriteRuleSubtreeStream ( adaptor , "rule key_section" ) ; RewriteRuleSubtreeStream stream_scope_section = new RewriteRuleSubtreeStream ( adaptor , "rule scope_section" ) ; RewriteRuleSubtreeStream stream_value_section = new RewriteRuleSubtreeStream ( adaptor , "rule value_section" ) ; try { // src / main / resources / org / drools / compiler / lang / dsl / DSLMap . g : 119:8 : ( scope _ section ( meta _ section ) ? key _ section EQUALS ( value _ section ) ? ( EOL | EOF ) - > ^ ( VT _ ENTRY scope _ section ( meta _ section ) ? key _ section ( value _ section ) ? ) ) // src / main / resources / org / drools / compiler / lang / dsl / DSLMap . g : 119:10 : scope _ section ( meta _ section ) ? key _ section EQUALS ( value _ section ) ? ( EOL | EOF ) { pushFollow ( FOLLOW_scope_section_in_entry336 ) ; scope_section4 = scope_section ( ) ; state . _fsp -- ; if ( state . failed ) return retval ; if ( state . backtracking == 0 ) stream_scope_section . add ( scope_section4 . getTree ( ) ) ; // src / main / resources / org / drools / compiler / lang / dsl / DSLMap . 
g : 119:24 : ( meta _ section ) ? int alt3 = 2 ; int LA3_0 = input . LA ( 1 ) ; if ( ( LA3_0 == LEFT_SQUARE ) ) { int LA3_1 = input . LA ( 2 ) ; if ( ( LA3_1 == LITERAL ) ) { int LA3_3 = input . LA ( 3 ) ; if ( ( LA3_3 == RIGHT_SQUARE ) ) { int LA3_5 = input . LA ( 4 ) ; if ( ( synpred3_DSLMap ( ) ) ) { alt3 = 1 ; } } } else if ( ( LA3_1 == RIGHT_SQUARE ) ) { int LA3_4 = input . LA ( 3 ) ; if ( ( synpred3_DSLMap ( ) ) ) { alt3 = 1 ; } } } switch ( alt3 ) { case 1 : // src / main / resources / org / drools / compiler / lang / dsl / DSLMap . g : 119:24 : meta _ section { pushFollow ( FOLLOW_meta_section_in_entry338 ) ; meta_section5 = meta_section ( ) ; state . _fsp -- ; if ( state . failed ) return retval ; if ( state . backtracking == 0 ) stream_meta_section . add ( meta_section5 . getTree ( ) ) ; } break ; } pushFollow ( FOLLOW_key_section_in_entry341 ) ; key_section6 = key_section ( ) ; state . _fsp -- ; if ( state . failed ) return retval ; if ( state . backtracking == 0 ) stream_key_section . add ( key_section6 . getTree ( ) ) ; EQUALS7 = ( Token ) match ( input , EQUALS , FOLLOW_EQUALS_in_entry343 ) ; if ( state . failed ) return retval ; if ( state . backtracking == 0 ) stream_EQUALS . add ( EQUALS7 ) ; // src / main / resources / org / drools / compiler / lang / dsl / DSLMap . g : 119:57 : ( value _ section ) ? int alt4 = 2 ; int LA4_0 = input . LA ( 1 ) ; if ( ( ( LA4_0 >= COLON && LA4_0 <= DOT ) || LA4_0 == EQUALS || ( LA4_0 >= LEFT_CURLY && LA4_0 <= LITERAL ) || LA4_0 == RIGHT_SQUARE ) ) { alt4 = 1 ; } switch ( alt4 ) { case 1 : // src / main / resources / org / drools / compiler / lang / dsl / DSLMap . g : 119:57 : value _ section { pushFollow ( FOLLOW_value_section_in_entry345 ) ; value_section8 = value_section ( ) ; state . _fsp -- ; if ( state . failed ) return retval ; if ( state . backtracking == 0 ) stream_value_section . add ( value_section8 . getTree ( ) ) ; } break ; } // src / main / resources / org / drools / compiler / lang / dsl / DSLMap . 
g : 119:72 : ( EOL | EOF ) int alt5 = 2 ; int LA5_0 = input . LA ( 1 ) ; if ( ( LA5_0 == EOL ) ) { alt5 = 1 ; } else if ( ( LA5_0 == EOF ) ) { alt5 = 2 ; } else { if ( state . backtracking > 0 ) { state . failed = true ; return retval ; } NoViableAltException nvae = new NoViableAltException ( "" , 5 , 0 , input ) ; throw nvae ; } switch ( alt5 ) { case 1 : // src / main / resources / org / drools / compiler / lang / dsl / DSLMap . g : 119:73 : EOL { EOL9 = ( Token ) match ( input , EOL , FOLLOW_EOL_in_entry349 ) ; if ( state . failed ) return retval ; if ( state . backtracking == 0 ) stream_EOL . add ( EOL9 ) ; } break ; case 2 : // src / main / resources / org / drools / compiler / lang / dsl / DSLMap . g : 119:77 : EOF { EOF10 = ( Token ) match ( input , EOF , FOLLOW_EOF_in_entry351 ) ; if ( state . failed ) return retval ; if ( state . backtracking == 0 ) stream_EOF . add ( EOF10 ) ; } break ; } // AST REWRITE // elements : meta _ section , scope _ section , key _ section , value _ section // token labels : // rule labels : retval // token list labels : // rule list labels : // wildcard labels : if ( state . backtracking == 0 ) { retval . tree = root_0 ; RewriteRuleSubtreeStream stream_retval = new RewriteRuleSubtreeStream ( adaptor , "rule retval" , retval != null ? retval . getTree ( ) : null ) ; root_0 = ( Object ) adaptor . nil ( ) ; // 120:5 : - > ^ ( VT _ ENTRY scope _ section ( meta _ section ) ? key _ section ( value _ section ) ? ) { // src / main / resources / org / drools / compiler / lang / dsl / DSLMap . g : 120:8 : ^ ( VT _ ENTRY scope _ section ( meta _ section ) ? key _ section ( value _ section ) ? ) { Object root_1 = ( Object ) adaptor . nil ( ) ; root_1 = ( Object ) adaptor . becomeRoot ( ( Object ) adaptor . create ( VT_ENTRY , "VT_ENTRY" ) , root_1 ) ; adaptor . addChild ( root_1 , stream_scope_section . nextTree ( ) ) ; // src / main / resources / org / drools / compiler / lang / dsl / DSLMap . g : 120:33 : ( meta _ section ) ? 
if ( stream_meta_section . hasNext ( ) ) { adaptor . addChild ( root_1 , stream_meta_section . nextTree ( ) ) ; } stream_meta_section . reset ( ) ; adaptor . addChild ( root_1 , stream_key_section . nextTree ( ) ) ; // src / main / resources / org / drools / compiler / lang / dsl / DSLMap . g : 120:59 : ( value _ section ) ? if ( stream_value_section . hasNext ( ) ) { adaptor . addChild ( root_1 , stream_value_section . nextTree ( ) ) ; } stream_value_section . reset ( ) ; adaptor . addChild ( root_0 , root_1 ) ; } } retval . tree = root_0 ; } } retval . stop = input . LT ( - 1 ) ; if ( state . backtracking == 0 ) { retval . tree = ( Object ) adaptor . rulePostProcessing ( root_0 ) ; adaptor . setTokenBoundaries ( retval . tree , retval . start , retval . stop ) ; } } catch ( RecognitionException e ) { reportError ( e ) ; } catch ( RewriteEmptyStreamException e ) { } finally { // do for sure before leaving } return retval ;
public class AbstractFilter { /** * Adds a log message to be emitted when this AbstractFilter is initialized ( and the Log is made available to it ) . * @ param logLevel The logLevel of the message to emit . * @ param message The message to emit . */ protected final void addDelayedLogMessage ( final String logLevel , final String message ) { } }
this . delayedLogMessages . add ( new DelayedLogMessage ( logLevel , message ) ) ;
public class FastSet { /** * { @ inheritDoc } */ @ Override public void flip ( int e ) { } }
// Toggles bit e: locates the containing word (growing storage if needed), XORs the in-word bit,
// then repairs the cached first-empty-word index and -- when the size cache is being maintained
// (size >= 0) -- decrements or increments it depending on whether the bit was cleared or set.
// NOTE: (1 << e) is correct here because Java masks the shift distance to e & 31, which is
// exactly e's offset within its 32-bit word.
int wordIndex = wordIndex ( e ) ; expandTo ( wordIndex ) ; int mask = ( 1 << e ) ; words [ wordIndex ] ^= mask ; fixFirstEmptyWord ( ) ; if ( size >= 0 ) { if ( ( words [ wordIndex ] & mask ) == 0 ) { size -- ; } else { size ++ ; } }
public class Bits { /** * Return the index of the least bit position & ge ; x that is set . * If none are set , returns - 1 . This provides a nice way to iterate * over the members of a bit set : * < pre > { @ code * for ( int i = bits . nextBit ( 0 ) ; i > = 0 ; i = bits . nextBit ( i + 1 ) ) . . . * } < / pre > */ public int nextBit ( int x ) { } }
// Finds the word containing position x, masks off the bits below x within that word
// (~((1 << (x & wordmask)) - 1) zeroes the low x%wordsize bits), then walks forward word by
// word; the first non-zero word yields the answer via its trailing-zero count. Returns -1
// once the word array is exhausted.
Assert . check ( currentState != BitsState . UNKNOWN ) ; int windex = x >>> wordshift ; if ( windex >= bits . length ) { return - 1 ; } int word = bits [ windex ] & ~ ( ( 1 << ( x & wordmask ) ) - 1 ) ; while ( true ) { if ( word != 0 ) { return ( windex << wordshift ) + trailingZeroBits ( word ) ; } windex ++ ; if ( windex >= bits . length ) { return - 1 ; } word = bits [ windex ] ; }
public class DriverFactory { /** * Creates new { @ link Driver } . * < b > This method is protected only for testing < / b > */ protected InternalDriver createDriver ( SecurityPlan securityPlan , SessionFactory sessionFactory , MetricsProvider metricsProvider , Config config ) { } }
// Simple factory hook: assembles the driver from the already-built collaborators, passing the
// logging facility extracted from the config. Tests override this to substitute a stub driver.
return new InternalDriver ( securityPlan , sessionFactory , metricsProvider , config . logging ( ) ) ;
public class BigDecimalSQLTransform { /** * / * ( non - Javadoc ) * @ see com . abubusoft . kripton . processor . sqlite . transform . AbstractSQLTransform # generateWriteParam2ContentValues ( com . squareup . javapoet . MethodSpec . Builder , com . abubusoft . kripton . processor . sqlite . model . SQLiteModelMethod , java . lang . String , com . squareup . javapoet . TypeName , com . abubusoft . kripton . processor . core . ModelProperty ) */ @ Override public void generateWriteParam2ContentValues ( Builder methodBuilder , SQLiteModelMethod method , String paramName , TypeName paramTypeName , ModelProperty property ) { } }
// BigDecimal parameters need no conversion wrapper: emit the parameter name directly into the
// generated statement via JavaPoet's $L literal placeholder.
methodBuilder . addCode ( "$L" , paramName ) ;
public class BusLayerConstants { /** * Replies if the preferred color for bus stops selection . * @ return Color for selection */ @ Pure public static int getSelectionColor ( ) { } }
final Preferences prefs = Preferences . userNodeForPackage ( BusLayerConstants . class ) ; if ( prefs != null ) { final String str = prefs . get ( "SELECTION_COLOR" , null ) ; // $ NON - NLS - 1 $ if ( str != null ) { try { return Integer . valueOf ( str ) ; } catch ( Throwable exception ) { } } } return DEFAULT_SELECTION_COLOR ;
public class WorkspacesApi { /** * Get Workspace File * Retrieves a workspace file ( the binary ) . * @ param accountId The external account number ( int ) or account ID Guid . ( required ) * @ param workspaceId Specifies the workspace ID GUID . ( required ) * @ param folderId The ID of the folder being accessed . ( required ) * @ param fileId Specifies the room file ID GUID . ( required ) * @ return void */ public void getWorkspaceFile ( String accountId , String workspaceId , String folderId , String fileId ) throws ApiException { } }
// Convenience overload: delegates to the variant that also accepts per-call options,
// passing null to request the default behavior.
getWorkspaceFile ( accountId , workspaceId , folderId , fileId , null ) ;