signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ConsumeToEndOfSessionDecodingState { /** * { @ inheritDoc } */ public DecodingState finishDecode ( ProtocolDecoderOutput out ) throws Exception { } }
try { if ( buffer == null ) { buffer = IoBuffer . allocate ( 0 ) ; } buffer . flip ( ) ; return finishDecode ( buffer , out ) ; } finally { buffer = null ; }
public class Task { private void startNetworkingRequest ( ) { } }
Logger . v ( "Task[%s] start networking request" , this ) ; if ( mExecutor != null ) { final NetworkingPrioritizable < T > prioritizable = new NetworkingPrioritizable < T > ( this ) ; final NetworkingRequest < T > request = new NetworkingRequest < T > ( prioritizable , mPriority . ordinal ( ) , this ) ; mExecutor . executeNetworkingRequest ( request ) ; } else { throw new IllegalStateException ( Messages . NO_EXECUTOR ) ; }
public class Beans { /** * Creates an instance of a given type by choosing the best constructor that matches the given list of arguments . * @ param < T > the type of the object to create * @ param type the type of the object to create * @ param args the arguments to pass to the constructor * @ return the object created * @ throws NoSuchMethodException if an appropriate constructor cannot be found * @ throws IllegalAccessException if an appropriate constructor cannot be accessed * @ throws InvocationTargetException if errors occur while invoking the constructor * @ throws InstantiationException if the constructor itself fails in any way */ public static < T > T create ( Class < T > type , Object ... args ) throws NoSuchMethodException , IllegalAccessException , InvocationTargetException , InstantiationException { } }
Class < ? > [ ] argumentTypes = new Class < ? > [ args . length ] ; for ( int i = 0 ; i < args . length ; i ++ ) { argumentTypes [ i ] = args [ i ] . getClass ( ) ; } return type . cast ( choose ( type . getConstructors ( ) , new ConstructorParameterExtractor < T > ( ) , null , argumentTypes ) . newInstance ( args ) ) ;
public class MamManager { /** * Get the preferences stored in the server . * @ return the MAM preferences result * @ throws NoResponseException * @ throws XMPPErrorException * @ throws NotConnectedException * @ throws InterruptedException * @ throws NotLoggedInException */ public MamPrefsResult retrieveArchivingPreferences ( ) throws NoResponseException , XMPPErrorException , NotConnectedException , InterruptedException , NotLoggedInException { } }
MamPrefsIQ mamPrefIQ = new MamPrefsIQ ( ) ; return queryMamPrefs ( mamPrefIQ ) ;
public class CmsUpdateBean { /** * Prepares step 1 of the update wizard . < p > */ public void prepareUpdateStep1b ( ) { } }
if ( ! isInitialized ( ) ) { return ; } if ( ( m_dbUpdateThread != null ) && ( m_dbUpdateThread . isFinished ( ) ) ) { // update is already finished , just wait for client to collect final data return ; } if ( m_dbUpdateThread == null ) { m_dbUpdateThread = new CmsUpdateDBThread ( this ) ; } if ( ! m_dbUpdateThread . isAlive ( ) ) { m_dbUpdateThread . start ( ) ; }
public class SparseBaseLevel1 { /** * computes a vector - vector dot product . * @ param n number of accessed element * @ param alpha * @ param X an INDArray * @ param Y an INDArray * @ return the vector - vector dot product of X and Y */ @ Override public double dot ( long n , double alpha , INDArray X , INDArray Y ) { } }
if ( X instanceof BaseSparseNDArray ) { BaseSparseNDArray sparseX = ( BaseSparseNDArray ) X ; DataBuffer pointers = sparseX . getVectorCoordinates ( ) ; switch ( X . data ( ) . dataType ( ) ) { case DOUBLE : DefaultOpExecutioner . validateDataType ( DataType . DOUBLE , X , Y ) ; return ddoti ( n , X , pointers , Y ) ; case FLOAT : DefaultOpExecutioner . validateDataType ( DataType . FLOAT , X , Y ) ; return sdoti ( n , X , pointers , Y ) ; case HALF : DefaultOpExecutioner . validateDataType ( DataType . HALF , X , Y ) ; return hdoti ( n , X , pointers , Y ) ; default : } } throw new UnsupportedOperationException ( ) ;
public class MedianSolver { /** * { @ inheritDoc } * @ throws IllegalArgumentException if two runs of wrapped solver returns results of different size */ public void run ( ) throws IllegalArgumentException { } }
List < List < ResultEntry > > groups = null ; for ( int i = 0 ; i < this . repeatedRuns ; ++ i ) { this . solver . getResult ( ) . clearResults ( ) ; this . solver . run ( ) ; Result result = this . solver . getResult ( ) ; if ( groups == null ) { // create empty groups groups = new ArrayList < List < ResultEntry > > ( result . getResultEntries ( ) . size ( ) ) ; // allocate space in every group for ( int j = 0 ; j < result . getResultEntries ( ) . size ( ) ; ++ j ) { groups . add ( new ArrayList < ResultEntry > ( this . repeatedRuns ) ) ; } } if ( result . getResultEntries ( ) . size ( ) != groups . size ( ) ) throw new IllegalArgumentException ( String . format ( "Wrapped solver must return result with identical number of elements each run, " + "given %d in run %d, expected %d" , result . getResultEntries ( ) . size ( ) , i , groups . size ( ) ) ) ; for ( int j = 0 ; j < result . getResultEntries ( ) . size ( ) ; j ++ ) { ResultEntry resultEntry = result . getResultEntries ( ) . get ( j ) ; groups . get ( j ) . add ( resultEntry ) ; } } int middleIndex = this . repeatedRuns / 2 ; for ( List < ResultEntry > group : groups ) { Collections . sort ( group , new ResultEntryFitnessComparator ( ) ) ; if ( this . logger . isDebugEnabled ( ) ) { this . logger . debug ( "Sorted result entries, dump:" ) ; for ( int i = 0 ; i < group . size ( ) ; i ++ ) { ResultEntry resultEntry = group . get ( i ) ; this . logger . debug ( i + ". " + resultEntry . getBestFitness ( ) ) ; } } this . getResult ( ) . addEntry ( group . get ( middleIndex ) ) ; } if ( this . file != null ) { try { PrintStream printStream = new PrintStream ( file ) ; for ( List < ResultEntry > group : groups ) { printStream . printf ( "Sorted entries for %s/%s\n" , group . get ( 0 ) . getAlgorithm ( ) , group . get ( 0 ) . getProblem ( ) ) ; for ( int i = 0 ; i < group . size ( ) ; i ++ ) { printStream . printf ( "%03d. %.5f\n" , i , group . get ( i ) . 
getBestFitness ( ) ) ; } } } catch ( FileNotFoundException e ) { e . printStackTrace ( ) ; } }
public class Streams { /** * Copy all of the bytes from the input stream to the output stream wrapping * streams in buffers as needed . * @ param input Stream to read bytes from . * @ param output Stream to write bytes to . * @ return The total number of bytes copied . * @ throws IOException Failed to copy bytes . */ public static long copyb ( InputStream input , OutputStream output ) throws IOException { } }
if ( ! ( input instanceof BufferedInputStream ) ) { input = new BufferedInputStream ( input ) ; } if ( ! ( output instanceof BufferedOutputStream ) ) { output = new BufferedOutputStream ( output ) ; } long bytes = copy ( input , output , DEFAULT_BUFFER_SIZE ) ; output . flush ( ) ; return bytes ;
public class PcErrorMsgUtils { /** * Replace error msg . * @ param origMsg * the orig msg * @ return the string */ public static String replaceErrorMsg ( String origMsg ) { } }
String replaceMsg = origMsg ; for ( ERROR_TYPE errorType : ERROR_TYPE . values ( ) ) { if ( origMsg == null ) { replaceMsg = PcConstants . NA ; return replaceMsg ; } if ( origMsg . contains ( errorMapOrig . get ( errorType ) ) ) { replaceMsg = errorMapReplace . get ( errorType ) ; break ; } } return replaceMsg ;
public class ClientSidePreparedStatement { /** * { inheritdoc } . */ public int [ ] executeBatch ( ) throws SQLException { } }
checkClose ( ) ; int size = parameterList . size ( ) ; if ( size == 0 ) { return new int [ 0 ] ; } lock . lock ( ) ; try { executeInternalBatch ( size ) ; results . commandEnd ( ) ; return results . getCmdInformation ( ) . getUpdateCounts ( ) ; } catch ( SQLException sqle ) { throw executeBatchExceptionEpilogue ( sqle , size ) ; } finally { executeBatchEpilogue ( ) ; lock . unlock ( ) ; }
public class DynamicSheetMapper { /** * IFJAVA8 _ END */ private SheetMapper < T > getPoiMapper ( int startRow , Sheet sheet ) { } }
Row row = sheet . getRow ( startRow ) ; List < CsvColumnKey > keys = new ArrayList < CsvColumnKey > ( row . getLastCellNum ( ) - row . getFirstCellNum ( ) ) ; for ( short i = row . getFirstCellNum ( ) ; i <= row . getLastCellNum ( ) ; i ++ ) { Cell cell = row . getCell ( i ) ; if ( cell != null && cell . getCellType ( ) != Cell . CELL_TYPE_BLANK ) { keys . add ( new CsvColumnKey ( cell . getStringCellValue ( ) , i ) ) ; } } return getPoiMapper ( new MapperKey < CsvColumnKey > ( keys . toArray ( new CsvColumnKey [ 0 ] ) ) ) ;
public class ExpressionParser { /** * Parses the { @ literal < semver - expr > } non - terminal . * < pre > * { @ literal * < semver - expr > : : = " ( " < semver - expr > " ) " * | " ! " " ( " < semver - expr > " ) " * | < semver - expr > < more - expr > * | < range > * < / pre > * @ return the expression AST */ private CompositeExpression parseSemVerExpression ( ) { } }
CompositeExpression expr ; if ( tokens . positiveLookahead ( NOT ) ) { tokens . consume ( ) ; consumeNextToken ( LEFT_PAREN ) ; expr = not ( parseSemVerExpression ( ) ) ; consumeNextToken ( RIGHT_PAREN ) ; } else if ( tokens . positiveLookahead ( LEFT_PAREN ) ) { consumeNextToken ( LEFT_PAREN ) ; expr = parseSemVerExpression ( ) ; consumeNextToken ( RIGHT_PAREN ) ; } else { expr = parseRange ( ) ; } return parseMoreExpressions ( expr ) ;
public class CheckArg { /** * Asserts that the specified first object is the same as ( = = ) the specified second object . * @ param < T > * @ param argument The argument to assert as the same as < code > object < / code > . * @ param argumentName The name that will be used within the exception message for the argument should an exception be thrown * @ param object The object to assert as the same as < code > argument < / code > . * @ param objectName The name that will be used within the exception message for < code > object < / code > should an exception be * thrown ; if < code > null < / code > and < code > object < / code > is not < code > null < / code > , < code > object . toString ( ) < / code > will * be used . * @ throws IllegalArgumentException If the specified objects are not the same . */ public static < T > void isSame ( final T argument , String argumentName , final T object , String objectName ) { } }
if ( argument != object ) { if ( objectName == null ) objectName = getObjectName ( object ) ; throw new IllegalArgumentException ( CommonI18n . argumentMustBeSameAs . text ( argumentName , objectName ) ) ; }
public class TcpIpHandlerAdapter { /** * / * ( non - Javadoc ) * @ see org . apache . mina . core . service . IoHandlerAdapter # messageReceived ( org . apache . mina . core . session . IoSession , java . lang . Object ) */ @ Override public final void messageReceived ( final IoSession session , final Object message ) throws Exception { } }
final String str = message . toString ( ) ; // Parse the command String cmd = null ; final String [ ] args = str . trim ( ) . split ( "[\t ]" ) ; if ( args . length > 0 ) { cmd = args [ 0 ] ; } List < String > lstArg = new ArrayList < String > ( ) ; if ( args . length > 0 ) { boolean isArg = false ; int index = 0 ; for ( String val : args ) { if ( val . equals ( "-a" ) ) { lstArg . add ( val ) ; index ++ ; isArg = true ; } else { if ( val . length ( ) > 0 ) { if ( isArg && val . charAt ( 0 ) != '-' && val . indexOf ( '=' ) == - 1 ) { lstArg . set ( index - 1 , lstArg . get ( index - 1 ) + " " + val ) ; } else { lstArg . add ( val ) ; index ++ ; } } else { lstArg . set ( index - 1 , lstArg . get ( index - 1 ) + " " ) ; } } } } // Try to launch the command final TcpCommand command = TcpIpCommands . getCommands ( ) . get ( cmd ) ; String result = null ; // launch the command and get the result if ( command != null ) { if ( args . length > 1 ) { try { String [ ] finalArgs = new String [ lstArg . size ( ) ] ; for ( int i = 0 ; i < finalArgs . length ; i ++ ) { String [ ] argSplit = lstArg . get ( i ) . split ( "=" ) ; if ( argSplit . length == 2 && argSplit [ 1 ] . charAt ( 0 ) == '"' && argSplit [ 1 ] . charAt ( argSplit [ 1 ] . length ( ) - 1 ) == '"' ) { finalArgs [ i ] = argSplit [ 0 ] + "=" + argSplit [ 1 ] . substring ( 1 , argSplit [ 1 ] . length ( ) - 1 ) ; } else { finalArgs [ i ] = lstArg . get ( i ) ; } } result = command . process ( session , Arrays . copyOfRange ( finalArgs , 1 , finalArgs . length ) ) ; } catch ( Exception e ) { result = LaunchingMessageKind . ELAUNCH0007 . format ( cmd , e ) ; } } else { result = command . process ( session , new String [ ] { } ) ; } } else { session . write ( "command unknown :" + cmd ) ; } // If the command returned a result , write it for the user if ( result != null ) { session . write ( result ) ; } else { session . write ( "" ) ; } session . write ( ENDLINE ) ; session . write ( ENDLINE ) ; session . write ( "> " ) ;
public class TranslationMap { /** * This loads the translation files from the specified folder . */ public TranslationMap doImport ( File folder ) { } }
try { for ( String locale : LOCALES ) { TranslationHashMap trMap = new TranslationHashMap ( getLocale ( locale ) ) ; trMap . doImport ( new FileInputStream ( new File ( folder , locale + ".txt" ) ) ) ; add ( trMap ) ; } postImportHook ( ) ; return this ; } catch ( Exception ex ) { throw new RuntimeException ( ex ) ; }
public class LookupCoordinatorManager { /** * It is assumed that currLookupsStateOnNode " toLoad " and " toDrop " are disjoint . */ @ VisibleForTesting Set < String > getToBeDroppedFromNode ( LookupsState < LookupExtractorFactoryMapContainer > currLookupsStateOnNode , Map < String , LookupExtractorFactoryMapContainer > nodeTierLookupsToBe ) { } }
Set < String > toDrop = new HashSet < > ( ) ; // { currently loading / loaded on the node } - { currently pending deletion on node } - { lookups node should actually have } toDrop . addAll ( currLookupsStateOnNode . getCurrent ( ) . keySet ( ) ) ; toDrop . addAll ( currLookupsStateOnNode . getToLoad ( ) . keySet ( ) ) ; toDrop = Sets . difference ( toDrop , currLookupsStateOnNode . getToDrop ( ) ) ; toDrop = Sets . difference ( toDrop , nodeTierLookupsToBe . keySet ( ) ) ; return toDrop ;
public class QueriedGetResponseUnmarshaller { /** * { @ inheritDoc } */ @ Override protected void onBoolean ( Boolean bool , String fieldName , JsonParser jp ) { } }
log . trace ( fieldName + " " + bool ) ; if ( resultStarted && entityStarted && idFound && fieldName != null && bool != null ) { ClassUtil . setSilent ( getEntityInstance ( ) , fieldName , bool ) ; }
public class JMPath { /** * Build relative destination path path . * @ param destinationDirPath the destination dir path * @ param baseDirPath the base dir path * @ param sourcePath the source path * @ return the path */ public static Path buildRelativeDestinationPath ( Path destinationDirPath , Path baseDirPath , Path sourcePath ) { } }
Path extractSubPath = extractSubPath ( baseDirPath , sourcePath ) ; return destinationDirPath . resolve ( extractSubPath ) ;
public class Columns { /** * Returns the { @ link Column } identified by the specified name , or { @ code null } if not found . * @ param name The name of the { @ link Column } to be returned . * @ return The { @ link Column } identified by the specified name , or { @ code null } if not found . */ public Column getColumn ( String name ) { } }
for ( Column column : columns ) { if ( column . getName ( ) . equals ( name ) ) { return column ; } } return null ;
public class Arguments { /** * Sets a key / value pair to the collection of arguments . This guarantees only one value will be assigned to the * argument key . A { @ code null } value indicates the key should be removed . * If the key starts with { @ code - D } it ' s assumed it ' s a system property argument and the prefix will be stripped * from the key when checking for uniqueness . * @ param key the key for the argument * @ param value the value of the argument which may be { @ code null } */ public void set ( final String key , final String value ) { } }
if ( key != null ) { if ( value == null ) { map . remove ( key ) ; } else { final Argument argument ; if ( key . startsWith ( "-D" ) ) { argument = createSystemProperty ( key , value ) ; } else { argument = create ( key , value ) ; } set ( argument ) ; } }
public class KeyrefReader { /** * Read key definitions * @ param filename absolute URI to DITA map with key definitions * @ param doc key definition DITA map */ public void read ( final URI filename , final Document doc ) { } }
currentFile = filename ; rootScope = null ; // TODO : use KeyScope implementation that retains order KeyScope keyScope = readScopes ( doc ) ; keyScope = cascadeChildKeys ( keyScope ) ; // TODO : determine effective key definitions here keyScope = inheritParentKeys ( keyScope ) ; rootScope = resolveIntermediate ( keyScope ) ;
public class AbstractAzkabanServlet { /** * Retrieves a warn message from a cookie . azkaban . warn . message */ protected String getWarnMessageFromCookie ( final HttpServletRequest request ) { } }
final Cookie cookie = getCookieByName ( request , AZKABAN_WARN_MESSAGE ) ; if ( cookie == null ) { return null ; } return cookie . getValue ( ) ;
public class RMIRegistryManager { /** * Starts rmiregistry on the specified port . If property * " service . monitor . inProcess " is set to true in configSchema . xml , * then an in - process rmiregistry is started . * Otherwise rmiregistry will be started in a seperate process . * @ param port on which the rmiregistry needs to be started * @ return true if successful or it is already started , false otherwise */ public static boolean startRMIRegistry ( Configuration configuration , int port ) { } }
if ( isRMIRegistryRunning ( configuration , port ) ) return true ; if ( configuration . getBoolean ( FoundationMonitoringConstants . IN_PROC_RMI ) ) { return startInProcRMIRegistry ( port ) ; } else { return startOutProcRMIRegistry ( configuration , port ) ; }
public class ViewUtils { /** * Returns a path with unix style folder * separators that starts and ends with a " / " . * @ param path The path to normalizeFile * @ return The normalized path */ @ Nonnull public static String normalizeFolder ( @ Nullable String path ) { } }
if ( path == null ) { path = "" ; } else { path = normalizeFile ( path , null ) ; } if ( ! path . endsWith ( "/" ) ) { path = path + "/" ; } return path ;
public class UserContext { /** * Returns the managed user context . * @ return User context */ @ SuppressWarnings ( "unchecked" ) public static ISharedContext < IUser > getUserContext ( ) { } }
return ( ISharedContext < IUser > ) ContextManager . getInstance ( ) . getSharedContext ( UserContext . class . getName ( ) ) ;
public class Utils {
    /**
     * Lower bound binary search. Find the index to the first element in the list
     * that compares greater than or equal to key.
     *
     * @param <T>  Type of the input key.
     * @param list The list
     * @param key  The input key.
     * @param cmp  Comparator for the key.
     * @return The index to the desired element if it exists; or list.size() otherwise.
     */
    public static <T> int lowerBound(List<? extends T> list, T key, Comparator<? super T> cmp) {
        int lo = 0;
        int hi = list.size();
        // Invariant: elements before lo compare < key; elements at/after hi compare >= key.
        while (lo < hi) {
            // Unsigned shift avoids overflow for very large lo + hi.
            final int mid = (lo + hi) >>> 1;
            if (cmp.compare(list.get(mid), key) < 0) {
                lo = mid + 1;
            } else {
                hi = mid;
            }
        }
        return lo;
    }
}
public class ProxyManager { /** * Given a proxy URL returns a two element arrays containing the host name and the port * @ param url The proxy host URL . * @ param defPort The default proxy port * @ return An optional containing an array of the host name and the proxy port or empty when url is empty . */ private Optional < String [ ] > parseProxy ( String url , String defPort ) { } }
if ( ! Strings . isNullOrEmpty ( url ) ) { String [ ] result = new String [ 2 ] ; int p = url . indexOf ( "://" ) ; if ( p != - 1 ) url = url . substring ( p + 3 ) ; if ( ( p = url . indexOf ( '@' ) ) != - 1 ) url = url . substring ( p + 1 ) ; if ( ( p = url . indexOf ( ':' ) ) != - 1 ) { result [ 0 ] = url . substring ( 0 , p ) ; result [ 1 ] = url . substring ( p + 1 ) ; } else { result [ 0 ] = url ; result [ 1 ] = defPort ; } // remove trailing slash from the host name p = result [ 0 ] . indexOf ( '/' ) ; if ( p != - 1 ) { result [ 0 ] = result [ 0 ] . substring ( 0 , p ) ; } // remove trailing slash from the port number p = result [ 1 ] . indexOf ( '/' ) ; if ( p != - 1 ) { result [ 1 ] = result [ 1 ] . substring ( 0 , p ) ; } return Optional . of ( result ) ; } return Optional . empty ( ) ;
public class OutboundResourceAdapterImpl { /** * { @ inheritDoc } */ public OutboundResourceAdapter copy ( ) { } }
return new OutboundResourceAdapterImpl ( CopyUtil . cloneList ( connectionDefinition ) , transactionSupport , CopyUtil . cloneList ( authenticationMechanism ) , reauthenticationSupport , CopyUtil . cloneString ( id ) , CopyUtil . cloneString ( transactionSupportId ) , CopyUtil . cloneString ( reauthenticationSupportId ) ) ;
public class FlinkKafkaProducer011 { /** * For each checkpoint we create new { @ link FlinkKafkaProducer } so that new transactions will not clash * with transactions created during previous checkpoints ( { @ code producer . initTransactions ( ) } assures that we * obtain new producerId and epoch counters ) . */ private FlinkKafkaProducer < byte [ ] , byte [ ] > createTransactionalProducer ( ) throws FlinkKafka011Exception { } }
String transactionalId = availableTransactionalIds . poll ( ) ; if ( transactionalId == null ) { throw new FlinkKafka011Exception ( FlinkKafka011ErrorCode . PRODUCERS_POOL_EMPTY , "Too many ongoing snapshots. Increase kafka producers pool size or decrease number of concurrent checkpoints." ) ; } FlinkKafkaProducer < byte [ ] , byte [ ] > producer = initTransactionalProducer ( transactionalId , true ) ; producer . initTransactions ( ) ; return producer ;
public class BigMoney { /** * Returns a copy of this monetary value rounded to the specified scale without * changing the current scale . * Scale is described in { @ link BigDecimal } and represents the point below which * the monetary value is zero . Negative scales round increasingly large numbers . * Unlike { @ link # withScale ( int ) } , this scale of the result is unchanged . * < ul > * < li > Rounding ' EUR 45.23 ' to a scale of - 1 returns 40.00 or 50.00 depending on the rounding mode . * < li > Rounding ' EUR 45.23 ' to a scale of 0 returns 45.00 or 46.00 depending on the rounding mode . * < li > Rounding ' EUR 45.23 ' to a scale of 1 returns 45.20 or 45.30 depending on the rounding mode . * < li > Rounding ' EUR 45.23 ' to a scale of 2 has no effect ( it already has that scale ) . * < li > Rounding ' EUR 45.23 ' to a scale of 3 has no effect ( the scale is not increased ) . * < / ul > * This instance is immutable and unaffected by this method . * @ param scale the new scale * @ param roundingMode the rounding mode to use , not null * @ return the new instance with the amount converted to be positive , never null * @ throws ArithmeticException if the rounding fails */ public BigMoney rounded ( int scale , RoundingMode roundingMode ) { } }
MoneyUtils . checkNotNull ( roundingMode , "RoundingMode must not be null" ) ; if ( scale >= getScale ( ) ) { return this ; } int currentScale = amount . scale ( ) ; BigDecimal newAmount = amount . setScale ( scale , roundingMode ) . setScale ( currentScale ) ; return BigMoney . of ( currency , newAmount ) ;
public class BundleUtils { /** * Returns a optional boolean value . In other words , returns the value mapped by key if it exists and is a boolean . * The bundle argument is allowed to be { @ code null } . If the bundle is null , this method returns a fallback value . * @ param bundle a bundle . If the bundle is null , this method will return a fallback value . * @ param key a key for the value . * @ param fallback fallback value . * @ return a boolean value if exists , fallback value otherwise . * @ see android . os . Bundle # getBoolean ( String , boolean ) */ public static boolean optBoolean ( @ Nullable Bundle bundle , @ Nullable String key , boolean fallback ) { } }
if ( bundle == null ) { return fallback ; } return bundle . getBoolean ( key , fallback ) ;
public class PropTransitivity { @ Override public void propagate ( int evtmask ) throws ContradictionException { } }
int n = g . getNbMaxNodes ( ) ; for ( int i : g . getPotentialNodes ( ) ) { for ( int j = 0 ; j < n ; j ++ ) { if ( g . getMandSuccOrNeighOf ( i ) . contains ( j ) ) { arcEnforced ( i , j ) ; } else if ( ! g . getPotSuccOrNeighOf ( i ) . contains ( j ) ) { arcRemoved ( i , j ) ; } } } filter ( ) ; gdm . unfreeze ( ) ;
public class JCudaDriver { /** * < pre > * CUresult cuMemAllocManaged ( * CUdeviceptr * dptr , * size _ t bytesize , * unsigned int flags ) * < / pre > * < div > Allocates memory that will be automatically managed by the Unified * Memory system . < / div > < div > * < h6 > Description < / h6 > * Allocates < tt > bytesize < / tt > bytes of managed memory on the device and * returns in < tt > * dptr < / tt > a pointer to the allocated memory . If the * device doesn ' t support allocating managed memory , * CUDA _ ERROR _ NOT _ SUPPORTED is returned . Support for managed memory can be * queried using the device attribute CU _ DEVICE _ ATTRIBUTE _ MANAGED _ MEMORY . * The allocated memory is suitably aligned for any kind of variable . The * memory is not cleared . If < tt > bytesize < / tt > is 0 , cuMemAllocManaged * returns CUDA _ ERROR _ INVALID _ VALUE . The pointer is valid on the CPU and on * all GPUs in the system that support managed memory . All accesses to this * pointer must obey the Unified Memory programming model . * < tt > flags < / tt > specifies the default stream association for this * allocation . < tt > flags < / tt > must be one of CU _ MEM _ ATTACH _ GLOBAL or * CU _ MEM _ ATTACH _ HOST . If CU _ MEM _ ATTACH _ GLOBAL is specified , then this * memory is accessible from any stream on any device . If CU _ MEM _ ATTACH _ HOST * is specified , then the allocation is created with initial visibility * restricted to host access only ; an explicit call to * cuStreamAttachMemAsync will be required to enable access on the device . * If the association is later changed via cuStreamAttachMemAsync to a * single stream , the default association as specifed during * cuMemAllocManaged is restored when that stream is destroyed . For * _ _ managed _ _ variables , the default association is always * CU _ MEM _ ATTACH _ GLOBAL . 
Note that destroying a stream is an asynchronous * operation , and as a result , the change to default association won ' t * happen until all work in the stream has completed . * Memory allocated with cuMemAllocManaged should be released with * cuMemFree . * On a multi - GPU system with peer - to - peer support , where multiple GPUs * support managed memory , the physical storage is created on the GPU which * is active at the time cuMemAllocManaged is called . All other GPUs will * reference the data at reduced bandwidth via peer mappings over the PCIe * bus . The Unified Memory management system does not migrate memory between * GPUs . * On a multi - GPU system where multiple GPUs support managed memory , but not * all pairs of such GPUs have peer - to - peer support between them , the * physical storage is created in ' zero - copy ' or system memory . All GPUs * will reference the data at reduced bandwidth over the PCIe bus . In these * circumstances , use of the environment variable , CUDA _ VISIBLE _ DEVICES , is * recommended to restrict CUDA to only use those GPUs that have * peer - to - peer support . Alternatively , users can also set * CUDA _ MANAGED _ FORCE _ DEVICE _ ALLOC to a non - zero value to force the driver * to always use device memory for physical storage . When this environment * variable is set to a non - zero value , all contexts created in that process * on devices that support managed memory have to be peer - to - peer compatible * with each other . Context creation will fail if a context is created on a * device that supports managed memory and is not peer - to - peer compatible * with any of the other managed memory supporting devices on which contexts * were previously created , even if those contexts have been destroyed . * These environment variables are described in the CUDA programming guide * under the " CUDA environment variables " section . 
* < div > < span > Note : < / span > * Note that this function may also return error codes from previous , * asynchronous launches . * < / div > * < / div > * @ param dptr The device pointer * @ param bytesize The size in bytes * @ param flags The flags * @ return CUDA _ SUCCESS , CUDA _ ERROR _ DEINITIALIZED , * CUDA _ ERROR _ NOT _ INITIALIZED , CUDA _ ERROR _ INVALID _ CONTEXT , * CUDA _ ERROR _ NOT _ SUPPORTED , CUDA _ ERROR _ INVALID _ VALUE , * CUDA _ ERROR _ OUT _ OF _ MEMORY * @ see JCudaDriver # cuArray3DCreate * @ see JCudaDriver # cuArray3DGetDescriptor * @ see JCudaDriver # cuArrayCreate * @ see JCudaDriver # cuArrayDestroy * @ see JCudaDriver # cuArrayGetDescriptor * @ see JCudaDriver # cuMemAllocHost * @ see JCudaDriver # cuMemAllocPitch * @ see JCudaDriver # cuMemcpy2D * @ see JCudaDriver # cuMemcpy2DAsync * @ see JCudaDriver # cuMemcpy2DUnaligned * @ see JCudaDriver # cuMemcpy3D * @ see JCudaDriver # cuMemcpy3DAsync * @ see JCudaDriver # cuMemcpyAtoA * @ see JCudaDriver # cuMemcpyAtoD * @ see JCudaDriver # cuMemcpyAtoH * @ see JCudaDriver # cuMemcpyAtoHAsync * @ see JCudaDriver # cuMemcpyDtoA * @ see JCudaDriver # cuMemcpyDtoD * @ see JCudaDriver # cuMemcpyDtoDAsync * @ see JCudaDriver # cuMemcpyDtoH * @ see JCudaDriver # cuMemcpyDtoHAsync * @ see JCudaDriver # cuMemcpyHtoA * @ see JCudaDriver # cuMemcpyHtoAAsync * @ see JCudaDriver # cuMemcpyHtoD * @ see JCudaDriver # cuMemcpyHtoDAsync * @ see JCudaDriver # cuMemFree * @ see JCudaDriver # cuMemFreeHost * @ see JCudaDriver # cuMemGetAddressRange * @ see JCudaDriver # cuMemGetInfo * @ see JCudaDriver # cuMemHostAlloc * @ see JCudaDriver # cuMemHostGetDevicePointer * @ see JCudaDriver # cuMemsetD2D8 * @ see JCudaDriver # cuMemsetD2D16 * @ see JCudaDriver # cuMemsetD2D32 * @ see JCudaDriver # cuMemsetD8 * @ see JCudaDriver # cuMemsetD16 * @ see JCudaDriver # cuMemsetD32 * @ see JCudaDriver # cuDeviceGetAttribute * @ see JCudaDriver # cuStreamAttachMemAsync */ public static int cuMemAllocManaged ( 
CUdeviceptr dptr , long bytesize , int flags ) { } }
return checkResult ( cuMemAllocManagedNative ( dptr , bytesize , flags ) ) ;
public class ConvertImage { /** * Converts a { @ link InterleavedU16 } into a { @ link GrayU16 } by computing the average value of each pixel * across all the bands . * @ param input ( Input ) The ImageInterleaved that is being converted . Not modified . * @ param output ( Optional ) The single band output image . If null a new image is created . Modified . * @ return Converted image . */ public static GrayU16 average ( InterleavedU16 input , GrayU16 output ) { } }
if ( output == null ) { output = new GrayU16 ( input . width , input . height ) ; } else { output . reshape ( input . width , input . height ) ; } if ( BoofConcurrency . USE_CONCURRENT ) { ConvertInterleavedToSingle_MT . average ( input , output ) ; } else { ConvertInterleavedToSingle . average ( input , output ) ; } return output ;
public class SheetBindingErrors { /** * グローバルエラーを取得する * @ return エラーがない場合は空のリストを返す */ public List < ObjectError > getGlobalErrors ( ) { } }
return errors . stream ( ) . filter ( e -> ! ( e instanceof FieldError ) ) . collect ( Collectors . toList ( ) ) ;
public class StorePackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getReferenceDataValue ( ) { } }
if ( referenceDataValueEClass == null ) { referenceDataValueEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( StorePackage . eNS_URI ) . getEClassifiers ( ) . get ( 23 ) ; } return referenceDataValueEClass ;
public class UserCoreDao { /** * Query for the row where the field is like the value * @ param fieldName * field name * @ param value * value * @ return result * @ since 3.0.1 */ public TResult queryForLike ( String fieldName , Object value ) { } }
return queryForLike ( fieldName , value , null , null , null ) ;
public class BasicWeekyearDateTimeField {
    /**
     * Set the Year of a week based year component of the specified time instant,
     * preserving the week-of-weekyear (clamped if necessary) and the day-of-week.
     *
     * @see org.joda.time.DateTimeField#set
     * @param instant the time instant in millis to update
     * @param year the year (-9999..9999) to set the date to
     * @return the updated instant in millis
     * @throws IllegalArgumentException if year is invalid
     */
    public long set(long instant, int year) {
        FieldUtils.verifyValueBounds(this, Math.abs(year), iChronology.getMinYear(), iChronology.getMaxYear());
        // Do nothing if no real change is requested.
        int thisWeekyear = get(instant);
        if (thisWeekyear == year) {
            return instant;
        }
        // Calculate the DayOfWeek (to be preserved).
        int thisDow = iChronology.getDayOfWeek(instant);
        // Calculate the maximum weeks in the target year.
        int weeksInFromYear = iChronology.getWeeksInYear(thisWeekyear);
        int weeksInToYear = iChronology.getWeeksInYear(year);
        int maxOutWeeks = (weeksInToYear < weeksInFromYear) ? weeksInToYear : weeksInFromYear;
        // Preserve the current week of the year unless it exceeds the maximum
        // possible for the target weekyear, in which case it is clamped.
        int setToWeek = iChronology.getWeekOfWeekyear(instant);
        if (setToWeek > maxOutWeeks) {
            setToWeek = maxOutWeeks;
        }
        // Get a working copy of the current date-time.
        long workInstant = instant; // Get a copy
        // Attempt to get close to the proper weekyear. We cannot call ourself,
        // so set the plain year instead; that at least gets us close.
        workInstant = iChronology.setYear(workInstant, year);
        // Calculate the weekyear number for the "get close" value
        // (which might not be equal to the year just set).
        int workWoyYear = get(workInstant);
        // At most we are off by one year, which can be "fixed" by
        // adding/subtracting a week.
        if (workWoyYear < year) {
            workInstant += DateTimeConstants.MILLIS_PER_WEEK;
        } else if (workWoyYear > year) {
            workInstant -= DateTimeConstants.MILLIS_PER_WEEK;
        }
        // Set the proper week in the current weekyear.
        // BEGIN: possible set WeekOfWeekyear logic.
        int currentWoyWeek = iChronology.getWeekOfWeekyear(workInstant);
        // No range check required (we already know it is OK).
        workInstant = workInstant + (setToWeek - currentWoyWeek) * (long) DateTimeConstants.MILLIS_PER_WEEK;
        // END: possible set WeekOfWeekyear logic.
        // Reset DayOfWeek to previous value.
        // Note: this works, but ideally a field shouldn't invoke other fields.
        workInstant = iChronology.dayOfWeek().set(workInstant, thisDow);
        // Return result.
        return workInstant;
    }
}
public class CmsSitemapController { /** * Updates the given entry . < p > * @ param entryId the entry id */ public void updateEntry ( CmsUUID entryId ) { } }
getChildren ( entryId , CmsSitemapTreeItem . getItemById ( entryId ) . isOpen ( ) , null ) ;
public class BlockJUnit4ClassRunner { /** * Returns a { @ link Statement } : if { @ code method } ' s { @ code @ Test } annotation * has the { @ link Test # expected ( ) } attribute , return normally only if { @ code next } * throws an exception of the correct type , and throw an exception * otherwise . */ protected Statement possiblyExpectingExceptions ( FrameworkMethod method , Object test , Statement next ) { } }
Test annotation = method . getAnnotation ( Test . class ) ; Class < ? extends Throwable > expectedExceptionClass = getExpectedException ( annotation ) ; return expectedExceptionClass != null ? new ExpectException ( next , expectedExceptionClass ) : next ;
public class RestService {
    /**
     * Converts a HTTP request parameter to an entity value of which the type is defined
     * by the attribute. For file attributes persists the file in the file store and
     * persists a file meta data entity.
     *
     * @param attr attribute describing the target type
     * @param paramValue HTTP parameter value; empty strings are treated as null
     * @param id entity id, forwarded only for FILE attributes
     * @return the converted value (may be null)
     */
    public Object toEntityValue(Attribute attr, Object paramValue, Object id) {
        // Treat empty strings as null
        if ((paramValue instanceof String) && ((String) paramValue).isEmpty()) {
            paramValue = null;
        }
        Object value;
        AttributeType attrType = attr.getDataType();
        switch (attrType) {
            case BOOL:
                value = convertBool(attr, paramValue);
                break;
            // All text-like types share plain string conversion.
            case EMAIL:
            case ENUM:
            case HTML:
            case HYPERLINK:
            case SCRIPT:
            case STRING:
            case TEXT:
                value = convertString(attr, paramValue);
                break;
            // Single-valued references.
            case CATEGORICAL:
            case XREF:
                value = convertRef(attr, paramValue);
                break;
            // Multi-valued references.
            case CATEGORICAL_MREF:
            case MREF:
            case ONE_TO_MANY:
                value = convertMref(attr, paramValue);
                break;
            case DATE:
                value = convertDate(attr, paramValue);
                break;
            case DATE_TIME:
                value = convertDateTime(attr, paramValue);
                break;
            case DECIMAL:
                value = convertDecimal(attr, paramValue);
                break;
            case FILE:
                // File content needs the entity id to link the persisted meta data.
                value = convertFile(attr, paramValue, id);
                break;
            case INT:
                value = convertInt(attr, paramValue);
                break;
            case LONG:
                value = convertLong(attr, paramValue);
                break;
            case COMPOUND:
                // Compound attributes do not carry a value of their own.
                throw new IllegalAttributeTypeException(attrType);
            default:
                throw new UnexpectedEnumException(attrType);
        }
        return value;
    }
}
public class ProjectApi { /** * Get a single of project snippet . * < pre > < code > GET / projects / : id / snippets / : snippet _ id < / code > < / pre > * @ param projectIdOrPath projectIdOrPath the project in the form of an Integer ( ID ) , String ( path ) , or Project instance , required * @ param snippetId the ID of the project ' s snippet * @ return the specified project Snippet * @ throws GitLabApiException if any exception occurs */ public Snippet getSnippet ( Object projectIdOrPath , Integer snippetId ) throws GitLabApiException { } }
Response response = get ( Response . Status . OK , null , "projects" , getProjectIdOrPath ( projectIdOrPath ) , "snippets" , snippetId ) ; return ( response . readEntity ( Snippet . class ) ) ;
public class FileChannelDataLogger {
    /**
     * Reopens the datalogger in the given access mode.
     * NOTE(review): the javadoc says the logger must already be open, but the code
     * below lazily (re)associates the file when {@code randomAccess} is null — confirm intent.
     *
     * @param accessMode the new access mode (READ, WRITE or APPEND)
     * @throws IOException on I/O failure
     * @throws java.lang.IllegalStateException logger isn't open
     */
    public void reopen(AccessMode accessMode) throws IOException {
        if (this.randomAccess == null) {
            associatedRandomAccessFile();
        }
        this.accessMode = accessMode;
        // Position the file according to the requested mode.
        switch (accessMode) {
            case READ:
                maybeRead();
                // start reading from the first position
                this.randomAccess.rewind();
                break;
            case WRITE:
                maybeWritten();
                // start writing from scratch
                this.randomAccess.reset();
                break;
            case APPEND:
                maybeWritten();
                // continue writing at the end of the existing data
                this.randomAccess.forwardWind();
                break;
            default:
                throw new IllegalArgumentException("Invalid AccessMode " + accessMode);
        }
    }
}
public class JvmAgentConfig { private void initMode ( Map < String , String > agentConfig ) { } }
String mode = agentConfig . get ( "mode" ) ; if ( mode != null && ! mode . equals ( "start" ) && ! mode . equals ( "stop" ) ) { throw new IllegalArgumentException ( "Invalid running mode '" + mode + "'. Must be either 'start' or 'stop'" ) ; } isStopMode = "stop" . equals ( mode ) ;
public class ValueRange {
    /**
     * Obtains a variable value range.
     * This factory obtains a range where the minimum value is fixed and the maximum value may vary.
     * For example, the ISO day-of-month always starts at 1, but ends between 28 and 31.
     *
     * @param min the minimum value
     * @param maxSmallest the smallest maximum value
     * @param maxLargest the largest maximum value
     * @return the ValueRange for min, smallest max, largest max, not null
     * @throws IllegalArgumentException if the minimum is greater than the smallest maximum,
     *         or the smallest maximum is greater than the largest maximum
     */
    public static ValueRange of(long min, long maxSmallest, long maxLargest) {
        // A fixed minimum is modelled as smallest-min == largest-min == min.
        return of(min, min, maxSmallest, maxLargest);
    }
}
public class KbTypeConflictException { /** * Converts a Throwable to a KbTypeConflictException with the specified detail message . If the * Throwable is a KbTypeConflictException and if the Throwable ' s message is identical to the * one supplied , the Throwable will be passed through unmodified ; otherwise , it will be wrapped in * a new KbTypeConflictException with the detail message . * @ param cause the Throwable to convert * @ param message the specified detail message * @ return a KbTypeConflictException */ public static KbTypeConflictException fromThrowable ( String message , Throwable cause ) { } }
return ( cause instanceof KbTypeConflictException && Objects . equals ( message , cause . getMessage ( ) ) ) ? ( KbTypeConflictException ) cause : new KbTypeConflictException ( message , cause ) ;
public class UpworkRestClient { /** * Generate error as JSONObject * @ param code Error code * @ param message Error message * @ throws JSONException * @ return { @ link JSONObject } */ private static JSONObject genError ( Integer code , String message ) throws JSONException { } }
// TODO : HTTP - Status ( 404 , etc ) , for now return status line return new JSONObject ( "{error: {code: \"" + code . toString ( ) + "\", message: \"" + message + "\"}}" ) ;
public class CommandLineInterface { /** * Command line easy parser * @ param args command line arguments */ private void parse ( String [ ] args ) { } }
for ( int i = 0 ; i < args . length ; i ++ ) { if ( opt_with_value . containsKey ( args [ i ] ) ) { String key = opt_with_value . get ( args [ i ] ) ; values . put ( key , args [ i + 1 ] ) ; i ++ ; } else if ( args [ i ] . startsWith ( "-" ) ) { opt_without_value . add ( args [ i ] ) ; } else { arglist . add ( args [ i ] ) ; } }
public class IOGroovyMethods { /** * Pipe an InputStream into an OutputStream for efficient stream copying . * @ param self stream on which to write * @ param in stream to read from * @ return the outputstream itself * @ throws IOException if an I / O error occurs . * @ since 1.0 */ public static OutputStream leftShift ( OutputStream self , InputStream in ) throws IOException { } }
byte [ ] buf = new byte [ 1024 ] ; while ( true ) { int count = in . read ( buf , 0 , buf . length ) ; if ( count == - 1 ) break ; if ( count == 0 ) { Thread . yield ( ) ; continue ; } self . write ( buf , 0 , count ) ; } self . flush ( ) ; return self ;
public class GeoId {
    /**
     * Extract the primitive bounds from this geoId.
     * <p>A geoId is an identifier from which the localization could be extracted.
     * The location is here restricted to the bounds of the primitives.
     * The code parses the portion after '#' as four ';'-separated longs —
     * presumably {@code minx;miny;maxx;maxy} — TODO confirm the id format.
     *
     * @return a rectangle or <code>null</code> if invalid geoid.
     */
    @Pure
    public Rectangle2d toBounds2D() {
        // Bounds are encoded after the '#' marker; no marker (or at index 0) means no bounds.
        int startIndex = this.id.indexOf('#');
        if (startIndex <= 0) {
            return null;
        }
        try {
            int endIndex = this.id.indexOf(';', startIndex);
            if (endIndex <= startIndex) {
                return null;
            }
            // First coordinate starts one past the '#'.
            final long minx = Long.parseLong(this.id.substring(startIndex + 1, endIndex));
            startIndex = endIndex + 1;
            endIndex = this.id.indexOf(';', startIndex);
            if (endIndex <= startIndex) {
                return null;
            }
            final long miny = Long.parseLong(this.id.substring(startIndex, endIndex));
            startIndex = endIndex + 1;
            endIndex = this.id.indexOf(';', startIndex);
            if (endIndex <= startIndex) {
                return null;
            }
            final long maxx = Long.parseLong(this.id.substring(startIndex, endIndex));
            startIndex = endIndex + 1;
            // Last coordinate runs to the end of the id.
            final long maxy = Long.parseLong(this.id.substring(startIndex));
            final Rectangle2d r = new Rectangle2d();
            r.setFromCorners(minx, miny, maxx, maxy);
            return r;
        } catch (Throwable exception) {
            // Deliberate best-effort parse: any failure falls through to null.
        }
        return null;
    }
}
public class FileUtils { /** * Copy a file from it ' s source input to the specified output * file if it can . * @ param source the input file to copy * @ param output the output destination * @ return true if successful , false otherwise */ public static boolean copy ( File source , File output ) { } }
// Check to see if output exists if ( output . exists ( ) && output . canWrite ( ) ) { // Delete the existing file , and create a new one if ( output . delete ( ) ) { try { output . createNewFile ( ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; } } } else if ( ! output . exists ( ) ) { try { output . createNewFile ( ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; } } // now that we have performed a prelimanary check on the output , time to copy if ( source . exists ( ) && source . canRead ( ) ) { try { FileInputStream fis = new FileInputStream ( source ) ; FileOutputStream fos = new FileOutputStream ( output ) ; byte [ ] buffer = new byte [ 1024 ] ; int len = 0 ; while ( ( len = fis . read ( buffer ) ) > 0 ) { fos . write ( buffer , 0 , len ) ; } fis . close ( ) ; fos . close ( ) ; return true ; } catch ( FileNotFoundException e ) { e . printStackTrace ( ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; } } return false ;
public class QuerySources {
    /**
     * Obtain a {@link NodeSequence} that returns the (queryable) node with the given key
     * in the workspace, where the node is assigned the given score.
     *
     * @param workspaceName the name of the workspace; may not be null
     * @param identifier the {@link NodeKey#getIdentifier() identifier} of the node; may not be null
     * @param score the score for the node
     * @return the sequence of node(s); never null
     */
    public NodeSequence singleNode(String workspaceName, String identifier, float score) {
        // Get the node filter to use...
        NodeFilter nodeFilter = nodeFilterForWorkspace(workspaceName);
        if (nodeFilter != null) {
            NodeCache cache = repo.getWorkspaceCache(workspaceName);
            // First attempt: treat the identifier as a complete, valid node key.
            if (NodeKey.isValidFormat(identifier)) {
                NodeKey key = new NodeKey(identifier);
                CachedNode node = cache.getNode(key);
                if (node != null && nodeFilter.includeNode(node, cache)) {
                    return NodeSequence.withNodes(Collections.singleton(node), score, workspaceName);
                }
            }
            // Second attempt: combine the identifier with the root node's
            // source and workspace key parts.
            NodeKey key = cache.getRootKey().withId(identifier);
            CachedNode node = cache.getNode(key);
            if (node != null && nodeFilter.includeNode(node, cache)) {
                return NodeSequence.withNodes(Collections.singleton(node), score, workspaceName);
            }
        }
        // Not found, filtered out, or no filter: an empty single-width sequence.
        return NodeSequence.emptySequence(1);
    }
}
public class InfiniteScrollPanel {
    /**
     * Renders the provided data with the configured {@link Renderer}, appending one
     * widget per model and tagging each with a sequential "item-N" element id.
     * Also hands the new widgets to the recycle manager when recycling is enabled
     * (see {@link this#setRecycleManager(RecycleManager)}), and forces the panel's
     * height so a scrollbar appears when none is visible yet.
     *
     * @param data the models to render, in order
     */
    private void render(List<T> data) {
        List<Widget> widgets = new ArrayList<>();
        for (T model : data) {
            Widget widget = renderer.render(model);
            // Sequential DOM ids across render calls; itemCount persists between calls.
            widget.getElement().setId("item-" + itemCount);
            add(widget);
            widgets.add(widget);
            itemCount++;
        }
        // Check if recycling is enabled
        if (isEnableRecycling()) {
            recycleManager.recycleWidgets(widgets);
        }
        // Will force the scroll panel to have a scroll if it isn't visible.
        // NOTE(review): uses the first widget's outer height; assumes data is
        // non-empty when no scrollbar exists — confirm callers guarantee this.
        if (!hasScrollBar()) {
            int height = $(widgets.get(0).getElement()).outerHeight();
            getElement().getStyle().setHeight(height, Style.Unit.PX);
        }
    }
}
public class JdbcCpoXaAdapter {
    /**
     * Executes an Object whose metadata will call an executable within the datasource.
     * It is assumed that the executable object exists in the metadatasource; if not,
     * an exception will be thrown.
     *
     * @param obj an Object defined within the metadata of the datasource, used to
     *        populate the IN parameters of the executed datasource object
     * @return an object populated with the OUT parameters returned from the executable object
     * @throws CpoException if there are errors accessing the datasource
     */
    @Override
    public <T> T executeObject(T obj) throws CpoException {
        // Delegate to the adapter bound to the current XA resource/transaction.
        return getCurrentResource().executeObject(obj);
    }
}
public class CPTaxCategoryServiceBaseImpl {
    /**
     * Sets the cp definition local service.
     *
     * @param cpDefinitionLocalService the cp definition local service
     */
    public void setCPDefinitionLocalService(
            com.liferay.commerce.product.service.CPDefinitionLocalService cpDefinitionLocalService) {
        // Plain dependency-injection setter; no validation is performed.
        this.cpDefinitionLocalService = cpDefinitionLocalService;
    }
}
public class AdminResource { /** * Fetches the thread stack dump for this application . * @ return json representing the thread stack dump . */ @ GET @ Path ( "stack" ) @ Produces ( MediaType . TEXT_PLAIN ) public String getThreadDump ( ) { } }
if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "==> AdminResource.getThreadDump()" ) ; } ThreadGroup topThreadGroup = Thread . currentThread ( ) . getThreadGroup ( ) ; while ( topThreadGroup . getParent ( ) != null ) { topThreadGroup = topThreadGroup . getParent ( ) ; } Thread [ ] threads = new Thread [ topThreadGroup . activeCount ( ) ] ; int nr = topThreadGroup . enumerate ( threads ) ; StringBuilder builder = new StringBuilder ( ) ; for ( int i = 0 ; i < nr ; i ++ ) { builder . append ( threads [ i ] . getName ( ) ) . append ( "\nState: " ) . append ( threads [ i ] . getState ( ) ) . append ( "\n" ) ; String stackTrace = StringUtils . join ( threads [ i ] . getStackTrace ( ) , "\n" ) ; builder . append ( stackTrace ) ; } if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "<== AdminResource.getThreadDump()" ) ; } return builder . toString ( ) ;
public class GetGlue {
    /**
     * Build an OAuth access token request.
     *
     * @param clientId The OAuth client id obtained from tvtag.
     * @param clientSecret The OAuth client secret obtained from tvtag.
     * @param redirectUri The redirect URI previously used for obtaining the auth code.
     * @param authCode A previously obtained auth code.
     * @return A tvtag OAuth access token request.
     * @throws OAuthSystemException on request construction failure
     */
    public static OAuthClientRequest getAccessTokenRequest(String clientId, String clientSecret,
            String redirectUri, String authCode) throws OAuthSystemException {
        // Authorization-code grant; buildQueryMessage places the parameters in
        // the query string rather than the request body.
        return OAuthClientRequest.tokenLocation(OAUTH2_ACCESS_TOKEN_URL)
                .setGrantType(GrantType.AUTHORIZATION_CODE)
                .setClientId(clientId)
                .setClientSecret(clientSecret)
                .setRedirectURI(redirectUri)
                .setCode(authCode)
                .buildQueryMessage();
    }
}
public class WASCDIAnnotationInjectionProvider {
    /**
     * {@inheritDoc}
     *
     * Delegates CDI annotation injection to the runtime helper when it is available;
     * returns null otherwise. Runtime failures are wrapped in
     * {@link InjectionProviderException}.
     */
    @Override
    public Object inject(Object instance) throws InjectionProviderException {
        if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE)) {
            logger.logp(Level.FINE, CLASS_NAME, "inject(Object instance)",
                    "instance =" + instance.getClass().getName());
        }
        ManagedObject mo = null;
        // isAvailable guards the case where the CDI runtime is not present.
        if (isAvailable) {
            try {
                mo = runtimeAnnotationHelper.inject(instance);
            } catch (RuntimeException exc) {
                throw new InjectionProviderException(exc);
            }
        }
        return mo;
    }
}
public class AWSGlueClient { /** * Retrieves the definition of a trigger . * @ param getTriggerRequest * @ return Result of the GetTrigger operation returned by the service . * @ throws EntityNotFoundException * A specified entity does not exist * @ throws InvalidInputException * The input provided was not valid . * @ throws InternalServiceException * An internal service error occurred . * @ throws OperationTimeoutException * The operation timed out . * @ sample AWSGlue . GetTrigger * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / glue - 2017-03-31 / GetTrigger " target = " _ top " > AWS API * Documentation < / a > */ @ Override public GetTriggerResult getTrigger ( GetTriggerRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetTrigger ( request ) ;
public class AbstractHistogram { /** * Get the count of recorded values at a specific value ( to within the histogram resolution at the value level ) . * @ param value The value for which to provide the recorded count * @ return The total count of values recorded in the histogram within the value range that is * { @ literal > = } lowestEquivalentValue ( < i > value < / i > ) and { @ literal < = } highestEquivalentValue ( < i > value < / i > ) */ public long getCountAtValue ( final long value ) throws ArrayIndexOutOfBoundsException { } }
final int index = Math . min ( Math . max ( 0 , countsArrayIndex ( value ) ) , ( countsArrayLength - 1 ) ) ; return getCountAtIndex ( index ) ;
public class RequestTemplate { /** * The URL for the request . If the template has not been resolved , the url will represent a uri * template . * @ return the url */ public String url ( ) { } }
/* build the fully qualified url with all query parameters */ StringBuilder url = new StringBuilder ( this . path ( ) ) ; if ( ! this . queries . isEmpty ( ) ) { url . append ( this . queryLine ( ) ) ; } if ( fragment != null ) { url . append ( fragment ) ; } return url . toString ( ) ;
public class DescribeEffectivePatchesForPatchBaselineResult { /** * An array of patches and patch status . * @ return An array of patches and patch status . */ public java . util . List < EffectivePatch > getEffectivePatches ( ) { } }
if ( effectivePatches == null ) { effectivePatches = new com . amazonaws . internal . SdkInternalList < EffectivePatch > ( ) ; } return effectivePatches ;
public class AuthorizationUtils {
    /**
     * Check a list of resource-actions to be performed as a result of an HTTP request.
     * If one of the resource-actions fails the authorization check, this method returns
     * the failed Access object from the check; otherwise ACCESS_OK.
     * This function sets the DRUID_AUTHORIZATION_CHECKED attribute in the request;
     * if that attribute is already set when this function is called, an exception is thrown.
     *
     * @param request HTTP request to be authorized
     * @param resourceActions An Iterable of resource-actions to authorize
     * @param authorizerMapper mapper resolving the authorizer for the request
     * @return ACCESS_OK or the Access object from the first failed check
     */
    public static Access authorizeAllResourceActions(
            final HttpServletRequest request,
            final Iterable<ResourceAction> resourceActions,
            final AuthorizerMapper authorizerMapper) {
        // Unsecured paths bypass authorization entirely.
        if (request.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH) != null) {
            return Access.OK;
        }
        // Guard against double-checking the same request.
        if (request.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED) != null) {
            throw new ISE("Request already had authorization check.");
        }
        Access access = authorizeAllResourceActions(
                authenticationResultFromRequest(request), resourceActions, authorizerMapper);
        // Record the outcome so later filters know the check happened.
        request.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, access.isAllowed());
        return access;
    }
}
public class POIUtils { /** * CellRangeAddressを文字列形式のリストに変換する 。 * @ since 0.5 * @ param region * @ return */ private static List < String > convertSqref ( final CellRangeAddressList region ) { } }
List < String > sqref = new ArrayList < > ( ) ; for ( CellRangeAddress range : region . getCellRangeAddresses ( ) ) { sqref . add ( range . formatAsString ( ) ) ; } return sqref ;
public class CommercePriceListLocalServiceBaseImpl {
    /**
     * Deletes the commerce price list from the database. Also notifies the appropriate
     * model listeners, and (via @Indexable) removes the entity from the search index.
     *
     * @param commercePriceList the commerce price list
     * @return the commerce price list that was removed
     * @throws PortalException on persistence failure
     */
    @Indexable(type = IndexableType.DELETE)
    @Override
    public CommercePriceList deleteCommercePriceList(CommercePriceList commercePriceList)
            throws PortalException {
        // Delegates directly to the persistence layer.
        return commercePriceListPersistence.remove(commercePriceList);
    }
}
public class EmbeddedPostgreSQLController { /** * Return configuration tweaks in a format appropriate for ness - jdbc DatabaseModule . */ public ImmutableMap < String , String > getConfigurationTweak ( String dbModuleName ) { } }
final DbInfo db = cluster . getNextDb ( ) ; return ImmutableMap . of ( "ness.db." + dbModuleName + ".uri" , getJdbcUri ( db ) , "ness.db." + dbModuleName + ".ds.user" , db . user ) ;
public class ShrinkWrapFileSystem { /** * { @ inheritDoc } * @ see java . nio . file . FileSystem # getRootDirectories ( ) */ @ Override public Iterable < Path > getRootDirectories ( ) { } }
this . checkClosed ( ) ; // Each ShrinkWrapFileSystem has one root directory final Path path = new ShrinkWrapPath ( ArchivePaths . root ( ) , this ) ; final List < Path > paths = new ArrayList < > ( 1 ) ; paths . add ( path ) ; return Collections . unmodifiableList ( paths ) ;
public class VirtualNetworksInner {
    /**
     * Lists usage stats.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;VirtualNetworkUsageInner&gt; object if successful.
     */
    public PagedList<VirtualNetworkUsageInner> listUsageNext(final String nextPageLink) {
        // Fetch the first page synchronously by blocking on the async call.
        ServiceResponse<Page<VirtualNetworkUsageInner>> response =
                listUsageNextSinglePageAsync(nextPageLink).toBlocking().single();
        return new PagedList<VirtualNetworkUsageInner>(response.body()) {
            @Override
            public Page<VirtualNetworkUsageInner> nextPage(String nextPageLink) {
                // Subsequent pages are fetched lazily, also blocking per page.
                return listUsageNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }
}
public class ModelResourceStructure {
    /**
     * Tells whether the given entity is "empty": either null, or an empty
     * {@link java.util.Collection}. Any non-collection object counts as non-empty.
     *
     * @param entity a {@link java.lang.Object} object, may be null
     * @return true when the entity is null or an empty collection
     */
    protected boolean isEmptyEntity(Object entity) {
        if (entity == null) {
            return true;
        }
        if (entity instanceof Collection) {
            return ((Collection) entity).isEmpty();
        }
        return false;
    }
}
public class ProcessorText {
    /**
     * Receive notification of the end of an element.
     *
     * @param handler non-null reference to current StylesheetHandler that is constructing the Templates.
     * @param uri The Namespace URI, or an empty string.
     * @param localName The local name (without prefix), or empty string if not namespace processing.
     * @param rawName The qualified name (with prefix).
     */
    public void endElement(StylesheetHandler handler, String uri, String localName, String rawName)
            throws org.xml.sax.SAXException {
        // Clear the xsl:text element on the characters processor so subsequent
        // character events are no longer attributed to this element.
        ProcessorCharacters charProcessor =
                (ProcessorCharacters) handler.getProcessorFor(null, "text()", "text");
        charProcessor.setXslTextElement(null);
    }
}
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcChimney ( ) { } }
if ( ifcChimneyEClass == null ) { ifcChimneyEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 89 ) ; } return ifcChimneyEClass ;
public class Settings { /** * Sets a property value . * @ param key the key for the property * @ param value the value for the property */ public void setInt ( @ NotNull final String key , @ NotNull final int value ) { } }
props . setProperty ( key , String . valueOf ( value ) ) ; LOGGER . debug ( "Setting: {}='{}'" , key , value ) ;
public class DriverConfiguration { /** * Gets an integer from the given element . * If the given element is { @ code null } or does not have an integer , zero is returned . * @ param element the element with the integer value * @ return an integer */ private int getIntValue ( Element element ) { } }
if ( element != null ) { try { return Integer . parseInt ( element . getValue ( ) ) ; } catch ( NumberFormatException e ) { logger . error ( "Failed to extract an integer from: " + element . getValue ( ) ) ; } } return 0 ;
public class ClassRef { /** * Writes an array of Class objects . * @ param out target to write to * @ param types Classes to be written */ public static void writeClasses ( ObjectOutput out , Class < ? > [ ] types ) throws java . io . IOException { } }
int i ; if ( types . length > Byte . MAX_VALUE ) { throw new RuntimeException ( "to many dimensions" ) ; } out . writeByte ( ( byte ) types . length ) ; for ( i = 0 ; i < types . length ; i ++ ) { write ( out , types [ i ] ) ; }
public class PDStorageManager {
    /**
     * Get all {@link PDStoredBusinessEntity} objects matching the provided query.
     * This is a specialization of {@link #searchAllDocuments(Query, int, Consumer)}.
     *
     * @param aQuery The query to be executed. May not be <code>null</code>.
     * @param nMaxResultCount Maximum number of results. Values &le; 0 mean all.
     * @return A non-<code>null</code> but maybe empty list of matching documents
     * @see #searchAllDocuments(Query, int, Consumer)
     */
    @Nonnull
    @ReturnsMutableCopy
    public ICommonsList<PDStoredBusinessEntity> getAllDocuments(
            @Nonnull final Query aQuery, @CheckForSigned final int nMaxResultCount) {
        final ICommonsList<PDStoredBusinessEntity> aTargetList = new CommonsArrayList<>();
        try {
            // Collect every match into the target list via method reference.
            searchAllDocuments(aQuery, nMaxResultCount, aTargetList::add);
        } catch (final IOException ex) {
            // Best-effort: log and return whatever was collected so far (possibly empty).
            LOGGER.error("Error searching for documents with query " + aQuery, ex);
        }
        return aTargetList;
    }
}
public class BeanAttributesFactory {
    /**
     * Creates new {@link BeanAttributes} to represent a managed bean.
     *
     * @param annotated the enhanced annotated type of the bean
     * @param manager the bean manager
     * @return the built bean attributes
     */
    public static <T> BeanAttributes<T> forBean(EnhancedAnnotated<T, ?> annotated, BeanManagerImpl manager) {
        // Delegates construction to the builder.
        return new BeanAttributesBuilder<T>(annotated, manager).build();
    }
}
public class GenericTreeWalker {
    /**
     * Return the previous Node from the current node. If the result is not null,
     * the current node is updated to it.
     * NOTE(review): the original doc mentions applying filter/whatToShow, but no
     * explicit filter call is visible here — filtering presumably happens inside
     * the getPreviousSibling/getParentNode/getLastChild helpers; confirm.
     *
     * @return the previous node, or null when there is none
     */
    public Node previousNode() {
        if (currentNode == null) return null;
        // get sibling
        Node result = getPreviousSibling(currentNode);
        if (result == null) {
            // No previous sibling: move up to the parent, if any.
            result = getParentNode(currentNode);
            if (result != null) {
                currentNode = result;
                return result;
            }
            return null;
        }
        // Descend to the deepest last child of the previous sibling.
        Node lastChild = getLastChild(result);
        Node prev = lastChild;
        while (lastChild != null) {
            prev = lastChild;
            lastChild = getLastChild(prev);
        }
        lastChild = prev;
        // If there is such a descendant, it is the previous node.
        if (lastChild != null) {
            currentNode = lastChild;
            return lastChild;
        }
        // Otherwise the previous sibling itself is the previous node.
        currentNode = result;
        return result;
    }
}
public class Stream { /** * Gets a timeout possibly set on the { @ code Stream } and represented by the absolute * milliseconds timestamp when the { @ code Stream } will be closed . * @ return the milliseconds timestamp when this { @ code Stream } will be closed , or null if no * timeout has been set */ public final Long getTimeout ( ) { } }
synchronized ( this . state ) { return this . state . timeoutFuture == null ? null : this . state . timeoutFuture . getDelay ( TimeUnit . MILLISECONDS ) + System . currentTimeMillis ( ) ; }
public class LineItemSummary { /** * Sets the lineItemType value for this LineItemSummary . * @ param lineItemType * Indicates the line item type of a { @ code LineItem } . This attribute * is required . * The line item type determines the default priority * of the line item . More information can be * found on the < a href = " https : / / support . google . com / dfp _ premium / answer / 177279 " > * Ad Manager Help Center < / a > . */ public void setLineItemType ( com . google . api . ads . admanager . axis . v201811 . LineItemType lineItemType ) { } }
// Generated-style setter: stores the supplied enum on this instance without
// any local validation (the javadoc's "required" constraint is not checked here).
this . lineItemType = lineItemType ;
public class NodeModelStreamer { /** * Create a canonical represenation of the data type value . Defaults to the value converter . * @ since 2.9 */ protected String getFormattedDatatypeValue ( ICompositeNode node , AbstractRule rule , String text ) throws ValueConverterException { } }
Object value = valueConverter . toValue ( text , rule . getName ( ) , node ) ; text = valueConverter . toString ( value , rule . getName ( ) ) ; return text ;
public class SQLRunner { /** * Prepare print . * @ param _ print the print * @ throws EFapsException the e faps exception */ private void preparePrint ( final AbstractPrint _print ) throws EFapsException { } }
for ( final Select select : _print . getSelection ( ) . getAllSelects ( ) ) { for ( final AbstractElement < ? > element : select . getElements ( ) ) { if ( element instanceof AbstractDataElement ) { ( ( AbstractDataElement < ? > ) element ) . append2SQLSelect ( sqlSelect ) ; } } } if ( sqlSelect . getColumns ( ) . size ( ) > 0 ) { for ( final Select select : _print . getSelection ( ) . getAllSelects ( ) ) { for ( final AbstractElement < ? > element : select . getElements ( ) ) { if ( element instanceof AbstractDataElement ) { ( ( AbstractDataElement < ? > ) element ) . append2SQLWhere ( sqlSelect . getWhere ( ) ) ; } } } if ( _print instanceof ObjectPrint ) { addWhere4ObjectPrint ( ( ObjectPrint ) _print ) ; } else if ( _print instanceof ListPrint ) { addWhere4ListPrint ( ( ListPrint ) _print ) ; } else { addTypeCriteria ( ( QueryPrint ) _print ) ; addWhere4QueryPrint ( ( QueryPrint ) _print ) ; } addCompanyCriteria ( _print ) ; }
public class ShareableResource { /** * Set the resource consumption of a node . * @ param n the node * @ param val the value to set * @ return the current resource */ public ShareableResource setCapacity ( Node n , int val ) { } }
if ( val < 0 ) { throw new IllegalArgumentException ( String . format ( "The '%s' capacity of node '%s' must be >= 0" , rcId , n ) ) ; } nodesCapacity . put ( n , val ) ; return this ;
public class ListInstancesRequest { /** * The type of instance group for which to list the instances . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setInstanceGroupTypes ( java . util . Collection ) } or { @ link # withInstanceGroupTypes ( java . util . Collection ) } if * you want to override the existing values . * @ param instanceGroupTypes * The type of instance group for which to list the instances . * @ return Returns a reference to this object so that method calls can be chained together . * @ see InstanceGroupType */ public ListInstancesRequest withInstanceGroupTypes ( String ... instanceGroupTypes ) { } }
if ( this . instanceGroupTypes == null ) { setInstanceGroupTypes ( new com . amazonaws . internal . SdkInternalList < String > ( instanceGroupTypes . length ) ) ; } for ( String ele : instanceGroupTypes ) { this . instanceGroupTypes . add ( ele ) ; } return this ;
public class SslContextBuilder { /** * Trusted certificates for verifying the remote endpoint ' s certificate . The input stream should * contain an X . 509 certificate collection in PEM format . { @ code null } uses the system default . */ public SslContextBuilder trustManager ( InputStream trustCertCollectionInputStream ) { } }
try { return trustManager ( SslContext . toX509Certificates ( trustCertCollectionInputStream ) ) ; } catch ( Exception e ) { throw new IllegalArgumentException ( "Input stream does not contain valid certificates." , e ) ; }
public class HTMLParser { /** * Extracts the title of the page . * @ param html * @ return */ public static String extractTitle ( String html ) { } }
Matcher m = TITLE_PATTERN . matcher ( html ) ; if ( m . find ( ) ) { return clear ( m . group ( 0 ) ) ; } return null ;
public class Descriptives { /** * Calculates Geometric Mean * @ param flatDataCollection * @ return */ public static double geometricMean ( FlatDataCollection flatDataCollection ) { } }
int n = 0 ; double geometricMean = 0.0 ; Iterator < Double > it = flatDataCollection . iteratorDouble ( ) ; while ( it . hasNext ( ) ) { Double v = it . next ( ) ; if ( v != null ) { if ( v <= 0.0 ) { throw new IllegalArgumentException ( "Negative or zero values are not allowed." ) ; } ++ n ; geometricMean += Math . log ( v ) ; } } geometricMean = Math . exp ( geometricMean / n ) ; return geometricMean ;
public class ListLocationsResult { /** * An array that contains a list of locations . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setLocations ( java . util . Collection ) } or { @ link # withLocations ( java . util . Collection ) } if you want to * override the existing values . * @ param locations * An array that contains a list of locations . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListLocationsResult withLocations ( LocationListEntry ... locations ) { } }
if ( this . locations == null ) { setLocations ( new java . util . ArrayList < LocationListEntry > ( locations . length ) ) ; } for ( LocationListEntry ele : locations ) { this . locations . add ( ele ) ; } return this ;
public class ModelsImpl { /** * Updates the name and children of a hierarchical entity model . * @ param appId The application ID . * @ param versionId The version ID . * @ param hEntityId The hierarchical entity extractor ID . * @ param hierarchicalModelUpdateObject Model containing names of the children of the hierarchical entity . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the OperationStatus object */ public Observable < ServiceResponse < OperationStatus > > updateHierarchicalEntityWithServiceResponseAsync ( UUID appId , String versionId , UUID hEntityId , HierarchicalEntityModel hierarchicalModelUpdateObject ) { } }
// Generated-client request pattern, in order:
//  1. Fail fast with IllegalArgumentException for each missing required argument
//     (configured endpoint, appId, versionId, hEntityId, update payload).
//  2. Run the client-side Validator over the update payload.
//  3. Build the parameterized host string from the "{Endpoint}" template and the
//     configured endpoint value.
//  4. Issue the Retrofit call and flatMap the raw Response into a
//     ServiceResponse<OperationStatus> via updateHierarchicalEntityDelegate;
//     any delegate failure is surfaced as Observable.error rather than thrown
//     synchronously, keeping the reactive contract.
if ( this . client . endpoint ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.endpoint() is required and cannot be null." ) ; } if ( appId == null ) { throw new IllegalArgumentException ( "Parameter appId is required and cannot be null." ) ; } if ( versionId == null ) { throw new IllegalArgumentException ( "Parameter versionId is required and cannot be null." ) ; } if ( hEntityId == null ) { throw new IllegalArgumentException ( "Parameter hEntityId is required and cannot be null." ) ; } if ( hierarchicalModelUpdateObject == null ) { throw new IllegalArgumentException ( "Parameter hierarchicalModelUpdateObject is required and cannot be null." ) ; } Validator . validate ( hierarchicalModelUpdateObject ) ; String parameterizedHost = Joiner . on ( ", " ) . join ( "{Endpoint}" , this . client . endpoint ( ) ) ; return service . updateHierarchicalEntity ( appId , versionId , hEntityId , hierarchicalModelUpdateObject , this . client . acceptLanguage ( ) , parameterizedHost , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < OperationStatus > > > ( ) { @ Override public Observable < ServiceResponse < OperationStatus > > call ( Response < ResponseBody > response ) { try { ServiceResponse < OperationStatus > clientResponse = updateHierarchicalEntityDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
public class Xml { /** * Normalize document . * @ param expression The expression to evaluate ( must not be < code > null < / code > ) . */ void normalize ( String expression ) { } }
final XPath xPath = XPathFactory . newInstance ( ) . newXPath ( ) ; try { final NodeList nodeList = ( NodeList ) xPath . evaluate ( expression , document , XPathConstants . NODESET ) ; for ( int i = 0 ; i < nodeList . getLength ( ) ; ++ i ) { final Node node = nodeList . item ( i ) ; node . getParentNode ( ) . removeChild ( node ) ; } } catch ( final XPathExpressionException exception ) { Verbose . exception ( exception ) ; }
public class ChemSequence { /** * Adds an chemModel to this container . * @ param chemModel The chemModel to be added to this container * @ see # getChemModel */ @ Override public void addChemModel ( IChemModel chemModel ) { } }
if ( chemModelCount + 1 >= chemModels . length ) { growChemModelArray ( ) ; } chemModels [ chemModelCount ] = chemModel ; chemModelCount ++ ;
public class NameUtils {
    /**
     * Returns the property name representation of the given name.
     * <p>
     * Any package prefix is stripped, the first character is lower-cased unless
     * the name starts with two upper-case letters (JavaBeans convention, e.g.
     * "URL" stays "URL"), and all whitespace is removed.
     *
     * @param name The name to convert
     * @return The property name representation
     */
    public static String getPropertyNameRepresentation(String name) {
        // Strip any package from the name.
        int pos = name.lastIndexOf('.');
        if (pos != -1) {
            name = name.substring(pos + 1);
        }
        if (name.isEmpty()) {
            return name;
        }
        // Per the JavaBeans spec, a name beginning with two upper-case letters
        // keeps its case unchanged.
        if (name.length() > 1 && Character.isUpperCase(name.charAt(0)) && Character.isUpperCase(name.charAt(1))) {
            return name;
        }
        String propertyName = name.substring(0, 1).toLowerCase(Locale.ENGLISH) + name.substring(1);
        // Strip ALL whitespace unconditionally. The previous guard only checked
        // indexOf(' '), so names containing tabs (but no spaces) kept their
        // whitespace even though the intent was to remove it.
        return propertyName.replaceAll("\\s", "");
    }
}
public class MetaService { public static void deleteQueue ( DbConn cnx , int id ) { } }
int countRunning = cnx . runSelectSingle ( "ji_select_count_by_queue" , Integer . class , id ) ; if ( countRunning > 0 ) { cnx . setRollbackOnly ( ) ; throw new JqmAdminApiUserException ( "cannot delete a queue with running instances. Disable it, wait for the end of all running instances, then retry." ) ; } cnx . runUpdate ( "dp_delete_for_queue" , id ) ; QueryResult qr = cnx . runUpdate ( "q_delete_by_id" , id ) ; if ( qr . nbUpdated != 1 ) { cnx . setRollbackOnly ( ) ; throw new JqmAdminApiUserException ( "no item with ID " + id ) ; }
public class ListAcceptedPortfolioSharesRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ListAcceptedPortfolioSharesRequest listAcceptedPortfolioSharesRequest , ProtocolMarshaller protocolMarshaller ) { } }
// Guards against a null request, then serializes each request field through its
// pre-built marshalling binding (accept language, page token, page size,
// portfolio share type). Any failure is wrapped in an SdkClientException so
// callers always see a single exception type from marshalling.
if ( listAcceptedPortfolioSharesRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listAcceptedPortfolioSharesRequest . getAcceptLanguage ( ) , ACCEPTLANGUAGE_BINDING ) ; protocolMarshaller . marshall ( listAcceptedPortfolioSharesRequest . getPageToken ( ) , PAGETOKEN_BINDING ) ; protocolMarshaller . marshall ( listAcceptedPortfolioSharesRequest . getPageSize ( ) , PAGESIZE_BINDING ) ; protocolMarshaller . marshall ( listAcceptedPortfolioSharesRequest . getPortfolioShareType ( ) , PORTFOLIOSHARETYPE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class JsonReader { /** * Reads in a key value with an expected key . * @ param expectedKey The expected key * @ return The next key value Tuple2 * @ throws IOException Something went wrong reading */ public Val nextKeyValue ( String expectedKey ) throws IOException { } }
Tuple2 < String , Val > Tuple2 = nextKeyValue ( ) ; if ( expectedKey != null && ( Tuple2 == null || ! Tuple2 . getKey ( ) . equals ( expectedKey ) ) ) { throw new IOException ( "Expected a Key-Value Tuple2 with named " + expectedKey ) ; } return Tuple2 . getV2 ( ) ;
public class KmeansCalculator { /** * クラスタリングが収束したかを判定する 。 * @ param basePoints 前回の中心点リスト * @ param newPoints 今回の中心点リスト * @ param convergenceThres 収束閾値 * @ return 収束した場合true 、 収束していない場合false */ public static boolean isConvergenced ( List < KmeansPoint > basePoints , List < KmeansPoint > newPoints , double convergenceThres ) { } }
boolean result = true ; for ( int pointIndex = 0 ; pointIndex < basePoints . size ( ) ; pointIndex ++ ) { double distance = MathUtils . distance ( basePoints . get ( pointIndex ) . getDataPoint ( ) , newPoints . get ( pointIndex ) . getDataPoint ( ) ) ; if ( distance > convergenceThres ) { result = false ; break ; } } return result ;
public class BitUtils { /** * Add Integer to the current position with the specified size * Be careful with java integer bit sign * @ param pValue * the value to set * @ param pLength * the length of the integer */ public void setNextInteger ( final int pValue , final int pLength ) { } }
if ( pLength > Integer . SIZE ) { throw new IllegalArgumentException ( "Integer overflow with length > 32" ) ; } setNextValue ( pValue , pLength , Integer . SIZE - 1 ) ;
public class JsMessagingEngineImpl { /** * ( non - Javadoc ) * @ see com . ibm . ws . sib . admin . JsEngineComponent # destroy ( ) */ @ Override public void destroy ( ) { } }
// Lifecycle teardown in strict order, bracketed by SibTr entry/exit tracing:
//  1. flip state to DESTROYING,
//  2. deregister all MBeans (only when an MBean factory was ever created),
//  3. destroy the message processor, then the message store,
//  4. flip state to DESTROYED.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "destroy" , this ) ; setState ( STATE_DESTROYING ) ; if ( _mbeanFactory != null ) ( ( MessagingMBeanFactoryImpl ) _mbeanFactory ) . deregisterAll ( ) ; _messageProcessor . destroy ( ) ; _messageStore . destroy ( ) ; setState ( STATE_DESTROYED ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "destroy" ) ;
public class Reflector { /** * return all methods that are defined by the class itself ( not extended ) * @ param clazz * @ return */ public static Method [ ] getDeclaredMethods ( Class clazz ) { } }
Method [ ] methods = clazz . getMethods ( ) ; ArrayList list = new ArrayList ( ) ; for ( int i = 0 ; i < methods . length ; i ++ ) { if ( methods [ i ] . getDeclaringClass ( ) == clazz ) list . add ( methods [ i ] ) ; } if ( list . size ( ) == 0 ) return new Method [ 0 ] ; return ( Method [ ] ) list . toArray ( new Method [ list . size ( ) ] ) ;
public class DJTimeSeriesChartBuilder { /** * Adds the specified serie column to the dataset with custom label . * @ param column the serie column */ public DJTimeSeriesChartBuilder addSerie ( AbstractColumn column , StringExpression labelExpression ) { } }
// Fluent builder step: delegates straight to the underlying dataset and
// returns this builder so calls can be chained.
getDataset ( ) . addSerie ( column , labelExpression ) ; return this ;
public class PtoPOutputHandler { /** * This method should only be called when the PtoPOutputHandler was created * for a Link with an unknown targetCellule and WLM has now told us correct * targetCellule . */ public void updateTargetCellule ( SIBUuid8 targetMEUuid ) throws SIResourceException { } }
// Records the now-known target ME UUID locally and propagates it to the
// sourceStreamManager; bracketed by the standard SibTr entry/exit tracing.
// NOTE(review): the declared SIResourceException presumably originates from
// sourceStreamManager.updateTargetCellule - nothing here throws it directly.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "updateTargetCellule" , targetMEUuid ) ; this . targetMEUuid = targetMEUuid ; sourceStreamManager . updateTargetCellule ( targetMEUuid ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "updateTargetCellule" ) ;