signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class DOMHelper {

    /**
     * Appends a new child element containing a single text node to a parent element.
     *
     * @param doc the document used as the factory for the new element and text node
     * @param parentElement the element to which the new child is appended
     * @param elementName the tag name of the new child element
     * @param elementValue the text content placed inside the new child element
     */
    public static void appendChild(Document doc, Element parentElement, String elementName, String elementValue) {
        final Element child = doc.createElement(elementName);
        // Wrap the value in a text node and nest it under the new element.
        child.appendChild(doc.createTextNode(elementValue));
        parentElement.appendChild(child);
    }
}
|
public class HttpEndpointImpl {

    /**
     * Schedule an activity to run off the SCR action thread, if the
     * ExecutorService is available. If no executor service can be obtained,
     * the action runs inline on the calling thread instead.
     *
     * @param action Runnable action to execute
     * @param addToQueue Set to false if the action should be scheduled
     *            independently of the actionQueue
     */
    @Trivial
    private void performAction(Runnable action, boolean addToQueue) {
        ExecutorService exec = executorService.getService();
        if (exec == null) {
            // If we can't find the executor service, we have to run it in place.
            action.run();
        } else {
            // If we can find the executor service, we'll add the action to the queue.
            // If the actionFuture is null (no pending actions) and the configFuture is null (no
            // pending configuration updates), we'll submit the actionsRunner to the executor
            // service to drain the queue.
            // configFuture is used to avoid bouncing the endpoint multiple times because of a
            // single configuration update.
            // actionFuture is only set to a non-null value by kicking off the executor service here.
            // actionsRunner syncs on actionQueue, so we can't add any new actions while we are
            // draining the queue. When the queue is empty, actionFuture is explicitly set to null.
            // Long story short, it prevents us from kicking off multiple executors which could run
            // in random order.
            if (addToQueue) {
                synchronized (actionQueue) {
                    actionQueue.add(action);
                    if ((actionFuture == null) && (configFuture == null)) {
                        actionFuture = exec.submit(actionsRunner);
                    }
                }
            } else {
                // Schedule immediately, bypassing the ordered action queue.
                exec.submit(action);
            }
        }
    }
}
|
public class ContractsApi { /** * Get corporation contract items Lists items of a particular contract - - -
* This route is cached for up to 3600 seconds SSO Scope :
* esi - contracts . read _ corporation _ contracts . v1 SSO Scope :
* esi - contracts . read _ character _ contracts . v1
* @ param contractId
* ID of a contract ( required )
* @ param corporationId
* An EVE corporation ID ( required )
* @ param datasource
* The server name you would like data from ( optional , default to
* tranquility )
* @ param ifNoneMatch
* ETag from a previous request . A 304 will be returned if this
* matches the current ETag ( optional )
* @ param token
* Access token to use if unable to set a header ( optional )
* @ return ApiResponse & lt ; List & lt ; CorporationContractsItemsResponse & gt ; & gt ;
* @ throws ApiException
* If fail to call the API , e . g . server error or cannot
* deserialize the response body */
public ApiResponse < List < CorporationContractsItemsResponse > > getCorporationsCorporationIdContractsContractIdItemsWithHttpInfo ( Integer contractId , Integer corporationId , String datasource , String ifNoneMatch , String token ) throws ApiException { } }
|
com . squareup . okhttp . Call call = getCorporationsCorporationIdContractsContractIdItemsValidateBeforeCall ( contractId , corporationId , datasource , ifNoneMatch , token , null ) ; Type localVarReturnType = new TypeToken < List < CorporationContractsItemsResponse > > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
|
public class HttpJsonSerializer { /** * Parses one or more data points for storage
* @ return an array of data points to process for storage
* @ throws JSONException if parsing failed
* @ throws BadRequestException if the content was missing or parsing failed
* @ since 2.4 */
@ Override public < T extends IncomingDataPoint > List < T > parsePutV1 ( final Class < T > type , final TypeReference < ArrayList < T > > typeReference ) { } }
|
if ( ! query . hasContent ( ) ) { throw new BadRequestException ( "Missing request content" ) ; } // convert to a string so we can handle character encoding properly
final String content = query . getContent ( ) . trim ( ) ; final int firstbyte = content . charAt ( 0 ) ; try { if ( firstbyte == '{' ) { final T dp = JSON . parseToObject ( content , type ) ; final ArrayList < T > dps = new ArrayList < T > ( 1 ) ; dps . add ( dp ) ; return dps ; } else if ( firstbyte == '[' ) { return JSON . parseToObject ( content , typeReference ) ; } else { throw new BadRequestException ( "The JSON must start as an object or an array" ) ; } } catch ( IllegalArgumentException iae ) { throw new BadRequestException ( "Unable to parse the given JSON" , iae ) ; }
|
public class FmtDuration { /** * { @ inheritDoc }
* @ throws SuperCsvCellProcessorException
* if value is null or not a Duration */
public Object execute ( final Object value , final CsvContext context ) { } }
|
validateInputNotNull ( value , context ) ; if ( ! ( value instanceof Duration ) ) { throw new SuperCsvCellProcessorException ( Duration . class , value , context , this ) ; } final Duration duration = ( Duration ) value ; final String result = duration . toString ( ) ; return next . execute ( result , context ) ;
|
public class Args {

    /**
     * Produces the flag form of a string value.
     *
     * @param name the flag name (emitted as {@code --name})
     * @param value the flag value; may be null
     * @return {@code [--name, value]}, or {@code []} if value is null or empty
     *         (the original javadoc only mentioned null, but the empty string
     *         was and is treated the same way)
     */
    static List<String> string(String name, String value) {
        // Stdlib equivalent of Guava's Strings.isNullOrEmpty(value).
        if (value == null || value.isEmpty()) {
            return Collections.emptyList();
        }
        return Arrays.asList("--" + name, value);
    }
}
|
public class CmsDefaultUserSettings { /** * Sets the default copy mode when copying a folder of the user . < p >
* @ param mode the default copy mode when copying a folder of the user */
public void setDialogCopyFolderMode ( String mode ) { } }
|
CmsResourceCopyMode copyMode = CmsResource . COPY_AS_NEW ; if ( mode . equalsIgnoreCase ( COPYMODE_SIBLING ) ) { copyMode = CmsResource . COPY_AS_SIBLING ; } else if ( mode . equalsIgnoreCase ( COPYMODE_PRESERVE ) ) { copyMode = CmsResource . COPY_PRESERVE_SIBLING ; } setDialogCopyFolderMode ( copyMode ) ;
|
public class U { /** * Documented , # slice */
public static < T > List < T > slice ( final Iterable < T > iterable , final int start ) { } }
|
final List < T > result ; if ( start >= 0 ) { result = newArrayList ( iterable ) . subList ( start , size ( iterable ) ) ; } else { result = newArrayList ( iterable ) . subList ( size ( iterable ) + start , size ( iterable ) ) ; } return result ;
|
public class MathExpressions {

    /**
     * Create a {@code ln(num)} expression.
     *
     * <p>Returns the natural logarithm of num.</p>
     *
     * @param num numeric expression
     * @return ln(num)
     */
    public static <A extends Number & Comparable<?>> NumberExpression<Double> ln(Expression<A> num) {
        // Delegates to the generic numeric-operation factory with the LN operator;
        // the result is typed Double regardless of the argument's numeric type.
        return Expressions.numberOperation(Double.class, Ops.MathOps.LN, num);
    }
}
|
public class MultiPageWidget {

    /**
     * Recalculate view port for the multi page widget based on new adapter.
     * Data adapter has to have universal view size, see {@link Adapter#hasUniformViewSize()}.
     *
     * The view port is only recomputed when a page-count cap is set
     * ({@code mMaxVisiblePageCount < Integer.MAX_VALUE}), the adapter is non-null,
     * and the adapter reports uniform view sizes; otherwise this is a no-op
     * apart from the initial debug log.
     *
     * @param adapter the data adapter driving the layout; may be null
     */
    protected void recalculateViewPort(final Adapter adapter) {
        Log.d(Log.SUBSYSTEM.LAYOUT, TAG, "recalculateViewPort mMaxVisiblePageCount = %d mAdapter =%s " +
                "mAdapter.hasUniformViewSize() = %b", mMaxVisiblePageCount, adapter,
                (adapter != null ? adapter.hasUniformViewSize() : false));
        if (mMaxVisiblePageCount < Integer.MAX_VALUE && adapter != null && adapter.hasUniformViewSize()) {
            // Build the id list [0, num) for the pages that can be visible at once.
            int num = Math.min(mMaxVisiblePageCount, adapter.getCount());
            int[] ids = new int[num];
            for (int i = 0; i < num; ++i) {
                ids[i] = i;
            }
            // Take the maximum extent over all layouts in each dimension,
            // skipping NaN results (a layout may not constrain a dimension).
            float width = 0, height = 0, depth = 0;
            for (Layout listLayout : mLayouts) {
                listLayout.enableClipping(true);
                float w = listLayout.calculateWidth(ids);
                if (!Float.isNaN(w)) {
                    width = Math.max(w, width);
                }
                float h = listLayout.calculateHeight(ids);
                if (!Float.isNaN(h)) {
                    height = Math.max(h, height);
                }
                float d = listLayout.calculateDepth(ids);
                if (!Float.isNaN(d)) {
                    depth = Math.max(d, depth);
                }
            }
            // Log.d(Log.SUBSYSTEM.LAYOUT,
            Log.d(TAG, "recalculateViewPort(%s) mMaxVisiblePageCount = %d [%f, %f, %f]",
                    getName(), mMaxVisiblePageCount, width, height, depth);
            setViewPortWidth(width);
            setViewPortHeight(height);
            setViewPortDepth(depth);
        }
    }
}
|
public class StatefulSessionActivationStrategy {

    /**
     * When stateful beans are uninstalled we destroy the beans. The beans
     * are removed from the cache or passivation directory and removed from
     * the Stateful Bean Reaper.<p>
     *
     * If the module is reinstalled and the same reference is reused, then an
     * Exception will occur indicating that the object could not be found.<p>
     *
     * Previously, the beans were passivated, but in addition to this causing
     * too much overhead during server shutdown, it is also likely that the
     * bean class has been changed in an incompatible way across a module
     * stop and start. d106838
     *
     * Also, removing the bean (i.e. calling ejbRemove) can also be
     * problematic as ejbRemove frequently contains calls to remove other
     * beans.<p>
     *
     * Currently, they are "uninstalled", which transitions the bean to
     * passivated and then destroyed without actually streaming the
     * instance to a file. d112866
     *
     * For z/OS, probably need a way to call this when a bean is truly being
     * uninstalled so that we can delete Stateful Session beans for the same
     * reason stated earlier. This method is not called on z/OS. Instead we
     * call atPassivate from Activator.uninstallBean.
     */
    @Override
    void atUninstall(BeanId beanId, BeanO bean) {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        if (isTraceOn && tc.isEntryEnabled())
            Tr.entry(tc, "atUninstall (" + beanId + ")");

        // The 'bean' parameter may be null (i.e. when calling to uninstall
        // a passivated bean), so don't count on it. LI3408
        MasterKey key = new MasterKey(beanId);
        synchronized (locks.getLock(key)) {
            if ((bean = (BeanO) cache.find(key)) != null) {
                // Regardless of whether the bean has timed out or not, remove it
                // from the cache and reaper and call uninstall, which will result
                // in ejbPassivate being called for this bean, but skip streaming
                // the bean out to a file. d106838 d112866
                if (isTraceOn && tc.isEventEnabled())
                    Tr.event(tc, "Found bean in cache: uninstalling");
                try {
                    cache.remove(key, true);
                    bean.ivCacheKey = null; // d199233
                    reaper.remove(beanId); // d159666.1
                    ((StatefulBeanO) bean).uninstall(); // d112866
                } catch (IllegalOperationException ioe) {
                    // This occurs if the application is stopped while the bean
                    // is enlisted with an active transaction (i.e. probably hung).
                    // Nothing should be done except release the pin taken above
                    // on the find. d159666.1
                    FFDCFilter.processException(ioe, CLASS_NAME + ".atUninstall", "590", this);
                    if (isTraceOn && tc.isDebugEnabled())
                        Tr.debug(tc, "Found bean in cache: active!");
                    cache.unpin(key);
                }
            } else {
                // Since the bean is not in the cache, most likely, the bean
                // is in the passivated state, so the file just needs to be
                // deleted. However, it is also possible the StatefulBeanReaper
                // has already removed the file. d129562
                if (isTraceOn && tc.isDebugEnabled())
                    Tr.debug(tc, "Bean not in cache: removing file");
                try {
                    // Check if the bean is in the reaper list. If so, then
                    // delete the file used when the bean was passivated. Note,
                    // we do not want to remove the failover cache entry if the reason
                    // we are uninstalling is because the server is shutting down.
                    // That is why we pass false to the Passivator remove method.
                    if (reaper.remove(beanId)) {
                        ivPassivator.remove(beanId, false); // LIDB2018-1
                    }
                } catch (RemoteException rex) {
                    // Just log the exception. Not being able to remove the file
                    // should not be cause to fail the uninstall of the bean.
                    FFDCFilter.processException(rex, CLASS_NAME + ".atUninstall", "598", this);
                    Tr.warning(tc, "REMOVE_FROM_PASSIVATION_STORE_FAILED_CNTR0016W",
                               new Object[] { beanId, rex });
                }
            }
        }
        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(tc, "atUninstall");
    }
}
|
public class Numbers { /** * Canonicalizes the given { @ link Number } value for the purpose of a
* hash - based lookup .
* The method accepts a { @ link Comparable } instance and returns one because
* query engine acts on comparable values , but { @ link Number } is not
* comparable .
* Special numeric canonicalization logic is used for { @ link Double } ,
* { @ link Long } , { @ link Float } , { @ link Integer } , { @ link Short } and
* { @ link Byte } : all whole numbers in the [ { @ link Long # MIN _ VALUE } ,
* { @ link Long # MAX _ VALUE } ] range are represented as { @ link Long } , all other
* whole and non - whole numbers are presented as { @ link Double } . That logic
* allows to mix numeric types while performing hash - based lookups , e . g .
* while using one of the standard { @ link java . util . Map } implementations .
* @ param value the { @ link Number } to canonicalize .
* @ return a canonical representation of the given { @ link Number } or
* the original { @ link Number } if there is no special canonical
* representation for it . */
public static Comparable canonicalizeForHashLookup ( Comparable value ) { } }
|
Class clazz = value . getClass ( ) ; assert value instanceof Number ; Number number = ( Number ) value ; if ( isDoubleRepresentable ( clazz ) ) { double doubleValue = number . doubleValue ( ) ; long longValue = number . longValue ( ) ; if ( equalDoubles ( doubleValue , ( double ) longValue ) ) { return longValue ; } else if ( clazz == Float . class ) { return doubleValue ; } } else if ( isLongRepresentableExceptLong ( clazz ) ) { return number . longValue ( ) ; } return value ;
|
public class Main { /** * Prepare a command line for execution from a Windows batch script .
* The method quotes all arguments so that spaces are handled as expected . Quotes within arguments
* are " double quoted " ( which is batch for escaping a quote ) . This page has more details about
* quoting and other batch script fun stuff : http : / / ss64 . com / nt / syntax - esc . html */
private static String prepareWindowsCommand ( List < String > cmd , Map < String , String > childEnv ) { } }
|
StringBuilder cmdline = new StringBuilder ( ) ; for ( Map . Entry < String , String > e : childEnv . entrySet ( ) ) { cmdline . append ( String . format ( "set %s=%s" , e . getKey ( ) , e . getValue ( ) ) ) ; cmdline . append ( " && " ) ; } for ( String arg : cmd ) { cmdline . append ( quoteForBatchScript ( arg ) ) ; cmdline . append ( " " ) ; } return cmdline . toString ( ) ;
|
public class DistortImageOps { /** * Finds an axis - aligned bounding box which would contain a image after it has been transformed .
* The returned bounding box can be larger then the original image .
* @ param srcWidth Width of the source image
* @ param srcHeight Height of the source image
* @ param transform Transform being applied to the image
* @ return Bounding box */
public static RectangleLength2D_I32 boundBox ( int srcWidth , int srcHeight , Point2D_F32 work , PixelTransform < Point2D_F32 > transform ) { } }
|
int x0 , y0 , x1 , y1 ; transform . compute ( 0 , 0 , work ) ; x0 = x1 = ( int ) work . x ; y0 = y1 = ( int ) work . y ; for ( int i = 1 ; i < 4 ; i ++ ) { if ( i == 1 ) transform . compute ( srcWidth , 0 , work ) ; else if ( i == 2 ) transform . compute ( 0 , srcHeight , work ) ; else if ( i == 3 ) transform . compute ( srcWidth - 1 , srcHeight , work ) ; if ( work . x < x0 ) x0 = ( int ) work . x ; else if ( work . x > x1 ) x1 = ( int ) work . x ; if ( work . y < y0 ) y0 = ( int ) work . y ; else if ( work . y > y1 ) y1 = ( int ) work . y ; } return new RectangleLength2D_I32 ( x0 , y0 , x1 - x0 , y1 - y0 ) ;
|
public class JacksonJson { /** * Unmarshal the JSON data on the specified Reader instance and populate a List of instances of the provided returnType class .
* @ param < T > the generics type for the List
* @ param returnType an instance of this type class will be contained in the returned List
* @ param reader the Reader instance that contains the JSON data
* @ return a List of the provided class containing the parsed data from the Reader
* @ throws JsonParseException when an error occurs parsing the provided JSON
* @ throws JsonMappingException if a JSON error occurs
* @ throws IOException if an error occurs reading the JSON data */
public < T > List < T > unmarshalList ( Class < T > returnType , Reader reader ) throws JsonParseException , JsonMappingException , IOException { } }
|
ObjectMapper objectMapper = getContext ( null ) ; CollectionType javaType = objectMapper . getTypeFactory ( ) . constructCollectionType ( List . class , returnType ) ; return ( objectMapper . readValue ( reader , javaType ) ) ;
|
public class ProductSearchClient { /** * Adds a Product to the specified ProductSet . If the Product is already present , no change is
* made .
* < p > One Product can be added to at most 100 ProductSets .
* < p > Possible errors :
* < p > & # 42 ; Returns NOT _ FOUND if the Product or the ProductSet doesn ' t exist .
* < p > Sample code :
* < pre > < code >
* try ( ProductSearchClient productSearchClient = ProductSearchClient . create ( ) ) {
* ProductSetName name = ProductSetName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ PRODUCT _ SET ] " ) ;
* ProductName product = ProductName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ PRODUCT ] " ) ;
* productSearchClient . addProductToProductSet ( name , product ) ;
* < / code > < / pre >
* @ param name The resource name for the ProductSet to modify .
* < p > Format is : ` projects / PROJECT _ ID / locations / LOC _ ID / productSets / PRODUCT _ SET _ ID `
* @ param product The resource name for the Product to be added to this ProductSet .
* < p > Format is : ` projects / PROJECT _ ID / locations / LOC _ ID / products / PRODUCT _ ID `
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final void addProductToProductSet ( ProductSetName name , ProductName product ) { } }
|
AddProductToProductSetRequest request = AddProductToProductSetRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . setProduct ( product == null ? null : product . toString ( ) ) . build ( ) ; addProductToProductSet ( request ) ;
|
public class ChatDirector {

    /**
     * Dispatches the provided message to our chat displays.
     *
     * First stamps the message with client-side info derived from the local
     * type, then hands the prepared message off for delivery.
     *
     * @param message the chat message to deliver
     * @param localType the local message type used to fill in client info
     */
    public void dispatchMessage(ChatMessage message, String localType) {
        setClientInfo(message, localType);
        dispatchPreparedMessage(message);
    }
}
|
public class KubernetesScheduler {

    /**
     * Add containers for a scale-up event from an update command.
     *
     * @param containersToAdd the list of containers that need to be added
     * @return the same set of container plans that was requested
     *
     * NOTE: Due to the mechanics of Kubernetes pod creation, each container must be created on
     * a one-by-one basis. If one container out of many containers to be deployed failed, it will
     * leave the topology in a bad state.
     *
     * TODO (jrcrawfo) -- (https://github.com/apache/incubator-heron/issues/1981)
     */
    @Override
    public Set<PackingPlan.ContainerPlan> addContainers(Set<PackingPlan.ContainerPlan> containersToAdd) {
        // Delegate the actual pod creation to the controller; the input set is
        // returned unchanged as the set of (requested) additions.
        controller.addContainers(containersToAdd);
        return containersToAdd;
    }
}
|
public class JDBCBlob { /** * Returns an < code > InputStream < / code > object that contains a partial < code > Blob < / code > value ,
* starting with the byte specified by pos , which is length bytes in length .
* @ param pos the offset to the first byte of the partial value to be retrieved .
* The first byte in the < code > Blob < / code > is at position 1
* @ param length the length in bytes of the partial value to be retrieved
* @ return < code > InputStream < / code > through which the partial < code > Blob < / code > value can be read .
* @ throws SQLException if pos is less than 1 or if pos is greater than the number of bytes
* in the < code > Blob < / code > or if pos + length is greater than the number of bytes
* in the < code > Blob < / code >
* @ exception SQLFeatureNotSupportedException if the JDBC driver does not support
* this method
* @ since JDK 1.6 , HSQLDB 1.9.0 */
public InputStream getBinaryStream ( long pos , long length ) throws SQLException { } }
|
final byte [ ] ldata = data ; checkValid ( ldata ) ; final int dlen = ldata . length ; if ( pos < MIN_POS || pos > dlen ) { throw Util . outOfRangeArgument ( "pos: " + pos ) ; } pos -- ; if ( length < 0 || length > dlen - pos ) { throw Util . outOfRangeArgument ( "length: " + length ) ; } if ( pos == 0 && length == dlen ) { return new ByteArrayInputStream ( ldata ) ; } // Let client decide on policy .
// Zero - copy is ( possibly much ) faster because it does
// not allocate extra memory and does not involve copy
// processing .
// However , because it could lead to unexpected memory ,
// stress , it is not polite to always pass back a stream
// whose buffer is the full size required to represent the
// underlying BLOB value .
// if ( isGetBinaryStreamUsesCopy ( ) ) {
final byte [ ] out = new byte [ ( int ) length ] ; System . arraycopy ( ldata , ( int ) pos , out , 0 , ( int ) length ) ; return new ByteArrayInputStream ( out ) ; // } else {
// return new BinaryInputStream ( ldata , pos , length ) ;
|
public class Validate {

    /**
     * Method without varargs to increase performance.
     *
     * Delegates to the singleton instance's validIndex; validates that
     * {@code index} is a valid index of {@code collection} and returns the
     * collection unchanged.
     *
     * @param collection the collection whose index is checked
     * @param index the index to validate
     * @param message the exception message if the index is invalid
     * @return the validated collection (unchanged)
     */
    public static <T extends Collection<?>> T validIndex(final T collection, final int index, final String message) {
        return INSTANCE.validIndex(collection, index, message);
    }
}
|
public class Utils {

    /**
     * Load all the byte data from an input stream.
     *
     * FIX: the original copied into a fixed 10,000,000-byte scratch array, so any
     * stream larger than 10 MB threw ArrayIndexOutOfBoundsException, every call
     * allocated 10 MB up front, and the stream was not closed if reading failed.
     * This version grows as needed and always closes the wrapping stream.
     *
     * @param stream the input stream from which to read (closed when done)
     * @return a byte array containing all the data from the stream
     * @throws RuntimeException wrapping any IOException raised while reading
     */
    public static byte[] loadBytesFromStream(InputStream stream) {
        try {
            BufferedInputStream bis = new BufferedInputStream(stream);
            try {
                // ByteArrayOutputStream grows automatically — no size cap.
                ByteArrayOutputStream collected = new ByteArrayOutputStream();
                byte[] buffer = new byte[1024];
                int read;
                while ((read = bis.read(buffer)) != -1) {
                    collected.write(buffer, 0, read);
                }
                return collected.toByteArray();
            } finally {
                bis.close();
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}
|
public class TaskResult {

    /**
     * Inserts a Bundle value into the mapping of this Bundle, replacing any existing value for
     * the given key. Either key or value may be null.
     *
     * @param key a String, or null
     * @param value a Bundle object, or null
     * @return itself, for call chaining
     */
    public TaskResult add(String key, Bundle value) {
        mBundle.putBundle(key, value);
        return this;
    }
}
|
public class AboutJenkins {

    /**
     * A pre-check to see if a string is a build timestamp formatted date
     * ({@code yyyy-MM-dd_HH-mm-ss}).
     *
     * Each position is validated against a template: separator positions must
     * match exactly, and each digit position has a maximum allowed digit
     * (e.g. the year may only start with 0-2, the month-tens digit with 0-1,
     * the minute/second-tens digits with 0-5).
     *
     * @param s the string.
     * @return {@code true} if it is likely that the string will parse as a build timestamp
     *         formatted date.
     */
    static boolean mayBeDate(String s) {
        // Per-position upper bounds; '-' and '_' mark required separators.
        final String template = "2999-19-39_29-59-59";
        if (s == null || s.length() != template.length()) {
            return false;
        }
        for (int i = 0; i < template.length(); i++) {
            final char limit = template.charAt(i);
            final char c = s.charAt(i);
            if (limit == '-' || limit == '_') {
                if (c != limit) {
                    return false;
                }
            } else if (c < '0' || c > limit) {
                return false;
            }
        }
        return true;
    }
}
|
public class ToSparseArray { /** * Returns a method that can be used with { @ link solid . stream . Stream # collect ( Func1 ) }
* to convert a stream into a { @ link SparseArray } .
* Use this method instead of { @ link # toSparseArray ( Func1 ) } } for better performance on
* streams that can have more than 10 items .
* @ param < T > a type of stream items .
* @ param itemToKey a method that should return a key for an item .
* @ param initialCapacity initial capacity on the sparse array .
* @ return a method that converts an iterable into a { @ link SparseArray } . */
public static < T > Func1 < Iterable < T > , SparseArray < T > > toSparseArray ( final Func1 < T , Integer > itemToKey , final int initialCapacity ) { } }
|
return new Func1 < Iterable < T > , SparseArray < T > > ( ) { @ Override public SparseArray < T > call ( Iterable < T > iterable ) { SparseArray < T > array = new SparseArray < > ( initialCapacity ) ; for ( T value : iterable ) array . put ( itemToKey . call ( value ) , value ) ; return array ; } } ;
|
public class JsonArrayDeserializer { /** * { @ inheritDoc } */
@ Override public JsonArray deserialize ( JsonParser p , DeserializationContext ctxt ) throws IOException , JsonProcessingException { } }
|
List list = p . readValueAs ( List . class ) ; return new JsonArray ( list ) ;
|
public class CalendarControl {

    /**
     * UpdateCalendar Method.
     *
     * Rebuilds the calendar's anniversary entries for the given date range:
     * deletes Anniversary records that fall before this control's start
     * anniversary date, then asks each AnnivMaster record to (re)add its
     * appointments into the Anniversary table for [calStart, calEnd].
     *
     * @param calStart start of the date range to populate
     * @param calEnd end of the date range to populate
     */
    public void updateCalendar(Calendar calStart, Calendar calEnd) {
        Anniversary recAnniversary = new Anniversary(this.getRecordOwner());
        AnnivMaster recAnnivMaster = new AnnivMaster(this.getRecordOwner());
        // Iterate anniversaries in start-date order so we can stop at the range.
        recAnniversary.setKeyArea(Anniversary.START_DATE_TIME_KEY);
        try {
            while (recAnniversary.hasNext()) {
                recAnniversary.next();
                if (recAnniversary.getField(Anniversary.START_DATE_TIME)
                        .compareTo(this.getField(CalendarControl.START_ANNIV_DATE)) > 0)
                    break; // end of the entries that are not in the current range.
                recAnniversary.edit();
                recAnniversary.remove();
            }
            // Regenerate appointments from every master record for this range.
            while (recAnnivMaster.hasNext()) {
                recAnnivMaster.next();
                recAnnivMaster.addAppointments(recAnniversary, calStart, calEnd);
            }
        } catch (DBException ex) {
            // NOTE(review): failures are only printed, not propagated — the
            // calendar may silently end up partially updated; confirm intended.
            ex.printStackTrace();
        } finally {
            // Always release both record cursors.
            recAnniversary.free();
            recAnnivMaster.free();
        }
    }
}
|
public class PropertyChangeSupportUtils { /** * Removes a named property change listener to the given JavaBean . The bean
* must provide the optional support for listening on named properties
* as described in section 7.4.5 of the
* < a href = " http : / / java . sun . com / products / javabeans / docs / spec . html " > Java Bean
* Specification < / a > . The bean class must provide the method :
* < pre >
* public void removePropertyChangeHandler ( String , PropertyChangeListener ) ;
* < / pre >
* @ param bean the bean to remove the property change listener from
* @ param propertyName the name of the observed property
* @ param listener the listener to remove
* @ throws FatalBeanException
* if the property change handler cannot be removed successfully */
public static void removePropertyChangeListener ( Object bean , String propertyName , PropertyChangeListener listener ) { } }
|
Assert . notNull ( propertyName , "The property name must not be null." ) ; Assert . notNull ( listener , "The listener must not be null." ) ; if ( bean instanceof PropertyChangePublisher ) { ( ( PropertyChangePublisher ) bean ) . removePropertyChangeListener ( propertyName , listener ) ; } else { Class beanClass = bean . getClass ( ) ; Method namedPCLRemover = getNamedPCLRemover ( beanClass ) ; if ( namedPCLRemover == null ) throw new FatalBeanException ( "Could not find the bean method" + "/npublic void removePropertyChangeListener(String, PropertyChangeListener);/nin bean '" + bean + "'" ) ; try { namedPCLRemover . invoke ( bean , new Object [ ] { propertyName , listener } ) ; } catch ( InvocationTargetException e ) { throw new FatalBeanException ( "Due to an InvocationTargetException we failed to remove " + "a named PropertyChangeListener from bean '" + bean + "'" , e ) ; } catch ( IllegalAccessException e ) { throw new FatalBeanException ( "Due to an IllegalAccessException we failed to remove " + "a named PropertyChangeListener from bean '" + bean + "'" , e ) ; } }
|
public class nstrafficdomain_stats {

    /**
     * Use this API to fetch statistics of nstrafficdomain_stats resource of given name.
     *
     * @param service the NITRO service session used to issue the request
     * @param td the traffic-domain id identifying which resource's stats to fetch
     * @return the statistics resource returned by the service
     * @throws Exception if the underlying NITRO call fails
     */
    public static nstrafficdomain_stats get(nitro_service service, Long td) throws Exception {
        // Build a request object keyed by the traffic-domain id, then execute it.
        nstrafficdomain_stats obj = new nstrafficdomain_stats();
        obj.set_td(td);
        nstrafficdomain_stats response = (nstrafficdomain_stats) obj.stat_resource(service);
        return response;
    }
}
|
public class ConfigurationAdminImpl { /** * @ see
* org . osgi . service . cm . ConfigurationAdmin # createFactoryConfiguration ( java .
* lang . String )
* When a Configuration object is created by either getConfiguration or
* createFactoryConfiguration , it becomes bound to the location of the calling
* bundle . This location is obtained with the associated bundle ' s getLocation
* method . */
@ Override public ExtendedConfiguration createFactoryConfiguration ( String factoryPid ) throws IOException { } }
|
ExtendedConfiguration config = caFactory . getConfigurationStore ( ) . createFactoryConfiguration ( factoryPid , bundle . getLocation ( ) ) ; return config ;
|
public class DecoratingDynamicTypeBuilder {

    /**
     * {@inheritDoc}
     *
     * Returns a new builder identical to this one, except that the supplied
     * matcher is OR-ed (disjunction) with the current ignored-method matcher.
     * Builders are immutable, so every field is passed through unchanged
     * apart from the widened ignore matcher.
     */
    @SuppressWarnings("unchecked")
    public DynamicType.Builder<T> ignoreAlso(LatentMatcher<? super MethodDescription> ignoredMethods) {
        // NOTE: argument order must match the canonical constructor exactly.
        return new DecoratingDynamicTypeBuilder<T>(instrumentedType,
                typeAttributeAppender,
                asmVisitorWrapper,
                classFileVersion,
                auxiliaryTypeNamingStrategy,
                annotationValueFilterFactory,
                annotationRetention,
                implementationContextFactory,
                methodGraphCompiler,
                typeValidation,
                classWriterStrategy,
                new LatentMatcher.Disjunction<MethodDescription>(this.ignoredMethods, ignoredMethods),
                auxiliaryTypes,
                classFileLocator);
    }
}
|
public class MavenImportUtils { /** * Restore the original pom file .
* @ param projectDir the folder in which the pom file is located .
* @ param monitor the progress monitor .
* @ throws IOException if the pom file cannot be changed . */
static void restorePom ( File projectDir , IProgressMonitor monitor ) throws IOException { } }
|
final File pomFile = new File ( projectDir , POM_FILE ) ; final File savedPomFile = new File ( projectDir , POM_BACKUP_FILE ) ; if ( savedPomFile . exists ( ) ) { if ( pomFile . exists ( ) ) { pomFile . delete ( ) ; } Files . copy ( savedPomFile , pomFile ) ; savedPomFile . delete ( ) ; } monitor . worked ( 1 ) ;
|
public class Isotope {

    /**
     * Compares an isotope with this isotope.
     *
     * Two isotopes are considered equal when the superclass comparison passes
     * and, for mass number, exact mass and natural abundance, either both
     * values match (numeric fields within 1e-7) or the other isotope's value
     * is non-null whenever this one's is.
     *
     * NOTE(review): the null checks are asymmetric — the case "other isotope
     * has a value but this one is null" is not rejected, so compare() may not
     * be symmetric; confirm whether that is intended.
     *
     * @param object Object of type Isotope
     * @return true if the isotopes are equal
     */
    @Override
    public boolean compare(Object object) {
        if (!(object instanceof Isotope)) {
            return false;
        }
        if (!super.compare(object)) {
            return false;
        }
        Isotope isotope = (Isotope) object;
        // Mass number: both present -> must be equal; other null while ours set -> unequal.
        if (isotope.getMassNumber() != null && massNumber != null
                && isotope.getMassNumber().intValue() != this.getMassNumber().intValue())
            return false;
        if (isotope.getMassNumber() == null && massNumber != null)
            return false;
        // Exact mass: compared with a small tolerance to absorb floating-point noise.
        if (isotope.getExactMass() != null && exactMass != null) {
            double diff = Math.abs(isotope.getExactMass().doubleValue() - this.getExactMass().doubleValue());
            if (diff > 0.0000001)
                return false;
        }
        if (isotope.getExactMass() == null && exactMass != null)
            return false;
        // Natural abundance: same tolerance-based comparison.
        if (isotope.getNaturalAbundance() != null && naturalAbundance != null) {
            double diff = Math.abs(isotope.getNaturalAbundance().doubleValue() - this.getNaturalAbundance().doubleValue());
            if (diff > 0.0000001)
                return false;
        }
        if (isotope.getNaturalAbundance() == null && naturalAbundance != null)
            return false;
        return true;
    }
}
|
public class MapScaleBar { /** * Determines if a redraw is necessary or not
* @ return true if redraw is necessary , false otherwise */
protected boolean isRedrawNecessary ( ) { } }
|
if ( this . redrawNeeded || this . prevMapPosition == null ) { return true ; } MapPosition currentMapPosition = this . mapViewPosition . getMapPosition ( ) ; if ( currentMapPosition . zoomLevel != this . prevMapPosition . zoomLevel ) { return true ; } double latitudeDiff = Math . abs ( currentMapPosition . latLong . latitude - this . prevMapPosition . latLong . latitude ) ; return latitudeDiff > LATITUDE_REDRAW_THRESHOLD ;
|
public class ReferenceEntityLockService {
    /**
     * Answers if the entity represented by the entityType and entityKey already has a lock of some
     * type.
     *
     * @param entityType the class of the entity to check
     * @param entityKey the key identifying the entity instance
     * @return true if a lock exists for the entity
     * @exception org.apereo.portal.concurrency.LockingException
     */
    private boolean isLocked(Class entityType, String entityKey) throws LockingException {
        // Delegates to the three-argument overload with a null lock type —
        // presumably meaning "any lock type"; confirm against the overload's contract.
        return isLocked(entityType, entityKey, null);
    }
}
|
public class ReferenceCountedOpenSslEngine {
    /**
     * Read plaintext data from the OpenSSL internal BIO.
     *
     * @param dst destination buffer; on a successful read its position is advanced by the
     *            number of bytes read, its limit is restored to its original value
     * @return the value returned by {@code SSL.readFromSSL(...)} — the number of bytes read
     *         when positive; non-positive values are presumably OpenSSL status/error codes
     *         (confirm against netty-tcnative)
     */
    private int readPlaintextData(final ByteBuffer dst) {
        final int sslRead;
        final int pos = dst.position();
        if (dst.isDirect()) {
            // Direct buffer: OpenSSL can write straight into its native memory address.
            sslRead = SSL.readFromSSL(ssl, bufferAddress(dst) + pos, dst.limit() - pos);
            if (sslRead > 0) {
                dst.position(pos + sslRead);
            }
        } else {
            // Heap buffer: read into a temporary direct buffer first, then copy over.
            final int limit = dst.limit();
            final int len = min(maxEncryptedPacketLength0(), limit - pos);
            final ByteBuf buf = alloc.directBuffer(len);
            try {
                sslRead = SSL.readFromSSL(ssl, memoryAddress(buf), len);
                if (sslRead > 0) {
                    // Temporarily cap the limit so getBytes copies exactly sslRead bytes,
                    // then restore the original limit.
                    dst.limit(pos + sslRead);
                    buf.getBytes(buf.readerIndex(), dst);
                    dst.limit(limit);
                }
            } finally {
                // Always release the temporary direct buffer back to the allocator.
                buf.release();
            }
        }
        return sslRead;
    }
}
|
public class LdapAuthFilter { /** * Handles an authentication request .
* @ param request HTTP request
* @ param response HTTP response
* @ return an authentication object that contains the principal object if successful .
* @ throws IOException ex */
@ Override public Authentication attemptAuthentication ( HttpServletRequest request , HttpServletResponse response ) throws IOException { } }
|
final String requestURI = request . getRequestURI ( ) ; UserAuthentication userAuth = null ; String username = request . getParameter ( USERNAME ) ; String password = request . getParameter ( PASSWORD ) ; String appid = SecurityUtils . getAppidFromAuthRequest ( request ) ; if ( requestURI . endsWith ( LDAP_ACTION ) && ! StringUtils . isBlank ( username ) && ! StringUtils . isBlank ( password ) ) { try { App app = Para . getDAO ( ) . read ( App . id ( appid == null ? Config . getRootAppIdentifier ( ) : appid ) ) ; Authentication auth = new LDAPAuthentication ( username , password ) . withApp ( app ) ; // set authentication in context to avoid warning message from SpringSecurityAuthenticationSource
SecurityContextHolder . getContext ( ) . setAuthentication ( new AnonymousAuthenticationToken ( "key" , "anonymous" , AuthorityUtils . createAuthorityList ( "ROLE_ANONYMOUS" ) ) ) ; Authentication ldapAuth = getAuthenticationManager ( ) . authenticate ( auth ) ; if ( ldapAuth != null ) { // success !
userAuth = getOrCreateUser ( app , ldapAuth ) ; } } catch ( Exception ex ) { LOG . info ( "Failed to authenticate '{}' with LDAP server: {}" , username , ex . getMessage ( ) ) ; throw new AuthenticationServiceException ( ex . getMessage ( ) , ex . getCause ( ) ) ; } } return SecurityUtils . checkIfActive ( userAuth , SecurityUtils . getAuthenticatedUser ( userAuth ) , true ) ;
|
public class ConnectionDecorator {
    /**
     * {@inheritDoc}
     *
     * <p>Delegates directly to the wrapped connection.
     */
    @Override
    public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException {
        getTarget().setNetworkTimeout(executor, milliseconds);
    }
}
|
public class ProgressListener { /** * Protected so that a subclass can override how a consumer is invoked , particularly how an exception is handled .
* @ param consumer
* @ param progressUpdate */
protected void invokeConsumer ( Consumer < ProgressUpdate > consumer , ProgressUpdate progressUpdate ) { } }
|
try { consumer . accept ( progressUpdate ) ; } catch ( Throwable t ) { logger . error ( "Exception thrown by a Consumer<ProgressUpdate> consumer: " + consumer + "; progressUpdate: " + progressUpdate , t ) ; }
|
public class GreenPepperServerServiceImpl {
    /**
     * {@inheritDoc}
     *
     * <p>Opens a persistence session, checks whether any document references exist for
     * the given requirement, and always closes the session afterwards.
     */
    public boolean doesRequirementHasReferences(Requirement requirement) throws GreenPepperServerException {
        try {
            sessionService.startSession();
            boolean hasReferences = !documentDao.getAllReferences(requirement).isEmpty();
            log.debug("Does Requirement " + requirement.getName() + " Document Has References: " + hasReferences);
            return hasReferences;
        } catch (Exception ex) {
            // handleException presumably translates the failure into a
            // GreenPepperServerException — confirm against its implementation.
            throw handleException(ERROR, ex);
        } finally {
            // Close the session whether the lookup succeeded or failed.
            sessionService.closeSession();
        }
    }
}
|
public class CmsScrollBar { /** * @ param reziseable true if the panel is resizeable */
public void isResizeable ( boolean reziseable ) { } }
|
if ( reziseable ) { getElement ( ) . getStyle ( ) . setMarginBottom ( 7 , Unit . PX ) ; } else { getElement ( ) . getStyle ( ) . setMarginBottom ( 0 , Unit . PX ) ; }
|
public class IBEA {
    /**
     * Execute() method.
     *
     * <p>Runs the IBEA main loop: create and evaluate an initial population, then until
     * the evaluation budget is exhausted, merge population and archive, compute indicator
     * fitness, shrink the archive by repeatedly removing the worst member, and breed a
     * new offspring population via tournament selection, crossover and mutation.
     */
    @Override
    public void run() {
        int evaluations;
        List<S> solutionSet, offSpringSolutionSet;
        // Initialize the variables
        solutionSet = new ArrayList<>(populationSize);
        archive = new ArrayList<>(archiveSize);
        evaluations = 0;
        // Create and evaluate the initial solution set
        S newSolution;
        for (int i = 0; i < populationSize; i++) {
            newSolution = problem.createSolution();
            problem.evaluate(newSolution);
            evaluations++;
            solutionSet.add(newSolution);
        }
        while (evaluations < maxEvaluations) {
            // Union of current population and archive; fitness is computed over the union
            List<S> union = new ArrayList<>();
            union.addAll(solutionSet);
            union.addAll(archive);
            calculateFitness(union);
            archive = union;
            // Environmental selection: shrink the archive back to populationSize
            while (archive.size() > populationSize) {
                removeWorst(archive);
            }
            // Create a new offspring population
            offSpringSolutionSet = new ArrayList<>(populationSize);
            S parent1;
            S parent2;
            while (offSpringSolutionSet.size() < populationSize) {
                // Tournament selection: keep the winner of TOURNAMENTS_ROUNDS draws
                int j = 0;
                do {
                    j++;
                    parent1 = selectionOperator.execute(archive);
                } while (j < IBEA.TOURNAMENTS_ROUNDS);
                int k = 0;
                do {
                    k++;
                    parent2 = selectionOperator.execute(archive);
                } while (k < IBEA.TOURNAMENTS_ROUNDS);
                List<S> parents = new ArrayList<>(2);
                parents.add(parent1);
                parents.add(parent2);
                // Crossover produces offspring; only the first child is kept
                List<S> offspring = crossoverOperator.execute(parents);
                mutationOperator.execute(offspring.get(0));
                problem.evaluate(offspring.get(0));
                offSpringSolutionSet.add(offspring.get(0));
                evaluations++;
            }
            // Generational replacement: offspring become the next population
            solutionSet = offSpringSolutionSet;
        }
    }
}
|
public class LoggingConfiguration {
    /**
     * (non-Javadoc)
     *
     * @see org.apache.ojb.broker.util.configuration.impl.ConfigurationAbstractImpl#load()
     *
     * Resolves the logger implementation class and its configuration file, trying in
     * order: JVM system properties, the OJB-logging.properties file, the (deprecated)
     * OJB.properties file, commons-logging system properties, a log4j.properties
     * resource, a commons-logging.properties resource, and finally the built-in
     * "poor man's" logger as the default.
     */
    protected void load() {
        Logger bootLogger = LoggerFactory.getBootLogger();
        // first we check whether the system property
        // org.apache.ojb.broker.util.logging.Logger
        // is set (or its alias LoggerClass which is deprecated)
        ClassLoader contextLoader = ClassHelper.getClassLoader();
        String loggerClassName;
        _loggerClass = null;
        properties = new Properties();
        loggerClassName = getLoggerClass(System.getProperties());
        _loggerConfigFile = getLoggerConfigFile(System.getProperties());
        InputStream ojbLogPropFile;
        if (loggerClassName == null) {
            // now we're trying to load the OJB-logging.properties file
            String ojbLogPropFilePath = System.getProperty(OJB_LOGGING_PROPERTIES_FILE, OJB_LOGGING_PROPERTIES_FILE);
            try {
                // Look it up as a classpath resource first, then as a plain file path.
                URL ojbLoggingURL = ClassHelper.getResource(ojbLogPropFilePath);
                if (ojbLoggingURL == null) {
                    ojbLoggingURL = (new File(ojbLogPropFilePath)).toURL();
                }
                ojbLogPropFile = ojbLoggingURL.openStream();
                try {
                    bootLogger.info("Found logging properties file: " + ojbLogPropFilePath);
                    properties.load(ojbLogPropFile);
                    _loggerConfigFile = getLoggerConfigFile(properties);
                    loggerClassName = getLoggerClass(properties);
                } finally {
                    ojbLogPropFile.close();
                }
            } catch (Exception ex) {
                if (loggerClassName == null) {
                    bootLogger.warn("Can't read logging properties file using path '" + ojbLogPropFilePath
                            + "', message is: " + SystemUtils.LINE_SEPARATOR + ex.getMessage()
                            + SystemUtils.LINE_SEPARATOR
                            + "Will try to load logging properties from OJB.properties file");
                } else {
                    bootLogger.info("Problems while closing resources for path '" + ojbLogPropFilePath
                            + "', message is: " + SystemUtils.LINE_SEPARATOR + ex.getMessage(), ex);
                }
            }
        }
        if (loggerClassName == null) {
            // deprecated: load the OJB.properties file
            // this is not good because we have all OJB properties in this config
            String ojbPropFile = System.getProperty("OJB.properties", "OJB.properties");
            try {
                ojbLogPropFile = contextLoader.getResourceAsStream(ojbPropFile);
                if (ojbLogPropFile != null) {
                    try {
                        properties.load(ojbLogPropFile);
                        loggerClassName = getLoggerClass(properties);
                        _loggerConfigFile = getLoggerConfigFile(properties);
                        if (loggerClassName != null) {
                            // deprecation warning for after 1.0
                            bootLogger.warn("Please use a separate '" + OJB_LOGGING_PROPERTIES_FILE
                                    + "' file to specify your logging settings");
                        }
                    } finally {
                        ojbLogPropFile.close();
                    }
                }
            } catch (Exception ex) {
                // NOTE(review): exception deliberately swallowed — the deprecated
                // OJB.properties lookup is best-effort only.
            }
        }
        if (loggerClassName != null) {
            try {
                _loggerClass = ClassHelper.getClass(loggerClassName);
                bootLogger.info("Logging: Found logger class '" + loggerClassName);
            } catch (ClassNotFoundException ex) {
                // Configured class is unavailable: fall back to the built-in logger.
                _loggerClass = PoorMansLoggerImpl.class;
                bootLogger.warn("Could not load logger class " + loggerClassName + ", defaulting to "
                        + _loggerClass.getName(), ex);
            }
        } else {
            // still no logger configured - lets check whether commons-logging is configured
            if ((System.getProperty(PROPERTY_COMMONS_LOGGING_LOG) != null)
                    || (System.getProperty(PROPERTY_COMMONS_LOGGING_LOGFACTORY) != null)) {
                // yep, so use commons-logging
                _loggerClass = CommonsLoggerImpl.class;
                bootLogger.info("Logging: Found commons logging properties, use " + _loggerClass);
            } else {
                // but perhaps there is a log4j.properties file?
                try {
                    ojbLogPropFile = contextLoader.getResourceAsStream("log4j.properties");
                    if (ojbLogPropFile != null) {
                        // yep, so use log4j
                        _loggerClass = Log4jLoggerImpl.class;
                        _loggerConfigFile = "log4j.properties";
                        bootLogger.info("Logging: Found 'log4j.properties' file, use " + _loggerClass);
                        ojbLogPropFile.close();
                    }
                } catch (Exception ex) {
                    // NOTE(review): swallowed — absence of log4j.properties is not an error.
                }
                if (_loggerClass == null) {
                    // or a commons-logging.properties file?
                    try {
                        ojbLogPropFile = contextLoader.getResourceAsStream("commons-logging.properties");
                        if (ojbLogPropFile != null) {
                            // yep, so use commons-logging
                            _loggerClass = CommonsLoggerImpl.class;
                            _loggerConfigFile = "commons-logging.properties";
                            bootLogger.info("Logging: Found 'commons-logging.properties' file, use " + _loggerClass);
                            ojbLogPropFile.close();
                        }
                    } catch (Exception ex) {
                        // NOTE(review): swallowed — absence of the file is not an error.
                    }
                    if (_loggerClass == null) {
                        // no, so default to poor man's logging
                        bootLogger.info("** Can't find logging configuration file, use default logger **");
                        _loggerClass = PoorMansLoggerImpl.class;
                    }
                }
            }
        }
    }
}
|
public class SpringIOUtils { /** * Copy the contents of the given input File to the given output File .
* @ param in the file to copy from
* @ param out the file to copy to
* @ return the number of bytes copied
* @ throws java . io . IOException in case of I / O errors */
public static int copy ( File in , File out ) throws IOException { } }
|
assert in != null : "No input File specified" ; assert out != null : "No output File specified" ; return copy ( new BufferedInputStream ( Files . newInputStream ( in . toPath ( ) ) ) , new BufferedOutputStream ( Files . newOutputStream ( out . toPath ( ) ) ) ) ;
|
public class DurationEvaluators {
    /**
     * Register a new {@link ActionDurationEvaluator}.
     *
     * @param a the action class
     * @param e the evaluator to register for the given action
     * @return {@code false} if this action delete a previous evaluator for that action
     */
    public boolean register(Class<? extends Action> a, ActionDurationEvaluator e) {
        // Map.put returns the previous mapping (or null), so this is true only
        // when no evaluator was registered for the action before.
        return durations.put(a, e) == null;
    }
}
|
public class CPDefinitionVirtualSettingLocalServiceBaseImpl {
    /**
     * Returns the cp definition virtual setting matching the UUID and group.
     *
     * @param uuid the cp definition virtual setting's UUID
     * @param groupId the primary key of the group
     * @return the matching cp definition virtual setting
     * @throws PortalException if a matching cp definition virtual setting could not be found
     */
    @Override
    public CPDefinitionVirtualSetting getCPDefinitionVirtualSettingByUuidAndGroupId(String uuid, long groupId) throws PortalException {
        // Delegates to the persistence layer; findByUUID_G throws when no row matches.
        return cpDefinitionVirtualSettingPersistence.findByUUID_G(uuid, groupId);
    }
}
|
public class IOUtils { /** * Locates this file either using the given URL , or in the CLASSPATH , or in the file system
* The CLASSPATH takes priority over the file system !
* This stream is buffered and gzipped ( if necessary )
* @ param textFileOrUrl
* @ return An InputStream for loading a resource
* @ throws IOException */
public static InputStream getInputStreamFromURLOrClasspathOrFileSystem ( String textFileOrUrl ) throws IOException { } }
|
InputStream in ; if ( textFileOrUrl . matches ( "https?://.*" ) ) { URL u = new URL ( textFileOrUrl ) ; URLConnection uc = u . openConnection ( ) ; in = uc . getInputStream ( ) ; } else { try { in = findStreamInClasspathOrFileSystem ( textFileOrUrl ) ; } catch ( FileNotFoundException e ) { try { // Maybe this happens to be some other format of URL ?
URL u = new URL ( textFileOrUrl ) ; URLConnection uc = u . openConnection ( ) ; in = uc . getInputStream ( ) ; } catch ( IOException e2 ) { // TODO : freaking Java 1.5 didn ' t have an IOException that
// could take a throwable as a cause
throw new IOException ( "Unable to resolve \"" + textFileOrUrl + "\" as either " + "class path, filename or URL" ) ; } } } // buffer this stream
in = new BufferedInputStream ( in ) ; // gzip it if necessary
if ( textFileOrUrl . endsWith ( ".gz" ) ) in = new GZIPInputStream ( in ) ; return in ;
|
public class WebFacelettaglibraryDescriptorImpl { /** * If not already created , a new < code > tag < / code > element will be created and returned .
* Otherwise , the first existing < code > tag < / code > element will be returned .
* @ return the instance defined for the element < code > tag < / code > */
public FaceletTaglibTagType < WebFacelettaglibraryDescriptor > getOrCreateTag ( ) { } }
|
List < Node > nodeList = model . get ( "tag" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new FaceletTaglibTagTypeImpl < WebFacelettaglibraryDescriptor > ( this , "tag" , model , nodeList . get ( 0 ) ) ; } return createTag ( ) ;
|
public class ModuleSpaces {
    /**
     * Delete a Space.
     *
     * @param spaceId Space ID
     * @return Integer representing the result (204, or an error code)
     * @throws IllegalArgumentException if space's id is null.
     */
    public Integer delete(String spaceId) {
        assertNotNull(spaceId, "spaceId");
        // Blocks on the first emission of the reactive delete call and
        // returns its HTTP status code.
        return service.delete(spaceId).blockingFirst().code();
    }
}
|
public class Sql {
    /**
     * Performs the closure (containing batch operations) within a batch using a given batch size.
     *
     * After every <code>batchSize</code> <code>addBatch(sqlBatchOperation)</code>
     * operations, automatically calls an <code>executeBatch()</code> operation to "chunk" up the
     * database operations into partitions. Though not normally needed, you can also explicitly
     * call <code>executeBatch()</code> which, after executing the current batch, resets the batch
     * count back to zero.
     * The closure will be called with a single argument; the database statement
     * (actually a <code>BatchingStatementWrapper</code> helper object)
     * associated with this batch.
     * Use it like this for batchSize of 20:
     * <pre>
     * def updateCounts = sql.withBatch(20) { stmt {@code ->}
     *     stmt.addBatch("insert into TABLENAME ...")
     *     stmt.addBatch("insert into TABLENAME ...")
     *     stmt.addBatch("insert into TABLENAME ...")
     * </pre>
     * For integrity and performance reasons, you may wish to consider executing your batch
     * command(s) within a transaction:
     * <pre>
     * sql.withTransaction {
     *     def result1 = sql.withBatch { ... }
     * </pre>
     *
     * @param batchSize partition the batch into batchSize pieces, i.e. after batchSize
     *        <code>addBatch()</code> invocations, call <code>executeBatch()</code> automatically;
     *        0 means manual calls to executeBatch are required
     * @param closure the closure containing batch and optionally other statements
     * @return an array of update counts containing one element for each
     *         command in the batch. The elements of the array are ordered according
     *         to the order in which commands were added to the batch.
     * @throws SQLException if a database access error occurs,
     *         or this method is called on a closed <code>Statement</code>, or the
     *         driver does not support batch statements. Throws {@link java.sql.BatchUpdateException}
     *         (a subclass of <code>SQLException</code>) if one of the commands sent to the
     *         database fails to execute properly or attempts to return a result set.
     * @see #withBatch(Closure)
     * @see BatchingStatementWrapper
     * @see Statement
     */
    public int[] withBatch(int batchSize, Closure closure) throws SQLException {
        Connection connection = createConnection();
        BatchingStatementWrapper statement = null;
        // Remember the previous flag so nested withBatch calls restore it correctly.
        boolean savedWithinBatch = withinBatch;
        try {
            withinBatch = true;
            statement = new BatchingStatementWrapper(createStatement(connection), batchSize, LOG);
            closure.call(statement);
            // Flush any remaining queued operations and return all update counts.
            return statement.executeBatch();
        } catch (SQLException e) {
            LOG.warning("Error during batch execution: " + e.getMessage());
            throw e;
        } finally {
            // Always release statement and connection and restore the batch flag.
            closeResources(statement);
            closeResources(connection);
            withinBatch = savedWithinBatch;
        }
    }
}
|
public class FieldObjectAccess {
    /**
     * a field is sending a new value (out)
     *
     * <p>Reads the current field value, optionally publishes it as a
     * {@code DataflowEvent} (listeners may replace the value), writes the possibly
     * altered value into {@code data}, and finally forwards {@code out()} to the
     * wrapped field access.
     *
     * @throws java.lang.Exception
     */
    @Override
    public void out() throws Exception {
        Object val = fa.getFieldValue();
        if (ens.shouldFire()) {
            DataflowEvent e = new DataflowEvent(ens.getController(), this, val);
            ens.fireOut(e);
            // the value might be altered by an event listener
            val = e.getValue();
        }
        // if data == null this is an unconsumed @Out; that's OK but we do not want to set it.
        if (data != null) {
            data.setFieldValue(val);
        }
        fa.out();
    }
}
|
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link AdviceType}{@code >}.
     *
     * @param value the payload to wrap
     * @return a {@link JAXBElement} wrapping {@code value} under the XACML 3.0 "Advice" element name
     */
    @XmlElementDecl(namespace = "urn:oasis:names:tc:xacml:3.0:core:schema:wd-17", name = "Advice")
    public JAXBElement<AdviceType> createAdvice(AdviceType value) {
        // Scope is null: the element is not bound to a particular parent class.
        return new JAXBElement<AdviceType>(_Advice_QNAME, AdviceType.class, null, value);
    }
}
|
public class MemcachedClient {
    /**
     * Asynchronous CAS operation.
     *
     * @param <T> the type of the value
     * @param key the key
     * @param casId the CAS identifier (from a gets operation)
     * @param value the new value
     * @param tc the transcoder to serialize and unserialize the value
     * @return a future that will indicate the status of the CAS
     * @throws IllegalStateException in the rare circumstance where queue is too
     *         full to accept any more requests
     */
    @Override
    public <T> OperationFuture<CASResponse> asyncCAS(String key, long casId, T value, Transcoder<T> tc) {
        // Delegates to the overload that takes an expiration; 0 presumably means
        // "no expiration" — confirm against the overload's contract.
        return asyncCAS(key, casId, 0, value, tc);
    }
}
|
public class SecureDfuImpl {
    /**
     * Sends the Calculate Checksum request. As a response a notification will be sent with current
     * offset and CRC32 of the current object.
     *
     * @return requested object info.
     * @throws DeviceDisconnectedException if the device is no longer connected
     * @throws DfuException
     * @throws UploadAbortedException
     * @throws RemoteDfuException thrown when the returned status code is not equal to
     *         {@link #DFU_STATUS_SUCCESS}.
     */
    private ObjectChecksum readChecksum() throws DeviceDisconnectedException, DfuException, UploadAbortedException, RemoteDfuException, UnknownResponseException {
        if (!mConnected)
            throw new DeviceDisconnectedException("Unable to read Checksum: device disconnected");
        // Request the checksum, then block for the notification response.
        writeOpCode(mControlPointCharacteristic, OP_CODE_CALCULATE_CHECKSUM);
        final byte[] response = readNotificationResponse();
        final int status = getStatusCode(response, OP_CODE_CALCULATE_CHECKSUM_KEY);
        if (status == SecureDfuError.EXTENDED_ERROR)
            // Extended errors carry the detailed code in the fourth response byte.
            throw new RemoteDfuExtendedErrorException("Receiving Checksum failed", response[3]);
        if (status != DFU_STATUS_SUCCESS)
            throw new RemoteDfuException("Receiving Checksum failed", status);
        // Response layout: offset as UINT32 at byte 3, CRC32 as UINT32 at byte 7.
        final ObjectChecksum checksum = new ObjectChecksum();
        checksum.offset = mControlPointCharacteristic.getIntValue(BluetoothGattCharacteristic.FORMAT_UINT32, 3);
        checksum.CRC32 = mControlPointCharacteristic.getIntValue(BluetoothGattCharacteristic.FORMAT_UINT32, 3 + 4);
        return checksum;
    }
}
|
public class ITFReader {
    /**
     * Identify where the start of the middle/payload section starts.
     *
     * @param row row of black/white values to search
     * @return Array, containing index of start of 'start block' and end of
     *         'start block'
     * @throws NotFoundException if the start guard pattern cannot be located
     */
    private int[] decodeStart(BitArray row) throws NotFoundException {
        int endStart = skipWhiteSpace(row);
        int[] startPattern = findGuardPattern(row, endStart, START_PATTERN);
        // Determine the width of a narrow line in pixels. We can do this by
        // getting the width of the start pattern and dividing by 4 because it is
        // made up of 4 narrow lines.
        this.narrowLineWidth = (startPattern[1] - startPattern[0]) / 4;
        // Ensure there is enough empty space before the start pattern.
        validateQuietZone(row, startPattern[0]);
        return startPattern;
    }
}
|
public class PluginMessageDescription { /** * Create a description for an ExternalCondition object .
* @ param condition the condition
* @ return a description to be used on email templates */
public String external ( ExternalCondition condition ) { } }
|
String description = "AlerterId: " + condition . getAlerterId ( ) ; description += " DataId: " + condition . getDataId ( ) ; description += " Expression: " + condition . getExpression ( ) ; return description ;
|
public class CacheProviderWrapper {
    /**
     * Returns the current dependency IDs size for the disk cache.
     *
     * <p>Currently always returns 0: the operation is not implemented when disk
     * offload is enabled, and is a no-op (with logging) when it is not.
     *
     * @return The current dependency ids size for the disk cache.
     */
    @Override
    public int getDepIdsSizeDisk() {
        final String methodName = "getDepIdsSizeDisk()";
        if (this.swapToDisk) {
            // TODO write code to support getDepIdsSizeDisk function
            if (tc.isDebugEnabled()) {
                Tr.debug(tc, methodName + " cacheName=" + cacheName + " ERROR because it is not implemented yet");
            }
        } else {
            if (this.featureSupport.isDiskCacheSupported() == false) {
                // The underlying provider does not support a disk cache at all.
                Tr.error(tc, "DYNA1064E", new Object[] { methodName, cacheName, this.cacheProviderName });
            } else {
                if (tc.isDebugEnabled()) {
                    Tr.debug(tc, methodName + " cacheName=" + cacheName + " no operation is done because the disk cache offload is not enabled");
                }
            }
        }
        return 0;
    }
}
|
public class ReplaceDescendantsUtil {
    /**
     * Replaces descendant elements matched by tag type and class attribute.
     * Note: matched elements are replaced with a deep copy of {@code replace}.
     *
     * @param <T>
     *            tag type. (i.e. Div.class, Span.class...)
     * @param target
     *            object for scan
     * @param tagType
     *            tag class
     * @param clazz
     *            class property of tag
     * @param replace
     *            replacement tag object.
     * @throws TagTypeUnmatchException
     */
    public static <T extends AbstractJaxb> void replaceDescendants(T target, Class<T> tagType, String clazz, T replace) throws TagTypeUnmatchException {
        // Delegates to the shared execute(...) implementation.
        execute(target, tagType, clazz, replace);
    }
}
|
public class OdsFactory {
    /**
     * Create a new ODS file writer from a document. Be careful: this method opens immediately a
     * stream.
     *
     * @param filename the name of the destination file
     * @return the ods writer
     * @throws IOException if the destination file can't be opened
     */
    public NamedOdsFileWriter createWriter(final String filename) throws IOException {
        final NamedOdsDocument document = this.createNamedDocument();
        // Opens the destination file immediately as part of building the writer.
        final NamedOdsFileWriter writer =
                OdsFileDirectWriter.builder(this.logger, document).openResult(this.openFile(filename)).build();
        // Register the writer as an observer of the document — presumably so that
        // document flushes are forwarded to the writer; confirm against NamedOdsDocument.
        document.addObserver(writer);
        document.prepareFlush();
        return writer;
    }
}
|
public class Parser {
    /**
     * Test if the dollar character (<tt>$</tt>) at the given offset starts a dollar-quoted string and
     * return the offset of the ending dollar character.
     *
     * @param query query
     * @param offset start offset
     * @return offset of the ending dollar character, or the original offset if no
     *         dollar quote starts here
     */
    public static int parseDollarQuotes(final char[] query, int offset) {
        // A dollar quote can only start when it is not part of an identifier
        // (i.e. at position 0 or after a non-identifier character).
        if (offset + 1 < query.length && (offset == 0 || !isIdentifierContChar(query[offset - 1]))) {
            int endIdx = -1;
            if (query[offset + 1] == '$') {
                // "$$" — empty tag.
                endIdx = offset + 1;
            } else if (isDollarQuoteStartChar(query[offset + 1])) {
                // Scan for the closing '$' of a named tag like "$tag$".
                for (int d = offset + 2; d < query.length; ++d) {
                    if (query[d] == '$') {
                        endIdx = d;
                        break;
                    } else if (!isDollarQuoteContChar(query[d])) {
                        break;
                    }
                }
            }
            if (endIdx > 0) {
                // found; note: tag includes start and end $ character
                int tagIdx = offset;
                int tagLen = endIdx - offset + 1;
                offset = endIdx; // loop continues at endIdx + 1
                // Scan forward for the matching closing tag and skip past it.
                for (++offset; offset < query.length; ++offset) {
                    if (query[offset] == '$' && subArraysEqual(query, tagIdx, offset, tagLen)) {
                        offset += tagLen - 1;
                        break;
                    }
                }
            }
        }
        return offset;
    }
}
|
public class PaxPropertySetter {
    /**
     * Set the properties for the object that match the
     * <code>prefix</code> passed as parameter.
     *
     * <p>For each property key directly under the prefix (no further dots), the value
     * is resolved and applied to the target object. Properties whose bean type is an
     * {@code OptionHandler} or {@code OptionFactory} are instantiated and configured
     * recursively via reflection; everything else goes through {@code setProperty}.
     *
     * @param properties the property source
     * @param prefix the key prefix selecting the properties to apply
     */
    public void setProperties(Properties properties, String prefix) {
        int len = prefix.length();
        for (Enumeration e = properties.propertyNames(); e.hasMoreElements();) {
            String key = (String) e.nextElement();
            // handle only properties that start with the desired prefix.
            if (key.startsWith(prefix)) {
                // ignore key if it contains dots after the prefix
                if (key.indexOf('.', len + 1) > 0) {
                    continue;
                }
                String value = OptionConverter.findAndSubst(key, properties);
                key = key.substring(len);
                // These appender sub-objects are configured elsewhere; skip them here.
                if (("layout".equals(key) || "errorHandler".equals(key) || "appenders".equals(key)) && obj instanceof Appender) {
                    continue;
                }
                // if the property type is an OptionHandler
                // (for example, triggeringPolicy of org.apache.log4j.rolling.RollingFileAppender)
                PropertyDescriptor prop = getPropertyDescriptor(Introspector.decapitalize(key));
                if (prop != null && OptionHandler.class.isAssignableFrom(prop.getPropertyType()) && prop.getWriteMethod() != null) {
                    // Instantiate the handler, configure its nested properties, then inject it.
                    OptionHandler opt = (OptionHandler) OptionConverter.instantiateByKey(properties, prefix + key, prop.getPropertyType(), null);
                    PaxPropertySetter setter = new PaxPropertySetter(opt);
                    setter.setProperties(properties, prefix + key + ".");
                    try {
                        prop.getWriteMethod().invoke(this.obj, new Object[] { opt });
                    } catch (IllegalAccessException ex) {
                        LogLog.warn("Failed to set property [" + key + "] to value \"" + value + "\". ", ex);
                    } catch (InvocationTargetException ex) {
                        // Restore the interrupt status when the setter was interrupted.
                        if (ex.getTargetException() instanceof InterruptedException || ex.getTargetException() instanceof InterruptedIOException) {
                            Thread.currentThread().interrupt();
                        }
                        LogLog.warn("Failed to set property [" + key + "] to value \"" + value + "\". ", ex);
                    } catch (RuntimeException ex) {
                        LogLog.warn("Failed to set property [" + key + "] to value \"" + value + "\". ", ex);
                    }
                    continue;
                } else if (prop != null && OptionFactory.class.isAssignableFrom(prop.getPropertyType()) && prop.getWriteMethod() != null) {
                    // Factory-typed properties receive a lazily-resolving ObjectFactory.
                    OptionFactory factory = new ObjectFactory(properties, prefix + key);
                    try {
                        prop.getWriteMethod().invoke(this.obj, new Object[] { factory });
                    } catch (IllegalAccessException ex) {
                        LogLog.warn("Failed to set property [" + key + "] to value \"" + value + "\". ", ex);
                    } catch (InvocationTargetException ex) {
                        // Restore the interrupt status when the setter was interrupted.
                        if (ex.getTargetException() instanceof InterruptedException || ex.getTargetException() instanceof InterruptedIOException) {
                            Thread.currentThread().interrupt();
                        }
                        LogLog.warn("Failed to set property [" + key + "] to value \"" + value + "\". ", ex);
                    } catch (RuntimeException ex) {
                        LogLog.warn("Failed to set property [" + key + "] to value \"" + value + "\". ", ex);
                    }
                    continue;
                }
                setProperty(key, value);
            }
        }
        activate();
    }
}
|
public class TableKeysAndAttributes { /** * Adds a primary key to be included in the batch get - item operation . A
* primary key could consist of either a hash - key or both a
* hash - key and a range - key depending on the schema of the table . */
public TableKeysAndAttributes addPrimaryKey ( PrimaryKey primaryKey ) { } }
|
if ( primaryKey != null ) { if ( primaryKeys == null ) primaryKeys = new ArrayList < PrimaryKey > ( ) ; checkConsistency ( primaryKey ) ; this . primaryKeys . add ( primaryKey ) ; } return this ;
|
public class BreadCrumbPresenter { /** * Searches in all categories for the category that matches a breadcrumb .
* @ param breadcrumb the breadcrumb , which the matching category should have
* @ return a matching category or null if nothing is found */
private Category searchCategory ( String breadcrumb ) { } }
|
return model . getFlatCategoriesLst ( ) . stream ( ) . filter ( cat -> cat . getBreadcrumb ( ) . equals ( breadcrumb ) ) . findAny ( ) . orElse ( null ) ;
|
public class JSONObject {
    /**
     * Serialize this JSONObject to a string.
     *
     * @param jsonReferenceToId
     *            a map from json reference to id
     * @param includeNullValuedFields
     *            if true, include null valued fields
     * @param depth
     *            the nesting depth
     * @param indentWidth
     *            the indent width; a positive value enables pretty-printing
     * @param buf
     *            the buf the serialized form is appended to
     */
    void toJSONString(final Map<ReferenceEqualityKey<JSONReference>, CharSequence> jsonReferenceToId, final boolean includeNullValuedFields, final int depth, final int indentWidth, final StringBuilder buf) {
        final boolean prettyPrint = indentWidth > 0;
        final int n = items.size();
        // Count the fields that will actually be emitted, so commas can be
        // placed correctly when null-valued fields are skipped.
        int numDisplayedFields;
        if (includeNullValuedFields) {
            numDisplayedFields = n;
        } else {
            numDisplayedFields = 0;
            for (final Entry<String, Object> item : items) {
                if (item.getValue() != null) {
                    numDisplayedFields++;
                }
            }
        }
        if (objectId == null && numDisplayedFields == 0) {
            // Nothing to serialize: emit an empty object.
            buf.append("{}");
        } else {
            buf.append(prettyPrint ? "{\n" : "{");
            if (objectId != null) {
                // id will be non-null if this object does not have an @Id field, but was referenced by
                // another object (need to include ID_TAG)
                if (prettyPrint) {
                    JSONUtils.indent(depth + 1, indentWidth, buf);
                }
                buf.append('"');
                buf.append(JSONUtils.ID_KEY);
                buf.append(prettyPrint ? "\": " : "\":");
                JSONSerializer.jsonValToJSONString(objectId, jsonReferenceToId, includeNullValuedFields, depth + 1, indentWidth, buf);
                if (numDisplayedFields > 0) {
                    buf.append(',');
                }
                if (prettyPrint) {
                    buf.append('\n');
                }
            }
            // i iterates all items; j counts only the emitted ones (for comma placement).
            for (int i = 0, j = 0; i < n; i++) {
                final Entry<String, Object> item = items.get(i);
                final Object val = item.getValue();
                if (val != null || includeNullValuedFields) {
                    final String key = item.getKey();
                    if (key == null) {
                        // Keys must be quoted, so the unquoted null value cannot be a key
                        // (Should not happen -- JSONParser.parseJSONObject checks for null keys)
                        throw new IllegalArgumentException("Cannot serialize JSON object with null key");
                    }
                    if (prettyPrint) {
                        JSONUtils.indent(depth + 1, indentWidth, buf);
                    }
                    buf.append('"');
                    JSONUtils.escapeJSONString(key, buf);
                    buf.append(prettyPrint ? "\": " : "\":");
                    JSONSerializer.jsonValToJSONString(val, jsonReferenceToId, includeNullValuedFields, depth + 1, indentWidth, buf);
                    if (++j < numDisplayedFields) {
                        buf.append(',');
                    }
                    if (prettyPrint) {
                        buf.append('\n');
                    }
                }
            }
            if (prettyPrint) {
                JSONUtils.indent(depth, indentWidth, buf);
            }
            buf.append('}');
        }
    }
}
|
public class CmsFileBuffer { /** * Changes the size of this buffer . < p >
* @ param size the new size */
public void truncate ( int size ) { } }
|
// Shrink the underlying buffer to the requested size, then clamp the current
// position so it never points past the new end of the buffer.
m_buffer.truncate(size);
if (m_position > size) {
    m_position = size;
}
|
public class ObjectFactory2 { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link DerivedByInsertionFrom } { @ code > } } */
@ XmlElementDecl ( namespace = "http://www.w3.org/ns/prov#" , name = "derivedByInsertionFrom" ) public JAXBElement < DerivedByInsertionFrom > createDerivedByInsertionFrom ( DerivedByInsertionFrom value ) { } }
|
return new JAXBElement < DerivedByInsertionFrom > ( _DerivedByInsertionFrom_QNAME , DerivedByInsertionFrom . class , null , value ) ;
|
public class KubernetesDockerRunner { /** * Deletes stale workflow instance execution pods . */
@ VisibleForTesting void tryCleanupPods ( ) { } }
|
// List every pod visible to the client, run tryCleanupPod for each one
// concurrently on the shared executor (guard() presumably isolates per-pod
// failures -- TODO confirm), block until all cleanups finish, then record the
// number of pods processed as an annotation on the current tracing span.
var pods = client . pods ( ) . list ( ) . getItems ( ) ; pods . stream ( ) . map ( pod -> runAsync ( guard ( ( ) -> tryCleanupPod ( pod ) ) , executor ) ) . collect ( toList ( ) ) . forEach ( CompletableFuture :: join ) ; tracer . getCurrentSpan ( ) . addAnnotation ( "processed" , Map . of ( "pods" , AttributeValue . longAttributeValue ( pods . size ( ) ) ) ) ;
|
public class GlassfishDetector { /** * { @ inheritDoc }
* @ param pMBeanServerExecutor */
public ServerHandle detect ( MBeanServerExecutor pMBeanServerExecutor ) { } }
|
// A non-null version string is the signal that a Glassfish server was found;
// otherwise report "not detected" with null.
final String glassfishVersion = detectVersion(pMBeanServerExecutor);
if (glassfishVersion == null) {
    return null;
}
return new GlassfishServerHandle(glassfishVersion, new HashMap<String, String>());
|
public class Objects2 { /** * 如果对象值是 null 返回替代对象 , 否则返回对象本身 。
* @ param value
* 对象
* @ param replacement
* 替代对象
* @ param < T >
* 对象类型
* @ return 结果 */
public static < T > T isNull ( final T value , final T replacement ) { } }
|
if ( value == null ) { return replacement ; } return value ;
|
public class LocalWeightedHistogramRotRect { /** * create the list of points in square coordinates that it will sample . values will range
* from - 0.5 to 0.5 along each axis . */
protected void createSamplePoints ( int numSamples ) { } }
|
for ( int y = 0 ; y < numSamples ; y ++ ) { float regionY = ( y / ( numSamples - 1.0f ) - 0.5f ) ; for ( int x = 0 ; x < numSamples ; x ++ ) { float regionX = ( x / ( numSamples - 1.0f ) - 0.5f ) ; samplePts . add ( new Point2D_F32 ( regionX , regionY ) ) ; } }
|
public class CmsDialog { /** * Builds a button row with an " ok " and a " cancel " button . < p >
* @ param okAttributes additional attributes for the " ok " button
* @ param cancelAttributes additional attributes for the " cancel " button
* @ return the button row */
public String dialogButtonsOkCancel ( String okAttributes , String cancelAttributes ) { } }
|
return dialogButtons ( new int [ ] { BUTTON_OK , BUTTON_CANCEL } , new String [ ] { okAttributes , cancelAttributes } ) ;
|
public class Cluster { /** * get the current assignment for the topology . */
public SchedulerAssignment getAssignmentById ( String topologyId ) { } }
|
if ( this . assignments . containsKey ( topologyId ) ) { return this . assignments . get ( topologyId ) ; } return null ;
|
public class BaseDestinationHandler { /** * ( non - Javadoc )
* @ see com . ibm . ws . sib . processor . impl . interfaces . MessageEventListener # registerForEvents ( com . ibm . ws . sib . processor . impl . interfaces . SIMPMessage ) */
@ Override public void registerForEvents ( SIMPMessage msg ) { } }
|
// Subscribe this handler to two lifecycle events on the message: expiry
// notification and the reference count dropping to zero. Entry/exit trace
// records are emitted only when tracing is enabled (standard SibTr guard
// pattern to avoid trace overhead on the hot path).
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "registerForEvents" , msg ) ; msg . registerMessageEventListener ( MessageEvents . EXPIRY_NOTIFICATION , this ) ; msg . registerMessageEventListener ( MessageEvents . REFERENCES_DROPPED_TO_ZERO , this ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "registerForEvents" ) ;
|
public class CheckAccessControls { /** * If the superclass is final , this method returns an instance of the superclass . */
@ Nullable private static ObjectType getSuperClassInstanceIfFinal ( @ Nullable JSType type ) { } }
|
// Walk type -> object type -> superclass constructor -> its JSDoc; only when
// the superclass is explicitly documented @final do we return its instance
// type. Any missing link along the chain yields null.
if (type == null) {
    return null;
}
final ObjectType objType = type.toObjectType();
if (objType == null) {
    return null;
}
final FunctionType superCtor = objType.getSuperClassConstructor();
if (superCtor == null) {
    return null;
}
final JSDocInfo superDoc = superCtor.getJSDocInfo();
if (superDoc != null && superDoc.isFinal()) {
    return superCtor.getInstanceType();
}
return null;
|
public class TemplatesApi { /** * Gets a list of templates for a specified account .
* Retrieves the definition of the specified template .
* @ param accountId The external account number ( int ) or account ID Guid . ( required )
* @ param templateId The ID of the template being accessed . ( required )
* @ return EnvelopeTemplate */
public EnvelopeTemplate get ( String accountId , String templateId ) throws ApiException { } }
|
// Convenience overload: delegate to the full variant with no extra options
// (third argument null).
return get ( accountId , templateId , null ) ;
|
public class MessageReceiverFilterList { /** * Constructor .
* @ param baseMessageQueue My parent message queue . */
public void init ( BaseMessageReceiver receiver ) { } }
|
// Wire this filter list to its owning receiver and reset internal state:
// an empty concurrent filter map and a filter-id counter starting at 1.
m_receiver = receiver;
m_mapFilters = new ConcurrentHashMap<Integer, BaseMessageFilter>();
// Integer.valueOf reuses the JDK's cached boxed instances; the deprecated
// "new Integer(1)" constructor allocated a fresh object on every call.
m_intNext = Integer.valueOf(1);
|
public class AnnotationUtils { /** * Return a pair of Field [ ] whose left element is
* the array of keys fields .
* The right element contains the array of all other non - key fields .
* @ param clazz the Class object
* @ return a pair object whose first element contains key fields , and whose second element contains all other columns . */
public static Pair < Field [ ] , Field [ ] > filterKeyFields ( Class clazz ) { } }
|
// Partition the deep-filtered fields of the class into key fields (those whose
// @DeepField annotation marks them as keys) and all remaining fields,
// preserving declaration order in both halves of the returned pair.
final Field[] candidateFields = filterDeepFields(clazz);
final List<Field> keyFields = new ArrayList<>();
final List<Field> nonKeyFields = new ArrayList<>();
for (final Field candidate : candidateFields) {
    if (isKey(candidate.getAnnotation(DeepField.class))) {
        keyFields.add(candidate);
    } else {
        nonKeyFields.add(candidate);
    }
}
return Pair.create(
        keyFields.toArray(new Field[0]),
        nonKeyFields.toArray(new Field[0]));
|
public class CmsErrorWidget { /** * Creates the a new error label . < p >
* @ return a label with the appropriate style for an error label */
private static Label createErrorLabel ( ) { } }
|
Label label = new Label ( ) ; label . addStyleName ( I_CmsLayoutBundle . INSTANCE . generalCss ( ) . cornerAll ( ) ) ; label . addStyleName ( I_CmsInputLayoutBundle . INSTANCE . inputCss ( ) . error ( ) ) ; return label ;
|
public class CommerceSubscriptionEntryPersistenceImpl { /** * Creates a new commerce subscription entry with the primary key . Does not add the commerce subscription entry to the database .
* @ param commerceSubscriptionEntryId the primary key for the new commerce subscription entry
* @ return the new commerce subscription entry */
@ Override public CommerceSubscriptionEntry create ( long commerceSubscriptionEntryId ) { } }
|
// Instantiate a fresh, transient (not yet persisted) entity: mark it new,
// assign the caller-supplied primary key, generate a portal UUID, and stamp
// the provider's company id.
final CommerceSubscriptionEntry entry = new CommerceSubscriptionEntryImpl();
entry.setNew(true);
entry.setPrimaryKey(commerceSubscriptionEntryId);
entry.setUuid(PortalUUIDUtil.generate());
entry.setCompanyId(companyProvider.getCompanyId());
return entry;
|
public class ListSelectDialog { /** * Shortcut for quickly creating a new dialog
* @ param textGUI Text GUI to add the dialog to
* @ param title Title of the dialog
* @ param description Description of the dialog
* @ param listBoxHeight Maximum height of the list box , scrollbars will be used if there are more items
* @ param items Items in the dialog
* @ param < T > Type of items in the dialog
* @ return The selected item or { @ code null } if cancelled */
public static < T > T showDialog ( WindowBasedTextGUI textGUI , String title , String description , int listBoxHeight , T ... items ) { } }
|
// Size the dialog to the widest item's rendered column width plus a
// two-column margin, then delegate to the TerminalSize-based overload.
int widestItem = 0;
for (final T item : items) {
    final int itemWidth = TerminalTextUtils.getColumnWidth(item.toString());
    if (itemWidth > widestItem) {
        widestItem = itemWidth;
    }
}
final TerminalSize dialogSize = new TerminalSize(widestItem + 2, listBoxHeight);
return showDialog(textGUI, title, description, dialogSize, items);
|
public class CheckDatabase { /** * delete a particular segment in the shard . */
public static void deleteSegment ( Olap olap , ApplicationDefinition appDef , String shard , String segment ) { } }
|
VDirectory appDir = olap . getRoot ( appDef ) ; VDirectory shardDir = appDir . getDirectory ( shard ) ; VDirectory segmentDir = shardDir . getDirectory ( segment ) ; segmentDir . delete ( ) ;
|
public class AmazonWorkLinkClient { /** * Retrieves a list of fleets for the current account and Region .
* @ param listFleetsRequest
* @ return Result of the ListFleets operation returned by the service .
* @ throws UnauthorizedException
* You are not authorized to perform this action .
* @ throws InternalServerErrorException
* The service is temporarily unavailable .
* @ throws InvalidRequestException
* The request is not valid .
* @ throws TooManyRequestsException
* The number of requests exceeds the limit .
* @ sample AmazonWorkLink . ListFleets
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / worklink - 2018-09-25 / ListFleets " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public ListFleetsResult listFleets ( ListFleetsRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeListFleets ( request ) ;
|
public class HealthCheckClient { /** * Deletes the specified HealthCheck resource .
* < p > Sample code :
* < pre > < code >
* try ( HealthCheckClient healthCheckClient = HealthCheckClient . create ( ) ) {
* ProjectGlobalHealthCheckName healthCheck = ProjectGlobalHealthCheckName . of ( " [ PROJECT ] " , " [ HEALTH _ CHECK ] " ) ;
* Operation response = healthCheckClient . deleteHealthCheck ( healthCheck . toString ( ) ) ;
* < / code > < / pre >
* @ param healthCheck Name of the HealthCheck resource to delete .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final Operation deleteHealthCheck ( String healthCheck ) { } }
|
// Wrap the resource name in the generated HTTP request object and delegate
// to the request-based overload.
final DeleteHealthCheckHttpRequest deleteRequest =
        DeleteHealthCheckHttpRequest.newBuilder()
                .setHealthCheck(healthCheck)
                .build();
return deleteHealthCheck(deleteRequest);
|
public class Member { /** * Create a MemberUpdater to execute update .
* @ param pathAccountSid The SID of the Account that created the resource ( s ) to
* update
* @ param pathQueueSid The SID of the Queue in which to find the members
* @ param pathCallSid The Call SID of the resource ( s ) to update
* @ param url The absolute URL of the Queue resource
* @ param method How to pass the update request data
* @ return MemberUpdater capable of executing the update */
public static MemberUpdater updater ( final String pathAccountSid , final String pathQueueSid , final String pathCallSid , final URI url , final HttpMethod method ) { } }
|
return new MemberUpdater ( pathAccountSid , pathQueueSid , pathCallSid , url , method ) ;
|
public class Tuple6 { /** * Apply attribute 1 as argument to a function and return a new tuple with the substituted argument . */
public final < U1 > Tuple6 < U1 , T2 , T3 , T4 , T5 , T6 > map1 ( Function < ? super T1 , ? extends U1 > function ) { } }
|
return Tuple . tuple ( function . apply ( v1 ) , v2 , v3 , v4 , v5 , v6 ) ;
|
public class ThreadClockImpl { /** * Threads run method that increments the clock and resets the generated
* nano seconds counter . */
public void run ( ) { } }
|
// Clock loop: every sysInterval milliseconds, advance the shared clock until
// the configured number of ticks (expires) has been consumed.
try {
    while (--expires >= 0) {
        sleep(sysInterval);
        // Guard the shared millisecond counter so concurrent readers see a
        // consistent value.
        synchronized (ThreadClockImpl.class) {
            currentTimeMillis += sysInterval;
        }
    }
} catch (InterruptedException e) {
    // Restore the interrupt status so code further up the stack can observe
    // the interruption (previously it was silently swallowed).
    Thread.currentThread().interrupt();
    System.out.println("Clock thread interrupted");
}
|
public class InjectionUtil { /** * Calls a method with the provided arguments as parameters .
* @ param retClass the method return value
* @ param targetClass the instance class
* @ param target the instance containing the method
* @ param method the method name
* @ param argClasses types of the method arguments
* @ param args method arguments used during invocation
* @ param < T > relating type parameter
* @ return method return value */
public static < T > T callMethod ( Class < T > retClass , Class < ? > targetClass , Object target , String method , Class [ ] argClasses , Object [ ] args ) { } }
|
try { Method classMethod = targetClass . getDeclaredMethod ( method , argClasses ) ; return AccessController . doPrivileged ( new SetMethodPrivilegedAction < T > ( classMethod , target , args ) ) ; } catch ( NoSuchMethodException e ) { throw new TransfuseInjectionException ( "Exception during method injection: NoSuchFieldException" , e ) ; } catch ( PrivilegedActionException e ) { throw new TransfuseInjectionException ( "PrivilegedActionException Exception during field injection" , e ) ; } catch ( Exception e ) { throw new TransfuseInjectionException ( "Exception during field injection" , e ) ; }
|
public class HtmlBaseTag { /** * Attribute implementation .
* @ param name
* @ param value
* @ param tsh
* @ throws JspException */
protected void setStateAttribute ( String name , String value , AbstractHtmlState tsh ) throws JspException { } }
|
boolean error = false ; // validate the name attribute , in the case of an error simply return .
if ( name == null || name . length ( ) <= 0 ) { String s = Bundle . getString ( "Tags_AttributeNameNotSet" ) ; registerTagError ( s , null ) ; error = true ; } // it ' s not legal to set the id or name attributes this way
if ( name != null && ( name . equals ( ID ) || name . equals ( NAME ) ) ) { String s = Bundle . getString ( "Tags_AttributeMayNotBeSet" , new Object [ ] { name } ) ; registerTagError ( s , null ) ; } if ( error ) return ; // if there is a style or class we will let them override the base
if ( name . equals ( CLASS ) ) { tsh . styleClass = value ; return ; } else if ( name . equals ( STYLE ) ) { tsh . style = value ; return ; } tsh . registerAttribute ( AbstractHtmlState . ATTR_GENERAL , name , value ) ;
|
public class FastSet { /** * { @ inheritDoc } */
@ Override public boolean containsAll ( IntSet c ) { } }
|
// Bitset-style containment check: trivially true for a null/empty/self
// argument, false when this set is empty. After converting the argument to a
// FastSet, a quick word-count comparison rules out supersets, then each of the
// other set's words must be fully covered by the corresponding local word
// ((localWords[i] & o) == o, i.e. every bit of o is present here).
if ( c == null || c . isEmpty ( ) || c == this ) { return true ; } if ( isEmpty ( ) ) { return false ; } final FastSet other = convert ( c ) ; if ( other . firstEmptyWord > firstEmptyWord ) { return false ; } final int [ ] localWords = words ; // faster
final int [ ] localOtherWords = other . words ; // faster
for ( int i = 0 ; i < other . firstEmptyWord ; i ++ ) { int o = localOtherWords [ i ] ; if ( ( localWords [ i ] & o ) != o ) { return false ; } } return true ;
|
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link ConeType } { @ code > }
* @ param value
* Java instance representing xml element ' s value .
* @ return
* the new instance of { @ link JAXBElement } { @ code < } { @ link ConeType } { @ code > } */
@ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "Cone" , substitutionHeadNamespace = "http://www.opengis.net/gml" , substitutionHeadName = "_GriddedSurface" ) public JAXBElement < ConeType > createCone ( ConeType value ) { } }
|
return new JAXBElement < ConeType > ( _Cone_QNAME , ConeType . class , null , value ) ;
|
public class SearchBuilderLegacy { /** * Returns the { @ link SearchBuilder } represented by the specified JSON { @ code String } .
* @ param json the JSON { @ code String } representing a { @ link SearchBuilder }
* @ return the { @ link SearchBuilder } represented by the specified JSON { @ code String } */
static SearchBuilder fromJson ( String json ) { } }
|
try { return JsonSerializer . fromString ( json , SearchBuilderLegacy . class ) . builder ; } catch ( IOException e ) { throw new IndexException ( e , "Unparseable JSON search: {}" , json ) ; }
|
public class MavenResolverSystemBaseImpl { /** * { @ inheritDoc }
* @ see org . jboss . shrinkwrap . resolver . api . maven . PomlessResolveStageBase # loadPomFromClassLoaderResource ( java . lang . String ) */
@ Override public EQUIPPEDRESOLVESTAGETYPE loadPomFromClassLoaderResource ( String pathToPomResource ) throws IllegalArgumentException , InvalidConfigurationFileException { } }
|
// Pure delegation to the wrapped resolver stage; argument validation and
// exceptions are handled by the delegate.
return this.delegate.loadPomFromClassLoaderResource(pathToPomResource);
|
public class RenderingPipelineConfiguration { /** * Beans below this point are elements of the " standard " uPortal Rendering Pipeline - - where the
* uPortal webapp renders all elements of the UI and handles all requests . */
@ Bean ( name = "standardRenderingPipeline" ) public IPortalRenderingPipeline getStandardRenderingPipeline ( ) { } }
|
// The "standard" pipeline is a dynamic pipeline whose head component is the
// analytics-incorporation stage (which wraps the remainder of the chain).
final DynamicRenderingPipeline pipeline = new DynamicRenderingPipeline();
pipeline.setPipeline(getAnalyticsIncorporationComponent());
return pipeline;
|
public class WebApplicationContext { /** * Stop the web application .
* Handlers for resource , servlet , filter and security are removed
* as they are recreated and configured by any subsequent call to start ( ) .
* @ exception InterruptedException */
protected void doStop ( ) throws Exception { } }
|
// Teardown sequence, order-sensitive: (1) fire contextDestroyed on every
// registered ServletContextListener (iterated in reverse registration order),
// collecting per-listener failures into a MultiException rather than aborting;
// (2) stop the superclass context; (3) clear security constraints, remove and
// null the web-app handler, and drop error pages / webapp / WEB-INF /
// configuration references so start() can rebuild them. The thread's context
// classloader is restored in the finally block. All accumulated exceptions
// are rethrown at the end via mex.ifExceptionThrow().
// NOTE(review): "mex != null" is always true here (assigned on the first
// line) -- harmless, but the guard is redundant.
MultiException mex = new MultiException ( ) ; Thread thread = Thread . currentThread ( ) ; ClassLoader lastContextLoader = thread . getContextClassLoader ( ) ; try { // Context listeners
if ( _contextListeners != null ) { if ( _webAppHandler != null ) { ServletContextEvent event = new ServletContextEvent ( getServletContext ( ) ) ; for ( int i = LazyList . size ( _contextListeners ) ; i -- > 0 ; ) { try { ( ( ServletContextListener ) LazyList . get ( _contextListeners , i ) ) . contextDestroyed ( event ) ; } catch ( Exception e ) { mex . add ( e ) ; } } } } _contextListeners = null ; // Stop the context
try { super . doStop ( ) ; } catch ( Exception e ) { mex . add ( e ) ; } // clean up
clearSecurityConstraints ( ) ; if ( _webAppHandler != null ) removeHandler ( _webAppHandler ) ; _webAppHandler = null ; if ( _errorPages != null ) _errorPages . clear ( ) ; _errorPages = null ; _webApp = null ; _webInf = null ; _configurations = null ; } finally { thread . setContextClassLoader ( lastContextLoader ) ; } if ( mex != null ) mex . ifExceptionThrow ( ) ;
|
public class Animation { /** * Animates this { @ link Animation } . < br >
* Sets the { @ link # started } and { @ link # finished } status .
* @ param timer the timer
* @ return the s */
public S animate ( Timer timer ) { } }
|
// Advance the animation based on the timer: compute elapsed time net of the
// tick-based delay, derive started/finished flags from the transform's own
// delay and total duration, skip rendering outside the active window unless
// renderBefore/renderAfter allow it, then apply the transform and return the
// transformable. NOTE(review): "getLoops() != -1" presumably means -1 encodes
// infinite looping (never finished) -- confirm against Transform's contract.
long elapsed = timer . elapsedTime ( ) - Timer . tickToTime ( delay ) ; started = elapsed > transform . getDelay ( ) ; finished = elapsed > transform . totalDuration ( ) && transform . getLoops ( ) != - 1 ; if ( ! started && ! renderBefore ) return null ; if ( finished && ! renderAfter ) return null ; transform . transform ( transformable , elapsed ) ; return transformable ;
|
public class SamlIdPObjectSigner { /** * Prepare endpoint url scheme security handler .
* @ param < T > the type parameter
* @ param outboundContext the outbound context
* @ throws Exception the exception */
protected < T extends SAMLObject > void prepareEndpointURLSchemeSecurityHandler ( final MessageContext < T > outboundContext ) throws Exception { } }
|
// Create, initialize and immediately invoke the URL-scheme security handler
// against the outbound SAML message context; the handler is single-use.
val urlSchemeHandler = new EndpointURLSchemeSecurityHandler();
urlSchemeHandler.initialize();
urlSchemeHandler.invoke(outboundContext);
|
public class MarkLogicClient { /** * commits a transaction
* @ throws MarkLogicTransactionException */
public void commitTransaction ( ) throws MarkLogicTransactionException { } }
|
// Guard clause: committing without an open transaction is a caller error.
if (!isActiveTransaction()) {
    throw new MarkLogicTransactionException("No active transaction to commit.");
}
try {
    // Flush pending work before committing, then discard the transaction.
    sync();
    this.tx.commit();
    this.tx = null;
} catch (MarkLogicSesameException e) {
    logger.error(e.getLocalizedMessage());
    throw new MarkLogicTransactionException(e);
}
|
public class UntilElementImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public void eUnset ( int featureID ) { } }
|
// EMF-generated unset: reset LEFT/RIGHT rule-element references to null for
// this UntilElement's own features, delegating any other feature id to the
// superclass. Do not hand-edit beyond comments; this mirrors the @generated
// pattern.
switch ( featureID ) { case SimpleAntlrPackage . UNTIL_ELEMENT__LEFT : setLeft ( ( RuleElement ) null ) ; return ; case SimpleAntlrPackage . UNTIL_ELEMENT__RIGHT : setRight ( ( RuleElement ) null ) ; return ; } super . eUnset ( featureID ) ;
|
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EEnum getIfcUnitaryControlElementTypeEnum ( ) { } }
|
// EMF-generated lazy accessor: on first use, resolve the EEnum from the
// registered Ifc4 package by classifier index (1095 is the generated position
// of IfcUnitaryControlElementTypeEnum in the package's classifier list) and
// cache it for subsequent calls.
if ( ifcUnitaryControlElementTypeEnumEEnum == null ) { ifcUnitaryControlElementTypeEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 1095 ) ; } return ifcUnitaryControlElementTypeEnumEEnum ;
|
public class Field { /** * Same as { @ link Field # getNeighbours ( ) } but filtered with { @ link Field # isVisible ( ) } . */
public Set < VisibleField > getVisibleNeighbours ( ) { } }
|
// Keep only the neighbours that are currently visible, exposing each through
// its VisibleField view.
return getNeighbours().stream()
        .filter(neighbour -> neighbour.isVisible())
        .map(neighbour -> neighbour.asVisibleField())
        .collect(Collectors.toSet());
|
public class IgnoreClassHelper { /** * Try to exclude JDK classes and known JDBC Drivers and Libraries .
* We want to do this for performance reasons - that is skip checking for
* enhancement on classes that we know are not part of the application code
* and should not be enhanced .
* @ param className the className of the class being defined .
* @ return true if this class should not be processed . */
public boolean isIgnoreClass ( String className ) { } }
|
// Decide whether enhancement should skip this class. Null names are ignored.
// The name is normalized to slash form; when explicit processPackages are
// configured, only positive matches are processed. Otherwise, exclude by
// heuristic: $Proxy-style synthetic classes, classes in the default package
// (no slash), then successively check the first one, two and three package
// segments against the ignoreOneLevel/ignoreTwoLevel/ignoreThreeLevel sets of
// known JDK/JDBC/library prefixes. Anything not matched is processed.
if ( className == null ) { return true ; } className = className . replace ( '.' , '/' ) ; if ( processPackages . length > 0 ) { // use specific positive matching
return specificMatching ( className ) ; } // we don ' t have specific packages to process so instead
// we will ignore packages that we know we don ' t want
// ignore $ Proxy classes
if ( className . startsWith ( "$" ) ) { return true ; } int firstSlash = className . indexOf ( '/' ) ; if ( firstSlash == - 1 ) { return true ; } String firstPackage = className . substring ( 0 , firstSlash ) ; if ( ignoreOneLevel . contains ( firstPackage ) ) { return true ; } int secondSlash = className . indexOf ( '/' , firstSlash + 1 ) ; if ( secondSlash == - 1 ) { return false ; } String secondPackage = className . substring ( 0 , secondSlash ) ; if ( ignoreTwoLevel . contains ( secondPackage ) ) { return true ; } int thirdSlash = className . indexOf ( '/' , secondSlash + 1 ) ; if ( thirdSlash == - 1 ) { return false ; } String thirdPackage = className . substring ( 0 , thirdSlash ) ; return ignoreThreeLevel . contains ( thirdPackage ) ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.