signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class JcrSystemViewExporter { /** * Fires the appropriate SAX events on the content handler to build the XML elements for the property . * @ param property the property to be exported * @ param contentHandler the SAX content handler for which SAX events will be invoked as the XML document is created . * @ param skipBinary if < code > true < / code > , indicates that binary properties should not be exported * @ throws SAXException if an exception occurs during generation of the XML document * @ throws RepositoryException if an exception occurs accessing the content repository */ private void emitProperty ( Property property , ContentHandler contentHandler , boolean skipBinary ) throws RepositoryException , SAXException { } }
assert property instanceof AbstractJcrProperty : "Illegal attempt to use " + getClass ( ) . getName ( ) + " on non-ModeShape property" ; AbstractJcrProperty prop = ( AbstractJcrProperty ) property ; // first set the property sv : name attribute AttributesImpl propAtts = new AttributesImpl ( ) ; propAtts . addAttribute ( JcrSvLexicon . NAME . getNamespaceUri ( ) , JcrSvLexicon . NAME . getLocalName ( ) , getPrefixedName ( JcrSvLexicon . NAME ) , PropertyType . nameFromValue ( PropertyType . STRING ) , prop . getName ( ) ) ; // and it ' s sv : type attribute propAtts . addAttribute ( JcrSvLexicon . TYPE . getNamespaceUri ( ) , JcrSvLexicon . TYPE . getLocalName ( ) , getPrefixedName ( JcrSvLexicon . TYPE ) , PropertyType . nameFromValue ( PropertyType . STRING ) , org . modeshape . jcr . api . PropertyType . nameFromValue ( prop . getType ( ) ) ) ; // and it ' s sv : multiple attribute if ( prop . isMultiple ( ) ) { propAtts . addAttribute ( JcrSvLexicon . TYPE . getNamespaceUri ( ) , JcrSvLexicon . TYPE . getLocalName ( ) , getPrefixedName ( JcrSvLexicon . MULTIPLE ) , PropertyType . nameFromValue ( PropertyType . BOOLEAN ) , Boolean . TRUE . toString ( ) ) ; } // output the sv : property element startElement ( contentHandler , JcrSvLexicon . PROPERTY , propAtts ) ; // then output a sv : value element for each of its values if ( prop instanceof JcrMultiValueProperty ) { Value [ ] values = prop . getValues ( ) ; for ( Value value : values ) { emitValue ( value , contentHandler , property . getType ( ) , skipBinary ) ; } } else { emitValue ( property . getValue ( ) , contentHandler , property . getType ( ) , skipBinary ) ; } // end the sv : property element endElement ( contentHandler , JcrSvLexicon . PROPERTY ) ;
public class Matrix { /** * Generate matrix with random elements * @ param m Number of rows . * @ param n Number of colums . * @ return An m - by - n matrix with uniformly distributed random elements . */ public static Matrix random ( int m , int n ) { } }
Matrix A = new Matrix ( m , n ) ; double [ ] [ ] X = A . getArray ( ) ; for ( int i = 0 ; i < m ; i ++ ) { for ( int j = 0 ; j < n ; j ++ ) { X [ i ] [ j ] = Math . random ( ) ; } } return A ;
public class BackedSession {
    /**
     * PM03375.1: Introduce boolean parameter to distinguish between application-invoked
     * invalidation and timeout. If application-invoked then we need to call
     * sessionCacheDiscard.
     *
     * @param appInvoked {@code true} when the application itself requested the
     *                   invalidation (as opposed to a timeout-driven one)
     */
    public synchronized void invalidate(boolean appInvoked) {
        if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
            LoggingUtil.SESSION_LOGGER_WAS.entering(methodClassName, methodNames[INVALIDATE], appInvoked);
        }
        // Materialize binding listeners before the session is torn down.
        getSwappableListeners(BackedSession.HTTP_SESSION_BINDING_LISTENER);
        // Notify the store of an app-invoked discard only when no invalidation is
        // already in progress and the session is still valid.
        if (appInvoked && (!invalInProgress) && isValid())
            _storeCallback.sessionCacheDiscard(this);
        super.invalidate();
        // The session will be invalid, but do we need to worry about these in case we
        // are already processing this session somewhere else?
        if (this.appDataChanges != null)
            this.appDataChanges.clear();
        if (this.appDataRemovals != null)
            this.appDataRemovals.clear();
        // Reset per-session bookkeeping so no stale state survives the invalidation.
        this.update = null;
        this.userWriteHit = false;
        this.maxInactWriteHit = false;
        this.listenCntHit = false;
        setSwappableData(null);
        this.mNonswappableData = null;
        if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
            LoggingUtil.SESSION_LOGGER_WAS.exiting(methodClassName, methodNames[INVALIDATE]);
        }
    }
}
public class FeatureGenerators { /** * Gets a feature generator that applies a generator , then * applies a converter to the generated feature names . * @ param generator * @ param converter * @ return */ public static < A , B , C > FeatureGenerator < A , C > postConvertingFeatureGenerator ( FeatureGenerator < A , B > generator , Function < B , C > converter ) { } }
return new PostConvertingFeatureGenerator < A , B , C > ( generator , converter ) ;
public class DescribeCrossAccountAccessRoleRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param describeCrossAccountAccessRoleRequest the request to marshall; must not be null
     * @param protocolMarshaller the protocol marshaller (unused here: this request
     *        type has no members to write, as the empty try body shows)
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DescribeCrossAccountAccessRoleRequest describeCrossAccountAccessRoleRequest, ProtocolMarshaller protocolMarshaller) {
        if (describeCrossAccountAccessRoleRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Generated marshaller: no request members to marshall for this type.
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class KunderaCriteriaBuilder {
    /**
     * (non-Javadoc)
     *
     * @see javax.persistence.criteria.CriteriaBuilder#size(javax.persistence.criteria.Expression)
     */
    @Override
    public <C extends Collection<?>> Expression<Integer> size(Expression<C> arg0) {
        // TODO Auto-generated method stub
        // NOTE(review): collection-size expressions are not implemented by this
        // builder; callers currently receive null rather than an exception.
        return null;
    }
}
public class Ftp { /** * * check if file or directory exists if it exists return FTPFile otherwise null * @ param client * @ param strPath * @ return FTPFile or null * @ throws IOException * @ throws PageException / private FTPFile exists ( FTPClient client , String strPath ) throws * PageException , IOException { strPath = strPath . trim ( ) ; * / / get parent path FTPPath path = new FTPPath ( client . printWorkingDirectory ( ) , strPath ) ; String * name = path . getName ( ) ; print . out ( " path : " + name ) ; * / / when directory FTPFile [ ] files = null ; try { files = client . listFiles ( path . getPath ( ) ) ; } catch * ( IOException e ) { } * if ( files ! = null ) { for ( int i = 0 ; i < files . length ; i + + ) { if ( files [ i ] . getName ( ) . equalsIgnoreCase ( name ) ) * { return files [ i ] ; } } * } return null ; } */ private FTPFile existsFile ( AFTPClient client , String strPath , boolean isFile ) throws PageException , IOException { } }
strPath = strPath . trim ( ) ; if ( strPath . equals ( "/" ) ) { FTPFile file = new FTPFile ( ) ; file . setName ( "/" ) ; file . setType ( FTPFile . DIRECTORY_TYPE ) ; return file ; } // get parent path FTPPath path = new FTPPath ( client , strPath ) ; String p = path . getPath ( ) ; String n = path . getName ( ) ; strPath = p ; if ( "//" . equals ( p ) ) strPath = "/" ; if ( isFile ) strPath += n ; // when directory FTPFile [ ] files = null ; try { files = client . listFiles ( p ) ; } catch ( IOException e ) { } if ( files != null ) { for ( int i = 0 ; i < files . length ; i ++ ) { if ( files [ i ] . getName ( ) . equalsIgnoreCase ( n ) ) { return files [ i ] ; } } } return null ;
public class TargetHttpsProxyClient { /** * Sets the QUIC override policy for TargetHttpsProxy . * < p > Sample code : * < pre > < code > * try ( TargetHttpsProxyClient targetHttpsProxyClient = TargetHttpsProxyClient . create ( ) ) { * ProjectGlobalTargetHttpsProxyName targetHttpsProxy = ProjectGlobalTargetHttpsProxyName . of ( " [ PROJECT ] " , " [ TARGET _ HTTPS _ PROXY ] " ) ; * TargetHttpsProxiesSetQuicOverrideRequest targetHttpsProxiesSetQuicOverrideRequestResource = TargetHttpsProxiesSetQuicOverrideRequest . newBuilder ( ) . build ( ) ; * Operation response = targetHttpsProxyClient . setQuicOverrideTargetHttpsProxy ( targetHttpsProxy . toString ( ) , targetHttpsProxiesSetQuicOverrideRequestResource ) ; * < / code > < / pre > * @ param targetHttpsProxy Name of the TargetHttpsProxy resource to set the QUIC override policy * for . The name should conform to RFC1035. * @ param targetHttpsProxiesSetQuicOverrideRequestResource * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Operation setQuicOverrideTargetHttpsProxy ( String targetHttpsProxy , TargetHttpsProxiesSetQuicOverrideRequest targetHttpsProxiesSetQuicOverrideRequestResource ) { } }
SetQuicOverrideTargetHttpsProxyHttpRequest request = SetQuicOverrideTargetHttpsProxyHttpRequest . newBuilder ( ) . setTargetHttpsProxy ( targetHttpsProxy ) . setTargetHttpsProxiesSetQuicOverrideRequestResource ( targetHttpsProxiesSetQuicOverrideRequestResource ) . build ( ) ; return setQuicOverrideTargetHttpsProxy ( request ) ;
public class Async { /** * Sends a { @ link TextMessage } . * @ param queueName name of queue * @ param text body of message * @ param deliveryMode delivery mode : { @ link javax . jms . DeliveryMode } . * @ param priority priority of the message . Correct values are from 0 to 9 , with higher number denoting a * higher priority . * @ param timeToLive the message ' s lifetime ( in milliseconds , where 0 is to never expire ) */ public void sendTextMessage ( String queueName , String text , int deliveryMode , int priority , int timeToLive ) { } }
checkStarted ( ) ; try ( Session session = producerConnection . createSession ( ) ) { checkInRange ( deliveryMode , 1 , 2 , "delivery mode" ) ; checkInRange ( priority , 0 , 9 , "priority" ) ; if ( timeToLive < 0 ) throw new AsyncException ( "time to live cannot be negative" ) ; Queue queue = ( Queue ) jmsServer . lookup ( QUEUE_NAMESPACE + queueName ) ; if ( queue == null ) throw new AsyncException ( "Failed to find queue: " + queueName ) ; Message message = session . createTextMessage ( text ) ; MessageProducer p = session . createProducer ( queue ) ; p . send ( message , deliveryMode , priority , timeToLive ) ; } catch ( AsyncException e ) { throw e ; } catch ( Exception e ) { throw new AsyncException ( "Failed to send message" , e ) ; }
public class BugInstance { /** * Add a method annotation for the method which is called by given * instruction . * @ param cpg * the constant pool for the method containing the call * @ param inv * the InvokeInstruction * @ return this object */ @ Nonnull public BugInstance addCalledMethod ( ConstantPoolGen cpg , InvokeInstruction inv ) { } }
String className = inv . getClassName ( cpg ) ; String methodName = inv . getMethodName ( cpg ) ; String methodSig = inv . getSignature ( cpg ) ; addMethod ( className , methodName , methodSig , inv . getOpcode ( ) == Const . INVOKESTATIC ) ; describe ( MethodAnnotation . METHOD_CALLED ) ; return this ;
public class FormatUtil {
    /**
     * Formats a time delta in human readable format.
     *
     * @param time time delta in ms
     * @param sep separator appended between adjacent unit fragments
     * @return Formatted string
     */
    public static String formatTimeDelta(long time, CharSequence sep) {
        final StringBuilder sb = new StringBuilder();
        final Formatter fmt = new Formatter(sb);
        // Walk the units from largest to smallest, peeling each unit's share off `time`.
        for (int i = TIME_UNIT_SIZES.length - 1; i >= 0; --i) {
            // We do not include ms if we are in the order of minutes.
            if (i == 0 && sb.length() > 4) {
                continue;
            }
            // Separator
            if (sb.length() > 0) {
                sb.append(sep);
            }
            final long acValue = time / TIME_UNIT_SIZES[i];
            time = time % TIME_UNIT_SIZES[i];
            // Suppress leading zero-valued units so output starts at the first nonzero unit.
            if (!(acValue == 0 && sb.length() == 0)) {
                fmt.format("%0" + TIME_UNIT_DIGITS[i] + "d%s", Long.valueOf(acValue), TIME_UNIT_NAMES[i]);
            }
        }
        fmt.close();
        return sb.toString();
    }
}
public class ThrowableUtil {
    /**
     * Gets the stack trace from a Throwable as a String.
     *
     * @param cause the {@link Throwable} to be examined
     * @return the stack trace as generated by the
     *         {@link Throwable#printStackTrace(java.io.PrintWriter)} method.
     */
    public static String stackTraceToString(Throwable cause) {
        // FIX: the original printed into a ByteArrayOutputStream and decoded with
        // new String(byte[]), which uses the platform default charset and can mangle
        // non-ASCII characters in exception messages. Writing characters directly
        // through StringWriter avoids the byte round-trip entirely, and neither
        // writer holds an OS resource, so no close() bookkeeping is needed.
        StringWriter buffer = new StringWriter();
        PrintWriter writer = new PrintWriter(buffer);
        cause.printStackTrace(writer);
        writer.flush();
        return buffer.toString();
    }
}
public class XmlParser {
    /**
     * Skip whitespace characters.
     * <pre>
     * [3] S ::= (#x20 | #x9 | #xd | #xa)+
     * </pre>
     */
    private void skipWhitespace() throws SAXException, IOException {
        // Consume characters until the first non-whitespace one, then push it back
        // so the caller sees it.
        char ch;
        do {
            ch = readCh();
        } while (isWhitespace(ch));
        unread(ch);
    }
}
public class GeoPackageOverlayFactory { /** * Create a composite overlay by first adding a tile overlay for the tile DAO followed by the provided overlay * @ param tileDao tile dao * @ param overlay bounded overlay * @ return composite overlay */ public static CompositeOverlay getCompositeOverlay ( TileDao tileDao , BoundedOverlay overlay ) { } }
List < TileDao > tileDaos = new ArrayList < > ( ) ; tileDaos . add ( tileDao ) ; return getCompositeOverlay ( tileDaos , overlay ) ;
public class nat64 { /** * Use this API to add nat64. */ public static base_response add ( nitro_service client , nat64 resource ) throws Exception { } }
nat64 addresource = new nat64 ( ) ; addresource . name = resource . name ; addresource . acl6name = resource . acl6name ; addresource . netprofile = resource . netprofile ; return addresource . add_resource ( client ) ;
public class ProcessorContext { /** * Gets as . * @ param < T > the type parameter * @ param name the name * @ param clazz the clazz * @ return the as */ public < T > T getAs ( String name , @ NonNull Class < T > clazz ) { } }
if ( properties . containsKey ( name ) ) { return Cast . as ( properties . get ( name ) , clazz ) ; } return Config . get ( name ) . as ( clazz ) ;
public class RuleTableComponent { /** * GEN - LAST : event _ addBtnActionPerformed */ private void removeBtnActionPerformed ( java . awt . event . ActionEvent evt ) { } }
// GEN - FIRST : event _ removeBtnActionPerformed DefaultTableModel model = ( DefaultTableModel ) rules . getModel ( ) ; int [ ] rows = rules . getSelectedRows ( ) ; for ( int i = 0 ; i < rows . length ; i ++ ) { model . removeRow ( rows [ i ] - i ) ; scan . removeMatchRule ( rows [ i ] - i ) ; }
public class BlockMetadataManager {
    /**
     * Moves an existing block to another location currently hold by a temp block.
     *
     * @param blockMeta the metadata of the block to move
     * @param tempBlockMeta a placeholder in the destination directory
     * @return the new block metadata if success, absent otherwise
     * @throws BlockDoesNotExistException when the block to move is not found
     * @throws BlockAlreadyExistsException when the block to move already exists in the destination
     * @throws WorkerOutOfSpaceException when destination have no extra space to hold the block to move
     */
    public BlockMeta moveBlockMeta(BlockMeta blockMeta, TempBlockMeta tempBlockMeta) throws BlockDoesNotExistException, WorkerOutOfSpaceException, BlockAlreadyExistsException {
        StorageDir srcDir = blockMeta.getParentDir();
        StorageDir dstDir = tempBlockMeta.getParentDir();
        // Detach from the source first so the block is never registered in two dirs.
        srcDir.removeBlockMeta(blockMeta);
        // Rebuild the metadata against the destination dir, preserving id and size.
        BlockMeta newBlockMeta = new BlockMeta(blockMeta.getBlockId(), blockMeta.getBlockSize(), dstDir);
        // Swap the temp placeholder for the real metadata in the destination.
        dstDir.removeTempBlockMeta(tempBlockMeta);
        dstDir.addBlockMeta(newBlockMeta);
        return newBlockMeta;
    }
}
public class MessageLogGridScreen {
    /**
     * SetupSFields Method.
     * Registers the default grid view for each displayed MessageLog column, in
     * left-to-right display order.
     */
    public void setupSFields() {
        // message status column
        this.getRecord(MessageLog.MESSAGE_LOG_FILE).getField(MessageLog.MESSAGE_STATUS_ID).setupDefaultView(this.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.ANCHOR_DEFAULT), this, ScreenConstants.DEFAULT_DISPLAY);
        // message time column
        this.getRecord(MessageLog.MESSAGE_LOG_FILE).getField(MessageLog.MESSAGE_TIME).setupDefaultView(this.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.ANCHOR_DEFAULT), this, ScreenConstants.DEFAULT_DISPLAY);
        // description column
        this.getRecord(MessageLog.MESSAGE_LOG_FILE).getField(MessageLog.DESCRIPTION).setupDefaultView(this.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.ANCHOR_DEFAULT), this, ScreenConstants.DEFAULT_DISPLAY);
        // process info column
        this.getRecord(MessageLog.MESSAGE_LOG_FILE).getField(MessageLog.MESSAGE_PROCESS_INFO_ID).setupDefaultView(this.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.ANCHOR_DEFAULT), this, ScreenConstants.DEFAULT_DISPLAY);
        // transport column
        this.getRecord(MessageLog.MESSAGE_LOG_FILE).getField(MessageLog.MESSAGE_TRANSPORT_ID).setupDefaultView(this.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.ANCHOR_DEFAULT), this, ScreenConstants.DEFAULT_DISPLAY);
    }
}
public class BlockingArrayQueue {
    /**
     * Inserts the given element at the tail if capacity allows.
     *
     * @param e the element to enqueue (must not be null)
     * @return {@code true} if the element was enqueued, {@code false} when the
     *         queue is at max capacity or cannot grow
     */
    @Override
    public boolean offer(E e) {
        Objects.requireNonNull(e);
        boolean notEmpty = false;
        _tailLock.lock();
        // Size cannot grow... only shrink
        try {
            int size = _size.get();
            if (size >= _maxCapacity)
                return false;
            // Should we expand array?
            if (size == _elements.length) {
                // Growing needs both locks; head lock is taken while holding the
                // tail lock — NOTE(review): confirm this ordering matches the
                // other methods of this class to avoid deadlock.
                _headLock.lock();
                try {
                    if (!grow())
                        return false;
                } finally {
                    _headLock.unlock();
                }
            }
            // Re-read head and tail after a possible grow
            int tail = _indexes[TAIL_OFFSET];
            _elements[tail] = e;
            _indexes[TAIL_OFFSET] = (tail + 1) % _elements.length;
            // Record whether this insert transitioned the queue from empty to non-empty.
            notEmpty = _size.getAndIncrement() == 0;
        } finally {
            _tailLock.unlock();
        }
        // Wake a waiting consumer only on the empty -> non-empty transition.
        if (notEmpty) {
            _headLock.lock();
            try {
                _notEmpty.signal();
            } finally {
                _headLock.unlock();
            }
        }
        return true;
    }
}
public class OpenTSDBMessageFormatter { /** * private List < String > formatResult ( Result result ) { * return this . formatResult ( result , null ) ; */ private List < String > formatResult ( Result result , Server server ) { } }
List < String > resultStrings = new LinkedList < > ( ) ; if ( result . getValuePath ( ) . isEmpty ( ) ) { processOneMetric ( resultStrings , server , result , result . getValue ( ) , null , null ) ; } else { processOneMetric ( resultStrings , server , result , result . getValue ( ) , tagName , StringUtils . join ( result . getValuePath ( ) , '.' ) ) ; } return resultStrings ;
public class ModelWrapper {
    /**
     * Get the model object populated with the value of the DB search results.
     *
     * @return a new instance of the model class with each mapped column value
     *         reflected into the corresponding field
     * @throws D6RuntimeException if the model class cannot be instantiated or a
     *         field cannot be accessed
     */
    public T getAsObject() {
        try {
            T modelClassObj = modelClazz.newInstance();
            Set<String> columnNameSet = mFieldMap.keySet();
            for (String columnName : columnNameSet) {
                D6ModelClassFieldInfo fieldInfo = mFieldMap.get(columnName);
                final Field field = fieldInfo.field;
                final Object value = fieldInfo.value;
                if (value != null) {
                    // try to set 'null' if available (clears any previous value;
                    // failures here are deliberately ignored — e.g. primitive fields)
                    try {
                        field.set(modelClassObj, null);
                    } catch (Exception e) {
                    }
                    try {
                        field.set(modelClassObj, value);
                    } catch (IllegalAccessException e) {
                        // handling this exception for field.set(o, value)
                        loge("#getAsObject", e);
                    } catch (IllegalArgumentException e) {
                        // type mismatch between the DB value and the model field:
                        // log the detail and keep populating the remaining fields
                        final String name = field.getName();
                        final Class<?> type = field.getType();
                        String msg = "The value of '" + columnName + "'=" + value + "(" + value.getClass() + ") couldn't set to variable '" + name + "'(" + type + ")";
                        loge("#getAsObject " + msg);
                    }
                } else {
                    // value is null
                    // check is this column CompositType like MySQL's Geometry
                    final List<Object> compositObjValues = fieldInfo.valuesForSpecialType;
                    if (field.getType() == org.riversun.d6.model.Geometry.class) {
                        if (compositObjValues != null && compositObjValues.size() > 1) {
                            // rebuild the Geometry from its (x, y) component values
                            final org.riversun.d6.model.Geometry newGeometryObj = new org.riversun.d6.model.Geometry();
                            newGeometryObj.x = (Double) compositObjValues.get(0);
                            newGeometryObj.y = (Double) compositObjValues.get(1);
                            field.set(modelClassObj, newGeometryObj);
                        }
                    }
                }
            }
            return modelClassObj;
        } catch (IllegalAccessException e) {
            throw new D6RuntimeException(e);
        } catch (InstantiationException e) {
            throw new D6RuntimeException("Cannot instanciate model object from '" + modelClazz.getName() + "' If you declare '" + modelClazz.getName() + "' as inner class, please make it static.");
        }
    }
}
public class BigtableDataGrpcClient { /** * Creates a { @ link Metadata } that contains pertinent headers . */ private Metadata createMetadata ( String tableName ) { } }
Metadata metadata = new Metadata ( ) ; if ( tableName != null ) { metadata . put ( GRPC_RESOURCE_PREFIX_KEY , tableName ) ; } return metadata ;
public class TableA5TAB { /** * { @ inheritDoc } */ @ Override protected void readRow ( int uniqueID , byte [ ] data ) { } }
Map < String , Object > map = new HashMap < String , Object > ( ) ; map . put ( "UNIQUE_ID" , Integer . valueOf ( uniqueID ) ) ; int originalDuration = PEPUtility . getShort ( data , 22 ) ; int remainingDuration = PEPUtility . getShort ( data , 24 ) ; int percentComplete = 0 ; if ( originalDuration != 0 ) { percentComplete = ( ( originalDuration - remainingDuration ) * 100 ) / originalDuration ; } map . put ( "ORIGINAL_DURATION" , Duration . getInstance ( originalDuration , TimeUnit . DAYS ) ) ; map . put ( "REMAINING_DURATION" , Duration . getInstance ( remainingDuration , TimeUnit . DAYS ) ) ; map . put ( "PERCENT_COMPLETE" , Integer . valueOf ( percentComplete ) ) ; map . put ( "TARGET_START" , PEPUtility . getStartDate ( data , 4 ) ) ; map . put ( "TARGET_FINISH" , PEPUtility . getFinishDate ( data , 6 ) ) ; map . put ( "ACTUAL_START" , PEPUtility . getStartDate ( data , 16 ) ) ; map . put ( "ACTUAL_FINISH" , PEPUtility . getFinishDate ( data , 18 ) ) ; addRow ( uniqueID , map ) ;
public class DiagnosticsInner {
    /**
     * Get Site Analysis.
     * Get Site Analysis.
     *
     * @param resourceGroupName Name of the resource group to which the resource belongs.
     * @param siteName Site Name
     * @param diagnosticCategory Diagnostic Category
     * @param analysisName Analysis Name
     * @param slot Slot - optional
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the DiagnosticAnalysisInner object
     */
    public Observable<DiagnosticAnalysisInner> getSiteAnalysisSlotAsync(String resourceGroupName, String siteName, String diagnosticCategory, String analysisName, String slot) {
        // Unwrap the ServiceResponse envelope so subscribers receive the body directly.
        return getSiteAnalysisSlotWithServiceResponseAsync(resourceGroupName, siteName, diagnosticCategory, analysisName, slot).map(new Func1<ServiceResponse<DiagnosticAnalysisInner>, DiagnosticAnalysisInner>() {
            @Override
            public DiagnosticAnalysisInner call(ServiceResponse<DiagnosticAnalysisInner> response) {
                return response.body();
            }
        });
    }
}
public class InternalXbaseWithAnnotationsParser {
    /**
     * $ANTLR start synpred35_InternalXbaseWithAnnotations
     * Generated syntactic-predicate fragment: succeeds when the input matches a
     * JvmTypeReference followed by a ValidID. Do not edit by hand.
     */
    public final void synpred35_InternalXbaseWithAnnotations_fragment() throws RecognitionException {
        // InternalXbaseWithAnnotations.g:4368:5: ( ( ( ( ruleJvmTypeReference ) ) ( ( ruleValidID ) ) ) )
        // InternalXbaseWithAnnotations.g:4368:6: ( ( ( ruleJvmTypeReference ) ) ( ( ruleValidID ) ) )
        {
            // InternalXbaseWithAnnotations.g:4368:6: ( ( ( ruleJvmTypeReference ) ) ( ( ruleValidID ) ) )
            // InternalXbaseWithAnnotations.g:4369:6: ( ( ruleJvmTypeReference ) ) ( ( ruleValidID ) )
            {
                // InternalXbaseWithAnnotations.g:4369:6: ( ( ruleJvmTypeReference ) )
                // InternalXbaseWithAnnotations.g:4370:7: ( ruleJvmTypeReference )
                {
                    // InternalXbaseWithAnnotations.g:4370:7: ( ruleJvmTypeReference )
                    // InternalXbaseWithAnnotations.g:4371:8: ruleJvmTypeReference
                    {
                        pushFollow(FOLLOW_3);
                        ruleJvmTypeReference();
                        state._fsp--;
                        if (state.failed) return;
                    }
                }
                // InternalXbaseWithAnnotations.g:4374:6: ( ( ruleValidID ) )
                // InternalXbaseWithAnnotations.g:4375:7: ( ruleValidID )
                {
                    // InternalXbaseWithAnnotations.g:4375:7: ( ruleValidID )
                    // InternalXbaseWithAnnotations.g:4376:8: ruleValidID
                    {
                        pushFollow(FOLLOW_2);
                        ruleValidID();
                        state._fsp--;
                        if (state.failed) return;
                    }
                }
            }
        }
    }
}
public class CoinsuperAdapters {
    /**
     * There is no method to discern market versus limit order type - so this returns
     * a generic GenericOrder as a status.
     *
     * @param orderId the exchange order id
     * @param orderList the raw Coinsuper order record to adapt
     * @return the adapted generic order
     */
    public static CoinsuperGenericOrder adaptOrder(String orderId, OrderList orderList) {
        BigDecimal averagePrice = new BigDecimal(orderList.getPriceLimit());
        BigDecimal cumulativeAmount = new BigDecimal(orderList.getQuantity());
        BigDecimal totalFee = new BigDecimal(orderList.getFee());
        BigDecimal amount = new BigDecimal(orderList.getQuantity());
        // "Buy" maps to BID; anything else is treated as an ASK.
        OrderType action = OrderType.ASK;
        if (orderList.getAction().equals("Buy")) {
            action = OrderType.BID;
        }
        // Order Status UNDEAL: Not Executed, PARTDEAL: Partially Executed, DEAL: Order Complete, CANCEL:
        // Canceled
        // NOTE(review): the comparison below checks the literal "Canceled", which does
        // not match the "CANCEL" code listed above — confirm against the Coinsuper API.
        // PARTDEAL/DEAL states also fall through to PENDING_NEW here.
        OrderStatus orderStatus = OrderStatus.PENDING_NEW;
        if (orderList.getState().equals("UNDEAL")) {
            orderStatus = OrderStatus.PENDING_NEW;
        } else if (orderList.getState().equals("Canceled")) {
            orderStatus = OrderStatus.CANCELED;
        }
        CoinsuperGenericOrder coinsuperGenericOrder = new CoinsuperGenericOrder(action, amount, new CurrencyPair(orderList.getSymbol()), orderId, CommonUtil.timeStampToDate(orderList.getUtcCreate()), averagePrice, cumulativeAmount, totalFee, orderStatus);
        return coinsuperGenericOrder;
    }
}
public class LcdsServiceProxy { /** * Call a group finder action * @ param uuid The uuid of the team * @ param mode The game mode of lobby * @ param procCall The name of the action * @ param object Call args * @ return unknown */ public Object call ( String uuid , GameMode mode , String procCall , JsonObject object ) { } }
return client . sendRpcAndWait ( SERVICE , "call" , uuid , mode . name ( ) , procCall , object . toString ( ) ) ;
public class JournalSegment {
    /**
     * Replays all open journal entries. The journal entry will call into
     * the callback listener with an open InputStream to read the entry.
     *
     * @param replayCallback receives one open InputStream per replayed entry
     */
    public void replay(ReplayCallback replayCallback) {
        TempBuffer tReadBuffer = TempBuffer.createLarge();
        byte[] readBuffer = tReadBuffer.buffer();
        int bufferLength = readBuffer.length;
        try (InStore jIn = _blockStore.openRead(_startAddress, getSegmentSize())) {
            Replay replay = readReplay(jIn);
            if (replay == null) {
                // Nothing recorded in this segment; leave state untouched.
                return;
            }
            long address = replay.getCheckpointStart();
            long next;
            setSequence(replay.getSequence());
            TempBuffer tBuffer = TempBuffer.create();
            byte[] tempBuffer = tBuffer.buffer();
            // Prime the read buffer with the block containing the first entry.
            jIn.read(getBlockAddress(address), readBuffer, 0, bufferLength);
            ReadStream is = new ReadStream();
            while (address < _tailAddress && (next = scanItem(jIn, address, readBuffer, tempBuffer)) > 0) {
                boolean isOverflow = getBlockAddress(address) != getBlockAddress(next);
                // if scanning has passed the buffer boundary, need to re-read
                // the initial buffer
                if (isOverflow) {
                    jIn.read(getBlockAddress(address), readBuffer, 0, bufferLength);
                }
                ReplayInputStream rIn = new ReplayInputStream(jIn, readBuffer, address);
                is.init(rIn);
                try {
                    replayCallback.onItem(is);
                } catch (Exception e) {
                    // A failing callback must not abort replay of the remaining entries.
                    e.printStackTrace();
                    log.log(Level.FINER, e.toString(), e);
                }
                address = next;
                // Refresh the buffer for the entry's new block before continuing.
                if (isOverflow) {
                    jIn.read(getBlockAddress(address), readBuffer, 0, bufferLength);
                }
                // Track replay progress so indexes resume from the last completed entry.
                _index = address;
                _flushIndex = address;
            }
        }
    }
}
public class ArchiveInputStream { /** * Returns the content type based on the content of the ZIP file . The content type may be * truncated using { @ link # setContentType ( int ) } . * @ return A bit field of { @ link DfuBaseService # TYPE _ SOFT _ DEVICE TYPE _ SOFT _ DEVICE } , * { @ link DfuBaseService # TYPE _ BOOTLOADER TYPE _ BOOTLOADER } and * { @ link DfuBaseService # TYPE _ APPLICATION TYPE _ APPLICATION } */ public int getContentType ( ) { } }
type = 0 ; // In Secure DFU the softDeviceSize and bootloaderSize may be 0 if both are in the ZIP file . // The size of each part is embedded in the Init packet . if ( softDeviceAndBootloaderBytes != null ) type |= DfuBaseService . TYPE_SOFT_DEVICE | DfuBaseService . TYPE_BOOTLOADER ; // In Legacy DFU the size of each of these parts was given in the manifest file . if ( softDeviceSize > 0 ) type |= DfuBaseService . TYPE_SOFT_DEVICE ; if ( bootloaderSize > 0 ) type |= DfuBaseService . TYPE_BOOTLOADER ; if ( applicationSize > 0 ) type |= DfuBaseService . TYPE_APPLICATION ; return type ;
public class SmbFile { /** * The CIFS protocol provides for DOS " wildcards " to be used as * a performance enhancement . The client does not have to filter * the names and the server does not have to return all directory * entries . * The wildcard expression may consist of two special meta * characters in addition to the normal filename characters . The ' * ' * character matches any number of characters in part of a name . If * the expression begins with one or more ' ? ' s then exactly that * many characters will be matched whereas if it ends with ' ? ' s * it will match that many characters < i > or less < / i > . * Wildcard expressions will not filter workgroup names or server names . * < blockquote > < pre > * winnt > ls c ? o * * clock . avi - rw - - 82944 Mon Oct 14 1996 1:38 AM * Cookies drw - - 0 Fri Nov 13 1998 9:42 PM * 2 items in 5ms * < / pre > < / blockquote > * @ param wildcard a wildcard expression * @ throws SmbException * @ return An array of < code > SmbFile < / code > objects representing file * and directories , workgroups , servers , or shares depending on the context * of the resource URL */ public SmbFile [ ] listFiles ( String wildcard ) throws SmbException { } }
return listFiles ( wildcard , ATTR_DIRECTORY | ATTR_HIDDEN | ATTR_SYSTEM , null , null ) ;
public class CommercePriceListPersistenceImpl { /** * Returns the commerce price list where uuid = & # 63 ; and groupId = & # 63 ; or returns < code > null < / code > if it could not be found . Uses the finder cache . * @ param uuid the uuid * @ param groupId the group ID * @ return the matching commerce price list , or < code > null < / code > if a matching commerce price list could not be found */ @ Override public CommercePriceList fetchByUUID_G ( String uuid , long groupId ) { } }
return fetchByUUID_G ( uuid , groupId , true ) ;
public class LocalTaskQueue { /** * / * ( non - Javadoc ) * @ see org . duracloud . queue . TaskQueue # requeue ( org . duracloud . queue . task . Task ) */ @ Override public void requeue ( Task task ) { } }
this . inprocess . remove ( task ) ; task . incrementAttempts ( ) ; this . queue . add ( task ) ;
public class Event { /** * A list of resources referenced by the event returned . * @ return A list of resources referenced by the event returned . */ public java . util . List < Resource > getResources ( ) { } }
if ( resources == null ) { resources = new com . amazonaws . internal . SdkInternalList < Resource > ( ) ; } return resources ;
public class ModelMigration {
    /**
     * {@inheritDoc}
     *
     * @throws UnsupportedOperationException always; extra platform-specific DDL is
     *         not supported by this migration implementation
     */
    @Override
    protected void writeExtraPlatformDdl(String fullVersion, CurrentModel currentModel, Migration dbMigration, File writePath) throws IOException {
        throw new UnsupportedOperationException("writeExtraPlatformDdl Unsupported");
    }
}
public class DefaultMessage { /** * Renders this message on the given GUI component . This implementation only * supports components of type { @ link javax . swing . text . JTextComponent } or { @ link javax . swing . JLabel } . * @ throws IllegalArgumentException if { @ code component } is not a { @ link javax . swing . text . JTextComponent } * or a { @ link javax . swing . JLabel } . */ public void renderMessage ( JComponent component ) { } }
if ( component instanceof JTextComponent ) { ( ( JTextComponent ) component ) . setText ( getMessage ( ) ) ; } else if ( component instanceof JLabel ) { JLabel label = ( JLabel ) component ; label . setText ( LabelUtils . htmlBlock ( getMessage ( ) ) ) ; label . setIcon ( getIcon ( ) ) ; } else { throw new IllegalArgumentException ( "Unsupported component type " + component ) ; }
public class WebFragmentTypeImpl { /** * If not already created , a new < code > security - constraint < / code > element will be created and returned . * Otherwise , the first existing < code > security - constraint < / code > element will be returned . * @ return the instance defined for the element < code > security - constraint < / code > */ public SecurityConstraintType < WebFragmentType < T > > getOrCreateSecurityConstraint ( ) { } }
List < Node > nodeList = childNode . get ( "security-constraint" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new SecurityConstraintTypeImpl < WebFragmentType < T > > ( this , "security-constraint" , childNode , nodeList . get ( 0 ) ) ; } return createSecurityConstraint ( ) ;
public class SyncAgentsInner { /** * Lists databases linked to a sync agent . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ param serviceFuture the ServiceFuture object tracking the Retrofit calls * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < List < SyncAgentLinkedDatabaseInner > > listLinkedDatabasesNextAsync ( final String nextPageLink , final ServiceFuture < List < SyncAgentLinkedDatabaseInner > > serviceFuture , final ListOperationCallback < SyncAgentLinkedDatabaseInner > serviceCallback ) { } }
return AzureServiceFuture . fromPageResponse ( listLinkedDatabasesNextSinglePageAsync ( nextPageLink ) , new Func1 < String , Observable < ServiceResponse < Page < SyncAgentLinkedDatabaseInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < SyncAgentLinkedDatabaseInner > > > call ( String nextPageLink ) { return listLinkedDatabasesNextSinglePageAsync ( nextPageLink ) ; } } , serviceCallback ) ;
public class AfplibPackageImpl {

    /**
     * Returns the {@link EClass} for EDG, lazily resolving it from the globally
     * registered AFP-lib package (classifier index 238).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getEDG() {
        if (edgEClass == null) {
            // Lazy lookup: resolve classifier 238 from the registered package the
            // first time it is requested, then cache it in the field.
            edgEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(238);
        }
        return edgEClass;
    }
}
public class XMPPTCPConnection {

    /**
     * The server has indicated that TLS negotiation can start. We now need to secure the
     * existing plain connection and perform a handshake. This method won't return until the
     * connection has finished the handshake or an error occurred while securing the connection.
     *
     * @throws IOException
     * @throws CertificateException
     * @throws NoSuchAlgorithmException
     * @throws NoSuchProviderException
     * @throws KeyStoreException
     * @throws UnrecoverableKeyException
     * @throws KeyManagementException
     * @throws SmackException
     */
    @SuppressWarnings("LiteralClassName")
    private void proceedTLSReceived() throws NoSuchAlgorithmException, CertificateException,
            IOException, KeyStoreException, NoSuchProviderException, UnrecoverableKeyException,
            KeyManagementException, SmackException {
        SmackTlsContext smackTlsContext = getSmackTlsContext();
        Socket plain = socket;
        // Wrap the existing plain socket in an SSL socket (autoClose = true closes the
        // underlying plain socket when the SSL socket is closed).
        socket = smackTlsContext.sslContext.getSocketFactory().createSocket(
                plain, config.getXMPPServiceDomain().toString(), plain.getPort(), true);
        final SSLSocket sslSocket = (SSLSocket) socket;
        // Immediately set the enabled SSL protocols and ciphers. See SMACK-712 why this is
        // important (at least on certain platforms) and it seems to be a good idea anyways to
        // prevent an accidental implicit handshake.
        TLSUtils.setEnabledProtocolsAndCiphers(
                sslSocket, config.getEnabledSSLProtocols(), config.getEnabledSSLCiphers());
        // Initialize the reader and writer with the new secured version.
        initReaderAndWriter();
        // Proceed to do the handshake; blocks until the handshake completes or fails.
        sslSocket.startHandshake();
        // DANE verification, if configured, must run after the handshake has finished.
        if (smackTlsContext.daneVerifier != null) {
            smackTlsContext.daneVerifier.finish(sslSocket.getSession());
        }
        final HostnameVerifier verifier = getConfiguration().getHostnameVerifier();
        if (verifier == null) {
            throw new IllegalStateException("No HostnameVerifier set. Use connectionConfiguration.setHostnameVerifier() to configure.");
        } else if (!verifier.verify(getXMPPServiceDomain().toString(), sslSocket.getSession())) {
            throw new CertificateException("Hostname verification of certificate failed. Certificate does not authenticate " + getXMPPServiceDomain());
        }
        // Mark the connection as secured only after hostname verification succeeded.
        secureSocket = sslSocket;
    }
}
public class ProviderRest { /** * Get the list of available providers * < pre > * GET / providers * Request : * GET / providers HTTP / 1.1 * Accept : application / xml * Response : * { @ code * < ? xml version = " 1.0 " encoding = " UTF - 8 " ? > * < collection href = " / providers " > * < items offset = " 0 " total = " 1 " > * < provider > * < uuid > fc923960-03fe - 41eb - 8a21 - a56709f9370f < / uuid > * < name > provider - prueba < / name > * < / provider > * < / items > * < / collection > * < / pre > * Example : < li > curl http : / / localhost : 8080 / sla - service / providers < / li > * @ return XML information with the different details of the providers * @ throws JAXBException */ @ GET @ Produces ( MediaType . APPLICATION_XML ) public Response getProviders ( ) { } }
logger . debug ( "StartOf getProviders - REQUEST for /providers" ) ; ProviderHelper providerRestService = getProviderHelper ( ) ; String serializedProviders = null ; try { serializedProviders = providerRestService . getProviders ( ) ; } catch ( HelperException e ) { logger . info ( "getTemplates exception:" + e . getMessage ( ) ) ; return buildResponse ( e ) ; } logger . debug ( "EndOf getTemplates" ) ; return buildResponse ( 200 , serializedProviders ) ;
public class StringRecordBuilder { /** * adds a value to the builder . * ex . * < pre > * builder . addAll ( & quot ; 192.0.2.1 & quot ; , & quot ; 192.0.2.2 & quot ; ) ; * < / pre > */ public StringRecordBuilder < D > addAll ( Collection < String > records ) { } }
for ( String value : checkNotNull ( records , "records" ) ) { add ( value ) ; } return this ;
public class SelectionPageGenerator { /** * Creates hidden { @ code < input > } HTML elements for any existing request parameters . */ String getHiddenInputHtmlForRequestParameters ( ) { } }
StringBuilder html = new StringBuilder ( ) ; if ( parameterMap != null ) { Set < Entry < String , String [ ] > > entries = parameterMap . entrySet ( ) ; for ( Entry < String , String [ ] > entry : entries ) { html . append ( getHiddenInputForRequestParam ( entry ) ) ; } } return html . toString ( ) ;
public class Boxing {

    /**
     * Transforms any array into an array of {@code Long}.
     *
     * @param src source array (primitive or object array; converted via {@code array(src)})
     * @param srcPos start position
     * @param len length
     * @return Long array
     */
    public static Long[] boxLongs(Object src, int srcPos, int len) {
        // Delegates to the typed overload. NOTE(review): the static type of
        // array(src) determines which boxLongs overload is selected here, so the
        // call must stay inline — extracting it to an Object local could change
        // overload resolution.
        return boxLongs(array(src), srcPos, len);
    }
}
public class AbstractCommandLineRunner { /** * Construct and return the input root path map . The key is the exec path of each input file , and * the value is the corresponding root relative path . */ @ GwtIncompatible ( "Unnecessary" ) private Map < String , String > constructRootRelativePathsMap ( ) { } }
Map < String , String > rootRelativePathsMap = new LinkedHashMap < > ( ) ; for ( String mapString : config . manifestMaps ) { int colonIndex = mapString . indexOf ( ':' ) ; checkState ( colonIndex > 0 ) ; String execPath = mapString . substring ( 0 , colonIndex ) ; String rootRelativePath = mapString . substring ( colonIndex + 1 ) ; checkState ( rootRelativePath . indexOf ( ':' ) == - 1 ) ; rootRelativePathsMap . put ( execPath , rootRelativePath ) ; } return rootRelativePathsMap ;
public class FormSupport { /** * Set the placeholder text to use on the specified form field . * This text will be shown when the field is blank and unfocused . */ public < B extends TextBoxBase > B setPlaceholderText ( B box , String placeholder ) { } }
box . getElement ( ) . setAttribute ( "placeholder" , placeholder ) ; return box ;
public class PowerMock { /** * Creates a mock object that supports mocking of final and native methods * and invokes a specific constructor based on the supplied argument values . * @ param < T > the type of the mock object * @ param type the type of the mock object * @ param constructorArguments The constructor arguments that will be used to invoke a * certain constructor . * @ return the mock object . */ public static < T > T createMock ( Class < T > type , Object ... constructorArguments ) { } }
Constructor < ? > constructor = WhiteboxImpl . findUniqueConstructorOrThrowException ( type , constructorArguments ) ; ConstructorArgs constructorArgs = new ConstructorArgs ( constructor , constructorArguments ) ; return doMock ( type , false , new DefaultMockStrategy ( ) , constructorArgs , ( Method [ ] ) null ) ;
public class ComputeNodesImpl {

    /**
     * Disables task scheduling on the specified compute node.
     * You can disable task scheduling on a node only if its current scheduling state is enabled.
     *
     * @param poolId The ID of the pool that contains the compute node.
     * @param nodeId The ID of the compute node on which you want to disable task scheduling.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponseWithHeaders} object if successful.
     */
    public Observable<ServiceResponseWithHeaders<Void, ComputeNodeDisableSchedulingHeaders>> disableSchedulingWithServiceResponseAsync(String poolId, String nodeId) {
        // Validate required client configuration and parameters up front.
        if (this.client.batchUrl() == null) {
            throw new IllegalArgumentException("Parameter this.client.batchUrl() is required and cannot be null.");
        }
        if (poolId == null) {
            throw new IllegalArgumentException("Parameter poolId is required and cannot be null.");
        }
        if (nodeId == null) {
            throw new IllegalArgumentException("Parameter nodeId is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // This overload takes no options, so all optional request settings stay null.
        final DisableComputeNodeSchedulingOption nodeDisableSchedulingOption = null;
        final ComputeNodeDisableSchedulingOptions computeNodeDisableSchedulingOptions = null;
        Integer timeout = null;
        UUID clientRequestId = null;
        Boolean returnClientRequestId = null;
        DateTime ocpDate = null;
        // Request body with no explicit disable-scheduling option.
        NodeDisableSchedulingParameter nodeDisableSchedulingParameter = new NodeDisableSchedulingParameter();
        nodeDisableSchedulingParameter.withNodeDisableSchedulingOption(null);
        // Substitute the account endpoint into the parameterized host template.
        String parameterizedHost = Joiner.on(", ").join("{batchUrl}", this.client.batchUrl());
        DateTimeRfc1123 ocpDateConverted = null;
        if (ocpDate != null) {
            ocpDateConverted = new DateTimeRfc1123(ocpDate);
        }
        // Issue the REST call and convert the raw response into a typed
        // ServiceResponseWithHeaders, propagating any parsing failure as an error.
        return service.disableScheduling(poolId, nodeId, this.client.apiVersion(), this.client.acceptLanguage(), timeout, clientRequestId, returnClientRequestId, ocpDateConverted, nodeDisableSchedulingParameter, parameterizedHost, this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<Void, ComputeNodeDisableSchedulingHeaders>>>() {
                @Override
                public Observable<ServiceResponseWithHeaders<Void, ComputeNodeDisableSchedulingHeaders>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponseWithHeaders<Void, ComputeNodeDisableSchedulingHeaders> clientResponse = disableSchedulingDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class ElementFilter { /** * Returns a list of { @ code exports } directives in { @ code directives } . * @ return a list of { @ code exports } directives in { @ code directives } * @ param directives the directives to filter * @ since 9 * @ spec JPMS */ public static List < ExportsDirective > exportsIn ( Iterable < ? extends Directive > directives ) { } }
return listFilter ( directives , DirectiveKind . EXPORTS , ExportsDirective . class ) ;
public class CmsSourceSearchForm {

    /**
     * Initializes the form fields.<p>
     *
     * Wires up the search pattern, resource/site selectors, property and search-type
     * combo boxes, the optional Solr index selector, locale / resource-type / project
     * pickers, and disables replacing while in the Online project.
     */
    private void initFields() {
        CmsObject cms = A_CmsUI.getCmsObject();
        boolean online = cms.getRequestContext().getCurrentProject().isOnlineProject();
        // Default the search pattern to "match everything" when empty.
        if (m_searchPattern.getValue().isEmpty()) {
            m_searchPattern.setValue(REGEX_ALL);
        }
        // Both path fields operate on root paths and must point at files.
        m_resourceSearch.setUseRootPaths(true);
        m_replaceResource.setUseRootPaths(true);
        m_resourceSearch.requireFile();
        m_replaceResource.requireFile();
        CmsObject rootCms;
        try {
            // Clone the CmsObject with an empty site root so the pickers can browse
            // the whole VFS, defaulting to the current site.
            rootCms = OpenCms.initCmsObject(cms);
            rootCms.getRequestContext().setSiteRoot("");
            m_resourceSearch.setCmsObject(rootCms);
            m_resourceSearch.setDefaultPath(cms.getRequestContext().getSiteRoot());
            m_replaceResource.setCmsObject(rootCms);
            m_replaceResource.setDefaultPath(cms.getRequestContext().getSiteRoot());
        } catch (CmsException e1) {
            // NOTE(review): initialization failure is silently ignored here — the
            // pickers then keep their default CmsObject. Confirm this is intended.
        }
        // Site selector: all available sites, pre-selected to the current site.
        m_siteSelect.setContainerDataSource(
            CmsVaadinUtils.getAvailableSitesContainer(cms, CmsVaadinUtils.PROPERTY_LABEL));
        m_siteSelect.setItemCaptionPropertyId(CmsVaadinUtils.PROPERTY_LABEL);
        m_siteSelect.setTextInputAllowed(true);
        m_siteSelect.setNullSelectionAllowed(false);
        m_siteSelect.setFilteringMode(FilteringMode.CONTAINS);
        m_siteSelect.setValue(cms.getRequestContext().getSiteRoot());
        try {
            // Populate the property selector with every defined property.
            for (CmsPropertyDefinition prop : A_CmsUI.getCmsObject().readAllPropertyDefinitions()) {
                m_property.addItem(prop);
                m_property.setItemCaption(prop, prop.getName());
            }
        } catch (CmsException e) {
            // NOTE(review): read failure leaves the property selector empty; the
            // select() call below would then fail on an empty iterator — verify.
        }
        // Changing the site updates the search root.
        m_siteSelect.addValueChangeListener(new ValueChangeListener() {

            private static final long serialVersionUID = -1079794209679015125L;

            public void valueChange(ValueChangeEvent event) {
                try {
                    updateSearchRoot();
                } catch (CmsException e) {
                    LOG.error("Unable to initialize CmsObject", e);
                }
            }
        });
        m_property.setNullSelectionAllowed(false);
        m_property.select(m_property.getItemIds().iterator().next());
        m_property.setFilteringMode(FilteringMode.CONTAINS);
        // Search type selector: the non-Solr search modes are always available.
        m_searchType.setFilteringMode(FilteringMode.OFF);
        m_searchType.setNullSelectionAllowed(false);
        m_searchType.addItem(SearchType.fullText);
        m_searchType.setItemCaption(
            SearchType.fullText,
            CmsVaadinUtils.getMessageText(Messages.GUI_SOURCESEARCH_SERACH_TYPE_FULLTEXT_0));
        m_searchType.addItem(SearchType.contentValues);
        m_searchType.setItemCaption(
            SearchType.contentValues,
            CmsVaadinUtils.getMessageText(Messages.GUI_SOURCESEARCH_SERACH_TYPE_XMLCONTENT_0));
        m_searchType.addItem(SearchType.properties);
        m_searchType.setItemCaption(
            SearchType.properties,
            CmsVaadinUtils.getMessageText(Messages.GUI_SOURCESEARCH_PROPERTY_SEARCH_0));
        m_searchType.addItem(SearchType.resourcetype);
        m_searchType.setItemCaption(
            SearchType.resourcetype,
            CmsVaadinUtils.getMessageText(Messages.GUI_SOURCESEARCH_RESOURCE_SEARCH_0));
        m_searchType.addItem(SearchType.renameContainer);
        m_searchType.setItemCaption(
            SearchType.renameContainer,
            CmsVaadinUtils.getMessageText(Messages.GUI_SOURCESEARCH_RENAME_CONTAINER_SEARCH_0));
        // Solr search types are offered only when the Solr server is enabled and at
        // least one index matching the current project mode exists.
        if (OpenCms.getSearchManager().getSolrServerConfiguration().isEnabled()) {
            m_searchIndex.setFilteringMode(FilteringMode.OFF);
            m_searchIndex.setNullSelectionAllowed(false);
            String selectIndex = null;
            for (CmsSearchIndex index : OpenCms.getSearchManager().getAllSolrIndexes()) {
                boolean offlineMode = I_CmsSearchIndex.REBUILD_MODE_OFFLINE.equals(index.getRebuildMode());
                // in case the current project is offline, show offline indexes,
                // otherwise show online indexes
                if ((!online && offlineMode) || (online && !offlineMode)) {
                    m_searchIndex.addItem(index.getName());
                    if (selectIndex == null) {
                        selectIndex = index.getName();
                    }
                }
            }
            if (selectIndex != null) {
                m_searchIndex.setValue(selectIndex);
                // only add the solr search types if there is an index available
                m_searchType.addItem(SearchType.solr);
                m_searchType.setItemCaption(
                    SearchType.solr,
                    CmsVaadinUtils.getMessageText(Messages.GUI_SOURCESEARCH_SERACH_TYPE_SOLR_0));
                m_searchType.addItem(SearchType.solrContentValues);
                m_searchType.setItemCaption(
                    SearchType.solrContentValues,
                    CmsVaadinUtils.getMessageText(Messages.GUI_SOURCESEARCH_SERACH_TYPE_SOLR_CONTENT_VALUES_0));
            }
        }
        m_searchType.setValue(SearchType.fullText);
        // Search root: folders only, starting at site root.
        m_searchRoot.setValue("/");
        m_searchRoot.disableSiteSwitch();
        m_searchRoot.setResourceFilter(CmsResourceFilter.DEFAULT_FOLDERS);
        m_searchRoot.requireFolder();
        // Locale selector: all configured locales.
        m_locale.setFilteringMode(FilteringMode.OFF);
        for (Locale locale : OpenCms.getLocaleManager().getAvailableLocales()) {
            m_locale.addItem(locale);
        }
        // Resource type selector: all non-folder types, optional selection.
        m_resourceType.setNullSelectionAllowed(true);
        IndexedContainer resTypes = CmsVaadinUtils.getResourceTypesContainer();
        resTypes.addContainerFilter(CmsVaadinUtils.FILTER_NO_FOLDERS);
        m_resourceType.setContainerDataSource(resTypes);
        m_resourceType.setItemCaptionPropertyId(PropertyId.caption);
        m_resourceType.setItemIconPropertyId(PropertyId.icon);
        m_resourceType.setFilteringMode(FilteringMode.CONTAINS);
        // Work project selector: all projects except Online.
        m_workProject.setNullSelectionAllowed(false);
        IndexedContainer projects = CmsVaadinUtils.getProjectsContainer(A_CmsUI.getCmsObject(), "caption");
        projects.removeItem(CmsProject.ONLINE_PROJECT_ID);
        m_workProject.setContainerDataSource(projects);
        m_workProject.setItemCaptionPropertyId("caption");
        if (online) {
            // Replacing is not allowed while in the Online project.
            m_replace.setEnabled(false);
        } else {
            m_workProject.setValue(cms.getRequestContext().getCurrentProject().getUuid());
        }
    }
}
public class DTMDocumentImpl { /** * Wrapper for ChunkedIntArray . append , to automatically update the * previous sibling ' s " next " reference ( if necessary ) and periodically * wake a reader who may have encountered incomplete data and entered * a wait state . * @ param w0 int As in ChunkedIntArray . append * @ param w1 int As in ChunkedIntArray . append * @ param w2 int As in ChunkedIntArray . append * @ param w3 int As in ChunkedIntArray . append * @ return int As in ChunkedIntArray . append * @ see ChunkedIntArray . append */ private final int appendNode ( int w0 , int w1 , int w2 , int w3 ) { } }
// A decent compiler may inline this . int slotnumber = nodes . appendSlot ( w0 , w1 , w2 , w3 ) ; if ( DEBUG ) System . out . println ( slotnumber + ": " + w0 + " " + w1 + " " + w2 + " " + w3 ) ; if ( previousSiblingWasParent ) nodes . writeEntry ( previousSibling , 2 , slotnumber ) ; previousSiblingWasParent = false ; // Set the default ; endElement overrides return slotnumber ;
public class ClosedState { /** * Checks to determine if a threshold has been met and the circuit should be opened or closed . * When a failure ratio is configured , the circuit is opened after the expected number of executions based on whether * the ratio was exceeded . * If a failure threshold is configured , the circuit is opened if the expected number of executions fails else it ' s * closed if a single execution succeeds . */ synchronized void checkThreshold ( ) { } }
Ratio failureRatio = circuit . getFailureThreshold ( ) ; // Handle failure threshold ratio if ( failureRatio != null && bitSet . occupiedBits ( ) >= failureRatio . getDenominator ( ) && bitSet . negativeRatioValue ( ) >= failureRatio . getValue ( ) ) circuit . open ( ) ; // Handle no thresholds configured else if ( failureRatio == null && bitSet . negativeRatioValue ( ) == 1 ) circuit . open ( ) ;
public class CheckpointCoordinator {

    /**
     * Starts the periodic checkpoint scheduler. Cancels any previously scheduled trigger
     * first, then schedules a new fixed-rate trigger with a randomized initial delay.
     */
    public void startCheckpointScheduler() {
        synchronized (lock) {
            if (shutdown) {
                // NOTE(review): IllegalStateException would be the conventional type for
                // "object is shut down", but changing it would alter what callers catch.
                throw new IllegalArgumentException("Checkpoint coordinator is shut down");
            }
            // make sure all prior timers are cancelled
            stopCheckpointScheduler();
            periodicScheduling = true;
            // Randomize the first trigger between the minimum pause (converted from
            // nanos to millis) and one full interval, to avoid synchronized bursts.
            long initialDelay = ThreadLocalRandom.current().nextLong(
                minPauseBetweenCheckpointsNanos / 1_000_000L, baseInterval + 1L);
            currentPeriodicTrigger = timer.scheduleAtFixedRate(
                new ScheduledTrigger(), initialDelay, baseInterval, TimeUnit.MILLISECONDS);
        }
    }
}
public class ResultIterator { /** * on check relation event , invokes populate entities and set relational * entities , in case relations are present . */ private void onCheckRelation ( ) { } }
try { results = populateEntities ( entityMetadata , client ) ; if ( entityMetadata . isRelationViaJoinTable ( ) || ( entityMetadata . getRelationNames ( ) != null && ! ( entityMetadata . getRelationNames ( ) . isEmpty ( ) ) ) ) { query . setRelationalEntities ( results , client , entityMetadata ) ; } } catch ( Exception e ) { throw new PersistenceException ( "Error while scrolling over results, Caused by :." , e ) ; }
public class BitZMarketDataServiceRaw { /** * Get Kline data * @ param currencyPair * @ param resolution * @ param size * @ param microsecond * @ return * @ throws IOException */ public BitZKline getKline ( CurrencyPair currencyPair , BitZKlineResolution resolution , Integer size , String microsecond ) throws IOException { } }
return bitz . getKline ( BitZUtils . toPairString ( currencyPair ) , resolution . code ( ) , size , microsecond ) . getData ( ) ;
public class FileSystem { /** * Create a zip file from the given input file . * If the input file is a directory , the content of the directory is zipped . * If the input file is a standard file , it is zipped . * @ param input the name of the file to compress . * @ param output the name of the ZIP file to create . * @ throws IOException when ziiping is failing . * @ since 6.2 */ @ SuppressWarnings ( "checkstyle:npathcomplexity" ) public static void zipFile ( File input , OutputStream output ) throws IOException { } }
try ( ZipOutputStream zos = new ZipOutputStream ( output ) ) { if ( input == null ) { return ; } final LinkedList < File > candidates = new LinkedList < > ( ) ; candidates . add ( input ) ; final byte [ ] buffer = new byte [ BUFFER_SIZE ] ; int len ; File file ; File relativeFile ; String zipFilename ; final File rootDirectory = ( input . isDirectory ( ) ) ? input : input . getParentFile ( ) ; while ( ! candidates . isEmpty ( ) ) { file = candidates . removeFirst ( ) ; assert file != null ; if ( file . getAbsoluteFile ( ) . equals ( rootDirectory . getAbsoluteFile ( ) ) ) { relativeFile = null ; } else { relativeFile = makeRelative ( file , rootDirectory , false ) ; } if ( file . isDirectory ( ) ) { if ( relativeFile != null ) { zipFilename = fromFileStandardToURLStandard ( relativeFile ) + URL_PATH_SEPARATOR ; final ZipEntry zipEntry = new ZipEntry ( zipFilename ) ; zos . putNextEntry ( zipEntry ) ; zos . closeEntry ( ) ; } candidates . addAll ( Arrays . asList ( file . listFiles ( ) ) ) ; } else if ( relativeFile != null ) { try ( FileInputStream fis = new FileInputStream ( file ) ) { zipFilename = fromFileStandardToURLStandard ( relativeFile ) ; final ZipEntry zipEntry = new ZipEntry ( zipFilename ) ; zos . putNextEntry ( zipEntry ) ; while ( ( len = fis . read ( buffer ) ) > 0 ) { zos . write ( buffer , 0 , len ) ; } zos . closeEntry ( ) ; } } } }
public class Escape {

    /**
     * Escapes HTML metacharacters, appending the escaped text to a StringBuilder.
     * The characters {@code & " ' < >} are replaced with their entity references;
     * everything else is appended unchanged.
     *
     * @param sb StringBuilder to append to
     * @param html input
     */
    public static void html(StringBuilder sb, String html) {
        for (char ch : html.toCharArray()) {
            if (ch == '&') {
                sb.append("&amp;");
            } else if (ch == '"') {
                sb.append("&quot;");
            } else if (ch == '\'') {
                sb.append("&apos;");
            } else if (ch == '<') {
                sb.append("&lt;");
            } else if (ch == '>') {
                sb.append("&gt;");
            } else {
                sb.append(ch);
            }
        }
    }
}
public class RuleElementImpl {

    /**
     * Sets the containment reference {@code guarded}, wiring the EMF inverse-reference
     * notification chain and firing a SET notification when required.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setGuarded(RuleElement newGuarded) {
        if (newGuarded != guarded) {
            NotificationChain msgs = null;
            // Detach the old value and attach the new one via the inverse-reference
            // protocol, accumulating notifications in a single chain.
            if (guarded != null)
                msgs = ((InternalEObject) guarded).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - SimpleAntlrPackage.RULE_ELEMENT__GUARDED, null, msgs);
            if (newGuarded != null)
                msgs = ((InternalEObject) newGuarded).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - SimpleAntlrPackage.RULE_ELEMENT__GUARDED, null, msgs);
            msgs = basicSetGuarded(newGuarded, msgs);
            // Dispatch all accumulated notifications at once.
            if (msgs != null)
                msgs.dispatch();
        } else if (eNotificationRequired())
            // Value unchanged: still notify listeners of the (no-op) SET, per EMF convention.
            eNotify(new ENotificationImpl(this, Notification.SET, SimpleAntlrPackage.RULE_ELEMENT__GUARDED, newGuarded, newGuarded));
    }
}
public class ListInventoryEntriesResult { /** * A list of inventory items on the instance ( s ) . * @ return A list of inventory items on the instance ( s ) . */ public java . util . List < java . util . Map < String , String > > getEntries ( ) { } }
if ( entries == null ) { entries = new com . amazonaws . internal . SdkInternalList < java . util . Map < String , String > > ( ) ; } return entries ;
public class StringUtils { /** * 此方法会去除字符串首尾的空白字符 , 然后把中间的多个连续的换行符替换为一个换行符号 * @ param text * @ return */ public static String reduceLineBreaks ( String text ) { } }
if ( isEmpty ( text ) ) { return text ; } String textTrimed = trimWhitespace ( text ) ; return textTrimed . replaceAll ( "(\\r\\n|\\r|\\n)+" , "\n" ) ;
public class NexusAnalyzer { /** * Initializes the analyzer once before any analysis is performed . * @ param engine a reference to the dependency - check engine * @ throws InitializationException if there ' s an error during initialization */ @ Override public void prepareFileTypeAnalyzer ( Engine engine ) throws InitializationException { } }
LOGGER . debug ( "Initializing Nexus Analyzer" ) ; LOGGER . debug ( "Nexus Analyzer enabled: {}" , isEnabled ( ) ) ; if ( isEnabled ( ) ) { final boolean useProxy = useProxy ( ) ; LOGGER . debug ( "Using proxy: {}" , useProxy ) ; try { searcher = new NexusSearch ( getSettings ( ) , useProxy ) ; if ( ! searcher . preflightRequest ( ) ) { setEnabled ( false ) ; throw new InitializationException ( "There was an issue getting Nexus status. Disabling analyzer." ) ; } } catch ( MalformedURLException mue ) { setEnabled ( false ) ; throw new InitializationException ( "Malformed URL to Nexus" , mue ) ; } }
public class BaseXMLBuilder { /** * Imports another BaseXMLBuilder document into this document at the * current position . The entire document provided is imported . * @ param builder * the BaseXMLBuilder document to be imported . */ protected void importXMLBuilderImpl ( BaseXMLBuilder builder ) { } }
assertElementContainsNoOrWhitespaceOnlyTextNodes ( this . xmlNode ) ; Node importedNode = getDocument ( ) . importNode ( builder . getDocument ( ) . getDocumentElement ( ) , true ) ; this . xmlNode . appendChild ( importedNode ) ;
public class DefaultGroovyMethods { /** * Returns the items from the Object array excluding the last item . * < pre class = " groovyTestCase " > * String [ ] strings = [ " a " , " b " , " c " ] * def result = strings . init ( ) * assert result . length = = 2 * assert strings . class . componentType = = String * < / pre > * @ param self an array * @ return an array without its last element * @ throws NoSuchElementException if the array is empty and you try to access the init ( ) item . * @ since 2.4.0 */ public static < T > T [ ] init ( T [ ] self ) { } }
if ( self . length == 0 ) { throw new NoSuchElementException ( "Cannot access init() for an empty Object array" ) ; } T [ ] result = createSimilarArray ( self , self . length - 1 ) ; System . arraycopy ( self , 0 , result , 0 , self . length - 1 ) ; return result ;
public class MessageLog {

    /**
     * Looks up the message-log record with the given ID.
     *
     * <p>Temporarily switches the key area to ID_KEY to perform the seek, restoring
     * the previous default order afterwards.
     *
     * @param ID the record ID to look up
     * @return this record positioned on the match, or {@code null} if not found or
     *         on a database error
     */
    public MessageLogModel getMessageLog(String ID) {
        // Remember the current key order so it can be restored after the seek.
        int iOldOrder = this.getDefaultOrder();
        try {
            this.addNew();
            this.getField(MessageLog.ID).setString(ID);
            this.setKeyArea(MessageLog.ID_KEY);
            // seek(null) positions on the record matching the key fields set above.
            if (this.seek(null))
                return this;
        } catch (DBException ex) {
            // NOTE(review): errors are only printed and swallowed (the method then
            // returns null); a logger would be preferable to printStackTrace().
            ex.printStackTrace();
        } finally {
            // Always restore the original key area, found or not.
            this.setKeyArea(iOldOrder);
        }
        return null;
    }
}
public class AWSElasticBeanstalkClient { /** * Swaps the CNAMEs of two environments . * @ param swapEnvironmentCNAMEsRequest * Swaps the CNAMEs of two environments . * @ return Result of the SwapEnvironmentCNAMEs operation returned by the service . * @ sample AWSElasticBeanstalk . SwapEnvironmentCNAMEs * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticbeanstalk - 2010-12-01 / SwapEnvironmentCNAMEs " * target = " _ top " > AWS API Documentation < / a > */ @ Override public SwapEnvironmentCNAMEsResult swapEnvironmentCNAMEs ( SwapEnvironmentCNAMEsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeSwapEnvironmentCNAMEs ( request ) ;
public class ConvolutionUtils {

    /**
     * Reduce a 2d CNN layer mask array (of 0s and 1s) according to the layer configuration.
     * Note that when a CNN layer changes the shape of the activations (for example,
     * stride &gt; 1) the corresponding mask array needs to change shape also (as there is a
     * correspondence between the two). This method performs the forward pass for the mask.
     *
     * @param inMask Input mask array - rank 4, shape [mb,c,h,1] or [mb,c,w,1] or [mb,c,h,w]
     * @param kernel Kernel configuration for the layer
     * @param stride Stride
     * @param padding Padding
     * @param dilation Dilation
     * @param convolutionMode Convolution mode
     * @return The mask array corresponding to the network output
     */
    public static INDArray cnn2dMaskReduction(INDArray inMask, int[] kernel, int[] stride,
            int[] padding, int[] dilation, ConvolutionMode convolutionMode) {
        // Mask array should be broadcastable with CNN activations. Thus should have shape
        // [mb,x,y,z] where: x == 1 OR channels, y == 1 OR height, z == 1 OR width
        if (inMask.rank() != 4) {
            throw new IllegalStateException("Expected rank 4 mask array for 2D CNN layers. Mask arrays for 2D CNN layers " + "must have shape [batchSize,channels,X,Y] where X = (1 or activationsHeight) and Y = (1 or activationsWidth): " + "Got rank " + inMask.rank() + " array with shape " + Arrays.toString(inMask.shape()));
        }
        // Same mode with unit stride: output activations size same as input activations
        // size, so the mask passes through unchanged.
        if (convolutionMode == ConvolutionMode.Same && stride[0] == 1 && stride[1] == 1) {
            return inMask;
        }
        // Per-example mask ([mb,c,1,1]) broadcasts along all spatial positions;
        // no reduction needed.
        if (inMask.size(2) == 1 && inMask.size(3) == 1) {
            return inMask;
        }
        // Build an effective kernel/stride/padding/dilation that only pools along the
        // dimension(s) the mask actually varies over.
        int[] k;
        int[] s;
        int[] p;
        int[] d;
        if (inMask.size(3) == 1) {
            // [mb,x,y,1] case -> pool mask along height only
            k = new int[]{kernel[0], 1};
            s = new int[]{stride[0], 1};
            p = new int[]{padding[0], 0};
            d = new int[]{dilation[0], 1};
        } else if (inMask.size(2) == 1) {
            // [mb,x,1,z] case -> pool mask along width only
            k = new int[]{1, kernel[1]};
            s = new int[]{1, stride[1]};
            p = new int[]{0, padding[1]};
            d = new int[]{1, dilation[1]};
        } else {
            // [mb,x,y,z] -> pool mask along height and width
            k = kernel;
            s = stride;
            p = padding;
            d = dilation;
        }
        // Also performs validation of the configuration against the mask shape.
        int[] outSize = ConvolutionUtils.getOutputSize(inMask, k, s, p, convolutionMode, d);
        // If the pooled output shape equals the mask's shape, the mask is unchanged.
        boolean allEq = true;
        for (int i = 0; i < outSize.length; i++) {
            if (outSize[i] != inMask.size(i)) {
                allEq = false;
                break;
            }
        }
        if (allEq) {
            return inMask;
        }
        // Max-pool the mask: an output position is "present" (1) if any contributing
        // input position was present.
        long[] outArraySize = new long[]{inMask.size(0), inMask.size(1), outSize[0], outSize[1]};
        INDArray outMask = Nd4j.createUninitialized(outArraySize);
        Op op = new LegacyPooling2D(inMask, kernel[0], kernel[1], stride[0], stride[1], padding[0], padding[1], dilation[0], dilation[1], convolutionMode == ConvolutionMode.Same, LegacyPooling2D.Pooling2DType.MAX, 0.0, outMask);
        Nd4j.getExecutioner().exec(op);
        return outMask;
    }
}
public class BitCount { /** * Population count * It counts 24 words at a time , then 3 at a time , then 1 at a time * @ param buffer * array of < code > int < / code > * @ param n * number of elements of < code > buffer < / code > to count * @ return population count */ public static int count ( int [ ] buffer , int n ) { } }
final int n1 = n - n % 24 ; final int n2 = n - n % 3 ; int cnt = 0 ; int i ; for ( i = 0 ; i < n1 ; i += 24 ) cnt += merging3 ( buffer , i ) ; for ( ; i < n2 ; i += 3 ) cnt += merging2 ( buffer , i ) ; cnt += popcount_fbsd2 ( buffer , i , n ) ; return cnt ;
public class AmazonRedshiftClient { /** * Modifies a cluster subnet group to include the specified list of VPC subnets . The operation replaces the existing * list of subnets with the new list of subnets . * @ param modifyClusterSubnetGroupRequest * @ return Result of the ModifyClusterSubnetGroup operation returned by the service . * @ throws ClusterSubnetGroupNotFoundException * The cluster subnet group name does not refer to an existing cluster subnet group . * @ throws ClusterSubnetQuotaExceededException * The request would result in user exceeding the allowed number of subnets in a cluster subnet groups . For * information about increasing your quota , go to < a * href = " https : / / docs . aws . amazon . com / redshift / latest / mgmt / amazon - redshift - limits . html " > Limits in Amazon * Redshift < / a > in the < i > Amazon Redshift Cluster Management Guide < / i > . * @ throws SubnetAlreadyInUseException * A specified subnet is already in use by another cluster . * @ throws InvalidSubnetException * The requested subnet is not valid , or not all of the subnets are in the same VPC . * @ throws UnauthorizedOperationException * Your account is not authorized to perform the requested operation . * @ throws DependentServiceRequestThrottlingException * The request cannot be completed because a dependent service is throttling requests made by Amazon * Redshift on your behalf . Wait and retry the request . * @ sample AmazonRedshift . ModifyClusterSubnetGroup * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / redshift - 2012-12-01 / ModifyClusterSubnetGroup " * target = " _ top " > AWS API Documentation < / a > */ @ Override public ClusterSubnetGroup modifyClusterSubnetGroup ( ModifyClusterSubnetGroupRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeModifyClusterSubnetGroup ( request ) ;
public class AnnotationMethodInterceptor { /** * Intercept all methods calls . * @ param obj The enhanced CGLIB instance * @ param method Intercepted method * @ param args Method arguments * @ param proxy This method proxy */ @ Override public Object intercept ( Object obj , Method method , Object [ ] args , MethodProxy proxy ) throws Throwable { } }
if ( method . getName ( ) . equals ( "annotationType" ) ) { return annotationType ; } else if ( method . getName ( ) . equals ( "toString" ) ) { return toString ( ) ; } else if ( method . getName ( ) . equals ( "equals" ) ) { return annotationEquals ( args [ 0 ] ) ; } else if ( method . getName ( ) . equals ( "hashCode" ) ) { return proxy . hashCode ( ) ; } else { return attributeData . get ( method . getName ( ) ) ; }
public class StatsDTelegrafWriterFactory { /** * Copied from InfluxDbWriterFactory * @ param tags * @ return */ private ImmutableMap < String , String > initCustomTagsMap ( ImmutableMap < String , String > tags ) { } }
return ImmutableMap . copyOf ( firstNonNull ( tags , Collections . < String , String > emptyMap ( ) ) ) ;
public class CmsCmisTypeManager { /** * Creates the CMIS property definition for an OpenCms resource property definition . < p > * @ param cmsDef the OpenCms property definition * @ return the CMIS property definition */ PropertyDefinition < ? > createOpenCmsPropertyDefinition ( CmsPropertyDefinition cmsDef ) { } }
return createPropDef ( PROPERTY_PREFIX + cmsDef . getName ( ) , cmsDef . getName ( ) , cmsDef . getName ( ) , PropertyType . STRING , Cardinality . SINGLE , Updatability . READWRITE , false , false ) ;
public class ElmBaseVisitor { /** * Visit a Instance . This method will be called for * every node in the tree that is a Instance . * @ param elm the ELM tree * @ param context the context passed to the visitor * @ return the visitor result */ public T visitInstance ( Instance elm , C context ) { } }
for ( InstanceElement element : elm . getElement ( ) ) { visitInstanceElement ( element , context ) ; } return null ;
public class InvokeSubProcessActivity { /** * Method to get the Process Instance from the database * @ param procInstId * @ return * @ throws DataAccessException */ private ProcessInstance getProcInstFromDB ( Long procInstId ) throws DataAccessException { } }
TransactionWrapper transaction = null ; EngineDataAccessDB edao = new EngineDataAccessDB ( ) ; try { transaction = edao . startTransaction ( ) ; return edao . getProcessInstance ( procInstId ) ; } catch ( SQLException e ) { logger . severe ( "InvokeSubProcessActivity -> Failed to load process instance for " + procInstId ) ; return null ; } finally { edao . stopTransaction ( transaction ) ; }
public class FastAdapter {
    /**
     * Creates a new FastAdapter with the provided adapters.
     * If adapters is null, a default ItemAdapter is defined.
     *
     * @param adapters the adapters which this FastAdapter should use
     * @return a new FastAdapter
     */
    public static <Item extends IItem, A extends IAdapter> FastAdapter<Item> with(@Nullable Collection<A> adapters) {
        // Delegate to the two-argument overload with no extension adapters.
        return with(adapters, null);
    }
}
public class SegmentedJournal { /** * Returns the index of the last segment in the log . * @ param index the compaction index * @ return the starting index of the last segment in the log */ public long getCompactableIndex ( long index ) { } }
Map . Entry < Long , JournalSegment < E > > segmentEntry = segments . floorEntry ( index ) ; return segmentEntry != null ? segmentEntry . getValue ( ) . index ( ) : 0 ;
public class Ifc4FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public IfcKnotType createIfcKnotTypeFromString ( EDataType eDataType , String initialValue ) { } }
IfcKnotType result = IfcKnotType . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
public class LogicalContainerAwareReentrantTypeResolver { /** * / * @ Nullable */ @ SuppressWarnings ( "unused" ) protected LightweightTypeReference getReturnTypeOfOverriddenOperation ( JvmOperation operation , ResolvedTypes resolvedTypes , IFeatureScopeSession session ) { } }
if ( operation . getVisibility ( ) == JvmVisibility . PRIVATE ) return null ; if ( InferredTypeIndicator . isInferred ( operation . getReturnType ( ) ) ) { LightweightTypeReference declaringType = resolvedTypes . getActualType ( operation . getDeclaringType ( ) ) ; if ( declaringType == null ) { throw new IllegalStateException ( "Cannot determine declaring type of operation: " + operation ) ; } LightweightTypeReference result = overrideHelper . getReturnTypeOfOverriddenOperation ( operation , declaringType ) ; return result ; } return null ;
public class ParseUtil {
    /**
     * Returns a new String constructed from the specified String by replacing
     * the URL escape sequences and UTF8 encoding with the characters they
     * represent.
     *
     * @throws IllegalArgumentException if an escape sequence is malformed or
     *         the percent-encoded bytes are not valid UTF-8
     */
    public static String decode(String s) {
        int n = s.length();
        // Fast path: nothing to decode.
        if ((n == 0) || (s.indexOf('%') < 0))
            return s;
        StringBuilder sb = new StringBuilder(n);
        // Scratch buffers sized for the worst case (whole input is one run).
        ByteBuffer bb = ByteBuffer.allocate(n);
        CharBuffer cb = CharBuffer.allocate(n);
        // REPORT (not replace) so invalid UTF-8 raises an error below.
        CharsetDecoder dec = ThreadLocalCoders.decoderFor("UTF-8").onMalformedInput(CodingErrorAction.REPORT).onUnmappableCharacter(CodingErrorAction.REPORT);
        char c = s.charAt(0);
        for (int i = 0; i < n;) {
            assert c == s.charAt(i);
            if (c != '%') {
                // Plain character: copy through and advance.
                sb.append(c);
                if (++i >= n)
                    break;
                c = s.charAt(i);
                continue;
            }
            // Collect a maximal run of %XX escapes into the byte buffer,
            // then decode the whole run at once (multi-byte UTF-8 sequences
            // span several escapes).
            bb.clear();
            int ui = i;
            for (;;) {
                assert (n - i >= 2);
                try {
                    bb.put(unescape(s, i));
                } catch (NumberFormatException e) {
                    throw new IllegalArgumentException();
                }
                i += 3; // past the '%' and two hex digits
                if (i >= n)
                    break;
                c = s.charAt(i);
                if (c != '%')
                    break;
            }
            bb.flip();
            cb.clear();
            dec.reset();
            CoderResult cr = dec.decode(bb, cb, true);
            if (cr.isError())
                throw new IllegalArgumentException("Error decoding percent encoded characters");
            cr = dec.flush(cb);
            if (cr.isError())
                throw new IllegalArgumentException("Error decoding percent encoded characters");
            sb.append(cb.flip().toString());
        }
        return sb.toString();
    }
}
public class StreamUtils {
    /**
     * Wraps the Writer in a BufferedWriter, unless it already is a BufferedWriter.
     *
     * @param writer the Writer to check and possibly wrap
     * @return a BufferedWriter
     */
    private static BufferedWriter bufferOutput(Writer writer) {
        if (writer instanceof BufferedWriter) {
            // Avoid double buffering.
            return (BufferedWriter) writer;
        }
        return new BufferedWriter(writer);
    }
}
public class ListManagementImagesImpl {
    /**
     * Add an image to the list with list Id equal to list Id passed.
     *
     * @param listId List Id of the image list.
     * @param addImageOptionalParameter the object representing the optional parameters to be set before calling this API
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Image> addImageAsync(String listId, AddImageOptionalParameter addImageOptionalParameter, final ServiceCallback<Image> serviceCallback) {
        // Bridge the async response pipeline to the callback-style ServiceFuture.
        return ServiceFuture.fromResponse(addImageWithServiceResponseAsync(listId, addImageOptionalParameter), serviceCallback);
    }
}
public class CPOptionCategoryPersistenceImpl { /** * Returns the cp option category where uuid = & # 63 ; and groupId = & # 63 ; or throws a { @ link NoSuchCPOptionCategoryException } if it could not be found . * @ param uuid the uuid * @ param groupId the group ID * @ return the matching cp option category * @ throws NoSuchCPOptionCategoryException if a matching cp option category could not be found */ @ Override public CPOptionCategory findByUUID_G ( String uuid , long groupId ) throws NoSuchCPOptionCategoryException { } }
CPOptionCategory cpOptionCategory = fetchByUUID_G ( uuid , groupId ) ; if ( cpOptionCategory == null ) { StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( ", groupId=" ) ; msg . append ( groupId ) ; msg . append ( "}" ) ; if ( _log . isDebugEnabled ( ) ) { _log . debug ( msg . toString ( ) ) ; } throw new NoSuchCPOptionCategoryException ( msg . toString ( ) ) ; } return cpOptionCategory ;
public class AuthorizationHandler { /** * Ends the current request by sending a HTTP 401 status code and the default unauthorized template * @ param exchange The HttpServerExchange */ private void endRequest ( HttpServerExchange exchange ) { } }
exchange . setStatusCode ( StatusCodes . UNAUTHORIZED ) ; Server . headers ( ) . entrySet ( ) . stream ( ) . filter ( entry -> StringUtils . isNotBlank ( entry . getValue ( ) ) ) . forEach ( entry -> exchange . getResponseHeaders ( ) . add ( entry . getKey ( ) . toHttpString ( ) , entry . getValue ( ) ) ) ; exchange . endExchange ( ) ;
public class NumericRefinement { /** * Gets the { @ link NumericRefinement # OPERATOR _ GT operator } matching the given short name . * @ param operatorName the short name of an operator . * @ return the integer representation of this operator . * @ throws IllegalStateException if operatorName is not a known operator name . */ public static int getOperatorCode ( String operatorName ) { } }
switch ( operatorName ) { case "lt" : return OPERATOR_LT ; case "le" : return OPERATOR_LE ; case "eq" : return OPERATOR_EQ ; case "ne" : return OPERATOR_NE ; case "ge" : return OPERATOR_GE ; case "gt" : return OPERATOR_GT ; default : throw new IllegalStateException ( String . format ( ERROR_INVALID_NAME , operatorName ) ) ; }
public class JBBPDslBuilder { /** * Add named unsigned short field . * @ param name name of the field , can be null for anonymous * @ return the builder instance , must not be null */ public JBBPDslBuilder UShort ( final String name ) { } }
final Item item = new Item ( BinType . USHORT , name , this . byteOrder ) ; this . addItem ( item ) ; return this ;
public class CmsFileUtil {
    /**
     * Removes a trailing separator from a path if required.<p>
     * In case we have the root folder "/", the separator is not removed.<p>
     *
     * @param path the path to remove the trailing separator from
     * @return the path without a trailing separator
     */
    public static String removeTrailingSeparator(String path) {
        final int length = path.length();
        // Strip only when the path is longer than "/" and actually ends with '/'.
        final boolean endsWithSeparator = (length > 1) && (path.charAt(length - 1) == '/');
        return endsWithSeparator ? path.substring(0, length - 1) : path;
    }
}
public class BtcFormat { /** * Takes an object representing a bitcoin quantity of any type the * client is permitted to pass us , and return a BigInteger representing the * number of satoshis having the equivalent value . */ private static BigInteger inSatoshis ( Object qty ) { } }
BigInteger satoshis ; // the value might be bitcoins or satoshis if ( qty instanceof Long || qty instanceof Integer ) satoshis = BigInteger . valueOf ( ( ( Number ) qty ) . longValue ( ) ) ; else if ( qty instanceof BigInteger ) satoshis = ( BigInteger ) qty ; else if ( qty instanceof BigDecimal ) satoshis = ( ( BigDecimal ) qty ) . movePointRight ( Coin . SMALLEST_UNIT_EXPONENT ) . setScale ( 0 , BigDecimal . ROUND_HALF_UP ) . unscaledValue ( ) ; else if ( qty instanceof Coin ) satoshis = BigInteger . valueOf ( ( ( Coin ) qty ) . value ) ; else throw new IllegalArgumentException ( "Cannot format a " + qty . getClass ( ) . getSimpleName ( ) + " as a Bicoin value" ) ; return satoshis ;
public class UriComponentsBuilder {
    /**
     * Add the given query parameters.
     *
     * @param params the params; must not be {@code null}
     * @return this UriComponentsBuilder
     */
    public UriComponentsBuilder queryParams(MultiValueMap<String, String> params) {
        Assert.notNull(params, "'params' must not be null");
        // Merge all provided parameters into this builder's query map.
        this.queryParams.putAll(params);
        return this;
    }
}
public class JSConsumerSet { /** * Returns the name of a classification specified by XD . * @ return */ public synchronized void registerKey ( DispatchableKey key ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "registerKey" , key ) ; keyRegistry . add ( key ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "registerKey" ) ;
public class PlatformSummary {
    /**
     * The additions associated with the platform.
     * Lazily initializes the backing list, so this accessor never returns null.
     *
     * @return The additions associated with the platform.
     */
    public java.util.List<String> getSupportedAddonList() {
        if (supportedAddonList == null) {
            supportedAddonList = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return supportedAddonList;
    }
}
public class CommerceShippingMethodPersistenceImpl {
    /**
     * Returns a range of all the commerce shipping methods where groupId = &#63;.
     * Useful when paginating results; returns a maximum of <code>end - start</code>
     * instances, where <code>start</code> and <code>end</code> are result-set
     * indexes (not primary keys).
     *
     * @param groupId the group ID
     * @param start the lower bound of the range of commerce shipping methods
     * @param end the upper bound of the range of commerce shipping methods (not inclusive)
     * @return the range of matching commerce shipping methods
     */
    @Override
    public List<CommerceShippingMethod> findByGroupId(long groupId, int start, int end) {
        // Delegate to the full overload with no explicit ordering.
        return findByGroupId(groupId, start, end, null);
    }
}
public class MagicMimeEntry { /** * Match byte . * @ param bbuf the bbuf * @ return true , if successful * @ throws IOException Signals that an I / O exception has occurred . */ private boolean matchByte ( final ByteBuffer bbuf ) throws IOException { } }
final byte b = bbuf . get ( 0 ) ; return b == getContent ( ) . charAt ( 0 ) ;
public class UTF8ByteArrayUtils {
    /**
     * Find the first occurred tab in a UTF-8 encoded string.
     *
     * @param utf a byte array containing a UTF-8 encoded string
     * @param start starting offset
     * @param length no. of bytes
     * @return position where the first tab occurs, otherwise -1
     * @deprecated use {@link StreamKeyValUtil#findTab(byte[], int, int)}
     */
    @Deprecated
    public static int findTab(byte[] utf, int start, int length) {
        // Kept only for backward compatibility; delegates to the replacement API.
        return StreamKeyValUtil.findTab(utf, start, length);
    }
}
public class CommercePriceListUserSegmentEntryRelPersistenceImpl { /** * Returns the commerce price list user segment entry rel with the primary key or throws a { @ link com . liferay . portal . kernel . exception . NoSuchModelException } if it could not be found . * @ param primaryKey the primary key of the commerce price list user segment entry rel * @ return the commerce price list user segment entry rel * @ throws NoSuchPriceListUserSegmentEntryRelException if a commerce price list user segment entry rel with the primary key could not be found */ @ Override public CommercePriceListUserSegmentEntryRel findByPrimaryKey ( Serializable primaryKey ) throws NoSuchPriceListUserSegmentEntryRelException { } }
CommercePriceListUserSegmentEntryRel commercePriceListUserSegmentEntryRel = fetchByPrimaryKey ( primaryKey ) ; if ( commercePriceListUserSegmentEntryRel == null ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } throw new NoSuchPriceListUserSegmentEntryRelException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return commercePriceListUserSegmentEntryRel ;
public class LayerControlPanelPresenterImpl { /** * Check if current viewPort resolution is between the minimum ( inclusive ) and * the maximum scale ( exclusive ) of the layer . * Inclusive / exclusive follows SLD convention : exclusive minResolution , inclusive maxResolution . * @ param viewPort the viewPort * @ param layer layer * @ return whether the layer is visible in the provided viewPort resolution */ public boolean isLayerVisibleAtViewPortResolution ( ViewPort viewPort , Layer layer ) { } }
if ( viewPort . getResolution ( ) > layer . getMinResolution ( ) && viewPort . getResolution ( ) <= layer . getMaxResolution ( ) ) { return true ; } return false ;
public class HealthDependencyDto {
    /**
     * Creates a new builder for {@code HealthDependencyDto}, pre-populated with
     * empty optionals and false flags so every field has a defined default.
     */
    public static Builder builder() {
        return new AutoValue_HealthDependencyDto.Builder()
            .setName(Optional.empty())
            .setPrimary(false)
            .setType(Optional.empty())
            .setHealthy(false)
            .setLinks(Optional.empty())
            .setMessage(Optional.empty());
    }
}
public class HttpServerBuilder {
    /**
     * Defines a folder resource whose content will be hosted by the http server.
     *
     * @param contextRoot the root path to the content
     * @param folder the rule that creates the temporary folder that should be hosted by the http server
     * @return this builder
     */
    public HttpServerBuilder contentFrom(final String contextRoot, final TemporaryFolder folder) {
        // Register the folder rule under the given context root.
        resources.put(contextRoot, folder);
        return this;
    }
}
public class SendMembersActiveGossipThread {
    /**
     * Performs the sending of the membership list, after we have incremented
     * our own heartbeat. Picks one gossip partner, serializes the full member
     * list as JSON and sends it in a single UDP datagram.
     */
    protected void sendMembershipList(LocalGossipMember me, List<LocalGossipMember> memberList) {
        GossipService.LOGGER.debug("Send sendMembershipList() is called.");
        // Refresh our own heartbeat so peers see us as alive.
        me.setHeartbeat(System.currentTimeMillis());
        LocalGossipMember member = selectPartner(memberList);
        if (member == null) {
            // No partner available to gossip with this round.
            return;
        }
        try (DatagramSocket socket = new DatagramSocket()) {
            // Bound the send by the gossip interval so a slow peer cannot stall us.
            socket.setSoTimeout(gossipManager.getSettings().getGossipInterval());
            InetAddress dest = InetAddress.getByName(member.getHost());
            // Message carries ourselves first, then every known member.
            ActiveGossipMessage message = new ActiveGossipMessage();
            message.getMembers().add(convert(me));
            for (LocalGossipMember other : memberList) {
                message.getMembers().add(convert(other));
            }
            byte[] json_bytes = om.writeValueAsString(message).getBytes();
            int packet_length = json_bytes.length;
            // Only send if the payload fits in a single gossip packet.
            if (packet_length < GossipManager.MAX_PACKET_SIZE) {
                byte[] buf = createBuffer(packet_length, json_bytes);
                DatagramPacket datagramPacket = new DatagramPacket(buf, buf.length, dest, member.getPort());
                socket.send(datagramPacket);
            } else {
                GossipService.LOGGER.error("The length of the to be send message is too large (" + packet_length + " > " + GossipManager.MAX_PACKET_SIZE + ").");
            }
        } catch (IOException e1) {
            // Gossip is best-effort: log and move on; the next round will retry.
            GossipService.LOGGER.warn(e1);
        }
    }
}
public class IntStreamSubject {
    /**
     * Fails if the subject does not contain all of the given elements. If an
     * element appears more than once in the given elements, then it must appear
     * at least that number of times in the actual elements.
     * <p>To also test that the contents appear in the given order, make a call
     * to {@code inOrder()} on the object returned by this method. The expected
     * elements must appear in the given order within the actual elements, but
     * they are not required to be consecutive.
     */
    @CanIgnoreReturnValue
    public Ordered containsAllIn(Iterable<?> expected) {
        // Delegate to the list-based subject over the stream's materialized elements.
        return check().that(actualList).containsAtLeastElementsIn(expected);
    }
}
public class ObjectWritable { /** * This class should be used for writing only class / method names ! ! ! ! */ public static void writeStringCached ( DataOutput out , String entityName ) throws IOException { } }
byte [ ] name = getByteNameWithCaching ( entityName ) ; // name should never be null at this point if ( name == null ) { throw new RuntimeException ( "Cannot retrieve class name" ) ; } out . write ( name , 0 , name . length ) ;
public class CommerceOrderItemUtil {
    /**
     * Returns an ordered range of all the commerce order items where
     * CPInstanceId = &#63;. Useful when paginating results; returns a maximum of
     * <code>end - start</code> instances, where <code>start</code> and
     * <code>end</code> are result-set indexes (not primary keys).
     *
     * @param CPInstanceId the cp instance ID
     * @param start the lower bound of the range of commerce order items
     * @param end the upper bound of the range of commerce order items (not inclusive)
     * @param orderByComparator the comparator to order the results by (optionally <code>null</code>)
     * @return the ordered range of matching commerce order items
     */
    public static List<CommerceOrderItem> findByCPInstanceId(long CPInstanceId, int start, int end, OrderByComparator<CommerceOrderItem> orderByComparator) {
        // Static facade: forward to the persistence implementation.
        return getPersistence().findByCPInstanceId(CPInstanceId, start, end, orderByComparator);
    }
}