signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class AbstractOptions { /** * Returns the value of the specified { @ code option } . * @ param < O > the type of the option * @ param < V > the type of the value * @ return the value of the specified { @ code option } . { @ code defaultValue } if there ' s no such option . */ protected final < O extends AbstractOption < V > , V > V getOrElse0 ( O option , V defaultValue ) { } }
return get0 ( option ) . orElse ( defaultValue ) ;
public class StringUtils { /** * < p > Case in - sensitive find of the first index within a String * from the specified position . < / p > * < p > A < code > null < / code > String will return < code > - 1 < / code > . * A negative start position is treated as zero . * An empty ( " " ) search String always matches . * A start position greater than the string length only matches * an empty search String . < / p > * < pre > * StringUtils . indexOfIgnoreCase ( null , * , * ) = - 1 * StringUtils . indexOfIgnoreCase ( * , null , * ) = - 1 * StringUtils . indexOfIgnoreCase ( " " , " " , 0 ) = 0 * StringUtils . indexOfIgnoreCase ( " aabaabaa " , " A " , 0 ) = 0 * StringUtils . indexOfIgnoreCase ( " aabaabaa " , " B " , 0 ) = 2 * StringUtils . indexOfIgnoreCase ( " aabaabaa " , " AB " , 0 ) = 1 * StringUtils . indexOfIgnoreCase ( " aabaabaa " , " B " , 3 ) = 5 * StringUtils . indexOfIgnoreCase ( " aabaabaa " , " B " , 9 ) = - 1 * StringUtils . indexOfIgnoreCase ( " aabaabaa " , " B " , - 1 ) = 2 * StringUtils . indexOfIgnoreCase ( " aabaabaa " , " " , 2 ) = 2 * StringUtils . indexOfIgnoreCase ( " abc " , " " , 9 ) = 3 * < / pre > * @ param str the String to check , may be null * @ param searchStr the String to find , may be null * @ param startPos the start position , negative treated as zero * @ return the first index of the search String , * - 1 if no match or < code > null < / code > string input * @ since 2.5 */ public static int indexOfIgnoreCase ( String str , String searchStr , int startPos ) { } }
if ( str == null || searchStr == null ) { return INDEX_NOT_FOUND ; } if ( startPos < 0 ) { startPos = 0 ; } int endLimit = ( str . length ( ) - searchStr . length ( ) ) + 1 ; if ( startPos > endLimit ) { return INDEX_NOT_FOUND ; } if ( searchStr . length ( ) == 0 ) { return startPos ; } for ( int i = startPos ; i < endLimit ; i ++ ) { if ( str . regionMatches ( true , i , searchStr , 0 , searchStr . length ( ) ) ) { return i ; } } return INDEX_NOT_FOUND ;
public class ListWebhooksResult { /** * The JSON detail returned for each webhook in the list output for the ListWebhooks call . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setWebhooks ( java . util . Collection ) } or { @ link # withWebhooks ( java . util . Collection ) } if you want to override * the existing values . * @ param webhooks * The JSON detail returned for each webhook in the list output for the ListWebhooks call . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListWebhooksResult withWebhooks ( ListWebhookItem ... webhooks ) { } }
if ( this . webhooks == null ) { setWebhooks ( new java . util . ArrayList < ListWebhookItem > ( webhooks . length ) ) ; } for ( ListWebhookItem ele : webhooks ) { this . webhooks . add ( ele ) ; } return this ;
public class ConfigurationBuilder { /** * Adding Endpoint for API * Valid URLs are : * tcp : / / [ host ] : [ port ] * tls : / / [ host ] : [ port ] * ws : / / [ host ] : [ port ] * wss : / / [ host ] : [ port ] * @ param url endpoint url * @ return this */ @ NotNull @ ObjectiveCName ( "addEndpoint:" ) public ConfigurationBuilder addEndpoint ( @ NotNull String url ) { } }
try { endpoints . addEndpoint ( url ) ; } catch ( ConnectionEndpointArray . UnknownSchemeException e ) { throw new RuntimeException ( e . getMessage ( ) ) ; } return this ;
public class ServiceLoader { /** * Creates a new service loader for the given service type , using the * current thread ' s { @ linkplain java . lang . Thread # getContextClassLoader * context class loader } . * < p > An invocation of this convenience method of the form * < blockquote > < pre > * ServiceLoader . load ( < i > service < / i > ) < / pre > < / blockquote > * is equivalent to * < blockquote > < pre > * ServiceLoader . load ( < i > service < / i > , * Thread . currentThread ( ) . getContextClassLoader ( ) ) < / pre > < / blockquote > * @ param service * The interface or abstract class representing the service * @ return A new service loader */ public static < S > ServiceLoader < S > load ( Class < S > service ) { } }
return ServiceLoader . load ( service , ClassLoader . getSystemClassLoader ( ) ) ;
public class TraceEventHelper {

    /**
     * Pretty prints a trace event.
     * <p>
     * Connection-listener lifecycle events (get/return/create/destroy),
     * exceptions, and CCM context push/pop are rendered in a compact
     * dash-separated "IJTRACER" line; every other event type falls back to
     * the event's own {@code toString()}.
     *
     * @param te The event
     * @return The string
     */
    public static String prettyPrint(TraceEvent te) {
        // Any event type NOT in the explicit list below uses the default rendering.
        if (te.getType() != TraceEvent.GET_CONNECTION_LISTENER &&
            te.getType() != TraceEvent.GET_CONNECTION_LISTENER_NEW &&
            te.getType() != TraceEvent.GET_INTERLEAVING_CONNECTION_LISTENER &&
            te.getType() != TraceEvent.GET_INTERLEAVING_CONNECTION_LISTENER_NEW &&
            te.getType() != TraceEvent.RETURN_CONNECTION_LISTENER &&
            te.getType() != TraceEvent.RETURN_CONNECTION_LISTENER_WITH_KILL &&
            te.getType() != TraceEvent.RETURN_INTERLEAVING_CONNECTION_LISTENER &&
            te.getType() != TraceEvent.RETURN_INTERLEAVING_CONNECTION_LISTENER_WITH_KILL &&
            te.getType() != TraceEvent.CREATE_CONNECTION_LISTENER_GET &&
            te.getType() != TraceEvent.CREATE_CONNECTION_LISTENER_PREFILL &&
            te.getType() != TraceEvent.CREATE_CONNECTION_LISTENER_INCREMENTER &&
            te.getType() != TraceEvent.DESTROY_CONNECTION_LISTENER_RETURN &&
            te.getType() != TraceEvent.DESTROY_CONNECTION_LISTENER_IDLE &&
            te.getType() != TraceEvent.DESTROY_CONNECTION_LISTENER_INVALID &&
            te.getType() != TraceEvent.DESTROY_CONNECTION_LISTENER_FLUSH &&
            te.getType() != TraceEvent.DESTROY_CONNECTION_LISTENER_ERROR &&
            te.getType() != TraceEvent.DESTROY_CONNECTION_LISTENER_PREFILL &&
            te.getType() != TraceEvent.DESTROY_CONNECTION_LISTENER_INCREMENTER &&
            te.getType() != TraceEvent.EXCEPTION &&
            te.getType() != TraceEvent.PUSH_CCM_CONTEXT &&
            te.getType() != TraceEvent.POP_CCM_CONTEXT)
            return te.toString();

        // Compact format:
        // IJTRACER-<pool>-<mcp>-<threadId>-<type>-<timestamp>-<connectionListener>-DATA
        StringBuilder sb = new StringBuilder();
        sb.append("IJTRACER");
        sb.append("-");
        sb.append(te.getPool());
        sb.append("-");
        sb.append(te.getManagedConnectionPool());
        sb.append("-");
        sb.append(te.getThreadId());
        sb.append("-");
        sb.append(te.getType());
        sb.append("-");
        sb.append(te.getTimestamp());
        sb.append("-");
        sb.append(te.getConnectionListener());
        sb.append("-");
        sb.append("DATA");
        return sb.toString();
    }
}
public class MvcServlet {

    /**
     * Enables MVC capabilities in the given factory by injecting a
     * {@link BundleSpecificDispatcherServlet}.
     *
     * @param factory must not be <code>null</code>.
     * @param context must not be <code>null</code>.
     * @throws IllegalArgumentException if either argument is <code>null</code>
     */
    public void enableMvc(ConfigurableListableBeanFactory factory, BundleContext context) {
        if (factory == null) {
            throw new IllegalArgumentException("Method argument factory must not be null.");
        }
        if (context == null) {
            throw new IllegalArgumentException("Method argument context must not be null.");
        }
        // Servlet config scoped to the bundle, so servlet-aware beans see
        // bundle-specific configuration rather than the global one.
        BundleAwareServletConfig servletConfig = new BundleAwareServletConfig(context);
        factory.addBeanPostProcessor(new ServletContextAwareProcessor(this.servletContext, servletConfig));
        // These interfaces are satisfied by the post-processor above, so they
        // must be excluded from ordinary dependency autowiring.
        factory.ignoreDependencyInterface(ServletContextAware.class);
        factory.ignoreDependencyInterface(ServletConfigAware.class);
        final BundleSpecificDispatcherServlet dispatcherServlet = createBundleSpecificDispatcherServlet(factory, servletConfig);
        factory.registerSingleton(generateNameFor(BundleSpecificDispatcherServlet.class), dispatcherServlet);
        // Track the dispatcher per bundle so requests can later be routed to it.
        this.mvcCapableBundles.put(context.getBundle(), dispatcherServlet);
    }
}
public class RegexpUtil {

    /**
     * Converts a string to a compiled {@link Pattern}.
     * <ul>
     * <li>If the expression starts and ends with / (and is at least two
     *     characters long) the inner text is compiled as a regular expression</li>
     * <li>Otherwise the input must match exactly (it is quoted literally)</li>
     * </ul>
     *
     * @param expr the expression to compile (must not be null)
     * @return a pattern matching either the regex or the literal string
     */
    public static Pattern compilePattern(final String expr) {
        final int length = expr.length();
        // Require at least two characters for the /regex/ form; this also
        // prevents the StringIndexOutOfBoundsException the old code threw for
        // "" (charAt(0)) and for the single character "/" (substring(1, 0)).
        if (length >= 2 && expr.charAt(0) == '/' && expr.charAt(length - 1) == '/') {
            return Pattern.compile(expr.substring(1, length - 1));
        }
        // Literal match: quote so regex metacharacters are taken verbatim.
        return Pattern.compile(Pattern.quote(expr));
    }
}
public class DescribeSpotFleetRequestsResult { /** * Information about the configuration of your Spot Fleet . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setSpotFleetRequestConfigs ( java . util . Collection ) } or * { @ link # withSpotFleetRequestConfigs ( java . util . Collection ) } if you want to override the existing values . * @ param spotFleetRequestConfigs * Information about the configuration of your Spot Fleet . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeSpotFleetRequestsResult withSpotFleetRequestConfigs ( SpotFleetRequestConfig ... spotFleetRequestConfigs ) { } }
if ( this . spotFleetRequestConfigs == null ) { setSpotFleetRequestConfigs ( new com . amazonaws . internal . SdkInternalList < SpotFleetRequestConfig > ( spotFleetRequestConfigs . length ) ) ; } for ( SpotFleetRequestConfig ele : spotFleetRequestConfigs ) { this . spotFleetRequestConfigs . add ( ele ) ; } return this ;
public class MtasPosition { /** * Gets the end . * @ return the end */ public Integer getEnd ( ) { } }
if ( mtasPositionType . equals ( POSITION_RANGE ) || mtasPositionType . equals ( POSITION_SET ) ) { return mtasPositionEnd ; } else if ( mtasPositionType . equals ( POSITION_SINGLE ) ) { return mtasPositionStart ; } else { return null ; }
public class JSONParser {

    /**
     * Parses JSON from a byte array into the requested target type
     * (a primitive wrapper, String, JsonObject, or JsonArray as produced
     * by the configured ContainerFactory).
     *
     * @param in raw JSON bytes to parse
     * @param mapTo the class the parsed value should be mapped to
     * @return the decoded value, mapped to {@code mapTo}
     * @throws ParseException if the input is not valid JSON
     */
    public <T> T parse(byte[] in, Class<T> mapTo) throws ParseException {
        // Delegate to the byte-oriented parser with a mapper resolved for the target type.
        return getPBytes().parse(in, JSONValue.defaultReader.getMapper(mapTo));
    }
}
public class DeepLearningTask {

    /**
     * Transfers ownership from the global (shared) model to the local model
     * which will be worked on.
     * <p>
     * With elastic averaging enabled, each node keeps (or creates) its own
     * local copy of the model in the DKV; otherwise the shared model is
     * adopted directly as the local model.
     */
    @Override
    protected void setupLocal() {
        assert (_localmodel == null);
        super.setupLocal();
        if (model_info().get_params()._elastic_averaging) {
            // Load my local model from DKV, to continue training.
            _localmodel = DKV.getGet(_sharedmodel.localModelInfoKey(H2O.SELF));
            if (_localmodel != null) {
                if (!Arrays.equals(_localmodel.units, _sharedmodel.units)) {
                    // Topology changed (e.g. different layer sizes): the stored
                    // local model is unusable, start from a copy of the shared one.
                    _localmodel = IcedUtils.deepCopy(_sharedmodel);
                } else {
                    // Make sure that the local model has the right global (shared)
                    // parameters after checkpoint restart!
                    _localmodel.set_params(_sharedmodel.get_params(), _sharedmodel._model_id);
                    _localmodel.set_processed_global(_sharedmodel.get_processed_global());
                }
            } else {
                // First time around - use the randomized initial weights and
                // don't spread the shared (random) model.
                _localmodel = IcedUtils.deepCopy(_sharedmodel);
                _sharedmodel = null;
            }
        } else {
            // No elastic averaging: train directly on the shared model.
            _localmodel = _sharedmodel;
            _sharedmodel = null;
        }
        // Fresh local pass: reset the per-node processed-row counter.
        _localmodel.set_processed_local(0);
    }
}
public class PDFBuilder { /** * Creates a < code > CatalogManager < / code > , used to resolve DTDs and other entities . * @ return A < code > CatalogManager < / code > to be used for resolving DTDs and other entities . */ protected CatalogManager createCatalogManager ( ) { } }
CatalogManager manager = new CatalogManager ( ) ; manager . setIgnoreMissingProperties ( true ) ; ClassLoader classLoader = Thread . currentThread ( ) . getContextClassLoader ( ) ; StringBuffer builder = new StringBuffer ( ) ; boolean first = true ; for ( int i = 0 ; i < catalogs . length ; i ++ ) { final String catalog = catalogs [ i ] ; try { Enumeration < URL > enumeration = classLoader . getResources ( catalog ) ; while ( enumeration . hasMoreElements ( ) ) { if ( ! first ) { builder . append ( ';' ) ; } else { first = false ; } URL resource = enumeration . nextElement ( ) ; builder . append ( resource . toExternalForm ( ) ) ; } } catch ( IOException ioe ) { getLog ( ) . warn ( "Failed to search for catalog files: " + catalog ) ; // Let ' s be a little tolerant here . } } // builder . append ( " jar : file : / Users / salmanqureshi / . m2 / repository / net / sf / docbook / docbook - xsl / 1.76.1 / docbook - xsl - 1.76.1 - ns - resources . zip ! / docbook / catalog . xml " ) ; String catalogFiles = builder . toString ( ) ; if ( catalogFiles . length ( ) == 0 ) { getLog ( ) . warn ( "Failed to find catalog files." ) ; } else { if ( getLog ( ) . isDebugEnabled ( ) ) { getLog ( ) . debug ( "Catalogs to load: " + catalogFiles ) ; } manager . setCatalogFiles ( catalogFiles ) ; } return manager ;
public class SeaGlassMenuUI {

    /**
     * Paints the menu item, including its check indicator, arrow icon and
     * accelerator text, using the style associated with the current state.
     *
     * @param context
     *            context for the component being painted
     * @param g
     *            the {@code Graphics} object used for painting
     * @see #update(Graphics, JComponent)
     */
    protected void paint(SynthContext context, Graphics g) {
        // Separate context for the accelerator region; must be disposed below.
        SeaGlassContext accContext = getContext(menuItem, Region.MENU_ITEM_ACCELERATOR);
        // Refetch the appropriate check indicator for the current state.
        String prefix = getPropertyPrefix();
        Icon checkIcon = style.getIcon(context, prefix + ".checkIcon");
        Icon arrowIcon = style.getIcon(context, prefix + ".arrowIcon");
        SeaGlassGraphicsUtils.paint(context, accContext, g, checkIcon, arrowIcon, acceleratorDelimiter, defaultTextIconGap, getPropertyPrefix());
        accContext.dispose();
    }
}
public class SyncMapItem { /** * Create a SyncMapItemUpdater to execute update . * @ param pathServiceSid The service _ sid * @ param pathMapSid The map _ sid * @ param pathKey The key * @ return SyncMapItemUpdater capable of executing the update */ public static SyncMapItemUpdater updater ( final String pathServiceSid , final String pathMapSid , final String pathKey ) { } }
return new SyncMapItemUpdater ( pathServiceSid , pathMapSid , pathKey ) ;
public class DefaultServiceTaskBehavior {

    /**
     * Executes the service task: resolves the connector for the task's
     * implementation, applies it to an integration context built from the
     * execution, copies the resulting outbound variables back onto the
     * execution, and leaves the activity.
     * <p>
     * We have two different implementation strategies that can be executed
     * according to whether we have a connector action definition match or not.
     */
    @Override
    public void execute(DelegateExecution execution) {
        // May be null when no action definition matches the implementation.
        ActionDefinition actionDefinition = findRelatedActionDefinition(execution);
        Connector connector = getConnector(getImplementation(execution));
        IntegrationContext integrationContext = connector.apply(integrationContextBuilder.from(execution, actionDefinition));
        // Map connector outputs back into process variables before moving on.
        execution.setVariables(outboundVariablesProvider.calculateVariables(integrationContext, actionDefinition));
        leave(execution);
    }
}
public class ImageMetadataModule {

    /**
     * Entry point of the image metadata module: collects metadata for image
     * resources referenced by the job, rewrites the DITA topic files through
     * an {@code ImageMetadataFilter}, stores the collected image formats, and
     * persists the updated job configuration.
     *
     * @param input Input parameters and resources.
     * @return null
     * @throws DITAOTException exception
     */
    @Override
    public AbstractPipelineOutput execute(final AbstractPipelineInput input) throws DITAOTException {
        if (logger == null) {
            throw new IllegalStateException("Logger not set");
        }
        // Image (and HTML) resources known to the job; if there are none,
        // there is nothing to collect or rewrite.
        final Collection<FileInfo> images = job.getFileInfo(f -> ATTR_FORMAT_VALUE_IMAGE.equals(f.format) || ATTR_FORMAT_VALUE_HTML.equals(f.format));
        if (!images.isEmpty()) {
            final File outputDir = new File(input.getAttribute(ANT_INVOKER_EXT_PARAM_OUTPUTDIR));
            final ImageMetadataFilter writer = new ImageMetadataFilter(outputDir, job);
            writer.setLogger(logger);
            writer.setJob(job);
            // Default: process non-resource-only DITA files, unless a custom
            // filter was injected (e.g. for testing).
            final Predicate<FileInfo> filter = fileInfoFilter != null
                ? fileInfoFilter
                : f -> !f.isResourceOnly && ATTR_FORMAT_VALUE_DITA.equals(f.format);
            for (final FileInfo f : job.getFileInfo(filter)) {
                // Rewrite each temp-dir topic in place through the filter.
                writer.write(new File(job.tempDir, f.file.getPath()).getAbsoluteFile());
            }
            storeImageFormat(writer.getImages(), outputDir);
            try {
                // Persist job configuration updated during filtering.
                job.write();
            } catch (IOException e) {
                throw new DITAOTException("Failed to serialize job configuration: " + e.getMessage(), e);
            }
        }
        return null;
    }
}
public class ScanLeftSpliterator { /** * / * ( non - Javadoc ) * @ see java . util . Spliterator # forEachRemaining ( java . util . function . Consumer ) */ @ Override public void forEachRemaining ( Consumer < ? super U > action ) { } }
source . forEachRemaining ( e -> { action . accept ( current = function . apply ( current , e ) ) ; } ) ;
public class WriteClass {

    /**
     * Creates the class source for this field: reads the field definition,
     * then writes heading, includes, interface, fields, constructors,
     * init code and methods in order.
     *
     * @param strClassName the name of the class to generate
     * @param codeType the target code flavor to generate
     */
    public void writeClass(String strClassName, CodeType codeType) {
        if (!this.readThisClass(strClassName)) // Get the field this is based on
            return;
        this.writeHeading(strClassName, this.getPackage(codeType), codeType); // Write the first few lines of the files
        this.writeIncludes(codeType);
        // Reset method-name bookkeeping from any previous generation run.
        if (m_MethodNameList.size() != 0)
            m_MethodNameList.removeAllElements();
        this.writeClassInterface();
        this.writeClassFields(CodeType.THICK); // Write the C++ fields for this class
        this.writeDefaultConstructor(strClassName);
        this.writeClassInit();
        this.writeInit(); // Special case... zero all class fields!
        this.writeProgramDesc(strClassName);
        this.writeClassMethods(CodeType.THICK); // Write the remaining methods for this class
        this.writeEndCode(true);
    }
}
public class XPathBuilder { /** * < p > < b > Used for finding element process ( to generate xpath address ) < / b > < / p > * @ param id eg . id = " buttonSubmit " * @ param < T > the element which calls this method * @ return this element */ @ SuppressWarnings ( "unchecked" ) public < T extends XPathBuilder > T setId ( final String id ) { } }
this . id = id ; return ( T ) this ;
public class ModbusRTUTransport { /** * getRequest - Read a request , after the unit and function code * @ param function - Modbus function code * @ param out - Byte stream buffer to hold actual message */ private void getRequest ( int function , BytesOutputStream out ) throws IOException { } }
int byteCount ; byte inpBuf [ ] = new byte [ 256 ] ; try { if ( ( function & 0x80 ) == 0 ) { switch ( function ) { case Modbus . READ_EXCEPTION_STATUS : case Modbus . READ_COMM_EVENT_COUNTER : case Modbus . READ_COMM_EVENT_LOG : case Modbus . REPORT_SLAVE_ID : readRequestData ( 0 , out ) ; break ; case Modbus . READ_FIFO_QUEUE : readRequestData ( 2 , out ) ; break ; case Modbus . READ_MEI : readRequestData ( 3 , out ) ; break ; case Modbus . READ_COILS : case Modbus . READ_INPUT_DISCRETES : case Modbus . READ_MULTIPLE_REGISTERS : case Modbus . READ_INPUT_REGISTERS : case Modbus . WRITE_COIL : case Modbus . WRITE_SINGLE_REGISTER : readRequestData ( 4 , out ) ; break ; case Modbus . MASK_WRITE_REGISTER : readRequestData ( 6 , out ) ; break ; case Modbus . READ_FILE_RECORD : case Modbus . WRITE_FILE_RECORD : byteCount = readByte ( ) ; out . write ( byteCount ) ; readRequestData ( byteCount , out ) ; break ; case Modbus . WRITE_MULTIPLE_COILS : case Modbus . WRITE_MULTIPLE_REGISTERS : readBytes ( inpBuf , 4 ) ; out . write ( inpBuf , 0 , 4 ) ; byteCount = readByte ( ) ; out . write ( byteCount ) ; readRequestData ( byteCount , out ) ; break ; case Modbus . READ_WRITE_MULTIPLE : readRequestData ( 8 , out ) ; byteCount = readByte ( ) ; out . write ( byteCount ) ; readRequestData ( byteCount , out ) ; break ; default : throw new IOException ( String . format ( "getResponse unrecognised function code [%s]" , function ) ) ; } } } catch ( IOException e ) { throw new IOException ( "getResponse serial port exception" ) ; }
public class SourceDocInfo {

    /**
     * Getter for uri - gets the value of the "uri" feature of this annotation.
     * (UIMA JCas generated accessor.)
     *
     * @generated
     * @return value of the feature
     */
    public String getUri() {
        // Generated feature check: fail fast if the type system lacks "uri".
        if (SourceDocInfo_Type.featOkTst && ((SourceDocInfo_Type) jcasType).casFeat_uri == null)
            jcasType.jcas.throwFeatMissing("uri", "de.unihd.dbs.uima.types.heideltime.SourceDocInfo");
        // Low-level CAS read of the string feature value.
        return jcasType.ll_cas.ll_getStringValue(addr, ((SourceDocInfo_Type) jcasType).casFeatCode_uri);
    }
}
public class DirectoryHelper { /** * Compress files and directories . */ private static void compressDirectory ( String relativePath , File srcPath , ZipOutputStream zip ) throws IOException { } }
if ( srcPath . isDirectory ( ) ) { zip . putNextEntry ( new ZipEntry ( relativePath + "/" + srcPath . getName ( ) + "/" ) ) ; zip . closeEntry ( ) ; String files [ ] = srcPath . list ( ) ; for ( int i = 0 ; i < files . length ; i ++ ) { compressDirectory ( relativePath + "/" + srcPath . getName ( ) , new File ( srcPath , files [ i ] ) , zip ) ; } } else { InputStream in = new FileInputStream ( srcPath ) ; try { zip . putNextEntry ( new ZipEntry ( relativePath + "/" + srcPath . getName ( ) ) ) ; transfer ( in , zip ) ; zip . closeEntry ( ) ; } finally { if ( in != null ) { in . close ( ) ; } } }
public class TaskSlotTable { /** * Check whether the slot for the given index is allocated for the given job and allocation id . * @ param index of the task slot * @ param jobId for which the task slot should be allocated * @ param allocationId which should match the task slot ' s allocation id * @ return True if the given task slot is allocated for the given job and allocation id */ public boolean isAllocated ( int index , JobID jobId , AllocationID allocationId ) { } }
TaskSlot taskSlot = taskSlots . get ( index ) ; return taskSlot . isAllocated ( jobId , allocationId ) ;
public class RegularCyclicVertexSearch {

    /**
     * Performs a depth first search from the vertex <i>v</i>, detecting cycles
     * via bit-set snapshots of the visit state.
     *
     * @param v vertex to search from
     * @param prev the state before we visited our parent (previous state)
     * @param curr the current state (including our parent)
     */
    private void search(int v, long prev, long curr) {
        state[v] = curr; // store the state before we visited v
        curr = setBit(curr, v); // include v in our current state (state[v] is unmodified)
        visited |= curr; // mark v as visited (or being visited)
        // neighbors of v
        for (int w : g[v]) {
            // w has been visited or is partially visited further up stack
            if (visited(w)) {
                // if w is in our prev state we have a cycle of size > 2.
                // we don't check our current state as this will always
                // include w - they are adjacent
                if (isBitSet(prev, w)) {
                    numCycles++;
                    // xor the state when we last visited 'w' with our current
                    // state. this set is all the vertices we visited since then
                    // and are all in a cycle
                    add(state[w] ^ curr);
                }
            } else {
                // recursively call for the unvisited neighbor w
                search(w, state[v], curr);
            }
        }
    }
}
public class DataEncryption { /** * Encrypts the specified plainText using AES - 256 and returns a Base64 encoded * representation of the encrypted bytes . * @ param secretKey the secret key to use to encrypt with * @ param plainText the text to encrypt * @ return a Base64 encoded representation of the encrypted bytes * @ throws Exception a number of exceptions may be thrown * @ since 1.3.0 */ public static String encryptAsString ( final SecretKey secretKey , final String plainText ) throws Exception { } }
return Base64 . getEncoder ( ) . encodeToString ( encryptAsBytes ( secretKey , plainText ) ) ;
public class DatabasesInner {

    /**
     * Remove Database principals permissions.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param databaseName The name of the database in the Kusto cluster.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<DatabasePrincipalListResultInner> removePrincipalsAsync(String resourceGroupName, String clusterName, String databaseName, final ServiceCallback<DatabasePrincipalListResultInner> serviceCallback) {
        // Bridge the reactive response pipeline to the callback-style API.
        return ServiceFuture.fromResponse(removePrincipalsWithServiceResponseAsync(resourceGroupName, clusterName, databaseName), serviceCallback);
    }
}
public class DescribePointSurf {

    /**
     * Computes the sign of the Laplacian using a sparse convolution of
     * second-derivative box kernels over the integral image.
     *
     * @param x center
     * @param y center
     * @param scale scale of the feature
     * @return true if positive
     */
    public boolean computeLaplaceSign(int x, int y, double scale) {
        int s = (int) Math.ceil(scale);
        // 9*s kernel size — presumably mirrors SURF's 9x9 base box filter
        // scaled to the feature's scale; TODO confirm against kernelDerivXX docs.
        kerXX = DerivativeIntegralImage.kernelDerivXX(9 * s, kerXX);
        kerYY = DerivativeIntegralImage.kernelDerivYY(9 * s, kerYY);
        // Laplacian = Lxx + Lyy evaluated only at (x, y).
        double lap = GIntegralImageOps.convolveSparse(ii, kerXX, x, y);
        lap += GIntegralImageOps.convolveSparse(ii, kerYY, x, y);
        return lap > 0;
    }
}
public class SwaggerValidatorActivity {

    /**
     * Populates the "response" variable with a default JSON status object
     * reflecting the validation result, and on error also sets the HTTP
     * status code response header. Can be overwritten by custom logic in a
     * downstream activity, or in a JsonRestService implementation.
     *
     * @param result the validation outcome to report
     * @return true when validation succeeded, false on error
     * @throws ActivityException if the response (or response headers)
     *         variable is not declared on the process
     */
    protected Object handleResult(Result result) throws ActivityException {
        ServiceValuesAccess serviceValues = getRuntimeContext().getServiceValues();
        StatusResponse statusResponse;
        if (result.isError()) {
            logsevere("Validation error: " + result.getStatus().toString());
            statusResponse = new StatusResponse(result.getWorstCode(), result.getStatus().getMessage());
            String responseHeadersVarName = serviceValues.getResponseHeadersVariableName();
            Map<String, String> responseHeaders = serviceValues.getResponseHeaders();
            if (responseHeaders == null) {
                // No headers yet: the variable must at least be declared.
                Variable responseHeadersVar = getMainProcessDefinition().getVariable(responseHeadersVarName);
                if (responseHeadersVar == null)
                    throw new ActivityException("Missing response headers variable: " + responseHeadersVarName);
                responseHeaders = new HashMap<>();
            }
            // Propagate the validation failure's HTTP status to the caller.
            responseHeaders.put(Listener.METAINFO_HTTP_STATUS_CODE, String.valueOf(statusResponse.getStatus().getCode()));
            setVariableValue(responseHeadersVarName, responseHeaders);
        } else {
            statusResponse = new StatusResponse(com.centurylink.mdw.model.Status.OK, "Valid request");
        }
        String responseVariableName = serviceValues.getResponseVariableName();
        Variable responseVariable = getMainProcessDefinition().getVariable(responseVariableName);
        if (responseVariable == null)
            throw new ActivityException("Missing response variable: " + responseVariableName);
        Object responseObject;
        if (responseVariable.getType().equals(Jsonable.class.getName()))
            responseObject = statusResponse; // _type has not been set, so serialization would fail
        else
            responseObject = serviceValues.fromJson(responseVariableName, statusResponse.getJson());
        setVariableValue(responseVariableName, responseObject);
        return !result.isError();
    }
}
public class BAMInputFormat {

    /**
     * Converts the file splits for one BAM file (starting at index {@code i})
     * into virtual splits by guessing, for each split, the offset of the first
     * complete BAM record. Splits in which no record starts are merged into
     * the preceding virtual split. This avoids re-opening the file repeatedly
     * and checking addIndexedSplits for an index repeatedly.
     *
     * @param splits the raw file splits
     * @param i index of the first split belonging to the current path
     * @param newSplits receives the generated virtual splits
     * @param cfg Hadoop configuration used to open the file
     * @return the index of the first split belonging to a different path
     * @throws IOException if the file cannot be read, or the first split of a
     *         file contains no records
     */
    private int addProbabilisticSplits(List<InputSplit> splits, int i, List<InputSplit> newSplits, Configuration cfg) throws IOException {
        final Path path = ((FileSplit) splits.get(i)).getPath();
        try (final SeekableStream sin = WrapSeekable.openPath(path.getFileSystem(cfg), path)) {
            final BAMSplitGuesser guesser = new BAMSplitGuesser(sin, cfg);
            FileVirtualSplit previousSplit = null;
            for (; i < splits.size(); ++i) {
                FileSplit fspl = (FileSplit) splits.get(i);
                // Stop at the first split belonging to another file.
                if (!fspl.getPath().equals(path))
                    break;
                long beg = fspl.getStart();
                long end = beg + fspl.getLength();
                long alignedBeg = guesser.guessNextBAMRecordStart(beg, end);
                // As the guesser goes to the next BGZF block before looking for BAM
                // records, the ending BGZF blocks have to always be traversed fully.
                // Hence force the length to be 0xffff, the maximum possible.
                long alignedEnd = end << 16 | 0xffff;
                if (alignedBeg == end) {
                    // No records detected in this split: merge it to the previous one.
                    // This could legitimately happen e.g. if we have a split that is
                    // so small that it only contains the middle part of a BGZF block.
                    // Of course, if it's the first split, then this is simply not a
                    // valid BAM file.
                    // FIXME: In theory, any number of splits could only contain parts
                    // of the BAM header before we start to see splits that contain BAM
                    // records. For now, we require that the split size is at least as
                    // big as the header and don't handle that case.
                    if (previousSplit == null)
                        throw new IOException("'" + path + "': " + "no reads in first split: bad BAM file or tiny split size?");
                    previousSplit.setEndVirtualOffset(alignedEnd);
                } else {
                    previousSplit = new FileVirtualSplit(path, alignedBeg, alignedEnd, fspl.getLocations());
                    if (logger.isDebugEnabled()) {
                        // Virtual offsets pack (BGZF block offset << 16 | in-block offset).
                        final long byteOffset = alignedBeg >>> 16;
                        final long recordOffset = alignedBeg & 0xffff;
                        logger.debug("Split {}: byte offset: {} record offset: {}, virtual offset: {}", i, byteOffset, recordOffset, alignedBeg);
                    }
                    newSplits.add(previousSplit);
                }
            }
        }
        return i;
    }
}
public class OpenPgpContact { /** * Update the contacts keys by consulting the users PubSub nodes . * This method fetches the users metadata node and then tries to fetch any announced keys . * @ param connection our { @ link XMPPConnection } . * @ throws InterruptedException In case the thread gets interrupted . * @ throws SmackException . NotConnectedException in case the connection is not connected . * @ throws SmackException . NoResponseException in case the server doesn ' t respond . * @ throws XMPPException . XMPPErrorException in case of an XMPP protocol error . * @ throws PubSubException . NotALeafNodeException in case the metadata node is not a { @ link LeafNode } . * @ throws PubSubException . NotAPubSubNodeException in case the metadata node is not a PubSub node . * @ throws IOException IO is brittle . */ public void updateKeys ( XMPPConnection connection ) throws InterruptedException , SmackException . NotConnectedException , SmackException . NoResponseException , XMPPException . XMPPErrorException , PubSubException . NotALeafNodeException , PubSubException . NotAPubSubNodeException , IOException { } }
PublicKeysListElement metadata = OpenPgpPubSubUtil . fetchPubkeysList ( connection , getJid ( ) ) ; if ( metadata == null ) { return ; } updateKeys ( connection , metadata ) ;
public class KafkaRequestHandler { /** * This implementation of onAdd watches for responses , and immediately sends them out to the * appropriate exchange / queue for routing to the necessary client application ( s ) */ @ Override public void onAdd ( Response response ) { } }
ensureInitialized ( ) ; KeyValuePair keyValuePair = new KeyValuePair ( KafkaConstants . RESPONSE_TYPE , gson . toJson ( response ) ) ; String message = gson . toJson ( keyValuePair ) ; try { kafkaSender . send ( buildTopicNames ( response ) , message ) ; } catch ( InterruptedException | ExecutionException e ) { logger . error ( "Failed to send add response message to output topic." , e ) ; }
public class GenericServlet { /** * Returns the name of this servlet instance . * See { @ link ServletConfig # getServletName } . * @ return the name of this servlet instance */ public String getServletName ( ) { } }
ServletConfig sc = getServletConfig ( ) ; if ( sc == null ) { throw new IllegalStateException ( lStrings . getString ( "err.servlet_config_not_initialized" ) ) ; } return sc . getServletName ( ) ;
public class Conversions { /** * Convert from RDF typed literal to native Java object . * @ param lit RDF typed literal * @ return Java object converted from the lexical value based on the mappings specified by the literal datatype URI . */ public static Object toData ( TypedLiteral lit ) { } }
if ( lit == null ) throw new IllegalArgumentException ( "Can't convert null literal" ) ; Conversion < ? > c = uriConversions . get ( lit . getDataType ( ) ) ; if ( c == null ) throw new IllegalArgumentException ( "Don't know how to convert literal of type " + lit . getDataType ( ) ) ; return c . data ( lit . getLexical ( ) ) ;
public class EnumUtil { /** * 获得枚举名对应指定字段值的Map < br > * 键为枚举名 , 值为字段值 * @ param clazz 枚举类 * @ param fieldName 字段名 , 最终调用getXXX方法 * @ return 枚举名对应指定字段值的Map */ public static Map < String , Object > getNameFieldMap ( Class < ? extends Enum < ? > > clazz , String fieldName ) { } }
final Enum < ? > [ ] enums = clazz . getEnumConstants ( ) ; if ( null == enums ) { return null ; } final Map < String , Object > map = MapUtil . newHashMap ( enums . length ) ; for ( Enum < ? > e : enums ) { map . put ( e . name ( ) , ReflectUtil . getFieldValue ( e , fieldName ) ) ; } return map ;
public class TransitionBuilder {

    /**
     * Finds the associated END_IF for an IF, ELSE_IF or ELSE pointer.
     *
     * @param pointer the node to start scanning forward from
     * @return the first following node whose row type is END_IF
     */
    private Tree<Row> findEndIf(Tree<Row> pointer) {
        // NOTE(review): assumes a matching END_IF always exists downstream;
        // if it doesn't, getNext() would eventually return null and the next
        // getContent() call would NPE — confirm the builder guarantees pairing.
        while (!Type.END_IF.equals(pointer.getContent().getType())) {
            pointer = pointer.getNext();
        }
        return pointer;
    }
}
public class LPC {

    /**
     * Calculate an LPC using the given auto-correlation data. Static method
     * used since this is slightly faster than a more strictly object-oriented
     * approach.
     *
     * @param lpc LPC to calculate; its rawCoefficients, tempCoefficients and
     *            rawError fields are overwritten in place
     * @param R   autocorrelation data to use; R[0] is the zero-lag (energy) term
     */
    public static void calculate(LPC lpc, long[] R) {
        int coeffCount = lpc.order;
        // calculate first iteration directly
        double[] A = lpc.rawCoefficients;
        for (int i = 0; i < coeffCount + 1; i++)
            A[i] = 0.0;
        // A[0] = 1 is the standard initialization of the recursion.
        A[0] = 1;
        double E = R[0];
        // calculate remaining iterations
        if (R[0] == 0) {
            // Degenerate (all-zero) signal: leave every coefficient at zero.
            for (int i = 0; i < coeffCount + 1; i++)
                A[i] = 0.0;
        } else {
            double[] ATemp = lpc.tempCoefficients;
            for (int i = 0; i < coeffCount + 1; i++)
                ATemp[i] = 0.0;
            for (int k = 0; k < coeffCount; k++) {
                // Reflection coefficient for this order:
                // lambda = -sum_{j<=k}(A[j] * R[k+1-j]) / E.
                double lambda = 0.0;
                double temp = 0;
                for (int j = 0; j <= k; j++) {
                    temp += A[j] * R[k + 1 - j];
                }
                lambda = -temp / E;
                // Symmetric coefficient update into the scratch array, then copy back.
                for (int i = 0; i <= k + 1; i++) {
                    ATemp[i] = A[i] + lambda * A[k + 1 - i];
                }
                System.arraycopy(ATemp, 0, A, 0, coeffCount + 1);
                // Prediction error shrinks by (1 - lambda^2) at each order.
                E = (1 - lambda * lambda) * E;
            }
        }
        lpc.rawError = E;
    }
}
public class GetJobRunRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( GetJobRunRequest getJobRunRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( getJobRunRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getJobRunRequest . getJobName ( ) , JOBNAME_BINDING ) ; protocolMarshaller . marshall ( getJobRunRequest . getRunId ( ) , RUNID_BINDING ) ; protocolMarshaller . marshall ( getJobRunRequest . getPredecessorsIncluded ( ) , PREDECESSORSINCLUDED_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class WriteRender {

    /**
     * Releases resources held by this render: disposes the superclass state,
     * flushes and closes the underlying stream (only when this instance owns
     * it, i.e. {@code needClean} is set), then drops all references.
     *
     * @throws SimpleImageException if flushing or closing the stream fails
     * @see com.alibaba.simpleimage.ImageRender#dispose()
     */
    @Override
    public void dispose() throws SimpleImageException {
        super.dispose();
        if (needClean) {
            if (stream != null) {
                try {
                    stream.flush();
                    stream.close();
                } catch (IOException e) {
                    throw new SimpleImageException(e);
                }
            }
        }
        // Clear references so the (potentially large) image data can be
        // garbage-collected even if this render object is retained.
        this.stream = null;
        this.image = null;
        this.param = null;
    }
}
public class SwitchPreference { /** * Adapts the preference ' s switch , depending on the preference ' s properties and on whether it is * currently checked or not . */ private void adaptSwitch ( ) { } }
if ( switchWidget != null ) { switchWidget . setTextOn ( getSwitchTextOn ( ) ) ; switchWidget . setTextOff ( getSwitchTextOff ( ) ) ; switchWidget . setShowText ( ! TextUtils . isEmpty ( getSwitchTextOn ( ) ) || ! TextUtils . isEmpty ( getSwitchTextOff ( ) ) ) ; switchWidget . setChecked ( isChecked ( ) ) ; }
public class IdToken { /** * Returns whether the audience in the payload contains only client IDs that are trusted as * specified in step 2 of < a * href = " http : / / openid . net / specs / openid - connect - basic - 1_0-27 . html # id . token . validation " > ID Token * Validation < / a > . * @ param trustedClientIds list of trusted client IDs */ public final boolean verifyAudience ( Collection < String > trustedClientIds ) { } }
Collection < String > audience = getPayload ( ) . getAudienceAsList ( ) ; if ( audience . isEmpty ( ) ) { return false ; } return trustedClientIds . containsAll ( audience ) ;
public class PrettyPrinter { /** * Removes any sensitive information ( such as an AdWords API developer token ) from the provided * XML . * @ return a Source if sanitizing was successful , or null if unable to parse and sanitize the * provided XML */ private Source sanitizeXml ( @ Nullable String xml ) { } }
if ( xml != null && ! sensitiveXPathStrings . isEmpty ( ) ) { try { DocumentBuilder documentBuilder = documentBuilderSupplier . get ( ) ; XPath xpath = xpathSupplier . get ( ) ; if ( documentBuilder != null && xpath != null ) { Document doc = documentBuilder . parse ( new InputSource ( new StringReader ( xml ) ) ) ; for ( String xpathString : sensitiveXPathStrings ) { XPathExpression expr = xpath . compile ( xpathString ) ; Node node = ( Node ) expr . evaluate ( doc , XPathConstants . NODE ) ; if ( node != null && node . getTextContent ( ) != null ) { node . setTextContent ( "REDACTED" ) ; } } return new DOMSource ( doc ) ; } } catch ( SAXException e ) { libLogger . warn ( "Unable to parse XML: {}" , e ) ; } catch ( IOException e ) { libLogger . warn ( "Unable to parse XML: {}" , e ) ; } catch ( XPathExpressionException e ) { libLogger . warn ( "Unable to parse XML: {}" , e ) ; } } return null ;
public class FlickerRenderer { /** * Stoppt das Rendern . */ public final void stop ( ) { } }
if ( this . thread != null ) { try { if ( this . thread != null ) { this . thread . interrupt ( ) ; synchronized ( this . thread ) { this . thread . notifyAll ( ) ; } } } finally { this . thread = null ; } }
public class MapViewProjection { /** * Computes vertical extend of the map view . * @ return the latitude span of the map in degrees */ public double getLatitudeSpan ( ) { } }
if ( this . mapView . getWidth ( ) > 0 && this . mapView . getHeight ( ) > 0 ) { LatLong top = fromPixels ( 0 , 0 ) ; LatLong bottom = fromPixels ( 0 , this . mapView . getHeight ( ) ) ; return Math . abs ( top . latitude - bottom . latitude ) ; } throw new IllegalStateException ( INVALID_MAP_VIEW_DIMENSIONS ) ;
public class BuildWrappers {

    /**
     * List up all {@link BuildWrapperDescriptor}s that are applicable for the given project.
     *
     * @return
     *      The signature doesn't use {@link BuildWrapperDescriptor} to maintain compatibility
     *      with {@link BuildWrapper} implementations before 1.150.
     */
    public static List<Descriptor<BuildWrapper>> getFor(AbstractProject<?, ?> project) {
        List<Descriptor<BuildWrapper>> result = new ArrayList<>();
        // Descriptor of the project type itself; used below to let the project
        // kind veto individual wrappers.
        Descriptor pd = Jenkins.getInstance().getDescriptor((Class) project.getClass());
        for (Descriptor<BuildWrapper> w : BuildWrapper.all()) {
            // Skip wrappers the project descriptor explicitly rejects.
            if (pd instanceof AbstractProjectDescriptor && !((AbstractProjectDescriptor) pd).isApplicable(w))
                continue;
            if (w instanceof BuildWrapperDescriptor) {
                BuildWrapperDescriptor bwd = (BuildWrapperDescriptor) w;
                // Modern descriptors get a per-project applicability check.
                if (bwd.isApplicable(project))
                    result.add(bwd);
            } else {
                // old BuildWrapper that doesn't implement BuildWrapperDescriptor:
                // accepted unconditionally for backward compatibility.
                result.add(w);
            }
        }
        return result;
    }
}
public class DefaultGroovyMethods {

    /**
     * Modifies this collection by removing a single instance of the specified
     * element from this collection, if it is present. Essentially an alias for
     * {@link Collection#remove(Object)} but with no ambiguity for
     * {@code Collection<Integer>}.
     * Example:
     * <pre class="groovyTestCase">
     * def list = [1, 2, 3, 2]
     * list.removeElement(2)
     * assert [1, 3, 2] == list
     * </pre>
     *
     * @param self a Collection
     * @param o element to be removed from this collection, if present
     * @return true if an element was removed as a result of this call
     * @since 2.4.0
     */
    public static <E> boolean removeElement(Collection<E> self, Object o) {
        final boolean changed = self.remove(o);
        return changed;
    }
}
public class HttpClusterConfig { /** * Builds an HTTP - based cluster configuration from some replicas descriptors . * @ param local the local replica : This is the configuration that will be used by local agent to define itself and * start server endpoint . * @ param remotes known replicas in the cluster . * @ return a valid configuration . */ public static @ Nonnull ClusterConfig from ( @ Nonnull HttpReplica local , HttpReplica ... remotes ) { } }
return new HttpClusterConfig ( local , remotes ) ;
public class Evaluation {

    /**
     * Calculate the average F_beta score across all classes, using macro or micro averaging.
     *
     * @param beta      Beta value to use
     * @param averaging Averaging method to use
     */
    public double fBeta(double beta, EvaluationAveraging averaging) {
        if (getNumRowCounter() == 0.0) {
            return Double.NaN; // No data
        }
        int nClasses = confusion().getClasses().size();
        // Binary case: score the positive class (index 1) directly, regardless of averaging mode.
        if (nClasses == 2) {
            return EvaluationUtils.fBeta(beta, (long) truePositives.getCount(1), (long) falsePositives.getCount(1), (long) falseNegatives.getCount(1));
        }
        if (averaging == EvaluationAveraging.Macro) {
            // Macro: unweighted mean of per-class scores; classes reporting -1
            // (sentinel from the per-class overload) are excluded from the mean.
            double macroFBeta = 0.0;
            int count = 0;
            for (int i = 0; i < nClasses; i++) {
                double thisFBeta = fBeta(beta, i, -1);
                if (thisFBeta != -1) {
                    macroFBeta += thisFBeta;
                    count++;
                }
            }
            // NOTE(review): if every class is excluded, count == 0 and this
            // division yields NaN — confirm that is the intended result.
            macroFBeta /= count;
            return macroFBeta;
        } else if (averaging == EvaluationAveraging.Micro) {
            // Micro: pool TP/FP/FN counts across all classes, then compute one score.
            long tpCount = 0;
            long fpCount = 0;
            long fnCount = 0;
            for (int i = 0; i < nClasses; i++) {
                tpCount += truePositives.getCount(i);
                fpCount += falsePositives.getCount(i);
                fnCount += falseNegatives.getCount(i);
            }
            return EvaluationUtils.fBeta(beta, tpCount, fpCount, fnCount);
        } else {
            throw new UnsupportedOperationException("Unknown averaging approach: " + averaging);
        }
    }
}
public class ModelsImpl { /** * Suggests examples that would improve the accuracy of the intent model . * @ param appId The application ID . * @ param versionId The version ID . * @ param intentId The intent classifier ID . * @ param getIntentSuggestionsOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the List & lt ; IntentsSuggestionExample & gt ; object */ public Observable < ServiceResponse < List < IntentsSuggestionExample > > > getIntentSuggestionsWithServiceResponseAsync ( UUID appId , String versionId , UUID intentId , GetIntentSuggestionsOptionalParameter getIntentSuggestionsOptionalParameter ) { } }
if ( this . client . endpoint ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.endpoint() is required and cannot be null." ) ; } if ( appId == null ) { throw new IllegalArgumentException ( "Parameter appId is required and cannot be null." ) ; } if ( versionId == null ) { throw new IllegalArgumentException ( "Parameter versionId is required and cannot be null." ) ; } if ( intentId == null ) { throw new IllegalArgumentException ( "Parameter intentId is required and cannot be null." ) ; } final Integer take = getIntentSuggestionsOptionalParameter != null ? getIntentSuggestionsOptionalParameter . take ( ) : null ; return getIntentSuggestionsWithServiceResponseAsync ( appId , versionId , intentId , take ) ;
public class FSDirectoryManager { /** * { @ inheritDoc } */ public boolean rename ( final String from , final String to ) { } }
final File src = new File ( baseDir , from ) ; final File dest = new File ( baseDir , to ) ; try { SecurityHelper . doPrivilegedIOExceptionAction ( new PrivilegedExceptionAction < Void > ( ) { public Void run ( ) throws IOException { DirectoryHelper . renameFile ( src , dest ) ; return null ; } } ) ; } catch ( IOException e ) { return false ; } return true ;
public class RandomVariableLazyEvaluation { /** * / * ( non - Javadoc ) * @ see net . finmath . stochastic . RandomVariable # getRealizationsStream ( ) */ @ Override public DoubleStream getRealizationsStream ( ) { } }
if ( isDeterministic ( ) ) { return DoubleStream . generate ( new DoubleSupplier ( ) { @ Override public double getAsDouble ( ) { return valueIfNonStochastic ; } } ) ; } else { return IntStream . range ( 0 , size ( ) ) . mapToDouble ( realizations ) . parallel ( ) ; }
public class ServletRedirectTask {

    /**
     * Initializes this task from its entity configuration, resolving the
     * response source and the redirect resource phrases.
     *
     * @param config the configuration to read values from
     */
    public void init(EntityConfig config) {
        // Instance members (the two lookups are independent of each other).
        this.resource = (Phrase) config.getValue(RESOURCE);
        this.response = (Phrase) config.getValue(SOURCE);
    }
}
public class PrefixedProperties {

    /**
     * Loads a YAML file, reading from the given InputStream. The InputStream
     * itself will not be closed after usage.
     *
     * @param is the stream to read YAML from
     * @throws IOException Signals that an I/O exception has occurred.
     */
    public void loadFromYAML(final InputStream is) throws IOException {
        // Write lock: loading mutates the backing properties.
        lock.writeLock().lock();
        try {
            final YAMLFactory f = new YAMLFactory();
            final YAMLParser jp = f.createParser(is);
            configureJsonParser(jp);
            // NOTE(review): jp is never closed. Closing it might auto-close the
            // caller's stream (which the contract forbids) unless
            // configureJsonParser disables AUTO_CLOSE_SOURCE — confirm, else
            // this leaks parser buffers.
            if (jp.nextToken() == JsonToken.START_OBJECT) {
                // Only object-rooted YAML documents are traversed; anything else is ignored.
                traverseJSON(jp, null);
            }
        } finally {
            lock.writeLock().unlock();
        }
    }
}
public class MessageWriter { /** * Writes the message body from a string using the given charset * as encoding and setting the content type to text / plain . * @ param bodyAsString Body to write as string * @ param charset The charset to encode the string */ public void writeBodyFromString ( String bodyAsString , Charset charset ) { } }
message . contentEncoding ( charset . name ( ) ) . contentType ( Message . TEXT_PLAIN ) ; byte [ ] bodyContent = bodyAsString . getBytes ( charset ) ; message . body ( bodyContent ) ;
public class FSDataset { /** * Get File name for a given block . */ public File getBlockFile ( int namespaceId , Block b ) throws IOException { } }
File f = validateBlockFile ( namespaceId , b ) ; if ( f == null ) { if ( InterDatanodeProtocol . LOG . isDebugEnabled ( ) ) { InterDatanodeProtocol . LOG . debug ( "b=" + b + ", volumeMap=" + volumeMap ) ; } throw new IOException ( "Block " + b + ", namespace= " + namespaceId + " is not valid." ) ; } return f ;
public class NotifierBase {

    /**
     * Dispatch a standard wave which could be handled by a custom method of the component.
     * This method is called from the JIT (JRebirth Internal Thread).
     *
     * @param wave the wave that contains all information
     * @throws WaveException if wave dispatching fails
     */
    private void processUndefinedWave(final Wave wave) throws WaveException {
        LOGGER.info(NOTIFIER_CONSUMES, wave.toString());
        wave.status(Status.Consumed);
        // Retrieve all interested object from the map
        if (this.notifierMap.containsKey(wave.waveType())) {
            final WaveSubscription ws = this.notifierMap.get(wave.waveType());
            // Filter and store all Wave handlers into the wave
            // They will be removed from the list when they are handled in order to know if there is any handler left before
            wave.setWaveHandlers(ws.getWaveHandlers().stream().filter(wh -> wh.check(wave)).collect(Collectors.toList()));
            // For each object interested in that wave type, process the action
            // Make a defensive copy to protect from concurrent modification exception when the wavehandler will be removed from list
            for (final WaveHandler waveHandler : new ArrayList<>(wave.getWaveHandlers())) {
                // The handler will be performed in the right thread according to RunType annotation
                waveHandler.handle(wave);
            }
        } else {
            // Nobody subscribed to this wave type: log it, and in developer mode
            // hand it to the unprocessed-wave handler for visibility.
            LOGGER.warn(NO_WAVE_LISTENER, wave.waveType().toString());
            if (CoreParameters.DEVELOPER_MODE.get()) {
                this.unprocessedWaveHandler.manageUnprocessedWave(NO_WAVE_LISTENER.getText(wave.waveType().toString()), wave);
            }
        }
        // The current wave will be marked as Handled when all Wave Handlers will be terminated
        // if (!wave.isRelated()) {
        //     LOGGER.info(NOTIFIER_HANDLES, wave.toString());
        //     wave.status(Status.Handled);
    }
}
public class FileBytes { /** * Maps a portion of the randomAccessFile into memory and returns a { @ link UnsafeMappedBytes } instance . * @ param offset The offset from which to map the randomAccessFile into memory . * @ param size The count of the bytes to map into memory . * @ param mode The mode in which to map the randomAccessFile into memory . * @ return The mapped bytes . * @ throws IllegalArgumentException If { @ code count } is greater than the maximum allowed * { @ link java . nio . MappedByteBuffer } count : { @ link Integer # MAX _ VALUE } */ public UnsafeMappedBytes map ( long offset , long size , FileChannel . MapMode mode ) { } }
return new UnsafeMappedBytes ( file , new MappedMemoryAllocator ( randomAccessFile , mode , offset ) . allocate ( size ) ) ;
public class DeleteVpcPeeringAuthorizationRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeleteVpcPeeringAuthorizationRequest deleteVpcPeeringAuthorizationRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( deleteVpcPeeringAuthorizationRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteVpcPeeringAuthorizationRequest . getGameLiftAwsAccountId ( ) , GAMELIFTAWSACCOUNTID_BINDING ) ; protocolMarshaller . marshall ( deleteVpcPeeringAuthorizationRequest . getPeerVpcId ( ) , PEERVPCID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class CPTaxCategoryServiceBaseImpl {

    /**
     * Sets the cp definition specification option value local service.
     *
     * @param cpDefinitionSpecificationOptionValueLocalService the cp definition
     *        specification option value local service
     */
    public void setCPDefinitionSpecificationOptionValueLocalService(com.liferay.commerce.product.service.CPDefinitionSpecificationOptionValueLocalService cpDefinitionSpecificationOptionValueLocalService) {
        // Plain dependency-injection setter (wired by the service framework);
        // no validation by design.
        this.cpDefinitionSpecificationOptionValueLocalService = cpDefinitionSpecificationOptionValueLocalService;
    }
}
public class PropertiesEndpointGroup { /** * Creates a new { @ link EndpointGroup } instance that loads the host names ( or IP address ) and the port * numbers of the { @ link Endpoint } from the { @ link Properties } . The { @ link Properties } must contain at * least one property whose name starts with { @ code endpointKeyPrefix } : * < pre > { @ code * example . hosts . 0 = example1 . com : 36462 * example . hosts . 1 = example2 . com : 36462 * example . hosts . 2 = example3 . com : 36462 * } < / pre > * @ param properties the { @ link Properties } where the list of { @ link Endpoint } s is loaded from * @ param endpointKeyPrefix the property name prefix * @ throws IllegalArgumentException if failed to load any hosts from the specified { @ link Properties } */ public static PropertiesEndpointGroup of ( Properties properties , String endpointKeyPrefix ) { } }
return new PropertiesEndpointGroup ( loadEndpoints ( requireNonNull ( properties , "properties" ) , requireNonNull ( endpointKeyPrefix , "endpointKeyPrefix" ) , 0 ) ) ;
public class DescribedUserMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DescribedUser describedUser , ProtocolMarshaller protocolMarshaller ) { } }
if ( describedUser == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describedUser . getArn ( ) , ARN_BINDING ) ; protocolMarshaller . marshall ( describedUser . getHomeDirectory ( ) , HOMEDIRECTORY_BINDING ) ; protocolMarshaller . marshall ( describedUser . getPolicy ( ) , POLICY_BINDING ) ; protocolMarshaller . marshall ( describedUser . getRole ( ) , ROLE_BINDING ) ; protocolMarshaller . marshall ( describedUser . getSshPublicKeys ( ) , SSHPUBLICKEYS_BINDING ) ; protocolMarshaller . marshall ( describedUser . getTags ( ) , TAGS_BINDING ) ; protocolMarshaller . marshall ( describedUser . getUserName ( ) , USERNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class NetworkWatchersInner {

    /**
     * Gets all network watchers by resource group. Blocks on the async call and
     * wraps the full result in a single-page {@link PagedList} (the underlying
     * service response is not paginated).
     *
     * @param resourceGroupName The name of the resource group.
     * @return the PagedList&lt;NetworkWatcherInner&gt; object if successful.
     */
    public PagedList<NetworkWatcherInner> listByResourceGroup(String resourceGroupName) {
        PageImpl1<NetworkWatcherInner> page = new PageImpl1<>();
        // single() blocks until the one-and-only response arrives.
        page.setItems(listByResourceGroupWithServiceResponseAsync(resourceGroupName).toBlocking().single().body());
        page.setNextPageLink(null);
        return new PagedList<NetworkWatcherInner>(page) {
            @Override
            public Page<NetworkWatcherInner> nextPage(String nextPageLink) {
                // Single page only: there is never a next page to fetch.
                return null;
            }
        };
    }
}
public class Story {

    /**
     * Evaluates a function defined in ink.
     *
     * @param functionName the name of the function as declared in ink
     * @param arguments the arguments that the ink function takes, if any. Note
     *            that we don't (can't) do any validation on the number of
     *            arguments right now, so make sure you get it right!
     * @return The return value as returned from the ink function with
     *         {@code ~ return myValue}, or null if nothing is returned.
     * @throws Exception propagated from the underlying evaluation
     */
    public Object evaluateFunction(String functionName, Object[] arguments) throws Exception {
        // Delegates to the three-argument overload with a null middle argument —
        // presumably an optional text-output sink; confirm against that overload.
        return evaluateFunction(functionName, null, arguments);
    }
}
public class CommerceWarehousePersistenceImpl {

    /**
     * Returns an ordered range of all the commerce warehouses where groupId = &#63;
     * and active = &#63; and primary = &#63;.
     * Useful when paginating results. Returns a maximum of <code>end - start</code>
     * instances; <code>start</code> and <code>end</code> are result-set indexes, not
     * primary keys, and {@link QueryUtil#ALL_POS} for both returns the full result set.
     *
     * @param groupId the group ID
     * @param active the active
     * @param primary the primary
     * @param start the lower bound of the range of commerce warehouses
     * @param end the upper bound of the range of commerce warehouses (not inclusive)
     * @param orderByComparator the comparator to order the results by (optionally <code>null</code>)
     * @return the ordered range of matching commerce warehouses
     */
    @Override
    public List<CommerceWarehouse> findByG_A_P(long groupId, boolean active, boolean primary, int start, int end, OrderByComparator<CommerceWarehouse> orderByComparator) {
        // Delegates to the full overload; the trailing true presumably enables
        // the finder cache (Liferay convention) — confirm against that overload.
        return findByG_A_P(groupId, active, primary, start, end, orderByComparator, true);
    }
}
public class CnvBnRsToDate { /** * < p > Convert parameter with using name . < / p > * @ param pAddParam additional params , e . g . entity class UserRoleTomcat * to reveal derived columns for its composite ID , or field Enum class * to reveal Enum value by index . * @ param pFrom from a bean * @ param pName by a name * @ return pTo to a bean * @ throws Exception - an exception */ @ Override public final Date convert ( final Map < String , Object > pAddParam , final IRecordSet < RS > pFrom , final String pName ) throws Exception { } }
Long longVal = pFrom . getLong ( pName ) ; if ( longVal != null ) { return new Date ( longVal ) ; } return null ;
public class Broadcast { /** * Details on the HLS and RTMP broadcast streams . For an HLS stream , the URL is provided . * See the < a href = " https : / / tokbox . com / developer / guides / broadcast / live - streaming / " > OpenTok * live streaming broadcast developer guide < / a > for more information on how to use this URL . * For each RTMP stream , the RTMP server URL and stream name are provided , along with the RTMP * stream ' s status . */ @ JsonProperty ( "broadcastUrls" ) private void unpack ( Map < String , Object > broadcastUrls ) { } }
if ( broadcastUrls == null ) return ; hls = ( String ) broadcastUrls . get ( "hls" ) ; ArrayList < Map < String , String > > rtmpResponse = ( ArrayList < Map < String , String > > ) broadcastUrls . get ( "rtmp" ) ; if ( rtmpResponse == null || rtmpResponse . size ( ) == 0 ) return ; for ( Map < String , String > element : rtmpResponse ) { Rtmp rtmp = new Rtmp ( ) ; rtmp . setId ( element . get ( "id" ) ) ; rtmp . setServerUrl ( element . get ( "serverUrl" ) ) ; rtmp . setStreamName ( element . get ( "streamName" ) ) ; this . rtmpList . add ( rtmp ) ; }
public class Reflect {

    /**
     * The full blown resolver method. All other method invocation methods
     * delegate to this. The method may be static or dynamic unless staticOnly
     * is set (in which case object may be null). If staticOnly is set then only
     * static methods will be located.
     * This method performs caching (caches discovered methods through the class
     * manager and utilizes cached methods). It determines whether to attempt to
     * use non-public methods based on Capabilities.haveAccessibility() and will
     * set the accessibility flag on the method as necessary.
     * If, when directed to find a static method, this method locates a more
     * specific matching instance method it will throw a descriptive exception
     * analogous to the error the Java compiler would produce.
     * Note: as of 2.0.x this is a problem because there is no way to work
     * around it with a cast.
     *
     * @param staticOnly the method located must be static; the object param may be null
     * @return the method or null if no matching method was found
     */
    protected static Invocable resolveJavaMethod(Class<?> clas, String name, Class<?>[] types, boolean staticOnly) throws UtilEvalError {
        if (clas == null)
            throw new InterpreterError("null class");
        // Per-class member cache; findMethod performs the overload resolution.
        Invocable method = BshClassManager.memberCache.get(clas).findMethod(name, types);
        // Throws the descriptive error when a static method was required but an
        // instance method matched.
        checkFoundStaticMethod(method, staticOnly, clas);
        return method;
    }
}
public class Messager {

    /**
     * Print warning message, increment warning count.
     * Part of DocErrorReporter.
     *
     * @param pos the position where the error occurs
     * @param msg message to print
     */
    public void printWarning(SourcePosition pos, String msg) {
        // When a diagnostic listener is registered, route through it instead of
        // printing directly (the listener handles its own counting/formatting).
        if (diagListener != null) {
            report(DiagnosticType.WARNING, pos, msg);
            return;
        }
        // Cap output at MaxWarnings; further warnings are silently dropped.
        if (nwarnings < MaxWarnings) {
            // Prefix with the source position when available, else the tool name.
            String prefix = (pos == null) ? programName : pos.toString();
            PrintWriter warnWriter = getWriter(WriterKind.WARNING);
            warnWriter.println(prefix + ": " + getText("javadoc.warning") + " - " + msg);
            warnWriter.flush();
            nwarnings++;
        }
    }
}
public class BasePTA { /** * Retrieves the state reached by the given word ( represented as an { @ code int } array ) . If there is no path for the * given word in the PTA , { @ code null } is returned . * @ param word * the word * @ return the state reached by this word , or { @ code null } if there is no path for the given word in the PTA */ @ Nullable public S getState ( int [ ] word ) { } }
S curr = root ; int len = word . length ; for ( int i = 0 ; i < len && curr != null ; i ++ ) { curr = curr . getSuccessor ( word [ i ] ) ; } return curr ;
public class DeleteTranscriptionJobRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeleteTranscriptionJobRequest deleteTranscriptionJobRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( deleteTranscriptionJobRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteTranscriptionJobRequest . getTranscriptionJobName ( ) , TRANSCRIPTIONJOBNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class UpdatePartitionRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( UpdatePartitionRequest updatePartitionRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( updatePartitionRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updatePartitionRequest . getCatalogId ( ) , CATALOGID_BINDING ) ; protocolMarshaller . marshall ( updatePartitionRequest . getDatabaseName ( ) , DATABASENAME_BINDING ) ; protocolMarshaller . marshall ( updatePartitionRequest . getTableName ( ) , TABLENAME_BINDING ) ; protocolMarshaller . marshall ( updatePartitionRequest . getPartitionValueList ( ) , PARTITIONVALUELIST_BINDING ) ; protocolMarshaller . marshall ( updatePartitionRequest . getPartitionInput ( ) , PARTITIONINPUT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class HiveOrcSerDeManager {

    /**
     * Extensible if there's other source-of-truth for fetching schema instead of interacting with HDFS.
     * For purpose of initializing {@link org.apache.hadoop.hive.ql.io.orc.OrcSerde} object, it will require:
     * org.apache.hadoop.hive.serde.serdeConstants#LIST_COLUMNS and
     * org.apache.hadoop.hive.serde.serdeConstants#LIST_COLUMN_TYPES
     * Keeping {@link #SCHEMA_LITERAL} will be a nice-to-have thing but not actually necessary in terms of functionality.
     *
     * @param path directory whose latest file supplies the ORC schema
     * @param hiveUnit registration unit that receives the SerDe properties
     * @throws IOException if reading the schema from the filesystem fails
     * @throws IllegalStateException if the discovered schema is not a struct
     */
    protected void addSchemaPropertiesHelper(Path path, HiveRegistrationUnit hiveUnit) throws IOException {
        TypeInfo schema = getSchemaFromLatestFile(path, this.fs);
        if (schema instanceof StructTypeInfo) {
            StructTypeInfo structTypeInfo = (StructTypeInfo) schema;
            hiveUnit.setSerDeProp(SCHEMA_LITERAL, schema);
            // Column names and their types are serialized as parallel
            // comma-separated lists, as the Hive SerDe properties expect.
            hiveUnit.setSerDeProp(serdeConstants.LIST_COLUMNS, Joiner.on(",").join(structTypeInfo.getAllStructFieldNames()));
            hiveUnit.setSerDeProp(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(",").join(structTypeInfo.getAllStructFieldTypeInfos().stream().map(x -> x.getTypeName()).collect(Collectors.toList())));
        } else {
            // Hive always uses a struct with a field for each of the top-level columns as the root object type.
            // So for here we assume to-be-registered ORC files follow this pattern.
            throw new IllegalStateException("A valid ORC schema should be an instance of struct");
        }
    }
}
public class SvgGraphicsContext { /** * Show the specified group . If the group does not exist , nothing will happen . * @ param group * The group object . */ public void unhide ( Object group ) { } }
if ( isAttached ( ) ) { Element element = helper . getGroup ( group ) ; if ( element != null ) { Dom . setElementAttribute ( element , "display" , "inline" ) ; } }
public class PickleUtils { /** * read a number of signed bytes */ public static byte [ ] readbytes ( InputStream input , int n ) throws IOException { } }
byte [ ] buffer = new byte [ n ] ; readbytes_into ( input , buffer , 0 , n ) ; return buffer ;
public class ListTemplatesResult { /** * An array the contains the name and creation time stamp for each template in your Amazon SES account . * @ return An array the contains the name and creation time stamp for each template in your Amazon SES account . */ public java . util . List < TemplateMetadata > getTemplatesMetadata ( ) { } }
if ( templatesMetadata == null ) { templatesMetadata = new com . amazonaws . internal . SdkInternalList < TemplateMetadata > ( ) ; } return templatesMetadata ;
public class Tuple { /** * Shifts the first element of the tuple resulting in a tuple of degree - 1. * @ return A new tuple without the shifted element ; */ public Tuple shiftLeft ( ) { } }
if ( degree ( ) < 2 ) { return Tuple0 . INSTANCE ; } Object [ ] copy = new Object [ degree ( ) - 1 ] ; System . arraycopy ( array ( ) , 1 , copy , 0 , copy . length ) ; return NTuple . of ( copy ) ;
public class RegisterEcsClusterRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( RegisterEcsClusterRequest registerEcsClusterRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( registerEcsClusterRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( registerEcsClusterRequest . getEcsClusterArn ( ) , ECSCLUSTERARN_BINDING ) ; protocolMarshaller . marshall ( registerEcsClusterRequest . getStackId ( ) , STACKID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class BlockThreadManager { /** * Start any available FrameThread objects encoding , so long as there are * waiting BlockEncodeRequest objects . */ synchronized private void startFrameThreads ( ) { } }
if ( ! process ) return ; int requests = unassignedEncodeRequests . size ( ) ; int frames = inactiveFrameThreads . size ( ) ; frames = ( requests <= frames ) ? requests : frames ; for ( int i = 0 ; i < frames ; i ++ ) { FrameThread ft = inactiveFrameThreads . remove ( 0 ) ; Thread thread = new Thread ( ft ) ; frameThreadMap . put ( ft , thread ) ; thread . start ( ) ; }
public class ImageRenderer {
  /**
   * Determine the path value of an image value.
   *
   * @param context   the {@link FacesContext} for the current request
   * @param component the component to obtain the image information from
   * @return the encoded path to the image source, or the literal "RES_NOT_FOUND"
   *         when the named resource cannot be resolved
   */
  public static String getImageSource(FacesContext context, UIComponent component) {
    Image image = (Image) component;
    ResourceHandler handler = context.getApplication().getResourceHandler();
    String resourceName = image.getName();
    String value = image.getValue();
    if (value != null && value.length() > 0) {
      // 'value' takes precedence; warn (in development) if name/library were also set.
      if (resourceName != null && image.getLibrary() != null) {
        if (FacesContext.getCurrentInstance().isProjectStage(ProjectStage.Development)) {
          LOGGER.warning(
              "Please use either the 'value' attribute of b:image, or the 'name' and 'library' attribute pair. If all three attributes are provided, BootsFaces uses the 'value' attributes, ignoring both 'name' and 'library'.");
        }
      }
      if (handler.isResourceURL(value)) {
        // Already a resource URL - return it verbatim.
        return value;
      } else {
        // Translate a plain path through the view handler before encoding.
        value = context.getApplication().getViewHandler().getResourceURL(context, value);
        return (context.getExternalContext().encodeResourceURL(value));
      }
    }
    // No 'value' given: resolve via the JSF resource handler using name/library.
    String library = image.getLibrary();
    Resource res = handler.createResource(resourceName, library);
    if (res == null) {
      if (context.isProjectStage(ProjectStage.Development)) {
        String msg = "Unable to find resource " + resourceName;
        FacesMessages.error(component.getClientId(context), msg, msg);
      }
      // Marker value returned instead of a broken resource path.
      return "RES_NOT_FOUND";
    } else {
      return (context.getExternalContext().encodeResourceURL(res.getRequestPath()));
    }
  }
}
public class Tuple4 { /** * Apply attribute 3 as argument to a function and return a new tuple with the substituted argument . */ public final < U3 > Tuple4 < T1 , T2 , U3 , T4 > map3 ( Function < ? super T3 , ? extends U3 > function ) { } }
return Tuple . tuple ( v1 , v2 , function . apply ( v3 ) , v4 ) ;
public class RevisionUtils {
  /**
   * Splits a revision ID into its generation number and opaque suffix string.
   *
   * @param rev revision ID of the form "generation-suffix"
   * @return the suffix after the first '-', or null when no dash is present
   */
  public static String parseRevIDSuffix(String rev) {
    final int dash = rev.indexOf('-');
    return (dash < 0) ? null : rev.substring(dash + 1);
  }
}
public class IoUtils {
  /**
   * Copy method for copying data from an {@link InputStream} to a
   * {@link RandomAccessFile}.
   *
   * @param is     The input data.
   * @param raf    The file to write to.
   * @param offset The offset within the file to start writing at.
   * @param length The number of bytes to copy.
   * @param lrl    Optional listener notified after each completed byte range; may be null.
   * @param wl     Optional listener notified after each chunk is written; may be null.
   * @return The number of bytes copied (may be less than {@code length} if EOF is hit first).
   * @throws IOException If any IO error occurs.
   */
  public static long copy(final InputStream is, final RandomAccessFile raf, final long offset,
      long length, final LongRangeListener lrl, final WriteListener wl) throws IOException {
    if (length < 0) {
      throw new IllegalArgumentException("Invalid byte count: " + length);
    }
    final byte buffer[] = new byte[DEFAULT_BUFFER_SIZE];
    int bytesRead = 0;
    long written = 0;
    long filePosition = offset;
    try {
      while (length > 0) {
        // Never read past the requested byte count.
        if (length < DEFAULT_BUFFER_SIZE) {
          bytesRead = is.read(buffer, 0, (int) length);
        } else {
          bytesRead = is.read(buffer, 0, DEFAULT_BUFFER_SIZE);
        }
        if (bytesRead == -1) {
          // EOF before 'length' bytes arrived; return what was written so far.
          break;
        }
        length -= bytesRead;
        // Seek+write under the file's monitor so concurrent writers cannot interleave
        // between positioning and writing.
        synchronized (raf) {
          raf.seek(filePosition);
          raf.write(buffer, 0, bytesRead);
        }
        if (wl != null) {
          wl.onBytesRead(bytesRead);
        }
        if (lrl != null) {
          // Inclusive byte range just written.
          lrl.onRangeComplete(new LongRange(filePosition, filePosition + bytesRead - 1));
        }
        filePosition += bytesRead;
        written += bytesRead;
        // LOG.debug("IoUtils now written: {}", written);
      }
      return written;
    } catch (final IOException e) {
      LOG.debug("Got IOException during copy", e);
      throw e;
    } catch (final RuntimeException e) {
      LOG.warn("Runtime error", e);
      throw e;
    }
  }
}
public class DisableDomainTransferLockRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DisableDomainTransferLockRequest disableDomainTransferLockRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( disableDomainTransferLockRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( disableDomainTransferLockRequest . getDomainName ( ) , DOMAINNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class UnaryOperatorBuilder {
  /**
   * Builds the functional interface implementation and if previously provided calls the consumer.
   *
   * The built operator evaluates the registered cases in order; the first case whose predicate
   * matches supplies the result. When no case matches, the 'eventually' fallback is applied.
   *
   * @return the assembled {@link UnaryOperator}; never null
   */
  @Nonnull
  public final UnaryOperator<T> build() {
    // Snapshot the fallback so later mutation of this builder cannot affect the built operator.
    final UnaryOperator<T> eventuallyFinal = this.eventually;
    UnaryOperator<T> retval;
    // Snapshot the cases into an array for cheap, allocation-free iteration per call.
    final Case<LPredicate<T>, UnaryOperator<T>>[] casesArray = cases.toArray(new Case[cases.size()]);
    retval = Function4U.<T>unaryOp(a -> {
      try {
        // First matching predicate wins; evaluation follows registration order.
        for (Case<LPredicate<T>, UnaryOperator<T>> aCase : casesArray) {
          if (aCase.casePredicate().test(a)) {
            return aCase.caseFunction().apply(a);
          }
        }
        return eventuallyFinal.apply(a);
      } catch (Error e) { // NOSONAR
        // Errors are rethrown untouched, never routed through the handler.
        throw e;
      } catch (Throwable e) { // NOSONAR
        throw Handler.handleOrPropagate(e, handling);
      }
    });
    // Optional post-build hook.
    if (consumer != null) {
      consumer.accept(retval);
    }
    return retval;
  }
}
public class FeatureCollection { /** * Sets the list of features contained within this feature collection . All * previously existing features are removed as a result of setting this * property . * @ param features */ public void setFeatures ( List < Feature > features ) { } }
this . mFeatures . clear ( ) ; if ( features != null ) { this . mFeatures . addAll ( features ) ; }
public class RemoteTopicSpaceControl {
  /**
   * (non-Javadoc)
   * @see com.ibm.ws.sib.processor.runtime.SIMPRemoteTopicSpaceControllable#clearTopics()
   * Deletes all topics from the PubSubOutputHandler/Proxy and Matchspace.
   */
  public void clearTopics() {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
      SibTr.entry(tc, "clearTopics");
    // we only clear the topics if the remote me
    // has subscribed to some topics on this me.
    if (_outputHandler != null) {
      if (_outputHandler.getTopics() != null) {
        Iterator topics = Arrays.asList(_outputHandler.getTopics()).iterator();
        LinkedList topicSpaces = new LinkedList();
        LinkedList topicSpaceMappings = new LinkedList();
        // Iterate through each of the topics and add the matching ts uuid and ts mapping
        // (one uuid/mapping pair per topic, all from the same output handler).
        for (int i = 0; i < _outputHandler.getTopics().length; i++) {
          topicSpaces.add(_outputHandler.getTopicSpaceUuid().toString());
          topicSpaceMappings.add(_outputHandler.getTopicSpaceMapping());
        }
        // get the NeighbourProxyListener
        NeighbourProxyListener listener = _messageProcessor.getProxyHandler().getProxyListener();
        try {
          ExternalAutoCommitTransaction tran = _messageProcessor.getTXManager().createAutoCommitTransaction();
          listener.deleteProxySubscription(topics, topicSpaces.iterator(), topicSpaceMappings.iterator(),
              _outputHandler.getTargetMEUuid(), _outputHandler.getBusName(), tran);
        } catch (SIResourceException e) {
          // Failure is FFDC'd and logged but deliberately not rethrown to the caller.
          FFDCFilter.processException(e,
              "com.ibm.ws.sib.processor.runtime.RemoteTopicSpaceControl.clearTopics", "1:456:1.34", this);
          SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0002",
              new Object[] { "com.ibm.ws.sib.processor.runtime.RemoteTopicSpaceControl.clearTopics",
                  "1:461:1.34", SIMPUtils.getStackTrace(e) });
          SibTr.exception(tc, e);
        }
      } else {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
          SibTr.debug(tc, "No Topics to delete for topicspace with id " + _outputHandler.getDestinationName());
      }
    } // end if PSOH == null
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
      SibTr.exit(tc, "clearTopics");
  }
}
public class CQLSchemaManager {
  /**
   * Drop the keyspace for this DBService's tenant. The keyspace is dropped with the
   * following CQL command:
   * <pre>
   * DROP KEYSPACE "<i>keyspace</i>";
   * </pre>
   */
  public void dropKeyspace() {
    String cqlKeyspace = m_dbservice.getKeyspace();
    m_logger.info("Dropping keyspace: {}", cqlKeyspace);
    StringBuilder cql = new StringBuilder();
    cql.append("DROP KEYSPACE ");
    // NOTE(review): the Javadoc shows the keyspace double-quoted but no quotes are appended
    // here — confirm getKeyspace() already returns a quoted/safe identifier.
    cql.append(cqlKeyspace);
    cql.append(";");
    executeCQL(cql.toString());
  }
}
public class SipApplicationSessionActivationListenerAttribute {
  /**
   * (non-Javadoc)
   * @see javax.servlet.sip.SipApplicationSessionActivationListener#sessionWillPassivate(javax.servlet.sip.SipApplicationSessionEvent)
   */
  public void sessionWillPassivate(SipApplicationSessionEvent event) {
    // Logs the session id and the activation cause for this passivation callback.
    // NOTE(review): the callback name says "will passivate" while the message says
    // "just passivated" — confirm whether the wording is intentional before changing it.
    logger.info("Following sip application session just passivated " + event.getApplicationSession().getId()
        + " cause " + ((SipApplicationSessionActivationEvent) event).getCause());
  }
}
public class DefaultTransitionManager { /** * Stops all transitions . */ @ Override public void stopTransition ( ) { } }
// call listeners so they can perform their actions first , like modifying this adapter ' s transitions for ( int i = 0 , size = mListenerList . size ( ) ; i < size ; i ++ ) { mListenerList . get ( i ) . onTransitionEnd ( this ) ; } for ( int i = 0 , size = mTransitionList . size ( ) ; i < size ; i ++ ) { mTransitionList . get ( i ) . stopTransition ( ) ; }
public class IonReaderTextRawTokensX {
  /**
   * Skips the remaining content of a lob based on the token that introduced it.
   * Skips over the closing }} too.
   *
   * @param lobToken the token type that opened the lob content
   * @param sp       save point passed through to the skip helpers
   * @throws IOException on read failure
   */
  protected void skip_over_lob(int lobToken, SavePoint sp) throws IOException {
    switch (lobToken) {
    case IonTokenConstsX.TOKEN_STRING_DOUBLE_QUOTE:
      // clob with a double-quoted string body, then the closing punctuation.
      skip_double_quoted_string(sp);
      skip_clob_close_punctuation();
      break;
    case IonTokenConstsX.TOKEN_STRING_TRIPLE_QUOTE:
      // clob with a triple-quoted string body, then the closing punctuation.
      skip_triple_quoted_clob_string(sp);
      skip_clob_close_punctuation();
      break;
    case IonTokenConstsX.TOKEN_OPEN_DOUBLE_BRACE:
      // blob content; no separate close-punctuation call here.
      skip_over_blob(sp);
      break;
    default:
      error("unexpected token " + IonTokenConstsX.getTokenName(lobToken) + " encountered for lob content");
    }
  }
}
public class JSONSerializer { /** * Translate the ServiceWrapper into the wireline string . See { @ link ServiceWrapper } * @ param service the instance of the ServiceWrapper * @ return the wireline string */ public String marshalService ( ServiceWrapper service ) { } }
Gson gson = new Gson ( ) ; Service jsonService = composeServiceFromServiceWrapper ( service ) ; String jsonString = gson . toJson ( jsonService ) ; return jsonString ;
public class SpellingCheckRule { /** * Returns true iff the word at the given position should be ignored by the spell checker . * If possible , use { @ link # ignoreToken ( AnalyzedTokenReadings [ ] , int ) } instead . * @ since 2.6 */ protected boolean ignoreWord ( List < String > words , int idx ) throws IOException { } }
return ignoreWord ( words . get ( idx ) ) ;
public class FSImageSerialization { /** * Same comments apply for this method as for readString ( ) */ @ SuppressWarnings ( "deprecation" ) public static byte [ ] readBytes ( DataInputStream in ) throws IOException { } }
UTF8 ustr = TL_DATA . get ( ) . U_STR ; ustr . readFields ( in ) ; int len = ustr . getLength ( ) ; byte [ ] bytes = new byte [ len ] ; System . arraycopy ( ustr . getBytes ( ) , 0 , bytes , 0 , len ) ; return bytes ;
public class AnnivMasterHandler {
  /**
   * Called when a change in the record status is about to happen / has happened.
   * Keeps calendar appointments in sync with the anniversary record on add,
   * update, and delete.
   *
   * @param field          If this file change is due to a field, this is the field.
   * @param iChangeType    The type of change that occurred (see DBConstants *_TYPE values).
   * @param bDisplayOption If true, display any changes.
   * @return an error code (from the superclass implementation).
   */
  public int doRecordChange(FieldInfo field, int iChangeType, boolean bDisplayOption) {
    AnnivMaster recAnnivMaster = (AnnivMaster) this.getOwner();
    if (iChangeType == DBConstants.AFTER_ADD_TYPE) {
      // After an add: reposition on the record just written, sync its appointments,
      // then return the record to add mode for the next entry.
      Object bookmark = recAnnivMaster.getLastModified(DBConstants.BOOKMARK_HANDLE);
      try {
        recAnnivMaster.setHandle(bookmark, DBConstants.BOOKMARK_HANDLE);
        Calendar calStart = ((DateTimeField) this.getCalendarControl().getField(CalendarControl.START_ANNIV_DATE)).getCalendar();
        Calendar calEnd = ((DateTimeField) this.getCalendarControl().getField(CalendarControl.END_ANNIV_DATE)).getCalendar();
        recAnnivMaster.addAppointments(this.getAnniversary(), calStart, calEnd);
        recAnnivMaster.addNew();
      } catch (DBException ex) {
        // NOTE(review): failure here is only printed, not reflected in the return code —
        // confirm that is acceptable.
        ex.printStackTrace();
      }
    }
    if (iChangeType == DBConstants.AFTER_UPDATE_TYPE) {
      // After an update: drop the old appointments and rebuild for the current date range.
      Calendar calStart = ((DateTimeField) this.getCalendarControl().getField(CalendarControl.START_ANNIV_DATE)).getCalendar();
      Calendar calEnd = ((DateTimeField) this.getCalendarControl().getField(CalendarControl.END_ANNIV_DATE)).getCalendar();
      recAnnivMaster.removeAppointments(this.getAnniversary());
      recAnnivMaster.addAppointments(this.getAnniversary(), calStart, calEnd);
    }
    if (iChangeType == DBConstants.AFTER_DELETE_TYPE) {
      // After a delete: remove the appointments that belonged to this anniversary.
      recAnnivMaster.removeAppointments(this.getAnniversary());
    }
    return super.doRecordChange(field, iChangeType, bDisplayOption);
  }
}
public class InboundNatRulesInner {
  /**
   * Gets the specified load balancer inbound nat rule.
   *
   * @param resourceGroupName  The name of the resource group.
   * @param loadBalancerName   The name of the load balancer.
   * @param inboundNatRuleName The name of the inbound nat rule.
   * @param expand             Expands referenced resources.
   * @param serviceCallback    the async ServiceCallback to handle successful and failed responses.
   * @throws IllegalArgumentException thrown if parameters fail the validation
   * @return the {@link ServiceFuture} object
   */
  public ServiceFuture<InboundNatRuleInner> getAsync(String resourceGroupName, String loadBalancerName,
      String inboundNatRuleName, String expand, final ServiceCallback<InboundNatRuleInner> serviceCallback) {
    // Delegates to the observable-based variant and adapts it to a ServiceFuture/callback pair.
    return ServiceFuture.fromResponse(
        getWithServiceResponseAsync(resourceGroupName, loadBalancerName, inboundNatRuleName, expand),
        serviceCallback);
  }
}
public class Payments { /** * 根据refundNumber查询退款记录 * @ param refundNumber * @ return */ public RefundQuery refundQueryByRefundNumber ( String refundNumber ) { } }
RefundQueryRequestWrapper refundQueryRequestWrapper = new RefundQueryRequestWrapper ( ) ; refundQueryRequestWrapper . setRefundNumber ( refundNumber ) ; return refundQuery ( refundQueryRequestWrapper ) ;
public class PDLucene {
  /**
   * Get a searcher on this index. The searcher is cached and only rebuilt when
   * the underlying {@link IndexReader} instance changes.
   *
   * @return <code>null</code> if no reader or no searcher could be obtained
   * @throws IOException On IO error
   */
  @Nullable
  public IndexSearcher getSearcher() throws IOException {
    _checkClosing();
    final IndexReader aReader = getReader();
    if (aReader == null) {
      // Index not readable
      LOGGER.warn("Index not readable");
      return null;
    }
    if (m_aSearchReader == aReader) {
      // Reader did not change - use cached searcher
      assert m_aSearcher != null;
    } else {
      // Create new searcher only if necessary, and remember the reader it was built from.
      m_aSearchReader = aReader;
      m_aSearcher = new IndexSearcher(aReader);
    }
    return m_aSearcher;
  }
}
public class BlobContainersInner { /** * Clears legal hold tags . Clearing the same or non - existent tag results in an idempotent operation . ClearLegalHold clears out only the specified tags in the request . * @ param resourceGroupName The name of the resource group within the user ' s subscription . The name is case insensitive . * @ param accountName The name of the storage account within the specified resource group . Storage account names must be between 3 and 24 characters in length and use numbers and lower - case letters only . * @ param containerName The name of the blob container within the specified storage account . Blob container names must be between 3 and 63 characters in length and use numbers , lower - case letters and dash ( - ) only . Every dash ( - ) character must be immediately preceded and followed by a letter or number . * @ param tags Each tag should be 3 to 23 alphanumeric characters and is normalized to lower case at SRP . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the LegalHoldInner object */ public Observable < LegalHoldInner > clearLegalHoldAsync ( String resourceGroupName , String accountName , String containerName , List < String > tags ) { } }
return clearLegalHoldWithServiceResponseAsync ( resourceGroupName , accountName , containerName , tags ) . map ( new Func1 < ServiceResponse < LegalHoldInner > , LegalHoldInner > ( ) { @ Override public LegalHoldInner call ( ServiceResponse < LegalHoldInner > response ) { return response . body ( ) ; } } ) ;
public class AbstractPrintQuery {
  /**
   * Add a Phrase to this PrintQuery. A Phrase is something like:
   * <code>"$&lt;attribute[LastName]&gt; - $&lt;attribute[FirstName]&gt;"</code>
   * This would return "John - Doe". One Phrase can contain various selects
   * as defined for {@link #addSelect(String...)} and string to connect them.
   *
   * @param _key        key the phrase can be accessed
   * @param _phraseStmt phrase to add
   * @throws EFapsException on error (including a parse failure of the phrase statement)
   * @return this PrintQuery
   */
  public AbstractPrintQuery addPhrase(final String _key, final String _phraseStmt) throws EFapsException {
    ValueList list = null;
    final ValueParser parser = new ValueParser(new StringReader(_phraseStmt));
    try {
      list = parser.ExpressionString();
    } catch (final ParseException e) {
      // Parse failures are wrapped in the application exception type.
      throw new EFapsException(PrintQuery.class.toString(), e);
    }
    final Phrase phrase = new Phrase(_key, _phraseStmt, list);
    this.key2Phrase.put(_key, phrase);
    // Register one select per expression so the phrase's parts are fetched with the query.
    for (final String selectStmt : list.getExpressions()) {
      final OneSelect oneselect = new OneSelect(this, selectStmt);
      this.allSelects.add(oneselect);
      phrase.addSelect(oneselect);
      oneselect.analyzeSelectStmt();
    }
    return this;
  }
}
public class TagUtils {
  /**
   * Converts an arbitrary value's string form to a double.
   *
   * @param value the value to convert; may be null
   * @return the parsed double, or 0 when {@code value} is null
   * @throws NumberFormatException if the string form is not a parseable number
   */
  public static double getDouble(Object value) {
    if (value == null) {
      return 0;
    }
    // parseDouble avoids the needless boxing of Double.valueOf(...).doubleValue().
    return Double.parseDouble(value.toString());
  }
}